Exemplo n.º 1
0
    def _set_data(self, attrs):
        """Serialize *attrs* to this object's attribute YAML file.

        Write plugins (if any) are given a chance to transform both the
        attributes and the parent's meta dict before anything is written.
        """
        assert_file_writable(self.file)
        plugins = self.file.plugin_manager.attribute_plugins.write_order

        if self.mode != self._Mode.ATTRIBUTES or not plugins:
            # No plugin processing applies; dump the attributes unchanged.
            data_to_dump = attrs
        else:
            meta = self.parent.meta.to_dict()
            for plugin in plugins:
                # Each plugin may rewrite both attrs and meta.
                attribute_data = exdir.plugin_interface.AttributeData(attrs=attrs, meta=meta)
                attribute_data = plugin.prepare_write(attribute_data)
                meta = attribute_data.meta
                attrs = attribute_data.attrs

            data_to_dump = _quote_strings(attribute_data.attrs)
            # Persist any meta changes made by the plugins.
            self.parent.meta._set_data(meta)

        with self.filename.open("w", encoding="utf-8") as attribute_file:
            yaml.dump(
                data_to_dump,
                attribute_file,
                default_flow_style=False,
                allow_unicode=True,
                Dumper=yaml.RoundTripDumper,
            )
Exemplo n.º 2
0
    def _set_data(self, attrs):
        """Write *attrs* to the attribute file, applying write plugins first.

        Raises:
            IOError: if the object was opened read-only.
        """
        if self.io_mode == exob.Object.OpenMode.READ_ONLY:
            # FIX: the previous message used implicit string concatenation,
            # `"Cannot write in read only (" r") mode"`, which rendered as
            # "Cannot write in read only () mode" — the quoted mode character
            # was silently dropped.
            raise IOError('Cannot write in read only ("r") mode')

        plugins = self.plugin_manager.attribute_plugins.write_order

        if self.mode == self._Mode.ATTRIBUTES and len(plugins) > 0:
            meta = self.parent.meta.to_dict()
            for plugin in plugins:
                attribute_data = exdir.plugin_interface.AttributeData(
                    attrs=attrs, meta=meta)

                # A plugin may rewrite both the attributes and the meta dict.
                attribute_data = plugin.prepare_write(attribute_data)
                meta = attribute_data.meta
                attrs = attribute_data.attrs

            attribute_data_quoted = _quote_strings(attribute_data.attrs)
            # Persist meta changes made by the plugins.
            self.parent.meta._set_data(meta)
        else:
            attribute_data_quoted = attrs

        with self.filename.open("w", encoding="utf-8") as attribute_file:
            yaml.dump(attribute_data_quoted,
                      attribute_file,
                      default_flow_style=False,
                      allow_unicode=True,
                      Dumper=yaml.RoundTripDumper)
Exemplo n.º 3
0
    def _set_data(self, attrs):
        """Write *attrs* to the attribute file, applying write plugins first.

        Raises:
            IOError: if the object was opened read-only.
        """
        # Importing here is a workaround for Python 2.7. It could be avoided if OpenMode was
        # in a separate file
        from exdir.core.exdir_object import Object

        if self.io_mode == Object.OpenMode.READ_ONLY:
            # FIX: the previous message used implicit string concatenation,
            # `"Cannot write in read only (" r") mode"`, which rendered as
            # "Cannot write in read only () mode" — the quoted mode character
            # was silently dropped.
            raise IOError('Cannot write in read only ("r") mode')

        plugins = self.plugin_manager.attribute_plugins.write_order

        if self.mode == self._Mode.ATTRIBUTES and len(plugins) > 0:
            meta = self.parent.meta.to_dict()
            for plugin in plugins:
                attribute_data = exdir.plugin_interface.AttributeData(
                    attrs=attrs, meta=meta)

                # A plugin may rewrite both the attributes and the meta dict.
                attribute_data = plugin.prepare_write(attribute_data)
                meta = attribute_data.meta
                attrs = attribute_data.attrs

            attribute_data_quoted = _quote_strings(attribute_data.attrs)
            # Persist meta changes made by the plugins.
            self.parent.meta._set_data(meta)
        else:
            attribute_data_quoted = attrs

        with self.filename.open("w", encoding="utf-8") as attribute_file:
            yaml.dump(attribute_data_quoted,
                      attribute_file,
                      default_flow_style=False,
                      allow_unicode=True,
                      Dumper=yaml.RoundTripDumper)
Exemplo n.º 4
0
def yaml_dump(string):
    """Render the given object as YAML text.

    Prefers round-trip dumping with 4-space indentation; falls back to the
    implementation's defaults when ``RoundTripDumper`` is unavailable.
    """
    yaml = get_yaml()
    try:
        return yaml.dump(
            string,
            Dumper=yaml.RoundTripDumper,
            block_seq_indent=2,
            default_flow_style=False,
            indent=4,
        )
    except AttributeError:
        # The loaded yaml implementation lacks RoundTripDumper.
        return yaml.dump(string, default_flow_style=False)
Exemplo n.º 5
0
def write_calibration(cals, filename='calibration.yaml'):
    """Dump *cals* to *filename* as YAML.

    Returns 0 on success, 1 when serialization fails (the error is printed).
    """
    with open(filename, 'w') as stream:
        try:
            ruamel_yaml.dump(cals, stream)
        except ruamel_yaml.YAMLError as exc:
            # Report the failure instead of raising, matching the 0/1 contract.
            print(exc)
            return 1
        return 0
Exemplo n.º 6
0
def yaml_dump(string):
    """Serialize the given object to a YAML string.

    Uses the round-trip dumper when the active yaml implementation provides
    one, otherwise falls back to default dumping.
    """
    yaml = get_yaml()
    try:
        return yaml.dump(string, Dumper=yaml.RoundTripDumper,
                         block_seq_indent=2, default_flow_style=False, indent=4)
    except AttributeError:
        # No RoundTripDumper available on this implementation.
        return yaml.dump(string, default_flow_style=False)
Exemplo n.º 7
0
def _create_object_directory(directory, metadata):
    """
    Create object directory and meta file if directory
    don't already exist.
    """
    # Refuse to clobber an existing directory.
    if directory.exists():
        raise IOError("The directory '" + str(directory) + "' already exists")
    valid_types = [DATASET_TYPENAME, FILE_TYPENAME, GROUP_TYPENAME]
    # Validate the declared object type before touching the filesystem.
    typename = metadata[EXDIR_METANAME][TYPE_METANAME]
    if typename not in valid_types:
        raise ValueError(
            "{typename} is not a valid typename".format(typename=typename))
    directory.mkdir()
    meta_filename = directory / META_FILENAME
    with meta_filename.open("w", encoding="utf-8") as meta_file:
        if metadata == _default_metadata(typename):
            # if it is the default, we know how to print it fast
            # NOTE(review): the hand-written template (3-space indent, quoted
            # typename) is presumably byte-compatible with what yaml.dump would
            # produce for the default metadata — confirm before changing it.
            metadata_string = (''
                               '{exdir_meta}:\n'
                               '   {type_meta}: "{typename}"\n'
                               '   {version_meta}: {version}\n'
                               '').format(exdir_meta=EXDIR_METANAME,
                                          type_meta=TYPE_METANAME,
                                          typename=typename,
                                          version_meta=VERSION_METANAME,
                                          version=1)
        else:
            # Non-default metadata: serialize through the yaml library.
            metadata_string = yaml.dump(metadata)

        try:
            meta_file.write(metadata_string)
        except TypeError:
            # NOTE workaround for Python 2.7
            meta_file.write(metadata_string.decode('utf8'))
Exemplo n.º 8
0
def yaml_dump(string):
    """Return the given object rendered as YAML via the round-trip dumper."""
    yaml = get_yaml()
    return yaml.dump(string, Dumper=yaml.RoundTripDumper,
                     block_seq_indent=2, default_flow_style=False, indent=2)
Exemplo n.º 9
0
def dump_md(md, **kwargs):
    """Convert metadata content to a yaml string.

    Keyword Args:
        ascomments: when truthy, prefix every output line with ``# `` so the
            whole block reads as YAML comments.
    """
    text = yaml.dump(md, default_flow_style=False)
    if kwargs.get("ascomments", False):
        # splitlines(True) keeps the newlines, so joining with "# " comments
        # every subsequent line; the leading '# ' covers the first one.
        text = '# ' + "# ".join(text.splitlines(True))
    return text
Exemplo n.º 10
0
def yaml_dump(object):
    """dump object to string"""
    # Round-trip dumper with block style and 2-space indentation.
    dump_options = dict(Dumper=yaml.RoundTripDumper,
                        block_seq_indent=2,
                        default_flow_style=False,
                        indent=2)
    return yaml.dump(object, **dump_options)
Exemplo n.º 11
0
def main():
    """Parse the O2 cross-section file (BOLSIG+ format) and write it as YAML."""
    # Parse the cross-section file in BOLSIG+ format and load it into the
    # solver.
    with open("itikawa-2009-O2.txt") as fp:
        processes = parser.parse(fp)

    # Normalize the 'kind' field of every process to lower case.
    for cs in processes:
        cs["kind"] = cs["kind"].lower()

    data = {'cross_section': processes}

    # FIX: the output file was previously opened without a context manager and
    # never closed; `with` guarantees the handle is flushed and released.
    with open("oxygen_cross_sections.yaml", "w") as outfile:
        yaml.dump(data, outfile)
Exemplo n.º 12
0
def save_md(dest, md):
    """Save metadata to a file-like.

    Arguments:
        dest: file-like (or a filename string)
        md: dict-like
    """
    # Work on a deep copy so the caller's mapping is never mutated.
    cleaned = copy.deepcopy(md)
    # Drop entries whose key's last underscore-separated token is "nosave".
    # (This also matches a bare "nosave" key, unlike endswith("_nosave").)
    for key in list(cleaned):
        if key.split("_")[-1] == "nosave":
            del cleaned[key]

    if isinstance(dest, str):
        # A path was supplied: open and close the file ourselves.
        with open(dest, "w") as f:
            yaml.dump(cleaned, f, default_flow_style=False)
    elif isinstance(dest, TextIOBase):
        # An already-open text stream was supplied.
        yaml.dump(cleaned, dest, default_flow_style=False)
Exemplo n.º 13
0
    def to_txt(self, file, update_cdef=True, preamble=None):
        """
        Write the contents of a DataPlusMeta object to a text file.

        file : str
            Name or path of a text file

        Specified file is overwritten without warning if it exists.
        """
        if self.data is None:
            raise RuntimeError('There is no data to store.')

        # Truncate/create the target file up front — presumably to fail fast
        # on an unwritable path before any state below is modified;
        # TODO(review): confirm this is the intent.
        with open(file, 'w'):
            pass

        # Synchronize (or verify) the column definitions before writing.
        if update_cdef:
            self.update_cdef(raise_on_fail=True)
        else:
            self.check_cdef(raise_on_mismatch=True)

        # fill in missing names, if needed
        if self.cdef.index.name is None:
            self.cdef.index.name = 'column'

        if self.data.index.name is None:
            self.data.index.name = self.cdef.index[0]

        with open(file, 'w', encoding=ENCODING) as f:
            # UTF-8 byte-order mark is written first (see UTF8_BOM constant).
            f.write(UTF8_BOM)

            # Optional preamble goes in as comment lines before everything else.
            if preamble:
                lines = preamble.splitlines()
                for line in lines:
                    f.write(COMMENT_CHAR + ' ' + line + '\n')
                f.write(BLANK_LINE)

            # Metadata section, serialized as YAML.
            f.write(yaml.dump(self.meta, default_flow_style=False,
                              allow_unicode=True, Dumper=DUMPER))

            f.write(SECTION_SEPARATOR)

            # Note: line_terminator is set to '\n' to avoid double conversion
            # to \n\r on Windows when the buffer is written to the file

            # Column-definition section: header row ([:0]) first, then rows.
            self.cdef[:0].to_csv(f, index=True, **TO_CSV_OPTIONS)
            f.write(BLANK_LINE)
            self.cdef.to_csv(f, header=False, index=True, **TO_CSV_OPTIONS)

            f.write(SECTION_SEPARATOR)

            # Data section: header row first, then rows.
            self.data.iloc[:0].to_csv(f, index=True, **TO_CSV_OPTIONS)
            f.write(BLANK_LINE)
            self.data.to_csv(f, header=False, index=True, **TO_CSV_OPTIONS)
        return
Exemplo n.º 14
0
def _save_file(yaml, filename):
    """Serialize *yaml* (a ruamel round-trip document) and atomically write it
    to *filename*, creating the parent directory on first write."""
    contents = ryaml.dump(yaml, Dumper=ryaml.RoundTripDumper)

    # Re-parse the text we just produced so a ruamel.yaml bug can never leave
    # an unparseable file on disk.
    try:
        ryaml.load(contents, Loader=ryaml.RoundTripLoader)
    except YAMLError as e:  # pragma: no cover (should not happen)
        print("ruamel.yaml bug; it failed to parse a file that it generated.", file=sys.stderr)
        print("  the parse error was: " + str(e), file=sys.stderr)
        print("Generated file was:", file=sys.stderr)
        print(contents, file=sys.stderr)
        raise RuntimeError("Bug in ruamel.yaml library; failed to parse a file that it generated: " + str(e))

    if not os.path.isfile(filename):
        # First write: the containing directory may not exist yet.
        makedirs_ok_if_exists(os.path.dirname(filename))
    _atomic_replace(filename, contents)
Exemplo n.º 15
0
def _save_file(yaml, filename):
    """Validate and atomically write a ruamel.yaml document to *filename*.

    NOTE: the parameter name ``yaml`` shadows the conventional module name;
    here it is the document object to serialize (the module is ``ryaml``).
    """
    contents = ryaml.dump(yaml, Dumper=ryaml.RoundTripDumper)

    try:
        # This is to ensure we don't corrupt the file, even if ruamel.yaml is broken
        ryaml.load(contents, Loader=ryaml.RoundTripLoader)
    except YAMLError as e:  # pragma: no cover (should not happen)
        print("ruamel.yaml bug; it failed to parse a file that it generated.", file=sys.stderr)
        print("  the parse error was: " + str(e), file=sys.stderr)
        print("Generated file was:", file=sys.stderr)
        print(contents, file=sys.stderr)
        raise RuntimeError("Bug in ruamel.yaml library; failed to parse a file that it generated: " + str(e))

    if not os.path.isfile(filename):
        # might have to make the directory
        dirname = os.path.dirname(filename)
        makedirs_ok_if_exists(dirname)
    # Atomic replace so readers never observe a partially written file.
    _atomic_replace(filename, contents)
Exemplo n.º 16
0
    def save(self):
        """Write the file to disk, only if any changes have been made.

        Raises ``IOError`` if it fails for some reason.

        Returns:
            None
        """
        self._throw_if_corrupted()

        # Nothing to do when no modifications have been recorded.
        if not self._dirty:
            return

        contents = ryaml.dump(self._yaml, Dumper=ryaml.RoundTripDumper)

        # Round-trip the generated text so we can never write a file that
        # cannot be parsed back, even if ruamel.yaml misbehaves.
        try:
            ryaml.load(contents, Loader=ryaml.RoundTripLoader)
        except YAMLError as e:  # pragma: no cover (should not happen)
            print(
                "ruamel.yaml bug; it failed to parse a file that it generated.",
                file=sys.stderr)
            print("  the parse error was: " + str(e), file=sys.stderr)
            print("Generated file was:", file=sys.stderr)
            print(contents, file=sys.stderr)
            raise RuntimeError(
                "Bug in ruamel.yaml library; failed to parse a file that it generated: "
                + str(e))

        if not os.path.isfile(self.filename):
            # First save: the containing directory may not exist yet.
            makedirs_ok_if_exists(os.path.dirname(self.filename))
        _atomic_replace(self.filename, contents)
        self._change_count += 1
        self._dirty = False
Exemplo n.º 17
0
def skeletonize(packages, output_dir=".", version=None, recursive=False,
                all_urls=False, pypi_url='https://pypi.io/pypi/', noprompt=True,
                version_compare=False, python_version=default_python, manual_url=False,
                all_extras=False, noarch_python=False, config=None, setup_options=None,
                extra_specs=None,
                pin_numpy=False):
    """Generate conda recipe skeletons for the given PyPI *packages*.

    For each entry (a package name, or a direct URL containing ':'), package
    metadata is fetched from PyPI, dependency information is collected via
    ``get_package_metadata``, and a ``meta.yaml`` is rendered into
    ``output_dir/<package>/``.

    Fixes over the previous revision:
    - ``extra_specs`` no longer uses a mutable default argument;
    - the trailing-newline normalization actually takes effect (the old code
      called ``rstrip()`` and discarded the result, and the splitlines/join
      below stripped the final newline anyway);
    - the indent-fixing regex is now a raw string.
    """
    package_dicts = {}

    # Avoid the shared-mutable-default pitfall for list arguments.
    if extra_specs is None:
        extra_specs = []

    if not setup_options:
        setup_options = []

    if isinstance(setup_options, string_types):
        setup_options = [setup_options]

    if not config:
        config = Config()

    created_recipes = []
    while packages:
        package = packages.pop()
        created_recipes.append(package)

        # Entries containing ':' are direct URLs rather than package names.
        is_url = ':' in package

        if is_url:
            package_pypi_url = ''
        else:
            package_pypi_url = urljoin(pypi_url, '/'.join((package, 'json')))

        if not is_url:
            dir_path = join(output_dir, package.lower())
            if exists(dir_path) and not version_compare:
                raise RuntimeError("directory already exists: %s" % dir_path)
        d = package_dicts.setdefault(package,
            {
                'packagename': package.lower(),
                'run_depends': '',
                'build_depends': '',
                'entry_points': '',
                'test_commands': '',
                'tests_require': '',
            })
        if is_url:
            del d['packagename']

        if is_url:
            d['version'] = 'UNKNOWN'
            # Make sure there is always something to pass in for this
            pypi_data = {}
        else:
            sort_by_version = lambda l: sorted(l, key=parse_version)

            pypi_resp = requests.get(package_pypi_url, verify=not _ssl_no_verify())

            if pypi_resp.status_code != 200:
                sys.exit("Request to fetch %s failed with status: %d"
                        % (package_pypi_url, pypi_resp.status_code))

            pypi_data = pypi_resp.json()

            versions = sort_by_version(pypi_data['releases'].keys())

            if version_compare:
                # NOTE(review): `version_compare` is a boolean parameter yet is
                # invoked like a function here — confirm against upstream
                # conda-build before relying on this path.
                version_compare(versions)
            if version:
                if version not in versions:
                    sys.exit("Error: Version %s of %s is not available on PyPI."
                             % (version, package))
                d['version'] = version
            else:
                # select the most visible version from PyPI.
                if not versions:
                    sys.exit("Error: Could not find any versions of package %s" % package)
                if len(versions) > 1:
                    print("Warning, the following versions were found for %s" %
                          package)
                    for ver in versions:
                        print(ver)
                    print("Using %s" % versions[-1])
                    print("Use --version to specify a different version.")
                d['version'] = versions[-1]

        data, d['pypiurl'], d['filename'], d['digest'] = get_download_data(pypi_data,
                                                                           package,
                                                                           d['version'],
                                                                           is_url, all_urls,
                                                                           noprompt, manual_url)

        d['import_tests'] = ''

        # Get summary and description directly from the metadata returned
        # from PyPI. summary will be pulled from package information in
        # get_package_metadata or a default value set if it turns out that
        # data['summary'] is empty.
        d['summary'] = data.get('summary', '')
        d['description'] = data.get('description', '')
        get_package_metadata(package, d, data, output_dir, python_version,
                             all_extras, recursive, created_recipes, noarch_python,
                             noprompt, packages, extra_specs, config=config,
                             setup_options=setup_options)

        # Set these *after* get_package_metadata so that the preferred hash
        # can be calculated from the downloaded file, if necessary.
        d['hash_type'] = d['digest'][0]
        d['hash_value'] = d['digest'][1]

        # Change requirements to use format that guarantees the numpy
        # version will be pinned when the recipe is built and that
        # the version is included in the build string.
        if pin_numpy:
            for depends in ['build_depends', 'run_depends']:
                deps = d[depends]
                numpy_dep = [idx for idx, dep in enumerate(deps)
                             if 'numpy' in dep]
                if numpy_dep:
                    # Turns out this needs to be inserted before the rest
                    # of the numpy spec.
                    deps.insert(numpy_dep[0], 'numpy x.x')
                    d[depends] = deps

    for package in package_dicts:
        d = package_dicts[package]
        name = d['packagename']
        makedirs(join(output_dir, name))
        print("Writing recipe for %s" % package.lower())
        with open(join(output_dir, name, 'meta.yaml'), 'w') as f:
            rendered_recipe = PYPI_META_HEADER.format(**d)

            ordered_recipe = ruamel_yaml.comments.CommentedMap()
            # Create all keys in expected ordered
            for key in EXPECTED_SECTION_ORDER:
                try:
                    ordered_recipe[key] = PYPI_META_STATIC[key]
                except KeyError:
                    ordered_recipe[key] = ruamel_yaml.comments.CommentedMap()

            if '://' not in pypi_url:
                raise ValueError("pypi_url must have protocol (e.g. http://) included")
            base_url = urlsplit(pypi_url)
            base_url = "://".join((base_url.scheme, base_url.netloc))
            ordered_recipe['source']['url'] = urljoin(base_url, ordered_recipe['source']['url'])

            if d['entry_points']:
                ordered_recipe['build']['entry_points'] = d['entry_points']

            if noarch_python:
                ordered_recipe['build']['noarch'] = 'python'

            ordered_recipe['build']['script'] = 'python setup.py install ' + ' '.join(setup_options)
            if any(re.match(r'^setuptools(?:\s|$)', req) for req in d['build_depends']):
                ordered_recipe['build']['script'] += ('--single-version-externally-managed '
                                                      '--record=record.txt')

            # Always require python as a dependency
            ordered_recipe['requirements'] = ruamel_yaml.comments.CommentedMap()
            ordered_recipe['requirements']['host'] = ['python'] + ensure_list(d['build_depends'])
            ordered_recipe['requirements']['run'] = ['python'] + ensure_list(d['run_depends'])

            if d['import_tests']:
                ordered_recipe['test']['imports'] = d['import_tests']

            if d['test_commands']:
                ordered_recipe['test']['commands'] = d['test_commands']

            if d['tests_require']:
                ordered_recipe['test']['requires'] = d['tests_require']

            ordered_recipe['about'] = ruamel_yaml.comments.CommentedMap()

            for key in ABOUT_ORDER:
                try:
                    ordered_recipe['about'][key] = d[key]
                except KeyError:
                    ordered_recipe['about'][key] = ''
            ordered_recipe['extra']['recipe-maintainers'] = ''

            # Prune any top-level sections that are empty
            for key in EXPECTED_SECTION_ORDER:
                if not ordered_recipe[key]:
                    del ordered_recipe[key]
                else:
                    rendered_recipe += ruamel_yaml.dump({key: ordered_recipe[key]},
                                                Dumper=ruamel_yaml.RoundTripDumper,
                                                default_flow_style=False,
                                                width=200)
                    rendered_recipe += '\n'

            # This hackery is necessary because
            #  - the default indentation of lists is not what we would like.
            #    Ideally we'd contact the ruamel.yaml auther to find the right
            #    way to do this. See this PR thread for more:
            #    https://github.com/conda/conda-build/pull/2205#issuecomment-315803714
            #    Brute force fix below.

            # Fix the indents
            recipe_lines = []
            for line in rendered_recipe.splitlines():
                match = re.search(r'^\s+(-) ', line,
                                  flags=re.MULTILINE)
                if match:
                    pre, sep, post = line.partition('-')
                    sep = '  ' + sep
                    line = pre + sep + post
                recipe_lines.append(line)
            rendered_recipe = '\n'.join(recipe_lines)

            # make sure that recipe ends with one newline, by god.
            # FIX: the old `rendered_recipe.rstrip()` discarded its result (a
            # no-op), and had to run *after* the join above anyway.
            rendered_recipe = rendered_recipe.rstrip() + '\n'

            f.write(rendered_recipe)
Exemplo n.º 18
0
# NOTE(review): `extra_specs=[]` below is a mutable default argument — it is
# shared across calls if anything downstream mutates it; confirm
# get_package_metadata never appends to it.
def skeletonize(packages, output_dir=".", version=None, recursive=False,
                all_urls=False, pypi_url='https://pypi.io/pypi/', noprompt=True,
                version_compare=False, python_version=default_python, manual_url=False,
                all_extras=False, noarch_python=False, config=None, setup_options=None,
                extra_specs=[],
                pin_numpy=False):
    """Generate conda recipe skeletons for the given PyPI *packages*.

    Each entry is either a package name or a direct URL (detected by a ':'
    in the string). Metadata is fetched from PyPI, dependencies are collected
    via ``get_package_metadata``, and one ``meta.yaml`` per package is
    rendered into ``output_dir/<package>/``.
    """
    package_dicts = {}

    if not setup_options:
        setup_options = []

    # Accept a single string as well as a list of options.
    if isinstance(setup_options, string_types):
        setup_options = [setup_options]

    if not config:
        config = Config()

    created_recipes = []
    while packages:
        package = packages.pop()
        created_recipes.append(package)

        # Entries containing ':' are direct URLs rather than package names.
        is_url = ':' in package

        if is_url:
            package_pypi_url = ''
        else:
            package_pypi_url = urljoin(pypi_url, '/'.join((package, 'json')))

        if not is_url:
            dir_path = join(output_dir, package.lower())
            if exists(dir_path) and not version_compare:
                raise RuntimeError("directory already exists: %s" % dir_path)
        d = package_dicts.setdefault(package,
            {
                'packagename': package.lower(),
                'run_depends': '',
                'build_depends': '',
                'entry_points': '',
                'test_commands': '',
                'tests_require': '',
            })
        if is_url:
            del d['packagename']

        if is_url:
            d['version'] = 'UNKNOWN'
            # Make sure there is always something to pass in for this
            pypi_data = {}
        else:
            sort_by_version = lambda l: sorted(l, key=parse_version)

            pypi_resp = requests.get(package_pypi_url, verify=not _ssl_no_verify())

            if pypi_resp.status_code != 200:
                sys.exit("Request to fetch %s failed with status: %d"
                        % (package_pypi_url, pypi_resp.status_code))

            pypi_data = pypi_resp.json()

            versions = sort_by_version(pypi_data['releases'].keys())

            if version_compare:
                # NOTE(review): `version_compare` is a boolean parameter yet is
                # invoked like a function here — confirm against upstream.
                version_compare(versions)
            if version:
                if version not in versions:
                    sys.exit("Error: Version %s of %s is not available on PyPI."
                             % (version, package))
                d['version'] = version
            else:
                # select the most visible version from PyPI.
                if not versions:
                    sys.exit("Error: Could not find any versions of package %s" % package)
                if len(versions) > 1:
                    print("Warning, the following versions were found for %s" %
                          package)
                    for ver in versions:
                        print(ver)
                    print("Using %s" % versions[-1])
                    print("Use --version to specify a different version.")
                d['version'] = versions[-1]

        data, d['pypiurl'], d['filename'], d['digest'] = get_download_data(pypi_data,
                                                                           package,
                                                                           d['version'],
                                                                           is_url, all_urls,
                                                                           noprompt, manual_url)

        d['import_tests'] = ''

        # Get summary and description directly from the metadata returned
        # from PyPI. summary will be pulled from package information in
        # get_package_metadata or a default value set if it turns out that
        # data['summary'] is empty.
        d['summary'] = data.get('summary', '')
        d['description'] = data.get('description', '')
        get_package_metadata(package, d, data, output_dir, python_version,
                             all_extras, recursive, created_recipes, noarch_python,
                             noprompt, packages, extra_specs, config=config,
                             setup_options=setup_options)

        # Set these *after* get_package_metadata so that the preferred hash
        # can be calculated from the downloaded file, if necessary.
        d['hash_type'] = d['digest'][0]
        d['hash_value'] = d['digest'][1]

        # Change requirements to use format that guarantees the numpy
        # version will be pinned when the recipe is built and that
        # the version is included in the build string.
        if pin_numpy:
            for depends in ['build_depends', 'run_depends']:
                deps = d[depends]
                numpy_dep = [idx for idx, dep in enumerate(deps)
                             if 'numpy' in dep]
                if numpy_dep:
                    # Turns out this needs to be inserted before the rest
                    # of the numpy spec.
                    deps.insert(numpy_dep[0], 'numpy x.x')
                    d[depends] = deps

    # Second pass: render one meta.yaml per collected package dict.
    for package in package_dicts:
        d = package_dicts[package]
        name = d['packagename']
        makedirs(join(output_dir, name))
        print("Writing recipe for %s" % package.lower())
        with open(join(output_dir, name, 'meta.yaml'), 'w') as f:
            rendered_recipe = PYPI_META_HEADER.format(**d)

            ordered_recipe = ruamel_yaml.comments.CommentedMap()
            # Create all keys in expected ordered
            for key in EXPECTED_SECTION_ORDER:
                try:
                    ordered_recipe[key] = PYPI_META_STATIC[key]
                except KeyError:
                    ordered_recipe[key] = ruamel_yaml.comments.CommentedMap()

            if '://' not in pypi_url:
                raise ValueError("pypi_url must have protocol (e.g. http://) included")
            base_url = urlsplit(pypi_url)
            base_url = "://".join((base_url.scheme, base_url.netloc))
            ordered_recipe['source']['url'] = urljoin(base_url, ordered_recipe['source']['url'])

            if d['entry_points']:
                ordered_recipe['build']['entry_points'] = d['entry_points']

            if noarch_python:
                ordered_recipe['build']['noarch'] = 'python'

            ordered_recipe['build']['script'] = 'python setup.py install ' + ' '.join(setup_options)
            if any(re.match(r'^setuptools(?:\s|$)', req) for req in d['build_depends']):
                ordered_recipe['build']['script'] += ('--single-version-externally-managed '
                                                      '--record=record.txt')

            # Always require python as a dependency
            ordered_recipe['requirements'] = ruamel_yaml.comments.CommentedMap()
            ordered_recipe['requirements']['host'] = ['python'] + ensure_list(d['build_depends'])
            ordered_recipe['requirements']['run'] = ['python'] + ensure_list(d['run_depends'])

            if d['import_tests']:
                ordered_recipe['test']['imports'] = d['import_tests']

            if d['test_commands']:
                ordered_recipe['test']['commands'] = d['test_commands']

            if d['tests_require']:
                ordered_recipe['test']['requires'] = d['tests_require']

            ordered_recipe['about'] = ruamel_yaml.comments.CommentedMap()

            for key in ABOUT_ORDER:
                try:
                    ordered_recipe['about'][key] = d[key]
                except KeyError:
                    ordered_recipe['about'][key] = ''
            ordered_recipe['extra']['recipe-maintainers'] = ''

            # Prune any top-level sections that are empty
            for key in EXPECTED_SECTION_ORDER:
                if not ordered_recipe[key]:
                    del ordered_recipe[key]
                else:
                    rendered_recipe += ruamel_yaml.dump({key: ordered_recipe[key]},
                                                Dumper=ruamel_yaml.RoundTripDumper,
                                                default_flow_style=False,
                                                width=200)
                    rendered_recipe += '\n'
            # make sure that recipe ends with one newline, by god.
            # NOTE(review): this rstrip() discards its result — it is a no-op,
            # and the splitlines/join below drops the final newline anyway.
            rendered_recipe.rstrip()

            # This hackery is necessary because
            #  - the default indentation of lists is not what we would like.
            #    Ideally we'd contact the ruamel.yaml auther to find the right
            #    way to do this. See this PR thread for more:
            #    https://github.com/conda/conda-build/pull/2205#issuecomment-315803714
            #    Brute force fix below.

            # Fix the indents
            recipe_lines = []
            for line in rendered_recipe.splitlines():
                # NOTE(review): pattern should be a raw string (r'^\s+(-) ').
                match = re.search('^\s+(-) ', line,
                                  flags=re.MULTILINE)
                if match:
                    pre, sep, post = line.partition('-')
                    sep = '  ' + sep
                    line = pre + sep + post
                recipe_lines.append(line)
            rendered_recipe = '\n'.join(recipe_lines)

            f.write(rendered_recipe)
Exemplo n.º 19
0
def write_yaml(content, fname):
    """Serialize *content* as YAML into the file at *fname*."""
    with Path(fname).open('wt') as handle:
        yaml.dump(content, handle, indent=4)
Exemplo n.º 20
0
def yaml_dump(string):
    """Dump the given object to YAML text using the round-trip dumper."""
    yaml = get_yaml()
    return yaml.dump(
        string,
        Dumper=yaml.RoundTripDumper,
        block_seq_indent=2,
        default_flow_style=False,
        indent=4,
    )
Exemplo n.º 21
0
def yaml_dump(object):
    """Dump *object* to a YAML string using the round-trip dumper.

    NOTE(review): the parameter shadows the ``object`` builtin; kept
    as-is to preserve keyword-call compatibility for existing callers.
    """
    dumped = yaml.dump(object,
                       Dumper=yaml.RoundTripDumper,
                       block_seq_indent=2,
                       default_flow_style=False,
                       indent=2)
    return dumped
Exemplo n.º 22
0
    data = posedata
    already = len(posedata)

# Record a pose entry for every `interval`-th frame.  Keys continue from
# the count of pre-existing entries so indices stay contiguous across runs.
for idx, item in enumerate(bb):
    if idx % interval == 0:
        data.update({
            int(idx / interval) + already: [{
                'cam_R_m2c': r[idx],   # presumably rotation, model->camera (BOP-style key) — verify
                'cam_t_m2c': t[idx],   # presumably translation, model->camera — verify
                'obj_bb': bb[idx],     # bounding box for this frame
                'obj_id': 2            # hard-coded object id
            }]
        })

# Persist the accumulated pose dictionary to disk as YAML.
with io.open('posedata.yml', 'w') as outfile:
    yaml.dump(data, outfile)

# Write every `interval`-th mask image, numbering files from
# `number_label_file` so new files append after existing ones.
for idx, item in enumerate(binary):
    if idx % interval == 0:
        #plt.imsave('./segmentation/mask/{0}_mask.png'.format(int(idx/interval)+number_label_file), item)
        cv2.imwrite(
            './segmentation/mask/{:04d}.png'.format(
                int(idx / interval) + number_label_file), item)

# Same subsampling for the color images, numbered from `number_color_file`.
for idx, item in enumerate(Color_Images):
    if idx % interval == 0:
        #plt.imsave('./segmentation/rgb/{0}_rgb.png'.format(int(idx/interval)+number_color_file), item)
        cv2.imwrite(
            './segmentation/rgb/{:04d}.png'.format(
                int(idx / interval) + number_color_file), item)
Exemplo n.º 23
0
 def to_yaml(self):
     """Serialize the parsed content (``self.raw``) to a YAML string."""
     dumper = yaml.SafeDumper
     return yaml.dump(self.raw, Dumper=dumper)
Exemplo n.º 24
0
 def load_json(self, json_file="scan_data.json"):
     """Load *json_file* and print its contents re-rendered as YAML."""
     with open(json_file) as fp:
         document = json.load(fp)
     print(yaml.dump(document))
Exemplo n.º 25
0
 def dump_config(self, **kwargs):
     """Return ``self.content`` rendered as YAML text.

     Keyword args:
         ascomments (bool): when true, prefix every line with ``# `` so
             the dump can be pasted into a file as comments.
     """
     dumped = yaml.dump(self.content, default_flow_style=False)
     if not kwargs.get("ascomments", False):
         return dumped
     # splitlines(True) keeps the newlines, so joining with '# ' puts the
     # marker at the start of every subsequent line; the leading '# '
     # covers the first line.
     return '# ' + "# ".join(dumped.splitlines(True))
Exemplo n.º 26
0
def write_configuration(filepath, config):
    """Write *config* to *filepath* as YAML.

    Bug fixed: ``yaml.dump`` returns ``None`` when given a stream, so the
    original ``config = yaml.dump(config, f, ...)`` silently clobbered the
    argument and made the function *look* like it returned the config while
    always returning ``None``.  The misleading reassignment is removed; the
    function still returns ``None``, so existing callers are unaffected.
    """
    with open(filepath, "w") as f:
        yaml.dump(config, f, default_flow_style=False)
    return None
Exemplo n.º 27
0
def write(filename, data):
    """Dump *data* as YAML into *filename*, overwriting any existing file.

    NOTE(review): ``indent=True`` is reproduced verbatim; YAML dumpers
    document ``indent`` as an int, so this likely falls back to the
    default indentation — confirm the intended value.
    """
    with open(filename, "w") as stream:
        yaml.dump(data, stream, default_flow_style=None, indent=True)
    def __init__(self,
                 goal_value,
                 hidden_list,
                 mode='le',
                 batch_size=16,
                 cut_off=20,
                 nepochs=1,
                 nsamples=1,
                 n_init_samples=100,
                 niter=1000,
                 lr=1e-3,
                 beta: float = 0,
                 init_nepochs=1,
                 base_fn='logistic',
                 nr_mix=1,
                 only_positive=False,
                 full_training_last=False):
        """Set up the experiment: snapshot all constructor arguments to
        ``params.yaml`` in a fresh timestamped directory, then store the
        hyper-parameters on the instance and prepare device/model state.

        Only what the visible code shows is documented here; argument
        semantics beyond their storage (e.g. the meaning of ``mode`` or
        ``base_fn``) are defined by the training code elsewhere.
        """
        # Capture this call's declared argument names and their current
        # values from the live frame so they can be dumped verbatim.
        l_args, _, _, values = inspect.getargvalues(inspect.currentframe())
        params = dict(zip(l_args, [values[i] for i in l_args]))
        self.unique_name = time.strftime('%Y%m%d%H%M%S')  # run timestamp id
        self.dir = Path(f'data/search_fig_{self.unique_name}')
        self.dir.mkdir(parents=True, exist_ok=True)
        # Persist the full hyper-parameter set for reproducibility.
        with open(self.dir / 'params.yaml', 'w') as f:
            yaml.dump(params, f)

        # Cache hyper-parameters on the instance.
        self.dim = 2
        self.bsize = batch_size
        self.hiddens = hidden_list
        self.niter = niter
        self.goal = goal_value
        self.mode = mode
        self.viz_rate = niter // 10  # one tenth of the total iteration count
        self.lr = lr
        self.nepochs = nepochs
        self.nsamples = nsamples
        self.n_init_samples = n_init_samples
        self.init_nepochs = init_nepochs
        self.cut_off = cut_off
        self.beta = beta
        self.nr_mix = nr_mix
        self.base_fn = base_fn
        self.only_pos = only_positive
        # whether to run 1000 epochs of training for the later round of iteration
        self.full_training = full_training_last

        # Prefer GPU when available; keep a CPU handle for transfers.
        self.device = torch.device(
            'cuda') if torch.cuda.is_available() else torch.device('cpu')
        self.cpu = torch.device('cpu')
        self.model = None
        self.opt = None

        self.writer = SummaryWriter()

        # hacky version of passing input vectors around
        x1 = np.linspace(start=-1, stop=1, num=100)
        x2 = np.linspace(start=-1, stop=1, num=100)
        self.input_vectors = [x1, x2]

        # TODO: remove this hacky way of keeping track of delta
        # self.norm_delta = None
        self.norm_delta = x1[-1] - x1[-2]  # uniform grid spacing of x1
Exemplo n.º 29
0
 def save_config(self, *args):
     """Write ``self.content`` as YAML to ``args[0]`` when a path is
     given, otherwise to ``self.config_file``.
     """
     target = args[0] if args else self.config_file
     with open(target, "w") as f:
         yaml.dump(self.content, f, default_flow_style=False)
Exemplo n.º 30
0
def _dump_string(yaml):
    """Round-trip-dump the given data structure to a YAML string.

    NOTE(review): the parameter name shadows the common ``yaml`` module
    name (the module used here is ``ryaml``); kept for interface
    compatibility.
    """
    text = ryaml.dump(yaml, Dumper=ryaml.RoundTripDumper)
    return text
Exemplo n.º 31
0
    try:
        about = json.load(tf.extractfile("info/about.json"))
    except KeyError:
        about = {}
    index = json.load(tf.extractfile("info/index.json"))

    return {
        "metadata_version": METADATA_VERSION,
        "name": index["name"],
        "version": index["version"],
        "index": index,
        "about": about,
        "rendered_recipe": rendered_recipe,
        "raw_recipe": raw_recipe,
        "conda_build_config": conda_build_config,
        "files": file_listing,
    }


def harvest_from_filename(filename):
    """Open *filename* as a binary stream and run :func:`harvest` on it."""
    with open(filename, "rb") as stream:
        return harvest(stream)


if __name__ == "__main__":
    # Harvest metadata from the package path given on the command line
    # and print it as YAML on stdout.
    harvested = harvest_from_filename(sys.argv[1])
    buffer = io.StringIO()
    ruamel_yaml.dump(harvested, buffer)
    print(buffer.getvalue())