Code Example #1
File: display.py (Project: openSUSE/repose)
 def list_products_yaml(self, hostname, system):
     from ruamel.yaml import YAML
     yml = YAML(typ='safe', pure=False)
     yml.default_flow_style = False
     yml.explicit_end = True
     yml.explicit_start = True
     yml.indent(mapping=4, sequence=4, offset=2)
     data = system.to_refhost_dict()
     data["name"] = str(hostname)
     yml.dump(data, self.output)
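A hedged aside (not from the repose project; the data is illustrative): a minimal standalone sketch of what these indent settings emit for a small document.

import sys
from ruamel.yaml import YAML

yml = YAML(typ='safe', pure=False)
yml.default_flow_style = False
yml.explicit_start = True   # leading '---'
yml.explicit_end = True     # trailing '...'
yml.indent(mapping=4, sequence=4, offset=2)

yml.dump({'name': 'refhost-1', 'products': ['SLES 15', 'SLED 15']}, sys.stdout)
# Output should resemble:
# ---
# name: refhost-1
# products:
#   - SLES 15
#   - SLED 15
# ...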
Code Example #2
from io import StringIO
from typing import Any, Optional, cast

from ruamel.yaml import YAML


def dumpyaml(
    yamlp: YAML, data: Any, stream: Any = None, **kw: Any
) -> Optional[str]:
    """Dump YAML to string."""
    inefficient = False
    if stream is None:
        inefficient = True
        stream = StringIO()
    # overriding here to get dumping to
    # not sort keys.
    yamlp = YAML()
    yamlp.indent(mapping=4, sequence=6, offset=3)
    # yamlp.compact(seq_seq=False, seq_map=False)
    yamlp.dump(data, stream, **kw)
    if inefficient:
        return cast(str, stream.getvalue())
    return None
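A hedged usage sketch for the helper above (the sample data is illustrative): with stream=None it buffers into a StringIO and returns the rendered document as a string.

text = dumpyaml(YAML(), {'jobs': ['build', 'test']})
print(text, end='')
# Expected shape (sequence=6 with offset=3 puts the dash at column 3):
# jobs:
#    -  build
#    -  test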
Code Example #3
class Factory:
    """Helper class to load HermesPy simulation scenarios from YAML configuration files."""

    extensions: Set[str] = {'.yml', '.yaml', '.cfg'}
    """Set of recognized filename extensions for serialization files."""

    __yaml: YAML
    __clean: bool
    __purge_regex_alpha: Pattern
    __purge_regex_beta: Pattern
    __db_regex: Pattern
    __restore_regex_alpha: Pattern
    __restore_regex_beta: Pattern
    __range_regex: Pattern
    __registered_classes: Set[Type[Serializable]]
    __registered_tags: Set[str]

    def __init__(self) -> None:

        # YAML dumper configuration
        self.__yaml = YAML(typ='safe', pure=True)
        self.__yaml.default_flow_style = False
        self.__yaml.compact(seq_seq=False, seq_map=False)
        self.__yaml.encoding = None
        self.__yaml.indent(mapping=4, sequence=4, offset=2)
        self.__clean = True
        self.__registered_classes = set()
        self.__registered_tags = set()

        # Browse the current environment for packages within the 'hermespy' namespace
        for finder, name, ispkg in iter_modules(hermes.__path__, "hermespy."):

            module = import_module(name)

            for _, serializable_class in getmembers(module):

                if not isclass(serializable_class) or not issubclass(
                        serializable_class, Serializable):
                    continue

                self.__registered_classes.add(serializable_class)
                self.__yaml.register_class(serializable_class)

                if serializable_class.yaml_tag is not None:

                    self.__registered_tags.add(serializable_class.yaml_tag)

                    if issubclass(serializable_class, SerializableArray):

                        array_constructor = partial(Factory.__construct_matrix,
                                                    serializable_class)
                        self.__yaml.constructor.add_multi_constructor(
                            serializable_class.yaml_tag, array_constructor)

        # Add constructors for untagged classes
        self.__yaml.constructor.add_constructor('tag:yaml.org,2002:map',
                                                self.__construct_map)
        # self.__yaml.constructor.add_constructor('tag:yaml.org,2002:seq', self.__construct_sequence)

        # Construct regular expressions for purging
        self.__purge_regex_alpha = compile(r': !<.*')
        self.__purge_regex_beta = compile(r"- !<([^']+)>")
        self.__restore_regex_alpha = compile(r"([ ]*)([a-zA-Z]+):\n$")
        self.__restore_regex_beta = compile(r"([ ]*)- ([^\s]+)([^']*)\n$")
        self.__range_regex = compile(
            r'([0-9.e-]*)[ ]*,[ ]*([0-9.e-]*)[ ]*,[ ]*\.\.\.[ ]*,[ ]*([0-9.e-]*)'
        )
        self.__db_regex = compile(r"\[([ 0-9.,-]*)\][ ]*dB")

    @property
    def clean(self) -> bool:
        """Access clean flag.

        Returns:
            bool: Clean flag.
        """

        return self.__clean

    @clean.setter
    def clean(self, flag: bool) -> None:
        """Modify clean flag.

        Args:
            flag (bool): New clean flag.
        """

        self.__clean = flag

    @property
    def registered_classes(self) -> Set[Type[Serializable]]:
        """Classes registered for serialization within the factory."""

        return self.__registered_classes.copy()

    @property
    def registered_tags(self) -> Set[str]:
        """Read registered YAML tags.

        Returns:
            Set[str]: Set of registered YAML tags.
        """

        return self.__registered_tags

    def load(self, path: str) -> List[Serializable]:
        """Load a serialized executable configuration from a filesystem location.

        Args:
            path (str): Path to a file or a folder featuring serialization files.

        Returns:
            executables (List[Serializable]):
                Serializable HermesPy objects.

        Raises:
            RuntimeError: If `path` does not contain an executable object.
            RuntimeError: If `path` contains more than one executable object.
        """

        # Recover serialized objects
        hermes_objects: List[Any] = self.from_path(path)

        executables: List[Serializable] = []

        for hermes_object in hermes_objects:

            if isinstance(hermes_object, Serializable):
                executables.append(hermes_object)

        # Return fully configured executable
        return executables

    @staticmethod
    def __construct_matrix(cls: Any, constructor: SafeConstructor, tag_suffix: str, node: Any)\
            -> Tuple[Any, Tuple[int, ...]]:
        """Construct a matrix node from YAML.

        Args:

            cls (Any):
                The type of class to be constructed. This argument will be managed by ruamel.
                The class `cls` must define a `from_yaml` routine.

            constructor (SafeConstructor):
                A handle to the constructor extracting the YAML information.

            tag_suffix (str):
                Tag suffix in the YAML config describing the channel position within the matrix.

            node (Node):
                YAML node representing the `cls` serialization.

        Returns:
            cls:
                Newly created `cls` instance.

            int:
                First dimension position within the matrix.

            int:
                Second dimension within the matrix.
            """

        indices: List[str] = re.split(' |_', tag_suffix)
        if indices[0] == '':
            indices.pop(0)

        indices: Tuple[int, ...] = tuple(int(idx) for idx in indices)

        return cls.from_yaml(constructor, node), indices

    @staticmethod
    def __construct_map(constructor: SafeConstructor,
                        node: MappingNode) -> Mapping[MappingNode, Any]:
        """A custom map generator.

        Hacks ruamel to accept node names as tags.

        Args:
            constructor (SafeConstructor): Handle to the constructor.
            node (MappingNode): A YAML map node.

        Returns:
            Mapping[MappingNode, Any]: A sequence of objects created from `node`.
        """

        tag = node.value[0][0].value

        if tag in constructor.yaml_constructors:
            return constructor.yaml_constructors[tag](constructor,
                                                      node.value[0][1])

        else:
            return constructor.construct_mapping(node, deep=True)

    @staticmethod
    def __construct_sequence(constructor: SafeConstructor,
                             node: SequenceNode) -> Sequence[Any]:
        """A custom sequence generator.

        Hacks ruamel to accept node names as tags.

        Args:
            constructor (SafeConstructor): Handle to the constructor.
            node (SequenceNode): A YAML sequence node.

        Returns:
            Sequence[Any]: A sequence of objects created from `node`.
        """

        sequence = []
        for child in node.value:

            if child.tag in constructor.yaml_constructors:
                sequence.append(constructor.yaml_constructors[child.tag](
                    constructor, child))

            else:
                sequence.append(
                    constructor.construct_non_recursive_object(child))

        return sequence

    def __purge_tags(self, serialization: str) -> str:
        """Callback to remove explicit YAML tags from serialization stream.

        Args:
            serialization (str): The serialization sequence to be purged.

        Returns:
            str: The purged sequence.
        """

        cleaned_sequence = ''
        for line in serialization.splitlines(True):

            cleaned_line = self.__purge_regex_alpha.sub(r':', line)
            cleaned_line = self.__purge_regex_beta.sub(r'- \1', cleaned_line)
            cleaned_line = cleaned_line.replace('%20', " ")

            cleaned_sequence += cleaned_line

        return cleaned_sequence

    def refurbish_tags(self, serialization: str) -> str:
        """Callback to restore explicit YAML tags to serialization streams."""
        pass

    @staticmethod
    def __decibel_conversion(match: re.Match) -> str:
        """Convert linear series to decibel series.

        Args:
            match (re.Match): The serialization sequence to be converted.

        Returns:
            str: The purged sequence.
        """

        linear_values = [
            db2lin(float(str_rep))
            for str_rep in match[1].replace(' ', '').split(',')
        ]

        string_replacement = "["
        for linear_value in linear_values:
            string_replacement += str(linear_value) + ', '

        string_replacement += "]"
        return string_replacement

    def from_path(self, paths: Union[str, Set[str]]) -> List[Any]:
        """Load a configuration from an arbitrary file system path.

        Args:
            paths (Union[str, Set[str]]): Paths to a file or a folder featuring .yml config files.

        Returns:
            List[Any]: List of serializable objects recalled from `paths`.

        Raises:
            ValueError: If the provided `path` does not exist on the filesystem.
        """

        # Convert single path to a set if required
        if isinstance(paths, str):
            paths = [paths]

        hermes_objects = []
        for path in paths:

            if not os.path.exists(path):
                raise ValueError(f"Lookup path '{path}' not found")

            if os.path.isdir(path):
                hermes_objects += self.from_folder(path)

            elif os.path.isfile(path):
                hermes_objects += self.from_file(path)

            else:
                raise ValueError(
                    "Lookup location '{}' not recognized".format(path))

        return hermes_objects

    def from_folder(self,
                    path: str,
                    recurse: bool = True,
                    follow_links: bool = False) -> List[Any]:
        """Load a configuration from a folder.

        Args:
            path (str): Path to the folder configuration.
            recurse (bool, optional): Recurse into sub-folders within `path`.
            follow_links (bool, optional): Follow links within `path`.

        Returns:
            List[Any]: List of serializable objects recalled from `path`.

        Raises:
            ValueError: If `path` is not a directory.
        """

        if not os.path.exists(path):
            raise ValueError("Lookup path '{}' not found".format(path))

        if not os.path.isdir(path):
            raise ValueError(
                "Lookup path '{}' is not a directory".format(path))

        hermes_objects: List[Any] = []

        for directory, _, files in os.walk(path, followlinks=follow_links):
            for file in files:

                _, extension = os.path.splitext(file)
                if extension in self.extensions:
                    hermes_objects += self.from_file(
                        os.path.join(directory, file))

            if not recurse:
                break

        return hermes_objects

    def to_folder(self, path: str, *args: Any) -> None:
        """Dump a configuration to a folder.

        Args:
            path (str): Path to the folder configuration.
            *args (Any):
                Configuration objects to be dumped.
        """
        pass

    def from_str(self, config: str) -> List[Any]:
        """Load a configuration from a string object.

        Args:
            config (str): The configuration to be loaded.

        Returns:
            List[Any]: List of serialized objects within `path`.
        """

        stream = StringIO(config)
        return self.from_stream(stream)

    def to_str(self, *args: Any) -> str:
        """Dump a configuration to a folder.

        Args:
            *args (Any): Configuration objects to be dumped.

        Returns:
            str: String containing full YAML configuration.

        Raises:
            RepresenterError: If objects in ``*args`` are unregistered classes.
        """

        stream = StringIO()
        self.to_stream(stream, *args)
        return stream.getvalue()

    def from_file(self, file: str) -> List[Any]:
        """Load a configuration from a single YAML file.

        Args:
            file (str): Path to the folder configuration.

        Returns:
            List[Any]: List of serialized objects within `path`.
        """

        with open(file, mode='r') as file_stream:

            try:
                return self.from_stream(file_stream)

            # Re-raise constructor errors with the correct file name
            except ConstructorError as constructor_error:

                constructor_error.problem_mark.name = file
                raise constructor_error

    def to_file(self, path: str, *args: Any) -> None:
        """Dump a configuration to a single YML file.

        Args:
            path (str): Path to the configuration file.
            *args (Any): Configuration objects to be dumped.

        Raises:
            RepresenterError: If objects in ``*args`` are unregistered classes.
        """
        pass

    def __restore_callback_alpha(self, m: Match) -> str:
        """Internal regular expression callback.

        Args:
            m (Match): Regular expression match.

        Returns:
            str: The processed match line.
        """

        if m.group(2) in self.registered_tags:
            return m.group(1) + m.group(2) + ": !<" + m.group(2) + ">\n"

        else:
            return m.string

    def __restore_callback_beta(self, m: Match) -> str:
        """Internal regular expression callback.

        Args:
            m (Match): Regular expression match.

        Returns:
            str: The processed match line.
        """

        if m.group(2) in self.registered_tags:

            indices = m.group(3).replace(" ", "%20")
            return m.group(1) + "- !<" + m.group(2) + indices + ">\n"

        else:
            return m.string

    @staticmethod
    def __range_restore_callback(m: Match) -> str:
        """Internal regular expression callback.

        Args:
            m (Match): Regular expression match.

        Returns:
            str: The processed match line.
        """

        # Extract range parameters
        start = float(m.group(1))
        step = float(m.group(2)) - start
        stop = float(m.group(3)) + step

        values = np.arange(start=start, stop=stop, step=step)

        replacement = ''
        for value in values[:-1]:
            replacement += str(value) + ', '

        replacement += str(values[-1])
        return replacement

    def from_stream(self, stream: TextIOBase) -> List[Any]:
        """Load a configuration from an arbitrary text stream.

        Args:
            stream (TextIOBase): Text stream containing the configuration.

        Returns:
            List[Any]: List of serialized objects within `stream`.

        Raises:
            ConstructorError: If YAML parsing fails.
        """

        if not self.__clean:
            return self.__yaml.load(stream)

        clean_stream = ''
        for line in stream.readlines():

            clean_line = self.__range_regex.sub(self.__range_restore_callback,
                                                line)
            clean_line = self.__db_regex.sub(self.__decibel_conversion,
                                             clean_line)
            clean_stream += clean_line

        hermes_objects = self.__yaml.load(StringIO(clean_stream))

        if hermes_objects is None:
            return []

        if isinstance(hermes_objects, Iterable):
            return hermes_objects

        else:
            return [hermes_objects]

    def to_stream(self, stream: TextIOBase, *args: Any) -> None:
        """Dump a configuration to an arbitrary text stream.

        Args:
            stream (TextIOBase): Text stream to the configuration.
            *args (Any): Configuration objects to be dumped.

        Raises:
            RepresenterError: If objects in ``*args`` are unregistered classes.
        """

        for serializable_object in args:

            if self.__clean:
                self.__yaml.dump(serializable_object,
                                 stream,
                                 transform=self.__purge_tags)

            else:
                self.__yaml.dump(serializable_object, stream)
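For context, a hedged sketch of the ruamel mechanism Factory builds on: register_class() hooks a class's yaml_tag into both the representer and the constructor. The Device class and its field are illustrative, not HermesPy API.

import sys
from ruamel.yaml import YAML

class Device:
    yaml_tag = '!Device'

    def __init__(self, carrier_frequency: float = 0.0) -> None:
        self.carrier_frequency = carrier_frequency

yaml = YAML(typ='safe', pure=True)
yaml.default_flow_style = False
yaml.register_class(Device)

yaml.dump(Device(carrier_frequency=2.4e9), sys.stdout)
# !Device
# carrier_frequency: 2400000000.0

restored = yaml.load('!Device\ncarrier_frequency: 100000000.0\n')
assert isinstance(restored, Device)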
Code Example #4
 def __init__(self, conf_file):
     yaml = YAML(typ='safe')
     yaml.indent(mapping=2, sequence=4, offset=2)  # indent settings only matter when dumping; a no-op for load()
     with open(conf_file, 'r') as f:
         self._info = yaml.load(f)
     self.name = __package__
Code Example #5
File: parser.py (Project: VeritasOS/versioner)
 def write_content(self, content, fl):
     yaml = YAML()
     yaml.indent(mapping=2, sequence=4, offset=2)
     yaml.default_flow_style = False
     return yaml.dump(content, fl)  # note: YAML.dump() writes to fl and returns None
Code Example #6
File: views.py (Project: akuma5157/spartanx-django)
import core.schemas as schemas
import core.utils as utils

# Set up logging
logging.basicConfig(level=logging.DEBUG)
LOGGER = logging.getLogger(__name__)

logging.getLogger('keyring').setLevel(logging.CRITICAL)
logging.getLogger('requests_oauthlib').setLevel(logging.CRITICAL)
logging.getLogger('urllib3').setLevel(logging.CRITICAL)

# setting up yaml syntax
yaml = YAML()
yaml.explicit_start = True
yaml.indent(sequence=4, offset=2)



try:
    VALIDATE_RESPONSES = settings.SWAGGER_API_VALIDATE_RESPONSES
except AttributeError:
    VALIDATE_RESPONSES = False
LOGGER.info("Swagger API response validation is {}".format(
    "on" if VALIDATE_RESPONSES else "off"
))

# Set up the stub class. If it is not explicitly configured in the settings.py
# file of the project, we default to a mocked class.
try:
    stub_class_path = settings.STUBS_CLASS
Code Example #7
#!/usr/bin/env python3
import os
import subprocess
import argparse
from ruamel.yaml import YAML

yaml = YAML()
yaml.indent(offset=2)

BASEPATH = os.path.abspath(os.path.dirname(__file__))
CHARTPATH = os.path.join(BASEPATH, 'binderhub')
ROOTPATH = os.path.dirname(BASEPATH)
NAME = 'binderhub'
PYPKGPATH = os.path.join(ROOTPATH, NAME)
SETUP_PY = os.path.join(ROOTPATH, 'setup.py')

IMAGE_PATH = os.path.join(BASEPATH, 'images', NAME)
# IMAGE_FILES should be all paths that contribute to the binderhub image
# namely, the Python package itself and the image directory
IMAGE_FILES = [SETUP_PY, PYPKGPATH, IMAGE_PATH]

# CHART_FILES should be all files that contribute to the chart
# namely, all image files plus the chart itself
CHART_FILES = IMAGE_FILES + [CHARTPATH]

HELM_CHART_DEPLOY_KEY_NAME = 'travis'


def last_git_modified(paths):
    """Return the short hash of the last commit on one or more paths"""
    if isinstance(paths, str):
Code Example #8
def make_definition(pp_group, standard):

    import datetime
    import importlib
    from collections.abc import Sequence

    from ppodd.pod.base import pp_register
    from vocal import schema_types
    from ruamel.yaml import YAML

    def make_variable(var, flag=False):
        _meta = {
            'name': var.name if not flag else f'{var.name}_FLAG',
            'datatype':
            schema_types.Float32 if not flag else schema_types.Integer8,
            'required': False
        }

        if not flag:

            _attributes = dict(var.attrs())
            _attributes['coordinates'] = schema_types.DerivedString
        else:
            _attributes = dict(var.flag.cfattrs)
            for item, value in _attributes.items():
                if isinstance(value, Sequence):
                    if isinstance(value, str):
                        continue
                    value = list(value)
                    try:
                        value = [i.item() for i in value]
                    except Exception:
                        pass
                try:
                    value = value.item()
                except Exception:
                    pass
                _attributes[item] = value

            if 'flag_masks' in _attributes or 'flag_values' in _attributes:
                _attributes['flag_masks'] = '<Array[int8]: derived_from_file>'

            if 'valid_range' in _attributes:
                _attributes['valid_range'] = [
                    schema_types.DerivedByte, schema_types.DerivedByte
                ]

        if 'actual_range' in _attributes:
            _attributes['actual_range'] = [
                schema_types.DerivedFloat32, schema_types.DerivedFloat32
            ]

        _attributes['comment'] = '<str: derived_from_file optional>'

        var_vars = ('sensor_serial_number', 'instrument_serial_number',
                    'flag_meanings', 'sensor_type', 'sensor_manufacturer',
                    'sensor_model', 'calibration_date',
                    'calibration_information', 'calibration_url')
        for _var in var_vars:
            if _var in _attributes:
                _attributes[_var] = schema_types.DerivedString

        _dimensions = ['Time']
        if var.frequency > 1:
            _dimensions.append(f'sps{var.frequency:02d}')

        return {
            'meta': _meta,
            'attributes': _attributes,
            'dimensions': _dimensions
        }

    standard = importlib.import_module(standard)

    dimensions_to_add = {}

    _dataset = {
        'meta': {
            'file_pattern': 'core'
        },
        'attributes': {},
        'variables': [{
            'meta': {
                'name': 'Time',
                'datatype': '<int32>'
            },
            'dimensions': ['Time'],
            'attributes': {
                'long_name': 'Time of measurement',
                'standard_name': 'time',
                'calendar': 'gregorian',
                'coverage_content_type': 'coordinate',
                'frequency': 1,
                'units': schema_types.DerivedString
            }
        }],
        'dimensions': [{
            'name': 'Time',
            'size': None
        }]
    }

    for module in pp_register.modules(pp_group, date=datetime.date.today()):
        instance = module.test_instance()
        instance.process()
        instance.finalize()
        for var in instance.dataset.outputs:
            dim_name = f'sps{var.frequency:02d}'
            if dim_name not in dimensions_to_add:
                dimensions_to_add[dim_name] = {
                    'name': dim_name,
                    'size': var.frequency
                }
            if var.write:
                exists = len([
                    i for i in _dataset['variables']
                    if i['meta']['name'] == var.name
                ])
                if exists:
                    continue
                _dataset['variables'].append(make_variable(var))

                if var.flag is not None:
                    _dataset['variables'].append(make_variable(var, flag=True))

    for dim_to_add in dimensions_to_add.values():
        _dataset['dimensions'].append(dim_to_add)

    _dataset['dimensions'].sort(
        key=lambda x: x['size'] if x['size'] is not None else -9e99)

    import pprint
    pprint.pprint(_dataset)
    yaml = YAML()
    yaml.indent(mapping=2, sequence=4, offset=2)
    with open('decades_definition.yaml', 'w') as f:
        yaml.dump(_dataset, f)
Code Example #9
import os
import re
import glob
import numbers
import argparse

from ruamel.yaml import YAML
from ruamel.yaml.scalarstring import DoubleQuotedScalarString as DQ
from ntc_templates.parse import parse_output

FILE_PATH = os.path.abspath(__file__)
FILE_DIR = os.path.dirname(FILE_PATH)
TEST_DIR = "{0}/tests".format(FILE_DIR)
YAML_OBJECT = YAML()
YAML_OBJECT.explicit_start = True
YAML_OBJECT.indent(sequence=4, offset=2)
YAML_OBJECT.block_style = True
RE_MULTILINE_REMARK = re.compile(r"(.*\n\s*#)(.*)")


def ensure_spacing_for_multiline_comment(remark):
    """
    Finds all comments and ensures a single space after "#" symbol.

    Args:
        remark (str): The remark of a comment from a ``ruamel.yaml.token.CommentToken``.

    Returns:
        str: The ``remark`` formatted with a single space after comment start, "#"

    Example:
Code Example #10
def build_terms(terms_in_file, in_dir, out_dir, extension):
    """
    Constructs _terms yaml file
    """

    if extension == 'xlsx':
        xlsx_file  = pd.ExcelFile(terms_in_file)
        term_sheet = None

        for i in xlsx_file.sheet_names:
            if 'terms_' in i:
                term_sheet = i

        if not(term_sheet):
            sys.exit('ERROR: Terms sheet not found, exiting the program')

        terms_df   = xlsx_file.parse(sheet_name = term_sheet, index_col=None, header=0, keep_default_na=False, na_values=[''])

    else:
        terms_df   = pd.read_csv(terms_in_file, index_col=None, header=0, sep = '\t', keep_default_na=False, na_values=[''])


    terms_df       = terms_df.where(terms_df.notnull(), None)

    term_dicts     = terms_df.to_dict('records')

    dict_of_terms  = {'id' : '_terms'}

    for term in term_dicts:
        out_dict     = {}
        property_nm  = ''
        termdef      = {}

        for key, val in term.items():
            key = key[1:-1]

            if key == 'property_or_enum':
                if val == 'id':
                    property_nm = '_id'

                else:
                    val_ = re.sub(r'[\W]+', '', val.lower().strip().replace(' ', '_'))
                    property_nm = validate_enum(val_)

            elif key == 'node':
                node = val

            elif key == 'enum_property':
                enum = val

            elif key == 'description':
                if val:
                    val = fss(validate_desc(val))

                out_dict[key] = val

            elif 'termDef:' in key:
                key_ = key.replace('termDef:','')

                if key_ == 'term':
                    if val:
                        val = fss(validate_desc(val))

                    termdef[key_] = val

                elif key_ == 'term_url':
                    if val:
                        val = dbl_quote(val)

                    termdef[key_] = val

                elif key_ == 'cde_id':
                    try:
                        termdef[key_] = int(val)

                    except (TypeError, ValueError):
                        termdef[key_] = val

                elif key_ in ['term_id' , 'term_version']:
                    if val:
                        termdef[key_] = val

                else:
                    termdef[key_] = val

        out_dict['termDef'] = termdef

        if property_nm not in dict_of_terms:
            dict_of_terms[property_nm] = {}

        if node == 'common':
            dict_of_terms[property_nm][node] = out_dict

        else:
            if node in dict_of_terms[property_nm]:
                dict_of_terms[property_nm][node][enum] = out_dict

            else:
                dict_of_terms[property_nm][node]       = {}
                dict_of_terms[property_nm][node][enum] = out_dict

    yaml = YAML()
    yaml.default_flow_style = False
    yaml.indent(offset = 2, sequence = 4, mapping = 2)
    yaml.representer.add_representer(type(None), my_represent_none_blank)

    num_terms  = len(dict_of_terms.keys())
    term_props = cmap(dict_of_terms)

    # insert blank lines in properties
    for k in dict_of_terms.keys():
        term_props.yaml_set_comment_before_after_key(k, before='\n')

    with open('{0}{1}.yaml'.format(out_dir, '_terms'), 'w') as file:
        yaml.dump(term_props, file)

    print('*'*100, '\n')
    print(' '*42, 'TSV  ---->  YAML', ' '*42, '\n')
    print('*'*100, '\n')
    print('Source Directory      : {0}'.format(in_dir), '\n')
    print('Number of Terms       : {0}'.format(num_terms), '\n')
    print('Destination Directory : {0}'.format(out_dir))
    print('*'*100, '\n')
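A hedged, self-contained sketch of the two ruamel features build_terms leans on: a custom representer that emits None as an empty scalar, and yaml_set_comment_before_after_key to force blank lines between top-level keys. The helpers my_represent_none_blank and cmap in the original are assumed to behave like the stand-ins below.

import sys
from ruamel.yaml import YAML
from ruamel.yaml.comments import CommentedMap

def represent_none_blank(representer, data):
    # emit None as an empty scalar rather than 'null'
    return representer.represent_scalar('tag:yaml.org,2002:null', '')

yaml = YAML()
yaml.default_flow_style = False
yaml.indent(offset=2, sequence=4, mapping=2)
yaml.representer.add_representer(type(None), represent_none_blank)

terms = CommentedMap({'id': '_terms', 'age': {'common': None}})
for k in terms:
    terms.yaml_set_comment_before_after_key(k, before='\n')
yaml.dump(terms, sys.stdout)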
Code Example #11
def build_yamls(nodes_in_file, var_in_file, enum_in_file, in_dir, out_dir, extension): #terms_flag,
    """
    Constructs node yaml file
    """

    if extension == 'xlsx':
        xlsx_file  = pd.ExcelFile(nodes_in_file)
        node_sheet = None
        var_sheet  = None
        enum_sheet = None

        for i in xlsx_file.sheet_names:
            if 'nodes_' in i:
                node_sheet = i
            if 'variables_' in i:
                var_sheet  = i
            if 'enums_' in i:
                enum_sheet = i

        if not(node_sheet) or not(var_sheet) or not(enum_sheet):
            sys.exit('ERROR: one or more than one of the sheets (Nodes, Variable & Enum) not found, exiting the program')

        nodes_df     = xlsx_file.parse(sheet_name = node_sheet, index_col=None, header=0, keep_default_na=False, na_values=[''])
        variables_df = xlsx_file.parse(sheet_name = var_sheet, index_col=None, header=0, keep_default_na=False, na_values=[''])
        enum_df      = xlsx_file.parse(sheet_name = enum_sheet, index_col=None, header=0, keep_default_na=False, na_values=[''])

        # nodes_df     = nodes_df.where(nodes_df.notnull(), None)
        # variables_df = variables_df.where(variables_df.notnull(), None)
        # enum_df      = enum_df.where(enum_df.notnull(), None)

    else:
        nodes_df     = pd.read_csv(nodes_in_file, index_col=None, header=0, sep = '\t', keep_default_na=False, na_values=[''])
        variables_df = pd.read_csv(var_in_file, index_col=None, header=0, sep = '\t', keep_default_na=False, na_values=[''])

        # nodes_df     = nodes_df.where(nodes_df.notnull(), None)
        # variables_df = variables_df.where(variables_df.notnull(), None)

        try:
            enum_df  = pd.read_csv(enum_in_file, index_col=None, header=0, sep = '\t', keep_default_na=False, na_values=[''])
            # enum_df  = enum_df.where(enum_df.notnull(), None)
        except pd.io.common.EmptyDataError:
            enum_df  = None

    nodes_df     = nodes_df.where(nodes_df.notnull(), None)
    variables_df = variables_df.where(variables_df.notnull(), None)

    if enum_df is not None:
        enum_df  = enum_df.where(enum_df.notnull(), None)

    var_dict     = build_properties(variables_df, enum_df)
    node_dict    = build_nodes(nodes_df, var_dict) #, terms_flag)

    num_nodes    = len(node_dict.keys())
    num_props    = 0

    yaml = YAML()
    yaml.default_flow_style = False
    yaml.indent(offset = 2, sequence = 4, mapping = 2)
    yaml.representer.add_representer(type(None), my_represent_none)

    for key, val in node_dict.items():
        with open('{0}{1}.yaml'.format(out_dir, key), 'w') as file:
            for block in val:
                if 'properties' in block:
                    num_props += len(block['properties'].keys())
                    dataprop   = cmap(block['properties'])

                    # insert blank lines in properties
                    for k in block['properties'].keys():
                        dataprop.yaml_set_comment_before_after_key(k, before='\n')

                    yaml.dump({'properties': dataprop}, file)

                elif 'uniqueKeys' in block:
                    block = cmap(block)

                    yaml1 = YAML()
                    yaml1.default_flow_style = None
                    yaml1.indent(offset = 2, sequence = 4, mapping = 2)
                    yaml1.representer.add_representer(type(None), my_represent_none)

                    yaml1.dump(block, file)
                    file.write('\n')

                else:
                    yaml.dump(block, file)
                    file.write('\n')

    print('*'*100, '\n')
    print(' '*42, 'TSV  ---->  YAML', ' '*42, '\n')
    print('*'*100, '\n')
    print('Source Directory      : {0}'.format(in_dir), '\n')
    print('Number of Nodes       : {0}'.format(num_nodes))
    print('Number of Properties  : {0}'.format(num_props), '\n')
    print('Destination Directory : {0}'.format(out_dir))
    print('*'*100, '\n')
Code Example #12
import os
import sys
from FUTpuppeteer.misc import multi_log, Global
from FUTpuppeteer.core import Session
from FUTpuppeteer import info
from datetime import datetime, timedelta
from time import sleep
from selenium.common.exceptions import TimeoutException, NoSuchElementException, ElementNotVisibleException
import signal
from ruamel.yaml import YAML

yaml = YAML()
yaml.explicit_start = True
yaml.indent(mapping=4)
yaml.preserve_quotes = True
if len(sys.argv) != 3:
    print('Missing arguments in coin_finish_transfer.py. '
          'Have {}, need 2: gain_coins, lose_coins'.format(len(sys.argv) - 1))
    sys.exit(1)
gain_coins = sys.argv[1]
lose_coins = sys.argv[2]

directory = os.path.dirname(os.path.abspath(__file__))
gain_config_file = '\\'.join(
    directory.split('\\')[:-2]) + '\\config\\bot{}.yml'.format(gain_coins)
with open(gain_config_file) as config:
    gain_config = yaml.load(config)
settings = gain_config['coin_transfer']
remaining = []
sorted_players = result = sorted(settings['players'],
Code Example #13
File: convert.py (Project: ocefpaf/boa)
def main(docname):

    with open(docname, "r") as fi:
        lines = fi.readlines()
    context = {}
    rest_lines = []
    for line in lines:
        # print(line)
        if "{%" in line:
            set_expr = re.search("{%(.*)%}", line)
            set_expr = set_expr.group(1)
            set_expr = set_expr.replace("set", "", 1).strip()
            exec(set_expr, globals(), context)
        else:
            rest_lines.append(line)

    yaml = YAML(typ="rt")
    yaml.preserve_quotes = True
    yaml.default_flow_style = False
    yaml.indent(sequence=4, offset=2)
    yaml.width = 1000
    yaml.Representer = MyRepresenter
    yaml.Loader = ruamel.yaml.RoundTripLoader

    result_yaml = CommentedMap()
    result_yaml["context"] = context

    def has_selector(s):
        return s.strip().endswith("]")

    quoted_lines = []
    for line in rest_lines:
        if has_selector(line):
            selector_start = line.rfind("[")
            selector_end = line.rfind("]")
            selector_content = line[selector_start + 1 : selector_end]

            if line.strip().startswith("-"):
                line = (
                    line[: line.find("-") + 1]
                    + f" sel({selector_content}): "
                    + line[
                        line.find("-") + 1 : min(line.rfind("#"), line.rfind("["))
                    ].strip()
                    + "\n"
                )
        quoted_lines.append(line)
    rest_lines = quoted_lines

    def check_if_quoted(s):
        s = s.strip()
        return s.startswith('"') or s.startswith("'")

    quoted_lines = []
    for line in rest_lines:
        if "{{" in line:
            # make sure that jinja stuff is quoted
            if line.find(":") != -1:
                idx = line.find(":")
            elif line.strip().startswith("-"):
                idx = line.find("-")
            rest = line[idx + 1 :]

            if not check_if_quoted(rest):
                if "'" in rest:
                    rest = rest.replace("'", '"')

                line = line[: idx + 1] + f" '{rest.strip()}'\n"
        quoted_lines.append(line)
    rest_lines = quoted_lines

    skips, wo_skip_lines = [], []
    for line in rest_lines:
        if line.strip().startswith("skip"):
            parts = line.split(":")
            rhs = parts[1].strip()
            if rhs.startswith("true"):
                selector_start = line.rfind("[")
                selector_end = line.rfind("]")
                selector_content = line[selector_start + 1 : selector_end]
                skips.append(selector_content)
            else:
                print("ATTENTION skip: false not handled!")
        else:
            wo_skip_lines.append(line)

    rest_lines = wo_skip_lines
    result_yaml.update(
        ruamel.yaml.load("".join(rest_lines), ruamel.yaml.RoundTripLoader)
    )

    if len(skips) != 0:
        result_yaml["build"]["skip"] = skips

    if result_yaml.get("outputs"):
        for o in result_yaml["outputs"]:
            name = o["name"]
            package = {"name": name}
            del o["name"]
            if o.get("version"):
                package["version"] = o["version"]
                del o["version"]

            build = {}
            if o.get("script"):
                build["script"] = o["script"]
                del o["script"]

            o["package"] = package
            o["build"] = build

        for d in result_yaml["outputs"]:
            print(order_output_dict(d))
        result_yaml["outputs"] = [order_output_dict(d) for d in result_yaml["outputs"]]

    from io import StringIO

    output = StringIO()
    yaml.dump(result_yaml, output)

    # Hacky way to insert an empty line after the context-key-object
    context_output = StringIO()
    yaml.dump(context, context_output)
    context_output = context_output.getvalue()
    context_output_len = len(context_output.split("\n"))

    final_result = output.getvalue()
    final_result_lines = final_result.split("\n")
    final_result_lines.insert(context_output_len, "")

    print("\n".join(final_result_lines))
Code Example #14
def store_iam_resources_in_git(
    iam_resources,
    account_id,
    git_url=config.get("cache_iam_resources_for_account.store_in_git.repo"),
    git_message="[Automated] Update IAM Cache",
):
    """
    Experimental function to force-push discovered IAM resources into a Git repository's master branch.
    Use at your own risk.
    """
    accounts_d = async_to_sync(get_account_id_to_name_mapping)()
    tempdir = tempfile.mkdtemp()
    try:
        repo = clone_repo(git_url, tempdir)
        repo.config_writer().set_value("user", "name", "ConsoleMe").release()
        email = config.get("cache_iam_resources_for_account.store_in_git.email")
        if email:
            repo.config_writer().set_value("user", "email", email).release()

        expected_entries = {
            "UserDetailList": {
                "category": "iam_users",
                "resource_name_key": "UserName",
            },
            "GroupDetailList": {
                "category": "iam_groups",
                "resource_name_key": "GroupName",
            },
            "RoleDetailList": {
                "category": "iam_roles",
                "resource_name_key": "RoleName",
            },
            "Policies": {"category": "iam_policies", "resource_name_key": "PolicyName"},
        }

        for key, settings in expected_entries.items():
            category = settings["category"]
            for resource in iam_resources[key]:
                if key == "RoleDetailList":
                    resource.pop("RoleLastUsed", None)
                resource_name = resource[settings["resource_name_key"]]
                yaml = YAML()
                yaml.preserve_quotes = True  # type: ignore
                yaml.indent(mapping=2, sequence=4, offset=2)

                account_name = accounts_d.get(account_id, account_id)
                if not account_name:
                    account_name = "unknown"
                path_in_repo = os.path.join(
                    repo.working_dir, f"{account_name}/{category}/{resource_name}.yaml"
                )
                os.makedirs(Path(path_in_repo).parent.absolute(), exist_ok=True)

                should_write = True
                to_write = sort_dict(resource)
                if os.path.exists(path_in_repo):
                    with open(path_in_repo, "r") as f:
                        # Unfortunately at the time of writing, ruamel.yaml loads this into ordered dictionaries.
                        # We want this to be the same type as `to_write`, so we use the builtin yaml library to load it
                        existing = builtin_yaml.safe_load(f)
                    if not DeepDiff(to_write, existing, ignore_order=True):
                        should_write = False
                if should_write:
                    with open(path_in_repo, "w") as f:
                        yaml.dump(to_write, f)
        repo.git.add("*")
        if repo.index.diff("HEAD"):
            repo.index.commit(git_message)
            origin = repo.remote("origin")
            origin.pull()
            origin.push("master", force=True)
    except Exception:  # noqa
        sentry_sdk.capture_exception()
    shutil.rmtree(tempdir)
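A hedged round-trip sketch of the settings used above: preserve_quotes keeps the original quoting style intact when a document is loaded, edited, and written back (the policy snippet is illustrative).

import sys
from ruamel.yaml import YAML

yaml = YAML()
yaml.preserve_quotes = True
yaml.indent(mapping=2, sequence=4, offset=2)

doc = yaml.load('Version: "2012-10-17"\nActions:\n  - "s3:GetObject"\n')
doc['Actions'].append('s3:ListBucket')   # new entry, plain style
yaml.dump(doc, sys.stdout)
# The existing double quotes survive the round trip; only the
# appended entry is emitted unquoted.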
コード例 #15
0
ファイル: UpdateYaml.py プロジェクト: agrc/agrc.github.io
                front_matter = prune_keys(front_matter)

                tags = prune_tags(front_matter)
                if tags is not None:
                    front_matter['tags'] = tags

                categories = prune_categories(front_matter)
                if categories is not None:
                    front_matter['categories'] = categories

                stream = StringIO()
                yaml.dump(front_matter, stream)

                front_matter = stream.getvalue()

                content = pluck_content(original)

                updated.write('---\n')
                updated.write(front_matter)
                updated.write('---\n')

                updated.write(content)

            os.rename(file_path + '.bak', file_path)


if __name__ == '__main__':
    yaml.indent(mapping=2, sequence=4, offset=2)
    discover_files(sys.argv[1])
Code Example #16
 def __init__(self, meta_yaml):
     _yml = YAML(typ='jinja2')
     _yml.indent(mapping=2, sequence=4, offset=2)
     _yml.width = 160
     _yml.allow_duplicate_keys = True
     self.meta = _yml.load(meta_yaml)
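A hedged note on typ='jinja2': it is provided by the separate ruamel.yaml.jinja2 plugin and exists so that templated documents (e.g. conda-style meta.yaml recipes) can round-trip without the Jinja2 expressions breaking the YAML parser.

import sys
from ruamel.yaml import YAML

# Assumes the ruamel.yaml.jinja2 plugin is installed; YAML(typ='jinja2')
# fails otherwise. The recipe text is illustrative.
_yml = YAML(typ='jinja2')
meta = _yml.load('package:\n  name: demo\n  version: {{ version }}\n')
_yml.dump(meta, sys.stdout)   # the {{ version }} expression should survive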
Code Example #17
# SPDX-License-Identifier: BSD-3-Clause-Clear OR GPL-3.0-only
"""yet-another-batch-gui - Yet another batch GUI. Inspired by AnotherGUI, scripted in Python."""
__author__ = "flolilo"
__license__ = "See SPDX-License-Identifier"
__contact__ = "See github.com/flolilo/yet-another-batch-gui"
__version__ = "0.0.1"

# import os
# import sys
import subprocess
import shlex
from pathlib import Path
import PySimpleGUI as psg
from ruamel.yaml import YAML
yaml = YAML()
yaml.indent(offset=2, sequence=4)

# binary = input("Provide binary (including path): ")
all_presets = {
    'binaries':
    [str(Path("/bin/ffmpeg").resolve()),
     str(Path("/bin/ffprobe").resolve())],
    'presets': ["bla\ %2", 'asd', '##1'],
    'settings': {
        'threads': 4
    }
}

with Path('./yabg.yaml').open('r+', encoding='utf-8') as file:
    yaml.dump(all_presets, file)
# arguments = input("Provide arguments for that binary: ")
Code Example #18
File: file.py (Project: notice4thomas/mautrix-python)
import logging
from abc import ABC

from yarl import URL
from ruamel.yaml import YAML
from ruamel.yaml.comments import CommentedMap

from .base import BaseConfig
from .recursive_dict import RecursiveDict

try:
    import pkg_resources
except ImportError:
    pkg_resources = None

yaml = YAML()
yaml.indent(4)  # positional: sets the mapping indent to 4
yaml.width = 200

log: logging.Logger = logging.getLogger("mau.util.config")


class BaseFileConfig(BaseConfig, ABC):
    def __init__(self, path: str, base_path: str) -> None:
        super().__init__()
        self._data = CommentedMap()
        self.path: str = path
        self.base_path: str = base_path

    def load(self) -> None:
        with open(self.path, 'r') as stream:
            self._data = yaml.load(stream)
Code Example #19
def relist_individually(obj, at_market=False, duration='1 Hour'):
    obj.current_strategy = 'Relist Individually'
    settings = obj.strategy_settings['relist_individually']
    if settings['above_bin'] and settings['below_bin']:
        multi_log(obj, 'Cannot relist players. Settings file has both below_bin and above_bin set to True.', level='error')
        return 1
    if obj.location != 'transfer_list':
        obj.go_to('transfer_list')
    while True:
        expired = obj.__get_items__(p_element='../..', p_type='xpath', gp_element="//*[contains(text(), 'Unsold Items')]", gp_type='xpath', get_price=True)
        if len(expired) == 0:
            break
        for item in expired:
            if item['item_type'] == 'player':
                if at_market:
                    futbin_price = item['futbin_price']
                    tier = info.get_tier(futbin_price)
                    level = 'green'
                    if settings['below_bin']:
                        futbin_price = info.round_down(futbin_price * obj.bin_settings[tier]['sell_percent'], Global.rounding_tiers[tier])
                        level = 'warn'
                    elif settings['above_bin']:
                        new_price = (item['buy_now_price'] + futbin_price) / 2
                        tier = info.get_tier(new_price)
                        futbin_price = max(futbin_price, info.round_down(new_price, Global.rounding_tiers[tier]))
                        level = 'warn'
                    tier = info.get_tier(futbin_price)
                    start_price = futbin_price - obj.bin_settings[tier]['spread']
                    multi_log(obj=obj, message='Relisting {} for {}. Was previously {}'.format(item['item_name'], futbin_price, item['buy_now_price']), level=level)
                else:
                    futbin_price = item['buy_now_price']
                    start_price = item['start_price']
                try:
                    obj.relist_item(item, start_price, futbin_price, duration)
                except TimeoutException:
                    pass
            else:
                obj.relist_item(item, item['start_price'], item['buy_now_price'], duration)
            obj.keep_alive(Global.micro_min)
            break
    try:
        obj.rate_limit()
        obj.__click_xpath__("//*[contains(text(), 'Re-list All')]", timeout=Global.small_min * 2)
        obj.__click_xpath__("//*[contains(text(), 'Yes')]", timeout=Global.small_min * 2)
        multi_log(obj, 'Items re-listed')
        obj.go_to('transfers')
    except TimeoutException:
        pass
    if duration == '1 Hour' and obj.settings['night_mode']['need_relist']:
        from ruamel.yaml import YAML
        yaml = YAML()
        yaml.explicit_start = True
        yaml.indent(mapping=4)
        yaml.preserve_quotes = True

        need_relisting = False
        active_transfers = obj.__get_items__(p_element='../..', p_type='xpath', gp_element="//*[contains(text(), 'Active Transfers')]", gp_type='xpath',
                                             get_price=False)
        for active in active_transfers:
            if active['time_left'] > 3601:
                need_relisting = True
                break
        if not need_relisting:
            with open(obj.config_file) as config:
                new_config = yaml.load(config)
                new_config['settings']['night_mode']['need_relist'] = False
            with open(obj.config_file, 'w') as update:
                yaml.dump(new_config, update)
Code Example #20
def main(args: List[str]) -> None:
    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger(__name__)

    all_evidence = []

    log_records_all_files: Iterable[LogRecord] = \
        itertools.chain.from_iterable(parse_log_file(path) for path in args[1:])
    # noinspection PyTypeHints
    incoming_ips: DefaultDict[bytes, DefaultDict[InstanceUserAgent, TimeWindowAcc]] = \
        DefaultDict(lambda: DefaultDict(TimeWindowAcc))

    for log_record in log_records_all_files:
        if log_record.user_agent is None:
            continue
        instance_user_agent = classify_user_agent(log_record.user_agent)
        if instance_user_agent is None:
            continue
        incoming_ips[log_record.ip][instance_user_agent].add(
            log_record.timestamp)

    possible_instance_ips: Set[bytes] = set(incoming_ips.keys())
    possible_instance_hostnames: Set[str] = set()
    possible_instance_hostnames_and_ports: Set[Tuple[str, int]] = set()

    for ip in incoming_ips.keys():
        for instance_user_agent in incoming_ips[ip].keys():
            time_window = incoming_ips[ip][instance_user_agent]

            if instance_user_agent.url is not None:
                hostname_and_port = extract_hostname_and_port(
                    instance_user_agent.url)
                if hostname_and_port is not None:
                    hostname, port = hostname_and_port

                    possible_instance_hostnames.add(hostname)
                    possible_instance_hostnames_and_ports.add(
                        hostname_and_port)

                    all_evidence.append(
                        UserAgentEvidence(
                            ip=ip,
                            hostname=hostname,
                            domain=get_domain(hostname),
                            port=port,
                            instance_user_agent=instance_user_agent,
                            time_window=time_window,
                        ))

    for ip in possible_instance_ips:
        ip_str = fmt_ip(ip)
        try:
            time = datetime.now(timezone.utc)
            hostname, aliases, addresses = socket.gethostbyaddr(ip_str)
            aliases = [
                alias for alias in aliases
                if not alias.endswith('.in-addr.arpa')
                and not alias.endswith('.ip6.arpa')
            ]
            if addresses != [ip_str]:
                # TODO: when would this happen?
                logger.warning(
                    '%(ip_str)s resolved to multiple IPs: %(addresses)r', {
                        'ip_str': ip_str,
                        'addresses': addresses
                    })

            for alias in [hostname] + aliases:
                all_evidence.append(
                    ReverseDNSEvidence(
                        ip=ip,
                        hostname=alias,
                        domain=get_domain(alias),
                        time=time,
                    ))
        except OSError:
            logger.warning("Exception on reverse DNS lookup for %(ip_str)s!",
                           {'ip_str': ip_str},
                           exc_info=True)

    for hostname in possible_instance_hostnames:
        try:
            time = datetime.now(timezone.utc)
            # noinspection PyArgumentList
            for af, _, _, _, sockaddr in socket.getaddrinfo(
                    hostname,
                    None,
                    family=socket.AF_INET,
                    type=socket.SOCK_STREAM,
                    proto=socket.IPPROTO_IP):
                ip_str = sockaddr[0]
                ip = socket.inet_pton(af, ip_str)
                all_evidence.append(
                    ForwardDNSEvidence(
                        ip=ip,
                        hostname=hostname,
                        domain=get_domain(hostname),
                        time=time,
                    ))
        except OSError:
            logger.warning("Exception on forward DNS lookup for %(hostname)s!",
                           {'hostname': hostname},
                           exc_info=True)

    for hostname, port in possible_instance_hostnames_and_ports:
        logger.info("%s:%d", hostname, port)  # DEBUG
        time = datetime.now(timezone.utc)
        instance_user_agent = get_instance_info(hostname, port)

        if instance_user_agent is not None:
            all_evidence.append(
                TLSCertCheckEvidence(
                    hostname=hostname,
                    domain=get_domain(hostname),
                    port=port,
                    time=time,
                ))

            if instance_user_agent.server != UNKNOWN_SERVER_TYPE \
                    and instance_user_agent.url is not None:
                reported_hostname_and_port = extract_hostname_and_port(
                    instance_user_agent.url)
                if reported_hostname_and_port is not None:
                    reported_hostname, reported_port = reported_hostname_and_port
                    if hostname == reported_hostname and port == reported_port:
                        all_evidence.append(
                            InstanceAPIEvidence(
                                hostname=hostname,
                                domain=get_domain(hostname),
                                port=port,
                                instance_user_agent=instance_user_agent,
                                time=time,
                            ))

    # TODO: Ignores ports: I've not seen a non-443 instance yet.

    # Map of hostname to instance info accumulator.
    # noinspection PyTypeHints
    instances: DefaultDict[str, InstanceInfoAcc] = DefaultDict(InstanceInfoAcc)
    for evidence in all_evidence:
        instances[evidence.domain].add(evidence)

    frozen: OrderedDict[str, InstanceInfoFrozen] = OrderedDict()
    for instance in sorted(instances.keys()):
        frozen[instance] = instances[instance].freeze()

    # Dump output as YAML.
    yaml = YAML()
    yaml.indent(mapping=2, sequence=2, offset=1)
    yaml.dump(CommentedMap(frozen),
              sys.stdout)  # Hack: prevents !!omap annotation in YAML output
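For context, a hedged sketch of the CommentedMap hack noted above: a plain OrderedDict is round-trip-represented as a tagged '!!omap' node, while wrapping it in CommentedMap yields an ordinary mapping.

import sys
from collections import OrderedDict
from ruamel.yaml import YAML
from ruamel.yaml.comments import CommentedMap

yaml = YAML()
yaml.indent(mapping=2, sequence=2, offset=1)

frozen = OrderedDict([('example.social', {'evidence': 1})])
yaml.dump(frozen, sys.stdout)                 # emits a '!!omap' tagged node
yaml.dump(CommentedMap(frozen), sys.stdout)   # emits a plain mapping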
Code Example #21
File: param_bunch.py (Project: angry-penguins/waddle)
def dump_yaml(x, filename):
    yaml = YAML()
    yaml.indent(sequence=4, offset=2)
    yaml.explicit_start = True
    with open(filename, 'w') as f:
        yaml.dump(x, f)
Code Example #22
import glob
import os

import ibm_boto3
from ruamel.yaml import YAML

# verify that the environment variables are defined
assert 'COS_INPUT_BUCKET' in os.environ, 'Environment variable `COS_INPUT_BUCKET` is not defined.'
assert 'COS_OUTPUT_BUCKET' in os.environ, 'Environment variable `COS_OUTPUT_BUCKET` is not defined.'
assert 'AWS_ACCESS_KEY_ID' in os.environ, 'Environment variable `AWS_ACCESS_KEY_ID` is not defined.'
assert 'AWS_SECRET_ACCESS_KEY' in os.environ, 'Environment variable `AWS_SECRET_ACCESS_KEY` is not defined.'

# update the yaml file with the corresponding buckets
yaml_file = glob.glob('*.yaml')[0]

yaml = YAML(typ='rt')
yaml.allow_duplicate_keys = True
yaml.preserve_quotes = True
yaml.indent(mapping=6, sequence=4)

# open the file
with open(yaml_file, 'r') as fp:
    # Loading configuration file
    config = yaml.load(fp)

# set input bucket
config['train']['data_source']['training_data']['bucket'] = os.environ.get('COS_INPUT_BUCKET')
# set output bucket
config['train']['model_training_results']['trained_model']['bucket'] = os.environ.get('COS_OUTPUT_BUCKET')

# save the file
with open(yaml_file, 'w') as fp:
    yaml.dump(config, fp)
Code Example #23
class ConfigManager(object):
    """
    method reading and writing from the configuration file
    """
    def __init__(self):
        self.yaml = YAML()
        self.yaml.indent(mapping=4)
        self.logger = Logger()
        self.installer_config_path = os.path.join(
            os.path.dirname(os.path.abspath(__file__)),
            '../../conf/installer_config.yml')
        if not os.path.exists(
                os.path.join(os.path.dirname(os.path.abspath(__file__)),
                             '../../dist')):
            os.makedirs(
                os.path.join(os.path.dirname(os.path.abspath(__file__)),
                             '../../dist'))

    def read(self):
        with open(self.installer_config_path, 'r') as stream:
            try:
                return self.yaml.load(stream)
            except Exception as e:
                self.logger.error("An error occurred while reading data: " + str(e))
                return None

    def read_temp_yml_file(self, path):
        with open(path, 'r') as stream:
            try:
                return self.yaml.load(stream)
            except Exception as e:
                self.logger.error(e)
                return None

    def write_to_yml(self, data, path):
        with io.open(path, 'w', encoding='utf8') as outfile:
            #self.yaml.dump(data, outfile, default_flow_style=False, allow_unicode=True)
            self.yaml.dump(data, outfile)

    def replace_all(self, text, dic):
        try:
            for i, j in dic.items():
                text = text.replace(i, j)
            self.logger.info("Dosya güncellenmesi başarıyla tamamlandı")
            return text
        except Exception as e:
            self.logger.error(
                "An unexpected error occurred while updating the file\n"
                + str(e))

    def date_format(self):
        # Equivalent to the original split/join dance: a timestamp with '-'
        # between the date and time parts, e.g. '2021-01-31-12:00:00'.
        return datetime.now().strftime('%Y-%m-%d-%H:%M:%S')
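A hedged usage sketch for ConfigManager above; Logger and the installer_config.yml path come from the surrounding project, and the 'updated_at' key is purely illustrative.

cm = ConfigManager()
config = cm.read()                 # parsed installer_config.yml, or None
if config is not None:
    config['updated_at'] = cm.date_format()   # e.g. '2021-01-31-12:00:00'
    cm.write_to_yml(config, '/tmp/installer_config_backup.yml')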
Code Example #24
import logging
import re
import sys

from ruamel.yaml import YAML

# get_file_type, get_item_type and TASK_LIST_KWS are helpers from the
# surrounding project.


class LSRFileTransformerBase(object):

    # we used to try to not deindent comment lines in the Ansible yaml,
    # but this changed the indentation when comments were used in
    # literal strings, which caused test failures - so for now, we
    # have to live with poorly indented Ansible comments . . .
    # INDENT_RE = re.compile(r'^  (?! *#)', flags=re.MULTILINE)
    INDENT_RE = re.compile(r"^  ", flags=re.MULTILINE)
    HEADER_RE = re.compile(r"^(---\n|.*\n---\n)", flags=re.DOTALL)
    FOOTER_RE = re.compile(r"\n([.][.][.]|[.][.][.]\n.*)$", flags=re.DOTALL)

    def __init__(self, filepath, rolename, args):
        self.filepath = filepath
        self.namespace = args["namespace"]
        self.collection = args["collection"]
        self.prefix = args["prefix"]
        self.subrole_prefix = args["subrole_prefix"]
        self.replace_dot = args["replace_dot"]
        self.role_modules = args["role_modules"]
        self.src_owner = args["src_owner"]
        self.top_dir = args["top_dir"]
        self.rolename = rolename
        with open(filepath) as f:
            buf = f.read()
        self.ruamel_yaml = YAML(typ="rt")
        match = re.search(LSRFileTransformerBase.HEADER_RE, buf)
        if match:
            self.header = match.group(1)
        else:
            self.header = ""
        match = re.search(LSRFileTransformerBase.FOOTER_RE, buf)
        if match:
            self.footer = match.group(1) + "\n"
        else:
            self.footer = ""
        self.ruamel_yaml.default_flow_style = False
        self.ruamel_yaml.preserve_quotes = True
        self.ruamel_yaml.width = 1024
        self.ruamel_data = self.ruamel_yaml.load(buf)
        self.ruamel_yaml.indent(mapping=2, sequence=4, offset=2)
        self.file_type = get_file_type(self.ruamel_data)
        self.outputfile = None
        self.outputstream = sys.stdout

    def run(self):
        if self.file_type == "vars":
            self.handle_vars(self.ruamel_data)
        elif self.file_type == "meta":
            self.handle_meta(self.ruamel_data)
        else:
            for item in self.ruamel_data:
                self.handle_item(item)

    def write(self):
        def xform(thing):
            logging.debug(f"xform thing {thing}")
            if self.file_type == "tasks":
                thing = re.sub(LSRFileTransformerBase.INDENT_RE, "", thing)
            thing = self.header + thing
            if not thing.endswith("\n"):
                thing = thing + "\n"
            thing = thing + self.footer
            return thing

        if self.outputfile:
            outstrm = open(self.outputfile, "w")
        else:
            outstrm = self.outputstream
        self.ruamel_yaml.dump(self.ruamel_data, outstrm, transform=xform)

    def task_cb(self, task):
        """subclass will override"""
        pass

    def other_cb(self, item):
        """subclass will override"""
        pass

    def vars_cb(self, item):
        """subclass will override"""
        pass

    def meta_cb(self, item):
        """subclass will override"""
        pass

    def handle_item(self, item):
        """handle any type of item - call the appropriate handlers"""
        ans_type = get_item_type(item)
        self.handle_vars(item)
        self.handle_other(item)
        if ans_type == "task":
            self.handle_task(item)
        self.handle_task_list(item)

    def handle_other(self, item):
        """handle properties of Ansible item other than vars and tasks"""
        self.other_cb(item)

    def handle_vars(self, item):
        """handle vars of Ansible item"""
        self.vars_cb(item)

    def handle_meta(self, item):
        """handle meta/main.yml file"""
        self.meta_cb(item)

    def handle_task(self, task):
        """handle a single task"""
        self.task_cb(task)

    def handle_task_list(self, item):
        """item has one or more fields which hold a list of Task objects"""
        for kw in TASK_LIST_KWS:
            if kw in item:
                for task in item[kw]:
                    self.handle_item(task)
Code Example #25
from ruamel.yaml import YAML


def create_yaml():
    yaml = YAML(typ="rt")
    yaml.indent(mapping=2, sequence=4, offset=2)
    yaml.compact(seq_seq=False, seq_map=False)
    return yaml
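A hedged round-trip sketch using create_yaml() above: with typ="rt", comments and key order survive a load/dump cycle, and the 2/4/2 indent settings are applied on output.

import sys

yaml = create_yaml()
doc = yaml.load('a: 1  # keep me\nb:\n- x\n')
yaml.dump(doc, sys.stdout)   # the comment survives; the list is re-indented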
Code Example #26
    if 'identifiers' in tool and ('keywords' not in tool
                                  or len(tool['keywords']) == 0):
        identifiers = tool['identifiers']
        for identifier in identifiers:
            try:
                if 'doi' in identifier:
                    doi = identifier.replace('doi:', '')
                    pubmedid = doi2pmid(doi)
                    if pubmedid is not None:
                        print('doi: ' + doi + ' --> ' + 'pmid: ' + pubmedid)
                        fetch = PubMedFetcher()
                        article = fetch.article_by_pmid(pubmedid)
                        if article.mesh is not None:
                            keywords = []
                            if 'keywords' in tool:
                                keywords = tool['keywords']
                            for keyword_key in article.mesh:
                                keyword = article.mesh[keyword_key]
                                if keyword['descriptor_name'] not in top_words:
                                    keywords.append(keyword['descriptor_name'])
                            keywords = list(dict.fromkeys(keywords))
                            tool['keywords'] = keywords
                        print(article.mesh)
            except Exception as e:
                print('Error processing doi ' + doi + ': ' + str(e))

    tools[key] = tool

yaml.indent(mapping=4, sequence=6, offset=2)
with open('missing_annotations.yaml', 'w') as outfile:
    yaml.dump(tools, outfile)
Code Example #27
from os.path import sep, isfile, isdir
from os import listdir, mkdir, getenv
import sys
import json
from shutil import copyfile
from ruamel.yaml import YAML
import tkinter as tk
from tkinter import filedialog
from tkinter import messagebox, BOTH
from tkinter.ttk import Frame, Button
import traceback
import winreg

yaml = YAML()
yaml.indent(mapping=4, sequence=4, offset=4)
yaml.allow_duplicate_keys = True


def yaml_load(filename):
    with open(filename) as f:
        return yaml.load(f)


def yaml_dump(filename, data):
    with open(filename, 'w') as f:
        yaml.dump(data, f)
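A hedged usage sketch for the two helpers above; 'settings.yaml' and the 'profiles' key are made up.

data = yaml_load('settings.yaml')
data['profiles'] = data.get('profiles', []) + ['default']
yaml_dump('settings.yaml', data)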
Code Example #28
def insert_after(d1, d2, key):
    i = 0
    if key in d1:
        for d1_key in d1.keys():
            i += 1
            if d1_key == key: break
    d1.insert(
        i - 1,
        "test",
        d2,
    )
    return d1
    #YAML().dump(d1, sys.stdout)


yaml = YAML()
yaml.indent = 2
yaml.width = 4096
q1 = yaml.load(d)
q2 = yaml.load(f)

#print(q1.iter('a'))

#q1.insert(1, "test", q2, )
#yaml.dump(q1, sys.stdout)

#with open('test.yml', 'w') as outfile:
#    yaml.dump(qq, outfile)
#yaml.dump(qq, sys.stdout)
#yaml.dump(q1, sys.stdout)

w1 = insert_after(q1, q2, "b")
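For context, a small sketch of the CommentedMap.insert() call the fragment above relies on: the position argument is the index at which the new key ends up, so inserting after an existing key means passing that key's index plus one.

import sys

from ruamel.yaml import YAML
from ruamel.yaml.comments import CommentedMap

cm = CommentedMap([('a', 1), ('b', 2), ('c', 3)])
cm.insert(2, 'test', 99)     # index 2 places 'test' right after 'b'
YAML().dump(cm, sys.stdout)  # a, b, test, c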
Code Example #29
import argparse
import io
import re
from sys import maxsize

from ruamel.yaml import YAML
from ruamel.yaml.error import YAMLError
from six import text_type


def pretty_format_yaml(argv=None):
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--autofix',
        action='store_true',
        dest='autofix',
        help='Automatically fixes encountered not-pretty-formatted files',
    )
    parser.add_argument(
        '--indent',
        type=int,
        default=2,
        help=(
            'The number of indent spaces or a string to be used as delimiter'
            ' for indentation level e.g. 4 or "\t" (Default: 2)'
        ),
    )

    parser.add_argument('filenames', nargs='*', help='Filenames to fix')
    args = parser.parse_args(argv)

    status = 0

    yaml = YAML()
    yaml.indent = args.indent
    # Prevent ruamel.yaml from wrapping long YAML lines
    yaml.width = maxsize

    separator = '---\n'

    for yaml_file in set(args.filenames):
        with open(yaml_file) as f:
            string_content = ''.join(f.readlines())

        # Split multi-document file into individual documents
        #
        # Not using yaml.load_all() because it reformats primitive (non-YAML) content. It removes
        # newline characters.
        separator_pattern = r'^---\s*\n'
        original_docs = re.split(separator_pattern, string_content, flags=re.MULTILINE)

        pretty_docs = []

        try:
            for doc in original_docs:
                content = _process_single_document(doc, yaml)
                if content is not None:
                    pretty_docs.append(content)

            # Start multi-doc file with separator
            pretty_content = '' if len(pretty_docs) == 1 else separator
            pretty_content += separator.join(pretty_docs)

            if string_content != pretty_content:
                print('File {} is not pretty-formatted'.format(yaml_file))

                if args.autofix:
                    print('Fixing file {}'.format(yaml_file))
                    with io.open(yaml_file, 'w', encoding='UTF-8') as f:
                        f.write(text_type(pretty_content))

                status = 1
        except YAMLError:  # pragma: no cover
            print(
                'Input File {} is not a valid YAML file, consider using check-yaml'.format(
                    yaml_file,
                ),
            )
            return 1

    return status
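_process_single_document() is project code not shown in this snippet. A hypothetical sketch of its shape, assuming it round-trips one document and skips empty chunks produced by the split:

import io

def _process_single_document(document, yaml):
    # Hypothetical stand-in for the hook's real helper.
    content = yaml.load(document)
    if content is None:
        return None
    stream = io.StringIO()
    yaml.dump(content, stream)
    return stream.getvalue()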
Code Example #30
import logging

import great_expectations.exceptions as ge_exceptions
from great_expectations.data_context.store import GeCloudStoreBackend
from great_expectations.data_context.store.store import Store
from great_expectations.data_context.store.tuple_store_backend import TupleStoreBackend
from great_expectations.data_context.types.base import BaseYamlConfig
from great_expectations.data_context.types.resource_identifiers import (
    ConfigurationIdentifier,
)
from great_expectations.data_context.util import load_class
from great_expectations.util import (
    filter_properties_dict,
    verify_dynamic_loading_support,
)
from ruamel.yaml import YAML

yaml = YAML()

yaml.indent(mapping=2, sequence=4, offset=2)
yaml.default_flow_style = False

logger = logging.getLogger(__name__)


class ConfigurationStore(Store):
    """
    Configuration Store provides a way to store any Marshmallow Schema compatible Configuration (using the YAML format).
    """

    _key_class = ConfigurationIdentifier

    _configuration_class = BaseYamlConfig

    def __init__(
Code Example #31
def main():
    renku_chartpress_dir = '{}/charts'.format(renku_repo_dir())
    # TODO: make these options
    namespace = 'renku'
    release = 'renku'

    yaml = YAML(typ='rt')
    yaml.indent(mapping=2, offset=2, sequence=4)

    # 1. Check minikube status
    if not bool(os.environ.get('SKIP_MINIKUBE_STATUS')):
        status_minikube()

    # 2. Build Docker images and update chart version with chartpress
    get_minikube_docker_env()
    for dep in dependencies:
        chartpress_dir = os.path.join(dependency_dir(dep['repo_name']),
                                      dep['chartpress_dir'])
        subchart_dirs = [os.path.join(chartpress_dir, dep['repo_name'])]
        if 'chart_name' in dep:
            subchart_dirs.append(
                os.path.join(chartpress_dir, dep['chart_name']))
        for subchart_dir in subchart_dirs:
            update_subchart_dependencies(subchart_dir)
        update_charts(chartpress_dir)
    update_charts(renku_chartpress_dir)

    # 3. Init helm
    kubectl_use_minikube_context()
    helm_init()

    # 4. Package renku chart, with local versions of dependencies
    with TemporaryDirectory() as tmp:
        copy_tree(os.path.join(renku_chartpress_dir, 'renku'),
                  os.path.join(tmp, 'renku'))
        copy_tree(os.path.join(renku_chartpress_dir, 'gitlab'),
                  os.path.join(tmp, 'gitlab'))

        with open(os.path.join(tmp, 'renku', 'requirements.yaml'), 'rt') as f:
            renku_requirements = yaml.load(f)

        for dep in dependencies:
            chartpress_dir = os.path.join(dependency_dir(dep['repo_name']),
                                          dep['chartpress_dir'])
            chart_name = dep.get('chart_name', dep['repo_name'])

            with open(os.path.join(chartpress_dir, chart_name, 'Chart.yaml'),
                      'rt') as f:
                chart = yaml.load(f)
            version = chart.get('version')

            req = next(
                filter(lambda x: x.get('name') == chart_name,
                       renku_requirements.get('dependencies')))
            req['version'] = version
            req['repository'] = 'file://{}'.format(
                os.path.abspath(os.path.join(chartpress_dir, chart_name)))

        with open(os.path.join(tmp, 'renku', 'requirements.yaml'), 'wt') as f:
            yaml.dump(renku_requirements, f)

        run(['cat', 'renku/requirements.yaml'], cwd=tmp).check_returncode()

        package_chart('renku', tmp, tmp)
        renku_chart = os.path.abspath(
            glob(os.path.join(tmp, 'renku-*.tgz'))[0])

        helm_deploy_cmd = [
            'helm',
            'upgrade',
            release,
            renku_chart,
            '--install',
            '--namespace',
            namespace,
            '-f',
            os.path.join(renku_chartpress_dir, 'minikube-values.yaml'),
            '--set-string',
            ','.join((
                'global.renku.domain={mip}',
                'ui.gitlabUrl=http://{mip}/gitlab',
                'ui.jupyterhubUrl=http://{mip}/jupyterhub',
                'ui.gatewayUrl=http://{mip}/api',
                'gateway.keycloakUrl=http://{mip}',
                'gateway.gitlabUrl=http://{mip}/gitlab',
                'notebooks.jupyterhub.hub.extraEnv.GITLAB_URL=http://{mip}/gitlab',
                'notebooks.jupyterhub.hub.services.gateway.oauth_redirect_uri=http://{mip}/api/auth/jupyterhub/token',
                'notebooks.jupyterhub.auth.gitlab.callbackUrl=http://{mip}/jupyterhub/hub/oauth_callback',
                'notebooks.gitlab.registry.host=10.100.123.45:8105',
                'gitlab.registry.externalUrl=http://10.100.123.45:8105/',
                'graph.gitlab.url=http://{mip}/gitlab')).format(
                    mip=minikube_ip()),
            '--timeout',
            '1800',
        ]

        print('Running: {}'.format(' '.join(helm_deploy_cmd)))
        run(helm_deploy_cmd).check_returncode()

    # 5. Deploy GitLab runner
    deploy_runner()
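For reference, what the requirements.yaml rewrite in step 4 amounts to for a single dependency (name, version and path are made up; the real values come from each Chart.yaml):

#   dependencies:
#     - name: gitlab
#       version: 0.1.0
#       repository: file:///abs/path/to/gitlab/charts/gitlab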
Code Example #32
File: __init__.py Project: dankeder/bmrcli
def handle_load(args):
    # Connect to BMR HC64 controller
    bmr = pybmr.Bmr(*parse_url(args["url"]))

    # Parse YAML from stdin
    yaml = YAML()
    yaml.indent(mapping=2, sequence=4, offset=2)
    config_data = yaml.load(args["file"])

    # TODO: Sanity check of all input data

    # Low mode assignments
    low_mode_assignments_on = [
        circuit['id'] for circuit in config_data["circuits"]
        if circuit['low_mode']
    ]
    low_mode_assignments_off = [
        circuit['id'] for circuit in config_data["circuits"]
        if not circuit['low_mode']
    ]
    if args['verbose']:
        print("low_mode_assignments_on:", low_mode_assignments_on)
        print("low_mode_assignments_off:", low_mode_assignments_off)
    if not args["dry_run"]:
        bmr.setLowModeAssignments(low_mode_assignments_on, True)
        bmr.setLowModeAssignments(low_mode_assignments_off, False)

    # Summer mode assignments
    summer_mode_assignments_on = [
        circuit['id'] for circuit in config_data["circuits"]
        if circuit['summer_mode']
    ]
    summer_mode_assignments_off = [
        circuit['id'] for circuit in config_data["circuits"]
        if not circuit['summer_mode']
    ]
    if args['verbose']:
        print("summer_mode_assignments_on:", summer_mode_assignments_on)
        print("summer_mode_assignments_off:", summer_mode_assignments_off)
    if not args["dry_run"]:
        bmr.setSummerModeAssignments(summer_mode_assignments_on, True)
        bmr.setSummerModeAssignments(summer_mode_assignments_off, False)

    # Circuit schedule assignments
    for circuit in config_data["circuits"]:
        circuit_schedules = circuit["circuit_schedules"]
        if args['verbose']:
            print(
                "schedule assignment:",
                circuit["id"],
                circuit_schedules["day_schedules"],
                circuit_schedules["starting_day"],
            )
        if not args["dry_run"]:
            bmr.setCircuitSchedules(circuit["id"],
                                    circuit_schedules["day_schedules"],
                                    circuit_schedules["starting_day"])

    # Schedules
    for schedule in config_data["schedules"]:
        if args['verbose']:
            print("schedule:", schedule["id"], schedule["name"],
                  schedule["timetable"])
        if not args["dry_run"]:
            if schedule['timetable'] is not None:
                bmr.setSchedule(schedule["id"], schedule["name"],
                                schedule["timetable"])
            else:
                bmr.deleteSchedule(schedule['id'])

    # Low mode
    if args['verbose']:
        print("low_mode:", config_data["low_mode"]["enabled"],
              config_data["low_mode"]["temperature"])
    if not args["dry_run"]:
        bmr.setLowMode(config_data["low_mode"]["enabled"],
                       config_data["low_mode"]["temperature"])

    # Summer mode
    if args['verbose']:
        print("summer_mode:", config_data["summer_mode"])
    if not args["dry_run"]:
        bmr.setSummerMode(config_data["summer_mode"])
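A hedged sketch of the YAML document handle_load() expects; the keys mirror the reads above and every value is made up.

EXAMPLE_CONFIG = """\
circuits:
  - id: 0
    low_mode: true
    summer_mode: false
    circuit_schedules:
      day_schedules: [0, 0, 0, 0, 0, 1, 1]
      starting_day: 0
schedules:
  - id: 0
    name: workday
    timetable: null   # a non-null timetable is passed straight to pybmr
low_mode:
  enabled: false
  temperature: 18
summer_mode: false
"""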
Code Example #33
    @classmethod
    def _get_parser(cls) -> YAML:
        parser = YAML()
        # Make sure the indentation settings are used consistently
        parser.indent(mapping=2, sequence=4, offset=2)
        return parser
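A hedged usage sketch; ConfigFile stands in for whatever class defines _get_parser() in the original project.

import sys

parser = ConfigFile._get_parser()
with open('settings.yml') as f:
    data = parser.load(f)
parser.dump(data, sys.stdout)   # re-emitted with the 2/4/2 indent settings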