Beispiel #1
0
def load_data_definitions(datadir):
    """
    Parse the yaml files of base yaml objects and return the information.

    :arg datadir: Data directory holding the ``base`` yaml files
    :returns: The merged base + system data with the ``version`` key removed
    """
    _define_schemas(datadir)

    def _read_single_document(path):
        # Read the whole file and parse its single yaml document.
        with open(path) as stream:
            return Loader(stream.read()).get_single_data()

    base_data = _read_single_document(
        os.path.join(datadir, 'base', 'stellar-base.yml'))
    v_validate(base_data, BASE_SCHEMA)

    system_data = _read_single_document(
        os.path.join(datadir, 'base', 'stellar-sol.yml'))
    v_validate(system_data, SYSTEM_SCHEMA)

    base_data.update(system_data)
    del base_data['version']

    return base_data
Beispiel #2
0
def get_yaml_docs():
    """Parse the YAML file"""
    source = read_yaml_file(args['infile'])

    if args.get('template'):
        source = read_yaml_file(args['template']) + source

    source_str = ''.join(entry[0] for entry in source)

    def mark_str(mark):
        # Map a yaml error mark back to the originating file/line so the
        # diagnostic points at the real source, not the concatenated text.
        entry = source[mark.line]
        return ("In file " + entry[1] + ", line " + str(entry[2]) +
                ", column " + str(mark.column + 1) + ":\n" +
                entry[0].rstrip() + "\n" + ' ' * mark.column + "^\n")

    # We iterate through all of the documents to properly diagnose errors,
    # because the load_all generator does not handle exceptions correctly.
    documents = []
    loader = Loader(source_str)
    while loader.check_data():
        try:
            documents.append(loader.get_data())
        except yaml.YAMLError as err:
            message_parts = []
            if err.problem_mark:
                message_parts.append(mark_str(err.problem_mark))
            if err.problem:
                message_parts.append(err.problem + "\n")
            if err.note:
                message_parts.append(err.note + "\n")
            sys.exit(''.join(message_parts))
    return documents
Beispiel #3
0
def test_app_config():
    """Verify the live cluster app config matches the configured sections."""
    # Nothing to check when no app config was specified.
    specified_app_config = utils.get_cluster_var('cartridge_app_config')
    if not specified_app_config:
        return

    # Likewise when there are no configured instances to query.
    if not utils.get_configured_instances():
        return

    # Fetch the current cartridge app config from any instance.
    config_url = '%s/admin/config' % utils.get_any_instance_url()
    session = utils.get_authorized_session()

    response = session.get(config_url)
    assert response.status_code == 200

    app_config = Loader(response.content).get_data()

    # Each specified section must be absent if deleted, otherwise present
    # with exactly the configured body.
    for name, section in specified_app_config.items():
        if section_is_deleted(section):
            assert name not in app_config
        else:
            assert name in app_config
            assert app_config[name] == section['body']
Beispiel #4
0
def load_base_types(datadir):
    """
    Parse the yaml file of base enum types and return the information

    :arg datadir: The data directory to find the types file
    :returns: A list of types
    """
    func_log = mlog.fields(func='load_base_types')
    func_log.fields(datadir=datadir).debug('Entered load_base_types')

    data_file = os.path.join(datadir, 'base', 'stellar-types.yml')

    file_log = func_log.fields(filename=data_file)
    file_log.debug('constructed data_file path {data_file}', data_file=data_file)

    file_log.debug('Opening data_file')
    with open(data_file, 'r') as stream:
        file_log.debug('reading data_file')
        raw_yaml = stream.read()
        file_log.fields(yaml=raw_yaml).debug('parsing yaml string')
        data = Loader(raw_yaml).get_single_data()

    func_log.fields(data=data).debug('Validating type data structure')
    data = v_validate(data, DATA_TYPES_SCHEMA)

    func_log.debug('Returning type data')
    return data
Beispiel #5
0
def load_yaml(yaml_data, filename):
    """
    Load YAML data extending it with line number information, nodes get a __line__ attribute
    """
    # Fall back to reading the file only when no in-memory data was supplied.
    if yaml_data is None:
        with open(filename, 'r') as data_file:
            yaml_data = data_file.read()

    loader = Loader(yaml_data)

    def patched_compose_node(parent, index):
        # loader.line is where the previous token ended (plus empty lines),
        # i.e. the line this node starts on.
        start_line = loader.line
        node = Composer.compose_node(loader, parent, index)
        node.__line__ = start_line + 1
        return node

    def patched_construct_mapping(node, deep=False):
        # Inject the recorded line number into every constructed mapping.
        mapping = Constructor.construct_mapping(loader, node, deep=deep)
        mapping['__line__'] = node.__line__
        return mapping

    loader.compose_node = patched_compose_node
    loader.construct_mapping = patched_construct_mapping

    try:
        return loader.get_single_data()
    except ParserError as error:
        print("YAML syntax error parsing file {} :".format(filename),
              file=sys.stderr)
        print(error, file=sys.stderr)
        exit(1)
Beispiel #6
0
    def load(cls,
             stream,
             constructors=None,
             multi_constructors=None,
             implicit_resolvers=None):
        """Parse *stream* as a single YAML document.

        Class-level constructor/resolver registries are merged with the
        per-call overrides before loading; the loader is always disposed.
        """
        loader = Loader(stream)

        merged_constructors = dict(cls._constructors)
        merged_constructors.update(constructors or {})

        merged_resolvers = dict(cls._implicit_resolvers)
        merged_resolvers.update(implicit_resolvers or {})

        merged_multi = dict(cls._multi_constructors)
        merged_multi.update(multi_constructors or {})

        for tag, fn in merged_constructors.items():
            loader.add_constructor(tag, fn)

        for tag, fn in merged_multi.items():
            loader.add_multi_constructor(tag, fn)

        for tag, pattern in merged_resolvers.items():
            loader.add_implicit_resolver(tag, pattern, None)

        try:
            return loader.get_single_data()
        finally:
            loader.dispose()
def check_conf_file(conf_file, instance_id, conf):
    """Assert the config file exists, is owned by tarantool, and carries
    exactly *conf* under *instance_id*."""
    assert conf_file.exists
    assert conf_file.user == 'tarantool'
    assert conf_file.group == 'tarantool'

    parsed = Loader(conf_file.content_string).get_data()

    assert instance_id in parsed
    assert parsed[instance_id] == conf
Beispiel #8
0
 def process_notify(self, notification):
     """Process events"""
     loader = Loader(self.events_stream)
     # Expose the triggering notification and system info on the loader so
     # custom yaml constructors can read them while parsing.
     loader.notification = notification
     loader.system = self.system
     notifications = loader.get_data()
     for name, payload in notifications.items():
         logging.debug('Process "{}" notification'.format(name))
         if payload is not None:
             self.send_data(payload)
Beispiel #9
0
 def process_measurements(self):
     """Process measurements"""
     loader = Loader(self.measurements_stream)
     # Expose runtime context on the loader so custom yaml constructors
     # can read it while parsing.
     loader.collector = self.collector
     loader.system = self.system
     loader.config = self.config
     measurements = loader.get_data()
     for name, samples in measurements.items():
         logging.debug('Process "{}" measurements: {}'.format(name, samples))
         for sample in samples:
             self.send_data(sample)
Beispiel #10
0
def load_file(filename):
    """
    Loads an output file from rAdvisor into an ordered dictionary
    of read timestamp (int) -> LogEntry in the order of logging

    :arg filename: Path to the rAdvisor output file (YAML front matter
        delimited by ``---`` lines, followed by CSV rows)
    :returns: tuple of (entries, metadata) where metadata is the parsed
        YAML front-matter document (or None if empty)
    """

    entries = OrderedDict()
    metadata = None

    with open(filename, "r") as csvfile:
        yaml_lines = []
        # Skip the first yaml delimiter
        next(csvfile)

        # Load all lines until the end of the yaml section
        file_iter = peekable(csvfile)
        while not file_iter.peek().startswith("---"):
            yaml_lines.append(next(file_iter))
        # Skip the second yaml delimiter
        next(file_iter)

        # Lines read from the file still end in "\n"; joining with "\n"
        # would double-space the document and shift YAML line numbers in
        # any parse errors, so join with the empty string instead.
        yaml_str = "".join(yaml_lines)
        yaml_loader = Loader(yaml_str)
        metadata = yaml_loader.get_data()

        # DictReader consumes the header row itself to derive fieldnames,
        # so an explicit next() here would silently drop the first DATA row.
        csv_reader = csv.DictReader(file_iter)

        preread = None
        try:
            for row in csv_reader:
                entry = LogEntry(row, entries=entries, preread=preread)
                entries[entry.read] = entry
                preread = entry.read
        except Error as e:
            print(e)
            print("An error ocurred. continuing...\n")

    return (entries, metadata)
Beispiel #11
0
def load_yaml(data):
    """
    Load YAML data extending it with line number information, nodes get a __line__ attribute
    """
    loader = Loader(data)

    def patched_compose_node(parent, index):
        # loader.line is where the previous token ended (plus empty lines),
        # i.e. the line this node starts on.
        start_line = loader.line
        node = Composer.compose_node(loader, parent, index)
        node.__line__ = start_line + 1
        return node

    def patched_construct_mapping(node, deep=False):
        # Inject the recorded line number into every constructed mapping.
        mapping = Constructor.construct_mapping(loader, node, deep=deep)
        mapping['__line__'] = node.__line__
        return mapping

    loader.compose_node = patched_compose_node
    loader.construct_mapping = patched_construct_mapping
    return loader.get_single_data()
Beispiel #12
0
    def load(cls, stream, constructors=None, multi_constructors=None):
        """Parse *stream* as a single YAML document.

        Class-level constructor registries are merged with the per-call
        overrides before loading; the loader is always disposed.
        """
        loader = Loader(stream)

        merged_constructors = dict(cls._constructors)
        merged_constructors.update(constructors or {})

        merged_multi = dict(cls._multi_constructors)
        merged_multi.update(multi_constructors or {})

        for tag, fn in merged_constructors.items():
            loader.add_constructor(tag, fn)

        for tag, fn in merged_multi.items():
            loader.add_multi_constructor(tag, fn)

        try:
            return loader.get_single_data()
        finally:
            loader.dispose()
def args_parser(path):
    """Parse the YAML settings file at *path* and return its first document.

    :arg path: Path to the YAML file
    :returns: The parsed data (typically a dict of arguments)
    """
    with open(path) as f:
        # Parse while the file is still open: the yaml Loader reads its
        # stream lazily, so calling get_data() after the `with` block has
        # closed the file can fail with "I/O operation on closed file".
        loader = Loader(f)
        args = loader.get_data()
    return args
Beispiel #14
0
    def _load_data_definitions(self):
        """
        Parse the yaml file of base yaml objects and populate game data.

        Reads stellar.yml, validates it against stellar-schema.json, and
        fills in self.system_data, self.commodity_data, and self.ship_data.

        :raises jsonschema.ValidationError: if the data file does not
            match the schema
        """
        data_file = os.path.join(self.cfg['data_dir'], 'base', 'stellar.yml')
        schema_file = os.path.join(self.cfg['data_dir'], 'schema',
                                   'stellar-schema.json')

        # Use context managers so both file handles are closed promptly
        # instead of leaking until garbage collection.
        with open(data_file) as f:
            loader = Loader(f.read())
            data = loader.get_single_data()

        with open(schema_file) as f:
            schema = json.load(f)
        jsonschema.validate(data, schema)

        self.system_data = OrderedDict()
        for system in data['system']:
            self.system_data[system['name']] = SystemData(system['name'], None)

            locations = OrderedDict()
            for loc in system['location']:
                locations[loc['name']] = LocationData(
                    loc['name'], loc['type'], self.system_data[system['name']])
            self.system_data[system['name']].locations = locations

        # Commodities are anything that may be bought or sold at a particular
        # location.  The UI may separate these out into separate pieces.
        commodities = OrderedDict()
        for commodity in data['cargo']:
            # Cargo always occupies exactly one unit of hold space.
            commodities[commodity['name']] = CommodityData(
                commodity['name'],
                frozenset((commodity['type'], 'cargo')),
                commodity['mean_price'],
                commodity['standard_deviation'],
                commodity['depreciation_rate'],
                1,
                commodity['event'],
            )

        for commodity in data['equipment']:
            # Equipment declares its own hold space requirement.
            commodities[commodity['name']] = CommodityData(
                commodity['name'],
                frozenset((commodity['type'], 'equipment')),
                commodity['mean_price'],
                commodity['standard_deviation'],
                commodity['depreciation_rate'],
                commodity['holdspace'],
                commodity['event'],
            )

        for commodity in data['property']:
            # Property takes no hold space and has no per-item type.
            commodities[commodity['name']] = CommodityData(
                commodity['name'],
                frozenset(('property', )),
                commodity['mean_price'],
                commodity['standard_deviation'],
                commodity['depreciation_rate'],
                0,
                commodity['event'],
            )
        self.commodity_data = commodities

        ### FIXME: Put ships into commodities too.
        ships = OrderedDict()
        for ship in data['ship']:
            ships[ship['name']] = ShipData(ship['name'], ship['mean_price'],
                                           ship['standard_deviation'],
                                           ship['depreciation_rate'],
                                           ship['holdspace'],
                                           ship['weaponmount'])
        self.ship_data = ships