Example no. 1
@contextlib.contextmanager
def update_yaml_dict(conf_path=None):
    """
    Context manager for updating a YAML file while preserving key ordering and
    comments.
    """
    conf_path = conf_path or GLOBAL_SCRAPINGHUB_YML_PATH
    dumper = yaml.RoundTripDumper
    try:
        with open(conf_path, 'r') as f:
            conf = yaml.load(f, yaml.RoundTripLoader) or {}
    except IOError as e:
        if e.errno != 2:
            raise
        conf = {}
        # Use alphabetic order when creating files
        dumper = yaml.Dumper
    # Code inside context manager is executed after this yield
    yield conf
    # Avoid writing "key: {}"
    for key in list(conf.keys()):
        if conf[key] == {}:
            del conf[key]
    with open(conf_path, 'w') as f:
        # Avoid writing "{}"
        if conf:
            yaml.dump(conf, f, default_flow_style=False, Dumper=dumper)
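A minimal usage sketch (hypothetical file name and keys, for illustration only; assumes the contextlib.contextmanager decorator above): the with-body runs at the yield, and the edited mapping is written back, comments intact, on exit.

# Hypothetical usage of the context manager above.
with update_yaml_dict('scrapinghub.yml') as conf:
    conf.setdefault('projects', {})['default'] = 12345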
Example no. 2
def main():
    subprocess.run(['git', 'checkout', '--', 'zuul.d/projects.yaml'])
    yaml = ruamel.yaml.YAML()
    yaml.indent(mapping=2, sequence=4, offset=2)
    with open('zuul.d/projects.yaml', 'r') as f:
        projects = yaml.load(f)

    for project in projects:
        if project['project']['name'].split('/')[1].startswith('networking-'):
            if 'templates' not in project['project']:
                continue
            templates = project['project']['templates']
            for template in ('openstack-python-jobs',
                             'openstack-python35-jobs'):
                if template in templates:
                    new_name = template + '-neutron'
                    templates[templates.index(template)] = new_name

    with open('zuul.d/projects.yaml', 'w') as f:
        yaml.dump(projects, f)

    # Strip the extra 2 spaces that ruamel.yaml adds because we told it to
    # indent an extra 2 spaces. Because the top-level entry is a list, it
    # applies that indentation at the top level as well. Comment lines do not
    # get the extra indent, so leave them untouched.
    with open('zuul.d/projects.yaml', 'r') as main_in:
        main_content = main_in.readlines()
    with open('zuul.d/projects.yaml', 'w') as main_out:
        for line in main_content:
            if '#' in line:
                main_out.write(line)
            else:
                if line.startswith('  - project'):
                    main_out.write('\n')
                main_out.write(line[2:])
Example no. 3
def save_yaml(fname: str, data: JSON_TYPE) -> None:
    """Save a YAML file."""
    yaml = YAML(typ='rt')
    yaml.indent(sequence=4, offset=2)
    tmp_fname = fname + "__TEMP__"
    try:
        try:
            file_stat = os.stat(fname)
        except OSError:
            file_stat = stat_result(
                (0o644, -1, -1, -1, -1, -1, -1, -1, -1, -1))
        with open(os.open(tmp_fname, O_WRONLY | O_CREAT | O_TRUNC,
                          file_stat.st_mode), 'w', encoding='utf-8') \
                as temp_file:
            yaml.dump(data, temp_file)
        os.replace(tmp_fname, fname)
        if hasattr(os, 'chown') and file_stat.st_ctime > -1:
            try:
                os.chown(fname, file_stat.st_uid, file_stat.st_gid)
            except OSError:
                pass
    except YAMLError as exc:
        _LOGGER.error(str(exc))
        raise HomeAssistantError(exc)
    except OSError as exc:
        _LOGGER.exception('Saving YAML file %s failed: %s', fname, exc)
        raise WriteError(exc)
    finally:
        if os.path.exists(tmp_fname):
            try:
                os.remove(tmp_fname)
            except OSError as exc:
                # If we are cleaning up then something else went wrong, so
                # we should suppress likely follow-on errors in the cleanup
                _LOGGER.error("YAML replacement cleanup failed: %s", exc)
Example no. 4
def dump(self, data):
    """Write the data to the target yaml file."""
    with open(self.target_file, 'w') as fd:
        yaml.dump(data, fd,
                  Dumper=yaml.RoundTripDumper,
                  default_flow_style=False,
                  default_style='"')
Example no. 5
def _AlterRuntime(config_filename, runtime):
  try:
    # 0. Take backup
    with tempfile.NamedTemporaryFile(prefix='app.yaml.') as f:
      backup_fname = f.name
    log.status.Print(
        'Copying original config [{0}] to backup location [{1}].'.format(
            config_filename, backup_fname))
    shutil.copyfile(config_filename, backup_fname)
    # 1. Open and parse file using ruamel
    with open(config_filename, 'r') as yaml_file:
      encoding = yaml_file.encoding
      config = yaml.load(yaml_file, yaml.RoundTripLoader)
    # 2. Alter the ruamel in-memory object representing the yaml file
    config['runtime'] = runtime
    # 3. Create an in-memory file buffer and write yaml file to it
    raw_buf = io.BytesIO()
    tmp_yaml_buf = io.TextIOWrapper(raw_buf, encoding)
    yaml.dump(config, tmp_yaml_buf, Dumper=yaml.RoundTripDumper)
    # 4. Overwrite the original app.yaml
    with open(config_filename, 'wb') as yaml_file:
      tmp_yaml_buf.seek(0)
      yaml_file.write(raw_buf.getvalue())
  except Exception as e:
    raise fingerprinter.AlterConfigFileError(e)
Example no. 6
def dump_in_memory_config_to_var(data, stream=None):
    """Dump an in-memory config.

    Arguments:
        data {ruamel.yaml.comments.CommentedMap} -- CommentedMap object
    """

    # NOTE: on ruamel.yaml.comments.CommentedMap
    # The CommentedMap, which is the dict like construct one gets when
    # round-trip loading, supports insertion of a key into a particular
    # position, while optionally adding a comment:

    if stream is None:
        inefficient = True
        output = yaml.dump(data, sys.stdout)
        logger.debug(
            "Ran {} | stream={} | inefficient={}".format(
                sys._getframe().f_code.co_name, stream, inefficient
            )
        )
        return output
    else:
        inefficient = False
        output = yaml.dump(data)
        logger.debug(
            "Ran {} | stream={} | inefficient={}".format(
                sys._getframe().f_code.co_name, stream, inefficient
            )
        )
        return output
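The NOTE above refers to ruamel.yaml's documented CommentedMap.insert, which places a key at a chosen position and can attach an end-of-line comment; a small self-contained sketch:

import sys
import ruamel.yaml
from ruamel.yaml.comments import CommentedMap

yaml = ruamel.yaml.YAML()
data = CommentedMap()
data['first'] = 1
data['last'] = 3
# Insert 'middle' at position 1 and attach an EOL comment to it.
data.insert(1, 'middle', 2, comment='added between first and last')
yaml.dump(data, sys.stdout)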
Example no. 7
def generate_yaml(hosts, proxy_host, filename):
    import ruamel.yaml as yaml
    tmp = '''
- hosts: 
  gather_facts: no
  vars:
   - proxy_host: 
  become: yes
  become_method: sudo
  become_user: root

  tasks:
    - name: update /etc/hosts
      template: src=/opt/ansible/templates/hosts.j2 dest=/etc/hosts owner=root group=root mode=0644
      notify: 
        - restart dnsmasq
        - flush json

  handlers:
    - name: restart dnsmasq
      service: name=dnsmasq state=restarted
     
    - name: flush json
      shell: python link_info.py chdir=/opt/rrd
'''

    data = yaml.load(tmp, Loader=yaml.RoundTripLoader)
    data[0]['hosts'] = hosts
    data[0]['vars'][0]['proxy_host'] = proxy_host

    with open(filename, 'w') as f:
        yaml.dump(data, f, Dumper=yaml.RoundTripDumper, default_flow_style=False, indent=2)
Example no. 8
def write_prow_configs_file(output_dir, job_defs):
    """Writes the Prow configurations into a file in output_dir."""
    output_file = os.path.join(output_dir, 'config.yaml')
    with open(output_file, 'w') as fp:
        yaml.dump(
            job_defs, fp, Dumper=yaml.RoundTripDumper, width=float("inf"))
        fp.write('\n')
Example no. 9
def main(file_paths):
    with open(file_paths[0]) as yaml_file:
        result = yaml.load(yaml_file, Loader=yaml.RoundTripLoader)
    for file_path in file_paths[1:]:
        with open(file_path) as yaml_file:
            dict_update(result, yaml.load(yaml_file, Loader=yaml.RoundTripLoader))
    yaml.dump(result, sys.stdout, Dumper=yaml.RoundTripDumper)
Example no. 10
def parse_ionic_radii():
    with open('periodic_table.yaml', 'r') as f:
        data = yaml.load(f)
    with open('ionic_radii.csv', 'r') as f:
        radiidata = f.read()
    radiidata = radiidata.split("\r")
    header = radiidata[0].split(",")
    for i in range(1, len(radiidata)):
        line = radiidata[i]
        toks = line.strip().split(",")
        suffix = ""
        name = toks[1]
        if len(name.split(" ")) > 1:
            suffix = "_" + name.split(" ")[1]
        el = toks[2]

        ionic_radii = {}
        for j in range(3, len(toks)):
            m = re.match(r"^\s*([0-9.]+)", toks[j])
            if m:
                ionic_radii[int(header[j])] = float(m.group(1))

        if el in data:
            data[el]['Ionic_radii' + suffix] = ionic_radii
            if suffix == '_hs':
                data[el]['Ionic_radii'] = ionic_radii
        else:
            print(el)
    with open('periodic_table2.yaml', 'w') as f:
        yaml.dump(data, f)
Example no. 11
def save_config(config, logdir=None):
  """Save a new configuration by name.

  If a logging directory is specified, it will be created and the configuration
  will be stored there. Otherwise, a log message will be printed.

  Args:
    config: Configuration object.
    logdir: Location for writing summaries and checkpoints if specified.

  Returns:
    Configuration object.
  """
  if logdir:
    with config.unlocked:
      config.logdir = logdir
    message = 'Start a new run and write summaries and checkpoints to {}.'
    tf.logging.info(message.format(config.logdir))
    tf.gfile.MakeDirs(config.logdir)
    config_path = os.path.join(config.logdir, 'config.yaml')
    with tf.gfile.FastGFile(config_path, 'w') as file_:
      yaml.dump(config, file_, default_flow_style=False)
  else:
    message = (
        'Start a new run without storing summaries and checkpoints since no '
        'logging directory was specified.')
    tf.logging.info(message)
  return config
Example no. 12
def save(self, path: str):
    cfg = self._dict_to_orderdict(self.map_config)
    with open(path + '/' + self.map_data['name'] + '.yaml', 'w') as cfg_file:
        try:
            yaml3ed.dump(cfg, cfg_file, explicit_start=True, explicit_end=True,
                         default_flow_style=False, allow_unicode=True, version=(1, 2))
        except yaml3ed.YAMLError as exc:
            print(exc)
Example no. 13
def _write_pb_to_yaml(pb, output):
    # Add yaml representer so that yaml dump can dump OrderedDict. The code
    # is coming from https://stackoverflow.com/questions/16782112.
    yaml.add_representer(OrderedDict, _represent_ordereddict)

    json_obj = _order_dict(json.loads(MessageToJson(pb)))
    with open(output, 'w') as outfile:
        yaml.dump(json_obj, outfile, default_flow_style=False)
Example no. 14
def save_config(config, path):
    """Save yaml configuration file to disk

    Arguments:
        config {CommentedMap} -- Should be a Ruamel YAML CommentedMap object
        path {str} -- path to configuration file
    """
    with open(path, "w", encoding="utf-8") as fp:
        yaml.dump(config, fp)
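A round-trip usage sketch (hypothetical path; assumes a module-level yaml = ruamel.yaml.YAML() instance, whose load yields the CommentedMap the docstring expects):

import ruamel.yaml

yaml = ruamel.yaml.YAML()
with open("settings.yaml", encoding="utf-8") as fp:
    config = yaml.load(fp)  # CommentedMap, comments preserved
config["debug"] = True
save_config(config, "settings.yaml")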
Example no. 15
def save_info(path, info):
    for im_id in sorted(info.keys()):
        im_info = info[im_id]
        if 'cam_K' in im_info.keys():
            im_info['cam_K'] = im_info['cam_K'].flatten().tolist()
        if 'cam_R_w2c' in im_info.keys():
            im_info['cam_R_w2c'] = im_info['cam_R_w2c'].flatten().tolist()
        if 'cam_t_w2c' in im_info.keys():
            im_info['cam_t_w2c'] = im_info['cam_t_w2c'].flatten().tolist()
    with open(path, 'w') as f:
        yaml.dump(info, f, Dumper=yaml.CDumper, width=10000)
Example no. 16
def save_gt(path, gts):
    for im_id in sorted(gts.keys()):
        im_gts = gts[im_id]
        for gt in im_gts:
            if 'cam_R_m2c' in gt.keys():
                gt['cam_R_m2c'] = gt['cam_R_m2c'].flatten().tolist()
            if 'cam_t_m2c' in gt.keys():
                gt['cam_t_m2c'] = gt['cam_t_m2c'].flatten().tolist()
            if 'obj_bb' in gt.keys():
                gt['obj_bb'] = [int(x) for x in gt['obj_bb']]
    with open(path, 'w') as f:
        yaml.dump(gts, f, Dumper=yaml.CDumper, width=10000)
Example no. 17
def object_to_yaml(data: JSON_TYPE) -> str:
    """Create yaml string from object."""
    yaml = YAML(typ='rt')
    yaml.indent(sequence=4, offset=2)
    stream = StringIO()
    try:
        yaml.dump(data, stream)
        result = stream.getvalue()  # type: str
        return result
    except YAMLError as exc:
        _LOGGER.error("YAML error: %s", exc)
        raise HomeAssistantError(exc)
Example no. 18
def write_job_order(session_dir, input_dir, input_files, output_files, job_order_fn='job.yml'):
    abs_job_order_fn = session_dir + '/' + job_order_fn
    cwl_input_files = [{'class': 'File', 'path': input_dir + '/' + d} for d in input_files]
    data = {
        'input_files': cwl_input_files,
        'output_filenames': output_files,
    }

    logging.warning(data)

    with open(abs_job_order_fn, 'w') as f:
        yaml.dump(data, f, Dumper=yaml.SafeDumper)
    return job_order_fn
Example no. 19
    def test_fileloading(self):
        # Cannot use tempfile.NamedTemporaryFile because of Windows's file locks
        fd, fname = tempfile.mkstemp('w')
        try:
            with open(fd, 'w', encoding='utf-8') as f:
                ryaml.dump(self.fake_yaml, f)
            c = Configuration.from_filename(fname)
        finally:
            os.remove(fname)

        self.assertDictEqual(
            c._yaml,
            self.fake_yaml
        )
Example no. 20
def prepare_installation_bundle(cfg, build_dir):
    run('rm -fr {d}; mkdir {d}'.format(d=build_dir))
    env = Environment(loader=PackageLoader('bts_tools', 'templates/deploy'))

    render_template = partial(render_template_file, cfg, build_dir, env)

    # 0.1- generate the install script
    render_template('install_new_graphene_node.sh')
    render_template('install_user.sh')

    # 0.2- generate config.yaml file
    config_yaml = yaml.load(env.get_template('config.yaml').render(), Loader=yaml.RoundTripLoader)
    config_yaml.update(copy.deepcopy(cfg['config_yaml']))
    for client_name, client in config_yaml['clients'].items():
        client.pop('deploy', None)
    with open(join(build_dir, 'config.yaml'), 'w') as config_yaml_file:
        config_yaml_file.write(yaml.dump(config_yaml, indent=4, Dumper=yaml.RoundTripDumper))

    # 0.3- generate api_access.json
    cfg['witness_api_access_user'] = cfg['witness_api_access']['user']
    pw_hash, pw_salt = hash_salt_password(cfg['witness_api_access']['password'])
    cfg['witness_api_access_hash'] = pw_hash
    cfg['witness_api_access_salt'] = pw_salt

    render_template('api_access.json')
    render_template('api_access.steem.json')

    # 0.4- get authorized_keys if any
    if cfg.get('ssh_keys'):
        with open(join(build_dir, 'authorized_keys'), 'w') as key_file:
            for key in cfg['ssh_keys']:
                key_file.write('{}\n'.format(key))

    run('rm -fr {}'.format(join(build_dir, 'etc')))
Example no. 21
    def start_line(self, document):
        slicedpart = self._slice_segment(self._indices, document, include_selected=False)

        if slicedpart is None or slicedpart == {} or slicedpart == []:
            return 1
        else:
            return len(dump(slicedpart, Dumper=RoundTripDumper).rstrip().split('\n')) + 1
Example no. 22
def yaml_save(filename, data):
    """
    Save contents of an OrderedDict structure to a yaml file

    :param filename: name of the yaml file to save to
    :type filename: str
    :param data: configuration data to save
    :type data: OrderedDict

    :returns: Nothing
    """

    ordered = (type(data).__name__ == 'OrderedDict')
    dict_type = 'dict'
    if ordered:
        dict_type = 'OrderedDict'
    logger.info("Saving '{}' to '{}'".format(dict_type, filename))
    if ordered:
        sdata = _ordered_dump(data, Dumper=yaml.SafeDumper, indent=4, width=768, allow_unicode=True, default_flow_style=False)
    else:
        sdata = yaml.dump(data, Dumper=yaml.SafeDumper, indent=4, width=768, allow_unicode=True, default_flow_style=False)
    sdata = _format_yaml_dump( sdata )
    with open(filename, 'w') as outfile:
        outfile.write( sdata )
Example no. 23
    def write(self):
        ''' write to file '''
        if not self.filename:
            raise YeditException('Please specify a filename.')

        if self.backup and self.file_exists():
            shutil.copy(self.filename, '{}{}'.format(self.filename, self.backup_ext))

        # Try to set format attributes if supported
        try:
            self.yaml_dict.fa.set_block_style()
        except AttributeError:
            pass

        # Try to use RoundTripDumper if supported.
        if self.content_type == 'yaml':
            try:
                Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
            except AttributeError:
                Yedit._write(self.filename, yaml.safe_dump(self.yaml_dict, default_flow_style=False))
        elif self.content_type == 'json':
            Yedit._write(self.filename, json.dumps(self.yaml_dict, indent=4, sort_keys=True))
        else:
            raise YeditException('Unsupported content_type: {}. '.format(self.content_type) +
                                 'Please specify a content_type of yaml or json.')

        return (True, self.yaml_dict)
Example no. 24
def read(self):
    dumped = yaml.dump(self.data, default_flow_style=False, version=(1, 1))
    return [
        self.editable,
        _("# This is the data in memory, and may not actually be what's "
          "in the file.\n\n%s") % dumped
    ]
Example no. 25
def cat(config_filename):
    if not path.exists(config_filename):
        raise RuntimeError("{0} does not exist".format(config_filename))

    from ruamel.yaml import dump
    from ruamel.yaml.dumper import RoundTripDumper
    from ruamel.yaml.comments import CommentedMap
    from ruamel.yaml.comments import CommentedSeq

    database = Database(config_filename)
    yaml_snips = []
    for request in database.Request.select():
        yaml_snip = CommentedMap()
        yaml_snip['request'] = CommentedMap({
            "path": request.request_path,
            "method": request.request_method,
            "headers": {
                header.name: header.value for header in
                database.RequestHeader.filter(request=request)
            },
            "data": request.request_data,
        })
        yaml_snip['response'] = CommentedMap({
            "code": request.response_code,
            "content": request.response_content,
            "headers": {
                header.name: header.value for header in
                database.ResponseHeader.filter(request=request)
            },
        })
        yaml_snips.append(yaml_snip)

    print(dump(yaml_snips, Dumper=RoundTripDumper))
Example no. 26
def cli_uninstall_package(name, ipfs=None, save=None):
    package_dir = os.path.join(os.getcwd(), '.dapple', 'packages', name)

    if os.path.isdir(package_dir):
        shutil.rmtree(package_dir)

    if not save:
        return
    
    # Save to dappfile.
    dappfile = dapple.plugins.load('core.package_dappfile')('')
    modified = False

    if name in dappfile.get('dependencies'):
        del dappfile['dependencies'][name]
        modified = True

    if 'dependencies.%s' % name in dappfile:
        del dappfile['dependencies.%s' % name]
        modified = True

    if not modified:
        return

    with open(os.path.join(os.getcwd(), '.dapple', 'dappfile'), 'w') as f:
        f.write(yaml.dump(dappfile, Dumper=yaml.RoundTripDumper))
Example no. 27
def process():
    with open('../calliope/config/defaults.yaml', 'r') as f:
        defaults = yaml.round_trip_load(f)

    write_csv(
        './user/includes/default_essentials.csv',
        get_section(defaults['default_tech']['essentials'])
    )
    write_csv(
        './user/includes/default_constraints.csv',
        get_section(defaults['default_tech']['constraints'])
    )
    write_csv(
        './user/includes/default_costs.csv',
        get_section(defaults['default_tech']['costs']['default'])
    )

    with open('../calliope/config/model.yaml', 'r') as f:
        model = yaml.round_trip_load(f)

    write_csv(
        './user/includes/model_settings.csv',
        get_section(model['model'])
    )
    write_csv(
        './user/includes/run_settings.csv',
        get_section(model['run'])
    )

    y = yaml.YAML()

    for tech_group in model['tech_groups']:
        defaults = {
            'essentials': model['tech_groups'][tech_group].get('essentials', {}),
            'constraints': model['tech_groups'][tech_group].get('constraints', {}),
            'costs': model['tech_groups'][tech_group].get('costs', {})
        }
        with open('./user/includes/basetech_{}.yaml'.format(tech_group), 'w') as f:
            f.write(yaml.dump(defaults, Dumper=yaml.RoundTripDumper))

        required_allowed = {
            'required_constraints': y.seq(model['tech_groups'][tech_group].get('required_constraints', [])),
            'allowed_constraints': y.seq(model['tech_groups'][tech_group].get('allowed_constraints', [])),
            'allowed_costs': y.seq(model['tech_groups'][tech_group].get('allowed_costs', []))
        }
        with open('./user/includes/required_allowed_{}.yaml'.format(tech_group), 'w') as f:
            f.write(yaml.dump(required_allowed, indent=4, Dumper=yaml.RoundTripDumper))
Example no. 28
def dump(*args, **kwargs):
    blame = kwargs.pop('blame', False)

    if blame:
        return dump_annotated(*args, **kwargs)
    else:
        kwargs['Dumper'] = OrderedLineDumper
        return yaml.dump(*args, **kwargs)
Example no. 29
def export_to_yaml(statechart: Statechart) -> str:
    """
    Export given *Statechart* instance to YAML

    :param statechart:
    :return: A textual YAML representation
    """
    return yaml.dump(export_to_dict(statechart, ordered=False),
                     width=1000, default_flow_style=False, default_style='"')
Example no. 30
def save_config(config, logdir):
  """Save a new configuration by name.

  If a logging directory is specified, it will be created and the configuration
  will be stored there. Otherwise, a log message will be printed.

  Args:
    config: Configuration object.
    logdir: Location for writing summaries and checkpoints if specified.

  Returns:
    Configuration object.
  """
  message = 'Start a new run and write summaries and checkpoints to {}.'
  print(message.format(logdir))
  config_path = os.path.join(logdir, 'config.yaml')
  # yaml.dump needs a stream, not a path, so open the file first.
  with open(config_path, 'w') as file_:
    yaml.dump(config, file_, default_flow_style=False)
  return config
Example no. 31
def ordered_dump(data, stream=None, Dumper=yaml.Dumper, **kwds):
    """
    dumps (OrderedDict) data in YAML format

    @param OrderedDict data: the data
    @param Stream stream: where the data in YAML is dumped
    @param Dumper Dumper: The dumper that is used as a base class
    """
    class OrderedDumper(Dumper):
        """
        A Dumper using an OrderedDict
        """
        external_ndarray_counter = 0

        def ignore_aliases(self, data):
            """
            ignore aliases and anchors
            """
            return True

    def represent_ordereddict(dumper, data):
        """
        Representer for OrderedDict
        """
        return dumper.represent_mapping(
            yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, data.items())

    def represent_int(dumper, data):
        """
        Representer for numpy int dtypes
        """
        # numpy.asscalar was removed in newer numpy; .item() is the replacement
        return dumper.represent_int(data.item())

    def represent_float(dumper, data):
        """
        Representer for numpy float dtypes
        """
        return dumper.represent_float(data.item())

    def represent_ndarray(dumper, data):
        """
        Representer for numpy ndarrays
        """
        try:
            filename = os.path.splitext(os.path.basename(stream.name))[0]
            configdir = os.path.dirname(stream.name)
            newpath = '{0}-{1:06}.npz'.format(
                os.path.join(configdir, filename),
                dumper.external_ndarray_counter)
            numpy.savez_compressed(newpath, array=data)
            node = dumper.represent_str(newpath)
            node.tag = '!extndarray'
            dumper.external_ndarray_counter += 1
        except Exception:
            with BytesIO() as f:
                numpy.savez_compressed(f, array=data)
                compressed_string = f.getvalue()
            node = dumper.represent_binary(compressed_string)
            node.tag = '!ndarray'
        return node

    # add representers
    OrderedDumper.add_representer(OrderedDict, represent_ordereddict)
    OrderedDumper.add_representer(numpy.uint8, represent_int)
    OrderedDumper.add_representer(numpy.uint16, represent_int)
    OrderedDumper.add_representer(numpy.uint32, represent_int)
    OrderedDumper.add_representer(numpy.uint64, represent_int)
    OrderedDumper.add_representer(numpy.int8, represent_int)
    OrderedDumper.add_representer(numpy.int16, represent_int)
    OrderedDumper.add_representer(numpy.int32, represent_int)
    OrderedDumper.add_representer(numpy.int64, represent_int)
    OrderedDumper.add_representer(numpy.float16, represent_float)
    OrderedDumper.add_representer(numpy.float32, represent_float)
    OrderedDumper.add_representer(numpy.float64, represent_float)
    # OrderedDumper.add_representer(numpy.float128, represent_float)
    OrderedDumper.add_representer(numpy.ndarray, represent_ndarray)

    # dump data
    return yaml.dump(data, stream, OrderedDumper, **kwds)
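A short usage sketch for the function above (hypothetical file name and keys; assumes numpy and an OrderedDict payload). Writing to a named file lets represent_ndarray store arrays in sidecar .npz files next to it:

import numpy
from collections import OrderedDict

settings = OrderedDict([
    ('gain', numpy.float32(0.5)),  # handled by represent_float
    ('trace', numpy.arange(4)),    # handled by represent_ndarray
])
with open('settings.cfg', 'w') as stream:
    ordered_dump(settings, stream, default_flow_style=False)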
Example no. 32
from ruamel import yaml

yaml.dump(data, default_flow_style=False, version=(1, 2))
Example no. 33
        if name == from_branch:
            n = copy.deepcopy(job)
            n['branches'] = [to_branch]
            name = n['name']
            if "master" in name:
                n['name'] = name.replace("master", to_branch)
            else:
                n['name'] = name + "-" + to_branch
            return [n]
    return []


filename = sys.argv[1]
from_branch = sys.argv[2]
to_branch = sys.argv[3]
to_version = sys.argv[4]
y = yaml.load(open(filename), yaml.RoundTripLoader)
count = 0
for t in ['postsubmits', 'presubmits']:
    if t in y:
        for repo in y[t]:
            newjobs = []
            for job in y[t][repo]:
                newjobs += duplicate_job(job, from_branch, to_branch)
            count += len(newjobs)
            y[t][repo] = newjobs
if count > 0:
    out = yaml.dump(y, default_flow_style=False, Dumper=yaml.RoundTripDumper)
    out = out.replace(from_branch + '.yaml', to_branch + '.yaml')
    print(out)
Example no. 34
def main() -> int:
    args = parse_args()
    if args.verbose:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.INFO)

    log.info("Generating adapter config from soaconfigs.")
    config = create_prometheus_adapter_config(paasta_cluster=args.cluster,
                                              soa_dir=args.soa_dir)
    log.info("Generated adapter config from soaconfigs.")
    if args.dry_run:
        log.info(
            "Generated the following config:\n%s",
            yaml.dump(config,
                      default_flow_style=False,
                      explicit_start=True,
                      width=sys.maxsize),
        )
        return 0  # everything after this point requires creds/updates state
    else:
        log.debug(
            "Generated the following config:\n%s",
            yaml.dump(config,
                      default_flow_style=False,
                      explicit_start=True,
                      width=sys.maxsize),
        )

    if not config["rules"]:
        log.error("Got empty rule configuration - refusing to continue.")
        return 0

    kube_client = KubeClient()
    if not args.dry_run:
        ensure_namespace(kube_client, namespace="paasta")
        ensure_namespace(kube_client, namespace="custom-metrics")

    existing_config = get_prometheus_adapter_configmap(kube_client=kube_client)
    if existing_config and existing_config != config:
        log.info("Existing config differs from soaconfigs - updating.")
        log.debug("Existing data: %s", existing_config)
        log.debug("Desired data: %s", config)
        update_prometheus_adapter_configmap(kube_client=kube_client,
                                            config=config)
        log.info("Updated adapter config.")
    elif existing_config:
        log.info("Existing config matches soaconfigs - exiting.")
        return 0
    else:
        log.info("No existing config - creating.")
        create_prometheus_adapter_configmap(kube_client=kube_client,
                                            config=config)
        log.info("Created adapter config.")

    # the prometheus adapter doesn't currently have a good way to reload on config changes
    # so we do the next best thing: restart the pod so that it picks up the new config.
    # see: https://github.com/DirectXMan12/k8s-prometheus-adapter/issues/104
    restart_prometheus_adapter(kube_client=kube_client)

    return 0
Example no. 35
# -*- coding: utf-8 -*-
# Python script to Read file from JSON and convert to YAML
# Author - HoneyMoose (https://www.ossez.com)

import json

import ruamel.yaml as yaml

json_filename = 'resources/honeymoose_test.json'
yaml_filename = 'resources/honeymoose_test.yaml'

# Read and process JSON
with open(json_filename) as json_file:
    data = json.load(json_file)
    print(type(data))

    for data_dict in data:
        label_id = data_dict['label_id']
        print(str(label_id))

print(json_file.closed)

# Using with write to YAML
with open(yaml_filename, 'w') as yaml_file:
    yaml.dump(data, yaml_file, allow_unicode=True)
Example no. 36
def main():
    args = parser().parse_args()
    cfg = YAML().load(
        open(os.environ["FLIGHTMARE_PATH"] + "/flightlib/configs/vec_env.yaml",
             'r'))
    if not args.train:
        cfg["env"]["num_envs"] = 1
        cfg["env"]["num_threads"] = 1

    if args.render:
        cfg["env"]["render"] = "yes"
    else:
        cfg["env"]["render"] = "no"

    env = wrapper.FlightEnvVec(
        QuadrotorEnv_v1(dump(cfg, Dumper=RoundTripDumper), False))

    # set random seed
    configure_random_seed(args.seed, env=env)

    #
    if args.train:
        # save the configuration and other files
        rsg_root = os.path.dirname(os.path.abspath(__file__))
        log_dir = rsg_root + '/saved'
        saver = U.ConfigurationSaver(log_dir=log_dir)
        model = PPO2(
            tensorboard_log=saver.data_dir,
            policy=MlpPolicy,  # check activation function
            policy_kwargs=dict(net_arch=[dict(pi=[128, 128], vf=[128, 128])],
                               act_fun=tf.nn.relu),
            env=env,
            lam=0.95,
            gamma=0.99,  # lower 0.9 ~ 0.99
            # n_steps=math.floor(cfg['env']['max_time'] / cfg['env']['ctl_dt']),
            n_steps=250,
            ent_coef=0.00,
            learning_rate=3e-4,
            vf_coef=0.5,
            max_grad_norm=0.5,
            nminibatches=1,
            noptepochs=10,
            cliprange=0.2,
            verbose=1,
        )

        # tensorboard
        # Make sure that your chrome browser is already on.
        # TensorboardLauncher(saver.data_dir + '/PPO2_1')

        # PPO run
        # Originally the total timestep is 5 x 10^8
        # 10 zeros for nupdates to be 4000
        # 1000000000 is 2000 iterations and so
        # 2000000000 is 4000 iterations.
        logger.configure(folder=saver.data_dir)
        model.learn(total_timesteps=int(25000000),
                    log_dir=saver.data_dir,
                    logger=logger)
        model.save(saver.data_dir)

    # # Testing mode with a trained weight
    else:
        model = PPO2.load(args.weight)
        test_model(env, model, render=args.render)
Example no. 37
    def workload_create(self, vimid, workloadid, request):
        '''
        Deploy workload to target k8s via multicloud-k8s
        :param vimid:
        :param workloadid:
        :param request
        '''
        # resp_template = {
        #     "template_type": "HEAT",
        #     "workload_id": workloadid,
        #     "workload_status": "GET_FAILED",
        #     "workload_status_reason": "Exception occurs"
        # }
        # status_code = status.HTTP_500_INTERNAL_SERVER_ERROR

        # viminfo = VimDriverUtils.get_vim_info(vimid)
        workload_query = VimDriverUtils.get_query_part(request)
        workload_data = request.data

        # vf_module_model_customization_id = data.get("vf-module-model-customization-id", None)
        # vf_module_id = data.get("vf-module-id", "")
        user_directive = workload_data.get("user_directives", {})
        # oof_directive = data.get("oof_directives", {})
        # sdnc_directive = data.get("sdnc_directives", {})
        # template_type = data.get("template_type")
        # template_data = data.get("template_data", {})

        # 1, create profile if not exists
        # manifest.yaml content
        manifest_yaml_json = {
            "version": "v1",
            "type": {
                "values": "override_values.yaml"
            }
        }

        # override_values.yaml content
        override_values_yaml_json = ""

        # extract rb and profile info from user_directive
        rbname = None
        rbversion = None
        profilename = None

        for attr in user_directive.get("attributes", []):
            aname = attr.get("attribute_name", None)
            avalue = attr.get("attribute_value", None)
            if aname == "override_values_yaml_base64":
                override_values_yaml_json = yaml.load(base64.b64decode(avalue),
                                                      Loader=yaml.Loader)
            elif aname == "definition-name":
                rbname = avalue
            elif aname == "definition-version":
                rbversion = avalue
            elif aname == "profile-name":
                profilename = avalue

        multicloudK8sUrl = "%s://%s:%s/api/multicloud-k8s/v1" % (
            settings.MSB_SERVICE_PROTOCOL, settings.MSB_SERVICE_ADDR,
            settings.MSB_SERVICE_PORT)
        if rbname and rbversion and profilename and override_values_yaml_json:
            # package them into tarball
            basedir = "/tmp/%s_%s_%s/" % (rbname, rbversion, profilename)
            manifest_yaml_filename = "manifest.yaml"
            override_values_yaml_filename = "override_values.yaml"
            profile_filename = "profile.tar.gz"
            if not os.path.isdir(basedir):
                os.mkdir(basedir)
            logger.debug("k8s profile temp dir for %s,%s,%s is %s" %
                         (rbname, rbversion, profilename, basedir))
            with open(basedir + manifest_yaml_filename,
                      "w") as f_manifest_yaml:
                yaml.dump(manifest_yaml_json,
                          f_manifest_yaml,
                          Dumper=yaml.RoundTripDumper)
            with open(basedir + override_values_yaml_filename,
                      "w") as f_override_values_yaml:
                yaml.dump(override_values_yaml_json,
                          f_override_values_yaml,
                          Dumper=yaml.RoundTripDumper)

            tar = tarfile.open(basedir + profile_filename, "w:gz")
            # tar.add(basedir+manifest_yaml_filename, arcname=manifest_yaml_filename,filter=resettarfile)
            tar.add(basedir + manifest_yaml_filename,
                    arcname=manifest_yaml_filename)
            tar.add(basedir + override_values_yaml_filename,
                    arcname=override_values_yaml_filename)
            tar.close()

            # create profile and upload content
            create_rbprofile_json = {
                "rb-name": rbname,
                "rb-version": rbversion,
                "profile-name": profilename,
                "release-name": "r1",
                "namespace": "testnamespace1",
                "kubernetes-version": "1.16.2"
            }

            profileUrl = multicloudK8sUrl + "/v1/rb/definition/%s/%s/profile" % (
                rbname, rbversion)

            #data = open('create_rbprofile.json')
            response = requests.post(profileUrl,
                                     data=json.dumps(create_rbprofile_json),
                                     verify=False)
            logger.debug("create profile, returns: %s,%s" %
                         (response.content, response.status_code))

            profileContentUrl = profileUrl + "/%s/content" % (profilename)
            #profileContent = open(basedir+profile_filename, 'rb').read()
            with open(basedir + profile_filename, "rb") as profileContent:
                response = requests.post(profileContentUrl,
                                         data=profileContent.read(),
                                         verify=False)
                logger.debug("upload profile content, returns: %s,%s" %
                             (response.content, response.status_code))

        # 2.forward infra_workload API requests with queries
        cloud_owner, cloud_region_id = extsys.decode_vim_id(vimid)
        infraUrl = multicloudK8sUrl + "/%s/%s/infra_workload" % (
            cloud_owner, cloud_region_id)
        if workloadid:
            infraUrl += ("/%s" % workloadid)
        if workload_query:
            infraUrl += ("?%s" % workload_query)

        # should we forward headers ? TBD
        logger.debug("request with url,content: %s,%s" %
                     (infraUrl, workload_data))
        resp = requests.post(infraUrl,
                             data=json.dumps(workload_data),
                             verify=False)
        # resp_template["workload_status_reason"] = resp.content
        logger.debug("response status,content: %s,%s" %
                     (resp.status_code, resp.content))
        return Response(data=json.loads(resp.content), status=resp.status_code)
Example no. 38
def create_spec(client):

    spec = {}
    Obj = GCP(client.connection)

    # Account Configuration
    projects = client.project.get_name_uuid_map()
    project_list = list(projects.keys())

    if not project_list:
        click.echo(highlight_text("No projects found!!!"))
        click.echo(highlight_text("Please add first"))
        return

    click.echo("\nChoose from given projects:")
    for ind, name in enumerate(project_list):
        click.echo("\t {}. {}".format(str(ind + 1), highlight_text(name)))

    project_id = ""
    while True:
        ind = click.prompt("\nEnter the index of project", default=1)
        if (ind > len(project_list)) or (ind <= 0):
            click.echo("Invalid index !!! ")

        else:
            project_id = projects[project_list[ind - 1]]
            click.echo("{} selected".format(
                highlight_text(project_list[ind - 1])))
            break

    res, err = client.project.read(project_id)
    if err:
        raise Exception("[{}] - {}".format(err["code"], err["error"]))

    project = res.json()
    accounts = project["status"]["project_status"]["resources"][
        "account_reference_list"]

    reg_accounts = []
    for account in accounts:
        reg_accounts.append(account["uuid"])

    payload = {"filter": "type==gcp"}
    res, err = client.account.list(payload)
    if err:
        raise Exception("[{}] - {}".format(err["code"], err["error"]))

    res = res.json()
    gcp_accounts = {}

    for entity in res["entities"]:
        entity_name = entity["metadata"]["name"]
        entity_id = entity["metadata"]["uuid"]
        if entity_id in reg_accounts:
            gcp_accounts[entity_name] = entity_id

    if not gcp_accounts:
        click.echo(
            highlight_text(
                "No gcp account found registered in this project !!!"))
        click.echo("Please add one !!!")
        return

    accounts = list(gcp_accounts.keys())
    spec["resources"] = {}

    click.echo("\nChoose from given GCP accounts")
    for ind, name in enumerate(accounts):
        click.echo("\t {}. {}".format(str(ind + 1), highlight_text(name)))

    while True:
        res = click.prompt("\nEnter the index of account to be used",
                           default=1)
        if (res > len(accounts)) or (res <= 0):
            click.echo("Invalid index !!! ")

        else:
            account_name = accounts[res - 1]
            account_id = gcp_accounts[account_name]  # TO BE USED

            spec["resources"]["account_uuid"] = account_id
            click.echo("{} selected".format(highlight_text(account_name)))
            break

    click.echo("\nChoose from given Operating System types:")
    os_types = gcp.OPERATING_SYSTEMS

    for ind, name in enumerate(os_types):
        click.echo("\t {}. {}".format(str(ind + 1), highlight_text(name)))

    while True:
        ind = click.prompt("\nEnter the index of operating system", default=1)
        if (ind > len(os_types)) or (ind <= 0):
            click.echo("Invalid index !!! ")

        else:
            vm_os = os_types[ind - 1]
            click.echo("{} selected".format(highlight_text(vm_os)))
            break

    # VM Configuration
    vm_name = "vm-@@{calm_unique_hash}@@-@@{calm_array_index}@@"
    spec["resources"]["name"] = click.prompt("\nEnter instance name",
                                             default=vm_name)

    zone_names = Obj.zones(account_id)
    click.echo("\nChoose from given zones")
    for ind, name in enumerate(zone_names):
        click.echo("\t {}. {}".format(str(ind + 1), highlight_text(name)))

    while True:
        ind = click.prompt("\nEnter the index of zone", default=1)
        if ind > len(zone_names):
            click.echo("Invalid index !!! ")

        else:
            zone = zone_names[ind - 1]  # TO BE USED
            spec["resources"]["zone"] = zone
            click.echo("{} selected".format(highlight_text(zone)))
            break

    machine_type_map = Obj.machine_types(account_id, zone)
    entity_names = list(machine_type_map.keys())
    click.echo("\nChoose from given machine types")
    for ind, name in enumerate(entity_names):
        click.echo("\t {}. {}".format(str(ind + 1), highlight_text(name)))

    while True:
        ind = click.prompt("\nEnter the index of machine type", default=1)
        if ind > len(entity_names):
            click.echo("Invalid index !!! ")

        else:
            machine_type = entity_names[ind - 1]
            click.echo("{} selected".format(highlight_text(machine_type)))
            spec["resources"]["machineType"] = machine_type_map[machine_type]
            break

    # Disk Details
    spec["resources"]["disks"] = get_disks(Obj, account_id, zone)

    # Blank Disk details
    spec["resources"]["blankDisks"] = get_blank_disks(zone)

    # Networks
    spec["resources"]["networkInterfaces"] = get_networks(
        Obj, account_id, zone)

    # SSH keys
    spec["resources"]["sshKeys"] = get_ssh_keys()
    metadata = {}
    metadata["items"] = []
    block_project_ssh_keys = click.prompt(
        "\n{}(y/n)".format(
            highlight_text("Want to block project-wide SSH keys")),
        default="n",
    )

    if block_project_ssh_keys[0] == "y":
        metadata["items"].append({
            "value": "true",
            "key": "block-project-ssh-keys"
        })

    # Management
    click.echo("\n\t\t", nl=False)
    click.secho("Management (Optional)", bold=True, underline=True)

    # Guest Customization
    guest_customization = {}
    choice = click.prompt(
        "\n{}(y/n)".format(highlight_text("Want to add Customization script")),
        default="n",
    )

    if choice[0] == "y":
        if vm_os == "Linux":
            startup_script = click.prompt("\nEnter Startup script", default="")
            guest_customization = {"startupScript": startup_script}

        else:
            sysprep = click.prompt("\nEnter Sysprep powershell script",
                                   default="")
            guest_customization = {"sysprep": sysprep}

    spec["resources"]["guestCustomization"] = guest_customization

    # METADATA TAGS
    choice = click.prompt(
        "\n{}(y/n)".format(
            highlight_text("Want to add key value pairs to metadata")),
        default="n",
    )
    while choice[0] == "y":
        Key = click.prompt("\n\tKey", default="")
        Value = click.prompt("\tValue", default="")

        metadata["items"].append({"key": Key, "value": Value})
        choice = click.prompt(
            "\n{}(y/n)".format(
                highlight_text("Want to add more key value pairs")),
            default="n",
        )

    spec["resources"]["metadata"] = metadata

    # NETWORK TAGS
    network_tags = []
    choice = click.prompt("\n{}(y/n)".format(
        highlight_text("Want to add network tags")),
                          default="n")
    if choice[0] == "y":
        tag_list = Obj.network_tags(account_id)

    while choice[0] == "y":
        click.echo("\nChoose from given network tags")
        for ind, name in enumerate(tag_list):
            click.echo("\t {}. {}".format(str(ind + 1), highlight_text(name)))

        while True:
            ind = click.prompt("\nEnter the index of network tag", default=1)
            if ind > len(tag_list):
                click.echo("Invalid index !!! ")

            else:
                network_tag = tag_list[ind - 1]
                tag_list.pop(ind - 1)
                network_tags.append(network_tag)
                click.echo("{} selected".format(highlight_text(network_tag)))
                break

        choice = click.prompt(
            "\n{}(y/n)".format(
                highlight_text("Want to add more network tags")),
            default="n",
        )

    spec["resources"]["tags"] = {}
    if network_tags:
        spec["resources"]["tags"] = {"items": network_tags}

    # LABELS
    labels = []
    choice = click.prompt("\n{}(y/n)".format(
        highlight_text("Want to add labels")),
                          default="n")
    while choice[0] == "y":
        Key = click.prompt("\n\tKey", default="")
        Value = click.prompt("\n\tValue", default="")

        labels.append({"key": Key, "value": Value})
        choice = choice = click.prompt("\n{}(y/n)".format(
            highlight_text("Want to add more labels")),
                                       default="n")

    spec["resources"]["labels"] = labels

    # API Access Configuration
    click.echo("\n\t\t", nl=False)
    click.secho("API Access", bold=True, underline=True)

    service_account_email = click.prompt("\nEnter the Service Account Email")
    click.echo("\nChoose from given Scopes:")
    scopes = list(gcp.SCOPES.keys())

    for ind, name in enumerate(scopes):
        click.echo("\t {}. {}".format(str(ind + 1), highlight_text(name)))

    while True:
        ind = click.prompt("\nEnter the index of scope", default=1)
        if (ind > len(scopes)) or (ind <= 0):
            click.echo("Invalid index !!! ")

        else:
            scope = scopes[ind - 1]
            click.echo("{} selected".format(highlight_text(scope)))
            break

    service_accounts = []
    # Right now only one account is possible through UI
    service_accounts.append({
        "scopes": gcp.SCOPES[scope],
        "email": service_account_email
    })

    spec["resources"]["serviceAccounts"] = service_accounts

    GcpVmProvider.validate_spec(spec)
    click.secho("\nCreate spec for your GCP VM:\n", underline=True)

    # As it contains ssh keys, So use width=1000 for yaml.dump
    click.echo(
        highlight_text(yaml.dump(spec, default_flow_style=False, width=1000)))
Example no. 39
import os
import numpy as np

# configuration
task_path = os.path.dirname(os.path.realpath(__file__))
rsc_path = task_path + "/../rsc"
cfg_path = task_path + '/../cfg.yaml'
cfg = YAML().load(open(cfg_path, 'r'))

print("Loaded cfg from {}\n".format(cfg_path))

cfg['environment']['num_envs'] = 1
cfg['environment']['render'] = True
env = vecEnv(
    cart_pole_example_env(rsc_path,
                          dump(cfg['environment'], Dumper=RoundTripDumper)))

env.wrapper.showWindow()

ob = env.reset()

# env.start_recording_video("test/env_test.mp4")

for i in range(1000):
    if i % 1 == 0:
        env.reset()

    act = np.ndarray(shape=(1, 1), dtype=np.float32)
    act[:] = 0
    ob, rew, done, newInfo = env.step(act, visualize=True)
    print(ob[0, 1])
Example no. 40
def write_testgrid_config_file(output_file, testgrid_config):
    """Writes the TestGrid test group configurations into output_file."""
    with open(output_file, 'w') as fp:
        fp.write('# ' + COMMENT + '\n\n')
        yaml.dump(
            testgrid_config, fp, Dumper=yaml.RoundTripDumper, width=float("inf"))
Example no. 41
def write_prow_configs_file(output_file, job_defs):
    """Writes the Prow configurations into output_file."""
    with open(output_file, 'w') as fp:
        yaml.dump(
            job_defs, fp, Dumper=yaml.RoundTripDumper, width=float("inf"))
#         print("Look here we have our programmer")
#         print(type(load_pr), load_pr)
#         print(load_pr.town)
#     except TypeError as e:
#         print(e)
with open("data2.txt", "wb") as file:
    pickle.dump(aa, file)

# Lets load it
with open("data2.txt", "rb") as file:
    restore_obj = pickle.load(file)
    print(type(restore_obj), restore_obj)

yaml = ruamel.yaml.YAML()
yaml.register_class(Garage)
yaml.dump(aa, sys.stdout)
# with open("data2.yaml", "w") as file:
#     yaml.dump(aa, file)
#     print("Success!")
with open("data2.yaml", "r") as file:
    config = yaml.load(file)
    print(type(config), config)
# aaa = getattr(aa, "cars", "Some")
# bb = getattr(aaa, "price", "None")
# print(aaa)
# bbb = sum(map(lambda x: x.get('price'), dict(aaa)))
# print(bbb)
# print(Car.__dict__)
# print(aa.hit_hat())
# a2a = (Garage(a, cars, c))
# a2a = (Car(c, a2, b, d, e))
Example no. 43
def lines_after(self, document, how_many):
    return "\n".join(
        dump(document, Dumper=RoundTripDumper).split('\n')
        [self.end_line(document):self.end_line(document) + how_many])
Example no. 44
def dump_yml_content(file_path, content):
    '''Dump yaml file content'''
    with open(file_path, 'w') as file:
        file.write(yaml.dump(content, default_flow_style=False))
Example no. 45
write_cl['changes'] = []

for k, v in cl_list:
    if k in tags['tags'].keys(
    ):  # Check to see if there are any valid tags, as determined by tags.yml
        v = v.rstrip()
        if v not in list(
                tags['defaults'].values()
        ):  # Check to see if the tags are associated with something that isn't the default text
            write_cl['changes'].append({tags['tags'][k]: v})

if write_cl['changes']:
    with io.StringIO() as cl_contents:
        yaml = yaml.YAML()
        yaml.indent(sequence=4, offset=2)
        yaml.dump(write_cl, cl_contents)
        cl_contents.seek(0)

        #Push the newly generated changelog to the master branch so that it can be compiled
        repo.create_file(
            f"html/changelogs/AutoChangeLog-pr-{pr_number}.yml",
            f"Automatic changelog generation for PR #{pr_number} [ci skip]",
            content=f'{cl_contents.read()}',
            branch='master',
            committer=InputGitAuthor(git_name, git_email))
    print("Done!")
else:
    print("No CL changes detected!")
    exit(0)  # Change to a '1' if you want the action to count lacking CL changes as a failure
Example no. 46
def dump(obj, default_flow_style=False, stream=None):
    return yaml.dump(obj,
                     default_flow_style=default_flow_style,
                     width=sys.maxsize,
                     Dumper=SafeDumper,
                     stream=stream)
Example no. 47
def dump_config(config_path, config, dumper=yaml.RoundTripDumper):
    with codecs.open(config_path, mode='w', encoding='utf-8') as stream:
        stream.write(to_utf8(yaml.dump(config, Dumper=dumper)))
Example no. 48
def convert_swg(k, v, annotations):
    r = {}
    r["name"] = k
    r["type"] = "record"
    r["doc"] = v.get("description", "")
    r["fields"] = {
        p: convert_p(p, v, prop, annotations)
        for p, prop in v["properties"].iteritems()
    }
    if k in annotations["_documentRoot"]:
        r["documentRoot"] = True
    return r


def swg2salad(swg, annotations):
    return {
        "$namespaces":
        annotations.get("$namespaces", {}),
        "$graph": [
            convert_swg(k, v, annotations)
            for k, v in swg['definitions'].iteritems()
        ]
    }


if __name__ == "__main__":
    with open(sys.argv[1]) as f:
        with open(sys.argv[2]) as f2:
            annotations = yaml.load(f2)
        print(yaml.dump(swg2salad(yaml.load(f), annotations), indent=4))
Example no. 49
          finalYaml[pk] = mergeYaml(finalYaml[pk], pv)
        else:
          finalYaml[pk] = pv
    else:
      finalYaml = defaultYaml
    return finalYaml

  with open(r'%s' % pathTempDockerCompose) as fileTempDockerCompose:
    yamlTempDockerCompose = yaml.load(fileTempDockerCompose)

  with open(r'%s' % pathOverride) as fileOverride:
    yamlOverride = yaml.load(fileOverride)

  mergedYaml = mergeYaml(yamlOverride, yamlTempDockerCompose)

  with open(r'%s' % pathOutput, 'w') as outputFile:
    yaml.dump(mergedYaml, outputFile, explicit_start=True, default_style='"')

  sys.exit(0)
except SystemExit:
  sys.exit(0)
except Exception:
  print("Something went wrong: ")
  print(sys.exc_info())
  traceback.print_exc()
  print("")
  print("")
  print("PyYaml Version: ", yaml.__version__)
  print("")
  sys.exit(2)
Example no. 50
            walk(v)  # maybe indent again?


with open('swagger_full.yaml') as input_file:
    #dataMap = yaml.safe_load(input_file)

    yaml = ruamel.yaml.YAML()
    yaml.indent(sequence=4, offset=2)
    dataMap = yaml.load(input_file)

    remove_invalid_doc_fields(dataMap)
    fix_incorrect_enum_type(dataMap)
    fix_incorrect_unsigned_type(dataMap)
    fix_incorrect_type_for_ignore_warnings(dataMap)
    fix_incorrect_operationIds_for_snaplock(dataMap)
    fix_incorrect_default_value_for_include_extensions(dataMap)
    fix_incorrect_min_max_for_name_mapping(dataMap)
    fix_duplicate_parameter_in_metrocluster_modify(dataMap)
    fix_incorrect_body_and_form_data(dataMap)
    fix_incorrect_string_value_for_number(dataMap)
    remove_extra_fields_for_snmp_user_definition(dataMap)
    add_unique_types_for_properties(dataMap)
    make_volume_nas_path_nillable(dataMap)
    fix_qtree_name_empty(dataMap)

    walk(dataMap)

    with open('swagger_full_converted.yaml', 'w') as output_file:
        yaml.dump(dataMap, output_file)
Example no. 51
    data = yaml.load(file_data)
    return data


current_path = os.path.abspath(".")

yamls = [x for x in os.listdir() if x.startswith('x') and x.endswith('.yaml')]

dic_a = get_yaml_data(yamls[0])

for yaml_path in yamls[1:]:
    dic_b = get_yaml_data(yaml_path)

    for key in ['caches', 'globals', 'grids', 'services', 'sources']:
        print(key)
        if dic_b.get(key):
            pass
        else:
            continue
        for i in dic_b[key]:
            # print i,data1['resources'][i]
            dic_a[key].update({i: dic_b[key][i]})

    for key in ['layers']:
        for val in dic_b[key]:
            dic_a[key].append(val)

with open('mapproxy.yaml', 'w') as fo:
    # fo.write(yaml.dump(new_dic))
    yaml.dump(dic_a, fo)
Example no. 52
                    default='')
args = parser.parse_args()
mode = args.mode

cfg_abs_path = parser.parse_args().cfg
cfg = YAML().load(open(cfg_abs_path, 'r'))

# save the configuration and other files
rsg_root = os.path.dirname(os.path.abspath(__file__)) + '/../'
log_dir = rsg_root + '/data'

# create environment from the configuration file
if args.mode == "test":  # for test mode, force # of env to 1
    cfg['environment']['num_envs'] = 1
env = Environment(
    RaisimGymEnv(__RSCDIR__, dump(cfg['environment'], Dumper=RoundTripDumper)))

# Get algorithm
if mode == 'train':
    saver = ConfigurationSaver(
        log_dir=log_dir + '/ANYmal_blind_locomotion',
        save_items=[
            rsg_root + 'raisim_gym/env/env/ANYmal/Environment.hpp',
            cfg_abs_path
        ])
    model = PPO2(
        tensorboard_log=saver.data_dir,
        policy=MlpPolicy,
        policy_kwargs=dict(net_arch=[dict(pi=[128, 128], vf=[128, 128])]),
        env=env,
        gamma=0.998,
Example no. 53
def start_instance():
    """ 
    Set up how many workers to use
    """

    global ansible
    workers_list = []
    master = {}

    # Modifies the Heat templates with number of workers and start the stack
    heat_template = 'Heat.yml'
    workers = request.args.get('workers')
    with open(heat_template) as f:
        list_doc = yaml.safe_load(f)
        parameters = list_doc['parameters']
        node_count = parameters['node_count']
        list_doc['parameters']['node_count']['default'] = int(workers)

        with open(heat_template, 'w') as f:
            yaml.dump(list_doc, f, default_flow_style=False)

        # Create stack
        cmd = ["openstack", "stack", "create",
               "team6_api", "-f", "yaml", "-t", "Heat.yml"]

        process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
        output, error = process.communicate()

    # Remove all IP addresses in  ~/.ssh/known_hosts
    cmd = ["truncate", "-s", "0", "../.ssh/known_hosts"]
    process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    output, error = process.communicate()

    # Find instances for NOVA
    time.sleep(180)
    relevant_instances = nova.servers.list(search_opts={"name": prefix})

    global NumberWorkers
    workerid = 1

    # Finds the relevant instances, aka workers
    for instance in relevant_instances:
        if (instance.status != "ACTIVE"):
            # if the instance not active, wait 5 seconds
            time.sleep(5)
        try:
            ip = instance.networks[private_net][0]
            name = instance.name
            status = instance.status
            print("Name: ", name)
            print("IP: ", ip)

            if worker_name in name:
                print("Worker " + str(name) + " has IP " + str(ip))
                workername = "sparkworker" + str(workerid)
                print("workername is: " + str(workername))
                workers_list.append({"name": workername, "ip": ip})
                print(workers_list)
                workerid = workerid + 1
                NumberWorkers = NumberWorkers + 1
                #id_nr = int(name.strip(worker_name))
                #if id_nr > 0:
                #    workers_list.append({"name": name.strip(prefix), "ip": ip})
            else:
                master = {"name": name.strip(prefix), "ip": ip}
                print("Master " + str(name) + " has IP " + str(ip))
            print("\n")
        except Exception:
            pass

    print(workers_list)
    print(master)
    print("")
    print("Got Instances")
    print("")

    print("Total number of workers = " + str(NumberWorkers))

    # Write to host files
    time.sleep(60)
    write_to_ansible_host(ansible, master, workers_list)
    print("Finished setting up the hosts files for Ansible\n")

    print("Next step is to use Ansible on our new nodes")
    time.sleep(60)
    run_ansible()
    print("=================")
    print("Finished running Ansible on the nodes")

    return "Finished setup process\n"
Example no. 54
def write_config(conf: dict, filename):
    with open(filename, 'w') as fh:
        yaml = ruamel.yaml.YAML(typ='safe')
        yaml.default_flow_style = False
        yaml.dump(conf, fh)
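For symmetry, a sketch of reading such a file back with the same safe-typed API (a hypothetical helper, assuming plain dict/list content as written above):

import ruamel.yaml

def read_config(filename) -> dict:
    with open(filename) as fh:
        yaml = ruamel.yaml.YAML(typ='safe')
        return yaml.load(fh)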
Example no. 55
def end_line(self, document):
    slicedpart = self._slice_segment(self._indices,
                                     document,
                                     include_selected=True)
    return len(
        dump(slicedpart, Dumper=RoundTripDumper).rstrip().split("\n"))
Example no. 56
                               'self'         :1/4,
                               'maximum'      :1,
                               'factor'       :'{}/2',
                               'utilization'   :1/6,
                               'moderate'     :False,
                               'heat_capacity':30,
                               'fluid_eff'    :1/6,
                               'name'         :'Na',
                               'name_col'     :'#0000ff'}}

if __name__ == "__main__":
    from ruamel import yaml
    Data = {
        'version': version,
        'font_normal': font_normal,
        'font_small': font_small,
        'font_button': font_button,
        'font_namesC': font_namesC,
        'font_namesR': font_namesR,
        'font_heat': font_heat,
        'infomation': infomation,
        'all_rods': all_rods,
        'all_coolant': all_coolant
    }
    with open('data.cfg', 'w', encoding='utf-8') as file:
        yaml.dump(Data,
                  file,
                  default_flow_style=False,
                  Dumper=yaml.RoundTripDumper,
                  allow_unicode=True)
Example no. 57
def lines(self, document):
    return "\n".join(
        dump(
            document,
            Dumper=RoundTripDumper).split("\n")[self.start_line(document) -
                                                1:self.end_line(document)])
Example no. 58
def lines_before(self, document, how_many):
    return "\n".join(
        dump(document, Dumper=RoundTripDumper).split("\n")
        [self.start_line(document) - 1 -
         how_many:self.start_line(document) - 1])
Example no. 59
def __unicode__(self):
    return "\n".join([
        '---',
        yaml.dump(self.frontmatter, Dumper=yaml.RoundTripDumper).strip(),
        '---', ''
    ])
Example no. 60
    return zooms


if __name__ == "__main__":
    # download_slideslive()
    download_workshops()
    # download_zooms()

    # load_csv()
    data = build_workshops_basics()
    slideslive = load_slideslive()
    generate_workshop_papers(slideslive)
    talks = add_invited_talks(slideslive)

    fix_talks = slideslive[[is_not_paper(r) for _, r in slideslive.iterrows()]]
    fix_talks.to_csv(
        "yamls/fix_talks.csv",
        index=False,
        columns=["Organizer track name", "Unique ID", "Title", "Speakers"],
    )

    for ws in data:
        uid = ws["UID"]
        ws["prerecorded_talks"] = talks[uid]

    yaml.scalarstring.walk_tree(data)

    with open(PATH_YAMLS / "workshops.yml", "w") as f:
        yaml.dump(data, f, Dumper=ruamel.yaml.RoundTripDumper)