Code example #1
def update_ansible_collection_requirements(filename=''):
    clone_root_path = tempfile.mkdtemp()
    yaml = YAML()  # use ruamel.yaml to keep comments
    with open(filename, "r") as arryml:
        yaml_data = arryml.read()
    tag_refs = 'refs/tags'.encode()
    all_requirements = yaml.load(_update_head_date(yaml_data))
    all_collections = all_requirements.get('collections')

    for collection in all_collections:
        collection_type = collection.get('type')
        if collection_type == 'git' and collection["version"] != 'master':
            collection_repo = clone_role(collection["name"], clone_root_path)
            collection_tags = collection_repo.refs.as_dict(tag_refs)
            collection_tags_list = [
                key.decode() for key in collection_tags.keys()
            ]
            collection_versions = list(map(version.parse,
                                           collection_tags_list))
            collection['version'] = str(max(collection_versions))

    all_requirements['collections'] = all_collections
    print("Overwriting ansible-collection-requirements")
    with open(filename, "w") as arryml:
        yaml = YAML()  # use ruamel.yaml to keep comments that could appear
        yaml.explicit_start = True
        yaml.dump(all_requirements, arryml)
        yaml.explicit_start = False
Code example #2
def submit_job():
    job_args = copy.deepcopy(request.get_json())
    logger.info("Received job to submit: {}".format(job_args["job_uid"]))
    job_uid = job_args["job_uid"]

    foreground = job_args["foreground"]
    logger.info(f"Foreground: {foreground}")
    provider = job_args["provider"]
    monkeyfs_path = get_local_filesystem_for_provider(provider)
    job_folder_path = os.path.join(MONKEYFS_LOCAL_PATH, "jobs", job_uid)
    provider_job_folder_path = os.path.join(monkeyfs_path, "jobs", job_uid)

    with open(os.path.join(job_folder_path, "job.yaml"), "w") as f:
        y = YAML()
        y.explicit_start = True
        y.default_flow_style = False
        y.dump(job_args, f)

    with open(os.path.join(provider_job_folder_path, "job.yaml"), "w") as f:
        y = YAML()
        y.explicit_start = True
        y.default_flow_style = False
        y.dump(job_args, f)

    success, msg = monkey.submit_job(job_args, foreground=foreground)
    res = {"msg": msg, "success": success}

    logger.info("Finished submitting job")
    return jsonify(res)
Code example #3
def bump_upstream_repos_sha_file(filename):
    yaml = YAML()  # use ruamel.yaml to keep comments
    with open(filename, "r") as ossyml:
        repofiledata = yaml.load(ossyml)

    repos = build_repos_dict(repofiledata)
    for project, projectdata in repos.items():
        # a _git_track_branch string of "None" means no tracking, which means
        # do not update (as there is no branch to track)
        if projectdata["trackbranch"] != "None":
            print("Bumping project %s on its %s branch" %
                  (projectdata["url"], projectdata["trackbranch"]))
            sha = get_sha_from_ref(projectdata["url"],
                                   projectdata["trackbranch"])
            repofiledata[project + "_git_install_branch"] = sha
            repofiledata.yaml_add_eol_comment(
                "HEAD as of {:%d.%m.%Y}".format(datetime.now()),
                project + "_git_install_branch",
            )
        else:
            print("Skipping project %s branch %s" %
                  (projectdata["url"], projectdata["trackbranch"]))

    with open(filename, "w") as fw:
        # Temporarily set explicit_start so '---' is written as the first line
        yaml.explicit_start = True
        yaml.dump(repofiledata, fw)
        yaml.explicit_start = False
Code example #4
File: utils.py Project: DocDocker/nuxx
def generate_dc(services,
                volumes,
                networks,
                secrets,
                configs,
                version="3",
                return_format='yaml'):
    if return_format == 'yaml':
        s = io.StringIO()
        ret_yaml = YAML()
        ret_yaml.indent(mapping=2, sequence=4, offset=2)
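        # '---' is emitted only before this leading 'version' document; the flag is
        # cleared right after the dump so later sections are appended without it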
        ret_yaml.explicit_start = True
        ret_yaml.dump({'version': DoubleQuotedScalarString(version)}, s)
        ret_yaml.explicit_start = False
        s.write('\n')

        base_version = int(float(version))

        if services:
            if base_version in [3, 2]:
                services_formatted = format_services_version_three(
                    services, volumes, networks)
                ret_yaml.dump({'services': services_formatted},
                              s,
                              transform=sequence_indent_four)

            if base_version == 1:
                services_formatted = format_services_version_one(
                    services, volumes)
                ret_yaml.dump(services_formatted,
                              s,
                              transform=sequence_indent_one)

            s.write('\n')

        if base_version in [3, 2]:
            if networks:
                networks_formatted = format_networks_top_level(
                    networks, version)
                ret_yaml.dump({'networks': networks_formatted}, s)
                s.write('\n')

        if volumes:
            volumes_formatted = format_volumes_top_level(volumes, version)
            ret_yaml.dump({'volumes': volumes_formatted}, s)
            s.write('\n')

        if secrets:
            ret_yaml.dump({'secrets': secrets}, s)
            s.write('\n')

        if configs:
            ret_yaml.dump({'configs': configs}, s)
            s.write('\n')

        s.seek(0)

        return s
Code example #5
def _generate_namespaces_yaml(namespace_yamls, outfile):
    """Generate namespaces.yaml file from namespace_yamls data.

    Args:
        namespace_yamls: list of NamespaceYAML objects
        outfile: writable stream that the YAML documents are dumped to

    """
    yaml = YAML()
    yaml.explicit_start = False
    yaml.indent(mapping=MAPPING, sequence=SEQUENCE, offset=OFFSET)
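    # The first document is dumped without '---'; explicit_start is then enabled so
    # every subsequent document gets its own '---' separator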
    for idx, namespace in enumerate(namespace_yamls):
        if idx != 0 and not yaml.explicit_start:
            yaml.explicit_start = True
        yaml.dump(vars(namespace), outfile)
Code example #6
def main():
    kanji_set = parse_wiki_table()

    yaml = YAML()

    stream = open('/home/markdr/Desktop/KanjiStuff/kanji.yaml', 'a+')
    yaml.explicit_start = True

    kanji_set = iter(kanji_set)

    for kanji in kanji_set:
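        # Fast-forward: skip entries up to and including '栄' (the resume point)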

        if kanji.character == '栄':
            break

    for kanji in kanji_set:
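        # Process the remaining kanji, stopping before '宙'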

        if kanji.character == '宙':
            break

        jisho_kanji = parse_jisho_page(kanji.character)
        kanji.parts = jisho_kanji.parts
        kanji.dependencies = jisho_kanji.dependencies
        if kanji.radical != jisho_kanji.radical:
            logging.warning('Radical for %s: Wiki: %s, Jisho: %s' % (
                kanji.character, kanji.radical, jisho_kanji.radical
            ))

        yaml.dump(kanji.as_serializable(), stream=stream)
        time.sleep(random.random() * 3)
Code example #7
def _init_yaml() -> YAML:
    yaml = YAML()

    yaml.representer.add_representer(FileFormat, _format_representer)
    yaml.representer.add_multi_representer(UUID, _uuid_representer)
    yaml.representer.add_representer(datetime, represent_datetime)
    yaml.representer.add_multi_representer(PurePath, represent_paths)

    # WAGL spits out many numpy primitives in docs.
    yaml.representer.add_representer(numpy.int8, Representer.represent_int)
    yaml.representer.add_representer(numpy.uint8, Representer.represent_int)
    yaml.representer.add_representer(numpy.int16, Representer.represent_int)
    yaml.representer.add_representer(numpy.uint16, Representer.represent_int)
    yaml.representer.add_representer(numpy.int32, Representer.represent_int)
    yaml.representer.add_representer(numpy.uint32, Representer.represent_int)
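    # NOTE: the bare numpy.int / numpy.float aliases below were removed in NumPy 1.24;
    # keep those two registrations only when running against older NumPy releases.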
    yaml.representer.add_representer(numpy.int, Representer.represent_int)
    yaml.representer.add_representer(numpy.int64, Representer.represent_int)
    yaml.representer.add_representer(numpy.uint64, Representer.represent_int)
    yaml.representer.add_representer(numpy.float, Representer.represent_float)
    yaml.representer.add_representer(numpy.float32,
                                     Representer.represent_float)
    yaml.representer.add_representer(numpy.float64,
                                     Representer.represent_float)
    yaml.representer.add_representer(numpy.ndarray, Representer.represent_list)
    yaml.representer.add_representer(numpy.datetime64,
                                     represent_numpy_datetime)

    # Match yamllint default expectations. (Explicit start/end are recommended to tell if a file is cut off)
    yaml.width = 80
    yaml.explicit_start = True
    yaml.explicit_end = True

    return yaml
Code example #8
def read_yml_files():
    """Read from yaml files each category and save to df."""
    files = []
    y = YAML()
    y.default_flow_style = None
    y.explicit_start = True
    y.indent(sequence=4, offset=2)

    for filename in glob.iglob(f"{DATA}/**/*", recursive=True):
        if not os.path.isdir(filename):
            with open(filename, "r") as file:
                row = y.load(file.read())
                files.append(row)

    df = pd.DataFrame(files)

    # Check validity of urls
    list_urls = []
    for i, r in df.iterrows():
        list_urls.append({"url": r["main_source"], "name": r["name"]})
    problems_url = pd.DataFrame(check_urls(list_urls),
                                columns=["name", "url", "error"])
    problems_url["icon"] = NOT_OK
    df = df.merge(problems_url, how="left", on="name")

    return df
Code example #9
def main():
    opts = getArgs()
    for o, a in opts:
        if o in ("-h", "--help"):
            usage()
            sys.exit()
        elif o in ("-k", "--key"):
            d_key = a
        elif o in ("-v", "--value"):
            d_value = a
        else:
            assert False, "unhandled option"
    yaml = YAML()
    yaml.explicit_start = True
    yaml.allow_unicode = True
    yaml.width = 300
    result = []
    for data in list(yaml.load_all(sys.stdin)):
        if data is not None:
            if (data['kind'] == "ConfigMap") or \
               (data['kind'] == "Secret"):
                # update data: key=value
                data['data'][d_key] = d_value
                result.append(data)
            elif 'kind' in data.keys():
                result.append(data)
    yaml.dump_all(result, sys.stdout)
Code example #10
File: sprites.py Project: martyni/rpg
    def to_yaml(self):
        """Print certain values to the screen in YAML format."""
        yaml = YAML()
        yaml.explicit_start = True
        yaml.indent(sequence=4, offset=2)
        data = {"i": self.i, "j": self.j, "x": self.x, "y": self.y}
        data.update({"states": self.state_files})
        # Emit to the screen as the docstring describes (assumes sys is imported)
        yaml.dump(data, sys.stdout)
Code example #11
def main():
    opts = getArgs()
    for o, a in opts:
        if o in ("-h", "--help"):
            usage()
            sys.exit()
        elif o in ("-k", "--key"):
            key_to_update = a
        elif o in ("-v", "--value"):
            value_to_update = a
        else:
            assert False, "unhandled option"

    yaml = YAML()
    yaml.explicit_start = True
    yaml.allow_unicode = True
    yaml.width = 300

    data_list = key_to_update.split(".")
    data_to_refer = value_to_update
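    # Wrap right-to-left so a dotted key like 'a.b.c' becomes {'a': {'b': {'c': value_to_update}}}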
    for k in data_list[::-1]:
        data_to_refer = {k: data_to_refer}

    result = []
    for data in list(yaml.load_all(sys.stdin)):
        if data is not None:
            data = update_data(data, data_to_refer)
            result.append(data)
    yaml.dump_all(result, sys.stdout)
Code example #12
def backup_description():

    heap_file = open("./heap.yaml")
    hosts_file = open("./hosts.yaml")
    heap_cfg = list(yaml.load_all(heap_file))
    hosts_cfg = list(yaml.load_all(hosts_file))

    backup_file = open("./backup_description.yaml", 'w')

    yml = YAML()
    yml.explicit_start = True
    yml.Loader = ruamel.yaml.RoundTripLoader
    yml.Dumper = ruamel.yaml.RoundTripDumper

    for j in heap_cfg:
        for i in hosts_cfg:
            if j['HOST'] == i['HOST']:
                yaml_str = "VM: " + j['VM'] + "\nHOST: " + j['HOST']\
                           + "\nDESC: " + api_get_vm_desc(
                            j['HOST'], i['USER'], i['PASSWORD'], j['VM']
                            ) + "\n"
                data = yml.load(yaml_str)
                yml.dump(data, backup_file)
                break

    heap_file.close()
    hosts_file.close()
    backup_file.close()
Code example #13
def _watcher(osde2ectl_cmd, account_config, my_path, cluster_count, delay,
             my_uuid):
    logging.info('Watcher thread started')
    logging.info('Getting status every %d seconds' % int(delay))
    yaml = YAML(pure=True)
    yaml.default_flow_style = False
    yaml.explicit_start = False
    yaml.explicit_end = False
    yaml.allow_duplicate_keys = True
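    # Persist the account config for osde2ectl, then load it back so keys such as
    # ocm.userOverride can be looked up while parsing the command output below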
    yaml.dump(account_config, open(my_path + "/account_config.yaml", 'w'))
    my_config = yaml.load(open(my_path + "/account_config.yaml"))
    my_thread = threading.current_thread()
    cmd = [osde2ectl_cmd, "list", "--custom-config", "account_config.yaml"]
    # The watcher stops once the thread's "run" attribute is set to anything other than True
    while getattr(my_thread, "run", True):
        logging.debug(cmd)
        process = subprocess.Popen(cmd,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE,
                                   cwd=my_path,
                                   universal_newlines=True)
        stdout, stderr = process.communicate()

        cluster_count = 0
        state = {}
        status = {}
        error = []
        # Count the various states/statuses and report them via logging
        for line in stdout.splitlines():
            if my_config['ocm']['userOverride'] in line:
                cluster_count += 1
                state_key = line.split()[2]
                status_key = line.split()[3]
                state[state_key] = state.get(state_key, 0) + 1
                status[status_key] = status.get(status_key, 0) + 1

                if state_key == "error":
                    error.append(line.split()[1])
                    logging.debug(line.split()[1])

        logging.info('Requested Clusters for test %s: %d' %
                     (my_uuid, cluster_count))
        if cluster_count != 0:
            logging.debug(state.items())
            logging.debug(status.items())
            state_output = "Current clusters state: " + str(
                cluster_count) + " clusters"
            status_output = "Current clusters status: " + str(
                cluster_count) + " clusters"
            for i1 in state.items():
                state_output += " (" + str(i1[0]) + ": " + str(i1[1]) + ")"
            for i2 in status.items():
                status_output += " (" + str(i2[0]) + ": " + str(i2[1]) + ")"
            logging.info(state_output)
            logging.info(status_output)
            if error:
                logging.warning('Clusters in error state: %s' % error)

        time.sleep(delay)
    logging.info('Watcher exiting')
Code example #14
def read_yaml(yaml_file):
    """
    Load yaml file into dict structure.

    Argument:
        yaml_file(str) - full path/filename of yaml
    Returns:
        yaml_file_content(dict) - containing contents of yaml file
    Raises:
        ValueError raised when package yaml cannot be read
    """
    try:
        # Read the package yaml file
        yaml = YAML()
        yaml.explicit_start = True
        yaml.indent(mapping=3)
        yaml.preserve_quotes = True  # not necessary for your current input

        with open(yaml_file) as fp:
            data = yaml.load(fp)
        yaml.dump(data, sys.stdout)
    except (IOError, ValueError):
        # Log error and raise exception if package yaml can't be read.
        error_msg = '{1}\nError loading {0}\n'.format(yaml_file, traceback.format_exc())
        LOG.error("Reading the vars.yml file failed with " + error_msg)
        raise ValueError(error_msg)

    return data
Code example #15
File: create_jobs.py Project: deliamadalina/releng
def create_jobs(release, project_yaml):
    """Add YAML to JJB files for release stream"""
    logger = logging.getLogger(__file__)

    # We assume here that projects keep their subrepo jobs under the parent
    # project name. Otherwise we'll have to look for jjb/<repo> for each
    # branch listed.
    project, _ = next(iter(project_yaml['branches'][0]['location'].items()))

    yaml_parser = YAML()
    yaml_parser.preserve_quotes = True
    yaml_parser.explicit_start = True
    # yaml_parser.indent(mapping=4, sequence=0, offset=0)
    # These are some esoteric values that produce indentation matching our jjb
    # configs
    # yaml_parser.indent(mapping=3, sequence=3, offset=2)
    # yaml_parser.indent(sequence=4, offset=2)
    yaml_parser.indent(mapping=2, sequence=4, offset=2)

    (job_files, skipped_files) = jjb_files(project, release)

    if skipped_files:
        logger.info("Jobs already exists for %s in files: %s", project,
                    ', '.join(skipped_files))
    # Exit if there are no jobs to create
    if not job_files:
        return
    logger.info("Creating Jenkins Jobs for %s in files: %s", project,
                ', '.join(job_files))

    stable_branch_stream = """\
      %s:
          branch: 'stable/{stream}'
          gs-pathname: '/{stream}'
          disabled: false
    """ % release

    stable_branch_yaml = yaml_parser.load(stable_branch_stream)
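    # Give the new entry an explicit &<release> anchor so it is always emitted when dumped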
    stable_branch_yaml[release].yaml_set_anchor(release, always_dump=True)

    for job_file in job_files:
        yaml_jjb = yaml_parser.load(open(job_file))
        if 'stream' not in yaml_jjb[0]['project']:
            continue

        # TODO: Some JJB files don't have 'stream'
        project_config = yaml_jjb[0]['project']['stream']
        # There is an odd issue where just appending adds a newline before the
        # branch config, so we insert (presumably after master) instead.
        project_config.insert(1, stable_branch_yaml)

        # NOTE: In the future, we may need to override one or multiple of the
        #       following ruamel Emitter methods:
        #         * ruamel.yaml.emitter.Emitter.expect_block_sequence_item
        #         * ruamel.yaml.emitter.Emitter.write_indent
        #       To hopefully replace the need to shell out to sed...
        yaml_parser.dump(yaml_jjb, open(job_file, 'w'))
        args = ['sed', '-i', 's/^  //', job_file]
        subprocess.Popen(args, stdout=subprocess.PIPE, shell=False)
Code example #16
File: derived_dataset.py Project: jic-dtool/dtoolai
    def _create_readme(self):

        yaml = YAML()
        yaml.explicit_start = True
        yaml.indent(mapping=2, sequence=4, offset=2)
        stream = StringIO()
        yaml.dump(self.readme_dict, stream)
        self.proto_dataset.put_readme(stream.getvalue())
Code example #17
File: io.py Project: rocky/mult-by-constants
def dump_yaml(cache: MultCache, out=sys.stdout, compact=False) -> None:
    table = reformat_cache(cache)
    yaml = YAML()
    if compact:
        yaml.compact(seq_seq=False, seq_map=False)
    else:
        yaml.explicit_start = True  # type: ignore
    yaml.dump(table, out)
Code example #18
def write_instance_details(local_instances_file, instance_details, hosts):
    instance_details["hosts"] = hosts
    with open(local_instances_file, "w") as f:
        y = YAML()
        instance_details.fa.set_block_style()
        y.explicit_start = True
        y.default_flow_style = False
        y.dump(instance_details, f)
        print("Writing local instance details to: ", local_instances_file)
Code example #19
File: check_conf.py Project: majj/nimroad
def main2(conf):

    yaml2 = YAML()
    with open(conf) as f:
        y = yaml2.load(f)
    yaml2.explicit_start = True
    #yaml.dump(y, sys.stdout)
    yaml2.indent(mapping=4, sequence=4, offset=2)
    yaml2.dump(y, sys.stdout)
Code example #20
def load_settings() -> Dict:
    """Load settings from the local configuration file"""
    yml = YAML(typ="rt")
    yml.explicit_start = True
    yml.indent(sequence=4, offset=2)
    settings_file = SETTINGS_FILE
    with open(settings_file, "r") as settings_fp:
        data = yml.load(settings_fp)
    return data
Code example #21
def main(v6_prefix):
    """
    Execution begins here.
    """

    # Load MAC addresses from file
    with open("input_macs.txt", "r") as handle:
        lines = handle.readlines()

    # Initialize Ansible YAML inventory dictionary
    ansible_inv = {"all": {"children": {"remotes": {"hosts": {}}}}}

    # Iterate over the lines read from file
    for index, line in enumerate(lines):

        # Clean up the line; remove whitespace and delimiters
        mac = line.strip().lower()
        for delim in ["-", ":", "."]:
            mac = mac.replace(delim, "")

        # If MAC is invalid, skip it and continue with the next MAC
        if not is_valid_mac(mac):
            continue

        # Build the low-order 64 bits of the IPv6 address
        host_addr = f"{mac[:4]}:{mac[4:6]}ff:fe{mac[6:8]}:{mac[8:]}"

        # Flip the 7th bit of first byte (3rd bit of second nibble) using xor
        flip = hex(int(host_addr[1], 16) ^ 2)[-1]

        # Re-assemble host bits with flipped bit plus IPv6 prefix
        eui64_addr = f"{v6_prefix}{host_addr[:1]}{flip}{host_addr[2:]}"

        # Display MAC address and newly-computed EUI-64 IPv6 address
        print(mac, eui64_addr)

        # Update the Ansible inventory dict with the new host. The hostname
        # will be "node_" plus a sequential index (user can modify).
        # The IPv6 address is the address to which Ansible connects and
        # the original MAC is retained for documentation/troubleshooting
        ansible_inv["all"]["children"]["remotes"]["hosts"].update({
            f"node_{index + 1}": {
                "ansible_host": DoubleQuotedScalarString(eui64_addr),
                "original_mac": DoubleQuotedScalarString(mac),
            }
        })

    # Instantiate the YAML object, preserving quotes and
    # using explicit start (---) and end (...) markers
    yaml = YAML()
    yaml.preserve_quotes = True
    yaml.explicit_start = True
    yaml.explicit_end = True

    # Dump the Ansible inventory to a new file for use later
    with open("eui64_hosts.yml", "w") as handle:
        yaml.dump(ansible_inv, handle)
Code example #22
File: display.py Project: mimi1vx/repose
    def __open_yaml():
        from ruamel.yaml import YAML

        yml = YAML(typ="safe", pure=False)
        yml.default_flow_style = False
        yml.explicit_end = True
        yml.explicit_start = True
        yml.indent(mapping=4, sequence=4, offset=2)
        return yml
Code example #23
File: display.py Project: openSUSE/repose
    def list_products_yaml(self, hostname, system):
        from ruamel.yaml import YAML
        yml = YAML(typ='safe', pure=False)
        yml.default_flow_style = False
        yml.explicit_end = True
        yml.explicit_start = True
        yml.indent(mapping=4, sequence=4, offset=2)
        data = system.to_refhost_dict()
        data["name"] = str(hostname)
        yml.dump(data, self.output)
Code example #24
File: download_hash.py Project: AygulLina/k8s-master
def open_main_yaml():
    yaml = YAML()
    yaml.explicit_start = True
    yaml.preserve_quotes = True
    yaml.width = 4096

    with open(MAIN_YML, "r") as main_yml:
        data = yaml.load(main_yml)

    return data, yaml
Code example #25
def update_settings(settings: Dict) -> Dict:
    """Updates settings in the local configuration file"""
    yml = YAML(typ="rt")
    yml.explicit_start = True
    yml.indent(sequence=4, offset=2)
    settings_file = SETTINGS_FILE
    with open(settings_file, "r") as settings_fp:
        yml.load(settings_fp)  # parse the existing file first (result unused)
    with open(settings_file, "w") as settings_fp:
        yml.dump(settings, settings_fp)
    return settings  # return the written settings to match the declared Dict return type
Code example #26
File: display.py Project: ktsamis/repose
    def list_products_yaml(self, hostname, system):
        from ruamel.yaml import YAML
        yml = YAML(typ='safe', pure=False)
        yml.default_flow_style = False
        yml.explicit_end = True
        yml.explicit_start = True
        yml.indent(mapping=4, sequence=4, offset=2)
        data = system.to_refhost_dict()
        data["name"] = str(hostname)
        yml.dump(data, self.output)
Code example #27
def night_mode(obj):
    from ruamel.yaml import YAML

    yaml = YAML()
    yaml.explicit_start = True
    yaml.indent(mapping=4)
    yaml.preserve_quotes = True
    settings = obj.settings['night_mode']
    now = datetime.now()
    if settings['enabled'] and (
            now - settings['last_sleep']).total_seconds() > 12 * 60 * 60:
        start = datetime(now.year, now.month, now.day, settings['start_hour'],
                         0, 0, 0)
        if now.hour >= settings['end_hour']:
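            # day + 1 overflows at the end of a month; fall back to the 1st of the next month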
            try:
                end = datetime(now.year, now.month, now.day + 1,
                               settings['end_hour'], 0, 0, 0)
            except ValueError:
                end = datetime(now.year, now.month + 1, 1,
                               settings['end_hour'], 0, 0, 0)
        else:
            end = datetime(now.year, now.month, now.day, settings['end_hour'],
                           0, 0, 0)
        if start <= now < end:
            with open(obj.config_file) as config:
                new_config = yaml.load(config)
                new_config['settings']['night_mode'][
                    'last_sleep'] = datetime.now()
                new_config['settings']['night_mode']['need_relist'] = True
            with open(obj.config_file, 'w') as update:
                yaml.dump(new_config, update)
            relist = settings['relist_for']
            active_transfers = obj.__get_items__(
                p_element='../..',
                p_type='xpath',
                gp_element="//*[contains(text(), 'Active Transfers')]",
                gp_type='xpath',
                get_price=False)
            if len(active_transfers) > 0:
                longest_time_left = active_transfers[-1]['time_left'] + 10
                multi_log(
                    obj, '[Night Mode] Waiting until current transfers expire')
                obj.keep_alive(
                    longest_time_left)  # Ensure all transfers are expired
            multi_log(
                obj,
                '[Night Mode] Relisting all transfers for {}'.format(relist))
            obj.relist_individually(at_market=False, duration=relist)
            sleep_time = (end - datetime.now()).seconds
            multi_log(
                obj,
                '[Night Mode] Sleeping until {}'.format(settings['end_hour']))
            obj.keep_alive(sleep_time)
            if settings['wait_for_enter']:
                obj.wait_for_enter()
Code example #28
def write_commented_file(filename, yaml_params):
    yaml_params.fa.set_block_style()
    with open(filename, "w") as f:
        try:
            y = YAML()
            y.explicit_start = True
            y.default_flow_style = False
            y.dump(yaml_params, f)
        except Exception:
            print(f"Failed to write aws file: {filename}")
            exit(1)
Code example #29
def main(mgmt_prefix):
    """
    Execution starts here.
    """

    # Create an IPv6 network object to test subnet containment later
    mgmt_net = IPv6Network(mgmt_prefix)

    # Create netmiko SSH connection handler to access the device
    conn = Netmiko(
        host="192.0.2.1",
        username="******",
        password="******",
        device_type="cisco_ios",
    )

    # Should be using "show bgp ipv6 unicast", but the genie parser has a bug:
    # https://github.com/CiscoTestAutomation/genieparser/issues/362
    resp = conn.send_command("show bgp all", use_genie=True)
    v6_rte = resp["vrf"]["default"]["address_family"]["ipv6 unicast"]["routes"]

    # Initialize Ansible YAML inventory dictionary
    ansible_inv = {"all": {"children": {"remotes": {"hosts": {}}}}}

    # Iterate over all collected BGP prefixes
    for index, prefix in enumerate(v6_rte.keys()):

        # Create an IPv6 network representing the specific prefix
        prefix_net = IPv6Network(prefix.lower())

        # Test for subnet containment and for /128 mask
        if prefix_net.subnet_of(mgmt_net) and prefix.endswith("/128"):

            # Assemble inventory item and update inventory dict
            prefix_str = DoubleQuotedScalarString(prefix_net.network_address)
            ansible_inv["all"]["children"]["remotes"]["hosts"].update(
                {f"node_{index + 1}": {
                    "ansible_host": prefix_str
                }})
            print(prefix_str)

    # Close connection when finished
    conn.disconnect()

    # Instantiate the YAML object, preserving quotes and
    # using explicit start (---) and end (...) markers
    yaml = YAML()
    yaml.preserve_quotes = True
    yaml.explicit_start = True
    yaml.explicit_end = True

    # Dump the Ansible inventory to a new file for use later
    with open("bgp_hosts.yml", "w") as handle:
        yaml.dump(ansible_inv, handle)
Code example #30
def read_meta_file(filename):
    yaml = YAML()
    yaml.explicit_start = True
    yaml.indent(mapping=2, sequence=4, offset=2)

    data = None
    with open(filename, 'r') as stream:
        data = yaml.load(stream)

    with open(filename, 'w') as f:
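        # Re-dumping the same data normalizes indentation and adds the '---' document start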
        yaml.dump(data, f)
Code example #31
def read_yaml_file(filename):
    yaml = YAML()
    yaml.explicit_start = True
    yaml.indent(mapping=2, sequence=4, offset=2)
    yaml.allow_unicode = True

    data = None
    with open(filename, 'r') as stream:
        data = yaml.load(stream)

    with open(filename, 'w') as f:
        yaml.dump(data, f, transform=sequence_indent_four)
Code example #32
File: tablegen.py Project: pigletfly/istio.github.io
                key = newkey
                newkey = ''

        lineNum += 1

    return ret_val

with open('index.md', 'r') as f:
    endReached = False

    data = f.read().split('\n')
    for d in data:
        print(d)
        if "<!-- AUTO-GENERATED-START -->" in d:
            print('| Key | Default Value | Description |')
            print('| --- | --- | --- |')
            break

    with open('values.yaml', 'r') as f_v:
        d_v = f_v.read()
        yaml = YAML()
        code = yaml.load(d_v)
        yaml.explicit_start = True
        yaml.dump(code, sys.stdout, transform=decode_helm_yaml)

    for d in data:
        if "<!-- AUTO-GENERATED-END -->" in d:
            endReached = True
        if endReached:
            print(d)