def read_config(base_dir, fns, fn):
    """读取目录下的配置文件"""
    configs = dict()
    kind = {
        PATH_SECRETS: "Secret",
        PATH_CONFIGS: "ConfigMap"
    }.get(base_dir, None)
    if os.path.exists(LOCAL_FUNC_CONFIG_PATH) and kind is not None:
        docs = yaml.full_load_all(open(LOCAL_FUNC_CONFIG_PATH))
        for doc in docs:
            if doc.get("kind", None) == kind:
                paths = [
                    doc.get("metadata", {}).get("namespace", "none"),
                    doc.get("metadata", {}).get("name", "none")
                ]
                for k, v in doc.get("data", {}).items():
                    add_params(configs, paths, k, v)
    if os.path.exists(GLOBAL_FUNC_CONFIG_PATH) and kind is not None:
        docs = yaml.full_load_all(open(GLOBAL_FUNC_CONFIG_PATH))
        for doc in docs:
            if doc.get("kind", None) == kind:
                paths = [
                    doc.get("metadata", {}).get("namespace", "none"),
                    doc.get("metadata", {}).get("name", "none")
                ]
                for k, v in doc.get("data", {}).items():
                    add_params(configs, paths, k, v)
    # Set aliases so the user can get the parameters easily
    if "fission-secret-configmap" in configs:
        configs[GLOBAL_CONFIG_KEY] = configs["fission-secret-configmap"].get(
            "fission-function-global-configmap", {})
    local_key = "func-{}".format(fn)
    if fns in configs and local_key in configs.get(fns, {}):
        configs[LOCAL_CONFIG_KEY] = configs.get(fns, {}).get(local_key, {})
    return configs
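Most examples on this page follow the same pattern as read_config above: iterate over yaml.full_load_all() and filter documents by a field such as kind. A minimal, self-contained sketch of that multi-document pattern, using contrived inline YAML rather than the Fission config files assumed above:

import yaml

MULTI_DOC = """\
kind: ConfigMap
metadata: {name: app-config, namespace: demo}
data: {LOG_LEVEL: debug}
---
kind: Secret
metadata: {name: app-secret, namespace: demo}
data: {API_KEY: abc123}
"""

# full_load_all() yields one Python object per YAML document in the stream.
for doc in yaml.full_load_all(MULTI_DOC):
    if doc.get("kind") == "ConfigMap":
        print(doc["metadata"]["name"], doc["data"])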
Example #2
    def render(self, values: HelmValues) -> List[Any]:
        with NamedTemporaryFile() as tmp_file:
            values_json = json.loads(values.json(exclude_none=True))
            pprint(values_json)
            content = yaml.dump(values_json)
            tmp_file.write(content.encode())
            tmp_file.flush()

            command = [
                "helm",
                "template",
                self.name,
                os.path.join(git_repo_root(), "helm", "dagster"),
                "--dependency-update",
                "--debug",
                *['--values', tmp_file.name],
                *["--show-only", self.output],
            ]

            templates = subprocess.check_output(command)

            print("\n--- Helm Templates ---")  # pylint: disable=print-call
            print(templates.decode())  # pylint: disable=print-call

            k8s_objects = [
                self.api_client._ApiClient__deserialize_model(  # pylint: disable=W0212
                    k8s_object, self.model
                )
                for k8s_object in yaml.full_load_all(templates)
                if k8s_object
            ]

            return k8s_objects
Example #3
    def test_rc_file_readable_by_scopesim_parser(self):
        default_file = os.path.join(rc.__pkg_dir__, "defaults.yaml")
        with open(default_file, "r") as f:
            default_dict = [dic for dic in yaml.full_load_all(f)]

        assert isinstance(default_dict[0], dict)
        assert len(default_dict[0]) > 0
Example #4
def load_yamls(*paths: str) -> dict:
    result = {}
    for p in paths:
        with open(p, 'r', encoding='utf-8') as fp:
            for doc in yaml.full_load_all(fp):
                result.update(doc)
    return result
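A hedged usage sketch for load_yamls (the file names are hypothetical): every document of every file is merged with dict.update(), so keys from later documents and later files overwrite earlier ones.

# Hypothetical files; later files win on key collisions.
settings = load_yamls("defaults.yaml", "production.yaml")
print(settings.get("database", {}).get("host"))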
Example #5
 def _read_services_descriptions(files):
     descriptions = []
     for f in files:
         docs = list(yaml.full_load_all(f))
         descriptions.append(docs)
     services = list(chain.from_iterable(descriptions))
     return ServiceDescriptions._slugify_names(services)
Example #6
def render_chart(name="RELEASE-NAME",
                 values=None,
                 show_only=None,
                 validate_schema=True):
    """
    Function that renders a helm chart into dictionaries. For helm chart testing only
    """
    values = values or {}
    with NamedTemporaryFile() as tmp_file:
        content = yaml.dump(values)
        tmp_file.write(content.encode())
        tmp_file.flush()
        command = [
            "helm", "template", name, sys.path[0], '--values', tmp_file.name
        ]
        if show_only:
            for i in show_only:
                command.extend(["--show-only", i])
        templates = subprocess.check_output(command)
        k8s_objects = yaml.full_load_all(templates)
        k8s_objects = [k8s_object for k8s_object in k8s_objects
                       if k8s_object]  # type: ignore
        if validate_schema:
            for k8s_object in k8s_objects:
                validate_k8s_object(k8s_object)
        return k8s_objects
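A hedged usage sketch for render_chart (the release name, values, and template path are invented; the chart itself is expected at sys.path[0]):

# Render only the deployment template with a couple of overridden values.
docs = render_chart(
    name="my-release",
    values={"replicaCount": 2, "image": {"tag": "1.2.3"}},
    show_only=["templates/deployment.yaml"],
)
for doc in docs:
    print(doc["kind"], doc["metadata"]["name"])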
Example #7
def main():
    """
  """
    parser = argparse.ArgumentParser(
        description=
        'Fix up source adoc files for nicer formatting in the static site')
    parser.add_argument('topic_map', type=str, help='Path to _topic_map.yml')
    args = parser.parse_args()

    # FIX: The Config class should have methods to parse and save config options.
    config = Config()

    if validate_file_path(args.topic_map):
        config.topic_map_path = pathlib.Path(args.topic_map)
    config.input_dir = config.topic_map_path.parent

    # Open and read the topic map. If the file cannot be parsed,
    # exit the program and return 1.
    with config.topic_map_path.open(mode='r') as stream:
        try:
            config.topic_map_yaml = list(yaml.full_load_all(stream))
        except yaml.YAMLError as e:
            print(f'Error: Invalid YAML in _topic_map.yml: {e}')
            sys.exit(1)

    fix_doc_tree(config)

    print('OK')
Example #8
def loadYamlReplaceVarLocal(yaml_file, fields, multi_resource=False):

    fileToBeReplaced = path.join(path.dirname(__file__), yaml_file)
    if not path.exists(fileToBeReplaced):
        print("The file {} does not exist" "".format(fileToBeReplaced))
        sys.exit(1)

    try:
        with open(fileToBeReplaced, 'r') as f:
            filedata = f.read()

            for searchwrd, replwrd in fields.items():
                filedata = filedata.replace(searchwrd, replwrd)
            if multi_resource:
                yaml_data = list(yaml.full_load_all(filedata))
            else:
                yaml_data = yaml.full_load(filedata)
        # print(yaml_data)
    except request.URLError as e:
        print(e.reason)
        sys.exit(1)

    return yaml_data


# dataDict = {"{{region_name}}":"us-west-2","{{cluster_name}}":"_my_cluster","{{vpc_id}}": "testeste12345"}
# loadYamlLocal('../app_resources/jupyter-config.yaml', True)
Example #9
def logs(log):
    logfile = os.path.join(context.resources.logger.log_path, log)
    if not os.path.exists(logfile):
        flask.abort(404)
    with open(logfile, 'r') as f:
        objs = [obj for obj in yaml.full_load_all(f)]
    if len(objs) == 1:
        obj = objs[0]
    else:
        obj = {'entries': objs}

    object_type = obj.get('object_type', None)
    if object_type == fetch.rid.FetchedISAs.__name__:
        obj = {
            'summary': summarize.isas(fetch.rid.FetchedISAs(obj)),
            'details': obj,
        }
    elif object_type == fetch.scd.FetchedEntities.__name__:
        obj = {
            'summary': summarize.entities(fetch.scd.FetchedEntities(obj)),
            'details': obj,
        }
    elif object_type == fetch.rid.FetchedFlights.__name__:
        obj = {
            'summary': summarize.flights(fetch.rid.FetchedFlights(obj)),
            'details': obj,
        }

    return flask.render_template('log.html',
                                 log=_redact_and_augment_log(obj),
                                 title=logfile)
Example #10
    def test_all_files_referenced_in_yamls_exist(self):
        missing_files = []
        for pkg_name in PKG_DICT:

            no_missing = 0
            yaml_files = glob(PKG_DICT[pkg_name] + "/*.yaml")
            for yaml_file in yaml_files:
                with open(yaml_file) as f:
                    try:
                        yaml_dicts = [dic for dic in yaml.full_load_all(f)]
                    except:
                        yaml_dicts = []

                fnames = []
                for yaml_dict in yaml_dicts:
                    fnames += recursive_filename_search(yaml_dict)

                for fname in fnames:
                    if fname is not None:
                        if not isinstance(fname, (list, tuple)):
                            fname = [fname]
                        for fn in fname:
                            if fn.lower() != "none" and fn[0] != "!":
                                full_fname = pth.join(PKG_DICT[pkg_name], fn)
                                if not pth.exists(full_fname):
                                    BADGES[pkg_name]["structure"][
                                        fn] = "missing"
                                    no_missing += 1
                                    missing_files += [full_fname]

            if no_missing == 0:
                BADGES[f"!{pkg_name}.structure.no_missing_files"] = True
        assert not missing_files, f"{missing_files}"
Example #11
def load_yaml_replace_var_local(yaml_file, fields, multi_resource=False, write_output=False):

    file_to_replace=path.join(path.dirname(__file__), yaml_file)
    if not path.exists(file_to_replace):
        print("The file {} does not exist"
            "".format(file_to_replace))
        sys.exit(1)

    try:
        with open(file_to_replace, 'r') as f:
            filedata = f.read()

            for searchwrd, replwrd in fields.items():
                filedata = filedata.replace(searchwrd, replwrd)
            if multi_resource:
                yaml_data = list(yaml.full_load_all(filedata))
            else:
                yaml_data = yaml.full_load(filedata) 
        if write_output:
            with open(file_to_replace, "w") as f:
                yaml.dump(yaml_data, f, default_flow_style=False, allow_unicode = True, sort_keys=False)
    
        # print(yaml_data)
    except request.URLError as e:
        print(e.reason)
        sys.exit(1)

    return yaml_data
Example #12
 def read_yaml_all(path):
     with open(path, mode='r', encoding='utf-8') as f:
         raw_data = f.read()
         yaml_data = yaml.full_load_all(raw_data)
     print(yaml_data)
     print(type(yaml_data))
     return list(yaml_data)
Example #13
 def getDatas(self, key):
     all_data = yaml.full_load_all(self.f)
     d = None
     for data in all_data:
         if data.get(key):
             d = data.get(key)
     return d
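Because getDatas keeps scanning after a match, the value from the last document that defines key wins. A contrived sketch of that behaviour:

import yaml

docs = yaml.full_load_all("user: {name: a}\n---\nuser: {name: b}\n")
value = None
for doc in docs:
    if doc.get("user"):
        value = doc.get("user")
print(value)  # {'name': 'b'} -- the last matching document wins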
Example #14
def check_pipeline_run_was_successful(pipeline_run_path: str) -> None:
    with open(pipeline_run_path, 'r') as f:
        for pipeline_run in yaml.full_load_all(f):
            run_status = pipeline_run['status']['state']
            if run_status != 'SUCCESS':
                raise AssertionError(
                    f'pipeline run {pipeline_run_path} was unsuccessful, with status: {run_status}'
                )
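A hedged sketch of the input check_pipeline_run_was_successful expects: one YAML document per pipeline run, each carrying a status.state field (the field names come from the code above, the rest is contrived):

import yaml

runs = [{"status": {"state": "SUCCESS"}}, {"status": {"state": "SUCCESS"}}]
with open("pipeline_runs.yaml", "w") as f:
    yaml.dump_all(runs, f)  # writes one YAML document per run

check_pipeline_run_was_successful("pipeline_runs.yaml")  # raises if any run is not SUCCESS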
Example #15
def load_file(r_path):
    with open(r_path, 'r') as resource:
        if r_path.endswith('json'):
            # json.loads expects a string, so read the file contents first
            return loads(resource.read()).get("Resources")
        elif r_path.endswith('yaml') or r_path.endswith('yml'):
            # full_load_all yields documents lazily; take the first one
            return next(yaml.full_load_all(resource)).get("Resources")
        elif r_path.endswith('template'):
            return load(resource).get("Resources")
Example #16
def load_yaml(yaml_content):
    try:
        return list(yaml.full_load_all(fix_yaml(yaml_content)))

    except yaml.parser.ParserError:
        raise BrokenYamlError("""
            It seems that your YAML file is broken.

            Run it through some online validators to find the reason behind it.
        """)
Example #17
    def _load_template(self, path, update):
        with open(path, "r", encoding="utf-8") as fp:

            loaded_template = yaml.full_load_all(fp.read())

            for index, raw_template in enumerate(loaded_template):

                self._get_validated_obj(raw_template, self._template_schema)

                kind = raw_template["kind"]
                name = raw_template["name"]
                meta = raw_template["meta"]
                raw_template["spec"] = raw_template.get("spec", {})
                random_name = False

                if name == "random":
                    name = base64.b64encode(
                        (path + str(index)).encode()).decode()
                    random_name = True

                category = raw_template["category"]
                tags = raw_template["tags"]

                if name in self._templates and update == False:
                    raise ValueError(f"name already exists. name is {name}")

                if kind == "":
                    raise ValueError("kind is empty.")

                if name == "":
                    raise ValueError("name is empty.")

                if category == "":
                    raise ValueError("category is empty.")

                if not isinstance(meta, dict):
                    raise ValueError(f"meta is not a dict. meta is {meta}")

                # spec validate
                if "template_schema" in raw_template and raw_template[
                        "template_schema"] != {}:
                    raw_template["spec"] = self._get_validated_obj(
                        raw_template["spec"], raw_template["template_schema"])

                self._templates[name] = {
                    "kind": kind,
                    "name": name,
                    "category": category,
                    "tags": tags,
                    "origin": raw_template,
                    "path": path,
                    "meta": meta,
                    "random_name": random_name
                }
Example #18
    def LoadYamlDictionary(self, yaml_output):
        """Load YAML parameter dictionary generated by GenerateYamlDictionary()."""

        with open(yaml_output, 'r') as f:
            for param in yaml.full_load_all(f):
                pid = param.pop('id')
                device_net = param.pop('device_net')
                macro = param.pop('macro')
                can = param.pop('can')
                bank = param.pop('bank')
                typ = param.pop('type')
                self._AddTableEntry(pid, device_net, macro, can, bank, typ)
Example #19
def logs(log):
    logfile = os.path.join(context.resources.logger.log_path, log)
    if not os.path.exists(logfile):
        flask.abort(404)
    with open(logfile, 'r') as f:
        objs = [obj for obj in yaml.full_load_all(f)]
    if len(objs) == 1:
        obj = objs[0]
    else:
        obj = {'entries': objs}
    return flask.render_template('log.html',
                                 log=_redact_log(obj),
                                 title=logfile)
Example #20
 def _create_resource(self, yaml_file_path):
     try:
         cluster = self._app.selected_cluster
         with open(yaml_file_path, "r") as f:
             resources = yaml.full_load_all(f)
             for resource in resources:
                 cluster.do_post(
                     resource["apiVersion"],
                     resource["kind"],
                     namespace=resource["metadata"].get("namespace"),
                     body=resource,
                 )
     except Exception as e:
         self._app.show_error(e)
Example #21
def loadYamlRemotely(url, multi_resource=False):
    try:
        fileToBeParsed = request.urlopen(url)
        if multi_resource:
            yaml_data = list(yaml.full_load_all(fileToBeParsed))
        else:
            yaml_data = yaml.full_load(fileToBeParsed)
        # print(yaml_data)
    except:
        print("Cannot read yaml config file {}, check formatting."
              "".format(fileToBeParsed))
        sys.exit(1)

    return yaml_data
Example #22
    def load(yaml_file: str) -> List[dict]:
        """
            Read the yaml file into python
            :return: list of all yaml configs
            :rtype: list
            :raise FileNotFoundError: yaml file could not be found on disk
            :raise yaml.error.YAMLError: yaml file is malformed
        """
        if not os.path.isfile(yaml_file):
            raise FileNotFoundError("Yaml file is not found at path")

        with open(yaml_file, 'r') as file:
            yaml_configs = list(yaml.full_load_all(file))
            return yaml_configs
Example #23
 def from_file(path_: Union[str, Path]) -> List['RegexQA']:
     """Converts multi-doc yaml file into list of RegexQA instances"""
     log.info(f'creating RegexQA from file: {str(path_)}')
     if isinstance(path_, str):
         path = Path(path_)
     else:
         path = cast(Path, path_)
     result: List['RegexQA'] = []
     with open(path) as file:
         for doc in yaml.full_load_all(file):
             regex = doc['regex']
             responses = doc['responses']
             result.append(RegexQA(regex, responses))
     return result
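A hedged sketch of the multi-document YAML from_file expects; the regex and responses keys come from the code, the file name and contents are invented, and it assumes from_file is exposed as a staticmethod on RegexQA:

# regex_qa.yaml (invented), one document per rule:
#   regex: "(?i)opening hours"
#   responses: ["We are open 9-17, Mon-Fri."]
#   ---
#   regex: "(?i)refund"
#   responses: ["Write to support@example.com for refunds."]
qa_rules = RegexQA.from_file("regex_qa.yaml")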
Example #24
def parse_endpoint(endpoint, job=None):
    epItem = {
        "id": endpoint["id"],
        "name": endpoint["name"],
        "username": endpoint["username"],
        "status": endpoint["status"],
        "hostNetwork": endpoint["hostNetwork"],
        "podName": endpoint["podName"],
        "domain": config["domain"],
    }
    if "podPort" in endpoint:
        epItem["podPort"] = endpoint["podPort"]
    if endpoint["status"] == "running":
        port = int(
            endpoint["endpointDescription"]["spec"]["ports"][0]["nodePort"])
        epItem["port"] = port
        if "nodeName" in endpoint:
            epItem["nodeName"], epItem["domain"] = getNodename()
        if epItem["name"] == "ssh":
            try:
                if job:
                    desc = list(
                        yaml.full_load_all(
                            base64.b64decode(job["jobDescription"])))
                    if epItem["id"].find("worker") != -1:
                        desc = desc[int(
                            re.match(".*worker(\d+)-ssh",
                                     epItem["id"]).groups()[0]) + 1]
                    elif epItem["id"].find("ps0") != -1:
                        desc = desc[0]
                    else:
                        desc = desc[0]
                    for i in desc["spec"]["containers"][0]["env"]:
                        if i["name"] == "DLTS_JOB_TOKEN":
                            epItem["password"] = i["value"]
            except Exception as e:
                logging.error(e)
        elif epItem["name"] == "inference-url":
            epItem["modelname"] = endpoint["modelname"]
            epItem["port"] = base64.b64encode(
                str(epItem["port"]).encode("utf-8"))
        elif epItem["name"] in ["ipython", "tensorboard", "vscode"]:
            epItem["port"] = base64.b64encode(
                str(epItem["port"]).encode("utf-8"))
    if epItem["name"] in ["ipython", "tensorboard", "vscode", "inference-url"]:
        if "extranet_port" in config and config["extranet_port"]:
            epItem["domain"] = epItem["domain"] + ":" + str(
                config["extranet_port"])
    return epItem
Example #25
    def test_flux_scales_with_pixel_scale(self, filter_name, bg_level):
        yaml_text = YAML_TEXT % (WAVE_MIN, WAVE_MAX, PIXEL_SCALE, PIXEL_SCALE,
                                 filter_name)
        yamls = [yml for yml in yaml.full_load_all(yaml_text)]

        cmd = sim.UserCommands(yamls=yamls)
        opt = sim.OpticalTrain(cmd)
        opt.cmds["!TEL.area"] = 1 * u.m**2

        src = empty_sky()
        opt.observe(src)
        img = opt.image_planes[0].data

        # Ks band photon flux is 1026 ph/s/m2/arcsec2
        assert np.median(img) == pytest.approx(bg_level, rel=0.01)
Example #26
    def load_from_yaml(self, yaml_path: str):
        with open(yaml_path, "r") as file:
            # full_load_all returns a generator; take the first document for the settings
            settings: dict = next(yaml.full_load_all(file))

        if settings["max_episode_steps"] != "":
            self.max_episode_steps = settings["max_episode_steps"]
        if settings["save_game_name"] != "":
            self.save_game_name = settings["save_game_name"]

        if settings["krpc"]["address"] != "":
            self.krpc_address = settings["krpc"]["address"]
        if settings["krpc"]["rpc_port"] != "":
            self.krpc_rpc_port = settings["krpc"]["rpc_port"]
        if settings["krpc"]["stream_port"] != "":
            self.krpc_stream_port = settings["krpc"]["stream_port"]
Example #27
 def _get_headers_from_yaml(self, yaml_str):
     try:
         if not yaml_str.strip().startswith('---'):
             raise StopIteration()
         return next(yaml.full_load_all(yaml_str))
     except yaml.YAMLError as e:
         logger.info(
             'YAML header is incorrectly formatted or missing. The '
             f'following information may be useful:\n{e}\nIf you continue '
             'to have difficulties, try pasting your YAML header into an '
             'online parser such as http://yaml-online-parser.appspot.com/.'
         )
     except StopIteration as e:
         logger.info('YAML header is missing!')
     return {}
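The next(yaml.full_load_all(...)) trick above takes only the first document, which is what makes it suitable for YAML front matter. A contrived sketch of the same pattern outside the class:

import yaml

post = """\
---
title: Using yaml.full_load_all
tags: [yaml, python]
---
Body text of the post goes here.
"""

headers = next(yaml.full_load_all(post))
print(headers["title"])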
Example #28
    def render(
        self,
        values: Union[DagsterHelmValues, DagsterUserDeploymentsHelmValues],
        chart_version: Optional[str] = None,
    ) -> List[Any]:
        with NamedTemporaryFile() as tmp_file:
            helm_dir_path = os.path.join(git_repo_root(), self.helm_dir_path)

            values_json = json.loads(
                values.json(exclude_none=True, by_alias=True))
            pprint(values_json)
            content = yaml.dump(values_json)
            tmp_file.write(content.encode())
            tmp_file.flush()

            command = [
                "helm",
                "template",
                self.name,
                helm_dir_path,
                "--debug",
                *["--values", tmp_file.name],
            ]

            if self.output:
                ## Uncomment to render all templates before filtering to surface Helm templating
                ## errors with better error messages
                # subprocess.check_output(command)

                command += ["--show-only", self.output]

            with self._with_chart_yaml(helm_dir_path, chart_version):
                templates = subprocess.check_output(command)

            print("\n--- Helm Templates ---")  # pylint: disable=print-call
            print(templates.decode())  # pylint: disable=print-call

            k8s_objects = [
                k8s_object for k8s_object in yaml.full_load_all(templates)
                if k8s_object
            ]
            if self.model:
                k8s_objects = [
                    self.api_client._ApiClient__deserialize_model(  # pylint: disable=W0212
                        k8s_object, self.model) for k8s_object in k8s_objects
                ]

            return k8s_objects
Example #29
    def test_full_load_all_yaml(self):
        txt = dedent('''
        ---
        file1: !include tests/data/include.d/1.yaml

        ---
        file2: !include tests/data/include.d/2.yaml
        ''').strip()
        iterable = yaml.full_load_all(txt)
        for i, data in enumerate(iterable):
            if i == 0:
                self.assertEqual(data, {'file1': YAML1})
            elif i == 1:
                self.assertEqual(data, {'file2': YAML2})
            else:
                raise RuntimeError()
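The !include tag in the test above is not part of PyYAML itself; presumably a custom constructor has been registered on the loader beforehand, for example via the pyyaml-include package. A hedged sketch of such a registration (assumed API, not taken from the test):

import yaml
from yamlinclude import YamlIncludeConstructor  # assumption: pyyaml-include is installed

# Register the !include constructor on FullLoader so full_load_all() can resolve it.
YamlIncludeConstructor.add_to_loader_class(
    loader_class=yaml.FullLoader, base_dir='tests/data')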
Example #30
 def read(filename):
     try:
         with open(filename, 'rt', encoding='utf8') as settings_file:
             return next(yaml.full_load_all(settings_file))
     except FileNotFoundError:
         # popup text: "configuration file is missing"
         sg.popup('brak pliku konfiguracyjnego')
         sys.exit(-1)
     except yaml.scanner.ScannerError:
         # popup text: "errors in the configuration file"
         sg.popup('błędy pliku konfiguracyjnego')
         sys.exit(-2)
     except:
         # popup text: "unexpected error while loading the configuration"
         sg.popup("nieoczekiwany błąd przy wczytywaniu konfiguracji")
         sys.exit(-3)