Example #1
def load_settings():
    yaml = YAML()
    if os.path.exists(DEFAULT_SETTINGS_FILE):
        with open(DEFAULT_SETTINGS_FILE, 'r') as f:
            settings = dict(yaml.load(f))
            if settings['whitelist-enabled'] and settings['blacklist-enabled']:
                log('whitelist and blacklist cannot be enabled simultaneously')
                return None
            # Ensure paths start and end with '/'
            for i, w in enumerate(settings['whitelist-files']):
                if w[0] != '/':
                    settings['whitelist-files'][i] = '/' + w
                if w[-1] != '/':
                    settings['whitelist-files'][i] += '/'

            for i, w in enumerate(settings['blacklist-files']):
                if w[0] != '/':
                    settings['blacklist-files'][i] = '/' + w
                if w[-1] != '/':
                    settings['blacklist-files'][i] += '/'

            return settings

    with open(DEFAULT_SETTINGS_FILE, 'w+') as f:
        yaml.dump(DEFAULT_SETTINGS, f)

    return DEFAULT_SETTINGS
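A minimal settings file this loader accepts might look like the sketch below; the key names come from the code above, while the values are illustrative assumptions.

# Hypothetical contents of DEFAULT_SETTINGS_FILE (values are assumptions)
from ruamel.yaml import YAML

sample_settings = """\
whitelist-enabled: true
blacklist-enabled: false
whitelist-files:
  - allowed/path
blacklist-files: []
"""
settings = dict(YAML().load(sample_settings))
assert settings['whitelist-enabled'] is True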
Example #2
    def _save(self):
        'save the configuration to the file cache so it can be restored later'
        resource_name = self._get_one_resource_value()
        namespace = self.client.get_namespace(
            self._get_kube_api_resource_type(), resource_name)
        kube_instance = self._get_one_resource_value("kube")
        if not kube_instance:
            kube_instance = 'local'
        cache_folder = os.path.join(settings.OMC_KUBE_CACHE_DIR, kube_instance,
                                    namespace, self._get_kube_resource_type())

        result = self._read_namespaced_resource(resource_name,
                                                namespace,
                                                _preload_content=False)
        stream = StringIO()
        the_result = json.loads(result.data.decode('UTF-8'))
        ObjectUtils.delete_node(the_result, 'metadata.creationTimestamp')
        ObjectUtils.delete_node(the_result, 'metadata.resourceVersion')
        yaml = YAML()
        yaml.dump(the_result, stream)
        content = stream.getvalue()

        make_directory(cache_folder)
        with open(os.path.join(cache_folder, resource_name + '.yaml'),
                  'w') as f:
            f.write(content)
Example #3
def generate_hpsearch():
    # TODO documentation
    yaml = YAML()
    parent_file_path = Path(args.input_file)
    parent_yaml = yaml.load(parent_file_path)
    hpsearch_config = parent_yaml['hpsearch']

    if hpsearch_config.get('is_child', False):
        raise RuntimeError(
            'This YAML is itself a child config generated for a hyperparameter search.'
        )

    assert hpsearch_config['type'] == 'random_uniform'

    description = hpsearch_config.get('desc', 'hpsearch')

    for trial_idx in range(hpsearch_config['n_trials']):
        child_yaml = make_child_config(parent_yaml)
        for p in hpsearch_config['params']:
            value = generate_random_value(p)
            set_item_at_path(child_yaml, p['param'], value)
        child_file_path = parent_file_path.with_name(
            parent_file_path.stem +
            '_{}_{}.yaml'.format(description, trial_idx))
        yaml.dump(child_yaml, child_file_path)
Example #4
    def __init__(self,
                 dir_: str,
                 file_path_yml: Optional[str] = None,
                 loglevel=logging.DEBUG):
        self.logger = get_logger(self.__class__.__name__, loglevel=loglevel)
        # create a yaml file reader
        self.yaml = YAML()
        # self.yaml.indent(mapping=2, sequence=4, offset=2)
        self.yaml.indent(offset=2)
        self.yaml.default_flow_style = False
        self.yaml.width = 4096

        # attributes that change upon changing the starting dir
        self.configs = {}
        self.reader_data = {}

        self.interesting_proteins, self.go_analysis_gene_names = None, None

        # properties
        self._start_dir = None
        self._file_path_yaml = None

        # set the specified dirs
        self.start_dir = dir_
        if file_path_yml is not None:
            self.file_path_yaml = file_path_yml
Example #5
    def _resource_completion(self, short_mode=True):
        results = []
        headers = ('NAME', 'SERVER', 'CLUSTER', 'CONTEXT')
        yaml = YAML()
        results.append(headers)
        if os.path.exists(settings.OMC_KUBE_CONFIG_DIR):
            resources = os.listdir(settings.OMC_KUBE_CONFIG_DIR)
            for one_resource in resources:
                try:
                    with open(
                            os.path.join(settings.OMC_KUBE_CONFIG_DIR,
                                         one_resource, 'config')) as f:
                        content = yaml.load(f)
                        one_item = (one_resource,
                                    ObjectUtils.get_node(
                                        content, "clusters[0].cluster.server"),
                                    ObjectUtils.get_node(
                                        content, "clusters[0].name"),
                                    ObjectUtils.get_node(
                                        content, "current-context"))
                        results.append(one_item)
                except Exception as e:
                    logger.error(e, exc_info=True)

        return CompletionContent(Formatter.format_completions(results))
Example #6
def main(args=None):
    """Build a GSC config for running Ploidetect.

    Uses GSC standard patient_id and biopsy or libraries.
    Ploidetect versions must be specified manually.
    """
    logger.setLevel(logging.INFO)

    args = parse_args() if not args else args

    if args.output_file and exists(args.output_file):
        raise ValueError(f"Output config already exists: '{args.output_file}'")

    config = build_config(**vars(args))

    if not args.output_file:
        YAML().dump(config, sys.stdout)
    else:
        args.output_file = args.output_file.replace("DERIVED_OUTPUT_DIR",
                                                    config["output_dir"])
        if exists(args.output_file):
            raise ValueError(
                f"Output config already exists: '{args.output_file}'")
        elif (dirname(
                args.output_file)) and not exists(dirname(args.output_file)):
            logger.warning(
                f"Creating output folder: {dirname(args.output_file)}")
            os.makedirs(dirname(args.output_file))
        print(f"Writing config to: {abspath(realpath(args.output_file))}")
        with open(args.output_file, "w") as f:
            YAML().dump(config, f)
Example #7
def parse_hp(hpfile):
    yaml = YAML(typ="safe", pure=True)
    with open(hpfile) as hp_raw:
        hps = yaml.load(hp_raw)
    tf_hps = tf.contrib.training.HParams()
    for k, v in hps["default"].items():
        tf_hps.add_hparam(k, v)
    return tf_hps
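The hyperparameter file that parse_hp reads needs a top-level "default" mapping whose entries become HParams; a hedged sketch, with hypothetical parameter names and values:

from ruamel.yaml import YAML

sample_hp = """\
default:
  learning_rate: 0.001
  batch_size: 32
"""
hps = YAML(typ="safe", pure=True).load(sample_hp)
assert hps["default"]["batch_size"] == 32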
Example #8
    def modify(self, mods, inputs=None, outputs=None):
        """
        Allows to make modifications to the model. It will create a modified configuration file, build the corresponding model and
        set the weights of the current model when possible.

        mods:    a list of dictionaries. Each dictionary can have as key: delete (in this case the value, which is a path, 
                 is deleted from config), or a config path (in this case, the path value is replaced by the value of the dictionary).
        inputs:  a list with the names of the inputs
        outputs: a list with the names of the outputs
        """
        model_weights = self.get_weights()
        yaml_loader = YAML()
        m_conf = Config(self.core_model.processed_config)
        original_keys = list(m_conf.keys())
        deep_conf = Config(shallow_to_deep(m_conf))
        for mod in mods:
            mod_key = list(mod.keys())[0]
            mod_value = mod[mod_key]
            if mod_key == 'delete':
                deep_conf.pop(mod_value)
                if mod_value in original_keys:
                    original_keys.remove(mod_value)
            elif '*' in mod_key:
                mod_key = mod_key.lstrip('/')
                found_paths = [
                    k for k in deep_conf.to_shallow().keys()
                    if fnmatch.fnmatch(k, mod_key)
                ]
                for k in found_paths:
                    k = k.replace('.', '/')
                    if isinstance(mod_value, str):
                        deep_conf[k] = yaml_loader.load(mod_value)
                    else:
                        deep_conf[k] = mod_value
            else:
                mod_key = mod_key.replace('.', '/')
                if mod_key.split('/')[0] not in deep_conf.keys():
                    # this means we are adding a new layer
                    layer_name = mod_key.split('/')[0]
                    original_keys.append(layer_name)
                    deep_conf['{}/name'.format(layer_name)] = layer_name
                if isinstance(mod_value, str):
                    deep_conf[mod_key] = yaml_loader.load(mod_value)
                else:
                    deep_conf[mod_key] = mod_value
        new_model_architecture = shallow_to_original_keys(
            deep_conf.to_shallow(), original_keys)
        model = self.build(processed_config=new_model_architecture,
                           input_names=inputs,
                           output_names=outputs)
        layer_names = [l.name for l in model.layers]
        for k, v in model_weights.items():
            if k in layer_names:
                layer = model.get_layer(k)
                layer.set_weights(v)
        self.core_model.model = model
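For illustration, a mods list following the docstring's format might look like this; the paths and values are hypothetical, not taken from any real config:

mods = [
    {'delete': 'layer1/dropout'},   # remove this config path
    {'layer2/units': 128},          # replace the value at this path
    {'*/activation': 'relu'},       # wildcard: applied to every matching path
]
# model.modify(mods, inputs=['input_1'], outputs=['output_1'])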
Example #9
    def yaml(self):
        'get configuration in yaml format'
        resource = self._get_one_resource_value()
        namespace = self.client.get_namespace(
            self._get_kube_api_resource_type(), resource)
        result = self._read_namespaced_resource(resource, namespace)
        stream = StringIO()
        yaml = YAML()
        yaml.dump(result, stream)
        console.log(stream.getvalue())
Example #10
def main():
    count = 0
    yaml = YAML()

    for post in glob.glob(os.path.join(POSTS_PATH, "*.md")):
        git_lastmod = subprocess.check_output(
            ["git", "log", "-1", "--pretty=%ad", "--date=short", post],
            text=True).strip()

        if not git_lastmod:
            continue

        frontmatter, line_num = get_yaml(post)
        meta = yaml.load(frontmatter)

        if 'sitemap' in meta:
            if 'lastmod' in meta['sitemap']:
                if meta['sitemap']['lastmod'] == git_lastmod:
                    continue

                meta['sitemap']['lastmod'] = git_lastmod

            else:
                meta['sitemap'].insert(0, 'lastmod', git_lastmod)
        else:
            meta.insert(line_num, 'sitemap', {'lastmod': git_lastmod})

        output = 'new.md'
        if os.path.isfile(output):
            os.remove(output)

        with open(output, 'w') as new, open(post, 'r') as old:
            new.write("---\n")
            yaml.dump(meta, new)
            new.write("---\n")
            line_num += 2

            lines = old.readlines()

            for line in lines:
                if line_num > 0:
                    line_num -= 1
                    continue
                else:
                    new.write(line)

        shutil.move(output, post)
        count += 1
        print("[INFO] update 'lastmod' for: '{}'".format(post))

    print("[NOTICE] Success! Update all posts's lastmod.\n")

    if count > 0:
        subprocess.call(["git", "add", POSTS_PATH])
        subprocess.call(["git", "commit", "-m",
                         "[Automation] Update lastmod of post(s)."])
Example #11
def read_parameters(param_file):
    """Read and return parameters in .yaml file
    Args:
        param_file: Full file path of the parameters file
    Returns:
        YAML (Ruamel) CommentedMap dict-like object
    """
    yaml = YAML()
    with open(param_file) as yamlfile:
        params = yaml.load(yamlfile)
    return params
Example #12
    @classmethod
    def from_yaml(cls, yaml_str: str) -> 'RunDescriber':
        """
        Parse a yaml string (the return of `to_yaml`) into a RunDescriber
        object
        """

        YAML = cls._ruamel_importer()

        yaml = YAML()
        # yaml.load returns a CommentedMap; convert it to a plain dict
        ser = dict(yaml.load(yaml_str))
        return cls.deserialize(ser)
Example #13
    def __init__(self, threadID):
        Thread.__init__(self)
        self.threadID = threadID
        solver = POMCPV
        yaml = YAML()
        with open('exp_param.yml', 'r') as param:
            args = yaml.load(param)
        args['ucb_coefficient'] = threadID
        np.random.seed(args['seed'])
        random.seed(a=args['seed'])

        env = RobotModel(args)
        self.agent = AgentSMC(env, solver)
Example #14
    def to_yaml(self) -> str:
        """
        Output the run description as a yaml string
        """

        YAML = self._ruamel_importer()

        yaml = YAML()
        with io.StringIO() as stream:
            yaml.dump(self.serialize(), stream=stream)
            output = stream.getvalue()

        return output
Example #15
    def dump(self, data, stream=None, **kwargs):
        """
        Output data to a given stream.

        If no stream is given, then the data is returned as a string.
        """
        inefficient = False
        if stream is None:
            inefficient = True
            stream = StringIO()
        YAML.dump(self, data, stream, **kwargs)
        if inefficient:
            return stream.getvalue()
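This dump override is the usual ruamel.yaml recipe for getting a string back when no stream is given; a short usage sketch carrying the method above:

from io import StringIO
from ruamel.yaml import YAML

class MyYAML(YAML):  # subclass carrying the dump() shown above
    def dump(self, data, stream=None, **kwargs):
        inefficient = False
        if stream is None:
            inefficient = True
            stream = StringIO()
        YAML.dump(self, data, stream, **kwargs)
        if inefficient:
            return stream.getvalue()

text = MyYAML().dump({'a': 1})  # returns "a: 1\n"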
Example #16
    def __init__(self, yaml_file, geojson_file):

        with open(yaml_file, 'r') as stream:
            yaml = YAML()
            config = yaml.load(stream)

        # error handling happens in image reader
        self.image_list = []
        for tile_args in config['tile-list']:
            tile_config = config['tile-list'][tile_args]
            tile_settings = ImageReader(tile_config, geojson_file)
            self.image_list.append(tile_settings)

        # parse mosaic settings
        if config['mosaic-settings']['build-mosaic'] is True:
            try:
                self.mosaic_keywords = [
                    "build-mosaic", "resampling-method", "clip"
                ]
                self.mosaic_settings = self.parse_settings(
                    self.mosaic_keywords, config['mosaic-settings'])
                self.mosaic_settings['image-list'] = []
                for i in config['mosaic-settings']['image-list']:
                    self.mosaic_settings['image-list'].append(
                        config['mosaic-settings']['image-list'][i])

            except Exception:
                raise IOError('mosaic-settings not defined in YAML file')
        else:
            self.mosaic_settings = {}
            self.mosaic_settings['build-mosaic'] = False

        # parse average settings
        if config['average-settings']['compute-average'] is True:
            try:
                self.average_keywords = [
                    "compute-average", "include-mosaic", "clip"
                ]
                self.average_settings = self.parse_settings(
                    self.average_keywords, config['average-settings'])
                self.average_settings['image-list'] = []
                for i in config['average-settings']['image-list']:
                    self.average_settings['image-list'].append(
                        config['average-settings']['image-list'][i])

            except Exception:
                raise IOError('average-settings not defined in YAML file')

        else:
            self.average_settings = {}
            self.average_settings['compute-average'] = False
Example #17
def copy_cookiecutter_resume(template_name='easydata'):
    """Make a copy of the cookiecutter replay file in the generated project.

    By default, cookiecutter creates a replay directory in a user's
    ~/.cookiecutter directory. This is largely useless. Easydata dumps
    this data to the generated project (also as json) using a jsonify
    call, but this doesn't yet help us regenerate the project
    automatically.  This hook creates a YAML version of those values
    in the generated project.  This can be used to regenerate the
    project by doing a:

    >>> cookiecutter --config_file path/to/.easydata.yaml easydata

    """
    # relative to root of generated project
    src_path = f'.{template_name}.json'
    yml_path = f'.{template_name}.yml'

    logger.debug(f"Reading cookiecutter replay data from {src_path}")
    with open(src_path) as f:
        cookiecutter_opts = json.load(f)
        yaml_opts = {k:v
                     for k,v in sorted(cookiecutter_opts.items())
                     if not k.startswith('_')}
    yaml = YAML()
    yaml.default_flow_style = False
    yaml.width = 4096
    yaml.indent(offset=4)
    logger.debug(f"Dumping cookiecutter replay (YAML) info to {yml_path}")
    with open(yml_path, 'w') as fw:
        yaml.dump({'default_context': yaml_opts}, fw)
Example #18
def copy_cookiecutter_resume(template_name='cookiecutter-easydata'):
    """Make a copy of the cookiecutter replay file in the generated project.

    By default, cookiecutter creates a replay directory in a user's ~/.cookiecutter
    directory. This hook creates a YAML version of those values in the generated project.
    This can be used to regenerate the project by doing a:
    >>> cookiecutter --config_file path/to/cookiecutter-easydata.yaml cookiecutter-easydata

    """
    config_obj = get_user_config()
    config_dir = pathlib.Path(config_obj['replay_dir'])

    src_path = config_dir / f'{template_name}.json'
    yml_path = f'.{template_name}.yml'  # relative to root of generated project

    logger.debug(f"Reading cookiecutter replay data from {src_path}")
    with open(src_path) as f:
        cookiecutter_opts = json.load(f)
        yaml_opts = {k:v
                     for k,v in sorted(cookiecutter_opts['cookiecutter'].items())
                     if not k.startswith('_')}
    yaml = YAML()
    yaml.default_flow_style = False
    yaml.width = 4096
    yaml.indent(offset=4)
    logger.debug(f"Dumping cookiecutter replay (YAML) info to {yml_path}")
    with open(yml_path, 'w') as fw:
        yaml.dump({'default_context': yaml_opts}, fw)
Example #19
def write_to_global_results(build_status_dict):
    if isfile(GLOBAL_BUILD_RESULTS_PATH):
        fp = open(GLOBAL_BUILD_RESULTS_PATH, "r+")
        yaml_dict = YAML().load(fp) or {}  # an empty file loads as None
    else:
        fp = open(GLOBAL_BUILD_RESULTS_PATH, "w")
        yaml_dict = dict()

    for pkg_name, info_dict in build_status_dict.items():
        pkg_dict = yaml_dict.setdefault(pkg_name, {})
        buildtest_number = len(pkg_dict) + 1
        pkg_dict[buildtest_number] = info_dict

    fp.seek(0)
    YAML().dump(yaml_dict, fp)
    fp.truncate()
    fp.close()
Example #20
def get_sdc_env(conda_activate, sdc_src, sdc_recipe, python, numpy, channels):
    def create_env_list(packages, exclude=''):
        env_list = []
        env_set = set()

        for item in packages:
            package = re.search(r"[\w-]+", item).group()
            version = ''
            if re.search(r"\d+\.[\d\*]*\.?[\d\*]*",
                         item) and '<=' not in item and '>=' not in item:
                version = '={}'.format(
                    re.search(r"\d+\.[\d\*]*\.?[\d\*]*", item).group())
            if package not in env_set and package not in exclude:
                env_set.add(package)
                env_list.append(f'{package}{version}')
        return env_list

    from ruamel_yaml import YAML

    yaml = YAML()
    sdc_recipe_render = os.path.join(sdc_src, 'sdc_recipe_render.yaml')

    # Create environment with conda-build
    sdc_render_env = 'sdc_render'
    sdc_render_env_activate = get_activate_env_cmd(conda_activate,
                                                   sdc_render_env)
    format_print('Render sdc build and test environment using conda-build')
    create_conda_env(conda_activate, sdc_render_env, python, ['conda-build'])
    run_command('{} && {}'.format(
        sdc_render_env_activate, ' '.join([
            f'conda render --python={python}', f'--numpy={numpy}',
            f'{channels} -f {sdc_recipe_render} {sdc_recipe}'
        ])))

    with open(sdc_recipe_render, 'r') as recipe:
        data = yaml.load(recipe)
        build = data['requirements']['build']
        host = data['requirements']['host']
        run = data['requirements']['run']
        test = data['test']['requires']

    return {
        'build': create_env_list(build + host + run, 'vs2017_win-64'),
        'test': create_env_list(run + test)
    }
Example #21
def get_all_tags():
    all_tags = []
    yaml = YAML()

    for dir in POSTS_DIR:
        path = get_path(dir)
        for file in glob.glob(os.path.join(path, '*.md')):
            meta = yaml.load(get_yaml(file)[0])

            if 'tags' in meta:
                for tag in meta['tags']:
                    if tag not in all_tags:
                        all_tags.append(tag)
            else:
                raise Exception("Cannot found 'tags' in \
                  post '{}' !".format(file))

    return all_tags
Example #22
    def __init__(self,
                 path: str,
                 file_path_yml: Optional[str] = None,
                 loglevel=logging.DEBUG):
        """
        Parameters
        ----------
        path
            location where the directory/txt folder to the data can be found.
        file_path_yml
            path to the yaml config file
        loglevel
            level of the logger
        """
        self.logger = get_logger(self.__class__.__name__, loglevel=loglevel)
        # create a yaml file reader
        self.yaml = YAML()
        self.yaml.indent(mapping=2, sequence=4, offset=2)
        # self.yaml.indent(offset=2)
        self.yaml.default_flow_style = False
        self.yaml.width = 4096

        # attributes that change upon changing the starting dir
        #: configurations for the run. also saved configurations for the reader under the respective reader name
        self.configs = {}
        self.reader_data = {}

        self.interesting_proteins, self.go_analysis_gene_names = None, None

        # properties
        self._start_dir = None
        self._file_path_yaml = None

        # list to store all selectable terms; custom and provided
        self.list_full_gos = []
        self.list_full_pathways = []

        # set the specified dirs
        self.start_dir = path
        if file_path_yml is not None:
            self.file_path_yaml = file_path_yml
Example #23
def test__default_mirror_configs_jinja(tmpdir):
    """test jinja parsing is correct/valid"""
    ll = "DEBUG"
    mirror_dir = tmpdir.mkdir("mirror_dir")
    configs = _default_mirror_configs(Path(mirror_dir), ll)

    for conf in configs:
        assert conf.exists()
        with open(conf, 'r') as fp:
            yaml = YAML().load(fp)
            assert yaml["mirror_dir"].startswith(mirror_dir.strpath)
            assert yaml["log_dir"].startswith(mirror_dir.strpath)
            assert yaml["log_level"] == ll
Example #24
def yaml_loader(request):
    if request.param == "ruamel_yaml":
        try:
            from ruamel_yaml import YAML  # type: ignore

            yaml_loader = YAML().load
        except ImportError:
            from ruamel.yaml import YAML  # type: ignore

            yaml_loader = YAML().load
        module_names = ("ruamel_yaml", "ruamel.yaml")
    else:
        import yaml  # type: ignore

        def yaml_loader(x):
            return yaml.load(x, Loader=yaml.FullLoader)

        module_names = ("pyyaml", )
    old_props = serial.YAML_MODULE_PRIORITIES
    serial.YAML_MODULE_PRIORITIES = module_names
    yield yaml_loader
    serial.YAML_MODULE_PRIORITIES = old_props
Example #25
    def __init__(self, processID, grid):
        self.processID = processID
        self.target = grid.reward
        self.threadName = "PO-SMC-" + grid.name if SMC else "PO-UCT" + grid.name
        solver = POMCPV
        yaml = YAML()
        with open('exp_param.yml', 'r') as param:
            args = yaml.load(param)
        res = MapStat(grid, confidence_interval=2.0)
        # args['n_start_states'] = res["particles"]
        args['min_particle_count'] = res["particles"]
        res = MapStat(grid, confidence_interval=1.0)
        args['max_particle_count'] = res["particles"]
        args['n_start_states'] = res["particles"]

        print("POPULATION SIZE %d SAMPLE SIZE: %d" %
              (res["num_states"], res["particles"]))
        # np.random.seed(args['seed'])
        # random.seed(a=args['seed'])

        env = RobotModel(args, grid)
        self.agent = AgentSMC(env, solver)
Example #26
def load_about_md(path):
    """Load contents of ``about.md`` file with title and subtitle from ``path``."""
    logger.info("Loading information from about.md at %s", path)
    with open(path, "rt") as inputf:
        header = []
        lines = [line.rstrip() for line in inputf.readlines()]

        # Load meta data, if any.
        if lines and lines[0] and lines[0].startswith("----"):
            for line in lines[1:]:
                if line.startswith("----"):
                    break
                else:
                    header.append(line)
            lines = lines[len(header) + 2:]

        # Get title and short title, finally create About object.
        meta = YAML().load("\n".join(header)) or {}
        title = meta.get("title", "Untitled")
        short_title = meta.get("short_title", title or "untitled")
        readme = "\n".join([line.rstrip() for line in lines]).lstrip()

        return About(title=title, short_title=short_title, readme=readme)
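An about.md file this loader accepts has an optional metadata header fenced by lines starting with "----"; a hedged sample (the field values are assumptions):

sample_about = """\
----
title: My Project
short_title: project
----
The readme body starts here.
"""
# load_about_md on such a file yields
# About(title='My Project', short_title='project',
#       readme='The readme body starts here.')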
Example #27
def get_categories():
    all_categories = []
    yaml = YAML()

    for dir in POSTS_DIR:
        path = get_path(dir)
        for file in glob.glob(os.path.join(path, '*.md')):
            meta = yaml.load(get_yaml(file)[0])

            if 'category' in meta:
                if isinstance(meta['category'], list):
                    err_msg = (
                        "[Error] File {} 'category' type"
                        " cannot be LIST!").format(file)
                    raise Exception(err_msg)
                else:
                    if meta['category'] not in all_categories:
                        all_categories.append(meta['category'])
            else:
                if 'categories' in meta:
                    if isinstance(meta['categories'], str):
                        error_msg = (
                            "[Error] File {} 'categories' type"
                            " cannot be STR!").format(file)
                        raise Exception(error_msg)

                    for ctg in meta['categories']:
                        if ctg not in all_categories:
                            all_categories.append(ctg)
                else:
                    err_msg = (
                        "[Error] File {} must have "
                        "at least one category.").format(file)
                    print(err_msg)

    return all_categories
Example #28
    def _before_sub_resource(self):
        try:
            if self._have_resource_value():
                resource_value = self._get_one_resource_value()
                client = KubernetesClient(
                    os.path.join(settings.OMC_KUBE_CONFIG_DIR, resource_value,
                                 'config'))
            else:
                client = KubernetesClient()

            self.context['common'] = {'client': client}
        except Exception as e:
            # some actions don't need to load the config,
            # e.g. the get-config action itself
            raise Exception('invalid kubernetes config') from e


if __name__ == '__main__':
    client = KubernetesClient()
    ret = client.list_service_for_all_namespaces(watch=False)
    yaml = YAML()
    with open('/Users/luganlin/.omc/config/kube/cd150/config') as f:
        content = yaml.load(f)
        print(ObjectUtils.get_node(content, "clusters[0].cluster.server"))

    # print(client.read_namespaced_pod("postgres-svc-5685d4bc7-l6j4m", 'default'))
    # print(client.read_namespaced_pod_template("postgres-svc-5685d4bc7-l6j4m", 'default'))
    # print(client.read_namspaced_event("postgres-svc-5685d4bc7-l6j4m", 'default'))
    console.log(ret)
    # for i in ret.items:
    #     print("%s\t%s\t%s" % (i.status.pod_ip, i.metadata.namespace, i.metadata.name))
Example #29
class MSPInitializer:
    # set all file names that are required
    yml_file_name_tmp = "config_tmp.yml"
    yml_file_name = "config.yml"
    default_yml_name = "ms_analysis_default.yml"
    go_path = "go_terms"
    pathway_path = "pathways"
    possible_gos = sorted([
        x for x in os.listdir(os.path.join(path_package_config, go_path))
        if x.endswith(".txt")
    ])
    possible_pathways = sorted([
        x for x in os.listdir(os.path.join(path_package_config, pathway_path))
        if x.endswith(".txt")
    ])

    def __init__(self,
                 dir_: str,
                 file_path_yml: Optional[str] = None,
                 loglevel=logging.DEBUG):
        self.logger = get_logger(self.__class__.__name__, loglevel=loglevel)
        # create a yaml file reader
        self.yaml = YAML()
        # self.yaml.indent(mapping=2, sequence=4, offset=2)
        self.yaml.indent(offset=2)
        self.yaml.default_flow_style = False
        self.yaml.width = 4096

        # attributes that change upon changing the starting dir
        self.configs = {}
        self.reader_data = {}

        self.interesting_proteins, self.go_analysis_gene_names = None, None

        # properties
        self._start_dir = None
        self._file_path_yaml = None

        # set the specified dirs
        self.start_dir = dir_
        if file_path_yml is not None:
            self.file_path_yaml = file_path_yml

    @property
    def start_dir(self):
        return self._start_dir

    @start_dir.setter
    def start_dir(self, start_dir):
        start_dir = os.path.normpath(start_dir)
        # make sure to be on the right level and set starting dir
        if os.path.split(start_dir)[1] == "txt":
            self.logger.debug("Removing txt ending from path")
            self._start_dir = os.path.split(start_dir)[0]
        else:
            self._start_dir = start_dir
        self.logger.info(f"Starting dir: {self.start_dir}")
        # reset all file-specific attributes
        self.configs = {}
        self.reader_data = {}
        self.file_path_yaml = "file"

    @property
    def path_config(self):
        return os.path.join(self.start_dir, "config")

    @property
    def file_path_yaml(self):
        return self._file_path_yaml

    @file_path_yaml.setter
    def file_path_yaml(self, file_path_yml: str):
        """

        Parameters
        ----------
        file_path_yml
            can be either:
                - "default"
                - "file"
                - a path to a yml file

        Raises
        ------
        ValueError
            if no valid value was provided
        FileNotFoundError
            if the file specified by the file_path_yml was not found

        """
        if file_path_yml.lower() == "default":
            self._file_path_yaml = self.get_default_yml_path()
        elif file_path_yml.lower() == "file":
            if self.has_yml_file():
                self._file_path_yaml = os.path.join(
                    self.start_dir, "config", MSPInitializer.yml_file_name)
            else:
                self._file_path_yaml = self.get_default_yml_path()
        elif file_path_yml.lower().endswith(('.yml', '.yaml')):
            self._file_path_yaml = os.path.normpath(file_path_yml)
        else:
            raise ValueError(
                f"Invalid value provided for yaml file: {file_path_yml}")
        self.logger.debug("yml file location: %s", self._file_path_yaml)

        # load the config from the yml file
        self.logger.info("loading yml file")
        with open(self.file_path_yaml) as f:
            self.configs = self.yaml.load(f)
        self.logger.debug(f"Config file contents: {self.configs}")

    def init_config(self):
        """
        Creates the directory to save the configuration file and saves the configuration
        """
        os.makedirs(self.path_config, exist_ok=True)
        self.update_config_file()

    def has_yml_file(self) -> bool:
        if not os.path.isdir(self.start_dir):
            return False
        if "config" in os.listdir(self.start_dir):
            self.logger.debug("Found config dir")
            config_dir = os.path.join(self.start_dir, "config")
            if MSPInitializer.yml_file_name in os.listdir(config_dir):
                self.logger.debug("Found config.yml file in config dir")
                return True
        return False

    def get_default_yml_path(self) -> str:
        self.logger.debug(
            "Loading default yml file from: %s, since no (valid) file was selected",
            path_package)
        return os.path.join(path_package_config,
                            MSPInitializer.default_yml_name)

    def init_interest_from_txt(
            self) -> Tuple[Dict[str, list], Dict[str, list]]:
        dict_pathway = {}
        dict_go = {}
        for pathway in self.configs.get("pathways"):
            name, proteins = self.read_config_txt_file(
                MSPInitializer.pathway_path, pathway)
            dict_pathway[name] = proteins

        for go in self.configs.get("go_terms"):
            name, proteins = self.read_config_txt_file(MSPInitializer.go_path,
                                                       go)
            dict_go[name] = proteins
        return dict_pathway, dict_go

    def read_config_txt_file(self, path, file) -> Tuple[str, list]:
        fullpath = os.path.join(path_package_config, path, file)
        if path == MSPInitializer.pathway_path:
            with open(fullpath) as f:
                name = f.readline().strip()
                f.readline()
                proteins = []
                for line in f:
                    proteins.append(line.strip())
        elif path == MSPInitializer.go_path:
            name = file.replace(".txt", "")
            with open(fullpath) as f:
                proteins = []
                for line in f:
                    proteins.append(line.strip())
        else:
            raise ValueError(f"Invalid path: {path}")
        return name, proteins

    def update_config_file(self):
        # store the config file as tmp
        self.logger.debug("Updating yml settings file")
        yml_file_loc_tmp = os.path.join(self.path_config,
                                        MSPInitializer.yml_file_name_tmp)
        with open(yml_file_loc_tmp, "w") as outfile:
            self.yaml.dump(self.configs, outfile)

        # delete non tmp if exists
        yml_file_loc = os.path.join(self.path_config,
                                    MSPInitializer.yml_file_name)
        if MSPInitializer.yml_file_name in os.listdir(self.path_config):
            os.remove(yml_file_loc)

        # rename to non tmp
        os.rename(yml_file_loc_tmp, yml_file_loc)

    def read_data(self):
        for Reader in BaseReader.__subclasses__():
            Reader: Type[BaseReader]  # for IDE hints
            try:
                reader = Reader(self.start_dir,
                                self.configs.get(Reader.name, {}))
                self.configs[str(Reader.name)] = deepcopy(reader.reader_config)
                self.reader_data[Reader.name] = reader.full_data

            except MissingFilesException:
                self.logger.debug("No files found for reader: %s", Reader.name)

        # read all proteins and receptors of interest from the config dir
        self.logger.info("Reading proteins and receptors of interest")
        self.interesting_proteins, self.go_analysis_gene_names = \
            self.init_interest_from_txt()
        self.update_config_file()
Example #30
        map_index = lambda pos: pos.i * y_size + pos.j
        start_new_timer = ModelChecker.check_safe_reachability(
            map(map_index, traj), map(map_index,
                                      self.model.obstacle_positions),
            map(map_index, self.model.goal_positions))
        # STEP log writing
        LOGFILE["reward"].append(reward)
        LOGFILE["time"].append(elapsed_time)
        LOGFILE["sat"].append(start_new_timer)
    dtf = pd.DataFrame(LOGFILE)
    dtf.to_csv(EXP_LOG + 'map-14-4-6_3000' + '.csv')


if __name__ == '__main__':

    yaml = YAML()
    with open('exp_param.yml', 'r') as param:
        args = yaml.load(param)

    init_logger()
    np.random.seed(args['seed'])
    random.seed(a=args['seed'])
    solver = POMCPV
    env = RobotModel(args)
    agent = AgentSMC(env, solver)
    EXP_LOG = "/home/experiments/LargeMap/"
    if not os.path.exists(EXP_LOG):
        os.makedirs(EXP_LOG)

    dicounted_return(agent)