Example #1
    def __init__(self, database, schemas, henges=None, checksum_function=md5):
        """
        A user interface to insert and retrieve decomposable recursive unique
        identifiers (DRUIDs).

        :param dict database: Dict-like lookup database for sequences and
            hashes.
        :param list schemas: One or more jsonschema schemas describing the
            data types stored by this Henge.
        :param dict henges: One or more henge objects, indexed by object name,
            for remote storage of items.
        :param function(str) -> str checksum_function: Default function to
            compute the digest of the serialized items stored in this henge.
        """
        self.database = database
        self.checksum_function = checksum_function
        self.digest_version = checksum_function.__name__

        if isinstance(schemas, dict):
            _LOGGER.debug("Using old dict schemas")
            populated_schemas = {}
            for schema_key, schema_value in schemas.items():
                if isinstance(schema_value, str):
                    populated_schemas[schema_key] = yacman.load_yaml(
                        schema_value)
            self.schemas = populated_schemas
        else:
            populated_schemas = []
            for schema_value in schemas:
                if isinstance(schema_value, str):
                    if os.path.isfile(schema_value):
                        populated_schemas.append(
                            yacman.load_yaml(schema_value))
                    else:
                        populated_schemas.append(yaml.safe_load(schema_value))
            split_schemas = {}
            for s in populated_schemas:
                split_schemas.update(split_schema(s))

            self.schemas = split_schemas

        # Identify which henge to use for each item type. Default to self:
        self.henges = {}
        for item_type in self.item_types:
            self.henges[item_type] = self

        # Next add in any remote henges for item types not stored in self:
        if henges:
            for item_type, henge in henges.items():
                if item_type not in self.item_types:
                    self.schemas[item_type] = henge.schemas[item_type]
                    self.henges[item_type] = henge
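
A minimal usage sketch for this constructor; the schema path, the plain-dict
backend, and the md5 helper shown here are illustrative assumptions, not code
from the henge package itself.

# Hypothetical usage of the Henge constructor documented above.
import hashlib

def md5(seq):
    # str -> str digest, matching the default checksum_function contract
    return hashlib.md5(seq.encode()).hexdigest()

backend = {}                                  # any dict-like store works
h = Henge(database=backend,
          schemas=["sequence_schema.yaml"],   # path to a YAML jsonschema file
          checksum_function=md5)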
Example #2
def _check_recipe(recipe):
    """
    Check whether there are any key name clashes in the recipe requirements,
    and raise an error if there are.

    :param dict recipe: asset_build_package
    :return dict: the validated recipe
    :raise ValueError: if any key names are duplicated
    """
    # experimental feature; recipe jsonschema validation
    from jsonschema import validate
    from yacman import load_yaml

    SCHEMA_SRC = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                              "schemas", "recipe_schema.yaml")
    if os.path.exists(SCHEMA_SRC):
        validate(recipe, load_yaml(filepath=SCHEMA_SRC))
        _LOGGER.info(
            "Recipe validated successfully against a schema: {}".format(
                SCHEMA_SRC))
    else:
        _LOGGER.warning("Recipe schema not found: {}".format(SCHEMA_SRC))
    # end of validation
    req_keys = []
    for req in [REQ_PARAMS, REQ_ASSETS, REQ_FILES]:
        req_keys.extend([req_dict[KEY] for req_dict in recipe[req]])
    unique = []
    for k in req_keys:
        if k not in unique:
            unique.append(k)
        else:
            raise ValueError("The recipe contains a duplicated requirement"
                             " key '{}', which is not permitted.".format(k))
    return recipe
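
To illustrate the duplicate-key check, here is a sketch with hypothetical
stand-ins for the module-level constants REQ_PARAMS, REQ_ASSETS, REQ_FILES,
and KEY (the real values live in the package's constants module):

# Hypothetical constant values and recipe; illustrative only.
REQ_PARAMS, REQ_ASSETS, REQ_FILES, KEY = "params", "assets", "files", "name"

recipe = {
    "params": [{"name": "threads"}],
    "assets": [{"name": "fasta"}],
    "files": [{"name": "fasta"}],   # clashes with the asset entry above
}
# _check_recipe(recipe) would raise ValueError for the duplicated key 'fasta'.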
Example #3
    def update_packages(self, config_file):
        """
        Parse data from divvy configuration file.

        Given a divvy configuration file, this function will update (not
        overwrite) existing compute packages with the values it defines. It
        does not affect any currently active settings.

        :param str config_file: path to file with new divvy configuration data
        :return bool: True if the update completed
        """
        entries = yacman.load_yaml(config_file)
        self.update(entries)
        return True
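
A usage sketch; the file name and package contents below are illustrative and
assume dcc is an existing divvy ComputingConfiguration object:

# new_compute.yaml (hypothetical contents):
#   compute_packages:
#     slurm:
#       submission_template: templates/slurm_template.sub
#       submission_command: sbatch
dcc.update_packages("new_compute.yaml")   # merges new packages, keeps existing ones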
Example #4
    def __init__(self, config, pipeline_type=None):
        super(PipelineInterface, self).__init__()

        if isinstance(config, Mapping):
            self.pipe_iface_file = None
            self.source = None
        else:
            _LOGGER.debug("Reading {} from: {}".
                          format(self.__class__.__name__, config))
            self.pipe_iface_file = config
            self.source = config
            config = load_yaml(config)
        self.update(config)
        self._validate(PIFACE_SCHEMA_SRC, flavor=pipeline_type)
        if "path" in self:
            warn(message="'path' specification as a top-level pipeline "
                         "interface key is deprecated and will be removed with "
                         "the next release. Please use 'paths' section "
                         "from now on.", category=DeprecationWarning)
            self._expand_paths(["path"])
        self._expand_paths(["compute", "dynamic_variables_script_path"])
Example #5
import os

import jinja2
from starlette.responses import FileResponse
from starlette.staticfiles import StaticFiles
from starlette.templating import Jinja2Templates
from typing import List
from yacman import load_yaml

from ..const import EIDO_TEMPLATES_PATH, STATICS_PATH
from ..dependencies import *
from ..main import _PEP_STORES

templates = Jinja2Templates(directory=EIDO_TEMPLATES_PATH)
je = jinja2.Environment(loader=jinja2.FileSystemLoader(EIDO_TEMPLATES_PATH))

path_to_schemas = f"{os.path.dirname(__file__)}/schemas.yaml"
try:
    schemas_to_test = load_yaml(path_to_schemas)
except Exception as e:
    print(e, flush=True)
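
The schemas.yaml file loaded above is not shown in this excerpt; a hypothetical
shape, assuming it simply maps schema names to sources that load_yaml can read,
could be:

# schemas.yaml (hypothetical contents):
#   pep-2.0.0: https://example.com/schemas/pep-2.0.0.yaml
#   my-pipeline: https://example.com/schemas/my-pipeline.yaml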


def vwrap(p, schema):
    """
    Validation wrapper function

    This little helper function just wraps the eido validate_project function
    to catch the exceptions raised and convert them into error reports.
    @param p peppy.Project object to validate
    @param schema Eido schema to validate against
    """
    x = None
    try:
Example #6
    def test_update_packages(self, dcc, config_file):
        """Test updating does not produce empty compute packages"""
        entries = load_yaml(config_file)
        dcc.update(entries)
        assert dcc.compute_packages != YacAttMap()
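
A sketch of pytest fixtures that could supply the dcc and config_file arguments;
the path and the constructor call are illustrative assumptions about the
surrounding test suite:

import pytest

@pytest.fixture
def config_file():
    return "tests/data/divvy_config.yaml"      # hypothetical path

@pytest.fixture
def dcc(config_file):
    # assumes divvy's ComputingConfiguration accepts a config file path
    return ComputingConfiguration(filepath=config_file)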
Example #7
    def __init__(self,
                 database,
                 schemas,
                 schemas_str=[],
                 henges=None,
                 checksum_function=md5):
        """
        A user interface to insert and retrieve decomposable recursive unique
        identifiers (DRUIDs).

        :param dict database: Dict-like lookup database for sequences and
            hashes.
        :param list schemas: A list of file paths or URLs pointing to YAML
            jsonschema schemas describing the data types stored by this Henge.
        :param list schemas_str: A list of strings containing YAML jsonschema
            schemas directly.
        :param dict henges: One or more henge objects, indexed by object name,
            for remote storage of items.
        :param function(str) -> str checksum_function: Default function to
            compute the digest of the serialized items stored in this henge.
        """
        self.database = database
        self.checksum_function = checksum_function
        self.digest_version = "md5"
        self.flexible_digests = True

        # TODO: Right now you can pass a file, or a URL, or some yaml directly
        # into the schemas param. I want to split that out so that at least the
        # yaml direct is its own arg

        if isinstance(schemas, dict):
            _LOGGER.debug("Using old dict schemas")
            populated_schemas = {}
            for schema_key, schema_value in schemas.items():
                if isinstance(schema_value, str):
                    populated_schemas[schema_key] = yacman.load_yaml(
                        schema_value)
            self.schemas = populated_schemas
        else:
            populated_schemas = []
            if isinstance(schemas, str):
                _LOGGER.error(
                    "The schemas should be a list. Please pass a list of schemas"
                )
                schemas = [schemas]
            for schema_value in schemas:
                if isinstance(schema_value, str):
                    if os.path.isfile(schema_value):
                        populated_schemas.append(
                            yacman.load_yaml(schema_value))
                    elif is_url(schema_value):
                        populated_schemas.append(read_url(schema_value))
                    else:
                        _LOGGER.error(
                            f"Schema file not found: {schema_value}. Use schemas_str if you meant to specify a direct schema"
                        )
                        # populated_schemas.append(yaml.safe_load(schema_value))

            for schema_value in schemas_str:
                populated_schemas.append(yaml.safe_load(schema_value))

            split_schemas = {}
            for s in populated_schemas:
                split_schemas.update(split_schema(s))

            self.schemas = split_schemas

        # Default array object schema
        # I once wanted the array type to be built in, but now I don't.
        # self.schemas["array"] = {"type": "array", "items": {"type": "string"}}

        # Identify which henge to use for each item type. Default to self:
        self.henges = {}
        for item_type in self.item_types:
            self.henges[item_type] = self

        # Next add in any remote henges for item types not stored in self:
        if henges:
            for item_type, henge in henges.items():
                if item_type not in self.item_types:
                    self.schemas[item_type] = henge.schemas[item_type]
                    self.henges[item_type] = henge
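
A sketch of the schemas_str path introduced in this version; the inline schema
content is an illustrative assumption:

# Hypothetical inline YAML schema passed directly as a string.
seq_schema = """
description: sequence
type: string
"""
h = Henge(database={}, schemas=[], schemas_str=[seq_schema])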