Example #1
    def __init__(self, initial_converters: List[Converter],
                 strict_chaining: bool):
        """
        Constructor from a list of converters. An initial list of at least one should be provided. A conversion chain
        has a 'strict' mode defined at construction time, defining if chaining will be allowed in strict mode
        (output type = input type) or in non-strict mode (output type is subclass of input type)

        :param initial_converters: the initial list of converters
        :param strict_chaining: this is to indicate how chaining should be checked (exact type match or non-strict
        (subclasses).
        """
        # --init with the first converter of the list
        check_var(initial_converters,
                  var_types=list,
                  var_name='initial_converters',
                  min_len=1)
        super(ConversionChain, self).__init__(initial_converters[0].from_type,
                                              initial_converters[0].to_type)

        # -- store the 'strict mode' status
        check_var(strict_chaining, var_types=bool, var_name='strict_chaining')
        self.strict = strict_chaining

        # -- then add the others
        self._converters_list = [initial_converters[0]]
        if len(initial_converters) > 1:
            self.add_conversion_steps(initial_converters[1:], inplace=True)
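A minimal usage sketch (assuming c1 and c2 are compatible Converter instances, i.e. c1.to_type matches c2.from_type; the names are illustrative):

    # build a chain from two hypothetical converters, with strict chaining
    chain = ConversionChain([c1, c2], strict_chaining=True)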
Example #2
    def parse_item(self,
                   location: str,
                   item_type: Type[T],
                   item_name_for_log: str = None,
                   file_mapping_conf: FileMappingConfiguration = None,
                   options: Dict[str, Dict[str, Any]] = None) -> T:
        """
        Main method to parse an item of type item_type

        :param location:
        :param item_type:
        :param item_name_for_log:
        :param file_mapping_conf:
        :param options:
        :return:
        """

        # -- item_name_for_log
        item_name_for_log = item_name_for_log or ''
        check_var(item_name_for_log,
                  var_types=str,
                  var_name='item_name_for_log')

        if len(item_name_for_log) > 0:
            item_name_for_log = item_name_for_log + ' '
        self.logger.debug('**** Starting to parse single object ' +
                          item_name_for_log + 'of type <' +
                          get_pretty_type_str(item_type) + '> at location ' +
                          location + ' ****')

        # common steps
        return self._parse__item(item_type,
                                 location,
                                 file_mapping_conf,
                                 options=options)
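A minimal usage sketch (assuming a RootParser instance as in Example #4; the location and target type are illustrative):

    # parse the object found at ./data/config as a dict
    root_parser = RootParser()
    cfg = root_parser.parse_item('./data/config', dict)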
Example #3
def get_validated_type(object_type: Type[Any],
                       name: str,
                       enforce_not_joker: bool = True) -> Type[Any]:
    """
    Utility to validate a type :
    * None is not allowed,
    * 'object', 'AnyObject' and 'Any' lead to the same 'AnyObject' type
    * JOKER is either rejected (if enforce_not_joker is True, default) or accepted 'as is'

    :param object_type: the type to validate
    :param name: a name used in exceptions if any
    :param enforce_not_joker: a boolean, set to False to tolerate JOKER types
    :return: the fixed type
    """
    if object_type is object or object_type is Any or object_type is AnyObject:
        return AnyObject
    else:
        # -- !! Do not check TypeVar or Union : this is already handled at higher levels --
        if object_type is JOKER:
            # optionally check if JOKER is allowed
            if enforce_not_joker:
                raise ValueError('JOKER is not allowed for object_type')
        else:
            # note: we don't check the var earlier, since 'typing.Any' is not a subclass of type anymore
            check_var(object_type, var_types=type, var_name=name)
        return object_type
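A minimal sketch of the rules above (assuming get_validated_type, AnyObject and JOKER are importable from the library; exact import paths may differ):

    from typing import Any

    assert get_validated_type(object, 'my_type') is AnyObject
    assert get_validated_type(Any, 'my_type') is AnyObject
    assert get_validated_type(str, 'my_type') is str
    # JOKER is rejected by default, but tolerated with enforce_not_joker=False
    joker = get_validated_type(JOKER, 'my_type', enforce_not_joker=False)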
Example #4
    def __init__(self,
                 pretty_name: str = None,
                 *,
                 strict_matching: bool = False,
                 register_default_parsers: bool = True,
                 logger: Logger = default_logger):
        """
        Constructor. Initializes the dictionary of parsers with the optionally provided initial_parsers, and
        inits the lock that will be used for access in multithreading context.

        :param pretty_name:
        :param strict_matching:
        :param register_default_parsers:
        :param logger:
        """
        if not register_default_parsers:
            # otherwise this has already been done in __new__
            super(RootParser,
                  self).__init__(pretty_name or 'parsyfiles defaults',
                                 strict_matching)

        # remember if the user registers the default parsers - for future calls to install_basic_multifile_support()
        self.multifile_installed = register_default_parsers
        self.default_parsers_installed = register_default_parsers

        if register_default_parsers:
            # register_default_plugins(self)
            # we are already a copy of the default instance: don't register anything
            # if this assertion fails, that's a discrepancy between __new__ and __init__ arguments
            assert len(self.get_all_parsers()) > 0

        logger = logger or default_logger
        check_var(logger, var_types=Logger, var_name='logger')
        self.logger = logger
Example #5
    def __init__(self,
                 from_type: Type[S],
                 to_type: Type[T],
                 is_able_to_convert_func: Callable[[bool, Type[S], Type[T]],
                                                   bool] = None,
                 can_chain: bool = True):
        """
        Constructor for a converter from one source type (from_type) to one destination type (to_type).
        from_type may be any type except AnyObject or object. to_type may be AnyObject.

        A custom function may be provided to enable converters to reject some conversions, even if the provided type
        is a subclass of their source type and the expected type is a parent class of their dest type (or their dest
        type is 'AnyObject').

        :param from_type: the source type
        :param to_type: the destination type, or AnyObject (for generic converters)
        :param is_able_to_convert_func: an optional function taking (strict, from_type, to_type) as inputs and
        returning a boolean. It will be called in 'is_able_to_convert'. This allows implementors to reject some
        conversions even if they are compliant with their declared 'to_type'. Implementors should handle a 'None'
        value as a joker
        :param can_chain: a boolean (default True) indicating if other converters can be appended at the end of this
        converter to create a chain. Don't change this unless chaining really can never make sense.
        """
        # --from type
        self.from_type = get_validated_type(from_type, 'from_type')
        if from_type is AnyObject:
            raise ValueError(
                'A converter\'s \'from_type\' cannot be anything at the moment, it would be a mess.'
            )

        # --to type
        self.to_type = get_validated_type(to_type, 'to_type')

        # --conversion function
        check_var(is_able_to_convert_func,
                  var_types=Callable,
                  var_name='is_able_to_convert_func',
                  enforce_not_none=False)
        if is_able_to_convert_func is not None:
            # sanity check : check that conversion function handles jokers properly
            try:
                res = is_able_to_convert_func(True,
                                              from_type=None,
                                              to_type=None)
                if not res:
                    raise ValueError(
                        'Conversion function ' + str(is_able_to_convert_func) +
                        ' can not be registered '
                        'since it does not handle the JOKER (None) cases correctly'
                    )
            except Exception as e:
                raise ValueError(
                    'Error while registering conversion function ' +
                    str(is_able_to_convert_func) + ': ' +
                    str(e)).with_traceback(e.__traceback__)

        self.is_able_to_convert_func = is_able_to_convert_func

        # -- can chain
        check_var(can_chain, var_types=bool, var_name='can_chain')
        self.can_chain = can_chain
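A minimal sketch (assuming MyConverter is a hypothetical concrete subclass of the Converter class whose constructor is shown above). Note that the custom checker must accept the None jokers, which the sanity check in the constructor verifies:

    def not_from_bool(strict, from_type, to_type):
        # None is a joker and must be accepted
        return from_type is None or from_type is not bool

    conv = MyConverter(from_type=int, to_type=str,
                       is_able_to_convert_func=not_from_bool)

Example #6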
    def __init__(
            self,
            location: str,
            file_mapping_conf: AbstractFileMappingConfiguration = None,
            logger: Logger = None):
        """
        Creates a PersistedObject representing an object on the filesystem at location 'location'. It may be
        multifile or singlefile. When this object is created it recursively scans all of its children if any, and
        builds the corresponding PersistedObjects. All of this is logged on the provided logger if any.

        :param location:
        :param file_mapping_conf:
        :param logger:
        """

        # -- file mapping
        check_var(file_mapping_conf,
                  var_types=FileMappingConfiguration,
                  var_name='file_mapping_conf')
        self.file_mapping_conf = file_mapping_conf

        # -- logger
        check_var(logger,
                  var_types=Logger,
                  var_name='logger',
                  enforce_not_none=False)
        self.logger = logger

        try:
            # -- check single file or multifile thanks to the file mapping
            is_singlefile, ext, self._contents_or_path = self.file_mapping_conf.get_unique_object_contents(
                location)

            # -- store all information in the container (parent class)
            super(FileMappingConfiguration.RecursivePersistedObject,
                  self).__init__(location, is_singlefile, ext)

            # -- log this for easy debug
            if logger is not None:
                logger.info(str(self))

            # -- create and attach all the self.children if multifile
            if not self.is_singlefile:
                self.children = {
                    name:
                    FileMappingConfiguration.RecursivePersistedObject(
                        loc,
                        file_mapping_conf=self.file_mapping_conf,
                        logger=self.logger)
                    for name, loc in sorted(self._contents_or_path.items())
                }

        except (ObjectNotFoundOnFileSystemError,
                ObjectPresentMultipleTimesOnFileSystemError,
                IllegalContentNameError) as e:
            # -- log the object that was being built, just for consistency of log messages
            if logger is not None:
                logger.info(location)
            raise e.with_traceback(e.__traceback__)
Example #7
    def __init__(self, config: Dict[str, str]):
        check_var(config, var_types=dict, var_name='config')
        super(OpConfig, self).__init__()
        self.__wrapped_impl = config

        # here you may wish to perform additional checks on the wrapped object
        unrecognized = set(config.keys()) - {'operation'}
        if len(unrecognized) > 0:
            raise ValueError('Unrecognized options: ' + str(unrecognized))
Example #8
    def __init__(self, encoding: str = None):
        """
        Constructor, with the encoding registered to open the singlefiles.

        :param encoding: the encoding used to open the files. Default is 'utf-8'
        """
        check_var(encoding,
                  var_types=str,
                  var_name='encoding',
                  enforce_not_none=False)
        self.encoding = encoding or 'utf-8'
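A minimal usage sketch, using the FlatFileMappingConfiguration subclass from Example #17 (which forwards 'encoding' to this constructor):

    conf = FlatFileMappingConfiguration()
    assert conf.encoding == 'utf-8'  # default applied when encoding is None
    latin_conf = FlatFileMappingConfiguration(encoding='latin-1')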
Example #9
def get_options_for_id(options: Dict[str, Dict[str, Any]], identifier: str):
    """
    Helper method to extract, from the full options dict of dicts, either the options related to the given identifier
    or an empty dictionary. It also performs all the var type checks

    :param options:
    :param identifier:
    :return:
    """
    check_var(options, var_types=dict, var_name='options')
    res = options.get(identifier, dict())
    check_var(res, var_types=dict, var_name='options[' + identifier + ']')
    return res
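A minimal sketch (the identifiers are illustrative):

    opts = {'my_parser_id': {'encoding': 'utf-8'}}
    assert get_options_for_id(opts, 'my_parser_id') == {'encoding': 'utf-8'}
    assert get_options_for_id(opts, 'unknown_id') == {}

Example #10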
def check_extensions(extensions: Set[str], allow_multifile: bool = False):
    """
    Utility method to check that all extensions in the provided set are valid

    :param extensions:
    :param allow_multifile:
    :return:
    """
    check_var(extensions, var_types=set, var_name='extensions')

    # -- check them one by one
    for ext in extensions:
        check_extension(ext, allow_multifile=allow_multifile)
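A minimal sketch (MULTIFILE_EXT is 'multifile', per Example #25):

    check_extensions({'.txt', '.csv'})                             # ok
    check_extensions({'.txt', 'multifile'}, allow_multifile=True)  # ok
    # check_extensions({'txt'})                                    # raises ValueError: missing '.'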
Example #11
    def is_able_to_convert(self, strict: bool, from_type: Type[Any], to_type: Type[Any]) \
            -> Tuple[bool, bool, bool]:
        """
        Utility method to check if a parser is able to parse a given type, either in
        * strict mode : provided_type and desired_type must be equal to this converter's from_type and to_type
        respectively (or the to_type does not match but this converter is generic
        * inference mode (non-strict) : provided_type may be a subclass of from_type, and to_type may be a subclass
        of desired_type

        If a custom function was provided at construction time, it is called to enable converters to reject some
        conversions based on source and/or dest type provided.

        :param strict: a boolean indicating if matching should be in strict mode or not
        :param from_type:
        :param to_type:
        :return: a tuple of 3 booleans : (does match?, strict source match? (None if no match), strict dest match?
        (None if no match))
        """
        check_var(strict, var_types=bool, var_name='strict')

        # -- first call custom checker if provided
        if self.is_able_to_convert_func is not None and not self.is_able_to_convert_func(
                strict, from_type, to_type):
            return False, None, None

        # -- from_type strict match
        if from_type is None or from_type is self.from_type or is_any_type(
                from_type):
            # -- check to type strict
            if to_type is None or self.is_generic() or (to_type is
                                                        self.to_type):
                return True, True, True  # strict to_type match
            # -- check to type non-strict
            elif (not strict) and issubclass(self.to_type, to_type):
                return True, True, False  # approx to_type match

        # -- from_type non-strict match
        elif (not strict) and issubclass(from_type, self.from_type):
            # -- check to type strict
            if to_type is None or self.is_generic() or (to_type is
                                                        self.to_type):
                return True, False, True  # strict to_type match
            # -- check to type non-strict
            elif (not strict) and issubclass(self.to_type, to_type):
                return True, False, False  # approx to_type match

        # -- otherwise no match
        return False, None, None
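A minimal sketch of the returned tuples, assuming conv converts int to str (the names are illustrative):

    conv.is_able_to_convert(strict=True, from_type=int, to_type=str)    # (True, True, True)
    conv.is_able_to_convert(strict=False, from_type=bool, to_type=str)  # (True, False, True): bool subclasses int
    conv.is_able_to_convert(strict=True, from_type=bool, to_type=str)   # (False, None, None)

Example #12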
    def __init__(self,
                 base_parser: AnyParser,
                 converter: Converter[S, T],
                 strict: bool,
                 base_parser_chosen_dest_type: Type[S] = None):
        """
        Constructor from a base parser and a conversion chain.
        Even if the base parser is able to parse several types or even any type, at the moment converters only support
        *one* source type that cannot be 'any'. for this reason in this constructor the caller is expected to restrict
        the parser to a unique destination type explicitly

        :param base_parser:
        :param converter:
        :param strict:
        :param base_parser_chosen_dest_type
        """
        check_var(base_parser, var_types=AnyParser, var_name='base_parser')

        # Removed this check : in some cases, it makes sense
        # (for example use a generic parser to parse object A then convert A to B ; might be more convenient than using
        # the generic parser to parse B directly)
        #
        # if base_parser.is_generic():
        #     raise ValueError('Creating a parsing chain from a base parser able to parse any type is just pointless.')

        self._base_parser = base_parser

        # did the user explicitly restrict the destination type of the base parser ?
        if base_parser_chosen_dest_type is None:
            if len(base_parser.supported_types) != 1:
                raise ValueError(
                    'Cannot create a parsing chain from a parser that is able to parse several types '
                    'without restricting it explicitly. Please set a value for '
                    '\'base_parser_chosen_dest_type\'')
            else:
                # supported types = the parser's ones (that is, only 1)
                parser_out_type = next(iter(base_parser.supported_types))
        else:
            check_var(base_parser_chosen_dest_type,
                      var_types=type,
                      var_name='base_parser_chosen_dest_type')
            parser_out_type = base_parser_chosen_dest_type

        # set the converter
        check_var(converter, var_types=Converter, var_name='converter')
        if not converter.is_able_to_convert(
                strict=strict, from_type=parser_out_type, to_type=None):
            raise ValueError(
                'Cannot chain this parser and this converter : types are not consistent'
            )

        self._converter = converter
        super(ParsingChain,
              self).__init__(supported_types={converter.to_type},
                             supported_exts=base_parser.supported_exts)

        check_var(strict, var_types=bool, var_name='strict')
        self.strict = strict
Example #13
    def __init__(self, parsers: List[AnyParser] = None):
        """
        Constructor from an initial list of parsers
        :param parsers:
        """

        # -- init
        # explicitly DON'T use the base constructor
        # super(CascadingParser, self).__init__(supported_types=set(), supported_exts=set())
        self.configured = False
        self._parsers_list = []

        if parsers is not None:
            check_var(parsers, var_types=list, var_name='parsers')
            for parser in parsers:
                self.add_parser_to_cascade(parser)
Example #14
    def __init__(self, object_type: Type[T], obj_on_filesystem: PersistedObject, parser: _BaseParser,
                 logger: Logger, accept_union_types: bool = False):
        """
        Constructor like in PersistedObject, but with an additional logger.

        :param object_type:
        :param obj_on_filesystem:
        :param parser:
        :param logger:
        :param accept_union_types: a boolean to accept object_type being a Union or a TypeVar with union constraints
        """
        super(_BaseParsingPlan, self).__init__(object_type, obj_on_filesystem, parser,
                                               accept_union_types=accept_union_types)

        # -- logger
        check_var(logger, var_types=Logger, var_name='logger', enforce_not_none=False)
        self.logger = logger
Example #15
    def get_multifile_object_child_location(self, parent_location: str,
                                            child_name: str):
        """
        Implementation of the parent abstract method.

        In this mode the attribute is a file with the same prefix, separated from the parent object name by
        the character sequence <self.separator>

        :param parent_location: the absolute file prefix of the parent item.
        :param child_name:
        :return: the file prefix for this attribute
        """
        check_var(parent_location, var_types=str, var_name='parent_location')
        check_var(child_name, var_types=str, var_name='child_name')

        # a child location is built by adding the separator between the child name and the parent location
        return parent_location + self.separator + child_name
Example #16
    def __init__(self,
                 supported_types: Set[Type],
                 supported_exts: Set[str],
                 can_chain: bool = True,
                 is_able_to_parse_func: Callable[[bool, Type[Any]],
                                                 bool] = None):
        """
        Constructor for a parser declaring support for possibly both singlefile and multifile, with a mandatory list of
        supported object types.

        It is possible to declare that a parser is able to parse any type (typically, a pickle parser), by using
        supported_types={Any} or {object} or {AnyObject}. It is also possible to declare a custom function
        'is_able_to_parse_func' telling if a specific object type is supported, in order to accept most types but not
        all.

        Note: users wishing to only implement singlefile OR multifile should rather use or extend SingleFileParser or
        MultiFileParser classes.

        :param supported_types: a set of supported object types that may be parsed. To declare that a parser is able to
        parse any type this should be {AnyObject} ({object} and {Any} are allowed but will be automatically replaced
        with {AnyObject}).
        :param supported_exts: a set of supported file extensions that may be parsed
        :param can_chain: a boolean (default True) indicating if converters can be appended at the end of this
        parser to create a chain. Don't change this unless chaining really can never make sense.
        :param is_able_to_parse_func: an optional custom function to allow parsers to reject some types. Its
        signature should be my_func(strict_mode, desired_type) -> bool
        """
        # -- check types
        self.supported_types = get_validated_types(supported_types,
                                                   'supported_types')

        # -- check extensions
        check_extensions(supported_exts, allow_multifile=True)
        self.supported_exts = supported_exts

        # -- check can_chain
        check_var(can_chain, var_types=bool, var_name='can_chain')
        self.can_chain = can_chain

        # -- check is_able_to_parse_func
        check_var(is_able_to_parse_func,
                  var_types=Callable,
                  var_name='is_able_to_parse_func',
                  enforce_not_none=False)
        self.is_able_to_parse_func = is_able_to_parse_func
Example #17
    def __init__(self, separator: str = None, encoding: str = None):
        """
        :param separator: the character sequence used to separate an item name from an item attribute name. Only
        used in flat mode. Default is '.'
        :param encoding: encoding used to open the files. Default is 'utf-8'
        """
        super(FlatFileMappingConfiguration, self).__init__(encoding=encoding)

        # -- check separator
        check_var(separator,
                  var_types=str,
                  var_name='separator',
                  enforce_not_none=False,
                  min_len=1)
        self.separator = separator or '.'
        if '/' in self.separator or '\\' in self.separator:
            raise ValueError(
                'Separator cannot contain a folder separation character')
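A minimal sketch of the separator rules:

    conf = FlatFileMappingConfiguration(separator='--')
    assert conf.separator == '--'
    # FlatFileMappingConfiguration(separator='/')  # raises ValueError: folder separation character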
Example #18
    def add_conversion_steps(self,
                             converters: List[Converter],
                             inplace: bool = False):
        """
        Utility method to add converters to this chain. If inplace is True, this object is modified and
        None is returned. Otherwise, a copy is returned

        :param converters: the list of converters to add
        :param inplace: boolean indicating whether to modify this object (True) or return a copy (False)
        :return: None or a copy with the converters added
        """
        check_var(converters, var_types=list, var_name='converters', min_len=1)
        if inplace:
            for converter in converters:
                self.add_conversion_step(converter, inplace=True)
        else:
            new = copy(self)
            new.add_conversion_steps(converters, inplace=True)
            return new
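A minimal sketch (assuming chain is an existing ConversionChain and c3, c4 are compatible converters; the names are illustrative):

    chain.add_conversion_steps([c3, c4], inplace=True)  # modifies chain, returns None
    longer = chain.add_conversion_steps([c3, c4])       # leaves chain intact, returns a modified copy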
Example #19
def get_validated_types(object_types: Set[Type], set_name: str) -> Set[Type]:
    """
    Utility to validate a set of types:
    * None is not allowed as a whole or within the set,
    * object and Any are converted into AnyObject
    * if AnyObject is in the set, it must be the only element

    :param object_types: the set of types to validate
    :param set_name: a name used in exceptions if any
    :return: the fixed set of types
    """
    check_var(object_types, var_types=set, var_name=set_name)
    res = {get_validated_type(typ, set_name + '[x]') for typ in object_types}
    if AnyObject in res and len(res) > 1:
        raise ValueError(
            'The set of types contains \'object\'/\'Any\'/\'AnyObject\', so no other type must be present '
            'in the set')
    else:
        return res
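A minimal sketch of the normalization rules:

    assert get_validated_types({int, str}, 'types') == {int, str}
    assert get_validated_types({object}, 'types') == {AnyObject}  # object -> AnyObject
    # get_validated_types({AnyObject, int}, 'types')  # raises ValueError: AnyObject must be alone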
Example #20
    def __init__(self, parser_function: Union[ParsingMethodForStream, ParsingMethodForFile],
                 supported_types: Set[Type[T]], supported_exts: Set[str], streaming_mode: bool = True,
                 custom_name: str = None, function_args: dict = None, option_hints: Callable[[], str] = None):
        """
        Constructor from a parser function , a mandatory set of supported types, and a mandatory set of supported
        extensions.

        Two kind of parser_function may be provided as implementations:
        * if streaming_mode=True (default), this class handles opening and closing the file, and parser_function should
        have a signature such as my_func(desired_type: Type[T], opened_file: TextIOBase, **kwargs) -> T
        * if streaming_mode=False, this class does not handle opening and closing the file. parser_function should be a
        my_func(desired_type: Type[T], file_path: str, encoding: str, **kwargs) -> T

        :param parser_function:
        :param streaming_mode: an optional boolean (default True) indicating if the function should be called with an
        open stream or with a file path
        :param supported_types: mandatory set of supported types, or {
        :param supported_exts: mandatory set of supported singlefile extensions ('.txt', '.json' ...)
        :param function_args: kwargs that will be passed to the function at every call
        :param option_hints: an optional method returning a string containing the options descriptions
        """
        super(SingleFileParserFunction, self).__init__(supported_types=supported_types, supported_exts=supported_exts)

        # -- check the custom name
        check_var(custom_name, var_types=str, var_name='custom_name', enforce_not_none=False)
        self._custom_name = custom_name

        # -- check the function
        # TODO check the function signature to prevent TypeErrors to happen (and then remove the catch block below in _parse_singlefile)
        check_var(parser_function, var_types=Callable, var_name='parser_function')
        self._parser_func = parser_function

        # -- check the streaming mode
        check_var(streaming_mode, var_types=bool, var_name='streaming_mode')
        self._streaming_mode = streaming_mode

        # -- remember the static args values
        check_var(function_args, var_types=dict, var_name='function_args', enforce_not_none=False)
        self.function_args = function_args

        # -- option hints
        check_var(option_hints, var_types=Callable, var_name='option_hints', enforce_not_none=False)
        self._option_hints_func = option_hints
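A minimal sketch of the streaming-mode signature (the default), where the class opens and closes the file and passes the stream to the function:

    from io import TextIOBase
    from typing import Type

    def read_str_from_txt(desired_type: Type[str], opened_file: TextIOBase, **kwargs) -> str:
        return opened_file.read()

    txt_parser = SingleFileParserFunction(read_str_from_txt,
                                          supported_types={str},
                                          supported_exts={'.txt'})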
Example #21
    def __init__(self,
                 from_type: Type[S],
                 to_type: Type[T],
                 is_able_to_convert_func: Callable[[bool, Type[S], Type[T]],
                                                   bool] = None,
                 can_chain: bool = True):
        """
        Constructor for a converter from one source type (from_type) to one destination type (to_type).
        from_type may be any type except AnyObject or object. to_type may be AnyObject.

        A custom function may be provided to enable converters to reject some conversions, even if the provided type
        is a subclass of their source type and the expected type is a parent class of their dest type (or their dest
        type is 'AnyObject').

        :param from_type: the source type
        :param to_type: the destination type, or AnyObject (for generic converters)
        :param is_able_to_convert_func: an optional function taking (strict, from_type, to_type) as inputs and
        returning a boolean. It will be called in 'is_able_to_convert'. This allows implementors to reject some
        conversions even if they are compliant with their declared 'to_type'.
        :param can_chain: a boolean (default True) indicating if other converters can be appended at the end of this
        converter to create a chain. Don't change this unless chaining really can never make sense.
        """
        # --from type
        self.from_type = get_validated_type(from_type, 'from_type')
        if from_type is AnyObject:
            raise ValueError(
                'A converter\'s \'from_type\' cannot be anything at the moment, it would be a mess.'
            )

        # --to type
        self.to_type = get_validated_type(to_type, 'to_type')

        # --conversion function
        check_var(is_able_to_convert_func,
                  var_types=Callable,
                  var_name='is_able_to_convert_func',
                  enforce_not_none=False)
        self.is_able_to_convert_func = is_able_to_convert_func

        # -- can chain
        check_var(can_chain, var_types=bool, var_name='can_chain')
        self.can_chain = can_chain
Example #22
    def __init__(self, parsers: Union[Iterable[AnyParser], Dict[Type, Iterable[AnyParser]]] = None):
        """
        Constructor from an initial list of parsers
        :param parsers:
        """

        # -- init
        # explicitly DON'T use the base constructor
        # super(CascadingParser, self).__init__(supported_types=set(), supported_exts=set())
        self.configured = False
        self._parsers_list = []

        if parsers is not None:
            check_var(parsers, var_types=Iterable, var_name='parsers')
            if isinstance(parsers, Mapping):
                for typ, parser in parsers.items():
                    self.add_parser_to_cascade(parser, typ)
            else:
                for parser in parsers:
                    self.add_parser_to_cascade(parser)
Example #23
    def get_multifile_object_child_location(self, parent_item_prefix: str,
                                            child_name: str) -> str:
        """
        Implementation of the parent abstract method.
        In this mode the attribute is a file inside the parent object folder

        :param parent_item_prefix: the absolute file prefix of the parent item.
        :param child_name: the name of the child attribute
        :return: the file prefix for this attribute
        """
        check_var(parent_item_prefix,
                  var_types=str,
                  var_name='parent_item_prefix')
        check_var(child_name, var_types=str, var_name='child_name')

        # assert that folder_path is a folder
        if not isdir(parent_item_prefix):
            raise ValueError(
                'Cannot get attribute item in non-flat mode, parent item path is not a folder : '
                + parent_item_prefix)
        return join(parent_item_prefix, child_name)
Example #24
    def __init__(self, desired_type: Type[T], obj_on_filesystem: PersistedObject, parser: AnyParser,
                 parser_list: List[Tuple[Type, Parser]], logger: Logger):

        # We accept that the desired type is a Union or a TypeVar:
        # CascadingParser can provide alternatives either to the same type (no Union)
        # or to different ones (Union)
        super(CascadingParser.CascadingParsingPlan, self).__init__(desired_type, obj_on_filesystem, parser,
                                                                   accept_union_types=True)

        # -- parser list
        check_var(parser_list, var_types=list, var_name='parser_list', min_len=1)
        self.parser_list = parser_list

        # -- the variables that will contain the active parser and its parsing plan
        self.active_parser_idx = -1
        self.active_parsing_plan = None
        self.parsing_plan_creation_errors = OrderedDict()

        # -- activate the next one
        self.activate_next_working_parser(logger=logger)
Example #25
def check_extension(extension: str, allow_multifile: bool = False):
    """
    Utility method to check that the provided extension is valid. The extension should either be MULTIFILE_EXT
    (='multifile') or start with EXT_SEPARATOR (='.') and contain only one occurrence of EXT_SEPARATOR

    :param extension:
    :param allow_multifile:
    :return:
    """
    check_var(extension, var_types=str, var_name='extension')

    # Extension should either be 'multifile' or start with EXT_SEPARATOR and contain only one EXT_SEPARATOR
    if (extension.startswith(EXT_SEPARATOR) and extension.count(EXT_SEPARATOR) == 1) \
            or (allow_multifile and extension == MULTIFILE_EXT):
        # ok
        pass
    else:
        raise ValueError('\'extension\' should start with \'' + EXT_SEPARATOR + '\' and contain no other '
                         'occurrence of \'' + EXT_SEPARATOR + '\'' + (', or be equal to \'' + MULTIFILE_EXT + '\' (for '
                         'multifile object parsing)' if allow_multifile else ''))
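A minimal sketch of the rule (EXT_SEPARATOR is '.', MULTIFILE_EXT is 'multifile'):

    check_extension('.txt')                               # ok: starts with '.', single occurrence
    check_extension(MULTIFILE_EXT, allow_multifile=True)  # ok
    # check_extension('.tar.gz')                          # raises ValueError: two '.' occurrences

Example #26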
    def __init__(self, desired_type: Type[T],
                 obj_on_filesystem: PersistedObject, parser: AnyParser,
                 parser_list: List[Parser], logger: Logger):

        super(CascadingParser.CascadingParsingPlan,
              self).__init__(desired_type, obj_on_filesystem, parser)

        # -- parser list
        check_var(parser_list,
                  var_types=list,
                  var_name='parser_list',
                  min_len=1)
        self.parser_list = parser_list

        # -- the variables that will contain the active parser and its parsing plan
        self.active_parser_idx = -1
        self.active_parsing_plan = None
        self.parsing_plan_creation_errors = dict()

        # -- activate the next one
        self.activate_next_working_parser(logger=logger)
Example #27
    def __init__(self, object_type: Type[T],
                 obj_on_filesystem: PersistedObject,
                 parser: _BaseParserDeclarationForRegistries):
        """
        Creates a parsing plan, from an object's type, an object's files, and a parser.

        :param object_type:
        :param obj_on_filesystem:
        :param parser:
        """
        # DON'T CALL SUPER INIT, since we wrap/proxy an existing object

        # check and apply defaults
        # -- object_type
        check_var(object_type, var_types=type, var_name='object_type')
        self.obj_type = object_type
        # -- obj_files
        check_var(obj_on_filesystem,
                  var_types=PersistedObject,
                  var_name='obj_on_filesystem')
        self.obj_on_fs_to_parse = obj_on_filesystem
        # -- parser
        check_var(parser,
                  var_types=_BaseParserDeclarationForRegistries,
                  var_name='parser')
        self.parser = parser
Example #28
    def __init__(self, object_type: Type[T], obj_on_filesystem: PersistedObject,
                 parser: _BaseParserDeclarationForRegistries, accept_union_types: bool = False):
        """
        Creates a parsing plan, from an object's type, an object's files, and a parser.

        :param object_type:
        :param obj_on_filesystem:
        :param parser:
        :param accept_union_types: a boolean to accept when object_type is a Union or a TypeVar with union constraints
        """
        # DON'T CALL SUPER INIT, since we wrap/proxy an existing object

        # check and apply defaults
        # -- object_type
        t = get_alternate_types_resolving_forwardref_union_and_typevar(object_type)
        if len(t) == 1:
            check_var(t[0], var_types=type, var_name='object_type')
            self.obj_type = t[0]
        elif not accept_union_types:
            raise ValueError('Parsing Plan can not be created for Union type {}'.format(object_type))
        else:
            self.obj_type = object_type
        # -- obj_files
        check_var(obj_on_filesystem, var_types=PersistedObject, var_name='obj_on_filesystem')
        self.obj_on_fs_to_parse = obj_on_filesystem
        # -- parser
        check_var(parser, var_types=_BaseParserDeclarationForRegistries, var_name='parser')
        self.parser = parser
Example #29
def get_validated_type(object_type: Type[Any],
                       name: str,
                       enforce_not_none: bool = True) -> Type[Any]:
    """
    Utility to validate a type:
    * None is not allowed,
    * 'object' and 'Any' are converted into 'AnyObject'

    :param object_type: the type to validate
    :param name: a name used in exceptions if any
    :param enforce_not_none: a boolean, set to False to tolerate None types
    :return: the fixed type
    """
    if object_type is object or object_type is Any or object_type is AnyObject:
        return AnyObject
    else:
        # note: we don't check the var earlier, since 'typing.Any' is no longer a subclass of type
        check_var(object_type,
                  var_types=type,
                  var_name=name,
                  enforce_not_none=enforce_not_none)
        return object_type
Example #30
    def __init__(self, lazyloadable_keys: List[str],
                 loading_method: Callable[[str], Any]):
        """
        Constructor with a list of keys for which the value can actually be loaded later (when needed) from a
        loading_method.

        :param lazyloadable_keys:
        :param loading_method:
        """
        # initialize the inner dictionary
        self.inner_dict = OrderedDict()
        self.inner_dict_readonly_wrapper = LazyDictionary.ReadOnlyDictProxy(
            self.inner_dict)

        # store the list of loadable keys
        check_var(lazyloadable_keys, var_types=list, var_name='lazyloadable_keys')
        self.lazyloadable_keys = lazyloadable_keys

        # loading method
        check_var(loading_method,
                  var_types=Callable,
                  var_name='loading_method')
        self.loading_method = loading_method
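A minimal usage sketch (assuming values are loaded on first access through loading_method; the keys and loader are illustrative):

    d = LazyDictionary(['a.txt', 'b.txt'],
                       loading_method=lambda key: open(key).read())
    contents = d['a.txt']  # triggers loading_method('a.txt') on first access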