def parse_item(self, location: str, item_type: Type[T], item_name_for_log: str = None,
               file_mapping_conf: FileMappingConfiguration = None,
               options: Dict[str, Dict[str, Any]] = None) -> T:
    """
    Main entry point to parse a single object of type `item_type` located at `location`.

    :param location: the location of the object on the file system
    :param item_type: the type the object should be parsed into
    :param item_name_for_log: an optional name used in log messages only
    :param file_mapping_conf: an optional file mapping configuration
    :param options: an optional dictionary of option sets for the parsers
    :return: the parsed object
    """
    # validate the (optional) display name used in the log message below
    name_in_logs = item_name_for_log or ''
    check_var(name_in_logs, var_types=str, var_name='item_name_for_log')
    if name_in_logs:
        name_in_logs = name_in_logs + ' '

    self.logger.debug('**** Starting to parse single object ' + name_in_logs + 'of type <'
                      + get_pretty_type_str(item_type) + '> at location ' + location + ' ****')

    # delegate to the shared parsing routine
    return self._parse__item(item_type, location, file_mapping_conf, options=options)
def _parse_multifile(self, desired_type: Type[T], obj: PersistedObject,
                     parsing_plan_for_children: Dict[str, AnyParser._RecursiveParsingPlan],
                     logger: Logger, options: Dict[str, Dict[str, Any]]) -> T:
    """
    Parses a multifile object: executes every child parsing plan first, then assembles the results into an
    instance of `desired_type` by passing them as constructor attributes.

    :param desired_type: the type to build from the parsed children
    :param obj: the persisted multifile object (used in the log message)
    :param parsing_plan_for_children: one parsing plan per child / constructor attribute
    :param logger: the logger to use
    :param options: a dictionary of option sets
    :return: the assembled object
    """
    # 1) execute all children plans, in key order so that multiple-error scenarios are reproducible
    results = {child_name: child_plan.execute(logger, options)
               for child_name, child_plan in sorted(parsing_plan_for_children.items())}

    # 2) build the final object from the parsed children
    logger.info('Assembling a ' + get_pretty_type_str(desired_type) + ' from all parsed children of ' + str(obj)
                + ' by passing them as attributes of the constructor')
    return dict_to_object(desired_type, results, logger, options, conversion_finder=self.conversion_finder)
def create_for_caught_error(parser: _BaseParserDeclarationForRegistries, desired_type: Type[T],
                            obj: PersistedObject, caught: Exception, options: Dict[str, Dict[str, Any]]):
    """
    Helper method provided because we actually can't put that in the constructor, it creates a bug in Nose tests
    https://github.com/nose-devs/nose/issues/725

    :param parser: the parser that failed
    :param desired_type: the type that was being parsed
    :param obj: the persisted object that was being parsed
    :param caught: the exception that was caught during parsing
    :return: a ParsingException carrying the original traceback
    """
    try:
        typ = get_pretty_type_str(desired_type)
    except Exception:
        # FIX: narrowed from a bare `except:`, which would also swallow KeyboardInterrupt/SystemExit
        typ = str(desired_type)
    return ParsingException('Error while parsing ' + str(obj) + ' as a ' + typ + ' with parser \''
                            + str(parser) + '\' using options=(' + str(options) + ') : caught \n '
                            + str(caught.__class__.__name__) + ' : ' + str(caught))\
        .with_traceback(caught.__traceback__)  # 'from e' was hiding the inner traceback. This is much better for debug
def _get_parsing_plan_for_multifile_children(self, obj_on_fs: PersistedObject, desired_type: Type[Any],
                                             logger: Logger) -> Dict[str, Any]:
    """
    Inspects the required type to find the names and types of its constructor arguments, then relies on the
    inner ParserFinder to build a plan for each of them.

    :param obj_on_fs: the persisted multifile object
    :param desired_type: the type the object should be parsed into
    :param logger: the logger to use
    :return: a dictionary of parsing plans, one per constructor attribute
    """
    # strictly-a-collection types have no pep484-typed constructor to inspect -> not supported here
    if is_collection(desired_type, strict=True):
        raise TypeError('Desired object type \'' + get_pretty_type_str(desired_type) + '\' is a collection, '
                        'so it cannot be parsed with this default object parser')

    # list the file children, then delegate to the inner implementation
    children_on_fs = obj_on_fs.get_multifile_children()
    return self.__get_parsing_plan_for_multifile_children(obj_on_fs, desired_type, children_on_fs, logger=logger)
def execute(self, logger: Logger, options: Dict[str, Dict[str, Any]]) -> T:
    """
    Overrides the parent method to add log messages.

    A thread-local flag distinguishes the root call from the recursive child calls so that the
    'Executing Parsing Plan' / 'Completed' messages are only emitted once per top-level execution.

    :param logger: the logger to use during parsing (optional: None is supported)
    :param options: a dictionary of option sets, passed through to the parent implementation
    :return: the result of the parent execute()
    """
    in_root_call = False
    if logger is not None:
        # log only for the root object, not for the children that will be created by the code below
        if not hasattr(_BaseParsingPlan.thrd_locals, 'flag_exec') \
                or _BaseParsingPlan.thrd_locals.flag_exec == 0:
            # print('Executing Parsing Plan for ' + str(self))
            logger.info('Executing Parsing Plan for ' + str(self))
            _BaseParsingPlan.thrd_locals.flag_exec = 1
            in_root_call = True

        # Common log message
        logger.info('Parsing ' + str(self))

    try:
        res = super(_BaseParsingPlan, self).execute(logger, options)
        # NOTE(review): this logs unconditionally — a None logger would fail here despite the docstring
        # claiming None is supported; confirm.
        logger.info('--> Successfully parsed a ' + get_pretty_type_str(self.obj_type) + ' from ' + self.location)
        if in_root_call:
            # print('Completed parsing successfully')
            logger.info('Completed parsing successfully')
        return res
    finally:
        # remove threadlocal flag if needed
        if in_root_call:
            _BaseParsingPlan.thrd_locals.flag_exec = 0
def create_for_parsing_plan_creation(origin_parser: AnyParser, parent_plan: AnyParser._RecursiveParsingPlan[T],
                                     caught: Dict[AnyParser, Exception]):
    """
    Helper method provided because we actually can't put that in the constructor, it creates a bug in Nose tests
    https://github.com/nose-devs/nose/issues/725

    :param origin_parser: the cascading parser whose plan creation failed
    :param parent_plan: the parsing plan that could not be built
    :param caught: a {parser: exception} dictionary for every delegate parser that was tried
    :return: a CascadeError with a detailed message
    """
    base_msg = 'Error while trying to build parsing plan to parse \'' + str(parent_plan.obj_on_fs_to_parse) \
               + '\' : \n' \
               + ' - required object type is \'' + get_pretty_type_str(parent_plan.obj_type) + '\' \n' \
               + ' - cascading parser is : ' + str(origin_parser) + '\n'
    msg = StringIO()
    # FIX: idiomatic truthiness instead of len(list(caught.keys())) > 0
    if caught:
        msg.writelines(' - parsers tried are : \n * ')
        msg.writelines('\n * '.join([str(p) for p in caught]))
        msg.writelines(' \n Caught the following exceptions: \n')
        # detail every caught exception, one section per delegate parser
        for p, err in caught.items():
            msg.writelines('--------------- From ' + str(p) + ' caught: \n')
            print_error_to_io_stream(err, msg)
            msg.write('\n')
    return CascadeError(base_msg + msg.getvalue())
def execute(self, logger: Logger, options: Dict[str, Dict[str, Any]]) -> T:
    """
    Overrides the parent method to add log messages.

    A thread-local flag distinguishes the root call from recursive child calls so that the
    'Executing Parsing Plan' / 'Completed' messages are only emitted once per top-level execution.

    :param logger: the logger to use during parsing (optional: None is supported)
    :param options: a dictionary of option sets, passed through to the parent implementation
    :return: the result of the parent execute()
    """
    in_root_call = False
    if logger is not None:
        # log only for the root object, not for the children that will be created by the code below
        if not hasattr(_BaseParsingPlan.thrd_locals, 'flag_exec') \
                or _BaseParsingPlan.thrd_locals.flag_exec == 0:
            # print('Executing Parsing Plan for ' + str(self))
            logger.debug('Executing Parsing Plan for [{location}]'
                         ''.format(location=self.obj_on_fs_to_parse.get_pretty_location(append_file_ext=False)))
            _BaseParsingPlan.thrd_locals.flag_exec = 1
            in_root_call = True

        # Common log message
        logger.debug('(P) ' + get_parsing_plan_log_str(self.obj_on_fs_to_parse, self.obj_type,
                                                       log_only_last=not in_root_call, parser=self.parser))
    try:
        res = super(_BaseParsingPlan, self).execute(logger, options)
        # two success-message formats: a compact one when DEBUG is enabled, a verbose one otherwise.
        # NOTE(review): both go through logger.info even when the logger might be None — confirm.
        if logger.isEnabledFor(DEBUG):
            logger.info('(P) {loc} -> {type} SUCCESS !'
                        ''.format(loc=self.obj_on_fs_to_parse.get_pretty_location(
                                      blank_parent_part=not GLOBAL_CONFIG.full_paths_in_logs,
                                      compact_file_ext=True),
                                  type=get_pretty_type_str(self.obj_type)))
        else:
            logger.info('SUCCESS parsed [{loc}] as a [{type}] successfully. Parser used was [{parser}]'
                        ''.format(loc=self.obj_on_fs_to_parse.get_pretty_location(compact_file_ext=True),
                                  type=get_pretty_type_str(self.obj_type),
                                  parser=str(self.parser)))
        if in_root_call:
            # print('Completed parsing successfully')
            logger.debug('Completed parsing successfully')
        return res
    finally:
        # remove threadlocal flag if needed
        if in_root_call:
            _BaseParsingPlan.thrd_locals.flag_exec = 0
def _correct_parsers_in_order_specific_type(self, parser_cache, typ, specific_parsers_strict,
                                            specific_parsers_non_strict=None):
    """
    Tests, for each extension available, that the find_all_matching_parsers query for type 'typ' and that
    extension returns the correct results.
    * generic parsers first (all the ones from self.all_parsers_generic that support that file extension)
    * then specific non-strict (if that list is provided)
    * then the specific strict (that list should be provided)

    :param parser_cache: the parser registry under test
    :param typ: the desired type to query for
    :param specific_parsers_strict: the expected strict-matching specific parsers
    :param specific_parsers_non_strict: the expected non-strict specific parsers (None => strict-mode query)
    :return:
    """
    # strict mode is implied by the absence of a non-strict expectation list
    strict = specific_parsers_non_strict is None

    for ext in self.all_extensions:
        print('Checking list of parsers returned for (' + ('' if strict else 'non-') + 'strict mode) type='
              + get_pretty_type_str(typ) + ' ext=' + ext)
        matching, no_type_match_but_ext_match, no_ext_match_but_type_match, no_match \
            = parser_cache.find_all_matching_parsers(strict=strict, desired_type=typ, required_ext=ext)

        # all generic should always be there, at the beginning
        print('First generic')
        generic = self.all_parsers_generic.copy()
        for g in self.all_parsers_generic:
            if ext not in g.supported_exts:
                generic.remove(g)
        # FIX: assertEquals is a deprecated alias of assertEqual (removed in recent Python versions)
        self.assertEqual(set(matching[0]), generic)

        if not strict:
            print('Then specific non-strict')
            specific_nonstrict = specific_parsers_non_strict.copy()
            for f in specific_parsers_non_strict:
                if ext not in f.supported_exts:
                    specific_nonstrict.remove(f)
            # remove those that are actually strict
            for t in specific_parsers_strict:
                if t in specific_nonstrict:
                    specific_nonstrict.remove(t)
            self.assertEqual(set(matching[1]), specific_nonstrict)

        # then all specific strict parsers supporting this extension
        print('Then specific strict')
        specific = specific_parsers_strict.copy()
        for s in specific_parsers_strict:
            if ext not in s.supported_exts:
                specific.remove(s)
        self.assertEqual(set(matching[2]), specific)
def create_not_able_to_convert(source: S, converter: Converter, desired_type: Type[T]):
    """
    Helper method provided because we actually can't put that in the constructor, it creates a bug in Nose tests
    https://github.com/nose-devs/nose/issues/725

    :param source: the source value that could not be ingested
    :param converter: the converter that refused it
    :param desired_type: the requested destination type
    :return: a ConversionException with a detailed message
    """
    base_msg = 'Converter ' + str(converter) + ' is not able to ingest source value \'' + str(source) + '\''\
               ' of type \'' + get_pretty_type_str(type(source)) + '\' and/or convert it to type \'' \
               + get_pretty_type_str(desired_type) + '\'.'
    # FIX: removed the doubled space in the original message ("produced  an output")
    base_msg += ' This can happen in a chain when the previous step in the chain is generic and actually produced ' \
                'an output of the wrong type/content'
    return ConversionException(base_msg)
def _get_parsing_plan_for_multifile_children(self, obj_on_fs: PersistedObject, desired_type: Type[Any],
                                             logger: Logger) -> Dict[str, Any]:
    """
    Inspects the required type to find the names and types of its constructor arguments, then relies on the
    inner ParserFinder to build a parsing plan for each of them.

    :param obj_on_fs: the persisted multifile object
    :param desired_type: the type the object should be parsed into
    :param logger: the logger to use
    :return: a dictionary of parsing plans, one per constructor attribute found on the file system
    """
    # strict collections have no pep484-typed constructor -> not supported by this parser
    if is_collection(desired_type, strict=True):
        raise TypeError('Desired object type \'' + get_pretty_type_str(desired_type) + '\' is a collection, '
                        'so it cannot be parsed with this default object parser')

    # First get the file children
    children_on_fs = obj_on_fs.get_multifile_children()

    # -- (a) extract the schema from the class constructor: name -> (type, is_mandatory)
    constructor_args_types_and_opt = get_constructor_attributes_types(desired_type)

    # -- (b) plan to parse each attribute required by the constructor
    # (sorted keys => reproducible results in case of multiple errors)
    children_plan = dict()
    for attribute_name, att_desc in sorted(constructor_args_types_and_opt.items()):
        attribute_type, attribute_is_mandatory = att_desc[0], att_desc[1]

        if attribute_name in children_on_fs.keys():
            # a file child exists for this attribute: find a parser and plan its parsing
            child_on_fs = children_on_fs[attribute_name]
            parser_found = self.parser_finder.build_parser_for_fileobject_and_desiredtype(child_on_fs,
                                                                                          attribute_type,
                                                                                          logger=logger)
            children_plan[attribute_name] = parser_found.create_parsing_plan(attribute_type, child_on_fs,
                                                                             logger=logger)
        elif attribute_is_mandatory:
            raise MissingMandatoryAttributeFiles.create(obj_on_fs, desired_type, attribute_name)
        else:
            # optional attribute missing on disk: warn and build the object without it
            logger.warning('----- Attribute ' + attribute_name + ' was not found on file system. However '
                           'it is not mandatory for the constructor of type ' + get_pretty_type_str(desired_type)
                           + ', so we\'ll build the object without it...')

    return children_plan
def __str__(self):
    """
    A string representation detailing, for each (type, parser) pair the cascading parser tried, whether the
    failure happened while creating or while executing the parsing plan, together with the caught exception.

    :return: the multi-line error message
    """
    # if no execution error was recorded, all failures happened at parsing-plan creation time
    is_at_pp_creation_time = len(self.pp_execution_errors) == 0
    base_msg = "Error while trying to {action} parsing plan to parse '{obj}' as a {typ}. Parsers tried:\n" \
               "".format(action='create' if is_at_pp_creation_time else 'execute',
                         obj=self.parent_plan.obj_on_fs_to_parse,
                         typ=get_pretty_type_str(self.parent_plan.obj_type))
    msg = StringIO()

    # sort by parser in the list: collect (type, parser, creation?, error) tuples in registration order
    tried_and_errors = []
    for t, p in self.origin_parser._parsers_list:
        # a None entry type means 'the parent plan object type'
        t = t or self.parent_plan.obj_type
        if (t, p) in self.pp_creation_errors:
            is_creation_err = True
            err = self.pp_creation_errors[(t, p)]
        elif (t, p) in self.pp_execution_errors:
            is_creation_err = False
            err = self.pp_execution_errors[(t, p)]
        else:
            # every tried pair must have an error in one of the two dicts
            raise Exception('Internal error - this should not happen, please file an issue in the tracker')
        tried_and_errors.append((t, p, is_creation_err, err))

    if len(tried_and_errors) > 0:
        # first a compact "parser -> type" bullet list...
        msg.writelines(' * ' + '\n * '.join(["'{p}' -> <{t}>".format(p=p,
                                                                     t=get_pretty_type_str(
                                                                         t or self.parent_plan.obj_type))
                                             for t, p, is_creation_err, err in tried_and_errors]))
        msg.writelines('\n\nCaught the following exceptions: \n')
        # ...then the full details of each caught exception
        for t, p, is_creation_err, err in tried_and_errors:
            msg.writelines("--------------- From '{p}' -> <{t}> caught the following when {expl} parsing plan: \n"
                           "".format(p=p, t=get_pretty_type_str(t or self.parent_plan.obj_type),
                                     expl='creating' if is_creation_err else 'executing'))
            print_error_to_io_stream(err, msg)
            msg.write('\n')

    return base_msg + msg.getvalue()
def get_parsing_plan_log_str(obj_on_fs_to_parse, desired_type, parser):
    """
    Utility method used by several classes to log a message indicating that a given file object is planned to be
    parsed to the given object type with the given parser. It is in particular used in str(ParsingPlan), but
    not only.

    :param obj_on_fs_to_parse: the persisted object to be parsed
    :param desired_type: the type it will be parsed into
    :param parser: the parser that will be used
    :return: the log message string
    """
    pretty_type = get_pretty_type_str(desired_type)
    return str(obj_on_fs_to_parse) + ' > ' + pretty_type + ' ------- using ' + str(parser)
def insert_conversion_step_at_beginning(self, converter: Converter[S, T], inplace: bool = False): """ Utility method to insert a converter at the beginning of this chain. If inplace is True, this object is modified and None is returned. Otherwise, a copy is returned :param converter: the converter to add :param inplace: boolean indicating whether to modify this object (True) or return a copy (False) :return: None or a copy with the converter added """ # it the added converter is generic, raise an error if converter.is_generic(): raise ValueError( 'Cannot add this converter at the beginning of this chain : it is already generic !' ) # if the current chain is able to transform its input into a valid input for the new converter elif converter.is_able_to_convert(self.strict, from_type=converter.from_type, to_type=None)[0]: if inplace: self._converters_list.insert(0, converter) # update the current source type self.from_type = converter.from_type return else: new = copy(self) new._converters_list.insert(0, converter) # update the current destination type new.from_type = converter.from_type return new else: raise TypeError( 'Cannnot register a converter on this conversion chain : source type \'' + get_pretty_type_str(converter.from_type) + '\' is not compliant with current destination type of the chain : \'' + get_pretty_type_str(self.to_type) + ' (this chain performs ' + ('' if self.strict else 'non-') + 'strict mode matching)')
def create(item_type: Type[Any], constructor_atts: List[str], invalid_property_name: str):
    """
    Helper method provided because we actually can't put that in the constructor, it creates a bug in Nose tests
    https://github.com/nose-devs/nose/issues/725

    :param item_type: the type whose constructor was being used
    :param constructor_atts: the list of valid constructor attribute names
    :param invalid_property_name: the offending property name found in the configuration
    :return: an InvalidAttributeNameForConstructorError with a detailed message
    """
    # FIX: added the missing space between "('<name>')" and "that is not an attribute..."
    return InvalidAttributeNameForConstructorError('Cannot parse object of type <' + get_pretty_type_str(item_type)
                                                   + '> using the provided configuration file: configuration '
                                                   + 'contains a property name (\'' + invalid_property_name + '\') '
                                                   + 'that is not an attribute of the object constructor. <'
                                                   + get_pretty_type_str(item_type) + '> constructor attributes '
                                                   + 'are : ' + str(constructor_atts))
def get_parsing_plan_log_str(obj_on_fs_to_parse, desired_type, log_only_last: bool, parser):
    """
    Utility method used by several classes to log a message indicating that a given file object is planned to be
    parsed to the given object type with the given parser. It is in particular used in str(ParsingPlan), but
    not only.

    :param obj_on_fs_to_parse: the persisted object to be parsed
    :param desired_type: the type it will be parsed into
    :param log_only_last: a flag to only log the last part of the file path (default False). Note that this can
    be overriden by a global configuration 'full_paths_in_logs'
    :param parser: the parser that will be used
    :return: the log message string
    """
    # the global configuration wins over the per-call flag
    blank_parent = log_only_last and not GLOBAL_CONFIG.full_paths_in_logs
    loc = obj_on_fs_to_parse.get_pretty_location(blank_parent_part=blank_parent, compact_file_ext=True)
    return '{loc} -> {type} ------- using {parser}'.format(loc=loc,
                                                           type=get_pretty_type_str(desired_type),
                                                           parser=str(parser))
def create_for_wrong_result_type(parser: _BaseParserDeclarationForRegistries, desired_type: Type[T],
                                 obj: PersistedObject, result: T, options: Dict[str, Dict[str, Any]]):
    """
    Helper method provided because we actually can't put that in the constructor, it creates a bug in Nose tests
    https://github.com/nose-devs/nose/issues/725

    :param parser: the parser that produced the ill-typed result
    :param desired_type: the type that was requested
    :param obj: the persisted object that was parsed
    :param result: the ill-typed object the parser returned
    :param options: the options used during parsing
    :return: a WrongTypeCreatedError with a detailed message
    """
    details = dict(obj=obj, typ=get_pretty_type_str(desired_type), p=parser, opts=options,
                   tret=type(result), ret=result)
    message = ("Error while parsing {obj} as a {typ} with parser {p} using options=({opts}) - parser returned an "
               "object of wrong type {tret}: {ret}").format(**details)
    return WrongTypeCreatedError(message)
def _capabilities_equal_query(self, strict):
    """
    Tests that, for all parser registry queries that can be done,
    * the order of the parsers returned by `find_all_matching_parsers` is correct for all categories. That is,
    it is consistent with the one returned in `get_capabilities_by_ext`
    * there are no duplicates

    :param strict: whether to query in strict type-matching mode
    :return:
    """
    # repeat on several shuffled registries so ordering guarantees are actually exercised
    for i in range(1, 10):
        r = self.create_shuffled_registry()
        capabilities_by_ext = r.get_capabilities_by_ext(strict_type_matching=strict)

        # consistency check : each entry should reflect the value returned by find_parsers
        for ext in capabilities_by_ext.keys():
            for typ in capabilities_by_ext[ext].keys():
                print('Asserting (' + ('' if strict else 'non-') + 'strict mode) type='
                      + get_pretty_type_str(typ) + ' ext=' + ext)

                # query
                matching, no_type_match_but_ext_match, no_ext_match_but_type_match, no_match \
                    = r.find_all_matching_parsers(strict, desired_type=typ, required_ext=ext)
                matching_parsers = matching[0] + matching[1] + matching[2]

                # capabilities, concatenated in priority order
                capa = []
                if '1_exact_match' in capabilities_by_ext[ext][typ]:
                    capa = capa + capabilities_by_ext[ext][typ]['1_exact_match']
                if '2_approx_match' in capabilities_by_ext[ext][typ]:
                    capa = capa + capabilities_by_ext[ext][typ]['2_approx_match']
                if '3_generic' in capabilities_by_ext[ext][typ]:
                    capa = capa + capabilities_by_ext[ext][typ]['3_generic']

                # asserts
                # FIX: assertEquals is a deprecated alias of assertEqual (removed in recent Python versions)
                self.assertEqual(capa, list(reversed(matching_parsers)))

                # --remove duplicates, preserving order
                capa_no_dup = list(OrderedDict.fromkeys(capa))
                self.assertEqual(capa_no_dup, list(reversed(matching_parsers)))
def create(item_type: Type[Any], constructor_args: Dict[str, Any], cause: Exception):
    """
    Helper method provided because we actually can't put that in the constructor, it creates a bug in Nose tests
    https://github.com/nose-devs/nose/issues/725

    :param item_type: the type whose constructor failed
    :param constructor_args: the parsed contents passed to the constructor
    :param cause: the exception raised by the constructor
    :return: an ObjectInstantiationException carrying the original traceback
    """
    message = ('Error while building object of type <' + get_pretty_type_str(item_type)
               + '> using its constructor and parsed contents : ' + str(constructor_args) + ' : \n'
               + str(cause.__class__) + ' ' + str(cause))
    # attach the original traceback instead of chaining with 'from e', which was hiding the inner
    # traceback. This is much better for debug
    err = ObjectInstantiationException(message)
    return err.with_traceback(cause.__traceback__)
def create(obj: PersistedObject, obj_type: Type[Any], arg_name: str):
    """
    Helper method provided because we actually can't put that in the constructor, it creates a bug in Nose tests
    https://github.com/nose-devs/nose/issues/725

    :param obj: the multifile persisted object that could not be built
    :param obj_type: the desired constructor type
    :param arg_name: the mandatory constructor argument whose file was not found
    :return: a MissingMandatoryAttributeFiles with a detailed message
    """
    # FIX: added the missing space between the quoted argument name and "was not found"
    return MissingMandatoryAttributeFiles('Multifile object ' + str(obj) + ' cannot be built from constructor of '
                                          'type ' + get_pretty_type_str(obj_type)
                                          + ', mandatory constructor argument \'' + arg_name + '\' was not found on '
                                          'filesystem')
def __str__(self):
    """
    String representation of the cascading parser, listing the delegate parsers in trial order, with their
    associated desired types when any delegate after the first declares one.

    :return: the representation string
    """
    if len(self._parsers_list) > 1:
        first_typ = self._parsers_list[0][0]
        if all([p[0] is None for p in self._parsers_list[1:]]):
            # FIX: the joined 'rest' no longer has a stray ']' appended — the format string
            # already closes the bracket, so the original produced e.g. "[Try 'a' then 'b]']"
            return "[Try '{first}' then '{rest}']" \
                   "".format(first=self._parsers_list[0][1],
                             rest="' then '".join([str(p[1]) for p in self._parsers_list[1:]]))
        else:
            # same FIX here: dropping the extra ']' that duplicated the format string's closing bracket
            return "[Try '{first}' -> [{first_typ}] then {rest}]" \
                   "".format(first=self._parsers_list[0][1],
                             first_typ=get_pretty_type_str(first_typ),
                             rest=" then ".join(["'{p}' -> [{p_typ}]".format(p=p[1],
                                                                             p_typ=get_pretty_type_str(p[0]))
                                                 for p in self._parsers_list[1:]]))
    elif len(self._parsers_list) == 1:
        # useless...
        return 'CascadingParser[' + str(self._parsers_list[0]) + ']'
    else:
        return 'CascadingParser[Empty]'
def create_parsing_plan(self, desired_type: Type[T], filesystem_object: PersistedObject, logger: Logger,
                        _main_call: bool = True):
    """
    Implements the abstract parent method by using the recursive parsing plan impl. Subclasses wishing to
    produce their own parsing plans should rather override _create_parsing_plan in order to benefit from this
    same log msg.

    A thread-local flag marks the root call so that the 'Building...' / 'created successfully' messages are
    only logged once, not for every recursive child call.

    :param desired_type: the type the filesystem object should be parsed into
    :param filesystem_object: the persisted object to build a plan for
    :param logger: the logger to use
    :param _main_call: internal parameter for recursive calls. Should not be changed by the user.
    :return: the parsing plan produced by _create_parsing_plan
    """
    in_root_call = False

    # -- log msg only for the root call, not for the children that will be created by the code below
    if _main_call and (not hasattr(AnyParser.thrd_locals, 'flag_init')
                       or AnyParser.thrd_locals.flag_init == 0):
        logger.info('Building a parsing plan to parse ' + str(filesystem_object) + ' into a ' +
                    get_pretty_type_str(desired_type))
        AnyParser.thrd_locals.flag_init = 1
        in_root_call = True

    # -- create the parsing plan
    try:
        pp = self._create_parsing_plan(desired_type, filesystem_object, logger)
    finally:
        # remove threadlocal flag if needed
        if in_root_call:
            AnyParser.thrd_locals.flag_init = 0

    # -- log success only if in root call
    if in_root_call:
        logger.info('Parsing Plan created successfully')

    # -- finally return
    return pp
def parse_collection(self, item_file_prefix: str, base_item_type: Type[T], item_name_for_log: str = None,
                     file_mapping_conf: FileMappingConfiguration = None,
                     options: Dict[str, Dict[str, Any]] = None) -> Dict[str, T]:
    """
    Main method to parse a collection of items of type 'base_item_type'.

    :param item_file_prefix: the location prefix shared by all items of the collection
    :param base_item_type: the type of each item in the collection
    :param item_name_for_log: an optional name used in log messages only
    :param file_mapping_conf: an optional file mapping configuration
    :param options: an optional dictionary of option sets for the parsers
    :return: a dictionary mapping item names to parsed items
    """
    # validate the (optional) display name used in the log message below
    log_name = item_name_for_log or ''
    check_var(log_name, var_types=str, var_name='item_name_for_log')

    # the collection is represented as a dictionary of named items
    collection_type = Dict[str, base_item_type]

    if log_name:
        log_name = log_name + ' '
    self.logger.debug('**** Starting to parse ' + log_name + 'collection of <'
                      + get_pretty_type_str(base_item_type) + '> at location ' + item_file_prefix + ' ****')

    # delegate to the shared parsing routine
    return self._parse__item(collection_type, item_file_prefix, file_mapping_conf, options=options)
def dict_to_object(desired_type: Type[T], contents_dict: Dict[str, Any], logger: Logger,
                   options: Dict[str, Dict[str, Any]], conversion_finder: ConversionFinder = None,
                   is_dict_of_dicts: bool = False) -> T:
    """
    Utility method to create an object from a dictionary of constructor arguments. Constructor arguments that
    dont have the correct type are intelligently converted if possible

    :param desired_type: the type of object to create
    :param contents_dict: the dictionary of constructor arguments
    :param logger: the logger to use
    :param options: a dictionary of option sets
    :param conversion_finder: an optional ConversionFinder used to convert ill-typed values
    :param is_dict_of_dicts: when True, each value is itself a dict used to build the corresponding attribute
    :return: the created object
    """
    check_var(desired_type, var_types=type, var_name='obj_type')
    check_var(contents_dict, var_types=dict, var_name='contents_dict')

    # guard clause: a 'strict' collection has no pep484-typed constructor, so this creator cannot handle it
    if is_collection(desired_type, strict=True):
        raise TypeError('Desired object type \'' + get_pretty_type_str(desired_type) + '\' is a collection, '
                        'so it cannot be created using this generic object creator')

    # delegate to the inner implementation
    return _dict_to_object(desired_type, contents_dict, logger=logger, options=options,
                           conversion_finder=conversion_finder, is_dict_of_dicts=is_dict_of_dicts)
def get_pretty_type_str(self) -> str:
    """Return a human-readable string for this plan's object type."""
    pretty = get_pretty_type_str(self.obj_type)
    return pretty
def dict_to_object(desired_type: Type[T], contents_dict: Dict[str, Any], logger: Logger,
                   options: Dict[str, Dict[str, Any]], conversion_finder: ConversionFinder = None,
                   is_dict_of_dicts: bool = False) -> T:
    """
    Utility method to create an object from a dictionary of constructor arguments. Constructor arguments that
    dont have the correct type are intelligently converted if possible

    :param desired_type: the type of object to create
    :param contents_dict: the dictionary of constructor arguments (or sub-dicts when is_dict_of_dicts)
    :param logger: the logger to use
    :param options: a dictionary of option sets
    :param conversion_finder: an optional ConversionFinder used to convert ill-typed values
    :param is_dict_of_dicts: when True, each value of contents_dict is itself a dict used to recursively build
    the corresponding constructor attribute
    :return: an instance of desired_type built with the (possibly converted) arguments
    :raises TypeError: if desired_type is strictly a collection
    """
    check_var(desired_type, var_types=type, var_name='obj_type')
    check_var(contents_dict, var_types=dict, var_name='contents_dict')

    # a 'strict' collection has no pep484-typed constructor, so this creator cannot handle it
    if is_collection(desired_type, strict=True):
        raise TypeError('Desired object type \'' + get_pretty_type_str(desired_type) + '\' is a collection, '
                        'so it cannot be created using this generic object creator')

    # pep484 'schema' of the constructor: name -> (type, is_mandatory)
    constructor_args_types_and_opt = get_constructor_attributes_types(desired_type)

    try:
        # for each attribute, convert the types of its parsed values if required
        dict_for_init = dict()
        for attr_name, provided_attr_value in contents_dict.items():
            # check if this attribute name is required by the constructor
            if attr_name in constructor_args_types_and_opt.keys():
                # check the theoretical type wanted by the constructor
                attr_type_required = constructor_args_types_and_opt[attr_name][0]
                if not is_dict_of_dicts:
                    if isinstance(attr_type_required, type):
                        # convert the parsed value to the required type if needed
                        full_attr_name = get_pretty_type_str(desired_type) + '.' + attr_name
                        dict_for_init[attr_name] = ConversionFinder.try_convert_value(conversion_finder,
                                                                                      full_attr_name,
                                                                                      provided_attr_value,
                                                                                      attr_type_required,
                                                                                      logger, options)
                    else:
                        # no usable type information: the attribute will only be used 'as is'
                        warning('Constructor for type <' + get_pretty_type_str(desired_type) + '> has no PEP484 Type '
                                'hint, trying to use the parsed value in the dict directly')
                        dict_for_init[attr_name] = provided_attr_value
                else:
                    # in that mode, the attribute value itself is a dict, so the attribute needs to be built
                    # from that dict first
                    if isinstance(provided_attr_value, dict):
                        # recurse : try to build this attribute from the dictionary provided. We need to know
                        # the type for this otherwise we wont be able to call the constructor :)
                        if attr_type_required is Parameter.empty or not isinstance(attr_type_required, type):
                            raise TypeInformationRequiredError.create_for_object_attributes(desired_type,
                                                                                            attr_name)
                        else:
                            # we can build the attribute from the sub-dict
                            dict_for_init[attr_name] = dict_to_object(attr_type_required, provided_attr_value,
                                                                      logger, options,
                                                                      conversion_finder=conversion_finder)
                    else:
                        raise ValueError('Error while trying to build object of type ' + str(desired_type) + ' from a '
                                         'dictionary of dictionaries. Entry \'' + attr_name + '\' is not a dictionary')
            else:
                # FIX: was `attr_name is 'DEFAULT'` — identity comparison with a string literal is not
                # guaranteed to work; use equality instead
                if is_dict_of_dicts and attr_name == 'DEFAULT':
                    # -- tolerate but ignore - this is probably due to a configparser 'DEFAULT' section
                    pass
                else:
                    # the dictionary entry does not correspond to a valid attribute of the object
                    raise InvalidAttributeNameForConstructorError.create(
                        desired_type,
                        list(set(constructor_args_types_and_opt.keys()) - {'self'}),
                        attr_name)

        # create the object using its constructor
        try:
            return desired_type(**dict_for_init)
        except Exception as e:
            # Wrap into an Exception
            raise ObjectInstantiationException.create(desired_type, dict_for_init, e)

    except TypeError as e:
        raise CaughtTypeErrorDuringInstantiation.create(desired_type, contents_dict, e)
def __get_parsing_plan_for_multifile_children(self, obj_on_fs: PersistedObject, desired_type: Type[Any],
                                              children_on_fs: Dict[str, PersistedObject], logger: Logger) \
        -> Dict[str, Any]:
    """
    Simply inspects the required type to find the names and types of its constructor arguments. Then relies on
    the inner ParserFinder to parse each of them.

    :param obj_on_fs: the persisted multifile object
    :param desired_type: the type the object should be parsed into
    :param children_on_fs: the file children of obj_on_fs, keyed by name
    :param logger: the logger to use
    :return: a dictionary of parsing plans, one per constructor attribute found on the file system
    """
    # -- (a) collect pep-484 information in the class constructor to be able to understand what is required
    # (schema: attribute name -> (type, is_mandatory))
    constructor_args_types_and_opt = get_constructor_attributes_types(desired_type)

    # -- (b) plan to parse each attribute required by the constructor
    children_plan = dict()  # results will be put in this object

    # --use sorting in order to lead to reproducible results in case of multiple errors
    for attribute_name, att_desc in sorted(constructor_args_types_and_opt.items()):
        attribute_is_mandatory = att_desc[1]
        attribute_type = att_desc[0]

        # get the child
        if attribute_name in children_on_fs.keys():
            child_on_fs = children_on_fs[attribute_name]

            # find a parser (the finder may narrow the desired type, hence the returned `t`)
            t, parser_found = self.parser_finder.build_parser_for_fileobject_and_desiredtype(
                child_on_fs, attribute_type, logger=logger)

            # create a parsing plan (not a root call: _main_call=False keeps child logs quiet)
            children_plan[attribute_name] = parser_found.create_parsing_plan(t, child_on_fs, logger=logger,
                                                                             _main_call=False)
        else:
            if attribute_is_mandatory:
                raise MissingMandatoryAttributeFiles.create(obj_on_fs, desired_type, attribute_name)
            else:
                # we don't care : optional attribute
                # dont use warning since it does not show up nicely
                msg = 'NOT FOUND - This optional constructor attribute for type ' \
                      + get_pretty_type_str(desired_type) + ' was not found on file system, but this may be normal'\
                      ' - this message is displayed \'just in case\'.'
                # two warning formats: a compact one when DEBUG is enabled, a verbose one otherwise
                if logger.isEnabledFor(DEBUG):
                    logger.warning('(B) ' + obj_on_fs.get_pretty_child_location(attribute_name,
                                                                                blank_parent_part=True)
                                   + ': ' + msg)
                else:
                    logger.warning('WARNING parsing [{loc}] as a [{typ}]: optional constructor attribute [{att}] '
                                   'not found on file system. This may be normal - this message is displayed \'just'
                                   ' in case\'.'.format(
                                       loc=obj_on_fs.get_pretty_location(blank_parent_part=False,
                                                                         append_file_ext=False),
                                       typ=get_pretty_type_str(desired_type),
                                       att=attribute_name))

    return children_plan
def __str__(self):
    """String representation listing this parser's supported types and file extensions."""
    pretty_types = [get_pretty_type_str(typ) for typ in self.supported_types]
    return 'Parser for ' + str(pretty_types) + ' for extensions ' + str(self.supported_exts)