def __init__(self, used_symbols_lister, header_lister, is_implementation_file_func, resource_resolver):
    """Wire up the collaborators used for symbol and header analysis.

    NOTE(review): an earlier TODO claimed resource_resolver is never
    used; it is still stored on the instance here — confirm whether it
    can be dropped before removing the parameter.
    """
    assert isinstance_or_duck(used_symbols_lister, UsedSymbolsLister)
    assert isinstance_or_duck(header_lister, HeaderLister)
    self.__logger = logging.getLogger(self.__class__.__module__)
    self.__symbol_scanner = DefaultSymbolScanner()
    self.__used_symbols_lister = used_symbols_lister
    self.__header_lister = header_lister
    self.__is_implementation_file = is_implementation_file_func
    self.__resource_resolver = resource_resolver
def get_omitted_modules_by_type(physical_module_describer, module_list_supply, dependency_filter_config_class):
    """Group the omitted modules by their physical module type names.

    @param physical_module_describer: maps a module to its set of
        PhysicalModuleTypes
    @param module_list_supply: supplies the module list and module sizes
    @param dependency_filter_config_class: configuration class deciding
        which modules are omitted
    @return: generator of (type_names_tuple, ((module, size), ...)) pairs
    """
    assert isinstance_or_duck(physical_module_describer, PhysicalModuleDescriber)
    assert isinstance_or_duck(module_list_supply, ModuleListSupply)
    # Tuple-parameter unpacking in lambdas was removed in Python 3
    # (PEP 3113); index the (module, size) pair explicitly instead so
    # this works on both Python 2 and 3.
    key_func = lambda module_and_size: tuple(
        PhysicalModuleTypes.names(
            physical_module_describer.get_physical_module_types(
                module_and_size[0])))
    grouped_modules = IterTools.sort_and_group(
        key_func,
        ModuleListHelper.get_omitted_modules_with_size(
            module_list_supply, dependency_filter_config_class))
    return ((x, tuple(y)) for (x, y) in grouped_modules)
def __init__(self, local_source_resource_resolver, generation_strategy, prepare_func, process_func):
    """Store the resolver, strategy and callbacks; reset all counters."""
    assert isinstance_or_duck(local_source_resource_resolver, ResourceResolver)
    assert isinstance_or_duck(generation_strategy, GenerationStrategy)
    self.__logger = logging.getLogger(self.__class__.__module__)
    self.__resource_resolver = local_source_resource_resolver
    self.__generation_strategy = generation_strategy
    self.__prepare_func = prepare_func
    self.__process_func = process_func
    # Per-file outcome bookkeeping, filled in while processing.
    self.__successful_files = []
    self.__skipped_files = []
    self.__error_files = []
    self.__fatal_files = []
    self.__nested_statistics = {}
def RepairingIndividualFileRepairProcessor(local_source_resource_resolver, generation_strategy, is_implementation_file_func, required_include_files_calculator, include_list_generator_factory):
    """Factory: build an IndividualCppFileTransformer that repairs include lists."""
    assert isinstance_or_duck(required_include_files_calculator, RequiredIncludeFilesCalculator)

    def prepare_func(repair_path_rel_to_root_unix, input_resource):
        # Compute the include files the project file actually requires.
        return required_include_files_calculator.calculate_required_include_files(
            project_file=DefaultProjectFile(
                path_rel_to_root_unix=repair_path_rel_to_root_unix,
                local_repository_root=input_resource.get_resolution_root(),
                resource=input_resource))

    file_normalizer = BaseFileNormalizer(
        is_implementation_file_func=is_implementation_file_func,
        include_list_generator_factory=include_list_generator_factory)

    def process_func(repair_path, intermediate_result, input_file, output_file):
        # Rewrite the file using the include paths computed by prepare_func.
        return file_normalizer.process(
            repair_path=repair_path,
            included_paths=intermediate_result,
            input_file=input_file,
            output_file=output_file)

    return IndividualCppFileTransformer(
        local_source_resource_resolver=local_source_resource_resolver,
        generation_strategy=generation_strategy,
        prepare_func=prepare_func,
        process_func=process_func)
def get_omitted_modules_with_size(module_list_supply, dependency_filter_config_class):
    """Pair every omitted module with its size, lazily, as (module, size)."""
    assert isinstance_or_duck(module_list_supply, ModuleListSupply)
    omitted = ModuleListHelper.get_omitted_modules(
        module_list_supply, dependency_filter_config_class)
    size_of = module_list_supply.get_module_size
    return ((m, size_of(m)) for m in omitted)
def process(self, input_resource, generator_func):
    """Run generator_func over input_resource while keeping a backup.

    Fresh run: writes to `<name>.new`, then on success renames the
    original to `<name>.orig` and moves `.new` into place.  If a
    `.orig` backup already exists (a previous run got that far), the
    backup is treated as the source and is regenerated in place.

    @type input_resource: FileResource
    @param generator_func: callable(input_resource, output_resource);
        its return value is passed through to the caller
    @return: whatever generator_func returns
    @raise Exception: re-raises anything generator_func raises, after
        attempting to restore state via self.__revert
    """
    assert isinstance_or_duck(input_resource, FileResource)
    new_file_local = input_resource.name() + '.new'
    # Remove a stale .new leftover from an earlier aborted run.
    if os.path.exists(new_file_local):
        os.unlink(new_file_local)
    backup_resource = FileResource(
        input_resource.name() + '.orig',
        resolution_root=input_resource.get_resolution_root())
    if backup_resource.exists():
        self.__logger.info(
            "Backup file %s already exists, regenerating from backup file"
            % (backup_resource, ))
        # NOTE(review): input and output both alias the backup file
        # here, so generator_func reads and overwrites the same .orig
        # file — confirm this in-place regeneration is intended.
        input_resource = backup_resource
        output_resource = input_resource
    else:
        # No-op self-assignment kept as-is for symmetry with the
        # backup branch above.
        input_resource = input_resource
        output_resource = FileResource(
            new_file_local,
            resolution_root=input_resource.get_resolution_root())
    try:
        retval = generator_func(input_resource, output_resource)
    except Exception:
        # Best effort to restore the pre-run state before propagating.
        self.__revert(input_resource, output_resource, backup_resource)
        raise
    # Only the fresh-run path has distinct names: swap .new into place
    # and keep the original content as the .orig backup.
    if output_resource.name() != input_resource.name():
        os.rename(input_resource.name(), backup_resource.name())
        os.rename(output_resource.name(), input_resource.name())
    return retval
def process(self, input_resource, generator_func):
    """Run generator_func, writing to the mirrored path under the target root.

    @type input_resource: Resource (but the delimiters used by the
        input resource must be compatible with the delimiters of os.path)
    """
    assert isinstance_or_duck(input_resource, FileResource)
    root = input_resource.get_resolution_root()
    if not root:
        raise ValueError(
            "Input resource %s has no resolution root, cannot determine correct output path"
            % input_resource)
    relpath = PathTools.relative_path(
        input_resource.name(),
        relative_to=root.name() + os.path.sep,
        ignore_absolute=False)
    output_resource = self.__target_resolver.resolve(relpath, force_check=False)
    output_directory = os.path.dirname(output_resource.name())
    if os.path.exists(output_directory):
        if not os.path.isdir(output_directory):
            raise ValueError(
                "File %s is in the way, this is the designated output directory"
                % output_directory)
    else:
        os.makedirs(output_directory)
    try:
        return generator_func(input_resource, output_resource)
    except Exception:
        # On failure, optionally leave a copy of the input at the
        # output location to aid debugging, then propagate.
        if self.__copy_on_error:
            ResourceUtil.copy(input_resource, output_resource)
        raise
def __init__(self, include_path_canonicalizer, filemap_factory_func):
    """Build the fuzzy resolver from a filemap of canonic include paths.

    @param include_path_canonicalizer: supplies canonic paths for both
        angle- and quote-style includes
    @type include_path_canonicalizer: IIncludePathCanonicalizer
    @param filemap_factory_func: a function which returns a filemap for
        a given set of canonic paths, such as FileMapFactory.get_filemap
    """
    assert isinstance_or_duck(include_path_canonicalizer, IIncludePathCanonicalizer)
    self.__include_path_canonicalizer = include_path_canonicalizer
    filemap = filemap_factory_func(
        self.__include_path_canonicalizer.get_canonic_paths(
            (IncludeSpecificationTypes.ANGLE,
             IncludeSpecificationTypes.QUOTED)))
    self.__logger = logging.getLogger(self.__class__.__module__)
    if self.__logger.isEnabledFor(logging.DEBUG):
        # len(filemap) avoids materializing the key list; items()
        # (rather than the Python-2-only iteritems()) keeps this
        # working on Python 3 as well.  Only runs at DEBUG level.
        self.__logger.debug(
            "using filemap with %i keys (first entries: %s)"
            % (len(filemap), list(islice(filemap.items(), 10))))
    self.__fuzzy_resolver_internal = FuzzyResolverInternal(filemap)
def get_ungrouped_modules(module_list_supply, module_grouper_class):
    """Return the modules that fall outside every module group."""
    assert isinstance_or_duck(module_list_supply, ModuleListSupply)
    all_modules = list(module_list_supply.get_module_list())
    grouper = module_grouper_class(all_modules)
    return ModuleListHelper.filter_ungrouped(all_modules, grouper)
def filter_ungrouped(modules, module_grouper):
    """Lazily yield the modules for which the grouper has no group prefix."""
    assert isinstance_or_duck(module_grouper, ModuleGrouper)
    def is_ungrouped(module):
        # A falsy prefix means the grouper places the module in no group.
        return not module_grouper.get_node_group_prefix(module)
    return (module for module in modules if is_ungrouped(module))
def get_omitted_modules(module_list_supply, dependency_filter_config_class):
    """Return the modules that the dependency filter configuration omits."""
    assert isinstance_or_duck(module_list_supply, ModuleListSupply)
    all_modules = list(module_list_supply.get_module_list())
    config = dependency_filter_config_class(modules=all_modules)
    return ModuleListHelper.filter_omitted(all_modules, config)
def filter_omitted(modules, filter_config):
    """Lazily yield the modules skipped by filter_config in any role.

    A module counts as omitted when the configuration skips it
    entirely, skips it as a dependency source, or skips it as a
    dependency target.
    """
    assert isinstance_or_duck(filter_config, DependencyFilterConfiguration)
    # A generator expression replaces itertools.ifilter, which does not
    # exist in Python 3; laziness and short-circuit order are preserved.
    return (module for module in modules
            if filter_config.skip_module(module)
            or filter_config.skip_module_as_source(module)
            or filter_config.skip_module_as_target(module))
def _add_irregularity(self, result):
    """Append a DiagnosticResult to the collected irregularities."""
    assert isinstance_or_duck(result, DiagnosticResult)
    self._irregularities.append(result)