def test_key_delta(self):
    keys = ['a', 'b', 'c']
    key_hasher = lambda x: f'new_{x}'
    cache = {'b': 'old_b', 'c': 'new_c', 'd': 'new_d'}
    updated, removed, new_cache = key_delta(keys, key_hasher, cache)
    self.assertEqual(updated, ['a', 'b'])
    self.assertEqual(removed, ['d'])
    self.assertEqual(new_cache, {'a': 'new_a', 'b': 'new_b', 'c': 'new_c'})
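# key_delta itself is not shown in this section; the following is a minimal
# sketch that is consistent with the test above (the actual implementation may
# differ). Given the current keys, a hashing function, and the previously cached
# key -> hash mapping, it reports which keys are new or changed, which cached
# keys have disappeared, and the refreshed mapping.
def key_delta(keys, key_hasher, cache):
    new_cache = {key: key_hasher(key) for key in keys}
    updated = [key for key in keys
               if key not in cache or cache[key] != new_cache[key]]
    removed = [key for key in cache if key not in new_cache]
    return updated, removed, new_cache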
def format_test_sources(file_system: FileSystem,
                        resources: StageResources,
                        cache: Dict[str, Any],
                        program_arguments: Dict[str, Any],
                        configuration: Dict[str, Any],
                        remote_proxy: RemoteProxy):
    test_sources = resources['test_sources']
    hasher = lambda f: hash_file(file_system, f)
    updated, _, new_cache = key_delta(test_sources, hasher, cache)
    if updated:
        clang_format_executable = resources['clang_format_executable']
        file_system.execute_and_fail_on_bad_return(
            [clang_format_executable, '-i', '-style=file'] + updated)
    resources['formatted_test_sources'] = test_sources
    cache.clear()
    cache.update(new_cache)
def pack(file_system: FileSystem,
         package_path: str,
         package_files: List[Tuple[str, str]],
         cache: Dict[str, Any]):
    hasher = lambda p: hash_file(file_system, p)
    updated, removed, new_cache = key_delta([p for p, _ in package_files], hasher, cache)
    if updated or removed or not file_system.exists(package_path):
        with file_system.open_tarfile(package_path, 'w:gz') as archive:
            for file_path, package_file_path in package_files:
                archive.add(file_path, package_file_path)
    cache.clear()
    cache.update(new_cache)
def link_library_using_cache(file_system: FileSystem,
                             compiler: Compiler,
                             libraries_root: str,
                             libraries_interfaces_root: str,
                             symbols_tables_root: str,
                             external_libraries_root: str,
                             external_libraries_interfaces_root: str,
                             objects: List[str],
                             external_libraries: List[str],
                             external_libraries_interfaces: List[str],
                             organization: str,
                             artifact: str,
                             version: str,
                             cache: Dict[str, Any]) -> Tuple[str, str, str]:
    file_system.create_directory_if_missing(libraries_root)
    file_system.create_directory_if_missing(libraries_interfaces_root)
    file_system.create_directory_if_missing(symbols_tables_root)
    name = get_artifact_full_name(organization,
                                  artifact,
                                  compiler.get_architecture(),
                                  compiler.get_platform(),
                                  compiler.get_name(),
                                  compiler.get_mode(),
                                  version)
    cache['input'] = input_ = cache.get('input', {})
    (old_library,
     old_library_interface,
     old_symbols_table) = cache.get('output', (None, None, None))
    hasher = lambda path: hash_file(file_system, path)
    updated, removed, new_cache = key_delta(
        objects + external_libraries + external_libraries_interfaces, hasher, input_)
    yield_descriptor = compiler.get_yield_descriptor()
    library = yield_descriptor.get_library(libraries_root, name)
    library_interface = yield_descriptor.get_library_interface(libraries_interfaces_root, name)
    symbols_table = yield_descriptor.get_symbols_table(symbols_tables_root, name)
    # Relink when any input changed or when an expected output is missing.
    remake_library = library and not file_system.exists(library)
    remake_library_interface = library_interface and not file_system.exists(library_interface)
    remake_symbols_table = symbols_table and not file_system.exists(symbols_table)
    if updated or removed or remake_library or remake_library_interface or remake_symbols_table:
        # Remove stale artifacts from the previous link before producing new ones.
        if old_library and file_system.exists(old_library):
            file_system.remove_file(old_library)
        if old_library_interface and file_system.exists(old_library_interface):
            file_system.remove_file(old_library_interface)
        if old_symbols_table and file_system.exists(old_symbols_table):
            file_system.remove_file(old_symbols_table)
        compiler.link_library(external_libraries_root,
                              external_libraries_interfaces_root,
                              objects,
                              external_libraries,
                              external_libraries_interfaces,
                              library,
                              library_interface,
                              symbols_table)
    cache['input'] = new_cache
    cache['output'] = (library, library_interface, symbols_table)
    return (library, library_interface, symbols_table)
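# Illustrative shape of the cache dictionary handled by link_library_using_cache,
# inferred from how the function reads and writes it; the paths and hash values
# below are made up for illustration only.
example_link_cache = {
    'input': {
        # object or external library path -> hash of that file from the last run
        'target/objects/hypothetical.obj': '9f2c...',
        'target/external/libraries/hypothetical.lib': '41ab...',
    },
    'output': (
        # (library, library_interface, symbols_table) produced by the last link
        'target/libraries/hypothetical.dll',
        'target/libraries_interfaces/hypothetical.lib',
        'target/symbols_tables/hypothetical.pdb',
    ),
}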
def compile_using_cache(file_system: FileSystem,
                        compiler: Compiler,
                        headers_root: str,
                        external_headers_root: str,
                        sources_root: str,
                        objects_root: str,
                        headers: List[str],
                        sources: List[str],
                        cache: Dict[str, Any]) -> List[str]:
    file_system.create_directory_if_missing(objects_root)

    def hash_translation_unit(source):
        # Hash the preprocessed translation unit so that header changes also
        # invalidate the cached object, not only edits to the source file.
        return hash_binary(
            compiler.preprocess(headers_root, external_headers_root, headers, source))

    updated, removed, new_cache = key_delta(sources, hash_translation_unit, cache)
    objects = []
    yield_descriptor = compiler.get_yield_descriptor()
    for source in updated:
        object_ = yield_descriptor.get_object(sources_root, objects_root, source)
        compiler.compile(headers_root, external_headers_root, headers, source, object_)
    for source in removed:
        object_ = yield_descriptor.get_object(sources_root, objects_root, source)
        if file_system.exists(object_):
            file_system.remove_file(object_)
    for source in sources:
        object_ = yield_descriptor.get_object(sources_root, objects_root, source)
        if not file_system.exists(object_):
            compiler.compile(headers_root, external_headers_root, headers, source, object_)
        objects.append(object_)
    cache.clear()
    cache.update(new_cache)
    return objects
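# Standalone illustration (not part of the code above) of why hash_translation_unit
# hashes the preprocessed output rather than the source file: any change in an
# included header changes the preprocessed text, so the object is rebuilt even
# though the source file itself is untouched. Assumes a GCC-like compiler on PATH;
# the Compiler.preprocess abstraction above may invoke its toolchain differently.
import hashlib
import subprocess

def hash_preprocessed(source_path, include_directories):
    include_flags = [flag
                     for directory in include_directories
                     for flag in ('-I', directory)]
    preprocessed = subprocess.run(['g++', '-E'] + include_flags + [source_path],
                                  check=True,
                                  capture_output=True).stdout
    return hashlib.sha3_256(preprocessed).hexdigest()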
def pull_dependencies(file_system: FileSystem,
                      resources: StageResources,
                      cache: Dict[str, Any],
                      program_arguments: Dict[str, Any],
                      configuration: Dict[str, Any],
                      remote_proxy: RemoteProxy):
    project_directory = resources['project_directory']
    external_root = join(project_directory, 'target', 'external')
    external_packages_root = join(external_root, 'packages')
    resources['external_resources_root'] = external_resources_root = join(external_root, 'resources')
    resources['external_headers_root'] = external_headers_root = join(external_root, 'headers')
    resources['external_executables_root'] = external_executables_root = join(external_root, 'executables')
    resources['external_libraries_root'] = external_libraries_root = join(external_root, 'libraries')
    resources['external_libraries_interfaces_root'] = external_libraries_interfaces_root = join(external_root, 'libraries_interfaces')
    resources['external_symbols_tables_root'] = external_symbols_tables_root = join(external_root, 'symbols_tables')
    file_system.create_directory_if_missing(external_packages_root)
    file_system.create_directory_if_missing(external_resources_root)
    file_system.create_directory_if_missing(external_headers_root)
    file_system.create_directory_if_missing(external_executables_root)
    file_system.create_directory_if_missing(external_libraries_root)
    file_system.create_directory_if_missing(external_libraries_interfaces_root)
    file_system.create_directory_if_missing(external_symbols_tables_root)
    external_resources = []
    external_headers = []
    external_libraries = []
    external_libraries_interfaces = []
    external_symbols_tables = []
    external_executables = []

    def extend_externals(contents):
        external_resources.extend(contents['resources'])
        external_headers.extend(contents['headers'])
        external_libraries.extend(contents['libraries'])
        external_libraries_interfaces.extend(contents['libraries_interfaces'])
        external_symbols_tables.extend(contents['symbols_tables'])
        external_executables.extend(contents['executables'])

    pralinefile = resources['pralinefile']
    compiler = resources['compiler']
    logging_level = program_arguments['global']['logging_level']
    packages = remote_proxy.solve_dependencies(pralinefile,
                                               compiler.get_architecture(),
                                               compiler.get_platform(),
                                               compiler.get_name(),
                                               compiler.get_mode())
    updated, removed, new_cache = key_delta(packages.keys(), lambda p: packages[p], cache)
    for package in removed:
        # The package is no longer a dependency: remove its extracted contents.
        package_path = join(external_packages_root, package)
        clean_up_package(file_system, package_path, external_root, logging_level)
    for package in updated:
        # The package is new or its fingerprint changed: re-pull and re-extract it.
        package_path = join(external_packages_root, package)
        clean_up_package(file_system, package_path, external_root, logging_level)
        remote_proxy.pull_package(package_path)
        contents = unpack(file_system, package_path, external_root)
        extend_externals(contents)
    for package in packages:
        if package not in updated:
            package_path = join(external_packages_root, package)
            if not file_system.exists(package_path):
                remote_proxy.pull_package(package_path)
                contents = unpack(file_system, package_path, external_root)
            else:
                contents = get_package_extracted_contents(file_system, package_path, external_root)
            extend_externals(contents)
    resources['external_resources'] = external_resources
    resources['external_headers'] = external_headers
    resources['external_libraries'] = external_libraries
    resources['external_libraries_interfaces'] = external_libraries_interfaces
    resources['external_symbols_tables'] = external_symbols_tables
    resources['external_executables'] = external_executables
    cache.clear()
    cache.update(new_cache)
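# Illustrative shape of the pull_dependencies cache, inferred from
# key_delta(packages.keys(), lambda p: packages[p], cache): each solved package
# name maps to the fingerprint reported by remote_proxy.solve_dependencies, so a
# changed fingerprint marks the package as updated. Names and values are made up.
example_dependency_cache = {
    'hypothetical-package-x86_64-windows-msvc-release-1.2.0.tar.gz': '7d01...',
    'another-hypothetical-package-x86_64-windows-msvc-release-0.4.1.tar.gz': 'c3ee...',
}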