def link_executable_using_cache(file_system: FileSystem, compiler: Compiler, executables_root: str, symbols_tables_root: str, external_libraries_root: str, external_libraries_interfaces_root: str, objects: List[str], external_libraries: List[str], external_libraries_interfaces: List[str], organization: str, artifact: str, version: str, cache: Dict[str, Any], is_test: bool = False) -> Tuple[str, str]:
    """Link an executable and its symbols table, skipping the link step when
    the cached input hashes show nothing changed and both outputs already
    exist on disk.

    The ``cache`` dict is mutated in place: ``cache['input']`` holds the
    per-input hash map and ``cache['output']`` the paths produced last run.
    Returns the ``(executable, symbols_table)`` output paths.
    """
    file_system.create_directory_if_missing(executables_root)
    file_system.create_directory_if_missing(symbols_tables_root)

    suffix = '.test' if is_test else ''
    full_name = get_artifact_full_name(organization,
                                       artifact,
                                       compiler.get_architecture(),
                                       compiler.get_platform(),
                                       compiler.get_name(),
                                       compiler.get_mode(),
                                       version) + suffix

    # State from the previous run: input hashes plus the paths it produced.
    previous_input = cache.get('input', {})
    cache['input'] = previous_input
    previous_executable, previous_symbols_table = cache.get('output', (None, None))

    def hash_input(path):
        return hash_file(file_system, path)

    all_inputs = objects + external_libraries + external_libraries_interfaces
    updated, removed, fresh_input = key_delta(all_inputs, hash_input, previous_input)

    descriptor = compiler.get_yield_descriptor()
    executable = descriptor.get_executable(executables_root, full_name)
    symbols_table = descriptor.get_symbols_table(symbols_tables_root, full_name)

    # Relink when any input changed or vanished, or an expected output is gone.
    executable_missing = executable and not file_system.exists(executable)
    symbols_table_missing = symbols_table and not file_system.exists(symbols_table)

    if updated or removed or executable_missing or symbols_table_missing:
        # Drop the previous run's stale outputs before linking anew.
        for stale in (previous_executable, previous_symbols_table):
            if stale and file_system.exists(stale):
                file_system.remove_file(stale)
        compiler.link_executable(external_libraries_root,
                                 external_libraries_interfaces_root,
                                 objects,
                                 external_libraries,
                                 external_libraries_interfaces,
                                 executable,
                                 symbols_table)

    cache['input'] = fresh_input
    cache['output'] = (executable, symbols_table)
    return (executable, symbols_table)
def clean_up_package(file_system: FileSystem, package_path: str, extraction_path: str, logging_level: int):
    """Remove every artifact a previously-extracted package left behind,
    then remove the package archive itself.

    Reads the package metadata to reconstruct the artifact name and the
    compiler yield descriptor, then deletes (when present) the extracted
    resources and headers directories, the library, library interface,
    symbols table and executable files, and finally the package file.

    :param file_system: file-system abstraction used for all checks/removals
    :param package_path: path to the package archive on disk
    :param extraction_path: root directory the package was extracted into
    :param logging_level: logging level forwarded to ``get_compiler``
    """
    package = get_package_metadata(package_path)
    name = package['name'].replace(package_extension, '')
    organization = package['organization']
    artifact = package['artifact']
    architecture = package['architecture']
    platform = package['platform']
    compiler = package['compiler']
    mode = package['mode']
    # The yield descriptor maps the artifact name to its per-platform outputs.
    yield_descriptor = get_compiler(file_system, compiler, architecture,
                                    platform, mode,
                                    logging_level).get_yield_descriptor()

    directories = (
        join(extraction_path, 'resources', organization, artifact),
        join(extraction_path, 'headers', organization, artifact),
    )
    files = (
        yield_descriptor.get_library(join(extraction_path, 'libraries'), name),
        yield_descriptor.get_library_interface(
            join(extraction_path, 'libraries_interfaces'), name),
        yield_descriptor.get_symbols_table(
            join(extraction_path, 'symbols_tables'), name),
        yield_descriptor.get_executable(
            join(extraction_path, 'executables'), name),
    )

    for directory in directories:
        if file_system.exists(directory):
            file_system.remove_directory_recursively(directory)

    # A descriptor may return None for outputs the platform doesn't produce.
    for file in files:
        if file and file_system.exists(file):
            file_system.remove_file(file)

    if file_system.exists(package_path):
        file_system.remove_file(package_path)
def compile_using_cache(file_system: FileSystem, compiler: Compiler, headers_root: str, external_headers_root: str, sources_root: str, objects_root: str, headers: List[str], sources: List[str], cache: Dict[str, Any]) -> List[str]:
    """Compile ``sources`` into objects under ``objects_root``, recompiling
    only translation units whose preprocessed form changed since the hashes
    recorded in ``cache``.

    Objects belonging to sources that disappeared are deleted, and any
    object missing from disk is (re)built. ``cache`` is replaced in place
    with the fresh hash map. Returns the object paths, one per source, in
    source order.
    """
    file_system.create_directory_if_missing(objects_root)
    descriptor = compiler.get_yield_descriptor()

    def object_of(source):
        return descriptor.get_object(sources_root, objects_root, source)

    def recompile(source, target):
        compiler.compile(headers_root, external_headers_root, headers,
                         source, target)

    def fingerprint(source):
        # Hash the preprocessed unit so header edits also trigger a rebuild.
        preprocessed = compiler.preprocess(headers_root, external_headers_root,
                                           headers, source)
        return hash_binary(preprocessed)

    changed, gone, fresh_cache = key_delta(sources, fingerprint, cache)

    for source in changed:
        recompile(source, object_of(source))

    for source in gone:
        orphan = object_of(source)
        if file_system.exists(orphan):
            file_system.remove_file(orphan)

    produced = []
    for source in sources:
        target = object_of(source)
        # Covers objects never built before (e.g. deleted out-of-band).
        if not file_system.exists(target):
            recompile(source, target)
        produced.append(target)

    cache.clear()
    cache.update(fresh_cache)
    return produced