# Example 1
def link_executable_using_cache(file_system: FileSystem,
                                compiler: Compiler,
                                executables_root: str,
                                symbols_tables_root: str,
                                external_libraries_root: str,
                                external_libraries_interfaces_root: str,
                                objects: List[str],
                                external_libraries: List[str],
                                external_libraries_interfaces: List[str],
                                organization: str,
                                artifact: str,
                                version: str,
                                cache: Dict[str, Any],
                                is_test: bool = False) -> Tuple[str, str]:
    """Link an executable and its symbols table, skipping the link step
    when cached input hashes show nothing changed since the last run.

    The *cache* dict is mutated in place: ``cache['input']`` holds the
    per-input-file hashes and ``cache['output']`` the previously produced
    output paths.

    Returns a ``(executable_path, symbols_table_path)`` tuple as produced
    by the compiler's yield descriptor; either element may be falsy if the
    descriptor does not yield that artifact (TODO confirm against the
    yield-descriptor contract).
    """
    file_system.create_directory_if_missing(executables_root)
    file_system.create_directory_if_missing(symbols_tables_root)

    # Test executables get a '.test' suffix so they never collide with the
    # production executable of the same artifact/version.
    name = get_artifact_full_name(
        organization, artifact, compiler.get_architecture(),
        compiler.get_platform(), compiler.get_name(), compiler.get_mode(),
        version) + ('.test' if is_test else '')
    cache['input'] = input_ = cache.get('input', {})
    (old_executable, old_symbols_table) = cache.get('output', (None, None))

    # PEP 8 (E731): use a named def rather than assigning a lambda.
    def hasher(path):
        return hash_file(file_system, path)

    updated, removed, new_cache = key_delta(
        objects + external_libraries + external_libraries_interfaces, hasher,
        input_)
    yield_descriptor = compiler.get_yield_descriptor()
    executable = yield_descriptor.get_executable(executables_root, name)
    symbols_table = yield_descriptor.get_symbols_table(symbols_tables_root,
                                                       name)
    # Relink when an expected output vanished from disk even if no input
    # hash changed (e.g. the target directory was cleaned manually).
    remake_executable = executable and not file_system.exists(executable)
    remake_symbols_table = symbols_table and not file_system.exists(
        symbols_table)
    if updated or removed or remake_executable or remake_symbols_table:
        # Drop stale outputs from a previous run first; their paths can
        # differ from the new ones when e.g. the version changed.
        if old_executable and file_system.exists(old_executable):
            file_system.remove_file(old_executable)
        if old_symbols_table and file_system.exists(old_symbols_table):
            file_system.remove_file(old_symbols_table)
        compiler.link_executable(external_libraries_root,
                                 external_libraries_interfaces_root, objects,
                                 external_libraries,
                                 external_libraries_interfaces, executable,
                                 symbols_table)
    cache['input'] = new_cache
    cache['output'] = (executable, symbols_table)
    return (executable, symbols_table)
# Example 2
def pack(file_system: FileSystem, package_path: str,
         package_files: List[Tuple[str, str]], cache: Dict[str, Any]):
    """Archive *package_files* into a gzipped tarball at *package_path*.

    Each entry of *package_files* is ``(file_path, path_inside_package)``.
    The archive is rebuilt only when some input file's hash changed, an
    input disappeared, or the archive itself is missing from disk. The
    *cache* dict of input hashes is refreshed in place.
    """
    # PEP 8 (E731): use a named def rather than assigning a lambda.
    def hasher(path):
        return hash_file(file_system, path)

    updated, removed, new_cache = key_delta([p for p, _ in package_files],
                                            hasher, cache)
    if updated or removed or not file_system.exists(package_path):
        with file_system.open_tarfile(package_path, 'w:gz') as archive:
            for file_path, package_file_path in package_files:
                archive.add(file_path, package_file_path)
    # Mutate the caller's dict rather than rebinding, so callers holding a
    # reference observe the refreshed hashes.
    cache.clear()
    cache.update(new_cache)
# Example 3
def compile_using_cache(file_system: FileSystem, compiler: Compiler,
                        headers_root: str, external_headers_root: str,
                        sources_root: str, objects_root: str,
                        headers: List[str], sources: List[str],
                        cache: Dict[str, Any]) -> List[str]:
    """Compile *sources* into *objects_root*, recompiling only translation
    units whose preprocessed form changed, and return every object path in
    source order. The *cache* dict of per-source hashes is refreshed in
    place."""
    file_system.create_directory_if_missing(objects_root)

    def hash_translation_unit(source):
        # Hash the preprocessed output so that editing a header
        # invalidates every translation unit including it.
        return hash_binary(
            compiler.preprocess(headers_root, external_headers_root, headers,
                                source))

    updated, removed, new_cache = key_delta(sources, hash_translation_unit,
                                            cache)
    yield_descriptor = compiler.get_yield_descriptor()

    def object_of(source):
        return yield_descriptor.get_object(sources_root, objects_root, source)

    # 1. Recompile every changed translation unit.
    for changed in updated:
        compiler.compile(headers_root, external_headers_root, headers,
                         changed, object_of(changed))
    # 2. Drop objects whose source no longer exists.
    for gone in removed:
        stale = object_of(gone)
        if file_system.exists(stale):
            file_system.remove_file(stale)
    # 3. Ensure every source has an object on disk (covers objects deleted
    #    out-of-band) and collect the full list.
    objects = []
    for source in sources:
        object_ = object_of(source)
        if not file_system.exists(object_):
            compiler.compile(headers_root, external_headers_root, headers,
                             source, object_)
        objects.append(object_)

    cache.clear()
    cache.update(new_cache)
    return objects
# Example 4
def clean_up_package(file_system: FileSystem, package_path: str,
                     extraction_path: str, logging_level: int):
    """Remove every artifact previously extracted under *extraction_path*
    from the package at *package_path*, then remove the archive itself."""
    metadata = get_package_metadata(package_path)
    name = metadata['name'].replace(package_extension, '')
    organization = metadata['organization']
    artifact = metadata['artifact']
    yield_descriptor = get_compiler(file_system, metadata['compiler'],
                                    metadata['architecture'],
                                    metadata['platform'], metadata['mode'],
                                    logging_level).get_yield_descriptor()

    # Directories are removed recursively; files individually, in the same
    # order: resources, headers, library, interface, symbols, executable,
    # and finally the archive itself.
    directories = [
        join(extraction_path, 'resources', organization, artifact),
        join(extraction_path, 'headers', organization, artifact),
    ]
    files = [
        yield_descriptor.get_library(join(extraction_path, 'libraries'),
                                     name),
        yield_descriptor.get_library_interface(
            join(extraction_path, 'libraries_interfaces'), name),
        yield_descriptor.get_symbols_table(
            join(extraction_path, 'symbols_tables'), name),
        yield_descriptor.get_executable(
            join(extraction_path, 'executables'), name),
        package_path,
    ]

    for directory in directories:
        if file_system.exists(directory):
            file_system.remove_directory_recursively(directory)
    for file_ in files:
        # The yield descriptor may return a falsy path for artifacts the
        # toolchain does not emit — skip those.
        if file_ and file_system.exists(file_):
            file_system.remove_file(file_)
# Example 5
def clean(file_system: FileSystem, resources: StageResources, cache: Dict[str, Any], program_arguments: Dict[str, Any], configuration: Dict[str, Any], remote_proxy: RemoteProxy):
    """Delete the project's 'target' directory, if it exists."""
    target_directory = join(resources['project_directory'], 'target')
    if file_system.exists(target_directory):
        file_system.remove_directory_recursively(target_directory)
# Example 6
def pull_dependencies(file_system: FileSystem, resources: StageResources, cache: Dict[str, Any], program_arguments: Dict[str, Any], configuration: Dict[str, Any], remote_proxy: RemoteProxy):
    """Pull, unpack and index the project's external dependency packages
    under ``target/external``, reusing already-extracted packages whose
    cached hash is unchanged. Publishes the external roots and content
    lists into *resources* and refreshes *cache* in place."""
    project_directory = resources['project_directory']
    external_root = join(project_directory, 'target', 'external')
    external_packages_root = join(external_root, 'packages')

    # Publish the external layout roots for later stages, then make sure
    # every directory exists (insertion order matches the publish order).
    external_roots = {}
    for kind in ('resources', 'headers', 'executables', 'libraries',
                 'libraries_interfaces', 'symbols_tables'):
        external_roots[kind] = resources[f'external_{kind}_root'] = \
            join(external_root, kind)

    file_system.create_directory_if_missing(external_packages_root)
    for root in external_roots.values():
        file_system.create_directory_if_missing(root)

    # Accumulators for the contents extracted from every package.
    collected = {'resources': [], 'headers': [], 'libraries': [],
                 'libraries_interfaces': [], 'symbols_tables': [],
                 'executables': []}

    def extend_externals(contents):
        for key, accumulator in collected.items():
            accumulator.extend(contents[key])

    pralinefile = resources['pralinefile']
    compiler = resources['compiler']
    logging_level = program_arguments['global']['logging_level']

    packages = remote_proxy.solve_dependencies(pralinefile,
                                               compiler.get_architecture(),
                                               compiler.get_platform(),
                                               compiler.get_name(),
                                               compiler.get_mode())
    updated, removed, new_cache = key_delta(packages.keys(),
                                            lambda package: packages[package],
                                            cache)

    # Packages that left the dependency tree: erase their traces.
    for package in removed:
        clean_up_package(file_system, join(external_packages_root, package),
                         external_root, logging_level)

    # Packages whose cached value changed: wipe the old extraction, then
    # pull and unpack the fresh archive.
    for package in updated:
        package_path = join(external_packages_root, package)
        clean_up_package(file_system, package_path, external_root,
                         logging_level)
        remote_proxy.pull_package(package_path)
        extend_externals(unpack(file_system, package_path, external_root))

    # Unchanged packages: pull only when the archive is missing locally,
    # otherwise reuse what is already extracted.
    for package in packages:
        if package in updated:
            continue
        package_path = join(external_packages_root, package)
        if file_system.exists(package_path):
            contents = get_package_extracted_contents(file_system,
                                                      package_path,
                                                      external_root)
        else:
            remote_proxy.pull_package(package_path)
            contents = unpack(file_system, package_path, external_root)
        extend_externals(contents)

    for key, values in collected.items():
        resources[f'external_{key}'] = values

    cache.clear()
    cache.update(new_cache)