def unpack(file_system: FileSystem, package_path: str,
           extraction_path: str) -> Dict[str, List[str]]:
    contents = {
        'resources': [],
        'headers': [],
        'libraries': [],
        'libraries_interfaces': [],
        'symbols_tables': [],
        'executables': []
    }
    with file_system.open_tarfile(package_path, 'r:gz') as archive:
        for member in archive.getmembers():
            if member.isfile():
                extracted = False
                for root, files in contents.items():
                    if common_path([normalized_path(member.name),
                                    normalized_path(root)]) == root:
                        archive.extract(member, extraction_path)
                        files.append(join(extraction_path, member.name))
                        extracted = True
                if not extracted and member.name != 'Pralinefile':
                    raise InvalidPackageContentsError(
                        f"unrecognized file '{member.name}' in package")
    for header in contents['headers']:
        with file_system.open_file(header, 'rb') as f:
            text = f.read().decode()
        with file_system.open_file(header, 'wb') as f:
            f.write(text.replace('PRALINE_EXPORT', 'PRALINE_IMPORT').encode())
    return contents

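# A minimal usage sketch for unpack (the package path and extraction
# directory below are hypothetical; it assumes a built praline package
# archive already exists on disk):
#
#     file_system = FileSystem()
#     contents = unpack(file_system,
#                       package_path='target/external/packages/example.tar.gz',
#                       extraction_path='target/external')
#     # Extracted headers have PRALINE_EXPORT rewritten to PRALINE_IMPORT,
#     # so they are consumed as an imported dependency.
#     for header in contents['headers']:
#         print(header)
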
def has_headers_only(file_system: FileSystem,
                     program_arguments: Dict[str, Any],
                     configuration: Dict[str, Any]):
    sources_root = join(file_system.get_working_directory(), 'sources')
    files = file_system.files_in_directory(sources_root)
    return not program_arguments['global']['executable'] and all(
        f.endswith('.hpp') or f.endswith('.test.cpp') for f in files)

def has_non_executable_sources(file_system: FileSystem,
                               program_arguments: Dict[str, Any],
                               configuration: Dict[str, Any]):
    sources_root = join(file_system.get_working_directory(), 'sources')
    files = file_system.files_in_directory(sources_root)
    return (not program_arguments['global']['executable']
            and all(basename(f) != 'executable.cpp' for f in files)
            and any(f.endswith('.cpp') and not f.endswith('.test.cpp')
                    for f in files))

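# How these predicates classify a project, by example (illustrative file
# layouts; the executable flag is program_arguments['global']['executable']):
#
#     sources/org/art/math.hpp                       -> has_headers_only
#     sources/org/art/math.hpp + math.test.cpp       -> has_headers_only
#     sources/org/art/math.hpp + math.cpp            -> has_non_executable_sources
#     sources/org/art/executable.cpp (or the flag)   -> has_executable (defined
#                                                       further down)
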
def main(file_system: FileSystem, resources: StageResources,
         cache: Dict[str, Any], program_arguments: Dict[str, Any],
         configuration: Dict[str, Any], remote_proxy: RemoteProxy):
    external_libraries_root = resources['external_libraries_root']
    resources_root = resources['resources_root']
    main_executable = resources['main_executable']
    arguments = program_arguments['byStage']['arguments']
    file_system.execute_and_fail_on_bad_return(
        [main_executable] + arguments,
        add_to_library_path=[external_libraries_root, resources_root],
        interactive=True)

def format_test_sources(file_system: FileSystem, resources: StageResources,
                        cache: Dict[str, Any],
                        program_arguments: Dict[str, Any],
                        configuration: Dict[str, Any],
                        remote_proxy: RemoteProxy):
    test_sources = resources['test_sources']
    hasher = lambda f: hash_file(file_system, f)
    updated, _, new_cache = key_delta(test_sources, hasher, cache)
    if updated:
        clang_format_executable = resources['clang_format_executable']
        file_system.execute_and_fail_on_bad_return(
            [clang_format_executable, '-i', '-style=file'] + updated)
    resources['formatted_test_sources'] = test_sources
    cache.clear()
    cache.update(new_cache)

def pack(file_system: FileSystem, package_path: str,
         package_files: List[Tuple[str, str]], cache: Dict[str, Any]):
    hasher = lambda p: hash_file(file_system, p)
    updated, removed, new_cache = key_delta([p for p, _ in package_files],
                                            hasher, cache)
    if updated or removed or not file_system.exists(package_path):
        with file_system.open_tarfile(package_path, 'w:gz') as archive:
            for file_path, package_file_path in package_files:
                archive.add(file_path, package_file_path)
    cache.clear()
    cache.update(new_cache)

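# key_delta is used throughout for incremental work. Its implementation lives
# elsewhere in praline; the sketch below is an assumed reference for the
# contract these call sites rely on, not the actual code:
#
#     def key_delta(keys, hasher, cache):
#         new_cache = {key: hasher(key) for key in keys}
#         updated = [k for k in keys
#                    if k not in cache or cache[k] != new_cache[k]]
#         removed = [k for k in cache if k not in new_cache]
#         return updated, removed, new_cache
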
def load_clang_format(file_system: FileSystem, resources: StageResources,
                      cache: Dict[str, Any],
                      program_arguments: Dict[str, Any],
                      configuration: Dict[str, Any],
                      remote_proxy: RemoteProxy):
    if 'clang-format-executable-path' in configuration:
        clang_format_executable = configuration['clang-format-executable-path']
        if not file_system.is_file(clang_format_executable):
            raise ClangFormatConfigurationError(
                f"user supplied clang-format '{clang_format_executable}' "
                f"is not a file")
    else:
        clang_format_executable = file_system.which('clang-format')
        if clang_format_executable is None:
            raise ClangFormatConfigurationError(
                "couldn't find clang-format in path -- either supply it in "
                "the praline-client.config file or add it to the PATH "
                "environment variable")
    project_directory = resources['project_directory']
    resources['clang_format_executable'] = clang_format_executable
    resources['clang_format_style_file'] = clang_format_style_file = join(
        project_directory, '.clang-format')
    file_system.create_file_if_missing(clang_format_style_file,
                                       clang_format_style_file_contents)

def invoke_stage(target_stage: str, stages: Dict[str, Stage],
                 file_system: FileSystem, program_arguments: Dict[str, Any],
                 configuration: Dict[str, Any],
                 remote_proxy: RemoteProxy) -> None:
    resources = {}
    pipeline = create_pipeline(target_stage, stages, file_system,
                               program_arguments, configuration)
    project_directory = file_system.get_working_directory()
    cache_path = join(project_directory, 'target', 'cache.pickle')
    for activation, stage_name in pipeline:
        stage = stages[stage_name]
        stage_resources = StageResources(
            stage_name, activation,
            {resource: resources[resource]
             for resource in stage.requirements[activation]}, stage.output)
        stage_program_arguments = get_stage_program_arguments(
            stage_name, program_arguments)
        if stage.cacheable:
            with Cache(file_system, cache_path) as cache:
                cache[stage_name] = stage_cache = cache.get(stage_name, {})
                stage.invoker(file_system, stage_resources, stage_cache,
                              stage_program_arguments, configuration,
                              remote_proxy)
        else:
            stage.invoker(file_system, stage_resources, None,
                          stage_program_arguments, configuration,
                          remote_proxy)
        for resource in stage.output:
            if resource not in stage_resources:
                raise ResourceNotSuppliedError(
                    f"stage '{stage_name}' didn't supply resource "
                    f"'{resource}'")
        resources.update(stage_resources.resources)

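# From invoke_stage's point of view a stage needs the attributes used above:
# requirements (one requirement set per activation), output, cacheable, and
# an invoker callable. A hypothetical minimal stage, for illustration only --
# the real registration mechanism is praline's registered_stages decorator:
#
#     class EchoStage:
#         requirements = [[]]       # activation 0 requires no resources
#         output = ['greeting']
#         cacheable = False
#
#         @staticmethod
#         def invoker(file_system, stage_resources, cache,
#                     program_arguments, configuration, remote_proxy):
#             stage_resources['greeting'] = 'hello'
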
def get_package_extracted_contents(
        file_system: FileSystem, package_path: str,
        extraction_path: str) -> Dict[str, List[str]]:
    contents = {
        'resources': [],
        'headers': [],
        'libraries': [],
        'libraries_interfaces': [],
        'symbols_tables': [],
        'executables': []
    }
    with file_system.open_tarfile(package_path, 'r:gz') as archive:
        for member in archive.getmembers():
            if member.isfile():
                extracted = False
                for root, files in contents.items():
                    if common_path([normalized_path(member.name),
                                    normalized_path(root)]) == root:
                        files.append(join(extraction_path, member.name))
                        extracted = True
                if not extracted and member.name != 'Pralinefile':
                    raise InvalidPackageContentsError(
                        f"unrecognized file '{member.name}' in package")
    return contents

def load_headers(file_system: FileSystem, resources: StageResources,
                 cache: Dict[str, Any], program_arguments: Dict[str, Any],
                 configuration: Dict[str, Any], remote_proxy: RemoteProxy):
    resources['headers'] = [
        f for f in file_system.files_in_directory(resources['headers_root'])
        if f.endswith('.hpp')
    ]

def load_pralinefile(file_system: FileSystem, resources: StageResources,
                     cache: Dict[str, Any],
                     program_arguments: Dict[str, Any],
                     configuration: Dict[str, Any],
                     remote_proxy: RemoteProxy):
    project_directory = file_system.get_working_directory()
    resources['project_directory'] = project_directory
    pralinefile_path = join(project_directory, 'Pralinefile')
    try:
        resources['pralinefile'] = pralinefile = pralinefile_from_path(
            file_system, pralinefile_path)
    except FileNotFoundError as e:
        raise FileNotFoundError(
            f"no Pralinefile was found in current working directory "
            f"{project_directory}") from e
    architecture = file_system.get_architecture()
    if architecture not in pralinefile['architectures']:
        raise UnsupportedArchitectureError(
            f"system architecture '{architecture}' is not supported -- "
            f"supported architectures for this project are "
            f"{pralinefile['architectures']}")
    platform = file_system.get_platform()
    if platform not in pralinefile['platforms']:
        raise UnsupportedPlatformError(
            f"{platform} is not supported -- supported platforms are "
            f"{pralinefile['platforms']}")
    mode = ('release' if program_arguments['global']['release']
            else pralinefile['modes'][0])
    logging_level = program_arguments['global']['logging_level']
    matching_compilers = [
        compiler for compiler in get_compilers(file_system, architecture,
                                               platform, mode, logging_level)
        if compiler.matches()
    ]
    compilers = [
        compiler for compiler in matching_compilers
        if compiler.get_name() in pralinefile['compilers']
    ]
    if not compilers:
        raise NoMatchingCompilerFoundError(
            f"no suitable compiler was found -- matching compilers are "
            f"{[c.get_name() for c in matching_compilers]} while specified "
            f"compilers are {pralinefile['compilers']}")
    resources['compiler'] = compilers[0]

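# The fields consulted above imply a Pralinefile roughly like the following
# (shown as YAML purely for illustration -- the concrete syntax is defined by
# pralinefile_from_reader, not here, and the values are hypothetical):
#
#     organization: myorg
#     artifact: mylib
#     version: 1.0.0
#     architectures: [x64]
#     platforms: [linux, windows]
#     compilers: [gcc, clang]
#     modes: [debug, release]
#     dependencies:
#     - organization: otherorg
#       artifact: otherlib
#       version: 1.2.0
#       scope: main
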
def load_test_sources(file_system: FileSystem, resources: StageResources,
                      cache: Dict[str, Any],
                      program_arguments: Dict[str, Any],
                      configuration: Dict[str, Any],
                      remote_proxy: RemoteProxy):
    pralinefile = resources['pralinefile']
    test_sources_root = resources['test_sources_root']
    test_executable_source = join(test_sources_root,
                                  pralinefile['organization'],
                                  pralinefile['artifact'],
                                  'executable.test.cpp')
    file_system.create_file_if_missing(test_executable_source,
                                       test_executable_contents)
    resources['test_sources'] = [
        f for f in file_system.files_in_directory(test_sources_root)
        if f.endswith('.test.cpp')
    ]

def hash_archive(file_system: FileSystem, archive_path: str):
    hasher = hashlib.sha3_256()
    with file_system.open_tarfile(archive_path, 'r:gz') as archive:
        for member in archive.getmembers():
            if member.isfile():
                hasher.update(member.name.encode('utf-8'))
                with archive.extractfile(member.name) as f:
                    for chunk in iter(lambda: f.read(4096), b''):
                        hasher.update(chunk)
    return hasher.hexdigest()

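# Hashing member names and contents (in archive order) rather than the raw
# .tar.gz bytes makes the digest insensitive to compression metadata such as
# the modification time embedded in the gzip header, so logically identical
# archives hash the same even when rebuilt.
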
def validate_project(file_system: FileSystem, resources: StageResources,
                     cache: Dict[str, Any],
                     program_arguments: Dict[str, Any],
                     configuration: Dict[str, Any],
                     remote_proxy: RemoteProxy):
    project_directory = resources['project_directory']
    resources['headers_root'] = join(project_directory, 'sources')
    resources['test_sources_root'] = join(project_directory, 'sources')
    resources['resources_root'] = resources_root = join(
        project_directory, 'resources')
    resources['main_sources_root'] = main_sources_root = join(
        project_directory, 'sources')
    pralinefile = resources['pralinefile']
    organization = pralinefile['organization']
    artifact = pralinefile['artifact']
    file_system.create_directory_if_missing(
        join(resources_root, organization, artifact))
    file_system.create_directory_if_missing(
        join(main_sources_root, organization, artifact))
    check_unique(file_system, resources_root, organization, artifact)
    check_unique(file_system, main_sources_root, organization, artifact)

def load_main_sources(file_system: FileSystem, resources: StageResources,
                      cache: Dict[str, Any],
                      program_arguments: Dict[str, Any],
                      configuration: Dict[str, Any],
                      remote_proxy: RemoteProxy):
    main_sources_root = resources['main_sources_root']
    pralinefile = resources['pralinefile']
    main_executable_source = join(main_sources_root,
                                  pralinefile['organization'],
                                  pralinefile['artifact'], 'executable.cpp')
    if program_arguments['global']['executable']:
        resources['main_executable_source'] = main_executable_source
        file_system.create_file_if_missing(main_executable_source,
                                           main_executable_contents)
    elif file_system.is_file(main_executable_source):
        resources['main_executable_source'] = main_executable_source
    else:
        resources['main_executable_source'] = None
    resources['main_sources'] = [
        f for f in file_system.files_in_directory(main_sources_root)
        if f.endswith('.cpp') and not f.endswith('.test.cpp')
    ]

def package_headers_only(file_system: FileSystem, resources: StageResources,
                         cache: Dict[str, Any],
                         program_arguments: Dict[str, Any],
                         configuration: Dict[str, Any],
                         remote_proxy: RemoteProxy):
    project_directory = resources['project_directory']
    pralinefile = resources['pralinefile']
    compiler = resources['compiler']
    resources_root = resources['resources_root']
    resource_files = resources['resources']
    headers_root = resources['headers_root']
    # Which header list is available depends on the pipeline activation:
    # activations 0 and 1 ran the formatting stage first, 2 and 3 did not.
    if resources.activation in [0, 1]:
        headers = resources['formatted_headers']
    elif resources.activation in [2, 3]:
        headers = resources['headers']
    package_path = join(
        project_directory, 'target',
        get_package(pralinefile['organization'], pralinefile['artifact'],
                    compiler.get_architecture(), compiler.get_platform(),
                    compiler.get_name(), compiler.get_mode(),
                    pralinefile['version']))
    file_system.create_directory_if_missing(join(project_directory, 'target'))
    package_files = [(path, join('resources',
                                 relative_path(path, resources_root)))
                     for path in resource_files]
    package_files.extend(
        (path, join('headers', relative_path(path, headers_root)))
        for path in headers)
    package_files.append((join(project_directory, 'Pralinefile'),
                          'Pralinefile'))
    pack(file_system, package_path, package_files, cache)
    resources['headers_only_package'] = package_path

def get_package_dependencies_from_archive(file_system: FileSystem,
                                          package_path: str,
                                          scope: str) -> List[str]:
    with file_system.open_tarfile(package_path, 'r:gz') as archive:
        with archive.extractfile('Pralinefile') as reader:
            pralinefile = pralinefile_from_reader(reader,
                                                  skip_validation=True)
    package = get_package_metadata(package_path)
    package_dependencies = []
    for dependency in pralinefile['dependencies']:
        if dependency['scope'] == scope:
            package_dependency = get_package(
                dependency['organization'], dependency['artifact'],
                package['architecture'], package['platform'],
                package['compiler'], package['mode'], dependency['version'])
            package_dependencies.append(package_dependency)
    return package_dependencies

def compile_using_cache(file_system: FileSystem, compiler: Compiler,
                        headers_root: str, external_headers_root: str,
                        sources_root: str, objects_root: str,
                        headers: List[str], sources: List[str],
                        cache: Dict[str, Any]) -> List[str]:
    file_system.create_directory_if_missing(objects_root)

    def hash_translation_unit(source):
        return hash_binary(
            compiler.preprocess(headers_root, external_headers_root, headers,
                                source))

    updated, removed, new_cache = key_delta(sources, hash_translation_unit,
                                            cache)
    objects = []
    yield_descriptor = compiler.get_yield_descriptor()
    for source in updated:
        object_ = yield_descriptor.get_object(sources_root, objects_root,
                                              source)
        compiler.compile(headers_root, external_headers_root, headers,
                         source, object_)
    for source in removed:
        object_ = yield_descriptor.get_object(sources_root, objects_root,
                                              source)
        if file_system.exists(object_):
            file_system.remove_file(object_)
    for source in sources:
        object_ = yield_descriptor.get_object(sources_root, objects_root,
                                              source)
        if not file_system.exists(object_):
            compiler.compile(headers_root, external_headers_root, headers,
                             source, object_)
        objects.append(object_)
    cache.clear()
    cache.update(new_cache)
    return objects

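# Note the cache key above: each source is hashed over its *preprocessed*
# translation unit, so editing a header transparently invalidates every
# source that includes it -- no separate dependency tracking is needed.
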
def can_run_unit_tests(file_system: FileSystem,
                       program_arguments: Dict[str, Any],
                       configuration: Dict[str, Any]):
    return not program_arguments['global']['skip_unit_tests'] and any(
        f for f in file_system.files_in_directory('sources')
        if f.endswith('.test.cpp'))

def clean_up_package(file_system: FileSystem, package_path: str,
                     extraction_path: str, logging_level: int):
    package = get_package_metadata(package_path)
    name = package['name'].replace(package_extension, '')
    organization = package['organization']
    artifact = package['artifact']
    architecture = package['architecture']
    platform = package['platform']
    compiler = package['compiler']
    mode = package['mode']
    yield_descriptor = get_compiler(file_system, compiler, architecture,
                                    platform, mode,
                                    logging_level).get_yield_descriptor()
    resources = join(extraction_path, 'resources', organization, artifact)
    headers = join(extraction_path, 'headers', organization, artifact)
    library = yield_descriptor.get_library(
        join(extraction_path, 'libraries'), name)
    library_interface = yield_descriptor.get_library_interface(
        join(extraction_path, 'libraries_interfaces'), name)
    symbols_table = yield_descriptor.get_symbols_table(
        join(extraction_path, 'symbols_tables'), name)
    executable = yield_descriptor.get_executable(
        join(extraction_path, 'executables'), name)
    if file_system.exists(resources):
        file_system.remove_directory_recursively(resources)
    if file_system.exists(headers):
        file_system.remove_directory_recursively(headers)
    if library and file_system.exists(library):
        file_system.remove_file(library)
    if library_interface and file_system.exists(library_interface):
        file_system.remove_file(library_interface)
    if symbols_table and file_system.exists(symbols_table):
        file_system.remove_file(symbols_table)
    if executable and file_system.exists(executable):
        file_system.remove_file(executable)
    if file_system.exists(package_path):
        file_system.remove_file(package_path)

#!/usr/bin/env python3
from praline.client.configuration import configuration
import logging.config
import logging

logging.config.dictConfig(configuration['logging'])

from praline.client.project.pipeline.orchestration import invoke_stage
from praline.client.project.pipeline.program_arguments import get_program_arguments
from praline.client.repository.remote_proxy import RemoteProxy
from praline.client.project.pipeline.stages.stage import registered_stages
from praline.common.file_system import FileSystem

if __name__ == '__main__':
    logger = logging.getLogger(__name__)
    try:
        file_system = FileSystem()
        remote_proxy = RemoteProxy(file_system,
                                   configuration['remote-repository'])
        program_arguments = get_program_arguments(registered_stages)
        stage = program_arguments['global']['running_stage']
        invoke_stage(stage, registered_stages, file_system,
                     program_arguments, configuration, remote_proxy)
        exit(0)
    except RuntimeError as exception:
        logger.fatal(exception)
        exit(-1)

def get_packages_from_directory(file_system: FileSystem,
                                directory: str) -> List[str]:
    return [
        entry.name for entry in file_system.list_directory(directory)
        if get_package_metadata(entry.name, none_on_error=True)
    ]

def pralinefile_from_path(file_system: FileSystem, path: str,
                          skip_validation=False) -> Dict[str, Any]:
    with file_system.open_file(path, 'r') as reader:
        # Forward the flag -- the original dropped it, so validation could
        # never actually be skipped through this function.
        return pralinefile_from_reader(reader,
                                       skip_validation=skip_validation)

def clean(file_system: FileSystem, resources: StageResources,
          cache: Dict[str, Any], program_arguments: Dict[str, Any],
          configuration: Dict[str, Any], remote_proxy: RemoteProxy):
    target = join(resources['project_directory'], 'target')
    if file_system.exists(target):
        file_system.remove_directory_recursively(target)

def load_resources(file_system: FileSystem, resources: StageResources,
                   cache: Dict[str, Any], program_arguments: Dict[str, Any],
                   configuration: Dict[str, Any], remote_proxy: RemoteProxy):
    resources['resources'] = file_system.files_in_directory(
        resources['resources_root'])

from praline.server.configuration import configuration
import logging.config

logging.config.dictConfig(configuration['logging'])

from flask import Flask, send_from_directory, request, Response, jsonify
from praline.common.file_system import FileSystem, join
from praline.common.hashing import hash_archive
from praline.common.package import get_package_dependencies_recursively
from typing import Dict

file_system = FileSystem()
server = Flask(__name__, root_path=file_system.get_working_directory())
repository_path = join(configuration['repository'], 'packages')


@server.route('/package/<package>', methods=['GET', 'PUT'])
def package(package) -> Response:
    file_system.create_directory_if_missing(repository_path)
    if request.method == 'GET':
        return send_from_directory(repository_path, package,
                                   as_attachment=True)
    elif request.method == 'PUT':
        package_path = join(repository_path, package)
        if file_system.exists(package_path):
            return Response(
                f"package '{package}' already exists -- increment the "
                f"version and try again",
                status=409)
        # The rest of the PUT handler (persisting the uploaded archive) is
        # truncated in this excerpt.

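# Exercising the route above from a client (a sketch; the host, port, and
# package name are hypothetical, and it assumes the server is running):
#
#     import requests
#
#     url = ('http://localhost:5000/package/'
#            'example-x64-linux-gcc-release-1.0.0.tar.gz')
#
#     # Upload a package; a second PUT for the same name yields 409.
#     with open('target/example.tar.gz', 'rb') as f:
#         response = requests.put(url, data=f)
#
#     # Download it back.
#     response = requests.get(url)
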
def pull_dependencies(file_system: FileSystem, resources: StageResources,
                      cache: Dict[str, Any],
                      program_arguments: Dict[str, Any],
                      configuration: Dict[str, Any],
                      remote_proxy: RemoteProxy):
    project_directory = resources['project_directory']
    external_root = join(project_directory, 'target', 'external')
    external_packages_root = join(external_root, 'packages')
    external_resources_root = join(external_root, 'resources')
    external_headers_root = join(external_root, 'headers')
    external_executables_root = join(external_root, 'executables')
    external_libraries_root = join(external_root, 'libraries')
    external_libraries_interfaces_root = join(external_root,
                                              'libraries_interfaces')
    external_symbols_tables_root = join(external_root, 'symbols_tables')
    resources['external_resources_root'] = external_resources_root
    resources['external_headers_root'] = external_headers_root
    resources['external_executables_root'] = external_executables_root
    resources['external_libraries_root'] = external_libraries_root
    resources['external_libraries_interfaces_root'] = \
        external_libraries_interfaces_root
    resources['external_symbols_tables_root'] = external_symbols_tables_root
    file_system.create_directory_if_missing(external_packages_root)
    file_system.create_directory_if_missing(external_resources_root)
    file_system.create_directory_if_missing(external_headers_root)
    file_system.create_directory_if_missing(external_executables_root)
    file_system.create_directory_if_missing(external_libraries_root)
    file_system.create_directory_if_missing(external_libraries_interfaces_root)
    file_system.create_directory_if_missing(external_symbols_tables_root)
    external_resources = []
    external_headers = []
    external_libraries = []
    external_libraries_interfaces = []
    external_symbols_tables = []
    external_executables = []

    def extend_externals(contents):
        external_resources.extend(contents['resources'])
        external_headers.extend(contents['headers'])
        external_libraries.extend(contents['libraries'])
        external_libraries_interfaces.extend(contents['libraries_interfaces'])
        external_symbols_tables.extend(contents['symbols_tables'])
        external_executables.extend(contents['executables'])

    pralinefile = resources['pralinefile']
    compiler = resources['compiler']
    logging_level = program_arguments['global']['logging_level']
    packages = remote_proxy.solve_dependencies(pralinefile,
                                               compiler.get_architecture(),
                                               compiler.get_platform(),
                                               compiler.get_name(),
                                               compiler.get_mode())
    updated, removed, new_cache = key_delta(packages.keys(),
                                            lambda p: packages[p], cache)
    # Removed dependencies: delete their extracted files and archives.
    for package in removed:
        package_path = join(external_packages_root, package)
        clean_up_package(file_system, package_path, external_root,
                         logging_level)
    # Updated dependencies: clean up the stale version, then pull and unpack.
    for package in updated:
        package_path = join(external_packages_root, package)
        clean_up_package(file_system, package_path, external_root,
                         logging_level)
        remote_proxy.pull_package(package_path)
        contents = unpack(file_system, package_path, external_root)
        extend_externals(contents)
    # Unchanged dependencies: pull only if the archive is missing; otherwise
    # just enumerate the already-extracted contents.
    for package in packages:
        if package not in updated:
            package_path = join(external_packages_root, package)
            if not file_system.exists(package_path):
                remote_proxy.pull_package(package_path)
                contents = unpack(file_system, package_path, external_root)
            else:
                contents = get_package_extracted_contents(
                    file_system, package_path, external_root)
            extend_externals(contents)
    resources['external_resources'] = external_resources
    resources['external_headers'] = external_headers
    resources['external_libraries'] = external_libraries
    resources['external_libraries_interfaces'] = external_libraries_interfaces
    resources['external_symbols_tables'] = external_symbols_tables
    resources['external_executables'] = external_executables
    cache.clear()
    cache.update(new_cache)

def has_executable(file_system: FileSystem,
                   program_arguments: Dict[str, Any],
                   configuration: Dict[str, Any]):
    sources_root = join(file_system.get_working_directory(), 'sources')
    files = file_system.files_in_directory(sources_root)
    return program_arguments['global']['executable'] or any(
        basename(f) == 'executable.cpp' for f in files)

def link_library_using_cache(
        file_system: FileSystem, compiler: Compiler, libraries_root: str,
        libraries_interfaces_root: str, symbols_tables_root: str,
        external_libraries_root: str, external_libraries_interfaces_root: str,
        objects: List[str], external_libraries: List[str],
        external_libraries_interfaces: List[str], organization: str,
        artifact: str, version: str,
        cache: Dict[str, Any]) -> Tuple[str, str, str]:
    file_system.create_directory_if_missing(libraries_root)
    file_system.create_directory_if_missing(libraries_interfaces_root)
    file_system.create_directory_if_missing(symbols_tables_root)
    name = get_artifact_full_name(organization, artifact,
                                  compiler.get_architecture(),
                                  compiler.get_platform(),
                                  compiler.get_name(), compiler.get_mode(),
                                  version)
    cache['input'] = input_ = cache.get('input', {})
    (old_library, old_library_interface,
     old_symbols_table) = cache.get('output', (None, None, None))
    hasher = lambda path: hash_file(file_system, path)
    updated, removed, new_cache = key_delta(
        objects + external_libraries + external_libraries_interfaces, hasher,
        input_)
    yield_descriptor = compiler.get_yield_descriptor()
    library = yield_descriptor.get_library(libraries_root, name)
    library_interface = yield_descriptor.get_library_interface(
        libraries_interfaces_root, name)
    symbols_table = yield_descriptor.get_symbols_table(symbols_tables_root,
                                                       name)
    remake_library = library and not file_system.exists(library)
    remake_library_interface = library_interface and not file_system.exists(
        library_interface)
    remake_symbols_table = symbols_table and not file_system.exists(
        symbols_table)
    if (updated or removed or remake_library or remake_library_interface
            or remake_symbols_table):
        if old_library and file_system.exists(old_library):
            file_system.remove_file(old_library)
        if old_library_interface and file_system.exists(old_library_interface):
            file_system.remove_file(old_library_interface)
        if old_symbols_table and file_system.exists(old_symbols_table):
            file_system.remove_file(old_symbols_table)
        compiler.link_library(external_libraries_root,
                              external_libraries_interfaces_root, objects,
                              external_libraries,
                              external_libraries_interfaces, library,
                              library_interface, symbols_table)
    cache['input'] = new_cache
    cache['output'] = (library, library_interface, symbols_table)
    return (library, library_interface, symbols_table)

def hash_file(file_system: FileSystem, file_path: str) -> str:
    hasher = hashlib.sha3_256()
    with file_system.open_file(file_path, 'rb') as f:
        for chunk in iter(lambda: f.read(4096), b''):
            hasher.update(chunk)
    return hasher.hexdigest()

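# The same chunked-read pattern with only the standard library, runnable
# as-is (the file path passed at the bottom is hypothetical):
#
#     import hashlib
#
#     def hash_file_stdlib(file_path: str) -> str:
#         hasher = hashlib.sha3_256()
#         with open(file_path, 'rb') as f:
#             # Read in 4 KiB chunks so arbitrarily large files hash in
#             # constant memory.
#             for chunk in iter(lambda: f.read(4096), b''):
#                 hasher.update(chunk)
#         return hasher.hexdigest()
#
#     print(hash_file_stdlib('Pralinefile'))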