def run(self, hashed_file: HashedFile):
    fpath, file_hash = hashed_file
    log_or_dot(logger, f"analysing {fpath}")
    analysed_file = AnalysedFile(fpath=fpath, file_hash=file_hash)

    index = clang.cindex.Index.create()
    translation_unit = index.parse(fpath, args=["-xc"])

    # Create include region line mappings
    self._locate_include_regions(translation_unit)

    # Now walk the actual nodes and find all relevant external symbols
    usr_symbols: List[str] = []
    for node in translation_unit.cursor.walk_preorder():
        if not node.spelling:
            continue
        # ignore symbols from system include regions
        if self._check_for_include(node.location.line) == "sys_include":
            continue
        logger.debug('Considering node: %s', node.spelling)

        if node.kind in {clang.cindex.CursorKind.FUNCTION_DECL, clang.cindex.CursorKind.VAR_DECL}:
            self._process_symbol_declaration(analysed_file, node, usr_symbols)
        elif node.kind in {clang.cindex.CursorKind.CALL_EXPR, clang.cindex.CursorKind.DECL_REF_EXPR}:
            self._process_symbol_dependency(analysed_file, node, usr_symbols)

    return analysed_file
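# A minimal, self-contained sketch of the clang.cindex walk used above
# (assuming libclang and its Python bindings are installed; 'example.c' is a
# hypothetical input file). It shows how the analyser distinguishes symbol
# declarations from symbol dependencies by cursor kind.
import clang.cindex

index = clang.cindex.Index.create()
tu = index.parse('example.c', args=['-xc'])

decl_kinds = {clang.cindex.CursorKind.FUNCTION_DECL, clang.cindex.CursorKind.VAR_DECL}
dep_kinds = {clang.cindex.CursorKind.CALL_EXPR, clang.cindex.CursorKind.DECL_REF_EXPR}

for node in tu.cursor.walk_preorder():
    if not node.spelling:
        continue
    if node.kind in decl_kinds:
        print('declares:', node.spelling)
    elif node.kind in dep_kinds:
        print('depends on:', node.spelling)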
def compile_file(self, analysed_file: AnalysedFile):
    output_fpath = analysed_file.fpath.with_suffix('.o')

    # already compiled?
    if self._config.reuse_artefacts and output_fpath.exists():
        log_or_dot(logger, f'CompileFortran skipping: {analysed_file.fpath}')
    else:
        with Timer() as timer:
            output_fpath.parent.mkdir(parents=True, exist_ok=True)

            command = self.exe.split()
            command.extend(self.flags.flags_for_path(
                path=analysed_file.fpath,
                source_root=self._config.source_root,
                project_workspace=self._config.project_workspace))
            command.extend(os.getenv('FFLAGS', '').split())
            command.append(str(analysed_file.fpath))
            command.extend(['-o', str(output_fpath)])

            log_or_dot(logger, 'CompileFortran running command: ' + ' '.join(command))
            try:
                run_command(command)
            except Exception as err:
                # return, rather than raise, so the multiprocessing caller can collect failures
                return Exception(f"error calling compiler: {err}")

        send_metric(self.name, str(analysed_file.fpath),
                    {'time_taken': timer.taken, 'start': timer.start})

    return CompiledFile(analysed_file, output_fpath)
def _compile_file(self, analysed_file: AnalysedFile):
    # todo: should really use input_to_output_fpath() here
    output_fpath = analysed_file.fpath.with_suffix('.o')

    # already compiled?
    if self._config.reuse_artefacts and output_fpath.exists():
        log_or_dot(logger, f'CompileC skipping: {analysed_file.fpath}')
    else:
        with Timer() as timer:
            output_fpath.parent.mkdir(parents=True, exist_ok=True)

            command = self.exe.split()
            command.extend(self.flags.flags_for_path(
                path=analysed_file.fpath,
                source_root=self._config.source_root,
                project_workspace=self._config.project_workspace))
            command.append(str(analysed_file.fpath))
            command.extend(['-o', str(output_fpath)])

            log_or_dot(logger, 'CompileC running command: ' + ' '.join(command))
            try:
                run_command(command)
            except Exception as err:
                # return, rather than raise, so the multiprocessing caller can collect failures
                return TaskException(f"error compiling {analysed_file.fpath}: {err}")

        send_metric(self.name, str(analysed_file.fpath), timer.taken)

    return CompiledFile(analysed_file, output_fpath)
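# An illustrative sketch of how both compile steps above assemble their
# command lines: compiler executable, then per-path flags from the flags
# config, then (for Fortran) flags from the FFLAGS environment variable, then
# the source file and output path. All flag values here are hypothetical.
import os

os.environ['FFLAGS'] = '-O2 -g'

command = ['gfortran']                       # from self.exe
command += ['-I/path/to/includes']           # from self.flags.flags_for_path(...)
command += os.getenv('FFLAGS', '').split()   # -> ['-O2', '-g']
command += ['my_mod.f90', '-o', 'my_mod.o']
print(' '.join(command))
# gfortran -I/path/to/includes -O2 -g my_mod.f90 -o my_mod.o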
def _call_linker(self, filename, objects):
    command = self.linker.split()
    command.extend(['-o', filename])
    command.extend(map(str, sorted(objects)))
    # note: LDFLAGS must come after the list of object files, so the linker
    # can resolve symbols in the libraries it names
    command.extend(os.getenv('LDFLAGS', '').split())
    command.extend(self.flags)

    log_or_dot(logger, 'Link running command: ' + ' '.join(command))
    try:
        run_command(command)
    except Exception as err:
        raise Exception(f"error linking: {err}")
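# Why LDFLAGS comes after the object files: traditional Unix linkers resolve
# symbols left to right, so libraries must appear after the objects that
# reference them. A sketch with hypothetical values:
import os

os.environ['LDFLAGS'] = '-L/opt/lib -lm'

command = ['gcc', '-o', 'my_exe']
command += ['a.o', 'b.o']                    # objects first...
command += os.getenv('LDFLAGS', '').split()  # ...then the libraries they need
print(' '.join(command))
# gcc -o my_exe a.o b.o -L/opt/lib -lm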
def do_one_file(self, x90_file):
    log_or_dot(logger=logger, msg=str(x90_file))

    generated = x90_file.parent / (str(x90_file.stem) + '_psy.f90')
    modified_alg = x90_file.with_suffix('.f90')

    # generate into the build output, not the source
    generated = input_to_output_fpath(
        source_root=self._config.source_root,
        project_workspace=self._config.project_workspace,
        input_path=generated)
    modified_alg = input_to_output_fpath(
        source_root=self._config.source_root,
        project_workspace=self._config.project_workspace,
        input_path=modified_alg)
    generated.parent.mkdir(parents=True, exist_ok=True)

    # -d specifies "a root directory structure containing kernel source"
    kernel_options = sum([['-d', k] for k in self.kernel_roots], [])
    command = [
        'psyclone', '-api', 'dynamo0.3', '-l', 'all',
        *kernel_options,
        '-opsy', generated,     # filename of generated PSy code
        '-oalg', modified_alg,  # filename of transformed algorithm code
        x90_file,
    ]

    # already transformed?
    if not (self._config.reuse_artefacts and Path(modified_alg).exists()):
        try:
            run_command(command)
        except Exception as err:
            logger.error(err)
            return err

    result = [modified_alg]
    if Path(generated).exists():
        result.append(generated)
    return result
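# A sketch of the filename convention above: for each algorithm file foo.x90,
# PSyclone writes the transformed algorithm code to foo.f90 and the generated
# PSy-layer code to foo_psy.f90. The path is hypothetical.
from pathlib import Path

x90_file = Path('source/algorithm/foo.x90')
generated = x90_file.parent / (x90_file.stem + '_psy.f90')
modified_alg = x90_file.with_suffix('.f90')
print(generated)     # source/algorithm/foo_psy.f90
print(modified_alg)  # source/algorithm/foo.f90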
def run(self, hashed_file: HashedFile):
    fpath, file_hash = hashed_file
    log_or_dot(logger, f"analysing {fpath}")

    # parse the file
    try:
        tree = self._parse_file(fpath=fpath)
    except Exception as err:
        return err
    if tree.content[0] is None:
        logger.debug(f" empty tree found when parsing {fpath}")
        return EmptySourceFile(fpath)

    analysed_file = AnalysedFile(fpath=fpath, file_hash=file_hash)

    # see what's in the tree
    try:
        for obj in iter_content(tree):
            obj_type = type(obj)
            # todo: replace these with a function lookup dict[type, func]? Or the match statement, new in Python 3.10

            if obj_type == Use_Stmt:
                self._process_use_statement(analysed_file, obj)  # raises

            elif obj_type == Program_Stmt:
                analysed_file.add_symbol_def(str(obj.get_name()))

            elif obj_type == Module_Stmt:
                analysed_file.add_module_def(str(obj.get_name()))

            elif obj_type in (Subroutine_Stmt, Function_Stmt):
                self._process_subroutine_or_function(analysed_file, fpath, obj)

            # todo: we've not needed this so far, for jules or um...(?)
            # inert placeholder: a type never equals this string, so the branch never runs
            elif obj_type == "variable binding not yet supported":
                return self._process_variable_binding(fpath)

            elif obj_type == Comment:
                self._process_comment(analysed_file, obj)

    except Exception as err:
        return err

    logger.debug(f" analysed {analysed_file.fpath}")
    return analysed_file
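# A minimal, self-contained sketch of the kind of fparser2 tree inspection
# done above (assuming fparser2 is installed), using fparser's own walk helper
# in place of fab's iter_content: parse a snippet and pick out the statement
# types the analyser dispatches on.
from fparser.common.readfortran import FortranStringReader
from fparser.two.Fortran2003 import Module_Stmt, Use_Stmt
from fparser.two.parser import ParserFactory
from fparser.two.utils import walk

source = """
module my_mod
  use other_mod
end module my_mod
"""

parser = ParserFactory().create(std='f2008')
tree = parser(FortranStringReader(source))

for stmt in walk(tree, (Use_Stmt, Module_Stmt)):
    print(type(stmt).__name__, '->', str(stmt))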
def run(self, artefact_store: Dict, config):
    """
    :param artefact_store:
        Contains artefacts created by previous Steps, and where we add our new artefacts.
        This is where the given :class:`~fab.artefacts.ArtefactsGetter` finds the artefacts to process.
    :param config:
        The :class:`fab.build_config.BuildConfig` object where we can read settings
        such as the project workspace folder or the multiprocessing flag.

    """
    super().run(artefact_store, config)

    # We're expecting one or more build targets in the artefact store.
    # When building exes, each build target has a name and a list of compiled files.
    # When building a shared object there is a single build target with no name.
    target_objects = self.source_getter(artefact_store)
    assert target_objects.keys()
    if self.output_fpath and list(target_objects.keys()) != [None]:
        raise ValueError("You must not specify an output path (library) when there are root symbols (exes)")
    if not self.output_fpath and list(target_objects.keys()) == [None]:
        raise ValueError("You must specify an output path when building a library.")

    target_archives = artefact_store.setdefault(self.output_collection, {})
    for root, objects in target_objects.items():

        if root:
            # we're building an object archive for an exe
            output_fpath = str(config.project_workspace / BUILD_OUTPUT / f'{root}.a')
        else:
            # we're building a single object archive with a given filename
            assert len(target_objects) == 1, "unexpected root of None with multiple build targets"
            output_fpath = Template(self.output_fpath).substitute(
                output=config.project_workspace / BUILD_OUTPUT)

        command = [self.archiver]
        command.extend(['cr', output_fpath])
        command.extend(map(str, sorted(objects)))

        log_or_dot(logger, 'CreateObjectArchive running command: ' + ' '.join(command))
        try:
            run_command(command)
        except Exception as err:
            raise Exception(f"error creating object archive: {err}")

        target_archives[root] = [output_fpath]
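# A sketch of the two shapes target_objects can take, and the Template
# substitution used for the library case. All names and paths are
# hypothetical.
from pathlib import Path
from string import Template

exes = {'my_prog': {'a.o', 'b.o'}}  # exe build: one named root per target
library = {None: {'a.o', 'b.o'}}    # library build: a single unnamed root

# self.output_fpath would be a template string such as '$output/mylib.a'
output_fpath = Template('$output/mylib.a').substitute(
    output=Path('/workspace') / 'build_output')
print(output_fpath)  # /workspace/build_output/mylib.a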
def _process_artefact(self, fpath):
    """
    Expects an input file in the source folder.
    Writes the output file to the output folder, with a lower case extension.

    """
    output_fpath = input_to_output_fpath(
        source_root=self._config.source_root,
        project_workspace=self._config.project_workspace,
        input_path=fpath).with_suffix(self.output_suffix)

    # already preprocessed?
    if self._config.reuse_artefacts and output_fpath.exists():
        log_or_dot(logger, f'Preprocessor skipping: {fpath}')
    else:
        with Timer() as timer:
            output_fpath.parent.mkdir(parents=True, exist_ok=True)

            command = self.exe.split()
            command.extend(self.flags.flags_for_path(
                path=fpath,
                source_root=self._config.source_root,
                project_workspace=self._config.project_workspace))
            command.append(str(fpath))
            command.append(str(output_fpath))

            log_or_dot(logger, 'PreProcessor running command: ' + ' '.join(command))
            try:
                run_command(command)
            except Exception as err:
                raise Exception(f"error preprocessing {fpath}: {err}")

        send_metric(self.name, str(fpath),
                    {'time_taken': timer.taken, 'start': timer.start})

    return output_fpath
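# A sketch of the suffix handling above, with hypothetical paths: the
# preprocessed file keeps its name but is written to the build output tree
# with a lower-case extension (e.g. .F90 -> .f90 when output_suffix is
# '.f90'). input_to_output_fpath would map the source tree onto the build
# output tree; here the mapping is written out by hand.
from pathlib import Path

input_path = Path('/workspace/source/my_mod.F90')
output_fpath = (Path('/workspace/build_output') / input_path.name).with_suffix('.f90')
print(output_fpath)  # /workspace/build_output/my_mod.f90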