def _compile_file(self, analysed_file: AnalysedFile):
    """
    Compile a single C file, skipping the compile when a reusable object file exists.

    Returns a CompiledFile on success. Compiler failures are returned as a
    TaskException instance (not raised) so the multiprocessing caller can
    collect them.
    """
    # todo: should really use input_to_output_fpath() here
    output_fpath = analysed_file.fpath.with_suffix('.o')

    # Reuse a previous object file when allowed and present.
    if self._config.reuse_artefacts and output_fpath.exists():
        log_or_dot(logger, f'CompileC skipping: {analysed_file.fpath}')
        return CompiledFile(analysed_file, output_fpath)

    with Timer() as timer:
        output_fpath.parent.mkdir(parents=True, exist_ok=True)

        # Build the command: exe + per-path flags + source file + output file.
        command = self.exe.split()
        command.extend(self.flags.flags_for_path(
            path=analysed_file.fpath,
            source_root=self._config.source_root,
            project_workspace=self._config.project_workspace))
        command.append(str(analysed_file.fpath))
        command.extend(['-o', str(output_fpath)])

        log_or_dot(logger, 'CompileC running command: ' + ' '.join(command))
        try:
            run_command(command)
        except Exception as err:
            return TaskException(f"error compiling {analysed_file.fpath}: {err}")

    send_metric(self.name, str(analysed_file.fpath), timer.taken)
    return CompiledFile(analysed_file, output_fpath)
def compile_file(self, analysed_file: AnalysedFile):
    """
    Compile a single Fortran file, skipping the compile when a reusable object file exists.

    Returns a CompiledFile on success. Compiler failures are returned as an
    Exception instance (not raised) for the caller to collect.
    """
    output_fpath = analysed_file.fpath.with_suffix('.o')

    # Reuse a previous object file when allowed and present.
    if self._config.reuse_artefacts and output_fpath.exists():
        log_or_dot(logger, f'CompileFortran skipping: {analysed_file.fpath}')
        return CompiledFile(analysed_file, output_fpath)

    with Timer() as timer:
        output_fpath.parent.mkdir(parents=True, exist_ok=True)

        # Build the command: exe + per-path flags + user FFLAGS + source + output.
        command = self.exe.split()
        command.extend(self.flags.flags_for_path(
            path=analysed_file.fpath,
            source_root=self._config.source_root,
            project_workspace=self._config.project_workspace))
        command.extend(os.getenv('FFLAGS', '').split())
        command.append(str(analysed_file.fpath))
        command.extend(['-o', str(output_fpath)])

        log_or_dot(
            logger, 'CompileFortran running command: ' + ' '.join(command))
        try:
            run_command(command)
        except Exception as err:
            return Exception("Error calling compiler:", err)

    send_metric(self.name, str(analysed_file.fpath),
                {'time_taken': timer.taken, 'start': timer.start})
    return CompiledFile(analysed_file, output_fpath)
def test_error(self):
    # A non-zero return code from the subprocess should surface the captured
    # stderr text inside a RuntimeError.
    expected_message = 'mocked error message'
    failing_result = mock.Mock(returncode=1)
    failing_result.stderr.decode = mock.Mock(return_value=expected_message)

    with mock.patch('fab.util.subprocess.run', return_value=failing_result):
        with pytest.raises(RuntimeError) as err:
            run_command(None)
        assert expected_message in str(err.value)
def run(self, artefact_store: Dict, config):
    """Export the configured source tree with 'fcm export' into the source root."""
    super().run(artefact_store, config)

    # Pin the export to a revision when one was configured.
    if self.revision:
        src = f'{self.src}@{self.revision}'
    else:
        src = self.src

    dest = str(config.source_root / self.dst_label)
    run_command(['fcm', 'export', '--force', src, dest])
def _call_linker(self, filename, objects):
    """
    Link the given object files into the output file *filename*.

    :param filename: Output path for the linked binary.
    :param objects: Iterable of object file paths; sorted for a deterministic command line.
    :raises Exception: If the link command fails; the original error is chained.
    """
    command = self.linker.split()
    command.extend(['-o', filename])
    command.extend(map(str, sorted(objects)))
    # note: this must come after the list of object files
    # Bug fix: a list default ([]) has no .split(), so an unset LDFLAGS
    # crashed with AttributeError. Default to an empty string instead.
    command.extend(os.getenv('LDFLAGS', '').split())
    command.extend(self.flags)
    log_or_dot(logger, 'Link running command: ' + ' '.join(command))
    try:
        run_command(command)
    except Exception as err:
        # Chain the cause so the underlying linker error stays visible.
        raise Exception(f"error linking: {err}") from err
def run(self, artefact_store: Dict, config):
    """Copy source into the project source root using rsync."""
    super().run(artefact_store, config)

    # rsync treats 'src/' as "the contents of src"; without the trailing
    # slash it would create an extra sub folder inside the destination.
    source = os.path.expanduser(self.src)
    if not source.endswith('/'):
        source = source + '/'

    destination: Path = config.source_root / self.dst_label
    destination.mkdir(parents=True, exist_ok=True)

    run_command(['rsync', '-ruq', source, str(destination)])
def do_one_file(self, x90_file):
    """
    Run PSyclone on a single x90 file.

    Returns a list containing the transformed algorithm file and, when it was
    produced, the generated PSy-layer file. On failure the exception is
    logged and returned (not raised).
    """
    log_or_dot(logger=logger, msg=str(x90_file))

    # PSyclone produces two outputs: generated PSy code and a transformed
    # algorithm file.
    psy_fpath = x90_file.parent / (str(x90_file.stem) + '_psy.f90')
    alg_fpath = x90_file.with_suffix('.f90')

    # Redirect both outputs into the build output folder, not the source tree.
    psy_fpath = input_to_output_fpath(
        source_root=self._config.source_root,
        project_workspace=self._config.project_workspace,
        input_path=psy_fpath)
    alg_fpath = input_to_output_fpath(
        source_root=self._config.source_root,
        project_workspace=self._config.project_workspace,
        input_path=alg_fpath)
    psy_fpath.parent.mkdir(parents=True, exist_ok=True)

    # -d specifies "a root directory structure containing kernel source"
    kernel_args = sum([['-d', k] for k in self.kernel_roots], [])
    command = [
        'psyclone', '-api', 'dynamo0.3', '-l', 'all',
        *kernel_args,
        '-opsy', psy_fpath,  # filename of generated PSy code
        '-oalg', alg_fpath,  # filename of transformed algorithm code
        x90_file,
    ]

    # Only invoke PSyclone when we can't reuse a previous result.
    if not (self._config.reuse_artefacts and Path(alg_fpath).exists()):
        try:
            run_command(command)
        except Exception as err:
            logger.error(err)
            return err

    result = [alg_fpath]
    if Path(psy_fpath).exists():
        result.append(psy_fpath)
    return result
def run(self, artefact_store: Dict, config):
    """
    Create one object archive per build target.

    :param artefact_store:
        Contains artefacts created by previous Steps, and where we add our new artefacts.
        This is where the given :class:`~fab.artefacts.ArtefactsGetter` finds the artefacts to process.
    :param config:
        The :class:`fab.build_config.BuildConfig` object where we can read settings
        such as the project workspace folder or the multiprocessing flag.

    """
    super().run(artefact_store, config)

    # We're expecting one or more build targets in the artefact store.
    # When building exes, each build target has a name and a list of compiled files.
    # When building a shared object there is a single build target with no name.
    target_objects = self.source_getter(artefact_store)
    assert target_objects.keys()

    # Cross-check the configured output path against the kind of targets found.
    only_unnamed_target = list(target_objects.keys()) == [None]
    if self.output_fpath and not only_unnamed_target:
        raise ValueError("You must not specify an output path (library) when there are root symbols (exes)")
    if not self.output_fpath and only_unnamed_target:
        raise ValueError("You must specify an output path when building a library.")

    target_archives = artefact_store.setdefault(self.output_collection, {})
    for root, objects in target_objects.items():
        if root:
            # each exe gets its own archive, named after the root symbol
            output_fpath = str(config.project_workspace / BUILD_OUTPUT / f'{root}.a')
        else:
            # a single archive with a caller-supplied, templated filename
            assert len(target_objects) == 1, "unexpected root of None with multiple build targets"
            output_fpath = Template(self.output_fpath).substitute(
                output=config.project_workspace / BUILD_OUTPUT)

        command = [self.archiver, 'cr', output_fpath]
        command.extend(map(str, sorted(objects)))

        log_or_dot(logger, 'CreateObjectArchive running command: ' + ' '.join(command))
        try:
            run_command(command)
        except Exception as err:
            raise Exception(f"error creating object archive: {err}")

        target_archives[root] = [output_fpath]
def _process_artefact(self, fpath):
    """
    Preprocess one file from the source folder into the output folder,
    giving the output file a lower case extension.

    Returns the path of the output file.
    """
    output_fpath = input_to_output_fpath(
        source_root=self._config.source_root,
        project_workspace=self._config.project_workspace,
        input_path=fpath).with_suffix(self.output_suffix)

    # Reuse a previous result when allowed and present.
    if self._config.reuse_artefacts and output_fpath.exists():
        log_or_dot(logger, f'Preprocessor skipping: {fpath}')
        return output_fpath

    with Timer() as timer:
        output_fpath.parent.mkdir(parents=True, exist_ok=True)

        # Build the command: exe + per-path flags + input file + output file.
        command = self.exe.split()
        command.extend(self.flags.flags_for_path(
            path=fpath,
            source_root=self._config.source_root,
            project_workspace=self._config.project_workspace))
        command.append(str(fpath))
        command.append(str(output_fpath))

        log_or_dot(
            logger, 'PreProcessor running command: ' + ' '.join(command))
        try:
            run_command(command)
        except Exception as err:
            raise Exception(f"error preprocessing {fpath}: {err}")

    send_metric(self.name, str(fpath),
                {'time_taken': timer.taken, 'start': timer.start})
    return output_fpath
def test_no_error(self):
    # A zero return code from the subprocess should complete without raising.
    success_result = mock.Mock(returncode=0)
    with mock.patch('fab.util.subprocess.run', return_value=success_result):
        run_command(None)
def run(self, artefact_store: Dict, config):
    """
    Run the LFRic configurator tool chain: rose_picker plus the namelist,
    loader and feigns generators.

    Records the generated Fortran files in the 'configurator_output'
    artefact collection for later steps.

    :param artefact_store: Where the generated-source artefact is recorded.
    :param config: Build config providing the source root.
    """
    super().run(artefact_store=artefact_store, config=config)

    rose_picker_tool = self.gpl_utils_source / 'rose_picker/rose_picker'
    gen_namelist_tool = self.lfric_source / 'infrastructure/build/tools/GenerateNamelist'
    gen_loader_tool = self.lfric_source / 'infrastructure/build/tools/GenerateLoader'
    gen_feigns_tool = self.lfric_source / 'infrastructure/build/tools/GenerateFeigns'

    config_dir = self.config_dir or config.source_root / 'configuration'

    env = os.environ.copy()
    rose_lfric_path = self.gpl_utils_source / 'lib/python'
    # Bug fix: '+=' raised KeyError when PYTHONPATH was not already set.
    if 'PYTHONPATH' in env:
        env['PYTHONPATH'] += f':{rose_lfric_path}'
    else:
        env['PYTHONPATH'] = str(rose_lfric_path)

    # "rose picker"
    # creates rose-meta.json and config_namelists.txt in gungho/source/configuration
    logger.info('rose_picker')
    run_command(
        command=[
            str(rose_picker_tool), str(self.rose_meta_conf),
            '-directory', str(config_dir),
            '-include_dirs', self.lfric_source
        ],
        env=env,
    )

    # "build_config_loaders"
    # builds a bunch of f90s from the json
    logger.info('GenerateNamelist')
    run_command(command=[
        str(gen_namelist_tool),
        '-verbose',
        str(config_dir / 'rose-meta.json'),
        '-directory', str(config_dir),
        # '--norandom_enums'
    ])

    # create configuration_mod.f90 in source root
    logger.info('GenerateLoader')
    # Bug fix: the bare open() leaked the file handle; use a context manager.
    with open(config_dir / 'config_namelists.txt') as namelists_file:
        names = [name.strip() for name in namelists_file.readlines()]
    configuration_mod_fpath = config.source_root / 'configuration_mod.f90'
    run_command(command=[
        str(gen_loader_tool),
        configuration_mod_fpath,
        *names,
    ])

    # create feign_config_mod.f90 in source root
    logger.info('GenerateFeigns')
    feign_config_mod_fpath = config.source_root / 'feign_config_mod.f90'
    run_command(command=[
        str(gen_feigns_tool),
        str(config_dir / 'rose-meta.json'),
        '-output', feign_config_mod_fpath,
    ])

    # put the generated source into an artefact
    artefact_store['configurator_output'] = [
        configuration_mod_fpath,
        feign_config_mod_fpath
    ]