async def execute_result_async(self, a: ast.AST) -> Any:
    """Render the query AST as C++ source files in ``self._dir``.

    Args:
        a: The func_adl query AST to transform and render.

    Returns:
        ExecutorInfo with the main run script name and the result file name.
    """
    # BUG FIX: the original guarded mkdir with `if self._dir.exists():`,
    # which only created the directory when it was already there (inverted
    # condition). mkdir(exist_ok=True) is idempotent, so call it always.
    self._dir.mkdir(parents=True, exist_ok=True)
    exe = atlas_xaod_executor()
    f_spec = exe.write_cpp_files(exe.apply_ast_transformations(a), self._dir)
    return ExecutorInfo(f_spec.main_script, f_spec.result_rep.filename)
def test_bad_ast_no_call(tmp_path):
    """The executor must reject an AST whose top level is not a func_adl call."""
    # Wrap a valid query AST in a unary minus - bogus as a func_adl top level.
    base_query = query_as_ast()
    bogus_ast = ast.UnaryOp(op=ast.USub(), operand=base_query.query_ast)

    executor = atlas_xaod_executor()
    with pytest.raises(ValueError) as err:
        executor.write_cpp_files(executor.apply_ast_transformations(bogus_ast), tmp_path)

    assert 'func_adl ast' in str(err.value)
def test_bad_ast_no_call_to_name(tmp_path):
    """A Call whose func is not a recognized func_adl name must be rejected."""
    # Build a call on an attribute of a constant - bogus as a func_adl top level.
    base_query = query_as_ast()
    bogus_ast = ast.Call(
        func=ast.Attribute(value=ast.Constant(10), attr='fork'),
        args=[base_query.query_ast])

    executor = atlas_xaod_executor()
    with pytest.raises(ValueError) as err:
        executor.write_cpp_files(executor.apply_ast_transformations(bogus_ast), tmp_path)

    assert 'func_adl ast' in str(err.value)
def test_xaod_executor(tmp_path):
    """A simple Select query should write every generated C++ file to disk."""
    query = (
        query_as_ast()
        .Select('lambda e: e.EventInfo("EventInfo").runNumber()')
        .value()
    )
    executor = atlas_xaod_executor()
    spec = executor.write_cpp_files(executor.apply_ast_transformations(query), tmp_path)

    # Each file named in the spec must actually exist in the output directory.
    assert all((tmp_path / filename).exists() for filename in spec.all_filenames)
async def execute_result_async(self, a: ast.AST) -> Any:
    """Dummy executor that returns the ast properly rendered.

    If ``self._q_roundtrip`` is true, the ast is first round-tripped through
    qastle (python ast -> text -> python ast) before evaluation.

    Args:
        a: The func_adl query AST to evaluate.

    Returns:
        The `dummy_executor` instance holding the rendered result.
    """
    # Round trip qastle if requested.
    if self._q_roundtrip:
        import qastle
        print(f'before: {ast.dump(a)}')
        a_text = qastle.python_ast_to_text_ast(a)
        # text_ast_to_python_ast returns a Module; unwrap to the expression.
        a = qastle.text_ast_to_python_ast(a_text).body[0].value
        print(f'after: {ast.dump(a)}')

    # Setup the rep for this dataset.
    from func_adl import find_EventDataset
    file = find_EventDataset(a)
    iterator = cpp_variable("bogus-do-not-use", top_level_scope(), cpp_type=None)
    file.rep = cpp_sequence(iterator, iterator, top_level_scope())  # type: ignore

    # Use the dummy executor to process this, and return it.
    # FIX: removed unused local `rnr = atlas_xaod_executor()` - it was
    # constructed and never referenced.
    exe = dummy_executor()
    exe.evaluate(a)
    return exe
def evaluate(self, a: ast.AST):
    """Transform the query AST and render it as a ROOT result representation.

    Stores the visitor in ``self.QueryVisitor`` and the rendered result in
    ``self.ResultRep``.
    """
    executor = atlas_xaod_executor()
    self.QueryVisitor = query_ast_visitor()
    transformed_ast = executor.apply_ast_transformations(a)
    self.ResultRep = self.QueryVisitor.get_as_ROOT(transformed_ast)
async def execute_result_async(self, a: ast.AST) -> Any:
    """Run the rendered query locally inside a docker container.

    The query AST is rendered to C++ in a temporary directory, a file list of
    the input data files is written, and the generated script is executed in
    the ``atlas/analysisbase:latest`` container with the data directory
    mounted at ``/data``.

    Args:
        a: The func_adl query AST to run.

    Returns:
        The extracted result TTree from the container run.

    Raises:
        AtlasXAODDockerException: if an input file is missing, the input files
            span more than one directory, or the docker command fails.
    """
    # Construct the files we will run.
    with tempfile.TemporaryDirectory() as local_run_dir_p:
        local_run_dir = Path(local_run_dir_p)
        # World-writable so the container user can write results back.
        local_run_dir.chmod(0o777)

        exe = atlas_xaod_executor()
        f_spec = exe.write_cpp_files(exe.apply_ast_transformations(a), local_run_dir)

        # Write out a file with the mapped in directories.
        # Until we better figure out how to deal with this, there are some restrictions
        # on file locations: all data files must live in a single directory, since
        # only one directory is mounted into the container.
        datafile_dir: Optional[Path] = None
        with open(f'{local_run_dir}/filelist.txt', 'w') as flist_out:
            for u in self._files:
                if not u.exists():
                    raise AtlasXAODDockerException(
                        f'Cannot access (or find) file {u}')

                ds_path = u.parent
                datafile = u.name
                flist_out.write(f'/data/{datafile}\n')
                if datafile_dir is None:
                    datafile_dir = ds_path
                else:
                    if ds_path != datafile_dir:
                        # CONSISTENCY FIX: raise the domain exception rather
                        # than bare Exception (message unchanged; subclass of
                        # Exception so existing callers still catch it).
                        raise AtlasXAODDockerException(
                            f'Data files must be from the same directory. Have seen {ds_path} and {datafile_dir} so far.'
                        )

        # Build a docker command to run this.
        datafile_mount = "" if datafile_dir is None else f'-v {datafile_dir}:/data'
        docker_cmd = f'docker run --rm -v {f_spec.output_path}:/scripts -v {f_spec.output_path}:/results {datafile_mount} atlas/analysisbase:latest /scripts/{f_spec.main_script}'
        proc = await asyncio.create_subprocess_shell(
            docker_cmd,
            stdout=asyncio.subprocess.PIPE,  # type: ignore
            stderr=asyncio.subprocess.PIPE)  # type: ignore
        p_stdout, p_stderr = await proc.communicate()

        # On failure (or when requested via the module flag) dump the run log.
        if proc.returncode != 0 or dump_running_log:
            lg = logging.getLogger(__name__)
            level = logging.INFO if proc.returncode == 0 else logging.ERROR
            lg.log(level, f"Result of run: {proc.returncode}")
            lg.log(level, 'std Output: ')
            dump_split_string(p_stdout.decode(),
                              lambda l: lg.log(level, f'  {l}'))
            lg.log(level, 'std Error: ')
            dump_split_string(p_stderr.decode(),
                              lambda l: lg.log(level, f'  {l}'))
        # On failure (or when requested) dump the generated C++ source.
        if dump_cpp or proc.returncode != 0:
            level = logging.INFO if proc.returncode == 0 else logging.ERROR
            lg = logging.getLogger(__name__)
            with open(os.path.join(str(local_run_dir), "query.cxx"), 'r') as f:
                lg.log(level, 'C++ Source Code:')
                dump_split_string(f.read(), lambda l: lg.log(level, f'  {l}'))
        if proc.returncode != 0:
            # CONSISTENCY FIX: domain exception instead of bare Exception
            # (message unchanged).
            raise AtlasXAODDockerException(
                f"Docker command failed with error {proc.returncode} ({docker_cmd})"
            )

        # Now that we have run, we can pluck out the result.
        assert isinstance(f_spec.result_rep, cpp_ttree_rep), 'Unknown return type'
        return _extract_result_TTree(f_spec.result_rep, local_run_dir)
def test_ctor():
    """Constructing the executor should not raise."""
    _ = atlas_xaod_executor()