def test_find_EventDataSet_none():
    a = ast.parse("a+b*2")

    # find_dataset should raise for an AST that contains no EventDataset.
    try:
        find_dataset(a)
    except Exception:
        pass
    else:
        assert False, 'find_dataset should have raised'

def test_find_EventDataset_Select_and_Many():
    a = EventDataset("file://dude.root") \
        .Select("lambda x: x") \
        .SelectMany("lambda x: x") \
        .value(executor=do_exe)

    assert ["file:///dude.root"] == find_dataset(a).url
    def write_cpp_files(self, ast: ast.AST,
                        output_path: str) -> xAODExecutionInfo:
        r"""
        Given the AST generate the C++ files that need to run. Return them along with
        the input files.
        """

        # Find the base file dataset and mark it.
        from func_adl.xAOD.backend.util_LINQ import find_dataset
        file = find_dataset(ast)
        iterator = crep.cpp_variable("bogus-do-not-use",
                                     top_level_scope(),
                                     cpp_type=None)
        file.rep = crep.cpp_sequence(iterator, iterator)

        # Visit the AST to generate the code structure and find out what the
        # result is going to be.
        qv = query_ast_visitor()
        result_rep = qv.get_rep(ast)

        # Emit the C++ code into our dictionaries to be used in template generation below.
        query_code = cpp_source_emitter()
        qv.emit_query(query_code)
        book_code = cpp_source_emitter()
        qv.emit_book(book_code)
        class_dec_code = qv.class_declaration_code()
        includes = qv.include_files()

        # The replacement dict to pass to the template generator can now be filled
        info = {}
        info['query_code'] = query_code.lines_of_query_code()
        info['book_code'] = book_code.lines_of_query_code()
        info['class_dec'] = class_dec_code
        info['include_files'] = includes

        # We use jinja2 templates. Write out everything.
        template_dir = find_dir("func_adl/xAOD/backend/R21Code")
        j2_env = jinja2.Environment(
            loader=jinja2.FileSystemLoader(template_dir))
        self.copy_template_file(j2_env, info, 'ATestRun_eljob.py', output_path)
        self.copy_template_file(j2_env, info, 'package_CMakeLists.txt',
                                output_path)
        self.copy_template_file(j2_env, info, 'query.cxx', output_path)
        self.copy_template_file(j2_env, info, 'query.h', output_path)
        self.copy_template_file(j2_env, info, 'runner.sh', output_path)

        os.chmod(os.path.join(str(output_path), 'runner.sh'), 0o755)

        # Build the return object.
        return xAODExecutionInfo(
            file.url, result_rep, output_path, 'runner.sh', [
                'ATestRun_eljob.py', 'package_CMakeLists.txt', 'query.cxx',
                'query.h', 'runner.sh'
            ])
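
# A minimal usage sketch, assuming a func_adl query AST named query_ast and a
# writable output directory (both hypothetical placeholders). It mirrors the
# call pattern used in use_executor_xaod_hash_cache below; main_script and
# input_urls are fields read from the returned xAODExecutionInfo there.
exe = atlas_xaod_executor()
f_spec = exe.write_cpp_files(exe.apply_ast_transformations(query_ast),
                             '/tmp/xaod_out')
print(f_spec.main_script)  # the script to run, e.g. 'runner.sh'
print(f_spec.input_urls)   # URL(s) of the input dataset
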
async def exe_for_test(a: ast.AST):
    'Dummy executor that returns the properly rendered AST.'
    # Setup the rep for this filter
    file = find_dataset(a)
    iterator = cpp_variable("bogus-do-not-use",
                            top_level_scope(),
                            cpp_type=None)
    file.rep = cpp_sequence(iterator, iterator)

    # Use the dummy executor to process this, and return it.
    exe = dummy_executor()
    rnr = atlas_xaod_executor()
    exe.evaluate(rnr.apply_ast_transformations(a))
    return exe
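
# A minimal sketch of how exe_for_test might be used in an async test, assuming
# do_exe is an executor that simply returns the rendered AST (as the asserts in
# the tests above imply); the file name and query here are hypothetical.
async def test_exe_for_test_runs():
    a = EventDataset("file://sample.root") \
        .Select("lambda x: x") \
        .value(executor=do_exe)
    exe = await exe_for_test(a)
    assert exe is not None
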
def use_executor_xaod_hash_cache(a: ast.AST,
                                 cache_path: str) -> HashXAODExecutorInfo:
    r'''Write out the C++ code and supporting files to a cache

    Arguments:
        a           The AST that will be transformed
        cache_path  Path to the cache directory. We will write everything out in there.

    Returns:
        HashXAODExecutorInfo    Named tuple with the hash and the list of files in it.
    '''
    # We can only do this if the result is going to be a ROOT file (or files), so make sure.
    if not isinstance(a, ResultTTree):
        raise CacheExeException(
            f'Can only cache results for a ROOT tree, not for {type(a).__name__}.'
        )

    # Calculate the AST hash. If this is already around then we don't need to do very much!
    hash = ast_hash.calc_ast_hash(a)

    # Next, see if the hash file is there.
    query_file_path = os.path.join(cache_path, hash)
    cache_file = os.path.join(query_file_path, 'rep_cache.pickle')
    if os.path.isfile(cache_file):
        # We have a cache hit. Look it up.
        file = find_dataset(a)
        with open(cache_file, 'rb') as f:
            result_cache = pickle.load(f)
            return _build_result(result_cache, file.url)

    # Create the files to run in that location.
    if not os.path.exists(query_file_path):
        os.makedirs(query_file_path)
    exe = atlas_xaod_executor()
    f_spec = exe.write_cpp_files(exe.apply_ast_transformations(a),
                                 query_file_path)

    # Write out the basic info for the result rep and the runner into that location.
    result_cache = (hash, f_spec.main_script, f_spec.result_rep.treename,
                    f_spec.result_rep.filename)
    with open(cache_file, 'wb') as f:
        pickle.dump(result_cache, f)

    return _build_result(result_cache, f_spec.input_urls)
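
# A minimal usage sketch, assuming query_ast is a func_adl AST whose result is
# a ResultTTree and that the cache directory already exists (both hypothetical):
info = use_executor_xaod_hash_cache(query_ast, '/tmp/xaod_cache')
# Judging by the result_cache tuple and the _build_result calls above, the
# returned named tuple carries the hash, the main script, the result tree/file
# names, and the input dataset URLs.
print(info)
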
def test_find_EventDataSet_good():
    a = EventDataset("file://junk.root") \
        .value(executor=do_exe)

    assert ["file:///junk.root"] == find_dataset(a).url