def onsplit_codegen(unit, *args):
    """Assemble and dispatch a SPLIT_CODEGEN_BASE macro call.

    Positional args: the generator tool, the output prefix, then optional
    trailing options. The OUT_NUM keyword overrides the default number of
    generated .cpp outputs.
    """
    flat_args, spec_args = sort_by_keywords({"OUT_NUM": 1}, args)

    if "OUT_NUM" in spec_args:
        num_outputs = int(spec_args["OUT_NUM"][0])
    else:
        num_outputs = _DEFAULT_CPP_PARTS + _ADDITIONAL_STREAM_COUNT

    tool, prefix = flat_args[0], flat_args[1]

    # tool, prefix, then the declared generated outputs
    cmd = [tool, prefix, 'OUT'] + ['{}.{}.cpp'.format(prefix, idx) for idx in range(num_outputs)]

    parts_opts = ['--cpp-parts', str(int(num_outputs) - _ADDITIONAL_STREAM_COUNT)]

    extra = flat_args[2:]
    if extra:
        # the OPTS marker is inserted only when the trailing args do not
        # start with an OUTPUT_INCLUDES section
        if extra[0] != 'OUTPUT_INCLUDES':
            cmd.append('OPTS')
        cmd += parts_opts + extra
    else:
        cmd += ['OPTS'] + parts_opts

    unit.onsplit_codegen_base(cmd)
def onregister_yql_python_udf(unit, *args):
    """Register a YQL Python UDF: set the ABI version, add required
    PEERDIRs and generate the registration .cpp source.

    Keywords: NAME (default 'CustomPython') and RESOURCE_NAME (defaults
    to NAME). Positional args are not allowed.
    """
    flat, kv = sort_by_keywords({'NAME': 1, 'RESOURCE_NAME': 1}, args)
    assert len(flat) == 0

    name = get_or_default(kv, 'NAME', 'CustomPython')
    resource_name = get_or_default(kv, 'RESOURCE_NAME', name)

    unit.onyql_abi_version(['2', '0', '0'])
    unit.onpeerdir(['yql/udfs/common/python/python_udf'])
    unit.onpeerdir(['yql/library/udf'])

    if unit.get('USE_ARCADIA_PYTHON') == 'yes':
        flavor = 'Arcadia'
        # Arcadia python additionally needs the runtime and the common
        # python UDF entry point
        unit.onpeerdir([
            'library/python/runtime',
            'yql/udfs/common/python/main',
        ])
    else:
        flavor = 'System'

    path = name + '.yql_python_udf.cpp'
    unit.onbuiltin_python(
        [
            'build/scripts/gen_yql_python_udf.py',
            flavor, name, resource_name, path,
            'OUT', path,
            'OUTPUT_INCLUDES',
            'yql/udfs/common/python/python_udf/python_udf.h',
            'yql/library/udf/udf_registrator.h',
        ]
    )
Example #3
0
def oncopy(unit, *args):
    """Copy each positional source file via COPY_FILE.

    Keywords:
        FROM: directory prepended to every source path.
        DESTINATION: directory prepended to every target path.
        KEEP_DIR_STRUCT: preserve the sources' relative directory layout.
        RESULT: variable that receives the space-joined target paths.
    """
    flat_args, spec_args = sort_by_keywords(
        {'RESULT': 1, 'KEEP_DIR_STRUCT': 0, 'DESTINATION': 1, 'FROM': 1}, args)

    dest_dir = spec_args.get('DESTINATION', [''])[0]
    from_dir = spec_args.get('FROM', [''])[0]
    keep_struct = 'KEEP_DIR_STRUCT' in spec_args
    save_in_var = 'RESULT' in spec_args
    targets = []

    for source in flat_args:
        parts = source.split(os.sep)
        filename = parts[-1]
        rel_path = ''
        # keep intermediate directories only when explicitly requested
        if keep_struct and parts[:-1]:
            rel_path = os.path.join(*parts[:-1])
        source_path = os.path.join(from_dir, rel_path, filename)
        target_path = os.path.join(dest_dir, rel_path, filename)
        if save_in_var:
            targets.append(target_path)
        unit.oncopy_file([source_path, target_path])

    if save_in_var:
        unit.set([spec_args["RESULT"][0], " ".join(targets)])
Example #4
0
def on_test(unit, *args):
    """Fill per-unit test properties (TEST-NAME, TEST-TIMEOUT, etc.) from
    an ADD_TEST-style macro call.

    Positional args: (test_type, ..., test_file) — the test file is taken
    from flat_args[2]. Keywords: DEPENDS (multi), TIMEOUT (single),
    DATA (multi).
    """
    flat_args, spec_args = _common.sort_by_keywords({"DEPENDS": -1, "TIMEOUT": 1, "DATA": -1}, args)
    custom_deps = ' '.join(spec_args["DEPENDS"]) if "DEPENDS" in spec_args else ''
    test_data = '\"' + ';'.join(spec_args["DATA"]) + '\"' if "DATA" in spec_args else ''
    timeout = spec_args.get("TIMEOUT", ['0'])[0]
    test_type = flat_args[0]

    # Map the macro's test type onto the runner's script-rel-path;
    # unknown types leave it as None, matching the original if/elif chain.
    script_rel_path = {
        "PY_TEST": "py.test",
        "FLEUR": "ytest.py",
        "PEP8": "py.test.pep8",
        "PY_FLAKES": "py.test.flakes",
    }.get(test_type)

    # FIX: os.path.join() with a single argument was a no-op and has been
    # removed. NOTE(review): args[0] is the first *raw* argument, which is
    # the test type unless a keyword comes first — resolving a test type as
    # a path looks suspicious; confirm against callers whether this should
    # be unit.path() as in sibling handlers.
    test_dir = unit.resolve(args[0])
    test_file = flat_args[2]
    unit.set(['TEST-NAME', os.path.splitext(test_file)[0]])
    unit.set(['TEST-TIMEOUT', timeout])
    unit.set(['SCRIPT-REL-PATH', script_rel_path])
    unit.set(['TESTED-PROJECT-NAME', test_file])
    unit.set(['SOURCE-FOLDER-PATH', test_dir])
    unit.set(['CUSTOM-DEPENDENCIES', custom_deps])
    unit.set(['TEST-DATA', test_data])
Example #5
0
def onacceleo(unit, *args):
    """Build a RUN_JAVA invocation of the Acceleo code generator
    (ru.yandex.se.logsng.tool.Cli).

    Keywords: XSD/MTL/LANG generator inputs, MTL_ROOT (defaults to the
    unit path), MTL_EXTENSION extra classpath entries, OUT/OUT_NOAUTO/
    OUTPUT_INCLUDES output declarations, DEBUG (keeps tool output visible).
    """
    flat, kv = common.sort_by_keywords(
        {'XSD': -1, 'MTL': -1, 'MTL_ROOT': 1, 'MTL_EXTENSION': -1, 'LANG': -1, 'OUT': -1, 'OUT_NOAUTO': -1, 'OUTPUT_INCLUDES': -1, 'DEBUG': 0},
        args
    )

    # MTL_ROOT falls back to the unit's source-relative path.
    try:
        mtlroot = kv['MTL_ROOT'][0]
    except Exception:
        mtlroot = unit.path().replace('$S/', '')

    classpath = ['$SCARAB', ]  # XXX special word for ya make to replace following paths with real classpath
    classpath.append('tools/acceleo')
    classpath.extend(kv.get('MTL_EXTENSION', []))

    # Recurse into each real classpath entry (skip the '$SCARAB' marker);
    # not done under the MSVS IDE call.
    if not unit.get('IDE_MSVS_CALL'):
        for jar in classpath[1:]:
            unit.oninternal_recurse(jar)

    classpath = ':'.join(classpath)

    # Generate java cmd
    cmd = [
        '-classpath',
        classpath,
        '-Dfile.encoding=UTF-8',
        'ru.yandex.se.logsng.tool.Cli',
    ]

    for xsd in kv.get('XSD', []):
        cmd += ['--xsd', xsd]

    for mtl in kv.get('MTL', []):
        cmd += ['--mtl', mtl]

    for lang in kv.get('LANG', []):
        cmd += ['--lang', lang]

    cmd += ['--output-dir', unit.path().replace('$S/', '${ARCADIA_BUILD_ROOT}/')]
    cmd += ['--build-root', '${ARCADIA_BUILD_ROOT}']
    cmd += ['--source-root', '${ARCADIA_ROOT}']
    cmd += ['--mtl-root', mtlroot]

    # Generate RUN_JAVA args
    # NOTE: run_java aliases cmd (same list object), so the appends below
    # also mutate cmd; harmless because cmd is not used again afterwards.
    run_java = cmd

    if 'DEBUG' not in kv:
        run_java += ['HIDE_OUTPUT']

    inputs = kv.get('XSD', []) + kv.get('MTL', []) + kv.get('LANG', [])
    if inputs:
        run_java += ['IN'] + inputs

    for k in 'OUT', 'OUT_NOAUTO', 'OUTPUT_INCLUDES':
        if kv.get(k):
            run_java += [k] + kv[k]

    unit.onrun_java(run_java)
Example #6
0
def onadd_ytest(unit, *args):
    """Build a ytest dart record from ADD_YTEST macro args and store it in
    the DART_DATA property and the dart output file.

    flat_args: (test_name, script_rel_path, ...); remaining options come
    from keyword args and unit-level variables.
    """
    keywords = {"DEPENDS": -1, "DATA": -1, "TIMEOUT": 1, "FORK_MODE": 1, "SPLIT_FACTOR": 1,
                "FORK_SUBTESTS": 0, "FORK_TESTS": 0}
    flat_args, spec_args = _common.sort_by_keywords(keywords, args)

    if flat_args[1] == "fuzz.test":
        # fuzz tests implicitly depend on their corpus description
        unit.ondata("arcadia/fuzzing/{}/corpus.json".format(strip_roots(unit.path())))
    elif flat_args[1] == "coverage.extractor" and not match_coverage_extractor_requirements(unit):
        # XXX
        # Current ymake implementation doesn't allow to call macro inside the 'when' body
        # that's why we add ADD_YTEST(coverage.extractor) to every PROGRAM entry and check requirements later
        return

    # FORK_SUBTESTS/FORK_TESTS flags win over FORK_MODE, which wins over
    # the unit-level TEST_FORK_MODE variable.
    fork_mode = []
    if 'FORK_SUBTESTS' in spec_args:
        fork_mode.append('subtests')
    if 'FORK_TESTS' in spec_args:
        fork_mode.append('tests')
    fork_mode = fork_mode or spec_args.get('FORK_MODE', []) or unit.get('TEST_FORK_MODE').split()
    fork_mode = ' '.join(fork_mode) if fork_mode else ''

    # NOTE(review): 'SIZE', 'TAG', 'REQUIREMENTS', 'FUZZ_DICTS' and
    # 'FUZZ_OPTS' are read below but are not declared in `keywords`, so
    # these spec_args lookups presumably always miss — confirm against
    # sort_by_keywords semantics.
    test_record = {
        'TEST-NAME': flat_args[0],
        'SCRIPT-REL-PATH': flat_args[1],
        'TESTED-PROJECT-NAME': unit.name(),
        'TESTED-PROJECT-FILENAME': unit.filename(),
        'SOURCE-FOLDER-PATH': unit.resolve(unit.path()),
        'BUILD-FOLDER-PATH': strip_roots(unit.path()),
        'BINARY-PATH': strip_roots(os.path.join(unit.path(), unit.filename())),
        'CUSTOM-DEPENDENCIES': ' '.join(spec_args.get('DEPENDS', []) + get_values_list(unit, 'TEST_DEPENDS_VALUE')),
        'TEST-RECIPES': prepare_recipes(unit.get("TEST_RECIPES_VALUE")),
        'TEST-DATA': serialize_list(_common.filter_out_by_keyword(spec_args.get('DATA', []) + (unit.get(['__test_data']) or '').split(' ') + get_values_list(unit, 'TEST_DATA_VALUE'), 'AUTOUPDATED')),
        'TEST-TIMEOUT': ''.join(spec_args.get('TIMEOUT', [])) or unit.get('TEST_TIMEOUT') or '',
        'FORK-MODE': fork_mode,
        'SPLIT-FACTOR': ''.join(spec_args.get('SPLIT_FACTOR', [])) or unit.get('TEST_SPLIT_FACTOR') or '',
        'SIZE': ''.join(spec_args.get('SIZE', [])) or unit.get('TEST_SIZE_NAME') or '',
        'TAG': serialize_list(spec_args.get('TAG', []) + (unit.get(['__test_tags']) or '').split(' ')),
        'REQUIREMENTS': serialize_list(spec_args.get('REQUIREMENTS', []) + (unit.get(['__test_requirements']) or '').split(' ')),
        'TEST-CWD': unit.get('TEST_CWD_VALUE') or '',
        'FUZZ-DICTS': serialize_list(spec_args.get('FUZZ_DICTS', []) + get_unit_list_variable(unit, 'FUZZ_DICTS_VALUE')),
        'FUZZ-OPTS': serialize_list(spec_args.get('FUZZ_OPTS', []) + get_unit_list_variable(unit, 'FUZZ_OPTS_VALUE')),
        'BLOB': unit.get('TEST_BLOB_DATA') or '',
        'SKIP_TEST': unit.get('SKIP_TEST_VALUE') or '',
    }

    is_fuzz_test = flat_args[1] == 'fuzz.test' and unit.get('FUZZING') == 'yes'
    if is_fuzz_test:
        # use all cores if fuzzing requested
        test_record['REQUIREMENTS'] = serialize_list(filter(None, deserialize_list(test_record['REQUIREMENTS']) + ["cpu:all", "ram:all"]))

    data = dump_test(test_record, is_fuzz_test=is_fuzz_test)
    if data:
        unit.set_property(["DART_DATA", data])
        save_in_file(unit.get('TEST_DART_OUT_FILE'), data)
Example #7
0
def onadd_pytest_bin(unit, *args):
    """Register a pytest binary test; only the RUNNER_BIN keyword is accepted."""
    flat, kws = _common.sort_by_keywords({'RUNNER_BIN': 1}, args)
    if flat:
        ymake.report_configure_error(
            'Unknown arguments found while processing add_pytest_bin macro: {!r}'
            .format(flat))

    runner_bin = kws.get('RUNNER_BIN', [None])[0]
    # pick the py2/py3 flavor of the pytest binary
    if unit.get("PYTHON3") == 'yes':
        test_type = 'py3test.bin'
    else:
        test_type = "pytest.bin"

    add_test_to_dart(unit, test_type, runner_bin=runner_bin)
Example #8
0
def onrun_java_program(unit, *args):
    """Record a RUN_JAVA_PROGRAM invocation and recurse into its classpath."""
    flat, kv = common.sort_by_keywords(
        {'IN': -1, 'IN_DIR': -1, 'OUT': -1, 'OUT_DIR': -1, 'CWD': 1, 'CLASSPATH': -1, 'ADD_SRCS_TO_CLASSPATH': 0},
        args
    )

    for classpath_entry in kv.get('CLASSPATH', []):
        unit.oninternal_recurse(classpath_entry)

    # Append this call, base64-encoded as JSON, to the accumulated value.
    accumulated = unit.get(['RUN_JAVA_PROGRAM_VALUE']) or ''
    encoded = base64.b64encode(json.dumps(list(args), encoding='utf-8'))
    unit.set(['RUN_JAVA_PROGRAM_VALUE', (accumulated + ' ' + encoded).strip()])
Example #9
0
def onregister_yql_python_udf(unit, *args):
    """Register a YQL Python UDF: set the ABI version, add required
    PEERDIRs (optionally the libra modules) and generate the registration
    .cpp source via gen_yql_python_udf.py.

    Keywords: NAME (default 'CustomPython'), RESOURCE_NAME (defaults to
    NAME), ADD_LIBRA_MODULES ('yes' to pull in libra module support).
    Positional args are not allowed.
    """
    flat, kv = sort_by_keywords(
        {
            'NAME': 1,
            'RESOURCE_NAME': 1,
            'ADD_LIBRA_MODULES': 1
        }, args)
    assert len(flat) == 0
    name = get_or_default(kv, 'NAME', 'CustomPython')
    resource_name = get_or_default(kv, 'RESOURCE_NAME', name)
    add_libra_modules = get_or_default(kv, 'ADD_LIBRA_MODULES', 'no') == 'yes'

    use_arcadia_python = unit.get('USE_ARCADIA_PYTHON') == 'yes'
    py3 = unit.get('PYTHON3') == 'yes'

    unit.onyql_abi_version(['2', '9', '0'])
    unit.onpeerdir(['yql/udfs/common/python/python_udf'])
    unit.onpeerdir(['ydb/library/yql/public/udf'])

    if add_libra_modules:
        unit.onpeerdir(['quality/user_sessions/libra_arc/noyql'])
        unit.onpeerdir(['yql/udfs/quality/libra/module'])

    if use_arcadia_python:
        flavor = 'Arcadia'
        # py2 and py3 builds need different runtime/main peerdirs
        unit.onpeerdir(
            ['library/python/runtime', 'yql/udfs/common/python/main']
            if not py3 else
            ['library/python/runtime_py3', 'yql/udfs/common/python/main_py3'])
    else:
        flavor = 'System'

    output_includes = [
        'yql/udfs/common/python/python_udf/python_udf.h',
        'ydb/library/yql/public/udf/udf_registrator.h',
    ]
    if add_libra_modules:
        output_includes.append('yql/udfs/quality/libra/module/module.h')

    path = name + '.yql_python_udf.cpp'
    # the generator script takes '1'/'0' for libra support
    libra_flag = '1' if add_libra_modules else '0'
    unit.onpython([
        'build/scripts/gen_yql_python_udf.py',
        flavor,
        name,
        resource_name,
        path,
        libra_flag,
        'OUT',
        path,
        'OUTPUT_INCLUDES',
    ] + output_includes)
Example #10
0
def onadd_ytest(unit, *args):
    """Build a ytest dart record from ADD_YTEST macro args and store it in
    the DART_DATA property and the dart output file.

    flat_args: (test_name, script_rel_path, ...); remaining options come
    from keyword args and unit-level variables.
    """
    keywords = {"DEPENDS": -1, "DATA": -1, "TIMEOUT": 1, "FORK_MODE": 1, "SPLIT_FACTOR": 1,
                "FORK_SUBTESTS": 0, "FORK_TESTS": 0}
    flat_args, spec_args = _common.sort_by_keywords(keywords, args)

    if flat_args[1] == "fuzz.test":
        # fuzz tests implicitly depend on their corpus description
        unit.ondata("arcadia/fuzzing/{}/corpus.json".format(strip_roots(unit.path())))
    elif flat_args[1] == "coverage.extractor" and not match_coverage_extractor_requirements(unit):
        # XXX
        # Current ymake implementation doesn't allow to call macro inside the 'when' body
        # that's why we add ADD_YTEST(coverage.extractor) to every PROGRAM entry and check requirements later
        return

    # FORK_SUBTESTS/FORK_TESTS flags win over FORK_MODE, which wins over
    # the unit-level TEST_FORK_MODE variable.
    fork_mode = []
    if 'FORK_SUBTESTS' in spec_args:
        fork_mode.append('subtests')
    if 'FORK_TESTS' in spec_args:
        fork_mode.append('tests')
    fork_mode = fork_mode or spec_args.get('FORK_MODE', []) or unit.get('TEST_FORK_MODE').split()
    fork_mode = ' '.join(fork_mode) if fork_mode else ''

    # NOTE(review): 'SIZE', 'TAG', 'REQUIREMENTS', 'FUZZ_DICTS' and
    # 'FUZZ_OPTS' are read below but are not declared in `keywords`, so
    # these spec_args lookups presumably always miss — confirm against
    # sort_by_keywords semantics.
    test_record = {
        'TEST-NAME': flat_args[0],
        'SCRIPT-REL-PATH': flat_args[1],
        'TESTED-PROJECT-NAME': unit.name(),
        'TESTED-PROJECT-FILENAME': unit.filename(),
        'SOURCE-FOLDER-PATH': unit.resolve(unit.path()),
        'BUILD-FOLDER-PATH': strip_roots(unit.path()),
        'BINARY-PATH': strip_roots(os.path.join(unit.path(), unit.filename())),
        'CUSTOM-DEPENDENCIES': ' '.join(spec_args.get('DEPENDS', []) + get_values_list(unit, 'TEST_DEPENDS_VALUE')),
        'TEST-DATA': serialize_list(_common.filter_out_by_keyword(spec_args.get('DATA', []) + (unit.get(['__test_data']) or '').split(' ') + get_values_list(unit, 'TEST_DATA_VALUE'), 'AUTOUPDATED')),
        'TEST-TIMEOUT': ''.join(spec_args.get('TIMEOUT', [])) or unit.get('TEST_TIMEOUT') or '',
        'FORK-MODE': fork_mode,
        'SPLIT-FACTOR': ''.join(spec_args.get('SPLIT_FACTOR', [])) or unit.get('TEST_SPLIT_FACTOR') or '',
        'SIZE': ''.join(spec_args.get('SIZE', [])) or unit.get('TEST_SIZE_NAME') or '',
        'TAG': serialize_list(spec_args.get('TAG', []) + (unit.get(['__test_tags']) or '').split(' ')),
        'REQUIREMENTS': serialize_list(spec_args.get('REQUIREMENTS', []) + (unit.get(['__test_requirements']) or '').split(' ')),
        'TEST-CWD': unit.get('TEST_CWD_VALUE') or '',
        'FUZZ-DICTS': serialize_list(spec_args.get('FUZZ_DICTS', []) + get_unit_list_variable(unit, 'FUZZ_DICTS_VALUE')),
        'FUZZ-OPTS': serialize_list(spec_args.get('FUZZ_OPTS', []) + get_unit_list_variable(unit, 'FUZZ_OPTS_VALUE')),
        'BLOB': unit.get('TEST_BLOB_DATA') or '',
        'SKIP_TEST': unit.get('SKIP_TEST_VALUE') or '',
    }

    # use all cores if fuzzing requested
    if flat_args[1] == 'fuzz.test' and unit.get('FUZZING') == 'yes':
        test_record['REQUIREMENTS'] = serialize_list(filter(None, deserialize_list(test_record['REQUIREMENTS']) + ["cpu:all"]))

    data = dump_test(test_record)
    if data:
        unit.set_property(["DART_DATA", data])
        save_in_file(unit.get('TEST_DART_OUT_FILE'), data)
Example #11
0
def onadd_check(unit, *args):
    """Register a style/lint check (PEP8, JAVA_STYLE, ...) in the test dart.

    flat_args[0] is the check type; for JAVA_STYLE flat_args[1] selects the
    checks level ('base'/'strict'/'extended') and is rewritten in place to
    the corresponding config path.
    """
    flat_args, spec_args = _common.sort_by_keywords(
        {"DEPENDS": -1, "TIMEOUT": 1, "DATA": -1, "TAG": -1, "REQUIREMENTS": -1, "FORK_MODE": 1,
         "SPLIT_FACTOR": 1, "FORK_SUBTESTS": 0, "FORK_TESTS": 0, "SIZE": 1}, args)
    check_type = flat_args[0]
    test_dir = unit.resolve(os.path.join(unit.path()))

    test_timeout = ''
    if check_type in ("PEP8", "PYFLAKES", "PY_FLAKES"):
        script_rel_path = "py.lint.pylint"
    elif check_type == "JAVA_STYLE":
        if len(flat_args) < 2:
            raise Exception("Not enough arguments for JAVA_STYLE check")
        level = flat_args[1]
        level_configs = {
            'base': '/yandex_checks.xml',
            'strict': '/yandex_checks_strict.xml',
            'extended': '/yandex_checks_extended.xml',
        }
        if level not in level_configs:
            raise Exception('{} is not allowed in LINT(), use one of {}'.format(level, level_configs.keys()))
        # replace the level keyword with its checks-config path
        flat_args[1] = level_configs[level]
        script_rel_path = "java.style"
        test_timeout = '120'
    else:
        script_rel_path = check_type

    test_record = {
        'TEST-NAME': check_type.lower(),
        'TEST-TIMEOUT': test_timeout,
        'SCRIPT-REL-PATH': script_rel_path,
        'TESTED-PROJECT-NAME': os.path.basename(test_dir),
        'SOURCE-FOLDER-PATH': test_dir,
        'CUSTOM-DEPENDENCIES': " ".join(spec_args.get('DEPENDS', [])),
        'TEST-DATA': '',
        'SPLIT-FACTOR': '',
        'FORK-MODE': '',
        'FORK-TEST-FILES': '',
        'SIZE': 'SMALL',
        'TAG': '',
        'REQUIREMENTS': '',
        'USE_ARCADIA_PYTHON': unit.get('USE_ARCADIA_PYTHON') or '',
        'OLD_PYTEST': 'no',
        'PYTHON-PATHS': '',
        'FILES': serialize_list(flat_args[1:])
    }
    data = dump_test(test_record)
    if data:
        unit.set_property(["DART_DATA", data])
        save_in_file(unit.get('TEST_DART_OUT_FILE'), data)
Example #12
0
def onadd_check(unit, *args):
    """Register a style/lint check (PEP8, JAVA_STYLE, ...) in the test dart.

    flat_args[0] is the check type; for JAVA_STYLE flat_args[1] selects the
    checks level ('base'/'strict' in this variant) and is rewritten in
    place to the corresponding config path.
    """
    flat_args, spec_args = _common.sort_by_keywords({"DEPENDS": -1, "TIMEOUT": 1, "DATA": -1, "TAG": -1, "REQUIREMENTS": -1, "FORK_MODE": 1,
                                                     "SPLIT_FACTOR": 1, "FORK_SUBTESTS": 0, "FORK_TESTS": 0, "SIZE": 1}, args)
    check_type = flat_args[0]
    test_dir = unit.resolve(os.path.join(unit.path()))

    test_timeout = ''
    if check_type in ["PEP8", "PYFLAKES", "PY_FLAKES"]:
        script_rel_path = "py.lint.pylint"
    elif check_type == "JAVA_STYLE":
        if len(flat_args) < 2:
            raise Exception("Not enough arguments for JAVA_STYLE check")
        check_level = flat_args[1]
        allowed_levels = {
            'base': '/yandex_checks.xml',
            'strict': '/yandex_checks_strict.xml',
        }
        if check_level not in allowed_levels:
            raise Exception('{} is not allowed in LINT(), use one of {}'.format(check_level, allowed_levels.keys()))
        # replace the level keyword with its checks-config path
        flat_args[1] = allowed_levels[check_level]
        script_rel_path = "java.style"
        test_timeout = '120'
    else:
        # unknown check types pass through verbatim
        script_rel_path = check_type

    use_arcadia_python = unit.get('USE_ARCADIA_PYTHON')
    test_record = {
        'TEST-NAME': check_type.lower(),
        'TEST-TIMEOUT': test_timeout,
        'SCRIPT-REL-PATH': script_rel_path,
        'TESTED-PROJECT-NAME': os.path.basename(test_dir),
        'SOURCE-FOLDER-PATH': test_dir,
        'CUSTOM-DEPENDENCIES': " ".join(spec_args.get('DEPENDS', [])),
        'TEST-DATA': '',
        'SPLIT-FACTOR': '',
        'FORK-MODE': '',
        'FORK-TEST-FILES': '',
        'SIZE': 'SMALL',
        'TAG': '',
        'REQUIREMENTS': '',
        'USE_ARCADIA_PYTHON': use_arcadia_python or '',
        'OLD_PYTEST': 'no',
        'PYTHON-PATHS': '',
        'FILES': serialize_list(flat_args[1:])
    }
    data = dump_test(test_record)
    if data:
        unit.set_property(["DART_DATA", data])
        save_in_file(unit.get('TEST_DART_OUT_FILE'), data)
Example #13
0
def ongenerate_script(unit, *args):
    """Record a GENERATE_SCRIPT invocation (base64-encoded JSON) in the
    GENERATE_SCRIPT_VALUE unit variable for later processing.

    heretic@ promised to make tutorial here
    Don't forget
    Feel free to remind
    """
    flat, kv = common.sort_by_keywords(
        {'OUT': 1, 'TEMPLATE': -1, 'CUSTOM_PROPERTY': -1},
        args
    )
    # TEMPLATE is declared multi-valued but only one template is supported.
    if len(kv.get('TEMPLATE', [])) > 1:
        # FIX: message was 'To mane arguments for TEMPLATE parameter'
        ymake.report_configure_error('Too many arguments for TEMPLATE parameter')
    prev = unit.get(['GENERATE_SCRIPT_VALUE']) or ''
    new_val = (prev + ' ' + base64.b64encode(json.dumps(list(args), encoding='utf-8'))).strip()
    unit.set(['GENERATE_SCRIPT_VALUE', new_val])
Example #14
0
def ongenerate_script(unit, *args):
    """Record a GENERATE_SCRIPT invocation (base64-encoded JSON) in the
    GENERATE_SCRIPT_VALUE unit variable for later processing.

    heretic@ promised to make tutorial here
    Don't forget
    Feel free to remind
    """
    flat, kv = common.sort_by_keywords(
        {'OUT': 1, 'TEMPLATE': -1, 'CUSTOM_PROPERTY': -1},
        args
    )
    # TEMPLATE is declared multi-valued but only one template is supported.
    if len(kv.get('TEMPLATE', [])) > 1:
        # FIX: message was 'To mane arguments for TEMPLATE parameter'
        ymake.report_configure_error('Too many arguments for TEMPLATE parameter')
    prev = unit.get(['GENERATE_SCRIPT_VALUE']) or ''
    new_val = (prev + ' ' + base64.b64encode(json.dumps(list(args), encoding='utf-8'))).strip()
    unit.set(['GENERATE_SCRIPT_VALUE', new_val])
Example #15
0
def onrun_java_program(unit, *args):
    """
    Custom code generation
    @link: https://wiki.yandex-team.ru/yatool/java/#kodogeneracijarunjavaprogram
    """
    flat, kv = common.sort_by_keywords(
        {'IN': -1, 'IN_DIR': -1, 'OUT': -1, 'OUT_DIR': -1, 'CWD': 1,
         'CLASSPATH': -1, 'ADD_SRCS_TO_CLASSPATH': 0}, args)

    # NOTE: 'JAR' is not a declared keyword above, so kv.get('JAR', [])
    # is always empty here and depends reduces to the CLASSPATH entries.
    depends = kv.get('CLASSPATH', []) + kv.get('JAR', [])
    if depends:
        # XXX: hack to force ymake to build dependencies
        fake_out = "fake.out.{}".format(hash(tuple(depends)))
        unit.on_run_java(['TOOL'] + depends + ["OUT", fake_out])

    stored = unit.get(['RUN_JAVA_PROGRAM_VALUE']) or ''
    encoded = base64.b64encode(json.dumps(list(args), encoding='utf-8'))
    unit.set(['RUN_JAVA_PROGRAM_VALUE', (stored + ' ' + encoded).strip()])
Example #16
0
def onrun_java(unit, *args):
    """Build and dispatch a RUN_JAVA command line.

    Requires a java program via the JAR or CLASSPATH keywords; IN/OUT/
    OUT_NOAUTO/OUTPUT_INCLUDES sections and the DEBUG flag are forwarded
    as markers. Classpath entries double as TOOL build dependencies
    (except under the MSVS IDE call).
    """
    flat, kv = common.sort_by_keywords(
        {
            'CLASSPATH': -1,
            'IN': -1,
            'OUT': -1,
            'OUT_NOAUTO': -1,
            'OUTPUT_INCLUDES': -1,
            'DEBUG': 0,
            'JAR': 1
        }, args)
    if not (kv.get('CLASSPATH', []) + kv.get('JAR', [])):
        ymake.report_configure_error(
            'Java program for RUN_JAVA is not specified')

    depends = []
    if not unit.get('IDE_MSVS_CALL'):
        for jar in (kv.get('CLASSPATH', []) + kv.get('JAR', [])):
            depends.append(jar)

    # BUG FIX: removed `classpath = ':'.join(classpath)` — `classpath` was
    # never assigned before that line, so it raised UnboundLocalError on
    # every call, and its result was never used afterwards.

    # Generate java cmd
    cmd = []
    if kv.get('JAR'):
        cmd += [
            '-jar',
            ':'.join(['$SCARAB_SLIM'] + kv.get('JAR')),
        ]
    cmd += [
        '-classpath',
        ':'.join(['$SCARAB'] + kv.get('JAR', []) + kv.get('CLASSPATH', [])),
        '-Dfile.encoding=UTF-8',
    ]

    cmd += flat

    if 'DEBUG' not in kv:
        cmd += ['HIDE_OUTPUT']

    for k in 'IN', 'OUT', 'OUT_NOAUTO', 'OUTPUT_INCLUDES':
        if kv.get(k):
            cmd += [k] + kv[k]

    if depends:
        cmd += ['TOOL'] + depends

    unit.on_run_java(cmd)
Example #17
0
def onrun_java_program(unit, *args):
    """
    Custom code generation
    @link: https://wiki.yandex-team.ru/yatool/java/#kodogeneracijarunjavaprogram
    """
    keywords = {'IN': -1, 'IN_DIR': -1, 'OUT': -1, 'OUT_DIR': -1, 'CWD': 1, 'CLASSPATH': -1, 'ADD_SRCS_TO_CLASSPATH': 0}
    flat, kv = common.sort_by_keywords(keywords, args)

    # Recurse into every classpath entry so it gets built.
    for entry in kv.get('CLASSPATH', []):
        unit.oninternal_recurse(entry)

    # Append this invocation, base64-encoded, to the accumulated variable.
    previous = unit.get(['RUN_JAVA_PROGRAM_VALUE']) or ''
    encoded = base64.b64encode(json.dumps(list(args), encoding='utf-8'))
    unit.set(['RUN_JAVA_PROGRAM_VALUE', (previous + ' ' + encoded).strip()])
Example #18
0
def onrun_java_program(unit, *args):
    """
    Custom code generation
    @link: https://wiki.yandex-team.ru/yatool/java/#kodogeneracijarunjavaprogram
    """
    flat, kv = common.sort_by_keywords(
        {'IN': -1, 'IN_DIR': -1, 'OUT': -1, 'OUT_DIR': -1,
         'CWD': 1, 'CLASSPATH': -1, 'ADD_SRCS_TO_CLASSPATH': 0},
        args
    )

    # every classpath entry is recursed into so that it gets built
    for cp_item in kv.get('CLASSPATH', []):
        unit.oninternal_recurse(cp_item)

    acc = unit.get(['RUN_JAVA_PROGRAM_VALUE']) or ''
    payload = base64.b64encode(json.dumps(list(args), encoding='utf-8'))
    unit.set(['RUN_JAVA_PROGRAM_VALUE', (acc + ' ' + payload).strip()])
Example #19
0
def onios_app_settings(unit, *args):
    """Translate OS_VERSION / DEVICES settings into iOS app build flags."""
    tail, kv = common.sort_by_keywords({'OS_VERSION': 1, 'DEVICES': -1}, args)
    if tail:
        ymake.report_configure_error(
            'Bad IOS_COMMON_SETTINGS usage - unknown data: ' + str(tail))

    os_versions = kv.get('OS_VERSION', [])
    if os_versions:
        minimum_target = os_versions[0]
        unit.onios_app_common_flags(['--minimum-deployment-target', minimum_target])
        unit.onios_app_assets_flags(['--filter-for-device-os-version', minimum_target])

    device_flags = []
    for device in kv.get('DEVICES', []):
        device_flags.extend(['--target-device', device])
    if device_flags:
        unit.onios_app_common_flags(device_flags)
Example #20
0
def onyabs_server_prepare_qxl_from_sandbox(unit, *args):
    """Fetch a QXL resource from sandbox and generate its derived pickle,
    enabled-stats and request-dump artifacts."""
    (resource_id, qxl_name), kv = sort_by_keywords({'TIMESTAMP': 1}, args)

    json_basename = stripext(qxl_name)
    prefix = unit.path().replace('$S', '${ARCADIA_BUILD_ROOT}') + '/'
    pickle_name = prefix + json_basename + '.pickle'
    enabled_stats_name = prefix + json_basename + '.sts'

    timestamp = int(kv.get('TIMESTAMP', [DEFAULT_TIMESTAMP])[0])

    unit.onfrom_sandbox(['FILE', resource_id, 'OUT', prefix + qxl_name])

    unit.onrun_program([
        MAKE_SQLITE_DB, '--credefs', CREDEFS_JSON, '--qxl', prefix + qxl_name,
        '--pickle', pickle_name, '--nowtime',
        str(timestamp - 1), '--stenabled', enabled_stats_name, 'IN',
        CREDEFS_JSON, 'IN', prefix + qxl_name, 'OUT', pickle_name, 'OUT',
        enabled_stats_name
    ])

    request_dumps = [
        ['yabs_test.json'],
        ['simulator.json'],
        [
            'http_laas.json',
            'http_metrika.json',
            'http_metasearch.json',
            'http_metapartner.json',
            'http_metarank.json',
        ],
    ]

    # one MAKE_REQUESTS run per dump group
    for group in request_dumps:
        run_args = [MAKE_REQUESTS, prefix + qxl_name] + list(group)
        run_args += ['IN', prefix + qxl_name]
        for fname in group:
            run_args += ['OUT', fname]
        unit.onrun_program(run_args)
Example #21
0
def onadd_test(unit, *args):
    """Legacy ADD_TEST handler: gathers test options from the macro args
    and forwards everything to _dump_test."""
    flat_args, spec_args = _common.sort_by_keywords(
        {"DEPENDS": -1, "TIMEOUT": 1, "DATA": -1, "TAG": -1, "REQUIREMENTS": -1, "FORK_MODE": 1,
         "SPLIT_FACTOR": 1, "FORK_SUBTESTS": 0, "FORK_TESTS": 0, "SIZE": 1}, args)
    test_type = flat_args[0]
    test_files = flat_args[1:]
    # PEP8/PY_FLAKES handling is intentionally disabled (the old
    # onadd_check forwarding was removed).
    if test_type in ("PEP8", "PY_FLAKES"):
        return

    custom_deps = spec_args.get('DEPENDS', [])
    timeout = (spec_args.get("TIMEOUT") or ['0'])[0]

    fork_mode = []
    if 'FORK_SUBTESTS' in spec_args:
        fork_mode.append('subtests')
    if 'FORK_TESTS' in spec_args:
        fork_mode.append('tests')
    fork_mode = fork_mode or spec_args.get('FORK_MODE', [])

    split_factor = ''.join(spec_args.get('SPLIT_FACTOR', []))
    test_size = ''.join(spec_args.get('SIZE', [])) or 'SMALL'
    test_dir = unit.resolve(os.path.join(unit.path()))
    tags = spec_args.get('TAG', []) + (unit.get(['__test_tags']) or '').split(' ')
    requirements = spec_args.get('REQUIREMENTS', []) + (unit.get(['__test_requirements']) or '').split(' ')
    test_data = spec_args.get("DATA", []) + (unit.get(['__test_data']) or '').split(' ')
    python_paths = get_values_list(unit, 'TEST_PYTHON_PATH_VALUE')
    old_pytest = test_type == "PY_TEST"

    _dump_test(unit, test_type, test_files, timeout, test_dir, custom_deps, test_data, python_paths, split_factor, fork_mode, test_size, tags, requirements, None, old_pytest)
Example #22
0
def onadd_test(unit, *args):
    """Legacy ADD_TEST handler: gathers test options from the macro args
    and forwards everything to _dump_test.

    This variant reads TAG/REQUIREMENTS/DATA from unit list variables
    (TEST_TAGS_VALUE etc.) rather than from '__test_*' variables.
    """
    flat_args, spec_args = _common.sort_by_keywords({"DEPENDS": -1, "TIMEOUT": 1, "DATA": -1, "TAG": -1, "REQUIREMENTS": -1, "FORK_MODE": 1,
                                                     "SPLIT_FACTOR": 1, "FORK_SUBTESTS": 0, "FORK_TESTS": 0, "SIZE": 1}, args)
    test_type = flat_args[0]
    test_files = flat_args[1:]
    # PEP8/PY_FLAKES handling is intentionally disabled; the old
    # onadd_check forwarding is kept below for reference.
    if test_type in ["PEP8", "PY_FLAKES"]:
        return
        # unit_path = unit.path()
        # paths = []
        # for test_file in test_files:
        #     if test_file == ".":
        #         path_to_check = unit_path
        #     else:
        #         path_to_check = os.path.join(unit_path, test_file)
        #     paths.append(path_to_check)
        # return onadd_check(unit, *tuple([test_type] + sorted(paths)))

    custom_deps = spec_args.get('DEPENDS', [])
    timeout = spec_args.get("TIMEOUT", [])
    if timeout:
        timeout = timeout[0]
    else:
        timeout = '0'
    # FORK_SUBTESTS/FORK_TESTS flags take precedence over FORK_MODE
    fork_mode = []
    if 'FORK_SUBTESTS' in spec_args:
        fork_mode.append('subtests')
    if 'FORK_TESTS' in spec_args:
        fork_mode.append('tests')
    fork_mode = fork_mode or spec_args.get('FORK_MODE', [])
    split_factor = ''.join(spec_args.get('SPLIT_FACTOR', [])) or ''
    test_size = ''.join(spec_args.get('SIZE', [])) or 'SMALL'
    test_dir = unit.path()
    tags = spec_args.get('TAG', []) + get_values_list(unit, 'TEST_TAGS_VALUE')
    requirements = spec_args.get('REQUIREMENTS', []) + get_values_list(unit, 'TEST_REQUIREMENTS_VALUE')
    test_data = spec_args.get("DATA", []) + get_norm_paths(unit, 'TEST_DATA_VALUE')
    python_paths = get_values_list(unit, 'TEST_PYTHON_PATH_VALUE')
    if test_type == "PY_TEST":
        old_pytest = True
    else:
        old_pytest = False

    _dump_test(unit, test_type, test_files, timeout, test_dir, custom_deps, test_data, python_paths, split_factor, fork_mode, test_size, tags, requirements, None, old_pytest)
Example #23
0
def onllvm_bc(unit, *args):
    """Compile sources to LLVM bitcode, link and optimize them, then embed
    the optimized module as a resource under /llvm_bc/<NAME>.

    Keywords: NAME (required, single) and SYMBOLS (public API list for
    internalization).
    """
    free_args, kwds = sort_by_keywords({'SYMBOLS': -1, 'NAME': 1}, args)
    name = kwds['NAME'][0]
    symbols = kwds.get('SYMBOLS')
    merged_bc = name + '_merged.bc'
    out_bc = name + '_optimized.bc'

    bitcode_files = []
    for src in free_args:
        rel = rootrel_arc_src(src, unit)
        bc = '${ARCADIA_BUILD_ROOT}/' + skip_build_root(rel) + '.bc'
        # choose the C or C++ compile macro by file extension
        if src.endswith('.c'):
            unit.onllvm_compile_c([rel, bc])
        else:
            unit.onllvm_compile_cxx([rel, bc])
        bitcode_files.append(bc)

    unit.onllvm_link([merged_bc] + bitcode_files)

    opt_opts = ['-O2', '-globalopt', '-globaldce']
    if symbols:
        # XXX: '#' used instead of ',' to overcome ymake tendency to split everything by comma
        opt_opts += ['-internalize', '-internalize-public-api-list=' + '#'.join(symbols)]
    unit.onllvm_opt([merged_bc, out_bc] + opt_opts)
    unit.onresource([out_bc, '/llvm_bc/' + name])
Example #24
0
def onrun_java_program(unit, *args):
    """Record a RUN_JAVA_PROGRAM call (base64-encoded JSON) and recurse
    into every CLASSPATH entry."""
    flat, kv = common.sort_by_keywords(
        {'IN': -1, 'IN_DIR': -1, 'OUT': -1, 'OUT_DIR': -1,
         'CWD': 1, 'CLASSPATH': -1, 'ADD_SRCS_TO_CLASSPATH': 0},
        args)

    for jar_path in kv.get('CLASSPATH', []):
        unit.oninternal_recurse(jar_path)

    stored = unit.get(['RUN_JAVA_PROGRAM_VALUE']) or ''
    payload = base64.b64encode(json.dumps(list(args), encoding='utf-8'))
    unit.set(['RUN_JAVA_PROGRAM_VALUE', (stored + ' ' + payload).strip()])
def oncreate_init_py(unit, *args):
    """Touch an __init__.py for every ancestor directory of the given files.

    Keywords: DESTINATION base directory (default $ARCADIA_BUILD_ROOT),
    INCLUDING_DEST_DIR to also create one directly in the destination,
    RESULT variable that receives the space-joined generated paths.
    """
    flat_args, spec_args = sort_by_keywords(
        {"DESTINATION": 1, "INCLUDING_DEST_DIR": 0, "RESULT": 1}, args)

    dest_dir = spec_args.get("DESTINATION", ["$ARCADIA_BUILD_ROOT"])[0]

    generated = set()
    if "INCLUDING_DEST_DIR" in spec_args:
        generated.add(os.path.join(dest_dir, "__init__.py"))

    for proto_file in flat_args:
        dirs = proto_file.split(os.sep)[:-1]
        # one __init__.py per ancestor directory level of the file
        for depth in range(len(dirs)):
            generated.add(os.path.join(dest_dir, os.path.join(*dirs[0:len(dirs) - depth]), "__init__.py"))

    generated = list(generated)

    unit.ontouch(generated)
    if "RESULT" in spec_args:
        unit.set([spec_args["RESULT"][0], " ".join(generated)])
Example #26
0
def oncopy_files_to_build_prefix(unit, *args):
    """Copy the given files under BUILD_ROOT/PREFIX, preserving relative paths.

    Files already under the destination prefix are left alone; build-root and
    source-root paths are re-rooted; anything else is placed under the
    module directory inside the prefix.
    """
    # NB! keyword 'GLOBAL' is a way to skip this word from the list of files
    flat_args, spec_args = sort_by_keywords({'PREFIX': 1, 'GLOBAL': 0}, args)
    prefix = spec_args['PREFIX'][0] if 'PREFIX' in spec_args else ''
    build_prefix = '/'.join([BUILD_ROOT, prefix]) if prefix else BUILD_ROOT

    for path in flat_args:
        if path.startswith(build_prefix):
            # Already in place — nothing to copy.
            continue
        if prefix and path.startswith(BUILD_ROOT):
            unit.oncopy_file([path, '{}{}'.format(build_prefix, path[len(BUILD_ROOT):])])
        elif path.startswith(SOURCE_ROOT):
            unit.oncopy_file([path, '{}{}'.format(build_prefix, path[len(SOURCE_ROOT):])])
        else:
            # Relative path: anchor it at the module directory.
            unit.oncopy_file([path, '{}/{}/{}'.format(build_prefix, unit.get(['MODDIR']), path)])
Example #27
0
def on_run_jbuild_program(unit, *args):
    """
    Custom code generation
    @link: https://wiki.yandex-team.ru/yatool/java/#kodogeneracijarunjavaprogram
    """
    # BUG FIX: this docstring was previously placed *after* the first
    # statement, making it a no-op string expression instead of the
    # function's docstring (PEP 257).
    args = list(args)

    flat, kv = common.sort_by_keywords(
        {
            'IN': -1,
            'IN_DIR': -1,
            'OUT': -1,
            'OUT_DIR': -1,
            'CWD': 1,
            'CLASSPATH': -1,
            'CP_USE_COMMAND_FILE': 1,
            'ADD_SRCS_TO_CLASSPATH': 0
        }, args)
    depends = kv.get('CLASSPATH', []) + kv.get('JAR', [])
    fake_out = None
    if depends:
        # XXX: hack to force ymake to build dependencies
        # NOTE(review): hash() of strings is randomized per process
        # (PYTHONHASHSEED), so this name is not stable across runs —
        # confirm whether a stable digest (e.g. hashlib) is required here.
        fake_out = "fake.out.{}".format(hash(tuple(args)))
        unit.on_run_java(['TOOL'] + depends + ["OUT", fake_out])

    # Default CP_USE_COMMAND_FILE from the unit variable when not given.
    if not kv.get('CP_USE_COMMAND_FILE'):
        args += [
            'CP_USE_COMMAND_FILE',
            unit.get(['JAVA_PROGRAM_CP_USE_COMMAND_FILE']) or 'yes'
        ]

    if fake_out is not None:
        args += ['FAKE_OUT', fake_out]

    # Append this invocation (base64-encoded JSON) to the accumulator.
    prev = unit.get(['RUN_JAVA_PROGRAM_VALUE']) or ''
    new_val = (
        prev + ' ' +
        base64.b64encode(json.dumps(list(args), encoding='utf-8'))).strip()
    unit.set(['RUN_JAVA_PROGRAM_VALUE', new_val])
Example #28
0
def onregister_yql_python_udf(unit, *args):
    """Register a YQL Python UDF: add peerdirs and generate the registrator .cpp.

    NAME defaults to 'CustomPython'; RESOURCE_NAME defaults to NAME.
    No positional arguments are accepted.
    """
    flat, kv = sort_by_keywords({'NAME': 1, 'RESOURCE_NAME': 1}, args)
    assert len(flat) == 0
    name = get_or_default(kv, 'NAME', 'CustomPython')
    resource_name = get_or_default(kv, 'RESOURCE_NAME', name)

    use_arcadia_python = unit.get('USE_ARCADIA_PYTHON') == 'yes'
    py3 = unit.get('PYTHON3') == 'yes'

    unit.onyql_abi_version(['2', '9', '0'])
    unit.onpeerdir(['yql/udfs/common/python/python_udf'])
    unit.onpeerdir(['yql/library/udf'])

    if not use_arcadia_python:
        flavor = 'System'
    else:
        flavor = 'Arcadia'
        # py3 builds need only the py3 runtime; py2 also pulls the udf main.
        if py3:
            unit.onpeerdir(['library/python/runtime_py3'])
        else:
            unit.onpeerdir(['library/python/runtime', 'yql/udfs/common/python/main'])

    path = name + '.yql_python_udf.cpp'
    output_includes = [
        'yql/udfs/common/python/python_udf/python_udf.h',
        'yql/library/udf/udf_registrator.h',
    ]
    unit.onpython(
        [
            'build/scripts/gen_yql_python_udf.py',
            flavor,
            name,
            resource_name,
            path,
            'OUT',
            path,
            'OUTPUT_INCLUDES',
        ]
        + output_includes)
Example #29
0
def ytest_base(unit, related_prj_dir, related_prj_name, args):
    """Populate the unit's test-related variables for a ytest entry.

    args: [tested_binary, script_rel_path, ...] plus DEPENDS/DATA keywords.
    """
    flat_args, spec_args = _common.sort_by_keywords({"DEPENDS": -1, "DATA": -1}, args)
    binary_path = flat_args[0]
    unit.set(['TEST-NAME', os.path.basename(binary_path)])
    unit.set(['SCRIPT-REL-PATH', flat_args[1]])
    unit.set(['SOURCE-FOLDER-PATH', related_prj_dir])
    unit.set(['BUILD-FOLDER-PATH', os.path.join('$B', related_prj_dir)])
    unit.set(['TESTED-BINARY-PATH', binary_path])

    deps = ' '.join(spec_args["DEPENDS"]) if "DEPENDS" in spec_args else ''
    unit.set(['CUSTOM-DEPENDENCIES', deps])

    data_lst = spec_args.get('DATA', []) + (unit.get(['__test_data']) or '').split(' ')
    unit.set(['TEST-DATA', '\"' + ';'.join(data_lst) + '\"' if data_lst else ''])

    ya_root = unit.get('YA_ROOT')
    unit.set(['TEST_RUN_SCRIPT', 'devtools/{}/test/node/run_test.py'.format(ya_root)])

    # NOTE(review): the first entry lacks '$' before {ARCADIA_ROOT}; it is
    # preserved as-is — confirm whether this is intentional.
    related_dirs_list = ['{ARCADIA_ROOT}/devtools/${YA_ROOT}', '${ARCADIA_ROOT}/devtools/${YA_ROOT}', '$RELATED_TARGET_SRCDIR']
    path_flags = []
    for rel in related_dirs_list:
        path_flags += ['--test-related-path', rel]
    unit.set(['RELATED_DIRS', ' '.join(path_flags)])
    unit.set(['TEST_KV', '${{kv;hide:"test_related_dirs {}"}}'.format(' '.join(related_dirs_list))])
Example #30
0
def ytest_base(unit, related_prj_dir, related_prj_name, args):
    """Populate the unit's test-related variables for a ytest entry.

    args: [tested_binary, script_rel_path, ...] plus DEPENDS/DATA keywords.
    NOTE(review): this redefines ytest_base from earlier in the file with an
    extra 'svn_credentials' related dir — confirm which version is intended.
    """
    keywords = {"DEPENDS": -1, "DATA": -1}
    flat_args, spec_args = _common.sort_by_keywords(keywords, args)
    unit.set(['TEST-NAME', os.path.basename(flat_args[0])])
    unit.set(['SCRIPT-REL-PATH', flat_args[1]])
    unit.set(['SOURCE-FOLDER-PATH', related_prj_dir])
    unit.set(['BUILD-FOLDER-PATH', os.path.join('$B', related_prj_dir)])
    unit.set(['TESTED-BINARY-PATH', flat_args[0]])

    # DEPENDS entries become a space-separated custom-dependency list.
    custom_deps = ' '.join(spec_args["DEPENDS"]) if "DEPENDS" in spec_args else ''
    unit.set(['CUSTOM-DEPENDENCIES', custom_deps])
    # Explicit DATA args merged with the unit-level __test_data variable.
    data_lst = spec_args.get('DATA', []) + (unit.get(['__test_data']) or '').split(' ')
    data = '\"' + ';'.join(data_lst) + '\"' if data_lst else ''
    unit.set(['TEST-DATA', data])
    ya_root = unit.get('YA_ROOT')
    unit.set(['TEST_RUN_SCRIPT', 'devtools/{}/test/node/run_test.py'.format(ya_root)])

    # NOTE(review): the second entry lacks '$' before {ARCADIA_ROOT};
    # preserved as-is — confirm whether this is intentional.
    related_dirs_list = ['${ARCADIA_ROOT}/devtools/svn_credentials', '{ARCADIA_ROOT}/devtools/${YA_ROOT}', '${ARCADIA_ROOT}/devtools/${YA_ROOT}', '$RELATED_TARGET_SRCDIR']
    related_dirs_value = []
    for rel in related_dirs_list:
        related_dirs_value.extend(['--test-related-path', rel])
    unit.set(['RELATED_DIRS', ' '.join(related_dirs_value)])
    unit.set(['TEST_KV', '${{kv;hide:"test_related_dirs {}"}}'.format(' '.join(related_dirs_list))])
Example #31
0
def onrun_java(unit, *args):
    """Build and dispatch a RUN_JAVA command line from macro arguments.

    Requires at least one of CLASSPATH/JAR; recurses into those projects
    (unless invoked from the MSVS IDE pathway) and assembles the java
    invocation handed to unit.on_run_java.
    """
    flat, kv = common.sort_by_keywords(
        {'CLASSPATH': -1, 'IN': -1, 'OUT': -1, 'OUT_NOAUTO': -1, 'OUTPUT_INCLUDES': -1, 'DEBUG': 0, 'JAR': 1},
        args
    )
    depends = kv.get('CLASSPATH', []) + kv.get('JAR', [])
    if not depends:
        ymake.report_configure_error('Java program for RUN_JAVA is not specified')

    if not unit.get('IDE_MSVS_CALL'):
        for jar in depends:
            unit.oninternal_recurse(jar)

    # BUG FIX: the original had `classpath = ':'.join(classpath)` here with
    # `classpath` never defined, raising NameError on every call; the joined
    # value was never used afterwards, so the line is removed.

    # Generate java cmd
    cmd = []
    if kv.get('JAR'):
        cmd += [
            '-jar',
            ':'.join(['$SCARAB_SLIM'] + kv.get('JAR')),
        ]
    cmd += [
        '-classpath',
        ':'.join(['$SCARAB'] + kv.get('JAR', []) + kv.get('CLASSPATH', [])),
        '-Dfile.encoding=UTF-8',
    ]

    cmd += flat

    # Output is hidden unless DEBUG was requested.
    if 'DEBUG' not in kv:
        cmd += ['HIDE_OUTPUT']

    for k in 'IN', 'OUT', 'OUT_NOAUTO', 'OUTPUT_INCLUDES':
        if kv.get(k):
            cmd += [k] + kv[k]

    unit.on_run_java(cmd)
Example #32
0
def onyabs_generate_conf(unit, *args):
    """Generate yabs configuration files via the mkconf script.

    Each positional name is rendered from CONF_DIR templates into ${BINDIR}
    using the cluster-conf TOOL binary in the given MODE.
    """
    keywords = {
        'MODE': 1,
        'SCRIPT': 1,
        'SRC': 1,
        'TOOL': 1,
        'CONF_DIR': 1,
        'DEST': 1,
    }
    flat, kv = sort_by_keywords(keywords, args)
    src = get_or_default(kv, 'SRC', 'yabs/server/phantom')
    mode = get_or_default(kv, 'MODE', 'production')

    # All paths are resolved relative to the SRC directory.
    script = "/".join([src, get_or_default(kv, 'SCRIPT', 'mkconf.py')])
    conf = "/".join([src, get_or_default(kv, 'CONF_DIR', 'conf-tmpl')])
    tool = "/".join([src, get_or_default(kv, 'TOOL', 'yabs_conf')])

    for name in flat:
        filename = conf + "/" + name
        unit.onbuiltin_python([
            script,
            "--cluster-conf-binary", tool,
            "--mode", mode,
            "--dest-dir", "${BINDIR}",
            filename,
            "IN", filename,
            "OUT", "${BINDIR}/%s" % name,
            "TOOL", tool,
        ])
Example #33
0
def onacceleo(unit, *args):
    """Run the Acceleo code generator (ru.yandex.se.logsng.tool.Cli) via RUN_JAVA.

    Collects XSD/MTL/LANG inputs, builds the java command line and hands it
    to unit.on_run_java. Skipped entirely under YMAKE_JAVA_MODULES.
    """
    if unit.get("YMAKE_JAVA_MODULES") == "yes":
        return
    flat, kv = common.sort_by_keywords(
        {
            'XSD': -1,
            'MTL': -1,
            'MTL_ROOT': 1,
            'LANG': -1,
            'OUT': -1,
            'OUT_NOAUTO': -1,
            'OUTPUT_INCLUDES': -1,
            'DEBUG': 0
        }, args)

    # MTL_ROOT defaults to the unit's source-relative path.
    # BUG FIX: narrowed the overly broad `except Exception` to the two
    # exceptions a missing/empty MTL_ROOT can actually raise.
    try:
        mtlroot = kv['MTL_ROOT'][0]
    except (KeyError, IndexError):
        mtlroot = unit.path().replace('$S/', '')

    classpath = [
        '$SCARAB',
    ]  # XXX special word for ya make to replace following paths with real classpath
    classpath.append('tools/acceleo')

    depends = []
    if not unit.get('IDE_MSVS_CALL'):
        for jar in classpath[1:]:
            depends.append(jar)

    classpath = ':'.join(classpath)

    # Generate java cmd
    cmd = [
        '-classpath',
        classpath,
        '-Dfile.encoding=UTF-8',
        'ru.yandex.se.logsng.tool.Cli',
    ]

    for xsd in kv.get('XSD', []):
        cmd += ['--xsd', xsd]

    for mtl in kv.get('MTL', []):
        cmd += ['--mtl', mtl]

    for lang in kv.get('LANG', []):
        cmd += ['--lang', lang]

    cmd += [
        '--output-dir',
        unit.path().replace('$S/', '${ARCADIA_BUILD_ROOT}/')
    ]
    cmd += ['--build-root', '${ARCADIA_BUILD_ROOT}']
    cmd += ['--source-root', '${ARCADIA_ROOT}']
    cmd += ['--mtl-root', mtlroot]

    # Generate RUN_JAVA args
    # BUG FIX: take a copy instead of aliasing `cmd`, so the RUN_JAVA-only
    # suffix does not silently mutate the java command list as well.
    run_java = list(cmd)

    if 'DEBUG' not in kv:
        run_java += ['HIDE_OUTPUT']

    inputs = kv.get('XSD', []) + kv.get('MTL', []) + kv.get('LANG', [])
    if inputs:
        run_java += ['IN'] + inputs

    for k in 'OUT', 'OUT_NOAUTO', 'OUTPUT_INCLUDES':
        if kv.get(k):
            run_java += [k] + kv[k]

    if depends:
        run_java += ['TOOL'] + depends

    unit.on_run_java(run_java)
Example #34
0
def onadd_ytest(unit, *args):
    """Register a ytest for this unit: build a test record and dump it to the dart.

    args: [test_name, script_rel_path, ...] plus the keyword groups below.
    Skips coverage.extractor entries whose requirements are not met, and
    'no.test' entries entirely. Side effect: sets DART_DATA and appends to
    the TEST_DART_OUT_FILE.
    """
    keywords = {"DEPENDS": -1, "DATA": -1, "TIMEOUT": 1, "FORK_MODE": 1, "SPLIT_FACTOR": 1,
                "FORK_SUBTESTS": 0, "FORK_TESTS": 0}
    flat_args, spec_args = _common.sort_by_keywords(keywords, args)

    # Merge explicit DATA with unit-level TEST_DATA, dropping AUTOUPDATED entries.
    test_data = sorted(_common.filter_out_by_keyword(spec_args.get('DATA', []) + get_norm_paths(unit, 'TEST_DATA_VALUE'), 'AUTOUPDATED'))

    if flat_args[1] == "fuzz.test":
        unit.ondata("arcadia/fuzzing/{}/corpus.json".format(get_norm_unit_path(unit)))
    elif flat_args[1] == "go.test":
        data, _ = get_canonical_test_resources(unit)
        test_data += data
    elif flat_args[1] == "coverage.extractor" and not match_coverage_extractor_requirements(unit):
        # XXX
        # Current ymake implementation doesn't allow to call macro inside the 'when' body
        # that's why we add ADD_YTEST(coverage.extractor) to every PROGRAM entry and check requirements later
        return
    elif flat_args[1] == "no.test":
        return

    # FORK_SUBTESTS/FORK_TESTS flags win over FORK_MODE, which wins over
    # the unit-level TEST_FORK_MODE variable.
    fork_mode = []
    if 'FORK_SUBTESTS' in spec_args:
        fork_mode.append('subtests')
    if 'FORK_TESTS' in spec_args:
        fork_mode.append('tests')
    fork_mode = fork_mode or spec_args.get('FORK_MODE', []) or unit.get('TEST_FORK_MODE').split()
    fork_mode = ' '.join(fork_mode) if fork_mode else ''

    unit_path = get_norm_unit_path(unit)

    test_record = {
        'TEST-NAME': flat_args[0],
        'SCRIPT-REL-PATH': flat_args[1],
        'TESTED-PROJECT-NAME': unit.name(),
        'TESTED-PROJECT-FILENAME': unit.filename(),
        'SOURCE-FOLDER-PATH': unit_path,
        # TODO get rid of BUILD-FOLDER-PATH
        'BUILD-FOLDER-PATH': unit_path,
        'BINARY-PATH': "{}/{}".format(unit_path, unit.filename()),
        'CUSTOM-DEPENDENCIES': ' '.join(spec_args.get('DEPENDS', []) + get_values_list(unit, 'TEST_DEPENDS_VALUE')),
        'TEST-RECIPES': prepare_recipes(unit.get("TEST_RECIPES_VALUE")),
        'TEST-ENV': prepare_env(unit.get("TEST_ENV_VALUE")),
        #  'TEST-PRESERVE-ENV': 'da',
        'TEST-DATA': serialize_list(test_data),
        'TEST-TIMEOUT': ''.join(spec_args.get('TIMEOUT', [])) or unit.get('TEST_TIMEOUT') or '',
        'FORK-MODE': fork_mode,
        'SPLIT-FACTOR': ''.join(spec_args.get('SPLIT_FACTOR', [])) or unit.get('TEST_SPLIT_FACTOR') or '',
        'SIZE': ''.join(spec_args.get('SIZE', [])) or unit.get('TEST_SIZE_NAME') or '',
        'TAG': serialize_list(spec_args.get('TAG', []) + get_values_list(unit, 'TEST_TAGS_VALUE')),
        'REQUIREMENTS': serialize_list(spec_args.get('REQUIREMENTS', []) + get_values_list(unit, 'TEST_REQUIREMENTS_VALUE')),
        'TEST-CWD': unit.get('TEST_CWD_VALUE') or '',
        'FUZZ-DICTS': serialize_list(spec_args.get('FUZZ_DICTS', []) + get_unit_list_variable(unit, 'FUZZ_DICTS_VALUE')),
        'FUZZ-OPTS': serialize_list(spec_args.get('FUZZ_OPTS', []) + get_unit_list_variable(unit, 'FUZZ_OPTS_VALUE')),
        'YT-SPEC': serialize_list(spec_args.get('YT_SPEC', []) + get_unit_list_variable(unit, 'TEST_YT_SPEC_VALUE')),
        'BLOB': unit.get('TEST_BLOB_DATA') or '',
        'SKIP_TEST': unit.get('SKIP_TEST_VALUE') or '',
        'TEST_IOS_DEVICE_TYPE': unit.get('TEST_IOS_DEVICE_TYPE_VALUE') or '',
        'TEST_IOS_RUNTIME_TYPE': unit.get('TEST_IOS_RUNTIME_TYPE_VALUE') or '',
        'ANDROID_APK_TEST_ACTIVITY': unit.get('ANDROID_APK_TEST_ACTIVITY_VALUE') or '',
        'TEST_PARTITION': unit.get("TEST_PARTITION") or 'SEQUENTIAL',
    }

    if flat_args[1] == 'fuzz.test' and unit.get('FUZZING') == 'yes':
        test_record['FUZZING'] = '1'
        # use all cores if fuzzing requested
        test_record['REQUIREMENTS'] = serialize_list(filter(None, deserialize_list(test_record['REQUIREMENTS']) + ["cpu:all", "ram:all"]))

    data = dump_test(unit, test_record)
    if data:
        unit.set_property(["DART_DATA", data])
        save_in_file(unit.get('TEST_DART_OUT_FILE'), data)
Example #35
0
def onadd_ytest(unit, *args):
    """Register a ytest for this unit: build a test record and dump it to the dart.

    args: [test_name, script_rel_path, ...] plus the keyword groups below.
    Side effect: sets DART_DATA and appends to the TEST_DART_OUT_FILE.
    """
    keywords = {
        "DEPENDS": -1,
        "DATA": -1,
        "TIMEOUT": 1,
        "FORK_MODE": 1,
        "SPLIT_FACTOR": 1,
        "FORK_SUBTESTS": 0,
        "FORK_TESTS": 0,
    }
    flat_args, spec_args = _common.sort_by_keywords(keywords, args)

    if flat_args[1] == "fuzz.test":
        unit.ondata("arcadia/fuzzing/{}/corpus.json".format(strip_roots(unit.path())))

    # FORK_SUBTESTS/FORK_TESTS flags win over FORK_MODE, which wins over
    # the unit-level TEST_FORK_MODE variable.
    fork_mode = []
    if 'FORK_SUBTESTS' in spec_args:
        fork_mode.append('subtests')
    if 'FORK_TESTS' in spec_args:
        fork_mode.append('tests')
    fork_mode = fork_mode or spec_args.get('FORK_MODE', []) or unit.get('TEST_FORK_MODE').split()
    fork_mode = ' '.join(fork_mode) if fork_mode else ''

    test_record = {
        'TEST-NAME': flat_args[0],
        'SCRIPT-REL-PATH': flat_args[1],
        'TESTED-PROJECT-NAME': unit.name(),
        'TESTED-PROJECT-FILENAME': unit.filename(),
        'SOURCE-FOLDER-PATH': unit.resolve(unit.path()),
        'BUILD-FOLDER-PATH': strip_roots(unit.path()),
        'BINARY-PATH': strip_roots(os.path.join(unit.path(), unit.filename())),
        'CUSTOM-DEPENDENCIES': ' '.join(
            spec_args.get('DEPENDS', []) + get_values_list(unit, 'TEST_DEPENDS_VALUE')),
        'TEST-DATA': serialize_list(
            spec_args.get('DATA', [])
            + (unit.get(['__test_data']) or '').split(' ')
            + get_values_list(unit, 'TEST_DATA_VALUE')),
        'TEST-TIMEOUT': ''.join(spec_args.get('TIMEOUT', [])) or unit.get('TEST_TIMEOUT') or '',
        'FORK-MODE': fork_mode,
        'SPLIT-FACTOR': ''.join(spec_args.get('SPLIT_FACTOR', [])) or unit.get('TEST_SPLIT_FACTOR') or '',
        'SIZE': ''.join(spec_args.get('SIZE', [])) or unit.get('TEST_SIZE_NAME') or '',
        'TAG': serialize_list(
            spec_args.get('TAG', []) + (unit.get(['__test_tags']) or '').split(' ')),
        'REQUIREMENTS': serialize_list(
            spec_args.get('REQUIREMENTS', []) + (unit.get(['__test_requirements']) or '').split(' ')),
        'TEST-CWD': unit.get('TEST_CWD_VALUE') or '',
        'FUZZ-DICTS': serialize_list(
            spec_args.get('FUZZ_DICTS', []) + get_unit_list_variable(unit, 'FUZZ_DICTS_VALUE')),
        'FUZZ-OPTS': serialize_list(
            spec_args.get('FUZZ_OPTS', []) + get_unit_list_variable(unit, 'FUZZ_OPTS_VALUE')),
    }

    # use all cores if fuzzing requested
    if flat_args[1] == 'fuzz.test' and unit.get('FUZZING') == 'yes':
        test_record['REQUIREMENTS'] = serialize_list(
            filter(None, deserialize_list(test_record['REQUIREMENTS']) + ["cpu:all"]))

    data = dump_test(test_record)
    if data:
        unit.set_property(["DART_DATA", data])
        save_in_file(unit.get('TEST_DART_OUT_FILE'), data)
Example #36
0
def onadd_check(unit, *args):
    """Register an auxiliary check (linters, style checks, data validation) in the dart.

    flat_args[0] is the check type; the remaining positional args are the
    checked files (or, for JAVA_STYLE, the check level). Side effect: sets
    DART_DATA and appends to the TEST_DART_OUT_FILE.
    """
    if unit.get("TIDY") == "yes":
        # graph changed for clang_tidy tests
        return
    flat_args, spec_args = _common.sort_by_keywords({"DEPENDS": -1, "TIMEOUT": 1, "DATA": -1, "TAG": -1, "REQUIREMENTS": -1, "FORK_MODE": 1,
                                                     "SPLIT_FACTOR": 1, "FORK_SUBTESTS": 0, "FORK_TESTS": 0, "SIZE": 1}, args)
    check_type = flat_args[0]
    test_dir = get_norm_unit_path(unit)

    test_timeout = ''
    fork_mode = ''
    extra_test_data = ''
    extra_test_dart_data = {}
    ymake_java_test = unit.get('YMAKE_JAVA_TEST') == 'yes'

    if check_type in ["flake8.py2", "flake8.py3"]:
        script_rel_path = check_type
        fork_mode = unit.get('TEST_FORK_MODE') or ''
    elif check_type == "black":
        script_rel_path = check_type
        fork_mode = unit.get('TEST_FORK_MODE') or ''
    elif check_type == "JAVA_STYLE":
        # BUG FIX: was `... and not unit.get('ALL_SRCDIRS') or ''`; the
        # trailing `or ''` is always falsy and had no effect, so it is dropped.
        if ymake_java_test and not unit.get('ALL_SRCDIRS'):
            return
        if len(flat_args) < 2:
            raise Exception("Not enough arguments for JAVA_STYLE check")
        check_level = flat_args[1]
        # BUG FIX: the 'none' short-circuit used to sit *after* the
        # allowed_levels validation, where it could never trigger ('none' is
        # not a valid key, so the validation raised first).
        if check_level == 'none':
            return
        allowed_levels = {
            'base': '/yandex_checks.xml',
            'strict': '/yandex_checks_strict.xml',
            'extended': '/yandex_checks_extended.xml',
            'library': '/yandex_checks_library.xml',
        }
        if check_level not in allowed_levels:
            raise Exception('{} is not allowed in LINT(), use one of {}'.format(check_level, allowed_levels.keys()))
        flat_args[1] = allowed_levels[check_level]
        script_rel_path = "java.style"
        test_timeout = '120'
        fork_mode = unit.get('TEST_FORK_MODE') or ''
        if ymake_java_test:
            extra_test_data = java_srcdirs_to_data(unit, 'ALL_SRCDIRS')
        extra_test_dart_data['JDK_RESOURCE'] = 'JDK' + (unit.get('JDK_VERSION') or unit.get('JDK_REAL_VERSION') or '_DEFAULT')
    elif check_type == "gofmt":
        script_rel_path = check_type
        go_files = flat_args[1:]
        if go_files:
            # BUG FIX: lstrip("$S/") strips a *character set*, not a prefix,
            # mangling paths like "$S/Sources/..." — strip the prefix instead.
            go_dir = os.path.dirname(go_files[0])
            if go_dir.startswith("$S/"):
                go_dir = go_dir[len("$S/"):]
            test_dir = go_dir
    else:
        script_rel_path = check_type

    use_arcadia_python = unit.get('USE_ARCADIA_PYTHON')
    uid_ext = ''
    if check_type in ("check.data", "check.resource"):
        if unit.get("VALIDATE_DATA") == "no":
            return
    if check_type == "check.data":
        uid_ext = unit.get("SBR_UID_EXT").split(" ", 1)[-1]  # strip variable name
        data_re = re.compile(r"sbr:/?/?(\d+)=?.*")
        data = flat_args[1:]
        resources = []
        for f in data:
            matched = re.match(data_re, f)
            if matched:
                resources.append(matched.group(1))
        if resources:
            test_files = serialize_list(resources)
        else:
            return
    else:
        test_files = serialize_list(flat_args[1:])

    test_record = {
        'TEST-NAME': check_type.lower(),
        'TEST-TIMEOUT': test_timeout,
        'SCRIPT-REL-PATH': script_rel_path,
        'TESTED-PROJECT-NAME': os.path.basename(test_dir),
        'SOURCE-FOLDER-PATH': test_dir,
        'CUSTOM-DEPENDENCIES': " ".join(spec_args.get('DEPENDS', [])),
        'TEST-DATA': extra_test_data,
        "SBR-UID-EXT": uid_ext,
        'SPLIT-FACTOR': '',
        'TEST_PARTITION': 'SEQUENTIAL',
        'FORK-MODE': fork_mode,
        'FORK-TEST-FILES': '',
        'SIZE': 'SMALL',
        'TAG': '',
        'REQUIREMENTS': '',
        'USE_ARCADIA_PYTHON': use_arcadia_python or '',
        'OLD_PYTEST': 'no',
        'PYTHON-PATHS': '',
        # TODO remove FILES, see DEVTOOLS-7052
        'FILES': test_files,
        'TEST-FILES': test_files,
        'NO_JBUILD': 'yes' if ymake_java_test else 'no',
    }
    test_record.update(extra_test_dart_data)

    data = dump_test(unit, test_record)
    if data:
        unit.set_property(["DART_DATA", data])
        save_in_file(unit.get('TEST_DART_OUT_FILE'), data)
Example #37
0
def onyabs_server_build_yson_index(unit, *args):
    """Generate build commands for yabs yson/lm/dssm index databases.

    Positional args: the qxl directory and the json basename of the pickle.
    STATS entries are split into tag groups by get_tags(); one yson+db build
    pair is emitted per plain tag, and one db build per lm/dssm tag. The
    exact argument order of each command is significant — do not reorder.
    """
    (qxl_dir, json_basename), kv = sort_by_keywords(
        {
            'STATS': -1,
            'TIMESTAMP': 1,
            'STAGE': -1
        }, args)

    tags, lm_tags, dssm_tags = get_tags(kv.get('STATS'))

    qxl_prefix = '${ARCADIA_BUILD_ROOT}/' + qxl_dir + '/'
    pickle_name = qxl_prefix + json_basename + '.pickle'

    prefix = unit.path().replace('$S', '${ARCADIA_BUILD_ROOT}') + '/'

    stage = int(kv.get('STAGE', [DEFAULT_STAGE])[0])
    timestamp = int(kv.get('TIMESTAMP', [DEFAULT_TIMESTAMP])[0])

    deps = [
        qxl_dir,
        os.path.dirname(MKDB_INFO_JSON),
    ]

    unit.onpeerdir(deps)
    unit.ondepends(deps)

    # Plain tags: pickle -> yson (+ meta) -> compressed yabs db.
    for tag in tags:
        yson_name = prefix + tag + '.yson'
        meta_yson_name = prefix + tag + '.meta.yson'
        db_name = prefix + tag + '.yabs.' + CODEC

        unit.onrun_program([
            MAKE_YSON_FROM_SQLITE,
            '--db',
            pickle_name,
            '--yson',
            yson_name,
            '--meta_yson',
            meta_yson_name,
            '--tag',
            tag,
            '--stage',
            str(stage),
            '--nowtime',
            str(timestamp),
            '--mkdb_info',
            '${ARCADIA_BUILD_ROOT}/' + MKDB_INFO_JSON,
            'IN',
            pickle_name,
            'IN',
            '${ARCADIA_BUILD_ROOT}/' + MKDB_INFO_JSON,
            'OUT',
            yson_name,
            'OUT',
            meta_yson_name,
        ])

        # FIXME: path substitution for TOOLs is bugged,
        # you cannot pass two different TOOLs as arguments
        unit.onbuiltin_python([
            BUILD_YSON_TEST_BASE,
            '--mkdb',
            YT_MKDB,
            '--yson',
            yson_name,
            '--output',
            db_name,
            '--tag',
            tag,
            '--timestamp',
            str(timestamp),
            '--compress',
            CODEC,
            'IN',
            yson_name,
            'OUT',
            db_name,
            'TOOL',
            YT_MKDB,
            'TOOL',
            UC,
        ])

    # Language-model tags: built directly by mkdblm, no yson stage.
    for tag in lm_tags:
        db_name = prefix + tag + '.yabs.' + CODEC

        unit.onbuiltin_python([
            BUILD_LM_TEST_BASE,
            '--mkdblm',
            MKDBLM,
            '--output',
            db_name,
            '--tag',
            tag,
            '--timestamp',
            str(timestamp),
            '--compress',
            CODEC,
            'OUT',
            db_name,
            'TOOL',
            MKDBLM,
            'TOOL',
            UC,
        ])

    # DSSM tags: built by mkdbdssm (note: no --timestamp here).
    for tag in dssm_tags:
        db_name = prefix + tag + '.yabs.' + CODEC

        unit.onbuiltin_python([
            BUILD_DSSM_TEST_BASE,
            '--mkdbdssm',
            MKDBDSSM,
            '--output',
            db_name,
            '--tag',
            tag,
            '--compress',
            CODEC,
            'OUT',
            db_name,
            'TOOL',
            MKDBDSSM,
            'TOOL',
            UC,
        ])
Example #38
0
def onadd_check(unit, *args):
    """Register an auxiliary check (linters, style checks) in the dart.

    flat_args[0] is the check type; the remaining positional args are the
    checked files (or, for JAVA_STYLE, the check level). Side effect: sets
    DART_DATA and appends to the TEST_DART_OUT_FILE.
    """
    flat_args, spec_args = _common.sort_by_keywords(
        {
            "DEPENDS": -1,
            "TIMEOUT": 1,
            "DATA": -1,
            "TAG": -1,
            "REQUIREMENTS": -1,
            "FORK_MODE": 1,
            "SPLIT_FACTOR": 1,
            "FORK_SUBTESTS": 0,
            "FORK_TESTS": 0,
            "SIZE": 1
        }, args)
    check_type = flat_args[0]
    test_dir = get_norm_unit_path(unit)

    test_timeout = ''
    fork_mode = ''

    if check_type in ["flake8.py2", "flake8.py3"]:
        script_rel_path = check_type
        fork_mode = unit.get('TEST_FORK_MODE') or ''
    elif check_type == "JAVA_STYLE":
        if len(flat_args) < 2:
            raise Exception("Not enough arguments for JAVA_STYLE check")
        check_level = flat_args[1]
        allowed_levels = {
            'base': '/yandex_checks.xml',
            'strict': '/yandex_checks_strict.xml',
            'extended': '/yandex_checks_extended.xml',
            'library': '/yandex_checks_library.xml',
        }
        if check_level not in allowed_levels:
            raise Exception(
                '{} is not allowed in LINT(), use one of {}'.format(
                    check_level, allowed_levels.keys()))
        flat_args[1] = allowed_levels[check_level]
        script_rel_path = "java.style"
        test_timeout = '120'
        fork_mode = unit.get('TEST_FORK_MODE') or ''
    elif check_type == "gofmt":
        script_rel_path = check_type
        go_files = flat_args[1:]
        if go_files:
            # BUG FIX: lstrip("$S/") strips a *character set*, not a prefix,
            # mangling paths like "$S/Sources/..." — strip the prefix instead.
            go_dir = os.path.dirname(go_files[0])
            if go_dir.startswith("$S/"):
                go_dir = go_dir[len("$S/"):]
            test_dir = go_dir
    else:
        script_rel_path = check_type

    use_arcadia_python = unit.get('USE_ARCADIA_PYTHON')
    test_files = serialize_list(flat_args[1:])
    test_record = {
        'TEST-NAME': check_type.lower(),
        'TEST-TIMEOUT': test_timeout,
        'SCRIPT-REL-PATH': script_rel_path,
        'TESTED-PROJECT-NAME': os.path.basename(test_dir),
        'SOURCE-FOLDER-PATH': test_dir,
        'CUSTOM-DEPENDENCIES': " ".join(spec_args.get('DEPENDS', [])),
        'TEST-DATA': '',
        'SPLIT-FACTOR': '',
        'TEST_PARTITION': 'SEQUENTIAL',
        'FORK-MODE': fork_mode,
        'FORK-TEST-FILES': '',
        'SIZE': 'SMALL',
        'TAG': '',
        'REQUIREMENTS': '',
        'USE_ARCADIA_PYTHON': use_arcadia_python or '',
        'OLD_PYTEST': 'no',
        'PYTHON-PATHS': '',
        # TODO remove FILES, see DEVTOOLS-7052
        'FILES': test_files,
        'TEST-FILES': test_files,
    }

    data = dump_test(unit, test_record)
    if data:
        unit.set_property(["DART_DATA", data])
        save_in_file(unit.get('TEST_DART_OUT_FILE'), data)