def onresource(unit, *args):
    """
    RESOURCE(path1 key1 path2 key2 ...) implementation backed by library/resource.

    Args come as flat (path, key) pairs; a path of '-' means the data is inlined
    in the key itself as 'key=value' (no real input file).  On AArch64/ARM/PPC64LE
    the yasm-based compressed path is not used, so resources fall back to plain
    rescompiler output (presumably because yasm is unavailable there — the raw
    branch is the only one taken on those arches).
    """
    unit.onpeerdir(['library/resource'])
    outs = []
    # Process arguments in chunks so each generated command line stays short
    # (the sibling onfat_resource variants document the 8kb Windows
    # CreateProcess limit as the reason for the same chunking).
    for part_args in split(args, 8000):
        srcs_gen = []           # 'lid=key' mappings for tools/rorescompiler
        raw_gen = []            # (path, key) pairs compiled without compression
        raw_inputs = []         # real input files of the raw branch
        compressed = []         # argument triples for tools/rescompressor
        compressed_input = []   # real input files of the compressed branch
        compressed_output = []  # .rodata blobs produced by rescompressor
        for p, n in iterpair(part_args):
            # Architectures without the compressed/yasm pipeline: raw branch.
            if unit.enabled('ARCH_AARCH64') or unit.enabled(
                    'ARCH_ARM') or unit.enabled('ARCH_PPC64LE'):
                raw_gen += [p, n]
                if p != '-':
                    raw_inputs.append(p)
                continue
            # Stable id derived from path + key + module path names the blob.
            lid = '_' + pathid(p + n + unit.path())
            output = lid + '.rodata'
            if p == '-':
                # Inline resource: the key actually carries 'key=value'.
                n, p = n.split('=', 1)
                compressed += ['-', p, output]
            else:
                compressed += [p, output]
                compressed_input.append(p)
            compressed_output.append(output)
            srcs_gen.append('{}={}'.format(lid, n))
        if compressed:
            lid = listid(part_args)
            fake_yasm = '_' + lid + '.yasm'
            cmd = ['tools/rescompressor', fake_yasm
                   ] + gen_ro_flags(unit) + compressed
            if compressed_input:
                cmd += ['IN'] + compressed_input
            cmd += ['OUT_NOAUTO', fake_yasm] + compressed_output
            unit.onrun_program(cmd)
            unit.onsrcs(['GLOBAL', tobuilddir(unit.path() + '/' + fake_yasm)])
        if srcs_gen:
            # C++ table mapping resource keys to the compressed blobs above.
            output = listid(part_args) + '.cpp'
            unit.onrun_program(['tools/rorescompiler', output] + srcs_gen +
                               ['OUT_NOAUTO', output])
            outs.append(output)
        if raw_gen:
            output = listid(part_args) + '_raw.cpp'
            if raw_inputs:
                raw_inputs = ['IN'] + raw_inputs
            unit.onrun_program(['tools/rescompiler', output] + raw_gen +
                               raw_inputs + ['OUT_NOAUTO', output])
            unit.onsrcs(['GLOBAL', output])
    if outs:
        # Merge generated .cpp files to cut down the number of compiled units.
        if len(outs) > 1:
            unit.onjoin_srcs_global(['join_' + listid(outs) + '.cpp'] + outs)
        else:
            unit.onsrcs(['GLOBAL'] + outs)
def onro_resource(unit, *args):
    """
    RO_RESOURCE implementation: compress each input file into a .roresource
    blob, assemble all blobs of a chunk into one .asm via gen_rodata.py/yasm,
    and emit the key->blob C++ table.

    Args come as (path, key) pairs; '-' paths and non-yasm architectures
    (AArch64/ARM/PPC64LE) fall back to the plain rescompiler ("raw") branch.
    """
    unit.onpeerdir(['library/resource'])
    for part_args in split(args, 8000):
        srcs_gen = []    # 'key=lid' mappings for tools/rorescompiler
        raw_gen = []     # pairs handled by the uncompressed fallback
        raw_inputs = []  # real input files of the fallback branch
        ro_gen = []      # .roresource blobs produced by rescompressor
        for p, n in iterpair(part_args):
            if p == '-':
                # Inline entry: no file to compress, raw branch handles it.
                raw_gen += [p, n]
                continue
            if unit.enabled('ARCH_AARCH64') or unit.enabled(
                    'ARCH_ARM') or unit.enabled('ARCH_PPC64LE'):
                # No yasm pipeline on these arches — raw branch.
                raw_gen += [p, n]
                raw_inputs.append(p)
                continue
            lid = '_' + pathid(p + n + unit.path())
            srcs_gen.append('{}={}'.format(n, lid))
            output = lid + '.roresource'
            unit.onrun_program([
                'tools/rescompressor', p, output, 'IN', p, 'OUT_NOAUTO', output
            ])
            ro_gen.append(output)
        if ro_gen:
            # Pack all compressed blobs of this chunk into a single .asm file.
            output = listid(part_args) + '.asm'
            unit.onbuiltin_python([
                'build/scripts/gen_rodata.py', '--out-file', output, '--yasm',
                '${tool:"contrib/tools/yasm"}'
            ] + gen_ro_flags(unit) + ro_gen + ['IN'] + ro_gen +
                ['OUTPUT_INCLUDES'] + ro_gen + ['OUT_NOAUTO', output])
            unit.onsrcs(['GLOBAL', tobuilddir(unit.path() + '/' + output)])
        if srcs_gen:
            # Key -> blob lookup table for the compressed resources.
            output = listid(part_args) + '.cpp'
            unit.onrun_program(['tools/rorescompiler', output] + srcs_gen +
                               ['OUT_NOAUTO', output])
            unit.onsrcs(['GLOBAL', output])
        if raw_gen:
            output = listid(part_args) + '_raw.cpp'
            if raw_inputs:
                raw_inputs = ['IN'] + raw_inputs
            unit.onrun_program(['tools/rescompiler', output] + raw_gen +
                               raw_inputs + ['OUT_NOAUTO', output])
            unit.onsrcs(['GLOBAL', output])
def onfat_resource(unit, *args):
    """Handle FAT_RESOURCE: compile (path, key) pairs into one generated .cpp per chunk."""
    unit.onpeerdir(['library/cpp/resource'])
    # Since the maximum length of lpCommandLine string for CreateProcess is 8kb
    # (windows) characters, we make several calls of rescompiler.
    # https://msdn.microsoft.com/ru-ru/library/windows/desktop/ms682425.aspx
    for chunk in split(args, 8000):
        generated = listid(chunk) + '.cpp'
        real_inputs = [p for p, _ in iterpair(chunk) if p != '-']
        in_section = ['IN'] + real_inputs if real_inputs else []
        unit.onrun_program(
            ['tools/rescompiler', generated] + chunk + in_section +
            ['OUT_NOAUTO', generated])
        unit.onsrcs(['GLOBAL', generated])
def onfat_resource(unit, *args):
    """Implementation of FAT_RESOURCE over library/resource: one rescompiler .cpp per argument chunk."""
    unit.onpeerdir(['library/resource'])
    # Windows caps CreateProcess' lpCommandLine at 8kb characters, hence the
    # argument list is chunked and rescompiler is invoked once per chunk.
    # https://msdn.microsoft.com/ru-ru/library/windows/desktop/ms682425.aspx
    for bucket in split(args, 8000):
        out_cpp = listid(bucket) + '.cpp'
        cmd = ['tools/rescompiler', out_cpp]
        cmd += bucket
        file_args = [src for src, _ in iterpair(bucket) if src != '-']
        if file_args:
            cmd += ['IN'] + file_args
        cmd += ['OUT_NOAUTO', out_cpp]
        unit.onrun_program(cmd)
        unit.onsrcs(['GLOBAL', out_cpp])
def onro_resource(unit, *args):
    """RO_RESOURCE: compress each file with rescompressor and emit the key->blob table."""
    unit.onpeerdir(['library/resource'])
    for bucket in split(args, 8000):
        mappings = []
        for src, key in iterpair(bucket):
            # '-' (inline) entries are skipped by this implementation.
            if src == '-':
                continue
            blob_id = '_' + pathid(src)
            rodata = blob_id + '.rodata'
            unit.onrun_program(
                ['tools/rescompressor', src, rodata, 'IN', src, 'OUT', rodata])
            mappings.append('{}={}'.format(key, blob_id))
        table_cpp = listid(bucket) + '.cpp'
        unit.onrun_program(
            ['tools/rorescompiler', table_cpp] + mappings +
            ['OUT_NOAUTO', table_cpp])
        unit.onsrcs(['GLOBAL', table_cpp])
def onresource(unit, *args):
    """
    RESOURCE(...) implementation: feed (filepath, key) pairs to tools/rescompiler,
    chunking the argument list so every generated command line stays short.

    A filepath of '-' marks an inline 'key=value' entry with no real input file.
    """
    def split(lst, limit):
        # Pairs of (filepath, item) from a flat list, grouped into buckets whose
        # estimated command-line footprint stays under `limit`.  Paths are
        # specified with a replaceable prefix whose real length is unknown at
        # this point, so `root_length` is a rough per-path estimate.
        root_length = 200
        filepath = None
        length = 0
        bucket = []
        for item in lst:
            if filepath is None:
                # First element of the pair; wait for its companion.
                filepath = item
                continue
            pair_length = root_length + len(filepath) + len(item)
            if length + pair_length > limit and bucket:
                yield bucket
                bucket = []
                length = 0
            # BUGFIX: carry the current pair's size into the fresh bucket.
            # Previously the counter was reset to 0 after a flush while the
            # pair was still appended, so every following bucket undercounted
            # by one pair and could overshoot the limit.
            length += pair_length
            bucket.append(filepath)
            bucket.append(item)
            filepath = None
        if bucket:
            yield bucket

    unit.onpeerdir(['library/resource'])
    # Since the maximum length of lpCommandLine string for CreateProcess is 8kb
    # (windows) characters, we make several calls of rescompiler.
    # https://msdn.microsoft.com/ru-ru/library/windows/desktop/ms682425.aspx
    for part_args in split(args, 8000):
        output = listid(part_args) + '.cpp'
        inputs = [x for x, y in iterpair(part_args) if x != '-']
        if inputs:
            inputs = ['IN'] + inputs
        unit.onrun_program(['tools/rescompiler', output] + part_args + inputs +
                           ['OUT_NOAUTO', output])
        unit.onsrcs(['GLOBAL', output])
def onpy_srcs(unit, *args):
    """
    PY_SRCS() - is rule to build extended versions of Python interpreters and
    containing all application code in its executable file.  It can be used to
    collect only the executables but not shared libraries, and, in particular,
    not to collect the modules that are imported using import directive.

    The main disadvantage is the lack of IDE support; There is also no readline yet.

    The application can be collect from any of the sources from which the C
    library, and with the help of PY_SRCS .py, .pyx, .proto and .swg files.
    At the same time extensions for Python on C language generating from .pyx
    and .swg, will be registered in Python's as built-in modules, and sources
    on .py are stored as static data: when the interpreter starts, the
    initialization code will add a custom loader of these modules to
    sys.meta_path.

    By default .pyx files are collected as C++-extensions.  To collect them as
    C (similar to BUILDWITH_CYTHON_C, but with the ability to specify
    namespace), you must specify the Directive CYTHON_C.

    Building with pyx automatically registers modules, you do not need to call
    PY_REGISTER for them.  __init__.py never required, but if present (and
    specified in PY_SRCS), it will be imported when you import package modules
    with __init__.py Oh.

    Example of library declaration with PY_SRCS():
        PY_LIBRARY(mymodule)
        PY_SRCS({| CYTHON_C} { | TOP_LEVEL | NAMESPACE ns}
                a.py sub/dir/b.py e.proto sub/dir/f.proto
                c.pyx sub/dir/d.pyx g.swg sub/dir/h.swg)
        END()

    Documentation: https://wiki.yandex-team.ru/devtools/commandsandvars/py_srcs/
    """
    # Each file arg must either be a path, or "${...}/buildpath=modname", where
    # "${...}/buildpath" part will be used as a file source in a future macro,
    # and "modname" will be used as a module name.
    py3 = is_py3(unit)
    # Pull in the interpreter runtime, unless we are building it itself.
    if py3:
        if '/contrib/tools/python3/src/Lib' not in unit.path():
            unit.onpeerdir(['contrib/libs/python'])
        if '/library/python/runtime_py3' not in unit.path():
            unit.onpeerdir(['library/python/runtime_py3'])
    else:
        if '/contrib/tools/python/src/Lib' not in unit.path():
            unit.onpeerdir(['contrib/libs/python'])
        if '/library/python/runtime' not in unit.path():
            unit.onpeerdir(['library/python/runtime'])
    is_program = unit.get('MODULE_TYPE') == 'PROGRAM'
    if is_program:
        py_program(unit, py3)
    # Module namespace: "." means top level; otherwise an explicit value or the
    # arcadia-relative module path with '/' replaced by '.'.
    py_namespace_value = unit.get('PY_NAMESPACE_VALUE')
    if py_namespace_value == ".":
        ns = ""
    else:
        ns = (unit.get('PY_NAMESPACE_VALUE') or
              unit.path()[3:].replace('/', '.')) + '.'
    cython_coverage = unit.get('CYTHON_COVERAGE') == 'yes'
    optimize_proto = unit.get('OPTIMIZE_PY_PROTOS_FLAG') == 'yes'
    cython_includes = []
    for path in unit.includes():
        cython_includes += ['-I', resolve_to_ymake_path(path)]
    cython_directives = []
    if cython_coverage:
        cython_directives += ['-X', 'linetrace=True']
    # Buckets filled by the arg-parsing loop below; `pyxs` aliases the bucket
    # selected by the current CYTHON_C / CYTHON_CPP directive.
    pyxs_c = []
    pyxs_cpp = []
    pyxs = pyxs_cpp
    pys = []
    protos = []
    evs = []
    swigs = []
    # Optional debug dump of (path, module) pairs; NOTE: uses the Python 2
    # `thread` module, so this plugin is py2-only code.
    dump_dir = unit.get('PYTHON_BUILD_DUMP_DIR')
    dump_output = None
    if dump_dir:
        import thread
        pid = os.getpid()
        tid = thread.get_ident()
        dump_name = '{}-{}.dump'.format(pid, tid)
        dump_output = open(os.path.join(dump_dir, dump_name), 'a')
    args = iter(args)
    for arg in args:
        # Namespace directives.
        if arg == 'TOP_LEVEL':
            ns = ''
        elif arg == 'NAMESPACE':
            ns = next(args) + '.'
        # Cython directives.
        elif arg == 'CYTHON_C':
            pyxs = pyxs_c
        elif arg == 'CYTHON_CPP':
            pyxs = pyxs_cpp
        elif arg == 'CYTHON_DIRECTIVE':
            cython_directives += ['-X', next(args)]
        # Unsupported but legal PROTO_LIBRARY arguments.
        elif arg == 'GLOBAL' or arg.endswith('.gztproto'):
            pass
        # Sources.
        else:
            main_mod = arg == 'MAIN'
            if main_mod:
                # MAIN is followed by the actual source argument.
                arg = next(args)
            if '=' in arg:
                # Explicit "buildpath=modname" form.
                main_py = False
                path, mod = arg.split('=', 1)
            else:
                path = arg
                main_py = (path == '__main__.py' or
                           path.endswith('/__main__.py'))
                if not py3 and main_py:
                    mod = '__main__'
                else:
                    if arg.startswith('../'):
                        ymake.report_configure_error(
                            'PY_SRCS item starts with "../": {!r}'.format(arg))
                    if arg.startswith('/'):
                        ymake.report_configure_error(
                            'PY_SRCS item starts with "/": {!r}'.format(arg))
                        continue
                    mod = ns + stripext(arg).replace('/', '.')
            if py3 and mod == '__main__':
                ymake.report_configure_error(
                    'TOP_LEVEL __main__.py is not allowed in PY3_PROGRAM')
            if main_mod:
                py_main(unit, mod + ":main")
            elif py3 and main_py:
                py_main(unit, mod)
            pathmod = (path, mod)
            if dump_output is not None:
                dump_output.write('{path}\t{module}\n'.format(
                    path=rootrel_arc_src(path, unit), module=mod))
            # Dispatch the source into its bucket by extension.
            if path.endswith('.py'):
                pys.append(pathmod)
            elif path.endswith('.pyx'):
                pyxs.append(pathmod)
            elif path.endswith('.proto'):
                protos.append(pathmod)
            elif path.endswith('.ev'):
                evs.append(pathmod)
            elif path.endswith('.swg'):
                if py3:
                    ymake.report_configure_error(
                        'SWIG is not yet supported for Python 3: https://st.yandex-team.ru/DEVTOOLS-4863')
                else:
                    swigs.append(path)  # ignore mod, use last (and only) ns
            else:
                ymake.report_configure_error(
                    'in PY_SRCS: unrecognized arg {!r}'.format(path))
    if dump_output is not None:
        dump_output.close()
    if pyxs:
        files2res = set()
        if cython_coverage:
            def process_pyx(filename, path, out_suffix):
                # skip generated files
                if not is_arc_src(path, unit):
                    return
                # source file
                files2res.add((filename, path))
                # generated
                files2res.add((filename + out_suffix, path + out_suffix))
                # used includes
                for entry in parse_pyx_includes(filename, path,
                                                unit.resolve('$S')):
                    files2res.add(entry)
        else:
            def process_pyx(filename, path, out_suffix):
                pass
        for pyxs, cython, out_suffix in [
            (pyxs_c, unit.onbuildwith_cython_c, ".c"),
            (pyxs_cpp, unit.onbuildwith_cython_cpp, ".cpp"),
        ]:
            for path, mod in pyxs:
                filename = rootrel_arc_src(path, unit)
                cython([
                    path,
                    '--module-name', mod,
                    '--init-suffix', mangle(mod),
                    '--source-root', '${ARCADIA_ROOT}',
                    # set arcadia root relative __file__ for generated modules
                    '-X', 'set_initial_path={}'.format(filename),
                ] + cython_includes + cython_directives)
                py_register(unit, mod, py3)
                process_pyx(filename, path, out_suffix)
        if files2res:
            # Compile original and generated sources into target for proper
            # cython coverage calculation
            unit.onresource_files(
                [x for name, path in files2res for x in ('DEST', name, path)])
    if pys:
        # Reject duplicate module names early (set.add returns None, so the
        # comprehension keeps only values already seen).
        pys_seen = set()
        pys_dups = {m for _, m in pys if (m in pys_seen or pys_seen.add(m))}
        if pys_dups:
            ymake.report_configure_error(
                'Duplicate(s) is found in the PY_SRCS macro: {}'.format(pys_dups))
        res = []
        if py3:
            for path, mod in pys:
                root_rel_path = rootrel_arc_src(path, unit)
                unit.onpy3_compile_bytecode([root_rel_path + '-', path])
                dest = 'py/' + mod.replace('.', '/') + '.py'
                res += [
                    'DEST', dest, path,
                    'DEST', dest + '.yapyc3', path + '.yapyc3'
                ]
            unit.onresource_files(res)
            #add_python_lint_checks(unit, [path for path, mod in pys])
        else:
            for path, mod in pys:
                root_rel_path = rootrel_arc_src(path, unit)
                src = unit.resolve_arc_path(path) or path
                dst = tobuilddir(src) + '.yapyc'
                unit.onpy_compile_bytecode([root_rel_path + '-', src])
                key = '/py_modules/' + mod
                res += [
                    path, key,
                    '-', 'resfs/src/{}={}'.format(key, root_rel_path),
                    dst, '/py_code/' + mod,
                ]
            unit.onresource(res)
            add_python_lint_checks(unit, [path for path, mod in pys])
    if protos:
        if '/contrib/libs/protobuf/python/google_lib' not in unit.path():
            unit.onpeerdir(['contrib/libs/protobuf/python/google_lib'])
        unit.onpeerdir(unit.get("PY_PROTO_DEPS").split())
        proto_paths = [path for path, mod in protos]
        unit.ongenerate_py_protos_internal(proto_paths)
        # Recurse with the generated _pb2 sources so they go through the same
        # module-registration machinery.
        unit.onpy_srcs([
            pb2_arg(py_suf, path, mod, unit)
            for path, mod in protos
            for py_suf in unit.get("PY_PROTO_SUFFIXES").split()
        ])
        if optimize_proto:
            unit.onsrcs(proto_paths)
            pb_cc_outs = [
                pb_cc_arg(cc_suf, path, unit)
                for path in proto_paths
                for cc_suf in unit.get("CPP_PROTO_SUFFIXES").split()
            ]
            for pb_cc_outs_chunk in generate_chunks(pb_cc_outs, 10):
                if is_program:
                    unit.onjoin_srcs(
                        ['join_' + listid(pb_cc_outs_chunk) + '.cpp'] +
                        pb_cc_outs_chunk)
                else:
                    unit.onjoin_srcs_global(
                        ['join_' + listid(pb_cc_outs_chunk) + '.cpp'] +
                        pb_cc_outs_chunk)
    if evs:
        if '/contrib/libs/protobuf/python/google_lib' not in unit.path():
            unit.onpeerdir(['contrib/libs/protobuf/python/google_lib'])
        unit.ongenerate_py_evs_internal([path for path, mod in evs])
        unit.onpy_srcs([ev_arg(path, mod, unit) for path, mod in evs])
        if optimize_proto:
            unit.onsrcs([path for path, mod in evs])
            pb_cc_outs = [ev_cc_arg(path, unit) for path, _ in evs]
            for pb_cc_outs_chunk in generate_chunks(pb_cc_outs, 10):
                if is_program:
                    unit.onjoin_srcs(
                        ['join_' + listid(pb_cc_outs_chunk) + '.cpp'] +
                        pb_cc_outs_chunk)
                else:
                    unit.onjoin_srcs_global(
                        ['join_' + listid(pb_cc_outs_chunk) + '.cpp'] +
                        pb_cc_outs_chunk)
    if swigs:
        unit.onsrcs(swigs)
        prefix = unit.get('MODULE_PREFIX')
        project = unit.get('REALPRJNAME')
        py_register(unit, prefix + project, py3)
        path = '${ARCADIA_BUILD_ROOT}/' + '{}/{}.py'.format(
            unit.path()[3:], project)
        arg = '{}={}'.format(path, ns + project.replace('/', '.'))
        unit.onpy_srcs([arg])
def onpy_srcs(unit, *args):
    """
    @usage PY_SRCS({| CYTHON_C} { | TOP_LEVEL | NAMESPACE ns} Files...)

    PY_SRCS() - is rule to build extended versions of Python interpreters and
    containing all application code in its executable file.  It can be used to
    collect only the executables but not shared libraries, and, in particular,
    not to collect the modules that are imported using import directive.

    The main disadvantage is the lack of IDE support; There is also no readline yet.

    The application can be collect from any of the sources from which the C
    library, and with the help of PY_SRCS .py, .pyx, .proto and .swg files.
    At the same time extensions for Python on C language generating from .pyx
    and .swg, will be registered in Python's as built-in modules, and sources
    on .py are stored as static data: when the interpreter starts, the
    initialization code will add a custom loader of these modules to
    sys.meta_path.

    By default .pyx files are collected as C++-extensions.  To collect them as
    C (similar to BUILDWITH_CYTHON_C, but with the ability to specify
    namespace), you must specify the Directive CYTHON_C.

    Building with pyx automatically registers modules, you do not need to call
    PY_REGISTER for them.  __init__.py never required, but if present (and
    specified in PY_SRCS), it will be imported when you import package modules
    with __init__.py Oh.

    Example of library declaration with PY_SRCS():
        PY_LIBRARY(mymodule)
        PY_SRCS(a.py sub/dir/b.py e.proto sub/dir/f.proto
                c.pyx sub/dir/d.pyx g.swg sub/dir/h.swg)
        END()

    PY_REGISTER honors Python2 and Python3 differences and adjusts itself to
    Python version of a current module.

    Documentation: https://wiki.yandex-team.ru/arcadia/python/pysrcs/#modulipylibrarypy3libraryimakrospysrcs
    """
    # Each file arg must either be a path, or "${...}/buildpath=modname", where
    # "${...}/buildpath" part will be used as a file source in a future macro,
    # and "modname" will be used as a module name.
    upath = unit.path()[3:]
    py3 = is_py3(unit)
    with_py = not unit.get('PYBUILD_NO_PY')
    with_pyc = not unit.get('PYBUILD_NO_PYC')
    in_proto_library = unit.get('PY_PROTO') or unit.get('PY3_PROTO')
    need_gazetteer_peerdir = False
    # Depend on the interpreter headers unless we are building the interpreter
    # or its runtime, or the module explicitly opted out.
    if not upath.startswith('contrib/tools/python') and not upath.startswith(
            'library/python/runtime') and unit.get('NO_PYTHON_INCLS') != 'yes':
        unit.onpeerdir(['contrib/libs/python'])
    unit_needs_main = unit.get('MODULE_TYPE') in ('PROGRAM', 'DLL')
    if unit_needs_main:
        py_program(unit, py3)
    # Module namespace: "." means top level.
    py_namespace_value = unit.get('PY_NAMESPACE_VALUE')
    if py_namespace_value == ".":
        ns = ""
    else:
        ns = (unit.get('PY_NAMESPACE_VALUE') or
              upath.replace('/', '.')) + '.'
    cython_coverage = unit.get('CYTHON_COVERAGE') == 'yes'
    cythonize_py = False
    optimize_proto = unit.get('OPTIMIZE_PY_PROTOS_FLAG') == 'yes'
    cython_directives = []
    if cython_coverage:
        cython_directives += ['-X', 'linetrace=True']
    # Buckets filled by the arg-parsing loop; `pyxs`/`swigs` alias the bucket
    # selected by the most recent mode directive.
    pyxs_c = []
    pyxs_c_h = []
    pyxs_c_api_h = []
    pyxs_cpp = []
    pyxs = pyxs_cpp
    swigs_c = []
    swigs_cpp = []
    swigs = swigs_cpp
    pys = []
    protos = []
    evs = []
    # Optional debug dump of (path, module) pairs; NOTE: Python 2 only
    # (`thread` module, and `iteritems` is used further below).
    dump_dir = unit.get('PYTHON_BUILD_DUMP_DIR')
    dump_output = None
    if dump_dir:
        import thread
        pid = os.getpid()
        tid = thread.get_ident()
        dump_name = '{}-{}.dump'.format(pid, tid)
        dump_output = open(os.path.join(dump_dir, dump_name), 'a')
    args = iter(args)
    for arg in args:
        # Namespace directives.
        if arg == 'TOP_LEVEL':
            ns = ''
        elif arg == 'NAMESPACE':
            ns = next(args) + '.'
        # Cython directives.
        elif arg == 'CYTHON_C':
            pyxs = pyxs_c
        elif arg == 'CYTHON_C_H':
            pyxs = pyxs_c_h
        elif arg == 'CYTHON_C_API_H':
            pyxs = pyxs_c_api_h
        elif arg == 'CYTHON_CPP':
            pyxs = pyxs_cpp
        elif arg == 'CYTHON_DIRECTIVE':
            cython_directives += ['-X', next(args)]
        elif arg == 'CYTHONIZE_PY':
            cythonize_py = True
        # SWIG.
        elif arg == 'SWIG_C':
            swigs = swigs_c
        elif arg == 'SWIG_CPP':
            swigs = swigs_cpp
        # Unsupported but legal PROTO_LIBRARY arguments.
        elif arg == 'GLOBAL' or not in_proto_library and arg.endswith('.gztproto'):
            pass
        # Sources.
        else:
            main_mod = arg == 'MAIN'
            if main_mod:
                # MAIN is followed by the actual source argument.
                arg = next(args)
            if '=' in arg:
                # Explicit "buildpath=modname" form.
                main_py = False
                path, mod = arg.split('=', 1)
            else:
                if arg.endswith('.gztproto'):
                    # .gztproto is handled as a .proto plus a gazetteer dep.
                    need_gazetteer_peerdir = True
                    path = '{}.proto'.format(arg[:-9])
                else:
                    path = arg
                main_py = (path == '__main__.py' or
                           path.endswith('/__main__.py'))
                if not py3 and unit_needs_main and main_py:
                    mod = '__main__'
                else:
                    if arg.startswith('../'):
                        ymake.report_configure_error(
                            'PY_SRCS item starts with "../": {!r}'.format(arg))
                    if arg.startswith('/'):
                        ymake.report_configure_error(
                            'PY_SRCS item starts with "/": {!r}'.format(arg))
                        continue
                    mod = ns + stripext(arg).replace('/', '.')
            if py3 and mod == '__main__':
                ymake.report_configure_error(
                    'TOP_LEVEL __main__.py is not allowed in PY3_PROGRAM')
            if main_mod:
                py_main(unit, mod + ":main")
            elif py3 and unit_needs_main and main_py:
                py_main(unit, mod)
            pathmod = (path, mod)
            if dump_output is not None:
                dump_output.write('{path}\t{module}\n'.format(
                    path=rootrel_arc_src(path, unit), module=mod))
            # Dispatch the source into its bucket by extension.
            if path.endswith('.py'):
                if cythonize_py:
                    pyxs.append(pathmod)
                else:
                    pys.append(pathmod)
            elif path.endswith('.pyx'):
                pyxs.append(pathmod)
            elif path.endswith('.proto'):
                protos.append(pathmod)
            elif path.endswith('.ev'):
                evs.append(pathmod)
            elif path.endswith('.swg'):
                swigs.append(pathmod)
            # Allow pyi files in PY_SRCS for autocomplete in IDE, but skip it
            # during building
            elif path.endswith('.pyi'):
                pass
            else:
                ymake.report_configure_error(
                    'in PY_SRCS: unrecognized arg {!r}'.format(path))
    if dump_output is not None:
        dump_output.close()
    if pyxs:
        files2res = set()
        # Include map stores files which were included in the processing pyx
        # file, to be able to find source code of the included file inside
        # generated file for currently processing pyx file.
        include_map = collections.defaultdict(set)
        if cython_coverage:
            def process_pyx(filename, path, out_suffix, noext):
                # skip generated files
                if not is_arc_src(path, unit):
                    return
                # source file
                files2res.add((filename, path))
                # generated
                if noext:
                    files2res.add((os.path.splitext(filename)[0] + out_suffix,
                                   os.path.splitext(path)[0] + out_suffix))
                else:
                    files2res.add((filename + out_suffix, path + out_suffix))
                # used includes
                for entry in parse_pyx_includes(filename, path,
                                                unit.resolve('$S')):
                    files2res.add(entry)
                    include_arc_rel = entry[0]
                    include_map[filename].add(include_arc_rel)
        else:
            def process_pyx(filename, path, out_suffix, noext):
                pass
        for pyxs, cython, out_suffix, noext in [
            (pyxs_c, unit.on_buildwith_cython_c_dep, ".c", False),
            (pyxs_c_h, unit.on_buildwith_cython_c_h, ".c", True),
            (pyxs_c_api_h, unit.on_buildwith_cython_c_api_h, ".c", True),
            (pyxs_cpp, unit.on_buildwith_cython_cpp_dep, ".cpp", False),
        ]:
            for path, mod in pyxs:
                filename = rootrel_arc_src(path, unit)
                cython_args = [path]
                dep = path
                if path.endswith('.py'):
                    # For cythonized .py, depend on the matching .pxd if any.
                    pxd = '/'.join(mod.split('.')) + '.pxd'
                    if unit.resolve_arc_path(pxd):
                        dep = pxd
                cython_args.append(dep)
                cython_args += [
                    '--module-name', mod,
                    '--init-suffix', mangle(mod),
                    '--source-root', '${ARCADIA_ROOT}',
                    # set arcadia root relative __file__ for generated modules
                    '-X', 'set_initial_path={}'.format(filename),
                ] + cython_directives
                cython(cython_args)
                py_register(unit, mod, py3)
                process_pyx(filename, path, out_suffix, noext)
        if files2res:
            # Compile original and generated sources into target for proper
            # cython coverage calculation
            unit.onresource_files(
                [x for name, path in files2res for x in ('DEST', name, path)])
        if include_map:
            data = []
            prefix = 'resfs/cython/include'
            for line in sorted(
                    '{}/{}={}'.format(prefix, filename, ':'.join(sorted(files)))
                    for filename, files in include_map.iteritems()):
                data += ['-', line]
            unit.onresource(data)
    for swigs, on_swig_python in [
        (swigs_c, unit.on_swig_python_c),
        (swigs_cpp, unit.on_swig_python_cpp),
    ]:
        for path, mod in swigs:
            # Make output prefix basename match swig module name.
            prefix = path[:path.rfind('/') + 1] + mod.rsplit('.', 1)[-1]
            swg_py = '{}/{}/{}.py'.format('${ARCADIA_BUILD_ROOT}', upath, prefix)
            on_swig_python([path, prefix])
            onpy_register(unit, mod + '_swg')
            onpy_srcs(unit, swg_py + '=' + mod)
    if pys:
        # Reject duplicate module names (set.add returns None, so the
        # comprehension keeps only already-seen values).
        pys_seen = set()
        pys_dups = {m for _, m in pys if (m in pys_seen or pys_seen.add(m))}
        if pys_dups:
            ymake.report_configure_error(
                'Duplicate(s) is found in the PY_SRCS macro: {}'.format(pys_dups))
        res = []
        if py3:
            for path, mod in pys:
                dest = 'py/' + mod.replace('.', '/') + '.py'
                if with_py:
                    res += ['DEST', dest, path]
                if with_pyc:
                    root_rel_path = rootrel_arc_src(path, unit)
                    dst = path + uniq_suffix(path, unit)
                    unit.on_py3_compile_bytecode([root_rel_path + '-', path, dst])
                    res += ['DEST', dest + '.yapyc3', dst + '.yapyc3']
            unit.onresource_files(res)
            # NOTE(review): unit.get is given a list here, unlike other call
            # sites that pass a plain string — verify this is intentional.
            add_python_lint_checks(
                unit, 3, [path for path, mod in pys] +
                unit.get(['_PY_EXTRA_LINT_FILES_VALUE']).split())
        else:
            for path, mod in pys:
                root_rel_path = rootrel_arc_src(path, unit)
                if with_py:
                    key = '/py_modules/' + mod
                    res += [
                        path, key,
                        '-', 'resfs/src/{}={}'.format(key, root_rel_path),
                    ]
                if with_pyc:
                    src = unit.resolve_arc_path(path) or path
                    dst = path + uniq_suffix(path, unit)
                    unit.on_py_compile_bytecode([root_rel_path + '-', src, dst])
                    res += [dst + '.yapyc', '/py_code/' + mod]
            unit.onresource(res)
            add_python_lint_checks(
                unit, 2, [path for path, mod in pys] +
                unit.get(['_PY_EXTRA_LINT_FILES_VALUE']).split())
    if protos:
        if not upath.startswith('contrib/libs/protobuf/python/google_lib'):
            unit.onpeerdir(['contrib/libs/protobuf/python/google_lib'])
        unit.onpeerdir(unit.get("PY_PROTO_DEPS").split())
        proto_paths = [path for path, mod in protos]
        unit.on_generate_py_protos_internal(proto_paths)
        # Recurse with the generated _pb2 sources.
        unit.onpy_srcs([
            pb2_arg(py_suf, path, mod, unit)
            for path, mod in protos
            for py_suf in unit.get("PY_PROTO_SUFFIXES").split()
        ])
        if optimize_proto:
            unit.onsrcs(proto_paths)
            if need_gazetteer_peerdir:
                unit.onpeerdir(['kernel/gazetteer/proto'])
            pb_cc_outs = [
                pb_cc_arg(cc_suf, path, unit)
                for path in proto_paths
                for cc_suf in unit.get("CPP_PROTO_SUFFIXES").split()
            ]
            for pb_cc_outs_chunk in generate_chunks(pb_cc_outs, 10):
                if unit_needs_main:
                    unit.onjoin_srcs(
                        ['join_' + listid(pb_cc_outs_chunk) + '.cpp'] +
                        pb_cc_outs_chunk)
                else:
                    unit.onjoin_srcs_global(
                        ['join_' + listid(pb_cc_outs_chunk) + '.cpp'] +
                        pb_cc_outs_chunk)
    if evs:
        if not upath.startswith('contrib/libs/protobuf/python/google_lib'):
            unit.onpeerdir(['contrib/libs/protobuf/python/google_lib'])
        unit.on_generate_py_evs_internal([path for path, mod in evs])
        unit.onpy_srcs([ev_arg(path, mod, unit) for path, mod in evs])
        if optimize_proto:
            unit.onsrcs([path for path, mod in evs])
            pb_cc_outs = [ev_cc_arg(path, unit) for path, _ in evs]
            for pb_cc_outs_chunk in generate_chunks(pb_cc_outs, 10):
                if unit_needs_main:
                    unit.onjoin_srcs(
                        ['join_' + listid(pb_cc_outs_chunk) + '.cpp'] +
                        pb_cc_outs_chunk)
                else:
                    unit.onjoin_srcs_global(
                        ['join_' + listid(pb_cc_outs_chunk) + '.cpp'] +
                        pb_cc_outs_chunk)
def onresource(unit, *args):
    """
    @usage: RESOURCE(file name file name...)

    Add data (resources, random files) to the program)

    This is a simpler but less flexible option than ARCHIVE(), because in the
    case of ARCHIVE(), you have to use the data explicitly, and in the case of
    RESOURCE(), the data falls through SRCS(GLOBAL).  Therefore, there is no
    static data library from RESOURCE(), they are added only at the program
    linking stage.

    @example: https://wiki.yandex-team.ru/yatool/howtowriteyamakefiles/#a2ispolzujjtekomanduresource

    @example:
        LIBRARY()
        OWNER(user1)
        RESOURCE(
            path/to/file1 /key/in/program/1
            path/to/file2 /key2
        )
        END()
    """
    unit.onpeerdir(['library/resource'])
    outs = []
    # https://st.yandex-team.ru/DEVTOOLS-4037
    # compressed_outs = []
    # Process arguments in chunks to keep each command line short.
    for part_args in split(args, 8000):
        srcs_gen = []           # 'lid=key' mappings for tools/rorescompiler
        raw_gen = []            # (path, key) pairs compiled without compression
        raw_inputs = []         # real input files of the raw branch
        compressed = []         # rescompressor arguments
        compressed_input = []   # real input files of the compressed branch
        compressed_output = []  # .rodata blobs produced by rescompressor
        for p, n in iterpair(part_args):
            # No compressed/yasm pipeline on these architectures.
            if unit.enabled('ARCH_AARCH64') or unit.enabled(
                    'ARCH_ARM') or unit.enabled('ARCH_PPC64LE'):
                raw_gen += [p, n]
                if p != '-':
                    raw_inputs.append(p)
                continue
            # Stable id derived from path + key + module path names the blob.
            lid = '_' + pathid(p + n + unit.path())
            output = lid + '.rodata'
            if p == '-':
                # Inline resource: the key actually carries 'key=value'.
                n, p = n.split('=', 1)
                compressed += ['-', p, output]
            else:
                compressed += [p, output]
                compressed_input.append(p)
            compressed_output.append(output)
            srcs_gen.append('{}={}'.format(lid, n))
        if compressed:
            lid = listid(part_args)
            fake_yasm = '_' + lid + '.yasm'
            cmd = ['tools/rescompressor', fake_yasm
                   ] + gen_ro_flags(unit) + compressed
            if compressed_input:
                cmd += ['IN'] + compressed_input
            cmd += ['OUT_NOAUTO', fake_yasm] + compressed_output
            unit.onrun_program(cmd)
            # https://st.yandex-team.ru/DEVTOOLS-4037
            # compressed_outs.append(fake_yasm)
            unit.onsrcs(['GLOBAL', tobuilddir(unit.path() + '/' + fake_yasm)])
        if srcs_gen:
            # C++ table mapping resource keys to the compressed blobs above.
            output = listid(part_args) + '.cpp'
            unit.onrun_program(['tools/rorescompiler', output] + srcs_gen +
                               ['OUT_NOAUTO', output])
            outs.append(output)
        if raw_gen:
            output = listid(part_args) + '_raw.cpp'
            if raw_inputs:
                raw_inputs = ['IN'] + raw_inputs
            unit.onrun_program(['tools/rescompiler', output] + raw_gen +
                               raw_inputs + ['OUT_NOAUTO', output])
            unit.onsrcs(['GLOBAL', output])
    if outs:
        # Merge generated .cpp files to cut down the number of compiled units.
        if len(outs) > 1:
            unit.onjoin_srcs_global(['join_' + listid(outs) + '.cpp'] + outs)
        else:
            unit.onsrcs(['GLOBAL'] + outs)
def onpy3_srcs(unit, *args):
    """
    PY3_SRCS implementation: register .py/.pyx/.proto/.ev sources of a
    Python 3 module, generating cython extensions, bytecode resources and
    protobuf/ev bindings as needed.
    """
    # Each file arg must either be a path, or "${...}/buildpath=modname", where
    # "${...}/buildpath" part will be used as a file source in a future macro,
    # and "modname" will be used as a module name.
    # Pull in the interpreter runtime, unless we are building it itself.
    if '/contrib/tools/python3/src/Lib' not in unit.path():
        unit.onpeerdir(['contrib/libs/python'])
    if '/library/python/runtime_py3' not in unit.path():
        unit.onpeerdir(['library/python/runtime_py3'])
    is_program = unit.get('MODULE_TYPE') == 'PROGRAM'
    if is_program:
        py3_program(unit)
    # Module namespace: "." means top level.
    py_namespace_value = unit.get('PY_NAMESPACE_VALUE')
    if py_namespace_value == ".":
        ns = ""
    else:
        ns = (unit.get('PY_NAMESPACE_VALUE') or
              unit.path()[3:].replace('/', '.')) + '.'
    optimize_proto = unit.get('OPTIMIZE_PY_PROTOS_FLAG') == 'yes'
    cython_coverage = unit.get('CYTHON_COVERAGE') == 'yes'
    cython_directives = []
    if cython_coverage:
        cython_directives += ['-X', 'linetrace=True']
    # Buckets filled by the arg-parsing loop; `pyxs` aliases the bucket
    # selected by the current CYTHON_C / CYTHON_CPP directive.
    pyxs_c = []
    pyxs_cpp = []
    pyxs = pyxs_cpp
    pys = []
    protos = []
    evs = []
    args = iter(args)
    for arg in args:
        # Namespace directives.
        if arg == 'TOP_LEVEL':
            ns = ''
        elif arg == 'NAMESPACE':
            ns = next(args) + '.'
        # Cython directives.
        elif arg == 'CYTHON_C':
            pyxs = pyxs_c
        elif arg == 'CYTHON_CPP':
            pyxs = pyxs_cpp
        elif arg == 'CYTHON_DIRECTIVE':
            cython_directives += ['-X', next(args)]
        # Unsupported but legal PROTO_LIBRARY arguments.
        elif arg == 'GLOBAL' or arg.endswith('.gztproto'):
            pass
        # Sources.
        elif arg == '__main__.py' or arg.endswith('/__main__.py'):
            # __main__.py gets wrapped so the runtime entry point is
            # __real_main__:real_main_func.
            unit.onfix_python_main([arg, '__real_main__.py'])
            unit.onpy3_srcs(['TOP_LEVEL', '__real_main__.py'])
            unit.onpy3_main(['__real_main__:real_main_func'])
        else:
            main_mod = arg == 'MAIN'
            if main_mod:
                # MAIN is followed by the actual source argument.
                arg = next(args)
            if '=' in arg:
                # Explicit "buildpath=modname" form.
                path, mod = arg.split('=', 1)
            else:
                path = arg
                # Only reachable for __main__ via MAIN (the plain case is
                # handled by the elif branch above).
                if arg == '__main__.py' or arg.endswith('/__main__.py'):
                    mod = '__main__'
                else:
                    if arg.startswith('../'):
                        ymake.report_configure_error(
                            'PY3_SRCS item starts with "../": {!r}'.format(
                                arg))
                    if arg.startswith('/'):
                        ymake.report_configure_error(
                            'PY3_SRCS item starts with "/": {!r}'.format(arg))
                        continue
                    mod = ns + stripext(arg).replace('/', '.')
            if main_mod:
                unit.onpy3_main(mod)
            pathmod = (path, mod)
            # Dispatch the source into its bucket by extension.
            if path.endswith('.py'):
                pys.append(pathmod)
            elif path.endswith('.pyx'):
                pyxs.append(pathmod)
            elif path.endswith('.proto'):
                protos.append(pathmod)
            elif path.endswith('.ev'):
                evs.append(pathmod)
            else:
                ymake.report_configure_error(
                    'in PY3_SRCS: unrecognized arg {!r}'.format(path))
    if pyxs:
        files2res = set()
        if cython_coverage:
            def process_pyx(filename, path, out_suffix):
                # skip generated files
                if not is_arc_src(path, unit):
                    return
                # source file
                files2res.add((filename, path))
                # generated
                files2res.add((filename + out_suffix, path + out_suffix))
                # used includes
                for entry in parse_pyx_includes(filename, path,
                                                unit.resolve('$S')):
                    files2res.add(entry)
        else:
            def process_pyx(filename, path, out_suffix):
                pass
        for pyxs, cython, out_suffix in [
            (pyxs_c, unit.onbuildwith_cython_c, ".c"),
            (pyxs_cpp, unit.onbuildwith_cython_cpp, ".cpp"),
        ]:
            for path, mod in pyxs:
                filename = rootrel_arc_src(path, unit)
                cython([
                    path,
                    '--module-name', mod,
                    '--init-suffix', mangle(mod),
                    '--source-root', '${ARCADIA_ROOT}',
                    # set arcadia root relative __file__ for generated modules
                    '-X', 'set_initial_path={}'.format(filename),
                ] + cython_directives)
                unit.onpy3_register([mod])
                process_pyx(filename, path, out_suffix)
        if files2res:
            # Compile original and generated sources into target for proper
            # cython coverage calculation
            unit.onresource_files(
                [x for name, path in files2res for x in ('DEST', name, path)])
    if pys:
        res = []
        for path, mod in pys:
            root_rel_path = rootrel_arc_src(path, unit)
            unit.onpy3_compile_bytecode([root_rel_path + '-', path])
            dest = 'py/' + mod.replace('.', '/') + '.py'
            res += [
                'DEST', dest, path,
                'DEST', dest + '.yapyc3', path + '.yapyc3'
            ]
        unit.onresource_files(res)
        #add_python_lint_checks(unit, [path for path, mod in pys])
    if protos:
        if '/contrib/libs/protobuf/python/google_lib' not in unit.path():
            unit.onpeerdir(['contrib/libs/protobuf/python/google_lib'])
        grpc = unit.get('GRPC_FLAG') == 'yes'
        if grpc:
            unit.onpeerdir(['contrib/libs/grpc/python', 'contrib/libs/grpc'])
        proto_paths = [path for path, mod in protos]
        unit.ongenerate_py_protos(proto_paths)
        # Recurse with the generated _pb2 (and _pb2_grpc) sources.
        unit.onpy3_srcs([pb2_arg(path, mod, unit) for path, mod in protos])
        if grpc:
            unit.onpy3_srcs(
                [pb2_grpc_arg(path, mod, unit) for path, mod in protos])
        if optimize_proto:
            unit.onsrcs(proto_paths)
            pb_cc_outs = [pb_cc_arg(path, unit) for path in proto_paths]
            if grpc:
                pb_cc_outs += [pb_grpc_arg(path, unit) for path in proto_paths]
            for pb_cc_outs_chunk in generate_chunks(pb_cc_outs, 10):
                if is_program:
                    unit.onjoin_srcs(
                        ['join_py3' + listid(pb_cc_outs_chunk) + '.cpp'] +
                        pb_cc_outs_chunk)
                else:
                    unit.onjoin_srcs_global(
                        ['join_py3' + listid(pb_cc_outs_chunk) + '.cpp'] +
                        pb_cc_outs_chunk)
    if evs:
        if '/contrib/libs/protobuf/python/google_lib' not in unit.path():
            unit.onpeerdir(['contrib/libs/protobuf/python/google_lib'])
        unit.ongenerate_py_evs([path for path, mod in evs])
        unit.onpy3_srcs([ev_arg(path, mod, unit) for path, mod in evs])
        if optimize_proto:
            unit.onsrcs([path for path, mod in evs])
            pb_cc_outs = [ev_cc_arg(path, unit) for path, _ in evs]
            for pb_cc_outs_chunk in generate_chunks(pb_cc_outs, 10):
                if is_program:
                    unit.onjoin_srcs(
                        ['join_py3' + listid(pb_cc_outs_chunk) + '.cpp'] +
                        pb_cc_outs_chunk)
                else:
                    unit.onjoin_srcs_global(
                        ['join_py3' + listid(pb_cc_outs_chunk) +
                         '.cpp'] + pb_cc_outs_chunk)
def onpy_srcs(unit, *args):
    """
    PY_SRCS() is the rule used to build "extended" versions of Python
    interpreters that carry all of the application code inside the
    executable file.  It can only be used to build executables, not shared
    libraries, and, in particular, it cannot build modules that are loaded
    with the import directive.  The main disadvantages are the lack of IDE
    support and the absence of readline so far.

    An application can be built from any source a C library can be built
    from, plus - with the help of PY_SRCS - .py, .pyx, .proto and .swg
    files.  C extensions for Python generated from .pyx and .swg are
    registered as Python built-in modules, while .py sources are stored as
    static data: when the interpreter starts, initialization code installs
    a custom loader for these modules on sys.meta_path.

    By default .pyx files are built as C++ extensions.  To build them as C
    (similar to BUILDWITH_CYTHON_C, but with the ability to specify a
    namespace), pass the CYTHON_C directive.

    Building from .pyx automatically registers the modules; there is no
    need to call PY_REGISTER for them.

    __init__.py is never required, but if it is present (and listed in
    PY_SRCS) it will be imported whenever package modules are imported.

    Example of a library declaration with PY_SRCS():

    PY_LIBRARY(mymodule)
    PY_SRCS({| CYTHON_C} { | TOP_LEVEL | NAMESPACE ns} a.py sub/dir/b.py e.proto sub/dir/f.proto c.pyx sub/dir/d.pyx g.swg sub/dir/h.swg)
    END()

    Documentation: https://wiki.yandex-team.ru/devtools/commandsandvars/py_srcs/
    """
    # Each file arg must either be a path, or "${...}/buildpath=modname", where
    # "${...}/buildpath" part will be used as a file source in a future macro,
    # and "modname" will be used as a module name.

    unit.onuse_python([])

    if '/library/python/runtime' not in unit.path():
        unit.onpeerdir(['library/python/runtime'])

    is_program = unit.get('MODULE_TYPE') == 'PROGRAM'
    if is_program:
        py_program(unit)

    # Default module namespace: PY_NAMESPACE_VALUE if set, else the unit's
    # arcadia path (unit.path()[3:] - presumably strips a "$S/"-style prefix,
    # TODO confirm) with '/' turned into '.'.  A value of "." means
    # "no namespace at all".
    py_namespace_value = unit.get('PY_NAMESPACE_VALUE')
    if py_namespace_value == ".":
        ns = ""
    else:
        ns = (unit.get('PY_NAMESPACE_VALUE') or unit.path()[3:].replace('/', '.')) + '.'

    cython_coverage = unit.get('CYTHON_COVERAGE') == 'yes'
    cython_directives = []
    if cython_coverage:
        cython_directives += ['-X', 'linetrace=True']

    # Buckets for each supported source kind; `pyxs` aliases whichever of
    # pyxs_c / pyxs_cpp is currently selected (C++ is the default).
    pyxs_c = []
    pyxs_cpp = []
    pyxs = pyxs_cpp
    pys = []
    protos = []
    evs = []
    swigs = []

    # Iterate through an explicit iterator so that directives taking a value
    # (NAMESPACE, CYTHON_DIRECTIVE) can consume the following argument with
    # next().
    args = iter(args)
    for arg in args:
        # Namespace directives.
        if arg == 'TOP_LEVEL':
            ns = ''
        elif arg == 'NAMESPACE':
            ns = next(args) + '.'
        # Cython directives.
        elif arg == 'CYTHON_C':
            pyxs = pyxs_c
        elif arg == 'CYTHON_CPP':
            pyxs = pyxs_cpp
        elif arg == 'CYTHON_DIRECTIVE':
            cython_directives += ['-X', next(args)]
        # Unsupported but legal PROTO_LIBRARY arguments.
        elif arg == 'GLOBAL' or arg.endswith('.gztproto'):
            pass
        # Sources.
        else:
            if '=' in arg:
                # Explicit "buildpath=modname" form.
                path, mod = arg.split('=', 1)
            else:
                path = arg
                if arg == '__main__.py' or arg.endswith('/__main__.py'):
                    mod = '__main__'
                else:
                    mod = ns + stripext(arg).replace('/', '.')
            pathmod = (path, mod)
            if path.endswith('.py'):
                pys.append(pathmod)
            elif path.endswith('.pyx'):
                pyxs.append(pathmod)
            elif path.endswith('.proto'):
                protos.append(pathmod)
            elif path.endswith('.ev'):
                evs.append(pathmod)
            elif path.endswith('.swg'):
                swigs.append(path)  # ignore mod, use last (and only) ns
            else:
                ymake.report_configure_error(
                    'in PY_SRCS: unrecognized arg {!r}'.format(path))

    if pyxs:
        files2res = set()
        if cython_coverage:
            # Collect (resource-name, path) pairs for every .pyx translation
            # unit so both originals and generated files can be embedded for
            # coverage mapping.
            def process_pyx(filename, path, out_suffix):
                # skip generated files
                if not is_arc_src(path, unit):
                    return
                # source file
                files2res.add((filename, path))
                # generated
                files2res.add((filename + out_suffix, path + out_suffix))
                # used includes
                for entry in parse_pyx_includes(filename, path, unit.resolve('$S')):
                    files2res.add(entry)
        else:
            # Coverage disabled: nothing to collect.
            def process_pyx(filename, path, out_suffix):
                pass

        for pyxs, cython, out_suffix in [
            (pyxs_c, unit.onbuildwith_cython_c, ".c"),
            (pyxs_cpp, unit.onbuildwith_cython_cpp, ".cpp"),
        ]:
            for path, mod in pyxs:
                filename = rootrel_arc_src(path, unit)
                cython([
                    path,
                    '--module-name', mod,
                    '--init-name', 'init' + mangle(mod),
                    '--source-root', '${ARCADIA_ROOT}',
                    # set arcadia root relative __file__ for generated modules
                    '-X', 'set_initial_path={}'.format(filename),
                ] + cython_directives)
                unit.onpy_register([mod])
                process_pyx(filename, path, out_suffix)

        if files2res:
            # Compile original and generated sources into target for proper
            # cython coverage calculation
            unit.onresource_files(
                [x for name, path in files2res for x in ('DEST', name, path)])

    if pys:
        res = []
        for path, mod in pys:
            root_rel_path = rootrel_arc_src(path, unit)
            unit.onpy_compile_bytecode([root_rel_path + '-', path])
            key = '/py_modules/' + mod
            # Embed: the source under /py_modules/<mod>, a resfs source-path
            # mapping, and the compiled .yapyc under /py_code/<mod>.
            res += [
                path, key,
                '-', 'resfs/src/{}={}'.format(key, root_rel_path),
                path + '.yapyc', '/py_code/' + mod,
            ]
        unit.onresource(res)
        add_python_lint_checks(unit, [path for path, mod in pys])

    if protos:
        if '/contrib/libs/protobuf/python/google_lib' not in unit.path():
            unit.onpeerdir(['contrib/libs/protobuf/python/google_lib'])

        grpc = unit.get('GRPC_FLAG') == 'yes'
        if grpc:
            unit.onpeerdir(['contrib/libs/grpc/python', 'contrib/libs/grpc'])

        proto_paths = [path for path, mod in protos]
        unit.ongenerate_py_protos(proto_paths)
        # Re-enter this macro with the generated *_pb2.py files.
        unit.onpy_srcs([pb2_arg(path, mod, unit) for path, mod in protos])
        unit.onsrcs(proto_paths)

        if not is_program:
            pb_cc_outs = [pb_cc_arg(path, unit) for path in proto_paths]
            if len(pb_cc_outs) > 1:
                unit.onjoin_srcs_global(['join_' + listid(pb_cc_outs) + '.cpp'] + pb_cc_outs)
            else:
                unit.onsrcs(['GLOBAL'] + pb_cc_outs)

        if grpc:
            # Re-enter with the generated *_pb2_grpc.py stubs as well.
            unit.onpy_srcs([pb2_grpc_arg(path, mod, unit) for path, mod in protos])

    if evs:
        if '/contrib/libs/protobuf/python/google_lib' not in unit.path():
            unit.onpeerdir(['contrib/libs/protobuf/python/google_lib'])

        unit.ongenerate_py_evs([path for path, mod in evs])
        # Re-enter this macro with the generated event (.ev) Python sources.
        unit.onpy_srcs([ev_arg(path, mod, unit) for path, mod in evs])
        unit.onsrcs([path for path, mod in evs])

        if not is_program:
            pb_cc_outs = [ev_cc_arg(path, unit) for path, _ in evs]
            if len(pb_cc_outs) > 1:
                unit.onjoin_srcs_global(['join_' + listid(pb_cc_outs) + '.cpp'] + pb_cc_outs)
            else:
                unit.onsrcs(['GLOBAL'] + pb_cc_outs)

    if swigs:
        unit.onsrcs(swigs)
        prefix = unit.get('MODULE_PREFIX')
        project = unit.get('REALPRJNAME')
        unit.onpy_register([prefix + project])
        # Register the swig-generated wrapper module under the unit namespace.
        path = '${ARCADIA_BUILD_ROOT}/' + '{}/{}.py'.format(unit.path()[3:], project)
        arg = '{}={}'.format(path, ns + project.replace('/', '.'))
        unit.onpy_srcs([arg])
def onresource(unit, *args):
    """
    RESOURCE macro handler: embed files (or inline "key=value" data) into the
    module as named resources.

    `args` is a flat sequence of (source, key) pairs (consumed via
    iterpair).  A source of '-' marks an inline resource whose second element
    is really "key=value" - the value itself is the data, not a file path.
    """
    unit.onpeerdir(['library/resource'])
    outs = []
    # Process arguments in chunks of 8000 - presumably to keep the generated
    # command lines below a length limit; TODO confirm against split().
    for part_args in split(args, 8000):
        srcs_gen = []            # "lid=key" mappings fed to tools/rorescompiler
        raw_gen = []             # (source, key) pairs for the uncompressed fallback
        raw_inputs = []          # real input files for the fallback path
        compressed = []          # argument triples for tools/rescompressor
        compressed_input = []    # input files consumed by rescompressor
        compressed_output = []   # .rodata outputs produced by rescompressor
        for p, n in iterpair(part_args):
            # On these architectures take the plain (uncompressed)
            # rescompiler path instead of the yasm-based one.
            if unit.enabled('ARCH_AARCH64') or unit.enabled('ARCH_ARM') or unit.enabled('ARCH_PPC64LE'):
                raw_gen += [p, n]
                if p != '-':
                    raw_inputs.append(p)
                continue
            # Stable per-resource id derived from source, key and module path.
            lid = '_' + pathid(p + n + unit.path())
            output = lid + '.rodata'
            if p == '-':
                # Inline resource: split "key=value" - n becomes the key,
                # p becomes the literal data passed to rescompressor.
                n, p = n.split('=', 1)
                compressed += ['-', p, output]
            else:
                compressed += [p, output]
                compressed_input.append(p)
                compressed_output.append(output)
            srcs_gen.append('{}={}'.format(lid, n))
        if compressed:
            lid = listid(part_args)
            fake_yasm = '_' + lid + '.yasm'
            cmd = ['tools/rescompressor', fake_yasm] + gen_ro_flags(unit) + compressed
            if compressed_input:
                cmd += ['IN'] + compressed_input
            cmd += ['OUT_NOAUTO', fake_yasm] + compressed_output
            unit.onrun_program(cmd)
            if compressed_output:
                # Link/copy the generated yasm under its final name so the
                # file-backed .rodata outputs can be declared as
                # OUTPUT_INCLUDES of that node.
                fake_out = lid + '.yasm'
                cmd = ['build/scripts/fs_tools.py', 'link_or_copy', fake_yasm, fake_out] + ['IN', fake_yasm]
                cmd += ['OUTPUT_INCLUDES'] + compressed_output
                cmd += ['OUT_NOAUTO', fake_out]
                unit.onbuiltin_python(cmd)
            else:
                # Only inline resources - use the rescompressor output as is.
                fake_out = fake_yasm
            unit.onsrcs(['GLOBAL', tobuilddir(unit.path() + '/' + fake_out)])
        if srcs_gen:
            # Generate the C++ accessor source mapping resource ids to keys.
            # NOTE(review): srcs_gen entries here are "lid=key", while the
            # onro_resource handler earlier in this file formats them as
            # "key=lid" - verify the expected rorescompiler argument order.
            output = listid(part_args) + '.cpp'
            unit.onrun_program(['tools/rorescompiler', output] + srcs_gen + ['OUT_NOAUTO', output])
            outs.append(output)
        if raw_gen:
            # Fallback: compile the resources (uncompressed) straight to C++.
            output = listid(part_args) + '_raw.cpp'
            if raw_inputs:
                raw_inputs = ['IN'] + raw_inputs
            unit.onrun_program(['tools/rescompiler', output] + raw_gen + raw_inputs + ['OUT_NOAUTO', output])
            unit.onsrcs(['GLOBAL', output])
    if outs:
        # Merge the generated accessor sources into one translation unit when
        # there is more than one chunk.
        if len(outs) > 1:
            unit.onjoin_srcs_global(['join_' + listid(outs) + '.cpp'] + outs)
        else:
            unit.onsrcs(['GLOBAL'] + outs)