def __init__(self, path, unit):
    self._path = path
    self._prefix = tobuilddir(stripext(retarget(unit, self._path)))

    if 'market/contrib/omniorb' not in self._path:
        unit.onpeerdir(['market/contrib/omniorb'])

    unit.onaddincl(['GLOBAL', repl(os.path.dirname(self._prefix))])
    unit.onaddincl(['GLOBAL', 'market/contrib/omniorb/include'])

    flags = unit.get('OMNIIDL_FLAGS')
    if not flags:
        flags = '-bcxx -Wba -C. -I /usr/share/idl/yandex'

    custom_flags = unit.get('OMNIIDL_FLAGS_' + os.path.basename(path).replace('.', '_').upper())
    if custom_flags:
        flags += ' ' + custom_flags

    self._flags = ['-I', os.path.dirname(self._path)] + flags.split()

    if '--gen-headers' in self._flags:
        self._flags.remove('--gen-headers')
        self._genh = True
    else:
        self._genh = False

    if '-WbF' in self._flags:
        self._genh = True
def onresource(unit, *args):
    unit.onpeerdir(['library/resource'])

    outs = []

    for part_args in split(args, 8000):
        srcs_gen = []
        raw_gen = []
        raw_inputs = []
        compressed = []
        compressed_input = []
        compressed_output = []

        for p, n in iterpair(part_args):
            if unit.enabled('ARCH_AARCH64') or unit.enabled('ARCH_ARM') or unit.enabled('ARCH_PPC64LE'):
                raw_gen += [p, n]
                if p != '-':
                    raw_inputs.append(p)
                continue

            lid = '_' + pathid(p + n + unit.path())
            output = lid + '.rodata'

            if p == '-':
                n, p = n.split('=', 1)
                compressed += ['-', p, output]
            else:
                compressed += [p, output]
                compressed_input.append(p)

            compressed_output.append(output)
            srcs_gen.append('{}={}'.format(lid, n))

        if compressed:
            lid = listid(part_args)
            fake_yasm = '_' + lid + '.yasm'
            cmd = ['tools/rescompressor', fake_yasm] + gen_ro_flags(unit) + compressed
            if compressed_input:
                cmd += ['IN'] + compressed_input
            cmd += ['OUT_NOAUTO', fake_yasm] + compressed_output
            unit.onrun_program(cmd)
            unit.onsrcs(['GLOBAL', tobuilddir(unit.path() + '/' + fake_yasm)])

        if srcs_gen:
            output = listid(part_args) + '.cpp'
            unit.onrun_program(['tools/rorescompiler', output] + srcs_gen + ['OUT_NOAUTO', output])
            outs.append(output)

        if raw_gen:
            output = listid(part_args) + '_raw.cpp'
            if raw_inputs:
                raw_inputs = ['IN'] + raw_inputs
            unit.onrun_program(['tools/rescompiler', output] + raw_gen + raw_inputs + ['OUT_NOAUTO', output])
            unit.onsrcs(['GLOBAL', output])

    if outs:
        if len(outs) > 1:
            unit.onjoin_srcs_global(['join_' + listid(outs) + '.cpp'] + outs)
        else:
            unit.onsrcs(['GLOBAL'] + outs)
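# The resource handlers in this file rely on small helpers (split, iterpair, pathid,
# listid, gen_ro_flags) imported from the plugins' shared module and not shown here.
# Below is a minimal, hypothetical sketch of the semantics a few of them are assumed
# to have, inferred purely from the call sites; the real implementations may differ.
import hashlib


def iterpair(lst):
    # Yield consecutive (path, key) pairs from a flat argument list.
    it = iter(lst)
    return zip(it, it)


def split(args, chunk_limit):
    # Split a flat (path, key, path, key, ...) list into chunks whose total textual
    # length stays under chunk_limit (a command-line length guard), keeping pairs together.
    chunk, size = [], 0
    for p, n in zip(args[::2], args[1::2]):
        pair_len = len(p) + len(n)
        if chunk and size + pair_len > chunk_limit:
            yield chunk
            chunk, size = [], 0
        chunk.extend((p, n))
        size += pair_len
    if chunk:
        yield chunk


def pathid(path):
    # Stable short identifier derived from a path-like string.
    return hashlib.md5(path.encode('utf-8')).hexdigest()[:16]


def listid(lst):
    # Stable short identifier for a whole argument list.
    return pathid('|'.join(lst))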
def __init__(self, path, unit):
    self._path = path
    self._prefix = tobuilddir(stripext(retarget(unit, self._path)))

    if 'market/contrib/omniorb' not in self._path:
        unit.onpeerdir(['market/contrib/omniorb'])

    unit.onaddincl(['GLOBAL', resolve_to_ymake_path(os.path.dirname(self._prefix))])
    unit.onaddincl(['GLOBAL', 'market/contrib/omniorb/include'])

    flags = unit.get('OMNIIDL_FLAGS')
    if not flags:
        flags = '-bcxx -Wba -C. -I /usr/share/idl/yandex'

    custom_flags = unit.get('OMNIIDL_FLAGS_' + os.path.basename(path).replace('.', '_').upper())
    if custom_flags:
        flags += ' ' + custom_flags

    self._flags = ['-I', os.path.dirname(self._path)] + flags.split()

    if '--gen-headers' in self._flags:
        self._flags.remove('--gen-headers')
        self._genh = True
    else:
        self._genh = False

    if '-WbF' in self._flags:
        self._genh = True
def __init__(self, path, unit):
    self._tool = unit.get('SWIG_TOOL')
    self._library_dir = unit.get('SWIG_LIBRARY') or 'contrib/tools/swig/Lib'
    self._local_swig = unit.get('USE_LOCAL_SWIG') == "yes"

    self._path = path
    self._flags = ['-cpperraswarn']

    self._bindir = common.tobuilddir(unit.path())
    self._input_name = common.stripext(os.path.basename(self._path))

    relpath = os.path.relpath(os.path.dirname(self._path), unit.path())

    self._main_out = os.path.join(
        self._bindir,
        '' if relpath == '.' else relpath.replace('..', '__'),
        self._input_name + '_wrap.c')

    if not path.endswith('.c.swg'):
        self._flags += ['-c++']
        self._main_out += 'pp'

    self._swig_lang = unit.get('SWIG_LANG')
    lang_specific_incl_dir = 'perl5' if self._swig_lang == 'perl' else self._swig_lang
    incl_dirs = [
        posixpath.join(self._library_dir, lang_specific_incl_dir),
        self._library_dir,
    ]
    self._incl_dirs = ['$S', '$B'] + [posixpath.join('$S', d) for d in incl_dirs]

    modname = unit.get('REALPRJNAME')
    self._flags.extend(['-module', modname])

    if not self._local_swig:
        unit.onaddincl(incl_dirs)

    if self._swig_lang == 'python':
        self._out_name = modname + '.py'
        self._flags.extend(['-interface', unit.get('MODULE_PREFIX') + modname])

    if self._swig_lang == 'perl':
        self._out_name = modname + '.pm'
        self._flags.append('-shadow')
        unit.onpeerdir(['build/platform/perl'])

    if self._swig_lang == 'java':
        self._out_name = os.path.splitext(os.path.basename(self._path))[0] + '.jsrc'
        self._out_header = os.path.splitext(self._main_out)[0] + '.h'
        self._package = 'ru.yandex.' + os.path.dirname(self._path).replace('$S/', '').replace('$B/', '').replace('/', '.').replace('-', '_')
        if unit.get('OS_ANDROID') != "yes":
            unit.onpeerdir(['contrib/libs/jdk'])

    self._flags.append('-' + self._swig_lang)
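# Illustrative only: how the *_wrap output path above is derived for a C++ .swg source,
# assuming (hypothetically) unit.path() == 'project/lib', path == 'project/lib/sub/foo.swg',
# and common.tobuilddir() mapping the unit path to 'build/project/lib'.
import os

bindir = 'build/project/lib'
relpath = os.path.relpath('project/lib/sub', 'project/lib')          # 'sub'
main_out = os.path.join(bindir, relpath, 'foo' + '_wrap.c') + 'pp'   # C++ mode appends 'pp'
print(main_out)  # build/project/lib/sub/foo_wrap.cpp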
def __init__(self, path, unit):
    self._path = path
    self._flags = ['-cpperraswarn']

    self._bindir = common.tobuilddir(unit.path())
    self._input_name = common.stripext(os.path.basename(self._path))

    relpath = os.path.relpath(os.path.dirname(self._path), unit.path())

    self._main_out = os.path.join(
        self._bindir,
        '' if relpath == '.' else relpath.replace('..', '__'),
        self._input_name + '_wrap.c')

    if not path.endswith('.c.swg'):
        self._flags += ['-c++']
        self._main_out += 'pp'

    self._swig_lang = unit.get('SWIG_LANG')
    lang_specific_incl_dir = 'perl5' if self._swig_lang == 'perl' else self._swig_lang
    incl_dirs = [
        'bindings/swiglib',
        os.path.join(_SWIG_LIB_PATH, lang_specific_incl_dir),
        _SWIG_LIB_PATH,
        os.path.join(_SWIG_LIB_PATH, 'python'),
    ]
    self._incl_dirs = ['$S', '$B'] + ['$S/{}'.format(d) for d in incl_dirs]

    modname = unit.get('REALPRJNAME')
    self._flags.extend(['-module', modname])

    unit.onaddincl(incl_dirs)
    unit.onpython_addincl([])

    if self._swig_lang == 'python':
        self._out_name = modname + '.py'
        self._flags.extend(['-interface', unit.get('MODULE_PREFIX') + modname])

    if self._swig_lang == 'perl':
        self._out_name = modname + '.pm'
        self._flags.append('-shadow')
        unit.onpeerdir(['build/platform/perl'])

    if self._swig_lang == 'java':
        self._out_name = os.path.splitext(os.path.basename(self._path))[0] + '.jsrc'
        self._out_header = os.path.splitext(self._main_out)[0] + '.h'
        self._package = 'ru.yandex.' + os.path.dirname(self._path).replace('$S/', '').replace('$B/', '').replace('/', '.').replace('-', '_')
        unit.onpeerdir(['contrib/libs/jdk'])

    self._flags.append('-' + self._swig_lang)
def onro_resource(unit, *args):
    unit.onpeerdir(['library/resource'])

    for part_args in split(args, 8000):
        srcs_gen = []
        raw_gen = []
        raw_inputs = []
        ro_gen = []

        for p, n in iterpair(part_args):
            if p == '-':
                raw_gen += [p, n]
                continue

            if unit.enabled('ARCH_AARCH64') or unit.enabled('ARCH_ARM') or unit.enabled('ARCH_PPC64LE'):
                raw_gen += [p, n]
                raw_inputs.append(p)
                continue

            lid = '_' + pathid(p + n + unit.path())
            srcs_gen.append('{}={}'.format(n, lid))

            output = lid + '.roresource'
            unit.onrun_program(['tools/rescompressor', p, output, 'IN', p, 'OUT_NOAUTO', output])
            ro_gen.append(output)

        if ro_gen:
            output = listid(part_args) + '.asm'
            unit.onbuiltin_python([
                'build/scripts/gen_rodata.py',
                '--out-file', output,
                '--yasm', '${tool:"contrib/tools/yasm"}',
            ] + gen_ro_flags(unit) + ro_gen + ['IN'] + ro_gen + ['OUTPUT_INCLUDES'] + ro_gen + ['OUT_NOAUTO', output])
            unit.onsrcs(['GLOBAL', tobuilddir(unit.path() + '/' + output)])

        if srcs_gen:
            output = listid(part_args) + '.cpp'
            unit.onrun_program(['tools/rorescompiler', output] + srcs_gen + ['OUT_NOAUTO', output])
            unit.onsrcs(['GLOBAL', output])

        if raw_gen:
            output = listid(part_args) + '_raw.cpp'
            if raw_inputs:
                raw_inputs = ['IN'] + raw_inputs
            unit.onrun_program(['tools/rescompiler', output] + raw_gen + raw_inputs + ['OUT_NOAUTO', output])
            unit.onsrcs(['GLOBAL', output])
def __init__(self, path, unit):
    self._path = path
    self._flags = ['-cpperraswarn']

    self._bindir = common.tobuilddir(unit.path())
    self._input_name = common.stripext(os.path.basename(self._path))

    relpath = os.path.relpath(os.path.dirname(self._path), unit.path())

    self._main_out = os.path.join(
        self._bindir,
        '' if relpath == '.' else relpath.replace('..', '__'),
        self._input_name + '_wrap.c')

    if not path.endswith('.c.swg'):
        self._flags += ['-c++']
        self._main_out += 'pp'

    self._swig_lang = unit.get('SWIG_LANG')
    lang_specific_incl_dir = 'perl5' if self._swig_lang == 'perl' else self._swig_lang
    incl_dirs = [
        'bindings/swiglib',
        os.path.join(_SWIG_LIB_PATH, lang_specific_incl_dir),
        _SWIG_LIB_PATH,
        os.path.join(_SWIG_LIB_PATH, 'python'),
    ]
    self._incl_dirs = ['$S', '$B'] + ['$S/{}'.format(d) for d in incl_dirs]

    modname = unit.get('REALPRJNAME')
    self._flags.extend(['-module', modname])

    unit.onaddincl(incl_dirs)
    unit.onpython_addincl([])

    if self._swig_lang == 'python':
        self._out_name = modname + '.py'
        self._flags.extend(['-interface', unit.get('MODULE_PREFIX') + modname])

    if self._swig_lang == 'perl':
        self._out_name = modname + '.pm'
        self._flags.append('-shadow')
        unit.onpeerdir(['contrib/libs/platform/perl'])

    if self._swig_lang == 'java':
        self._out_name = os.path.splitext(os.path.basename(self._path))[0] + '.jsrc'
        self._package = 'ru.yandex.' + os.path.dirname(self._path).replace('$S/', '').replace('$B/', '').replace('/', '.').replace('-', '_')
        unit.onpeerdir(['contrib/libs/jdk'])

    self._flags.append('-' + self._swig_lang)
def output(self):
    return common.make_tuples([common.tobuilddir(common.stripext(self._path)) + '.o'])
def output(self):
    base_path = common.tobuilddir(common.stripext(self._path))
    return [
        (base_path + self.extension(), []),
        (base_path + self.schema_extension(), ['noauto']),
    ]
def on_go_process_srcs(unit):
    """
    _GO_PROCESS_SRCS() macro processes only 'CGO' files. All remaining *.go files and other input files
    are currently processed by a link command of the GO module (GO_LIBRARY, GO_PROGRAM).
    """

    srcs_files = get_appended_values(unit, '_GO_SRCS_VALUE')

    asm_files = []
    c_files = []
    cxx_files = []
    ev_files = []
    go_files = []
    in_files = []
    proto_files = []
    s_files = []
    syso_files = []

    classifed_files = {
        '.c': c_files,
        '.cc': cxx_files,
        '.cpp': cxx_files,
        '.cxx': cxx_files,
        '.ev': ev_files,
        '.go': go_files,
        '.in': in_files,
        '.proto': proto_files,
        '.s': asm_files,
        '.syso': syso_files,
        '.C': cxx_files,
        '.S': s_files,
    }

    # Classify files specified in the _GO_SRCS() macro by extension and process the CGO_EXPORT keyword,
    # which can precede C/C++ files only
    is_cgo_export = False
    for f in srcs_files:
        _, ext = os.path.splitext(f)
        ext_files = classifed_files.get(ext)
        if ext_files is not None:
            if is_cgo_export:
                is_cgo_export = False
                if ext in ('.c', '.cc', '.cpp', '.cxx', '.C'):
                    unit.oncopy_file_with_context([f, f, 'OUTPUT_INCLUDES', '${BINDIR}/_cgo_export.h'])
                    f = '${BINDIR}/' + f
                else:
                    ymake.report_configure_error('Unmatched CGO_EXPORT keyword in SRCS() macro')
            ext_files.append(f)
        elif f == 'CGO_EXPORT':
            is_cgo_export = True
        else:
            # FIXME(snermolaev): We could report unsupported files for _GO_SRCS here
            pass
    if is_cgo_export:
        ymake.report_configure_error('Unmatched CGO_EXPORT keyword in SRCS() macro')

    for f in go_files:
        if f.endswith('_test.go'):
            ymake.report_configure_error('file {} must be listed in GO_TEST_SRCS() or GO_XTEST_SRCS() macros'.format(f))
    go_test_files = get_appended_values(unit, '_GO_TEST_SRCS_VALUE')
    go_xtest_files = get_appended_values(unit, '_GO_XTEST_SRCS_VALUE')
    for f in go_test_files + go_xtest_files:
        if not f.endswith('_test.go'):
            ymake.report_configure_error('file {} should not be listed in GO_TEST_SRCS() or GO_XTEST_SRCS() macros'.format(f))

    is_test_module = unit.enabled('GO_TEST_MODULE')

    # Add gofmt style checks
    if unit.enabled('_GO_FMT_ADD_CHECK'):
        resolved_go_files = []
        go_source_files = [] if is_test_module and unit.get(['GO_TEST_FOR_DIR']) else go_files
        for path in itertools.chain(go_source_files, go_test_files, go_xtest_files):
            if path.endswith('.go'):
                resolved = unit.resolve_arc_path([path])
                if resolved != path and need_lint(resolved):
                    resolved_go_files.append(resolved)
        if resolved_go_files:
            basedirs = {}
            for f in resolved_go_files:
                basedir = os.path.dirname(f)
                if basedir not in basedirs:
                    basedirs[basedir] = []
                basedirs[basedir].append(f)
            for basedir in basedirs:
                unit.onadd_check(['gofmt'] + basedirs[basedir])

    # Go coverage instrumentation (NOTE! go_files list is modified here)
    if is_test_module and unit.enabled('GO_TEST_COVER'):
        cover_info = []
        for f in go_files:
            if f.endswith('_test.go'):
                continue
            cover_var = 'GoCover' + base64.b32encode(f).rstrip('=')
            cover_file = unit.resolve_arc_path(f)
            unit.on_go_gen_cover_go([cover_file, cover_var])
            if cover_file.startswith('$S/'):
                cover_file = arc_project_prefix + cover_file[3:]
            cover_info.append('{}:{}'.format(cover_var, cover_file))
        # go_files should be empty now since the initial list shouldn't contain
        # any non-go or go test file. The value of the go_files list will be used later
        # to update the value of _GO_SRCS_VALUE
        go_files = []
        unit.set(['GO_COVER_INFO_VALUE', ' '.join(cover_info)])

    # We have cleaned up the list of files from the _GO_SRCS_VALUE var and we have to update
    # the value since it is used in the module command line
    unit.set(['_GO_SRCS_VALUE', ' '.join(itertools.chain(go_files, asm_files, syso_files))])

    unit_path = unit.path()

    # Add go vet check
    if unit.enabled('_GO_VET_ADD_CHECK') and need_lint(unit_path):
        vet_report_file_name = os.path.join(unit_path, '{}{}'.format(unit.filename(), unit.get('GO_VET_REPORT_EXT')))
        unit.onadd_check(["govet", '$(BUILD_ROOT)/' + tobuilddir(vet_report_file_name)[3:]])

    for f in ev_files:
        ev_proto_file = '{}.proto'.format(f)
        unit.oncopy_file_with_context([f, ev_proto_file])
        proto_files.append(ev_proto_file)

    # Process .proto files
    for f in proto_files:
        unit.on_go_proto_cmd(f)

    # Process .in files
    for f in in_files:
        unit.onsrc(f)

    # Generate .symabis for .s files (starting from 1.12 version)
    if len(asm_files) > 0:
        symabis_flags = []
        gostd_version = unit.get('GOSTD_VERSION')
        if compare_versions('1.16', gostd_version) >= 0:
            import_path = get_import_path(unit)
            symabis_flags.extend(['FLAGS', '-p', import_path])
            if need_compiling_runtime(import_path):
                symabis_flags.append('-compiling-runtime')
        unit.on_go_compile_symabis(asm_files + symabis_flags)

    # Process cgo files
    cgo_files = get_appended_values(unit, '_CGO_SRCS_VALUE')

    cgo_cflags = []
    if len(c_files) + len(cxx_files) + len(s_files) + len(cgo_files) > 0:
        if is_test_module:
            go_test_for_dir = unit.get('GO_TEST_FOR_DIR')
            if go_test_for_dir and go_test_for_dir.startswith('$S/'):
                unit.onaddincl(['FOR', 'c', go_test_for_dir[3:]])
        unit.onaddincl(['FOR', 'c', unit.get('MODDIR')])
        cgo_cflags = get_appended_values(unit, 'CGO_CFLAGS_VALUE')

    for f in itertools.chain(c_files, cxx_files, s_files):
        unit.onsrc([f] + cgo_cflags)

    if len(cgo_files) > 0:
        if not unit.enabled('CGO_ENABLED'):
            ymake.report_configure_error('trying to build with CGO (CGO_SRCS is non-empty) when CGO is disabled')
        import_path = get_import_path(unit)
        if import_path != runtime_cgo_path:
            go_std_root = unit.get('GOSTD')
            unit.onpeerdir(os.path.join(go_std_root, runtime_cgo_path))
        race_mode = 'race' if unit.enabled('RACE') else 'norace'
        import_runtime_cgo = 'false' if import_path in import_runtime_cgo_false[race_mode] else 'true'
        import_syscall = 'false' if import_path in import_syscall_false[race_mode] else 'true'
        args = [import_path] + cgo_files + ['FLAGS', '-import_runtime_cgo=' + import_runtime_cgo, '-import_syscall=' + import_syscall]
        unit.on_go_compile_cgo1(args)
        cgo2_cflags = get_appended_values(unit, 'CGO2_CFLAGS_VALUE')
        for f in cgo_files:
            if f.endswith('.go'):
                unit.onsrc([f[:-2] + 'cgo2.c'] + cgo_cflags + cgo2_cflags)
            else:
                ymake.report_configure_error('file {} should not be listed in CGO_SRCS() macros'.format(f))
        args = [go_package_name(unit)] + cgo_files
        if len(c_files) > 0:
            args += ['C_FILES'] + c_files
        if len(s_files) > 0:
            args += ['S_FILES'] + s_files
        if len(syso_files) > 0:
            args += ['OBJ_FILES'] + syso_files
        unit.on_go_compile_cgo2(args)
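# on_go_process_srcs() reads list-valued unit variables through get_appended_values().
# A minimal, hypothetical sketch of the behaviour this code assumes (the real helper
# lives in the plugins' shared module and may additionally strip a leading variable
# reference from the value):
def get_appended_values(unit, name):
    # Treat the unit variable as a whitespace-separated list of file names.
    value = unit.get(name)
    return value.split() if value else []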
def main_out(self):
    return common.tobuilddir(common.stripext(self._path)) + '.cpp'
def output(self):
    return common.make_tuples([common.tobuilddir(common.stripext(self._path)) + '.fbs.h'])
def onpy_srcs(unit, *args):
    """
    PY_SRCS() is a rule for building extended versions of the Python interpreter that bundle
    all application code into the executable itself. It can only be used for executables, not
    for shared libraries, and, in particular, it cannot build modules that are loaded with the
    import directive. The main disadvantage is the lack of IDE support; there is also no
    readline yet.

    An application can be built from any sources a C library can be built from, plus .py, .pyx,
    .proto and .swg files listed in PY_SRCS. C extensions for Python generated from .pyx and
    .swg files are registered as built-in Python modules, while .py sources are stored as static
    data: when the interpreter starts, the initialization code adds a custom loader for these
    modules to sys.meta_path.

    By default, .pyx files are built as C++ extensions. To build them as C (similar to
    BUILDWITH_CYTHON_C, but with the ability to specify a namespace), pass the CYTHON_C
    directive. Building from .pyx registers the modules automatically; there is no need to call
    PY_REGISTER for them.

    __init__.py is never required, but if it is present (and listed in PY_SRCS), it will be
    imported when you import modules from the package it belongs to.

    Example of library declaration with PY_SRCS():

        PY_LIBRARY(mymodule)
        PY_SRCS({| CYTHON_C} { | TOP_LEVEL | NAMESPACE ns} a.py sub/dir/b.py e.proto sub/dir/f.proto c.pyx sub/dir/d.pyx g.swg sub/dir/h.swg)
        END()

    Documentation: https://wiki.yandex-team.ru/devtools/commandsandvars/py_srcs/
    """
    # Each file arg must either be a path, or "${...}/buildpath=modname", where
    # "${...}/buildpath" part will be used as a file source in a future macro,
    # and "modname" will be used as a module name.

    py3 = is_py3(unit)

    if py3:
        if '/contrib/tools/python3/src/Lib' not in unit.path():
            unit.onpeerdir(['contrib/libs/python'])
        if '/library/python/runtime_py3' not in unit.path():
            unit.onpeerdir(['library/python/runtime_py3'])
    else:
        if '/contrib/tools/python/src/Lib' not in unit.path():
            unit.onpeerdir(['contrib/libs/python'])
        if '/library/python/runtime' not in unit.path():
            unit.onpeerdir(['library/python/runtime'])

    is_program = unit.get('MODULE_TYPE') == 'PROGRAM'
    if is_program:
        py_program(unit, py3)

    py_namespace_value = unit.get('PY_NAMESPACE_VALUE')
    if py_namespace_value == ".":
        ns = ""
    else:
        ns = (unit.get('PY_NAMESPACE_VALUE') or unit.path()[3:].replace('/', '.')) + '.'

    cython_coverage = unit.get('CYTHON_COVERAGE') == 'yes'
    optimize_proto = unit.get('OPTIMIZE_PY_PROTOS_FLAG') == 'yes'

    cython_includes = []
    for path in unit.includes():
        cython_includes += ['-I', resolve_to_ymake_path(path)]

    cython_directives = []
    if cython_coverage:
        cython_directives += ['-X', 'linetrace=True']

    pyxs_c = []
    pyxs_cpp = []
    pyxs = pyxs_cpp
    pys = []
    protos = []
    evs = []
    swigs = []

    dump_dir = unit.get('PYTHON_BUILD_DUMP_DIR')
    dump_output = None
    if dump_dir:
        import thread
        pid = os.getpid()
        tid = thread.get_ident()
        dump_name = '{}-{}.dump'.format(pid, tid)
        dump_output = open(os.path.join(dump_dir, dump_name), 'a')

    args = iter(args)
    for arg in args:
        # Namespace directives.
        if arg == 'TOP_LEVEL':
            ns = ''
        elif arg == 'NAMESPACE':
            ns = next(args) + '.'
        # Cython directives.
        elif arg == 'CYTHON_C':
            pyxs = pyxs_c
        elif arg == 'CYTHON_CPP':
            pyxs = pyxs_cpp
        elif arg == 'CYTHON_DIRECTIVE':
            cython_directives += ['-X', next(args)]
        # Unsupported but legal PROTO_LIBRARY arguments.
        elif arg == 'GLOBAL' or arg.endswith('.gztproto'):
            pass
        # Sources.
        else:
            main_mod = arg == 'MAIN'
            if main_mod:
                arg = next(args)

            if '=' in arg:
                main_py = False
                path, mod = arg.split('=', 1)
            else:
                path = arg
                main_py = (path == '__main__.py' or path.endswith('/__main__.py'))
                if not py3 and main_py:
                    mod = '__main__'
                else:
                    if arg.startswith('../'):
                        ymake.report_configure_error('PY_SRCS item starts with "../": {!r}'.format(arg))
                    if arg.startswith('/'):
                        ymake.report_configure_error('PY_SRCS item starts with "/": {!r}'.format(arg))
                        continue
                    mod = ns + stripext(arg).replace('/', '.')

            if py3 and mod == '__main__':
                ymake.report_configure_error('TOP_LEVEL __main__.py is not allowed in PY3_PROGRAM')

            if main_mod:
                py_main(unit, mod + ":main")
            elif py3 and main_py:
                py_main(unit, mod)

            pathmod = (path, mod)

            if dump_output is not None:
                dump_output.write('{path}\t{module}\n'.format(path=rootrel_arc_src(path, unit), module=mod))

            if path.endswith('.py'):
                pys.append(pathmod)
            elif path.endswith('.pyx'):
                pyxs.append(pathmod)
            elif path.endswith('.proto'):
                protos.append(pathmod)
            elif path.endswith('.ev'):
                evs.append(pathmod)
            elif path.endswith('.swg'):
                if py3:
                    ymake.report_configure_error('SWIG is not yet supported for Python 3: https://st.yandex-team.ru/DEVTOOLS-4863')
                else:
                    swigs.append(path)  # ignore mod, use last (and only) ns
            else:
                ymake.report_configure_error('in PY_SRCS: unrecognized arg {!r}'.format(path))

    if dump_output is not None:
        dump_output.close()

    if pyxs:
        files2res = set()

        if cython_coverage:
            def process_pyx(filename, path, out_suffix):
                # skip generated files
                if not is_arc_src(path, unit):
                    return
                # source file
                files2res.add((filename, path))
                # generated
                files2res.add((filename + out_suffix, path + out_suffix))
                # used includes
                for entry in parse_pyx_includes(filename, path, unit.resolve('$S')):
                    files2res.add(entry)
        else:
            def process_pyx(filename, path, out_suffix):
                pass

        for pyxs, cython, out_suffix in [
            (pyxs_c, unit.onbuildwith_cython_c, ".c"),
            (pyxs_cpp, unit.onbuildwith_cython_cpp, ".cpp"),
        ]:
            for path, mod in pyxs:
                filename = rootrel_arc_src(path, unit)
                cython([
                    path,
                    '--module-name', mod,
                    '--init-suffix', mangle(mod),
                    '--source-root', '${ARCADIA_ROOT}',
                    # set arcadia root relative __file__ for generated modules
                    '-X', 'set_initial_path={}'.format(filename),
                ] + cython_includes + cython_directives)
                py_register(unit, mod, py3)
                process_pyx(filename, path, out_suffix)

        if files2res:
            # Compile original and generated sources into target for proper cython coverage calculation
            unit.onresource_files([x for name, path in files2res for x in ('DEST', name, path)])

    if pys:
        pys_seen = set()
        pys_dups = {m for _, m in pys if (m in pys_seen or pys_seen.add(m))}
        if pys_dups:
            ymake.report_configure_error('Duplicate(s) is found in the PY_SRCS macro: {}'.format(pys_dups))

        res = []

        if py3:
            for path, mod in pys:
                root_rel_path = rootrel_arc_src(path, unit)
                unit.onpy3_compile_bytecode([root_rel_path + '-', path])
                dest = 'py/' + mod.replace('.', '/') + '.py'
                res += [
                    'DEST', dest, path,
                    'DEST', dest + '.yapyc3', path + '.yapyc3',
                ]

            unit.onresource_files(res)
            # add_python_lint_checks(unit, [path for path, mod in pys])
        else:
            for path, mod in pys:
                root_rel_path = rootrel_arc_src(path, unit)
                src = unit.resolve_arc_path(path) or path
                dst = tobuilddir(src) + '.yapyc'
                unit.onpy_compile_bytecode([root_rel_path + '-', src])
                key = '/py_modules/' + mod
                res += [
                    path, key,
                    '-', 'resfs/src/{}={}'.format(key, root_rel_path),
                    dst, '/py_code/' + mod,
                ]

            unit.onresource(res)
            add_python_lint_checks(unit, [path for path, mod in pys])

    if protos:
        if '/contrib/libs/protobuf/python/google_lib' not in unit.path():
            unit.onpeerdir(['contrib/libs/protobuf/python/google_lib'])

        unit.onpeerdir(unit.get("PY_PROTO_DEPS").split())

        proto_paths = [path for path, mod in protos]
        unit.ongenerate_py_protos_internal(proto_paths)
        unit.onpy_srcs([
            pb2_arg(py_suf, path, mod, unit)
            for path, mod in protos
            for py_suf in unit.get("PY_PROTO_SUFFIXES").split()
        ])

        if optimize_proto:
            unit.onsrcs(proto_paths)

            pb_cc_outs = [
                pb_cc_arg(cc_suf, path, unit)
                for path in proto_paths
                for cc_suf in unit.get("CPP_PROTO_SUFFIXES").split()
            ]

            for pb_cc_outs_chunk in generate_chunks(pb_cc_outs, 10):
                if is_program:
                    unit.onjoin_srcs(['join_' + listid(pb_cc_outs_chunk) + '.cpp'] + pb_cc_outs_chunk)
                else:
                    unit.onjoin_srcs_global(['join_' + listid(pb_cc_outs_chunk) + '.cpp'] + pb_cc_outs_chunk)

    if evs:
        if '/contrib/libs/protobuf/python/google_lib' not in unit.path():
            unit.onpeerdir(['contrib/libs/protobuf/python/google_lib'])

        unit.ongenerate_py_evs_internal([path for path, mod in evs])
        unit.onpy_srcs([ev_arg(path, mod, unit) for path, mod in evs])

        if optimize_proto:
            unit.onsrcs([path for path, mod in evs])

            pb_cc_outs = [ev_cc_arg(path, unit) for path, _ in evs]
            for pb_cc_outs_chunk in generate_chunks(pb_cc_outs, 10):
                if is_program:
                    unit.onjoin_srcs(['join_' + listid(pb_cc_outs_chunk) + '.cpp'] + pb_cc_outs_chunk)
                else:
                    unit.onjoin_srcs_global(['join_' + listid(pb_cc_outs_chunk) + '.cpp'] + pb_cc_outs_chunk)

    if swigs:
        unit.onsrcs(swigs)
        prefix = unit.get('MODULE_PREFIX')
        project = unit.get('REALPRJNAME')
        py_register(unit, prefix + project, py3)
        path = '${ARCADIA_BUILD_ROOT}/' + '{}/{}.py'.format(unit.path()[3:], project)
        arg = '{}={}'.format(path, ns + project.replace('/', '.'))
        unit.onpy_srcs([arg])
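# Both the proto and ev branches above split the generated .cc outputs into groups of 10
# before calling JOIN_SRCS. A sketch of the chunking helper this assumes; the name comes
# from the call sites, the implementation here is hypothetical:
def generate_chunks(lst, chunk_size):
    # Yield consecutive slices of at most chunk_size items.
    for i in range(0, len(lst), chunk_size):
        yield lst[i:i + chunk_size]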
def onresource(unit, *args):
    """
    @usage: RESOURCE(file name file name...)

    Add data (resources, arbitrary files) to the program.
    This is a simpler but less flexible option than ARCHIVE(): with ARCHIVE() you have to access
    the data explicitly, while with RESOURCE() the data is pulled in through SRCS(GLOBAL).
    As a consequence, RESOURCE() does not produce a static library with the data; the data is
    added only at the program linking stage.

    @example: https://wiki.yandex-team.ru/yatool/howtowriteyamakefiles/#a2ispolzujjtekomanduresource

    @example:

        LIBRARY()
        OWNER(user1)

        RESOURCE(
            path/to/file1 /key/in/program/1
            path/to/file2 /key2
        )
        END()
    """
    unit.onpeerdir(['library/resource'])

    outs = []
    # https://st.yandex-team.ru/DEVTOOLS-4037
    # compressed_outs = []

    for part_args in split(args, 8000):
        srcs_gen = []
        raw_gen = []
        raw_inputs = []
        compressed = []
        compressed_input = []
        compressed_output = []

        for p, n in iterpair(part_args):
            if unit.enabled('ARCH_AARCH64') or unit.enabled('ARCH_ARM') or unit.enabled('ARCH_PPC64LE'):
                raw_gen += [p, n]
                if p != '-':
                    raw_inputs.append(p)
                continue

            lid = '_' + pathid(p + n + unit.path())
            output = lid + '.rodata'

            if p == '-':
                n, p = n.split('=', 1)
                compressed += ['-', p, output]
            else:
                compressed += [p, output]
                compressed_input.append(p)

            compressed_output.append(output)
            srcs_gen.append('{}={}'.format(lid, n))

        if compressed:
            lid = listid(part_args)
            fake_yasm = '_' + lid + '.yasm'
            cmd = ['tools/rescompressor', fake_yasm] + gen_ro_flags(unit) + compressed
            if compressed_input:
                cmd += ['IN'] + compressed_input
            cmd += ['OUT_NOAUTO', fake_yasm] + compressed_output
            unit.onrun_program(cmd)
            # https://st.yandex-team.ru/DEVTOOLS-4037
            # compressed_outs.append(fake_yasm)
            unit.onsrcs(['GLOBAL', tobuilddir(unit.path() + '/' + fake_yasm)])

        if srcs_gen:
            output = listid(part_args) + '.cpp'
            unit.onrun_program(['tools/rorescompiler', output] + srcs_gen + ['OUT_NOAUTO', output])
            outs.append(output)

        if raw_gen:
            output = listid(part_args) + '_raw.cpp'
            if raw_inputs:
                raw_inputs = ['IN'] + raw_inputs
            unit.onrun_program(['tools/rescompiler', output] + raw_gen + raw_inputs + ['OUT_NOAUTO', output])
            unit.onsrcs(['GLOBAL', output])

    if outs:
        if len(outs) > 1:
            unit.onjoin_srcs_global(['join_' + listid(outs) + '.cpp'] + outs)
        else:
            unit.onsrcs(['GLOBAL'] + outs)
def output(self):
    return common.make_tuples([common.tobuilddir(self._path + '.h.rl5')])
def on_go_process_srcs(unit):
    """
    _GO_PROCESS_SRCS() macro processes only 'CGO' files. All remaining *.go files and other input files
    are currently processed by a link command of the GO module (GO_LIBRARY, GO_PROGRAM).
    """

    srcs_files = get_appended_values(unit, 'GO_SRCS_VALUE')
    for f in srcs_files:
        if f.endswith('_test.go'):
            ymake.report_configure_error('file {} must be listed in GO_TEST_SRCS() or GO_XTEST_SRCS() macros'.format(f))
    go_test_files = get_appended_values(unit, 'GO_TEST_SRCS_VALUE')
    go_xtest_files = get_appended_values(unit, 'GO_XTEST_SRCS_VALUE')
    for f in go_test_files + go_xtest_files:
        if not f.endswith('_test.go'):
            ymake.report_configure_error('file {} should not be listed in GO_TEST_SRCS() or GO_XTEST_SRCS() macros'.format(f))

    is_test_module = unit.enabled('GO_TEST_MODULE')

    if is_test_module and unit.enabled('GO_TEST_COVER'):
        temp_srcs_files = []
        cover_info = []
        for f in srcs_files:
            if f.endswith('.go') and not f.endswith('_test.go'):
                cover_var = 'GoCover_' + base64.b32encode(f).rstrip('=')
                cover_file = unit.resolve_arc_path(f)
                unit.on_go_gen_cover_go([cover_file, cover_var])
                if cover_file.startswith('$S/'):
                    cover_file = arc_project_prefix + cover_file[3:]
                cover_info.append('{}:{}'.format(cover_var, cover_file))
            else:
                temp_srcs_files.append(f)
        srcs_files = temp_srcs_files
        unit.set(['GO_SRCS_VALUE', ' '.join(srcs_files)])
        unit.set(['GO_COVER_INFO_VALUE', ' '.join(cover_info)])

    resolved_go_files = []
    for path in srcs_files + go_test_files + go_xtest_files:
        if path.endswith(".go"):
            resolved = unit.resolve_arc_path([path])
            if resolved != path and not resolved.startswith("$S/vendor/") and not resolved.startswith("$S/contrib/"):
                resolved_go_files.append(resolved)
    if resolved_go_files:
        basedirs = {}
        for f in resolved_go_files:
            basedir = os.path.dirname(f)
            if basedir not in basedirs:
                basedirs[basedir] = []
            basedirs[basedir].append(f)
        for basedir in basedirs:
            unit.onadd_check(['gofmt'] + basedirs[basedir])

    # Add go vet check
    if unit.get(['GO_VET']) == 'yes':
        vet_package_name = go_package_name(unit, vet_mode=True)
        unit.onadd_check(["govet", '$(BUILD_ROOT)/' + tobuilddir(os.path.join(unit.path(), vet_package_name + '.a.vet.txt'))[3:]])
        unit.set(['_GO_VET_PACKAGE_NAME', vet_package_name])

    go_std_root = unit.get('GOSTD') + os.path.sep

    proto_files = filter(lambda x: x.endswith('.proto'), srcs_files)
    if len(proto_files) > 0:
        for f in proto_files:
            unit.on_go_proto_cmd(f)

    in_files = filter(lambda x: x.endswith('.in'), srcs_files)
    if len(in_files) > 0:
        for f in in_files:
            unit.onsrc(f)

    if compare_versions('1.12', unit.get('GOSTD_VERSION')) >= 0:
        asm_files = filter(lambda x: x.endswith('.s'), srcs_files)
        if len(asm_files) > 0:
            unit.on_go_compile_symabis(asm_files)

    s_files = filter(lambda x: x.endswith('.S'), srcs_files)
    c_files = filter(lambda x: x.endswith('.c'), srcs_files)
    cxx_files = filter(lambda x: any(x.endswith(e) for e in ('.cc', '.cpp', '.cxx', '.C')), srcs_files)
    syso_files = filter(lambda x: x.endswith('.syso'), srcs_files)
    cgo_files = get_appended_values(unit, 'CGO_SRCS_VALUE')

    cgo_cflags = []
    if len(c_files) + len(cxx_files) + len(s_files) + len(cgo_files) > 0:
        if is_test_module:
            cgo_cflags.append(os.path.join('-I${ARCADIA_ROOT}', unit.get('GO_TEST_FOR_DIR')[3:]))
        cgo_cflags.append('-I$CURDIR')
        unit.oncgo_cflags(cgo_cflags)
        cgo_cflags = get_appended_values(unit, 'CGO_CFLAGS_VALUE')

        if len(cxx_files) == 0:
            unit.onno_runtime()

    for f in itertools.chain(c_files, cxx_files, s_files):
        unit.onsrc([f] + cgo_cflags)

    if len(cgo_files) > 0:
        if not unit.enabled('CGO_ENABLED'):
            ymake.report_configure_error('trying to build with CGO (CGO_SRCS is non-empty) when CGO is disabled')
        import_path = rootrel_arc_src(unit.path(), unit)
        if import_path.startswith(go_std_root):
            import_path = import_path[len(go_std_root):]
        if import_path != runtime_cgo_path:
            unit.onpeerdir(os.path.join(go_std_root, runtime_cgo_path))
        race_mode = 'race' if unit.enabled('RACE') else 'norace'
        import_runtime_cgo = 'false' if import_path in import_runtime_cgo_false[race_mode] else 'true'
        import_syscall = 'false' if import_path in import_syscall_false[race_mode] else 'true'
        args = [import_path] + cgo_files + ['FLAGS', '-import_runtime_cgo=' + import_runtime_cgo, '-import_syscall=' + import_syscall]
        unit.on_go_compile_cgo1(args)
        cgo2_cflags = get_appended_values(unit, 'CGO2_CFLAGS_VALUE')
        for f in cgo_files:
            if f.endswith('.go'):
                unit.onsrc([f[:-2] + 'cgo2.c'] + cgo_cflags + cgo2_cflags)
            else:
                ymake.report_configure_error('file {} should not be listed in CGO_SRCS() macros'.format(f))
        args = [go_package_name(unit)] + cgo_files
        if len(c_files) > 0:
            args += ['C_FILES'] + c_files
        if len(s_files) > 0:
            args += ['S_FILES'] + s_files
        if len(syso_files) > 0:
            args += ['OBJ_FILES'] + syso_files
        unit.on_go_compile_cgo2(args)
def output(self):
    return common.make_tuples([common.tobuilddir(common.stripext(self._path)) + self.extension()])
def onresource(unit, *args):
    unit.onpeerdir(['library/resource'])

    outs = []

    for part_args in split(args, 8000):
        srcs_gen = []
        raw_gen = []
        raw_inputs = []
        compressed = []
        compressed_input = []
        compressed_output = []

        for p, n in iterpair(part_args):
            if unit.enabled('ARCH_AARCH64') or unit.enabled('ARCH_ARM') or unit.enabled('ARCH_PPC64LE'):
                raw_gen += [p, n]
                if p != '-':
                    raw_inputs.append(p)
                continue

            lid = '_' + pathid(p + n + unit.path())
            output = lid + '.rodata'

            if p == '-':
                n, p = n.split('=', 1)
                compressed += ['-', p, output]
            else:
                compressed += [p, output]
                compressed_input.append(p)

            compressed_output.append(output)
            srcs_gen.append('{}={}'.format(lid, n))

        if compressed:
            lid = listid(part_args)
            fake_yasm = '_' + lid + '.yasm'
            cmd = ['tools/rescompressor', fake_yasm] + gen_ro_flags(unit) + compressed
            if compressed_input:
                cmd += ['IN'] + compressed_input
            cmd += ['OUT_NOAUTO', fake_yasm] + compressed_output
            unit.onrun_program(cmd)

            if compressed_output:
                fake_out = lid + '.yasm'
                cmd = ['build/scripts/fs_tools.py', 'link_or_copy', fake_yasm, fake_out] + ['IN', fake_yasm]
                cmd += ['OUTPUT_INCLUDES'] + compressed_output
                cmd += ['OUT_NOAUTO', fake_out]
                unit.onbuiltin_python(cmd)
            else:
                fake_out = fake_yasm

            unit.onsrcs(['GLOBAL', tobuilddir(unit.path() + '/' + fake_out)])

        if srcs_gen:
            output = listid(part_args) + '.cpp'
            unit.onrun_program(['tools/rorescompiler', output] + srcs_gen + ['OUT_NOAUTO', output])
            outs.append(output)

        if raw_gen:
            output = listid(part_args) + '_raw.cpp'
            if raw_inputs:
                raw_inputs = ['IN'] + raw_inputs
            unit.onrun_program(['tools/rescompiler', output] + raw_gen + raw_inputs + ['OUT_NOAUTO', output])
            unit.onsrcs(['GLOBAL', output])

    if outs:
        if len(outs) > 1:
            unit.onjoin_srcs_global(['join_' + listid(outs) + '.cpp'] + outs)
        else:
            unit.onsrcs(['GLOBAL'] + outs)
def output(self):
    return common.make_tuples([common.tobuilddir(self._path + '.h.rl5')])
def onpy_srcs(unit, *args):
    """
    @usage PY_SRCS({| CYTHON_C} { | TOP_LEVEL | NAMESPACE ns} Files...)

    PY_SRCS() is a rule for building extended versions of the Python interpreter that bundle
    all application code into the executable itself. It can only be used for executables, not
    for shared libraries, and, in particular, it cannot build modules that are loaded with the
    import directive. The main disadvantage is the lack of IDE support; there is also no
    readline yet.

    An application can be built from any sources a C library can be built from, plus .py, .pyx,
    .proto and .swg files listed in PY_SRCS. C extensions for Python generated from .pyx and
    .swg files are registered as built-in Python modules, while .py sources are stored as static
    data: when the interpreter starts, the initialization code adds a custom loader for these
    modules to sys.meta_path.

    By default, .pyx files are built as C++ extensions. To build them as C (similar to
    BUILDWITH_CYTHON_C, but with the ability to specify a namespace), pass the CYTHON_C
    directive. Building from .pyx registers the modules automatically; there is no need to call
    PY_REGISTER for them.

    __init__.py is never required, but if it is present (and listed in PY_SRCS), it will be
    imported when you import modules from the package it belongs to.

    Example of library declaration with PY_SRCS():

        PY_LIBRARY(mymodule)
        PY_SRCS(a.py sub/dir/b.py e.proto sub/dir/f.proto c.pyx sub/dir/d.pyx g.swg sub/dir/h.swg)
        END()

    PY_REGISTER honors Python2 and Python3 differences and adjusts itself to the Python version
    of the current module.

    Documentation: https://wiki.yandex-team.ru/arcadia/python/pysrcs/#modulipylibrarypy3libraryimakrospysrcs
    """
    # Each file arg must either be a path, or "${...}/buildpath=modname", where
    # "${...}/buildpath" part will be used as a file source in a future macro,
    # and "modname" will be used as a module name.

    upath = unit.path()[3:]
    py3 = is_py3(unit)
    with_py = not unit.get('PYBUILD_NO_PY')
    with_pyc = not unit.get('PYBUILD_NO_PYC')

    if not upath.startswith('contrib/tools/python') and not upath.startswith('library/python/runtime') and unit.get('NO_PYTHON_INCLS') != 'yes':
        unit.onpeerdir(['contrib/libs/python'])

    unit_needs_main = unit.get('MODULE_TYPE') in ('PROGRAM', 'DLL')
    if unit_needs_main:
        py_program(unit, py3)

    py_namespace_value = unit.get('PY_NAMESPACE_VALUE')
    if py_namespace_value == ".":
        ns = ""
    else:
        ns = (unit.get('PY_NAMESPACE_VALUE') or upath.replace('/', '.')) + '.'

    cython_coverage = unit.get('CYTHON_COVERAGE') == 'yes'
    cythonize_py = False
    optimize_proto = unit.get('OPTIMIZE_PY_PROTOS_FLAG') == 'yes'

    cython_directives = []
    if cython_coverage:
        cython_directives += ['-X', 'linetrace=True']

    pyxs_c = []
    pyxs_c_h = []
    pyxs_c_api_h = []
    pyxs_cpp = []
    pyxs = pyxs_cpp
    swigs_c = []
    swigs_cpp = []
    swigs = swigs_cpp
    pys = []
    protos = []
    evs = []

    dump_dir = unit.get('PYTHON_BUILD_DUMP_DIR')
    dump_output = None
    if dump_dir:
        import thread
        pid = os.getpid()
        tid = thread.get_ident()
        dump_name = '{}-{}.dump'.format(pid, tid)
        dump_output = open(os.path.join(dump_dir, dump_name), 'a')

    args = iter(args)
    for arg in args:
        # Namespace directives.
        if arg == 'TOP_LEVEL':
            ns = ''
        elif arg == 'NAMESPACE':
            ns = next(args) + '.'
        # Cython directives.
        elif arg == 'CYTHON_C':
            pyxs = pyxs_c
        elif arg == 'CYTHON_C_H':
            pyxs = pyxs_c_h
        elif arg == 'CYTHON_C_API_H':
            pyxs = pyxs_c_api_h
        elif arg == 'CYTHON_CPP':
            pyxs = pyxs_cpp
        elif arg == 'CYTHON_DIRECTIVE':
            cython_directives += ['-X', next(args)]
        elif arg == 'CYTHONIZE_PY':
            cythonize_py = True
        # SWIG.
        elif arg == 'SWIG_C':
            swigs = swigs_c
        elif arg == 'SWIG_CPP':
            swigs = swigs_cpp
        # Unsupported but legal PROTO_LIBRARY arguments.
        elif arg == 'GLOBAL' or arg.endswith('.gztproto'):
            pass
        # Sources.
        else:
            main_mod = arg == 'MAIN'
            if main_mod:
                arg = next(args)

            if '=' in arg:
                main_py = False
                path, mod = arg.split('=', 1)
            else:
                path = arg
                main_py = (path == '__main__.py' or path.endswith('/__main__.py'))
                if not py3 and unit_needs_main and main_py:
                    mod = '__main__'
                else:
                    if arg.startswith('../'):
                        ymake.report_configure_error('PY_SRCS item starts with "../": {!r}'.format(arg))
                    if arg.startswith('/'):
                        ymake.report_configure_error('PY_SRCS item starts with "/": {!r}'.format(arg))
                        continue
                    mod = ns + stripext(arg).replace('/', '.')

            if py3 and mod == '__main__':
                ymake.report_configure_error('TOP_LEVEL __main__.py is not allowed in PY3_PROGRAM')

            if main_mod:
                py_main(unit, mod + ":main")
            elif py3 and unit_needs_main and main_py:
                py_main(unit, mod)

            pathmod = (path, mod)

            if dump_output is not None:
                dump_output.write('{path}\t{module}\n'.format(path=rootrel_arc_src(path, unit), module=mod))

            if path.endswith('.py'):
                if cythonize_py:
                    pyxs.append(pathmod)
                else:
                    pys.append(pathmod)
            elif path.endswith('.pyx'):
                pyxs.append(pathmod)
            elif path.endswith('.proto'):
                protos.append(pathmod)
            elif path.endswith('.ev'):
                evs.append(pathmod)
            elif path.endswith('.swg'):
                swigs.append(pathmod)
            else:
                ymake.report_configure_error('in PY_SRCS: unrecognized arg {!r}'.format(path))

    if dump_output is not None:
        dump_output.close()

    if pyxs:
        files2res = set()
        # Include map stores files which were included in the processing pyx file,
        # to be able to find source code of the included file inside generated file
        # for currently processing pyx file.
        include_map = collections.defaultdict(set)

        if cython_coverage:
            def process_pyx(filename, path, out_suffix, noext):
                # skip generated files
                if not is_arc_src(path, unit):
                    return
                # source file
                files2res.add((filename, path))
                # generated
                if noext:
                    files2res.add((os.path.splitext(filename)[0] + out_suffix, os.path.splitext(path)[0] + out_suffix))
                else:
                    files2res.add((filename + out_suffix, path + out_suffix))
                # used includes
                for entry in parse_pyx_includes(filename, path, unit.resolve('$S')):
                    files2res.add(entry)
                    include_arc_rel = entry[0]
                    include_map[filename].add(include_arc_rel)
        else:
            def process_pyx(filename, path, out_suffix, noext):
                pass

        for pyxs, cython, out_suffix, noext in [
            (pyxs_c, unit.on_buildwith_cython_c_dep, ".c", False),
            (pyxs_c_h, unit.on_buildwith_cython_c_h, ".c", True),
            (pyxs_c_api_h, unit.on_buildwith_cython_c_api_h, ".c", True),
            (pyxs_cpp, unit.on_buildwith_cython_cpp_dep, ".cpp", False),
        ]:
            for path, mod in pyxs:
                filename = rootrel_arc_src(path, unit)
                cython_args = [path]

                dep = path
                if path.endswith('.py'):
                    pxd = '/'.join(mod.split('.')) + '.pxd'
                    if unit.resolve_arc_path(pxd):
                        dep = pxd
                cython_args.append(dep)

                cython_args += [
                    '--module-name', mod,
                    '--init-suffix', mangle(mod),
                    '--source-root', '${ARCADIA_ROOT}',
                    # set arcadia root relative __file__ for generated modules
                    '-X', 'set_initial_path={}'.format(filename),
                ] + cython_directives

                cython(cython_args)
                py_register(unit, mod, py3)
                process_pyx(filename, path, out_suffix, noext)

        if files2res:
            # Compile original and generated sources into target for proper cython coverage calculation
            unit.onresource_files([x for name, path in files2res for x in ('DEST', name, path)])

        if include_map:
            data = []
            prefix = 'resfs/cython/include'
            for line in sorted('{}/{}={}'.format(prefix, filename, ':'.join(sorted(files))) for filename, files in include_map.iteritems()):
                data += ['-', line]
            unit.onresource(data)

    for swigs, on_swig_python in [
        (swigs_c, unit.on_swig_python_c),
        (swigs_cpp, unit.on_swig_python_cpp),
    ]:
        for path, mod in swigs:
            # Make output prefix basename match swig module name.
            prefix = path[:path.rfind('/') + 1] + mod.rsplit('.', 1)[-1]
            swg_py = '{}/{}/{}.py'.format('${ARCADIA_BUILD_ROOT}', upath, prefix)
            on_swig_python([path, prefix])
            onpy_register(unit, mod + '_swg')
            onpy_srcs(unit, swg_py + '=' + mod)

    if pys:
        pys_seen = set()
        pys_dups = {m for _, m in pys if (m in pys_seen or pys_seen.add(m))}
        if pys_dups:
            ymake.report_configure_error('Duplicate(s) is found in the PY_SRCS macro: {}'.format(pys_dups))

        res = []

        if py3:
            for path, mod in pys:
                dest = 'py/' + mod.replace('.', '/') + '.py'
                if with_py:
                    res += ['DEST', dest, path]
                if with_pyc:
                    root_rel_path = rootrel_arc_src(path, unit)
                    unit.on_py3_compile_bytecode([root_rel_path + '-', path])
                    res += ['DEST', dest + '.yapyc3', path + '.yapyc3']

            unit.onresource_files(res)
            add_python_lint_checks(unit, 3, [path for path, mod in pys])
        else:
            for path, mod in pys:
                root_rel_path = rootrel_arc_src(path, unit)
                if with_py:
                    key = '/py_modules/' + mod
                    res += [
                        path, key,
                        '-', 'resfs/src/{}={}'.format(key, root_rel_path),
                    ]
                if with_pyc:
                    src = unit.resolve_arc_path(path) or path
                    dst = tobuilddir(src) + '.yapyc'
                    unit.on_py_compile_bytecode([root_rel_path + '-', src])
                    res += [dst, '/py_code/' + mod]

            unit.onresource(res)
            add_python_lint_checks(unit, 2, [path for path, mod in pys])

    if protos:
        if not upath.startswith('contrib/libs/protobuf/python/google_lib'):
            unit.onpeerdir(['contrib/libs/protobuf/python/google_lib'])

        unit.onpeerdir(unit.get("PY_PROTO_DEPS").split())

        proto_paths = [path for path, mod in protos]
        unit.on_generate_py_protos_internal(proto_paths)
        unit.onpy_srcs([
            pb2_arg(py_suf, path, mod, unit)
            for path, mod in protos
            for py_suf in unit.get("PY_PROTO_SUFFIXES").split()
        ])

        if optimize_proto:
            unit.onsrcs(proto_paths)

            pb_cc_outs = [
                pb_cc_arg(cc_suf, path, unit)
                for path in proto_paths
                for cc_suf in unit.get("CPP_PROTO_SUFFIXES").split()
            ]

            for pb_cc_outs_chunk in generate_chunks(pb_cc_outs, 10):
                if unit_needs_main:
                    unit.onjoin_srcs(['join_' + listid(pb_cc_outs_chunk) + '.cpp'] + pb_cc_outs_chunk)
                else:
                    unit.onjoin_srcs_global(['join_' + listid(pb_cc_outs_chunk) + '.cpp'] + pb_cc_outs_chunk)

    if evs:
        if not upath.startswith('contrib/libs/protobuf/python/google_lib'):
            unit.onpeerdir(['contrib/libs/protobuf/python/google_lib'])

        unit.on_generate_py_evs_internal([path for path, mod in evs])
        unit.onpy_srcs([ev_arg(path, mod, unit) for path, mod in evs])

        if optimize_proto:
            unit.onsrcs([path for path, mod in evs])

            pb_cc_outs = [ev_cc_arg(path, unit) for path, _ in evs]
            for pb_cc_outs_chunk in generate_chunks(pb_cc_outs, 10):
                if unit_needs_main:
                    unit.onjoin_srcs(['join_' + listid(pb_cc_outs_chunk) + '.cpp'] + pb_cc_outs_chunk)
                else:
                    unit.onjoin_srcs_global(['join_' + listid(pb_cc_outs_chunk) + '.cpp'] + pb_cc_outs_chunk)
def output(self):
    return common.make_tuples([common.tobuilddir(common.stripext(self._path)) + self.extension()])