def _generate_ninja_file(app_name, compiler_path, linker_path, archiver_path, compiler_flags, linker_flags, app_source_dirs, project_root, build_dir, output_dir, lib_cs_path, lib_app_path):
    """Write <build_dir>/Application.ninja for the ChilliSource application.

    Declares shared compile/dependency rules, archives the engine and the
    application objects into their static libraries, and links everything
    into the final executable placed in ``output_dir``.
    """
    with open(os.path.join(build_dir, "Application.ninja"), "w") as out_file:
        nw = Writer(out_file)
        nw.variable(key="builddir", value=build_dir)

        # Single compile rule shared by .c, .cpp and .cc sources.
        nw.rule("compile",
                command="{} {} -o $out $in".format(compiler_path, compiler_flags),
                description="Compiling source: $in",
                depfile="$out.o.d",
                deps="gcc")

        # Rule that emits the dependency (.d) files.
        nw.rule("dependencies",
                command="{} {} -MM -MG -MF $out $in".format(compiler_path, compiler_flags),
                description="Generating dependency: $in")

        # Static-library rule. Response files are used because on Windows the
        # full command line would exceed the CreateProcess length limit.
        nw.rule("archive",
                command="{} rcs $out @$out.rsp".format(archiver_path),
                description="Building static library: $out",
                rspfile="$out.rsp",
                rspfile_content="$in")

        # Link rule; response files for the same Windows reason as above.
        nw.rule("link",
                command="{} @$out.rsp {} -o $out".format(linker_path, linker_flags),
                description="Linking: $out",
                rspfile="$out.rsp",
                rspfile_content="$in")

        # Compile commands for the engine sources and the app sources.
        engine_dirs = [
            os.path.normpath('{}/ChilliSource/Source/ChilliSource'.format(project_root)),
            os.path.normpath('{}/ChilliSource/Source/CSBackend/Platform/RPi/'.format(project_root)),
            os.path.normpath('{}/ChilliSource/Source/CSBackend/Rendering/OpenGL/'.format(project_root)),
        ]
        engine_objs = _write_build_command(nw, engine_dirs, 'c,cpp,cc', 'compile', 'dependencies', project_root, build_dir)
        app_objs = _write_build_command(nw, app_source_dirs, 'c,cpp,cc', 'compile', 'dependencies', project_root, build_dir)

        # Archive each object set into its static library...
        nw.build(rule="archive", inputs=engine_objs, outputs=lib_cs_path)
        nw.build(rule="archive", inputs=app_objs, outputs=lib_app_path)
        # ...and link everything into the final executable.
        nw.build(rule="link", inputs=engine_objs + app_objs, outputs=os.path.join(output_dir, app_name))
def shogunAllProto(argv):
    '''
    Generate XXX.pb.{h,cc} files from all available XXX.proto
    files in the source directory.

    Depends: protoc (protobuf-compiler)
    Input: .proto
    Output: .pb.cc, .pb.h
    '''
    parser = argparse.ArgumentParser()
    parser.add_argument('-o', help='write ninja file', type=str, default='all_proto.ninja')
    ag = parser.parse_args(argv)
    print(red('Argument Dump:'))
    pprint(vars(ag))

    # Initialise the ninja file with the shared header.
    cursor = Writer(open(ag.o, 'w'))
    ninjaCommonHeader(cursor, ag)

    # Glob every .proto in the tree and emit one protoc edge per file.
    protos = glob.glob(f'**/*.proto', recursive=True)
    print(cyan('AllProto:'), f'globbed {len(protos)} .proto files')
    for proto in protos:
        generated = [proto.replace('.proto', '.pb.cc'),
                     proto.replace('.proto', '.pb.h')]
        cursor.build(generated, 'rule_PROTOC', proto)

    cursor.close()
def write_buildfile():
    """Generate build.ninja for the msp430 project.

    Declares toolchain variables and rules (compile, link, objcopy, compdb,
    cscope), then emits one compile edge per .c/.cpp source plus the final
    main.elf / main.bin targets.

    Bug fixed: the original drove the per-source compile helpers with
    ``map(cc, ...)`` / ``map(cxx, ...)``. Under Python 3 ``map`` is lazy, so
    the helpers never ran, ``objects`` stayed empty, and the link edge had no
    inputs. Eager for-loops restore the intended behaviour.
    """
    with open("build.ninja", "w") as buildfile:
        n = Writer(buildfile)

        # Variable declarations
        n.variable("lib_path", "/usr/msp430/lib")  # "/usr/lib/gcc/msp430/4.6.3"
        n.variable("tc_path", "/usr/bin")
        n.variable("cflags", cflags)
        n.variable("cxxflags", cxxflags)
        n.variable("lflags", lflags)

        # Rule declarations
        n.rule("cxx", command="$tc_path/msp430-g++ $cxxflags -c $in -o $out")
        n.rule("cc", command="$tc_path/msp430-gcc $cflags -c $in -o $out")
        n.rule("cl", command="$tc_path/msp430-gcc $lflags $in -o $out -lm -lgcc -lc")
        n.rule("oc", command="$tc_path/msp430-objcopy -O binary $in $out")
        n.rule("cdb", command="ninja -t compdb cc cxx > compile_commands.json")
        n.rule("cscf",
               command="find " + " ".join(set(source_dirs + include_dirs)) +
                       " -regex \".*\\(\\.c\\|\\.h\\|.cpp\\|.hpp\\)$$\" -and " +
                       "-not -type d > $out")
        n.rule("cscdb", command="cscope -bq")

        # Tooling build edges (compile database and cscope index).
        n.build("compile_commands.json", "cdb")
        n.build("cscope.files", "cscf")
        n.build(["cscope.in.out", "cscope.po.out", "cscope.out"], "cscdb", "cscope.files")

        objects = []

        def _compile(rule, name):
            # Emit one compile edge and record the resulting object file.
            ofile = subst_ext(name, ".o")
            n.build(ofile, rule, name)
            objects.append(ofile)

        # Eager loops (NOT lazy map()) so the edges are actually written.
        # C sources first, then C++, preserving the original object order.
        sources = get_sources()
        for src in sources:
            if src.endswith(".c"):
                _compile("cc", src)
        for src in sources:
            if src.endswith(".cpp"):
                _compile("cxx", src)

        # Final link and binary-image edges.
        n.build("main.elf", "cl", objects)
        n.build("main.bin", "oc", "main.elf")
def ninja_build(buildables):
    """Write build.ninja covering every buildable, then invoke ninja."""
    with open("build.ninja", "w") as stream:
        writer = Writer(stream)
        basic_rules(writer)
        for item in buildables:
            item.build(writer)
    # File is closed (flushed) by the `with` block before ninja reads it.
    system("ninja")
def _generate_ninja_file(cpp_compiler_path, c_compiler_path, archiver_path, target_scheme, build_dir, lib_path):
    """Write <build_dir>/Application.ninja: compile all C and C++ sources for
    the given target scheme and archive the objects into ``lib_path``.
    """
    with open(os.path.join(build_dir, "Application.ninja"), "w") as out_file:
        nw = Writer(out_file)
        nw.variable(key="builddir", value=build_dir)

        # Per-language flag sets: defaults + target-scheme flags + includes.
        cpp_flags = COMPILER_FLAGS_DEFAULT_CPP + " " + COMPILER_FLAGS_TARGET_MAP[target_scheme] + " " + INCLUDE_PATHS
        c_flags = COMPILER_FLAGS_DEFAULT_C + " " + COMPILER_FLAGS_TARGET_MAP[target_scheme] + " " + INCLUDE_PATHS

        # Compile rules for C++ (.cpp) and C (.c/.cc) sources.
        nw.rule("compile_cpp",
                command="{} {} -o $out $in".format(cpp_compiler_path, cpp_flags),
                description="Compiling C++ source: $in",
                depfile="$out.o.d",
                deps="gcc")
        nw.rule("compile_c",
                command="{} {} -o $out $in".format(c_compiler_path, c_flags),
                description="Compiling C source: $in",
                depfile="$out.o.d",
                deps="gcc")

        # Dependency-generation rules, one per language.
        nw.rule("dependencies_cpp",
                command="{} {} -MM -MG -MF $out $in".format(cpp_compiler_path, cpp_flags),
                description="Generating C++ dependency: $in")
        nw.rule("dependencies_c",
                command="{} {} -MM -MG -MF $out $in".format(c_compiler_path, c_flags),
                description="Generating C dependency: $in")

        # Static-library rule. Response files keep the Windows command line
        # under the CreateProcess length limit.
        nw.rule("archive",
                command="{} rcs $out @$out.rsp".format(archiver_path),
                description="Building static library: $out",
                rspfile="$out.rsp",
                rspfile_content="$in")

        # Compile commands for every source file, then archive the objects.
        objs = _write_build_command(nw, SOURCE_PATHS, 'cpp', 'compile_cpp', 'dependencies_cpp', build_dir)
        objs += _write_build_command(nw, SOURCE_PATHS, 'c,cc', 'compile_c', 'dependencies_c', build_dir)
        nw.build(rule="archive", inputs=objs, outputs=lib_path)
def shogunProtoText(argv):
    '''
    Build a binary ELF executable named proto_text, which generates
    XXX.pb_text{.cc,.h,-impl.h} files from a given XXX.proto file.
    This binary file is for one-time use.

    Depends: shogunAllProto
    Input: bazelDump, cxx source
    Output: proto_text
    '''
    parser = argparse.ArgumentParser()
    parser.add_argument('-i', help='list of source files', type=str, required=True)
    parser.add_argument('-g', help='list of generated files', type=str, required=True)
    parser.add_argument('-o', help='where to write the ninja file', type=str, default='proto_text.ninja')
    ag = parser.parse_args(argv)
    print(red('Argument Dump:'))
    pprint(vars(ag))

    # Read the bazel dumps and drop everything we never compile here.
    srclist = bazelPreprocess([l.strip() for l in open(ag.i, 'r').readlines()])
    genlist = bazelPreprocess([l.strip() for l in open(ag.g, 'r').readlines()])
    _, srclist = eGrep('.*.h$', srclist)  # we don't need to deal with header here
    _, srclist = eGrep('^third_party', srclist)  # no third_party stuff
    _, srclist = eGrep('.*windows/.*', srclist)  # no windoge source
    _, srclist = eGrep('.*.proto$', srclist)  # already dealt with in (2)

    # Initialise the ninja writer.
    cursor = Writer(open(ag.o, 'w'))
    ninjaCommonHeader(cursor, ag)

    # Generated files: .pb.h/.pb.cc come from shogunAllProto; keep only the
    # .pb.cc list because those objects are linked into proto_text.
    _, genlist = eGrep('.*.pb.h$', genlist)
    pbcclist, genlist = eGrep('.*.pb.cc$', genlist)
    if len(genlist) > 0:
        print(yellow('Remainders:'), genlist)

    # Compile each .cc (hand-written plus generated) into an object file.
    cclist, srclist = eGrep('.*.cc', srclist)
    objlist = []
    for cc in cclist + pbcclist:
        obj = cc.replace('.cc', '.o')
        objlist.append(cursor.build(obj, 'rule_CXX_OBJ', cc)[0])
    if len(srclist) > 0:
        print(yellow('Remainders:'), srclist)

    # Link the objects into the one-shot proto_text executable.
    cursor.build(f'proto_text', 'rule_CXX_EXEC', inputs=objlist,
                 variables={'LIBS': '-lpthread -lprotobuf -ldouble-conversion'})
    cursor.close()
def subst_ext(fname, ext): return os.path.splitext(fname)[0] + ext def get_sources(): fnames = [] for d in source_dirs: for f in os.listdir(d): fnames.append(os.path.join(d, f)) return fnames def get_includes(): return " ".join(map(lambda x : "-I"+x, source_dirs)) with open("build.ninja", "w") as buildfile: n = Writer(buildfile) # Variable declarations n.variable("tc_path", "/home/kbalke/Projects/Coding/energia-0101E0016/hardware/tools/lm4f/bin") n.variable("cflags", "-c -Os -w -g -ffunction-sections -fdata-sections -mthumb -mcpu=cortex-m4 -mfloat-abi=hard -mfpu=fpv4-sp-d16 -fsingle-precision-constant -DF_CPU=80000000L -DPART_TM4C123GH6PM " + get_includes()) n.variable("cxxflags", "-c -Os -w -g -std=c++11 -fno-rtti -fno-exceptions -ffunction-sections -fdata-sections -mthumb -mcpu=cortex-m4 -mfloat-abi=hard -mfpu=fpv4-sp-d16 -fsingle-precision-constant -DF_CPU=80000000L -DPART_TM4C123GH6PM " + get_includes()) n.variable("lflags", "-Os -g -nostartfiles -nostdlib -Wl,--gc-sections -T lm4fcpp_blizzard.ld -Wl,--entry=ResetISR -mthumb -mcpu=cortex-m4 -mfloat-abi=hard -mfpu=fpv4-sp-d16 -fsingle-precision-constant -lm -lc -lgcc -lstdc++") # Rule declarations n.rule("cxx", command = "$tc_path/arm-none-eabi-g++ $cxxflags -c $in -o $out") n.rule("cc", command = "$tc_path/arm-none-eabi-gcc $cflags -c $in -o $out") n.rule("cca",
inputs=sorted([tarball] + patches if tarball else collect_files(srcdir)), variables=OrderedDict([ ('name', prj), ('configure', configure), ('configure_flags', all_flags), ('srcdir', '$top_srcdir/%s' % (srcdir if srcdir else prj)), ('destdir', '$builddir/%s' % prj), ('workdir', '$intdir/%s' % prj), ])) n.build([prj], 'phony', target) return target with open('build.ninja', 'w') as buildfile: n = Writer(buildfile, width=20) n.comment('generated by %s' % sys.argv[0]) n.include('build.vars.ninja') n.rule( 'download', description='Downloading $url...', pool='console', command= 'curl -o $out $url && test "$sha256" = `shasum -a256 $out | cut -f1 -d" "`' ) targets = [] n.build( ['tarballs/%s' % GETTEXT_TARBALL],
for path in RCC_FILES: name = path if os.path.basename(path) == 'hlp.png': name = 'hlp.png' elif path.endswith('.out.js'): name = path[:-7] + '.js' stream.write(' <file alias="%s">%s</file>\n' % (name, os.path.abspath(path))) stream.write('</qresource>\n') stream.write('</RCC>') info('Writing build.ninja...\n') with open('build.ninja', 'w') as stream: n = Writer(stream) n.comment('Transformers') n.rule('uic', py_script('tools/common/uic.py', ['$in', '$out'] + pyuic), 'UIC $out') n.rule('rcc', cmd2str([rcc, '-binary', '$in', '-o', '$out']), 'RCC $out') n.rule('js_lupdate', py_script('tools/common/js_lupdate.py', ['-o', '$out', '$in']), 'JS-LUPDATE $out') n.rule('pylupdate', cmd2str(pylupdate + ['$in', '-ts', '$out']), 'PY-LUPDATE $out') n.rule('lupdate', cmd2str([lupdate, '$in', '-ts', '$out']), 'LUPDATE $out') n.rule('webpack', cmdenv([node, 'node_modules/webpack/bin/webpack.js'], {'USE_WEBKIT': webkit}), 'WEBPACK $out') if sys.platform.startswith('linux'): n.rule('cat', 'cat $in > $out', 'CAT $out') n.comment('Files') ui_targets = build_targets(n, UI_FILES, 'uic', new_ext='py', new_path='knossos/ui') n.build('knossos/data/resources.rcc', 'rcc', 'knossos/data/resources.qrc', implicit=RCC_FILES)
import sys import argparse import codecs # ninja/miscにパスを通す sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'ninja', 'misc')) from ninja_syntax import Writer if __name__ == '__main__': # 引数処理 parser = argparse.ArgumentParser() parser.add_argument('output') args = parser.parse_args() with codecs.open(args.output, 'w', 'utf-8') as f: writer = Writer(f) writer.comment('ninjaのルールを定義するファイル') writer.newline() # configファイルのインクルード writer.include('config.ninja') writer.newline() # ルール定義 writer.rule( name='text_converter', command='python $text_converter $in $out --setting $setting_file', description='<キャラ名>を置換するコンバート') writer.newline()
PROJECT_NAME = 'toy' if sys.platform != 'win32' else 'toy.exe' CC = 'g++' if sys.platform != 'win32' else 'cl' CFLAGS = [] if sys.platform == 'win32': CFLAGS = ['-O2', '-EHsc', '-Zo', '/fp:fast', '-Iinclude'] else: CFLAGS = ['-std=c++11', '-O3', '-pthread', '-march=native', '-Iinclude'] SRC_DIR = 'src' BUILD_DIR = 'build' BIN_DIR = 'bin' with open('build.ninja', 'w') as build_file: n = Writer(build_file) n.comment('THIS FILE IS GENERATED BY configure.py') n.comment('EDITS WILL BE OVERWRITTEN') n.newline() ############################################################################ # VARIABLES ############################################################################ n.variable(key='ninja_required_version', value='1.9') if sys.platform == 'win32': n.variable(key='msvc_deps_prefix', value='Note: including file:') n.variable(key='cc', value=CC)
def shogunTFCCLib(argv):
    '''
    Build libtensorflow_cc.so

    Depends: all_proto, proto_text, CCOP
    Input: bazel dump, source files
    Output: libtensorflow_cc.so

    Review fix: the per-source compile loop contained a dangling `elif`
    with no matching `if` (a syntax error as written); it is now a plain
    `if`, which matches the parallel loop in shogunTFLib_framework.
    '''
    ag = argparse.ArgumentParser()
    ag.add_argument('-i', help='list of source files', type=str, required=True)
    ag.add_argument('-g', help='list of generated files', type=str, required=True)
    ag.add_argument('-o', help='where to write the ninja file', type=str, default='libtensorflow_cc.ninja')
    ag.add_argument('-H', help='where to put the headers list', type=str, default='libtensorflow_cc.hdrs')
    ag = ag.parse_args(argv)
    print(red('Argument Dump:'))
    pprint(vars(ag))

    # (0) read bazel dump and apply hard-coded filters
    srclist = bazelPreprocess([l.strip() for l in open(ag.i, 'r').readlines()])
    genlist = bazelPreprocess([l.strip() for l in open(ag.g, 'r').readlines()])
    tflib_extra_srcs = ['debian/embedded/fft/fftsg.c']
    _, srclist = eGrep('^third_party', srclist)
    _, srclist = eGrep('.*/windows/.*', srclist)  # no windoge source.
    _, srclist = eGrep('.*.cu.cc$', srclist)  # no CUDA file for CPU-only build
    _, srclist = eGrep('.*.pbtxt$', srclist)  # not for us
    _, srclist = eGrep('.*platform/cloud.*', srclist)  # SSL 1.1.1 broke it.
    _, srclist = eGrep('.*platform/s3.*', srclist)  # we don't have https://github.com/aws/aws-sdk-cpp
    _, srclist = eGrep('.*_main.cc$', srclist)  # don't include any main function.
    _, srclist = eGrep('.*test.*', srclist)  # don't include any test
    _, srclist = eGrep('.*cc_op_gen.*', srclist)  # don't include cc_op_gen.
    _, srclist = eGrep('.*gen_proto_text_functions.cc', srclist)  # not for this library
    _, srclist = eGrep('.*tensorflow.contrib.cloud.*', srclist)  # it wants GoogleAuthProvider etc.
    _, srclist = eGrep('.*gcs_config_ops.cc', srclist)  # it wants GcsFileSystem
    srclist = list(set(srclist))
    if getDpkgArchitecture('DEB_HOST_ARCH') != 'amd64':
        # they FTBFS on non-amd64 arches
        _, srclist = eGrep('.*/core/debug/.*', srclist)
        _, genlist = eGrep('.*/core/debug/.*', genlist)
        _, srclist = eGrep('.*debug_ops.*', srclist)
        _, genlist = eGrep('.*debug_ops.*', genlist)

    # (1) Instantiate ninja writer
    cursor = Writer(open(ag.o, 'w'))
    ninjaCommonHeader(cursor, ag)

    # (2) deal with generated files
    # (2.1) .pb.h and .pb.cc are already generated by shogunAllProto
    gen_pbh, genlist = eGrep('.*.pb.h', genlist)
    gen_pbcc, genlist = eGrep('.*.pb.cc', genlist)

    # XXX: temporary workaround for //tensorflow/core/debug:debug_service.grpc.pb.cc
    if getDpkgArchitecture('DEB_HOST_ARCH') == 'amd64':
        # This is amd64-only
        cursor.build(['tensorflow/core/debug/debug_service.grpc.pb.cc',
                      'tensorflow/core/debug/debug_service.grpc.pb.h'],
                     'rule_PROTOC_GRPC',
                     inputs='tensorflow/core/debug/debug_service.proto')

    # (2.2) .pb_text.* triples generated from their .proto files
    pbtlist = [x for x in genlist if any(x.endswith(y) for y in ('.pb_text.h', '.pb_text.cc', '.pb_text-impl.h'))]
    pbtlist = [x.replace('.pb_text.h', '.proto').replace('.pb_text.cc', '.proto').replace('.pb_text-impl.h', '.proto') for x in pbtlist]
    gen_pbth, genlist = eGrep('.*.pb_text.h', genlist)
    gen_pbtih, genlist = eGrep('.*.pb_text-impl.h', genlist)
    gen_pbtcc, genlist = eGrep('.*.pb_text.cc', genlist)
    for pbt in list(set(pbtlist)):
        cursor.build([pbt.replace('.proto', '.pb_text.h'),
                      pbt.replace('.proto', '.pb_text.cc'),
                      pbt.replace('.proto', '.pb_text-impl.h')],
                     'rule_PROTO_TEXT', pbt)

    # (2.3) cc_op_gen outputs are handled by shogunCCOP
    gen_ccopcc, genlist = eGrep('.*/cc/ops/.*.cc', genlist)
    gen_ccoph, genlist = eGrep('.*/cc/ops/.*.h', genlist)

    # (2.4) finish dealing with generated files
    # NOTE(review): ambiguous in the mangled original whether the assert was
    # inside this `if`; nested here so an empty remainder list cannot crash.
    if genlist:
        print(yellow('Remainders:'), genlist)
        assert(len(genlist) == 1)

    # (3) deal with source files
    # (3.1) filter-out headers
    _, srclist = eGrep('.*.proto$', srclist)  # done in (2)
    src_hdrs, srclist = eGrep('.*.h$', srclist)

    # (3.2) compile .cc source
    src_cc, srclist = eGrep('.*.cc', srclist)
    objlist = []
    exception_eigen_avoid_std_array = [
        'sparse_tensor_dense_matmul_op', 'conv_grad_ops_3d', 'adjust_contrast_op'
    ]
    for cc in src_cc + gen_pbcc + gen_pbtcc + gen_ccopcc + genlist + tflib_extra_srcs:
        variables = {}
        # BUG FIX: this branch was a dangling `elif` in the original.
        if any(x in cc for x in exception_eigen_avoid_std_array):
            variables = {'SHOGUN_EXTRA': '-DEIGEN_AVOID_STL_ARRAY'}
        obj = cursor.build(re.sub('.c[c]?$', '.o', cc), 'rule_CXX_OBJ', inputs=cc, variables=variables)[0]
        objlist.append(obj)
def shogunCCOP(argv):
    '''
    Generate tensorflow cc ops : tensorflow/cc/ops/*.cc and *.h

    Depends: AllProto, proto_text, libtensorflow_framework
    Input: cc source, bazel dump
    Output: one-time-use binary "XXX_gen_cc" and generated .cc .h files.
    '''
    parser = argparse.ArgumentParser()
    parser.add_argument('-i', help='list of source files', type=str, required=True)
    parser.add_argument('-g', help='list of generated files', type=str, required=True)
    parser.add_argument('-o', help='where to write the ninja file', type=str, default='ccop.ninja')
    ag = parser.parse_args(argv)
    print(red('Argument Dump:'))
    pprint(vars(ag))

    # Read the bazel dump; only the generated-file list matters here.
    genlist = bazelPreprocess([l.strip() for l in open(ag.g, 'r').readlines()])

    # Initialise the ninja writer.
    cursor = Writer(open(ag.o, 'w'))
    ninjaCommonHeader(cursor, ag)

    # Discard everything that is not cc_op related.
    _, genlist = eGrep('.*.pb.h', genlist)
    _, genlist = eGrep('.*.pb.cc', genlist)
    _, genlist = eGrep('.*.pb_text.h', genlist)
    _, genlist = eGrep('.*.pb_text-impl.h', genlist)
    _, genlist = eGrep('.*.pb_text.cc', genlist)

    # A missing source: user_ops.cc is just a copy of fact.cc.
    cursor.build('tensorflow/core/ops/user_ops.cc', 'COPY', inputs='tensorflow/core/user_ops/fact.cc')

    # Objects shared by every XXX_gen_cc generator binary.
    main_cc = ['tensorflow/core/framework/op_gen_lib.cc',
               'tensorflow/cc/framework/cc_op_gen.cc',
               'tensorflow/cc/framework/cc_op_gen_main.cc',
               ]
    main_obj = [x.replace('.cc', '.o') for x in main_cc]
    for cc in main_cc:
        cursor.build(cc.replace('.cc', '.o'), 'rule_CXX_OBJ', inputs=cc)

    # One generator executable per op name, plus the files it emits.
    gen_ccopcc, genlist = eGrep('.*/cc/ops/.*.cc', genlist)
    gen_ccoph, genlist = eGrep('.*/cc/ops/.*.h', genlist)
    opnamelist = list(set(os.path.basename(x.replace('.cc', '').replace('.h', ''))
                          for x in (gen_ccopcc + gen_ccoph) if 'internal' not in x))
    for opname in opnamelist:
        coreopcc = 'tensorflow/core/ops/' + opname + '.cc'
        ccopcc = 'tensorflow/cc/ops/' + opname + '.cc'
        # build the corresponding elf executable
        cursor.build(f'{opname}_gen_cc', 'rule_CXX_EXEC',
                     inputs=[coreopcc] + main_obj,
                     variables={'SHOGUN_EXTRA': '-I. -L. -ltf_ccop'})
        # generate file
        cursor.build([ccopcc.replace('.cc', '.h'), ccopcc], 'rule_CC_OP_GEN',
                     inputs=f'{opname}_gen_cc',
                     variables={'cc_op_gen_internal': '0' if opname != 'sendrecv_ops' else '1'},
                     implicit_outputs=[ccopcc.replace('.cc', '_internal.h'),
                                       ccopcc.replace('.cc', '_internal.cc')])

    cursor.close()
def shogunTFLib_framework(argv):
    '''
    Build libtensorflow_framework.so. With slight modification, this
    function should be able to build libtensorflow_android.so too.

    Depends: AllProto, proto_text
    Input: bazelDump, cxx source
    Output: libtensorflow_framework.so
    '''
    parser = argparse.ArgumentParser()
    parser.add_argument('-i', help='list of source files', type=str, required=True)
    parser.add_argument('-g', help='list of generated files', type=str, required=True)
    parser.add_argument('-o', help='where to write the ninja file', type=str, default='libtensorflow_framework.ninja')
    ag = parser.parse_args(argv)
    print(red('Argument Dump:'))
    pprint(vars(ag))

    # (0) read bazel dump and apply hardcoded filters
    srclist = bazelPreprocess([l.strip() for l in open(ag.i, 'r').readlines()])
    genlist = bazelPreprocess([l.strip() for l in open(ag.g, 'r').readlines()])
    _, srclist = eGrep('.*proto_text.gen_proto_text_functions.cc', srclist)
    _, srclist = eGrep('^third_party', srclist)
    _, srclist = eGrep('.*/windows/.*', srclist)  # no windoge source.

    # (1) Initialize ninja file
    cursor = Writer(open(ag.o, 'w'))
    ninjaCommonHeader(cursor, ag)

    # (2) generated files
    # (2.1) .pb.h / .pb.cc already come from shogunAllProto
    gen_pbh, genlist = eGrep('.*.pb.h', genlist)
    gen_pbcc, genlist = eGrep('.*.pb.cc', genlist)

    # (2.2) each .pb_text.* triple is produced from its .proto by proto_text
    pbtlist = [x for x in genlist if any(x.endswith(y) for y in ('.pb_text.h', '.pb_text.cc', '.pb_text-impl.h'))]
    pbtlist = [x.replace('.pb_text.h', '.proto').replace('.pb_text.cc', '.proto').replace('.pb_text-impl.h', '.proto') for x in pbtlist]
    gen_pbth, genlist = eGrep('.*.pb_text.h', genlist)
    gen_pbtih, genlist = eGrep('.*.pb_text-impl.h', genlist)
    gen_pbtcc, genlist = eGrep('.*.pb_text.cc', genlist)
    for pbt in list(set(pbtlist)):
        cursor.build([pbt.replace('.proto', '.pb_text.h'),
                      pbt.replace('.proto', '.pb_text.cc'),
                      pbt.replace('.proto', '.pb_text-impl.h')],
                     'rule_PROTO_TEXT', pbt)

    # (2.3) anything left over is reported; exactly one remainder is expected
    if genlist:
        print(yellow('Remainders:'), genlist)
        assert(len(genlist) == 1)

    # (3) source files
    # (3.1) filter-out headers
    _, srclist = eGrep('.*.proto$', srclist)  # done in (2)
    src_hdrs, srclist = eGrep('.*.h$', srclist)

    # (3.2) compile every .cc (hand-written + generated) into an object
    src_cc, srclist = eGrep('.*.cc', srclist)
    objlist = []
    for cc in src_cc + gen_pbcc + gen_pbtcc + genlist:
        variables = {}
        obj = cursor.build(cc.replace('.cc', '.o'), 'rule_CXX_OBJ', inputs=cc, variables=variables)[0]
        objlist.append(obj)

    # (4) link the final shared object
    cursor.build('libtensorflow_framework.so', 'rule_CXX_SHLIB', inputs=objlist,
                 variables={'LIBS': '-lfarmhash -lhighwayhash -lsnappy -lgif'
                            + ' -ldouble-conversion -lz -lprotobuf -ljpeg -lnsync -lnsync_cpp'
                            + ' -lpthread',
                            'SHOGUN_EXTRA': f'-Wl,--soname=libtensorflow_framework.so.{tf_soversion}'
                            + f' -Wl,--version-script tensorflow/tf_framework_version_script.lds'
                            + f' -fvisibility=hidden'})

    # (5) a temporary shared object used by shogunCCOP: everything except
    # the kernels/ops objects
    libccop = [x for x in objlist if all(y not in x for y in ('core/kernels', 'core/ops'))]
    cursor.build('libtf_ccop.so', 'rule_CXX_SHLIB', inputs=libccop,
                 variables={'LIBS': '-lfarmhash -lhighwayhash -lsnappy -lgif'
                            + ' -ldouble-conversion -lz -lprotobuf -ljpeg -lnsync -lnsync_cpp'
                            + ' -lpthread'})

    cursor.close()
cpp_sources = mglob( 'src/*.cc', 'src/w/*.cc', 'src/vfx/*.cc', 'src/ops/*.cc', ) c_sources = mglob( 'src/*.c', 'ext/*.c', ) binary_data = {'DroidSansMonoTTF': 'fonts/DroidSansMono.ttf'} with open('build.ninja', 'w') as f: w = Writer(f) w.variable('builddir', 'build') w.variable('cppflags', cppflags) w.variable('cflags', cflags) w.variable('ldflags', ldflags) w.rule('cpp', command=[ cpp_compiler, '$cppflags', '-MMD', '-MF', '$out.d', '-c', '-o', '$out', '$in' ], depfile='$out.d') w.rule('cc', command=[ c_compiler, '$cflags', '-MMD', '-MF', '$out.d', '-c', '-o', '$out', '$in' ],