def make_current(self, ninja: ninja_syntax.Writer):
    """Repoint the ver/current symlink at this configuration's version and
    declare a phony edge so the canonical ROM path aliases this version's ROM.

    :param ninja: open ninja file writer to append the phony edge to
    """
    current = Path("ver/current")
    try:
        current.unlink()
    except FileNotFoundError:
        # First run: the symlink does not exist yet. (The original swallowed
        # *every* exception here; any other error would make symlink_to fail
        # right after anyway, so surface it there instead of hiding it.)
        pass
    current.symlink_to(self.version)
    ninja.build("ver/current/build/papermario.z64", "phony", str(self.rom_path()))
def ninja_build(buildables):
    """Write build.ninja covering every buildable object, then invoke ninja."""
    with open("build.ninja", "w") as out_file:
        writer = Writer(out_file)
        basic_rules(writer)
        for buildable in buildables:
            buildable.build(writer)
    # File is closed (and flushed) above, so ninja sees the complete manifest.
    system("ninja")
def ninjafy(project):
    """Serialize a project description to build.ninja.

    Writes the project's variables, rules and build edges, in that order.

    :param project: object exposing .variables (key/value), .rules
        (name/command) and .builds (outputs/rule/inputs) iterables
    """
    # BUG FIX: the original opened the file without ever closing it, so the
    # last buffered writes could be lost; a context manager guarantees
    # flush + close even on error.
    with open("build.ninja", "w") as file:
        ninja = Writer(file)
        for v in project.variables:
            ninja.variable(v.key, v.value)
        for r in project.rules:
            ninja.rule(r.name, r.command)
        for b in project.builds:
            ninja.build(b.outputs, b.rule, b.inputs)
def shogunAllProto(argv):
    '''
    Generate XXX.pb.{h,cc} files from all available XXX.proto
    files in the source directory.

    Depends: protoc (protobuf-compiler)
    Input: .proto
    Output: .pb.cc, .pb.h
    '''
    ag = argparse.ArgumentParser()
    ag.add_argument('-o', help='write ninja file', type=str, default='all_proto.ninja')
    ag = ag.parse_args(argv)
    print(red('Argument Dump:'))
    pprint(vars(ag))

    # (1) initialize ninja file
    cursor = Writer(open(ag.o, 'w'))
    ninjaCommonHeader(cursor, ag)

    # (2) glob all proto
    protos = glob.glob('**/*.proto', recursive=True)
    print(cyan('AllProto:'), f'globbed {len(protos)} .proto files')

    # (3) generate .pb.cc, .pb.h
    for proto in protos:
        outputs = [proto.replace('.proto', '.pb.cc'),
                   proto.replace('.proto', '.pb.h')]
        cursor.build(outputs, 'rule_PROTOC', proto)

    # done
    cursor.close()
def write_ninja_for_tools(ninja: ninja_syntax.Writer):
    """Declare the host-compiler rule and the build edges for the two
    helper tools (Yay0 compressor and ROM CRC fixer)."""
    ninja.rule(
        "cc_tool",
        description="cc_tool $in",
        command="cc -w $in -O3 -o $out",
    )
    # Each tool is a single C file compiled with the host cc.
    for tool, source in (
        (YAY0_COMPRESS_TOOL, f"{BUILD_TOOLS}/yay0/Yay0compress.c"),
        (CRC_TOOL, f"{BUILD_TOOLS}/rom/n64crc.c"),
    ):
        ninja.build(tool, "cc_tool", source)
def shogunProtoText(argv):
    '''
    Build a binary ELF executable named proto_text, which generates
    XXX.pb_text{.cc,.h,-impl.h} files from a given XXX.proto file. This
    binary file is for one-time use.

    Depends: shogunAllProto
    Input: bazelDump, cxx source
    Output: proto_text
    '''
    ag = argparse.ArgumentParser()
    ag.add_argument('-i', help='list of source files', type=str, required=True)
    ag.add_argument('-g', help='list of generated files', type=str, required=True)
    ag.add_argument('-o', help='where to write the ninja file', type=str, default='proto_text.ninja')
    ag = ag.parse_args(argv)
    print(red('Argument Dump:'))
    pprint(vars(ag))
    # (0) read bazel dump and apply hardcoded filters
    srclist = bazelPreprocess([l.strip() for l in open(ag.i, 'r').readlines()])
    genlist = bazelPreprocess([l.strip() for l in open(ag.g, 'r').readlines()])
    _, srclist = eGrep('.*.h$', srclist)  # we don't need to deal with header here
    _, srclist = eGrep('^third_party', srclist)  # no third_party stuff
    _, srclist = eGrep('.*windows/.*', srclist)  # no windoge source
    _, srclist = eGrep('.*.proto$', srclist)  # already dealt with in (2)
    # (1) Instantiate ninja writer
    cursor = Writer(open(ag.o, 'w'))
    ninjaCommonHeader(cursor, ag)
    # (2) deal with generated files
    # (2.1) .pb.cc and .pb.h files are generated in shogunAllProto
    _, genlist = eGrep('.*.pb.h$', genlist)
    pbcclist, genlist = eGrep('.*.pb.cc$', genlist)
    if len(genlist) > 0:
        print(yellow('Remainders:'), genlist)
    # (3) deal with source files
    cclist, srclist = eGrep('.*.cc', srclist)
    objlist = []
    for cc in cclist + pbcclist:
        obj = cc.replace('.cc', '.o')
        # Writer.build returns the list of outputs; keep the object path.
        objlist.append(cursor.build(obj, 'rule_CXX_OBJ', cc)[0])
    if len(srclist) > 0:
        print(yellow('Remainders:'), srclist)
    # (4) link objects into the final ELF
    cursor.build(f'proto_text', 'rule_CXX_EXEC', inputs=objlist,
                 variables={'LIBS': '-lpthread -lprotobuf -ldouble-conversion'})
    # done
    cursor.close()
def make_build(cfgpath, tmp_dir, build_info):
    """Write build.ninja with one rule + build edge per file type in build_info.

    Each file type may combine 'merge', 'sort' and 'zip' stages into a
    single shell pipeline whose output is redirected to $out; only the
    first stage of the pipeline consumes $in.

    :param cfgpath: path to the config file passed to merge.py
    :param tmp_dir: default temp dir (overridden per type below)
    :param build_info: mapping of file-type name -> per-type settings dict
    """
    merge_path = os.path.join(BASE_DIR, 'merge.py')
    with open('build.ninja', 'w') as f:
        n = Writer(f)
        # BUG FIX: .iteritems() is Python 2 only; .items() works on 2 and 3.
        for ftname, ft_build_info in build_info.items():
            # NOTE(review): this deliberately(?) shadows the tmp_dir
            # parameter with the per-type value — confirm the parameter
            # is only meant as a fallback.
            tmp_dir = ft_build_info['tmp_dir']
            dest_dir = ft_build_info['dest_dir']
            options = ft_build_info['options']
            sep = ft_build_info['seperator']  # [sic] key is spelled 'seperator'
            cfgsect = 'Type:{}'.format(ftname)
            # declare rule: build the pipeline stage by stage
            cmds = []
            first = True
            if 'merge' in options:
                cmds.append('/usr/bin/env python ' + merge_path +
                            ' {} {} $in'.format(cfgpath, cfgsect))
                first = False
            if 'sort' in options:
                sortcol = ft_build_info['sort_col']
                cmds.append("sort -t \"`printf '{}'`\" -T {} -k{}".
                            format(sep, tmp_dir, sortcol) +
                            (' $in' if first else ''))
                first = False
            if 'zip' in options:
                cmds.append("gzip -kc" + (' $in' if first else ''))
            rule_name = '{}_rule'.format(ftname)
            n.rule(rule_name, command=' | '.join(cmds) + ' > $out')
            n.newline()
            # declare build
            _make_bulid_declare_build(n, options, dest_dir, ft_build_info,
                                      rule_name)
            n.newline()
def write_buildfile():
    """Generate build.ninja for the arm-none-eabi (Cortex-M) toolchain:
    compile/link rules, compdb + cscope helpers, and one edge per source."""
    with open("build.ninja", "w") as buildfile:
        n = Writer(buildfile)
        # Variable declarations
        n.variable("lib_path", "/usr/arm-none-eabi/lib")
        n.variable("cflags", cflags)
        n.variable("cxxflags", cxxflags)
        n.variable("lflags", lflags)
        # Rule declarations
        n.rule("cxx", command="arm-none-eabi-g++ $cxxflags -c $in -o $out")
        n.rule("cc", command="arm-none-eabi-gcc $cflags -c $in -o $out")
        n.rule("cl", command="arm-none-eabi-gcc $lflags $in -o $out")
        n.rule("oc", command="arm-none-eabi-objcopy -O binary $in $out")
        n.rule("cdb", command="ninja -t compdb cc cxx > compile_commands.json")
        n.rule("cscf",
               command="find " + " ".join(set(source_dirs + include_dirs)) +
                       " -regex \".*\\(\\.c\\|\\.h\\|.cpp\\|.hpp\\)$$\" -and " +
                       "-not -type d > $out")
        n.rule("cscdb", command="cscope -bq")
        # Build rules
        n.build("compile_commands.json", "cdb")
        n.build("cscope.files", "cscf")
        n.build(["cscope.in.out", "cscope.po.out", "cscope.out"], "cscdb",
                "cscope.files")
        objects = []

        def compile_source(rule, name):
            # Emit one compile edge and remember the object for linking.
            ofile = subst_ext(name, ".o")
            n.build(ofile, rule, name)
            objects.append(ofile)

        # BUG FIX: the original used map() purely for side effects; in
        # Python 3 map() is lazy, so no edges were ever emitted. Use an
        # explicit loop instead.
        for name in get_sources():
            if name.endswith(".c") or name.endswith(".S"):
                compile_source("cc", name)
            elif name.endswith(".cpp"):
                compile_source("cxx", name)
        n.build("main.elf", "cl", objects)
        n.build("main.bin", "oc", "main.elf")
import sys
import os  # BUG FIX: os.path is used below but os was never imported
import argparse
import codecs

# Make ninja/misc importable so ninja_syntax can be found.
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'ninja', 'misc'))
from ninja_syntax import Writer

if __name__ == '__main__':
    # Argument handling
    parser = argparse.ArgumentParser()
    parser.add_argument('output')
    args = parser.parse_args()
    with codecs.open(args.output, 'w', 'utf-8') as f:
        writer = Writer(f)
        writer.comment('ninjaのルールを定義するファイル')
        writer.newline()
        # Include the shared config file
        writer.include('config.ninja')
        writer.newline()
        # Rule definitions
        writer.rule(
            name='text_converter',
            command='python $text_converter $in $out --setting $setting_file',
            description='<キャラ名>を置換するコンバート')
        writer.newline()
def _generate_ninja_file(cpp_compiler_path, c_compiler_path, archiver_path, target_scheme, build_dir, lib_path):
    """Write Application.ninja into build_dir: compile and dependency rules
    for C and C++, an archive rule using response files, per-source build
    edges, and the final static-library edge producing lib_path."""
    with open(os.path.join(build_dir, "Application.ninja"), "w") as build_file:
        ninja_file = Writer(build_file)
        ninja_file.variable(key="builddir", value=build_dir)
        # Flags = language defaults + target-scheme flags + include paths.
        cpp_compiler_flags = COMPILER_FLAGS_DEFAULT_CPP + " " + COMPILER_FLAGS_TARGET_MAP[target_scheme] + " " + INCLUDE_PATHS
        c_compiler_flags = COMPILER_FLAGS_DEFAULT_C + " " + COMPILER_FLAGS_TARGET_MAP[target_scheme] + " " + INCLUDE_PATHS
        # Write the compiler rule for c, cpp and cc
        ninja_file.rule("compile_cpp",
                        command="{} {} -o $out $in".format(cpp_compiler_path, cpp_compiler_flags),
                        description="Compiling C++ source: $in",
                        depfile="$out.o.d",
                        deps="gcc")
        ninja_file.rule("compile_c",
                        command="{} {} -o $out $in".format(c_compiler_path, c_compiler_flags),
                        description="Compiling C source: $in",
                        depfile="$out.o.d",
                        deps="gcc")
        # Write the rule that generates the dependencies
        ninja_file.rule("dependencies_cpp",
                        command="{} {} -MM -MG -MF $out $in".format(cpp_compiler_path, cpp_compiler_flags),
                        description="Generating C++ dependency: $in")
        ninja_file.rule("dependencies_c",
                        command="{} {} -MM -MG -MF $out $in".format(c_compiler_path, c_compiler_flags),
                        description="Generating C dependency: $in")
        # Write the rule to build the static library. Note we use response
        # files as on Windows the command is too long for CreateProcess.
        ninja_file.rule("archive",
                        command="{} rcs $out @$out.rsp".format(archiver_path),
                        description="Building static library: $out",
                        rspfile="$out.rsp",
                        rspfile_content="$in")
        # Write the compile command for all source files.
        output_files = _write_build_command(ninja_file, SOURCE_PATHS, 'cpp', 'compile_cpp', 'dependencies_cpp', build_dir)
        output_files += _write_build_command(ninja_file, SOURCE_PATHS, 'c,cc', 'compile_c', 'dependencies_c', build_dir)
        # Write the command to generate the static library for ChilliSource and the application
        ninja_file.build(rule="archive", inputs=output_files, outputs=lib_path)
# NOTE(review): this is a second, functionally identical definition of
# _generate_ninja_file (also defined earlier in this file). If both live in
# the same module, this one silently replaces the first — confirm which copy
# is intended to survive.
def _generate_ninja_file(cpp_compiler_path, c_compiler_path, archiver_path, target_scheme, build_dir, lib_path):
    """Write Application.ninja into build_dir: compile and dependency rules
    for C and C++, an archive rule using response files, per-source build
    edges, and the final static-library edge producing lib_path."""
    with open(os.path.join(build_dir, "Application.ninja"), "w") as build_file:
        ninja_file = Writer(build_file)
        ninja_file.variable(key="builddir", value=build_dir)
        # Flags = language defaults + target-scheme flags + include paths.
        cpp_compiler_flags = COMPILER_FLAGS_DEFAULT_CPP + " " + COMPILER_FLAGS_TARGET_MAP[
            target_scheme] + " " + INCLUDE_PATHS
        c_compiler_flags = COMPILER_FLAGS_DEFAULT_C + " " + COMPILER_FLAGS_TARGET_MAP[
            target_scheme] + " " + INCLUDE_PATHS
        # Write the compiler rule for c, cpp and cc
        ninja_file.rule("compile_cpp",
                        command="{} {} -o $out $in".format(
                            cpp_compiler_path, cpp_compiler_flags),
                        description="Compiling C++ source: $in",
                        depfile="$out.o.d",
                        deps="gcc")
        ninja_file.rule("compile_c",
                        command="{} {} -o $out $in".format(
                            c_compiler_path, c_compiler_flags),
                        description="Compiling C source: $in",
                        depfile="$out.o.d",
                        deps="gcc")
        # Write the rule that generates the dependencies
        ninja_file.rule("dependencies_cpp",
                        command="{} {} -MM -MG -MF $out $in".format(
                            cpp_compiler_path, cpp_compiler_flags),
                        description="Generating C++ dependency: $in")
        ninja_file.rule("dependencies_c",
                        command="{} {} -MM -MG -MF $out $in".format(
                            c_compiler_path, c_compiler_flags),
                        description="Generating C dependency: $in")
        # Write the rule to build the static library. Note we use response
        # files as on Windows the command is too long for CreateProcess.
        ninja_file.rule("archive",
                        command="{} rcs $out @$out.rsp".format(archiver_path),
                        description="Building static library: $out",
                        rspfile="$out.rsp",
                        rspfile_content="$in")
        # Write the compile command for all source files.
        output_files = _write_build_command(ninja_file, SOURCE_PATHS, 'cpp', 'compile_cpp', 'dependencies_cpp', build_dir)
        output_files += _write_build_command(ninja_file, SOURCE_PATHS, 'c,cc', 'compile_c', 'dependencies_c', build_dir)
        # Write the command to generate the static library for ChilliSource and the application
        ninja_file.build(rule="archive", inputs=output_files, outputs=lib_path)
"Ay_Apu.cpp", "Ay_Cpu.cpp", "Ay_Emu.cpp", "Blip_Buffer.cpp", "Classic_Emu.cpp", "Data_Reader.cpp", "Dual_Resampler.cpp", "Effects_Buffer.cpp", "Fir_Resampler.cpp", "Gb_Apu.cpp", "Gb_Cpu.cpp", "Gb_Oscs.cpp", "Gbs_Emu.cpp", "gme.cpp", "Gme_File.cpp", "Gym_Emu.cpp", "Hes_Apu.cpp", "Hes_Cpu.cpp", "Hes_Emu.cpp", "Kss_Cpu.cpp", "Kss_Emu.cpp", "Kss_Scc_Apu.cpp", "M3u_Playlist.cpp", "Multi_Buffer.cpp", "Music_Emu.cpp", "Nes_Apu.cpp", "Nes_Cpu.cpp", "Nes_Fme7_Apu.cpp", "Nes_Namco_Apu.cpp", "Nes_Oscs.cpp", "Nes_Vrc6_Apu.cpp", "Nsfe_Emu.cpp", "Nsf_Emu.cpp", "Sap_Apu.cpp", "Sap_Cpu.cpp", "Sap_Emu.cpp", "Sms_Apu.cpp", "Snes_Spc.cpp", "Spc_Cpu.cpp", "Spc_Dsp.cpp", "Spc_Emu.cpp", "Spc_Filter.cpp", "Vgm_Emu.cpp", "Vgm_Emu_Impl.cpp", "Ym2413_Emu.cpp", "Ym2612_Emu.cpp" ] buildfile = open("build.ninja", "w") n = Writer(buildfile) # variable declarations n.comment("variable declarations") n.variable("CC", "emcc") n.variable("CXX", "em++") n.newline() n.variable("ROOT", ".") n.variable("XZ_ROOT", "$ROOT/xz-embedded") n.variable("GME_ROOT", "$ROOT/gme-source-0.6.1") n.variable("OBJECTS", "$ROOT/objects") n.variable("FINAL_DIR", "$ROOT/final") n.newline() n.variable( "EXPORT_LIST", "\"['_crPlayerContextSize', '_crPlayerInitialize', '_crPlayerLoadFile', '_crPlayerSetTrack', '_crPlayerGenerateStereoFrames', '_crPlayerVoicesCanBeToggled', '_crPlayerGetVoiceCount', '_crPlayerGetVoiceName', '_crPlayerSetVoiceState', '_crPlayerCleanup', '_main']\""
import glob
import os
from ninja_syntax import Writer

# Every example directory that might contain .gxc sources.
gxc_dirs = glob.glob('./examples/**/', recursive=True)

with open("build.ninja", "w") as buildfile:
    n = Writer(buildfile)
    # Windows-only commands: both rules cd into the file's directory first.
    n.rule("grc", command="cmd /c cd ${cwd} && grc ${in_name}", description="grc ${in_name}")
    n.rule("gxc", command="cmd /c cd ${cwd} && gxc ${in_name}", description="gxc ${in_name}")
    for dir in gxc_dirs:
        gxc_files = glob.glob(dir + '*.gxc')
        if gxc_files:
            grc_files = glob.glob(dir + '*.grc')
            gxc_implicit_inputs = []
            for grc_file in grc_files:
                # def build(self, outputs, rule, inputs=None, implicit=None, order_only=None,
                # variables=None, implicit_outputs=None):
                grc_file_name_part = os.path.split(grc_file)[1].split('.')[0]
                # Each .grc compiles to a .gr + .grh pair, which the .gxc
                # builds depend on implicitly.
                gr_file = os.path.join(dir, grc_file_name_part + '.gr')
                grh_file = os.path.join(dir, grc_file_name_part + '.grh')
                gxc_implicit_inputs.append(gr_file)
                gxc_implicit_inputs.append(grh_file)
                n.build(gr_file, 'grc',
                # NOTE(review): this call is truncated at the chunk boundary.
def write_buildfile():
    """Generate build.ninja for the msp430 toolchain: compile/link rules,
    compdb + cscope helpers, and one edge per source file."""
    with open("build.ninja", "w") as buildfile:
        n = Writer(buildfile)
        # Variable declarations
        n.variable("lib_path", "/usr/msp430/lib")  # "/usr/lib/gcc/msp430/4.6.3")
        n.variable("ldscript_dev_path", "/usr/msp430/lib/ldscripts/" + MCU)
        n.variable("ldscript_path", "/usr/msp430/lib/ldscripts")
        n.variable("tc_path", "/usr/bin")
        n.variable("cflags", cflags)
        n.variable("cxxflags", cxxflags)
        n.variable("lflags", lflags)
        # Rule declarations
        n.rule("cxx", command="msp430-g++ $cxxflags -c $in -o $out")
        n.rule("cc", command="msp430-gcc $cflags -c $in -o $out")
        n.rule("cl", command="msp430-gcc $lflags $in -o $out -lm -lgcc -lc")
        n.rule("oc", command="msp430-objcopy -O binary $in $out")
        n.rule("cdb", command="ninja -t compdb cc cxx > compile_commands.json")
        # NOTE(review): the second '> $out' below truncates the find output,
        # leaving only the MCU header in $out — was '>>' intended?
        n.rule(
            "cscf",
            command="find " + " ".join(set(source_dirs + include_dirs)) +
                    " -regex \".*\\(\\.c\\|\\.h\\|.cpp\\|.hpp\\)$$\" -and " +
                    "-not -type d | egrep -v \"msp430[a-zA-Z0-9]*\\.h$$\" > $out; echo " +
                    "/usr/msp430/include/" + MCU + ".h" + " > $out")
        n.rule("cscdb", command="cscope -bq")
        # Build rules
        n.build("compile_commands.json", "cdb")
        n.build("cscope.files", "cscf")
        n.build(["cscope.in.out", "cscope.po.out", "cscope.out"], "cscdb",
                "cscope.files")
        objects = []

        def compile_source(rule, name):
            # Emit one compile edge and remember the object for linking.
            ofile = subst_ext(name, ".o")
            n.build(ofile, rule, name)
            objects.append(ofile)

        # BUG FIX: the original used map() purely for side effects; in
        # Python 3 map() is lazy, so no compile edges were ever emitted.
        # Use an explicit loop instead.
        for name in get_sources():
            if name.endswith(".c"):
                compile_source("cc", name)
            elif name.endswith(".cpp"):
                compile_source("cxx", name)
        n.build("main.elf", "cl", objects)
        n.build("main.bin", "oc", "main.elf")
def get_includes():
    """Return the -I flags for all source and include directories."""
    return " ".join(map(lambda x: "-I" + x, source_dirs + include_dirs))


def get_libs():
    """Return the linker library flags joined into one string."""
    return " ".join(libraries)


def get_defines():
    """Return the -D flags for all preprocessor defines."""
    return " ".join(map(lambda x: "-D" + x, defines))


# Emit build.ninja for a host gcc/g++ build.
with open("build.ninja", "w") as buildfile:
    n = Writer(buildfile)
    # Variable declarations
    n.variable("cxxflags", "-g -Wall -std=c++14 " + get_includes() + " " + get_defines())
    n.variable("cflags", "-g -Wall -std=c99 " + get_includes() + " " + get_defines())
    n.variable("lflags", " -lm -lstdc++ -lc")
    n.variable("libs", get_libs())
    # Rule declarations
    n.rule("cxx", command="g++ $cxxflags -c $in -o $out")
    n.rule("cc", command="gcc $cflags -c $in -o $out")
    n.rule("cl", command="gcc -o $out $in $libs $lflags")
    # NOTE(review): the script likely continues past this chunk boundary.
def shogunTFLib_framework(argv):
    '''
    Build libtensorflow_framework.so. With slight modification, this
    function should be able to build libtensorflow_android.so too.

    Depends: AllProto, proto_text
    Input: bazelDump, cxx source
    Output: libtensorflow_framework.so
    '''
    ag = argparse.ArgumentParser()
    ag.add_argument('-i', help='list of source files', type=str, required=True)
    ag.add_argument('-g', help='list of generated files', type=str, required=True)
    ag.add_argument('-o', help='where to write the ninja file', type=str, default='libtensorflow_framework.ninja')
    ag = ag.parse_args(argv)
    print(red('Argument Dump:'))
    pprint(vars(ag))
    # (0) read bazel dump and apply hardcoded filters
    srclist = bazelPreprocess([l.strip() for l in open(ag.i, 'r').readlines()])
    genlist = bazelPreprocess([l.strip() for l in open(ag.g, 'r').readlines()])
    _, srclist = eGrep('.*proto_text.gen_proto_text_functions.cc', srclist)
    _, srclist = eGrep('^third_party', srclist)
    _, srclist = eGrep('.*/windows/.*', srclist)  # no windoge source.
    # (1) Initialize ninja file
    cursor = Writer(open(ag.o, 'w'))
    ninjaCommonHeader(cursor, ag)
    # (2) deal with generated files
    # (2.1) .pb.h and .pb.cc are already generated by shogunAllProto
    gen_pbh, genlist = eGrep('.*.pb.h', genlist)
    gen_pbcc, genlist = eGrep('.*.pb.cc', genlist)
    # (2.2) .pb_text.* : map each generated pb_text file back to its .proto
    pbtlist = [x for x in genlist if any(x.endswith(y) for y in ('.pb_text.h', '.pb_text.cc', '.pb_text-impl.h'))]
    pbtlist = [x.replace('.pb_text.h', '.proto').replace('.pb_text.cc', '.proto').replace('.pb_text-impl.h', '.proto') for x in pbtlist]
    gen_pbth, genlist = eGrep('.*.pb_text.h', genlist)
    gen_pbtih, genlist = eGrep('.*.pb_text-impl.h', genlist)
    gen_pbtcc, genlist = eGrep('.*.pb_text.cc', genlist)
    # One rule_PROTO_TEXT edge per unique .proto produces all three outputs.
    for pbt in list(set(pbtlist)):
        cursor.build([
            pbt.replace('.proto', '.pb_text.h'),
            pbt.replace('.proto', '.pb_text.cc'),
            pbt.replace('.proto', '.pb_text-impl.h')
        ], 'rule_PROTO_TEXT', pbt)
    # (2.3) finish dealing with generated files
    if genlist:
        print(yellow('Remainders:'), genlist)
        assert(len(genlist) == 1)
    # (3) deal with source files
    # (3.1) filter-out headers
    _, srclist = eGrep('.*.proto$', srclist)  # done in (2)
    src_hdrs, srclist = eGrep('.*.h$', srclist)
    # (3.2) compile .cc source
    src_cc, srclist = eGrep('.*.cc', srclist)
    objlist = []
    for cc in src_cc + gen_pbcc + gen_pbtcc + genlist:
        variables = {}
        obj = cursor.build(cc.replace('.cc', '.o'), 'rule_CXX_OBJ',
                           inputs=cc, variables=variables)[0]
        objlist.append(obj)
    # (4) link the final executable
    cursor.build('libtensorflow_framework.so', 'rule_CXX_SHLIB', inputs=objlist,
                 variables={'LIBS': '-lfarmhash -lhighwayhash -lsnappy -lgif'
                            + ' -ldouble-conversion -lz -lprotobuf -ljpeg -lnsync -lnsync_cpp'
                            + ' -lpthread',
                            'SHOGUN_EXTRA': f'-Wl,--soname=libtensorflow_framework.so.{tf_soversion}'
                            + f' -Wl,--version-script tensorflow/tf_framework_version_script.lds'
                            + f' -fvisibility=hidden'})
    # (5) a temporary shared object used by shogunCCOP:
    # same objects minus the kernels/ops compilation units.
    libccop = [x for x in objlist if all(y not in x for y in ('core/kernels', 'core/ops'))]
    cursor.build('libtf_ccop.so', 'rule_CXX_SHLIB', inputs=libccop,
                 variables={'LIBS': '-lfarmhash -lhighwayhash -lsnappy -lgif'
                            + ' -ldouble-conversion -lz -lprotobuf -ljpeg -lnsync -lnsync_cpp'
                            + ' -lpthread'})
    # done
    cursor.close()
# NOTE(review): the lines below are the tail of a gen_configure-style helper
# whose def lies outside this chunk; indentation here is reconstructed.
n.build([target], '%s_build' % prj,
        inputs=list(collect_files(srcdir)),
        variables=OrderedDict([
            ('name', prj),
            ('configure', configure),
            ('configure_flags', all_flags),
            ('srcdir', '$top_srcdir/%s' % srcdir),
            ('destdir', '$builddir/%s' % prj),
            ('workdir', '$intdir/%s' % prj),
        ]))
return target


with open('build.ninja', 'w') as buildfile:
    n = Writer(buildfile)
    n.comment('generated by %s' % sys.argv[0])
    n.include('build.vars.ninja')
    targets = []
    targets.append(gen_configure(n, 'gettext',
                                 srcdir='gettext',
                                 configure='./configure',
                                 flags=[
                                     '--prefix=/',
                                     'CC=$cc',
                                     'CXX=$cxx',
                                     # When running configure under Xcode,
                                     # SIGALRM is ignored and this doesn't play
                                     # nice with some of the (useless - gnulib)
                                     # NOTE(review): call truncated at this chunk boundary.
# NOTE(review): this loop is the tail of a function (started outside this
# chunk) that writes a Qt .qrc resource manifest to `stream`.
for path in RCC_FILES:
    name = path
    # hlp.png and webpack .out.js outputs get friendlier aliases.
    if os.path.basename(path) == 'hlp.png':
        name = 'hlp.png'
    elif path.endswith('.out.js'):
        name = path[:-7] + '.js'
    stream.write(' <file alias="%s">%s</file>\n' % (name, os.path.abspath(path)))
stream.write('</qresource>\n')
stream.write('</RCC>')

info('Writing build.ninja...\n')
with open('build.ninja', 'w') as stream:
    n = Writer(stream)
    n.comment('Transformers')
    n.rule('uic', py_script('tools/common/uic.py', ['$in', '$out'] + pyuic), 'UIC $out')
    n.rule('rcc', cmd2str([rcc, '-binary', '$in', '-o', '$out']), 'RCC $out')
    n.rule('js_lupdate', py_script('tools/common/js_lupdate.py', ['-o', '$out', '$in']), 'JS-LUPDATE $out')
    n.rule('pylupdate', cmd2str(pylupdate + ['$in', '-ts', '$out']), 'PY-LUPDATE $out')
    n.rule('lupdate', cmd2str([lupdate, '$in', '-ts', '$out']), 'LUPDATE $out')
    n.rule('webpack', cmdenv([node, 'node_modules/webpack/bin/webpack.js'], {'USE_WEBKIT': webkit}), 'WEBPACK $out')
    if sys.platform.startswith('linux'):
        n.rule('cat', 'cat $in > $out', 'CAT $out')
    n.comment('Files')
    ui_targets = build_targets(n, UI_FILES, 'uic', new_ext='py', new_path='knossos/ui')
    n.build('knossos/data/resources.rcc', 'rcc', 'knossos/data/resources.qrc', implicit=RCC_FILES)
def write_ninja_rules(ninja: ninja_syntax.Writer, cpp: str):
    """Declare every ninja rule the build uses: linking, ROM packing and
    checksum, the four cc pipelines (plain / nusys / libultra / dsl) and
    all asset-conversion tools.

    :param ninja: writer for the ninja file being generated
    :param cpp: path/command of the C preprocessor to use
    """
    # platform-specific
    if sys.platform == "darwin":
        os_dir = "mac"
        iconv = "tools/iconv.py UTF-8 SHIFT-JIS"
    elif sys.platform == "linux":
        from os import uname
        if uname()[4] == "aarch64":
            os_dir = "arm"
        else:
            os_dir = "linux"
        iconv = "iconv --from UTF-8 --to SHIFT-JIS"
    else:
        raise Exception(f"unsupported platform {sys.platform}")

    cross = "mips-linux-gnu-"

    ninja.variable("python", sys.executable)

    ninja.rule(
        "ld",
        description="link($version) $out",
        command=
        f"{cross}ld -T ver/$version/undefined_syms.txt -T ver/$version/undefined_syms_auto.txt -T ver/$version/undefined_funcs_auto.txt -T ver/$version/dead_syms.txt -Map $mapfile --no-check-sections -T $in -o $out",
    )
    ninja.rule(
        "z64",
        description="rom $out",
        command=
        f"{cross}objcopy $in $out -O binary && {BUILD_TOOLS}/rom/n64crc $out",
    )
    # When checksum verification is disabled the rule just touches the stamp.
    if DO_SHA1_CHECK:
        ninja.rule(
            "sha1sum",
            description="check $in",
            command=f"sha1sum -c $in && touch $out",
        )
    else:
        ninja.rule(
            "sha1sum",
            description="check $in",
            command=f"touch $out",
        )
    # The cc pipelines: preprocess, convert to SHIFT-JIS, compile, assemble.
    ninja.rule(
        "cc",
        description="cc($version) $in",
        command=
        f"bash -o pipefail -c '{cpp} {CPPFLAGS} $in -o - | {iconv} | {BUILD_TOOLS}/{os_dir}/cc1 {CFLAGS} -o - | {BUILD_TOOLS}/{os_dir}/mips-nintendo-nu64-as -EB -G 0 - -o $out'",
        depfile="$out.d",
        deps="gcc",
    )
    ninja.rule(
        "cc_nusys",
        description="cc($version) $in",
        command=
        f"bash -o pipefail -c '{cpp} {CPPFLAGS} $in -o - | {iconv} | {BUILD_TOOLS}/{os_dir}/cc1 {CFLAGS_NUSYS} -o - | {BUILD_TOOLS}/{os_dir}/mips-nintendo-nu64-as -EB -G 0 - -o $out'",
        depfile="$out.d",
        deps="gcc",
    )
    ninja.rule(
        "cc_libultra",
        description="cc($version) $in",
        command=
        f"bash -o pipefail -c '{cpp} {CPPFLAGS} $in -o - | {iconv} | {BUILD_TOOLS}/{os_dir}/cc1 {CFLAGS_LIBULTRA} -o - | {BUILD_TOOLS}/{os_dir}/mips-nintendo-nu64-as -EB -G 0 - -o $out'",
        depfile="$out.d",
        deps="gcc",
    )
    # cc_dsl additionally runs the script-language compiler between cpp and cc1.
    ninja.rule(
        "cc_dsl",
        description="cc_dsl($version) $in",
        command=
        f"bash -o pipefail -c '{cpp} {CPPFLAGS} $in -o - | $python {BUILD_TOOLS}/cc_dsl/compile_script.py | {iconv} | {BUILD_TOOLS}/{os_dir}/cc1 {CFLAGS} -o - | {BUILD_TOOLS}/{os_dir}/mips-nintendo-nu64-as -EB -G 0 - -o $out'",
        depfile="$out.d",
        deps="gcc",
    )
    ninja.rule(
        "bin",
        description="bin $in",
        command=f"{cross}ld -r -b binary $in -o $out",
    )
    ninja.rule(
        "as",
        description="as $in",
        command=
        f"{cross}as -EB -march=vr4300 -mtune=vr4300 -Iinclude $in -o $out",
    )
    # Asset-conversion rules (all driven by build-tool python scripts).
    ninja.rule(
        "img",
        description="img($img_type) $in",
        command=
        f"$python {BUILD_TOOLS}/img/build.py $img_type $in $out $img_flags",
    )
    ninja.rule(
        "img_header",
        description="img_header $in",
        command=f"$python {BUILD_TOOLS}/img/header.py $in $out",
    )
    ninja.rule(
        "yay0",
        description="yay0 $in",
        command=f"{BUILD_TOOLS}/yay0/Yay0compress $in $out",
    )
    ninja.rule(
        "sprite",
        description="sprite $sprite_name",
        command=f"$python {BUILD_TOOLS}/sprites/sprite.py $out $sprite_dir",
    )
    ninja.rule(
        "sprite_combine",
        description="sprite_combine $in",
        command=f"$python {BUILD_TOOLS}/sprites/combine.py $out $in",
    )
    ninja.rule(
        "sprite_header",
        description="sprite_header $sprite_name",
        command=
        f"$python {BUILD_TOOLS}/sprites/header.py $out $sprite_dir $sprite_id",
    )
    ninja.rule(
        "msg",
        description="msg $in",
        command=f"$python {BUILD_TOOLS}/msg/parse_compile.py $in $out",
    )
    ninja.rule(
        "msg_combine",
        description="msg_combine $out",
        command=f"$python {BUILD_TOOLS}/msg/combine.py $out $in",
    )
    ninja.rule(
        "mapfs",
        description="mapfs $out",
        command=f"$python {BUILD_TOOLS}/mapfs/combine.py $out $in",
    )
    ninja.rule("map_header", command=f"$python {BUILD_TOOLS}/mapfs/map_header.py $in > $out")
def write_ninja(self, ninja: ninja_syntax.Writer, skip_outputs: Set[str]):
    """Emit all build edges for this configuration: one edge per splat
    linker entry (dispatched on segment type), then the elf/z64/checksum
    edges and the phony generated-headers target.

    :param ninja: writer for the ninja file
    :param skip_outputs: outputs already emitted by another configuration;
        updated in place so shared objects are only declared once
    """
    import segtypes
    import segtypes.n64.data  # Doesn't get imported on jp for some odd reason (should maybe be a * import?)

    assert self.linker_entries is not None

    built_objects = set()
    generated_headers = []

    def build(object_paths: Union[Path, List[Path]],
              src_paths: List[Path],
              task: str,
              variables: Dict[str, str] = {}):
        # Emit one edge producing object_paths from src_paths via `task`,
        # unless every output was already declared (skip_outputs).
        if not isinstance(object_paths, list):
            object_paths = [object_paths]
        object_strs = [str(obj) for obj in object_paths]
        needs_build = False
        for object_path in object_paths:
            if object_path.suffixes[-1] == ".o":
                built_objects.add(str(object_path))
            elif object_path.suffixes[-1] == ".h":
                generated_headers.append(str(object_path))
            # don't rebuild objects if we've already seen all of them
            if not str(object_path) in skip_outputs:
                needs_build = True
        if needs_build:
            skip_outputs.update(object_strs)
            implicit = []
            order_only = []
            if task == "yay0":
                # The compressor binary itself is a dependency.
                implicit.append(YAY0_COMPRESS_TOOL)
            elif task in ["cc", "cc_dsl", "cc_nusys", "cc_libultra"]:
                # C compiles must wait for generated headers to exist.
                order_only.append("generated_headers_" + self.version)
            ninja.build(
                object_strs,  # $out
                task,
                self.resolve_src_paths(src_paths),  # $in
                variables={"version": self.version, **variables},
                implicit=implicit,
                order_only=order_only,
            )

    # Build objects
    for entry in self.linker_entries:
        seg = entry.segment
        if isinstance(seg, segtypes.n64.header.N64SegHeader):
            build(entry.object_path, entry.src_paths, "as")
        elif isinstance(seg, segtypes.n64.asm.N64SegAsm) or (
                isinstance(seg, segtypes.n64.data.N64SegData)
                and not seg.type[0] == "."):
            build(entry.object_path, entry.src_paths, "as")
        elif isinstance(seg, segtypes.n64.c.N64SegC) or (isinstance(
                seg, segtypes.n64.data.N64SegData) and seg.type[0] == "."):
            # Pick the cc pipeline from the source's location/content.
            task = "cc"
            if "nusys" in entry.src_paths[0].parts:
                task = "cc_nusys"
            elif "os" in entry.src_paths[0].parts:
                task = "cc_libultra"
            else:
                with entry.src_paths[0].open() as f:
                    s = f.read()
                    if "SCRIPT(" in s or "#pragma SCRIPT" in s:
                        task = "cc_dsl"
            build(entry.object_path, entry.src_paths, task)
        elif isinstance(seg, segtypes.n64.bin.N64SegBin):
            build(entry.object_path, entry.src_paths, "bin")
        elif isinstance(seg, segtypes.n64.Yay0.N64SegYay0):
            compressed_path = entry.object_path.with_suffix("")  # remove .o
            build(compressed_path, entry.src_paths, "yay0")
            build(entry.object_path, [compressed_path], "bin")
        elif isinstance(seg, segtypes.n64.img.N64SegImg):
            flags = ""
            if seg.flip_horizontal:
                flags += "--flip-x "
            if seg.flip_vertical:
                flags += "--flip-y "
            build(entry.object_path.with_suffix(".bin"), entry.src_paths,
                  "img",
                  variables={
                      "img_type": seg.type,
                      "img_flags": flags,
                  })
            build(entry.object_path, [entry.object_path.with_suffix(".bin")],
                  "bin")
            build(
                self.build_path() / "include" / seg.dir / (seg.name + ".png.h"),
                entry.src_paths, "img_header")
        elif isinstance(seg, segtypes.n64.palette.N64SegPalette):
            build(entry.object_path.with_suffix(".bin"), entry.src_paths,
                  "img",
                  variables={
                      "img_type": seg.type,
                      "img_flags": "",
                  })
            build(entry.object_path, [entry.object_path.with_suffix(".bin")],
                  "bin")
        elif seg.type == "PaperMarioNpcSprites":
            # Each sprite dir becomes a .bin + .Yay0 pair plus a header.
            sprite_yay0s = []
            for sprite_id, sprite_dir in enumerate(entry.src_paths, 1):
                sprite_name = sprite_dir.name
                bin_path = entry.object_path.with_suffix("") / (sprite_name + ".bin")
                yay0_path = bin_path.with_suffix(".Yay0")
                sprite_yay0s.append(yay0_path)
                variables = {
                    "sprite_id": sprite_id,
                    "sprite_name": sprite_name,
                    "sprite_dir": str(self.resolve_asset_path(sprite_dir)),
                }
                build(bin_path, [sprite_dir], "sprite", variables=variables)
                build(yay0_path, [bin_path], "yay0")
                build(
                    self.build_path() / "include" / seg.dir / seg.name / (sprite_name + ".h"),
                    [sprite_dir],
                    "sprite_header",
                    variables=variables,
                )
            build(entry.object_path.with_suffix(".bin"), sprite_yay0s,
                  "sprite_combine")
            build(entry.object_path, [entry.object_path.with_suffix(".bin")],
                  "bin")
        elif seg.type == "PaperMarioMessages":
            msg_bins = []
            for section_idx, msg_path in enumerate(entry.src_paths):
                bin_path = entry.object_path.with_suffix("") / f"{section_idx:02X}.bin"
                msg_bins.append(bin_path)
                build(bin_path, [msg_path], "msg")
            build([
                entry.object_path.with_suffix(".bin"),
                self.build_path() / "include" / "message_ids.h",
            ], msg_bins, "msg_combine")
            build(entry.object_path, [entry.object_path.with_suffix(".bin")],
                  "bin")
        elif seg.type == "PaperMarioMapFS":
            bin_yay0s: List[Path] = []  # flat list of (uncompressed path, compressed? path) pairs
            for path in entry.src_paths:
                name = path.stem
                bin_path = entry.object_path.with_suffix("").with_suffix("") / f"{name}.bin"
                if name.startswith("party_"):
                    compress = True
                    build(bin_path, [path], "img",
                          variables={
                              "img_type": "party",
                              "img_flags": "",
                          })
                elif name.endswith("_bg"):
                    compress = True
                    build(bin_path, [path], "img",
                          variables={
                              "img_type": "bg",
                              "img_flags": "",
                          })
                elif name.endswith("_tex"):
                    compress = False
                    bin_path = path
                elif name.endswith("_shape"):
                    map_name = "_".join(name.split("_")[:-1])
                    # Handle map XML files, if they exist (TODO: have splat output these)
                    map_xml = self.resolve_asset_path(
                        Path(f"assets/{self.version}") / seg.dir / seg.name / (map_name + ".xml"))
                    if map_xml.exists():
                        # Build a header file for this map
                        build(
                            self.build_path() / "include" / seg.dir / seg.name / (map_name + ".h"),
                            [map_xml],
                            "map_header",
                        )
                    # NOTE: we don't build the map xml into a _shape or _hit file (yet); the Star Rod Map Editor
                    # is able to build the xml nonmatchingly into assets/star_rod_build/mapfs/*.bin for people
                    # who want that (i.e. modders). 'star_rod_build' should be added to asset_stack also.
                    compress = True
                    bin_path = path
                else:
                    compress = True
                    bin_path = path
                if compress:
                    yay0_path = bin_path.with_suffix(".Yay0")
                    build(yay0_path, [bin_path], "yay0")
                else:
                    yay0_path = bin_path
                bin_yay0s.append(bin_path)
                bin_yay0s.append(yay0_path)
            # combine
            build(entry.object_path.with_suffix(""), bin_yay0s, "mapfs")
            build(entry.object_path, [entry.object_path.with_suffix("")],
                  "bin")
        else:
            raise Exception(
                f"don't know how to build {seg.__class__.__name__} '{seg.name}'"
            )

    # Build elf, z64, ok
    ninja.build(
        str(self.elf_path()),
        "ld",
        str(self.linker_script_path()),
        implicit=[str(obj) for obj in built_objects],
        variables={
            "version": self.version,
            "mapfile": str(self.map_path())
        },
    )
    ninja.build(
        str(self.rom_path()),
        "z64",
        str(self.elf_path()),
        implicit=[CRC_TOOL],
    )
    ninja.build(
        str(self.rom_ok_path()),
        "sha1sum",
        f"ver/{self.version}/checksum.sha1",
        implicit=[str(self.rom_path())],
    )
    ninja.build("generated_headers_" + self.version, "phony", generated_headers)
def ninjaCommonHeader(cursor: "Writer", ag: "Any") -> None:
    '''
    Write the common ninja header (variables + rules) shared by every
    generated ninja file.

    Args:
        cursor: a ninja_syntax.Writer (string annotation keeps the name
            lazily resolved; any object with comment/newline/variable/rule
            methods works).
        ag: parsed argparse namespace. Not read here; kept for signature
            compatibility with callers.

    Returns:
        None; all output goes through `cursor`.
    '''
    cursor.comment('-- start common ninja header --')
    cursor.comment(f'Note, this ninja file was automatically generated by {__file__}')
    cursor.newline()
    cursor.comment('-- compiling tools --')
    cursor.newline()
    cursor.variable('CXX', 'g++')
    cursor.variable('PROTOC', '/usr/bin/protoc')
    cursor.variable('PROTO_TEXT', './proto_text')
    # used for adding specific flags for a specific target
    cursor.variable('SHOGUN_EXTRA', '')
    cursor.newline()
    cursor.comment('-- compiler flags --')
    cursor.newline()
    # honour flags already present in the environment
    cursor.variable('CPPFLAGS', '-D_FORTIFY_SOURCE=2 ' + str(os.getenv('CPPFLAGS', '')))
    cursor.variable('CXXFLAGS', '-std=c++14 -O2 -pipe -fPIC -gsplit-dwarf -DNDEBUG'
                    + ' -fstack-protector-strong -w ' + str(os.getenv('CXXFLAGS', '')))
    cursor.variable('LDFLAGS', '-Wl,-z,relro -Wl,-z,now ' + str(os.getenv('LDFLAGS', '')))
    cursor.variable('INCLUDES', '-I. -I./debian/embedded/eigen3 -I./third_party/eigen3/'
                    + ' -I/usr/include/gemmlowp -I/usr/include/llvm-c-7'
                    + ' -I/usr/include/llvm-7 -Ithird_party/toolchains/gpus/cuda/')
    cursor.variable('LIBS', '-lpthread -lprotobuf -lnsync -lnsync_cpp -ldouble-conversion'
                    + ' -ldl -lm -lz -lre2 -ljpeg -lpng -lsqlite3 -llmdb -lsnappy -lgif -lLLVM-7')
    cursor.newline()
    cursor.comment('-- compiling rules-- ')
    # FIX: dropped the spurious f-prefixes on these constant strings —
    # '$in'/'$out' are ninja placeholders, not Python interpolations.
    cursor.rule('rule_PROTOC', '$PROTOC $in --cpp_out . $SHOGUN_EXTRA')
    cursor.rule('rule_PROTOC_GRPC', '$PROTOC --grpc_out . --cpp_out . --plugin protoc-gen-grpc=/usr/bin/grpc_cpp_plugin $in')
    cursor.rule('rule_PROTO_TEXT', '$PROTO_TEXT tensorflow/core tensorflow/core tensorflow/tools/proto_text/placeholder.txt $in')
    cursor.rule('rule_CXX_OBJ', '$CXX $CPPFLAGS $CXXFLAGS $INCLUDES $SHOGUN_EXTRA -c $in -o $out')
    cursor.rule('rule_CXX_EXEC', '$CXX $CPPFLAGS $CXXFLAGS $INCLUDES $LDFLAGS $LIBS $SHOGUN_EXTRA $in -o $out')
    cursor.rule('rule_CXX_SHLIB', '$CXX -shared -fPIC $CPPFLAGS $CXXFLAGS $INCLUDES $LDFLAGS $LIBS $SHOGUN_EXTRA $in -o $out')
    cursor.rule('rule_CC_OP_GEN', 'LD_LIBRARY_PATH=. ./$in $out $cc_op_gen_internal tensorflow/core/api_def/base_api')
    cursor.rule('COPY', 'cp $in $out')
    cursor.newline()
    cursor.comment('-- end common ninja header --')
    cursor.newline()
def shogunTFCCLib(argv):
    '''
    Build libtensorflow_cc.so
    Depends: all_proto, proto_text, CCOP
    Input: bazel dump, source files
    Output: libtensorflow_cc.so

    Args:
        argv: command-line argument list (without the program name).
    '''
    ag = argparse.ArgumentParser()
    ag.add_argument('-i', help='list of source files', type=str, required=True)
    ag.add_argument('-g', help='list of generated files', type=str, required=True)
    ag.add_argument('-o', help='where to write the ninja file', type=str,
                    default='libtensorflow_cc.ninja')
    ag.add_argument('-H', help='where to put the headers list', type=str,
                    default='libtensorflow_cc.hdrs')
    ag = ag.parse_args(argv)
    print(red('Argument Dump:'))
    pprint(vars(ag))

    # (0) read bazel dump and apply hard-coded filters
    # FIX: open the two input lists via context managers so the handles
    # are closed deterministically.
    with open(ag.i, 'r') as fi:
        srclist = bazelPreprocess([l.strip() for l in fi.readlines()])
    with open(ag.g, 'r') as fg:
        genlist = bazelPreprocess([l.strip() for l in fg.readlines()])
    tflib_extra_srcs = ['debian/embedded/fft/fftsg.c']
    _, srclist = eGrep('^third_party', srclist)
    _, srclist = eGrep('.*/windows/.*', srclist)  # no windows source
    _, srclist = eGrep('.*.cu.cc$', srclist)  # no CUDA file for CPU-only build
    _, srclist = eGrep('.*.pbtxt$', srclist)  # not for us
    _, srclist = eGrep('.*platform/cloud.*', srclist)  # SSL 1.1.1 broke it.
    _, srclist = eGrep('.*platform/s3.*', srclist)  # we don't have the aws sdk
    _, srclist = eGrep('.*_main.cc$', srclist)  # don't include any main function.
    _, srclist = eGrep('.*test.*', srclist)  # don't include any test
    _, srclist = eGrep('.*cc_op_gen.*', srclist)  # don't include cc_op_gen.
    _, srclist = eGrep('.*gen_proto_text_functions.cc', srclist)  # not for this library
    _, srclist = eGrep('.*tensorflow.contrib.cloud.*', srclist)  # it wants GoogleAuthProvider etc.
    _, srclist = eGrep('.*gcs_config_ops.cc', srclist)  # it wants GcsFileSystem
    srclist = list(set(srclist))
    if getDpkgArchitecture('DEB_HOST_ARCH') != 'amd64':
        # they FTBFS on non-amd64 arches
        _, srclist = eGrep('.*/core/debug/.*', srclist)
        _, genlist = eGrep('.*/core/debug/.*', genlist)
        _, srclist = eGrep('.*debug_ops.*', srclist)
        _, genlist = eGrep('.*debug_ops.*', genlist)

    # (1) Instantiate ninja writer
    cursor = Writer(open(ag.o, 'w'))
    ninjaCommonHeader(cursor, ag)

    # (2) deal with generated files
    # (2.1) .pb.h and .pb.cc are already generated by shogunAllProto
    gen_pbh, genlist = eGrep('.*.pb.h', genlist)
    gen_pbcc, genlist = eGrep('.*.pb.cc', genlist)

    # XXX: temporary workaround for //tensorflow/core/debug:debug_service.grpc.pb.cc
    if getDpkgArchitecture('DEB_HOST_ARCH') == 'amd64':  # This is amd64-only
        cursor.build(['tensorflow/core/debug/debug_service.grpc.pb.cc',
                      'tensorflow/core/debug/debug_service.grpc.pb.h'],
                     'rule_PROTOC_GRPC',
                     inputs='tensorflow/core/debug/debug_service.proto')

    # (2.2) .pb_text.* — map every generated pb_text file back to its .proto
    pbtlist = [x for x in genlist if any(x.endswith(y) for y in
               ('.pb_text.h', '.pb_text.cc', '.pb_text-impl.h'))]
    pbtlist = [x.replace('.pb_text.h', '.proto').replace('.pb_text.cc', '.proto')
               .replace('.pb_text-impl.h', '.proto') for x in pbtlist]
    gen_pbth, genlist = eGrep('.*.pb_text.h', genlist)
    gen_pbtih, genlist = eGrep('.*.pb_text-impl.h', genlist)
    gen_pbtcc, genlist = eGrep('.*.pb_text.cc', genlist)
    for pbt in list(set(pbtlist)):
        cursor.build([pbt.replace('.proto', '.pb_text.h'),
                      pbt.replace('.proto', '.pb_text.cc'),
                      pbt.replace('.proto', '.pb_text-impl.h')],
                     'rule_PROTO_TEXT', pbt)

    # (2.3) cc_op_gen outputs are consumed as plain sources below
    gen_ccopcc, genlist = eGrep('.*/cc/ops/.*.cc', genlist)
    gen_ccoph, genlist = eGrep('.*/cc/ops/.*.h', genlist)

    # (2.4) finish dealing with generated files
    if genlist:
        print(yellow('Remainders:'), genlist)
        assert(len(genlist) == 1)

    # (3) deal with source files
    # (3.1) filter-out headers
    _, srclist = eGrep('.*.proto$', srclist)  # done in (2)
    src_hdrs, srclist = eGrep('.*.h$', srclist)

    # (3.2) compile .cc source
    src_cc, srclist = eGrep('.*.cc', srclist)
    objlist = []
    # these objects must be built without EIGEN std::array, else FTBFS
    exception_eigen_avoid_std_array = [
        'sparse_tensor_dense_matmul_op', 'conv_grad_ops_3d', 'adjust_contrast_op'
    ]
    for cc in src_cc + gen_pbcc + gen_pbtcc + gen_ccopcc + genlist + tflib_extra_srcs:
        # FIX: this branch was a dangling `elif` with no preceding `if`
        # (a syntax error); restored as a plain `if` overriding the default.
        variables = {}
        if any(x in cc for x in exception_eigen_avoid_std_array):
            variables = {'SHOGUN_EXTRA': '-DEIGEN_AVOID_STL_ARRAY'}
        obj = cursor.build(re.sub('.c[c]?$', '.o', cc), 'rule_CXX_OBJ',
                           inputs=cc, variables=variables)[0]
        objlist.append(obj)

    # done — flush and close the ninja file (FIX: was never closed)
    cursor.close()
def shogunCCOP(argv):
    '''
    Generate tensorflow cc ops : tensorflow/cc/ops/*.cc and *.h
    Depends: AllProto, proto_text, libtensorflow_framework
    Input: cc source, bazel dump
    Output: one-time-use binary "XXX_gen_cc" and generated .cc .h files.

    Args:
        argv: command-line argument list (without the program name).

    Side effects: writes a ninja file to the path given by -o.
    Note: the -i argument is declared but its value is not read in this
    function; only the generated-files list (-g) is consumed.
    '''
    ag = argparse.ArgumentParser()
    ag.add_argument('-i', help='list of source files', type=str, required=True)
    ag.add_argument('-g', help='list of generated files', type=str, required=True)
    ag.add_argument('-o', help='where to write the ninja file', type=str, default='ccop.ninja')
    ag = ag.parse_args(argv)
    print(red('Argument Dump:'))
    pprint(vars(ag))

    # (0) read bazel dump and apply hardcoded filters
    genlist = bazelPreprocess([l.strip() for l in open(ag.g, 'r').readlines()])

    # (1) Instantiate ninja writer
    cursor = Writer(open(ag.o, 'w'))
    ninjaCommonHeader(cursor, ag)

    # (2) filter unrelated files, we only want cc_op related files.
    # (the matched files are discarded — they are handled by other stages)
    _, genlist = eGrep('.*.pb.h', genlist)
    _, genlist = eGrep('.*.pb.cc', genlist)
    _, genlist = eGrep('.*.pb_text.h', genlist)
    _, genlist = eGrep('.*.pb_text-impl.h', genlist)
    _, genlist = eGrep('.*.pb_text.cc', genlist)

    # (3) XXX_gen_cc
    # (3.1) deal with a missing source: user_ops.cc is a copy of fact.cc
    cursor.build('tensorflow/core/ops/user_ops.cc', 'COPY',
                 inputs='tensorflow/core/user_ops/fact.cc')

    # (3.2) build several common objects shared by every op generator binary
    main_cc = ['tensorflow/core/framework/op_gen_lib.cc',
               'tensorflow/cc/framework/cc_op_gen.cc',
               'tensorflow/cc/framework/cc_op_gen_main.cc',
               ]
    main_obj = [x.replace('.cc', '.o') for x in main_cc]
    for cc in main_cc:
        cursor.build(cc.replace('.cc', '.o'), 'rule_CXX_OBJ', inputs=cc)

    # (3.2) build executables and generate file with executable.
    # Derive the set of op names from the generated .cc/.h basenames,
    # skipping the *_internal variants (they are implicit outputs below).
    gen_ccopcc, genlist = eGrep('.*/cc/ops/.*.cc', genlist)
    gen_ccoph, genlist = eGrep('.*/cc/ops/.*.h', genlist)
    opnamelist = list(set(os.path.basename(x.replace('.cc', '').replace('.h', ''))
                          for x in (gen_ccopcc + gen_ccoph) if 'internal' not in x))
    for opname in opnamelist:
        coreopcc = 'tensorflow/core/ops/' + opname + '.cc'
        ccopcc = 'tensorflow/cc/ops/' + opname + '.cc'
        # build corresponding elf executable
        cursor.build(f'{opname}_gen_cc', 'rule_CXX_EXEC',
                     inputs=[coreopcc] + main_obj,
                     variables={'SHOGUN_EXTRA': '-I. -L. -ltf_ccop'})
        # generate file; sendrecv_ops is the one op that needs the
        # "internal" flag set to 1 (see rule_CC_OP_GEN's $cc_op_gen_internal)
        cursor.build([ccopcc.replace('.cc', '.h'), ccopcc], 'rule_CC_OP_GEN',
                     inputs=f'{opname}_gen_cc',
                     variables={'cc_op_gen_internal': '0' if opname != 'sendrecv_ops' else '1'},
                     implicit_outputs=[ccopcc.replace('.cc', '_internal.h'),
                                       ccopcc.replace('.cc', '_internal.cc')])

    ## done
    cursor.close()
def _generate_ninja_file(app_name, compiler_path, linker_path, archiver_path, compiler_flags, linker_flags, app_source_dirs, project_root, build_dir, output_dir, lib_cs_path, lib_app_path):
    """
    Write <build_dir>/Application.ninja: compile/dependency/archive/link
    rules, per-source compile commands for the ChilliSource engine and the
    application, the two static libraries, and the final executable link.
    """
    ninja_path = os.path.join(build_dir, "Application.ninja")
    with open(ninja_path, "w") as out_stream:
        writer = Writer(out_stream)
        writer.variable(key="builddir", value=build_dir)

        # Compiler rule shared by c, cpp and cc sources.
        writer.rule("compile",
                    command=f"{compiler_path} {compiler_flags} -o $out $in",
                    description="Compiling source: $in",
                    depfile="$out.o.d",
                    deps="gcc")

        # Rule that generates the dependency files.
        writer.rule("dependencies",
                    command=f"{compiler_path} {compiler_flags} -MM -MG -MF $out $in",
                    description="Generating dependency: $in")

        # Static-library rule. Response files are used because on Windows
        # the full command line would exceed the CreateProcess limit.
        writer.rule("archive",
                    command=f"{archiver_path} rcs $out @$out.rsp",
                    description="Building static library: $out",
                    rspfile="$out.rsp",
                    rspfile_content="$in")

        # Link rule — response file for the same Windows command-length reason.
        writer.rule("link",
                    command=f"{linker_path} @$out.rsp {linker_flags} -o $out",
                    description="Linking: $out",
                    rspfile="$out.rsp",
                    rspfile_content="$in")

        # Compile commands for the engine sources and the app sources.
        engine_dirs = [
            os.path.normpath(f"{project_root}/ChilliSource/Source/{sub}")
            for sub in ("ChilliSource",
                        "CSBackend/Platform/RPi/",
                        "CSBackend/Rendering/OpenGL/")
        ]
        engine_objs = _write_build_command(writer, engine_dirs, 'c,cpp,cc',
                                           'compile', 'dependencies',
                                           project_root, build_dir)
        app_objs = _write_build_command(writer, app_source_dirs, 'c,cpp,cc',
                                        'compile', 'dependencies',
                                        project_root, build_dir)

        # Static libraries for ChilliSource and the application.
        writer.build(rule="archive", inputs=engine_objs, outputs=lib_cs_path)
        writer.build(rule="archive", inputs=app_objs, outputs=lib_app_path)

        # Link every object into the final executable.
        writer.build(rule="link", inputs=engine_objs + app_objs,
                     outputs=os.path.join(output_dir, app_name))
#!/usr/bin/env python3
"""Generate a minimal build.ninja that compiles and links hello.c with clang."""
from ninja_syntax import Writer

with open("build.ninja", "w") as buildfile:
    n = Writer(buildfile)
    n.variable("cc", "clang")
    n.variable("cflags", "-Weverything")
    n.rule("compile",
           command="$cc $cflags -c $in -o $out",
           description="Compiling object file $out")
    n.rule("link",
           command="$cc $in -o $out",
           description="Linking executable $out")
    # FIX: rule name and input were previously smuggled in a single string
    # ("compile hello.c"), which bypasses ninja_syntax's input escaping.
    # Pass the rule and the inputs as separate arguments.
    n.build("hello.o", "compile", "hello.c")
    n.build("hello", "link", "hello.o")
    n.default("hello")
inputs=sorted([tarball] + patches if tarball else collect_files(srcdir)), variables=OrderedDict([ ('name', prj), ('configure', configure), ('configure_flags', all_flags), ('srcdir', '$top_srcdir/%s' % (srcdir if srcdir else prj)), ('destdir', '$builddir/%s' % prj), ('workdir', '$intdir/%s' % prj), ])) n.build([prj], 'phony', target) return target with open('build.ninja', 'w') as buildfile: n = Writer(buildfile, width=20) n.comment('generated by %s' % sys.argv[0]) n.include('build.vars.ninja') n.rule( 'download', description='Downloading $url...', pool='console', command= 'curl -o $out $url && test "$sha256" = `shasum -a256 $out | cut -f1 -d" "`' ) targets = [] n.build( ['tarballs/%s' % GETTEXT_TARBALL],
PROJECT_NAME = 'toy' if sys.platform != 'win32' else 'toy.exe' CC = 'g++' if sys.platform != 'win32' else 'cl' CFLAGS = [] if sys.platform == 'win32': CFLAGS = ['-O2', '-EHsc', '-Zo', '/fp:fast', '-Iinclude'] else: CFLAGS = ['-std=c++11', '-O3', '-pthread', '-march=native', '-Iinclude'] SRC_DIR = 'src' BUILD_DIR = 'build' BIN_DIR = 'bin' with open('build.ninja', 'w') as build_file: n = Writer(build_file) n.comment('THIS FILE IS GENERATED BY configure.py') n.comment('EDITS WILL BE OVERWRITTEN') n.newline() ############################################################################ # VARIABLES ############################################################################ n.variable(key='ninja_required_version', value='1.9') if sys.platform == 'win32': n.variable(key='msvc_deps_prefix', value='Note: including file:') n.variable(key='cc', value=CC)
def write_ninja_rules(ninja: ninja_syntax.Writer, cpp: str, cppflags: str, extra_cflags: str, use_ccache: bool, non_matching: bool, debug: bool):
    """Emit every ninja rule used by the papermario build (link, rom,
    compilers, asset converters) and write tools/permuter_settings.toml.

    Args:
        ninja: writer for the output build.ninja.
        cpp: the C preprocessor command to use.
        cppflags: extra preprocessor flags appended to each compile rule.
        extra_cflags: extra compiler flags folded into cflags/cflags_272.
        use_ccache: prefix gcc/g++/ido invocations with ccache if available.
        non_matching: not read in this function (kept for caller compatibility).
        debug: also emit the "genobjcopy" rule and pass an objcopy section
            list to the "z64" rule.
    """
    # platform-specific: source text is UTF-8 but the target compilers want
    # SHIFT-JIS, so pick an iconv implementation per host platform
    if sys.platform == "darwin":
        iconv = "tools/iconv.py UTF-8 SHIFT-JIS"
    elif sys.platform == "linux":
        iconv = "iconv --from UTF-8 --to SHIFT-JIS"
    else:
        raise Exception(f"unsupported platform {sys.platform}")

    # enable ccache only when requested AND the binary actually exists
    ccache = ""
    if use_ccache:
        ccache = "ccache "
        try:
            subprocess.call(["ccache"], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
        except FileNotFoundError:
            ccache = ""

    # cross toolchain and the various vintage compilers used by the project
    cross = "mips-linux-gnu-"
    cc = f"{BUILD_TOOLS}/cc/gcc/gcc"
    cc_ido = f"{BUILD_TOOLS}/cc/ido5.3/cc"
    cc_272_dir = f"{BUILD_TOOLS}/cc/gcc2.7.2/"
    cc_272 = f"{cc_272_dir}/gcc"
    cxx = f"{BUILD_TOOLS}/cc/gcc/g++"

    # $version is expanded by ninja per-build, not by Python
    CPPFLAGS_COMMON = "-Iver/$version/build/include -Iinclude -Isrc -Iassets/$version -D_LANGUAGE_C -D_FINALROM " \
                      "-DVERSION=$version -DF3DEX_GBI_2 -D_MIPS_SZLONG=32"

    CPPFLAGS = "-w " + CPPFLAGS_COMMON + " -nostdinc"

    CPPFLAGS_272 = "-Iver/$version/build/include -Iinclude -Isrc -Iassets/$version -D_LANGUAGE_C -D_FINALROM " \
                   "-DVERSION=$version -DF3DEX_GBI_2 -D_MIPS_SZLONG=32 -nostdinc"

    cflags = f"-c -G0 -O2 -fno-common -B {BUILD_TOOLS}/cc/gcc/ {extra_cflags}"
    cflags_272 = f"-c -G0 -mgp32 -mfp32 -mips3 {extra_cflags}"
    # gcc 2.7.2 does not understand -ggdb3; downgrade to -g1
    cflags_272 = cflags_272.replace("-ggdb3","-g1")

    ninja.variable("python", sys.executable)

    # final link: several undefined-symbol lists plus the generated script
    ninja.rule("ld",
        description="link($version) $out",
        command=f"{cross}ld -T ver/$version/build/undefined_syms.txt -T ver/$version/undefined_syms_auto.txt -T ver/$version/undefined_funcs_auto.txt -Map $mapfile --no-check-sections -T $in -o $out",
    )

    objcopy_sections = ""
    if debug:
        ninja.rule("genobjcopy",
            description="generate $out",
            command=f"$python {BUILD_TOOLS}/genobjcopy.py $in $out",
        )
        objcopy_sections = "@ver/$version/build/objcopy_sections.txt "

    # elf -> raw rom, then patch the checksum in place
    ninja.rule("z64",
        description="rom $out",
        command=f"{cross}objcopy {objcopy_sections} $in $out -O binary && {BUILD_TOOLS}/rom/n64crc $out",
    )

    # rom verification; degenerates to a touch when checking is disabled
    ninja.rule("sha1sum",
        description="check $in",
        command="sha1sum -c $in && touch $out" if DO_SHA1_CHECK else "touch $out",
    )

    ninja.rule("cpp",
        description="cpp $in",
        command=f"{cpp} $in {cppflags} -P -o $out"
    )

    # preprocess, re-encode to SHIFT-JIS, then compile the .i file
    ninja.rule("cc",
        description="gcc $in",
        command=f"bash -o pipefail -c '{cpp} {CPPFLAGS} {cppflags} $cppflags -MD -MF $out.d $in -o - | {iconv} > $out.i && {ccache}{cc} {cflags} $cflags $out.i -o $out'",
        depfile="$out.d",
        deps="gcc",
    )

    ninja.rule("cc_ido",
        description="ido $in",
        command=f"{ccache}{cc_ido} -w {CPPFLAGS_COMMON} {cppflags} $cppflags -c -mips1 -O0 -G0 -non_shared -Xfullwarn -Xcpluscomm -o $out $in",
    )

    ninja.rule("cc_272",
        description="cc_272 $in",
        command=f"bash -o pipefail -c 'COMPILER_PATH={cc_272_dir} {cc_272} {CPPFLAGS_272} {cppflags} $cppflags {cflags_272} $cflags $in -o $out && mips-linux-gnu-objcopy -N $in $out'",
    )

    ninja.rule("cxx",
        description="cxx $in",
        command=f"bash -o pipefail -c '{cpp} {CPPFLAGS} {cppflags} $cppflags -MD -MF $out.d $in -o - | {iconv} > $out.i && {ccache}{cxx} {cflags} $cflags $out.i -o $out'",
        depfile="$out.d",
        deps="gcc",
    )

    # wrap a raw binary into a linkable object
    ninja.rule("bin",
        description="bin $in",
        command=f"{cross}ld -r -b binary $in -o $out",
    )

    ninja.rule("as",
        description="as $in",
        command=f"{cross}as -EB -march=vr4300 -mtune=vr4300 -Iinclude $in -o $out",
    )

    # asset conversion rules below all dispatch to python helpers
    ninja.rule("img",
        description="img($img_type) $in",
        command=f"$python {BUILD_TOOLS}/img/build.py $img_type $in $out $img_flags",
    )

    ninja.rule("img_header",
        description="img_header $in",
        command=f"$python {BUILD_TOOLS}/img/header.py $in $out $c_name",
    )

    ninja.rule("bin_inc_c",
        description="bin_inc_c $out",
        command=f"$python {BUILD_TOOLS}/bin_inc_c.py $in $out $c_name",
    )

    ninja.rule("yay0",
        description="yay0 $in",
        command=f"{BUILD_TOOLS}/yay0/Yay0compress $in $out",
    )

    ninja.rule("sprite",
        description="sprite $sprite_name",
        command=f"$python {BUILD_TOOLS}/sprites/sprite.py $out $sprite_dir",
    )

    ninja.rule("sprite_combine",
        description="sprite_combine $in",
        command=f"$python {BUILD_TOOLS}/sprites/combine.py $out $in",
    )

    ninja.rule("sprite_header",
        description="sprite_header $sprite_name",
        command=f"$python {BUILD_TOOLS}/sprites/header.py $out $sprite_dir $sprite_id",
    )

    ninja.rule("msg",
        description="msg $in",
        command=f"$python {BUILD_TOOLS}/msg/parse_compile.py $in $out",
    )

    ninja.rule("msg_combine",
        description="msg_combine $out",
        command=f"$python {BUILD_TOOLS}/msg/combine.py $out $in",
    )

    ninja.rule("mapfs",
        description="mapfs $out",
        command=f"$python {BUILD_TOOLS}/mapfs/combine.py $version $out $in",
    )

    ninja.rule("pack_title_data",
        description="pack_title_data $out",
        command=f"$python {BUILD_TOOLS}/mapfs/pack_title_data.py $out $in",
    )

    ninja.rule("map_header", command=f"$python {BUILD_TOOLS}/mapfs/map_header.py $in > $out")

    ninja.rule("pm_charset", command=f"$python {BUILD_TOOLS}/pm_charset.py $out $in")

    ninja.rule("pm_charset_palettes", command=f"$python {BUILD_TOOLS}/pm_charset_palettes.py $out $in")

    # settings consumed by the decomp permuter / decomp.me tooling;
    # hard-codes the "us" version in the compiler command
    with Path("tools/permuter_settings.toml").open("w") as f:
        f.write(f"compiler_command = \"{cc} {CPPFLAGS.replace('$version', 'us')} {cflags} -DPERMUTER -fforce-addr\"\n")
        f.write(f"assembler_command = \"{cross}as -EB -march=vr4300 -mtune=vr4300 -Iinclude\"\n")
        f.write(
            """
[preserve_macros]
"gs?[DS]P.*" = "void"
OVERRIDE_FLAG_CHECK = "int"
OS_K0_TO_PHYSICAL = "int"
"G_.*" = "int"
"TEXEL.*" = "int"
PRIMITIVE = "int"

[decompme.compilers]
"tools/build/cc/gcc/gcc" = "gcc2.8.1"
""")
mkdir_if_ne(os.path.join(build_dir, "finished")) ## Collect the sources list sources = reduce(lambda a,b : a+b, [filter(lambda d : check_source_ext(d), os.listdir(source_dir)) for source_dir in source_dirs]) ## Identify toplevel design file topfile = find_top(sources) open(bdir(project_file), "w").write( "\n".join(["%s %s %s" % (source_type(source), default_lib, source) for source in sources]) + "\n") with open("build.ninja", "w") as buildfile: n = Writer(buildfile) n.variable("builddir", build_dir) n.variable("const", constraint_file) n.variable("device", device) n.variable("opt_level", "1") n.variable("opt_mode", "Speed") n.variable("global_opt", "speed") n.variable("package", package) n.variable("prjfile", project_file) n.variable("speed", speed) n.variable("topfile", os.path.splitext(topfile)[0]) n.rule("cpbuild", "cp $in $out") n.rule("genscript", "echo \"run -ifn $prjfile -ifmt mixed -top $topfile " + "-ofn design.ngc -ofmt NGC -p ${device}-${speed}-" +
return None # Create working directories mkdir_if_ne(build_dir) ## Collect the sources list sources = reduce(lambda a, b: a + b, [ filter(filter_source, os.listdir(source_dir)) for source_dir in source_dirs ]) ## Identify toplevel design file topfile = find_top(sources) with open("build.ninja", "w") as buildfile: n = Writer(buildfile) n.variable("builddir", build_dir) n.variable("const", constraint_file) n.variable("device", device) n.variable("device_type", device_type) n.variable("package", package) n.variable("clock_constraint_mhz", clock_constraint_mhz) n.variable("topfile", topfile) n.variable("top_module", os.path.splitext(topfile)[0]) n.variable("sources", ' '.join(sources)) n.rule("cpbuild", "cp $in $out") n.rule( "synthesize",
def write_ninja(self, ninja: ninja_syntax.Writer, skip_outputs: Set[str], non_matching: bool, debug: bool):
    """Emit all build edges for this version: one edge per linker entry
    (objects, converted assets), then the elf/z64/checksum edges and the
    generated-headers phony target.

    Args:
        ninja: writer for the output build.ninja.
        skip_outputs: outputs already emitted by a previous call; used to
            de-duplicate edges and updated in place.
        non_matching: not read in this function (kept for caller compatibility).
        debug: also emit the objcopy-section edge and feed it to the link.
    """
    import segtypes
    import segtypes.common.data
    import segtypes.n64.Yay0

    assert self.linker_entries is not None

    # accumulated across all build() calls below, consumed at the end
    built_objects = set()
    generated_headers = []

    def build(object_paths: Union[Path, List[Path]], src_paths: List[Path], task: str, variables: Dict[str, str] = {}):
        # Emit one edge "object_paths: task src_paths" unless every output
        # was already emitted (tracked via skip_outputs).
        # NOTE(review): the mutable default for `variables` is harmless here
        # because it is only read (spread into a new dict), never mutated.
        if not isinstance(object_paths, list):
            object_paths = [object_paths]

        object_strs = [str(obj) for obj in object_paths]
        needs_build = False

        for object_path in object_paths:
            if object_path.suffixes[-1] == ".o":
                built_objects.add(str(object_path))
            elif object_path.suffixes[-1] == ".h" or task == "bin_inc_c":
                generated_headers.append(str(object_path))

            # don't rebuild objects if we've already seen all of them
            if not str(object_path) in skip_outputs:
                needs_build = True

        if needs_build:
            skip_outputs.update(object_strs)

            implicit = []
            order_only = []

            if task == "yay0":
                # compression requires the Yay0compress tool to exist first
                implicit.append(YAY0_COMPRESS_TOOL)
            elif task in ["cc", "cxx"]:
                # compiles may #include generated headers; order-only so
                # header regeneration alone doesn't force a recompile
                order_only.append("generated_headers_" + self.version)

            ninja.build(
                object_strs,  # $out
                task,
                self.resolve_src_paths(src_paths),  # $in
                variables={"version": self.version, **variables},
                implicit=implicit,
                order_only=order_only,
            )

    # Build objects
    for entry in self.linker_entries:
        seg = entry.segment

        if isinstance(seg, segtypes.n64.header.N64SegHeader):
            build(entry.object_path, entry.src_paths, "as")
        elif isinstance(seg, segtypes.common.asm.CommonSegAsm) or (isinstance(seg, segtypes.common.data.CommonSegData) and not seg.type[0] == "."):
            build(entry.object_path, entry.src_paths, "as")
        elif isinstance(seg, segtypes.common.c.CommonSegC) or (isinstance(seg, segtypes.common.data.CommonSegData) and seg.type[0] == "."):
            # cflags may come from the segment yaml (dict or positional list)
            cflags = None
            if isinstance(seg.yaml, dict):
                cflags = seg.yaml.get("cflags")
            elif len(seg.yaml) >= 4:
                cflags = seg.yaml[3]

            # default cflags where not specified
            if cflags is None:
                if "nusys" in entry.src_paths[0].parts:
                    cflags = ""
                elif "os" in entry.src_paths[0].parts:
                    # libultra
                    cflags = ""
                else:
                    # papermario
                    cflags = "-fforce-addr"

            # pick the compiler task: gcc by default, g++ for .cpp,
            # IDO for osFlash, gcc 2.7.2 when the yaml asks for it
            task = "cc"
            if entry.src_paths[0].suffixes[-1] == ".cpp":
                task = "cxx"

            if seg.name.endswith("osFlash"):
                task = "cc_ido"
            elif "gcc_272" in cflags:
                task = "cc_272"
                cflags = cflags.replace("gcc_272", "")

            build(entry.object_path, entry.src_paths, task, variables={
                "cflags": cflags,
                "cppflags": f"-DVERSION_{self.version.upper()}",
            })

            # images embedded inside data aren't linked, but they do need to be built into .inc.c files
            if isinstance(seg, segtypes.common.group.CommonSegGroup):
                for seg in seg.subsegments:
                    if isinstance(seg, segtypes.n64.img.N64SegImg):
                        flags = ""
                        if seg.flip_horizontal:
                            flags += "--flip-x "
                        if seg.flip_vertical:
                            flags += "--flip-y "

                        src_paths = [seg.out_path().relative_to(ROOT)]
                        inc_dir = self.build_path() / "include" / seg.dir
                        bin_path = self.build_path() / seg.dir / (seg.name + ".png.bin")

                        build(bin_path, src_paths, "img", variables={
                            "img_type": seg.type,
                            "img_flags": flags,
                        })
                        c_sym = seg.create_symbol(
                            addr=seg.vram_start, in_segment=True, type="data", define=True
                        )
                        vars = {"c_name": c_sym.name}
                        build(inc_dir / (seg.name + ".png.h"), src_paths, "img_header", vars)
                        build(inc_dir / (seg.name + ".png.inc.c"), [bin_path], "bin_inc_c", vars)
                    elif isinstance(seg, segtypes.n64.palette.N64SegPalette):
                        src_paths = [seg.out_path().relative_to(ROOT)]
                        inc_dir = self.build_path() / "include" / seg.dir
                        bin_path = self.build_path() / seg.dir / (seg.name + ".pal.bin")

                        build(bin_path, src_paths, "img", variables={
                            "img_type": seg.type,
                            "img_flags": "",
                        })
                        c_sym = seg.create_symbol(
                            addr=seg.vram_start, in_segment=True, type="data", define=True
                        )
                        vars = {"c_name": c_sym.name}
                        build(inc_dir / (seg.name + ".pal.inc.c"), [bin_path], "bin_inc_c", vars)
        elif isinstance(seg, segtypes.common.bin.CommonSegBin):
            build(entry.object_path, entry.src_paths, "bin")
        elif isinstance(seg, segtypes.n64.Yay0.N64SegYay0):
            compressed_path = entry.object_path.with_suffix("")  # remove .o
            build(compressed_path, entry.src_paths, "yay0")
            build(entry.object_path, [compressed_path], "bin")
        elif isinstance(seg, segtypes.n64.img.N64SegImg):
            flags = ""
            if seg.flip_horizontal:
                flags += "--flip-x "
            if seg.flip_vertical:
                flags += "--flip-y "

            bin_path = entry.object_path.with_suffix(".bin")
            inc_dir = self.build_path() / "include" / seg.dir

            build(bin_path, entry.src_paths, "img", variables={
                "img_type": seg.type,
                "img_flags": flags,
            })
            build(entry.object_path, [bin_path], "bin")
            # c_sym = seg.create_symbol(
            #     addr=seg.vram_start, in_segment=True, type="data", define=True
            # )
            # vars = {"c_name": c_sym.name}
            build(inc_dir / (seg.name + ".png.h"), entry.src_paths, "img_header")
        elif isinstance(seg, segtypes.n64.palette.N64SegPalette):
            bin_path = entry.object_path.with_suffix(".bin")
            build(bin_path, entry.src_paths, "img", variables={
                "img_type": seg.type,
                "img_flags": "",
            })
            build(entry.object_path, [bin_path], "bin")
        elif seg.type == "pm_npc_sprites":
            # each sprite dir -> .bin -> .Yay0, plus a generated header;
            # sprite ids are 1-based
            sprite_yay0s = []
            for sprite_id, sprite_dir in enumerate(entry.src_paths, 1):
                sprite_name = sprite_dir.name

                bin_path = entry.object_path.with_suffix("") / (sprite_name + ".bin")
                yay0_path = bin_path.with_suffix(".Yay0")
                sprite_yay0s.append(yay0_path)

                variables = {
                    "sprite_id": sprite_id,
                    "sprite_name": sprite_name,
                    "sprite_dir": str(self.resolve_asset_path(sprite_dir)),
                }

                build(bin_path, [sprite_dir], "sprite", variables=variables)
                build(yay0_path, [bin_path], "yay0")
                build(
                    self.build_path() / "include" / seg.dir / seg.name / (sprite_name + ".h"),
                    [sprite_dir],
                    "sprite_header",
                    variables=variables,
                )
            build(entry.object_path.with_suffix(".bin"), sprite_yay0s, "sprite_combine")
            build(entry.object_path, [entry.object_path.with_suffix(".bin")], "bin")
        elif seg.type == "pm_msg":
            # message sections are numbered by hex index, then combined
            # together with the message_ids.h header
            msg_bins = []
            for section_idx, msg_path in enumerate(entry.src_paths):
                bin_path = entry.object_path.with_suffix("") / f"{section_idx:02X}.bin"
                msg_bins.append(bin_path)
                build(bin_path, [msg_path], "msg")
            build([
                entry.object_path.with_suffix(".bin"),
                self.build_path() / "include" / "message_ids.h",
            ], msg_bins, "msg_combine")
            build(entry.object_path, [entry.object_path.with_suffix(".bin")], "bin")
        elif seg.type == "pm_map_data":
            bin_yay0s: List[Path] = []  # flat list of (uncompressed path, compressed? path) pairs
            src_dir = Path("assets/x") / seg.name

            for path in entry.src_paths:
                name = path.stem
                out_dir = entry.object_path.with_suffix("").with_suffix("")
                bin_path = out_dir / f"{name}.bin"

                if name.startswith("party_"):
                    compress = True
                    build(bin_path, [path], "img", variables={
                        "img_type": "party",
                        "img_flags": "",
                    })
                elif name == "title_data":
                    # title screen is packed from several separately
                    # converted images; jp additionally needs a palette
                    compress = True

                    logotype_path = out_dir / "title_logotype.bin"
                    copyright_path = out_dir / "title_copyright.bin"
                    copyright_pal_path = out_dir / "title_copyright.pal"  # jp only
                    press_start_path = out_dir / "title_press_start.bin"

                    build(logotype_path, [src_dir / "title/logotype.png"], "img", variables={
                        "img_type": "rgba32",
                        "img_flags": "",
                    })
                    build(press_start_path, [src_dir / "title/press_start.png"], "img", variables={
                        "img_type": "ia8",
                        "img_flags": "",
                    })
                    if self.version == "jp":
                        build(copyright_path, [src_dir / "title/copyright.png"], "img", variables={
                            "img_type": "ci4",
                            "img_flags": "",
                        })
                        build(copyright_pal_path, [src_dir / "title/copyright.png"], "img", variables={
                            "img_type": "palette",
                            "img_flags": "",
                        })
                        imgs = [logotype_path, copyright_path, press_start_path, copyright_pal_path]
                    else:
                        build(copyright_path, [src_dir / "title/copyright.png"], "img", variables={
                            "img_type": "ia8",
                            "img_flags": "",
                        })
                        imgs = [logotype_path, copyright_path, press_start_path]
                    build(bin_path, imgs, "pack_title_data")
                elif name.endswith("_bg"):
                    compress = True
                    bin_path = self.build_path() / bin_path
                    build(bin_path, [path], "img", variables={
                        "img_type": "bg",
                        "img_flags": "",
                    })
                elif name.endswith("_tex"):
                    compress = False
                    bin_path = path
                elif name.endswith("_shape"):
                    map_name = "_".join(name.split("_")[:-1])
                    # Handle map XML files, if they exist (TODO: have splat output these)
                    map_xml = self.resolve_asset_path(Path(f"assets/{self.version}") / seg.dir / seg.name / (map_name + ".xml"))
                    if map_xml.exists():
                        # Build a header file for this map
                        build(
                            self.build_path() / "include" / seg.dir / seg.name / (map_name + ".h"),
                            [map_xml],
                            "map_header",
                        )
                    # NOTE: we don't build the map xml into a _shape or _hit file (yet); the Star Rod Map Editor
                    # is able to build the xml nonmatchingly into assets/star_rod_build/mapfs/*.bin for people
                    # who want that (i.e. modders). 'star_rod_build' should be added to asset_stack also.
                    compress = True
                    bin_path = path
                else:
                    compress = True
                    bin_path = path

                if compress:
                    yay0_path = out_dir / f"{name}.Yay0"
                    build(yay0_path, [bin_path], "yay0")
                else:
                    yay0_path = bin_path

                bin_yay0s.append(bin_path)
                bin_yay0s.append(yay0_path)

            # combine
            build(entry.object_path.with_suffix(""), bin_yay0s, "mapfs")
            build(entry.object_path, [entry.object_path.with_suffix("")], "bin")
        elif seg.type == "pm_charset":
            rasters = []
            for src_path in entry.src_paths:
                out_path = self.build_path() / seg.dir / seg.name / (src_path.stem + ".bin")
                build(out_path, [src_path], "img", variables={
                    "img_type": "ci4",
                    "img_flags": "",
                })
                rasters.append(out_path)
            build(entry.object_path.with_suffix(""), rasters, "pm_charset")
            build(entry.object_path, [entry.object_path.with_suffix("")], "bin")
        elif seg.type == "pm_charset_palettes":
            palettes = []
            for src_path in entry.src_paths:
                out_path = self.build_path() / seg.dir / seg.name / "palette" / (src_path.stem + ".bin")
                build(out_path, [src_path], "img", variables={
                    "img_type": "palette",
                    "img_flags": "",
                })
                palettes.append(out_path)
            build(entry.object_path.with_suffix(""), palettes, "pm_charset_palettes")
            build(entry.object_path, [entry.object_path.with_suffix("")], "bin")
        elif seg.type in ["pm_effect_loads", "pm_effect_shims"]:
            build(entry.object_path, entry.src_paths, "as")
        elif seg.type == "linker" or seg.type == "linker_offset":
            pass
        else:
            raise Exception(f"don't know how to build {seg.__class__.__name__} '{seg.name}'")

    # Create objcopy section list
    if debug:
        ninja.build(
            str(self.objcopy_sections_path()),
            "genobjcopy",
            str(self.build_path() / "elf_sections.txt"),
        )

    # Run undefined_syms through cpp
    ninja.build(
        str(self.undefined_syms_path()),
        "cpp",
        str(self.version_path / "undefined_syms.txt")
    )

    # Build elf, z64, ok
    additional_objects = [str(self.undefined_syms_path())]
    if debug:
        additional_objects += [str(self.objcopy_sections_path())]

    ninja.build(
        str(self.elf_path()),
        "ld",
        str(self.linker_script_path()),
        implicit=[str(obj) for obj in built_objects] + additional_objects,
        variables={"version": self.version, "mapfile": str(self.map_path())},
    )
    ninja.build(
        str(self.rom_path()),
        "z64",
        str(self.elf_path()),
        implicit=[CRC_TOOL],
        variables={"version": self.version},
    )
    ninja.build(
        str(self.rom_ok_path()),
        "sha1sum",
        f"ver/{self.version}/checksum.sha1",
        implicit=[str(self.rom_path())],
    )

    # phony target other edges order_only-depend on (see build() above)
    ninja.build("generated_headers_" + self.version, "phony", generated_headers)
# --- Script that emits a ninja file defining path constants for text tools.
# NOTE(review): `os` is used below but not imported in this chunk — presumably
# `import os` appears above it; confirm. The chunk is truncated mid-call at
# the end (`writer.variable(key='text_merger',`). ---

import sys
import argparse
import codecs

# Put ninja/misc on sys.path so ninja_syntax can be imported.
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'ninja', 'misc'))
from ninja_syntax import Writer

if __name__ == '__main__':
    # Argument handling: the only positional argument is the output path.
    parser = argparse.ArgumentParser()
    parser.add_argument('output')
    args = parser.parse_args()

    with codecs.open(args.output, 'w', 'utf-8') as f:
        writer = Writer(f)
        # (Japanese) "File that defines constants etc. for ninja" — emitted
        # verbatim into the generated ninja file, so the string stays as-is.
        writer.comment('ninjaの定数等を定義するファイル')
        writer.newline()

        # Root directory of this repository (parent of this script's dir).
        root_dir = os.path.abspath(
            os.path.join(os.path.dirname(__file__), '..'))

        # Text converter script path.
        writer.comment('テキストコンバーター')
        writer.variable(key='text_converter',
                        value=os.path.join(root_dir, 'scripts', 'text_converter.py'))
        # Text merger script path (call truncated at the end of this chunk).
        writer.comment('テキストマージャー')
        writer.variable(key='text_merger',
# --- Fragment of an AVR (ATtiny84A) build.ninja generator. The opening of
# get_sources() (its `def` line and the outer `for d in source_dirs:`) is
# above this chunk, as are source_dirs / include_dirs / libraries / defines.
# Indentation is reconstructed. ---

        for f in os.listdir(d):
            fnames.append(os.path.join(d, f))
    return fnames

def get_includes():
    # -I flags for every source and include directory.
    return " ".join(map(lambda x : "-I"+x, source_dirs + include_dirs))

def get_libs():
    # Linker library flags; the rpath alternative is kept below for reference.
    return " ".join(libraries)
    #return "-Wl,-rpath," + (":".join(library_dirs))

def get_defines():
    # -D flags for every preprocessor define.
    return " ".join(map(lambda x : "-D"+x, defines))

with open("build.ninja", "w") as buildfile:
    n = Writer(buildfile)

    # Variable declarations
    n.variable("cflags", "-funsigned-char -funsigned-bitfields -DDEBUG -O1 -ffunction-sections -fdata-sections -fpack-struct -fshort-enums -g2 -Wall -mmcu=attiny84a -c -std=gnu99 " + get_includes() + " " + get_defines())
    n.variable("lflags", " -Wl,--start-group -Wl,-lm -Wl,--end-group -Wl,--gc-sections -mmcu=attiny84a")
    n.variable("libs", get_libs())

    # Rule declarations: C++/C compile rules and the final link rule.
    n.rule("cxx", command = "avr-g++ $cxxflags -c $in -o $out")
    n.rule("cc", command = "avr-gcc $cflags -c $in -o $out")
    n.rule("cl", command = "avr-gcc -o $out $in $libs $lflags")
# --- Fragment of a TM4C123 (Tiva/Stellaris) build.ninja generator using the
# Energia ARM toolchain. `source_dirs` is defined above this chunk, and the
# final n.rule("cca", ...) call is truncated at the end of the chunk. ---

def subst_ext(fname, ext):
    # Replace fname's extension with ext (ext includes the leading dot).
    return os.path.splitext(fname)[0] + ext

def get_sources():
    # All files in every source directory (non-recursive).
    fnames = []
    for d in source_dirs:
        for f in os.listdir(d):
            fnames.append(os.path.join(d, f))
    return fnames

def get_includes():
    # -I flags for every source directory.
    return " ".join(map(lambda x : "-I"+x, source_dirs))

with open("build.ninja", "w") as buildfile:
    n = Writer(buildfile)

    # Variable declarations
    # NOTE(review): tc_path is a hard-coded absolute toolchain path — this
    # script only works on the original author's machine as written.
    n.variable("tc_path", "/home/kbalke/Projects/Coding/energia-0101E0016/hardware/tools/lm4f/bin")
    n.variable("cflags", "-c -Os -w -g -ffunction-sections -fdata-sections -mthumb -mcpu=cortex-m4 -mfloat-abi=hard -mfpu=fpv4-sp-d16 -fsingle-precision-constant -DF_CPU=80000000L -DPART_TM4C123GH6PM " + get_includes())
    n.variable("cxxflags", "-c -Os -w -g -std=c++11 -fno-rtti -fno-exceptions -ffunction-sections -fdata-sections -mthumb -mcpu=cortex-m4 -mfloat-abi=hard -mfpu=fpv4-sp-d16 -fsingle-precision-constant -DF_CPU=80000000L -DPART_TM4C123GH6PM " + get_includes())
    n.variable("lflags", "-Os -g -nostartfiles -nostdlib -Wl,--gc-sections -T lm4fcpp_blizzard.ld -Wl,--entry=ResetISR -mthumb -mcpu=cortex-m4 -mfloat-abi=hard -mfpu=fpv4-sp-d16 -fsingle-precision-constant -lm -lc -lgcc -lstdc++")

    # Rule declarations (the "cca" rule is cut off by the chunk boundary).
    n.rule("cxx", command = "$tc_path/arm-none-eabi-g++ $cxxflags -c $in -o $out")
    n.rule("cc", command = "$tc_path/arm-none-eabi-gcc $cflags -c $in -o $out")
    n.rule("cca",
# --- Fragment of a build.ninja generator for a C/C++ project. `mglob` is
# presumably a multi-pattern glob helper defined above this chunk (confirm),
# as are cppflags/cflags/ldflags and the compiler names. The final
# w.rule('cc', ...) call is truncated at the end of the chunk. ---

# Source file lists, grouped by language.
cpp_sources = mglob(
    'src/*.cc',
    'src/w/*.cc',
    'src/vfx/*.cc',
    'src/ops/*.cc',
)
c_sources = mglob(
    'src/*.c',
    'ext/*.c',
)
# Binary assets embedded into the build, keyed by symbol name.
binary_data = {'DroidSansMonoTTF': 'fonts/DroidSansMono.ttf'}

with open('build.ninja', 'w') as f:
    w = Writer(f)
    w.variable('builddir', 'build')
    w.variable('cppflags', cppflags)
    w.variable('cflags', cflags)
    w.variable('ldflags', ldflags)
    # Compile rules with gcc-style header-dependency tracking (-MMD -MF)
    # wired to ninja's depfile mechanism. NOTE(review): passing command as a
    # list relies on ninja_syntax joining it with spaces — confirm the
    # bundled Writer supports that.
    w.rule('cpp', command=[ cpp_compiler, '$cppflags', '-MMD', '-MF', '$out.d', '-c', '-o', '$out', '$in' ], depfile='$out.d')
    w.rule('cc', command=[ c_compiler, '$cflags', '-MMD', '-MF', '$out.d', '-c', '-o', '$out', '$in' ],