Example #1
def _generate_ninja_file(cpp_compiler_path, c_compiler_path, archiver_path, target_scheme, build_dir, lib_path):

	with open(os.path.join(build_dir, "Application.ninja"), "w") as build_file:
		ninja_file = Writer(build_file)

		ninja_file.variable(key="builddir", value=build_dir)

		cpp_compiler_flags = COMPILER_FLAGS_DEFAULT_CPP + " " + COMPILER_FLAGS_TARGET_MAP[target_scheme] + " " + INCLUDE_PATHS
		c_compiler_flags = COMPILER_FLAGS_DEFAULT_C + " " + COMPILER_FLAGS_TARGET_MAP[target_scheme] + " " + INCLUDE_PATHS

		# Write the compiler rule for c, cpp and cc
		ninja_file.rule("compile_cpp", command="{} {} -o $out $in".format(cpp_compiler_path, cpp_compiler_flags), description="Compiling C++ source: $in", depfile="$out.o.d", deps="gcc")
		ninja_file.rule("compile_c", command="{} {} -o $out $in".format(c_compiler_path, c_compiler_flags), description="Compiling C source: $in", depfile="$out.o.d", deps="gcc")

		# Write the rule that generates the dependencies
		ninja_file.rule("dependencies_cpp", command="{} {} -MM -MG -MF $out $in".format(cpp_compiler_path, cpp_compiler_flags), description="Generating C++ dependency: $in")
		ninja_file.rule("dependencies_c", command="{} {} -MM -MG -MF $out $in".format(c_compiler_path, c_compiler_flags), description="Generating C dependency: $in")

		# Write the rule to build the static library. Note: we use response files because on Windows the command line would otherwise be too long for CreateProcess
		ninja_file.rule("archive", command="{} rcs $out @$out.rsp".format(archiver_path), description="Building static library: $out", rspfile="$out.rsp", rspfile_content="$in")

		# Write the compile command for all source files.
		output_files = _write_build_command(ninja_file, SOURCE_PATHS, 'cpp', 'compile_cpp', 'dependencies_cpp', build_dir)
		output_files += _write_build_command(ninja_file, SOURCE_PATHS, 'c,cc', 'compile_c', 'dependencies_c', build_dir)

		# Write the command to generate the static library for ChilliSource and the application
		ninja_file.build(rule="archive", inputs=output_files, outputs=lib_path)
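
The helper _write_build_command is not shown in this example. Judging from the call sites above (it receives the Writer, the source roots, a comma-separated extension list, a compile rule, a dependency rule and the build directory, and returns the object paths), a minimal sketch might look like the following; the globbing and output-path layout are assumptions.

import glob
import os

def _write_build_command(ninja_file, source_paths, extensions, compile_rule, dependency_rule, build_dir):
    # Hypothetical reconstruction: glob every source matching one of the extensions,
    # emit one compile build statement per file and return the object paths.
    # (The dependency rule could be used for explicit depfile builds; omitted here.)
    output_files = []
    for source_path in source_paths:
        for extension in extensions.split(','):
            pattern = os.path.join(source_path, '**', '*.' + extension)
            for source in glob.glob(pattern, recursive=True):
                obj = os.path.join(build_dir, 'obj', source + '.o')
                ninja_file.build(rule=compile_rule, inputs=source, outputs=obj)
                output_files.append(obj)
    return output_files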
Example #2
def shogunAllProto(argv):
    '''
    Generate XXX.pb.{h,cc} files from all available XXX.proto
    files in the source directory.

    Depends: protoc (protobuf-compiler)
    Input: .proto
    Output: .pb.cc, .pb.h
    '''
    ag = argparse.ArgumentParser()
    ag.add_argument('-o', help='write ninja file', type=str, default='all_proto.ninja')
    ag = ag.parse_args(argv)
    print(red('Argument Dump:'))
    pprint(vars(ag))

    # (1) initialize ninja file
    cursor = Writer(open(ag.o, 'w'))
    ninjaCommonHeader(cursor, ag)

    # (2) glob all proto
    protos = glob.glob('**/*.proto', recursive=True)
    print(cyan('AllProto:'), f'globbed {len(protos)} .proto files')

    # (3) generate .pb.cc, .pb.h
    for proto in protos:
        cursor.build([ proto.replace('.proto', '.pb.cc'),
            proto.replace('.proto', '.pb.h')], 'rule_PROTOC', proto)

    # done
    cursor.close()
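
ninjaCommonHeader and the rule_PROTOC rule are defined elsewhere in the script. Based on the build statements above, the rule presumably wraps protoc roughly as below; the exact protoc flags are an assumption.

# Hypothetical sketch of the protoc rule that ninjaCommonHeader is expected to define
cursor.rule('rule_PROTOC',
        command='protoc --cpp_out=. $in',
        description='PROTOC $in')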
Example #3
def write_ninja_for_tools(ninja: ninja_syntax.Writer):
    ninja.rule("cc_tool",
        description="cc_tool $in",
        command=f"cc -w $in -O3 -o $out",
    )

    ninja.build(YAY0_COMPRESS_TOOL, "cc_tool", f"{BUILD_TOOLS}/yay0/Yay0compress.c")
    ninja.build(CRC_TOOL, "cc_tool", f"{BUILD_TOOLS}/rom/n64crc.c")
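
The constants above come from the surrounding build script. A self-contained call, with assumed values for BUILD_TOOLS, YAY0_COMPRESS_TOOL and CRC_TOOL, could look like this:

from ninja_syntax import Writer

# Assumed values; the real script defines these elsewhere
BUILD_TOOLS = "tools/build"
YAY0_COMPRESS_TOOL = "tools/build/yay0/Yay0compress"
CRC_TOOL = "tools/build/rom/n64crc"

with open("tools.ninja", "w") as f:
    write_ninja_for_tools(Writer(f))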
Example #4
def ninjafy(project):
    file = open("build.ninja", "w+")
    ninja = Writer(file)
    for v in project.variables:
        ninja.variable(v.key, v.value)
    for r in project.rules:
        ninja.rule(r.name, r.command)
    for b in project.builds:
        ninja.build(b.outputs, b.rule, b.inputs)
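
ninjafy only touches a handful of attributes on project. A minimal object that satisfies it, with field names inferred from the attribute accesses above, could be built with dataclasses:

from dataclasses import dataclass, field
from typing import List

# Minimal project model inferred from the attribute accesses in ninjafy()
@dataclass
class Variable:
    key: str
    value: str

@dataclass
class Rule:
    name: str
    command: str

@dataclass
class Build:
    outputs: List[str]
    rule: str
    inputs: List[str]

@dataclass
class Project:
    variables: List[Variable] = field(default_factory=list)
    rules: List[Rule] = field(default_factory=list)
    builds: List[Build] = field(default_factory=list)

ninjafy(Project(
    variables=[Variable("cc", "gcc")],
    rules=[Rule("compile", "$cc -c $in -o $out")],
    builds=[Build(["main.o"], "compile", ["main.c"])],
))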
Example #5
def _generate_ninja_file(cpp_compiler_path, c_compiler_path, archiver_path,
                         target_scheme, build_dir, lib_path):

    with open(os.path.join(build_dir, "Application.ninja"), "w") as build_file:
        ninja_file = Writer(build_file)

        ninja_file.variable(key="builddir", value=build_dir)

        cpp_compiler_flags = COMPILER_FLAGS_DEFAULT_CPP + " " + COMPILER_FLAGS_TARGET_MAP[
            target_scheme] + " " + INCLUDE_PATHS
        c_compiler_flags = COMPILER_FLAGS_DEFAULT_C + " " + COMPILER_FLAGS_TARGET_MAP[
            target_scheme] + " " + INCLUDE_PATHS

        # Write the compiler rule for c, cpp and cc
        ninja_file.rule("compile_cpp",
                        command="{} {} -o $out $in".format(
                            cpp_compiler_path, cpp_compiler_flags),
                        description="Compiling C++ source: $in",
                        depfile="$out.o.d",
                        deps="gcc")
        ninja_file.rule("compile_c",
                        command="{} {} -o $out $in".format(
                            c_compiler_path, c_compiler_flags),
                        description="Compiling C source: $in",
                        depfile="$out.o.d",
                        deps="gcc")

        # Write the rule that generates the dependencies
        ninja_file.rule("dependencies_cpp",
                        command="{} {} -MM -MG -MF $out $in".format(
                            cpp_compiler_path, cpp_compiler_flags),
                        description="Generating C++ dependency: $in")
        ninja_file.rule("dependencies_c",
                        command="{} {} -MM -MG -MF $out $in".format(
                            c_compiler_path, c_compiler_flags),
                        description="Generating C dependency: $in")

        # Write the rule to build the static library. Note: we use response files because on Windows the command line would otherwise be too long for CreateProcess
        ninja_file.rule("archive",
                        command="{} rcs $out @$out.rsp".format(archiver_path),
                        description="Building static library: $out",
                        rspfile="$out.rsp",
                        rspfile_content="$in")

        # Write the compile command for all source files.
        output_files = _write_build_command(ninja_file, SOURCE_PATHS, 'cpp',
                                            'compile_cpp', 'dependencies_cpp',
                                            build_dir)
        output_files += _write_build_command(ninja_file, SOURCE_PATHS, 'c,cc',
                                             'compile_c', 'dependencies_c',
                                             build_dir)

        # Write the command to generate the static library for ChilliSource and the application
        ninja_file.build(rule="archive", inputs=output_files, outputs=lib_path)
Example #6
    def make_current(self, ninja: ninja_syntax.Writer):
        current = Path("ver/current")

        try:
            current.unlink()
        except Exception:
            pass

        current.symlink_to(self.version)

        ninja.build("ver/current/build/papermario.z64", "phony", str(self.rom_path()))
Example #7
def shogunProtoText(argv):
    '''
    Build a binary ELF executable named proto_text, which generates
    XXX.pb_text{.cc,.h,-impl.h} files from a given XXX.proto file.
    This binary file is for one-time use.

    Depends: shogunAllProto
    Input: bazelDump, cxx source
    Output: proto_text
    '''
    ag = argparse.ArgumentParser()
    ag.add_argument('-i', help='list of source files', type=str, required=True)
    ag.add_argument('-g', help='list of generated files', type=str, required=True)
    ag.add_argument('-o', help='where to write the ninja file', type=str, default='proto_text.ninja')
    ag = ag.parse_args(argv)
    print(red('Argument Dump:'))
    pprint(vars(ag))

    # (0) read bazel dump and apply hardcoded filters
    srclist = bazelPreprocess([l.strip() for l in open(ag.i, 'r').readlines()])
    genlist = bazelPreprocess([l.strip() for l in open(ag.g, 'r').readlines()])
    _, srclist = eGrep('.*.h$', srclist) # we don't need to deal with header here
    _, srclist = eGrep('^third_party', srclist) # no third_party stuff
    _, srclist = eGrep('.*windows/.*', srclist) # no windoge source
    _, srclist = eGrep('.*.proto$', srclist) # already dealt with in (2)

    # (1) Instantiate ninja writer
    cursor = Writer(open(ag.o, 'w'))
    ninjaCommonHeader(cursor, ag)

    # (2) deal with generated files
    # (2.1) .pb.cc and .pb.h files are generated in shogunAllProto
    _, genlist = eGrep('.*.pb.h$', genlist)
    pbcclist, genlist = eGrep('.*.pb.cc$', genlist)
    if len(genlist) > 0:
        print(yellow('Remainders:'), genlist)

    # (3) deal with source files
    cclist, srclist = eGrep('.*.cc', srclist)
    objlist = []
    for cc in cclist + pbcclist:
        obj = cc.replace('.cc', '.o')
        objlist.append(cursor.build(obj, 'rule_CXX_OBJ', cc)[0])
    if len(srclist) > 0:
        print(yellow('Remainders:'), srclist)

    # (4) link objects into the final ELF
    cursor.build('proto_text', 'rule_CXX_EXEC', inputs=objlist,
            variables={'LIBS': '-lpthread -lprotobuf -ldouble-conversion'})

    # done
    cursor.close()
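
bazelPreprocess, ninjaCommonHeader and the colour helpers come from the surrounding script. eGrep(pattern, lines) is unpacked as two lists everywhere it is used, so it presumably splits its input into (matching, non-matching); a minimal sketch under that assumption:

import re

def eGrep(pattern, lines):
    # Assumed behaviour: return (lines matching the regex, lines that do not match)
    matched, rest = [], []
    for line in lines:
        (matched if re.match(pattern, line) else rest).append(line)
    return matched, rest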
Example #8
def write_buildfile():
    with open("build.ninja", "w") as buildfile:
        n = Writer(buildfile)

        # Variable declarations
        n.variable("lib_path", "/usr/arm-none-eabi/lib")
        n.variable("cflags", cflags)
        n.variable("cxxflags", cxxflags)
        n.variable("lflags", lflags)

        # Rule declarations
        n.rule("cxx",
               command = "arm-none-eabi-g++ $cxxflags -c $in -o $out")

        n.rule("cc",
               command = "arm-none-eabi-gcc $cflags -c $in -o $out")

        n.rule("cl",
               command = "arm-none-eabi-gcc $lflags $in -o $out")

        n.rule("oc",
               command = "arm-none-eabi-objcopy -O binary $in $out")

        n.rule("cdb",
              command = "ninja -t compdb cc cxx > compile_commands.json")

        n.rule("cscf",
              command = "find " + " ".join(set(source_dirs + include_dirs)) +
                        " -regex \".*\\(\\.c\\|\\.h\\|.cpp\\|.hpp\\)$$\" -and " +
                        "-not -type d > $out")

        n.rule("cscdb",
              command = "cscope -bq")

        # Build rules
        n.build("compile_commands.json", "cdb")
        n.build("cscope.files", "cscf")
        n.build(["cscope.in.out", "cscope.po.out", "cscope.out"], "cscdb",
                "cscope.files")

        objects = []

        def cc(name):
            ofile = subst_ext(name, ".o")
            n.build(ofile, "cc", name)
            objects.append(ofile)
        def cxx(name):
            ofile = subst_ext(name, ".o")
            n.build(ofile, "cxx", name)
            objects.append(ofile)
        def cl(oname, ofiles):
            n.build(oname, "cl", ofiles)

        sources = get_sources()
        # map() is lazy in Python 3, so iterate explicitly to actually emit the build statements
        for source in filter(lambda x: x.endswith(".c") or x.endswith(".S"), sources):
            cc(source)
        for source in filter(lambda x: x.endswith(".cpp"), sources):
            cxx(source)

        cl("main.elf", objects)

        n.build("main.bin", "oc", "main.elf")
Example #9
def write_buildfile():
    with open("build.ninja", "w") as buildfile:
        n = Writer(buildfile)

        # Variable declarations
        n.variable("lib_path",
                   "/usr/msp430/lib")  #"/usr/lib/gcc/msp430/4.6.3")
        n.variable("tc_path", "/usr/bin")
        n.variable("cflags", cflags)
        n.variable("cxxflags", cxxflags)
        n.variable("lflags", lflags)

        # Rule declarations
        n.rule("cxx", command="$tc_path/msp430-g++ $cxxflags -c $in -o $out")

        n.rule("cc", command="$tc_path/msp430-gcc $cflags -c $in -o $out")

        n.rule("cl",
               command="$tc_path/msp430-gcc $lflags $in -o $out -lm -lgcc -lc")

        n.rule("oc", command="$tc_path/msp430-objcopy -O binary $in $out")

        n.rule("cdb", command="ninja -t compdb cc cxx > compile_commands.json")

        n.rule("cscf",
               command="find " + " ".join(set(source_dirs + include_dirs)) +
               " -regex \".*\\(\\.c\\|\\.h\\|.cpp\\|.hpp\\)$$\" -and " +
               "-not -type d > $out")

        n.rule("cscdb", command="cscope -bq")

        # Build rules
        n.build("compile_commands.json", "cdb")
        n.build("cscope.files", "cscf")
        n.build(["cscope.in.out", "cscope.po.out", "cscope.out"], "cscdb",
                "cscope.files")

        objects = []

        def cc(name):
            ofile = subst_ext(name, ".o")
            n.build(ofile, "cc", name)
            objects.append(ofile)

        def cxx(name):
            ofile = subst_ext(name, ".o")
            n.build(ofile, "cxx", name)
            objects.append(ofile)

        def cl(oname, ofiles):
            n.build(oname, "cl", ofiles)

        sources = get_sources()
        # map() is lazy in Python 3, so iterate explicitly to actually emit the build statements
        for source in filter(lambda x: x.endswith(".c"), sources):
            cc(source)
        for source in filter(lambda x: x.endswith(".cpp"), sources):
            cxx(source)

        cl("main.elf", objects)

        n.build("main.bin", "oc", "main.elf")
Example #10
def _generate_ninja_file(app_name,
	compiler_path, linker_path, archiver_path, 
	compiler_flags, linker_flags,
	app_source_dirs,
	project_root, build_dir, output_dir, lib_cs_path, lib_app_path):

	with open(os.path.join(build_dir, "Application.ninja"), "w") as build_file:
		ninja_file = Writer(build_file)

		ninja_file.variable(key="builddir", value=build_dir)

		# Write the compiler rule for c, cpp and cc
		ninja_file.rule("compile", command="{} {} -o $out $in".format(compiler_path, compiler_flags), description="Compiling source: $in", depfile="$out.o.d", deps="gcc")

		# Write the rule that generates the dependencies
		ninja_file.rule("dependencies", command="{} {} -MM -MG -MF $out $in".format(compiler_path, compiler_flags), description="Generating dependency: $in")

		# Write the rule to build the static library. Note: we use response files because on Windows the command line would otherwise be too long for CreateProcess
		ninja_file.rule("archive", command="{} rcs $out @$out.rsp".format(archiver_path), description="Building static library: $out", rspfile="$out.rsp", rspfile_content="$in")

		# Write the rule to link. Note: we use response files because on Windows the command line would otherwise be too long for CreateProcess
		ninja_file.rule("link", command="{} @$out.rsp {} -o $out".format(linker_path, linker_flags), description="Linking: $out", rspfile="$out.rsp", rspfile_content="$in")

		# Write the compile command for all source files.
		cs_source_dirs = [os.path.normpath('{}/ChilliSource/Source/ChilliSource'.format(project_root)), os.path.normpath('{}/ChilliSource/Source/CSBackend/Platform/RPi/'.format(project_root)), os.path.normpath('{}/ChilliSource/Source/CSBackend/Rendering/OpenGL/'.format(project_root))]
		cs_output_files = _write_build_command(ninja_file, cs_source_dirs, 'c,cpp,cc', 'compile', 'dependencies', project_root, build_dir)
		app_output_files = _write_build_command(ninja_file, app_source_dirs, 'c,cpp,cc', 'compile', 'dependencies', project_root, build_dir)
		all_output_files = cs_output_files + app_output_files

		# Write the command to generate the static library for ChilliSource and the application
		ninja_file.build(rule="archive", inputs=cs_output_files, outputs=lib_cs_path)
		ninja_file.build(rule="archive", inputs=app_output_files, outputs=lib_app_path)

		# Write the rule to link the libraries into the executable
		ninja_file.build(rule="link", inputs=all_output_files, outputs=os.path.join(output_dir, app_name))
Example #11
#!/usr/bin/env python3

from ninja_syntax import Writer

with open("build.ninja", "w") as buildfile:
    n = Writer(buildfile)

    n.variable("cc", "clang")
    n.variable("cflags", "-Weverything")

    n.rule("compile",
           command="$cc $cflags -c $in -o $out",
           description="Compiling object file $out")

    n.rule("link",
           command="$cc $in -o $out",
           description="Linking executable $out")
    n.build("hello.o", "compile hello.c")
    n.build("hello", "link hello.o")

    n.default("hello")
Example #12
def shogunTFCCLib(argv):
    '''
    Build libtensorflow_cc.so

    Depends: all_proto, proto_text, CCOP
    Input: bazel dump, source files
    Output: libtensorflow_cc.so
    '''
    ag = argparse.ArgumentParser()
    ag.add_argument('-i', help='list of source files', type=str, required=True)
    ag.add_argument('-g', help='list of generated files', type=str, required=True)
    ag.add_argument('-o', help='where to write the ninja file', type=str, default='libtensorflow_cc.ninja')
    ag.add_argument('-H', help='where to put the headers list', type=str, default='libtensorflow_cc.hdrs')
    ag = ag.parse_args(argv)
    print(red('Argument Dump:'))
    pprint(vars(ag))

    # (0) read bazel dump and apply hard-coded filters
    srclist = bazelPreprocess([l.strip() for l in open(ag.i, 'r').readlines()])
    genlist = bazelPreprocess([l.strip() for l in open(ag.g, 'r').readlines()])
    tflib_extra_srcs = ['debian/embedded/fft/fftsg.c']
    _, srclist = eGrep('^third_party', srclist)
    _, srclist = eGrep('.*/windows/.*', srclist) # no windoge source.
    _, srclist = eGrep('.*.cu.cc$', srclist) # no CUDA file for CPU-only build
    _, srclist = eGrep('.*.pbtxt$', srclist) # not for us
    _, srclist = eGrep('.*platform/cloud.*', srclist) # SSL 1.1.1 broke it.
    _, srclist = eGrep('.*platform/s3.*', srclist) # we don't have https://github.com/aws/aws-sdk-cpp
    _, srclist = eGrep('.*_main.cc$', srclist) # don't include any main function.
    _, srclist = eGrep('.*test.*', srclist) # don't include any test
    _, srclist = eGrep('.*cc_op_gen.*', srclist) # don't include cc_op_gen.
    _, srclist = eGrep('.*gen_proto_text_functions.cc', srclist) # not for this library
    _, srclist = eGrep('.*tensorflow.contrib.cloud.*', srclist) # it wants GoogleAuthProvider etc.
    _, srclist = eGrep('.*gcs_config_ops.cc', srclist) # it wants GcsFileSystem
    srclist = list(set(srclist))

    if getDpkgArchitecture('DEB_HOST_ARCH') != 'amd64':
        # they FTBFS on non-amd64 arches
        _, srclist = eGrep('.*/core/debug/.*', srclist)
        _, genlist = eGrep('.*/core/debug/.*', genlist)
        _, srclist = eGrep('.*debug_ops.*', srclist)
        _, genlist = eGrep('.*debug_ops.*', genlist)

    # (1) Instantiate ninja writer
    cursor = Writer(open(ag.o, 'w'))
    ninjaCommonHeader(cursor, ag)

    # (2) deal with generated files
    # (2.1) .pb.h and .pb.cc are already generated by shogunAllProto
    gen_pbh, genlist = eGrep('.*.pb.h', genlist)
    gen_pbcc, genlist = eGrep('.*.pb.cc', genlist)

    # XXX: temporary workaround for //tensorflow/core/debug:debug_service.grpc.pb.cc
    if getDpkgArchitecture('DEB_HOST_ARCH') == 'amd64':
        # This is amd64-only
        cursor.build(['tensorflow/core/debug/debug_service.grpc.pb.cc', 'tensorflow/core/debug/debug_service.grpc.pb.h'],
            'rule_PROTOC_GRPC', inputs='tensorflow/core/debug/debug_service.proto')

    # (2.2) .pb_text.*
    pbtlist = [x for x in genlist if any(x.endswith(y) for y in ('.pb_text.h', '.pb_text.cc', '.pb_text-impl.h'))]
    pbtlist = [x.replace('.pb_text.h', '.proto').replace('.pb_text.cc', '.proto').replace('.pb_text-impl.h', '.proto') for x in pbtlist]
    gen_pbth, genlist = eGrep('.*.pb_text.h', genlist)
    gen_pbtih, genlist = eGrep('.*.pb_text-impl.h', genlist)
    gen_pbtcc, genlist = eGrep('.*.pb_text.cc', genlist)
    for pbt in list(set(pbtlist)):
        cursor.build([
            pbt.replace('.proto', '.pb_text.h'),
            pbt.replace('.proto', '.pb_text.cc'),
            pbt.replace('.proto', '.pb_text-impl.h')
            ], 'rule_PROTO_TEXT', pbt)

    # (2.3) cc_op_gen
    gen_ccopcc, genlist = eGrep('.*/cc/ops/.*.cc', genlist)
    gen_ccoph, genlist = eGrep('.*/cc/ops/.*.h', genlist)

    # (2.4) finish dealing with generated files
    if genlist:
        print(yellow('Remainders:'), genlist)
        assert(len(genlist) == 1)

    # (3) deal with source files
    # (3.1) filter-out headers
    _, srclist = eGrep('.*.proto$', srclist) # done in (2)
    src_hdrs, srclist = eGrep('.*.h$', srclist)

    # (3.2) compile .cc source
    src_cc, srclist = eGrep('.*.cc', srclist)
    objlist = []
    exception_eigen_avoid_std_array = [
        'sparse_tensor_dense_matmul_op', 'conv_grad_ops_3d', 'adjust_contrast_op' ]
    for cc in src_cc + gen_pbcc + gen_pbtcc + gen_ccopcc + genlist + tflib_extra_srcs:
        variables = {}
        if any(x in cc for x in exception_eigen_avoid_std_array):
            variables = {'SHOGUN_EXTRA': '-DEIGEN_AVOID_STL_ARRAY'}
        obj = cursor.build(re.sub('.c[c]?$', '.o', cc), 'rule_CXX_OBJ', inputs=cc, variables=variables)[0]
        objlist.append(obj)
Example #13
def shogunCCOP(argv):
    '''
    Generate tensorflow cc ops : tensorflow/cc/ops/*.cc and *.h

    Depends: AllProto, proto_text, libtensorflow_framework
    Input: cc source, bazel dump
    Output: one-time-use binary "XXX_gen_cc" and generated .cc .h files.
    '''
    ag = argparse.ArgumentParser()
    ag.add_argument('-i', help='list of source files', type=str, required=True)
    ag.add_argument('-g', help='list of generated files', type=str, required=True)
    ag.add_argument('-o', help='where to write the ninja file', type=str, default='ccop.ninja')
    ag = ag.parse_args(argv)
    print(red('Argument Dump:'))
    pprint(vars(ag))

    # (0) read bazel dump and apply hardcoded filters
    genlist = bazelPreprocess([l.strip() for l in open(ag.g, 'r').readlines()])

    # (1) Instantiate ninja writer
    cursor = Writer(open(ag.o, 'w'))
    ninjaCommonHeader(cursor, ag)

    # (2) filter unrelated files, we only want cc_op related files.
    _, genlist = eGrep('.*.pb.h', genlist)
    _, genlist = eGrep('.*.pb.cc', genlist)
    _, genlist = eGrep('.*.pb_text.h', genlist)
    _, genlist = eGrep('.*.pb_text-impl.h', genlist)
    _, genlist = eGrep('.*.pb_text.cc', genlist)

    # (3) XXX_gen_cc
    # (3.1) deal with a missing source
    cursor.build('tensorflow/core/ops/user_ops.cc', 'COPY', inputs='tensorflow/core/user_ops/fact.cc')

    # (3.2) build several common objects
    main_cc = ['tensorflow/core/framework/op_gen_lib.cc',
        'tensorflow/cc/framework/cc_op_gen.cc',
        'tensorflow/cc/framework/cc_op_gen_main.cc',
        ]
    main_obj = [x.replace('.cc', '.o') for x in main_cc]
    for cc in main_cc:
        cursor.build(cc.replace('.cc', '.o'), 'rule_CXX_OBJ', inputs=cc)

    # (3.3) build executables and generate files with the executables
    gen_ccopcc, genlist = eGrep('.*/cc/ops/.*.cc', genlist)
    gen_ccoph, genlist = eGrep('.*/cc/ops/.*.h', genlist)
    opnamelist = list(set(os.path.basename(x.replace('.cc', '').replace('.h', ''))
        for x in (gen_ccopcc + gen_ccoph) if 'internal' not in x ))

    for opname in opnamelist:
        coreopcc = 'tensorflow/core/ops/' + opname + '.cc'
        ccopcc   = 'tensorflow/cc/ops/'   + opname + '.cc'

        # build corresponding elf executable
        cursor.build(f'{opname}_gen_cc', 'rule_CXX_EXEC', inputs=[coreopcc] + main_obj,
            variables={'SHOGUN_EXTRA': '-I. -L. -ltf_ccop'})

        # generate file
        cursor.build([ccopcc.replace('.cc', '.h'), ccopcc], 'rule_CC_OP_GEN', inputs=f'{opname}_gen_cc',
                variables={'cc_op_gen_internal': '0' if opname != 'sendrecv_ops' else '1'},
                implicit_outputs=[ccopcc.replace('.cc', '_internal.h'), ccopcc.replace('.cc', '_internal.cc')])

    ## done
    cursor.close()
Example #14
def shogunTFLib_framework(argv):
    '''
    Build libtensorflow_framework.so. With slight modification, this
    function should be able to build libtensorflow_android.so too.

    Depends: AllProto, proto_text
    Input: bazelDump, cxx source
    Output: libtensorflow_framework.so
    '''
    ag = argparse.ArgumentParser()
    ag.add_argument('-i', help='list of source files', type=str, required=True)
    ag.add_argument('-g', help='list of generated files', type=str, required=True)
    ag.add_argument('-o', help='where to write the ninja file', type=str, default='libtensorflow_framework.ninja')
    ag = ag.parse_args(argv)
    print(red('Argument Dump:'))
    pprint(vars(ag))

    # (0) read bazel dump and apply hardcoded filters
    srclist = bazelPreprocess([l.strip() for l in open(ag.i, 'r').readlines()])
    genlist = bazelPreprocess([l.strip() for l in open(ag.g, 'r').readlines()])
    _, srclist = eGrep('.*proto_text.gen_proto_text_functions.cc', srclist)
    _, srclist = eGrep('^third_party', srclist)
    _, srclist = eGrep('.*/windows/.*', srclist) # no windoge source.

    # (1) Initialize ninja file
    cursor = Writer(open(ag.o, 'w'))
    ninjaCommonHeader(cursor, ag)

    # (2) deal with generated files
    # (2.1) .pb.h and .pb.cc are already generated by shogunAllProto
    gen_pbh, genlist = eGrep('.*.pb.h', genlist)
    gen_pbcc, genlist = eGrep('.*.pb.cc', genlist)

    # (2.2) .pb_text.*
    pbtlist = [x for x in genlist if any(x.endswith(y) for y in ('.pb_text.h', '.pb_text.cc', '.pb_text-impl.h'))]
    pbtlist = [x.replace('.pb_text.h', '.proto').replace('.pb_text.cc', '.proto').replace('.pb_text-impl.h', '.proto') for x in pbtlist]
    gen_pbth, genlist = eGrep('.*.pb_text.h', genlist)
    gen_pbtih, genlist = eGrep('.*.pb_text-impl.h', genlist)
    gen_pbtcc, genlist = eGrep('.*.pb_text.cc', genlist)
    for pbt in list(set(pbtlist)):
        cursor.build([
            pbt.replace('.proto', '.pb_text.h'),
            pbt.replace('.proto', '.pb_text.cc'),
            pbt.replace('.proto', '.pb_text-impl.h')
            ], 'rule_PROTO_TEXT', pbt)

    # (2.3) finish dealing with generated files
    if genlist:
        print(yellow('Remainders:'), genlist)
        assert(len(genlist) == 1)

    # (3) deal with source files
    # (3.1) filter-out headers
    _, srclist = eGrep('.*.proto$', srclist) # done in (2)
    src_hdrs, srclist = eGrep('.*.h$', srclist)

    # (3.2) compile .cc source
    src_cc, srclist = eGrep('.*.cc', srclist)
    objlist = []
    for cc in src_cc + gen_pbcc + gen_pbtcc + genlist:
        variables = {}
        obj = cursor.build(cc.replace('.cc', '.o'), 'rule_CXX_OBJ', inputs=cc, variables=variables)[0]
        objlist.append(obj)

    # (4) link the final executable
    cursor.build('libtensorflow_framework.so', 'rule_CXX_SHLIB', inputs=objlist,
            variables={'LIBS': '-lfarmhash -lhighwayhash -lsnappy -lgif'
            + ' -ldouble-conversion -lz -lprotobuf -ljpeg -lnsync -lnsync_cpp'
            + ' -lpthread',
            'SHOGUN_EXTRA': f'-Wl,--soname=libtensorflow_framework.so.{tf_soversion}'
            + f' -Wl,--version-script tensorflow/tf_framework_version_script.lds'
            + f'  -fvisibility=hidden'})

    # (5) a temporary shared object used by shogunCCOP
    libccop = [x for x in objlist if all(y not in x for y in ('core/kernels', 'core/ops'))]
    cursor.build('libtf_ccop.so', 'rule_CXX_SHLIB', inputs=libccop,
            variables={'LIBS': '-lfarmhash -lhighwayhash -lsnappy -lgif'
            + ' -ldouble-conversion -lz -lprotobuf -ljpeg -lnsync -lnsync_cpp'
            + ' -lpthread'})

    # done
    cursor.close()
Example #15
        gxc_files = glob.glob(dir + '*.gxc')
        if gxc_files:
            grc_files = glob.glob(dir + '*.grc')
            gxc_implicit_inputs = []
            for grc_file in grc_files:
                #def build(self, outputs, rule, inputs=None, implicit=None, order_only=None,
                #variables=None, implicit_outputs=None):
                grc_file_name_part = os.path.split(grc_file)[1].split('.')[0]
                gr_file = os.path.join(dir, grc_file_name_part + '.gr')
                grh_file = os.path.join(dir, grc_file_name_part + '.grh')
                gxc_implicit_inputs.append(gr_file)
                gxc_implicit_inputs.append(grh_file)
                n.build(gr_file,
                        'grc',
                        inputs=grc_file,
                        implicit_outputs=grh_file,
                        variables={
                            'in_name': grc_file_name_part,
                            'cwd': dir
                        })

            # TODO: Currently GXC uses the same temporary filename for preprocessed output, which does not allow parallel builds with the same working dir.
            #       We make GXCs in the same directory dependent on each other so they build serially.
            gxc_dep_files = [
                os.path.join(dir,
                             os.path.split(f)[1].split('.')[0] + '.gx')
                for f in gxc_files
            ]
            for gxc_file in gxc_files:
                gxc_file_name_part = os.path.split(gxc_file)[1].split('.')[0]
                gx_file = os.path.join('./build', gxc_file_name_part + '.gx')
                gxc_dep_files.remove(gx_file)
Example #16
    n.variable("package", package)
    n.variable("clock_constraint_mhz", clock_constraint_mhz)
    n.variable("topfile", topfile)
    n.variable("top_module", os.path.splitext(topfile)[0])
    n.variable("sources", ' '.join(sources))

    n.rule("cpbuild", "cp $in $out")

    n.rule(
        "synthesize",
        "(cd $builddir; yosys -ql hardware.log -p 'synth_ice40 -top $top_module -blif hardware.blif; write_verilog optimized.v' $sources)"
    )
    n.rule(
        "par",
        "(cd $builddir; arachne-pnr -d $device -P $package -o hardware.asc -p $const hardware.blif)"
    )
    n.rule(
        "timing",
        "(cd $builddir; icetime -d $device_type -c $clock_constraint_mhz -mtr hardware.rpt hardware.asc)"
    )
    n.rule("bitgen", "(cd $builddir; icepack hardware.asc hardware.bin)")

    for f in sources + [constraint_file]:
        n.build(os.path.join(build_dir, f), "cpbuild", f)
    n.build("${builddir}/hardware.blif", "synthesize", sources)
    n.build("${builddir}/hardware.asc", "par",
            ["${builddir}/${const}", "${builddir}/hardware.blif"])
    n.build("${builddir}/hardware.rpt", "timing", ["${builddir}/hardware.asc"])
    n.build("${builddir}/hardware.bin", "bitgen",
            ["${builddir}/hardware.asc", "${builddir}/hardware.rpt"])
Example #17
    n.rule("cdb", command="ninja -t compdb cc cxx > cc_preexp.json")

    n.rule("cdb_e",
           command=
           "cat cc_preexp.json | ./expand_compdb.py > compile_commands.json")

    n.rule(
        "cscf",
        command="find " + " ".join(set(source_dirs + include_dirs)) +
        " -regex \".*\\(\\.c\\|\\.h\\|.cpp\\|.hpp\\)$$\" -and -not -type d > $out"
    )

    n.rule("cscdb", command="cscope -bq")

    # Build rules
    n.build("cc_preexp.json", "cdb")
    n.build("compile_commands.json", "cdb_e", "cc_preexp.json")
    n.build("cscope.files", "cscf")
    n.build(["cscope.in.out", "cscope.po.out", "cscope.out"], "cscdb",
            "cscope.files")

    objects = []

    def cc(name):
        ofile = subst_ext(name, ".o")
        n.build(ofile, "cc", name)
        objects.append(ofile)

    def cxx(name):
        ofile = subst_ext(name, ".o")
        n.build(ofile, "cxx", name)
Example #18
    def write_ninja(self, ninja: ninja_syntax.Writer, skip_outputs: Set[str]):
        import segtypes
        import segtypes.n64.data  # Doesn't get imported on jp for some odd reason (should maybe be a * import?)

        assert self.linker_entries is not None

        built_objects = set()
        generated_headers = []

        def build(object_paths: Union[Path, List[Path]],
                  src_paths: List[Path],
                  task: str,
                  variables: Dict[str, str] = {}):
            if not isinstance(object_paths, list):
                object_paths = [object_paths]

            object_strs = [str(obj) for obj in object_paths]
            needs_build = False

            for object_path in object_paths:
                if object_path.suffixes[-1] == ".o":
                    built_objects.add(str(object_path))
                elif object_path.suffixes[-1] == ".h":
                    generated_headers.append(str(object_path))

                # don't rebuild objects if we've already seen all of them
                if not str(object_path) in skip_outputs:
                    needs_build = True

            if needs_build:
                skip_outputs.update(object_strs)

                implicit = []
                order_only = []

                if task == "yay0":
                    implicit.append(YAY0_COMPRESS_TOOL)
                elif task in ["cc", "cc_dsl", "cc_nusys", "cc_libultra"]:
                    order_only.append("generated_headers_" + self.version)

                ninja.build(
                    object_strs,  # $out
                    task,
                    self.resolve_src_paths(src_paths),  # $in
                    variables={
                        "version": self.version,
                        **variables
                    },
                    implicit=implicit,
                    order_only=order_only,
                )

        # Build objects
        for entry in self.linker_entries:
            seg = entry.segment

            if isinstance(seg, segtypes.n64.header.N64SegHeader):
                build(entry.object_path, entry.src_paths, "as")
            elif isinstance(seg, segtypes.n64.asm.N64SegAsm) or (
                    isinstance(seg, segtypes.n64.data.N64SegData)
                    and not seg.type[0] == "."):
                build(entry.object_path, entry.src_paths, "as")
            elif isinstance(seg, segtypes.n64.c.N64SegC) or (isinstance(
                    seg, segtypes.n64.data.N64SegData) and seg.type[0] == "."):
                task = "cc"
                if "nusys" in entry.src_paths[0].parts:
                    task = "cc_nusys"
                elif "os" in entry.src_paths[0].parts:
                    task = "cc_libultra"
                else:
                    with entry.src_paths[0].open() as f:
                        s = f.read()
                        if "SCRIPT(" in s or "#pragma SCRIPT" in s:
                            task = "cc_dsl"

                build(entry.object_path, entry.src_paths, task)
            elif isinstance(seg, segtypes.n64.bin.N64SegBin):
                build(entry.object_path, entry.src_paths, "bin")
            elif isinstance(seg, segtypes.n64.Yay0.N64SegYay0):
                compressed_path = entry.object_path.with_suffix(
                    "")  # remove .o
                build(compressed_path, entry.src_paths, "yay0")
                build(entry.object_path, [compressed_path], "bin")
            elif isinstance(seg, segtypes.n64.img.N64SegImg):
                flags = ""
                if seg.flip_horizontal:
                    flags += "--flip-x "
                if seg.flip_vertical:
                    flags += "--flip-y "

                build(entry.object_path.with_suffix(".bin"),
                      entry.src_paths,
                      "img",
                      variables={
                          "img_type": seg.type,
                          "img_flags": flags,
                      })
                build(entry.object_path,
                      [entry.object_path.with_suffix(".bin")], "bin")

                build(
                    self.build_path() / "include" / seg.dir /
                    (seg.name + ".png.h"), entry.src_paths, "img_header")
            elif isinstance(seg, segtypes.n64.palette.N64SegPalette):
                build(entry.object_path.with_suffix(".bin"),
                      entry.src_paths,
                      "img",
                      variables={
                          "img_type": seg.type,
                          "img_flags": "",
                      })
                build(entry.object_path,
                      [entry.object_path.with_suffix(".bin")], "bin")
            elif seg.type == "PaperMarioNpcSprites":
                sprite_yay0s = []

                for sprite_id, sprite_dir in enumerate(entry.src_paths, 1):
                    sprite_name = sprite_dir.name

                    bin_path = entry.object_path.with_suffix("") / (
                        sprite_name + ".bin")
                    yay0_path = bin_path.with_suffix(".Yay0")
                    sprite_yay0s.append(yay0_path)

                    variables = {
                        "sprite_id": sprite_id,
                        "sprite_name": sprite_name,
                        "sprite_dir": str(self.resolve_asset_path(sprite_dir)),
                    }

                    build(bin_path, [sprite_dir],
                          "sprite",
                          variables=variables)
                    build(yay0_path, [bin_path], "yay0")
                    build(
                        self.build_path() / "include" / seg.dir / seg.name /
                        (sprite_name + ".h"),
                        [sprite_dir],
                        "sprite_header",
                        variables=variables,
                    )

                build(entry.object_path.with_suffix(".bin"), sprite_yay0s,
                      "sprite_combine")
                build(entry.object_path,
                      [entry.object_path.with_suffix(".bin")], "bin")
            elif seg.type == "PaperMarioMessages":
                msg_bins = []

                for section_idx, msg_path in enumerate(entry.src_paths):
                    bin_path = entry.object_path.with_suffix(
                        "") / f"{section_idx:02X}.bin"
                    msg_bins.append(bin_path)
                    build(bin_path, [msg_path], "msg")

                build([
                    entry.object_path.with_suffix(".bin"),
                    self.build_path() / "include" / "message_ids.h",
                ], msg_bins, "msg_combine")
                build(entry.object_path,
                      [entry.object_path.with_suffix(".bin")], "bin")
            elif seg.type == "PaperMarioMapFS":
                bin_yay0s: List[Path] = []  # flat list of (uncompressed path, compressed? path) pairs

                for path in entry.src_paths:
                    name = path.stem
                    bin_path = entry.object_path.with_suffix("").with_suffix(
                        "") / f"{name}.bin"

                    if name.startswith("party_"):
                        compress = True
                        build(bin_path, [path],
                              "img",
                              variables={
                                  "img_type": "party",
                                  "img_flags": "",
                              })
                    elif name.endswith("_bg"):
                        compress = True
                        build(bin_path, [path],
                              "img",
                              variables={
                                  "img_type": "bg",
                                  "img_flags": "",
                              })
                    elif name.endswith("_tex"):
                        compress = False
                        bin_path = path
                    elif name.endswith("_shape"):
                        map_name = "_".join(name.split("_")[:-1])

                        # Handle map XML files, if they exist (TODO: have splat output these)
                        map_xml = self.resolve_asset_path(
                            Path(f"assets/{self.version}") / seg.dir /
                            seg.name / (map_name + ".xml"))
                        if map_xml.exists():
                            # Build a header file for this map
                            build(
                                self.build_path() / "include" / seg.dir /
                                seg.name / (map_name + ".h"),
                                [map_xml],
                                "map_header",
                            )

                            # NOTE: we don't build the map xml into a _shape or _hit file (yet); the Star Rod Map Editor
                            # is able to build the xml nonmatchingly into assets/star_rod_build/mapfs/*.bin for people
                            # who want that (i.e. modders). 'star_rod_build' should be added to asset_stack also.

                        compress = True
                        bin_path = path
                    else:
                        compress = True
                        bin_path = path

                    if compress:
                        yay0_path = bin_path.with_suffix(".Yay0")
                        build(yay0_path, [bin_path], "yay0")
                    else:
                        yay0_path = bin_path

                    bin_yay0s.append(bin_path)
                    bin_yay0s.append(yay0_path)

                # combine
                build(entry.object_path.with_suffix(""), bin_yay0s, "mapfs")
                build(entry.object_path, [entry.object_path.with_suffix("")],
                      "bin")
            else:
                raise Exception(
                    f"don't know how to build {seg.__class__.__name__} '{seg.name}'"
                )

        # Build elf, z64, ok
        ninja.build(
            str(self.elf_path()),
            "ld",
            str(self.linker_script_path()),
            implicit=[str(obj) for obj in built_objects],
            variables={
                "version": self.version,
                "mapfile": str(self.map_path())
            },
        )
        ninja.build(
            str(self.rom_path()),
            "z64",
            str(self.elf_path()),
            implicit=[CRC_TOOL],
        )
        ninja.build(
            str(self.rom_ok_path()),
            "sha1sum",
            f"ver/{self.version}/checksum.sha1",
            implicit=[str(self.rom_path())],
        )

        ninja.build("generated_headers_" + self.version, "phony",
                    generated_headers)
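
Several pieces of this listing (write_ninja_for_tools from Example #3, make_current from Example #6, and this write_ninja method) look like they are stitched together by a top-level configure step. A hedged sketch of such a driver, assuming a Configuration-like object exposing those methods:

from ninja_syntax import Writer

# Hypothetical top-level driver tying the listing's pieces together
def configure(configs):
    skip_outputs = set()
    with open("build.ninja", "w") as f:
        ninja = Writer(f)
        write_ninja_for_tools(ninja)              # Example #3: host tools (yay0, crc)
        for config in configs:                    # one Configuration per ROM version
            config.write_ninja(ninja, skip_outputs)
            config.make_current(ninja)            # Example #6: ver/current alias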
Example #19
    n = Writer(stream)

    n.comment('Transformers')
    n.rule('uic', py_script('tools/common/uic.py', ['$in', '$out'] + pyuic), 'UIC $out')
    n.rule('rcc', cmd2str([rcc, '-binary', '$in', '-o', '$out']), 'RCC $out')
    n.rule('js_lupdate', py_script('tools/common/js_lupdate.py', ['-o', '$out', '$in']), 'JS-LUPDATE $out')
    n.rule('pylupdate', cmd2str(pylupdate + ['$in', '-ts', '$out']), 'PY-LUPDATE $out')
    n.rule('lupdate', cmd2str([lupdate, '$in', '-ts', '$out']), 'LUPDATE $out')
    n.rule('webpack', cmdenv([node, 'node_modules/webpack/bin/webpack.js'], {'USE_WEBKIT': webkit}), 'WEBPACK $out')

    if sys.platform.startswith('linux'):
        n.rule('cat', 'cat $in > $out', 'CAT $out')

    n.comment('Files')
    ui_targets = build_targets(n, UI_FILES, 'uic', new_ext='py', new_path='knossos/ui')
    n.build('knossos/data/resources.rcc', 'rcc', 'knossos/data/resources.qrc', implicit=RCC_FILES)
    n.build('html/js/translations.js', 'js_lupdate', ['html/js/main.js'])
    n.build('locale/_py.ts', 'pylupdate', SRC_FILES)
    n.build('locale/_ui.ts', 'lupdate', ['locale/_py.ts', 'html/js/translations.js'] + UI_FILES)
    n.build('html/dist/bundle.js', 'webpack', JS_FILES, implicit=['node_modules/webpack/bin/webpack.js'])

    n.comment('Shortcuts')
    n.build('resources', 'phony', ui_targets + ['knossos/data/resources.rcc', 'html/js/translations.js'])

    n.comment('Scripts')
    if 'SKIP_RECONF' not in os.environ:
        n.rule('regen', py_script('configure.py', sys.argv[1:]), 'RECONFIGURE', generator=True)
        n.build('build.ninja', 'regen', ['configure.py', 'knossos/center.py', 'file_list.json'])

    setup_args = ['sdist']
    if check_module('wheel', required=False):
Example #20
        n.rule('make_dirs', command='mkdir -p $build_dir $bin_dir')
    if sys.platform == 'win32':
        n.rule('create_sym_link',
               command=
               'cmd /c mklink $project_name.exe $build_dir\$project_name.exe')
    else:
        n.rule('create_sym_link',
               command='ln -sf $bin_dir/$project_name $project_name')

    n.newline()

    ############################################################################
    # BUILDS
    ############################################################################

    n.build(outputs='dirs', rule='make_dirs')
    n.build(outputs='fresh', rule='clean_all')
    n.build(outputs='sym', rule='create_sym_link')

    sources = []
    for (root, dirnames, filenames) in os.walk(SRC_DIR):
        for filename in fnmatch.filter(filenames, '*.cpp'):
            if sys.platform != 'win32' and 'win32_' not in filename:
                sources.append(os.path.join(root, filename))
            elif sys.platform == 'win32' and filename != 'main.cpp':
                sources.append(os.path.join(root, filename))

    for source in sources:
        if sys.platform == 'win32':
            n.build(outputs=source.replace('.cpp',
                                           '.obj').replace('src', BUILD_DIR),
Example #21
    n.include('build.vars.ninja')

    n.rule(
        'download',
        description='Downloading $url...',
        pool='console',
        command=
        'curl -o $out $url && test "$sha256" = `shasum -a256 $out | cut -f1 -d" "`'
    )

    targets = []

    n.build(
        ['tarballs/%s' % GETTEXT_TARBALL],
        'download',
        variables={
            'url': 'https://ftp.gnu.org/pub/gnu/gettext/%s' % GETTEXT_TARBALL,
            'sha256': GETTEXT_SHA256,
        })

    targets.append(
        gen_configure(
            n,
            'gettext',
            tarball='tarballs/%s' % GETTEXT_TARBALL,
            patches=glob('gettext/*.patch'),
            configure='./configure',
            flags=[
                '--prefix=/',
                'CC=$cc',
                'CXX=$cxx',
Example #22
           command = "avr-objdump -h -S $in > $out")

    n.rule("ocsrec",
           command = "avr-objcopy -O srec -R .eeprom -R .fuse -R .lock -R .signature $in $out")

    n.rule("cdb",
           command = "ninja -t compdb cc > compile_commands.json")

    n.rule("cscf",
            command = "find " + " ".join(set(source_dirs + include_dirs)) + " -regex \".*\\(\\.c\\|\\.h\\|.cpp\\|.hpp\\)$$\" -and -not -type d > $out")

    n.rule("cscdb",
           command = "cscope -bq")

    # Build rules
    n.build("compile_commands.json", "cdb")
    n.build("cscope.files", "cscf")
    n.build(["cscope.in.out", "cscope.po.out", "cscope.out"], "cscdb", "cscope.files")

    objects = []

    def cc(name):
        ofile = subst_ext(name, ".o")
        n.build(ofile, "cc", name)
        objects.append(ofile)
    def cxx(name):
        ofile = subst_ext(name, ".o")
        n.build(ofile, "cxx", name)
        objects.append(ofile)
    def cl(oname, ofiles):
        n.build(oname, "cl", ofiles)
Example #23
    n.variable("prjfile", project_file)
    n.variable("speed", speed)
    n.variable("topfile", os.path.splitext(topfile)[0])

    n.rule("cpbuild", "cp $in $out")
    n.rule("genscript", "echo \"run -ifn $prjfile -ifmt mixed -top $topfile " +
                         "-ofn design.ngc -ofmt NGC -p ${device}-${speed}-" +
                         "${package} -opt_mode $opt_mode -opt_level " +
                         "$opt_level\" > $out")

    n.rule("synthesize", "(cd $builddir; xst -ifn xst_script)")
    n.rule("build", "(cd $builddir; ngdbuild -uc $const design.ngc design.ngd)")
    n.rule("map", "(cd $builddir; map -global_opt $global_opt -logic_opt on " +
                  "-mt on -timing -w design.ngd -o design.ncd design.pcf)")
    n.rule("par", "(cd $builddir; par -w design.ncd finished/design.ncd " +
                  "design.pcf)")
    n.rule("bitgen", "(cd $builddir; bitgen -w finished/design.ncd " +
                     "design.bit design.pcf)")

    for f in sources + [constraint_file]:
        n.build(os.path.join(build_dir, f), "cpbuild", f)
    n.build("${builddir}/xst_script", "genscript", sources)
    n.build("${builddir}/design.ngc", "synthesize", "${builddir}/xst_script")
    n.build("${builddir}/design.ngd", "build", "${builddir}/design.ngc")
    n.build(["${builddir}/design.ncd", "${builddir}/design.pcf"], "map",
            "${builddir}/design.ngd")
    n.build("${builddir}/finished/design.ncd", "par",
            ["${builddir}/design.ncd", "${builddir}/design.pcf"])
    n.build("${builddir}/design.bit", "bitgen",
            ["${builddir}/finished/design.ncd"])
Example #24
    n = Writer(stream)

    n.comment('Transformers')
    n.rule('uic', py_script('tools/common/uic.py', ['$in', '$out'] + pyuic), 'UIC $out')
    n.rule('rcc', cmd2str([rcc, '-binary', '$in', '-o', '$out']), 'RCC $out')
    n.rule('js_lupdate', py_script('tools/common/js_lupdate.py', ['-o', '$out', '$in']), 'JS-LUPDATE $out')
    n.rule('pylupdate', cmd2str(pylupdate + ['$in', '-ts', '$out']), 'PY-LUPDATE $out')
    n.rule('lupdate', cmd2str([lupdate, '$in', '-ts', '$out']), 'LUPDATE $out')
    n.rule('webpack', cmdenv([node, 'node_modules/webpack/bin/webpack.js'], {'USE_WEBKIT': webkit}), 'WEBPACK $out')

    if sys.platform.startswith('linux'):
        n.rule('cat', 'cat $in > $out', 'CAT $out')

    n.comment('Files')
    ui_targets = build_targets(n, UI_FILES, 'uic', new_ext='py', new_path='knossos/ui')
    n.build('knossos/data/resources.rcc', 'rcc', 'knossos/data/resources.qrc', implicit=RCC_FILES)
    n.build('html/js/translations.js', 'js_lupdate', ['html/js/main.js'])
    n.build('locale/_py.ts', 'pylupdate', SRC_FILES)
    n.build('locale/_ui.ts', 'lupdate', ['locale/_py.ts', 'html/js/translations.js'] + UI_FILES)
    n.build('html/dist/bundle.js', 'webpack', JS_FILES, implicit=['node_modules/webpack/bin/webpack.js'])

    n.comment('Shortcuts')
    n.build('resources', 'phony', ui_targets + ['knossos/data/resources.rcc', 'html/js/translations.js'])

    n.comment('Scripts')
    if 'SKIP_RECONF' not in os.environ:
        n.rule('regen', py_script('configure.py', sys.argv[1:]), 'RECONFIGURE', generator=True)
        n.build('build.ninja', 'regen', ['configure.py', 'knossos/center.py', 'file_list.json'])

    setup_args = ['sdist']
    if check_module('wheel', required=False):
Example #25
    def write_ninja(self, ninja: ninja_syntax.Writer, skip_outputs: Set[str], non_matching: bool, debug: bool):
        import segtypes
        import segtypes.common.data
        import segtypes.n64.Yay0

        assert self.linker_entries is not None

        built_objects = set()
        generated_headers = []

        def build(object_paths: Union[Path, List[Path]], src_paths: List[Path], task: str, variables: Dict[str, str] = {}):
            if not isinstance(object_paths, list):
                object_paths = [object_paths]

            object_strs = [str(obj) for obj in object_paths]
            needs_build = False

            for object_path in object_paths:
                if object_path.suffixes[-1] == ".o":
                    built_objects.add(str(object_path))
                elif object_path.suffixes[-1] == ".h" or task == "bin_inc_c":
                    generated_headers.append(str(object_path))

                # don't rebuild objects if we've already seen all of them
                if not str(object_path) in skip_outputs:
                    needs_build = True

            if needs_build:
                skip_outputs.update(object_strs)

                implicit = []
                order_only = []

                if task == "yay0":
                    implicit.append(YAY0_COMPRESS_TOOL)
                elif task in ["cc", "cxx"]:
                    order_only.append("generated_headers_" + self.version)

                ninja.build(
                    object_strs, # $out
                    task,
                    self.resolve_src_paths(src_paths), # $in
                    variables={ "version": self.version, **variables },
                    implicit=implicit,
                    order_only=order_only,
                )

        # Build objects
        for entry in self.linker_entries:
            seg = entry.segment

            if isinstance(seg, segtypes.n64.header.N64SegHeader):
                build(entry.object_path, entry.src_paths, "as")
            elif isinstance(seg, segtypes.common.asm.CommonSegAsm) or (isinstance(seg, segtypes.common.data.CommonSegData) and not seg.type[0] == "."):
                build(entry.object_path, entry.src_paths, "as")
            elif isinstance(seg, segtypes.common.c.CommonSegC) or (isinstance(seg, segtypes.common.data.CommonSegData) and seg.type[0] == "."):
                cflags = None
                if isinstance(seg.yaml, dict):
                    cflags = seg.yaml.get("cflags")
                elif len(seg.yaml) >= 4:
                    cflags = seg.yaml[3]

                # default cflags where not specified
                if cflags is None:
                    if "nusys" in entry.src_paths[0].parts:
                        cflags = ""
                    elif "os" in entry.src_paths[0].parts: # libultra
                        cflags = ""
                    else: # papermario
                        cflags = "-fforce-addr"

                # c
                task = "cc"
                if entry.src_paths[0].suffixes[-1] == ".cpp":
                    task = "cxx"

                if seg.name.endswith("osFlash"):
                    task = "cc_ido"
                elif "gcc_272" in cflags:
                    task = "cc_272"

                cflags = cflags.replace("gcc_272", "")

                build(entry.object_path, entry.src_paths, task, variables={
                    "cflags": cflags,
                    "cppflags": f"-DVERSION_{self.version.upper()}",
                })

                # images embedded inside data aren't linked, but they do need to be built into .inc.c files
                if isinstance(seg, segtypes.common.group.CommonSegGroup):
                    for seg in seg.subsegments:
                        if isinstance(seg, segtypes.n64.img.N64SegImg):
                            flags = ""
                            if seg.flip_horizontal:
                                flags += "--flip-x "
                            if seg.flip_vertical:
                                flags += "--flip-y "

                            src_paths = [seg.out_path().relative_to(ROOT)]
                            inc_dir = self.build_path() / "include" / seg.dir
                            bin_path = self.build_path() / seg.dir / (seg.name + ".png.bin")

                            build(bin_path, src_paths, "img", variables={
                                "img_type": seg.type,
                                "img_flags": flags,
                            })

                            c_sym = seg.create_symbol(
                                addr=seg.vram_start, in_segment=True, type="data", define=True
                            )
                            vars = {"c_name": c_sym.name}
                            build(inc_dir / (seg.name + ".png.h"), src_paths, "img_header", vars)
                            build(inc_dir / (seg.name + ".png.inc.c"), [bin_path], "bin_inc_c", vars)
                        elif isinstance(seg, segtypes.n64.palette.N64SegPalette):
                            src_paths = [seg.out_path().relative_to(ROOT)]
                            inc_dir = self.build_path() / "include" / seg.dir
                            bin_path = self.build_path() / seg.dir / (seg.name + ".pal.bin")

                            build(bin_path, src_paths, "img", variables={
                                "img_type": seg.type,
                                "img_flags": "",
                            })

                            c_sym = seg.create_symbol(
                                addr=seg.vram_start, in_segment=True, type="data", define=True
                            )
                            vars = {"c_name": c_sym.name}
                            build(inc_dir / (seg.name + ".pal.inc.c"), [bin_path], "bin_inc_c", vars)
            elif isinstance(seg, segtypes.common.bin.CommonSegBin):
                build(entry.object_path, entry.src_paths, "bin")
            elif isinstance(seg, segtypes.n64.Yay0.N64SegYay0):
                compressed_path = entry.object_path.with_suffix("") # remove .o
                build(compressed_path, entry.src_paths, "yay0")
                build(entry.object_path, [compressed_path], "bin")
            elif isinstance(seg, segtypes.n64.img.N64SegImg):
                flags = ""
                if seg.flip_horizontal:
                    flags += "--flip-x "
                if seg.flip_vertical:
                    flags += "--flip-y "

                bin_path = entry.object_path.with_suffix(".bin")
                inc_dir = self.build_path() / "include" / seg.dir

                build(bin_path, entry.src_paths, "img", variables={
                    "img_type": seg.type,
                    "img_flags": flags,
                })
                build(entry.object_path, [bin_path], "bin")

                # c_sym = seg.create_symbol(
                #     addr=seg.vram_start, in_segment=True, type="data", define=True
                # )
                # vars = {"c_name": c_sym.name}
                build(inc_dir / (seg.name + ".png.h"), entry.src_paths, "img_header")
            elif isinstance(seg, segtypes.n64.palette.N64SegPalette):
                bin_path = entry.object_path.with_suffix(".bin")

                build(bin_path, entry.src_paths, "img", variables={
                    "img_type": seg.type,
                    "img_flags": "",
                })
                build(entry.object_path, [bin_path], "bin")
            elif seg.type == "pm_npc_sprites":
                sprite_yay0s = []

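                # build each sprite directory into a .bin, Yay0-compress it, and generate a per-sprite header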
                for sprite_id, sprite_dir in enumerate(entry.src_paths, 1):
                    sprite_name = sprite_dir.name

                    bin_path = entry.object_path.with_suffix("") / (sprite_name + ".bin")
                    yay0_path = bin_path.with_suffix(".Yay0")
                    sprite_yay0s.append(yay0_path)

                    variables = {
                        "sprite_id": sprite_id,
                        "sprite_name": sprite_name,
                        "sprite_dir": str(self.resolve_asset_path(sprite_dir)),
                    }

                    build(bin_path, [sprite_dir], "sprite", variables=variables)
                    build(yay0_path, [bin_path], "yay0")
                    build(
                        self.build_path() / "include" / seg.dir / seg.name / (sprite_name + ".h"),
                        [sprite_dir],
                        "sprite_header",
                        variables=variables,
                    )

                build(entry.object_path.with_suffix(".bin"), sprite_yay0s, "sprite_combine")
                build(entry.object_path, [entry.object_path.with_suffix(".bin")], "bin")
            elif seg.type == "pm_msg":
                msg_bins = []

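                # compile each message section to a .bin, then combine them and generate message_ids.h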
                for section_idx, msg_path in enumerate(entry.src_paths):
                    bin_path = entry.object_path.with_suffix("") / f"{section_idx:02X}.bin"
                    msg_bins.append(bin_path)
                    build(bin_path, [msg_path], "msg")

                build([
                    entry.object_path.with_suffix(".bin"),
                    self.build_path() / "include" / "message_ids.h",
                ], msg_bins, "msg_combine")
                build(entry.object_path, [entry.object_path.with_suffix(".bin")], "bin")
            elif seg.type == "pm_map_data":
                bin_yay0s: List[Path] = []  # flat, interleaved list: each asset's uncompressed path followed by its (possibly compressed) counterpart
                src_dir = Path("assets/x") / seg.name

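                # decide how to build each asset from its name (party_*, title_data, *_bg, *_tex, *_shape) and whether to Yay0-compress it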
                for path in entry.src_paths:
                    name = path.stem
                    out_dir = entry.object_path.with_suffix("").with_suffix("")
                    bin_path = out_dir / f"{name}.bin"

                    if name.startswith("party_"):
                        compress = True
                        build(bin_path, [path], "img", variables={
                            "img_type": "party",
                            "img_flags": "",
                        })
                    elif name == "title_data":
                        compress = True

                        logotype_path = out_dir / "title_logotype.bin"
                        copyright_path = out_dir / "title_copyright.bin"
                        copyright_pal_path = out_dir / "title_copyright.pal" # jp only
                        press_start_path = out_dir / "title_press_start.bin"

                        build(logotype_path, [src_dir / "title/logotype.png"], "img", variables={
                            "img_type": "rgba32",
                            "img_flags": "",
                        })
                        build(press_start_path, [src_dir / "title/press_start.png"], "img", variables={
                            "img_type": "ia8",
                            "img_flags": "",
                        })

                        if self.version == "jp":
                            build(copyright_path, [src_dir / "title/copyright.png"], "img", variables={
                                "img_type": "ci4",
                                "img_flags": "",
                            })
                            build(copyright_pal_path, [src_dir / "title/copyright.png"], "img", variables={
                                "img_type": "palette",
                                "img_flags": "",
                            })
                            imgs = [logotype_path, copyright_path, press_start_path, copyright_pal_path]
                        else:
                            build(copyright_path, [src_dir / "title/copyright.png"], "img", variables={
                                "img_type": "ia8",
                                "img_flags": "",
                            })
                            imgs = [logotype_path, copyright_path, press_start_path]

                        build(bin_path, imgs, "pack_title_data")
                    elif name.endswith("_bg"):
                        compress = True
                        bin_path = self.build_path() / bin_path
                        build(bin_path, [path], "img", variables={
                            "img_type": "bg",
                            "img_flags": "",
                        })
                    elif name.endswith("_tex"):
                        compress = False
                        bin_path = path
                    elif name.endswith("_shape"):
                        map_name = "_".join(name.split("_")[:-1])

                        # Handle map XML files, if they exist (TODO: have splat output these)
                        map_xml = self.resolve_asset_path(Path(f"assets/{self.version}") / seg.dir / seg.name / (map_name + ".xml"))
                        if map_xml.exists():
                            # Build a header file for this map
                            build(
                                self.build_path() / "include" / seg.dir / seg.name / (map_name + ".h"),
                                [map_xml],
                                "map_header",
                            )

                            # NOTE: we don't build the map xml into a _shape or _hit file (yet); the Star Rod Map Editor
                            # can build the xml (non-matching) into assets/star_rod_build/mapfs/*.bin for people who
                            # want that (i.e. modders). 'star_rod_build' should then also be added to asset_stack.

                        compress = True
                        bin_path = path
                    else:
                        compress = True
                        bin_path = path

                    if compress:
                        yay0_path = out_dir / f"{name}.Yay0"
                        build(yay0_path, [bin_path], "yay0")
                    else:
                        yay0_path = bin_path

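                    # record both the raw path and its (possibly) compressed counterpart; the mapfs rule consumes them as pairs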
                    bin_yay0s.append(bin_path)
                    bin_yay0s.append(yay0_path)

                # combine
                build(entry.object_path.with_suffix(""), bin_yay0s, "mapfs")
                build(entry.object_path, [entry.object_path.with_suffix("")], "bin")
            elif seg.type == "pm_charset":
                rasters = []

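                # convert every charset raster to CI4, then concatenate the results with the pm_charset rule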
                for src_path in entry.src_paths:
                    out_path = self.build_path() / seg.dir / seg.name / (src_path.stem + ".bin")
                    build(out_path, [src_path], "img", variables={
                        "img_type": "ci4",
                        "img_flags": "",
                    })
                    rasters.append(out_path)

                build(entry.object_path.with_suffix(""), rasters, "pm_charset")
                build(entry.object_path, [entry.object_path.with_suffix("")], "bin")
            elif seg.type == "pm_charset_palettes":
                palettes = []

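                # convert every palette source to a raw palette .bin, then concatenate with pm_charset_palettes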
                for src_path in entry.src_paths:
                    out_path = self.build_path() / seg.dir / seg.name / "palette" / (src_path.stem + ".bin")
                    build(out_path, [src_path], "img", variables={
                        "img_type": "palette",
                        "img_flags": "",
                    })
                    palettes.append(out_path)

                build(entry.object_path.with_suffix(""), palettes, "pm_charset_palettes")
                build(entry.object_path, [entry.object_path.with_suffix("")], "bin")
            elif seg.type in ["pm_effect_loads", "pm_effect_shims"]:
                build(entry.object_path, entry.src_paths, "as")
            elif seg.type == "linker" or seg.type == "linker_offset":
                pass
            else:
                raise Exception(f"don't know how to build {seg.__class__.__name__} '{seg.name}'")

        # Create objcopy section list
        if debug:
            ninja.build(
                str(self.objcopy_sections_path()),
                "genobjcopy",
                str(self.build_path() / "elf_sections.txt"),
            )

        # Run undefined_syms through cpp
        ninja.build(
            str(self.undefined_syms_path()),
            "cpp",
            str(self.version_path / "undefined_syms.txt")
        )

        # Build elf, z64, ok
        additional_objects = [str(self.undefined_syms_path())]
        if debug:
            additional_objects += [str(self.objcopy_sections_path())]

        ninja.build(
            str(self.elf_path()),
            "ld",
            str(self.linker_script_path()),
            implicit=[str(obj) for obj in built_objects] + additional_objects,
            variables={ "version": self.version, "mapfile": str(self.map_path()) },
        )
        ninja.build(
            str(self.rom_path()),
            "z64",
            str(self.elf_path()),
            implicit=[CRC_TOOL],
            variables={ "version": self.version },
        )
        ninja.build(
            str(self.rom_ok_path()),
            "sha1sum",
            f"ver/{self.version}/checksum.sha1",
            implicit=[str(self.rom_path())],
        )

        ninja.build("generated_headers_" + self.version, "phony", generated_headers)
n.rule("LINK",  # NOTE: rule name assumed, inferred from the description below
       command="$CXX -o $out $in --memory-init-file 0 -Wall -Os -s EXPORTED_FUNCTIONS=$EXPORT_LIST -s TOTAL_MEMORY=50000000",
       description="LINK $out")
n.newline()

n.rule("COMPRESS",
       command="zopfli -c --i25 $in > $out",
       description="COMPRESS $out")
n.newline()

# build the Embedded XZ library
n.comment("build the Embedded XZ library")
xz_object_list = []
for src in XZ_EMBEDDED_C_SOURCES:
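    # foo.c -> foo.o: drop the trailing 'c' and append 'o'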
    object_file = "$OBJECTS/" + src[:-1] + "o"
    n.build(object_file, "XZ_CC", inputs="$XZ_ROOT/" + src)
    xz_object_list.append(object_file)
object_file = "$OBJECTS/xzdec.o"
n.build(object_file, "XZ_CC", inputs="xzdec.c")
xz_object_list.append(object_file)
n.newline()

targets_list = []
compressed_targets_list = []

# build the many GME players
for gme_type in GME_TYPES:
    n.comment("the build files for " + gme_type + " GME player")
    object_list = xz_object_list[:]
    cxx_compiler = "GME_" + gme_type + "_CXX"
    for src in GME_CPP_SOURCES:
Пример #27
0
           command = "$tc_path/arm-none-eabi-gcc $cflags -S -c $in -o $out")

    n.rule("cl",
           command = "$tc_path/arm-none-eabi-gcc $lflags $in -o $out")

    n.rule("oc",
           command = "$tc_path/arm-none-eabi-objcopy -O binary $in $out")

    n.rule("cdb",
           command = "ninja -t compdb cc cxx > compile_commands.json")

    n.rule("cscdb",
           command = "cscope -Rbq")

    # Build rules
    n.build("compile_commands.json", "cdb")
    n.build("cscope.*", "cscdb")

    objects = []

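    # helpers: build a source file with the matching rule and record the resulting output file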
    def cc(name):
        ofile = subst_ext(name, ".o")
        n.build(ofile, "cc", name)
        objects.append(ofile)
    def cca(name):
        ofile = subst_ext(name, ".s")
        n.build(ofile, "cca", name)
        objects.append(ofile)
    def cxx(name):
        ofile = subst_ext(name, ".o")
        n.build(ofile, "cxx", name)
Пример #28
0
 w.rule('cpp',  # NOTE: rule name matches the 'cpp' builds below; compiler/flags variable names are assumed
        command=[
            cpp_compiler, '$cppflags', '-MMD', '-MF', '$out.d', '-c', '-o',
            '$out', '$in'
        ],
        depfile='$out.d')
 w.rule('cc',
        command=[
            c_compiler, '$cflags', '-MMD', '-MF', '$out.d', '-c', '-o',
            '$out', '$in'
        ],
        depfile='$out.d')
 w.rule('bin2c', command=['python', 'bin2c.py', '$varname', '$in', '$out'])
 objs = []
 for src in cpp_sources:
     stem = strip_suffix(src)
     obj = '$builddir/' + stem + '.o'
     objs.append(obj)
     w.build(obj, rule='cpp', inputs=src)
 for src in c_sources:
     stem = strip_suffix(src)
     obj = '$builddir/' + stem + '.o'
     objs.append(obj)
     w.build(obj, rule='cc', inputs=src)
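 # embed binary data: bin2c emits a C source containing the named array, which is then compiled like any other C file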
 for varname, filename in binary_data.items():
     c_file = '$builddir/generated/' + filename + '.c'
     w.build(c_file,
             rule='bin2c',
             inputs=filename,
             variables={'varname': varname})
     o_file = '$builddir/' + filename + '.o'
     w.build(o_file, rule='cc', inputs=c_file)
     objs.append(o_file)
 w.rule('ld', command=[linker, '-o', '$out', '$in', '$ldflags'])