def shogunAllProto(argv):
    '''
    Generate XXX.pb.{h,cc} files from all available XXX.proto
    files in the source directory.

    Depends: protoc (protobuf-compiler)
    Input: .proto
    Output: .pb.cc, .pb.h
    '''
    ag = argparse.ArgumentParser()
    ag.add_argument('-o', help='write ninja file', type=str, default='all_proto.ninja')
    ag = ag.parse_args(argv)
    print(red('Argument Dump:'))
    pprint(vars(ag))
    # (1) initialize ninja file
    cursor = Writer(open(ag.o, 'w'))
    ninjaCommonHeader(cursor, ag)
    # (2) glob all proto files
    protos = glob.glob('**/*.proto', recursive=True)
    print(cyan('AllProto:'), f'globbed {len(protos)} .proto files')
    # (3) generate .pb.cc, .pb.h
    for proto in protos:
        cursor.build([proto.replace('.proto', '.pb.cc'),
                      proto.replace('.proto', '.pb.h')],
                     'rule_PROTOC', proto)
    # done
    cursor.close()
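# The rule_PROTOC build rule referenced above comes from ninjaCommonHeader,
# which is defined elsewhere in this script. A minimal sketch of what such a
# rule could look like with ninja_syntax, assuming a plain `protoc --cpp_out=.`
# invocation (the real command and flags may differ):
def _sketch_rule_PROTOC():
    import sys
    from ninja_syntax import Writer
    w = Writer(sys.stdout)
    # Hypothetical rule: compile one .proto into sibling .pb.{cc,h} files.
    w.rule('rule_PROTOC', 'protoc --cpp_out=. $in')
    w.build(['example.pb.cc', 'example.pb.h'], 'rule_PROTOC', 'example.proto')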
def shogunProtoText(argv):
    '''
    Build a binary ELF executable named proto_text, which generates
    XXX.pb_text{.cc,.h,-impl.h} files from a given XXX.proto file.
    This binary is for one-time use.

    Depends: shogunAllProto
    Input: bazelDump, cxx source
    Output: proto_text
    '''
    ag = argparse.ArgumentParser()
    ag.add_argument('-i', help='list of source files', type=str, required=True)
    ag.add_argument('-g', help='list of generated files', type=str, required=True)
    ag.add_argument('-o', help='where to write the ninja file', type=str, default='proto_text.ninja')
    ag = ag.parse_args(argv)
    print(red('Argument Dump:'))
    pprint(vars(ag))
    # (0) read bazel dump and apply hardcoded filters
    srclist = bazelPreprocess([l.strip() for l in open(ag.i, 'r').readlines()])
    genlist = bazelPreprocess([l.strip() for l in open(ag.g, 'r').readlines()])
    _, srclist = eGrep('.*.h$', srclist)         # headers need no compilation here
    _, srclist = eGrep('^third_party', srclist)  # no third_party stuff
    _, srclist = eGrep('.*windows/.*', srclist)  # no Windows-specific source
    _, srclist = eGrep('.*.proto$', srclist)     # already handled by shogunAllProto
    # (1) instantiate ninja writer
    cursor = Writer(open(ag.o, 'w'))
    ninjaCommonHeader(cursor, ag)
    # (2) deal with generated files
    # (2.1) .pb.cc and .pb.h files are generated in shogunAllProto
    _, genlist = eGrep('.*.pb.h$', genlist)
    pbcclist, genlist = eGrep('.*.pb.cc$', genlist)
    if len(genlist) > 0:
        print(yellow('Remainders:'), genlist)
    # (3) deal with source files
    cclist, srclist = eGrep('.*.cc', srclist)
    objlist = []
    for cc in cclist + pbcclist:
        obj = cc.replace('.cc', '.o')
        objlist.append(cursor.build(obj, 'rule_CXX_OBJ', cc)[0])
    if len(srclist) > 0:
        print(yellow('Remainders:'), srclist)
    # (4) link objects into the final ELF
    cursor.build('proto_text', 'rule_CXX_EXEC', inputs=objlist,
                 variables={'LIBS': '-lpthread -lprotobuf -ldouble-conversion'})
    # done
    cursor.close()
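# For reference: the eGrep helper used throughout these functions is defined
# elsewhere in this script. The call sites assume a "partition by regex"
# contract: return the matching entries and the remainder as two lists.
# A minimal sketch of that assumed behavior (not the actual implementation):
def _sketch_eGrep(pattern, lines):
    '''Split lines into (matched, remainder) by regex `pattern`.'''
    import re
    matched = [x for x in lines if re.match(pattern, x)]
    remainder = [x for x in lines if not re.match(pattern, x)]
    return matched, remainder

# e.g. _sketch_eGrep(r'.*\.cc$', ['a.cc', 'b.h']) -> (['a.cc'], ['b.h'])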
def shogunCCOP(argv):
    '''
    Generate tensorflow cc ops: tensorflow/cc/ops/*.cc and *.h

    Depends: AllProto, proto_text, libtensorflow_framework
    Input: cc source, bazel dump
    Output: one-time-use binaries "XXX_gen_cc" and the generated .cc/.h files.
    '''
    ag = argparse.ArgumentParser()
    ag.add_argument('-i', help='list of source files', type=str, required=True)
    ag.add_argument('-g', help='list of generated files', type=str, required=True)
    ag.add_argument('-o', help='where to write the ninja file', type=str, default='ccop.ninja')
    ag = ag.parse_args(argv)
    print(red('Argument Dump:'))
    pprint(vars(ag))
    # (0) read bazel dump and apply hardcoded filters
    genlist = bazelPreprocess([l.strip() for l in open(ag.g, 'r').readlines()])
    # (1) instantiate ninja writer
    cursor = Writer(open(ag.o, 'w'))
    ninjaCommonHeader(cursor, ag)
    # (2) filter unrelated files; we only want cc_op related files
    _, genlist = eGrep('.*.pb.h', genlist)
    _, genlist = eGrep('.*.pb.cc', genlist)
    _, genlist = eGrep('.*.pb_text.h', genlist)
    _, genlist = eGrep('.*.pb_text-impl.h', genlist)
    _, genlist = eGrep('.*.pb_text.cc', genlist)
    # (3) XXX_gen_cc
    # (3.1) deal with a missing source
    cursor.build('tensorflow/core/ops/user_ops.cc', 'COPY',
                 inputs='tensorflow/core/user_ops/fact.cc')
    # (3.2) build several common objects
    main_cc = ['tensorflow/core/framework/op_gen_lib.cc',
               'tensorflow/cc/framework/cc_op_gen.cc',
               'tensorflow/cc/framework/cc_op_gen_main.cc']
    main_obj = [x.replace('.cc', '.o') for x in main_cc]
    for cc in main_cc:
        cursor.build(cc.replace('.cc', '.o'), 'rule_CXX_OBJ', inputs=cc)
    # (3.3) build the executables and generate files with them
    gen_ccopcc, genlist = eGrep('.*/cc/ops/.*.cc', genlist)
    gen_ccoph, genlist = eGrep('.*/cc/ops/.*.h', genlist)
    opnamelist = list(set(os.path.basename(x.replace('.cc', '').replace('.h', ''))
                          for x in (gen_ccopcc + gen_ccoph) if 'internal' not in x))
    for opname in opnamelist:
        coreopcc = 'tensorflow/core/ops/' + opname + '.cc'
        ccopcc = 'tensorflow/cc/ops/' + opname + '.cc'
        # build the corresponding ELF executable
        cursor.build(f'{opname}_gen_cc', 'rule_CXX_EXEC',
                     inputs=[coreopcc] + main_obj,
                     variables={'SHOGUN_EXTRA': '-I. -L. -ltf_ccop'})
        # generate files with it
        cursor.build([ccopcc.replace('.cc', '.h'), ccopcc], 'rule_CC_OP_GEN',
                     inputs=f'{opname}_gen_cc',
                     variables={'cc_op_gen_internal':
                                '0' if opname != 'sendrecv_ops' else '1'},
                     implicit_outputs=[ccopcc.replace('.cc', '_internal.h'),
                                       ccopcc.replace('.cc', '_internal.cc')])
    # done
    cursor.close()
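# Worked example of the opname derivation in shogunCCOP, using hypothetical
# generated-file names: stripping extensions and taking basenames collapses
# each op's .cc/.h pair into one name, and *_internal files are skipped.
def _example_opname_derivation():
    import os
    gen = ['tensorflow/cc/ops/array_ops.cc',
           'tensorflow/cc/ops/array_ops.h',
           'tensorflow/cc/ops/array_ops_internal.h']
    opnames = list(set(os.path.basename(x.replace('.cc', '').replace('.h', ''))
                       for x in gen if 'internal' not in x))
    assert opnames == ['array_ops']  # -> one-shot binary 'array_ops_gen_cc'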
def shogunTFLib_framework(argv):
    '''
    Build libtensorflow_framework.so. With slight modification, this
    function should be able to build libtensorflow_android.so too.

    Depends: AllProto, proto_text
    Input: bazelDump, cxx source
    Output: libtensorflow_framework.so
    '''
    ag = argparse.ArgumentParser()
    ag.add_argument('-i', help='list of source files', type=str, required=True)
    ag.add_argument('-g', help='list of generated files', type=str, required=True)
    ag.add_argument('-o', help='where to write the ninja file', type=str,
                    default='libtensorflow_framework.ninja')
    ag = ag.parse_args(argv)
    print(red('Argument Dump:'))
    pprint(vars(ag))
    # (0) read bazel dump and apply hardcoded filters
    srclist = bazelPreprocess([l.strip() for l in open(ag.i, 'r').readlines()])
    genlist = bazelPreprocess([l.strip() for l in open(ag.g, 'r').readlines()])
    _, srclist = eGrep('.*proto_text.gen_proto_text_functions.cc', srclist)
    _, srclist = eGrep('^third_party', srclist)
    _, srclist = eGrep('.*/windows/.*', srclist)  # no Windows-specific source
    # (1) initialize ninja file
    cursor = Writer(open(ag.o, 'w'))
    ninjaCommonHeader(cursor, ag)
    # (2) deal with generated files
    # (2.1) .pb.h and .pb.cc are already generated by shogunAllProto
    gen_pbh, genlist = eGrep('.*.pb.h', genlist)
    gen_pbcc, genlist = eGrep('.*.pb.cc', genlist)
    # (2.2) .pb_text.*: map the generated names back to their .proto sources
    pbtlist = [x for x in genlist if any(x.endswith(y) for y in
               ('.pb_text.h', '.pb_text.cc', '.pb_text-impl.h'))]
    pbtlist = [x.replace('.pb_text.h', '.proto').replace('.pb_text.cc', '.proto')
                .replace('.pb_text-impl.h', '.proto') for x in pbtlist]
    gen_pbth, genlist = eGrep('.*.pb_text.h', genlist)
    gen_pbtih, genlist = eGrep('.*.pb_text-impl.h', genlist)
    gen_pbtcc, genlist = eGrep('.*.pb_text.cc', genlist)
    for pbt in list(set(pbtlist)):
        cursor.build([pbt.replace('.proto', '.pb_text.h'),
                      pbt.replace('.proto', '.pb_text.cc'),
                      pbt.replace('.proto', '.pb_text-impl.h')],
                     'rule_PROTO_TEXT', pbt)
    # (2.3) finish dealing with generated files
    if genlist:
        print(yellow('Remainders:'), genlist)
        assert len(genlist) == 1
    # (3) deal with source files
    # (3.1) filter out headers and .proto files (the latter were done in (2))
    _, srclist = eGrep('.*.proto$', srclist)
    src_hdrs, srclist = eGrep('.*.h$', srclist)
    # (3.2) compile .cc source
    src_cc, srclist = eGrep('.*.cc', srclist)
    objlist = []
    for cc in src_cc + gen_pbcc + gen_pbtcc + genlist:
        obj = cursor.build(cc.replace('.cc', '.o'), 'rule_CXX_OBJ', inputs=cc)[0]
        objlist.append(obj)
    # (4) link the final shared object
    cursor.build('libtensorflow_framework.so', 'rule_CXX_SHLIB', inputs=objlist,
                 variables={'LIBS': '-lfarmhash -lhighwayhash -lsnappy -lgif'
                            + ' -ldouble-conversion -lz -lprotobuf -ljpeg -lnsync -lnsync_cpp'
                            + ' -lpthread',
                            'SHOGUN_EXTRA':
                            f'-Wl,--soname=libtensorflow_framework.so.{tf_soversion}'
                            + ' -Wl,--version-script tensorflow/tf_framework_version_script.lds'
                            + ' -fvisibility=hidden'})
    # (5) a temporary shared object used by shogunCCOP
    libccop = [x for x in objlist if all(y not in x for y in ('core/kernels', 'core/ops'))]
    cursor.build('libtf_ccop.so', 'rule_CXX_SHLIB', inputs=libccop,
                 variables={'LIBS': '-lfarmhash -lhighwayhash -lsnappy -lgif'
                            + ' -ldouble-conversion -lz -lprotobuf -ljpeg -lnsync -lnsync_cpp'
                            + ' -lpthread'})
    # done
    cursor.close()
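# End-to-end usage sketch. The bazel dump file names ('source.txt',
# 'gen.txt') are hypothetical, and `ninja` is assumed to be on PATH;
# the build order follows the Depends: notes in the docstrings above:
# AllProto -> proto_text -> libtensorflow_framework -> CCOP.
def _sketch_pipeline():
    import subprocess
    shogunAllProto(['-o', 'all_proto.ninja'])
    shogunProtoText(['-i', 'source.txt', '-g', 'gen.txt',
                     '-o', 'proto_text.ninja'])
    shogunTFLib_framework(['-i', 'source.txt', '-g', 'gen.txt',
                           '-o', 'libtensorflow_framework.ninja'])
    shogunCCOP(['-i', 'source.txt', '-g', 'gen.txt', '-o', 'ccop.ninja'])
    for f in ('all_proto.ninja', 'proto_text.ninja',
              'libtensorflow_framework.ninja', 'ccop.ninja'):
        subprocess.run(['ninja', '-f', f], check=True)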