def generate_item_models():
    """Regenerate the cog-based spider model file and apply migrations.

    Runs cog in replace mode (-r) over models/spider_models.py, reloads the
    affected modules so the freshly generated classes become visible, then
    runs the makemigrations/migrate management commands for 'spiderman'.
    """
    models_path = pth.join(pth.dirname(__file__), "models", "spider_models.py")
    # cog rewrites the file in place; argv[0] stands in for the program name
    Cog().main([sys.argv[0], "-r", models_path])
    # pick up the regenerated classes
    for module in (spider_models, models):
        reload(module)
    call_command("makemigrations")
    call_command("migrate", "spiderman")
def _runcog(options, files, uncog=False):
    """Common function for the cog and runcog tasks."""
    options.order('cog', 'sphinx', add_rest=True)
    cog = Cog()
    if uncog:
        # uncog mode: strip previously generated output instead of producing it
        cog.options.bNoGenerate = True
    cog.options.bReplace = True
    cog.options.bDeleteCode = options.get("delete_code", False)
    includedir = options.get('includedir', None)
    if includedir:
        include = Includer(
            includedir, cog=cog,
            include_markers=options.get("include_markers"))
        # load cog's namespace with our convenience functions.
        cog.options.defines['include'] = include
        cog.options.defines['sh'] = _cogsh(cog)

    # marker strings delimiting the cog blocks in the processed files
    cog.options.sBeginSpec = options.get('beginspec', '[[[cog')
    cog.options.sEndSpec = options.get('endspec', ']]]')
    cog.options.sEndOutput = options.get('endoutput', '[[[end]]]')

    basedir = options.get('basedir', None)
    if basedir is None:
        basedir = (path(options.get('docroot', "docs"))
                   / options.get('sourcedir', ""))
    basedir = path(basedir)

    if not files:
        pattern = options.get("pattern", "*.rst")
        files = (basedir.walkfiles(pattern) if pattern
                 else basedir.walkfiles())
    for target in sorted(files):
        dry("cog %s" % target, cog.processOneFile, target)
def insert_files_in_rsts(pkg_dir, cog_exe):
    """ Cog-insert source files in Sphinx files """
    doc_files = [
        os.path.join(pkg_dir, 'docs', 'README.rst'),
        os.path.join(pkg_dir, 'README.rst'),
    ]
    print('Inserting source files in documentation files')
    for fname in doc_files:
        print(' Processing file {0}'.format(fname))
        tmp_name = fname + '.tmp'
        # First pass (-x) deletes any previously generated output
        if Cog().main([cog_exe, '-e', '-x', '-o', tmp_name, fname]):
            raise RuntimeError('Error deleting insertion of source files in '
                               'documentation file {0}'.format(fname))
        # Second pass regenerates the embedded content into the temp file
        if Cog().main([cog_exe, '-e', '-o', tmp_name, fname]):
            raise RuntimeError('Error inserting source files in '
                               'docstrings in module {0}'.format(fname))
        # Atomically replace the original with the regenerated version
        move_file(tmp_name, fname)
def test_if_cog_needs_to_be_run():
    """Check that README.md is current with respect to its cog blocks.

    Runs cog (non-replacing) on README.md while capturing stdout; the
    captured output must equal the file on disk, otherwise `cog -r`
    needs to be re-run.
    """
    readme = pathlib.Path(__file__).parents[1] / "README.md"
    saved_stdout = sys.stdout
    sys.stdout = StringIO()
    Cog().main(["cog", str(readme)])
    captured = sys.stdout.getvalue()
    sys.stdout = saved_stdout
    expected = readme.read_text()
    assert (
        captured == expected
    ), "Run 'cog -r README.md' from the top level of the repo, with Python < 3.10"
def write_embedded_values(propagation_channel_id,
                          sponsor_id,
                          client_version,
                          embedded_server_list,
                          remote_server_list_signature_public_key,
                          remote_server_list_url_split,
                          feedback_encryption_public_key,
                          feedback_upload_server,
                          feedback_upload_path,
                          feedback_upload_server_headers,
                          info_link_url,
                          upgrade_signature_public_key,
                          upgrade_url_split,
                          get_new_version_url,
                          get_new_version_email,
                          faq_url,
                          privacy_policy_url,
                          propagator_managed_upgrades,
                          ignore_non_embedded_server_entries=False,
                          home_tab_url_exclusions=None):
    """Bake build-time configuration into the embedded-values source file.

    Pushes client configuration (server lists, feedback/upgrade endpoints,
    signing keys, URLs, ...) into the shared ``utils`` module, then runs cog
    over the ``EMBEDDED_VALUES_FILENAME + '.stub'`` template to regenerate
    the real embedded-values file.

    Raises:
        RuntimeError: if cog exits with a non-zero status.
    """
    # Fix: a mutable default ([]) would be shared across calls; use a None
    # sentinel instead (backward-compatible for all existing callers).
    if home_tab_url_exclusions is None:
        home_tab_url_exclusions = []
    utils.set_embedded_values(
        client_version,
        '","'.join(embedded_server_list),
        ignore_non_embedded_server_entries,
        feedback_encryption_public_key,
        feedback_upload_server,
        feedback_upload_path,
        feedback_upload_server_headers,
        info_link_url,
        '',
        '',
        urlparse.urlunsplit(upgrade_url_split),
        upgrade_signature_public_key,
        get_new_version_url,
        get_new_version_email,
        faq_url,
        privacy_policy_url,
        propagator_managed_upgrades,
        propagation_channel_id,
        sponsor_id,
        urlparse.urlunsplit(remote_server_list_url_split),
        remote_server_list_signature_public_key,
        '","'.join(home_tab_url_exclusions))
    # -U writes unix newlines; -I adds the cwd to cog's include path so the
    # stub template can import helpers from the build directory.
    cog_args = shlex.split('cog -U -I "%s" -o "%s" -D buildname="" "%s"'
                           % (os.getcwd(), EMBEDDED_VALUES_FILENAME,
                              EMBEDDED_VALUES_FILENAME + '.stub'))
    ret_error = Cog().main(cog_args)
    if ret_error != 0:
        # Fix: the original Python-2 `print` statement plus a bare `raise`
        # (with no active exception) would itself error out; raise an
        # explicit, descriptive exception instead.
        print('Cog failed with error: %d' % ret_error)
        raise RuntimeError('Cog failed with error: %d' % ret_error)
def test_can_load():
    """
    Tests whether the external cog code-gen app can load our serialized objects
    """
    wrapper = __test_cases()
    for opts in wrapper:
        # create a dummy callgen
        callgen = CallgenResult(order=opts.order, lang=opts.lang,
                                dev_mem_type=wrapper.state['dev_mem_type'],
                                type_map=type_map(opts.lang))
        with temporary_directory() as tdir:
            # Write a C++ file whose cog block unpickles the callgen and
            # builds a memory manager from it; on success the block emits
            # 'success!' into the generated output.
            with open(os.path.join(tdir, 'test.cpp'), mode='w') as file:
                file.write("""
/*[[[cog
import cog
import os
import pickle
# next, unserialize the callgen
with open(callgen, 'rb') as file:
    call = pickle.load(file)

# and create a memory manager
from pyjac.kernel_utils.memory_tools import get_memory
mem = get_memory(call)
cog.outl('success!')
]]]
[[[end]]]*/""")
            # and serialize mem
            with open(os.path.join(tdir, 'callgen.pickle'), 'wb') as file:
                pickle.dump(callgen, file)

            # and call cog
            # -Dcallgen passes the pickle path into the template's namespace;
            # -e warns if the file has no cog code; -d strips generator code
            from cogapp import Cog
            cmd = [
                'cog', '-e', '-d', '-Dcallgen={}'.format(
                    os.path.join(tdir, 'callgen.pickle')),
                '-o', os.path.join(tdir, 'test'),
                os.path.join(tdir, 'test.cpp')]
            Cog().callableMain(cmd)

            # the generated file should contain only the success marker
            with open(os.path.join(tdir, 'test'), 'r') as file:
                assert file.read().strip() == 'success!'
def main():
    """Command-line entry point for robocompdsl.

    Two modes (selected by the input file extension and argument count):
      * INPUT.cdsl OUTPUT_PATH  -> generate a full component (C++/Python)
        skeleton from cog templates under /opt/robocomp/share/robocompdsl/.
      * INPUT.cdsl (no output)  -> emit dummy CDSL/SMDSL starter files.
      * INPUT.idsl OUTPUT_PATH  -> generate an .ice slice file.

    Existing "specific" files are never overwritten: the new version is
    written next to them with a '.new' suffix, and optionally compared with
    a diff tool (-d).
    """
    parser = MyParser(
        description='This application create components files from cdsl files or .ice from idsl\n'
        '\ta) to generate code from a CDSL file: ' + sys.argv[0].split('/')[-1] +
        ' INPUT_FILE.CDSL OUTPUT_PATH\n' +
        '\tb) to generate a new CDSL file: ' + sys.argv[0].split('/')[-1] +
        ' NEW_COMPONENT_DESCRIPTOR.CDSL',
        formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument("-I", "--include_dirs", nargs='*',
                        help="Include directories",
                        action=FullPaths, default=[])
    parser.add_argument("-d", '--diff', dest='diff', choices=DIFF_TOOLS,
                        action='store')
    parser.add_argument("input_file", help="The input dsl file")
    parser.add_argument("output_path", nargs='?',
                        help="The path to put the files")
    args = parser.parse_args()
    if args.output_path is None:
        # No output path: only valid for creating a fresh dummy CDSL skeleton.
        if args.input_file.endswith(".cdsl"):
            generateDummyCDSL(args.input_file)
            generateDummySMDSL("statemachine.smdsl")
            sys.exit(0)
        else:
            print(args.output_path, args.input_file)
            print(parser.error("No output path with non .cdsl file"))
            sys.exit(-1)
    inputFile = args.input_file
    outputPath = args.output_path
    sys.path.append('/opt/robocomp/python')
    # maps abspath(existing file) -> abspath(freshly generated '.new' twin)
    new_existing_files = {}

    if inputFile.endswith(".cdsl"):
        component = DSLFactory().from_file(
            inputFile, includeDirectories=args.include_dirs)
        # '#'-joined import list, passed to the cog templates as theIDSLs
        imports = ''.join([imp + '#' for imp in component['imports']])
        # verification
        pool = IDSLPool(imports, args.include_dirs)
        interface_list = component['requires'] + component[
            'implements'] + component['subscribesTo'] + component['publishes']
        for interface_required in interface_list:
            # entries may be plain strings or [name, ...] lists
            interface_required = interface_required if isinstance(
                interface_required, str) else interface_required[0]
            if not pool.moduleProviding(interface_required):
                raise rcExceptions.InterfaceNotFound(interface_required,
                                                     pool.interfaces())

        if component['language'].lower(
        ) == 'cpp' or component['language'].lower() == 'cpp11':
            #
            # Check output directory
            #
            if not os.path.exists(outputPath):
                create_directory(outputPath)
            # Create directories within the output directory
            try:
                create_directory(outputPath + "/bin")
                create_directory(outputPath + "/etc")
                create_directory(outputPath + "/src")
            except:
                print('There was a problem creating a directory')
                sys.exit(1)
                pass
            #
            # Generate regular files
            #
            files = [
                'CMakeLists.txt', 'DoxyFile', 'README-STORM.txt', 'README.md',
                'etc/config', 'src/main.cpp', 'src/CMakeLists.txt',
                'src/CMakeListsSpecific.txt', 'src/commonbehaviorI.h',
                'src/commonbehaviorI.cpp', 'src/genericmonitor.h',
                'src/genericmonitor.cpp', 'src/config.h',
                'src/specificmonitor.h', 'src/specificmonitor.cpp',
                'src/genericworker.h', 'src/genericworker.cpp',
                'src/specificworker.h', 'src/specificworker.cpp',
                'src/mainUI.ui'
            ]
            # user-editable files: never overwritten, written as '.new' instead
            specificFiles = [
                'src/specificworker.h', 'src/specificworker.cpp',
                'src/CMakeListsSpecific.txt', 'src/mainUI.ui',
                'src/specificmonitor.h', 'src/specificmonitor.cpp',
                'README.md', 'etc/config'
            ]
            for f in files:
                ofile = outputPath + '/' + f
                if f in specificFiles and os.path.exists(ofile):
                    print('Not overwriting specific file "' + ofile +
                          '", saving it to ' + ofile + '.new')
                    new_existing_files[os.path.abspath(
                        ofile)] = os.path.abspath(ofile) + '.new'
                    ofile += '.new'
                ifile = "/opt/robocomp/share/robocompdsl/templateCPP/" + f
                # the UI file is only generated when the component declares a gui
                if f != 'src/mainUI.ui' or component['gui'] is not None:
                    print('Generating', ofile)
                    run = "cog.py -z -d -D theCDSL=" + inputFile + \
                        " -D theIDSLs=" + imports + \
                        ' -D theIDSLPaths=' + '#'.join(args.include_dirs) + \
                        " -o " + ofile + " " + ifile
                    run = run.split(' ')
                    ret = Cog().main(run)
                    if ret != 0:
                        print('ERROR')
                        sys.exit(-1)
                    replaceTagsInFile(ofile)
            #
            # Generate interface-dependent files
            #
            for ima in component['implements']:
                im = ima
                if type(im) != type(''):
                    im = im[0]
                if communicationIsIce(ima):
                    for f in ["SERVANT.H", "SERVANT.CPP"]:
                        ofile = outputPath + '/src/' + im.lower(
                        ) + 'I.' + f.split('.')[-1].lower()
                        print('Generating ', ofile, ' (servant for', im + ')')
                        # Call cog
                        run = "cog.py -z -d -D theCDSL=" + inputFile + \
                            " -D theIDSLs=" + imports + \
                            ' -D theIDSLPaths=' + '#'.join(args.include_dirs) + \
                            " -D theInterface=" + im + \
                            " -o " + ofile + " " + \
                            "/opt/robocomp/share/robocompdsl/templateCPP/" + f
                        run = run.split(' ')
                        ret = Cog().main(run)
                        if ret != 0:
                            print('ERROR')
                            sys.exit(-1)
                        replaceTagsInFile(ofile)
            for imp in component['subscribesTo']:
                im = imp
                if type(im) != type(''):
                    im = im[0]
                if communicationIsIce(imp):
                    for f in ["SERVANT.H", "SERVANT.CPP"]:
                        ofile = outputPath + '/src/' + im.lower(
                        ) + 'I.' + f.split('.')[-1].lower()
                        print('Generating ', ofile, ' (servant for', im + ')')
                        # Call cog
                        theInterfaceStr = im
                        if type(theInterfaceStr) == type([]):
                            theInterfaceStr = str(';'.join(im))
                        run = "cog.py -z -d -D theCDSL=" + inputFile + \
                            " -D theIDSLs=" + imports + \
                            ' -D theIDSLPaths=' + '#'.join(args.include_dirs) + \
                            " -D theInterface=" + theInterfaceStr + \
                            " -o " + ofile + " " + \
                            "/opt/robocomp/share/robocompdsl/templateCPP/" + f
                        #print(run
                        run = run.split(' ')
                        ret = Cog().main(run)
                        if ret != 0:
                            print('ERROR')
                            sys.exit(-1)
                        replaceTagsInFile(ofile)
        elif component['language'].lower() == 'python':
            #
            # Check output directory
            #
            if not os.path.exists(outputPath):
                create_directory(outputPath)
            # Create directories within the output directory
            try:
                create_directory(outputPath + "/etc")
                create_directory(outputPath + "/src")
            except:
                print('There was a problem creating a directory')
                sys.exit(1)
                pass
            # STORM readme only matters if any pub/sub endpoint uses Ice
            needStorm = False
            for pub in component['publishes']:
                if communicationIsIce(pub):
                    needStorm = True
            for sub in component['subscribesTo']:
                if communicationIsIce(sub):
                    needStorm = True
            #
            # Generate regular files
            #
            files = [
                'CMakeLists.txt', 'DoxyFile', 'README-STORM.txt', 'README.md',
                'etc/config', 'src/main.py', 'src/genericworker.py',
                'src/specificworker.py', 'src/mainUI.ui'
            ]
            specificFiles = [
                'src/specificworker.py', 'src/mainUI.ui', 'README.md',
                'etc/config'
            ]
            for f in files:
                if f == 'src/main.py':
                    # the entry script is named after the component itself
                    ofile = outputPath + '/src/' + component['name'] + '.py'
                else:
                    ofile = outputPath + '/' + f
                if f in specificFiles and os.path.exists(ofile):
                    print('Not overwriting specific file "' + ofile +
                          '", saving it to ' + ofile + '.new')
                    new_existing_files[os.path.abspath(
                        ofile)] = os.path.abspath(ofile) + '.new'
                    ofile += '.new'
                ifile = "/opt/robocomp/share/robocompdsl/templatePython/" + f
                ignoreFile = False
                if f == 'src/mainUI.ui' and component['gui'] is None:
                    ignoreFile = True
                if f == 'CMakeLists.txt' and component['gui'] is None:
                    ignoreFile = True
                if f == 'README-STORM.txt' and needStorm == False:
                    ignoreFile = True
                if not ignoreFile:
                    print('Generating', ofile)
                    run = "cog.py -z -d -D theCDSL=" + inputFile + \
                        " -D theIDSLs=" + imports + \
                        ' -D theIDSLPaths=' + '#'.join(args.include_dirs) + \
                        " -o " + ofile + " " + ifile
                    run = run.split(' ')
                    ret = Cog().main(run)
                    if ret != 0:
                        print('ERROR')
                        sys.exit(-1)
                    replaceTagsInFile(ofile)
                    if f == 'src/main.py':
                        # make the generated entry script executable
                        os.chmod(ofile, os.stat(ofile).st_mode | 0o111)
            #
            # Generate interface-dependent files
            #
            for imp in component['implements'] + component['subscribesTo']:
                if type(imp) != type(''):
                    im = imp[0]
                else:
                    im = imp
                if communicationIsIce(imp):
                    for f in ["SERVANT.PY"]:
                        ofile = outputPath + '/src/' + im.lower(
                        ) + 'I.' + f.split('.')[-1].lower()
                        print('Generating', ofile, ' (servant for', im + ')')
                        # Call cog
                        run = "cog.py -z -d -D theCDSL=" + inputFile + \
                            " -D theIDSLs=" + imports + \
                            ' -D theIDSLPaths=' + '#'.join(args.include_dirs) + \
                            " -D theInterface=" + im + \
                            " -o " + ofile + " " + \
                            "/opt/robocomp/share/robocompdsl/templatePython/" + f
                        run = run.split(' ')
                        ret = Cog().main(run)
                        if ret != 0:
                            print('ERROR')
                            sys.exit(-1)
                        replaceTagsInFile(ofile)
        else:
            print('Unsupported language', component['language'])

        if component['usingROS'] == True:
            for imp in component['imports']:
                generateROSHeaders(imp, outputPath + "/src", component,
                                   args.include_dirs)

        # Code to launch diff tool on .new files to be compared with their old version
        if args.diff is not None:
            diff_tool, _ = get_diff_tool(prefered=args.diff)
            print(
                "Executing diff tool for existing files. Close if no change is needed."
            )
            for o_file, n_file in new_existing_files.items():
                if not filecmp.cmp(o_file, n_file):
                    print([diff_tool, o_file, n_file])
                    try:
                        subprocess.call([diff_tool, o_file, n_file])
                    except KeyboardInterrupt as e:
                        print(
                            "Comparasion interrupted. All files have been generated. Check this .new files manually:"
                        )
                        for o_file2, n_file2 in new_existing_files.items():
                            if not filecmp.cmp(o_file2, n_file2):
                                print("%s %s" % (o_file2, n_file2))
                        break
                    except Exception as e:
                        print("Exception trying to execute %s" % (diff_tool))
                        print(e.message)
                else:
                    print("Binary equal files %s and %s" % (o_file, n_file))

    elif inputFile.endswith(".idsl"):
        # idsl = IDSLParsing.fromFileIDSL(inputFile)
        print('Generating ICE file ', outputPath)
        # Call cog
        run = "cog.py -z -d" + " -D theIDSL=" + inputFile + \
            ' -D theIDSLPaths=' + '#'.join(args.include_dirs) + \
            " -o " + outputPath + \
            " /opt/robocomp/share/robocompdsl/TEMPLATE.ICE"
        run = run.split(' ')
        ret = Cog().main(run)
        if ret != 0:
            print('ERROR')
            sys.exit(-1)
        replaceTagsInFile(outputPath)
def test_read_initial_conditions(self):
    """End-to-end check of the generated initial-conditions reader.

    For each option combination: builds a dummy kernel generator, emits the
    read_initial_conditions source, compiles it plus a Cython wrapper, cogs
    the ric_tester.py.in driver (parameterized by conp), writes the expected
    phi/param reference data and a packed data.bin, then runs the driver,
    which is expected to validate the read-back values itself.
    """
    setup = test_utils.get_read_ics_source()
    wrapper = OptionLoopWrapper.from_get_oploop(self, do_conp=True)
    for opts in wrapper:
        with temporary_build_dirs() as (build_dir, obj_dir, lib_dir):
            conp = wrapper.state['conp']
            # make a dummy generator
            insns = (
                """
                {spec} = {param} {{id=0}}
                """)
            domain = arc.creator('domain', arc.kint_type, (10,), 'C',
                                 initializer=np.arange(10,
                                                       dtype=arc.kint_type))
            mapstore = arc.MapStore(opts, domain, None)
            # create global args
            param = arc.creator(arc.pressure_array, np.float64,
                                (arc.problem_size.name, 10), opts.order)
            spec = arc.creator(arc.state_vector, np.float64,
                               (arc.problem_size.name, 10), opts.order)
            # minimal stand-in namestore; only a 'jac' attribute is needed
            namestore = type('', (object,), {'jac': ''})
            # create array / array strings
            param_lp, param_str = mapstore.apply_maps(param, 'j', 'i')
            spec_lp, spec_str = mapstore.apply_maps(spec, 'j', 'i')

            # create kernel infos
            info = knl_info('spec_eval',
                            insns.format(param=param_str, spec=spec_str),
                            mapstore,
                            kernel_data=[spec_lp, param_lp, arc.work_size],
                            silenced_warnings=['write_race(0)'])
            # create generators
            kgen = make_kernel_generator(
                opts, KernelType.dummy, [info], namestore,
                input_arrays=[param.name, spec.name],
                output_arrays=[spec.name],
                name='ric_tester')
            # make kernels
            kgen._make_kernels()
            # and generate RIC
            _, record, _ = kgen._generate_wrapping_kernel(build_dir)
            kgen._generate_common(build_dir, record)
            ric = os.path.join(
                build_dir,
                'read_initial_conditions' + utils.file_ext[opts.lang])
            # write header
            write_aux(build_dir, opts, self.store.specs, self.store.reacs)
            with open(os.path.join(build_dir, 'setup.py'), 'w') as file:
                file.write(
                    setup.safe_substitute(buildpath=build_dir,
                                          obj_dir=obj_dir))
            # and compile
            from pyjac.libgen import compile, get_toolchain
            toolchain = get_toolchain(opts.lang)
            compile(opts.lang, toolchain, [ric], obj_dir=obj_dir)
            # write wrapper
            self.__write_with_subs(
                'read_ic_wrapper.pyx',
                os.path.join(self.store.script_dir, 'test_utils'),
                build_dir,
                header_ext=utils.header_ext[opts.lang])
            # setup
            utils.run_with_our_python([
                os.path.join(build_dir, 'setup.py'), 'build_ext',
                '--build-lib', lib_dir
            ])
            infile = os.path.join(self.store.script_dir, 'test_utils',
                                  'ric_tester.py.in')
            outfile = os.path.join(lib_dir, 'ric_tester.py')
            # cogify
            try:
                Cog().callableMain([
                    'cogapp', '-e', '-d', '-Dconp={}'.format(conp), '-o',
                    outfile, infile
                ])
            except Exception:
                import logging
                logger = logging.getLogger(__name__)
                logger.error('Error generating initial conditions reader:'
                             ' {}'.format(outfile))
                raise
            # save phi, param in correct order
            phi = (self.store.phi_cp if conp else self.store.phi_cv)
            savephi = phi.flatten(opts.order)
            param = self.store.P if conp else self.store.V
            savephi.tofile(os.path.join(lib_dir, 'phi_test.npy'))
            param.tofile(os.path.join(lib_dir, 'param_test.npy'))
            # save bin file
            out_file = np.concatenate(
                (
                    np.reshape(phi[:, 0], (-1, 1)),  # temperature
                    np.reshape(param, (-1, 1)),  # param
                    phi[:, 1:]), axis=1  # species
            )
            out_file = out_file.flatten('K')
            with open(os.path.join(lib_dir, 'data.bin'), 'wb') as file:
                out_file.tofile(file)
            # and run
            utils.run_with_our_python(
                [outfile, opts.order, str(self.store.test_size)])
def driver(output_dir, is_mo_caffe_ext_gen, is_mo_mxnet_ext_gen,
           is_mo_tf_ext_gen, is_mo_op_gen, is_ie_cpu_gen,
           is_ie_gpu_gen=False, is_from_config=False):
    """Generate the requested Model Optimizer / Inference Engine stub files.

    Each is_* flag selects one artifact to generate under ``output_dir``:
    cog is run over the matching template and the result is moved (or
    supporting files copied) into the created folder structure. When
    ``is_from_config`` is True, the interactive description steps and
    overwrite prompts are skipped. Returns 0 on completion.
    """
    # Summarize what will be generated before doing any work.
    analysis = '\n'.join([
        'Generating:',
        '\tModel Optimizer: ',
        '\t\t Extractor for Caffe Custom Layer: {}'.format(
            is_mo_caffe_ext_gen),
        '\t\t Extractor for MxNet Custom Layer: {}'.format(
            is_mo_mxnet_ext_gen),
        '\t\t Extractor for TensorFlow Custom Layer: {}'.format(
            is_mo_tf_ext_gen),
        '\t\t Framework-agnostic operation extension: {}'.format(is_mo_op_gen),
        '\t Inference Engine: ',
        '\t\t CPU extension: {}'.format(is_ie_cpu_gen),
        '\t\t GPU extension: {}'.format(is_ie_gpu_gen),
    ])
    print(analysis)
    # Create the destination folder trees only for the requested families.
    if is_mo_caffe_ext_gen or is_mo_mxnet_ext_gen or is_mo_tf_ext_gen or is_mo_op_gen:
        [caffe_extr_path, mxnet_extr_path, tf_extr_path,
         op_path] = create_mo_folder_structure(output_dir)
    if is_ie_cpu_gen or is_ie_gpu_gen:
        [ie_cpu_path, ie_gpu_path] = create_ie_folder_structure(output_dir)
    # Interactive description collection (skipped when driven from a config).
    if is_mo_caffe_ext_gen:
        mo_extr_descr = MOExtractorDescr(is_mo_op_gen)
        if not is_from_config:
            mo_extr_descr.create_extension_description()
    if is_mo_mxnet_ext_gen:
        mo_extr_descr = MOMXNetExtractorDescr(is_mo_op_gen)
        if not is_from_config:
            mo_extr_descr.create_extension_description()
    if is_mo_tf_ext_gen:
        mo_extr_descr = MOTFExtractorDescr(is_mo_op_gen)
        if not is_from_config:
            mo_extr_descr.create_extension_description()
    if is_mo_op_gen:
        mo_op = MOOpDescr(is_mo_caffe_ext_gen or is_mo_tf_ext_gen)
        if not is_from_config:
            mo_op.create_extension_description()
    if is_ie_cpu_gen and not is_from_config:
        ie_ext_descr_cpu = IEExtensionDescr('cpu')
        ie_ext_descr_cpu.create_extension_description()
    if is_ie_gpu_gen and not is_from_config:
        ie_ext_descr_gpu = IEExtensionDescr('cldnn')
        ie_ext_descr_gpu.create_extension_description()
    pathname = os.path.dirname(sys.argv[0])
    path = os.path.abspath(pathname)
    # Each job is (cog argv, post-processing callables); `op` builds a
    # deferred io_task whose `force` flag is supplied at call time.
    jobs = []
    op = lambda what, where, operation: lambda is_force=False: io_task(
        what, where, operation, force=is_force)
    if is_mo_caffe_ext_gen:
        what = InteractiveModule.params['name'][0].lower() + '_ext.py'
        command = [
            '', '-d', '-o' + what,
            os.path.join(path, './templates/caffe_extractor.py')
        ]
        sub_jobs = [op(what, caffe_extr_path, move)]
        jobs.append((command, sub_jobs))
    if is_mo_mxnet_ext_gen:
        what = InteractiveModule.params['name'][0].lower() + '_ext.py'
        command = [
            '', '-d', '-o' + what,
            os.path.join(path, './templates/mxnet_extractor.py')
        ]
        sub_jobs = [op(what, mxnet_extr_path, move)]
        jobs.append((command, sub_jobs))
    if is_mo_tf_ext_gen:
        what = InteractiveModule.params['name'][0].lower() + '_ext.py'
        command = [
            '', '-d', '-o' + what,
            os.path.join(path, './templates/tf_extractor.py')
        ]
        sub_jobs = [op(what, tf_extr_path, move)]
        jobs.append((command, sub_jobs))
    if is_mo_op_gen:
        what = InteractiveModule.get_param('opName').replace(
            ".", "_").lower() + '.py'
        command = [
            '', '-d', '-o' + what,
            os.path.join(path, './templates/mo_op.py')
        ]
        sub_jobs = [op(what, op_path, move)]
        jobs.append((command, sub_jobs))
    if is_ie_cpu_gen:
        #try to find out IE samples to copy ext_base files
        # 1. extgen and IE samples in one packet
        if os.path.exists(
                os.path.join(
                    path,
                    "../inference_engine/samples/extension/ext_base.cpp")):
            ext_base_path = os.path.join(
                path, "../inference_engine/samples/extension/")
        else:
            # 2. we have InferenceEngine_DIR path
            if os.getenv('InferenceEngine_DIR') and os.path.exists(
                    os.path.join(os.getenv('InferenceEngine_DIR'),
                                 "../samples/extension/ext_base.cpp")):
                ext_base_path = os.path.join(os.getenv('InferenceEngine_DIR'),
                                             "../samples/extension/")
            else:
                # 3. we have path to extension sample explicitly (for development mainly)
                if os.getenv('IE_extension_sample') and os.path.exists(
                        os.path.join(os.getenv('IE_extension_sample'),
                                     "./ext_base.cpp")):
                    ext_base_path = os.getenv('IE_extension_sample')
                else:
                    raise Exception(
                        "Can not locate the Inference Engine extension sample.\n"
                        + "Please run setupenv.sh from OpenVINO toolkit or set path to "
                        + "IE sample extension explicitly in IE_extension_sample"
                    )
        what = 'ext_' + InteractiveModule.get_param('ie_name').replace(
            ".", "_").lower() + '.cpp'
        command = [
            '', '-d', '-o' + what,
            os.path.join(path, './templates/ie_extension.cpp')
        ]
        # move the generated source, copy the fixed build/support files
        sub_jobs = [
            op(what, ie_cpu_path, move),
            op(os.path.join(path, './templates/CMakeLists.txt'), ie_cpu_path,
               copy),
            op(os.path.join(ext_base_path, './ext_base.cpp'), ie_cpu_path,
               copy),
            op(os.path.join(ext_base_path, './ext_base.hpp'), ie_cpu_path,
               copy),
            op(os.path.join(ext_base_path, './ext_list.cpp'), ie_cpu_path,
               copy),
            op(os.path.join(ext_base_path, './ext_list.hpp'), ie_cpu_path,
               copy)
        ]
        jobs.append((command, sub_jobs))
    if is_ie_gpu_gen:
        # GPU extension: one OpenCL kernel plus its XML descriptor
        for ext in ('cl', 'xml'):
            op_file = InteractiveModule.get_param(
                'ie_name').lower() + '_kernel.{}'.format(ext)
            command = [
                '', '-d', '-o' + op_file,
                os.path.join(path, './templates/ie_gpu_ext.{}'.format(ext))
            ]
            sub_jobs = [op(op_file, ie_gpu_path, move)]
            jobs.append((command, sub_jobs))
    # Run cog for every job, then its post-processing steps; on a name
    # collision, optionally ask the user before forcing the overwrite.
    for job, sub_jobs in jobs:
        Cog().main(job)
        for sub_job in sub_jobs:
            try:
                sub_job()
            except shutil.Error as e:
                file_name = str(e).split('\'')[1]
                if 'already exists' in str(e):
                    res = "no"
                    if not is_from_config:
                        res = input(
                            'The file {} will be overwritten and all changes can be lost. '
                            .format(file_name) + 'Are you sure (y/n)? ')
                    if res.lower() == 'yes' or res.lower() == 'y':
                        sub_job(True)
                    else:
                        print(
                            '[WARNING] File {} already exist. If you want to re-generate it, remove or move the file {} and try again'
                            .format(file_name, file_name))
    if is_mo_caffe_ext_gen:
        print('Stub file for Caffe Model Optimizer extractor is in {} folder'.
              format(str(os.path.abspath(caffe_extr_path))))
    if is_mo_tf_ext_gen:
        print(
            'Stub file for TensorFlow Model Optimizer extractor is in {} folder'
            .format(str(os.path.abspath(tf_extr_path))))
    if is_mo_mxnet_ext_gen:
        print('Stub file for MxNet Model Optimizer extractor is in {} folder'.
              format(str(os.path.abspath(mxnet_extr_path))))
    if is_mo_op_gen:
        print('Stub file for Model Optimizer operation is in {} folder'.format(
            str(op_path)))
    if is_ie_cpu_gen:
        print('Stub files for Inference Engine CPU extension are in {} folder'.
              format(str(ie_cpu_path)))
    if is_ie_gpu_gen:
        print('Stub files for Inference Engine GPU extension are in {} folder'.
              format(str(ie_gpu_path)))
    return 0
def generate_setup(lang, setupfile, pyxfile, home_dir, build_dir, out_dir,
                   libname, extra_include_dirs=None, libraries=None,
                   libdirs=None, ktype=KernelType.jacobian):
    """Helper method to fill in the template .in files

    Parameters
    ----------
    lang : str
        The language of the wrapper being generated
    setupfile : str
        Filename of the setup file template
    pyxfile : str
        Filename of the pyx file template
    home_dir : str
        Home directory path
    build_dir : str
        Build directory path
    out_dir : str
        Output directory path
    libname : str
        Library name
    extra_include_dirs : Optional[list of str]
        Optional; if supplied, extra include directions for the python wrapper
    libraries : Optional[list of str]
        Optional; if supplied extra libraries to use
    libdirs : Optional[list of str]
        Optional; if supplied, library directories
    ktype : KernelType
        The kernel type; defaults to KernelType.jacobian

    Returns
    -------
    setup: str
        The path to the generated setup.py file
    """
    # Fix: the previous [] defaults were mutable default arguments (shared
    # across calls); use None sentinels instead — backward-compatible.
    extra_include_dirs = [] if extra_include_dirs is None else extra_include_dirs
    libraries = [] if libraries is None else libraries
    libdirs = [] if libdirs is None else libdirs
    setup = SetupGen(name='pyjac',
                     libname=libname,
                     include_dirs=extra_include_dirs,
                     package_lang=utils.package_lang[lang],
                     wrapper=pyxfile,
                     lang=lang,
                     build_dir=build_dir,
                     libraries=libraries,
                     libdirs=libdirs)
    # serialize
    # dump wrapper
    with utils.temporary_directory() as tdir:
        setupgen = os.path.join(tdir, 'setupgen.pickle')
        with open(setupgen, 'wb') as file:
            pickle.dump(setup, file)

        infile = setupfile
        # strip the trailing '.in' to get the real output file name
        outfile = os.path.basename(infile[:infile.rindex('.in')])
        outfile = os.path.join(out_dir, outfile)
        # and cogify: -Dsetupgen hands the pickle path to the template
        try:
            Cog().callableMain([
                'cogapp', '-e', '-d', '-Dsetupgen={}'.format(setupgen), '-o',
                outfile, infile
            ])
        except Exception:
            logger = logging.getLogger(__name__)
            logger.error(
                'Error generating python setup file: {}'.format(outfile))
            raise

    return outfile
#!/usr/bin/env python """ Cog content generation tool. http://nedbatchelder.com/code/cog Copyright 2004-2019, Ned Batchelder. """ import sys from cogapp import Cog sys.exit(Cog().main(sys.argv))
def test_strided_copy():
    """End-to-end check of strided host<->device memory copies.

    For each option combination, writes a C/OpenCL program from an embedded
    cog template that declares/allocates every kernel array, zeroes the host
    copies, copies saved reference data in per-run chunks to the device and
    back, and asserts the round-tripped values match. The program is cogged,
    compiled, linked and executed; a non-zero exit fails the test.
    """
    wrapper = __test_cases()
    for opts in wrapper:
        lang = opts.lang
        order = opts.order
        depth = opts.depth
        width = opts.width
        with temporary_build_dirs() as (build_dir, obj_dir, lib_dir):
            # vector width in use (0 when neither depth nor width is set)
            vec_size = depth if depth else (width if width else 0)
            # set max per run such that we will have a non-full run (1024 - 1008)
            # this should also be evenly divisible by depth and width
            # (as should the non full run)
            max_per_run = 16
            # number of ics should be divisibly by depth and width
            ics = max_per_run * 8 + vec_size
            if vec_size:
                assert ics % vec_size == 0
                assert max_per_run % vec_size == 0
                assert int(np.floor(ics / max_per_run)
                           * max_per_run) % vec_size == 0

            # build initial callgen
            callgen = CallgenResult(
                order=opts.order, lang=opts.lang,
                dev_mem_type=wrapper.state['dev_mem_type'],
                type_map=type_map(opts.lang))

            # set type
            dtype = np.dtype('float64')

            # create test arrays
            def __create(shape):
                # prepend the IC dimension and fill with a recognizable ramp
                if not isinstance(shape, tuple):
                    shape = (shape,)
                shape = (ics,) + shape
                arr = np.zeros(shape, dtype=dtype, order=order)
                arr.flat[:] = np.arange(np.prod(shape))
                return arr
            arrays = [__create(16), __create(10), __create(20),
                      __create((20, 20)), __create(())]
            const = [np.arange(10, dtype=dtype)]
            # max size for initialization in kernel
            max_size = max([x.size for x in arrays])

            def _get_dtype(dtype):
                return lp.to_loopy_type(
                    dtype, target=get_target(opts.lang))

            lp_arrays = [lp.GlobalArg(
                'a{}'.format(i),
                shape=(arc.problem_size.name,) + a.shape[1:],
                order=order,
                dtype=_get_dtype(arrays[i].dtype))
                for i, a in enumerate(arrays)] + \
                [lp.TemporaryVariable(
                    'a{}'.format(i + len(arrays)),
                    dtype=_get_dtype(dtype),
                    order=order,
                    initializer=const[i],
                    read_only=True,
                    shape=const[i].shape) for i in range(len(const))]
            const = lp_arrays[len(arrays):]

            # now update args
            callgen = callgen.copy(
                name='test',
                input_args={'test': [x for x in lp_arrays
                                     if x not in const]},
                output_args={'test': []},
                host_constants={'test': const})

            temp_fname = os.path.join(build_dir, 'in' + utils.file_ext[lang])
            fname = os.path.join(build_dir, 'test' + utils.file_ext[lang])
            # The template below is the actual test program; its cog blocks
            # consume the globals passed on the cog command line further down.
            with open(temp_fname, 'w') as file:
                file.write(dedent("""
       /*[[[cog
            # expected globals:
            #   callgen      - path to serialized callgen object
            #   lang         - the language to use
            #   problem_size - the problem size
            #   max_per_run  - the run-size
            #   max_size     - the maximum array size
            #   order        - The data ordering

            import cog
            import os
            import numpy as np
            from six.moves import cPickle as pickle

            # unserialize the callgen
            with open(callgen, 'rb') as file:
                callgen = pickle.load(file)

            # determine the headers to include
            lang_headers = []
            if lang == 'opencl':
                lang_headers.extend([
                    '#include "memcpy_2d.oclh"',
                    '#include "vectorization.oclh"',
                    '#include <CL/cl.h>',
                    '#include "error_check.oclh"'])
            elif lang == 'c':
                lang_headers.extend([
                    '#include "memcpy_2d.hpp"',
                    '#include "error_check.hpp"'])
            cog.outl('\\n'.join(lang_headers))
        ]]]
        [[[end]]]*/

        // normal headers
        #include <stdlib.h>
        #include <string.h>
        #include <assert.h>

        int main()
        {

        /*[[[cog
             if lang == 'opencl':
                 cog.outl(
                    'double* h_temp_d;\\n'
                    'int* h_temp_i;\\n'
                    '// create a context / queue\\n'
                    'int lim = 10;\\n'
                    'cl_uint num_platforms;\\n'
                    'cl_uint num_devices;\\n'
                    'cl_platform_id platform [lim];\\n'
                    'cl_device_id device [lim];\\n'
                    'cl_int return_code;\\n'
                    'cl_context context;\\n'
                    'cl_command_queue queue;\\n'
                    'check_err(clGetPlatformIDs(lim, platform, &num_platforms));\\n'
                    'for (int i = 0; i < num_platforms; ++i)\\n'
                    '{\\n'
                    '    check_err(clGetDeviceIDs(platform[i], CL_DEVICE_TYPE_ALL, '
                    '        lim, device, &num_devices));\\n'
                    '    if(num_devices > 0)\\n'
                    '        break;\\n'
                    '}\\n'
                    'context = clCreateContext(NULL, 1, &device[0], NULL, NULL, '
                    '&return_code);\\n'
                    'check_err(return_code);\\n'
                    '//create queue\\n'
                    'queue = clCreateCommandQueue(context, device[0], 0, '
                    '&return_code);\\n'
                    'check_err(return_code);\\n')
         ]]]
         [[[end]]]*/

        /*[[[cog
             # determine maximum array size
             cog.outl('double zero [{max_size}] = {{0}};'.format(
                max_size=max_size))
             # init variables
             cog.outl('int problem_size = {};'.format(problem_size))
             cog.outl('int per_run = {};'.format(max_per_run))
         ]]]
         [[[end]]]*/

        /*[[[cog
             # create memory tool
             from string import Template
             import loopy as lp
             from pyjac.kernel_utils.memory_tools import get_memory
             from pyjac.kernel_utils.memory_tools import HostNamer
             from pyjac.kernel_utils.memory_tools import DeviceNamer
             mem = get_memory(callgen, host_namer=HostNamer(),
                              device_namer=DeviceNamer())

             # declare host and device arrays
             for arr in callgen.kernel_args['test'] + callgen.work_arrays:
                 if not isinstance(arr, lp.ValueArg):
                     cog.outl(mem.define(False, arr))
                     cog.outl(mem.define(True, arr))
             # define host constants
             for arr in callgen.host_constants['test']:
                 cog.outl(mem.define(False, arr, host_constant=True,
                                     force_no_const=True))
                 cog.outl(mem.define(True, arr))

             # and declare the temporary array
             cog.outl(mem.define(True, lp.GlobalArg(
                'temp_d', dtype=lp.to_loopy_type(np.float64))))

             # allocate host and device arrays
             for arr in callgen.kernel_args['test'] + callgen.work_arrays:
                 if not isinstance(arr, lp.ValueArg):
                     cog.outl(mem.alloc(False, arr))
                     cog.outl(mem.alloc(True, arr))
             for arr in callgen.host_constants['test']:
                 # alloc device version of host constant
                 cog.outl(mem.alloc(True, arr))
                 # copy host constants
                 cog.outl(mem.copy(True, arr, host_constant=True))

             def _get_size(arr):
                 size = 1
                 for x in arr.shape:
                     if not isinstance(x, int):
                         assert x.name == 'problem_size'
                         size *= int(problem_size)
                     else:
                         size *= x
                 return size

             # save copies of host arrays
             host_copies = [Template(
                '${type} ${save} [${size}] = {${vals}};\\n'
                'memset(${host}, 0, ${size} * sizeof(${type}));'
                ).safe_substitute(
                    save='h_' + arr.name + '_save',
                    host='h_' + arr.name,
                    size=_get_size(arr),
                    vals=', '.join([str(x) for x in np.arange(
                        _get_size(arr)).flatten(order)]),
                    type=callgen.type_map[arr.dtype])
                for arr in callgen.kernel_args['test'] +
                    callgen.host_constants['test']]
             for hc in host_copies:
                 cog.outl(hc)
         ]]]
         [[[end]]]*/

            // kernel
            for (size_t offset = 0; offset < problem_size; offset += per_run)
            {
                int this_run = problem_size - offset < per_run ? \
                    problem_size - offset : per_run;

                /* Memory Transfers into the kernel, if any */
                /*[[[cog
                  mem2 = get_memory(callgen,
                                    host_namer=HostNamer(postfix='_save'),
                                    device_namer=DeviceNamer())
                  for arr in callgen.kernel_args['test']:
                      cog.outl(mem2.copy(True, arr))
                  ]]]
                  [[[end]]]*/

                /* Memory Transfers out */
                /*[[[cog
                  for arr in callgen.kernel_args['test']:
                      cog.outl(mem.copy(False, arr))
                  ]]]
                  [[[end]]]*/
            }

            /*[[[cog
                 # and finally check
                 check_template = Template(
                    'for(int i = 0; i < ${size}; ++i)\\n'
                    '{\\n'
                    '    assert(${host}[i] == ${save}[i]);\\n'
                    '}\\n')
                 checks = [check_template.safe_substitute(
                     host=mem.get_name(False, arr),
                     save=mem2.get_name(False, arr),
                     size=_get_size(arr))
                     for arr in callgen.kernel_args['test']]
                 for check in checks:
                    cog.outl(check)
              ]]]
              [[[end]]]*/

            /*[[[cog
                 if lang == 'opencl':
                    cog.outl('check_err(clFlush(queue));')
                    cog.outl('check_err(clReleaseCommandQueue(queue));')
                    cog.outl('check_err(clReleaseContext(context));')
              ]]]
              [[[end]]]*/
            return 0;
        }
        """.strip()))

            # serialize callgen
            with open(os.path.join(build_dir, 'callgen.pickle'),
                      'wb') as file:
                pickle.dump(callgen, file)

            # cogify
            from cogapp import Cog
            cmd = [
                'cog', '-e', '-d', '-Dcallgen={}'.format(
                    os.path.join(build_dir, 'callgen.pickle')),
                '-Dmax_per_run={}'.format(max_per_run),
                '-Dproblem_size={}'.format(ics),
                '-Dmax_size={}'.format(max_size),
                '-Dlang={}'.format(lang),
                '-Dorder={}'.format(order),
                '-o', fname, temp_fname]
            Cog().callableMain(cmd)

            files = [fname]
            # write aux
            write_aux(build_dir, opts, [], [])

            # copy any deps
            def __copy_deps(lang, scan_path, out_path, change_extension=True,
                            ffilt=None, nfilt=None):
                # copy non-template support sources, optionally filtering by
                # substring and normalizing the file extension for `lang`
                deps = [x for x in os.listdir(scan_path) if os.path.isfile(
                    os.path.join(scan_path, x)) and not x.endswith('.in')]
                if ffilt is not None:
                    deps = [x for x in deps if ffilt in x]
                if nfilt is not None:
                    deps = [x for x in deps if nfilt not in x]
                files = []
                for dep in deps:
                    dep_dest = dep
                    dep_is_header = dep.endswith(utils.header_ext[lang])
                    ext = (utils.file_ext[lang] if not dep_is_header
                           else utils.header_ext[lang])
                    if change_extension and not dep.endswith(ext):
                        dep_dest = dep[:dep.rfind('.')] + ext
                    shutil.copyfile(os.path.join(scan_path, dep),
                                    os.path.join(out_path, dep_dest))
                    if not dep_is_header:
                        files.append(os.path.join(out_path, dep_dest))
                return files

            scan = os.path.join(script_dir, os.pardir, 'kernel_utils', lang)
            files += __copy_deps(lang, scan, build_dir, nfilt='.py')
            scan = os.path.join(script_dir, os.pardir, 'kernel_utils',
                                'common')
            files += __copy_deps(host_langs[lang], scan, build_dir,
                                 change_extension=False, ffilt='memcpy_2d')

            # build
            toolchain = get_toolchain(lang)
            obj_files = compile(lang, toolchain, files,
                                source_dir=build_dir, obj_dir=obj_dir)
            lib = link(toolchain, obj_files, 'memory_test', lib_dir=lib_dir)

            # and run; the generated program asserts internally
            subprocess.check_call(lib)
"""Run cog over the SystemC generation templates for the current DUT.

For each (target, template) pair: if the target file already exists it is
regenerated in place (cog -r); otherwise it is created (-o) from the template
shipped in $HLD_ROOT/scripts/systemc-gen.
"""
from cogapp import Cog
from dut_params import dut

import os
import sys

# The scripts directory must be locatable via the environment.
assert "HLD_ROOT" in os.environ
scripts_dir = os.environ["HLD_ROOT"] + "/scripts/systemc-gen"

ret = False
pairs = [("Config.h", "Config-addresses-cog.h"),
         (dut.nm + "_hls.h", "dut_hls-hierarchy-cog.h"),
         (dut.nm + "_hls_tb.h", "dut_hls_tb-cog.h"),
         (dut.nm + "_acc.h", "dut_acc-cog.h"),
         (dut.nm + "_acc_tb.h", "dut_acc_tb-cog.h"),
         (dut.nm + "_sched.h", "dut_sched-cog.h")]

for (tgt, src) in pairs:
    if os.path.isfile(tgt):
        # Target exists: regenerate in place (-r), checksum-guarded (-c).
        lst = ["cog.py", "-r", "-c", "-I.", "-I" + scripts_dir + "/", tgt]
    else:
        # No target yet: instantiate it from the shipped template.
        lst = [
            "cog.py", "-c", "-I.", "-I" + scripts_dir + "/",
            "-o", tgt, scripts_dir + "/" + src
        ]
    print(lst)
    rc = Cog().main(lst)
    # Keep the first non-zero return code so any failure propagates.
    ret = ret or rc
# BUG FIX: 'sys' was used here without ever being imported (NameError).
sys.exit(ret)
#!/usr/bin/env python
"""Generate one .cpp/.h pair per YAML configuration file in the current
directory by running cog over the shared templates."""
import glob
import os

from cogapp import Cog


def _cog_args(output_file, config_string, template):
    # -D passes the config file name into the template; -d strips the cog
    # generator markers from the output; -z tolerates a missing end marker.
    return ['', '-o', output_file, '-D', config_string, '-d', '-z', template]


input_files = glob.glob('./*.yml')
# print(input_files)
if input_files:  # truthiness instead of len(...) > 0
    cog_gen = Cog()
    # Renamed loop variable: 'file' shadowed the builtin.
    for config_path in input_files:
        print("Reading configuration from %s" % config_path)
        # Retrieve the file name without extension
        config_string = 'CONFIG_FILE=' + os.path.basename(config_path)
        stem = os.path.splitext(os.path.basename(config_path))[0]
        cpp_file = stem + '.cpp'
        h_file = stem + '.h'
        print(' Generating ' + cpp_file)
        cog_gen.callableMain(
            _cog_args(cpp_file, config_string, '../templates/CppFile.cog'))
        print(' Generating ' + h_file)
        cog_gen.callableMain(
            _cog_args(h_file, config_string, '../templates/HFile.cog'))
print('done.')
def get_api_code(name):
    """Render the API class snippet for the model called *name*."""
    model_cls = getattr(models, name)
    fields = getattr(model_cls, 'filter_fields', [])
    return API_TEMPLATE % {'name': name, 'filter_fields': fields}


def get_serializer_code(name, clazz):
    """Render the Serializer class snippet for *name* backed by *clazz*.

    Classes without a ``serialize_depth`` attribute serialize at depth 0.
    """
    depth = getattr(clazz, 'serialize_depth', 0)
    return SERIALIZE_TEMPLATE % {'name': name, 'depth': depth}


def get_url_code(name):
    """Render the urls.py snippet for the model called *name*."""
    return URL_TEMPLATE % {'name': name, 'lname': name.lower()}


if __name__ == '__main__':
    from cogapp import Cog
    targets = ('api.py', 'serializers.py', 'urls.py')
    for target in targets:
        # -c: checksum the generated output; -r: rewrite files in place.
        Cog().main(['cog', '-cr', 'django_eighty_days/' + target])
def generarH(idslFile, imported):
    """Generate ROS .msg/.srv servant files (and their generated C++/Python
    message code) for every struct/sequence and qualifying interface method
    in the given IDSL file.

    NOTE(review): relies on module-level globals -- includeDirectories,
    outputPath, comp, component, communicationIsIce, creaDirectorio,
    replaceTagsInFile, IDSLParsing -- confirm they are defined by the
    enclosing module.

    :param idslFile: path of the .idsl file to process
    :param imported: names of already-generated ROS modules, used to add
        -I include flags for the ROS generator commands
    :return: the generated ROS module name (IDSL module name + "ROS")
    """
    idsl = IDSLParsing.gimmeIDSLStruct(idslFile, files='', includeDirectories=includeDirectories)
    # Start from clean package __init__ files; they are re-created below in
    # append mode, one import line per generated message/service.
    os.system("rm -f "+outputPath + "/" + idsl['module']['name'] + "ROS/msg/__init__.py")
    os.system("rm -f "+outputPath + "/" + idsl['module']['name'] + "ROS/srv/__init__.py")
    # First pass: every struct/sequence becomes a ROS .msg file.
    for imp in idsl['module']['contents']:
        if imp['type'] in ['struct','sequence']:
            for f in [ "SERVANT.MSG"]:
                ofile = outputPath+"/"+imp['name'] + "." + f.split('.')[-1].lower()
                print 'Generating', ofile, ' (servant for', idslFile.split('.')[0].lower() + ')'
                # Run cog over the .msg template, passing the struct name and
                # the IDSL search paths in as -D defines.
                run = "cog.py -z -d" + ' -D theIDSLPaths='+ '#'.join(includeDirectories) + " -D structName=" + imp['name'] +" -D theIDSL="+idslFile+ " -o " + ofile + " " + "/opt/robocomp/share/robocompdsl/templateCPP/" + f
                run = run.split(' ')
                ret = Cog().main(run)
                if ret != 0:
                    print 'ERROR'
                    sys.exit(-1)
                replaceTagsInFile(ofile)
                # Build the ROS kinetic message-generator command lines
                # (C++ and Python variants); only one of them runs below.
                commandCPP = "/opt/ros/kinetic/share/gencpp/cmake/../../../lib/gencpp/gen_cpp.py " + ofile + " -Istd_msgs:/opt/ros/kinetic/share/std_msgs/cmake/../msg -I" + idsl['module']['name'] + "ROS:" + outputPath
                commandPY = "/opt/ros/kinetic/share/gencpp/cmake/../../../lib/genpy/genmsg_py.py " + ofile + " -Istd_msgs:/opt/ros/kinetic/share/std_msgs/cmake/../msg -I" + idsl['module']['name'] + "ROS:" + outputPath
                for impo in imported:
                    if not impo == idsl['module']['name']+"ROS":
                        commandCPP = commandCPP + " -I" + impo + ":" + outputPath
                        commandPY = commandPY + " -I" + impo + ":" + outputPath
                if not os.path.exists(outputPath):
                    creaDirectorio(outputPath)
                commandCPP = commandCPP + " -p "+ idsl['module']['name'] + "ROS -o " + outputPath + "/" + idsl['module']['name'] + "ROS -e /opt/ros/kinetic/share/gencpp/cmake/.."
                commandPY = commandPY + " -p "+ idsl['module']['name'] + "ROS -o " + outputPath + "/" + idsl['module']['name'] +"ROS/msg"
                # Generator choice follows the component language.
                if comp['language'].lower() == 'cpp':
                    os.system(commandCPP)
                else:
                    os.system(commandPY)
                # Best-effort: register the generated message in the msg
                # package __init__ (ignored if the directory is absent).
                try:
                    fileInit = open(outputPath + "/" + idsl['module']['name'] + "ROS/msg/__init__.py", 'a')
                    fileInit.write("from ._"+imp['name']+" import *\n")
                    fileInit.close()
                except:
                    pass
    # Second pass: interface methods implemented/required over a non-Ice
    # middleware become ROS .srv files.
    for imp in idsl['module']['contents']:
        if imp['type'] == 'interface':
            for ima in component['implements']+component['requires']:
                im = ima
                # Entries may be plain strings or sequences whose first
                # element is the interface name.
                if type(im) != type(''):
                    im = im[0]
                if not communicationIsIce(ima) and im == imp['name']:
                    for method in imp['methods']:
                        if 'params' in method:
                            # ROS services must have exactly two parameters
                            # (one in, one out).
                            if len(method['params']) == 2:
                                for f in [ "SERVANT.SRV"]:
                                    ofile = outputPath+"/"+method['name'] + "." + f.split('.')[-1].lower()
                                    print 'Generating', ofile, ' (servant for', idslFile.split('.')[0].lower() + ')'
                                    # Run cog over the .srv template.
                                    run = "cog.py -z -d" + ' -D theIDSLPaths='+ '#'.join(includeDirectories) + " -D methodName=" + method['name'] +" -D theIDSL="+idslFile+ " -o " + ofile + " " + "/opt/robocomp/share/robocompdsl/templateCPP/" + f
                                    run = run.split(' ')
                                    ret = Cog().main(run)
                                    if ret != 0:
                                        print 'ERROR'
                                        sys.exit(-1)
                                    replaceTagsInFile(ofile)
                                    # Service generator command lines (C++/Python).
                                    commandCPP = "/opt/ros/kinetic/share/gencpp/cmake/../../../lib/gencpp/gen_cpp.py " +ofile+ " -Istd_msgs:/opt/ros/kinetic/share/std_msgs/cmake/../msg -Istd_srvs:/opt/ros/kinetic/share/std_srv/cmake/../srv -I" + idsl['module']['name'] + "ROS:" + outputPath
                                    commandPY = "/opt/ros/kinetic/share/gencpp/cmake/../../../lib/genpy/gensrv_py.py " +ofile+ " -Istd_msgs:/opt/ros/kinetic/share/std_msgs/cmake/../msg -Istd_srvs:/opt/ros/kinetic/share/std_srv/cmake/../srv -I" + idsl['module']['name'] + "ROS:" + outputPath
                                    for impo in imported:
                                        if not impo == idsl['module']['name']+"ROS":
                                            commandCPP = commandCPP + " -I" + impo + ":" + outputPath
                                            commandPY = commandPY + " -I" + impo + ":" + outputPath
                                    if not os.path.exists(outputPath):
                                        creaDirectorio(outputPath)
                                    commandCPP = commandCPP + " -p "+ idsl['module']['name'] + "ROS -o "+ outputPath+"/"+idsl['module']['name'] + "ROS -e /opt/ros/kinetic/share/gencpp/cmake/.."
                                    commandPY = commandPY + " -p "+ idsl['module']['name'] + "ROS -o "+ outputPath+"/"+idsl['module']['name'] +"ROS/srv"
                                    if comp['language'].lower() == 'cpp':
                                        os.system(commandCPP)
                                    else:
                                        os.system(commandPY)
                                    # Best-effort: register the generated
                                    # service in the srv package __init__.
                                    try:
                                        fileInit = open(outputPath + "/" + idsl['module']['name'] + "ROS/srv/__init__.py", 'a')
                                        fileInit.write("from ._"+method['name']+" import *\n")
                                        fileInit.close()
                                    except:
                                        pass
                            else:
                                print "error: ROS service with incorrect number of parameters. ROS only supports remote procedure calls of the form: void method(type inVar, out type outVar);"
                                for param in enumerate(method['params']):
                                    print param[0], '-->', param[1]
                                sys.exit(-1)
                        else:
                            print "error: service without params. Form is: void method(type inVar, out type outVar);"
                            sys.exit(-1)
    # Mark the generated directory as a Python package.
    os.system("touch "+outputPath + "/" + idsl['module']['name'] + "ROS/__init__.py")
    return idsl['module']['name']+"ROS"
#!/usr/bin/python3
""" Cog code generation tool.
    http://nedbatchelder.com/code/cog

    Copyright 2004-2015, Ned Batchelder.

usage: cog.py inputfile
"""
# Start the clock before the sys/cogapp imports so their cost is included in
# the reported generation time, matching the original measurement window.
import time
# BUG FIX: time.clock() was removed in Python 3.8; perf_counter() is the
# documented replacement for wall-clock interval timing.
start = time.perf_counter()

import sys
from cogapp import Cog

ret = Cog().main(sys.argv)
# NOTE: assumes at least one command-line argument (the input file name).
print("//Time to generate %s : %.2f sec"
      % (sys.argv[1], (time.perf_counter() - start)))
sys.exit(ret)
def erase_models():
    """Strip cog-generated output (-x) from models/spider_models.py."""
    models_file = pth.join(pth.dirname(__file__), "models", "spider_models.py")
    argv = [sys.argv[0], "-x", models_file]
    Cog().main(argv)
specificFiles = [ 'src/specificworker.h', 'src/specificworker.cpp', 'src/CMakeListsSpecific.txt', 'src/mainUI.ui', 'src/specificmonitor.h', 'src/specificmonitor.cpp', 'README.md' ] for f in files: ofile = outputPath + '/' + f if f in specificFiles and os.path.exists(ofile): print 'Not overwriting specific file "' + ofile + '", saving it to ' + ofile + '.new' ofile += '.new' ifile = "/opt/robocomp/share/robocompdsl/templateCPP/" + f print 'Generating', ofile, 'from', ifile run = "cog.py -z -d -D theCDSL=" + inputFile + " -D theIDSLs=" + imports + " -o " + ofile + " " + ifile run = run.split(' ') ret = Cog().main(run) if ret != 0: print 'ERROR' sys.exit(-1) replaceTagsInFile(ofile) # # Generate interface-dependent files # for im in component['implements'] + component['subscribesTo']: for f in ["SERVANT.H", "SERVANT.CPP"]: ofile = outputPath + '/src/' + im.lower() + 'I.' + f.split( '.')[-1].lower() print 'Generating', ofile, ' (servant for', im + ')' # Call cog run = "cog.py -z -d -D theCDSL=" + inputFile + " -D theIDSLs=" + imports + " -D theInterface=" + im + " -o " + ofile + " " + "/opt/robocomp/share/robocompdsl/templateCPP/" + f run = run.split(' ')
def generate_wrapper(lang, pyxfile, build_dir, ktype=KernelType.jacobian,
                     additional_inputs=None, additional_outputs=None,
                     nice_name=None):
    """
    Generate the Cython wrapper file

    Parameters
    ----------
    lang : str
        The language of the wrapper being generated
    pyxfile : str
        Filename of the pyx file template
    build_dir : str
        The path to place the generated cython wrapper in
    ktype : :class:`KernelType` [KernelType.jacobian]
        The type of wrapper to generate
    additional_inputs : list of str, optional
        If supplied, treat these arguments as additional input variables
    additional_outputs : list of str, optional
        If supplied, treat these arguments as additional output variables
    nice_name: str [None]
        If supplied, use this instead of :param:`ktype` to derive the kernel
        name

    Returns
    -------
    wrapper: str
        The path to the generated python wrapper
    """
    # BUG FIX: the previous mutable default arguments ([]) were shared
    # between calls; default to None and create fresh lists here.
    if additional_inputs is None:
        additional_inputs = []
    if additional_outputs is None:
        additional_outputs = []

    # create wrappergen
    if nice_name is None:
        nice_name = utils.enum_to_string(ktype)

    if ktype == KernelType.jacobian:
        inputs, outputs = jac_args(True)
        # replace 'P_arr' w/ 'param' for clarity
        replacements = {'P_arr': 'param'}
    elif ktype != KernelType.dummy:
        inputs, outputs = rate_args(True, ktype)
        replacements = {
            'cp': 'specific_heat',
            'cv': 'specific_heat',
            'h': 'specific_energy',
            'u': 'specific_energy'
        }
    else:
        # Dummy kernels must declare their arguments explicitly.
        assert additional_outputs
        assert additional_inputs
        replacements = {}
        inputs = additional_inputs[:]
        outputs = additional_outputs[:]

    def extend(names, args=None):
        # Accumulate normalized argument names, preserving first-seen order.
        # BUG FIX: args=[] as a default was mutated AND returned, so names
        # leaked between successive generate_wrapper() calls.
        if args is None:
            args = []
        for name in names:
            if name in replacements:
                name = replacements[name]
            if name not in args:
                args.append(name)
        return args

    args = extend(outputs, extend(inputs))
    wrapper = WrapperGen(name=nice_name, kernel_args=args, lang=lang)

    # dump wrapper to a pickle that cog loads while expanding the template
    with utils.temporary_directory() as tdir:
        wrappergen = os.path.join(tdir, 'wrappergen.pickle')
        with open(wrappergen, 'wb') as file:
            pickle.dump(wrapper, file)

        infile = pyxfile
        outfile = 'pyjac_{}.pyx'.format(utils.package_lang[lang])
        outfile = os.path.join(build_dir, outfile)
        # and cogify
        try:
            Cog().callableMain([
                'cogapp', '-e', '-d',
                '-Dwrappergen={}'.format(wrappergen),
                '-o', outfile, infile])
        except Exception:
            logger = logging.getLogger(__name__)
            logger.error(
                'Error generating python wrapper file: {}'.format(outfile))
            raise

    return outfile