def generate_files(template_data):
    files = {}
    for template_file in template_files:
        with open(template_file, 'r') as f:
            data = f.read()
        files[template_file] = em.expand(data, template_data)
    # Generate files for each arm
    for arm_info in template_data['arms']:
        template_data['arm'] = arm_info
        with open(arm_template_file, 'r') as f:
            data = f.read()
        files[arm_info.name + '.urdf.xacro'] = em.expand(data, template_data)
    return files
def build_message(msg_name):
    print('building %s' % (msg_name))
    msg = message_dict[msg_name]
    with open('%s.json' % (msg_name), 'w') as f:
        f.write(json.dumps(msg, default=lambda x: x.__dict__))
    for template in templates:
        output = em.expand(template['source'], msg=msg)
        if not output.strip():
            continue
        output_file = os.path.join(
            build_dir,
            em.expand('@{from canard_dsdlc_helpers import *}' + template['output_file'], msg=msg))
        mkdir_p(os.path.dirname(output_file))
        with open(output_file, 'w') as f:
            f.write(output)
def __process_template_folder(path, subs):
    items = os.listdir(path)
    processed_items = []
    for item in list(items):
        item = os.path.abspath(os.path.join(path, item))
        if os.path.basename(item) in ['.', '..', '.git', '.svn']:
            continue
        if os.path.isdir(item):
            sub_items = __process_template_folder(item, subs)
            processed_items.extend([os.path.join(item, s) for s in sub_items])
        if not item.endswith(TEMPLATE_EXTENSION):
            continue
        with open(item, 'r') as f:
            template = f.read()
        # Remove extension
        template_path = item[:-len(TEMPLATE_EXTENSION)]
        # Expand template
        info("Expanding '{0}' -> '{1}'".format(
            os.path.relpath(item), os.path.relpath(template_path)))
        result = em.expand(template, **subs)
        # Write the result
        with open(template_path, 'w') as f:
            f.write(result.encode('utf8'))
        # Copy the permissions
        shutil.copymode(item, template_path)
        processed_items.append(item)
    return processed_items
def test_env_cached_static(self):
    # hack to fix empy nosetests clash
    sys.stdout = em.ProxyFile(sys.stdout)
    dstdir = os.path.join(self.workspacedir, 'catkin_test')
    shutil.copytree(os.path.join(MOCK_DIR, 'src', 'catkin_test'), dstdir)
    template_file = os.path.join(os.path.dirname(__file__),
                                 '..', '..', 'cmake', 'em', 'order_packages.cmake.em')
    with open(template_file, 'r') as fhand:
        template = fhand.read()
    gdict = {'CATKIN_DEVEL_PREFIX': '/foo',
             'CMAKE_PREFIX_PATH': ['/bar'],
             'CATKIN_GLOBAL_LIB_DESTINATION': '/glob-dest/lib',
             'CATKIN_GLOBAL_BIN_DESTINATION': '/glob-dest/bin',
             'PYTHON_INSTALL_DIR': '/foo/dist-packages'}
    result = em.expand(template, gdict,
                       source_root_dir=self.workspacedir,
                       whitelisted_packages=None,
                       blacklisted_packages=None)
    self.assertTrue('set(CATKIN_ORDERED_PACKAGES "")' in result, result)
    self.assertTrue('set(CATKIN_ORDERED_PACKAGE_PATHS "")' in result, result)
    self.assertTrue('set(CATKIN_ORDERED_PACKAGES_IS_META "")' in result, result)
    self.assertTrue('set(CATKIN_MESSAGE_GENERATORS' in result, result)
    self.assertTrue("""\
list(APPEND CATKIN_ORDERED_PACKAGES "catkin_test")
list(APPEND CATKIN_ORDERED_PACKAGE_PATHS "catkin_test/catkin_test")
list(APPEND CATKIN_ORDERED_PACKAGES_IS_META "True")""" in result, result)
    self.assertTrue("""\
list(APPEND CATKIN_ORDERED_PACKAGES "a")
list(APPEND CATKIN_ORDERED_PACKAGE_PATHS "catkin_test/a")
list(APPEND CATKIN_ORDERED_PACKAGES_IS_META "False")""" in result, result)
    # catkin itself filtered out
    self.assertFalse('list(APPEND CATKIN_ORDERED_PACKAGES "catkin"' in result, result)
    self.assertEqual(28, len(result.splitlines()))
def _generate_empy_file(self, em_params, template_paths, template_name, file_path, file_name):
    print("Generating " + file_name)
    template_file = None
    for template_path in template_paths:
        template_file = template_path + "/" + template_name
        try:
            f = open(template_file, 'r')
            # quit loop on first successful open
            break
        except IOError:
            pass
    print("Loading template file: " + template_file)
    text = em.expand(f.read(), em_params)
    package_file = file_path + "/" + file_name
    if text:
        print("Outputting package file: " + package_file)
        fd = open(package_file, 'w')
        fd.write(text)
    else:
        print("Generated package file: " + package_file + ", contents empty, no file created")
def print_badge(name, affiliation):
    with open(INPUT_SVG, 'r') as fh:
        template = fh.read()
    subs = {}
    subs['fullname'] = name
    subs['affiliation'] = affiliation
    svgout = em.expand(template, subs)
    if DEBUG_SVG:
        print('=' * 80)
        print(svgout)
        print('=' * 80)
    svgfile = tempfile.NamedTemporaryFile(suffix='.svg')
    pdffile = tempfile.NamedTemporaryFile(suffix='.pdf')
    with open(svgfile.name, 'w') as fh:
        fh.write(svgout)
    cmd = ['inkscape', '-f', svgfile.name, '-A', pdffile.name]
    print("command is %s" % cmd)
    subprocess.check_call(cmd)
    cmd = ['lp', '-d', PRINTER, pdffile.name]
    print("command is %s" % cmd)
    if PRINT:
        subprocess.check_call(cmd)
    else:
        print("skipping printing due to PRINT being false")
def __process_template_folder(path, subs):
    items = os.listdir(path)
    processed_items = []
    for item in list(items):
        item = os.path.abspath(os.path.join(path, item))
        if os.path.basename(item) in ['.', '..', '.git', '.svn']:
            continue
        if os.path.isdir(item):
            sub_items = __process_template_folder(item, subs)
            processed_items.extend([os.path.join(item, s) for s in sub_items])
        if not item.endswith(TEMPLATE_EXTENSION):
            continue
        with open(item, 'r') as f:
            template = f.read()
        # Remove extension
        template_path = item[:-len(TEMPLATE_EXTENSION)]
        # Expand template
        info("Expanding '{0}' -> '{1}'".format(os.path.relpath(item),
                                               os.path.relpath(template_path)))
        result = em.expand(template, **subs)
        # Don't write an empty file
        if len(result) == 0 and os.path.basename(template_path) in ['copyright']:
            processed_items.append(item)
            continue
        # Write the result
        with io.open(template_path, 'w', encoding='utf-8') as f:
            if sys.version_info.major == 2:
                result = result.decode('utf-8')
            f.write(result)
        # Copy the permissions
        shutil.copymode(item, template_path)
        processed_items.append(item)
    return processed_items
def expand(fname, stack_yaml, source_dir, dest_dir, filetype=''):
    # where normal templates live
    templatedir = template_dir()
    # the default input template file path
    ifilename = os.path.join(templatedir, fname)
    if filetype != '':
        if filetype.startswith('+'):
            ifilename = os.path.join(source_dir, filetype[1:])
        else:
            ifilename += ('.' + filetype + '.em')
    else:
        ifilename += '.em'
    print("Reading %s template from %s" % (fname, ifilename))
    file_em = open(ifilename).read()
    s = em.expand(file_em, **stack_yaml)
    ofilename = os.path.join(dest_dir, fname)
    ofilestr = open(ofilename, "w")
    print(s, file=ofilestr)
    ofilestr.close()
    if fname == 'rules':
        os.chmod(ofilename, 0o755)
def create_from_template(self, template_name, data, directory, chmod=None, outfile=None):
    # Configure template name
    extention = '.em'
    if not template_name.endswith(extention):
        template_file = template_name + extention
    else:
        template_file = template_name
        # strip the '.em' extension to get the output name
        template_name = template_name[:-len(extention)]
    # Open the template
    with change_directory(directory):
        with open(template_file, 'r') as f:
            template = f.read()
        execute_command('git rm ' + template_file)
    # Expand template
    outfile = outfile if outfile is not None else template_name
    info("Expanding template: '" + template_file + "' to '" + outfile + "'")
    result = em.expand(template, **data)
    # Write the template out
    with change_directory(directory):
        with open(outfile, 'w+') as f:
            f.write(result)
    # Set permissions if needed
    if chmod is not None:
        os.chmod(outfile, chmod)
def test_env_cached_static(self):
    # hack to fix empy nosetests clash
    sys.stdout = em.ProxyFile(sys.stdout)
    template_file = os.path.join(os.path.dirname(__file__),
                                 '..', '..', 'cmake', 'em', 'order_packages.cmake.em')
    with open(template_file, 'r') as fhand:
        template = fhand.read()
    gdict = {
        'CATKIN_DEVEL_PREFIX': '/foo',
        'CMAKE_PREFIX_PATH': ['/bar'],
        'CATKIN_GLOBAL_LIB_DESTINATION': '/glob-dest/lib',
        'CATKIN_GLOBAL_BIN_DESTINATION': '/glob-dest/bin',
        'PYTHON_INSTALL_DIR': '/foo/dist-packages'
    }
    result = em.expand(template, gdict,
                       source_root_dir='/tmp/nowhere_dir',
                       whitelisted_packages=[],
                       blacklisted_packages=[],
                       underlay_workspaces=[])
    self.assertTrue('set(CATKIN_ORDERED_PACKAGES "")' in result, result)
    self.assertTrue('set(CATKIN_ORDERED_PACKAGE_PATHS "")' in result, result)
    self.assertTrue('set(CATKIN_ORDERED_PACKAGES_IS_META "")' in result, result)
    self.assertTrue('set(CATKIN_ORDERED_PACKAGES_BUILD_TYPE "")' in result, result)
    self.assertTrue('set(CATKIN_MESSAGE_GENERATORS' in result, result)
    self.assertEqual(10, len(result.splitlines()))
def generate_files(template_data):
    files = {}
    for template_file in template_files:
        with open(template_file, 'r') as f:
            data = f.read()
        files[template_file] = em.expand(data, template_data)
    return files
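# Hedged usage sketch for the generate_files variant above: `template_files`
# is the module-level list the function iterates over, and the EmPy templates
# are assumed to reference a `robot_name` variable. The file name and keys
# here are illustrative only, not part of the original project.
if __name__ == '__main__':
    template_files = ['robot.urdf.xacro.em']  # hypothetical template on disk
    expanded = generate_files({'robot_name': 'demo_bot'})
    for output_name, contents in expanded.items():
        print('%s: %d characters' % (output_name, len(contents)))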
def create_from_template(self, template_name, data, directory, chmod=None, outfile=None):
    # Configure template name
    extention = '.em'
    if not template_name.endswith(extention):
        template_file = template_name + extention
    else:
        template_file = template_name
        # strip the '.em' extension to get the output name
        template_name = template_name[:-len(extention)]
    template_path = os.path.join('templates', template_file)
    # Get the template contents using pkg_resources
    group = 'bloom.generators.debian'
    # info("Looking for template: " + group + ':' + template_path)
    try:
        template = pkg_resources.resource_string(group, template_path)
    except IOError as err:
        error("Failed to load template "
              "'{0}': {1}".format(template_name, str(err)))
        self.exit(code.DEBIAN_FAILED_TO_LOAD_TEMPLATE)
    # Expand template
    outfile = outfile if outfile is not None else template_name
    info("Expanding template: '" + template_file + "' to '" + outfile + "'")
    result = em.expand(template, **data)
    # Write the template out
    with change_directory(directory):
        with open(outfile, 'w+') as f:
            f.write(result)
    # Set permissions if needed
    if chmod is not None:
        os.chmod(outfile, chmod)
def expand(fname, stack_data, dest_dir, filetype=''):
    # insert template type
    if fname == 'rules' and stack_data['Catkin-DebRulesType'] == 'custom':
        path = os.path.join(dest_dir, '..', stack_data['Catkin-DebRulesFile'])
        f = open(path)
        file_em = f.read()
        f.close()
    else:
        if filetype != '':
            ifilename = (fname + '.' + filetype + '.em')
        else:
            ifilename = fname + '.em'
        ifilename = os.path.join('resources', 'em', ifilename)
        print("Reading %s template from %s" % (fname, ifilename))
        try:
            file_em = pkg_resources.resource_string('bloom', ifilename)
        except IOError:
            warning("Could not find {0}, skipping...".format(ifilename))
            return False
    s = em.expand(file_em, **stack_data)
    ofilename = os.path.join(dest_dir, fname)
    with open(ofilename, "w") as ofilestr:
        print(s, file=ofilestr)
    if fname == 'rules':
        os.chmod(ofilename, 0o755)
    return True
def __process_template_folder(path, subs):
    items = os.listdir(path)
    processed_items = []
    for item in list(items):
        item = os.path.abspath(os.path.join(path, item))
        if os.path.basename(item) in ['.', '..', '.git', '.svn']:
            continue
        if os.path.isdir(item):
            sub_items = __process_template_folder(item, subs)
            processed_items.extend([os.path.join(item, s) for s in sub_items])
        if not item.endswith(TEMPLATE_EXTENSION):
            continue
        with open(item, 'r') as f:
            template = f.read()
        # Remove extension
        template_path = item[:-len(TEMPLATE_EXTENSION)]
        # Expand template
        info("Expanding '{0}' -> '{1}'".format(os.path.relpath(item),
                                               os.path.relpath(template_path)))
        result = em.expand(template, **subs)
        # Write the result
        with open(template_path, 'w') as f:
            f.write(result)
        # Copy the permissions
        shutil.copymode(item, template_path)
        processed_items.append(item)
    return processed_items
def expand_template(meta_name):
    meta_template_file = templates.get_package_template_path(meta_name + '.em')
    with open(meta_template_file, 'r') as f:
        meta_template_str = f.read()
    meta_str = em.expand(meta_template_str, config)
    meta_file = os.path.join(context.source_space, meta_name)
    with open(meta_file, 'w') as f:
        f.write(meta_str)
def get_snippet(self, cliargs):
    snippet = pkgutil.get_data(
        'rocker',
        'templates/%s_snippet.Dockerfile.em' % self.name).decode('utf-8')
    substitutions = self.get_environment_subs(cliargs)
    if 'user_override_name' in cliargs and cliargs['user_override_name']:
        substitutions['name'] = cliargs['user_override_name']
        substitutions['dir'] = os.path.join('/home/', cliargs['user_override_name'])
    substitutions['home_extension_active'] = True if 'home' in cliargs and cliargs['home'] else False
    return em.expand(snippet, substitutions)
def build_message(msg_name):
    print 'building %s' % (msg_name, )
    msg = message_dict[msg_name]
    for template in templates:
        output = em.expand(template['source'], msg=msg)
        if not output.strip():
            continue
        output_file = os.path.join(
            build_dir,
            em.expand('@{from canard_dsdlc_helpers import *}' + template['output_file'], msg=msg))
        mkdir_p(os.path.dirname(output_file))
        with open(output_file, 'wb') as f:
            f.write(output)
def get_snippet(self, cliargs):
    snippet = pkgutil.get_data(
        'rocker',
        'templates/%s_snippet.Dockerfile.em' % self.name).decode('utf-8')
    substitutions = self.get_environment_subs()
    substitutions['home_extension_active'] = True if 'home' in cliargs and cliargs['home'] else False
    return em.expand(snippet, substitutions)
def _load_em_params(self, moveit_package, support_package, model, num_joints):
    # Load empy parameters: each @{...} statement block is executed with
    # em_params as its namespace, so the assignments end up in the dict
    # that is returned.
    em_params = {}
    em.expand('@{moveit_package = "' + moveit_package + '"}', em_params)
    em.expand('@{support_package = "' + support_package + '"}', em_params)
    em.expand('@{model = "' + model + '"}', em_params)
    em.expand('@{num_joints = "' + str(num_joints) + '"}', em_params)
    return em_params
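# Standalone sketch of the trick _load_em_params relies on: expanding an EmPy
# @{...} block with a dict as the namespace leaves the assignment behind in
# that dict. The key and value below are invented for illustration.
params = {}
em.expand('@{robot_model = "m10ia"}', params)
print(params.get('robot_model'))  # expected: m10ia, per the behavior the function above depends on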
def genSvg(id, dicno, paper_size):
    return em.expand("""<svg width="@(paper_width)mm" height="@(paper_height)mm" version="1.1" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns="http://www.w3.org/2000/svg">
<rect x="@((paper_width - fid_len)/2)mm" y="@((paper_height - fid_len)/2)mm" width="@(fid_len)mm" height="4.0mm" style="stroke:none; fill:black"/>
<rect x="@((paper_width - fid_len)/2)mm" y="@((paper_height + fid_len)/2 - 4)mm" width="@(fid_len)mm" height="4.0mm" style="stroke:none; fill:black"/>
<rect x="@((paper_width - fid_len)/2)mm" y="@((paper_height - fid_len)/2)mm" width="4.0mm" height="@(fid_len)mm" style="stroke:none; fill:black"/>
<rect x="@((paper_width + fid_len)/2 - 4)mm" y="@((paper_height - fid_len)/2)mm" width="4.0mm" height="@(fid_len)mm" style="stroke:none; fill:black"/>
<image x="@((paper_width - fid_len)/2)mm" y="@((paper_height - fid_len)/2)mm" width="@(fid_len)mm" height="@(fid_len)mm" xlink:href="/tmp/marker@(id).png" />
@{cut = max(fid_len/10 * 1.4, 10)}
@{corner_x = (paper_width - fid_len)/2 - cut}
@{corner_y = (paper_height - fid_len)/2 - cut}
<line x1="@(corner_x)mm" y1="@(corner_y)mm" x2="@(corner_x + 2)mm" y2="@(corner_y)mm" style="stroke:black"/>
<line x1="@(corner_x)mm" y1="@(corner_y)mm" x2="@(corner_x)mm" y2="@(corner_y + 2)mm" style="stroke:black"/>
@{corner_x = (paper_width + fid_len)/2 + cut}
@{corner_y = (paper_height - fid_len)/2 - cut}
<line x1="@(corner_x)mm" y1="@(corner_y)mm" x2="@(corner_x - 2)mm" y2="@(corner_y)mm" style="stroke:black"/>
<line x1="@(corner_x)mm" y1="@(corner_y)mm" x2="@(corner_x)mm" y2="@(corner_y + 2)mm" style="stroke:black"/>
<text x="@(paper_width/2)mm" y="@(corner_y - 1)mm" text-anchor="middle" style="font-family:ariel; font-size:8;"> This line should be exactly @(fid_len/10)cm long. </text>
<line x1="@(paper_width/2 - fid_len/2)mm" y1="@(corner_y)mm" x2="@(paper_width/2 + fid_len/2)mm" y2="@(corner_y)mm" style="stroke:black"/>
<line x1="@(corner_x)mm" y1="@(paper_height/2 - fid_len/2)mm" x2="@(corner_x)mm" y2="@(paper_height/2 + fid_len/2)mm" style="stroke:black"/>
@{corner_x = (paper_width - fid_len)/2 - cut}
@{corner_y = (paper_height + fid_len)/2 + cut}
<line x1="@(corner_x)mm" y1="@(corner_y)mm" x2="@(corner_x + 2)mm" y2="@(corner_y)mm" style="stroke:black"/>
<line x1="@(corner_x)mm" y1="@(corner_y)mm" x2="@(corner_x)mm" y2="@(corner_y - 2)mm" style="stroke:black"/>
<line x1="@(corner_x)mm" y1="@(paper_height/2 - fid_len/2)mm" x2="@(corner_x)mm" y2="@(paper_height/2 + fid_len/2)mm" style="stroke:black"/>
<line x1="@(paper_width/2 - fid_len/2)mm" y1="@(corner_y)mm" x2="@(paper_width/2 + fid_len/2)mm" y2="@(corner_y)mm" style="stroke:black"/>
@{corner_x = (paper_width + fid_len)/2 + cut}
@{corner_y = (paper_height + fid_len)/2 + cut}
<line x1="@(corner_x)mm" y1="@(corner_y)mm" x2="@(corner_x - 2)mm" y2="@(corner_y)mm" style="stroke:black"/>
<line x1="@(corner_x)mm" y1="@(corner_y)mm" x2="@(corner_x)mm" y2="@(corner_y - 2)mm" style="stroke:black"/>
<text x="@(paper_width/2)mm" y="@((paper_height + fid_len)/2 + 30)mm" text-anchor="middle" style="font-family:ariel; font-size:24;">@(id) D@(dicno)</text>
</svg>
""", {
        "id": id,
        "dicno": dicno,
        "paper_width": paper_size[0],
        "paper_height": paper_size[1],
        "fid_len": 140.0
    })
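# Illustrative driver for genSvg above: A4 paper in millimetres, and a marker
# image assumed to already exist at /tmp/marker4.png. The marker id 4, the
# dictionary number 7, and the output path are made-up values for the sketch.
if __name__ == '__main__':
    svg_markup = genSvg(4, 7, (210.0, 297.0))
    with open('/tmp/marker4.svg', 'w') as out:
        out.write(svg_markup)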
def get_snippet(self, cli_args):
    self.compute_env_subs(cli_args)
    snippet = pkgutil.get_data(
        'novnc_rocker',
        'templates/%s_snippet.Dockerfile.em' % self.name).decode('utf-8')
    try:
        result = em.expand(snippet, self._env_subs)
    except (NameError, TypeError) as ex:
        raise NameError("Failed to evaluate snippet for %s: %s.\nargs are: %s"
                        % (self.name, ex, self._env_subs))
    return result
def build_message(self):
    print 'building %s' % (self.MsgName, )
    global common_msg_header
    for template in templates:
        if template['source_file'] == 'ubx_msg.c' and (self.MsgType in ['PollRequest', 'Input', 'Command', 'Set']):
            continue
        output = em.expand(template['source'], msg=self)
        if not output.strip():
            continue
        output_file = os.path.join(
            build_dir,
            em.expand('@{from ubx_pdf_csv_parser_helper import *}' + template['output_file'], msg=self))
        mkdir_p(os.path.dirname(output_file))
        with open(output_file, 'wb') as f:
            f.write(output)
        if template['source_file'] == 'ubx_msg.h':
            common_msg_header.write('#include <%s>\r\n' % output_file)
def get_files(self, cli_args):
    file_list = ['supervisor.conf', 'self.pem', 'nginx.conf']
    files = {}
    for f in file_list:
        files['%s' % f] = pkgutil.get_data(
            'novnc_rocker', 'templates/%s' % f).decode('utf-8')
    template_list = ['novnc.conf', 'rproxy-nginx-site', '.htpasswd']
    for f in template_list:
        files['%s' % f] = em.expand(
            pkgutil.get_data('novnc_rocker', 'templates/%s.em' % f).decode('utf-8'),
            cli_args)
    return files
def get_snippet(self, cli_args):
    ign_ver, linux_ver = cli_args[Ignition.get_name()].split(':')
    if ign_ver not in Ignition.get_releases():
        print("WARNING specified Ignition version '%s' is not valid, must choose from " % ign_ver,
              Ignition.get_releases())
        sys.exit(1)
    if linux_ver not in Ignition.get_OSs():
        print("WARNING specified OS '%s' is not valid, must choose from " % linux_ver,
              Ignition.get_OSs())
        sys.exit(1)
    self._env_subs['ign_distro'] = ign_ver
    self._env_subs['system_version'] = linux_ver
    snippet = pkgutil.get_data(
        'ign_rocker',
        'templates/%s_snippet.Dockerfile.em' % self.name).decode('utf-8')
    return em.expand(snippet, self._env_subs)
def source(self):
    if not os.path.exists("mrt_cmake_modules"):
        self.run("git clone https://github.com/KIT-MRT/mrt_cmake_modules.git")
    mrt_cmake_dir = os.path.join(os.getcwd(), "mrt_cmake_modules")
    with open("mrt_cmake_modules/cmake/mrt_cmake_modules-extras.cmake.em") as f:
        extras = em.expand(f.read(),
                           DEVELSPACE=True,
                           PROJECT_SOURCE_DIR=mrt_cmake_dir,
                           CMAKE_CURRENT_SOURCE_DIR=mrt_cmake_dir)
    with open("mrt_cmake_modules-extras.cmake", "w") as f:
        f.write(extras)
    with open("Findmrt_cmake_modules.cmake", "w") as f:
        f.write(find_mrt_cmake)
    with open("CMakeLists.txt", "w") as f:
        f.write(cmake_lists)
def test_env_cached_static(self):
    # hack to fix empy nosetests clash
    sys.stdout = em.ProxyFile(sys.stdout)
    dstdir = os.path.join(self.workspacedir, 'catkin_test')
    shutil.copytree(os.path.join(MOCK_DIR, 'src', 'catkin_test'), dstdir)
    template_file = os.path.join(os.path.dirname(__file__),
                                 '..', '..', 'cmake', 'em', 'order_packages.cmake.em')
    with open(template_file, 'r') as fhand:
        template = fhand.read()
    gdict = {
        'CATKIN_DEVEL_PREFIX': '/foo',
        'CMAKE_PREFIX_PATH': ['/bar'],
        'CATKIN_GLOBAL_LIB_DESTINATION': '/glob-dest/lib',
        'CATKIN_GLOBAL_BIN_DESTINATION': '/glob-dest/bin',
        'PYTHON_INSTALL_DIR': '/foo/dist-packages'
    }
    result = em.expand(template, gdict,
                       source_root_dir=self.workspacedir,
                       whitelisted_packages=None,
                       blacklisted_packages=None,
                       underlay_workspaces=None)
    self.assertTrue('set(CATKIN_ORDERED_PACKAGES "")' in result, result)
    self.assertTrue('set(CATKIN_ORDERED_PACKAGE_PATHS "")' in result, result)
    self.assertTrue('set(CATKIN_ORDERED_PACKAGES_IS_META "")' in result, result)
    self.assertTrue('set(CATKIN_MESSAGE_GENERATORS' in result, result)
    self.assertTrue("""\
list(APPEND CATKIN_ORDERED_PACKAGES "catkin_test")
list(APPEND CATKIN_ORDERED_PACKAGE_PATHS "catkin_test/catkin_test")
list(APPEND CATKIN_ORDERED_PACKAGES_IS_META "True")
list(APPEND CATKIN_ORDERED_PACKAGES_BUILD_TYPE "catkin")""" in result, result)
    self.assertTrue("""\
list(APPEND CATKIN_ORDERED_PACKAGES "a")
list(APPEND CATKIN_ORDERED_PACKAGE_PATHS "catkin_test/a")
list(APPEND CATKIN_ORDERED_PACKAGES_IS_META "False")
list(APPEND CATKIN_ORDERED_PACKAGES_BUILD_TYPE "catkin")""" in result, result)
    # catkin itself filtered out
    self.assertFalse('list(APPEND CATKIN_ORDERED_PACKAGES "catkin"' in result, result)
    self.assertEqual(38, len(result.splitlines()))
def get_files(self, cli_args):
    file_list = ['supervisor.conf']
    files = {}
    for f in file_list:
        files['%s' % f] = pkgutil.get_data(
            'novnc_rocker', 'templates/%s' % f).decode('utf-8')
    template_list = ['turbovnc.conf']
    self.compute_env_subs(cli_args)
    for f in template_list:
        try:
            files['%s' % f] = em.expand(
                pkgutil.get_data('novnc_rocker', 'templates/%s.em' % f).decode('utf-8'),
                self._env_subs)
        except (NameError, TypeError) as ex:
            raise NameError("Failed to evaluate template %s: %s\nargs are: %s"
                            % (f, ex, self._env_subs))
    return files
def test_env_cached_static(self):
    # hack to fix empy nosetests clash
    sys.stdout = em.ProxyFile(sys.stdout)
    template_file = os.path.join(os.path.dirname(__file__),
                                 '..', '..', 'cmake', 'em', 'order_packages.cmake.em')
    with open(template_file, 'r') as fhand:
        template = fhand.read()
    gdict = {'CATKIN_DEVEL_PREFIX': '/foo',
             'CMAKE_PREFIX_PATH': ['/bar'],
             'CATKIN_GLOBAL_LIB_DESTINATION': '/glob-dest/lib',
             'CATKIN_GLOBAL_BIN_DESTINATION': '/glob-dest/bin',
             'PYTHON_INSTALL_DIR': '/foo/dist-packages'}
    result = em.expand(template, gdict,
                       source_root_dir='/tmp/nowhere_dir',
                       whitelisted_packages=[],
                       blacklisted_packages=[])
    self.assertTrue('set(CATKIN_ORDERED_PACKAGES "")' in result, result)
    self.assertTrue('set(CATKIN_ORDERED_PACKAGE_PATHS "")' in result, result)
    self.assertTrue('set(CATKIN_ORDERED_PACKAGES_IS_META "")' in result, result)
    self.assertTrue('set(CATKIN_MESSAGE_GENERATORS' in result, result)
    self.assertEqual(7, len(result.splitlines()))
def debexpand(name, d, filetype=''):
    ifilename = os.path.join(templatedir, name)
    if filetype != '':
        if filetype.startswith('+'):
            ifilename = os.path.join(srcdir, filetype[1:])
        else:
            ifilename += ('.' + filetype + '.em')
    else:
        ifilename += '.em'
    print("Reading %s template from %s" % (name, ifilename))
    file_em = open(ifilename).read()
    s = em.expand(file_em, **d)
    ofilename = outdir + '/' + name
    ofilestr = open(ofilename, "w")
    print(s, file=ofilestr)
    ofilestr.close()
    if name == 'rules':
        os.chmod(ofilename, 0o755)
def expand_template_file(
    template_file_name: str,
    dest_file_path: str,
    data: Dict[str, Any],
) -> Optional[str]:
    """
    Expand template file.

    :param template_file_name: the name of the template file to use (in data/)
    :param dest_file_path: the file path
    :return: the path of the created file, or None if it failed
    """
    template_file_path = os.path.join(data_dir, template_file_name)
    template = None
    with open(template_file_path, 'r') as f:
        template = f.read()
    if template is None:
        return None
    written = 0
    with open(dest_file_path, 'w') as f:
        written = f.write(em.expand(template, data))
    if 0 >= written:
        return None
    return dest_file_path
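# Hedged example call for expand_template_file above; the template name and
# substitution keys are hypothetical and depend on what actually lives in
# `data_dir` in the original package.
created = expand_template_file(
    'package.xml.em',            # assumed template under data_dir
    '/tmp/package.xml',          # destination path for the expanded file
    {'package_name': 'demo', 'version': '0.1.0'},
)
if created is None:
    print('template expansion failed or produced an empty file')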
def createCopybaraFile(substitutions, working_dir, template_dir, force):
    # Substitute
    subs = substitutions.__dict__
    # Create directory
    directory = working_dir + "/" + substitutions.workflow + "/" + substitutions.project
    if os.path.isdir(directory):
        if not force:
            print("Folder {0} already exists. Use --force flag to generate files anyway."
                  .format(directory))
            sys.exit()
    else:
        os.makedirs(directory)
    # Expand template
    template_file = template_dir + "/copybara_template_" + substitutions.workflow + TEMPLATE_EXTENSION
    with open(template_file, 'r') as fh:
        result = em.expand(fh.read(), **subs)
    # Write the result
    copybara_file = directory + "/copy.bara.sky"
    with open(copybara_file, 'w') as f:
        f.write(result)
print ">>> ", " ".join(unsatisfied) print "The missing packages are:" print ">>> ", " ".join(notfound) sys.exit(0) while len(depgraph) > 0: for pkg, deps in depgraph.iteritems(): deps.difference_update(written) if len(deps) == 0 and pkg not in written: write_project_cmake(pkg, index[(pkg, None)]) written.add(pkg) for pkg in written: if pkg in depgraph: del depgraph[pkg] print toplevel_em = open(sys.argv[3] + "/toplevel.cmake.em").read() toplevel_out = open(sys.argv[2] + "/toplevel.cmake", "w") toplevel_out.write( em.expand(toplevel_em, dict(packages=index, langs=langs, src_pythonpath=src_pythonpath, topo_pkgs=topo_pkgs)) ) cpack_em = open(sys.argv[3] + "/make_debs.sh.em").read() cpack_out = open(sys.argv[2] + "/make_debs.sh", "w") cpack_out.write(em.expand(cpack_em, dict(projects=topo_pkgs)))
# (excerpt begins mid-call; the opening of this argument list is not shown)
    build_dir,
)
messages = uavcan.dsdl.parse_namespaces(namespace_paths)

for template in templates:
    with open(os.path.join(templates_dir, template['source_file']), 'rb') as f:
        template['source'] = f.read()

builtlist = set()
for msg in messages:
    if buildlist is None or msg.full_name in buildlist:
        print 'building %s' % (msg.full_name, )
        builtlist.add(msg.full_name)
        for template in templates:
            output = em.expand(template['source'], msg=msg)
            if not output.strip():
                continue
            output_file = os.path.join(
                build_dir,
                em.expand('@{from canard_dsdlc_helpers import *}' + template['output_file'], msg=msg))
            mkdir_p(os.path.dirname(output_file))
            with open(output_file, 'wb') as f:
                f.write(output)

assert buildlist is None or not buildlist - builtlist, "%s not built" % (buildlist - builtlist, )
def expand_template(config_template, d):
    s = em.expand(config_template, **d)
    return s
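# Minimal sketch of calling the expand_template wrapper above. EmPy replaces
# @name and @(expr) markers using the keyword arguments built from `d`; the
# template text and keys here are invented for illustration.
if __name__ == '__main__':
    demo_template = 'project: @name\nversion: @(major).@(minor)\n'
    print(expand_template(demo_template, {'name': 'demo', 'major': 1, 'minor': 2}))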
def expand(config_template, d):
    with open(config_template) as fh:
        file_em = fh.read()
    s = em.expand(file_em, **d)
    return s
def write_project_cmake(name, d, index=index):
    global topo_pkgs
    print ">>>", name, " \r",
    sys.stdout.flush()
    bindir = sys.argv[2] + "/" + name
    if not os.path.isdir(bindir):
        os.mkdir(bindir)
    pkgdict = dict(PROJECT=name)
    pkgdict["brief_doc"] = d.get("brief", "no brief description")
    pkgdict["description"] = d.get("description", "no description")
    pkgdict["DEPENDED_PACKAGE_PATHS"] = [index[(pkgname, None)]["srcdir"]
                                         for pkgname in d["depend"]]
    pkgdict["GENERATED_ACTIONS"] = d["actions"]
    pkgdict["msgs"] = d["msgs"]
    pkgdict["srvs"] = d["srvs"]
    pkgdict["cfgs"] = d["cfgs"]
    pkgdict["thirdparty"] = d["3rdparty"] if "3rdparty" in d else []
    pkgdict["exported_include_dirs"] = []
    if "export" in d:
        if "include_dirs" in d["export"]:
            pkgdict["exported_include_dirs"] = d["export"]["include_dirs"]
    libs_i_need = pkgdict["libs_i_need"] = []
    includes_i_need = pkgdict["includes_i_need"] = []
    link_dirs = pkgdict["link_dirs"] = []
    swig_flags = pkgdict["swig_flags"] = []
    defines = []
    assert "recursive_depends" in d
    for pkgname in d["recursive_depends"]:
        pkg = index[(pkgname, None)]
        pkgcomment = [r"# %s" % pkgname]
        if "export" in pkg:
            if "include_dirs" in pkg["export"]:
                includes_i_need += pkg["export"]["include_dirs"]
            if "libs" in pkg["export"]:
                libs_i_need += pkgcomment + pkg["export"]["libs"]
            if "defines" in pkg["export"]:
                defines += pkg["export"]["defines"]
            if "swig" in pkg["export"]:
                pkgdict["swig_flags"] += pkgcomment + pkg["export"]["swig"]["flags"]
    pkgdict["recursive_depends"] = d["recursive_depends"]
    pkgdict["defines"] = ["-D" + x for x in defines]
    topo_pkgs += [name]
    pkgdict["pythondirs"] = d.get("pythondirs", [])
    # print >>ofile, 'install(DIRECTORY %s DESTINATION share COMPONENT %s PATTERN ".svn" EXCLUDE REGEX ".*\\.(launch|xml|yaml|dox|srv|msg|cmake")' \
    #     % (d['srcdir'], name)
    ofile = open(bindir + "/package.cmake", "w")
    print >> ofile, em.expand(package_em, pkgdict)
# (excerpt begins inside the topological-sort loop; see the fuller variant above)
        if len(deps) == 0 and pkg not in written:
            write_project_cmake(pkg, index[pkg])
            written.add(pkg)
    for pkg in written:
        if pkg in depgraph:
            del depgraph[pkg]
print

toplevel_em = open(sys.argv[2] + '/toplevel.cmake.em').read()
toplevel_out = open(sys.argv[3] + '/toplevel.cmake', 'w')
d = dict(packages=index, langs=langs,
         src_pythonpath=src_pythonpath, topo_pkgs=topo_pkgs)
print "Writing toplevel for %d packages...." % len(d['packages'])
# pprint.pprint(d)
toplevel_out.write(em.expand(toplevel_em, d))

cpack_em = open(sys.argv[2] + '/make_debs.sh.em').read()
cpack_out = open(sys.argv[3] + '/make_debs.sh', 'w')
cpack_out.write(em.expand(cpack_em, dict(projects=topo_pkgs)))
def get_snippet(self, cliargs):
    snippet = pkgutil.get_data(
        'rocker',
        'templates/%s_snippet.Dockerfile.em' % self.name).decode('utf-8')
    return em.expand(snippet, self.get_environment_subs())
def write_project_cmake(name, d, index=index):
    global topo_pkgs
    # print ">>>", name, ' \r',
    sys.stdout.flush()
    bindir = sys.argv[3] + '/' + name
    if not os.path.isdir(bindir):
        os.mkdir(bindir)
    pkgdict = dict(PROJECT=name)
    pkgdict['brief_doc'] = d.get('brief', "no brief description")
    pkgdict['description'] = d.get('description', "no description")
    pkgdict['DEPENDED_PACKAGE_PATHS'] = [index[pkgname].attrib['srcdir']
                                         for pkgname in [x.attrib['package']
                                                         for x in d.findall('depend')
                                                         if 'package' in x.attrib]]
    actions = d.find('actions')
    pkgdict['actions'] = actions.text if actions != None else ''
    msgs = d.find('msgs')
    pkgdict['msgs'] = msgs.text if msgs != None else ''
    srvs = d.find('srvs')
    pkgdict['srvs'] = srvs.text if srvs != None else ''
    cfgs = d.find('cfgs')
    pkgdict['cfgs'] = cfgs.text if cfgs != None else ''
    pkgdict['thirdparty'] = [x.attrib['thirdparty'] for x in d.findall('depend')
                             if 'thirdparty' in x.attrib]
    pkgdict['srcdir'] = d.attrib['srcdir']
    pkgdict['exported_include_dirs'] = [x.text for x in d.findall('export/include_dir')]
    libs_i_need = pkgdict['libs_i_need'] = []
    includes_i_need = pkgdict['includes_i_need'] = []
    link_dirs = pkgdict['link_dirs'] = []
    swig_flags = pkgdict['swig_flags'] = []
    defines = []
    pkgdict['config_libraries'] = d.get('export', {}).get('libs', [])
    pkgdict['config_definitions'] = d.get('export', {}).get('defines', [])
    pkgdict['depend'] = [x.attrib['package'] for x in d.findall('depend')
                         if 'package' in x.attrib]
    assert 'recursive_depends' in d.attrib
    # print "RECDEPS:", name, "->", d.attrib['recursive_depends']
    for pkgname in d.attrib['recursive_depends']:
        # print "CHECKDEP", pkgname
        pkg = index[pkgname]
        pkgcomment = r' # %s' % pkgname
        for l in pkg.findall('export/lib'):
            libs_i_need += [l.text + pkgcomment]
        for i in pkg.findall('export/include_dir'):
            includes_i_need += [i.text + " # " + pkgname]
        # NOTE: this loop variable shadows the function argument `d`
        for d in pkg.findall('export/define'):
            defines += [d.text + " # " + pkgname]
        if 'export' in pkg:
            # if 'include_dirs' in pkg['export']:
            #     includes_i_need += pkg['export']['include_dirs']
            if 'defines' in pkg['export']:
                defines += pkg['export']['defines']
            if 'swig' in pkg['export']:
                pkgdict['swig_flags'] += pkgcomment + pkg['export']['swig']['flags']
    pkgdict['recursive_depends'] = d.attrib['recursive_depends']
    pkgdict['defines'] = ['-D' + x for x in defines]
    topo_pkgs += [name]
    pkgdict['pythondirs'] = d.get('pythondirs', [])
    ofile = open(bindir + '/package.cmake', 'w')
    print >>ofile, em.expand(package_em, pkgdict)
    oconfig_file = open(bindir + '/' + name + '-config.cmake.in', 'w')
    print >>oconfig_file, em.expand(config_em, pkgdict)
    pkgconfig_file = open(bindir + '/' + name + '.pc.in', 'w')
    print >>pkgconfig_file, em.expand(pkgconfig_em, pkgdict)
def get_preamble(self, cliargs):
    preamble = pkgutil.get_data(
        'rocker',
        'templates/%s_preamble.Dockerfile.em' % self.name).decode('utf-8')
    return em.expand(preamble, self.get_environment_subs(cliargs))