def _generate_from_spec(input_file, output_dir, template_dir, msg_context, spec, template_map):
    """Generate output files for a message spec from EmPy templates.

    :param input_file: path of the .msg/.srv file being processed
    :param output_dir: directory the generated files are written to
    :param template_dir: directory containing the EmPy templates
    :param msg_context: genmsg context used to compute the MD5 sum
    :param spec: parsed message spec
    :param template_map: mapping of template file name -> output file name;
        '@NAME@' in the output name is replaced by the spec's short name
    :raises RuntimeError: if a template file is missing from template_dir
    """
    md5sum = genmsg.compute_md5(msg_context, spec)
    # Globals made available to the template code.
    g = {"file_name_in": input_file, "spec": spec, "md5sum": md5sum}
    # Loop over all files to generate
    for template_file_name, output_file_name in template_map.items():
        template_file = os.path.join(template_dir, template_file_name)
        output_file = os.path.join(
            output_dir, output_file_name.replace("@NAME@", spec.short_name))
        # Validate the template before creating the output file so a missing
        # template does not leave an empty artifact behind.  (Also replaces
        # the Python-2-only 'raise E, msg' form, a syntax error on Python 3.)
        if not os.path.isfile(template_file):
            raise RuntimeError(
                "Template file %s not found in template dir %s" %
                (template_file_name, template_dir))
        # TODO: reuse a single interpreter across iterations
        with open(output_file, 'w') as ofile:
            interpreter = em.Interpreter(output=ofile, globals=g, options={
                em.RAW_OPT: True,
                em.BUFFERED_OPT: True
            })
            try:
                # Close the template handle instead of leaking it.
                with open(template_file) as tfile:
                    interpreter.file(tfile)
            finally:
                interpreter.shutdown()
def empyprocess(flines, dir_, template_vars=None):
    """Pass configure file through EmPy processor.

    :param flines: list of template lines (joined with newlines before
        processing)
    :param dir_: directory to chdir into while the template runs, so
        relative includes resolve against it
    :param template_vars: optional dict of locals passed to the template
    :returns: list of expanded lines, with blank lines removed
    :raises EmPyError: wrapping any error raised during expansion, with the
        template line number attached
    """
    cwd = os.getcwd()
    # Run the template from dir_ so relative paths inside it resolve there.
    # NOTE(review): if expansion raises, the cwd is NOT restored before the
    # exception propagates — confirm whether callers rely on that.
    os.chdir(dir_)
    ftempl = StringIO('\n'.join(flines))
    xtempl = StringIO()
    # UncloseableFile keeps shutdown() from closing our StringIO buffer.
    interpreter = em.Interpreter(output=em.UncloseableFile(xtempl))
    try:
        interpreter.file(ftempl, '<template>', template_vars)
    except Exception as exc:
        # Recover the failing template line number from EmPy's context stack.
        lineno = interpreter.contexts[-1].identify()[1]
        # NOTE: Python-2-only three-expression raise; preserves the original
        # traceback while re-raising as EmPyError.
        raise EmPyError(interpreter.meta(exc), lineno), None, sys.exc_info()[2]
    finally:
        interpreter.shutdown()
    xsuite = xtempl.getvalue()
    os.chdir(cwd)
    ftempl.close()
    xtempl.close()
    suiterc = []
    for line in xsuite.splitlines():
        # EmPy leaves blank lines where source lines contain
        # only EmPy code; this matters if line continuation
        # markers are involved, so we remove blank lines here.
        if not line.strip():
            continue
        # restoring newlines here is only necessary for display by
        # the cylc view command:
        # ##suiterc.append(line + '\n')
        suiterc.append(line)
    return suiterc
def generate_module(package_name, output_dir, template_dir, template_dict):
    """Generate package-level module files from EmPy templates.

    :param package_name: package name, exposed to templates as 'package'
    :param output_dir: directory holding previously generated message files;
        its listing is exposed to templates as 'files'
    :param template_dir: directory containing the EmPy templates
    :param template_dict: mapping of template file name -> output file name
    :raises RuntimeError: if a template file is missing from template_dir
    """
    # Locate generated msg files; templates iterate over this listing.
    files = os.listdir(output_dir)
    # Globals made available to the template code.
    g = dict(files=files, package=package_name)
    # Loop over all files to generate
    for template_file_name, output_file_name in template_dict.items():
        template_file = os.path.join(template_dir, template_file_name)
        output_file = os.path.join(output_dir, output_file_name)
        # Validate the template before creating the output file.  (Also
        # replaces the Python-2-only 'raise E, msg' form, a syntax error
        # on Python 3.)
        if not os.path.isfile(template_file):
            raise RuntimeError(
                "Template file %s not found in template dir %s" %
                (template_file_name, template_dir))
        # TODO: reuse a single interpreter across iterations
        with open(output_file, 'w') as ofile:
            interpreter = em.Interpreter(output=ofile, options={
                em.RAW_OPT: True,
                em.BUFFERED_OPT: True
            })
            interpreter.updateGlobals(g)
            try:
                # Close the template handle instead of leaking it.
                with open(template_file) as tfile:
                    interpreter.file(tfile)
            finally:
                interpreter.shutdown()
def evaluate_template(template_name, data):
    """Render *template_name* (relative to this module) with EmPy.

    The interpreter is kept in the module-global ``_interpreter`` so nested
    ``TEMPLATE()`` calls can reuse it; it is always shut down and cleared
    afterwards.

    :param template_name: template file name, resolved next to this module
    :param data: dict of locals made available to the template
    :returns: the expanded template text
    """
    global _interpreter
    # Work on a copy so the caller's dict is not mutated.
    data = dict(data)
    data['TEMPLATE'] = _evaluate_template
    template_path = os.path.join(os.path.dirname(__file__), template_name)
    buf = StringIO()
    try:
        _interpreter = em.Interpreter(
            output=buf,
            options={em.BUFFERED_OPT: True, em.RAW_OPT: True})
        with open(template_path, 'r') as fh:
            body = fh.read()
            _interpreter.invoke(
                'beforeFile', name=template_name, file=fh, locals=data)
        _interpreter.string(body, template_path, locals=data)
        _interpreter.invoke('afterFile')
        return buf.getvalue()
    except Exception as e:  # noqa: F841
        # 'e' and 'template_name' must stay in scope for format_map(locals()).
        print(
            "{e.__class__.__name__} processing template '{template_name}'"
            .format_map(locals()),
            file=sys.stderr)
        raise
    finally:
        _interpreter.shutdown()
        _interpreter = None
def generate_by_template(output_file, template_file, em_globals):
    """
    Invokes the empy interpreter to generate output_file from the given
    template_file and the predefined em_globals dict.

    On any expansion error the partially written output file is removed
    before the exception is re-raised (previously only OSError triggered
    that cleanup, and the interpreter and handles leaked).

    :returns: True on success
    """
    # Create the destination folder if it does not exist yet.
    folder_name = os.path.dirname(output_file)
    if not os.path.exists(folder_name):
        os.makedirs(folder_name)
    ofile = open(output_file, 'w')
    # TODO: reuse interpreter across calls
    interpreter = em.Interpreter(output=ofile, globals=em_globals, options={
        em.RAW_OPT: True,
        em.BUFFERED_OPT: True
    })
    try:
        # Close the template handle instead of leaking it.
        with open(template_file) as tfile:
            interpreter.file(tfile)
    except Exception:
        # Leave no partial artifact behind on failure.
        interpreter.shutdown()
        ofile.close()
        os.remove(output_file)
        raise
    interpreter.shutdown()
    ofile.close()
    return True
def generate_file(template, destination, context):
    """Expand an EmPy template into *destination* for a message type.

    The output file name is derived from the template's base name: the
    trailing '.em' is dropped and '__<message type name>' is appended to the
    first name component (the message type comes from context['spec']).

    :param template: path of the .em template ('/' separated)
    :param destination: output directory (created if missing)
    :param context: globals for the template; deep-copied so the template
        cannot mutate the caller's data
    """
    base_name = template.split('/')[-1]
    base_name_components = base_name.split('.')
    # Remove the last base name component if it's .em
    if base_name_components[-1] == 'em':
        base_name_components.pop(-1)
    # Add the message type name to the source file
    base_name_components[0] = (
        base_name_components[0] + '__' +
        get_message_type_name(context['spec']))
    filename = '.'.join(base_name_components)
    output_file_path = '/'.join([destination, filename])
    output_buffer = StringIO()
    interpreter = em.Interpreter(output=output_buffer,
                                 globals=copy.deepcopy(context))
    try:
        # Close the template handle instead of leaking it.
        with open(template, 'r') as template_handle:
            interpreter.file(template_handle)
    finally:
        # Previously the interpreter was never shut down.
        interpreter.shutdown()
    if not os.path.exists(destination):
        os.makedirs(destination)
    with open(output_file_path, 'w') as file:
        file.write(output_buffer.getvalue())
def _expand_template(template_file, data, output_file):
    """Expand an EmPy template into *output_file*, writing only on change.

    On expansion failure the stale output file (if any) is removed, a
    diagnostic is printed to stderr and the exception is re-raised.
    """
    buf = StringIO()
    interpreter = em.Interpreter(
        output=buf,
        options={em.BUFFERED_OPT: True, em.RAW_OPT: True},
        globals=data,
    )
    with open(template_file, 'r') as template_handle:
        try:
            interpreter.file(template_handle)
            content = buf.getvalue()
        except Exception as e:
            if os.path.exists(output_file):
                os.remove(output_file)
            print("Exception when expanding '%s' into '%s': %s" %
                  (template_file, output_file, e), file=sys.stderr)
            raise
        finally:
            interpreter.shutdown()
    if os.path.exists(output_file):
        with open(output_file, 'r') as existing:
            if existing.read() == content:
                # Unchanged; keep the old file (preserves its mtime).
                return
    else:
        os.makedirs(os.path.dirname(output_file), exist_ok=True)
    with open(output_file, 'w') as out:
        out.write(content)
def generate_install(self, job_start_file):
    """Assemble the full set of files to install for this job.

    :param job_start_file: template name (under templates/) used to render
        the job's '<name>-start' script
    :returns: self.installation_files, a dict mapping absolute install path
        to a descriptor dict ('content' + 'mode', or 'symlink')
    """
    # Default is /etc/ros/DISTRO/JOBNAME.d
    self._set_job_path()
    # User-specified launch files.
    self._add_job_files()
    # This is optional to support the old --augment flag where a "job" only adds
    # launch files to an existing configuration.
    if (self.job.generate_system_files):
        # Share a single instance of the empy interpreter.
        self.interpreter = em.Interpreter(globals=self.job.__dict__.copy())
        # systemd unit file for the job.
        self.installation_files[os.path.join(
            self.root, "lib/systemd/system", self.job.name + ".service")] = {
                "content": self._fill_template("templates/systemd_job.conf.em"),
                "mode": 0o644
            }
        # Enable the unit by symlinking it into multi-user.target.wants.
        self.installation_files[os.path.join(
            self.root, "etc/systemd/system/multi-user.target.wants",
            self.job.name + ".service")] = {
                "symlink": os.path.join(self.root, "lib/systemd/system/",
                                        self.job.name + ".service")
            }
        # Start/stop helper scripts (executable).
        self.installation_files[os.path.join(
            self.root, "usr/sbin", self.job.name + "-start")] = {
                "content": self._fill_template("templates/%s" % job_start_file),
                "mode": 0o755
            }
        self.installation_files[os.path.join(
            self.root, "usr/sbin", self.job.name + "-stop")] = {
                "content": self._fill_template("templates/job-stop.em"),
                "mode": 0o755
            }
        self.interpreter.shutdown()
    # Add an annotation file listing what has been installed. This is a union of what's being
    # installed now with what has been installed previously, so that an uninstall should remove
    # all of it. A more sophisticated future implementation could track contents or hashes and
    # thereby warn users when a new installation is stomping a change they have made.
    self._load_installed_files_set()
    self.installed_files_set.update(self.installation_files.keys())
    # Remove the job directory. This will fail if it is not empty, and notify the user.
    self.installed_files_set.add(self.job.job_path)
    # Remove the annotation file itself.
    self.installed_files_set.add(self.installed_files_set_location)
    self.installation_files[self.installed_files_set_location] = {
        "content": "\n".join(self.installed_files_set)
    }
    return self.installation_files
def main():
    """CLI entry point: render a static C device description from EDS/DCF.

    Parses the command line, optionally reads the device description, and
    expands the packaged 'dev.c.em' (or 'dev.h.em' with --header) template
    to the requested output.
    """
    parser = argparse.ArgumentParser(
        description="Generate a static C device description from an EDS/DCF file."
    )
    parser.add_argument(
        "--no-strings",
        action="store_true",
        help="do not include optional strings in the output",
    )
    parser.add_argument(
        "--include-config",
        action="store_true",
        help="add '#include <config.h>' snippet to the output",
    )
    parser.add_argument(
        "-o",
        "--output",
        metavar="FILE",
        type=str,
        default="-",
        help="write the output to FILE instead of stdout",
    )
    parser.add_argument(
        "--header",
        action="store_true",
        help="generate header file with function prototype",
    )
    parser.add_argument("filename", nargs=1, help="the name of the EDS/DCF file")
    parser.add_argument(
        "name",
        nargs=1,
        type=str,
        default="",
        help="the variable name of the generated device description",
    )
    args = parser.parse_args()

    if args.header:
        # Header generation needs no parsed device, only the prototype name.
        dev = None
        filename = "dev.h.em"
    else:
        dev = read_device_from_dcf(args.filename[0])
        filename = "dev.c.em"

    with open_or_stdout(args.output) as output:
        # Globals made available to the template code.
        params = {
            "no_strings": args.no_strings,
            "include_config": args.include_config,
            "dev": dev,
            "name": args.name[0],
        }
        interpreter = em.Interpreter(output=output, globals=params)
        try:
            filename = pkg_resources.resource_filename(
                __name__, "data/" + filename)
            # Close the template handle instead of leaking it.
            with open(filename) as template:
                interpreter.file(template)
        finally:
            interpreter.shutdown()
def generate_changes_file(g):
    """Render 'template.changes.em' into '<osc_project>/<osc_package>/<Name>.changes'.

    :param g: template globals; must contain 'osc_project', 'osc_package'
        and 'Name'
    :returns: the generated file name, '<Name>.changes'
    """
    # (An unused 'global' declaration was removed; none of those names were
    # assigned here.)
    changes_path = (g['osc_project'] + '/' + g['osc_package'] + '/' +
                    g['Name'] + '.changes')
    # Close the output file instead of leaking its handle.
    with open(changes_path, "w") as ofile:
        interpreter = em.Interpreter(output=ofile)
        try:
            interpreter.include('template.changes.em', g)
        finally:
            interpreter.shutdown()
    return g['Name'] + '.changes'
def generate_pkg_meta_file(g):
    """Render 'template.pkg_meta.em' with globals *g* and return the text."""
    global os_name, os_version, rdistro, ctx, os_installers, default_os_installer, dist_data, rindex, rcache, rview
    buf = io.StringIO("")
    interpreter = em.Interpreter(output=buf)
    interpreter.include('template.pkg_meta.em', g)
    # Capture the rendered text before shutting the interpreter down.
    rendered = buf.getvalue()
    interpreter.shutdown()
    return rendered
def generate_template(template_in, template_out):
    """Expand EmPy template *template_in* into the writable *template_out*.

    The template sees a single global, 'ros_distro', taken from the
    ROS_DISTRO environment variable.

    :param template_in: path of the template file to read
    :param template_out: open, writable file object for the output
    :raises KeyError: if ROS_DISTRO is not set in the environment
    """
    import em
    g = {'ros_distro': os.environ['ROS_DISTRO']}
    interpreter = em.Interpreter(output=template_out, options={
        em.RAW_OPT: True,
        em.BUFFERED_OPT: True
    })
    interpreter.updateGlobals(g)
    try:
        # Close the template handle instead of leaking it.
        with open(template_in) as tfile:
            interpreter.file(tfile)
    finally:
        # Previously the interpreter leaked when expansion raised.
        interpreter.shutdown()
def transform_csv_to_html(data_source, metadata_builder, rosdistro, start_time,
                          template_file, resource_path,
                          cached_distribution=None):
    """Render a build-status CSV into HTML via an EmPy template.

    The first CSV row is treated as headers; the first four columns are
    name/metadata columns, the rest are per-target status columns.  All of
    this function's locals (including rosdistro, start_time, resource_path)
    are handed to the template via locals().
    """
    reader = csv.reader(data_source, delimiter=',', quotechar='"')
    rows = [row for row in reader]
    headers = rows[0]
    rows = rows[1:]

    metadata_columns = [None] * 4 + [metadata_builder(c) for c in headers[4:]]
    headers = [
        format_header_cell(headers[i], metadata_columns[i])
        for i in range(len(headers))
    ]

    # count non-None rows per (sub-)column
    # (The first four entries are placeholders and are never mutated.)
    row_counts = [[]] * 4 + [[0] * 3 for _ in range(4, len(headers))]
    for row in rows:
        for i in range(4, len(row_counts)):
            versions = get_cell_versions(row[i])
            for j in range(0, len(versions)):
                if versions[j] != 'None':
                    row_counts[i][j] += 1

    def get_package_name_from_row(row):
        # Sort key: package name is the first column.
        return row[0]

    rows = sorted(rows, key=get_package_name_from_row)
    rows = [format_row(r, metadata_columns) for r in rows]

    inject_status_and_maintainer(cached_distribution, headers, row_counts,
                                 rows)

    # div-wrap the first three cells for layout reasons. It's difficult to contrain the
    # overall dimensions of a table cell without an inner element to use as the overflow
    # container.
    for row in rows:
        for i in range(3):
            row[i] = "<div>%s</div>" % row[i]

    repos = REPOS
    resource_hashes = get_resource_hashes()

    output = StringIO()
    try:
        interpreter = em.Interpreter(output=output)
        # NOTE(review): the template file handle is never closed — consider
        # a with-block here.
        interpreter.file(open(template_file), locals=locals())
        return output.getvalue()
    finally:
        interpreter.shutdown()
def main():
    """Generate counter.v from counter.em and run the apio tool chain."""
    file_name = "counter.v"
    # Close the output before apio reads it; previously neither the output
    # nor the template handle was ever closed, so counter.v could still be
    # unflushed when apio ran.
    with open(file_name, 'w') as ofile:
        # argv passes the counter width (20) to the template.
        interpreter = em.Interpreter(output=ofile, argv=str(20))
        try:
            with open('counter.em') as template:
                interpreter.file(template)
        finally:
            interpreter.shutdown()  # this is important; see below
    subprocess.call('apio "verify"', shell=True)
    subprocess.call('apio "build"', shell=True)
    subprocess.call('apio "upload"', shell=True)
def write_dcf(self, directory: str, remote_pdo: bool = False):
    """Write the master DCF for this network to <directory>/master.dcf.

    :param directory: destination directory (must already exist)
    :param remote_pdo: passed through to the template as 'remote_pdo'
    """
    with open(os.path.join(directory, "master.dcf"), "w") as output:
        # Renamed from 'globals' to avoid shadowing the builtin.
        template_globals = {
            "master": self,
            "slaves": self.slaves,
            "remote_pdo": remote_pdo
        }
        interpreter = em.Interpreter(output=output, globals=template_globals)
        try:
            filename = pkg_resources.resource_filename(
                __name__, "data/master.dcf.em")
            # Close the template handle instead of leaking it.
            with open(filename) as template:
                interpreter.file(template)
        finally:
            interpreter.shutdown()
def generate_template(template_in, template_out):
    """Expand EmPy template *template_in* into the writable *template_out*.

    Template globals: 'ros_distro' and 'skip_testing' ('ON'/'OFF'), both
    taken from vinca.config.

    :param template_in: path of the template file to read
    :param template_out: open, writable file object for the output
    """
    import em
    from vinca.config import skip_testing, ros_distro
    g = {
        "ros_distro": ros_distro,
        "skip_testing": "ON" if skip_testing else "OFF"
    }
    interpreter = em.Interpreter(output=template_out, options={
        em.RAW_OPT: True,
        em.BUFFERED_OPT: True
    })
    interpreter.updateGlobals(g)
    try:
        # Close the template handle instead of leaking it.
        with open(template_in) as tfile:
            interpreter.file(tfile)
    finally:
        # Previously the interpreter leaked when expansion raised.
        interpreter.shutdown()
def generate_by_template(output_file, template_file, em_globals):
    """
    Invokes the empy interpreter to generate output_file from the given
    template_file and the predefined em_globals dict.

    :returns: True on success
    :raises RuntimeError: if template_file does not exist
    """
    # Validate the template before creating the output file so a missing
    # template never leaves an empty artifact behind.
    if not os.path.isfile(template_file):
        raise RuntimeError("Template file %s not found" % (template_file))
    ofile = open(output_file, 'w')
    # todo, reuse interpreter
    interpreter = em.Interpreter(output=ofile, globals=em_globals,
                                 options={em.RAW_OPT: True,
                                          em.BUFFERED_OPT: True})
    try:
        # Close the template handle instead of leaking it.
        with open(template_file) as tfile:
            interpreter.file(tfile)
    finally:
        # Previously shutdown/close were skipped when expansion raised.
        interpreter.shutdown()
        ofile.close()
    return True
def _generate_from_spec(input_file, output_dir, template_dir, msg_context,
                        spec, template_map, search_path):
    """Generate output files for a message/service spec from EmPy templates.

    :param input_file: path of the .msg/.srv file being processed
    :param output_dir: directory the generated files are written to
    :param template_dir: directory containing the EmPy templates
    :param msg_context: genmsg context (MD5 sum, full text computation)
    :param spec: parsed message or service spec
    :param template_map: mapping of template file name -> output file name;
        '@NAME@' in the output name is replaced by the spec's short name
    :param search_path: dependency search path, exposed to templates
    :raises RuntimeError: if a template file is missing from template_dir
    """
    md5sum = genmsg.gentools.compute_md5(msg_context, spec)

    # precompute msg definition once
    if isinstance(spec, genmsg.msgs.MsgSpec):
        msg_definition = genmsg.gentools.compute_full_text(msg_context, spec)

    # Loop over all files to generate
    for template_file_name, output_file_name in template_map.items():
        template_file = os.path.join(template_dir, template_file_name)
        output_file = os.path.join(
            output_dir, output_file_name.replace("@NAME@", spec.short_name))
        # Validate the template before creating the output file so a missing
        # template never creates (and then removes) an empty artifact.
        if not os.path.isfile(template_file):
            raise RuntimeError(
                "Template file %s not found in template dir %s" %
                (template_file_name, template_dir))
        # Globals made available to the template code.
        g = {
            "file_name_in": input_file,
            "spec": spec,
            "md5sum": md5sum,
            "search_path": search_path,
            "msg_context": msg_context
        }
        if isinstance(spec, genmsg.msgs.MsgSpec):
            g['msg_definition'] = msg_definition
        # TODO: reuse a single interpreter across iterations
        with open(output_file, 'w') as ofile:
            interpreter = em.Interpreter(output=ofile, globals=g, options={
                em.RAW_OPT: True,
                em.BUFFERED_OPT: True
            })
            try:
                # Close the template handle instead of leaking it.
                with open(template_file) as tfile:
                    interpreter.file(tfile)
            finally:
                interpreter.shutdown()
def main():
    """CLI entry point: configure IBIS-AMI model sources from a .py config.

    Reads the model configuration module and renders the .cpp, .ami and
    .ibs files from their EmPy templates.
    """
    parser = argparse.ArgumentParser(description='Configure IBIS-AMI model C++ source code, IBIS model, and AMI file.')
    parser.add_argument('py_file', type=str, help='name of model configuration file (*.py)')
    args = parser.parse_args()

    # Confirm the existence of the model configuration file.
    py_file = args.py_file
    if(not op.isfile(py_file)):
        raise RuntimeError("Model configuration file, %s, not found." % (py_file))
    else:
        py_file = op.abspath(py_file)
    file_base_name = op.splitext(op.basename(py_file))[0]

    # Read model configuration information.
    # (print is used in call form so this parses on both Python 2 and 3;
    # the original 'print "..."' statements are syntax errors on Python 3.)
    print("Reading model configuration information from file: %s." % (py_file))
    with open(py_file, 'rt') as cfg_file:
        cfg = imp.load_module(file_base_name, cfg_file, py_file,
                              ('py', 'r', imp.PY_SOURCE))

    # Configure the 3 files.
    for ext in ['cpp', 'ami', 'ibs']:
        out_file = file_base_name + '.' + ext
        if(ext == 'ami'):
            em_file = op.dirname(__file__) + '/generic.ami.em'
        elif(ext == 'ibs'):
            em_file = op.dirname(__file__) + '/generic.ibs.em'
        else:
            # The C++ template is expected next to the output file.
            em_file = out_file + '.' + 'em'
        print("Buidling '%s' from '%s'..." % (out_file, em_file))
        with open(out_file, 'wt') as out_file:
            interpreter = em.Interpreter(
                output = out_file,
                globals = {
                    'ami_params'  : cfg.ami_params,
                    'ibis_params' : cfg.ibis_params,
                    'param_types' : param_types,
                    'model_name'  : cfg.kFileBaseName,
                    'description' : cfg.kDescription,
                    'date'        : str(date.today()),
                }
            )
            try:
                # Close the template handle instead of leaking it.
                with open(em_file) as template:
                    interpreter.file(template)
            finally:
                interpreter.shutdown()
def expand_template(template_file, data, output_file, minimum_timestamp=None):
    """Expand an EmPy template into *output_file*, writing only on change.

    :param template_file: path of the template to expand
    :param data: dict of globals made available to the template
    :param output_file: destination path (parent folders created on demand)
    :param minimum_timestamp: if set and the existing output is older than
        this, the file is rewritten even when the content is unchanged
    """
    output = StringIO()
    interpreter = em.Interpreter(
        output=output,
        options={
            em.BUFFERED_OPT: True,
            em.RAW_OPT: True,
        },
        globals=data,
    )
    with open(template_file, 'r') as h:
        try:
            interpreter.file(h)
            # Capture before shutdown so buffering behavior is unchanged.
            content = output.getvalue()
        except Exception:
            # Remove any stale output so failures are visible downstream.
            if os.path.exists(output_file):
                os.remove(output_file)
            print("Exception when expanding '%s' into '%s'" %
                  (template_file, output_file), file=sys.stderr)
            raise
        finally:
            # Previously the interpreter leaked when expansion raised.
            interpreter.shutdown()

    # only overwrite file if necessary
    # which is either when the timestamp is too old or when the content is different
    if os.path.exists(output_file):
        timestamp = os.path.getmtime(output_file)
        if minimum_timestamp is None or timestamp > minimum_timestamp:
            with open(output_file, 'r') as h:
                if h.read() == content:
                    return
    else:
        # create folder if necessary
        try:
            os.makedirs(os.path.dirname(output_file))
        except FileExistsError:
            pass
    with open(output_file, 'w') as h:
        h.write(content)
def _generate_from_spec(input_file, output_dir, template_dir, msg_context,
                        spec, template_map, search_path):
    """Generate output files for *spec* from the templates in template_map.

    '@NAME@' in each output file name is replaced with the last
    '_'-separated component of the spec's short name.  A None spec is a
    no-op.

    :raises RuntimeError: if a template file is missing from template_dir
    """
    if spec is None:
        return
    # Loop over all files to generate
    for template_file_name, output_file_name in template_map.items():
        template_file = os.path.join(template_dir, template_file_name)
        output_file = os.path.join(
            output_dir,
            output_file_name.replace("@NAME@",
                                     spec.short_name.split('_')[-1]))
        # Validate the template before creating the output file so a missing
        # template never creates (and then removes) an empty artifact.
        if not os.path.isfile(template_file):
            raise RuntimeError(
                "Template file %s not found in template dir %s" %
                (template_file_name, template_dir))
        # Globals made available to the template code.
        g = {
            "file_name_in": input_file,
            "spec": spec,
            "search_path": search_path,
            "msg_context": msg_context
        }
        # TODO: reuse a single interpreter across iterations
        with open(output_file, 'w') as ofile:
            interpreter = em.Interpreter(output=ofile, globals=g, options={
                em.RAW_OPT: True,
                em.BUFFERED_OPT: True
            })
            try:
                # Close the template handle instead of leaking it.
                with open(template_file) as tfile:
                    interpreter.file(tfile)
            finally:
                interpreter.shutdown()
def ami_config(py_file):
    """Read in the ``py_file`` and cpp.em file then generate a ibis, ami and cpp.

    :param py_file: path of the model configuration module (*.py); outputs
        are written next to it with the extensions .cpp, .ami and .ibs
    """
    file_base_name = Path(py_file).stem

    # Read model configuration information.
    print("Reading model configuration information from file: %s." % (py_file))
    spec = importlib.util.spec_from_file_location(file_base_name, py_file)
    cfg = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(cfg)

    # Configure the 3 files.
    for ext in ["cpp", "ami", "ibs"]:
        out_file = Path(py_file).with_suffix(".{}".format(ext))
        if ext == "ami":
            em_file = Path(__file__).parent.joinpath("generic.ami.em")
        elif ext == "ibs":
            em_file = Path(__file__).parent.joinpath("generic.ibs.em")
        else:
            # The C++ template is expected next to the output file.
            em_file = out_file.with_suffix(".cpp.em")
        print(f"Buidling '{out_file}' from '{em_file}'...")
        # Use a distinct handle name; the original rebound 'out_file'.
        with open(out_file, "w") as out_fh:
            interpreter = em.Interpreter(
                output=out_fh,
                globals={
                    "ami_params": cfg.ami_params,
                    "ibis_params": cfg.ibis_params,
                    "param_types": param_types,
                    "model_name": cfg.kFileBaseName,
                    "description": cfg.kDescription,
                    "date": str(date.today()),
                },
            )
            try:
                # Close the template handle instead of leaking it.
                with open(em_file) as template:
                    interpreter.file(template)
            finally:
                interpreter.shutdown()
def evaluate_template(template_name, data):
    """
    Write the data in the template.

    :param template_name: name of the template to evaluate
    :type template_name: str
    :param data: data that is used to fill the template
    :type data: dict
    :returns: string with the template evaluated
    :rtype: str
    """
    msg_index_template, template_path = load_template(template_name)
    output = StringIO()
    interpreter = em.Interpreter(
        output=output,
        options={
            em.BUFFERED_OPT: True,
            em.RAW_OPT: True,
        },
    )
    try:
        with open(template_path, 'r') as h:
            template_content = h.read()
            interpreter.invoke(
                'beforeFile', name=template_name, file=h, locals=data)
            interpreter.string(template_content, template_path, locals=data)
            interpreter.invoke('afterFile')
        # Capture before shutdown so buffering behavior is unchanged.
        content = output.getvalue()
    except Exception as e:  # noqa: F841
        print(f"{e.__class__.__name__} when expanding '{template_name}' "
              f": '{e}'", file=sys.stderr)
        raise
    finally:
        # Previously the interpreter leaked when expansion raised.
        interpreter.shutdown()
    return content
def expand_template(template_name, data, output_file, minimum_timestamp=None,
                    template_basepath=None):
    """Expand a named EmPy template into *output_file*, writing only on change.

    :param template_name: template name resolved via the module-global
        template_prefix_path (legacy API: a full path when
        template_basepath is None)
    :param data: dict of locals for the template; copied, then extended
        with helper functions
    :param output_file: destination path (parent folders created on demand)
    :param minimum_timestamp: if set and the existing output is older, the
        file is rewritten even when the content is unchanged
    :param template_basepath: directory prepended to the template search path
    """
    # in the legacy API the first argument was the path to the template
    if template_basepath is None:
        template_name = pathlib.Path(template_name)
        template_basepath = template_name.parent
        template_name = template_name.name

    # The interpreter is module-global so nested template expansion can
    # reuse it.
    global interpreter
    output = StringIO()
    interpreter = em.Interpreter(
        output=output,
        options={
            em.BUFFERED_OPT: True,
            em.RAW_OPT: True,
        },
    )

    global template_prefix_path
    # Push the base path for the duration of this expansion; popped in the
    # finally below.
    template_prefix_path.append(template_basepath)
    template_path = get_template_path(template_name)

    # create copy before manipulating
    data = dict(data)
    _add_helper_functions(data)

    try:
        with template_path.open('r') as h:
            template_content = h.read()
            interpreter.invoke('beforeFile', name=template_name, file=h,
                               locals=data)
            interpreter.string(template_content, template_path, locals=data)
            interpreter.invoke('afterFile')
    except Exception as e:  # noqa: F841
        # Remove any stale output so failures are visible downstream.
        if os.path.exists(output_file):
            os.remove(output_file)
        print("{e.__class__.__name__} when expanding '{template_name}' into "
              "'{output_file}': {e}".format_map(locals()), file=sys.stderr)
        raise
    finally:
        template_prefix_path.pop()

    content = output.getvalue()
    interpreter.shutdown()

    # only overwrite file if necessary
    # which is either when the timestamp is too old or when the content is different
    if os.path.exists(output_file):
        timestamp = os.path.getmtime(output_file)
        if minimum_timestamp is None or timestamp > minimum_timestamp:
            with open(output_file, 'r') as h:
                if h.read() == content:
                    return
    else:
        # create folder if necessary
        try:
            os.makedirs(os.path.dirname(output_file))
        except FileExistsError:
            pass

    with open(output_file, 'w') as h:
        h.write(content)
# NOTE: functional problems in the generation of the Enigma discs
# (translated from the original Spanish header comment).
import em

# One Verilog file is generated per disc from the enigma_disc.em template.
# int() is required: on Python 3, input() returns a string and range()
# would raise TypeError.
number_of_discs = int(input("Please enter the number of discs:\n"))
for number in range(number_of_discs):
    file_name = "disc_" + str(number + 1) + ".v"
    # Close the output and template handles instead of leaking them.
    with open(file_name, 'w') as file:
        # argv passes the 1-based disc number to the template.
        interpreter = em.Interpreter(output=file, argv=str(number + 1))
        try:
            with open('enigma_disc.em') as template:
                interpreter.file(template)
        finally:
            interpreter.shutdown()  # this is important; see below
def main():
    """rossum CLI entry point: generate a ninja build file for Karel projects.

    Pipeline: parse CLI args -> validate source/build dirs and robot.ini ->
    locate the FANUC tool chain (ktrans/ktransw/support dir) -> discover
    packages in the source space(s) and extra locations -> resolve
    dependencies/includes -> expand the build-file EmPy template into the
    build dir.  Exits via sys.exit(_OS_EX_DATAERR) on any validation error.
    """
    import argparse

    description = ("Version {0}\n\nA cmake-like Makefile generator for Fanuc "
                   "Robotics (Karel) projects\nthat supports out-of-source "
                   "builds.".format(ROSSUM_VERSION))
    epilog = ("Usage example:\n\n"
              " mkdir C:\\foo\\bar\\build\n"
              " cd C:\\foo\\bar\\build\n"
              " rossum C:\\foo\\bar\\src")

    parser = argparse.ArgumentParser(
        prog='rossum',
        description=description,
        epilog=epilog,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('-v', '--verbose', action='store_true', dest='verbose',
                        help='Be verbose')
    parser.add_argument('-V', '--version', action='version',
                        version='%(prog)s {0}'.format(ROSSUM_VERSION))
    parser.add_argument(
        '-q', '--quiet', action='store_true', dest='quiet',
        help='Be quiet (only warnings and errors will be shown)')
    parser.add_argument('--rg64', action='store_true', dest='rg64',
                        help='Assume 64-bit Roboguide version.')
    parser.add_argument(
        '-c', '--core', type=str, dest='core_version', metavar='ID',
        default=(os.environ.get(ENV_DEFAULT_CORE_VERSION) or
                 DEFAULT_CORE_VERSION),
        help="Version of the core files used when translating "
             "(default: %(default)s). Use the '{0}' environment "
             "variable to configure an alternative default without having to "
             "specify it on each invocation of rossum.".format(
                 ENV_DEFAULT_CORE_VERSION))
    parser.add_argument(
        '--support', type=str, dest='support_dir', metavar='PATH',
        help="Location of KAREL support directory "
             "(default: auto-detect based on selected core version and "
             "FANUC registry keys)")
    parser.add_argument('-d', '--dry-run', action='store_true', dest='dry_run',
                        help='Do everything except writing to build file')
    parser.add_argument('--ktrans', type=str, dest='ktrans', metavar='PATH',
                        help="Location of ktrans (default: auto-detect)")
    parser.add_argument(
        '--ktransw', type=str, dest='ktransw', metavar='PATH',
        help="Location of ktransw (default: assume it's on the Windows PATH)")
    parser.add_argument(
        '-n', '--no-env', action='store_true', dest='no_env',
        help='Do not search the {0}, even if it is set'.format(ENV_PKG_PATH))
    parser.add_argument(
        '-p', '--pkg-dir', action='append', type=str, dest='extra_paths',
        metavar='PATH', default=[],
        help='Additional paths to search for packages (multiple allowed). '
             'Note: this essentially extends the source space.')
    parser.add_argument(
        '-r', '--robot-ini', type=str, dest='robot_ini', metavar='INI',
        default=ROBOT_INI_NAME,
        help="Location of {0} (default: source dir)".format(ROBOT_INI_NAME))
    parser.add_argument(
        '-w', '--overwrite', action='store_true', dest='overwrite',
        help='Overwrite any build file that may exist in the build dir')
    parser.add_argument('src_dir', type=str, metavar='SRC',
                        help="Main directory with packages to build")
    parser.add_argument(
        'build_dir', type=str, nargs='?', metavar='BUILD',
        help="Directory for out-of-source builds (default: 'cwd')")
    args = parser.parse_args()

    # configure the logger
    FMT = '%(levelname)-8s | %(message)s'
    logging.basicConfig(format=FMT, level=logging.INFO)
    global logger
    logger = logging.getLogger('rossum')
    if args.verbose:
        logger.setLevel(logging.DEBUG)
    if args.quiet:
        logger.setLevel(logging.WARNING)

    logger.info("This is rossum v{0}".format(ROSSUM_VERSION))

    ############################################################################
    #
    # Validation
    #

    # build dir is either CWD or user specified it
    build_dir = os.path.abspath(args.build_dir or os.getcwd())
    source_dir = os.path.abspath(args.src_dir)
    extra_paths = [os.path.abspath(p) for p in args.extra_paths]

    # make sure that source dir exists
    if not os.path.exists(source_dir):
        logger.fatal(
            "Directory '{0}' does not exist. Aborting".format(source_dir))
        # TODO: find appropriate exit code
        sys.exit(_OS_EX_DATAERR)

    # refuse to do in-source builds
    if os.path.exists(os.path.join(build_dir, MANIFEST_NAME)):
        logger.fatal("Found a package manifest ({0}) in the build "
                     "dir ({1}). Refusing to do in-source builds.".format(
                         MANIFEST_NAME, build_dir))
        # TODO: find appropriate exit code
        sys.exit(_OS_EX_DATAERR)

    # make sure that build dir exists
    if not os.path.exists(build_dir):
        logger.fatal("Directory '{0}' does not exist (and not creating it), "
                     "aborting".format(build_dir))
        # TODO: find appropriate exit code
        sys.exit(_OS_EX_DATAERR)

    # check we can find a usable robot.ini somewhere.
    # strategy:
    # - if user provided a location, use that
    # - if not, try CWD (default value of arg is relative to CWD)
    # - if that doesn't work, try source space
    # because 'args.robot_ini' has a default which is simply 'robot.ini', we
    # cover the first two cases in the above list with this single statement
    robot_ini_loc = os.path.abspath(args.robot_ini)

    # check that it actually exists
    logger.debug("Checking: {}".format(robot_ini_loc))
    if not os.path.exists(robot_ini_loc):
        logger.warning("No {} in CWD, and no alternative provided, trying "
                       "source space".format(ROBOT_INI_NAME))
        robot_ini_loc = os.path.join(source_dir, ROBOT_INI_NAME)
        logger.debug("Checking: {}".format(robot_ini_loc))
        if os.path.exists(robot_ini_loc):
            logger.info("Found {} in source space".format(ROBOT_INI_NAME))
        else:
            logger.warning("File does not exist: {}".format(robot_ini_loc))
            logger.fatal("Cannot find a {}, aborting".format(ROBOT_INI_NAME))
            sys.exit(_OS_EX_DATAERR)

    # non-"empty" robot.ini files may conflict with rossum and/or ktransw
    # CLAs. Ideally, we'd allow rossum/ktransw CLAs to override paths and
    # other settings from robot.ini files, but for now we'll only just
    # WARN the user if we find a non-empty file.
    with open(robot_ini_loc, 'r') as f:
        robot_ini_txt = f.read()
        if ('Path' in robot_ini_txt) or ('Support' in robot_ini_txt):
            logger.warning("Found {} contains potentially conflicting ktrans "
                           "settings!".format(ROBOT_INI_NAME))

    # try to find base directory for FANUC tools
    try:
        fr_base_dir = find_fr_install_dir(search_locs=FANUC_SEARCH_PATH,
                                          is64bit=args.rg64)
        logger.info(
            "Using {} as FANUC software base directory".format(fr_base_dir))
    except Exception as e:
        # not being able to find the Fanuc base dir is only a problem if:
        # 1) no ktrans.exe location provided
        # 2) no support dir location provided
        #
        # exit with a fatal error if we're missing either of those
        if (not args.ktrans or not args.support_dir):
            logger.fatal(
                "Error trying to detect FANUC base-dir: {0}".format(e))
            logger.fatal(
                "Please provide alternative locations for ktrans and support dir using"
            )
            logger.fatal("the '--ktrans' and '--support' options.")
            logger.fatal("Cannot continue, aborting")
            sys.exit(_OS_EX_DATAERR)
        # if both of those have been provided we don't care and can continue
        logger.warning("Error trying to detect FANUC base-dir: {0}".format(e))
        logger.warning("Continuing with provided arguments")

    # TODO: maybe generalise into 'find_tool(..)' or something (for maketp etc)
    # see if we need to find ktrans ourselves
    ktrans_path = KTRANS_BIN_NAME
    if not args.ktrans:
        logger.debug("Trying to auto-detect ktrans location ..")
        try:
            search_locs = [fr_base_dir]
            search_locs.extend(KTRANS_SEARCH_PATH)
            ktrans_path = find_ktrans(kbin_name=KTRANS_BIN_NAME,
                                      search_locs=search_locs)
        except MissingKtransException as mke:
            logger.fatal("Aborting: {0}".format(mke))
            sys.exit(_OS_EX_DATAERR)
        except Exception as e:
            logger.fatal("Aborting: {0} (unhandled, please report)".format(e))
            sys.exit(_OS_EX_DATAERR)
    # or if user provided its location
    else:
        logger.debug("User provided ktrans location: {0}".format(args.ktrans))
        ktrans_path = os.path.abspath(args.ktrans)
        logger.debug("Setting ktrans path to: {0}".format(ktrans_path))

        # make sure it exists
        if not os.path.exists(ktrans_path):
            logger.fatal("Specified ktrans location ({0}) does not exist. "
                         "Aborting.".format(ktrans_path))
            sys.exit(_OS_EX_DATAERR)
    logger.info("ktrans location: {0}".format(ktrans_path))

    # try to find support directory for selected core software version
    logger.info("Setting default system core version to: {}".format(
        args.core_version))

    # see if we need to find support dir ourselves
    if not args.support_dir:
        try:
            fr_support_dir = find_ktrans_support_dir(
                fr_base_dir=fr_base_dir, version_string=args.core_version)
        except Exception as e:
            # NOTE(review): this message calls .format(e) but has no
            # placeholder, so the error detail is dropped — confirm intent.
            logger.fatal("Couldn't determine core software support directory, "
                         "aborting".format(e))
            sys.exit(_OS_EX_DATAERR)
    # or if user provided its location
    else:
        fr_support_dir = args.support_dir
        logger.debug(
            "User provided support dir location: {0}".format(fr_support_dir))

        # make sure it exists
        if not os.path.exists(fr_support_dir):
            logger.fatal("Specified support dir ({0}) does not exist. "
                         "Aborting.".format(fr_support_dir))
            sys.exit(_OS_EX_DATAERR)
    logger.info("Karel core support dir: {}".format(fr_support_dir))

    # if user didn't supply an alternative, assume it's on the PATH
    ktransw_path = args.ktransw or KTRANSW_BIN_NAME
    logger.info("ktransw location: {0}".format(ktransw_path))

    # template and output file locations
    template_dir = os.path.dirname(os.path.realpath(__file__))
    template_path = os.path.join(template_dir, BUILD_FILE_TEMPLATE_NAME)
    build_file_path = os.path.join(build_dir, BUILD_FILE_NAME)

    # check
    if not os.path.isfile(template_path):
        raise RuntimeError("Template file %s not found in template "
                           "dir %s" % (template_path, template_dir))
    logger.debug("Using build file template: {0}".format(template_path))

    ############################################################################
    #
    # Package discovery
    #

    # always look in the source space and any extra paths user provided
    src_space_dirs = [source_dir]
    # and any extra paths the user provided
    src_space_dirs.extend(extra_paths)

    logger.info("Source space(s) searched for packages (in order: src, args):")
    for p in src_space_dirs:
        logger.info(' {0}'.format(p))

    # discover packages
    src_space_pkgs = find_pkgs(src_space_dirs)
    logger.info("Found {0} package(s) in source space(s):".format(
        len(src_space_pkgs)))
    for pkg in src_space_pkgs:
        logger.info(" {0} (v{1})".format(pkg.manifest.name,
                                         pkg.manifest.version))

    # discover pkgs in non-source space directories, if those have been configured
    other_pkgs = []
    if (not args.no_env) and (ENV_PKG_PATH in os.environ):
        logger.info("Other location(s) searched for packages ({}):".format(
            ENV_PKG_PATH))
        other_pkg_dirs = [
            p for p in os.environ[ENV_PKG_PATH].split(os.pathsep)
            if len(p) > 0
        ]
        for p in other_pkg_dirs:
            logger.info(' {0}'.format(p))
        other_pkgs.extend(find_pkgs(other_pkg_dirs))
        logger.info("Found {0} package(s) in other location(s):".format(
            len(other_pkgs)))
        for pkg in other_pkgs:
            logger.info(" {0} (v{1})".format(pkg.manifest.name,
                                             pkg.manifest.version))

    # process all discovered pkgs
    all_pkgs = []
    all_pkgs.extend(src_space_pkgs)
    all_pkgs.extend(other_pkgs)

    # make sure all their dependencies are present
    try:
        check_pkg_dependencies(all_pkgs)
    except Exception as e:
        logger.fatal("Error occured while checking packages: {}. Cannot "
                     "continue".format(e))
        # TODO: find appropriate exit code
        sys.exit(_OS_EX_DATAERR)

    # all discovered pkgs get used for dependency and include path resolution,
    resolve_dependencies(all_pkgs)
    resolve_includes(all_pkgs)
    # but only the pkgs in the source space(s) get their objects build
    gen_obj_mappings(src_space_pkgs)

    # notify user of config
    logger.info("Building {} package(s)".format(len(src_space_pkgs)))
    logger.info("Build configuration:")
    logger.info(" source dir: {0}".format(source_dir))
    logger.info(" build dir : {0}".format(build_dir))
    logger.info(" robot.ini : {0}".format(robot_ini_loc))
    logger.info("Writing generated rules to {0}".format(build_file_path))

    # stop if user wanted a dry-run
    if args.dry_run:
        logger.info("Requested dry-run, not saving build file")
        sys.exit(0)

    ############################################################################
    #
    # Template processing
    #

    # populate dicts & lists needed by template
    ktrans = KtransInfo(path=ktrans_path,
                        support=KtransSupportDirInfo(
                            path=fr_support_dir,
                            version_string=args.core_version))
    ktransw = KtransWInfo(path=ktransw_path)
    bs_info = RossumSpaceInfo(path=build_dir)
    sp_infos = [RossumSpaceInfo(path=p) for p in src_space_dirs]
    robini_info = KtransRobotIniInfo(path=robot_ini_loc)

    ws = RossumWorkspace(build=bs_info,
                         sources=sp_infos,
                         robot_ini=robini_info,
                         pkgs=src_space_pkgs)

    # don't overwrite existing files, unless instructed to do so
    if (not args.overwrite) and os.path.exists(build_file_path):
        logger.fatal("Existing {0} detected and '--overwrite' not specified. "
                     "Aborting".format(BUILD_FILE_NAME))
        # TODO: find appropriate exit code
        sys.exit(_OS_EX_DATAERR)

    # write out template
    with open(build_file_path, 'w') as ofile:
        # setup the dict for empy
        globls = {
            'ws': ws,
            'ktrans': ktrans,
            'ktransw': ktransw,
            'rossum_version': ROSSUM_VERSION,
            'tstamp': datetime.datetime.now().isoformat(),
        }

        interp = em.Interpreter(output=ofile, globals=globls, options={
            em.RAW_OPT: True,
            em.BUFFERED_OPT: True
        })

        # load and process the template
        # NOTE(review): the template file handle is never closed.
        logger.debug("Processing template")
        interp.file(open(template_path))
        logger.debug("Shutting down empy")
        interp.shutdown()

    # done
    logger.info("Configuration successful, you may now run 'ninja' in the "
                "build directory.")
def _generate_from_spec(input_file, output_dir, template_dir, msg_context,
                        spec, template_map, search_path):
    """Generate output files for one msg/srv spec by expanding EmPy templates.

    :param input_file: path of the .msg/.srv file the spec was parsed from
    :param output_dir: directory the generated files are written to
    :param template_dir: directory holding the EmPy template files
    :param msg_context: genmsg MsgContext used for md5/definition computation
    :param spec: genmsg MsgSpec or SrvSpec to generate for
    :param template_map: maps template file name -> output file name
        ('@NAME@' in the output name is replaced by ``spec.short_name``)
    :param search_path: package search path for resolving msg dependencies
    :raises RuntimeError: if a template file is missing from template_dir
    """
    md5sum = genmsg.gentools.compute_md5(msg_context, spec)

    # Precompute the full message definition once (msg specs only).
    is_msg = isinstance(spec, genmsg.msgs.MsgSpec)
    if is_msg:
        msg_definition = genmsg.gentools.compute_full_text(msg_context, spec)
        # map all non-built-in ROS types to their Simple Message Assigned IDs
        mapped_types = _map_types(spec, search_path, _mapping_dict)
    elif isinstance(spec, genmsg.srvs.SrvSpec):
        mapped_types = _map_types(spec.request, search_path, _mapping_dict)
        mapped_types.update(
            _map_types(spec.response, search_path, _mapping_dict))

    # Map the spec we are generating for currently.
    # Note: we don't use genfrkl.ros_type_to_sm_id(..) here, as we already
    # have the md5sum of the message spec we are generating for.
    spec_tgt_type_id = genfrkl.msg_mapping.map_md5_to_sm_id(
        _mapping_dict, md5sum)
    kl_smname = genfrkl.fmt_sm_name(spec_tgt_type_id)
    kl_structname = genfrkl.fmt_sm_type(spec_tgt_type_id)
    kl_libname = genfrkl.fmt_sm_libname(spec_tgt_type_id)
    msg_id_hex = '%04X' % spec_tgt_type_id

    # Loop over all files to generate
    for template_file_name, output_file_name in template_map.items():
        template_file = os.path.join(template_dir, template_file_name)
        output_file = os.path.join(
            output_dir, output_file_name.replace("@NAME@", spec.short_name))

        # Fail before creating the output file, so a missing template does
        # not leave a partially-created (or created-then-deleted) file behind.
        if not os.path.isfile(template_file):
            raise RuntimeError(
                "Template file %s not found in template dir %s" %
                (template_file_name, template_dir))

        # Globals visible to the EmPy template interpreter.
        g = {
            "file_name_in": input_file,
            "spec": spec,
            "md5sum": md5sum,
            "search_path": search_path,
            "msg_context": msg_context,
            # Simple Message data
            "mapped_types": mapped_types,
            "kl_smname": kl_smname,
            "kl_structname": kl_structname,
            "kl_libname": kl_libname,
            "msg_id_hex": msg_id_hex,
            "mapped_msg_type": spec_tgt_type_id,
        }
        if is_msg:
            g['msg_definition'] = msg_definition

        # Context managers guarantee both file handles are closed even if
        # template expansion raises; the interpreter is always shut down
        # (which also flushes its buffered output).
        # todo: reuse interpreter across iterations
        with open(output_file, 'w') as ofile:
            interpreter = em.Interpreter(
                output=ofile,
                globals=g,
                options={em.RAW_OPT: True, em.BUFFERED_OPT: True})
            try:
                with open(template_file) as tfile:
                    interpreter.file(tfile)
            finally:
                interpreter.shutdown()
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import subprocess
import sys

import em

# Locations of the test binaries and the config template, injected by the
# test harness through the environment.
publisher_command = os.environ.get("TEST_FIROS2_ROS2_PUBLISHER_BIN")
subscriber_command = os.environ.get("TEST_FIROS2_FASTRTPS_SUBSCRIBER_BIN")
firos2_command = os.environ.get("TEST_FIROS2_BIN")
config_template = os.environ.get("TEST_FIROS2_CONFIG_XML_EM")
build_directory = os.environ.get("TEST_FIROS2_BIN_DIR")

config_xml_path = os.path.join(build_directory, "generated_config.xml")

# Expand the EmPy template into a concrete config file. The output file is
# closed (and therefore flushed to disk) before firos2 is launched to read
# it; the interpreter is shut down even if template expansion raises.
with open(config_xml_path, "w") as output_config_xml:
    interpreter = em.Interpreter(output=output_config_xml)
    try:
        with open(config_template) as template_file:
            interpreter.file(template_file)
    finally:
        interpreter.shutdown()

# Start subscriber and publisher endpoints, then the firos2 bridge that
# connects them using the generated config.
subscriber_proc = subprocess.Popen([subscriber_command])
publisher_proc = subprocess.Popen([publisher_command])
bridge_proc = subprocess.Popen([firos2_command, config_xml_path])

# The subscriber exits on its own once it has received the expected data;
# its return code is the test verdict.
subscriber_proc.communicate()
retvalue = subscriber_proc.returncode

# The publisher and the bridge run until killed.
publisher_proc.kill()
bridge_proc.kill()
sys.exit(retvalue)
"--input_directory", dest='directory_in', required=True, help="Output directory") parser.add_argument("-o", "--output_directory", dest='directory_out', required=True, help="Input directory") parser.add_argument("-f", "--files", dest='gen_files', nargs='+', required=True, help="Files to generate") # Parse arguments args = parser.parse_args() for gen_file in args.gen_files: ofile = args.directory_out + "/" + gen_file output_file = open(ofile, 'w') # need to specify the em_globals inside the loop -> em.Error: interpreter globals collision em_globals = { "tasks": args.tasks_all, "tasks_add": args.tasks_add, } interpreter = em.Interpreter(output=output_file, globals=em_globals) interpreter.file(open(args.directory_in + "/" + gen_file + ".em")) interpreter.shutdown()
def __init__(self, srvname, author, output_dir):
    """Set up generator state for one service definition.

    srvname    -- name of the service to generate for
    author     -- author string to embed in generated output
    output_dir -- directory the generated files are written to
    """
    # EmPy interpreter plus an in-memory buffer that captures its output.
    self.__inter = em.Interpreter()
    self.__output = StringIO()
    # Metadata describing the service being generated.
    self.srvname = srvname
    self.author = author
    self.output_dir = output_dir