def get_message_fixed_size(spec, search_path):
    """
    Return the size of a message in bytes.

    If the message does not have a fixed size, returns None.
    """
    if not is_message_fixed_size(spec, search_path):
        return None

    total = 0
    for field in spec.parsed_fields():
        if not field.is_builtin:
            # Nested message: load its spec and recurse for its fixed size.
            nested_context = MsgContext.create_default()
            nested_spec = genmsg.msg_loader.load_msg_by_type(
                nested_context, field.base_type, search_path)
            nested_size = get_message_fixed_size(nested_spec, search_path)
            if nested_size is None:
                raise Exception('Field {} has a non-constant size'.format(field.base_type))
            total += nested_size
            continue
        # Builtin field: look up its primitive size.
        type_size = get_type_size(field.base_type)
        if type_size is None:
            raise Exception('Field {} has a non-constant size'.format(field.base_type))
        if field.is_array:
            if not field.array_len:
                raise Exception('Array field {} has a variable length'.format(field.base_type))
            total += field.array_len * type_size
        else:
            total += type_size
    return total
def generate_messages(self, package, package_files, outdir, search_path):
    """
    Generate output for each message/service file of *package*.

    :returns: return code, ``int``
    """
    if not genmsg.is_legal_resource_base_name(package):
        raise MsgGenerationException(
            "\nERROR: package name '%s' is illegal and cannot be used in message generation.\nPlease see http://ros.org/wiki/Names" % (package))
    # package/src/package/msg for messages, packages/src/package/srv for services
    msg_context = MsgContext.create_default()
    retcode = 0
    for f in package_files:
        try:
            f = os.path.abspath(f)
            full_type = genmsg.gentools.compute_full_type_name(
                package, os.path.basename(f))
            self.generate(msg_context, full_type, f, outdir, search_path)  # actual generation
        except Exception as e:
            # Expected message-spec errors are reported without a traceback.
            if not isinstance(e, (MsgGenerationException, genmsg.msgs.InvalidMsgSpec)):
                traceback.print_exc()
            print(
                "\nERROR: Unable to generate %s for package '%s': while processing '%s': %s\n"
                % (self.what, package, f, e), file=sys.stderr)
            retcode = 1  # flag error
    return retcode
def generate_firos_messages(self, package, data, outdir, OUTPUT, search_path):
    """
    Generate message artifacts for every robot topic described in *data*.

    :param package: package name, used for legality check and error reporting
    :param data: mapping robot name -> {'topics': {topic_name: topic_dict}}
    :param outdir: base output directory; ``OUTPUT`` is appended to it
    :param OUTPUT: subdirectory suffix appended to *outdir*
    :param search_path: message search path passed through to generation
    :returns: return code, ``int`` (0 on success, 1 if any robot failed)
    """
    import errno

    if not genmsg.is_legal_resource_base_name(package):
        raise MsgGenerationException("\nERROR: package name '%s' is illegal and cannot be used in message generation.\nPlease see http://ros.org/wiki/Names"%(package))
    # package/src/package/msg for messages, packages/src/package/srv for services
    msg_context = MsgContext.create_default()
    retcode = 0
    try:
        # you can't just check first... race condition
        outdir = outdir + OUTPUT
        os.makedirs(outdir)
        # touch an empty __init__.py so the output directory is importable
        with open(os.path.join(outdir, "__init__.py"), 'w'):
            pass
    except OSError as e:
        if e.errno != errno.EEXIST:  # directory already exists is fine
            raise
    for robotName in data:
        try:
            robot = data[robotName]
            for topic_name in robot['topics']:
                topic = robot['topics'][topic_name]
                full_type = str(robotName) + '/' + str(topic_name)
                # only dict-valued message descriptions get generated
                if type(topic['msg']) is dict:
                    self.firos_generate(msg_context, full_type, topic, outdir, search_path)  # actual generation
        except Exception as e:
            # Expected message-spec errors are reported without a traceback.
            if not isinstance(e, MsgGenerationException) and not isinstance(e, genmsg.msgs.InvalidMsgSpec):
                traceback.print_exc()
            print("\nERROR: Unable to generate %s for package '%s': %s\n"%(self.what, package, e), file=sys.stderr)
            retcode = 1  # flag error
    return retcode
def generate(pkg, _file, out_dir, search_path):
    """
    Generate serializable-XML (and port-XML) files for one .msg or .srv file.

    :param pkg: package the file belongs to
    :param _file: path to the ``.msg`` or ``.srv`` file
    :param out_dir: output directory containing ``Types`` and ``Ports`` subdirs
    :param search_path: message search path (kept for interface compatibility)
    :returns: 0 on success
    """
    msg_context = MsgContext.create_default()
    name = os.path.splitext(os.path.basename(_file))[0]
    types_dir = os.path.join(out_dir, 'Types')
    if _file.endswith('.msg'):
        spec = genmsg.msg_loader.load_msg_from_file(msg_context, _file, name)
        with open(os.path.join(types_dir, _msg_serializable_xml_name(name)), 'w') as f:
            tree = gen_serializable_xml(spec, pkg)
            tree.write(f, pretty_print=True)
    elif _file.endswith('.srv'):
        spec = genmsg.msg_loader.load_srv_from_file(msg_context, _file, name)
        # a service produces two serializable types: request and response
        with open(os.path.join(types_dir, _srv_serializable_xml_name(name, is_input=True)), 'w') as f:
            tree = gen_serializable_xml(spec.request, pkg)
            tree.write(f, pretty_print=True)
        with open(os.path.join(types_dir, _srv_serializable_xml_name(name, is_input=False)), 'w') as f:
            tree = gen_serializable_xml(spec.response, pkg)
            tree.write(f, pretty_print=True)
    # NOTE(review): a file that is neither .msg nor .srv leaves `spec` unbound
    # here and raises NameError (same as the original behavior) -- confirm
    # whether such files can ever reach this function.
    with open(os.path.join(out_dir, 'Ports', _port_xml_name(name)), 'w') as f:
        tree = gen_port_xml(spec, pkg, is_srv=_file.endswith('.srv'))
        tree.write(f, pretty_print=True)
    return 0
def test_compute_md5_text():
    """Regression-test _compute_md5_text against cturtle->electric values."""
    from genmsg import MsgContext
    msg_context = MsgContext.create_default()

    # this test is just verifying that the md5sum is what it was for cturtle->electric
    Header_md5 = "2176decaecbce78abc3b96ef049fabed"

    rg_msg_dir = os.path.join(get_test_msg_dir(), TEST_CTX, 'msg')
    clock_msg = os.path.join(rg_msg_dir, 'Clock.msg')
    # a bit gory, but go ahead and regression test these important messages
    assert "time clock" == _compute_md5_text(msg_context, clock_msg)
    log_msg = os.path.join(rg_msg_dir, 'Log.msg')
    assert "byte DEBUG=1\nbyte INFO=2\nbyte WARN=4\nbyte ERROR=8\nbyte FATAL=16\n%s header\nbyte level\nstring name\nstring msg\nstring file\nstring function\nuint32 line\nstring[] topics"%Header_md5 == _compute_md5_text(msg_context, log_msg)

    tests = _load_md5_tests('md5text')
    # text file #1 is the reference
    # use items() instead of the Python-2-only iteritems() so this test also
    # runs under Python 3 (the rest of the file already uses py3-style print)
    for k, files in tests.items():
        print("running tests", k)
        ref_file = [f for f in files if f.endswith('%s1.txt'%k)]
        if not ref_file:
            assert False, "failed to load %s"%k
        ref_file = ref_file[0]
        ref_text = open(ref_file, 'r').read().strip()
        print("KEY", k)
        files = [f for f in files if not f.endswith('%s1.txt'%k)]
        # NOTE(review): files[1:] skips the first non-reference file even
        # though the reference was already filtered out above -- confirm intent
        for f in files[1:]:
            f_text = _compute_md5_text(msg_context, f)
            assert ref_text == f_text, "failed on %s\n%s\n%s: \n[%s]\nvs.\n[%s]\n"%(k, ref_file, f, ref_text, f_text)
def generate_msg(pkg, files, out_dir, search_path):
    """
    Generate dart code for all messages in a package.

    :param pkg: package name
    :param files: ``.msg`` files to process
    :param out_dir: output directory (also receives ``pubspec.yaml``)
    :param search_path: message search path for dependency resolution
    """
    msg_context = MsgContext.create_default()
    for f in files:
        f = os.path.abspath(f)
        infile = os.path.basename(f)
        full_type = genmsg.gentools.compute_full_type_name(pkg, infile)
        spec = genmsg.msg_loader.load_msg_from_file(msg_context, f, full_type)
        # 'String' clashes with dart's core String type
        if spec.short_name == 'String':
            spec.short_name = 'StringMessage'
        generate_msg_from_spec(msg_context, spec, search_path, out_dir, pkg, f)
    indir = os.path.dirname(files[0])

    ########################################
    # 3. Write the package pubspec.yaml file
    ########################################
    io = StringIO()
    s = IndentedWriter(io)
    write_pubspec(s, pkg, search_path, msg_context, indir)
    package_update = True
    pubspec = '{}/pubspec.yaml'.format(out_dir)
    mode = 'r+' if os.path.isfile(pubspec) else 'w+'
    with open(pubspec, mode) as f:
        # skip rewriting (and the pub upgrade below) when the content is
        # unchanged and the file was written within the last 5 seconds
        if f.read() == io.getvalue() and time.time() - os.path.getmtime(pubspec) < 5:
            package_update = False
    if package_update:
        with open(pubspec, 'w+') as f:
            f.write(io.getvalue())
        import subprocess
        try:
            subprocess.check_output('which pub', shell=True)
            p = subprocess.Popen(['pub', 'upgrade'], cwd=out_dir, stdout=subprocess.PIPE)
            p.wait()
        except subprocess.CalledProcessError:
            # pub not installed / upgrade failed: best-effort only
            pass
    io.close()
    (directory, pack) = psplit(out_dir)
    if not search_path:
        return
    # also (re)generate every other package on the search path
    for package in search_path.keys():
        if package != pkg and package is not None:
            generate_all_msgs_for_package(package, directory, search_path)
def generate_messages(self, package, package_files, outdir, search_path):
    """
    Drive message generation for every file of *package*.

    :returns: return code, ``int`` (0 on success, 1 if any file failed)
    """
    if not genmsg.is_legal_resource_base_name(package):
        raise MsgGenerationException(
            "\nERROR: package name '%s' is illegal and cannot be used in message generation.\nPlease see http://ros.org/wiki/Names" % (package)
        )
    # package/src/package/msg for messages, packages/src/package/srv for services
    msg_context = MsgContext.create_default()
    retcode = 0
    for f in package_files:
        try:
            f = os.path.abspath(f)
            infile_name = os.path.basename(f)
            full_type = genmsg.gentools.compute_full_type_name(package, infile_name)
            # actual generation
            self.generate(msg_context, full_type, f, outdir, search_path)
        except Exception as e:
            # expected spec errors are reported without a traceback
            if not isinstance(e, (MsgGenerationException, genmsg.msgs.InvalidMsgSpec)):
                traceback.print_exc()
            print(
                "\nERROR: Unable to generate %s for package '%s': while processing '%s': %s\n"
                % (self.what, package, f, e),
                file=sys.stderr,
            )
            retcode = 1  # flag error
    return retcode
def test_compute_md5_text():
    """Regression-test _compute_md5_text output against known values."""
    from genmsg import MsgContext
    msg_context = MsgContext.create_default()

    # this test is just verifying that the md5sum is what it was for cturtle->electric
    Header_md5 = "2176decaecbce78abc3b96ef049fabed"

    rg_msg_dir = os.path.join(get_test_msg_dir(), TEST_CTX, 'msg')
    clock_msg = os.path.join(rg_msg_dir, 'Clock.msg')
    # a bit gory, but go ahead and regression test these important messages
    assert "time clock" == _compute_md5_text(msg_context, clock_msg)
    log_msg = os.path.join(rg_msg_dir, 'Log.msg')
    assert "byte DEBUG=1\nbyte INFO=2\nbyte WARN=4\nbyte ERROR=8\nbyte FATAL=16\n%s header\nbyte level\nstring name\nstring msg\nstring file\nstring function\nuint32 line\nstring[] topics" % Header_md5 == _compute_md5_text(
        msg_context, log_msg)

    tests = _load_md5_tests('md5text')
    # text file #1 is the reference
    for k, files in tests.items():
        print("running tests", k)
        ref_file = [f for f in files if f.endswith('%s1.txt' % k)]
        if not ref_file:
            assert False, "failed to load %s" % k
        ref_file = ref_file[0]
        ref_text = open(ref_file, 'r').read().strip()
        print("KEY", k)
        files = [f for f in files if not f.endswith('%s1.txt' % k)]
        # NOTE(review): files[1:] skips the first non-reference file even
        # though the reference was already filtered out above -- confirm
        # whether this is intentional
        for f in files[1:]:
            f_text = _compute_md5_text(msg_context, f)
            assert ref_text == f_text, "failed on %s\n%s\n%s: \n[%s]\nvs.\n[%s]\n" % (
                k, ref_file, f, ref_text, f_text)
def test_md5_equals():
    """All files within each 'same' test group must hash to the same md5."""
    from genmsg import MsgContext
    msg_context = MsgContext.create_default()
    # removed unused `search_path = get_search_path()` local
    tests = _load_md5_tests('same')
    for k, files in tests.items():
        print("running tests", k)
        # first file in the group is the reference md5
        md5sum = _compute_md5(msg_context, files[0])
        for f in files[1:]:
            assert md5sum == _compute_md5(msg_context, f), "failed on %s: \n[%s]\nvs.\n[%s]\n"%(k, _compute_md5_text(msg_context, files[0]), _compute_md5_text(msg_context, f))
def generate_srv(pkg, files, out_dir, search_path):
    """
    Generate lisp code for all services in a package
    """
    msg_context = MsgContext.create_default()
    for srv_file in files:
        srv_path = os.path.abspath(srv_file)
        full_type = genmsg.gentools.compute_full_type_name(
            pkg, os.path.basename(srv_path))
        spec = genmsg.msg_loader.load_srv_from_file(msg_context, srv_path, full_type)
        generate_srv_from_spec(msg_context, spec, search_path, out_dir, pkg, srv_path)
def generate_srv(pkg, files, out_dir, search_path):
    """
    Generate euslisp code for all services in a package
    """
    msg_context = MsgContext.create_default()
    for path in map(os.path.abspath, files):
        basename = os.path.basename(path)
        full_type = genmsg.gentools.compute_full_type_name(pkg, basename)
        spec = genmsg.msg_loader.load_srv_from_file(msg_context, path, full_type)
        generate_srv_from_spec(msg_context, spec, search_path, out_dir, pkg, path)
def test_md5_equals():
    """All files within each 'same' test group must hash to the same md5."""
    from genmsg import MsgContext
    msg_context = MsgContext.create_default()
    # removed unused `search_path = get_search_path()` local
    tests = _load_md5_tests('same')
    # items() instead of the Python-2-only iteritems() for py2/py3 compatibility
    for k, files in tests.items():
        print("running tests", k)
        # first file in the group is the reference md5
        md5sum = _compute_md5(msg_context, files[0])
        for f in files[1:]:
            assert md5sum == _compute_md5(msg_context, f), "failed on %s: \n[%s]\nvs.\n[%s]\n"%(k, _compute_md5_text(msg_context, files[0]), _compute_md5_text(msg_context, f))
def test_md5_not_equals():
    """Each file within a 'different' test group must hash to a unique md5."""
    from genmsg import MsgContext
    msg_context = MsgContext.create_default()
    tests = _load_md5_tests('different')
    # items() instead of the Python-2-only iteritems() for py2/py3 compatibility
    for k, files in tests.items():
        print("running tests", k)
        md5s = set()
        # removed unused `md6md5sum` assignment (files[0] is re-hashed by the
        # loop below anyway)
        for f in files:
            md5s.add(_compute_md5(msg_context, f))
        # each md5 should be unique
        assert len(md5s) == len(files)
def test_md5_not_equals():
    """Each file within a 'different' test group must hash to a unique md5."""
    from genmsg import MsgContext
    msg_context = MsgContext.create_default()
    tests = _load_md5_tests('different')
    for k, files in tests.items():
        print("running tests", k)
        md5s = set()
        # removed unused `md6md5sum` assignment (files[0] is re-hashed by the
        # loop below anyway)
        for f in files:
            md5s.add(_compute_md5(msg_context, f))
        # each md5 should be unique
        assert len(md5s) == len(files)
def generate_srv(pkg, files, out_dir, search_path):
    """
    Generate dart code for all services in a package.

    :param pkg: package name
    :param files: ``.srv`` files to process
    :param out_dir: output directory (also receives ``pubspec.yaml``)
    :param search_path: message search path for dependency resolution
    """
    msg_context = MsgContext.create_default()
    for f in files:
        f = os.path.abspath(f)
        infile = os.path.basename(f)
        full_type = genmsg.gentools.compute_full_type_name(pkg, infile)
        spec = genmsg.msg_loader.load_srv_from_file(msg_context, f, full_type)
        if '.action' in f:
            # action-generated services are not handled here; note this
            # returns from the whole function, skipping any remaining files
            # and the pubspec update below (preserved original behavior)
            print('Action class')
            return
        generate_srv_from_spec(msg_context, spec, search_path, out_dir, pkg, f)
    indir = os.path.dirname(files[0])

    ########################################
    # 3. Write the package pubspec.yaml file
    ########################################
    io = StringIO()
    s = IndentedWriter(io)
    write_pubspec(s, pkg, search_path, msg_context, indir)
    package_update = True
    pubspec = '{}/pubspec.yaml'.format(out_dir)
    mode = 'r+' if os.path.isfile(pubspec) else 'w+'
    with open(pubspec, mode) as f:
        # skip rewriting (and the pub upgrade below) when the content is
        # unchanged and the file was written within the last 5 seconds
        if f.read() == io.getvalue() and time.time() - os.path.getmtime(pubspec) < 5:
            package_update = False
    if package_update:
        with open(pubspec, 'w+') as f:
            f.write(io.getvalue())
        import subprocess
        try:
            subprocess.check_output('which pub', shell=True)
            p = subprocess.Popen(['pub', 'upgrade'], cwd=out_dir, stdout=subprocess.PIPE)
            p.wait()
        except subprocess.CalledProcessError:
            # pub not installed / upgrade failed: best-effort only
            pass
    io.close()
def test_compute_full_text():
    """Regression test compute_full_text against cturtle-electric values."""
    from genmsg import MsgContext, compute_full_text, load_msg_by_type, load_depends
    msg_context = MsgContext.create_default()
    search_path = get_search_path()

    # regression test against values used for cturtle-electric
    cases = (
        ('rosgraph_msgs/Log', log_full_text),
        ('geometry_msgs/TwistWithCovarianceStamped',
         twist_with_covariance_stamped_full_text),
    )
    for msg_type, expected in cases:
        spec = load_msg_by_type(msg_context, msg_type, search_path)
        load_depends(msg_context, spec, search_path)
        val = compute_full_text(msg_context, spec)
        assert val == expected, "[%s][%s]" % (val, expected)
def is_message_fixed_size(spec, search_path):
    """Check if a particular message specification has a constant size in bytes"""
    parsed_fields = spec.parsed_fields()
    types = [f.base_type for f in parsed_fields]
    # strings are length-prefixed and therefore never fixed size
    if 'string' in types:
        return False
    # any variable-length array makes the size dynamic
    if any(f.is_array and not f.array_len for f in parsed_fields):
        return False
    # all builtin, no strings, no variable arrays: fixed size
    if all(f.is_builtin for f in parsed_fields):
        return True
    # recurse into each non-builtin field's own message definition
    for f in parsed_fields:
        if f.is_builtin:
            continue
        field_msg_context = MsgContext.create_default()
        field_spec = genmsg.msg_loader.load_msg_by_type(field_msg_context, f.base_type, search_path)
        if not is_message_fixed_size(field_spec, search_path):
            return False
    return True
def generate_dynamic(core_type, msg_cat):
    """
    Dymamically generate message classes from msg_cat .msg text
    gendeps dump. This method modifies sys.path to include a temp file
    directory.

    :param core_type str: top-level ROS message type of concatenated .msg text
    :param msg_cat str: concatenation of full message text (output of gendeps --cat)
    :raises: MsgGenerationException If dep_msg is improperly formatted
    """
    msg_context = MsgContext.create_default()
    core_pkg, core_base_type = genmsg.package_resource_name(core_type)

    # REP 100: pretty gross hack to deal with the fact that we moved
    # Header. Header is 'special' because it can be used w/o a package
    # name, so the lookup rules end up failing. We are committed to
    # never changing std_msgs/Header, so this is generally fine.
    msg_cat = msg_cat.replace('roslib/Header', 'std_msgs/Header')

    # separate msg_cat into the core message and dependencies
    # (gendeps separates entries with a line of 80 '=' characters)
    splits = msg_cat.split('\n' + '=' * 80 + '\n')
    core_msg = splits[0]
    deps_msgs = splits[1:]

    # create MsgSpec representations of .msg text
    specs = {
        core_type: msg_loader.load_msg_from_string(msg_context, core_msg, core_type)
    }
    # - dependencies
    for dep_msg in deps_msgs:
        # dependencies require more handling to determine type name
        dep_type, dep_spec = _generate_dynamic_specs(msg_context, specs, dep_msg)
        specs[dep_type] = dep_spec

    # clear the message registration table and register loaded
    # types. The types have to be registered globally in order for
    # message generation of dependents to work correctly.
    msg_context = msg_loader.MsgContext.create_default()
    search_path = {}  # no ability to dynamically load
    for t, spec in specs.items():
        msg_context.register(t, spec)

    # process actual MsgSpecs: we accumulate them into a single file,
    # rewriting the generated text as needed
    buff = StringIO()
    for t, spec in specs.items():
        pkg, s_type = genmsg.package_resource_name(t)
        # dynamically generate python message code
        for line in msg_generator(msg_context, spec, search_path):
            line = _gen_dyn_modify_references(line, t, list(specs.keys()))
            buff.write(line + '\n')
    full_text = buff.getvalue()

    # Create a temporary directory
    tmp_dir = tempfile.mkdtemp(prefix='genpy_')
    # Afterwards, we are going to remove the directory so that the .pyc file gets cleaned up if it's still around
    atexit.register(shutil.rmtree, tmp_dir)

    # write the entire text to a file and import it (it will get deleted when tmp_dir goes - above)
    tmp_file = tempfile.NamedTemporaryFile(mode='w', suffix='.py', dir=tmp_dir, delete=False)
    tmp_file.file.write(full_text)
    tmp_file.file.close()

    # import our temporary file as a python module, which requires modifying sys.path
    sys.path.append(os.path.dirname(tmp_file.name))

    # - strip the prefix to turn it into the python module name
    try:
        mod = __import__(os.path.basename(tmp_file.name)[:-3])
    except Exception:
        # TODOXXX:REMOVE
        # NOTE(review): debug dump of the generated source to a fixed path;
        # flagged for removal by the original author
        with open(tmp_file.name) as f:
            text = f.read()
        with open('/tmp/foo', 'w') as f2:
            f2.write(text)
        raise

    # finally, retrieve the message classes from the dynamic module
    messages = {}
    for t in specs.keys():
        pkg, s_type = genmsg.package_resource_name(t)
        try:
            messages[t] = getattr(mod, _gen_dyn_name(pkg, s_type))
        except AttributeError:
            raise MsgGenerationException(
                'cannot retrieve message class for %s/%s: %s' %
                (pkg, s_type, _gen_dyn_name(pkg, s_type)))
        # attach the spec so callers can introspect the generated class
        messages[t]._spec = specs[t]

    return messages
def write_get_message_size(s, spec, search_path):
    """
    Write a static method to determine the buffer size of a complete message.

    Emits JavaScript: a fixed-size message returns a constant; otherwise the
    generated code sums constant-size fields at generation time and emits
    runtime length computations for dynamic fields.
    """
    with Indent(s):
        s.write('static getMessageSize(object) {')
        msg_size = get_message_fixed_size(spec, search_path)
        if msg_size is not None:
            with Indent(s):
                s.write('return {};'.format(msg_size))
        else:
            # (removed an unused inner helper `get_dynamic_field_length_line`
            # that duplicated the inline logic below and was never called)
            with Indent(s):
                s.write('let length = 0;')
                # certain fields will always have the same size
                # calculate that here instead of dynamically every time
                len_constant_length_fields = 0
                for f in spec.parsed_fields():
                    field_size = None
                    if f.is_builtin:
                        field_size = get_type_size(f.base_type)
                    else:
                        field_msg_context = MsgContext.create_default()
                        field_spec = genmsg.msg_loader.load_msg_by_type(field_msg_context, f.base_type, search_path)
                        field_size = get_message_fixed_size(field_spec, search_path)

                    if f.is_array:
                        if f.array_len and field_size is not None:
                            # fixed-length array of fixed-size elements
                            len_constant_length_fields += (field_size * f.array_len)
                            continue
                        elif not f.array_len:
                            # variable-length array: 4-byte length prefix
                            len_constant_length_fields += 4

                        if field_size == 1:
                            s.write('length += object.{}.length;'.format(f.name))
                        elif field_size is not None:
                            s.write('length += {} * object.{}.length;'.format(field_size, f.name))
                        else:
                            if f.is_builtin:
                                if not is_string(f.base_type):
                                    raise Exception('Unexpected field {} with type {} has unknown length'.format(f.name, f.base_type))
                                # it's a string array!
                                line_to_write = 'length += 4 + val.length;'
                            else:
                                (package, msg_type) = f.base_type.split('/')
                                samePackage = spec.package == package
                                if samePackage:
                                    line_to_write = 'length += {}.getMessageSize(val);'.format(msg_type)
                                else:
                                    line_to_write = 'length += {}.msg.{}.getMessageSize(val);'.format(package, msg_type)
                            s.write('object.{}.forEach((val) => {{'.format(f.name))
                            with Indent(s):
                                s.write(line_to_write)
                            s.write('});')
                    elif field_size is not None:
                        len_constant_length_fields += field_size
                    else:
                        # field size is dynamic!
                        if f.is_builtin:
                            if not is_string(f.base_type):
                                raise Exception('Unexpected field {} with type {} has unknown length'.format(f.name, f.base_type))
                            # it's a string: 4-byte length prefix plus the
                            # UTF-8 *byte* length -- JS String.length counts
                            # UTF-16 code units and undercounts multi-byte
                            # characters, so use Buffer.byteLength
                            len_constant_length_fields += 4
                            line_to_write = 'length += Buffer.byteLength(object.{}, \'utf8\');'.format(f.name)
                        else:
                            (package, msg_type) = f.base_type.split('/')
                            samePackage = spec.package == package
                            if samePackage:
                                line_to_write = 'length += {}.getMessageSize(object.{});'.format(msg_type, f.name)
                            else:
                                line_to_write = 'length += {}.msg.{}.getMessageSize(object.{});'.format(package, msg_type, f.name)
                        s.write(line_to_write)
                if len_constant_length_fields > 0:
                    s.write('return length + {};'.format(len_constant_length_fields))
                else:
                    s.write('return length;')
        s.write('}')
        s.newline()
def write_get_message_size(s, spec, search_path):
    """
    Write a static method to determine the buffer size of a complete message
    """
    with Indent(s):
        s.write('static getMessageSize(object) {')
        msg_size = get_message_fixed_size(spec, search_path)
        if msg_size is not None:
            # fixed-size message: emit a constant return
            with Indent(s):
                s.write('return {};'.format(msg_size))
        else:
            # NOTE(review): this inner helper is never called below -- the
            # same logic is duplicated inline; confirm it can be removed
            def get_dynamic_field_length_line(field, query):
                if field.is_builtin:
                    if not is_string(field.base_type):
                        raise Exception('Unexpected field {} with type {} has unknown length'.format(field.name, field.base_type))
                    # it's a string array!
                    return 'length += 4 + {}.length;'.format(query)
                # else
                (package, msg_type) = field.base_type.split('/')
                samePackage = spec.package == package
                if samePackage:
                    return 'length += {}.getMessageSize({});'.format(msg_type, query)
                else:
                    return 'length += {}.msg.{}.getMessageSize({});'.format(package, msg_type, query)
            with Indent(s):
                s.write('let length = 0;')
                # certain fields will always have the same size
                # calculate that here instead of dynamically every time
                len_constant_length_fields = 0;
                for f in spec.parsed_fields():
                    field_size = None
                    if f.is_builtin:
                        field_size = get_type_size(f.base_type)
                    else:
                        # nested message: its fixed size (or None if dynamic)
                        field_msg_context = MsgContext.create_default()
                        field_spec = genmsg.msg_loader.load_msg_by_type(field_msg_context, f.base_type, search_path)
                        field_size = get_message_fixed_size(field_spec, search_path)

                    if f.is_array:
                        if f.array_len and field_size is not None:
                            # fixed-length array of fixed-size elements
                            len_constant_length_fields += (field_size * f.array_len)
                            continue
                        elif not f.array_len:
                            # variable-length array: 4-byte length prefix
                            len_constant_length_fields += 4

                        if field_size == 1:
                            s.write('length += object.{}.length;'.format(f.name))
                        elif field_size is not None:
                            s.write('length += {} * object.{}.length;'.format(field_size, f.name))
                        else:
                            if f.is_builtin:
                                if not is_string(f.base_type):
                                    raise Exception('Unexpected field {} with type {} has unknown length'.format(f.name, f.base_type))
                                # it's a string array!
                                line_to_write = 'length += 4 + val.length;'
                            else:
                                (package, msg_type) = f.base_type.split('/')
                                samePackage = spec.package == package
                                if samePackage:
                                    line_to_write = 'length += {}.getMessageSize(val);'.format(msg_type,)
                                else:
                                    line_to_write = 'length += {}.msg.{}.getMessageSize(val);'.format(package, msg_type)
                            s.write('object.{}.forEach((val) => {{'.format(f.name))
                            with Indent(s):
                                s.write(line_to_write)
                            s.write('});')
                    elif field_size is not None:
                        len_constant_length_fields += field_size
                    else:
                        # field size is dynamic!
                        if f.is_builtin:
                            if not is_string(f.base_type):
                                raise Exception('Unexpected field {} with type {} has unknown length'.format(f.name, f.base_type))
                            # it's a string: 4-byte length prefix + UTF-8 byte length
                            len_constant_length_fields += 4
                            line_to_write = 'length += Buffer.byteLength(object.{}, \'utf8\');'.format(f.name)
                        else:
                            (package, msg_type) = f.base_type.split('/')
                            samePackage = spec.package == package
                            if samePackage:
                                line_to_write = 'length += {}.getMessageSize(object.{});'.format(msg_type, f.name)
                            else:
                                line_to_write = 'length += {}.msg.{}.getMessageSize(object.{});'.format(package, msg_type, f.name)
                        s.write(line_to_write)
                if len_constant_length_fields > 0:
                    s.write('return length + {};'.format(len_constant_length_fields))
                else:
                    s.write('return length;')
        s.write('}')
        s.newline()
from __future__ import print_function import os import sys from genmsg import EXT_MSG, EXT_SRV, MsgContext from genmsg.gentools import compute_full_type_name from genmsg.msg_loader import load_msg_from_file, load_srv_from_file from genmsg.msgs import bare_msg_type, is_builtin, resolve_type pkg_name = sys.argv[1] msg_file = sys.argv[2] deps = sys.argv[3].split(':') if len(sys.argv) > 3 else [] msg_context = MsgContext.create_default() full_type_name = compute_full_type_name(pkg_name, os.path.basename(msg_file)) if msg_file.endswith(EXT_MSG): spec = load_msg_from_file(msg_context, msg_file, full_type_name) unresolved_types = spec.types elif msg_file.endswith(EXT_SRV): spec = load_srv_from_file(msg_context, msg_file, full_type_name) unresolved_types = spec.request.types + spec.response.types else: print("Processing file: '%s' - unknown file extension" % msg_file, file=sys.stderr) sys.exit(1) package_context = spec.package for unresolved_type in unresolved_types:
def generate_dynamic(core_type, msg_cat):
    """
    Dymamically generate message classes from msg_cat .msg text
    gendeps dump. This method modifies sys.path to include a temp file
    directory.

    :param core_type str: top-level ROS message type of concatenanted .msg text
    :param msg_cat str: concatenation of full message text (output of gendeps --cat)
    :raises: MsgGenerationException If dep_msg is improperly formatted
    """
    msg_context = MsgContext.create_default()
    core_pkg, core_base_type = genmsg.package_resource_name(core_type)

    # REP 100: pretty gross hack to deal with the fact that we moved
    # Header. Header is 'special' because it can be used w/o a package
    # name, so the lookup rules end up failing. We are committed to
    # never changing std_msgs/Header, so this is generally fine.
    msg_cat = msg_cat.replace('roslib/Header', 'std_msgs/Header')

    # separate msg_cat into the core message and dependencies
    # (gendeps separates entries with a line of 80 '=' characters)
    splits = msg_cat.split('\n'+'='*80+'\n')
    core_msg = splits[0]
    deps_msgs = splits[1:]

    # create MsgSpec representations of .msg text
    specs = { core_type: genmsg.msg_loader.load_msg_from_string(msg_context, core_msg, core_type) }
    # - dependencies
    for dep_msg in deps_msgs:
        # dependencies require more handling to determine type name
        dep_type, dep_spec = _generate_dynamic_specs(msg_context, specs, dep_msg)
        specs[dep_type] = dep_spec

    # clear the message registration table and register loaded
    # types. The types have to be registered globally in order for
    # message generation of dependents to work correctly.
    msg_context = genmsg.msg_loader.MsgContext.create_default()
    search_path = {}  # no ability to dynamically load
    for t, spec in specs.items():
        msg_context.register(t, spec)

    # process actual MsgSpecs: we accumulate them into a single file,
    # rewriting the generated text as needed
    buff = StringIO()
    for t, spec in specs.items():
        pkg, s_type = genmsg.package_resource_name(t)
        # dynamically generate python message code
        for l in msg_generator(msg_context, spec, search_path):
            l = _gen_dyn_modify_references(l, list(specs.keys()))
            buff.write(l + '\n')
    full_text = buff.getvalue()

    # Create a temporary directory
    tmp_dir = tempfile.mkdtemp(prefix='genpy_')
    # Afterwards, we are going to remove the directory so that the .pyc file gets cleaned up if it's still around
    atexit.register(shutil.rmtree, tmp_dir)

    # write the entire text to a file and import it (it will get deleted when tmp_dir goes - above)
    # NOTE(review): no mode= is given, so on Python 3 this opens in binary
    # mode and writing str would fail -- presumably this variant targets
    # Python 2; confirm before reusing under Python 3
    tmp_file = tempfile.NamedTemporaryFile(suffix=".py", dir=tmp_dir, delete=False)
    tmp_file.file.write(full_text)
    tmp_file.file.close()

    # import our temporary file as a python module, which requires modifying sys.path
    sys.path.append(os.path.dirname(tmp_file.name))

    # - strip the prefix to turn it into the python module name
    try:
        mod = __import__(os.path.basename(tmp_file.name)[:-3])
    except:
        # TODOXXX:REMOVE
        # NOTE(review): debug dump of the generated source to a fixed path;
        # flagged for removal by the original author. The bare except
        # re-raises, so nothing is swallowed here.
        with open(tmp_file.name) as f:
            text = f.read()
        with open('/tmp/foo', 'w') as f2:
            f2.write(text)
        raise

    # finally, retrieve the message classes from the dynamic module
    messages = {}
    for t in specs.keys():
        pkg, s_type = genmsg.package_resource_name(t)
        try:
            messages[t] = getattr(mod, _gen_dyn_name(pkg, s_type))
        except AttributeError:
            raise MsgGenerationException("cannot retrieve message class for %s/%s: %s"%(pkg, s_type, _gen_dyn_name(pkg, s_type)))

    return messages
from __future__ import print_function import os import sys from genmsg import EXT_MSG, EXT_SRV, MsgContext from genmsg.gentools import compute_full_type_name from genmsg.msg_loader import load_msg_from_file, load_srv_from_file from genmsg.msgs import bare_msg_type, is_builtin, resolve_type pkg_name = sys.argv[1] msg_file = sys.argv[2] deps = sys.argv[3].split(':') if len(sys.argv) > 3 else [] msg_context = MsgContext.create_default() full_type_name = compute_full_type_name(pkg_name, os.path.basename(msg_file)) if msg_file.endswith(EXT_MSG): spec = load_msg_from_file(msg_context, msg_file, full_type_name) unresolved_types = spec.types elif msg_file.endswith(EXT_SRV): spec = load_srv_from_file(msg_context, msg_file, full_type_name) unresolved_types = spec.request.types + spec.response.types else: print("Processing file: '%s' - unknown file extension" % msg_file, file=sys.stderr) sys.exit(1) package_context = spec.package for unresolved_type in unresolved_types: bare_type = bare_msg_type(unresolved_type)