def Compute_Derivative(current, past, time):
    if time:
        result = (current - past) / time
        return result
    else:
        utils.logwarn("Infinite derivative.")
        return 0
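# A minimal usage sketch for Compute_Derivative, assuming two consecutive
# position samples and the elapsed time between them; the names and values
# below are illustrative, not taken from the original module.
def estimate_velocity_example():
    past_position = 1.20      # position at the previous sample [m]
    current_position = 1.35   # position at the current sample [m]
    delta_time = 0.05         # elapsed time between the samples [s]
    # finite-difference estimate: (current - past) / dt
    velocity = Compute_Derivative(current_position, past_position, delta_time)
    # a zero time step would trigger the "Infinite derivative." warning and return 0
    return velocity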
def generate_accessor(accessor, instr, reg):
    accfmt = instr[accessor['access_type']]
    if accfmt['type'] == 'read':
        templ = RS_REG_READ
    elif accfmt['type'] == 'write':
        templ = RS_REG_WRITE
    elif accfmt['type'] == 'modify':
        logwarn("modify is not handled yet!")
        return ""
    else:
        logerr("unknown access format type '{}'".format(accfmt['type']))
        # bail out so we never format with an unbound template
        return ""

    if reg['use_encoding']:
        # registers without a dedicated mnemonic are addressed by the generic
        # S<op0>_<op1>_C<CRn>_C<CRm>_<op2> form
        encstr = "S{}_{}_C{}_C{}_{}".format(
            int(accessor['encoding']['op0'], base=2),
            int(accessor['encoding']['op1'], base=2),
            int(accessor['encoding']['crn'], base=2),
            int(accessor['encoding']['crm'], base=2),
            int(accessor['encoding']['op2'], base=2),
        )
    else:
        encstr = reg['id']

    # assemble the access instruction string, e.g. "mrs $0, <register>"
    encoding = accfmt['fmt'][0] + " " + ", ".join(accfmt['fmt'][1:]).format(encstr)

    return templ.format(name=reg['name'],
                        id=reg['id'],
                        length=reg['length'],
                        access=accessor['access_instruction'],
                        enc=encoding)
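# A small illustration of the encoding string built in generate_accessor,
# assuming the accessor carries the op0/op1/CRn/CRm/op2 fields as binary
# strings; the example values below are made up for demonstration only.
def encoding_string_example():
    encoding = {'op0': '11', 'op1': '000', 'crn': '0100', 'crm': '0010', 'op2': '010'}
    encstr = "S{}_{}_C{}_C{}_{}".format(
        int(encoding['op0'], base=2),
        int(encoding['op1'], base=2),
        int(encoding['crn'], base=2),
        int(encoding['crm'], base=2),
        int(encoding['op2'], base=2),
    )
    return encstr  # "S3_0_C4_C2_2" for the values above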
def Get_Parameter(NODE_NAME, PARAMETER_NAME, DEFAULT_VALUE):
    param = rospy.get_param(PARAMETER_NAME, DEFAULT_VALUE)
    if rospy.has_param(PARAMETER_NAME):
        utils.loginfo(PARAMETER_NAME + ' found: ' + str(param))
    else:
        utils.logwarn(PARAMETER_NAME + ' not found. Default: ' + str(DEFAULT_VALUE))
    return param
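# A minimal sketch of how Get_Parameter might be called from a node, assuming
# a running rospy node and the project-specific utils logging helpers; the
# node name, parameter name, and default value are illustrative only.
def read_rate_parameter_example():
    # falls back to the default and logs a warning if the parameter is unset
    publish_rate = Get_Parameter('mocap', 'publish_rate', 30)
    return publish_rate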
def Goto(self, dest):
    utils.logwarn(str(dest))
    outpos = [sum(x) for x in zip(self.point, dest)]
    utils.logwarn(str(outpos))
    outpos.append(0.0)
    outvelo = [0.0] * 3
    outvelo.append(0.0)
    outacc = [0.0] * 3
    outacc.append(0.0)
    outmsg = self.tg.get_message(outpos, outvelo, outacc)
    self.pub.publish(outmsg)
    self.security_pub.publish(False)
def start_publishing(self):
    utils.loginfo('start publishing')
    rate = rospy.Rate(30)
    timer = Time()

    # Get parameters (all the body IDs that are requested)
    self.body_names = sml_setup.Get_Parameter(NODE_NAME, 'body_names', ['iris1', 'iris2'])
    self.body_array = sml_setup.Get_Parameter(NODE_NAME, 'body_array', [1, 2])
    if type(self.body_array) is str:
        self.body_array = ast.literal_eval(self.body_array)

    # Get topic names for each requested body
    body_topic_array = self.Get_Topic_Names(self.body_array)

    # Establish one publisher topic for each requested body
    topics_publisher = self.Get_Publishers(body_topic_array)

    # Initialize empty past data list
    mocap_past_data = []
    empty_data = QuadPositionDerived()
    for i in range(0, len(self.body_array)):
        mocap_past_data.append(empty_data)

    while not rospy.is_shutdown():
        delta_time = timer.get_time_diff()
        if self.bodies:
            for i in range(0, len(self.body_array)):
                utils.loginfo('body ' + str(i))
                if self.body_names[i] in self.bodies.name:
                    indice = self.bodies.name.index(self.body_names[i])
                    mocap_data = self.get_data(indice)
                    mocap_data_derived = self.Get_Derived_Data(mocap_data, mocap_past_data[i], delta_time)
                    # update past mocap data
                    mocap_past_data[i] = mocap_data_derived
                    # Publish data on topic
                    topics_publisher[i].publish(mocap_data_derived)
        rate.sleep()
    utils.logwarn('Mocap stop publishing')
def __init__(self):
    utils.loginfo('Mocap starting')
    utils.loginfo('start publishing')
    rate = rospy.Rate(30)
    self.all_bodies = dict()

    # Get parameters (all the body IDs that are requested)
    self.body_names = utils.Get_Parameter('body_names', ['iris1', 'iris2'])
    self.body_array = utils.Get_Parameter('body_array', [1, 2])
    if type(self.body_array) is str:
        self.body_array = ast.literal_eval(self.body_array)

    # Get topic names for each requested body
    body_topic_array = self.Get_Topic_Names(self.body_array)

    # Establish one publisher topic for each requested body
    topics_publisher = self.Get_Publishers(body_topic_array)

    # Create the different body objects
    self.body_state = []
    for i in range(0, len(self.body_array)):
        self.body_state.append(State())

    # Subscribe to Gazebo
    self.start_subscribes()

    while not rospy.is_shutdown():
        for i in range(0, len(self.body_array)):
            if self.body_state[i].data:
                data = self.all_bodies[self.body_names[i]]
                self.body_state[i].update(data, self.body_state[i].time)
                topics_publisher[i].publish(self.body_state[i].get_message())
                self.body_state[i].data = False
        rate.sleep()
    utils.logwarn('Mocap stop publishing')
def generate(cfg, args):
    logtitle("Generating AArch64 register bindings for {}".format(args.target))

    jsonfile = DIR_DATA / args.version / "sysreg.json"
    if not jsonfile.is_file():
        logwarn("json file is not present, trying to generate it")
        do_parse(cfg, args)

    # load the json file
    sysregdata = None
    try:
        with open(jsonfile) as sysregjson:
            sysregdata = json.load(sysregjson)
    except:
        logerr("Failed to load configuration file.")

    if args.target == 'rust':
        rustcode.generate(sysregdata, DIR_OUTPUT)
    else:
        logerr("unknown target {}".format(args.target))

    logok("Generated access code for {}".format(args.target))
def start_publishing():
    rate = rospy.Rate(30)
    timer = Time()

    # Get parameters (all the body IDs that are requested)
    body_array = sml_setup.Get_Parameter(NODE_NAME, 'body_array', [8, 16, 17])
    if type(body_array) is str:
        body_array = ast.literal_eval(body_array)

    # Get topic names for each requested body
    body_topic_array = Get_Topic_Names(body_array)

    # Establish one publisher topic for each requested body
    topics_publisher = Get_Publishers(body_topic_array)

    # Initialize empty past data list
    mocap_past_data = []
    empty_data = QuadPositionDerived()
    for i in range(0, len(body_array)):
        mocap_past_data.append(empty_data)

    # Initialize error counters
    error = [0] * len(body_array)

    while not rospy.is_shutdown():
        delta_time = timer.get_time_diff()
        for i in range(0, len(body_array)):
            mocap_data = Get_Body_Data(body_array[i])
            if mocap_data.found_body:
                mocap_data_derived = Get_Derived_Data(mocap_data, mocap_past_data[i], delta_time)
                # update past mocap data
                mocap_past_data[i] = mocap_data_derived
                # Publish data on topic
                topics_publisher[i].publish(mocap_data_derived)
                error[i] = 0
            else:
                error[i] += 1
                if error[i] < 30:
                    if error[i] > 5:
                        mocap_past_data[i].found_body = False
                        utils.logwarn("Send body %i not found" % (body_array[i]))
                    utils.logwarn("Body %i: %i errors" % (body_array[i], error[i]))
                elif error[i] == 30:
                    utils.logwarn("Body %i: %i errors. Stop printing Errors!" % (body_array[i], error[i]))
                topics_publisher[i].publish(mocap_past_data[i])
        rate.sleep()
def do_parse(cfg, args):
    version = cfg["versions"][args.version]
    log("Version: {}".format(args.version))

    # check if the data directory exists; if not, download the files
    xmldir = DIR_DATA / args.version
    if not xmldir.is_dir():
        logwarn("xml files are not present, trying to download...")
        do_download(cfg, args)

    # get the instructions
    instrxmlfile = DIR_DATA / args.version / version["instructions"]
    instr = parse_xml_instructions(cfg, args, instrxmlfile)

    registers = []
    sysinstr = []
    xmldir = DIR_DATA / args.version
    for f in os.listdir(xmldir):
        # skip non xml files
        if not f.endswith(".xml"):
            continue
        # skip the instructions file
        if f == version["instructions"]:
            continue
        # skip ignored files
        if f in version['filemap']['ignored']:
            continue
        # check whether this file requires encoding-based accessors
        use_encoding = False
        if f in version["filemap"]['useencoding']:
            use_encoding = True

        xmlfilepath = xmldir / f
        (parsed_registers, parsed_others) = parse_xml_file(cfg, args, instr, xmlfilepath)
        for pr in parsed_registers:
            pr['use_encoding'] = use_encoding
            registers.append(pr)
        for po in parsed_others:
            po['use_encoding'] = use_encoding
            sysinstr.append(po)

    registers.sort(key=lambda x: x['id'])
    sysinstr.sort(key=lambda x: x['id'])

    # the json file for the parsed output
    jsonfile = DIR_DATA / args.version / "sysreg.json"
    with open(jsonfile, "w") as write_file:
        jsondata = {
            'instrfmt': instr,
            'version': args.version,
            'url': version['url'],
            'registers': registers,
            'instructions': sysinstr
        }
        try:
            json.dump(jsondata, write_file, indent=2)
        except:
            logerr("could not store the sysregs json file")
def get_position(self, data):
    if self.point == []:
        self.point = [data.x, data.y, data.z]
        utils.logwarn("Initial position: %s" % (str(self.point)))
def parse_reg_fieldsets_node(reg_fieldsets_node, is_register):
    # <reg_fieldsets>
    #   ...
    #   <fields length="64">
    #     <field ..>
    #       <field_msb>63</field_msb>
    #       <field_lsb>32</field_lsb>
    #       <field_description order="before">
    #         ...
    #       </field_description>
    #     </field>
    #   </fields>
    #   ...
    #   <reg_fieldset length="64">
    #     <fieldat id="0_63_32" lsb="32" msb="63"/>
    #     ...
    #   </reg_fieldset>
    #   ...
    # </reg_fieldsets>
    if reg_fieldsets_node is None:
        logerr(" - expected a reg_fieldsets node, but was None!")
        return None

    # there can be multiple fields/fieldset definitions; take the one for AArch64
    fields_nodes = []
    for fields_node in reg_fieldsets_node.findall("fields"):
        fields_condition = fields_node.find("fields_condition")
        if fields_condition is None:
            fields_nodes.append(fields_node)
            continue
        if fields_condition.text is None:
            fields_nodes.append(fields_node)
            continue
        # do not take anything from AArch32
        if "from AArch32" in fields_condition.text or \
           "to AArch32" in fields_condition.text or \
           "using AArch32" in fields_condition.text:
            continue
        if fields_condition.text.endswith("== 0"):
            continue
        if "AArch32" in fields_condition.text:
            logwarn(' - had aarch32')
        fields_nodes.append(fields_node)

    # we have no fields, probably this is a cache
    if len(fields_nodes) == 0:
        if not is_register:
            return (0, None)
        else:
            logerr(" - expected fields_nodes to have exactly one element, had 0")

    if len(fields_nodes) != 1:
        logwarn(" - expected fields_nodes to have exactly one element, had {}".format(len(fields_nodes)))

    fields_node = fields_nodes[0]
    length = int(fields_node.attrib["length"])

    fields = []
    for field_node in fields_node.findall("field"):
        field = parse_field_node(field_node, length)
        fields.append(field)

    return (length, fields)
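# A self-contained sketch of the ElementTree calls parse_reg_fieldsets_node
# relies on, using a hand-written XML snippet shaped like the comment above;
# the field values are made up for illustration only.
import xml.etree.ElementTree as ET

def fields_parsing_example():
    xml = """
    <reg_fieldsets>
      <fields length="64">
        <field>
          <field_msb>63</field_msb>
          <field_lsb>32</field_lsb>
        </field>
      </fields>
    </reg_fieldsets>
    """
    reg_fieldsets_node = ET.fromstring(xml)
    fields_node = reg_fieldsets_node.find("fields")
    length = int(fields_node.attrib["length"])        # -> 64
    for field_node in fields_node.findall("field"):
        msb = int(field_node.find("field_msb").text)  # -> 63
        lsb = int(field_node.find("field_lsb").text)  # -> 32
    return (length, msb, lsb)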
def parse_xml_instructions(cfg, args, xmlfile):
    # start off with the default access instructions from the config
    accessinstr = cfg["accessinstructions"]

    # if there is no instructions file, take the ones from the configuration
    if not xmlfile.is_file():
        log("Using default access instructions from config file")
        return accessinstr

    log("Parsing instructions file: {}".format(xmlfile))

    # the structure of the file is as follows
    #
    # <access_instruction_defs>
    #   <access_instruction_def execution_state="AArch64" id="MRS">
    #     <name>MRS</name>
    #     <access_type type="read"/>
    #     <access_syntax name="MRS">
    #       <var n="Xt"/>
    #       <var n="systemreg"/>
    #     </access_syntax>
    #   </access_instruction_def>
    # </access_instruction_defs>

    # get the root of the XML tree
    root = ET.parse(xmlfile).getroot()
    for ai_node in root.findall(".//access_instruction_def"):
        # we only care about aarch64
        if ai_node.attrib["execution_state"].lower() != "aarch64":
            continue

        # the access name should always be present
        access_name = ai_node.find("name").text.lower()

        # the id should always be present
        access_id = ai_node.attrib["id"].lower()

        # the access type might not be there (e.g. for TLBI instructions)
        acc_type_node = ai_node.find("access_type")
        if acc_type_node is not None:
            access_type = acc_type_node.attrib["type"].lower()
        else:
            access_type = None

        # getting the access syntax, there might be multiple variants
        as_fmt = None
        as_syntax = None
        for as_node in ai_node.findall("access_syntax"):
            # skip the no-input variants
            if "variant" in as_node.attrib:
                continue

            # get the instruction name
            as_fmt = [as_node.attrib["name"].lower()]
            as_syntax = [as_node.attrib["name"].lower()]

            # get the variables / parameters
            for var_node in as_node.findall("var"):
                n = var_node.attrib["n"]
                as_syntax.append(n)
                # if n = 'Xt' this is a generic register, this gets replaced by asm '$0'
                if n == 'Xt':
                    n = "$0"
                elif n == 'imm':
                    n = "#{imm}"
                # if n = 'systemreg' this means we need to replace it with the register name
                elif n in ["systemreg", "dc_op", "pstatefield", "ic_op", "at_op",
                           "tlbi_op", "cfp_op", "cpp_op", "dvp_op", "op1", "op2"]:
                    n = "{}"
                elif n in ["Cn", "Cm"]:
                    prefix = var_node.attrib["prefix"]
                    n = prefix + "{}"
                else:
                    logwarn("Unhandled access method: {}".format(n))
                as_fmt.append(n)

        accessinstr[access_id] = {
            "name": access_name,
            "type": access_type,
            "fmt": as_fmt,
            "syntax": as_syntax
        }

    return accessinstr
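# For the MRS definition shown in the structure comment above, the resulting
# entry in accessinstr would look roughly like the sketch below (reconstructed
# from the parsing logic, not copied from a real run): 'Xt' becomes the '$0'
# register placeholder and 'systemreg' becomes a '{}' slot for the register
# name or encoding string, which generate_accessor later fills in.
EXAMPLE_ACCESSINSTR_ENTRY = {
    "mrs": {
        "name": "mrs",
        "type": "read",
        "fmt": ["mrs", "$0", "{}"],
        "syntax": ["mrs", "Xt", "systemreg"],
    }
}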