def getMappings(self):
    """Create and return one Mapping per found feature (memoized).

    Also fills ``self.cutPts`` (cumulative offsets of each mapping in the
    overall one-hot vector) and ``self.mapNames`` (the owning feature name
    for every vector position).

    :return: list of Mapping objects, one per feature in ``self.features``
    """
    # Memoized: return the cached result when it exists.
    if hasattr(self, 'mappings'):
        return self.mappings
    mappings = []
    # Lengths of mappings as indices of the overall vector length
    self.cutPts = [0]
    self.mapNames = []
    for idx, feat in enumerate(self.features):
        if not (self.isIgnoreFeat(feat)):
            if self.filtr:
                # Use filter, if given
                filtrData = self.db.getDescValueCountList(str(feat))
                mappings.append(
                    Mapping(str(feat), None,
                            filtr=StandardMappingFilter(
                                filtrData, numExplicit=self.explval)))
            else:
                values = self.db.getValueList(feat)
                mappings.append(Mapping(str(feat), values))
        else:
            # Passthrough feature, do not one-hot encode
            mappings.append(Mapping(str(feat), [], passThr=True))
        self.cutPts.append(self.cutPts[idx] + mappings[idx].length)
        self.mapNames.extend([
            str(feat)
            for i in range(self.cutPts[idx + 1] - self.cutPts[idx])
        ])
    # BUG FIX: cache the result — previously it was never stored, so the
    # hasattr() guard above could never trigger and every call rebuilt
    # (and re-extended cutPts/mapNames with) everything.
    self.mappings = mappings
    return mappings
def lighten_area_around(self, image, offset, x, y):
    """Brighten pixels in a small neighborhood around (x, y).

    Pixels closer to the center receive a stronger brightness boost; the
    actual per-pixel write happens in ``self.lighten_one_pixel``.

    :param image: image whose pixels are adjusted in place
    :param offset: unused here; kept for interface compatibility
    :param x: center x coordinate
    :param y: center y coordinate
    """
    half_radius = self.parent.reductionNeighborhoodWalkify.value()
    # FIX: removed four unused locals (min_x/max_x/min_y/max_y) that were
    # computed via Mapping.clip_value but never read (clip_value is assumed
    # side-effect free — it is used purely for its return value elsewhere).
    adjustbrightness = self.parent.localBrightnessAdjustmentWalkify.value()
    # Symmetric offsets -s .. s around the center.
    myset = set()
    for s in range(int(round(half_radius / 2))):
        myset.add(s)
        myset.add(-s)
    for comb in combinations(myset, 2):
        # Larger inv_distance (closer to center) => stronger adjustment.
        inv_distance = 2 * max(myset) - (abs(comb[0]) + abs(comb[1]))
        modfactor = half_radius * inv_distance
        self.lighten_one_pixel(image, adjustbrightness * modfactor,
                               x + comb[0], y + comb[1])
    for el in myset:
        # combinations() never pairs an offset with itself, so the diagonal
        # (el, el) is handled separately here.
        inv_distance = 2 * max(myset) - (2 * abs(el))
        modfactor = half_radius * inv_distance
        self.lighten_one_pixel(image, adjustbrightness * modfactor,
                               x + el, y + el)
def step(self, x, y, direction, brightness):
    """Place one circle for the current bitmap position.

    Called repeatedly while iterating over the loaded bitmap: maps the gray
    value at (x, y) to a circle radius (darker => larger) and a step size,
    draws the circle into ``self.group`` unless clipping excludes it, and
    returns the next step size.

    :param x: x pos in bitmap that calculation should be based on
    :param y: y pos in bitmap that calculation should be based on
    :param direction: direction vector of the traversal (unused here)
    :param brightness: gray value in the bitmap at (x, y)
    :return: potentially modified step size (always >= 1)
    """
    radius = Mapping.linexp(brightness, self.minBrightness,
                            self.maxBrightness, self.maxRadius,
                            self.minRadius)
    if not radius:
        # Fall back to the smallest radius when the mapping yields 0/None.
        radius = self.minRadius
    step_size = int(
        Mapping.linlin(brightness, self.minBrightness, self.maxBrightness,
                       self.minStepSize, self.maxStepSize))
    # With clipping enabled, skip circles that would cross the bitmap edge.
    should_draw = True
    if self.clipToBitmap:
        should_draw = not Circle(x, y, radius).edges(self.width, self.height)
    if should_draw:
        ellipse = QGraphicsEllipseItem(x - radius, y - radius,
                                       radius * 2, radius * 2)
        pen = QPen()
        pen.setWidth(self.strokeWidth)
        ellipse.setPen(pen)
        self.group.addToGroup(ellipse)
    return max(step_size, 1)
def main_worker():
    """Worker-side entry point: build a MappingPair and MPI-send it to rank 0."""
    # BUG FIX: converted Python 2 print statements to Python 3 print() calls;
    # the rest of this file uses Python 3 syntax (super(), f-strings, :=).
    print("I am the worker")
    print("worker: sending the object")
    m = Mapping(Vertex(0, 0))
    n = Mapping(Vertex(0, 1))
    pair = MappingPair(3, m, n)
    comm.send(pair, dest=0)
    print("worker: done sending")
def step(self, x, y, direction, brightness):
    """Extend the squiggle path by one segment for the current bitmap position.

    Called repeatedly while iterating over the loaded bitmap. Maps the gray
    value at (x, y) to a step size and a disturbance amplitude, then appends
    a quadratic curve to ``self.path`` whose control point is displaced
    orthogonally to the travel direction; the displacement side alternates
    every call.

    :param x: x pos in bitmap that calculation should be based on
    :param y: y pos in bitmap that calculation should be based on
    :param direction: 2x1 direction vector of the traversal
    :param brightness: gray value in the bitmap at (x, y)
    :return: potentially modified step size (always >= 1)
    """
    # Darker pixels => smaller steps; extra linlin rescales by the detail knob.
    stepSize = Mapping.linexp(brightness, 0, 255, self.minStepSize,
                              self.maxStepSize)
    stepSize = Mapping.linlin(stepSize, 1, 10, 1, 10 / self.detail)
    self.previous_stepsize = stepSize
    # Darker pixels => larger disturbance amplitude (inverted range).
    amplitudeSize = Mapping.linlin(brightness, 0, 255, self.strength, 0)
    if self.prevPos is None:
        # First call: start a fresh path at (x, y).
        self.path = QPainterPath()
        self.path.moveTo(x, y)
        self.prevPos = np.array([[
            x,
        ], [
            y,
        ]])
    else:
        newPos = np.array([[
            x,
        ], [
            y,
        ]])
        dirx = direction[0][0]
        diry = direction[1][0]
        # Orthogonal to the travel direction; sign alternates each call.
        ortho_dir = np.array([[-diry], [dirx]]) * self.disturbance_direction
        disturbance = ortho_dir * amplitudeSize
        # Control point: midpoint of the segment, pushed sideways.
        disturbedPos = (self.prevPos + newPos) / 2 + disturbance
        if not self.clipToBitmap or (self.clipToBitmap and not Circle(
                x, y, amplitudeSize).edges(self.width, self.height)):
            self.path.quadTo(disturbedPos[0][0], disturbedPos[1][0],
                             newPos[0][0], newPos[1][0])
        else:
            # Clipped: jump without drawing so the squiggle stays in bounds.
            self.path.moveTo(newPos[0][0], newPos[1][0])
        self.prevPos = newPos
        # Flip sides so consecutive segments wiggle in opposite directions.
        self.disturbance_direction = -self.disturbance_direction
    return max(int(stepSize), 1)
def make_new_mapping(self, old_mapping, locus, features):
    """Derive a candidate Mapping from ``old_mapping`` by flipping features.

    At ``locus`` in the surface representation, the given features are
    removed and their negated counterparts added. If the resulting feature
    bundle does not correspond to any known segment, the candidate is
    rejected.

    NOTE(review): on rejection this returns ``[]`` but on success it returns
    a bare Mapping (not a one-element list) — callers must handle both
    shapes; compare make_faithful_cand, which always returns a list.
    Confirm this asymmetry is intentional.
    """
    new_sr = copy.deepcopy(old_mapping.sr)
    # Replace each feature at this locus with its negation
    # (features appear to be signed values; -feature flips the sign).
    new_sr[locus] -= features
    for feature in features:
        new_sr[locus].add(-feature)
    try:
        # Validate that the modified bundle still names a real segment.
        self.feature_dict.get_segment(new_sr[locus])
    except IndexError:
        return []
    new_mapping = Mapping(self.feature_dict, [
        False,
        copy.deepcopy(old_mapping.ur), new_sr,
        copy.deepcopy(old_mapping.changes)
    ])
    new_mapping.stem = copy.copy(old_mapping.stem)
    # Record one Change object per flipped feature.
    for feature in features:
        change = Change(self.feature_dict,
                        change_type='change',
                        mapping=new_mapping,
                        locus=locus,
                        feature=feature)
        change.make_set()
        new_mapping.changes.append(change)
    new_mapping.add_boundaries()
    new_mapping.set_ngrams()
    return new_mapping
def __init__(self, map_frame_id, map_resolution, map_width, map_height,
             map_origin_x, map_origin_y, map_origin_yaw, inflate_radius,
             unknown_space, free_space, c_space, occupied_space,
             optional=None):
    """Set up an occupancy-grid holder: a GridMap, its (initially identical)
    inflated copy, and the Mapping object that updates them.

    :param inflate_radius: radius used by Mapping when inflating obstacles
    :param unknown_space/free_space/c_space/occupied_space: cell values used
        by the Mapping update logic
    :param optional: extra Mapping configuration (passed through as-is)
    """
    self.__pose = None
    # NOTE(review): here GridMap also receives unknown_space as its last
    # positional argument; a sibling constructor in this file omits it —
    # confirm which GridMap signature is intended.
    self.__map = GridMap(map_frame_id, map_resolution, map_width, map_height,
                         map_origin_x, map_origin_y, map_origin_yaw,
                         unknown_space)
    # Before any inflation happens, the inflated map aliases the raw map.
    self.__inflated_map = self.__map
    self.__mapping = Mapping(unknown_space, free_space, c_space,
                             occupied_space, inflate_radius, optional)
    self.__update = None
    self.__correct_inflated_map = True
def __init__(self, map_frame_id, map_resolution, map_width, map_height,
             map_origin_x, map_origin_y, map_origin_yaw, inflate_radius,
             unknown_space, free_space, c_space, occupied_space,
             optional=None):
    """ROS occupancy-grid node: builds the map, wires odometry+laser-scan
    subscribers through an approximate time synchronizer, publishes the
    (inflated) map, then blocks in rospy.spin().

    WARNING: this constructor never returns until the node is shut down
    (rospy.spin() at the end).
    """
    rospy.init_node('occupancy_grid_handler')
    self.__pose = None
    self.__map = GridMap(map_frame_id, map_resolution, map_width, map_height,
                         map_origin_x, map_origin_y, map_origin_yaw)
    # Until the first inflation, the inflated map aliases the raw map.
    self.__inflated_map = self.__map
    self.__mapping = Mapping(unknown_space, free_space, c_space,
                             occupied_space, inflate_radius, optional)
    self.__odom_sub = message_filters.Subscriber('SVEA5/odom', OdometryROS)
    self.__scan_sub = message_filters.Subscriber('scan', LaserScanROS)
    # Pair odometry and scan messages that arrive within 0.01 s (queue 10).
    self.__ts = message_filters.ApproximateTimeSynchronizer(
        [self.__odom_sub, self.__scan_sub], 10, 0.01)
    self.__ts.registerCallback(self.callback)
    # latch=True so late subscribers still receive the last published map.
    self.__map_pub = rospy.Publisher('map', OccupancyGridROS, queue_size=1,
                                     latch=True)
    self.__map_updates_pub = rospy.Publisher("map_updates",
                                             OccupancyGridUpdateROS,
                                             queue_size=10)
    self.__map_inflated_pub = rospy.Publisher('inflated_map',
                                              OccupancyGridROS, queue_size=1,
                                              latch=True)
    self.publish_map()
    rospy.spin()
def __init__(self, BASEDIR, only_clicked=False, session_only=False,
             cycle_time=1):
    """Popularity/MPC exploration recommender: header/index lookups, the
    underlying MostClicked and MPCEventSession models, and counters.

    :param BASEDIR: base directory passed through to the parent class
    :param only_clicked: forwarded to the parent class
    :param session_only: forwarded to the parent class
    :param cycle_time: forwarded to the parent class
    """
    # BUG FIX: forward the caller's arguments. Previously the super() call
    # hard-coded session_only=False, cycle_time=1, only_clicked=True, so
    # this constructor's parameters were silently ignored.
    super().__init__(BASEDIR, session_only=session_only,
                     cycle_time=cycle_time, only_clicked=only_clicked)
    self.name = 'popevent_mpcexplore'
    mapper = Mapping()
    self.rec_mapping = mapper.get_header_rec()
    self.event_mapping = mapper.get_header_event()
    # Column positions inside recommendation and event rows.
    self.item_id_idx = self.rec_mapping.index('ITEM_SOURCE')
    self.publisher_id_idx = self.rec_mapping.index('PUBLISHER')
    self.recs_idx = self.event_mapping.index('recs')
    self.user_id_idx = self.event_mapping.index('USER_COOKIE')
    self.keyword_idx = self.rec_mapping.index('KEYWORD')
    # Delegate models.
    self.poprankevent = MostClicked(BASEDIR)
    self.mpc_event_session = MPCEventSession(BASEDIR)
    # Per-user / per-item state.
    self.user_last_item_dict = {}
    self.item_sequence_dict = {}
    self.user_item_dict = {}
    self.keyword_dict = {}
    # Evaluation counters.
    self.correct = 0
    self.total_events = 0
    self.nrrows = 0
def dict(self):
    """Serialize the condition into a Mapping (attribute-style dict).

    Collects the registered condition's metadata, the current detection /
    rectification state, and — when something was detected — merges the
    detected object's own serialized fields into the result.
    """
    data = Mapping()
    data['type'] = self.__reg_condition.condition.__name__
    data.check_key = self.__reg_condition.condition.name
    data.level = self.level
    data.pid = self.pid
    if self.detectable:
        data.detector = self.detector.__name__
    data.symptom = self.__reg_condition.condition.symptom
    data.solution = self.__reg_condition.condition.solution
    data.rectifier_tried = self.rectifier_tried
    data.detector_tried = self.detector_tried
    data.error_code = 1
    data.msg = self.last_message
    data.rectifiable = False
    if self.rectifier:
        data.rectifiable = True
        try:
            data.rectifier = self.rectifier.__name__
        except Exception:
            # Rectifier may not expose __name__; best-effort only.
            pass
    data.detectable = self.detectable
    data.rectified = self.rectified
    if self.detected:
        # Merge the detected object's serialized fields into this one.
        data.update(self.detected.dict())
    # Preserves the falsy value itself (e.g. None) when nothing was detected.
    data.detected = True if self.detected else self.detected
    data.error_code_string = self.context_name()
    return data
def __init__(self, BASEDIR, session_only=False, cycle_time=1):
    """Content-based ranker: resolve header column indices, set up German
    stemming, and initialize the stem cache plus evaluation counters."""
    super().__init__(BASEDIR, session_only, cycle_time)
    self.name = 'contentrank'
    header_source = Mapping()
    self.rec_mapping = header_source.get_header_rec()
    self.event_mapping = header_source.get_header_event()
    self.update_mapping = header_source.get_header_update()
    # Column positions inside recommendation rows.
    self.item_id_idx = self.rec_mapping.index('ITEM_SOURCE')
    self.publisher_id_idx = self.rec_mapping.index('PUBLISHER')
    self.limit_idx = self.rec_mapping.index('limit')
    # Column positions inside event rows.
    self.recs_idx = self.event_mapping.index('recs')
    # Column positions inside item-update rows.
    self.title_idx = self.update_mapping.index('title')
    self.text_idx = self.update_mapping.index('text')
    self.update_id_idx = self.update_mapping.index('id')
    self.update_domainid_idx = self.update_mapping.index('domainid')
    # German text processing helpers.
    self.germanStemmer = GermanStemmer(ignore_stopwords=True)
    self.stopwords = stopwords.words('german')
    self.stems = {}  # (item, [stem, stem, stem])
    # Evaluation counters.
    self.correct = 0
    self.total_events = 0
    self.nrrows = 0
    self.counts = {}
def recombine(mappings):
    """Recombine parent mappings into a child assignment table.

    Cells where all parents agree become fixed child assignments; the child
    is then shrunk by merging clusters along cells where the parents
    disagree, until its cluster count falls inside the parents' range.

    :param mappings: parent Mapping objects (each gets its contingency table
        rebuilt as a side effect)
    :return: the child's assignments table
    """
    # FIX: side effects belong in a plain for-loop, not a throwaway
    # list comprehension.
    for mapping in mappings:
        mapping.setContingencyTable()
    parents_nclust = [mapping.n_cluster for mapping in mappings]
    # Child cluster count drawn uniformly from [min, max] of the parents
    # (np.random.randint's upper bound is exclusive, hence the +1).
    ncluster_child = np.random.randint(min(parents_nclust),
                                       max(parents_nclust) + 1)
    child = Mapping(N_MARKER)
    combined_parents = sum(mapping.contingency_table.values
                           for mapping in mappings)
    # 1 where *all* parents agree, 0 elsewhere.
    contingency_table = (combined_parents == len(mappings)).astype(int)
    child.assignments = contingency2assignments(contingency_table)
    # Cells with partial agreement are the candidate cluster merges.
    pairs = np.argwhere((combined_parents < len(mappings))
                        & (combined_parents > 0))
    count = 0
    while child.n_cluster > ncluster_child:
        i, j = pairs[count]
        clusters = (child.assignments.iloc[i]["cluster"],
                    child.assignments.iloc[j]["cluster"])
        # Merge cluster j's label into cluster i's.
        child.assignments[child.assignments == clusters[1]] = clusters[0]
        count += 1
    return child.assignments
def start_sync():
    """Run one full Plex -> Anilist synchronization pass.

    Clears previous mapping errors, connects to the Plex server, pushes any
    out-of-sync anime updates to Anilist, then marks fully-watched shows as
    COMPLETED.

    :raises PlexConnection.PlexServerUnreachable: server cannot be reached
    :raises PlexConnection.InvalidPlexToken: token rejected by the server
    """
    logger.debug("Sync started!")
    # Clear mapping errors
    Mapping().save_mapping_errors({})
    try:
        plex_connection = PlexConnection(config.server_url,
                                         config.server_token)
    except ConnectionError:
        raise PlexConnection.PlexServerUnreachable(
            f"Unable to reach Plex server at {config.server_url}")
    except BadRequest:
        # FIX: dropped a pointless f-prefix from a placeholder-free string.
        raise PlexConnection.InvalidPlexToken("Invalid Plex token provided.")
    plex_anime = plex_connection.get_anime(config.libraries[0])
    # Check anime that are out of sync with anilist
    logger.debug("Checking for any required updates")
    for anime in plex_anime:
        if anime.update_required():
            anime.update_on_anilist()
    # Go through the list and mark any shows that have all their episodes
    # watched as completed
    logger.debug("Fixing leftover completed shows")
    anilist = Anilist(config.anilist_access_token)
    # FIX: renamed loop variable `id` -> `entry_id` (was shadowing the builtin).
    for entry_id, data in anilist.user_list.items():
        if data.get('progress') == data.get('media').get(
                'episodes') and data.get('status') != 'COMPLETED':
            anilist.update_series(entry_id, data.get('progress'), 'COMPLETED')
    logger.debug("Sync complete!\n")
def __init__(self, BASEDIR, flushing=False, flush_cycle=24,
             session_only=False, cycle_time=1):
    """Greedy-explore recommender: resolve header column indices and set up
    popularity state plus evaluation counters.

    :param flushing: whether periodic flushing of state is enabled
    :param flush_cycle: flush period (hours) when flushing is enabled
    """
    super().__init__(BASEDIR, session_only, cycle_time)
    self.name = "greedy_explore"
    self.flushing = flushing
    self.flush_cycle = flush_cycle
    headers = Mapping()
    self.rec_mapping = headers.get_header_rec()
    self.event_mapping = headers.get_header_event()
    # Column positions inside recommendation rows.
    self.item_id_idx = self.rec_mapping.index('ITEM_SOURCE')
    self.publisher_id_idx = self.rec_mapping.index('PUBLISHER')
    self.limit_idx = self.rec_mapping.index('limit')
    self.time_idx = self.rec_mapping.index('TIME_HOUR')
    # Column positions inside event rows.
    self.recs_idx = self.event_mapping.index('recs')
    self.user_id_idx = self.event_mapping.index('USER_COOKIE')
    # Popularity and per-user state.
    self.popdict = OrderedDict()
    self.user_item_dict = {}
    # Evaluation counters.
    self.correct = 0
    self.total_events = 0
    self.nrrows = 0
def __init__(self, root):
    """Build the MapReduce demo GUI: one column per pipeline stage
    (file distribution, map, shuffle, reduce), each with Start and
    bug-injection buttons, plus two status labels at the bottom.

    :param root: Tk root window
    """
    self.root = root
    self.root.geometry("500x400")
    # Pipeline stage workers (defaults: test.txt split in chunks of 3 lines).
    self.readDocument = ReadDocument("test.txt", 3)
    self.mapping = Mapping()
    self.shuffle = Shuffle()
    self.reduce = Reduce()
    # Initial status text shown in l5 ("no process running", in Spanish).
    self.text_state = "Ningun Proceso Ejecutansose"
    # File distribution column (was: "Distribucion de documentos").
    l1 = Label(self.root, text = "Distribucion de Archivos")
    l1.grid(row = 0, column = 0, sticky = W, pady = 2)
    self.document_name = Entry(self.root)
    self.document_name.insert(END, "test.txt")
    self.document_name.grid(row = 1, column = 0, sticky = W, pady = 2)
    self.lines_number = Entry(self.root)
    self.lines_number.insert(END, "3")
    self.lines_number.grid(row = 2, column = 0, sticky = W, pady = 2)
    button_task1 = Button(self.root, text = "Start",
                          command = self.startThread1)
    button_task1.grid(row = 3, column = 0, sticky = W, pady = 2)
    # "Bug" buttons inject a fault into the corresponding stage.
    button_task1_bug = Button(self.root, text = "Bug",
                              command = lambda: self.readDocument.setBug(True))
    button_task1_bug.grid(row = 4, column = 0, sticky = W, pady = 2)
    # Map column.
    l2 = Label(self.root, text = "Map")
    l2.grid(row = 0, column = 1, sticky = W, pady = 2)
    self.n_maps = Entry(self.root)
    self.n_maps.insert(END, 6)
    self.n_maps.grid(row = 1, column = 1, sticky = W, pady = 2)
    button_task2 = Button(self.root, text = "Start",
                          command = self.startThread2)
    button_task2.grid(row = 3, column = 1, sticky = W, pady = 2)
    button_task2_bug = Button(self.root, text = "Bug",
                              command = lambda: self.mapping.setBug(True))
    button_task2_bug.grid(row = 4, column = 1, sticky = W, pady = 2)
    # Shuffle column.
    l3 = Label(self.root, text = "Shuffle")
    l3.grid(row = 0, column = 2, sticky = W, pady = 2)
    button_task3 = Button(self.root, text = "Start",
                          command = self.startThread3)
    button_task3.grid(row = 3, column = 2, sticky = W, pady = 2)
    button_task3_bug = Button(self.root, text = "Bug",
                              command = lambda: self.shuffle.setBug(True))
    button_task3_bug.grid(row = 4, column = 2, sticky = W, pady = 2)
    # Reduce column.
    l4 = Label(self.root, text = "Reduce")
    l4.grid(row = 0, column = 3, sticky = W, pady = 2)
    button_task4 = Button(self.root, text = "Start",
                          command = self.startThread4)
    button_task4.grid(row = 3, column = 3, sticky = W, pady = 2)
    button_task4_bug = Button(self.root, text = "Bug",
                              command = lambda: self.reduce.setBug(True))
    button_task4_bug.grid(row = 4, column = 3, sticky = W, pady = 2)
    # Status labels (was: "Estado").
    self.l5 = Label(self.root, text = self.text_state, fg = "red")
    self.l5.grid(row = 7, column = 1, pady = 20)
    self.l6 = Label(self.root, text = "", fg = "purple")
    self.l6.grid(row = 8, column = 1, pady = 5)
def __init__(self, N):
    """Initialize a GA population of N candidate mappings, a worker pool,
    and the known solution (evaluated once up front)."""
    self.pop_size = N
    self.populations = {i: Mapping(N_MARKER, ID=i) for i in range(N)}
    # Adaptive rates using direction of highest variability?
    self.recombine_prob = .25
    self.mutation_rate = .75
    self.arity = 2
    self.pool = Pool(20)
    self.metrics = []
    self.fitnesses = []
    # Reference solution, evaluated once so its scores are cached.
    self.solution = Mapping.fromCSV()
    assignments, cluster_scores, fitness = evaluate(self.solution)
    self.solution.assignments = assignments
    self.solution.fitness = fitness
    self.solution.hashtable["scores"] = cluster_scores
def __postConversionForBatocera__(self):
    """Batocera-specific post-conversion: apply the keyboard-to-joypad
    mapping when the conversion config asks for it."""
    self.logger.log(" Batocera post-conversion")
    wants_mapper = ('mapper' in self.conversionConf
                    and self.conversionConf['mapper'] == 'Yes')
    if not wants_mapper:
        return
    # TODO Remove included padt2.keys when new full generation well tested by users
    Mapping(self.keyb2joypad.gamesConf,
            util.getCleanGameID(self.metadata, ''),
            self.getLocalGameOutputDir(), self.conversionConf,
            self.logger).mapForBatocera()
def __init__(self, shared_array, event, dispatcher_options_array,
             mapping_cfg):
    """Daemon thread that shares state with its owner via shared_array and
    a thread event; builds its Mapping from mapping_cfg."""
    self.m = Mapping(mapping_cfg)
    self.mapping_cfg = mapping_cfg
    threading.Thread.__init__(self)
    self.daemon = True
    self.shared_array = shared_array
    self.shared_thread_event = event
    self.dispatcher_options_array = dispatcher_options_array
    print("self.dispatcher_options_array: ", self.dispatcher_options_array)
def __init__(self, shared_array, event, json_path, mapping_cfg):
    """Daemon thread watching a JSON folder: resolves the JSON directory
    under ROOT_PATH and builds its Mapping from mapping_cfg."""
    threading.Thread.__init__(self)
    self.daemon = True
    self.shared_array = shared_array
    self.shared_thread_event = event
    # Resolve the JSON directory relative to the class-level root.
    self.JSONS_PATH = os.path.normpath(self.ROOT_PATH + "/" + json_path)
    self.m = Mapping(mapping_cfg)
    self.mapping_cfg = mapping_cfg
    # Per-second frequency accumulator (missing keys start at 0.0).
    self.freq_second = defaultdict(float)
    print("FolderDispatcher JSON PATH: " + self.JSONS_PATH)
def test_mapping(self):
    """map_alert_to_hash should flatten an IDEA alert into the expected
    hash, identically for a Mapping instance and the IdeaMapping class."""
    idea = {
        'Node': [{
            'SW': ['Nemea', 'HostStatsNemea'],
            'Type': ['Flow', 'Statistical'],
            'Name': 'cz.cesnet.nemea.hoststats'
        }],
        'Category': ['Recon.Scanning'],
        'EventTime': '2017-01-01T02:06:00Z',
        'Description': 'Horizontal port scan',
        'ConnCount': 655,
        'CeaseTime': '2017-01-01T02:10:53Z',
        'Format': 'IDEA0',
        'ID': '1bdfff5e-6ad4-4e63-98f6-e3e350996a5f',
        'Source': [{
            'IP4': ['185.35.62.107'],
            'Proto': ['tcp']
        }],
        'FlowCount': 655,
        'DetectTime': '2017-05-02T18:13:59Z',
        'CreateTime': '2017-05-02T18:13:59Z',
    }
    expected = {
        'Node': [{
            'Type': ['Flow', 'Statistical'],
            'SW': ['Nemea', 'HostStatsNemea'],
            'Name': 'cz.cesnet.nemea.hoststats'
        }],
        'DetectTime': '2017-05-02T18:13:59Z',
        'SourceIP6': None,
        'SourceIP4': ['185.35.62.107'],
        'TargetIP4': None,
        'Category': ['Recon.Scanning'],
        'TargetIP6': None,
    }
    mapper = Mapping("../config/mapping")
    self.assertEqual(mapper.map_alert_to_hash(idea), expected)
    # The class-level variant must produce the same hash.
    self.assertEqual(IdeaMapping.map_alert_to_hash(idea), expected)
def __init__(self):
    """Sign-seeking drone controller setup: waits for required services,
    loads object poses from the world map, and wires up publishers and the
    detection subscriber.

    WARNING: blocks while waiting for the 'clearpointservice' and
    'path_planning' services, and sleeps 5 s at the end.
    """
    self.bounding_boxes_top = "/darknet_ros/bounding_boxes"
    self.classes = [
        'narrows_from_left', 'no_bicycle', 'residential', 'roundabout'
    ]  # From '/home/robot/dd2419_ws/src/darknet_ros/darknet_ros/config/yolo-lite-cf9-4classes.yaml'
    # Set up checkpoint clearing service
    print('waiting for clear checkpoint service')
    rospy.wait_for_service('clearpointservice')
    print('got clear checkpoint service')
    print('waiting for path planning service')
    rospy.wait_for_service('path_planning')
    print('got path planning service')
    # Creating a map object to obtain the poses of the objects in the map.
    # markers and signs are both lists with tuples (name, pose).
    # objects are a dictionary containing both markers and signs where the
    # names are keys and poses are values.
    # self.objects in form [x, y, z, roll, pitch, yaw]
    # NOTE: Roll and pitch are such that yaw is flipped 180 deg and
    # roll & pitch can be ignored
    self.map = Mapping(
        '/home/robot/dd2419_ws/src/crazyflie_9/worlds_json/crazyflie9_apartment.world.json',
        0.05, 2)
    self.markers, self.signs, self.objects = self.map.object_poses()
    # Rename map keys to match the detector's class names.
    self.objects['narrows_from_left'] = self.objects.pop(
        'road_narrows_from_left')
    self.objects['roundabout'] = self.objects.pop('roundabout_warning')
    # Signs that the drone has already visited. Should be reset when
    # signs_visited == classes
    self.signs_visited = []
    # Initialize callback variables
    self.boxes = None
    # Initialize tfbuffer
    self.tf_buffer = tf2_ros.Buffer()
    tf2_ros.TransformListener(self.tf_buffer)
    # Initialize goal publisher (for interacting with 'hover' node)
    self.goal_pub = rospy.Publisher("goal", Position, queue_size=10)
    # Initialize path publisher (just visualization)
    self.path_pub = rospy.Publisher("path_vis", Path, queue_size=10)
    # Initialize subscriber to bounding box
    rospy.Subscriber(self.bounding_boxes_top, BoundingBoxes,
                     self._detection_cb)
    # Pause for subscription and tf_buffer
    rospy.sleep(5)
def main():
    """Build the RAID and its Mapping, kick off the writer/reader workers,
    and always close the raid on the way out."""
    # BUG FIX: create the Raid *before* entering try/finally. Previously,
    # if Raid() itself raised, the finally block hit a NameError on `raid`,
    # masking the original exception.
    raid = Raid(config.disk_files)
    try:
        mapping = Mapping(raid)
        print(raid.disks[0].read(0))
        print(raid)
        start_writers(config.writers_count, mapping)
        start_readers(config.readers_count, mapping)
    finally:
        print('Closing raid')
        raid.close()
def test_date_mapping():
    """A date-like string value should yield a `date`-typed property in the
    generated JSON mapping."""
    expected = {
        "mappings": {
            "dynamic": False,
            "properties": {
                "day": {
                    "type": "date"
                }
            }
        }
    }
    result = Mapping({}, {"day": "2020-01-01"}).json()
    assert result == expected
def test_json_mapping():
    """A plain string value should yield a `text`-typed property in the
    generated JSON mapping."""
    expected = {
        "mappings": {
            "dynamic": False,
            "properties": {
                "hello": {
                    "type": "text"
                }
            }
        }
    }
    result = Mapping({}, {"hello": "world"}).json()
    assert result == expected
def test_integer_mapping():
    """A numeric string value should yield a `double`-typed property in the
    generated JSON mapping (numeric strings are widened to double, per this
    expectation)."""
    expected = {
        "mappings": {
            "dynamic": False,
            "properties": {
                "value": {
                    "type": "double"
                }
            }
        }
    }
    result = Mapping({}, {"value": "25"}).json()
    assert result == expected
def make_faithful_cand(self, mapping):
    """Return a one-element list holding a fully faithful candidate
    (SR copied from UR, no changes) when UR and SR differ; otherwise
    return an empty list."""
    if not (mapping.ur != mapping.sr).any():
        # Already faithful: nothing to add.
        return []
    faithful = Mapping(self.feature_dict, [
        False,
        copy.deepcopy(mapping.ur),
        copy.deepcopy(mapping.ur),
        []
    ])
    faithful.stem = copy.copy(mapping.stem)
    faithful.add_boundaries()
    faithful.set_ngrams()
    return [faithful]
def find_darkest_neighbor(self, image, cx, cy):
    """Find the darkest pixel within a circular neighborhood of (cx, cy).

    Scans the clamped bounding square around (cx, cy) and keeps the pixel
    with the lowest (slightly jittered) gray value inside the radius.

    :return: tuple (x, y, gray_value) of the darkest neighbor
    """
    half_radius = self.parent.reductionNeighborhoodWalkify.value()
    # Clamp the scan window so it never leaves the image.
    min_x = Mapping.clip_value(cx - half_radius, half_radius,
                               image.width() - half_radius)
    min_y = Mapping.clip_value(cy - half_radius, half_radius,
                               image.height() - half_radius)
    max_x = Mapping.clip_value(cx + half_radius, half_radius,
                               image.width() - half_radius)
    max_y = Mapping.clip_value(cy + half_radius, half_radius,
                               image.height() - half_radius)
    # BUG FIX: initialize the coordinates so the return values are always
    # bound; previously, if no scanned pixel fell strictly inside the
    # radius, darkest_x/darkest_y were undefined -> UnboundLocalError.
    darkest_x, darkest_y = cx, cy
    darkest_neighbor = 256
    for x in range(min_x, max_x + 1):
        for y in range(min_y, max_y + 1):
            distance = sqrt((x - cx)**2 + (y - cy)**2)
            if distance < half_radius:
                # Tiny random jitter breaks ties between equally dark pixels.
                currcolor = qGray(image.pixel(x, y)) + random() * 0.01
                if currcolor < darkest_neighbor:
                    darkest_x = x
                    darkest_y = y
                    darkest_neighbor = currcolor
    return darkest_x, darkest_y, darkest_neighbor
def mutate(mapping):
    """Hill-climbing mutation step.

    Clones the mapping's assignments into a fresh Mapping, mutates it, and
    keeps the mutant only when its fitness is at least the original's.

    :return: tuple (assignments, fitness, hashtable) of the surviving mapping
    """
    mutant = Mapping(N_MARKER,
                     assignments=mapping.assignments.copy(),
                     initialize=False)
    mutant.mutate()
    assignments, cluster_scores, fitness = evaluate(mutant)
    if fitness < mapping.fitness:
        # Mutant is strictly worse: keep the original.
        return (mapping.assignments, mapping.fitness, mapping.hashtable)
    mutant.hashtable["scores"] = cluster_scores
    return (assignments, fitness, mutant.hashtable)
class Anime: """ A object representing an anime holding various data values and providing methods for calculating and obtaining information about the anime. """ # Class variables config = Config() mapping = Mapping() anilist = Anilist(config.anilist_access_token) # Instance variables title: str tvdb_id: str season_number: str watched_episodes: int def __post_init__(self) -> None: """ Defines other instance variables that require more complex assignments. :return: None """ self.anilist_id = self.obtain_anilist_id() self.total_episodes = self.obtain_total_episodes() self.anilist_progress = (Anime.anilist.get_anime(self.anilist_id) or {}).get('progress') self.anilist_status = (Anime.anilist.get_anime(self.anilist_id) or {}).get('status') self.status = self.equate_watch_status() def obtain_anilist_id(self) -> Optional[str]: """ Obtains the matching Anilist id from the mapping files. :return: The Anilist id for the anime or None if there was no id mapped. """ anilist_id = Anime.mapping.get_anilist_id(self.tvdb_id, self.title, self.season_number) if anilist_id is None: Anime.mapping.add_to_mapping_errors(self) return anilist_id def obtain_total_episodes(self) -> Optional[int]: """ Obtains the total number of episodes from the Anilist data. :return: The total number of episodes or None if the anime isn't already on Anilist or if the total episodes isn't known by Anilist. """ if (anime := Anime.anilist.get_anime(self.anilist_id)) is None: return None x = anime.get('media', {}).get('episodes') return x
def run(self):
    """Generate the target DataModel and flatten it into transferable form.

    :return: tuple (blob_arr, map_arr) where blob_arr[0] is the main data
        blob followed by secondary blobs, and map_arr lists Mapping entries
        describing which blob indexes patch which offsets.
    """
    assert self.target_struct is not None
    dm_to_gen = self.findDataModel(self.target_struct)
    if dm_to_gen is None:
        # NOTE(review): rotten_peel is assumed to raise/abort — confirm;
        # otherwise dm_to_gen.generate() below would fail on None.
        rotten_peel("Couldn't find DataModel: %s when trying to Run!",
                    self.target_struct)
    data, blobs, data_mappings, blob_mappings = dm_to_gen.generate()
    # Index 0 is the main data blob; secondary blobs are appended from 1 on.
    blob_arr = [data]
    id_idx = {}
    # FIX: manual counter replaced with enumerate(..., start=1).
    for idx, b_id in enumerate(blobs, start=1):
        id_idx[b_id] = idx
        blob_arr.append(blobs[b_id])
    map_arr = []
    # Mappings out of the main data blob (source index 0).
    for offset, blob_id in data_mappings.items():
        map_arr.append(
            Mapping(blob_id, self.target_struct, 0, id_idx[blob_id], offset))
    # Mappings between secondary blobs.
    for blob_id, offsets in blob_mappings.items():
        for offset, dst_id in offsets.items():
            map_arr.append(
                Mapping(dst_id, blob_id, id_idx[blob_id], id_idx[dst_id],
                        offset))
    return blob_arr, map_arr