Example #1
    def getMappings(self):
        """
        Create and return mappings for all found features (Singleton)
        """
        if hasattr(self, 'mappings'):
            return self.mappings

        mappings = []
        # Cut points: cumulative offsets of each feature's block in the overall vector
        self.cutPts = [0]
        self.mapNames = []
        for idx, feat in enumerate(self.features):
            if not (self.isIgnoreFeat(feat)):
                if self.filtr:
                    filtrData = self.db.getDescValueCountList(str(feat))
                    #Use filter, if given
                    mappings.append(
                        Mapping(str(feat),
                                None,
                                filtr=StandardMappingFilter(
                                    filtrData, numExplicit=self.explval)))
                else:
                    values = self.db.getValueList(feat)
                    mappings.append(Mapping(str(feat), values))
            else:
                #Passthrough feature, do not one-hot encode
                mappings.append(Mapping(str(feat), [], passThr=True))
            self.cutPts.append(self.cutPts[idx] + mappings[idx].length)
            self.mapNames.extend([
                str(feat)
                for i in range(self.cutPts[idx + 1] - self.cutPts[idx])
            ])
        # Cache the result so the hasattr check above returns it on later calls
        self.mappings = mappings
        return mappings
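The cut points computed above mark where each feature's one-hot block starts and ends in the concatenated feature vector. A minimal sketch of how they might be used to recover one feature's slice; the helper name and the `enc` vector are illustrative, not part of the original class:

    def getFeatureSlice(self, enc, featIdx):
        # Hypothetical helper: return the sub-vector that belongs to feature featIdx,
        # using the cut points built in getMappings().
        self.getMappings()  # ensures self.cutPts and self.mapNames exist
        start, end = self.cutPts[featIdx], self.cutPts[featIdx + 1]
        return enc[start:end]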
Example #2
def main_worker():
    # Note: relies on a module-level mpi4py communicator, e.g. comm = MPI.COMM_WORLD
    print("I am the worker")
    print("worker: sending the object")
    m = Mapping(Vertex(0, 0))
    n = Mapping(Vertex(0, 1))
    pair = MappingPair(3, m, n)
    comm.send(pair, dest=0)
    print("worker: done sending")
Example #3
def start_sync():
    logger.debug("Sync started!")
    # Clear mapping errors
    Mapping().save_mapping_errors({})

    try:
        plex_connection = PlexConnection(config.server_url,
                                         config.server_token)
    except ConnectionError:
        raise PlexConnection.PlexServerUnreachable(
            f"Unable to reach Plex server at {config.server_url}")
    except BadRequest:
        raise PlexConnection.InvalidPlexToken("Invalid Plex token provided.")

    plex_anime = plex_connection.get_anime(config.libraries[0])

    # Check anime that are out of sync with anilist
    logger.debug("Checking for any required updates")
    for anime in plex_anime:
        if anime.update_required():
            anime.update_on_anilist()

    # Go through the list and mark any shows that have all their episodes watched as completed
    logger.debug("Fixing leftover completed shows")
    anilist = Anilist(config.anilist_access_token)
    for id, data in anilist.user_list.items():
        if data.get('progress') == data.get('media').get(
                'episodes') and data.get('status') != 'COMPLETED':
            anilist.update_series(id, data.get('progress'), 'COMPLETED')

    logger.debug("Sync complete!\n")
Example #4
    def __init__(self,
                 BASEDIR,
                 flushing=False,
                 flush_cycle=24,
                 session_only=False,
                 cycle_time=1):
        super().__init__(BASEDIR, session_only, cycle_time)
        self.flushing = flushing
        self.flush_cycle = flush_cycle
        self.name = "greedy_explore"

        mapper = Mapping()
        self.rec_mapping = mapper.get_header_rec()
        self.event_mapping = mapper.get_header_event()
        self.item_id_idx = self.rec_mapping.index('ITEM_SOURCE')
        self.publisher_id_idx = self.rec_mapping.index('PUBLISHER')
        self.recs_idx = self.event_mapping.index('recs')
        self.limit_idx = self.rec_mapping.index('limit')
        self.time_idx = self.rec_mapping.index('TIME_HOUR')
        self.user_id_idx = self.event_mapping.index('USER_COOKIE')

        self.popdict = OrderedDict()
        self.user_item_dict = {}

        self.correct = 0
        self.total_events = 0
        self.nrrows = 0
Example #5
def recombine(mappings):

    # Build each parent's contingency table before combining them
    for mapping in mappings:
        mapping.setContingencyTable()

    parents_nclust = [mapping.n_cluster for mapping in mappings]
    ncluster_child = np.random.randint(min(parents_nclust), max(parents_nclust) + 1)
    child = Mapping(N_MARKER)

    # Cells on which every parent agrees become fixed assignments in the child
    combined_parents = sum(mapping.contingency_table.values for mapping in mappings)
    contingency_table = (combined_parents == len(mappings)).astype(int)

    child.assignments = contingency2assignments(contingency_table)

    # Cells on which the parents disagree are candidate merges
    pairs = np.argwhere((combined_parents < len(mappings))
                        & (combined_parents > 0))

    # Merge disagreeing clusters until the child reaches the target cluster count
    count = 0
    while child.n_cluster > ncluster_child:
        i, j = pairs[count]

        clusters = (child.assignments.iloc[i]["cluster"],
                    child.assignments.iloc[j]["cluster"])

        child.assignments[child.assignments == clusters[1]] = clusters[0]
        count += 1

    return child.assignments
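recombine() acts as the crossover step of a genetic search over cluster assignments; mutate() in Example #22 is the matching mutation step. A rough, illustrative generation step under those assumptions (the population handling and offspring count are not from the source):

def next_generation(population, n_offspring=10):
    # Illustrative only: cross random parent pairs via recombine().
    offspring = []
    for _ in range(n_offspring):
        parents = list(np.random.choice(population, size=2, replace=False))
        offspring.append(recombine(parents))
    return offspring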
Example #6
    def __init__(self, BASEDIR, session_only=False, cycle_time=1):
        super().__init__(BASEDIR, session_only, cycle_time)
        self.name = 'contentrank'

        mapper = Mapping()
        self.rec_mapping = mapper.get_header_rec()
        self.event_mapping = mapper.get_header_event()
        self.update_mapping = mapper.get_header_update()
        self.item_id_idx = self.rec_mapping.index('ITEM_SOURCE')
        self.publisher_id_idx = self.rec_mapping.index('PUBLISHER')
        self.recs_idx = self.event_mapping.index('recs')
        self.limit_idx = self.rec_mapping.index('limit')
        self.title_idx = self.update_mapping.index('title')
        self.text_idx = self.update_mapping.index('text')
        self.update_id_idx = self.update_mapping.index('id')
        self.update_domainid_idx = self.update_mapping.index('domainid')

        self.germanStemmer = GermanStemmer(ignore_stopwords=True)
        self.stopwords = stopwords.words('german')
        self.stems = {}  # (item, [stem, stem, stem])

        self.correct = 0
        self.total_events = 0
        self.nrrows = 0

        self.counts = {}
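The stems cache above maps an item id to its stemmed tokens. A rough sketch of how one entry might be built from an update row, using the indices computed in __init__; the method name and tokenization are assumptions, not part of the original class:

    def stem_item(self, update_row):
        # Illustrative helper: stem the title and text of one update row.
        item_id = update_row[self.update_id_idx]
        text = update_row[self.title_idx] + " " + update_row[self.text_idx]
        tokens = [t for t in text.lower().split() if t not in self.stopwords]
        self.stems[item_id] = [self.germanStemmer.stem(t) for t in tokens]
        return self.stems[item_id]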
Example #7
    def __init__(self, map_frame_id, map_resolution, map_width, map_height,
                 map_origin_x, map_origin_y, map_origin_yaw, inflate_radius,
                 unknown_space, free_space, c_space, occupied_space, optional=None):
        rospy.init_node('occupancy_grid_handler')
        self.__pose = None
        self.__map = GridMap(map_frame_id, map_resolution, map_width, map_height,
                             map_origin_x, map_origin_y, map_origin_yaw)

        self.__inflated_map = self.__map

        self.__mapping = Mapping(unknown_space, free_space, c_space,
                                 occupied_space, inflate_radius, optional)

        self.__odom_sub = message_filters.Subscriber('SVEA5/odom', OdometryROS)
        self.__scan_sub = message_filters.Subscriber('scan', LaserScanROS)

        self.__ts = message_filters.ApproximateTimeSynchronizer(
            [self.__odom_sub, self.__scan_sub], 10, 0.01)
        self.__ts.registerCallback(self.callback)

        self.__map_pub = rospy.Publisher('map', OccupancyGridROS, queue_size=1,
                                         latch=True)
        self.__map_updates_pub = rospy.Publisher("map_updates",
                                                 OccupancyGridUpdateROS,
                                                 queue_size=10)

        self.__map_inflated_pub = rospy.Publisher('inflated_map', OccupancyGridROS, queue_size=1, latch=True)

        self.publish_map()

        rospy.spin()
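The ApproximateTimeSynchronizer above delivers one odometry message and one laser scan per matched pair to self.callback. A minimal sketch of the callback shape it expects; the body is illustrative and update_map is a hypothetical method, not the project's API:

    def callback(self, odom_msg, scan_msg):
        # Called once per time-synchronized (odometry, scan) pair.
        # Illustrative body: update the map and republish it.
        self.__map = self.__mapping.update_map(self.__map, odom_msg, scan_msg)  # hypothetical method
        self.publish_map()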
Example #8
File: GEN.py  Project: presleyp/gp2
 def make_new_mapping(self, old_mapping, locus, features):
     new_sr = copy.deepcopy(old_mapping.sr)
     new_sr[locus] -= features
     for feature in features:
         new_sr[locus].add(-feature)
     try:
         self.feature_dict.get_segment(new_sr[locus])
     except IndexError:
         return []
     new_mapping = Mapping(self.feature_dict, [
         False,
         copy.deepcopy(old_mapping.ur), new_sr,
         copy.deepcopy(old_mapping.changes)
     ])
     new_mapping.stem = copy.copy(old_mapping.stem)
     for feature in features:
         change = Change(self.feature_dict,
                         change_type='change',
                         mapping=new_mapping,
                         locus=locus,
                         feature=feature)
         change.make_set()
         new_mapping.changes.append(change)
     new_mapping.add_boundaries()
     new_mapping.set_ngrams()
     return new_mapping
Example #9
    def __init__(self,
                 BASEDIR,
                 only_clicked=False,
                 session_only=False,
                 cycle_time=1):
        # Forward the constructor arguments to the base class
        super().__init__(BASEDIR,
                         session_only=session_only,
                         cycle_time=cycle_time,
                         only_clicked=only_clicked)
        self.name = 'popevent_mpcexplore'

        mapper = Mapping()
        self.rec_mapping = mapper.get_header_rec()
        self.event_mapping = mapper.get_header_event()
        self.item_id_idx = self.rec_mapping.index('ITEM_SOURCE')
        self.publisher_id_idx = self.rec_mapping.index('PUBLISHER')
        self.recs_idx = self.event_mapping.index('recs')
        self.user_id_idx = self.event_mapping.index('USER_COOKIE')
        self.keyword_idx = self.rec_mapping.index('KEYWORD')

        self.poprankevent = MostClicked(BASEDIR)
        self.mpc_event_session = MPCEventSession(BASEDIR)

        self.user_last_item_dict = {}
        self.item_sequence_dict = {}
        self.user_item_dict = {}
        self.keyword_dict = {}

        self.correct = 0
        self.total_events = 0
        self.nrrows = 0
Example #10
    def __init__(self,
                 map_frame_id,
                 map_resolution,
                 map_width,
                 map_height,
                 map_origin_x,
                 map_origin_y,
                 map_origin_yaw,
                 inflate_radius,
                 unknown_space,
                 free_space,
                 c_space,
                 occupied_space,
                 optional=None):
        self.__pose = None
        self.__map = GridMap(map_frame_id, map_resolution, map_width,
                             map_height, map_origin_x, map_origin_y,
                             map_origin_yaw, unknown_space)

        self.__inflated_map = self.__map

        self.__mapping = Mapping(unknown_space, free_space, c_space,
                                 occupied_space, inflate_radius, optional)

        self.__update = None

        self.__correct_inflated_map = True
Example #11
    def dict(self):
        """Serialize the condition"""
        data = Mapping()
        data['type'] = self.__reg_condition.condition.__name__
        data.check_key = self.__reg_condition.condition.name
        data.level = self.level
        data.pid = self.pid
        if self.detectable:
            data.detector = self.detector.__name__

        data.symptom = self.__reg_condition.condition.symptom
        data.solution = self.__reg_condition.condition.solution
        data.rectifier_tried = self.rectifier_tried
        data.detector_tried = self.detector_tried
        data.error_code = 1
        data.msg = self.last_message
        data.rectifiable = False
        if self.rectifier:
            data.rectifiable = True
            try:
                data.rectifier = self.rectifier.__name__
            except Exception:
                pass
        data.detectable = self.detectable
        data.rectified = self.rectified

        if self.detected:
            data.update(self.detected.dict())

        data.detected = True if self.detected else self.detected
        data.error_code_string = self.context_name()

        return data
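Since dict() fills the Mapping like a plain dictionary, its result can be serialized for logging or an API response. A minimal sketch, assuming `condition` is an instance of the class above and that Mapping is dict-like for json.dumps (default=str guards any non-serializable values):

import json

payload = condition.dict()
print(json.dumps(payload, default=str, indent=2))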
Example #12
	def __init__(self, root):
		self.root = root
		self.root.geometry("500x400")

		self.readDocument = ReadDocument("test.txt", 3)
		self.mapping = Mapping()
		self.shuffle = Shuffle()
		self.reduce = Reduce()

		self.text_state = "Ningun Proceso Ejecutansose"

		# Document distribution
		l1 = Label(self.root, text = "Distribucion de Archivos")
		l1.grid(row = 0, column = 0, sticky = W, pady = 2)
		self.document_name = Entry(self.root)
		self.document_name.insert(END, "test.txt")
		self.document_name.grid(row = 1, column = 0, sticky = W, pady = 2)
		self.lines_number = Entry(self.root)
		self.lines_number.insert(END, "3")
		self.lines_number.grid(row = 2, column = 0, sticky = W, pady = 2)
		button_task1 = Button(self.root, text = "Start", command = self.startThread1)
		button_task1.grid(row = 3, column = 0, sticky = W, pady = 2)
		button_task1_bug = Button(self.root, text = "Bug", command = lambda: self.readDocument.setBug(True))
		button_task1_bug.grid(row = 4, column = 0, sticky = W, pady = 2)


		#Map
		l2 = Label(self.root, text = "Map")
		l2.grid(row = 0, column = 1, sticky = W, pady = 2)
		self.n_maps = Entry(self.root)
		self.n_maps.insert(END, 6)
		self.n_maps.grid(row = 1, column = 1, sticky = W, pady = 2)
		button_task2 = Button(self.root, text = "Start", command = self.startThread2)
		button_task2.grid(row = 3, column = 1, sticky = W, pady = 2)
		button_task2_bug = Button(self.root, text = "Bug", command = lambda: self.mapping.setBug(True))
		button_task2_bug.grid(row = 4, column = 1, sticky = W, pady = 2)

		#Shuffle
		l3 = Label(self.root, text = "Shuffle")
		l3.grid(row = 0, column = 2, sticky = W, pady = 2)
		button_task3 = Button(self.root, text = "Start", command = self.startThread3)
		button_task3.grid(row = 3, column = 2, sticky = W, pady = 2)
		button_task3_bug = Button(self.root, text = "Bug", command = lambda: self.shuffle.setBug(True))
		button_task3_bug.grid(row = 4, column = 2, sticky = W, pady = 2)

		#Reduce
		l4 = Label(self.root, text = "Reduce")
		l4.grid(row = 0, column = 3, sticky = W, pady = 2)
		button_task4 = Button(self.root, text = "Start", command = self.startThread4)
		button_task4.grid(row = 3, column = 3, sticky = W, pady = 2)
		button_task4_bug = Button(self.root, text = "Bug", command = lambda: self.reduce.setBug(True))
		button_task4_bug.grid(row = 4, column = 3, sticky = W, pady = 2)

		# Status

		self.l5 = Label(self.root, text = self.text_state, fg = "red")
		self.l5.grid(row = 7, column = 1, pady = 20)
		self.l6 = Label(self.root, text = "", fg = "purple")
		self.l6.grid(row = 8, column = 1, pady = 5)
Example #13
 def __postConversionForBatocera__(self):
     self.logger.log("  Batocera post-conversion")
     if 'mapper' in self.conversionConf and self.conversionConf['mapper'] == 'Yes':
         # TODO Remove included padt2.keys when new full generation well tested by users
         Mapping(self.keyb2joypad.gamesConf,
                 util.getCleanGameID(self.metadata, ''),
                 self.getLocalGameOutputDir(), self.conversionConf,
                 self.logger).mapForBatocera()
Example #14
 def __init__(self, shared_array, event, dispatcher_options_array, mapping_cfg):
     self.m = Mapping(mapping_cfg)
     self.mapping_cfg = mapping_cfg
     threading.Thread.__init__(self)
     self.shared_array = shared_array
     self.daemon = True
     self.shared_thread_event = event
     self.dispatcher_options_array = dispatcher_options_array
     print("self.dispatcher_options_array: ", self.dispatcher_options_array)
Example #15
 def __init__(self, shared_array, event, json_path, mapping_cfg):
     threading.Thread.__init__(self)
     self.shared_array = shared_array
     self.shared_thread_event = event
     self.daemon = True
     self.JSONS_PATH = os.path.normpath(self.ROOT_PATH + "/" + json_path)
     self.m = Mapping(mapping_cfg)
     self.mapping_cfg = mapping_cfg
     self.freq_second = defaultdict(float)
     print("FolderDispatcher JSON PATH: " + self.JSONS_PATH)
Example #16
    def test_mapping(self):
        idea = {
            u'Node': [{
                u'SW': [u'Nemea', u'HostStatsNemea'],
                u'Type': [u'Flow', u'Statistical'],
                u'Name': u'cz.cesnet.nemea.hoststats'
            }],
            u'Category': [u'Recon.Scanning'],
            u'EventTime': u'2017-01-01T02:06:00Z',
            u'Description': u'Horizontal port scan',
            u'ConnCount': 655,
            u'CeaseTime': u'2017-01-01T02:10:53Z',
            u'Format': u'IDEA0',
            u'ID': u'1bdfff5e-6ad4-4e63-98f6-e3e350996a5f',
            u'Source': [{
                u'IP4': [u'185.35.62.107'],
                u'Proto': [u'tcp']
            }],
            u'FlowCount': 655,
            u'DetectTime': u'2017-05-02T18:13:59Z',
            u'CreateTime': u'2017-05-02T18:13:59Z'
        }
        m = Mapping("../config/mapping")
        h = m.map_alert_to_hash(idea)

        dis = {
            'Node': [{
                u'Type': [u'Flow', u'Statistical'],
                u'SW': [u'Nemea', u'HostStatsNemea'],
                u'Name': u'cz.cesnet.nemea.hoststats'
            }],
            'DetectTime': u'2017-05-02T18:13:59Z',
            'SourceIP6': None,
            'SourceIP4': [u'185.35.62.107'],
            'TargetIP4': None,
            'Category': [u'Recon.Scanning'],
            'TargetIP6': None
        }
        self.assertEqual(h, dis)

        h = IdeaMapping.map_alert_to_hash(idea)
        self.assertEqual(h, dis)
Example #17
    def __init__(self):

        self.bounding_boxes_top = "/darknet_ros/bounding_boxes"

        self.classes = [
            'narrows_from_left', 'no_bicycle', 'residential', 'roundabout'
        ]
        # From '/home/robot/dd2419_ws/src/darknet_ros/darknet_ros/config/yolo-lite-cf9-4classes.yaml'

        # Set up checkpoint clearing service
        print('waiting for clear checkpoint service')
        rospy.wait_for_service('clearpointservice')
        print('got clear checkpoint service')

        print('waiting for path planning service')
        rospy.wait_for_service('path_planning')
        print('got path planning service')

        # Create a map object to obtain the poses of the objects in the map.
        # markers and signs are lists of (name, pose) tuples.
        # objects is a dictionary combining markers and signs, keyed by name with the poses as values.
        # Each entry of self.objects has the form [x, y, z, roll, pitch, yaw].
        # NOTE: roll and pitch are such that yaw is flipped 180 deg; roll & pitch can be ignored.
        self.map = Mapping(
            '/home/robot/dd2419_ws/src/crazyflie_9/worlds_json/crazyflie9_apartment.world.json',
            0.05, 2)
        self.markers, self.signs, self.objects = self.map.object_poses()

        self.objects['narrows_from_left'] = self.objects.pop(
            'road_narrows_from_left')
        self.objects['roundabout'] = self.objects.pop('roundabout_warning')

        # Signs that the drone has already visited. Should be reset when signs_visited == classes
        self.signs_visited = []

        # Initialize callback variables
        self.boxes = None

        # Initialize tfbuffer
        self.tf_buffer = tf2_ros.Buffer()
        tf2_ros.TransformListener(self.tf_buffer)

        # Initialize goal publisher (for interacting with 'hover' node)
        self.goal_pub = rospy.Publisher("goal", Position, queue_size=10)

        # Initialize path publisher (just visualization)
        self.path_pub = rospy.Publisher("path_vis", Path, queue_size=10)

        # Initialize subscriber to bounding box
        rospy.Subscriber(self.bounding_boxes_top, BoundingBoxes,
                         self._detection_cb)

        # Pause for subscription and tf_buffer
        rospy.sleep(5)
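The comments above describe each entry of self.objects as a pose [x, y, z, roll, pitch, yaw] with the yaw flipped 180 degrees. A rough sketch of turning one of those entries into a goal for the 'hover' node; the Position field names and the yaw correction (including its units) are assumptions:

    def goal_from_object(self, name):
        # Illustrative helper, not part of the original node.
        x, y, z, roll, pitch, yaw = self.objects[name]
        goal = Position()
        goal.x, goal.y, goal.z = x, y, z
        goal.yaw = yaw + 180  # undo the 180 deg flip noted above (assumed to be in degrees)
        return goal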
Example #18
def test_integer_mapping():
    mapping = Mapping({}, {"value": "25"})
    assert mapping.json() == {
        "mappings": {
            "dynamic": False,
            "properties": {
                "value": {
                    "type": "double"
                }
            }
        }
    }
Example #19
def test_date_mapping():
    mapping = Mapping({}, {"day": "2020-01-01"})
    assert mapping.json() == {
        "mappings": {
            "dynamic": False,
            "properties": {
                "day": {
                    "type": "date"
                }
            }
        }
    }
Example #20
def test_json_mapping():
    mapping = Mapping({}, {"hello": "world"})
    assert mapping.json() == {
        "mappings": {
            "dynamic": False,
            "properties": {
                "hello": {
                    "type": "text"
                }
            }
        }
    }
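The three tests above show Mapping inferring Elasticsearch field types from a sample document. A minimal sketch of feeding the generated mapping to an Elasticsearch index with the official client; the client setup and index name are assumptions, not part of the tests:

from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")
mapping = Mapping({}, {"hello": "world"})
# Create an index whose mappings come straight from the inferred sample document
es.indices.create(index="demo-index", body=mapping.json())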
Example #21
def main():

    # Create the RAID outside the try block so the finally clause
    # never references an undefined name if construction fails.
    raid = Raid(config.disk_files)
    try:
        mapping = Mapping(raid)
        print(raid.disks[0].read(0))
        print(raid)
        start_writers(config.writers_count, mapping)
        start_readers(config.readers_count, mapping)
    finally:
        print('Closing raid')
        raid.close()
Example #22
def mutate(mapping):

    new_mapping = Mapping(N_MARKER,
                          assignments=mapping.assignments.copy(),
                          initialize=False)
    new_mapping.mutate()
    new_assignments, new_cluster_scores, new_fitness = evaluate(new_mapping)

    # Hill-climbing acceptance: keep the mutant only if it does not reduce fitness
    if mapping.fitness <= new_fitness:
        new_mapping.hashtable["scores"] = new_cluster_scores
        return (new_assignments, new_fitness, new_mapping.hashtable)
    else:
        return (mapping.assignments, mapping.fitness, mapping.hashtable)
Example #23
File: GEN.py  Project: presleyp/gp2
 def make_faithful_cand(self, mapping):
     if (mapping.ur != mapping.sr).any():
         new_mapping = Mapping(self.feature_dict, [
             False,
             copy.deepcopy(mapping.ur),
             copy.deepcopy(mapping.ur), []
         ])
         new_mapping.stem = copy.copy(mapping.stem)
         new_mapping.add_boundaries()
         new_mapping.set_ngrams()
         return [new_mapping]
     else:
         return []
Example #24
class Anime:
    """ An object representing an anime, holding various data values and providing methods for
    calculating and obtaining information about the anime.
    """
    # Class variables
    config = Config()
    mapping = Mapping()
    anilist = Anilist(config.anilist_access_token)

    # Instance variables
    title: str
    tvdb_id: str
    season_number: str
    watched_episodes: int

    def __post_init__(self) -> None:
        """ Defines other instance variables that require more complex assignments.

        :return: None
        """
        self.anilist_id = self.obtain_anilist_id()
        self.total_episodes = self.obtain_total_episodes()
        self.anilist_progress = (Anime.anilist.get_anime(self.anilist_id)
                                 or {}).get('progress')
        self.anilist_status = (Anime.anilist.get_anime(self.anilist_id)
                               or {}).get('status')

        self.status = self.equate_watch_status()

    def obtain_anilist_id(self) -> Optional[str]:
        """ Obtains the matching Anilist id from the mapping files.

        :return: The Anilist id for the anime or None if there was no id mapped.
        """
        anilist_id = Anime.mapping.get_anilist_id(self.tvdb_id, self.title,
                                                  self.season_number)
        if anilist_id is None:
            Anime.mapping.add_to_mapping_errors(self)
        return anilist_id

    def obtain_total_episodes(self) -> Optional[int]:
        """ Obtains the total number of episodes from the Anilist data.

        :return: The total number of episodes
                 or None if the anime isn't already on Anilist or if the total episodes isn't known by Anilist.
        """
        if (anime := Anime.anilist.get_anime(self.anilist_id)) is None:
            return None
        return anime.get('media', {}).get('episodes')
Example #25
File: jpit.py  Project: zhr1991/difuze
    def run(self):
        assert self.target_struct is not None
        dm_to_gen = self.findDataModel(self.target_struct)
        if dm_to_gen is None:
            rotten_peel("Couldn't find DataModel: %s when trying to Run!",
                        self.target_struct)
        data, blobs, data_mappings, blob_mappings = dm_to_gen.generate()

        blob_arr = []
        id_idx = {}
        blob_arr.append(data)
        i = 1
        for b_id in blobs:
            blob = blobs[b_id]
            id_idx[b_id] = i
            blob_arr.append(blob)
            i += 1

        map_arr = []
        for offset in data_mappings:
            # top-level (root) data structure
            src_id = self.target_struct
            blob_id = data_mappings[offset]
            dst_idx = id_idx[blob_id]
            src_idx = 0
            entry = Mapping(blob_id, src_id, src_idx, dst_idx, offset)
            map_arr.append(entry)

        for blob_id in blob_mappings:
            for offset in blob_mappings[blob_id]:
                src_id = blob_id
                src_idx = id_idx[blob_id]
                dst_id = blob_mappings[blob_id][offset]
                dst_idx = id_idx[dst_id]
                entry = Mapping(dst_id, src_id, src_idx, dst_idx, offset)
                map_arr.append(entry)

        return blob_arr, map_arr
Example #26
    def __init__(self):
        self.map_manager = Mapping()
        self.AP = self.map_manager.getAPs()

        self.list_color = [["red", "violet"], ["blue", "green"],
                           ["orange", "black"]]
        self.norm = matplotlib.colors.Normalize(vmin=-90, vmax=-20)
        #NODE FOR ROS
        rospy.init_node("point_controller")
        self.publisher = rospy.Publisher('/visualization_marker_array',
                                         MarkerArray, queue_size=10)
        self.markerArray = MarkerArray()

        self.rate = rospy.Rate(10)
Example #27
    def end_spider(self):
        '''Called when the crawl finishes; first checks whether enough data was collected.'''
        num = len(self.result) - max(self.page * self.page_num, self.vacancy_totals)

        if num < 0 and self.current_circle_num < self.max_circle_num:
            # Not enough results yet: extend the page range and crawl again
            self.start = self.end
            self.end += abs(num) // self.page_num + 1
            self.current_circle_num += 1
            self.main()
            return
        else:
            result = Mapping(self.result).main()
            if result:
                self.write_vacancy_ids()
            print(self.set_tip(u"Job crawl finished: {} positions crawled, {} stored in the database".format(len(self.vacancy_temp), result)))
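The page-extension arithmetic in end_spider() is close to a ceiling division: the shortfall is divided by the page size and one extra page is always added, so an exact multiple still requests one page more. A small check of that arithmetic using a hypothetical helper:

def extra_pages(shortfall, page_num):
    # Mirrors abs(num) // page_num + 1 from end_spider(); illustrative only.
    return abs(shortfall) // page_num + 1

assert extra_pages(-7, 5) == 2
assert extra_pages(-5, 5) == 2  # one extra page even when the shortfall divides evenly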
Example #28
def test_config_property_mapping():
    mapping = Mapping({"properties": {
        "dep": {
            "type": "keyword"
        }
    }}, {"dep": "25"})
    assert mapping.json() == {
        "mappings": {
            "dynamic": False,
            "properties": {
                "dep": {
                    "type": "keyword"
                }
            }
        }
    }
Example #29
    def __init__(self, QAFile, ReviewFile, minReview):

        self.Map = Mapping(QAFile, ReviewFile, minReview)
        self.QAnswers = []
        self.Sentences = []
        self.QPerItem = []
        self.SPerItem = []
        self.PairWiseFeature = {}
        self.Avgdl = defaultdict(float)

        self.Map.create_mappings()

        for i in range(len(self.Map.ItemIDMap)):
            self.QPerItem.append([])

        for i in range(len(self.Map.ItemIDMap)):
            self.SPerItem.append([])
Example #30
def main():
    # print("start!!")

    # start and goal position
    sx = 120.0  # [m]
    sy = 100.0  # [m]
    gx = 20.0  # [m]
    gy = 100.0  # [m]
    grid_size = 2.0  # [m]
    robot_radius = 10.0  # [m]
    
    # mapp = Mapping(location, 0.1, 3)
    mapp = Mapping(location, 0.05, 2)

    matrx = mapp.matrix
    range_of_map = matrx.shape
    horizonal = range_of_map[0]
    vertical = range_of_map[1]
    # print(matrx.shape)
    # print(horizonal)
    # print(vertical)

    # set obstacle positions
    matrx_indx = np.nonzero(matrx == 1)  # cells equal to 1 represent walls
    oy_old = matrx_indx[0].tolist()
    ox_old = matrx_indx[1].tolist()
    oy = [vertical-i for i in oy_old]
    ox = [horizonal-i for i in ox_old]

    
    if show_animation:  # pragma: no cover

        plt.plot(ox, oy, ".k")
        plt.plot(sx, sy, "og")
        plt.plot(gx, gy, "xb")
        plt.grid(True)
        plt.axis("equal")

    a_star = AStarPlanner(ox, oy, grid_size, robot_radius)
    rx, ry = a_star.planning(sx, sy, gx, gy)
    rx.reverse()
    ry.reverse()
    print(rx,ry)
    if show_animation:  # pragma: no cover
        plt.plot(rx, ry, "-r")
        plt.show()