def extract(self):
    self.pdf = PDFContainer(format=self.config.outPDFFormat,
                            codec=self.config.fileCodec)
    if self.pdf.format == "filter":
        self.pdf.convertPDFFilter(self.INFilename)
    else:
        self.pdf.convertPDFAlternative(self.INFilename)
    self.tokenizer = nltk.data.load(self.config.sentencesSplitterModel)
    self.extractorNer = NERExtractor(self.config)
    self.extractorLoc = Location(self.config.minTanimoto)

    # extract title
    txt = self.pdf.getPages(0, 3)
    self.extractTitle(txt)

    # extract names
    txt = self.pdf.getPages(0, 10)
    sents = txt.split('\n')  # tokenizer.tokenize(txt)
    self.extractName(sents)

    # extract locations with coords
    txt = self.pdf.getAllPages()
    sents = self.tokenizer.tokenize(txt)
    self.extractLocation(sents)

    # extract key words
    self.extractKeyWords(txt)

    # extract refs
    self.extractRefs(txt)

def test_locking_money(self):
    user = UserGetter.get_by_id('xUQeyplJshTzco4vyHHVoytT3FD2')
    request = DeliveryRequest(
        "id", "item", "description\ntext",
        origin=Location(name="Odenvägen 1, SE-194 63 Odenslunda, Sweden",
                        latitude=59.51224, longitude=17.93536).to_dict(),
        destination=Location(name="Rolsmo 1, SE-360 24 Linneryd, Sweden",
                             latitude=56.64989, longitude=15.16624).to_dict(),
        reward=100, weight=2, fragile=True, status=Status.AVAILABLE,
        money_lock=0, owner=user.to_minified().to_dict(),
        assistant=MinifiedUser("", "", "", "").to_dict(), image_path="")

    # ensure enough capital
    user.deposit(100)
    user_balance = user.balance
    self.assertEqual(user.balance, user_balance)

    user.lock_delivery_amount(request)
    self.assertEqual(user.balance, user_balance - 100)

def __init__(self, uid: str, item: str, description: str, origin: dict,
             destination: dict, reward: int, weight: int, fragile: bool,
             status: Status, money_lock: int, owner: dict, assistant: dict,
             image_path: str, **kwargs):
    """Initializes the delivery request."""
    self.uid: str = uid
    self.item: str = item
    self.description: str = description
    self.origin: Location = Location(**origin)
    self.destination: Location = Location(**destination)
    self.reward: int = reward
    self.weight: int = weight
    self.fragile: bool = fragile
    self.status: Status = status
    self.money_lock: int = money_lock
    self.owner: MinifiedUser = MinifiedUser(**owner)
    if assistant:
        self.assistant: MinifiedUser = MinifiedUser(**assistant)
    self.image_path: str = image_path
    self.weight_text = self._weight_props[weight].text
    self.weight_icon = self._weight_props[weight].icon
    self.status_text = self._to_text(status)

def test_get_by_id(self):
    delivery_request = DeliveryRequestGetter.get_by_id(u'DLpVc0QmbOHzfDo24Hpp')
    expected = DeliveryRequest(
        uid='DLpVc0QmbOHzfDo24Hpp',
        item='Xbox controller',
        description='I AM USED FOR TESTS. DO NOT REMOVE',
        origin=Location(name="Odenvägen 1, SE-194 63 Odenslunda, Sweden",
                        latitude=59.51224, longitude=17.93536).to_dict(),
        destination=Location(name="Rolsmo 1, SE-360 24 Linneryd, Sweden",
                             latitude=56.64989, longitude=15.16624).to_dict(),
        reward=123, weight=0, fragile=False, status=Status.AVAILABLE,
        money_lock=23,
        owner=MinifiedUser(
            mail="*****@*****.**",
            name='Travis CI Account - DON\'T DELETE OR YOULL BREAK THE ENTIRE CI WORKFLOW/UNITTESTS REEEEEEEEEEEEEEEEEEEEEEEEE',
            phonenumber='0',
            uid="xUQeyplJshTzco4vyHHVoytT3FD2").to_dict(),
        assistant=dict(),
        image_path='')
    self.assertDictEqual.__self__.maxDiff = None
    self.assertDictEqual(delivery_request.to_dict(), expected.to_dict())

def read_locations(source_file_name: str):
    """Loads locations from the specified CSV file and returns a collection of
    location instances.

    :param source_file_name: The CSV file to load.
    """
    with open(source_file_name) as location_file:
        location_reader = csv.DictReader(location_file)
        output_locations = []
        int_fieldnames = [
            i for i in location_reader.fieldnames if re.match(r'\d+', i)
        ]
        for row in location_reader:
            # Build distance dictionary
            my_dists = dict.fromkeys(int_fieldnames, 0)
            for ifn in my_dists.keys():
                try:
                    my_dists[ifn] = float(row[ifn])
                except ValueError:
                    my_dists[ifn] = -1
            # Add to output
            this_row_location = Location(int(row["ID"]), row["Name"],
                                         row["Address"], my_dists)
            output_locations.append(this_row_location)
        print(f'Read in {location_reader.line_num} lines from {source_file_name}, '
              f'created {len(output_locations)} location objects.')
        Location.hub = output_locations[0]
        return output_locations

def create_customerorder_list(self, orderinfo_list):
    # random generation of customer(s) location
    for orderinfo in orderinfo_list:
        customer_loc_list = [
            Location(self.worldsize * random(), self.worldsize * random())
            for i in range(orderinfo["workorder_num"])
        ]
        orderinfo.update({"customer_loc_list": customer_loc_list})
    self.workorder_list = orderinfo_list

    # create processes that specify arrival statistics and manage processes
    # initiated upon workorder arrival
    self.arrivalstats_list = [
        {
            "pid": x["pid"],
            "arrival_obj": ArrivalStochastics(
                env=self.env,
                lambd_rate=x["lambd_rate"],
                dist_type=x["dist_type"],
                customer_loc_list=x["customer_loc_list"],
                pid=x["pid"],
                producer_action=self.producer_list[self.pindexerGet(
                    x["pid"])].producer_action)
        }
        # interrupt_process=self.producer_list[self.pindexerGet(x["pid"])].action_process)}
        for x in self.workorder_list
    ]

def get(self, store_id):
    store_bson = self.stores_collection.find_one({"_id": bson.ObjectId(store_id)})
    store = StoreModel(str(store_bson['_id']),
                       store_bson['store_type'],
                       store_bson['location_quick_name'],
                       Location(store_bson['location']['type'],
                                store_bson['location']['coordinates']))
    return store

def findInImage(self, image):
    # Use equality, not identity, when comparing against an int literal.
    if image is None or len(image.shape) != 2:
        print('error with image')
        return
    if len(self.template.shape) != 2:
        print('error with template')
        return
    image = cv2.normalize(image, None, 0, 255, cv2.NORM_MINMAX, cv2.CV_8U)
    m = np.float32([(1, 0, self.template.shape[0] / 2),
                    (0, 1, self.template.shape[1] / 2)])
    d = cv2.matchTemplate(image, self.template, cv2.TM_CCORR_NORMED)
    d = cv2.warpAffine(d, m, dsize=(image.shape[1], image.shape[0]))
    m, M, m_1, M_1 = cv2.minMaxLoc(d)
    print(m, M, m_1, M_1)
    threshold = self.threshold
    if self.variableThreshold:
        threshold = M - threshold
    locations = []
    _, d2 = cv2.threshold(d, threshold, 1, cv2.THRESH_BINARY)
    d2 = cv2.normalize(d2, None, 0, 255, cv2.NORM_MINMAX, cv2.CV_8U)
    d = cv2.normalize(d, None, 0, 255, cv2.NORM_MINMAX, cv2.CV_8U)
    contours, hier = cv2.findContours(d2, cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE)
    for cnt in contours:
        (x, y, w, h) = cv2.boundingRect(cnt)
        locations.append(Location((y, x), size=(h, w)))
    return locations, cv2.merge((d2, d, image))

def create_vehicle_list(self, vehicleinfo_list):
    self.vehicle_list = [
        Vehicle(env=self.env,
                vid=x["vid"],
                initial_location=Location(x["x"], x["y"]),
                capacity=x["capacity"],
                velocity=x["velocity"]) for x in vehicleinfo_list
    ]

def test_from_dict(self):
    data = {'name': 'test', 'longitude': 0, 'latitude': 0}
    location = Location.from_dict(data)
    self.assertEqual(location.name, data['name'])
    self.assertEqual(location.latitude, data['latitude'])
    self.assertEqual(location.longitude, data['longitude'])

def __init__(self, number, description=None, lat=None, lng=None,
             level=None, time=None):
    errors = []

    try:
        self.number = int(number)
    except (TypeError, ValueError):
        errors.append({"number": _("Drop point number is not a number.")})
    else:
        if self.number < 1:
            errors.append({"number": _("Drop point number is not positive.")})
        with db.session.no_autoflush:
            if db.session.query(DropPoint).get(self.number):
                errors.append({"number": _("That drop point already exists.")})

    if time and not isinstance(time, datetime):
        errors.append({"DropPoint": _("Creation time not a datetime object.")})
    if isinstance(time, datetime) and time > datetime.today():
        errors.append({"DropPoint": _("Creation time in the future.")})

    self.time = time if time else datetime.today()

    try:
        Location(self, time=self.time, description=description,
                 lat=lat, lng=lng, level=level)
    except ValueError as e:
        errors += e.args

    if errors:
        raise ValueError(*errors)

    db.session.add(self)

def get(self):
    stores = []
    for store_bson in self.stores_collection.find():
        # TODO refactor out into mapper
        store = Store(str(store_bson['_id']),
                      store_bson['store_type'],
                      store_bson['location_quick_name'],
                      Location(store_bson['location']['type'],
                               store_bson['location']['coordinates']))
        stores.append(store)
    return stores

def put(self, store_id):
    data = request.get_json()
    store_bson = self.stores_collection.find_one({'_id': bson.ObjectId(store_id)})
    store_bson['location_quick_name'] = data['location_quick_name']
    store_bson['location']['coordinates'] = data['location']['coordinates']
    # update_one needs a filter and an update document; $set only the changed fields.
    self.stores_collection.update_one(
        {'_id': store_bson['_id']},
        {'$set': {
            'location_quick_name': store_bson['location_quick_name'],
            'location.coordinates': store_bson['location']['coordinates']
        }})
    store = StoreModel(str(store_bson['_id']),
                       store_bson['store_type'],
                       store_bson['location_quick_name'],
                       Location(store_bson['location']['type'],
                                store_bson['location']['coordinates']))
    return store

def test_query(self):
    user = UserGetter.get_by_id('xUQeyplJshTzco4vyHHVoytT3FD2')
    delivery_requests = DeliveryRequestGetter.query(
        u'owner.uid', u'==', u'xUQeyplJshTzco4vyHHVoytT3FD2')
    self.assertGreaterEqual(len(delivery_requests), 1,
                            msg="Expected at least 1 match.")

    # Clean up DRs
    for dr in delivery_requests:
        if not dr.uid == 'DLpVc0QmbOHzfDo24Hpp':
            with Firestore.batch('packages') as batch:
                batch.delete(dr.uid)

    delivery_requests = DeliveryRequestGetter.query(
        u'owner.uid', u'==', u'xUQeyplJshTzco4vyHHVoytT3FD2')
    self.assertEqual(len(delivery_requests), 1, msg="Expected 1 match.")

    expected = DeliveryRequest(
        uid='DLpVc0QmbOHzfDo24Hpp',
        item='Xbox controller',
        description='I AM USED FOR TESTS. DO NOT REMOVE',
        origin=Location("Odenvägen 1, SE-194 63 Odenslunda, Sweden",
                        latitude=59.51224, longitude=17.93536).to_dict(),
        destination=Location("Rolsmo 1, SE-360 24 Linneryd, Sweden",
                             latitude=56.64989, longitude=15.16624).to_dict(),
        reward=123, weight=0, fragile=False, status=Status.AVAILABLE,
        money_lock=23, owner=user.to_minified().to_dict(),
        assistant=dict(), image_path='')
    self.assertDictEqual.__self__.maxDiff = None
    self.assertDictEqual(delivery_requests[0].to_dict(), expected.to_dict())

def test_addition_to_drop_point(self):
    dp_number = 3
    first_description = "here"
    first_lat = -23.5
    first_lng = 84
    first_level = 3
    second_description = "there"
    second_lat = 3.14
    second_lng = -2.71828
    second_level = 2
    first_time = datetime.today() - timedelta(hours=2)
    second_time = datetime.today() - timedelta(hours=2 - 1)

    dp = DropPoint(dp_number,
                   description=first_description,
                   lat=first_lat,
                   lng=first_lng,
                   level=first_level,
                   time=first_time)
    db.session.commit()

    self.assertEqual(len(dp.locations), 1,
                     "Drop point does not have exactly one location.")
    self.assertEqual(db.session.query(Location).count(), 1,
                     "Not exactly one location in the database.")

    second_location = Location(dp,
                               description=second_description,
                               lat=second_lat,
                               lng=second_lng,
                               level=second_level,
                               time=second_time)
    db.session.commit()

    self.assertEqual(len(dp.locations), 2,
                     "Drop point does not have exactly two locations.")
    self.assertEqual(db.session.query(Location).count(), 2,
                     "Not exactly two locations in the database.")
    self.assertEqual(dp.get_current_location(), second_location,
                     "Current drop point location is not second location.")
    self.assertEqual(dp.locations[1].time - dp.locations[0].time,
                     second_time - first_time,
                     "Wrong time difference between locations.")

def extract(self):
    if self.model.INFilename != "":
        from converter.pdfextractor import PDFContainer
        import nltk.data
        from model.title import extractTitle
        from model.location import Location
        from config.dictionary import refWords, Coves, Seas, Bays, Islands
        from model.keywords import KeywordExtractor
        from model.reference import ExtracrReference
        from model.ner import NERExtractor
        from converter.dataContainer import DataPerson, DataLocation, DataKeyword, DataRef
        from converter.load import insertData

        # self.model.extract()
        self.view.setProgress(0)
        self.view.isVisibleProgress(True)

        self.model.pdf = PDFContainer(format=self.model.config.outPDFFormat,
                                      codec=self.model.config.fileCodec)
        if self.model.pdf.format == "filter":
            self.model.pdf.convertPDFFilter(self.model.INFilename)
        else:
            self.model.pdf.convertPDFAlternative(self.model.INFilename)
        self.view.setProgress(10)

        self.model.tokenizer = nltk.data.load(self.model.config.sentencesSplitterModel)
        self.view.setProgress(20)
        self.model.extractorNer = NERExtractor(self.model.config)
        self.view.setProgress(30)
        self.model.extractorLoc = Location(self.model.config.minTanimoto)
        self.view.setProgress(40)

        # extract title
        txt = self.model.pdf.getPages(0, 3)
        self.model.extractTitle(txt)
        self.view.setProgress(50)

        # extract names
        txt = self.model.pdf.getPages(0, 10)
        sents = txt.split('\n')  # tokenizer.tokenize(txt)
        self.model.extractName(sents)
        self.view.setProgress(60)

        # extract locations with coords
        txt = self.model.pdf.getAllPages()
        sents = self.model.tokenizer.tokenize(txt)
        self.model.extractLocation(sents)
        self.view.setProgress(70)

        # extract key words
        self.model.extractKeyWords(txt)
        self.view.setProgress(80)

        # extract refs
        self.model.extractRefs(txt)
        self.view.setProgress(90)

        #############################
        self.view.setProgress(100)

def create_delivery_request():
    origin = Location(name="Odenvägen 1, SE-194 63 Odenslunda, Sweden",
                      latitude=59.51224, longitude=17.93536).to_dict()
    destination = Location(name="Rolsmo 1, SE-360 24 Linneryd, Sweden",
                           latitude=56.64989, longitude=15.16624).to_dict()
    return DeliveryRequest(
        "TEST", "item", "This is a test, feel free to remove.",
        origin, destination, reward=10, weight=2, fragile=True,
        status=Status.AVAILABLE, money_lock=0,
        owner=MinifiedUser("", "", "", "xUQeyplJshTzco4vyHHVoytT3FD2").to_dict(),
        assistant=MinifiedUser("", "", "", "").to_dict(),
        image_path="")

def profile_optimization_strategies(routing_method: str, loc_count: int,
                                    step_size: int = 1):
    # Initial data load
    locations = read_locations("sample_locations.csv")
    if len(locations) < loc_count:
        need = loc_count - len(locations)
        locations.extend(Location.generate_fake_locations(need))

    test_routing_table = None
    if routing_method.lower() == "hash":
        # Build location+location distance lookup hash table
        test_routing_table = RoutingTableHash(locations)
    elif routing_method.lower() == "array":
        test_routing_table = RoutingTableArray(locations)
    else:
        raise Exception("Unknown routing table type specified:", routing_method)

    # Testing lots of locations
    for count in range(5, loc_count, step_size):
        print(f"-- Optimization strategies with {count} locations --")
        this_pass_locations = locations[1:count]
        optimizer = RouteOptimizer(this_pass_locations, test_routing_table,
                                   locations[0])

        if count <= optimizer.bfs_cutoff_slow:
            start = time.perf_counter()
            route = optimizer.get_optimized_bfs()
            elapsed = time.perf_counter() - start
            optimizer.print_route_evaluation(
                f"Brute force N={len(this_pass_locations) + 1}", route, elapsed)
        else:
            print("Skipping brute force search due to location count being above",
                  optimizer.bfs_cutoff_slow)

        start = time.perf_counter()
        route = optimizer.get_optimized_nn()
        elapsed = time.perf_counter() - start
        optimizer.print_route_evaluation(
            f"Nearest Neighbor N={len(this_pass_locations) + 1}", route, elapsed)

        start = time.perf_counter()
        route = optimizer.get_optimized_cpm()
        elapsed = time.perf_counter() - start
        optimizer.print_route_evaluation(
            f"AS Coproximity N={len(this_pass_locations) + 1}", route, elapsed)

    exit(0)

def fetch_locations(self):
    print('Fetching locations...', flush=True)
    self.driver.get(self.url)
    has_next = True
    iterator_count = 0
    locations = []
    while has_next:
        if iterator_count > 0:
            location_elements = self.driver.find_element_by_xpath(
                '//*[@id="LOCATION_LIST"]/ul')
            location_elements = location_elements.find_elements_by_tag_name('li')
        else:
            location_elements = self.driver.find_elements_by_class_name('geo_name')
        for e in location_elements:
            try:
                url = e.find_element_by_tag_name('a').get_attribute('href')
                url_text = e.find_element_by_tag_name('a').text
                language = get_language_by_url(self.driver.current_url)
                if language == 'es':
                    name = url_text.split(I18N[language]['in'] + ' ')[-1]
                elif language == 'en':
                    name = url_text.split(' Restaurants')[0]
                else:
                    name = ''
                locations.append(Location(url, name))
            except:
                logging.warning('Couldn\'t fetch location.')
                pass
        if iterator_count > 0:
            next_page = ".pgLinks>.sprite-pageNext"
        else:
            next_page = ".pagination>.next"
        try:
            has_next = self.driver.execute_script(
                'return !document.querySelector("' + next_page +
                '").classList.contains("disabled")')
            if has_next:
                self.driver.execute_script(
                    'document.querySelector("' + next_page + '").click()')
        except:
            has_next = False
        iterator_count += 1
        time.sleep(SECONDS_BETWEEN_REQUEST)
    return locations

def __init__(self, pid, location=None, production_time=0, worldsize=100.0,
             env=None, deliverymgr=None):
    self._pid = pid
    if location is None:
        x = random.random() * worldsize
        y = random.random() * worldsize
        self.location = Location(x, y)
    else:
        self.location = location
    # workorder list for this producer
    self._workorder_list = None
    self.deliverymgr = deliverymgr
    # time required to produce product; if stochastic, indicates mean value
    self.production_time = production_time
    self.env = env

def test_construction_exceptions(self):
    dp = DropPoint(1, lat=0, lng=0, level=1)

    with self.assertRaisesRegexp(ValueError, "drop point"):
        Location("foo")

    time_in_future = datetime.today() + timedelta(hours=1)
    with self.assertRaisesRegexp(ValueError, "future"):
        Location(dp, time=time_in_future, lat=0, lng=0, level=1)

    with self.assertRaisesRegexp(ValueError, "not a datetime"):
        Location(dp, time="foo", lat=0, lng=0, level=1)

    start_time = datetime.today()
    with self.assertRaisesRegexp(ValueError, "older than current"):
        Location(dp, time=start_time, lat=0, lng=0, level=1)
        db.session.commit()
        Location(dp, time=start_time - timedelta(hours=1))

    invalid_lat = ("foo", -180, 91, None)
    invalid_lng = ("bar", -181, 251.5, None)
    invalid_level = ("quux", None)

    for lat in invalid_lat:
        with self.assertRaisesRegexp(ValueError, "lat"):
            Location(dp, lat=lat, lng=0, level=1)

    for lng in invalid_lng:
        with self.assertRaisesRegexp(ValueError, "lng"):
            Location(dp, lat=0, lng=lng, level=1)

    for level in invalid_level:
        with self.assertRaisesRegexp(ValueError, "level"):
            Location(dp, lat=0, lng=0, level=level)

    too_long = "a" * (Location.max_description + 1)
    with self.assertRaisesRegexp(ValueError, "too long"):
        Location(dp, lat=0, lng=0, level=1, description=too_long)

def add_location(self):
    '''
    Create list_of_locations built of Location objects.
    Catch ValueError when the data isn't an integer.
    '''
    option = [4, 5]
    amount = 1
    for row in Territory.list_of_territory:
        try:
            if self.name == row[4] and int(row[3]) in option:
                location = Location(row[4], row[5], amount)
                self.list_of_locations.append(location)
                amount += 1
        except ValueError:
            continue

def create_producer_list(self, producerinfo_list=None, producer_num=1):
    if producerinfo_list:
        self.producer_list = [
            Producer(pid=x["pid"],
                     location=Location(x=x["x"], y=x["y"]),
                     production_time=x["production_time"],
                     env=self.env,
                     deliverymgr=self.deliverymgr) for x in producerinfo_list
        ]
    else:
        for i in range(producer_num):
            producer = Producer(i + 1, worldsize=self.worldsize, env=self.env)
            self.producer_list.append(producer)
    self.pindexerGet = lambda x: dict(
        (p.pid, i) for i, p in enumerate(self.producer_list)).get(x)

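# The simulation snippets above (create_customerorder_list, create_vehicle_list,
# the Producer constructor and create_producer_list) all build Location objects
# from plain x/y coordinates on a square world of side `worldsize`. A minimal
# sketch of the planar Location interface they appear to assume is shown below;
# the distance_to() helper is hypothetical and only illustrates how a vehicle or
# producer might measure straight-line travel distance.
import math


class Location:
    """Point on the simulated world plane (assumed interface, sketch only)."""

    def __init__(self, x: float, y: float):
        self.x = x
        self.y = y

    def distance_to(self, other: "Location") -> float:
        # Hypothetical helper: Euclidean distance between two points.
        return math.hypot(self.x - other.x, self.y - other.y)
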
def post(self):
    data = request.get_json()
    store_bson = self.stores_collection.find_one({
        'store_type': data['store_type'],
        'location.coordinates': data['location']['coordinates']
    })
    if store_bson:
        return {'message': 'item exists'}, 400

    new_store = {
        '_id': bson.ObjectId(),
        'store_type': data['store_type'],
        'location': data['location'],
        'location_quick_name': data['location_quick_name']
    }
    self.stores_collection.insert_one(new_store)
    store = Store(str(new_store['_id']),
                  new_store['store_type'],
                  new_store['location_quick_name'],
                  Location(new_store['location']['type'],
                           new_store['location']['coordinates']))
    return store

def create_location(token, location_name, detailed_address, lot, lat, province,
                    city, location_contactss):
    """
    Create a transfer location (service point).
    :param token:
    :param location_name:
    :param detailed_address:
    :param lot:
    :param lat:
    :param province:
    :param city:
    :return:
    """
    user = hgetall(get_token(token))
    if (not user) or int(user['role_type']) != cs.USER_ROLE_INFO[u"线路规划专员"]:
        return cs.AUTH_ERROR, None

    # generate location_code
    location_code = generate_location_code()
    try:
        location_obj = Location(location_name, location_code, detailed_address,
                                lot, lat, province, city,
                                cs.LOCATION_STATUS_INFO[u"启用"])
        # create the location's contact persons
        if location_contactss:
            print(location_contactss)
            for item in location_contactss:
                location_contacts = LC(location_code, item['contacts_name'],
                                       item['contacts_telephone'])
                db.session.add(location_contacts)
        db.session.add(location_obj)
        db.session.commit()
        return cs.OK, location_code
    except:
        logger.error("create location err : {}".format(traceback.format_exc()))
        raise
    finally:
        db.session.rollback()

def test_to_dict(self):
    location1 = Location('test', 0.0, 0.0)
    expected = {'name': 'test', 'longitude': 0, 'latitude': 0}
    self.assertDictEqual(location1.to_dict(), expected)

import os, sys

parentPath = os.path.abspath("..")
if parentPath not in sys.path:
    sys.path.insert(0, parentPath)

from model.location import Location


def test(expect, output):
    if expect == output:
        print("OK")
    else:
        print("ERR", expect, " ", output)


l = Location(0.5)

test(l.isNorm('a'), True)
test(l.isNorm(' '), True)
test(l.isNorm('1'), True)
test(l.isNorm(')'), False)
test(l.normalize('It is a good day.'), 'It is a good day')
test(l.getLoc(['It is a good day.']), 'it is a good day')
test(l.isFuzzyEqual('It is a good day.', 'It is a good night.'), 1.0)
test(l.isFuzzyEqual('It is a good day.', 'It is a bad night.'), 0.0)
test(l.VangerFisher('Morskaya', 'Morskoy'), True)
test(l.VangerFisher('Morskaya', 'Mars'), False)

import pandas as pd

from knowledge import Knowledge
from model.location import Location

df = pd.read_excel('tmp/Danh sách cấp xã.xls')
locations = []

# communes (xã)
for i, row in df.iterrows():
    if (i > 0) and (i % 1000 == 0):
        print(f"Extracted {i} entities")
    location = Location(name=row["Tên"], level=row["Cấp"])
    locations.append(location)

# districts (quận, huyện)
level1_entities = df["Quận Huyện"].unique()
for name in level1_entities:
    location = Location(name=name, level="Quận Huyện")
    locations.append(location)

# provinces / cities (tỉnh, thành phố)
level0_entities = df["Tỉnh / Thành Phố"].unique()
for name in level0_entities:
    location = Location(name=name, level="Tỉnh / Thành Phố")
    locations.append(location)

savepath = "data/locations.jl"
Knowledge.save(locations, savepath)
print(f"{len(locations)} entities saved in {savepath}")

def initialize():
    collections = mongo.db.collection_names()

    if "locations" not in collections:
        col: wrappers.Collection = mongo.db.locations
        col.ensure_index([("geo", GEOSPHERE)])
        locations = json.load(open("./db/locations.json"))
        col: wrappers.Collection = mongo.db.locations
        for location in locations:
            loc = Location(**location)
            col.insert_one(loc.__dict__)
        subways = json.load(open("./db/subway.json"))
        for subway in subways:
            if not subway:
                continue
            loc = Location(**subway)
            col.insert_one(loc.__dict__)

    if "rooms" not in collections:
        col: wrappers.Collection = mongo.db.rooms
        col.create_index("seq", unique=True)
        col.ensure_index([("geo", GEOSPHERE)])
        col2: wrappers.Collection = mongo.db.room_detail
        col2.create_index("seq", unique=True)
        col2.ensure_index([("geo", GEOSPHERE)])
        db = json.load(open("./db/db.json"))
        for key in db:
            for key2 in db[key]:
                for info in db[key][key2]:
                    try:
                        location = info["locs"][0]["loc"]
                        rooms_info = info["rooms"]
                        col: wrappers.Collection = mongo.db.rooms
                        items = []
                        for rinfo in rooms_info:
                            rinfo["region_code"] = location["code"]
                            room = RoomInfo(**rinfo)
                            items += [room.__dict__]
                        if items:
                            col.insert_many(items)
                    except:
                        pass
                    if not items:
                        continue
                    col: wrappers.Collection = mongo.db.room_detail
                    option = [
                        "침대", "책상", "인터넷", "전자도어락", "세탁기", "에어컨",
                        "옷장", "신발장", "TV", "냉장고", "가스레인지"
                    ]
                    detail = []
                    for item in items:
                        try:
                            item["building_floor"] = random.randint(1, 5)
                            item["floor"] = random.randint(1, item["building_floor"])
                            if item["room_type"] < 3:
                                item["room_count"] = item["room_type"] + 1
                                item["bath_count"] = 1
                            item["reg_date"] = "2020.10.04 22:30"
                            item["building_date"] = "2020.10.04 22:00"
                            item["maintain_cost"] = random.randint(5, 10)
                            cnt = random.randint(0, len(option))
                            item["options"] = random.sample(option, cnt)
                            del item["_id"]
                            col.insert_one(item)
                            detail += [item]
                        except:
                            continue
                    # if detail:
                    #     col.insert_many(detail)

    if "security_light" not in collections:
        col: wrappers.Collection = mongo.db.security_light
        col.ensure_index([("geo", GEOSPHERE)])
        db = json.load(open("./db/security_light.json"))
        items = []
        for item in db["records"]:
            if "latitude" in item and "longtitude" in item:
                cctv = SecurityLight(**item)
                items += [cctv.__dict__]
        if items:
            col.insert_many(items)

    if "cctv" not in collections:
        col: wrappers.Collection = mongo.db.cctv
        col.ensure_index([("geo", GEOSPHERE)])
        db = json.load(open("./db/cctv.json"))
        items = []
        for item in db["records"]:
            if "latitude" in item and "longtitude" in item:
                cctv = CCTV(**item)
                items += [cctv.__dict__]
        if items:
            col.insert_many(items)

def test_to_string(self):
    location = Location('test', 1.0, -5.0)
    expected = 'Location: (1.0, -5.0) test'
    self.assertEqual(str(location), expected)

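# The three small tests above (test_from_dict, test_to_dict, test_to_string) pin
# down the behaviour expected of the delivery-app Location model: construction
# from name/latitude/longitude, round-tripping through dicts, and a
# "Location: (lat, lng) name" string form. A minimal sketch that would satisfy
# them is given below; the positional argument order (name, latitude, longitude)
# is an assumption inferred from test_to_string, not confirmed by the source.
class Location:
    def __init__(self, name: str, latitude: float, longitude: float):
        self.name = name
        self.latitude = latitude
        self.longitude = longitude

    @classmethod
    def from_dict(cls, data: dict) -> "Location":
        # Rebuild a Location from the dict shape produced by to_dict().
        return cls(name=data['name'],
                   latitude=data['latitude'],
                   longitude=data['longitude'])

    def to_dict(self) -> dict:
        return {'name': self.name,
                'latitude': self.latitude,
                'longitude': self.longitude}

    def __str__(self) -> str:
        # Matches the expected string in test_to_string.
        return f'Location: ({self.latitude}, {self.longitude}) {self.name}'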