def insert_many_locations(locations_list):
    """
    Flush the buffered location rows to the database in one bulk insert.

    Batching the inserts keeps the database locked for less time than
    inserting row by row. The flush only happens once the buffer has grown
    to at least ``number_entries_before_action`` entries; after a flush the
    buffer is cleared *in place* so the caller's list reference can keep
    being appended to.

    :param locations_list: buffered rows accepted by ``Locations.insert_many``
    :return: None
    """
    # Guard clause: buffer not full yet - keep accumulating, nothing to do.
    if len(locations_list) < number_entries_before_action:
        return
    # with databaseSetup.database.atomic(): WOULD INCREASE SPEED, IMPORT PROBLEMS
    Locations.insert_many(locations_list).execute()
    print("Inserted " + str(len(locations_list)) + " Records")
    # del xs[:] empties the caller's own list object (rebinding would not).
    del locations_list[:]
try: print("OpenCage") # Try OpenCage first time.sleep(2) address = opencage_geolocator.reverse(point, exactly_one=True) provider = "OpenCage" with open("OpenCage.json", "a") as output: json.dump(address.raw, output, sort_keys=True, indent=4) output.write(",\n") response = opencage_parser(address.raw, longitude, latitude) response[0].execute() except GeocoderQuotaExceeded or GeocoderTimedOut or GeocoderServiceError: print("Could not access geocoders for location: " + point_string) break # Skips if cannot find locat if len(location_bulk_insert_queries) != 0: Locations.insert_many(location_bulk_insert_queries).execute() print("Inserted " + str(len(location_bulk_insert_queries)) + " Records") else: with open(os.path.join(rootdir, "LocationHistory.json"), 'r') as source: data = json.load(source) locations = data.get('locations') for key, location in enumerate(locations): time_stamp = location.get('timestampMs') converted_time_stamp = datetime.fromtimestamp(float(time_stamp) / 1000.0) longitude = location.get('longitudeE7') / 10000000.0 latitude = location.get('latitudeE7') / 10000000.0 point_string = str(latitude) + ", " + str(longitude) point = Point(latitude=latitude, longitude=longitude) if (key % number_entries_before_action) == 0: current_position_saver(key) if location_from_dict(longitude_query=longitude, latitude_query=latitude, type_query="Google"):