def add():
    """Create a job-location document and store it in the 'job_index' index.

    Fixes in this revision:
      * The original GeoPoint was given four coordinates with a missing comma
        (a syntax error); each GeoPoint takes exactly one (lat, lon) pair, so
        the two points are split across two 'job_location' GeoFields.
      * doc_id is now a real local (it was only a keyword argument, so the
        later index.get(doc_id) referenced an undefined name).
      * index.put() now receives my_document (was the undefined 'document').
    """
    doc_id = 'PA6-5000'
    my_document = search.Document(
        # Setting the doc_id is optional. If omitted, the search service
        # will create an identifier.
        doc_id=doc_id,
        fields=[
            search.GeoField(name='job_location',
                            value=search.GeoPoint(38.7234211, -9.1873166)),
            search.GeoField(name='job_location',
                            value=search.GeoPoint(39.093031, -9.2629842))
        ])
    try:
        index = search.Index(name="job_index")
        # Preserves the original guard: only (re)put when the doc already
        # exists in the index.
        if index.get(doc_id):
            index.put(my_document)
    except search.Error:
        logging.exception('Put failed')
def put(self):
    """Index the caller's geo location under its instance_id.

    Expects a JSON body with 'instance_id' and 'location' ("lat,lon").
    Responds 201 when the document is stored, 422 otherwise.
    """
    self.response.status = 422
    if not self.request.body:
        return
    payload = json.loads(self.request.body)
    lat_str, lon_str = payload['location'].split(",", 1)
    point = search.GeoPoint(float(lat_str), float(lon_str))
    status_document = search.Document(
        doc_id=payload['instance_id'],
        fields=[search.GeoField(name='location', value=point)])
    try:
        search.Index(name="locationIndex").put(status_document)
        self.response.status = 201
    except search.Error:
        logging.exception('Put failed')
def update_search_index(self, id, playground):
    """Replace the search document for a playground.

    Search documents cannot be updated in place, only replaced, so every
    create/update rewrites the document completely. The datastore id doubles
    as the search doc_id, which also makes re-fetching the entity easy.
    """
    playground_doc = search.Document(
        doc_id = str(id),
        # Be very cautious about which fields are indexed, as that impacts
        # cost and search performance. Store only the fields that are
        # searched upon; for retrieval we take the id from a search hit and
        # query the entity from the datastore.
        fields=[
            search.TextField(name='name', value=playground.name),
            search.TextField(name='caption', value=playground.caption),
            search.TextField(name='sport', value=playground.sport),
            search.TextField(name='locality', value=playground.address.locality),
            search.TextField(name='locality_id', value=playground.address.locality_id),
            search.TextField(name='city', value=playground.address.city),
            search.NumberField(name='status', value=playground.status),
            # NOTE(review): datetime.date(dt) only works if 'datetime' here is
            # the datetime *class* (unbound-method call == dt.date()); with
            # 'import datetime' (the module) this raises — confirm the import.
            search.DateField(name='updated_on', value=datetime.date(playground.updated_on)),
            search.GeoField(name='latlong', value=search.GeoPoint(playground.address.latlong.lat, playground.address.latlong.lon))
        ])
    try:
        index = search.Index(name=PLAYGROUND)
        index.put(playground_doc)
        logger.debug("Successfully stored playground in search index %s " % id)
    except search.Error:
        logger.exception('Storing playground %s in search index failed' % id)
def update_public_index(obj):
    """Add or replace *obj*'s entry in the public search index (best-effort).

    Indexes the display text (user/group name or message text) and, for a
    User with a known last_location, a GeoField. Objects with no indexable
    text are skipped. Failures are logged and swallowed so indexing never
    breaks the caller.
    """
    try:
        location = None
        data = ''
        if isinstance(obj, User) and obj.name:
            data = obj.name
            location = obj.last_location
        elif isinstance(obj, Message) and obj.text:
            data = obj.text
        elif isinstance(obj, Group) and obj.name:
            data = obj.name
        else:
            # Nothing worth indexing.
            return
        index = search.Index(name=PUBLIC_SEARCH_INDEX)
        fields = [search.TextField(name='text', value=data)]
        if location and location.latlon:
            latlon = location.latlon
            fields.append(
                search.GeoField(name='location',
                                value=search.GeoPoint(latlon.lat, latlon.lon)))
        index.put(search.Document(doc_id=obj.key.urlsafe(), fields=fields))
    except Exception:
        # Was a bare 'except:', which also swallowed SystemExit and
        # KeyboardInterrupt. logging.exception records the full traceback
        # (the old code logged only the exception type).
        logging.exception('Adding object %s to search index failed.' % (str(obj)))
def get(self):
    """Index every Photo (url, stream_id, geopoint) and return the list as JSON.

    Fixes: the 'photo' Index handle was re-created on every loop iteration
    and its put() result bound to an unused local; the handle is now created
    once and the dead binding removed.
    """
    photos = Photo.query().fetch()
    photo_list = []
    index = search.Index(name='photo')  # hoisted out of the loop
    for photo in photos:
        geopoint = search.GeoPoint(photo.lat, photo.lon)
        search_index = search.Document(doc_id=photo.url, fields=[
            search.TextField(name='url', value=photo.url),
            search.TextField(name='stream_id', value=str(photo.stream_id)),
            search.GeoField(name='geopoint', value=geopoint)
        ])
        index.put(search_index)
        photo_list.append({
            'stream_id': photo.stream_id,
            'url': photo.url,
            'lat': photo.lat,
            'lon': photo.lon
        })
    self.response.out.write(json.dumps(photo_list))
def person_doc(self,counter, body):
    """Build a person-sourced flood-report search document from the JSON
    request body and append it to the module-level documents_list.

    Writes the running document count to the response. The 'body' parameter
    is unused (the JSON is re-read from self.request.body); it is kept for
    caller compatibility. wiper_speed/speed are set to -100.0 — presumably
    "not applicable" sentinels for person-sourced reports; confirm against
    the consumers of these fields.
    """
    request_body = json.loads(self.request.body)
    lat_val = request_body['lat']
    latitude = float(str(lat_val))
    lon_val = request_body['lon']
    longitude = float(str(lon_val))
    car_breakdown_area = request_body['car_breakdown_area']
    safe_parking_area = request_body['safe_parking_area']
    water_level_area = request_body['water_level_area']
    my_document = search.Document(
        # Setting the doc_id is optional. If omitted, the search service
        # will create an identifier.
        doc_id = str(counter),
        fields=[
            search.TextField(name='supplier', value='person'),
            search.NumberField(name='wiper_speed', value=-100.0),
            search.NumberField(name='speed', value=-100.0),
            search.TextField(name='car_break_down', value=str(car_breakdown_area)),
            search.TextField(name='car_parked', value=str(safe_parking_area)),
            search.NumberField(name='water_level_area', value=int(water_level_area)),
            search.GeoField(name='wlocation', value=search.GeoPoint(latitude,longitude))
        ])
    documents_list.extend([my_document])
    self.response.write(len(documents_list))
def _build_event_date(i, event, ed, venue, start, end, is_hours=False):
    """Helper to create a specific date - yields one search doc.

    Args:
        i: ordinal of this date within the event; joined with the event slug
            to form the doc_id ('<slug>-<i>').
        event: event entity providing name/slug/key.
        ed: event-date entity providing category and venue_slug.
        venue: venue entity; venue.geo (a point or a list of points) becomes
            the venue_geo GeoField when present.
        start, end: naive datetimes, localized to UTC here and stored as
            unix timestamps.
        is_hours: when True, the category is forced to CATEGORY.HOURS.

    Returns:
        A search.Document for this event date.
    """
    category = ed.category
    if is_hours:
        category = CATEGORY.HOURS
    fields = []
    doc_id = '%s-%s' % (event.slug, i)
    fields.append(search.TextField(name='name', value=event.name))
    fields.append(search.AtomField(name='slug', value=event.slug))
    fields.append(search.AtomField(name='event_keystr', value=str(event.key.urlsafe())))
    # Populate bits specific to the event date
    fields.append(search.NumberField(name='start', value=unix_time(timezone('UTC').localize(start))))
    fields.append(search.NumberField(name='end', value=unix_time(timezone('UTC').localize(end))))
    fields.append(search.AtomField(name='category', value=category))
    # Attach Venue/Geo Information
    fields.append(search.AtomField(name='venue_slug', value=ed.venue_slug))
    venue_geo = None
    if venue.geo:
        geoPt = venue.geo
        # venue.geo may be a single point or a list of points; take the first.
        if isinstance(geoPt, list):
            geoPt = geoPt[0]
        venue_geo = search.GeoPoint(geoPt.lat, geoPt.lon)
        fields.append(search.GeoField(name='venue_geo', value=venue_geo))
    return search.Document(doc_id=doc_id, fields=fields)
def index_artifact(index_, id_, fields):
    """Build a document from a flat (type, name, value) triple list and put it.

    *fields* is a flat list laid out as [type, name, value, type, name,
    value, ...] where type is one of the *FIELD constants. On a put failure
    (typically an index out of space) the put is retried up to 3 times
    against the next index in the numeric series.

    Fix: the rollover previously recomputed the SAME index name
    (str(int(name[-4:])).zfill(4) is a no-op), so every retry hit the full
    index again; the 4-digit suffix is now incremented.
    """
    f = []
    for i in xrange(0, len(fields), 3):
        if fields[i] == ATOMFIELD:
            f.append(search.AtomField(name=fields[i + 1], value=fields[i + 2]))
        elif fields[i] == TEXTFIELD:
            f.append(search.TextField(name=fields[i + 1], value=fields[i + 2]))
        elif fields[i] == HTMLFIELD:
            f.append(search.HtmlField(name=fields[i + 1], value=fields[i + 2]))
        elif fields[i] == NUMBERFIELD:
            f.append(search.NumberField(name=fields[i + 1], value=fields[i + 2]))
        elif fields[i] == DATEFIELD:
            f.append(search.DateField(name=fields[i + 1], value=fields[i + 2]))
        elif fields[i] == GEOPOINTFIELD:
            f.append(search.GeoField(name=fields[i + 1], value=fields[i + 2]))
    doc = search.Document(doc_id=id_, fields=f)
    retry_count = 0
    while True:
        try:
            index_.put(doc)
            break
        except search.Error as e:
            if retry_count < 3:
                log.warning(
                    'Error put doc into index, could be out of space. Creating new index'
                )
                # Roll over to the next index in the series: name ends in a
                # 4-digit, zero-padded counter.
                index_ = search.Index(
                    index_.name[:-4] + str(int(index_.name[-4:]) + 1).zfill(4),
                    namespace=index_.namespace)
                retry_count += 1
            else:
                raise e
def createPositionDocument(doc_id, gaepos):
    """Build a search Document carrying an 'id' text field and a geo 'position'.

    Note: doc_id is stored as a *field*, not as the document's doc_id, so the
    service auto-assigns the document identifier.
    """
    position = search.GeoPoint(gaepos.lat, gaepos.lon)
    fields = [
        search.TextField(name="id", value=doc_id),
        search.GeoField(name="position", value=position),
    ]
    return search.Document(fields=fields)
def get_field(self, key, value):
    """Translate (key, value) into the search field type named in self.mapping.

    Returns None when the key is unmapped, maps to a falsy entry, or maps to
    an unknown type name. DATEFIELD values are epoch *milliseconds*; negative
    timestamps are mirrored to positive before conversion (pre-epoch instants
    are therefore folded onto post-epoch ones — preserved from the original).

    Fix: 'except KeyError, e' is Python-2-only syntax; 'as' works on both
    Python 2.6+ and 3. Statement-terminating semicolons removed.
    """
    try:
        field_type = self.mapping[key]
        if not field_type:
            return None
        if field_type == 'TEXTFIELD':
            return search.TextField(name=key, value=value)
        elif field_type == 'ATOMFIELD':
            return search.AtomField(name=key, value=value)
        elif field_type == 'NUMBERFIELD':
            return search.NumberField(name=key, value=value)
        elif field_type == 'HTMLFIELD':
            return search.HtmlField(name=key, value=value)
        elif field_type == 'DATEFIELD':
            timestamp = value if value > 0 else -value
            return search.DateField(name=key,
                                    value=datetime.fromtimestamp(timestamp / 1000.0))
        elif field_type == 'GEOFIELD':
            return search.GeoField(name=key, value=value)
        else:
            return None
    except KeyError as keyError:
        print(keyError)
        return None
def job_doc(self, counter):
    """Build a job search document, queue it in the module-level
    documents_list, and persist a matching AJob entity.

    Request params: lat, lon, job_id, address, description, skill, and
    job_date (epoch seconds). Writes the running document count to the
    response.

    Fix: the job_date string was converted to a datetime twice (once for the
    search field, once for the entity); it is now parsed once.
    """
    latitude = self.request.get('lat')
    longitude = self.request.get('lon')
    job_id = self.request.get('job_id')
    address = self.request.get('address')
    description = self.request.get('description')
    skill = self.request.get('skill')
    job_date = self.request.get('job_date')
    # Parse once; long() tolerates a trailing 'L' etc. in the py2 source data.
    job_datetime = datetime.datetime.fromtimestamp(float(long(str(job_date))))
    job_document = search.Document(
        # Setting the doc_id is optional. If omitted, the search service
        # will create an identifier.
        doc_id=str(counter),
        fields=[
            search.TextField(name='job_id', value=str(job_id)),
            search.TextField(name='address', value=str(address)),
            search.TextField(name='description', value=str(description)),
            search.TextField(name='skill', value=str(skill)),
            search.DateField(name='job_date', value=job_datetime),
            search.GeoField(name='job_location',
                            value=search.GeoPoint(float(latitude), float(longitude)))
        ])
    documents_list.extend([job_document])
    job = AJob(id=job_id,
               job_id=job_id,
               assigned=False,
               address=address,
               description=description,
               skill=skill,
               job_date=job_datetime,
               date_posted=datetime.datetime.today())
    job.put()
    self.response.write(len(documents_list))
def update_search_index(self, id, event):
    """Replace the search document for an event.

    Search documents cannot be updated in place, only replaced, so every
    create/update rewrites the document completely. The datastore id doubles
    as the search doc_id so the entity can be re-fetched after a search hit.
    """
    event_doc = search.Document(
        doc_id=str(id),
        # Store only the fields that are searched/sorted upon. For retrieval
        # we take the id from a search hit and query the datastore.
        fields=[
            search.TextField(name='name', value=event.name),
            search.TextField(name='caption', value=event.caption),
            search.TextField(name='sport', value=event.sport),
            search.TextField(name='locality', value=event.address.locality),
            search.TextField(name='locality_id', value=event.address.locality_id),
            search.TextField(name='city', value=event.address.city),
            search.DateField(name='start_datetime', value=event.start_datetime),
            search.DateField(name='end_datetime', value=event.end_datetime),
            search.NumberField(name='status', value=event.status),
            # NOTE(review): datetime.date(dt) only works if 'datetime' here is
            # the datetime *class* (unbound-method call == dt.date()); with
            # 'import datetime' (the module) this raises — confirm the import.
            search.DateField(name='updated_on', value=datetime.date(event.updated_on)),
            search.GeoField(name='latlong', value=search.GeoPoint(
                event.address.latlong.lat, event.address.latlong.lon))
        ])
    try:
        index = search.Index(name=EVENT)
        index.put(event_doc)
    except search.Error:
        logger.exception('Storing event %s in search index failed' % id)
def car_doc(self,counter):
    """Build a car-sourced flood-report search document from request params
    and append it to the module-level documents_list.

    Writes "<doc count>, <counter>" to the response. Numeric params (speeds,
    water level) are converted here, so malformed input raises ValueError.
    """
    latitude = self.request.get('lat')
    longitude = self.request.get('lon')
    car_speed = self.request.get('car_speed')
    wiper_speed = self.request.get('wiper_speed')
    car_parked = self.request.get('car_parked')
    water_level_area = self.request.get('water_level_area')
    car_breakdown_area = self.request.get('car_breakdown_area')
    my_document = search.Document(
        # Setting the doc_id is optional. If omitted, the search service
        # will create an identifier.
        doc_id = str(counter),
        fields=[
            search.TextField(name='supplier', value='car'),
            search.NumberField(name='speed', value=float(car_speed)),
            search.TextField(name='car_break_down', value=str(car_breakdown_area)),
            search.NumberField(name='wiper_speed', value=float(wiper_speed)),
            search.TextField(name='car_parked', value=str(car_parked)),
            search.NumberField(name='water_level_area', value=int(water_level_area)),
            search.GeoField(name='wlocation', value=search.GeoPoint(float(latitude),float(longitude)))
        ])
    documents_list.extend([my_document])
    s = '%s, %s' % (len(documents_list), counter)
    self.response.write(s)
def get(self, team_key):
    """Index the team's location in 'teamLocation', keyed by the team key id.

    Teams without a known lat/lon are skipped.
    """
    team = Team.get_by_id(team_key)
    lat_lon = team.get_lat_lon()
    if not lat_lon:
        return
    point = search.GeoPoint(lat_lon[0], lat_lon[1])
    document = search.Document(
        doc_id=team.key.id(),
        fields=[search.GeoField(name='location', value=point)])
    search.Index(name="teamLocation").put(document)
def get(self, event_key):
    """Index the event's year and location in 'eventLocation', keyed by the
    event key id. Events without a known lat/lon are skipped.
    """
    event = Event.get_by_id(event_key)
    lat_lon = event.get_lat_lon()
    if not lat_lon:
        return
    point = search.GeoPoint(lat_lon[0], lat_lon[1])
    document = search.Document(
        doc_id=event.key.id(),
        fields=[
            search.NumberField(name='year', value=event.year),
            search.GeoField(name='location', value=point),
        ])
    search.Index(name="eventLocation").put(document)
def _geoindex_doc(site_key):
    """Add the site's lat/lon to the GEOSEARCH_INDEX, keyed by its datastore key."""
    site = site_db.Site.get(site_key)
    point = search.GeoPoint(site.latitude, site.longitude)
    document = search.Document(
        doc_id=str(site.key()),
        fields=[search.GeoField(name='loc', value=point)])
    search.Index(name='GEOSEARCH_INDEX').put(document)
def index(self):
    """Add this entity's location to the 'caps' geo index, keyed by entity id."""
    point = search.GeoPoint(self.location.lat, self.location.lon)
    doc = search.Document(
        doc_id=str(self.key.id()),
        fields=[search.GeoField(name='location', value=point)])
    caps_index = search.Index(name='caps')
    caps_index.put(doc)
def update_event_location_index(cls, event):
    """Index the event's year and normalized location in 'eventLocation'.

    Events without a normalized lat/lng are skipped.
    """
    if not (event.normalized_location and event.normalized_location.lat_lng):
        return
    lat_lng = event.normalized_location.lat_lng
    fields = [
        search.NumberField(name='year', value=event.year),
        search.GeoField(name='location',
                        value=search.GeoPoint(lat_lng.lat, lat_lng.lon)),
    ]
    document = search.Document(doc_id=event.key.id(), fields=fields)
    search.Index(name="eventLocation").put(document)
def to_search_document(self):
    """Return this entity as a search Document keyed by its urlsafe key.

    Tags are joined into one space-separated text field; description is
    indexed as HTML; the location becomes a GeoField.
    """
    geo = search.GeoPoint(self.location.lat, self.location.lon)
    fields = [
        search.TextField(name='tags', value=" ".join(self.tags)),
        search.TextField(name='title', value=self.title),
        search.HtmlField(name='description', value=self.description),
        search.GeoField(name='location', value=geo),
    ]
    return search.Document(doc_id=self.key.urlsafe(), fields=fields)
def __init__(self, *args, **kwargs):
    """Build a map search document from required kwargs: latitude, longitude,
    building_name, level_name, floor_number, scale, path.

    Numeric kwargs are coerced here, so malformed values raise ValueError.
    """
    geo = search.GeoPoint(float(kwargs['latitude']), float(kwargs['longitude']))
    fields = [
        search.GeoField(name="location", value=geo),
        search.TextField(name="building_name", value=kwargs['building_name']),
        search.TextField(name="level_name", value=kwargs['level_name']),
        search.NumberField(name="floor_number", value=int(kwargs['floor_number'])),
        search.NumberField(name="scale", value=float(kwargs['scale'])),
        search.TextField(name="path", value=kwargs['path']),
    ]
    super(Map, self).__init__(fields=fields)
def __UpdateDocument(self):
    """Rebuild and store this user's search document: name, optional wca_id
    and city, and location converted from stored microdegrees to degrees.
    """
    fields = [search.TextField(name='name', value=self.name)]
    if self.wca_person:
        fields.append(
            search.TextField(name='wca_id', value=self.wca_person.id()))
    if self.city:
        fields.append(search.TextField(name='city', value=self.city))
    # NOTE(review): the truthiness check skips a coordinate equal to exactly
    # 0 (equator / prime meridian). If 0 is not the "unset" sentinel for
    # these microdegree values, this should be an 'is not None' check —
    # confirm how latitude/longitude are stored.
    if self.latitude and self.longitude:
        fields.append(
            search.GeoField(name='location', value=search.GeoPoint(
                self.latitude / 1000000., self.longitude / 1000000.)))
    document = search.Document(doc_id=str(self.key.id()), fields=fields)
    User.GetSearchIndex().put(document)
def postUpdateHook(cls, teams, updated_attr_list, is_new_list):
    """ To run after models have been updated.

    Args:
        teams: updated Team models.
        updated_attr_list: per-team list of updated attribute names (paired
            with teams by zip; the values are not otherwise used here).
        is_new_list: per-team "newly created" flags (unused here).
    """
    for (team, updated_attrs) in zip(teams, updated_attr_list):
        lat_lon = team.get_lat_lon()
        # Add team to lat/lon info to search index
        if lat_lon:
            fields = [
                search.GeoField(name='location', value=search.GeoPoint(
                    lat_lon[0], lat_lon[1]))
            ]
            search.Index(name="teamLocation").put(
                search.Document(doc_id=team.key.id(), fields=fields))
def loadStoreLocationData():
    """Create a search document per store location.

    Each store tuple is (doc_id, name, address, (lat, lon)). Search errors
    are logged but otherwise swallowed so one bad store does not abort the
    load.
    """
    for store in stores.stores:
        logging.info("s: %s", store)
        point = search.GeoPoint(store[3][0], store[3][1])
        fields = [
            search.TextField(name=docs.Store.STORE_NAME, value=store[1]),
            search.TextField(name=docs.Store.STORE_ADDRESS, value=store[2]),
            search.GeoField(name=docs.Store.STORE_LOCATION, value=point),
        ]
        doc = search.Document(doc_id=store[0], fields=fields)
        try:
            search.Index(config.STORE_INDEX_NAME).put(doc)
        except search.Error:
            logging.exception("Error adding document:")
def create_document():
    """Return a sample document exercising every search field type.

    The doc_id is fixed at 'PA6-5000'; omitting it would let the search
    service auto-assign an identifier.
    """
    fields = [
        search.TextField(name='customer', value='Joe Jackson'),
        search.HtmlField(name='comment', value='this is <em>marked up</em> text'),
        search.NumberField(name='number_of_visits', value=7),
        search.DateField(name='last_visit', value=datetime.now()),
        search.DateField(name='birthday', value=datetime(year=1960, month=6, day=19)),
        search.GeoField(name='home_location', value=search.GeoPoint(37.619, -122.37)),
    ]
    return search.Document(doc_id='PA6-5000', fields=fields)
def PutAndCache(site):
    """Persist a site, refresh its geosearch document, and memcache it.

    Returns the memcache.set result (True on success, False on failure).
    """
    site.compute_similarity_matching_fields()
    site.put()
    # geospatial index
    ## NOTE: THIS DOES NOT WORK ON DEV_APPENGINE
    # (as per https://code.google.com/p/googleappengine/issues/detail?id=7769 )
    search_doc = search.Document(doc_id=str(site.key()), fields=[
        search.GeoField(name='loc', value=search.GeoPoint(
            site.latitude, site.longitude))
    ])
    search.Index(name='GEOSEARCH_INDEX').put(search_doc)
    # Cache the entity together with its dict rendering under one key.
    return memcache.set(cache_prefix + str(site.key().id()),
                        (site, SiteToDict(site)), time=cache_time)
def test_to_seach_fields(self):
    """Exercise _to_search_fields across the supported property types:
    repeated string, ndb.Key, datetime, bool, and GeoPt.
    """
    # Test list field generation: each item yields a Text AND an Atom field.
    entity = TestEntity(test_repeatedprop=['item_1', 'item_2'])
    search_fields = entity._to_search_fields('test_repeatedprop', ['item_1', 'item_2'])
    expected_fields = [
        search.TextField(name='test_repeatedprop', value='item_1'),
        search.AtomField(name='test_repeatedprop', value='item_1'),
        search.TextField(name='test_repeatedprop', value='item_2'),
        search.AtomField(name='test_repeatedprop', value='item_2')
    ]
    self.assertEqual(expected_fields, search_fields)
    # Test ndb.Key field generation: keys are stored as urlsafe atoms.
    test_key = ndb.Key('Test', 1)
    entity = TestEntity(test_keyproperty=test_key)
    search_field = entity._to_search_fields('test_keyproperty', test_key)
    expected_field = [
        search.AtomField(name='test_keyproperty', value=test_key.urlsafe())
    ]
    self.assertEqual(expected_field, search_field)
    # Test datetime field generation.
    date = datetime.datetime(year=2017, month=1, day=5)
    entity = TestEntity(test_datetime=date)
    search_field = entity._to_search_fields('test_datetime', date)
    expected_field = [search.DateField(name='test_datetime', value=date)]
    self.assertEqual(expected_field, search_field)
    # Test boolean field generation: booleans become string atoms.
    entity = TestEntity(test_bool=True)
    search_field = entity._to_search_fields('test_bool', True)
    expected_field = [search.AtomField(name='test_bool', value='True')]
    self.assertEqual(expected_field, search_field)
    # Test geopt field generation: ndb.GeoPt becomes a search.GeoPoint.
    geopt = ndb.GeoPt('52.37, 4.88')
    entity = TestEntity(test_geopt=geopt)
    search_field = entity._to_search_fields('test_geopt', geopt)
    expected_field = [
        search.GeoField(name='test_geopt', value=search.GeoPoint(52.37, 4.88))
    ]
    self.assertEqual(expected_field, search_field)
def update_team_location_index(cls, team):
    """(Re)write a team's location documents: one per participation year
    (doc_id '<team>_<year>', with a year field) plus one any-year document
    keyed by the bare team key id. Teams without a normalized lat/lng are
    skipped.
    """
    if team.normalized_location and team.normalized_location.lat_lng:
        partial_fields = [
            search.GeoField(name='location', value=search.GeoPoint(
                team.normalized_location.lat_lng.lat,
                team.normalized_location.lat_lng.lon))
        ]
        # Teams by year
        for year in TeamParticipationQuery(team.key.id()).fetch():
            fields = partial_fields + [
                search.NumberField(name='year', value=year)
            ]
            search.Index(name=cls.TEAM_LOCATION_INDEX).put(
                search.Document(doc_id='{}_{}'.format(team.key.id(), year),
                                fields=fields))
        # Any year
        # NOTE(review): this doc_id is the raw key id while the per-year ids
        # are formatted strings; the Search API expects string doc_ids —
        # confirm team.key.id() is a string here.
        search.Index(name=cls.TEAM_LOCATION_INDEX).put(
            search.Document(doc_id=team.key.id(), fields=partial_fields))
def addTuple(request):
    """Parse a record from the request and index it; True on success.

    parseField is expected to return a sequence where [0]=ename, [1]=content,
    [2]=date, [3]=geo location — note the GeoField takes record[3] and the
    DateField record[2]. Uses the module-level 'index'. Returns False when
    parsing fails or the put raises search.Error (which is also logged).
    Documents are created with language='zh' (Chinese content).
    """
    record = parseField(request)
    if record:
        document = search.Document(fields=[
            search.TextField(name='ename', value=record[0]),
            search.TextField(name='content', value=record[1]),
            search.GeoField(name='loc', value=record[3]),
            search.DateField(name='date', value=record[2])
        ], language='zh')
        try:
            index.put(document)
            return True
        except search.Error:
            logging.exception('Put failed')
            return False
    else:
        return False
def create(cls, distrito, national_id, name, loc, address, picture_url):
    """ Creates a new casilla in the datastore and a document for the
        Search API and includes it on the CasillasIndex.
        Args:
            - distrito: String holding the name of the national id of the
            Distrito this Casilla belongs to
            - national_id String holding the national_id for the casilla
            - name: String holding the name of the casilla
            - loc: String holding Lat, Lon of the casilla
            - address: String holding the address of the location
            - picture_url: String holding the url of the picture for the
            location
        Returns:
            Key of new entity
        Raises:
            CasillaCreationError: on any failure while creating the entity
            or its search document (original cause is logged).
    """
    try:
        # Resolve the parent Distrito first so a bad distrito id fails
        # before anything is written.
        d = Distrito.get_from_datastore(distrito)
        distrito_key = d.key
        geo_pt = ndb.GeoPt(str(loc))
        o = Casilla(loc=geo_pt,
                    national_id=national_id,
                    distrito=distrito_key,
                    name=name,
                    address=address,
                    picture_url=picture_url)
        key = o.put()
        # Generate document for search API; the datastore key is stored as
        # a text field so search hits can be mapped back to the entity.
        l_doc = search.Document(fields=[
            search.TextField(name='key', value=str(key)),
            search.GeoField(name='loc',
                            value=search.GeoPoint(geo_pt.lat, geo_pt.lon))
        ])
        index = search.Index(name="CasillasIndex")
        index.put(l_doc)
    except Exception:
        logging.exception("[casilla] - Error in create Casilla", exc_info=True)
        raise CasillaCreationError('Error creating the casilla in platform')
    else:
        return key
def create_location(self, center_lat, center_lng, pl_key):
    """Create a geo search document for a private location.

    :param center_lat: float representing the center latitude
    :param center_lng: float representing the center longitude
    :param pl_key: key property of the private location
    :return: the result of the index put operation
    """
    center = search.GeoPoint(center_lat, center_lng)
    fk_string = self.convert_pl_key_to_string(pl_key)
    fields = [
        search.GeoField(name='center', value=center),
        search.AtomField(name='private_location_id', value=fk_string),
    ]
    document = search.Document(fields=fields)
    return search.Index(name=self.index_name).put(document)