def questions_list(self, request):
    """Gets a list of questions within the radius specified by the coordinates.

    Radius is in km.

    Raises:
        endpoints.UnauthorizedException: if the caller is not authenticated.
        endpoints.BadRequestException: if lat, lon or radius_km is missing/zero.
        endpoints.InternalServerErrorException: if no search indexes exist.
    """
    # Authenticate before touching request parameters so unauthenticated
    # callers cannot trigger parameter-parsing errors first.
    if not is_user_authenticated():
        raise endpoints.UnauthorizedException
    query_lat = float(request.lat) if request.lat else 0
    query_lon = float(request.lon) if request.lon else 0
    # The search API measures distance in metres; the API accepts km.
    query_radius = float(request.radius_km * 1000.0) if request.radius_km else 0
    if query_lat == 0 or query_lon == 0 or query_radius == 0:
        raise endpoints.BadRequestException("One of the required parameters (lat, lon, radius) is undefined")
    if len(search.get_indexes()) == 0:
        logging.error("{}: no indices exists. must investigate why".format(TAG))
        raise endpoints.InternalServerErrorException
    # FIXME: this should only return 100 results at a time
    query_string = "distance(location, geopoint(%f, %f)) <= %f" % (query_lat, query_lon, query_radius)
    index = search.Index(name=ALL_QUESTIONS_INDEX)
    search_results = index.search(query_string)
    search_questions = []
    for search_item in search_results:
        retrieved_question = QuestionModel.get_by_id(long(search_item.doc_id))
        # Condition accounts for the case when the search index is stale since
        # it contains references to questions no longer in the database.
        if retrieved_question is None:
            logging.info("{}: {} index has extra documents".format(TAG, ALL_QUESTIONS_INDEX))
            # BUG FIX: the original logged `retrieved_question`, which is
            # always None on this branch; log the stale document id instead.
            logging.debug("Search document %s was not found in Question model", search_item.doc_id)
            prune_question_search_index()
        else:
            search_questions.append(retrieved_question)
    returned_questions = [self.create_api_response_question(question) for question in search_questions]
    return QuestionMessageCollection(questions=returned_questions)
def get_indexes(namespace=None, index_name=None, create_new=True):
    """Return the search indexes for *namespace*.

    When *index_name* is given, only indexes with that prefix are looked up.
    If nothing is found and *create_new* is true, a single freshly-named
    index ('0001' or '<index_name>_0001') is returned instead.
    Returns None when no namespace is supplied.
    """
    if not namespace:
        return None
    # Both lookup flavours share the "create a default on miss" logic;
    # only the query arguments and the fallback name differ.
    if index_name is None:
        found = search.get_indexes(namespace=namespace).results
        fallback_name = '0001'
    else:
        found = search.get_indexes(index_name_prefix=index_name, namespace=namespace).results
        fallback_name = index_name + '_0001'
    if not found and create_new:
        return [search.Index(fallback_name, namespace=namespace)]
    return found
def get_index_info():
    """Log every search index (name, schema, and contained documents) and
    return a list of {'name', 'schema'} dicts describing them."""
    summaries = []
    for idx in search.get_indexes(fetch_schema=True):
        logging.info("index %s", idx.name)
        logging.info("schema: %s", idx.schema)
        summaries.append({'name': idx.name, 'schema': idx.schema})
        # Walk the documents of this index and log their location/id.
        for doc in search.Index(name=idx.name).get_range():
            logging.info('%s:%s', doc['scene_location'], doc.doc_id)
    return summaries
def matchManifest(manifest_key): for index in search.get_indexes(fetch_schema=True, namespace="text"): #ind = search.Index(name=index.name, namespace="text") print "INDEX"+index.name data = models.Manifest.query(models.Manifest.key==manifest_key).get() query_string = data.voyage results = index.search(query_string) if results: data.voyage_link=index.name data.put() return index
def search(self, query_string):
    """Run *query_string* against every search index.

    Returns (results, index_name) for the first index that yields at least
    one document; implicitly returns None when nothing matches or every
    search fails.
    """
    for idx_meta in search.get_indexes(fetch_schema=True):
        idx = search.Index(name=idx_meta.name)
        try:
            found = idx.search(query_string)
            # A single iteration step is enough to know the result set is
            # non-empty; hand the whole set back to the caller.
            for _ in found:
                return found, idx.name
        except search.Error:
            logging.exception("Search failed")
def search(self, query_string):
    """Query each search index in turn.

    Returns (results, index_name) from the first index whose result set is
    non-empty; returns None implicitly if no index produces a hit or the
    searches all fail.
    """
    for meta in search.get_indexes(fetch_schema=True):
        candidate = search.Index(name=meta.name)
        try:
            hits = candidate.search(query_string)
            # Stepping once through the iterator proves there is a hit.
            for _ in hits:
                return hits, candidate.name
        except search.Error:
            logging.exception('Search failed')
def get(self):
    """Render a paged listing of the search indexes in a namespace."""
    offset = self.request.get_range('start', min_value=0, default=0)
    ns = self.request.get('namespace', default_value=None)
    page_size = self._MAX_RESULTS_PER_PAGE
    # Ask for one extra row so we can tell whether another page exists.
    resp = search.get_indexes(offset=offset, limit=page_size + 1, namespace=ns or '')
    values = {
        'namespace': ns,
        'has_namespace': ns is not None,
        'indexes': resp.results[:page_size],
    }
    self._handle_paging(offset, len(resp.results) > page_size, values)
    self.response.write(self.render('search.html', values))
def get(self):
    """Render a paged listing of search indexes (namespace shown as '')."""
    first = self.request.get_range('start', min_value=0, default=0)
    ns = self.request.get('namespace', default_value=None)
    limit = self._MAX_RESULTS_PER_PAGE
    # Fetch one row beyond the page to detect whether more pages follow.
    resp = search.get_indexes(offset=first, limit=limit + 1, namespace=ns or '')
    more_available = len(resp.results) > limit
    values = {
        'namespace': ns or '',
        'has_namespace': ns is not None,
        'indexes': resp.results[:limit],
    }
    self._handle_paging(first, more_available, values)
    self.response.write(self.render('search.html', values))
def get(self):
    """List all indexes for all namespaces."""
    # Doesn't show indexes created by indexer in other namespaces.
    # For that, you have to pass the namespace parameter.
    # Simplified from the original map(self.request.get, ['namespace'])[0],
    # which is an indirect way of making a single call.
    ns = self.request.get('namespace')
    response = search.get_indexes(namespace=ns, fetch_schema=True)
    # Lazy %-args keep the formatting out of the hot path when INFO is off.
    logging.info('RESPONSE %s', response.results)
    if response.results:
        # join() instead of quadratic string += concatenation.
        parts = []
        for index in response.results:
            parts.append('Name: %s<br>' % index.name)
            parts.append('Namespace: %s<br>' % index.namespace)
            parts.append('Storage usage: %s<br>' % index.storage_usage)
            parts.append('Storage limit: %s<br>' % index.storage_limit)
            parts.append('Schema: %s<p>' % index.schema)
        body = ''.join(parts)
    else:
        body = 'No indexes found.'
    self.response.out.write(body)
def get_questions():
    """Returns the questions in JSON format.

    Query parameters: lat, lon (degrees) and radius (km). When all three
    are absent/zero, every question is returned; otherwise only questions
    within the radius of (lat, lon).
    """
    lat = request.args.get('lat', 0, type=float)
    lon = request.args.get('lon', 0, type=float)
    radius = request.args.get('radius', 0, type=float)
    if lat == 0 and lon == 0 and radius == 0:
        questions = Question.all()
    else:
        # The search API measures distance in metres.
        radius_in_metres = float(radius) * 1000.0
        q = "distance(location, geopoint(%f, %f)) <= %f" % (float(lat), float(lon), float(radius_in_metres))
        # Build the index if not already done. len() instead of the
        # unidiomatic direct __len__() call.
        if len(search.get_indexes()) == 0:
            rebuild_question_search_index()
        index = search.Index(name="myQuestions")
        results = index.search(q)
        # TODO: replace this with a proper .query
        questions = [Question.get_by_id(long(r.doc_id)) for r in results]
        questions = filter(None, questions)  # filter deleted questions
        if questions:
            questions = sorted(questions, key=lambda question: question.timestamp)
    dataset = []
    for question in questions:
        # This conversion can be performed using a custom JsonEncoder implementation,
        # but I didn't have much success. Some good links below -
        # http://stackoverflow.com/questions/1531501/json-serialization-of-google-app-engine-models
        # https://gist.github.com/erichiggins/8969259
        # https://gist.github.com/bengrunfeld/062d0d8360667c47bc5b
        details = {'key': question.key.id(),
                   'added_by': question.added_by.nickname(),
                   'content': question.content,
                   'timestamp': question.timestamp.strftime('%m-%d-%y'),
                   'location': {'lat': question.location.lat, 'lon': question.location.lon}
                   }
        dataset.append(details)
    return jsonify(result=dataset)
def list_questions():
    """Lists all questions posted on the site - available to anonymous users.

    With GET parameters lat, lon and r (radius in km), only questions within
    that radius are listed and the search form is pre-filled; otherwise all
    questions are shown.
    """
    form = QuestionForm()
    search_form = QuestionSearchForm()
    user = users.get_current_user()
    login_url = users.create_login_url(url_for('home'))
    latitude = request.args.get('lat')
    longitude = request.args.get('lon')
    radius = request.args.get('r')
    # If searching w/ params (GET)
    if request.method == 'GET' and all(v is not None for v in (latitude, longitude, radius)):
        # The search API measures distance in metres.
        radius_in_metres = float(radius) * 1000.0
        q = "distance(location, geopoint(%f, %f)) <= %f" % (float(latitude), float(longitude), float(radius_in_metres))
        # Build the index if not already done. len() instead of the
        # unidiomatic direct __len__() call.
        if len(search.get_indexes()) == 0:
            rebuild_question_search_index()
        index = search.Index(name="myQuestions")
        results = index.search(q)
        # TODO: replace this with a proper .query
        questions = [Question.get_by_id(long(r.doc_id)) for r in results]
        questions = filter(None, questions)  # filter deleted questions
        if questions:
            questions = sorted(questions, key=lambda question: question.timestamp)
        # Echo the search parameters back into the form.
        search_form.latitude.data = float(latitude)
        search_form.longitude.data = float(longitude)
        search_form.distance_in_km.data = radius_in_metres / 1000.0
    else:
        questions = Question.all()
    channel_token = None
    if user:
        channel_token = safe_channel_create(user_channel_id(user))
    return render_template('list_questions.html', questions=questions, form=form, user=user,
                           login_url=login_url, search_form=search_form, channel_token=channel_token)
def logIndexes():
    """Log the name and schema of every search index."""
    for idx in search.get_indexes(fetch_schema=True):
        logging.info("index %s", idx.name)
        logging.info("schema: %s", idx.schema)
def create_document(cls, params):
    """Creates a search document for a specific mechanic and puts it into INDEX_NAME.

    Args:
        params: dict with keys pid, name, street, city, province, pcode,
            website, brands (list), num_reviews, avg_rating and optionally
            geo_lat / geo_long.
    """
    # ALEXK split the location into pair of coordinates and create the geopoint
    # TODO make more proper handling of the undefined location (maybe block in GUI or not add to the document)
    # Default to downtown Toronto; temporary solution since such case must be blocked in GUI.
    businessLatitude = float(43.6519186)
    businessLongitude = float(-79.3824024)
    # BUG FIX: the original condition was `"geo_lat" and "geo_long" in params`,
    # which only tested `"geo_long" in params` because the string literal
    # "geo_lat" is always truthy. Test both keys explicitly.
    if "geo_lat" in params and "geo_long" in params:
        if params["geo_lat"] == "" or params["geo_lat"] == "undefined":
            logging.info("location as not defined as search criteria, setting to Toronto")
            businessLatitude = float(43.6519186)
            businessLongitude = float(-79.3824024)
        else:
            # NOTE(review): geo_long is not validated like geo_lat is; an
            # "undefined" geo_long would raise ValueError here — confirm
            # whether the GUI guarantees it is numeric.
            businessLatitude = float(params["geo_lat"])
            businessLongitude = float(params["geo_long"])
    geopoint = search.GeoPoint(businessLatitude, businessLongitude)
    # construct the address from the separated fields
    address = params["street"] + ", " + params["city"] + ", " + params["pcode"]
    # Comma-separated brand list; the original's trailing comma is preserved.
    brands = ""
    if params["brands"]:
        brands = ",".join(params["brands"]) + ","
    if params["num_reviews"] == "":
        logging.info("number of reviews is not defined")
        num_reviews_var = random.randint(0, 100)  # random numbers for testing purposes
    else:
        num_reviews_var = int(params["num_reviews"])
    if params["avg_rating"] == "":
        logging.info("average rating is not defined")
        avg_rating_var = round(random.uniform(1, 10), 2)  # random numbers for testing purposes
    else:
        avg_rating_var = float(params["avg_rating"])
    document = search.Document(
        fields=[
            search.TextField(name="pid", value=params["pid"]),
            search.TextField(name="name", value=params["name"]),
            search.TextField(name="address", value=address),
            search.TextField(name="street", value=params["street"]),
            search.TextField(name="city", value=params["city"]),
            search.TextField(name="province", value=params["province"]),
            search.TextField(name="pcode", value=params["pcode"]),
            search.TextField(name="website", value=params["website"]),
            search.GeoField(name="location", value=geopoint),
            search.TextField(name="brands", value=brands),
            search.DateField(name="date", value=datetime.now().date()),
            search.NumberField(name="num_reviews", value=num_reviews_var),
            search.NumberField(name="avg_rating", value=avg_rating_var),
        ]
    )
    try:
        search.Index(name=INDEX_NAME).put(document)
    except search.Error:
        logging.exception("Put failed")
    # for debug purpose only
    for index in search.get_indexes(fetch_schema=True):
        logging.info("index %s", index.name)
        logging.info("schema: %s", index.schema)
def test(self, doc_id):
    """Debug helper: print all indexes and the document *doc_id*, then run a sample search."""
    indexes = search.get_indexes()
    print(indexes)
    document = self.get_document_by_id(doc_id)
    print(document)
    self.search("HI")
def updateContainerStatus(manifest_key):
    """Verify each ManifestDetail of *manifest_key* against the search indexes
    and the original spreadsheet, setting the *_status flags on matches.

    For every search index in the "text" namespace, each detail's container
    number, code, temperature and vents are looked up via index_search()
    and cross-checked against the cells of the xlrd workbook behind the
    matching LinktoManifest blob. Every confirmed field flips the
    corresponding *_status flag to True and persists the entity.

    NOTE(review): the final `return ind` returns whatever the last
    index_search(container_number) call produced — unbound if no detail was
    ever processed; confirm callers tolerate that.
    """
    for index in search.get_indexes(fetch_schema=True, namespace="text"):
        query = models.ManifestDetail.query(models.ManifestDetail.manifest == manifest_key)
        detail = query.fetch(query.count())
        print "FINDME "+index.name
        # Open the workbook whose stripped filename matches this index;
        # fall back to the first LinktoManifest entity on any failure
        # (bare except is deliberate best-effort — TODO narrow it).
        try:
            findblob_from_index = models.LinktoManifest.query(models.LinktoManifest.filename_stripped == index.name).fetch(1)[0]
            wb = xlrd.open_workbook(file_contents=blobstore.BlobReader(findblob_from_index.blob).read())
        except:
            findblob_from_index = models.LinktoManifest.query().fetch(1)[0]
            wb = xlrd.open_workbook(file_contents=blobstore.BlobReader(findblob_from_index.blob).read())
        for dat in detail:
            # print "FINDME2 "+blobstore.BlobInfo.get(dat.manifest.get().blob).filename
            # Locate the container number in the index; ind presumably
            # carries (…, row, col, sheet_name) at positions 2-4 — the code
            # below relies on that layout (TODO confirm index_search contract).
            ind = index_search(dat.container_number, index.name)
            if (ind):
                dat.container_status=True
                dat.sheet = ind[4]
                dat.put()
                # print dat.key
                # print dat.sheet
                # The detail's code must appear on the same spreadsheet row.
                code = index_search("row=%i AND text=%s" % (ind[2], dat.code), index.name)
                if (code):
                    dat.code_status=True
                    dat.put()
                #vents = index_search("row=%i AND text=%s" % (ind[2], dat.vents), index.name) #find vents with in statement
                # Read the vents and set-temperature cells two/three columns
                # to the right of the container-number cell.
                sh = wb.sheet_by_name(ind[4])
                row = sh.row(int(ind[2]))
                cell = sh.cell_value(int(ind[2]), (int(ind[3]))+2)
                set_temp = sh.cell_value(int(ind[2]), (int(ind[3]))+3)
                # Compare numerically first (digits extracted from both sides),
                # then fall back to exact string equality.
                num = map(int, re.findall(r'\d+', dat.vents))
                cell_num = map(int, re.findall(r'\d+', cell))
                temp_num = map(int, re.findall(r'\d+', dat.temp))
                set_num = map(int, re.findall(r'\d+', str(set_temp)))
                if (temp_num == set_num):
                    dat.temp_status=True
                    dat.put()
                elif (set_temp == dat.temp):
                    dat.temp_status=True
                    dat.put()
                # print "mapped: "+str(num)
                # print "cell: "+str(cell)
                # print "strin1"+string1
                if (num == cell_num):
                    dat.vents_status=True
                    dat.put()
                elif (cell == dat.vents):
                    dat.vents_status=True
                    dat.put()
                # Last resort: the temperature text itself indexed on the row.
                temp = index_search("row=%i AND text=%s" % (ind[2], dat.temp), index.name)
                if (temp):
                    dat.temp_status=True
                    dat.put()
    return ind