def init_wiki_es(cls):
    """Lazily build the class-wide Elasticsearch client for the wiki index.

    Raises:
        WikiUndefinedException: when the "WIKI_ES" setting is not defined.
    """
    if cls._wiki_es is not None:
        # Client already initialized by a previous call: nothing to do.
        return
    wiki_es = settings.get("WIKI_ES")
    max_retries = settings.get("WIKI_ES_MAX_RETRIES")
    timeout = settings.get("WIKI_ES_TIMEOUT")
    if wiki_es is None:
        raise WikiUndefinedException
    cls._wiki_es = Elasticsearch(wiki_es, max_retries=max_retries, timeout=timeout)
def __init__(self):
    """Build the wiki Elasticsearch client, or leave it unset (es = None)
    when no "WIKI_ES" url is configured."""
    url = settings.get("WIKI_ES")
    max_retries = settings.get("WIKI_ES_MAX_RETRIES")
    timeout = settings.get("WIKI_ES_TIMEOUT")
    if url is None:
        # Wiki ES not configured: callers are expected to check `self.es`.
        self.es = None
    else:
        self.es = Elasticsearch(url, max_retries=max_retries, timeout=timeout)
def fetch_containers(cls, place):
    """Return RecyclingContainer objects measured around *place*.

    Returns an empty list when the place has no usable coordinates.
    Measure documents lacking the mandatory "percentage" field are
    skipped with a warning.
    """
    coord = place.get_coord()
    if not coord:
        return []
    lat, lon = coord.get("lat"), coord.get("lon")
    if lat is None or lon is None:
        return []

    hits = recycling_client.get_latest_measures(
        lat=lat, lon=lon, max_distance=MAX_DISTANCE_AROUND_POI
    )
    containers = []
    for hit in hits:
        doc = hit["_source"]
        if "percentage" not in doc:
            # Filling level is the whole point of the data: skip broken docs.
            logger.warning(
                "Recycling container data does not contain 'percentage' field",
                extra={"doc": doc},
            )
            continue
        containers.append(
            RecyclingContainer(
                type=doc.get("pav", {}).get("wasteType"),
                updated_at=doc.get(settings.get("RECYCLING_DATA_TIMESTAMP_FIELD")),
                filling_level=doc.get("percentage"),
                place_description=doc.get("metadata", {}).get("entity"),
            )
        )
    return containers
def __init__(self):
    """Enable this source only when a PagesJaunes API id is configured;
    the authenticated session is created only in that case."""
    super().__init__()
    self.enabled = bool(settings.get("PJ_API_ID"))
    if self.enabled:
        self.session = PjAuthSession(refresh_timeout=self.PJ_API_TIMEOUT)
def __init__(self):
    """Build the PagesJaunes Elasticsearch client when 'PJ_ES' is configured;
    otherwise mark the source as disabled."""
    url = settings.get('PJ_ES')
    if not url:
        self.enabled = False
    else:
        self.es = Elasticsearch(url, timeout=3.)
        self.enabled = True
def handle_option(id, request: Request):
    """Answer a CORS preflight request.

    Responds 405 unless CORS OPTIONS handling is explicitly enabled
    in the settings.
    """
    response = Response()
    if settings.get("CORS_OPTIONS_REQUESTS_ENABLED", False) is not True:
        response.status_code = 405
        return response
    requested_headers = request.headers.get("Access-Control-Request-Headers", "*")
    response.headers["Access-Control-Allow-Origin"] = "*"
    response.headers["Access-Control-Allow-Headers"] = requested_headers
    response.headers["Access-Control-Allow-Methods"] = "GET"
    return response
def handle_option(id, headers: Headers):
    """Answer a CORS preflight request.

    Responds 405 unless CORS OPTIONS handling is explicitly enabled
    in the settings.
    """
    if settings.get('CORS_OPTIONS_REQUESTS_ENABLED', False) is not True:
        return Response('', status_code=405)
    allow_headers = headers.get('Access-Control-Request-Headers', '*')
    cors_headers = {
        'Access-Control-Allow-Origin': '*',
        'Access-Control-Allow-Headers': allow_headers,
        'Access-Control-Allow-Methods': 'GET',
    }
    return Response('', headers=cors_headers)
class PjSource:
    """POI source backed by the PagesJaunes Elasticsearch index."""

    PLACE_ID_PREFIX = 'pj:'

    es_index = settings.get('PJ_ES_INDEX')
    es_query_template = settings.get('PJ_ES_QUERY_TEMPLATE')

    def __init__(self):
        """Enable the source only when a 'PJ_ES' url is configured."""
        url = settings.get('PJ_ES')
        if not url:
            self.enabled = False
        else:
            self.es = Elasticsearch(url, timeout=3.)
            self.enabled = True

    def bbox_is_covered(self, bbox):
        """Tell whether this source can serve *bbox* (it must lie in France)."""
        return self.enabled and bbox_inside_polygon(*bbox, poly=france_polygon)

    def get_places_bbox(self, raw_categories, bbox, size=10, query=''):
        """Run the stored search template over *bbox* and wrap hits as PjPOI."""
        left, bot, right, top = bbox
        params = {
            'query': query,
            'top_left_lat': top,
            'top_left_lon': left,
            'bottom_right_lat': bot,
            'bottom_right_lon': right,
        }
        if query:
            params['match_amenities'] = True
        if raw_categories:
            params['filter_category'] = True
            params['category'] = raw_categories
        result = self.es.search_template(
            index=self.es_index,
            body={'id': self.es_query_template, 'params': params},
            params={'size': size},
        )
        hits = result.get('hits', {}).get('hits', [])
        return [PjPOI(hit['_source']) for hit in hits]

    def get_place(self, id):
        """Fetch a single place by its prefixed id.

        Raises:
            NotFound: when no document matches the internal id.
        """
        internal_id = id.replace(self.PLACE_ID_PREFIX, '', 1)
        es_places = self.es.search(
            index=self.es_index,
            body={"filter": {"term": {"_id": internal_id}}},
        )
        hits = es_places.get('hits', {}).get('hits', [])
        if not hits:
            raise NotFound(detail={'message': f"place {id} not found"})
        if len(hits) > 1:
            # The id is expected to be unique; keep the first hit but trace it.
            logger.warning("Got multiple places with id %s", id)
        return PjPOI(hits[0]['_source'])
def kuzzle_url(self):
    """URL of the Kuzzle cluster, read from settings."""
    url = settings.get('KUZZLE_CLUSTER_URL')
    return url
def weather_key(self):
    """API key for the weather provider, read from settings."""
    key = settings.get("WEATHER_API_KEY")
    return key
def __init__(self):
    """Read Thumbr (thumbnail service) configuration from settings.

    A missing 'THUMBR_URLS' setting no longer crashes with
    `AttributeError: 'NoneType' object has no attribute 'split'`;
    like the salt below, it falls back to an empty value.
    """
    raw_urls = settings.get('THUMBR_URLS') or ''
    # Drop empty fragments so '' or 'a,,b' doesn't produce empty url entries.
    self._thumbr_urls = [url for url in raw_urls.split(',') if url]
    self._thumbr_enabled = settings.get('THUMBR_ENABLED')
    self._salt = settings.get('THUMBR_SALT') or ''
    if self._thumbr_enabled and not self._salt:
        logger.warning('Thumbr salt is empty')
def get_authorization_url(self):
    """Login endpoint of the recycling data server."""
    server_url = settings.get("RECYCLING_SERVER_URL")
    return f"{server_url}/_login/local"
class ApiPjSource:
    """POI source backed by the public PagesJaunes HTTP API."""

    PLACE_ID_NAMESPACE = "pj"
    PJ_RESULT_MAX_SIZE = 30
    PJ_INFO_API_URL = "https://api.pagesjaunes.fr/v1/pros"
    PJ_FIND_API_URL = "https://api.pagesjaunes.fr/v1/pros/search"
    PJ_API_TIMEOUT = float(settings.get("PJ_API_TIMEOUT"))

    def __init__(self):
        """Enable the source only when a PagesJaunes API id is configured."""
        super().__init__()
        self.enabled = bool(settings.get("PJ_API_ID"))
        if self.enabled:
            self.session = PjAuthSession(refresh_timeout=self.PJ_API_TIMEOUT)

    @staticmethod
    def format_where(bbox):
        """Encode a (left, bottom, right, top) bbox in the PJ "where" syntax.

        >>> ApiPjSource.format_where([2e-5,-0.5,2,0.5])
        'gZ0.000020,-0.500000,2.000000,0.500000'
        """
        left, bot, right, top = bbox
        return f"gZ{left:.6f},{bot:.6f},{right:.6f},{top:.6f}"

    def bbox_is_covered(self, bbox):
        """True when the source is enabled and *bbox* lies inside France."""
        return self.enabled and bbox_inside_polygon(*bbox, poly=france_polygon)

    def point_is_covered(self, point):
        """True when the source is enabled and *point* lies inside France."""
        return self.enabled and france_polygon.contains(point)

    def internal_id(self, poi_id):
        """Strip the "pj:" namespace prefix from a public poi id."""
        return poi_id.replace(f"{self.PLACE_ID_NAMESPACE}:", "", 1)

    def get_from_params(self, url, params=None) -> PjApiPOI:
        """GET *url* through the authenticated session and return decoded JSON.

        Raises for any non-2xx HTTP status.
        """
        response = self.session.get(url, params=params, timeout=self.PJ_API_TIMEOUT)
        response.raise_for_status()
        return response.json()

    def get_places_from_url(self, url, params=None, size=10, ignore_status=()):
        """Fetch up to *size* POIs from a search url, following pagination.

        HTTP/network errors yield an empty list; statuses listed in
        *ignore_status* are logged at debug level only.
        """
        try:
            search_response = pj_find.Response(**self.get_from_params(url, params))
        except requests.RequestException as exc:
            ignorable = (
                exc.response is not None and exc.response.status_code in ignore_status
            )
            if ignorable:
                logger.debug("Ignored pagesjaunes error: %s", exc)
            else:
                logger.error("Failed to query pagesjaunes: %s", exc)
            return []

        listings = search_response.search_results.listings[:size] or []
        pois = [PjApiPOI(listing) for listing in listings]

        # Recurse into the next page while the current one came up short.
        context = search_response.context
        if len(pois) < size and context and context.pages and context.pages.next_page_url:
            pois += self.get_places_from_url(
                context.pages.next_page_url,
                size=size - len(pois),
            )
        return pois

    def search_places(self, query: str, place_in_query: bool, size=10) -> List[PjApiPOI]:
        """Free-text search; " france" is appended when the query names no place."""
        if place_in_query:
            what = query
        else:
            what = f"{query} france"
        return self.get_places_from_url(
            self.PJ_FIND_API_URL, {"q": what}, size, ignore_status=(400,)
        )

    def get_places_bbox(
        self, categories: List[CategoryEnum], bbox, size=10, query=""
    ) -> List[PjApiPOI]:
        """Category search restricted to *bbox*, deduplicated by merchant id."""
        what = " ".join(c.pj_what() for c in categories)
        if query:
            what = (what + " " + query).strip()
        query_params = {
            "what": what,
            "where": self.format_where(bbox),
            # The API may return less than 'max' items per page, so let's use 'size + 5'
            # as a margin to avoid requesting a second page unnecessarily.
            "max": min(self.PJ_RESULT_MAX_SIZE, size + 5),
        }
        api_places = self.get_places_from_url(self.PJ_FIND_API_URL, query_params, size)

        # Remove null merchant ids
        # or duplicated merchant ids that may be returned in different pages
        seen_ids = set()
        deduplicated = []
        for poi in api_places:
            merchant_id = poi.data.merchant_id
            if merchant_id and merchant_id not in seen_ids:
                seen_ids.add(merchant_id)
                deduplicated.append(poi)
        return deduplicated

    def get_place(self, poi_id) -> PjApiPOI:
        """Fetch a single POI by id.

        Raises:
            PlaceNotFound: when the API answers 404 or 400.
        """
        url = path.join(self.PJ_INFO_API_URL, self.internal_id(poi_id))
        try:
            payload = self.get_from_params(url)
        except RequestsHTTPError as e:
            if e.response.status_code in (404, 400):
                logger.debug(
                    "Got HTTP %s from PagesJaunes API", e.response.status_code, exc_info=True
                )
                raise PlaceNotFound from e
            raise
        return PjApiPOI(pj_info.Response(**payload))
def get_authorization_params(self):
    """OAuth client-credentials payload for the PagesJaunes token endpoint."""
    client_id = settings.get("PJ_API_ID")
    client_secret = settings.get("PJ_API_SECRET")
    return {
        "grant_type": "client_credentials",
        "client_id": client_id,
        "client_secret": client_secret,
    }
class PjSource:
    """POI source backed by the PagesJaunes Elasticsearch index."""

    PLACE_ID_PREFIX = "pj:"

    es_index = settings.get("PJ_ES_INDEX")
    es_query_template = settings.get("PJ_ES_QUERY_TEMPLATE")

    def __init__(self):
        """Enable the source only when a 'PJ_ES' url is configured."""
        url = settings.get("PJ_ES")
        if not url:
            self.enabled = False
        else:
            self.es = Elasticsearch(url, timeout=3.0)
            self.enabled = True

    def bbox_is_covered(self, bbox):
        """Tell whether this source can serve *bbox* (it must lie in France)."""
        return self.enabled and bbox_inside_polygon(*bbox, poly=france_polygon)

    def get_places_bbox(self, raw_categories, bbox, size=10, query=""):
        """Run the stored search template over *bbox* and wrap hits as PjPOI."""
        left, bot, right, top = bbox
        params = {
            "query": query,
            "top_left_lat": top,
            "top_left_lon": left,
            "bottom_right_lat": bot,
            "bottom_right_lon": right,
        }
        if query:
            params["match_amenities"] = True
        if raw_categories:
            params["filter_category"] = True
            params["category"] = raw_categories
        result = self.es.search_template(
            index=self.es_index,
            body={"id": self.es_query_template, "params": params},
            params={"size": size},
        )
        hits = result.get("hits", {}).get("hits", [])
        return [PjPOI(hit["_source"]) for hit in hits]

    def get_place(self, id):
        """Fetch a single place by its prefixed id.

        Raises:
            HTTPException: 404 when no document matches the internal id.
        """
        internal_id = id.replace(self.PLACE_ID_PREFIX, "", 1)
        es_places = self.es.search(
            index=self.es_index,
            body={"filter": {"term": {"_id": internal_id}}},
        )
        hits = es_places.get("hits", {}).get("hits", [])
        if not hits:
            raise HTTPException(status_code=404, detail=f"place {id} not found")
        if len(hits) > 1:
            # The id is expected to be unique; keep the first hit but trace it.
            logger.warning("Got multiple places with id %s", id)
        return PjPOI(hits[0]["_source"])
def __init__(self):
    """Read Thumbr (thumbnail service) configuration from settings.

    A missing 'THUMBR_URLS' setting no longer crashes with
    `AttributeError: 'NoneType' object has no attribute 'split'`;
    like the salt below, it falls back to an empty value.
    """
    raw_urls = settings.get("THUMBR_URLS") or ""
    # Drop empty fragments so "" or "a,,b" doesn't produce empty url entries.
    self._thumbr_urls = [url for url in raw_urls.split(",") if url]
    self._thumbr_enabled = settings.get("THUMBR_ENABLED")
    self._salt = settings.get("THUMBR_SALT") or ""
    if self._thumbr_enabled and not self._salt:
        logger.warning("Thumbr salt is empty")
import hmac from urllib.parse import quote import httpx from fastapi import HTTPException, Query from fastapi.responses import RedirectResponse from idunn import settings client = httpx.AsyncClient() base_url = settings.get("BASE_URL") secret = settings.get("SECRET").encode() def resolve_url(url: str) -> str: """ Idunn's URL that can be provided to redirect to the same page as the input URL would. """ return base_url + f"v1/redirect?url={quote(url, safe='')}&hash={hash_url(url)}" def hash_url(url: str) -> str: """ Hash of the URL that the client must provide in order to avoid abusive use of the endpoint. """ return hmac.HMAC(key=secret, msg=url.encode(), digestmod="sha256").hexdigest() async def follow_redirection(
def base_url(self):
    """Base URL of the recycling data server, read from settings."""
    server_url = settings.get("RECYCLING_SERVER_URL")
    return server_url
def weather_url(self):
    """Endpoint of the weather provider API, read from settings."""
    url = settings.get("WEATHER_API_URL")
    return url