def set_colors(*colordef):
    l = list(colordef)
    defs = {}
    while len(l) > 0:
        key_target = l.pop(0).split(",")
        col = l.pop(0)
        for tgt in key_target:
            if tgt == '--':
                # special: applies this color to all keys not yet assigned
                for keycode in keys.others(defs.keys()):
                    defs[keycode] = colors.get(col)
                continue
            for keycode in keys.get(tgt):
                if keycode is None:
                    continue
                defs[keycode] = colors.get(col)
    if len(defs) == 0:
        raise Exception("could not determine any color definitions")
    COLOR_PAYLOAD = []
    for keycode, color in defs.items():
        COLOR_PAYLOAD += [keycode]
        COLOR_PAYLOAD += color
    with device.Device() as dev:
        dev.send_colors(COLOR_PAYLOAD)

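A hypothetical invocation of the helper above (the key names and colors are assumptions, not from the original source): arguments alternate between a comma-separated list of key targets and a color name, with '--' acting as a catch-all for keys not yet assigned.

# Hypothetical usage: color the esc and f1 keys red, then paint every
# remaining key white via the '--' catch-all target.
set_colors("esc,f1", "red", "--", "white")
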
def get_pickers(self, withscores=False):
    "Return all readers that picked the link."
    return r.zrange(keys.get(LINK_PICKERS, self.id), 0, -1, desc=True,
                    withscores=withscores)

def handle(self):
    logging.info('Mailchimp webhook: %s', self.request)
    if self.request.get('secret_key') != keys.get('mailchimp_webhook_secret'):
        self.response.set_status(403)
        self.response.out.write('Forbidden: bad secret')
        return ''
    # email update
    if self.request.get('type') == 'upemail':
        if self.request.get('data[list_id]') == mailchimp_api.LIST_ID:
            old_email = self.request.get('data[old_email]')
            new_email = self.request.get('data[new_email]')
            email_users = users.User.query(users.User.email == old_email).fetch(100)
            logging.info('Found %s users for old_email %s, updating to %s',
                         len(email_users), old_email, new_email)
            for user in email_users:
                user.email = new_email
                user.put()
        else:
            logging.error('Unexpected webhook list_id: %s',
                          self.request.get('data[list_id]'))
    else:
        logging.error('Unexpected webhook type: %s', self.request.get('type'))

def handle(self):
    logging.info('Mandrill webhook: %s', self.request)
    if self.request.get('secret_key') != keys.get('mandrill_webhook_secret'):
        logging.error('Got mandrill webhook with bad secret: %s',
                      self.request.get('secret_key'))
        self.response.set_status(403)
        self.response.out.write('Forbidden: bad secret')
        return
    if not self.request.get('mandrill_events'):
        logging.error('Got mandrill webhook without events')
        self.response.set_status(400)
        self.response.out.write('Bad Request: no mandrill_events')
        return
    mandrill_events = json.loads(self.request.get('mandrill_events'))
    logging.info('Processing %s webhook events', len(mandrill_events))
    for event in mandrill_events:
        if event['event'] in ['hard_bounce', 'spam', 'unsub', 'reject']:
            metadata = event['msg']['metadata']
            user_id = metadata['user_id']
            user = users.User.get_by_id(user_id)
            if metadata['email_type'] == 'weekly':
                user.send_email = False
                logging.info('Unsubscribing user %s (%s) in response to Mandrill request',
                             user.fb_uid, user.full_name)
                user.put()

def init_memcache():
    client = pylibmc.Client([keys.get('redis_memcache_endpoint')],
                            binary=True,
                            username='******',
                            password=keys.get('redis_memcache_password'))
    # Non-existent functions necessary to adhere to the memcache API
    # expected by gae_memcache's setup_client()
    client.set_servers = None
    client.forget_dead_hosts = None
    client.debuglog = None
    client.replace_multi = None
    client.offset_multi = None
    if gae_memcache:
        # Try to use this redis memcache for all GAE stuff seamlessly
        gae_memcache.setup_client(client)
    return client

def get_picks(self, count=config.PICKS_COUNT, withscores=False):
    "Return latest reader picks as links."
    return r.zrange(keys.get(READER_PICKS, self.id), 0, count - 1, desc=True,
                    withscores=withscores)

def post(self):
    if self.json_body:
        event_id = self.json_body.get('event_id')
        language = self.json_body.get('language') or self.json_body.get('locale')
        if not event_id:
            self.add_error('Need to pass event_id argument')
        if not language:
            self.add_error('Need to pass language/locale argument')
    else:
        self.add_error('Need to pass a post body of json params')
    # Remap our traditional/simplified chinese languages
    if language == 'zh':
        language = 'zh-TW'
    elif language == 'zh-Hant':
        language = 'zh-TW'
    elif language == 'zh-Hans':
        language = 'zh-CN'
    self.errors_are_fatal()
    db_event = eventdata.DBEvent.get_by_id(event_id)
    service = build('translate', 'v2', developerKey=keys.get('google_server_key'))
    result = service.translations().list(
        target=language,
        format='text',
        q=[db_event.name or '', db_event.description or '']
    ).execute()
    translations = [x['translatedText'] for x in result['translations']]
    self.write_json_success({'name': translations[0], 'description': translations[1]})

def post(self):
    if self.json_body:
        event_id = self.json_body.get('event_id')
        language = self.json_body.get('language')
        if not event_id:
            self.add_error('Need to pass event_id argument')
        if not language:
            self.add_error('Need to pass language argument')
    else:
        self.add_error('Need to pass a post body of json params')
    self.errors_are_fatal()
    fb_event = self.fbl.get(fb_api.LookupEvent, event_id, allow_cache=False)
    service = build('translate', 'v2', developerKey=keys.get('google_server_key'))
    result = service.translations().list(
        target=language,
        format='text',
        q=[
            fb_event['info'].get('name', ''),
            fb_event['info'].get('description', '')
        ]).execute()
    translations = [x['translatedText'] for x in result['translations']]
    self.write_json_success({
        'name': translations[0],
        'description': translations[1]
    })

def make_request(server, path, params):
    new_params = params.copy()
    new_params['scrapinghub_key'] = keys.get('scrapinghub_key')
    data = json.dumps(new_params)
    quoted_data = urllib.quote_plus(data)
    f = urllib2.urlopen('http://%s/%s' % (server, path), quoted_data)
    result = f.read()
    return result

def get(self):
    if self.request.get('hub.mode') == 'subscribe':
        if self.request.get('hub.verify_token') != keys.get('fb_webhook_verify_token'):
            logging.critical('Received invalid verify_token: %s',
                             self.request.get('hub.verify_token'))
            return
        self.response.out.write(self.request.get('hub.challenge'))
    else:
        logging.critical('Unknown hub.mode received: %s', self.request.get('hub.mode'))

def get_fellows(self, count=config.FELLOWS_COUNT, withscores=False):
    "Return fellows as readers based on recorded picks."
    # self.set_fellows()  # real time
    return r.zrange(keys.get(READER_FELLOWS, self.id), 0, count - 1, desc=True,
                    withscores=withscores)

def get_edition(self, count=config.NEWS_COUNT, withscores=False):
    "Return news edition as links based on fellows."
    # self.set_edition()  # real time
    return r.zrange(keys.get(READER_EDITION, self.id), 0, count - 1, desc=True,
                    withscores=withscores)

def post(self):
    if self.json_body['scrapinghub_key'] != keys.get('scrapinghub_key'):
        self.response.status = 403
        return
    for item in self.json_body['items']:
        logging.info('Processing %s', item)
        process_uploaded_item(item)
    process_upload_finalization(self.json_body['studio_name'])
    self.response.status = 200

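For context, a minimal client-side sketch of the upload protocol the handler above expects: the JSON body carries the same shared 'scrapinghub_key' secret that the server compares against keys.get(). The endpoint path and helper name are assumptions, not from the original source.

# Hypothetical uploader matching the handler above; '/upload' is an assumed path.
import json
import urllib2

def upload_items(host, items, studio_name):
    payload = json.dumps({
        'scrapinghub_key': keys.get('scrapinghub_key'),  # shared secret checked server-side
        'items': items,
        'studio_name': studio_name,
    })
    req = urllib2.Request('http://%s/upload' % host, payload,
                          {'Content-Type': 'application/json'})
    return urllib2.urlopen(req).read()
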
def verify_decryption_password(appKey, password):
    crypto = None
    prk_bytes = keys.get(appKey, password)
    prk_bytes = base64.b64decode(to_bytes(prk_bytes))
    crypto = serialization.load_pem_private_key(
        prk_bytes, password=None, backend=default_backend()
    )
    if crypto is not None:
        return crypto
    else:
        sys.exit()

def post(self):
    if self.json_body['private_key'] != keys.get('private_key'):
        self.response.status = 403
        return
    lookup_type = self.json_body['lookup_type']
    lookup_kwargs = self.json_body['lookup']
    if lookup_type == 'places':
        result = gmaps_api.places_api.get_json(**lookup_kwargs)
    elif lookup_type == 'geocode':
        result = gmaps_api.geocode_api.get_json(**lookup_kwargs)
    else:
        self.response.status = 404
        return
    self.response.write(json.dumps(result))

def get_json(self, **kwargs):
    post_data = json.dumps({
        'private_key': keys.get('private_key'),
        'lookup_type': self.lookup_type,
        'lookup': kwargs,
    })
    url = 'https://www.dancedeets.com/_gmaps_api'
    file = urllib.urlopen(url, post_data)
    try:
        response = file.read()
    finally:
        file.close()
    if file.getcode() == 200:
        logging.info('Returning result from prod server: %s', post_data)
        return json.loads(response)
    else:
        return self.backend.get_json(**kwargs)

def post(self):
    if self.json_body['scrapinghub_key'] != keys.get('scrapinghub_key'):
        self.response.status = 403
        return
    fb_uid = '701004'
    user = users.User.get_by_id(fb_uid)
    fbl = fb_api.FBLookup(fb_uid, user.fb_access_token)
    for event_url in self.json_body['events']:
        logging.info('Adding %s', event_url)
        event_id = urls.get_event_id_from_url(event_url)
        if not event_id:
            logging.warning('Not a valid fb event for adding: %s', event_url)
            continue
        fb_event = fbl.get(fb_api.LookupEvent, event_id, allow_cache=False)
        try:
            add_entities.add_update_event(fb_event, fbl, creating_method=eventdata.CM_AUTO_WEB)
        except add_entities.AddEventException:
            logging.exception('Error adding event %s', event_id)

def real_notify(user, event_id, extra_data):
    if not can_notify(user):
        logging.info("No android GCM tokens.")
        return
    # We don't pass debug=True, because the gcm.py library keeps adding more
    # loggers ad infinitum. Instead we call GCM.enable_logging() once at the top level.
    g = gcm.GCM(keys.get('google_server_key'))
    tokens = user.device_tokens('android')
    data = {
        # Important data for clientside lookups
        'event_id': event_id,
    }
    data.update(extra_data)
    response = g.json_request(registration_ids=tokens, data=data)
    changed_tokens = False
    if 'errors' in response:
        for error, reg_ids in response['errors'].iteritems():
            if error in ('NotRegistered', 'InvalidRegistration'):
                for reg_id in reg_ids:
                    tokens.remove(reg_id)
                changed_tokens = True
            else:
                logging.error("Error for user %s with event %s: %s",
                              user.fb_uid, event_id, error)
    if 'canonical' in response:
        for reg_id, canonical_id in response['canonical'].iteritems():
            tokens.remove(reg_id)
            tokens.append(canonical_id)
            changed_tokens = True
    if changed_tokens:
        user.put()
    logging.info("User %s (%s), event %s: sent notification!",
                 user.fb_uid, user.full_name, event_id)
    return 'success' in response

def post(self):
    if self.json_body['scrapinghub_key'] != keys.get('scrapinghub_key'):
        self.response.status = 403
        return
    events_to_update = []
    new_ids = set()
    for json_body in self.json_body['items']:
        event_id = eventdata.DBEvent.generate_id(json_body['namespace'], json_body['namespaced_id'])
        e = eventdata.DBEvent.get_or_insert(event_id)
        if e.creating_method is None:
            new_ids.add(event_id)
            e.creating_method = eventdata.CM_WEB_SCRAPE
        events_to_update.append((e, json_body))
    event_updates.update_and_save_web_events(events_to_update)
    for event_id in new_ids:
        logging.info("New event, publishing to twitter/facebook: %s", event_id)
        deferred.defer(pubsub.eventually_publish_event, event_id)
    process_upload_finalization(self.json_body['studio_name'])
    self.response.status = 200

import base64
import hashlib
import hmac
import json
import logging
import urllib

import keys
from . import gmaps_backends
from util import mr
from util import urls

google_maps_private_key = keys.get("google_maps_private_key")
google_server_key = keys.get("google_server_key")


class LiveBackend(gmaps_backends.GMapsBackend):
    def __init__(self, name, protocol_host, path, use_private_key):
        self.name = name
        self.protocol_host = protocol_host
        self.path = path
        self.use_private_key = use_private_key

    def get_json(self, **kwargs):
        mr.increment('gmaps-api-%s' % self.name)
        if self.use_private_key:
            kwargs['client'] = 'free-dancedeets'
        unsigned_url_path = "%s?%s" % (self.path, urls.urlencode(kwargs))
        private_key = google_maps_private_key
        decoded_key = base64.urlsafe_b64decode(private_key)

from apiclient.discovery import build

import app
import base_servlet
import fb_api
import keys
from topics import grouping
from topics import topic_db
from search import search
from search import search_base
from servlets import api

# Set DEVELOPER_KEY to the "API key" value from the Google Developers Console:
# https://console.developers.google.com/project/_/apiui/credential
# Please ensure that you have enabled the YouTube Data API for your project.
DEVELOPER_KEY = keys.get('google_server_key')

YOUTUBE_API_SERVICE_NAME = 'youtube'
YOUTUBE_API_VERSION = 'v3'


@app.bio_route('/?')
@app.route('/topic/?')
class TopicListHandler(base_servlet.BaseRequestHandler):
    def requires_login(self):
        return False

    def get(self):
        topics = topic_db.Topic.query().fetch(500)
        self.display['topics'] = sorted(topics, key=lambda x: x.url_path)
        self.render_template('topic_list')

import keys

consumer_key = 'xzpiBnUCGqTWSqTgmE6XtLDpw'
consumer_secret = keys.get("twitter_consumer_secret")

import logging

from twilio.rest import TwilioRestClient
from twilio.rest.resources import base  # This is the right import for 3.5.2
from twilio import TwilioRestException

import keys

# put your own credentials here
ACCOUNT_SID = "AC4fe8564ea12bfcb3af18df2ee99c2bd9"
AUTH_TOKEN = keys.get("twilio_auth_token")


class InvalidPhoneNumberException(Exception):
    pass


def send_email_link(phone_number):
    client = TwilioRestClient(ACCOUNT_SID, AUTH_TOKEN)
    orig_get_cert_file = base.get_cert_file
    try:
        # We monkey-patch the cert file to not use anything
        base.get_cert_file = lambda: None
        logging.info("Sending SMS to %s", phone_number)
        client.messages.create(
            to=phone_number,
            from_="+12566932623",
            body="Download the DanceDeets App at http://www.dancedeets.com/mobile_apps?action=download",
        )
    except TwilioRestException as e:
        if 'not a valid phone number' in e.msg:
            raise InvalidPhoneNumberException(e.msg)

# Assumed imports for the Flask / Flask-Admin objects used below
# (not shown in the original snippet):
from flask import Flask
from flask_admin import Admin
from flask_admin.contrib import appengine

import keys
from classes.class_models import StudioClass
from events.eventdata import DBEvent
from events.event_locations import LocationMapping
from event_scraper.potential_events import PotentialEvent
from event_scraper.thing_db import Source
from rankings.cities import City
from fb_api import FacebookCachedObject
from loc.gmaps_cached import CachedGeoCode
from loc.gmaps_bwcompat import GeoCode
from pubsub.pubsub import OAuthToken
from search.search import DisplayEvent
from servlets.static_db import StaticContent
from topics.topic_db import Topic
from users.users import User, UserFriendsAtSignup, UserMessage

app = Flask(__name__)
app.debug = True
app.secret_key = keys.get('flask_session_key')

admin = Admin(app, name="Admin")

for model in [
    CachedGeoCode, City, DBEvent, DisplayEvent, FacebookCachedObject, GeoCode,
    LocationMapping, OAuthToken, PotentialEvent, Source, StaticContent,
    StudioClass, Topic, User, UserFriendsAtSignup, UserMessage
]:
    admin.add_view(appengine.ModelView(model))

if __name__ == '__main__':
    # Start app
    app.run(debug=True)

import logging

import mandrill

import keys

mandrill_client = mandrill.Mandrill(keys.get('mandrill_api_key'))


def send_message(message):
    result = None  # stays None if the send raises, avoiding a NameError below
    try:
        result = mandrill_client.messages.send(message=message, async=False)
        logging.info('Message Contents: %s', message)
        logging.info('Message Result: %s', result)
    except mandrill.Error as e:
        logging.error('A mandrill error occurred: %s: %s', e.__class__, e)
    return result

import base64
import hashlib
import hmac
import json
import logging
import urllib

import keys
from . import gmaps_backends
from util import urls

google_maps_private_key = keys.get("google_maps_private_key")
google_server_key = keys.get("google_server_key")


class LiveBackend(gmaps_backends.GMapsBackend):
    def __init__(self, protocol_host, path, use_private_key):
        self.protocol_host = protocol_host
        self.path = path
        self.use_private_key = use_private_key

    def get_json(self, **kwargs):
        if self.use_private_key:
            kwargs['client'] = 'free-dancedeets'
        unsigned_url_path = "%s?%s" % (self.path, urls.urlencode(kwargs))
        private_key = google_maps_private_key
        decoded_key = base64.urlsafe_b64decode(private_key)
        signature = hmac.new(decoded_key, unsigned_url_path, hashlib.sha1)
        encoded_signature = base64.urlsafe_b64encode(signature.digest())
        url = "%s%s&signature=%s" % (self.protocol_host, unsigned_url_path, encoded_signature)

import suds.client

import keys

SOURCE_NAME = "DanceDeets"
SOURCE_PASSWORD = keys.get("mindbody_api_password")

_CLIENTS = {}


def get_client(service_name):
    global _CLIENTS
    if service_name not in _CLIENTS:
        url = "https://api.mindbodyonline.com/0_5/" + service_name + "Service.asmx?wsdl"
        _CLIENTS[service_name] = suds.client.Client(url)
    return _CLIENTS[service_name]


def fill_credentials(client, request, site_ids):
    source_creds = client.factory.create('SourceCredentials')
    source_creds.SourceName = SOURCE_NAME
    source_creds.Password = SOURCE_PASSWORD
    source_creds.SiteIDs.int = site_ids
    request.XMLDetail = 'Full'
    request.SourceCredentials = source_creds


def get_request(client, request_name, site_id):
    request = client.factory.create(request_name)
    if hasattr(request, 'Request'):
        request = request.Request
    fill_credentials(client, request, [site_id])
    return request

import datetime  # used below; missing from the original snippet

from google.appengine.api import memcache
from google.appengine.ext import ndb
from google.appengine.api import taskqueue

import twitter

from events import eventdata
import fb_api
import fb_api_util
import keys
from users import users
from util import fetch
from util import text
from util import urls

consumer_key = 'xzpiBnUCGqTWSqTgmE6XtLDpw'
consumer_secret = keys.get("twitter_consumer_secret")

DATE_FORMAT = "%Y/%m/%d"
TIME_FORMAT = "%H:%M"

EVENT_PULL_QUEUE = 'event-publishing-pull-queue'


def eventually_publish_event(event_id, token_nickname=None):
    db_event = eventdata.DBEvent.get_by_id(event_id)
    if not db_event.has_content():
        return
    if (db_event.end_time or db_event.start_time) < datetime.datetime.now():
        return
    args = []

class MindBodyBrowserScraper(items.StudioScraper):
    allowed_domains = ['clients.mindbodyonline.com']

    mindbody_studio_id = 0
    mindbody_tab_id = 0

    # So we can log in to our scrapinghub splash instance
    http_user = keys.get('scrapinghub_key')
    http_pass = ''

    custom_settings = {
        'SPLASH_URL': SERVER_URL,
        'DOWNLOADER_MIDDLEWARES': {
            'scrapyjs.SplashMiddleware': 725,
            'scrapy.downloadermiddlewares.httpauth.HttpAuthMiddleware': 800,
        },
        'DUPEFILTER_CLASS': 'scrapyjs.SplashAwareDupeFilter',
        'HTTPCACHE_STORAGE': 'scrapyjs.SplashAwareFSCacheStorage',
        'ITEM_PIPELINES': {
            'classes.scraper.items.SaveStudioClassPipeline': 300,
        },
    }

    def __init__(self, *args, **kwargs):
        super(MindBodyBrowserScraper, self).__init__(*args, **kwargs)

    def _main_mindbody_url(self):
        return 'https://clients.mindbodyonline.com/ASP/home.asp?studioid=%s' % self.mindbody_studio_id

    def _generate_request(self, post_load=''):
        script = """
        function main(splash)
            assert(splash:go(splash.args.url))
            splash:wait(1)
            -- We need to click on the "CLASSES" tab.
            -- Unfortunately, attempts to call click() or evaljs onclick don't seem to work...
            -- So instead load the tab's URL directly in our main browser window
            splash:go("https://clients.mindbodyonline.com/classic/mainclass?fl=true&%s")
            splash:wait(1)
            %s
            return splash:evaljs("document.getElementById('classSchedule-mainTable').outerHTML")
        end
        """
        url_args = urllib.urlencode({'tabID': self.mindbody_tab_id})
        return scrapy.Request(
            self._main_mindbody_url(),
            meta={
                'splash': {
                    'args': {
                        'lua_source': script % (url_args, post_load),
                    },
                    'endpoint': 'execute',
                    # optional parameters
                    'slot_policy': scrapyjs.SlotPolicy.PER_DOMAIN,
                }
            },
        )

    def _get_url(self, response):
        return self._main_mindbody_url()

    def start_requests(self):
        yield self._generate_request()
        yield self._generate_request("""
            splash:runjs("autoSubmitDateTo(false, true)")
            splash:wait(3)
        """)

    def _valid_item(self, item, row):
        return True

    def parse_classes(self, response):
        date = None
        for row in response.css('table#classSchedule-mainTable tr'):
            header = row.css('td.header')
            cells = row.css('td')
            if header:
                date = dateparser.parse(self._extract_text(header)).date()
            elif len(cells) in [5, 6]:
                item = items.StudioClass()
                lst = row.css('td')
                if len(lst) == 5:
                    start_time, dummy, class_name, teacher, duration = [self._extract_text(x) for x in lst]
                elif len(lst) == 6:
                    start_time, dummy, class_name, teacher, room, duration = [self._extract_text(x) for x in lst]
                if 'Cancelled' in teacher:
                    continue
                start_time = dateparser.parse(start_time).time()
                duration_hours_match = re.search(r'(\d+) hour', duration)
                duration_hours = duration_hours_match.group(1) if duration_hours_match else 0
                duration_minutes_match = re.search(r'(\d+) minute', duration)
                duration_minutes = duration_minutes_match.group(1) if duration_minutes_match else 0
                duration = datetime.timedelta(hours=int(duration_hours), minutes=int(duration_minutes))
                item['start_time'] = datetime.datetime.combine(date, start_time)
                item['end_time'] = datetime.datetime.combine(date, start_time) + duration
                item['style'] = class_name
                if ' for ' in teacher:
                    sub, orig = teacher.split(' for ')
                    sub = adjust_caps(sub)
                    orig = adjust_caps(orig)
                    teacher = '%s for %s' % (sub, orig)
                else:
                    teacher = adjust_caps(teacher)
                item['teacher'] = teacher
                if self._valid_item(item, row):
                    yield item

def get_edition_fellows(self):
    "Return link fellows from edition."
    return r.hgetall(keys.get(EDITION_FELLOWS, self.id))

import logging

from mailchimp3 import MailChimp
import md5
from requests import exceptions

import keys
from util import runtime

client = MailChimp(keys.get('mailchimp_username'), keys.get('mailchimp_key'))

LIST_WEB_ID = 554989
LIST_ID = '93ab23d636'


class UserNotFound(Exception):
    pass


def get_list_id():
    lists = client.lists.all()
    list_id = [x['id'] for x in lists['lists'] if x['web_id'] == LIST_WEB_ID][0]
    return list_id


def add_members(list_id, members):
    data = {
        'members': members,
        'update_existing': True,
    }
    if runtime.is_appengine():
        return client.lists.update_members(list_id=list_id, data=data)

import random
import time

from firebase import firebase

import keys
from servlets import api

auth = firebase.FirebaseAuthentication(keys.get('firebase_secret'), None)
db = firebase.FirebaseApplication('https://dancedeets-hrd.firebaseio.com', auth)

#result = db.get('/events', None)
#print result


@api.apiroute(r'/event_signups/register')
class RegisterHandler(api.ApiHandler):
    supports_auth = True

    def post(self):
        event_id = self.json_body.get('event_id')
        category_id = self.json_body.get('category_id')
        team = self.json_body.get('team')
        team_name = team.get('team_name')
        dancers = []
        dancer_index = 1
        while team.get('dancer_name_%s' % dancer_index):
            dancer_name = team.get('dancer_name_%s' % dancer_index)
            dancer_id = team.get('dancer_id_%s' % dancer_index) or dancer_name
            dancers.append({'name': dancer_name, 'id': dancer_id})
            dancer_index += 1

class FeedHandler(ApiHandler):
    def get(self):
        if self.json_body:
            url = self.json_body.get('url')
        else:
            url = self.request.get('url')
        feed = feedparser.parse(url)
        json_string = json.dumps(feed, cls=DateHandlingJSONEncoder)
        json_data = json.loads(json_string)
        self.write_json_success(json_data)

    post = get


import random

from firebase import firebase

auth = firebase.FirebaseAuthentication(keys.get('firebase_secret'), None)
db = firebase.FirebaseApplication('https://dancedeets-hrd.firebaseio.com', auth)

#result = db.get('/events', None)
#print result


@apiroute(r'/event_signups/register')
class RegisterHandler(ApiHandler):
    supports_auth = True

    def post(self):
        event_id = self.json_body.get('event_id')
        category_id = self.json_body.get('category_id')
        team = self.json_body.get('team')
        team_name = team.get('team_name')
        dancers = {}

def get_shub_project():
    conn = scrapinghub.Connection(keys.get('scrapinghub_key'))
    project = scrapinghub.Project(conn, 27474)
    return project

from flask import Flask
from flask_graphql import GraphQL
import graphene
from graphene import relay

import keys

app = Flask(__name__)
app.debug = True
app.secret_key = keys.get('flask_session_key')

# http://dev.dancedeets.com:8080/graphql?query={rebels%20{name,ships(first:1%20after:%22YXJyYXljb25uZWN0aW9uOjA=%22){edges{cursor,node{name}}}}}

#from starwars import schema
schema = graphene.Schema(name='Nonexistant Schema')


class Query(graphene.ObjectType):
    node = relay.NodeField()


schema.query = Query
#schema.mutation = Mutation

GraphQL(app, schema=schema)

if __name__ == '__main__':
    # Start app
    app.run(debug=True)

class BrowserScraperMixin(object):
    # So we can log in to our scrapinghub splash instance
    http_user = keys.get('scrapinghub_key')
    http_pass = ''

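Most of the snippets above assume a `keys` module that maps a secret's name to its value (the keyboard-color and decryption-password snippets use unrelated helpers that happen to share the name). Below is a minimal sketch of such a module, assuming secrets live in a local keys.yaml file; the file name and format are assumptions, not from any of the original sources.

# keys.py -- hypothetical secret store backing keys.get(name).
import yaml

_secrets = None

def get(name):
    """Return the named secret, lazily loading the key file on first use."""
    global _secrets
    if _secrets is None:
        with open('keys.yaml') as f:  # assumed location of the secrets file
            _secrets = yaml.safe_load(f)
    return _secrets[name]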