def get_attendees_within(bounds, max_attendees):
    geoname_ids = _get_geoname_ids_within(bounds)
    logging.info('Loading PRCityCategory for top 10 cities: %s', geoname_ids)
    if not geoname_ids:
        return {}
    memcache_key = 'AttendeeCache: %s' % hashlib.md5('\n'.join(str(x) for x in geoname_ids).encode('utf-8')).hexdigest()
    memcache_result = memcache.get(memcache_key)
    if memcache_result:
        logging.info('Reading memcache key %s with value length: %s', memcache_key, len(memcache_result))
        result = json.loads(memcache_result)
    else:
        people_rankings = get_people_rankings_for_city_names(geoname_ids, attendees_only=True)
        logging.info('Loaded %s People Rankings', len(people_rankings))
        if runtime.is_local_appengine() and False:  # dead code: drop "and False" to re-enable this debug dump
            for x in people_rankings:
                logging.info(x.key)
                for person in x.worthy_top_people():
                    logging.info('  - %s' % person)
        groupings = combine_rankings(people_rankings, max_people=max_attendees)
        result = groupings.get('ATTENDEE', {})
        # Trim out the unnecessary names
        for category_city, people in result.iteritems():
            for person in people:
                del person['name']
        json_dump = json.dumps(result)
        try:
            logging.info('Writing memcache key %s with value length: %s', memcache_key, len(json_dump))
            memcache.set(memcache_key, json_dump, time=24 * 60 * 60)
        except ValueError:
            logging.warning('Error writing memcache key %s with value length: %s', memcache_key, len(json_dump))
            logging.warning('Tried to write: %s', json.dumps(result, indent=2))
    return result
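A hedged usage sketch; the (south-west, north-east) bounds format and the coordinates below are assumptions for illustration, not from the source:

# Hypothetical call site: fetch cached attendee rankings for a bounding box.
bounds = ((37.70, -122.52), (37.83, -122.35))  # roughly San Francisco
attendees = get_attendees_within(bounds, max_attendees=100)
for category_city, people in attendees.iteritems():
    logging.info('%s: %s attendees', category_city, len(people))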
Example #2
def after_add_event(event_id, fbl, send_email, post_pubsub):
    logging.info("New event, publishing to twitter/facebook")
    if post_pubsub:
        pubsub.eventually_publish_event(event_id)
    if fbl:
        crawl_event_source(fbl, event_id)
    if send_email and not runtime.is_local_appengine():
        # This has to occur *after* the event sources have been crawled (and the sources' emails are saved)
        event_emails_sending.send_event_add_emails(event_id, should_send=True)
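A sketch of a plausible call site; the argument values are illustrative, not from the source:

# Hypothetical invocation, e.g. at the end of an "add event" request handler.
after_add_event(
    event_id,
    fbl,               # Facebook lookup handle; passing None skips source crawling
    send_email=True,   # emails are suppressed on local app engine regardless
    post_pubsub=True,  # queue the event for twitter/facebook publishing
)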
Example #3
    def __init__(self, classified_event, debug=None):
        if not self.vertical:
            raise ValueError('Need to configure vertical')

        self._classified_event = classified_event
        if debug is None:
            debug = runtime.is_local_appengine()
        self._debug = debug
        self._suppress_all_logs = False

        self._logs = []
        self._log_category = ['global']
        self._log('Detected language %s', self._classified_event.language)
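Since __init__ raises unless self.vertical is set, concrete classifiers presumably subclass this and define vertical as a class attribute. A minimal sketch with hypothetical names (the real base class is not shown in this excerpt):

class StreetEventClassifier(EventClassifier):  # both names are hypothetical
    vertical = 'STREET'  # satisfies the vertical check in __init__

classifier = StreetEventClassifier(classified_event)
# debug is omitted, so it defaults to runtime.is_local_appengine()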
def get_connection(database_name, fallback_database_name=None):
    full_db_name = '%s.db' % database_name
    if os.environ.get('TRAVIS') or runtime.is_local_appengine():
        db_path = os.path.join(DEV_PATH, full_db_name)
        if not os.path.exists(db_path):
            db_path = fallback_database_name
    else:
        db_path = os.path.join(SERVER_PATH, full_db_name)
        if not os.path.exists(db_path):
            start = time.time()
            _download_sqlite(db_path)
            timelog.log_time_since('Downloading PRCityCategory sqlite db', start)
    logging.info('Opening db %s', db_path)
    conn = sqlite3.connect(db_path)
    # Cannot be shared between threads, be careful if making this global!
    return conn
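A hedged usage sketch; the database name, fallback filename, and query are illustrative:

# Hypothetical usage: open the rankings db, with a bundled fallback for local dev.
conn = get_connection('pr_city_category', fallback_database_name='pr_city_category.default.db')
cursor = conn.execute('SELECT COUNT(*) FROM sqlite_master')
logging.info('Schema objects in db: %s', cursor.fetchone()[0])
conn.close()  # per the comment above, never share this connection across threads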
Example #6
def cache_image_and_get_size(event, index=None):
    # For testing purposes:
    if event.full_image_url.startswith('test:'):
        return 100, 100
    else:
        mimetype, response = _raw_get_image(event, index=index)
        try:
            # If image 0 *is* the flyer...then let's ignore image zero and just proxy the flyer
            if index == 0 and event.full_image_url == event.extra_image_urls()[0]:
                index = None
            gcs.put_object(EVENT_IMAGE_BUCKET,
                           _event_image_filename(event.id, index=index),
                           response)
            _clear_out_resize_caches(event.id, index=index)
        except Exception:
            if runtime.is_local_appengine():
                logging.exception('Error saving event image: %s', event.id)
            else:
                raise
        img = images.Image(response)
        return img.width, img.height
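A sketch of a plausible call site, assuming extra images are cached by index alongside the primary flyer (the loop is illustrative):

# Hypothetical usage: cache the flyer and each extra image, keeping the flyer's size.
width, height = cache_image_and_get_size(event)  # index=None caches the primary flyer
for i in range(len(event.extra_image_urls())):
    cache_image_and_get_size(event, index=i)
logging.info('Cached flyer for %s at %sx%s', event.id, width, height)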
Example #8
def _generate_path(city, week_start):
    path = '%s/%s.gif' % (week_start.strftime('%Y-%m-%d'), city.display_name())
    if runtime.is_local_appengine():
        path = 'dev/%s' % path
    return path
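For concreteness, a sketch of the paths this produces; the inputs are hypothetical, and city stands for any object with a display_name() method:

import datetime

week_start = datetime.date(2017, 6, 5)
path = _generate_path(city, week_start)
# production: '2017-06-05/San Francisco.gif'
# local dev:  'dev/2017-06-05/San Francisco.gif'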
Example #9
#!/usr/bin/env python

# Because the app.yaml handlers are imported via:
# Traceback (most recent call last):
#  File "/env/local/lib/python2.7/site-packages/vmruntime/wsgi_config.py", line 55, in app_for_script
#    app, unused_filename, err = wsgi.LoadObject(script)
#  File "/env/local/lib/python2.7/site-packages/google/appengine/runtime/wsgi.py", line 85, in LoadObject
#    obj = __import__(path[0])
# It means we have no chance to hook in and update sys.path before they are loaded.
# So this wrapper does just that: it updates sys.path, then loads the _APP handlers.

from google.appengine.ext import vendor
from dancedeets.util import runtime

vendor.add('lib-both')
if runtime.is_local_appengine():
    vendor.add('lib-local')

from pipeline.handlers import _APP

# Disables the pipeline auth checking, since we now handle that ourselves
import pipeline
pipeline.set_enforce_auth(False)
# (tail of init_memcache(); the construction of `client` above this argument was
# truncated in the source)
                            password=keys.get('redis_memcache_password'))

    # Stub out these attributes (as None) so the client adheres to the memcache API expected by gae_memcache's setup_client()
    client.set_servers = None
    client.forget_dead_hosts = None
    client.debuglog = None
    client.replace_multi = None
    client.offset_multi = None
    if gae_memcache:
        # Try to use this redis memcache for all GAE stuff seamlessly
        gae_memcache.setup_client(client)
    return client


from dancedeets.util import runtime
if runtime.is_local_appengine():
    memcache_client = gae_memcache._CLIENT
else:
    # TODO: enable this Redis memcache (and pay for it) when we need to switch off the built-in GAE memcache
    # memcache_client = init_memcache()
    # Until then, fall back to the built-in GAE client so the wrappers below don't hit a NameError.
    memcache_client = gae_memcache._CLIENT

# Expose a simplified memcache_client API here...will we need it at all?


def get(key):
    return memcache_client.get(key)


def get_multi(keys):
    return memcache_client.get_multi(keys)
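If more of the memcache API is needed, a set() passthrough would presumably mirror the getters above; a hedged sketch, not in the original module:

def set(key, value, time=0):
    # Hypothetical passthrough; `time` follows the GAE memcache expiration convention.
    return memcache_client.set(key, value, time=time)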