# Beispiel #1
# 0
def cache_image_and_get_size(event):
    """Fetch the event's full image, cache it in GCS, and return (width, height).

    Args:
        event: object with ``full_image_url`` and ``id`` attributes.

    Returns:
        (width, height) tuple. For 'test:' URLs, a fixed (100, 100) is
        returned without any network or storage work.

    Raises:
        Re-raises any GCS save error in production; locally a failed save
        is logged and treated as best-effort.
    """
    # For testing purposes:
    if event.full_image_url.startswith('test:'):
        return 100, 100
    mimetype, response = _raw_get_image(event)
    try:
        gcs.put_object(EVENT_IMAGE_BUCKET, _event_image_filename(event.id),
                       response)
        _clear_out_resize_caches(event.id)
    except Exception:
        # A bare 'except:' would also swallow SystemExit/KeyboardInterrupt;
        # catch only real errors here.
        if runtime.is_local_appengine():
            logging.exception('Error saving event image: %s', event.id)
        else:
            raise
    img = images.Image(response)
    return img.width, img.height
def get_people_rankings_for_city_names(city_names, attendees_only=False):
    """Return PeopleRanking objects for the given city names.

    Locally the rankings come from the dev loader; in production they are
    queried (up to 100) from the PRCityCategory datastore kind, optionally
    restricted to ATTENDEE rankings.
    """
    if runtime.is_local_appengine():
        categories = load_from_dev(city_names, attendees_only=attendees_only)
    else:
        filters = [PRCityCategory.city.IN(city_names)]
        if attendees_only:
            filters.append(PRCityCategory.person_type == 'ATTENDEE')
        categories = PRCityCategory.query(*filters).fetch(100)

    return [
        PeopleRanking(category.city, category.category, category.person_type,
                      category.top_people_json)
        for category in categories
    ]
def get_attendees_within(bounds, max_attendees):
    """Return attendee rankings for the top cities within ``bounds``.

    Results are cached in memcache for 24 hours, keyed on the md5 of the
    newline-joined city-name list. Returns {} when no cities match.
    """
    city_names = _get_city_names_within(bounds)
    logging.info('Loading PRCityCategory for top 10 cities: %s', city_names)
    if not city_names:
        return {}
    memcache_key = 'AttendeeCache: %s' % hashlib.md5(
        '\n'.join(city_names).encode('utf-8')).hexdigest()
    memcache_result = memcache.get(memcache_key)
    if memcache_result:
        logging.info('Reading memcache key %s with value length: %s',
                     memcache_key, len(memcache_result))
        result = json.loads(memcache_result)
    else:
        people_rankings = get_people_rankings_for_city_names(
            city_names, attendees_only=True)
        logging.info('Loaded %s People Rankings', len(people_rankings))
        groupings = combine_rankings(people_rankings, max_people=max_attendees)
        result = groupings.get('ATTENDEE', {})
        # Trim out the unnecessary names before caching/returning.
        # .items() (not the Python-2-only .iteritems()) keeps this portable.
        for category_city, people in result.items():
            for person in people:
                del person['name']
        json_dump = json.dumps(result)
        try:
            logging.info('Writing memcache key %s with value length: %s',
                         memcache_key, len(json_dump))
            memcache.set(memcache_key, json_dump, time=24 * 60 * 60)
        except ValueError:
            # Presumably raised for oversized values (the length is logged
            # both times) — TODO confirm against the memcache client in use.
            logging.warning(
                'Error writing memcache key %s with value length: %s',
                memcache_key, len(json_dump))
            logging.warning('Tried to write: %s', json.dumps(result, indent=2))
    return result
def import_cities():
    """Import cities from a geonames dump file into the City datastore kind.

    Reads cities15000.txt (tab-separated) from the current directory and
    upserts one City entity per line, keyed on "AsciiName, Admin1, CC" so
    re-imports update in place.
    """
    # Download this file from http://download.geonames.org/export/dump/
    # Generally we import locally (to avoid 30sec servlet limits), then download-and-upload data:
    # appcfg.py download_data --application=dancedeets --kind="City" --url=http://127.0.0.1:8080/remote_api --filename=cities.db
    # appcfg.py upload_data --application=dancedeets --kind="City" --url=http://dancedeets.appspot.com/remote_api --filename=cities.db
    # 'with' ensures the file handle is closed even if an import line fails
    # (the original leaked it).
    with open('cities15000.txt') as city_file:
        for count, line in enumerate(city_file, start=1):
            if not count % 1000:
                logging.info('Imported %s cities', count)
            # List of fields from http://download.geonames.org/export/dump/
            (geonameid, name, asciiname, alternatenames, latitude, longitude,
             feature_class, feature_code, country_code, cc2, admin1_code,
             admin2_code, admin3_code, admin4_code, population, elevation,
             gtopo30, timezone, modification_date) = line.split('\t')

            lat = float(latitude)
            lng = float(longitude)
            city = City.get_or_insert(', '.join(
                [asciiname, admin1_code, country_code]))
            city.city_name = asciiname
            city.state_name = admin1_code
            city.country_name = country_code
            city.latitude = lat
            city.longitude = lng
            city.population = int(population)
            if runtime.is_local_appengine():
                city.has_nearby_events = True
            # Precompute geohash prefixes at several precisions for
            # nearby-city lookups.
            city.geohashes = [
                str(geohash.Geostring((lat, lng), depth=depth))
                for depth in CITY_GEOHASH_PRECISIONS
            ]
            city.timezone = timezone
            city.put()
                            password=keys.get('redis_memcache_password'))

    # Non-existent functions necessary to adhere to the memcache API expected by gae_memcache's setup_client()
    client.set_servers = None
    client.forget_dead_hosts = None
    client.debuglog = None
    client.replace_multi = None
    client.offset_multi = None
    if gae_memcache:
        # Try to use this redis memcache for all GAE stuff seamlessly
        gae_memcache.setup_client(client)
    return client


from util import runtime

# Choose the memcache client implementation at import time.
if runtime.is_local_appengine():
    # Local dev: reuse the GAE memcache module's underlying client stub.
    memcache_client = gae_memcache._CLIENT
else:
    # TODO: enable this Redis memcache (and pay for it) when we need to switch off the built-in GAE memcache
    # memcache_client = init_memcache()
    # NOTE(review): in production memcache_client is never assigned here, so
    # the get()/get_multi() wrappers below would raise NameError — confirm
    # this path is not exercised in production.
    pass

# Expose a simplified memcache_client API here...will we need it at all?


def get(key):
    """Look up ``key`` via the module-level memcache client."""
    client = memcache_client
    return client.get(key)


def get_multi(keys):
    """Look up several keys at once via the module-level memcache client."""
    client = memcache_client
    return client.get_multi(keys)
# Beispiel #6
# 0
def _generate_path(city, week_start):
    """Build the image path 'YYYY-MM-DD/<city>.gif', prefixed with 'dev/'
    when running on the local dev server."""
    date_prefix = week_start.strftime('%Y-%m-%d')
    filename = '%s/%s.gif' % (date_prefix, city.display_name())
    if runtime.is_local_appengine():
        return 'dev/%s' % filename
    return filename