def task():
    """Poll ZenTao for bugs assigned to the current user and raise desktop notifications.

    Loops while the module-level RUN flag is truthy.  Each cycle fetches the
    bug list, keeps only entries not seen before (via the cache), shows a
    notification per new bug, then sleeps — longer when there was activity so
    the notifications have time to be read.
    """
    while RUN:
        bug_beans = zen_tao_client.get_my_bug()

        # Filter out bugs we already notified about, then refresh the cache
        # with the latest snapshot.
        new_beans = cache.filter_new("bug", bug_beans)
        cache.add("bug", bug_beans)

        if new_beans:
            tools.print_log(__name__ + "." + sys._getframe().f_code.co_name,
                            "本次显示的bug:" + str(new_beans))

            for bean in new_beans:
                msg_title = "当前有Bug指向您"
                msg_content = bean.title + "\n\t\n"
                msg_content += "级别:" + bean.level + "\n" \
                               + "类型:" + bean.error_type + "\n" \
                               + "From:" + bean.author
                tools.show_notify(msg_title, msg_content)

        # 13 seconds per new bug, otherwise poll again after 1 second.
        # (Removed a dead `else: pass` and a duplicated emptiness check.)
        sleep_time = len(new_beans) * 13 if new_beans else 1
        time.sleep(sleep_time)
Ejemplo n.º 2
0
    def post(self):
        """Serve a PNG map tile, rendering and caching it on a cache miss."""
        tx = self.request.get_range('x', None)
        ty = self.request.get_range('y', None)
        z = self.request.get_range('z', None)
        limit = self.request.get_range('limit', min_value=1, max_value=1000, default=1000)
        offset = self.request.get_range('offset', min_value=0, default=0)
        name = self.request.get('name', None)
        source_name = self.request.get('source', None)

        # Tile coordinates, layer name and source are all mandatory.
        required = (tx, ty, z, name, source_name)
        if any(param is None for param in required):
            self.error(400)
            return

        key = 'tile-%s-%s-%s-%s-%s' % (z, ty, tx, source_name, name)
        png = cache.get(key)
        if png is None:
            # Cache miss: render the tile, falling back to a blank tile,
            # and store the raw bytes (dumps=False skips serialization).
            png = get_tile_png(tx, ty, z, name, source_name, limit, offset)
            if png is None:
                png = PointTile.blank()
            cache.add(key, png, dumps=False)

        logging.info('TILE BACKEND MEMORY = %s' % runtime.memory_usage().current())
        self.response.set_status(200)
        self.response.headers['Content-Type'] = 'image/png'
        self.response.out.write(png)
Ejemplo n.º 3
0
def unlocked(path):
    """Return True when the folder at *path* is not locked, when a prior
    unlock is still cached, or when the user supplies the right password
    (in which case the unlock is cached for the configured period)."""
    folderConfig = os.path.join(path, FOLDERCFG)
    lock = getParam('LOCK', folderConfig)
    if not lock:
        # No LOCK entry in the folder config — nothing to unlock.
        return True

    import cache
    if cache.exists(path):
        # A recent successful unlock is still cached.
        return True

    md5 = checkPassword(path, lock)

    if len(md5) == 0:
        return False

    if md5 == 'ERROR':
        # Wrong password: show the localized error dialog.
        utils.DialogOK(GETTEXT(30080))
        return False

    # Remember the unlock for 0/1/5/15 minutes per the addon's CACHE setting.
    period = [0, 1, 5, 15][int(ADDON.getSetting('CACHE'))]
    cache.add(path, period)

    return True
Ejemplo n.º 4
0
def delete_all_notifications(user_email):
    """Delete every Notification addressed to *user_email* and reset the
    cached unread-notification count to zero."""
    notifications = db.Notification.all().filter('recipient_email =',
                                                 user_email)
    for n in notifications:
        n.delete()
    cache_key = cache.MC_USER_NOTIFICATION_PREFIX + user_email
    # NOTE(review): memcache-style `add` usually only stores when the key is
    # absent, so a stale nonzero count may survive here — confirm whether
    # cache.add overwrites (set semantics) or not.
    cache.add(cache_key, 0)
Ejemplo n.º 5
0
def get_series_us(self, chat_id, message_id, search_query_id):
    """Reply to a Telegram chat with the photo count for a US plate series.

    Loads the stored search query, resolves the state/ctype ids, fetches the
    series count (memoized for 5 minutes), and sends a Markdown message;
    sends "Нет данных" when the service returns no data.
    """
    search_query = db.get_search_query(search_query_id)
    result = avtonomer.validate_us_plate_series(search_query.query_text)
    state, series_number = result.groups()
    key = f"avtonomer.get_series_us({state}, {series_number})"
    state_id, ctype_id = avtonomer.US_STATES_ID[state]

    # NOTE(review): a cached count of 0 is falsy and triggers a refetch on
    # every call — confirm that is intended.
    result = cache.get(key)
    if not result:
        result = avtonomer.get_series_us(state_id, ctype_id, series_number)

    if result is None:
        logger.warning(f"Not data for query {series_number}")
        bot.send_message(
            chat_id,
            "Нет данных",
            reply_to_message_id=message_id,
        )
        return

    # Memoize the non-None count; re-adding on a cache hit presumably
    # refreshes the TTL — depends on cache.add semantics, confirm.
    cache.add(key, result, timedelta(minutes=5))

    url = avtonomer.get_series_us_url(state_id, ctype_id, series_number)
    message = (
        f"В серии [{series_number}]({url}) штата `{state}` пока нет ни одного номера"
        if result == 0 else
        f"Количество фотографий в серии [{series_number}]({url}) штата `{state}`: {result}"
    )
    bot.send_message(
        chat_id,
        message,
        parse_mode="Markdown",
        reply_to_message_id=message_id,
    )
Ejemplo n.º 6
0
def unlocked(path):
    """Return True when *path* has no LOCK configured, when a prior unlock is
    still cached, or when the user enters the correct password (the unlock is
    then cached for the configured period)."""
    folderConfig = os.path.join(path, FOLDERCFG)
    lock = getParam('LOCK', folderConfig)
    if not lock:
        # No LOCK entry — folder is not protected.
        return True

    import cache
    if cache.exists(path):
        # A recent successful unlock is still cached.
        return True

    md5 = checkPassword(path, lock)

    # Empty result — presumably the user cancelled the prompt; confirm
    # checkPassword's contract.
    if len(md5) == 0:
        return False

    if md5 == 'ERROR':
        # Wrong password: show the localized error dialog.
        utils.DialogOK(GETTEXT(30080))
        return False

    # Cache the unlock for 0/1/5/15 minutes per the addon's CACHE setting.
    periods = [0, 1, 5, 15]
    setting = int(ADDON.getSetting('CACHE'))
    period = periods[setting]

    cache.add(path, period)

    return True
Ejemplo n.º 7
0
def get_ru_region(self, chat_id, message_id, search_query_id):
    """Reply to a Telegram chat with a RU region's name and its fresh
    new-letter-combination series (cached for 5 minutes)."""
    search_query = db.get_search_query(search_query_id)
    result = avtonomer.validate_ru_region(search_query.query_text)
    region = result.groups()[0]
    key = f"avtonomer.get_ru_region({region})"
    region_name, region_id = avtonomer.RU_REGIONS_ID[region]

    # NOTE(review): a falsy cached result is refetched on every call —
    # confirm that is intended.
    result = cache.get(key)
    if not result:
        result = avtonomer.search_ru(
            ctype=1,
            regions=[region],
            tags=[avtonomer.TAG_NEW_LETTER_COMBINATION],
        )

    # NOTE(review): unlike the other handlers in this file, this caches even
    # a None result — confirm cache.add tolerates/should store None.
    cache.add(key, result, timedelta(minutes=5))

    message = f"Регион *{region}* — {region_name}"
    if result is not None:
        message += "\n\nСвежие серии:\n"
        message += "\n".join([
            "• {} /{} — {} {}".format(
                car.date,
                avtonomer.translate_to_latin(car.license_plate.replace(
                    " ", "")).upper(),
                car.make,
                car.model,
            ) for car in result.cars
        ])
    bot.send_message(
        chat_id,
        message,
        parse_mode="Markdown",
        reply_to_message_id=message_id,
    )
Ejemplo n.º 8
0
 def post(self):
     """Return a JSON list of names matching the query, with caching.

     Responds 400 without a name parameter, 404 for an unknown source, and
     propagates non-200 statuses from the upstream name service.
     """
     # Check parameters
     name = self.request.get('name', None)
     if not name:
         self.error(400)
         return
     source_name = self.request.get('source', None)

     # Check cache
     key = self.cache_key(name, source_name)
     names = cache.get(key)
     if names:
         # Cached hit is written as-is — presumably cache.add serialized it
         # on store (cache.add appears to dump by default elsewhere in this
         # codebase); confirm against the cache module.
         self.response.set_status(200)
         self.response.headers['Content-Type'] = "application/json"
         self.response.out.write(names)
         return

     source = sources.get(source_name)
     if not source:
         # TODO: Get names from all sources?
         self.error(404)
         return

     # Make service request for names
     names, status_code = self.get_names(source, name)
     if status_code != 200:
         self.error(status_code)
         return

     # Update cache and send response
     cache.add(key, names)
     self.response.set_status(200)
     self.response.headers['Content-Type'] = "application/json"
     self.response.out.write(simplejson.dumps(names))
Ejemplo n.º 9
0
def create_user_model(offset_param, user_query_func, href_create_func,
                      memcache_cursor_key_func):
    """Build a paginated model of user profiles.

    offset_param -- raw offset value from the request (string or None)
    user_query_func -- callable returning the base datastore query
    href_create_func(offset) -- callable building page hyperlinks
    memcache_cursor_key_func(offset) -- callable building the cache keys
        under which query cursors are memoized per page offset

    Returns a dict with the converted users plus prev/next page metadata.
    (Removed a leftover debug `import logging` / `logging.error` call that
    logged at error severity on every request.)
    """
    if offset_param:
        offset = int(offset_param)
    else:
        offset = 0

    if offset < 0:
        offset = 0

    fetch_count = users_page_size
    query = user_query_func()

    # Fetch one row beyond the page so we can detect a next page without a
    # second query.  Past the first page, prefer a memoized cursor over a
    # slow offset scan.
    if offset == 0:
        all_users = query.run(limit=fetch_count + 1)
    else:
        cursor = cache.get(memcache_cursor_key_func(offset))
        if cursor:
            query = query.with_cursor(start_cursor=cursor)
            all_users = query.run(limit=fetch_count + 1)
        else:
            all_users = query.run(limit=fetch_count + 1, offset=offset)
            cache.add(memcache_cursor_key_func(offset), query.cursor())

    has_prev_page = (offset > 0)
    has_next_page = False

    index = 0
    users = []
    for u in all_users:
        index = index + 1
        if index > fetch_count:
            # The extra row fetched above only signals a next page.
            has_next_page = True
        else:
            converted_user = convert.convert_user_profile(u)
            users.append(converted_user)
            if index == fetch_count:
                # Cache the cursor pointing at the start of the next page.
                cache.add(memcache_cursor_key_func(offset + fetch_count),
                          query.cursor())

    next_page_href = href_create_func(offset + fetch_count)

    if offset - users_page_size <= 1:
        prev_page_href = href_create_func(0)
    else:
        prev_page_href = href_create_func(offset - users_page_size)

    return {
        'has_next_page': has_next_page,
        'has_prev_page': has_prev_page,
        'next_page_href': next_page_href,
        'prev_page_href': prev_page_href,
        'first_page_href': href_create_func(0),
        'users': users
    }
Ejemplo n.º 10
0
def search_license_plate(self, chat_id, message_id, search_query_id, page,
                         edit):
    """Send (or edit) a Telegram message showing one photo from the results
    of a saved license-plate search.

    page -- index of the car photo to show
    edit -- when truthy, edit the existing message (pagination) instead of
            sending a new one
    """
    search_query = db.get_search_query(search_query_id)
    lp_num = search_query.query_text
    lp_type = search_query.num_type
    key = f"avtonomer.search_{lp_type}({lp_num})"

    # NOTE(review): a falsy cached result triggers a refetch each call —
    # confirm that is intended.
    result = cache.get(key)
    if not result:
        if lp_type == "ru":
            result = avtonomer.search_ru(lp_num)
        else:
            result = avtonomer.search_su(lp_num)

    if not result:
        bot.send_message(
            chat_id,
            "По вашему запросу ничего не найдено",
            reply_to_message_id=message_id,
        )
        return

    cache.add(key, result, timedelta(minutes=5))

    car = result.cars[page]
    cars_count = len(result.cars)
    key = f"avtonomer.load_photo({car.thumb_url})"

    # Prefer the Telegram file_id of a previously uploaded photo; otherwise
    # download the thumbnail, falling back to a placeholder image.
    file_id = cache.get(key)
    if not file_id:
        photo = avtonomer.load_photo(car.thumb_url)
        if not photo:
            photo = open(PHOTO_NOT_FOUND, "rb")
    else:
        photo = file_id

    caption = get_car_caption(car, lp_num, page, cars_count)
    markup = get_car_reply_markup(cars_count, search_query_id, page)

    if edit:
        message = bot.edit_message_media(
            media=telegram.InputMediaPhoto(photo, caption=caption),
            reply_markup=markup,
            chat_id=chat_id,
            message_id=message_id,
        )
    else:
        message = bot.send_photo(
            chat_id,
            photo,
            caption,
            reply_to_message_id=message_id,
            reply_markup=markup,
        )

    if not file_id:
        # Remember Telegram's file_id so later pages reuse the upload.
        cache.add(key, message.photo[-1].file_id, timedelta(minutes=30))
Ejemplo n.º 11
0
def _do_compile(compile_cmd, source):
    """Compile *source* using *compile_cmd*, record the artifact in the
    cache, and return the compiled file path.

    Raises CompilationError (chained from CalledProcessError) when the
    compiler exits non-zero.
    """
    try:
        target = cache.get_compiled_file_path(source)
        command = fill_placeholders(compile_cmd, source, target)
        _execute(command, check=True)
        cache.add(source, target)
        return target
    except subprocess.CalledProcessError as error:
        raise CompilationError from error
Ejemplo n.º 12
0
def add_user_profile(profile):
    """Persist *profile* unless one already exists for its email.

    Returns the existing profile on a duplicate, otherwise the datastore
    put() result.  Caches the new profile and invalidates the main-page
    recent-images and recent-comments caches.
    """
    existing = get_user_profile(profile.email)
    if existing:
        return existing

    result = profile.put()
    cache.add(cache.MC_USER_PROFILE + profile.email, profile)
    # Invalidate main-page caches so the new profile shows up.
    cache.delete(cache.MC_MAIN_PAGE_RECENT_IMAGES_KEY)
    cache.delete(cache.MC_MAIN_PAGE_RECENT_COMMENTS)
    return result
Ejemplo n.º 13
0
 def _update_cache(self, pkg):
     """Record a DNS response packet in the per-qtype caches.

     Non-OK responses cache the questioned names in the "error" cache for
     30 minutes; all resource records are cached under their qtype until
     their TTL expires.
     """
     if pkg.flags.rcode != protocol.Flags.RCODE_OK:
         for record in pkg.questions:
             # Local name `cache` shadows any module-level cache here.
             cache = self._cache["error"]
             # Negative-cache the name for 30 minutes with an empty rdata.
             cache.add(record.domain_name, "", time.time() + 30 * 60)
     # NOTE(review): even on error responses the record sections are still
     # cached below — confirm that is intended.
     for record in chain(pkg.answers, pkg.access_rights, pkg.extra_records):
         key = record.domain_name
         qtype = record.type
         remove_time = time.time() + record.ttl
         rdata = record.rdata
         self._cache[qtype].add(key, rdata, remove_time)
Ejemplo n.º 14
0
    def initialize(self, request, response):
        """Handler bootstrap: validate the User-Agent, run the base class
        init, attach user info, and load global settings (cached)."""
        user_agent = request.headers.get('User-Agent')
        if user_agent:
            check_user_agent(user_agent)

        webapp.RequestHandler.initialize(self, request, response)
        self.user_info = UserInfo(self.request.uri)

        # Settings come from the cache when possible, otherwise from the
        # datastore (and are then cached for the next request).
        settings = cache.get(cache.MC_SETTINGS)
        if not settings:
            settings = get_settings()
            cache.add(cache.MC_SETTINGS, settings)
        self.settings = settings
Ejemplo n.º 15
0
def get_notification_count(user_email):
    """Return the number of unread notifications for *user_email*.

    Serves from the cache when possible; on a miss, counts unread
    Notification entities in the datastore and caches the result.
    """
    cache_key = cache.MC_USER_NOTIFICATION_PREFIX + user_email
    value = cache.get(cache_key)

    # `value <> None` was Python-2-only syntax (SyntaxError on Python 3);
    # `is not None` is the portable spelling and keeps a cached 0 as a hit.
    if value is not None:
        return value

    count = db.Notification.all().filter('recipient_email =',
                                         user_email).filter(
                                             'read =', False).count()
    cache.add(cache_key, count)
    return count
Ejemplo n.º 16
0
def get_friends(session=None, target=None):
    """Return the VK friend list for user *target*, memoized in the cache.

    Returns -1 when the profile is private (VK API error).
    """
    cache_id = str(target)
    if cache.contains(cache_id):
        return cache.get(cache_id)

    try:
        friends = session.friends.get(user_id=target)["items"]
    except vk_api.exceptions.ApiError:
        # Private profile: report and signal the caller with -1.
        print("[!] ID " + str(target) + " is private. Skipping")
        return -1

    cache.add(cache_id, friends)
    return friends
Ejemplo n.º 17
0
def get_artwork_favorite_count(artwork):
    """Return the favorite count of *artwork*, cached under MC_FAVORITE_COUNT.

    Falls back to the FavoriteCounter entity (0 when absent) and caches
    the result.
    """
    memcache_key = cache.MC_FAVORITE_COUNT + str(artwork.key().id())
    result = cache.get(memcache_key)
    # Compare against None rather than truthiness: a cached count of 0 is
    # falsy and would otherwise force a datastore query on every call for
    # artworks with no favorites.
    if result is not None:
        return result

    fav = db.FavoriteCounter.all().filter('artwork =', artwork).get()
    result = fav.count if fav else 0
    cache.add(memcache_key, result)
    return result
Ejemplo n.º 18
0
 def handle_result(self, rpc, url, type, X, Y):
    """Async-fetch callback: on HTTP 200, record the tile and cache its
    bytes in both memcache and the blob cache; on any failure, log it and
    count the tile as done."""
    result = rpc.get_result()
    try:
        if result.status_code == 200:
            logging.info('%s tile X:%i Y:%i made it' % (type, X, Y))
            self.addResult(result.content, type, X, Y)
            logging.info('caching url %s' % url)
            memcache.add(url, result.content)
            cache.add(url, result.content, value_type="blob")
    # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt still
    # propagate; any tile-processing error is logged and counted as done.
    except Exception:
        logging.info('%s tile X:%i Y:%i failed' % (type, X, Y))
        self.tilesDone = self.tilesDone + 1
Ejemplo n.º 19
0
def is_follower(leader_email, follower_email):
    """Return whether *follower_email* follows *leader_email*, with caching."""
    memcache_key = cache.MC_FAVORITE_BY_USER + leader_email + '_' + follower_email
    cached = cache.get(memcache_key)

    # Only an actual boolean counts as a cache hit.
    if cached == True or cached == False:
        return cached

    follow = db.Follow.all().filter('leader_email =', leader_email).filter(
        'follower_email =', follower_email).get()
    answer = follow is not None
    cache.add(memcache_key, answer)
    return answer
Ejemplo n.º 20
0
def tag_by_url_name(url_name):
    """Look up a Tag by its url_name, caching datastore hits.

    When no Tag exists, return a fresh in-memory Tag stub named after
    *url_name* (neither persisted nor cached).
    """
    cached = cache.get(cache.MC_TAG + url_name)
    if cached:
        return cached

    tag = db.Tag.all().filter('url_name =', url_name).get()
    if tag:
        cache.add(cache.MC_TAG + url_name, tag)
        return tag

    # Not found: build a placeholder whose fields all mirror url_name.
    tag = db.Tag()
    tag.url_name = url_name
    tag.title = url_name
    tag.title_lower = url_name
    return tag
Ejemplo n.º 21
0
def check_favorite(user_email, artwork_id):
    """Anti-repeat check for favoriting.

    Returns True when the favorite should be accepted — i.e. there is no
    cached record for the user, or *artwork_id* differs from the last
    artwork they favorited.  Always records *artwork_id* as the user's most
    recent favorite.
    """
    cache_key = FAVORITE_CACHE_PREFIX + user_email
    cache_value = cache.get(cache_key)
    result = False
    if cache_value is None:
        result = True
    # `<>` was Python-2-only syntax (SyntaxError on Python 3); `!=` is the
    # portable equivalent.
    elif artwork_id != cache_value[VALUE_ARTWORK_ID]:
        result = True

    cache_value = {
        VALUE_ARTWORK_ID: artwork_id
        }
    cache.add(cache_key, cache_value)

    return result
    
Ejemplo n.º 22
0
def get_user_profile(user_email):
    """Fetch a UserProfile by primary or alternative email, with caching.

    Returns None when no profile matches either field.
    """
    cache_key = cache.MC_USER_PROFILE + user_email
    cached = cache.get(cache_key)
    if cached:
        return cached

    # Try the primary email first, then the alternative-emails list.
    for filter_expr in ('email =', 'alternative_emails ='):
        user_profile = db.UserProfile.all().filter(filter_expr,
                                                   user_email).get()
        if user_profile:
            cache.add(cache_key, user_profile)
            return user_profile

    return None
Ejemplo n.º 23
0
def is_artwork_favorite_by_user(artwork, user_email):
    """Return whether *user_email* has favorited *artwork*, with caching.

    Users without an email (anonymous) never have favorites.
    """
    if not user_email:
        return False

    memcache_key = cache.MC_FAVORITE_BY_USER + str(
        artwork.key().id()) + '_' + user_email
    cached = cache.get(memcache_key)

    # Only an actual boolean counts as a cache hit.
    if cached == True or cached == False:
        return cached

    fav_user = db.Favorite.all().filter('artwork =', artwork).filter(
        'user_email =', user_email).get()
    answer = fav_user is not None
    cache.add(memcache_key, answer)
    return answer
Ejemplo n.º 24
0
 def get(self):
     """Render the backstage overview: articles plus cached web (blog
     search) and Twitter mentions of the site domain."""
     articles = Node.get_all_articles()
     self.values['articles'] = articles
     self.values['urls'] = Node.get_urls()
     site_domain = self.values.get('site_domain', None)
     site_domain_sync = Datum.get('site_domain_sync', None)
     # Web mentions via Google Blog Search, cached for 10 minutes.
     mentions_web = cache.get('mentions_web')
     if mentions_web == None and site_domain:
         link = ''.join([
             'http://blogsearch.google.com/blogsearch_feeds?hl=en&q=',
             urllib.quote('link:' + site_domain),
             '&ie=utf-8&num=10&output=atom'
         ])
         mentions_web = feedparser.parse(link)
         cache.set('mentions_web', mentions_web, 600)
     if mentions_web:
         self.values['mentions_web'] = mentions_web.entries
     else:
         self.values['mentions_web'] = None
     # Twitter mentions (primary domain OR sync domain), cached 10 minutes.
     mentions_twitter = cache.get('mentions_twitter')
     if mentions_twitter is None:
         q = None
         if site_domain_sync is None:
             q = site_domain
         else:
             q = ' OR '.join([site_domain, site_domain_sync])
         q = urllib.quote(q)
         try:
             result = urlfetch.fetch(
                 'http://search.twitter.com/search.json?q=' + q)
             if result.status_code == 200:
                 mentions_twitter = simplejson.loads(result.content)
                 cache.add('mentions_twitter', mentions_twitter, 600)
         # NOTE(review): bare except swallows every error (including
         # SystemExit/KeyboardInterrupt) — consider `except Exception`.
         except:
             mentions_twitter = None
     if mentions_twitter and len(mentions_twitter['results']) > 0:
         self.values['mentions_twitter'] = mentions_twitter['results']
     else:
         self.values['mentions_twitter'] = None
     self.render('backstage/overview.html', **self.values)
Ejemplo n.º 25
0
def check_comment(user_email, artwork_id, comment_text):
    """Anti-spam check for comments.

    Returns True when the comment should be accepted — i.e. the user has no
    cached comment record, or enough time has passed since their last
    comment AND (it targets a different artwork OR its text differs).
    Always records this attempt in the cache.
    """
    cache_key = COMMENT_CACHE_PREFIX + user_email
    cache_value = cache.get(cache_key)
    result = False
    if cache_value is None:
        result = True
    elif datetime.now() - cache_value[VALUE_DATE] > COMMENT_MIN_INTERVAL:
        # `<>` was Python-2-only syntax (SyntaxError on Python 3); `!=` is
        # the portable equivalent.
        if artwork_id != cache_value[VALUE_ARTWORK_ID]:
            result = True
        elif comment_text != cache_value[VALUE_COMMENT]:
            result = True

    cache_value = {
        VALUE_DATE: datetime.now(),
        VALUE_ARTWORK_ID: artwork_id,
        VALUE_COMMENT: comment_text
        }
    cache.add(cache_key, cache_value)

    return result
Ejemplo n.º 26
0
 def get (self):
     """Render the backstage overview: articles plus cached web (blog
     search) and Twitter mentions of the site domain."""
     articles = Node.get_all_articles ()
     self.values['articles'] = articles
     self.values['urls'] = Node.get_urls ()
     site_domain = self.values.get ('site_domain', None)
     site_domain_sync = Datum.get ('site_domain_sync', None)
     # Web mentions via Google Blog Search, cached for 10 minutes.
     mentions_web = cache.get ('mentions_web')
     if mentions_web == None and site_domain:
         link = ''.join (['http://blogsearch.google.com/blogsearch_feeds?hl=en&q=',
                          urllib.quote('link:' + site_domain),
                          '&ie=utf-8&num=10&output=atom'])
         mentions_web = feedparser.parse (link)
         cache.set ('mentions_web', mentions_web, 600)
     if mentions_web :
         self.values ['mentions_web'] = mentions_web.entries
     else:
         self.values ['mentions_web'] = None
     # Twitter mentions (primary domain OR sync domain), cached 10 minutes.
     mentions_twitter = cache.get('mentions_twitter')
     if mentions_twitter is None:
         q = None
         if site_domain_sync is None:
             q = site_domain
         else:
             q = ' OR '.join ([site_domain, site_domain_sync])
         q = urllib.quote (q)
         try:
             result = urlfetch.fetch('http://search.twitter.com/search.json?q=' + q)
             if result.status_code == 200:
                 mentions_twitter = simplejson.loads(result.content)
                 cache.add('mentions_twitter', mentions_twitter, 600)
         # NOTE(review): bare except swallows every error — consider
         # `except Exception`.
         except:
             mentions_twitter = None
     if mentions_twitter and len(mentions_twitter['results']) > 0:
         self.values['mentions_twitter'] = mentions_twitter['results']
     else:
         self.values['mentions_twitter'] = None
     self.render ('backstage/overview.html', **self.values)
Ejemplo n.º 27
0
def get_series_ru(self, chat_id, message_id, search_query_id):
    """Reply to a Telegram chat with the photo count for a RU plate series
    (memoized for 5 minutes)."""
    search_query = db.get_search_query(search_query_id)
    series_number = search_query.query_text
    key = f"avtonomer.get_series_ru({series_number})"

    # NOTE(review): a cached count of 0 is falsy and forces a refetch on
    # every call — confirm that is intended.
    result = cache.get(key)
    if not result:
        # fastsearch pattern "X*YZ": first character, wildcard, remainder.
        result = avtonomer.search_ru(fastsearch="{}*{}".format(
            series_number[:1],
            series_number[1:],
        ), )
        if result is not None:
            result = result.total_results

    if result is None:
        logger.warning(f"No data for query {series_number}")
        bot.send_message(
            chat_id,
            "Нет данных",
            reply_to_message_id=message_id,
        )
        return

    cache.add(key, result, timedelta(minutes=5))

    url = avtonomer.get_series_ru_url(series_number)
    series_number = avtonomer.translate_to_cyrillic(series_number)
    message = (
        f"В серии [{series_number}]({url}) пока нет ни одного номера"
        if result == 0 else
        f"Количество фотографий в серии [{series_number}]({url}): {result}")
    bot.send_message(
        chat_id,
        message,
        parse_mode="Markdown",
        reply_to_message_id=message_id,
    )
Ejemplo n.º 28
0
    def post(self):
        """Accept a harvest request and enqueue a backend harvest task.

        When a job for (name, source) already exists in the cache this
        responds 200 with the job status — the endpoint doubles as the
        polling URL.  Otherwise it registers a 'new' job, enqueues the
        backend task, and responds 202 Accepted.
        """
        # Check parameters
        name = self.request.get('name', None)
        source_name = self.request.get('source', None)
        if not name or not source_name:
            logging.info('Unable to harvest without name and source')
            self.error(400)
            return

        # Check cache for harvest job (handles polling)
        key = harvest.get_job_cache_key(name, source_name)
        job = cache.get(key)
        if job:
            self.response.set_status(200)
            self.response.headers['Content-Type'] = "application/json"
            self.response.out.write(simplejson.dumps(job))
            return
        else:
            cache.add(key, harvest.get_job(name, source_name, 'new'))

        # Add harvest task that targets harvest backed instance 1
        params = dict(name=name, source=source_name)
        taskqueue.add(url='/backend/harvest', queue_name='backend-harvest', eta=datetime.datetime.now(), target='harvest', params=params)
        self.response.set_status(202) # Accepted
Ejemplo n.º 29
0
    def post(self):
        """Backend harvest task: stream points for (name, source) into the
        datastore in batches, updating the cached job status as it goes.

        Responds 404 when the source is unknown (marking the job 'error')
        or when no job was registered for the cache key.
        """
        # Parameters checked by frontend
        name = self.request.get('name')
        source_name = self.request.get('source')

        # Compute the job cache key up front: the error path below needs it
        # too (the original referenced `key` before it was assigned, which
        # raised NameError for unknown sources).
        key = get_job_cache_key(name, source_name)

        # Get source
        source = sources.get(source_name)
        if not source:
            logging.error('Cannot harvest without a source')
            self.error(404)
            # Update job status to 'error'
            job = get_job(name, source_name, 'error', msg='Unsupported source')
            cache.add(key, job)
            return

        # Check cache for harvest job
        job = cache.get(key)
        if not job:
            self.error(404)
            self.response.headers['Content-Type'] = "application/json"
            self.response.out.write('{"error":"unknown job %s"}' % key)
            return

        count = 0

        # Update job status to 'working'
        cache.add(key, get_job(name, source_name, 'working', msg=count))

        # Get points from source and put them into datastore in batches
        pcount = 0
        for points in self.get_points(name, source):
            logging.info('HARVEST BACKEND MEMORY = %s after %s points' % (runtime.memory_usage().current(), count))
            entities = []
            for p in points:
                pkey = Key('Point', '%s-%s-%s' % (source_name, name, pcount))
                pcount += 1
                entities.append(Point(key=pkey, lat=p[0], lng=p[1]))
                entities.append(PointIndex.create(pkey, p[0], p[1], name, source_name))
            model.put_multi(entities)
            count += len(points)
            # Keep the polled job status fresh with the running count.
            cache.add(key, get_job(name, source_name, 'working', msg=count))

        # Update job status to 'done'
        # TODO: Done now or after backend rendering completes?
        cache.add(key, get_job(name, source_name, 'done', msg=count))
Ejemplo n.º 30
0
 def cacheImage(self):
     """Store the fetched image bytes in both memcache and the blob cache,
     keyed by the request URL."""
     memcache.add(self.request.url, self.image)
     cache.add(self.request.url, self.image,value_type="blob")
Ejemplo n.º 31
0
 def testExpired(self):
     """An entry added with a 1-second TTL must be gone once the TTL elapses."""
     self.assertEqual(cache.add("a",GOOD_RESPONSE,1),True)
     time.sleep(1)
     # This cache returns False (not None) for missing/expired keys.
     self.assertEqual(cache.get('a'),False)
Ejemplo n.º 32
0
 def testClear(self):
     """clear() must wipe every cached entry."""
     cache.add("a",GOOD_RESPONSE,2)
     self.assertEqual(cache.get('a'),'result')
     self.assertEqual(cache.clear(),True)
     # After clearing, lookups report the miss sentinel False.
     self.assertEqual(cache.get('a'),False)
Ejemplo n.º 33
0
 def testRemove(self):
     """remove(key) must evict exactly that entry."""
     cache.add("a",GOOD_RESPONSE,2)
     self.assertEqual(cache.get('a'),'result')
     self.assertEqual(cache.remove('a'),True)
     # After removal, lookups report the miss sentinel False.
     self.assertEqual(cache.get('a'),False)
Ejemplo n.º 34
0
import metro_transit
import cache
from model import BusLocationList
from datetime import datetime
import time

cached_time = 60  # 60 seconds


def get_bus_locations(stop_number):
    """Return a BusLocationList for *stop_number*.

    Serves a cached list when one exists; otherwise queries the Metro
    Transit API, wraps the result with an expiry timestamp, caches it, and
    returns it.
    """
    cached_list = cache.fetch(stop_number, BusLocationList)
    if cached_list:
        print('RETURN FROM CACHE')
        return cached_list

    print('RETURN FROM API CALL')
    locations = metro_transit.get_bus_locations(stop_number)
    fresh_list = BusLocationList(locations, stop_number, now_plus_expiry())
    cache.add(fresh_list)
    return fresh_list


def now_plus_expiry():
    """Return the current Unix time plus the cache TTL (cached_time seconds)."""
    return time.time() + cached_time
Ejemplo n.º 35
0
    def get(self):
        """Render the main page: a personal newsfeed for signed-in users
        plus several cached artwork/artist/comment lists."""
        # Personal newsfeed, only for an authenticated user.
        newsfeed = None
        if self.user_info.user:
            newsfeed_artworks = db.NewsFeed.all().filter(
                'user_email =',
                self.user_info.user_email).order('-date').fetch(7)
            newsfeed = [
                convert.convert_artwork_for_page(a, 200, 150)
                for a in newsfeed_artworks
            ]
            # Pad very short feeds up to 3 tiles; trim mid-sized feeds to 3.
            if 0 < len(newsfeed) < 3:
                for i in range(len(newsfeed), 3):
                    newsfeed.append({'not_found': True})
            elif len(newsfeed) < 7:
                newsfeed = newsfeed[0:3]

        # Each section below follows the same pattern: serve from cache,
        # otherwise query the datastore, convert for display, and cache.
        recent_artworks = cache.get(cache.MC_MAIN_PAGE_RECENT_IMAGES_KEY)
        if not recent_artworks:
            all_artworks = db.Artwork.all()
            all_artworks = all_artworks.order('-date').fetch(3, 0)
            recent_artworks = [
                convert.convert_artwork_for_page(a, 200, 150)
                for a in all_artworks
            ]
            cache.add(cache.MC_MAIN_PAGE_RECENT_IMAGES_KEY, recent_artworks)

        editor_choice = cache.get(cache.MC_MAIN_PAGE_RECENT_EDITOR_CHOICE)
        if not editor_choice:
            choice_artworks = db.Artwork.all().filter('editor_choice =', True)
            choice_artworks = choice_artworks.order(
                '-editor_choice_date').fetch(3, 0)
            editor_choice = [
                convert.convert_artwork_for_page(a, 200, 150)
                for a in choice_artworks
            ]
            cache.add(cache.MC_MAIN_PAGE_RECENT_EDITOR_CHOICE, editor_choice)

        top_favorites = cache.get(cache.MC_MAIN_PAGE_TOP_FAVORITES)
        if not top_favorites:
            top_favorite_artworks = db.FavoriteCounter.all().order(
                '-count').order('-date')
            top_favorite_artworks = top_favorite_artworks.fetch(3, 0)
            top_favorites = [
                convert.convert_artwork_for_page(a, 200, 150)
                for a in top_favorite_artworks
            ]
            cache.add(cache.MC_MAIN_PAGE_TOP_FAVORITES, top_favorites)

        recent_favorites = cache.get(cache.MC_MAIN_PAGE_RECENT_FAVORITES)
        if not recent_favorites:
            recent_favorites_artworks = db.Favorite.all().order('-date')
            recent_favorites_artworks = recent_favorites_artworks.fetch(3, 0)
            recent_favorites = [
                convert.convert_artwork_for_page(a, 200, 150)
                for a in recent_favorites_artworks
            ]
            cache.add(cache.MC_MAIN_PAGE_RECENT_FAVORITES, recent_favorites)

        recent_comments = cache.get(cache.MC_MAIN_PAGE_RECENT_COMMENTS)
        if not recent_comments:
            comments = db.Comment.all().order('-date').fetch(5, 0)
            recent_comments = [
                convert.convert_comment_for_page_rich(c) for c in comments
            ]
            cache.add(cache.MC_MAIN_PAGE_RECENT_COMMENTS, recent_comments)

        productive_artists = cache.get(cache.MC_MAIN_PAGE_PRODUCTIVE_ARTISTS)
        if not productive_artists:
            p_artists = db.UserProfile.all().order('-artworks_count').fetch(5)
            productive_artists = [
                convert.convert_user_profile(a) for a in p_artists
            ]
            cache.add(cache.MC_MAIN_PAGE_PRODUCTIVE_ARTISTS,
                      productive_artists)

        top_rated_artists = cache.get(cache.MC_MAIN_PAGE_TOP_RATED_ARTISTS)
        if not top_rated_artists:
            r_artists = db.UserProfile.all().order('-favorite_count').fetch(5)
            top_rated_artists = [
                convert.convert_user_profile(a) for a in r_artists
            ]
            cache.add(cache.MC_MAIN_PAGE_TOP_RATED_ARTISTS, top_rated_artists)

        last_week_favorites = cache.get(cache.MC_MAIN_PAGE_LAST_WEEK_FAVORITES)
        if not last_week_favorites:
            last_week_favorites = db.LastWeekFavoriteCounters.all().order(
                '-count')
            last_week_favorites = last_week_favorites.fetch(3, 0)
            last_week_favorites = [
                convert.convert_artwork_for_page(a, 200, 150)
                for a in last_week_favorites
            ]
            cache.add(cache.MC_MAIN_PAGE_LAST_WEEK_FAVORITES,
                      last_week_favorites)

        last_month_favorites = cache.get(
            cache.MC_MAIN_PAGE_LAST_MONTH_FAVORITES)
        if not last_month_favorites:
            last_month_favorites = db.LastMonthFavoriteCounters.all().order(
                '-count')
            last_month_favorites = last_month_favorites.fetch(3, 0)
            last_month_favorites = [
                convert.convert_artwork_for_page(a, 200, 150)
                for a in last_month_favorites
            ]
            cache.add(cache.MC_MAIN_PAGE_LAST_MONTH_FAVORITES,
                      last_month_favorites)

        self.write_template(
            'templates/index.html', {
                'artworks': recent_artworks,
                'editor_choice': editor_choice,
                'top_favorites': top_favorites,
                'last_week_favorites': last_week_favorites,
                'last_month_favorites': last_month_favorites,
                'recent_favorites': recent_favorites,
                'comments': recent_comments,
                'productive_artists': productive_artists,
                'top_rated_artists': top_rated_artists,
                'newsfeed': newsfeed,
                'og_title': 'Grid Paint',
                'og_description':
                'An online tool for pixel drawing with different shapes of pixels.',
                'og_url': 'https://grid-paint.com',
                'og_image': 'https://grid-paint.com/img/grid-paint-poster.png'
            })
Ejemplo n.º 36
0
def create_gallery_model(offset_param,
                         artworks_query_func,
                         href_create_func,
                         memcache_cursor_key_func,
                         additional_values_func=None):
    """
    Create a model of artwork list based on request.

    offset_param - offset parameter from request (falsy or negative
        values are treated as offset 0)
    artworks_query_func() - function for create query for artworks
    href_create_func(offset) - function for create next and prev page hyperlinks
    memcache_cursor_key_func(offset) - function to generate keys for cursors stored in MemCache
    additional_values_func(object, values_dict) - function extracts additional values from objects of query result

    Returns a dict with the converted 'artworks' for the page plus
    'has_next_page'/'has_prev_page' flags and next/prev page hyperlinks.
    """
    if offset_param:
        offset = int(offset_param)
    else:
        offset = 0

    if offset < 0:
        offset = 0

    # The first page deliberately spans one extra item; page offsets then
    # advance by fetch_count, which the "<= 1" prev-link check below absorbs.
    if offset == 0:
        fetch_count = page_size + 1
    else:
        fetch_count = page_size

    query = artworks_query_func()

    # Always fetch fetch_count + 1 rows: the extra row is never rendered,
    # it only signals that a next page exists.
    if offset == 0:
        all_artworks = query.run(limit=fetch_count + 1)
    else:
        # Prefer resuming from a cached datastore cursor (cheap); fall back
        # to an offset scan and cache the resulting cursor for next time.
        cursor = cache.get(memcache_cursor_key_func(offset))
        if cursor:
            query = query.with_cursor(start_cursor=cursor)
            all_artworks = query.run(limit=fetch_count + 1)
        else:
            all_artworks = query.run(limit=fetch_count + 1, offset=offset)
            cache.add(memcache_cursor_key_func(offset), query.cursor())

    has_prev_page = (offset > 0)
    has_next_page = False

    index = 0
    artworks = []
    for a in all_artworks:
        index = index + 1
        if index > fetch_count:
            # Reached the extra sentinel row: at least one more page exists.
            has_next_page = True
        else:
            converted_artwork = convert.convert_artwork_for_page(a, 200, 150)

            if additional_values_func:
                additional_values_func(a, converted_artwork)

            artworks.append(converted_artwork)
            if index == fetch_count:
                # Remember where this page ended so the next page can be
                # served from a cursor instead of an offset scan.
                cache.add(memcache_cursor_key_func(offset + fetch_count),
                          query.cursor())

    next_page_href = href_create_func(offset + fetch_count)

    # "<= 1" compensates for the extra item included on the first page, so
    # page 2's "prev" link points back to offset 0.
    if offset - page_size <= 1:
        prev_page_href = href_create_func(0)
    else:
        prev_page_href = href_create_func(offset - page_size)

    return {
        'has_next_page': has_next_page,
        'has_prev_page': has_prev_page,
        'next_page_href': next_page_href,
        'prev_page_href': prev_page_href,
        'artworks': artworks
    }
Ejemplo n.º 37
0
def explore (root):
	"""Breadth-first crawl starting at *root* (Python 2).

	Pops nodes from the module-level ``todolist`` queue, fetches each
	uncrawled node's info, records it in ``graph`` and ``cache``, and
	enqueues its not-yet-cached neighbours.  Returns when the queue is
	empty, or bails out after MAXTODOLEN consecutive iterations with a
	full queue (treated as being stuck in a loop).
	"""

	todolist.append( root )
	
	# count tracks how many consecutive iterations the queue stayed full.
	count = 0
	
	while len(todolist) != 0:
		
		log('[debug] todolist has {0} nodes'.format (len(todolist)))
		
		# Loop-detection guard: a persistently full queue means we keep
		# re-discovering nodes faster than we drain them.
		if len(todolist) == MAXTODOLEN:
			if (count == MAXTODOLEN):
				log  ('\n\n\n ******* Stuck in a loop.. hence exiting !! ********\n\n')
				print '\n\n\n ******* Stuck in a loop.. hence exiting !! ********\n\n'
				return
			count += 1
		else:
			count = 0
		
		# FIFO pop -> breadth-first traversal order.
		n = todolist.pop(0)	
		blist = []
		#log('[debug] n.title : {0}\n does_exist returns : {1}'.format (n.title, graph.does_exist(n.title)) )
		
		if graph.does_exist (n.title) == False:
			# New node: fetch its info; a None authorlist is treated the
			# same as a fetch failure.
			try:
				n.setinfo ()
				if n.authorlist == None:
					log('\n\n[error] node.setinfo rendered authorlist None')	
					raise Exception
			except:
				# Record the failure in the cache so the node is not retried.
				log('[error] node.setinfo failed for : {0} url[{1}]\n'.format (n.title, n.url) )
				cache.add ( n.title, '[failure] '+n.url)
				continue
			try:
				graph.addnode (n)
			except:
				log("[error] <addnode failed for {0}> {1}".format ( n.title ,str(traceback.print_stack()) ))
				continue
			
			for b in n.blist:
				graph.addedge (n, b)
				
			blist = n.blist
		else:
			# Already crawled: reuse the neighbours stored in the graph.
			log('[debug] ** traversing already crawled node **')
			blist = graph.get_neighbours(n.title)
			
			
		# Mark the node as visited (cache.get below relies on this).
		cache.add ( n.title, n.url )
		
		print blist
		for b in blist:
			# NOTE(review): 'continue' keeps scanning remaining neighbours
			# even though the queue is full -- presumably to log each skip.
			if len(todolist) == MAXTODOLEN :
				log('[debug] ** MAXTODOLEN hit !! **')
				continue
			if cache.get(b.title) == None:
				todolist.append(b)
			else:
				log('[debug] ** Already cached !! **')
	
	log ('\n\n\n[finished] nothing left in todolist.\n\n')
Ejemplo n.º 38
0
    def _start_thread(self, client_conn, client_addr):
        """
        Serve one proxied client connection.

        Parses a single request from client_conn and then either:
          * answers it straight from the response cache,
          * rejects it when the host is on the block list,
          * forwards a GET to the origin server, filtering and caching
            the response body, or
          * establishes a CONNECT tunnel and relays the server response.

        client_conn -- accepted client socket
        client_addr -- client address tuple, used only for error reporting
        """
        r = client_conn.recv(config['MAX_LEN'])

        request = Request(r)
        if (request.host is None) :
            # Could not parse a host out of the request; drop the client.
            client_conn.close()
            return

        client_conn.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)

        # Cache hit: reply immediately without contacting the origin.
        cacheData = cache.get(request.path)
        if cacheData is not None:
            client_conn.send(cacheData)
            client_conn.close()
            return

        # Site Blocker
        if blocked_sites.isBlocked(request.host):
            client_conn.send(blocked_sites.not_allowed(request.host).encode())
            # Fix: this branch previously returned without closing the
            # client socket, leaking the connection.
            client_conn.close()
            return

        # blocked_sites.print()
        print("REQUESTED HOST: " + request.host)

        server_conn = None
        try :
            if (request.command == "GET") :
                if (request.port is None) :
                    request.port = 80

                server_conn = self._create_sock(request)
                server_conn.sendall(r)

                # print("GET: " + request.path)
                # Renamed from "filter" to avoid shadowing the builtin.
                content_filter = contentfilter.ContentFilter("config")

                while 1:
                    data = server_conn.recv(config['MAX_LEN'])         # receive data from web server
                    if (len(data) > 0):
                        # Non-gzip paths are streamed through unfiltered.
                        if content_filter.notGZIPPath(request.path):
                            client_conn.send(data)
                            continue
                        try:
                            data_filtered = content_filter.storeData(data)
                            client_conn.send(data_filtered)
                            if data_filtered != b"":
                                cache.add(request.path, data_filtered)
                        except UnicodeDecodeError:
                            # Payload the filter cannot decode: pass it
                            # through unmodified.
                            client_conn.send(data)
                        except OSError as e:
                            if "Not a gzipped file" in str(e):
                                client_conn.send(data)

                    else:
                        break

            elif (request.command == "CONNECT") :

                if (request.port is None) :
                    request.port = 443

                server_conn = self._create_sock(request)

                conn_resp = "HTTP/1.1 200 Connection established\r\n"+"Proxy-agent: Pyx\r\n"+"\r\n"
                client_conn.sendall(conn_resp.encode())

                # NOTE(review): only one client->server payload is relayed
                # before streaming the response; long-lived TLS sessions may
                # need a bidirectional relay loop -- confirm intended scope.
                data = client_conn.recv(config['MAX_LEN'])
                server_conn.send(data)

                while (1) :
                    data = server_conn.recv(config['MAX_LEN'])
                    if (len(data) > 0):
                        client_conn.send(data)
                    else:
                        break

            if server_conn is not None:
                server_conn.close()
            if client_conn:
                client_conn.close()

        except socket.error as error_msg:
            print ("ERROR: ",client_addr, error_msg)
            # Best-effort cleanup of whichever sockets were opened.
            if server_conn:
                server_conn.close()
            if client_conn:
                client_conn.close()