Code Example #1
File: views.py Project: TeGie/cttabletennis
def send_emails(request):
    num_emails = 0
    if 'pid' not in request.GET:
        players = Player.objects.all()
        urls = []
        for player in players:
            url = '/tournament/send_emails/?pid=%s' % player.pk
            taskqueue.add(url=url, method='GET')
            urls.append(url)
        return HttpResponse("Queued:\n%s" % urls)
    really = True
    player = Player.objects.get(pk=request.GET['pid'])
    to_email = '*****@*****.**'
    cc = ['*****@*****.**']
    if really:
        to_email = player.user.email
        cc.extend(('*****@*****.**', '*****@*****.**'))
    message_body = render_to_string('tournament/begin_email.txt',
                                    {'player': player})
    message = mail.EmailMessage(
        sender="Table Tennis Tournament <*****@*****.**>",
        to=to_email,
        cc=cc,
        subject='Table Tennis Tournament Begins!',
        body=message_body)
    message.send()
    num_emails += 1
    return HttpResponse("sent %d emails" % num_emails)
Code Example #2
File: user_app.py Project: ikon42/reddit-unite
 def POST(self, user_id):
     user = users.get_current_user()
     if user:
         d = web.input()
         f = contact_form(message=d.message)
         if f.validate() and util.user_exists(user_id.lower()):
             taskqueue.add(
                 url="/task/send_mail",
                 queue_name="email-throttle",
                 params={"sender_id": user.user_id(), "recipient_id": user_id, "message": f.message.data},
             )
             raise web.seeother("/" + user_id + "#message_sent")
         elif f.validate() and user_id.lower() == "us":
             taskqueue.add(
                 url="/task/send_mail",
                 queue_name="email-throttle",
                 params={"sender_id": user.user_id(), "recipient_id": "us", "message": f.message.data},
             )
             raise web.seeother("/" + user_id + "#message_sent")
         else:
             return t.render(
                 util.data(
                     title="Get in touch!",
                     instructions="""You will always reveal your email address
             when you send a message!""",
                     form=f,
                     subject=" ".join([user.nickname(), "wants to get in touch!"]),
                 )
             )
     else:
         return t.render(util.data(title="Not allowed!", instructions="You must be signed in to send messages!"))
Code Example #3
File: favorite.py Project: cwyark/v2ex
 def get(self, topic_num):
     if 'Referer' in self.request.headers:
         go = self.request.headers['Referer']
     else:
         go = '/'
     member = CheckAuth(self)
     t = self.request.get('t').strip()
     if member:
         if member.username_lower_md5 == t:
             topic = GetKindByNum('Topic', int(topic_num))
             if topic is not False:
                 q = db.GqlQuery("SELECT * FROM TopicBookmark WHERE topic = :1 AND member = :2", topic, member)
                 if q.count() == 0:
                     bookmark = TopicBookmark(parent=member)
                     bookmark.topic = topic
                     bookmark.member = member
                     bookmark.put()
                     member = db.get(member.key())
                     member.favorited_topics = member.favorited_topics + 1
                     member.put()
                     memcache.set('Member_' + str(member.num), member, 86400)
                     n = 'r/t' + str(topic.num) + '/m' + str(member.num)
                     memcache.set(n, True, 86400 * 14)
                     taskqueue.add(url='/add/star/topic/' + str(topic.key()))
     self.redirect(go)
Code Example #4
File: stock.py Project: homan/warenix.appengine
	def enque(self, stock_code, target_price):
		url = '/stock/monitor/worker/'
		params = {
			'stock_code': stock_code,
			'target_price': target_price,
		}
		taskqueue.add(url=url, params=params)
Code Example #5
File: views.py Project: axiak/cttabletennis
def send_emails(request):
    num_emails = 0
    if 'pid' not in request.GET:
        players = Player.objects.all()
        urls = []
        for player in players:
            url = '/tournament/send_emails/?pid=%s' % player.pk
            taskqueue.add(url=url, method='GET')
            urls.append(url)
        return HttpResponse("Queued:\n%s" % urls)
    really = True
    player = Player.objects.get(pk=request.GET['pid'])
    to_email = '*****@*****.**'
    cc = ['*****@*****.**']
    if really:
        to_email = player.user.email
        cc.extend(('*****@*****.**', '*****@*****.**'))
    message_body = render_to_string('tournament/begin_email.txt', {'player': player})
    message = mail.EmailMessage(sender="Table Tennis Tournament <*****@*****.**>",
                                to=to_email,
                                cc=cc,
                                subject='Table Tennis Tournament Begins!',
                                body=message_body)
    message.send()
    num_emails += 1
    return HttpResponse("sent %d emails" % num_emails)
Code Example #6
File: main.py Project: dpnchl/tasks-pycharm
 def delete(self, guid):
   """Delete the task with the given id"""
   if helpers.authorized(self.request.params['UUID'], self.request.params['ATO'], self.request.params['action']):
     # search for the Project and delete if found
     key = db.Key.from_path('Task', int(guid))
     task = db.get(key)
     wantsNotifications = {"true": True, "false": False}.get(self.request.params['notify'].lower())
     currentUserId = self.request.params['UUID']
     cukey = db.Key.from_path('User', int(currentUserId))
     user = db.get(cukey)
      if task is not None:
       # cache current values before updates
       taskName = task.name
       taskType = task.type
       taskPriority = task.priority
       taskStatus = task.developmentStatus
       taskValidation = task.validation
       taskSubmitterId = task.submitterId
       taskAssigneeId = task.assigneeId
       taskEffort = task.effort
       taskProjectId = task.projectId
       taskDescription = task.description
       # Push notification email on the queue if we need to notify
       if notification.should_notify(currentUserId,task,"deleteTask",wantsNotifications):
         taskqueue.add(url='/mailer', params={'taskId': int(guid), 'currentUUID': self.request.params['UUID'], 'action': "deleteTask", 'name': taskName, 'type': taskType, 'priority': taskPriority, 'status': taskStatus, 'validation': taskValidation, 'submitterId': taskSubmitterId, 'assigneeId': taskAssigneeId, 'effort': taskEffort, 'projectId': taskProjectId, 'description': taskDescription})
       task.delete()
       self.response.set_status(204, "Deleted")
     else:
       self.response.set_status(404, "Not Found")
   else:
     self.response.set_status(401, "Not Authorized")
Code Example #7
 def get(self):
     logging.info("Starting to load tasks %s" % datetime.datetime.now())
     ret = ''
     gp_pub = RsaKey.gql("WHERE name = :1", 'gp_pub').get()
     gae_priv = RsaKey.gql("WHERE name = :1", 'gae_priv').get()
     gp_pubkey = self.makePubKey(gp_pub.keystring)
     gae_privkey = self.makePrivKey(gae_priv.keystring)
     
     
     users = Account.gql('WHERE active = :1', True)
     
     count = 0
     for user in users:
         #logging.info(user.user)
         try:
             ret = '%s!gp!%s!gp!%s' % (user.user, user.gPass, user.twitter)
             gae_one = rsa.encrypt(str(ret), gp_pubkey)
             #logging.debug("WTF %s" % ret)
             send_key = gae_one.replace('\n', '!gp!')
             taskqueue.add(url='/worker/', params={'key': send_key})
             #logging.error("KEY !%s!" % gae_one.replace('\n', '!gp!'))
             #logging.error("KEY !%d!" % gae_one.find('\n'))
             logging.debug("USER: %s TWITTER: %s KEY: %s" % (user.user, user.twitter, send_key))
             count += 1
         except:
             logging.error("something is f****d with USER: %s TWITTER: %s" % (user.user, user.twitter))
     
     logging.info("Ended load tasks (%d users) %s" % (count, datetime.datetime.now()))
Code Example #8
  def post(self):
    start = self.request.get('start')
    kind = self.request.get('kind')

    if kind == 'second':
      to_kind = SecondUserKind
      from_kind = FirstUserKind
      migrate = second_from_first
    else:
      to_kind = FirstUserKind
      from_kind = SecondUserKind
      migrate = first_from_second

    query = from_kind.all()
    if start:
      query.filter('__key__ >', db.Key(start))
    old = query.fetch(3)
    if not old:
      logging.info('All done!')
      return

    last_key = old[-1].key()
    new = [migrate(x) for x in old]
    db.put(new)
    db.delete(old)

    taskqueue.add(
        url='/worker/migration',
        params=dict(
            start=last_key,
            kind=kind))
Code Example #9
def refresh_movie_source(request):
    if 'source_key' not in request.REQUEST:
        return HttpResponseServerError(
            'No source key specified in request params')

    source = models.MovieListingSource.get(request.REQUEST['source_key'])
    if not source:
        logging.error('Unable to find MovieListingSource: %s',
                      request.REQUEST['source_key'])
        return HttpResponse('Error unable to find Source')
    elif not source.yql:
        logging.error('No yql for MovieListingSource: %s' % str(source))
        return HttpResponse('No YQL for source')
    elif not source.settings:
        logging.error('No settings for MovieListingSource: %s' % str(source))
        return HttpResponse('No settings for source')

    logging.info('Refreshing movie from source %s', str(source))

    yql = source.yql
    if 'offset' in request.REQUEST:
        query_offset = int(request.REQUEST['offset']) + 1
        yql = '%s limit %d offset %d' % (
            yql, settings.MOVIE_REFRESH_QUERY_SIZE, query_offset)

    form_data = urllib.urlencode({
        "q": yql,
        "format": "xml",
        "diagnostics": "false"
    })
    result = urlfetch.fetch(url=settings.YQL_BASE_URL,
                            payload=form_data,
                            method=urlfetch.POST)
    dom = minidom.parseString(result.content)

    result_nodes = dom.getElementsByTagName('results')[0].childNodes
    name_nodes = xpath.find(source.settings.name_xpath, dom)
    leaches_nodes = xpath.find(source.settings.leaches_xpath, dom)
    logging.info('Found %d raw names', len(name_nodes))

    strip_white_pattern = re.compile(r"\s+")
    source_results = []
    for index, name_node in enumerate(name_nodes):
        logging.debug('Node: ' + result_nodes[index].toxml())
        raw_name = strip_white_pattern.sub(' ', getText(name_node))
        leaches = strip_white_pattern.sub(' ', getText(leaches_nodes[index]))
        logging.info('Raw Name: %s, Leaches: %s', raw_name, leaches)
        source_results.append(
            models.MovieListEntry(raw_movie_name=raw_name,
                                  leaches=int(leaches),
                                  active=False))

    db.put(source_results)

    #Refresh done using map/reduce.  First we map to find the movie details
    for source_result in source_results:
        taskqueue.add(url=reverse('topmovies.task_handler.find_movie'),
                      params={'source_entry_key': source_result.key()})

    return HttpResponse("Loaded results for source: %s" % str(source))
Code Example #10
File: favorite.py Project: zu1kbackup/v2ex-gae
 def get(self, topic_num):
     if 'Referer' in self.request.headers:
         go = self.request.headers['Referer']
     else:
         go = '/'
     member = CheckAuth(self)
     t = self.request.get('t').strip()
     if member:
         if member.username_lower_md5 == t:
             topic = GetKindByNum('Topic', int(topic_num))
             if topic is not False:
                 q = db.GqlQuery("SELECT * FROM TopicBookmark WHERE topic = :1 AND member = :2", topic, member)
                 if q.count() == 0:
                     bookmark = TopicBookmark(parent=member)
                     bookmark.topic = topic
                     bookmark.member = member
                     bookmark.put()
                     member = db.get(member.key())
                     member.favorited_topics = member.favorited_topics + 1
                     member.put()
                     memcache.set('Member_' + str(member.num), member, 86400)
                     n = 'r/t' + str(topic.num) + '/m' + str(member.num)
                     memcache.set(n, True, 86400 * 14)
                     taskqueue.add(url='/add/star/topic/' + str(topic.key()))
     self.redirect(go)
Code Example #11
    def post(self):
        key = self.request.get('key')

        # Add the task to the default queue.
        taskqueue.add(url='/worker', params={'key': key})

        self.redirect('/')
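
Most of the snippets on this page only show the enqueueing side; App Engine push queues deliver each task as an HTTP POST to the handler mapped at the task's url. For orientation, here is a minimal sketch of what a matching /worker handler could look like. The CounterWorker class, the Counter model, and the WSGI routing below are illustrative assumptions, not code from any of the listed projects.

# Hedged sketch of a push-queue worker (GAE Python 2, webapp framework).
# Assumption: tasks were enqueued with taskqueue.add(url='/worker',
# params={'key': key}), so the key arrives as a POST parameter.
from google.appengine.ext import db, webapp

class Counter(db.Model):
    count = db.IntegerProperty(indexed=False)

class CounterWorker(webapp.RequestHandler):
    def post(self):
        key = self.request.get('key')

        def txn():
            counter = Counter.get_by_key_name(key)
            if counter is None:
                counter = Counter(key_name=key, count=1)
            else:
                counter.count += 1
            counter.put()

        # Returning 200 marks the task done; raising lets the queue retry it.
        db.run_in_transaction(txn)

app = webapp.WSGIApplication([('/worker', CounterWorker)])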
Code Example #12
File: tasks.py Project: datacommunitydc/eventgrinder
def schedule_next_newsletter(request):
    try:
        site=db.get(db.Key(request.POST['site']))
        now=datetime.now()
        schedule=site.tz.localize(now+relativedelta(days=1,weekday=MO, hour=6, minute=0, second=0)).astimezone(utc).replace(tzinfo=None)
        if os.environ['SERVER_SOFTWARE'].startswith('Dev'):schedule=datetime.now()
        #next_newsletter=SitesNextNewsletter.get_or_insert('%s-%s' %(site.slug, schedule.strftime("%Y%W")),
        #publish_at=site.tz.localize(schedule))
        chimp=site.chimp
        if chimp:
            url="http://%s/week-of/%s/newsletter" %( site.host,date(schedule,"Y-n-j") )
            url=url.replace('localhost:8083', 'dctechevents.com')
            params={'url':url, 'apikey':chimp.apikey, 'listid':chimp.listid,
            'from_name':  site.name,
            'from_email': '*****@*****.**',
            'subject': "%s Weekly" % site.name

            }
            logging.info("Scheduling campaign with params %s" % str(params) )
            taskqueue.add(url='/subscriptions/create_and_send/',
                params=params,
                name="weekly-%s-%s"%(site.slug, schedule.strftime("%Y%W")), eta=schedule)
        else:
            logging.error("no mailchimp setup for %s" % site.slug)

    except TaskAlreadyExistsError:
        pass

    except Exception,e:
        logging.error("%s in \n%s"% (traceback.format_exc(),str(request.POST)))
Code Example #13
 def get(self):
     data = self.__get_page_content()
     data = json.loads(data)
     count = 0
     for stock in data['searchresults']:
         if stock['ticker'].find('399') == 0 or stock['ticker'].find(
                 '000') == 0 and stock['exchange'] == 'SHA' or stock[
                     'ticker'].find('900') == 0 and stock[
                         'exchange'] == 'SHA' or stock['ticker'].find(
                             '200') == 0 and stock['exchange'] == 'SHE':
             continue
         queue_name = 'queue' + str(count % 10)
         taskqueue.add(url='/tasks/updatesinglemarketcapital',
                       queue_name='updatesinglemarketcapital',
                       params={
                           'ticker':
                           stock['ticker'],
                           'title':
                           stock['title'],
                           'exchange':
                           stock['exchange'],
                           'local_currency_symbol':
                           stock['local_currency_symbol'],
                           'value':
                           stock['columns'][0]['value']
                       },
                       method='GET')
         count += 1
Code Example #14
File: admin.py Project: chrisbbenyard/bookreader
  def post(self):
    all_chapters_query = Database.Chapter.all()  
    
    (broken_chapters, checked_chapter_number, last_chapter_cursor) = get_broken_chapters()  
    
    if last_chapter_cursor:    
      all_chapters_query.with_cursor(last_chapter_cursor)
    else:
      checked_chapter_number = 0
      broken_chapters = []
        
    chapters = all_chapters_query.fetch(20)        
    for chapter in chapters:
      checked_chapter_number = checked_chapter_number + 1
      try:
        chapter.catalog_ref
      except:
        broken_chapters.append(chapter.key())
    
    if chapters:
      last_chapter_cursor = all_chapters_query.cursor()
    else:
      last_chapter_cursor = None
      
    memcache.set('chapter_cursor', last_chapter_cursor)
    memcache.set('broken_chapters', broken_chapters)
    memcache.set('checked_chapter_number', checked_chapter_number)

    if last_chapter_cursor:
      taskqueue.add(url='/admin/task/check_broken_chapters', params={})
Code Example #15
def task_enqueue_categories(request):
    """
    Enqueues animal tables. To be run by cron
    """
    q = Queue('animal-indexes')
    category = None  #request.GET.get('category')
    if category:
        # Enqueue just the requested category on the named queue.
        q.add(
            Task(url='/adopt/_tasks_/fetch_category',
                 method='post',
                 payload=json.dumps({
                     'category': category,
                     'url': _get_petharbor_url(category)
                 })))
    else:
        for x in PETHARBOR_URLS.keys():
            q.add(
                Task(url='/adopt/_tasks_/fetch_category',
                     name="fetch-%s-%s" %
                     (x, datetime.datetime.now().strftime('%Y%m%d%H%M%S')),
                     method='post',
                     payload=str(
                         json.dumps({
                             'category': x,
                             'url': _get_petharbor_url(x)
                         }))))
    return TextResponse('OK')
Code Example #16
    def post(self):
        n = int(self.request.get('n'))
        l = 50

        import csv

        csvReader = csv.reader(open('attractions4.csv'))

        count = 1
        for row in csvReader:

            if count >= n and count < n + l:

                attractions = Attraction.all()
                attractions.filter("id =", row[0])
                attraction = attractions.get()

                if attraction:
                    attraction.datetime = datetime.datetime(
                        year=int(row[8][0:4]),
                        month=int(row[8][5:7]),
                        day=int(row[8][8:10]),
                        hour=int(row[8][11:13]),
                        minute=int(row[8][14:16]),
                        second=int(row[8][17:19]))
                    attraction.put()

            elif count >= n:

                taskqueue.add(url='/fixdate', params={
                    'n': n + l,
                })
                break

            count = count + 1
Code Example #17
 def append_call_task(self, f):
     taskqueue.add(url='/worker',
                   params={
                       'task': 'call_function',
                       'func': f,
                       '_pass': botconfig.BotConfig["InternalPass"]
                   })
Code Example #18
File: main.py Project: ideamonk/slidemytime
    def get(self):
        if not users.is_current_user_admin():
            self.redirect("/")
            return

        for i in xrange(100):
            taskqueue.add(url='/cleaner', params={}, method="GET")
        self.response.out.write("Queued 100 jobs to clean 1000 images")
Code Example #19
File: views.py Project: kelvinn/unmetric
def probe_one_website(request):
    if request.method == 'GET':
        url = request.GET.get('url')
        q = Website.all()
        q.filter("url =", url)
        website_obj = q.get()

        try:
            result = urlfetch.fetch(url,
                                    method='HEAD',
                                    deadline=6,
                                    headers={'Cache-Control': 'max-age=30'})
            result_code = result.status_code
        except:
            result_code = 0

        if website_obj.status == 'Up' and result_code != 200:
            taskqueue.add(url='/verify_down/', params={'url': url})
        elif website_obj.status == 'Down' and result_code == 200:
            taskqueue.add(url='/verify_up/', params={'url': url})
        else:
            website_obj.status_code = result_code
            website_obj.probe_time = datetime.now()
            website_obj.put()

    return HttpResponse("OK")
Code Example #20
File: models.py Project: IanLewis/kay
    def txn():
      key_name = cls.get_key_name(user_name)
      user = cls.get_by_key_name(key_name)
      if user:
        from kay.auth import DuplicateKeyError
        raise DuplicateKeyError(_(u"This user name is already taken."
                                  " Please choose another user name."))
      salt = crypto.gen_salt()
      activation_key = crypto.sha1(salt+user_name).hexdigest()
      profile_key = db.Key.from_path(cls.kind(), key_name,
                                     RegistrationProfile.kind(),
                                     activation_key)

      expiration_date = datetime.datetime.now() + \
          datetime.timedelta(seconds=settings.ACCOUNT_ACTIVATION_DURATION)
      taskqueue.add(url=url_for('_internal/expire_registration',
                                registration_key=str(profile_key)),
                    eta=expiration_date, transactional=True)
      taskqueue.add(url=url_for('_internal/send_registration_confirm',
                                registration_key=str(profile_key)),
                    transactional=True)
      user = cls(key_name=key_name, activated=False, user_name=user_name,
                 password=crypto.gen_pwhash(password), email=email)
      profile = RegistrationProfile(user=user, parent=user,
                                    key_name=activation_key)
      db.put([profile, user])
      return user
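
A note on the two transactional adds in this snippet: taskqueue.add(..., transactional=True) may only be called inside a datastore transaction, and the tasks are enqueued only if that transaction commits. The enclosing call is not part of this excerpt; a minimal sketch of the assumed call site, using the txn closure defined above, might be:

from google.appengine.ext import db

# Hedged sketch of the assumed call site: txn is the closure defined in the
# excerpt above. Both transactional taskqueue.add() calls are only enqueued
# if this transaction commits; outside a transaction they raise an error.
user = db.run_in_transaction(txn)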
Code Example #21
File: helpers.py Project: lizconlan/uksnow
def fetch_snow_tweets():
    logging.info("Fetching tweets for tag #uksnow")
    key = 'uksnow'
    feeditemcount = 1
    latest_cached = 0
    since_id = models.get_from_keystore(key)
    tweets = twitter.get_twitter_search('uksnow&filter=links', since_id)
    for tweet in tweets['results']:
        if feeditemcount == 1:
            try:
                latest_cached = str(tweet['id'])
            except:
                latest_cached = 0
        taskqueue.add(url='/services/store_tweet',
                      params={
                          'userid': tweet['from_user_id'],
                          'username': tweet['from_user'],
                          'imgurl': tweet['profile_image_url'],
                          'message': tweet['text'],
                          'messageid': tweet['id'],
                          'message_created': tweet['created_at'],
                          'geo': tweet['geo'],
                      })
        feeditemcount += 1
    if latest_cached > 0:
        models.add_to_keystore(key, latest_cached)
Code Example #22
    def get(self):
        n = self.request.get('n') or 1
        f = self.request.get('f') or 1

        taskqueue.add(url='/fixregion', params={'n': n, 'f': f})

        self.response.out.write("running")
Code Example #23
 def get(self, key_name):
     p = OSMPOI.get_by_key_name(key_name)
     p.update_importance(p.importance + 1,
                         max_z=config.max_z_osm,
                         gt_key_name="osm")
     taskqueue.add(url='/gt/admin/update_tiles', method='GET')
     self.response.out.write('increased importance')
Code Example #24
    def post(self):
        delete = self.request.get("delete")
        if delete:
            g = Guild.get( delete )
            if users.is_current_user_admin() or g.owner == users.get_current_user():
                # the character fetcher deletes the characters once it notices that their
                # guild is gone - this makes the delete step here faster.
                g.delete()
            return
        
        if not users.get_current_user():
            return self.redirect("/")

        continent = self.request.get("continent")
        realm = self.request.get("realm")
        guildname = self.request.get("guild")

        guild = Guild.find_or_create( continent, realm, guildname )
        if not guild:
            return self.error(404)
        guild.owner = users.get_current_user()
        guild.put()
        taskqueue.add(url='/fetcher/guild/', params={'key': guild.key()})
        
        self.redirect( guild.url() )
Code Example #25
 def get(self):
     if is_download_on():
         ll = self.request.get('ll').strip()
         taskqueue.add(url='/gt/do_update', params={'ll': ll})
         self.response.out.write("added task")
     else:
         self.response.out.write("OSM download state is off")
Code Example #26
File: tasks.py Project: LeWaGeorge/CMStatsServer
    def get(self):
        db.delete(DeviceVersions.all().fetch(400))

        total = (Device.all().count() / 10) + 1
        for x in xrange(total):
            offset = x * 10
            taskqueue.add(url='/tasks/AggregateVersionsWorker', params={'offset': offset})
Code Example #27
def main():
    """
    This method adds a task to the task queue that queries the SoundCloud API,
    fetches the latest tracks uploaded since the last backend update,
    and adds them to the task queue for further processing.
    It is intended to be called by a cron job at short intervals, e.g. every 3 minutes.
    """
    try:
        logging.info("Backend update started")

        time_from = backend_utils.calculate_time_from()
        time_to = datetime.datetime.now().isoformat()

        taskqueue.add(url='/backend-update-task',
                      params={
                          'time_from': time_from,
                          'time_to': time_to
                      })

        logging.info("Added backend update task to task queue. time_from: %s time_to: %s" % \
                    (time_from, time_to))

    except DeadlineExceededError:
        logging.warning(
            "Backend Update has been canceled due to Deadline Exceeded")
        for name in os.environ.keys():
            logging.info("%s = %s" % (name, os.environ[name]))
Code Example #28
    def test_get_registers_appropriate_tasks(self):
        moxer = Mox()
        request, response = new_mock_request_response(moxer)

        _stub_taskqueue(moxer)
        moxer.StubOutWithMock(Feed, "find_active", use_mock_anything=True)

        def create_call(i):
            source_name = "source-%i" % i
            source = MockEntity(key_name=source_name, name=source_name)
            return MockEntity(key_name="feed-%i" % i,
                              artifact_source=source,
                              url="hi")

        q_range = xrange(0, 5)
        Feed.find_active().AndReturn(
            MockQuery(q_range, create_call=create_call))

        # expects queued tasks for each feed
        for i in q_range:
            taskqueue.add(name=IgnoreArg(), url=IgnoreArg())

        moxer.ReplayAll()
        handler = CronIngestDriverHandler()
        handler.initialize(request, response)
        handler.get()
        moxer.VerifyAll()
Code Example #29
def split_gdata(request):
    try:
        if request.method == 'POST':
            key=db.Key(request.POST.get('ical_key'))
            source=ICalendarSource.get(key)
            gdata_source=memcache.get(request.POST.get('cache_key'))
            memcache.delete(request.POST.get('cache_key'))
            feed=gdata.calendar.CalendarEventFeedFromString(gdata_source)
            cal_count=0
            for gevent in feed.entry:
                cal_count=cal_count +1
                source_cache_key=request.POST.get('cache_key')
                cache_key=source_cache_key +"-"+ str(cal_count)
                memcache.set(cache_key, gevent.ToString(),1200)

                params = {'cache_key': cache_key,
                          'ical_key': request.POST['ical_key']}
                taskqueue.add(url='/events/parse_one_gdata/',
                              params=params,
                              name=cache_key, countdown=30)

    except urlfetch.DownloadError:
        raise

    except Exception, e:
        logging.error("%s in \n%s" % (traceback.format_exc(), str(request.POST)))
Code Example #30
File: tasks.py Project: pombredanne/eventgrinder
def split_gdata(request):
    try:
        if request.method == 'POST':
            key=db.Key(request.POST.get('ical_key'))
            source=ICalendarSource.get(key)
            gdata_source=memcache.get(request.POST.get('cache_key'))
            memcache.delete(request.POST.get('cache_key'))
            feed=gdata.calendar.CalendarEventFeedFromString(gdata_source)
            cal_count=0
            for gevent in feed.entry:
                cal_count=cal_count +1
                source_cache_key=request.POST.get('cache_key')
                cache_key=source_cache_key +"-"+ str(cal_count)
                memcache.set(cache_key, gevent.ToString(),1200)
                
                params = {'cache_key': cache_key,
                          'ical_key': request.POST['ical_key']}
                taskqueue.add(url='/events/parse_one_gdata/',
                              params=params,
                              name=cache_key, countdown=30)

    except urlfetch.DownloadError:
        raise

    except Exception, e:
        logging.error("%s in \n%s" % (traceback.format_exc(), str(request.POST)))
Code Example #31
File: tasks.py Project: rosskarchner/eventgrinder
def migrate_profiles(request):
    try:
        set_namespace('')
        old_site = db.get(db.Key(request.POST.get('old_site')))
        q = Query(Profile, namespace='').filter('site = ', old_site)
        old_profiles = q.fetch(1000)

        set_namespace(request.POST.get('new_namespace'))
        new_site = db.get(db.Key(request.POST.get('new_site')))

        for old_profile in old_profiles:
            new_profile = clone_entity(old_profile,
                                       key_name=old_profile.key().name())
            new_profile.site = new_site
            new_profile.put()
            #old_profile.delete()

        taskqueue.add(
            url='/admin/migrate-sources/',
            params={
                'new_namespace': request.POST.get('new_namespace'),
                'old_site': old_site.key(),
                'new_site': new_site.key(),
            },
        )

    except Exception, e:
        logging.error("%s in \n%s" %
                      (traceback.format_exc(), str(request.POST)))
Code Example #32
 def post(self):
     """Starts crawling."""
     project_name = self.GetRequiredParameter("project_name")
     start_index = self.GetOptionalParameter("start_index", None)
     if not start_index:
         last = crawl_state.GetLastCrawlResults(bugs_util.Provider.ISSUETRACKER, project_name)
         start_index = last.end_index
     else:
         start_index = int(start_index)
     max_results = 25
     query = gdata.projecthosting.client.Query(start_index=start_index, max_results=max_results)
     phclient = gdata.projecthosting.client.ProjectHostingClient()
     try:
         issues = phclient.get_issues(project_name, query=query)
     except gdata.client.Error, e:
         retries = int(self.request.headers.get("X-AppEngine-TaskRetryCount", 0))
         if retries < 4:
             logging.warning(
                 "Retry crawling, retries is less than 5, " "current retries:  %s, start_index: %d",
                 retries,
                 start_index,
             )
             raise  # Re-raise, so that the task is re-tried.
         else:
             # Skip current, try at start_index + 1.
             logging.warning("Skipping current index, start_index: %d", start_index)
             taskqueue.add(
                 url="/tasks/crawl/issuetracker/recrawl_project",
                 params={"project_name": project_name, "start_index": start_index + 1},
             )
             return
Code Example #33
 def post(self):
     """Starts crawling."""
     project_name = self.GetRequiredParameter('project_name')
     start_index = self.GetOptionalParameter('start_index', None)
     if not start_index:
         last = crawl_state.GetLastCrawlResults(
             bugs_util.Provider.ISSUETRACKER, project_name)
         start_index = last.end_index
     else:
         start_index = int(start_index)
     max_results = 25
     query = gdata.projecthosting.client.Query(start_index=start_index,
                                               max_results=max_results)
     phclient = gdata.projecthosting.client.ProjectHostingClient()
     try:
         issues = phclient.get_issues(project_name, query=query)
     except gdata.client.Error, e:
         retries = int(
             self.request.headers.get('X-AppEngine-TaskRetryCount', 0))
         if retries < 4:
             logging.warning(
                 'Retry crawling, retries is less than 5, '
                 'current retries:  %s, start_index: %d', retries,
                 start_index)
             raise  # Re-raise, so that the task is re-tried.
         else:
             # Skip current, try at start_index + 1.
             logging.warning('Skipping current index, start_index: %d',
                             start_index)
             taskqueue.add(url='/tasks/crawl/issuetracker/recrawl_project',
                           params={
                               'project_name': project_name,
                               'start_index': start_index + 1
                           })
             return
Code Example #34
File: main.py Project: DrOctogon/sample-apps
  def post(self):
    key = self.request.get('key')

    # Add the task to the default queue.
    taskqueue.add(url='/worker', params={'key': key})

    self.redirect('/')
Code Example #35
File: cron.py Project: dmaulikr/JourneyTag
    def get(self):
        key = self.request.get('key')
        if key:
            query = db.GqlQuery(
                "SELECT * FROM Inventory WHERE __key__ > :1 ORDER BY __key__",
                db.Key(key))
        else:
            query = db.GqlQuery("SELECT * FROM Inventory ORDER BY __key__")

        newKey = None
        inventories = query.fetch(PAGE_SIZE + 1)
        if len(inventories) == PAGE_SIZE + 1:
            newKey = str(inventories[-1].key())

        for inventory in inventories:
            span = datetime.datetime.utcnow() - inventory.dateCreated
            if inventory.tag.status != 'new' and span.days > jt.gamesettings.autoDropTagDays:
                inventory.tag.pickedUp = False
                inventory.tag.lastUpdated = datetime.datetime.utcnow()
                inventory.tag.put()

                db.delete(inventory)

        if newKey:
            nextUrl = '/cron/tag_timeout?key=%s' % newKey
        else:
            nextUrl = None

        if nextUrl:
            taskqueue.add(url=nextUrl, method='GET')
Code Example #36
File: email.py Project: axiak/homesearchr
def enqueue_notify(email, admin=False):
    clean_email = email.encode('base64').rstrip().rstrip('=')
    taskqueue.add(url="/filters/email/one/",
                  params={"email": email, "admin": str(int(admin))},
                  name="Notify-%s-%s" % (clean_email,
                                         randstr()),
                  method="GET")
Code Example #37
File: main.py Project: bopopescu/TurkNet
    def post(self):
        if experiment_grouping_already_started(
                self.experiment):  # be idempotent
            return

        cycle = Cycle(range(self.experiment.cohort_count))

        workers, peer_workers = [], {}

        for worker in Worker.all().filter('experiment = ', self.experiment):
            worker.cohort_index = cycle.next()

            workers.append(worker)

            if peer_workers.has_key(worker.cohort_index):
                peer_workers[worker.cohort_index].append(worker)
            else:
                peer_workers[worker.cohort_index] = [worker]

        for worker in workers:
            previous_cohort_index = index_decr(worker.cohort_index,
                                               self.experiment.cohort_count)

            previous_cohort = peer_workers[previous_cohort_index]

            worker.peer_worker = previous_cohort.pop(
                random.randrange(len(previous_cohort)))
            worker.put()

            if worker.cohort_index == 0:
                taskqueue.add(queue_name='worker-notification',
                              params={'key': worker.key()})
Code Example #38
File: handlers.py Project: bovine/cowguard
 def queueTask(camkey, delay=0):
     taskqueue.add(queue_name="poll-source-queue",            
                 url="/tasks/poll_sources",
                 method="POST",
                 countdown=delay,
                 params=dict(camera=camkey, epoch=time.mktime(datetime.now().timetuple()) ),
                 transactional=False)
Code Example #39
    def get(self):
        feed_id = self.request.get('feed_id')

        if feed_id:
            feed_key = db.Key(feed_id)
            feed = bloggerproxy.BlogProxy.get(feed_key)

            now = datetime.datetime.utcnow()
            if now - feed.last_crawled > datetime.timedelta(seconds=30):
                # Update the time first. Even if the crawl fails, we want to
                # throttle it.
                feed.last_crawled = now
                feed.put()
                bloggerproxy.crawlProxiedFeed(feed_key)
            else:
                logging.info('Feed crawled recently enough. Skipping.')
        else:
            proxied = bloggerproxy.BlogProxy.all().fetch(500)

            for bp in proxied:
                taskqueue.add(url='/recrawl.do',
                              method='GET',
                              params=dict(feed_id=bp.key()))

        self.response.set_status(200)
Code Example #40
	def get(self, sitecode):
		site = models.Site.get_by_key_name(sitecode)
		feeds = models.Feed.all().filter('site =', site)
		for feed in feeds:
			taskqueue.add(url='/index/feed', params={'page': "1", "url":feed.url}, method='GET')
			self.response.out.write(feed.url)
			self.response.out.write("<br />")
Code Example #41
    def get(self):
        """
        Query the DB and queue any feeds that haven't been processed since
        update_interval
        """

        update_interval = timedelta(hours=1)

        current_datetime = timeutils.now_utc()

        query = ContentFeed.all()
        query.filter("last_update <", current_datetime - update_interval)

        if query.count() == 0:
            logging.debug("No entries to queue")
        else:
            for feed in query:
                # get the member's OAuth token and secret

                last_update = timeutils.add_utc_tzinfo(feed.last_update)
                feed_consumer_params = {
                    "feed_key": feed.key(),
                    "owner_id": feed.owner.user_id()
                }

                try:
                    taskqueue.add(url="/blogs/feed/consumer",
                                  params=feed_consumer_params)
                    logging.debug("Queued feed: \"%s\" %s" %
                                  (feed.url, last_update.ctime()))
                except taskqueue.Error:
                    logging.error("Unable to queue feed: \"%s\"", feed.url)
                    return
Code Example #42
File: main.py Project: dpnchl/tasks-pycharm
 def post(self):
   wantsNotifications = {"true": True, "false": False}.get(self.request.params['notify'].lower())
   # collect the data from the record
   task_json = simplejson.loads(self.request.body)
   logging.info(self.request.body)
   # if the user is a guest the project must be unallocated
   currentUserId = self.request.params['UUID']
   cukey = db.Key.from_path('User', int(currentUserId))
   user = db.get(cukey)
   if str(user.role) != '_Guest' or (task_json.has_key('projectId') == False or task_json['projectId'] == None):
     # create a new task with the passed in json
     task = helpers.apply_json_to_model_instance(Task(),task_json)
     # save task
     task.put()
     guid = task.key().id_or_name()
     # Push notification email on the queue if the task has some sort of status, etc..
     if notification.should_notify(currentUserId,task,"createTask", wantsNotifications):
       taskqueue.add(url='/mailer', params={'taskId': int(guid), 'currentUUID': self.request.params['UUID'], 'action': "createTask", 'name': "New Task"})
     new_url = "/tasks-server/task/%s" % guid
     task_json["id"] = guid
     self.response.set_status(201, "Task created")
     self.response.headers['Location'] = new_url
     self.response.headers['Content-Type'] = 'text/json'
     self.response.out.write(simplejson.dumps(task_json))
   else:
     self.response.set_status(401, "Not Authorized")
Code Example #43
        def txn():
            key_name = cls.get_key_name(user_name)
            user = cls.get_by_key_name(key_name)
            if user:
                from kay.auth import DuplicateKeyError
                raise DuplicateKeyError(
                    _(u"This user name is already taken."
                      " Please choose another user name."))
            salt = crypto.gen_salt()
            activation_key = crypto.sha1(salt + user_name).hexdigest()
            profile_key = db.Key.from_path(cls.kind(), key_name,
                                           RegistrationProfile.kind(),
                                           activation_key)

            expiration_date = datetime.datetime.now() + \
                datetime.timedelta(seconds=settings.ACCOUNT_ACTIVATION_DURATION)
            taskqueue.add(url=url_for('_internal/expire_registration',
                                      registration_key=str(profile_key)),
                          eta=expiration_date,
                          transactional=True)
            taskqueue.add(url=url_for('_internal/send_registration_confirm',
                                      registration_key=str(profile_key)),
                          transactional=True)
            user = cls(key_name=key_name,
                       activated=False,
                       user_name=user_name,
                       password=crypto.gen_pwhash(password),
                       email=email)
            profile = RegistrationProfile(user=user,
                                          parent=user,
                                          key_name=activation_key)
            db.put([profile, user])
            return user
Code Example #44
File: main.py Project: zh/ReaTiWe
 def post(self, entryid):
   user = users.get_current_user()
   if user:
     logout_url = users.create_logout_url("/")
     microUser = getMicroUser(user)
      entry = MicroEntry.get_by_id(int(entryid))
      if not entry:
        self.redirect('/')
        return
     content = self.request.get('content').strip()
     if content:
       content = content.replace('\n','').replace('\r',' ').replace('\t',' ')
       comment = Comment(author=microUser, content=content)
       addCommentEntry(entry, comment)
       # send the comment to the entry owner (but not myself)
       if entry.author.validated and not entry.author.silent and microUser.nick != entry.author.nick:
         msg = "comment on entry #%d:\n" % int(entryid)
         msg += content
         msg += "\nhttp://%s/entry/%d\n" % (settings.SITE_URL, int(entryid)) 
         taskqueue.add(url="/send", params={"from":microUser.nick, 
                                            "to":entry.author.nick, 
                                            "message":msg, 
                                            "secret":microUser.secret})
     self.redirect("/entry/%d" % int(entryid))  
   else:
     login_url = users.create_login_url('/')
     self.redirect(login_url) 
Code Example #45
 def get(self):
     rivi=VoittoRivi.gql("ORDER BY vuosi DESC,kierros DESC").get()
     if rivi:
         tuorein_kierros=rivi.kierros
     else:
         tuorein_kierros=0
     taskqueue.add(url=self.request.path,params={"kierros":tuorein_kierros})
Code Example #46
File: clementine.py Project: Shedward/Website
 def get(self):
     self.redirect(ICECAST_URL)
     try:
         taskqueue.add(url='/_tasks/counters',
                       params={'key': 'icecast-directory'})
     except taskqueue.Error, e:
         logging.warning('Failed to add task: %s', e)
Code Example #47
File: tasks.py Project: mayanks/smstweet
  def post(self):
    # New user has joined in. Follow him and post a welcome message
    try:
      sms_client = OAuthClient('twitter', self)
      sms_client.token = OAuthAccessToken.all().filter(
                'specifier =', 'smstweetin').filter(
                'service =', 'twitter').fetch(1)[0]

      user = self.request.get('screen_name')
      count = int(self.request.get('count'))
      info = sms_client.post('/friendships/create', 'POST', (200,401,403), screen_name=user)  # TODO : this may fail, try three times 
      # Stop sending the follow status
      #status = "@%s has started using SMSTweet. Welcome %s to the group and tell about us to your friends" % (user, user)
      #info = sms_client.post('/statuses/update', 'POST', (200,401), status=status)  # TODO : this may fail, try three times 

      sms_client.token = OAuthAccessToken.all().filter(
                'specifier =', user).filter(
                'service =', 'twitter').fetch(1)[0]
      info = sms_client.post('/friendships/create', 'POST', (200,401,403), screen_name='smstweetin')  # TODO : this may fail, try three times 

    except (urlfetch.DownloadError, ValueError, Timeout), e:
      logging.warning("SmsTweetin:Friendship/create failed (%d) %s" % (count,e))
      if count > 10:
        logging.error("SmsTweetin:Friendship/create Finally giving up")
      else:
        # Try again
        taskqueue.add(url = '/tasks/follow_new_user', params = { 'screen_name' : user, 'count' : count + 1 })
Code Example #48
def update_or_create_document(yaml_obj):
    """
    Submit an object read from our YAML files and it will update it in the
    database, creating it if it doesn't already exist. 
    
    Returns the database object, and a boolean that is true if a new object 
    was created.
    """
    # Check if the table already exists in the datastore
    obj = Document.get_by_key_name(yaml_obj.get('slug'))
    # Update the obj if it exists
    if obj:
        # Loop through the keys and update the object one by one.
        for key in yaml_obj.keys():
            # With some special casing for projects...
            if key == 'project_slug':
                proj = Project.get_by_key_name(yaml_obj.get('project_slug'))
                obj.project = proj
            # ...and for tags.
            elif key == 'tags':
                obj.tags = get_tag_keys(yaml_obj.get("tags"))
            else:
                setattr(obj, key, yaml_obj.get(key))
        # Save it out
        obj.put()
        created = False
    # Create it if it doesn't
    else:
        # If it has tags....
        if yaml_obj.has_key('tags'):
            # Convert to database keys
            tags = get_tag_keys(yaml_obj.pop("tags"))
            # Load the data
            obj = Document(key_name=yaml_obj.get('slug'), **yaml_obj)
            # Set the tags
            obj.tags = tags
        # Otherwise....
        else:
            # Update the basic values
            obj = Document(key_name=yaml_obj.get('slug'), **yaml_obj)
            # And clear out the tag data
            obj.tags = []
            obj.similar_documents = []
        # Connect it to a project, if it exists
        if yaml_obj.has_key('project_slug'):
            proj = Project.get_by_key_name(yaml_obj.get('project_slug'))
            obj.project = proj
        # Save it out
        obj.put()
        created = True
    
    # Update the similarity lists of documents with the same tags
    taskqueue.add(
        url='/_/document/update-similar/',
        params=dict(key=obj.key()),
        method='GET'
    )
    
    # Pass it out
    return obj, created
Code Example #49
File: tasks.py Project: mayanks/smstweet
  def post(self):
    status = self.request.get('status')
    phone = self.request.get('phone')
    count = int(self.request.get('count'))

    tuser = TwitterUser.get_by_phonenumber(phone)
    if tuser == None:
      logging.warning("Could not fetch tuser based on phone number %s",phone)
      return

    client = OAuthClient('twitter', self)
    try:
      info = client.post('/statuses/update', 'POST', (200,401,403), tuser, status=status)
      if 'error' in info:
        logging.warning("Submiting failed as credentials were incorrect (user:%s) %s", tuser.user, info['error'])
        tuser.lastError = "Twitter returned '%s' for your last update. You may be over limit or may have to register with SMSTweet again" % info['error']
        tuser.put()
      else:
        logging.debug("updated the status for user %s", tuser.user)
        Tweet.save_tweet(info)

    except (urlfetch.DownloadError, ValueError), e:
      logging.warning("Update:update (%d) could not be fetched. %s " % (count,e))
      if count > 10:
        logging.error("Tried updating the message 10 times. Finally giving up.")
      else:
        # Try again
        taskqueue.add(url = '/tasks/post_message', params = { 'phone' : phone, 'count' : count + 1, 'status' : status })
Code Example #50
	def get(self):
		url = self.request.get("url")
		item = models.Item.get_by_key_name(url)
		query = "select src, height, width from html where url='"+ url +"' and xpath='//div[@id=\"main\"]//img' and height > 200 and width > 200"
		result = helpers.do_yql(query)
		self.response.out.write(url)
		self.response.out.write("<br />")
		imgurl = False
		try:
			element = result['query']['results']['img'][0]
			if "www.dn.se" in element['src']:
				imgurl = element['src']
			else:
				imgurl = "http://www.dn.se%s" % (element['src'])
			self.response.out.write("<img src=\"%s\"/>" % imgurl)
			if imgurl:
				item.img_url = imgurl
			else:
				item.img_url = False
			item.put()
			self.response.out.write("<br />")
			self.response.out.write(imgurl)
		except:
			self.response.out.write(result)
		self.response.out.write("<br />")
		if item:
			self.response.out.write("is an item")
			taskqueue.add(url='/scrape/imagecache', params={"url":item.item_url, "imgurl":imgurl}, method='GET')
		else:
			self.response.out.write("scrape item")
		self.response.out.write("<br />")
Code Example #51
File: main.py Project: vishalmanohar/tweetonim
    def post(self):
        cursor = int(self.request.get("cursor", default_value=0))
        tokens = OAuthAccessToken.all().fetch(10, offset = cursor)

        for token in tokens:
            if xmpp.get_presence(token.email):
                client = OAuthClient('twitter', self, token.email)
                
                user_cache = memcache.get("user_cache_" + token.email)
                
                if user_cache is not None and user_cache.since_id is not None:
                    #response = client.get('/statuses/home_timeline', count = 5, 
                    #                  since_id = last_sent_id)
                    logging.info('since_id:' +  str(user_cache.since_id))
                    response = self.fetchTweets(client, token.email, 
                                    '/statuses/home_timeline', count = 5, 
                                      since_id = user_cache.since_id)
                else:
                    response = self.fetchTweets(client, token.email, 
                                    '/statuses/home_timeline', count = 5)
                
                if len(response) > 0:
                    xmpp.send_message(token.email, 'Some recent tweets:' + getFormattedMessage(response))
        
        if len(tokens) == 10:
            taskqueue.add(url='/sendrecentworker', 
                          params={'cursor': cursor + 10})
Code Example #52
File: tasks.py Project: LeWaGeorge/CMStatsServer
    def get(self):
        clsName = self.request.get('cls')
        cls = loadClass(clsName)

        total = (cls.all().count() / 100) + 1
        for x in xrange(total):
            taskqueue.add(url='/tasks/FlushCounterWorker', params={'cls': clsName})
Code Example #53
File: webapp.py Project: imuhata8ri/kemonotag
  def get(self):
    size = 10
    obj = MyPixivRtagData(tag=u'ケモノ',pagenum = int(0), parenttag=u'ケモノ')
    obj.save()

    #Delete all tags in MyRtagData.
    query = MyPixivRtagData.all().order('time').fetch(1500)
    db.delete(query)

    #Find Relative tags of kemono x 5 pages
    kemono='ケモノ'
    kemonotag = []

    for i in range(1, 6):
      print i
      kemonosubtag = []
      tgnm = reltagparser(i,kemono)
      for j in range(0, len(tgnm)):
        tg = tgnm[j]
        obj = MyPixivRtagData(tag=tg,pagenum = int(i), parenttag=kemono)
        obj.save()
        kemonosubtag.append(tg)
      kemonotag.append(kemonosubtag)
    #Request relative tags in RtagDataVertex
    datas = MyPixivRtagData.all().fetch(100)
    for i in range(0, len(datas), size):
      params = {}
      for j in range(0, size):
        if i+ j >= len(datas):
          break
        params["tag"+str(j)] = datas[i+j].tag
        params["pagenum"+str(j)] = datas[i+j].pagenum
      taskqueue.add(url='/pixivrtagtask', params = params)
Code Example #54
File: blog.py Project: giolekva/socialme
    def post(self, slug):
        body = self.get_argument("body")
        title = self.get_argument("title")
        blog = Entry.all().filter("slug =", slug).get()

        was_public = blog.was_public

        blog.body = body
        blog.title = title
        if self.get_argument("save", None):
            blog.is_public = False
        else:
            if not blog.was_public:
                blog.published = datetime.now()
            blog.is_public = True
            blog.was_public = True

        blog.put()

        cat = Categories.all().ancestor(blog).get()
        cat.is_public = blog.is_public
        cat.put()

        if was_public:
            taskqueue.add(url="/admin/ping_hub", method="GET")

        self.recalc_archive()
        self.recalc_tags()

        self.redirect("/%s" % blog.slug)
Code Example #55
File: main.py Project: DFectuoso/hd-log
def sendNotifyIoNotifications(update):
    profiles = Profile.all().filter("notifyIoNotification =", True)
    for profile in profiles:
        taskqueue.add(
            url="/notifications/notifyio/post",
            params={"email": profile.user, "text": update.user_fullname() + ":" + update.body, "title": "New HD-Log"},
        )
Code Example #56
File: tasks.py Project: saga/kindledump
def fetch_pages(request):
    user_email = request.POST.get('user_email', None)

    user = users.User(email=user_email)
    if user is None:
        logging.error('User not found: %s', user_email)
        raise TypeError('User not found')

    page_urls_p = request.POST.get('page_urls', None)
    if not page_urls_p:
        logging.error('Page urls not given')
        raise TypeError('Page urls not given')

    page_urls = pickle.loads(str(page_urls_p))

    logging.debug('fetching pages: %s;;%s', user_email, page_urls)
    pages = []
    for url in page_urls:
        html = urllib.urlopen(url).read(10240)
        doc = readability.Document(html)
        pages.append(doc.summary().encode('ascii','ignore'))

    rd = ReadyData(owner=user, data_type='page')
    rd.content = '<hr>'.join(p for p in pages)
    rd.merged = len(pages)
    rd.put()
    logging.debug('ReadyData for fetched pages created: %s', page_urls)

    # schedule task for fetched data send
    params = {'ready_data_key': rd.key()}
    taskqueue.add(url=reverse('fetcher-send'), params=params)
    logging.debug('task created')

    return True
Code Example #57
  def getAndPostNextTask(self, objCommonTaskMessage, taskStatusHistoryKey): 
  
    nextSeqNum = objCommonTaskMessage.currentSeqNum + 1 
    logging.info("nextSeqNum=" + str(nextSeqNum) + " processCode=" + objCommonTaskMessage.processCode + "\n\n") 
    query = Tasks.gql("WHERE sequence > :1 and processCode = :2", nextSeqNum, objCommonTaskMessage.processCode)
    LIMIT = 1  #just get the one next record with the next higher sequence number 
    taskList = query.fetch(LIMIT,offset=0)
    logging.info("After DB Query: len(taskList)=" + str(len(taskList)) + "\n\n") 
    if len(taskList) < 1:  
       #then we are done, cuz no more tasks for this process code 
       self.UpdateDatabaseForEndProcess(objCommonTaskMessage, taskStatusHistoryKey)
       return 
    #if taskList[0].processCode != objCommonTaskMessage.processCode:
       #then we are done because we found
       # BUT query was changed to look for same process code above so we don't need this "IF" 
       #UpdateDatabaseForEndProcess(objCommonTaskMessage)
    
    logging.info("nextTaskCode=" + taskList[0].taskCode + " seq=" + str(taskList[0].sequence) ) 
    #set the next task code and seqNum, and write back to task/queue       
    objCommonTaskMessage.taskCode       = taskList[0].taskCode 
    objCommonTaskMessage.currentSeqNum  = taskList[0].sequence 
    objCommonTaskMessage.isManual       = taskList[0].isManual 
    strPickledObject = jsonpickle.encode(objCommonTaskMessage) 
    logging.info("strPickledObject to taskQueue = " + strPickledObject) 

    taskqueue.add(url='/commonTaskHandler',
                  method='POST',
                  payload=str(strPickledObject))