Exemple #1
0
    def delete(self, request):
        """Delete the resource identified by ``request.id``.

        Removes the datastore entity, then (outside DEBUG) enqueues a task
        that deletes the underlying storage object as well.

        :param request: API request carrying a urlsafe entity key in ``id``.
        """
        self.check_authenciation()  # NOTE(review): name looks like a typo of check_authentication; defined elsewhere

        resource = ndb.Key(urlsafe=request.id).get()
        if resource:

            try:
                resource.key.delete()

                if not settings.DEBUG:
                    # Clean up the stored object asynchronously.
                    taskqueue.add(url='/tasks/delete_resources',
                                  params={
                                      'object_name': resource.object_name,
                                      'bucket_name': resource.bucket
                                  },
                                  queue_name='resource-delete')

                else:
                    logging.info(
                        '/_ah/spi is not a dispatchable path, task queue:delete_resources won\'t be executed at development env. '
                    )

            # "except X as e" works on both Python 2.6+ and 3 (the original
            # used the py2-only "except X, e" form).
            except taskqueue.Error as error:
                # Lazy %-style args: formatting deferred until actually logged.
                logging.error('An error occurred in endpoints APIs: %s', error)
Exemple #2
0
 def GET(self):
     """Admin fan-out: enqueue retriever tasks for hot StackExchange questions.

     With ?service=&question_id= it fetches one post; with only ?service= it
     enqueues a task per hot question above the vote threshold; with neither
     it enqueues one task per supported (non-meta) service.
     """
     result = {}
     service_parameter = web.input(service = None)['service']
     question_id_parameter = web.input(question_id = None)['question_id']
     sort_parameter = web.input(sort = 'week')['sort']
     pagesize_parameter = web.input(pagesize = 30)['pagesize']
     try:
         if service_parameter:
             se_downloader = StackExchangeDownloader(service_parameter)
             if question_id_parameter:
                 se_downloader.get_post(question_id_parameter, False)
             else:
                 questions = se_downloader.get_questions_by_hotness(pagesize = pagesize_parameter, 
                                                                    sort = sort_parameter)
                 for question in questions:
                     question_id = int(question['question_id'])
                     score = int(question['score'])
                     if score > VOTES_ENTRY_LEVEL:
                         taskqueue.add(url='/admin/topquestionsretriever?service=%s&question_id=%s&sort=%s' % \
                                       (service_parameter, question_id, sort_parameter) , 
                                       method = 'GET', 
                                       queue_name = 'retriever')
         else:
             supported_services = StackAuthDownloader.get_supported_services()
             # BUG FIX: the original iterated "supported_services.keys" (the
             # bound method object itself, never called), which raises
             # TypeError. Call it — assumes get_supported_services() returns
             # a mapping; TODO confirm against StackAuthDownloader.
             for service in supported_services.keys():
                 if not service.startswith('meta.'):
                     taskqueue.add(url='/admin/topquestionsretriever?service=%s&sort=%s' % \
                                   (service, sort_parameter), 
                                   method = 'GET', 
                                   queue_name = 'retriever')
     except Exception:
         logging.exception("Exception handling TopQuestionRetriever")
     result['result'] = True
     return render.admin(result)
Exemple #3
0
 def GET(self):
     """Enqueue an admin cache-refresh task, delayed by 5 seconds."""
     taskqueue.add(url='/admin?action=cacherefresh',
                   method='GET',
                   queue_name='populate',
                   countdown=5)
Exemple #4
0
 def request(self, request):
     """Queue a date-span report task for the current user and acknowledge it."""
     current_user = users.get_current_user()
     email = current_user.email()
     # Normalize the requested range once; the same values go to the worker
     # and into the acknowledgement message.
     fromDate = parseMsgTime(request.fromDate)
     toDate = parseMsgTime(request.toDate)
     taskqueue.add(url='/reportWorker',
                   method='GET',
                   params={'email': email,
                           'fromDate': fromDate,
                           'toDate': toDate,
                           'type': 'dateSpan'})
     return RequestReportResponseMessage(
         message='a Report from UTC:%s to UTC:%s. Report Email is %s' % (fromDate, toDate, email))
Exemple #5
0
    def get(self, request):
        """Search books by title / ISBN / author via SRU and queue Amazon-save tasks.
        ---
        parameters:
                -   name: title
                    type: string
                    paramType: query
                -   name: isbn
                    type: string
                    paramType: query
                -   name: author
                    type: string
                    paramType: query
        """
        title = request.GET.get("title", None)
        isbn = request.GET.get("isbn", None)
        creator = request.GET.get("author", None)

        # Use a distinct name for the SRU client instead of rebinding the
        # incoming HTTP ``request`` parameter (the original shadowed it).
        sru_request = SRU()
        sru_request.cnt = 20  # result count for the SRU search
        books = sru_request.search(title, creator, isbn)
        books = self.remove(books)  # post-process hits (semantics defined on this view)
        results = {"books": [self.serialize(book) for book in books]}
        for book in books:
            # Fire-and-forget: persist each hit via the Amazon task handler.
            taskqueue.add(url="/tasks/save/amazon",
                          params={
                              u"title": book.title,
                              u"original_title": book.original_title,
                              u"isbn": book.isbn
                          })
        return Response(results)
    def add_taste_movie(self, movie, taste=1.0):
        """
        Add user's taste of a movie.
        :param movie: movie to be voted
        :type movie: Movie
        :param taste: taste to associate to the movie
        :type taste: float
        :return: None
        """
        # Deterministic id (movie id + user id) so a repeated vote for the
        # same movie reuses the same taste record.
        new_taste = TasteMovie(id=(movie.key.id() + self.key.id()),
                               taste=taste)

        if taste == 1.0:
            new_taste.added = True

        new_taste.add_movie(movie)
        stored_key = new_taste.put()

        if stored_key not in self.tastes_movies:
            # First time this taste is recorded for the user.
            self.tastes_movies.append(stored_key)
            self.put()

        # Trigger the background recomputation for this user/movie/taste.
        taskqueue.add(url='/_ah/start/task/movie_tastes/' + self.key.id() +
                          '/' + movie.key.id() + '/' + str(taste), method='GET')
Exemple #7
0
 def GET(self):
     """Admin fan-out: enqueue retriever tasks for hot StackExchange questions.

     With ?service=&question_id= it fetches one post; with only ?service= it
     enqueues a task per hot question above the vote threshold; with neither
     it enqueues one task per supported (non-meta) service.
     """
     result = {}
     service_parameter = web.input(service = None)['service']
     question_id_parameter = web.input(question_id = None)['question_id']
     sort_parameter = web.input(sort = 'week')['sort']
     pagesize_parameter = web.input(pagesize = 30)['pagesize']
     try:
         if service_parameter:
             se_downloader = StackExchangeDownloader(service_parameter)
             if question_id_parameter:
                 se_downloader.get_post(question_id_parameter, False)
             else:
                 questions = se_downloader.get_questions_by_hotness(pagesize = pagesize_parameter, 
                                                                    sort = sort_parameter)
                 for question in questions:
                     question_id = int(question['question_id'])
                     score = int(question['score'])
                     if score > VOTES_ENTRY_LEVEL:
                         taskqueue.add(url='/admin/topquestionsretriever?service=%s&question_id=%s&sort=%s' % \
                                       (service_parameter, question_id, sort_parameter) , 
                                       method = 'GET', 
                                       queue_name = 'retriever')
         else:
             supported_services = StackAuthDownloader.get_supported_services()
             # BUG FIX: the original iterated "supported_services.keys" (the
             # bound method object itself, never called), which raises
             # TypeError. Call it — assumes get_supported_services() returns
             # a mapping; TODO confirm against StackAuthDownloader.
             for service in supported_services.keys():
                 if not service.startswith('meta.'):
                     taskqueue.add(url='/admin/topquestionsretriever?service=%s&sort=%s' % \
                                   (service, sort_parameter), 
                                   method = 'GET', 
                                   queue_name = 'retriever')
     except Exception:
         logging.exception("Exception handling TopQuestionRetriever")
     result['result'] = True
     return render.admin(result)
Exemple #8
0
  def get(self, token_string):
    """Accept a self-vote and hand it off to the email worker queue."""
    logging.info("Received a self vote")
    taskqueue.add(url='/worker/self_vote',
                  params={'token': token_string,
                          'votes': self.request.get("votes")},
                  queue_name='email',
                  countdown=0)
    self.response.out.write("Success!")
Exemple #9
0
def run_trigger(trigger):
    """Enqueue a background task that runs *trigger* on the trigger-run queue."""
    taskqueue.add(
        url='/queue/trigger/run',
        params={"trigger_key": trigger.key.urlsafe()},
        queue_name="trigger-run")
Exemple #10
0
def close(order):
    """Finalize an iiko order: close it upstream, charge card payments,
    grant ready bonuses, schedule a review push, and notify the customer.
    """
    company = CompanyNew.get_by_iiko_id(order.venue_id)
    # Orders placed through the auto app are closed directly with the token.
    if company.auto_token and order.source == AUTO_APP_SOURCE:
        close_order(order, company.auto_token)
    if order.payment_type == PaymentType.CARD:
        delivery_terminal = DeliveryTerminal.get_by_id(order.delivery_terminal_id) \
            if order.delivery_terminal_id else None
        # NOTE(review): amount 0 — presumably charges the full pre-authorized
        # amount; confirm against pay_by_card.
        pay_result = pay_by_card(company, delivery_terminal,
                                 order.alfa_order_id, 0)
        logging.info("pay result: %s" % str(pay_result))
        # Success: no errorCode at all, or errorCode == '0'.
        success = 'errorCode' not in pay_result.keys() or str(
            pay_result['errorCode']) == '0'
        if not success:
            logging.warning("pay failed")
            # Skip bonuses / review push / status push on a failed payment.
            return
    if company.is_iiko_system:
        bonus = SharedBonus.query(
            SharedBonus.recipient == order.customer,
            SharedBonus.status == SharedBonus.READY).get()
        if bonus:
            # Activate the customer's pending shared bonus asynchronously.
            taskqueue.add(url='/single_task/bonus/activate',
                          params={'order_id': order.order_id})
    order_user_agent = order.customer.get().user_agent
    if company.review_enable and supports_review(company.iiko_org_id,
                                                 order_user_agent):
        # Ask for a review half an hour after closing.
        taskqueue.add(url='/single_task/push/review',
                      params={'order_id': order.order_id},
                      countdown=60 * 30)

    send_order_status_push(order)
Exemple #11
0
 def generateDallyReport(self): #todo remove dailly report as functionality , spanDate should be used
     """Enqueue a /reportWorker task emailing a report covering the last
     ``days`` days to the current user."""
     days = int( self.request.get('days') )
     user = users.get_current_user()
     email = user.email()
     # Add the task to the default queue.
     # NOTE(review): the key 'type:' below contains a stray colon — the
     # date-span variant of this handler sends 'type'; confirm what
     # /reportWorker actually reads before changing it.
     taskqueue.add(url='/reportWorker', method='GET', params={'email': email, 'days':days, 'type:': 'days'})
     self.response.write('email is going to be sent at %s' % email)
Exemple #12
0
    def addReportJob(self, settings, days):
        """Schedule a /reportWorker task to run at the user's local midnight.

        ``eta`` is the next UTC midnight shifted by the user's timezone
        offset, i.e. midnight in the user's local time.

        :param settings: per-user settings carrying email and timeZoneOffset.
        :param days: report span; the worker receives ``days - 1``.
        """
        logging.info('make plan for settings %s' % settings)
        utcDate = datetime.datetime.utcnow()  # FIXME(original): might be incorrect
        utcDateTrimmed = datetime.datetime.combine(utcDate, datetime.time(0, 0))

        eta = utcDateTrimmed + datetime.timedelta(days=1) - datetime.timedelta(hours=settings.timeZoneOffset)
        # NOTE(review): the worker gets 'days - 1' — presumably because the
        # job runs the next morning; confirm against /reportWorker.
        taskqueue.add(url='/reportWorker', method='GET', params={'email': settings.email, 'days': days - 1}, eta=eta)
    def queue_task(self):
        """Enqueue the named maintenance task on the default-tasks queue."""
        name = self.request.get('name')
        # Dispatch table instead of an if/elif chain; unknown names map to
        # None, matching the original behavior.
        url = {
            'load_churches': '/_tasks/load_churches',
            'index_churches': '/_tasks/index_churches',
        }.get(name)

        taskqueue.add(queue_name='default-tasks', url=url)
        logging.info('Added %s task to queue' % name)
        self.success_response()
Exemple #14
0
def send_review_push(order):
    """Schedule a review push notification for *order*, if the review module
    is enabled in the order's namespace."""
    namespace_manager.set_namespace(order.key.namespace())
    module = config.REVIEW_MODULE
    if module and module.status:
        review = ReviewPush(order=order.key)
        review.put()
        # Deliver after the module's configured delay.
        start = datetime.utcnow() + timedelta(seconds=module.wait_seconds)
        taskqueue.add(url='/task/pushes/review',
                      method='POST',
                      eta=start,
                      params={'review_id': review.key.id()})
Exemple #15
0
def incr(name, delta=1, update_interval=10):
    """Increments a counter.  The increment is generally a memcache-only
    operation, though a task will also be enqueued about once per
    update_interval.  May under-count if memcache contents is lost.

    Args:
      name: The name of the counter.
      delta: Amount to increment counter by, defaulting to 1.
      update_interval: Approximate interval, in seconds, between updates.  Must
                       be greater than zero.
    """
    lock_key = "ctr_lck:" + name
    delta_key = "ctr_val:" + name

    # update memcache
    if delta >= 0:
        v = memcache.incr(delta_key, delta, initial_value=BASE_VALUE)
    elif delta < 0:
        v = memcache.decr(delta_key, -delta, initial_value=BASE_VALUE)

    if memcache.add(lock_key, None, time=update_interval):
        # time to enqueue a new task to persist the counter
        # note: cast to int on next line is due to GAE issue 2012
        # (http://code.google.com/p/googleappengine/issues/detail?id=2012)
        v = int(v)
        delta_to_persist = v - BASE_VALUE
        if delta_to_persist == 0:
            return  # nothing to save

        try:
            qn = random.randint(0, 4)
            qname = 'PersistCounter%d' % qn
            taskqueue.add(url='/task/counter_persist_incr',
                          queue_name=qname,
                          params=dict(name=name,
                                      delta=delta_to_persist))
        except Exception:
            # Narrowed from a bare "except:" so SystemExit/KeyboardInterrupt
            # are not swallowed. Task enqueue failed but the delta is already
            # in memcache; just try to enqueue again next interval.
            return

        # we added the task --> need to decr memcache so we don't double-count
        failed = False
        if delta_to_persist > 0:
            if memcache.decr(delta_key, delta=delta_to_persist) is None:
                failed = True
        elif delta_to_persist < 0:
            if memcache.incr(delta_key, delta=-delta_to_persist) is None:
                failed = True
        if failed:
            # logging.warn is a deprecated alias of logging.warning.
            logging.warning("counter %s reset failed (will double-count): %d",
                            name, delta_to_persist)
Exemple #16
0
    def get(self):
        """Kick off a /populate task when the caller presents the admin key."""
        # Guard clause: reject bad keys up front.
        if self.request.get('admin') != credentials.ADMIN_KEY:
            self.response.write('Wrong key!')
            return

        params = {'admin': credentials.ADMIN_KEY}
        # Forward the optional identifiers only when supplied.
        for field in ('id', 'rank_id'):
            value = self.request.get(field)
            if value:
                params[field] = value

        taskqueue.add(url='/populate', params=params)
        self.response.write('Added task!')
Exemple #17
0
def announce_new_gig(the_gig, the_gig_url, is_edit=False, is_reminder=False, change_string="", the_members=None):
    """Serialize the announcement arguments and enqueue the announce handler.

    ``the_members`` now defaults to None instead of a shared mutable list
    (classic mutable-default pitfall) and is normalized to [] so the pickled
    payload is unchanged for callers that omit it.
    """
    the_members = [] if the_members is None else the_members

    the_params = pickle.dumps({'the_gig_key':   the_gig.key,
                               'the_gig_url':   the_gig_url,
                               'is_edit':       is_edit,
                               'is_reminder':   is_reminder,
                               'change_string': change_string,
                               'the_members':   the_members})

    taskqueue.add(
            url='/announce_new_gig_handler',
            params={'the_params': the_params
            })
 def post(self,*args,**kargs):
     """Soft-delete a project member and enqueue cleanup of their efforts."""
     logging.info("it is here "+self.request.__str__())
     projmemid = self.request.get("id")
     user_info = self.auth.get_user_by_session()
     key = ndb.Key('ProjectMembers', int(projmemid))
     projmem = key.get()

     # Soft delete: flag inactive and record who/when rather than removing.
     projmem.modified_by = user_info['email_address']
     projmem.modified_date = datetime.now()
     projmem.status = False
     projmem.put()

     sprints = sprint.Sprint().get_by_project(projmem.projectid)
     # BUG FIX: the original had a bare "logging.info" attribute access here
     # (a no-op statement, the call parentheses were missing); removed.
     if not sprints:
         logging.info("No sprints")
     else:
         task = taskqueue.add(
             queue_name="my-push-queue",
             url='/deleteusereffortspersist',
             params={'projectid': projmem.projectid.id(), 'userid': projmem.userid.id()})

     self.response.write("success")
Exemple #19
0
def make_rss_feed_for_band(the_band):
    """Enqueue regeneration of the RSS feed for *the_band*."""
    the_params = pickle.dumps({'the_band_key': the_band.key})

    # The original bound the task handle to an unused local; the return value
    # is not needed here.
    taskqueue.add(url='/make_rss_feed_handler',
                  params={'the_params': the_params})
Exemple #20
0
 def request(self, request):
     """Queue a date-span report task for the current user and acknowledge it."""
     current_user = users.get_current_user()
     email = current_user.email()
     # Normalize the requested range once; the same values feed the worker
     # and the acknowledgement message.
     fromDate = parseMsgTime(request.fromDate)
     toDate = parseMsgTime(request.toDate)

     taskqueue.add(url='/reportWorker', method='GET',
                   params={'email': email,
                           'fromDate': fromDate,
                           'toDate': toDate,
                           'type': 'dateSpan'})

     return RequestReportResponseMessage(
         message='a Report from UTC:%s to UTC:%s. Report Email is %s' %
         (fromDate, toDate, email))
Exemple #21
0
def rename_section(section, section_product_cls, section_name):
    """Propagate a section rename to every product linked to the section."""
    linked_products = section_product_cls.query(
        section_product_cls.section_key == section.key)
    for linked in linked_products:
        mem_key = uuid()
        if not linked.product_key:
            continue
        # Stash the new name in memcache (2h TTL) under a one-off key the
        # update task can look up.
        memcache.add(mem_key, {'%s' % section_name: section.name}, 7200)
        taskqueue.add(url=url_for('product.task.update_product',
                                  key_id=linked.product_key.id(),
                                  mem_key=mem_key))
Exemple #22
0
 def post(self):
     """Start password recovery: validate the address, then queue the email."""
     email = self.request.POST.get('email')
     # Very loose sanity check (kept from the original behavior).
     if not (email and "@" in email and "." in email):
         self.request.add_message(
             'Please enter a valid email address', 'error')
         return self.get()
     profile = UserProfile.get_key('password', email).get()
     if not profile:
         self.request.add_message(
             'The email address you provided was not found. '
             'Please try again.', 'error')
         return self.get()
     user = User.get_by_auth_id(profile.key.id())
     # Send the actual email out-of-band.
     taskqueue.add(url='/account/tasks/password-recovery-email', params={
         'recipient_id': user.key.id(),
         })
     return self.sent(email)
Exemple #23
0
def announce_new_gig(the_gig,
                     the_gig_url,
                     is_edit=False,
                     is_reminder=False,
                     change_string="",
                     the_members=None):
    """Serialize gig-announcement arguments and enqueue the announce handler.

    ``the_members`` now defaults to None instead of a shared mutable list
    (classic mutable-default pitfall) and is normalized to [] so the pickled
    payload is unchanged for callers that omit it.
    """
    the_members = [] if the_members is None else the_members

    the_params = pickle.dumps({
        'the_gig_key': the_gig.key,
        'the_gig_url': the_gig_url,
        'is_edit': is_edit,
        'is_reminder': is_reminder,
        'change_string': change_string,
        'the_members': the_members
    })

    taskqueue.add(url='/announce_new_gig_handler',
                  params={'the_params': the_params})
Exemple #24
0
 def rename_category(self):
   """Rename a category: ensure the destination exists, then start batch
   jobs that migrate items and votes from the old name to the new one."""
   old_cat = self.request.params["old_cat"]
   new_cat = self.request.params["new_cat"]
   # Create the destination category if it does not exist yet.
   # (Fixed "== None" to the idiomatic identity test "is None".)
   if Category.get_by_id(new_cat) is None:
     new_cat_instance = Category(id=new_cat)
     new_cat_instance.title = new_cat
     new_cat_instance.put()
   taskqueue.add(url='/task/batch_update_items',
                   params={
                     'migration':'rename_category',
                     'p1': old_cat,
                     'p2': new_cat})
   taskqueue.add(url='/task/batch_update_votes',
                   params={
                     'migration':'rename_category',
                     'p1': old_cat,
                     'p2': new_cat})
   self.response.out.write("Batch job started")
Exemple #25
0
def new_game(request):
    """Create a new Game for the named user with a randomly chosen word.

    Raises:
        endpoints.NotFoundException: if the user does not exist.
        endpoints.BadRequestException: if no word matched the random id.
    """
    user = User.query(User.name == request.user_name).get()
    # Validate the user before doing any word lookups (the original queried
    # the word table first and threw that work away for unknown users).
    if not user:
        raise endpoints.NotFoundException(
            'A User with that name does not exist!')

    total_words = Word.query().count()
    rand_num = random.randint(1, total_words)
    word = Word.query(Word.word_id == rand_num).get()
    try:
        game = Game.new_game(user.key, word.key)
    except AttributeError:
        # word is None when no Word has word_id == rand_num.
        raise endpoints.BadRequestException('Random word not found')

    # Use a task queue to update the average attempts remaining.
    # This operation is not needed to complete the creation of a new game
    # so it is performed out of sequence.
    taskqueue.add(url='/tasks/cache_average_attempts')
    return game
Exemple #26
0
def make_move(request):
    """Apply one letter guess to a hangman-style game; return (game, msg).

    Validates the guess, records it in History, decrements remaining guesses
    on a miss, and ends the game on a win or when guesses run out.
    """
    game = get_by_urlsafe(request.urlsafe_game_key, Game)
    game.number_of_guess += 1
    word = Word.query(Word.key == game.word).get()
    split_word = list(word.word)
    guess = request.guess
    game_history = History.query(History.game == game.key)
    letters_guessed_so_far = []

    for history in game_history.iter():
        letters_guessed_so_far.append(history.guess)

    # Validation and scoring, in priority order.
    if game.game_over is True:
        msg = 'Game already over!'
    elif guess == '' or guess.isdigit() or len(guess) > 1:
        msg = "Please enter a single alpha character only"
    elif guess in letters_guessed_so_far:
        msg = "You have already used that letter"
    elif guess not in split_word:
        # Miss: costs one guess and is recorded as unsuccessful.
        msg = "letter isn't in word"
        game.guesses_remaining -= 1
        save_history(game.key, game.number_of_guess, guess, False)
    else:
        msg = "Letter is in word!"
        save_history(game.key, game.number_of_guess, guess, True)

    # Added in a sleep because of latency writing to the datastore.
    # http://stackoverflow.com/questions/9137214/writing-then-reading-entity-does-not-fetch-entity-from-datastore
    time.sleep(0.1)

    # NOTE(review): win condition compares successful-guess count to word
    # length — presumably assumes one history row per distinct letter and no
    # repeated letters in the word; confirm against save_history/Word data.
    count_of_success = History.query(History.status == True).filter(History.game == game.key).count()

    if len(word.word) == count_of_success:
        msg = "You've won! The word was {}".format(word.word)
        game.end_game(True)
    elif game.guesses_remaining == 0:
        msg = 'You have run out of guesses! The word was {}'.format(word.word)
        game.end_game()
    taskqueue.add(url='/tasks/cache_average_attempts')
    game.put()

    return game, msg
    def post(self,*args,**kargs):
        """Register a new tenant (company) and its first admin user, then
        email a verification link via the task queue.

        Writes 'true' on success, or an error message for duplicate
        domain/email.
        """
        #role=model.user.Groups()
        tenant_domain = self.request.get('company_domain')
        tenant_name = self.request.get('company_name')
        tenant = model.user.Tenant()
        tenant.name = tenant_name
        tenant.domain = tenant_domain
        tenant.created_by = self.request.get('email')
        # Reject duplicate domains before creating the admin user.
        duplicate_tenant = tenant.query(model.user.Tenant.domain==tenant_domain).fetch() 
        if duplicate_tenant:
            self.response.write('Domain already exists with the same name.')
            return
        else:
            tenant_key_added = tenant.put()
        role=ndb.Key(urlsafe=self.request.get('role'))
        user_name = self.request.get('email')
        email = self.request.get('email')
        name = self.request.get('company_name')
        last_name = self.request.get('company_name')
        designation = ""
        empid=""
        contact=""
        tenant_key = tenant_key_added
        # NOTE(review): initial password is company_name + empid, and empid is
        # empty here — effectively the company name; confirm this is intended.
        password = name+empid
        #unique_properties = ['email_address']
        user_data = self.user_model.create_user(user_name,
            email_address=email, name=name, password_raw=password,designation=designation,empid=empid,contact=contact,
            last_name=last_name,role=role,tenant_domain=tenant_domain,status = True,tenant_key=tenant_key,project_permission=True,verified=False)
        if not user_data[0]: #user_data is a tuple
            self.response.write('User already exists with the same email.')
            return

        user = user_data[1]
        user_id = user.get_id()
        token = self.user_model.create_signup_token(user_id)
        verification_url = self.uri_for('verification', type='v', user_id=user_id,signup_token=token, _full=True)
        
        logging.info("before email");
        # NOTE(review): the literal "{url}" placeholder below is presumably
        # substituted by the /email task handler — confirm.
        message= """Hi """+name+""",
        Thank you for registering on APM. Please follow the below url to activate your account.
        Remember to change your password.
        You will be able to do so by visiting 
        {url}"""
        
        # Send the verification email out-of-band.
        task = taskqueue.add(
            queue_name = "my-push-queue",                 
            url='/email',
            params={'To_email':email,'verification_url':verification_url,'message':message})
        logging.info("after email")
        
        
        
        logging.info(verification_url)
        self.response.write('true')
Exemple #28
0
 def GET(self):
     """Admin tag maintenance: rename tags (directly or via a deferred task)
     and delete tags whose usage counter is zero."""
     result = {}
     submitted = True
     action = web.input(action = None)['action']
     if action == 'renametags_init':
           # Defer the actual rename to a POST task on the 'populate' queue.
           tags_to_rename = web.input(tags_to_rename = None)['tags_to_rename']
           tag_destination = web.input(tag_destination = None)['tag_destination']
           
           taskqueue.add(url='/admin/tags',
                         method = 'POST', 
                         queue_name = 'populate',
                         countdown = 5,
                         params = {
                           'action' : 'renametags',
                           'tags_to_rename': tags_to_rename,
                           'tag_destination' : tag_destination,
                         })
           result[action] = "Done"
     if action == 'renametags':
           # Inline rename: move every post tagged with each source tag onto
           # the destination tag and update the tag counters.
           tags_to_rename = web.input(tags_to_rename = None)['tags_to_rename']
           tag_destination = web.input(tag_destination = None)['tag_destination']
           for tag_to_rename in tags_to_rename.split():
               if tag_to_rename:
                   entities = models.Post.all().filter('tags',tag_to_rename).fetch(1000)
                   for entity in entities:
                       entity_tags_old = entity.tags 
                       entity_tags_new = [tag.strip() for tag in entity_tags_old if tag and tag != tag_to_rename ]
                       entity_tags_new.append(tag_destination.strip())
                       entity.tags = list(set(entity_tags_new))
                       entity.put()
                       models.Tag.update_tags(list(set(entity_tags_new)), list(set(entity_tags_old)))
           result[action] = "Done"
     if action == 'deletetags':
           # Garbage-collect unused tags (counter == 0), up to 1000 at a time.
           keys = models.Tag.all(keys_only = True).filter('counter =', 0).fetch(1000)
           db.delete(keys)
           result[action] = "Done"
     return render_template(render.admin.admin_tags(
                                                submitted, 
                                                result, 
                                                action)
                           )
Exemple #29
0
    def post(self):
        """Task handler: expand a gig announcement into one email task per
        recipient band member."""
        # NOTE(review): pickle.loads on request data is unsafe if this
        # endpoint is reachable by untrusted callers — presumably it is
        # task-queue-only; confirm routing/auth.
        the_params = pickle.loads(self.request.get('the_params'))

        the_gig_key = the_params['the_gig_key']
        the_gig_url = the_params['the_gig_url']
        is_edit = the_params['is_edit']
        is_reminder = the_params['is_reminder']
        change_string = the_params['change_string']
        the_members = the_params['the_members']

        the_gig = the_gig_key.get()
        the_band_key = the_gig_key.parent()
        the_assocs = assoc.get_confirmed_assocs_of_band_key(
            the_band_key, include_occasional=the_gig.invite_occasionals)

        # A reminder aimed at specific members only goes to those members;
        # everything else goes to all confirmed associates.
        if is_reminder and the_members:
            recipient_assocs = []
            for a in the_assocs:
                if a.member in the_members:
                    recipient_assocs.append(a)
        else:
            recipient_assocs = the_assocs

        logging.info('announcing gig {0} to {1} people'.format(
            the_gig_key, len(recipient_assocs)))

        # Payload common to every per-member email task.
        the_shared_params = pickle.dumps({
            'the_gig_key': the_gig_key,
            'the_band_key': the_band_key,
            'the_gig_url': the_gig_url,
            'is_edit': is_edit,
            'is_reminder': is_reminder,
            'change_string': change_string
        })

        for an_assoc in recipient_assocs:
            if an_assoc.email_me:
                the_member_key = an_assoc.member

                the_member_params = pickle.dumps(
                    {'the_member_key': the_member_key})

                task = taskqueue.add(queue_name='emailqueue',
                                     url='/send_new_gig_handler',
                                     params={
                                         'the_shared_params':
                                         the_shared_params,
                                         'the_member_params': the_member_params
                                     })

        logging.info('announced gig {0}'.format(the_gig_key))

        self.response.write(200)
Exemple #30
0
    def post(self):
        """Task handler: expand a gig announcement into one email task per
        recipient band member, then record the email stats."""
        # NOTE(review): pickle.loads on request data is unsafe if this
        # endpoint is reachable by untrusted callers — presumably it is
        # task-queue-only; confirm routing/auth.
        the_params = pickle.loads(self.request.get('the_params'))

        the_gig_key = the_params['the_gig_key']
        the_gig_url = the_params['the_gig_url']
        is_edit = the_params['is_edit']
        is_reminder = the_params['is_reminder']
        change_string = the_params['change_string']
        the_members = the_params['the_members']

        the_gig = the_gig_key.get()
        the_band_key = the_gig_key.parent()
        the_assocs = assoc.get_confirmed_assocs_of_band_key(
            the_band_key, include_occasional=the_gig.invite_occasionals)

        # A reminder aimed at specific members only goes to those members.
        if is_reminder and the_members:
            recipient_assocs = [a for a in the_assocs if a.member in the_members]
        else:
            recipient_assocs = the_assocs

        logging.info('announcing gig {0} to {1} people'.format(
            the_gig_key, len(recipient_assocs)))

        # Payload common to every per-member email task.
        the_shared_params = pickle.dumps({
            'the_gig_key': the_gig_key,
            'the_band_key': the_band_key,
            'the_gig_url': the_gig_url,
            'is_edit': is_edit,
            'is_reminder': is_reminder,
            'change_string': change_string
        })

        for an_assoc in recipient_assocs:
            if an_assoc.email_me:
                the_member_params = pickle.dumps(
                    {'the_member_key': an_assoc.member})

                taskqueue.add(queue_name='emailqueue',
                              url='/send_new_gig_handler',
                              params={'the_shared_params': the_shared_params,
                                      'the_member_params': the_member_params})

        logging.info('announced gig {0}'.format(the_gig_key))

        stats.update_band_email_stats(the_band_key, len(recipient_assocs))

        self.response.write(200)
Exemple #31
0
def main(request):

    reLink = re.compile("((https?|ftp|file):\/\/[\-A-Z0-9+&@#\/%?=~_|!:,.;]*[\-A-Z0-9+&@#\/%=~_|])",re.IGNORECASE)
    parseUrlList = Dao.DataSource_GetListAll()


    errorOutput = ''
#        logging.info('Starting to fetch items from Twitter: %s items' % parseUrlList.count())
    hrefList = []
    for parseUrl in parseUrlList:
        logging.info('fetching twitter feed: %s' % parseUrl.SourceUrl)
        try:
            page = urlfetch.fetch(parseUrl.SourceUrl, headers = {'Cache-Control' : 'max-age=0'})
            logging.info(page)
            if page.status_code == 200:
                statusList = []
                try:
                    xmlDoc = minidom.parseString(page.content)
                    statusList = xmlDoc.getElementsByTagName('text')
                except ExpatError, e:
                    errorOutput += 'invalid Xml document.<BR>' + page.content
                for statusText in statusList:
                    reResult = reLink.search(statusText.childNodes[0].nodeValue)
                    r = reLink.findall(statusText.childNodes[0].nodeValue)
                    for linkText in r:
                        linkUrl = urlparse.urlparse(linkText[0])
                        logging.info('queueing url: %s' % linkUrl.geturl())
                        try:
#                                taskqueue.add( url='/fetchdata', params={'dataUrl':linkUrl.geturl(), 'dataSource': parseUrl.SourceName })
                            params={'dataUrl':linkUrl.geturl(), 'dataSource': parseUrl.SourceName }
                            #url = '/fetchdata/?dataUrl='+linkUrl.geturl()+'&dataSource='+parseUrl.SourceName
                            url = '/fetchdata/'
                            taskqueue.add( name=generateTaskName(linkUrl, parseUrl.SourceName), url=url, params=params, method='GET')
                            logging.info('get url: %s' % url)
                            logging.info('post data: %s' % params)
                            hrefList.append( linkUrl.geturl())
                        except taskqueue.TaskAlreadyExistsError, e:
                            errorOutput += '<li>%(sourcename)s - Duplicate task: %(taskname)s</li>' % {'sourcename': parseUrl.SourceName, 'taskname': generateTaskName(linkUrl, parseUrl.SourceName) }
                        except taskqueue.TombstonedTaskError, e:
                            errorOutput += '<li>%(sourcename)s - Tombstoned task: %(taskname)s</li>' % {'sourcename': parseUrl.SourceName, 'taskname': generateTaskName(linkUrl, parseUrl.SourceName) }
 def post(self,*args,**kargs):
     """Add a user to a project as a member with the requested role.

     Reads userid/projid/role from the request, creates a ProjectMembers
     entity scoped to the current user's company, enqueues an effort
     backfill task when the project already has sprints, and writes the
     new membership back to the client as JSON.
     """
     currentUser=self.auth.get_user_by_session()
     logging.info("it is here "+self.request.__str__())
     userid = self.request.get("userid")
     projid = self.request.get("projid")
     roleid   = self.request.get("role")
     logging.info("it is here and the userId is"+userid)
     logging.info("it is here and the projid is"+projid)
     logging.info("it is here and the role is"+roleid)

     companyId= self.user_model.get_by_id(currentUser['user_id']).tenant_key

     projemem = project.ProjectMembers()
     userkey = ndb.Key('OurUser',int(userid))
     model = userkey.get()
     # Build the role key once and reuse it (the original fetched it twice).
     role_key = ndb.Key('Groups',int(roleid))
     projemem.userName =  model.name
     projemem.projectid = ndb.Key('Project',int(projid))
     projemem.companyid = companyId
     projemem.userid =    userkey
     projemem.roleid = role_key
     projemem.userRole = role_key.get().role

     # currentUser already holds the session user; no need to fetch it again.
     projemem.created_by = currentUser['email_address']
     projemem.status = True

     projekey = projemem.set()
     projmodel = projekey.get()

     sprints = sprint.Sprint().get_by_project(projmodel.projectid)
     if not sprints:
         logging.info("No sprints")
     else:
         # Backfill effort records for the new member across existing sprints.
         taskqueue.add(
             queue_name = "my-push-queue",
             url='/newusereffortspersist',
             params={'projectid': projmodel.projectid.id(),'userid':projmodel.userid.id()})

     data = {}
     data['id'] = projmodel.key.id()
     data['projectid'] = projmodel.projectid.id()
     data['companyid'] = projmodel.companyid.id()
     data['userName'] = (projmodel.userid).get().name
     data['userid'] = projmodel.userid.id()
     data['userRole'] = projmodel.userRole

     self.response.write(json.dumps(data, ensure_ascii=False))
Exemple #33
0
def incr(name, delta=1):
    """Increments a counter.  The increment is generally a memcache-only
    operation, though a task will also be enqueued about once per
    UPDATE_INTERVAL seconds to persist the value.  May under-count if
    memcache contents is lost.

    Args:
      name: The name of the counter.
      delta: Amount to increment counter by, defaulting to 1.  May be
             negative, in which case the counter is decremented.
    """
    lock_key = "ctr_lck:" + name
    delta_key = "ctr_val:" + name

    # Apply the delta in memcache; persistence happens via the task below.
    if delta >= 0:
        v = memcache.incr(delta_key, delta, initial_value = 0)
    else:
        v = memcache.decr(delta_key, -delta, initial_value = 0)

    # The lock key throttles persistence: add() succeeds at most once per
    # UPDATE_INTERVAL seconds, so at most one task per interval is enqueued.
    if memcache.add(lock_key, None, time=UPDATE_INTERVAL):
        # time to enqueue a new task to persist the counter
        # note: cast to int on next line is due to GAE issue 2012
        # (http://code.google.com/p/googleappengine/issues/detail?id=2012)
        v = int(v)
        try:
            # Spread persistence load across five named queues.
            qn = random.randint(0, 4)
            qname = 'PersistCounter%d' % qn
            taskqueue.add(url='/task/counter_persist_incr',
                          queue_name=qname,
                          params=dict(name=name,
                                      value=v))
        except Exception:
            # task queue failed but we already put the delta in memcache;
            # just try to enqueue the task again next interval
            return
    def remove_taste_movie(self, movie):
        """Remove this user's taste for the given movie.

        Deletes the TasteMovie entity linking the user to *movie* (if one
        exists), detaches its key from the user's tastes list, enqueues the
        movie-untaste background task, and recalculates the proposal.

        :param movie: movie entity whose taste link should be removed
        :return: None
        """
        taste = TasteMovie.get_by_id(movie.key.id() + self.key.id())

        if taste is not None:
            stale_key = taste.key
            taste.key.delete()
            # Keep the denormalized tastes list in sync with the deletion.
            if stale_key in self.tastes_movies:
                self.tastes_movies.remove(stale_key)
                self.put()

        taskqueue.add(url='/_ah/start/task/movie_untaste/' + self.key.id() +
                          '/' + movie.key.id(), method='GET')

        # Recalculate proposal
        self.remove_proposal()
def put_dataset(self, datasetskey, resource, **kwargs):
    """Update a dataset after validating its secret, then re-queue parsing.

    Args:
      datasetskey: urlsafe ndb key string identifying the dataset.
      resource: unused here; kept for interface parity with add_dataset.

    Returns:
      The updated dataset model as a dict.

    Error paths (via errors.create): 404 when the key resolves to no
    entity; 401 when the supplied secret does not match the stored one.
    """
    model = ndb.Key(urlsafe=datasetskey).get()
    if not model:
        errors.create(404)

    args = helper.parse_args_for_model(model, [{'name': 'secret', 'required': True}], validator=False)
    if model.secret != args.get('secret'):
        errors.create(401, payload={'message': 'Invalid secret key. You do not have access to modify this dataset.'})

    model.populate(**args)
    model.status = {'code': 'parsing'}
    model.put()

    # Fire-and-forget: the processor task re-parses the dataset's data.
    taskqueue.add(
        queue_name='processor-queue',
        url='/tasks/process_data',
        payload=json.dumps(dict(
            key=model.get_key_urlsafe())
        ))
    return model.to_dict()
def add_dataset(self, resource, **kwargs):
    """Create a new dataset and enqueue header processing for its files.

    Parses the model's args (the 'files' list is required), generates a
    fresh access secret, stores the model with status 'started', and
    enqueues a processor task to extract the headers.

    Returns:
      The new dataset as a dict, with the generated secret included
      (creation is the only time the secret is exposed to the caller).
    """
    _Model = resource.get_model()
    args = helper.parse_args_for_model(_Model, [{'name': 'files', 'required': True, 'action': 'append'}])
    args['secret'] = util.uuid()
    args['headers'] = []
    files = args.pop('files')
    model = _Model(**args)
    model.status = dict(code='started')
    model.put()

    response = model.to_dict()
    response['secret'] = model.secret # force show the secret on creation

    # Fire-and-forget: the processor task reads headers from the files.
    taskqueue.add(
        queue_name='processor-queue',
        url='/tasks/process_headers',
        payload= json.dumps(dict(
            key=model.get_key_urlsafe(),
            files=files)
        ))
    return response
Exemple #37
0
 def wipe_votes_json(self):
   """Kick off the batch task that resets every cached votes-JSON blob."""
   migration_params = {'migration': 'reset-votes-json'}
   taskqueue.add(url='/task/batch_update_votes', params=migration_params)
   self.response.out.write("Batch job started<br/>")
Exemple #38
0
 def _common(self):
   # Restore all per-user timers from the datastore in paginated batches.
   # Each invocation processes up to MAX_RESTORE_NUM_PER_CYCLE users; when
   # more remain, it re-enqueues itself carrying pagination state in the
   # query string (last_id / last_time / cnt / tcnt).
   (req,rsp,rheaders,rcookies) = common_init(self)
   pre_str = u'restoreTimer: '
   
   rsp.set_status(200)
   loginfo(pre_str+u'start')
   
   max_num = MAX_RESTORE_NUM_PER_CYCLE
   last_id = req.get('last_id')
   flg_first = False
   if not last_id:
     # First cycle: enter maintenance mode, reset timer state, and start
     # from the oldest users by date.
     set_maintenance_mode(True)
     flg_first = True
     timer_initialize()
     db_gc_list = dbGaeCronUser.all().order('date').fetch(max_num)
     cnt = 0
     tcnt = 0
   else:
     # Continuation cycle: resume from the last processed user's date.
     last_time = isofmt_to_datetime(req.get('last_time'))
     log(' last_id=%s' % (last_id))
     log(' last_time=%s' % (req.get('last_time')))
     db_gc_list = dbGaeCronUser.all().filter('date >=', last_time).order('date').fetch(max_num)
     # NOTE(review): int(x, 0) parses with base auto-detection — this looks
     # like it was meant to be int(req.get('cnt', 0)) (default 0); it will
     # raise if 'cnt' is missing or empty.  Confirm against the enqueue URL.
     cnt = int(req.get('cnt'),0)
     tcnt = int(req.get('tcnt'),0)
   
   gae_timer = GAE_Timer()
   set_timer = gae_timer.set_timer
   rel_timer = gae_timer.rel_timer
   
   for db_gc in db_gc_list:
     db_gc_id = str(db_gc.key().id())
     # The '>=' filter re-fetches the boundary row; skip the one already done.
     if db_gc_id == last_id:
       continue
     
     cnt += 1
     email = db_gc.email
     loginfo(u'%d: %s  %s' % (cnt,email,db_gc.date))
     
     # croninfo is a JSON dict of {entry_no: cron_info} per user.
     cron_info_dict = json.loads(db_gc.croninfo)
     
     for (no,cron_info) in cron_info_dict.items():
       tid = None
       if cron_info['valid']:
         timerid=u'%s-%s-%s' % (NAMESPACE,db_gc_id,str(no))
         if cron_info['kind'] == 'cycle':
           # Fixed-interval timer (every N minutes).
           tid = set_timer(minutes=cron_info['cycle_info']['cycle'],url=cron_info['url'],user_id=email,user_info=no,timerid=timerid,sem=False,save_after=True)
         else:
           # Cron-style timer: rebuild the 5-field crontab expression.
           _c = cron_info['cron_info']
           _crontime = ' '.join([_c['min'],_c['hour'],_c['day'],_c['month'],_c['wday']])
           tid = set_timer(crontime=_crontime,url=cron_info['url'],user_id=email,user_info=no,tz_hours=_c['tz_hours'],timerid=timerid,sem=False,save_after=True)
         
         if tid:
           tcnt += 1
           loginfo(u'  timer(No.%d) update (timerid=%s)' % (1+int(no),tid))
         else:
           # Could not re-register this timer: mark the entry invalid so it
           # is not retried forever.
           cron_info['valid'] = 0
           logerr(u'  timer(No.%d) set error' % (1+int(no)))
       
       # Record the new timer id (or None) back into the user's cron info.
       cron_info['tid'] = tid
     
     db_gc.croninfo = db.Text(json.dumps(cron_info_dict))
     dbPut(db_gc)
     
     last_id = db_gc_id
     last_time = db_gc.date
   
   # A full page means there may be more users: self-enqueue a continuation
   # task carrying the pagination cursor, retrying up to 3 times.
   if max_num<=len(db_gc_list):
     str_rsp = pre_str+u'continue'
     url=PATH_RESTORE_TIMER+'?last_id=%s&last_time=%s&cnt=%d&tcnt=%d' % (urllib.quote(last_id),urllib.quote(datetime_to_isofmt(last_time)),cnt,tcnt)
     log(u'call:"%s"' % (url))
     for ci in range(3):
       try:
         taskqueue.add(url=url,method='GET',headers={'X-AppEngine-TaskRetryCount':0})
         break
       except Exception, s:
         str_rsp = pre_str+u'taskqueue error: %s' % (str(s))
         pass
       time.sleep(1)
Exemple #39
0
 def txn():
     """Transactionally enqueue the gift-deletion task (closes over task_id)."""
     target = url_for(
         'api/internal_delete_gifts_json_1c',
         task_id=task_id)
     taskqueue.add(url=target, transactional=True)
Exemple #40
0
def _safe_taskqueue_add(url, params):
    """Enqueue an email task, adding an encrypted auth token to its params.

    Args:
      url: handler URL for the task.
      params: dict of task parameters; 'the_key' is added to a copy so the
              caller's dict is never mutated as a side effect.
    """
    # Copy before mutating — the original version clobbered the caller's dict.
    params = dict(params)
    params['the_key'] = cryptoutil.encrypt_string("Trust Me")
    taskqueue.add(queue_name='emailqueue', url=url, params=params)
 def create(site_uri):
     """Enqueue a task at service_uri (closure) for the given site URI."""
     payload = {'site_uri': site_uri}
     taskqueue.add(url=service_uri, params=payload)
Exemple #42
0
 def get(self):
     """Trigger the data-deletion task if the caller supplies the admin key."""
     # Guard clause: reject callers without the correct admin key up front.
     if self.request.get('admin') != credentials.ADMIN_KEY:
         self.response.write('Wrong key!')
         return
     taskqueue.add(url='/deleteData', params={'admin': credentials.ADMIN_KEY})
     self.response.write('Added task!')