Пример #1
0
	def post(self):
		"""Validate the requesting smart.fm user and kick off the backup task chain.

		Reads user_id/folder/extension/tz_hour/tz_minute from the POST body,
		verifies the user exists on smart.fm, stores the per-run options in
		memcache, enqueues the first search task, and redirects to the
		progress page.
		"""
		# Gets the parameters.
		user_id = self.request.get('user_id').encode('utf-8')
		folder = self.request.get('folder').encode('utf-8')
		extension = self.request.get('extension').encode('utf-8')
		tz_hour = self.request.get('tz_hour').encode('utf-8')
		tz_minute = self.request.get('tz_minute').encode('utf-8')

		# Checks whether the user exists or not (HEAD request; redirects are
		# not followed so a missing user is not masked by a landing page).
		if not user_id or urlfetch.fetch('http://smart.fm/users/%s' % (user_id), method=urlfetch.HEAD, follow_redirects=False).status_code != 200:
			# Bug fix: a missing user is 404 Not Found, not 405 Method Not Allowed.
			self.error(404)
			self.response.headers['Content-Type'] = 'text/plain; charset="UTF-8"'
			self.response.out.write('User not found.')
			return

		timestamp = datetime.now().strftime('%Y%m%d%H%M%S')

		self.request.str_POST['timestamp'] = timestamp	# ad hoc: exposes the timestamp to helpers reading the request

		# Stores the options.
		memcache.set(get_key(self, 'is_flat'), folder == 'flat')
		memcache.set(get_key(self, 'has_extension'), extension == 'extension')
		memcache.set(get_key(self, 'tz_hour'), tz_hour)
		memcache.set(get_key(self, 'tz_minute'), tz_minute)

		# Moves to the next phase.
		memcache.set(get_key(self, 'phase'), 1)
		taskqueue.add(url='/smartfm/memento/tasks/search', params={'user_id': user_id, 'timestamp': timestamp, 'page': 1})

		self.redirect('/smartfm/memento/progress?user_id=%s&timestamp=%s' % (user_id, timestamp))
Пример #2
0
 def get(self):
     """Enqueue a cleanup task for every membership that still has no status.

     Memberships older than one day are staggered onto the queue 90 seconds
     apart; each e-mail address is echoed to the response as it is queued.
     """
     delay = 0
     for membership in Membership.all().filter("status =", None):
         age = datetime.datetime.now().date() - membership.created.date()
         if age.days > 1:
             delay += 90
             self.response.out.write("bye %s " % (membership.email))
             taskqueue.add(url="/tasks/clean_row",
                           params={"user": membership.key().id()},
                           countdown=delay)
Пример #3
0
    def notify(key, event, pub_data):
        """Deliver an event notification to the subscription stored under *key*.

        Local subscription URLs (starting with '/') are posted via the task
        queue; external URLs are posted synchronously with urllib2. Returns
        None when the subscription no longer exists.
        """
        sub = Subscription.get(db.Key(encoded=key))
        if not sub:
            return None

        payload = json.dumps({
            'key': key,
            'event': event,
            'pub_data': pub_data,
            'sub_data': json.loads(sub.data),
        })

        if not sub.url.startswith('/'):
            # External endpoint: deliver immediately over HTTP.
            request = urllib2.Request(sub.url)
            request.add_header('Content-Type', 'application/json')
            urllib2.urlopen(request, payload)
        else:
            # Local endpoint: let the task queue handle delivery and retries.
            taskqueue.add(
                url=sub.url,
                headers={'Content-Type': 'application/json'},
                payload=payload)
Пример #4
0
	def post(self):
		"""Task: fetch one resource file for the backup run and chain the next fetch.

		Reads user_id/timestamp/resource_index from the task payload, tracks
		progress in memcache, and either fetches resource `index` and enqueues
		index + 1, or enqueues the divide task once all resources are fetched.
		"""
		# Gets the parameters.
		user_id = self.request.get('user_id')
		timestamp = self.request.get('timestamp')
		index = int(self.request.get('resource_index'))

		# Progress counter read by the progress page.
		memcache.set(get_key(self, 'counter'), index)

		# Moves to the next phase (resets the counter, bumps 'phase' by one).
		if get_phase(self) == 3:
			memcache.set(get_key(self, 'counter'), 0)
			memcache.incr(get_key(self, 'phase'))

		# Terminates the process if canceled (phase was changed externally).
		if get_phase(self) != 4:
			logging.info('This request was canceled.')
			return

		if index < int(memcache.get(get_key(self, 'resource_num')) or 0):
			# Fetches a resource file.
			resource = fetch_content(memcache.get(get_resources_key(self, index)))

			# Updates download size.
			bytes = int(memcache.get(get_key(self, 'resources_bytes')) or 0)
			bytes += len(resource)
			memcache.set(get_key(self, 'resources_bytes'), bytes)

			# Chain a task for the next resource index.
			taskqueue.add(url='/smartfm/memento/tasks/fetch', params={'user_id': user_id, 'timestamp': timestamp, 'resource_index': index + 1})

		else:
			# Executes the next task to move to the next phase.
			taskqueue.add(url='/smartfm/memento/tasks/divide', params={'user_id': user_id, 'timestamp': timestamp})
Пример #5
0
    def get(self):
        """Debug dump of journey patterns; the timetable loader below is disabled.

        Logs each distinct line and the stop names of every JourneyPattern,
        then returns.

        NOTE(review): everything after the bare `return` is unreachable — it
        looks like the GCS/zip timetable-enqueue logic was parked during
        debugging. Confirm whether it should be re-enabled or deleted.
        """
        lines = []
        jps = models.JourneyPattern.query().order(models.JourneyPattern.line)
        for jp in jps:
            if jp.line not in lines:
                logging.info(jp.line)
                lines.append(jp.line)
            logging.info([s.get().name for s in jp.stops])

        return

#         return taskqueue.add(url='/timetable', queue_name='default', params={'file_name': 'tfl_1-BAK_-390106-y05.xml'})

        # --- unreachable below this point (see docstring note) ---
        if self.is_local():
            taskqueue.add(url='/timetable', queue_name='default')
        else:
            bucket_name = os.environ.get('BUCKET_NAME', app_identity.get_default_gcs_bucket_name())
#             bucket_name = 'jeg376-tm470.appspot.com'
            filename = '/%s/stream.zip' % bucket_name
            gcs_file = cloudstorage.open(filename)

            tfl_zip = ZipFile(gcs_file, 'r')
            tfl_data = tfl_zip.read('LULDLRRiverTramCable.zip')
            tfl_data = ZipFile(StringIO.StringIO(tfl_data), 'r')

            tube_file_matcher = re.compile('^tfl_\d-\w{3}_.*\.xml$')
            for file_name in tfl_data.namelist():
                if tube_file_matcher.match(file_name):
                    taskqueue.add(url='/timetable', queue_name='default', params={'file_name': file_name})
Пример #6
0
 def get(self):
     """Rebuild the SGPostal search index from the remote postal JSON files.

     Deletes the existing index, then downloads each numbered JSON file from
     S3 and enqueues its rows in chunks of 250 for background parsing.
     """
     # Delete all the old items in the index
     SGPostal.delete_all_in_index()

     # 10 json files contain the address info (only file 0 is fetched at the
     # moment; widen range() to process more).
     for i in range(1):
         jsonUrl = "https://s3-ap-southeast-1.amazonaws.com/clt-friso/%dpostal.json" % i
         logging.debug("Downloading json file %d" % i)
         urlfetch.set_default_fetch_deadline(40)
         result = urlfetch.fetch(jsonUrl)
         if result.status_code == 200:
             myData = json.loads(result.content)
             logging.debug("File loaded, total %d items" % len(myData))
             chunks = [myData[x:x + 250] for x in xrange(0, len(myData), 250)]
             # Bug fix: the chunk counter previously reused the outer loop
             # variable `i`, clobbering the file index for later iterations
             # (and for the error message in the else branch).
             chunk_no = 1
             for chunk in chunks:
                 logging.debug(str(len(chunk)))
                 strChunk = json.dumps(chunk)
                 taskqueue.add(url='/super_admin/parse_postal', countdown=60,
                               params={'postalRows': strChunk, "item": chunk_no,
                                       "total": len(chunks),
                                       'total_items': len(myData)},
                               queue_name='updatepostal')
                 chunk_no += 1
         else:
             logging.debug("File %d not found" % i)
Пример #7
0
  def post(self):
    """ Handler for POST requests: validate the upload form, store the app,
    and schedule status-page refreshes. """
    success_msg = ''
    err_msg = ''
    form = self.request.POST.multi
    # Reject requests that carry no usable file upload.
    if not form or 'app_file_data' not in form or \
        not hasattr(form['app_file_data'], 'file'):
      self.render_page(page='apps', template_file=self.TEMPLATE, values={
          'error_message' : 'You must specify a file to upload.',
          'success_message' : ''
        })
      return

    if not self.dstore.can_upload_apps():
      err_msg = "You are not authorized to upload apps."
    else:
      upload = form['app_file_data']
      try:
        success_msg = self.helper.upload_app(upload.filename, upload.file)
      except AppHelperException as err:
        err_msg = str(err)
      if success_msg:
        # Refresh the status page now and once more after the app settles.
        try:
          taskqueue.add(url='/status/refresh')
          taskqueue.add(url='/status/refresh', countdown=self.REFRESH_WAIT_TIME)
        except Exception as err:
          logging.exception(err)
    self.render_page(page='apps', template_file=self.TEMPLATE, values={
        'error_message' : err_msg,
        'success_message' : success_msg
      })
Пример #8
0
    def get(self, backup_date):
        """Enqueue an archive task for every object in the requested backup.

        Lists the backup_date folder of the backup bucket, queues one
        archive-file task per object (directories are skipped), and reports
        the number of enqueued tasks in the response.
        """
        # Make sure the requested backup exists
        bucket = self.get_backup_bucket()
        bucket_prefix = "/{}/".format(bucket)
        enqueued = 0
        for entry in cloudstorage.listbucket("/{}/{}/".format(bucket, backup_date)):
            # Directory placeholders carry no data.
            if entry.is_dir:
                continue
            enqueued += 1
            object_path = entry.filename[len(bucket_prefix):]
            taskqueue.add(
                url='/backend-tasks/backup/archive/file',
                params={
                    'bucket': bucket,
                    'object': object_path,
                },
                queue_name='backups',
                method='POST')
        self.response.out.write("Enqueued updates for {} files".format(enqueued))
Пример #9
0
def api_call(method_name, countdown=0, **kwargs):
    """Queue an outgoing Telegram Bot API call on the 'outbox' queue.

    Args:
        method_name: Telegram API method name (appended to the /telegram/ URL).
        countdown: seconds to delay the task; 0 means run as soon as possible.
        **kwargs: JSON-serializable arguments forwarded as the task payload.
    """
    payload = json.dumps(kwargs)
    taskqueue.add(queue_name='outbox', url='/telegram/' + method_name, payload=payload,
                  countdown=countdown)
    countdown_details = ' (countdown {}s)'.format(countdown) if countdown else ''
    # Lazy %-style args: the message is only built if INFO logging is enabled.
    logging.info('Request queued: %s%s', method_name, countdown_details)
    logging.debug(payload)
Пример #10
0
    def _createSpeakerObject(self, request):
        """Create a Speaker object, returning SpeakerForm/request.

        Requires a logged-in user and a non-empty `name` field; stores the
        Speaker and queues a confirmation e-mail to the user.
        """

        # Getting and Verifying current user
        user = getUser()

        # Confirm the field is filled out
        checkField(request.name, 'name')

        # Copy SpeakerForm/ProtoRPC Message into dict
        data = ({field.name: getattr(request, field.name)
                for field in request.all_fields()})

        # Create a key for the Speaker
        # NOTE(review): the id is allocated from the Session kind but used to
        # build a Speaker key — confirm this cannot collide with Speaker ids
        # allocated elsewhere.
        s_id  = Session.allocate_ids(size=1)[0]
        s_key = ndb.Key(Speaker, s_id)

        # Update stored session with session keys
        data['key'] = s_key

        # Create and update session and return the form
        Speaker(**data).put()

        # Queue a confirmation e-mail to the current user (out of band).
        taskqueue.add(

            params = {
                'email'   : user.email(),
                'subject' : 'You Added %s as a Speaker!' % data['name'],
                'body'    : 'Here are the details for the added speaker:',
                'info'    : repr(request)},

            url    = '/tasks/send_confirmation_email')

        return request
Пример #11
0
 def receive(self, msg, to):
     """Rewrite an inbound message's To header and queue it for processing.

     The original recipient is preserved in X-Original-To before the To
     header is replaced with *to*.
     """
     original = msg.original
     original.add_header("X-Original-To", original.get("To"))
     original.replace_header("To", to)
     mime_message = original.as_string()
     logging.info(mime_message)
     taskqueue.add(url='/process_mail', params={'mime_message': mime_message})
Пример #12
0
 def post(self):
     """Queue an "are you still there" e-mail for each eligible suspended member.

     Tasks are spaced 20 minutes apart (72 e-mails/day, under the 100/day
     free App Engine mail limit).
     """
     delay = 0
     for membership in Membership.all().filter('status =', "suspended"):
         skip = (membership.unsubscribe_reason
                 or not membership.spreedly_token
                 or "Deleted" in membership.last_name
                 or membership.extra_dnd == True)
         if skip:
             continue
         delay += 1200  # one e-mail every 20 min
         self.response.out.write("Are you still there "+membership.email+ "?<br/>")
         taskqueue.add(url='/tasks/areyoustillthere_mail',
                       params={'user': membership.key().id()},
                       countdown=delay)
Пример #13
0
 def post(self):
     """Schedule row-cleanup tasks for memberships that never got a status.

     Only memberships created more than a day ago are cleaned; tasks are
     staggered 90 seconds apart.
     """
     delay = 0
     for membership in Membership.all().filter('status =', None):
         age_days = (datetime.now().date() - membership.created.date()).days
         if age_days > 1:
             delay += 90
             self.response.out.write("bye "+membership.email+ " ")
             taskqueue.add(url='/tasks/clean_row',
                           params={'user': membership.key().id()},
                           countdown=delay)
Пример #14
0
 def retry(countdown=3):
     """Re-enqueue the create_user task, giving up after 5 attempts.

     NOTE(review): relies on `self` and `fail` from an enclosing scope — this
     appears to be a closure defined inside a request handler; confirm before
     moving or reusing it.

     Args:
         countdown: seconds to wait before the retried task runs.
     """
     retries = int(self.request.get('retries', 0)) + 1
     if retries <= 5:
         taskqueue.add(url='/tasks/create_user', method='POST', countdown=countdown,
             params={'hash': self.request.get('hash'), 'retries': retries})
     else:
         fail(Exception("Too many retries for %s" % self.request.get('hash')))
Пример #15
0
    def post(self, ids=None):
        """Sync member records against Spreedly subscriber details.

        For each subscriber id in the request, refreshes the matching
        Membership's status/token/plan/email and triggers follow-up actions
        (PayPal cancellation mail, user-creation task).

        NOTE(review): the tail of this block is corrupted — string/argument
        text was replaced with `******` (a censored e-mail literal and the
        restore/suspend logging and calls). It does not parse as-is; recover
        the original source before editing further.
        """
        subscriber_ids = self.request.get('subscriber_ids').split(',')
        c = Config()
        s = spreedly.Spreedly(c.SPREEDLY_ACCOUNT, token=c.SPREEDLY_APIKEY)
        for id in subscriber_ids:
            subscriber = s.subscriber_details(sub_id=int(id))
            logging.debug("customer_id: "+ subscriber['customer-id'])
            member = Membership.get_by_id(int(subscriber['customer-id']))
            if member:
                if member.status == 'paypal':
                    # Member pays via PayPal: ask staff to cancel that subscription.
                    mail.send_mail(sender=EMAIL_FROM,
                        to=PAYPAL_EMAIL,
                        subject="Please cancel PayPal subscription for %s" % member.full_name(),
                        body=member.email)
                member.status = 'active' if subscriber['active'] == 'true' else 'suspended'
                if member.status == 'active' and not member.username:
                    # Newly active member without a login: create one shortly.
                    taskqueue.add(url='/tasks/create_user', method='POST', params={'hash': member.hash}, countdown=3)
                if member.status == 'active' and member.unsubscribe_reason:
                    member.unsubscribe_reason = None
                member.spreedly_token = subscriber['token']
                member.plan = subscriber['feature-level'] or member.plan
                if not subscriber['email']:
                  subscriber['email'] = "*****@*****.**"
                member.email = subscriber['email']
                member.put()
                # TODO: After a few months (now() = 06.13.2011), only suspend/restore if status CHANGED
                # As of right now, we can't trust previous status, so lets take action on each call to /update
                if member.status == 'active' and member.username:
                    logging.info("Restoring User: "******"Suspending User: "******"ok")
Пример #16
0
 def post(self):
     """Query for tests, and put ones with no new data on the delete queue."""
     datastore_hooks.SetPrivilegedRequest()
     start = datastore_query.Cursor(urlsafe=self.request.get('cursor'))
     tests, next_cursor, more = graph_data.TestMetadata.query().fetch_page(
         _TESTS_TO_CHECK_AT_ONCE, keys_only=True, start_cursor=start)
     if more:
         # Continue scanning the remaining tests in a follow-up task.
         taskqueue.add(
             url='/delete_old_tests',
             params={'cursor': next_cursor.urlsafe()},
             queue_name=_TASK_QUEUE_NAME)
     for test_key in tests:
         # A test is deletable when it has no Row newer than the cutoff
         # and no descendant tests.
         newest = graph_data.Row.query(
             graph_data.Row.parent_test == utils.OldStyleTestKey(test_key)).order(
                 -graph_data.Row.timestamp).get()
         stale = newest is None or (
             newest.timestamp < datetime.datetime.today() - _CUTOFF_DATE)
         children = list_tests.GetTestDescendants(test_key, keys_only=True)
         children.remove(test_key)
         if stale and not children:
             taskqueue.add(
                 url='/delete_test_data',
                 params={
                     'test_path': utils.TestPath(test_key),  # For manual inspection.
                     'test_key': test_key.urlsafe(),
                 },
                 queue_name=_DELETE_TASK_QUEUE_NAME)
Пример #17
0
def do_task_update_users(force, cursor):
    """Walk the bot's friend list one page at a time, registering users.

    New friends (or all friends when *force* is set) get a CurryUser row and
    an update_links task; pagination chains another update_users task until
    the API reports cursor 0. Returns 'ok'.
    """
    force = bool(int(force))
    cursor = int(cursor)
    friend_list, data = api._GetFriends(cursor=cursor)

    new_users = []
    for friend in friend_list:
        username = friend.GetScreenName()
        isnew = not CurryUser.get_by_key_name(username)
        if not (force or isnew):
            continue
        new_users.append(CurryUser(key_name=username))
        taskqueue.add(
            url=('/task/update_links/%s/%d' % (username, (isnew and not force))),
            queue_name='update-links-queue'
            )

    db.put(new_users)
    logging.debug('added %d users from %d friends of bot' % (len(new_users), len(friend_list)))
    logging.debug('next cursor=%d' % data['next_cursor'])

    next_cursor = int(data['next_cursor'])
    if next_cursor != 0:
        # More pages remain: chain another task starting at the next cursor.
        taskqueue.add(
                url=('/task/update_users/%d/%d' % (force, next_cursor)),
                queue_name='update-users-queue'
                )
    else:
        logging.info('update users: done')

    return 'ok'
    def get(self):
        cursorString = self.request.get('cursor')

        cursor = None
        if cursorString != 'None':
            cursor = Cursor(urlsafe=cursorString)

        listVectors, next_cursor, more = Vector.query().fetch_page(BATCH_SIZE, start_cursor=cursor)
        to_put = []

        for vector in listVectors:
            if any([
            (vector.server == 'OGC' and vector.dataset == 'BusStops' and vector.name == 'GetCapabilities' and vector.httpMethod == 'GET' and vector.returnType == 'XML'),
            (vector.server == 'OGC' and vector.dataset == 'BusStops' and vector.name == 'AttributeFilter' and vector.httpMethod == 'GET' and vector.returnType == 'JSON'),
            (vector.server == 'OGC' and vector.dataset == 'BusStops' and vector.name == 'GetCapabilities' and vector.httpMethod == 'POST' and vector.returnType == 'XML'),
            (vector.server == 'OGC' and vector.dataset == 'Topo' and vector.name == 'Big' and vector.httpMethod == 'GET' and vector.returnType == 'Image'),
            (vector.server == 'OGC' and vector.dataset == 'Topo' and vector.name == 'Small' and vector.httpMethod == 'GET' and vector.returnType == 'Image'),
            (vector.server == 'GME' and vector.dataset == 'BusStops' and vector.name == 'Small' and vector.httpMethod == 'GET' and vector.returnType == 'JSON'),
            (vector.server == 'GME' and vector.dataset == 'AerialPhoto' and vector.name == 'WMSGetCapabilities' and vector.httpMethod == 'GET' and vector.returnType == 'XML'),
            (vector.server == 'GME' and vector.dataset == 'AerialPhoto' and vector.name == 'WMTSGetCapabilities' and vector.httpMethod == 'GET' and vector.returnType == 'XML')
            ]):
                vector.referenceCheckValid = False
                print 'Changed flag! False!'
                to_put.append(vector)
            else:
                vector.referenceCheckValid = True
                print 'Changed flag! True!'
                to_put.append(vector)

        if to_put:
            ndb.put_multi(to_put)

        if more:
            print next_cursor.urlsafe()
            taskqueue.add(url='/updateschemaworker', method='GET', params={'cursor':next_cursor.urlsafe()})
Пример #19
0
    def post(self):
        """Task handler: apply self.update() to every entity of a kind, resumably.

        Iterates the keys-only query for `kind` from the supplied cursor,
        loads and updates each entity (optionally inside a transaction), and
        re-enqueues itself with the latest cursor when the 25-second soft
        deadline is hit. External cancellation is honoured via
        _is_in_progress / _set_in_progress.
        """
        # To prevent CSRF attacks, all requests must be from the task queue
        if "X-AppEngine-QueueName" not in self.request.headers:
            logging.error("Potential CSRF attack detected")
            self.response.set_status(403, message="Potential CSRF attack detected due to missing header.")
            return

        if not _is_in_progress(self.request.path):
            logging.info("Cancelled.")
            return
        cursor = self.request.get("cursor")
        count = self.request.get("count")
        if not count:
            count = 0
        count = int(count)
        kind = self.request.get("kind")
        query = self.get_keys_query(kind)
        if cursor:
            query.with_cursor(cursor)
        done = False
        new_cursor = cursor
        # dev server doesn't throw DeadlineExceededError so we do it ourselves
        deadline = datetime.datetime.now() + datetime.timedelta(seconds=25)
        try:
            for key in query:

                def do_update():
                    # Load-modify-store a single entity; update() returning
                    # truthy means the entity changed and needs a put().
                    e = db.get(key)
                    if self.update(e):
                        e.put()
                    if datetime.datetime.now() > deadline:
                        raise DeadlineExceededError

                if self.use_transaction():
                    db.run_in_transaction(do_update)
                else:
                    do_update()
                # Record how far we got so a follow-up task can resume here.
                new_cursor = query.cursor()
                count = count + 1
            _set_in_progress(self.request.path, False)
            logging.info("Finished! %d %s processed.", count, kind)
            done = True
        except DeadlineExceededError:
            pass
        except:
            logging.exception("Unexpected exception")
        finally:
            if done:
                return
            if new_cursor == cursor:
                # No entity completed this run; re-enqueueing would loop forever.
                logging.error("Stopped due to lack of progress at %d %s with cursor = %s", count, kind, new_cursor)
                _set_in_progress(self.request.path, False)
            else:
                logging.info("Processed %d %s so far.  Continuing in a new task...", count, kind)
                new_params = {}
                for name, value in self.request.params.items():
                    new_params[name] = value
                new_params["cursor"] = new_cursor
                new_params["count"] = count
                taskqueue.add(url=self.request.path, params=new_params)
 def get(self):
     """Fan out a search-index update task for every team."""
     for key in Team.query().fetch(keys_only=True):
         taskqueue.add(
             method='GET',
             queue_name='search-index-update',
             url='/tasks/do/update_team_search_index/' + key.id())
Пример #21
0
 def _createSpeakerObject(self, request):
     """Create a Speaker object, returning SpeakerForm/request.

     Requires a logged-in user and a non-empty `name` field; stores the
     Speaker and queues a confirmation e-mail to the user.
     """
     # Ensure that the current user is logged in and get user ID
     user = endpoints.get_current_user()
     if not user:
         raise endpoints.UnauthorizedException('Authorization required')
     # Verify that a name was provided for the speaker
     if not request.name:
         raise endpoints.BadRequestException(
             "Speaker 'name' field required")
     # Copy SpeakerForm/ProtoRPC Message into dict
     data = ({field.name: getattr(request, field.name)
             for field in request.all_fields()})
     # Create a key for the Speaker
     # NOTE(review): the id is allocated from the Session kind but used to
     # build a Speaker key — confirm this cannot collide with Speaker ids
     # allocated elsewhere.
     s_id  = Session.allocate_ids(size=1)[0]
     s_key = ndb.Key(Speaker, s_id)
     # Update stored session with session keys
     data['key'] = s_key
     # create Session, send email to organizer confirming
     # creation of Session & return (modified) SessionForm
     Speaker(**data).put()
     taskqueue.add(
         params = {
             'email'   : user.email(),
             'subject' : 'You Added %s as a Speaker!' % data['name'],
             'body'    : 'Here are the details for the added speaker:',
             'info'    : repr(request)},
         url    = '/tasks/send_confirmation_email')
     return request
 def get(self, year):
     """Admin-only: queue a divisions rebuild for *year* on the backend."""
     self._require_admin()
     taskqueue.add(
         method='GET',
         queue_name='admin',
         target='backend-tasks',
         url='/backend-tasks/do/rebuild_divisions/{}'.format(year))
Пример #23
0
 def get(self):
     """Schedule the background UpdateTask and acknowledge in the response.

     NOTE(review): readers may observe the datastore mid-refresh while the
     update task runs — confirm whether that is acceptable.
     """
     taskqueue.add(url='/admin/update_task', params={})
     self.response.out.write('Scheduled UpdateTask')
Пример #24
0
    def post(self):
        """Dispatch a push message to the platforms named in the request.

        Platform 1 sends via GCM: registration ids are collected in batches
        up to the configured multicast limit and each batch is handed to the
        /push/gcmtask queue. Platform 2 sends to APNS sandbox tags directly.
        """
        msg = json.loads(self.request.get("message"))
        country = self.request.get("country")
        if 1 in msg["request"]["platforms"]:
                appconfig = AppConfig.get_or_insert("config")
                gcmmessage = convertToGcmMessage(self,msg)
                gcm_reg_ids = []
                x=0
                q = GcmToken.query(GcmToken.country == country)
                # Record the push (and how many tokens it targets) so the
                # task payload can carry its id.
                push_notification = PushModel(sent_to=q.count(),country=country)
                push_id = push_notification.put().id()
                gcmmessage["data"]["push_id"]=push_id
                for token in q.iter():
                    if x == appconfig.gcm_multicast_limit:
                        # Batch full: enqueue it and start a new one.
                        taskqueue.add(url = '/push/gcmtask',params={'message':json.dumps(gcmmessage),'reg':json.dumps(gcm_reg_ids)})
                        gcm_reg_ids=[]
                        x = 0
                    gcm_reg_ids.append(token.gcm_token)
                    x = x + 1
                # Flush the final partial batch.
                if len(gcm_reg_ids) > 0:
                    print(gcm_reg_ids)
                    print(gcmmessage)
                    taskqueue.add(url = '/push/gcmtask',params={'message':json.dumps(gcmmessage),'reg':json.dumps(gcm_reg_ids)})

        if 2 in msg["request"]["platforms"]:
            #Send to iOS devices using APNS
            tagid = self.request.get("tagid")

            q = ApnsSandboxTag.query(ApnsSandboxTag.tag == tagid)
            for tag in q.iter():
                sendSingleApnsMessage(self, convertToApnsMessage(self, msg), tag.token.get().apns_token)

        #Return result
        self.response.write("OK")
    def _createSessionObject(self, request):
        """Create Session object from request and store in datastore.

        Args:
            request: SessionForm-style message carrying the session fields
                plus websafeConferenceKey identifying the parent conference.

        Returns:
            A SessionForm copied from the stored Session entity.

        Raises:
            endpoints.UnauthorizedException: no user is logged in.
            endpoints.NotFoundException: the conference key does not resolve.
            endpoints.ForbiddenException: caller is not the conference organizer.
            endpoints.BadRequestException: the session name is missing.
        """
        # Check that user logged in
        user = endpoints.get_current_user()
        if not user:
            raise endpoints.UnauthorizedException('Authorization required')
        user_id = getUserId(user)

        # Get conference
        urlkey = request.websafeConferenceKey
        conf_key = ndb.Key(urlsafe=urlkey)
        conf = conf_key.get()

        # Check that conference exists
        if not conf:
            raise endpoints.NotFoundException(
                'No conference found with key: %s' % urlkey)

        # Check that logged in user is organizer
        if user_id != conf.organizerUserId:
            raise endpoints.ForbiddenException(
                'Only the organizer can add sessions to the conference.')

        # Every session must have a name
        if not request.name:
            raise endpoints.BadRequestException("Session 'name' field required")

        # Copy SessionForm/ProtoRPC Message into dictionary
        data = {field.name: getattr(request, field.name) for field in request.all_fields()}

        # Prepare all data for SessionForm: drop fields not stored on the
        # entity and record the organizer.
        del data['websafeConferenceKey']
        del data['websafeKey']
        del data['organizerDisplayName']
        data['organizerUserId'] = user_id

        # Convert dates from strings to DateTime objects
        if data['date']:
          data['date'] = datetime.strptime(data['date'][:10], "%Y-%m-%d").date()

        # Convert time from strings to DateTime objects
        if data['startTime']:
          data['startTime'] = datetime.strptime(data['startTime'],"%H:%M:%S").time()

        # Generate session id
        session_id = Session.allocate_ids(size=1, parent=conf_key)[0]

        # Generate session key with conference key as parent
        session_key = ndb.Key(Session, session_id, parent=conf_key)
        data['key'] = session_key

        # Write to datastore
        Session(**data).put()


        # Make announcement for featured speaker via task queue.
        speaker = data['speaker']
        taskqueue.add(params={'speaker': speaker, 'websafeConferenceKey': urlkey}, url='/tasks/set_session_announcement')

        return self._copySessionToForm(session_key.get())
Пример #26
0
def broadcastGcmMessage(self, message):
    """Fan the GCM *message* out to every enabled token via the task queue.

    Registration ids are collected in batches up to the configured multicast
    limit; each full batch is handed to /push/gcmtask. Returns the size of
    the final (possibly empty) batch.
    """
    appconfig = AppConfig.get_or_insert("config")
    gcmmessage = message

    tokens = GcmToken.query(GcmToken.enabled == True)
    # Record the push so its id can travel inside the message payload.
    push_notification = PushModel(sent_to=tokens.count())
    push_id = push_notification.put().id()
    gcmmessage["data"]["push_id"] = push_id
    print(push_id)

    batch = []
    batch_size = 0
    for token in tokens.iter():
        if batch_size == appconfig.gcm_multicast_limit:
            # Batch full: enqueue it and start collecting the next one.
            taskqueue.add(url = '/push/gcmtask',params={'message':json.dumps(gcmmessage),'reg':json.dumps(batch)})
            batch = []
            batch_size = 0
        batch.append(token.gcm_token)
        batch_size += 1
    # Flush the final partial batch.
    if len(batch) > 0:
        print(batch)
        print(gcmmessage)
        taskqueue.add(url = '/push/gcmtask',params={'message':json.dumps(gcmmessage),'reg':json.dumps(batch)})

    return batch_size
Пример #27
0
def broadcastApnsMessage(self, message):
    """Fan the APNS *message* out to every enabled token via the task queue.

    Uses the sandbox token pool when apns_test_mode is set. Device tokens
    are batched up to the configured multicast limit per /push/apnstask task.
    """
    appconfig = AppConfig.get_or_insert("config")
    apnsmessage = message

    if appconfig.apns_test_mode:
        tokens = ApnsSandboxToken.query(ApnsSandboxToken.enabled == True)
    else:
        tokens = ApnsToken.query(ApnsToken.enabled == True)

    batch = []
    batch_size = 0
    for token in tokens.iter():
        if batch_size == appconfig.apns_multicast_limit:
            # Batch full: enqueue it and start collecting the next one.
            taskqueue.add(url = '/push/apnstask',params={'message':json.dumps(apnsmessage),'reg':json.dumps(batch)})
            batch = []
            batch_size = 0
        batch.append(token.apns_token)
        batch_size += 1

    # Flush the final partial batch.
    if len(batch) > 0:
        taskqueue.add(url = '/push/apnstask',params={'message':json.dumps(apnsmessage),'reg':json.dumps(batch)})
Пример #28
0
 def post(self):
         """GitHub push-webhook handler.

         Parses the JSON payload, upserts the Repository, stores each pushed
         Commit, and queues per-commit /metric and /pusher tasks.
         """
         logging.info(self.request.body)
         logging.info(self.request.get("payload"))
         body = simplejson.loads(self.request.get("payload"))
         repository = Repository.all().filter("url =", body["repository"]["url"]).get()
         if not repository:
                 # First push from this repo: create the Repository record.
                 repository = Repository.fromJSON(body["repository"])
                 repository.put()
         for commit in body["commits"]:
                 commit['pusher'] = body['pusher']
                 cmt = Commit.fromJSON(repository, commit)
                 cmt.put()
                 # Bump the repo's freshness on every stored commit.
                 repository.last_update = datetime.now()
                 repository.put()
                 taskqueue.add(url="/metric", params={"id": cmt.id, "author_email": cmt.author_email, "author_name": cmt.author_name, "repo": cmt.repository.url, 
                         "message": cmt.message})
                 # Payload for the realtime /pusher broadcast task.
                 c = {
                                 "id": cmt.id,
                                 "url": cmt.url,
                                 "author_name": cmt.author_name,
                                 "author_hash": cmt.author_hash,
                                 "timestamp": cmt.timestamp,
                                 "message": cmt.summary,
                                 "repo_name": cmt.repository.name,
                                 "repo_url": cmt.repository.url,
                                 "pusher": cmt.pusher,
                                 "origin": "commit"
                         }
                 taskqueue.add(url="/pusher", params=c) 
Пример #29
0
    def save_turn_one_game_state(game, deck, player_one_hand):
        """Save the state of the game after player one has made a move.

        Persists player one's final hand, passes the turn to player two,
        saves the updated deck, and queues a move-notification e-mail.

        Args:
          game: current game the player is playing in.
          deck: the deck state after the player has drawn cards for the card
            exchange phase.
          player_one_hand: the final hand the player has after the desired
            cards have been replaced.
        """

        # Save player one's final hand

        hand = Poker.serialize_hand(player_one_hand)
        final_hand = Hand(
            player=game.player_one,
            game=game.key,
            hand=hand,
            state=str(HandState.ENDING)
        )
        final_hand.put()

        # Hand the turn to player two and persist the post-exchange deck.
        game.active_player = game.player_two
        game.deck = deck.serialize()
        game.put()
        # transactional=True: the task is only enqueued if the enclosing
        # datastore transaction commits.
        taskqueue.add(
            url='/tasks/send_move_email',
            params={
                'game_key': game.key.urlsafe(),
                'user_key': game.active_player.urlsafe()
            },
            transactional=True
        )
 def get(self, year):
     """Admin-only: queue a playoff-type backfill for *year* on the backend."""
     self._require_admin()
     taskqueue.add(
         method='GET',
         queue_name='admin',
         target='backend-tasks',
         url='/backend-tasks/do/backfill_playoff_type/{}'.format(year))
Пример #31
0
    def admin_stock_process(self):
        """Process a batch of stock-in / stock-out (and stubbed move) operations.

        Validates every requested row first (stock-out rows must pass
        stock_out_check); if any validation fails, a failed history record is
        written and nothing is applied. Otherwise a history record is
        created, an optional order-quantity reset task is queued, and each
        validated row is applied to the warehouse.

        NOTE(review): the 'move' (warehouse transfer) branch is a stub — its
        try body is `pass` — so move rows are currently ignored.
        """
        from ..models.stock_history_model import create_history
        self.meta.change_view('json')
        msg = []
        data = []
        check_list = []
        length = self.params.get_integer('length')
        remake = self.params.get_string('remake')
        warehouse = self.params.get_ndb_record('warehouse')
        operation = self.params.get_string('operation', u'庫存管理')
        auto_fill = self.params.get_boolean('auto_fill', False)
        target_warehouse = self.params.get_ndb_record('target_warehouse')

        # Validation pass: collect applicable rows, accumulate error messages.
        for index in range(0, length):
            sku = self.params.get_ndb_record('sku_key_%s' % index)
            quantity = self.params.get_integer('sku_quantity_%s' % index)
            operation_type = self.params.get_string('sku_operation_type_%s' %
                                                    index)
            if sku is not None and quantity != 0:
                if operation_type == 'in':
                    check_list.append({
                        'sku': sku,
                        'quantity': quantity,
                        'operation_type': operation_type
                    })
                if operation_type == 'out':
                    try:
                        warehouse.stock_out_check(sku,
                                                  quantity,
                                                  auto_fill=auto_fill)
                        check_list.append({
                            'sku': sku,
                            'quantity': quantity,
                            'operation_type': operation_type
                        })
                    except Exception as error:
                        msg.append(u'%s' % error)
                if operation_type == 'move':
                    target_warehouse = self.params.get_ndb_record(
                        'target_warehouse')
                    try:
                        pass
                    except Exception as error:
                        msg.append(u'%s' % error)
                    # Warehouse-to-warehouse transfer (not implemented yet).

        # Any validation error aborts the whole batch.
        if len(msg) > 0:
            create_history(self.application_user, operation, remake, False,
                           u'<br>\n'.join(msg))
            self.context['message'] = u'<br>\n'.join(msg)
            self.context['data'] = {'items': data}
            return

        order = self.params.get_ndb_record('order_key')
        history = create_history(self.application_user,
                                 operation,
                                 remake,
                                 order=order)
        if order is not None:
            # Reset the order's quantities in the background.
            task = taskqueue.add(url=self.uri(
                'taskqueue:product_stock:stock:reset_order_quantity'),
                                 params={
                                     'order': self.util.encode_key(order),
                                     'history': self.util.encode_key(history)
                                 })

        # Apply pass: all rows in check_list already validated above.
        for item in check_list:
            sku = item['sku']
            quantity = item['quantity']
            operation_type = item['operation_type']
            if operation_type == 'in':
                data.append(warehouse.stock_in(sku, quantity, history, u'入庫'))
            if operation_type == 'out':
                data.append(
                    warehouse.stock_out(sku, quantity, history, u'出庫',
                                        auto_fill))
        self.context['message'] = u'完成'
        self.context['data'] = {'items': data}
Пример #32
0
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

     http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

@author: Michinobu Maeda
'''
from google.appengine.api import taskqueue
import model

ken_page = model.SourceContent.get_or_insert("ken_all_page")
if not (ken_page is None):
    ken_page.checked = None
    ken_page.put()
jig_page = model.SourceContent.get_or_insert("jigyosyo_page")
if not (jig_page is None):
    jig_page.checked = None
    jig_page.put()

print 'Content-Type: text/plain'
print ''
print 'exit'
taskqueue.add(url='/fetch')
    def testAddingTaskWithMethod(self):
        """Enqueue a task using a non-default HTTP method (PUT)."""
        task_params = {'foo': 'bar'}
        taskqueue.add(url='/put', params=task_params, method='PUT')
    def testAddingTaskWithContentType(self):
        """Enqueue a task relying on the default content-type header."""
        task_params = {'foo': 'bar'}
        taskqueue.add(url='/run', params=task_params)
Пример #35
0
 def kick_off_sync(self):
     """Enqueue a last.fm sync task on the 'sync' queue, targeted at
     this service's backend instance."""
     backend = self.service_backend_name
     taskqueue.add(url='/task/sync/fm/', queue_name='sync', target=backend)
Пример #36
0
 def _post_put_hook(self, future):
     """NDB post-put hook: enqueue an email-send task for this entity."""
     payload = {'email': self.key.urlsafe()}
     taskqueue.add(url='/emails/tasks/send',
                   queue_name='contact-form-emails',
                   params=payload)
Пример #37
0
 def timeline_followers_notify(self, timeline):
     """Enqueue a POST task notifying followers of *timeline*."""
     taskqueue.add(
         url='/tasks/notify/timeline/',
         method='POST',
         queue_name='timelineFollowers',
         params={'timeline': timeline.key()})
Пример #38
0
def insert_handler(food_type, user_id):
    """Enqueue insertion of the yelp bundle into the user's timeline."""
    payload = {'user_id': user_id, 'food_type': food_type}
    taskqueue.add(url='/yelp_item', params=payload)
    return 'The bundle item has been inserted'
Пример #39
0
 def get(self):
     """Cron handler: total up unaccounted purchases and build one
     Dispersals record splitting the proceeds between commissioned
     sellers, item designers and the site maintainers, then enqueue
     the task that pays everyone out.
     """
     if not db.WRITE_CAPABILITY.is_enabled():
         # Datastore is read-only (maintenance window): refuse now so we
         # never account records we could not later mark as paid.
         self.response.set_status(503)
         # Bug fix: was 'self.response.headders' (typo), which raised
         # AttributeError instead of setting the Retry-After header.
         self.response.headers['Retry-After'] = 120
         logging.info("Told that the db was down for maintenance")
         self.response.out.write('Currently down for maintenance')
     else:
         # Only records older than two hours are considered, so
         # in-flight purchases are not accounted prematurely.
         t = int(time.time()) - 7200
         logging.info(
             'CRON AccountNewRecords: totaling funds for after %d' % t)
         dispersal = Dispersals()
         commissions_total = 0
         designer_total = 0
         maintainers_total = 0
         total = 0
         # Maintainer shares are stored flattened as "avkey|percent|..."
         maintainers = AppSettings.get_or_insert(
             "maintainers",
             value="00000000-0000-0000-0000-000000000000|0").value.split(
                 "|")
         people_to_pay = []
         query = Purchases.gql("WHERE accounted = :1", "0")
         for record in query:
             logging.info('CRON: %s|%s' %
                          (record.key().id(), record.seller))
             # NOTE(review): this reads 'amount_paid' but the commission
             # math below reads 'paid_amount' -- confirm which attribute
             # the Purchases model actually defines.
             total += record.amount_paid
             # Distributor details are cached in memcache as YAML.
             token = 'Distributor_%s' % record.seller
             cacheditem = memcache.get(token)
             if cacheditem is None:
                 dist = Distributor.gql("WHERE avkey = :1",
                                        record.seller).get()
                 dist_info = {
                     "max_discount": dist.max_discount,
                     "commission": dist.commission
                 }
                 memcache.set(token, yaml.safe_dump(dist_info))
             else:
                 # pull the item's details out of the yaml'd dict
                 dist_info = yaml.safe_load(cacheditem)
             if dist_info['commission'] > 0:
                 # Accumulate this seller's commission on the dispersal
                 # record under a '<seller>_seller' attribute.
                 commission = record.paid_amount * dist_info[
                     'commission'] / 100
                 commissions_total += commission
                 stoken = '%s_seller' % (record.seller)
                 commission_total = commission + getattr(
                     dispersal, stoken, 0)
                 setattr(dispersal, stoken, commission_total)
                 people_to_pay.append(stoken)
             name = record.item
             item = tools.get_item(name, True)
             if item is None:
                 logging.error(
                     'Error, Paid item %s not found. Requested by %s using %s.'
                     %
                     (name, self.request.headers['X-SecondLife-Owner-Name'],
                      self.request.headers['X-SecondLife-Object-Name']))
                 # Bug fix: previously fell through and crashed on
                 # item['designer_cut'] below when the item was missing.
                 continue
             if item['designer_cut'] > 0:
                 # Accumulate the designer's cut under '<designer>_designer'.
                 cut = record.paid_amount * item['designer_cut'] / 100
                 designer_total += cut
                 dtoken = '%s_designer' % (item['designer'])
                 cut_total = cut + getattr(dispersal, dtoken, 0)
                 setattr(dispersal, dtoken, cut_total)
                 people_to_pay.append(dtoken)
         # Re-pair the flattened (avkey, percent) maintainer list.
         for maintainer, amount in zip(maintainers[::2], maintainers[1::2]):
             cut = total * int(amount) / 100
             maintainers_total += cut
             mtoken = '%s_maintainer' % (maintainer)
             setattr(dispersal, mtoken, cut)
             people_to_pay.append(mtoken)
         if query.count(1) > 0:
             # Sanity check: never disperse more than was actually paid.
             if total >= (maintainers_total + designer_total +
                          commissions_total):
                 setattr(dispersal, 'commissions_total', commissions_total)
                 setattr(dispersal, 'designers_total', designer_total)
                 setattr(dispersal, 'maintainers_total', maintainers_total)
                 setattr(dispersal, 'dispersal_total',
                         (maintainers_total + designer_total +
                          commissions_total))
                 setattr(dispersal, 'total', total)
                 setattr(dispersal, 'people_to_pay',
                         "\n".join(people_to_pay))
                 dispersal.put()
                 logging.info('CRON AccountNewRecords: saved')
                 #add right url
                 taskqueue.add(url='/paiddist/disperse?id=%s' %
                               (dispersal.key().id()),
                               headers={},
                               queue_name='Disperse',
                               method='PUT')
                 # Mark every processed purchase so the next run skips it.
                 for record in query:
                     record.accounted = "1"
                     record.put()
             else:
                 logging.error(
                     "CRON AccountNewRecords: total dispersal %s is greater than total paid %s"
                     % (maintainers_total + designer_total +
                        commissions_total, total))
                 redirecturl = "not needed?"
                 alarm.SendAlarm(
                     'Dispersal', t, True,
                     "total dispersal %s is greater than total paid %s" %
                     (maintainers_total + designer_total +
                      commissions_total, total), redirecturl)
                 self.error(500)
         else:
             logging.info('CRON AccountNewRecords: No records')
         logging.info('CRON AccountNewRecords: Finished')
Пример #40
0
    def _createConferenceObject(self, request):
        """Create a Conference entity from the request form.

        The conference is stored as a child of the organizer's Profile,
        and a confirmation-email task is enqueued after the put.

        Args:
            request: ConferenceForm message from the caller.

        Returns:
            The (possibly modified) request form, with defaults filled in.

        Raises:
            endpoints.UnauthorizedException: no signed-in user.
            endpoints.BadRequestException: missing conference name.
        """
        # preload necessary data items
        user = endpoints.get_current_user()
        if not user:
            raise endpoints.UnauthorizedException('Authorization required')
        user_id = getUserId(user)

        if not request.name:
            raise endpoints.BadRequestException(
                "Conference 'name' field required")

        # copy ConferenceForm/ProtoRPC Message into dict
        data = {
            field.name: getattr(request, field.name)
            for field in request.all_fields()
        }
        # these two form-only fields are not stored on the entity
        del data['websafeKey']
        del data['organizerDisplayName']

        # add default values for those missing
        # (both data model & outbound Message)
        for df in DEFAULTS:
            if data[df] in (None, []):
                data[df] = DEFAULTS[df]
                setattr(request, df, DEFAULTS[df])

        # convert dates from strings to Date objects;
        # set month based on start_date
        if data['startDate']:
            data['startDate'] = datetime.strptime(data['startDate'][:10],
                                                  "%Y-%m-%d").date()
            data['month'] = data['startDate'].month
        else:
            data['month'] = 0
        if data['endDate']:
            data['endDate'] = datetime.strptime(data['endDate'][:10],
                                                "%Y-%m-%d").date()

        # set seatsAvailable to be same as maxAttendees on creation
        # both for data model & outbound Message
        if data["maxAttendees"] > 0:
            data["seatsAvailable"] = data["maxAttendees"]
            setattr(request, "seatsAvailable", data["maxAttendees"])

        # make Profile Key from user ID
        p_key = ndb.Key(Profile, user_id)
        # allocate new Conference ID with Profile key as parent
        c_id = Conference.allocate_ids(size=1, parent=p_key)[0]
        # make Conference key from ID
        c_key = ndb.Key(Conference, c_id, parent=p_key)
        data['key'] = c_key
        data['organizerUserId'] = request.organizerUserId = user_id

        # create Conference & return (modified) ConferenceForm
        Conference(**data).put()
        # confirmation email is sent out-of-band by a task handler
        taskqueue.add(params={
            'email': user.email(),
            'conferenceInfo': repr(request)
        },
                      url='/tasks/send_confirmation_email')

        return request
Пример #41
0
 def get(self):
     """Enqueue a digest task for every enabled user (up to 500)."""
     enabled_users = User.all().filter("enabled =", True).fetch(500)
     for user in enabled_users:
         taskqueue.add(url='/onedigest', params={'email': user.email})
 def get(self):
     """Kick off the CSV teams backup on the b2 backend and acknowledge."""
     taskqueue.add(target='backend-tasks-b2',
                   url='/backend-tasks-b2/do/csv_backup_teams',
                   method='GET')
     self.response.out.write("Enqueued CSV teams backup")
Пример #43
0
class ConferenceApi(remote.Service):
    """Conference API v0.1"""

    # - - - Profile objects - - - - - - - - - - - - - - - - - - -

    def _copyProfileToForm(self, prof):
        """Build a ProfileForm populated from the given Profile entity.

        The teeShirtSize string stored on the Profile is converted back
        to its TeeShirtSize enum member; other fields copy verbatim.
        """
        form = ProfileForm()
        for form_field in form.all_fields():
            name = form_field.name
            if not hasattr(prof, name):
                continue
            value = getattr(prof, name)
            if name == 'teeShirtSize':
                value = getattr(TeeShirtSize, value)
            setattr(form, name, value)
        form.check_initialized()
        return form

    def _getProfileFromUser(self):
        """Fetch the current user's Profile, creating one on first use.

        Raises:
            endpoints.UnauthorizedException: if no user is signed in.
        """
        user = endpoints.get_current_user()
        if not user:
            raise endpoints.UnauthorizedException('Authorization required')

        profile_key = ndb.Key(Profile, getUserId(user))
        profile = profile_key.get()
        if not profile:
            # First visit: seed a Profile from the account details.
            profile = Profile(
                key=profile_key,
                displayName=user.nickname(),
                mainEmail=user.email(),
                teeShirtSize=str(TeeShirtSize.NOT_SPECIFIED),
            )
            profile.put()
        return profile

    def _doProfile(self, save_request=None):
        """Return the user's ProfileForm, applying edits first if given.

        Args:
            save_request: optional saveProfile request whose truthy
                displayName/teeShirtSize values overwrite the Profile.
        """
        prof = self._getProfileFromUser()

        if save_request:
            for field in ('displayName', 'teeShirtSize'):
                val = getattr(save_request, field, None)
                if val:
                    setattr(prof, field, str(val))
            prof.put()

        return self._copyProfileToForm(prof)

    @endpoints.method(message_types.VoidMessage, ProfileForm,
                      path='profile', http_method='GET', name='getProfile')
    def getProfile(self, request):
        """Return the signed-in user's profile."""
        return self._doProfile()

    @endpoints.method(ProfileMiniForm, ProfileForm,
                      path='profile', http_method='POST', name='saveProfile')
    def saveProfile(self, request):
        """Apply the submitted edits to the user's profile and return it."""
        return self._doProfile(request)

# - - - Conference objects - - - - - - - - - - - - - - - - - - -

    def _copyConferenceToForm(self, conf, displayName):
        """Build a ConferenceForm from a Conference entity.

        Date-valued fields are rendered as strings, websafeKey is
        derived from the entity key, and the organizer display name is
        filled in when supplied.
        """
        form = ConferenceForm()
        for field in form.all_fields():
            name = field.name
            if hasattr(conf, name):
                value = getattr(conf, name)
                if name.endswith('Date'):
                    value = str(value)
                setattr(form, name, value)
            elif name == "websafeKey":
                setattr(form, name, conf.key.urlsafe())
        if displayName:
            setattr(form, 'organizerDisplayName', displayName)
        form.check_initialized()
        return form

    def _createConferenceObject(self, request):
        """Create a Conference entity from the request form.

        The conference is stored as a child of the organizer's Profile,
        and a confirmation-email task is enqueued after the put.

        Args:
            request: ConferenceForm message from the caller.

        Returns:
            The (possibly modified) request form, with defaults filled in.

        Raises:
            endpoints.UnauthorizedException: no signed-in user.
            endpoints.BadRequestException: missing conference name.
        """
        # preload necessary data items
        user = endpoints.get_current_user()
        if not user:
            raise endpoints.UnauthorizedException('Authorization required')
        user_id = getUserId(user)

        if not request.name:
            raise endpoints.BadRequestException(
                "Conference 'name' field required")

        # copy ConferenceForm/ProtoRPC Message into dict
        data = {
            field.name: getattr(request, field.name)
            for field in request.all_fields()
        }
        # these two form-only fields are not stored on the entity
        del data['websafeKey']
        del data['organizerDisplayName']

        # add default values for those missing (both data model & outbound Message)
        for df in DEFAULTS:
            if data[df] in (None, []):
                data[df] = DEFAULTS[df]
                setattr(request, df, DEFAULTS[df])

        # convert dates from strings to Date objects; set month based on start_date
        if data['startDate']:
            data['startDate'] = datetime.strptime(data['startDate'][:10],
                                                  "%Y-%m-%d").date()
            data['month'] = data['startDate'].month
        else:
            data['month'] = 0
        if data['endDate']:
            data['endDate'] = datetime.strptime(data['endDate'][:10],
                                                "%Y-%m-%d").date()

        # set seatsAvailable to be same as maxAttendees on creation
        # both for data model & outbound Message
        if data["maxAttendees"] > 0:
            data["seatsAvailable"] = data["maxAttendees"]
            setattr(request, "seatsAvailable", data["maxAttendees"])

        # make Profile Key from user ID
        p_key = ndb.Key(Profile, user_id)
        # allocate new Conference ID with Profile key as parent
        c_id = Conference.allocate_ids(size=1, parent=p_key)[0]
        # make Conference key from ID
        c_key = ndb.Key(Conference, c_id, parent=p_key)
        data['key'] = c_key
        data['organizerUserId'] = request.organizerUserId = user_id

        # create Conference, send email to organizer confirming
        # creation of Conference & return (modified) ConferenceForm
        Conference(**data).put()
        taskqueue.add(params={
            'email': user.email(),
            'conferenceInfo': repr(request)
        },
                      url='/tasks/send_confirmation_email')

        return request

    @endpoints.method(ConferenceQueryForms, ConferenceForms,
                      path='queryConferences', http_method='POST',
                      name='queryConferences')
    def queryConferences(self, request):
        """Run the user-supplied filter query and return the matches."""
        conferences = self._getQuery(request)
        items = [self._copyConferenceToForm(conf, "") for conf in conferences]
        return ConferenceForms(items=items)

    @endpoints.method(ConferenceForm, ConferenceForm,
                      path='conference', http_method='POST',
                      name='createConference')
    def createConference(self, request):
        """Create a new conference owned by the current user."""
        return self._createConferenceObject(request)

    @endpoints.method(message_types.VoidMessage, ConferenceForms,
                      path='getConferencesCreated', http_method='POST',
                      name='getConferencesCreated')
    def getConferencesCreated(self, request):
        """Return the conferences organized by the signed-in user."""
        user = endpoints.get_current_user()
        if not user:
            raise endpoints.UnauthorizedException('Authorization required')

        # Conferences are children of the organizer's Profile, so an
        # ancestor query returns exactly this user's conferences.
        profile_key = ndb.Key(Profile, getUserId(user))
        conferences = Conference.query(ancestor=profile_key)
        display_name = getattr(profile_key.get(), 'displayName')
        return ConferenceForms(items=[
            self._copyConferenceToForm(conf, display_name)
            for conf in conferences
        ])

    @endpoints.method(message_types.VoidMessage,
                      ConferenceForms,
                      path='filterPlayground',
                      http_method='POST',
                      name='filterPlayground')
    def filterPlayground(self, request):
        """Demo endpoint: a hard-coded filter/sort combination over
        Conference (London, Medical Innovations, > 10 attendees)."""
        q = Conference.query()
        q = q.filter(Conference.city == "London")
        q = q.filter(Conference.topics == "Medical Innovations")
        # Bug fix: order() must be given a property, not the model class
        # itself -- 'q.order(Conference)' raised a TypeError at runtime.
        q = q.order(Conference.name)
        q = q.filter(Conference.maxAttendees > 10)

        return ConferenceForms(
            items=[self._copyConferenceToForm(conf, "") for conf in q])

    def _getQuery(self, request):
        """Translate the request's filter forms into an ndb query."""
        query = Conference.query()
        inequality_filter, filters = self._formatFilters(request.filters)

        # ndb requires the first sort order to match the inequality
        # property when one is present; name ordering follows either way.
        if inequality_filter:
            query = query.order(ndb.GenericProperty(inequality_filter))
        query = query.order(Conference.name)

        for filtr in filters:
            value = filtr["value"]
            if filtr["field"] in ("month", "maxAttendees"):
                # these two properties are stored as integers
                value = int(value)
            node = ndb.query.FilterNode(filtr["field"], filtr["operator"],
                                        value)
            query = query.filter(node)
        return query

    def _formatFilters(self, filters):
        """Validate user-supplied filters and map field/operator names.

        Returns:
            (inequality_field, formatted_filters) where inequality_field
            is the single field targeted by an inequality (or None).

        Raises:
            endpoints.BadRequestException: unknown field/operator, or
                inequalities spanning more than one field.
        """
        formatted = []
        inequality_field = None

        for f in filters:
            filtr = {fld.name: getattr(f, fld.name) for fld in f.all_fields()}

            try:
                filtr["field"] = FIELDS[filtr["field"]]
                filtr["operator"] = OPERATORS[filtr["operator"]]
            except KeyError:
                raise endpoints.BadRequestException(
                    "Filter contains invalid field or operator.")

            if filtr["operator"] != "=":
                # Datastore allows inequalities on one property only.
                if inequality_field and inequality_field != filtr["field"]:
                    raise endpoints.BadRequestException(
                        "Inequality filter is allowed on only one field.")
                inequality_field = filtr["field"]

            formatted.append(filtr)
        return (inequality_field, formatted)

    @ndb.transactional(xg=True)
    def _conferenceRegistration(self, request, reg=True):
        """Register or unregister the user for the selected conference.

        Runs as a cross-group transaction so the user's Profile and the
        conference's seat count are updated atomically.

        Args:
            request: message carrying websafeConferenceKey.
            reg: True to register, False to unregister.

        Returns:
            BooleanMessage -- True on success, False when unregistering
            a conference the user was not registered for.

        Raises:
            endpoints.NotFoundException: unknown conference key.
            ConflictException: already registered, or no seats left.
        """
        retval = None
        prof = self._getProfileFromUser()  # get user Profile

        # check if conf exists given websafeConfKey
        # get conference; check that it exists
        wsck = request.websafeConferenceKey
        conf = ndb.Key(urlsafe=wsck).get()
        if not conf:
            raise endpoints.NotFoundException(
                'No conference found with key: %s' % wsck)

        # register
        if reg:
            # check if user already registered otherwise add
            if wsck in prof.conferenceKeysToAttend:
                raise ConflictException(
                    "You have already registered for this conference")

            # check if seats avail
            if conf.seatsAvailable <= 0:
                raise ConflictException("There are no seats available.")

            # register user, take away one seat
            prof.conferenceKeysToAttend.append(wsck)
            conf.seatsAvailable -= 1
            retval = True

        # unregister
        else:
            # check if user already registered
            if wsck in prof.conferenceKeysToAttend:

                # unregister user, add back one seat
                prof.conferenceKeysToAttend.remove(wsck)
                conf.seatsAvailable += 1
                retval = True
            else:
                retval = False

        # write things back to the datastore & return
        prof.put()
        conf.put()
        return BooleanMessage(data=retval)

    @endpoints.method(CONF_GET_REQUEST, BooleanMessage,
                      path='conference/{websafeConferenceKey}',
                      http_method='POST', name='registerForConference')
    def registerForConference(self, request):
        """Register the current user for the selected conference."""
        return self._conferenceRegistration(request)

    @endpoints.method(CONF_GET_REQUEST,
                      BooleanMessage,
                      path='conference/{websafeConferenceKey}',
                      http_method='DELETE',
                      name='unregisterFromConference')
    def unregisterFromConference(self, request):
        """Unregister user from selected conference."""
        return self._conferenceRegistration(request, reg=False)

    @endpoints.method(message_types.VoidMessage,
                      ConferenceForms,
                      path='conferences/attending',
                      http_method='GET',
                      name='getConferencesToAttend')
    def getConferencesToAttend(self, request):
        """Get list of conferences that user has registered for."""
        prof = self._getProfileFromUser()  # get user Profile
        # Batch-fetch every conference the user registered for --
        # one get_multi, never one-by-one gets.
        conf_keys = [
            ndb.Key(urlsafe=wsck) for wsck in prof.conferenceKeysToAttend
        ]
        conferences = ndb.get_multi(conf_keys)

        # Batch-fetch the organizers' profiles for their display names.
        organisers = [
            ndb.Key(Profile, conf.organizerUserId) for conf in conferences
        ]
        profiles = ndb.get_multi(organisers)

        # put display names in a dict for easier fetching
        names = {}
        for profile in profiles:
            names[profile.key.id()] = profile.displayName

        # Bug fix: the display names were computed but never used --
        # every form previously went out with an empty organizer name.
        return ConferenceForms(items=[
            self._copyConferenceToForm(conf, names[conf.organizerUserId])
            for conf in conferences
        ])

    @endpoints.method(CONF_GET_REQUEST, ConferenceForm,
                      path='conference/{websafeConferenceKey}',
                      http_method='GET', name='getConference')
    def getConference(self, request):
        """Look up one conference by its websafe key."""
        conf = ndb.Key(urlsafe=request.websafeConferenceKey).get()
        if not conf:
            raise endpoints.NotFoundException(
                'No conference found with key: %s' %
                request.websafeConferenceKey)
        # The organizer's Profile is the conference's datastore parent.
        organiser_profile = conf.key.parent().get()
        return self._copyConferenceToForm(
            conf, getattr(organiser_profile, 'displayName'))

    @staticmethod
    def _cacheAnnouncement():
        """Refresh the memcached 'nearly sold out' announcement.

        Used by the memcache cron job & putAnnouncement(); returns the
        announcement text ('' when nothing qualifies).
        """
        confs = Conference.query(
            ndb.AND(Conference.seatsAvailable <= 5,
                    Conference.seatsAvailable > 0)).fetch(
                        projection=[Conference.name])

        if not confs:
            # No nearly-sold-out conferences: drop any stale entry.
            memcache.delete(MEMCACHE_ANNOUNCEMENTS_KEY)
            return ""

        # Format the announcement and publish it to memcache.
        announcement = '%s %s' % (
            'Last chance to attend! The following conferences '
            'are nearly sold out:', ', '.join(conf.name for conf in confs))
        memcache.set(MEMCACHE_ANNOUNCEMENTS_KEY, announcement)
        return announcement

    @endpoints.method(message_types.VoidMessage, StringMessage,
                      path='conference/announcement/get',
                      http_method='GET', name='getAnnouncement')
    def getAnnouncement(self, request):
        """Return the cached announcement (empty string when none)."""
        announcement = memcache.get(MEMCACHE_ANNOUNCEMENTS_KEY) or ''
        return StringMessage(data=announcement)

    ####################### Begin Project 4 work ###################

    def _copySessionToForm(self, session):
        """Build a SessionForm from a Session entity.

        Date fields are stringified; websafeSessionKey is derived from
        the entity key.
        """
        form = SessionForm()
        logging.debug(type(session))
        for field in form.all_fields():
            name = field.name
            if hasattr(session, name):
                value = getattr(session, name)
                if name.endswith('Date'):
                    value = str(value)
                setattr(form, name, value)
            elif name == "websafeSessionKey":
                setattr(form, name, session.key.urlsafe())
        form.check_initialized()
        return form

    @endpoints.method(CONF_GET_REQUEST, SessionForms,
                      path='conference/sessions/{websafeConferenceKey}',
                      http_method='GET', name='getConferenceSessions')
    def getConferenceSessions(self, request):
        """Return every session belonging to the given conference."""
        matches = Session.query(
            Session.webSafeConfId == request.websafeConferenceKey)
        return SessionForms(
            items=[self._copySessionToForm(s) for s in matches])

    @endpoints.method(
        SESSION_BY_TYPE,
        SessionForms,
        path='conference/{websafeConferenceKey}/sessions/{typeOfSession}',
        http_method='GET',
        name='getSessionsByType')
    def getSessionsByType(self, request):
        """Return the conference's sessions of one type (e.g. lecture,
        keynote, workshop)."""
        matches = Session.query(
            Session.webSafeConfId == request.websafeConferenceKey,
            Session.type == request.typeOfSession)
        return SessionForms(
            items=[self._copySessionToForm(s) for s in matches])

    @endpoints.method(SESSION_BY_SPEAKER, SessionForms,
                      path='sessions/{speaker}', http_method='GET',
                      name='getSessionsBySpeaker')
    def getSessionsBySpeaker(self, request):
        """Return all sessions by the given speaker, across all conferences."""
        matches = Session.query(Session.speaker == request.speaker)
        return SessionForms(
            items=[self._copySessionToForm(s) for s in matches])

    @endpoints.method(SESSION_GET_REQUEST,
                      SessionForm,
                      path='session/{websafeConferenceKey}',
                      http_method='POST',
                      name='createSession')
    def createSession(self, request):
        """Create or update Session object, returning SessionForm/request.
           Note: open only to the organizer of the conference"""
        if not request.name:
            raise endpoints.BadRequestException(
                "Session 'name' field required")

        # check for authorization, valid conference key, and that the current user is the conference orgainizer
        user = endpoints.get_current_user()
        if not user:
            raise endpoints.UnauthorizedException('Authorization required')
        user_id = getUserId(user)
        try:
            conf = ndb.Key(urlsafe=request.websafeConferenceKey).get()
        except TypeError:
            raise endpoints.BadRequestException(
                'Sorry, only string is allowed as websafeConferenceKey input')
        except Exception, e:
            if e.__class__.__name__ == 'ProtocolBufferDecodeError':
                raise endpoints.BadRequestException(
                    'Sorry, the websafeConferenceKey string seems to be invalid'
                )
            else:
                raise
        if not conf:
            raise endpoints.NotFoundException(
                'No conference found with key: %s' % request.webSafeConfId)
        if user_id != getattr(conf, 'organizerUserId'):
            raise endpoints.UnauthorizedException(
                'Only conference organizer is authorized to add sessions.')

        # copy SessionForm/ProtoRPC Message into dict
        data = {
            field.name: getattr(request, field.name)
            for field in request.all_fields()
        }
        del data[
            'websafeConferenceKey']  # session does not have a websafeConferenceKey

        # add default values for those missing (both data model & outbound Message)
        for df in SESSION_DEFAULTS:
            if data[df] in (None, []):
                data[df] = SESSION_DEFAULTS[df]
                setattr(request, df, SESSION_DEFAULTS[df])

        # convert dates from strings to Date objects
        if data['date']:
            data['date'] = datetime.strptime(data['date'][:10],
                                             "%Y-%m-%d").date()

        data['webSafeConfId'] = request.websafeConferenceKey
        del data['websafeSessionKey']  # this is only in the SessionForm

        logging.debug(data)
        # creation of Session, record the key to get the item & return (modified) SessionForm
        sessionKey = Session(**data).put()
        # start the task to update the conference featured speaker if needed
        if data['speaker'] is not SESSION_DEFAULTS['speaker']:
            taskqueue.add(params={
                'websafeConferenceKey': request.websafeConferenceKey,
                'speaker': data['speaker']
            },
                          url='/tasks/set_featured_speaker')

        return self._copySessionToForm(sessionKey.get())
Пример #44
0
 def get(self):
   """Enqueue an update task for every known feed on the requested
   queue (defaults to 'background')."""
   queue = self.request.get('queue_name', 'background')
   for feed in Feed.all():
     taskqueue.add(url='/update/feed', queue_name=queue,
                   params={'id': feed.key().name()})
Пример #45
0
def site_checks():
    """Schedule a site-check task for every known account."""
    for acct in Account.query().iter():
        check_url = '/queue/site_check/{}'.format(acct.user_id)
        taskqueue.add(url=check_url, params={}, method="GET")
Пример #46
0
 def get(self):
     """Enqueue one reminder-mail task per enabled user (up to 500)."""
     # TODO: Check if one has already been submitted for this period.
     enabled_users = User.all().filter("enabled =", True).fetch(500)
     for person in enabled_users:
         taskqueue.add(url='/onereminder', params={'email': person.email})
Пример #47
0
	def post( self ):
		"""Build a signed raw transaction from the client payload.

		Verifies the payload hash and the ECDSA signatures (for
		pubkeyhash or multisig inputs), assembles the raw transaction
		bytes, enqueues it for broadcast, and returns the TXID.
		Raises ValidationError on any malformed or unverifiable input.
		"""
		coind_type = self.get_request_coind_type()

		# Fetch the request parameters
		try:
			params = json.loads( self.request.get( 'params' ) )
		except ValueError as e:
			raise ValidationError( 'params', e.message )

		sign = params['sign']
		pub_key = params.get( 'pub_key', u'' )
		payload = params['payload']

		# Parse the payload (base64 -> bz2 -> JSON)
		try:
			payload = json.loads( bz2.decompress( b64decode( payload ) ) )
		except ValueError as e:
			raise ValidationError( 'params', e.message )
		except Exception as e:
			raise ValidationError( 'params', 'decompress' )

		# Verify the hash of the payload
		if sha256( payload['body'] ).hexdigest() != payload['hash']:
			raise ValidationError( 'params', 'sha256' )

		# Parse the payload body itself
		try:
			payload = json.loads( payload['body'] )
		except ValueError as e:
			raise ValidationError( 'params', e.message )

		# Unpack the payload fields
		vin_txid = payload['vin_txid']
		vin_idx = payload['vin_idx']
		vin_type = payload['vin_type']
		vin_reqSigs = payload['vin_reqSigs']
		vout_lt = unhexlify( payload['vout_lt'] )
		hash = payload['hash']
		from_pk = payload['from_pk']
		log_data = payload['log_data']


		# Validate the structure of sign
		if not isinstance( sign, list ):
			raise ValidationError( 'sign', 'list' )
		if len( sign ) != len( hash ):
			raise ValidationError( 'sign', 'n' )
		for i in range( 0, len( hash ) ):
			if not isinstance( sign[i], list ):
				raise ValidationError( 'sign', 'list' )
			if len( sign[i] ) != vin_reqSigs[i]:
				raise ValidationError( 'sign', 'reqSigs' )

			for e in sign[i]:
				if len( e ) != 128:
					raise ValidationError( 'sign', 'len' )

				# Format check: OK if it parses as hexadecimal
				try:
					unhexlify( e )
				except TypeError as e:
					raise ValidationError( 'sign', e.message )


		# Validate pub_key
		if not isinstance( pub_key, unicode ):
			raise ValidationError( 'pub_key', 'unicode' )
		if vin_type == 'pubkeyhash':
			# Format check and parse
			pub_key = parse_pub_key( pub_key, 'pub_key' )


		# Verify the digital signatures
		if vin_type == 'pubkeyhash':
			for i in range( 0, len( hash ) ):
				# Convert the signed hash value to an integer
				h = long( hash[i]['hash'], 16 )

				# Parse the signature into (r, s)
				sig_r = long( sign[i][0][0:64], 16 )
				sig_s = long( sign[i][0][64:128], 16 )

				# Parse (decompress) the public key
				pk_x, pk_y = ecdsa.decompress( pub_key )

				if not ecdsa.verify( h, sig_r, sig_s, pk_x, pk_y ):
					raise ValidationError( 'sign', 'verify' )
		elif vin_type == 'multisig':
			for i in range( 0, len( hash ) ):
				# Convert the signed hash value to an integer
				h = long( hash[i]['hash'], 16 )

				# Index of the next public key to try
				k = 0

				for e in sign[i]:
					# Parse the signature into (r, s)
					sig_r = long( e[0:64], 16 )
					sig_s = long( e[64:128], 16 )

					while True:
						# Any usable public keys left?
						if k == len( from_pk ):
							raise ValidationError( 'sign', 'verify' )

						# Parse a public key that was sent to preparetx
						pk_x, pk_y = ecdsa.decompress( bytearray( b64decode( from_pk[k] ) ) )
						k = k + 1

						# On successful verification, move to the next signature
						if ecdsa.verify( h, sig_r, sig_s, pk_x, pk_y ):
							break


		# Raw transaction data starts with the version number
		tx = bytearray( pack( '<i', 2 ) )

		# Assemble the vin section
		tx = tx + bytearray( var_int( len( vin_txid ) ) )
		for i in range( 0, len( vin_txid ) ):
			# Build the unlocking (input) script
			script = self.make_script( sign[i], pub_key, vin_type )

			# Append the input transaction reference
			tx = tx + bytearray( unhexlify( vin_txid[i] )[::-1] + pack( '<I', vin_idx[i] ) )
			tx = tx + var_int( len( script ) ) + script + bytearray( pack( '<I', 0 ) )

		# Concatenate the vout..locktime span
		tx = tx + vout_lt


		# Append to the log data
		log_data['sign'] = sign
		log_data['tx'] = hexlify( tx )


		# Bundle the data for the task queue as a payload
		payload_body = json.dumps( {
			'tx': hexlify( tx ),
			'log_data': log_data
		} )

		# Wrap it together with its hash
		payload = {
			'body': payload_body,
			'hash': sha256( payload_body ).hexdigest()
		}

		# Push onto the task queue
		taskqueue.add(
			url = '/maintain/sendrawtransaction',
			params = {
				'coind_type': coind_type,
				'payload': b64encode( bz2.compress( json.dumps( payload ) ) )
			},
			queue_name = 'send-tx'
		)

		# Return the TXID of the transaction we built
		self.write_json( {
			'result': hexlify( sha256( sha256( tx ).digest() ).digest()[::-1] )
		} )
class AbstractExportBulkWorker(webapp2.RequestHandler):
    """Task-queue worker that exports sites to a CSV file on GCS in batches.

    Each task fetches one batch of sites, writes it to a '.part' file on
    GCS, and chains the next task via the datastore cursor.  When the
    cursor is exhausted, the parts are concatenated, deduplicated, and
    written to the final file.  Subclasses must implement
    get_base_query() and filter_sites().
    """

    def __init__(self, *args, **kwargs):
        super(AbstractExportBulkWorker, self).__init__(*args, **kwargs)
        self.event = None
        self.sites_per_task = DEFAULT_SITES_PER_TASK

    def _write_csv_rows(self, fd, sites):
        """Write one CSV row per site to the open file object *fd*."""
        writer = csv.writer(fd)
        event_short_name = self.event.short_name if self.event else None
        fields = get_csv_fields_list(event_short_name)
        for site in sites:
            writer.writerow(site.ToCsvLine(fields))

    def get_base_query(self):
        """Return the datastore query producing candidate sites."""
        raise NotImplementedError

    def filter_sites(self, sites):
        """Return the subset of *sites* that should be exported.

        Bug fix: the original stub took no argument, yet post() always
        calls it with the fetched batch; overrides already accept it.
        """
        raise NotImplementedError

    def get_continuation_param_dict(self):
        """Parameters forwarded to the next chained task."""
        return {
            'cursor': self.end_cursor,
            'event': self.filtering_event_key,
            'filename': self.filename,
            'csv_header': self.csv_header,
            'worker_url': self.worker_url,
        }

    def post(self):
        # Request arguments describing where we are in the export.
        self.start_cursor = self.request.get('cursor')
        self.filtering_event_key = self.request.get('event')
        self.filename = self.request.get('filename')
        self.csv_header = self.request.get('csv_header')
        self.worker_url = self.request.get('worker_url')

        self.event = Event.get(self.filtering_event_key) if self.filtering_event_key else None

        # Get the (base) query, skip to the cursor, fetch and filter sites.
        query = self.get_base_query()
        if self.start_cursor:
            query.with_cursor(self.start_cursor)
        fetched_sites = query.fetch(limit=self.sites_per_task)
        sites = self.filter_sites(fetched_sites)

        # Best effort: remove a stale copy of the final file before writing.
        # (Fixed Python 2-only "except Exception, e" syntax and the
        # tab-indented block that mixed tabs with the file's spaces.)
        try:
            logging.info("try to delete bucket")
            cloudstorage.delete(BUCKET_NAME + '/' + self.filename)
        except Exception as e:
            logging.error("Deleting bucket failed: %s", e)

        # Write this batch of the csv file to GCS as a part file.
        csv_part_gcs_fd = cloudstorage.open(
            BUCKET_NAME + '/' + self.filename + '.part.' + self.start_cursor,
            'w',
            content_type='text/csv'
        )
        self._write_csv_rows(csv_part_gcs_fd, sites)
        csv_part_gcs_fd.close()

        # Decide what to do next.
        self.end_cursor = query.cursor()
        if self.end_cursor and self.start_cursor != self.end_cursor:
            # More data remains: chain to the next task.
            taskqueue.add(
                url=self.worker_url,
                params=self.get_continuation_param_dict(),
                retry_options=taskqueue.TaskRetryOptions(task_retry_limit=3),
            )
        else:
            # Finish the file: combine parts and deduplicate lines.
            logging.info(u"Deduplicating to create %s ..." % self.filename)

            sio = StringIO()
            path_prefix = BUCKET_NAME + '/' + self.filename + '.part'
            for gcs_file_stat in cloudstorage.listbucket(path_prefix):
                csv_part_gcs_fd = cloudstorage.open(gcs_file_stat.filename)
                for line in csv_part_gcs_fd:
                    sio.write(line)
                csv_part_gcs_fd.close()
            sio.seek(0)
            deduplicated_lines = set(sio)

            # Write the csv header and deduplicated lines to the final file.
            csv_complete_gcs_fd = cloudstorage.open(
                BUCKET_NAME + '/' + self.filename,
                'w',
                content_type='text/csv'
            )
            csv_complete_gcs_fd.write(self.csv_header.encode('utf-8'))
            for line in deduplicated_lines:
                csv_complete_gcs_fd.write(line)
            csv_complete_gcs_fd.close()
Пример #49
0
    def get(self, event_key):
        """Run post-event cleanup for *event_key*.

        Rebuilds the event's team list and, for offseason events with a
        finals bracket, creates Winner/Finalist awards from the bracket.
        """
        # Kick off async fetches for later use.
        event_future = Event.get_by_id_async(event_key)
        matches_future = match_query.EventMatchesQuery(event_key).fetch_async()

        # Rebuild event teams.
        taskqueue.add(url='/tasks/math/do/eventteam_update/' + event_key,
                      method='GET')

        awards = []
        event = event_future.get_result()
        if event.event_type_enum == EventType.OFFSEASON:
            matches = MatchHelper.organizeMatches(matches_future.get_result())
            bracket = MatchHelper.generateBracket(matches,
                                                  event.alliance_selections)
            if 'f' in bracket:
                finals = bracket['f'][1]

                def build_award(award_type, name_str, alliance_key):
                    # One Winner/Finalist award for the named alliance.
                    teams = finals[alliance_key]
                    return Award(
                        id=Award.render_key_name(event.key_name, award_type),
                        name_str=name_str,
                        award_type_enum=award_type,
                        year=event.year,
                        event=event.key,
                        event_type_enum=event.event_type_enum,
                        team_list=[
                            ndb.Key(Team, 'frc{}'.format(team))
                            for team in teams
                            if team.isdigit()
                        ],
                        recipient_json_list=[
                            json.dumps({
                                'team_number': team,
                                'awardee': None
                            }) for team in teams
                        ],
                    )

                winning_alliance = '{}_alliance'.format(
                    finals['winning_alliance'])
                if winning_alliance == 'red_alliance':
                    losing_alliance = 'blue_alliance'
                else:
                    losing_alliance = 'red_alliance'

                awards.append(build_award(AwardType.WINNER, "Winner",
                                          winning_alliance))
                awards.append(build_award(AwardType.FINALIST, "Finalist",
                                          losing_alliance))
                AwardManipulator.createOrUpdate(awards)

        self.response.out.write(
            "Finished post-event tasks for {}. Created awards: {}".format(
                event_key, awards))
Пример #50
0
def call(from_number):
    """Log the caller's number and enqueue a task to handle the call.

    Fixes: use the idiomatic logging.info() instead of
    logging.log(logging.INFO, ...); the unused 'task' local is dropped
    (the function returned None either way).
    """
    logging.info(from_number)
    taskqueue.add(url='/celia',
                  target='main',
                  params={'from': from_number})
Пример #51
0
def dispatch_task():
    """Enqueue the extract-all transaction task.

    Returns a (body, status) tuple describing the queued task.
    """
    from google.appengine.api import taskqueue
    queued = taskqueue.add(url='/api/transaction/extract_all')
    return str(queued), 200
Пример #52
0
    def receive(self, message):
        """Inbound-mail handler: turn a received mail into a Kindle push.

        Routes by the local part of the recipient address
        ('user__command'), enforces the sender whitelist, then either
        triggers an immediate delivery, queues linked URLs for
        download/conversion, or converts the mail body (plus image
        attachments) and sends it to the user's Kindle address.
        """
        # If there are multiple recipients, only the first one is interpreted
        to = parseaddr(message.to)[1]
        to = to.split('@')[0] if to and '@' in to else 'xxx'
        if '__' in to:
            listto = to.split('__')
            username = listto[0] if listto[0] else 'admin'
            to = listto[1]
        else:
            username = '******'

        user = KeUser.all().filter('name = ', username).get()
        if not user:
            username = '******'
            user = KeUser.all().filter('name = ', username).get()

        if not user or not user.kindle_email:
            self.response.out.write('No account or no email configured!')
            return

        # Whitelist check: allow '*', the exact sender, or '@<domain>'.
        sender = parseaddr(message.sender)[1]
        mailhost = sender.split('@')[1] if sender and '@' in sender else None
        if (not sender or not mailhost) or \
            (not user.whitelist.filter('mail = ', '*').get()
            and not user.whitelist.filter('mail = ', sender.lower()).get()
            and not user.whitelist.filter('mail = ', '@' + mailhost.lower()).get()):
            self.response.out.write("Spam mail!")
            default_log.warn('Spam mail from : %s' % sender)
            return

        if hasattr(message, 'subject'):
            subject = decode_subject(message.subject)
        else:
            subject = u"NoSubject"

        # A mail sent to 'trigger' fires one "deliver now" run
        if to.lower() == 'trigger':
            return self.TrigDeliver(subject, username)

        # Fetch and decode the mail content
        txt_bodies = message.bodies('text/plain')
        html_bodies = message.bodies('text/html')
        try:
            allBodies = [body.decode() for ctype, body in html_bodies]
        except:
            default_log.warn('Decode html bodies of mail failed.')
            allBodies = []

        # This mail is plain text only
        if len(allBodies) == 0:
            default_log.info('no html body, use text body.')
            try:
                allBodies = [body.decode() for ctype, body in txt_bodies]
            except:
                default_log.warn('Decode text bodies of mail failed.')
                allBodies = []
            bodies = u''.join(allBodies)
            if not bodies:
                return
            bodyurls = []
            for l in bodies.split('\n'):
                l = l.strip()
                if not l:
                    continue
                link = IsHyperLink(l)
                if link:
                    bodyurls.append('<a href="%s">%s</a><br />' % (link, link))
                else:
                    break

            bodies = u"""<html><head><meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
              <title>%s</title></head><body>%s</body></html>""" % (
                subject, ''.join(bodyurls) if bodyurls else bodies)
            allBodies = [bodies.encode('utf-8')]

        # Start processing the mail content
        soup = BeautifulSoup(allBodies[0], 'lxml')

        # Merge multiple mail body fragments
        if len(allBodies) > 1:
            for o in allBodies[1:]:
                so = BeautifulSoup(o, 'lxml')
                b = so.find('body')
                if not b:
                    continue
                for c in b.contents:
                    soup.body.append(c)

        # Decide whether the content is text or link(s) (possibly several)
        links = []
        body = soup.body if soup.find('body') else soup
        for s in body.stripped_strings:
            link = IsHyperLink(s)
            if link:
                links.append(link)
            else:  # multiple links must appear one per line
                break
        if not links:  # no links among plain strings; check html <a> tags
            links = list(soup.find_all('a', attrs={'href': True}))
            link = links[0]['href'] if links else ''
            text = ' '.join([s for s in body.stripped_strings])
            text = text.replace(link, '')
            # Too much text: push the body content directly instead
            if len(links) != 1 or len(text) > WORDCNT_THRESHOLD_FOR_APMAIL:
                links = []

        if links:
            # Decide between downloading a file and forwarding content
            isbook = bool(to.lower() in ('book', 'file', 'download'))
            isbook = link[-5:].lower() in ('.mobi', '.epub',
                                           '.docx') if not isbook else isbook
            isbook = link[-4:].lower() in ('.pdf', '.txt', '.doc',
                                           '.rtf') if not isbook else isbook

            param = {
                'u': username,
                'urls': '|'.join(links),
                'type': 'Download' if isbook else user.book_type,
                'to': user.kindle_email,
                'tz': user.timezone,
                'subject': subject[:SUBJECT_WORDCNT_FOR_APMAIL],
                'lng': user.ownfeeds.language,
                'keepimage': '1' if user.ownfeeds.keep_image else '0'
            }
            taskqueue.add(url='/url2book',
                          queue_name="deliverqueue1",
                          method='GET',
                          params=param,
                          target='worker')
        else:  # forward the mail body directly
            # First check whether there are any images
            from lib.makeoeb import MimeFromFilename
            hasimage = False
            if hasattr(message, 'attachments'):
                for f, c in message.attachments:
                    if MimeFromFilename(f):
                        hasimage = True
                        break

            # Fix up non-conforming HTML mail first
            h = soup.find('head')
            if not h:
                h = soup.new_tag('head')
                soup.html.insert(0, h)
            t = soup.head.find('title')
            if not t:
                t = soup.new_tag('title')
                t.string = subject
                soup.head.insert(0, t)

            # With images a MOBI or EPUB has to be generated; also, Duokan
            # mail does not support html push, so convert to epub first too
            if hasimage or (user.book_type == "epub"):
                from main import local_time
                from lib.makeoeb import (getOpts, CreateOeb, setMetaData,
                                         ServerContainer, byteStringIO,
                                         EPUBOutput, MOBIOutput)

                # Mimic Amazon's conversion servers and strip the CSS
                if DELETE_CSS_FOR_APPSPOTMAIL:
                    tag = soup.find('style', attrs={'type': 'text/css'})
                    if tag:
                        tag.extract()
                    for tag in soup.find_all(attrs={'style': True}):
                        del tag['style']

                # Normalize the filenames in the img src attributes
                for img in soup.find_all('img', attrs={'src': True}):
                    if img['src'].lower().startswith('cid:'):
                        img['src'] = img['src'][4:]

                opts = getOpts()
                oeb = CreateOeb(default_log, None, opts)

                setMetaData(oeb,
                            subject[:SUBJECT_WORDCNT_FOR_APMAIL],
                            user.ownfeeds.language,
                            local_time(tz=user.timezone),
                            pubtype='book:book:KindleEar')
                oeb.container = ServerContainer(default_log)
                id, href = oeb.manifest.generate(id='page', href='page.html')
                item = oeb.manifest.add(id,
                                        href,
                                        'application/xhtml+xml',
                                        data=unicode(soup))
                oeb.spine.add(item, False)
                oeb.toc.add(subject, href)

                if hasattr(message, 'attachments'):
                    for filename, content in message.attachments:
                        mimetype = MimeFromFilename(filename)
                        if mimetype:
                            try:
                                content = content.decode()
                            except:
                                pass
                            else:
                                id, href = oeb.manifest.generate(id='img',
                                                                 href=filename)
                                item = oeb.manifest.add(id,
                                                        href,
                                                        mimetype,
                                                        data=content)

                oIO = byteStringIO()
                o = EPUBOutput() if user.book_type == "epub" else MOBIOutput()
                o.convert(oeb, oIO, opts, default_log)
                BaseHandler.SendToKindle(username, user.kindle_email,
                                         subject[:SUBJECT_WORDCNT_FOR_APMAIL],
                                         user.book_type, str(oIO.getvalue()),
                                         user.timezone)
            else:  # no images: push the HTML directly for better reading
                m = soup.find('meta', attrs={"http-equiv": "Content-Type"})
                if not m:
                    m = soup.new_tag('meta',
                                     content="text/html; charset=utf-8")
                    m["http-equiv"] = "Content-Type"
                    soup.html.head.insert(0, m)
                else:
                    m['content'] = "text/html; charset=utf-8"

                html = unicode(soup).encode('utf-8')
                BaseHandler.SendToKindle(username, user.kindle_email,
                                         subject[:SUBJECT_WORDCNT_FOR_APMAIL],
                                         'html', html, user.timezone, False)
        self.response.out.write('Done')
    def get(self, year):
        """Sync the event and district lists for *year* from FIRST.

        Pulls events from the FMS API (and optionally Elasticsearch),
        de-duplicates offseason events against existing TBA events,
        creates suggestions for unmatched offseasons, persists the merged
        events/districts, and enqueues per-event detail fetch tasks.
        """
        df_config = Sitevar.get_or_insert('event_list_datafeed_config')
        df = DatafeedFMSAPI('v2.0')
        df2 = DatafeedFIRSTElasticSearch()

        fmsapi_events, event_list_districts = df.getEventList(year)
        # Only consult Elasticsearch when the config flag is the boolean
        # True (PEP 8: identity comparison against True, not '== True').
        if df_config.contents.get('enable_es') is True:
            elasticsearch_events = df2.getEventList(year)
        else:
            elasticsearch_events = []

        # All regular-season events can be inserted without any work involved.
        # Offseason events from the FRC Events API may carry a different code
        # than the TBA event code, so they are de-duplicated separately.
        fmsapi_events_offseason = [e for e in fmsapi_events if e.is_offseason]
        event_keys_to_put = set([e.key_name for e in fmsapi_events]) - set(
            [e.key_name for e in fmsapi_events_offseason])
        events_to_put = [
            e for e in fmsapi_events if e.key_name in event_keys_to_put
        ]

        matched_offseason_events, new_offseason_events = \
            OffseasonEventHelper.categorize_offseasons(int(year), fmsapi_events_offseason)

        # For all matched offseason events, make sure the FIRST code matches
        # the TBA FIRST code; the FIRST-side model is discarded.
        for tba_event, first_event in matched_offseason_events:
            tba_event.first_code = first_event.event_short
            events_to_put.append(
                tba_event)  # Update TBA events - discard the FIRST event

        # For new offseason events we can't automatically match, create
        # suggestions for manual review.
        SuggestionCreator.createDummyOffseasonSuggestions(new_offseason_events)

        merged_events = EventManipulator.mergeModels(
            list(events_to_put), elasticsearch_events
        ) if elasticsearch_events else list(events_to_put)
        events = EventManipulator.createOrUpdate(merged_events) or []

        fmsapi_districts = df.getDistrictList(year)
        merged_districts = DistrictManipulator.mergeModels(
            fmsapi_districts, event_list_districts)
        if merged_districts:
            districts = DistrictManipulator.createOrUpdate(merged_districts)
        else:
            districts = []

        # Enqueue a detail-fetch task for each persisted event.
        for event in events:
            taskqueue.add(queue_name='datafeed',
                          target='backend-tasks',
                          url='/backend-tasks/get/event_details/' +
                          event.key_name,
                          method='GET')

        template_values = {
            "events": events,
            "districts": districts,
        }

        # Only render output when invoked directly, not from the task queue.
        if 'X-Appengine-Taskname' not in self.request.headers:
            path = os.path.join(
                os.path.dirname(__file__),
                '../templates/datafeeds/fms_event_list_get.html')
            self.response.out.write(template.render(path, template_values))
Пример #54
0
                                                  is_new_list):
            # Only attrs that affect stats
            if is_new or set(['alliances_json', 'score_breakdown_json'
                              ]).intersection(set(updated_attrs)) != set():
                affected_stats_event_keys.add(match.event.id())
            try:
                FirebasePusher.update_match(match)
            except Exception:
                logging.warning("Firebase update_match failed!")

        # Enqueue statistics
        for event_key in affected_stats_event_keys:
            # Enqueue task to calculate matchstats
            try:
                taskqueue.add(url='/tasks/math/do/event_matchstats/' +
                              event_key,
                              method='GET')
            except Exception:
                logging.error("Error enqueuing event_matchstats for {}".format(
                    event_key))
                logging.error(traceback.format_exc())

            # Enqueue task to calculate district points
            try:
                taskqueue.add(
                    url='/tasks/math/do/district_points_calc/{}'.format(
                        event_key),
                    method='GET')
            except Exception:
                logging.error(
                    "Error enqueuing district_points_calc for {}".format(
Пример #55
0
def handle_error(request, response, exception):
    """Global webapp2 error handler.

    Logs the exception, optionally mails a detailed report (environment,
    client, geo and traceback details) to every configured developer via
    the send-email task queue, then renders the error template matching
    the response status code.

    Args:
        request: the webapp2 request being handled when the error occurred.
        response: the webapp2 response the error page is written to.
        exception: the exception instance that escaped the handler.
    """
    exc_type, exc_value, exc_tb = sys.exc_info()

    # Context shared by the error template and the developer email.
    c = {
        'exception': str(exception),
        'url': request.url,
    }

    if request.app.config.get('send_mail_developer') is not False:
        # send email
        subject = "[{}] {} Error ({})".format(
            request.app.config.get('environment').upper(),
            request.app.config.get('app_name'), exc_type.__name__)

        lines = traceback.format_exception(exc_type, exc_value, exc_tb)
        ua = httpagentparser.detect(request.user_agent)
        # httpagentparser reports the platform under 'flavor' for some
        # agents and 'os' for others. ('in' replaces the removed
        # dict.has_key and the fragile `and/or` idiom.)
        _os = 'flavor' if 'flavor' in ua else 'os'

        operating_system = str(ua[_os]['name']) if "name" in ua[_os] else "-"
        if 'version' in ua[_os]:
            operating_system += ' ' + str(ua[_os]['version'])
        if 'dist' in ua:
            operating_system += ' ' + str(ua['dist'])

        city = i18n.get_city_code(request)
        region = i18n.get_region_code(request)
        country = i18n.get_country_code(request)
        coordinates = i18n.get_city_lat_long(request)

        browser = ua['browser']['name'] if 'browser' in ua else "-"
        browser_version = ua['browser']['version'] if 'browser' in ua else "-"

        message = '<strong>Application ID:</strong> ' + app_identity.get_application_id() + "<br />" + \
                  '<strong>Application Version:</strong> ' + os.environ['CURRENT_VERSION_ID'] + "<br />" + \
                  '<hr><strong>IP Address:</strong> ' + str(request.remote_addr) + "<br />" + \
                  '<strong>City:</strong> ' + str(city) + "<br />" + \
                  '<strong>Region:</strong> ' + str(region) + "<br />" + \
                  '<strong>Country:</strong> ' + str(country) + "<br />" + \
                  '<strong>Coordinates:</strong> <a href="https://www.google.com.au/maps/preview/@' + str(
            coordinates) + ',8z">' + str(coordinates) + '</a><br />' + \
                  '<hr><strong>User Agent:</strong> ' + str(request.user_agent) + "<br />" + \
                  '<strong>Operating System:</strong> ' + str(operating_system) + "<br />" + \
                  '<strong>Browser:</strong> ' + str(browser) + "<br />" + \
                  '<strong>Browser Version:</strong> ' + str(browser_version) + "<br />" + \
                  '<hr><strong>Error Type:</strong> ' + exc_type.__name__ + "<br />" + \
                  '<strong>Description:</strong> ' + c['exception'] + "<br />" + \
                  '<strong>Method:</strong> ' + str(os.environ['REQUEST_METHOD']) + "<br />" + \
                  '<strong>URL:</strong> ' + c['url'] + "<br />" + \
                  '<strong>Referrer:</strong> ' + str(request.referer) + "<br />" + \
                  '<strong>Traceback:</strong> <br />' + '<br />'.join(lines)

        # Value comparison, not identity: the original used `is not`,
        # which only worked by accident of string interning. Skip the
        # mail when the failure is itself the email-log save, to avoid
        # an error-mail loop.
        if c['exception'] != 'Error saving Email Log in datastore':
            email_url = webapp2.uri_for('taskqueue-send-email')

            # NOTE(review): each 'developers' entry is assumed to be a
            # (name, email) pair, so dev[1] is the address -- confirm
            # against the app config.
            for dev in request.app.config.get('developers'):
                taskqueue.add(url=email_url,
                              params={
                                  'to':
                                  dev[1],
                                  'subject':
                                  subject,
                                  'body':
                                  message,
                                  'sender':
                                  request.app.config.get('contact_sender'),
                              })

    # Prefer the exception's own HTTP status when it carries a truthy
    # one; otherwise fall back to a plain 500.
    status_int = getattr(exception, 'status_int', None) or 500
    template = request.app.config.get('error_templates')[status_int]
    t = jinja2.get_jinja2(factory=jinja_bootstrap.jinja2_factory,
                          app=webapp2.get_app()).render_template(
                              template, **c)
    logging.error("Error {}: {}".format(status_int, exception))
    response.write(t)
    response.set_status(status_int)
Пример #56
0
    def post(self):
        """Kick off the alphabet-partitioned builder tasks.

        Refuses to enqueue anything while the queue still has pending
        tasks, then redirects to the rank page either way.
        """
        # Refuse to start the tasks if there are some already running.
        queue = Queue()
        stats = queue.fetch_statistics()
        if stats.tasks == 0:
            # The original debug `print 'nop'` wrote straight to stdout,
            # which corrupts the CGI response on App Engine; log instead.
            logging.info('nop')
            # One builder task per alphabet partition.
            partitions = [
                ('A', 'B'), ('C', 'E'), ('F', 'G'), ('H', 'H'),
                ('I', 'M'), ('N', 'O'), ('P', 'R'), ('S', 'S'),
                ('T', 'T'), ('U', 'Z'),
            ]
            for start, end in partitions:
                taskqueue.add(url='/builder',
                              params={'start': start, 'end': end})

        self.redirect('/rank')
Пример #57
0
	def queue(self):
		"""Enqueue this result task, pickled, on the configured result queue."""
		payload = {"resulttask": self.toPickleString()}
		taskqueue.add(
			queue_name=config.result_task_queue,
			url=config.result_task_url,
			params=payload)
Пример #58
0
    def post(self):
        """Store a URL posted from an IRC channel.

        Ensures the Url, Channel and ChannelUrl entities exist (creating
        them on first sight), queues a title fetch for brand-new URLs,
        and finally records the Post itself.
        """
        # NOTE(review): the original method started with
        #     if users.get_current_user():
        #         node.author = users.get_current_user()
        # but `node` is never defined in this scope, so every logged-in
        # request raised NameError. Dropped here; restore against the
        # proper entity if author tracking is actually wanted.

        post_channel = self.request.get('post_channel', '')
        post_user = self.request.get('post_user', '')
        post_url = self.request.get('post_url', '')

        # Add http:// when needed
        if not post_url.startswith('http'):
            post_url = 'http://' + post_url

        logging.debug('Post: C=%s U=%s P=%s' %
                      (post_channel, post_user, post_url))

        # 1. Create the Url entity on first sight of this address.
        url = Url.all().filter('url =', post_url).get()
        if not url:
            url = Url()
            url.url = post_url
            url.put()

            # Queue a named title-fetch task; the name (alphanumerics of
            # channel + URL, capped at 500 chars) deduplicates fetches of
            # the same URL for the same channel.
            name = ''.join(
                re.findall('[a-zA-Z0-9_-]',
                           post_channel + '_' + post_url))[:500]
            try:
                taskqueue.add(name=name,
                              queue_name='urlfetch',
                              url='/tasks/title',
                              params={'post_url': post_url})
            except taskqueue.TombstonedTaskError:
                logging.warning('TombstonedError %s' % post_url)
            except taskqueue.TaskAlreadyExistsError:
                logging.warning('TaskAlredyExists: %s' % post_url)

        # 2. Create the Channel entity if this channel is new.
        channel = Channel.all().filter('name =', post_channel).get()
        if not channel:
            channel = Channel()
            channel.name = post_channel
            # Channels whose name starts with '!' are private.
            if post_channel.startswith('!'):
                channel.private = True
            channel.put()

        # 3. Link the URL to the channel unless it is already linked.
        channelurl = ChannelUrl.all().filter('url =',
                                             url).filter('channel =',
                                                         channel).get()
        if not channelurl:
            channelurl = ChannelUrl()
            channelurl.channel = channel
            channelurl.url = url
            channelurl.put()
        else:
            logging.info('OLDIE! %s %s' %
                         (channelurl.channel.name, channelurl.url.url))

        # 4. Record the post itself.
        post = Post()
        post.channelurl = channelurl
        post.user = post_user
        post.put()
Пример #59
0
	def queue(self):
		"""Enqueue this worker task, pickled, on the configured worker queue."""
		payload = {"workertask": self.toPickleString()}
		taskqueue.add(
			queue_name=config.worker_task_queue,
			url=config.worker_task_url,
			params=payload)
Пример #60
0
 def add_task_for_repo(repo, name, action, **kwargs):
     """Queues up a task for an individual repository.

     The task name embeds a millisecond timestamp so repeated calls for
     the same repo/action get distinct task names.
     """
     stamp = int(time.time() * 1000)
     taskqueue.add(name='{}-{}-{}'.format(repo, name, stamp),
                   method='GET',
                   url='/{}/{}'.format(repo, action),
                   params=kwargs)