Code example #1
File: displaydata.py Project: cketcham/StressChill
	def get(self):
		if os.environ.get('HTTP_HOST'):
			base_url = 'http://' + os.environ['HTTP_HOST'] + '/'
		else:
			base_url = 'http://' + os.environ['SERVER_NAME'] + '/'

		extracted = memcache.get('saved')

		if not extracted:
			surveys = SurveyData.all().order('-timestamp').fetch(PAGE_SIZE*5+1)
			extracted = helper.extract_surveys (surveys)
			if surveys is not None:
				#memcache.set('saved', extracted, 604800)
				memcache.set('saved', extracted)
		template_values = { 'surveys' : extracted, 'base_url' : base_url }
		template_values['map'] = True
		path = os.path.join (os.path.dirname(__file__), 'views/map.html')
		self.response.out.write (helper.render(self, path, template_values))
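
The handler above is the classic read-through (cache-aside) memcache pattern: check memcache, fall back to the datastore on a miss, then write the result back so later requests are served from cache. A minimal, self-contained sketch of the pattern, with the loader callable standing in for the datastore query (it is not part of the project):

from google.appengine.api import memcache

def get_cached(key, loader, ttl=0):
	# Check the cache first; on a miss, load the value (e.g. from the
	# datastore) and write it back so later requests hit the cache.
	value = memcache.get(key)
	if value is None:
		value = loader()
		if value is not None:
			memcache.set(key, value, ttl)  # ttl=0 means no expiration
	return value
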
Code example #2
File: user.py Project: cketcham/StressChill
	def get(self):
		sess = gmemsess.Session(self)

		# if session is new, user was not logged in, redirect
		if sess.is_new():
			sess['error'] = 'Please log in to view this page.'
			sess['redirect'] = '/user/map'
			sess.save()
			self.redirect('/user/login')
			return
		# if username not set in session, user not logged in, redirect
		if not sess.has_key('username'):
			sess['error'] = 'Please log in to view this page.'
			sess['redirect'] = '/user/map'
			sess.save()
			self.redirect('/user/login')
			return

		if os.environ.get('HTTP_HOST'):
			base_url = 'http://' + os.environ['HTTP_HOST'] + '/'
		else:
			base_url = 'http://' + os.environ['SERVER_NAME'] + '/'


		# form user data cache name
		cache_name = 'data_' + sess['userid']

		extracted = memcache.get(cache_name)

		if not extracted:
			logging.debug('cache miss, populate')
			# get 5 pages of most recent records and cache
			surveys = SurveyData.all().filter('username =', sess['userid']).order('-timestamp').fetch(PAGE_SIZE*5 + 1)
			extracted = helper.extract_surveys (surveys)
			# if values returned, save in cache
			if surveys is not None:
				memcache.set(cache_name, extracted)

		template_values = { 'surveys' : extracted, 'base_url' : base_url }
		template_values['usermap'] = True
		path = os.path.join (os.path.dirname(__file__), 'views/map.html')
		self.response.out.write (helper.render(self, path, template_values))
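
The two session checks at the top of this handler repeat, nearly verbatim, across every user-facing handler in these listings (examples #3, #4, #6 and #7 use a combined is_new()/has_key form). The guard factors out naturally; a sketch, with handler standing in for the request handler instance:

def require_login(handler, sess, redirect_to):
	# Hypothetical helper: returns True when the user is logged in;
	# otherwise it stores the error and redirect target in the session
	# and bounces the user to the login page.
	if sess.is_new() or not sess.has_key('username'):
		sess['error'] = 'Please log in to view this page.'
		sess['redirect'] = redirect_to
		sess.save()
		handler.redirect('/user/login')
		return False
	return True

Each handler body would then start with: if not require_login(self, sess, '/user/map'): return
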
Code example #3
File: user.py Project: cketcham/StressChill
	def get(self):
		sess = gmemsess.Session(self)

		# redirect to login page if not logged in
		if sess.is_new() or not sess.has_key('username'):
			sess['error'] = 'Please log in to use this feature.'
			sess['redirect'] = '/user/delete?key=' + self.request.get('key')
			sess.save()
			self.redirect('/user/login')
			return
		
		# check if key set
		if not self.request.get('key'):
			sess['error'] = 'No observation was selected.'
			sess.save()
			self.redirect('/user/data')
			return

		# check valid key
		try:
			db_key = db.Key(self.request.get('key'))
			if db_key.kind() != 'SurveyData':
				sess['error'] = 'Bad key.'
				sess.save()
				self.redirect('/user/data')
				return

		except:
			sess['error'] = 'Bad key.'
			sess.save()
			self.redirect('/user/data')
			return

		# fetch the observation so we can check ownership
		observation = db.get(db_key)

		# if no observation exists with key, error
		if not observation:
			sess['error'] = 'No observation exists with this key or you do not have permission to delete this observation'
			sess.save()
			self.redirect('/user/data')
			return

		# if the user does not have permission, error
		if observation.username != sess['userid']:
			sess['error'] = 'No observation exists with this key or you do not have permission to delete this observation'
			sess.save()
			self.redirect('/user/data')
			return

		# format data...
		surveys = []
		surveys.append(observation)
		extracted = helper.extract_surveys(surveys)
		observation = extracted[0]

		if os.environ.get('HTTP_HOST'):
			base_url = 'http://' + os.environ['HTTP_HOST'] + '/'
		else:
			base_url = 'http://' + os.environ['SERVER_NAME'] + '/'

		# display delete confirmation page
		template_values = {'observation': observation, 'base_url':base_url}

		path = os.path.join (os.path.dirname(__file__), 'views/delete.html')
		self.response.out.write (helper.render(self, path, template_values))
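
The try/except around db.Key above is the standard way to validate an untrusted key string: construction raises BadKeyError on malformed input, and kind() confirms the entity type before anything is fetched. Condensed into a helper (a sketch, not code from the project):

from google.appengine.ext import db

def parse_survey_key(raw):
	# Return a validated SurveyData key, or None if the string is
	# malformed or names a different kind.
	try:
		key = db.Key(raw)
	except db.BadKeyError:
		return None
	if key.kind() != 'SurveyData':
		return None
	return key
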
Code example #4
File: user.py Project: cketcham/StressChill
	def get(self):
		sess = gmemsess.Session(self)

		# if session is new, user was not logged in, redirect
		if sess.is_new():
			sess['error'] = 'Please log in to view this page.'
			sess['redirect'] = '/user/data'
			sess.save()
			self.redirect('/user/login')
			return
		# if username not set in session, user not logged in, redirect
		if not sess.has_key('username'):
			sess['error'] = 'Please log in to view this page.'
			sess['redirect'] = '/user/data'
			sess.save()
			self.redirect('/user/login')
			return


		if os.environ.get('HTTP_HOST'):
			base_url = 'http://' + os.environ['HTTP_HOST'] + '/'
		else:
			base_url = 'http://' + os.environ['SERVER_NAME'] + '/'

		# get bookmark
		bookmark = self.request.get('bookmark')

		logging.debug(self.request.get('bookmark'))

		template_values = { 'base_url' : base_url }

		forward = True

		page = None

		# check if page set
		if self.request.get('page'):
			page = int(self.request.get('page'))
		elif not bookmark:
			page = 1

		# fetch cached values if any
		saved = None
		extracted = None

		# form user data cache name
		cache_name = 'data_' + sess['userid']

		# if page is set and in range, get the page from the cache
		if page is not None and 0 < page <= 5:
			saved = memcache.get(cache_name)

			# if not in cache, try fetching from datastore
			if not saved:
				logging.debug('cache miss, populate')
				# get 5 pages of most recent records and cache
				surveys = SurveyData.all().filter('username =', sess['userid']).order('-timestamp').fetch(PAGE_SIZE*5 + 1)
				saved = helper.extract_surveys (surveys)
				# if values returned, save in cache
				if surveys is not None:
					memcache.set(cache_name, saved)

			# if data, setup display 
			if saved:
				# get page
				extracted = helper.get_page_from_cache(saved, page, PAGE_SIZE)

				logging.debug(len(extracted) if extracted is not None else 'no page extracted')

				# if got page
				if extracted is not None:
					if len(extracted) == PAGE_SIZE + 1:
						# bookmark the last row that will be displayed, matching
						# the surveys[-2] bookmark used by the datastore branch
						template_values['next'] = str(extracted[-2]['realtime'])
						template_values['nextpage'] = page + 1
						extracted = extracted[:PAGE_SIZE]

					# if not on first page, setup back  
					if page > 1:
						template_values['back'] = str(extracted[0]['realtime'])
						template_values['backpage'] = page - 1


		else: # pages beyond 5th not cached
			logging.debug('not using cache')
			# determine direction to retreive records
			# if starts with '-', going backwards
			if bookmark.startswith('-'):
				forward = False
				bookmark = bookmark[1:]
			
			# if bookmark set, retrieve page relative to bookmark
			if bookmark:
				# string to datetime code from:
				#	http://aralbalkan.com/1512
				m = re.match(r'(.*?)(?:\.(\d+))?(([-+]\d{1,2}):(\d{2}))?$',
					str(bookmark))
				datestr, fractional, tzname, tzhour, tzmin = m.groups()
				if tzname is None:
					tz = None
				else:
					tzhour, tzmin = int(tzhour), int(tzmin)
					if tzhour == tzmin == 0:
						tzname = 'UTC'
					tz = FixedOffset(timedelta(hours=tzhour,
											   minutes=tzmin), tzname)
				x = datetime.datetime.strptime(datestr, "%Y-%m-%d %H:%M:%S")
				if fractional is None:
					fractional = '0'
				# scale the fraction out to microseconds
				fracpower = 6 - len(fractional)
				fractional = float(fractional) * (10 ** fracpower)
				dt = x.replace(microsecond=int(fractional), tzinfo=tz)


				if forward:
					surveys = SurveyData.all().filter('username =', sess['userid']).filter('timestamp <', dt).order('-timestamp').fetch(PAGE_SIZE+1)
					# if PAGE_SIZE + 1 rows returned, more pages to display
					if len(surveys) == PAGE_SIZE + 1:
						template_values['next'] = str(surveys[-2].timestamp)
						if page is not None:
							logging.debug(page)
							template_values['nextpage'] = page + 1
						surveys = surveys[:PAGE_SIZE]

					# if bookmark set, assume there was a back page
					template_values['back'] = '-'+str(surveys[0].timestamp)
					if page is not None:
						template_values['backpage'] = page - 1
				else:
					surveys = SurveyData.all().filter('username =', sess['userid']).filter('timestamp >', dt).order('timestamp').fetch(PAGE_SIZE+1)
					# if PAGE_SIZE + 1 rows returned, more pages to display
					if len(surveys) == PAGE_SIZE + 1:
						template_values['back'] = '-'+str(surveys[-2].timestamp)
						if page is not None:
							template_values['backpage'] = page - 1
						surveys = surveys[:PAGE_SIZE]
					# if bookmark set, assume there is a next page
					template_values['next'] = str(surveys[0].timestamp)
					if page is not None:
						template_values['nextpage'] = page + 1
					# reverse order of results since they were returned backwards by query
					surveys.reverse()
			else: # if no bookmark set, retrieve first records
				surveys = SurveyData.all().filter('username =', sess['userid']).order('-timestamp').fetch(PAGE_SIZE+1)
				if len(surveys) == PAGE_SIZE + 1:
					template_values['next'] = str(surveys[-2].timestamp)
					template_values['nextpage'] = 2
					surveys = surveys[:PAGE_SIZE]

			extracted = helper.extract_surveys (surveys)

		template_values['surveys'] = extracted 
		template_values['userdata'] = True

		path = os.path.join (os.path.dirname(__file__), 'views/user_data.html')
		self.response.out.write (helper.render(self, path, template_values))
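
helper.get_page_from_cache is not shown in these listings. Given that the cache holds the PAGE_SIZE*5 + 1 most recent rows, it presumably slices out one page plus a lookahead row so the caller can detect a next page; a hypothetical sketch:

def get_page_from_cache(saved, page, page_size):
	# Hypothetical: slice page `page` (1-based) out of the cached list,
	# keeping one extra row so the caller can detect a next page.
	start = (page - 1) * page_size
	if start >= len(saved):
		return None
	return saved[start:start + page_size + 1]
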
Code example #5
File: phone.py Project: eyuen/StressChill
    def handle(self):
        logging.debug("\n\nProtected Resource 2------")
        logging.debug(self.request.method)
        logging.debug(self.request.url)
        logging.debug(self.request.headers)

        # get any extra parameters required by server
        self.paramdict = {}
        for j in self.request.arguments():
            self.paramdict[j] = self.request.get(j)

        logparam = self.paramdict
        if logparam.has_key("file"):
            del logparam["file"]

        logging.debug("parameters received: " + str(logparam))

        req_token = self.request.get("oauth_token")

        # check valid token given
        if req_token != "":
            t = Token().all().filter("ckey = ", req_token).get()

            if not t:
                logging.error("if you got here, token lookup failed.")
                self.error(401)
                return

            # check user exists, and get class
            user = UserTable().all().filter("ckey =", t.user).get()
            if not user:
                logging.error("this user does not exist:" + str(t.user))
                return

            # get official class list from memcache if exists
            classlist = memcache.get("classlist")
            # if not exist, fetch from datastore
            if not classlist:
                cl = ClassList().all()

                classlist = []
                for c in cl:
                    classlist.append(c.classid)

                # save to memcache to prevent this lookup from happening every time
                memcache.set("classlist", classlist)

            # get classid of class, or default to 'testers'
            classid = "testers"
            if user.classid in classlist:
                classid = user.classid

            # insert new observation to datastore
            s = SurveyData()

            s.username = t.user
            s.longitude = self.request.get("longitude")
            s.latitude = self.request.get("latitude")
            if self.request.get("stressval"):
                s.stressval = float(self.request.get("stressval"))
            else:
                s.stressval = 0
            s.comments = str(self.request.get("comments")).replace("\n", " ")
            s.category = self.request.get("category")
            s.subcategory = self.request.get("subcategory")
            s.version = self.request.get("version")
            s.classid = classid

            file_content = self.request.get("file")

            # insert photo if one and valid
            if file_content:
                try:
                    # upload image as blob to SurveyPhoto
                    new_photo = SurveyPhoto()
                    new_photo.photo = db.Blob(file_content)
                    # create a thumbnail of image to store in SurveyPhoto
                    tmb = images.Image(new_photo.photo)
                    tmb.resize(width=180, height=130)
                    # execute resize
                    new_photo.thumb = tmb.execute_transforms(output_encoding=images.JPEG)
                    # insert
                    new_photo.put()
                    # set reference to photo for SurveyData
                    s.photo_ref = new_photo.key()
                    s.hasphoto = True
                except:
                    logging.debug("exception occured when trying to process or insert photo")
                    s.photo_ref = None
                    s.hasphoto = False
            else:
                s.photo_ref = None
                s.hasphoto = False

            s.put()

            # update running stats (this should probably be moved to the task queue)
            logging.debug("increment stats for category, " + s.category + ", & subcategory, " + s.subcategory)

            # get subcategory/category stat key if exist
            subcat = (
                SubCategoryStat().all().filter("subcategory =", s.subcategory).filter("category = ", s.category).get()
            )

            sckey = None
            if subcat is not None:
                sckey = subcat.key()

            # run in transaction so update not overwritten by a concurrent request
            db.run_in_transaction(SubCategoryStat().increment_stats, sckey, s.category, s.subcategory, s.stressval)

            # update running daily stats (this should probably be moved to the task queue)
            pdt = s.timestamp - datetime.timedelta(hours=7)
            time_key = str(pdt).split(" ")[0]
            dt = datetime.datetime.strptime(time_key, "%Y-%m-%d")
            date = datetime.date(dt.year, dt.month, dt.day)

            subcat = (
                DailySubCategoryStat()
                .all()
                .filter("subcategory =", s.subcategory)
                .filter("category = ", s.category)
                .filter("date =", date)
                .get()
            )

            dsckey = None
            if subcat is not None:
                dsckey = subcat.key()

            db.run_in_transaction(
                DailySubCategoryStat().increment_stats, dsckey, s.subcategory, s.category, date, s.stressval
            )

            statclass = "testers"
            if s.classid in classlist:
                statclass = s.classid

            # update user running stats (this should probably be moved to the task queue)
            userstat = (
                UserStat()
                .all()
                .filter("subcategory =", s.subcategory)
                .filter("category = ", s.category)
                .filter("user_id = ", s.username)
                .get()
            )

            ukey = None
            if userstat is not None:
                ukey = userstat.key()

            db.run_in_transaction(
                UserStat().increment_stats, ukey, s.username, s.subcategory, s.category, s.stressval, statclass
            )

            # update user total stats (this should probably be moved to the task queue)
            userstat = UserTotalStat().all().filter("user_id = ", s.username).filter("class_id =", statclass).get()

            ukey = None
            username = UserTable().get_username(s.username)
            if userstat is not None:
                ukey = userstat.key()

            db.run_in_transaction(UserTotalStat().increment_stats, ukey, s.username, username, statclass)

            # write to csv blob and update memcache

            # dictionary of data to write
            data_row = {}
            data_row["key"] = str(s.key())
            data_row["username"] = s.username
            data_row["timestamp"] = s.timestamp
            data_row["latitude"] = s.latitude
            data_row["longitude"] = s.longitude
            data_row["stressval"] = s.stressval
            data_row["category"] = s.category
            data_row["subcategory"] = s.subcategory
            data_row["comments"] = s.comments
            data_row["version"] = s.version
            data_row["hasphoto"] = s.hasphoto
            data_row["classid"] = s.classid
            if s.hasphoto:
                data_row["photo_key"] = str(s.photo_ref.key())
            else:
                data_row["photo_key"] = None

            # get csv blob to write to
            csv_data = SurveyCSV.all().filter("page =", 1).get()

            # run in transaction so update not overwritten by a concurrent request
            insert_csv = None
            if csv_data:
                insert_csv = db.run_in_transaction(SurveyCSV().update_csv, data_row, csv_data.key())
            else:
                insert_csv = db.run_in_transaction(SurveyCSV().update_csv, data_row)

            # add to cache (writes should update this cached value)
            memcache.set("csv", insert_csv.csv)

            # if not in testers, add to clean csv
            if statclass != "testers":
                csv_data = CleanSurveyCSV.all().filter("page =", 1).get()

                # run in transaction so update not overwritten by a concurrent request
                insert_csv = None
                if csv_data:
                    insert_csv = db.run_in_transaction(CleanSurveyCSV().update_csv, data_row, csv_data.key())
                else:
                    insert_csv = db.run_in_transaction(CleanSurveyCSV().update_csv, data_row)

            ### append to user csv blob

            # init csv writer
            csv_data = UserSurveyCSV.all().filter("userid =", s.username).filter("page =", 1).get()
            # run in transaction so update not overwritten by a concurrent request
            if csv_data:
                db.run_in_transaction(UserSurveyCSV().update_csv, data_row, csv_data.key())
            else:
                db.run_in_transaction(UserSurveyCSV().update_csv, data_row)

            ### append to class csv blob

            # init csv writer
            csv_data = ClassSurveyCSV.all().filter("classid =", s.classid).filter("page =", 1).get()

            # run in transaction so update not overwritten by a concurrent request
            if csv_data:
                db.run_in_transaction(ClassSurveyCSV().update_csv, data_row, csv_data.key())
            else:
                db.run_in_transaction(ClassSurveyCSV().update_csv, data_row)

            try:
                # update data page cache with new value, pop oldest value
                logging.debug("update cache")
                saved = memcache.get("saved")
                if saved is not None:
                    s_list = []
                    s_list.append(s)
                    extract = helper.extract_surveys(s_list)
                    d = deque(saved)
                    if len(saved) >= 5 * PAGE_SIZE:
                        d.pop()
                    d.appendleft(extract[0])
                    memcache.set("saved", list(d))
                else:
                    logging.debug("no cache set")
            except:
                logging.debug("cache write failed")

            try:
                # update user data page cache with new value, pop oldest value
                cache_name = "data_" + s.username
                saved = memcache.get(cache_name)
                if saved is not None:
                    logging.debug("updating user cache: " + cache_name)
                    s_list = []
                    s_list.append(s)
                    extract = helper.extract_surveys(s_list)
                    d = deque(saved)
                    if len(saved) >= 5 * PAGE_SIZE:
                        d.pop()
                    d.appendleft(extract[0])
                    memcache.set(cache_name, list(d))
            except:
                logging.debug("user cache write failed")

            try:
                # update class data page cache with new value, pop oldest value
                cache_name = "class_" + s.classid
                saved = memcache.get(cache_name)
                if saved is not None:
                    logging.debug("updating class cache: " + cache_name)
                    s_list = []
                    s_list.append(s)
                    extract = helper.extract_surveys(s_list)
                    d = deque(saved)
                    if len(saved) >= 5 * PAGE_SIZE:
                        d.pop()
                    d.appendleft(extract[0])
                    memcache.set(cache_name, list(d))
            except:
                logging.debug("class cache write failed")

            try:
                # update point summary cache with new value, pop oldest value
                pointsummary = memcache.get("pointsummary")
                if pointsummary is not None:
                    e = {}
                    e["latitude"] = s.latitude
                    e["longitude"] = s.longitude
                    e["stressval"] = s.stressval
                    e["comments"] = s.comments
                    e["key"] = str(s.key())
                    e["version"] = s.version
                    if s.hasphoto:
                        e["photo_key"] = str(s.photo_ref.key())
                    else:
                        e["photo_key"] = None

                    d = {}
                    d[0] = e
                    for i in range(1, 50):
                        d[i] = pointsummary[i - 1]

                    memcache.set("pointsummary", d)
            except:
                logging.debug("point summary cache write failed")

        else:
            logging.error("request token empty")
            self.response.set_status(401, "request token empty.")
            self.response.out.write("request token empty.")
Code example #6
File: admin.py Project: eyuen/StressChill
	def get(self):
		sess = gmemsess.Session(self)

		# setup redirect strings
		bookmark = self.request.get('bookmark')
		page = self.request.get('page')

		data_redirect_str = '/admin/data'
		detail_redirect_str = '/admin/detail?key=' + self.request.get('key')
		if bookmark and len(bookmark) != 0:
			data_redirect_str += '?bookmark=' + str(bookmark)
			detail_redirect_str += '&bookmark=' + str(bookmark)
			if page and len(page) != 0:
				data_redirect_str += '&page=' + str(page)
				detail_redirect_str += '&page=' + str(page)
		elif page and len(page) != 0:
			data_redirect_str += '?page=' + str(page)
			detail_redirect_str += '&page=' + str(page)

		logging.debug('data redirect: ' + data_redirect_str)
		logging.debug('detail redirect: ' + detail_redirect_str)


		# redirect to login page if not logged in
		if sess.is_new() or not sess.has_key('username'):
			sess['error'] = 'Please log in to use this feature.'
			#sess['redirect'] = '/admin/detail?key=' + self.request.get('key')
			sess['redirect'] = detail_redirect_str
			sess.save()
			self.redirect('/user/login')
			return

		logging.debug('key: ' + str(self.request.get('key')))
		logging.debug('date: ' + str(self.request.get('date')))
		
		# check if key set or date set
		if not self.request.get('key') and not self.request.get('date'):
			sess['error'] = 'No observation was selected.'
			sess.save()
			self.redirect(data_redirect_str)
			return

		# check valid key
		db_key = None
		try:
			if self.request.get('key'):
				db_key = db.Key(self.request.get('key'))
				if db_key.kind() != 'SurveyData':
					sess['error'] = 'Bad key.'
					sess.save()
					self.redirect(data_redirect_str)
					return
			else:
				m = re.match(r'(.*?)(?:\.(\d+))?(([-+]\d{1,2}):(\d{2}))?$',
					str(self.request.get('date')))
				datestr, fractional, tzname, tzhour, tzmin = m.groups()
				if tzname is None:
					tz = None
				else:
					tzhour, tzmin = int(tzhour), int(tzmin)
					if tzhour == tzmin == 0:
						tzname = 'UTC'
					tz = FixedOffset(timedelta(hours=tzhour,
											   minutes=tzmin), tzname)
				x = datetime.datetime.strptime(datestr, "%Y-%m-%d %H:%M:%S")
				if fractional is None:
					fractional = '0'
				# scale the fraction out to microseconds
				fracpower = 6 - len(fractional)
				fractional = float(fractional) * (10 ** fracpower)
				dt = x.replace(microsecond=int(fractional), tzinfo=tz)
				# a Query has no key(); fetch the matching entity, then take its key
				obs = SurveyData.all().filter('timestamp =', dt).get()
				db_key = obs.key()
		except:
			sess['error'] = 'Bad key.'
			sess.save()
			self.redirect(data_redirect_str)
			return

		# fetch the observation so we can check permissions
		observation = db.get(db_key)

		# if no observation exists with key, error
		if not observation:
			sess['error'] = 'No observation exists with this key or you do not have permission to delete this observation'
			sess.save()
			self.redirect(data_redirect_str)
			return

		# if the user does not have admin permission, error
		admin_flag = UserTable().all().filter('ckey =', sess['userid']).get()

		if admin_flag is None or not admin_flag.admin:
			sess['error'] = 'No observation exists with this key or you do not have permission to delete this observation'
			sess.save()
			self.redirect(data_redirect_str)
			return

		# format data...
		surveys = []
		surveys.append(observation)
		extracted = helper.extract_surveys(surveys)
		observation = extracted[0]

		if os.environ.get('HTTP_HOST'):
			base_url = 'http://' + os.environ['HTTP_HOST'] + '/'
		else:
			base_url = 'http://' + os.environ['SERVER_NAME'] + '/'

		# display delete confirmation page
		template_values = {'observation': observation, 'base_url':base_url}
		template_values['current_bookmark'] = bookmark
		template_values['current_page'] = page

		path = os.path.join (os.path.dirname(__file__), 'views/quarantine_observation.html')
		self.response.out.write (helper.render(self, path, template_values))
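
The hand-built '?bookmark=...&page=...' strings at the top of this handler (and of example #7) are easy to get wrong, since the choice of '?' versus '&' depends on what was appended before. urllib.urlencode expresses the same thing more robustly; a sketch of an alternative for URLs that carry no query string yet, not what the handler actually does:

import urllib

def with_query(base, **params):
	# Append only the parameters that are actually set; urlencode
	# handles escaping and the &-joining.
	qs = urllib.urlencode([(k, v) for k, v in params.items() if v])
	return (base + '?' + qs) if qs else base

# e.g. with_query('/admin/data', bookmark=bookmark, page=page)
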
Code example #7
File: user.py Project: eyuen/StressChill
	def get(self):
		sess = gmemsess.Session(self)

		bookmark = self.request.get('bookmark')
		page = self.request.get('page')

		# setup redirect strings
		data_redirect_str = '/user/data'
		delete_redirect_str = '/user/delete?key=' + self.request.get('key')
		if bookmark and len(bookmark) != 0:
			data_redirect_str += '?bookmark=' + str(bookmark)
			delete_redirect_str += '&bookmark=' + str(bookmark)
			if page and len(page) != 0:
				data_redirect_str += '&page=' + str(page)
				delete_redirect_str += '&page=' + str(page)
		elif page and len(page) != 0:
			data_redirect_str += '?page=' + str(page)
			delete_redirect_str += '&page=' + str(page)

		logging.debug('data redirect: ' + data_redirect_str)
		logging.debug('delete redirect: ' + delete_redirect_str)

		# redirect to login page if not logged in
		if sess.is_new() or not sess.has_key('username'):
			sess['error'] = 'Please log in to use this feature.'
			#sess['redirect'] = '/user/delete?key=' + self.request.get('key')
			sess['redirect'] = delete_redirect_str
			sess.save()
			self.redirect('/user/login')
			return
		
		# check if key set
		if not self.request.get('key'):
			sess['error'] = 'No observation was selected.'
			sess.save()
			self.redirect(data_redirect_str)
			return

		# check valid key
		try:
			db_key = db.Key(self.request.get('key'))
			if db_key.kind() != 'SurveyData':
				sess['error'] = 'Bad key.'
				sess.save()
				self.redirect(data_redirect_str)
				return

		except:
			sess['error'] = 'Bad key.'
			sess.save()
			self.redirect(data_redirect_str)
			return

		# fetch the observation so we can check ownership
		observation = db.get(db_key)

		# if no observation exists with key, error
		if not observation:
			sess['error'] = 'No observation exists with this key or you do not have permission to delete this observation'
			sess.save()
			self.redirect(data_redirect_str)
			return

		# if the user does not have permission, error
		if observation.username != sess['userid']:
			sess['error'] = 'No observation exists with this key or you do not have permission to delete this observation'
			sess.save()
			self.redirect(data_redirect_str)
			return

		# format data...
		surveys = []
		surveys.append(observation)
		extracted = helper.extract_surveys(surveys)
		observation = extracted[0]

		if os.environ.get('HTTP_HOST'):
			base_url = 'http://' + os.environ['HTTP_HOST'] + '/'
		else:
			base_url = 'http://' + os.environ['SERVER_NAME'] + '/'

		# display delete confirmation page
		template_values = {'observation': observation, 'base_url':base_url}
		template_values['current_bookmark'] = bookmark
		template_values['current_page'] = page

		path = os.path.join (os.path.dirname(__file__), 'views/delete.html')
		self.response.out.write (helper.render(self, path, template_values))
Code example #8
File: phone.py Project: cketcham/StressChill
	def handle(self):
		logging.debug('\n\nProtected Resource 2------')
		logging.debug(self.request.method)
		logging.debug(self.request.url)
		logging.debug(self.request.headers)

		# get any extra parameters required by server
		self.paramdict = {}
		for j in self.request.arguments():
			self.paramdict[j] = self.request.get(j)

		logparam = self.paramdict
		# drop the raw file blob before logging (logparam aliases paramdict)
		if logparam.has_key('file'):
			del logparam['file']

		logging.debug('parameters received: ' +str(logparam))

		req_token = self.request.get('oauth_token')

		if req_token != '':
			t = Token().all().filter('ckey = ', req_token).get()

			if not t:
				logging.error('if you got here, token lookup failed.')
				self.error(401)
				return

			# check user exists, and get class
			user = UserTable().all().filter('ckey =', t.user).get()
			if not user:
				logging.error('this user does not exist:' + str(t.user))
				return

			# get official class list from memcache if exists
			classlist = memcache.get('classlist')
			# if not exist, fetch from datastore
			if not classlist:
				cl = ClassList().all()

				classlist = []
				for c in cl:
					classlist.append(c.classid)

				# save to memcache to prevent this lookup from happening every time
				memcache.set('classlist', classlist)

			# get classid of class, or default to 'testers'
			classid = 'testers'
			if user.classid in classlist:
				classid = user.classid

			# insert new observation to datastore
			s = SurveyData()

			s.username = t.user
			s.longitude = self.request.get('longitude')
			s.latitude = self.request.get('latitude')
			if self.request.get('stressval'):
				s.stressval = float(self.request.get('stressval'))
			else:
				s.stressval = 0
			s.comments = str(self.request.get('comments')).replace('\n', ' ')
			s.category = self.request.get('category')
			s.subcategory = self.request.get('subcategory')
			s.version = self.request.get('version')
			s.classid = classid

			file_content = self.request.get('file')

			# insert photo if one
			if file_content:
				try:
					# upload image as blob to SurveyPhoto
					new_photo = SurveyPhoto()
					new_photo.photo = db.Blob(file_content)
					# create a thumbnail of image to store in SurveyPhoto
					tmb = images.Image(new_photo.photo)
					tmb.resize(width=180, height=130)
					# execute resize
					new_photo.thumb = tmb.execute_transforms(output_encoding=images.JPEG)
					# insert
					new_photo.put()
					# set reference to photo for SurveyData
					s.photo_ref = new_photo.key()
					s.hasphoto = True
				except:
					s.photo_ref = None
					s.hasphoto = False
			else:
				s.photo_ref = None
				s.hasphoto = False

			s.put()

			# update running stats (this should probably be moved to the task queue)
			logging.debug('increment stats for category, ' + s.category + ', & subcategory, ' +s.subcategory)

			# get subcategory/category stat key if exist
			subcat = SubCategoryStat().all().filter('subcategory =', s.subcategory).filter('category = ', s.category).get()

			sckey = None
			if subcat is not None:
				sckey = subcat.key()

			# run in transaction so update not overwritten by a concurrent request
			db.run_in_transaction(SubCategoryStat().increment_stats, sckey, s.category, s.subcategory, s.stressval)

			# update running daily stats (this should probably be moved to the task queue)
			pdt = s.timestamp - datetime.timedelta(hours=7)
			time_key = str(pdt).split(' ')[0]
			dt = datetime.datetime.strptime(time_key, "%Y-%m-%d")
			date = datetime.date(dt.year, dt.month, dt.day)

			subcat = DailySubCategoryStat().all().filter('subcategory =', s.subcategory).filter('category = ', s.category).filter('date =', date).get()

			dsckey = None
			if subcat is not None:
				dsckey = subcat.key()

			db.run_in_transaction(DailySubCategoryStat().increment_stats, dsckey, s.subcategory, s.category, date, s.stressval)

			# update user running stats (this should probably be moved to the task queue)
			userstat = UserStat().all().filter('subcategory =', s.subcategory).filter('category = ', s.category).filter('user_id = ', s.username).get()

			ukey = None
			if userstat is not None:
				ukey = userstat.key()
			
			db.run_in_transaction(UserStat().increment_stats, ukey, s.username, s.subcategory, s.category, s.stressval)
				
			#write to csv blob and update memcache

			# init csv writer
			output = cStringIO.StringIO()
			writer = csv.writer(output, delimiter=',')

			base_url = ''
			if os.environ.get('HTTP_HOST'):
				base_url = os.environ['HTTP_HOST']
			else:
				base_url = os.environ['SERVER_NAME']

			# append to csv blob

			# this will have to change if multiple pages are ever needed (limits?)
			insert_csv = SurveyCSV.all().filter('page =', 1).get()

			# write header row if csv blob doesnt exist yet
			if not insert_csv:
				logging.debug('csv not exist, writing header row')
				header_row = [	'id',
					'userid',
					'timestamp',
					'latitude',
					'longitude',
					'stress_value',
					'category',
					'subcategory',
					'comments',
					'image_url'
					]
				writer.writerow(header_row)

			# form image url
			if s.hasphoto:
				photo_url = 'http://' + base_url + "/get_an_image?key="+str(s.photo_ref.key())

			else:
				photo_url = 'no_image'

			hashedval = hashlib.sha1(str(s.key()))
			sha1val = hashedval.hexdigest()

			userhashedval = hashlib.sha1(s.username)
			usersha1val = userhashedval.hexdigest()

			# write csv data row
			new_row = [
					sha1val,
					usersha1val,
					s.timestamp,
					s.latitude,
					s.longitude,
					s.stressval,
					s.category,
					s.subcategory,
					s.comments,
					photo_url
					]
			writer.writerow(new_row)

			# create new blob if one does not exist
			if not insert_csv:
				logging.debug('csv not exist, setup')
				insert_csv = SurveyCSV()
				insert_csv.csv = str(output.getvalue())
				insert_csv.last_entry_date = s.timestamp
				insert_csv.count = 1
				insert_csv.page = 1
			else:	#if blob exists, append and update
				logging.debug('csv exist, append')
				insert_csv.csv += output.getvalue()
				insert_csv.last_entry_date = s.timestamp
				insert_csv.count += 1

			insert_csv.put()

			# add to cache (writes should update this cached value)
			memcache.set('csv', insert_csv.csv)
			output.close()

			### append to user csv blob

			# init csv writer
			output = cStringIO.StringIO()
			writer = csv.writer(output, delimiter=',')

			# this will have to change if multiple pages are ever needed (limits?)
			insert_csv = UserSurveyCSV.all().filter('userid =', s.username).filter('page =', 1).get()

			# write header row if csv blob doesnt exist yet
			if not insert_csv:
				header_row = [	'id',
					'userid', 
					'timestamp',
					'latitude',
					'longitude',
					'stress_value',
					'category',
					'subcategory',
					'comments',
					'image_url'
					]
				writer.writerow(header_row)

			# form image url
			if s.hasphoto:
				photo_url = 'http://' + base_url + "/get_an_image?key="+str(s.photo_ref.key())

			else:
				photo_url = 'no_image'

			hashedval = hashlib.sha1(str(s.key()))
			sha1val = hashedval.hexdigest()

			userhashedval = hashlib.sha1(s.username)
			usersha1val = userhashedval.hexdigest()

			# write csv data row
			new_row = [
					sha1val,
					usersha1val,
					s.timestamp,
					s.latitude,
					s.longitude,
					s.stressval,
					s.category,
					s.subcategory,
					s.comments,
					photo_url
					]
			writer.writerow(new_row)

			# create new blob if one does not exist
			if not insert_csv:
				insert_csv = UserSurveyCSV()
				insert_csv.csv = str(output.getvalue())
				insert_csv.last_entry_date = s.timestamp
				insert_csv.count = 1
				insert_csv.page = 1
				insert_csv.userid = s.username
			else:	#if blob exists, append and update
				insert_csv.csv += output.getvalue()
				insert_csv.last_entry_date = s.timestamp
				insert_csv.count += 1

			insert_csv.put()
			output.close()

			### append to class csv blob

			# init csv writer
			output = cStringIO.StringIO()
			writer = csv.writer(output, delimiter=',')

			# this will have to change if multiple pages are ever needed (limits?)
			insert_csv = ClassSurveyCSV.all().filter('classid =', s.classid).filter('page =', 1).get()

			# write header row if csv blob doesnt exist yet
			if not insert_csv:
				header_row = [	'id',
					'userid', 
					'timestamp',
					'latitude',
					'longitude',
					'stress_value',
					'category',
					'subcategory',
					'comments',
					'image_url'
					]
				writer.writerow(header_row)

			# form image url
			if s.hasphoto:
				photo_url = 'http://' + base_url + "/get_an_image?key="+str(s.photo_ref.key())

			else:
				photo_url = 'no_image'

			hashedval = hashlib.sha1(str(s.key()))
			sha1val = hashedval.hexdigest()

			userhashedval = hashlib.sha1(s.username)
			usersha1val = userhashedval.hexdigest()

			# write csv data row
			new_row = [
					sha1val,
					usersha1val,
					s.timestamp,
					s.latitude,
					s.longitude,
					s.stressval,
					s.category,
					s.subcategory,
					s.comments,
					photo_url
					]
			writer.writerow(new_row)

			# create new blob if one does not exist
			if not insert_csv:
				insert_csv = ClassSurveyCSV()
				insert_csv.csv = str(output.getvalue())
				insert_csv.last_entry_date = s.timestamp
				insert_csv.count = 1
				insert_csv.page = 1
				insert_csv.classid = s.classid
			else:	#if blob exists, append and update
				insert_csv.csv += output.getvalue()
				insert_csv.last_entry_date = s.timestamp
				insert_csv.count += 1

			insert_csv.put()
			output.close()

			try:
				# update data page cache with new value, pop oldest value
				logging.debug('update cache')
				saved = memcache.get('saved')
				if saved is not None:
					s_list = []
					s_list.append(s)
					extract = helper.extract_surveys(s_list)
					d = deque(saved)
					if len(saved) >= 5*PAGE_SIZE:
						d.pop()
					d.appendleft(extract[0])
					memcache.set('saved', list(d))
				else:
					logging.debug('no cache set')
			except:
				logging.debug('cache write failed')


			try:
				# update user data page cache with new value, pop oldest value
				cache_name = 'data_' + s.username
				saved = memcache.get(cache_name)
				logging.debug('updating user cache: ' + cache_name)
				if saved is not None:
					s_list = []
					s_list.append(s)
					extract = helper.extract_surveys(s_list)
					d = deque(saved)
					if len(saved) >= 5*PAGE_SIZE:
						d.pop()
					d.appendleft(extract[0])
					memcache.set(cache_name, list(d))
			except:
				logging.debug('cache write failed')


			# we should convert the dict to a list so this is easier to do
			try:
				# update point summary cache with new value, pop oldest value
				pointsummary = memcache.get('pointsummary')
				if pointsummary is not None:
					e = {}
					e['latitude'] = s.latitude
					e['longitude'] = s.longitude
					e['stressval'] = s.stressval
					e['comments'] = s.comments
					e['key'] = str(s.key())
					e['version'] = s.version
					if s.hasphoto:
						e['photo_key'] = str(s.photo_ref.key())
					else:
						e['photo_key'] = None

					d = {}
					d[0] = e
					for i in range(1, 50):
						d[i] = pointsummary[i-1]
				
					memcache.set('pointsummary', d)
			except:
				logging.debug('point summary cache write failed')

		else:
			logging.error('request token empty')
			self.response.set_status(401, 'request token empty.')
			self.response.out.write('request token empty.')
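
Both phone.py variants anonymize the row id and the user id with SHA-1 before appending rows to the CSV blobs, so exported data cannot be mapped back to raw datastore keys or usernames. Isolated as a helper (a sketch):

import hashlib

def anonymize(value):
	# Hex SHA-1 digest of a datastore key or username; the export
	# carries only the digest, never the raw identifier.
	return hashlib.sha1(str(value)).hexdigest()

# usage: anonymize(s.key()) for the row id, anonymize(s.username) for the user id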