Example #1
	def on_refresh_complete(cls, response, id, callback):
		"""Callback for request to get a new access token based on refresh token."""

		if response.code in (400, 401):

			if 'invalid_grant' in response.body:
				# Our refresh token is invalid, which means that we don't have
				# permission to access this user's content anymore. Forget them.
				Cache.delete(cls.auth_cache_key_template % id)
				Cache.delete(cls.profile_cache_key_template % id)
				TokenIdMapping.remove_id(id)
				logging.error("Access was revoked for %s; cached data deleted.", id)

			logging.error("HTTP %s while trying to refresh access token for %s.", response.code, id)
			return IOLoop.instance().add_callback(lambda: callback(None))

		elif response.code != 200:
			logging.error("Non-200 response to refresh token request (%s, id=%s): %r" % (response.code, id, response.body))
			return IOLoop.instance().add_callback(lambda: callback(None))

		results = json.loads(response.body)

		# sanity check
		if results['token_type'] != "Bearer":
			logging.error('Unknown token type received: %s' % results['token_type'])
			return IOLoop.instance().add_callback(lambda: callback(None))

		token = results['access_token']
		Cache.set(cls.auth_cache_key_template % id, token, time=results['expires_in'])

		IOLoop.instance().add_callback(lambda: callback(token))
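The Cache these Tornado handlers call into is not included in the excerpts. Judging by the call sites (set with a time= expiry in seconds, get returning nothing on a miss, incr returning None when the key is absent, delete by key), it behaves like a thin wrapper around a memcached-style client; that is an assumption, not something the examples state. A minimal in-memory stand-in with those assumed semantics, for illustration only:

import time as _time


class Cache(object):
	"""Illustration only: in-memory stand-in for the cache backend, which is
	not shown in these excerpts. Assumes memcached-like behaviour, which is
	what the calling code appears to rely on."""

	_store = {}  # key -> (value, absolute expiry timestamp or None)

	@classmethod
	def get(cls, key):
		value, expires_at = cls._store.get(key, (None, None))
		if expires_at is not None and _time.time() >= expires_at:
			cls._store.pop(key, None)  # lazily drop expired entries
			return None
		return value

	@classmethod
	def set(cls, key, value, time=0):
		# time=0 means "no expiry", mirroring the memcached convention.
		cls._store[key] = (value, _time.time() + time if time else None)

	@classmethod
	def incr(cls, key, delta=1):
		# memcached-style: a missing (or expired) key is not created, None is returned.
		if cls.get(key) is None:
			return None
		value, expires_at = cls._store[key]
		cls._store[key] = (value + delta, expires_at)
		return value + delta

	@classmethod
	def delete(cls, key):
		cls._store.pop(key, None)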
Example #2
    def get(self, user_id, page_id):

        ratelimit_key = self.ratelimit_key_template % self.request.remote_ip
        remote_ip_rate = Cache.incr(ratelimit_key)
        if remote_ip_rate is None:
            Cache.set(ratelimit_key, 1, time=60)
        elif remote_ip_rate > 60:
            self.set_status(503)
            self.set_header('Retry-After', '60')
            self.write(
                'Rate limit exceeded. Please do not make more than 60 requests per minute.'
            )

            # Don't log every single time we rate limit a host (that would get spammy fast),
            # but do log significant breakpoints on exactly how spammy a host is being.
            if remote_ip_rate in (61, 100, 1000, 10000):
                logging.info('Rate limited IP %s - %s requests/min' %
                             (self.request.remote_ip, remote_ip_rate))

            return self.finish()

        self.gplus_user_id = user_id
        self.gplus_page_id = page_id

        if len(user_id) != 21:
            self.write(
                "Google+ profile IDs are exactly 21 digits long. Please specify a proper profile ID."
            )
            return self.finish()

        if page_id and len(page_id) != 21:
            self.write(
                "Google+ page IDs are exactly 21 digits long. Please specify a proper page ID."
            )
            return self.finish()

        self.cache_key = self.cache_key_template % user_id
        if page_id:
            self.cache_key += str(page_id)

        cached_result = Cache.get(self.cache_key)
        flush_requested = self.request.arguments.get('flush', [None])[0]
        if cached_result:
            if not Config.getboolean('cache',
                                     'allow-flush') or not flush_requested:
                return self._respond(**cached_result)

        if page_id:
            OAuth2Handler.authed_fetch(
                user_id, self.json_url % (page_id, self.request.remote_ip),
                self._on_api_response)
        else:
            OAuth2Handler.authed_fetch(
                user_id, self.json_url % ('me', self.request.remote_ip),
                self._on_api_response)
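The block at the top of get() is a fixed-window rate limiter: the first request from an address seeds a counter that expires after 60 seconds, and each later request inside that window increments it. Factored out of the handler (the key template below is illustrative; the 60-requests-per-60-seconds numbers simply mirror the values used above), the pattern is roughly:

def allow_request(remote_ip, limit=60, window=60):
    """Fixed-window rate limit check; returns True if the request may proceed."""
    key = 'ratelimit-%s' % remote_ip  # illustrative key template
    rate = Cache.incr(key)
    if rate is None:
        # First request seen in this window: seed the counter with an expiry.
        Cache.set(key, 1, time=window)
        return True
    return rate <= limit

Because incr leaves the original expiry untouched, the counter keeps climbing for a client that keeps hammering until the key expires, which is why the handler can log the 61/100/1000/10000 breakpoints.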
Example #3
	def on_profile_request_complete(self, person):
		"""Callback for the initial OAuth flow's call to fetch_person_by_token."""
		# We compute the time= param here to take into account potential time
		# spent during the API call.
		Cache.set(self.auth_cache_key_template % person['id'], self.gplus_access_token,
			time=int((self.gplus_expires_at - datetime.datetime.today()).total_seconds()),
		)

		# store refresh token and gplus user id in database
		if self.gplus_refresh_token is not None:
			TokenIdMapping.update_refresh_token(person['id'], self.gplus_refresh_token)
	
		self.set_cookie('gplus_id', str(person['id']))
		self.redirect('/')
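The time= computation above caches the token only for however long it has left, rather than for the full lifetime the token endpoint originally reported, since part of that lifetime was already spent on the profile API round-trip. The same arithmetic in isolation (the key and values below are hypothetical):

import datetime

def remaining_lifetime(expires_at):
	"""Seconds of validity left on a token expiring at the given datetime."""
	return int((expires_at - datetime.datetime.today()).total_seconds())

# e.g. a token issued with expires_in=3600 a few seconds ago:
expires_at = datetime.datetime.today() + datetime.timedelta(seconds=3595)
Cache.set('auth-key-for-user-1234', 'the-access-token', time=remaining_lifetime(expires_at))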
Example #4
	def _on_api_response(self, response):
		if response is None:
			logging.error("API request for %s failed." % self.gplus_user_id)
			self.write("Unable to fetch content for this Google+ ID; it may not be authenticated. See http://%s for more information." % self.request.host)
			self.set_status(401)
			return self.finish()
		if response.error:
			if response.code == 403:
				logging.error("API Request 403: %r" % (json.loads(response.body)))
				self.set_status(503)
				self.write("Unable to fulfill request at this time - Google+ API rate limit exceeded.")
				return self.finish()
			else:
				logging.error("AsyncHTTPRequest error: %r, %r" % (response.error, response))
				return self.send_error(500)
		else:
			data = json.loads(response.body)

			headers = {'Content-Type': 'application/atom+xml'}
			params = {
				'userid': self.gplus_page_id or self.gplus_user_id,
				'baseurl': 'http://%s' % self.request.host,
				'requesturi': 'http://%s%s' % (self.request.host, self.request.uri.split('?', 1)[0]),
			}

			if 'items' not in data or not data['items']:
				params['lastupdate'] = dateutils.to_atom_format(datetime.datetime.today())
				return self._respond(headers, empty_feed_template.format(**params))

			posts = data['items']

			lastupdate = max(dateutils.from_iso_format(p['updated']) for p in posts)
			params['author'] = xhtml_escape(posts[0]['actor']['displayName'])
			params['lastupdate'] = dateutils.to_atom_format(lastupdate)

			headers['Last-Modified'] = dateutils.to_http_format(lastupdate)

			params['entrycontent'] = u''.join(entry_template.format(**get_post_params(p)) for p in posts)

			body = feed_template.format(**params)

			Cache.set(self.cache_key, {'headers': headers, 'body': body}, time=Config.getint('cache', 'stream-expire'))
			return self._respond(headers, body)
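Both this callback and the cache-hit path in the get() handler finish the request through self._respond, which is not part of these excerpts. Given how it is called, once with keyword arguments unpacked from the cached {'headers': ..., 'body': ...} dict and once positionally, a plausible reconstruction (a guess, not the project's actual code) is:

	def _respond(self, headers=None, body=None):
		# Hypothetical sketch of the helper the excerpts call but never define.
		for name, value in (headers or {}).items():
			self.set_header(name, value)
		if body:
			self.write(body)
		return self.finish()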
Example #5
	def on_fetch_person_complete(cls, response, callback):
		"""Callback for the people/me API call in fetch_person_by_token."""
		person = json.loads(response.body)
		Cache.set(cls.profile_cache_key_template % person['id'], person, time=Config.getint('cache', 'profile-expire'))
		return IOLoop.instance().add_callback(lambda: callback(person))
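on_fetch_person_complete is the second half of fetch_person_by_token, which is not included in these excerpts. Assuming it issues a people/me request through Tornado's old callback-style AsyncHTTPClient API (the URL and wiring below are assumptions, not the project's actual code), the call into this callback might look like:

	@classmethod
	def fetch_person_by_token(cls, access_token, callback):
		# Hypothetical sketch only; the real method is not shown in these excerpts.
		AsyncHTTPClient().fetch(
			'https://www.googleapis.com/plus/v1/people/me?access_token=%s' % access_token,
			callback=lambda response: cls.on_fetch_person_complete(response, callback),
		)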
Example #6
class FFF(commands.AutoShardedBot):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.config = config
        self.bot_config = self.config['bot']
        self.session = aiohttp.ClientSession()
        self.logger = logger
        self.cache = Cache()
        self.database = Database(self.config['database'])
        self.pool = None
        self.debug = debug
        self.title = self.bot_config['title']

        self.base_extensions = self.bot_config['extensions']['base']
        self.debug_extensions = self.bot_config['extensions']['debug']

        if self.debug:
            self.bot_extensions = self.base_extensions + self.debug_extensions
            self.token = self.bot_config['debug_token']
            self.prefix = self.bot_config['debug_prefix']
        else:
            self.bot_extensions = self.base_extensions
            self.token = self.bot_config['production_token']
            self.prefix = self.bot_config['production_prefix']

    async def on_ready(self):
        # self.remove_command("help")
        self.pool = await self.database.connect()
        await self.update_cache()
        await self.load_extensions()
        await self.update_activity()

        self.logger.info(f"Logged in as {self.user} ({self.user.id}).")
        if self.debug:
            self.logger.critical(
                "Starting in debug mode, do not use this in production!")

    async def close(self):
        self.logger.info("\nShutting down!")
        await super(FFF, self).close()
        await self.session.close()
        await self.pool.close()

    async def on_message_edit(self, before, after):
        if after.author.bot:
            return
        await self.process_commands(after)

    async def on_command_error(self, ctx, error):
        if isinstance(error, commands.CommandNotFound):
            pass
        elif isinstance(error, commands.CheckFailure):
            pass
        elif isinstance(error, commands.MissingRequiredArgument):
            await ctx.send("Invalid command arguments!")
        else:
            try:
                raise error
            except Exception as error:
                cause = str(error.__cause__)
                owner = self.get_user(self.owner_ids[0])
                length = 1024 - (len(cause) + 3)

                embed = Embed(title="An error has occurred!",
                              color=ctx.author.color)
                embed.add_field(name=cause,
                                value=str(traceback.format_exc()[:length]) +
                                "...",
                                inline=False)
                embed.add_field(name="Command name",
                                value=ctx.command.qualified_name,
                                inline=False)
                embed.add_field(name="Executed by",
                                value=f"`{ctx.author}` ({ctx.author.id})",
                                inline=False)
                embed.add_field(
                    name="Executed in",
                    value=f"`{ctx.guild.name}` ({ctx.guild.id})\n"
                    f"<#{ctx.channel.id}> (`{ctx.channel.name}`, {ctx.channel.id})",
                    inline=False)
                await owner.send(embed=embed)

                embed = Embed(title="An error has occurred!",
                              color=ctx.author.color)
                embed.add_field(
                    name=cause,
                    value="The owner has been notified about this error.")
                await ctx.send(embed=embed)
                self.logger.error(traceback.format_exc())

    async def load_extensions(self):
        for extension in self.bot_extensions:
            self.load_extension(f"extensions.{extension}")
            self.logger.info(f"Loaded {extension}.")
        self.logger.info("Starting...")

    async def update_activity(self):
        activity = discord.Activity(
            name=self.config['bot']['activity']['name'],
            type=getattr(discord.ActivityType,
                         self.config['bot']['activity']['type']))
        await self.change_presence(activity=activity)

    async def update_cache(self):
        self.logger.debug("Updating cache...")
        async with self.pool.acquire() as conn:
            guild_data = await self.database.get_guild_data(conn)
            guild_data_history = await self.database.get_guild_data_history(
                conn)
            self.cache.set({
                "guild_data": guild_data,
                "guild_data_history": guild_data_history
            })
        self.logger.debug("Successfully updated the cache!")