def _authorize(self):
    """
    Internal. Either starts initial authorization, or refreshes using the
    supplied refresh token.

    If a refresh token is present, tries a silent refresh first; on failure
    the error is logged and the interactive authorization flow is started.
    On success sets access_token, refresh_token, account_id,
    account_username and the (naive UTC) expiry timestamp.
    """
    if getattr(self, "refresh_token", None) is not None:
        try:
            self._refresh()
            return
        except Exception as ex:
            # BUGFIX: traceback.format_exc() takes no exception argument
            # (its parameter is a frame limit); the current exception is
            # read from the interpreter state. Passing `ex` was an error.
            logger.error("Failed getting authorization using refresh token. Will open authentication address.\n{0}(): {1}\n{2}".format(
                type(ex).__name__, str(ex), traceback.format_exc()
            ))
    # No (working) refresh token: run the interactive authorization flow.
    authorization_code = self._get_user_authorization()
    response = self.post_request(
        url_join(AUTH_ENDPOINT, "token"),
        client_id = self.client_id,
        client_secret = self.client_secret,
        grant_type = "authorization_code",
        code = authorization_code
    )
    response_data = response.json()
    self.access_token = response_data["access_token"]
    self.refresh_token = response_data["refresh_token"]
    self.account_id = response_data["account_id"]
    self.account_username = response_data["account_username"]
    self.expires = datetime.datetime.utcnow() + datetime.timedelta(seconds = response_data["expires_in"])
    logger.info("Imgur client authenticated.")
def reply_function(comment):
    """
    Evaluate a reply made to one of the bot's comments.

    A user can opt out by replying with the phrase "ignore me" anywhere in
    the reply body; such users are recorded in the ignore list. Replies
    from deleted accounts (author is None) are skipped.
    """
    if conn is not None:
        conn.send("reply_evaluated")
    author = comment.author
    if author is None:
        return
    if "ignore me" not in comment.body.lower():
        return
    if conn is not None:
        conn.send("user_ignored")
    logger.info("Ignoring user '{0}' by request.".format(author.name))
    database.ignore_user(author.name)
def run(self):
    """
    The thread's "run" method.

    Opens a one-shot TCP listener on (self.host, self.port), accepts a
    single connection (the OAuth redirect), extracts the authorization
    code from the request line into self.code, answers with an HTTP
    response, then signals self.received.
    """
    logger.info("Opening authorization listening address at {0}:{1}".format(self.host, self.port))
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    try:
        sock.bind((self.host, int(self.port)))
        sock.listen(1)
        conn, addr = sock.accept()
        try:
            data = conn.recv(1024)
            # Request line looks like "GET /?code=XXXX HTTP/1.1"; take the
            # value after the first "=" of the query string.
            self.code = data.splitlines()[0].split()[1].split("=")[1]
            conn.send(AuthorizationListener.HTTP_RESPONSE.format("Authorization code {0} received.".format(self.code)))
        finally:
            # BUGFIX: the accepted connection was previously never closed.
            conn.close()
    finally:
        # BUGFIX: release the listening socket even if accept/parse fails.
        sock.close()
    self.received.set()
def _refresh(self):
    """
    Internal. Uses the supplied refresh token to re-authenticate.

    Posts the refresh grant to the token endpoint and updates the access
    token, refresh token and (naive UTC) expiry timestamp in place.
    """
    logger.info("Refreshing imgur authorization.")
    token_url = url_join(AUTH_ENDPOINT, "token")
    response = self.post_request(
        token_url,
        client_id = self.client_id,
        client_secret = self.client_secret,
        grant_type = "refresh_token",
        refresh_token = self.refresh_token
    )
    response_data = response.json()
    self.access_token = response_data["access_token"]
    self.refresh_token = response_data["refresh_token"]
    lifetime = datetime.timedelta(seconds = response_data["expires_in"])
    self.expires = datetime.datetime.utcnow() + lifetime
def find_filter_subtitles(check_text, minimum_likeness=0.2):
    """
    Search the subtitle database for check_text and any sufficiently long
    sub-phrases extracted by body_search_regex, then filter and rank hits.

    Candidates below minimum_likeness (column 7), or whose comment score
    (column 9) is non-None and at or below REDDIT_IGNORE_THRESHOLD, are
    dropped. Returns the survivors ordered by descending likeness.
    """
    if len(check_text) < configuration.REDDIT_MINIMUM_LENGTH:
        return []
    logger.info("Checking for text '{0}'".format(check_text))
    found_subtitles = database.find_subtitles(check_text)
    for fragment in body_search_regex.findall(check_text):
        if len(fragment) > configuration.REDDIT_MINIMUM_LENGTH:
            logger.info("Checking for text '{0}'".format(fragment))
            found_subtitles.extend(database.find_subtitles(fragment))

    def acceptable(candidate):
        # Likeness threshold, then ignore-threshold on the comment score.
        if candidate[7] < minimum_likeness:
            return False
        return candidate[9] is None or candidate[9] > configuration.REDDIT_IGNORE_THRESHOLD

    ranked = [candidate for candidate in found_subtitles if acceptable(candidate)]
    # Sort ascending then reverse (not reverse=True) to preserve the
    # original tie ordering for equal likeness values.
    ranked.sort(key=lambda candidate: candidate[7])
    ranked.reverse()
    return ranked
def vote_function(comment):
    """
    Evaluate one of the bot's own comments for score tracking.

    Parses the season/episode/line reference out of the comment body
    (line-range pattern first, then single-line pattern) and upserts the
    comment's current score into the comment database. Bodies starting
    with "Sorry" are apology replies carrying no reference and are
    skipped; unparseable bodies are logged as errors.
    """
    if conn is not None:
        conn.send("vote_evaluated")
    if comment.body.strip().startswith("Sorry"):
        return
    for body_line in comment.body.splitlines():
        range_match = comment_search_regex_2.search(body_line)
        if range_match is not None:
            season = int(range_match.group("season"))
            episode = int(range_match.group("episode"))
            first_line = int(range_match.group("line_1")) - 1
            last_line = int(range_match.group("line_2")) - 1
            logger.info(
                "Upserting comment ID '{0}' into comment database. (season {1}, episode {2}, lines {3}-{4}, score {5})"
                .format(comment, season, episode, first_line, last_line, comment.score))
            database.upsert_comment(str(comment), comment.score, season, episode, first_line, last_line)
            return
        single_match = comment_search_regex_1.search(body_line)
        if single_match is not None:
            season = int(single_match.group("season"))
            episode = int(single_match.group("episode"))
            line_index = int(single_match.group("line")) - 1
            logger.info(
                "Upserting comment ID '{0}' into comment database. (season {1}, episode {2}, line {3}, score {4})"
                .format(comment, season, episode, line_index, comment.score))
            database.upsert_comment(str(comment), comment.score, season, episode, line_index, line_index)
            return
    logger.error(
        "Could not parse information from comment ID '{0}'. Body:\n{1}"
        .format(comment, comment.body))
def _crawl_subtitles(self):
    """
    Crawl through the directory for subtitles and update the database
    accordingly. Called on instantiation.

    Layout expected: <directory>/S*/E*.srt. For each new or changed .srt
    file (detected via md5 recorded in the `srt` table) all existing rows
    for that episode are purged and every run of up to
    self.concatenation_depth consecutive subtitles is inserted as one
    searchable row. Errors on a single file or season are logged and
    crawling continues.
    """
    cursor = self.get_connection().cursor()
    for season in os.listdir(self.directory):
        if season.lower().startswith("s") and os.path.isdir(os.path.join(self.directory, season)):
            season_directory = os.path.join(self.directory, season)
            try:
                # Season number is whatever digits the directory name holds.
                season_number = int(re.sub(r"\D", "", season))
                for filename in os.listdir(season_directory):
                    if filename.endswith(".srt") and filename.lower().startswith("e"):
                        subtitle_path = os.path.join(season_directory, filename)
                        try:
                            md5 = md5sum(subtitle_path)
                            episode_number = int(re.sub(r"\D", "", filename))
                            cursor.execute("SELECT md5sum FROM srt WHERE path = %s", (subtitle_path,))
                            row = cursor.fetchone()
                            if not row or row[0] != md5:
                                logger.info("New or changed subtitle file '{0}' found (season {1}, episode {2}), crawling.".format(
                                    subtitle_path, season_number, episode_number
                                ))
                                # Purge any stale rows for this episode/file
                                # before re-inserting.
                                cursor.execute(
                                    """
                                    DELETE FROM subtitles
                                    WHERE season = %s AND episode = %s
                                    """,
                                    (season_number, episode_number)
                                )
                                cursor.execute(
                                    """
                                    DELETE FROM srt
                                    WHERE path = %s
                                    """,
                                    (subtitle_path,)
                                )
                                self.get_connection().commit()
                                subtitles = Subtitles(subtitle_path)
                                # Insert every window of j+1 consecutive
                                # subtitles (j = 0 .. concatenation_depth-1)
                                # so multi-line quotes are searchable.
                                for j in range(self.concatenation_depth):
                                    for i in range(len(subtitles.subtitles) - j):
                                        start_subtitle = subtitles.subtitles[i]
                                        end_subtitle = subtitles.subtitles[i+j]
                                        text = "\n".join([
                                            subtitle.text
                                            for subtitle in subtitles.subtitles[i:i+j+1]
                                        ])
                                        # NOTE(review): E%s yields a Postgres
                                        # E'...' escape-string literal for the
                                        # subtitle text — presumably to keep
                                        # embedded newlines; confirm intent.
                                        cursor.execute(
                                            """
                                            INSERT INTO subtitles (
                                                season, episode,
                                                start_index, end_index,
                                                start_time, end_time,
                                                subtitle
                                            ) VALUES (
                                                %s, %s, %s, %s, %s, %s, E%s
                                            )""",
                                            (
                                                season_number, episode_number,
                                                i, i + j,
                                                start_subtitle.start.total_seconds(),
                                                end_subtitle.end.total_seconds(),
                                                text
                                            )
                                        )
                                cursor.execute(
                                    """
                                    INSERT INTO srt (
                                        path, md5sum
                                    ) VALUES (
                                        %s, %s
                                    )""",
                                    (subtitle_path, md5)
                                )
                                self.get_connection().commit()
                        except Exception as ex:
                            # BUGFIX: traceback.format_exc() takes no
                            # exception argument; passing `ex` was an error.
                            logger.error("Could not parse SRT file at path '{0}', reason: {1}(): {2}".format(
                                subtitle_path, type(ex).__name__, str(ex)
                            ))
                            logger.error(traceback.format_exc())
                            # A failed statement may poison the cursor;
                            # start a fresh one before continuing.
                            cursor = self.get_connection().cursor()
                            continue
            except Exception:
                logger.error("Could not find season number in directory '{0}', continuing.".format(season_directory))
                cursor = self.get_connection().cursor()
                continue
def mention_function(comment):
    """
    Evaluate a username mention.

    Strips the bot's /u/ mention from the body, looks the remaining phrase
    up in the subtitle database, uploads an image for the best match and
    returns the reply body. Returns an apology string when the phrase is
    too short or nothing matches, and None for ignored users. Unexpected
    exceptions are alerted and re-raised.
    """
    if conn is not None:
        conn.send("mention_evaluated")
    try:
        body = re.sub("/u/{0}".format(
            configuration.REDDIT_USERNAME), "", comment.body,
            flags=re.IGNORECASE).strip()
        author = comment.author
        if author is not None:
            if database.get_user_ignored(author.name):
                logger.info(
                    "Ignoring post from user '{0}'.".format(author.name))
                return
        if len(body) < configuration.REDDIT_MINIMUM_LENGTH:
            logger.info(
                "Ignoring text body '{0}': too short.".format(body))
            return "Sorry{0}, that message is a bit too short. I try to avoid short phrases as they're overly common in subtitles.".format(
                " /u/{0}".format(author.name) if author is not None else "")
        subtitles = find_filter_subtitles(body, 0.1)
        if not subtitles:
            return "Sorry{0}, I couldn't find any quotes with that phrase.".format(
                " /u/{0}".format(author.name) if author is not None else "")
        # Pick randomly among all candidates tied for the best likeness.
        maximum_likeness = subtitles[0][7]
        random_chosen_subtitle = random.choice([
            subtitle for subtitle in subtitles
            if subtitle[7] == maximum_likeness
        ])
        season, episode, start_index, end_index, start_time, end_time, text, likeness, comment_count, comment_score, episode_title = random_chosen_subtitle
        text = text.decode("UTF-8")
        logger.info(
            "Found subtitle S{0:02d}E{1:02d} \"{2:s}\", uploading."
            .format(season, episode, text))
        url = convert_upload(comment, season, episode, start_index,
                             end_index, start_time, end_time, text,
                             likeness, comment_count, comment_score)
        if author is not None:
            logger.debug(
                "Incrementing uses for author '{0}'.".format(author.name))
            database.increment_user_uses(author.name)
            uses = database.get_user_uses(author.name)
        else:
            uses = None
        if conn is not None:
            conn.send("comment_made")
        return format_comment(url, text, season, episode, start_index,
                              end_index, comment_count, comment_score,
                              uses, likeness, episode_title)
    except Exception as ex:
        # BUGFIX: traceback.format_exc() takes no exception argument.
        alerter.send(
            "Receieved an exception when posting a comment.\n\n{0}(): {1}\n\n{2}"
            .format(
                type(ex).__name__, str(ex), traceback.format_exc()))
        # BUGFIX: bare `raise` re-raises with the original traceback;
        # `raise ex` would discard it.
        raise