def generate_overview(self):
    # Build overview.html: board list, news block, 30-day posting statistics,
    # the latest thread starts and per-board post counts.
    # Yields ('overview', rendered_html), then delegates to generate_help()
    # for any additional pages it produces.
    self.log(self.logger.INFO, 'generating %s/overview.html' % self.config['output_directory'])
    t_engine_mappings_overview = dict()
    t_engine_mappings_overview['boardlist'] = self.get_board_list()
    t_engine_mappings_overview['news'] = self._generate_news_data()
    weekdays = ('Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday')
    max_post = 0
    stats = list()
    bar_length = 20  # width of the ASCII '=' usage bar
    days = 30        # statistics window, in days
    # sqlite strftime() modifier string, e.g. "3600 seconds"
    utc_offset = str(self.config['utc_time_offset']) + ' seconds'
    # total post count over the whole window -> first summary row
    totals = int(self.overchandb.execute('SELECT count(1) FROM articles WHERE sent > strftime("%s", "now", "-' + str(days) + ' days")').fetchone()[0])
    stats.append(self.t_engine['stats_usage_row'].substitute({'postcount': totals, 'date': 'all posts', 'weekday': '', 'bar': 'since %s days' % days}))
    datarow = list()
    # per-day post counts, newest day first; remember the busiest day for bar scaling
    for row in self.overchandb.execute('SELECT count(1) as counter, strftime("%Y-%m-%d", sent, "unixepoch", "' + utc_offset + '") as day, strftime("%w", sent, "unixepoch", "' + utc_offset + '") as weekday FROM articles WHERE sent > strftime("%s", "now", "-' + str(days) + ' days") GROUP BY day ORDER BY day DESC').fetchall():
        if row[0] > max_post:
            max_post = row[0]
        datarow.append((row[0], row[1], weekdays[int(row[2])]))
    for row in datarow:
        # bar length proportional to the busiest day; max_post > 0 whenever
        # this loop runs, since datarow is non-empty then
        graph = '=' * int(float(row[0])/max_post*bar_length)
        if len(graph) == 0:
            graph = ' '
        stats.append(self.t_engine['stats_usage_row'].substitute({'postcount': row[0], 'date': row[1], 'weekday': row[2], 'bar': graph}))
    t_engine_mappings_overview['stats_usage_rows'] = '\n'.join(stats)
    postcount = 50  # number of latest threads shown
    stats = list()
    # boards flagged hidden, no-overview or blocked are excluded here
    exclude_flags = self.cache['flags']['hidden'] | self.cache['flags']['no-overview'] | self.cache['flags']['blocked']
    # latest thread starts: parent empty or self-referencing marks a thread OP
    for row in self.overchandb.execute('SELECT articles.last_update, group_name, subject, message, article_uid, ph_name FROM groups, articles WHERE groups.group_id = articles.group_id AND (cast(groups.flags as integer) & ?) = 0 AND (articles.parent = "" OR articles.parent = articles.article_uid) ORDER BY articles.last_update DESC LIMIT ?', (exclude_flags, str(postcount))).fetchall():
        latest_posts_row = dict()
        latest_posts_row['last_update'] = datetime.utcfromtimestamp(row[0] + self.config['utc_time_offset']).strftime(self.config['datetime_format'])
        # prefer the board's display name (ph_name); fall back to the group name
        latest_posts_row['board'] = row[5] if row[5] != '' else basicHTMLencode(row[1].split('.', 1)[-1].replace('"', ''))
        latest_posts_row['articlehash'] = sha1(row[4]).hexdigest()[:10]
        # subject fallback chain: subject -> message body -> article hash
        latest_posts_row['subject'] = row[2] if row[2] not in ('', 'None') else row[3]
        latest_posts_row['subject'] = latest_posts_row['articlehash'] if latest_posts_row['subject'] == '' else latest_posts_row['subject'].replace('\n', ' ')[:55]
        stats.append(self.t_engine['latest_posts_row'].substitute(latest_posts_row))
    t_engine_mappings_overview['latest_posts_rows'] = '\n'.join(stats)
    stats = list()
    # per-board totals: only hidden/blocked boards are excluded (no-overview boards still count)
    exclude_flags = self.cache['flags']['hidden'] | self.cache['flags']['blocked']
    for row in self.overchandb.execute('SELECT count(1) as counter, group_name, ph_name FROM groups, articles WHERE groups.group_id = articles.group_id AND (cast(groups.flags as integer) & ?) = 0 GROUP BY groups.group_id ORDER BY counter DESC', (exclude_flags,)).fetchall():
        board = row[2] if row[2] != '' else basicHTMLencode(row[1].replace('"', ''))
        stats.append(self.t_engine['stats_boards_row'].substitute({'postcount': row[0], 'board': board}))
    t_engine_mappings_overview['stats_boards_rows'] = '\n'.join(stats)
    yield 'overview', self.t_engine['overview'].substitute(t_engine_mappings_overview)
    for help_data in self.generate_help(t_engine_mappings_overview['news']):
        yield help_data
def parse_message(self, message_id, fd):
    # Parse one NNTP paste message from file-like fd: read the headers we
    # care about (subject/date/from/hidden/language), collect the body,
    # render the paste and insert it into the pastes table.
    # Returns True on success, False for a message with no body.
    hash_message_uid = sha1(message_id).hexdigest()
    subject = 'No Title'
    sent = 0                    # stays 0 if no Date header is present
    sender = 'None'
    email = '*****@*****.**'
    body = list()
    ishidden = 0
    lang = ''
    body_found = False
    for line in fd:
        line = line.rstrip('\n\r')
        if not body_found:
            # still in the header section; first empty line ends it
            key = line.split(': ')[0].lower()
            value = line.split(': ', 1)[-1]
            if not line:
                body_found = True
            elif key == 'subject':
                subject = basicHTMLencode(value.decode('UTF-8')[:65])
            elif key == 'date':
                sent_tz = parsedate_tz(value)
                if sent_tz:
                    # convert to UTC epoch seconds using the header's tz offset
                    offset = sent_tz[-1] if sent_tz[-1] else 0
                    sent = timegm((datetime(*sent_tz[:6]) - timedelta(seconds=offset)).timetuple())
                else:
                    # unparsable Date header: fall back to "now"
                    sent = int(time.time())
            elif key == 'from':
                # expected form: "Display Name <address>"
                data = value.decode('UTF-8').rsplit(' <', 1)
                if len(data) > 1:
                    sender = basicHTMLencode(data[0][:30])
                    email = basicHTMLencode(data[1].replace('>', '')[:50])
            elif key == 'hidden':
                if value.lower() in ('true', 'yes'):
                    ishidden = 1
            elif key == 'language':
                # 'auto' keeps lang empty so it gets auto-detected below
                lang = self._lang_by_name(basicHTMLencode(value.lower())) if value.lower() != 'auto' else ''
        else:
            body.append(line)
    if not body_found or not body:
        self.log(self.logger.ERROR, 'empty NNTP message \'%s\'. wtf?' % message_id)
        return False
    if not lang:
        lang = self._detect_lang_name(subject, body)
    # body becomes a single unicode string from here on
    body = '\n'.join(body).decode('UTF-8')
    self.generate_paste(hash_message_uid[:10], body, subject, sender, sent, lang, ishidden)
    self.sqlite.execute('INSERT INTO pastes VALUES (?,?,?,?,?,?,?,?,?,?,?)', (message_id, hash_message_uid, sender, email, subject, sent, body, '', int(time.time()), lang, ishidden))
    self.sqlite.commit()
    return True
def handle_postman_mod(self, line):
    """Handle a 'postman-mod <userkey> <base64_blob>' control command.

    The blob is four ':'-separated urlsafe-base64 fields:
    local_name, allow (0/1), expires (days from now), logout.
    Creates or updates the userkey row in postmandb and clears the
    stored session cookie when logout is non-empty.
    Returns (userkey, None) in all cases.
    """
    self.log(self.logger.DEBUG, "handle postman-mod: %s" % line)
    userkey, base64_blob = line.split(" ", 2)[1:]
    try:
        local_name, allow, expires, logout = [base64.urlsafe_b64decode(x).decode('UTF-8') for x in base64_blob.split(':')]
    # narrowed from a bare except: unpack mismatch raises ValueError; bad
    # base64 raises TypeError (py2) / binascii.Error, a ValueError subclass
    # (py3); undecodable bytes raise UnicodeDecodeError (a ValueError).
    except (ValueError, TypeError):
        self.log(self.logger.WARNING, 'get corrupted data for %s' % userkey)
        return userkey, None
    local_name = basicHTMLencode(local_name[:20])
    try:
        allow = int(allow)
    except ValueError:
        allow = 0
    if allow not in (0, 1):
        allow = 0
    current_time = int(time.time())
    try:
        expires = int(expires) * 24 * 3600 + current_time
    except ValueError:
        expires = current_time
    # clamp: no expiry in the past, and at most ~10 years ahead
    if expires < current_time or expires - current_time > 3650 * 24 * 3600:
        expires = current_time
    try:
        if int(self.postmandb.execute('SELECT count(userkey) FROM userkey WHERE userkey = ?', (userkey,)).fetchone()[0]) == 0:
            self.log(self.logger.DEBUG, "handle postman-mod: new userkey")
            self.postmandb.execute("INSERT INTO userkey (userkey, local_name, allow, expires) VALUES (?, ?, ?, ?)", (userkey, local_name, allow, expires))
        else:
            self.postmandb.execute("UPDATE userkey SET local_name = ?, allow = ?, expires = ? WHERE userkey = ?", (local_name, allow, expires, userkey))
        if logout != '':
            # any non-empty logout field invalidates the stored cookie
            self.postmandb.execute("UPDATE userkey SET cookie = ? WHERE userkey = ?", ('', userkey))
        self.postmandb.commit()
    except Exception as e:
        self.log(self.logger.WARNING, "could not handle postman-mod: %s, line = '%s'" % (e, line))
    return userkey, None
def handle_line(self, line, key_id, timestamp, is_replay=False, message_id=None):
    """Parse and dispatch one control command line.

    Strips an optional '#'-comment suffix, checks authorization of
    key_id for the command, runs the mapped handler on acceptance
    (redistributing to affected groups), and writes an audit row to
    censordb for both accepted and rejected commands.
    """
    if message_id is None:
        is_local = True
        source = 'local'
    else:
        is_local = False
        source = message_id
    # NOTE: command is taken from the line BEFORE the comment is stripped
    command = line.lower().split(" ", 1)[0]
    if '#' in line:
        line, comment = line.split("#", 1)
        line = line.rstrip(" ")
    else:
        comment = ''
    if command not in self.command_mapper:
        self.log(self.logger.WARNING, 'got unknown command: "{}", source: "{}"'.format(line, source))
        return
    accepted, reason_id = self.allowed(key_id, command, is_replay, is_local)
    if self.command_cache[command][0] != -1:
        command_id = self.command_cache[command][0]
    else:
        self.log(self.logger.ERROR, "command %s not found in command_cache. FIXME!" % command)
        return
    if accepted == 1:
        data, groups = self.command_mapper[command](line)
        if groups:
            # forward the accepted command to every affected group
            for group in groups:
                self.redistribute_command(group, line, comment, timestamp)
    else:
        # rejected: record only the command's first argument as data
        data = line.lower().split(" ", 1)[-1].split(" ", 1)[0]
        self.log(self.logger.DEBUG, 'not authorized for "{}": {}. source: {}'.format(command, key_id, source))
    try:
        self.censordb.execute('INSERT INTO log (accepted, command_id, data, key_id, reason_id, comment, timestamp, source) VALUES (?, ?, ?, ?, ?, ?, ?, ?)',
                              (accepted, command_id, data.decode('UTF-8'), key_id, reason_id, basicHTMLencode(comment).decode('UTF-8'), int(time.time()), source))
    except sqlite3.Error as e:
        # audit logging stays best-effort, but failures are no longer
        # silently swallowed
        self.log(self.logger.WARNING, 'could not write audit log for "%s": %s' % (command, e))
def generate_menu(self):
    """Render menu.html with one entry per visible (non-hidden,
    non-blocked) board, optionally annotated with the number of posts
    in the last 24 hours. Yields ('menu', rendered_html)."""
    self.log(self.logger.INFO, 'generating %s/menu.html' % self.config['output_directory'])
    menu_entries = list()
    exclude_flags = self.cache['flags']['hidden'] | self.cache['flags']['blocked']
    for group_row in self.overchandb.execute('SELECT group_name, group_id, ph_name, link FROM groups WHERE (cast(groups.flags as integer) & ?) = 0 ORDER by group_name ASC', (exclude_flags,)).fetchall():
        # fix: build a fresh mapping per board instead of mutating one
        # shared dict, so keys set for an earlier board can never leak
        # into a later template substitution
        menu_entry = dict()
        menu_entry['group_name'] = group_row[0].split('.', 1)[-1].replace('"', '').replace('/', '')
        # external alias link only when explicitly enabled; else local page
        menu_entry['group_link'] = group_row[3] if self.config['use_unsecure_aliases'] and group_row[3] != '' else '%s-1.html' % menu_entry['group_name']
        menu_entry['group_name_encoded'] = group_row[2] if group_row[2] != '' else basicHTMLencode(menu_entry['group_name'])
        if self.config['enable_recent']:
            # get fresh posts count
            timestamp = int(time.time()) - 3600*24
            menu_entry['postcount'] = self.overchandb.execute('SELECT count(article_uid) FROM articles WHERE group_id = ? AND sent > ?', (group_row[1], timestamp)).fetchone()[0]
        menu_entries.append(self.t_engine['menu_entry'].substitute(menu_entry))
    yield 'menu', self.t_engine['menu'].substitute(menu_entries='\n'.join(menu_entries))
def parse_body(self):
    """Feed the remaining raw data to the MIME parser and split the
    resulting message: text/plain parts are appended to self.message,
    everything else becomes an entry in self.attachments."""
    self._parser.feed(self._fd.read())
    mime_msg = self._parser.close()
    self._parser = None
    if not mime_msg.is_multipart():
        # single-part message: either inline text or one attachment
        if mime_msg.get_content_type().lower() == 'text/plain':
            self.message += mime_msg.get_payload(decode=True)
        else:
            self.attachments.append(self._read_filedata(mime_msg))
    else:
        payload = mime_msg.get_payload()
        # unwrap a lone nested multipart/mixed container
        if len(payload) == 1 and payload[0].get_content_type() == "multipart/mixed":
            mime_msg = payload[0]
        for part in mime_msg.get_payload():
            if part.get_content_type().lower() == 'text/plain':
                self.message += part.get_payload(decode=True)
            else:
                self.attachments.append(self._read_filedata(part))
    del mime_msg
    self.message = basicHTMLencode(self.message)
def _parse_headers(self):
    # Read header lines from self._fd until the blank separator line,
    # feeding every line to the MIME parser while extracting the fields
    # this application cares about into self.headers. If EOF is hit
    # before the blank line, self.headers is set to None.
    headers_found = False
    line = self._fd.readline()
    while line:
        self._parser.feed(line)
        head, _, data = line.partition(': ')
        head = head.lower()
        # drop the trailing newline (assumes every header line ends in
        # '\n' -- TODO confirm for a final line without a newline)
        data = data[:-1]
        if head == 'subject':
            # strip a leading "Re: " prefix before encoding
            self.headers['subject'] = basicHTMLencode(data[4:]) if data.lower().startswith('re: ') else basicHTMLencode(data)
        elif head == 'date':
            sent_tz = parsedate_tz(data)
            if sent_tz:
                # normalize to UTC epoch seconds using the header's tz offset
                offset = 0
                if sent_tz[-1]:
                    offset = sent_tz[-1]
                self.headers['sent'] = timegm((datetime(*sent_tz[:6]) - timedelta(seconds=offset)).timetuple())
        elif head == 'from':
            # expected form: "Display Name <address>"
            sender, _, email = data.rpartition(' <')
            email = email.replace('>', '')
            if sender:
                self.headers['sender'] = sender
            if email:
                self.headers['email'] = email
        elif head == 'references':
            # first referenced message id is treated as the thread parent
            self.headers['parent'] = data.split(' ')[0]
        elif head == 'newsgroups':
            # only the first listed group is used
            self.headers['group_name'] = data.split(';')[0].split(',')[0]
        elif head == 'x-sage':
            self.headers['sage'] = True
        elif head == 'x-pubkey-ed25519':
            self.headers['public_key'] = data
        elif head == 'x-signature-ed25519-sha512':
            self._signature = data
        elif line == '\n':
            # blank line: end of the header section
            headers_found = True
            break
        line = self._fd.readline()
    if not headers_found:
        self.headers = None
def _read_filedata(part):
    """Describe one MIME part as an attachment dict with keys:
    obj (decoded payload), hash (sha1 hexdigest), name (sanitized,
    length-capped filename), ext, type, maintype and subtype."""
    payload = part.get_payload(decode=True)
    raw_name = part.get_filename()
    if raw_name is None or raw_name.strip() == '':
        safe_name = 'empty_file_name'
    else:
        safe_name = basicHTMLencode(raw_name)
    meta = dict()
    meta['obj'] = payload
    meta['hash'] = sha1(payload).hexdigest()
    meta['name'] = safe_name
    meta['ext'] = os.path.splitext(meta['name'])[1].lower()
    meta['type'] = mimetypes.types_map.get(meta['ext'], '/')
    if meta['type'] == '/':
        # mime not detected from file ext. Use remote mimetype for detection file ext. Ignore unknown mimetype.
        guessed_ext = mimetypes.guess_extension(part.get_content_type())
        if guessed_ext:
            meta['ext'] = guessed_ext
            meta['type'] = mimetypes.types_map.get(meta['ext'], '/')
            meta['name'] += meta['ext']
    if len(meta['name']) > 512:
        meta['name'] = meta['name'][:512] + '...'
    meta['maintype'], meta['subtype'] = meta['type'].split('/', 2)
    return meta