import logging
import re

# Project-internal imports (module paths assumed from the pastepwn layout)
from pastepwn.util import Request
from .basicaction import BasicAction


class TelegramAction(BasicAction):
    """Action to send a Telegram message to a certain user or group"""
    name = "TelegramAction"

    def __init__(self, token, receiver, custom_payload=None, template=None):
        super().__init__()
        self.logger = logging.getLogger(__name__)

        # Check for None first - re.match would raise a TypeError on a None token
        if token is None or not re.match(r"[0-9]+:[a-zA-Z0-9\-_]+", token):
            raise ValueError("Bot token not correct or None!")

        self.token = token
        self.receiver = receiver
        self.custom_payload = custom_payload
        self.template = template
        self.request = Request()

    # TODO add possibility to send a template message and inject the paste data into the template
    def perform(self, paste, analyzer_name=None):
        """Send a message via a Telegram bot to a specified user, without checking for errors"""
        # if self.template:
        #     text = self.template.format()
        text = "New paste matched by analyzer '{0}' - Link: {1}".format(analyzer_name, paste.full_url)
        api_url = "https://api.telegram.org/bot{0}/sendMessage?chat_id={1}&text={2}".format(self.token, self.receiver, text)
        self.request.get(api_url)
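# A minimal usage sketch for the action above. The token and chat id are
# placeholders (the token merely matches the expected "<digits>:<chars>"
# shape), and _DemoPaste only mimics the `full_url` attribute that
# perform() reads - it is not pastepwn's Paste class.
if __name__ == "__main__":
    class _DemoPaste:
        full_url = "https://pastebin.com/abcdef12"

    action = TelegramAction(token="123456789:AAexample-token_000", receiver="133713371")
    action.perform(_DemoPaste(), analyzer_name="WordAnalyzer")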
import logging
import re

from flask import jsonify, redirect, request, session, url_for

# `urls`, `Request` and `Response` are project-internal helpers; their import
# paths are not shown in this snippet and are assumed to be available here.

logger = logging.getLogger(__name__)


def callback():
    code = request.args.get('code')
    if not code:
        logger.error("Missing 'code' parameter")
        return jsonify(Response.error(400, "Missing 'code' parameter"))
    logger.info("[code] " + str(code))

    # Exchange the authorization code for an access_token
    token_url = urls.get_token_url(code)
    resp = Request.get(token_url, to_dict=False)
    logger.debug(resp)
    try:
        access_token = re.findall("access_token=(.*?)&expires_in", resp)[0]
        logger.info("[access_token] " + str(access_token))
    except IndexError:
        logger.error("Failed to obtain access_token")
        return jsonify(Response.error(400, "Failed to obtain access_token"))
    session['qq_access_token'] = access_token

    # Use the access_token to fetch the user's openid
    openid_url = urls.get_openid_url(access_token)
    resp = Request.get(openid_url)
    logger.debug(resp)
    openid = resp.get('openid')
    logger.info("[openid] " + str(openid))
    session['openid'] = openid
    return redirect(url_for('get_user_info'))
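# For reference: the QQ Connect token endpoint does not return JSON but a
# URL-encoded body, which is why callback() extracts the token with a regex.
# An illustrative (not real) response pair:
#
#   GET .../oauth2.0/token  ->  access_token=FE04****CCE2&expires_in=7776000&refresh_token=88E4****BE14
#   GET .../oauth2.0/me     ->  callback( {"client_id": "YOUR_APP_ID", "openid": "YOUR_OPENID"} );
#
# The second response is JSONP, so Request.get() presumably unwraps the
# callback(...) envelope before returning a dict.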
def get_user_info():
    """
    Fetch the user's basic profile using the access_token and openid stored in the session.
    :return:
    """
    if 'qq_access_token' in session:
        openid = session.get('openid')
        access_token = session.get('qq_access_token')
        logger.info("[openid] " + str(openid))
        logger.info("[access_token] " + str(access_token))
        user_info_url = urls.get_user_info_url(access_token, openid)
        resp = Request.get(user_info_url)
        return jsonify(Response.success(resp))
    return jsonify(Response.error(400, "Failed to fetch user info"))
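# A sketch of the `urls` helper module that both views above rely on, assuming
# the standard QQ Connect endpoints; APP_ID, APP_KEY and REDIRECT_URI are
# placeholder settings, not this project's actual configuration.
APP_ID = "your_app_id"
APP_KEY = "your_app_key"
REDIRECT_URI = "https://example.com/qq/callback"


def get_token_url(code):
    return ("https://graph.qq.com/oauth2.0/token?grant_type=authorization_code"
            "&client_id={0}&client_secret={1}&code={2}&redirect_uri={3}"
            .format(APP_ID, APP_KEY, code, REDIRECT_URI))


def get_openid_url(access_token):
    return "https://graph.qq.com/oauth2.0/me?access_token={0}".format(access_token)


def get_user_info_url(access_token, openid):
    return ("https://graph.qq.com/user/get_user_info?access_token={0}"
            "&oauth_consumer_key={1}&openid={2}"
            .format(access_token, APP_ID, openid))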
import json
import logging
import time
from queue import Empty, Queue

# Project-internal imports (module paths assumed from the pastepwn layout)
from pastepwn.core import Paste
from pastepwn.util import Request, start_thread
from .basicscraper import BasicScraper
from .exceptions import EmptyBodyException, IPNotRegisteredError


class PastebinScraper(BasicScraper):
    name = "PastebinScraper"
    api_base_url = "https://scrape.pastebin.com"

    def __init__(self, paste_queue=None, exception_event=None):
        super().__init__(exception_event)
        self.logger = logging.getLogger(__name__)
        self._last_scrape_time = 0
        self.paste_queue = paste_queue or Queue()
        self._tmp_paste_queue = Queue()

        self._known_pastes = []
        self._known_pastes_limit = 1000

        self.request = Request()

    def _check_error(self, body):
        """Checks if an error occurred and raises an exception if it did"""
        if body is None:
            raise EmptyBodyException()

        if "DOES NOT HAVE ACCESS" in body:
            self._exception_event.set()
            raise IPNotRegisteredError()

    def _get_recent(self, limit=100):
        """Downloads a list of the most recent pastes - the amount is limited by the <limit> parameter"""
        endpoint = "api_scraping.php"
        api_url = "{0}/{1}?limit={2}".format(self.api_base_url, endpoint, limit)

        try:
            response_data = self.request.get(api_url)
            self._check_error(response_data)
            pastes_dict = json.loads(response_data)
            pastes = []

            # Loop through the response and create Paste objects from the data
            for paste in pastes_dict:
                paste_obj = Paste(key=paste.get("key"),
                                  title=paste.get("title"),
                                  user=paste.get("user"),
                                  size=paste.get("size"),
                                  date=paste.get("date"),
                                  expire=paste.get("expire"),
                                  syntax=paste.get("syntax"),
                                  scrape_url=paste.get("scrape_url"),
                                  full_url=paste.get("full_url"))
                pastes.append(paste_obj)

            return pastes
        except Exception as e:
            self.logger.error(e)
            return None

    def _get_paste_content(self, key):
        """Downloads the content of a certain paste"""
        endpoint = "api_scrape_item.php"
        api_url = "{0}/{1}?i={2}".format(self.api_base_url, endpoint, key)
        content = ""
        self.logger.debug("Downloading paste {0}".format(key))
        try:
            response_data = self.request.get(api_url)
            self._check_error(response_data)
            content = response_data
        except Exception as e:
            self.logger.error(e)

        return content

    def _body_downloader(self):
        """Downloads the body of pastes from pastebin, which have been put into the queue"""
        while self.running:
            try:
                self.logger.debug("Queue size: {}".format(self._tmp_paste_queue.qsize()))
                if self._stop_event.is_set() or self._exception_event.is_set():
                    self.running = False
                    break

                paste = self._tmp_paste_queue.get(True, 1)

                # Download the paste body and pass the completed paste
                # on to the consumer queue
                last_body_download_time = round(time.time(), 2)

                body = self._get_paste_content(paste.key)
                paste.set_body(body)
                self.paste_queue.put(paste)

                # Throttle to at most one body download per second
                current_time = round(time.time(), 2)
                diff = round(current_time - last_body_download_time, 2)

                if diff >= 1:
                    continue

                sleep_diff = round(1 - diff, 3)
                self.logger.debug("Sleep time is: {0}".format(sleep_diff))
                time.sleep(sleep_diff)
            except Empty:
                continue

    def start(self, paste_queue):
        """Start the scraping process and download the paste metadata"""
        self.paste_queue = paste_queue
        self.running = True
        start_thread(self._body_downloader, "BodyDownloader", self._exception_event)

        while self.running:
            self._last_scrape_time = int(time.time())
            pastes = self._get_recent(limit=100)
            counter = 0

            if pastes is not None:
                for paste in pastes:
                    # Do nothing if the paste is already known
                    if paste.key in self._known_pastes:
                        continue

                    self.logger.debug("Paste is unknown - adding it to the list: {}".format(paste.key))
                    self._tmp_paste_queue.put(paste)
                    self._known_pastes.append(paste.key)
                    counter += 1

                    if self._stop_event.is_set() or self._exception_event.is_set():
                        self.running = False
                        break

                self.logger.debug("{0} new pastes fetched!".format(counter))

            # Delete some of the oldest known pastes to not run into memory/performance issues
            if len(self._known_pastes) > self._known_pastes_limit:
                self.logger.debug("known_pastes limit exceeded - cleaning up!")
                start_index = len(self._known_pastes) - self._known_pastes_limit
                self._known_pastes = self._known_pastes[start_index:]

            if self._stop_event.is_set() or self._exception_event.is_set():
                self.logger.debug("Stopping {0}".format(self.name))
                self.running = False
                break

            # Check the time since the last scrape: if it happened less than
            # 60 seconds ago, wait until the full 60 seconds have passed
            current_time = int(time.time())
            diff = current_time - self._last_scrape_time
            if diff < 60:
                sleep_time = 60 - diff
                time.sleep(sleep_time)
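# A minimal usage sketch: run the scraper on its own thread and consume the
# finished pastes from the shared queue. This assumes BasicScraper creates
# its own stop/exception events when none are passed in, and that the
# machine's IP is whitelisted for pastebin's scraping API.
if __name__ == "__main__":
    from threading import Thread

    queue = Queue()
    scraper = PastebinScraper()
    Thread(target=scraper.start, args=(queue,), daemon=True).start()

    while True:
        paste = queue.get()  # blocks until the next paste (with body) arrives
        print("New paste: {0}".format(paste.full_url))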