def get_threads(self, fetch_tool):
    """Fetch the board page and return newly discovered threads.

    Also refreshes ``self._threads`` so numbers belonging to threads that
    are no longer listed on the board are forgotten.

    :param fetch_tool: callable taking a URL and returning
        ``(status, data)`` where *data* is the decoded JSON payload.
    :return: list of ``Thread`` objects for thread numbers not seen
        before; empty list on a non-200 status or unexpected payload.
    """
    alive_threads = set()
    result = []
    status, data = fetch_tool(self._url)
    if status != 200:
        return result
    try:
        threads = data['threads']
    except KeyError as e:
        # BUG FIX: the original logged ``threads.keys()`` here, but
        # ``threads`` is unbound when the lookup fails, so the handler
        # itself raised NameError.  Log the keys that actually exist.
        utils.inform('{} {} {}'.format(e, data.keys(), self._url),
                     level=utils.WARNING)
        return result
    for thread in threads:
        number = int(thread['num'])
        if number not in self._threads:
            result.append(Thread(self._section, number))
            self._threads.add(number)
        alive_threads.add(number)
    utils.inform('Found {} threads'.format(len(result)), level=utils.INFO)
    # Keep only the threads still alive on the board.
    self._threads = alive_threads
    return result
def _get_response(self, request):
    """Issue a GET for *request* over the persistent connection.

    Logs the request and the resulting status line; returns the raw
    response object (body not yet read).
    """
    utils.inform('Requesting {}'.format(request), level=utils.INFO)
    self._conn.request('GET', request, headers=self._headers)
    response = self._conn.getresponse()
    utils.inform('Response is {}: {}'.format(response.status, response.reason),
                 level=utils.INFO)
    return response
def _get_response(self, request):
    """Perform a GET request on the kept-alive connection.

    Both the outgoing request and the incoming status line are logged at
    info level before the response object is handed back.
    """
    utils.inform('Requesting {}'.format(request), level=utils.INFO)
    self._conn.request('GET', request, headers=self._headers)
    resp = self._conn.getresponse()
    status_line = 'Response is {}: {}'.format(resp.status, resp.reason)
    utils.inform(status_line, level=utils.INFO)
    return resp
def _f(self, *args, **kwargs):
    """Retry wrapper: keep calling the wrapped *function* until it
    succeeds; on any of the expected *exceptions* log a warning,
    reconnect, wait a second, and try again.

    NOTE(review): ``function`` and ``exceptions`` are closure variables
    bound by the enclosing decorator, which is outside this view.
    """
    while True:
        try:
            value = function(self, *args, **kwargs)
        except exceptions as err:
            utils.inform(err, level=utils.WARNING)
            # Re-establish the connection before the next attempt.
            self._connect()
            time.sleep(1)
        else:
            return value
def callback(ch, method, properties, body):
    """AMQP consumer callback for webm descriptor messages.

    The payload is expected to be a UTF-8 line of four whitespace-
    separated fields: url, thumbnail url, md5 and size.

    NOTE(review): the source arrived whitespace-collapsed; the reading
    below assumes the final inform and the ack run on every message
    (including malformed ones, so the broker does not redeliver them) --
    confirm against the original file's indentation.
    """
    try:
        url, thumb, md5, size = body.decode('utf8').split()
    except (ValueError, UnicodeDecodeError):
        # Wrong field count or undecodable bytes: report and fall through.
        utils.inform('unexpected format: "{}"'.format(body), level=utils.ERROR)
    utils.inform(body, level=utils.IMPORTANT_INFO)
    ch.basic_ack(delivery_tag=method.delivery_tag)
def check_task_q(task_q):
    """Poll *task_q* for a control message, blocking at most 0.1 s.

    If the stop signal is received, announce shutdown and exit the
    process.  Otherwise hand the dequeued task back to the caller.

    :param task_q: a ``queue.Queue``-like object.
    :return: the dequeued task, or ``None`` when the queue was empty.
    """
    try:
        task = task_q.get(timeout=0.1)
    except queue.Empty:
        return None
    if task == utils.STOP_SIGNAL:
        utils.inform("I'm done", level=utils.IMPORTANT_INFO)
        exit()
    # BUG FIX: the original silently dropped any non-stop task by
    # falling off the end (implicit None); return it instead.  This is
    # backward compatible -- callers could previously only ever see None.
    return task
def get_json(self, request):
    """GET *request* and decode the response body as JSON.

    :return: ``(status, payload)`` where *payload* is the parsed JSON
        document, or ``{}`` when the status is not 200 or the body is
        not valid JSON (a warning is logged in that case).
    """
    resp = self._get_response(request)
    body = resp.read()
    try:
        body = body.decode('utf8')
    except (AttributeError, UnicodeDecodeError):
        # Already a str, or undecodable bytes -- let json.loads cope.
        pass
    payload = {}
    if resp.status == 200:
        try:
            payload = json.loads(body)
        except (ValueError, TypeError) as e:
            utils.inform(e, level=utils.WARNING)
    return resp.status, payload
def check(self):
    """Poll the room once and report/act on its live status.

    :return: result of ``self.get_live_urls()`` when the room is live
        and no recording is in progress; ``True`` when live but already
        recording; ``False`` when offline or on any error.
    """
    try:
        self.room_info = self.get_room_info()
        if self.room_info['status']:
            utils.print_log(self.room_id, self.room_info['roomname'])
            if self.recording_lock.locked():
                # A recording is already running -- nothing more to do.
                return True
            else:
                # Call inform only when it is enabled and no inform was
                # sent in the last 100 seconds.  This prevents
                # unexpectedly spamming the inform service in edge
                # cases, e.g. when the streamer had a bad network
                # connection or did not turn off the live flag after a
                # disconnection.
                if self.enable_inform and time.time() - self.last_inform > 100:
                    utils.inform(self.room_id, self.room_info['roomname'],
                                 self.inform_url)
                    self.last_inform = time.time()
                return self.get_live_urls()
        else:
            utils.print_log(self.room_id, '等待开播')
            return False
    except Exception as e:
        # Broad catch on purpose: a polling failure must not kill the
        # monitor loop; log it and report "not live".
        utils.print_log(self.room_id, 'Error:' + str(e))
        return False
def get_webms(self, fetch_tool):
    """Fetch this thread's page and collect webms newer than the last
    seen post.

    :param fetch_tool: callable taking a URL and returning
        ``(status, data)`` with *data* the decoded JSON payload.
    :return: list of ``Webm`` objects (possibly empty), or ``None`` when
        the thread has died (HTTP 404) so the caller can drop it.
    """
    utils.inform('Searching for webms. Last post: {}'.format(self._last_post),
                 level=utils.INFO)
    webms = []
    status, data = fetch_tool(self._url)
    if status == 404:
        # Thread is gone -- signal the caller to forget it.
        return None
    elif status != 200:
        return webms
    found_webms_count = 0
    try:
        posts = data['threads'][0]['posts']
    except KeyError as e:
        utils.inform('Key error: {}'.format(e), level=utils.WARNING)
        return webms
    for post in posts:
        if post['num'] <= self._last_post:
            continue  # already handled on a previous pass
        try:
            files = post['files']
        except KeyError:
            continue  # post has no attachments
        for f in files:
            if f.get('type', None) == utils.WEBM:
                found_webms_count += 1
                url = utils.RESOURCE_URL.format(self._section, f['path'])
                thumb = utils.RESOURCE_URL.format(self._section, f['thumbnail'])
                md5 = f['md5']
                size = f['size']
                webms.append(Webm(url, thumb, md5, size))
    utils.inform('Found {} webms'.format(found_webms_count), level=utils.INFO)
    # BUG FIX: the original re-indexed data['threads'][0]['posts'][-1],
    # which raises IndexError when the post list is empty; guard and
    # reuse the already-extracted list instead.
    if posts:
        self._last_post = posts[-1]['num']
    return webms
def start_thread(task_q, webm_q, url, sections):
    """Run ``work`` in a background thread and announce the start."""
    worker = threading.Thread(target=work,
                              args=(task_q, webm_q, url, sections))
    worker.start()
    utils.inform('Parser thread started', level=utils.IMPORTANT_INFO)
def start_thread(channel, q_name):
    """Launch the download worker in its own thread and log the event."""
    downloader = threading.Thread(target=work, args=(channel, q_name))
    downloader.start()
    utils.inform('Download thread started', level=utils.IMPORTANT_INFO)
def _connect(self):
    """(Re)open the HTTPS connection to the configured host and refresh
    the default request headers."""
    host = self._host
    self._conn = http.HTTPSConnection(host)
    self._set_headers()
    utils.inform('Connected to {}'.format(host), level=utils.INFO)
# NOTE(review): this chunk begins mid-function -- the loop fragment below
# belongs to a `work` function whose header (and the bindings of
# `threads`, `i`, `length`, `webm_q`) lies above the visible region.  The
# indentation here is a best-effort reconstruction; confirm against the
# original file.
        if webms is None:
            # Thread returned 404: remove it and keep the index in step.
            del threads[i]
            length -= 1
            continue
        i += 1
        for webm in webms:
            # Hand every found webm over to the download channel.
            qhandler.put(webm_q, webm)


def start_thread(task_q, webm_q, url, sections):
    # Run the parser loop in a background thread and announce the start.
    threading.Thread(target=work,
                     args=(task_q, webm_q, url, sections)).start()
    utils.inform('Parser thread started', level=utils.IMPORTANT_INFO)


def stop_threads(task_q, workers):
    # Post one stop signal per worker so every consumer shuts down.
    for _ in range(workers):
        task_q.put(utils.STOP_SIGNAL)


if __name__ == '__main__':
    # Script entry point: parse section 'b' in the foreground (the
    # threaded start_thread above is for use as a library).
    task_q = qhandler.get_task_q()
    webm_q = qhandler.create_channel()
    url = utils.BOARD_URL
    sections = ['b']
    try:
        work(task_q, webm_q, url, sections)
    except (KeyboardInterrupt, SystemExit):
        utils.inform("I'm done", level=utils.IMPORTANT_INFO)
        exit()
def print_output(webm):
    """Report a webm through the shared inform channel at info level."""
    utils.inform(webm, level=utils.INFO)