def save_to_database(self):
    """Drain ``self.db_queue`` and batch-insert the queued queries.

    Queued items are dicts with ``'table'`` and ``'columns'`` keys.  They
    are buffered until ``config['database']['bufferLimit']`` is reached (or
    the queue has been empty for 240 s), then grouped by target table and
    inserted, retrying each table's insert up to 3 times.  Returns when the
    queue times out and the final buffer has been flushed.
    """
    db = Database()
    buffer = []
    empty = False
    while not empty:
        try:
            # Only buffer a query when one was actually retrieved: the
            # original appended the stale (or initial None) value again
            # after a timeout, producing duplicates / a None entry that
            # crashed the k['table'] sort key.
            buffer.append(self.db_queue.get(timeout=240))
        except Empty:
            self.logger.warning('Database queue empty.')
            empty = True  # flush whatever is buffered, then exit the loop
        if len(buffer) >= self.config['database']['bufferLimit'] or (
                empty and len(buffer) > 0):
            # groupby only groups consecutive items, so sort by table first.
            buffer.sort(key=lambda k: k['table'])
            for table, group in groupby(buffer, key=lambda k: k['table']):
                # Materialize the rows once: the groupby group is a lazy,
                # single-use iterator, so the original retried the insert
                # with an already-exhausted (empty) group.
                rows = [g['columns'] for g in group]
                db_attempts = 3
                while db_attempts:
                    try:
                        db.insert(table, rows)
                        break
                    except Exception as err:
                        # Decrement first so the log reports the attempts
                        # actually remaining.
                        db_attempts -= 1
                        self.logger.error(
                            'Failed to save to database: {} Attempting {} more times.'
                            .format(repr(err), db_attempts))
            buffer.clear()
def upload_video(video_dict):
    """Upload a video through bd_upload and announce it.

    Raises RuntimeError when bd_upload yields no share URL; otherwise
    optionally records the URL in the 'Video' database and notifies the bot.
    """
    share_url = bd_upload(video_dict['Title'])
    # Guard clause: bail out immediately when the upload produced no URL.
    if not share_url:
        raise RuntimeError(f'Upload {video_dict["Title"]} failed')
    if enable_db:
        Database('Video').insert(video_dict['Title'], share_url,
                                 video_dict['Date'])
    bot(f"[下载提示] {video_dict['Title']} 已上传, 请查看页面")
def upload_video(video_dict):
    """Upload a video via the backend selected by ``upload_by``.

    Supported backends are 'bd' (BDUpload, which also produces a share URL)
    and 's3' (S3Upload, share URL derived from the title).  When
    ``enable_db`` is set, the share URL is recorded in the 'Video'
    database.  Raises RuntimeError for an unknown backend.
    """
    upload_way_dict = {'bd': BDUpload, 's3': S3Upload}
    upload_way = upload_way_dict.get(upload_by)
    if upload_way is None:
        # Fail fast: the original called None here (TypeError), which made
        # its trailing `else: raise` branch unreachable.
        raise RuntimeError(f'Upload {video_dict["Title"]} failed')
    uploader = upload_way()
    uploader.upload_item(f"{ddir}/{video_dict['Title']}", video_dict['Title'])
    if upload_by == 'bd':
        share_url = uploader.share_item(video_dict['Title'])
        if enable_db:
            db = Database('Video')
            db.insert(video_dict['Title'], share_url, video_dict['Date'])
    elif upload_by == 's3':
        if enable_db:
            db = Database('Video')
            # S3 share links are served via the gets3/ route; URL-encode
            # the title for the path.
            db.insert(video_dict['Title'],
                      f"gets3/{quote(video_dict['Title'])}",
                      video_dict['Date'])
    bot(f"[下载提示] {video_dict['Title']} 已上传, 请查看页面")
def upload_video(video_dict, user_config):
    """Upload a video via the backend named in ``config['upload_by']``.

    Supported backends are 'bd' (BDUpload, which also yields a share URL)
    and 's3' (S3Upload).  When ``config['enable_mongodb']`` is set, the
    share URL is stored in the per-user database collection resolved by
    ``user_map``.  Raises RuntimeError for an unknown backend.
    """
    upload_way_dict = {'bd': BDUpload, 's3': S3Upload}
    upload_way = upload_way_dict.get(config['upload_by'])
    if upload_way is None:
        # Fail fast: the original called None here (TypeError), which made
        # its trailing `else: raise` branch unreachable.
        raise RuntimeError(f'Upload {video_dict["Title"]} failed')
    uploader = upload_way()
    ddir = get_ddir(user_config)
    uploader.upload_item(f"{ddir}/{video_dict['Title']}", video_dict['Title'])
    if config['upload_by'] == 'bd':
        share_url = uploader.share_item(video_dict['Title'])
        if config['enable_mongodb']:
            db = Database(user_map(video_dict['User']))
            db.insert(video_dict['Title'], share_url, video_dict['Date'])
    elif config['upload_by'] == 's3':
        if config['enable_mongodb']:
            db = Database(user_map(video_dict['User']))
            # S3 share links are served via the gets3/ route; URL-encode
            # the title for the path.
            db.insert(video_dict['Title'],
                      f"gets3/{quote(video_dict['Title'])}",
                      video_dict['Date'])
    bot(f"[下载提示] {video_dict['Title']} 已上传, 请查看https://matsuri.design/",
        user_config)
def collect_info(self, key):
    """Worker loop: pull channel IDs from the API queue and collect data.

    For each channel pulled from ``self.api_queue`` this (1) ensures a
    channel row exists in the database, (2) runs ``collect_channel``,
    (3) walks the channel's uploads playlist, inserting any video rows
    newer than ``self.limit`` and running ``collect_videos`` per page.
    The loop exits after the queue stays empty for 3 consecutive 120 s
    attempts.  ``key`` is the API key used for this worker's requests.
    """
    api = APIRequest(api_key=key)
    db = Database()
    queue_attempts = 3
    # Gets channel from queue
    while queue_attempts:
        try:
            # Semaphore bounds concurrent queue/API access across worker
            # threads; always released in the finally below.
            self.api_semaphore.acquire(blocking=True, timeout=120)
            channel_id = self.api_queue.get(timeout=120)
            print('{} - Thread {}'.format(self.api_queue.qsize(),
                                          current_thread().name))
        except Empty:
            queue_attempts -= 1
            self.logger.warning(
                'API queue empty. Attempting {} more time(s).'.format(
                    queue_attempts))
            continue
        finally:
            self.api_semaphore.release()
        # Checks if channel is already in database, otherwise saves it.
        # self.db_ids caches yt_id -> database row id to avoid re-querying.
        channel_dbid = self.db_ids.get(channel_id, 0)
        if not channel_dbid:
            try:
                db_id_query = db.select(
                    'channel', *['channel_id'],
                    where=['yt_id LIKE "{}"'.format(channel_id)])
                if not db_id_query:
                    # Not in the DB: fetch the channel snippet and insert it.
                    request, response = api.list(
                        'channels', **{
                            'part': 'snippet',
                            'id': channel_id
                        })
                    try:
                        snippet = response['items'][0]['snippet']
                        channel_query = {
                            'yt_id': channel_id,
                            'title': snippet['title'],
                            'description': snippet['description'],
                            'published_at':
                            self.parse_date(snippet['publishedAt'],
                                            return_datetime=True).strftime(
                                                '%Y-%m-%d %H:%M:%S')
                        }
                        channel_dbid = db.insert('channel', channel_query)
                        self.db_ids[channel_id] = channel_dbid
                    except KeyError as err:
                        # Missing/empty 'items' in the API response: skip
                        # this channel entirely.
                        self.logger.error(
                            'KeyError while getting channel info: {}'.
                            format(repr(err)))
                        continue
                    except Exception as err:
                        self.logger.error(
                            'Error while getting channel info: {}'.format(
                                repr(err)))
                        continue
                else:
                    # Row exists but was not cached yet.
                    channel_dbid = db_id_query[0]['channel_id']
                    self.db_ids[channel_id] = channel_dbid
            except Exception as err:
                self.logger.error(repr(err))
                continue
        # Collects channel, attempting 3 times
        collect_attempts = 3
        while collect_attempts:
            try:
                self.collect_channel(api, channel_id)
                break
            except Exception as err:
                collect_attempts -= 1
                self.logger.error(
                    'Failed to collect channel {}: {} Attempting {} more times.'
                    .format(channel_id, repr(err), collect_attempts))
        if not collect_attempts:
            # All 3 attempts failed; give up on this channel.
            self.logger.info(
                'Could not collect channel: {}'.format(channel_id))
            continue
        # Retrieves recent videos
        playlist_id = 'UU' + channel_id[
            2:]  # Playlist ID of channel c's uploads, can be derived from channel ID
        fetch_attempts = 3
        request, response = None, None
        while fetch_attempts:
            try:
                request, response = api.list(
                    'playlistItems', **{
                        'part': 'contentDetails',
                        'playlistId': playlist_id,
                        'maxResults': 50
                    })
                break
            except Exception as err:
                fetch_attempts -= 1
                self.logger.error(
                    'Error while getting videos for channel {}: {} Attempting {} more times.'
                    .format(channel_id, repr(err), fetch_attempts))
        if not fetch_attempts:
            self.logger.error(
                'Failed to get videos for {}'.format(channel_id))
            continue
        video_list = []
        limit_reached = False
        # Page through the uploads playlist until the age limit is hit or
        # there are no more pages.
        while request and not limit_reached:
            for v in response['items']:
                video_id = v['contentDetails']['videoId']
                published_at = self.parse_date(
                    v['contentDetails']['videoPublishedAt'])
                if self.now - published_at > self.limit:
                    # Stop at the first video older than the limit —
                    # assumes the uploads playlist is newest-first; TODO
                    # confirm against the API contract.
                    limit_reached = True
                    break
                try:
                    db_id_query = \
                        db.select('video', 'video_id',
                                  where=['yt_id LIKE "{}"'.format(video_id)])
                    if not db_id_query:
                        # New video: fetch full details and insert a row.
                        request, response = api.list(
                            'videos', **{
                                'part': 'snippet,contentDetails',
                                'id': video_id
                            })
                        try:
                            snippet = response['items'][0]['snippet']
                            content_details = response['items'][0][
                                'contentDetails']
                            video_query = {
                                'yt_id': video_id,
                                'title': snippet['title'],
                                'description': snippet['description'],
                                'channel_id': channel_dbid,
                                'length_seconds': int(
                                    parse_duration(
                                        content_details['duration']).
                                    total_seconds()),
                                'published_at': self.parse_date(
                                    snippet['publishedAt'],
                                    return_datetime=True).strftime(
                                        '%Y-%m-%d %H:%M:%S')
                            }
                            video_dbid = db.insert('video', video_query)
                            self.db_ids[video_id] = video_dbid
                        except KeyError as err:
                            self.logger.error(
                                'KeyError while getting video info: {}'.
                                format(repr(err)))
                            continue
                    else:
                        video_dbid = db_id_query[0]['video_id']
                        self.db_ids[video_id] = video_dbid
                except Exception as err:
                    self.logger.error(
                        'Error while getting video data: {}'.format(
                            repr(err)))
                    continue
                video_list.append(video_id)
            # Collect stats for this page of videos, attempting 3 times.
            collect_attempts = 3
            while collect_attempts:
                try:
                    self.collect_videos(api, video_list)
                    break
                except Exception as err:
                    collect_attempts -= 1
                    self.logger.error(
                        'Failed to collect videos from channel {}: '
                        '{} Attempting {} more times.'.format(
                            channel_id, repr(err), collect_attempts))
                finally:
                    # NOTE(review): clear() runs in finally after EVERY
                    # attempt, so any retry after a failure calls
                    # collect_videos with an empty list — verify this is
                    # intended.
                    video_list.clear()
            if not collect_attempts:
                self.logger.info(
                    'Could not collect videos from channel: {}'.format(
                        channel_id))
            if not limit_reached:
                # Advance to the next playlist page, attempting 3 times;
                # list_next returning a falsy request ends the outer loop.
                execute_attempts = 3
                while execute_attempts:
                    try:
                        request, response = api.list_next(
                            'playlistItems', request, response)
                        break
                    except Exception as err:
                        execute_attempts -= 1
                        self.logger.error(
                            'Video fetch response error: {} Attempting {} more time(s)'
                            .format(' '.join(err.args), execute_attempts))
                        continue
                if not execute_attempts:
                    break
        # Channel processed successfully: reset the queue retry budget.
        queue_attempts = 3
    self.logger.info('Finished execution for Thread {}'.format(
        get_ident()))