def compare_db_filtered(videos, limit, discarded=False, downloaded=False):
    """
    Compare a list of videos against the DB and exclude entries that match
    an enabled exclusion filter.

    :param videos: A list of Video objects to be compared.
    :param limit: Integer value determining how many videos to return.
    :param discarded: If True, exclude videos flagged discarded in the DB.
    :param downloaded: If True, exclude videos flagged downloaded in the DB.
    :return: A list of VideoD (detached) objects.
    """
    logger.info("Comparing filtered videos with DB")
    return_list = []
    counter = 0
    # Check whether "filter by video age" is enabled (negative value disables it).
    filter_days = read_config('Requests', 'filter_videos_days_old')
    # Signal DB listener about started read operation (DB status indicator/logs).
    DatabaseListener.static_instance.startRead.emit(threading.get_ident())
    for video in videos:
        # If "filter by video age" is enabled, stop at the first video older
        # than the cutoff (input is assumed ordered newest-first, as the
        # original used break, not continue).
        if filter_days >= 0:
            date = datetime.datetime.utcnow() - datetime.timedelta(
                days=filter_days)
            if video.date_published < date:
                break
        db_vid = get_vid_by_id(video.video_id)
        if db_vid:
            # Consistent with filter_videos(): skip the video when it matches
            # any enabled exclusion filter.
            if (db_vid.downloaded and downloaded) or (db_vid.discarded and discarded):
                continue
            # Convert to detached model before appending.
            return_list.append(Video.to_video_d(video))
            counter += 1
        else:
            # Not in the DB: no exclusion filter can apply, keep it as-is.
            return_list.append(video)
            counter += 1
        if counter >= limit:
            break
    # Signal DB listener about finished read operation.
    DatabaseListener.static_instance.finishRead.emit(threading.get_ident())
    # Close DB Query session.
    db_session.remove()
    return return_list
def run(self):
    """
    Override threading.Thread.run() with its own code.

    Populates self.videos with the channel's uploads, using one of three
    strategies: deep search (search + playlist merged), config-driven
    per-channel tests, or a single plain listing. Any exception is stored
    on self.exc instead of being raised; self.job_done is always set.

    :return:
    """
    try:
        use_tests = read_config('Requests', 'use_tests')
        if self.deep_search:
            # Deep search: query both the search API and the uploads
            # playlist, then merge duplicates before extending self.videos.
            temp_videos = []
            list_uploaded_videos_search(self.youtube, self.channel_id,
                                        temp_videos, self.search_pages)
            list_uploaded_videos(self.youtube, temp_videos,
                                 self.playlist_id, self.list_pages)
            self.merge_same_videos_in_list(temp_videos)
            self.videos.extend(temp_videos)
        elif use_tests:
            # Test-driven mode: use this channel's stored test results to
            # decide how many playlist pages to fetch and whether a search
            # fallback is needed.
            channel = db_session.query(Channel).get(self.channel_id)
            miss = read_config('Requests', 'miss_limit')
            pages = read_config('Requests', 'test_pages')
            extra_pages = read_config('Requests', 'extra_list_pages')
            list_pages = 0
            list_videos = []
            search_videos = []
            for test in channel.tests:
                # Track the largest page count seen across tests.
                if test.test_pages > list_pages:
                    list_pages = test.test_pages
                # Too many misses or too many pages: fall back to search.
                # Session is removed before the (slow) remote call.
                if test.test_miss < miss or test.test_pages > pages:
                    db_session.remove()
                    list_uploaded_videos_search(self.youtube,
                                                self.channel_id,
                                                search_videos,
                                                self.search_pages)
                    break
            db_session.remove()
            # Fetch the uploads playlist, capped by the smaller of the
            # configured and observed page counts (plus the extra margin).
            list_uploaded_videos(self.youtube, list_videos,
                                 self.playlist_id,
                                 min(pages + extra_pages,
                                     list_pages + extra_pages))
            if len(search_videos) > 0:
                # Merge search results into the playlist results, grabbing
                # any missing info.
                return_videos = self.merge_two_videos_list_grab_info(
                    list_videos, search_videos)
            else:
                return_videos = list_videos
            self.videos.extend(return_videos)
        else:
            # Plain mode: one listing call, source chosen by debug config.
            use_playlist_items = read_config('Debug', 'use_playlistItems')
            if use_playlist_items:
                list_uploaded_videos(self.youtube, self.videos,
                                     self.playlist_id, self.list_pages)
            else:
                list_uploaded_videos_search(self.youtube, self.channel_id,
                                            self.videos, self.search_pages)
    except Exception as e:
        # Save the exception details, but don't rethrow — the spawning
        # thread inspects self.exc after join().
        self.exc = e
        pass
    # Always flag completion so waiters don't hang on a failed run.
    self.job_done = True
def update_download_tile(self, download_tile):
    """
    Update the persisted DBDownloadTile matching the tile's video id.

    Logs a warning (and performs no update) when no matching row exists.

    :param download_tile: Tile object whose video.video_id identifies the row.
    :return:
    """
    try:
        result = db_session.query(DBDownloadTile).filter(
            DBDownloadTile.video_id == format(
                download_tile.video.video_id)).first()
        if result:
            result.update_tile(download_tile)
            db_session.commit()
        else:
            self.logger.warning(
                "Download tile not found in db, so no update was performed: {}"
                .format(download_tile.__dict__))
    finally:
        # BUG FIX: the original only removed the scoped session on the
        # found-and-updated branch, leaking it on the warning path.
        db_session.remove()
def new_download_tile(self, new_tile):
    """
    Persist a new DBDownloadTile for new_tile, unless a row with the same
    video id already exists.

    Logs an error and stores nothing when the tile carries no video.

    :param new_tile: Tile object to persist (must have a .video).
    :return:
    """
    lock.acquire()
    try:
        result = db_session.query(DBDownloadTile).filter(
            DBDownloadTile.video_id == format(
                new_tile.video.video_id)).first()
        if not result:
            download_tile = DBDownloadTile(new_tile)
            if not download_tile.video:
                self.logger.error("No video in new tile: {}".format(
                    download_tile.__dict__), exc_info=True)
                # BUG FIX: the original returned here while still holding
                # the lock (deadlocking later callers) and without removing
                # the scoped session; try/finally now releases it.
                return
            db_session.add(download_tile)
            db_session.commit()
        # Always close the query session, on every path.
        db_session.remove()
    finally:
        lock.release()
def run_channels_test():
    """
    Run threaded miss/pages tests for every subscribed channel, then store
    the results as Test rows in the database.

    :return:
    """
    logger.info('Running Channels Test')
    subscriptions = get_subscriptions(cached_subs)
    # One API key per subscription so the threads don't share quota.
    youtube_keys = load_keys(len(subscriptions))
    test_threads = []
    results = []
    logger.info("Channels Test: Starting miss and pages tests")
    # BUG FIX: the original iterated `(subscriptions, youtube_keys)` — a
    # 2-tuple of lists — instead of pairing each subscription with its key.
    for subscription, youtube_key in zip(subscriptions, youtube_keys):
        test = RunTestsThreaded(subscription, youtube_key, results)
        test.start()
        test_threads.append(test)
    logger.info("Channels Test: Waiting for test threads")
    for thread in test_threads:
        thread.join()
    # Each result row is (channel, miss, pages, extra) positional data for Test.
    for result in results:
        test = Test(result[0], result[1], result[2], result[3])
        db_session.add(test)
        db_session.commit()
    db_session.remove()
def get_db_videos_playback(limit,
                           filters=(~Video.watched, Video.downloaded,
                                    ~Video.discarded),
                           sort_method=(asc(Video.watch_prio),
                                        desc(Video.date_downloaded),
                                        desc(Video.date_published))):
    """
    Get a list of videos from the database, limited by limit and filters.
    Sorted by sort_method.

    :param filters: Tuple of SQLAlchemy Column(Boolean) objects.
    :param sort_method: Tuple of SQLAlchemy sort expressions.
    :param limit: Integer value determining how many videos to grab.
    :return: A list of VideoD objects.
    """
    # Build the full query up front: filtered, ordered and limited.
    query = db_session.query(Video).filter(*filters)
    query = query.order_by(*sort_method).limit(limit)
    # Bracket the actual read with DB-listener signals (status indicator/logs).
    thread_id = threading.get_ident()
    DatabaseListener.static_instance.startRead.emit(thread_id)
    rows = query.all()
    DatabaseListener.static_instance.finishRead.emit(thread_id)
    # Detach the ORM objects into plain VideoD models, then close the session.
    detached_videos = Video.to_video_ds(rows)
    db_session.remove()
    logger.debug5("sane_yt_subfeed.debug.date_formats: {}".format(
        sane_yt_subfeed.debug.date_formats))
    sane_yt_subfeed.debug.clear_date_formats()
    return detached_videos
def get_db_videos_subfeed(limit, filters=(~Video.downloaded,
                                          ~Video.discarded)):
    """
    Get a list of videos from the database, limited by limit and filters.

    :param filters: Tuple of SQLAlchemy Column(Boolean) objects.
    :param limit: Integer value determining how many videos to grab.
    :return: A list of VideoD objects.
    """
    logger.info("Getting newest stored videos (filters={})".format(filters))
    # When "filter by video age" is enabled (non-negative), append a
    # publish-date cutoff to the filter tuple.
    filter_days = read_config('Requests', 'filter_videos_days_old')
    if filter_days >= 0:
        cutoff = datetime.datetime.utcnow() - datetime.timedelta(
            days=filter_days)
        filters = filters + (Video.date_published > cutoff, )
    # Bracket the read with DB-listener signals (status indicator/logs).
    thread_id = threading.get_ident()
    DatabaseListener.static_instance.startRead.emit(thread_id)
    rows = (db_session.query(Video)
            .order_by(desc(Video.date_published))
            .filter(*filters)
            .limit(limit)
            .all())
    DatabaseListener.static_instance.finishRead.emit(thread_id)
    # Detach ORM objects into plain VideoD models and close the session.
    detached_videos = Video.to_video_ds(rows)
    db_session.remove()
    logger.debug5("sane_yt_subfeed.debug.date_formats: {}".format(
        sane_yt_subfeed.debug.date_formats))
    sane_yt_subfeed.debug.clear_date_formats()
    return detached_videos
def get_best_downloaded_videos(limit,
                               filters=(~Video.watched, Video.downloaded,
                                        ~Video.discarded),
                               sort_method=(asc(Video.watch_prio),
                                            desc(Video.date_downloaded),
                                            desc(Video.date_published))):
    """
    Get a list of downloaded videos from the database, limited by limit and
    filters, sorted by sort_method (by default: watch priority, then newest
    download, then newest publish date).

    :param filters: Tuple of SQLAlchemy filter expressions.
    :param sort_method: Tuple of SQLAlchemy sort expressions.
    :param limit: Integer value determining how many videos to grab.
    :return: list(VideoD)
    """
    # Initiate DB query and apply filters.
    db_query = db_session.query(Video)
    db_query = db_query.filter(*filters)
    # Signal DB listener about started read operation (status indicator/logs).
    DatabaseListener.static_instance.startRead.emit(threading.get_ident())
    # Execute query, ordered by sort_method and limited by limit.
    db_videos = db_query.order_by(*sort_method).limit(limit).all()
    # Signal DB listener about finished read operation.
    DatabaseListener.static_instance.finishRead.emit(threading.get_ident())
    # Convert Video objects to VideoD (Detached model) objects.
    videos = Video.to_video_ds(db_videos)
    # Close DB Query session.
    db_session.remove()
    return videos
def get_newest_stored_videos(limit, filters=(~Video.downloaded,
                                             ~Video.discarded)):
    """
    Get the newest stored videos from the database, ordered by publish date
    (descending), limited by limit and filters, with an optional age cutoff
    from config.

    :param filters: Tuple of SQLAlchemy filter expressions.
    :param limit: Integer value determining how many videos to grab.
    :return: list(VideoD)
    """
    logger.info("Getting newest stored videos (filters={})".format(filters))
    # Check whether "filter by video age" is enabled (negative disables it);
    # if so, append a publish-date cutoff to the filter tuple.
    filter_days = read_config('Requests', 'filter_videos_days_old')
    if filter_days >= 0:
        date = datetime.datetime.utcnow() - datetime.timedelta(
            days=filter_days)
        filters = filters + (Video.date_published > date, )
    # Signal DB listener about started read operation (status indicator/logs).
    DatabaseListener.static_instance.startRead.emit(threading.get_ident())
    # Execute query: newest first, filtered, limited.
    db_videos = db_session.query(Video).order_by(desc(
        Video.date_published)).filter(*filters).limit(limit).all()
    # Signal DB listener about finished read operation.
    DatabaseListener.static_instance.finishRead.emit(threading.get_ident())
    # Convert Video objects to VideoD (Detached model) objects.
    videos = Video.to_video_ds(db_videos)
    # Close DB Query session.
    db_session.remove()
    return videos
def filter_videos(videos, limit, filter_discarded=False,
                  filter_downloaded=False):
    """
    Takes a list of videos and excludes items that match any enabled
    exclusion filter.

    :param videos: A list of Video objects to be compared.
    :param limit: Integer value determining how many videos to return.
    :param filter_discarded: Boolean determining whether or not to exclude discarded videos.
    :param filter_downloaded: Boolean determining whether or not to exclude filter_downloaded videos.
    :return: A list of VideoD objects.
    """
    logger.info("Comparing filtered videos with DB")
    kept = []
    # Negative value disables the "filter by video age" feature.
    filter_days = read_config('Requests', 'filter_videos_days_old')
    # Announce the read to the DB listener (status indicator and logs).
    DatabaseListener.static_instance.startRead.emit(threading.get_ident())
    for video in videos:
        # Age filter: stop at the first video older than the cutoff
        # (the original used break here, not continue).
        if filter_days >= 0:
            oldest_allowed = datetime.datetime.utcnow() - datetime.timedelta(
                days=filter_days)
            if video.date_published < oldest_allowed:
                break
        # Look the video up in the DB to apply the exclusion filters.
        db_vid = get_vid_by_id(video.video_id)
        if db_vid:
            # Known video: drop it when any enabled exclusion filter matches.
            if (db_vid.downloaded and filter_downloaded) or (
                    db_vid.discarded and filter_discarded):
                continue
            # Keep it as a detached (VideoD) model.
            kept.append(Video.to_video_d(video))
        else:
            # Unknown video: no filters apply, keep it unchanged.
            kept.append(video)
        # Stop once we have enough videos.
        if len(kept) >= limit:
            break
    # Announce the finished read, then close the query session.
    DatabaseListener.static_instance.finishRead.emit(threading.get_ident())
    db_session.remove()
    return kept
def new_file(self, vid_id, vid_path):
    """
    Register a newly discovered video file on disk.

    If the video exists in the DB, mark it downloaded and reconcile its
    thumbnail state; otherwise fetch its info from YouTube and insert it.

    :param vid_id: Video id of the file found on disk.
    :param vid_path: Filesystem path of the video file.
    :return:
    """
    vid = db_session.query(Video).get(vid_id)
    if vid:
        if not vid.downloaded:
            # Known video, first time we see its file: record path/time/flag.
            vid.vid_path = vid_path
            vid.date_downloaded = datetime.datetime.utcnow()
            vid.downloaded = True
            # Reconcile thumbnail state between disk and DB.
            thumb_path = os.path.join(THUMBNAILS_PATH,
                                      '{}.jpg'.format(vid.video_id))
            downloaded_thumbnail = os.path.isfile(thumb_path)
            if downloaded_thumbnail and (not vid.thumbnail_path):
                # Thumbnail on disk but missing from DB: backfill the path.
                vid.thumbnail_path = thumb_path
                self.logger.warning(
                    "Thumbnail downloaded, but path didn't exist in db, for video: {}"
                    .format(vid.__dict__))
            elif (not vid.thumbnail_path) or (not downloaded_thumbnail):
                # Thumbnail missing (from disk, DB, or both): re-download it.
                if not downloaded_thumbnail:
                    self.logger.warning(
                        "Thumbnail path in db, but not on disk, for video: {}"
                        .format(vid.__dict__))
                self.logger.info("Downloading thumbnail for: {}".format(
                    vid.__dict__))
                download_thumbnails_threaded([vid])
            self.logger.info(
                "Updating existing record in db: {} - {}".format(
                    vid.title, vid.__dict__))
            db_session.commit()
            # Refresh both views so the UI reflects the new download.
            self.model.update_subfeed_videos_from_db()
            self.model.update_playback_videos_from_db()
        else:
            # Already marked downloaded: nothing to change.
            self.logger.info(
                "File already downloaded by this system: {} - {}".format(
                    vid.title, vid.__dict__))
        db_session.remove()
    else:
        # Unknown video: release the session before the (slow) remote call,
        # then fetch its metadata from YouTube.
        db_session.remove()
        youtube_keys = load_keys(1)
        self.logger.info(
            "Grabbing new video information from youtube: {}".format(
                vid_id))
        response_videos = list_uploaded_videos_videos(
            youtube_keys[0], [vid_id], 1)
        if len(response_videos) > 0:
            video = response_videos[0]
            video.vid_path = vid_path
            video.downloaded = True
            video.watched = False
            video.date_downloaded = datetime.datetime.utcnow()
            self.logger.info("Downloading thumbnail: {} - {}".format(
                video.title, video.__dict__))
            download_thumbnails_threaded([video])
            self.logger.info("Adding new file to db: {} - {}".format(
                video.title, video.__dict__))
            # Persist asynchronously; listener refreshes the playback grid
            # when the DB write finishes.
            UpdateVideo(video,
                        finished_listeners=[
                            self.model.playback_grid_view_listener.
                            downloadedVideosChangedinDB
                        ]).start()
        else:
            self.logger.warning(
                "Video with id {}, not found on youtube servers".format(
                    vid_id))