Example #1
def cli(no_gui, test_channels, update_watch_prio, set_watched_day):
    logger = create_logger(__name__)
    if no_gui:
        run_print()
    if update_watch_prio:
        videos = db_session.query(Video).all()
        watch_prio = read_config('Play', 'default_watch_prio')
        logger.debug("Setting watch_prio {}, for: {} videos".format(watch_prio, len(videos)))
        for video in videos:
            video.watch_prio = watch_prio
        db_session.commit()
        return

    if set_watched_day:
        videos = db_session.query(Video).filter(or_(Video.downloaded == True, (Video.vid_path.is_(None)))).all()
        for video in videos:
            vid_age = datetime.datetime.utcnow() - video.date_published
            if vid_age > datetime.timedelta(days=int(set_watched_day)):
                logger.debug("Setting watched, {} - {} - {}".format(vid_age, video.title, video.__dict__))
                video.watched = True
        db_session.commit()
        return
    if test_channels:
        run_channels_test()
    else:
        """
        PyQT raises and catches exceptions, but doesn't pass them along. 
        Instead it just exits with a status of 1 to show an exception was caught. 
        """
        # Back up the reference to the exceptionhook
        sys._excepthook = sys.excepthook

        def my_exception_hook(exctype, value, traceback):
            # Ignore KeyboardInterrupt so a console python program can exit with Ctrl + C.
            if issubclass(exctype, KeyboardInterrupt):
                sys.__excepthook__(exctype, value, traceback)
                return

            # Log the exception with the logger
            logger.critical("Intercepted Exception", exc_info=(exctype, value, traceback))

            # Call the normal Exception hook after
            sys._excepthook(exctype, value, traceback)

            # sys.exit(1)       # Alternatively, exit

        # Set the exception hook to our wrapping function
        sys.excepthook = my_exception_hook

        run_with_gui()
Example #2
def get_stored_subscriptions():
    logger.info("Getting subscriptions from DB.")
    channels = db_session.query(Channel).filter(
        or_(Channel.subscribed, Channel.subscribed_override)).all()
    if len(channels) < 1:
        return get_remote_subscriptions_cached_oauth()
    return channels
Example #3
    def __init__(self, download_tile):
        self.finished = download_tile.finished
        self.started_date = download_tile.started_date
        self.finished_date = download_tile.finished_date
        self.video = db_session.query(Video).get(download_tile.video.video_id)
        self.video_downloaded = download_tile.video_downloaded
        self.total_bytes = download_tile.total_bytes
        self.last_event = download_tile.last_event
        self.cleared = download_tile.cleared
Example #4
    def run(self):
        """
        Override threading.Thread.run() with its own code
        :return:
        """
        try:

            # youtube = youtube_auth_keys()

            # self.videos = get_channel_uploads(self.youtube, channel_id)
            use_tests = read_config('Requests', 'use_tests')

            if self.deep_search:
                temp_videos = []
                list_uploaded_videos_search(self.youtube, self.channel_id, temp_videos, self.search_pages)
                list_uploaded_videos(self.youtube, temp_videos, self.playlist_id, self.list_pages)
                self.merge_same_videos_in_list(temp_videos)
                self.videos.extend(temp_videos)

            elif use_tests:
                channel = db_session.query(Channel).get(self.channel_id)
                miss = read_config('Requests', 'miss_limit')
                pages = read_config('Requests', 'test_pages')
                extra_pages = read_config('Requests', 'extra_list_pages')
                list_pages = 0
                list_videos = []
                search_videos = []
                for test in channel.tests:
                    if test.test_pages > list_pages:
                        list_pages = test.test_pages
                    if test.test_miss < miss or test.test_pages > pages:
                        db_session.remove()
                        list_uploaded_videos_search(self.youtube, self.channel_id, search_videos, self.search_pages)
                        break
                db_session.remove()
                list_uploaded_videos(self.youtube, list_videos, self.playlist_id,
                                     min(pages + extra_pages, list_pages + extra_pages))

                if len(search_videos) > 0:
                    return_videos = self.merge_two_videos_list_grab_info(list_videos, search_videos)
                else:
                    return_videos = list_videos
                self.videos.extend(return_videos)

            else:
                use_playlist_items = read_config('Debug', 'use_playlistItems')
                if use_playlist_items:
                    list_uploaded_videos(self.youtube, self.videos, self.playlist_id, self.list_pages)
                else:
                    list_uploaded_videos_search(self.youtube, self.channel_id, self.videos, self.search_pages)

        except Exception as e:
            # Save the exception details, but don't rethrow.
            self.exc = e

        self.job_done = True
Example #5
def delete_sub_not_in_list(subs):
    delete_channels = db_session.query(Channel).filter(
        ~Channel.id.in_(subs)).all()
    for channel in delete_channels:
        if channel.subscribed or channel.subscribed is None:
            channel.subscribed = False
            create_logger(__name__).warning(
                "Setting unsubscribed for channel: {} - {}".format(
                    channel.title, channel.__dict__))
            stmt = update_channel_from_remote(channel)
            engine.execute(stmt)
Example #6
    def update_download_tile(self, download_tile):
        result = db_session.query(DBDownloadTile).filter(
            DBDownloadTile.video_id == format(
                download_tile.video.video_id)).first()
        # stmt = DBDownloadTile.__table__.select().where(
        #     text("video_id = '{}'".format(download_tile.video.video_id)))
        # result = engine.execute(stmt).first()
        if result:
            result.update_tile(download_tile)
            db_session.commit()
            db_session.remove()
        else:
            self.logger.warning(
                "Download tile not found in db, so no update was performed: {}"
                .format(download_tile.__dict__))
Example #7
    def new_download_tile(self, new_tile):
        lock.acquire()
        try:
            result = db_session.query(DBDownloadTile).filter(
                DBDownloadTile.video_id == format(
                    new_tile.video.video_id)).first()
            if not result:
                download_tile = DBDownloadTile(new_tile)
                if not download_tile.video:
                    self.logger.error("No video in new tile: {}".format(
                        download_tile.__dict__), exc_info=True)
                    return
                db_session.add(download_tile)
                db_session.commit()
        finally:
            # Release the session and the lock even on the early return above;
            # the original returned without releasing either.
            db_session.remove()
            lock.release()
Example #8
    def load_db_download_tiles(self):
        db_result = db_session.query(DBDownloadTile).filter(
            DBDownloadTile.cleared == false()).all()
        detached_db_result = DDBDownloadTile.list_detach(db_result)
        use_youtube_dl = read_config('Youtube-dl', 'use_youtube_dl')
        download_finished_signals = [
            static_listeners.STATIC_GRID_VIEW_LISTENER.downloadFinished
        ]
        for tile in detached_db_result:
            if use_youtube_dl and not tile.finished:
                self.logger.info(
                    "Starting paused in progress download for: {}".format(
                        tile.video.__dict__))
                tile.progress_listener = DownloadHandler.download_using_youtube_dl(
                    tile.video,
                    youtube_dl_finished_listener=download_finished_signals,
                    wait=True)
        self.dbDownloadTiles.emit(detached_db_result)
Example #9
def get_db_videos_playback(limit,
                           filters=(~Video.watched, Video.downloaded,
                                    ~Video.discarded),
                           sort_method=(asc(Video.watch_prio),
                                        desc(Video.date_downloaded),
                                        desc(Video.date_published))):
    """
    Get a list of videos from the database, limited by limit and filters. Sorted by sort_method.

    :param filters:     Tuple of SQLAlchemy Column(Boolean) objects.
    :param sort_method: Tuple of SQLAlchemy sort expressions.
    :param limit:       Integer value determining how many videos to grab.
    :return:            A list of VideoD objects.
    """
    # Initiate DB Query session.
    db_query = db_session.query(Video)

    # Apply filters to query.
    db_query = db_query.filter(*filters)

    # Signal DB listener about started read operation (Used in DB status indicator and logs)
    DatabaseListener.static_instance.startRead.emit(threading.get_ident())

    # Execute query, ordered by sort_method and limited by limit.
    db_videos = db_query.order_by(*sort_method).limit(limit).all()

    # Signal DB listener about finished read operation (Used in DB status indicator and logs)
    DatabaseListener.static_instance.finishRead.emit(threading.get_ident())

    # Convert Video objects to VideoD (Detached model) objects.
    videos = Video.to_video_ds(db_videos)

    # Close DB Query session.
    db_session.remove()

    logger.debug5("sane_yt_subfeed.debug.date_formats: {}".format(
        sane_yt_subfeed.debug.date_formats))
    sane_yt_subfeed.debug.clear_date_formats()

    return videos
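
The docstring above spells out the parameters but not how a caller combines them. Here is a minimal usage sketch, assuming get_db_videos_playback and the Video model are importable as in these examples, and that the detached VideoD objects mirror Video's title attribute (an assumption, not a documented API):

from sqlalchemy import desc

# Default behaviour: unwatched, downloaded, non-discarded videos,
# best watch_prio first.
videos = get_db_videos_playback(limit=20)

# Override filters and sort: downloaded videos only, newest publish date first.
recent = get_db_videos_playback(
    limit=10,
    filters=(Video.downloaded,),
    sort_method=(desc(Video.date_published),))

for video in recent:
    print(video.title)  # assumes VideoD exposes the same title field as Video
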
Example #10
def get_db_videos_subfeed(limit,
                          filters=(~Video.downloaded, ~Video.discarded)):
    """
    Get a list of videos from the database, limited by limit and filters.

    :param filters: Tuple of SQLAlchemy Column(Boolean) objects.
    :param limit:   Integer value determining how many videos to grab.
    :return:        A list of VideoD objects.
    """
    logger.info("Getting newest stored videos (filters={})".format(filters))

    # Check whether "filter by video age" is enabled, if so apply the filter to filters.
    filter_days = read_config('Requests', 'filter_videos_days_old')
    if filter_days >= 0:
        date = datetime.datetime.utcnow() - datetime.timedelta(
            days=filter_days)
        filters = filters + (Video.date_published > date, )

    # Signal DB listener about started read operation (Used in DB status indicator and logs)
    DatabaseListener.static_instance.startRead.emit(threading.get_ident())

    # Execute query, ordered by publish date (descending), filtered by filters and limited by limit.
    db_videos = db_session.query(Video).order_by(desc(
        Video.date_published)).filter(*filters).limit(limit).all()

    # Signal DB listener about finished read operation (Used in DB status indicator and logs)
    DatabaseListener.static_instance.finishRead.emit(threading.get_ident())

    # Convert Video objects to VideoD (Detached model) objects.
    videos = Video.to_video_ds(db_videos)

    # Close DB Query session.
    db_session.remove()

    logger.debug5("sane_yt_subfeed.debug.date_formats: {}".format(
        sane_yt_subfeed.debug.date_formats))
    sane_yt_subfeed.debug.clear_date_formats()

    return videos
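
The age cutoff above shows how extra conditions are appended to the filters tuple. The same pattern works from the caller's side: pass an extended tuple instead of editing the function. A small sketch under the same assumptions as before, using only columns that already appear in these examples:

import datetime

# Subfeed limited to videos published within the last 7 days; the function's
# own 'filter_videos_days_old' cutoff, if enabled, is simply ANDed on top.
cutoff = datetime.datetime.utcnow() - datetime.timedelta(days=7)
videos = get_db_videos_subfeed(
    limit=50,
    filters=(~Video.downloaded, ~Video.discarded,
             Video.date_published > cutoff))
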
Example #11
def get_best_downloaded_videos(limit,
                               filters=(~Video.watched, Video.downloaded,
                                        ~Video.discarded),
                               sort_method=(asc(Video.watch_prio),
                                            desc(Video.date_downloaded),
                                            desc(Video.date_published))):
    """

    :param filters: Tuple of filters
    :param sort_method:
    :param limit:
    :return: list(VideoD)
    """
    db_query = db_session.query(Video)

    db_query = db_query.filter(*filters)
    DatabaseListener.static_instance.startRead.emit(threading.get_ident())
    db_videos = db_query.order_by(*sort_method).limit(limit).all()
    DatabaseListener.static_instance.finishRead.emit(threading.get_ident())
    videos = Video.to_video_ds(db_videos)
    db_session.remove()
    return videos
Example #12
def get_newest_stored_videos(limit,
                             filters=(~Video.downloaded, ~Video.discarded)):
    """

    :param filters:
    :param limit:
    :return: list(VideoD)
    """
    logger.info("Getting newest stored videos (filters={})".format(filters))

    filter_days = read_config('Requests', 'filter_videos_days_old')
    if filter_days >= 0:
        date = datetime.datetime.utcnow() - datetime.timedelta(
            days=filter_days)
        filters = filters + (Video.date_published > date, )

    DatabaseListener.static_instance.startRead.emit(threading.get_ident())
    db_videos = db_session.query(Video).order_by(desc(
        Video.date_published)).filter(*filters).limit(limit).all()
    DatabaseListener.static_instance.finishRead.emit(threading.get_ident())
    videos = Video.to_video_ds(db_videos)
    db_session.remove()
    return videos
Example #13
    def new_file(self, vid_id, vid_path):
        vid = db_session.query(Video).get(vid_id)
        if vid:
            if not vid.downloaded:
                vid.vid_path = vid_path
                vid.date_downloaded = datetime.datetime.utcnow()
                vid.downloaded = True

                thumb_path = os.path.join(THUMBNAILS_PATH,
                                          '{}.jpg'.format(vid.video_id))
                downloaded_thumbnail = os.path.isfile(thumb_path)
                if downloaded_thumbnail and (not vid.thumbnail_path):
                    vid.thumbnail_path = thumb_path
                    self.logger.warning(
                        "Thumbnail downloaded, but path didn't exist in db, for video: {}"
                        .format(vid.__dict__))
                elif (not vid.thumbnail_path) or (not downloaded_thumbnail):
                    if not downloaded_thumbnail:
                        self.logger.warning(
                            "Thumbnail path in db, but not on disk, for video: {}"
                            .format(vid.__dict__))
                    self.logger.info("Downloading thumbnail for: {}".format(
                        vid.__dict__))
                    download_thumbnails_threaded([vid])

                self.logger.info(
                    "Updating existing record in db: {} - {}".format(
                        vid.title, vid.__dict__))
                db_session.commit()
                self.model.update_subfeed_videos_from_db()
                self.model.update_playback_videos_from_db()
            else:
                self.logger.info(
                    "File already downloaded by this system: {} - {}".format(
                        vid.title, vid.__dict__))
            db_session.remove()

        else:
            db_session.remove()
            youtube_keys = load_keys(1)
            self.logger.info(
                "Grabbing new video information from youtube: {}".format(
                    vid_id))
            response_videos = list_uploaded_videos_videos(
                youtube_keys[0], [vid_id], 1)
            if len(response_videos) > 0:
                video = response_videos[0]
                video.vid_path = vid_path
                video.downloaded = True
                video.watched = False
                video.date_downloaded = datetime.datetime.utcnow()
                self.logger.info("Downloading thumbnail: {} - {}".format(
                    video.title, video.__dict__))
                download_thumbnails_threaded([video])
                self.logger.info("Adding new file to db: {} - {}".format(
                    video.title, video.__dict__))
                UpdateVideo(video,
                            finished_listeners=[
                                self.model.playback_grid_view_listener.
                                downloadedVideosChangedinDB
                            ]).start()
            else:
                self.logger.warning(
                    "Video with id {}, not found on youtube servers".format(
                        vid_id))
Example #14
def cli(no_gui, test_channels, update_watch_prio, set_watched_day,
        refresh_and_print_subfeed, print_subscriptions, print_watched_videos,
        print_discarded_videos, print_downloaded_videos, print_playlist_items,
        print_playlist_items_url_only, auth_oauth2):
    logger = create_logger(__name__)

    if update_watch_prio:
        videos = db_session.query(Video).all()
        watch_prio = read_config('Play', 'default_watch_prio')
        logger.debug("Setting watch_prio {}, for: {} videos".format(
            watch_prio, len(videos)))
        for video in videos:
            video.watch_prio = watch_prio
        db_session.commit()
        return

    if set_watched_day:
        videos = db_session.query(Video).filter(
            or_(Video.downloaded == True, (Video.vid_path.is_(None)))).all()
        for video in videos:
            vid_age = datetime.datetime.utcnow() - video.date_published
            if vid_age > datetime.timedelta(days=int(set_watched_day)):
                logger.debug("Setting watched, {} - {} - {}".format(
                    vid_age, video.title, video.__dict__))
                video.watched = True
        db_session.commit()
        return

    if test_channels:
        run_channels_test()

    if refresh_and_print_subfeed:
        cli_refresh_and_print_subfeed()

    if print_subscriptions:
        cached_subs = True
        subs = get_subscriptions(cached_subs)
        for channel in subs:
            if channel.subscribed_override:
                print(("[{}]    {} [Subscription override]".format(
                    channel.id, channel.title)))
            else:
                print(("[{}]    {}".format(channel.id, channel.title)))

    if print_watched_videos:
        # Use SQLAlchemy's .is_(True); the original "is True" was a Python
        # identity check on the Column object, which made the filter always false.
        videos = db_session.query(Video).filter(
            and_(Video.watched.is_(True), Video.vid_path.isnot(None))).all()
        print_functions.print_videos(videos, path_only=True)

    if print_discarded_videos:
        videos = db_session.query(Video).filter(
            and_(Video.discarded.is_(True), Video.vid_path.isnot(None))).all()
        print_functions.print_videos(videos, path_only=True)

    if print_downloaded_videos:
        videos = db_session.query(Video).filter(
            and_(Video.downloaded.is_(True),
                 Video.vid_path.isnot(None))).all()
        print_functions.print_videos(videos, path_only=True)

    if print_playlist_items:
        youtube_auth_resource = load_keys(1)[0]
        playlist_video_items = []
        youtube.youtube_requests.list_uploaded_videos(youtube_auth_resource,
                                                      playlist_video_items,
                                                      print_playlist_items, 50)
        for vid in playlist_video_items:
            if print_playlist_items_url_only:
                print(vid.url_video)
            else:
                print(vid)

    if auth_oauth2:
        youtube_oauth = youtube_auth_oauth()
        if youtube_oauth is None:
            logger.critical("Failed to authenticate YouTube API OAuth2!")
            return None
        save_youtube_resource_oauth(youtube_oauth)

    if no_gui:
        run_with_cli()
    else:
        if LEGACY_EXCEPTION_HANDLER:
            """
            PyQT raises and catches exceptions, but doesn't pass them along. 
            Instead it just exits with a status of 1 to show an exception was caught. 
            """
            # Back up the reference to the exceptionhook
            sys._excepthook = sys.excepthook

            def my_exception_hook(exctype, value, traceback):
                global exc_id, exceptions
                # Ignore KeyboardInterrupt so a console python program can exit with Ctrl + C.
                if issubclass(exctype, KeyboardInterrupt):
                    sys.__excepthook__(exctype, value, traceback)
                    return

                # Log the exception with the logger
                logger.exception("Intercepted Exception #{}".format(exc_id),
                                 exc_info=(exctype, value, traceback))

                # Store intercepted exceptions in a reference list of lists
                exceptions.append([exctype, value, traceback, exc_id])

                # Increment Exception Identifier
                exc_id += 1

                # Call the normal Exception hook after
                # noinspection PyProtectedMember
                sys._excepthook(exctype, value, traceback)

                # sys.exit(1)       # Alternatively, exit

            # Set the exception hook to our wrapping function
            sys.excepthook = my_exception_hook

        run_with_gui()
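
Both CLI examples rely on the same exception-hook trick: since PyQt catches exceptions and exits with status 1 instead of propagating them, sys.excepthook is replaced with a wrapper that logs first and then delegates to the saved original hook. A self-contained sketch of that pattern using only the standard library (no sane_yt_subfeed specifics):

import logging
import sys

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger("excepthook_demo")

# Back up the current hook so the wrapper can delegate to it afterwards.
_original_excepthook = sys.excepthook


def logging_excepthook(exctype, value, traceback):
    # Let Ctrl+C exit normally instead of being logged as a crash.
    if issubclass(exctype, KeyboardInterrupt):
        sys.__excepthook__(exctype, value, traceback)
        return
    # Log the uncaught exception, then fall through to the original hook.
    logger.critical("Intercepted Exception", exc_info=(exctype, value, traceback))
    _original_excepthook(exctype, value, traceback)


sys.excepthook = logging_excepthook

if __name__ == "__main__":
    raise RuntimeError("demo: this is logged and then printed by the original hook")
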