Example #1
0
def test_download_sound__episode_downloaded__file_correct__ignore_downloading__ok(
    generate_rss_mock,
    db_objects,
    podcast,
    episode_data,
    mocked_youtube: MockYoutube,
    mocked_s3: MockS3Client,
):
    new_episode_data = {
        **episode_data,
        **{
            "status": "published",
            "source_id": mocked_youtube.video_id,
            "watch_url": mocked_youtube.watch_url,
            "file_size": 1024,
        },
    }
    episode: Episode = Episode.create(**new_episode_data)
    mocked_s3.get_file_size.return_value = episode.file_size
    generate_rss_mock.return_value = f"file_{episode.source_id}.mp3"
    result = download_episode(episode.watch_url, episode.id)

    with db_objects.allow_sync():
        updated_episode: Episode = Episode.select().where(
            Episode.id == episode.id).first()

    generate_rss_mock.assert_called_with(episode.podcast_id)
    assert result == EPISODE_DOWNLOADING_IGNORED
    assert not mocked_youtube.download.called
    assert updated_episode.status == "published"
    assert updated_episode.published_at == updated_episode.created_at
Example #2
0
def test_download_sound__youtube_exception__download_rollback(
    download_audio_mock,
    db_objects,
    podcast,
    episode_data,
    mocked_youtube: MockYoutube,
    mocked_s3: MockS3Client,
):
    new_episode_data = {
        **episode_data,
        **{
            "status": "new",
            "source_id": mocked_youtube.video_id,
            "watch_url": mocked_youtube.watch_url,
            "file_size": 1024,
        },
    }
    episode: Episode = Episode.create(**new_episode_data)

    download_audio_mock.side_effect = YoutubeException(
        "Youtube video is not available")
    result = download_episode(episode.watch_url, episode.id)

    with db_objects.allow_sync():
        updated_episode: Episode = Episode.select().where(
            Episode.id == episode.id).first()

    download_audio_mock.assert_called_with(episode.watch_url,
                                           episode.file_name)

    assert result == EPISODE_DOWNLOADING_ERROR
    assert updated_episode.status == "new"
    assert updated_episode.published_at is None
Example #3
0
    def test_delete__episodes_in_another_podcast__ok(
        self, client, episode_data, user, mocked_s3, dbs
    ):
        podcast_1 = await_(Podcast.async_create(dbs, **get_podcast_data(created_by_id=user.id)))
        episode_data["status"] = Episode.Status.PUBLISHED
        episode_data["podcast_id"] = podcast_1.id
        episode_1 = await_(Episode.async_create(dbs, **episode_data))
        episode_1_1 = await_(Episode.async_create(dbs, **get_episode_data(podcast_1, "published")))

        podcast_2 = await_(Podcast.async_create(dbs, **get_podcast_data()))
        episode_data["status"] = Episode.Status.PUBLISHED
        episode_data["podcast_id"] = podcast_2.id
        # creating episode with same `source_id` in another podcast
        episode_2 = await_(Episode.async_create(dbs, **episode_data))

        await_(dbs.commit())
        client.login(user)
        url = self.url.format(id=podcast_1.id)
        response = client.delete(url)
        assert response.status_code == 200
        assert await_(Podcast.async_get(dbs, id=podcast_1.id)) is None
        assert await_(Episode.async_get(dbs, id=episode_1.id)) is None

        assert await_(Podcast.async_get(dbs, id=podcast_2.id)) is not None
        assert await_(Episode.async_get(dbs, id=episode_2.id)) is not None

        mocked_s3.delete_files_async.assert_called_with([episode_1_1.file_name])
Example #4
0
def _update_episode_data(source_id: str, update_data: dict):
    """ Allows to update data for episodes (filtered by source_id)"""

    logger.info("[%s] Episodes update data: %s", source_id, update_data)
    Episode.update(**update_data).where(
        Episode.source_id == source_id,
        Episode.status != Episode.STATUS_ARCHIVED).execute()
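A minimal usage sketch for the helper above; the call shape matches its signature, but the literal values are purely illustrative:

# Hypothetical call: mark every non-archived episode with this source_id as
# published and record the uploaded file's size (values are illustrative only).
_update_episode_data(
    source_id="some-video-id",
    update_data={"status": "published", "file_size": 2048},
)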
Example #5
0
    def test_delete__episodes_deleted_too__ok(self, client, podcast, user, mocked_s3, dbs):
        episode_1 = await_(Episode.async_create(dbs, **get_episode_data(podcast)))
        episode_2 = await_(Episode.async_create(dbs, **get_episode_data(podcast, "published")))
        await_(dbs.commit())

        client.login(user)
        url = self.url.format(id=podcast.id)
        response = client.delete(url)
        assert response.status_code == 200
        assert await_(Podcast.async_get(dbs, id=podcast.id)) is None
        assert await_(Episode.async_get(dbs, id=episode_1.id)) is None
        assert await_(Episode.async_get(dbs, id=episode_2.id)) is None

        mocked_s3.delete_files_async.assert_called_with([episode_2.file_name])
Example #6
0
def test_generate_rss__ok(db_objects, podcast, episode_data, mocked_s3):

    new_episode_data = {
        **episode_data,
        **{
            "source_id": generate_video_id(),
            "status": "new"
        },
    }
    episode_new: Episode = Episode.create(**new_episode_data)

    new_episode_data = {
        **episode_data,
        **{
            "source_id": generate_video_id(),
            "status": "downloading"
        },
    }
    episode_downloading: Episode = Episode.create(**new_episode_data)

    new_episode_data = {
        **episode_data,
        **{
            "source_id": generate_video_id(),
            "status": "published",
            "published_at": datetime.utcnow(),
        },
    }
    episode_published: Episode = Episode.create(**new_episode_data)

    rss_path = generate_rss(podcast.id)

    mocked_s3.upload_file.assert_called_with(
        rss_path,
        f"{podcast.publish_id}.xml",
        remote_path=settings.S3_BUCKET_RSS_PATH)

    with open(rss_path) as file:
        generated_rss_content = file.read()

    assert episode_published.title in generated_rss_content
    assert episode_published.description in generated_rss_content
    assert episode_published.file_name in generated_rss_content

    assert episode_new.source_id not in generated_rss_content
    assert episode_downloading.source_id not in generated_rss_content

    os.remove(rss_path)
Example #7
0
    async def _delete_files(self, podcast: Podcast, episodes: List[Episode]):
        podcast_file_names = {episode.file_name for episode in episodes}
        same_file_episodes = await self.request.app.objects.execute(
            Episode.select().where(
                Episode.podcast_id != podcast.id,
                Episode.file_name.in_(podcast_file_names),
            ))
        exist_file_names = {
            episode.file_name
            for episode in same_file_episodes or []
        }

        files_to_remove = podcast_file_names - exist_file_names
        files_to_skip = exist_file_names & podcast_file_names
        if files_to_skip:
            self.logger.warning(
                "Other episodes still reference files %s. Skipping removal of these files.",
                files_to_skip,
            )

        storage = StorageS3()
        await storage.delete_files_async(list(files_to_remove))
        await storage.delete_files_async(
            [f"{podcast.publish_id}.xml"],
            remote_path=settings.S3_BUCKET_RSS_PATH)
Example #8
0
    def test_delete__ok(self, client, episode, user, mocked_s3, dbs):
        client.login(user)
        url = self.url.format(id=episode.id)
        response = client.delete(url)
        assert response.status_code == 204
        assert await_(Episode.async_get(dbs, id=episode.id)) is None
        mocked_s3.delete_files_async.assert_called_with([episode.file_name])
Example #9
0
    def test_download_episode__unexpected_error__ok(
        self, episode, mocked_youtube, dbs
    ):
        mocked_youtube.download.side_effect = RuntimeError("Oops")
        result = await_(DownloadEpisodeTask(db_session=dbs).run(episode.id))
        episode = await_(Episode.async_get(dbs, id=episode.id))
        assert result == FinishCode.ERROR
        assert episode.status == Episode.Status.ERROR
        assert episode.published_at is None
Example #10
0
    def test_generate__single_podcast__ok(self, user, mocked_s3, dbs):

        podcast_1: Podcast = await_(
            Podcast.async_create(dbs, **get_podcast_data()))
        podcast_2: Podcast = await_(
            Podcast.async_create(dbs, **get_podcast_data()))

        episode_data = get_episode_data(podcast_1, creator=user)
        episode_data["status"] = Episode.Status.NEW
        episode_new = await_(Episode.async_create(dbs, **episode_data))

        episode_data = get_episode_data(podcast_1, creator=user)
        episode_data["status"] = Episode.Status.DOWNLOADING
        episode_downloading = await_(Episode.async_create(dbs, **episode_data))

        episode_data = get_episode_data(podcast_1, creator=user)
        episode_data["status"] = Episode.Status.PUBLISHED
        episode_data["published_at"] = datetime.now()
        episode_published = await_(Episode.async_create(dbs, **episode_data))

        episode_data = get_episode_data(podcast_2, creator=user)
        episode_data["status"] = Episode.Status.PUBLISHED
        episode_podcast_2 = await_(Episode.async_create(dbs, **episode_data))
        await_(dbs.commit())

        expected_file_path = mocked_s3.tmp_upload_dir / f"{podcast_1.publish_id}.xml"
        generate_rss_task = tasks.GenerateRSSTask(db_session=dbs)
        result_code = await_(generate_rss_task.run(podcast_1.id))
        assert result_code == FinishCode.OK

        assert os.path.exists(
            expected_file_path), f"File {expected_file_path} wasn't uploaded"
        with open(expected_file_path) as file:
            generated_rss_content = file.read()

        assert episode_published.title in generated_rss_content
        assert episode_published.description in generated_rss_content
        assert episode_published.file_name in generated_rss_content

        for episode in [episode_new, episode_downloading, episode_podcast_2]:
            assert episode.source_id not in generated_rss_content, f"{episode} in RSS {podcast_1}"

        podcast_1 = await_(Podcast.async_get(dbs, id=podcast_1.id))
        assert podcast_1.rss_link == str(expected_file_path)
Example #11
0
def test_download_sound__episode_new__correct_downloading(
    download_audio_mock,
    generate_rss_mock,
    db_objects,
    podcast,
    episode_data,
    mocked_youtube: MockYoutube,
    mocked_s3: MockS3Client,
    mocked_ffmpeg: Mock,
):
    new_episode_data = {
        **episode_data,
        **{
            "status": "new",
            "source_id": mocked_youtube.video_id,
            "watch_url": mocked_youtube.watch_url,
            "file_size": 1024,
        },
    }
    episode: Episode = Episode.create(**new_episode_data)

    download_audio_mock.return_value = episode.file_name
    generate_rss_mock.return_value = f"file_{episode.source_id}.mp3"
    result = download_episode(episode.watch_url, episode.id)

    with db_objects.allow_sync():
        updated_episode: Episode = Episode.select().where(
            Episode.id == episode.id).first()

    generate_rss_mock.assert_called_with(episode.podcast_id)
    download_audio_mock.assert_called_with(episode.watch_url,
                                           episode.file_name)
    mocked_ffmpeg.assert_called_with(episode.file_name)

    assert result == EPISODE_DOWNLOADING_OK
    assert updated_episode.status == "published"
    assert updated_episode.published_at == updated_episode.created_at
Example #12
0
    def test_download_episode__file_correct__ignore(
        self,
        episode_data,
        podcast_data,
        mocked_youtube,
        mocked_ffmpeg,
        mocked_s3,
        mocked_generate_rss_task,
        dbs,
    ):
        podcast_1 = await_(Podcast.async_create(dbs, **get_podcast_data()))
        podcast_2 = await_(Podcast.async_create(dbs, **get_podcast_data()))

        episode_data.update({
            "status": "published",
            "source_id": mocked_youtube.video_id,
            "watch_url": mocked_youtube.watch_url,
            "file_size": 1024,
            "podcast_id": podcast_1.id,
        })
        await_(Episode.async_create(dbs, **episode_data))
        episode_data["status"] = "new"
        episode_data["podcast_id"] = podcast_2.id
        episode_2 = await_(Episode.async_create(dbs, **episode_data))
        await_(dbs.commit())

        mocked_s3.get_file_size.return_value = episode_2.file_size
        result = await_(DownloadEpisodeTask(db_session=dbs).run(episode_2.id))
        await_(dbs.refresh(episode_2))
        mocked_generate_rss_task.run.assert_called_with(
            podcast_1.id, podcast_2.id)
        assert result == FinishCode.SKIP
        assert not mocked_youtube.download.called
        assert not mocked_ffmpeg.called
        assert episode_2.status == Episode.Status.PUBLISHED
        assert episode_2.published_at == episode_2.created_at
Example #13
0
def _update_all_rss(source_id: str):
    """ Allows to regenerate rss for all podcasts with requested episode (by source_id) """

    logger.info(
        "Episodes with source #%s: updating RSS for all podcasts that include them",
        source_id,
    )

    affected_episodes = list(
        Episode.select(Episode.podcast).where(Episode.source_id == source_id))
    podcast_ids = [episode.podcast_id for episode in affected_episodes]
    logger.info("Found podcasts for rss updates: %s", podcast_ids)

    for podcast_id in podcast_ids:
        generate_rss(podcast_id)
Example #14
0
    def test_delete__same_episode_exists__ok(
        self,
        client,
        podcast,
        episode_data,
        mocked_s3,
        same_episode_status,
        delete_called,
        dbs,
    ):
        source_id = get_video_id()

        user_1 = create_user(dbs)
        user_2 = create_user(dbs)

        podcast_1 = await_(
            Podcast.async_create(dbs,
                                 db_commit=True,
                                 **get_podcast_data(created_by_id=user_1.id)))
        podcast_2 = await_(
            Podcast.async_create(dbs,
                                 db_commit=True,
                                 **get_podcast_data(created_by_id=user_2.id)))

        episode_data["created_by_id"] = user_1.id
        _ = create_episode(dbs,
                           episode_data,
                           podcast_1,
                           status=same_episode_status,
                           source_id=source_id)

        episode_data["created_by_id"] = user_2.id
        episode_2 = create_episode(dbs,
                                   episode_data,
                                   podcast_2,
                                   status=Episode.Status.NEW,
                                   source_id=source_id)

        url = self.url.format(id=episode_2.id)
        client.login(user_2)
        response = client.delete(url)
        assert response.status_code == 204, f"Delete API is not available: {response.text}"
        assert await_(Episode.async_get(dbs, id=episode_2.id)) is None
        if delete_called:
            mocked_s3.delete_files_async.assert_called_with(
                [episode_2.file_name])
        else:
            assert not mocked_s3.delete_files_async.called
Example #15
0
    def test_download_episode__upload_to_s3_failed__fail(
            self, episode, mocked_youtube, mocked_ffmpeg, mocked_s3,
            mocked_generate_rss_task, dbs):
        file_path = settings.TMP_AUDIO_PATH / episode.file_name
        with open(file_path, "wb") as file:
            file.write(b"EpisodeData")

        mocked_s3.upload_file.side_effect = lambda *_, **__: ""

        result = await_(DownloadEpisodeTask(db_session=dbs).run(episode.id))
        assert result == FinishCode.ERROR

        mocked_generate_rss_task.run.assert_not_called()
        episode = await_(Episode.async_get(dbs, id=episode.id))
        assert episode.status == Episode.Status.ERROR
        assert episode.published_at is None
Example #16
0
    async def _delete_file(self, episode: Episode):
        """ Removing file associated with requested episode """

        same_file_episodes = await self.request.app.objects.execute(
            Episode.select().where(
                Episode.source_id == episode.source_id,
                Episode.status != Episode.STATUS_NEW,
                Episode.id != episode.id,
            ))
        if same_file_episodes:
            episode_ids = ",".join(
                [f"#{episode.id}" for episode in same_file_episodes])
            self.logger.warning(
                f"Other episodes still reference file {episode.file_name}: {episode_ids}. "
                f"Skipping file removal.")
            return

        return await StorageS3().delete_files_async([episode.file_name])
Example #17
0
def create_episode(
    db_session: AsyncSession,
    episode_data: dict,
    podcast: Podcast,
    status: Episode.Status = Episode.Status.NEW,
    file_size: int = 0,
    source_id: str = None,
) -> Episode:
    src_id = source_id or get_video_id()
    episode_data.update(
        {
            "podcast_id": podcast.id,
            "source_id": src_id,
            "file_name": f"file_name_{src_id}.mp3",
            "status": status,
            "file_size": file_size,
        }
    )
    return await_(Episode.async_create(db_session, db_commit=True, **episode_data))
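For reference, a brief sketch of how this factory might be called in a test, mirroring the delete tests above; the dbs, episode_data, and podcast objects are assumed to come from fixtures:

# Hypothetical usage inside a test: create a published episode with an explicit
# source_id so another podcast can later reuse the same file (fixtures assumed).
episode = create_episode(
    dbs,
    episode_data,
    podcast,
    status=Episode.Status.PUBLISHED,
    source_id=get_video_id(),
)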
Example #18
0
    def test_download_episode__downloading_failed__roll_back_changes__ok(
            self, episode, mocked_youtube, mocked_ffmpeg, mocked_s3,
            mocked_generate_rss_task, dbs):
        file_path = settings.TMP_AUDIO_PATH / episode.file_name
        with open(file_path, "wb") as file:
            file.write(b"EpisodeData")

        mocked_youtube.download.side_effect = DownloadError(
            "Video is not available")

        result = await_(DownloadEpisodeTask(db_session=dbs).run(episode.id))

        episode = await_(Episode.async_get(dbs, id=episode.id))
        mocked_youtube.download.assert_called_with([episode.watch_url])
        mocked_s3.upload_file.assert_not_called()
        mocked_generate_rss_task.run.assert_not_called()

        assert result == FinishCode.ERROR
        assert episode.status == Episode.Status.ERROR
        assert episode.published_at is None
Example #19
0
    def test_download_episode__ok(self, episode, mocked_youtube, mocked_ffmpeg,
                                  mocked_s3, mocked_generate_rss_task, dbs):
        file_path = settings.TMP_AUDIO_PATH / episode.file_name
        with open(file_path, "wb") as file:
            file.write(b"EpisodeData")

        result = await_(DownloadEpisodeTask(db_session=dbs).run(episode.id))
        episode = await_(Episode.async_get(dbs, id=episode.id))

        mocked_youtube.download.assert_called_with([episode.watch_url])
        mocked_ffmpeg.assert_called_with(episode.file_name)
        self.assert_called_with(
            mocked_s3.upload_file,
            src_path=str(file_path),
            dst_path=settings.S3_BUCKET_AUDIO_PATH,
        )
        mocked_generate_rss_task.run.assert_called_with(episode.podcast_id)

        assert result == FinishCode.OK
        assert episode.status == Episode.Status.PUBLISHED
        assert episode.published_at == episode.created_at
Example #20
0
    def test_download_episode__file_bad_size__ignore(
        self,
        episode_data,
        mocked_youtube,
        mocked_ffmpeg,
        mocked_s3,
        mocked_generate_rss_task,
        dbs,
    ):

        episode_data.update({
            "status": "published",
            "source_id": mocked_youtube.video_id,
            "watch_url": mocked_youtube.watch_url,
            "file_size": 1024,
        })
        episode = await_(
            Episode.async_create(dbs, db_commit=True, **episode_data))

        file_path = settings.TMP_AUDIO_PATH / episode.file_name
        with open(file_path, "wb") as file:
            file.write(b"EpisodeData")

        mocked_s3.get_file_size.return_value = 32

        result = await_(DownloadEpisodeTask(db_session=dbs).run(episode.id))

        await_(dbs.refresh(episode))
        mocked_youtube.download.assert_called_with([episode.watch_url])
        mocked_ffmpeg.assert_called_with(episode.file_name)
        self.assert_called_with(
            mocked_s3.upload_file,
            src_path=str(file_path),
            dst_path=settings.S3_BUCKET_AUDIO_PATH,
        )
        mocked_generate_rss_task.run.assert_called_with(episode.podcast_id)

        assert result == FinishCode.OK
        assert episode.status == Episode.Status.PUBLISHED
        assert episode.published_at == episode.created_at
Example #21
0
def episode(podcast, user, loop, dbs) -> Episode:
    episode_data = get_episode_data(podcast, creator=user)
    episode = loop.run_until_complete(Episode.async_create(
        dbs, **episode_data))
    loop.run_until_complete(dbs.commit())
    return episode
Example #22
0
def download_episode(youtube_link: str, episode_id: int):
    """ Allows to download youtube video and recreate specific rss (by requested episode_id) """

    episode = Episode.get_by_id(episode_id)
    logger.info(
        "=== [%s] START downloading process URL: %s FILENAME: %s ===",
        episode.source_id,
        youtube_link,
        episode.file_name,
    )
    stored_file_size = StorageS3().get_file_size(episode.file_name)

    if stored_file_size and stored_file_size == episode.file_size:
        logger.info(
            "[%s] Episode is already downloaded and the file is correct. Download will be skipped.",
            episode.source_id,
        )
        _update_episodes(episode.source_id, stored_file_size)
        _update_all_rss(episode.source_id)
        return EPISODE_DOWNLOADING_IGNORED

    elif episode.status not in (Episode.STATUS_NEW,
                                Episode.STATUS_DOWNLOADING):
        logger.error(
            "[%s] Episode is %s but the stored file size is incorrect. "
            "Removing the broken file %s and re-downloading it from YouTube.",
            episode.source_id,
            episode.status,
            episode.file_name,
        )
        StorageS3().delete_file(episode.file_name)

    logger.info(
        "[%s] Mark all episodes with source_id [%s] as downloading.",
        episode.source_id,
        episode.source_id,
    )
    query = Episode.update(status=Episode.STATUS_DOWNLOADING).where(
        Episode.source_id == episode.source_id,
        Episode.status != Episode.STATUS_ARCHIVED,
    )
    query.execute()

    try:
        result_filename = youtube_utils.download_audio(youtube_link,
                                                       episode.file_name)
    except YoutubeException as error:
        logger.exception(
            "=== [%s] Downloading FAILED: Could not download track: %s. "
            "All episodes will be rolled back to NEW state",
            episode.source_id,
            error,
        )
        Episode.update(status=Episode.STATUS_NEW).where(
            Episode.source_id == episode.source_id).execute()
        return EPISODE_DOWNLOADING_ERROR

    logger.info("=== [%s] DOWNLOADING was done ===", episode.source_id)

    youtube_utils.ffmpeg_preparation(result_filename)
    logger.info("=== [%s] POST PROCESSING was done === ", episode.source_id)

    # ----- uploading file to cloud -----
    remote_url = podcast_utils.upload_episode(result_filename)
    if not remote_url:
        logger.warning("=== [%s] UPLOADING was broken === ")
        _update_episodes(episode.source_id,
                         file_size=0,
                         status=Episode.STATUS_ERROR)
        return EPISODE_DOWNLOADING_ERROR

    _update_episode_data(episode.source_id, {
        "file_name": result_filename,
        "remote_url": remote_url
    })
    logger.info("=== [%s] UPLOADING was done === ", episode.source_id)
    # -----------------------------------

    # ----- update episodes data -------
    _update_episodes(episode.source_id,
                     file_size=StorageS3().get_file_size(result_filename))
    _update_all_rss(episode.source_id)
    podcast_utils.delete_file(
        os.path.join(settings.TMP_AUDIO_PATH, result_filename))
    # -----------------------------------

    logger.info("=== [%s] DOWNLOADING total finished ===", episode.source_id)
    return EPISODE_DOWNLOADING_OK
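For orientation, a short sketch of how a caller observes the outcome of this task; the result constants are the module-level codes asserted on throughout the tests above, and calling the function inline here is illustrative only (the web handler in Example #25 enqueues tasks.download_episode instead):

# Hypothetical direct call; in production this runs as a background task
# rather than being invoked inline.
result = download_episode(episode.watch_url, episode.id)
assert result in (
    EPISODE_DOWNLOADING_OK,
    EPISODE_DOWNLOADING_IGNORED,
    EPISODE_DOWNLOADING_ERROR,
)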
Example #23
0
    async def _get_episode_data(self, same_episode: Episode) -> dict:
        """
        Allows to get information for new episode.
        This info can be given from same episode (episode which has same source_id)
        and part information - from YouTube.

        :return: dict with information for new episode
        """

        if same_episode:
            logger.info(
                f"Episode for video {self.source_id} already exists: {same_episode}."
            )
            same_episode_data = same_episode.to_dict()
        else:
            logger.info(
                f"New episode for video {self.source_id} will be created.")
            same_episode_data = {}

        extract_error, youtube_info = await get_youtube_info(self.source_url)

        if youtube_info:
            logger.info("Episode will be created from the YouTube video.")
            new_episode_data = {
                "source_id": self.source_id,
                "watch_url": youtube_info.watch_url,
                "title": self._replace_special_symbols(youtube_info.title),
                "description": self._replace_special_symbols(youtube_info.description),
                "image_url": youtube_info.thumbnail_url,
                "author": youtube_info.author,
                "length": youtube_info.length,
                "file_size": same_episode_data.get("file_size"),
                "file_name": same_episode_data.get("file_name") or get_file_name(self.source_id),
                "remote_url": same_episode_data.get("remote_url"),
            }

        elif same_episode:
            logger.info(
                "Episode will be copied from other episode with same video.")
            same_episode_data.pop("id", None)
            new_episode_data = same_episode_data

        else:
            raise YoutubeFetchError(
                f"Extracting data for new Episode failed: {extract_error}")

        new_episode_data.update({
            "podcast_id": self.podcast_id,
            "created_by_id": self.user_id
        })
        return new_episode_data
Example #24
0
    async def _get_episode_data(self, same_episode: Episode, podcast_id: int,
                                video_id: str, youtube_link: str) -> dict:
        """
        Allows to get information for new episode.
        This info can be given from same episode (episode which has same source_id)
        and part information - from YouTube.

        :return: dict with information for new episode
        """

        if same_episode:
            self.logger.info(
                f"Episode for video {video_id} already exists: {same_episode}. "
                f"Using it as the source of information about the downloaded file.")
            same_episode_data = same_episode.to_dict(field_names=[
                "source_id",
                "watch_url",
                "title",
                "description",
                "image_url",
                "author",
                "length",
                "file_size",
                "file_name",
                "remote_url",
            ])
        else:
            self.logger.info(
                f"New episode for video {video_id} will be created.")
            same_episode_data = {}

        youtube_info = None
        try:
            youtube_info = await get_youtube_info(youtube_link)
        except YoutubeExtractInfoError:
            add_message(self.request,
                        "Sorry, fetching the YouTube video failed",
                        kind="error")

        if youtube_info:
            new_episode_data = {
                "source_id": video_id,
                "watch_url": youtube_info.watch_url,
                "title": self._replace_special_symbols(youtube_info.title),
                "description": self._replace_special_symbols(youtube_info.description),
                "image_url": youtube_info.thumbnail_url,
                "author": youtube_info.author,
                "length": youtube_info.length,
                "file_size": same_episode_data.get("file_size"),
                "file_name": same_episode_data.get("file_name") or get_file_name(video_id),
                "remote_url": same_episode_data.get("remote_url"),
            }
            message = "Episode was successfully created from the YouTube video."
            self.logger.info(message)
            self.logger.debug("New episode data = %s", new_episode_data)
            add_message(self.request, message)
        elif same_episode:
            message = "Episode will be copied from other episode with same video."
            self.logger.info(message)
            add_message(self.request, message)
            new_episode_data = same_episode_data
        else:
            raise YoutubeFetchError

        new_episode_data.update({
            "podcast_id": podcast_id,
            "created_by_id": self.user.id
        })
        return new_episode_data
Example #25
0
    async def post(self):
        podcast_id = int(self.request.match_info.get("podcast_id"))
        podcast: Podcast = await self._get_object()
        cleaned_data = await self._validate()
        youtube_link = cleaned_data["youtube_link"].strip()

        video_id = get_video_id(youtube_link)
        if not video_id:
            add_message(self.request,
                        f"YouTube link is not correct: {youtube_link}")
            return redirect(self.request,
                            "podcast_details",
                            podcast_id=podcast_id)

        same_episodes: Iterable[
            Episode] = await self.request.app.objects.execute(
                Episode.select().where(Episode.source_id == video_id).order_by(
                    Episode.created_at.desc()))
        episode_in_podcast, last_same_episode = None, None
        for episode in same_episodes:
            last_same_episode = last_same_episode or episode
            if episode.podcast_id == podcast_id:
                episode_in_podcast = episode
                break

        if episode_in_podcast:
            self.logger.info(
                f"Episode for video [{video_id}] already exists for current "
                f"podcast {podcast_id}. Redirecting to {episode_in_podcast}..."
            )
            add_message(self.request, "Episode already exists in podcast.")
            return redirect(
                self.request,
                "episode_details",
                podcast_id=podcast_id,
                episode_id=episode_in_podcast.id,
            )

        try:
            episode_data = await self._get_episode_data(
                same_episode=last_same_episode,
                podcast_id=podcast_id,
                video_id=video_id,
                youtube_link=youtube_link,
            )
        except YoutubeFetchError:
            return redirect(self.request,
                            "podcast_details",
                            podcast_id=podcast_id)

        episode = await self.request.app.objects.create(
            Episode, **episode_data)

        if podcast.download_automatically:
            episode.status = Episode.STATUS_DOWNLOADING
            await self.request.app.objects.update(episode)
            await self._enqueue_task(
                tasks.download_episode,
                youtube_link=episode.watch_url,
                episode_id=episode.id,
            )
            add_message(
                self.request,
                f"Downloading of YouTube video {episode.source_id} has started.",
            )

        if is_mobile_app(self.request):
            return redirect(self.request, "progress")

        return redirect(
            self.request,
            "episode_details",
            podcast_id=podcast_id,
            episode_id=str(episode.id),
        )
Example #26
0
def episode(db_objects, episode_data):
    with db_objects.allow_sync():
        yield Episode.create(**episode_data)
Example #27
0
def teardown_module(module):
    print(f"module teardown {module}")
    Episode.truncate_table()
    Podcast.truncate_table()
    User.truncate_table()