Example #1
# Assumed imports for this snippet (pytube + moviepy 1.x); find_three_links and
# the tweepy `api` object are defined elsewhere in the project.
from pytube import YouTube
from moviepy.editor import VideoFileClip
from moviepy.video.fx.all import fadein, fadeout
from moviepy.audio.fx.all import audio_fadein, audio_fadeout


def video_song(song_name):
    # grab the first video's link
    searches = find_three_links(f"reginaldo rossi {song_name}")
    link = searches[0][1]

    if not link:
        return False

    yt = YouTube(link)
    input_video = "data/videos/clip.mp4"
    output_video = "data/videos/clip2.mp4"

    # download the vid
    try:
        yt.streams.filter(
            progressive=True, res='360p',
            file_extension='mp4').first().download(filename=input_video)

        # cut the clip, add video/audio fades in and out, then save it
        clip = VideoFileClip(input_video).subclip(10, 55)
        # clip = clip.resize(480, 360)
        clip = fadein(clip, duration=2)
        clip = audio_fadein(clip, duration=2)
        clip = fadeout(clip, duration=5)
        clip = audio_fadeout(clip, duration=5)
        clip.write_videofile(output_video, codec="libx264", audio_codec="aac")

        media = api.media_upload(filename=output_video,
                                 media_category="tweet_video")
        return {"media": media, "link": link}
    except AttributeError as e:
        # no matching stream was found (filter(...).first() returned None)
        print(e)
        return False
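A minimal usage sketch (not from the original project), assuming `api` is an authenticated tweepy.API instance; the song title and tweet text are placeholders:

result = video_song("some song title")
if result:
    api.update_status(status=result["link"],
                      media_ids=[result["media"].media_id])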
Example #2
    def mentionAdder(self):
        # assumes: import getpass, import moviepy.editor as mp,
        # import moviepy.video.fx.all as vfx, import moviepy.audio.fx.all as afx
        checkuser = getpass.getuser()
        clips_dir = 'C:/Users/' + checkuser + "/Documents/VCC/Today's Clips/"

        # load the clip, scale it to 1080p height, then normalize and lower the audio
        video = mp.VideoFileClip(clips_dir + self.title + '.mp4')
        video = video.resize(height=1080)
        video = video.fx(afx.audio_normalize).fx(afx.volumex, 0.6)

        # mention overlay: a small image pinned to the bottom-left for 6 seconds
        ment = (mp.ImageClip(clips_dir + self.title + '.png')
                  .set_duration(6)
                  .resize(0.3)
                  .set_position(("left", "bottom")))

        # fade the overlay in from white and back out to white
        ment = vfx.fadein(ment, 1, initial_color=[255, 255, 255])
        ment = vfx.fadeout(ment, 1, final_color=[255, 255, 255])

        final = mp.CompositeVideoClip([video, ment])
        final.write_videofile(clips_dir + self.title + 'Final.mp4', threads=3, fps=30)
        final.close()
Example #3
# Assumed imports for this snippet (moviepy 1.x); TIME_FORMAT (e.g. "%H:%M:%S")
# is defined elsewhere in the module.
import datetime
import moviepy.editor as mpy
import moviepy.video.fx.all as vfx
import moviepy.audio.fx.all as afx


def clipIt(vod, momentTime, sample_window, VOD_ID=None, suspenseSound=None):
    """
    returns vfx clip with fade
    """

    dt_sample_window = datetime.timedelta(0, sample_window)
    dt_sample_window_end = datetime.timedelta(0, 10)  # clip runs 10 s past the moment

    startTime = (momentTime - dt_sample_window).strftime(TIME_FORMAT)

    endTime = (momentTime + dt_sample_window_end).strftime(TIME_FORMAT)
    print(f"Found most engaged moment at: {startTime} to {endTime}", )

    clip = vod.subclip(startTime, endTime)

    # Add watermark
    if VOD_ID:
        txt_clip = mpy.TextClip(f"twitch.tv/videos/{VOD_ID}",
                                fontsize=14,
                                color="white")
        txt_clip = txt_clip.set_pos("bottom").set_duration(sample_window)
        clip = mpy.CompositeVideoClip([clip, txt_clip])

    # Add fade in and fade out
    FADE_DURATION = 3
    clip = vfx.fadeout(clip, FADE_DURATION)
    clip = vfx.fadein(clip, FADE_DURATION)

    if suspenseSound:
        # fade in some audio sound
        audioclip = mpy.AudioFileClip(suspenseSound).set_duration(
            sample_window)

        audioclip = afx.audio_fadeout(audioclip, FADE_DURATION)
        audioclip = afx.audio_fadein(audioclip, round(FADE_DURATION * 2))

        clipAudio = mpy.CompositeAudioClip([clip.audio, audioclip])
        clip.audio = clipAudio

    return clip
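A minimal usage sketch (not from the original project); the file names, timestamps, and VOD id below are placeholders, and it assumes TIME_FORMAT is something like "%H:%M:%S":

import datetime
import moviepy.editor as mpy

vod = mpy.VideoFileClip("vod.mp4")
moment = datetime.datetime(1900, 1, 1, 0, 12, 30)   # 00:12:30 into the VOD
highlight = clipIt(vod, moment, sample_window=20, VOD_ID="123456789")
highlight.write_videofile("highlight.mp4")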
Example #4
# Assumed imports for this snippet (moviepy 1.x):
import random
import numpy as np
from moviepy.editor import ImageClip
import moviepy.video.fx.all as vfx


def randomizeClip(clip, x):
    """Apply the effect named by `x` with randomized parameters; on failure, return the clip unchanged."""
    try:
        if x == "accel_decel":
            dur = random.randint(0, int(clip.duration) * 2)
            if dur == 0:
                dur = None
            a = random.uniform(-1, 1)
            s = random.uniform(0, 100)
            return vfx.accel_decel(clip, new_duration=dur, abruptness=a, soonness=s)
        elif x == "blackwhite":
            return vfx.blackwhite(clip)
        elif x == "blink":
            do = random.randint(0, 10)
            doff = random.randint(0, 10)
            return vfx.blink(clip, d_on=do, d_off=doff)
        elif x == "colorx":
            factor = random.randint(1, 1000)
            return vfx.colorx(clip, factor=factor)
        elif x == "crop":
            return clip
        elif x == "even_size":
            return vfx.even_size(clip)
        elif x == "fadein":
            d = random.randint(0, np.floor(clip.duration))
            i = random.random()
            return vfx.fadein(clip, d, i)
        elif x == "fadeout":
            d = random.randint(0, np.floor(clip.duration))
            i = random.random()
            return vfx.fadeout(clip, d, i)
        elif x == "freeze":
            t = random.randint(0, np.floor(clip.duration))
            td = random.randint(0, np.floor(clip.duration))
            return vfx.freeze(clip, t=t, total_duration=td)
        elif x == "freeze_region":
            # mask frames are (height, width), while clip.size is (width, height)
            return vfx.freeze_region(clip, mask=ImageClip(np.random.rand(clip.size[1], clip.size[0]), ismask=True))
        elif x == "gamma_corr":
            g = random.randint(0, 10)
            return vfx.gamma_corr(clip, g)
        elif x == "headblur":
            pass
        elif x == "invert_colors":
            return vfx.invert_colors(clip)
        elif x == "loop":
            ls = random.randint(0, 10)
            return vfx.loop(clip, n=ls)
        elif x == "lum_contrast":
            return vfx.lum_contrast(clip)
        elif x == "make_loopable":
            ls = random.randint(0, int(clip.duration))
            return vfx.make_loopable(clip, ls)
        elif x == "margin":
            s = clip.size[0] // random.randint(2, 10)
            o = random.random()
            return vfx.margin(clip, left=s, right=s, top=s, bottom=s, opacity=o)
        elif x == "mask_and":
            return vfx.mask_and(clip, ImageClip(np.random.rand(clip.size[1], clip.size[0]), ismask=True))
        elif x == "mask_color":
            thr = random.random()
            return vfx.mask_color(clip, thr=thr)
        elif x == "mask_or":
            return vfx.mask_or(clip, ImageClip(np.random.rand(clip.size[1], clip.size[0]), ismask=True))
        elif x == "mirror_x":
            return vfx.mirror_x(clip)
        elif x == "mirror_y":
            return vfx.mirror_y(clip)
        elif x == "painting":
            s = random.uniform(0, np.floor(clip.duration))
            b = random.randint(0, 100)/1000
            return vfx.painting(clip, saturation=s, black=b)
        elif x == "resize":
            u = random.random()
            return vfx.resize(clip, u)
        elif x == "rotate":
            u = random.uniform(0, 360)
            return vfx.rotate(clip, u)
        elif x == "scroll":
            return clip
        elif x == "speedx":
            u = random.uniform(0, 100)
            return vfx.speedx(clip, u)
        elif x == "supersample":
            g = random.randint(0, 10)
            d = int(clip.duration / 2)
            return vfx.supersample(clip, d, g)
        elif x == "time_mirror":
            return vfx.time_mirror(clip)
        elif x == "time_symmetrize":
            return vfx.time_symmetrize(clip)
        else:
            return clip
    except Exception:
        # if an effect raises for any reason, fall back to the untouched clip
        return clip
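A quick usage sketch (file names are placeholders), chaining a couple of randomly chosen effects:

import random
from moviepy.editor import VideoFileClip

clip = VideoFileClip("input.mp4")
for name in random.sample(["blackwhite", "mirror_x", "fadein", "time_mirror"], k=2):
    clip = randomizeClip(clip, name)
clip.write_videofile("randomized.mp4")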
Example #5
    # assumes: import numpy as np, from moviepy import editor, import moviepy.video.fx.all as vfx
    # clip: editor.VideoFileClip = clip.subclip(0, 4)
    # if len(clips) > 4: break

    # -------------------------------------------------------------
    # CLIP MARGIN
    # Adding margin to this clip.
    # https://zulko.github.io/moviepy/examples/ukulele_concerto.html
    # -------------------------------------------------------------
    clip: editor.VideoFileClip = vfx.margin(clip, mar=20, color=(245, 248, 250))

    # -------------------------------------------------------------
    # CLIP TRANSFORMATIONS
    # Applying different video effects and styling to all Clips.
    # -------------------------------------------------------------
    clip: editor.VideoFileClip = vfx.fadein(clip, duration=1)
    clip: editor.VideoFileClip = vfx.fadeout(clip, duration=1)
    clip: editor.VideoFileClip = vfx.lum_contrast(clip, contrast=0.2, lum=3)
    # clip: editor.VideoFileClip = vfx.speedx(clip, factor=0.90)

    # -------------------------------------------------------------
    # CLIP AUDIO
    # Detecting the audio volumeX factor dynamically.
    # https://stackoverflow.com/questions/28119082
    # https://stackoverflow.com/questions/9012761
    # The curve has been fit with this tool:
    # http://www.colby.edu/chemistry/PChem/scripts/lsfitpl.html
    # -------------------------------------------------------------
    sound: np.ndarray = clip.audio.to_soundarray(fps=22000)
    sound: np.ndarray = sound[sound > 0]
    sound.sort()
    PERCENTILE: float = 0.1
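The snippet is cut off here. As a hedged sketch (not the original author's code), the percentile level could feed a volumex factor using a simple linear scaling instead of the fitted curve mentioned in the comments; the helper name and `target_level` are assumptions:

import numpy as np
import moviepy.editor as editor
import moviepy.audio.fx.all as afx

def normalize_clip_volume(clip: editor.VideoFileClip,
                          percentile: float = 0.1,
                          target_level: float = 0.8) -> editor.VideoFileClip:
    # hypothetical helper: take the level at the top `percentile` of positive
    # samples and scale the audio so that level lands near `target_level`
    sound = clip.audio.to_soundarray(fps=22000)
    sound = np.sort(sound[sound > 0])
    level = float(sound[int(len(sound) * (1.0 - percentile))])
    return clip.fx(afx.volumex, target_level / level)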