Example #1
def test_release_of_file_via_close():
    # Create a random video file.
    red = ColorClip((256, 200), color=(255, 0, 0))
    green = ColorClip((256, 200), color=(0, 255, 0))
    blue = ColorClip((256, 200), color=(0, 0, 255))

    red.fps = green.fps = blue.fps = 10

    # Repeat this so we can confirm there are no conflicts.
    for i in range(3):
        # Get the name of a temporary file we can use.
        local_video_filename = os.path.join(
            TMP_DIR,
            "test_release_of_file_via_close_%s.mp4" % int(time.time()))

        clip = clips_array([[red, green, blue]]).with_duration(0.5)
        clip.write_videofile(local_video_filename)

        # Open it up with VideoFileClip.
        video = VideoFileClip(local_video_filename)
        video.close()
        clip.close()

        # Now remove the temporary file.
        # This would fail on Windows if the file is still locked.

        # This should succeed without exceptions.
        os.remove(local_video_filename)

    red.close()
    green.close()
    blue.close()
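These test snippets come from a larger test module that defines its imports and the TMP_DIR constant elsewhere. A minimal preamble that would let Example #1 run on its own might look like the sketch below; the top-level import path assumes a MoviePy release that provides with_duration, and pointing TMP_DIR at the system temp directory is purely an assumption, not the original test helper.

import os
import tempfile
import time

# Assumption: MoviePy 2.x-style top-level imports (Example #1 calls .with_duration()).
from moviepy import ColorClip, VideoFileClip, clips_array

# Assumption: the real test suite defines TMP_DIR in a shared helper module;
# the system temp directory is used here only to keep the snippet self-contained.
TMP_DIR = tempfile.gettempdir()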
Example #2
def test_release_of_file_via_close():
    # Create a random video file.
    red = ColorClip((1024, 800), color=(255, 0, 0))
    green = ColorClip((1024, 800), color=(0, 255, 0))
    blue = ColorClip((1024, 800), color=(0, 0, 255))

    red.fps = green.fps = blue.fps = 30

    # Repeat this so we can confirm there are no conflicts.
    for i in range(5):
        # Get the name of a temporary file we can use.
        local_video_filename = join(TMP_DIR, "test_release_of_file_via_close_%s.mp4" % int(time.time()))

        with clips_array([[red, green, blue]]) as ca:
            video = ca.set_duration(1)

            video.write_videofile(local_video_filename)

        # Open it up with VideoFileClip.
        with VideoFileClip(local_video_filename) as clip:
            # Normally a client would do processing here.
            pass

        # Now remove the temporary file.
        # This would fail on Windows if the file is still locked.

        # This should succeed without exceptions.
        remove(local_video_filename)

    red.close()
    green.close()
    blue.close()
Example #3
def test_release_of_file_via_close():
    # Create a random video file.
    red = ColorClip((1024, 800), color=(255, 0, 0))
    green = ColorClip((1024, 800), color=(0, 255, 0))
    blue = ColorClip((1024, 800), color=(0, 0, 255))

    red.fps = green.fps = blue.fps = 30

    # Repeat this so we can confirm there are no conflicts.
    for i in range(5):
        # Get the name of a temporary file we can use.
        local_video_filename = join(
            TMP_DIR,
            "test_release_of_file_via_close_%s.mp4" % int(time.time()))

        with clips_array([[red, green, blue]]) as ca:
            video = ca.set_duration(1)

            video.write_videofile(local_video_filename)

        # Open it up with VideoFileClip.
        with VideoFileClip(local_video_filename) as clip:
            # Normally a client would do processing here.
            pass

        # Now remove the temporary file.
        # This would fail on Windows if the file is still locked.

        # This should succeed without exceptions.
        remove(local_video_filename)

    red.close()
    green.close()
    blue.close()
Example #4
def test_setup():
    """Test VideoFileClip setup."""
    red = ColorClip((1024,800), color=(255,0,0))
    green = ColorClip((1024,800), color=(0,255,0))
    blue = ColorClip((1024,800), color=(0,0,255))

    red.fps = green.fps = blue.fps = 30
    with clips_array([[red, green, blue]]).set_duration(5) as video:
        video.write_videofile(os.path.join(TMP_DIR, "test.mp4"))

    assert os.path.exists(os.path.join(TMP_DIR, "test.mp4"))

    with VideoFileClip(os.path.join(TMP_DIR, "test.mp4")) as clip:
        assert clip.duration == 5
        assert clip.fps == 30
        assert clip.size == [1024*3, 800]

    red.close()
    green.close()
    blue.close()
Example #5
def test_setup():
    """Test VideoFileClip setup."""
    red = ColorClip((1024, 800), color=(255, 0, 0))
    green = ColorClip((1024, 800), color=(0, 255, 0))
    blue = ColorClip((1024, 800), color=(0, 0, 255))

    red.fps = green.fps = blue.fps = 30
    with clips_array([[red, green, blue]]).set_duration(5) as video:
        video.write_videofile(os.path.join(TMP_DIR, "test.mp4"))

    assert os.path.exists(os.path.join(TMP_DIR, "test.mp4"))

    with VideoFileClip(os.path.join(TMP_DIR, "test.mp4")) as clip:
        assert clip.duration == 5
        assert clip.fps == 30
        assert clip.size == [1024 * 3, 800]

    red.close()
    green.close()
    blue.close()
Example #6
def test_PR_458():
    clip = ColorClip([1000, 600], color=(60, 60, 60), duration=10)
    clip.write_videofile(os.path.join(TMP_DIR, "test.mp4"),
                         progress_bar=False, fps=30)
    clip.close()
Example #7
def test_PR_458():
    clip = ColorClip([1000, 600], color=(60, 60, 60), duration=2)
    clip.write_videofile(os.path.join(TMP_DIR, "test.mp4"), logger=None, fps=30)
    clip.close()
Example #8
def test_PR_458():
    clip = ColorClip([1000, 600], color=(60, 60, 60), duration=10)
    clip.write_videofile(os.path.join(TMP_DIR, "test.mp4"),
                         progress_bar=False,
                         fps=30)
    clip.close()
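The three test_PR_458 variants differ only in how console output is silenced: Examples #6 and #8 pass progress_bar=False, which belongs to older MoviePy releases, while Example #7 passes logger=None, the proglog-based mechanism used from MoviePy 1.0 onward. A hedged sketch of the newer form, with the import path and output location chosen only for illustration:

import os
import tempfile

from moviepy import ColorClip

clip = ColorClip([1000, 600], color=(60, 60, 60), duration=2)
# logger=None disables the proglog progress bar; older releases used the
# progress_bar / verbose keyword arguments for the same purpose.
clip.write_videofile(os.path.join(tempfile.gettempdir(), "test_silent.mp4"),
                     logger=None, fps=30)
clip.close()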
Example #9
class CompositeVideoClip(VideoClip):

    """ 
    
    A VideoClip made of other videoclips displayed together. This is the
    base class for most compositions.
    
    Parameters
    ----------

    size
      The size (width, height) of the final clip.

    clips
      A list of videoclips. Each clip of the list will
      be displayed below the clips appearing after it in the list.
      For each clip:
       
      - The attribute ``pos`` determines where the clip is placed.
          See ``VideoClip.set_pos``
      - The mask of the clip determines which parts are visible.
        
      Finally, if all the clips in the list have their ``duration``
      attribute set, then the duration of the composite video clip
      is computed automatically.

    bg_color
      Color for the unmasked and unfilled regions. Set to None for these
      regions to be transparent (will be slower).

    use_bgclip
      Set to True if the first clip in the list should be used as the
      'background' on which all other clips are blitted. That first clip must
      have the same size as the final clip. If it has no transparency, the final
      clip will have no mask. 
    
    The FPS of the composite clip is the highest FPS among its component clips.

    """

    def __init__(self, clips, size=None, bg_color=None, use_bgclip=False, ismask=False):

        if size is None:
            size = clips[0].size

        if use_bgclip and (clips[0].mask is None):
            transparent = False
        else:
            transparent = bg_color is None

        if bg_color is None:
            bg_color = 0.0 if ismask else (0, 0, 0)

        fpss = [c.fps for c in clips if getattr(c, "fps", None)]
        self.fps = max(fpss) if fpss else None

        VideoClip.__init__(self)

        self.size = size
        self.ismask = ismask
        self.clips = clips
        self.bg_color = bg_color

        if use_bgclip:
            self.bg = clips[0]
            self.clips = clips[1:]
            self.created_bg = False
        else:
            self.clips = clips
            self.bg = ColorClip(size, color=self.bg_color, ismask=ismask)
            self.created_bg = True

        # compute duration
        ends = [c.end for c in self.clips]
        if None not in ends:
            duration = max(ends)
            self.duration = duration
            self.end = duration

        # compute audio
        audioclips = [v.audio for v in self.clips if v.audio is not None]
        if audioclips:
            self.audio = CompositeAudioClip(audioclips)

        # compute mask if necessary
        if transparent:
            maskclips = [
                (c.mask if (c.mask is not None) else c.add_mask().mask)
                .set_position(c.pos)
                .set_end(c.end)
                .set_start(c.start, change_end=False)
                for c in self.clips
            ]

            self.mask = CompositeVideoClip(
                maskclips, self.size, ismask=True, bg_color=0.0
            )

        def make_frame(t):
            """ The clips playing at time `t` are blitted over one
                another. """

            f = self.bg.get_frame(t)
            for c in self.playing_clips(t):
                f = c.blit_on(f, t)
            return f

        self.make_frame = make_frame

    def playing_clips(self, t=0):
        """ Returns a list of the clips in the composite clips that are
            actually playing at the given time `t`. """
        return [c for c in self.clips if c.is_playing(t)]

    def close(self):
        if self.created_bg and self.bg:
            # Only close the background clip if it was locally created.
            # Otherwise, it remains the job of whoever created it.
            self.bg.close()
            self.bg = None
        if hasattr(self, "audio") and self.audio:
            self.audio.close()
            self.audio = None
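As a rough usage illustration of the API documented in the docstring above (not taken from the original project), composing a small overlay onto a solid background with MoviePy 1.x-style method names might look like this; sizes, positions, and the output file name are placeholders:

from moviepy.editor import ColorClip, CompositeVideoClip

background = ColorClip((640, 360), color=(0, 0, 0)).set_duration(2)
overlay = (ColorClip((160, 90), color=(255, 0, 0))
           .set_duration(2)
           .set_position(("center", "center")))

# Clips later in the list are drawn on top of earlier ones.
composite = CompositeVideoClip([background, overlay])
composite.write_videofile("composite_demo.mp4", fps=24)
composite.close()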
Example #10
class CompositeVideoClip(VideoClip):
    """
    A VideoClip made of other videoclips displayed together. This is the
    base class for most compositions.

    Parameters
    ----------

    size
      The size (width, height) of the final clip.

    clips
      A list of videoclips.

      Clips with a higher ``layer`` attribute will be displayed
      on top of clips with a lower ``layer``.
      If two or more clips share the same ``layer``,
      the one appearing later in ``clips`` is displayed on top.

      For each clip:

      - The attribute ``pos`` determines where the clip is placed.
          See ``VideoClip.set_pos``
      - The mask of the clip determines which parts are visible.

      Finally, if all the clips in the list have their ``duration``
      attribute set, then the duration of the composite video clip
      is computed automatically.

    bg_color
      Color for the unmasked and unfilled regions. Set to None for these
      regions to be transparent (will be slower).

    use_bgclip
      Set to True if the first clip in the list should be used as the
      'background' on which all other clips are blitted. That first clip must
      have the same size as the final clip. If it has no transparency, the final
      clip will have no mask.

    The FPS of the composite clip is the highest FPS among its component clips.

    """

    def __init__(
        self, clips, size=None, bg_color=None, use_bgclip=False, is_mask=False
    ):

        if size is None:
            size = clips[0].size

        if use_bgclip and (clips[0].mask is None):
            transparent = False
        else:
            transparent = bg_color is None

        if bg_color is None:
            bg_color = 0.0 if is_mask else (0, 0, 0)

        fpss = [clip.fps for clip in clips if getattr(clip, "fps", None)]
        self.fps = max(fpss) if fpss else None

        VideoClip.__init__(self)

        self.size = size
        self.is_mask = is_mask
        self.clips = clips
        self.bg_color = bg_color

        if use_bgclip:
            self.bg = clips[0]
            self.clips = clips[1:]
            self.created_bg = False
        else:
            self.clips = clips
            self.bg = ColorClip(size, color=self.bg_color, is_mask=is_mask)
            self.created_bg = True

        # order self.clips by layer
        self.clips = sorted(self.clips, key=lambda clip: clip.layer)

        # compute duration
        ends = [clip.end for clip in self.clips]
        if None not in ends:
            duration = max(ends)
            self.duration = duration
            self.end = duration

        # compute audio
        audioclips = [v.audio for v in self.clips if v.audio is not None]
        if audioclips:
            self.audio = CompositeAudioClip(audioclips)

        # compute mask if necessary
        if transparent:
            maskclips = [
                (clip.mask if (clip.mask is not None) else clip.add_mask().mask)
                .with_position(clip.pos)
                .with_end(clip.end)
                .with_start(clip.start, change_end=False)
                .with_layer(clip.layer)
                for clip in self.clips
            ]

            self.mask = CompositeVideoClip(
                maskclips, self.size, is_mask=True, bg_color=0.0
            )

    def make_frame(self, t):
        """The clips playing at time `t` are blitted over one another."""
        frame = self.bg.get_frame(t).astype("uint8")
        im = Image.fromarray(frame)

        if self.bg.mask is not None:
            frame_mask = self.bg.mask.get_frame(t)
            im_mask = Image.fromarray(255 * frame_mask).convert("L")
            im.putalpha(im_mask)  # putalpha() modifies the image in place and returns None

        for clip in self.playing_clips(t):
            im = clip.blit_on(im, t)

        return np.array(im)

    def playing_clips(self, t=0):
        """Returns a list of the clips in the composite clips that are
        actually playing at the given time `t`.
        """
        return [clip for clip in self.clips if clip.is_playing(t)]

    def close(self):
        """Closes the instance, releasing all the resources."""
        if self.created_bg and self.bg:
            # Only close the background clip if it was locally created.
            # Otherwise, it remains the job of whoever created it.
            self.bg.close()
            self.bg = None
        if hasattr(self, "audio") and self.audio:
            self.audio.close()
            self.audio = None
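The variant above sorts its clips by their layer attribute before compositing, so stacking order no longer depends only on list position. A hedged sketch of exercising that, reusing the with_layer / layer names that appear in the code above; released MoviePy versions may spell these differently (for example layer_index), so treat this as a sketch rather than a version-exact recipe:

from moviepy import ColorClip, CompositeVideoClip

base = ColorClip((640, 360), color=(0, 0, 0)).with_duration(2)
red = (ColorClip((200, 200), color=(255, 0, 0))
       .with_duration(2)
       .with_position((50, 50))
       .with_layer(2))    # drawn above clips with a lower layer value
green = (ColorClip((200, 200), color=(0, 255, 0))
         .with_duration(2)
         .with_position((120, 80))
         .with_layer(1))

composite = CompositeVideoClip([base, red, green])
composite.write_videofile("layered_demo.mp4", fps=24)
composite.close()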
Example #11
class CompositeVideoClip(VideoClip):
    """ 
    
    A VideoClip made of other videoclips displayed together. This is the
    base class for most compositions.
    
    Parameters
    ----------

    size
      The size (width, height) of the final clip.

    clips
      A list of videoclips. Each clip of the list will
      be displayed below the clips appearing after it in the list.
      For each clip:
       
      - The attribute ``pos`` determines where the clip is placed.
          See ``VideoClip.set_pos``
      - The mask of the clip determines which parts are visible.
        
      Finally, if all the clips in the list have their ``duration``
      attribute set, then the duration of the composite video clip
      is computed automatically.

    bg_color
      Color for the unmasked and unfilled regions. Set to None for these
      regions to be transparent (will be slower).

    use_bgclip
      Set to True if the first clip in the list should be used as the
      'background' on which all other clips are blitted. That first clip must
      have the same size as the final clip. If it has no transparency, the final
      clip will have no mask. 
    
    The FPS of the composite clip is the highest FPS among its component clips.

    """
    def __init__(self,
                 clips,
                 size=None,
                 bg_color=None,
                 use_bgclip=True,
                 ismask=False):

        if size is None:
            size = clips[0].size

        if use_bgclip and (clips[0].mask is None):
            transparent = False
        else:
            transparent = (bg_color is None)

        if bg_color is None:
            bg_color = 0.0 if ismask else (0, 0, 0)

        fpss = [c.fps for c in clips if getattr(c, 'fps', None)]
        self.fps = max(fpss) if fpss else None

        VideoClip.__init__(self)

        self.size = size
        self.ismask = ismask
        self.clips = clips
        self.bg_color = bg_color

        if use_bgclip:
            self.bg = clips[0]
            self.clips = clips[1:]
            self.created_bg = False
        else:
            self.clips = clips
            self.bg = ColorClip(size, color=self.bg_color)
            self.created_bg = True

        # compute duration
        ends = [c.end for c in self.clips]
        if None not in ends:
            duration = max(ends)
            self.duration = duration
            self.end = duration

        # compute audio
        audioclips = [v.audio for v in self.clips if v.audio is not None]
        if audioclips:
            self.audio = CompositeAudioClip(audioclips)

        # compute mask if necessary
        if transparent:
            maskclips = [
                (c.mask if
                 (c.mask is not None) else c.add_mask().mask).set_position(
                     c.pos).set_end(c.end).set_start(c.start, change_end=False)
                for c in self.clips
            ]

            self.mask = CompositeVideoClip(maskclips,
                                           self.size,
                                           ismask=True,
                                           bg_color=0.0)

        def make_frame(t):
            full_w, full_h = self.bg.size
            f = self.bg.get_frame(t)
            bg_im = Image.fromarray(f)
            for c in self.playing_clips(t):
                img, pos, mask, ismask = c.new_blit_on(t, f)

                x, y = pos
                w, h = c.size

                out_x = x < -w or x == full_w
                out_y = y < -h or y == full_h

                if out_x and out_y:
                    continue

                pos = (int(round(min(max(-w, x), full_w))),
                       int(round(min(max(-h, y), full_h))))

                paste_im = Image.fromarray(img)

                if mask is not None:
                    mask_im = Image.fromarray(255 * mask).convert('L')
                    bg_im.paste(paste_im, pos, mask_im)
                else:
                    bg_im.paste(paste_im, pos)

            result_frame = np.array(bg_im)

            return result_frame.astype('uint8') if (
                not ismask) else result_frame

        self.make_frame = make_frame

    def playing_clips(self, t=0):
        """ Returns a list of the clips in the composite clips that are
            actually playing at the given time `t`. """
        return [c for c in self.clips if c.is_playing(t)]

    def close(self):
        if self.created_bg and self.bg:
            # Only close the background clip if it was locally created.
            # Otherwise, it remains the job of whoever created it.
            self.bg.close()
            self.bg = None
        if hasattr(self, "audio") and self.audio:
            self.audio.close()
            self.audio = None
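Unlike the other variants on this page, the one above defaults use_bgclip to True, so the first clip in the list is used directly as the canvas and no background ColorClip is created. A minimal sketch of that calling pattern, assuming MoviePy 1.x-style imports; "background.mp4" and the overlay size are placeholders:

from moviepy.editor import ColorClip, CompositeVideoClip, VideoFileClip

# The first clip is used as-is for the background, so it must already have
# the final size of the composite.
background = VideoFileClip("background.mp4")
badge = (ColorClip((120, 60), color=(255, 255, 255))
         .set_duration(background.duration)
         .set_position(("right", "top")))

composite = CompositeVideoClip([background, badge], use_bgclip=True)
composite.write_videofile("badged.mp4")
composite.close()
background.close()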
Example #12
class CompositeVideoClip(VideoClip):

    """ 
    
    A VideoClip made of other videoclips displayed together. This is the
    base class for most compositions.
    
    Parameters
    ----------

    size
      The size (width, height) of the final clip.

    clips
      A list of videoclips. Each clip of the list will
      be displayed below the clips appearing after it in the list.
      For each clip:
       
      - The attribute ``pos`` determines where the clip is placed.
          See ``VideoClip.set_pos``
      - The mask of the clip determines which parts are visible.
        
      Finally, if all the clips in the list have their ``duration``
      attribute set, then the duration of the composite video clip
      is computed automatically.

    bg_color
      Color for the unmasked and unfilled regions. Set to None for these
      regions to be transparent (will be slower).

    use_bgclip
      Set to True if the first clip in the list should be used as the
      'background' on which all other clips are blitted. That first clip must
      have the same size as the final clip. If it has no transparency, the final
      clip will have no mask. 
    
    The FPS of the composite clip is the highest FPS among its component clips.

    """

    def __init__(self, clips, size=None, bg_color=None, use_bgclip=False,
                 ismask=False):

        if size is None:
            size = clips[0].size

        if use_bgclip and (clips[0].mask is None):
            transparent = False
        else:
            transparent = (bg_color is None)
        
        if bg_color is None:
            bg_color = 0.0 if ismask else (0, 0, 0)

        fpss = [c.fps for c in clips if hasattr(c, 'fps') and c.fps is not None]
        if len(fpss) == 0:
            self.fps = None
        else:
            self.fps = max(fpss)

        VideoClip.__init__(self)
        
        self.size = size
        self.ismask = ismask
        self.clips = clips
        self.bg_color = bg_color

        if use_bgclip:
            self.bg = clips[0]
            self.clips = clips[1:]
            self.created_bg = False
        else:
            self.clips = clips
            self.bg = ColorClip(size, col=self.bg_color)
            self.created_bg = True

        # compute duration
        ends = [c.end for c in self.clips]
        if not any([(e is None) for e in ends]):
            self.duration = max(ends)
            self.end = max(ends)

        # compute audio
        audioclips = [v.audio for v in self.clips if v.audio is not None]
        if len(audioclips) > 0:
            self.audio = CompositeAudioClip(audioclips)

        # compute mask if necessary
        if transparent:
            maskclips = [
                (c.mask if (c.mask is not None) else c.add_mask().mask)
                .set_pos(c.pos)
                .set_end(c.end)
                .set_start(c.start, change_end=False)
                for c in self.clips
            ]

            self.mask = CompositeVideoClip(
                maskclips, self.size, ismask=True, bg_color=0.0
            )

        def make_frame(t):
            """ The clips playing at time `t` are blitted over one
                another. """

            f = self.bg.get_frame(t)
            for c in self.playing_clips(t):
                f = c.blit_on(f, t)
            return f

        self.make_frame = make_frame

    def playing_clips(self, t=0):
        """ Returns a list of the clips in the composite clips that are
            actually playing at the given time `t`. """
        return [c for c in self.clips if c.is_playing(t)]

    def close(self):
        if self.created_bg and self.bg:
            # Only close the background clip if it was locally created.
            # Otherwise, it remains the job of whoever created it.
            self.bg.close()
            self.bg = None
        if hasattr(self, "audio") and self.audio:
            self.audio.close()
            self.audio = None