# Example #1
def visulize_intro_from_hashes(videofile,
                               hashes,
                               pause=0.01,
                               end=600):  # pragma: no cover
    """Play back every frame of *videofile* whose image hash is in *hashes*.

       Args:
            videofile (str): path to the video file.
            hashes: container of hash strings to match against.
            pause (float): seconds to hold each matched frame on screen.
            end (int): stop scanning the video after this many seconds.
    """
    import matplotlib.pyplot as plt
    import cv2

    frame_iter = video_frame_by_frame(videofile, frame_range=False, end=end)
    axis = plt.subplot(1, 2, 1)
    # Placeholder image; real frame data is swapped in below.
    image = axis.imshow(np.zeros([150, 150, 3], dtype=np.uint8))
    axis.set_xlabel(os.path.basename(videofile))

    for frame, pos in frame_iter:
        img_hash = ImageHash(create_imghash(frame))
        hash_str = str(img_hash)
        if not img_hash or hash_str not in hashes:
            continue

        # OpenCV frames are BGR; matplotlib expects RGB.
        rgb_frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
        axis.set_title('Source %s | %s' % (to_time(pos / 1000), hash_str))
        image.set_data(rgb_frame)
        plt.pause(pause)

    # plt.ioff() is skipped on purpose: with an endless source it was never reached.
    plt.show()
# Example #2
def check_movement(path, debug=True):  # pragma: no cover
    """Experimental movement detection. Nothing useful yet. TODO

       Runs a KNN background subtractor over the video and, when *debug*
       is true, shows the foreground mask next to the original frame.
       Press ESC in the preview window to stop.

       Args:
            path (str): path to the video file.
            debug (bool): show the mask/frame preview window.

       Returns:
            None
    """
    kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3))
    # KNN subtractor args: history=1, dist2Threshold=200.0, detectShadows=False
    fgbg = cv2.createBackgroundSubtractorKNN(1, 200, False)
    r_size = (640, 480)

    for frame, millisec in video_frame_by_frame(path, offset=0,
                                                step=0, frame_range=False):
        if frame is None:
            continue

        fgmask = fgbg.apply(frame)
        fgmask = cv2.erode(fgmask, kernel, iterations=20)
        fgmask = cv2.morphologyEx(fgmask, cv2.MORPH_OPEN, kernel)
        fgmask = cv2.morphologyEx(fgmask, cv2.MORPH_CLOSE, kernel)

        if debug:
            # The mask is single channel; add channels so it can sit
            # next to the BGR frame in one image.
            m = cv2.cvtColor(fgmask, cv2.COLOR_GRAY2BGR)
            # Resize so it is easier to see them side by side.
            m = cv2.resize(m, r_size)
            f = cv2.resize(frame.copy(), r_size)
            vis = np.concatenate((m, f), axis=1)

            cv2.imshow('frame', vis)

        # waitKey(0) blocks until any keypress; ESC (27) stops the loop.
        k = cv2.waitKey(0) & 0xff
        if k == 27:
            break
# Example #3
def hash_file(path, step=1, frame_range=False, end=None):
    """Yield ``(ImageHash, frame, position_ms)`` for the selected frames.

       A plain image file (matching ``image_type``) is hashed directly and
       yields exactly one item with position 0.
    """
    import cv2

    # Short-circuit for still images. Probably not needed, kept for now.
    if isinstance(path, str) and path.endswith(image_type):
        yield ImageHash(create_imghash(path)), cv2.imread(path, 0), 0
        return

    for frame, pos in video_frame_by_frame(path,
                                           frame_range=frame_range,
                                           step=step,
                                           end=end):
        yield ImageHash(create_imghash(frame)), frame, pos
# Example #4
def find_credits(path, offset=0, fps=None, duration=None, check=7, step=1, frame_range=True):
    """Find the start/end of the credits in a videofile.

       This only checks frames, so any silence in the video is simply
       skipped, as opencv only handles video streams.

       Use frame_range so we only check frames every 1 sec.

       # TODO use ffmpeg to check for silence so we calculate the correct time? :(

       Args:
            path (str): path to the videofile.
            offset (int): If given we should start from this position (ms).
            fps (float, None): fps of the video file; read from the file if None.
            duration (None, int): Duration of the file in seconds (currently unused).
            check (int): Stop after n frames with text; set an insanely high
                         number to check all. end is not correct without this!
            step (int): only use every n-th frame.
            frame_range (bool): default True, precalc the frames and only
                                check those frames.

       Returns:
            tuple: (start, end) in seconds, or (-1, -1) when nothing was found.
    """
    frames = []
    start = -1
    end = -1

    if cv2 is None:
        # opencv is missing: return the same "not found" sentinel as the
        # normal path instead of an unpackable None.
        return start, end

    LOG.debug('Trying to find the credits for %s', path)

    try:
        if fps is None:
            # we can just grab the fps from plex.
            cap = cv2.VideoCapture(path)
            fps = cap.get(cv2.CAP_PROP_FPS)
            cap.release()

        for frame, millisec in video_frame_by_frame(path, offset=offset,
                                                    step=step, frame_range=frame_range):
            # LOG.debug('progress %s', millisec / 1000)
            if frame is not None:
                recs = locate_text(frame, debug=False)

                if recs:
                    frames.append(millisec)

                # check == -1 means scan the whole file.
                if check != -1 and len(frames) >= check:
                    break

        if frames:
            LOG.debug(frames)
            start = min(frames) / 1000
            end = max(frames) / 1000

        LOG.debug('credits_start %s, credits_end %s', start, end)

    except Exception:  # pragma: no cover
        # We just want to log the exception not halt the entire process to db.
        LOG.exception('There was a error in find_credits')

    return start, end
# Example #5
def find_credits(path, offset=0, fps=None, duration=None,
                 check=7, step=1, frame_range=True, debug=False, method='east'):
    """Find the start/end of the credits in a videofile.

       This only checks frames, so any silence in the video is simply
       skipped, as opencv only handles video streams.

       Use frame_range so we only check frames every 1 sec.

       # TODO use ffmpeg to check for silence so we calculate the correct time? :(

       Args:
            path (str): path to the videofile.
            offset (int): If given we should start from this position (ms).
            fps (float, None): fps of the video file; read from the file if None.
            duration (None, int): Duration of the file in seconds (currently unused).
            check (int): Stop after n frames with text; set an insanely high
                         number to check all. end is not correct without this!
            step (int): only use every n-th frame.
            frame_range (bool): default True, precalc the frames and only
                                check those frames.
            debug (bool): Disable the images.
            method (str): east is better but slower.

       Returns:
            tuple: (start, end) in seconds, or (-1, -1) when nothing was found.
    """
    frames = []
    start = -1
    end = -1

    if cv2 is None:
        # opencv is missing: return the same "not found" sentinel as the
        # normal path instead of an unpackable None.
        return start, end

    LOG.debug('Trying to find the credits for %s', path)

    if method == 'east':
        func = locate_text_east
    else:
        func = locate_text

    try:
        if fps is None:
            # we can just grab the fps from plex.
            cap = cv2.VideoCapture(path)
            fps = cap.get(cv2.CAP_PROP_FPS)
            cap.release()

        for frame, millisec in video_frame_by_frame(path, offset=offset,
                                                    step=step, frame_range=frame_range):
            try:
                # LOG.debug('progress %s', millisec / 1000)
                if frame is not None:
                    recs = func(frame, debug=debug)
                    len_recs = len(recs)

                    # If we get 1 match we should verify with OCR.
                    # This is pretty harsh, but we really
                    # don't want false positives.
                    if len_recs == 0:
                        continue
                    elif len_recs == 1:
                        t = extract_text(frame)
                        if t:
                            frames.append(millisec)
                    else:
                        frames.append(millisec)

                    # check for motion here?

                    # check == -1 means scan the whole file.
                    if check != -1 and len(frames) >= check:
                        break

            except DEBUG_STOP:
                # Close any debug windows before bailing out (this was
                # previously unreachable dead code after the break).
                if hasattr(cv2, 'destroyAllWindows'):
                    cv2.destroyAllWindows()
                break

        if frames:
            start = min(frames) / 1000
            end = max(frames) / 1000

        LOG.debug('credits_start %s, credits_end %s', start, end)

    except Exception:  # pragma: no cover
        # We just want to log the exception not halt the entire process to db.
        LOG.exception('There was a error in find_credits')

    return start, end