Example #1
def PassThroughAudioSegment(tmpFilePath, edit, backplane_name):
    """\
    Prefab.
    
    For a particular edit decision, sequentially reads in the audio frames
    corresponding to the video frames referred to in the reframing instructions.
    Outputs the audio frames out of the "outbox" outbox.
    
    Arguments:
    
    - tmpFilePath     -- temp directory into which the audio frames have been saved
    - edit            -- the edit decision (dictionary containing "start" and "end" frame numbers)
    - backplane_name  -- name of the backplane on which the audio format metadata is published
    
    Inboxes:
    
    - "inbox"    -- NOT USED
    - "control"  -- Shutdown signalling
    
    Outboxes:
    
    - "outbox"  -- raw audio data, chunked by frames
    - "signal"  -- Shutdown signalling
    """
    print " Audio segment: ", edit
    filenames = [
        tmpFilePath + "%08d.wav" % i
        for i in range(edit["start"], edit["end"] + 1)
    ]

    return Graphline( \
        FILENAMES = ForwardIteratingChooser(filenames),
        FRAME_LOADER = Carousel( lambda filename :
                                 Graphline(
                                     READ = MaxSpeedFileReader(filename),
                                     PARS = WAVParser(),
                                     META = PublishTo(backplane_name),
                                     linkages = {
                                         ("READ","outbox") : ("PARS","inbox"),
                                         ("PARS","outbox") : ("","outbox"),

                                         ("PARS","all_meta") : ("META","inbox"),

                                         ("","control") : ("READ","control"),
                                         ("READ","signal") : ("PARS","control"),
                                         ("PARS","signal") : ("META","control"),
                                         ("META","signal") : ("","signal"),
                                     },
                                     boxsizes = { ("PARS","inbox") : 2 },
                                 ),
                                 make1stRequest=False ),
        linkages = {
            ("FRAME_LOADER", "requestNext") : ("FILENAMES", "inbox"),

            ("FILENAMES",    "outbox") : ("FRAME_LOADER", "next"),
            ("FRAME_LOADER", "outbox") : ("", "outbox"),

            ("FILENAMES",    "signal") : ("FRAME_LOADER", "control"),
            ("FRAME_LOADER", "signal") : ("", "signal"),
        },
    )
Example #2
    def RateLimitedPlayback(frame):
        fps = frame["frame_rate"]
        x, y = tuple(frame["size"])
        print "Frames per second:", fps
        print "(width,height):", (x, y)

        pgd = PygameDisplay(width=x, height=y).activate()
        PygameDisplay.setDisplayService(pgd)

        return Graphline( \
            LIMIT = PromptedTurnstile(),
            RATE  = ByteRate_RequestControl(rate=fps, chunksize=1.0, allowchunkaggregation=False),
            PLAY  = player,
            linkages = {
                ("",      "inbox" ) : ("LIMIT", "inbox"),
                ("LIMIT", "outbox") : ("PLAY",  "inbox"),
                ("PLAY",  "outbox") : ("",      "outbox"),

                ("RATE", "outbox" ) : ("LIMIT", "next"),

                ("",      "control") : ("RATE",  "control"),
                ("RATE",  "signal" ) : ("LIMIT", "control"),
                ("LIMIT", "signal" ) : ("PLAY",  "control"),
                ("PLAY",  "signal" ) : ("",      "signal"),
            },
            boxsizes = {
                ("LIMIT","inbox") : 2,
            },
        )
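A minimal wiring sketch (an assumption, not part of the original source) of how
RateLimitedPlayback might be driven: it presumes that 'player' (for example a
VideoOverlay()) is bound in the enclosing scope, and reuses the
TwoWaySplitter / FirstOnly / Carousel pattern that appears in the decoding
example further down.

# Hypothetical wiring; the first frame configures the playback chain via the
# Carousel's "next" inbox, while the full stream of frames flows through it.
Graphline(
    SPLIT = TwoWaySplitter(),
    FIRST = FirstOnly(),
    PLAY  = Carousel(RateLimitedPlayback, make1stRequest=False),
    linkages = {
        ("",      "inbox")   : ("SPLIT", "inbox"),
        ("SPLIT", "outbox")  : ("PLAY",  "inbox"),
        ("SPLIT", "outbox2") : ("FIRST", "inbox"),
        ("FIRST", "outbox")  : ("PLAY",  "next"),

        ("",      "control") : ("SPLIT", "control"),
        ("SPLIT", "signal")  : ("FIRST", "control"),
        ("FIRST", "signal")  : ("PLAY",  "control"),
        ("PLAY",  "signal")  : ("",      "signal"),
    },
)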
Example #3
def ProcessEditDecision(tmpFilePath, edit, width, height):
    """\
    Prefab.
    
    Applies an edit decision - reading in the relevant video frames and applying
    the reframing. Outputs the reframed video frames out of the "outbox" outbox.
    
    Arguments:
    
    - tmpFilePath  -- temp directory into which video frames have been saved
    - edit         -- the edit instruction (dictionary containing: "start","end","left","top","right","bottom")
    - width        -- width (in pixels) for output video frames
    - height       -- height (in pixels) for output video frames
    
    Inboxes:
    
    - "inbox"    -- NOT USED
    - "control"  -- Shutdown signalling
    
    Outboxes:
    
    - "outbox"  -- NOT USED
    - "signal"  -- Shutdown signalling
    """
    print " Video segment: ", edit
    filenames = [
        tmpFilePath + "%08d.yuv" % i
        for i in range(edit["start"], edit["end"] + 1)
    ]
    newsize = (width, height)
    cropbounds = (edit["left"], edit["top"], edit["right"], edit["bottom"])

    return Graphline( \
        FILENAMES = ForwardIteratingChooser(filenames),
        FRAME_LOADER = Carousel( lambda filename :
                                 Pipeline(
                                     2, MaxSpeedFileReader(filename,chunksize=1024*1024),
                                     2, YUV4MPEGToFrame(),
                                     ),
                                 make1stRequest=False ),
        REFRAMING = Pipeline( 2, ToRGB_interleaved(),
                              2, CropAndScale(newsize, cropbounds),
                              2, ToYUV420_planar(),
                            ),
        linkages = {
            ("FRAME_LOADER", "requestNext") : ("FILENAMES", "inbox"),

            ("FILENAMES",    "outbox") : ("FRAME_LOADER", "next"),
            ("FRAME_LOADER", "outbox") : ("REFRAMING", "inbox"),
            ("REFRAMING",    "outbox") : ("", "outbox"),

            ("FILENAMES",    "signal") : ("FRAME_LOADER", "control"),
            ("FRAME_LOADER", "signal") : ("REFRAMING", "control"),
            ("REFRAMING",    "signal") : ("", "signal"),
        },
        boxsizes = {
        },
    )
Example #4
def SaveAudioFrames(frame_rate,tmpFilePath,edlfile):
    """\
    Prefab.
    
    Saves WAV audio data sent to the "inbox" inbox into the specified temp
    directory. Chunks the audio into frames, as per the specified frame-rate.
    Only saves those frames actually referenced in the EDL file.
    
    Frames are saved in individual files in WAV format. They are named
    sequentially "00000001.wav", "00000002.wav", "00000003.wav", etc - being
    assigned frame numbers as they arrive, starting at 1.
    
    Arguments:
    
    - frame_rate   -- the frame rate to chunk the audio into for saving
    - tmpFilePath  -- temp directory into which frames should be saved
    - edlfile      -- full filepathname of the EDL xml file
    
    Inboxes:
    
    - "inbox"    -- WAV format audio data
    - "control"  -- Shutdown signalling
    
    Outboxes:
    
    - "outbox"  -- NOT USED
    - "signal"  -- Shutdown signalling
    """
    return \
        Graphline(
            WAV = WAVParser(),
            AUD = Carousel(
                lambda ameta : AudioSplitterByFrames( frame_rate,
                                                      ameta['channels'],
                                                      ameta['sample_rate'],
                                                      ameta['sample_format'],
                                                      tmpFilePath,
                                                      edlfile,
                                                    )
                ),
            linkages = {
                # incoming WAV file passed to decoder
                ("", "inbox") : ("WAV", "inbox"),
                # raw audio sent to the carousel for splitting and writing
                ("WAV", "outbox") : ("AUD", "inbox"),
                
                # pass audio format info to the carousel
                ("WAV", "all_meta") : ("AUD", "next"),
                
                ("", "control") : ("WAV", "control"),
                ("WAV", "signal") : ("AUD", "control"),
            },
            boxsizes = {
                ("WAV", "inbox") : 10,
                ("AUD", "inbox") : 10,
            },
        )
Example #5
def FilterForWantedFrameNumbers(edlfile):
    """\
    Prefab.
    
    Send messages of the form (framenum, data) to the "inbox" inbox. Items with
    a frame number that isn't in the edit decision list are dropped. Other items
    with frame numbers that are in the edit decision list are passed through out
    of the "outbox" outbox.
    
    Arguments:
    
    - edlfile  -- full filepathname of the EDL xml file
    
    Inboxes:
    
    - "inbox"    -- (framenum, data) items to be filtered
    - "control"  -- Shutdown signalling
    
    Outboxes:
    
    - "outbox"  -- items not filtered out
    - "signal"  -- Shutdown signalling
    """
    class ExtractRanges(object):
        def filter(self, edit):
            try:
                return (edit['start'],edit['end'])
            except:
                return None
    
    return Graphline(
        RANGESRC = Pipeline(
                       RateControlledFileReader(edlfile,readmode="lines",rate=1000000),
                       SimpleXMLParser(),
                       EDLParser(),
                       Filter(filter = ExtractRanges()),
                       Collate(),
                   ),
        FILTER   = Carousel(lambda ranges : RangeFilter(ranges)),
        linkages = {
            ("RANGESRC","outbox") : ("FILTER","next"),
            
            ("","inbox") : ("FILTER","inbox"),
            ("FILTER","outbox") : ("","outbox"),
            
            ("","control") : ("FILTER","control"),
            ("FILTER","signal") :("","signal"),
        },
        )
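A minimal usage sketch (an assumption, not part of the original source):
pre-numbered (framenum, data) tuples are fed in, and only those whose frame
number falls inside an edit decision in the hypothetical EDL file "edits.xml"
are echoed to the console.

# Hypothetical usage; DataSource and ConsoleEchoer are stock Kamaelia
# utility components.
Pipeline(
    DataSource([ (1, "frame 1"), (2, "frame 2"), (500, "frame 500") ]),
    FilterForWantedFrameNumbers("edits.xml"),
    ConsoleEchoer(),
).run()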
Example #6
def PassThroughAudio(edlfile, tmpFilePath):
    """\
    Prefab.
    
    Goes through the specified edit decision list file and reads in the audio
    frames corresponding to the video frames referred to in the reframing
    instructions in sequence. Outputs the audio frames out of the "outbox"
    outbox.
    
    Arguments:
    
    - edlfile      -- full filepathname of the EDL xml file
    - tmpFilePath  -- temp directory into which video frames have been saved
    
    Inboxes:
    
    - "inbox"    -- NOT USED
    - "control"  -- Shutdown signalling
    
    Outboxes:
    
    - "outbox"  -- raw audio data, chunked by frames
    - "signal"  -- Shutdown signalling
    """
    backplane_name = "AUDIO_FORMAT"
    return Graphline( \
        GET_EDL = EditDecisionSource(edlfile),
        AUDIO = Carousel( lambda edit : PassThroughAudioSegment(tmpFilePath, edit, backplane_name),
                          make1stRequest=True),
        
        BACKPLANE = Backplane(backplane_name),
        AUDIOFORMAT = Pipeline( SubscribeTo(backplane_name), FirstOnly() ),
        
        linkages = {
            ("AUDIO", "requestNext") : ("GET_EDL", "inbox"),
            
            ("GET_EDL", "outbox") : ("AUDIO", "next"),
            
            ("AUDIO", "outbox") : ("", "outbox"),
            
            ("AUDIOFORMAT", "outbox") : ("", "audioformat"),
            
            ("GET_EDL", "signal") : ("AUDIO", "control"),
            ("AUDIO", "signal") : ("AUDIOFORMAT", "control"),
            ("AUDIOFORMAT", "signal") : ("BACKPLANE", "control"),
            ("BACKPLANE", "signal") : ("", "signal"),
        },
        )
Example #7
def EditDecisionSource(edlfile):
    """\
    Prefab.
    
    Reads in the edit decisions from the edit decision list file; then sends
    them out, one at a time, out of the "outbox" outbox whenever a message is
    sent to the "inbox" inbox. The message sent to the inbox does not matter.
    
    Edit decisions are of the form::
    
        { "start"  : start frame number for this edit decision
          "end"    : end frame number for this edit decision
          "left"   : left edge to crop to (in pixels)
          "top"    : top edge to crop to (in pixels)
          "right"  : right edge to crop to (in pixels)
          "bottom" : bottom edge to crop to (in pixels)
        }
    
    Arguments:
    
    - edlfile      -- full filepathname of the EDL xml file
    
    Inboxes:
    
    - "inbox"    -- Messages to trigger sending out of edit decisions
    - "control"  -- Shutdown signalling
    
    Outboxes:
    
    - "outbox"  -- Individual edit decisions
    - "signal"  -- Shutdown signalling
    """
    return Graphline( \
        PARSING = Pipeline( RateControlledFileReader(edlfile,readmode="lines",rate=1000000),
                            SimpleXMLParser(),
                            EDLParser(),
                          ),
        GATE = PromptedTurnstile(),
        linkages = {
            ("", "inbox") : ("GATE", "next"),

            ("PARSING", "outbox") : ("GATE", "inbox"),
            ("GATE",    "outbox") : ("",     "outbox"),
            
            ("PARSING", "signal") : ("GATE", "control"),
            ("GATE",    "signal") : ("", "signal"),

        } )
Example #8
def WriteToFiles():
    """\
    Prefab.
    
    Takes in audio and video frames and writes them out as a single YUV4MPEG2
    file and a single WAV file ("test.yuv" and "test.wav").
    
    Used for testing.
    
    Inboxes:
    
    - "inbox"    -- NOT USED
    - "control"  -- Shutdown signalling
    - "video"    -- Video frames to be saved
    - "audio"    -- Auio frames to be saved
    
    Outboxes:
    
    - "outbox"  -- NOT USED
    - "signal"  -- Shutdown signalling
    """
    return Graphline( \
               VIDEO = FrameToYUV4MPEG(),
               AUDIO = WAVWriter(2, "S16_LE", 48000),
               TEST = SimpleFileWriter("test.yuv"),
               TESTA = SimpleFileWriter("test.wav"),
               linkages = {
                   ("","video") : ("VIDEO","inbox"),
                   ("VIDEO","outbox") : ("TEST","inbox"),
                   
                   ("","audio") : ("AUDIO", "inbox"),
                   ("AUDIO","outbox") : ("TESTA","inbox"),
                   
                   ("","control") : ("VIDEO","control"),
                   ("VIDEO","signal") : ("AUDIO","control"),
                   ("AUDIO","signal") : ("TEST", "control"),
                   ("TEST", "signal") : ("TESTA", "control"),
                   ("TESTA", "signal") : ("", "signal"),
               },
           )
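A testing sketch (an assumption, not part of the original source), modelled on
the "Processing edits..." wiring shown later: the reframed video and the
pass-through audio are routed into WriteToFiles instead of the ffmpeg
re-encoder. edlfile, tmpFilePath, output_width and output_height are assumed
to be defined elsewhere, as in that later example.

# Hypothetical test wiring using the ReframeVideo and PassThroughAudio prefabs
# from the other examples.
Graphline(
    REFRAMING = ReframeVideo(edlfile, tmpFilePath, output_width, output_height),
    SOUND     = PassThroughAudio(edlfile, tmpFilePath),
    WRITE     = WriteToFiles(),
    linkages = {
        ("REFRAMING", "outbox") : ("WRITE", "video"),
        ("SOUND",     "outbox") : ("WRITE", "audio"),

        ("REFRAMING", "signal") : ("SOUND", "control"),
        ("SOUND",     "signal") : ("WRITE", "control"),
        ("WRITE",     "signal") : ("",      "signal"),
    },
).run()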
Example #9
def ReframeVideo(edlfile, tmpFilePath, width, height):
    """\
    Prefab.
    
    Goes through the specified edit decision list file and reads in video frames
    applying the reframing instructions in sequence. Outputs the reframed video
    frames out of the "outbox" outbox.
    
    Arguments:
    
    - tmpFilePath  -- temp directory into which video frames have been saved
    - edlfile      -- full filepathname of the EDL xml file
    - width        -- width (in pixels) for output video frames
    - height       -- height (in pixels) for output video frames
    
    Inboxes:
    
    - "inbox"    -- NOT USED
    - "control"  -- Shutdown signalling
    
    Outboxes:
    
    - "outbox"  -- NOT USED
    - "signal"  -- Shutdown signalling
    """
    return Graphline( \
        GET_EDL = EditDecisionSource(edlfile),
        REFRAMER = Carousel( lambda edit : ProcessEditDecision(tmpFilePath, edit, width, height),
                             make1stRequest=True ),
        linkages = {
            ("REFRAMER", "requestNext") : ("GET_EDL", "inbox"),
            
            ("GET_EDL", "outbox") : ("REFRAMER", "next"),
            
            ("REFRAMER", "outbox") : ("", "outbox"),
            
            ("GET_EDL", "signal") : ("REFRAMER", "control"),
            ("REFRAMER", "signal") : ("", "signal"),
        },
        )
Example #10
def ReEncode(outFileName):
    """\
    Prefab.
    
    Takes in audio and video frames and encodes them to a compressed video file
    using ffmpeg to do the compression.
    
    Inboxes:
    
    - "inbox"    -- NOT USED
    - "control"  -- Shutdown signalling
    - "video"    -- Video frames to be saved
    - "audio"    -- Auio frames to be saved
    
    Outboxes:
    
    - "outbox"  -- NOT USED
    - "signal"  -- Shutdown signalling
    """
    vidpipe = tmpFilePath+"vidPipe2.yuv"
    try:
        os.remove(vidpipe)
    except:
        pass
    
    audpipe = tmpFilePath+"audPipe2.wav"
    try:
        os.remove(audpipe)
    except:
        pass
    
    vidpipe=vidpipe.replace(" ","\ ")
    audpipe=audpipe.replace(" ","\ ")
    outFileName=outFileName.replace(" ","\ ")
    
    encoder = "ffmpeg -f yuv4mpegpipe -i "+vidpipe+" -f wav -i "+audpipe+" -y "+outFileName
    print encoder
             
    return Graphline( \
               VIDEO = FrameToYUV4MPEG(),
               AUDIO = Carousel( lambda format : WAVWriter(**format),
                                 make1stRequest=False),
               ENCODE =  UnixProcess2(encoder,buffersize=327680,inpipes={vidpipe:"video",audpipe:"audio"},boxsizes={"inbox":2,"video":2,"audio":2}),
               DEBUG = ConsoleEchoer(),
               linkages = {
                   ("","audioformat") : ("AUDIO","next"),
                   ("","video") : ("VIDEO","inbox"),
                   ("VIDEO","outbox") : ("ENCODE","video"),
                   
                   ("","audio") : ("AUDIO", "inbox"),
                   ("AUDIO","outbox") : ("ENCODE", "audio"),
                   
                   ("","control") : ("VIDEO","control"),
                   ("VIDEO","signal") : ("AUDIO","control"),
                   ("AUDIO","signal") : ("ENCODE", "control"),
                   ("ENCODE", "signal") : ("DEBUG", "control"),
                   ("DEBUG", "signal") : ("", "signal"),

                   ("ENCODE","outbox") : ("DEBUG","inbox"),
                   ("ENCODE","error") : ("DEBUG","inbox"),
               },
               boxsizes = {
                   ("VIDEO",  "inbox") : 2,
                   ("AUDIO",  "inbox") : 2,
               }
           )
Example #11
def DecodeAndSeparateFrames(inFileName, tmpFilePath, edlfile,maxframe):
    """\
    Prefab.
    
    Decompresses audio and video from the specified file (using ffmpeg) and
    saves them as individual files into the provided temp directory. Only
    reads up to maxframe frames from the video file.
    
    Arguments:
    
    - inFileName   -- The video file to be decompressed
    - tmpFilePath  -- temp directory into which frames of audio and video should be saved
    - edlfile      -- full filepathname of the EDL xml file
    - maxframe     -- the number of frames to decompress
    
    Inboxes:
    
    - "inbox"    -- NOT USED
    - "control"  -- Shutdown signalling
    
    Outboxes:
    
    - "outbox"  -- NOT USED
    - "signal"  -- Shutdown signalling
    """
    vidpipe = tmpFilePath+"vidPipe.yuv"
    try:
        os.remove(vidpipe)
    except:
        pass
    
    audpipe = tmpFilePath+"audPipe.wav"
    try:
        os.remove(audpipe)
    except:
        pass
    
    mplayer = "ffmpeg -vframes %d -i %s -f yuv4mpegpipe -y %s -f wav -y %s" % ((maxframe+25),inFileName.replace(" ","\ "),vidpipe,audpipe)
    print mplayer
    
    return Graphline(
            DECODER = UnixProcess2(mplayer, 2000000, {vidpipe:"video",audpipe:"audio"}),
            FRAMES = YUV4MPEGToFrame(),
            SPLIT = TwoWaySplitter(),
            FIRST = FirstOnly(),
            VIDEO = SaveVideoFrames(tmpFilePath,edlfile),
            AUDIO = Carousel(lambda vformat: SaveAudioFrames(vformat['frame_rate'],tmpFilePath,edlfile)),
            linkages = {
                ("DECODER","video") : ("FRAMES","inbox"),
                ("FRAMES","outbox") : ("SPLIT","inbox"),
                ("SPLIT","outbox") : ("VIDEO","inbox"),
                
                ("SPLIT","outbox2") : ("FIRST","inbox"),
                ("FIRST","outbox") : ("AUDIO","next"),
                ("DECODER","audio") : ("AUDIO","inbox"),
                
                ("DECODER","signal") : ("FRAMES","control"),
                ("FRAMES","signal") : ("SPLIT","control"),
                ("SPLIT","signal") : ("VIDEO","control"),
                ("SPLIT","signal2") : ("FIRST","control"),
                ("FIRST","signal") : ("AUDIO","control"),
                ("AUDIO","signal") : ("","signal"),
                },
            boxsizes = {
                ("FRAMES", "inbox") : 2,
                ("SPLIT",  "inbox") : 1,
                ("FIRST", "inbox") : 2,
                ("VIDEO", "inbox") : 2,
                ("AUDIO", "inbox") : 10,
                }
            )
Example #12
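# Script-level fragment: inFileName, tmpFilePath, edlfile, outFileName,
# output_width and output_height are assumed to have been set earlier in the
# script from which this excerpt is taken.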
try:
    os.mkdir(tmpFilePath[:-1])
except:
    pass


Seq( "Decoding & separating frames...",
     Graphline(
          MAXF = DetermineMaxFrameNumber(edlfile),
          DO = Carousel( lambda maxframe : 
              DecodeAndSeparateFrames(inFileName, tmpFilePath, edlfile,maxframe),
          ),
          STOP = TriggeredOneShot(""),
          linkages = {
              ("MAXF","outbox"):("DO","next"),
              ("DO","outbox"):("","outbox"),
              
              ("DO","requestNext"):("STOP","inbox"),
              ("STOP","signal"):("DO","control"),
              ("DO","signal"):("","signal"),
          },
          ),
     "Processing edits...",
        Graphline(
            REFRAMING = ReframeVideo(edlfile, tmpFilePath, output_width, output_height),
            SOUND     = PassThroughAudio(edlfile, tmpFilePath),
            ENCODING  = ReEncode(outFileName),
        linkages = {
            ("REFRAMING","outbox") : ("ENCODING","video"),
            ("SOUND","outbox") : ("ENCODING","audio"),
Example #13
Graphline(
    DECODE=UnixProcess2(
        "ffmpeg -i " + infile +
        " -f yuv4mpegpipe -y vidpipe.yuv -f wav -y audpipe.wav",
        outpipes={
            "vidpipe.yuv": "video",
            "audpipe.wav": "audio"
        },
        buffersize=131072,
    ),
    VIDEO=Pipeline(
        1,
        YUV4MPEGToFrame(),
        FrameRateLimitedPlayback(VideoOverlay()),
    ),
    AUDIO=Graphline(
        PARSE=WAVParser(),
        OUT=Carousel(lambda format: Output(format['sample_rate'],
                                           format['channels'],
                                           format['sample_format'],
                                           maximumLag=0.5)),
        linkages={
            ("", "inbox"): ("PARSE", "inbox"),
            ("PARSE", "outbox"): ("OUT", "inbox"),
            ("PARSE", "all_meta"): ("OUT", "next"),
            ("", "control"): ("PARSE", "control"),
            ("PARSE", "signal"): ("OUT", "control"),
            ("OUT", "signal"): ("", "signal"),
        },
        boxsizes={
            ("PARSE", "inbox"): 5,
        },
    ),
    DEBUG=ConsoleEchoer(),
    linkages={
        ("DECODE", "video"): ("VIDEO", "inbox"),
        ("DECODE", "audio"): ("AUDIO", "inbox"),
        ("DECODE", "outbox"): ("DEBUG", "inbox"),
        #               ("DECODE", "error") : ("DEBUG", "inbox"),
        ("DECODE", "signal"): ("AUDIO", "control"),
        ("AUDIO", "signal"): ("VIDEO", "control"),
    },
).run()
Example #14
if __name__ == "__main__":
    from Kamaelia.Experimental.Chassis import Graphline, Pipeline
    from Kamaelia.Util.DataSource import DataSource
    from Kamaelia.Util.RateFilter import MessageRateLimit
    from Kamaelia.Util.Console import ConsoleEchoer
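    # TwoWaySplitter is assumed to be defined in the enclosing module (this
    # appears to be its own self-test block); running this standalone would
    # also require importing it.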

    Graphline(SRC=DataSource([str(i) + "\n" for i in range(0, 100)]),
              SPLIT=TwoWaySplitter(),
              DST1=Pipeline(
                  10,
                  MessageRateLimit(10, 5),
                  ConsoleEchoer(),
              ),
              DST2=Pipeline(
                  10,
                  MessageRateLimit(20, 5),
                  ConsoleEchoer(),
              ),
              linkages={
                  ("SRC", "outbox"): ("SPLIT", "inbox"),
                  ("SPLIT", "outbox"): ("DST1", "inbox"),
                  ("SPLIT", "outbox2"): ("DST2", "inbox"),
                  ("SRC", "signal"): ("SPLIT", "control"),
                  ("SPLIT", "signal"): ("DST1", "control"),
                  ("SPLIT", "signal2"): ("DST2", "control"),
              },
              boxsizes={
                  ("SPLIT", "inbox"): 999,
              }).run()