Example No. 1
def make_hands():
    clips = []

    # pick a random background image
    backgrounds = glob.glob('images/*.jpg')
    background_clip = Clip(random.choice(backgrounds))

    clips.append(background_clip)

    # pick a random green-screen hand video
    hands = glob.glob('hands/*.mp4')
    hand = random.choice(hands)

    # starting x position and horizontal spacing between successive hands
    x = random.randint(-400, -200)
    distance = 100

    duration = 0

    # layer six copies of the keyed-out hand, each starting half a second
    # after the previous one and shifted right by `distance` pixels
    for i in range(0, 6):
        clip = Clip(hand)
        clip.chroma()
        clip.fadein(0.1)
        clip.fadeout(0.1)
        clip.set_offset(i * 0.5)
        clip.position(x=x)
        clips.append(clip)

        x += distance

        # total composition length: the last clip's offset plus its duration
        duration = clip.duration + i * 0.5

    composition = Composition(clips, width=1280, height=720, duration=duration)
    composition.preview()
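make_hands() above is only an excerpt and is never called here; a minimal sketch of the module-level setup it assumes (the import lines are inferred from its use of glob, random, Clip and Composition, and the call itself is hypothetical):

import glob
import random

from vidpy import Clip, Composition

make_hands()
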
Example No. 2
def compose_with_vidpy(
    maxduration=60,
    thresh=0.2,
    fade=0.3,
    duration=4,
    sections=3,
    padding=0.5,
    outname="home.mp4",
):
    shots = {}
    allshots = []

    for f in glob("videos/*.shots.json"):
        # if re.search(r'^videos/\d+', f) is None:
        #     continue

        with open(f, "r") as infile:
            data = json.load(infile)

        f = f.replace(".shots.json", "")

        # keep only shot boundaries whose detection score passes the threshold
        _shots = [d["time"] for d in data if d["score"] > thresh]

        # turn the boundary times into (start, end) pairs
        shots[f] = []
        for i, d in enumerate(_shots):
            if i > 0:
                start = _shots[i - 1]
            else:
                start = 0
            end = d
            shots[f].append((start, end))

        # if len(_shots) > 5:
        #     shots[f] = _shots
        #     allshots += _shots

    # pick random shots and lay them end to end until maxduration is reached
    offset = 0
    clips = []
    while offset < maxduration:
        filename = random.choice(list(shots.keys()))
        # skip videos with too few detected shots
        if len(shots[filename]) < 5:
            continue
        start, end = random.choice(shots[filename])
        start += padding
        end -= padding
        dur = min(end - start, duration - padding)

        clip = Clip(filename, start=start, end=start + dur, offset=offset)
        # slow zoom: scale the frame from 100% up to 150%, keeping it centered
        clip.zoompan([0, 0, "100%", "100%"], ["-25%", "-25%", "150%", "150%"],
                     0, 100)
        clip.fadein(fade)
        offset += dur - fade
        clips.append(clip)

    # if stitch:
    comp = Composition(clips)
    comp.save(outname)
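A hypothetical invocation of the function above, assuming the videos/*.shots.json files (lists of entries with "time" and "score" keys) have already been produced by a separate shot-detection step:

# assumed module-level imports for the excerpt above (not shown in it)
import json
import random
from glob import glob

from vidpy import Clip, Composition

# hypothetical call; the parameter names come from the signature above
compose_with_vidpy(maxduration=30, thresh=0.3, outname='home.mp4')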
Example No. 3
def make_video():

    clips = []

    for video in mergeSequence:
        # each mergeSequence entry describes one clip: path, start, end and tags
        filename = video["path"]
        clip = Clip(filename, start=video['start'], end=video['end'])
        if "abstract" in video["tags"]:
            clip.fadein(1)
            clip.fadeout(1.5)
        if video in beginning:
            clip.fadein(3)
        if video in ending:
            clip.fadeout(1.5)
        if video in randomSounds:
            clip.fadein(1)
            clip.fadeout(1.5)


        clip.glow()
        clips.append(clip)
        print(mergeSequence[0]["id"])

    composition = Composition(clips, singletrack=True, width=800, height=800)
    videoName = "render/videoMergeClassic" + datetime.datetime.now().strftime(
        "%Y%m%d%H%M%S") + ".mp4"
    composition.save(videoName)
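The make_video variants in this and the following examples loop over a mergeSequence list (plus beginning, ending and randomSounds lists) built elsewhere; a hypothetical entry, inferred only from the keys the loops read, might look like:

# hypothetical data shape, not shown in the original examples
mergeSequence = [
    {
        "id": "clip01",
        "path": "videos/clip01.mp4",
        "start": 0.0,
        "end": 4.5,
        "tags": ["abstract"],
    },
]
beginning, ending, randomSounds = [], [], []  # assumed to hold similar entries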
Example No. 4
def make_video():

    clips = []

    for video in mergeSequence:
        # each mergeSequence entry describes one clip: path, start, end and tags
        filename = video["path"]
        clip = Clip(filename, start=video['start'], end=video['end'])
        if "abstract" in video["tags"]:
            clip.fadein(1)
            clip.fadeout(1.5)
        if video in beginning:
            clip.fadein(3)
        if video in ending:
            clip.fadeout(1.5)
        if video in randomSounds:
            clip.fadein(1)
            clip.fadeout(1.5)

        clip.glow()
        clips.append(clip)
        print(mergeSequence[0]["id"])

    composition = Composition(clips, singletrack=True, width=800, height=800)
    composition.save('mergeVideo3_nature4.mp4')
Example No. 5
def make_video():

    clips = []
    blankclip = Clip('black.mp4')
    clips.append(blankclip)

    for video in mergeSequence:
        #print mergeSequence
        # print each video of the sequence with its tags and colors

        #filename = "./videos/" + video['id']
        filename = video["path"]
        #print video
        print(filename)
        # if filename.endswith('mp3'):
        #    continue
        clip = Clip(filename, start=video['start'], end=video['end'])
        # clip.position(x=0, y=0, w='100%', h='100%', distort=True)
        # clip.fx('crop', {
        #     'center': 1
        # })
        # clip.fx('affine', {
        #     'transition.geometry': '0/0:640x640',
        #     'transition.fill': 1,
        #     'transition.distort': 1,
        #     'transition.scale': 1,
        #     'scale': 1
        # })

        if "abstract" in video["tags"]:
            clip.fadein(1)
            clip.fadeout(1.5)
        if video in beginning:
            clip.fadein(3)
        if video in ending:
            clip.fadeout(1.5)
        if video in randomSounds:
            clip.fadein(1)
            clip.fadeout(1.5)

        clip.glow()
        clips.append(clip)
        print(mergeSequence[0]["id"])
    # clips = clips[0:3]
    composition = Composition(clips, singletrack=True)
    videoName = "render/videoMergeClassic" + datetime.datetime.now().strftime(
        "%Y%m%d%H%M%S") + ".mp4"
    #composition.save(videoName)
    composition.preview()
Example No. 6
def make_video(tag):

    clips = []

    for video in mergeSequence:
        #print mergeSequence
        # print each video of the sequence with its tags and colors

        #filename = "./videos/" + video['id']
        filename = video["path"]
        #print video
        #print filename
        clip = Clip(filename, start=video['start'], end=video['end'])
        if "abstract" in video["tags"]:
            clip.fadein(1)
            clip.fadeout(1.5)
        if video in beginning:
            clip.fadein(3)
        if video in ending:
            clip.fadeout(1.5)
        if video in randomSounds:
            clip.fadein(1)
            clip.fadeout(1.5)

        clip.glow()
        clips.append(clip)
        #print (mergeSequence[0]["id"])

    composition = Composition(clips, singletrack=True, width=800, height=800)
    #videoName = "render/videoConnected10" + datetime.datetime.now().strftime("%Y%m%d%H%M%S") +".mp4"
    # videoName = "render/video_" + tag +  datetime.datetime.now().strftime("%Y%m%d%H%M%S") +".mp4"
    videoName = "render/video_" + tag + datetime.datetime.now().strftime(
        "%Y%m%d%H%M%S") + ".mov"
    composition.save(videoName)
    #composition.save(videoName)
    #datetime.datetime.now().strftime("%Y%m%d%H%M&S")

    # set up an OSC client pointed at localhost, port 8000
    client = udp_client.UDPClient("127.0.0.1", 8000)
    print("testing message")

    #composition.preview()

    # now build and send the OSC message
    msg = osc_message_builder.OscMessageBuilder(address="/video")
    msg.add_arg("nature")
    msg = msg.build()
    client.send(msg)
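The OSC calls in this variant match the python-osc package; the imports the excerpt leaves out would presumably look something like this:

# assumed imports for the snippet above (not part of the original excerpt)
import datetime

from pythonosc import udp_client, osc_message_builder
from vidpy import Clip, Composition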
Example No. 7
import glob
import random
import time

from vidpy import Clip, Composition

hands = glob.glob('hands/*.mp4')
hand = random.choice(hands)
print(hand)

clips = []
x = -400
start = 0

backgrounds = ['#2cffff', '#fc282a', '#fcdb2a', '#2452d8']

for i in range(0, 6):  # same as: for i in [0, 1, 2, 3, 4, 5]
    clip = Clip(hand)
    clip.chroma()
    clip.set_offset(start)
    clip.position(x=x)
    clip.fadein(0.2)
    clip.fadeout(0.2)

    clips.append(clip)

    start += 0.5
    x += 150

composition = Composition(clips, bgcolor=random.choice(backgrounds))

outname = 'coolhands_{}.mp4'.format(int(time.time()))
composition.save(outname)
Example No. 8
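This excerpt starts mid-script; a hypothetical preamble defining the names it relies on (all values are placeholders, inferred from the loop body below):

# hypothetical setup for the excerpt below
import random

from vidpy import Clip

video = 'hand.mp4'                 # any input clip
canvasWidth, canvasHeight = 1280, 720
vidWidth, vidHeight = 320, 180     # size of each tile
delta = 20                         # maximum random jitter in pixels
x, y = 0, 0
clips = []
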
# while y is inside of canvas
while y < canvasHeight:

    # create a clip
    clip = Clip(video)

    # set clip position
    # add random offsets to each variable
    randomX = x + random.uniform(-delta, delta)
    randomY = y + random.uniform(-delta, delta)
    randomWidth = vidWidth + random.uniform(-delta, delta)
    randomHeight = vidHeight + random.uniform(-delta, delta)
    clip.position(x=randomX, y=randomY, w=randomWidth, h=randomHeight)

    # fade in for 0.1 second
    clip.fadein(0.1)

    # repeat the clip three times
    clip.repeat(3)

    # start the clip based on existing clips
    # add a random offset
    clip.set_offset(len(clips)*random.random())

    # add clip to list of clips
    clips.append(clip)

    # increment the x and y position
    x += vidWidth
    # if x is outside of canvas, wrap to the start of the next row
    if x > canvasWidth:
        y += vidHeight
        x = 0
Example No. 9
File: grid.py Project: yathit/vidpy
vid_height = (vid_width/canvas_width) * canvas_height

x = 0
y = 0

clips = []

while y < canvas_height:
    # create a clip
    clip = Clip(video)

    # set clip position
    clip.position(x=x, y=y, w=vid_width, h=vid_height)

    # fade in for 1/2 second
    clip.fadein(0.5)

    # repeat the clip three times
    clip.repeat(3)

    # start the clip based on existing clips
    clip.set_offset(len(clips)*.1)

    clips.append(clip)

    # increment the x and y position
    x += vid_width
    if x > canvas_width:
        y += vid_height
        x = 0
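
The excerpt stops inside the loop; presumably grid.py goes on to composite the tiled clips, along these lines:

# hypothetical continuation, not shown in the excerpt
composition = Composition(clips, width=canvas_width, height=canvas_height)
composition.preview()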