def interact():
        global speed_camera
        global cursor
        global rot

        # nvisii camera matrix
        cam_matrix = camera.get_transform().get_local_to_world_matrix()
        dt = nvisii.vec4(0, 0, 0, 0)

        # translation
        if nvisii.is_button_held("W"): dt[2] = -speed_camera
        if nvisii.is_button_held("S"): dt[2] = speed_camera
        if nvisii.is_button_held("A"): dt[0] = -speed_camera
        if nvisii.is_button_held("D"): dt[0] = speed_camera
        if nvisii.is_button_held("Q"): dt[1] = -speed_camera
        if nvisii.is_button_held("E"): dt[1] = speed_camera

        # control the camera
        if nvisii.length(dt) > 0.0:
            w_dt = cam_matrix * dt
            camera.get_transform().add_position(nvisii.vec3(w_dt))

        # camera rotation
        cursor[2] = cursor[0]
        cursor[3] = cursor[1]
        cursor[0] = nvisii.get_cursor_pos().x
        cursor[1] = nvisii.get_cursor_pos().y
        if nvisii.is_button_held("MOUSE_LEFT"):
            nvisii.set_cursor_mode("DISABLED")
            rotate_camera = True
        else:
            nvisii.set_cursor_mode("NORMAL")
            rotate_camera = False

        if rotate_camera:
            rot.x -= (cursor[0] - cursor[2]) * 0.001
            rot.y -= (cursor[1] - cursor[3]) * 0.001
            init_rot = nvisii.angleAxis(nvisii.pi() * .5, (1, 0, 0))
            yaw = nvisii.angleAxis(rot.x, (0, 1, 0))
            pitch = nvisii.angleAxis(rot.y, (1, 0, 0))
            camera.get_transform().set_rotation(init_rot * yaw * pitch)

        # change speed movement
        if nvisii.is_button_pressed("UP"):
            speed_camera *= 0.5
            print('decrease speed camera', speed_camera)
        if nvisii.is_button_pressed("DOWN"):
            speed_camera /= 0.5
            print('increase speed camera', speed_camera)
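# A minimal sketch of how this callback is typically hooked up (assuming
# nvisii.register_pre_render_callback, as used in nvisii's interactive examples;
# speed_camera, cursor, rot, and camera are globals initialized elsewhere):
nvisii.register_pre_render_callback(interact)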
Example #2
    transform = nvisii.transform.create("light_2"),
)
# a light is an entity with a light added to it. 
obj_entity.set_light(
    nvisii.light.create('light_2')
)
obj_entity.get_light().set_intensity(2)

# you can also set the light color from a texture
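# (tex is assumed to be a texture created earlier in the full script,
#  e.g. with nvisii.texture.create_from_file)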
obj_entity.get_light().set_color_texture(tex)

# let's set the size and placement of the light
obj_entity.get_transform().set_scale((0.1, 0.1, 0.1))
obj_entity.get_transform().set_position((-0.5,0.4,0))
obj_entity.get_transform().set_rotation(
    nvisii.angleAxis(90, (0,0,1))
)

# # # # # # # # # # # # # # # # # # # # # # # # #

# Let's set some objects in the scene
room = nvisii.entity.create(
    name="room",
    mesh = nvisii.mesh.create_box('room'),
    transform = nvisii.transform.create("room"),
    material = nvisii.material.create("room"),
)
room.get_transform().set_scale((2.0,2.0,2.0))
room.get_transform().set_position((0,0,2.0))
mat = nvisii.material.get("room")
mat.set_base_color(nvisii.vec3(0.19,0.16,0.19)) 
Example #3
opt.out = '15_camera_motion_car_blur.png'
opt.control = True
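# Earlier in the full script, `opt` presumably comes from an argparse setup along
# these lines (a sketch; the option names follow how `opt` is used in this example,
# and the defaults are illustrative):
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--width', default=500, type=int)
parser.add_argument('--height', default=500, type=int)
parser.add_argument('--noise', action='store_true',
                    help='render without the denoiser')
parser.add_argument('--out', default='out.png')
parser.add_argument('--control', action='store_true',
                    help='enable interactive camera control')
opt = parser.parse_args()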

# # # # # # # # # # # # # # # # # # # # # # # # #
nvisii.initialize()
nvisii.set_dome_light_intensity(.8)
nvisii.resize_window(int(opt.width), int(opt.height))
# # # # # # # # # # # # # # # # # # # # # # # # #

# load the textures
dome = nvisii.texture.create_from_file("dome", "content/teatro_massimo_2k.hdr")

# we can add HDR images to act as a dome
nvisii.set_dome_light_texture(dome)
nvisii.set_dome_light_rotation(
    nvisii.angleAxis(nvisii.pi() * .5, nvisii.vec3(0, 0, 1)))

car_speed = 0
car_speed_x = car_speed
car_speed_y = -2 * car_speed

camera_height = 80
# # # # # # # # # # # # # # # # # # # # # # # # #

if not opt.noise:
    nvisii.enable_denoiser()

camera = nvisii.entity.create(name="camera",
                              transform=nvisii.transform.create("camera"),
                              camera=nvisii.camera.create(
                                  name="camera",
Example #4
# Create a scene to use for exporting segmentations
floor = nvisii.entity.create(name="floor",
                             mesh=nvisii.mesh.create_plane("floor"),
                             transform=nvisii.transform.create("floor"),
                             material=nvisii.material.create("floor"))

floor.get_transform().set_scale((2, 2, 2))
floor.get_material().set_roughness(1.0)
areaLight1 = nvisii.entity.create(
    name="areaLight1",
    light=nvisii.light.create("areaLight1"),
    transform=nvisii.transform.create("areaLight1"),
    mesh=nvisii.mesh.create_plane("areaLight1"),
)
areaLight1.get_transform().set_rotation(nvisii.angleAxis(3.14, (1, 0, 0)))
areaLight1.get_light().set_intensity(1)
areaLight1.get_light().set_temperature(8000)
areaLight1.get_transform().set_position((0, 0, .6))
areaLight1.get_transform().set_scale((.2, .2, .2))

mesh1 = nvisii.entity.create(name="mesh1",
                             mesh=nvisii.mesh.create_teapotahedron(
                                 "mesh1", segments=64),
                             transform=nvisii.transform.create("mesh1"),
                             material=nvisii.material.create("mesh1"))

brick_base_color = nvisii.texture.create_from_file(
    "bricks_base_color", "./content/Bricks051_2K_Color.jpg")
brick_normal = nvisii.texture.create_from_file(
    "bricks_normal", "./content/Bricks051_2K_Normal.jpg", linear=True)
Example #5
# # # # # # # # # # # # # # # # # # # # # # # # #

# let's turn off the ambient lights
# load a random skybox
skyboxes = glob.glob(f'{opt.skyboxes_folder}/*.hdr')
skybox_random_selection = skyboxes[random.randint(0, len(skyboxes) - 1)]

dome_tex = visii.texture.create_from_file('dome_tex', skybox_random_selection)
visii.set_dome_light_texture(dome_tex)
visii.set_dome_light_intensity(random.uniform(1.1, 2))
# visii.set_dome_light_intensity(1.15)
visii.set_dome_light_rotation(
    # visii.angleAxis(visii.pi()/2,visii.vec3(1,0,0)) \
    # * visii.angleAxis(visii.pi()/2,visii.vec3(0,0,1))\
    visii.angleAxis(random.uniform(-visii.pi(),visii.pi()),visii.vec3(0,0,1))\
    # * visii.angleAxis(visii.pi()/2,visii.vec3(0,1,0))\
    # * visii.angleAxis(random.uniform(-visii.pi()/8,visii.pi()/8),visii.vec3(0,0,1))\
)

# # # # # # # # # # # # # # # # # # # # # # # # #
# Let's set some objects in the scene

mesh_loaded = {}
objects_to_move = []

sample_space = [[-20, 20], [-20, 20], [-30, -2]]

if opt.interactive:
    physicsClient = p.connect(p.GUI)  # graphical version
else:
    physicsClient = p.connect(p.DIRECT)  # non-graphical version
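# After connecting, these data-generation scripts typically configure the physics
# world along these lines (a sketch; gravity and step values are illustrative):
p.setGravity(0, 0, -10)       # pull spawned objects toward the floor
p.setTimeStep(1.0 / 240.0)    # pybullet's default fixed time step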
Example #6
# Create a scene to use for exporting segmentations
floor = nvisii.entity.create(name="floor",
                             mesh=nvisii.mesh.create_plane("floor"),
                             transform=nvisii.transform.create("floor"),
                             material=nvisii.material.create("floor"))

floor.get_transform().set_scale((2, 2, 2))
floor.get_material().set_roughness(1.0)
areaLight1 = nvisii.entity.create(
    name="areaLight1",
    light=nvisii.light.create("areaLight1"),
    transform=nvisii.transform.create("areaLight1"),
    mesh=nvisii.mesh.create_plane("areaLight1"),
)
areaLight1.get_transform().set_rotation(nvisii.angleAxis(3.14, (1, 0, 0)))
areaLight1.get_light().set_intensity(1)
areaLight1.get_light().set_exposure(-3)
areaLight1.get_light().set_temperature(8000)
areaLight1.get_transform().set_position((0, 0, .6))
areaLight1.get_transform().set_scale((.2, .2, .2))

mesh1 = nvisii.entity.create(name="mesh1",
                             mesh=nvisii.mesh.create_teapotahedron(
                                 "mesh1", segments=64),
                             transform=nvisii.transform.create("mesh1"),
                             material=nvisii.material.create("mesh1"))

brick_base_color = nvisii.texture.create_from_file(
    "bricks_base_color", "./content/Bricks051_2K_Color.jpg")
brick_normal = nvisii.texture.create_from_file(
    "bricks_normal", "./content/Bricks051_2K_Normal.jpg", linear=True)
Example #7
# Check out create_hsv, create_add, create_multiply, and create_mix
floor_tex = nvisii.texture.create_hsv("floor",
                                      tex,
                                      hue=0,
                                      saturation=.5,
                                      value=1.0,
                                      mix=1.0)

# we can add HDR images to act as a dome that lights up our scene

# use "enable_cdf" for dome light textures that contain
# bright objects that cast shadows (like the sun). Note
# that this has a significant impact on rendering performance,
# and is disabled by default.
nvisii.set_dome_light_texture(dome, enable_cdf=True)
nvisii.set_dome_light_rotation(nvisii.angleAxis(nvisii.pi() * .1, (0, 0, 1)))

# Let's set some objects in the scene
entity = nvisii.entity.create(
    name="floor",
    mesh=nvisii.mesh.create_plane("mesh_floor"),
    transform=nvisii.transform.create("transform_floor"),
    material=nvisii.material.create("material_floor"))
entity.get_transform().set_scale((1, 1, 1))
mat = nvisii.material.get("material_floor")

mat.set_roughness(.5)

# Let's set the base color and roughness of the object to use a texture,
# but the textures could also be used to set other
# material properties.
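# Applying floor_tex to the material presumably mirrors the setters used in
# Example #13 (a sketch, not the original lines of this snippet):
mat.set_base_color_texture(floor_tex)
mat.set_roughness_texture(floor_tex)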
Example #8
blocker = nvisii.entity.create(name="blocker",
                               mesh=nvisii.mesh.create_capped_tube(
                                   "blocker", innerRadius=.04),
                               transform=nvisii.transform.create("blocker"),
                               material=nvisii.material.create("blocker"))
blocker.get_transform().set_scale((10, 10, .01))
blocker.get_transform().set_position((0, 0, 3.0))

# Teapot
teapotahedron = nvisii.entity.create(
    name="teapotahedron",
    mesh=nvisii.mesh.create_teapotahedron("teapotahedron", segments=32),
    transform=nvisii.transform.create("teapotahedron"),
    material=nvisii.material.create("teapotahedron"))
teapotahedron.get_transform().set_rotation(
    nvisii.angleAxis(nvisii.pi() / 4.0, (0, 0, 1)))
teapotahedron.get_transform().set_position((0, 0, 0))
teapotahedron.get_transform().set_scale((0.4, 0.4, 0.4))
teapotahedron.get_material().set_base_color(
    (255.0 / 255.0, 100.0 / 255.0, 2.0 / 256.0))
teapotahedron.get_material().set_roughness(0.0)
teapotahedron.get_material().set_specular(1.0)
teapotahedron.get_material().set_metallic(1.0)
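# For reference on what the Qt window below is set up to compare: nvisii materials
# expose both alpha (cutout-style transparency) and transmission (refractive,
# glass-like) parameters. A minimal sketch with illustrative values:
teapotahedron.get_material().set_alpha(1.0)         # fully opaque
teapotahedron.get_material().set_transmission(0.0)  # no glass-like transmission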

# Make a QT window to demonstrate the difference between alpha transparency and transmission
app = QApplication([])  # Start an application.
window = QWidget()  # Create a window.
layout = QVBoxLayout()  # Create a layout.


def rotateCamera(value):
Example #9
normal_tex.set_scale((.1, .1))
rough_tex.set_scale((.1, .1))

mat.set_base_color_texture(color_tex)
mat.set_normal_map_texture(normal_tex)
mat.set_roughness_texture(rough_tex)

# # # # # # # # # # # # # # # # # # # # # # # # #

for i in range(100):
    light_entity.get_transform().look_at(
        at=(0, 0, 0),
        up=(0, 0, 1),
        eye=(math.cos(math.pi * 2.0 * (i / 100.0)),
             math.sin(math.pi * 2.0 * (i / 100.0)), 1))
    test_plane.get_transform().set_rotation(
        nvisii.angleAxis(-math.pi * 2.0 * (i / 100.0), (0, 0, 1)))
    # time.sleep(.1)
    nvisii.render_to_file(width=int(opt.width),
                          height=int(opt.height),
                          samples_per_pixel=int(opt.spp),
                          file_path=f"{opt.outf}/{str(i).zfill(5)}.png")

# let's clean up the GPU
nvisii.deinitialize()

subprocess.call([
    'ffmpeg', '-y', '-framerate', '24', '-i', r"%05d.png", '-vcodec',
    'libx264', '-pix_fmt', 'yuv420p', '../output.mp4'
],
                cwd=os.path.realpath(opt.outf))
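# The ffmpeg call above stitches the numbered frames (00000.png, 00001.png, ...)
# rendered into opt.outf into a 24 fps H.264 video, written as output.mp4 one
# directory above opt.outf.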
Example #10
def interact():
    global prev_window_size
    global speed_camera
    global cursor
    global init_rot
    global rot
    global i

    window_size = nv.vec2(nv.get_window_size().x, nv.get_window_size().y)
    if (nv.length(window_size - prev_window_size) > 0):
        camera.get_camera().set_fov(.8, window_size.x / float(window_size.y))
    prev_window_size = window_size

    # nvisii camera matrix
    cam_matrix = camera.get_transform().get_local_to_world_matrix()
    dt = nv.vec4(0, 0, 0, 0)

    # translation
    if nv.is_button_held("W"): dt[2] = -speed_camera
    if nv.is_button_held("S"): dt[2] = speed_camera
    if nv.is_button_held("A"): dt[0] = -speed_camera
    if nv.is_button_held("D"): dt[0] = speed_camera
    if nv.is_button_held("Q"): dt[1] = -speed_camera
    if nv.is_button_held("E"): dt[1] = speed_camera

    # control the camera
    if nv.length(dt) > 0.0:
        w_dt = cam_matrix * dt
        camera.get_transform().add_position(nv.vec3(w_dt))

    # camera rotation
    cursor[2] = cursor[0]
    cursor[3] = cursor[1]
    cursor[0] = nv.get_cursor_pos().x
    cursor[1] = nv.get_cursor_pos().y
    if nv.is_button_held("MOUSE_LEFT"):
        rotate_camera = True
    else:
        rotate_camera = False

    if rotate_camera:
        rot.x -= (cursor[0] - cursor[2]) * 0.001
        rot.y -= (cursor[1] - cursor[3]) * 0.001
        # init_rot = nv.angleAxis(nv.pi() * .5, (1,0,0))
        yaw = nv.angleAxis(rot.x, (0, 1, 0))
        pitch = nv.angleAxis(rot.y, (1, 0, 0))
        camera.get_transform().set_rotation(init_rot * yaw * pitch)

    # change speed movement
    if nv.is_button_pressed("UP"):
        speed_camera *= 0.5
        print('decrease speed camera', speed_camera)

    if nv.is_button_pressed("DOWN"):
        speed_camera /= 0.5
        print('increase speed camera', speed_camera)

    # Render out an image
    if nv.is_button_pressed("SPACE"):
        i = i + 1
        nv.render_to_file(nv.get_window_size().x,
                          nv.get_window_size().y, 256,
                          str(i) + ".png")
Example #11
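        # (This snippet is from inside a double loop over x and y; box_mesh,
        #  sphere_mesh, and hsv_to_rgb, e.g. colorsys.hsv_to_rgb, are assumed
        #  to be defined earlier in the full script.)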
        name = str(x) + "_" + str(y)
        m = box_mesh
        if y % 4 < 2:
            m = sphere_mesh

        nvisii.entity.create(
            name=name,
            mesh=m,
            transform=nvisii.transform.create(
                name=name,
                # position = (x * 1.3 + .33 * pow(-1, y), 0, y * .35),
                position=(x * .25, 0, y * .25 * .7 + .02 * pow(-1, y)),
                # scale = (.15, .15, .15),
                scale=((y % 2) * .01 + .09, (y % 2) * .01 + .09,
                       (y % 2) * .01 + .09),
                rotation=nvisii.angleAxis(-.78, (1, 0, 0))),
            material=nvisii.material.create(name=name))
        mat = nvisii.material.get(name)

        # The diffuse, metal, or glass surface color
        # mat.set_base_color(...)
        mat.set_base_color(hsv_to_rgb(y / 60.0, y % 2, 1.0))

        # Specifies the microfacet roughness of the surface for diffuse
        # or specular reflection
        if y == 0 or y == 1:
            mat.set_roughness(x / 20.0)

        # Blends between a non-metallic and a metallic material model.
        if y == 2 or y == 3:
            mat.set_roughness(0.0)
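            # The snippet cuts off here; in this material-sweep example the metallic
            # rows presumably continue with something like (a sketch, not the
            # original line):
            mat.set_metallic(x / 20.0)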
Example #12
obj_entity = nvisii.entity.create(
    name="light_2",
    mesh=nvisii.mesh.create_teapotahedron('light_2'),
    transform=nvisii.transform.create("light_2"),
)
# a light is an entity with a light added to it.
obj_entity.set_light(nvisii.light.create('light_2'))
obj_entity.get_light().set_intensity(2)

# you can also set the light color manually
obj_entity.get_light().set_color((1., .0, .0))

# let's set the size and placement of the light
obj_entity.get_transform().set_scale((0.1, 0.1, 0.1))
obj_entity.get_transform().set_position((0.2, -0.7, 0.10))
obj_entity.get_transform().set_rotation(nvisii.angleAxis(90, (0, 0, 1)))

# third light
obj_entity = nvisii.entity.create(
    name="light_3",
    mesh=nvisii.mesh.create_plane('light_3', flip_z=True),
    transform=nvisii.transform.create("light_3"),
)
obj_entity.set_light(nvisii.light.create('light_3'))
# Intensity affects the appearance of the light in
# addition to how much light it emits.
obj_entity.get_light().set_intensity(1)

# Exposure does not affect the direct appearance of the light,
# but it does affect the relative power of the light in illuminating
# other objects.
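# The snippet cuts off here; based on the matching call in Example #6, the
# exposure is presumably set along these lines (value illustrative):
obj_entity.get_light().set_exposure(-3)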
Example #13
# load an example brick texture 
color_tex = nvisii.texture.create_from_file("color",'content/Bricks051_2K_Color.jpg')
normal_tex = nvisii.texture.create_from_file("normal",'content/Bricks051_2K_Normal.jpg', linear = True)
rough_tex = nvisii.texture.create_from_file("rough",'content/Bricks051_2K_Roughness.jpg', linear = True)

color_tex.set_scale((.1,.1))
normal_tex.set_scale((.1,.1))
rough_tex.set_scale((.1,.1))

mat.set_base_color_texture(color_tex)
mat.set_normal_map_texture(normal_tex)
mat.set_roughness_texture(rough_tex)

# # # # # # # # # # # # # # # # # # # # # # # # #

for i in range(100):
    light_entity.get_transform().look_at(
        at=(0, 0, 0),
        up=(0, 0, 1),
        eye=(math.cos(math.pi * 2.0 * (i / 100.0)),
             math.sin(math.pi * 2.0 * (i / 100.0)), 1))
    test_plane.get_transform().set_rotation(
        nvisii.angleAxis(-math.pi * 2.0 * (i / 100.0), (0, 0, 1)))
    # time.sleep(.1)
    nvisii.render_to_file(
        width=int(opt.width), 
        height=int(opt.height), 
        samples_per_pixel=int(opt.spp),
        file_path=f"{opt.outf}/{str(i).zfill(5)}.png"
    )

# let's clean up the GPU
nvisii.deinitialize()

subprocess.call([
    'ffmpeg', '-y', '-framerate', '24', '-i', r"%05d.png", '-vcodec',
    'libx264', '-pix_fmt', 'yuv420p', '../output.mp4'
], cwd=os.path.realpath(opt.outf))
Example #14
                                  name="camera",
                                  aspect=float(opt.width) / float(opt.height)))

camera.get_transform().look_at(
    at=(-5, 0, 12),  # look at (world coordinate)
    up=(0, 0, 1),  # up vector
    eye=(5, -15, 18))
nvisii.set_camera_entity(camera)

# # # # # # # # # # # # # # # # # # # # # # # # #

sdb = nvisii.import_scene(
    file_path='content/salle_de_bain_separated/salle_de_bain_separated.obj',
    position=(1, 0, 0),
    scale=(1.0, 1.0, 1.0),
    rotation=nvisii.angleAxis(3.14 * .5, (1, 0, 0)),
    args=["verbose"]  # list assets as they are loaded
)

# Using the above function,
# nvisii loads each obj model as its own entity.
# You can find them by name (with an optional prefix added
# to the front of each generated component name)

# nvisii generates the same naming pattern for the different
# materials defined in the mtl file

# since obj/mtl files do not define metallic properties,
# let's add them manually to the material
mirror = nvisii.material.get('Mirror')
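# A sketch of the manual metallic settings the comment above refers to (the exact
# values used in the original file are not shown in this snippet):
mirror.set_metallic(1.0)
mirror.set_roughness(0.0)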