Example #1
    def ConvertScreenToWorld(self, scrx, scry, worldz, cam):
        # Screen-space picking: unproject points on the near and far planes,
        # then follow the resulting ray down to the plane z == worldz.
        invproj = vmath.inverse(cam.projectionMatrix)
        invview = cam.viewInverseMatrix

        # Convert center-origin screen coordinates to NDC (-1..1).
        w, h = core.viewSize()
        x = scrx / w * 2
        y = scry / h * 2

        # Unproject the near-plane point (NDC z = 0) and apply the perspective divide.
        pos = vmath.vec4(x, y, 0.0, 1.0)
        npos = invproj * pos
        npos = invview * npos
        npos.z /= npos.w
        npos.x /= npos.w
        npos.y /= npos.w
        npos.w = 1.0

        # Unproject the far-plane point (NDC z = 1) the same way.
        pos = vmath.vec4(x, y, 1.0, 1.0)
        fpos = invproj * pos
        fpos = invview * fpos
        fpos.z /= fpos.w
        fpos.x /= fpos.w
        fpos.y /= fpos.w
        fpos.w = 1.0

        # Ray direction from near to far; the step length below assumes the
        # camera looks straight down the -Z axis (dir.z == -1).
        dir = vmath.normalize(fpos - npos)
        return npos + (dir * (npos.z - worldz))
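For reference, a minimal usage sketch (not part of the original example): it assumes the method above lives on some helper object, here called picker, that cam is the active camera, and that core.singleTouch() reports center-origin coordinates as in the other examples. A tap is mapped onto the world plane z = 0.0.

touch = core.singleTouch()
if touch is not None and touch['is_tapped']:
    # picker is a hypothetical object exposing the method above;
    # 0.0 is the target world-space z plane.
    worldPos = picker.ConvertScreenToWorld(touch['cur_x'], touch['cur_y'], 0.0, cam)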
Example #2
def getLay(cam: core.camera, x, y):
    # Build a picking ray for the screen position (x, y): unproject points on
    # the near and far planes and return them both.
    iproj = vmath.inverse(cam.projectionMatrix)
    iview = cam.viewInverseMatrix

    # Convert center-origin screen coordinates to NDC (-1..1),
    # i.e. divide by half the view size as in the other examples.
    w, h = core.viewSize()
    pos = vmath.vec4(x / (w / 2), y / (h / 2), 0.0, 1.0)

    # Unproject the near-plane point (NDC z = 0) and apply the perspective divide.
    nearPos = iproj * pos
    nearPos = iview * nearPos
    nearPos /= nearPos.w

    # Reuse the same NDC x/y for the far-plane point (NDC z = 1).
    pos.z = 1.0
    farPos = iproj * pos
    farPos = iview * farPos
    farPos /= farPos.w

    return nearPos, farPos
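A short follow-up sketch (not from the original): given the near/far points returned by getLay, the ray direction and, for instance, the hit point on the plane z = 0 can be derived as below. cam, x and y are assumed to be the active camera and a center-origin touch position; rayDir, t and hitPos are illustrative names.

nearPos, farPos = getLay(cam, x, y)
rayDir = vmath.normalize(farPos - nearPos)
# Guard against rays running parallel to the z = 0 plane.
if abs(rayDir.z) > 1e-6:
    t = -nearPos.z / rayDir.z  # distance along the ray to z == 0
    hitPos = nearPos + rayDir * t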
Example #3
import igeCore as core
from igeCore import apputil
import igeVmath as vmath
import imgui
from igeCore.apputil.imguirenderer import ImgiIGERenderer


# Open or resize the window (this function is valid only on PC; it is ignored in smartphone apps)
core.window(True, 480, 640)

imgui.create_context()

impl = ImgiIGERenderer()

while True:
    core.update()
    w, h = core.viewSize()
    # Convert the center-origin touch position to top-left-origin window
    # coordinates and build a pressed flag.
    curX = 0
    curY = 0
    press = 0
    touch = core.singleTouch()
    if touch is not None:
        curX = touch['cur_x'] + w // 2
        curY = -touch['cur_y'] + h // 2
        press = touch['is_holded'] | touch['is_moved']
    impl.process_inputs()

    core.update()

    imgui.new_frame()

    if imgui.begin_main_menu_bar():
Example #4
while True:
    core.update()
    # Recompute figure2's world-space AABB and rebuild the box mesh vertices from it.
    min, max = figure2.getAABB(-1, core.WorldSpace)
    poss, _, _, _ = shapes.makeBoxFromAABB(min, max)
    aabbBox2.setVertexElements("mesh", core.ATTRIBUTE_ID_POSITION, poss)

    touch = core.singleTouch()
    if touch is not None:
        if touch['is_moved']:
            # Drag: rotate the figure from the touch delta.
            rotX += touch['delta_x'] * 0.05
            rotY += touch['delta_y'] * 0.05
            figure2.rotation = vmath.quat_rotationZYX((rotY, rotX, 0.0))
        elif touch['is_tapped']:
            # Tap: build a picking ray from the touch position and test it
            # against the figure's AABB.
            invProj = vmath.inverse(cam.projectionMatrix)
            invView = cam.viewInverseMatrix
            # Convert center-origin screen coordinates to NDC (-1..1).
            sw, sh = core.viewSize()
            sx = touch['cur_x'] / (sw / 2)
            sy = touch['cur_y'] / (sh / 2)
            # Near-plane point (NDC z = 0; -1 for an orthographic camera).
            near = vmath.vec4(sx, sy, 0.0, 1.0)
            if cam.orthographicProjection:
                near.z = -1.0
            near = invProj * near
            near = invView * near
            near /= near.w
            # Far-plane point (NDC z = 1).
            far = vmath.vec4(sx, sy, 1.0, 1.0)
            far = invProj * far
            far = invView * far
            far /= far.w
            # Ray from near to far, intersected with the AABB.
            dir = vmath.normalize(far - near)
            hit, dist, pos = IntersectRayAABB(near, dir, min, max)
            if hit: