Example no. 1
    def _load_disk(self):
        """overrides method of Loadable
    Pngfont, Font, Defocus and ShadowCaster inherit from Texture but
    don't do all this so have to override this
    """

        # If already loaded, abort
        if self._loaded:
            return

        if self.string_type == FILE:
            s = self.file_string + ' '
            im = Image.open(self.file_string)
        elif self.string_type == PIL_IMAGE:
            s = 'PIL.Image '
            im = self.file_string
        else:
            s = 'numpy.ndarray '
            self.iy, self.ix, mode = self.file_string.shape
            self.image = self.file_string
            self._tex = ctypes.c_int()
            self._loaded = True
            return  # skip the rest for numpy arrays - faster but no size checking

        # only do this if loading from disk or PIL image
        self.ix, self.iy = im.size
        s += '(%s)' % im.mode

        if self.mipmap:
            resize_type = Image.BICUBIC
        else:
            resize_type = Image.NEAREST

        # work out if sizes > MAX_SIZE or coerce to golden values in WIDTHS
        if self.iy > self.ix and self.iy > MAX_SIZE:  # fairly rare circumstance
            im = im.resize((int((MAX_SIZE * self.ix) / self.iy), MAX_SIZE))
            self.ix, self.iy = im.size
        n = len(WIDTHS)
        for i in xrange(n - 1, 0, -1):
            if self.ix == WIDTHS[i]:
                break  # no need to resize as already a golden size
            if self.ix > WIDTHS[i]:
                im = im.resize((WIDTHS[i], int(
                    (WIDTHS[i] * self.iy) / self.ix)), resize_type)
                self.ix, self.iy = im.size
                break

        if VERBOSE:
            print('Loading ...{}'.format(s))

        if self.flip:
            im = im.transpose(Image.FLIP_TOP_BOTTOM)

        #self.image = im.tostring('raw', RGBs) # TODO change to tobytes WHEN Pillow is default PIL in debian (jessie becomes current)
        self.image = self._img_to_array(im)
        self._tex = ctypes.c_int()
        if self.string_type == FILE and 'fonts/' in self.file_string:
            self.im = im

        self._loaded = True
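
A minimal usage sketch of the three input types this method distinguishes when a
pi3d.Texture is built (the file path and array shape are illustrative, and in a real
program a pi3d.Display would be created before anything is drawn):

from PIL import Image
import numpy as np
import pi3d

tex_from_file = pi3d.Texture("textures/pattern.png")             # string_type == FILE
tex_from_pil = pi3d.Texture(Image.open("textures/pattern.png"))  # string_type == PIL_IMAGE
rgba = np.zeros((256, 256, 4), dtype=np.uint8)                   # shape (iy, ix, channels)
rgba[..., 3] = 255                                               # opaque alpha
tex_from_array = pi3d.Texture(rgba)                              # numpy branch: no size checks
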
Example no. 2
    def destroy(self, display=None):
        if self.active:
            ###### brute force tidying experiment TODO find nicer way ########
            if display:
                func_list = [
                    [
                        opengles.glIsBuffer, opengles.glDeleteBuffers,
                        dict(display.vbufs_dict.items() +
                             display.ebufs_dict.items())
                    ],
                    [
                        opengles.glIsTexture, opengles.glDeleteTextures,
                        display.textures_dict
                    ], [opengles.glIsProgram, opengles.glDeleteProgram, 0],
                    [opengles.glIsShader, opengles.glDeleteShader, 0]
                ]
                i_ct = (ctypes.c_int * 1)(0)  #convoluted 0
                for func in func_list:
                    max_streak = 100
                    streak_start = 0
                    if func[2]:  # list to work through
                        for i in func[2]:
                            if func[0](func[2][i]
                                       [0]) == 1:  #check if i exists as a name
                                func[1](1, ctypes.byref(func[2][i][0]))
                    else:  # just do sequential numbers
                        for i in xrange(10000):
                            if func[0](i) == 1:  #check if i exists as a name
                                i_ct[0] = i  #convoluted 1
                                func[1](ctypes.byref(i_ct))
                                streak_start = i
                            elif i > (streak_start + 100):
                                break
            ##################################################################
            openegl.eglSwapBuffers(self.display, self.surface)
            openegl.eglMakeCurrent(self.display, EGL_NO_SURFACE,
                                   EGL_NO_SURFACE, EGL_NO_CONTEXT)
            openegl.eglDestroySurface(self.display, self.surface)
            openegl.eglDestroyContext(self.display, self.context)
            openegl.eglTerminate(self.display)
            if PLATFORM == PLATFORM_PI:
                self.dispman_update = bcm.vc_dispmanx_update_start(0)
                bcm.vc_dispmanx_element_remove(self.dispman_update,
                                               self.dispman_element)
                bcm.vc_dispmanx_update_submit_sync(self.dispman_update)
                bcm.vc_dispmanx_display_close(self.dispman_display)

            self.active = False
            if pi3d.USE_PYGAME:
                import pygame
                pygame.display.quit()
            elif PLATFORM != PLATFORM_PI and PLATFORM != PLATFORM_ANDROID:
                xlib.XCloseDisplay(self.d)
Example no. 3
  def destroy(self, display=None):
    if self.active:
      ###### brute force tidying experiment TODO find nicer way ########
      if display:
        func_list = [[opengles.glIsBuffer, opengles.glDeleteBuffers,
            dict(display.vbufs_dict.items() + display.ebufs_dict.items())],
            [opengles.glIsTexture, opengles.glDeleteTextures,
            display.textures_dict],
            [opengles.glIsProgram, opengles.glDeleteProgram, 0],
            [opengles.glIsShader, opengles.glDeleteShader, 0]]
        i_ct = (ctypes.c_int * 1)(0) #convoluted 0
        for func in func_list:
          max_streak = 100
          streak_start = 0
          if func[2]: # list to work through
            for i in func[2]:
              if func[0](func[2][i][0]) == 1: #check if i exists as a name
                func[1](1, ctypes.byref(func[2][i][0]))
          else: # just do sequential numbers
            for i in xrange(10000):
              if func[0](i) == 1: #check if i exists as a name
                i_ct[0] = i #convoluted 1
                func[1](ctypes.byref(i_ct))
                streak_start = i
              elif i > (streak_start + 100):
                break
      ##################################################################
      openegl.eglSwapBuffers(self.display, self.surface)
      openegl.eglMakeCurrent(self.display, EGL_NO_SURFACE, EGL_NO_SURFACE,
                             EGL_NO_CONTEXT)
      openegl.eglDestroySurface(self.display, self.surface)
      openegl.eglDestroyContext(self.display, self.context)
      openegl.eglTerminate(self.display)
      if PLATFORM == PLATFORM_PI:
        self.dispman_update = bcm.vc_dispmanx_update_start(0)
        bcm.vc_dispmanx_element_remove(self.dispman_update, self.dispman_element)
        bcm.vc_dispmanx_update_submit_sync(self.dispman_update)
        bcm.vc_dispmanx_display_close(self.dispman_display)

      self.active = False
      if pi3d.USE_PYGAME:
        import pygame
        pygame.display.quit()
      elif PLATFORM != PLATFORM_PI and PLATFORM != PLATFORM_ANDROID:
        xlib.XCloseDisplay(self.d)
Example no. 4
#!/usr/bin/python

from __future__ import absolute_import, division, print_function, unicode_literals

import experiment

import pi3d
import sys
import time

from six_mod.moves import xrange

DEFAULT_SLEEP = 0.0
DEFAULT_ITERATIONS = 5000

SLEEP = DEFAULT_SLEEP if len(sys.argv) < 2 else float(sys.argv[1])
ITERATIONS = DEFAULT_ITERATIONS if len(sys.argv) < 3 else int(sys.argv[2])

for i in xrange(ITERATIONS):
  d = pi3d.Display.create()
  d.destroy()
  print(i)
  if SLEEP > 0:
    time.sleep(SLEEP)
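
The sleep between iterations and the iteration count can be passed on the command
line; a hypothetical invocation (the script name is illustrative):

python create_destroy_test.py 0.1 1000   # 1000 create/destroy cycles, 0.1s apart
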
Example no. 5
    def _load_disk(self):
        """overrides method of Loadable
    Pngfont, Font, Defocus and ShadowCaster inherit from Texture but
    don't do all this so have to override this
    """

        # If already loaded, abort
        if self._loaded:
            return

        if self.string_type == FILE and PIL_OK:
            s = self.file_string + ' '
            im = Image.open(self.file_string)
        elif self.string_type == PIL_IMAGE and PIL_OK:
            s = 'PIL.Image '
            im = self.file_string
        else:
            if self.string_type == NUMPY:
                s = 'numpy.ndarray '
                self.image = self.file_string
            else:  # i.e. FILE but not PIL_OK
                ''' NB this has to be a compressed numpy array saved using something like
              im = np.array(Image.open('{}.png'.format(FNAME)))
              np.savez_compressed('{}'.format(FNAME), im)
        which will produce a file with extension .npz '''
                s = self.file_string + ' '
                self.image = np.load(self.file_string)[
                    'arr_0']  # has to be saved with default key

            self.iy, self.ix, _mode = self.image.shape
            self._tex = GLuint()
            self._loaded = True
            return  # skip the rest for numpy arrays - faster but no size checking

        # only do this if loading from disk or PIL image
        self.ix, self.iy = im.size
        s += '(%s)' % im.mode

        if self.mipmap:
            resize_type = Image.BICUBIC
        else:
            resize_type = Image.NEAREST

        # work out if sizes > MAX_SIZE or coerce to golden values in WIDTHS
        if self.automatic_resize:  # default True
            if self.iy > self.ix and self.iy > MAX_SIZE:  # fairly rare circumstance
                im = im.resize((int((MAX_SIZE * self.ix) / self.iy), MAX_SIZE))
                self.ix, self.iy = im.size
            n = len(WIDTHS)
            for i in xrange(n - 1, 0, -1):
                if self.ix == WIDTHS[i]:
                    break  # no need to resize as already a golden size
                if self.ix > WIDTHS[i]:
                    im = im.resize(
                        (WIDTHS[i], int(
                            (WIDTHS[i] * self.iy) / self.ix)), resize_type)
                    self.ix, self.iy = im.size
                    break

        LOGGER.debug('Loading ...%s', s)

        if isinstance(self.flip, bool):
            # Old behaviour
            if self.flip:
                im = im.transpose(Image.FLIP_TOP_BOTTOM)
        else:
            if self.flip & 1:
                im = im.transpose(Image.FLIP_TOP_BOTTOM)
            if self.flip & 2:
                im = im.transpose(Image.FLIP_LEFT_RIGHT)

        #self.image = im.tostring('raw', RGBs) # TODO change to tobytes WHEN Pillow is default PIL in debian (jessie becomes current)
        self.image = self._img_to_array(im)
        self._tex = GLuint()
        if self.string_type == FILE and 'fonts/' in self.file_string:
            self.im = im

        self._loaded = True
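
The non-PIL fallback above expects a compressed numpy archive saved under the default
key; a minimal sketch of producing one from a PNG, following the recipe in the
docstring comment (the file name is illustrative):

from PIL import Image
import numpy as np

FNAME = 'textures/rock1'                      # hypothetical base name, no extension
im = np.array(Image.open('{}.png'.format(FNAME)))
np.savez_compressed('{}'.format(FNAME), im)   # writes textures/rock1.npz with key 'arr_0'

# later, on a system without PIL:
img = np.load('{}.npz'.format(FNAME))['arr_0']
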
Example no. 6
  def __init__(self, mapfile, camera=None, light=None,
               width=100.0, depth=100.0, height=10.0,
               divx=0, divy=0, ntiles=1.0, name="",
               x=0.0, y=0.0, z=0.0, rx=0.0, ry=0.0, rz=0.0,
               sx=1.0, sy=1.0, sz=1.0, cx=0.0, cy=0.0, cz=0.0, smooth=True, 
               cubic=False, texmap=None):
    """uses standard constructor for Shape

    Arguments:
      *mapfile*
        Greyscale image path/file, string. If multiple bytes per pixel
        only the first one will be used for elevation. jpg files will
        create slight errors that will cause mis-matching of edges for
        tiled maps (i.e. use png for these) NB also see divx, divy below
        i.e. div=64x64 requires image 65x65 pixels

    Keyword arguments:
      *width, depth, height*
        Of the map in world units.
      *divx, divy*
        Number of divisions into which the map will be divided to create
        vertices (one more vertex than divisions in each direction). NB if you
        want to create a map with 64x64 divisions there will be 65x65 vertices
        in each direction, so the mapfile (above) needs to be 65x65 pixels in
        order to specify elevations precisely and avoid resizing errors.
      *ntiles*
        Number of repeats for tiling the texture image.
      *smooth*
        Calculate normals with averaging rather than pointing
        straight up, slightly faster if false.
      *texmap*
        Image file path or PIL.Image to be used to represent each of four
        textures and normals using the uv_elev_map shader. The image is
        converted to greyscale and apportioned between darkest (first and
        second entries in Buffer.textures list) and lightest (seventh and
        eighth entries). The resulting 0.0, 1.0, 2.0 or 3.0 is added to the
        uv texture coordinate i.e. Buffer.array_buffer[:,6:8]
    """
    super(ElevationMap, self).__init__(camera, light, name, x, y, z, rx, ry, rz,
                                       sx, sy, sz, cx, cy, cz)
    divx += 1 # one more vertex in each direction than number of divisions
    divy += 1
    if divx > 200 or divy > 200:
      LOGGER.warning("... Map size can't be bigger than 199x199 divisions")
      divx = 200
      divy = 200
    #print(type(mapfile), type(""))

    if PIL_OK:
      try:
        if '' + mapfile == mapfile: #HORRIBLE. Only way to cope with python2v3
          mapfile = file_pathify(mapfile)
          LOGGER.info("Loading height map ...%s", mapfile)

          im = Image.open(mapfile)
        else:
          im = mapfile #allow image files to be passed as mapfile
      except:
        im = mapfile
      ix, iy = im.size
      if (ix > 200 and divx == 0) or (ix != divx and iy != divy):
        if divx == 0:
          divx = 200
          divy = 200
        im = im.resize((divx, divy), Image.ANTIALIAS)
        ix, iy = im.size
      im = im.convert('L')
      im = im.transpose(Image.FLIP_TOP_BOTTOM)
      im = im.transpose(Image.FLIP_LEFT_RIGHT)
      self.pixels = im.load()
      if texmap is not None:
        try:
          texmap = file_pathify(texmap)
          tm = Image.open(texmap)
        except:
          tm = texmap
        tm = tm.convert('L')
        tm = tm.resize((ix, iy))
        tm = np.array(tm)
        tm = np.floor(tm * 3.99 / (tm.max() - tm.min()))

    else: 
      ''' images saved as compressed numpy npz file. No resizing so needs 
      to be right size.'''
      mapfile = file_pathify(mapfile)
      self.pixels = np.load(mapfile)['arr_0'][::-1,::-1] # has to be saved with default key
      ix, iy = self.pixels.shape[:2]
    
    self.width = width
    self.depth = depth
    self.height = height
    self.ix = ix
    self.iy = iy
    self.ttype = GL_TRIANGLE_STRIP
    self.ht_y = 0.0
    self.ht_n = np.array([0.0, 1.0, 0.0])

    LOGGER.info("Creating Elevation Map ...%d x %d", ix, iy)

    self.wh = width * 0.5
    self.hh = depth * 0.5
    self.ws = width / (ix - 1.0)
    self.hs = depth / (iy - 1.0)
    self.ht = height / 255.0
    tx = 1.0 * ntiles / ix
    ty = 1.0 * ntiles / iy

    verts = []
    norms = []
    tex_coords = []
    idx = []

    for y in xrange(0, iy):
      for x in xrange(0, ix):
        hgt = self.pixels[x, y] * self.ht
        verts.append((-self.wh + x * self.ws, 
                      hgt, 
                      -self.hh + y * self.hs))
        if texmap is not None:
          tex_n = tm[x, y]
        else:
          tex_n = 0.0
        tex_coords.append((tex_n + (ix - x) * tx, (iy - y) * ty))

    s = 0
    #create one long triangle_strip by alternating X directions
    for y in range(0, iy-1):
      for x in range(0, ix-1):
        i = (y * ix)+x
        idx.append((i, i+ix, i+ix+1))
        idx.append((i+ix+1, i+1, i))
        s += 2

    self.buf = []
    self.buf.append(Buffer(self, verts, tex_coords, idx, None, smooth))
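
A minimal construction sketch based on the signature above (both image paths are
hypothetical; per the docstring a map with 64x64 divisions wants a 65x65 pixel
greyscale image, and texmap only makes sense with the uv_elev_map shader):

import pi3d

mymap = pi3d.ElevationMap("textures/height65.png",       # 65x65 greyscale height map
                          width=100.0, depth=100.0, height=10.0,
                          divx=64, divy=64, ntiles=4,
                          texmap="textures/texmap.png")   # optional four-way texture map
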
Example no. 7
    def __init__(
        self,
        mapfile,
        camera=None,
        light=None,
        width=100.0,
        depth=100.0,
        height=10.0,
        divx=0,
        divy=0,
        ntiles=1.0,
        name="",
        x=0.0,
        y=0.0,
        z=0.0,
        rx=0.0,
        ry=0.0,
        rz=0.0,
        sx=1.0,
        sy=1.0,
        sz=1.0,
        cx=0.0,
        cy=0.0,
        cz=0.0,
        smooth=True,
        cubic=False,
    ):
        """uses standard constructor for Shape

    Arguments:
      *mapfile*
        Greyscale image path/file, string. If multiple bytes per pixel
        only the first one will be used for elevation. jpg files will
        create slight errors that will cause mis-matching of edges for
        tiled maps (i.e. use png for these) NB also see divx, divy below
        i.e. div=64x64 requires image 65x65 pixels

    Keyword arguments:
      *width, depth, height*
        Of the map in world units.
      *divx, divy*
        Number of divisions into which the map will be divided to create
        vertices (one more vertex than divisions in each direction). NB if you
        want to create a map with 64x64 divisions there will be 65x65 vertices
        in each direction, so the mapfile (above) needs to be 65x65 pixels in
        order to specify elevations precisely and avoid resizing errors.
      *ntiles*
        Number of repeats for tiling the texture image.
      *smooth*
        Calculate normals with averaging rather than pointing
        straight up, slightly faster if false.
    """
        super(ElevationMap, self).__init__(camera, light, name, x, y, z, rx, ry, rz, sx, sy, sz, cx, cy, cz)
        divx += 1  # one more vertex in each direction than number of divisions
        divy += 1
        if divx > 200 or divy > 200:
            print("... Map size can't be bigger than 199x199 divisions")
            divx = 200
            divy = 200
        # print(type(mapfile), type(""))

        if PIL_OK:
            try:
                if "" + mapfile == mapfile:  # HORRIBLE. Only way to cope with python2v3
                    if mapfile[0] != "/":
                        for p in sys.path:
                            if os.path.isfile(
                                p + "/" + mapfile
                            ):  # this could theoretically get different files with same name
                                mapfile = p + "/" + mapfile
                                break
                    if VERBOSE:
                        print("Loading height map ...", mapfile)

                    im = Image.open(mapfile)
                else:
                    im = mapfile  # allow image files to be passed as mapfile
            except:
                im = mapfile
            ix, iy = im.size
            if (ix > 200 and divx == 0) or (ix != divx and iy != divy):
                if divx == 0:
                    divx = 200
                    divy = 200
                im = im.resize((divx, divy), Image.ANTIALIAS)
                ix, iy = im.size

            im = im.transpose(Image.FLIP_TOP_BOTTOM)
            im = im.transpose(Image.FLIP_LEFT_RIGHT)
            self.pixels = im.load()
        else:
            """ images saved as compressed numpy npz file. No resizing so needs 
      to be right size. TODO make this repeated code less WET"""
            if mapfile[0] != "/":
                for p in sys.path:
                    if os.path.isfile(p + "/" + mapfile):  # this could theoretically get different files with same name
                        mapfile = p + "/" + mapfile
                        break
            self.pixels = np.load(mapfile)["arr_0"][::-1, ::-1]  # has to be saved with default key
            ix, iy = self.pixels.shape[:2]
        self.width = width
        self.depth = depth
        self.height = height
        self.ix = ix
        self.iy = iy
        self.ttype = GL_TRIANGLE_STRIP
        self.ht_y = 0.0
        self.ht_n = np.array([0.0, 1.0, 0.0])

        if VERBOSE:
            print("Creating Elevation Map ...", ix, iy)

        self.wh = width * 0.5
        self.hh = depth * 0.5
        self.ws = width / (ix - 1.0)
        self.hs = depth / (iy - 1.0)
        self.ht = height / 255.0
        tx = 1.0 * ntiles / ix
        ty = 1.0 * ntiles / iy

        verts = []
        norms = []
        tex_coords = []
        idx = []

        for y in xrange(0, iy):
            for x in xrange(0, ix):
                pxl = self.pixels[x, y]
                hgt = pxl[0] if hasattr(pxl, "__iter__") else pxl
                hgt *= self.ht
                this_x = -self.wh + x * self.ws
                this_z = -self.hh + y * self.hs
                verts.append((this_x, hgt, this_z))
                tex_coords.append(((ix - x) * tx, (iy - y) * ty))

        s = 0
        # create one long triangle_strip by alternating X directions
        for y in range(0, iy - 1):
            for x in range(0, ix - 1):
                i = (y * ix) + x
                idx.append((i, i + ix, i + ix + 1))
                idx.append((i + ix + 1, i + 1, i))
                s += 2

        self.buf = []
        self.buf.append(Buffer(self, verts, tex_coords, idx, None, smooth))
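
The index loop above emits two triangles per grid cell; running the same arithmetic
standalone for a tiny 3x3 vertex grid (ix = iy = 3) shows the winding it produces:

ix, iy = 3, 3
idx = []
for y in range(iy - 1):
    for x in range(ix - 1):
        i = (y * ix) + x
        idx.append((i, i + ix, i + ix + 1))  # triangle using this row and the row above
        idx.append((i + ix + 1, i + 1, i))   # second triangle completing the quad
print(idx)
# [(0, 3, 4), (4, 1, 0), (1, 4, 5), (5, 2, 1),
#  (3, 6, 7), (7, 4, 3), (4, 7, 8), (8, 5, 4)]
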
Example no. 8
def loadFileEGG(model, fileName):
    """Loads an panda3d egg file to produce Buffer object
  as part of a Shape.

  Arguments:
    *model*
      Model object to add to.
    *fileName*
      Path and name of egg file relative to program file.

  """
    model.coordinateSystem = "Y-up"
    model.materialList = {}
    model.textureList = {}
    model.vertexGroupList = {}
    model.vertexList = []
    model.polygonList = []
    model.childModelList = []
    model.parentModel = None
    model.childModel = [
    ]  # don't really need parent and child pointers but will speed up traversing tree
    model.vNormal = False
    model.vGroup = {}  # holds the information for each vertex group

    # read in the file and parse into some arrays

    if fileName[0] != '/':
        import os
        for p in sys.path:
            if os.path.isfile(
                    p + '/' + fileName
            ):  # this could theoretically get different files with same name
                fileName = p + '/' + fileName
                break
    filePath = os.path.split(os.path.abspath(fileName))[0]
    if VERBOSE:
        print(filePath)
    f = open(fileName, 'r')
    l = f.read(
    )  # whole thing as a string in memory this will only work for reasonably small files!!!

    ############### function to parse file as one off process to avoid re-traversing string #########
    # converts the '<a> b { c <d> e {f} <g> h {i} }' structure
    # into nested arrays ['a', 'b', 'c',[['d','e','',['','','f',[]]],['g','h','',['','','i',[]]]]]
    def pRec(x, bReg, l, i):
        while 1:
            try:
                nxtFind = advance_iterator(bReg)
                j = nxtFind.start()
            except:
                return i + 1
            c = l[j]
            if c == "<":  # add entry to array at this level
                if len(x[3]) == 0:
                    x[2] = l[i:j].strip(
                    )  # text after "{" and before "<Tabxyz>"
                i = j + 1  # save marker for start of descriptor
                x[3].append(["", "", "", []])

            elif c == "{":
                xn = x[3][len(x[3]) - 1]
                tx = l[i - 1:j].strip().split()
                xn[0] = tx[
                    0]  #x[0] & x[1] is the "<Tabxyz>" & "123" prior to "{"
                xn[1] = tx[1] if len(tx) > 1 else ""
                i = pRec(xn, bReg, l, j + 1)
            else:  #i.e. c="}" # go up one level of recursion
                if len(x[3]) == 0: x[2] = l[i:j].strip()
                return j + 1

    ################### end of pRec #################

    ####### go through all the nested <Groups> ####################
    def groupDrill(gp, np):
        structVList = {}
        offsetVList = {}
        structPList = []
        offset = 0
        #numv = 0
        #numi = 0
        for x in gp:
            if len(x) == 0: continue
            if ("<Group>" in x[0]):
                if len(x[1]) > 0:
                    nextnp = np + x[1]
                else:
                    nextnp = np + str(randint(10000, 99999))
                groupDrill(x[3], nextnp)
            else:
                #build vertex, polygon, normal, triangles, UVs etc etc
                if "<VertexPool>" in x[0]:
                    vp = x[1]
                    structVList[vp] = []
                    offsetVList[vp] = offset
                    for v in x[3]:
                        #if "<Vertex>" in v[0]: #try with this test first!
                        coords = [float(n) for n in v[2].strip().split()
                                  ]  # before first < error if no coords!
                        # texture mapping
                        UVcoords = []
                        normal = []
                        for u in v[3]:
                            if "<UV>" in u[0]:
                                UVcoords = [
                                    float(n) for n in u[2].strip().split()
                                ]
                            #TODO get UVtangent and UVbinormal out of UVset (and use them!)
                            # if ("<Tangent>" in vList[v]): UVtangent = [float(n) for n in (extBracket("<Tangent>", vList[v]))[0].split()] # not sure how to use this at this stage
                            #else: UVtangent = []
                            # if ("<Binormal>" in vList[v]): UVbinormal = [float(n) for n in (extBracket("<Binormal>", vList[v]))[0].split()] # not sure how to use this at this stage
                            #else: UVbinormal = []
                            # normals, used in 'smoothing' edges between polygons
                            if "<Normal>" in u[0]:
                                normal = [
                                    float(n) for n in u[2].strip().split()
                                ]
                        vInt = int(v[1])
                        while (len(structVList[vp]) < (vInt + 1)):
                            structVList[vp].append("")
                        structVList[vp][vInt] = (vertex(
                            coords, UVcoords, normal))
                        offset += 1
        #
        # now go through splitting out the Polygons from this Group same level as vertexGroup
            if "<Polygon>" in x[0]:
                normal = []
                rgba = []
                MRef = ""
                TRef = ""
                for p in x[3]:
                    if ("<Normal>" in p[0]):
                        normal = [float(n) for n in p[2].strip().split()]
                    if ("<RGBA>" in p[0]):
                        rgba = [float(n) for n in p[2].strip().split()]
                    if ("<MRef>" in p[0]): MRef = p[2].strip()
                    if ("<TRef>" in p[0]): TRef = p[2].strip()
                    if ("<VertexRef>" in p[0]):
                        vref = []
                        for n in p[2].strip().split():
                            vref.append(int(n))
                            #numv += 1
                            #numi += 3
                        #numi -= 6 # number of corners of triangle = (n-2)*3 where n is the number of corners of face
                        vpKey = p[3][0][2].strip(
                        )  # ought to do a for r in p[3]; if "Ref in...
                # add to list
                #while (len(structPList) < (p+1)): structPList.append("")
                #
                structPList.append(
                    polygon(normal, rgba, MRef, TRef, vref, vpKey))

        # now go through the polygons in order of vertexPool+id, trying to ensure that the polygon arrays in each group are built in the order of vertexPool names
        # only cope with one material and one texture per group
        #numv -= 1
        #numi -= 1
        g_vertices = []
        g_normals = []
        g_tex_coords = []
        g_indices = []
        nv = 0  # vertex counter in this material
        #ni = 0 # triangle vertex count in this material

        gMRef = ""
        gTRef = ""
        nP = len(structPList)
        for p in xrange(nP):
            if (len(structPList[p].MRef) > 0): gMRef = structPList[p].MRef
            else: gMRef = ""
            if (len(structPList[p].TRef) > 0): gTRef = structPList[p].TRef
            else: gTRef = ""

            vpKey = structPList[p].vpKey
            vref = structPList[p].vref
            startV = nv
            for j in vref:

                if (len(structVList[vpKey][j].normal) > 0):
                    model.vNormal = True
                else:
                    model.vNormal = False
                if model.coordinateSystem == "z-up":
                    thisV = [
                        structVList[vpKey][j].coords[1],
                        structVList[vpKey][j].coords[2],
                        -structVList[vpKey][j].coords[0]
                    ]
                    if model.vNormal:
                        thisN = [
                            structVList[vpKey][j].normal[1],
                            structVList[vpKey][j].normal[2],
                            -structVList[vpKey][j].normal[0]
                        ]
                else:
                    thisV = [
                        structVList[vpKey][j].coords[0],
                        structVList[vpKey][j].coords[1],
                        -structVList[vpKey][j].coords[2]
                    ]
                    if model.vNormal:
                        thisN = [
                            structVList[vpKey][j].normal[0],
                            structVList[vpKey][j].normal[1],
                            -structVList[vpKey][j].normal[2]
                        ]
                g_vertices.append(thisV)
                if model.vNormal: nml = thisN
                else: nml = structPList[p].normal
                g_normals.append(nml)
                uvc = structVList[vpKey][j].UVcoords
                if (len(uvc) == 2):
                    g_tex_coords.append(uvc)
                else:
                    g_tex_coords.append([0.0, 0.0])
                nv += 1
            n = nv - startV - 1
            for j in range(1, n):
                g_indices.append((startV, startV + j + 1, startV + j))

        ilen = len(g_vertices)
        if ilen > 0:
            if len(g_normals) != len(g_vertices):
                g_normals = None  # force Buffer.__init__() to generate normals
            model.buf.append(
                Buffer(model, g_vertices, g_tex_coords, g_indices, g_normals))
            n = len(model.buf) - 1
            model.vGroup[np] = n

            model.buf[n].indicesLen = ilen
            model.buf[n].material = (0.0, 0.0, 0.0, 0.0)
            model.buf[n].ttype = GL_TRIANGLES

            # load the texture file TODO check if same as previously loaded files (for other loadModel()s)
            if (gTRef in model.textureList):
                model.buf[model.vGroup[np]].textures = [
                    model.textureList[gTRef]["texID"]
                ]
                model.buf[model.vGroup[np]].texFile = model.textureList[gTRef][
                    "filename"]
            else:
                model.buf[model.vGroup[np]].textures = []
                model.buf[model.vGroup[np]].texFile = None
                #TODO  don't create this array if texture being used but should be able to combine
                if (gMRef in model.materialList):
                    redVal = float(model.materialList[gMRef]["diffr"])
                    grnVal = float(model.materialList[gMRef]["diffg"])
                    bluVal = float(model.materialList[gMRef]["diffb"])
                    model.buf[model.vGroup[np]].material = (redVal, grnVal,
                                                            bluVal, 1.0)
                    model.buf[model.vGroup[np]].unib[3:6] = [
                        redVal, grnVal, bluVal
                    ]

                else:
                    model.buf[model.vGroup[np]].material = (0.0, 0.0, 0.0, 0.0)
        ####### end of groupDrill function #####################

    bReg = re.finditer('[{}<]', l)
    xx = ["", "", "", []]
    pRec(xx, bReg, l, 0)
    l = None  #in case it's running out of memory?
    f.close()

    for x in xx[3]:
        if "<Texture>" in x[0]:
            model.textureList[x[1]] = {}
            for i in xrange(len(x[3])):
                model.textureList[x[1]][x[3][i][1]] = x[3][i][2]
            model.textureList[x[1]]["filename"] = x[2].strip("\"")
            if VERBOSE:
                print(filePath, model.textureList[x[1]]["filename"])
            model.textureList[x[1]]["texID"] = Texture(
                os.path.join(filePath, model.textureList[x[1]]["filename"]),
                False, True)  # load from file
        if "<CoordinateSystem>" in x[0]:
            model.coordinateSystem = x[2].lower()
        if "<Material>" in x[0]:
            model.materialList[x[1]] = {}
            for i in xrange(len(x[3])):
                model.materialList[x[1]][x[3][i][1]] = x[3][i][2]
        if "<Group>" in x[0]:
            groupDrill(x[3], x[1])
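
loadFileEGG() is normally reached indirectly; a minimal usage sketch assuming the
usual pi3d entry point (pi3d.Model dispatching on the .egg file extension) and a
hypothetical model path:

import pi3d

DISPLAY = pi3d.Display.create()
shader = pi3d.Shader("uv_light")
teapot = pi3d.Model(file_string="models/teapot.egg", name="teapot")
teapot.set_shader(shader)
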
Example no. 9
    def groupDrill(gp, np):
        structVList = {}
        offsetVList = {}
        structPList = []
        offset = 0
        #numv = 0
        #numi = 0
        for x in gp:
            if len(x) == 0: continue
            if ("<Group>" in x[0]):
                if len(x[1]) > 0:
                    nextnp = np + x[1]
                else:
                    nextnp = np + str(randint(10000, 99999))
                groupDrill(x[3], nextnp)
            else:
                #build vertex, polygon, normal, triangles, UVs etc etc
                if "<VertexPool>" in x[0]:
                    vp = x[1]
                    structVList[vp] = []
                    offsetVList[vp] = offset
                    for v in x[3]:
                        #if "<Vertex>" in v[0]: #try with this test first!
                        coords = [float(n) for n in v[2].strip().split()
                                  ]  # before first < error if no coords!
                        # texture mapping
                        UVcoords = []
                        normal = []
                        for u in v[3]:
                            if "<UV>" in u[0]:
                                UVcoords = [
                                    float(n) for n in u[2].strip().split()
                                ]
                            #TODO get UVtangent and UVbinormal out of UVset (and use them!)
                            # if ("<Tangent>" in vList[v]): UVtangent = [float(n) for n in (extBracket("<Tangent>", vList[v]))[0].split()] # not sure how to use this at this stage
                            #else: UVtangent = []
                            # if ("<Binormal>" in vList[v]): UVbinormal = [float(n) for n in (extBracket("<Binormal>", vList[v]))[0].split()] # not sure how to use this at this stage
                            #else: UVbinormal = []
                            # normals, used in 'smoothing' edges between polygons
                            if "<Normal>" in u[0]:
                                normal = [
                                    float(n) for n in u[2].strip().split()
                                ]
                        vInt = int(v[1])
                        while (len(structVList[vp]) < (vInt + 1)):
                            structVList[vp].append("")
                        structVList[vp][vInt] = (vertex(
                            coords, UVcoords, normal))
                        offset += 1
        #
        # now go through splitting out the Polygons from this Group same level as vertexGroup
            if "<Polygon>" in x[0]:
                normal = []
                rgba = []
                MRef = ""
                TRef = ""
                for p in x[3]:
                    if ("<Normal>" in p[0]):
                        normal = [float(n) for n in p[2].strip().split()]
                    if ("<RGBA>" in p[0]):
                        rgba = [float(n) for n in p[2].strip().split()]
                    if ("<MRef>" in p[0]): MRef = p[2].strip()
                    if ("<TRef>" in p[0]): TRef = p[2].strip()
                    if ("<VertexRef>" in p[0]):
                        vref = []
                        for n in p[2].strip().split():
                            vref.append(int(n))
                            #numv += 1
                            #numi += 3
                        #numi -= 6 # number of corners of triangle = (n-2)*3 where n is the number of corners of face
                        vpKey = p[3][0][2].strip(
                        )  # ought to do a for r in p[3]; if "Ref in...
                # add to list
                #while (len(structPList) < (p+1)): structPList.append("")
                #
                structPList.append(
                    polygon(normal, rgba, MRef, TRef, vref, vpKey))

        # now go through the polygons in order of vertexPool+id, trying to ensure that the polygon arrays in each group are built in the order of vertexPool names
        # only cope with one material and one texture per group
        #numv -= 1
        #numi -= 1
        g_vertices = []
        g_normals = []
        g_tex_coords = []
        g_indices = []
        nv = 0  # vertex counter in this material
        #ni = 0 # triangle vertex count in this material

        gMRef = ""
        gTRef = ""
        nP = len(structPList)
        for p in xrange(nP):
            if (len(structPList[p].MRef) > 0): gMRef = structPList[p].MRef
            else: gMRef = ""
            if (len(structPList[p].TRef) > 0): gTRef = structPList[p].TRef
            else: gTRef = ""

            vpKey = structPList[p].vpKey
            vref = structPList[p].vref
            startV = nv
            for j in vref:

                if (len(structVList[vpKey][j].normal) > 0):
                    model.vNormal = True
                else:
                    model.vNormal = False
                if model.coordinateSystem == "z-up":
                    thisV = [
                        structVList[vpKey][j].coords[1],
                        structVList[vpKey][j].coords[2],
                        -structVList[vpKey][j].coords[0]
                    ]
                    if model.vNormal:
                        thisN = [
                            structVList[vpKey][j].normal[1],
                            structVList[vpKey][j].normal[2],
                            -structVList[vpKey][j].normal[0]
                        ]
                else:
                    thisV = [
                        structVList[vpKey][j].coords[0],
                        structVList[vpKey][j].coords[1],
                        -structVList[vpKey][j].coords[2]
                    ]
                    if model.vNormal:
                        thisN = [
                            structVList[vpKey][j].normal[0],
                            structVList[vpKey][j].normal[1],
                            -structVList[vpKey][j].normal[2]
                        ]
                g_vertices.append(thisV)
                if model.vNormal: nml = thisN
                else: nml = structPList[p].normal
                g_normals.append(nml)
                uvc = structVList[vpKey][j].UVcoords
                if (len(uvc) == 2):
                    g_tex_coords.append(uvc)
                else:
                    g_tex_coords.append([0.0, 0.0])
                nv += 1
            n = nv - startV - 1
            for j in range(1, n):
                g_indices.append((startV, startV + j + 1, startV + j))

        ilen = len(g_vertices)
        if ilen > 0:
            if len(g_normals) != len(g_vertices):
                g_normals = None  # force Buffer.__init__() to generate normals
            model.buf.append(
                Buffer(model, g_vertices, g_tex_coords, g_indices, g_normals))
            n = len(model.buf) - 1
            model.vGroup[np] = n

            model.buf[n].indicesLen = ilen
            model.buf[n].material = (0.0, 0.0, 0.0, 0.0)
            model.buf[n].ttype = GL_TRIANGLES

            # load the texture file TODO check if same as previously loaded files (for other loadModel()s)
            if (gTRef in model.textureList):
                model.buf[model.vGroup[np]].textures = [
                    model.textureList[gTRef]["texID"]
                ]
                model.buf[model.vGroup[np]].texFile = model.textureList[gTRef][
                    "filename"]
            else:
                model.buf[model.vGroup[np]].textures = []
                model.buf[model.vGroup[np]].texFile = None
                #TODO  don't create this array if texture being used but should be able to combine
                if (gMRef in model.materialList):
                    redVal = float(model.materialList[gMRef]["diffr"])
                    grnVal = float(model.materialList[gMRef]["diffg"])
                    bluVal = float(model.materialList[gMRef]["diffb"])
                    model.buf[model.vGroup[np]].material = (redVal, grnVal,
                                                            bluVal, 1.0)
                    model.buf[model.vGroup[np]].unib[3:6] = [
                        redVal, grnVal, bluVal
                    ]

                else:
                    model.buf[model.vGroup[np]].material = (0.0, 0.0, 0.0, 0.0)
Example no. 10
  def _load_disk(self):
    """overrides method of Loadable
    Pngfont, Font, Defocus and ShadowCaster inherit from Texture but
    don't do all this so have to override this
    """
    
    # If already loaded, abort
    if self._loaded:
      return

    if self.string_type == FILE and PIL_OK:
      s = self.file_string + ' '
      im = Image.open(self.file_string)
    elif self.string_type == PIL_IMAGE and PIL_OK:
      s = 'PIL.Image '
      im = self.file_string
    else:
      if self.string_type == NUMPY:
        s = 'numpy.ndarray '
        self.image = self.file_string
      else: # i.e. FILE but not PIL_OK
        ''' NB this has to be a compressed numpy array saved using something like
              im = np.array(Image.open('{}.png'.format(FNAME)))
              np.savez_compressed('{}'.format(FNAME), im)
        which will produce a file with extension .npz '''
        s = self.file_string + ' '
        self.image = np.load(self.file_string)['arr_0'] # has to be saved with default key

      self.iy, self.ix, mode = self.image.shape
      self._tex = ctypes.c_uint()
      self._loaded = True
      return # skip the rest for numpy arrays - faster but no size checking

    # only do this if loading from disk or PIL image
    self.ix, self.iy = im.size
    s += '(%s)' % im.mode

    if self.mipmap:
      resize_type = Image.BICUBIC
    else:
      resize_type = Image.NEAREST

    # work out if sizes > MAX_SIZE or coerce to golden values in WIDTHS
    if self.iy > self.ix and self.iy > MAX_SIZE: # fairly rare circumstance
      im = im.resize((int((MAX_SIZE * self.ix) / self.iy), MAX_SIZE))
      self.ix, self.iy = im.size
    n = len(WIDTHS)
    for i in xrange(n-1, 0, -1):
      if self.ix == WIDTHS[i]:
        break # no need to resize as already a golden size
      if self.ix > WIDTHS[i]:
        im = im.resize((WIDTHS[i], int((WIDTHS[i] * self.iy) / self.ix)),
                        resize_type)
        self.ix, self.iy = im.size
        break

    if VERBOSE:
      print('Loading ...{}'.format(s))

    if self.flip:
      im = im.transpose(Image.FLIP_TOP_BOTTOM)

    #self.image = im.tostring('raw', RGBs) # TODO change to tobytes WHEN Pillow is default PIL in debian (jessie becomes current)
    self.image = self._img_to_array(im)
    self._tex = ctypes.c_uint()
    if self.string_type == FILE and 'fonts/' in self.file_string:
      self.im = im
      
    self._loaded = True
Example no. 11
#!/usr/bin/python
from __future__ import absolute_import, division, print_function, unicode_literals

import subprocess, time

from six_mod.moves import xrange

for i in xrange(500):
    p = subprocess.Popen(["python", "/home/pi/pi3d/demos/Minimal.py"],
                         stdin=subprocess.PIPE,
                         stderr=subprocess.PIPE)
    time.sleep(7.0)
    stdoutdata, stderrdata = p.communicate(chr(27))
    with open("/home/pi/pi3d/experiments/minimal_count.txt", "w") as myfile:
        myfile.write(str(i))
Example no. 12
  def __init__(self, mapfile, camera=None, light=None,
               width=100.0, depth=100.0, height=10.0,
               divx=0, divy=0, ntiles=1.0, name="",
               x=0.0, y=0.0, z=0.0, rx=0.0, ry=0.0, rz=0.0,
               sx=1.0, sy=1.0, sz=1.0, cx=0.0, cy=0.0, cz=0.0, smooth=True, cubic=False):
    """uses standard constructor for Shape

    Arguments:
      *mapfile*
        Greyscale image path/file, string. If multiple bytes per pixel
        only the first one will be used for elevation. jpg files will
        create slight errors that will cause mis-matching of edges for
        tiled maps (i.e. use png for these) NB also see divx, divy below
        i.e. div=64x64 requires image 65x65 pixels

    Keyword arguments:
      *width, depth, height*
        Of the map in world units.
      *divx, divy*
        Number of divisions into which the map will be divided to create
        vertices (one more vertex than divisions in each direction). NB if you
        want to create a map with 64x64 divisions there will be 65x65 vertices
        in each direction, so the mapfile (above) needs to be 65x65 pixels in
        order to specify elevations precisely and avoid resizing errors.
      *ntiles*
        Number of repeats for tiling the texture image.
      *smooth*
        Calculate normals with averaging rather than pointing
        straight up, slightly faster if false.
    """
    super(ElevationMap, self).__init__(camera, light, name, x, y, z, rx, ry, rz,
                                       sx, sy, sz, cx, cy, cz)
    divx += 1 # one more vertex in each direction than number of divisions
    divy += 1
    if divx > 200 or divy > 200:
      print("... Map size can't be bigger than 199x199 divisions")
      divx = 200
      divy = 200
    #print(type(mapfile), type(""))

    if PIL_OK:
      try:
        if '' + mapfile == mapfile: #HORRIBLE. Only way to cope with python2v3
          if mapfile[0] != '/':
            for p in sys.path:
              if os.path.isfile(p + '/' + mapfile): # this could theoretically get different files with same name
                mapfile = p + '/' + mapfile
                break
          if VERBOSE:
            print("Loading height map ...", mapfile)

          im = Image.open(mapfile)
        else:
          im = mapfile #allow image files to be passed as mapfile
      except:
        im = mapfile
      ix, iy = im.size
      if (ix > 200 and divx == 0) or (ix != divx and iy != divy):
        if divx == 0:
          divx = 200
          divy = 200
        im = im.resize((divx, divy), Image.ANTIALIAS)
        ix, iy = im.size

      im = im.transpose(Image.FLIP_TOP_BOTTOM)
      im = im.transpose(Image.FLIP_LEFT_RIGHT)
      self.pixels = im.load()
    else: 
      ''' images saved as compressed numpy npz file. No resizing so needs 
      to be right size. TODO make this repeated code less WET'''
      if mapfile[0] != '/':
        for p in sys.path:
          if os.path.isfile(p + '/' + mapfile): # this could theoretically get different files with same name
            mapfile = p + '/' + mapfile
            break
      self.pixels = np.load(mapfile)['arr_0'][::-1,::-1] # has to be saved with default key
      ix, iy = self.pixels.shape[:2]
    self.width = width
    self.depth = depth
    self.height = height
    self.ix = ix
    self.iy = iy
    self.ttype = GL_TRIANGLE_STRIP
    self.ht_y = 0.0
    self.ht_n = np.array([0.0, 1.0, 0.0])

    if VERBOSE:
      print("Creating Elevation Map ...", ix, iy)

    self.wh = width * 0.5
    self.hh = depth * 0.5
    self.ws = width / (ix - 1.0)
    self.hs = depth / (iy - 1.0)
    self.ht = height / 255.0
    tx = 1.0 * ntiles / ix
    ty = 1.0 * ntiles / iy

    verts = []
    norms = []
    tex_coords = []
    idx = []

    for y in xrange(0, iy):
      for x in xrange(0, ix):
        pxl = self.pixels[x, y]
        hgt = pxl[0] if hasattr(pxl, '__iter__') else pxl
        hgt *= self.ht
        this_x = -self.wh + x * self.ws
        this_z = -self.hh + y * self.hs
        verts.append((this_x, hgt, this_z))
        tex_coords.append(((ix - x) * tx, (iy - y) * ty))

    s = 0
    #create one long triangle_strip by alternating X directions
    for y in range(0, iy-1):
      for x in range(0, ix-1):
        i = (y * ix)+x
        idx.append((i, i+ix, i+ix+1))
        idx.append((i+ix+1, i+1, i))
        s += 2

    self.buf = []
    self.buf.append(Buffer(self, verts, tex_coords, idx, None, smooth))
Example no. 13
def loadFileEGG(model, fileName):
  """Loads an panda3d egg file to produce Buffer object
  as part of a Shape.

  Arguments:
    *model*
      Model object to add to.
    *fileName*
      Path and name of egg file relative to program file.

  """
  model.coordinateSystem = "Y-up"
  model.materialList = {}
  model.textureList = {}
  model.vertexGroupList = {}
  model.vertexList = []
  model.polygonList = []
  model.childModelList = []
  model.parentModel = None
  model.childModel = [] # don't really need parent and child pointers but will speed up traversing tree
  model.vNormal = False
  model.vGroup = {} # holds the information for each vertex group

  # read in the file and parse into some arrays

  if fileName[0] != '/':
    import os
    for p in sys.path:
      if os.path.isfile(p + '/' + fileName): # this could theoretically get different files with same name
        fileName = p + '/' + fileName
        break
  filePath = os.path.split(os.path.abspath(fileName))[0]
  if VERBOSE:
    print(filePath)
  f = open(fileName, 'r')
  l = f.read() # whole thing as a string in memory this will only work for reasonably small files!!!

  ############### function to parse file as one off process to avoid re-traversing string #########
  # converts the '<a> b { c <d> e {f} <g> h {i} }' structure
  # into nested arrays ['a', 'b', 'c',[['d','e','',['','','f',[]]],['g','h','',['','','i',[]]]]]
  def pRec(x, bReg, l, i):
    while 1:
      try:
        nxtFind = advance_iterator(bReg)
        j = nxtFind.start()
      except:
        return i+1
      c = l[j]
      if c == "<": # add entry to array at this level
        if len(x[3]) == 0: x[2] = l[i:j].strip() # text after "{" and before "<Tabxyz>"
        i = j+1 # save marker for start of descriptor
        x[3].append(["", "", "", []])

      elif c == "{":
        xn = x[3][len(x[3])-1]
        tx = l[i-1:j].strip().split()
        xn[0] = tx[0] #x[0] & x[1] is the "<Tabxyz>" & "123" prior to "{"
        xn[1] = tx[1] if len(tx) > 1 else ""
        i = pRec(xn, bReg, l, j+1)
      else: #i.e. c="}" # go up one level of recursion
        if len(x[3]) == 0: x[2] = l[i:j].strip()
        return j+1
  ################### end of pRec #################

  ####### go through all the nested <Groups> ####################
  def groupDrill(gp, np):
    structVList = {}
    offsetVList = {}
    structPList = []
    offset = 0
    #numv = 0
    #numi = 0
    for x in gp:
      if len(x) == 0: continue
      if ("<Group>" in x[0]):
        if len(x[1]) > 0:
          nextnp = np+x[1]
        else:
          nextnp = np+str(randint(10000, 99999))
        groupDrill(x[3], nextnp)
      else:
        #build vertex, polygon, normal, triangles, UVs etc etc
        if "<VertexPool>" in x[0]:
          vp = x[1]
          structVList[vp] = []
          offsetVList[vp] = offset
          for v in x[3]:
            #if "<Vertex>" in v[0]: #try with this test first!
            coords = [float(n) for n in v[2].strip().split()] # before first < error if no coords!
            # texture mapping
            UVcoords = []
            normal = []
            for u in v[3]:
              if "<UV>" in u[0]: UVcoords = [float(n) for n in u[2].strip().split()]
              #TODO get UVtangent and UVbinormal out of UVset (and use them!)
              # if ("<Tangent>" in vList[v]): UVtangent = [float(n) for n in (extBracket("<Tangent>", vList[v]))[0].split()] # not sure how to use this at this stage
              #else: UVtangent = []
              # if ("<Binormal>" in vList[v]): UVbinormal = [float(n) for n in (extBracket("<Binormal>", vList[v]))[0].split()] # not sure how to use this at this stage
              #else: UVbinormal = []
              # normals, used in 'smoothing' edges between polygons
              if "<Normal>" in u[0]: normal = [float(n) for n in u[2].strip().split()]
            vInt = int(v[1])
            while (len(structVList[vp]) < (vInt+1)): structVList[vp].append("")
            structVList[vp][vInt] = (vertex(coords, UVcoords, normal))
            offset += 1
    #
      # now go through splitting out the Polygons from this Group same level as vertexGroup
      if "<Polygon>" in x[0]:
        normal = []
        rgba = []
        MRef = ""
        TRef = ""
        for p in x[3]:
          if ("<Normal>" in p[0]): normal = [float(n) for n in p[2].strip().split()]
          if ("<RGBA>" in p[0]): rgba = [float(n) for n in p[2].strip().split()]
          if ("<MRef>" in p[0]): MRef = p[2].strip()
          if ("<TRef>" in p[0]): TRef = p[2].strip()
          if ("<VertexRef>" in p[0]):
            vref = []
            for n in p[2].strip().split():
              vref.append(int(n))
              #numv += 1
              #numi += 3
            #numi -= 6 # number of corners of triangle = (n-2)*3 where n is the number of corners of face
            vpKey = p[3][0][2].strip() # ought to do a for r in p[3]; if "Ref in...
        # add to list
        #while (len(structPList) < (p+1)): structPList.append("")
        #
        structPList.append(polygon(normal, rgba, MRef, TRef, vref, vpKey))

    # now go through the polygons in order of vertexPool+id, trying to ensure that the polygon arrays in each group are built in the order of vertexPool names
    # only cope with one material and one texture per group
    #numv -= 1
    #numi -= 1
    g_vertices = []
    g_normals = []
    g_tex_coords = []
    g_indices = []
    nv = 0 # vertex counter in this material
    #ni = 0 # triangle vertex count in this material

    gMRef = ""
    gTRef = ""
    nP = len(structPList)
    for p in xrange(nP):
      if (len(structPList[p].MRef) > 0): gMRef = structPList[p].MRef
      else: gMRef = ""
      if (len(structPList[p].TRef) > 0): gTRef = structPList[p].TRef
      else: gTRef = ""

      vpKey = structPList[p].vpKey
      vref = structPList[p].vref
      startV = nv
      for j in vref:

        if (len(structVList[vpKey][j].normal) > 0): model.vNormal = True
        else: model.vNormal = False
        if model.coordinateSystem == "z-up":
          thisV = [structVList[vpKey][j].coords[1], structVList[vpKey][j].coords[2], -structVList[vpKey][j].coords[0]]
          if model.vNormal:
            thisN = [structVList[vpKey][j].normal[1], structVList[vpKey][j].normal[2], -structVList[vpKey][j].normal[0]]
        else:
          thisV = [structVList[vpKey][j].coords[0], structVList[vpKey][j].coords[1], -structVList[vpKey][j].coords[2]]
          if model.vNormal:
            thisN = [structVList[vpKey][j].normal[0], structVList[vpKey][j].normal[1], -structVList[vpKey][j].normal[2]]
        g_vertices.append(thisV)
        if model.vNormal: nml = thisN
        else: nml = structPList[p].normal
        g_normals.append(nml)
        uvc = structVList[vpKey][j].UVcoords
        if (len(uvc) == 2):
          g_tex_coords.append(uvc)
        else:
          g_tex_coords.append([0.0, 0.0])
        nv += 1
      n = nv - startV - 1
      for j in range(1, n):
        g_indices.append((startV, startV + j + 1, startV + j))

    ilen = len(g_vertices)
    if ilen > 0:
      if len(g_normals) != len(g_vertices):
        g_normals = None # force Buffer.__init__() to generate normals
      model.buf.append(Buffer(model, g_vertices, g_tex_coords, g_indices, g_normals))
      n = len(model.buf) - 1
      model.vGroup[np] = n

      model.buf[n].indicesLen = ilen
      model.buf[n].material = (0.0, 0.0, 0.0, 0.0)
      model.buf[n].ttype = GL_TRIANGLES

      # load the texture file TODO check if same as previously loaded files (for other loadModel()s)
      if (gTRef in model.textureList):
        model.buf[model.vGroup[np]].textures = [model.textureList[gTRef]["texID"]]
        model.buf[model.vGroup[np]].texFile = model.textureList[gTRef]["filename"]
      else:
        model.buf[model.vGroup[np]].textures = []
        model.buf[model.vGroup[np]].texFile = None
        #TODO  don't create this array if texture being used but should be able to combine
        if (gMRef in model.materialList):
          redVal = float(model.materialList[gMRef]["diffr"])
          grnVal = float(model.materialList[gMRef]["diffg"])
          bluVal = float(model.materialList[gMRef]["diffb"])
          model.buf[model.vGroup[np]].material = (redVal, grnVal, bluVal, 1.0)
          model.buf[model.vGroup[np]].unib[3:6] = [redVal, grnVal, bluVal]

        else: model.buf[model.vGroup[np]].material = (0.0, 0.0, 0.0, 0.0)
    ####### end of groupDrill function #####################

  bReg = re.finditer('[{}<]', l)
  xx = ["", "", "", []]
  pRec(xx, bReg, l, 0)
  l = None # free the source string (may help if memory is running short)
  f.close()

  for x in xx[3]:
    if "<Texture>" in x[0]:
      model.textureList[x[1]] = {}
      for i in xrange(len(x[3])): model.textureList[x[1]][x[3][i][1]] = x[3][i][2]
      model.textureList[x[1]]["filename"] = x[2].strip("\"")
      if VERBOSE:
        print(filePath, model.textureList[x[1]]["filename"])
      model.textureList[x[1]]["texID"] = Texture(os.path.join(filePath, model.textureList[x[1]]["filename"]), False, True) # load from file
    if "<CoordinateSystem>" in x[0]:
      model.coordinateSystem = x[2].lower()
    if "<Material>" in x[0]:
      model.materialList[x[1]] = {}
      for i in xrange(len(x[3])): model.materialList[x[1]][x[3][i][1]] = x[3][i][2]
    if "<Group>" in x[0]:
      groupDrill(x[3], x[1])
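
The loop above builds plain dictionaries (textureList, materialList) from each top-level tag parsed out of the .egg text. Below is a minimal standalone sketch of that convention, assuming, as the indexing above suggests, that each parsed element has the shape [tag, name, body_text, children]; the sample values are made up purely for illustration:

# hypothetical parsed element in the [tag, name, body_text, children] shape
sample = ["<Texture>", "wood", ' "wood.png" ',
          [["<Scalar>", "format", "rgb", []]]]

textureList = {}
tag, name, body, children = sample
if "<Texture>" in tag:
  # child entries become key/value pairs, the body text is the file name
  textureList[name] = {c[1]: c[2] for c in children}
  textureList[name]["filename"] = body.strip().strip('"')

print(textureList)  # {'wood': {'format': 'rgb', 'filename': 'wood.png'}}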
Example no. 14
0
  def groupDrill(gp, np):
    # recursively walk a parsed <Group>, gathering its vertex pools and polygons
    # into a single pi3d Buffer stored against the group name np
    structVList = {}
    offsetVList = {}
    structPList = []
    offset = 0
    #numv = 0
    #numi = 0
    for x in gp:
      if len(x) == 0: continue
      if ("<Group>" in x[0]):
        if len(x[1]) > 0:
          nextnp = np+x[1]
        else:
          nextnp = np+str(randint(10000, 99999))
        groupDrill(x[3], nextnp)
      else:
        #build vertex, polygon, normal, triangles, UVs etc etc
        if "<VertexPool>" in x[0]:
          vp = x[1]
          structVList[vp] = []
          offsetVList[vp] = offset
          for v in x[3]:
            #if "<Vertex>" in v[0]: #try with this test first!
            coords = [float(n) for n in v[2].strip().split()] # text before the first '<'; this will raise an error if no coords are present!
            # texture mapping
            UVcoords = []
            normal = []
            for u in v[3]:
              if "<UV>" in u[0]: UVcoords = [float(n) for n in u[2].strip().split()]
              #TODO get UVtangent and UVbinormal out of UVset (and use them!)
              # if ("<Tangent>" in vList[v]): UVtangent = [float(n) for n in (extBracket("<Tangent>", vList[v]))[0].split()] # not sure how to use this at this stage
              #else: UVtangent = []
              # if ("<Binormal>" in vList[v]): UVbinormal = [float(n) for n in (extBracket("<Binormal>", vList[v]))[0].split()] # not sure how to use this at this stage
              #else: UVbinormal = []
              # normals, used in 'smoothing' edges between polygons
              if "<Normal>" in u[0]: normal = [float(n) for n in u[2].strip().split()]
            vInt = int(v[1])
            while (len(structVList[vp]) < (vInt+1)): structVList[vp].append("")
            structVList[vp][vInt] = (vertex(coords, UVcoords, normal))
            offset += 1
    #
      # now split out the Polygons from this Group (they sit at the same level as the vertex pool)
      if "<Polygon>" in x[0]:
        normal = []
        rgba = []
        MRef = ""
        TRef = ""
        for p in x[3]:
          if ("<Normal>" in p[0]): normal = [float(n) for n in p[2].strip().split()]
          if ("<RGBA>" in p[0]): rgba = [float(n) for n in p[2].strip().split()]
          if ("<MRef>" in p[0]): MRef = p[2].strip()
          if ("<TRef>" in p[0]): TRef = p[2].strip()
          if ("<VertexRef>" in p[0]):
            vref = []
            for n in p[2].strip().split():
              vref.append(int(n))
              #numv += 1
              #numi += 3
            #numi -= 6 # number of corners of triangle = (n-2)*3 where n is the number of corners of face
            vpKey = p[3][0][2].strip() # strictly this ought to loop over p[3] looking for the "Ref" entry
        # add to list
        #while (len(structPList) < (p+1)): structPList.append("")
        #
        structPList.append(polygon(normal, rgba, MRef, TRef, vref, vpKey))

    # now walk the polygons in vertexPool+id order, trying to ensure that the polygon arrays in each group are built in the order of the vertexPool names
    # only one material and one texture per group are handled
    #numv -= 1
    #numi -= 1
    g_vertices = []
    g_normals = []
    g_tex_coords = []
    g_indices = []
    nv = 0 # vertex counter in this material
    #ni = 0 # triangle vertex count in this material

    gMRef = ""
    gTRef = ""
    nP = len(structPList)
    for p in xrange(nP):
      if (len(structPList[p].MRef) > 0): gMRef = structPList[p].MRef
      else: gMRef = ""
      if (len(structPList[p].TRef) > 0): gTRef = structPList[p].TRef
      else: gTRef = ""

      vpKey = structPList[p].vpKey
      vref = structPList[p].vref
      startV = nv
      for j in vref:

        if (len(structVList[vpKey][j].normal) > 0): model.vNormal = True
        else: model.vNormal = False
        # remap axes: "z-up" .egg data is converted to the y-up axes used here, with a sign flip on the depth axis
        if model.coordinateSystem == "z-up":
          thisV = [structVList[vpKey][j].coords[1], structVList[vpKey][j].coords[2], -structVList[vpKey][j].coords[0]]
          if model.vNormal:
            thisN = [structVList[vpKey][j].normal[1], structVList[vpKey][j].normal[2], -structVList[vpKey][j].normal[0]]
        else:
          thisV = [structVList[vpKey][j].coords[0], structVList[vpKey][j].coords[1], -structVList[vpKey][j].coords[2]]
          if model.vNormal:
            thisN = [structVList[vpKey][j].normal[0], structVList[vpKey][j].normal[1], -structVList[vpKey][j].normal[2]]
        g_vertices.append(thisV)
        if model.vNormal: nml = thisN
        else: nml = structPList[p].normal
        g_normals.append(nml)
        uvc = structVList[vpKey][j].UVcoords
        if (len(uvc) == 2):
          g_tex_coords.append(uvc)
        else:
          g_tex_coords.append([0.0, 0.0])
        nv += 1
      # fan-triangulate the face: a face with c corners becomes c-2 triangles that all share its first vertex (startV)
      n = nv - startV - 1
      for j in range(1, n):
        g_indices.append((startV, startV + j + 1, startV + j))

    ilen = len(g_vertices)
    if ilen > 0:
      if len(g_normals) != len(g_vertices):
        g_normals = None # force Buffer.__init__() to generate normals
      model.buf.append(Buffer(model, g_vertices, g_tex_coords, g_indices, g_normals))
      n = len(model.buf) - 1
      model.vGroup[np] = n

      model.buf[n].indicesLen = ilen
      model.buf[n].material = (0.0, 0.0, 0.0, 0.0)
      model.buf[n].ttype = GL_TRIANGLES

      # load the texture file TODO check if same as previously loaded files (for other loadModel()s)
      if (gTRef in model.textureList):
        model.buf[model.vGroup[np]].textures = [model.textureList[gTRef]["texID"]]
        model.buf[model.vGroup[np]].texFile = model.textureList[gTRef]["filename"]
      else:
        model.buf[model.vGroup[np]].textures = []
        model.buf[model.vGroup[np]].texFile = None
        # TODO: don't create this material array if a texture is being used; ideally the two could be combined
        if (gMRef in model.materialList):
          redVal = float(model.materialList[gMRef]["diffr"])
          grnVal = float(model.materialList[gMRef]["diffg"])
          bluVal = float(model.materialList[gMRef]["diffb"])
          model.buf[model.vGroup[np]].material = (redVal, grnVal, bluVal, 1.0)
          model.buf[model.vGroup[np]].unib[3:6] = [redVal, grnVal, bluVal]

        else: model.buf[model.vGroup[np]].material = (0.0, 0.0, 0.0, 0.0)
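
The index generation near the end of groupDrill is a simple triangle fan: a face with c corners becomes c-2 triangles that all share the face's first vertex, keeping the winding used above. A standalone sketch of the same pattern (the corner count is purely illustrative):

def fan_triangulate(start, corners):
  # returns corners-2 index triples, all sharing the first vertex,
  # with the same winding as groupDrill above
  return [(start, start + j + 1, start + j) for j in range(1, corners - 1)]

print(fan_triangulate(0, 5))  # [(0, 2, 1), (0, 3, 2), (0, 4, 3)]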
Example no. 15
0
#!/usr/bin/python
from __future__ import absolute_import, division, print_function, unicode_literals

import subprocess, time

from six_mod.moves import xrange

# soak test: repeatedly start the Minimal.py demo, let it run for a few seconds,
# then send the ESC character to its stdin to tell it to quit, recording the
# last completed pass in a text file
for i in xrange(500):
  p = subprocess.Popen(["python", "/home/pi/pi3d/demos/Minimal.py"],
          stdin=subprocess.PIPE, stderr=subprocess.PIPE)
  time.sleep(7.0) # give the demo time to start up and render
  stdoutdata, stderrdata = p.communicate(chr(27)) # send ESC, then wait for exit
  with open("/home/pi/pi3d/experiments/minimal_count.txt", "w") as myfile:
    myfile.write(str(i)) # overwritten each pass, so it holds the latest count
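
One portability note on the snippet above: under Python 3, communicate() expects bytes when the pipes are not opened in text mode, so the ESC keypress has to be sent as bytes rather than chr(27). A minimal sketch under that assumption (command and timing copied from above, loop count shortened purely for illustration):

import subprocess, time

ESC = b"\x1b"  # bytes, not str, because stdin is a binary pipe here
for i in range(3):  # short run; the test above loops 500 times
  p = subprocess.Popen(["python", "/home/pi/pi3d/demos/Minimal.py"],
                       stdin=subprocess.PIPE, stderr=subprocess.PIPE)
  time.sleep(7.0)
  out, err = p.communicate(ESC)  # send ESC, then wait for the demo to exit
  print(i, p.returncode)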