Example #1
def generate_metadata(filename, typeid):
    """Loads the mesh and its subfiles for the given file and type id,
    computes mesh statistics and Zernike descriptors, and stores them
    as metadata."""

    metadata = get_file_metadata(filename)
    mesh_hash = metadata['types'][typeid]['hash']
    subfiles = metadata['types'][typeid]['subfiles']

    dae_data = get_hash(mesh_hash)['data']

    subfile_map = {}
    subfile_sizes = {}
    subfile_sizes_gzip = {}
    for subfile in subfiles:
        img_meta = get_file_metadata(subfile)
        img_hash = img_meta['hash']
        img_data = get_hash(img_hash)['data']
        subfile_sizes[subfile] = len(img_data)
        subfile_sizes_gzip[subfile] = get_gzip_size(img_data)
        # subfile paths end in a version component, so the image's file
        # name is the second-to-last path component
        base_name = os.path.basename(os.path.split(subfile)[0])
        subfile_map[base_name] = img_data
    
    # hand pycollada the in-memory image bytes for a given texture name
    def customImageLoader(filename):
        return subfile_map[posixpath.basename(filename)]
    
    mesh = collada.Collada(StringIO(dae_data), aux_file_loader=customImageLoader)
    
    stream_hash = metadata['types'][typeid].get('progressive_stream', None)
    stream_data = get_hash(stream_hash)['data'] if stream_hash is not None else None
    
    if stream_data is not None:
        # add back the progressive stream so we get accurate metadata
        mesh = add_back_pm.add_back_pm(mesh, StringIO(stream_data), 100)
    
    json_data = json.loads(getJSON(mesh))

    metadata_info = {}
    metadata_info['num_triangles'] = json_data['num_triangles']
    metadata_info['num_materials'] = len(json_data['materials'])
    metadata_info['num_images'] = len(json_data['images'])
    metadata_info['texture_ram_usage'] = json_data['texture_ram']
    metadata_info['num_draw_calls'] = json_data['num_draw_with_batching']
    metadata_info['num_vertices'] = json_data['num_vertices']
    metadata_info['bounds_info'] = json_data['bounds_info']

    # triangulate and export to PLY for the external Zernike calculator
    triangulate = meshtool.filters.factory.getInstance('triangulate')
    mesh = triangulate.apply(mesh)
    save_ply = meshtool.filters.factory.getInstance('save_ply')
    # mkstemp instead of the insecure mktemp; close the fd since save_ply
    # writes to the path
    ply_fd, ply_temp_file = tempfile.mkstemp(suffix='.ply', prefix='meshtool-genmetadata-zernike')
    os.close(ply_fd)
    save_ply.apply(mesh, ply_temp_file)
    
    zernike_calc = os.path.join(os.path.dirname(__file__), 'zernike_calculator')
    zernike_output = subprocess.check_output([zernike_calc, ply_temp_file])
    # the calculator prints a comma-separated list of descriptor values
    zernike_nums = [float(n) for n in zernike_output.split(',')]
    metadata_info['zernike'] = zernike_nums
    os.remove(ply_temp_file)

    # the last path component is the version; the rest is the file key
    split = filename.split("/")
    version = split[-1]
    file_key = "/".join(split[:-1])
    added_metadata = { 'metadata': metadata_info }
    
    # the size of the mesh, gzipped
    added_metadata['size_gzip'] = get_gzip_size(dae_data)
    
    # the size of each subfile
    added_metadata['subfile_sizes'] = subfile_sizes
    # the size of each subfile, gzipped
    added_metadata['subfile_sizes_gzip'] = subfile_sizes_gzip
    
    if stream_data is not None:
        # the size of the progressive stream, if exists
        added_metadata['progressive_stream_size'] = len(stream_data)
        added_metadata['progressive_stream_size_gzip'] = get_gzip_size(stream_data)
    add_metadata(file_key, version, typeid, added_metadata)
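
The get_gzip_size helper used above is not defined in these examples; a minimal sketch of its assumed behavior (compress in memory, return the compressed byte count):

import gzip
from StringIO import StringIO

def get_gzip_size(data):
    """Size of data in bytes after gzip compression (assumed behavior;
    the real helper may differ)."""
    buf = StringIO()
    gz = gzip.GzipFile(fileobj=buf, mode='wb')
    gz.write(data)
    gz.close()
    return len(buf.getvalue())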
Example #2
def generate_panda3d(filename, typeid):
    """Converts the mesh for the given file and type id into Panda3D
    .bam files, producing separate base and full versions when mipmaps
    are available."""
    metadata = get_file_metadata(filename)
    mesh_hash = metadata['types'][typeid]['hash']
    subfiles = metadata['types'][typeid]['subfiles']
    progressive_stream = metadata['types'][typeid].get('progressive_stream')
    progressive_data = get_hash(progressive_stream)['data'] if progressive_stream else None
    mipmaps = metadata['types'][typeid].get('mipmaps')
    pathinfo = PathInfo(filename)
    dae_data = get_hash(mesh_hash)['data']

    if mipmaps is not None:

        mipmap_data = {}
        for mipmap_name, mipmap_info in mipmaps.iteritems():
            tar_hash = mipmap_info['hash']
            tar_data = get_hash(tar_hash)['data']
            
            # pick the largest level that still fits within min_size for the
            # base version, and the largest level overall for the full version
            min_range = None
            max_range = None
            min_size = 128
            for byte_range in mipmap_info['byte_ranges']:
                if byte_range['width'] <= min_size and byte_range['height'] <= min_size:
                    min_range = (byte_range['offset'], byte_range['length'])
                max_range = (byte_range['offset'], byte_range['length'])
            if min_range is None:
                # no level fits within min_size; fall back to the largest
                min_range = max_range

            mipmap_data[mipmap_name] = {}
            mipmap_data[mipmap_name]['base'] = tar_data[min_range[0]:min_range[0]+min_range[1]]
            mipmap_data[mipmap_name]['full'] = tar_data[max_range[0]:max_range[0]+max_range[1]]
    
        # loaders that return the in-memory mipmap bytes for a texture name
        def base_loader(filename):
            return mipmap_data[filename]['base']
        def full_loader(filename):
            return mipmap_data[filename]['full']
    
        base_mesh = collada.Collada(StringIO(dae_data), aux_file_loader=base_loader)
        base_bam_data = getBam(base_mesh, 'base_' + filename)
        base_bam_hex_key = hashlib.sha256(base_bam_data).hexdigest()
        save_file_data(base_bam_hex_key, base_bam_data, "model/x-bam")
    
        full_mesh = collada.Collada(StringIO(dae_data), aux_file_loader=full_loader)
        if progressive_data is not None:
            full_mesh = add_back_pm.add_back_pm(full_mesh, StringIO(progressive_data), 100)
        full_bam_data = getBam(full_mesh, 'full_' + filename)
        full_bam_hex_key = hashlib.sha256(full_bam_data).hexdigest()
        save_file_data(full_bam_hex_key, full_bam_data, "model/x-bam")
    
        add_metadata(pathinfo.basepath, pathinfo.version, typeid, {'panda3d_base_bam': base_bam_hex_key,
                                                                   'panda3d_full_bam': full_bam_hex_key})
    else:
        
        subfile_map = {}
        for subfile in subfiles:
            img_meta = get_file_metadata(subfile)
            img_hash = img_meta['hash']
            img_data = get_hash(img_hash)['data']
            # as above, the image's file name is the second-to-last path
            # component because subfile paths end in a version component
            base_name = os.path.basename(os.path.split(subfile)[0])
            subfile_map[base_name] = img_data
        
        def customImageLoader(filename):
            return subfile_map[posixpath.basename(filename)]
        
        mesh = collada.Collada(StringIO(dae_data), aux_file_loader=customImageLoader)
        other_bam_data = getBam(mesh, typeid + '_' + filename)
        other_bam_hex_key = hashlib.sha256(other_bam_data).hexdigest()
        save_file_data(other_bam_hex_key, other_bam_data, "model/x-bam")
        
        add_metadata(pathinfo.basepath, pathinfo.version, typeid, {'panda3d_bam': other_bam_hex_key})
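
The byte-range selection above reads more clearly as a standalone helper; a sketch (select_mipmap_ranges is hypothetical, and byte_ranges are assumed sorted smallest to largest, as the tar layout suggests):

def select_mipmap_ranges(byte_ranges, min_size=128):
    """Return (base, full) as (offset, length) pairs from mipmap levels
    sorted smallest to largest (hypothetical helper)."""
    min_range = None
    max_range = None
    for br in byte_ranges:
        if br['width'] <= min_size and br['height'] <= min_size:
            min_range = (br['offset'], br['length'])
        max_range = (br['offset'], br['length'])
    if min_range is None:
        min_range = max_range
    return min_range, max_range

# e.g. with 64px, 128px, and 256px levels, base picks the 128px level
# and full picks the 256px level:
ranges = [{'offset': 0, 'length': 100, 'width': 64, 'height': 64},
          {'offset': 100, 'length': 400, 'width': 128, 'height': 128},
          {'offset': 500, 'length': 1600, 'width': 256, 'height': 256}]
print select_mipmap_ranges(ranges)  # ((100, 400), (500, 1600))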
        
Example #3
def load_into_bamfile(meshdata, subfiles, model):
    """Uses pycollada and panda3d to load meshdata and subfiles and
    write out to a bam file on disk"""

    if os.path.isfile(model.bam_file):
        print 'returning cached bam file'
        return model.bam_file

    mesh = load_mesh(meshdata, subfiles)
    model_name = model.model_json['full_path'].replace('/', '_')

    if model.model_type == 'progressive' and model.model_subtype == 'full':
        progressive_stream = model.model_json['metadata']['types'][
            'progressive'].get('progressive_stream')
        if progressive_stream is not None:
            print 'LOADING PROGRESSIVE STREAM'
            data = model.prog_data
            try:
                mesh = add_back_pm.add_back_pm(mesh, StringIO(data), 100)
                print '-----'
                print 'SUCCESSFULLY ADDED BACK PM'
                print '-----'
            except:
                # write an empty placeholder bam so the failure is cached,
                # then re-raise
                f = open(model.bam_file, 'w')
                f.close()
                raise

    print 'loading into bamfile', model_name, mesh
    scene_members = pandacore.getSceneMembers(mesh)
    print 'got scene members', model_name, mesh

    # parent node that corrects the asset's up axis to Panda3D's Z-up
    rotateNode = GeomNode("rotater")
    rotatePath = NodePath(rotateNode)
    matrix = numpy.identity(4)
    if mesh.assetInfo.upaxis == collada.asset.UP_AXIS.X_UP:
        r = collada.scene.RotateTransform(0, 1, 0, 90)
        matrix = r.matrix
    elif mesh.assetInfo.upaxis == collada.asset.UP_AXIS.Y_UP:
        r = collada.scene.RotateTransform(1, 0, 0, 90)
        matrix = r.matrix
    # numpy uses column vectors; Panda3D expects the row-vector transpose
    rotatePath.setMat(Mat4(*matrix.T.flatten().tolist()))

    for geom, renderstate, mat4 in scene_members:
        node = GeomNode("primitive")
        node.addGeom(geom)
        if renderstate is not None:
            node.setGeomState(0, renderstate)
        geomPath = rotatePath.attachNewNode(node)
        geomPath.setMat(mat4)

    print 'created np', model_name, mesh

    if model.model_type != 'optimized_unflattened' and model.model_type != 'progressive':
        print 'ABOUT TO FLATTEN'
        rotatePath.flattenStrong()
        print 'DONE FLATTENING'

    print 'flattened', model_name, mesh

    wrappedNode = pandacore.centerAndScale(rotatePath)
    wrappedNode.setName(model_name)

    wrappedNode.writeBamFile(model.bam_file)
    print 'saved', model_name, mesh
    wrappedNode = None

    return model.bam_file
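
The numpy-to-Mat4 conversion used for the rotation node is easy to get backwards; a minimal sketch of the convention, assuming Panda3D's Mat4 from panda3d.core:

import numpy
from panda3d.core import Mat4

# a column-vector transform that translates by (1, 2, 3)
m = numpy.identity(4)
m[0:3, 3] = [1.0, 2.0, 3.0]

# Panda3D applies matrices to row vectors, so the numpy matrix is
# transposed before flattening into Mat4's 16 arguments
pm = Mat4(*m.T.flatten().tolist())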