Example #1
def generate_screenshot(filename, typeid):
    metadata = get_file_metadata(filename)
    hash = metadata['types'][typeid]['hash']
    subfiles = metadata['types'][typeid]['subfiles']
    
    dae_data = get_hash(hash)['data']

    subfile_map = {}
    for subfile in subfiles:
        img_meta = get_file_metadata(subfile)
        img_hash = img_meta['hash']
        img_data = get_hash(img_hash)['data']
        # stored subfile paths carry a trailing version segment ("<name>/<version>"),
        # so the image name is the basename of the path's directory part
        base_name = os.path.basename(os.path.split(subfile)[0])
        subfile_map[base_name] = img_data
    
    # The below is a total hack and I feel really dirty doing it, but
    # there is no way to get panda3d to clean up after itself except to
    # exit the process. Celery workers are run as a daemon, so they can't
    # create child processes. Doing so could cause orphaned, defunct processes.
    # I'm doing it anyway because I haven't found any other way to do this. Sorry.
    q = multiprocessing.Queue()
    daemonic = multiprocessing.current_process()._daemonic
    multiprocessing.current_process()._daemonic = False
    p = multiprocessing.Process(target=_get_screenshot, args=[q, dae_data, subfile_map])
    p.start()
    main_screenshot = q.get()
    p.join()
    multiprocessing.current_process()._daemonic = daemonic
    
    im = Image.open(StringIO(main_screenshot))
    thumbnail = StringIO()
    im.thumbnail((96,96), Image.ANTIALIAS)
    im.save(thumbnail, "PNG", optimize=1)
    thumbnail = thumbnail.getvalue()
    
    main_key = hashlib.sha256(main_screenshot).hexdigest()
    thumb_key = hashlib.sha256(thumbnail).hexdigest()
    save_file_data(main_key, main_screenshot, "image/png")
    save_file_data(thumb_key, thumbnail, "image/png")
    
    ss_info = {'screenshot': main_key, 'thumbnail': thumb_key}
    base_filename, version_num = os.path.split(filename)
    add_metadata(base_filename, version_num, typeid, ss_info)
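
The daemon-flag juggling around the child process shows up verbatim in several of these examples. As a sketch only, the pattern could be factored into a reusable helper along these lines (run_in_child is hypothetical, not part of the original code; it assumes the same one-result-on-a-queue protocol):

import multiprocessing

def run_in_child(target, *args):
    # Hypothetical helper capturing the hack above: temporarily clear the
    # private daemon flag so a Celery worker is allowed to fork, run the
    # target in a child process, and collect a single result from a queue.
    q = multiprocessing.Queue()
    proc = multiprocessing.current_process()
    was_daemonic = proc._daemonic  # private attribute, as in the example
    proc._daemonic = False
    try:
        p = multiprocessing.Process(target=target, args=[q] + list(args))
        p.start()
        result = q.get()
        p.join()
    finally:
        proc._daemonic = was_daemonic
    return result
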
Example #2
def generate_progressive_errors(filename, typeid):
    if typeid != 'progressive':
        return
    
    metadata = get_file_metadata(filename)
    hash = metadata['types'][typeid]['hash']
    subfiles = metadata['types'][typeid]['subfiles']
    progressive_stream_hash = metadata['types'][typeid]['progressive_stream']
    # take the tar hash from the first mipmap entry
    mipmap_tar_hash = metadata['types'][typeid]['mipmaps'].values()[0]['hash']
    
    dae_data = get_hash(hash)['data']
    pm_data = get_hash(progressive_stream_hash)['data'] if progressive_stream_hash is not None else None
    mipmap_tar_data = get_hash(mipmap_tar_hash)['data']
    
    # The below is a total hack and I feel really dirty doing it, but
    # there is no way to get panda3d to clean up after itself except to
    # exit the process. Celery workers are run as a daemon, so they can't
    # create child processes. Doing so could cause orphaned, defunct processes.
    # I'm doing it anyway because I haven't found any other way to do this. Sorry.
    q = multiprocessing.Queue()
    daemonic = multiprocessing.current_process()._daemonic
    multiprocessing.current_process()._daemonic = False
    p = multiprocessing.Process(target=_get_progressive_errors, args=[q, dae_data, pm_data, mipmap_tar_data])
    p.start()
    success, error_data = q.get()
    p.join()
    multiprocessing.current_process()._daemonic = daemonic
    
    if not success:
        print 'Exception from worker, %s' % str(error_data)
        raise Exception("got exception from worker: %s" % str(error_data))
    
    if error_data is None:
        return
    
    error_info = {'progressive_perceptual_error': error_data}
    base_filename, version_num = os.path.split(filename)
    add_metadata(base_filename, version_num, typeid, error_info)
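
The worker side is not shown here, but its result protocol can be read off the unpacking above: it puts a (success, payload) tuple on the queue. A minimal sketch of what _get_progressive_errors might look like; compute_perceptual_errors is a hypothetical stand-in for the real error measurement:

def _get_progressive_errors(q, dae_data, pm_data, mipmap_tar_data):
    # Mirrors the (success, error_data) tuple unpacked by the parent above.
    try:
        # compute_perceptual_errors is hypothetical; the real work happens
        # inside panda3d, which is why this runs in a throwaway process.
        errors = compute_perceptual_errors(dae_data, pm_data, mipmap_tar_data)
        q.put((True, errors))
    except Exception as e:
        q.put((False, str(e)))
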
Example #3
def generate_metadata(filename, typeid):
    
    metadata = get_file_metadata(filename)
    hash = metadata['types'][typeid]['hash']
    subfiles = metadata['types'][typeid]['subfiles']
    
    dae_data = get_hash(hash)['data']

    subfile_map = {}
    subfile_sizes = {}
    subfile_sizes_gzip = {}
    for subfile in subfiles:
        img_meta = get_file_metadata(subfile)
        img_hash = img_meta['hash']
        img_data = get_hash(img_hash)['data']
        subfile_sizes[subfile] = len(img_data)
        subfile_sizes_gzip[subfile] = get_gzip_size(img_data)
        base_name = os.path.basename(os.path.split(subfile)[0])
        subfile_map[base_name] = img_data
    
    def customImageLoader(filename):
        return subfile_map[posixpath.basename(filename)]
    
    mesh = collada.Collada(StringIO(dae_data), aux_file_loader=customImageLoader)
    
    stream_hash = metadata['types'][typeid].get('progressive_stream', None)
    stream_data = get_hash(stream_hash)['data'] if stream_hash is not None else None
    
    if stream_data is not None:
        # add back the progressive stream so we get accurate metadata
        mesh = add_back_pm(mesh, StringIO(stream_data), 100)
    
    json_data = json.loads(getJSON(mesh))

    metadata_info = {}
    metadata_info['num_triangles'] = json_data['num_triangles']
    metadata_info['num_materials'] = len(json_data['materials'])
    metadata_info['num_images'] = len(json_data['images'])
    metadata_info['texture_ram_usage'] = json_data['texture_ram']
    metadata_info['num_draw_calls'] = json_data['num_draw_with_batching']
    metadata_info['num_vertices'] = json_data['num_vertices']
    metadata_info['bounds_info'] = json_data['bounds_info']

    triangulate = meshtool.filters.factory.getInstance('triangulate')
    mesh = triangulate.apply(mesh)
    save_ply = meshtool.filters.factory.getInstance('save_ply')
    ply_temp_file = tempfile.mktemp(suffix='.ply', prefix='meshtool-genmetadata-zernike')
    save_ply.apply(mesh, ply_temp_file)
    
    zernike_calc = os.path.join(os.path.dirname(__file__), 'zernike_calculator')
    zernike_output = subprocess.check_output([zernike_calc, ply_temp_file])
    zernike_nums = zernike_output.split(',')
    zernike_nums = map(float, zernike_nums)
    metadata_info['zernike'] = zernike_nums
    os.remove(ply_temp_file)

    split = filename.split("/")
    version = split[-1:][0]
    file_key = "/".join(split[:-1])
    added_metadata = { 'metadata': metadata_info }
    
    # the size of the mesh, gzipped
    added_metadata['size_gzip'] = get_gzip_size(dae_data)
    
    # the size of each subfile
    added_metadata['subfile_sizes'] = subfile_sizes
    # the size of each subfile, gzipped
    added_metadata['subfile_sizes_gzip'] = subfile_sizes_gzip
    
    if stream_data is not None:
        # the size of the progressive stream, if exists
        added_metadata['progressive_stream_size'] = len(stream_data)
        added_metadata['progressive_stream_size_gzip'] = get_gzip_size(stream_data)

    add_metadata(file_key, version, typeid, added_metadata)
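
get_gzip_size is used throughout this example but not shown. A minimal sketch, assuming it simply measures the gzip-compressed length of a byte string in memory (the implementation here is a guess, not the original):

import gzip
from StringIO import StringIO

def get_gzip_size(data):
    # Compress in memory and return the compressed byte count.
    buf = StringIO()
    gz = gzip.GzipFile(fileobj=buf, mode='wb')
    gz.write(data)
    gz.close()
    return len(buf.getvalue())
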
Example #4
def generate_panda3d(filename, typeid):
    metadata = get_file_metadata(filename)
    hash = metadata['types'][typeid]['hash']
    subfiles = metadata['types'][typeid]['subfiles']
    progressive_stream = metadata['types'][typeid].get('progressive_stream')
    progressive_data = get_hash(progressive_stream)['data'] if progressive_stream else None
    mipmaps = metadata['types'][typeid].get('mipmaps')
    pathinfo = PathInfo(filename)
    dae_data = get_hash(hash)['data']

    if mipmaps is not None:

        mipmap_data = {}
        for mipmap_name, mipmap_info in mipmaps.iteritems():
            tar_hash = mipmap_info['hash']
            tar_data = get_hash(tar_hash)['data']
            
            min_range = None
            max_range = None
            min_size = 128
            for byte_range in mipmap_info['byte_ranges']:
                if byte_range['width'] <= min_size and byte_range['height'] <= min_size:
                    min_range = (byte_range['offset'], byte_range['length'])
                max_range = (byte_range['offset'], byte_range['length'])
    
            mipmap_data[mipmap_name] = {}
            mipmap_data[mipmap_name]['base'] = tar_data[min_range[0]:min_range[0]+min_range[1]]
            mipmap_data[mipmap_name]['full'] = tar_data[max_range[0]:max_range[0]+max_range[1]]
    
        def base_loader(filename):
            return mipmap_data[filename]['base']
        def full_loader(filename):
            return mipmap_data[filename]['full']
    
        base_mesh = collada.Collada(StringIO(dae_data), aux_file_loader=base_loader)
        base_bam_data = getBam(base_mesh, 'base_' + filename)
        base_bam_hex_key = hashlib.sha256(base_bam_data).hexdigest()
        save_file_data(base_bam_hex_key, base_bam_data, "model/x-bam")
    
        full_mesh = collada.Collada(StringIO(dae_data), aux_file_loader=full_loader)
        if progressive_data is not None:
            full_mesh = add_back_pm.add_back_pm(full_mesh, StringIO(progressive_data), 100)
        full_bam_data = getBam(full_mesh, 'full_' + filename)
        full_bam_hex_key = hashlib.sha256(full_bam_data).hexdigest()
        save_file_data(full_bam_hex_key, full_bam_data, "model/x-bam")
    
        add_metadata(pathinfo.basepath, pathinfo.version, typeid, {'panda3d_base_bam': base_bam_hex_key,
                                                                   'panda3d_full_bam': full_bam_hex_key})
    else:
        
        subfile_map = {}
        for subfile in subfiles:
            img_meta = get_file_metadata(subfile)
            img_hash = img_meta['hash']
            img_data = get_hash(img_hash)['data']
            base_name = os.path.basename(os.path.split(subfile)[0])
            subfile_map[base_name] = img_data
        
        def customImageLoader(filename):
            return subfile_map[posixpath.basename(filename)]
        
        mesh = collada.Collada(StringIO(dae_data), aux_file_loader=customImageLoader)
        other_bam_data = getBam(mesh, typeid + '_' + filename)
        other_bam_hex_key = hashlib.sha256(other_bam_data).hexdigest()
        save_file_data(other_bam_hex_key, other_bam_data, "model/x-bam")
        
        add_metadata(pathinfo.basepath, pathinfo.version, typeid, {'panda3d_bam': other_bam_hex_key})
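
The byte-range scan above selects two slices from each mipmap tarball: the last level at or below 128x128 becomes the 'base' texture, and the final entry becomes 'full'. A worked example with hypothetical byte_ranges, assuming the list is ordered from smallest to largest level:

byte_ranges = [  # hypothetical values for illustration
    {'width': 32,  'height': 32,  'offset': 0,    'length': 1000},
    {'width': 128, 'height': 128, 'offset': 1000, 'length': 8000},
    {'width': 512, 'height': 512, 'offset': 9000, 'length': 90000},
]
min_size = 128
min_range = max_range = None
for byte_range in byte_ranges:
    if byte_range['width'] <= min_size and byte_range['height'] <= min_size:
        min_range = (byte_range['offset'], byte_range['length'])
    max_range = (byte_range['offset'], byte_range['length'])
print min_range  # (1000, 8000)  -> the 128x128 'base' level
print max_range  # (9000, 90000) -> the 512x512 'full' level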
        
Example #5
def generate_progressive(filename, typeid):
    metadata = get_file_metadata(filename)
    hash = metadata['types'][typeid]['hash']
    subfiles = metadata['types'][typeid]['subfiles']
    path, version = posixpath.split(filename)

    dae_data = get_hash(hash)['data']

    subfile_map = {}
    for subfile in subfiles:
        img_meta = get_file_metadata(subfile)
        img_hash = img_meta['hash']
        img_data = get_hash(img_hash)['data']
        base_name = posixpath.basename(posixpath.split(subfile)[0])
        subfile_map[base_name] = img_data

    def customImageLoader(filename):
        return subfile_map[posixpath.basename(filename)]

    mesh = collada.Collada(StringIO(dae_data), aux_file_loader=customImageLoader)

    strip_lines = meshtool.filters.factory.getInstance('strip_lines')
    mesh = strip_lines.apply(mesh)
    med_opts = meshtool.filters.factory.getInstance('medium_optimizations')
    mesh = med_opts.apply(mesh)

    progressive_stream = StringIO()
    sander_simplify = SanderSimplify(mesh, progressive_stream)
    mesh = sander_simplify.simplify()
    
    if sander_simplify.base_tri_count != sander_simplify.orig_tri_count:
        progressive_stream = progressive_stream.getvalue()
        progressive_hex_key = hashlib.sha256(progressive_stream).hexdigest()
        save_file_data(progressive_hex_key, progressive_stream, "model/vnd.pdae")
        progressive_stream_num_triangles = sander_simplify.orig_tri_count - sander_simplify.base_tri_count
    else:
        progressive_hex_key = None
        progressive_stream_num_triangles = 0

    mipmap_metadata = {}
    mipmaps = getMipMaps(mesh)
    for imgpath, (tarbuf, ranges) in mipmaps.iteritems():
        mipmap_tar_hex_key = hashlib.sha256(tarbuf).hexdigest()
        save_file_data(mipmap_tar_hex_key, tarbuf, "application/x-tar")
        mipmap_metadata[imgpath] = {'hash':mipmap_tar_hex_key, 'byte_ranges':ranges}

    # Make sure image paths are just the base name
    current_prefix = "progressive"
    subfile_names = []
    subfile_map = {}
    for img in mesh.images:
        base_name = posixpath.basename(img.path)
        subfile_map[base_name] = img.data

        img_hex_key = hashlib.sha256(subfile_map[base_name]).hexdigest()
        save_file_data(img_hex_key, subfile_map[base_name], "image/%s" % img.pilimage.format.lower())
        img_path = "%s/%s/%s" % (path, current_prefix, base_name)
        img_len = len(subfile_map[base_name])
        img_version_num = get_new_version_from_path(img_path, file_type="image")
        save_file_name(img_path, img_version_num, img_hex_key, img_len)
        subfile_names.append("%s/%s" % (img_path, img_version_num))

    str_buffer = StringIO()
    mesh.write(str_buffer)
    orig_save_data = str_buffer.getvalue()
    orig_hex_key = hashlib.sha256(orig_save_data).hexdigest()

    save_file_data(orig_hex_key, orig_save_data, "application/xml")

    zip_buffer = StringIO()
    combined_zip = zipfile.ZipFile(zip_buffer, mode='w', compression=zipfile.ZIP_DEFLATED)
    combined_zip.writestr(posixpath.basename(path), orig_save_data)
    for img_name, img_data in subfile_map.iteritems():
        combined_zip.writestr(img_name, img_data)
    combined_zip.close()

    zip_save_data = zip_buffer.getvalue()
    zip_hex_key = hashlib.sha256(zip_save_data).hexdigest()
    save_file_data(zip_hex_key, zip_save_data, "application/zip")

    save_version_type(path, version, orig_hex_key, len(orig_save_data),
                      subfile_names, zip_hex_key, "progressive")

    add_metadata(path, version, "progressive", { 'progressive_stream': progressive_hex_key,
                                                 'progressive_stream_num_triangles': progressive_stream_num_triangles,
                                                 'mipmaps': mipmap_metadata  })

    send_task("celery_tasks.generate_screenshot.generate_screenshot", args=[filename, "progressive"])
    send_task("celery_tasks.generate_metadata.generate_metadata", args=[filename, "progressive"])