Example #1
0
def place_upload(main_rowkey, subfiles, title, path, description, selected_dae=None, extra_metadata=None,
                    create_index=True, ephemeral_ttl=None, ephemeral_subfiles=None):
    """Store an uploaded collada document as a new "original" version at path.

    Loads the temp upload identified by main_rowkey, parses the collada and
    its referenced images, normalizes and de-duplicates texture file names,
    persists the dae, each image, and a combined zip (all keyed by content
    hash), records the version row, and (for non-ephemeral uploads) queues
    the follow-up metadata/screenshot/optimized tasks.

    Parameters:
      main_rowkey        -- temp-storage key of the uploaded file (zip or dae)
      subfiles           -- dict mapping subfile base name -> temp-storage key
      title, description -- user-supplied metadata for the new version
      path               -- destination path, without a version component
      selected_dae       -- which dae to use when the upload zip contains several
      extra_metadata     -- optional dict merged into the saved version metadata
      create_index       -- whether to index the new version
      ephemeral_ttl      -- if not None, save rows with this TTL and skip the
                            post-processing tasks
      ephemeral_subfiles -- dict of subfile base name -> already-stored file
                            path, resolved from stored data instead of temp
                            storage (only consulted when ephemeral_ttl is set)

    Returns "<path>/<version>" for the newly saved version.
    Raises DatabaseError when a storage operation fails.
    """
    import_upload.update_state(state="LOADING")
    file_data = get_temp_file(main_rowkey)
    (zip, dae_zip_name, dae_data) = get_file_or_zip(file_data, selected_dae)
    
    if ephemeral_subfiles is None:
        ephemeral_subfiles = {}
    
    if ephemeral_ttl is not None:
        # Ephemeral uploads may reference subfiles that are already stored:
        # pre-fetch their data so the getter below can serve them by name.
        eph_subfile_metadata = get_multi_file_metadata(ephemeral_subfiles.values())
        eph_subfile_hashes = [m['hash'] for m in eph_subfile_metadata.itervalues()]
        eph_subfile_data = multi_get_hash(eph_subfile_hashes)
        
        def eph_subfile_getter(name):
            # Prefer the pre-fetched stored data; fall back to temp storage
            # for anything uploaded alongside this file.
            if name in ephemeral_subfiles:
                return eph_subfile_data[eph_subfile_metadata[ephemeral_subfiles[name]]['hash']]['data']
            else:
                return get_temp_file(subfiles[name])
        
        combined_subfiles = dict(ephemeral_subfiles.items() + subfiles.items())
        (collada_obj, subfile_data, image_objs) = get_collada_and_images(zip, dae_zip_name, dae_data, combined_subfiles, subfile_getter=eph_subfile_getter)
    else:
        import_upload.update_state(state="CHECKING_COLLADA")
        (collada_obj, subfile_data, image_objs) = get_collada_and_images(zip, dae_zip_name, dae_data, subfiles)

    import_upload.update_state(state="SAVING_ORIGINAL")
    try: new_version_num = get_new_version_from_path(path, file_type="collada")
    except cass.DatabaseError: raise DatabaseError()
        
    #Make sure image paths are just the base name
    current_prefix = "original"
    subfile_names = []
    image_names = []
    for img in collada_obj.images:
        rel_path = img.path
        base_name = posixpath.basename(img.path)
        orig_base_name = base_name
        
        # Ephemeral references point at files that already exist in storage;
        # record their stored path and skip re-saving.
        if base_name in ephemeral_subfiles:
            subfile_names.append(ephemeral_subfiles[base_name])
            continue
        
        #strip out any character not allowed
        base_name = re.sub(r'[^\w\-\.]', '', base_name)
        
        #make sure that referenced texture files are unique
        while base_name in image_names:
            dot = base_name.rfind('.')
            ext = base_name[dot:] if dot != -1 else ''
            before_ext = base_name[0:dot] if dot != -1 else base_name
            base_name = "%s-x%s" % (before_ext, ext)
        # BUGFIX: record the chosen name so the uniqueness check above can
        # actually fire for later images. Previously image_names was never
        # populated, so two distinct textures whose sanitized names collided
        # would silently share one subfile_data/image_objs entry.
        image_names.append(base_name)
            
        if base_name != orig_base_name:
            # The data dicts are keyed by the original base name; move the
            # entries under the final, unique name.
            subfile_data[base_name] = subfile_data[orig_base_name]
            del subfile_data[orig_base_name]
            image_objs[base_name] = image_objs[orig_base_name]
            del image_objs[orig_base_name]
        
        img.path = "./%s" % base_name
        img.save()
        img_hex_key = hashlib.sha256(subfile_data[base_name]).hexdigest()
        try: save_file_data(img_hex_key, subfile_data[base_name], "image/%s" % image_objs[base_name].format.lower())
        except: raise DatabaseError()
        img_path = "%s/%s/%s" % (path, current_prefix, base_name)
        img_len = len(subfile_data[base_name])
        try: img_version_num = get_new_version_from_path(img_path, file_type="image")
        except cass.DatabaseError: raise DatabaseError()
        try: save_file_name(img_path, img_version_num, img_hex_key, img_len, ttl=ephemeral_ttl)
        except cass.DatabaseError: raise DatabaseError()
        subfile_names.append("%s/%s" % (img_path, img_version_num))

    # Serialize the (possibly rewritten) collada document and store it by hash.
    str_buffer = StringIO()
    collada_obj.write(str_buffer)
    orig_save_data = str_buffer.getvalue()
    orig_hex_key = hashlib.sha256(orig_save_data).hexdigest()
    
    # NOTE(review): unlike every other save in this function, a failure here
    # re-raises the original exception rather than wrapping it in
    # DatabaseError -- preserved as-is; confirm whether that is intentional.
    try: save_file_data(orig_hex_key, orig_save_data, "application/xml")
    except: raise
    
    # Bundle the dae plus all of its images into one zip, stored by hash.
    zip_buffer = StringIO()
    combined_zip = zipfile.ZipFile(zip_buffer, mode='w', compression=zipfile.ZIP_DEFLATED)
    combined_zip.writestr(posixpath.basename(path), orig_save_data)
    for img_name, img_data in subfile_data.iteritems():
        combined_zip.writestr(img_name, img_data)
    combined_zip.close()
    zip_save_data = zip_buffer.getvalue()
    zip_hex_key = hashlib.sha256(zip_save_data).hexdigest()
    try: save_file_data(zip_hex_key, zip_save_data, "application/zip")
    except: raise DatabaseError()
    
    if extra_metadata is None:
        extra_metadata = {}
    else:
        # Copy so we don't mutate the caller's dict when tagging it below.
        extra_metadata = dict(extra_metadata)
    extra_metadata['ephemeral'] = ephemeral_ttl is not None
    
    try:
        save_version_type(path, new_version_num, orig_hex_key, len(orig_save_data),
                          subfile_names, zip_hex_key, "original", title,
                          description, create_index=create_index, ttl=ephemeral_ttl,
                          extra_metadata=extra_metadata)
    except cass.DatabaseError:
        raise DatabaseError()

    path_with_vers = "%s/%s" % (path, new_version_num)
    
    # Ephemeral uploads expire on their own, so skip post-processing for them.
    if ephemeral_ttl is None:
        send_task("celery_tasks.generate_metadata.generate_metadata", args=[path_with_vers, "original"])
        send_task("celery_tasks.generate_screenshot.generate_screenshot", args=[path_with_vers, "original"])
        send_task("celery_tasks.generate_optimized.generate_optimized", args=[path_with_vers, "original"])
        #FIXME: not autorunning this now because it takes too long and is error-prone
        #send_task("celery_tasks.generate_progressive.generate_progressive", args=[path_with_vers, "original"])
    
    return path_with_vers
def generate_optimized(filename, typeid):
    """Produce the "optimized" type for an existing file version.

    Loads the stored collada identified by filename/typeid together with its
    image subfiles, runs meshtool's medium_optimizations filter, then saves
    the resulting dae, each image, and a combined zip (all keyed by content
    hash) as the "optimized" version type, and queues the screenshot and
    metadata follow-up tasks.
    """
    meta = get_file_metadata(filename)
    type_entry = meta['types'][typeid]
    path, version = posixpath.split(filename)

    dae_data = get_hash(type_entry['hash'])['data']

    # Map each referenced image's base name to its raw bytes so the collada
    # loader can resolve textures without hitting storage again.
    # Subfile entries look like ".../<basename>/<version>" -- the basename
    # component is the texture file name.
    texture_bytes = {}
    for subfile_path in type_entry['subfiles']:
        sub_hash = get_file_metadata(subfile_path)['hash']
        sub_name = posixpath.basename(posixpath.split(subfile_path)[0])
        texture_bytes[sub_name] = get_hash(sub_hash)['data']

    def customImageLoader(img_filename):
        return texture_bytes[posixpath.basename(img_filename)]

    mesh = collada.Collada(StringIO(dae_data), aux_file_loader=customImageLoader)

    mesh = meshtool.filters.factory.getInstance('medium_optimizations').apply(mesh)

    #Make sure image paths are just the base name
    current_prefix = "optimized"
    subfile_names = []
    out_images = {}
    for image in mesh.images:
        img_name = posixpath.basename(image.path)
        img_bytes = image.data
        out_images[img_name] = img_bytes

        # Save each image by content hash and register a versioned name row.
        digest = hashlib.sha256(img_bytes).hexdigest()
        save_file_data(digest, img_bytes, "image/%s" % image.pilimage.format.lower())
        img_path = "%s/%s/%s" % (path, current_prefix, img_name)
        img_version = get_new_version_from_path(img_path, file_type="image")
        save_file_name(img_path, img_version, digest, len(img_bytes))
        subfile_names.append("%s/%s" % (img_path, img_version))

    # Serialize the optimized document and store it by content hash.
    dae_buffer = StringIO()
    mesh.write(dae_buffer)
    dae_out = dae_buffer.getvalue()
    dae_digest = hashlib.sha256(dae_out).hexdigest()

    save_file_data(dae_digest, dae_out, "application/xml")

    # Bundle the dae plus every image into one zip, also stored by hash.
    zip_buffer = StringIO()
    bundle = zipfile.ZipFile(zip_buffer, mode='w', compression=zipfile.ZIP_DEFLATED)
    bundle.writestr(posixpath.basename(path), dae_out)
    for entry_name, entry_bytes in out_images.items():
        bundle.writestr(entry_name, entry_bytes)
    bundle.close()

    zip_out = zip_buffer.getvalue()
    zip_digest = hashlib.sha256(zip_out).hexdigest()
    save_file_data(zip_digest, zip_out, "application/zip")

    save_version_type(path, version, dae_digest, len(dae_out),
                      subfile_names, zip_digest, "optimized")

    send_task("celery_tasks.generate_screenshot.generate_screenshot", args=[filename, "optimized"])
    send_task("celery_tasks.generate_metadata.generate_metadata", args=[filename, "optimized"])
def generate_progressive(filename, typeid):
    """Build the "progressive" representation of an existing file version:
    an optimized base mesh, an optional progressive-refinement triangle
    stream, and mipmapped textures, saved as the "progressive" version type
    with accompanying metadata.
    """
    metadata = get_file_metadata(filename)
    # NOTE: 'hash' shadows the builtin of the same name (local scope only).
    hash = metadata['types'][typeid]['hash']
    subfiles = metadata['types'][typeid]['subfiles']
    # filename is "<path>/<version>".
    path, version = posixpath.split(filename)

    dae_data = get_hash(hash)['data']

    # Map each image's base name to its raw bytes so the collada loader can
    # resolve textures in memory. Subfile entries look like
    # ".../<basename>/<version>": the basename component is the file name.
    subfile_map = {}
    for subfile in subfiles:
        img_meta = get_file_metadata(subfile)
        img_hash = img_meta['hash']
        img_data = get_hash(img_hash)['data']
        base_name = posixpath.basename(posixpath.split(subfile)[0])
        subfile_map[base_name] = img_data

    def customImageLoader(filename):
        # Resolve auxiliary file references against the prefetched map.
        return subfile_map[posixpath.basename(filename)]

    mesh = collada.Collada(StringIO(dae_data), aux_file_loader=customImageLoader)

    # Preprocess: drop line primitives, then apply meshtool's standard
    # medium optimization pass before simplification.
    strip_lines = meshtool.filters.factory.getInstance('strip_lines')
    mesh = strip_lines.apply(mesh)
    med_opts = meshtool.filters.factory.getInstance('medium_optimizations')
    mesh = med_opts.apply(mesh)

    # Simplify the mesh; the refinement data needed to restore full detail is
    # written into progressive_stream as a side effect.
    progressive_stream = StringIO()
    sander_simplify = SanderSimplify(mesh, progressive_stream)
    mesh = sander_simplify.simplify()
    
    # Only store a progressive stream if simplification actually removed
    # triangles; otherwise record that no stream exists.
    if sander_simplify.base_tri_count != sander_simplify.orig_tri_count:
        # Rebinds progressive_stream from a StringIO to its str contents.
        progressive_stream = progressive_stream.getvalue()
        progressive_hex_key = hashlib.sha256(progressive_stream).hexdigest()
        save_file_data(progressive_hex_key, progressive_stream, "model/vnd.pdae")
        progressive_stream_num_triangles = sander_simplify.orig_tri_count - sander_simplify.base_tri_count
    else:
        progressive_hex_key = None
        progressive_stream_num_triangles = 0

    # Generate mipmaps for the mesh's textures; each entry is a tar of levels
    # plus the byte ranges of the individual images inside it.
    mipmap_metadata = {}
    mipmaps = getMipMaps(mesh)
    for imgpath, (tarbuf, ranges) in mipmaps.iteritems():
        mipmap_tar_hex_key = hashlib.sha256(tarbuf).hexdigest()
        save_file_data(mipmap_tar_hex_key, tarbuf, "application/x-tar")
        mipmap_metadata[imgpath] = {'hash':mipmap_tar_hex_key, 'byte_ranges':ranges}

    #Make sure image paths are just the base name
    current_prefix = "progressive"
    subfile_names = []
    subfile_map = {}
    for img in mesh.images:
        base_name = posixpath.basename(img.path)
        subfile_map[base_name] = img.data

        # Save each image by content hash and register a versioned name row.
        img_hex_key = hashlib.sha256(subfile_map[base_name]).hexdigest()
        save_file_data(img_hex_key, subfile_map[base_name], "image/%s" % img.pilimage.format.lower())
        img_path = "%s/%s/%s" % (path, current_prefix, base_name)
        img_len = len(subfile_map[base_name])
        img_version_num = get_new_version_from_path(img_path, file_type="image")
        save_file_name(img_path, img_version_num, img_hex_key, img_len)
        subfile_names.append("%s/%s" % (img_path, img_version_num))

    # Serialize the simplified base mesh and store it by content hash.
    str_buffer = StringIO()
    mesh.write(str_buffer)
    orig_save_data = str_buffer.getvalue()
    orig_hex_key = hashlib.sha256(orig_save_data).hexdigest()

    save_file_data(orig_hex_key, orig_save_data, "application/xml")

    # Bundle the dae plus every image into one zip, also stored by hash.
    zip_buffer = StringIO()
    combined_zip = zipfile.ZipFile(zip_buffer, mode='w', compression=zipfile.ZIP_DEFLATED)
    combined_zip.writestr(posixpath.basename(path), orig_save_data)
    for img_name, img_data in subfile_map.iteritems():
        combined_zip.writestr(img_name, img_data)
    combined_zip.close()

    zip_save_data = zip_buffer.getvalue()
    zip_hex_key = hashlib.sha256(zip_save_data).hexdigest()
    save_file_data(zip_hex_key, zip_save_data, "application/zip")

    save_version_type(path, version, orig_hex_key, len(orig_save_data),
                      subfile_names, zip_hex_key, "progressive")

    # Attach the stream/mipmap details so clients can fetch them separately.
    add_metadata(path, version, "progressive", { 'progressive_stream': progressive_hex_key,
                                                 'progressive_stream_num_triangles': progressive_stream_num_triangles,
                                                 'mipmaps': mipmap_metadata  })

    send_task("celery_tasks.generate_screenshot.generate_screenshot", args=[filename, "progressive"])
    send_task("celery_tasks.generate_metadata.generate_metadata", args=[filename, "progressive"])