Example #1
def read_back_db():
    print 'CHECKING FOR PREVIOUS DB FILE'
    print '-----------------------------'
    try:
        db = bsddb.hashopen(TEMP_DATA_FILE, 'r')
    except bsddb.db.DBNoSuchFileError:
        print 'Did not find a temporary BDB file. Skipping read-back step.'
        return
    
    previous_keys = db.keys()
    print 'Found', len(previous_keys), 'keys in previous data file.'
    
    if len(previous_keys) == 0:
        print 'Since no keys in previous file, continuing.'
        return
    
    referenced_keys = get_referenced_keys()
    print 'Checking', len(referenced_keys), 'referenced keys for previous items'
    
    still_referenced = referenced_keys.intersection(previous_keys)
    print 'Found', len(still_referenced), 'keys previously deleted that are now referenced.'
    
    if len(still_referenced) == 0:
        erase_check = raw_input('No previously deleted keys are referenced. Okay to erase old db (y/n)? ')
        if erase_check.upper().strip() == 'Y':
            print 'Okay, continuing.'
            return
        else:
            print 'Okay, nothing left to do here. Exiting.'
            sys.exit(0)
    else:
        rewrite_check = raw_input('Would you like me to rewrite the %d keys (y/n)? ' % (len(still_referenced),))
        if rewrite_check.upper().strip() == 'Y':
            print 'Okay, going to write keys now.'
            for previous_key in still_referenced:
                previous_data = marshal.loads(db[previous_key])
                mimetype = previous_data['mimetype']
                data = previous_data['data']
                print 'Writing key', previous_key, 'mimetype', mimetype, 'length', len(data)
                save_file_data(previous_key, data, mimetype)
            print 'Finished writing', len(still_referenced), 'keys. Exiting'
            sys.exit(0)
        else:
            print 'Okay, nothing left to do here. Exiting.'
            sys.exit(0)
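
# --- Hedged sketch, not part of the original module ---
# read_back_db() above assumes the temporary BDB file maps content keys to
# marshal-dumped dicts with 'mimetype' and 'data' entries. The helper below
# sketches how such a record could be written; the function name and the
# db_path argument are illustrative assumptions, only the bsddb/marshal usage
# mirrors the reader above.
import bsddb
import marshal

def _write_temp_record(db_path, key, data, mimetype):
    #open (or create) the hash-format BDB file and store one marshal-encoded record
    db = bsddb.hashopen(db_path, 'c')
    try:
        db[key] = marshal.dumps({'mimetype': mimetype, 'data': data})
    finally:
        db.close()

#usage sketch: _write_temp_record('temp_data.db', some_hex_key, raw_bytes, 'image/png')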
def generate_screenshot(filename, typeid):
    metadata = get_file_metadata(filename)
    hash = metadata['types'][typeid]['hash']
    subfiles = metadata['types'][typeid]['subfiles']
    
    dae_data = get_hash(hash)['data']

    subfile_map = {}
    for subfile in subfiles:
        img_meta = get_file_metadata(subfile)
        img_hash = img_meta['hash']
        img_data = get_hash(img_hash)['data']
        base_name = os.path.basename(os.path.split(subfile)[0])
        subfile_map[base_name] = img_data
    
    #The following is a total hack and I feel really dirty doing it, but
    # there is no way to get panda3d to clean up after itself except to
    # exit the process. Celery workers run as daemons, so they normally
    # aren't allowed to create child processes, since doing so could leave
    # orphaned, defunct processes behind. I'm doing it anyway because I
    # haven't found any other way to do this. Sorry.
    q = multiprocessing.Queue()
    daemonic = multiprocessing.current_process()._daemonic
    multiprocessing.current_process()._daemonic = False
    p = multiprocessing.Process(target=_get_screenshot, args=[q, dae_data, subfile_map])
    p.start()
    main_screenshot = q.get()
    p.join()
    multiprocessing.current_process()._daemonic = daemonic
    
    im = Image.open(StringIO(main_screenshot))
    thumbnail = StringIO()
    im.thumbnail((96,96), Image.ANTIALIAS)
    im.save(thumbnail, "PNG", optimize=1)
    thumbnail = thumbnail.getvalue()
    
    main_key = hashlib.sha256(main_screenshot).hexdigest()
    thumb_key = hashlib.sha256(thumbnail).hexdigest()
    save_file_data(main_key, main_screenshot, "image/png")
    save_file_data(thumb_key, thumbnail, "image/png")
    
    ss_info = {'screenshot': main_key, 'thumbnail': thumb_key}
    base_filename, version_num = os.path.split(filename)
    add_metadata(base_filename, version_num, typeid, ss_info)
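
# --- Hedged sketch, not part of the original module ---
# The _daemonic flag juggling above can be wrapped in a context manager so the
# flag is restored even if the child process fails. This is only a sketch of
# that pattern; it relies on the same private attribute as the code above.
import multiprocessing
from contextlib import contextmanager

@contextmanager
def allow_child_processes():
    proc = multiprocessing.current_process()
    saved = proc._daemonic
    proc._daemonic = False
    try:
        yield
    finally:
        proc._daemonic = saved

#usage sketch:
#    with allow_child_processes():
#        p = multiprocessing.Process(target=_get_screenshot, args=[q, dae_data, subfile_map])
#        p.start()
#        main_screenshot = q.get()
#        p.join()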
Example #3
def generate_optimized(filename, typeid):
    metadata = get_file_metadata(filename)
    hash = metadata['types'][typeid]['hash']
    subfiles = metadata['types'][typeid]['subfiles']
    path, version = posixpath.split(filename)

    dae_data = get_hash(hash)['data']

    subfile_map = {}
    for subfile in subfiles:
        img_meta = get_file_metadata(subfile)
        img_hash = img_meta['hash']
        img_data = get_hash(img_hash)['data']
        base_name = posixpath.basename(posixpath.split(subfile)[0])
        subfile_map[base_name] = img_data

    def customImageLoader(filename):
        return subfile_map[posixpath.basename(filename)]

    mesh = collada.Collada(StringIO(dae_data), aux_file_loader=customImageLoader)

    med_opts = meshtool.filters.factory.getInstance('medium_optimizations')
    mesh = med_opts.apply(mesh)

    #Make sure image paths are just the base name
    current_prefix = "optimized"
    subfile_names = []
    subfile_map = {}
    for img in mesh.images:
        base_name = posixpath.basename(img.path)
        subfile_map[base_name] = img.data

        img_hex_key = hashlib.sha256(subfile_map[base_name]).hexdigest()
        save_file_data(img_hex_key, subfile_map[base_name], "image/%s" % img.pilimage.format.lower())
        img_path = "%s/%s/%s" % (path, current_prefix, base_name)
        img_len = len(subfile_map[base_name])
        img_version_num = get_new_version_from_path(img_path, file_type="image")
        save_file_name(img_path, img_version_num, img_hex_key, img_len)
        subfile_names.append("%s/%s" % (img_path, img_version_num))

    str_buffer = StringIO()
    mesh.write(str_buffer)
    orig_save_data = str_buffer.getvalue()
    orig_hex_key = hashlib.sha256(orig_save_data).hexdigest()

    save_file_data(orig_hex_key, orig_save_data, "application/xml")

    zip_buffer = StringIO()
    combined_zip = zipfile.ZipFile(zip_buffer, mode='w', compression=zipfile.ZIP_DEFLATED)
    combined_zip.writestr(posixpath.basename(path), orig_save_data)
    for img_name, img_data in subfile_map.iteritems():
        combined_zip.writestr(img_name, img_data)
    combined_zip.close()

    zip_save_data = zip_buffer.getvalue()
    zip_hex_key = hashlib.sha256(zip_save_data).hexdigest()
    save_file_data(zip_hex_key, zip_save_data, "application/zip")

    save_version_type(path, version, orig_hex_key, len(orig_save_data),
                      subfile_names, zip_hex_key, "optimized")

    send_task("celery_tasks.generate_screenshot.generate_screenshot", args=[filename, "optimized"])
    send_task("celery_tasks.generate_metadata.generate_metadata", args=[filename, "optimized"])
Example #4
def place_upload(main_rowkey, subfiles, title, path, description, selected_dae=None, extra_metadata=None,
                    create_index=True, ephemeral_ttl=None, ephemeral_subfiles=None):
    import_upload.update_state(state="LOADING")
    file_data = get_temp_file(main_rowkey)
    (zip, dae_zip_name, dae_data) = get_file_or_zip(file_data, selected_dae)
    
    if ephemeral_subfiles is None:
        ephemeral_subfiles = {}
    
    if ephemeral_ttl is not None:
        eph_subfile_metadata = get_multi_file_metadata(ephemeral_subfiles.values())
        eph_subfile_hashes = [m['hash'] for m in eph_subfile_metadata.itervalues()]
        eph_subfile_data = multi_get_hash(eph_subfile_hashes)
        
        def eph_subfile_getter(name):
            if name in ephemeral_subfiles:
                return eph_subfile_data[eph_subfile_metadata[ephemeral_subfiles[name]]['hash']]['data']
            else:
                return get_temp_file(subfiles[name])
        
        combined_subfiles = dict(ephemeral_subfiles.items() + subfiles.items())
        (collada_obj, subfile_data, image_objs) = get_collada_and_images(zip, dae_zip_name, dae_data, combined_subfiles, subfile_getter=eph_subfile_getter)
    else:
        import_upload.update_state(state="CHECKING_COLLADA")
        (collada_obj, subfile_data, image_objs) = get_collada_and_images(zip, dae_zip_name, dae_data, subfiles)

    import_upload.update_state(state="SAVING_ORIGINAL")
    try: new_version_num = get_new_version_from_path(path, file_type="collada")
    except cass.DatabaseError: raise DatabaseError()
        
    #Make sure image paths are just the base name
    current_prefix = "original"
    subfile_names = []
    image_names = []
    for img in collada_obj.images:
        rel_path = img.path
        base_name = posixpath.basename(img.path)
        orig_base_name = base_name
        
        if base_name in ephemeral_subfiles:
            subfile_names.append(ephemeral_subfiles[base_name])
            continue
        
        #strip out any character not allowed
        base_name = re.sub('[^\w\-\.]', '', base_name)
        
        #make sure that referenced texture files are unique
        while base_name in image_names:
            dot = base_name.rfind('.')
            ext = base_name[dot:] if dot != -1 else ''
            before_ext = base_name[0:dot] if dot != -1 else base_name
            base_name = "%s-x%s" % (before_ext, ext)
        image_names.append(base_name)
            
        if base_name != orig_base_name:
            subfile_data[base_name] = subfile_data[orig_base_name]
            del subfile_data[orig_base_name]
            image_objs[base_name] = image_objs[orig_base_name]
            del image_objs[orig_base_name]
        
        img.path = "./%s" % base_name
        img.save()
        img_hex_key = hashlib.sha256(subfile_data[base_name]).hexdigest()
        try: save_file_data(img_hex_key, subfile_data[base_name], "image/%s" % image_objs[base_name].format.lower())
        except: raise DatabaseError()
        img_path = "%s/%s/%s" % (path, current_prefix, base_name)
        img_len = len(subfile_data[base_name])
        try: img_version_num = get_new_version_from_path(img_path, file_type="image")
        except cass.DatabaseError: raise DatabaseError()
        try: save_file_name(img_path, img_version_num, img_hex_key, img_len, ttl=ephemeral_ttl)
        except cass.DatabaseError: raise DatabaseError()
        subfile_names.append("%s/%s" % (img_path, img_version_num))

    str_buffer = StringIO()
    collada_obj.write(str_buffer)
    orig_save_data = str_buffer.getvalue()
    orig_hex_key = hashlib.sha256(orig_save_data).hexdigest()
    
    try: save_file_data(orig_hex_key, orig_save_data, "application/xml")
    except: raise DatabaseError()
    
    zip_buffer = StringIO()
    combined_zip = zipfile.ZipFile(zip_buffer, mode='w', compression=zipfile.ZIP_DEFLATED)
    combined_zip.writestr(posixpath.basename(path), orig_save_data)
    for img_name, img_data in subfile_data.iteritems():
        combined_zip.writestr(img_name, img_data)
    combined_zip.close()
    zip_save_data = zip_buffer.getvalue()
    zip_hex_key = hashlib.sha256(zip_save_data).hexdigest()
    try: save_file_data(zip_hex_key, zip_save_data, "application/zip")
    except: raise DatabaseError()
    
    if extra_metadata is None:
        extra_metadata = {}
    extra_metadata['ephemeral'] = ephemeral_ttl is not None
    
    try:
        save_version_type(path, new_version_num, orig_hex_key, len(orig_save_data),
                          subfile_names, zip_hex_key, "original", title,
                          description, create_index=create_index, ttl=ephemeral_ttl,
                          extra_metadata=extra_metadata)
    except cass.DatabaseError:
        raise DatabaseError()

    path_with_vers = "%s/%s" % (path, new_version_num)
    
    if ephemeral_ttl is None:
        send_task("celery_tasks.generate_metadata.generate_metadata", args=[path_with_vers, "original"])
        send_task("celery_tasks.generate_screenshot.generate_screenshot", args=[path_with_vers, "original"])
        send_task("celery_tasks.generate_optimized.generate_optimized", args=[path_with_vers, "original"])
        #FIXME: not autorunning this now because it takes too long and is error-prone
        #send_task("celery_tasks.generate_progressive.generate_progressive", args=[path_with_vers, "original"])
    
    return path_with_vers
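
# --- Hedged sketch, not part of the original module ---
# The texture-name sanitising and de-duplication inside place_upload can be
# exercised in isolation. Pure Python, no storage calls; the helper name is
# illustrative.
import re

def unique_texture_name(name, taken):
    #strip out any character not allowed, then append "-x" until the name is unused
    name = re.sub(r'[^\w\-\.]', '', name)
    while name in taken:
        dot = name.rfind('.')
        ext = name[dot:] if dot != -1 else ''
        stem = name[:dot] if dot != -1 else name
        name = "%s-x%s" % (stem, ext)
    taken.add(name)
    return name

#unique_texture_name('tex tu re.png', set())              -> 'texture.png'
#unique_texture_name('texture.png', set(['texture.png'])) -> 'texture-x.png'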
Example #5
def generate_panda3d(filename, typeid):
    metadata = get_file_metadata(filename)
    hash = metadata['types'][typeid]['hash']
    subfiles = metadata['types'][typeid]['subfiles']
    progressive_stream = metadata['types'][typeid].get('progressive_stream')
    progressive_data = get_hash(progressive_stream)['data'] if progressive_stream else None
    mipmaps = metadata['types'][typeid].get('mipmaps')
    pathinfo = PathInfo(filename)
    dae_data = get_hash(hash)['data']

    if mipmaps is not None:

        mipmap_data = {}
        for mipmap_name, mipmap_info in mipmaps.iteritems():
            tar_hash = mipmap_info['hash']
            tar_data = get_hash(tar_hash)['data']
            
            #base = largest mipmap level that still fits within min_size,
            # full = last (largest) level listed in byte_ranges
            min_range = None
            max_range = None
            min_size = 128
            for byte_range in mipmap_info['byte_ranges']:
                if byte_range['width'] <= min_size and byte_range['height'] <= min_size:
                    min_range = (byte_range['offset'], byte_range['length'])
                max_range = (byte_range['offset'], byte_range['length'])
            if min_range is None:
                #no level fits within min_size; fall back to the first listed level
                first_range = mipmap_info['byte_ranges'][0]
                min_range = (first_range['offset'], first_range['length'])

            mipmap_data[mipmap_name] = {}
            mipmap_data[mipmap_name]['base'] = tar_data[min_range[0]:min_range[0]+min_range[1]]
            mipmap_data[mipmap_name]['full'] = tar_data[max_range[0]:max_range[0]+max_range[1]]
    
        def base_loader(filename):
            return mipmap_data[filename]['base']
        def full_loader(filename):
            return mipmap_data[filename]['full']
    
        base_mesh = collada.Collada(StringIO(dae_data), aux_file_loader=base_loader)
        base_bam_data = getBam(base_mesh, 'base_' + filename)
        base_bam_hex_key = hashlib.sha256(base_bam_data).hexdigest()
        save_file_data(base_bam_hex_key, base_bam_data, "model/x-bam")
    
        full_mesh = collada.Collada(StringIO(dae_data), aux_file_loader=full_loader)
        if progressive_data is not None:
            full_mesh = add_back_pm.add_back_pm(full_mesh, StringIO(progressive_data), 100)
        full_bam_data = getBam(full_mesh, 'full_' + filename)
        full_bam_hex_key = hashlib.sha256(full_bam_data).hexdigest()
        save_file_data(full_bam_hex_key, full_bam_data, "model/x-bam")
    
        add_metadata(pathinfo.basepath, pathinfo.version, typeid, {'panda3d_base_bam': base_bam_hex_key,
                                                                   'panda3d_full_bam': full_bam_hex_key})
    else:
        
        subfile_map = {}
        for subfile in subfiles:
            img_meta = get_file_metadata(subfile)
            img_hash = img_meta['hash']
            img_data = get_hash(img_hash)['data']
            base_name = os.path.basename(os.path.split(subfile)[0])
            subfile_map[base_name] = img_data
        
        def customImageLoader(filename):
            return subfile_map[posixpath.basename(filename)]
        
        mesh = collada.Collada(StringIO(dae_data), aux_file_loader=customImageLoader)
        other_bam_data = getBam(mesh, typeid + '_' + filename)
        other_bam_hex_key = hashlib.sha256(other_bam_data).hexdigest()
        save_file_data(other_bam_hex_key, other_bam_data, "model/x-bam")
        
        add_metadata(pathinfo.basepath, pathinfo.version, typeid, {'panda3d_bam': other_bam_hex_key})
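
# --- Hedged sketch, not part of the original module ---
# Sketch of how a consumer could fetch the bam variants written above, using
# the existing get_file_metadata/get_hash helpers and the metadata keys set by
# add_metadata. Loading the bytes into Panda3D itself is omitted.
def load_bam_variants(filename, typeid):
    type_meta = get_file_metadata(filename)['types'][typeid]
    bams = {}
    for key in ('panda3d_bam', 'panda3d_base_bam', 'panda3d_full_bam'):
        if key in type_meta:
            bams[key] = get_hash(type_meta[key])['data']
    return bams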
        
def generate_progressive(filename, typeid):
    metadata = get_file_metadata(filename)
    hash = metadata['types'][typeid]['hash']
    subfiles = metadata['types'][typeid]['subfiles']
    path, version = posixpath.split(filename)

    dae_data = get_hash(hash)['data']

    subfile_map = {}
    for subfile in subfiles:
        img_meta = get_file_metadata(subfile)
        img_hash = img_meta['hash']
        img_data = get_hash(img_hash)['data']
        base_name = posixpath.basename(posixpath.split(subfile)[0])
        subfile_map[base_name] = img_data

    def customImageLoader(filename):
        return subfile_map[posixpath.basename(filename)]

    mesh = collada.Collada(StringIO(dae_data), aux_file_loader=customImageLoader)

    strip_lines = meshtool.filters.factory.getInstance('strip_lines')
    mesh = strip_lines.apply(mesh)
    med_opts = meshtool.filters.factory.getInstance('medium_optimizations')
    mesh = med_opts.apply(mesh)

    progressive_stream = StringIO()
    sander_simplify = SanderSimplify(mesh, progressive_stream)
    mesh = sander_simplify.simplify()
    
    if sander_simplify.base_tri_count != sander_simplify.orig_tri_count:
        progressive_stream = progressive_stream.getvalue()
        progressive_hex_key = hashlib.sha256(progressive_stream).hexdigest()
        save_file_data(progressive_hex_key, progressive_stream, "model/vnd.pdae")
        progressive_stream_num_triangles = sander_simplify.orig_tri_count - sander_simplify.base_tri_count
    else:
        progressive_hex_key = None
        progressive_stream_num_triangles = 0

    mipmap_metadata = {}
    mipmaps = getMipMaps(mesh)
    for imgpath, (tarbuf, ranges) in mipmaps.iteritems():
        mipmap_tar_hex_key = hashlib.sha256(tarbuf).hexdigest()
        save_file_data(mipmap_tar_hex_key, tarbuf, "application/x-tar")
        mipmap_metadata[imgpath] = {'hash':mipmap_tar_hex_key, 'byte_ranges':ranges}

    #Make sure image paths are just the base name
    current_prefix = "progressive"
    subfile_names = []
    subfile_map = {}
    for img in mesh.images:
        base_name = posixpath.basename(img.path)
        subfile_map[base_name] = img.data

        img_hex_key = hashlib.sha256(subfile_map[base_name]).hexdigest()
        save_file_data(img_hex_key, subfile_map[base_name], "image/%s" % img.pilimage.format.lower())
        img_path = "%s/%s/%s" % (path, current_prefix, base_name)
        img_len = len(subfile_map[base_name])
        img_version_num = get_new_version_from_path(img_path, file_type="image")
        save_file_name(img_path, img_version_num, img_hex_key, img_len)
        subfile_names.append("%s/%s" % (img_path, img_version_num))

    str_buffer = StringIO()
    mesh.write(str_buffer)
    orig_save_data = str_buffer.getvalue()
    orig_hex_key = hashlib.sha256(orig_save_data).hexdigest()

    save_file_data(orig_hex_key, orig_save_data, "application/xml")

    zip_buffer = StringIO()
    combined_zip = zipfile.ZipFile(zip_buffer, mode='w', compression=zipfile.ZIP_DEFLATED)
    combined_zip.writestr(posixpath.basename(path), orig_save_data)
    for img_name, img_data in subfile_map.iteritems():
        combined_zip.writestr(img_name, img_data)
    combined_zip.close()

    zip_save_data = zip_buffer.getvalue()
    zip_hex_key = hashlib.sha256(zip_save_data).hexdigest()
    save_file_data(zip_hex_key, zip_save_data, "application/zip")

    save_version_type(path, version, orig_hex_key, len(orig_save_data),
                      subfile_names, zip_hex_key, "progressive")

    add_metadata(path, version, "progressive", { 'progressive_stream': progressive_hex_key,
                                                 'progressive_stream_num_triangles': progressive_stream_num_triangles,
                                                 'mipmaps': mipmap_metadata  })

    send_task("celery_tasks.generate_screenshot.generate_screenshot", args=[filename, "progressive"])
    send_task("celery_tasks.generate_metadata.generate_metadata", args=[filename, "progressive"])