def upgrade():
    """Move each File's blob out of the DB into the default depot storage.

    For every ``File`` row: push the raw bytes into the configured depot,
    then overwrite the ``data`` column with the encoded depot reference via
    core SQL (bypassing the ORM so filedepot machinery does not interfere).
    Finally widen the column type where the backend supports it.
    """
    from depot.manager import DepotManager
    from depot.fields.upload import UploadedFile
    from depot.fields.sqlalchemy import UploadedFileField

    from kotti import DBSession, metadata
    from kotti.resources import File

    files_table = sa.Table('files', metadata)
    # Force the column back to a raw blob type so reads return plain bytes.
    files_table.c.data.type = sa.LargeBinary()

    depot_name = DepotManager.get_default()
    base_update = files_table.update()
    connection = DBSession.connection()

    for node in DBSession.query(File):
        # Build the depot metadata dict and store the blob in the backend.
        depot_file = UploadedFile({'depot_name': depot_name, 'files': []})
        depot_file._thaw()
        depot_file.process_content(
            node.data, filename=node.filename, content_type=node.mimetype)
        stored = DepotManager.get().get(depot_file['file_id'])

        result = connection.execute(
            base_update.where(files_table.c.id == node.id).values(
                data=depot_file.encode()))
        assert result.rowcount == 1

        # Keep the original modification timestamp on the stored file.
        stored.last_modified = node.modification_date

        log.info("Migrated {} bytes for File with pk {} to {}/{}".format(
            len(node.data), node.id, depot_name, depot_file['file_id']))

    DBSession.flush()

    if DBSession.get_bind().name != 'sqlite':  # not supported by sqlite
        op.alter_column('files', 'data', type_=UploadedFileField())
def upgrade():
    """Copy every File's blob into the default depot and rewrite the
    ``data`` column with the encoded depot reference.

    Uses a core-SQL UPDATE per row (not ORM assignment) so filedepot's
    attribute magic stays out of the way; the column type is widened
    afterwards on backends that support ALTER COLUMN.
    """
    from depot.manager import DepotManager
    from depot.fields.upload import UploadedFile
    from depot.fields.sqlalchemy import UploadedFileField

    from kotti import DBSession, metadata
    from kotti.resources import File

    t = sa.Table('files', metadata)
    t.c.data.type = sa.LargeBinary()  # read the column as raw bytes

    default_depot = DepotManager.get_default()
    conn = DBSession.connection()

    for file_obj in DBSession.query(File):
        info = UploadedFile({'depot_name': default_depot, 'files': []})
        info._thaw()
        info.process_content(
            file_obj.data,
            filename=file_obj.filename,
            content_type=file_obj.mimetype)
        saved = DepotManager.get().get(info['file_id'])

        res = conn.execute(
            t.update().where(t.c.id == file_obj.id).values(
                data=info.encode()))
        assert res.rowcount == 1

        # Preserve the node's modification time on the depot-stored file.
        saved.last_modified = file_obj.modification_date

        log.info("Migrated {} bytes for File with pk {} to {}/{}".format(
            len(file_obj.data), file_obj.id, default_depot, info['file_id']))

    DBSession.flush()

    if DBSession.get_bind().name != 'sqlite':  # not supported by sqlite
        op.alter_column('files', 'data', type_=UploadedFileField())
def upgrade():
    """ORM-based variant of the blob migration: store each File's bytes in
    the default depot, then assign the encoded reference to ``File.data``.

    Mapper events are cleared up front so filedepot's event hooks do not
    intercept the ``obj.data`` assignment.

    Fix: the log line previously ran *after* ``obj.data`` was overwritten
    with the encoded depot metadata, so it reported the metadata length
    instead of the migrated blob size (siblings log the original size).
    The size is now captured before the assignment.
    """
    sa.orm.events.MapperEvents._clear()  # avoids filedepot magic
    from depot.manager import DepotManager
    from depot.fields.upload import UploadedFile
    from depot.fields.sqlalchemy import UploadedFileField

    from kotti import DBSession, metadata
    from kotti.resources import File

    t = sa.Table("files", metadata)
    t.c.data.type = sa.LargeBinary()  # read the column as raw bytes
    dn = DepotManager.get_default()

    for obj in DBSession.query(File):
        uploaded_file = UploadedFile({"depot_name": dn, "files": []})
        uploaded_file._thaw()
        uploaded_file.process_content(
            obj.data, filename=obj.filename, content_type=obj.mimetype)
        stored_file = DepotManager.get().get(uploaded_file["file_id"])

        # Capture the original blob size before it is replaced below.
        original_size = len(obj.data)
        obj.data = uploaded_file.encode()
        stored_file.last_modified = obj.modification_date
        log.info(
            "Migrated {} bytes for File with pk {} to {}/{}".format(
                original_size, obj.id, dn, uploaded_file["file_id"]))

    DBSession.flush()

    if DBSession.get_bind().name != "sqlite":  # not supported by sqlite
        op.alter_column("files", "data", type_=UploadedFileField())
def create_file(self, lang='en'):
    """Store FILE_CONTENT under a per-language filename and report state.

    Returns a dict with all file ids uploaded so far, the default depot
    name, and the id of the file just created.
    """
    filenames = {'en': 'hello.txt', 'ru': u_('Крупный'), 'it': u_('àèìòù')}
    fname = filenames.get(lang, 'unknown')
    # ``+=`` keeps the original semantics (in-place extend + rebind).
    self.UPLOADED_FILES += [
        DepotManager.get().create(FILE_CONTENT, filename=fname)]
    return dict(
        files=self.UPLOADED_FILES,
        uploaded_to=DepotManager.get_default(),
        last=self.UPLOADED_FILES[-1],
    )
def __init__(self, fileid, depot_name=None):
    """Bind this object to *fileid* inside the given (or default) depot.

    ``depot_name`` may be an alias; it is resolved to the concrete storage
    name. Raises ``ValueError`` when no matching storage is configured.
    """
    if depot_name is None:
        depot_name = DepotManager.get_default()
    resolved = DepotManager.resolve_alias(depot_name)
    if not resolved:
        raise ValueError('Storage has not been found in DEPOT')
    self.depot_name = resolved
    self.name = fileid
def __init__(self, content, depot_name=None):
    """Initialize from either already-saved metadata (a dict) or new content.

    A dict ``content`` restores previously stored file info; anything else
    is treated as fresh content to be processed and stored in the depot.

    Fix: resolve ``depot_name`` through ``DepotManager.resolve_alias``
    before storing it, matching the sibling constructors — otherwise an
    alias (or a name with no configured storage) would be recorded
    verbatim in the metadata.
    """
    super(DepotFileInfo, self).__init__()
    self._thaw()

    if isinstance(content, dict):
        # Restoring from saved metadata; there is no raw content.
        object.__setattr__(self, 'original_content', None)
        self.update(content)
    else:
        object.__setattr__(self, 'original_content', content)
        if depot_name is None:
            depot_name = DepotManager.get_default()
        # Resolve aliases so the stored depot_name is always a concrete,
        # configured storage name.
        depot_name = DepotManager.resolve_alias(depot_name)
        if not depot_name:
            raise ValueError('Storage has not been found in DEPOT')
        self['depot_name'] = depot_name
        self['files'] = []
        self.process_content(content)

    self._freeze()
def __init__(self, content, depot_name=None):
    """Initialize from saved metadata (dict) or from fresh content.

    Fresh content is processed and stored in the resolved depot; a dict
    simply restores previously saved file information.
    """
    super(DepotFileInfo, self).__init__()
    self._thaw()

    restoring = isinstance(content, dict)
    object.__setattr__(
        self, 'original_content', None if restoring else content)

    if restoring:
        self.update(content)
    else:
        name = depot_name if depot_name is not None else \
            DepotManager.get_default()
        # Aliases must resolve to a concrete configured storage.
        name = DepotManager.resolve_alias(name)
        if not name:
            raise ValueError('Storage has not been found in DEPOT')
        self['depot_name'] = name
        self['files'] = []
        self.process_content(content)

    self._freeze()
def upgrade():
    """Windowed blob migration: store every row's blob with filedepot, then
    drop the blob column and recreate it as Unicode holding the encoded
    depot reference.

    Rows are read in fixed-size windows to keep memory bounded, and the
    final UPDATE is executed in batches via ``bindparam``.

    Fix: ``Query.all()`` returns a list and never ``None``, so the old
    ``if things is None: break`` was dead code; an empty window now breaks
    explicitly. Builtin-shadowing locals (``id``, ``l``) renamed.
    """
    from depot.manager import DepotManager
    from depot.fields.upload import UploadedFile
    from sqlalchemy import bindparam, Unicode, Column

    from kotti import DBSession, metadata

    files = sa.Table('files', metadata)
    files.c.data.type = sa.LargeBinary()  # this restores to old column type
    dn = DepotManager.get_default()

    _saved = []

    def process(thing):
        # Push one row's blob into the depot and remember the encoded
        # metadata so the column can be rewritten afterwards.
        node_id, data, filename, mimetype = thing
        uploaded_file = UploadedFile({'depot_name': dn, 'files': []})
        uploaded_file._thaw()
        uploaded_file.process_content(
            data, filename=filename, content_type=mimetype)
        _saved.append({'nodeid': node_id, 'data': uploaded_file.encode()})
        log.info("Saved data for node id {}".format(node_id))

    query = DBSession.query(
        files.c.id, files.c.data, files.c.filename, files.c.mimetype
    ).order_by(files.c.id).yield_per(10)

    window_size = 10
    window_idx = 0

    log.info("Starting migration of blob data")
    now = time.time()

    # Walk the table in fixed-size windows to keep memory bounded.
    while True:
        start, stop = window_size * window_idx, window_size * (window_idx + 1)
        things = query.slice(start, stop).all()
        if not things:
            # .all() returns a list, never None; an empty window means done.
            break
        for thing in things:
            process(thing)
        if len(things) < window_size:
            break
        window_idx += 1

    log.info("Files written on disk, saving information to DB")

    op.drop_column('files', 'data')
    op.add_column('files', Column('data', Unicode(4096)))
    files.c.data.type = Unicode(4096)

    update = files.update().where(files.c.id == bindparam('nodeid')).\
        values({files.c.data: bindparam('data')})

    def chunks(seq, n):
        # Yield successive n-sized chunks from seq.
        for i in range(0, len(seq), n):
            yield seq[i:i + n]

    for cdata in chunks(_saved, 10):
        DBSession.execute(update, cdata)

    log.info("Blob migration completed in {} seconds".format(
        int(time.time() - now)))
def test_first_configured_is_default(self):
    """The first storage ever configured becomes the application default."""
    # Configure two storages in order; only the first should be default.
    for name, path in (('first', './lfs'), ('second', './lfs2')):
        DepotManager.configure(name, {'depot.storage_path': path})
    assert DepotManager.get_default() == 'first'
def test_no_configured_is_detected(self):
    # With no storage configured, asking for the default should hit
    # DepotManager's "nothing configured" handling.
    # NOTE(review): presumably an exception is expected here and asserted
    # by a decorator/fixture outside this view — confirm against the
    # enclosing test class.
    DepotManager.get_default()
def test_changing_default_depot_works(self):
    """``set_default`` switches which storage ``get_default`` reports."""
    # Configuration order matters: 'first' starts out as the default.
    storage_paths = {'first': './lfs', 'second': './lfs2'}
    for name in ('first', 'second'):
        DepotManager.configure(
            name, {'depot.storage_path': storage_paths[name]})
    DepotManager.set_default('second')
    assert DepotManager.get_default() == 'second'
def create_file(self):
    """Upload FILE_CONTENT as 'hello.txt' and return upload bookkeeping.

    Returns a dict with all ids uploaded so far, the default depot name,
    and the id of the file just created.
    """
    new_id = DepotManager.get().create(FILE_CONTENT, filename='hello.txt')
    # ``+=`` keeps the original semantics (in-place extend + rebind).
    self.UPLOADED_FILES += [new_id]
    return dict(
        files=self.UPLOADED_FILES,
        uploaded_to=DepotManager.get_default(),
        last=self.UPLOADED_FILES[-1],
    )