def test_storage_does_not_exists(self):
    """Upload a document's content against a storage name that was never
    configured.

    NOTE(review): despite the name, the body flushes and commits without an
    explicit assert — presumably the expected failure surfaces during
    ``UploadedFile`` construction or flush and is checked by the surrounding
    test harness; confirm against the test class.
    """
    doc = SimpleDocument(name=u_('Foo'))
    # FIX: the original leaked the file handle; keep it open only for the
    # duration of the depot upload and the session flush/commit.
    with open(self.fake_file.name, 'rb') as content:
        doc.content = UploadedFile(content, 'missing_storage')
        DBSession.add(doc)
        DBSession.flush()
        DBSession.commit()
def upgrade():
    """
    Sets all depot files for file typed revisions.

    Until now, files are both in database and, for the newly created ones,
    on disk. In order to simplify the migration, this procedure will:

    - delete the few files on disk,
    - create all files on disk from database.
    """
    # Creates files depot used in this migration
    configure_depot()
    connection = op.get_bind()
    delete_files_on_disk(connection=connection)
    # Select only file-typed revisions that have DB content but no depot
    # entry yet, so re-running the migration skips already-migrated rows.
    select_query = revision_helper.select() \
        .where(revision_helper.c.type == 'file') \
        .where(revision_helper.c.depot_file.is_(None)) \
        .where(func.length(revision_helper.c.file_content) > 0)
    revisions = connection.execute(select_query).fetchall()
    # FIX: loop variable renamed from `file` to avoid shadowing the builtin.
    for revision in revisions:
        revision_filename = '{0}{1}'.format(
            revision.label,
            revision.file_extension,
        )
        depot_file_intent = FileIntent(
            revision.file_content,
            revision_filename,
            revision.file_mimetype,
        )
        # Writes the blob into the 'tracim' depot; the resulting value is the
        # serialized depot metadata stored back into the revision row.
        depot_file_field = UploadedFile(depot_file_intent, 'tracim')
        update_query = revision_helper.update() \
            .where(revision_helper.c.revision_id == revision.revision_id) \
            .values(depot_file=depot_file_field)
        connection.execute(update_query)
def upgrade():
    """Migrate ``File.data`` blobs from the database into the default depot,
    rewriting the column to hold the encoded depot metadata instead."""
    from depot.manager import DepotManager
    from depot.fields.upload import UploadedFile
    from depot.fields.sqlalchemy import UploadedFileField
    from kotti import DBSession, metadata
    from kotti.resources import File

    files_table = sa.Table('files', metadata)
    # Treat the column as raw bytes while we still read the old blobs.
    files_table.c.data.type = sa.LargeBinary()
    default_depot = DepotManager.get_default()
    update_stmt = files_table.update()
    connection = DBSession.connection()

    for node in DBSession.query(File):
        # Build an empty upload wrapper, unfreeze it, and feed it the blob.
        upload = UploadedFile({'depot_name': default_depot, 'files': []})
        upload._thaw()
        upload.process_content(node.data,
                               filename=node.filename,
                               content_type=node.mimetype)
        depot_entry = DepotManager.get().get(upload['file_id'])
        result = connection.execute(
            update_stmt.where(files_table.c.id == node.id).values(
                data=upload.encode()))
        assert result.rowcount == 1
        # Keep the original modification timestamp on the stored file.
        depot_entry.last_modified = node.modification_date
        log.info("Migrated {} bytes for File with pk {} to {}/{}".format(
            len(node.data), node.id, default_depot, upload['file_id']))

    DBSession.flush()

    if DBSession.get_bind().name != 'sqlite':
        # not supported by sqlite
        op.alter_column('files', 'data', type_=UploadedFileField())
def process(thing):
    """Store one node's binary payload in the depot and queue its row update.

    ``thing`` is a 4-tuple ``(node id, raw bytes, filename, mimetype)``.
    Appends ``{'nodeid': ..., 'data': <encoded depot metadata>}`` to the
    module-level ``_saved`` list for a later bulk UPDATE.
    """
    # FIX: renamed `id` -> `node_id` to avoid shadowing the builtin.
    node_id, data, filename, mimetype = thing
    uploaded_file = UploadedFile({'depot_name': dn, 'files': []})
    uploaded_file._thaw()
    uploaded_file.process_content(
        data, filename=filename, content_type=mimetype)
    _saved.append({'nodeid': node_id, 'data': uploaded_file.encode()})
    log.info("Saved data for node id {}".format(node_id))
def process(thing):
    """Store one node's binary payload in the depot and queue its row update.

    ``thing`` is a 4-tuple ``(node id, raw bytes, filename, mimetype)``.
    Appends ``{'nodeid': ..., 'data': <encoded depot metadata>}`` to the
    module-level ``_saved`` list for a later bulk UPDATE.
    """
    # FIX: renamed `id` -> `node_id` to avoid shadowing the builtin.
    node_id, data, filename, mimetype = thing
    uploaded_file = UploadedFile({'depot_name': dn, 'files': []})
    # noinspection PyProtectedMember
    uploaded_file._thaw()
    uploaded_file.process_content(
        data, filename=filename, content_type=mimetype)
    _saved.append({'nodeid': node_id, 'data': uploaded_file.encode()})
    log.info(f"Saved data for node id {node_id}")
def test_check_assigned_type(self):
    """Flushing a document whose field was assigned a raw ``UploadedFile``
    must raise a ``StatementError`` wrapping a ``ValueError``."""
    doc = Document(name=u_('Foo'))
    doc.photo = UploadedFile(open(self.fake_file.name, 'rb'))
    DBSession.add(doc)
    try:
        DBSession.flush()
    except StatementError as e:
        # The underlying ValueError is surfaced in the wrapped message.
        assert 'ValueError' in str(e)
    else:
        # Reached only if the flush unexpectedly succeeded.
        assert False, 'FLUSH did not raise exception'
def upgrade():
    """Migrate ``File.data`` blobs from the database into the default depot,
    replacing the column contents with the encoded depot metadata."""
    sa.orm.events.MapperEvents._clear()  # avoids filedepot magic
    from depot.manager import DepotManager
    from depot.fields.upload import UploadedFile
    from depot.fields.sqlalchemy import UploadedFileField
    from kotti import DBSession, metadata
    from kotti.resources import File

    t = sa.Table("files", metadata)
    t.c.data.type = sa.LargeBinary()
    dn = DepotManager.get_default()
    for obj in DBSession.query(File):
        # FIX: capture the blob size BEFORE obj.data is replaced below;
        # the original logged len(obj.data) after reassignment, reporting
        # the size of the encoded JSON metadata instead of the file.
        original_size = len(obj.data)
        uploaded_file = UploadedFile({"depot_name": dn, "files": []})
        uploaded_file._thaw()
        uploaded_file.process_content(obj.data, filename=obj.filename, content_type=obj.mimetype)
        stored_file = DepotManager.get().get(uploaded_file["file_id"])
        obj.data = uploaded_file.encode()
        # Keep the original modification timestamp on the stored file.
        stored_file.last_modified = obj.modification_date
        log.info(
            "Migrated {} bytes for File with pk {} to {}/{}".format(original_size, obj.id, dn, uploaded_file["file_id"])
        )
    DBSession.flush()
    if DBSession.get_bind().name != "sqlite":
        # not supported by sqlite
        op.alter_column("files", "data", type_=UploadedFileField())
def upgrade():
    """Move each ``File.data`` blob into the default depot and store the
    encoded depot metadata back into the ``files.data`` column."""
    from depot.manager import DepotManager
    from depot.fields.upload import UploadedFile
    from depot.fields.sqlalchemy import UploadedFileField
    from kotti import DBSession, metadata
    from kotti.resources import File

    table = sa.Table('files', metadata)
    table.c.data.type = sa.LargeBinary()  # read old blobs as raw bytes
    depot_name = DepotManager.get_default()
    base_update = table.update()
    conn = DBSession.connection()

    for item in DBSession.query(File):
        wrapper = UploadedFile({'depot_name': depot_name, 'files': []})
        wrapper._thaw()
        wrapper.process_content(
            item.data, filename=item.filename, content_type=item.mimetype)
        saved = DepotManager.get().get(wrapper['file_id'])
        statement = base_update.where(table.c.id == item.id).values(
            data=wrapper.encode())
        outcome = conn.execute(statement)
        # Exactly one row must have been rewritten for this primary key.
        assert outcome.rowcount == 1
        saved.last_modified = item.modification_date
        log.info("Migrated {} bytes for File with pk {} to {}/{}".format(
            len(item.data), item.id, depot_name, wrapper['file_id']))

    DBSession.flush()

    if DBSession.get_bind().name != 'sqlite':
        # not supported by sqlite
        op.alter_column('files', 'data', type_=UploadedFileField())
def process(thing, store):
    """Store one node's binary payload in the 'local' depot and queue the
    encoded depot metadata in ``store`` for a later bulk UPDATE.

    ``thing`` is a 4-tuple ``(node id, raw bytes, filename, mimetype)``.
    """
    # FIX: renamed `id` -> `node_id` to avoid shadowing the builtin.
    node_id, data, filename, mimetype = thing
    logger.debug("Handling file with id %s" % node_id)
    uploaded_file = UploadedFile({"depot_name": "local", "files": []})
    uploaded_file._thaw()
    uploaded_file.process_content(data, filename=filename, content_type=mimetype)
    # FIX: use the unpacked id; the original read `thing.id`, which raises
    # AttributeError when `thing` is a plain tuple (same value for a Row).
    store.append({"nodeid": node_id, "depot_datas": uploaded_file.encode()})
    logger.info("Saved data for node id {}".format(node_id))
def process(thing, store):
    """Store one node's binary payload in the 'local' depot and queue the
    encoded depot metadata in ``store`` for a later bulk UPDATE.

    ``thing`` is a 4-tuple ``(node id, raw bytes, filename, mimetype)``.
    """
    # FIX: renamed `id` -> `node_id` to avoid shadowing the builtin.
    node_id, data, filename, mimetype = thing
    logger.debug("Handling file with id %s" % node_id)
    uploaded_file = UploadedFile({'depot_name': "local", 'files': []})
    uploaded_file._thaw()
    uploaded_file.process_content(data, filename=filename, content_type=mimetype)
    # FIX: use the unpacked id; the original read `thing.id`, which raises
    # AttributeError when `thing` is a plain tuple (same value for a Row).
    store.append({
        'nodeid': node_id,
        'depot_datas': uploaded_file.encode(),
    })
    logger.info("Saved data for node id {}".format(node_id))