def downgrade():
    # Reverse the image-table split: copy image rows (with their file
    # payload) back into ``files``, then rebuild ``images`` as a bare
    # joined-table extension keyed on ``files.id``.
    # NOTE(review): assumes every current ``images`` row still carries
    # filename/mimetype/size/data columns -- confirm against the matching
    # upgrade() of this revision.
    DBSession.execute("""
    INSERT INTO files (id, filename, mimetype, size, data)
    SELECT id, filename, mimetype, size, data
    FROM images""")
    op.drop_table('images')
    op.create_table(
        'images',
        sa.Column('id', sa.Integer(), sa.ForeignKey('files.id'),
                  primary_key=True))
    # Re-populate the slimmed-down table from the node rows typed 'image'.
    DBSession.execute("""
    INSERT INTO images (id)
    SELECT id FROM nodes WHERE type = 'image'""")
def __getitem__(self, path):
    """Return the child (or descendant) node addressed by *path*.

    *path* is a single name or an iterable of names, one per level of
    descent below ``self``.  Raises ``KeyError`` when any segment does
    not resolve to exactly one node.
    """
    session = DBSession()
    session._autoflush()
    if not hasattr(path, '__iter__'):
        path = (path,)

    if 'children' in self.__dict__:
        # If children are already in memory, don't query the database.
        # FIX: ``first`` was assigned but never used (the comparison
        # below re-read ``path[0]``); it is now the actual lookup key.
        first, rest = path[0], path[1:]
        try:
            # Exactly one child must match; zero or several matches make
            # the unpack raise ValueError, reported as KeyError.
            [v] = [child for child in self.children if child.name == first]
        except ValueError:
            raise KeyError(path)
        if rest:
            return v[rest]
        else:
            return v

    # Using the ORM interface here in a loop would join over all
    # polymorphic tables, so we'll use a 'handmade' select instead:
    # one alias per path segment, chained child.parent_id -> parent.id.
    conditions = [nodes.c.id == self.id]
    alias = nodes
    for name in path:
        alias, old_alias = nodes.alias(), alias
        conditions.append(alias.c.parent_id == old_alias.c.id)
        conditions.append(alias.c.name == unicode(name))
    expr = select([alias.c.id], and_(*conditions))
    row = session.execute(expr).fetchone()
    if row is None:
        raise KeyError(path)
    return session.query(Node).get(row.id)
def test_stamp_heads(self):
    """Starting the app should stamp the alembic version table."""
    from kotti import DBSession
    from kotti import main

    main({}, **self.required_settings())
    rows = DBSession.execute(select(
        columns=['version_num'],
        from_obj=['kotti_alembic_version']))
    # At least one version_num row proves the stamp happened.
    assert tuple(rows)
def upgrade():
    # Split image storage out of the generic ``files`` table: rebuild
    # ``images`` with its own copies of the file columns, move across the
    # rows whose node type is 'image', then remove them from ``files``.
    op.drop_table('images')
    op.create_table(
        'images',
        sa.Column('id', sa.Integer(), sa.ForeignKey('contents.id'),
                  primary_key=True),
        sa.Column('filename', sa.Unicode(100)),
        sa.Column('mimetype', sa.String(100)),
        sa.Column('size', sa.Integer()),
        sa.Column('data', UploadedFileField()))
    # Copy payloads for image nodes only (join against ``nodes.type``).
    DBSession.execute("""
    INSERT INTO images (id, filename, mimetype, size, data)
    SELECT f.id, f.filename, f.mimetype, f.size, f.data
    FROM files f INNER JOIN nodes n ON f.id = n.id
    WHERE n.type = 'image'""")
    # Remove the migrated rows from the source table.
    DBSession.execute("""
    DELETE FROM files WHERE id IN (SELECT id FROM images)""")
def test_user():
    """Exercise promoting a base MbaUser row to the Student subclass.

    Creates a plain user and a student, then converts the plain user to
    a Student in place (class + discriminator + joined-table row) and
    verifies the joined attribute round-trips through the ORM.
    """
    u = MbaUser(name=u'test')
    DBSession.add(u)
    stu = Student(name=u'test2', real_name=u'testit2')
    DBSession.add(stu)
    DBSession.flush()

    # Promote the existing row to the polymorphic subclass: switch the
    # Python class and the discriminator, then insert the joined row.
    u.__class__ = Student
    u.type = 'student'
    # FIX: parameterized SQL instead of %-interpolation (avoids quoting
    # and injection pitfalls even in test code), and print() calls
    # instead of Python-2-only print statements.
    DBSession.execute(
        "insert into students (id,real_name) values (:id,'error_name');",
        {'id': u.id})
    DBSession.flush()

    u2 = DBSession.query(MbaUser).filter_by(name=u'test').first()
    print(u2)
    u2.real_name = 'bbbbbbbbbbb'
    DBSession.flush()
    u3 = DBSession.query(Student).filter_by(name=u'test').first()
    print(u3.real_name)
def __getitem__(self, path: Union[str, Iterable[str]]) -> "Node":
    """Traverse to the child (or descendant) node named by *path*.

    *path* is a single name or an iterable of names, one per level of
    descent below ``self``.  Raises ``KeyError`` when any segment fails
    to resolve to exactly one node.
    """
    db_session = DBSession()
    db_session._autoflush()
    path = [path] if isinstance(path, str) else list(path)

    # Fast path: children already loaded, resolve entirely in memory.
    if "_children" in self.__dict__:
        head, tail = path[0], path[1:]
        matches = [ch for ch in self._children if ch.name == head]
        if len(matches) != 1:
            raise KeyError(path)
        node = matches[0]
        return node[tail] if tail else node

    baked_query = bakery(lambda session: session.query(Node))

    # Single segment: direct, cacheable (name, parent_id) lookup.
    if len(path) == 1:
        baked_query += lambda q: q.filter(
            Node.name == bindparam("name"),
            Node.parent_id == bindparam("parent_id"),
        )
        try:
            return (
                baked_query(db_session)
                .params(name=path[0], parent_id=self.id)
                .one()
            )
        except NoResultFound:
            raise KeyError(path)

    # Multi-segment path: build one hand-made select chaining a fresh
    # alias per segment (child.parent_id -> parent.id) rather than a
    # per-level ORM query.
    nodes = Node.__table__
    conditions = [nodes.c.id == self.id]
    parent = nodes
    for segment in path:
        child = nodes.alias()
        conditions.append(child.c.parent_id == parent.c.id)
        conditions.append(child.c.name == segment)
        parent = child
    row = db_session.execute(
        select([parent.c.id], and_(*conditions))).fetchone()
    if row is None:
        raise KeyError(path)
    return baked_query(db_session).get(row.id)
def __getitem__(self, path: Union[str, Iterable[str]]) -> "Node":
    """Return the child (or descendant) node addressed by *path*.

    :param path: a single name or an iterable of names, each naming one
        level of descent below ``self``.
    :result: the matching node.
    :raises KeyError: when *path* does not resolve to exactly one node.
    """
    db_session = DBSession()
    db_session._autoflush()
    # if not hasattr(path, '__iter__'):
    if isinstance(path, str):
        path = (path, )
    path = [p for p in path]
    # Optimization: don't query children if self._children already there:
    if "_children" in self.__dict__:
        rest = path[1:]
        try:
            # Exactly one child must match; zero or several matches make
            # the single-element unpack raise ValueError.
            [child] = filter(lambda ch: ch.name == path[0], self._children)
        except ValueError:
            raise KeyError(path)
        if rest:
            return child[rest]
        else:
            return child
    baked_query = bakery(lambda session: session.query(Node))
    if len(path) == 1:
        # Single segment: direct, cacheable (name, parent_id) lookup.
        try:
            baked_query += lambda q: q.filter(
                Node.name == bindparam("name"),
                Node.parent_id == bindparam("parent_id"),
            )
            return (baked_query(db_session).params(
                name=path[0], parent_id=self.id).one())
        except NoResultFound:
            raise KeyError(path)
    # We have a path with more than one element, so let's be a
    # little clever about fetching the requested node:
    nodes = Node.__table__
    conditions = [nodes.c.id == self.id]
    alias = nodes
    for name in path:
        # One fresh alias per segment, chained child.parent_id -> parent.id.
        alias, old_alias = nodes.alias(), alias
        conditions.append(alias.c.parent_id == old_alias.c.id)
        conditions.append(alias.c.name == name)
    expr = select([alias.c.id], and_(*conditions))
    row = db_session.execute(expr).fetchone()
    if row is None:
        raise KeyError(path)
    return baked_query(db_session).get(row.id)
def __getitem__(self, path):
    """Look up a child node, or a descendant for a multi-part path.

    Raises ``KeyError`` when the path does not resolve to exactly one
    node.
    """
    session = DBSession()
    session._autoflush()
    if not hasattr(path, '__iter__'):
        path = (path,)
    path = [unicode(p) for p in path]

    # In-memory fast path: use already-loaded children, skip the DB.
    if '_children' in self.__dict__:
        head = path[0]
        remainder = path[1:]
        matches = [ch for ch in self._children if ch.name == head]
        if len(matches) != 1:
            raise KeyError(path)
        found = matches[0]
        return found[remainder] if remainder else found

    # Single segment: one ORM query by (name, parent).
    if len(path) == 1:
        try:
            return DBSession.query(Node).filter_by(
                name=path[0], parent=self).one()
        except NoResultFound:
            raise KeyError(path)

    # Multi-segment: hand-built select chaining one alias per segment
    # (child.parent_id -> parent.id), avoiding per-level ORM queries.
    nodes = Node.__table__
    conditions = [nodes.c.id == self.id]
    parent = nodes
    for segment in path:
        child = nodes.alias()
        conditions.append(child.c.parent_id == parent.c.id)
        conditions.append(child.c.name == segment)
        parent = child
    row = session.execute(
        select([parent.c.id], and_(*conditions))).fetchone()
    if row is None:
        raise KeyError(path)
    return session.query(Node).get(row.id)
def __getitem__(self, path):
    """Return the child/descendant node named by *path*; KeyError if absent."""
    session = DBSession()
    session._autoflush()
    # Accept a single name as well as an iterable of names.
    if not hasattr(path, '__iter__'):
        path = (path,)
    path = [unicode(p) for p in path]
    # Optimization: don't query children if self._children already there:
    if '_children' in self.__dict__:
        # NOTE(review): ``first`` is assigned but never read below; the
        # comparison re-reads ``path[0]`` instead.
        first, rest = path[0], path[1:]
        try:
            # Exactly one match required; ValueError from the unpack
            # (zero or several matches) becomes KeyError.
            [child] = filter(lambda ch: ch.name == path[0], self._children)
        except ValueError:
            raise KeyError(path)
        if rest:
            return child[rest]
        else:
            return child
    if len(path) == 1:
        # Direct lookup of an immediate child by (name, parent).
        try:
            return DBSession.query(Node).filter_by(
                name=path[0], parent=self).one()
        except NoResultFound:
            raise KeyError(path)
    # We have a path with more than one element, so let's be a
    # little clever about fetching the requested node:
    nodes = Node.__table__
    conditions = [nodes.c.id == self.id]
    alias = nodes
    for name in path:
        # One fresh alias per segment, chained child.parent_id -> parent.id.
        alias, old_alias = nodes.alias(), alias
        conditions.append(alias.c.parent_id == old_alias.c.id)
        conditions.append(alias.c.name == name)
    expr = select([alias.c.id], and_(*conditions))
    row = session.execute(expr).fetchone()
    if row is None:
        raise KeyError(path)
    return session.query(Node).get(row.id)
def upgrade():
    """Move blob data from the ``files.data`` column into depot storage.

    Streams every row's binary payload through the configured depot
    backend, then replaces the ``data`` column with a Unicode column
    holding the encoded depot file info, written back in chunks.
    """
    from depot.manager import DepotManager
    from depot.fields.upload import UploadedFile
    from sqlalchemy import bindparam, Unicode, Column
    from kotti import DBSession, metadata

    files = sa.Table('files', metadata)
    files.c.data.type = sa.LargeBinary()  # this restores to old column type
    dn = DepotManager.get_default()

    _saved = []

    def process(thing):
        # Push one row's binary payload into depot storage and remember
        # the encoded file info for the later bulk UPDATE.
        # (Renamed ``id`` -> ``node_id`` to stop shadowing the builtin.)
        node_id, data, filename, mimetype = thing
        uploaded_file = UploadedFile({'depot_name': dn, 'files': []})
        uploaded_file._thaw()
        uploaded_file.process_content(
            data, filename=filename, content_type=mimetype)
        _saved.append({'nodeid': node_id, 'data': uploaded_file.encode()})
        log.info("Saved data for node id {}".format(node_id))

    query = DBSession.query(
        files.c.id, files.c.data, files.c.filename, files.c.mimetype
    ).order_by(files.c.id).yield_per(10)

    window_size = 10
    window_idx = 0

    log.info("Starting migration of blob data")
    now = time.time()
    while True:
        start, stop = window_size * window_idx, window_size * (window_idx + 1)
        things = query.slice(start, stop).all()
        # FIX: Query.slice(...).all() returns a list, never None, so the
        # old ``if things is None`` check could never fire; an empty
        # list means every row has been consumed.
        if not things:
            break
        for thing in things:
            process(thing)
        if len(things) < window_size:
            break
        window_idx += 1

    log.info("Files written on disk, saving information to DB")
    op.drop_column('files', 'data')
    op.add_column('files', Column('data', Unicode(4096)))
    files.c.data.type = Unicode(4096)

    update = files.update().where(files.c.id == bindparam('nodeid')).\
        values({files.c.data: bindparam('data')})

    def chunks(seq, size):
        # Yield successive ``size``-sized slices of ``seq``.
        for i in range(0, len(seq), size):
            yield seq[i:i + size]

    for cdata in chunks(_saved, 10):
        DBSession.execute(update, cdata)

    log.info("Blob migration completed in {} seconds".format(
        int(time.time() - now)))
# relation= DBSession.query(friend).filter( # and_( # friend.c.user_a_id==target_person_id, # friend.c.user_b_id==cur_user.id) # ).one() # # relation.status = 1 # It seems does not work # So we use raw sql session = DBSession() session.execute("""UPDATE friends SET status=1 WHERE user_a_id =:a AND user_b_id=:b """, {'a':target_person_id, 'b': cur_user.id } ) mark_changed(session) transaction.commit() request.session.flash(u"同意对方加友请求", 'success') return RetDict(retval=u"同意对方加友请求") else: # We add frined now # cur_user.friendship.append(target_person) # abondon this method