def add_meeting_sequence_to_period(self):
    """Add a NOT NULL ``meeting_sequence_number`` column to ``periods``.

    The column is created nullable first, backfilled with 0 for every
    existing row, then tightened to NOT NULL.
    """
    self.op.add_column(
        "periods",
        Column("meeting_sequence_number", Integer, nullable=True))

    periods = table(
        "periods", column("id"), column("meeting_sequence_number"))
    self.execute(periods.update().values(meeting_sequence_number=0))

    self.op.alter_column(
        "periods", "meeting_sequence_number",
        existing_type=Integer, nullable=False)
def test_sql_with_for_update(self):
    """Check FOR UPDATE lock-clause rendering for each option."""
    table1 = table(
        "mytable", column("myid"), column("name"), column("description"))
    # All expected strings share the same SELECT prefix.
    prefix = (
        "SELECT mytable.myid, mytable.name, mytable.description "
        "FROM mytable "
    )
    cases = [
        (table1.select().with_for_update(),
         "FOR UPDATE"),
        (table1.select().with_for_update(nowait=True),
         "FOR UPDATE NOWAIT"),
        (table1.select().with_for_update(read=True),
         "FOR SHARE LOCK"),
        (table1.select().with_for_update(skip_locked=True),
         "FOR UPDATE IGNORE LOCKED"),
    ]
    for stmt, lock_clause in cases:
        self.assert_compile(stmt, prefix + lock_clause)
def get_existing_id_lookup(self):
    """Return a mapping of former contact id -> current contact id."""
    contacts = table(
        "contacts", column('id'), column('former_contact_id'))
    stmt = select([contacts.c.former_contact_id, contacts.c.id])
    # Result rows are (former_contact_id, id) pairs; dict() consumes
    # them directly.
    return dict(self.db_session.execute(stmt))
def add_bucket_grouping_to_query(self, q, q_entities, entity, mapped_entity):
    """Group query *q* into equal-width buckets over *mapped_entity*.

    Adds a bucket-id entity and a human-readable "[lo, hi)" bucket label
    entity to *q_entities*, and groups *q* by both.

    :param q: the query to extend.
    :param q_entities: set collecting the query entities.
    :param entity: dict with optional 'min', 'max', 'num_buckets'
        (default 10) and required 'label' keys.
    :param mapped_entity: the SQLAlchemy expression being bucketed.
    :returns: the grouped query.
    """
    # Get min/max, calculating them only when not both provided.
    # Fixed: dict.has_key() was removed in Python 3; the `in` operator
    # behaves identically on Python 2 and 3.
    entity_min = 0
    entity_max = 0
    if 'min' not in entity or 'max' not in entity:
        entity_min, entity_max = self.get_entity_min_max(entity)
    # Override calculated min/max if values were provided.
    if 'min' in entity:
        entity_min = entity.get('min')
    if 'max' in entity:
        entity_max = entity.get('max')

    num_buckets = entity.get('num_buckets', 10)
    entity_range = entity_max - entity_min
    bucket_width = (entity_max - entity_min) / float(num_buckets)

    # Bucket id via the db's width_bucket function.
    # If the db lacks width_bucket, an equivalent expression is:
    #   func.greatest(func.round(
    #       (((mapped_entity - entity_min)/entity_range) * num_buckets) - .5
    #   ) + 1, num_buckets)
    bucket_id_entity = func.width_bucket(
        mapped_entity, entity_min, entity_max, num_buckets
    ).label(self.get_bucket_id_label(entity))

    # "[lo, hi)" label; the overflow bucket (num_buckets + 1) is
    # labeled "[max, ...)".
    bucket_label_entity = case(
        [(bucket_id_entity == num_buckets + 1,
          '[' + cast(entity_max, String) + ', ...)')],
        else_=(
            '[' + cast(entity_min + bucket_width * (bucket_id_entity - 1),
                       String)
            + ', '
            + cast(entity_min + bucket_width * (bucket_id_entity), String)
            + ')'
        )
    ).label(entity['label'])

    # q_entities is a set, so add each entity once (the original added
    # bucket_id_entity twice).
    q_entities.add(bucket_id_entity)
    q_entities.add(bucket_label_entity)
    q = q.group_by(column(bucket_id_entity._label),
                   column(bucket_label_entity._label))
    return q
def downgrade():
    """Recreate the planet_name lookup table and re-link librations."""
    op.create_table(
        'planet_name',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('name', sa.String, nullable=False),
    )
    op.create_unique_constraint('uc_planet_name', 'planet_name', ['name'])

    # Both FK columns start nullable so existing rows can be backfilled.
    for fk_column in ('first_planet_name_id', 'second_planet_name_id'):
        op.add_column('libration', sa.Column(
            fk_column, sa.Integer, sa.ForeignKey('planet_name.id'),
            nullable=True))

    planet_name_table = table(
        'planet_name', column('id', sa.Integer), column('name', sa.String))
    names_desc = ['PLUTO', 'NEPTUNE', 'URANUS', 'SATURN', 'JUPITER',
                  'MARS', 'EARTHMOO', 'VENUS', 'MERCURY']
    op.bulk_insert(planet_name_table, [
        {'id': pid, 'name': pname}
        for pid, pname in zip(range(9, 0, -1), names_desc)
    ])

    # All existing librations are JUPITER/SATURN resonances.
    op.execute('UPDATE libration '
               'SET first_planet_name_id=5, second_planet_name_id=6')
    op.alter_column('libration', 'first_planet_name_id', nullable=False)
    op.alter_column('libration', 'second_planet_name_id', nullable=False)

    op.drop_constraint('libration_resonance_id_key', 'libration')
    op.create_unique_constraint(
        'uc_resonance_planet_names', 'libration',
        ['resonance_id', 'first_planet_name_id', 'second_planet_name_id'])
def _str_idx_constraint(m1, m2, op):
    """Build a ``rowid IN (...)`` constraint against the string index table."""
    idx_name = "str_idx_%s_%s" % (m1.table.fullname, m1.key)
    idx_tbl = table(idx_name, column("s"), column("id"))
    # Ids of index rows whose signature matches m2 under operator `op`.
    matching_ids = select([idx_tbl.c.id]).where(
        idx_tbl.c.s.op(op)(func.mol_signature(m2)))
    base = table(m1.table.fullname, column("rowid"))
    return base.c.rowid.in_(matching_ids)
def raise_column_length(self):
    """Widen favorites.plone_uid to UID_LENGTH via a temporary column."""
    # Create the replacement column with the new size.
    self.op.add_column(
        'favorites', Column('tmp_plone_uid', String(UID_LENGTH)))

    # Copy every row's value into the temporary column.
    favorites = table(
        'favorites',
        column("id"), column('plone_uid'), column('tmp_plone_uid'))
    for row in self.connection.execute(favorites.select()).fetchall():
        self.execute(
            favorites.update()
            .values(tmp_plone_uid=row.plone_uid)
            .where(favorites.columns.id == row.id))

    # Swap the columns: drop the old one, rename the new one.
    self.op.drop_column('favorites', 'plone_uid')
    self.op.alter_column(
        'favorites', 'tmp_plone_uid', new_column_name='plone_uid')
def get_pk_constraint(self, connection, table_name, schema=None, **kw):
    """
    Override

    TODO: Check if we need PRIMARY Indices or PRIMARY KEY Indices
    TODO: Check for border cases (No PK Indices)
    """
    if schema is None:
        schema = self.default_schema_name

    # IndexType 'K' marks primary-key indices in dbc.Indices.
    stmt = select(
        [column('ColumnName'), column('IndexName')],
        from_obj=[text('dbc.Indices')]
    ).where(
        and_(text('DatabaseName = :schema'),
             text('TableName=:table'),
             text('IndexType=:indextype'))
    ).order_by(asc(column('IndexNumber')))

    rows = connection.execute(
        stmt, schema=schema, table=table_name, indextype='K').fetchall()

    constrained_columns = []
    pk_name = None
    for row in rows:
        constrained_columns.append(self.normalize_name(row['ColumnName']))
        # Every row carries the same IndexName, so the last one wins.
        pk_name = self.normalize_name(row['IndexName'])
    return {
        "constrained_columns": constrained_columns,
        "name": pk_name,
    }
class AddMissingForwardingAddedNotificationDefault(SchemaMigration):
    """Add missing forwarding-added notification_default."""

    defaults_table = table(
        "notification_defaults",
        column("id"),
        column("kind"),
        column("mail_notification_roles"),
        column("badge_notification_roles"),
    )

    def migrate(self):
        """Insert the forwarding-added default unless it already exists."""
        query = self.defaults_table.select().where(
            self.defaults_table.c.kind == 'forwarding-added')
        forwarding_added = self.connection.execute(query).fetchall()
        # Idiomatic emptiness test (was: `if not len(forwarding_added)`).
        if not forwarding_added:
            self.insert_forwarding_added_default()

    def insert_forwarding_added_default(self):
        """Insert the default notification roles for forwarding-added."""
        values = {
            'kind': 'forwarding-added',
            'mail_notification_roles': json.dumps([TASK_RESPONSIBLE_ROLE]),
            'badge_notification_roles': json.dumps(
                [TASK_RESPONSIBLE_ROLE, TASK_ISSUER_ROLE]),
        }
        seq = Sequence('notification_defaults_id_seq')
        # Backends without sequence support assign the id themselves.
        if self.supports_sequences:
            values['id'] = self.execute(seq)
        self.execute(self.defaults_table.insert().values(**values))
def get_unique_constraints(self, connection, table_name, schema=None, **kw):
    """ Overrides base class method """
    if schema is None:
        schema = self.default_schema_name

    # IndexType 'U' marks unique indices in dbc.Indices; ordering by
    # IndexName keeps each constraint's rows contiguous for groupby().
    stmt = select(
        [column('ColumnName'), column('IndexName')],
        from_obj=[text('dbc.Indices')]
    ).where(
        and_(text('DatabaseName = :schema'),
             text('TableName=:table'),
             text('IndexType=:indextype'))
    ).order_by(asc(column('IndexName')))

    rows = connection.execute(
        stmt, schema=schema, table=table_name, indextype='U').fetchall()

    def grouper(fk_row):
        return {
            'name': self.normalize_name(fk_row['IndexName']),
        }

    unique_constraints = []
    for constraint_info, constraint_cols in groupby(rows, grouper):
        unique_constraints.append({
            'name': self.normalize_name(constraint_info['name']),
            'column_names': [
                self.normalize_name(col['ColumnName'])
                for col in constraint_cols
            ],
        })
    return unique_constraints
def migrate_data(self):
    """Copy every activity into the translation table for DEFAULT_LOCALE."""
    activities_table = table(
        "activities",
        column("id"),
        column("kind"),
        column("summary"),
        column("title"),
        column("description"),
    )
    activities_translation_table = table(
        "activities_translation",
        column("id"),
        column("locale"),
        column("title"),
        column("label"),
        column("summary"),
        column("description"),
    )
    activities = self.connection.execute(
        activities_table.select()).fetchall()
    for activity in activities:
        # Use Insert.values() rather than the legacy `insert(values=...)`
        # parameter, which was removed in modern SQLAlchemy.
        self.execute(
            activities_translation_table.insert().values(
                id=activity.id,
                locale=DEFAULT_LOCALE,
                title=activity.title,
                # the label column is new so we use the kind
                # for existing entries
                label=activity.kind,
                summary=activity.summary,
                description=activity.description,
            )
        )
def migrate(self):
    """Migrate notifications from watcher_id to a userid column.

    Adds the column, copies the data, drops watcher_id, then makes
    userid required.

    NOTE(review): ``self.notifications_table`` is assigned only *after*
    the migration steps run, so none of them can rely on it here; this
    looks like dead/leftover setup — confirm before removing.
    """
    self.add_userid_column()
    self.migrate_data()
    self.remove_watcherid_column()
    self.make_userid_column_required()
    self.notifications_table = table("notifications", column("id"), column("activity_id"), column("watcher_id"))
def test_expression_matcher() -> None:
    """Tests expression matching of SQLAlchemy expressions."""
    col = column("column")
    in_foo_bar = col.in_(["foo", "bar"])
    in_foo_bar_again = col.in_(["foo", "bar"])
    in_cat_dog = col.in_(["cat", "dog"])
    lowered = func.lower(col)

    # Equivalent IN-expressions match.
    assert ExpressionMatcher(in_foo_bar) == in_foo_bar_again

    # Matching recurses into container values.
    expected = {
        "foo": col == "foo",
        "bar": 5,
        "hello": "world",
    }
    assert ExpressionMatcher({
        "foo": col == "foo",
        "bar": 5,
        "hello": "world"
    }) == expected

    # Matchers compose and tolerate mock.ANY.
    assert ExpressionMatcher(in_foo_bar) == in_foo_bar_again
    assert ExpressionMatcher(ExpressionMatcher(in_foo_bar)) \
        == ExpressionMatcher(in_foo_bar_again)
    assert ExpressionMatcher(in_foo_bar) == mock.ANY
    assert ExpressionMatcher(in_foo_bar) == in_foo_bar_again

    # Different functions do not match.
    assert ExpressionMatcher(lowered) != func.upper(col)

    # Labels: same column + same name match; different column does not.
    labeled = col.label("foo")
    labeled_again = col.label("foo")
    other_labeled = column("column2").label("foo")
    assert ExpressionMatcher(col) != labeled
    assert ExpressionMatcher(labeled) == labeled_again
    assert ExpressionMatcher([col == "foo"]) == [col == "foo"]
    assert ExpressionMatcher(labeled) != other_labeled

    # Different IN-lists and different literals do not match.
    assert ExpressionMatcher(in_foo_bar) != in_cat_dog
    assert ExpressionMatcher(column("column") == "one") != ExpressionMatcher(
        column("column") == "three")
def _get_ff_why_rows(self, req: "CamcopsRequest") -> List[List[str]]:
    """
    Reasons for giving a particular answer to the friends/family question
    """
    options = self.task.get_ff_options(req)
    wheres = [
        column("ff_rating").isnot(None),
        column("ff_why").isnot(None),
    ]
    self.add_task_report_filters(wheres)
    # noinspection PyUnresolvedReferences
    query = (
        select([column("ff_rating"), column("ff_why")])
        .select_from(self.task.__table__)
        .where(and_(*wheres))
        .order_by("ff_why")
    )
    # Translate the numeric rating into its option text.
    return [
        [options[rating], why]
        for rating, why in req.dbsession.execute(query).fetchall()
    ]
def upgrade():
    """Create planet_name, link librations to it, and seed the planets."""
    op.create_table(
        "planet_name",
        sa.Column("id", sa.Integer, primary_key=True),
        sa.Column("name", sa.String, nullable=False),
    )
    op.create_unique_constraint("uc_planet_name", "planet_name", ["name"])

    # FK columns start nullable so existing rows can be backfilled below.
    op.add_column(
        "libration",
        sa.Column("first_planet_name_id", sa.Integer,
                  sa.ForeignKey("planet_name.id"), nullable=True))
    op.add_column(
        "libration",
        sa.Column("second_planet_name_id", sa.Integer,
                  sa.ForeignKey("planet_name.id"), nullable=True))

    planet_name_table = table(
        "planet_name", column("id", sa.Integer), column("name", sa.String))
    planets_desc = ["PLUTO", "NEPTUNE", "URANUS", "SATURN", "JUPITER",
                    "MARS", "EARTHMOO", "VENUS", "MERCURY"]
    op.bulk_insert(planet_name_table, [
        {"id": planet_id, "name": planet_name}
        for planet_id, planet_name in zip(range(9, 0, -1), planets_desc)
    ])

    # All existing librations are JUPITER/SATURN resonances.
    op.execute("UPDATE libration "
               "SET first_planet_name_id=5, second_planet_name_id=6")
def insert_default_value(self, tablename):
    """Mark every row in *tablename* as active."""
    target = table(tablename, column("id"), column("is_active"))
    self.connection.execute(target.update().values(is_active=True))
def get_watcherid_userid_mapping(self):
    """Return a dict mapping watcher id -> user_id."""
    watchers_table = table("watchers", column("id"), column("user_id"))
    rows = self.connection.execute(watchers_table.select()).fetchall()
    return {row.id: row.user_id for row in rows}
def insert_placeholders(self):
    """Fill empty MailAddress with a placeholder. """
    mailaddress_table = table('mail_addresses',
                              column("id"),
                              column("address"))
    # Use is_(None) for the NULL test instead of `== None`: same SQL
    # (IS NULL), but explicit and lint-clean (E711).
    self.execute(mailaddress_table.update().values(
        address=EMPTY_ADDRESS_PLACEHOLDER).where(
        mailaddress_table.columns.address.is_(None)))
def insert_placeholders(self):
    """Fill empty MailAddress with a placeholder. """
    mailaddress_table = table('mail_addresses',
                              column("id"),
                              column("address"))
    # is_(None) renders the same IS NULL but is the explicit, lint-clean
    # form (was: `== None`).
    self.execute(mailaddress_table.update()
                 .values(address=EMPTY_ADDRESS_PLACEHOLDER)
                 .where(mailaddress_table.columns.address.is_(None)))
def insert_default_modified(self):
    """Insert time of migration as last modified timestamp."""
    meetings = table('meetings', column('id'), column('modified'))
    self.execute(meetings.update().values(modified=utcnow_tz_aware()))
def insert_placeholders(self):
    """Fill empty URLs with a placeholder. """
    url_table = table('urls', column("id"), column("url"))
    # is_(None) renders the same IS NULL but is the explicit, lint-clean
    # form (was: `== None`).
    self.execute(url_table.update()
                 .values(url=EMPTY_URL_PLACEHOLDER)
                 .where(url_table.columns.url.is_(None)))
def insert_placeholders(self):
    """Fill empty phone_number with a placeholder. """
    phonenumbers_table = table('phonenumbers',
                               column("id"),
                               column("phone_number"))
    # is_(None) renders the same IS NULL but is the explicit, lint-clean
    # form (was: `== None`).
    self.execute(phonenumbers_table.update()
                 .values(phone_number=EMPTY_PHONE_NUMBER_PLACEHOLDER)
                 .where(phonenumbers_table.columns.phone_number.is_(None)))
def add_admin_user():
    """Insert the admin user via a bound-parameter insert."""
    users = table('users', column('username'), column('_password_hash'))
    stmt = users.insert().values(
        username=sa.bindparam('username'),
        _password_hash=sa.bindparam('_password_hash'),
    )
    op.get_bind().execute(
        stmt,
        [dict(username='******', _password_hash=pw('admin'))])
def migrate_data(self):
    """Temporarily insert placeholders as dossier_reference_numbers,
    the real value will be inserted by the 4603 upgradestep.
    """
    proposals = table(
        "proposals", column("id"), column("dossier_reference_number"))
    stmt = proposals.update().values(dossier_reference_number=u'-')
    self.execute(stmt)
def migrate_data(self):
    """Temporarily insert placeholders as repository_folder_title, the
    real value will be inserted by the 4633 upgradestep.
    """
    default_language = get_preferred_language_code()
    proposal_table = table("proposals",
                           column("id"),
                           column("repository_folder_title"),
                           column("language"))
    # Set both columns in a single full-table UPDATE instead of two
    # (same end state, half the statements).
    self.execute(proposal_table.update().values(
        repository_folder_title=u"-",
        language=default_language))
def get_percentage_summaries(
    self,
    req: "CamcopsRequest",
    column_dict: Dict[str, str],
    num_answers: int,
    cell_format: str = "{}",
    min_answer: int = 0,
) -> List[List[str]]:
    """
    Provides a summary of each question, x% of people said each response.

    Args:
        req: the current request (supplies the db session).
        column_dict: maps task column name -> human-readable question.
        column_dict: maps each task column name to its question text.
        num_answers: number of possible answers per question; one report
            cell is reserved per answer.
        cell_format: format string applied to each percentage value.
        min_answer: lowest answer value; answers are assumed to be
            consecutive integers starting here (used to index cells).

    Returns:
        One row per question:
        ``[question, total responses, % answer 1, % answer 2, ...]``
        Cells for answers nobody gave remain empty strings.
    """
    rows = []
    for column_name, question in column_dict.items():
        """
        e.g. SELECT COUNT(col) FROM perinatal_poem WHERE col IS NOT NULL
        """
        # First query: total count of non-NULL responses for the column.
        wheres = [column(column_name).isnot(None)]
        # noinspection PyUnresolvedReferences
        self.add_task_report_filters(wheres)
        # noinspection PyUnresolvedReferences
        total_query = (select([func.count(column_name)]).select_from(
            self.task_class.__table__).where(and_(*wheres)))
        total_responses = req.dbsession.execute(total_query).fetchone()[0]
        row = [question] + [total_responses] + [""] * num_answers
        """
        e.g.

        SELECT total_responses, col,
               ((100 * COUNT(col)) / total_responses)
        FROM perinatal_poem WHERE col is not NULL
        GROUP BY col
        """
        # Second query: per-answer percentage of the total.
        # noinspection PyUnresolvedReferences
        query = (select([
            column(column_name),
            ((100 * func.count(column_name)) / total_responses),
        ]).select_from(self.task_class.__table__).where(
            and_(*wheres)).group_by(column_name))
        # row output is:
        #    0          1                 2              3
        # +----------+-----------------+--------------+--------------+----
        # | question | total responses | % 1st answer | % 2nd answer | ...
        # +----------+-----------------+--------------+--------------+----
        for result in req.dbsession.execute(query):
            # Map the answer value onto its cell offset (first answer
            # cell is index 2).
            col = 2 + (result[0] - min_answer)
            row[col] = cell_format.format(result[1])
        rows.append(row)
    return rows
def add_badge_column(self):
    """Add notifications.is_badge and flag all existing rows as badges."""
    self.op.add_column(
        'notifications', Column('is_badge', Boolean, default=False))
    notifications = table(
        "notifications", column("id"), column("is_badge"))
    self.execute(notifications.update().values(is_badge=True))
def get_contact_mapping(self):
    """Lazily build and cache a former_contact_id -> id mapping."""
    if not self._contact_mapping:
        contacts = table(
            "contacts", column('id'), column('former_contact_id'))
        stmt = select([contacts.c.former_contact_id, contacts.c.id])
        # Rows are (former_contact_id, id) pairs.
        self._contact_mapping = dict(self.db_session.execute(stmt))
    return self._contact_mapping
def insert_badge_icon_settings(self):
    """Enable badge notification for all existing settings. """
    defaults = table(
        "notification_defaults",
        column("id"),
        column("badge_notification_roles"),
    )
    stmt = defaults.update().values(
        badge_notification_roles=BADGE_NOTIFICATION_DEFAULT)
    self.execute(stmt)
def test_idx_string_col_in_fn_no_change(self):
    """test #880"""
    old_meta = MetaData()
    new_meta = MetaData()
    # Identical table + functional index on both sides -> no diffs.
    for meta in (old_meta, new_meta):
        tbl = Table("add_ix", meta, Column("x", String(50)))
        tbl.append_constraint(Index("foo_idx", desc(column("x"))))
    diffs = self._fixture(old_meta, new_meta)
    eq_(diffs, [])
def get_contact_mapping(self):
    """Return (and cache) a former_contact_id -> id lookup."""
    if self._contact_mapping:
        return self._contact_mapping
    contacts = table(
        "contacts", column('id'), column('former_contact_id'))
    rows = self.db_session.execute(
        select([contacts.c.former_contact_id, contacts.c.id]))
    self._contact_mapping = {former: current for former, current in rows}
    return self._contact_mapping
def migrate(self):
    """Migrate notifications from watcher_id to a userid column.

    Adds the column, copies the data, drops watcher_id, then makes
    userid required.

    NOTE(review): ``self.notifications_table`` is assigned only *after*
    the migration steps run, so none of them can rely on it here; this
    looks like dead/leftover setup — confirm before removing.
    """
    self.add_userid_column()
    self.migrate_data()
    self.remove_watcherid_column()
    self.make_userid_column_required()
    self.notifications_table = table(
        "notifications",
        column("id"),
        column("activity_id"),
        column("watcher_id"),
    )
def add_decision_sequence_to_period(self):
    """Add a NOT NULL ``decision_sequence_number`` column to ``periods``.

    Created nullable, backfilled with 0, then tightened to NOT NULL.
    """
    self.op.add_column(
        'periods',
        Column('decision_sequence_number', Integer, nullable=True))

    periods = table(
        'periods', column('id'), column('decision_sequence_number'))
    self.execute(periods.update().values(decision_sequence_number=0))

    self.op.alter_column(
        'periods', 'decision_sequence_number',
        existing_type=Integer, nullable=False)
def add_decision_sequence_to_period(self):
    """Add periods.decision_sequence_number (backfilled 0, NOT NULL)."""
    # Start nullable so existing rows don't violate the constraint.
    self.op.add_column(
        'periods',
        Column('decision_sequence_number', Integer, nullable=True))
    periods_table = table(
        'periods', column('id'), column('decision_sequence_number'))
    # Backfill, then enforce NOT NULL.
    self.execute(
        periods_table.update().values(decision_sequence_number=0))
    self.op.alter_column(
        'periods', 'decision_sequence_number',
        existing_type=Integer, nullable=False)
def migrate_data(self):
    """Copy the mapped userid onto each notification row."""
    userid_by_watcher = self.get_watcherid_userid_mapping()
    notifications = table(
        "notifications", column("id"), column("activity_id"),
        column("watcher_id"), column("userid"))
    rows = self.connection.execute(notifications.select()).fetchall()
    for row in rows:
        self.execute(
            notifications.update()
            .values(userid=userid_by_watcher[row.watcher_id])
            .where(notifications.columns.id == row.id))
def migrate_data(self):
    """Temporarily insert placeholders as repository_folder_title,
    the real value will be inserted by the 4633 upgradestep.
    """
    default_language = get_preferred_language_code()
    proposals = table("proposals",
                      column("id"),
                      column("repository_folder_title"),
                      column("language"))
    # Placeholder title for every proposal.
    self.execute(
        proposals.update().values(repository_folder_title=u'-'))
    # Default language for every proposal.
    self.execute(proposals.update().values(language=default_language))
def migrate_data(self):
    """Fill the new userid column from the watcher-id mapping."""
    watcher_to_user = self.get_watcherid_userid_mapping()
    notifications_table = table(
        "notifications",
        column("id"),
        column("activity_id"),
        column("watcher_id"),
        column("userid"),
    )
    notifications = self.connection.execute(
        notifications_table.select()).fetchall()
    for notification in notifications:
        stmt = (notifications_table.update()
                .values(userid=watcher_to_user[notification.watcher_id])
                .where(notifications_table.columns.id == notification.id))
        self.execute(stmt)
def insert_notification_defaults(self):
    """Fill mail_notification_roles from DEFAULT_SETTINGS per kind."""
    defaults = table(
        "notification_defaults",
        column("id"),
        column("kind"),
        column("mail_notification_roles"),
    )
    for setting in self.connection.execute(defaults.select()).fetchall():
        # Kinds without an explicit default get an empty role list.
        roles = DEFAULT_SETTINGS.get(setting.kind, [])
        self.execute(
            defaults.update()
            .values(mail_notification_roles=json.dumps(roles))
            .where(defaults.columns.id == setting.id))
def add_roles():
    """Seed the roles table with the built-in admin/member/guest roles."""
    roles = table(
        'roles',
        column('name', sa.String(255)),
        column('auth_level', sa.Integer),
        column('can_authenticate', sa.Boolean))
    stmt = roles.insert().values(
        name=sa.bindparam('name'),
        auth_level=sa.bindparam('auth_level'),
        can_authenticate=sa.bindparam('can_authenticate'))
    seed_rows = [
        {'name': 'admin', 'auth_level': 5, 'can_authenticate': True},
        {'name': 'member', 'auth_level': 3, 'can_authenticate': True},
        {'name': 'guest', 'auth_level': 0, 'can_authenticate': False},
    ]
    op.get_bind().execute(stmt, seed_rows)
def migrate_data(self):
    """Copy tmp_summary back into the summary column, row by row."""
    activities = table(
        "activities",
        column("id"), column("tmp_summary"), column("summary"))
    for activity in self.connection.execute(activities.select()).fetchall():
        self.execute(
            activities.update()
            .values(summary=activity.tmp_summary)
            .where(activities.columns.id == activity.id))
def insert_org_units_group_id(self):
    """Insert the current org-unit's group_id for all committees.

    By default we use the users group. This choice is somewhat arbitrary,
    but it does not really matter since meeting is not in production when
    this upgrade is executed.
    """
    committees_table = table(
        "committees", column("id"), column("group_id"))
    group_id = get_current_org_unit().users_group.groupid
    self.execute(committees_table.update().values(group_id=group_id))
def migrate_held_state(self):
    """The state held lost its relevance and is equivalent to pending.

    The migrations thus resets all meetings in the state held to the
    state pending.
    """
    meetings = table("meetings", column("id"), column("workflow_state"))
    stmt = (meetings.update()
            .values(workflow_state='pending')
            .where(meetings.c.workflow_state == 'held'))
    self.execute(stmt)
def get_idea_events(event_filter=None):
    """Return a mixture of workflow-state changes and comments on ideas,
    sorted by submission date (newest first)."""
    # Workflow state changes on ideas.
    state_events = session.query(
        WFCommentData.submission_date.label('date'),
        UserData.uid.label('user_uid'),
        StateData.label.label('event'),
        IdeaData.id.label('idea_id'))
    state_events = state_events.join(
        WFCommentData.to_state, WFCommentData.created_by,
        WFCommentData.idea_wf, IdeaWFContextData.idea)

    # Plain comments, tagged with a literal 'COMMENT' event.
    comment_events = session.query(
        CommentData.submission_date.label('date'),
        UserData.uid.label('user_uid'),
        literal(u'COMMENT').label('event'),
        IdeaData.id.label('idea_id'))
    comment_events = comment_events.join(
        CommentData.created_by, CommentData.idea,
        IdeaData.wf_context, IdeaWFContextData.state)
    # mask comments for ideas that are not in a published state
    comment_events = comment_events.filter(
        StateData.label.in_(get_workflow().get_published_states()))

    q = state_events.union(comment_events)
    # mask ideas that are currently in the dsig_basket_state
    q = q.filter(~IdeaData.id.in_(get_dsig_basket_state_ideas()))
    if event_filter:
        q = q.filter(column('event').in_(event_filter))
    return q.order_by(desc('date'))
def repl(element):
    """Rewrite clause elements for DDL: inline bind values as quoted
    literals, strip table qualification from column references."""
    if isinstance(element, expression._BindParamClause):
        return expression.literal_column(_quote_ddl_expr(element.value))
    if (isinstance(element, expression.ColumnClause)
            and element.table is not None):
        return expression.column(element.name)
    # Anything else: leave unchanged.
    return None
def migrate_data(self):
    """Restore upgradeid from old_upgradeid for every tracked profile."""
    tracking = table(
        TRACKING_TABLE_NAME,
        column("profileid"),
        column("upgradeid"),
        column("old_upgradeid"))
    for profile in self.connection.execute(tracking.select()).fetchall():
        self.execute(
            tracking.update()
            .values(upgradeid=profile.old_upgradeid)
            .where(tracking.columns.profileid == profile.profileid))
def _verify_version(self):
    """Check the stored schema version against ``self.version``.

    Raises DatabaseNotEmptyError when no version is recorded but the
    database already contains tables, and DatabaseVersionError when the
    recorded version differs from the expected one.

    NOTE(review): ``__version__`` appears to be a module-level SQLAlchemy
    selectable (a version table), not the conventional version string —
    confirm at the definition site.
    """
    stmt = __version__.select()
    try:
        row = self.execute(stmt).fetchone()
        ver = row and row[0]
    except OperationalError:
        # Version table missing entirely (fresh or foreign database).
        ver = None
    if not ver:
        # No version recorded: acceptable only if the database is empty;
        # otherwise it belongs to some other application.
        stmt = select([count(column('name'))]).select_from(
            table('sqlite_master')).where(column('type') == 'table')
        table_count = self.execute(stmt).fetchone()[0]
        if table_count:
            raise DatabaseNotEmptyError()
        return
    if ver != self.version:
        raise DatabaseVersionError(ver, self.version)
def insert_workflow_state_data(self):
    """Initialize agenda-item workflow states: 'pending' by default,
    'decided' when the parent meeting is already closed."""
    agenda_items = table(
        'agendaitems',
        column('id'), column('workflow_state'), column('meeting_id'))
    self.execute(agenda_items.update().values(workflow_state='pending'))

    meetings = table('meetings', column('id'), column('workflow_state'))
    closed_meetings = meetings.select().where(
        meetings.c.workflow_state == 'closed')
    for meeting in self.execute(closed_meetings):
        self.execute(
            agenda_items.update()
            .where(agenda_items.c.meeting_id == meeting.id)
            .values(workflow_state='decided'))
def insert_notification_defaults(self):
    """Populate mail_notification_roles for each existing setting."""
    defaults_table = table(
        "notification_defaults",
        column("id"),
        column("kind"),
        column("mail_notification_roles"),
    )
    settings = self.connection.execute(defaults_table.select()).fetchall()
    for setting in settings:
        # Kinds without an explicit default get an empty role list.
        serialized = json.dumps(DEFAULT_SETTINGS.get(setting.kind, []))
        update = (defaults_table.update()
                  .values(mail_notification_roles=serialized)
                  .where(defaults_table.columns.id == setting.id))
        self.execute(update)
def extend_query_with_ordering(self, query):
    """Extends the given `query` with ordering information and returns
    the new query.
    """
    if not self.config.sort_on:
        return query

    sort_on = self.config.sort_on

    # sqlalchemy_sort_indexes is a dict on the TableSourceConfig which
    # defines a mapping between default sort_on attributes based on
    # strings and sqlalchemy sort-indexes based on an sqlalchemy column.
    # This allows us to sort by joined table columns.
    if hasattr(self.config, 'sqlalchemy_sort_indexes'):
        mapped_index = self.config.sqlalchemy_sort_indexes.get(sort_on)
        if mapped_index:
            sort_on = mapped_index

    # Don't plug column names as literal strings into an order_by
    # clause, but use a ColumnClause instead to allow SQLAlchemy to
    # properly quote the identifier name depending on the dialect
    if isinstance(sort_on, basestring):
        sort_on = column(sort_on)

    direction = desc if self.config.sort_reverse else asc
    return query.order_by(direction(sort_on))
def migrate_data(self):
    """Move tmp_summary values into summary for every activity."""
    activities_table = table(
        "activities",
        column("id"),
        column("tmp_summary"),
        column("summary"),
    )
    rows = self.connection.execute(activities_table.select()).fetchall()
    for row in rows:
        stmt = (activities_table.update()
                .values(summary=row.tmp_summary)
                .where(activities_table.columns.id == row.id))
        self.execute(stmt)
def create_default_periods_for_committees(self):
    """Create an initial period (today .. Dec 31) for every committee."""
    self.committees_table = table("committees", column("id"))
    self.date = date.today()
    self.end_of_year = date(self.date.year, 12, 31)
    committees = self.connection.execute(
        self.committees_table.select()).fetchall()
    for committee in committees:
        self.create_initial_period(committee)
def add_roles():
    """Insert the default role rows using a bound-parameter insert."""
    roles_table = table(
        'roles',
        column('name', sa.String(255)),
        column('auth_level', sa.Integer),
        column('can_authenticate', sa.Boolean),
    )
    insert_stmt = roles_table.insert().values(
        name=sa.bindparam('name'),
        auth_level=sa.bindparam('auth_level'),
        can_authenticate=sa.bindparam('can_authenticate'),
    )
    op.get_bind().execute(insert_stmt, [
        dict(name='admin', auth_level=5, can_authenticate=True),
        dict(name='member', auth_level=3, can_authenticate=True),
        dict(name='guest', auth_level=0, can_authenticate=False),
    ])