def check_in(puppy_id, shelter_id):
    """Check a puppy into the requested shelter, falling back to any open one.

    If the requested shelter is at capacity, scan all shelters for a free
    slot and check the puppy into the first one found.

    :param puppy_id: primary key of the puppy to check in.
    :param shelter_id: primary key of the requested shelter.
    :return: the UPDATE result proxy on success, or ``None`` when every
        shelter is full.
    """
    def _assign(target_id, shelter_name, message):
        # Point the puppy's row at the chosen shelter and report where it went.
        result = session.execute(
            update(Puppy).where(Puppy.id == puppy_id).values(shelter_id=target_id)
        )
        print(message % (shelter_name))
        return result

    row = session.query(Shelter).filter(Shelter.id == shelter_id).one()
    # Requested shelter isn't full, so check puppy in directly.
    if row.occupancy < row.capacity:
        return _assign(shelter_id, row.name, "As requested, checking puppy into %s.")

    print("Shelter full. Checking for another shelter...\n")
    for r in session.query(Shelter).all():
        if r.occupancy < r.capacity:
            return _assign(r.id, r.name, "Found a new shelter! Checking puppy into %s.")
    # Iteration found no open shelter: everything is full.
    print("All shelters are full. Please open a new one.")
    return None
def test_update_returning(self):
    """UPDATE ... RETURNING renders correctly on the PostgreSQL dialect."""
    dialect = postgresql.dialect()
    table1 = table(
        'mytable',
        column('myid', Integer),
        column('name', String(128)),
        column('description', String(128)),
    )

    # Explicit column list in RETURNING.
    stmt = update(table1, values=dict(name='foo')).returning(
        table1.c.myid, table1.c.name)
    self.assert_compile(
        stmt,
        'UPDATE mytable SET name=%(name)s '
        'RETURNING mytable.myid, mytable.name',
        dialect=dialect)

    # Whole-table RETURNING expands to every column.
    stmt = update(table1, values=dict(name='foo')).returning(table1)
    self.assert_compile(
        stmt,
        'UPDATE mytable SET name=%(name)s '
        'RETURNING mytable.myid, mytable.name, '
        'mytable.description',
        dialect=dialect)

    # SQL function expression in RETURNING gets a generated label.
    stmt = update(table1, values=dict(name='foo')).returning(
        func.length(table1.c.name))
    self.assert_compile(
        stmt,
        'UPDATE mytable SET name=%(name)s '
        'RETURNING length(mytable.name) AS length_1',
        dialect=dialect)
def test_update_returning(self):
    """UPDATE ... RETURNING renders correctly on the default dialect."""
    table1 = table(
        "mytable",
        column("myid", Integer),
        column("name", String(128)),
        column("description", String(128)),
    )

    # Explicit column list in RETURNING.
    stmt = update(table1, values=dict(name="foo")).returning(
        table1.c.myid, table1.c.name
    )
    self.assert_compile(
        stmt,
        "UPDATE mytable SET name=:name RETURNING "
        "mytable.myid, mytable.name",
    )

    # Whole-table RETURNING expands to every column.
    stmt = update(table1, values=dict(name="foo")).returning(table1)
    self.assert_compile(
        stmt,
        "UPDATE mytable SET name=:name RETURNING "
        "mytable.myid, mytable.name, "
        "mytable.description",
    )

    # Function expression gets a generated label (generic char_length).
    stmt = update(table1, values=dict(name="foo")).returning(
        func.length(table1.c.name)
    )
    self.assert_compile(
        stmt,
        "UPDATE mytable SET name=:name RETURNING "
        "char_length(mytable.name) AS length_1",
    )
def apply_default_value(self, column):
    """Backfill NULLs in ``column`` with its default value during migration.

    Callable defaults are evaluated once per row (the default could be a
    sequence or depend on other fields); scalar defaults are applied with a
    single UPDATE.
    """
    if column.default:
        execute = self.table.migration.conn.execute
        val = column.default.arg
        table = self.table.migration.metadata.tables[self.table.name]
        # Attach the new column to the reflected table so we can reference it.
        table.append_column(column)
        cname = getattr(table.c, column.name)
        if column.default.is_callable:
            # Look up this table's primary-key column names from the
            # framework's system catalog tables.
            Table = self.table.migration.metadata.tables['system_model']
            Column = self.table.migration.metadata.tables['system_column']
            j1 = join(Table, Column, Table.c.name == Column.c.model)
            query = select([Column.c.name]).select_from(j1)
            query = query.where(Column.c.primary_key.is_(True))
            query = query.where(Table.c.table == self.table.name)
            columns = [x[0] for x in execute(query).fetchall()]
            # Count rows still NULL; each loop iteration fixes exactly one.
            query = select([func.count()]).select_from(table)
            query = query.where(cname.is_(None))
            nb_row = self.table.migration.conn.execute(query).fetchone()[0]
            for offset in range(nb_row):
                # Grab one remaining NULL row, identify it by primary key,
                # and set the freshly computed default on just that row.
                query = select(columns).select_from(table)
                query = query.where(cname.is_(None)).limit(1)
                res = execute(query).fetchone()
                where = and_(
                    *[getattr(table.c, x) == res[x] for x in columns])
                query = update(table).where(where).values(
                    {cname: val(None)})
                execute(query)
        else:
            # Scalar default: one UPDATE covers every NULL row.
            query = update(table).where(cname.is_(None)).values(
                {cname: val})
            execute(query)
def save_station(session, data):
    """
    Add or update a station in the DB.

    :param session: active SQLAlchemy session used for queries/merge.
    :param data: dict with ``name``, ``contact``, ``phone``, ``order`` and
        optionally ``id``, ``is_start``, ``is_end``.
    :return: the merged ``Station`` instance.
    """
    # BUG FIX: the original mixed data['id'] / data['is_start'] (KeyError when
    # absent) with data.get(...); use .get consistently for optional keys.
    station_id = data.get('id')

    # If this station is set to "start", we must remove that flag from all
    # other stations.  We'll just set them all to False, then update the given
    # station with True.  This ensures we only have one starting station.
    if data.get('is_start'):
        session.execute(update(Station).values(is_start=False))
    if data.get('is_end'):
        session.execute(update(Station).values(is_end=False))

    def _order_taken(order):
        # Another station (different id) already using this order value?
        return session.query(Station).filter(and_(
            Station.order == order,
            Station.id != station_id)).first()

    # Ensure we don't have duplicate values for the "order" field: keep
    # incrementing until the slot is free.
    while _order_taken(data['order']):
        data['order'] += 1

    station = Station(
        name=data['name'],
        contact=data['contact'],
        phone=data['phone'],
    )
    station.id = station_id
    station.order = data['order']
    station.is_start = data.get('is_start', False)
    station.is_end = data.get('is_end', False)
    merged = session.merge(station)
    # NOTE(review): commits on the global DB.session while the merge used the
    # passed-in session — confirm both point at the same session.
    DB.session.commit()
    return merged
def apply_default_value(self, column):
    """Backfill NULLs in ``column`` with its default value during migration.

    Variant that matches the single NULL row via a correlated aliased
    sub-select instead of fetching primary-key values row by row.
    """
    if column.default:
        execute = self.table.migration.conn.execute
        val = column.default.arg
        table = self.table.migration.metadata.tables[self.table.name]
        # Attach the new column to the reflected table so we can reference it.
        table.append_column(column)
        cname = getattr(table.c, column.name)
        if column.default.is_callable:
            # Alias selecting one still-NULL row; the UPDATE below joins
            # against it by primary key so each pass fixes exactly one row.
            # NOTE(review): .limit(1).where(...) applies WHERE after LIMIT —
            # verify this generates the intended query on the target SA version.
            table2 = alias(select([table]).limit(1).where(cname.is_(None)))
            # Primary-key column names come from the framework's system
            # catalog tables.
            Table = self.table.migration.metadata.tables['system_model']
            Column = self.table.migration.metadata.tables['system_column']
            j1 = join(Table, Column, Table.c.name == Column.c.model)
            query = select([func.count()]).select_from(table)
            nb_row = self.table.migration.conn.execute(query).fetchone()[0]
            query = select([Column.c.name]).select_from(j1)
            query = query.where(Column.c.primary_key.is_(True))
            query = query.where(Table.c.table == self.table.name)
            columns = [x[0] for x in execute(query).fetchall()]
            where = and_(*[getattr(table.c, x) == getattr(table2.c, x)
                           for x in columns])
            for offset in range(nb_row):
                # call for each row because the default value
                # could be a sequence or depend of other field
                query = update(table).where(where).values(
                    {cname: val(None)})
                execute(query)
        else:
            # Scalar default: one UPDATE covers every NULL row.
            query = update(table).where(cname.is_(None)).values(
                {cname: val})
            execute(query)
def process_item(self, item, spider):
    """Save deals in the database.

    This method is called for every item pipeline component.  Existing
    listings (matched by ``craigId``) get their repost counter bumped;
    new listings are inserted.

    :param item: scraped apartment item (dict-like).
    :param spider: the spider that produced the item (unused).
    :return: the item, for the next pipeline stage.
    """
    global zipDict
    session = self.Session()
    item['zipcode'] = self.findZip(zipDict, item)
    # TODO: change to below if after a week or two
    # if item['reposts'] == 1:
    try:
        # BUG FIX: `old` was a Query object (so `old + 1` was invalid) and the
        # update statement was constructed but never executed or committed.
        old = session.query(Apts.reposts).filter(
            Apts.craigId == item['craigId']).scalar()
        if old is not None:
            session.execute(
                update(Apts).where(Apts.craigId == item['craigId'])
                .values(reposts=old + 1))
        else:
            session.add(Apts(**item))
        session.commit()
    except Exception:
        session.rollback()
        raise
    finally:
        # BUG FIX: the original leaked the session on the update path.
        session.close()
    return item
def upgrade(ver, session):
    """Stepwise schema migration for the 'seen' plugin tables.

    Each ``if`` applies one version bump; a fresh DB (ver None) runs them all.
    Uses the legacy positional update(table, whereclause, values) API.
    """
    if ver is None:
        log.info('Converting seen imdb_url to imdb_id for seen movies.')
        field_table = table_schema('seen_field', session)
        # Rewrite every imdb_url field row into an imdb_id row.
        for row in session.execute(select([field_table.c.id, field_table.c.value],
                                          field_table.c.field == 'imdb_url')):
            new_values = {'field': 'imdb_id', 'value': extract_id(row['value'])}
            session.execute(update(field_table, field_table.c.id == row['id'],
                                   new_values))
        ver = 1
    if ver == 1:
        field_table = table_schema('seen_field', session)
        log.info('Adding index to seen_field table.')
        Index('ix_seen_field_seen_entry_id',
              field_table.c.seen_entry_id).create(bind=session.bind)
        ver = 2
    if ver == 2:
        log.info('Adding local column to seen_entry table')
        table_add_column('seen_entry', 'local', Boolean, session, default=False)
        ver = 3
    if ver == 3:
        # setting the default to False in the last migration was broken, fix the data
        log.info('Repairing seen table')
        entry_table = table_schema('seen_entry', session)
        session.execute(update(entry_table, entry_table.c.local == None,
                               {'local': False}))
        ver = 4
    return ver
def test_update_returning(self):
    """RETURNING compiles to OUTPUT inserted.* on the MSSQL dialect."""
    table1 = table(
        'mytable',
        column('myid', Integer),
        column('name', String(128)),
        column('description', String(128)),
    )

    # Explicit column list.
    stmt = update(table1, values=dict(name='foo')).returning(
        table1.c.myid, table1.c.name)
    self.assert_compile(
        stmt,
        'UPDATE mytable SET name=:name OUTPUT '
        'inserted.myid, inserted.name')

    # Whole-table RETURNING expands to every column.
    stmt = update(table1, values=dict(name='foo')).returning(table1)
    self.assert_compile(
        stmt,
        'UPDATE mytable SET name=:name OUTPUT '
        'inserted.myid, inserted.name, '
        'inserted.description')

    # OUTPUT clause renders before the WHERE clause.
    stmt = update(table1, values=dict(name='foo')).returning(
        table1).where(table1.c.name == 'bar')
    self.assert_compile(
        stmt,
        'UPDATE mytable SET name=:name OUTPUT '
        'inserted.myid, inserted.name, '
        'inserted.description WHERE mytable.name = '
        ':name_1')

    # Generic length() maps to T-SQL LEN().
    stmt = update(table1, values=dict(name='foo')).returning(
        func.length(table1.c.name))
    self.assert_compile(
        stmt,
        'UPDATE mytable SET name=:name OUTPUT '
        'LEN(inserted.name) AS length_1')
def set(self, key, value):
    '''Set a value in the cache.

    @param key Keyword of item in cache.
    @param value Value to be inserted in cache.
    '''
    # Evict old entries once the cache grows past its configured bound.
    if len(self) > self._max_entries:
        self._cull()
    timeout, cache = self.timeout, self._cache
    # Expiration timestamp, truncated to whole seconds.
    expires = datetime.fromtimestamp(time.time() + timeout).replace(microsecond=0)
    if key in self:
        # Key already present: refresh its value and expiry in place.
        update(
            cache,
            cache.c.key == key,
            dict(value=value, expires=expires),
        ).execute()
    else:
        # New key: insert a fresh row.
        insert(
            cache,
            dict(key=key, value=value, expires=expires),
        ).execute()
def ship_it(order_id):
    """Ship an order: deduct each line item from cookie stock, then mark the
    order shipped.  Runs inside a single transaction; rolls back on
    integrity errors (e.g. stock going negative under a CHECK constraint)."""
    line_items = cookies_tables.line_items
    cookies = cookies_tables.cookies
    orders = cookies_tables.orders

    query = select([line_items.c.cookie_id, line_items.c.quantity])
    query = query.where(line_items.c.order_id == order_id)
    transaction = connection.begin()
    cookies_to_ship = connection.execute(query)
    try:
        # Decrement inventory for every cookie on the order.
        for cookie in cookies_to_ship:
            stock_stmt = update(cookies).where(
                cookies.c.cookie_id == cookie.cookie_id
            ).values(
                quantity=cookies.c.quantity - cookie.quantity
            )
            result = connection.execute(stock_stmt)
        # Flag the order itself as shipped.
        shipped_stmt = update(orders).where(
            orders.c.order_id == order_id
        ).values(shipped=True)
        result = connection.execute(shipped_stmt)
        print("Shipped order id: {}".format(order_id))
        transaction.commit()
    except IntegrityError as error:
        transaction.rollback()
        print(error)
def upgrade(ver, session):
    """Stepwise schema migration for the 'seen' plugin tables.

    Each ``if`` applies one version bump; a fresh DB (ver None) runs them all.
    Uses the legacy positional update(table, whereclause, values) API.
    """
    if ver is None:
        log.info("Converting seen imdb_url to imdb_id for seen movies.")
        field_table = table_schema("seen_field", session)
        # Rewrite every imdb_url field row into an imdb_id row.
        for row in session.execute(select([field_table.c.id, field_table.c.value],
                                          field_table.c.field == "imdb_url")):
            new_values = {"field": "imdb_id", "value": extract_id(row["value"])}
            session.execute(update(field_table, field_table.c.id == row["id"],
                                   new_values))
        ver = 1
    if ver == 1:
        field_table = table_schema("seen_field", session)
        log.info("Adding index to seen_field table.")
        Index("ix_seen_field_seen_entry_id",
              field_table.c.seen_entry_id).create(bind=session.bind)
        ver = 2
    if ver == 2:
        log.info("Adding local column to seen_entry table")
        table_add_column("seen_entry", "local", Boolean, session, default=False)
        ver = 3
    if ver == 3:
        # setting the default to False in the last migration was broken, fix the data
        log.info("Repairing seen table")
        entry_table = table_schema("seen_entry", session)
        session.execute(update(entry_table, entry_table.c.local == None,
                               {"local": False}))
        ver = 4
    return ver
def downgrade():
    """Alembic downgrade: replace each table's ``uuid`` column with the legacy
    Base64 ``userid`` column (user, team, profile) and drop project's uuid.

    For each table: add the nullable column, backfill it row by row from the
    uuid via uuid2buid() (with a progress bar), then make it NOT NULL and
    drop the uuid column and its unique constraint.
    """
    conn = op.get_bind()

    # --- User
    op.add_column('user', sa.Column('userid', sa.String(22), nullable=True))
    op.create_unique_constraint('user_userid_key', 'user', ['userid'])
    count = conn.scalar(
        sa.select([sa.func.count('*')]).select_from(user))
    progress = get_progressbar("Users", count)
    progress.start()
    items = conn.execute(sa.select([user.c.id, user.c.uuid]))
    for counter, item in enumerate(items):
        # Backfill userid from the binary uuid, one row at a time.
        conn.execute(sa.update(user).where(user.c.id == item.id).values(
            userid=uuid2buid(item.uuid)))
        progress.update(counter)
    progress.finish()
    op.alter_column('user', 'userid', nullable=False)
    op.drop_constraint('user_uuid_key', 'user', type_='unique')
    op.drop_column('user', 'uuid')

    # --- Team
    op.add_column('team', sa.Column('userid', sa.String(22), nullable=True))
    op.create_unique_constraint('team_userid_key', 'team', ['userid'])
    count = conn.scalar(
        sa.select([sa.func.count('*')]).select_from(team))
    progress = get_progressbar("Teams", count)
    progress.start()
    items = conn.execute(sa.select([team.c.id, team.c.uuid]))
    for counter, item in enumerate(items):
        conn.execute(sa.update(team).where(team.c.id == item.id).values(
            userid=uuid2buid(item.uuid)))
        progress.update(counter)
    progress.finish()
    op.alter_column('team', 'userid', nullable=False)
    op.drop_constraint('team_uuid_key', 'team', type_='unique')
    op.drop_column('team', 'uuid')

    # --- Profile
    op.add_column('profile', sa.Column('userid', sa.String(22), nullable=True))
    op.create_unique_constraint('profile_userid_key', 'profile', ['userid'])
    count = conn.scalar(
        sa.select([sa.func.count('*')]).select_from(profile))
    progress = get_progressbar("Profiles", count)
    progress.start()
    items = conn.execute(sa.select([profile.c.id, profile.c.uuid]))
    for counter, item in enumerate(items):
        conn.execute(sa.update(profile).where(profile.c.id == item.id).values(
            userid=uuid2buid(item.uuid)))
        progress.update(counter)
    progress.finish()
    op.alter_column('profile', 'userid', nullable=False)
    op.drop_constraint('profile_uuid_key', 'profile', type_='unique')
    op.drop_column('profile', 'uuid')

    # --- Project
    # Project has no legacy userid equivalent: just drop the uuid column.
    op.drop_constraint('project_uuid_key', 'project', type_='unique')
    op.drop_column('project', 'uuid')
def db_fileInfoUpdate(file_id, usr_id, filehash, iv):
    """Record a new version of a file and who last edited it.

    Bumps the file's version, stores the new content hash and IV, and stamps
    the edition-management row with the editing user and current local time.

    :param file_id: primary key of the file being updated.
    :param usr_id: id of the user who made the change.
    :param filehash: hash of the new file contents.
    :param iv: new initialization vector for the encrypted file.
    """
    version = db_fileCurrentVersion(file_id) + 1
    # One UPDATE instead of three separate statements hitting the same row:
    # fewer round-trips and the three fields change atomically.
    session.execute(update(File).where(File.id == file_id).values(
        version=version,
        file_hash=filehash,
        iv=iv))
    session.execute(update(EditionManagement).where(
        EditionManagement.file_id == file_id).values(
        user_id=usr_id,
        change_datetime=(time.strftime("%d/%m/%Y") + ' ' +
                         time.strftime("%H:%M"))))
    session.commit()
def upgrade():
    """Rename languages per ID_BEFORE_AFTER_KEEP and keep old names as
    identifiers.

    Pre-builds four bound statements (rename language, sync walslanguage
    ascii_name, insert identifier, link language to identifier), then executes
    them per entry in the mapping.
    """
    conn = op.get_bind()

    # Lightweight table stubs; only the columns used below are declared.
    language = sa.table('language', *map(sa.column, ['pk', 'id', 'name', 'updated']))
    lid = sa.bindparam('id_')
    lbefore = sa.bindparam('before')
    # Rename a language, guarded on its current (old) name.
    update_lang = sa.update(language, bind=conn)\
        .where(sa.and_(
            language.c.id == lid,
            language.c.name == lbefore))\
        .values(updated=sa.func.now(), name=sa.bindparam('after'))

    walslanguage = sa.table('walslanguage', *map(sa.column, ['pk', 'ascii_name']))
    aname = sa.bindparam('ascii_name')
    # Keep the WALS ascii_name in sync, only when it actually differs.
    update_wals = sa.update(walslanguage, bind=conn)\
        .where(sa.exists().where(sa.and_(
            language.c.pk == walslanguage.c.pk,
            language.c.id == lid))\
        .where(walslanguage.c.ascii_name != aname))\
        .values(ascii_name=aname)

    icols = ['created', 'updated', 'active', 'version', 'type',
             'description', 'lang', 'name']
    identifier = sa.table('identifier', *map(sa.column, ['pk'] + icols))
    itype, idesc, ilang = (sa.bindparam(*a) for a in [
        ('type', 'name'), ('description', 'other'), ('lang', 'en')])
    iname = sa.bindparam('name')
    iwhere = sa.and_(
        identifier.c.type == itype,
        identifier.c.description == idesc,
        identifier.c.lang == ilang,
        identifier.c.name == iname)
    # INSERT ... SELECT guarded by NOT EXISTS: idempotent identifier creation.
    insert_ident = sa.insert(identifier, bind=conn).from_select(icols,
        sa.select([sa.func.now(), sa.func.now(), True, 1,
                   itype, idesc, ilang, iname])
        .where(~sa.exists().where(iwhere)))

    licols = ['created', 'updated', 'active', 'version',
              'language_pk', 'identifier_pk']
    languageidentifier = sa.table('languageidentifier', *map(sa.column, licols))
    l_pk = sa.select([language.c.pk]).where(language.c.id == lid)
    i_pk = sa.select([identifier.c.pk]).where(sa.and_(iwhere))
    # Link language to identifier, again idempotently via NOT EXISTS.
    insert_lang_ident = sa.insert(languageidentifier, bind=conn).from_select(licols,
        sa.select([sa.func.now(), sa.func.now(), True, 1,
                   l_pk.as_scalar(), i_pk.as_scalar()])
        .where(~sa.exists().where(sa.and_(
            languageidentifier.c.language_pk == l_pk,
            languageidentifier.c.identifier_pk == i_pk))))

    # Sorted for deterministic execution order across runs.
    for id_, (before, after, keep) in sorted(ID_BEFORE_AFTER_KEEP.items()):
        update_lang.execute(id_=id_, before=before, after=after)
        update_wals.execute(id_=id_, ascii_name=ascii_name(after))
        if keep:
            # Preserve the old name as an extra identifier for lookups.
            insert_ident.execute(name=before)
            insert_lang_ident.execute(id_=id_, name=before)
def update_entry(): fav_id = request.form['id'] name = request.form['name'] street = request.form['street'] city = request.form['city'] state = request.form['state'] zip = request.form['zip'] lat, lng = geocode_address(street, city, state, zip) try: update(favorites_table, favorites_table.c.id == fav_id).execute(name=name, street=street, city=city, state=state, zip=zip, lat=lat, lng=lng) except: print 'There was an error updating this favorite.' return redirect(url_for('show_favorites'))
def update_joke_in_db(self, joke_id, joke_data):
    # Apply `joke_data` (a mapping of column -> new value) to the joke row
    # with the given id, over a freshly opened storage connection.
    self.storage.connect()
    jokes = meta.tables["joke"]
    query = sqlalchemy.update(jokes).where(jokes.c.id == joke_id).values(joke_data)
    # NOTE(review): leftover debug output (py2 print statement) — consider
    # replacing with proper logging.
    print query
    self.storage.execute(query)
    self.storage.disconnect()
def test_exec_join_multitable(self):
    """A multi-table UPDATE against users JOIN addresses hits both tables."""
    users, addresses = self.tables.users, self.tables.addresses
    new_values = {
        addresses.c.email_address: 'updated',
        users.c.name: 'ed2'
    }
    stmt = update(users.join(addresses)).values(new_values).where(
        users.c.name == 'ed')
    testing.db.execute(stmt)

    # Every address owned by user 8 ('ed') got the new email.
    expected_addresses = [
        (1, 7, 'x', '*****@*****.**'),
        (2, 8, 'x', 'updated'),
        (3, 8, 'x', 'updated'),
        (4, 8, 'x', 'updated'),
        (5, 9, 'x', '*****@*****.**')]
    self._assert_addresses(addresses, expected_addresses)

    # Only 'ed' was renamed.
    expected_users = [
        (7, 'jack'),
        (8, 'ed2'),
        (9, 'fred'),
        (10, 'chuck')]
    self._assert_users(users, expected_users)
def test_update_3(self):
    """A literal whereclause compiles with params supplying the SET values."""
    table1 = self.tables.mytable
    stmt = update(table1, table1.c.myid == 7)
    self.assert_compile(
        stmt,
        'UPDATE mytable SET name=:name WHERE mytable.myid = :myid_1',
        params={'name': 'fred'})
def db_fileNotInUse(file_id):
    """Clear the in-use flag on a file.

    :param file_id: primary key of the file to release.
    :return: True when the flag was cleared, False when the file's status
        check failed.
    """
    # Guard clause: bail out early when the status check fails.
    if not db_fileStatus(file_id):
        return False
    session.execute(update(File).where(File.id == file_id).values(inUse=False))
    session.commit()
    return True
def test_update_ordereddict(self):
    """An OrderedDict of values behaves like a plain dict: the SET clause
    still comes out in table column order, not insertion order."""
    table1 = self.tables.mytable
    ordered_values = util.OrderedDict(
        (
            (table1.c.name, table1.c.name + "lala"),
            (table1.c.myid, func.do_stuff(table1.c.myid, literal("hoho"))),
        )
    )
    whereclause = (table1.c.myid == func.hoho(4)) & (
        table1.c.name == literal("foo") + table1.c.name + literal("lala")
    )
    stmt = update(table1, whereclause, values=ordered_values)
    self.assert_compile(
        stmt,
        "UPDATE mytable "
        "SET "
        "myid=do_stuff(mytable.myid, :param_1), "
        "name=(mytable.name || :name_1) "
        "WHERE "
        "mytable.myid = hoho(:hoho_1) AND "
        "mytable.name = :param_2 || mytable.name || :param_3",
    )
def test_update_ordered_parameters_2(self):
    """With preserve_parameter_order=True, list-of-pairs values emit the SET
    clause in the given order rather than table column order."""
    table1 = self.tables.mytable
    ordered_values = [
        (table1.c.name, table1.c.name + "lala"),
        ("description", "some desc"),
        (table1.c.myid, func.do_stuff(table1.c.myid, literal("hoho"))),
    ]
    whereclause = (table1.c.myid == func.hoho(4)) & (
        table1.c.name == literal("foo") + table1.c.name + literal("lala")
    )
    stmt = update(
        table1, whereclause, preserve_parameter_order=True
    ).values(ordered_values)
    self.assert_compile(
        stmt,
        "UPDATE mytable "
        "SET "
        "name=(mytable.name || :name_1), "
        "description=:description, "
        "myid=do_stuff(mytable.myid, :param_1) "
        "WHERE "
        "mytable.myid = hoho(:hoho_1) AND "
        "mytable.name = :param_2 || mytable.name || :param_3",
    )
def upgrade(engine):
    """Schema upgrade: add a bigger-is-better 'score' sample field to the
    'nts' test suite and a matching score column on the NT sample table."""
    # Create a session.
    session = sessionmaker(engine)()
    real_sample_type = session.query(upgrade_0_to_1.SampleType). \
        filter_by(name="Real").first()
    ts = session.query(upgrade_0_to_1.TestSuite).filter_by(name='nts').first()
    # Register the new 'score' field on the test suite via the ORM.
    score = upgrade_0_to_1.SampleField(name="score", type=real_sample_type)
    ts.sample_fields.append(score)
    session.add(ts)
    session.commit()
    session.close()
    # Flag the new field as bigger-is-better with a direct core UPDATE.
    test_suite_sample_fields = introspect_table(engine, 'TestSuiteSampleFields')
    set_scores = update(test_suite_sample_fields) \
        .where(test_suite_sample_fields.c.Name == "score") \
        .values(bigger_is_better=1)
    # Run the UPDATE and the DDL change inside one transaction.
    with engine.begin() as trans:
        trans.execute(set_scores)
        # Give the NT table a score column.
        score = Column('score', Float)
        add_column(trans, 'NT_Sample', score)
def update_canonicals(canonicals):
    '''
    Update canonical data for android devices.

    Each entry in `canonicals` must provide login_id, old_token and
    new_token; the bulk UPDATE rewrites matching device rows, then a stored
    procedure trims per-device user counts for both Android platforms.
    '''
    global ENGINE
    # Prefix keys with "p_" to form executemany bind-parameter names.
    binding = [{"p_{}".format(k): v for k, v in canonical.items()}
               for canonical in canonicals]
    device_table = model.metadata.tables['device']
    # Match on login_id plus whichever token column is currently populated
    # (device_token_new wins over device_token via COALESCE).
    stmt = update(device_table).\
        values(device_token_new=bindparam('p_new_token')).\
        where(and_(device_table.c.login_id == bindparam('p_login_id'),
                   func.coalesce(device_table.c.device_token_new,
                                 device_table.c.device_token) == bindparam('p_old_token')))
    # executemany: one statement, one binding dict per canonical.
    ENGINE.execute(stmt, binding)
    with session_scope() as session:
        query = text('SELECT keep_max_users_per_device( \
            (:platform_id)::int2, :device_token, (:max_users_per_device)::int2)')
        # Enforce the per-device user cap for both Android platform ids.
        for canonical in canonicals:
            session.execute(query,
                            {'platform_id': constants.PLATFORM_ANDROID,
                             'device_token': canonical['new_token'],
                             'max_users_per_device': config.max_users_per_device
                             })
            session.execute(query,
                            {'platform_id': constants.PLATFORM_ANDROID_TABLET,
                             'device_token': canonical['new_token'],
                             'max_users_per_device': config.max_users_per_device
                             })
        session.commit()
def track_instance(instance_id, environment_id, instance_type,
                   type_name, type_title=None, unit_count=None):
    """Record (or refresh) a billing/tracking row for an environment instance.

    Inserts a new Instance row; if one already exists for this
    (instance_id, environment_id) pair, updates its unit_count instead.

    :param instance_id: id of the instance being tracked.
    :param environment_id: id of the owning environment.
    :param instance_type: numeric instance type code.
    :param type_name: fully qualified type name.
    :param type_title: human-readable type title (optional).
    :param unit_count: billing unit count (optional).
    """
    unit = db_session.get_session()
    try:
        with unit.begin():
            env = unit.query(models.Environment).get(environment_id)
            instance = models.Instance()
            instance.instance_id = instance_id
            instance.environment_id = environment_id
            instance.tenant_id = env.tenant_id
            instance.instance_type = instance_type
            instance.created = timeutils.utcnow_ts()
            instance.destroyed = None
            instance.type_name = type_name
            instance.type_title = type_title
            instance.unit_count = unit_count
            unit.add(instance)
    except exception.DBDuplicateEntry:
        # BUG FIX: the original combined the two conditions with Python `and`,
        # which does not build a SQL AND — only one condition was applied.
        # Chained .where() calls AND them correctly.
        unit.execute(
            sqlalchemy.update(models.Instance).where(
                models.Instance.instance_id == instance_id).where(
                models.Instance.environment_id == environment_id).values(
                unit_count=unit_count))
def upgrade():
    """Alembic upgrade: split the alert 'query' JSON blob into dedicated
    query_text / entity_id columns, soft-deleting alerts that end up with
    neither, then drop the legacy columns."""
    op.add_column('alert', sa.Column('entity_id', sa.String(length=32), nullable=True))
    op.add_column('alert', sa.Column('query_text', sa.Unicode(), nullable=True))
    op.create_foreign_key(None, 'alert', 'entity', ['entity_id'], ['id'])
    bind = op.get_bind()
    # Reflect the live table so the new columns are visible below.
    meta = sa.MetaData()
    meta.bind = bind
    meta.reflect()
    alert_table = meta.tables['alert']
    rp = bind.execute(sa.select([alert_table]))
    for alert in rp.fetchall():
        deleted_at = alert.deleted_at
        # 'query' holds multi-valued params; take the first 'q' / 'entity'.
        query_text = alert.query.get('q', [None])[0]
        entity_id = alert.query.get('entity', [None])[0]
        # Values shorter than 30 chars are not valid entity ids — discard.
        if entity_id is not None and len(entity_id) < 30:
            entity_id = None
        # An alert with neither a query nor an entity is useless: soft-delete.
        if entity_id is None and query_text is None:
            deleted_at = datetime.utcnow()
        q = sa.update(alert_table).where(alert_table.c.id == alert.id)
        q = q.values(query_text=query_text, entity_id=entity_id, deleted_at=deleted_at)
        bind.execute(q)
    op.drop_column('alert', 'query')
    op.drop_column('alert', 'signature')
def update_rec_id_for_records_to_delete(conf, target_conn, table_name, prod_records_matched):
    """Update pre-prod records primary key and rec_status based on matching records from prod

    For all the matching records the natural_key will be set to the natural key of the
    record from prod and the rec_status will be updated to 'D' from 'W'

    :param conf: udl configuration object
    :param target_conn: connection object to pre-prod database
    :param table_name: name of the table being updated
    :param prod_records_matched: batch of records from prod that matches with pre-prod 'W'
        records based on natural keys
    """
    table = target_conn.get_table(table_name)
    for record in prod_records_matched:
        # Copy prod's primary-key values onto the pre-prod row and flag it 'D'.
        values = {table.c[pk_column]: record[pk_column]
                  for pk_column in table.primary_key.columns.keys()}
        values[table.c[Constants.REC_STATUS]] = Constants.STATUS_DELETE
        # Match by natural key, restricted to this batch's 'W' records.
        criteria = [table.c[nk_column] == record[nk_column]
                    for nk_column in get_natural_key_columns(table)]
        criteria.append(table.c.batch_guid == conf[mk.GUID_BATCH])
        criteria.append(table.c.rec_status == Constants.STATUS_WAITING)
        query = update(table).values(values).where(and_(*criteria))
        try:
            target_conn.execute(query)
        except IntegrityError as ie:
            # Same record marked for deletion more than once: wrap in a UDL
            # error that records the failure before stopping the pipeline.
            e = UDLDataIntegrityError(conf[mk.GUID_BATCH], ie,
                                      "{schema}.{table}".format(schema=conf[mk.PROD_DB_SCHEMA],
                                                                table=table_name),
                                      ErrorSource.DELETE_FACT_ASMT_OUTCOME_RECORD_MORE_THAN_ONCE,
                                      conf[mk.UDL_PHASE_STEP], conf[mk.TARGET_DB_SCHEMA])
            failure_time = datetime.datetime.now()
            e.insert_err_list(failure_time)
            # raise an exception and stop the pipeline
            raise e
def test_update_8(self):
    """SET and WHERE may reference the same column with distinct bindparams."""
    table1 = self.tables.mytable
    stmt = update(table1, table1.c.myid == 12)
    self.assert_compile(
        stmt,
        'UPDATE mytable SET myid=:myid WHERE mytable.myid = :myid_1',
        params={'myid': 18},
        checkparams={'myid': 18, 'myid_1': 12})
def gallery_deleted(self):
    """Mark this gallery as expired locally and flag its DB row as dead."""
    self.expired = True
    with user_database.get_session(self) as session:
        stmt = update(user_database.Gallery).where(
            user_database.Gallery.id == self.db_id
        ).values({"dead": True})
        session.execute(stmt)
def test_update_ordered_parameters_2(self):
    """List-of-pairs values with preserve_parameter_order=True keep the
    caller's SET ordering (which differs from table column order here)."""
    table1 = self.tables.mytable
    pairs = [
        (table1.c.name, table1.c.name + 'lala'),
        ('description', 'some desc'),
        (table1.c.myid, func.do_stuff(table1.c.myid, literal('hoho'))),
    ]
    criteria = (table1.c.myid == func.hoho(4)) & (
        table1.c.name == literal('foo') + table1.c.name + literal('lala'))
    stmt = update(
        table1, criteria, preserve_parameter_order=True).values(pairs)
    self.assert_compile(
        stmt,
        'UPDATE mytable '
        'SET '
        'name=(mytable.name || :name_1), '
        'description=:description, '
        'myid=do_stuff(mytable.myid, :param_1) '
        'WHERE '
        'mytable.myid = hoho(:hoho_1) AND '
        'mytable.name = :param_2 || mytable.name || :param_3')
def test_exec_join_multitable(self):
    """A multi-table UPDATE over users JOIN addresses writes both tables."""
    users, addresses = self.tables.users, self.tables.addresses
    new_values = {addresses.c.email_address: "updated", users.c.name: "ed2"}
    stmt = (
        update(users.join(addresses))
        .values(new_values)
        .where(users.c.name == "ed")
    )
    testing.db.execute(stmt)

    # All of user 8's addresses changed; the others are untouched.
    expected_addresses = [
        (1, 7, "x", "*****@*****.**"),
        (2, 8, "x", "updated"),
        (3, 8, "x", "updated"),
        (4, 8, "x", "updated"),
        (5, 9, "x", "*****@*****.**"),
    ]
    self._assert_addresses(addresses, expected_addresses)

    # Only 'ed' was renamed.
    expected_users = [(7, "jack"), (8, "ed2"), (9, "fred"), (10, "chuck")]
    self._assert_users(users, expected_users)
def store_result(self, processing_context, result_container):
    """Persist one node's successful result: flip its slot in the per-node
    status string, derive the overall status, copy any configured DAG
    columns off the result container, and update the row."""
    # Flip this node's character in the status string to SUCCESS.
    node_status = list(processing_context.process_status)
    node_status[processing_context.update_enum.value + 1] = NodeStatus.SUCCESS.value
    node_status = ''.join(node_status)

    # Overall run succeeds only once every node reports success.
    if all(ch == NodeStatus.SUCCESS.value for ch in node_status):
        overall_status = ProcessStatus.SUCCESS.value
    else:
        overall_status = ProcessStatus.WAITING.value

    values = {
        'finished': datetime.datetime.now(),
        'node_status': node_status,
        'updated_previous_status': processing_context.process_status,
        'status': overall_status,
        'updated_node': processing_context.update_enum.name,
    }
    # Copy configured result attributes into their mapped DAG columns.
    dag_columns = processing_context.config._dag_columns.get(self.name, {})
    for attr_name, col in dag_columns.items():
        values[col.name] = getattr(result_container, attr_name)

    stmt = (
        sa.update(self._table)
        .values(**values)
        .where(self._table.c.id == processing_context.id_)
    )
    self._active_session.execute(stmt)
    self._active_session.commit()
def test_update_ordered_parameters_2(self):
    """Ordered (column, value) pairs drive SET ordering when
    preserve_parameter_order=True — note the order differs from table.c."""
    table1 = self.tables.mytable
    value_pairs = [
        (table1.c.name, table1.c.name + 'lala'),
        ('description', 'some desc'),
        (table1.c.myid, func.do_stuff(table1.c.myid, literal('hoho'))),
    ]
    where_criteria = (table1.c.myid == func.hoho(4)) & \
        (table1.c.name == literal('foo') + table1.c.name + literal('lala'))
    self.assert_compile(
        update(table1, where_criteria,
               preserve_parameter_order=True).values(value_pairs),
        'UPDATE mytable '
        'SET '
        'name=(mytable.name || :name_1), '
        'description=:description, '
        'myid=do_stuff(mytable.myid, :param_1) '
        'WHERE '
        'mytable.myid = hoho(:hoho_1) AND '
        'mytable.name = :param_2 || mytable.name || :param_3')
def populate_prod_sel_tbl(self):
    """Rebuild the product-matching selection table: join uploaded vendor
    part numbers to current PRF vendor products, bulk-insert the matches,
    then stamp each row with its duplicate count."""
    mysql_session_maker.close_all()
    self.session = mysql_session_maker()
    self.table_create_if_not_exists("pythontest", "cntr_prodmatching", "ProductMatching")
    # Imported here to avoid a circular import at module load time —
    # TODO confirm against the package layout.
    import cep_price_console.db_management.ARW_PRF_Mapping as ARW_PRF_Mapping
    # Uploaded rows LEFT JOINed to current vendor products on
    # (vendor id, vendor part number); unmatched rows keep a NULL Prod_Num.
    query_1 = self.session.query(
        UploadMono.__table__.c.Vendor_ID,
        UploadMono.__table__.c.VendProdNum,
        UploadMono.__table__.c.CustProdNum,
        ARW_PRF_Mapping.prod_vend_01_current.__table__.c.Prod_Num
    ).outerjoin(ARW_PRF_Mapping.prod_vend_01_current.__table__, and_(
        UploadMono.__table__.c.Vendor_ID ==
        ARW_PRF_Mapping.prod_vend_01_current.__table__.c.Vend_Num,
        UploadMono.__table__.c.VendProdNum ==
        ARW_PRF_Mapping.prod_vend_01_current.__table__.c.Vend_Part_Num)). \
        distinct(UploadMono.__table__.c.VendProdNum)
    # Bulk INSERT ... SELECT straight from the join above.
    insert = ProductMatching.__table__.insert().from_select([
        ProductMatching.__table__.c.Vendor_ID,
        ProductMatching.__table__.c.VendProdNum,
        ProductMatching.__table__.c.CustProdNum,
        ProductMatching.__table__.c.CEP_Part_Num
    ], query_1)
    mysql_engine.execute(insert)
    self.session.commit()
    # NOTE(review): N+1 pattern — one count query plus one UPDATE per row;
    # a single grouped UPDATE would scale better on large uploads.
    query_2 = self.session.query(ProductMatching).all()
    for row in query_2:
        update_stmt = update(ProductMatching). \
            where(ProductMatching.__table__.c.VendProdNum == row.VendProdNum). \
            values(Count=self.session.query(
                func.count(ProductMatching.__table__.c.VendProdNum))
                .filter(ProductMatching.__table__.c.VendProdNum == row.VendProdNum)
                .scalar()
                )
        mysql_engine.execute(update_stmt)
async def update_section(
    conn: SAConnection,
    section_id: int,
    name: Optional[str] = None,
    description: Optional[str] = None
) -> SectionRow:
    """Update a forum section's fields; omitted fields stay unchanged.

    :param conn: DB connection.
    :param section_id: id of the section to update.
    :param name: new section name, if given.
    :param description: new section description, if given.
    :return: the refreshed section row.
    """
    # Collect only the fields the caller actually supplied.
    changes = {
        field: value
        for field, value in (('name', name), ('description', description))
        if value is not None
    }
    if changes:
        await conn.execute(
            update(section).where(
                section.c.id == section_id
            ).values(changes)
        )
    return await get_section(conn, section_id)
async def check_concurrency(
    db_conn: SAConnection,
    sched_ctx: SchedulingContext,
    sess_ctx: PendingSession,
) -> PredicateResult:
    """Scheduling predicate: enforce the keypair's max concurrent sessions.

    On success the keypair's concurrency counter is incremented and a
    rollback callback (which decrements it again) is attached to the result.
    """
    # Load the resource policy governing this session's keypair.
    query = (sa.select([
        keypair_resource_policies
    ]).select_from(keypair_resource_policies).where(
        keypair_resource_policies.c.name == sess_ctx.resource_policy))
    result = await db_conn.execute(query)
    resource_policy = await result.first()
    # Row-lock the counter (SELECT ... FOR UPDATE) so concurrent schedulers
    # cannot both pass the limit check.
    query = (sa.select([keypairs.c.concurrency_used],
                       for_update=True).select_from(keypairs).where(
        keypairs.c.access_key == sess_ctx.access_key))
    concurrency_used = await db_conn.scalar(query)
    log.debug('access_key: {0} ({1} / {2})',
              sess_ctx.access_key, concurrency_used,
              resource_policy['max_concurrent_sessions'])
    if concurrency_used >= resource_policy['max_concurrent_sessions']:
        return PredicateResult(
            False,
            "You cannot run more than "
            f"{resource_policy['max_concurrent_sessions']} concurrent sessions"
        )
    # Increment concurrency usage of keypair.
    query = (sa.update(keypairs).values(
        concurrency_used=keypairs.c.concurrency_used + 1).where(
        keypairs.c.access_key == sess_ctx.access_key))
    await db_conn.execute(query)

    async def rollback(
        db_conn: SAConnection,
        sched_ctx: SchedulingContext,
        sess_ctx: PendingSession,
    ) -> None:
        # Undo the increment above if a later predicate fails.
        query = (sa.update(keypairs).values(
            concurrency_used=keypairs.c.concurrency_used - 1).where(
            keypairs.c.access_key == sess_ctx.access_key))
        await db_conn.execute(query)

    return PredicateResult(True, failure_cb=rollback)
def editAdvert(id):
    """Render the advert edit form; on POST, persist the submitted changes.

    :param id: ad_no of the advert being edited.
    """
    if request.method == 'POST':
        form = request.form
        # Collect the editable fields from the submitted form.
        new_values = {
            'seller_price': form['seller_price'],
            'km': form['km'],
            'color': form['color'],
            'damage': form['damage'],
            'second_hand': form['second_hand'],
            'exchange': form['exchange'],
        }
        conn.execute(
            update(Advert).where(Advert.c.ad_no == id).values(new_values))
        return redirect(url_for('your_adverts'))

    # GET: load the advert plus the distinct brand list for the form.
    advert = conn.execute(
        select([Advert]).where(Advert.c.ad_no == id)).fetchone()
    brand_rows = db.session.query(Model.c.brand_name).distinct(
        Model.c.brand_name).all()
    brands = [row[0] for row in brand_rows]
    return render_template('editAdvert.html', advert=advert, brands=brands,
                           vehicles=db.session.query(Vehicle).all())
async def test_refresh(self, async_session):
    """refresh() reloads attributes changed behind the ORM's back."""
    User = self.classes.User
    async with async_session.begin():
        u1 = User(name="u1")
        async_session.add(u1)
        await async_session.flush()
        conn = await async_session.connection()
        # Core-level UPDATE with session synchronization disabled, so the
        # in-memory object keeps its stale value.
        await conn.execute(
            update(User)
            .values(name="u2")
            .execution_options(synchronize_session=None)
        )
        eq_(u1.name, "u1")
        # Explicit refresh pulls the new value from the database.
        await async_session.refresh(u1)
        eq_(u1.name, "u2")
        eq_(await conn.scalar(select(func.count(User.id))), 1)
def update_one(one, model, name) -> any:
    """Update the row identified by *one*'s primary key in table *name*.

    :param one: object whose attributes carry the new column values.
    :param model: pydantic model class used to echo the input back.
    :param name: logical table name, resolved via ``get_table_model``.
    :returns: ``model.parse_obj(one)`` — the input re-validated as *model*.
    :raises: re-raises any database error after rolling back.

    NOTE(review): the ``-> any`` annotation refers to the builtin ``any()``
    function; ``typing.Any`` (or the concrete model type) was likely meant.
    Left unchanged to avoid touching the module's imports.
    """
    table = get_table_model(name)
    # Hoisted: was re-computed once per column inside the loop.
    primary_key = get_primary_key(name)
    instance_dict: dict = convert_to_dict(one)
    # Every column except the primary key is written; the key selects the row.
    values = {
        key: value
        for key, value in instance_dict.items() if key != primary_key
    }
    stmt = update(table).where(
        eq(getattr(table, primary_key),
           instance_dict.get(primary_key))).values(values)
    session = Session(engine, future=True)
    try:
        session.execute(stmt)
        session.commit()
    except BaseException:  # narrowed from bare `except:`; still re-raised
        session.rollback()
        raise
    finally:
        session.close()
    return model.parse_obj(one)
def reset_preseed():
    """Clear the ``expires`` field of every JobPriorities row whose expiry
    date is today or earlier.

    ``expires`` may be stored either as a "%Y-%m-%d" string or as a
    date/datetime object; both forms are handled.
    """
    data = session.query(JobPriorities.expires, JobPriorities.id)\
        .filter(JobPriorities.expires != None).all()

    now = datetime.datetime.now()
    # Hoisted: previously a new connection was opened per expired row.
    conn = engine.connect()
    for item in data:
        try:
            # BUG FIX: format was "%Y-%M-%d" — %M is *minutes*, so every
            # string date failed to parse; %m (month) is correct.
            dv = datetime.datetime.strptime(item[0], "%Y-%m-%d")
        except ValueError:
            # TODO: consider updating column to have expires=None?
            continue
        except TypeError:
            # Not a string: item[0] is already a date/datetime object.
            # BUG FIX: previously `item[0].today()` was used, which returns
            # *today's* date regardless of the stored value.
            dv = datetime.datetime.combine(item[0],
                                           datetime.datetime.min.time())

        # reset expire field if date is today or in the past
        if dv.date() <= now.date():
            statement = update(JobPriorities)\
                .where(JobPriorities.id == item[1])\
                .values(expires=None)
            conn.execute(statement)
async def update_post(
        conn: SAConnection,
        post_id: int,
        topic: Optional[str] = None,
        description: Optional[str] = None
) -> PostRow:
    """Update a post's topic and/or description, then return the fresh row.

    :param conn: database connection.
    :param post_id: id of the post to update.
    :param topic: new topic, or None to leave unchanged.
    :param description: new description, or None to leave unchanged.

    When both fields are None, no UPDATE statement is issued at all.
    """
    changes = {
        field: value
        for field, value in (('topic', topic), ('description', description))
        if value is not None
    }
    if changes:
        await conn.execute(
            update(post).where(post.c.id == post_id).values(changes))
    return await get_post(conn, post_id)
async def mark_session_terminated(self, sess_id):
    '''
    Mark the compute session terminated and restore the concurrency limit
    of the owner access key.
    Releasing resource limits is handled by func:`mark_kernel_terminated`.
    '''
    async with self.dbpool.acquire() as conn:
        # restore concurrency usage of the owner access-key
        query = (sa.select([
            sa.column('id'), sa.column('access_key')
        ]).select_from(kernels).where(kernels.c.sess_id == sess_id))
        result = await conn.execute(query)
        all_kernels = await result.fetchall()
        num_kernels = len(all_kernels)
        if num_kernels > 0:
            # All kernels of one session share the same access key, so
            # read it off the first row.
            access_key = all_kernels[0]['access_key']
            # concurrency is per session.
            query = (sa.update(keypairs).values({
                'concurrency_used': (keypairs.c.concurrency_used - 1),
            }).where(keypairs.c.access_key == access_key))
            await conn.execute(query)
def create_new_user(update, connection):
    """Register a Telegram user as a shopkeeper.

    The message text must be a numeric shopkeeper code; when the code
    exists in the `uk_shopkeeper` table, the sender's Telegram user id is
    attached to that shopkeeper row.  Always returns 'ok' (webhook ack).
    """
    shopkeeper_table = load_table('uk_shopkeeper')
    user_id = update.message.from_user.id
    try:
        shopkeeper_id = int(update.message.text)
    except (ValueError, TypeError):
        # Narrowed from bare `except:` — only a non-numeric or missing
        # message text is an expected failure here.
        update.message.reply_text('Unexpected format. Please try again.')
        return 'ok'
    if exists_in(shopkeeper_id, shopkeeper_table,
                 shopkeeper_table.columns.shopkeeperid, connection):
        add_user = db.update(shopkeeper_table).values(userid=user_id).where(
            shopkeeper_table.columns.shopkeeperid == shopkeeper_id)
        try:
            connection.execute(add_user)
            # BUG FIX: the success message read "can not" — the user has
            # just registered, so they can NOW send updates.
            update.message.reply_text(
                'Successfully registered. You can now send crowd size updates.'
            )
        except Exception:  # narrowed from bare `except:`
            update.message.reply_text('Failed to add user. Please check code.')
    else:
        update.message.reply_text('Code is wrong. Please check.')
    return 'ok'
def upgrade():
    """Alembic migration: rename ``order_hour`` to ``hour``, add a ``date``
    column, backfill it with today's date, then rebuild the primary key as
    (date, hour).

    Statement order matters: ``date`` must be nullable while it is added
    and populated for existing rows, and only afterwards made NOT NULL and
    part of the primary key.
    """
    op.alter_column("st_system_daily_load",
                    "order_hour",
                    new_column_name="hour",
                    existing_type=TINYINT(unsigned=True),
                    nullable=False)
    op.add_column('st_system_daily_load',
                  sa.Column('date', sa.Date(), nullable=True))
    # Re-declare the current float column types so the ALTERs can be
    # emitted correctly (required by MySQL-style MODIFY COLUMN).
    op.alter_column("st_system_daily_load",
                    "paid_orders_count",
                    existing_type=sa.Float(3, 7))
    op.alter_column("st_system_daily_load",
                    "writers_count",
                    existing_type=sa.Float(3, 7))
    op.alter_column("st_system_daily_load",
                    "paid_orders_per_writer",
                    existing_type=sa.Float(3, 6))
    op.alter_column("st_system_daily_load",
                    "bidding_coefficient",
                    existing_type=sa.Float(3, 5))
    connection = op.get_bind()
    session = Session(bind=connection)
    # Backfill: stamp every existing row with today's date (note: legacy
    # `update(table, values=...)` calling convention).
    session.execute(
        update(StSystemDailyLoad,
               values={StSystemDailyLoad.date: datetime.now().date()}))
    session.commit()
    op.alter_column('st_system_daily_load',
                    'date',
                    existing_type=sa.Date(),
                    nullable=False)
    # Raw SQL: swapping a composite primary key needs a single ALTER.
    connection.execute(
        text(
            """ALTER TABLE st_system_daily_load DROP PRIMARY KEY, ADD PRIMARY KEY(date, hour);"""
        ))
def updateBuildingEnergyData(self, deviceId=0, buildingId=0):
    """Recompute per-device and building-level energy profiles and persist
    the aggregate into the ``Building`` table row.

    :param deviceId: used to resolve the building when ``buildingId`` is 0.
    :param buildingId: target building id; looked up from ``deviceId`` if 0.
    :returns: the UPDATE result proxy, or None on any error.
    """
    try:
        building = Building()
        e_ud = building.energy_uncontrol()
        ced_energy = numpy.zeros(24)
        device_energy = numpy.zeros(24)
        if (buildingId == 0):
            # No building given: derive it from the device record.
            deviceToGetBuildingId = [
                dict(r) for r in self.getDevicesBasedOnDeviceId(deviceId)
            ]
            buildingId = deviceToGetBuildingId[0]['Building']
        deviceResultSet = self.getDevicesBasedOnBuildingId(buildingId)
        deviceDict = [dict(r) for r in deviceResultSet]
        ced_list = []
        for i in deviceDict:
            print("qFF", int(i['StartTime']), int(i['EndTime']))
            deviceTemp = Device(int(i['EST']), int(i['LET']),
                                float(i['Power']), int(i['LOT']),
                                i['DeviceName'])
            # Hourly energy profile for this device's scheduled window.
            device_energy = deviceTemp.energy_cal(int(i['StartTime']),
                                                  int(i['EndTime']))
            print("qq", device_energy)
            device_total = deviceTemp.energy_total()
            print("qy", device_energy)
            building.device_add(i['DeviceName'], device_energy)
            deviceValuesToUpdate = {
                'Power_total(kW)': self.convertArrayToString(device_energy),
                'Power_sum(kW)': str(device_total)
            }
            deviceUpdateResult = self.updateDeviceEnergyData(
                deviceValuesToUpdate, i['Id'])
            ced_energy = numpy.add(device_energy, ced_energy)
            ced_list.append(i['DeviceName'])
        total_energy = building.total_energy_cal()
        print("11", total_energy)
        values = {
            'DemandSide': str(self.DemandSideId),
            'CED_Count': str(len(ced_list)),
            'CED_List': str(ced_list),
            'CEDConsumption': self.convertArrayToString(ced_energy),
            'UDConsumption': self.convertArrayToString(e_ud),
            'TotalDemand': self.convertArrayToString(total_energy)
        }
        engine = db.create_engine('sqlite:///' + self.db_path)
        connection = engine.connect()
        metadata = db.MetaData()
        # FIX: renamed from `object`, which shadowed the `object` builtin.
        building_table = db.Table('Building', metadata, autoload=True,
                                  autoload_with=engine)
        query = db.update(building_table).values(values)
        query = query.where(building_table.columns.Id == buildingId)
        results = connection.execute(query)
        return results
    except Exception as e:
        print(e.args)
        return None
async def setprofile(msg: Message) -> None:
    """Set the sender's profile phrase (max 250 characters).

    Authorized users ("driver" role in the main room) have their phrase
    stored immediately; everyone else's phrase is stored as pending and
    the approvers are notified in the main room.
    """
    if not msg.arg:
        await msg.reply("Specifica una frase da inserire nel tuo profilo")
        return

    if len(msg.arg) > 250:
        await msg.reply("Errore: lunghezza massima 250 caratteri")
        return

    # authorized: True if msg.user can approve new descriptions.
    authorized = msg.user.has_role("driver", msg.conn.main_room)

    db = Database.open()
    with db.get_session() as session:
        userid = msg.user.userid
        # Ensure a Users row exists before updating it.
        session.add(d.Users(userid=userid))
        stmt = update(d.Users).filter_by(userid=userid)
        if authorized:
            # Authorized users skip the validation process.
            stmt = stmt.values(description=msg.arg, description_pending="")
        else:
            stmt = stmt.values(description_pending=msg.arg)
        session.execute(stmt)

    await msg.reply("Salvato")

    if not authorized:
        # Ping the approvers with a button that opens the pending queue.
        username = msg.user.username
        botname = msg.conn.username
        cmd = f"{msg.conn.command_character}pendingdescriptions"

        html = f"{username} ha aggiornato la sua frase del profilo." + e.Br()
        html += (
            "Usa "
            + e.Button(cmd, name="send", value=f"/pm {botname}, {cmd}")
            + " per approvarla o rifiutarla"
        )
        await msg.conn.main_room.send_rankhtmlbox("%", html)
def update_con_cpnr(df, session):
    """Version-and-replace the con_objectid→cpnr_corrected links in
    czHierarchy for the given dataframe's connection order.

    The current rows are closed (versionEnd stamped) and fresh rows are
    inserted, then the upload is recorded in czLog.  Returns the order id
    that was processed.
    """
    value = str(df['con_opdrachtid'].values[0])
    ts = dt.now().strftime("%Y-%m-%d %H:%M:%S")
    # upload new values to czHierarchy
    # set to version end
    setVersionEnd = df['con_objectid'].tolist()
    print('set to versionEnd {}: {}'.format(ts, ', '.join(setVersionEnd)))
    # Close only the currently-open rows (versionEnd IS NULL) of this kind.
    q_update = sa.update(czHierarchy).\
        where(czHierarchy.versionEnd.is_(None)).\
        where(czHierarchy.kind == 'con_objectid').\
        where(czHierarchy.kindKey.in_(setVersionEnd)).\
        where(czHierarchy.parentKind == 'cpnr_corrected').\
        values(versionEnd=ts)
    session.execute(q_update)
    session.flush()
    print('Update con_cpnr flushed')

    # upload new values in czHierarchy
    df = df[['cpnr_corrected', 'con_objectid']].rename(columns={
        'cpnr_corrected': 'parentKindKey',
        'con_objectid': 'kindKey'
    })
    df['kind'] = 'con_objectid'
    df['parentKind'] = 'cpnr_corrected'
    # Insert replacement rows, created with the same timestamp so old and
    # new versions line up.
    czHierarchy.insert(df, session, created=ts)

    # Log upload in czLog
    czLog.insert(
        {
            'action': 'upload',
            'description': 'conobjid-cpnr',
            'parameter': value,
            'created': ts,
        }, session)

    return value
def generate_hashes(conn):
    """Compute and store a `hash` for every Entry, derived from its field
    values and a corpus-wide title-word frequency distribution.

    Entries are processed in windows of primary-key ranges per file, and
    the hashes are written back with a raw executemany UPDATE for speed.
    """
    from .libmonster import wrds, keyid

    # Build the title-word frequency distribution over the whole corpus.
    words = collections.Counter()
    cursor = sa.select([Value.value], bind=conn).where(Value.field == 'title').execute()
    for rows in iter(functools.partial(cursor.fetchmany, 10000), []):
        for title, in rows:
            words.update(wrds(title))
    # TODO: consider dropping stop words/hapaxes from freq. distribution
    print('%d title words (from %d tokens)' % (len(words), sum(itervalues(words))))

    def windowed_entries(chunksize=500):
        # Yield (first_pk, last_pk) windows of up to `chunksize` entries,
        # grouped per file so windows never straddle file boundaries.
        select_files = sa.select([File.pk], bind=conn).order_by(File.name)
        select_bibkeys = sa.select([Entry.pk], bind=conn)\
            .where(Entry.file_pk == sa.bindparam('file_pk'))\
            .order_by(Entry.pk).execute
        for file_pk, in select_files.execute().fetchall():
            with contextlib.closing(select_bibkeys(file_pk=file_pk)) as cursor:
                for entry_pks in iter(functools.partial(cursor.fetchmany, chunksize), []):
                    (first,), (last,) = entry_pks[0], entry_pks[-1]
                    yield first, last

    # All (entry, field, value) rows of one window, ordered so groupby
    # below sees each entry's fields contiguously.
    select_bfv = sa.select([Entry.pk, Value.field, Value.value], bind=conn)\
        .select_from(sa.join(Value, Entry))\
        .where(Entry.pk.between(sa.bindparam('first'), sa.bindparam('last')))\
        .where(Value.field != 'ENTRYTYPE')\
        .order_by(Entry.pk).execute
    # The compiled statement is fed straight to the DB-API's executemany
    # with positional parameters, hence the qmark paramstyle requirement.
    assert conn.dialect.paramstyle == 'qmark'
    update_entry = sa.update(Entry, bind=conn)\
        .values(hash=sa.bindparam('hash'))\
        .where(Entry.pk == sa.bindparam('entry_pk')).compile().string
    update_entry = functools.partial(conn.connection.executemany, update_entry)
    get_entry_pk = operator.itemgetter(0)
    for first, last in windowed_entries():
        rows = select_bfv(first=first, last=last)
        # One (hash, entry_pk) pair per entry, streamed lazily.
        update_entry(
            ((keyid({k: v for _, k, v in grp}, words), entry_pk)
             for entry_pk, grp in itertools.groupby(rows, get_entry_pk)))
def process_file(session: Session, file: File,
                 callback: Callable[[], None]) -> bool:
    """Claim *file* and run *callback* on it exactly once.

    Returns False when the file was already claimed (by us or another
    worker); True when this call performed the processing.  On callback
    failure the error text is recorded on the file and non-UserError
    exceptions are re-raised after committing.
    """
    if file.processing_started_at:
        return False

    # Claim this file by updating the `processing_started_at` timestamp in such
    # a way that it must not have been set before.
    processing_started_at = datetime.datetime.now(timezone.utc)
    result = session.execute(
        update(File.__table__)  # pylint: disable=no-member
        .where(File.id == file.id)
        .where(File.processing_started_at.is_(None))
        .values(processing_started_at=processing_started_at)
    )
    # rowcount == 0 means another worker won the compare-and-set race.
    if result.rowcount == 0:
        return False

    # If we got this far, `file` is ours to process.
    try:
        callback()

        file.processing_started_at = processing_started_at
        file.processing_completed_at = datetime.datetime.now(timezone.utc)
        session.add(file)
        session.commit()

        return True
    except Exception as error:
        # Discard any partial work from the failed callback before
        # recording the outcome.
        session.rollback()

        file.processing_started_at = processing_started_at
        file.processing_completed_at = datetime.datetime.now(timezone.utc)
        # Some errors stringify nicely, some don't (e.g. StopIteration) so we
        # have to format them.
        file.processing_error = str(error) or str(
            traceback.format_exception(error.__class__, error,
                                       error.__traceback__)
        )
        session.add(file)
        session.commit()

        # Expected (user-caused) failures are swallowed; real bugs re-raise.
        if not isinstance(error, UserError):
            raise error

        return True
def _remove_invalid_references_in_builds(migrate_engine):
    # 'buildslaveid' column of 'builds' table don't have Foreign Key
    # constraint on 'id' column of 'buildslaves' table, so it is
    # possible that that reference is invalid.
    # Remove such invalid references for easier resolve of #3088 later.

    metadata = sa.MetaData()
    metadata.bind = migrate_engine

    builds = sautils.Table('builds', metadata, autoload=True)
    buildslaves = sautils.Table('buildslaves', metadata, autoload=True)

    # Outer-join builds onto buildslaves and keep rows whose buildslaveid
    # is set but matches no buildslave row.
    q = sa.select(
        [builds.c.id, builds.c.buildslaveid, buildslaves.c.id]
    ).select_from(
        builds.outerjoin(buildslaves,
                         builds.c.buildslaveid == buildslaves.c.id)
    ).where(
        (buildslaves.c.id == None) & (builds.c.buildslaveid != None))

    invalid_references = q.execute().fetchall()
    if invalid_references:
        # Report invalid references.
        # FIX: renamed from `format`, which shadowed the builtin.
        def format_ref(res):
            return ("builds.id={id} builds.buildslaveid={buildslaveid} "
                    "(not present in 'buildslaves' table)").format(
                        id=res[0], buildslaveid=res[1])

        log.msg(
            "'builds' table has invalid references on 'buildslaves' table:\n"
            "{0}".format("\n".join(map(format_ref, invalid_references))))

        # Remove invalid references.
        for build_id, buildslave_id, none in invalid_references:
            # Sanity check: the outer join guarantees the slave id is NULL.
            assert none is None
            q = sa.update(builds).where(builds.c.id == build_id).values(
                buildslaveid=None)
            q.execute()
def update_categorypageid():
    """Edit a single category page (Python 2 code — note `unicode`).

    On a valid POST the category row is updated via a core UPDATE; author
    and timestamp fields are force-filled server-side before saving.
    """
    error = None
    form = categorypage_idform.CategorypageidForm()
    instance = SysInfo()
    author = g.user
    if (request.method == 'POST' and author is not None
            and request.form['save']):
        if form.validate_on_submit():
            # if author field is suddenly empty
            form.category_author.data = unicode(author)
            # we assign record time to this form element
            # as unicode string to be consistent with other form elements here
            form.category_date.data = unicode(instance.altertime())
            conn = engine.connect()
            stmt = update(Categories).where(
                Categories.id == form.id.data).values(
                    id=form.id.data,
                    category_title=form.category_title.data,
                    category_author=form.category_author.data,
                    category_date=form.category_date.data,
                    category_desc=form.category_desc.data)
            try:
                conn.execute(stmt)
                flash("Category is changed", 'info')
            except exc.IntegrityError:
                # Unique constraint on the title: duplicate name rejected.
                flash("Category with same name is exist", 'error')
            return redirect(url_for('show_categorypageid', id=form.id.data))
        else:
            flash("Category is not changed", 'error')
            return redirect(url_for('show_categorypageid'))
    return render_template('adminboard/editpage_id_category.html',
                           form=form,
                           error=error)
def storeIndeedReview(self, data_list, company_name):
    """Insert or update crawled Indeed reviews.

    For each review in *data_list* (a dict of parallel lists), look it up
    by (review_title, ind_company_name): insert it when absent, otherwise
    update the existing row in place.

    :returns: the number of *successful inserts* (updates are not counted).
    """
    success_input = 0
    for i in range(len(data_list['review_title'])):
        data_input = {}
        data_input['ind_company_name'] = data_list['ind_company_name'][i]
        data_input['rating'] = data_list['rating'][i]
        data_input['position'] = data_list['position'][i]
        data_input['location'] = data_list['location'][i]
        data_input['review_date'] = data_list['review_date'][i]
        data_input['review_title'] = data_list['review_title'][i]
        data_input['review'] = data_list['review'][i]
        try:
            query = db.select([self.indeed_review_crawl]).where(
                db.and_(
                    self.indeed_review_crawl.c.review_title ==
                    data_list['review_title'][i],
                    self.indeed_review_crawl.c.ind_company_name ==
                    data_list['ind_company_name'][i]))
            result_proxy = self.connection.execute(query)
            result_set = result_proxy.fetchall()
            result_proxy.close()
        except Exception as e:
            # BUG FIX: `print(e)` used to come *after* `continue`, making
            # it unreachable; log the error before skipping the row.
            print(e)
            continue
        if (len(result_set) < 1):
            try:
                data_input['company_name'] = company_name
                data_input['created_at'] = datetime.datetime.now()
                data_input['updated_at'] = datetime.datetime.now()
                query = db.insert(self.indeed_review_crawl).values(data_input)
                resultproxy = self.connection.execute(query)
                resultproxy.close()
            except Exception as e:
                print(e)
            else:
                # BUG FIX: only count the insert when it actually succeeded;
                # the counter was previously bumped even after an error.
                success_input = success_input + 1
        else:
            try:
                query = db.update(self.indeed_review_crawl).values(
                    data_input).where(
                        db.and_(
                            self.indeed_review_crawl.c.review_title ==
                            data_list['review_title'][i],
                            self.indeed_review_crawl.c.ind_company_name ==
                            data_list['ind_company_name'][i]))
                resultproxy = self.connection.execute(query)
                resultproxy.close()
            except Exception as e:
                print(e)
    return (success_input)
def modificaOggetto(mioId_ogg, mioProdotto, miaQuantita):
    """Set a stored object's quantity, or delete the object when the new
    quantity is zero or negative, then redirect home."""
    engine = db.create_engine('sqlite:///easyFindDB.db')
    connection = engine.connect()
    metadata = db.MetaData()
    prodotto = db.Table('prodotto', metadata, autoload=True,
                        autoload_with=engine)
    oggetto = db.Table('oggetto', metadata, autoload=True,
                       autoload_with=engine)

    # Resolve the product id from its (upper-cased) name.
    lookup = db.select([prodotto.columns.id]).where(
        prodotto.columns.nome_prodotto == mioProdotto.upper())
    rows = connection.execute(lookup).fetchall()
    idProd = rows[0][0]
    print('----mioProdotto ' + mioProdotto)
    print('----miaQuantita ' + miaQuantita)
    print(idProd)

    # Both branches target the same (product, object) row.
    selettore = db.and_(oggetto.columns.id_prodotto == idProd,
                        oggetto.columns.id_oggetto == mioId_ogg)
    if int(miaQuantita) > 0:
        connection.execute(
            db.update(oggetto).values(quantita=int(miaQuantita)).where(
                selettore))
        print('ho modificato!')
    else:
        connection.execute(db.delete(oggetto).where(selettore))
        print('ho eliminato!')
    return redirect("/Home_page")
async def test_show_permissions(cli, db_conn, company):
    """Contractor-list fields (labels, stars, hours) are hidden until the
    company's corresponding show_* options are enabled."""
    await db_conn.execute(sa_contractors.insert().values(
        id=1,
        company=company.id,
        first_name='Fred',
        last_name='Bloggs',
        last_updated=datetime.now(),
        labels=['foo', 'bar'],
        review_rating=3.5,
        review_duration=1800,
    ))
    url = cli.server.app.router['contractor-list'].url_for(
        company=company.public_key)
    r = await cli.get(url)
    assert r.status == 200, await r.text()
    obj = await r.json()
    assert obj['count'] == 1, obj
    results = obj['results']
    assert len(results) == 1, obj
    # Default: the optional fields are stripped from the response.
    assert 'labels' not in results[0], results[0]
    assert 'review_rating' not in results[0], results[0]
    assert 'review_duration' not in results[0], results[0]

    # Enable all three display options and re-request.
    await db_conn.execute(
        update(sa_companies).values(options={
            'show_labels': True,
            'show_stars': True,
            'show_hours_reviewed': True
        }))
    r = await cli.get(url)
    assert r.status == 200, await r.text()
    obj = await r.json()
    results = obj['results']
    # Now the stored values come through unchanged.
    assert results[0]['labels'] == ['foo', 'bar'], results[0]
    assert results[0]['review_rating'] == 3.5, results[0]
    assert results[0]['review_duration'] == 1800, results[0]
def upgrade():
    """Alembic migration: add slot_name/slot_value columns to the events
    table and backfill them from the JSON payload of existing slot events.
    """
    for column, info in COLUMNS.items():
        migration_utils.create_column(TABLE_NAME,
                                      sa.Column(column, info["type"]))

    bind = op.get_bind()
    session = sa.orm.Session(bind=bind)

    events_table = migration_utils.get_reflected_table(TABLE_NAME, session)
    # Stream rows in batches to keep memory bounded on large tables.
    for row in session.query(events_table).yield_per(BULK_SIZE):
        if row.type_name != "slot":
            continue
        event = json.loads(row.data)
        slot_name = event.get("name")
        slot_value = event.get("value")

        # The requested_slot bookkeeping slot is intentionally skipped.
        if slot_name == REQUESTED_SLOT:
            continue

        query = (
            sa.update(events_table)
            .where(events_table.c.id == row.id)
            .values(
                # Sort keys so that equivalent values can be discarded using DISTINCT
                # in an SQL query
                slot_name=slot_name,
                slot_value=json.dumps(slot_value, sort_keys=True),
            )
        )
        session.execute(query)

    with op.batch_alter_table(TABLE_NAME) as batch_op:
        # Only create index for `slot_name` column, see migration 3fbc8790762e
        # to see why.
        batch_op.create_index(COLUMNS["slot_name"]["index"], ["slot_name"])

    session.commit()
def forgot_password():
    """Password-reset endpoint: generate a random password for the account
    matching the posted e-mail, store its hash, and write the cleartext
    phrase to a secret file path known only to the operator.
    """
    form_forgot = loginform.ForgotForm()
    utils = Utils()
    conn = engine.connect()
    abc = utils.abc_randomizer()
    # Obfuscated constant path assembled from the randomized alphabet.
    secret_passwd = "/{}{}{}{}/{}{}{}{}{}/{}{}{}{}{}{}{}{}{}{}_{}{}{}_{}{}{}{}{}{}".format(
        abc[7], abc[14], abc[12], abc[4], abc[6], abc[0], abc[11], abc[24],
        abc[12], abc[17], abc[4], abc[3], abc[4], abc[24], abc[4], abc[3],
        abc[12], abc[0], abc[13], abc[13], abc[4], abc[22], abc[15], abc[0],
        abc[18], abc[18], abc[22], abc[3])
    if request.method == 'POST' and form_forgot.validate_on_submit():
        user = sql.session.query(Users).filter_by(
            email=form_forgot.email.data).first()
        # BUG FIX: `user` is None when the e-mail is unknown; the old code
        # dereferenced `user.email` unconditionally and crashed with
        # AttributeError instead of reaching the "not found" branch.
        if user is not None:
            passwordphrase = utils.randomstr(15)
            with open(secret_passwd, 'w') as f:
                f.write(str(passwordphrase))
            if isfile(secret_passwd):
                stmt = update(Users).where(Users.id == user.id).values(
                    password=utils.hash_password(passwordphrase))
                conn.execute(stmt)
                conn.close()
                flash(
                    'Autogenerated password saved in safe place only my Creator knows',
                    'info')
                return redirect(url_for('forgot_password'))
        else:
            flash(
                'Entered email {} not found. No password generated.'.format(
                    form_forgot.email.data), 'error')
            return redirect(url_for('forgot_password'))
    return render_template('adminboard/forgot.html', form=form_forgot)
def issue_alert_and_update_alertlog(alertlog_id):
    """Send the SMS for one AlertLog entry and stamp it as issued.

    (Python 2 code — note the print statement.)  The recipient and reply
    options differ between the primary and secondary contact.
    """
    alertlog_obj = db.session.query(AlertLog).get(alertlog_id)
    recipient = alertlog_obj.recipient
    if alertlog_obj.alert.petmed_id:
        alert_id = alertlog_obj.alert.id
        petmed = alertlog_obj.alert.petmedication
        medication = petmed.medication
        medication_name = medication.name
        companion_name = petmed.petvet.companion.name
        med_or_food_msg_str = " needs to take " + medication_name
        if recipient == "primary":
            recipient_contact = db.session.query(Alert).get(
                alertlog_obj.alert_id).primary_alert_phone
            # Primary contacts may also forward to the secondary contact.
            options_str = "' and one of the following: 'given', 'delay' (+2 hrs), or 'forward' (to secondary contact)."
        else:
            recipient_contact = db.session.query(Alert).get(
                alertlog_obj.alert_id).secondary_alert_phone
            options_str = "' and one of the following: 'given' or 'delay' (+2 hrs)."
        msg_body = companion_name + med_or_food_msg_str + ". Please reply with the number '" + str(
            alert_id) + options_str
        send_messages.send_sms_message(msg_body=msg_body,
                                       phone_number=recipient_contact)
        print "sent text msg for alertlog.id ", alertlog_id
        # Record when the alert was issued (core update on the mapped table).
        update_alertlog_issued = update(
            AlertLog.__table__).where(AlertLog.id == alertlog_id).values({
                AlertLog.alert_issued: datetime.datetime.now(),
                AlertLog.updated_at: datetime.datetime.now()
            })
        db.session.execute(update_alertlog_issued)
        db.session.commit()
def update(
    self,
    db: Session,
    id: Any,
    *,
    obj_in: Union[UpdateSchemaType, dict[str, Any]],
) -> None:
    """
    Update a record by primary key.

    **Parameters**

    * `db`: SQLAlchemy session
    * `id`: Record id
    * `obj_in`: Pydantic model (schema) or plain dict of new values

    **Returns**

    * `returns` None; the change is committed on the given session
    """
    if isinstance(obj_in, dict):
        payload = obj_in
    else:
        # Only fields the caller explicitly set are written.
        payload = obj_in.dict(exclude_unset=True)
    stmt = update(self.model).where(self.model.id == id).values(**payload)
    db.execute(stmt)
    db.commit()
def test_update_ordereddict(self): table1 = self.tables.mytable # Confirm that ordered dicts are treated as normal dicts, # columns sorted in table order values = util.OrderedDict(( (table1.c.name, table1.c.name + "lala"), (table1.c.myid, func.do_stuff(table1.c.myid, literal("hoho"))), )) self.assert_compile( update(table1).where( (table1.c.myid == func.hoho(4)) & (table1.c.name == literal("foo") + table1.c.name + literal("lala")), ).values(values), "UPDATE mytable " "SET " "myid=do_stuff(mytable.myid, :param_1), " "name=(mytable.name || :name_1) " "WHERE " "mytable.myid = hoho(:hoho_1) AND " "mytable.name = :param_2 || mytable.name || :param_3", )
def _process_boutique_item(self, _, update_flag, item, spider):
    """Persist a scraped boutique item: UPDATE when *update_flag* is set
    (matched on slug_id), INSERT otherwise.

    The statement is compiled with literal binds and run asynchronously on
    the twisted db_pool; errors and results are handled via callbacks.
    """
    boutique = item['boutique']
    # Drop None attributes so they don't overwrite existing columns.
    item_values = removeNone(boutique.__dict__)
    if update_flag:
        spider.log('updating boutique item', level=logging.DEBUG)
        statement = update(boutiques_table).\
            where(boutiques_table.c.slug_id == item_values['slug_id']).\
            values(item_values).\
            compile(compile_kwargs={"literal_binds": True})
        st_type = 'update'
    else:
        spider.log('inserting boutique item', level=logging.DEBUG)
        statement = insert(boutiques_table).values(item_values).\
            compile(compile_kwargs={"literal_binds": True})
        st_type = 'insert'
    # runOperation takes raw SQL, hence the literal-bind compilation above.
    d = self.db_pool.runOperation(str(statement))
    d.addErrback(self._handle_error, item, spider)
    d.addCallback(self._log_result, st_type, spider)
    spider.log(statement, level=logging.DEBUG)