def get_game_id(self):
    if self.game_override is None:
        game = twitch.get_game_playing()
        if game is None:
            return None
        game_id, game_name = game["_id"], game["name"]

        games = self.metadata.tables["games"]
        with self.engine.begin() as conn:
            game_data.lock_tables(conn, self.metadata)
            old_id = conn.execute(sqlalchemy.select([games.c.id]).where(games.c.name == game_name)).first()
            if old_id is None:
                # No game with this name yet: insert it, updating the name on an id conflict.
                query = insert(games)
                query = query.on_conflict_do_update(
                    index_elements=[games.c.id],
                    set_={
                        "name": query.excluded.name,
                    },
                )
                conn.execute(query, {
                    "id": game_id,
                    "name": game_name,
                })
            else:
                # A game with this name but possibly a different id already exists:
                # insert the new id under a temporary name to dodge the unique name
                # constraint, merge the old game's data into it, then rename it.
                old_id, = old_id
                conn.execute(insert(games).on_conflict_do_nothing(index_elements=[games.c.id]), {
                    "id": game_id,
                    "name": "__LRRBOT_TEMP_GAME_%s__" % game_name,
                })
                game_data.merge_games(conn, self.metadata, old_id, game_id, game_id)
                conn.execute(games.update().where(games.c.id == game_id), {
                    "name": game_name,
                })
        return game_id
    return self.game_override

def test_on_conflict_do_nothing_connectionless(self):
    users = self.tables.users_xtra

    with testing.db.connect() as conn:
        result = conn.execute(
            insert(users).on_conflict_do_nothing(
                constraint="uq_login_email"
            ),
            dict(name="name1", login_email="email1"),
        )
        eq_(result.inserted_primary_key, [1])
        eq_(result.returned_defaults, (1,))

    result = testing.db.execute(
        insert(users).on_conflict_do_nothing(constraint="uq_login_email"),
        dict(name="name2", login_email="email1"),
    )
    eq_(result.inserted_primary_key, None)
    eq_(result.returned_defaults, None)

    eq_(
        testing.db.execute(
            users.select().where(users.c.id == 1)
        ).fetchall(),
        [(1, "name1", "email1", None)],
    )

def test_on_conflict_do_update_special_types_in_set(self):
    bind_targets = self.tables.bind_targets

    with testing.db.connect() as conn:
        i = insert(bind_targets)
        conn.execute(i, {"id": 1, "data": "initial data"})

        eq_(
            conn.scalar(sql.select([bind_targets.c.data])),
            "initial data processed",
        )

        i = insert(bind_targets)
        i = i.on_conflict_do_update(
            index_elements=[bind_targets.c.id],
            set_=dict(data="new updated data"),
        )
        conn.execute(i, {"id": 1, "data": "new inserted data"})

        eq_(
            conn.scalar(sql.select([bind_targets.c.data])),
            "new updated data processed",
        )

def test_on_conflict_do_nothing_target(self):
    users = self.tables.users

    with testing.db.connect() as conn:
        result = conn.execute(
            insert(users).on_conflict_do_nothing(
                index_elements=users.primary_key.columns
            ),
            dict(id=1, name="name1"),
        )
        eq_(result.inserted_primary_key, [1])
        eq_(result.returned_defaults, None)

        result = conn.execute(
            insert(users).on_conflict_do_nothing(
                index_elements=users.primary_key.columns
            ),
            dict(id=1, name="name2"),
        )
        eq_(result.inserted_primary_key, [1])
        eq_(result.returned_defaults, None)

        eq_(
            conn.execute(users.select().where(users.c.id == 1)).fetchall(),
            [(1, "name1")],
        )

def test_on_conflict_do_update_no_row_actually_affected(self):
    users = self.tables.users_xtra

    with testing.db.connect() as conn:
        self._exotic_targets_fixture(conn)
        i = insert(users)
        i = i.on_conflict_do_update(
            index_elements=[users.c.login_email],
            set_=dict(name="new_name"),
            where=(i.excluded.name == "other_name"),
        )
        result = conn.execute(
            i, dict(name="name2", login_email="*****@*****.**")
        )

        eq_(result.returned_defaults, None)
        eq_(result.inserted_primary_key, None)

        eq_(
            conn.execute(users.select()).fetchall(),
            [
                (1, "name1", "*****@*****.**", "not"),
                (2, "name2", "*****@*****.**", "not"),
            ],
        )

def _exotic_targets_fixture(self, conn):
    users = self.tables.users_xtra
    conn.execute(
        insert(users),
        dict(
            id=1,
            name="name1",
            login_email="*****@*****.**",
            lets_index_this="not",
        ),
    )
    conn.execute(
        users.insert(),
        dict(
            id=2,
            name="name2",
            login_email="*****@*****.**",
            lets_index_this="not",
        ),
    )
    eq_(
        conn.execute(users.select().where(users.c.id == 1)).fetchall(),
        [(1, "name1", "*****@*****.**", "not")],
    )

def external_clips_save(session):
    ext_channel = server.db.metadata.tables["external_channel"]
    ext_vids = server.db.metadata.tables["external_video"]
    if flask.request.values['action'] == "videos":
        # Save video selection
        with server.db.engine.begin() as conn:
            videos = []
            for video in flask.request.values.getlist('selected'):
                chanid, vodid = video.split('-', 1)
                videos.append({
                    "channel": int(chanid),
                    "vodid": vodid,
                })
            query = insert(ext_vids).on_conflict_do_nothing(index_elements=[ext_vids.c.vodid])
            conn.execute(query, videos)
            conn.execute(ext_vids.delete().where(ext_vids.c.vodid.notin_(v['vodid'] for v in videos)))
        return flask.redirect(flask.url_for('clips_vidlist'), code=303)
    elif flask.request.values['action'] == "add":
        # Add a new channel
        channel = get_user(name=flask.request.values['channel'])
        with server.db.engine.begin() as conn:
            conn.execute(ext_channel.insert(), channel=channel.name)
        return flask.redirect(flask.url_for('external_clips'), code=303)
    elif flask.request.values['action'] == "remove":
        channel = int(flask.request.values['channel'])
        with server.db.engine.begin() as conn:
            conn.execute(ext_channel.delete().where(ext_channel.c.id == channel))
        return flask.redirect(flask.url_for('external_clips'), code=303)
    else:
        raise ValueError("Unexpected mode %r" % flask.request.values['action'])

def set_game_name(lrrbot, conn, event, respond_to, name):
    """
    Command: !game display NAME
    Section: info

    eg. !game display Resident Evil: Man Fellating Giraffe

    Change the display name of the current game to NAME.
    """
    game_id = lrrbot.get_game_id()
    if game_id is None:
        conn.privmsg(respond_to, "Not currently playing any game.")
        return
    show_id = lrrbot.get_show_id()

    games = lrrbot.metadata.tables["games"]
    game_per_show_data = lrrbot.metadata.tables["game_per_show_data"]
    with lrrbot.engine.begin() as pg_conn:
        name_query = sqlalchemy.select([games.c.name]).where(games.c.id == game_id)
        # NULLIF: https://www.postgresql.org/docs/9.6/static/functions-conditional.html#FUNCTIONS-NULLIF
        query = insert(game_per_show_data).values({
            "game_id": game_id,
            "show_id": show_id,
            'display_name': sqlalchemy.func.nullif(name, name_query),
        })
        query = query.on_conflict_do_update(
            index_elements=[game_per_show_data.c.game_id, game_per_show_data.c.show_id],
            set_={
                'display_name': query.excluded.display_name,
            },
        )
        pg_conn.execute(query)
        real_name, = pg_conn.execute(name_query).first()

    conn.privmsg(respond_to, "OK, I'll start calling %s \"%s\"" % (real_name, name))

def test_on_conflict_do_update_exotic_targets_three(self):
    users = self.tables.users_xtra

    with testing.db.connect() as conn:
        self._exotic_targets_fixture(conn)
        # try unique constraint: cause an upsert on target
        # login_email, not id
        i = insert(users)
        i = i.on_conflict_do_update(
            constraint=self.unique_constraint,
            set_=dict(
                id=i.excluded.id,
                name=i.excluded.name,
                login_email=i.excluded.login_email,
            ),
        )
        # note: lets_index_this value totally ignored in SET clause.
        conn.execute(
            i,
            dict(
                id=42,
                name='nameunique',
                login_email='*****@*****.**',
                lets_index_this='unique',
            ),
        )

        eq_(
            conn.execute(
                users.select().
                where(users.c.login_email == '*****@*****.**')
            ).fetchall(),
            [(42, 'nameunique', '*****@*****.**', 'not')],
        )

def test_on_conflict_do_update_multivalues(self):
    users = self.tables.users

    with testing.db.connect() as conn:
        conn.execute(users.insert(), dict(id=1, name="name1"))
        conn.execute(users.insert(), dict(id=2, name="name2"))

        i = insert(users)
        i = i.on_conflict_do_update(
            index_elements=users.primary_key.columns,
            set_=dict(name="updated"),
            where=(i.excluded.name != "name12"),
        ).values(
            [
                dict(id=1, name="name11"),
                dict(id=2, name="name12"),
                dict(id=3, name="name13"),
                dict(id=4, name="name14"),
            ]
        )

        result = conn.execute(i)
        eq_(result.inserted_primary_key, [None])
        eq_(result.returned_defaults, None)

        eq_(
            conn.execute(users.select().order_by(users.c.id)).fetchall(),
            [(1, "updated"), (2, "name2"), (3, "name13"), (4, "name14")],
        )

def test_on_conflict_do_update_exotic_targets_two(self):
    users = self.tables.users_xtra

    with testing.db.connect() as conn:
        self._exotic_targets_fixture(conn)
        # try primary key constraint: cause an upsert on unique id column
        i = insert(users)
        i = i.on_conflict_do_update(
            index_elements=users.primary_key.columns,
            set_=dict(
                name=i.excluded.name, login_email=i.excluded.login_email
            ),
        )
        result = conn.execute(
            i,
            dict(
                id=1,
                name="name2",
                login_email="*****@*****.**",
                lets_index_this="not",
            ),
        )
        eq_(result.inserted_primary_key, [1])
        eq_(result.returned_defaults, None)

        eq_(
            conn.execute(users.select().where(users.c.id == 1)).fetchall(),
            [(1, "name2", "*****@*****.**", "not")],
        )

def test_on_conflict_do_update_exotic_targets_four_no_pk(self):
    users = self.tables.users_xtra

    with testing.db.connect() as conn:
        self._exotic_targets_fixture(conn)
        # try unique constraint by name: cause an
        # upsert on target login_email, not id
        i = insert(users)
        i = i.on_conflict_do_update(
            index_elements=[users.c.login_email],
            set_=dict(
                id=i.excluded.id,
                name=i.excluded.name,
                login_email=i.excluded.login_email,
            ),
        )

        result = conn.execute(
            i, dict(name="name3", login_email="*****@*****.**")
        )
        eq_(result.inserted_primary_key, [1])
        eq_(result.returned_defaults, (1,))

        eq_(
            conn.execute(users.select().order_by(users.c.id)).fetchall(),
            [
                (1, "name3", "*****@*****.**", "not"),
                (2, "name2", "*****@*****.**", "not"),
            ],
        )

def test_on_conflict_do_update_exotic_targets_five(self):
    users = self.tables.users_xtra

    with testing.db.connect() as conn:
        self._exotic_targets_fixture(conn)
        # try bogus index
        i = insert(users)
        i = i.on_conflict_do_update(
            index_elements=self.bogus_index.columns,
            index_where=self.bogus_index.dialect_options["postgresql"][
                "where"
            ],
            set_=dict(
                name=i.excluded.name, login_email=i.excluded.login_email
            ),
        )

        assert_raises(
            exc.ProgrammingError,
            conn.execute,
            i,
            dict(
                id=1,
                name="namebogus",
                login_email="*****@*****.**",
                lets_index_this="bogus",
            ),
        )

def test_on_conflict_do_nothing(self):
    users = self.tables.users

    with testing.db.connect() as conn:
        conn.execute(
            insert(users).on_conflict_do_nothing(),
            dict(id=1, name='name1'),
        )
        conn.execute(
            insert(users).on_conflict_do_nothing(),
            dict(id=1, name='name2'),
        )
        eq_(
            conn.execute(users.select().where(users.c.id == 1)).fetchall(),
            [(1, 'name1')],
        )

def test_on_conflict_do_update_exotic_targets_four(self):
    users = self.tables.users_xtra

    with testing.db.connect() as conn:
        self._exotic_targets_fixture(conn)
        # try unique constraint by name: cause an
        # upsert on target login_email, not id
        i = insert(users)
        i = i.on_conflict_do_update(
            constraint=self.unique_constraint.name,
            set_=dict(
                id=i.excluded.id,
                name=i.excluded.name,
                login_email=i.excluded.login_email,
            ),
        )
        # note: lets_index_this value totally ignored in SET clause.

        result = conn.execute(
            i,
            dict(
                id=43,
                name='nameunique2',
                login_email='*****@*****.**',
                lets_index_this='unique',
            ),
        )
        eq_(result.inserted_primary_key, [43])
        eq_(result.returned_defaults, None)

        eq_(
            conn.execute(
                users.select().
                where(users.c.login_email == '*****@*****.**')
            ).fetchall(),
            [(43, 'nameunique2', '*****@*****.**', 'not')],
        )

def insert_jockey(self, item):
    stmt = insert(Jockey).values(
        **item
    ).on_conflict_do_update(
        constraint='jockeys_pkey',
        set_=dict(item),
    )
    self.conn.execute(stmt)

def insert_horse(self, item):
    stmt = insert(Horse).values(
        **item
    ).on_conflict_do_update(
        constraint='horses_pkey',
        set_=dict(item),
    )
    self.conn.execute(stmt)

def add_label(label: str, session_id: int):
    session = Session.query.get_or_404(session_id)
    db.session.execute(insert(Label).values(name=label).on_conflict_do_nothing())
    db.session.commit()
    session.labels.append(Label.query.filter_by(name=label).one())
    db.session.add(session)
    db.session.commit()

def test_bad_args(self):
    assert_raises(
        ValueError,
        insert(self.tables.users).on_conflict_do_nothing,
        constraint='id', index_elements=['id'],
    )
    assert_raises(
        ValueError,
        insert(self.tables.users).on_conflict_do_update,
        constraint='id', index_elements=['id'],
    )
    assert_raises(
        ValueError,
        insert(self.tables.users).on_conflict_do_update,
        constraint='id',
    )
    assert_raises(
        ValueError,
        insert(self.tables.users).on_conflict_do_update,
    )

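# --- Sketch (not from the test suite above): the valid calling forms implied
# by test_bad_args. `constraint` and `index_elements` are mutually exclusive,
# and on_conflict_do_update() additionally needs a conflict target plus a
# non-empty `set_`. The `users` table and `users_pkey` name are illustrative
# assumptions.
from sqlalchemy.dialects.postgresql import insert

def valid_on_conflict_forms(users):
    stmt = insert(users)
    do_nothing = stmt.on_conflict_do_nothing(index_elements=['id'])      # ok
    do_update_by_index = stmt.on_conflict_do_update(                     # ok
        index_elements=['id'],
        set_={'name': stmt.excluded.name},
    )
    do_update_by_constraint = stmt.on_conflict_do_update(                # ok
        constraint='users_pkey',  # assumed constraint name
        set_={'name': stmt.excluded.name},
    )
    return do_nothing, do_update_by_index, do_update_by_constraint
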
async def patreon_login(session):
    code = flask.request.args.get('code')
    state_param = flask.request.args.get('state')
    state_sess = flask.session.pop('patreon_state', None)
    if code is None or state_param is None or state_sess is None:
        flask.flash('OAuth parameters missing', 'error')
        return flask.redirect(flask.url_for('patreon_index'))

    if state_param != state_sess:
        flask.flash('Nonce mismatch: %r not equal to %r' % (state_param, state_sess), 'error')
        return flask.redirect(flask.url_for('patreon_index'))

    access_token, refresh_token, expiry = await patreon.request_token('authorization_code',
        code=code,
        redirect_uri=config['patreon_redirect_uri'],
    )
    user = await patreon.current_user(access_token)

    patreon_users = server.db.metadata.tables['patreon_users']
    users = server.db.metadata.tables['users']

    pledge_start = None
    for pledge in user['data'].get('relationships', {}).get('pledges', {}).get('data', []):
        for obj in user['included']:
            if obj['type'] == pledge['type'] and obj['id'] == pledge['id'] and obj['attributes']['amount_cents'] > 0:
                pledge_start = dateutil.parser.parse(obj['attributes']['created_at'])
                break
        else:
            continue
        break

    with server.db.engine.begin() as conn:
        query = insert(patreon_users).returning(patreon_users.c.id)
        query = query.on_conflict_do_update(
            index_elements=[patreon_users.c.patreon_id],
            set_={
                'full_name': query.excluded.full_name,
                'access_token': query.excluded.access_token,
                'refresh_token': query.excluded.refresh_token,
                'token_expires': query.excluded.token_expires,
                'pledge_start': query.excluded.pledge_start,
            },
        )
        patreon_user_id, = conn.execute(query,
            patreon_id=user['data']['id'],
            full_name=user['data']['attributes']['full_name'],
            access_token=access_token,
            refresh_token=refresh_token,
            token_expires=expiry,
            pledge_start=pledge_start,
        ).first()
        conn.execute(users.update().where(users.c.id == session['user']['id']), patreon_user_id=patreon_user_id)

    flask.flash('Patreon account linked.', 'success')
    return flask.redirect(flask.url_for('patreon_index'))

def upsert_into(self, table):
    """
    Upsert from a temporary table into another table.
    """
    return SessionContext.session.execute(
        insert(table).from_select(
            self.c,
            self,
        ).on_conflict_do_nothing(),
    ).rowcount

def upsert(conn, values):
    current_time = datetime.utcnow()
    update_values = values.copy()
    update_values["updated_date"] = current_time
    values["inserted_date"] = current_time
    statement = (
        postgresql.insert(Offer)
        .values(**values)
        .on_conflict_do_update(index_elements=["id"], set_=update_values)
    )
    conn.execute(statement)

def test_on_conflict_do_nothing_target(self):
    users = self.tables.users

    with testing.db.connect() as conn:
        conn.execute(
            insert(users)
            .on_conflict_do_nothing(
                index_elements=users.primary_key.columns),
            dict(id=1, name='name1'),
        )
        conn.execute(
            insert(users)
            .on_conflict_do_nothing(
                index_elements=users.primary_key.columns),
            dict(id=1, name='name2'),
        )
        eq_(
            conn.execute(users.select().where(users.c.id == 1)).fetchall(),
            [(1, 'name1')],
        )

def postgres_merge(merge_stmt, compiler, **kwargs):
    table = merge_stmt.table
    primary_key = table.primary_key

    stmt = postgresql.insert(table, merge_stmt.values)
    stmt = stmt.on_conflict_do_update(
        index_elements=['id'],
        set_={
            k: getattr(stmt.excluded, k)
            for k in stmt.parameters[0]
            if k not in primary_key
        },
    )
    return compiler.process(stmt, **kwargs)

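# --- Sketch (assumed context, not from the snippet above): postgres_merge
# reads like the PostgreSQL handler for a custom MERGE construct built with
# SQLAlchemy's compiler extension. Wiring it up might look roughly like this;
# the `Merge` class and its attributes are assumptions inferred from how
# postgres_merge accesses `merge_stmt.table` and `merge_stmt.values`.
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.sql.expression import ClauseElement, Executable

class Merge(Executable, ClauseElement):
    inherit_cache = False

    def __init__(self, table, values):
        self.table = table    # target Table object
        self.values = values  # list of row dicts to merge

@compiles(Merge, 'postgresql')
def _compile_merge_postgresql(merge_stmt, compiler, **kwargs):
    # Delegate to the dialect-specific handler defined above.
    return postgres_merge(merge_stmt, compiler, **kwargs)
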
def test_on_conflict_do_update_exotic_targets_six(self):
    users = self.tables.users_xtra

    with testing.db.connect() as conn:
        conn.execute(
            insert(users),
            dict(
                id=1,
                name="name1",
                login_email="*****@*****.**",
                lets_index_this="unique_name",
            ),
        )

        i = insert(users)
        i = i.on_conflict_do_update(
            index_elements=self.unique_partial_index.columns,
            index_where=self.unique_partial_index.dialect_options[
                "postgresql"
            ]["where"],
            set_=dict(
                name=i.excluded.name, login_email=i.excluded.login_email
            ),
        )

        conn.execute(
            i,
            [
                dict(
                    name="name1",
                    login_email="*****@*****.**",
                    lets_index_this="unique_name",
                )
            ],
        )

        eq_(
            conn.execute(users.select()).fetchall(),
            [(1, "name1", "*****@*****.**", "unique_name")],
        )

def test_on_conflict_do_nothing(self):
    users = self.tables.users

    with testing.db.connect() as conn:
        result = conn.execute(
            insert(users).on_conflict_do_nothing(),
            dict(id=1, name='name1'),
        )
        eq_(result.inserted_primary_key, [1])
        eq_(result.returned_defaults, None)

        result = conn.execute(
            insert(users).on_conflict_do_nothing(),
            dict(id=1, name='name2'),
        )
        eq_(result.inserted_primary_key, [1])
        eq_(result.returned_defaults, None)

        eq_(
            conn.execute(users.select().where(users.c.id == 1)).fetchall(),
            [(1, 'name1')],
        )

def set(engine, metadata, key, value):
    state = metadata.tables['state']
    with engine.begin() as conn:
        query = insert(state)
        query = query.on_conflict_do_update(
            index_elements=[state.c.key],
            set_={
                'value': query.excluded.value,
            },
        )
        conn.execute(query, {
            'key': key,
            'value': value,
        })

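# --- Sketch (assumed companion, not in the original): the read side of the
# `state` key/value helper above would plausibly look like this, using the
# same 1.x-style select() as the rest of this corpus.
import sqlalchemy

def get(engine, metadata, key, default=None):
    state = metadata.tables['state']
    with engine.begin() as conn:
        row = conn.execute(
            sqlalchemy.select([state.c.value]).where(state.c.key == key)
        ).first()
    return row[0] if row is not None else default
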
def increment(con, table, dt, data, **values):
    stmt = insert(table).values(
        date=dt,
        hits=1,
        transfer=data['content_length'],
        **values
    ).on_conflict_do_update(
        index_elements=['date'],
        set_=dict(
            hits=table.c.hits + 1,
            transfer=table.c.transfer + data['content_length'],
        ),
    )
    con.execute(stmt)

def test_on_conflict_do_update_exotic_targets_six(self):
    users = self.tables.users_xtra

    with testing.db.connect() as conn:
        conn.execute(
            insert(users),
            dict(
                id=1, name='name1',
                login_email='*****@*****.**',
                lets_index_this='unique_name',
            ),
        )

        i = insert(users)
        i = i.on_conflict_do_update(
            index_elements=self.unique_partial_index.columns,
            index_where=self.unique_partial_index.dialect_options['postgresql']['where'],
            set_=dict(
                name=i.excluded.name,
                login_email=i.excluded.login_email),
        )
        conn.execute(
            i,
            [
                dict(name='name1',
                     login_email='*****@*****.**',
                     lets_index_this='unique_name'),
            ],
        )

        eq_(
            conn.execute(users.select()).fetchall(),
            [
                (1, 'name1', '*****@*****.**', 'unique_name'),
            ],
        )

def process_clip(clip):
    # I wish there was a better way to get the clip "broadcast time"...
    # clip['created_at'] exists, but it's the time the clip was created, not when
    # it was broadcast, so it's close when clipped live, but not useful when
    # clipped from a vod...
    if clip['vod']:
        voddata = get_video_info(clip['vod']['id'])
        if 'httperror' not in voddata:
            match = RE_STARTTIME.match(clip['vod']['url'])
            if not match:
                raise ValueError("Couldn't find start time in %r for %s" % (clip['vod']['url'], clip['slug']))
            offset = datetime.timedelta(0)
            for piece in match.captures(1):
                val, unit = int(piece[:-1]), piece[-1]
                if unit == 's':
                    offset += datetime.timedelta(seconds=val)
                elif unit == 'm':
                    offset += datetime.timedelta(minutes=val)
                elif unit == 'h':
                    offset += datetime.timedelta(hours=val)
            vod_start = dateutil.parser.parse(voddata['created_at'])
            clip_start = vod_start + offset
        else:
            clip_start = dateutil.parser.parse(clip['created_at'])
    else:
        clip_start = dateutil.parser.parse(clip['created_at'])

    data = {
        "slug": clip['slug'],
        "title": clip['title'],
        "vodid": clip['vod']['id'] if clip['vod'] else None,
        "time": clip_start,
        "data": clip,
        "deleted": False,
        "channel": clip['broadcaster']['name'],
    }
    with engine.begin() as conn:
        query = postgresql.insert(TBL_CLIPS)
        query = query.on_conflict_do_update(
            index_elements=[TBL_CLIPS.c.slug],
            set_={
                'title': query.excluded.title,
                'vodid': query.excluded.vodid,
                'time': query.excluded.time,
                'data': query.excluded.data,
                'deleted': query.excluded.deleted,
                'channel': query.excluded.channel,
            },
        )
        conn.execute(query, data)

def watch(self, guild_xid: int, user_xid: int, note: Optional[str] = None) -> None:
    values: dict[str, Any] = {
        "guild_xid": guild_xid,
        "user_xid": user_xid,
    }
    upsert = insert(Watch).values(**values)
    if note:
        max_note_len = Watch.note.property.columns[0].type.length  # type: ignore
        values["note"] = note[:max_note_len]
        upsert = upsert.on_conflict_do_update(
            constraint="watches_pkey",
            index_where=and_(
                Watch.guild_xid == values["guild_xid"],
                Watch.user_xid == values["user_xid"],
            ),
            set_=dict(note=upsert.excluded.note),
        )
    else:
        upsert = upsert.on_conflict_do_nothing()
    DatabaseSession.execute(upsert, values)
    DatabaseSession.commit()

def store_1_observation(item: DataItem) -> None:
    """Process and store a single observation.

    - find insert or update date
    - store json in Postgresql

    Args:
        item (dict): ObservationItem, Observation item containing all parameters.

    Returns:
        None
    """
    # Insert simple observations,
    # each row contains uniq_id, update timestamp and full json body
    elem = item.elem
    uniq_id = elem["ID_perm_SINP"]
    logger.debug(f"Storing observation {uniq_id} to database")

    # Find last update timestamp
    if "Date_modification" in elem:
        update_date = elem["Date_modification"]
    else:
        update_date = elem["Date_creation"]

    # Store in Postgresql
    metadata = item.metadata
    source = item.source
    insert_stmt = insert(metadata).values(
        uuid=uniq_id,
        source=source,
        update_ts=update_date,
        item=elem,
    )
    do_update_stmt = insert_stmt.on_conflict_do_update(
        constraint=metadata.primary_key,
        set_=dict(update_ts=update_date, item=elem),
        where=(metadata.c.update_ts < update_date),
    )
    item.conn.execute(do_update_stmt)
    return None

def save_product_info(self, shop, product_id, product_json):
    try:
        title = product_json.get('title')
        tags = product_json.get('tags')
        image = product_json.get('image')
        vendor = product_json.get('vendor')
        # Using a with statement ensures that the connection is always released
        # back into the pool at the end of statement (even if an error occurs)
        with self.db.connect() as conn:
            from sqlalchemy.dialects.postgresql import insert
            insert_stmt = insert(self.SHOPIFY_PRODUCTS).values(
                shop=shop,
                product_id=product_id,
                product_json=product_json,
                title=title,
                tags=tags,
                image=image,
                vendor=vendor,
                timestamp=datetime.datetime.now(),
            )
            do_update_stmt = insert_stmt.on_conflict_do_update(
                index_elements=['shop', 'product_id'],
                set_=dict(
                    product_json=product_json,
                    title=title,
                    tags=tags,
                    image=image,
                    vendor=vendor,
                    timestamp=datetime.datetime.now(),
                ),
            )
            conn.execute(do_update_stmt)
    except Exception as e:
        # If something goes wrong, handle the error in this section. This might
        # involve retrying or adjusting parameters depending on the situation.
        # [START_EXCLUDE]
        self.logger.exception(e)
        return {'status': 'Failed'}
    return {'status': 'OK'}

def upsert(instance, reread=True):
    """
    Inserts or updates row

    :param instance: SQLAlchemy model instance
    :param reread: bool whether to return upserted instance
    :return: None | SQLAlchemy instance
    """
    model = instance.__class__
    # constraints = instance.__table__.constraints
    # unique_constraints = [c for c in constraints if isinstance(c, UniqueConstraint)]
    columns = instance.__table__.columns
    unique = {c.name for c in columns if c.unique}
    defaults = {c.name: c.default.arg for c in columns if c.default}

    if not unique:  # or unique_constraints:
        raise Exception('No unique constraints in model, no need in upsert')

    # Get data to update
    schema = []
    for c in columns:
        k = '-%s' % c.name if c.primary_key else c.name
        schema.append(k)
    data = instance.to_dict(schema=schema, is_greedy=False)

    # Set default values if needed
    for k, v in defaults.items():
        if data.get(k) is None:
            if callable(v):
                v = v(ctx=app.app_context)
            data[k] = v

    statement = insert(model).values(**data).on_conflict_do_update(
        set_=data,
        index_elements=unique,
        # constraint=unique_constraints  # Must be only one
    )
    app.db.session.execute(statement)
    app.db.session.commit()

    if reread:
        return model.query.filter_by(**{c: data[c] for c in unique}).one()

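# --- Sketch (hypothetical usage, not from the original): calling the generic
# upsert() helper above with an ORM instance. `User` is an assumed model with
# a unique `email` column and the to_dict() method the helper requires.
def example_upsert_user():
    user = User(email='a@example.com', name='Alice')
    saved = upsert(user)  # inserts, or updates the existing row with this email
    assert saved.email == 'a@example.com'
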
def update_case_for_person(
    session: Session,
    user_context: UserContext,
    client: ETLClient,
    action: CaseUpdateActionType,
    comment: Optional[str] = None,
) -> CaseUpdate:
    """This method updates the case_updates table with the newly provided actions.

    Because the underlying table does not have foreign key constraints, independent
    validation must be provided before calling this method.
    """
    action_ts = user_context.now()
    last_version = serialize_client_case_version(action, client).to_json()
    officer_id = user_context.officer_id
    person_external_id = user_context.person_id(client)
    insert_statement = (
        insert(CaseUpdate)
        .values(
            person_external_id=person_external_id,
            officer_external_id=officer_id,
            state_code=client.state_code,
            action_type=action.value,
            action_ts=action_ts,
            last_version=last_version,
            comment=comment,
        )
        .on_conflict_do_update(
            constraint="unique_person_officer_action_triple",
            set_={
                "last_version": last_version,
                "action_ts": action_ts,
                "comment": comment,
            },
        )
    )
    session.execute(insert_statement)
    session.commit()

    return (
        session.query(CaseUpdate)
        .filter(
            CaseUpdate.person_external_id == person_external_id,
            CaseUpdate.officer_external_id == officer_id,
            CaseUpdate.action_type == action.value,
        )
        .one()
    )

def add_errors_from_query(mission_type, elements):
    for element in elements:
        if element.get('type') == 'node':
            lon = element.get('lon')
            lat = element.get('lat')
        if element.get('type') == 'way' or element.get('type') == 'relation':
            center = element.get('center')
            if center:
                lon = center.get('lon')
                lat = center.get('lat')
            else:
                continue
        geom = 'SRID=4326;POINT(' + str(lon) + ' ' + str(lat) + ')'
        lon *= 10000000
        lat *= 10000000
        osmId = element.get('id')
        stmt = insert(osm_error).values(
            error_type_id=overpass_queries.mission_type_ids.get(mission_type),
            object_id=osmId,
            object_type=element.get('type'),
            error_name=mission_type,
            lat=lat,
            lon=lon,
            geom=geom,
            txt1=element.get('tags').get('name'))
        stmt = stmt.on_conflict_do_update(
            constraint=UniqueConstraint(osm_error.error_type_id,
                                        osm_error.object_type,
                                        osm_error.object_id),
            set_=dict(error_type_id=overpass_queries.mission_type_ids.get(mission_type),
                      object_id=osmId,
                      object_type=element.get('type'),
                      error_name=mission_type,
                      lat=lat,
                      lon=lon,
                      geom=geom,
                      txt1=element.get('tags').get('name')))
        db_session.execute(stmt)
        db_session.commit()

def _set_product_extent(self, product: ProductSummary):
    source_product_ids = [
        self.index.products.get_by_name(name).id
        for name in product.source_products
    ]
    derived_product_ids = [
        self.index.products.get_by_name(name).id
        for name in product.derived_products
    ]
    fields = dict(
        name=product.name,
        dataset_count=product.dataset_count,
        time_earliest=product.time_earliest,
        time_latest=product.time_latest,
        source_product_refs=source_product_ids,
        derived_product_refs=derived_product_ids,
        # Deliberately do all age calculations with the DB clock rather than local.
        last_refresh=func.now(),
    )

    # Dear future reader. This section used to use an 'UPSERT' statement (as in,
    # insert, on_conflict...) and while this works, it triggers the sequence
    # `product_id_seq` to increment as part of the check for insertion. This
    # is bad because there's only 32k values in the sequence and we have run out
    # a couple of times! So, it appears that this update-else-insert must be done
    # in two transactions...
    row = self._engine.execute(
        select([PRODUCT.c.id]).where(PRODUCT.c.name == product.name)
    ).fetchone()
    if row:
        # Product already exists, so update it
        self._engine.execute(
            PRODUCT.update().where(PRODUCT.c.id == row[0]).values(fields)
        )
    else:
        # Product doesn't exist, so insert it
        row = self._engine.execute(
            postgres.insert(PRODUCT).values(**fields)
        ).inserted_primary_key

    self._product.cache_clear()
    return row[0]

def flush(self, conn) -> None:
    """Flush cache to the database"""
    if not self.cache:
        return
    print(f"Flushing {len(self.cache)} new positions to database")
    assert engine.name in ["sqlite", "postgresql"], f"{engine.name} is unsupported"
    if engine.name == "postgresql":
        # pylint: disable=import-outside-toplevel
        from sqlalchemy.dialects.postgresql import insert  # type: ignore

        statement = insert(self.table).on_conflict_do_nothing()
    elif engine.name == "sqlite":
        # Can replace with on_conflict_do_nothing() in SQLAlchemy 1.4b2
        statement = self.table.insert().prefix_with("OR IGNORE")
    # Ignore conflicts, indicative of running on an old pitr against a
    # more recently updated table. Rows will just be stale until we catch
    # up.
    conn.execute(statement, *self.cache)
    self.cache.clear()

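# --- Sketch (assumed alternative, following the comment in flush() above):
# on SQLAlchemy 1.4+ the SQLite branch no longer needs the "OR IGNORE"
# prefix, because the sqlite dialect has its own on_conflict_do_nothing().
# The helper name is illustrative.
def build_ignore_insert(table, dialect_name):
    if dialect_name == "postgresql":
        from sqlalchemy.dialects.postgresql import insert
    elif dialect_name == "sqlite":
        from sqlalchemy.dialects.sqlite import insert
    else:
        raise ValueError(f"{dialect_name} is unsupported")
    # With no conflict target given, any conflict is silently ignored.
    return insert(table).on_conflict_do_nothing()
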
def insert_dataset_location(self, dataset_id, uri):
    """
    Add a location to a dataset if it is not already recorded.

    Returns True if success, False if this location already existed

    :type dataset_id: str or uuid.UUID
    :type uri: str
    :rtype bool:
    """
    scheme, body = _split_uri(uri)

    r = self._connection.execute(
        insert(DATASET_LOCATION).on_conflict_do_nothing(
            index_elements=['uri_scheme', 'uri_body', 'dataset_ref']
        ),
        dataset_ref=dataset_id,
        uri_scheme=scheme,
        uri_body=body,
    )

    return r.rowcount > 0

def save_nylas_token(self, uid, nylas_access_token, nylas_account_id, email):
    # Verify that the team is one of the allowed options
    if not uid and not nylas_access_token:
        return Response(
            response="Invalid refresh token or token specified.",
            status=400)

    # [START cloud_sql_postgres_sqlalchemy_connection]
    # Preparing a statement before hand can help protect against injections.
    # stmt = sqlalchemy.text('insert into nylas_access_token (uid, nylas_access_token) values (:uid, :nylas_access_token)')
    # stmt = stmt.bindparams(uid=uid, nylas_access_token=nylas_access_token)
    try:
        # Using a with statement ensures that the connection is always released
        # back into the pool at the end of statement (even if an error occurs)
        with self.db.connect() as conn:
            from sqlalchemy.dialects.postgresql import insert
            insert_stmt = insert(self.nylas_access_token).values(
                uid=uid,
                nylas_access_token=nylas_access_token,
                nylas_account_id=nylas_account_id,
                email=email)
            do_update_stmt = insert_stmt.on_conflict_do_update(
                index_elements=['uid'],
                set_=dict(nylas_access_token=nylas_access_token,
                          nylas_account_id=nylas_account_id,
                          email=email))
            conn.execute(do_update_stmt)
    except Exception as e:
        # If something goes wrong, handle the error in this section. This might
        # involve retrying or adjusting parameters depending on the situation.
        # [START_EXCLUDE]
        self.logger.exception(e)
        return Response(status=500, response=f"Unable to update authentication: {e}")

    return Response(status=200, response="Successfully updated authentication table")

def mutate(
    args: Dict,
    info: graphql.execution.base.ResolveInfo,
    user: InputUser,
):
    """
    Upserts a `ModelUser` record based on the `user` input.

    Returns MutationUserUpsert: The result of the mutation.
    """
    # Cleanup the Auth0 user ID.
    auth0_user_id = clean_auth0_user_id(auth0_user_id=str(user.auth0_user_id))

    # Check that the requesting user is authorized to upsert the user with
    # the given Auth0 ID.
    check_auth(info=info, auth0_user_id=auth0_user_id)

    # Retrieve the session out of the context as the `get_query` method
    # automatically selects the model.
    session = info.context.get("session")  # type: sqlalchemy.orm.Session

    # Upsert the `ModelUser` record.
    statement = insert(ModelUser, values={
        "auth0_user_id": user.auth0_user_id,
        "email": user.email,
    }).on_conflict_do_nothing()  # type: Insert

    # Execute the upsert.
    result = session.execute(statement)  # type: ResultProxy

    # Retrieve the newly upserted `ModelUser` record object.
    obj = get_user(session=session, auth0_user_id=auth0_user_id)

    session.commit()

    return MutationUserUpsert(user=obj)

def get_key_stats(self, ticker, db_exchange="TSX"):
    """ This function gets key statistics from Morning Star. """
    url = ("http://financials.morningstar.com/ajax/exportKR2CSV.html?t={}&"
           "culture=en-CA&region=CAN&order=asc&r={}").format(
               ticker, randint(1, 500000))
    req = urllib.request.Request(url, headers=self.headers)
    resp = urllib.request.urlopen(req)
    csv_r = csv.reader(codecs.iterdecode(resp, 'utf-8'))
    on_morningstar = csv_r and resp.headers['content-length'] != '0'
    if on_morningstar:
        LOGGER.info("Getting key statistics for {}... ".format(ticker))
    else:
        LOGGER.info("Skipping {}".format(ticker))
        return 1

    return_dics = self.parse_csv(csv_r, 10, self.special_key_titles,
                                 self.column_key_map,
                                 start_dic={
                                     "ticker": ticker,
                                     "exchange": db_exchange,
                                     "update_date": self.today
                                 })
    for d in return_dics:
        stmt = insert(MorningStarKeyStatistics).values(d).on_conflict_do_update(
            constraint='ms_key_statistics_pkey',
            set_=d)
        self.session.execute(stmt)
    self.session.commit()
    LOGGER.info("Done")
    return 0

def insert_eval_cluster_glosses(session, collection, cluster_glosses_file):
    stmt = postgresql.insert(fw_model.EvalCluster.__table__)
    stmt = stmt.on_conflict_do_update(
        constraint=fw_model.EvalCluster.__table__.primary_key,
        set_={
            'gloss': stmt.excluded.gloss,
        },
    )
    for line in cluster_glosses_file:
        eval_topic_rts_id, rts_id, gloss = line.split(maxsplit=2)
        gloss = gloss.strip()
        session.execute(
            stmt.values(
                eval_topic_rts_id=eval_topic_rts_id,
                eval_topic_collection=collection,
                rts_id=rts_id,
                gloss=gloss,
            ))
    session.commit()

async def save_track(*, track_dict: dict, conn=None) -> Optional[dict]:
    """Save a given track to the database. Any fields given in track_dict will be updated.

    :param dict track_dict: Keys as in the table columns
    :param conn: A connection if any open
    :return: None if it failed, the updated track if not
    """
    assert track_dict
    assert isinstance(track_dict.get('length'), (int, float))
    assert isinstance(track_dict.get('origin'), (str, Origin))
    assert isinstance(track_dict.get('extid'), str)
    assert isinstance(track_dict.get('name'), str)

    query = psa.insert(Track) \
        .values(track_dict) \
        .returning(Track) \
        .on_conflict_do_update(
            index_elements=[Track.c.extid, Track.c.origin],
            set_=track_dict,
        )
    return await execute_and_first(query=query, conn=conn)

def error_log(
    self,
    controler: str,
    item: dict,
    error: str,
    id_key_name: str = "id_synthese",
    last_ts: datetime = datetime.now(),
) -> None:
    metadata = self._metadata.tables[self._config.db_schema_import + "." + "error_log"]
    insert_stmt = insert(metadata).values(
        source=self._config.std_name,
        controler=controler,
        id_data=item[id_key_name],
        item=item,
        last_ts=last_ts,
        error=error,
    )
    self._conn.execute(insert_stmt)
    return None

def override_game(self, name):
    """
    Override current game.

    `name`: Name of the game or `None` to disable override
    """
    if name is None:
        self.game_override = None
    else:
        games = self.metadata.tables["games"]
        with self.engine.begin() as conn:
            query = insert(games).returning(games.c.id)
            # need to update to get the `id`
            query = query.on_conflict_do_update(
                index_elements=[games.c.name],
                set_={
                    'name': query.excluded.name,
                })
            self.game_override, = conn.execute(query, {
                "name": name,
            }).first()
    self.get_game_id.reset_throttle()

def _put(self,
         product_name: Optional[str],
         year: Optional[int],
         month: Optional[int],
         day: Optional[int],
         summary: TimePeriodOverview):
    product = self._product(product_name)
    start_day, period = self._start_day(year, month, day)
    row = _summary_to_row(summary)
    ret = self._engine.execute(
        postgres.insert(TIME_OVERVIEW)
        .returning(TIME_OVERVIEW.c.generation_time)
        .on_conflict_do_update(
            index_elements=['product_ref', 'start_day', 'period_type'],
            set_=row,
            where=and_(
                TIME_OVERVIEW.c.product_ref == product.id_,
                TIME_OVERVIEW.c.start_day == start_day,
                TIME_OVERVIEW.c.period_type == period,
            ),
        )
        .values(
            product_ref=product.id_,
            start_day=start_day,
            period_type=period,
            **row,
        )
    )
    [gen_time] = ret.fetchone()
    summary.summary_gen_time = gen_time

def test_on_conflict_do_update_exotic_targets_five(self):
    users = self.tables.users_xtra

    with testing.db.connect() as conn:
        self._exotic_targets_fixture(conn)
        # try bogus index
        i = insert(users)
        i = i.on_conflict_do_update(
            index_elements=self.bogus_index.columns,
            index_where=self.bogus_index.dialect_options['postgresql']['where'],
            set_=dict(
                name=i.excluded.name,
                login_email=i.excluded.login_email),
        )

        assert_raises(
            exc.ProgrammingError,
            conn.execute, i,
            dict(
                id=1,
                name='namebogus',
                login_email='*****@*****.**',
                lets_index_this='bogus'),
        )

def get_search_result(self, domain):
    print(f'Pulling result for {domain}...')

    # Start a db session
    self.db_session = db_new_session()

    # Pull from Bing
    res = requests.get(f'https://www.bing.com/search?q={domain}&setlang=en-us', verify=False)
    soup = BeautifulSoup(res.text, 'html.parser')
    result = soup.select('.b_algo')

    if len(result) > 0:
        try:
            for i, row in enumerate(result):
                id = f'{domain}-{i}'
                insert_data = {
                    'id': id,
                    'domain': domain,
                    'position': i,
                    'title': row.find('a').text,
                    'url': row.find('a')['href'],
                }
                self.db_session.execute(
                    insert(table_result).values(insert_data).on_conflict_do_update(
                        index_elements=['id'],
                        set_={
                            **insert_data,
                            **{'updated_at': datetime.utcnow()},
                        }))
            self.db_session.commit()

            # Another task can be triggered here
            # for example to work on url
            return True
        except Exception as e:
            print(e)

    return False

def _exotic_targets_fixture(self, conn):
    users = self.tables.users_xtra
    conn.execute(
        insert(users),
        dict(
            id=1, name='name1',
            login_email='*****@*****.**',
            lets_index_this='not',
        ),
    )
    conn.execute(
        users.insert(),
        dict(
            id=2, name='name2',
            login_email='*****@*****.**',
            lets_index_this='not',
        ),
    )
    eq_(
        conn.execute(users.select().where(users.c.id == 1)).fetchall(),
        [(1, 'name1', '*****@*****.**', 'not')],
    )

def insertData(table, df, db_name):
    try:
        dwhConnection = conn.connect()
        # cloud sql doesn't convert python datetime object properly
        if 'datetime' in df.columns:
            df['datetime'] = df['datetime'].dt.tz_convert('UTC').dt.strftime(
                '%Y-%m-%d %H:%M:%S%z')
        chunks = split_dataframe(df)
        for chunk in chunks:
            pg_sql = insert(table.__table__, chunk.to_dict("records")).on_conflict_do_nothing()
            dwhConnection.execute(pg_sql)
            # print(pg_sql)  # will give error 'The 'default' dialect with current database version settings does not support in-place multirow inserts.' bc print is not dialect aware
        print(
            f'==============\n{len(df)} rows written to {db_name} db {table.__tablename__} table in {len(chunks)} chunks\n=============='
        )
        # logging.debug(f'{len(df)} rows written to {db_name} db {table.__tablename__} table in {len(chunks)} chunks')
        dwhConnection.close()
        return True
    except Exception as e:
        print(f'==============\nException at insertData: {e}\n==============')
        return False

def dao_create_or_update_daily_sorted_letter(new_daily_sorted_letter):
    '''
    This uses the Postgres upsert to avoid race conditions when two threads
    try and insert at the same row.

    The excluded object refers to values that we tried to insert but were rejected.
    http://docs.sqlalchemy.org/en/latest/dialects/postgresql.html#insert-on-conflict-upsert
    '''
    table = DailySortedLetter.__table__
    stmt = insert(table).values(
        billing_day=new_daily_sorted_letter.billing_day,
        file_name=new_daily_sorted_letter.file_name,
        unsorted_count=new_daily_sorted_letter.unsorted_count,
        sorted_count=new_daily_sorted_letter.sorted_count)
    stmt = stmt.on_conflict_do_update(
        index_elements=[table.c.billing_day, table.c.file_name],
        set_={
            'unsorted_count': stmt.excluded.unsorted_count,
            'sorted_count': stmt.excluded.sorted_count,
            'updated_at': datetime.utcnow(),
        },
    )
    db.session.connection().execute(stmt)

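# --- Sketch (illustration only, names assumed): the EXCLUDED pattern the
# docstring above describes, reduced to its essentials. `stmt.excluded.<col>`
# refers to the row that failed to insert, so the UPDATE reuses the incoming
# values atomically rather than re-reading Python-side state.
from sqlalchemy.dialects.postgresql import insert as pg_insert

def minimal_excluded_upsert(conn, table, row):
    stmt = pg_insert(table).values(**row)
    stmt = stmt.on_conflict_do_update(
        index_elements=[table.c.id],  # assumed conflict target
        set_={c.name: c for c in stmt.excluded if not c.primary_key},
    )
    conn.execute(stmt)
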
def save_campaign_entitlement(
        self, influencer_email, shop, commission=0, commission_percentage=0,
        lifo_tracker_id=None, commission_type='one_time_commission_campaign'):
    try:
        # Using a with statement ensures that the connection is always released
        # back into the pool at the end of statement (even if an error occurs)
        with self.db.connect() as conn:
            from sqlalchemy.dialects.postgresql import insert
            insert_stmt = insert(self.campaign_entitlement).values(
                influencer_email=str(influencer_email),
                shop=shop,
                lifo_tracker_id=str(lifo_tracker_id),
                commission=commission,
                commission_percentage=commission_percentage,
                commission_type=commission_type,
                timestamp=datetime.datetime.now(),
            )
            do_update_stmt = insert_stmt.on_conflict_do_update(
                index_elements=['influencer_email', 'shop'],
                set_=dict(
                    commission=commission,
                    commission_percentage=commission_percentage,
                    commission_type=commission_type,
                    lifo_tracker_id=str(lifo_tracker_id),
                    timestamp=datetime.datetime.now(),
                ))
            conn.execute(do_update_stmt)
    except Exception as e:
        # If something goes wrong, handle the error in this section. This might
        # involve retrying or adjusting parameters depending on the situation.
        # [START_EXCLUDE]
        self.logger.exception(e)
        return {'status': 'Failed'}
    return {'status': 'OK'}

def import_datafile(db, infile):
    """
    Import the `datafile` Excel sheet to a CSV representation stored within
    the database that can be further processed without large filesystem
    operations.

    This also stores the original file's hash in order to skip importing
    unchanged data.

    Returns boolean (whether file was imported).
    """
    hash = md5hash(infile)
    data_file = db.mapped_classes.data_file

    # Should maybe make sure error is not set
    rec = db.get(data_file, hash)
    # We are done if we've already imported
    if rec is not None:
        return False

    # Values to insert
    cols = dict(
        file_hash=hash,
        basename=infile.stem,
        import_date=None,
        import_error=None,
        csv_data=None)

    try:
        cols['csv_data'] = extract_datatable(infile)
    except NotImplementedError as e:
        cols['import_error'] = str(e)
        secho(str(e), fg='red', dim=True)

    tbl = data_file.__table__
    sql = (insert(tbl)
           .values(file_path=str(infile), **cols)
           .on_conflict_do_update(
               index_elements=[tbl.c.file_path],
               set_=dict(**cols)))
    db.session.execute(sql)
    return True

def save(self, flask_app=None) -> Optional[List[str]]:
    """Saves the state to the database.

    Args:
        flask_app (flask.Flask): Uses the `flask_app.config` to determine
            whether Orchest needs to be restarted for the global config
            changes to take effect or if some settings can be updated at
            runtime.

    Returns:
        * `None` if no `flask_app` is given.
        * List of changed config options that require an Orchest restart
          to take effect.
        * Empty list otherwise.
    """
    settings_as_dict = self.as_dict()

    # Upsert entries.
    stmt = insert(models.Setting).values([
        dict(name=k, value={"value": v})
        for k, v in settings_as_dict.items()
    ])
    stmt = stmt.on_conflict_do_update(
        index_elements=[models.Setting.name],
        set_=dict(value=stmt.excluded.value))
    db.session.execute(stmt)

    # Delete settings that are not part of the new configuration.
    models.Setting.query.filter(
        models.Setting.name.not_in(list(settings_as_dict.keys()))).delete()

    db.session.commit()

    if flask_app is None:
        return

    return self._apply_runtime_changes(flask_app, settings_as_dict)

def test_on_conflict_do_update_clauseelem_as_key_set(self):
    users = self.tables.users

    class MyElem(object):
        def __init__(self, expr):
            self.expr = expr

        def __clause_element__(self):
            return self.expr

    with testing.db.connect() as conn:
        conn.execute(
            users.insert(),
            {"id": 1, "name": "name1"},
        )
        i = insert(users)
        i = i.on_conflict_do_update(
            index_elements=[users.c.id],
            set_={MyElem(users.c.name): i.excluded.name},
        ).values({MyElem(users.c.id): 1, MyElem(users.c.name): "name1"})
        result = conn.execute(i)
        eq_(result.inserted_primary_key, (1,))
        eq_(result.returned_defaults, None)

        eq_(
            conn.execute(users.select().where(users.c.id == 1)).fetchall(),
            [(1, "name1")],
        )

def update_fact_billing(data, process_day):
    non_letter_rates, letter_rates = get_rates_for_billing()
    rate = get_rate(non_letter_rates,
                    letter_rates,
                    data.notification_type,
                    process_day,
                    data.crown,
                    data.letter_page_count,
                    data.postage)
    billing_record = create_billing_record(data, rate, process_day)
    table = FactBilling.__table__
    '''
    This uses the Postgres upsert to avoid race conditions when two threads
    try to insert at the same row.

    The excluded object refers to values that we tried to insert but were rejected.
    http://docs.sqlalchemy.org/en/latest/dialects/postgresql.html#insert-on-conflict-upsert
    '''
    stmt = insert(table).values(
        bst_date=billing_record.bst_date,
        template_id=billing_record.template_id,
        service_id=billing_record.service_id,
        provider=billing_record.provider,
        rate_multiplier=billing_record.rate_multiplier,
        notification_type=billing_record.notification_type,
        international=billing_record.international,
        billable_units=billing_record.billable_units,
        notifications_sent=billing_record.notifications_sent,
        rate=billing_record.rate,
        postage=billing_record.postage,
    )

    stmt = stmt.on_conflict_do_update(
        constraint="ft_billing_pkey",
        set_={
            "notifications_sent": stmt.excluded.notifications_sent,
            "billable_units": stmt.excluded.billable_units,
            "updated_at": datetime.utcnow(),
        },
    )
    db.session.connection().execute(stmt)
    db.session.commit()

def upsert(self, schema, table_name, records={}):
    """Upsert records into a reflected table.

    Args:
        schema ([string]): [schema name]
        table_name ([string]): [table name]
        records (dict, optional): [records to be updated]. Defaults to {}.

    Returns:
        [execute]: [return the execution of the upsert]
    """
    metadata = MetaData(schema=schema)
    metadata.bind = self.engine
    table = Table(table_name, metadata, schema=schema, autoload=True)

    # get list of fields making up primary key
    primary_keys = [key.name for key in inspect(table).primary_key]

    # assemble base statement
    stmt = postgresql.insert(table).values(records)

    # define dict of non-primary keys for updating
    update_dict = {c.name: c for c in stmt.excluded if not c.primary_key}

    # assemble new statement with 'on conflict do update' clause
    update_stmt = stmt.on_conflict_do_update(
        index_elements=primary_keys,
        set_=update_dict,
    )

    # execute
    with self.engine.connect() as conn:
        try:
            result = conn.execute(update_stmt)
            return result
        except exc.IntegrityError:
            return gerrors.fk_error()

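# --- Sketch (hypothetical usage, not from the original): calling the
# reflection-based upsert() above. The schema, table, and column names, and
# the wrapping object `dao`, are assumptions for illustration.
def example_reflected_upsert(dao):
    dao.upsert(
        schema='public',
        table_name='offers',
        records={'id': 1, 'price': 999, 'currency': 'USD'},
    )
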
def merge_sender_infos(sender_info_dicts):
    for sender_info_dict in sender_info_dicts:
        statement = insert(SenderInfo) \
            .values(**sender_info_dict) \
            .on_conflict_do_update(
                index_elements=['address', 'address_origin'],
                set_=sender_info_dict)
        db.session.execute(statement)
    db.session.commit()

    # update sender_infos FK countries
    countries = {country.iso2: country for country in db.session.query(Country)}
    parser = flydenity_parser.ARParser()
    for sender_info in db.session.query(SenderInfo).filter(SenderInfo.country_id == db.null()):
        datasets = parser.parse(sender_info.registration, strict=True)
        if datasets is None:
            continue
        for dataset in datasets:
            if 'iso2' in dataset:
                sender_info.country = countries[dataset['iso2']]
    db.session.commit()

    # Update sender_infos FK -> senders
    upd = db.update(SenderInfo) \
        .where(SenderInfo.address == Sender.address) \
        .values(sender_id=Sender.id)
    result = db.session.execute(upd)
    db.session.commit()

    return len(sender_info_dicts)

def update_hodlers(self, hodlers_by_address: Dict[str, Any], token: Token) -> None:
    """ Update existing hodlers amount + Create if they do not exist yet """
    with SessionManager.session() as session:
        rows = (
            session.query(HodlerModel)
            .filter_by(token_name=token.name)
            .filter(HodlerModel.address.in_(list(hodlers_by_address.keys())))
            .all()
        )
        existing_hodlers = {row.address: row for row in rows}
        for hodler_addr, hodler in hodlers_by_address.items():
            if hodler_addr in existing_hodlers:
                new_amount = int(hodler['amount']) + int(existing_hodlers[hodler_addr].amount)
                hodler['amount'] = str(new_amount)
                hodler['number_transactions'] += existing_hodlers[hodler_addr].number_transactions
            hodler['amount'] = str(hodler['amount']).zfill(32)
            hodler['updated_at'] = func.current_timestamp()

    hodler_table = HodlerModel.__table__.c
    with SessionManager.use_connection() as c:
        stmt = insert(HodlerModel).values(list(hodlers_by_address.values()))
        stmt = stmt.on_conflict_do_update(
            constraint="hodler_address_token_unique",
            set_={
                hodler_table.amount.name: stmt.excluded.amount,
                hodler_table.number_transactions.name: stmt.excluded.number_transactions,
                hodler_table.last_transaction.name: stmt.excluded.last_transaction,
                hodler_table.updated_at.name: stmt.excluded.updated_at,
            },
        )
        c.execute(stmt)