def get_free_title(connection: Connection, title: str, auth_user_id: str) -> str:
    """
    Get a good version of the title to be inserted into the survey table.

    If the title as given already exists for this user, a number is
    appended: "survey" -> "survey(1)" -> "survey(2)" and so on.

    :param connection: a SQLAlchemy Connection
    :param title: the survey title
    :param auth_user_id: the user's UUID
    :return: a title that can be inserted safely
    """
    existence_query = select((
        exists().where(
            survey_table.c.survey_title == title
        ).where(
            survey_table.c.auth_user_id == auth_user_id
        ),
    ))
    (title_taken,), = connection.execute(existence_query)
    if not title_taken:
        return title
    # The title is taken: find every similar title and pick the next
    # free "(N)" suffix.
    similar_surveys = connection.execute(
        select([survey_table]).where(
            survey_table.c.survey_title.like(title + '%')
        ).where(
            survey_table.c.auth_user_id == auth_user_id
        )
    ).fetchall()
    conflicts = list(_conflicting(title, similar_surveys))
    next_number = max(conflicts) + 1 if conflicts else 1
    return title + '({})'.format(next_number)
def delete(connection: Connection, survey_id: str):
    """
    Delete the survey specified by the given survey_id

    :param connection: a SQLAlchemy connection
    :param survey_id: the UUID of the survey
    """
    delete_stmt = delete_record(survey_table, 'survey_id', survey_id)
    with connection.begin():
        connection.execute(delete_stmt)
    return json_response('Survey deleted')
def _create_choices(connection: Connection,
                    values: dict,
                    question_id: str,
                    submission_map: dict,
                    existing_question_id: str=None) -> Iterator:
    """
    Create the choices of a survey question. If this is an update to an
    existing survey, it will also copy over answers to the questions.

    Note: this is a generator; nothing is inserted until it is iterated.

    :param connection: the SQLAlchemy Connection object for the transaction
    :param values: the dictionary of values associated with the question
    :param question_id: the UUID of the question
    :param submission_map: a dictionary mapping old submission_id to new
    :param existing_question_id: the UUID of the existing question (if this
                                 is an update)
    :return: an iterable of the resultant choice fields
    """
    choices = values['choices']
    # Split into brand-new choices and choices that already exist on the
    # old question (whose answers must be carried over).
    new_choices, updates = _determine_choices(connection,
                                              existing_question_id,
                                              choices)
    for number, choice in enumerate(new_choices):
        choice_dict = {
            'question_id': question_id,
            'survey_id': values['survey_id'],
            'choice': choice,
            'choice_number': number,
            'type_constraint_name': values['type_constraint_name'],
            'question_sequence_number': values['sequence_number'],
            'allow_multiple': values['allow_multiple']}
        executable = question_choice_insert(**choice_dict)
        # A duplicate choice name trips the unique_choice_names constraint;
        # surface it as a domain-specific error instead of IntegrityError.
        exc = [('unique_choice_names', RepeatedChoiceError(choice))]
        result = execute_with_exceptions(connection, executable, exc)
        result_ipk = result.inserted_primary_key
        question_choice_id = result_ipk[0]
        if choice in updates:
            question_fields = {'question_id': question_id,
                               'type_constraint_name': result_ipk[2],
                               'sequence_number': result_ipk[3],
                               'allow_multiple': result_ipk[4],
                               'survey_id': values['survey_id']}
            # Copy each existing answer for this choice, re-pointing it at
            # the new choice and the remapped submission.
            for answer in get_answer_choices_for_choice_id(connection,
                                                           updates[choice]):
                answer_values = question_fields.copy()
                new_submission_id = submission_map[answer.submission_id]
                answer_values['question_choice_id'] = question_choice_id
                answer_values['submission_id'] = new_submission_id
                answer_metadata = answer.answer_choice_metadata
                answer_values['answer_choice_metadata'] = answer_metadata
                connection.execute(answer_choice_insert(**answer_values))
        yield question_choice_id
def lock_table(connection: Connection, target_table: Table):
    """
    Lock a table using a PostgreSQL advisory lock

    The OID of the table in the pg_class relation is used as lock id.

    :param connection: DB connection
    :param target_table: Table object
    """
    logger.debug('Locking table "%s"', target_table.name)
    oid_query = (
        select([column("oid")])
        .select_from(table("pg_class"))
        .where(column("relname") == target_table.name)
    )
    table_oid = connection.execute(oid_query).scalar()
    connection.execute(
        select([func.pg_advisory_xact_lock(table_oid)])).scalar()
def insert_profile(conn: Connection, insert: str, p: Profile):
    """Insert one sanitized profile row using the given INSERT statement."""
    unified_name, _ = unify_profile_name(p.first_name, p.last_name)
    encoded_name = generate_id(unified_name)
    params = (
        sanitize_text(p.identifier),
        encoded_name,
        sanitize_text(p.first_name),
        sanitize_text(p.last_name),
        sanitize_text(p.display_name),
        sanitize_text(p.link),
    )
    conn.execute(insert, params)
def init_db(connection: Connection, force: bool=False, test: bool=False) -> None:
    """
    Create the application's schemas and tables.

    :param connection: a SQLAlchemy Connection
    :param force: drop any existing schemas before creating them
    :param test: populate the database with test data afterwards
    """
    import c2cgeoportal_commons.models.main  # noqa: F401
    import c2cgeoportal_commons.models.static  # noqa: F401
    from c2cgeoportal_commons.models import schema

    # FIX: validate schema before using it; previously the assert ran only
    # after schema had already been interpolated into schema_static.
    assert schema is not None
    schema_static = '{}_static'.format(schema)

    if force:
        if schema_exists(connection, schema):
            connection.execute('DROP SCHEMA {} CASCADE;'.format(schema))
        if schema_exists(connection, schema_static):
            connection.execute('DROP SCHEMA {} CASCADE;'.format(schema_static))

    if not schema_exists(connection, schema):
        connection.execute('CREATE SCHEMA "{}";'.format(schema))
    if not schema_exists(connection, schema_static):
        connection.execute('CREATE SCHEMA "{}";'.format(schema_static))

    Base.metadata.create_all(connection)
    session_factory = get_session_factory(connection)
    with transaction.manager:
        dbsession = get_tm_session(session_factory, transaction.manager)
        if test:
            setup_test_data(dbsession)
def _return_sql(connection: Connection, result: object, survey_id: str,
                auth_user_id: str, question_id: str) -> object:
    """
    Get the result for a _scalar-y function.

    :param connection: a SQLAlchemy Connection
    :param result: the result of the SQL function
    :param survey_id: the UUID of the survey
    :param auth_user_id: the UUID of the user
    :param question_id: the UUID of the question
    :return: the result of the SQL function
    :raise NoSubmissionsToQuestionError: if there are no submissions
    :raise QuestionDoesNotExistError: if the user is not authorized
    """
    # Non-empty results pass straight through.
    if result is not None and result != []:
        return result
    # Empty result: decide whether it's "no submissions" (caller owns the
    # survey) or "not authorized" (caller does not own it).
    owner_query = select([survey_table]).where(
        survey_table.c.survey_id == survey_id)
    owner_id = connection.execute(owner_query).first().auth_user_id
    if auth_user_id == owner_id:
        raise NoSubmissionsToQuestionError(question_id)
    raise QuestionDoesNotExistError(question_id)
def _jsonify(connection: Connection, answer: object,
             question_id: str) -> object:
    """
    Return a "nice" representation of an answer which can be serialized
    as JSON.

    :param connection: a SQLAlchemy Connection
    :param answer: a submitted value
    :param question_id: the UUID of the question
    :return: the nice representation
    """
    tcn = question_select(connection, question_id).type_constraint_name
    if tcn in ('location', 'facility'):
        # Geometry values are converted to GeoJSON coordinates by the DB.
        geo_json = connection.execute(func.ST_AsGeoJSON(answer)).scalar()
        return json_decode(geo_json)['coordinates']
    if tcn in ('date', 'time'):
        return maybe_isoformat(answer)
    if tcn == 'decimal':
        return float(answer)
    if tcn == 'multiple_choice':
        # For multiple choice the stored answer is a choice id.
        return question_choice_select(connection, answer).choice
    return answer
def get_sessions_of_mac(connection: Connection, mac: netaddr.EUI,
                        when: Optional[DatetimeRange]=None,
                        limit: Optional[int]=None) -> Iterable[
        Tuple[netaddr.IPAddress, str, datetime, datetime]]:
    """
    Return accounting sessions of a particular MAC address ordered by
    Session-Start-Time descending.

    :param connection: A SQLAlchemy connection
    :param mac: MAC address
    :param when: Range in which Session-Start-Time must be within
    :param limit: Maximum number of records
    :return: An iterable that yields (NAS-IP-Address, NAS-Port-Id,
        Session-Start-Time, Session-Stop-Time)-tuples ordered by
        Session-Start-Time descending
    """
    logger.debug('Getting all sessions for MAC "%s"', mac)
    query = (
        select([radacct.c.NASIPAddress, radacct.c.NASPortId,
                radacct.c.AcctStartTime, radacct.c.AcctStopTime])
        .where(radacct.c.UserName == mac)
        .order_by(radacct.c.AcctStartTime.desc())
    )
    if when is not None:
        # BUG FIX: the result of .where() was discarded (select is
        # immutable), and `.op('<@') <= ...` compared the operator callable
        # instead of applying it. Apply the PostgreSQL range-containment
        # operator and rebind the query.
        query = query.where(
            radacct.c.AcctStartTime.op('<@')(func.tstzrange(*when)))
    if limit is not None:
        query = query.limit(limit)
    return iter(connection.execute(query))
def get_auth_attempts_of_mac(connection: Connection, mac: netaddr.EUI,
                             when: Optional[DatetimeRange]=None,
                             limit: Optional[int]=None) -> Iterable[
        Tuple[netaddr.IPAddress, str, str, Groups, Attributes, datetime]]:
    """
    Return auth attempts of a particular MAC address order by Auth-Date
    descending.

    :param connection: A SQLAlchemy connection
    :param mac: MAC address
    :param when: Range in which Auth-Date must be within
    :param limit: Maximum number of records
    :return: An iterable that yields (NAS-IP-Address, NAS-Port-Id,
        Packet-Type, Groups, Reply, Auth-Date)-tuples ordered by Auth-Date
        descending
    """
    logger.debug('Getting all auth attempts of MAC %s', mac)
    query = (
        select([radpostauth.c.NASIPAddress, radpostauth.c.NASPortId,
                radpostauth.c.PacketType, radpostauth.c.Groups,
                radpostauth.c.Reply, radpostauth.c.AuthDate])
        .where(radpostauth.c.UserName == mac)
        .order_by(radpostauth.c.AuthDate.desc())
    )
    if when is not None:
        # BUG FIX: the result of .where() was discarded (select is
        # immutable), and `.op('<@') <= ...` compared the operator callable
        # instead of applying it. Apply the PostgreSQL range-containment
        # operator and rebind the query.
        query = query.where(
            radpostauth.c.AuthDate.op('<@')(func.tstzrange(*when)))
    if limit is not None:
        query = query.limit(limit)
    return iter(connection.execute(query))
def get_auth_attempts_at_port(connection: Connection,
                              nas_ip_address: netaddr.IPAddress,
                              nas_port_id: str,
                              when: Optional[DatetimeRange]=None,
                              limit: Optional[int]=None) -> Iterable[
        Tuple[str, str, Groups, Attributes, datetime]]:
    """
    Return auth attempts at a particular port of an NAS ordered by
    Auth-Date descending.

    :param connection: A SQLAlchemy connection
    :param nas_ip_address: NAS IP address
    :param nas_port_id: NAS Port ID
    :param when: Range in which Auth-Date must be within
    :param limit: Maximum number of records
    :return: An iterable that yields (User-Name, Packet-Type, Groups,
        Reply, Auth-Date)-tuples ordered by Auth-Date descending
    """
    # BUG FIX: Python %-formatting has no positional "%2$s" syntax; use
    # plain %s placeholders with the arguments in display order.
    logger.debug('Getting all auth attempts at port %s of %s',
                 nas_port_id, nas_ip_address)
    query = (
        select([radpostauth.c.UserName, radpostauth.c.PacketType,
                radpostauth.c.Groups, radpostauth.c.Reply,
                radpostauth.c.AuthDate])
        .where(and_(radpostauth.c.NASIPAddress == nas_ip_address,
                    radpostauth.c.NASPortId == nas_port_id))
        .order_by(radpostauth.c.AuthDate.desc())
    )
    if when is not None:
        # BUG FIX: the result of .where() was discarded (select is
        # immutable), and `.op('<@') <= ...` compared the operator callable
        # instead of applying it. Apply the PostgreSQL range-containment
        # operator and rebind the query.
        query = query.where(
            radpostauth.c.AuthDate.op('<@')(func.tstzrange(*when)))
    if limit is not None:
        query = query.limit(limit)
    return iter(connection.execute(query))
def survey_select(connection: Connection, survey_id: str,
                  auth_user_id: str=None, email: str=None) -> RowProxy:
    """
    Get a record from the survey table. You must supply either the
    auth_user_id or the email.

    :param connection: a SQLAlchemy Connection
    :param survey_id: the UUID of the survey
    :param auth_user_id: the UUID of the user
    :param email: the user's e-mail address
    :return: the corresponding record
    :raise SurveyDoesNotExistError: if the UUID is not in the table
    """
    # Avoid shadowing the module-level `table` helper.
    from_obj = survey_table
    conditions = [survey_table.c.survey_id == survey_id]
    if auth_user_id is not None:
        if email is not None:
            raise TypeError('You cannot specify both auth_user_id and email')
        conditions.append(survey_table.c.auth_user_id == auth_user_id)
    elif email is not None:
        # Identify the user via the auth_user table instead.
        from_obj = from_obj.join(auth_user_table)
        conditions.append(auth_user_table.c.email == email)
    else:
        raise TypeError('You must specify either auth_user_id or email')
    survey = connection.execute(
        select([survey_table]).select_from(from_obj).where(
            and_(*conditions))).first()
    if survey is None:
        raise SurveyDoesNotExistError(survey_id)
    return survey
def get_stats(connection: Connection, survey_id: str, email: str) -> dict:
    """
    Get statistics about the specified survey: creation time, number of
    submissions, time of the earliest submission, and time of the latest
    submission.

    :param connection: a SQLAlchemy Connection
    :param survey_id: the UUID of the survey
    :param email: the e-mail address of the user
    :return: a JSON representation of the statistics.
    """
    stats_query = select([
        survey_table.c.created_on,
        count(submission_table.c.submission_id),
        sqlmin(submission_table.c.submission_time),
        sqlmax(submission_table.c.submission_time),
    ]).select_from(
        # Outer join so a survey with no submissions still yields a row.
        auth_user_table.join(survey_table).outerjoin(submission_table)
    ).where(
        survey_table.c.survey_id == survey_id
    ).where(
        auth_user_table.c.email == email
    ).group_by(
        survey_table.c.survey_id
    )
    created_on, num_submissions, earliest, latest = connection.execute(
        stats_query).first()
    return json_response({
        'created_on': maybe_isoformat(created_on),
        'num_submissions': num_submissions,
        'earliest_submission_time': maybe_isoformat(earliest),
        'latest_submission_time': maybe_isoformat(latest)
    })
def get_questions(connection: Connection,
                  survey_id: str,
                  auth_user_id: Optional[str]=None,
                  email: Optional[str]=None) -> ResultProxy:
    """
    Get all the questions for a survey identified by survey_id ordered by
    sequence number restricted by auth_user.

    :param connection: a SQLAlchemy Connection
    :param survey_id: the UUID of the survey
    :param auth_user_id: the UUID of the user
    :param email: the user's e-mail address
    :return: an iterable of the questions (RowProxy)
    :raise TypeError: if both or neither of auth_user_id and email are given
    """
    # FIX: the parameter annotations were the list literal [str, None],
    # which is not a valid type hint; Optional[str] expresses the intent.
    table = question_table.join(survey_table)
    conds = [question_table.c.survey_id == survey_id]
    if auth_user_id is not None:
        if email is not None:
            raise TypeError('You cannot specify both auth_user_id and email')
        conds.append(survey_table.c.auth_user_id == auth_user_id)
    elif email is not None:
        table = table.join(auth_user_table)
        conds.append(auth_user_table.c.email == email)
    else:
        raise TypeError('You must specify either auth_user_id or email')
    questions = connection.execute(
        select([question_table]).select_from(table).where(
            and_(*conds)).order_by('sequence_number asc'))
    return questions
def _copy_submission_entries(connection: Connection,
                             existing_survey_id: str,
                             new_survey_id: str,
                             email: str) -> Iterator:
    """
    Copy submissions from an existing survey to its updated copy.

    Note: this is a generator (the original `-> tuple` annotation was
    misleading); nothing is copied until it is iterated.

    :param connection: the SQLAlchemy connection used for the transaction
    :param existing_survey_id: the UUID of the existing survey
    :param new_survey_id: the UUID of the survey's updated copy
    :param email: the user's e-mail address
    :return: an iterator of (old submission ID, new submission ID) tuples
    """
    submissions = get_submissions_by_email(
        connection,
        email,
        survey_id=existing_survey_id
    )
    for sub in submissions:
        # Carry every submission attribute over, re-pointing survey_id at
        # the new survey copy.
        values = {'submitter': sub.submitter,
                  'submitter_email': sub.submitter_email,
                  'submission_time': sub.submission_time,
                  'save_time': sub.save_time,
                  'survey_id': new_survey_id}
        result = connection.execute(submission_insert(**values))
        yield sub.submission_id, result.inserted_primary_key[0]
def execute_with_exceptions(connection: Connection,
                            executable: [Insert, Update],
                            exceptions: Iterator) -> ResultProxy:
    """
    Execute the given executable (a SQLAlchemy Insert or Update) within a
    transaction (provided by the Connection object), and raise meaningful
    exceptions.

    Normally connection.execute() will raise a generic IntegrityError, so
    use the exceptions parameter to specify which exceptions to raise
    instead.

    :param connection: the SQLAlchemy connection (for transaction purposes)
    :param executable: the object to pass to connection.execute()
    :param exceptions: an iterable of (name: str, exception: Exception)
                       tuples. name is the string to look for in the
                       IntegrityError, and exception is the Exception to
                       raise instead of IntegrityError
    :return: a SQLAlchemy ResultProxy
    """
    try:
        return connection.execute(executable)
    except IntegrityError as exc:
        # Match against the underlying DBAPI error message; the marker
        # names are typically constraint names.
        error = str(exc.orig)
        for name, exception in exceptions:
            if name in error:
                raise exception
        # No configured marker matched: propagate the original error.
        raise
def create_user(connection: Connection, data: dict) -> dict:
    """
    Registers a new user account.

    :param connection: a SQLAlchemy Connection
    :param data: the user's e-mail
    :return: a response containing the e-mail and whether it was created
             or already exists in the database
    """
    email = data['email']
    try:
        get_auth_user_by_email(connection, email)
    except UserDoesNotExistError:
        # Unknown e-mail: create the account inside a transaction.
        with connection.begin():
            connection.execute(create_auth_user(email=email))
        return json_response({'email': email, 'response': 'Created'})
    else:
        return json_response({'email': email, 'response': 'Already exists'})
def schema_exists(connection: Connection, schema_name: str) -> bool:
    """
    Return whether a schema with the given name exists.

    :param connection: a SQLAlchemy Connection
    :param schema_name: the schema name to look for
    :return: True if the schema exists
    """
    from sqlalchemy import text

    # FIX: schema_name was interpolated into the SQL string with
    # str.format(), which is vulnerable to SQL injection (and breaks on
    # names containing quotes). Use a bound parameter instead.
    sql = text("""
    SELECT count(*) AS count
    FROM information_schema.schemata
    WHERE schema_name = :schema_name;
    """)
    result = connection.execute(sql, schema_name=schema_name)
    row = result.first()
    return row[0] == 1
def bar_graph(connection: Connection,
              question_id: str,
              auth_user_id: Optional[str]=None,
              email: Optional[str]=None,
              limit: Optional[int]=None,
              count_order: bool=False) -> dict:
    """
    Get a list of the number of times each submission value appears.

    You must provide either an auth_user_id or e-mail address.

    :param connection: a SQLAlchemy Connection
    :param question_id: the UUID of the question
    :param auth_user_id: the UUID of the user
    :param email: the e-mail address of the user.
    :param limit: a limit on the number of results
    :param count_order: whether to order from largest count to smallest
    :return: a JSON dict containing the result [[values], [counts]]
    """
    user_id = _get_user_id(connection, auth_user_id, email)
    allowable_types = {'text', 'integer', 'decimal', 'multiple_choice',
                       'date', 'time', 'location', 'facility'}
    question = question_select(connection, question_id)
    tcn = _get_type_constraint_name(allowable_types, question)
    # Assume that you only want to consider the non-other answers
    original_table, column_name = _table_and_column(tcn)
    table = original_table.join(
        question_table,
        original_table.c.question_id == question_table.c.question_id
    ).join(survey_table)
    conds = [question_table.c.question_id == question_id,
             survey_table.c.auth_user_id == user_id]
    column = get_column(original_table, column_name)
    # Count the occurrences of each distinct answer value.
    column_query = select(
        [column, sqlcount(column)]
    ).select_from(table).group_by(column)
    ordering = desc(sqlcount(column)) if count_order else column
    ordered_query = column_query.order_by(ordering)
    result = connection.execute(
        ordered_query.where(and_(*conds)).limit(limit)
    )
    # _return_sql raises the appropriate error when there are no results.
    result = _return_sql(connection, result, question.survey_id, user_id,
                         question_id)
    bar_graph_result = [[_jsonify(connection, r[0], question_id), r[1]]
                        for r in result]
    response = json_response(
        _return_sql(connection, bar_graph_result, question.survey_id,
                    user_id, question_id))
    response['query'] = 'bar_graph'
    return response
def get_number_of_submissions(connection: Connection, survey_id: str) -> int:
    """
    Return the number of submissions for a given survey

    :param connection: a SQLAlchemy Connection
    :param survey_id: the UUID of the survey
    :return: the corresponding number of submissions
    """
    count_query = select([count()]).where(
        submission_table.c.survey_id == survey_id)
    return connection.execute(count_query).scalar()
def time_series(connection: Connection, question_id: str,
                auth_user_id: str=None, email: str=None) -> dict:
    """
    Get a list of submissions to the specified question over time.

    You must provide either an auth_user_id or e-mail address.

    :param connection: a SQLAlchemy Connection
    :param question_id: the UUID of the question
    :param auth_user_id: the UUID of the user
    :param email: the e-mail address of the user.
    :return: a JSON dict containing the result [[times], [values]]
    """
    user_id = _get_user_id(connection, auth_user_id, email)
    allowable_types = {'text', 'integer', 'decimal', 'multiple_choice',
                       'date', 'time', 'location'}
    question = question_select(connection, question_id)
    tcn = _get_type_constraint_name(allowable_types, question)
    # Assume that you only want to consider the non-other answers
    original_table, column_name = _table_and_column(tcn)
    table = original_table.join(
        survey_table,
        original_table.c.survey_id == survey_table.c.survey_id
    ).join(
        submission_table,
        original_table.c.submission_id == submission_table.c.submission_id
    )
    column = get_column(original_table, column_name)
    where_stmt = select(
        [column, submission_table.c.submission_time]
    ).select_from(table).where(
        original_table.c.question_id == question_id
    ).where(
        survey_table.c.auth_user_id == user_id
    )
    # BUG FIX: pass the resolved user_id rather than the raw auth_user_id
    # (which is None when the caller identified the user by e-mail), so
    # _return_sql correctly distinguishes "no submissions" from
    # "not authorized". The second call below already did this.
    result = _return_sql(
        connection,
        connection.execute(where_stmt.order_by('submission_time asc')),
        question.survey_id, user_id, question_id)
    time_series_result = [
        [r.submission_time.isoformat(),
         _jsonify(connection, r[column_name], question_id)]
        for r in result
    ]
    response = json_response(
        _return_sql(connection, time_series_result, question.survey_id,
                    user_id, question_id))
    response['query'] = 'time_series'
    return response
def get_all_dhcp_hosts(connection: Connection) -> Iterable[
        Tuple[netaddr.EUI, netaddr.IPAddress]]:
    """
    Return all DHCP host configurations.

    :param connection: A SQLAlchemy connection
    :return: An iterable that yields (mac, ip)-tuples
    """
    logger.debug("Getting all DHCP hosts")
    rows = connection.execute(
        select([dhcphost.c.MAC, dhcphost.c.IPAddress]))
    return iter(rows)
def check_bra_record_exist(con: Connection, massif: str,
                           bra_date: datetime) -> bool:
    """
    Return whether a BRA record exists for the given massif and
    production date.
    """
    record_query = select([BraRecordTable]).select_from(
        BraRecordTable.join(
            MassifTable,
            MassifTable.c.m_id == BraRecordTable.c.br_massif)
    ).where(and_(
        MassifTable.c.m_name == massif,
        BraRecordTable.c.br_production_date == bra_date,
    ))
    # Wrap in EXISTS so the DB only has to find one matching row.
    row = con.execute(select([exists(record_query)])).first()
    return row[0]
def get_answers_for_question(connection: Connection,
                             question_id: str) -> ResultProxy:
    """
    Get all the records from the answer table identified by question_id.

    :param connection: a SQLAlchemy Connection
    :param question_id: foreign key
    :return: an iterable of the answers (RowProxy)
    """
    query = select([answer_table]).where(
        answer_table.c.question_id == question_id)
    return connection.execute(query)
def get_view_names(self, connection: Connection,
                   schema: Optional[str] = None,
                   **kwargs: Any) -> List[str]:
    """
    Return the names of all valid views.

    :param connection: a SQLAlchemy Connection
    :param schema: unused; accepted for dialect-interface compatibility
    :return: view names, excluding hidden (dot-prefixed) and empty names
    """
    # custom builtin query
    query = "SHOW VALID_VIEWS"
    result = connection.execute(query)
    # BUG FIX: VIEW_NAME[0] raised IndexError for an empty name even though
    # the intent (per the original comment) was to exclude empty names.
    # Check truthiness first and use startswith for the hidden-name test.
    return [
        row.VIEW_NAME
        for row in result
        if row.VIEW_NAME and not row.VIEW_NAME.startswith(".")
    ]
def get_branches(connection: Connection, question_id: str) -> ResultProxy:
    """
    Get all the branches for a question identified by question_id.

    :param connection: a SQLAlchemy Connection
    :param question_id: foreign key
    :return: an iterable of the branches (RowProxy)
    """
    query = select([question_branch_table]).where(
        question_branch_table.c.from_question_id == question_id)
    return connection.execute(query)
def get_all_alternative_dns_ips(connection: Connection) -> Iterable[
        netaddr.IPAddress]:
    """
    Return all IPs for alternative DNS configuration.

    :param connection: A SQLAlchemy connection
    :return: An iterable that yields ip addresses
    """
    logger.debug("Getting all alternative DNS clients")
    rows = connection.execute(select([alternative_dns.c.IPAddress]))
    # Unwrap the single-column rows into bare IP addresses.
    return map(operator.itemgetter(0), rows)
def _delete_sourcesystem_cd(conn: Connection, table: Table,
                            sourcesystem_cd: str) -> int:
    """
    Remove all table records with the supplied sourcesystem code.

    :param conn: sql connection
    :param table: table to modify
    :param sourcesystem_cd: target sourcesystem code
    :return: number of records removed
    """
    if not sourcesystem_cd:
        return 0
    result = conn.execute(
        delete(table).where(table.c.sourcesystem_cd == sourcesystem_cd))
    return result.rowcount
def get_all_alternative_dns_ips(
        connection: Connection) -> Iterable[netaddr.IPAddress]:
    """
    Return all IPs for alternative DNS configuration.

    :param connection: A SQLAlchemy connection
    :return: An iterable that yields ip addresses
    """
    logger.debug("Getting all alternative DNS clients")
    # Each row has a single IPAddress column; unwrap it.
    return map(operator.itemgetter(0),
               connection.execute(select([alternative_dns.c.IPAddress])))
def _delete_upload_id(conn: Connection, table: Table, upload_id: int) -> int:
    """
    Remove all table records with the supplied upload_id.

    :param conn: sql connection
    :param table: table to modify
    :param upload_id: target upload_id
    :return: number of records removed
    """
    # NOTE(review): a falsy upload_id (0 or None) skips deletion entirely —
    # confirm that 0 is never a valid upload_id.
    if not upload_id:
        return 0
    return conn.execute(
        delete(table).where(table.c.upload_id == upload_id)).rowcount
def insert_rayshift_quest_db(
    conn: Connection, quest_id: int, phase: int,
    quest_details: dict[int, QuestDetail]
) -> None:
    """
    Upsert rayshift quest detail rows keyed by queryId: insert new rows
    and overwrite questDetail on conflicting queryIds.
    """
    insert_stmt = insert(rayshiftQuest)
    upsert_stmt = insert_stmt.on_conflict_do_update(
        index_elements=[rayshiftQuest.c.queryId],
        set_={rayshiftQuest.c.questDetail: insert_stmt.excluded.questDetail},
    )
    rows = []
    for query_id, quest_detail in quest_details.items():
        # datetime is not JSON-serializable; store addedTime as ISO text.
        detail_dict = quest_detail.dict()
        detail_dict["addedTime"] = quest_detail.addedTime.isoformat()
        rows.append({
            "queryId": query_id,
            "questId": quest_id,
            "phase": phase,
            "questDetail": detail_dict,
        })
    conn.execute(upsert_stmt, rows)
def get_free_sequence_number(connection: Connection, survey_id: str) -> int:
    """
    Return the highest existing sequence number + 1 (or 1 if there aren't
    any) associated with the given survey_id.

    :param connection: a SQLAlchemy Connection
    :param survey_id: the UUID of the survey
    :return: the free sequence number
    """
    sequence_number = question_table.c.sequence_number
    # BUG FIX: the survey_id parameter was never used, so the query
    # returned the global maximum across all surveys instead of this
    # survey's maximum. Restrict to the given survey.
    return connection.execute(
        select([coalesce(sqlmax(sequence_number, type_=Integer), 0)]).where(
            question_table.c.survey_id == survey_id)
    ).scalar() + 1
def get_all_channels_ordered(db: Connection, user_id: int) -> List[Channel]:
    """
    Get all channels sorted by specified user's order.
    """
    joined = CHANNELS.outerjoin(
        CHANNELS_ORDER, CHANNELS_ORDER.c.channel_id == CHANNELS.c.id)
    query = (
        select(CHANNELS.c)
        .select_from(joined)
        .where(or_(CHANNELS_ORDER.c.user_id == user_id,
                   CHANNELS_ORDER.c.user_id.is_(None)))
        # Channels with no explicit order (NULL) sort after ordered ones.
        .order_by(func.isnull(CHANNELS_ORDER.c.order))
        .order_by(CHANNELS_ORDER.c.order)
    )
    return [map_object(Channel, row) for row in db.execute(query)]
def get_quest_from_ai(conn: Connection, ai_id: int) -> list[StageLink]:
    """
    Return the stage links whose stage script references the given AI
    field id.
    """
    # JSON containment match against the stage script column.
    ai_script_pattern = {"aiFieldIds": [{"id": ai_id}]}
    stmt = select(mstStage.c.questId, mstStage.c.questPhase,
                  mstStage.c.wave).where(
        mstStage.c.script.contains(ai_script_pattern))
    return [
        StageLink(questId=row.questId, phase=row.questPhase, stage=row.wave)
        for row in conn.execute(stmt).fetchall()
    ]
def get_closest_stop_id(
    con: Connection, stop_type: str, lat: float, lon: float, limit: int = 1
) -> List[int]:
    """
    Return the ids of the stops closest to (lat, lon).

    :param con: a SQLAlchemy Connection
    :param stop_type: prefix of the "<stop_type>_stops" table to query;
        must be a trusted value — it names a table and cannot be bound
        as a parameter
    :param lat: latitude of the reference point
    :param lon: longitude of the reference point
    :param limit: maximum number of stop ids to return
    :return: stop ids ordered by squared distance to the reference point
    """
    # SECURITY FIX: coerce the numeric inputs before interpolating them so
    # a malicious string cannot be injected into the SQL text.
    lat, lon, limit = float(lat), float(lon), int(limit)
    query = f"""
    SELECT stop_id
    FROM {stop_type}_stops
    ORDER BY abs(stop_lat - {lat}) ^ 2 + abs(stop_lon - {lon}) ^ 2
    LIMIT {limit}
    """
    return [row["stop_id"] for row in con.execute(query)]
def test_saving_and_reading(repo: SagaDataRepo, connection: Connection,
                            data: PayingForWonItemSagaData,
                            json_repr: dict) -> None:
    """Round-trip saga data through the repo in both directions."""
    saga_uuid = uuid4()
    # Direction 1: insert a raw row, then check that the repo deserializes
    # it back into an object equal to `data`.
    connection.execute(
        saga_data_table.insert(values={
            "uuid": saga_uuid,
            "json": json.dumps(json_repr)
        }))
    assert repo.get(saga_uuid, type(data)) == data
    # Remove the hand-inserted row before testing the save path.
    connection.execute(
        saga_data_table.delete().where(saga_data_table.c.uuid == saga_uuid))
    # Direction 2: save through the repo, then check the stored JSON
    # matches `json_repr`.
    repo.save(saga_uuid, data)
    row = connection.execute(
        saga_data_table.select(saga_data_table.c.uuid == saga_uuid)).first()
    assert json.loads(row.json) == json_repr
def create_temp_copy(connection: Connection, source: Table,
                     destination: Table):
    """
    Create a temporary table as a copy of a source table that will be
    dropped at the end of the running transaction.

    :param connection: DB connection
    :param source: Source table
    :param destination: Destination table
    """
    logger.debug('Creating temporary table "%s" as copy of "%s"',
                 destination.name, source.name)
    # ON COMMIT DROP is a no-op outside a transaction, so refuse to run.
    if not connection.in_transaction():
        raise RuntimeError("must be executed in a transaction to have any "
                           "effect")
    preparer = connection.dialect.identifier_preparer
    ddl = ('CREATE TEMPORARY TABLE {destination} ON COMMIT DROP AS '
           'SELECT * FROM {source}').format(
        source=preparer.format_table(source),
        destination=preparer.format_table(destination),
    )
    connection.execute(ddl)
def get_answers(connection: Connection, submission_id: str) -> ResultProxy:
    """
    Get all the records from the answer table identified by submission_id
    ordered by sequence number.

    :param connection: a SQLAlchemy Connection
    :param submission_id: foreign key
    :return: an iterable of the answers (RowProxy)
    """
    query = select([answer_table]).where(
        answer_table.c.submission_id == submission_id
    ).order_by("sequence_number asc")
    return connection.execute(query)
def create_temp_copy(connection: Connection, source: Table, destination: Table):
    """
    Create a temporary table as a copy of a source table that will be
    dropped at the end of the running transaction.

    :param connection: DB connection
    :param source: Source table
    :param destination: Destination table
    """
    logger.debug('Creating temporary table "%s" as copy of "%s"',
                 destination.name, source.name)
    # A temp table with ON COMMIT DROP is pointless outside a transaction.
    if not connection.in_transaction():
        raise RuntimeError("must be executed in a transaction to have any "
                           "effect")
    quote = connection.dialect.identifier_preparer.format_table
    connection.execute(
        'CREATE TEMPORARY TABLE {destination} ON COMMIT DROP AS '
        'SELECT * FROM {source}'.format(
            source=quote(source),
            destination=quote(destination),
        )
    )
def get_all_dhcp_hosts(
    connection: Connection
) -> Iterable[Tuple[netaddr.EUI, netaddr.IPAddress]]:
    """
    Return all DHCP host configurations.

    :param connection: A SQLAlchemy connection
    :return: An iterable that yields (mac, ip)-tuples
    """
    logger.debug("Getting all DHCP hosts")
    return iter(connection.execute(
        select([dhcphost.c.MAC, dhcphost.c.IPAddress])))
def handler(self, conn: Connection):
    """
    Check the configured table: existence and type, access privileges,
    record count, and required columns; then hand off to TableInspector.

    :param conn: a SQLAlchemy Connection
    """
    # Look the table up in information_schema to see whether it exists
    # (and whether it is a base table or a view).
    table_type = conn.execute(sql.text("""
        SELECT table_type FROM information_schema.tables
        WHERE table_schema = :schema AND table_name = :table
    """), schema=self.schema, table=self.table).scalar()
    if table_type is None:
        self.error(_("Table not found."))
    else:
        self.success(
            _("Table found, table type is {}.").format(table_type))
    # SELECT is required; the write privileges only produce warnings.
    for (priv, req) in (
        ('SELECT', True),
        ('INSERT', False),
        ('UPDATE', False),
        ('DELETE', False),
    ):
        has_privilege = conn.execute(
            sql.text("SELECT has_table_privilege(:qname, :privilege)"),
            qname=self.qname,
            privilege=priv,
        ).scalar()
        if has_privilege:
            self.success(_("{} privilege is present.").format(priv))
        elif not req:
            self.warning(_("{} privilege is absent.").format(priv))
        else:
            self.error(_("{} privilege is absent.").format(priv))
    count = conn.execute(
        select(func.count('*')).select_from(self.sa_table)).scalar()
    self.say(_("Number of records: {}.").format(count))
    # Column-level checks require both configured columns.
    if self.column_id is None or self.column_geom is None:
        self.error(_("ID or geometry column isn't set."))
        return
    self.inject(TableInspector(conn, self.schema, self.table))
def create_channel(db: Connection, device_id: int, channel_uuid: str,
                   channel_type: int=0,
                   channel_name: str='') -> Optional[Channel]:
    """
    Create new channel.
    """
    result = db.execute(insert(CHANNELS).values(
        device_id=device_id,
        # UUIDs are stored as binary; unhex on insert.
        uuid=func.unhex(channel_uuid),
        type=channel_type,
        name=channel_name,
    ))
    return get_channel(db, channel_id=result.lastrowid)
def create_notification(db: Connection, user_id: int, message: str,
                        trigger_id: int = None) -> Optional[int]:
    """
    Create new notification.
    """
    stmt = insert(NOTIFICATIONS).values(
        user_id=user_id,
        trigger_id=trigger_id,
        message=message,
        created=datetime.now(),
    )
    return db.execute(stmt).lastrowid
def get_choices(connection: Connection, question_id: str) -> ResultProxy:
    """
    Get all the choices for a question identified by question_id ordered
    by choice number.

    :param connection: a SQLAlchemy Connection
    :param question_id: foreign key
    :return: an iterable of the choices (RowProxy)
    """
    query = select([question_choice_table]).where(
        question_choice_table.c.question_id == question_id
    ).order_by('choice_number asc')
    return connection.execute(query)
def get_questions_no_credentials(connection: Connection,
                                 survey_id: str) -> ResultProxy:
    """
    Get all the questions for a survey identified by survey_id ordered by
    sequence number.

    :param connection: a SQLAlchemy Connection
    :param survey_id: foreign key
    :return: an iterable of the questions (RowProxy)
    """
    query = select([question_table]).where(
        question_table.c.survey_id == survey_id
    ).order_by('sequence_number asc')
    return connection.execute(query)
def _queryRunner(
        pandasTable: SQLTable, dbConn: Connection,
        columnNameList: Sequence[str], data: Iterable):
    '''
    This handles the fairly rare occurrence of conflicting keys when
    inserting data into a table.
    '''
    # Build the fully-qualified table name when a schema is configured.
    tableFullName = pandasTable.name
    if (pandasTable.schema):
        tableFullName = f'{pandasTable.schema}.{tableFullName}'
    columnNames = ', '.join(columnNameList)
    valuePlaceholders = ', '.join(['%s'] * len(columnNameList))
    # "col=col" makes the ON DUPLICATE KEY UPDATE a no-op, so conflicting
    # rows are silently skipped instead of raising. (MySQL syntax.)
    sql = (
        f'INSERT INTO {tableFullName} ({columnNames}) VALUES ({valuePlaceholders}) '
        f'ON DUPLICATE KEY UPDATE {columnNameList[0]}={columnNameList[0]}'  # magic
    )
    dbConn.execute(sql, list(data))
def create_new_unknown_nivo_sensor_station(
    nivo_id: int, connection: Connection
) -> RowProxy:
    """
    Register a placeholder sensor station for an unrecognised nivo id and
    return the inserted row (only nss_id is selected back).
    """
    station_values = {
        "nss_name": f"UNKNOWN_{nivo_id}",
        "nss_meteofrance_id": nivo_id,
        # Null-island placeholder geometry; real coordinates are unknown.
        "the_geom": "SRID=4326;POINT(0 0 0)",
    }
    stmt = (
        insert(SensorStationTable)
        .values(**station_values)
        .returning(SensorStationTable.c.nss_id)
    )
    return connection.execute(stmt).first()
def get_remapped_stages(
        conn: Connection,
        stage_remaps: Iterable[MstStageRemap]) -> list[MstStage]:
    """
    Look up the stages that the given remap entries point at.

    :param conn: a SQLAlchemy Connection
    :param stage_remaps: remap records; each identifies a target stage by
        (remapQuestId, remapPhase, remapWave)
    :return: the matching mstStage rows as MstStage models
    """
    conditions = [
        and_(
            mstStage.c.questId == remap.remapQuestId,
            mstStage.c.questPhase == remap.remapPhase,
            mstStage.c.wave == remap.remapWave,
        )
        for remap in stage_remaps
    ]
    # or_() with zero clauses is deprecated in SQLAlchemy and yields a
    # degenerate WHERE; short-circuit the empty case instead.
    if not conditions:
        return []
    stmt = select(mstStage).where(or_(*conditions))
    return [MstStage.from_orm(row) for row in conn.execute(stmt).fetchall()]
def another_user(connection: Connection) -> Generator[str, None, None]:
    """
    Fixture: create a throwaway user plus a matching customers row, yield
    the new user's id as a string, then remove both rows on teardown.
    """
    email = "*****@*****.**"
    password = "******"
    # passwords are hashed automagically by Flask-security
    user_result = connection.execute(
        User.__table__.insert(
            {"email": email, "password": password, "active": True}
        )
    )
    new_user_id = user_result.lastrowid
    connection.execute(customers.insert({"id": new_user_id, "email": email}))
    yield str(new_user_id)
    connection.execute(User.__table__.delete(User.email == email))
    connection.execute(customers.delete(customers.c.email == email))
def _get_longterm_ratio(connection: engine.Connection, meta: MetaData,
                        name: str, b_date: date) -> float:
    """
    Compute the long-term liquidity ratio for a bank as of the start of
    the given month.

    :param connection: database connection
    :param meta: database metadata
    :param name: bank name
    :param b_date: reporting date (month)
    :return: the C2_3 value of indicator "Н4" from table f135
    """
    # Reflected f135 table
    f135 = meta.tables['f135']
    # Fetch the single matching indicator row
    stmt = select(f135.c.C2_3).filter(
        and_(f135.c.NAME_B == name,
             f135.c.DT == b_date,
             f135.c.C1_3 == "Н4"))
    row = connection.execute(stmt).fetchone()
    return row[0]
def get_device(db: Connection, device_id: Union[int, str]) -> Optional[Device]:
    """
    Look up a device either by integer primary key or by hex UUID string.
    Returns None for any other argument type or when no row matches.
    """
    if isinstance(device_id, int):
        where_clause = DEVICES.c.id == device_id
    elif isinstance(device_id, str):
        # UUIDs are stored in binary; compare against UNHEX of the string.
        where_clause = DEVICES.c.uuid == func.unhex(device_id)
    else:
        return None
    row = db.execute(
        select(DEVICES.c).select_from(DEVICES).where(where_clause)
    ).fetchone()
    if row is None:
        return None
    return map_object(Device, row)
def auction_model_with_a_bid(connection: Connection,
                             winning_bid_amount: Decimal,
                             bidder_id: int,
                             ends_at: datetime) -> RowProxy:
    """
    Fixture: insert one auction plus a single winning bid, then return the
    auction row as stored in the database.
    """
    auction_row = {
        "id": 1,
        "title": "Cool socks",
        "starting_price": winning_bid_amount / 2,
        "current_price": winning_bid_amount,
        "ends_at": ends_at,
        "ended": False,
    }
    connection.execute(auctions.insert().values(auction_row))
    bid_row = {
        "amount": winning_bid_amount,
        "auction_id": 1,
        "bidder_id": bidder_id,
    }
    connection.execute(bids.insert().values(bid_row))
    return connection.execute(
        auctions.select(whereclause=auctions.c.id == 1)
    ).first()
def get_rayshift_quest_db(
    conn: Connection, quest_id: int, phase: int
) -> Optional[QuestDetail]:
    """
    Load the cached rayshift quest detail for (quest_id, phase).

    Returns None when no row exists or the stored detail is empty.
    """
    query = select(rayshiftQuest.c.questDetail).where(
        and_(
            rayshiftQuest.c.questId == quest_id,
            rayshiftQuest.c.phase == phase,
            rayshiftQuest.c.questDetail.isnot(None),
        )
    )
    row = conn.execute(query).fetchone()
    if not (row and row.questDetail):
        return None
    return QuestDetail.parse_obj(row.questDetail)
def get_multiple_locations(self, con: Connection, dataset: str,
                           locations: Sequence[str],
                           features: Sequence[str]):
    """
    Read the requested location/feature columns from `dataset` as a
    DataFrame ordered by date.

    :param con: a SQLAlchemy Connection
    :param dataset: table or view to query — interpolated into raw SQL,
        so it must come from trusted code, never from user input
    :param locations: location prefixes of the wanted columns
    :param features: feature suffixes of the wanted columns
    :return: DataFrame with the "date" column parsed as datetimes
    """
    # Discover which "<location>.<feature>" columns exist.
    # NOTE(review): this probes nabel/zurich regardless of `dataset` —
    # presumably `dataset` is always derived from these tables; verify.
    cols = set(
        con.execute(
            f'SELECT * FROM nabel LEFT OUTER JOIN zurich LIMIT 1').keys())
    query_cols = ['"date"']
    for l in locations:
        for f in features:
            if f'{l}.{f}' in cols:
                query_cols.append(f'"{l}.{f}"')
            else:
                # fix: logger.warn is a deprecated alias of logger.warning
                self.logger.warning(f'{l} has no feature {f}')
    query = f'SELECT {",".join(query_cols)} FROM {dataset} ORDER BY date'
    return pd.read_sql_query(query, con, parse_dates='date')
def get_svt_id(conn: Connection, col_no: int) -> int:
    """
    Translate a servant collection number into its svt id.

    Returns 0 for collection number 0, and falls back to the collection
    number itself when no matching servant row is found.
    """
    if col_no == 0:
        return 0
    wanted_types = or_(
        mstSvt.c.type == SvtType.HEROINE,
        mstSvt.c.type == SvtType.NORMAL,
        mstSvt.c.type == SvtType.ENEMY_COLLECTION_DETAIL,
    )
    stmt = select(mstSvt.c.id).where(
        and_(mstSvt.c.collectionNo == col_no, wanted_types))
    row = conn.execute(stmt).fetchone()
    if row is None:
        return col_no
    return int(row.id)
def get_all_nas_clients(
    connection: Connection
) -> Iterable[Tuple[str, str, str, int, str, str, str, str]]:
    """
    Return all NAS clients.

    :param connection: A SQLAlchemy connection
    :return: An iterable that yields (shortname, nasname, type, ports,
        secret, server, community, description)-tuples
    """
    wanted_columns = [
        nas.c.ShortName,
        nas.c.NASName,
        nas.c.Type,
        nas.c.Ports,
        nas.c.Secret,
        nas.c.Server,
        nas.c.Community,
        nas.c.Description,
    ]
    return iter(connection.execute(select(wanted_columns)))
def replace_num_sta_by_column_name(line: Dict, con: Connection) -> Dict:
    """
    Resolve the raw meteofrance sensor id in `line` to our internal nss_id,
    creating a placeholder station when none is known.

    You have to know that some station have no id (yes...).
    Mutates and returns `line`.
    """
    nivo_sensor = int(line["nr_nivo_sensor"])
    lookup = select([SensorStationTable.c.nss_id]).where(
        SensorStationTable.c.nss_meteofrance_id == nivo_sensor
    )
    station = con.execute(lookup).first()
    if station is None:
        logger.warning(
            f"No station have been found for id {nivo_sensor} creating an empty one."
        )
        station = create_new_unknown_nivo_sensor_station(nivo_sensor, con)
    line["nr_nivo_sensor"] = station.nss_id
    return line
def get_channel(db: Connection, channel_id: Union[int, str]) -> Optional[Channel]:
    """
    Look up a channel either by integer primary key or by hex UUID string.
    Returns None for any other argument type or when no row matches.
    """
    if isinstance(channel_id, int):
        where_clause = CHANNELS.c.id == channel_id
    elif isinstance(channel_id, str):
        # UUIDs are stored in binary; compare against UNHEX of the string.
        where_clause = CHANNELS.c.uuid == func.unhex(channel_id)
    else:
        return None
    row = db.execute(
        select(CHANNELS.c).select_from(CHANNELS).where(where_clause)
    ).fetchone()
    if row is None:
        return None
    return map_object(Channel, row)
def get_bank_names(connection: engine.Connection, meta: MetaData) -> list:
    """
    Return the distinct bank names stored in the database, sorted
    lexicographically.

    :param connection: database connection
    :param meta: database metadata
    """
    # Reflected req table
    req_table = meta.tables['req']
    rows = connection.execute(select(req_table.c.NAME_B)).fetchall()
    # Flatten the 1-tuples, drop duplicates, and sort.
    return sorted({row[0] for row in rows})
def get_groups(
        connection: Connection,
        mac: netaddr.EUI) -> Iterable[Tuple[netaddr.IPAddress, str, str]]:
    """
    Get the groups of a user.

    :param connection: A SQLAlchemy connection
    :param mac: MAC address
    :return: An iterable that yields
        (NAS-IP-Address, NAS-Port-Id, Group-Name)-tuples
    """
    logger.debug('Getting groups of MAC "%s"', mac)
    stmt = select([
        radusergroup.c.NASIPAddress,
        radusergroup.c.NASPortId,
        radusergroup.c.GroupName,
    ]).where(radusergroup.c.UserName == mac)
    return iter(connection.execute(stmt))