Example #1
def associate_actors(actors, CorActor, pk_name, pk_value):
    for actor in actors:
        if not actor['uuid_organism']:
            continue
        statement = pg_insert(Organisme) \
            .values(
                uuid_organisme=actor['uuid_organism'],
                nom_organisme=actor['organism'],
                email_organisme=actor['email'],
            ).on_conflict_do_update(
                index_elements=['uuid_organisme'],
                set_=dict(
                    nom_organisme=actor['organism'],
                    email_organisme=actor['email'],
                ),
            )
        db.session.execute(statement)
        # retrieve organism id
        org = Organisme.query \
                .filter_by(uuid_organisme=actor['uuid_organism']) \
                .first()
        statement = pg_insert(CorActor) \
            .values(
                #id_acquisition_framework=af.id_acquisition_framework,
                id_organism=org.id_organisme,
                id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature(
                    "ROLE_ACTEUR", actor["actor_role"]
                ),
                **{pk_name: pk_value},
            ).on_conflict_do_nothing(
                index_elements=[pk_name, 'id_organism', 'id_nomenclature_actor_role'],
            )
        db.session.execute(statement)
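Note: for reference, the upsert pattern above in a minimal self-contained sketch; the users table and its columns here are hypothetical:

from sqlalchemy import Column, Integer, MetaData, String, Table
from sqlalchemy.dialects.postgresql import insert as pg_insert

metadata = MetaData()
users = Table(
    "users", metadata,
    Column("id", Integer, primary_key=True),
    Column("email", String, unique=True),
    Column("name", String),
)

stmt = pg_insert(users).values(email="a@example.com", name="Alice")
stmt = stmt.on_conflict_do_update(
    index_elements=["email"],           # unique index that triggers the conflict
    set_={"name": stmt.excluded.name},  # take the value from the proposed row
)
# db.session.execute(stmt)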
Example #2
 def insert_clusters(self, clusters: list):
     for cluster in clusters:
         if cluster['id'] is None:
             clust = cluster.copy()
             points = clust.pop('points')
             clust.pop('id')
             ins = pg_insert(self.table_cluster).on_conflict_do_nothing(
                 index_elements=['id']).values(clust)
             res = self.engine.execute(ins)
             clust_id = res.inserted_primary_key[0]
             if clust_id is not None:
                 for i in range(len(points)):
                     points[i]["cluster_id"] = clust_id
                 ins_points = pg_insert(
                     self.table_cluster_point).on_conflict_do_nothing(
                         index_elements=['id']).values(points)
                 self.engine.execute(ins_points)
             else:
                 raise Exception('cluster_id is None!')
         else:
             clust = cluster.copy()
             points = clust.pop('points')
             ins = pg_insert(self.table_cluster).values(clust)
             ins = ins.on_conflict_do_update(
                 index_elements=['id'], set_=clust)
             self.engine.execute(ins)
             ins_points = pg_insert(
                 self.table_cluster_point).on_conflict_do_nothing(
                     index_elements=['id']).values(points)
             self.engine.execute(ins_points)
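Note: res.inserted_primary_key is only meaningful when a row was actually inserted; when ON CONFLICT DO NOTHING skips the row, PostgreSQL returns nothing. A hedged sketch of an explicit alternative using RETURNING, reusing self.table_cluster from this example:

ins = pg_insert(self.table_cluster) \
    .values(clust) \
    .on_conflict_do_nothing(index_elements=['id']) \
    .returning(self.table_cluster.c.id)
clust_id = self.engine.execute(ins).scalar()  # None when the insert was skipped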
Example #3
def sync_af_and_ds():
    cas_api = INPNCAS()
    mtd_api = MTDInstanceApi(
                config["MTD_API_ENDPOINT"],
                config['MTD']['ID_INSTANCE_FILTER'])

    af_list = mtd_api.get_af_list()
    for af in af_list:
        with db.session.begin_nested():
            add_unexisting_digitizer(af['id_digitizer'])
        actors = af.pop('actors')
        statement = (
            pg_insert(TAcquisitionFramework)
            .values(**af)
            .on_conflict_do_update(
                index_elements=['unique_acquisition_framework_id'],
                set_=af)
            .returning(TAcquisitionFramework.id_acquisition_framework)
        )
        af_id = db.session.execute(statement).scalar()
        af = TAcquisitionFramework.query.get(af_id)
        associate_actors(actors, CorAcquisitionFrameworkActor,
                         'id_acquisition_framework', af.id_acquisition_framework)
        # TODO: remove actors removed from MTD
    db.session.commit()

    ds_list = mtd_api.get_ds_list()
    for ds in ds_list:
        with db.session.begin_nested():
            add_unexisting_digitizer(ds['id_digitizer'])
        actors = ds.pop('actors')
        af_uuid = ds.pop('uuid_acquisition_framework')
        af = TAcquisitionFramework.query.filter_by(unique_acquisition_framework_id=af_uuid).first()
        if af is None:
            continue
        ds['id_acquisition_framework'] = af.id_acquisition_framework
        ds = { k: func.ref_nomenclatures.get_id_nomenclature(NOMENCLATURE_MAPPING[k], v)
                  if k.startswith('id_nomenclature') else v
               for k, v in ds.items()
               if v is not None }
        statement = pg_insert(TDatasets) \
            .values(**ds) \
            .on_conflict_do_update(
                index_elements=['unique_dataset_id'],
                set_=ds)
        db.session.execute(statement)
        ds = TDatasets.query \
                .filter_by(unique_dataset_id=ds['unique_dataset_id']) \
                .first()
        associate_actors(actors, CorDatasetActor,
                         'id_dataset', ds.id_dataset)
    db.session.commit()
Example #4
async def _set_skills(conn, contractor_id, skills):
    """
    create missing subjects and qualification levels, then create contractor skills for them.
    """
    if not skills:
        # just delete skills and return
        await conn.execute(sa_con_skills.delete().where(
            sa_con_skills.c.contractor == contractor_id))
        return
    async with conn.begin():
        await conn.execute(
            pg_insert(sa_subjects).values([{
                'id': s.subject_id,
                'name': s.subject,
                'category': s.category
            } for s in _distinct(skills, 'subject_id')
                                           ]).on_conflict_do_nothing())
        await conn.execute(
            pg_insert(sa_qual_levels).values([{
                'id': s.qual_level_id,
                'name': s.qual_level,
                'ranking': s.qual_level_ranking
            } for s in _distinct(skills, 'qual_level_id')
                                              ]).on_conflict_do_nothing())

        con_skills_to_create = {(s.subject_id, s.qual_level_id)
                                for s in skills}

        q = select([
            sa_con_skills.c.id, sa_con_skills.c.subject,
            sa_con_skills.c.qual_level
        ]).where(sa_con_skills.c.contractor == contractor_id)
        to_delete = set()
        async for r in conn.execute(q):
            key = r.subject, r.qual_level
            try:
                con_skills_to_create.remove(key)
            except KeyError:
                # skill doesn't exist in con_skills, it should be deleted
                to_delete.add(r.id)

        to_delete and await conn.execute(sa_con_skills.delete().where(
            sa_con_skills.c.id.in_(to_delete)))

        con_skills_to_create and await conn.execute(
            sa_con_skills.insert().values([
                dict(contractor=contractor_id,
                     subject=subject,
                     qual_level=qual_level)
                for subject, qual_level in con_skills_to_create
            ]))
Example #5
    def process_college(self, college_data):

        college = College()
        college.unitid = college_data['UNITID']
        college.name = college_data['INSTNM']
        college.address1 = college_data['ADDR']
        college.city = college_data['CITY']
        college.state = college_data['STABBR']
        college.phone = college_data['GENTELE']
        college.web_url = college_data['WEBADDR']
        college.admissions_url = college_data['ADMINURL']
        college.netprice_url = college_data['NPRICURL']
        college.sector = college_data['SECTOR']
        college.locale = college_data['LOCALE']
        college.hbcu = college_data['HBCU']
        college.latitude = college_data['LATITUDE']
        college.longitude = college_data['LONGITUD']

        #upsert
        statement = pg_insert(College).values(
            **college.as_dict()).on_conflict_do_update(
                constraint='college_pkey', set_=college.props_dict())
        self.db.execute(statement)
        self.db.commit()

        return college.name
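Note: this example targets the conflict by constraint name ('college_pkey'), whereas Example #1 used index_elements; on_conflict_do_update accepts either. A sketch of both spellings, assuming unitid is the primary key column:

row = college.as_dict()
stmt = pg_insert(College).values(**row)
by_constraint = stmt.on_conflict_do_update(constraint='college_pkey', set_=row)
by_columns = stmt.on_conflict_do_update(index_elements=['unitid'], set_=row)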
Example #6
 async def upsert_service_access_rights(
     self, new_access_rights: List[ServiceAccessRightsAtDB]
 ) -> None:
     # update the services_access_rights table (some might be added/removed/modified)
     for rights in new_access_rights:
         insert_stmt = pg_insert(services_access_rights).values(
             **rights.dict(by_alias=True)
         )
         on_update_stmt = insert_stmt.on_conflict_do_update(
             index_elements=[
                 services_access_rights.c.key,
                 services_access_rights.c.version,
                 services_access_rights.c.gid,
                 services_access_rights.c.product_name,
             ],
             set_=rights.dict(by_alias=True, exclude_unset=True),
         )
         try:
             async with self.db_engine.acquire() as conn:
                 await conn.execute(
                     # pylint: disable=no-value-for-parameter
                     on_update_stmt
                 )
         except ForeignKeyViolation:
             logger.warning(
                 "The service %s:%s is missing from services_meta_data",
                 rights.key,
                 rights.version,
             )
Example #7
 def upsert_records(self, records):
     for record in records:
         statement = pg_insert(Records) \
             .values(**record.as_dict()) \
             .on_conflict_do_update(constraint=Records.PKEY_NAME, set_=record.props_dict())
         self.db.session.execute(statement)
     self.db.session.commit()
Example #8
def upsert_discussions(session: Session,
                       discussions: Iterable[Discussion]) -> None:
    stmt_values = []
    urls = set()
    for d in discussions:
        urls.add(d.url)
        stmt_values.append({
            "external_discussion_id": d.external_id,
            "discussion_source_id": d.source.value,
            "url_uuid": d.url.url_uuid,
            "comment_count": d.comment_count,
            "created_at": d.created_at,
            "title": d.title,
        })
    discussion_count = len(stmt_values)
    log.info("upserting %d discussions across %d urls", discussion_count,
             len(urls))
    if discussion_count == 0:
        # Nothing to upsert
        return
    upsert_urls(session, urls)
    insert_stmt = pg_insert(SQLDiscussion.__table__).values(stmt_values)
    upsert_stmt = insert_stmt.on_conflict_do_update(
        index_elements=["discussion_source_id", "external_discussion_id"],
        set_={
            "url_uuid": insert_stmt.excluded.url_uuid,
            "comment_count": insert_stmt.excluded.comment_count,
            "created_at": insert_stmt.excluded.created_at,
            "title": insert_stmt.excluded.title,
        },
    )
    session.execute(upsert_stmt)
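Note: insert_stmt.excluded refers to the row proposed for insertion, so a single multi-row statement updates each conflicting row with its own incoming values. A minimal sketch of the idiom, with a hypothetical counters table and session:

rows = [{"key": "a", "count": 1}, {"key": "b", "count": 2}]
ins = pg_insert(counters).values(rows)
session.execute(ins.on_conflict_do_update(
    index_elements=["key"],
    set_={"count": ins.excluded.count},  # per-row value from the new data
))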
Example #9
async def test_task_codewars_challenge(client, caplog):
    await tasks.post_slack_codewars_challenge(client.app["slack_client"],
                                              client.app["pg"])

    mocked_request = client.app["slack_client"]._request
    mocked_request.assert_called_once()
    mocked_request_args = mocked_request.call_args.args

    payload = json.loads(mocked_request_args[3])
    assert payload["channel"] == "CEFJ9TJNL"
    assert "No challenge found" in payload["attachments"][0]["text"]

    async with client.app["pg"].acquire() as conn:
        await conn.execute(pg_insert(models.codewars).values(id="foo"))

    client.app["slack_client"]._request.reset_mock()
    await tasks.post_slack_codewars_challenge(client.app["slack_client"],
                                              client.app["pg"])

    mocked_request = client.app["slack_client"]._request
    mocked_request.assert_called_once()
    mocked_request_args = mocked_request.call_args.args

    payload = json.loads(mocked_request_args[3])
    assert payload["channel"] == "CEFJ9TJNL"
    assert payload["attachments"][0][
        "title_link"] == "https://www.codewars.com/kata/foo"
Example #10
def create_default_status_mappings(status_mapping_dict: dict):
    unmapped_statuses = []
    for status, status_category_str in status_mapping_dict.items():
        try:
            status_category = StatusCategory(status_category_str)
        except ValueError:
            current_app.logger.error(
                f"Unable to map status `{status}` to status category `{status_category_str}`: Invalid status category value"
            )
            unmapped_statuses.append(status)
            continue

        # on conflict, assume manually set so ignore
        stmt = (pg_insert(StatusCategoryStatusMapping).values(
            status=status,
            status_category=status_category).on_conflict_do_nothing())
        db.session.execute(stmt)
        db.session.commit()
    current_app.logger.info(
        f"Loaded default status mappings: {status_mapping_dict}")
    if unmapped_statuses:
        msg = f"Unmapped statuses (need manual assignment): {unmapped_statuses}"
        flash(msg, "error")
        current_app.logger.info(msg)
    else:
        flash("All statuses successfully mapped", "success")
Example #11
    async def allowed_email(self, email: str) -> bool:
        # this really should be in the schema validation, but it doesn't support async checks (yet).
        if "@" not in email:
            return False

        _, domain = email.lower().split("@")

        logger.info("Checking if domain %s should be allowed", domain)
        async with self.pg.acquire() as conn:
            row = await conn.fetchrow(
                select([domains.c.blocked, domains.c.domain]).where(domains.c.domain == domain)
            )
            if row is None:
                logger.info("Domain unknown, saving and allowing")
                await conn.fetchrow(
                    pg_insert(domains)
                    .values(domain=domain, blocked=False, source=Source.INVITE)
                    .on_conflict_do_nothing(index_elements=[domains.c.domain])
                )
                return True

        if row[domains.c.blocked.name]:
            logger.info("Domain '%s' on the blocklist, not allowing.", domain)
            return False
        return True
Example #12
 def historize_classification(self, only_qual=None):
     """
        Copy current classification information into history table, for all rows in self.
        :param only_qual: If set, only historize for current rows with this classification.
     """
     # Light up a bit the SQLA expressions
     oh = ObjectHeader
     och = ObjectsClassifHisto
     # What we want to historize, as a subquery
     sel_subqry = select([
         oh.objid, oh.classif_when,
         text("'M'"), oh.classif_id, oh.classif_qual, oh.classif_who
     ])
     if only_qual is not None:
         qual_cond = oh.classif_qual.in_(only_qual)
     else:
         qual_cond = true()
     sel_subqry = sel_subqry.where(
         and_(oh.objid == any_(self.object_ids),
              oh.classif_when.isnot(None), qual_cond))
     # Insert into the log table
     ins_qry: Insert = pg_insert(och.__table__)
     ins_qry = ins_qry.from_select([
         och.objid, och.classif_date, och.classif_type, och.classif_id,
         och.classif_qual, och.classif_who
     ], sel_subqry)
     ins_qry = ins_qry.on_conflict_do_nothing(
         constraint='objectsclassifhisto_pkey')
     logger.info("Histo query: %s",
                 ins_qry.compile(dialect=postgresql.dialect()))
     nb_objs = self.session.execute(ins_qry).rowcount
     logger.info(" %d out of %d rows copied to log", nb_objs,
                 len(self.object_ids))
     return oh
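Note: a condensed sketch of the INSERT ... FROM SELECT upsert used above, with hypothetical src and log tables:

sel = select([src.c.id, src.c.payload]).where(src.c.payload.isnot(None))
ins = pg_insert(log).from_select([log.c.id, log.c.payload], sel)
ins = ins.on_conflict_do_nothing(constraint='log_pkey')  # skip rows already present
session.execute(ins)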
Example #13
 def run(self):
     session = self.session()
     engine = session._WopMarsSession__session.bind
     conn = engine.connect()
     #
     snp2phenotype_path = self.input_file(InsertSNP2Phenotype.__input_file_snp2phenotype)
     snp_model = self.input_table(InsertSNP2Phenotype.__input_table_snp)
     phenotype_model = self.input_table(InsertSNP2Phenotype.__input_table_phenotype)
     snp2phenotype_model = self.output_table(InsertSNP2Phenotype.__output_table_snp2phenotype)
     snp2phenotype_df = pandas.read_table(snp2phenotype_path, header=None)
     #
     # read input file
     input_file_obj_list = []
     for line in csv.reader(open(snp2phenotype_path, 'r', encoding='utf-8'), delimiter="\t"):
         snp_rsid = int(line[0])
         phenotype_name = line[1]
         input_file_obj_list.append({'snp_rsid' : snp_rsid, 'phenotype_name' : phenotype_name})
     #
     # create insert
     snp_select = select([snp_model.id]).where(snp_model.rsid==bindparam('snp_rsid'))
     phenotype_select = select([phenotype_model.id]).where(phenotype_model.name==bindparam('phenotype_name'))
     output_table_insert = insert(table=snp2phenotype_model.__table__, values={'snp_id': snp_select, 'phenotype_id': phenotype_select})
     #
     if len(input_file_obj_list) > 0:
         if str(engine.__dict__['url']).split("://")[0]=='sqlite':
             engine.execute(output_table_insert.prefix_with("OR IGNORE", dialect='sqlite'), input_file_obj_list)
         elif str(engine.__dict__['url']).split("://")[0]=='mysql':
              from warnings import filterwarnings  # three lines to suppress mysql warnings
              import MySQLdb as Database
              filterwarnings('ignore', category=Database.Warning)
              engine.execute(output_table_insert.prefix_with("IGNORE", dialect='mysql'), input_file_obj_list)
         elif str(engine.__dict__['url']).split("://")[0]=='postgresql':
             from sqlalchemy.dialects.postgresql import insert as pg_insert
             output_table_insert_pg = pg_insert(table=snp2phenotype_model.__table__, values={'snp_id': snp_select, 'phenotype_id': phenotype_select}).on_conflict_do_nothing(index_elements=['snp_id', 'phenotype_id'])
             engine.execute(output_table_insert_pg, input_file_obj_list)
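Note: the dialect sniffing via str(engine.__dict__['url']) above works, but engine.dialect.name expresses the same branching more directly; a sketch reusing the names from this example:

dialect = engine.dialect.name
if dialect == 'sqlite':
    engine.execute(output_table_insert.prefix_with("OR IGNORE", dialect='sqlite'), input_file_obj_list)
elif dialect == 'mysql':
    engine.execute(output_table_insert.prefix_with("IGNORE", dialect='mysql'), input_file_obj_list)
elif dialect == 'postgresql':
    engine.execute(output_table_insert_pg, input_file_obj_list)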
Example #14
async def sync_slack_channels(slack_client: SlackAPI,
                              pg: asyncpg.pool.Pool) -> None:
    logger.debug("Refreshing slack channels cache.")

    try:
        async with pg.acquire() as conn:
            async for channel in slack_client.iter(
                    slack.methods.CONVERSATIONS_LIST):
                values = {
                    "name": channel["name"],
                    "created":
                    datetime.datetime.fromtimestamp(channel["created"]),
                    "archived": channel["is_archived"],
                    "members": channel["num_members"],
                    "topic": channel["topic"]["value"],
                    "purpose": channel["purpose"]["value"],
                }
                await conn.execute(
                    pg_insert(models.SlackChannels).values(
                        id=channel["id"], **values).on_conflict_do_update(
                            index_elements=[models.SlackChannels.c.id],
                            set_=values))
    except asyncio.CancelledError:
        logger.debug("Slack channels cache refresh canceled")
    except Exception:  # pylint: disable=broad-except
        logger.exception("Error refreshing slack channels cache")
Example #15
 def load_projects(self):
     return_url = get_redirect_target() or self.get_url(".index_view")
     try:
         current_app.logger.info("Attempting to connect to jira")
         jira = jira_core.connect()
     except Exception as e:
         status = "Jira not configured"
         current_app.logger.error(f"{status}: {e}")
         flash(status, "error")
     else:
         if jira is not None:
             # load projects
             projects = jira.projects()
             for project in projects:
                 # create or update in case of change in Jira.
                 stmt = (pg_insert(JiraProject).values(
                     project_key=project.key,
                     project_name=project.name).on_conflict_do_update(
                         constraint="uq_jira_projects_project_key",
                         set_=dict(project_name=project.name),
                     ))
                 db.session.execute(stmt)
                 db.session.commit()
             status = f"{len(projects)} project/s loaded successfully."
             current_app.logger.info(f"{status}")
             flash(status, "success")
         else:
             flash("Jira failed to connect", "error")
     return redirect(return_url)
Example #16
        def from_csv(table):
            filepath = os.path.join(dirpath, table.name + ".csv")
            with io.open(filepath, mode="r", encoding="utf-8") as csv_file:
                csv_reader = csv.reader(csv_file)
                header = next(csv_reader)
                conn = self.db.connect()
                with self.lock, conn.begin():
                    for row in csv_reader:
                        values = dict(zip(header, row))

                        if self.engine_dialect_is(self.DIALECT_SQLITE):
                            identifier = values[table.c.id.name]
                            # try to update entry
                            stmt = update(table).values(values).where(
                                table.c.id == identifier)
                            if conn.execute(stmt).rowcount == 0:
                                # identifier doesn't match any => insert new entry
                                stmt = insert(table).values(values)
                                conn.execute(stmt)
                        elif self.engine_dialect_is(self.DIALECT_POSTGRESQL):
                            stmt = pg_insert(table).values(values)\
                                .on_conflict_do_update(index_elements=[table.c.id], set_=values)
                            conn.execute(stmt)

                    if self.engine_dialect_is(self.DIALECT_POSTGRESQL):
                        # update sequence
                        sql = "SELECT setval('{table}_id_seq', max(id)) FROM {table}".format(
                            table=table.name)
                        conn.execute(text(sql))
                conn.close()
Example #17
 def run(self):
     session = self.session()
     engine = session._WopMarsSession__session.bind
     conn = engine.connect()
     #
     snp_path = self.input_file(InsertSnp.__input_file_snp)
     snp_model = self.output_table(InsertSnp.__output_table_snp)
     #
     # read input file
     input_file_obj_list = []
     for line in csv.reader(open(snp_path, 'r', encoding='utf-8'), delimiter="\t"):
         chrom = int(line[0])
         position = int(line[1])
         rsid = int(line[2])
         input_file_obj = {'chrom': chrom, 'position': position, 'rsid': rsid}
         input_file_obj_list.append(input_file_obj)
     #
     # insert input_file_obj_list
     if len(input_file_obj_list) > 0:
         if str(engine.__dict__['url']).split("://")[0]=='sqlite':
             engine.execute(snp_model.__table__.insert().prefix_with("OR IGNORE"), input_file_obj_list)
         elif str(engine.__dict__['url']).split("://")[0]=='mysql':
              from warnings import filterwarnings  # three lines to suppress mysql warnings
              import MySQLdb as Database
              filterwarnings('ignore', category=Database.Warning)
              engine.execute(snp_model.__table__.insert().prefix_with("IGNORE"), input_file_obj_list)
         elif str(engine.__dict__['url']).split("://")[0]=='postgresql':
             from sqlalchemy.dialects.postgresql import insert as pg_insert
             engine.execute(pg_insert(snp_model.__table__).on_conflict_do_nothing(index_elements=['rsid']), input_file_obj_list)
         else:
             raise "Error: This engine is not implemented."
Example #18
        def from_csv(table):
            filepath = os.path.join(dirpath, table.name + ".csv")
            with io.open(filepath, mode="r", encoding="utf-8") as csv_file:
                csv_reader = csv.reader(csv_file)
                header = next(csv_reader)
                with self.lock, self.conn.begin():
                    for row in csv_reader:
                        values = dict(zip(header, row))

                        if self.engine.dialect.name == self.DIALECT_SQLITE:
                            identifier = values[table.c.id.name]
                            # try to update entry
                            stmt = update(table).values(values).where(
                                table.c.id == identifier)
                            if self.conn.execute(stmt).rowcount == 0:
                                # identifier doesn't match any => insert new entry
                                stmt = insert(table).values(values)
                                self.conn.execute(stmt)
                        elif self.engine.dialect.name == self.DIALECT_POSTGRESQL:
                            stmt = pg_insert(table).values(values)\
                                .on_conflict_do_update(index_elements=[table.c.id], set_=values)
                            self.conn.execute(stmt)

                    if self.DIALECT_POSTGRESQL == self.engine.dialect.name:
                        # update sequences
                        self.conn.execute(
                            text(
                                "SELECT setval('profiles_id_seq', max(id)) FROM profiles"
                            ))
                        self.conn.execute(
                            text(
                                "SELECT setval('spools_id_seq', max(id)) FROM spools"
                            ))
Example #19
def postgres_upsert(engine, connection, table, values, if_row_exists):
    """
    Prepares and executes a PostgreSQL INSERT ON CONFLICT...DO NOTHING
    or DO UPDATE statement via sqlalchemy.dialects.postgresql.insert

    Parameters
    ----------
    engine : sqlalchemy.engine.base.Engine
    connection : sqlalchemy.engine.base.Connection
    table : sqlalchemy.sql.schema.Table
    values : list of dict
    if_row_exists : {'update', 'ignore'}
        * If 'update' issues a ON CONFLICT...DO UPDATE statement
        * If 'ignore' issues a ON CONFLICT...DO NOTHING statement
    """
    insert_stmt = pg_insert(table).values(values)
    if if_row_exists == 'ignore':
        upsert = insert_stmt.on_conflict_do_nothing()
    elif if_row_exists == 'update':
        update_cols = [
            c.name for c in table.c if c not in list(table.primary_key.columns)
        ]
        upsert = insert_stmt.on_conflict_do_update(
            index_elements=table.primary_key.columns,
            set_={k: getattr(insert_stmt.excluded, k)
                  for k in update_cols})
    # execute upsert
    connection.execute(upsert)
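Note: a hypothetical invocation of the helper above; my_table, the engine, and the rows are assumptions:

with engine.connect() as connection:
    postgres_upsert(
        engine, connection, my_table,
        values=[{"id": 1, "name": "a"}, {"id": 2, "name": "b"}],
        if_row_exists='update',
    )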
Example #20
def _upsert(table: sqa.Table, engine: sqa.engine, clean_data: pd.DataFrame):
    """
    insert data into a table, replacing any duplicate indices
    postgres - see: https://docs.sqlalchemy.org/en/13/dialects/postgresql.html#insert-on-conflict-upsert
    """
    # print(engine.dialect.dbapi.__name__)
    with engine.begin() as con:
        for index, row in clean_data.iterrows():
            # check index uniqueness by attempting insert; if it fails, update
            row = {**row.dropna().to_dict(), clean_data.index.name: index}
            try:
                if engine.dialect.dbapi.__name__ == 'psycopg2':
                    insert = pg_insert(table).values(
                        row).on_conflict_do_update(
                            index_elements=[clean_data.index.name], set_=row)
                else:
                    insert = table.insert().values(row)

                con.execute(insert)

            except sqa.exc.IntegrityError:
                upsert = table.update() \
                    .where(table.c[clean_data.index.name] == index) \
                    .values(row)
                con.execute(upsert)
Example #21
 def upsert_matchups(self, matchups):
     for m in matchups:
         statement = pg_insert(Matchups) \
             .values(**m.as_dict()) \
             .on_conflict_do_update(constraint=Matchups.PKEY_NAME, set_=m.props_dict())
         self.db.session.execute(statement)
     self.db.session.commit()
Example #22
    async def create_service(
        self,
        new_service: ServiceMetaDataAtDB,
        new_service_access_rights: List[ServiceAccessRightsAtDB],
    ) -> ServiceMetaDataAtDB:

        for access_rights in new_service_access_rights:
            if (
                access_rights.key != new_service.key
                or access_rights.version != new_service.version
            ):
                raise ValueError(
                    f"{access_rights} does not correspond to service {new_service.key}:{new_service.version}"
                )

        async with self.db_engine.acquire() as conn:
            # NOTE: this ensure proper rollback in case of issue
            async with conn.begin() as _transaction:
                row: RowProxy = await (
                    await conn.execute(
                        # pylint: disable=no-value-for-parameter
                        services_meta_data.insert()
                        .values(**new_service.dict(by_alias=True))
                        .returning(literal_column("*"))
                    )
                ).first()
                created_service = ServiceMetaDataAtDB(**row)

                for access_rights in new_service_access_rights:
                    insert_stmt = pg_insert(services_access_rights).values(
                        **access_rights.dict(by_alias=True)
                    )
                    await conn.execute(insert_stmt)
        return created_service
Example #23
 def gen_and_save_token(self, user_name):
     """
     Generate unique token for each user.
     Insert or update new token table object.
     :param user_name: target username
     :return: :type: str
     """
     user_id = self.check_user_exist(user_name)
     if user_id:
         user_token = creds.gen_uid()  # generate unique token
         user_token_obj = {
             'user_id': user_id,
             'token': user_token
         }  # basic input save dict
         exclude_key = ['user_id']  # exclude keys
         exclude_obj = [dbm.UserAuth.user_id]  # exclude keys as objects
         # make input dict with exclude keys
         user_token_obj_conflict = {
             k: v
             for k, v in user_token_obj.items() if k not in exclude_key
         }
         # insert or update statement
         statement = pg_insert(
             dbm.UserAuth).values(user_token_obj).on_conflict_do_update(
                 index_elements=exclude_obj, set_=user_token_obj_conflict)
         # execute changes
         self.engine.execute(statement)
         # save changes
         self.session.commit()
         # return new token
         return user_token
     else:
         # return if user not found
         return False
Example #24
 def upsert_teams(self, teams):
     for team in teams:
         statement = pg_insert(Teams) \
             .values(**team.as_dict()) \
             .on_conflict_do_update(constraint=Teams.PKEY_NAME, set_=team.props_dict())
         self.db.session.execute(statement)
     self.db.session.commit()
Example #25
def upsert(table: Table,
           data: dict,
           update_on_conflict: bool = False) -> Insert:
    insert_data = pg_insert(table).values(**data)
    if update_on_conflict:
        return insert_data.on_conflict_do_update(constraint=table.primary_key,
                                                 set_=data)
    return insert_data.on_conflict_do_nothing(constraint=table.primary_key)
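Note: the helper returns an unexecuted Insert, so callers run it themselves; a usage sketch with a hypothetical table and session:

stmt = upsert(my_table, {"id": 7, "name": "x"}, update_on_conflict=True)
session.execute(stmt)
session.commit()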
Example #26
async def appointment_webhook(request):
    apt_id = request.match_info['id']
    appointment: AppointmentModel = request['model']

    conn = await request['conn_manager'].get_connection()
    v = await conn.execute(select([ser_c.company]).where(ser_c.id == appointment.service_id))
    r = await v.first()
    if r and r.company != request['company'].id:
        raise HTTPConflictJson(
            status='service conflict',
            details=f'service {appointment.service_id} already exists and is associated with another company',
        )

    service_insert_update = dict(
        name=appointment.service_name,
        colour=appointment.colour,
        extra_attributes=[
            ea.dict(exclude={'sort_index'}) for ea in sorted(appointment.extra_attributes, key=attrgetter('sort_index'))
        ],
    )

    await conn.execute(
        pg_insert(sa_services)
        .values(id=appointment.service_id, company=request['company'].id, **service_insert_update)
        .on_conflict_do_update(
            index_elements=[ser_c.id],
            where=ser_c.id == appointment.service_id,
            set_=service_insert_update,
        )
    )
    apt_insert_update = appointment.dict(
        include={'attendees_max', 'attendees_count', 'attendees_current_ids', 'start', 'finish', 'price', 'location'}
    )
    apt_insert_update['topic'] = appointment.appointment_topic

    await conn.execute(
        pg_insert(sa_appointments)
        .values(id=apt_id, service=appointment.service_id, **apt_insert_update)
        .on_conflict_do_update(
            index_elements=[apt_c.id],
            where=apt_c.id == apt_id,
            set_=apt_insert_update,
        )
    )
    return json_response(request, status='success')
Example #27
    def wallet_donate(
            cls,
            db: Session,
            idempotency_key: uuid.UUID,
            request: WalletDonateRequest,
    ) -> WalletResponse:
        try:
            db_wallet = (
                db.query(Wallet)
                .filter(Wallet.wallet_id == request.wallet_id)
                .with_for_update()
                .first()
            )
            if not db_wallet:
                raise HTTPException(
                    status_code=404,
                    detail='Wallet is not found',
                )

            utcnow = datetime.datetime.utcnow()

            db_wallet.amount = db_wallet.amount + request.amount
            db_wallet.updated_at = utcnow
            db.add(db_wallet)

            insert_transaction = (
                pg_insert(Transaction)
                .values(
                    idempotency_key=idempotency_key,
                    amount=request.amount,
                    to_wallet_id=db_wallet.wallet_id,
                    to_wallet_amount=db_wallet.amount,
                    created_at=utcnow,
                )
                .returning(Transaction.to_wallet_amount)
            )
            db.execute(insert_transaction).first()

            db.commit()

            return WalletResponse(
                wallet_id=db_wallet.wallet_id,
                amount=db_wallet.amount,
            )
        except exc.IntegrityError:
            db.rollback()

            existed_transaction = (
                db.query(Transaction)
                .filter(Transaction.idempotency_key == idempotency_key)
                .first()
            )

            return WalletResponse(
                wallet_id=existed_transaction.to_wallet_id,
                amount=existed_transaction.to_wallet_amount,
            )
Example #28
    async def checkout(self, repo_id: int, commit_id: int) -> int:
        """checks out working copy of project_uuid to commit ref_id

        :raises RuntimeError: if local copy has changes (i.e. dirty)
        :return: commit id
        :rtype: int
        """
        async with self.engine.acquire() as conn:
            repo, head_commit, _ = await self._update_state(repo_id, conn)

            if head_commit is None:
                raise NoCommitError(
                    details="Cannot checkout without commit changes first")

            # check if working copy has changes, if so, fail
            if repo.project_checksum != head_commit.snapshot_checksum:
                raise CleanRequiredError(
                    details=
                    "Your local changes would be overwritten by checkout. "
                    "Cannot checkout without commit changes first.")

            # already in head commit
            if head_commit.id == commit_id:
                return commit_id

            async with conn.begin():
                commit = (await self.CommitsOrm(conn).set_filter(
                    id=commit_id).fetch("snapshot_checksum"))
                assert commit  # nosec

                # restores project snapshot
                snapshot = (await self.SnapshotsOrm(conn).set_filter(
                    commit.snapshot_checksum).fetch("content"))
                assert snapshot  # nosec

                await self.ProjectsOrm(conn).set_filter(
                    uuid=repo.project_uuid).update(**snapshot.content)

                # create detached branch that points to (repo_id, commit_id)
                # upsert "detached" branch
                insert_stmt = (pg_insert(projects_vc_branches).values(
                    repo_id=repo_id,
                    head_commit_id=commit_id,
                    name="DETACHED",
                ).returning(projects_vc_branches.c.id))
                upsert_tag = insert_stmt.on_conflict_do_update(
                    constraint="repo_branch_uniqueness",
                    set_=dict(
                        head_commit_id=insert_stmt.excluded.head_commit_id),
                )
                branch_id = await conn.scalar(upsert_tag)

                # updates head
                await self.HeadsOrm(conn).set_filter(repo_id=repo_id).update(
                    head_branch_id=branch_id)

        return commit_id
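Note: the DETACHED-branch upsert above pairs RETURNING with ON CONFLICT DO UPDATE, so conn.scalar() yields the branch id whether the row was freshly inserted or updated (RETURNING produces no row only under DO NOTHING). A condensed sketch of the idiom, inside an async connection as in this example:

ins = pg_insert(projects_vc_branches).values(
    repo_id=repo_id, head_commit_id=commit_id, name="DETACHED",
).returning(projects_vc_branches.c.id)
branch_id = await conn.scalar(ins.on_conflict_do_update(
    constraint="repo_branch_uniqueness",
    set_=dict(head_commit_id=ins.excluded.head_commit_id),
))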
Example #29
    async def grant_rights_to_services(services: List[Tuple[str, str]]) -> None:
        for service_key, service_version in services:
            metada_data_values = dict(
                key=service_key,
                version=service_version,
                owner=1,  # a user is required in the database for ownership of metadata
                name="",
                description=f"OVERRIDEN BY TEST in {__file__}",
            )

            access_rights_values = dict(
                key=service_key,
                version=service_version,
                gid=1,
                execute_access=True,
                write_access=True,
            )

            async with aiopg_engine.acquire() as conn:
                # pylint: disable=no-value-for-parameter
                await conn.execute(
                    pg_insert(services_meta_data)
                    .values(**metada_data_values)
                    .on_conflict_do_update(
                        index_elements=[
                            services_meta_data.c.key,
                            services_meta_data.c.version,
                        ],
                        set_=metada_data_values,
                    )
                )
                await conn.execute(
                    pg_insert(services_access_rights)
                    .values(**access_rights_values)
                    .on_conflict_do_update(
                        index_elements=[
                            services_access_rights.c.key,
                            services_access_rights.c.version,
                            services_access_rights.c.gid,
                            services_access_rights.c.product_name,
                        ],
                        set_=access_rights_values,
                    )
                )
Example #30
def _upsert(table: sqa.Table,
            engine: sqa.engine,
            cleaned_data: pd.DataFrame):
    """
    insert data into a table, replacing any rows with duplicate indices

    When upsert finds duplicate records, it overwrites ALL VALUES that are present in source DataFrame, including NaN.

    postgres - see: https://docs.sqlalchemy.org/en/13/dialects/postgresql.html#insert-on-conflict-upsert
    """
    if isinstance(cleaned_data.index, pd.MultiIndex):
        multi = True
        names = cleaned_data.index.names
        index_elements = names
    else:
        multi = False
        names = cleaned_data.index.name
        index_elements = [names]

    cleaned_data = cleaned_data.astype('object')

    def map2none(val):
        """sqlalchemy freaks out about NaNs, so replace them with None"""
        if pd.notna(val):
            return val

    with engine.begin() as con:
        for row in cleaned_data.reset_index(drop=False).itertuples(index=False):
            # check index uniqueness by attempting insert; if it fails, update
            row = {k: map2none(v) for k, v in row._asdict().items()}
            try:

                if engine.dialect.dbapi.__name__ == 'psycopg2':
                    insert = pg_insert(table).values(row).on_conflict_do_update(
                        index_elements=index_elements,
                        set_=row
                    )
                else:
                    insert = table.insert().values(row)

                con.execute(insert)

            except sqa.exc.IntegrityError:
                if multi:
                    upsert = table.update()
                    for n in names:
                        upsert = upsert.where(table.c[n] == row[n])
                    con.execute(upsert.values(row))
                else:
                    upsert = table.update() \
                        .where(table.c[names] == row[names]) \
                        .values(row)
                    con.execute(upsert)
Example #31
def upsert(table: Table, data: dict, update_on_conflict: bool = False) -> Insert:
    insert_data = pg_insert(table).values(
        **data
    )
    if update_on_conflict:
        return insert_data.on_conflict_do_update(
            constraint=table.primary_key,
            set_=data
        )
    return insert_data.on_conflict_do_nothing(
        constraint=table.primary_key
    )
Example #32
 def update_selection(self, identifier, client_id, data):
     with self.lock, self.conn.begin():
         values = dict()
         if self.engine.dialect.name == self.DIALECT_SQLITE:
             stmt = insert(self.selections).prefix_with("OR REPLACE")\
                 .values(tool=identifier, client_id=client_id, spool_id=data["spool"]["id"])
         elif self.engine.dialect.name == self.DIALECT_POSTGRESQL:
             stmt = pg_insert(self.selections)\
                 .values(tool=identifier, client_id=client_id, spool_id=data["spool"]["id"])\
                 .on_conflict_do_update(constraint="selections_pkey", set_=dict(spool_id=data["spool"]["id"]))
         self.conn.execute(stmt)
     return self.get_selection(identifier, client_id)
Example #33
def test_insert_on_conflict_renders(connection):
    statement = pg_insert(User).values(name='John',
                                       fullname="John Doe",
                                       password='******')
    statement = statement.on_conflict_do_nothing()

    connection.execute(statement)

    assert len(xray_recorder.current_segment().subsegments) == 1
    sql_meta = xray_recorder.current_segment().subsegments[0].sql

    assert sql_meta['sanitized_query'].startswith('INSERT INTO users')
    assert 'ON CONFLICT DO NOTHING' in sql_meta['sanitized_query']