def process_transactions(
        self,
        session: Session,
        processed_transactions: List[ParsedTx],
        db_models: Dict,
        metadata_dictionary: Dict,
    ):
        records: List[Any] = []
        for transaction in processed_transactions:
            instructions = transaction["tx_metadata"]["instructions"]
            meta = transaction["result"]["meta"]
            error = meta["err"]

            # Skip transactions that failed on-chain
            if error:
                self.msg(
                    f"Skipping errored transaction from chain {transaction['tx_sig']}"
                )
                continue

            # Dispatch each instruction to its registered handler, if any
            for instruction in instructions:
                instruction_name = instruction.get("instruction_name")
                if instruction_name in transaction_handlers:
                    transaction_handlers[instruction_name](
                        session,
                        transaction,
                        instruction,
                        db_models,
                        metadata_dictionary,
                        records,
                    )

        self.invalidate_old_records(session, db_models)
        self.msg(f"Saving {records}")
        session.bulk_save_objects(records)
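A note on the API used throughout these examples: session.bulk_save_objects() takes a sequence of mapped instances and emits batched INSERT/UPDATE statements while skipping most unit-of-work bookkeeping (identity map, cascades, events), which is why it suits flat, insert-only records like the ones above. A minimal self-contained sketch, where the Record model and in-memory SQLite engine are illustrative assumptions, not part of the example:

    # Sketch of the bulk_save_objects contract on a throwaway model.
    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()

    class Record(Base):  # hypothetical model for illustration
        __tablename__ = "records"
        id = Column(Integer, primary_key=True)
        payload = Column(String)

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        rows = [Record(payload=f"tx-{i}") for i in range(3)]
        session.bulk_save_objects(rows)  # batched INSERT, no per-object events or cascades
        session.commit()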
Example #2
async def create_Admins(new_admins: List[schemas.WebAdmin], db: Session = Depends(get_db)):
    # build a model instance for each incoming admin schema
    admins = [
        models.WebAdmins(
            name=new_admin.name,
            email_address=new_admin.email_address,
            sites=new_admin.sites,
        )
        for new_admin in new_admins
    ]
    try:
        # add admins to db
        db.bulk_save_objects(admins)
        # commit to DB
        db.commit()
    except sqlalchemy.exc.InvalidRequestError as e:
        # log the exception and reset the failed session
        msg = f'Error committing admins:\n\t{e}'
        log.error(msg, exc_info=True)
        db.rollback()
    except Exception as e:
        # log unknown error and reset the failed session
        msg = f'Unknown exception while committing admins:\n\t{e}'
        log.error(msg, exc_info=True)
        db.rollback()
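The get_db dependency referenced above is not shown; a common FastAPI pattern for it looks roughly like the following (an assumption about this project, not its actual code):

    # Hypothetical get_db dependency; SessionLocal is an assumed
    # sessionmaker() instance configured elsewhere in the project.
    def get_db():
        db = SessionLocal()
        try:
            yield db
        finally:
            db.close()  # always return the connection to the pool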
Example #3
    def create_multi(
            self, db: Session,
            attendances: AttendancesListSchema) -> List[AttendancesModel]:
        list_objects = [
            AttendancesModel(id=uuid.uuid4(),
                             topic_id=attendance["topic_id"],
                             student_id=attendance["student_id"],
                             created_at=datetime.utcnow())
            for attendance in attendances
        ]

        db_objects = [
            self.model(**jsonable_encoder(req_object))
            for req_object in list_objects
        ]
        db.bulk_save_objects(db_objects)
        db.commit()
        # Note: db.refresh() takes a single instance, not a list, and
        # bulk-saved objects are detached; ids were generated client-side above.
        # db.refresh(db_objects)

        return db_objects
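If the caller needs instances that stay attached to the session (for example, to refresh them or traverse relationships later), add_all() is the simpler choice at the cost of per-object bookkeeping; a sketch of that variant:

        # Sketch: add_all keeps the instances in the identity map,
        # so db.refresh(obj) works per object after the commit.
        db.add_all(db_objects)
        db.commit()
        for obj in db_objects:
            db.refresh(obj)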
Example #4
File: dagrun.py  Project: kosteev/airflow
    def _create_task_instances(
        self,
        dag_id: str,
        tasks: Iterable["Operator"],
        created_counts: Dict[str, int],
        hook_is_noop: bool,
        *,
        session: Session,
    ) -> None:
        """
        Create the necessary task instances from the given tasks.

        :param dag_id: DAG ID associated with the dagrun
        :param tasks: the tasks to create the task instances from
        :param created_counts: a mapping of task type to the number of task instances created
        :param hook_is_noop: whether the task_instance_mutation_hook is noop
        :param session: the session to use

        """
        try:
            if hook_is_noop:
                session.bulk_insert_mappings(TI, tasks)
            else:
                session.bulk_save_objects(tasks)

            for task_type, count in created_counts.items():
                Stats.incr(f"task_instance_created-{task_type}", count)
            session.flush()
        except IntegrityError:
            self.log.info(
                'Hit IntegrityError while creating the TIs for %s - %s',
                dag_id,
                self.run_id,
                exc_info=True,
            )
            self.log.info('Doing session rollback.')
            # TODO[HA]: We probably need to savepoint this so we can keep the transaction alive.
            session.rollback()
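The hook_is_noop branch above is the interesting part: bulk_insert_mappings consumes plain dicts and never constructs ORM objects, making it the fastest path, while bulk_save_objects consumes mapped instances, which is required when task_instance_mutation_hook has to mutate each task instance first. A rough sketch of the two call shapes (the dict keys and the tasks/run_id names are illustrative, not Airflow's exact payloads):

        # Fast path: plain dicts, no ORM object construction, no hooks.
        session.bulk_insert_mappings(TI, ({"task_id": t.task_id} for t in tasks))
        # Slower path: real TI instances, so per-instance hooks can run first.
        session.bulk_save_objects(TI(t, run_id=run_id) for t in tasks)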
Example #5
        cookie_name="peanut butter",
        cookie_recipe_url="awesome.cookies.miam/cookie/pnb/recipe.html",
        cookie_sku="PNB01",
        quantity=24,
        unit_cost=0.25,
    )

    oat_cookie = Cookie(
        cookie_name="oatmeal resin",
        cookie_recipe_url="awesome.cookies.miam/cookie/oat/recipe.html",
        cookie_sku="EWW01",
        quantity=100,
        unit_cost=1,
    )

    session.bulk_save_objects([pnb_cookie, oat_cookie])
    session.commit()
    # bulk_save_objects does not fetch server-generated primary keys by
    # default, so both of these print None.
    print(pnb_cookie.cookie_id)
    print(oat_cookie.cookie_id)
    print("")

    # --------------------------------------------------------------------------------
    print("Exemple 7.4")
    print(11 * "-")

    cookies = session.query(Cookie).all()
    print(cookies)
    print("")

    # --------------------------------------------------------------------------------
    print("Exemple 7.5")
Example #6
    def verify_integrity(self, session: Session = NEW_SESSION):
        """
        Verifies the DagRun by checking for removed tasks or tasks that are not in the
        database yet. It will set state to removed or add the task if required.

        :param session: Sqlalchemy ORM Session
        :type session: Session
        """
        from airflow.settings import task_instance_mutation_hook

        dag = self.get_dag()
        tis = self.get_task_instances(session=session)

        # check for removed or restored tasks
        task_ids = set()
        for ti in tis:
            task_instance_mutation_hook(ti)
            task_ids.add(ti.task_id)
            task = None
            try:
                task = dag.get_task(ti.task_id)
            except AirflowException:
                if ti.state == State.REMOVED:
                    pass  # ti has already been removed, just ignore it
                elif self.state != State.RUNNING and not dag.partial:
                    self.log.warning(
                        "Failed to get task '%s' for dag '%s'. Marking it as removed.",
                        ti, dag)
                    Stats.incr(f"task_removed_from_dag.{dag.dag_id}", 1, 1)
                    ti.state = State.REMOVED

            should_restore_task = task is not None and ti.state == State.REMOVED
            if should_restore_task:
                self.log.info(
                    "Restoring task '%s' which was previously removed from DAG '%s'",
                    ti, dag)
                Stats.incr(f"task_restored_to_dag.{dag.dag_id}", 1, 1)
                ti.state = State.NONE
            session.merge(ti)

        def task_filter(task: "BaseOperator"):
            return task.task_id not in task_ids and (
                self.is_backfill or task.start_date <= self.execution_date)

        created_counts: Dict[str, int] = defaultdict(int)

        # Set for the empty default in airflow.settings -- if it's not set this means it has been changed
        hook_is_noop = getattr(task_instance_mutation_hook, 'is_noop', False)

        if hook_is_noop:

            def create_ti_mapping(task: "BaseOperator"):
                created_counts[task.task_type] += 1
                return TI.insert_mapping(self.run_id, task)

        else:

            def create_ti(task: "BaseOperator") -> TI:
                ti = TI(task, run_id=self.run_id)
                task_instance_mutation_hook(ti)
                created_counts[ti.operator] += 1
                return ti

        # Create missing tasks
        tasks = list(filter(task_filter, dag.task_dict.values()))
        try:
            if hook_is_noop:
                session.bulk_insert_mappings(TI, map(create_ti_mapping, tasks))
            else:
                session.bulk_save_objects(map(create_ti, tasks))

            for task_type, count in created_counts.items():
                Stats.incr(f"task_instance_created-{task_type}", count)
            session.flush()
        except IntegrityError as err:
            self.log.info(str(err))
            self.log.info(
                'Hit IntegrityError while creating the TIs for %s - %s',
                dag.dag_id, self.run_id)
            self.log.info('Doing session rollback.')
            # TODO[HA]: We probably need to savepoint this so we can keep the transaction alive.
            session.rollback()
Example #7
    def verify_integrity(self, session: Session = NEW_SESSION):
        """
        Verifies the DagRun by checking for removed tasks or tasks that are not in the
        database yet. It will set state to removed or add the task if required.

        :param session: Sqlalchemy ORM Session
        """
        from airflow.settings import task_instance_mutation_hook

        dag = self.get_dag()
        tis = self.get_task_instances(session=session)

        # check for removed or restored tasks
        task_ids = set()
        for ti in tis:
            task_instance_mutation_hook(ti)
            task_ids.add(ti.task_id)
            task = None
            try:
                task = dag.get_task(ti.task_id)

                should_restore_task = task is not None and ti.state == State.REMOVED
                if should_restore_task:
                    self.log.info(
                        "Restoring task '%s' which was previously removed from DAG '%s'",
                        ti, dag)
                    Stats.incr(f"task_restored_to_dag.{dag.dag_id}", 1, 1)
                    ti.state = State.NONE
            except AirflowException:
                if ti.state == State.REMOVED:
                    pass  # ti has already been removed, just ignore it
                elif self.state != State.RUNNING and not dag.partial:
                    self.log.warning(
                        "Failed to get task '%s' for dag '%s'. Marking it as removed.",
                        ti, dag)
                    Stats.incr(f"task_removed_from_dag.{dag.dag_id}", 1, 1)
                    ti.state = State.REMOVED
                continue

            if not task.is_mapped:
                continue
            task = cast("MappedOperator", task)
            num_mapped_tis = task.parse_time_mapped_ti_count
            # Check if the number of mapped literals has changed and we need to mark this TI as removed
            if num_mapped_tis is not None:
                if ti.map_index >= num_mapped_tis:
                    self.log.debug(
                        "Removing task '%s' as the map_index is longer than the literal mapping list (%s)",
                        ti,
                        num_mapped_tis,
                    )
                    ti.state = State.REMOVED
                elif ti.map_index < 0:
                    self.log.debug(
                        "Removing the unmapped TI '%s' as the mapping can now be performed",
                        ti)
                    ti.state = State.REMOVED
                else:
                    self.log.info("Restoring mapped task '%s'", ti)
                    Stats.incr(f"task_restored_to_dag.{dag.dag_id}", 1, 1)
                    ti.state = State.NONE
            else:
                #  What if it is _now_ dynamically mapped, but wasn't before?
                total_length = task.run_time_mapped_ti_count(self.run_id,
                                                             session=session)

                if total_length is None:
                    # Not all upstreams finished, so we can't tell what should be here. Remove everything.
                    if ti.map_index >= 0:
                        self.log.debug(
                            "Removing the unmapped TI '%s' as the mapping can't be resolved yet",
                            ti)
                        ti.state = State.REMOVED
                    continue
                # Upstreams finished, check there aren't any extras
                if ti.map_index >= total_length:
                    self.log.debug(
                        "Removing task '%s' as the map_index is longer than the resolved mapping list (%d)",
                        ti,
                        total_length,
                    )
                    ti.state = State.REMOVED
                    ...

        def task_filter(task: "Operator") -> bool:
            return task.task_id not in task_ids and (
                self.is_backfill or task.start_date <= self.execution_date and
                (task.end_date is None
                 or self.execution_date <= task.end_date))

        created_counts: Dict[str, int] = defaultdict(int)

        # Set for the empty default in airflow.settings -- if it's not set this means it has been changed
        hook_is_noop = getattr(task_instance_mutation_hook, 'is_noop', False)

        if hook_is_noop:

            def create_ti_mapping(task: "Operator", indexes: Tuple[int, ...]) -> Generator:
                created_counts[task.task_type] += 1
                for map_index in indexes:
                    yield TI.insert_mapping(self.run_id,
                                            task,
                                            map_index=map_index)

            creator = create_ti_mapping

        else:

            def create_ti(task: "Operator", indexes: Tuple[int, ...]) -> Generator:
                for map_index in indexes:
                    ti = TI(task, run_id=self.run_id, map_index=map_index)
                    task_instance_mutation_hook(ti)
                    created_counts[ti.operator] += 1
                    yield ti

            creator = create_ti

        # Create missing tasks -- and expand any MappedOperator that _only_ have literals as input
        def expand_mapped_literals(
                task: "Operator") -> Tuple["Operator", Sequence[int]]:
            if not task.is_mapped:
                return (task, (-1, ))
            task = cast("MappedOperator", task)
            count = task.parse_time_mapped_ti_count or task.run_time_mapped_ti_count(
                self.run_id, session=session)
            if not count:
                return (task, (-1, ))
            return (task, range(count))

        tasks_and_map_idxs = map(expand_mapped_literals,
                                 filter(task_filter, dag.task_dict.values()))
        tasks = itertools.chain.from_iterable(
            itertools.starmap(creator, tasks_and_map_idxs))

        try:
            if hook_is_noop:
                session.bulk_insert_mappings(TI, tasks)
            else:
                session.bulk_save_objects(tasks)

            for task_type, count in created_counts.items():
                Stats.incr(f"task_instance_created-{task_type}", count)
            session.flush()
        except IntegrityError:
            self.log.info(
                'Hit IntegrityError while creating the TIs for %s - %s',
                dag.dag_id,
                self.run_id,
                exc_info=True,
            )
            self.log.info('Doing session rollback.')
            # TODO[HA]: We probably need to savepoint this so we can keep the transaction alive.
            session.rollback()
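The TODO[HA] comment that recurs in these Airflow examples refers to SAVEPOINTs: Session.begin_nested() would let the IntegrityError roll back only the bulk insert while keeping the outer transaction alive. A minimal sketch of that idea (an illustration, not Airflow's actual fix):

        # Sketch only: a SAVEPOINT around the bulk insert means an
        # IntegrityError unwinds just this block, not the whole session.
        try:
            with session.begin_nested():  # emits SAVEPOINT ... RELEASE
                session.bulk_save_objects(tasks)
        except IntegrityError:
            # begin_nested() already rolled back to the savepoint;
            # the enclosing transaction remains usable.
            self.log.info('Skipping duplicate task instances.')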
Example #8
def process_batch_sol_reward_manager_txs(
    session: Session,
    reward_manager_txs: List[RewardManagerTransactionInfo],
    redis: Redis,
):
    """Validates that the transfer instruction is consistent with DB and inserts ChallengeDisbursement DB entries"""
    try:
        logger.error(f"index_reward_manager | {reward_manager_txs}")
        eth_recipients = [
            tx["transfer_instruction"]["eth_recipient"]
            for tx in reward_manager_txs
            if tx["transfer_instruction"] is not None
        ]
        users = (
            session.query(User.wallet, User.user_id)
            .filter(User.wallet.in_(eth_recipients), User.is_current == True)
            .all()
        )
        users_map = {user[0]: user[1] for user in users}

        specifiers = [
            tx["transfer_instruction"]["specifier"]
            for tx in reward_manager_txs
            if tx["transfer_instruction"] is not None
        ]

        user_challenges = (
            session.query(UserChallenge.specifier)
            .filter(UserChallenge.specifier.in_(specifiers))
            .all()
        )
        user_challenge_specifiers = {challenge[0] for challenge in user_challenges}

        challenge_disbursements = []
        for tx in reward_manager_txs:
            # Add transaction
            session.add(
                RewardManagerTransaction(
                    signature=tx["tx_sig"],
                    slot=tx["slot"],
                    created_at=datetime.datetime.utcfromtimestamp(
                        tx["timestamp"]),
                ))
            # No instruction found
            if tx["transfer_instruction"] is None:
                logger.warning(
                    f"index_rewards_manager.py | No transfer instruction found in {tx}"
                )
                continue
            transfer_instr: RewardTransferInstruction = tx["transfer_instruction"]
            specifier = transfer_instr["specifier"]
            eth_recipient = transfer_instr["eth_recipient"]
            if specifier not in user_challenge_specifiers:
                logger.error(
                    f"index_rewards_manager.py | Challenge specifier {specifier} not found "
                    "while processing disbursement")
            if eth_recipient not in users_map:
                logger.error(
                    f"index_rewards_manager.py | eth_recipient {eth_recipient} not found while processing disbursement"
                )
                tx_signature = tx["tx_sig"]
                tx_slot = tx["slot"]
                logger.error(
                    f"index_rewards_manager.py | eth_recipient {eth_recipient} not found processing disbursement "
                    f"tx signature={tx_signature} "
                    f"tx slot={tx_slot} "
                    f"specifier={specifier}")
                # Set this user's id to 0 instead of blocking indexing
                # This state can be rectified asynchronously
                users_map[eth_recipient] = 0

            user_id = users_map[eth_recipient]
            logger.info(
                f"index_rewards_manager.py | found successful disbursement for user_id: [{user_id}]"
            )

            challenge_disbursements.append(
                ChallengeDisbursement(
                    challenge_id=transfer_instr["challenge_id"],
                    user_id=user_id,
                    specifier=specifier,
                    amount=str(transfer_instr["amount"]),
                    slot=tx["slot"],
                    signature=tx["tx_sig"],
                ))

        if challenge_disbursements:
            # Save out the disbursements
            session.bulk_save_objects(challenge_disbursements)
            # Enqueue balance refreshes for the users
            user_ids = [c.user_id for c in challenge_disbursements]
            enqueue_immediate_balance_refresh(redis, user_ids)

    except Exception as e:
        logger.error(f"index_rewards_manager.py | Error processing {e}",
                     exc_info=True)
        raise