def refresh_token(subject, session):
    """
    Refreshes the OAuth token pair for the user identified by `subject`
    and stores the new access/refresh tokens and expiry on the User row.

    Returns without side effects if no active user matches the subject.

    :param str subject:
    :param sqlalchemy.orm.Session session:
    """
    __log_extra = {
        'subject': subject,
        'pipeline': 'refresh_token',
        'task': 'refresh_token',
    }

    try:
        user = UserQuery(session) \
            .is_active() \
            .has_sub(subject) \
            .one()
    except orm.exc.NoResultFound:
        logger.info('Could not refresh token: User not found',
                    extra=__log_extra)
        return

    token = backend.refresh_token(user.refresh_token)

    user.access_token = token['access_token']
    user.refresh_token = token['refresh_token']

    # BUGFIX: fromtimestamp() without a tz argument converts the UNIX
    # timestamp to *local* time; the old naive-then-replace(tzinfo=utc)
    # pattern mislabeled that local time as UTC on non-UTC hosts.
    # Passing tz= interprets the timestamp directly as UTC.
    user.token_expire = datetime.fromtimestamp(
        token['expires_at'], tz=timezone.utc)
def save_imported_meteringpoints(user, response, session):
    """
    Creates MeteringPoints imported from DataHubService in the database.
    If they already exists, updates their type (consumption or production).

    :param origin.auth.User user:
    :param origin.services.datahub.GetMeteringPointsResponse response:
    :param sqlalchemy.orm.Session session:
    :rtype: list[MeteringPoint]
    """
    created = []

    for imported in response.meteringpoints:
        existing = MeteringPointQuery(session) \
            .has_gsrn(imported.gsrn) \
            .one_or_none()

        # Translate DataHub's type enum onto our own MeteringPointType
        if imported.type is DataHubMeteringPointType.PRODUCTION:
            mapped_type = MeteringPointType.PRODUCTION
        elif imported.type is DataHubMeteringPointType.CONSUMPTION:
            mapped_type = MeteringPointType.CONSUMPTION
        else:
            raise RuntimeError('Should NOT have happened!')

        if existing is None:
            # New to us - create it (persisted via add_all/flush below)
            created.append(MeteringPoint.create(
                user=user,
                gsrn=imported.gsrn,
                sector=imported.sector,
                type=mapped_type,
                session=session,
            ))
        else:
            logger.info(
                f'MeteringPoint {imported.gsrn} already exists in DB (updating type)',
                extra={
                    'subject': user.sub,
                    'gsrn': imported.gsrn,
                    'type': imported.type.value,
                    'pipeline': 'import_meteringpoints',
                    'task': 'import_meteringpoints_and_insert_to_db',
                })
            existing.type = mapped_type

    session.add_all(created)
    session.flush()

    return created
예제 #3
0
    def handle_request(self, request, session):
        """
        Imports a single MeteringPoint announced via webhook, unless it is
        already known, then forwards its ledger key to DataHubService.

        :param OnMeteringPointAvailableWebhookRequest request:
        :param sqlalchemy.orm.Session session:
        :rtype: bool
        """
        gsrn = request.meteringpoint.gsrn

        user = UserQuery(session) \
            .is_active() \
            .has_sub(request.sub) \
            .one_or_none()

        # Without a known, active user we can not import anything
        if user is None:
            logger.error(
                f'Can not import MeteringPoint (user not found in DB)',
                extra={
                    'subject': request.sub,
                    'gsrn': gsrn,
                })
            return False

        # Already imported earlier - nothing left to do
        if self.meteringpoint_exists(gsrn, session):
            logger.info(
                f'MeteringPoint {gsrn} already exists in DB, skipping...',
                extra={
                    'subject': user.sub,
                    'gsrn': gsrn,
                })
            return True

        # Insert new MeteringPoint in to DB
        meteringpoint = self.create_meteringpoint(user, request.meteringpoint)

        logger.info(f'Imported MeteringPoint with GSRN: {meteringpoint.gsrn}',
                    extra={
                        'subject': user.sub,
                        'gsrn': meteringpoint.gsrn,
                        'type': meteringpoint.type.value,
                        'meteringpoint_id': meteringpoint.id,
                    })

        # Hand the new MeteringPoint's ledger key over to DataHubService
        start_send_key_to_datahub_service(user.sub, meteringpoint.gsrn)

        return True
예제 #4
0
    def import_ggos(self, user, gsrn, begin_from, begin_to, session):
        """
        Imports GGOs for a GSRN from DataHub within the given begin-interval
        and inserts the ones not already present into the database.

        :param User user:
        :param str gsrn:
        :param datetime.datetime begin_from:
        :param datetime.datetime begin_to:
        :param sqlalchemy.orm.Session session:
        :rtype: list[Ggo]
        """
        log_extra = {
            'gsrn': gsrn,
            'subject': user.sub,
            'begin_from': str(begin_from),
            'begin_to': str(begin_to),
            'pipeline': 'import_ggos',
            'task': 'import_ggos_and_insert_to_db',
        }

        logger.info(f'Importing GGOs for GSRN: {gsrn}', extra=log_extra)

        new_ggos = []

        # Fetch from DataHub, map each entry to our Ggo type,
        # and insert only those not already present in the database
        for imported in self.fetch_ggos(user, gsrn, begin_from, begin_to):
            ggo = self.map_imported_ggo(user, imported)
            if self.ggo_exists(ggo.address, session):
                continue
            session.add(ggo)
            session.flush()
            new_ggos.append(ggo)

        logger.info(f'Imported {len(new_ggos)} GGOs for GSRN: {gsrn}',
                    extra=log_extra)

        return new_ggos
예제 #5
0
def submit_batch_to_ledger(task, subject, batch_id, session):
    """
    Submits a Batch to the ledger and returns the ledger handle.

    Retries (via Celery) on transient failures: database errors while
    loading the batch, ledger connection errors, transient validator
    errors (codes 15/17/18), and a full ledger queue (code 31).
    A missing batch or any other ledger error is re-raised as permanent.

    :param celery.Task task:
    :param str subject:
    :param int batch_id:
    :param sqlalchemy.orm.Session session:
    """
    __log_extra = {
        'subject': subject,
        'batch_id': str(batch_id),
        'pipeline': 'submit_batch_to_ledger',
        'task': 'submit_to_ledger',
    }

    # Get Batch from DB
    try:
        batch = session \
            .query(Batch) \
            .filter(Batch.id == batch_id) \
            .one()
    except orm.exc.NoResultFound:
        # Missing batch is permanent - retrying would not help
        raise
    except Exception as e:
        # Transient DB error: log before retrying, consistent with
        # poll_batch_status() which logs in the same situation
        logger.exception('Failed to load Batch from database', extra=__log_extra)
        raise task.retry(exc=e)

    # Submit batch to ledger
    try:
        handle = ledger.execute_batch(batch.build_ledger_batch())
    except ols.LedgerConnectionError as e:
        logger.exception('Failed to submit batch to ledger, retrying...', extra=__log_extra)
        raise task.retry(exc=e)
    except ols.LedgerException as e:
        if e.code in (15, 17, 18):
            # Transient validator errors
            logger.exception(f'Ledger validator error (code {e.code}), retrying...', extra=__log_extra)
            raise task.retry(exc=e)
        elif e.code == 31:
            logger.info('Ledger queue is full, retrying...', extra=__log_extra)
            raise task.retry(exc=e)
        else:
            raise

    logger.info('Batch submitted to ledger', extra=__log_extra)

    return handle
예제 #6
0
    def handle_request(self, request, token):
        """
        Registers a webhook subscription for this handler's event.

        :param SubscribeRequest request:
        :param Token token:
        :rtype: bool
        """
        subject = token.subject
        event_name = self.event.value

        self.service.subscribe(
            event=self.event,
            subject=subject,
            url=request.url,
            secret=request.secret,
        )

        logger.info(f'Webhook subscribed: {event_name}', extra={
            'subject': subject,
            'event': event_name,
            'url': request.url,
        })

        return True
예제 #7
0
def poll_batch_status(task, subject, batch_id, session):
    """
    Polls the ledger for the status of a previously submitted Batch.

    Retries (via Celery) while the batch is PENDING/UNKNOWN or on transient
    failures; raises InvalidBatch when the ledger reports INVALID.

    :param celery.Task task:
    :param str subject:
    :param int batch_id:
    :param sqlalchemy.orm.Session session:
    """
    __log_extra = {
        'subject': subject,
        'batch_id': str(batch_id),
        'pipeline': 'submit_batch_to_ledger',
        'task': 'poll_batch_status',
    }

    # Load the Batch; a missing batch is permanent, other DB errors retry
    try:
        batch = session \
            .query(Batch) \
            .filter(Batch.id == batch_id) \
            .one()
    except orm.exc.NoResultFound:
        raise
    except Exception as e:
        logger.exception('Failed to load Batch from database', extra=__log_extra)
        raise task.retry(exc=e)

    # Ask the ledger for the batch's current status
    try:
        response = ledger.get_batch_status(batch.handle)
    except ols.LedgerConnectionError as e:
        logger.exception('Failed to poll ledger for batch status, retrying...', extra=__log_extra)
        raise task.retry(exc=e)

    status = response.status

    if status == ols.BatchStatus.COMMITTED:
        logger.info('Ledger batch status: COMMITTED', extra=__log_extra)
        return

    if status == ols.BatchStatus.INVALID:
        # Raising exception triggers the ON ERROR task (rollback_batch())
        raise InvalidBatch('Invalid batch')

    if status == ols.BatchStatus.UNKNOWN:
        logger.info('Ledger batch status: UNKNOWN', extra=__log_extra)
        raise task.retry()

    if status == ols.BatchStatus.PENDING:
        logger.info('Ledger batch status: PENDING', extra=__log_extra)
        raise task.retry()

    raise RuntimeError('Unknown batch status returned, should NOT have happened!')
def import_meteringpoints_and_insert_to_db(task, subject, session):
    """
    Imports the user's MeteringPoints from DataHubService, saves new ones
    to the database, and spawns a task per MeteringPoint to send its
    ledger key to DataHubService.

    Retries (via Celery) on transient failures; a missing user or an
    HTTP 400 from DataHubService is re-raised as permanent.

    :param celery.Task task:
    :param str subject:
    :param sqlalchemy.orm.Session session:
    """
    __log_extra = {
        'subject': subject,
        'pipeline': 'import_meteringpoints',
        'task': 'import_meteringpoints_and_insert_to_db',
    }

    # Get User from DB
    try:
        user = UserQuery(session) \
            .is_active() \
            .has_sub(subject) \
            .one()
    except orm.exc.NoResultFound:
        # No active user with this subject - permanent, do not retry
        raise
    except Exception as e:
        logger.exception('Failed to load User from database, retrying...',
                         extra=__log_extra)
        raise task.retry(exc=e)

    # Import MeteringPoints from DataHubService
    try:
        response = datahub_service.get_meteringpoints(user.access_token)
    except DataHubServiceConnectionError as e:
        logger.exception(
            'Failed to establish connection to DataHubService, retrying...',
            extra=__log_extra)
        raise task.retry(exc=e)
    except DataHubServiceError as e:
        if e.status_code == 400:
            # Bad request is permanent - retrying would not help
            raise
        else:
            logger.exception('Failed to import MeteringPoints, retrying...',
                             extra=__log_extra)
            raise task.retry(exc=e)

    # Save imported MeteringPoints to database.
    # BUGFIX: save_imported_meteringpoints() requires the session argument;
    # it was previously called without it, raising a TypeError that the
    # broad except below would swallow into an endless retry loop.
    try:
        meteringpoints = save_imported_meteringpoints(user, response, session)
    except Exception as e:
        logger.exception(
            'Failed to save imported Meteringpoints to database, retrying...',
            extra=__log_extra)
        raise task.retry(exc=e)

    logger.info(
        f'Imported {len(meteringpoints)} new MeteringPoints from DataHubService',
        extra=__log_extra)

    # Send MeteringPoint key to DataHubService for each imported MeteringPoint
    tasks = []

    for meteringpoint in meteringpoints:
        logger.info(f'Imported meteringpoint with GSRN: {meteringpoint.gsrn}',
                    extra={
                        'gsrn': meteringpoint.gsrn,
                        'subject': user.sub,
                        'pipeline': 'import_meteringpoints',
                        'task': 'import_meteringpoints_and_insert_to_db',
                    })

        tasks.append(
            send_key_to_datahub_service.s(
                subject=subject,
                gsrn=meteringpoint.gsrn,
            ))

    group(*tasks).apply_async()