def save_imported_meteringpoints(user, response, session):
    """
    :param originexample.auth.User user:
    :param originexample.services.datahub.GetMeteringPointsResponse response:
    :param sqlalchemy.orm.Session session:
    :rtype: list[Facility]
    """
    imported_facilities = []

    for meteringpoint in response.meteringpoints:
        count = FacilityQuery(session) \
            .has_gsrn(meteringpoint.gsrn) \
            .count()

        if count > 0:
            logger.info(
                f'Skipping meteringpoint with GSRN: {meteringpoint.gsrn} (already exists in DB)',
                extra={
                    'gsrn': meteringpoint.gsrn,
                    'subject': user.sub,
                    'pipeline': 'import_meteringpoints',
                    'task': 'import_meteringpoints_and_insert_to_db',
                })
            continue

        if meteringpoint.type is MeteringPointType.PRODUCTION:
            facility_type = FacilityType.PRODUCTION
        elif meteringpoint.type is MeteringPointType.CONSUMPTION:
            facility_type = FacilityType.CONSUMPTION
        else:
            raise RuntimeError('Should NOT have happened!')

        imported_facilities.append(Facility(
            user=user,
            gsrn=meteringpoint.gsrn,
            sector=meteringpoint.sector,
            facility_type=facility_type,
            technology_code=meteringpoint.technology_code,
            fuel_code=meteringpoint.fuel_code,
            street_code=meteringpoint.street_code,
            street_name=meteringpoint.street_name,
            building_number=meteringpoint.building_number,
            city_name=meteringpoint.city_name,
            postcode=meteringpoint.postcode,
            municipality_code=meteringpoint.municipality_code,
        ))

        logger.info(
            f'Imported meteringpoint with GSRN: {meteringpoint.gsrn}',
            extra={
                'gsrn': meteringpoint.gsrn,
                'subject': user.sub,
                'pipeline': 'import_meteringpoints',
                'task': 'import_meteringpoints_and_insert_to_db',
            })

    session.add_all(imported_facilities)
    session.flush()

    return imported_facilities
def consume_ggo(self, user, ggo, session):
    """
    :param User user:
    :param Ggo ggo:
    :param Session session:
    """
    request = ComposeGgoRequest(address=ggo.address)
    consumers = self.get_consumers(user, ggo, session)
    remaining_amount = ggo.amount

    # Distribute the GGO's amount greedily across consumers until nothing
    # remains or every consumer has been offered a share
    for consumer in takewhile(lambda _: remaining_amount > 0, consumers):
        already_transferred = ggo.amount - remaining_amount

        desired_amount = consumer.get_desired_amount(
            ggo, already_transferred)

        assigned_amount = min(remaining_amount, desired_amount)
        remaining_amount -= assigned_amount

        if assigned_amount > 0:
            consumer.consume(request, ggo, assigned_amount)

    # Only compose a new split if at least part of the GGO was assigned
    if remaining_amount < ggo.amount:
        logger.info('Composing a new GGO split', extra={
            'subject': user.sub,
            'address': ggo.address,
            'begin': str(ggo.begin),
        })

        account_service.compose(user.access_token, request)
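# The allocation loop above can be illustrated in isolation. This is a minimal,
# self-contained sketch, not the project's actual classes: ExampleConsumer and
# the amounts below are hypothetical stand-ins used only to show how
# takewhile() stops offering shares once the remaining amount reaches zero.
from itertools import takewhile


class ExampleConsumer:
    """Hypothetical consumer that wants a fixed amount."""
    def __init__(self, name, desired):
        self.name = name
        self.desired = desired

    def get_desired_amount(self, already_transferred):
        return self.desired


def allocate(total, consumers):
    remaining = total
    assignments = []
    for consumer in takewhile(lambda _: remaining > 0, consumers):
        assigned = min(remaining, consumer.get_desired_amount(total - remaining))
        remaining -= assigned
        if assigned > 0:
            assignments.append((consumer.name, assigned))
    return assignments


if __name__ == '__main__':
    consumers = [ExampleConsumer('a', 40), ExampleConsumer('b', 100), ExampleConsumer('c', 30)]
    # A total of 100 is exhausted before 'c' is offered anything:
    print(allocate(100, consumers))  # [('a', 40), ('b', 60)]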
def decline_proposal(self, agreement, user):
    """
    :param TradeAgreement agreement:
    :param User user:
    """
    agreement.decline_proposal()

    logger.info('User declined TradeAgreement proposal', extra={
        'subject': user.sub,
        'agreement_id': agreement.id,
    })

    # Send e-mail to proposing user
    send_invitation_declined_email(agreement)
def handle_request(self, request, user, session):
    """
    :param SubmitAgreementProposalRequest request:
    :param User user:
    :param Session session:
    :rtype: SubmitAgreementProposalResponse
    """
    counterpart = UserQuery(session) \
        .is_active() \
        .has_public_id(request.counterpart_id) \
        .exclude(user) \
        .one_or_none()

    if not counterpart:
        return SubmitAgreementProposalResponse(success=False)

    if request.direction == AgreementDirection.INBOUND:
        user_from = counterpart
        user_to = user
    elif request.direction == AgreementDirection.OUTBOUND:
        user_from = user
        user_to = counterpart
    else:
        raise RuntimeError('This should NOT have happened!')

    agreement = self.create_pending_agreement(
        request=request,
        user=user,
        user_from=user_from,
        user_to=user_to,
    )

    session.add(agreement)
    session.flush()

    logger.info('User submitted TradeAgreement proposal', extra={
        'subject': user.sub,
        'target': counterpart.sub,
        'agreement_id': agreement.id,
    })

    # Send e-mail to recipient of proposal
    send_invitation_received_email(agreement)

    return SubmitAgreementProposalResponse(success=True)
def handle_request(self, request, session):
    """
    :param OnMeteringPointAvailableWebhookRequest request:
    :param sqlalchemy.orm.Session session:
    :rtype: bool
    """
    user = UserQuery(session) \
        .is_active() \
        .has_sub(request.sub) \
        .one_or_none()

    # User exists?
    if user is None:
        logger.error(
            'Cannot import MeteringPoint (user not found in DB)',
            extra={
                'subject': request.sub,
                'gsrn': request.meteringpoint.gsrn,
            })
        return False

    # MeteringPoint already present in database?
    if self.facility_exists(request.meteringpoint.gsrn, session):
        logger.info(
            f'MeteringPoint {request.meteringpoint.gsrn} already exists in DB, skipping...',
            extra={
                'subject': request.sub,
                'gsrn': request.meteringpoint.gsrn,
            })
        return True

    # Insert new MeteringPoint into DB
    facility = self.create_facility(user, request.meteringpoint)

    logger.info(f'Imported MeteringPoint with GSRN: {facility.gsrn}', extra={
        'subject': user.sub,
        'gsrn': facility.gsrn,
        'type': facility.facility_type,
        'facility_id': facility.id,
    })

    return True
def accept_proposal(self, request, agreement, user, session):
    """
    :param RespondToProposalRequest request:
    :param TradeAgreement agreement:
    :param User user:
    :param Session session:
    """
    agreement.state = AgreementState.ACCEPTED
    agreement.transfer_priority = self.get_next_priority(
        agreement.user_from, session)

    if request.technologies and self.can_set_technology(agreement):
        agreement.technologies = request.technologies

    if request.facility_gsrn and self.can_set_facilities(agreement, user):
        agreement.facility_gsrn = request.facility_gsrn

    if request.amount_percent and self.can_set_amount_percent(agreement, user):
        agreement.amount_percent = request.amount_percent

    logger.info('User accepted TradeAgreement proposal', extra={
        'subject': user.sub,
        'agreement_id': agreement.id,
    })

    # Re-run consumption from two days before date_from until
    # two days after date_to
    start_consume_back_in_time_pipeline(
        user=agreement.user_from,
        begin_from=datetime.fromordinal(agreement.date_from.toordinal()) - timedelta(days=2),
        begin_to=datetime.fromordinal(agreement.date_to.toordinal()) + timedelta(days=2),
    )

    # Send e-mail to proposing user
    send_invitation_accepted_email(agreement)
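# Minimal, self-contained sketch of the begin_from/begin_to computation above:
# datetime.fromordinal(date.toordinal()) converts a date into a naive datetime
# at midnight, and the two-day timedelta widens the window on both sides.
# The dates used here are arbitrary examples.
from datetime import date, datetime, timedelta

date_from = date(2020, 1, 10)
date_to = date(2020, 1, 20)

begin_from = datetime.fromordinal(date_from.toordinal()) - timedelta(days=2)
begin_to = datetime.fromordinal(date_to.toordinal()) + timedelta(days=2)

assert begin_from == datetime(2020, 1, 8, 0, 0)
assert begin_to == datetime(2020, 1, 22, 0, 0)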
def handle_request(self, request, session):
    """
    :param VerifyLoginCallbackRequest request:
    :param Session session:
    :rtype: flask.Response
    """
    return_url = redis.get(request.state)

    if return_url is None:
        raise BadRequest('Click back in your browser')
    else:
        return_url = return_url.decode()
        redis.delete(request.state)

    if request.error:
        logger.error('Got login callback with ERROR', extra={
            'scope': str(request.scope),
            'code': request.code,
            'state': request.state,
            'error': str(request.error),
            'error_hint': str(request.error_hint),
            'error_description': str(request.error_description),
        })
        return redirect(return_url, 303)

    # Fetch token
    try:
        token = backend.fetch_token(request.code, request.state)
    except Exception:
        logger.exception('Failed to fetch token', extra={
            'scope': str(request.scope),
            'code': request.code,
            'state': request.state,
        })
        return self.redirect_to_failure(return_url)

    # Extract data from token
    id_token = backend.get_id_token(token)

    # No id_token means the user declined to give consent
    if id_token is None:
        return self.redirect_to_failure(return_url)

    expires = datetime \
        .fromtimestamp(token['expires_at']) \
        .replace(tzinfo=timezone.utc)

    # Lookup user from "subject"
    user = UserQuery(session) \
        .is_active() \
        .has_sub(id_token['sub']) \
        .one_or_none()

    if user is None:
        logger.info('User login: Creating new user and subscribing to webhooks', extra={
            'subject': id_token['sub'],
        })
        self.create_new_user(token, id_token, expires, session)
        datahub.webhook_on_measurement_published_subscribe(token['access_token'])
        datahub.webhook_on_meteringpoint_available_subscribe(token['access_token'])
        account.webhook_on_ggo_received_subscribe(token['access_token'])
    else:
        logger.info('User login: Updating tokens for existing user', extra={
            'subject': id_token['sub'],
        })
        self.update_user_attributes(user, token, expires)

    # Save session in Redis
    redis.set(id_token['sid'], id_token['sub'], ex=token['expires_at'])

    # Create HTTP response and disable caching of the redirect
    response = redirect(f'{ACCOUNT_SERVICE_LOGIN_URL}?returnUrl={return_url}', 303)
    response.set_cookie(SID_COOKIE_NAME, id_token['sid'], domain=urlparse(FRONTEND_URL).netloc)
    response.headers['Cache-Control'] = 'no-cache, no-store, must-revalidate'
    response.headers['Pragma'] = 'no-cache'
    response.headers['Expires'] = '0'

    return response
def import_meteringpoints_and_insert_to_db(task, subject, session):
    """
    :param celery.Task task:
    :param str subject:
    :param Session session:
    """
    __log_extra = {
        'subject': subject,
        'pipeline': 'import_meteringpoints',
        'task': 'import_meteringpoints_and_insert_to_db',
    }

    # Get User from DB
    try:
        user = UserQuery(session) \
            .is_active() \
            .has_sub(subject) \
            .one()
    except orm.exc.NoResultFound:
        raise
    except Exception as e:
        logger.exception('Failed to load User from database, retrying...',
                         extra=__log_extra)
        raise task.retry(exc=e)

    # Import MeteringPoints from DataHubService
    try:
        response = datahub_service.get_meteringpoints(user.access_token)
    except DataHubServiceConnectionError as e:
        logger.exception(
            'Failed to establish connection to DataHubService, retrying...',
            extra=__log_extra)
        raise task.retry(exc=e)
    except DataHubServiceError as e:
        if e.status_code == 400:
            logger.exception('Got BAD REQUEST from DataHubService',
                             extra=__log_extra)
            raise
        else:
            logger.exception('Failed to import MeteringPoints, retrying...',
                             extra=__log_extra)
            raise task.retry(exc=e)

    # Save imported MeteringPoints to database
    try:
        facilities = save_imported_meteringpoints(user, response, session)
    except Exception as e:
        logger.exception(
            'Failed to save imported Meteringpoints to database, retrying...',
            extra=__log_extra)
        raise task.retry(exc=e)

    # Logging
    logger.info(
        f'Imported {len(facilities)} new MeteringPoints from DataHubService',
        extra=__log_extra)

    for facility in facilities:
        logger.info(f'Imported meteringpoint with GSRN: {facility.gsrn}',
                    extra={
                        'gsrn': facility.gsrn,
                        'subject': user.sub,
                        'pipeline': 'import_meteringpoints',
                        'task': 'import_meteringpoints_and_insert_to_db',
                    })
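# The error handling above follows a common Celery pattern: transient failures
# re-schedule the task via task.retry(exc=...), while errors that will not go
# away on their own (e.g. HTTP 400) are re-raised so the task fails permanently.
# A minimal, self-contained sketch of that pattern (the broker URL, task name
# and fetch_remote_data() below are hypothetical, not part of this project):
from celery import Celery

app = Celery('example', broker='memory://')


class TransientError(Exception):
    pass


def fetch_remote_data():
    raise TransientError('service temporarily unavailable')


@app.task(bind=True, max_retries=5, default_retry_delay=10)
def import_data(self):
    try:
        return fetch_remote_data()
    except TransientError as e:
        # self.retry() raises Retry, ending this run and scheduling a new
        # attempt (up to max_retries); re-raising it is the defensive idiom
        raise self.retry(exc=e)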
def handle_ggo_received(task, subject, address, ggo_json, session):
    """
    :param celery.Task task:
    :param str subject:
    :param str address:
    :param JSON ggo_json:
    :param Session session:
    """
    __log_extra = {
        'subject': subject,
        'address': address,
        'ggo': str(ggo_json),
        'pipeline': 'handle_ggo_received',
        'task': 'handle_ggo_received',
    }

    ggo = ggo_schema.load(ggo_json)

    # Get User from database
    try:
        user = UserQuery(session) \
            .is_active() \
            .has_sub(subject) \
            .one()
    except orm.exc.NoResultFound:
        raise
    except Exception as e:
        logger.exception('Failed to load User from database, retrying...',
                         extra=__log_extra)
        raise task.retry(exc=e)

    # Affected subjects TODO
    # affected_subjects = controller.get_affected_subjects(user, ggo, session)
    # lock_keys = [get_lock_key(sub, ggo.begin) for sub in affected_subjects]

    lock_key = ggo.begin.strftime('%Y-%m-%d-%H-%M')

    # This lock is in place to avoid timing issues when executing multiple
    # tasks for the same account at the same time, which can cause
    # the transferred or retired amount to exceed the allowed amount
    with lock(lock_key, timeout=LOCK_TIMEOUT) as acquired:
        if not acquired:
            logger.info('Could not acquire lock(s), retrying...',
                        extra=__log_extra)
            raise task.retry()

        try:
            if not ggo_is_available(user.access_token, ggo):
                logger.info('GGO is unavailable, skipping...',
                            extra=__log_extra)
                return

            # Consume GGO
            controller.consume_ggo(user, ggo, session)
        except AccountServiceError as e:
            if e.status_code == 400:
                raise
            else:
                logger.exception('Failed to consume GGO, retrying...',
                                 extra=__log_extra)
                raise task.retry(exc=e)
        except Exception as e:
            logger.exception('Failed to consume GGO, retrying...',
                             extra=__log_extra)
            raise task.retry(exc=e)
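# The lock() context manager used above is project-specific. As an assumption,
# it could be built on a Redis lock roughly like this minimal sketch, which
# yields a boolean instead of blocking so the caller can decide to retry the
# task. REDIS_URL and LOCK_TIMEOUT below are illustrative, not the project's.
from contextlib import contextmanager

import redis

REDIS_URL = 'redis://localhost:6379/0'   # assumption for illustration
LOCK_TIMEOUT = 60                        # seconds before an abandoned lock expires

redis_client = redis.Redis.from_url(REDIS_URL)


@contextmanager
def lock(key, timeout=LOCK_TIMEOUT):
    """Yields True if the lock was acquired, False otherwise."""
    redis_lock = redis_client.lock(f'lock:{key}', timeout=timeout)
    acquired = redis_lock.acquire(blocking=False)
    try:
        yield acquired
    finally:
        if acquired:
            redis_lock.release()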