def get_image_as_bytes(self, request):
    """Get the image from the POSTed data (request) or from the form
    field (in which case it's supposed to be a URL that we are going
    to request).
    """
    # FIXME: use pydantic logic to return validation errors
    missing_image_error = ApiErrors({"thumb": ["Vous devez fournir une image d'accroche"]})
    if "thumb" in request.files:
        blob = request.files["thumb"]
        if not blob.filename:
            raise missing_image_error
        if Path(blob.filename).suffix not in ALLOWED_IMAGE_SUFFIXES:
            error = (
                f"Cette image n'a pas d'extension ({', '.join(ALLOWED_IMAGE_SUFFIXES)}) "
                f"ou son format n'est pas autorisé"
            )
            raise ApiErrors({"thumb": [error]})
        return blob.read()
    if self.thumb_url:
        try:
            return _fetch_image(self.thumb_url)
        except ValueError as value_error:
            logger.exception(value_error)
            raise ApiErrors({"thumbUrl": ["L'adresse saisie n'est pas valide"]})
    raise missing_image_error

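# A hedged sketch of exercising the file-upload branch above from a test,
# assuming a Flask app and some `obj` exposing get_image_as_bytes (both
# hypothetical here); ALLOWED_IMAGE_SUFFIXES is assumed to contain ".png".
import io

from flask import Flask, request

app = Flask(__name__)

with app.test_request_context(
    "/",
    method="POST",
    data={"thumb": (io.BytesIO(b"<png bytes>"), "cover.png")},
):
    image_bytes = obj.get_image_as_bytes(request)
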
def _run_indexing(client: Redis, venue_provider: Dict) -> None:
    venue_provider_id = venue_provider["id"]
    provider_id = venue_provider["providerId"]
    venue_id = venue_provider["venueId"]
    run_algolia_venue_provider_command = (
        f"python src/pcapi/scripts/pc.py process_venue_provider_offers_for_algolia "
        f"--provider-id {provider_id} "
        f"--venue-provider-id {venue_provider_id} "
        f"--venue-id {venue_id}"
    )
    try:
        container_id = run_process_in_one_off_container(run_algolia_venue_provider_command)
        add_venue_provider_currently_in_sync(
            client=client, container_id=container_id, venue_provider_id=venue_provider_id
        )
        logger.info(
            "[ALGOLIA][Worker] Indexing offers from VenueProvider %s in container %s",
            venue_provider_id,
            container_id,
        )
    except ScalingoApiException as error:
        logger.exception(
            "[ALGOLIA][Worker] Error indexing offers from VenueProvider %s with errors: %s",
            venue_provider_id,
            error,
        )

def signup_pro():
    objects_to_save = []
    app_origin_url = request.headers.get("origin")
    check_valid_signup_pro(request)
    new_user = User(from_dict=request.json)
    existing_offerer = Offerer.query.filter_by(siren=request.json["siren"]).first()
    if existing_offerer:
        user_offerer = _generate_user_offerer_when_existing_offerer(new_user, existing_offerer)
        offerer = existing_offerer
    else:
        offerer = _generate_offerer(request.json)
        user_offerer = offerer.give_rights(new_user, RightsType.editor)
        digital_venue = create_digital_venue(offerer)
        objects_to_save.extend([digital_venue, offerer])
    objects_to_save.append(user_offerer)
    new_user.isBeneficiary = False
    new_user.isAdmin = False
    new_user.needsToFillCulturalSurvey = False
    new_user = _set_offerer_departement_code(new_user, offerer)
    new_user.generate_validation_token()
    objects_to_save.append(new_user)
    repository.save(*objects_to_save)
    try:
        send_user_validation_email(new_user, send_raw_email, app_origin_url, is_webapp=False)
        subscribe_newsletter(new_user)
    except MailServiceException:
        logger.exception("Mail service failure")
    return jsonify(as_dict(new_user, includes=USER_INCLUDES)), 201

def cancel_banned_bookings() -> None:
    logger.info("[CANCEL BANNED BOOKINGS] START")
    bookings_to_update = get_bookings_banned_and_sent()
    bookings_in_error = []
    updated_bookings = []
    for booking in bookings_to_update:
        booking.isCancelled = True
        booking.isUsed = False
        booking.dateUsed = None
        try:
            repository.save(booking)
            updated_bookings.append(booking.id)
        except ApiErrors as error:
            logger.exception("%s for booking %s", error.errors, booking.id)
            bookings_in_error.append(booking.id)
    logger.info("%i BOOKINGS UPDATED", len(updated_bookings))
    logger.info("LIST OF UPDATED BOOKINGS")
    logger.info(updated_bookings)
    if len(bookings_in_error) > 0:
        logger.error("LIST OF BOOKINGS IN ERROR")
        logger.error(bookings_in_error)
    logger.info("[CANCEL BANNED BOOKINGS] END")

def _send(self, recipients: Iterable[str], data: dict) -> MailResult:
    data["To"] = ", ".join(recipients)
    if settings.MAILJET_TEMPLATE_DEBUGGING:
        messages_data = data.get("Messages")
        if messages_data:
            for message_data in messages_data:
                _add_template_debugging(message_data)
        else:
            _add_template_debugging(data)
    try:
        response = self.mailjet_client.send.create(data=data, timeout=settings.MAILJET_HTTP_TIMEOUT)
    except Exception as exc:  # pylint: disable=broad-except
        logger.exception("Error trying to send e-mail with Mailjet: %s", exc)
        return MailResult(
            sent_data=data,
            successful=False,
        )
    successful = response.status_code == 200
    if not successful:
        logger.warning(
            "Got %d return code from Mailjet: content=%s",
            response.status_code,
            response.content,
        )
    return MailResult(
        sent_data=data,
        successful=successful,
    )

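# A hedged usage sketch of `_send`: the payload shape below follows the
# Mailjet v3 Send API, and `backend` stands for whatever object carries the
# method (both are assumptions, not taken from the original code).
payload = {
    "FromEmail": "support@example.com",  # hypothetical sender
    "Subject": "Hello",
    "Text-part": "Bonjour",
}
result = backend._send(recipients=["user@example.com"], data=payload)
if not result.successful:
    ...  # e.g. enqueue `result.sent_data` for a later retry
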
def get_number_of_venue_providers_currently_in_sync(client: Redis) -> int:
    try:
        return client.hlen(RedisBucket.REDIS_HASHMAP_VENUE_PROVIDERS_IN_SYNC_NAME.value)
    except redis.exceptions.RedisError as error:
        logger.exception("[REDIS] %s", error)
        return 0

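# A minimal, self-contained illustration of the fail-safe pattern shared by
# the Redis helpers in this section: catch RedisError, log it with the
# traceback, and return a neutral default so callers survive a Redis outage.
# The names below are illustrative, not from the original code.
import logging

import redis

logger = logging.getLogger(__name__)


def safe_hlen(client: redis.Redis, key: str, default: int = 0) -> int:
    try:
        return client.hlen(key)
    except redis.exceptions.RedisError as error:
        logger.exception("[REDIS] %s", error)
        return default
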
def log_worker_error(job: Job, exception_type: Type, exception_value: Exception) -> None:
    # This handler is called by `rq.Worker.handle_exception()` from an
    # `except` clause, so we can (and should) use `logger.exception`.
    logger.exception(
        build_job_log_message(job, JobStatus.FAILED, f"{exception_type.__name__}: {exception_value}")
    )

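# A hedged sketch of how such a handler is typically wired into rq: custom
# exception handlers receive (job, exc_type, exc_value, traceback), so the
# three-argument function above needs a small adapter. The queue name and
# Redis connection are illustrative.
from redis import Redis
from rq import Queue, Worker


def worker_exception_handler(job, exc_type, exc_value, traceback):
    log_worker_error(job, exc_type, exc_value)
    return True  # fall through so rq's default failure handling still runs


connection = Redis()
worker = Worker(
    [Queue("default", connection=connection)],
    connection=connection,
    exception_handlers=[worker_exception_handler],
)
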
def create_pro_user(pro_user: ProUserCreationBodyModel) -> User:
    objects_to_save = []
    new_pro_user = User(from_dict=pro_user.dict(by_alias=True))
    new_pro_user.hasAllowedRecommendations = pro_user.contact_ok
    existing_offerer = Offerer.query.filter_by(siren=pro_user.siren).first()
    if existing_offerer:
        user_offerer = _generate_user_offerer_when_existing_offerer(new_pro_user, existing_offerer)
        offerer = existing_offerer
    else:
        offerer = _generate_offerer(pro_user.dict(by_alias=True))
        user_offerer = offerer.grant_access(new_pro_user)
        digital_venue = create_digital_venue(offerer)
        objects_to_save.extend([digital_venue, offerer])
    objects_to_save.append(user_offerer)
    new_pro_user.isBeneficiary = False
    new_pro_user.isAdmin = False
    new_pro_user.needsToFillCulturalSurvey = False
    new_pro_user = _set_offerer_departement_code(new_pro_user, offerer)
    new_pro_user.generate_validation_token()
    objects_to_save.append(new_pro_user)
    repository.save(*objects_to_save)
    try:
        user_emails.send_user_validation_email(new_pro_user)
    except MailServiceException:
        logger.exception("Could not send validation email when creating pro user=%s", new_pro_user.id)
    return new_pro_user

def send_payments_details(payments: List[Payment], recipients: List[str]) -> None:
    if not recipients:
        raise Exception(
            "[BATCH][PAYMENTS] Missing PASS_CULTURE_PAYMENTS_DETAILS_RECIPIENTS in environment variables"
        )
    if all(payment.currentStatus.status == TransactionStatus.ERROR for payment in payments):
        logger.warning("[BATCH][PAYMENTS] Not sending payments details as all payments have an ERROR status")
    else:
        details = create_all_payments_details(payments)
        csv = generate_payment_details_csv(details)
        logger.info("[BATCH][PAYMENTS] Sending %s details of %s payments", len(details), len(payments))
        logger.info("[BATCH][PAYMENTS] Recipients of email : %s", recipients)
        try:
            send_payment_details_email(csv, recipients)
        except MailServiceException as exception:
            logger.exception(
                "[BATCH][PAYMENTS] Error while sending payment details email to MailJet: %s", exception
            )

def send_payments_report(payments: List[Payment], recipients: List[str]) -> None:
    if not payments:
        logger.info("[BATCH][PAYMENTS] No payments to report to the pass Culture team")
        return
    groups = group_payments_by_status(payments)
    payments_error_details = create_all_payments_details(groups["ERROR"]) if "ERROR" in groups else []
    error_csv = generate_payment_details_csv(payments_error_details)
    payments_not_processable_details = (
        create_all_payments_details(groups["NOT_PROCESSABLE"]) if "NOT_PROCESSABLE" in groups else []
    )
    not_processable_csv = generate_payment_details_csv(payments_not_processable_details)
    logger.info(
        "[BATCH][PAYMENTS] Sending report on %s payments in ERROR and %s payments NOT_PROCESSABLE",
        len(payments_error_details),
        len(payments_not_processable_details),
    )
    logger.info("[BATCH][PAYMENTS] Recipients of email : %s", recipients)
    try:
        send_payments_report_emails(not_processable_csv, error_csv, groups, recipients)
    except MailServiceException as exception:
        logger.exception("[BATCH][PAYMENTS] Error while sending payments reports to MailJet: %s", exception)

def store_public_object(
    self, bucket: str, object_id: str, blob: bytes, content_type: str, symlink_path: str = None
) -> None:
    try:
        os.makedirs(self.local_dir(bucket, object_id), exist_ok=True)
        file_local_path = self.local_path(bucket, object_id)
        # Record the content type in a sidecar ".type" file. Context
        # managers ensure the file handles are closed.
        with open(str(file_local_path) + ".type", "w") as new_type_file:
            new_type_file.write(content_type)
        # If a symlink path is given and nothing exists at the target
        # yet, link to it instead of writing the blob.
        if symlink_path and not os.path.isfile(file_local_path) and not os.path.islink(file_local_path):
            os.symlink(symlink_path, file_local_path)
            return
        with open(file_local_path, "wb") as new_file:
            new_file.write(blob)
    except Exception as exc:
        logger.exception("An error has occurred while trying to upload file on local file storage: %s", exc)
        raise exc

def get_venue_ids(client: Redis) -> List[int]:
    try:
        venue_ids = client.lrange(
            RedisBucket.REDIS_LIST_VENUE_IDS_NAME.value, 0, settings.REDIS_VENUE_IDS_CHUNK_SIZE
        )
        return venue_ids
    except redis.exceptions.RedisError as error:
        logger.exception("[REDIS] %s", error)
        return []

def add_venue_provider_currently_in_sync(client: Redis, venue_provider_id: int, container_id: str) -> None:
    try:
        client.hset(
            RedisBucket.REDIS_HASHMAP_VENUE_PROVIDERS_IN_SYNC_NAME.value, venue_provider_id, container_id
        )
    except redis.exceptions.RedisError as error:
        logger.exception("[REDIS] %s", error)

def check_offer_exists(client: Redis, offer_id: int) -> bool:
    try:
        offer_exists = client.hexists(RedisBucket.REDIS_HASHMAP_INDEXED_OFFERS_NAME.value, offer_id)
        return offer_exists
    except redis.exceptions.RedisError as error:
        logger.exception("[REDIS] %s", error)
        return False

def add_to_indexed_offers(pipeline: Pipeline, offer_id: int, offer_details: dict) -> None:
    try:
        offer_details_as_string = json.dumps(offer_details)
        pipeline.hset(RedisBucket.REDIS_HASHMAP_INDEXED_OFFERS_NAME.value, offer_id, offer_details_as_string)
    except redis.exceptions.RedisError as error:
        logger.exception("[REDIS] %s", error)

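# A hedged sketch of how add_to_indexed_offers is presumably used: commands
# are queued on a redis-py pipeline so several HSETs go out in one round
# trip. `offers_to_index` is an assumed mapping, not from the original code.
pipeline = client.pipeline()
for offer_id, details in offers_to_index.items():
    add_to_indexed_offers(pipeline, offer_id, details)
pipeline.execute()
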
def do_update(provider: LocalProvider, limit: Optional[int]):
    try:
        provider.updateObjects(limit)
    except Exception:  # pylint: disable=broad-except
        _remove_worker_id_after_venue_provider_sync_error(provider)
        logger.exception(build_cron_log_message(name=provider.__class__.__name__, status=CronStatus.FAILED))

def ban_payments(message_id: str, raw_payment_ids_to_ban: str):
    try:
        payment_ids_to_ban = parse_raw_payments_ids(raw_payment_ids_to_ban)
    except ValueError:
        logger.exception('Les identifiants de paiement doivent être au format "111,222,333"')
    else:
        do_ban_payments(message_id, payment_ids_to_ban)

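# For context, a hedged sketch of what parse_raw_payments_ids plausibly does
# (the real helper is not shown in this section): split a "111,222,333"
# string into ints, raising ValueError on malformed input, which the
# `except ValueError` branch above then catches.
from typing import List


def parse_raw_payments_ids(raw_ids: str) -> List[int]:
    return [int(chunk) for chunk in raw_ids.split(",")]
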
def convert_id_into_label(value_id: Optional[str]) -> Optional[str]:
    try:
        return labels_by_id[int(value_id)] if value_id else None
    except ValueError:
        # When this function is called a second time, the field has
        # already been converted (the value is no longer a numeric id).
        return None
    except KeyError:
        logger.exception("Invalid '%s' '%s' found on an offer", field_name, value_id)
        return None

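# convert_id_into_label reads `labels_by_id` and `field_name` from an
# enclosing scope. A hedged example with hypothetical data, assuming the
# enclosing scope defines them as below:
field_name = "musicType"
labels_by_id = {501: "Jazz", 520: "Rock"}

convert_id_into_label("501")   # -> "Jazz"
convert_id_into_label("Jazz")  # -> None (already converted; ValueError branch)
convert_id_into_label(None)    # -> None
convert_id_into_label("999")   # -> None, logged via the KeyError branch
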
def get_offer_ids_in_error(client: Redis) -> List[int]:
    try:
        offer_ids = client.lrange(
            RedisBucket.REDIS_LIST_OFFER_IDS_IN_ERROR_NAME.value, 0, settings.REDIS_OFFER_IDS_CHUNK_SIZE
        )
        return offer_ids
    except redis.exceptions.RedisError as error:
        logger.exception("[REDIS] %s", error)
        return []

def _delete_user_offerers_from_rows(csv_rows: Iterable) -> None:
    user_offerers_successful = []
    user_offerers_in_error = []
    csv_rows_iterable = iter(csv_rows)
    headers = next(csv_rows_iterable)
    for row in csv_rows_iterable:
        if _is_blank_row(row):
            continue
        row = dict(zip(headers, row))
        user_id = row[USER_ID_COLUMN_HEADER]
        offerer_id = row[OFFERER_ID_COLUMN_HEADER]
        user_offerer = find_one_or_none_by_user_id_and_offerer_id(int(user_id), int(offerer_id))
        if user_offerer is None:
            continue
        user_offerer_id = user_offerer.id
        logger.info(
            "[DELETE USER_OFFERERS FROM FILE] Suppression du rattachement pour le user d'id %s et l'offerer "
            "d'id %s est lancée",
            user_id,
            offerer_id,
        )
        try:
            repository.delete(user_offerer)
            logger.info(
                "[DELETE USER_OFFERERS FROM FILE] Suppression du rattachement pour le user d'id %s et l'offerer "
                "d'id %s réussie",
                user_id,
                offerer_id,
            )
            user_offerers_successful.append(user_offerer_id)
        except ApiErrors as error:
            logger.exception(
                "[DELETE USER_OFFERERS FROM FILE] %s pour le rattachement avec le user d'id %s et l'offerer d'id %s",
                error.errors,
                user_id,
                offerer_id,
            )
            user_offerers_in_error.append(user_offerer_id)
    logger.info("[DELETE USER_OFFERERS FROM FILE] %i RATTACHEMENTS SUPPRIMES", len(user_offerers_successful))
    logger.info("[DELETE USER_OFFERERS FROM FILE] LISTE DES RATTACHEMENTS SUPPRIMES")
    logger.info(user_offerers_successful)
    if len(user_offerers_in_error) > 0:
        logger.error("[DELETE USER_OFFERERS FROM FILE] LISTE DES RATTACHEMENTS EN ERREUR")
        logger.error(user_offerers_in_error)

def delete_public_object(self, bucket: str, object_id: str) -> None:
    storage_path = bucket + "/" + object_id
    try:
        # Use a distinct name so the `bucket` parameter is not shadowed.
        gcp_bucket = self.get_gcp_storage_client_bucket()
        gcp_cloud_blob = gcp_bucket.blob(storage_path)
        gcp_cloud_blob.delete()
    except Exception as exc:
        logger.exception("An error has occurred while trying to delete file on GCP bucket: %s", exc)
        raise exc

def delete_public_object(self, bucket: str, object_id: str):
    container_name = settings.SWIFT_BUCKET_NAME
    try:
        storage_path = bucket + "/" + object_id
        self.swift_con().delete_object(container_name, storage_path)
    except Exception as exc:
        logger.exception("An error has occurred while trying to delete file on OVH bucket: %s", exc)
        raise exc

def delete_public_object(self, bucket: str, object_id: str):
    file_local_path = self.local_path(bucket, object_id)
    try:
        os.remove(file_local_path)
        os.remove(str(file_local_path) + ".type")
    except OSError as exc:
        logger.exception("An error has occurred while trying to delete file on local file storage: %s", exc)
        raise exc

def _get_departement_code_when_authorized_or_error(authorized_emails, departement_codes):
    email_index = _get_email_index_in_spreadsheet_or_error(authorized_emails)
    departement_code = departement_codes[email_index]
    if departement_code.strip() == "":
        # Not in an `except` clause, so use `logger.error` rather than
        # `logger.exception` (there is no active exception to log).
        logger.error("[ERROR] Missing departement code in users spreadsheet for %s", request.json["email"])
        e = ApiErrors()
        e.add_error("email", "Adresse non autorisée pour l'expérimentation")
        raise e
    return departement_code

def send_booking_cancellation_emails_to_user_and_offerer(
    self,
    booking: Booking,
    reason: BookingCancellationReasons,
    send_email: typing.Callable[..., bool],
):
    try:
        send_booking_cancellation_emails_to_user_and_offerer(booking, reason, send_raw_email)
    except MailServiceException as error:
        logger.exception("Could not send booking cancellation emails to user and offerer: %s", error)

def synchronize_data_for_provider(provider_name: str, limit: Optional[int] = None) -> None:
    provider_class = get_local_provider_class_by_name(provider_name)
    try:
        provider = provider_class()
        do_update(provider, limit)
    except Exception:  # pylint: disable=broad-except
        logger.exception(build_cron_log_message(name=provider_name, status=CronStatus.FAILED))

def book_offer(
    beneficiary: User,
    stock: Stock,
    quantity: int,
) -> Booking:
    """Return a booking or raise an exception if it's not possible."""
    validation.check_can_book_free_offer(beneficiary, stock)
    validation.check_offer_already_booked(beneficiary, stock.offer)
    validation.check_quantity(stock.offer, quantity)
    validation.check_stock_is_bookable(stock)
    total_amount = quantity * stock.price
    validation.check_expenses_limits(beneficiary, total_amount, stock.offer)

    # FIXME (dbaty, 2020-10-20): if we directly set relations (for
    # example with `booking.user = beneficiary`) instead of foreign keys,
    # the session tries to add the object when `get_user_expenses()`
    # is called because autoflush is enabled. As such, the PostgreSQL
    # exceptions (tooManyBookings and insufficientFunds) may raise at
    # this point and will bubble up. If we want them to be caught, we
    # have to set foreign keys, so that the session is NOT autoflushed
    # in `get_user_expenses` and is only committed in `repository.save()`
    # where exceptions are caught. Since we are using flask-sqlalchemy,
    # I don't think that we should use autoflush, nor should we use
    # the `pcapi.repository.repository` module.
    booking = Booking(
        userId=beneficiary.id,
        stockId=stock.id,
        amount=stock.price,
        quantity=quantity,
        token=generate_booking_token(),
    )
    booking.dateCreated = datetime.datetime.utcnow()
    booking.confirmationDate = compute_confirmation_date(stock.beginningDatetime, booking.dateCreated)
    repository.save(booking)

    try:
        user_emails.send_booking_recap_emails(booking)
    except MailServiceException as error:
        logger.exception("Could not send booking=%s confirmation email to offerer: %s", booking.id, error)
    try:
        user_emails.send_booking_confirmation_email_to_beneficiary(booking)
    except MailServiceException as error:
        logger.exception("Could not send booking=%s confirmation email to beneficiary: %s", booking.id, error)

    if feature_queries.is_active(FeatureToggle.SYNCHRONIZE_ALGOLIA):
        redis.add_offer_id(client=app.redis_client, offer_id=stock.offerId)

    return booking

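# A hedged usage sketch of book_offer. Note the design it illustrates: the
# booking is persisted *before* the emails go out, and MailServiceException
# is caught and logged, so a mail outage never cancels a booking. The
# `current_user` and `stock` objects here are assumed.
booking = book_offer(beneficiary=current_user, stock=stock, quantity=1)
print(booking.token)  # code the beneficiary presents at the venue
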
def store_public_object(
    self, bucket: str, object_id: str, blob: bytes, content_type: str, symlink_path: str = None
) -> None:
    storage_path = bucket + "/" + object_id
    try:
        # Use a distinct name so the `bucket` parameter is not shadowed.
        gcp_bucket = self.get_gcp_storage_client_bucket()
        gcp_cloud_blob = gcp_bucket.blob(storage_path)
        # Pass the content type along instead of silently dropping it.
        gcp_cloud_blob.upload_from_string(blob, content_type=content_type)
    except Exception as exc:
        logger.exception("An error has occurred while trying to upload file on GCP bucket: %s", exc)
        raise exc

def get_offer_details(client: Redis, offer_id: int) -> Dict:
    try:
        offer_details = client.hget(RedisBucket.REDIS_HASHMAP_INDEXED_OFFERS_NAME.value, offer_id)
        if offer_details:
            return json.loads(offer_details)
        return dict()
    except redis.exceptions.RedisError as error:
        logger.exception("[REDIS] %s", error)
        return dict()

def synchronize_venue_provider(venue_provider: VenueProvider, limit: Optional[int] = None):
    provider_class = get_local_provider_class_by_name(venue_provider.provider.localClass)
    try:
        provider = provider_class(venue_provider)
        do_update(provider, limit)
    except Exception:  # pylint: disable=broad-except
        logger.exception(build_cron_log_message(name=provider_class.__name__, status=CronStatus.FAILED))