def test_destroy() -> None:
    """Verify that creating the app in DESTROY mode wipes the default user.

    Opt-in test: it drops data, so it only runs when TEST_DESTROY_MODE=1
    (i.e. when the suite is launched with the --destroy flag).
    """
    # Only executed if tests are run with --destroy flag
    if os.getenv("TEST_DESTROY_MODE", "0") != "1":
        log.info("Skipping destroy test, TEST_DESTROY_MODE not enabled")
        return

    # Always enable during core tests
    if not Connector.check_availability("authentication"):  # pragma: no cover
        log.warning("Skipping authentication test: service not available")
        return

    # if Connector.check_availability("sqlalchemy"):
    #     sql = sqlalchemy.get_instance()
    #     # Close previous connections, otherwise the new create_app will hang
    #     sql.session.remove()
    #     sql.session.close_all()

    # Sanity check: the default user exists before the destroy
    auth = Connector.get_authentication_instance()
    user = auth.get_user(username=BaseAuthentication.default_user)

    assert user is not None

    create_app(mode=ServerModes.DESTROY)

    try:
        # After the destroy the default user must be gone; some backends
        # drop the whole database instead and raise ServiceUnavailable,
        # which is an equally acceptable outcome here
        auth = Connector.get_authentication_instance()
        user = auth.get_user(username=BaseAuthentication.default_user)
        assert user is None
    except ServiceUnavailable:
        pass
def teardown_handler(signal, frame):  # pragma: no cover
    """Signal handler: disconnect all connectors and terminate the process.

    Registered for process-termination signals; `signal` and `frame` follow
    the standard Python signal-handler signature.
    """
    with lock:
        Connector.disconnect_all()

        # This is needed to let connectors to complete the disconnection and prevent
        # errors like this on rabbitMQ:
        # closing AMQP connection <0.2684.0> ([...], vhost: '/', user: [...]):
        # client unexpectedly closed TCP connection
        time.sleep(1)

        print("Disconnection completed")

        sys.exit(0)
def test_login_management(faker: Faker) -> None:
    """Exercise login-history retrieval and flushing of failed logins."""
    auth = Connector.get_authentication_instance()

    if BaseAuthentication.default_user:
        # The default user is expected to have at least one recorded login
        logins = auth.get_logins(BaseAuthentication.default_user)
        assert isinstance(logins, list)
        assert len(logins) > 0

        auth.flush_failed_logins(BaseAuthentication.default_user)

        # After the flush there are no unflushed logins left...
        logins = auth.get_logins(BaseAuthentication.default_user, only_unflushed=True)
        assert len(logins) == 0

        # ... but the full history is still available
        logins = auth.get_logins(BaseAuthentication.default_user, only_unflushed=False)
        assert len(logins) > 0

    # Unknown users yield an empty list rather than an error
    logins = auth.get_logins(faker.ascii_email())
    assert isinstance(logins, list)
    assert len(logins) == 0

    # Same for a non-email username
    logins = auth.get_logins(faker.pystr())
    assert isinstance(logins, list)
    assert len(logins) == 0
def create_user(
    cls,
    client: FlaskClient,
    data: Optional[Dict[str, Any]] = None,
    roles: Optional[List[Union[str, Role]]] = None,
) -> Tuple[str, Dict[str, Any]]:
    """Create a user through the admin API and return (uuid, input data).

    :param client: the Flask test client used to perform the requests
    :param data: optional overrides merged on top of the generated user data
    :param roles: optional roles, as strings or Role enum members
    :return: the uuid of the created user and the payload used to create it
    """
    assert Env.get_bool("MAIN_LOGIN_ENABLE")

    admin_headers, _ = cls.do_login(client, None, None)
    assert admin_headers is not None

    # Build a valid random payload from the dynamic input schema
    schema = cls.getDynamicInputSchema(client, "admin/users", admin_headers)
    user_data = cls.buildData(schema)

    if Connector.check_availability("smtp"):
        user_data["email_notification"] = False

    user_data["is_active"] = True
    user_data["expiration"] = None

    if roles:
        # Normalize Role enum members to their string values in a new list:
        # the original code assigned back into `roles`, mutating the
        # caller's list as a side effect
        role_names = [r.value if isinstance(r, Role) else r for r in roles]
        user_data["roles"] = json.dumps(role_names)

    if data:
        user_data.update(data)

    r = client.post(f"{API_URI}/admin/users", data=user_data, headers=admin_headers)
    assert r.status_code == 200
    uuid = cls.get_content(r)

    return uuid, user_data
def post(self, username: str) -> Response:
    """Send (or re-send) an account activation link to *username* by email.

    The response message is identical whether or not the user exists, so
    the endpoint cannot be used to enumerate registered accounts.
    """
    self.auth.verify_blocked_username(username)

    user = self.auth.get_user(username=username)

    # if user is None this endpoint does nothing but the response
    # remain the same to prevent any user guessing
    if user is not None:
        auth = Connector.get_authentication_instance()
        activation_token, payload = auth.create_temporary_token(
            user, auth.ACTIVATE_ACCOUNT)

        server_url = get_frontend_url()

        # presumably '.' is replaced to keep the JWT-like token safe inside
        # the frontend URL path — TODO confirm against the frontend route
        rt = activation_token.replace(".", "+")
        url = f"{server_url}/public/register/{rt}"
        sent = send_activation_link(user, url)

        if not sent:  # pragma: no cover
            raise ServiceUnavailable("Error sending email, please retry")
        # Token is persisted only after a successful send, so a failed email
        # does not leave a dangling activation token
        auth.save_token(user, activation_token, payload, token_type=auth.ACTIVATE_ACCOUNT)

    msg = ("We are sending an email to your email address where "
           "you will find the link to activate your account")
    return self.response(msg)
def test_destroy() -> None:
    """Verify that ServerModes.DESTROY removes the default user (or the DB)."""
    # Sanity check: the default user exists before the destroy
    auth = Connector.get_authentication_instance()
    user = auth.get_user(username=BaseAuthentication.default_user)
    assert user is not None

    create_app(mode=ServerModes.DESTROY)

    if Connector.check_availability("sqlalchemy"):
        # With sqlalchemy the whole database is dropped, so any further
        # access is expected to fail
        with pytest.raises(ServiceUnavailable):
            auth = Connector.get_authentication_instance()
            user = auth.get_user(username=BaseAuthentication.default_user)
    else:
        # Other backends keep the service up but the user must be gone
        auth = Connector.get_authentication_instance()
        user = auth.get_user(username=BaseAuthentication.default_user)
        assert user is None
def test_ip_management(self) -> None:
    """Verify IP geolocation: single IPs resolve, IP lists are rejected."""
    # Always enable during core tests
    if not Connector.check_availability(
            "authentication"):  # pragma: no cover
        log.warning("Skipping authentication test: service not available")
        return

    auth = Connector.get_authentication_instance()

    ip_data = auth.localize_ip("8.8.8.8")

    assert ip_data is not None
    # I don't know if this tests will be stable...
    # (depends on the external geolocation data for Google's public DNS)
    assert ip_data == "United States"

    # Comma-separated IP lists are not localized
    assert auth.localize_ip("8.8.8.8, 4.4.4.4") is None
def test_pushpin(app: Flask) -> None:
    """Exercise the pushpin connector: availability, failures, expiration."""
    if not Connector.check_availability(CONNECTOR):

        # When the service is not available, get_instance must refuse to work
        try:
            obj = connector.get_instance()
            pytest.fail("No exception raised")  # pragma: no cover
        except ServiceUnavailable:
            pass

        log.warning("Skipping {} tests: service not available", CONNECTOR)
        return None

    log.info("Executing {} tests", CONNECTOR)

    try:
        connector.get_instance(host="invalidhostname", port=123)
        pytest.fail(
            "No exception raised on unavailable service")  # pragma: no cover
    except ServiceUnavailable:
        pass

    obj = connector.get_instance()
    assert obj is not None

    obj.disconnect()

    # a second disconnect should not raise any error
    obj.disconnect()

    # Create new connector with short expiration time
    obj = connector.get_instance(expiration=2, verification=1)
    obj_id = id(obj)

    # Connector is expected to be still valid
    obj = connector.get_instance(expiration=2, verification=1)
    assert id(obj) == obj_id

    time.sleep(1)

    # The connection should have been checked and should be still valid
    obj = connector.get_instance(expiration=2, verification=1)
    assert id(obj) == obj_id

    time.sleep(1)

    # Connection should have been expired and a new connector been created
    obj = connector.get_instance(expiration=2, verification=1)
    assert id(obj) != obj_id

    assert obj.is_connected()
    obj.disconnect()
    assert not obj.is_connected()

    # ... close connection again ... nothing should happens
    obj.disconnect()

    # The connector also works as a context manager
    with connector.get_instance() as obj:
        assert obj is not None
def wrapper(self: Any, *args: Any, **kwargs: Any) -> Any:
    """Run the wrapped endpoint inside database transaction(s).

    Opens a transaction on every available backend (neo4j and/or
    sqlalchemy), commits both on success, and rolls both back if the
    endpoint raises; the original exception is always re-raised.
    """
    neo4j_enabled = Connector.check_availability("neo4j")
    sqlalchemy_enabled = Connector.check_availability("sqlalchemy")
    if neo4j_enabled:
        from neomodel import db as neo4j_db

    if sqlalchemy_enabled:
        # thanks to connectors cache this should always match the
        # same instance that will be used from inside the endpoint
        from restapi.connectors import sqlalchemy
        alchemy_db = sqlalchemy.get_instance()

    try:
        if neo4j_enabled:
            neo4j_db.begin()

        # Transaction is already open...
        # if sqlalchemy_enabled:
        #     pass

        out = func(self, *args, **kwargs)

        if neo4j_enabled:
            neo4j_db.commit()

        if sqlalchemy_enabled:
            alchemy_db.session.commit()

        return out
    except Exception as e:
        log.debug("Rolling backend database transaction")
        try:
            if neo4j_enabled:
                neo4j_db.rollback()

            if sqlalchemy_enabled:
                alchemy_db.session.rollback()
        except Exception as sub_ex:  # pragma: no cover
            # A rollback failure must not mask the original error
            log.warning("Exception raised during rollback: {}", sub_ex)
        raise e
def generate_totp(email: Optional[str]) -> str:
    """Return the current TOTP code for the user identified by *email*."""
    assert email is not None

    authentication = Connector.get_authentication_instance()
    account = authentication.get_user(username=email.lower())
    totp_secret = authentication.get_totp_secret(account)

    return pyotp.TOTP(totp_secret).now()
def verify(service: str) -> None:
    """Verify if a service is connected"""
    if not Connector.check_availability(service):
        print_and_exit("Service {} not detected", service)

    log.info("Verifying service: {}", service)

    variables = Connector.services.get(service, {})
    host, port = get_service_address(variables, "host", "port", service)

    # "nohost" marks services without a network endpoint: skip the socket wait
    if host != "nohost":
        wait_socket(host, port, service)

    connector_module = Connector.get_module(service, BACKEND_PACKAGE)

    if not connector_module:  # pragma: no cover
        print_and_exit("Connector {} not detected", service)

    # Actually open a connection to prove authentication works too
    c = connector_module.get_instance()
    log.info("{} successfully authenticated on {}", service,
             c.variables.get("host", service))
def test_init() -> None:
    """Verify that ServerModes.INIT restores/creates the default user."""
    auth = Connector.get_authentication_instance()

    if Connector.authentication_service == "sqlalchemy":
        # Re-init does not work with MySQL due to issues with previous connections
        # Considering that:
        # 1) this is a workaround to test the initialization
        #    (not the normal workflow used by the application)
        # 2) the init is already tested with any other DB, included postgres
        # 3) MySQL is not used by any project
        # => there is no need to go crazy in debugging this issue!
        if auth.db.is_mysql():  # type: ignore
            return

        # sql = sqlalchemy.get_instance()

    if Connector.check_availability("sqlalchemy"):
        # Prevents errors like:
        # sqlalchemy.exc.ResourceClosedError: This Connection is closed
        Connector.disconnect_all()

        # sql = sqlalchemy.get_instance()
        # # Close previous connections, otherwise the new create_app will hang
        # sql.session.remove()
        # sql.session.close_all()

    try:
        create_app(mode=ServerModes.INIT)
    # This is only a rough retry to prevent random errors from sqlalchemy
    except Exception:  # pragma: no cover
        create_app(mode=ServerModes.INIT)

    auth = Connector.get_authentication_instance()
    try:
        user = auth.get_user(username=BaseAuthentication.default_user)
    # SqlAlchemy sometimes can raise an:
    # AttributeError: 'NoneType' object has no attribute 'twophase'
    # due to the multiple app created... should be an issue specific of this test
    # In that case... simply retry.
    except AttributeError:  # pragma: no cover
        user = auth.get_user(username=BaseAuthentication.default_user)

    assert user is not None
def test_ip_management() -> None:
    """Verify IP geolocation: a single IP resolves, an IP list does not."""
    authentication = Connector.get_authentication_instance()

    localized = authentication.localize_ip("8.8.8.8")

    assert localized is not None
    # NOTE: this relies on the external geolocation data staying stable
    assert localized == "United States"

    # Comma-separated IP lists are rejected
    assert authentication.localize_ip("8.8.8.8, 4.4.4.4") is None
def test_authentication_with_auth_callback(self, client: FlaskClient) -> None:
    """Verify the preload callback: valid uuid is authorized, unknown is 401."""
    if not Env.get_bool("AUTH_ENABLE"):
        log.warning("Skipping authentication tests")
        return

    auth = Connector.get_authentication_instance()
    user = auth.get_user(username=BaseAuthentication.default_user)
    assert user is not None

    VALID = f"/tests/preloadcallback/{user.uuid}"
    INVALID = "/tests/preloadcallback/12345678-90ab-cdef-1234-567890abcdef"
    admin_headers, _ = self.do_login(client, None, None)

    # Verify both endpoint ...
    r = client.get(f"{API_URI}{VALID}",
                   query_string={"test": True},
                   headers=admin_headers)
    assert r.status_code == 200
    content = self.get_content(r)
    assert isinstance(content, dict)
    assert len(content) == 1
    assert "email" in content
    assert content["email"] == user.email

    # The callback rejects unknown uuids before reaching the endpoint
    r = client.get(f"{API_URI}{INVALID}",
                   query_string={"test": True},
                   headers=admin_headers)
    assert r.status_code == 401

    # and get_schema!
    r = client.get(
        f"{API_URI}{VALID}",
        query_string={"get_schema": True},
        headers=admin_headers,
    )
    assert r.status_code == 200
    content = self.get_content(r)
    assert isinstance(content, list)
    assert len(content) == 1
    assert content[0]["key"] == "test"
    assert content[0]["type"] == "boolean"

    # The callback also protects schema retrieval
    r = client.get(
        f"{API_URI}{INVALID}",
        query_string={"get_schema": True},
        headers=admin_headers,
    )
    assert r.status_code == 401
def send_notification(
    subject: str,
    template: str,
    # if None will be sent to the administrator
    to_address: Optional[str] = None,
    data: Optional[Dict[str, Any]] = None,
    user: Optional[User] = None,
    send_async: bool = False,
) -> bool:
    """Render an HTML email template and send it via the smtp connector.

    :param subject: subject line, prefixed with the project title
    :param template: name of the HTML template to render
    :param to_address: recipient; if None the administrator is used
    :param data: template variables (project/reply_to/user fields are
        filled in as defaults)
    :param user: if given, username/name/surname defaults are taken from it
    :param send_async: send in background; in that case False is returned
        because the delivery outcome is not known yet
    :return: True only when a synchronous send succeeds
    """
    # Always enabled during tests
    if not Connector.check_availability("smtp"):  # pragma: no cover
        return False

    # NOTE(review): "Unkown" is a typo in the default title; the same default
    # appears elsewhere in the project — fix them together
    title = get_project_configuration("project.title", default="Unkown title")

    reply_to = Env.get("SMTP_NOREPLY", Env.get("SMTP_ADMIN", ""))

    if data is None:
        data = {}

    data.setdefault("project", title)
    data.setdefault("reply_to", reply_to)

    if user:
        data.setdefault("username", user.email)
        data.setdefault("name", user.name)
        data.setdefault("surname", user.surname)

    html_body, plain_body = get_html_template(template, data)
    if not html_body:  # pragma: no cover
        log.error("Can't load {}", template)
        return False

    subject = f"{title}: {subject}"

    if send_async:
        Mail.send_async(
            subject=subject,
            body=html_body,
            to_address=to_address,
            plain_body=plain_body,
        )
        return False

    smtp_client = smtp.get_instance()
    return smtp_client.send(
        subject=subject,
        body=html_body,
        to_address=to_address,
        plain_body=plain_body,
    )
def test_init() -> None:
    """Verify ServerModes.INIT after a destroy (only with --destroy flag)."""
    # Only executed if tests are run with --destroy flag
    if os.getenv("TEST_DESTROY_MODE", "0") != "1":
        log.info("Skipping destroy test, TEST_DESTROY_MODE not enabled")
        return

    # Always enable during core tests
    if not Connector.check_availability("authentication"):  # pragma: no cover
        log.warning("Skipping authentication test: service not available")
        return

    auth = Connector.get_authentication_instance()

    if Connector.authentication_service == "sqlalchemy":
        # Re-init does not work with MySQL due to issues with previous connections
        # Considering that:
        # 1) this is a workaround to test the initialization
        #    (not the normal workflow used by the application)
        # 2) the init is already tested with any other DB, included postgres
        # 3) MySQL is not used by any project
        # => there is no need to go crazy in debugging this issue!
        if auth.db.is_mysql():  # type: ignore
            return

        # sql = sqlalchemy.get_instance()

    create_app(mode=ServerModes.INIT)

    auth = Connector.get_authentication_instance()
    try:
        user = auth.get_user(username=BaseAuthentication.default_user)
    # SqlAlchemy sometimes can raise an:
    # AttributeError: 'NoneType' object has no attribute 'twophase'
    # due to the multiple app created... should be an issue specific of this test
    # In that case... simply retry.
    except AttributeError:  # pragma: no cover
        user = auth.get_user(username=BaseAuthentication.default_user)

    assert user is not None
def test_totp_management() -> None:
    """Verify TOTP validation, including the configurable validity window."""
    auth = Connector.get_authentication_instance()

    with pytest.raises(Unauthorized, match=r"Verification code is missing"):
        # NULL totp
        auth.verify_totp(None, None)

    user = auth.get_user(username=auth.default_user)
    secret = auth.get_totp_secret(user)
    totp = pyotp.TOTP(secret)

    # Verify current totp
    assert auth.verify_totp(user, totp.now())

    now = datetime.now()
    t30s = timedelta(seconds=30)

    # Verify previous and next totp(s)
    assert auth.verify_totp(user, totp.at(now + t30s))
    assert auth.verify_totp(user, totp.at(now - t30s))

    # Verify second-previous and second-next totp(s)
    with pytest.raises(Unauthorized, match=r"Verification code is not valid"):
        # Future totp
        auth.verify_totp(user, totp.at(now + t30s + t30s))

    with pytest.raises(Unauthorized, match=r"Verification code is not valid"):
        # Past totp
        auth.verify_totp(user, totp.at(now - t30s - t30s))

    # Extend validity window
    auth.TOTP_VALIDITY_WINDOW = 2

    # Verify again second-previous and second-next totp(s)
    assert auth.verify_totp(user, totp.at(now + t30s + t30s))
    assert auth.verify_totp(user, totp.at(now - t30s - t30s))

    # Verify second-second-previous and second-second-next totp(s)
    with pytest.raises(Unauthorized, match=r"Verification code is not valid"):
        # Future totp
        auth.verify_totp(user, totp.at(now + t30s + t30s + t30s))

    with pytest.raises(Unauthorized, match=r"Verification code is not valid"):
        # Past totp
        auth.verify_totp(user, totp.at(now - t30s - t30s - t30s))
def get_instance(app: Flask) -> FlaskCache:  # type: ignore
    """Return the process-wide FlaskCache, creating it on first use.

    The instance is memoized on the shared `mem` object; the backend is
    redis when available, otherwise whatever Cache.get_config falls back to.
    """
    # This check prevent KeyError raised during tests
    # Exactly as reported here:
    # https://github.com/sh4nks/flask-caching/issues/191
    if not hasattr(mem, "cache"):
        cache_config = Cache.get_config(
            use_redis=Connector.check_availability("redis"))

        mem.cache = FlaskCache(config=cache_config)
        mem.cache.init_app(app)

    return mem.cache
def test_database_exceptions(self, client: FlaskClient, faker: Faker) -> None:
    """Verify database error mapping and the database_transaction decorator."""
    if not Env.get_bool("AUTH_ENABLE"):
        log.warning("Skipping database exceptions tests")
        return

    # This is a special value. The endpoint will try to create a group without
    # shortname. A BadRequest is expected because the database should refuse the
    # entry due to the missing property
    r = client.post(f"{API_URI}/tests/database/400")
    assert r.status_code == 400
    # This is the message of a DatabaseMissingRequiredProperty
    assert self.get_content(r) == "Missing property shortname required by Group"

    auth = Connector.get_authentication_instance()
    default_group = auth.get_group(name=DEFAULT_GROUP_NAME)
    assert default_group is not None
    # the /tests/database endpoint will change the default group fullname
    # as a side effect to the test the database_transaction decorator
    default_fullname = default_group.fullname

    random_name = faker.pystr()
    # This will create a new group with short/full name == random_name
    r = client.post(f"{API_URI}/tests/database/{random_name}")
    assert r.status_code == 200

    default_group = auth.get_group(name=DEFAULT_GROUP_NAME)
    assert default_group is not None
    # As a side effect the fullname of defaut_group is changed...
    assert default_group.fullname != default_fullname
    # ... and this is the new name
    new_fullname = default_group.fullname

    # This will try to create again a group with short/full name == random_name
    # but this will fail due to unique keys
    r = client.post(f"{API_URI}/tests/database/{random_name}")
    assert r.status_code == 409
    # This is the message of a DatabaseDuplicatedEntry
    # BUGFIX: the original line compared without `assert`, so the result was
    # discarded and nothing was verified; the expected message must also
    # reference random_name (the shortname actually duplicated), not '400'
    error = self.get_content(r)
    assert error == f"A Group already exists with 'shortname': '{random_name}'"

    # The default group will not change again because the
    # database_transaction decorator will undo the change
    default_group = auth.get_group(name=DEFAULT_GROUP_NAME)
    assert default_group is not None
    assert default_group.fullname == new_fullname
def do_queries(self, value: str) -> None:
    """Run a raw query plus an ORM query against whichever DB is enabled.

    Executes both a literal query and the equivalent ORM lookup for the
    Group with shortname == value; results are discarded (the point is
    exercising the query paths, e.g. under the transaction decorator).
    """
    neo4j_enabled = Connector.check_availability("neo4j")
    sql_enabled = Connector.check_availability("sqlalchemy")
    mysql_enabled = sql_enabled and sqlalchemy.SQLAlchemy.is_mysql()
    postgres_enabled = sql_enabled and not sqlalchemy.SQLAlchemy.is_mysql(
    )

    # This is just a stub... to be completed
    if neo4j_enabled:
        graph = neo4j.get_instance()

        graph.cypher(
            "MATCH (g: Group) WHERE g.shortname = $value return g.shortname",
            value=value,
        )

        graph.Group.nodes.get_or_none(shortname=value)

    elif postgres_enabled:
        sql = sqlalchemy.get_instance()
        # Postgres requires double quotes around the reserved word "group"
        t = sqlalchemy.text(
            'SELECT * FROM "group" WHERE shortname = :value')
        # NOTE(review): Engine.execute is deprecated in SQLAlchemy 1.4+;
        # consider engine.connect()/Session.execute — confirm project version
        sql.db.engine.execute(t, value=value)

        sql.Group.query.filter_by(shortname=value).first()

    elif mysql_enabled:
        sql = sqlalchemy.get_instance()
        # MySQL quotes the reserved word with backticks instead
        t = sqlalchemy.text(
            "SELECT * FROM `group` WHERE shortname = :value")
        sql.db.engine.execute(t, value=value)

        sql.Group.query.filter_by(shortname=value).first()
def test_depends_on(self, client: FlaskClient) -> None:
    """The depends_on/depends_on_not endpoints mirror neo4j availability."""
    neo4j_available = Connector.check_availability("neo4j")

    # depends_on is mounted only WITH neo4j, depends_on_not only WITHOUT it
    expected_on = 200 if neo4j_available else 404
    expected_on_not = 404 if neo4j_available else 200

    response = client.get(f"{API_URI}/tests/depends_on/neo4j")
    assert response.status_code == expected_on

    response = client.get(f"{API_URI}/tests/depends_on_not/neo4j")
    assert response.status_code == expected_on_not
def verify(services):
    """Verify connected service"""
    if not services:
        log.warning("Empty list of services, nothing to be verified.")
        log.info("Provide list of services by using --services option")

    for service in services:
        # Abort immediately if the service is not enabled at all
        if not Connector.check_availability(service):
            print_and_exit("Service {} not detected", service)

        log.info("Verifying service: {}", service)

        service_variables = Connector.services.get(service, {})
        host, port = get_service_address(service_variables, "host", "port", service)
        wait_socket(host, port, service)

    log.info("Completed successfully")
def mark_task_as_failed(self: Any, name: str, exception: Exception) -> NoReturn:
    """Mark the current Celery task as permanently failed.

    Logs the failure (with credentials obfuscated in the stack trace),
    optionally notifies administrators by email, records the FAILURE state
    and raises Ignore so Celery does not overwrite the custom state.
    """
    # During tests the request is not populated by Celery: inject fixed values
    if TESTING:
        self.request.id = "fixed-id"
        self.request.task = name

    task_id = self.request.id
    task_name = self.request.task
    arguments = str(self.request.args)

    # Removing username and password from urls in error stack
    clean_error_stack = ""
    for line in traceback.format_exc().split("\n"):
        clean_error_stack += f"{obfuscate_url(line)}\n"

    log.error("Celery task {} ({}) failed", task_id, task_name)
    log.error("Failed task arguments: {}", arguments[0:256])
    log.error("Task error: {}", clean_error_stack)

    if Connector.check_availability("smtp"):
        # BUGFIX: the original message had no {} placeholders, so the extra
        # task_id/task_name arguments were silently dropped
        log.info("Sending error report by email for task {} ({})", task_id,
                 task_name)
        send_celery_error_notification(task_id, task_name, arguments,
                                       clean_error_stack, -1)

    self.update_state(
        state=states.FAILURE,
        meta={
            "exc_type": type(exception).__name__,
            "exc_message": traceback.format_exc().split("\n"),
            # 'custom': '...'
        },
    )

    self.send_event(
        "task-failed",
        # Retry sending the message if the connection is lost
        retry=True,
        exception=str(exception),
        traceback=traceback.format_exc(),
    )

    raise Ignore(str(exception))
def mark_task_as_retriable(self: Any, name: str, exception: Exception,
                           MAX_RETRIES: int) -> NoReturn:
    """Log a retriable task failure and re-raise to trigger a Celery retry.

    Once MAX_RETRIES is exceeded the failure is escalated to
    mark_task_as_failed (which raises and never returns).
    """
    # During tests the request is not populated by Celery: inject fixed values
    if TESTING:
        self.request.id = "fixed-id"
        self.request.task = name
        self.request.retries = 0

    task_id = self.request.id
    task_name = self.request.task
    arguments = str(self.request.args)
    retry_num = 1 + self.request.retries

    # All retries attempts failed,
    # the error will be converted to permanent
    if retry_num > MAX_RETRIES:
        log.critical("MAX retries reached")
        mark_task_as_failed(self=self, name=name, exception=exception)

    # Removing username and password from urls in error stack
    clean_error_stack = ""
    for line in traceback.format_exc().split("\n"):
        clean_error_stack += f"{obfuscate_url(line)}\n"

    log.warning(
        "Celery task {} ({}) failed due to: {}, "
        "but will be retried (fail #{}/{})",
        task_id,
        task_name,
        exception,
        retry_num,
        MAX_RETRIES,
    )

    if Connector.check_availability("smtp"):
        # BUGFIX: the original message had no {} placeholders, so the extra
        # task_id/task_name arguments were silently dropped
        log.info("Sending error report by email for task {} ({})", task_id,
                 task_name)
        send_celery_error_notification(task_id, task_name, arguments,
                                       clean_error_stack, retry_num)

    # Re-raise so Celery's autoretry machinery schedules the next attempt
    raise exception
def wrapper(self, *args, **kwargs):
    """Run the wrapped Celery task and report any failure by email.

    The exception is logged and emailed but deliberately not re-raised:
    the task ends without propagating the error to Celery.
    """
    try:
        return func(self, *args, **kwargs)
    except BaseException:
        task_id = self.request.id
        task_name = self.request.task

        log.error("Celery task {} failed ({})", task_id, task_name)
        arguments = str(self.request.args)
        log.error("Failed task arguments: {}", arguments[0:256])
        log.error("Task error: {}", traceback.format_exc())

        if Connector.check_availability("smtp"):
            # BUGFIX: the original message had no {} placeholders, so the
            # extra task_id/task_name arguments were silently dropped
            log.info("Sending error report by email for task {} ({})",
                     task_id, task_name)

            body = f"""
Celery task {task_id} failed

Name: {task_name}

Arguments: {self.request.args}

Error: {traceback.format_exc()}
"""

            # NOTE(review): "Unkown" typo kept for consistency with the same
            # default used elsewhere in the project — fix them together
            project = get_project_configuration(
                "project.title",
                default="Unkown title",
            )
            subject = f"{project}: task {task_name} failed"

            from restapi.connectors import smtp
            smtp_client = smtp.get_instance()
            smtp_client.send(body, subject)
def test_01_login(self, client: FlaskClient, faker: Faker) -> None:
    """Check that you can login and receive back your token"""
    if not Env.get_bool("AUTH_ENABLE"):
        log.warning("Skipping login tests")
        return

    log.info("*** VERIFY CASE INSENSITIVE LOGIN")
    # BaseAuthentication.load_default_user()
    # BaseAuthentication.load_roles()
    USER = BaseAuthentication.default_user or "just-to-prevent-None"
    PWD = BaseAuthentication.default_password or "just-to-prevent-None"

    # Login by using upper case username
    self.do_login(client, USER.upper(), PWD)

    events = self.get_last_events(1)
    assert events[0].event == Events.login.value
    assert events[0].user == USER
    assert events[0].url == "/auth/login"

    # The recorded login keeps the canonical (lower case) username
    auth = Connector.get_authentication_instance()
    logins = auth.get_logins(USER)
    login = logins[-1]
    assert login.username == USER

    # Wrong credentials
    # Off course PWD cannot be upper :D
    self.do_login(client, USER, PWD.upper(), status_code=401)
    events = self.get_last_events(1)
    assert events[0].event == Events.failed_login.value
    assert events[0].payload["username"] == USER
    assert events[0].url == "/auth/login"

    # Failed attempts are recorded too
    logins = auth.get_logins(USER)
    login = logins[-1]
    assert login.username == USER

    log.info("*** VERIFY valid credentials")
    # Login by using normal username (no upper case)
    headers, _ = self.do_login(client, None, None)

    events = self.get_last_events(1)
    assert events[0].event == Events.login.value
    assert events[0].user == USER
    assert events[0].url == "/auth/login"

    time.sleep(5)
    # Verify MAX_PASSWORD_VALIDITY, if set
    headers, token = self.do_login(client, None, None)

    events = self.get_last_events(1)
    assert events[0].event == Events.login.value
    assert events[0].user == USER
    assert events[0].url == "/auth/login"

    # Stash credentials for the following tests in this class
    self.save("auth_header", headers)
    self.save("auth_token", token)

    # Verify credentials
    r = client.get(f"{AUTH_URI}/status", headers=headers)
    assert r.status_code == 200
    c = self.get_content(r)
    assert isinstance(c, bool) and c

    # this check verifies a BUG with neo4j causing crash of auth module
    # when using a non-email-username to authenticate
    log.info("*** VERIFY with a non-email-username")
    self.do_login(
        client,
        "notanemail",
        "[A-Za-z0-9]+",
        status_code=400,
    )

    # Check failure
    log.info("*** VERIFY invalid credentials")
    random_email = faker.ascii_email()
    self.do_login(
        client,
        random_email,
        faker.password(strong=True),
        status_code=401,
    )
    events = self.get_last_events(1)
    assert events[0].event == Events.failed_login.value
    assert events[0].payload["username"] == random_email
    assert events[0].url == "/auth/login"
from restapi import decorators from restapi.config import get_project_configuration from restapi.connectors import Connector, smtp from restapi.endpoints.profile_activation import send_activation_link from restapi.env import Env from restapi.exceptions import Conflict, ServiceUnavailable from restapi.models import Schema, fields, validate from restapi.rest.definition import EndpointResource, Response from restapi.services.authentication import DEFAULT_GROUP_NAME from restapi.utilities.globals import mem # from restapi.utilities.logs import log # This endpoint requires the server to send the activation token via email if Connector.check_availability("smtp"): auth = Connector.get_authentication_instance() # Note that these are callables returning a model, not models! # They will be executed a runtime def getInputSchema(request): # as defined in Marshmallow.schema.from_dict attributes: Dict[str, Union[fields.Field, type]] = {} attributes["name"] = fields.Str(required=True) attributes["surname"] = fields.Str(required=True) attributes["email"] = fields.Email(required=True, label="Username (email address)") attributes["password"] = fields.Str(
def test_celery(app: Flask, faker: Faker) -> None:
    """End-to-end celery connector test: tasks, beat schedulers, expiration,
    worker mode and the send_errors_by_email decorator."""
    if not Connector.check_availability(CONNECTOR):

        # When the service is unavailable get_instance must refuse to work
        try:
            obj = connector.get_instance()
            pytest.fail("No exception raised")  # pragma: no cover
        except ServiceUnavailable:
            pass

        log.warning("Skipping {} tests: service not available", CONNECTOR)
        return None

    log.info("Executing {} tests", CONNECTOR)

    obj = connector.get_instance()
    assert obj is not None

    task = obj.celery_app.send_task("test_task")
    assert task is not None
    assert task.id is not None

    if obj.variables.get("backend") == "RABBIT":
        log.warning(
            "Due to limitations on RABBIT backend task results will not be tested"
        )
    else:
        try:
            r = task.get(timeout=10)
            assert r is not None
            # This is the task output, as defined in task_template.py.j2
            assert r == "Task executed!"
            assert task.status == "SUCCESS"
            assert task.result == "Task executed!"
        except celery.exceptions.TimeoutError:  # pragma: no cover
            pytest.fail(
                f"Task timeout, result={task.result}, status={task.status}")

    if CeleryExt.CELERYBEAT_SCHEDULER is None:

        # Without a beat scheduler every periodic-task API must raise
        try:
            obj.get_periodic_task("does_not_exist")
            pytest.fail("get_periodic_task with unknown CELERYBEAT_SCHEDULER"
                        )  # pragma: no cover
        except AttributeError as e:
            assert str(e) == "Unsupported celery-beat scheduler: None"
        except BaseException:  # pragma: no cover
            pytest.fail("Unexpected exception raised")

        try:
            obj.delete_periodic_task("does_not_exist")
            pytest.fail(
                "delete_periodic_task with unknown CELERYBEAT_SCHEDULER"
            )  # pragma: no cover
        except AttributeError as e:
            assert str(e) == "Unsupported celery-beat scheduler: None"
        except BaseException:  # pragma: no cover
            pytest.fail("Unexpected exception raised")

        try:
            obj.create_periodic_task(name="task1",
                                     task="task.does.not.exists",
                                     every="60")
            pytest.fail(
                "create_periodic_task with unknown CELERYBEAT_SCHEDULER"
            )  # pragma: no cover
        except AttributeError as e:
            assert str(e) == "Unsupported celery-beat scheduler: None"
        except BaseException:  # pragma: no cover
            pytest.fail("Unexpected exception raised")

        try:
            obj.create_crontab_task(name="task2",
                                    task="task.does.not.exists",
                                    minute="0",
                                    hour="1")
            pytest.fail("create_crontab_task with unknown CELERYBEAT_SCHEDULER"
                        )  # pragma: no cover
        except AttributeError as e:
            assert str(e) == "Unsupported celery-beat scheduler: None"
        except BaseException:  # pragma: no cover
            pytest.fail("Unexpected exception raised")

    else:
        # With a beat scheduler: full create/delete lifecycle
        assert obj.get_periodic_task("does_not_exist") is None
        assert not obj.delete_periodic_task("does_not_exist")

        obj.create_periodic_task(name="task1",
                                 task="task.does.not.exists",
                                 every="60")

        assert obj.delete_periodic_task("task1")
        # deleting twice must report failure, not raise
        assert not obj.delete_periodic_task("task1")

        obj.create_periodic_task(
            name="task1_bis",
            task="task.does.not.exists",
            every="60",
            period="seconds",
            args=["a", "b", "c"],
            kwargs={
                "a": 1,
                "b": 2,
                "c": 3
            },
        )

        assert obj.delete_periodic_task("task1_bis")
        assert not obj.delete_periodic_task("task1_bis")

        # cron at 01:00
        obj.create_crontab_task(name="task2",
                                task="task.does.not.exists",
                                minute="0",
                                hour="1")

        assert obj.delete_periodic_task("task2")
        assert not obj.delete_periodic_task("task2")

        obj.create_crontab_task(
            name="task2_bis",
            task="task.does.not.exists",
            minute="0",
            hour="1",
            day_of_week="*",
            day_of_month="*",
            month_of_year="*",
            args=["a", "b", "c"],
            kwargs={
                "a": 1,
                "b": 2,
                "c": 3
            },
        )

        assert obj.delete_periodic_task("task2_bis")
        assert not obj.delete_periodic_task("task2_bis")

        if CeleryExt.CELERYBEAT_SCHEDULER == "REDIS":
            # The REDIS scheduler accepts int/str/timedelta `every` values
            obj.create_periodic_task(
                name="task3",
                task="task.does.not.exists",
                every=60,
            )
            assert obj.delete_periodic_task("task3")

            obj.create_periodic_task(name="task4",
                                     task="task.does.not.exists",
                                     every=60,
                                     period="seconds")
            assert obj.delete_periodic_task("task4")

            obj.create_periodic_task(name="task5",
                                     task="task.does.not.exists",
                                     every=60,
                                     period="minutes")
            assert obj.delete_periodic_task("task5")

            obj.create_periodic_task(name="task6",
                                     task="task.does.not.exists",
                                     every=60,
                                     period="hours")
            assert obj.delete_periodic_task("task6")

            obj.create_periodic_task(name="task7",
                                     task="task.does.not.exists",
                                     every=60,
                                     period="days")
            assert obj.delete_periodic_task("task7")

            # Invalid period
            try:
                obj.create_periodic_task(
                    name="task8",
                    task="task.does.not.exists",
                    every="60",
                    period="years",  # type: ignore
                )
            except BadRequest as e:
                assert str(e) == "Invalid timedelta period: years"

            obj.create_periodic_task(
                name="task9",
                task="task.does.not.exists",
                every=timedelta(seconds=60),
            )
            assert obj.delete_periodic_task("task9")

            # Invalid `every` types
            try:
                obj.create_periodic_task(
                    name="task10",
                    task="task.does.not.exists",
                    every=["60"],  # type: ignore
                )
            except AttributeError as e:
                assert str(
                    e) == "Invalid input parameter every = ['60'] (type list)"

            try:
                obj.create_periodic_task(
                    name="task11",
                    task="task.does.not.exists",
                    every="invalid",
                )
            except AttributeError as e:
                assert str(
                    e) == "Invalid input parameter every = invalid (type str)"

        else:
            obj.create_periodic_task(name="task3",
                                     task="task.does.not.exists",
                                     every="60",
                                     period="minutes")
            assert obj.delete_periodic_task("task3")

    obj.disconnect()

    # a second disconnect should not raise any error
    obj.disconnect()

    # Create new connector with short expiration time
    obj = connector.get_instance(expiration=2, verification=1)
    obj_id = id(obj)

    # Connector is expected to be still valid
    obj = connector.get_instance(expiration=2, verification=1)
    assert id(obj) == obj_id

    time.sleep(1)

    # The connection should have been checked and should be still valid
    obj = connector.get_instance(expiration=2, verification=1)
    assert id(obj) == obj_id

    time.sleep(1)

    # Connection should have been expired and a new connector been created
    obj = connector.get_instance(expiration=2, verification=1)
    assert id(obj) != obj_id

    assert obj.is_connected()
    obj.disconnect()
    assert not obj.is_connected()

    # ... close connection again ... nothing should happens
    obj.disconnect()

    with connector.get_instance() as obj:
        assert obj is not None

    # Worker mode must produce a valid app and dedicated log file
    app = create_app(mode=ServerModes.WORKER)
    assert app is not None

    from restapi.utilities.logs import LOGS_FILE

    assert os.environ["HOSTNAME"] == "backend-server"
    assert LOGS_FILE == "backend-server"

    # this decorator is expected to be used in celery context, i.e. the self reference
    # should contains a request, injected by celery. Let's mock this by injecting an
    # artificial self
    @send_errors_by_email
    def this_function_raises_exceptions(self):
        raise AttributeError("Just an exception")

    class FakeRequest:
        def __init__(self, task_id, task, args):
            # Mirrors the attributes of a celery task request
            self.id = task_id
            self.task = task
            self.args = args

    class FakeSelf:
        def __init__(self, task_id, task, args):
            self.request = FakeRequest(task_id, task, args)

    task_id = faker.pystr()
    task_name = faker.pystr()
    task_args = [faker.pystr()]

    this_function_raises_exceptions(FakeSelf(task_id, task_name, task_args))

    # The decorator must have sent an error report with all the task details
    mail = BaseTests.read_mock_email()
    assert mail.get("body") is not None
    assert f"Celery task {task_id} failed" in mail.get("body")
    assert f"Name: {task_name}" in mail.get("body")
    assert f"Arguments: {str(task_args)}" in mail.get("body")
    assert "Error: Traceback (most recent call last):" in mail.get("body")
    assert 'raise AttributeError("Just an exception")' in mail.get("body")
import pytest
import pytz
from faker import Faker
from flask import Flask
from neo4j.exceptions import CypherSyntaxError

from restapi.connectors import Connector
from restapi.connectors import neo4j as connector
from restapi.env import Env
from restapi.exceptions import ServiceUnavailable
from restapi.tests import API_URI, BaseTests, FlaskClient
from restapi.utilities.logs import log

# Connector under test; used both for availability checks and log messages.
CONNECTOR = "neo4j"

# Module-level guard executed at import time: decides whether the neo4j
# tests in this module are meaningful in the current environment.
if not Connector.check_availability(CONNECTOR):

    # Service is not available: getting an instance is expected to raise
    # ServiceUnavailable; anything else is a test failure.
    try:
        obj = connector.get_instance()
        pytest.fail("No exception raised")  # pragma: no cover
    except ServiceUnavailable:
        pass

    log.warning("Skipping {} tests: service not available", CONNECTOR)

# Always enabled during core tests
elif not Env.get_bool("TEST_CORE_ENABLED"):  # pragma: no cover
    # NOTE(review): "avaiable" typo lives in the runtime log string below;
    # left untouched here because it is program output, not a comment.
    log.warning("Skipping {} tests: only avaiable on core", CONNECTOR)
else:
    log.info("Executing {} tests", CONNECTOR)
def admin_user_input(request: FlaskRequest, is_post: bool) -> Type[Schema]: is_admin = HTTPTokenAuth.is_session_user_admin(request, auth) attributes: MarshmallowSchema = {} if is_post: # This is because Email is not typed on marshmallow attributes["email"] = fields.Email( # type: ignore required=is_post, validate=validate.Length(max=100)) attributes["name"] = fields.Str( required=is_post, validate=validate.Length(min=1), metadata={"label": "First Name"}, ) attributes["surname"] = fields.Str( required=is_post, validate=validate.Length(min=1), metadata={"label": "Last Name"}, ) attributes["password"] = fields.Str( required=is_post, validate=validate.Length(min=auth.MIN_PASSWORD_LENGTH), metadata={"password": True}, ) if Connector.check_availability("smtp"): attributes["email_notification"] = fields.Bool( metadata={"label": "Notify password by email"}) attributes["is_active"] = fields.Bool( dump_default=True, required=False, metadata={"label": "Activate user"}, ) roles = {r.name: r.description for r in auth.get_roles()} if not is_admin and RoleEnum.ADMIN.value in roles: roles.pop(RoleEnum.ADMIN.value) attributes["roles"] = fields.List( fields.Str(validate=validate.OneOf( choices=[r for r in roles.keys()], labels=[r for r in roles.values()], )), dump_default=[auth.default_role], required=False, unique=True, metadata={ "label": "Roles", "description": "", "extra_descriptions": auth.role_descriptions, }, ) group_keys = [] group_labels = [] for g in auth.get_groups(): group_keys.append(g.uuid) group_labels.append(f"{g.shortname} - {g.fullname}") if len(group_keys) == 1: default_group = group_keys[0] else: default_group = None attributes["group"] = fields.Str( required=is_post, dump_default=default_group, validate=validate.OneOf(choices=group_keys, labels=group_labels), metadata={ "label": "Group", "description": "The group to which the user belongs", }, ) attributes["expiration"] = fields.DateTime( required=False, allow_none=True, metadata={ "label": "Account expiration", 
"description": "This user will be blocked after this date", }, ) if custom_fields := mem.customizer.get_custom_input_fields( request=request, scope=mem.customizer.ADMIN): attributes.update(custom_fields)