from microservice.core import service_host  # assumed import path for the service-host module
from microservice.core.service_waypost import init_service_waypost


def mock_setup(self, service_name, interface=False):
    """Test helper: configure the microservice framework and attach a Flask test client."""
    service_host.configure_microservice()
    init_service_waypost()

    service_host.app.testing = True
    self.app = service_host.app.test_client()
    self.service_name = service_name
    service_host.add_local_service(self.service_name, no_local_function=interface)
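For context, a helper like this is usually bound into a unittest.TestCase and invoked from setUp. A minimal sketch of that pattern, assuming mock_setup is importable at module level and reusing the echo_as_dict example service from the echo example below as the service name:

import unittest


class TestEchoService(unittest.TestCase):
    # Bind the plain function above as a method so that `self` resolves to the test case.
    mock_setup = mock_setup

    def setUp(self):
        # Register the echo example as a local service and attach a Flask test client.
        self.mock_setup("microservice.examples.echo.echo_as_dict")

    def test_client_is_ready(self):
        # mock_setup should have stored the test client and the service name on the test case.
        self.assertIsNotNone(self.app)
        self.assertEqual(self.service_name,
                         "microservice.examples.echo.echo_as_dict")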
from microservice.core.service_waypost import init_service_waypost

from microservice.examples.intensive_calculators import (
    intensive_calculation_1, intensive_calculation_2, intensive_calculation_3)

if __name__ == "__main__":
    init_service_waypost()

    for i in range(30):
        print("Intensive calculation 1x100000 says:",
              intensive_calculation_1(100000))
        # intensive_calculation_3 makes 3 calls into intensive_calculation_2.
        # Expectation is that, at this difficulty (at least on my PC), the intensive_calculation_2
        # microservice nears 0% idle - so the orchestrator should scale it up.
        # Because 3 calls into 2, the framework will then start load balancing between the old and
        # new instances. If you call intensive_calculation_2 directly from here, you don't get load
        # balancing, because this dumb client doesn't know how to receive service updates.
        print("Intensive calculation 3x1000000 says:",
              intensive_calculation_3(1000000))
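The comments above describe a fan-out: intensive_calculation_3 makes three calls into intensive_calculation_2, which is what drives the orchestrator to scale intensive_calculation_2 up and then load balance across its instances. A purely illustrative sketch of that shape (the real bodies in microservice.examples.intensive_calculators are not reproduced here and will differ):

# Hypothetical sketch only - not the actual intensive_calculators implementation.
def intensive_calculation_2(difficulty):
    # Burn CPU in proportion to `difficulty` (stand-in busy-work).
    return sum(i * i for i in range(difficulty))


def intensive_calculation_3(difficulty):
    # Fan out: three calls into intensive_calculation_2, so its load triples and the
    # orchestrator has a reason to scale it up.
    return [intensive_calculation_2(difficulty) for _ in range(3)]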
Example 3
from microservice.core.service_waypost import init_service_waypost

from microservice.examples.echo import echo_as_dict2, echo_as_dict, echo_as_dict3

if __name__ == "__main__":
    init_service_waypost(disable_heartbeating=True)

    print("Echo as dict 2 says:",
          echo_as_dict2(1, 2, 3, apple=5, banana="cabbage"))
    print("Echo as dict 1.1 says:",
          echo_as_dict(4, 5, 6, apple=5, banana="cabbage"))
    print("Echo as dict 1.2 says:",
          echo_as_dict(4, 5, 6, apple=5, banana="cabbage"))
    print("Echo as dict 3 says:",
          echo_as_dict3(4, 5, 6, apple=5, banana="cabbage"))
    print("Done")
Example 4
import threading

from flask import request

# `app`, `settings`, `logger`, `configure_microservice` and `add_local_service` are assumed
# to be module-level names in the surrounding service-host module; they are not imported here.


def initialise_microservice(service_name,
                            host=None,
                            port=None,
                            external_interface=False,
                            **kwargs):
    from microservice.core.service_waypost import init_service_waypost

    host = host if host is not None else "0.0.0.0"
    port = port if port is not None else 5000
    logger.info("Starting service on {host}:{port}",
                extra={
                    'host': host,
                    'port': port
                })

    configure_microservice()
    init_service_waypost()
    add_local_service(service_name, no_local_function=external_interface)

    # Not sure why these routes don't work if defined in the global scope. Defining them here is nasty, but it works for now.
    @app.route('/ping')
    def ping():
        """
        Used for alive-ness checking.
        """
        return "pong"

    @app.route('/echo')
    def echo():
        """
        Echo the service name.
        """
        return service_name

    @app.route('/deployment_mode', methods=['GET', 'POST'])
    def deployment_mode():
        """
        GET settings.deployment_mode (read only)
        POST settings.deployment_mode (write and then read)

        This route is provided for testability. The deployment mode needs to change during testing, but when
        a microservice is created, the deployment mode takes the default defined in the settings and can't be
        overridden without this function.
        """
        if request.method == 'GET':
            return settings.deployment_mode.value
        elif request.method == 'POST':
            new_deployment_mode = request.form['deployment_mode']
            logger.info("Setting deployment_mode to: {deployment_mode}",
                        extra={'deployment_mode': new_deployment_mode})
            settings.deployment_mode = settings.DeploymentMode(
                new_deployment_mode)
            return settings.deployment_mode.value
        return "Method not allowed"

    @app.route('/deployment_manager_uri', methods=['GET', 'POST'])
    def deployment_manager_uri():
        """
        GET settings.ServiceWaypost.deployment_manager_uri (read only)
        POST settings.ServiceWaypost.deployment_manager_uri (write and then read)

        This method allows getting/setting of the deployment manager uri.
        """
        if request.method == 'GET':
            return settings.ServiceWaypost.deployment_manager_uri
        elif request.method == 'POST':
            new_deployment_manager_uri = request.form['deployment_manager_uri']
            logger.info(
                "Setting deployment_manager_uri to: {deployment_manager_uri}",
                extra={'deployment_manager_uri': new_deployment_manager_uri})
            settings.ServiceWaypost.deployment_manager_uri = new_deployment_manager_uri
            return settings.ServiceWaypost.deployment_manager_uri
        return "Method not allowed"

    @app.route('/terminate')
    def terminate():
        """
        Trigger this flask app to terminate.
        """
        func = request.environ.get('werkzeug.server.shutdown')
        if func is None:
            raise RuntimeError('Not running with the Werkzeug Server')
        func()
        return "Server shutting down..."

    # Start up flask in a thread so that execution can continue without blocking on flask.
    thrd = threading.Thread(target=app.run,
                            kwargs={
                                'host': host,
                                'port': port,
                                'threaded': True,
                            })
    thrd.start()
    settings.flask_app_thread = thrd
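Since the Flask app runs in a background thread, initialise_microservice returns as soon as the thread is started. A rough usage sketch, assuming the requests library is available and using a placeholder service name with the default port:

import time

import requests  # assumed to be installed; used only for this illustration

# Start the service host for one of the example services; this call returns once the
# Flask thread has started, not when the server exits.
initialise_microservice("microservice.examples.echo.echo_as_dict", port=5000)
time.sleep(1)  # give the embedded server a moment to come up

# Exercise the built-in routes defined above.
print(requests.get("http://127.0.0.1:5000/ping").text)  # "pong"
print(requests.get("http://127.0.0.1:5000/echo").text)  # the service name

# Ask the app to shut itself down via the /terminate route.
requests.get("http://127.0.0.1:5000/terminate")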