Example No. 1
import atexit
import os
import socket
import time

import locust
from locust import HttpLocust, between


class MyLocust(HttpLocust):
    task_set = Tito
    sock = None
    wait_time = between(5, 15)

    def __init__(self):
        super(MyLocust, self).__init__()
        self.sock = socket.socket()
        wavefrontProxy = os.environ.get("WAVEFRONT_PROXY")
        self.sock.connect((wavefrontProxy, 2878))
        locust.events.request_success += self.hook_request_success
        locust.events.request_failure += self.hook_request_fail
        atexit.register(self.exit_handler)

    def hook_request_success(self, request_type, name, response_time,
                             response_length):
        # Report the response time as a Wavefront line-protocol point:
        # <metric> <value> <timestamp> source=<source> <pointTags>
        myHost = self.host.replace("http://", "")
        met_locustRequest = (
            f"locust.response.success {response_time} {time.time()} "
            f"source={myHost} app=Tito \n"
        )
        try:
            self.sock.sendall(met_locustRequest.encode('utf-8'))
        except BrokenPipeError:
            # The socket may already be disconnected; drop this point.
            pass

    def hook_request_fail(self, request_type, name, response_time, exception):
        # Same line-protocol format, but under the "failed" metric name.
        myHost = self.host.replace("http://", "")
        met_locustRequestFailed = (
            f"locust.response.failed {response_time} {time.time()} "
            f"source={myHost} app=Tito \n"
        )
        try:
            self.sock.sendall(met_locustRequestFailed.encode('utf-8'))
        except BrokenPipeError:
            # The socket may already be disconnected; drop this point.
            pass

    def exit_handler(self):
        try:
            self.sock.shutdown(socket.SHUT_RDWR)
        except IOError:
            # The socket may already be in a "not connected" state.
            pass
        finally:
            self.sock.close()
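The hooks above write one Wavefront line-protocol data point per request: metric name, value, timestamp, a source tag and the app=Tito point tag. A minimal standalone sketch of that format, using assumed placeholder values rather than real Locust measurements:

import time

response_time = 42          # placeholder: milliseconds, as Locust reports it
source = "example.com"      # placeholder: target host with the scheme stripped

point = f"locust.response.success {response_time} {time.time()} source={source} app=Tito \n"
print(point)  # e.g. locust.response.success 42 <timestamp> source=example.com app=Tito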
Example No. 2
class Seller(OdooLocust.OdooLocust):
    # All connection settings come from the environment; PORT must be set,
    # otherwise int(None) raises a TypeError.
    host = os.getenv('HOST')
    database = os.getenv('DATABASE')
    port = int(os.getenv('PORT'))
    login = os.getenv('LOGIN')
    password = os.getenv('PASSWORD')
    wait_time = between(0.500, 4)
    weight = 3

    task_set = SellerTaskSet
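The Seller class above reads every connection setting from environment variables. One hypothetical way to provide them for a local run (the variable names match the os.getenv() calls above; all values, including Odoo's usual port 8069, are only placeholders):

import os

# Placeholder values for local experimentation only.
os.environ.setdefault("HOST", "odoo.example.com")
os.environ.setdefault("DATABASE", "odoo")
os.environ.setdefault("PORT", "8069")
os.environ.setdefault("LOGIN", "admin")
os.environ.setdefault("PASSWORD", "admin")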
Example No. 3
class MyTestLocust(Locust):
    task_set = MyTaskSet
    wait_time = between(1, 1)
Example No. 4
class User(Locust):
    host = "127.0.0.1"
    wait_time = between(0.001, 0.1)
Example No. 5
class MyTestLocust(Locust):
    tasks = [MyTaskSet]
    wait_time = between(1, 1)
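All of these examples set wait_time with Locust's between(min, max) helper, which returns a callable that samples a uniformly random pause, in seconds, between the two bounds each time a simulated user finishes a task. A small sketch of that behaviour, calling the returned function directly with a dummy user argument just to show the sampled values:

from locust import between

wait = between(1, 1)      # degenerate case: always 1 second
print(wait(None))         # -> 1.0

wait = between(5, 15)
print(wait(None))         # -> some float in [5, 15), e.g. 9.73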
Example No. 6
class PublishRecord(FastHttpLocust):
    task_set = PublishRecordTaskSet
    wait_time = between(1, 2)

    def __init__(self):
        # No extra setup; delegate straight to FastHttpLocust.
        FastHttpLocust.__init__(self)
Example No. 7
    class InitializeClient(TaskSequence):
        """
        Initial loading of cellxgene - when the user hits the main route.

        Currently this sequence skips some of the static assets, which are quite
        small and should be served by the HTTP server directly.

        1. load index.html, etc.
        2. concurrently load /config, /schema
        3. concurrently load /layout/obs, /annotations/var?annotation-name=<the index>
        -- does initial render --
        4. concurrently load all /annotations/obs
        -- fully initialized --
        """

        # users hit all of the init routes as fast as they can, subject to the ordering constraints
        # and network latency
        wait_time = between(0.01, 0.1)

        def on_start(self):
            self.dataset = self.parent.dataset
            self.client.verify = False

        @seq_task(1)
        def index(self):
            self.client.get(f"{self.dataset}/", stream=True).close()

        @seq_task(2)
        def loadConfigSchema(self):
            def config():
                self.client.get(f"{self.dataset}{API}/config", stream=True).close()

            def schema():
                self.client.get(f"{self.dataset}{API}/schema", stream=True).close()

            group = Group()
            group.spawn(config)
            group.spawn(schema)
            group.join()

        @seq_task(3)
        def loadBootstrapData(self):
            def layout():
                self.client.get(
                    f"{self.dataset}{API}/layout/obs", headers={"Accept": "application/octet-stream"}, stream=True
                ).close()

            def varAnnotationIndex():
                self.client.get(
                    f"{self.dataset}{API}/annotations/var?annotation-name={self.parent.var_index_name()}",
                    headers={"Accept": "application/octet-stream"},
                    stream=True,
                ).close()

            group = Group()
            group.spawn(layout)
            group.spawn(varAnnotationIndex)
            group.join()

        @seq_task(4)
        def loadObsAnnotations(self):
            def obs_annotation(name):
                self.client.get(
                    f"{self.dataset}{API}/annotations/obs?annotation-name={name}",
                    headers={"Accept": "application/octet-stream"},
                    stream=True,
                ).close()

            obs_names = self.parent.obs_annotation_names()
            group = Group()
            for name in obs_names:
                group.spawn(obs_annotation, name)
            group.join()

        @seq_task(5)
        def done(self):
            self.interrupt()
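Steps 2-4 above fan each HTTP call out into its own greenlet and wait for all of them with gevent's Group. A minimal standalone sketch of that spawn/join pattern, with gevent.sleep standing in for the Locust client calls:

import gevent
from gevent.pool import Group

def fetch(name):
    # Placeholder for self.client.get(...); sleeping yields to other greenlets.
    gevent.sleep(0.1)
    print(f"fetched {name}")

group = Group()
for name in ("config", "schema"):
    group.spawn(fetch, name)    # start each request concurrently
group.join()                    # block until every greenlet has finished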
Example No. 8
class CellxgeneUser(HttpLocust):
    task_set = ViewDataset

    # Most operations do not require back-end interaction, so simulated
    # users run at a slow cadence.
    wait_time = between(10, 60)
Example No. 9
class ApiTest(FastHttpLocust):
    host = ''
    task_set = PressureTest
    wait_time = between(0.0, 0.0)
Example No. 10
class TaskSet2(TaskSet):
    wait_time = between(20.0, 21.0)
Example No. 11
class User(Locust):
    wait_time = between(3, 9)