import os
import datetime
import uuid
import random
from spylunking.log.setup_logging import test_logger
from celery_connectors.utils import get_percent_done
from celery_connectors.utils import ev
from tests.base_test import BaseTestCase
import ecomm_app.ecommerce.tasks

log = test_logger(
    name='load-test-rabbit-worker')


class LoadTestWorkerRabbitMQConsuming(BaseTestCase):

    def build_user_conversion_event_msg(
            self,
            test_values,
            now=datetime.datetime.now().isoformat()):
        body = {"account_id": 777,
                "subscription_id": 888,
                "stripe_id": 999,
                "product_id": "XYZ",
                "simulate_processing_lag": random.uniform(1.0, 5.0),
                "msg_id": str(uuid.uuid4()),
                "created": now}
        return body
    # end of build_user_conversion_event_msg

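# Usage sketch (an assumption for illustration, not part of the original
# test suite): shows the payload shape build_user_conversion_event_msg
# returns. "msg_id" is unique per call, while the "created" default is
# evaluated once at import time because it is a default argument.
if __name__ == "__main__":
    worker_test = LoadTestWorkerRabbitMQConsuming()
    first_msg = worker_test.build_user_conversion_event_msg(test_values={})
    second_msg = worker_test.build_user_conversion_event_msg(test_values={})
    assert first_msg["msg_id"] != second_msg["msg_id"]
    log.info("sample user conversion event: {}".format(first_msg))
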
import os
import datetime
import uuid
import random
from spylunking.log.setup_logging import test_logger
from celery_connectors.utils import get_percent_done
from celery_connectors.utils import ev
from tests.base_test import BaseTestCase

log = test_logger(name="load-test-rabbit-subscriber")


class LoadTestSubscriberRabbitMQConsuming(BaseTestCase):

    def build_user_conversion_event_msg(
            self,
            test_values,
            now=datetime.datetime.now().isoformat()):
        body = {
            "account_id": 777,
            "subscription_id": 888,
            "stripe_id": 999,
            "product_id": "XYZ",
            "simulate_processing_lag": random.uniform(1.0, 5.0),
            "msg_id": str(uuid.uuid4()),
            "created": now
        }
        return body
    # end of build_user_conversion_event_msg

    def test_rabbitmq_consuming(self):

from spylunking.log.setup_logging import test_logger
from antinex_utils.consts import SUCCESS
from antinex_utils.consts import ERR
from antinex_utils.consts import FAILED
from tests.mock_model import MockModel

log = test_logger(name='mock-predictions')


def build_response_data(req):
    """build_response_data

    :param req: request dict
    """
    model = MockModel(req=req)
    predictions = req.get("test_predictions", [])
    sample_predictions = req.get("test_predictions", [])
    rounded = req.get("test_predictions", [])
    accuracy = req.get("test_accuracy", {"accuracy": 52.5})
    error = req.get("test_error", None)
    image_file = req.get("image_file", None)
    history = req.get("history", None)
    histories = req.get("histories", None)
    indexes = req.get("test_indexes", None)
    scores = req.get("test_scores", None)
    cm = req.get("test_cm", None)
    predicts_merged = req.get("test_predicts_merged", False)
    merge_df = req.get("test_merge_df", None)
    data = {
        "predictions": predictions,
        "rounded_predictions": rounded,

import os
import datetime
import uuid
import random
from spylunking.log.setup_logging import test_logger
from celery_connectors.utils import get_percent_done
from celery_connectors.utils import ev
from tests.base_test import BaseTestCase

log = test_logger(
    name="load-test-rabbit-relay")


class LoadTestRelayRabbitMQConsuming(BaseTestCase):

    def build_user_conversion_event_msg(
            self,
            test_values,
            now=datetime.datetime.now().isoformat()):
        body = {"account_id": 777,
                "subscription_id": 888,
                "stripe_id": 999,
                "product_id": "XYZ",
                "simulate_processing_lag": random.uniform(1.0, 5.0),
                "msg_id": str(uuid.uuid4()),
                "created": now}
        return body
    # end of build_user_conversion_event_msg

    def test_rabbitmq_consuming(self):

import datetime
import json
import uuid
import unittest
import pandas as pd
from spylunking.log.setup_logging import test_logger
from celery_connectors.publisher import Publisher

log = test_logger(name='test_base')


class BaseTestCase(unittest.TestCase):

    def setUp(self):
        """setUp"""
        self.name = "testing_{}".format(str(uuid.uuid4()))
        self.broker_url = "memory://localhost/"
        self.ssl_options = None
        self.serializer = "json"
        self.train_exchange_name = "webapp.train.requests"
        self.train_routing_key = "webapp.train.requests"
        self.train_queue_name = "webapp.train.requests"
        self.predict_exchange_name = "webapp.predict.requests"
        self.predict_routing_key = "webapp.predict.requests"
        self.predict_queue_name = "webapp.predict.requests"
        self.pub = None
    # end of setUp

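# Usage sketch (an assumption, not from the original suite): a subclass of
# BaseTestCase can rely on the broker and naming attributes assigned in
# setUp. All three train identifiers share one value and no Publisher is
# created until a test builds one.
class ExampleBaseTestUsage(BaseTestCase):

    def test_default_train_names_and_publisher(self):
        self.assertEqual(self.train_exchange_name, self.train_routing_key)
        self.assertEqual(self.train_routing_key, self.train_queue_name)
        self.assertEqual(self.broker_url, "memory://localhost/")
        self.assertIsNone(self.pub)
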
from spylunking.log.setup_logging import test_logger
from celery_connectors.utils import get_percent_done
from tests.base_test import BaseTestCase

log = test_logger(name='consume-many')


class TestConsumeLargeNumberOfMessages(BaseTestCase):

    def test_consuming_large_number_of_messages(self):

        # Test the Publisher and Subscriber with 50,000 messages
        # and verify each unique message id was consumed
        # default is using the rabbitmq broker
        num_to_consume = 50000
        num_sent = 0
        num_to_send = num_to_consume
        num_consumed = 0
        msgs_to_send = []
        msgs_received = []
        msgs_by_id = {}

        self.exchange_name = "test_large_num_1"
        self.routing_key = "test_large_num_1.orders"
        self.queue_name = "test_large_num_1.orders"

        class TestMessageProcessor:

            def __init__(self,
                         should_consume=1,
                         test_id=None,
                         stop_after_num=-1,

import mock
from tests.base_test import BaseTestCase
from tests.mock_make_predictions import mock_make_predictions_success
from tests.mock_make_predictions import mock_make_predictions_error
from tests.mock_make_predictions import mock_make_predictions_fail
from tests.mock_message import MockMessage
from spylunking.log.setup_logging import test_logger
from antinex_core.antinex_processor import AntiNexProcessor

log = test_logger(name='test-train')


class TestTrain(BaseTestCase):

    @mock.patch(
        "antinex_utils.make_predictions.make_predictions",
        new=mock_make_predictions_success)
    def test_train_antinex_simple(self):
        exchange = "webapp.train.requests"
        routing_key = "webapp.train.requests"
        queue = "webapp.train.requests"
        max_models = 1
        prc = AntiNexProcessor(max_models=max_models)
        body = self.build_train_antinex_request()
        self.assertEqual(body["ml_type"], "classification")
        message = MockMessage(exchange=exchange,
                              routing_key=routing_key,
                              queue=queue)
        self.assertEqual(message.state, "NOTRUN")
        self.assertEqual(message.get_exchange(), exchange)

import mock
from tests.base_test import BaseTestCase
from tests.mock_make_predictions import mock_make_predictions_success
from tests.mock_make_predictions import mock_make_predictions_error
from tests.mock_make_predictions import mock_make_predictions_fail
from tests.mock_message import MockMessage
from spylunking.log.setup_logging import test_logger
from antinex_core.antinex_processor import AntiNexProcessor

log = test_logger(name='test-predict')


class TestPredict(BaseTestCase):

    @mock.patch(
        "antinex_utils.make_predictions.make_predictions",
        new=mock_make_predictions_success)
    def test_predict_antinex_simple(self):
        exchange = "webapp.predict.requests"
        routing_key = "webapp.predict.requests"
        queue = "webapp.predict.requests"
        max_models = 1
        prc = AntiNexProcessor(
            max_models=max_models)
        body = self.build_predict_antinex_request()
        self.assertEqual(
            body["ml_type"], "classification")

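# Assumed invocation (not part of the original files): the suites above are
# standard unittest.TestCase classes, so discovery along these lines should
# work from the repository root, although the project may ship its own
# runner script:
#
#   python -m unittest discover -s tests -p "test*.py"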