Example 1
    def get_publisher(self, broker_url=None, ssl_options=None):
        """get_publisher

        :param broker_url: broker url
        :param ssl_options: ssl options
        """
        if self.pub:
            return self.pub

        self.pub = Publisher(name=self.name,
                             auth_url=broker_url or self.broker_url,
                             ssl_options=ssl_options or self.ssl_options)

        return self.pub
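
A minimal usage sketch for this cached accessor, assuming it lives on a class whose `Publisher` supports the `publish` keyword arguments shown in the later examples (the method name `send_test_message` is hypothetical):

    def send_test_message(self):
        pub = self.get_publisher()          # first call builds the Publisher
        assert self.get_publisher() is pub  # later calls reuse the cached instance
        return pub.publish(body={"account_id": 123},
                           exchange="webapp.predict.requests",
                           routing_key="webapp.predict.requests",
                           queue="webapp.predict.requests",
                           serializer="json",
                           retry=True)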
Example 2
    def connect_pub(self,
                    auth_url=None,
                    ssl_options={},
                    attrs={}):

        use_auth_url = self.pub_auth_url
        use_ssl_options = self.pub_ssl_options
        use_pub_attrs = self.pub_attrs

        if auth_url:
            use_auth_url = auth_url
        if len(ssl_options) > 0:
            use_ssl_options = ssl_options
        if len(attrs) > 0:
            use_pub_attrs = attrs

        self.pub = Publisher("test-pub",
                             use_auth_url,
                             use_ssl_options)
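
Because `ssl_options={}` and `attrs={}` are evaluated once at definition time, the same dict objects are shared across every call. A sketch of an equivalent signature with `None` defaults (my own variant, not the library's API) avoids that pitfall:

    def connect_pub(self, auth_url=None, ssl_options=None, attrs=None):
        # fall back to the instance defaults when nothing is passed in
        use_auth_url = auth_url if auth_url else self.pub_auth_url
        use_ssl_options = ssl_options if ssl_options else self.pub_ssl_options
        use_pub_attrs = attrs if attrs else self.pub_attrs  # kept for parity; unused below

        self.pub = Publisher("test-pub",
                             use_auth_url,
                             use_ssl_options)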
Example 3
def get_publisher():
    """get_publisher"""
    log.info("initializing publisher")
    pub = None
    if FORWARD_ENDPOINT_TYPE == "redis":
        auth_url = FORWARD_BROKER_URL
    else:
        # non-redis endpoint types currently use the same forward broker url
        auth_url = FORWARD_BROKER_URL

    pub = Publisher(name="{}_{}".format(SOURCE, "-redis"),
                    auth_url=auth_url,
                    ssl_options=FORWARD_SSL_OPTIONS)

    log.info("publisher={}".format(pub))
    return pub
def publish_regression_prediction_to_broker():
    """publish_regression_prediction_to_broker

    Publish a Regression Prediction message to the
    Celery Worker's broker queue. This message
    is a JSON Dictionary.

    Default Broker: ``redis://localhost:6379/6``
    Default Exchange: ``webapp.predict.requests``
    Default Routing Key: ``webapp.predict.requests``
    Default Queue: ``webapp.predict.requests``

    """
    parser = argparse.ArgumentParser(description=("Launch a Predict Dataset "
                                                  "Request into the AntiNex "
                                                  "core"))
    parser.add_argument("-f",
                        help=("request json file to use default: "
                              "./training/scaler-regression.json"),
                        required=False,
                        dest="request_file")
    parser.add_argument("-d",
                        help="debug",
                        required=False,
                        dest="debug",
                        action="store_true")
    args = parser.parse_args()

    name = "predict-publisher"

    log.info("{} - start".format(name))

    request_file = "./training/scaler-regression.json"
    if args.request_file:
        request_file = args.request_file

    exchange_name = "webapp.predict.requests"
    routing_key = "webapp.predict.requests"
    queue_name = "webapp.predict.requests"
    auth_url = "redis://localhost:6379/6"
    serializer = "json"

    if not os.path.exists(request_file):
        log.error(("Missing request file={}").format(request_file))
        sys.exit(1)

    req_data = None
    with open(request_file, "r") as cur_file:
        req_data = json.loads(cur_file.read())

    if not req_data:
        log.error(
            ("Did not find request data in file={}").format(request_file))
        sys.exit(1)

    # import ssl
    # Connection("amqp://", login_method='EXTERNAL', ssl={
    #            "ca_certs": '/etc/pki/tls/certs/something.crt',
    #            "keyfile": '/etc/something/system.key',
    #            "certfile": '/etc/something/system.cert',
    #            "cert_reqs": ssl.CERT_REQUIRED,
    #          })
    #
    ssl_options = {}
    app = Publisher(name, auth_url, ssl_options)

    if not app:
        log.error(("Failed to connect to broker={}").format(auth_url))
    else:

        # Now send:
        now = datetime.datetime.now().isoformat()
        body = req_data
        body["created"] = now
        log.info("loading predict_rows")
        predict_rows_df = pd.read_csv(req_data["dataset"])
        predict_rows = []
        for idx, org_row in predict_rows_df.iterrows():
            new_row = json.loads(org_row.to_json())
            new_row["idx"] = len(predict_rows) + 1
            predict_rows.append(new_row)
        body["predict_rows"] = pd.DataFrame(predict_rows).to_json()

        log.info(("using predict_rows={}").format(len(predict_rows)))

        log.info(("Sending msg={} "
                  "ex={} rk={}").format(
                      str(body)[0:10], exchange_name, routing_key))

        # Publish the message:
        msg_sent = app.publish(body=body,
                               exchange=exchange_name,
                               routing_key=routing_key,
                               queue=queue_name,
                               serializer=serializer,
                               retry=True)

        log.info(("End - {} sent={}").format(name, msg_sent))
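
The function above is written as a script entry point; a hypothetical `__main__` guard (not part of the original listing, and the file name in the comment is only illustrative) would make the snippet runnable directly:

if __name__ == "__main__":
    # e.g. python publish_regression_predict.py -f ./training/scaler-regression.json
    publish_regression_prediction_to_broker()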
    def get_pub(self):
        if not self.pub:
            self.pub = Publisher("msg-pub", self.pub_auth_url,
                                 self.pub_ssl_options)
        return self.pub
Example 6
class BaseTestCase(unittest.TestCase):

    debug = False

    exchange_name = ev("TEST_EXCHANGE", "test.events")
    queue_name = ev("TEST_QUEUE", "test.events.conversions")
    routing_key = ev("TEST_ROUTING_KEY", "test.events.conversions")

    exchange = None
    queue = None

    rabbitmq_auth_url = ev("TEST_RABBITMQ_BROKER_URL", "pyamqp://*****:*****@localhost:5672//")
    redis_auth_url = ev("TEST_REDIS_BROKER_URL", "redis://localhost:6379/0")

    pub_auth_url = rabbitmq_auth_url
    sub_auth_url = rabbitmq_auth_url

    pub_ssl_options = {}
    sub_ssl_options = {}

    pub_attrs = {}
    sub_attrs = {}

    pub_serializer = "json"
    sub_serializer = "application/json"

    test_id = str(uuid.uuid4()).replace("-", "")

    test_body = {"account_id": 123,
                 "subscription_id": 456,
                 "stripe_id": 789,
                 "product_id": "ABC"}

    pub_msgs = []
    sub_msgs = []

    last_pub_msg = None
    last_sub_msg = None
    last_sub_callback = None

    def setUp(self):
        if self.debug:
            print("setUp")

        # state trips in the custom classes
        os.environ["TEST_STOP_DONE"] = "1"

        self.last_pub_msg = None
        self.last_sub_msg = None
        self.pub = None
        self.sub = None
        self.pub_msgs = []
        self.sub_msgs = []

        self.exchange_name = ev("TEST_EXCHANGE", "test.events")
        self.routing_key = ev("TEST_ROUTING_KEY", "test.events.conversions")
        self.queue_name = ev("TEST_QUEUE", "test.events.conversions")

        self.exchange = None
        self.queue = None
        self.last_sub_callback = None

    # end of setUp

    def tearDown(self):
        if self.debug:
            print("tearDown")
        self.pub = None
        self.sub = None
        self.exchange = None
        self.queue = None
        self.last_sub_callback = None
    # end of tearDown

    def handle_message(self,
                       body,
                       msg):

        log.info(("test={} BASETEST handle_message got "
                  "body={} msg={}")
                 .format(self.test_id,
                         body,
                         msg))

        if msg:
            msg.ack()
    # end of handle_message

    def connect_pub(self,
                    auth_url=None,
                    ssl_options={},
                    attrs={}):

        use_auth_url = self.pub_auth_url
        use_ssl_options = self.pub_ssl_options
        use_pub_attrs = self.pub_attrs

        if auth_url:
            use_auth_url = auth_url
        if len(ssl_options) > 0:
            use_ssl_options = ssl_options
        if len(attrs) > 0:
            use_pub_attrs = attrs

        self.pub = Publisher("test-pub",
                             use_auth_url,
                             use_ssl_options)

    # end of connect_pub

    def connect_sub(self,
                    auth_url=None,
                    ssl_options={},
                    attrs={}):

        use_auth_url = self.sub_auth_url
        use_ssl_options = self.sub_ssl_options
        use_sub_attrs = self.sub_attrs

        if auth_url:
            use_auth_url = auth_url
        if len(ssl_options) > 0:
            use_ssl_options = ssl_options
        if len(attrs) > 0:
            use_sub_attrs = attrs

        self.sub = KombuSubscriber("test-sub",
                                   use_auth_url,
                                   use_ssl_options)
    # end of connect_sub

    def build_msg(self,
                  test_values={}):

        body = {"test_id": self.test_id,
                "date": datetime.datetime.now().strftime("%Y-%m-%d %H-%M-%S"),
                "msg_id": str(uuid.uuid4()).replace("-", ""),
                "test_values": test_values}

        return body
    # end of build_msg

    def consume(self,
                callback=None,
                queue=queue,
                exchange=exchange,
                routing_key=routing_key,
                serializer="application/json",
                heartbeat=60,
                time_to_wait=5.0,
                forever=False,
                silent=True):

        if not callback:
            log.error(("Subscriber - Requires a callback handler for message "
                       "processing with signature definition: "
                       "def handle_message(self, body, message):"))
            assert(callback)

        # if not connected, just connect with defaults
        if not self.sub:
            self.connect_sub()
            if not self.sub:
                log.error(("Subscriber - Failed to connect "
                           "to broker={} ssl={}")
                          .format(self.sub_auth_url,
                                  self.sub_ssl_options))
                assert(self.sub)

        if self.sub:

            self.sub.consume(callback=callback,
                             queue=queue,
                             exchange=exchange,
                             routing_key=routing_key,
                             serializer=serializer,
                             heartbeat=heartbeat,
                             forever=forever,
                             time_to_wait=time_to_wait,
                             silent=silent)

        else:
            log.info("Sub is None already - client should not call consume")
    # end of consume

    def publish(self,
                body=None,
                exchange=exchange,
                routing_key=routing_key,
                queue=queue,
                priority=0,
                ttl=None,
                serializer="json",
                retry=True,
                silent=True):

        # if no body for the message
        if not body:
            log.error(("Publisher - requires argument: "
                       "body=some_dictionary to test"))
            assert(body)

        # if not connected, just connect with defaults
        if not self.pub:
            self.connect_pub()
            if not self.pub:
                log.error(("Publisher - Failed to connect "
                           "to broker={} ssl={}")
                          .format(self.pub_auth_url,
                                  self.pub_ssl_options))
                assert(self.pub)

        if self.pub:
            self.pub.publish(body=body,
                             exchange=exchange,
                             routing_key=routing_key,
                             queue=queue,
                             serializer=serializer,
                             priority=priority,
                             ttl=ttl,
                             retry=retry,
                             silent=silent)
        else:
            log.info("Pub is None already - client should not call publish")
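
A test built on this base class would typically publish a message with the class-level defaults and then consume it back; a minimal sketch under that assumption (the test class name and payload are hypothetical):

class RoundTripTest(BaseTestCase):

    def test_publish_then_consume(self):
        body = self.build_msg(test_values={"simple": "payload"})
        self.publish(body=body)                  # connects with defaults on first use
        self.consume(callback=self.handle_message,
                     time_to_wait=5.0)           # handle_message acks the delivery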
Example 7
class BaseTestCase(unittest.TestCase):
    def setUp(self):
        """setUp"""

        self.name = "testing_{}".format(str(uuid.uuid4()))
        self.broker_url = "memory://localhost/"
        self.ssl_options = None
        self.serializer = "json"

        self.train_exchange_name = "webapp.train.requests"
        self.train_routing_key = "webapp.train.requests"
        self.train_queue_name = "webapp.train.requests"

        self.predict_exchange_name = "webapp.predict.requests"
        self.predict_routing_key = "webapp.predict.requests"
        self.predict_queue_name = "webapp.predict.requests"

        self.pub = None

    # end of setUp

    def get_broker(self):
        """get_broker"""
        return self.broker_url

    # end of get_broker

    def get_ssl_options(self):
        """get_ssl_options"""
        return self.ssl_options

    # end of get_ssl_options

    def get_publisher(self, broker_url=None, ssl_options=None):
        """get_publisher

        :param broker_url: broker url
        :param ssl_options: ssl options
        """
        if self.pub:
            return self.pub

        self.pub = Publisher(name=self.name,
                             auth_url=broker_url or self.broker_url,
                             ssl_options=ssl_options or self.ssl_options)

        return self.pub

    # end of get_publisher

    def publish(self,
                body=None,
                exchange=None,
                routing_key=None,
                queue=None,
                serializer="json",
                retry=True,
                silent=True):

        use_exchange = exchange
        if not use_exchange:
            use_exchange = self.train_exchange_name
        use_routing_key = routing_key
        if not use_routing_key:
            use_routing_key = self.train_routing_key
        use_queue = queue
        if not use_queue:
            use_queue = self.train_queue_name

        log.info(("Sending msg={} "
                  "ex={} rk={}").format(body, use_exchange, use_routing_key))

        # Publish the message:
        self.pub.publish(body=body,
                         exchange=use_exchange,
                         routing_key=use_routing_key,
                         queue=use_queue,
                         serializer=serializer,
                         retry=retry)

    # end of publish

    def build_train_antinex_request(self,
                                    data_file=("./training/"
                                               "django-antinex-simple.json")):
        """build_train_antinex_request

        :param data_file: train and predict request
        """
        body = {}
        with open(data_file, "r") as cur_file:
            file_contents = cur_file.read()
            body = json.loads(file_contents)

        # Now send:
        now = datetime.datetime.now().isoformat()
        body["created"] = now
        log.info("loading predict_rows")
        predict_rows_df = pd.read_csv(body["dataset"])
        predict_rows = []
        for idx, org_row in predict_rows_df.iterrows():
            new_row = json.loads(org_row.to_json())
            new_row["idx"] = len(predict_rows) + 1
            predict_rows.append(new_row)
        body["predict_rows"] = pd.DataFrame(predict_rows).to_json()

        log.info(("using predict_rows={}").format(len(predict_rows)))

        return body

    # end of build_train_antinex_request

    def build_predict_antinex_request(
        self, data_file=("./training/"
                         "django-antinex-simple.json")):
        """build_predict_antinex_request

        :param data_file: predict request
        """
        body = {}
        with open(data_file, "r") as cur_file:
            file_contents = cur_file.read()
            body = json.loads(file_contents)

        # Now send:
        now = datetime.datetime.now().isoformat()
        body["created"] = now
        log.info("loading predict_rows")
        predict_rows_df = pd.read_csv(body["dataset"])
        predict_rows = []
        for idx, org_row in predict_rows_df.iterrows():
            new_row = json.loads(org_row.to_json())
            new_row["idx"] = len(predict_rows) + 1
            predict_rows.append(new_row)
        body["predict_rows"] = pd.DataFrame(predict_rows).to_json()

        log.info(("using predict_rows={}").format(len(predict_rows)))

        return body

    # end of build_predict_antinex_request

    def build_predict_scaler_antinex_request(
        self, data_file=("./training/"
                         "scaler-django-antinex-simple.json")):
        """build_predict_scaler_antinex_request

        :param data_file: predict request
        """
        body = {}
        with open(data_file, "r") as cur_file:
            file_contents = cur_file.read()
            body = json.loads(file_contents)

        # Now send:
        now = datetime.datetime.now().isoformat()
        body["created"] = now
        log.info("loading predict_rows")
        predict_rows_df = pd.read_csv(body["dataset"])
        predict_rows = []
        for idx, org_row in predict_rows_df.iterrows():
            new_row = json.loads(org_row.to_json())
            new_row["idx"] = len(predict_rows) + 1
            predict_rows.append(new_row)
        body["predict_rows"] = pd.DataFrame(predict_rows).to_json()

        log.info(("using predict_rows={}").format(len(predict_rows)))

        return body

    # end of build_predict_scaler_antinex_request

    def build_predict_rows_from_dataset(
            self,
            data_file=("./training/"
                       "scaler-django-antinex-simple.json"),
            num_rows_at_bottom=2):
        """build_predict_rows_from_dataset

        :param data_file: predict request
        :param num_rows_at_bottom: number of rows to use from the
                                   end of the dataset
        """
        body = {}
        with open(data_file, "r") as cur_file:
            file_contents = cur_file.read()
            body = json.loads(file_contents)

        # Now send:
        now = datetime.datetime.now().isoformat()
        body["created"] = now
        log.info("loading predict_rows")
        predict_rows_df = pd.read_csv(body["dataset"])
        predict_rows = []
        use_start_idx = num_rows_at_bottom
        if use_start_idx > 0:
            use_start_idx = -1 * use_start_idx
        for idx, org_row in predict_rows_df.iloc[use_start_idx:].iterrows():
            new_row = json.loads(org_row.to_json())
            new_row["idx"] = len(predict_rows) + 1
            new_row["_dataset_index"] = idx
            predict_rows.append(new_row)
        body["predict_rows"] = pd.DataFrame(predict_rows).to_json()

        log.info(("using predict_rows={}").format(len(predict_rows)))

        return body

    # end of build_predict_rows_from_dataset

    def build_regression_train_request(self,
                                       data_file=("./tests/train/"
                                                  "regression.json")):
        """build_regression_train_request

        :param data_file: train and predict request
        """
        body = {}
        with open(data_file, "r") as cur_file:
            file_contents = cur_file.read()
            body = json.loads(file_contents)
        return body
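
A concrete test method using these helpers might build one of the request payloads and publish it to the `memory://` broker configured in `setUp`; a sketch under that assumption (the test class name and assertion are hypothetical):

class RegressionTrainPublishTest(BaseTestCase):

    def test_publish_regression_train_request(self):
        body = self.build_regression_train_request()
        pub = self.get_publisher()     # lazily builds the memory:// Publisher
        self.assertIsNotNone(pub)
        self.publish(body=body)        # defaults to the train exchange/routing key/queue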
Example 8
name = "kombu-sqs-publisher"
log = build_colorized_logger(name=name)

log.info("Start - {}".format(name))

# Initialize Publisher
# http://docs.celeryproject.org/en/latest/getting-started/brokers/sqs.html
# https://github.com/celery/kombu/blob/master/kombu/transport/SQS.py
aws_key = ev("SQS_AWS_ACCESS_KEY", "not_a_key")
aws_secret = ev("SQS_AWS_SECRET_KEY", "not_a_secret")

sqs_auth_url = ev("SUB_BROKER_URL", "sqs://{}:{}@".format(aws_key, aws_secret))

ssl_options = {}
pub = Publisher("kombu-sqs-publisher", sqs_auth_url, ssl_options)
# sample: "sqs://*****:*****@"
# ^ from the doc: 'you must remember to include the "@" at the end.'

# Now set up the publish target (reusing the consumer's exchange/queue settings):
seconds_to_consume = 10.0
serializer = "json"
exchange = ev("CONSUME_EXCHANGE", "test1")
routing_key = ev("CONSUME_ROUTING_KEY", "test1")
queue = ev("CONSUME_QUEUE", "test1")
max_timeout = 43200
transport_options = {}

if not pub:
    log.error(("Failed to connect to " "broker={}").format(sqs_auth_url))
else:
Example 9
exchange_name = ev("PUBLISH_EXCHANGE", "reporting")
routing_key = ev("PUBLISH_ROUTING_KEY", "reporting.accounts")
queue_name = ev("PUBLISH_QUEUE", "reporting.accounts")
auth_url = ev("PUB_BROKER_URL", "pyamqp://*****:*****@localhost:5672//")
serializer = "json"

# import ssl
# Connection("amqp://", login_method='EXTERNAL', ssl={
#               "ca_certs": '/etc/pki/tls/certs/something.crt',
#               "keyfile": '/etc/something/system.key',
#               "certfile": '/etc/something/system.cert',
#               "cert_reqs": ssl.CERT_REQUIRED,
#          })
#
ssl_options = {}
app = Publisher("rabbitmq-publisher", auth_url, ssl_options)

if not app:
    log.error("Failed to connect to broker={}".format(auth_url))
else:

    # Create the message:
    now = datetime.datetime.now().isoformat()
    body = {"account_id": 456, "created": now}

    log.info(("Sending msg={} "
              "ex={} rk={}").format(body, exchange_name, routing_key))

    # Publish the message:
    msg_sent = app.publish(body=body,
                           exchange=exchange_name,