Example #1
def lookupIP(ip, dxlif):
    try:
        v = IP(ip).version()
    except Exception:
        return
    if v == 4 or v == 6:
        try:
            print("Looking up: %s" % ip)
            d = pynfdump.Dumper("/data/nfsen/profiles-data",
                                profile='live',
                                sources=['local'])
            d.set_where(start=time.strftime("%Y-%m-%d"),
                        end=time.strftime("%Y-%m-%d %H:%M"))
            records = d.search("src ip %s" % ip, aggregate=['dstip'])
            tgt = []
            for r in records:
                if r['dstip'] not in tgt:
                    tgt.append(r['dstip'])
            if len(tgt) > 0:
                for t in tgt:
                    evtstr = '/feed/compromised/ipv' + str(IP(t).version())
                    evt = Event(evtstr)
                    evt.payload = str(t).encode()
                    dxlif.send_event(evt)
                    print("Event emitted topic: %s content: %s" %
                          (evtstr, str(t)))

        except Exception as e:
            print("Exception while processing %s: %s" % (ip, str(e)))
            return
Example #2
 def _process_zeromq_misp_messages(self):
     """
     Poll for MISP ZeroMQ notifications. On receipt of a notification,
     send a corresponding event to the DXL fabric.
     """
     while not self.__destroyed:
         try:
             socks = dict(self._zeromq_poller.poll(timeout=None))
         # A ZMQError could be raised if the socket is shut down while
         # blocked in a poll.
         except zmq.ZMQError:
             socks = {}
         if self._zeromq_misp_sub_socket in socks and \
                 socks[self._zeromq_misp_sub_socket] == zmq.POLLIN:
             message = self._zeromq_misp_sub_socket.recv_string()
             topic, _, payload = message.partition(" ")
             logger.debug("Received notification for %s", topic)
             full_event_topic = "{}{}/{}".format(
                 self._ZEROMQ_NOTIFICATIONS_EVENT_TOPIC,
                 "/{}".format(self._service_unique_id)
                 if self._service_unique_id else "", topic)
             event = Event(full_event_topic)
             logger.debug("Forwarding notification to %s ...",
                          full_event_topic)
             event.payload = payload
             self.client.send_event(event)
Example #3
    def send_event(self, topic, payload):
        if not topic:
            raise Exception(f'Error in {demisto.command()}: the topic field is required')

        event = Event(topic)
        event.payload = str(payload).encode()
        self.client.send_event(event)
Example #4
 def post_receive(self, alert: 'Alert') -> Optional['Alert']:
     try:
         event = Event(DXL_PUB_TOPIC)
         event.payload = json.dumps(alert.get_body(history=False))
         LOG.info('broadcasting alert %s', alert.id)
         self.dxl_client.send_event(event)
     except BaseException as e:
         LOG.exception('failed to broadcast alert: %s', str(e))
         raise RuntimeError('failed to broadcast alert: ' + str(e))
Example #5
    def run(self, params={}):
        topic = params.get('topic')
        event_message = params.get('event_message')

        event = Event(topic)
        event.payload = event_message.encode()

        with DxlClient(self.connection.config) as dxl_client:
            # Connect to the fabric
            dxl_client.connect()
            dxl_client.send_event(event)
        return {'success': True}
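The examples so far show only the publishing side. For reference, here is a minimal subscriber sketch that would receive events sent this way; the topic and config path are placeholders, not taken from the examples above:

from dxlclient.callbacks import EventCallback
from dxlclient.client import DxlClient
from dxlclient.client_config import DxlClientConfig

CONFIG_FILE = "dxlclient.config"   # placeholder path
EVENT_TOPIC = "/sample/topic"      # must match the topic used by the publisher


class MyEventCallback(EventCallback):
    def on_event(self, event):
        # Payloads published with str.encode() arrive as bytes
        print("Received event on %s: %s" % (event.destination_topic,
                                            event.payload.decode()))


with DxlClient(DxlClientConfig.create_dxl_config_from_file(CONFIG_FILE)) as client:
    client.connect()
    client.add_event_callback(EVENT_TOPIC, MyEventCallback())
    # Keep the client alive while events arrive (e.g. a time.sleep loop)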
Example #6
def sendData(data):
    try:
        v = IP(data).version()
    except Exception as e:
        print("Error in '%s': %s" % (data, str(e)))
        return
    if v == 4:
        evt = Event('/feed/bad/ipv4')
    elif v == 6:
        evt = Event('/feed/bad/ipv6')
    evt.payload = str(data).encode()
    client.send_event(evt)
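Example #6 relies on IP(data).version() to pick the IPv4 or IPv6 topic; this looks like the IP class from the IPy library (an assumption). A minimal sketch of that behavior:

from IPy import IP  # assumed dependency of Example #6

print(IP("192.0.2.1").version())    # -> 4
print(IP("2001:db8::1").version())  # -> 6
# IP("not-an-address") raises ValueError, which the except clause above catches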
Example #7
def main(argv):
    TOPIC_DESTINATION = '/events/syslog'
    TYPE_PAYLOAD = 'syslog'
    PAYLOAD = sys.argv[1]

    DXL_MESSAGE['SRC_HOST'] = IP
    DXL_MESSAGE['TYPE_PAYLOAD'] = TYPE_PAYLOAD
    DXL_MESSAGE['PAYLOAD'] = PAYLOAD

    with DxlClient(config) as client:
        client.connect()
        event = Event(TOPIC_DESTINATION)
        event.payload = str(json.dumps(DXL_MESSAGE)).encode()
        client.send_event(event)
def sendMessage(msgType=1):
    # Record the start time
    start = int(time.time())
    # Create the event
    event = Event(eventTopic)

    if msgType == 1:
        # Set the payload
        event_dict = {}
        #event type 1 is a standard chat message
        event_dict['type'] = 1
        event_dict['message'] = chatWin.getEntry("qcMessage")
        event_dict['time'] = str(int(time.time()))
        event_dict['user'] = username
        event_dict['UID'] = UID

        #cleanup the form
        chatWin.clearEntry("qcMessage")

    elif msgType == 2:
        logger.info("Sending Ping")
        # set the payload
        event_dict = {}
        #event type 2 is a user notification message
        event_dict['type'] = 2
        event_dict['time'] = str(int(time.time()))
        event_dict['user'] = username
        event_dict['UID'] = UID

    elif msgType == 3:
        logger.info("Sending Ping Request")
        # set the payload
        event_dict = {}
        #event type 3 is a broadcast ping request
        event_dict['type'] = 3

    elif msgType == 4:
        logger.info("Sending bye")
        # set the payload
        event_dict = {}
        #event type 4 is a user GoodBye message
        event_dict['type'] = 4
        event_dict['time'] = str(int(time.time()))
        event_dict['user'] = username
        event_dict['UID'] = UID

    event.payload = json.dumps(event_dict).encode()

    # Send the event
    client.send_event(event)
    def _set_item_reputation(self, request, request_payload,
                             change_topic, tags=None):
        new_entry = None

        hash_match_result = self._get_reputation_for_hashes(
            request_payload["hashes"], False)
        if hash_match_result:
            metadata = self.REPUTATION_METADATA[hash_match_result]
            new_reputations = metadata["reputations"]
            for reputation_entry in new_reputations:
                if reputation_entry["providerId"] == request_payload["providerId"]:
                    new_entry = reputation_entry
        else:
            first_hash = request_payload["hashes"][0]
            item_name = first_hash["type"] + ":" + first_hash["value"]
            new_reputations = []
            self.REPUTATION_METADATA[item_name] = {
                "hashes": {new_hash["type"]: new_hash["value"] \
                           for new_hash in request_payload["hashes"]},
                "reputations": new_reputations}
            metadata = self.REPUTATION_METADATA[item_name]
            self._set_hash_algos_for_item(item_name, metadata["hashes"])

        tags = tags or {}
        if "comment" in request_payload:
            tags["comment"] = request_payload["comment"]
        metadata["tags"] = tags

        old_reputations = copy.deepcopy(new_reputations)

        if not new_entry:
            new_entry = {"attributes": {},
                         "providerId": request_payload["providerId"]}
        new_entry["trustLevel"] = request_payload["trustLevel"]
        new_entry["createDate"] = int(time.time())
        new_reputations.append(new_entry)

        self._app.client.send_response(Response(request))

        event = Event(change_topic)
        event_payload = {
            "hashes": request_payload["hashes"],
            "oldReputations": {"reputations": old_reputations},
            "newReputations": {"reputations": new_reputations},
            "updateTime": int(time.time())
        }
        if "publicKeySha1" in metadata["hashes"]:
            event_payload["publicKeySha1"] = metadata["hashes"]["publicKeySha1"]
            event_payload["hashes"] = filter(
                lambda hash_entry: hash_entry["type"] != "publicKeySha1",
                event_payload["hashes"]
            )
        if "relationships" in metadata:
            event_payload["relationships"] = metadata["relationships"]

        MessageUtils.dict_to_json_payload(event, event_payload)
        self._app.client.send_event(event)
    def _set_item_reputation(self, request, request_payload, item_name,
                             change_topic):
        new_entry = None

        if item_name in self.REPUTATION_METADATA:
            new_reputations = self.REPUTATION_METADATA[item_name][
                "reputations"]
            for reputation_entry in new_reputations:
                if reputation_entry["providerId"] == request_payload[
                        "providerId"]:
                    new_entry = reputation_entry
        else:
            new_reputations = []
            self.REPUTATION_METADATA[item_name] = {
                "hashes": {},
                "reputations": new_reputations
            }

        old_reputations = copy.deepcopy(new_reputations)
        old_hashes = self.REPUTATION_METADATA[item_name]["hashes"]

        for hash_type, hash_value in old_hashes.items():
            if hash_type in self.hash_algos_to_files and \
                hash_value in self.hash_algos_to_files[hash_type]:
                del self.hash_algos_to_files[hash_type][hash_value]

        new_hashes = {new_hash["type"]: new_hash["value"] \
                      for new_hash in request_payload["hashes"]}
        self._set_hash_algos_for_item(item_name, new_hashes)
        self.REPUTATION_METADATA[item_name]["hashes"] = new_hashes

        if not new_entry:
            new_entry = {
                "attributes": {},
                "providerId": request_payload["providerId"]
            }
        new_entry["trustLevel"] = request_payload["trustLevel"]
        new_entry["createDate"] = int(time.time())
        new_reputations.append(new_entry)

        self._client.send_response(Response(request))

        event = Event(change_topic)
        event_payload = {
            "hashes": request_payload["hashes"],
            "oldReputations": {
                "reputations": old_reputations
            },
            "newReputations": {
                "reputations": new_reputations
            },
            "updateTime": int(time.time())
        }

        MessageUtils.dict_to_json_payload(event, event_payload)
        self._client.send_event(event)
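Both reputation handlers above publish the change as a JSON payload via MessageUtils.dict_to_json_payload. A minimal sketch, assuming a consumer registered on the same change topic, of decoding that payload back into a dict with dxlbootstrap's MessageUtils:

from dxlbootstrap.util import MessageUtils
from dxlclient.callbacks import EventCallback


class ReputationChangeCallbackSketch(EventCallback):
    def on_event(self, event):
        # Reverse of dict_to_json_payload: parse the JSON payload into a dict
        change = MessageUtils.json_payload_to_dict(event)
        print("Old reputations:", change["oldReputations"]["reputations"])
        print("New reputations:", change["newReputations"]["reputations"])

# Registration (topic string is a placeholder):
# client.add_event_callback("/some/reputation/change/topic",
#                           ReputationChangeCallbackSketch())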
def main(argv):
    TOPIC_DESTINATION = ''
    TYPE_PAYLOAD = ''
    PAYLOAD = ''
    help = ('python ' + sys.argv[0] +
            ' -t <topic destination> -k <type of payload> -p <payload>')

    try:
        opts, args = getopt.getopt(argv, "ht:k:p:",
                                   ["topic=", "typepayload=", "payload="])
    except getopt.GetoptError:
        print(help)
        sys.exit(1)

    for opt, arg in opts:
        if opt == '-h':
            print(help)
            sys.exit(1)
        elif opt in ("-t", "--topic"):
            TOPIC_DESTINATION = arg
        elif opt in ("-k", "--typepayload"):
            TYPE_PAYLOAD = arg
        elif opt in ("-p", "--payload"):
            PAYLOAD = arg

    if (TOPIC_DESTINATION != '' and PAYLOAD != ''):

        DXL_MESSAGE['SRC_HOST'] = IP
        DXL_MESSAGE['TYPE_PAYLOAD'] = TYPE_PAYLOAD
        DXL_MESSAGE['PAYLOAD'] = PAYLOAD

        with DxlClient(config) as client:
            client.connect()
            event = Event(TOPIC_DESTINATION)
            event.payload = str(json.dumps(DXL_MESSAGE)).encode()
            client.send_event(event)
    else:
        print(help)
        sys.exit(1)
Example #12
    def sendMessage(self,
                    topic="/dsa/dxl/test/event2",
                    message="Default message"):
        if not self.isConnected():
            raise DxlJythonException(1200, "Not connected to an OpenDXL broker")

        try:
            event = Event(topic)

            # Encode string payload as UTF-8
            event.payload = message.encode()

            # Send event on DXL
            logger.info("Sending '" + message + "' to '" + topic + "'")
            self.client.send_event(event)

            return "Event successfully posted to topic '%s'" % topic

        except Exception as e:
            logger.info("Exception: " + e.message)
            raise DxlJythonException(
                1010, "Unable to communicate with a DXL broker")
    def _process_zeromq_misp_messages(self):
        """
        Poll for MISP ZeroMQ notifications. On receipt of a notification,
        send a corresponding event to the DXL fabric.
        """
        while not self.__destroyed:
            try:
                socks = dict(self._zeromq_poller.poll(timeout=None))
            # A ZMQError could be raised if the socket is shut down while
            # blocked in a poll.
            except zmq.ZMQError:
                socks = {}
            if self._zeromq_misp_sub_socket in socks and \
                    socks[self._zeromq_misp_sub_socket] == zmq.POLLIN:
                message = self._zeromq_misp_sub_socket.recv_string()
                topic, _, payload = message.partition(" ")
                logger.debug("Received notification for %s", topic)

                # ZeroMQ will deliver notifications for any topic which starts
                # with the subscribed topic name. Events should only be
                # forwarded to the DXL fabric for messages whose topic
                # exactly matches an entry in the DXL service configuration
                # file. For example, if the DXL service configuration file
                # includes only the topic "misp_json", the ZeroMQ socket would
                # provide messages with a topic of either "misp_json" or
                # "misp_json_self" to the ZeroMQ subscriber. Only messages with
                # a topic of "misp_json" (not "misp_json_self") should be
                # forwarded to the DXL fabric.
                if topic in self._zeromq_notification_topics:
                    full_event_topic = "{}{}/{}".format(
                        self._ZEROMQ_NOTIFICATIONS_EVENT_TOPIC,
                        "/{}".format(self._service_unique_id)
                        if self._service_unique_id else "", topic)
                    event = Event(full_event_topic)
                    logger.debug("Forwarding notification to %s ...",
                                 full_event_topic)
                    event.payload = payload
                    self.client.send_event(event)
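A small illustration of the exact-match filtering described in the comment above: only topics listed in the service configuration are forwarded, so prefix matches such as "misp_json_self" are dropped (the values below are assumed for illustration):

zeromq_notification_topics = ["misp_json"]  # assumed configuration

for received_topic in ("misp_json", "misp_json_self"):
    forwarded = received_topic in zeromq_notification_topics
    print("%s -> %s" % (received_topic,
                        "forwarded to DXL" if forwarded else "dropped"))
# misp_json -> forwarded to DXL
# misp_json_self -> dropped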
Example #14
    def on_dxl_connect(self):
        """
        Invoked after the client associated with the application has connected
        to the DXL fabric.
        """
        logger.info("On 'DXL connect' callback.")
        fileRead = open(self._log_location, 'r')

        stResults = os.stat(self._log_location)
        stSize = stResults[6]
        fileRead.seek(stSize)

        event = Event(self.VOR_TOPIC)

        while 1:
            where = fileRead.tell()
            line = fileRead.readline()
            if not line:
                time.sleep(self._log_check_interval)
                fileRead.seek(where)
            else:
                if line.find("vee-fs:") != -1 and line.find("[ALARM]") != -1:
                    #print("Line: |{}|".format(line.strip()))
                    parsedLog = self._parse_vor_log(line)

                    #line_parsed = {"Policy": parsedLog[0], "User": parsedLog[1], "Process": parsedLog[2], "Action": parsedLog[3], "Res": parsedLog[4]}
                    line_parsed = '{ "Policy":"' + parsedLog[
                        0] + '", "User":"******", "Process":"' + parsedLog[
                                2] + '", "Action":"' + parsedLog[
                                    3] + '", "Resource":"' + parsedLog[4] + '"}'
                    #line_parsed = '{Policy: ' + parsedLog[0] + ',' + 'User: '******','
                    print("Line Parsed: {}".format(line_parsed))

                    event.payload = (line_parsed)
                    self.client.send_event(event)
    def test_on_detection(self):
        class MyDetectionCallback(DetectionCallback):
            def __init__(self):
                super(MyDetectionCallback, self).__init__()
                self.detection_dict_received = None
                self.original_event_received = None

            def on_detection(self, detection_dict, original_event):
                self.detection_dict_received = detection_dict
                self.original_event_received = original_event

        detection_callback = MyDetectionCallback()
        event = Event("/event")
        event_payload_sent = {
            "agentGuid": event.message_id,
            "detectionTime": int(time.time()),
            "hashes": [
                {"type": "md5",
                 "value": "614rncUYF6CG17l+tSQQqw=="},
                {"type": "sha1",
                 "value": "Q139Rw9ydDfHy08Hy6H5ofQnJlY="},
                {"type": "sha256",
                 "value": "QUuxaxDs4tsthEjLnzE/gMt3wxDKDBnuA8c8ugwW/ts="}
            ],
            "localReputation": 1,
            "name": "FOCUS_MALWARE2.EXE",
            "remediationAction": 5
        }
        expected_payload_received = event_payload_sent.copy()
        expected_payload_received["hashes"] = {
            "md5": "eb5e2b9dc51817a086d7b97eb52410ab",
            "sha1": "435dfd470f727437c7cb4f07cba1f9a1f4272656",
            "sha256": "414bb16b10ece2db2d8448cb9f313f80cb77c310ca0c19ee03c73cba0c16fedb"
        }
        MessageUtils.dict_to_json_payload(event, event_payload_sent)
        detection_callback.on_event(event)
        self.assertEqual(expected_payload_received,
                         detection_callback.detection_dict_received)
        self.assertEqual(event, detection_callback.original_event_received)
    def test_on_first_instance(self):
        class MyFirstInstanceCallback(FirstInstanceCallback):
            def __init__(self):
                super(MyFirstInstanceCallback, self).__init__()
                self.first_instance_dict_received = None
                self.original_event_received = None

            def on_first_instance(self, first_instance_dict, original_event):
                self.first_instance_dict_received = first_instance_dict
                self.original_event_received = original_event

        first_instance_callback = MyFirstInstanceCallback()
        event = Event("/event")
        event_payload_sent = {
            "agentGuid": event.message_id,
            "hashes": [
                {"type": "md5",
                 "value": "MdvozEQ9LKf9I2rAClL7Fw=="},
                {"type": "sha1",
                 "value": "LWykUGG3lyMS4A5ZM/3/lbuQths="},
                {"type": "sha256",
                 "value": "qjxGHUwho5LjctDWykzrHk2ICY1YdllFTq9Nk8ZhiA8="}
            ],
            "name": "MORPH.EXE"
        }
        expected_payload_received = event_payload_sent.copy()
        expected_payload_received["hashes"] = {
            "md5": "31dbe8cc443d2ca7fd236ac00a52fb17",
            "sha1": "2d6ca45061b7972312e00e5933fdff95bb90b61b",
            "sha256": "aa3c461d4c21a392e372d0d6ca4ceb1e4d88098d587659454eaf4d93c661880f"
        }
        MessageUtils.dict_to_json_payload(event, event_payload_sent)
        first_instance_callback.on_event(event)
        self.assertEqual(
            expected_payload_received,
            first_instance_callback.first_instance_dict_received)
        self.assertEqual(event, first_instance_callback.original_event_received)
Example #17
 def send_event(topic, m):
     event = Event(topic)
     event.payload = m.encode()
     client.send_event(event)
Example #18
        # Prompt user for input to publish DXL Events
        while True:
            print "   Enter 1 to publish a DXL Event"
            print "   Enter 9 to quit"
            input = raw_input("   Enter value: ")

            try:
                option = int(input)
            except:
                option = input

            # Option: DXL Event
            if option == 1:
                # Create the Event
                logger.info("Event Publisher - Creating Event for Topic %s", EVENT_TOPIC)
                event = Event(EVENT_TOPIC)

                # Encode string payload as UTF-8
                event.payload = "Sample Event Payload".encode()

                # Publish the Event to the DXL Fabric on the Topic
                logger.info("Event Publisher - Publishing Event to %s", EVENT_TOPIC)
                client.send_event(event)

            # Option: Exit the loop
            elif option == 9:
                break

            # Invalid input
            else:
                logger.info("Event Publisher - Invalid input: %s", option)
Example #19
                event_count[0] += 1
                # Notify that the count was incremented
                event_count_condition.notify_all()

    # Register the callback with the client
    client.add_event_callback(EVENT_TOPIC, MyEventCallback())

    #
    # Send events
    #

    # Record the start time
    start = time.time()

    # Loop and send the events
    for event_id in range(TOTAL_EVENTS):
        # Create the event
        event = Event(EVENT_TOPIC)
        # Set the payload
        event.payload = str(event_id).encode()
        # Send the event
        client.send_event(event)

    # Wait until all events have been received
    print "Waiting for events to be received..."
    with event_count_condition:
        while event_count[0] < TOTAL_EVENTS:
            event_count_condition.wait()

    # Print the elapsed time
    print "Elapsed time (ms): " + str((time.time() - start) * 1000)
    def test_on_reputation_change(self):
        class MyReputationChangeCallback(ReputationChangeCallback):
            def __init__(self):
                super(MyReputationChangeCallback, self).__init__()
                self.rep_change_dict_received = None
                self.original_event_received = None

            def on_reputation_change(self, rep_change_dict, original_event):
                self.rep_change_dict_received = rep_change_dict
                self.original_event_received = original_event

        reputation_change_callback = MyReputationChangeCallback()
        event = Event("/event")
        event_payload_sent = {
            "hashes": [
                {"type": "md5",
                 "value": "8se7isyX+S6Yei1Ah9AhsQ=="},
                {"type": "sha1",
                 "value": "frATnSF1c5s8yw0REAZ4IL5qvSk="},
                {"type": "sha256",
                 "value": "FC4daI7wVoNww3GH/Z8jUdfd7aV0+L+psPpO9C24WqI="}
            ],
            "publicKeySha1": "3B87A2D6F39770160364B79A152FCC73BAE27ADF",
            "newReputations": [
                {
                    "attributes": {
                        "2120340": "2139160704"
                    },
                    "createDate": 1480455704,
                    "providerId": 1,
                    "trustLevel": 99
                },
                {
                    "attributes": {
                        "2101652": "235",
                        "2102165": "1476902802",
                        "2111893": "244",
                        "2114965": "4",
                        "2139285": "73183493944770750"
                    },
                    "createDate": 1476902802,
                    "providerId": 3,
                    "trustLevel": 99
                }
            ],
            "oldReputations": [
                {
                    "attributes": {
                        "2120340": "2139160704"
                    },
                    "createDate": 1480455704,
                    "providerId": 1,
                    "trustLevel": 99
                },
                {
                    "attributes": {
                        "2101652": "235",
                        "2102165": "1476902802",
                        "2111893": "244",
                        "2114965": "4",
                        "2139285": "73183493944770750"
                    },
                    "createDate": 1476902802,
                    "providerId": 3,
                    "trustLevel": 85
                }
            ],
            "relationships": {
                "certificate": {
                    "hashes": [
                        {"type": "md5",
                         "value": "MdvozEQ9LKf9I2rAClL7Fw=="},
                        {"type": "sha1",
                         "value": "LWykUGG3lyMS4A5ZM/3/lbuQths="},
                        {"type": "sha256",
                         "value": "qjxGHUwho5LjctDWykzrHk2ICY1YdllFTq9Nk8ZhiA8="}
                    ],
                    "publicKeySha1": "Q139Rw9ydDfHy08Hy6H5ofQnJlY="
                }
            }
        }
        expected_payload_received = copy.deepcopy(event_payload_sent)
        expected_payload_received["hashes"] = {
            "md5": "f2c7bb8acc97f92e987a2d4087d021b1",
            "sha1": "7eb0139d2175739b3ccb0d1110067820be6abd29",
            "sha256": "142e1d688ef0568370c37187fd9f2351d7ddeda574f8bfa9b0fa4ef42db85aa2"
        }
        expected_payload_received["publicKeySha1"] = \
            "dc1f3b0360fa177f7bef4d7ad37eb807bf40d79d85082ef7040136ec00c5"
        expected_cert_info_received = expected_payload_received["relationships"]\
            ["certificate"]
        expected_cert_info_received["hashes"] = \
            {
                "md5": "31dbe8cc443d2ca7fd236ac00a52fb17",
                "sha1": "2d6ca45061b7972312e00e5933fdff95bb90b61b",
                "sha256":
                    "aa3c461d4c21a392e372d0d6ca4ceb1e4d88098d587659454eaf4d93c661880f"
            }
        expected_cert_info_received["publicKeySha1"] = \
            "435dfd470f727437c7cb4f07cba1f9a1f4272656"
        MessageUtils.dict_to_json_payload(event, event_payload_sent)
        reputation_change_callback.on_event(event)
        self.assertEqual(
            expected_payload_received,
            reputation_change_callback.rep_change_dict_received)
        self.assertEqual(event,
                         reputation_change_callback.original_event_received)
    def run(self, results):
        """
        Sends Cuckoo report as a DXL event on a DXL Fabric.

        @param results: Cuckoo results dict.
        @raise CuckooReportError: if sending the Cuckoo report as a DXL event fails.
        """

        try:
            # Dictionary of data to send out as the report on DXL
            report_dict = {}

            if self.options.get("send_compressed_event", False):
                # Convert results to a JSON string
                report_json_string = json.dumps(
                    results,
                    default=serialize_datetime_objects,
                    indent=self.options.indent)

                # Compress the Cuckoo results (gzip container, wbits=31)
                zlib_obj = zlib.compressobj(-1, zlib.DEFLATED, 31)
                compressed_report_data = zlib_obj.compress(
                    report_json_string.encode("utf-8")) + zlib_obj.flush()

                # Create the DXL Event for zipped data
                zipped_event = Event(CUCKOO_ZIP_EVENT_TOPIC)

                # Set the payload to be the compressed Cuckoo report analysis
                zipped_event.payload = compressed_report_data

                # Publish the full zipped report if the payload size is smaller than the maximum configured size.
                if sys.getsizeof(zipped_event.payload) <= self.options.get(
                        "compressed_event_max_size", 512000):
                    log.info(
                        "Publishing full zipped report to DXL on topic %s",
                        CUCKOO_ZIP_EVENT_TOPIC)
                    cuckoo_dxl_client.client.send_event(zipped_event)
                else:
                    log.info(
                        "Report too large. Not publishing zipped report to DXL."
                    )

            # Add the info and target entries from the Cuckoo results
            report_dict[INFO_REPORT_KEY] = results.get(INFO_REPORT_KEY, {})
            report_dict[TARGET_REPORT_KEY] = results.get(TARGET_REPORT_KEY, {})

            # Add items listed from the "items_to_include_in_report" setting in the report.conf to the report
            items_to_include_in_report = self.options.get(
                "items_to_include_in_report")
            if items_to_include_in_report is not None:
                # Get rid of any white space characters in the items_to_include_in_report string
                items_to_include_in_report = re.sub(
                    r"\s+", "", items_to_include_in_report)

                # Loop over list of items to include
                for report_include_item in items_to_include_in_report.split(
                        ","):
                    if not report_include_item:
                        log.warning(
                            "items_to_include_in_report includes an empty item."
                        )
                        continue

                    # Separate report_include_item into sub-items
                    sub_sections_list = report_include_item.split(".")
                    # Find the value in the Cuckoo results dictionary
                    sub_section_value = reduce(sub_level_getter,
                                               sub_sections_list, results)

                    if sub_section_value is NOT_FOUND_OBJ:
                        log.warning(report_include_item +
                                    " is not found in the Cuckoo report.")
                        continue

                    # Create all of the sub item levels in the results reports dictionary
                    result_sub_section = reduce(create_and_get_sub_level,
                                                sub_sections_list[0:-1],
                                                report_dict)
                    # Add the value found in the Cuckoo results
                    result_sub_section.update(
                        {sub_sections_list[-1]: sub_section_value})

            # Create the DXL Event
            report_event = Event(CUCKOO_REPORT_EVENT_TOPIC)

            # Set event payload to be the JSON of the results report dictionary
            report_event.payload = json.dumps(
                report_dict,
                default=serialize_datetime_objects).encode("UTF-8")

            # Publish the Event to the DXL Fabric
            log.info("Publishing Cuckoo report to DXL on topic %s",
                     CUCKOO_REPORT_EVENT_TOPIC)
            cuckoo_dxl_client.client.send_event(report_event)

        except Exception as ex:
            log.exception("Error sending Cuckoo report out as a DXL event.")
            raise CuckooReportError(
                "Failed to send Cuckoo report as a DXL event: %s" % ex)
Example #22
 def sendMessage(self, payload):
     event = Event(self.topic)
     event.payload = str(payload).encode()
     self.client.send_event(event)
Example #23
brokerString = "ssl://{}".format(aws_machine.ip)
action = "publish_event"
topic = "/mcafee/client/controlevent"
config = DxlClientConfig(broker_ca_bundle=brokerCaBundle,
                         cert_file=certFile,
                         private_key=privateKey,
                         brokers=[Broker.parse(brokerString)])

with DxlClient(config) as dxl_client:
    # Connect to the fabric
    dxl_client.connect()
    if dxl_client.connected:
        print "Connected ... \n"
    else:
        print "Not Connected ... \n"

    sleepTime = 1
    rb = os.urandom(100)
    event = Event(str(topic))
    event.payload = rb
    print "payload={}".format(rb)
    topic.encode('ascii', 'ignore')
    dxl_client.send_event(event)

    # Disconnect from the fabric
    dxl_client.disconnect()
    if dxl_client.connected:
        print "Connected ... \n"
    else:
        print "Not Connected ... \n"
Example #24
    # Process incoming collection requests from the manager
    class PCERequestCallback(RequestCallback):
        def on_request(self, request):
            logger.info("PCE received payload: %s", request.payload.decode())
            collection_requests.append(request)

    # Prepare service registration information
    info = ServiceRegistrationInfo(client, "/scap/pce" + PCE_ID)
    info.add_topic(SERVICE_PCE_REQUEST_TOPIC, PCERequestCallback())

    # Connect to the message fabric and add a listener for registration events
    client.connect()
    client.register_service_sync(info, 10)

    # Register PCE by sending registration event to the collector/PCX
    event = Event(EVENT_PCE_REGISTRATION_TOPIC)
    rm = RegistrationMessage(PCE_ID, "", "", ASSET, MAKE, MODEL, "", "", "",
                             "", "", SUPPORTED_CHECK_TYPES)
    event.payload = (rm.to_json()).encode()
    logger.info("Sending registration event: %s", rm.to_s())
    client.send_event(event)

    # Wait forever
    while True:
        # Process all collection requests that were received
        while collection_requests:
            request = collection_requests.pop()
            response = Response(request)

            # Cancel assessment if a cancel request. Otherwise,
            # perform the assessment
def main(argv):
    # parse the args
    arg_parser = create_arg_parser()
    args = arg_parser.parse_args()

    # set logging level
    set_logging_level(logger, args.loglevel)
    # configure local logger for requests (Urllib3) and set its level
    set_logging_level(logging.getLogger("urllib3"), args.loglevel)
    # read cfg file
    try:
        config = ConfigObj(args.configfile, raise_errors=True, file_error=True)
    except Exception:
        # TODO - enhance error handling here
        logger.error("Could not parse config file!")
        exit(1)

    #
    # get token
    #
    # TODO - handle HTTPS nicely
    urllib3.disable_warnings()
    token = get_staxx_token(config['STAXX']['Address'],
                            config['STAXX']['Port'], config['STAXX']['User'],
                            config['STAXX']['Password'])
    if not token:
        logger.error("Exiting...")
        exit(1)

    #
    # DXL initialization
    #
    # TODO - enhance error handling here
    if not args.dryrun:
        # DxlClientConfig from DXL configuration file
        logger.info("Loading DXL config from: %s", config['DXL']['Config'])
        dxl_config = DxlClientConfig.create_dxl_config_from_file(
            config['DXL']['Config'])
        #
        # build the topics
        #
        obs_types = config['Observable Types']
        logger.debug("Observable types: %s", obs_types)
        obs_topics = config['Observable Topics']
    #
    # timed loop (synchronous "obs export / msg publish")
    #
    while True:
        #
        # export observables into JSON object
        #
        req_error, json_obs = get_staxx_observables(config['STAXX']['Address'],
                                                    config['STAXX']['Port'],
                                                    token, args.filter_query)
        if req_error:
            logger.error("Failure exporting observables.")
            if not args.singleshot:
                logger.info("Sleeping until next polling cycle...\n")
                time.sleep(args.time)
                logger.info("New polling cycle.")
                continue
        if json_obs:
            logger.info(
                "{0} observable(s) exported from Anomali STAXX.".format(
                    len(json_obs)))
            if args.pprint:
                logger.info("Printing observables to STDOUT...")
                print(json.dumps(json_obs, indent=2, sort_keys=False))
            if not args.dryrun:
                #
                # Connect to DXL and publish the observables as events
                #
                try:
                    with DxlClient(dxl_config) as dxl_client:
                        # Connect to DXL Broker
                        logger.info("Connecting to DXL broker...")
                        dxl_client.connect()
                        # TODO - handle possible connection errors
                        logger.info(
                            "Filtering observables and publishing events...")
                        count = 0
                        for ob in json_obs:
                            key = is_observable_type_listed(
                                obs_types, ob['itype'])
                            if key:
                                count += 1
                                logger.debug(
                                    "Publishing message for observable (itype: %s, topic: %s).",
                                    ob['itype'], obs_topics[key])
                                dxl_event = Event(obs_topics[key])
                                payload_str = json.dumps(ob)
                                logger.debug("Msg payload: %s", payload_str)
                                dxl_event.payload = str(payload_str).encode()
                                dxl_client.send_event(dxl_event)
                            else:
                                logger.info(
                                    "Observable not published (itype: %s not listed).",
                                    ob['itype'])
                        logger.info("%s event(s) published to DXL fabric.",
                                    count)
                except Exception as e:
                    logger.error(
                        "Could not initialize OpenDXL client ({0}).".format(
                            e.message))
                    exit(1)
        else:
            logger.info("No observable exported from Anomali STAXX.")

        # wait for next cycle (if not single shot mode)
        if args.singleshot:
            logger.info("Exiting (single shot mode).")
            exit(0)
        else:
            logger.info("Sleeping until next polling cycle...\n")
            time.sleep(float(args.time))
            logger.info("New polling cycle.")
Example #26
DOCUMENT_ID = "basic-event-example-id"
EVENT_TOPIC = "/sample/elasticsearch/basicevent"

# Create DXL configuration from file
config = DxlClientConfig.create_dxl_config_from_file(CONFIG_FILE)

# Create the client
with DxlClient(config) as client:

    # Connect to the fabric
    client.connect()

    logger.info("Connected to DXL fabric.")

    # Create the event
    event = Event(EVENT_TOPIC)

    # Set the payload
    MessageUtils.dict_to_json_payload(event, {
        "event_id": DOCUMENT_ID,
        "message": "Hello from OpenDXL",
        "source": "Basic Event Example"})

    # Send the event
    client.send_event(event)

    # Create the get request
    request_topic = "/opendxl-elasticsearch/service/elasticsearch-api/get"
    req = Request(request_topic)

    # Set the payload for the get request
import datetime
import json
import time

import mraa

from dxlclient.client import DxlClient
from dxlclient.client_config import DxlClientConfig
from dxlclient.message import Event

#[CONFIG OPTIONS]#
CONFIG_FILE = "/usr/local/etc/opendxl/dxlclient.config"
CABINET = "LV|R9"
button = mraa.Gpio(3)

EVENT_TOPIC = "/open/threat/physical/DC/cabinets"
config = DxlClientConfig.create_dxl_config_from_file(CONFIG_FILE)
previousstate = ''
messagepayload = {}
with DxlClient(config) as client:
    client.connect()
    event = Event(EVENT_TOPIC)
    while True:
        if (button.read() == 0):
            currentstate = 0
        else:
            currentstate = 1
        if (currentstate != previousstate):
            messagepayload['timestamp'] = datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S')
            messagepayload['location'] = CABINET
            if (currentstate == 0):
                messagepayload['alert'] = "Cabinet Opened!"
            else:
                messagepayload['alert'] = "Cabinet Closed!"
            # DXL payloads must be str/bytes, so serialize the dict as JSON
            event.payload = json.dumps(messagepayload).encode()
            client.send_event(event)
            previousstate = currentstate
Example #28
 def publish(self, topic, payload=None, qos=0, retain=False):
     event = Event(topic)
     event.payload = payload
     self.dxl_client.send_event(event)
import os
import sys

from dxlclient.client import DxlClient
from dxlclient.client_config import DxlClientConfig
from dxlclient.message import Event

EVENT_TOPIC = str(msg['topic'])
CONFIG_FILE = "../dxlclient.config"
config = DxlClientConfig.create_dxl_config_from_file(CONFIG_FILE)

with DxlClient(config) as client:

    client.connect()
    event = Event(EVENT_TOPIC)
    event.payload = str(msg['payload']).encode()
    client.send_event(event)
    # node.send(msg)
    return msg