Example 1
    def cb_copy_all(self) -> None:
        """Copy all binaries from CarbonBlack Response into BinaryAlert.

        Raises:
            InvalidConfigError: If the CarbonBlack downloader is not enabled.
        """
        if not self._config.enable_carbon_black_downloader:
            raise InvalidConfigError('CarbonBlack downloader is not enabled.')

        print('Connecting to CarbonBlack server {} ...'.format(self._config.carbon_black_url))
        carbon_black = cbapi.CbResponseAPI(
            url=self._config.carbon_black_url, token=self._config.plaintext_carbon_black_api_token)
        print('Connecting to SQS queue {} ...'.format(
            self._config.binaryalert_downloader_queue_name))
        queue = boto3.resource('sqs').get_queue_by_name(
            QueueName=self._config.binaryalert_downloader_queue_name)

        md5s = []
        for index, binary in enumerate(carbon_black.select(Binary).all()):
            print('\r{} {}'.format(index, binary.md5), flush=True, end='')
            md5s.append(binary.md5)
            if len(md5s) == 10:  # Up to 10 messages can be delivered at a time.
                response = queue.send_messages(
                    Entries=[
                        {'Id': str(i), 'MessageBody': json.dumps({'md5': md5})}
                        for i, md5 in enumerate(md5s)
                    ]
                )
                # If there were any failures sending to SQS, put those back in the md5s list.
                md5s = [md5s[int(failure['Id'])] for failure in response.get('Failed', [])]
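        # (The excerpt ends mid-function; the lines below are a minimal completion
        # sketch, assuming the same queue and message format, that flushes any
        # MD5s still buffered once the loop over binaries finishes.)
        if md5s:
            queue.send_messages(
                Entries=[
                    {'Id': str(i), 'MessageBody': json.dumps({'md5': md5})}
                    for i, md5 in enumerate(md5s)
                ]
            )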
Example 2
    async def isolate(self, url, token, hostname):
        """Enable network isolation on every sensor matching the given hostname."""
        cb = cbapi.CbResponseAPI(url=url, token=token, ssl_verify=self.verify)
        isolated = False

        for sensor in cb.select(cbapi.response.Sensor).where("hostname:%s" % hostname):
            sensor.network_isolation_enabled = True
            sensor.save()
            isolated = True

        return isolated
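A minimal usage sketch for the coroutine above, assuming it lives on a client class that exposes an SSL-verify flag. The class name, server URL, API token, and hostname are illustrative placeholders, not taken from the source:

import asyncio

import cbapi


class ResponseClient:
    """Hypothetical wrapper class; isolate() is the coroutine shown above."""

    def __init__(self, verify: bool = True) -> None:
        self.verify = verify

    async def isolate(self, url, token, hostname):
        cb = cbapi.CbResponseAPI(url=url, token=token, ssl_verify=self.verify)
        isolated = False
        for sensor in cb.select(cbapi.response.Sensor).where("hostname:%s" % hostname):
            sensor.network_isolation_enabled = True
            sensor.save()
            isolated = True
        return isolated


async def main() -> None:
    client = ResponseClient(verify=False)
    if await client.isolate('https://cb.example.com', 'API_TOKEN', 'WIN-HOST-01'):
        print('Sensor isolated.')
    else:
        print('No sensor found for that hostname.')


asyncio.run(main())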
Example 3
    def cb_copy_all(self) -> None:
        """Copy all binaries from CarbonBlack Response into BinaryAlert

        Raises:
            InvalidConfigError: If the CarbonBlack downloader is not enabled.
        """
        if not self._config.enable_carbon_black_downloader:
            raise InvalidConfigError('CarbonBlack downloader is not enabled.')

        print('Connecting to CarbonBlack server {} ...'.format(
            self._config.carbon_black_url))
        carbon_black = cbapi.CbResponseAPI(
            url=self._config.carbon_black_url,
            token=self._config.plaintext_carbon_black_api_token)

        self._enqueue(
            self._config.binaryalert_downloader_queue_name,
            ({'md5': binary.md5} for binary in carbon_black.select(Binary).all()),
            lambda msg: (1, msg['md5'])
        )
Example 4
import base64
import collections
import json
import logging
import os
from typing import Any, Generator

import boto3
from botocore.exceptions import BotoCoreError
import cbapi
from cbapi.errors import ObjectNotFoundError, ServerError
from cbapi.response.models import Binary

LOGGER = logging.getLogger()
LOGGER.setLevel(logging.INFO)
logging.getLogger('backoff').addHandler(
    logging.StreamHandler())  # Enable backoff logger.

ENCRYPTED_TOKEN = os.environ['ENCRYPTED_CARBON_BLACK_API_TOKEN']
DECRYPTED_TOKEN = boto3.client('kms').decrypt(
    CiphertextBlob=base64.b64decode(ENCRYPTED_TOKEN))['Plaintext']

# Establish boto3 and S3 clients at import time so Lambda can cache them for re-use.
CARBON_BLACK = cbapi.CbResponseAPI(url=os.environ['CARBON_BLACK_URL'],
                                   token=DECRYPTED_TOKEN)
CLOUDWATCH = boto3.client('cloudwatch')
S3_BUCKET = boto3.resource('s3').Bucket(os.environ['TARGET_S3_BUCKET'])
SQS = boto3.resource('sqs')

# Each download record carries an MD5, its SQS receipt, and the receive count.
DownloadRecord = collections.namedtuple(
    'DownloadRecord', ['md5', 'sqs_receipt', 'receive_count'])


def _iter_download_records(
        event: Any) -> Generator[DownloadRecord, None, None]:
    """Generate DownloadRecords from the invocation event."""
    for message in event['messages']:
        try:
            md5 = json.loads(message['body'])['md5']
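            # (The excerpt ends here; the lines below are an assumed completion.
            # The receipt and receive-count field names are guesses about the
            # custom event format; the except clause mirrors Example 5.)
            yield DownloadRecord(
                md5, message['receipt'], int(message['receive_count']))
        except (json.JSONDecodeError, KeyError, TypeError):
            LOGGER.exception('Skipping invalid record: %s', message)
            continue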
Example 5
import base64
import json
import logging
import os
from typing import Any, Generator, Tuple

import boto3
import cbapi
from cbapi.errors import ObjectNotFoundError, ServerError
from cbapi.response.models import Binary

LOGGER = logging.getLogger()
LOGGER.setLevel(logging.INFO)
logging.getLogger('backoff').addHandler(logging.StreamHandler())  # Enable backoff logger.

ENCRYPTED_TOKEN = os.environ['ENCRYPTED_CARBON_BLACK_API_TOKEN']
DECRYPTED_TOKEN = boto3.client('kms').decrypt(
    CiphertextBlob=base64.b64decode(ENCRYPTED_TOKEN)
)['Plaintext']

# Establish boto3 and S3 clients at import time so Lambda can cache them for re-use.
CARBON_BLACK = cbapi.CbResponseAPI(
    url=os.environ['CARBON_BLACK_URL'],
    timeout=int(os.environ['CARBON_BLACK_TIMEOUT']),
    token=DECRYPTED_TOKEN
)
CLOUDWATCH = boto3.client('cloudwatch')
S3_BUCKET = boto3.resource('s3').Bucket(os.environ['TARGET_S3_BUCKET'])


def _iter_download_records(event: Any) -> Generator[Tuple[str, int], None, None]:
    """Yield (md5, receive_count) from the invocation event."""
    for message in event['Records']:
        try:
            md5 = json.loads(message['body'])['md5']
            yield md5, int(message['attributes']['ApproximateReceiveCount'])
        except (json.JSONDecodeError, KeyError, TypeError):
            LOGGER.exception('Skipping invalid SQS record: %s', message)
            continue
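A small invocation sketch for the generator above; the record shape follows the standard SQS-to-Lambda event format the code expects, and the MD5 value is a placeholder:

sample_event = {
    'Records': [
        {
            'body': json.dumps({'md5': '0123456789abcdef0123456789abcdef'}),
            'attributes': {'ApproximateReceiveCount': '1'},
        }
    ]
}

for md5, receive_count in _iter_download_records(sample_event):
    print(md5, receive_count)  # -> 0123456789abcdef0123456789abcdef 1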
Example 6
    async def binary_search(self, url, token, query):
        """Return the number of Binary results matching the given Carbon Black query."""
        cb = cbapi.CbResponseAPI(url=url, token=token, ssl_verify=self.verify)

        # len() on a cbapi query executes the search and returns the result count.
        search = cb.select(cbapi.response.Binary).where(query)
        return len(search)