Ejemplo n.º 1
0
def lambda_handler(event, context):
    """Scan the S3 object referenced by *event* with ClamAV and record the result."""
    started = datetime.utcnow()
    print("Script starting at %s\n" % started.strftime("%Y/%m/%d %H:%M:%S UTC"))

    s3_object = event_object(event)
    verify_s3_object_version(s3_object)
    sns_start_scan(s3_object)

    # Fetch the object and the current virus definitions, then scan.
    file_path = download_s3_object(s3_object, "/tmp")
    clamav.update_defs_from_s3(AV_DEFINITION_S3_BUCKET, AV_DEFINITION_S3_PREFIX)
    scan_result = clamav.scan_file(file_path)
    object_uri = os.path.join(s3_object.bucket_name, s3_object.key)
    print("Scan of s3://%s resulted in %s\n" % (object_uri, scan_result))

    # Write the verdict back to the object and fan it out.
    if "AV_UPDATE_METADATA" in os.environ:
        set_av_metadata(s3_object, scan_result)
    set_av_tags(s3_object, scan_result)
    sns_scan_results(s3_object, scan_result)
    metrics.send(
        env=ENV, bucket=s3_object.bucket_name, key=s3_object.key, status=scan_result
    )

    # Free space on the warm Lambda container; ignore an already-missing file.
    try:
        os.remove(file_path)
    except OSError:
        pass

    if str_to_bool(AV_DELETE_INFECTED_FILES) and scan_result == AV_STATUS_INFECTED:
        delete_s3_object(s3_object)

    print(
        "Script finished at %s\n" % datetime.utcnow().strftime("%Y/%m/%d %H:%M:%S UTC")
    )
Ejemplo n.º 2
0
def lambda_handler(event, context):
    """Scan the uploaded S3 object with ClamAV and report the outcome to a webhook."""
    started_at = datetime.utcnow()
    print("Script starting at %s\n" %
          started_at.strftime("%Y/%m/%d %H:%M:%S UTC"))

    s3_object = event_object(event)
    webhook, auth = event_webhook(event)
    webhook_scan_started(s3_object, webhook, auth)

    # Fetch the object and the current virus definitions, then scan.
    file_path = download_s3_object(s3_object, "/tmp")
    clamav.update_defs_from_s3(AV_DEFINITION_S3_BUCKET, AV_DEFINITION_S3_PREFIX)
    scan_result, scan_output = clamav.scan_file(file_path)

    object_uri = os.path.join(s3_object.bucket_name, s3_object.key)
    print("Scan of s3://%s resulted in %s\n" % (object_uri, scan_result))

    if "AV_UPDATE_METADATA" in os.environ:
        set_av_metadata(s3_object, scan_result)
    set_av_tags(s3_object, scan_result)

    # Fan the result out to SNS, the caller-supplied webhook, and metrics.
    sns_scan_results(s3_object, scan_result)
    webhook_scan_results(s3_object, scan_result, scan_output, webhook, auth)
    metrics.send(
        env=ENV,
        bucket=s3_object.bucket_name,
        key=s3_object.key,
        status=scan_result,
    )

    # Free space on the warm Lambda container; ignore an already-missing file.
    try:
        os.remove(file_path)
    except OSError:
        pass

    print("Script finished at %s\n" %
          datetime.utcnow().strftime("%Y/%m/%d %H:%M:%S UTC"))
Ejemplo n.º 3
0
def lambda_handler(event, context):
    """Scan an S3 object with ClamAV unless its path matches AV_EXCLUDE_PATTERN.

    The object's MD5 hash is computed and stored with the scan result as tags
    (and optionally metadata); results are published to SNS and metrics.
    """
    start_time = datetime.utcnow()
    print("Script starting at %s\n" %
          (start_time.strftime("%Y/%m/%d %H:%M:%S UTC")))
    s3_object = event_object(event)
    file_path = download_s3_object(s3_object, "/tmp")
    if AV_EXCLUDE_PATTERN is not None:
        if re.search(AV_EXCLUDE_PATTERN, file_path) is not None:
            # BUGFIX: corrected typo in the log message ("exlusion").
            print("File path matched exclusion pattern:%s" % AV_EXCLUDE_PATTERN)
            # BUGFIX: remove the already-downloaded file so excluded objects
            # do not accumulate in /tmp on the re-usable Lambda container
            # (previously this path leaked the download).
            try:
                os.remove(file_path)
            except OSError:
                pass
            return None
    clamav.update_defs_from_s3(AV_DEFINITION_S3_BUCKET,
                               AV_DEFINITION_S3_PREFIX)
    filehash = clamav.md5_from_file(file_path)
    scan_result = clamav.scan_file(file_path)
    print("Scan of s3://%s resulted in %s\n" %
          (os.path.join(s3_object.bucket_name, s3_object.key), scan_result))
    if "AV_UPDATE_METADATA" in os.environ:
        set_av_metadata(s3_object, scan_result, filehash)
    set_av_tags(s3_object, scan_result, filehash)
    sns_scan_results(s3_object, scan_result)
    metrics.send(env=ENV,
                 bucket=s3_object.bucket_name,
                 key=s3_object.key,
                 status=scan_result,
                 hash=filehash)
    # Delete downloaded file to free up room on re-usable lambda function container
    try:
        os.remove(file_path)
    except OSError:
        pass
    print("Script finished at %s\n" %
          datetime.utcnow().strftime("%Y/%m/%d %H:%M:%S UTC"))
Ejemplo n.º 4
0
def scan_object(s3_object):
    """Run a ClamAV scan over *s3_object*, tagging it and publishing the result.

    Objects that fail the size check are skipped with a log message.
    """
    verify_s3_object_version(s3_object)
    try:
        is_object_scannable(s3_object)
    except SizeError as err:
        # Object fails the size check: log and bail out without scanning.
        print(err.msg)
        return

    sns_start_scan(s3_object)
    file_path = download_s3_object(s3_object, "/tmp")
    clamav.update_defs_from_s3(AV_DEFINITION_S3_BUCKET, AV_DEFINITION_S3_PREFIX)
    scan_result = clamav.scan_file(file_path)
    object_uri = os.path.join(s3_object.bucket_name, s3_object.key)
    print("Scan of s3://%s resulted in %s\n" % (object_uri, scan_result))

    if "AV_UPDATE_METADATA" in os.environ:
        set_av_metadata(s3_object, scan_result)
    set_av_tags(s3_object, scan_result)
    sns_scan_results(s3_object, scan_result)
    metrics.send(
        env=ENV,
        bucket=s3_object.bucket_name,
        key=s3_object.key,
        status=scan_result,
    )

    # Free space on the warm Lambda container; ignore an already-missing file.
    try:
        os.remove(file_path)
    except OSError:
        pass
Ejemplo n.º 5
0
def lambda_handler_process_all_bucket_objects(event, context):
    """Bulk-scan every object under AV_SCAN_ALL_OBJECTS_S3_BUCKET_PREFIX.

    Iterates the configured bucket/prefix page by page, skipping folder keys
    and objects that already carry an AV tag; each remaining object is
    downloaded, scanned with ClamAV, and tagged with the result.
    """
    print("Script starting at %s\n" %
          (datetime.utcnow().strftime("%Y/%m/%d %H:%M:%S UTC")))

    # Both bucket and prefix must be configured; bail out early otherwise.
    if AV_SCAN_ALL_OBJECTS_S3_BUCKET is None or AV_SCAN_ALL_OBJECTS_S3_BUCKET_PREFIX is None:
        print(
            "You must define env variable AV_SCAN_ALL_OBJECTS_S3_BUCKET and AV_SCAN_ALL_OBJECTS_S3_BUCKET_PREFIX"
        )
        return

    # Update the ClamAV definitions once, up front, for the whole batch.
    clamav.update_defs_from_s3(AV_DEFINITION_S3_BUCKET,
                               AV_DEFINITION_S3_PREFIX)

    bucket = s3.Bucket(AV_SCAN_ALL_OBJECTS_S3_BUCKET)
    for obj in bucket.objects.filter(
            Prefix=AV_SCAN_ALL_OBJECTS_S3_BUCKET_PREFIX).page_size(
                AV_SCAN_ALL_OBJECTS_S3_PAGE_SIZE):

        # Keys ending in '/' are folder placeholders, not scannable objects.
        if not obj.key.endswith('/'):
            # Skip objects that already carry an AV tag (already processed).
            if not check_av_tag(bucket.name, obj.key):
                print("processing object %s" % obj.key)
                s3_object = get_S3_object(bucket.name, obj.key)
                verify_s3_object_version(s3_object)
                file_path = download_s3_object(s3_object, "/tmp")

                scan_result = clamav.scan_file(file_path)
                print("Scan of s3://%s resulted in %s\n" % (os.path.join(
                    s3_object.bucket_name, s3_object.key), scan_result))

                if "AV_UPDATE_METADATA" in os.environ:
                    set_av_metadata(s3_object, scan_result)

                # Handle the case where the object is removed before the scan
                # completes: optionally re-check existence before tagging.
                tag_obj = True
                if AV_CHECK_FOR_FILE_BEFORE_TAGGING:
                    try:
                        get_S3_object(bucket.name, obj.key)
                    except s3_client.exceptions.NoSuchKey:
                        print("S3 object %s not found, skip tagging" % obj.key)
                        tag_obj = False
                if tag_obj:
                    set_av_tags(s3_object, scan_result)

                # Delete downloaded file to free up room on re-usable lambda function container
                try:
                    os.remove(file_path)
                except OSError:
                    print("ERROR - Fail removing file %s " % file_path)
                    pass
            else:
                print("skipped obj %s -> av tag" % obj.key)
        else:
            print("skipped obj %s -> folder" % obj.key)

    print("Script finished at %s\n" %
          datetime.utcnow().strftime("%Y/%m/%d %H:%M:%S UTC"))
Ejemplo n.º 6
0
def lambda_handler(event, context):
    """Refresh the ClamAV definitions: pull from S3, run freshclam, push back."""
    started = datetime.utcnow()
    print("Script starting at %s\n" %
          started.strftime("%Y/%m/%d %H:%M:%S UTC"))

    clamav.update_defs_from_s3(AV_DEFINITION_S3_BUCKET, AV_DEFINITION_S3_PREFIX)
    clamav.update_defs_from_freshclam(AV_DEFINITION_PATH, CLAMAVLIB_PATH)
    clamav.upload_defs_to_s3(
        AV_DEFINITION_S3_BUCKET, AV_DEFINITION_S3_PREFIX, AV_DEFINITION_PATH
    )

    print("Script finished at %s\n" %
          datetime.utcnow().strftime("%Y/%m/%d %H:%M:%S UTC"))
Ejemplo n.º 7
0
def do_scan(file_path, bucket, key):
    """Scan *file_path* with ClamAV, tag the status locally, and return the result."""
    print('Scan object with ClamAv script.')
    started = datetime.utcnow()
    print("Script starting at %s\n" %
          started.strftime("%Y/%m/%d %H:%M:%S UTC"))

    clamav.update_defs_from_s3(AV_DEFINITION_S3_BUCKET, AV_DEFINITION_S3_PREFIX)
    scan_result = clamav.scan_file(file_path)
    object_uri = os.path.join(bucket, key)
    print("Scan of s3://%s resulted in %s\n" % (object_uri, scan_result))
    set_status_tag_local(AV_STATUS_METADATA, scan_result, AV_TIMESTAMP_METADATA)

    print("Script finished at %s\n" %
          datetime.utcnow().strftime("%Y/%m/%d %H:%M:%S UTC"))
    return scan_result
Ejemplo n.º 8
0
def lambda_handler(event, context):
    """Update the ClamAV definition set stored in S3.

    Downloads the stale definition files from S3, refreshes them with
    freshclam, and uploads the refreshed set back to the definitions bucket.
    """
    s3 = boto3.resource("s3")
    s3_client = boto3.client("s3")

    print("Script starting at %s\n" % (get_timestamp()))
    to_download = clamav.update_defs_from_s3(
        s3_client, AV_DEFINITION_S3_BUCKET, AV_DEFINITION_S3_PREFIX
    )

    defs_bucket = s3.Bucket(AV_DEFINITION_S3_BUCKET)
    for item in to_download.values():
        remote_key = item["s3_path"]
        local_target = item["local_path"]
        print("Downloading definition file %s from s3://%s" %
              (local_target, remote_key))
        defs_bucket.download_file(remote_key, local_target)
        print("Downloading definition file %s complete!" % (local_target))

    clamav.update_defs_from_freshclam(AV_DEFINITION_PATH, CLAMAVLIB_PATH)
    # If main.cvd gets updated (very rare), force freshclam to re-download the
    # compressed version to keep file sizes down. The existence of main.cud is
    # the trigger that tells us this has happened.
    main_cud = os.path.join(AV_DEFINITION_PATH, "main.cud")
    main_cvd = os.path.join(AV_DEFINITION_PATH, "main.cvd")
    if os.path.exists(main_cud):
        os.remove(main_cud)
        if os.path.exists(main_cvd):
            os.remove(main_cvd)
        clamav.update_defs_from_freshclam(AV_DEFINITION_PATH, CLAMAVLIB_PATH)

    clamav.upload_defs_to_s3(
        s3_client, AV_DEFINITION_S3_BUCKET, AV_DEFINITION_S3_PREFIX, AV_DEFINITION_PATH
    )
    print("Script finished at %s\n" % get_timestamp())
Ejemplo n.º 9
0
    def test_update_defs_from_s3(self, mock_exists, mock_md5_from_file):
        """Definition files whose S3 md5 tag differs from the local file's md5
        must all be queued for download with the expected local/S3 paths."""
        expected_md5_hash = "d41d8cd98f00b204e9800998ecf8427e"
        different_md5_hash = "d41d8cd98f00b204e9800998ecf8427f"

        # Local files hash differently from the tagged S3 copies, so every
        # definition file is considered stale and should be re-downloaded.
        mock_md5_from_file.return_value = different_md5_hash

        tag_set = {"TagSet": [{"Key": "md5", "Value": expected_md5_hash}]}
        expected_s3_time = datetime.datetime(2019, 1, 1)

        s3_stubber = Stubber(self.s3_client)

        # Pretend every local definition file exists (two os.path.exists calls
        # are made per prefix/suffix combination).
        key_names = []
        side_effect = []
        for file_prefix in AV_DEFINITION_FILE_PREFIXES:
            for file_suffix in AV_DEFINITION_FILE_SUFFIXES:
                side_effect.extend([True, True])
                filename = file_prefix + "." + file_suffix
                key_names.append(
                    os.path.join(AV_DEFINITION_S3_PREFIX, filename))
        mock_exists.side_effect = side_effect

        # Stub one get_object_tagging + head_object round-trip per key.
        for s3_key_name in key_names:
            get_object_tagging_response = tag_set
            get_object_tagging_expected_params = {
                "Bucket": self.s3_bucket_name,
                "Key": s3_key_name,
            }
            s3_stubber.add_response(
                "get_object_tagging",
                get_object_tagging_response,
                get_object_tagging_expected_params,
            )
            head_object_response = {"LastModified": expected_s3_time}
            head_object_expected_params = {
                "Bucket": self.s3_bucket_name,
                "Key": s3_key_name,
            }
            s3_stubber.add_response("head_object", head_object_response,
                                    head_object_expected_params)

        expected_to_download = {
            "bytecode": {
                "local_path": "/tmp/clamav_defs/bytecode.cvd",
                "s3_path": "clamav_defs/bytecode.cvd",
            },
            "daily": {
                "local_path": "/tmp/clamav_defs/daily.cvd",
                "s3_path": "clamav_defs/daily.cvd",
            },
            "main": {
                "local_path": "/tmp/clamav_defs/main.cvd",
                "s3_path": "clamav_defs/main.cvd",
            },
        }
        with s3_stubber:
            to_download = update_defs_from_s3(self.s3_client,
                                              self.s3_bucket_name,
                                              AV_DEFINITION_S3_PREFIX)
            # BUGFIX: assertEquals is a deprecated alias removed in
            # Python 3.12; assertEqual is the supported spelling.
            self.assertEqual(expected_to_download, to_download)
Ejemplo n.º 10
0
def lambda_handler(event, context):
    """Scan an S3 object with ClamAV while tracking scan state in a datastore.

    A record is inserted (scan_state "In Process") before the scan and updated
    with the final result afterwards; the object is tagged, results published
    to SNS, metrics emitted, and infected files optionally deleted.
    """
    start_time = datetime.utcnow()
    print("Script starting at %s\n" %
          (start_time.strftime("%Y/%m/%d %H:%M:%S UTC")))
    s3_object, bucket, key_name = event_object(event)
    # Epoch seconds (UTC) for the tracking record's audit fields.
    inserted_date = calendar.timegm(time.gmtime())
    updated_date = calendar.timegm(time.gmtime())
    data_to_store = {
        "s3_key": key_name,
        "bucket_name": bucket,
        "inserted_date": inserted_date,
        "updated_date": updated_date,
        "scan_state": "In Process"
    }
    # Persist the in-progress record; its id correlates the scan end-to-end.
    trans_id = insert_data(data_to_store)
    verify_s3_object_version(s3_object)
    sns_start_scan(s3_object)
    file_path = download_s3_object(s3_object, "/tmp")
    clamav.update_defs_from_s3(AV_DEFINITION_S3_BUCKET,
                               AV_DEFINITION_S3_PREFIX)
    scan_result = clamav.scan_file(file_path, trans_id)
    # Record the final scan state against the tracking record.
    updated_date = calendar.timegm(time.gmtime())
    data = {"scan_state": scan_result, "updated_date": updated_date}
    query = {"id": trans_id}
    update_data(query, data)
    print("Scan of s3://%s resulted in %s\n" %
          (os.path.join(s3_object.bucket_name, s3_object.key), scan_result))
    if "AV_UPDATE_METADATA" in os.environ:
        set_av_metadata(s3_object, scan_result)
    set_av_tags(s3_object, scan_result)
    sns_scan_results(s3_object, scan_result)
    metrics.send(env=ENV,
                 bucket=s3_object.bucket_name,
                 key=s3_object.key,
                 status=scan_result)
    # Delete downloaded file to free up room on re-usable lambda function container
    try:
        os.remove(file_path)
    except OSError:
        pass
    if str_to_bool(
            AV_DELETE_INFECTED_FILES) and scan_result == AV_STATUS_INFECTED:
        delete_s3_object(s3_object)
    print("Script finished at %s\n" %
          datetime.utcnow().strftime("%Y/%m/%d %H:%M:%S UTC"))
Ejemplo n.º 11
0
def lambda_handler(event, context):
    """Refresh the ClamAV definitions in S3 via freshclam.

    If main.cvd was updated, the uncompressed artifacts are removed and
    freshclam is re-run so only the compressed database is kept.
    """
    started = datetime.utcnow()
    print("Script starting at %s\n" %
          started.strftime("%Y/%m/%d %H:%M:%S UTC"))

    clamav.update_defs_from_s3(AV_DEFINITION_S3_BUCKET, AV_DEFINITION_S3_PREFIX)
    clamav.update_defs_from_freshclam(AV_DEFINITION_PATH, CLAMAVLIB_PATH)

    # If main.cvd gets updated (very rare), force freshclam to re-download the
    # compressed version to keep file sizes down. The existence of main.cud is
    # the trigger that tells us this has happened.
    main_cud = os.path.join(AV_DEFINITION_PATH, "main.cud")
    main_cvd = os.path.join(AV_DEFINITION_PATH, "main.cvd")
    if os.path.exists(main_cud):
        os.remove(main_cud)
        if os.path.exists(main_cvd):
            os.remove(main_cvd)
        clamav.update_defs_from_freshclam(AV_DEFINITION_PATH, CLAMAVLIB_PATH)

    clamav.upload_defs_to_s3(
        AV_DEFINITION_S3_BUCKET, AV_DEFINITION_S3_PREFIX, AV_DEFINITION_PATH
    )
    print("Script finished at %s\n" %
          datetime.utcnow().strftime("%Y/%m/%d %H:%M:%S UTC"))
Ejemplo n.º 12
0
def lambda_handler(event, context):
    """Scan an S3 object with ClamAV and YARA, then submit the file to FSF.

    NOTE(review): `lambda_result` is hard-coded to dummy values rather than
    built from `scan_result` / `scan_result_yara` — looks like leftover test
    scaffolding; confirm before relying on the FSF submission contents.
    """
    start_time = datetime.utcnow()
    print("Script starting at %s\n" %
          (start_time.strftime("%Y/%m/%d %H:%M:%S UTC")))
    s3_object = event_object(event)
    verify_s3_object_version(s3_object)
    sns_start_scan(s3_object)
    file_path = download_s3_object(s3_object, "/tmp")
    clamav.update_defs_from_s3(AV_DEFINITION_S3_BUCKET,
                               AV_DEFINITION_S3_PREFIX)
    scan_result = clamav.scan_file(file_path)
    slack_notification(scan_result)
    print("yara scanning to begin")
    # Pull the YARA rules from S3 and run them over the same local file.
    yarascan.update_sigs_from_s3(YARA_RULES_S3_BUCKET, YARA_RULES_S3_PREFIX)
    scan_result_yara = yarascan.scan_file(file_path)
    print(scan_result_yara)
    # Hard-coded placeholder results passed to FSF (see docstring note).
    lambda_result = {"clamav": "Detected", "yara": "Dummrule1.yara"}
    with open(file_path, 'rb') as f:
        filename = os.path.basename(file_path)
        print("sending control to fsf")
        fsf = fsf_client.FSFClient(file_path, f.name, False, 'Analyst', False,
                                   False, False, f.read(), lambda_result)
        print("initiating submission")
        print(fsf.initiate_submission())
    print("Scan of s3://%s resulted in %s\n" %
          (os.path.join(s3_object.bucket_name, s3_object.key), scan_result))
    if "AV_UPDATE_METADATA" in os.environ:
        set_av_metadata(s3_object, scan_result)
    set_av_tags(s3_object, scan_result)
    sns_scan_results(s3_object, scan_result)
    #metrics.send(env=ENV, bucket=s3_object.bucket_name, key=s3_object.key, status=scan_result)
    # Delete downloaded file to free up room on re-usable lambda function container
    try:
        os.remove(file_path)
    except OSError:
        pass
    if str_to_bool(
            AV_DELETE_INFECTED_FILES) and scan_result == AV_STATUS_INFECTED:
        delete_s3_object(s3_object)
    print("Script finished at %s\n" %
          datetime.utcnow().strftime("%Y/%m/%d %H:%M:%S UTC"))
Ejemplo n.º 13
0
def download_clamav_databases():
    """Sync stale ClamAV definition files from S3 to their local paths."""
    s3_client = boto3.client("s3")
    s3 = boto3.resource("s3")
    to_download = clamav.update_defs_from_s3(
        s3_client, AV_DEFINITION_S3_BUCKET, AV_DEFINITION_S3_PREFIX
    )

    defs_bucket = s3.Bucket(AV_DEFINITION_S3_BUCKET)
    for entry in to_download.values():
        remote_key = entry["s3_path"]
        local_target = entry["local_path"]
        print("Downloading definition file %s from s3://%s" %
              (local_target, remote_key))
        defs_bucket.download_file(remote_key, local_target)
        print("Downloading definition file %s complete!" % (local_target))
Ejemplo n.º 14
0
def lambda_handler(event, context):
    """Scan an S3 object with ClamAV and tag it with the result.

    Tagging tolerates the object disappearing mid-scan when
    AV_CHECK_FOR_FILE_BEFORE_TAGGING is set; otherwise the failure is
    re-raised with the original NoSuchKey chained as its cause.
    """
    start_time = datetime.utcnow()
    print("Script starting at %s\n" %
          (start_time.strftime("%Y/%m/%d %H:%M:%S UTC")))
    s3_object = event_object(event)
    verify_s3_object_version(s3_object)
    sns_start_scan(s3_object)
    file_path = download_s3_object(s3_object, "/tmp")
    clamav.update_defs_from_s3(AV_DEFINITION_S3_BUCKET,
                               AV_DEFINITION_S3_PREFIX)
    scan_result = clamav.scan_file(file_path)
    print("Scan of s3://%s resulted in %s\n" %
          (os.path.join(s3_object.bucket_name, s3_object.key), scan_result))
    if "AV_UPDATE_METADATA" in os.environ:
        set_av_metadata(s3_object, scan_result)

    try:
        set_av_tags(s3_object, scan_result)
    except s3_client.exceptions.NoSuchKey as err:
        # Handle the case where the object is removed before the scan completes.
        if AV_CHECK_FOR_FILE_BEFORE_TAGGING:
            print("S3 object not found, skip tagging")
        else:
            # BUGFIX: chain the original NoSuchKey (`from err`) so the root
            # cause is preserved in the traceback instead of being discarded.
            raise Exception("We have a problem with obj tagging") from err

    sns_scan_results(s3_object, scan_result)
    metrics.send(env=ENV,
                 bucket=s3_object.bucket_name,
                 key=s3_object.key,
                 status=scan_result)
    # Delete downloaded file to free up room on re-usable lambda function container
    try:
        os.remove(file_path)
    except OSError:
        pass
    print("Script finished at %s\n" %
          datetime.utcnow().strftime("%Y/%m/%d %H:%M:%S UTC"))
def lambda_handler(event, context):
    """Scan an uploaded S3 object with ClamAV, skipping files over the size cap.

    Objects at or above AV_SCAN_SKIP_SIZE_IN_MB are tagged AV_STATUS_SKIPPED
    without being downloaded or scanned.
    """
    started = datetime.utcnow()
    print("Script starting at %s\n" %
          (started.strftime("%Y/%m/%d %H:%M:%S UTC")))

    summary = event_object(event, True)
    s3_object = event_object(event, False)
    file_to_scan = os.path.join(s3_object.bucket_name, s3_object.key)
    size_mb = summary.size / 1024 / 1024
    will_skip = float(size_mb) >= float(AV_SCAN_SKIP_SIZE_IN_MB)

    print("s3://%s\n" % (file_to_scan))
    print("File size: %s bytes (%sMB), AV_SCAN_SKIP_SIZE_IN_MB: %s, will skip: %s\n" %
          (summary.size, size_mb, AV_SCAN_SKIP_SIZE_IN_MB, will_skip))

    if will_skip:
        # Too large to scan: mark the object as skipped and stop.
        set_av_tags(s3_object, AV_STATUS_SKIPPED)
    else:
        verify_s3_object_version(s3_object)
        sns_start_scan(s3_object)
        file_path = download_s3_object(s3_object, "/tmp")
        clamav.update_defs_from_s3(AV_DEFINITION_S3_BUCKET, AV_DEFINITION_S3_PREFIX)
        scan_result = clamav.scan_file(file_path)
        print("Scan of s3://%s resulted in %s\n" % (file_to_scan, scan_result))
        if "AV_UPDATE_METADATA" in os.environ:
            set_av_metadata(s3_object, scan_result)
        set_av_tags(s3_object, scan_result)
        sns_scan_results(s3_object, scan_result)
        metrics.send(
            env=ENV,
            bucket=s3_object.bucket_name,
            key=s3_object.key,
            status=scan_result,
        )
        # Free space on the warm Lambda container; ignore an already-missing file.
        try:
            os.remove(file_path)
        except OSError:
            pass

    print("Script finished at %s\n" %
          datetime.utcnow().strftime("%Y/%m/%d %H:%M:%S UTC"))
Ejemplo n.º 16
0
def lambda_handler(event, context):
    """Rebuild the ClamAV definition set from scratch and upload it to S3.

    The local definition directory is wiped first, freshclam is run (and
    validated via its return code), and the resulting definitions are
    uploaded back to the definitions bucket.
    """
    s3 = boto3.resource("s3")
    s3_client = boto3.client("s3")

    start_time = datetime.utcnow()
    print("Script starting at %s\n" %
          (start_time.strftime("%Y/%m/%d %H:%M:%S UTC")))

    # Start from an empty definitions directory on every invocation.
    shutil.rmtree(AV_DEFINITION_PATH, ignore_errors=True)
    os.mkdir(AV_DEFINITION_PATH)

    to_download = clamav.update_defs_from_s3(s3_client,
                                             AV_DEFINITION_S3_BUCKET,
                                             AV_DEFINITION_S3_PREFIX)

    # The S3 download of existing definitions is deliberately disabled here:
    # freshclam below rebuilds everything from scratch.
    print("Skipping clamav definition download %s\n" % (get_timestamp()))
    # for download in to_download.values():
    #    s3_path = download["s3_path"]
    #    local_path = download["local_path"]
    #    print("Downloading definition file %s from s3://%s" % (local_path, s3_path))
    #    s3.Bucket(AV_DEFINITION_S3_BUCKET).download_file(s3_path, local_path)
    #    print("Downloading definition file %s complete!" % (local_path))

    retVal = clamav.update_defs_from_freshclam(AV_DEFINITION_PATH,
                                               CLAMAVLIB_PATH)
    if retVal != 0:
        raise RuntimeError("clamAV update process returned %d" % (retVal))
    # If main.cvd gets updated (very rare), we will need to force freshclam
    # to download the compressed version to keep file sizes down.
    # The existence of main.cud is the trigger to know this has happened.
    if os.path.exists(os.path.join(AV_DEFINITION_PATH, "main.cud")):
        os.remove(os.path.join(AV_DEFINITION_PATH, "main.cud"))
        if os.path.exists(os.path.join(AV_DEFINITION_PATH, "main.cvd")):
            os.remove(os.path.join(AV_DEFINITION_PATH, "main.cvd"))
        retVal = clamav.update_defs_from_freshclam(AV_DEFINITION_PATH,
                                                   CLAMAVLIB_PATH)
        if retVal != 0:
            raise RuntimeError("Refresh clamAV update process returned %d" %
                               (retVal))
    clamav.upload_defs_to_s3(s3_client, AV_DEFINITION_S3_BUCKET,
                             AV_DEFINITION_S3_PREFIX, AV_DEFINITION_PATH)
    print("Script finished at %s\n" % get_timestamp())
Ejemplo n.º 17
0
def start_clamd(s3, s3_client):
    """Ensure ClamAV definitions exist locally, then start the clamd daemon.

    Definitions are pulled from S3 only when AV_DEFINITION_PATH is missing;
    the daemon is launched only when its socket does not already exist
    (i.e. at most once per warm container).
    """
    if not os.path.isdir(AV_DEFINITION_PATH):
        to_download = clamav.update_defs_from_s3(s3_client,
                                                 AV_DEFINITION_S3_BUCKET,
                                                 AV_DEFINITION_S3_PREFIX)

        for download in to_download.values():
            s3_path = download["s3_path"]
            local_path = download["local_path"]
            logging.info("Downloading definition file %s from s3://%s" %
                         (local_path, s3_path))
            s3.Bucket(AV_DEFINITION_S3_BUCKET).download_file(
                s3_path, local_path)
            logging.info("Downloading definition file %s complete!" %
                         (local_path))

    # Socket existence is used as the "clamd already running" signal.
    if not os.path.exists(CLAMD_SOCKET):
        logging.info("Starting clamav daemon")
        # NOTE(review): shell=True with interpolated paths — acceptable for
        # trusted config constants, but confirm these never carry untrusted input.
        return_code = subprocess.call(f"{CLAMD_PATH} -c {CLAMD_CONFIG_PATH}",
                                      shell=True)
        logging.info("Started clamav daemon with return_code %s." %
                     (return_code))
Ejemplo n.º 18
0
def lambda_handler(event, context):
    """Scan an S3 object with ClamAV, optionally operating cross-account.

    When AV_SCAN_ROLE_ARN is set, S3/SNS access to the scanned object's
    account goes through an assumed role; definition downloads and status
    SNS always use the local account. Objects flagged by
    object_does_not_require_scan are skipped entirely.
    """
    if AV_SCAN_ROLE_ARN:
        # Assume the scan role and build clients bound to its temporary creds.
        sts_client = boto3.client("sts")
        sts_response = sts_client.assume_role(
            RoleArn=AV_SCAN_ROLE_ARN, RoleSessionName="AVScanRoleAssumption"
        )
        session = boto3.session.Session(
            aws_access_key_id=sts_response["Credentials"]["AccessKeyId"],
            aws_secret_access_key=sts_response["Credentials"]["SecretAccessKey"],
            aws_session_token=sts_response["Credentials"]["SessionToken"],
        )
        s3_cross_account = session.resource("s3")
        s3_cross_account_client = session.client("s3")
        sns_cross_account_client = session.client("sns")
    else:
        # No role configured: "cross-account" clients are plain local clients.
        s3_cross_account = boto3.resource("s3")
        s3_cross_account_client = boto3.client("s3")
        sns_cross_account_client = boto3.client("sns")

    s3_local_account = boto3.resource("s3")
    s3_local_account_client = boto3.client("s3")
    sns_local_account_client = boto3.client("sns")

    # Get some environment variables
    ENV = os.getenv("ENV", "")

    start_time = get_timestamp()
    print("Script starting at %s\n" % (start_time))
    print("Event received: %s" % event)
    s3_object = event_object(event, s3_resource=s3_cross_account)

    if str_to_bool(AV_PROCESS_ORIGINAL_VERSION_ONLY):
        verify_s3_object_version(s3_cross_account, s3_object)

    if object_does_not_require_scan(
        s3_cross_account_client, s3_object.bucket_name, s3_object.key
    ):
        # Known-safe object: announce the skip (if configured) and do nothing.
        if AV_STATUS_SNS_ARN not in [None, ""]:
            sns_skip_scan(
                sns_local_account_client, s3_object, AV_STATUS_SNS_ARN, get_timestamp()
            )
        print(
            "Scan of s3://%s was skipped due to the file being safely generated by a VISO process"
            % os.path.join(s3_object.bucket_name, s3_object.key)
        )
    else:
        # Publish the start time of the scan
        if AV_SCAN_START_SNS_ARN not in [None, ""]:
            start_scan_time = get_timestamp()
            sns_start_scan(
                sns_local_account_client,
                s3_object,
                AV_SCAN_START_SNS_ARN,
                start_scan_time,
            )

        file_path = get_local_path(s3_object, "/tmp")
        create_dir(os.path.dirname(file_path))
        s3_object.download_file(file_path)

        # Sync stale virus definitions from the local account's bucket.
        to_download = clamav.update_defs_from_s3(
            s3_local_account_client, AV_DEFINITION_S3_BUCKET, AV_DEFINITION_S3_PREFIX
        )

        for download in to_download.values():
            s3_path = download["s3_path"]
            local_path = download["local_path"]
            print("Downloading definition file %s from s3://%s" % (local_path, s3_path))
            s3_local_account.Bucket(AV_DEFINITION_S3_BUCKET).download_file(
                s3_path, local_path
            )
            print("Downloading definition file %s complete!" % (local_path))
        scan_result, scan_signature = clamav.scan_file(file_path)
        print(
            "Scan of s3://%s resulted in %s\n"
            % (os.path.join(s3_object.bucket_name, s3_object.key), scan_result)
        )

        result_time = get_timestamp()
        # Set the properties on the object with the scan results
        if "AV_UPDATE_METADATA" in os.environ:
            set_av_metadata(s3_object, scan_result, scan_signature, result_time)
        set_av_tags(
            s3_cross_account_client, s3_object, scan_result, scan_signature, result_time
        )

        # Publish the scan results
        if AV_STATUS_SNS_ARN not in [None, ""]:
            sns_scan_results(
                sns_local_account_client,
                s3_object,
                AV_STATUS_SNS_ARN,
                scan_result,
                scan_signature,
                result_time,
            )

        # Publish clean scan results cross account
        if (
            scan_result == AV_STATUS_CLEAN
            and str_to_bool(AV_STATUS_SNS_PUBLISH_CLEAN)
            and AV_STATUS_CLEAN_SNS_ARN not in [None, ""]
        ):
            sns_scan_results(
                sns_cross_account_client,
                s3_object,
                AV_STATUS_CLEAN_SNS_ARN,
                scan_result,
                scan_signature,
                result_time,
            )

        metrics.send(
            env=ENV, bucket=s3_object.bucket_name, key=s3_object.key, status=scan_result
        )
        # Delete downloaded file to free up room on re-usable lambda function container
        try:
            os.remove(file_path)
        except OSError:
            pass
        if str_to_bool(AV_DELETE_INFECTED_FILES) and scan_result == AV_STATUS_INFECTED:
            sns_delete_results(s3_object, scan_result)
            delete_s3_object(s3_object)

    stop_scan_time = get_timestamp()
    print("Script finished at %s\n" % stop_scan_time)
Ejemplo n.º 19
0
def lambda_handler(event, context):
    """Scan an S3 object with ClamAV, skipping redundant or oversized work.

    The scan is skipped when the object was already scanned with the current
    definition set (md5 tag match) or exceeds the size limit (tagged
    AV_STATUS_SKIPPED). Results are tagged onto the object, published to SNS,
    sent to metrics, and infected objects are optionally deleted.
    """
    s3 = boto3.resource("s3")
    s3_client = boto3.client("s3")
    sns_client = boto3.client("sns")

    # Get some environment variables
    ENV = os.getenv("ENV", "")
    EVENT_SOURCE = os.getenv("EVENT_SOURCE", "S3")

    start_time = get_timestamp()
    print("Script starting at %s\n" % (start_time))
    s3_object = event_object(event, event_source=EVENT_SOURCE)

    if str_to_bool(AV_PROCESS_ORIGINAL_VERSION_ONLY):
        verify_s3_object_version(s3, s3_object)

    # Publish the start time of the scan
    if AV_SCAN_START_SNS_ARN not in [None, ""]:
        start_scan_time = get_timestamp()
        sns_start_scan(sns_client, s3_object, AV_SCAN_START_SNS_ARN, start_scan_time)

    file_path = get_local_path(s3_object, "/tmp")
    create_dir(os.path.dirname(file_path))
    s3_object.download_file(file_path)

    to_download = clamav.update_defs_from_s3(
        s3_client, AV_DEFINITION_S3_BUCKET, AV_DEFINITION_S3_PREFIX
    )

    for download in to_download.values():
        s3_path = download["s3_path"]
        local_path = download["local_path"]
        print("Downloading definition file %s from s3://%s" % (local_path, s3_path))
        s3.Bucket(AV_DEFINITION_S3_BUCKET).download_file(s3_path, local_path)
        print("Downloading definition file %s complete!" % (local_path))

    # Calculate the md5 of the virus definition files.
    definition_md5 = clamav.get_definition_md5()

    # Check the object for an existing definition md5 hash tag.
    s3_definition_md5 = clamav.md5_from_s3_tags(s3_client, s3_object.bucket_name, s3_object.key, AV_DEFINITION_MD5_METADATA)

    # Skip if there is a match: the object was already scanned with these defs.
    if definition_md5 == s3_definition_md5:
        print("Not scanning because local defintion md5 matches s3 defintion md5.")
        # BUGFIX: remove the already-downloaded file before returning so it
        # does not accumulate in /tmp on the re-usable Lambda container
        # (every other exit path below deletes it).
        try:
            os.remove(file_path)
        except OSError:
            pass
        return

    # Set AV_STATUS_SKIPPED if file exceeds maximum file size
    s3_object_size_result = check_s3_object_size(s3, s3_object)
    if s3_object_size_result == AV_STATUS_SKIPPED:
        scan_result = s3_object_size_result
        scan_signature = AV_SIGNATURE_UNKNOWN
    else:
        scan_result, scan_signature = clamav.scan_file(file_path)

    print(
        "Scan of s3://%s resulted in %s\n"
        % (os.path.join(s3_object.bucket_name, s3_object.key), scan_result)
    )

    result_time = get_timestamp()
    # Set the properties on the object with the scan results
    if "AV_UPDATE_METADATA" in os.environ:
        # AV_UPDATE_METADATA doesn't seem to be set anywhere - likely cant get here
        set_av_metadata(s3_object, scan_result, scan_signature, result_time)
    set_av_tags(s3_client, s3_object, scan_result, scan_signature, result_time, definition_md5)

    # Publish the scan results
    if AV_STATUS_SNS_ARN not in [None, ""]:
        sns_scan_results(
            sns_client,
            s3_object,
            AV_STATUS_SNS_ARN,
            scan_result,
            scan_signature,
            result_time,
        )

    metrics.send(
        env=ENV, bucket=s3_object.bucket_name, key=s3_object.key, status=scan_result
    )
    # Delete downloaded file to free up room on re-usable lambda function container
    try:
        os.remove(file_path)
    except OSError:
        pass
    if str_to_bool(AV_DELETE_INFECTED_FILES) and scan_result == AV_STATUS_INFECTED:
        delete_s3_object(s3_object)
    stop_scan_time = get_timestamp()
    print("Script finished at %s\n" % stop_scan_time)
Ejemplo n.º 20
0
def lambda_handler(event, context):
    """Scan the S3 object referenced by *event* with ClamAV and publish results.

    Steps:
      1. Resolve the S3 object from the triggering event (S3 or SNS, per
         the EVENT_SOURCE environment variable).
      2. Optionally verify only the original object version is processed.
      3. Download the object and any stale ClamAV definition files to /tmp.
      4. Scan the file, tag the object with the result, publish the result
         to SNS (if configured), and emit metrics.
      5. Always clean up the /tmp copy; optionally delete infected objects.

    Args:
        event: Lambda trigger payload (shape depends on EVENT_SOURCE).
        context: Lambda context object (unused).
    """
    s3 = boto3.resource("s3")
    s3_client = boto3.client("s3")
    sns_client = boto3.client("sns")

    # Read per-invocation so environment changes are seen on warm starts.
    ENV = os.getenv("ENV", "")
    EVENT_SOURCE = os.getenv("EVENT_SOURCE", "S3")

    start_time = get_timestamp()
    print("Script starting at %s\n" % (start_time))
    s3_object = event_object(event, event_source=EVENT_SOURCE)

    if str_to_bool(AV_PROCESS_ORIGINAL_VERSION_ONLY):
        verify_s3_object_version(s3, s3_object)

    # Publish the start time of the scan (opt-in via AV_SCAN_START_SNS_ARN).
    if AV_SCAN_START_SNS_ARN not in [None, ""]:
        start_scan_time = get_timestamp()
        sns_start_scan(sns_client, s3_object, AV_SCAN_START_SNS_ARN,
                       start_scan_time)

    file_path = get_local_path(s3_object, "/tmp")
    create_dir(os.path.dirname(file_path))
    s3_object.download_file(file_path)

    try:
        # Refresh virus definition files that are stale or missing locally.
        to_download = clamav.update_defs_from_s3(s3_client,
                                                 AV_DEFINITION_S3_BUCKET,
                                                 AV_DEFINITION_S3_PREFIX)
        for download in to_download.values():
            s3_path = download["s3_path"]
            local_path = download["local_path"]
            print("Downloading definition file %s from s3://%s/%s" %
                  (local_path, AV_DEFINITION_S3_BUCKET, s3_path))
            s3.Bucket(AV_DEFINITION_S3_BUCKET).download_file(s3_path, local_path)
            print("Downloading definition file %s complete!" % (local_path))

        scan_result, scan_signature = clamav.scan_file(file_path)
        print("Scan of s3://%s resulted in %s\n" %
              (os.path.join(s3_object.bucket_name, s3_object.key), scan_result))

        result_time = get_timestamp()
        # Set the properties on the object with the scan results
        if "AV_UPDATE_METADATA" in os.environ:
            set_av_metadata(s3_object, scan_result, scan_signature, result_time)
        set_av_tags(s3_client, s3_object, scan_result, scan_signature, result_time)

        # Publish the scan results (opt-in via AV_STATUS_SNS_ARN).
        if AV_STATUS_SNS_ARN not in [None, ""]:
            sns_scan_results(
                sns_client,
                s3_object,
                AV_STATUS_SNS_ARN,
                scan_result,
                scan_signature,
                result_time,
            )

        metrics.send(env=ENV,
                     bucket=s3_object.bucket_name,
                     key=s3_object.key,
                     status=scan_result)
    finally:
        # Always delete the downloaded file to free up room on the re-usable
        # lambda container — even when the scan or result publishing raised,
        # so failures don't accumulate files in the size-limited /tmp.
        try:
            os.remove(file_path)
        except OSError:
            pass

    if str_to_bool(
            AV_DELETE_INFECTED_FILES) and scan_result == AV_STATUS_INFECTED:
        delete_s3_object(s3_object)
    stop_scan_time = get_timestamp()
    print("Script finished at %s\n" % stop_scan_time)