Example #1
def scan_object(s3_object):
    verify_s3_object_version(s3_object)
    try:
        is_object_scannable(s3_object)
    except SizeError as e:
        print(e.msg)
        return
    sns_start_scan(s3_object)
    file_path = download_s3_object(s3_object, "/tmp")
    clamav.update_defs_from_s3(AV_DEFINITION_S3_BUCKET,
                               AV_DEFINITION_S3_PREFIX)
    scan_result = clamav.scan_file(file_path)
    print("Scan of s3://%s resulted in %s\n" %
          (os.path.join(s3_object.bucket_name, s3_object.key), scan_result))
    if "AV_UPDATE_METADATA" in os.environ:
        set_av_metadata(s3_object, scan_result)
    set_av_tags(s3_object, scan_result)
    sns_scan_results(s3_object, scan_result)
    metrics.send(env=ENV,
                 bucket=s3_object.bucket_name,
                 key=s3_object.key,
                 status=scan_result)
    # Delete downloaded file to free up room on re-usable lambda function container
    try:
        os.remove(file_path)
    except OSError:
        pass
Example #2
def lambda_handler(event, context):
    start_time = datetime.utcnow()
    print("Script starting at %s\n" %
          (start_time.strftime("%Y/%m/%d %H:%M:%S UTC")))
    s3_object = event_object(event)
    file_path = download_s3_object(s3_object, "/tmp")
    if AV_EXCLUDE_PATTERN is not None:
        if re.search(AV_EXCLUDE_PATTERN, file_path) is not None:
            print("File path matched exlusion pattern:%s" % AV_EXCLUDE_PATTERN)
            return None
    clamav.update_defs_from_s3(AV_DEFINITION_S3_BUCKET,
                               AV_DEFINITION_S3_PREFIX)
    filehash = clamav.md5_from_file(file_path)
    scan_result = clamav.scan_file(file_path)
    print("Scan of s3://%s resulted in %s\n" %
          (os.path.join(s3_object.bucket_name, s3_object.key), scan_result))
    if "AV_UPDATE_METADATA" in os.environ:
        set_av_metadata(s3_object, scan_result, filehash)
    set_av_tags(s3_object, scan_result, filehash)
    sns_scan_results(s3_object, scan_result)
    metrics.send(env=ENV,
                 bucket=s3_object.bucket_name,
                 key=s3_object.key,
                 status=scan_result,
                 hash=filehash)
    # Delete downloaded file to free up room on re-usable lambda function container
    try:
        os.remove(file_path)
    except OSError:
        pass
    print("Script finished at %s\n" %
          datetime.utcnow().strftime("%Y/%m/%d %H:%M:%S UTC"))
Example #3
def lambda_handler(event, context):
    start_time = datetime.utcnow()
    print("Script starting at %s\n" % (start_time.strftime("%Y/%m/%d %H:%M:%S UTC")))
    s3_object = event_object(event)
    verify_s3_object_version(s3_object)
    sns_start_scan(s3_object)
    file_path = download_s3_object(s3_object, "/tmp")
    clamav.update_defs_from_s3(AV_DEFINITION_S3_BUCKET, AV_DEFINITION_S3_PREFIX)
    scan_result = clamav.scan_file(file_path)
    print(
        "Scan of s3://%s resulted in %s\n"
        % (os.path.join(s3_object.bucket_name, s3_object.key), scan_result)
    )
    if "AV_UPDATE_METADATA" in os.environ:
        set_av_metadata(s3_object, scan_result)
    set_av_tags(s3_object, scan_result)
    sns_scan_results(s3_object, scan_result)
    metrics.send(
        env=ENV, bucket=s3_object.bucket_name, key=s3_object.key, status=scan_result
    )
    # Delete downloaded file to free up room on re-usable lambda function container
    try:
        os.remove(file_path)
    except OSError:
        pass
    if str_to_bool(AV_DELETE_INFECTED_FILES) and scan_result == AV_STATUS_INFECTED:
        delete_s3_object(s3_object)
    print(
        "Script finished at %s\n" % datetime.utcnow().strftime("%Y/%m/%d %H:%M:%S UTC")
    )
Example #4
def lambda_handler(event, context):
    start_time = datetime.utcnow()
    print("Script starting at %s\n" %
          (start_time.strftime("%Y/%m/%d %H:%M:%S UTC")))
    s3_object = event_object(event)
    (webhook, auth) = event_webhook(event)
    webhook_scan_started(s3_object, webhook, auth)
    file_path = download_s3_object(s3_object, "/tmp")
    clamav.update_defs_from_s3(AV_DEFINITION_S3_BUCKET,
                               AV_DEFINITION_S3_PREFIX)
    (scan_result, scan_output) = clamav.scan_file(file_path)
    print("Scan of s3://%s resulted in %s\n" %
          (os.path.join(s3_object.bucket_name, s3_object.key), scan_result))
    if "AV_UPDATE_METADATA" in os.environ:
        set_av_metadata(s3_object, scan_result)
    set_av_tags(s3_object, scan_result)
    sns_scan_results(s3_object, scan_result)
    webhook_scan_results(s3_object, scan_result, scan_output, webhook, auth)
    metrics.send(env=ENV,
                 bucket=s3_object.bucket_name,
                 key=s3_object.key,
                 status=scan_result)
    # Delete downloaded file to free up room on re-usable lambda function container
    try:
        os.remove(file_path)
    except OSError:
        pass
    print("Script finished at %s\n" %
          datetime.utcnow().strftime("%Y/%m/%d %H:%M:%S UTC"))
Example #5
def lambda_handler_process_all_bucket_objects(event, context):
    print("Script starting at %s\n" %
          (datetime.utcnow().strftime("%Y/%m/%d %H:%M:%S UTC")))

    if AV_SCAN_ALL_OBJECTS_S3_BUCKET is None or AV_SCAN_ALL_OBJECTS_S3_BUCKET_PREFIX is None:
        print(
            "You must define env variable AV_SCAN_ALL_OBJECTS_S3_BUCKET and AV_SCAN_ALL_OBJECTS_S3_BUCKET_PREFIX"
        )
        return

    # Update the ClamAV definitions once, before processing the bucket's objects
    clamav.update_defs_from_s3(AV_DEFINITION_S3_BUCKET,
                               AV_DEFINITION_S3_PREFIX)

    bucket = s3.Bucket(AV_SCAN_ALL_OBJECTS_S3_BUCKET)
    for obj in bucket.objects.filter(
            Prefix=AV_SCAN_ALL_OBJECTS_S3_BUCKET_PREFIX).page_size(
                AV_SCAN_ALL_OBJECTS_S3_PAGE_SIZE):

        # Skip keys that represent folders
        if not obj.key.endswith('/'):
            # Skip objects that have already been scanned and tagged
            if not check_av_tag(bucket.name, obj.key):
                print("processing object %s" % obj.key)
                s3_object = get_S3_object(bucket.name, obj.key)
                verify_s3_object_version(s3_object)
                file_path = download_s3_object(s3_object, "/tmp")

                scan_result = clamav.scan_file(file_path)
                print("Scan of s3://%s resulted in %s\n" % (os.path.join(
                    s3_object.bucket_name, s3_object.key), scan_result))

                if "AV_UPDATE_METADATA" in os.environ:
                    set_av_metadata(s3_object, scan_result)

                # handle case when an object is removed before the scan is completed
                tag_obj = True
                if AV_CHECK_FOR_FILE_BEFORE_TAGGING:
                    try:
                        get_S3_object(bucket.name, obj.key)
                    except s3_client.exceptions.NoSuchKey:
                        print("S3 object %s not found, skip tagging" % obj.key)
                        tag_obj = False
                if tag_obj:
                    set_av_tags(s3_object, scan_result)

                # Delete downloaded file to free up room on re-usable lambda function container
                try:
                    os.remove(file_path)
                except OSError:
                    print("ERROR - Fail removing file %s " % file_path)
                    pass
            else:
                print("Skipped object %s - already has an AV tag" % obj.key)
        else:
            print("Skipped object %s - key is a folder" % obj.key)

    print("Script finished at %s\n" %
          datetime.utcnow().strftime("%Y/%m/%d %H:%M:%S UTC"))
Example #6
def lambda_handler(event, context):
    s3 = boto3.resource("s3", endpoint_url=S3_ENDPOINT)
    s3_client = boto3.client("s3", endpoint_url=S3_ENDPOINT)
    sns_client = boto3.client("sns", endpoint_url=SNS_ENDPOINT)

    start_clamd(s3, s3_client)

    # Get some environment variables
    EVENT_SOURCE = os.getenv("EVENT_SOURCE", "S3")

    start_time = get_timestamp()
    logging.debug("Script starting at %s\n" % (start_time))
    s3_object = event_object(event, event_source=EVENT_SOURCE)

    if str_to_bool(AV_PROCESS_ORIGINAL_VERSION_ONLY):
        verify_s3_object_version(s3, s3_object)

    # Publish the start time of the scan
    if AV_SCAN_START_SNS_ARN not in [None, ""]:
        start_scan_time = get_timestamp()
        sns_start_scan(sns_client, s3_object, AV_SCAN_START_SNS_ARN,
                       start_scan_time)

    with tempfile.TemporaryDirectory(prefix=EFS_SCAN_FILE_PATH) as tmpdirname:
        file_path = get_local_path(s3_object, tmpdirname)
        create_dir(os.path.dirname(file_path))
        s3_object.download_file(file_path)

        scan_result, scan_signature = clamav.scan_file(file_path)
        logging.info(
            "Scan of s3://%s resulted in %s\n" %
            (os.path.join(s3_object.bucket_name, s3_object.key), scan_result))

        result_time = get_timestamp()
        # Set the properties on the object with the scan results
        if "AV_UPDATE_METADATA" in os.environ:
            set_av_metadata(s3_object, scan_result, scan_signature,
                            result_time)
        set_av_tags(s3_client, s3_object, scan_result, scan_signature,
                    result_time)

        # Publish the scan results
        if AV_STATUS_SNS_ARN not in [None, ""]:
            sns_scan_results(
                sns_client,
                s3_object,
                AV_STATUS_SNS_ARN,
                scan_result,
                scan_signature,
                result_time,
            )

    stop_scan_time = get_timestamp()
    logging.debug("Script finished at %s\n" % stop_scan_time)
Example #7
def do_scan(file_path, bucket, key):
    print('Scanning object with the ClamAV script.')
    start_time = datetime.utcnow()
    print("Script starting at %s\n" %
          (start_time.strftime("%Y/%m/%d %H:%M:%S UTC")))
    clamav.update_defs_from_s3(AV_DEFINITION_S3_BUCKET, AV_DEFINITION_S3_PREFIX)
    scan_result = clamav.scan_file(file_path)
    print("Scan of s3://%s resulted in %s\n" % (os.path.join(bucket, key), scan_result))
    set_status_tag_local(AV_STATUS_METADATA, scan_result, AV_TIMESTAMP_METADATA)
    print("Script finished at %s\n" %
          datetime.utcnow().strftime("%Y/%m/%d %H:%M:%S UTC"))
    return scan_result
Example #8
def lambda_handler(event, context):
    s3 = boto3.resource("s3")
    sns_client = boto3.client("sns")

    # Get some environment variables
    EVENT_SOURCE = os.getenv("EVENT_SOURCE", "S3")

    start_time = get_timestamp()
    print("Script starting at %s\n" % (start_time))
    s3_object = event_object(event, event_source=EVENT_SOURCE)

    if str_to_bool(AV_PROCESS_ORIGINAL_VERSION_ONLY):
        verify_s3_object_version(s3, s3_object)

    # Publish the start time of the scan
    if AV_SCAN_START_SNS_ARN not in [None, ""]:
        start_scan_time = get_timestamp()
        sns_start_scan(sns_client, s3_object, AV_SCAN_START_SNS_ARN,
                       start_scan_time)

    file_path = get_local_path(s3_object, "/tmp")
    create_dir(os.path.dirname(file_path))
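    # errno 28 (ENOSPC) below means the download filled /tmp, so the object is too large to scan locally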
    try:
        s3_object.download_file(file_path)
    except OSError as e:
        remove_file(file_path)
        if e.errno == 28:
            print("Ran out of disk space. Scan failed")
            publish_results(s3_object, AV_STATUS_FAILED,
                            "File too large to scan")
            return
        else:
            raise

    download_clamav_databases()

    scan_result, scan_signature = clamav.scan_file(file_path)
    print("Scan of s3://%s resulted in %s\n" %
          (os.path.join(s3_object.bucket_name, s3_object.key), scan_result))

    publish_results(s3_object, scan_result, scan_signature)

    # Delete downloaded file to free up room on re-usable lambda function container
    remove_file(file_path)
    if str_to_bool(
            AV_DELETE_INFECTED_FILES) and scan_result == AV_STATUS_INFECTED:
        delete_s3_object(s3_object)
    stop_scan_time = get_timestamp()
    print("Script finished at %s\n" % stop_scan_time)
Example #9
def lambda_handler(event, context):
    start_time = datetime.utcnow()
    print("Script starting at %s\n" %
          (start_time.strftime("%Y/%m/%d %H:%M:%S UTC")))
    s3_object, bucket, key_name = event_object(event)
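    # Record the scan request in the datastore with an "In Process" state before scanning begins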
    inserted_date = calendar.timegm(time.gmtime())
    updated_date = calendar.timegm(time.gmtime())
    data_to_store = {
        "s3_key": key_name,
        "bucket_name": bucket,
        "inserted_date": inserted_date,
        "updated_date": updated_date,
        "scan_state": "In Process"
    }
    trans_id = insert_data(data_to_store)
    verify_s3_object_version(s3_object)
    sns_start_scan(s3_object)
    file_path = download_s3_object(s3_object, "/tmp")
    clamav.update_defs_from_s3(AV_DEFINITION_S3_BUCKET,
                               AV_DEFINITION_S3_PREFIX)
    scan_result = clamav.scan_file(file_path, trans_id)
    updated_date = calendar.timegm(time.gmtime())
    data = {"scan_state": scan_result, "updated_date": updated_date}
    query = {"id": trans_id}
    update_data(query, data)
    print("Scan of s3://%s resulted in %s\n" %
          (os.path.join(s3_object.bucket_name, s3_object.key), scan_result))
    if "AV_UPDATE_METADATA" in os.environ:
        set_av_metadata(s3_object, scan_result)
    set_av_tags(s3_object, scan_result)
    sns_scan_results(s3_object, scan_result)
    metrics.send(env=ENV,
                 bucket=s3_object.bucket_name,
                 key=s3_object.key,
                 status=scan_result)
    # Delete downloaded file to free up room on re-usable lambda function container
    try:
        os.remove(file_path)
    except OSError:
        pass
    if str_to_bool(
            AV_DELETE_INFECTED_FILES) and scan_result == AV_STATUS_INFECTED:
        delete_s3_object(s3_object)
    print("Script finished at %s\n" %
          datetime.utcnow().strftime("%Y/%m/%d %H:%M:%S UTC"))
Example #10
File: scan.py Project: stackpoet/clara
def lambda_handler(event, context):
    start_time = datetime.utcnow()
    print("Script starting at %s\n" %
          (start_time.strftime("%Y/%m/%d %H:%M:%S UTC")))
    s3_object = event_object(event)
    verify_s3_object_version(s3_object)
    sns_start_scan(s3_object)
    file_path = download_s3_object(s3_object, "/tmp")
    clamav.update_defs_from_s3(AV_DEFINITION_S3_BUCKET,
                               AV_DEFINITION_S3_PREFIX)
    scan_result = clamav.scan_file(file_path)
    slack_notification(scan_result)
    print("yara scanning to begin")
    yarascan.update_sigs_from_s3(YARA_RULES_S3_BUCKET, YARA_RULES_S3_PREFIX)
    scan_result_yara = yarascan.scan_file(file_path)
    print(scan_result_yara)
    lambda_result = {"clamav": "Detected", "yara": "Dummrule1.yara"}
    with open(file_path, 'rb') as f:
        filename = os.path.basename(file_path)
        print("sending control to fsf")
        fsf = fsf_client.FSFClient(file_path, f.name, False, 'Analyst', False,
                                   False, False, f.read(), lambda_result)
        print("initiating submission")
        print(fsf.initiate_submission())
    print("Scan of s3://%s resulted in %s\n" %
          (os.path.join(s3_object.bucket_name, s3_object.key), scan_result))
    if "AV_UPDATE_METADATA" in os.environ:
        set_av_metadata(s3_object, scan_result)
    set_av_tags(s3_object, scan_result)
    sns_scan_results(s3_object, scan_result)
    #metrics.send(env=ENV, bucket=s3_object.bucket_name, key=s3_object.key, status=scan_result)
    # Delete downloaded file to free up room on re-usable lambda function container
    try:
        os.remove(file_path)
    except OSError:
        pass
    if str_to_bool(
            AV_DELETE_INFECTED_FILES) and scan_result == AV_STATUS_INFECTED:
        delete_s3_object(s3_object)
    print("Script finished at %s\n" %
          datetime.utcnow().strftime("%Y/%m/%d %H:%M:%S UTC"))
Example #11
def lambda_handler(event, context):
    start_time = datetime.utcnow()
    print("Script starting at %s\n" %
          (start_time.strftime("%Y/%m/%d %H:%M:%S UTC")))
    s3_object = event_object(event)
    verify_s3_object_version(s3_object)
    sns_start_scan(s3_object)
    file_path = download_s3_object(s3_object, "/tmp")
    clamav.update_defs_from_s3(AV_DEFINITION_S3_BUCKET,
                               AV_DEFINITION_S3_PREFIX)
    scan_result = clamav.scan_file(file_path)
    print("Scan of s3://%s resulted in %s\n" %
          (os.path.join(s3_object.bucket_name, s3_object.key), scan_result))
    if "AV_UPDATE_METADATA" in os.environ:
        set_av_metadata(s3_object, scan_result)

    try:
        set_av_tags(s3_object, scan_result)
    except s3_client.exceptions.NoSuchKey:
        # handle case when an object is removed before the scan is completed
        if AV_CHECK_FOR_FILE_BEFORE_TAGGING:
            print("S3 object not found, skip tagging")
        else:
            raise Exception("We have a problem with obj tagging")

    sns_scan_results(s3_object, scan_result)
    metrics.send(env=ENV,
                 bucket=s3_object.bucket_name,
                 key=s3_object.key,
                 status=scan_result)
    # Delete downloaded file to free up room on re-usable lambda function container
    try:
        os.remove(file_path)
    except OSError:
        pass
    print("Script finished at %s\n" %
          datetime.utcnow().strftime("%Y/%m/%d %H:%M:%S UTC"))
Example #12
def lambda_handler(event, context):
    start_time = datetime.utcnow()
    print("Script starting at %s\n" %
          (start_time.strftime("%Y/%m/%d %H:%M:%S UTC")))
    
    s3_object_summary = event_object(event, True)
    s3_object = event_object(event, False)
    file_to_scan = os.path.join(s3_object.bucket_name, s3_object.key)
    file_size_in_mb = s3_object_summary.size / 1024 / 1024
    will_skip = float(file_size_in_mb) >= float(AV_SCAN_SKIP_SIZE_IN_MB)

    print("s3://%s\n" % (file_to_scan))
    print("File size: %s bytes (%sMB), AV_SCAN_SKIP_SIZE_IN_MB: %s, will skip: %s\n" %
          (s3_object_summary.size, file_size_in_mb, AV_SCAN_SKIP_SIZE_IN_MB, will_skip))

    if will_skip:
        set_av_tags(s3_object, AV_STATUS_SKIPPED)
    else:
        verify_s3_object_version(s3_object)
        sns_start_scan(s3_object)
        file_path = download_s3_object(s3_object, "/tmp")
        clamav.update_defs_from_s3(AV_DEFINITION_S3_BUCKET, AV_DEFINITION_S3_PREFIX)
        scan_result = clamav.scan_file(file_path)
        print("Scan of s3://%s resulted in %s\n" % (file_to_scan, scan_result))
        if "AV_UPDATE_METADATA" in os.environ:
            set_av_metadata(s3_object, scan_result)
        set_av_tags(s3_object, scan_result)
        sns_scan_results(s3_object, scan_result)
        metrics.send(env=ENV, bucket=s3_object.bucket_name, key=s3_object.key, status=scan_result)
        # Delete downloaded file to free up room on re-usable lambda function container
        try:
            os.remove(file_path)
        except OSError:
            pass
    print("Script finished at %s\n" %
          datetime.utcnow().strftime("%Y/%m/%d %H:%M:%S UTC"))
Example #13
def lambda_handler(event, context):
    global clamd_pid

    aws_config = Config(connect_timeout=5)

    s3 = boto3.resource("s3")
    s3_client = boto3.client("s3", config=aws_config)
    sns_client = boto3.client("sns", config=aws_config)

    # Get some environment variables
    ENV = os.getenv("ENV", "")
    EVENT_SOURCE = os.getenv("EVENT_SOURCE", "S3")

    if not clamav.is_clamd_running():
        if clamd_pid is not None:
            kill_process_by_pid(clamd_pid)

        clamd_pid = clamav.start_clamd_daemon()
        print("Clamd PID: %s" % clamd_pid)

    start_time = get_timestamp()
    print("Script starting at %s\n" % (start_time))
    s3_object = event_object(event, event_source=EVENT_SOURCE)

    if str_to_bool(AV_PROCESS_ORIGINAL_VERSION_ONLY):
        verify_s3_object_version(s3, s3_object)

    # Publish the start time of the scan
    if AV_SCAN_START_SNS_ARN not in [None, ""]:
        start_scan_time = get_timestamp()
        sns_start_scan(sns_client, s3_object, AV_SCAN_START_SNS_ARN,
                       start_scan_time)

    file_path = get_local_path(s3_object, "/tmp")
    create_dir(os.path.dirname(file_path))
    s3_object.download_file(file_path)

    scan_result, scan_signature = clamav.scan_file(file_path)
    print("Scan of s3://%s resulted in %s\n" %
          (os.path.join(s3_object.bucket_name, s3_object.key), scan_result))

    result_time = get_timestamp()
    # Set the properties on the object with the scan results
    if "AV_UPDATE_METADATA" in os.environ:
        set_av_metadata(s3_object, scan_result, scan_signature, result_time)
    set_av_tags(s3_client, s3_object, scan_result, scan_signature, result_time)

    # Publish the scan results
    if AV_STATUS_SNS_ARN not in [None, ""]:
        sns_scan_results(
            sns_client,
            s3_object,
            AV_STATUS_SNS_ARN,
            scan_result,
            scan_signature,
            result_time,
        )

    metrics.send(env=ENV,
                 bucket=s3_object.bucket_name,
                 key=s3_object.key,
                 status=scan_result)
    # Delete downloaded file to free up room on re-usable lambda function container
    try:
        os.remove(file_path)
    except OSError:
        pass
    if str_to_bool(
            AV_DELETE_INFECTED_FILES) and scan_result == AV_STATUS_INFECTED:
        delete_s3_object(s3_object)
    stop_scan_time = get_timestamp()
    print("Script finished at %s\n" % stop_scan_time)
Example #14
def lambda_handler(event, context):
    s3 = boto3.resource("s3")
    s3_client = boto3.client("s3")
    sns_client = boto3.client("sns")

    # Get some environment variables
    ENV = os.getenv("ENV", "")
    EVENT_SOURCE = os.getenv("EVENT_SOURCE", "S3")

    start_time = get_timestamp()
    print("Script starting at %s\n" % (start_time))
    s3_object = event_object(event, event_source=EVENT_SOURCE)

    if str_to_bool(AV_PROCESS_ORIGINAL_VERSION_ONLY):
        verify_s3_object_version(s3, s3_object)

    # Publish the start time of the scan
    if AV_SCAN_START_SNS_ARN not in [None, ""]:
        start_scan_time = get_timestamp()
        sns_start_scan(sns_client, s3_object, AV_SCAN_START_SNS_ARN,
                       start_scan_time)

    file_path = get_local_path(s3_object, "/tmp")
    create_dir(os.path.dirname(file_path))
    s3_object.download_file(file_path)

    to_download = clamav.update_defs_from_s3(s3_client,
                                             AV_DEFINITION_S3_BUCKET,
                                             AV_DEFINITION_S3_PREFIX)

    for download in to_download.values():
        s3_path = download["s3_path"]
        local_path = download["local_path"]
        print("Downloading definition file %s from s3://%s/%s" %
              (local_path, AV_DEFINITION_S3_BUCKET, s3_path))
        s3.Bucket(AV_DEFINITION_S3_BUCKET).download_file(s3_path, local_path)
        print("Downloading definition file %s complete!" % (local_path))
    scan_result, scan_signature = clamav.scan_file(file_path)
    print("Scan of s3://%s resulted in %s\n" %
          (os.path.join(s3_object.bucket_name, s3_object.key), scan_result))

    result_time = get_timestamp()
    # Set the properties on the object with the scan results
    if "AV_UPDATE_METADATA" in os.environ:
        set_av_metadata(s3_object, scan_result, scan_signature, result_time)
    set_av_tags(s3_client, s3_object, scan_result, scan_signature, result_time)

    # Publish the scan results
    if AV_STATUS_SNS_ARN not in [None, ""]:
        sns_scan_results(
            sns_client,
            s3_object,
            AV_STATUS_SNS_ARN,
            scan_result,
            scan_signature,
            result_time,
        )

    metrics.send(env=ENV,
                 bucket=s3_object.bucket_name,
                 key=s3_object.key,
                 status=scan_result)
    # Delete downloaded file to free up room on re-usable lambda function container
    try:
        os.remove(file_path)
    except OSError:
        pass
    if str_to_bool(
            AV_DELETE_INFECTED_FILES) and scan_result == AV_STATUS_INFECTED:
        delete_s3_object(s3_object)
    stop_scan_time = get_timestamp()
    print("Script finished at %s\n" % stop_scan_time)
Example #15
def scan_file(s3_object, file_path):
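    # Report files that fail MIME validation as infected instead of scanning them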
    if not is_mime_valid(s3_object, file_path):
        return AV_STATUS_INFECTED, "Invalid Mime type"

    return clamav.scan_file(file_path)
Example #16
def lambda_handler(event, context):
    if AV_SCAN_ROLE_ARN:
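        # Assume AV_SCAN_ROLE_ARN so the cross-account S3/SNS clients below use the role's temporary credentials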
        sts_client = boto3.client("sts")
        sts_response = sts_client.assume_role(
            RoleArn=AV_SCAN_ROLE_ARN, RoleSessionName="AVScanRoleAssumption"
        )
        session = boto3.session.Session(
            aws_access_key_id=sts_response["Credentials"]["AccessKeyId"],
            aws_secret_access_key=sts_response["Credentials"]["SecretAccessKey"],
            aws_session_token=sts_response["Credentials"]["SessionToken"],
        )
        s3_cross_account = session.resource("s3")
        s3_cross_account_client = session.client("s3")
        sns_cross_account_client = session.client("sns")
    else:
        s3_cross_account = boto3.resource("s3")
        s3_cross_account_client = boto3.client("s3")
        sns_cross_account_client = boto3.client("sns")

    s3_local_account = boto3.resource("s3")
    s3_local_account_client = boto3.client("s3")
    sns_local_account_client = boto3.client("sns")

    # Get some environment variables
    ENV = os.getenv("ENV", "")

    start_time = get_timestamp()
    print("Script starting at %s\n" % (start_time))
    print("Event received: %s" % event)
    s3_object = event_object(event, s3_resource=s3_cross_account)

    if str_to_bool(AV_PROCESS_ORIGINAL_VERSION_ONLY):
        verify_s3_object_version(s3_cross_account, s3_object)

    if object_does_not_require_scan(
        s3_cross_account_client, s3_object.bucket_name, s3_object.key
    ):
        if AV_STATUS_SNS_ARN not in [None, ""]:
            sns_skip_scan(
                sns_local_account_client, s3_object, AV_STATUS_SNS_ARN, get_timestamp()
            )
        print(
            "Scan of s3://%s was skipped due to the file being safely generated by a VISO process"
            % os.path.join(s3_object.bucket_name, s3_object.key)
        )
    else:
        # Publish the start time of the scan
        if AV_SCAN_START_SNS_ARN not in [None, ""]:
            start_scan_time = get_timestamp()
            sns_start_scan(
                sns_local_account_client,
                s3_object,
                AV_SCAN_START_SNS_ARN,
                start_scan_time,
            )

        file_path = get_local_path(s3_object, "/tmp")
        create_dir(os.path.dirname(file_path))
        s3_object.download_file(file_path)

        to_download = clamav.update_defs_from_s3(
            s3_local_account_client, AV_DEFINITION_S3_BUCKET, AV_DEFINITION_S3_PREFIX
        )

        for download in to_download.values():
            s3_path = download["s3_path"]
            local_path = download["local_path"]
            print("Downloading definition file %s from s3://%s" % (local_path, s3_path))
            s3_local_account.Bucket(AV_DEFINITION_S3_BUCKET).download_file(
                s3_path, local_path
            )
            print("Downloading definition file %s complete!" % (local_path))
        scan_result, scan_signature = clamav.scan_file(file_path)
        print(
            "Scan of s3://%s resulted in %s\n"
            % (os.path.join(s3_object.bucket_name, s3_object.key), scan_result)
        )

        result_time = get_timestamp()
        # Set the properties on the object with the scan results
        if "AV_UPDATE_METADATA" in os.environ:
            set_av_metadata(s3_object, scan_result, scan_signature, result_time)
        set_av_tags(
            s3_cross_account_client, s3_object, scan_result, scan_signature, result_time
        )

        # Publish the scan results
        if AV_STATUS_SNS_ARN not in [None, ""]:
            sns_scan_results(
                sns_local_account_client,
                s3_object,
                AV_STATUS_SNS_ARN,
                scan_result,
                scan_signature,
                result_time,
            )

        # Publish clean scan results cross account
        if (
            scan_result == AV_STATUS_CLEAN
            and str_to_bool(AV_STATUS_SNS_PUBLISH_CLEAN)
            and AV_STATUS_CLEAN_SNS_ARN not in [None, ""]
        ):
            sns_scan_results(
                sns_cross_account_client,
                s3_object,
                AV_STATUS_CLEAN_SNS_ARN,
                scan_result,
                scan_signature,
                result_time,
            )

        metrics.send(
            env=ENV, bucket=s3_object.bucket_name, key=s3_object.key, status=scan_result
        )
        # Delete downloaded file to free up room on re-usable lambda function container
        try:
            os.remove(file_path)
        except OSError:
            pass
        if str_to_bool(AV_DELETE_INFECTED_FILES) and scan_result == AV_STATUS_INFECTED:
            sns_delete_results(s3_object, scan_result)
            delete_s3_object(s3_object)

    stop_scan_time = get_timestamp()
    print("Script finished at %s\n" % stop_scan_time)
Example #17
def lambda_handler(event, context):
    s3 = boto3.resource("s3")
    s3_client = boto3.client("s3")
    sns_client = boto3.client("sns")

    # Get some environment variables
    ENV = os.getenv("ENV", "")
    EVENT_SOURCE = os.getenv("EVENT_SOURCE", "S3")

    start_time = get_timestamp()
    print("Script starting at %s\n" % (start_time))
    s3_object = event_object(event, event_source=EVENT_SOURCE)

    if str_to_bool(AV_PROCESS_ORIGINAL_VERSION_ONLY):
        verify_s3_object_version(s3, s3_object)

    # Publish the start time of the scan
    if AV_SCAN_START_SNS_ARN not in [None, ""]:
        start_scan_time = get_timestamp()
        sns_start_scan(sns_client, s3_object, AV_SCAN_START_SNS_ARN, start_scan_time)

    file_path = get_local_path(s3_object, "/tmp")
    create_dir(os.path.dirname(file_path))
    s3_object.download_file(file_path)

    to_download = clamav.update_defs_from_s3(
        s3_client, AV_DEFINITION_S3_BUCKET, AV_DEFINITION_S3_PREFIX
    )

    for download in to_download.values():
        s3_path = download["s3_path"]
        local_path = download["local_path"]
        print("Downloading definition file %s from s3://%s" % (local_path, s3_path))
        s3.Bucket(AV_DEFINITION_S3_BUCKET).download_file(s3_path, local_path)
        print("Downloading definition file %s complete!" % (local_path))

    # Calculate the md5 of the virus definition files
    definition_md5 = clamav.get_definition_md5()

    # Check the object's tags for an existing definition md5 hash
    s3_definition_md5 = clamav.md5_from_s3_tags(
        s3_client, s3_object.bucket_name, s3_object.key, AV_DEFINITION_MD5_METADATA
    )

    # Skip the scan if the object was already scanned with the current definitions
    if definition_md5 == s3_definition_md5:
        print("Not scanning because local definition md5 matches s3 definition md5.")
        return

    # Set AV_STATUS_SKIPPED if file exceeds maximum file size
    s3_object_size_result = check_s3_object_size(s3, s3_object)
    if s3_object_size_result == AV_STATUS_SKIPPED:
        scan_result = s3_object_size_result
        scan_signature = AV_SIGNATURE_UNKNOWN
    else:
        scan_result, scan_signature = clamav.scan_file(file_path)

    print(
        "Scan of s3://%s resulted in %s\n"
        % (os.path.join(s3_object.bucket_name, s3_object.key), scan_result)
    )

    result_time = get_timestamp()
    # Set the properties on the object with the scan results
    if "AV_UPDATE_METADATA" in os.environ:
        # AV_UPDATE_METADATA doesn't seem to be set anywhere, so this branch likely can't be reached
        set_av_metadata(s3_object, scan_result, scan_signature, result_time)
    set_av_tags(s3_client, s3_object, scan_result, scan_signature, result_time, definition_md5)

    # Publish the scan results
    if AV_STATUS_SNS_ARN not in [None, ""]:
        sns_scan_results(
            sns_client,
            s3_object,
            AV_STATUS_SNS_ARN,
            scan_result,
            scan_signature,
            result_time,
        )

    metrics.send(
        env=ENV, bucket=s3_object.bucket_name, key=s3_object.key, status=scan_result
    )
    # Delete downloaded file to free up room on re-usable lambda function container
    try:
        os.remove(file_path)
    except OSError:
        pass
    if str_to_bool(AV_DELETE_INFECTED_FILES) and scan_result == AV_STATUS_INFECTED:
        delete_s3_object(s3_object)
    stop_scan_time = get_timestamp()
    print("Script finished at %s\n" % stop_scan_time)