Example #1
0
def lambda_handler(event, context):
    """Entry point: download the triggering S3 object, virus-scan it with
    ClamAV, record the result (tags, optional metadata, SNS, metrics), and
    clean up the local copy. Infected objects may be deleted when
    AV_DELETE_INFECTED_FILES is enabled.
    """
    started_at = datetime.utcnow()
    print("Script starting at %s\n" % started_at.strftime("%Y/%m/%d %H:%M:%S UTC"))

    target = event_object(event)
    verify_s3_object_version(target)
    sns_start_scan(target)

    local_path = download_s3_object(target, "/tmp")
    clamav.update_defs_from_s3(AV_DEFINITION_S3_BUCKET, AV_DEFINITION_S3_PREFIX)
    result = clamav.scan_file(local_path)
    print(
        "Scan of s3://%s resulted in %s\n"
        % (os.path.join(target.bucket_name, target.key), result)
    )

    # Metadata is opt-in via the environment; tags are always written.
    if os.environ.get("AV_UPDATE_METADATA") is not None:
        set_av_metadata(target, result)
    set_av_tags(target, result)
    sns_scan_results(target, result)
    metrics.send(env=ENV, bucket=target.bucket_name, key=target.key, status=result)

    # Free /tmp space on the re-usable Lambda container.
    try:
        os.remove(local_path)
    except OSError:
        pass

    if str_to_bool(AV_DELETE_INFECTED_FILES) and result == AV_STATUS_INFECTED:
        delete_s3_object(target)
    print(
        "Script finished at %s\n"
        % datetime.utcnow().strftime("%Y/%m/%d %H:%M:%S UTC")
    )
Example #2
0
def lambda_handler(event, context):
    """Scan the S3 object named in the event with ClamAV.

    Files whose local path matches AV_EXCLUDE_PATTERN are skipped (and the
    temporary download is removed). Otherwise the object is scanned and the
    result is recorded via tags, optional metadata, SNS, and metrics,
    together with the file's md5 hash.
    """
    start_time = datetime.utcnow()
    print("Script starting at %s\n" %
          (start_time.strftime("%Y/%m/%d %H:%M:%S UTC")))
    s3_object = event_object(event)
    file_path = download_s3_object(s3_object, "/tmp")
    if AV_EXCLUDE_PATTERN is not None:
        if re.search(AV_EXCLUDE_PATTERN, file_path) is not None:
            # Fixed typo in the log message ("exlusion" -> "exclusion").
            print("File path matched exclusion pattern:%s" % AV_EXCLUDE_PATTERN)
            # Bug fix: remove the already-downloaded file before bailing out,
            # otherwise it leaks /tmp space on the warm Lambda container.
            try:
                os.remove(file_path)
            except OSError:
                pass
            return None
    clamav.update_defs_from_s3(AV_DEFINITION_S3_BUCKET,
                               AV_DEFINITION_S3_PREFIX)
    filehash = clamav.md5_from_file(file_path)
    scan_result = clamav.scan_file(file_path)
    print("Scan of s3://%s resulted in %s\n" %
          (os.path.join(s3_object.bucket_name, s3_object.key), scan_result))
    if "AV_UPDATE_METADATA" in os.environ:
        set_av_metadata(s3_object, scan_result, filehash)
    set_av_tags(s3_object, scan_result, filehash)
    sns_scan_results(s3_object, scan_result)
    metrics.send(env=ENV,
                 bucket=s3_object.bucket_name,
                 key=s3_object.key,
                 status=scan_result,
                 hash=filehash)
    # Delete downloaded file to free up room on re-usable lambda function container
    try:
        os.remove(file_path)
    except OSError:
        pass
    print("Script finished at %s\n" %
          datetime.utcnow().strftime("%Y/%m/%d %H:%M:%S UTC"))
def scan_object(s3_object):
    """Scan one S3 object with ClamAV and record the outcome via tags,
    optional metadata, SNS, and metrics. Objects that fail the size check
    (SizeError from is_object_scannable) are logged and skipped.
    """
    verify_s3_object_version(s3_object)
    try:
        is_object_scannable(s3_object)
    except SizeError as err:
        print(err.msg)
        return

    sns_start_scan(s3_object)
    local_file = download_s3_object(s3_object, "/tmp")
    clamav.update_defs_from_s3(AV_DEFINITION_S3_BUCKET, AV_DEFINITION_S3_PREFIX)

    status = clamav.scan_file(local_file)
    print(
        "Scan of s3://%s resulted in %s\n"
        % (os.path.join(s3_object.bucket_name, s3_object.key), status)
    )

    if os.environ.get("AV_UPDATE_METADATA") is not None:
        set_av_metadata(s3_object, status)
    set_av_tags(s3_object, status)
    sns_scan_results(s3_object, status)
    metrics.send(
        env=ENV, bucket=s3_object.bucket_name, key=s3_object.key, status=status
    )

    # Free /tmp space on the re-usable Lambda container.
    try:
        os.remove(local_file)
    except OSError:
        pass
Example #4
0
def publish_results(s3_object, scan_result, scan_signature):
    """Record a scan outcome: tag the object (and optionally set metadata),
    publish to the status SNS topic when one is configured, and emit metrics.
    """
    result_time = get_timestamp()
    sns_client = boto3.client("sns")
    s3_client = boto3.client("s3")
    ENV = os.getenv("ENV", "")

    # Write scan results onto the object itself first.
    if os.environ.get("AV_UPDATE_METADATA") is not None:
        set_av_metadata(s3_object, scan_result, scan_signature, result_time)
    set_av_tags(s3_client, s3_object, scan_result, scan_signature, result_time)

    # Then notify subscribers, if a topic is configured.
    if AV_STATUS_SNS_ARN not in (None, ""):
        sns_scan_results(
            sns_client,
            s3_object,
            AV_STATUS_SNS_ARN,
            scan_result,
            scan_signature,
            result_time,
        )

    metrics.send(
        env=ENV, bucket=s3_object.bucket_name, key=s3_object.key, status=scan_result
    )
Example #5
0
def lambda_handler(event, context):
    """Download the S3 object from the event, scan it with ClamAV, and push
    the outcome to tags, SNS, the configured webhook, and metrics.
    """
    started_at = datetime.utcnow()
    print("Script starting at %s\n"
          % started_at.strftime("%Y/%m/%d %H:%M:%S UTC"))

    target = event_object(event)
    webhook, auth = event_webhook(event)
    webhook_scan_started(target, webhook, auth)

    local_path = download_s3_object(target, "/tmp")
    clamav.update_defs_from_s3(AV_DEFINITION_S3_BUCKET, AV_DEFINITION_S3_PREFIX)
    result, output = clamav.scan_file(local_path)
    print("Scan of s3://%s resulted in %s\n"
          % (os.path.join(target.bucket_name, target.key), result))

    if os.environ.get("AV_UPDATE_METADATA") is not None:
        set_av_metadata(target, result)
    set_av_tags(target, result)
    sns_scan_results(target, result)
    webhook_scan_results(target, result, output, webhook, auth)
    metrics.send(env=ENV, bucket=target.bucket_name, key=target.key, status=result)

    # Free /tmp space on the re-usable Lambda container.
    try:
        os.remove(local_path)
    except OSError:
        pass
    print("Script finished at %s\n"
          % datetime.utcnow().strftime("%Y/%m/%d %H:%M:%S UTC"))
Example #6
0
def lambda_handler(event, context):
    """Scan the S3 object from the event with ClamAV while tracking the
    scan's lifecycle in an external store via insert_data/update_data.
    """
    start_time = datetime.utcnow()
    print("Script starting at %s\n" %
          (start_time.strftime("%Y/%m/%d %H:%M:%S UTC")))
    s3_object, bucket, key_name = event_object(event)
    # Record a tracking row before scanning; both timestamps start as "now"
    # (seconds since the Unix epoch, UTC).
    inserted_date = calendar.timegm(time.gmtime())
    updated_date = calendar.timegm(time.gmtime())
    data_to_store = {
        "s3_key": key_name,
        "bucket_name": bucket,
        "inserted_date": inserted_date,
        "updated_date": updated_date,
        "scan_state": "In Process"
    }
    trans_id = insert_data(data_to_store)
    verify_s3_object_version(s3_object)
    sns_start_scan(s3_object)
    file_path = download_s3_object(s3_object, "/tmp")
    clamav.update_defs_from_s3(AV_DEFINITION_S3_BUCKET,
                               AV_DEFINITION_S3_PREFIX)
    # trans_id is forwarded to the scanner — presumably for correlating its
    # output with the tracking row; confirm against clamav.scan_file.
    scan_result = clamav.scan_file(file_path, trans_id)
    # Persist the final scan state against the tracking row.
    updated_date = calendar.timegm(time.gmtime())
    data = {"scan_state": scan_result, "updated_date": updated_date}
    query = {"id": trans_id}
    update_data(query, data)
    print("Scan of s3://%s resulted in %s\n" %
          (os.path.join(s3_object.bucket_name, s3_object.key), scan_result))
    if "AV_UPDATE_METADATA" in os.environ:
        set_av_metadata(s3_object, scan_result)
    set_av_tags(s3_object, scan_result)
    sns_scan_results(s3_object, scan_result)
    metrics.send(env=ENV,
                 bucket=s3_object.bucket_name,
                 key=s3_object.key,
                 status=scan_result)
    # Delete downloaded file to free up room on re-usable lambda function container
    try:
        os.remove(file_path)
    except OSError:
        pass
    if str_to_bool(
            AV_DELETE_INFECTED_FILES) and scan_result == AV_STATUS_INFECTED:
        delete_s3_object(s3_object)
    print("Script finished at %s\n" %
          datetime.utcnow().strftime("%Y/%m/%d %H:%M:%S UTC"))
Example #7
0
def lambda_handler(event, context):
    """Scan the triggering S3 object with ClamAV and record the result.

    Tagging tolerates the object disappearing mid-scan when
    AV_CHECK_FOR_FILE_BEFORE_TAGGING is enabled; otherwise the failure is
    re-raised with the original NoSuchKey error chained as its cause.
    """
    start_time = datetime.utcnow()
    print("Script starting at %s\n" %
          (start_time.strftime("%Y/%m/%d %H:%M:%S UTC")))
    s3_object = event_object(event)
    verify_s3_object_version(s3_object)
    sns_start_scan(s3_object)
    file_path = download_s3_object(s3_object, "/tmp")
    clamav.update_defs_from_s3(AV_DEFINITION_S3_BUCKET,
                               AV_DEFINITION_S3_PREFIX)
    scan_result = clamav.scan_file(file_path)
    print("Scan of s3://%s resulted in %s\n" %
          (os.path.join(s3_object.bucket_name, s3_object.key), scan_result))
    if "AV_UPDATE_METADATA" in os.environ:
        set_av_metadata(s3_object, scan_result)

    try:
        set_av_tags(s3_object, scan_result)
    except s3_client.exceptions.NoSuchKey as err:
        # handle case when an object is removed before the scan is completed
        if AV_CHECK_FOR_FILE_BEFORE_TAGGING:
            print("S3 object not found, skip tagging")
        else:
            # Bug fix: chain the original NoSuchKey so the root cause isn't
            # lost when the failure is surfaced.
            raise Exception("We have a problem with obj tagging") from err

    sns_scan_results(s3_object, scan_result)
    metrics.send(env=ENV,
                 bucket=s3_object.bucket_name,
                 key=s3_object.key,
                 status=scan_result)
    # Delete downloaded file to free up room on re-usable lambda function container
    try:
        os.remove(file_path)
    except OSError:
        pass
    print("Script finished at %s\n" %
          datetime.utcnow().strftime("%Y/%m/%d %H:%M:%S UTC"))
def lambda_handler(event, context):
    """Scan the triggering S3 object unless it exceeds AV_SCAN_SKIP_SIZE_IN_MB.

    Oversized objects are tagged AV_STATUS_SKIPPED without being downloaded
    or scanned; everything else is downloaded, scanned with ClamAV, and the
    result recorded via tags, optional metadata, SNS, and metrics.
    """
    start_time = datetime.utcnow()
    print("Script starting at %s\n" %
          (start_time.strftime("%Y/%m/%d %H:%M:%S UTC")))

    s3_object_summary = event_object(event, True)
    s3_object = event_object(event, False)
    file_to_scan = os.path.join(s3_object.bucket_name, s3_object.key)
    file_size_in_mb = s3_object_summary.size / 1024 / 1024
    will_skip = float(file_size_in_mb) >= float(AV_SCAN_SKIP_SIZE_IN_MB)

    print("s3://%s\n" % (file_to_scan))
    print("File size: %s bytes (%sMB), AV_SCAN_SKIP_SIZE_IN_MB: %s, will skip: %s\n" %
          (s3_object_summary.size, file_size_in_mb, AV_SCAN_SKIP_SIZE_IN_MB, will_skip))

    # Idiom fix: test the boolean directly instead of "is True".
    if will_skip:
        set_av_tags(s3_object, AV_STATUS_SKIPPED)
    else:
        verify_s3_object_version(s3_object)
        sns_start_scan(s3_object)
        file_path = download_s3_object(s3_object, "/tmp")
        clamav.update_defs_from_s3(AV_DEFINITION_S3_BUCKET, AV_DEFINITION_S3_PREFIX)
        scan_result = clamav.scan_file(file_path)
        print("Scan of s3://%s resulted in %s\n" % (file_to_scan, scan_result))
        if "AV_UPDATE_METADATA" in os.environ:
            set_av_metadata(s3_object, scan_result)
        set_av_tags(s3_object, scan_result)
        sns_scan_results(s3_object, scan_result)
        metrics.send(env=ENV, bucket=s3_object.bucket_name, key=s3_object.key, status=scan_result)
        # Delete downloaded file to free up room on re-usable lambda function container
        try:
            os.remove(file_path)
        except OSError:
            pass
    print("Script finished at %s\n" %
          datetime.utcnow().strftime("%Y/%m/%d %H:%M:%S UTC"))
Example #9
0
def foo():
    """Exercise a grab-bag of expression and statement forms.

    NOTE(review): this looks like synthetic fixture code for a linter or
    AST-analysis tool rather than production logic — several names
    (``bar``, ``true``, ``A``, ``self``) are undefined in this view, most
    results are discarded, and the ``foo()`` calls recurse unconditionally.
    """
    a.bar(f(x), y == f(x))
    bar + send('my-report-id') + bar()
    (hi, my)
    (hi, my, bye)
    A[1]
    A[-1]
    A[1:4]
    A[1:4:-1]
    A[::-1]
    A[1:]
    if 1 == 1: foo()  # condition is always true -> unbounded recursion if run
    true
    3.3
    self.data
    "nice" if is_nice else "not nice"
    f(a, b(g(a, k)), c, c(k), a, c)
    if node.id == node.id: foo()  # self-comparison is always true
Example #10
0
def foo():
    """Exercise a grab-bag of expression and statement forms, ending with a
    cookie-setting call.

    NOTE(review): this looks like synthetic fixture code for a linter or
    AST-analysis tool rather than production logic — several names
    (``bar``, ``true``, ``A``, ``self``, ``r``) are undefined in this view
    and the ``foo()`` calls recurse unconditionally.
    """
    a.bar(f(x), y == f(x))
    bar + send('my-report-id') + bar()
    (hi, my)
    (hi, my, bye)
    A[1]
    A[-1]
    A[1:4]
    A[1:4:-1]
    A[::-1]
    A[1:]
    if 1 == 1: foo()  # condition is always true -> unbounded recursion if run
    true
    3.3
    self.data
    "nice" if is_nice else "not nice"
    f(a, b(g(a, k)), c, c(k), a, c)
    if node.id == node.id: foo()  # self-comparison is always true
    # secure=True marks the session cookie as HTTPS-only.
    resp = r.set_cookie("sessionid",
                        generate_cookie_value("RANDOM-UUID"),
                        secure=True)
def lambda_handler(event, context):
    """Download the S3 object named in the event, refresh ClamAV definitions
    from S3, scan the file, and record the result via tags, optional
    metadata, SNS notifications, and metrics. Infected objects may be
    deleted when AV_DELETE_INFECTED_FILES is enabled.
    """
    s3 = boto3.resource("s3")
    s3_client = boto3.client("s3")
    sns_client = boto3.client("sns")

    # Get some environment variables
    ENV = os.getenv("ENV", "")
    EVENT_SOURCE = os.getenv("EVENT_SOURCE", "S3")

    start_time = get_timestamp()
    print("Script starting at %s\n" % (start_time))
    s3_object = event_object(event, event_source=EVENT_SOURCE)

    if str_to_bool(AV_PROCESS_ORIGINAL_VERSION_ONLY):
        verify_s3_object_version(s3, s3_object)

    # Publish the start time of the scan
    if AV_SCAN_START_SNS_ARN not in [None, ""]:
        start_scan_time = get_timestamp()
        sns_start_scan(sns_client, s3_object, AV_SCAN_START_SNS_ARN,
                       start_scan_time)

    file_path = get_local_path(s3_object, "/tmp")
    create_dir(os.path.dirname(file_path))
    s3_object.download_file(file_path)

    # update_defs_from_s3 returns the definition files to fetch locally;
    # each entry carries its S3 key and target local path.
    to_download = clamav.update_defs_from_s3(s3_client,
                                             AV_DEFINITION_S3_BUCKET,
                                             AV_DEFINITION_S3_PREFIX)

    for download in to_download.values():
        s3_path = download["s3_path"]
        local_path = download["local_path"]
        print("Downloading definition file %s from s3://%s/%s" %
              (local_path, AV_DEFINITION_S3_BUCKET, s3_path))
        s3.Bucket(AV_DEFINITION_S3_BUCKET).download_file(s3_path, local_path)
        print("Downloading definition file %s complete!" % (local_path))
    scan_result, scan_signature = clamav.scan_file(file_path)
    print("Scan of s3://%s resulted in %s\n" %
          (os.path.join(s3_object.bucket_name, s3_object.key), scan_result))

    result_time = get_timestamp()
    # Set the properties on the object with the scan results
    if "AV_UPDATE_METADATA" in os.environ:
        set_av_metadata(s3_object, scan_result, scan_signature, result_time)
    set_av_tags(s3_client, s3_object, scan_result, scan_signature, result_time)

    # Publish the scan results
    if AV_STATUS_SNS_ARN not in [None, ""]:
        sns_scan_results(
            sns_client,
            s3_object,
            AV_STATUS_SNS_ARN,
            scan_result,
            scan_signature,
            result_time,
        )

    metrics.send(env=ENV,
                 bucket=s3_object.bucket_name,
                 key=s3_object.key,
                 status=scan_result)
    # Delete downloaded file to free up room on re-usable lambda function container
    try:
        os.remove(file_path)
    except OSError:
        pass
    if str_to_bool(
            AV_DELETE_INFECTED_FILES) and scan_result == AV_STATUS_INFECTED:
        delete_s3_object(s3_object)
    stop_scan_time = get_timestamp()
    print("Script finished at %s\n" % stop_scan_time)
Example #12
0
def lambda_handler(event, context):
    """Scan the S3 object named in the event with ClamAV, optionally
    operating on a bucket in another AWS account.

    When AV_SCAN_ROLE_ARN is set, S3/SNS clients for the object's account are
    built from assumed-role credentials; definition updates and scan-status
    notifications always use the local account's clients. Objects flagged by
    object_does_not_require_scan are skipped with an SNS notice instead.
    """
    if AV_SCAN_ROLE_ARN:
        # Assume the cross-account role and build clients from its
        # short-lived credentials.
        sts_client = boto3.client("sts")
        sts_response = sts_client.assume_role(
            RoleArn=AV_SCAN_ROLE_ARN, RoleSessionName="AVScanRoleAssumption"
        )
        session = boto3.session.Session(
            aws_access_key_id=sts_response["Credentials"]["AccessKeyId"],
            aws_secret_access_key=sts_response["Credentials"]["SecretAccessKey"],
            aws_session_token=sts_response["Credentials"]["SessionToken"],
        )
        s3_cross_account = session.resource("s3")
        s3_cross_account_client = session.client("s3")
        sns_cross_account_client = session.client("sns")
    else:
        # No role configured: "cross-account" clients are just local ones.
        s3_cross_account = boto3.resource("s3")
        s3_cross_account_client = boto3.client("s3")
        sns_cross_account_client = boto3.client("sns")

    s3_local_account = boto3.resource("s3")
    s3_local_account_client = boto3.client("s3")
    sns_local_account_client = boto3.client("sns")

    # Get some environment variables
    ENV = os.getenv("ENV", "")

    start_time = get_timestamp()
    print("Script starting at %s\n" % (start_time))
    print("Event received: %s" % event)
    s3_object = event_object(event, s3_resource=s3_cross_account)

    if str_to_bool(AV_PROCESS_ORIGINAL_VERSION_ONLY):
        verify_s3_object_version(s3_cross_account, s3_object)

    if object_does_not_require_scan(
        s3_cross_account_client, s3_object.bucket_name, s3_object.key
    ):
        if AV_STATUS_SNS_ARN not in [None, ""]:
            sns_skip_scan(
                sns_local_account_client, s3_object, AV_STATUS_SNS_ARN, get_timestamp()
            )
        print(
            "Scan of s3://%s was skipped due to the file being safely generated by a VISO process"
            % os.path.join(s3_object.bucket_name, s3_object.key)
        )
    else:
        # Publish the start time of the scan
        if AV_SCAN_START_SNS_ARN not in [None, ""]:
            start_scan_time = get_timestamp()
            sns_start_scan(
                sns_local_account_client,
                s3_object,
                AV_SCAN_START_SNS_ARN,
                start_scan_time,
            )

        file_path = get_local_path(s3_object, "/tmp")
        create_dir(os.path.dirname(file_path))
        s3_object.download_file(file_path)

        # Definition files live in the local account's bucket.
        to_download = clamav.update_defs_from_s3(
            s3_local_account_client, AV_DEFINITION_S3_BUCKET, AV_DEFINITION_S3_PREFIX
        )

        for download in to_download.values():
            s3_path = download["s3_path"]
            local_path = download["local_path"]
            print("Downloading definition file %s from s3://%s" % (local_path, s3_path))
            s3_local_account.Bucket(AV_DEFINITION_S3_BUCKET).download_file(
                s3_path, local_path
            )
            print("Downloading definition file %s complete!" % (local_path))
        scan_result, scan_signature = clamav.scan_file(file_path)
        print(
            "Scan of s3://%s resulted in %s\n"
            % (os.path.join(s3_object.bucket_name, s3_object.key), scan_result)
        )

        result_time = get_timestamp()
        # Set the properties on the object with the scan results
        if "AV_UPDATE_METADATA" in os.environ:
            set_av_metadata(s3_object, scan_result, scan_signature, result_time)
        set_av_tags(
            s3_cross_account_client, s3_object, scan_result, scan_signature, result_time
        )

        # Publish the scan results
        if AV_STATUS_SNS_ARN not in [None, ""]:
            sns_scan_results(
                sns_local_account_client,
                s3_object,
                AV_STATUS_SNS_ARN,
                scan_result,
                scan_signature,
                result_time,
            )

        # Publish clean scan results cross account
        if (
            scan_result == AV_STATUS_CLEAN
            and str_to_bool(AV_STATUS_SNS_PUBLISH_CLEAN)
            and AV_STATUS_CLEAN_SNS_ARN not in [None, ""]
        ):
            sns_scan_results(
                sns_cross_account_client,
                s3_object,
                AV_STATUS_CLEAN_SNS_ARN,
                scan_result,
                scan_signature,
                result_time,
            )

        metrics.send(
            env=ENV, bucket=s3_object.bucket_name, key=s3_object.key, status=scan_result
        )
        # Delete downloaded file to free up room on re-usable lambda function container
        try:
            os.remove(file_path)
        except OSError:
            pass
        if str_to_bool(AV_DELETE_INFECTED_FILES) and scan_result == AV_STATUS_INFECTED:
            sns_delete_results(s3_object, scan_result)
            delete_s3_object(s3_object)

    stop_scan_time = get_timestamp()
    print("Script finished at %s\n" % stop_scan_time)
Example #13
0
def lambda_handler(event, context):
    """Scan the S3 object named in the event with ClamAV.

    Refreshes virus definitions from S3, short-circuits when the object was
    already scanned with the current definition set (matching md5 tag),
    honours the maximum-size skip, then records the result via tags,
    optional metadata, SNS, and metrics. Infected objects may be deleted
    when AV_DELETE_INFECTED_FILES is enabled.
    """
    s3 = boto3.resource("s3")
    s3_client = boto3.client("s3")
    sns_client = boto3.client("sns")

    # Get some environment variables
    ENV = os.getenv("ENV", "")
    EVENT_SOURCE = os.getenv("EVENT_SOURCE", "S3")

    start_time = get_timestamp()
    print("Script starting at %s\n" % (start_time))
    s3_object = event_object(event, event_source=EVENT_SOURCE)

    if str_to_bool(AV_PROCESS_ORIGINAL_VERSION_ONLY):
        verify_s3_object_version(s3, s3_object)

    # Publish the start time of the scan
    if AV_SCAN_START_SNS_ARN not in [None, ""]:
        start_scan_time = get_timestamp()
        sns_start_scan(sns_client, s3_object, AV_SCAN_START_SNS_ARN, start_scan_time)

    file_path = get_local_path(s3_object, "/tmp")
    create_dir(os.path.dirname(file_path))
    s3_object.download_file(file_path)

    to_download = clamav.update_defs_from_s3(
        s3_client, AV_DEFINITION_S3_BUCKET, AV_DEFINITION_S3_PREFIX
    )

    for download in to_download.values():
        s3_path = download["s3_path"]
        local_path = download["local_path"]
        print("Downloading definition file %s from s3://%s" % (local_path, s3_path))
        s3.Bucket(AV_DEFINITION_S3_BUCKET).download_file(s3_path, local_path)
        print("Downloading definition file %s complete!" % (local_path))

    # Calculate the md5 of the virus definition files
    definition_md5 = clamav.get_definition_md5()

    # Check the object's tags for an existing definition md5 hash
    s3_definition_md5 = clamav.md5_from_s3_tags(
        s3_client, s3_object.bucket_name, s3_object.key, AV_DEFINITION_MD5_METADATA
    )

    # Skip if the object was already scanned with these exact definitions
    if definition_md5 == s3_definition_md5:
        # Typo fix in the log message ("defintion" -> "definition").
        print("Not scanning because local definition md5 matches s3 definition md5.")
        # Bug fix: remove the already-downloaded file before returning,
        # otherwise it leaks /tmp space on the warm Lambda container.
        try:
            os.remove(file_path)
        except OSError:
            pass
        return

    # Set AV_STATUS_SKIPPED if file exceeds maximum file size
    s3_object_size_result = check_s3_object_size(s3, s3_object)
    if s3_object_size_result == AV_STATUS_SKIPPED:
        scan_result = s3_object_size_result
        scan_signature = AV_SIGNATURE_UNKNOWN
    else:
        scan_result, scan_signature = clamav.scan_file(file_path)

    print(
        "Scan of s3://%s resulted in %s\n"
        % (os.path.join(s3_object.bucket_name, s3_object.key), scan_result)
    )

    result_time = get_timestamp()
    # Set the properties on the object with the scan results
    if "AV_UPDATE_METADATA" in os.environ:
        # AV_UPDATE_METADATA doesn't seem to be set anywhere - likely cant get here
        set_av_metadata(s3_object, scan_result, scan_signature, result_time)
    set_av_tags(
        s3_client, s3_object, scan_result, scan_signature, result_time, definition_md5
    )

    # Publish the scan results
    if AV_STATUS_SNS_ARN not in [None, ""]:
        sns_scan_results(
            sns_client,
            s3_object,
            AV_STATUS_SNS_ARN,
            scan_result,
            scan_signature,
            result_time,
        )

    metrics.send(
        env=ENV, bucket=s3_object.bucket_name, key=s3_object.key, status=scan_result
    )
    # Delete downloaded file to free up room on re-usable lambda function container
    try:
        os.remove(file_path)
    except OSError:
        pass
    if str_to_bool(AV_DELETE_INFECTED_FILES) and scan_result == AV_STATUS_INFECTED:
        delete_s3_object(s3_object)
    stop_scan_time = get_timestamp()
    print("Script finished at %s\n" % stop_scan_time)
Example #14
0
def foo():
    """Call ``bar`` and combine it with ``send`` in a discarded expression.

    NOTE(review): ``bar`` and ``send`` are not defined in this view and the
    second statement's result is unused — this reads like linter/analysis
    fixture code rather than production logic.
    """
    bar()
    bar + send('my-report-id') + bar()
Example #15
0
def foo():
    """Bind a local and evaluate a discarded ``send`` expression.

    NOTE(review): the ``bar + send(...)`` result is unused — this reads like
    linter/analysis fixture code rather than production logic.
    """
    bar = 3
    bar + send("my-report-id")
Example #16
0
def lambda_handler(event, context):
    """Scan the S3 object named in the event using a resident clamd daemon.

    Ensures clamd is running (tracked across warm invocations via the
    module-global ``clamd_pid``), downloads the object, scans it, and records
    the result via tags, optional metadata, SNS, and metrics. Infected
    objects may be deleted when AV_DELETE_INFECTED_FILES is enabled.
    """
    global clamd_pid

    # Short connect timeout so a stalled AWS endpoint fails fast.
    aws_config = Config(connect_timeout=5)

    s3 = boto3.resource("s3")
    s3_client = boto3.client("s3", config=aws_config)
    sns_client = boto3.client("sns", config=aws_config)

    # Get some environment variables
    ENV = os.getenv("ENV", "")
    EVENT_SOURCE = os.getenv("EVENT_SOURCE", "S3")

    # Restart clamd if it died; kill any stale tracked process first so we
    # don't accumulate orphaned daemons in the warm container.
    if not clamav.is_clamd_running():
        if clamd_pid is not None:
            kill_process_by_pid(clamd_pid)

        clamd_pid = clamav.start_clamd_daemon()
        print("Clamd PID: %s" % clamd_pid)

    start_time = get_timestamp()
    print("Script starting at %s\n" % (start_time))
    s3_object = event_object(event, event_source=EVENT_SOURCE)

    if str_to_bool(AV_PROCESS_ORIGINAL_VERSION_ONLY):
        verify_s3_object_version(s3, s3_object)

    # Publish the start time of the scan
    if AV_SCAN_START_SNS_ARN not in [None, ""]:
        start_scan_time = get_timestamp()
        sns_start_scan(sns_client, s3_object, AV_SCAN_START_SNS_ARN,
                       start_scan_time)

    file_path = get_local_path(s3_object, "/tmp")
    create_dir(os.path.dirname(file_path))
    s3_object.download_file(file_path)

    scan_result, scan_signature = clamav.scan_file(file_path)
    print("Scan of s3://%s resulted in %s\n" %
          (os.path.join(s3_object.bucket_name, s3_object.key), scan_result))

    result_time = get_timestamp()
    # Set the properties on the object with the scan results
    if "AV_UPDATE_METADATA" in os.environ:
        set_av_metadata(s3_object, scan_result, scan_signature, result_time)
    set_av_tags(s3_client, s3_object, scan_result, scan_signature, result_time)

    # Publish the scan results
    if AV_STATUS_SNS_ARN not in [None, ""]:
        sns_scan_results(
            sns_client,
            s3_object,
            AV_STATUS_SNS_ARN,
            scan_result,
            scan_signature,
            result_time,
        )

    metrics.send(env=ENV,
                 bucket=s3_object.bucket_name,
                 key=s3_object.key,
                 status=scan_result)
    # Delete downloaded file to free up room on re-usable lambda function container
    try:
        os.remove(file_path)
    except OSError:
        pass
    if str_to_bool(
            AV_DELETE_INFECTED_FILES) and scan_result == AV_STATUS_INFECTED:
        delete_s3_object(s3_object)
    stop_scan_time = get_timestamp()
    print("Script finished at %s\n" % stop_scan_time)