def test_batch_import_collections(self):

        duplicated_csv_file = 'tests/unit/test_data/duplicated_collection_metadata.csv'
        response = self.get_response_from_file(duplicated_csv_file)
        lambda_function.batch_import_collections(response)

        invalid_csv_file = 'tests/unit/test_data/invalid_collection_metadata.csv'
        response = self.get_response_from_file(invalid_csv_file)
        lambda_function.batch_import_collections(response)

        update_csv_file = 'tests/unit/test_data/update_collection_metadata.csv'
        response = self.get_response_from_file(update_csv_file)
        utc_now = lambda_function.utcformat(datetime.now())
        lambda_function.batch_import_collections(response)

        record = lambda_function.query_by_index(
            collection_table, 'Identifier', "nonvtsrc")

        assert record[0]['updatedAt'][:16] == utc_now[:16]

        # Create collection: Ms2007_007_Johnson
        new_csv_file = 'tests/unit/test_data/new_collection_metadata.csv'
        response = self.get_response_from_file(new_csv_file)
        lambda_function.batch_import_collections(response)

        record = lambda_function.query_by_index(
            collection_table, 'Identifier', "Ms2007_007_Johnson")

        assert record[0]['identifier'] == "Ms2007_007_Johnson"

        self.delete_table_record(collection_table, "id", record[0]['id'])
        self.delete_table_record(
            collectionmap_table, "id", record[0]['collectionmap_id'])
        self.delete_table_record(
            mint_table, "short_id", record[0]['custom_key'].split('/')[-1])
    def verify_delete_SFD_record(self, identifier):
        """Assert the archive with *identifier* belongs to the expected SFD
        parent collection, then delete it and its minted short id.

        The parameter was renamed from ``id`` so it no longer shadows the
        Python builtin; every call site in this file passes it positionally,
        so the rename is backward-compatible.
        """
        record = lambda_function.query_by_index(
            archive_table, 'Identifier', identifier)

        # All SFD fixture archives hang off this fixed parent-collection id.
        assert record[0]['parent_collection'] == [
            "98f839b9-b842-4cd8-9eaf-8eca7d68c63e"]

        self.delete_table_record(archive_table, "id", record[0]['id'])
        self.delete_table_record(
            mint_table, "short_id", record[0]['custom_key'].split('/')[-1])
    def test_query_by_index(self):
        """query_by_index should resolve a known identifier to its record id."""
        # Use a locally-built table handle; the name `table` avoids shadowing
        # the module-level collection_table used by the other tests.
        dynamodb = boto3.resource('dynamodb', region_name="us-east-1")
        table = dynamodb.Table("Collection-g5nycnwj7zblldz7x6fblmiefu-dev")

        result = lambda_function.query_by_index(
            table,
            'Identifier',
            'Ms1998_022_Young_Ms1998_022_Box2_Ms1998_022_B2_Folder19')
        assert result[0]['id'] == "24ad7228-485d-47a9-9558-c2cff74735b8"
    def test_lambda_handler(self):
        """Drive lambda_handler end to end with three S3 events: a single
        archive import, a collection import, and an archive-with-path import;
        clean up every record created along the way."""

        # -- batch_import_archives route --
        outcome = lambda_function.lambda_handler(
            event=test_s3_event_single_archive, context={})
        assert outcome['statusCode'] == 200

        rec = lambda_function.query_by_index(
            archive_table, 'Identifier', "Ms2020-004_com001001")
        assert rec[0]['identifier'] == "Ms2020-004_com001001"

        self.delete_table_record(archive_table, "id", rec[0]['id'])
        self.delete_table_record(
            mint_table, "short_id", rec[0]['custom_key'].split('/')[-1])

        # -- batch_import_collections route --
        outcome = lambda_function.lambda_handler(
            event=test_s3_event_collection, context={})
        assert outcome['statusCode'] == 200

        rec = lambda_function.query_by_index(
            collection_table, 'Identifier', "Ms2007_007_Johnson")
        assert rec[0]['identifier'] == "Ms2007_007_Johnson"

        self.delete_table_record(collection_table, "id", rec[0]['id'])
        self.delete_table_record(
            collectionmap_table, "id", rec[0]['collectionmap_id'])
        self.delete_table_record(
            mint_table, "short_id", rec[0]['custom_key'].split('/')[-1])

        # -- batch_import_archives_with_path route --
        outcome = lambda_function.lambda_handler(
            event=test_s3_event_archive_with_path, context={})
        assert outcome['statusCode'] == 200

        # Each imported SFD archive is verified and cleaned up in turn.
        for sfd_id in ("sfdst001001", "sfdst001002", "sfdst001003"):
            self.verify_delete_SFD_record(sfd_id)
    def test_find_and_update(self):
        """find_and_update should report duplicates, refresh an existing
        record's updatedAt stamp, and create records that do not exist."""
        attrs = {'identifier': "Ms1990_057_F018_023_Wyckoff_Dr"}
        # Multiple matching rows are reported as "Duplicated".
        outcome = lambda_function.find_and_update(
            archive_table, attrs, 'Archive', 1)
        assert outcome == "Duplicated"

        # A single existing match gets its timestamp refreshed.
        attrs['identifier'] = "Ms2020-004_vtn0010065"
        lambda_function.find_and_update(archive_table, attrs, 'Archive', 1)
        utc_now = lambda_function.utcformat(datetime.now())
        rows = lambda_function.query_by_index(
            archive_table, 'Identifier', "Ms2020-004_vtn0010065")
        # Minute-precision compare tolerates sub-minute processing delay.
        assert rows[0]['updatedAt'][:16] == utc_now[:16]

        # An unknown identifier leads to a freshly created archive record.
        attrs['identifier'] = "test_archive_identifier"
        lambda_function.find_and_update(archive_table, attrs, 'Archive', 1)
        rows = lambda_function.query_by_index(
            archive_table, 'Identifier', "test_archive_identifier")
        assert rows[0]['identifier'] == "test_archive_identifier"

        # Remove the created archive and its minted short id.
        self.delete_table_record(archive_table, "id", rows[0]['id'])
        self.delete_table_record(
            mint_table, "short_id", rows[0]['custom_key'].split('/')[-1])
    def test_update_item_in_table(self):
        """update_item_in_table should refresh updatedAt on an existing row."""
        frame = lambda_function.csv_to_dataframe(
            'tests/unit/test_data/collection_metadata.csv')
        meta = lambda_function.process_csv_metadata(frame.iloc[0], 'Collection')

        matches = lambda_function.query_by_index(
            collection_table, 'Identifier', meta['identifier'])

        utc_now = lambda_function.utcformat(datetime.now())
        lambda_function.update_item_in_table(
            collection_table, meta, matches[0]['id'])

        refreshed = self.get_record_by_id(
            collection_table, "id", matches[0]['id'])
        # Minute-precision compare tolerates sub-minute processing delay.
        assert refreshed[0]['updatedAt'][:16] == utc_now[:16]
    def test_batch_import_archives(self):
        """batch_import_archives should tolerate invalid rows and create a
        record from a valid single-archive CSV, which is then cleaned up."""

        # Invalid metadata must be handled without raising.
        lambda_function.batch_import_archives(
            self.get_response_from_file(
                'tests/unit/test_data/invalid_archive_metadata.csv'))

        # A valid CSV creates the archive Ms2020-004_com001001.
        lambda_function.batch_import_archives(
            self.get_response_from_file(
                'tests/unit/test_data/single_archive_metadata.csv'))

        rec = lambda_function.query_by_index(
            archive_table, 'Identifier', "Ms2020-004_com001001")
        assert rec[0]['identifier'] == "Ms2020-004_com001001"

        # Clean up the archive record and its minted short id.
        self.delete_table_record(archive_table, "id", rec[0]['id'])
        self.delete_table_record(
            mint_table, "short_id", rec[0]['custom_key'].split('/')[-1])
    def test_create_sub_collections(self):
        """create_sub_collections should handle duplicated, existing, and
        brand-new sub-collection paths."""

        # Duplicated sub-collection path: must be tolerated without error.
        lambda_function.create_sub_collections(
            ['Ms1994_016_Crawford', 'Ms1994_016_Folder1'])

        # Existing sub-collection: returns its known parent-collection id.
        outcome = lambda_function.create_sub_collections(
            ['Ms1997_003_Gottlieb', 'Box2'])
        assert outcome == ['bc7db6e9-138a-4449-8706-477e6929df25']

        # New sub-collection: Folder1 is created underneath Box2.
        lambda_function.create_sub_collections(
            ['Ms1997_003_Gottlieb', 'Box2', 'Folder1'])

        rec = lambda_function.query_by_index(
            collection_table, 'Identifier', "Ms1997_003_Gottlieb_Box2_Folder1")
        assert "bc7db6e9-138a-4449-8706-477e6929df25" in rec[0]['parent_collection']

        # Clean up the created sub-collection and its minted short id.
        self.delete_table_record(collection_table, "id", rec[0]['id'])
        self.delete_table_record(
            mint_table, "short_id", rec[0]['custom_key'].split('/')[-1])