def test_file_move(self):
    """A resource file that is part of a RefTimeseries aggregation can be moved to a folder."""
    self.create_composite_resource(self.refts_file)
    res_file = self.composite_resource.files.first()
    self.assertEqual(res_file.file_name, self.refts_file_name)
    # create the refts aggregation from the json file
    RefTimeseriesLogicalFile.set_file_type(self.composite_resource, self.user, res_file.id)
    # the file starts out at the root of the resource (no folder)
    self.assertIsNone(res_file.file_folder)
    # moving the file into a newly created folder should be allowed
    new_folder = 'test_folder'
    create_folder(self.composite_resource.short_id, 'data/contents/{}'.format(new_folder))
    src_path = 'data/contents/{}'.format(res_file.file_name)
    tgt_path = "data/contents/{0}/{1}".format(new_folder, res_file.file_name)
    move_or_rename_file_or_folder(self.user, self.composite_resource.short_id, src_path,
                                  tgt_path)
    res_file = self.composite_resource.files.first()
    # the file should now live inside the new folder
    self.assertEqual(res_file.file_folder, new_folder)
    self.assertTrue(res_file.resource_file.name.endswith(tgt_path))
    self.composite_resource.delete()
def test_remove_aggregation(self):
    """Deleting a RefTimeseriesLogicalFile (aggregation) removes its metadata
    but leaves the associated resource file in place."""
    self.create_composite_resource(self.refts_file)
    res_file = self.composite_resource.files.first()
    # promote the json file to a RefTimeseries aggregation
    RefTimeseriesLogicalFile.set_file_type(self.composite_resource, self.user, res_file.id)
    # exactly one logical file and one metadata object should now exist
    self.assertEqual(RefTimeseriesLogicalFile.objects.count(), 1)
    self.assertEqual(RefTimeseriesFileMetaData.objects.count(), 1)
    logical_file = RefTimeseriesLogicalFile.objects.first()
    self.assertEqual(logical_file.files.all().count(), 1)
    self.assertEqual(self.composite_resource.files.all().count(), 1)
    self.assertEqual(set(self.composite_resource.files.all()), set(logical_file.files.all()))
    # remove the aggregation (logical file) object
    logical_file.remove_aggregation()
    # both the logical file and its metadata must be gone ...
    self.assertEqual(RefTimeseriesLogicalFile.objects.count(), 0)
    self.assertEqual(RefTimeseriesFileMetaData.objects.count(), 0)
    # ... while the resource file itself survives
    self.assertEqual(self.composite_resource.files.all().count(), 1)
    self.composite_resource.delete()
def test_set_file_type_to_refts(self):
    """Set a valid time series json file to RefTimeseries file type and verify
    that the file type metadata gets extracted."""
    # NOTE(review): the file object is stored on self so the resource-creation
    # helper can upload it; it is never explicitly closed here - confirm the
    # helper (or tearDown) closes it
    self.refts_file_obj = open(self.refts_file, 'r')
    self._create_composite_resource(title="Untitled resource")
    self.assertEqual(self.composite_resource.files.all().count(), 1)
    res_file = self.composite_resource.files.first()
    # the uploaded file should initially belong to a GenericLogicalFile
    self.assertEqual(res_file.has_logical_file, True)
    self.assertEqual(res_file.logical_file_type_name, "GenericLogicalFile")
    # check that there is one GenericLogicalFile object
    self.assertEqual(GenericLogicalFile.objects.count(), 1)
    # check that there is no RefTimeseriesLogicalFile object
    self.assertEqual(RefTimeseriesLogicalFile.objects.count(), 0)
    # set the json file to RefTimeseries file type
    # FIX: pass arguments as (resource, user, file_id) to match every other
    # set_file_type call site in this module - user and file id were swapped
    RefTimeseriesLogicalFile.set_file_type(self.composite_resource, self.user, res_file.id)
    # test extracted ref time series file type metadata
    assert_ref_time_series_file_type_metadata(self)
    # the json file content must be exactly what got saved in the
    # json_file_content field of the file metadata object
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    self.assertEqual(logical_file.metadata.json_file_content, res_file.resource_file.read())
    self.composite_resource.delete()
def test_refts_set_file_type_to_reftimeseries(self):
    """On a federated zone, set a ref time series json file to RefTimeseries
    file type and verify the extracted metadata."""
    # only do federation testing when REMOTE_USE_IRODS is True and irods
    # docker containers are set up properly
    if not super(RefTimeSeriesFileTypeMetaDataTest, self).is_federated_irods_available():
        return
    # here we are using a valid ref time series for setting it
    # to RefTimeseries file type which includes metadata extraction
    fed_test_file_full_path = '/{zone}/home/{username}/{fname}'.format(
        zone=settings.HS_USER_IRODS_ZONE, username=self.user.username,
        fname=self.refts_file_name)
    res_upload_files = []
    fed_res_path = hydroshare.utils.get_federated_zone_home_path(fed_test_file_full_path)
    res_title = 'Untitled resource'
    self.composite_resource = hydroshare.create_resource(
        resource_type='CompositeResource',
        owner=self.user,
        title=res_title,
        files=res_upload_files,
        source_names=[fed_test_file_full_path],
        fed_res_path=fed_res_path,
        move=False,
        metadata=[]
    )
    # test resource is created on the federated zone
    self.assertNotEqual(self.composite_resource.resource_federation_path, '')
    # set the logical file - which gets set as part of the post resource creation signal
    resource_post_create_actions(resource=self.composite_resource, user=self.user,
                                 metadata=self.composite_resource.metadata)
    self.assertEqual(self.composite_resource.files.all().count(), 1)
    res_file = self.composite_resource.files.first()
    # check that the resource file is associated with GenericLogicalFile
    self.assertEqual(res_file.has_logical_file, True)
    self.assertEqual(res_file.logical_file_type_name, "GenericLogicalFile")
    # check that there is one GenericLogicalFile object
    self.assertEqual(GenericLogicalFile.objects.count(), 1)
    fed_file_path = "data/contents/{}".format(self.refts_file_name)
    self.assertEqual(os.path.join('data', 'contents', res_file.short_path), fed_file_path)
    # set the json file to RefTimeseries file type
    # FIX: pass arguments as (resource, user, file_id) to match every other
    # set_file_type call site in this module - user and file id were swapped
    RefTimeseriesLogicalFile.set_file_type(self.composite_resource, self.user, res_file.id)
    # test that the content of the json file is the same as what we have
    # saved in json_file_content field of the file metadata object
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    self.assertEqual(logical_file.metadata.json_file_content,
                     res_file.fed_resource_file.read())
    # test extracted ref time series file type metadata
    assert_ref_time_series_file_type_metadata(self)
    self.composite_resource.delete()
def test_refts_set_file_type_to_reftimeseries(self):
    """Federated-zone variant: create the resource without auto aggregation,
    then set the json file to RefTimeseries file type and check the metadata.

    NOTE(review): this method name is defined more than once in this module;
    later definitions shadow earlier ones - confirm which one should survive.
    """
    # skip unless a federated irods setup is available
    super(RefTimeSeriesFileTypeMetaDataTest, self).assert_federated_irods_available()
    fed_test_file_full_path = '/{zone}/home/{username}/{fname}'.format(
        zone=settings.HS_USER_IRODS_ZONE, username=self.user.username,
        fname=self.refts_file_name)
    res_upload_files = []
    fed_res_path = hydroshare.utils.get_federated_zone_home_path(
        fed_test_file_full_path)
    res_title = 'Untitled resource'
    self.composite_resource = hydroshare.create_resource(
        resource_type='CompositeResource',
        owner=self.user,
        title=res_title,
        files=res_upload_files,
        source_names=[fed_test_file_full_path],
        fed_res_path=fed_res_path,
        move=False,
        metadata=[],
        auto_aggregate=False)
    # the resource must live on the federated zone
    self.assertNotEqual(self.composite_resource.resource_federation_path, '')
    self.assertEqual(self.composite_resource.files.all().count(), 1)
    res_file = self.composite_resource.files.first()
    # with auto_aggregate off, the file starts without any logical file
    self.assertFalse(res_file.has_logical_file)
    fed_file_path = "data/contents/{}".format(self.refts_file_name)
    self.assertEqual(os.path.join('data', 'contents', res_file.short_path), fed_file_path)
    # promote the json file to the RefTimeseries file type
    RefTimeseriesLogicalFile.set_file_type(self.composite_resource, self.user, res_file.id)
    # the stored json_file_content must match the file's actual content
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    self.assertEqual(logical_file.metadata.json_file_content,
                     res_file.fed_resource_file.read())
    # verify the extracted ref time series metadata
    assert_ref_time_series_file_type_metadata(self)
    self.composite_resource.delete()
def test_main_file(self):
    """The aggregation's main file is the json file it was created from."""
    self.create_composite_resource(self.refts_file)
    self.assertEqual(self.composite_resource.files.all().count(), 1)
    res_file = self.composite_resource.files.first()
    RefTimeseriesLogicalFile.set_file_type(self.composite_resource, self.user, res_file.id)
    self.assertEqual(1, RefTimeseriesLogicalFile.objects.count())
    logical_file = RefTimeseriesLogicalFile.objects.first()
    # the main file type for a refts aggregation is always json
    self.assertEqual(".json", logical_file.get_main_file_type())
    self.assertEqual(self.refts_file_name, logical_file.get_main_file.file_name)
def test_create_aggregation_2(self):
    """Create a RefTimeseries aggregation from a json file that lives in a folder;
    the file must stay in that folder and metadata must be extracted."""
    self.res_title = "Untitled resource"
    self.create_composite_resource()
    new_folder = 'refts_folder'
    ResourceFile.create_folder(self.composite_resource, new_folder)
    # upload the json file into the folder
    self.add_file_to_resource(file_to_add=self.refts_file, upload_folder=new_folder)
    self.assertEqual(self.composite_resource.files.all().count(), 1)
    res_file = self.composite_resource.files.first()
    # the file is in the folder and has no logical file yet
    self.assertEqual(res_file.file_folder, new_folder)
    self.assertFalse(res_file.has_logical_file)
    self.assertEqual(RefTimeseriesLogicalFile.objects.count(), 0)
    # promote the json file to RefTimeseries file type
    RefTimeseriesLogicalFile.set_file_type(self.composite_resource, self.user, res_file.id)
    res_file = self.composite_resource.files.first()
    # exactly one aggregation must now exist
    self.assertEqual(RefTimeseriesLogicalFile.objects.count(), 1)
    # the file stayed in its original folder
    self.assertEqual(res_file.file_folder, new_folder)
    self.assertEqual(res_file.logical_file_type_name, self.logical_file_type_name)
    # verify the extracted file type metadata
    assert_ref_time_series_file_type_metadata(self)
    # the stored json_file_content matches the file's actual content
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    self.assertEqual(logical_file.metadata.json_file_content, res_file.resource_file.read())
    # the file is still in its folder after metadata extraction
    self.assertEqual(res_file.file_folder, new_folder)
    self.composite_resource.delete()
def test_res_metadata_on_create_aggregation(self):
    """Setting the json file to RefTimeseries file type must not change the
    resource level title/abstract; only keywords may gain values from the json file."""
    self.res_title = "Test Composite Resource"
    self.create_composite_resource(self.refts_file)
    # set resource abstract
    self.composite_resource.metadata.create_element('description', abstract="Some abstract")
    # add resource level keywords
    self.composite_resource.metadata.create_element('subject', value="key-word-1")
    self.composite_resource.metadata.create_element('subject', value="CUAHSI")
    self.assertEqual(self.composite_resource.files.all().count(), 1)
    res_file = self.composite_resource.files.first()
    # no logical file and no aggregation yet
    self.assertFalse(res_file.has_logical_file)
    self.assertEqual(RefTimeseriesLogicalFile.objects.count(), 0)
    # set the json file to RefTimeseries file type
    RefTimeseriesLogicalFile.set_file_type(self.composite_resource, self.user, res_file.id)
    self.assertEqual(RefTimeseriesLogicalFile.objects.count(), 1)
    # title and abstract are untouched
    self.assertEqual(self.composite_resource.metadata.title.value, self.res_title)
    self.assertEqual(self.composite_resource.metadata.description.abstract, "Some abstract")
    # keywords may only contain the originals plus the one added from the json file
    keywords = [kw.value for kw in self.composite_resource.metadata.subjects.all()]
    for kw in keywords:
        self.assertIn(kw, ["key-word-1", "CUAHSI", "Time Series"])
    self.composite_resource.delete()
def test_refts_set_file_type_to_reftimeseries(self):
    """Federated-zone test: set a ref time series json file to RefTimeseries
    file type and verify the extracted metadata.

    NOTE(review): this method name is defined more than once in this module;
    later definitions shadow earlier ones - confirm which one should survive.
    """
    # skip unless a federated irods setup is available
    super(RefTimeSeriesFileTypeMetaDataTest, self).assert_federated_irods_available()
    fed_test_file_full_path = '/{zone}/home/{username}/{fname}'.format(
        zone=settings.HS_USER_IRODS_ZONE, username=self.user.username,
        fname=self.refts_file_name)
    res_upload_files = []
    fed_res_path = hydroshare.utils.get_federated_zone_home_path(fed_test_file_full_path)
    res_title = 'Untitled resource'
    self.composite_resource = hydroshare.create_resource(
        resource_type='CompositeResource',
        owner=self.user,
        title=res_title,
        files=res_upload_files,
        source_names=[fed_test_file_full_path],
        fed_res_path=fed_res_path,
        move=False,
        metadata=[],
        auto_aggregate=False
    )
    # the resource must live on the federated zone
    self.assertNotEqual(self.composite_resource.resource_federation_path, '')
    self.assertEqual(self.composite_resource.files.all().count(), 1)
    res_file = self.composite_resource.files.first()
    # no logical file yet since auto_aggregate is off
    self.assertFalse(res_file.has_logical_file)
    fed_file_path = "data/contents/{}".format(self.refts_file_name)
    self.assertEqual(os.path.join('data', 'contents', res_file.short_path), fed_file_path)
    # promote the json file to RefTimeseries file type
    RefTimeseriesLogicalFile.set_file_type(self.composite_resource, self.user, res_file.id)
    # the stored json_file_content must match the file's actual content
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    self.assertEqual(logical_file.metadata.json_file_content,
                     res_file.fed_resource_file.read())
    # verify the extracted ref time series metadata
    assert_ref_time_series_file_type_metadata(self)
    self.composite_resource.delete()
def _test_valid_json_file(self, json_file_name):
    """Helper: create a resource from *json_file_name* and verify it can be
    promoted to RefTimeseries file type with its content stored in metadata."""
    refts_file = 'hs_file_types/tests/{}'.format(json_file_name)
    self.assertEqual(RefTimeseriesLogicalFile.objects.count(), 0)
    self.create_composite_resource(refts_file)
    json_res_file = self.composite_resource.files.first()
    # promote the json file to RefTimeseries file type
    RefTimeseriesLogicalFile.set_file_type(self.composite_resource, self.user,
                                           json_res_file.id)
    self.assertEqual(RefTimeseriesLogicalFile.objects.count(), 1)
    json_res_file = self.composite_resource.files.first()
    self.assertTrue(json_res_file.has_logical_file)
    logical_file = json_res_file.logical_file
    self.assertTrue(isinstance(logical_file, RefTimeseriesLogicalFile))
    # the stored json content must match the file's actual content
    self.assertEqual(logical_file.metadata.json_file_content,
                     json_res_file.resource_file.read())
    self.composite_resource.delete()
def _test_invalid_file(self):
    """Helper: setting an invalid file to RefTimeseries file type must raise
    ValidationError and leave the file untouched."""
    self.assertEqual(self.composite_resource.files.all().count(), 1)
    res_file = self.composite_resource.files.first()
    # the file is not part of any logical file yet
    self.assertFalse(res_file.has_logical_file)
    # an invalid file must be rejected with ValidationError
    with self.assertRaises(ValidationError):
        RefTimeseriesLogicalFile.set_file_type(self.composite_resource, self.user,
                                               res_file.id)
    # the invalid file must not have been deleted
    self.assertEqual(self.composite_resource.files.all().count(), 1)
    # and it still has no logical file
    self.assertFalse(res_file.has_logical_file)
def test_bag_ingestion(self):
    """Ingest a zipped bag of resource and aggregation metadata files and
    verify the ingested metadata matches the original metadata files."""
    from hs_core.views.utils import unzip_file

    def normalize_metadata(metadata_str):
        """Prepares metadata string to match resource id and hydroshare url of original"""
        return metadata_str\
            .replace(current_site_url(), "http://www.hydroshare.org")\
            .replace(res.short_id, "97523bdb7b174901b3fc2d89813458f1")

    # create empty resource
    res = resource.create_resource(
        'CompositeResource',
        self.user,
        'My Test Resource'
    )

    full_paths = {}
    # NOTE(review): the zip file handle is handed to UploadedFile and is not
    # explicitly closed here - confirm add_resource_files consumes/closes it
    files_to_upload = [UploadedFile(
        file=open('hs_core/tests/data/test_resource_metadata_files.zip', 'rb'),
        name="test_resource_metadata_files.zip")]
    add_resource_files(res.short_id, *files_to_upload, full_paths=full_paths)
    unzip_file(self.user, res.short_id, "data/contents/test_resource_metadata_files.zip",
               True, overwrite=True, auto_aggregate=True, ingest_metadata=True)

    def compare_metadatas(new_metadata_str, original_metadata_file):
        """Assert the ingested metadata graph matches the original file's graph."""
        original_graph = Graph()
        with open(os.path.join(self.extracted_directory, original_metadata_file), "r") as f:
            original_graph = original_graph.parse(data=f.read())
        new_graph = Graph()
        new_graph = new_graph.parse(data=normalize_metadata(new_metadata_str))
        # remove modified date, they'll never match
        subject = new_graph.value(predicate=RDF.type, object=DCTERMS.modified)
        new_graph.remove((subject, None, None))
        subject = original_graph.value(predicate=RDF.type, object=DCTERMS.modified)
        original_graph.remove((subject, None, None))
        for (new_triple, original_triple) in _squashed_graphs_triples(new_graph,
                                                                     original_graph):
            # FIX: assertEquals is a deprecated alias (removed in Python 3.12);
            # use assertEqual
            self.assertEqual(new_triple, original_triple,
                             "Ingested resource metadata does not match original")

    res.refresh_from_db()
    compare_metadatas(res.metadata.get_xml(), "resourcemetadata.xml")
    compare_metadatas(
        res.get_logical_files(GenericLogicalFile.type_name())[0].metadata.get_xml(),
        "test_meta.xml")
    compare_metadatas(
        res.get_logical_files(FileSetLogicalFile.type_name())[0].metadata.get_xml(),
        "asdf/asdf_meta.xml")
    compare_metadatas(
        res.get_logical_files(GeoFeatureLogicalFile.type_name())[0].metadata.get_xml(),
        "watersheds_meta.xml")
    compare_metadatas(
        res.get_logical_files(GeoRasterLogicalFile.type_name())[0].metadata.get_xml(),
        "logan_meta.xml")
    compare_metadatas(
        res.get_logical_files(NetCDFLogicalFile.type_name())[0].metadata.get_xml(),
        "SWE_time_meta.xml")
    compare_metadatas(
        res.get_logical_files(RefTimeseriesLogicalFile.type_name())[0].metadata.get_xml(),
        "msf_version.refts_meta.xml")
    compare_metadatas(
        res.get_logical_files(TimeSeriesLogicalFile.type_name())[0].metadata.get_xml(),
        "ODM2_Multi_Site_One_Variable_meta.xml")
def _test_valid_missing_optional_elements(self):
    """Helper: a json file missing optional metadata elements can still be
    set to RefTimeseries file type."""
    self.assertEqual(self.composite_resource.files.all().count(), 1)
    res_file = self.composite_resource.files.first()
    # the file has no logical file before the aggregation is created
    self.assertFalse(res_file.has_logical_file)
    self.assertEqual(RefTimeseriesLogicalFile.objects.count(), 0)
    # promote the json file to RefTimeseries file type
    RefTimeseriesLogicalFile.set_file_type(self.composite_resource, self.user, res_file.id)
    # exactly one aggregation must now exist
    self.assertEqual(RefTimeseriesLogicalFile.objects.count(), 1)
    # the stored json content matches the file's actual content
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    self.assertTrue(isinstance(logical_file, RefTimeseriesLogicalFile))
    self.assertEqual(logical_file.metadata.json_file_content, res_file.resource_file.read())
def test_file_rename(self):
    """A resource file that is part of a RefTimeseriesLogicalFile can be renamed."""
    self.create_composite_resource(self.refts_file)
    res_file = self.composite_resource.files.first()
    base_file_name, ext = os.path.splitext(res_file.file_name)
    self.assertEqual(res_file.file_name, self.refts_file_name)
    # create the refts aggregation
    RefTimeseriesLogicalFile.set_file_type(self.composite_resource, self.user, res_file.id)
    # the file starts out at the root of the resource (no folder)
    self.assertIsNone(res_file.file_folder)
    # renaming the aggregation file should be allowed
    src_path = 'data/contents/{}'.format(res_file.file_name)
    tgt_path = "data/contents/{0}_1{1}".format(base_file_name, ext)
    move_or_rename_file_or_folder(self.user, self.composite_resource.short_id, src_path,
                                  tgt_path)
    res_file = self.composite_resource.files.first()
    self.assertEqual(res_file.file_name, '{0}_1{1}'.format(base_file_name, ext))
    self.composite_resource.delete()
def test_aggregation_xml_file_paths(self):
    """Aggregation meta and map xml file paths track file renames and folder changes."""
    self.create_composite_resource(self.refts_file)
    # there should be one resource file
    self.assertEqual(self.composite_resource.files.all().count(), 1)
    res_file = self.composite_resource.files.first()
    base_file_name, ext = os.path.splitext(res_file.file_name)
    # the file has no logical file before the aggregation is created
    self.assertFalse(res_file.has_logical_file)
    # create the refts aggregation
    RefTimeseriesLogicalFile.set_file_type(self.composite_resource, self.user, res_file.id)
    res_file = self.composite_resource.files.first()
    self.assertEqual(res_file.logical_file_type_name, self.logical_file_type_name)
    logical_file = res_file.logical_file
    expected_meta_path = '{}_meta.xml'.format(res_file.file_name)
    expected_map_path = '{}_resmap.xml'.format(res_file.file_name)
    self.assertEqual(logical_file.metadata_short_file_path, expected_meta_path)
    self.assertEqual(logical_file.map_short_file_path, expected_map_path)

    # xml paths must follow the file into a new folder
    new_folder = 'test_folder'
    create_folder(self.composite_resource.short_id, 'data/contents/{}'.format(new_folder))
    src_path = 'data/contents/{}'.format(res_file.file_name)
    tgt_path = 'data/contents/{0}/{1}'.format(new_folder, res_file.file_name)
    move_or_rename_file_or_folder(self.user, self.composite_resource.short_id, src_path,
                                  tgt_path)
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    expected_meta_path = '{0}/{1}_meta.xml'.format(new_folder, res_file.file_name)
    expected_map_path = '{0}/{1}_resmap.xml'.format(new_folder, res_file.file_name)
    self.assertEqual(logical_file.metadata_short_file_path, expected_meta_path)
    self.assertEqual(logical_file.map_short_file_path, expected_map_path)

    # xml paths must follow a rename of the file
    src_path = 'data/contents/{0}/{1}'.format(new_folder, res_file.file_name)
    tgt_path = 'data/contents/{0}/{1}_1{2}'.format(new_folder, base_file_name, ext)
    move_or_rename_file_or_folder(self.user, self.composite_resource.short_id, src_path,
                                  tgt_path)
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    expected_meta_path = '{0}/{1}_meta.xml'.format(new_folder, res_file.file_name)
    expected_map_path = '{0}/{1}_resmap.xml'.format(new_folder, res_file.file_name)
    self.assertEqual(logical_file.metadata_short_file_path, expected_meta_path)
    self.assertEqual(logical_file.map_short_file_path, expected_map_path)

    # xml paths must follow a rename of the folder
    folder_rename = '{}_1'.format(new_folder)
    src_path = 'data/contents/{}'.format(new_folder)
    tgt_path = 'data/contents/{}'.format(folder_rename)
    move_or_rename_file_or_folder(self.user, self.composite_resource.short_id, src_path,
                                  tgt_path)
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    expected_meta_path = '{0}/{1}_meta.xml'.format(folder_rename, res_file.file_name)
    expected_map_path = '{0}/{1}_resmap.xml'.format(folder_rename, res_file.file_name)
    self.assertEqual(logical_file.metadata_short_file_path, expected_meta_path)
    self.assertEqual(logical_file.map_short_file_path, expected_map_path)
    self.composite_resource.delete()
def test_aggregation_name(self):
    """aggregation_name tracks file moves, file renames and folder renames."""
    self.create_composite_resource(self.refts_file)
    # there should be one resource file
    self.assertEqual(self.composite_resource.files.all().count(), 1)
    res_file = self.composite_resource.files.first()
    base_file_name, ext = os.path.splitext(res_file.file_name)
    # the file has no logical file before the aggregation is created
    self.assertEqual(res_file.has_logical_file, False)
    # set file to refts logical file type
    RefTimeseriesLogicalFile.set_file_type(self.composite_resource, self.user, res_file.id)
    res_file = self.composite_resource.files.first()
    self.assertEqual(res_file.logical_file_type_name, self.logical_file_type_name)
    logical_file = res_file.logical_file
    self.assertEqual(logical_file.aggregation_name, res_file.file_name)

    # aggregation name must follow the file into a new folder
    new_folder = 'refts_folder'
    create_folder(self.composite_resource.short_id, 'data/contents/{}'.format(new_folder))
    src_path = 'data/contents/{}'.format(res_file.file_name)
    tgt_path = 'data/contents/{0}/{1}'.format(new_folder, res_file.file_name)
    move_or_rename_file_or_folder(self.user, self.composite_resource.short_id, src_path,
                                  tgt_path)
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    expected_aggregation_name = '{0}/{1}'.format(new_folder, res_file.file_name)
    self.assertEqual(logical_file.aggregation_name, expected_aggregation_name)

    # aggregation name must follow a rename of the file
    src_path = 'data/contents/{0}/{1}'.format(new_folder, res_file.file_name)
    tgt_path = 'data/contents/{0}/{1}_1{2}'.format(new_folder, base_file_name, ext)
    move_or_rename_file_or_folder(self.user, self.composite_resource.short_id, src_path,
                                  tgt_path)
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    expected_aggregation_name = '{0}/{1}_1{2}'.format(new_folder, base_file_name, ext)
    self.assertEqual(logical_file.aggregation_name, expected_aggregation_name)

    # aggregation name must follow a rename of the folder
    folder_rename = '{}_1'.format(new_folder)
    src_path = 'data/contents/{}'.format(new_folder)
    tgt_path = 'data/contents/{}'.format(folder_rename)
    move_or_rename_file_or_folder(self.user, self.composite_resource.short_id, src_path,
                                  tgt_path)
    # FIX: re-fetch the resource file after the folder rename - the previously
    # held object is stale; the earlier rename steps in this test (and in
    # test_aggregation_xml_file_paths) re-fetch at this point
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    expected_aggregation_name = '{0}/{1}'.format(folder_rename, res_file.file_name)
    self.assertEqual(logical_file.aggregation_name, expected_aggregation_name)
    self.composite_resource.delete()