class CLIClient(object):
    """Simple command-line facade over the ingester platform API.

    Wraps an ``IngesterPlatformAPI`` instance and translates between JSON
    (given as a file path or a literal string) and marshalled domain objects.
    """

    def __init__(self, server):
        # No authentication is used for CLI access.
        self.client = IngesterPlatformAPI(server, None)

    def file_or_json(self, s):
        """Try to open the file or parse the string as JSON.

        :param s: path to a JSON file, or a literal JSON string
        :return: the domain object unmarshalled from the parsed JSON
        """
        if os.path.exists(s):
            # BUG FIX: previously this opened the undefined name ``r``
            # instead of the argument ``s``, so the file branch always
            # raised NameError.
            with open(s, "r") as f:
                obj = json.load(f)
        else:
            obj = json.loads(s)
        return self.client._marshaller.dict_to_obj(obj)

    def ping(self):
        """Check that the server is reachable."""
        return self.client.ping()

    def enable(self, ds_id):
        """Enable ingestion for the dataset with the given ID."""
        return self.client.enableDataset(ds_id)

    def disable(self, ds_id):
        """Disable ingestion for the dataset with the given ID."""
        return self.client.disableDataset(ds_id)

    def search(self, criteria, limit=20, offset=0):
        """Run a search and return the result set as plain dicts.

        :param criteria: path to a JSON file or JSON string of criteria
        :param limit: maximum number of results to return
        :param offset: result offset for paging
        """
        return self.client._marshaller.obj_to_dict(
            self.client.search(self.file_or_json(criteria), int(offset),
                               int(limit)))

    def logs(self, ds_id):
        """Fetch the ingester logs for a dataset as plain dicts."""
        return self.client._marshaller.obj_to_dict(
            self.client.getIngesterLogs(ds_id))

    def post(self, s):
        """Post the object described by ``s`` (file path or JSON string)."""
        obj = self.file_or_json(s)
        return self.client._marshaller.obj_to_dict(self.client.post(obj))

    def get(self, *args):
        """Get a schema, location, or dataset by ID.

        ``args[0]`` selects the object type ("schema", "location" or
        "dataset"); ``args[1]`` is its ID. Returns None for unknown types.
        """
        if args[0] == "schema":
            return self.client.getSchema(args[1])
        elif args[0] == "location":
            return self.client.getLocation(args[1])
        elif args[0] == "dataset":
            return self.client.getDataset(args[1])
class CLIClient(object):
    """Command-line facade over the ingester platform API.

    NOTE(review): this is a duplicate definition of ``CLIClient`` that
    shadows the earlier one in this file — consider removing one copy.
    """

    def __init__(self, server):
        # No authentication is used for CLI access.
        self.client = IngesterPlatformAPI(server, None)

    def file_or_json(self, s):
        """Try to open the file or parse the string as JSON.

        :param s: path to a JSON file, or a literal JSON string
        :return: the domain object unmarshalled from the parsed JSON
        """
        if os.path.exists(s):
            # BUG FIX: previously this opened the undefined name ``r``
            # instead of the argument ``s``, so the file branch always
            # raised NameError.
            with open(s, "r") as f:
                obj = json.load(f)
        else:
            obj = json.loads(s)
        return self.client._marshaller.dict_to_obj(obj)

    def ping(self):
        """Check that the server is reachable."""
        return self.client.ping()

    def enable(self, ds_id):
        """Enable ingestion for the dataset with the given ID."""
        return self.client.enableDataset(ds_id)

    def disable(self, ds_id):
        """Disable ingestion for the dataset with the given ID."""
        return self.client.disableDataset(ds_id)

    def search(self, criteria, limit=20, offset=0):
        """Run a search and return the result set as plain dicts.

        :param criteria: path to a JSON file or JSON string of criteria
        :param limit: maximum number of results to return
        :param offset: result offset for paging
        """
        return self.client._marshaller.obj_to_dict(
            self.client.search(self.file_or_json(criteria), int(offset),
                               int(limit)))

    def logs(self, ds_id):
        """Fetch the ingester logs for a dataset as plain dicts."""
        return self.client._marshaller.obj_to_dict(
            self.client.getIngesterLogs(ds_id))

    def post(self, s):
        """Post the object described by ``s`` (file path or JSON string)."""
        obj = self.file_or_json(s)
        return self.client._marshaller.obj_to_dict(self.client.post(obj))

    def get(self, *args):
        """Get a schema, location, or dataset by ID.

        ``args[0]`` selects the object type ("schema", "location" or
        "dataset"); ``args[1]`` is its ID. Returns None for unknown types.
        """
        if args[0] == "schema":
            return self.client.getSchema(args[1])
        elif args[0] == "location":
            return self.client.getLocation(args[1])
        elif args[0] == "dataset":
            return self.client.getDataset(args[1])
class SchemaTest(unittest.TestCase):
    """
    This test defines and checks that the Ingester API works the way the provisioning interface expects.
    """
    def setUp(self):
        self.auth = CredentialsAuthentication("casey", "password")
        self.ingester_platform = IngesterPlatformAPI("http://localhost:8080/api", self.auth)
        self.schemas = []

    def compare_schema_attrs(self, attrs_src, attrs_dst):
        # make a copy
        attrs_dst = attrs_dst.copy()
        
        for attr in attrs_src:
            found = False
            for attr_dst in attrs_dst:
                if attr in attrs_dst:
                    del attrs_dst[attr]
                    found = True
                    break
            self.assertTrue(found, "Attribute not found "+attr)
        self.assertEquals(0, len(attrs_dst), "Extra attributes in destination")
                    

    def test_data_metadata(self):
        work = self.ingester_platform.createUnitOfWork()
        schema = DataEntryMetadataSchema("Quality Assurance")
        schema.addAttr(Double("value"))
        schema.addAttr(String("description"))
        work.post(schema)
        work.commit()
        self.schemas.append(schema)

        ingested_schema = self.ingester_platform.getSchema(schema.id)
        self.compare_schema_attrs(ingested_schema.attrs, schema.attrs)
        self.assertEquals(ingested_schema.name, schema.name)

    def test_dataset_metadata(self):
        work = self.ingester_platform.createUnitOfWork()
        schema = DatasetMetadataSchema("Dataset Calibration")
        schema.addAttr(DateTime("date"))
        schema.addAttr(String("description"))
        work.post(schema)
        work.commit()
        self.schemas.append(schema)

        ingested_schema = self.ingester_platform.getSchema(schema.id)
        self.compare_schema_attrs(ingested_schema.attrs, schema.attrs)
        self.assertEquals(ingested_schema.name, schema.name)

    def test_data(self):
        work = self.ingester_platform.createUnitOfWork()
        schema = DataEntrySchema("Test123")
        schema.addAttr(Double("value"))
        schema.addAttr(String("description"))
        work.post(schema)
        work.commit()
        self.schemas.append(schema)

        ingested_schema = self.ingester_platform.getSchema(schema.id)
        self.compare_schema_attrs(ingested_schema.attrs, schema.attrs)
        self.assertEquals(ingested_schema.name, schema.name)

    def test_dup_data(self):
        work = self.ingester_platform.createUnitOfWork()
        schema = DataEntrySchema("Test123")
        schema.addAttr(Double("value"))
        schema.addAttr(String("description"))
        work.post(schema)
        work.commit()
        self.schemas.append(schema)

    def test_delete(self):
        work = self.ingester_platform.createUnitOfWork()
        for schema in self.schemas:
            work.delete(schema)
        work.commit()

        for schema in self.schemas:
            self.assertIsNone(self.ingester_platform.getSchema(schema.id))

    def tearDown(self):
        self.ingester_platform.close()
class ProvisioningInterfaceTest(unittest.TestCase):
    """
    This test defines and checks that the Ingester API works the way the provisioning interface expects.
    """
    def setUp(self):
        # Integration fixture: talks to a locally running ingester service.
        self.auth = CredentialsAuthentication("casey", "password")
        self.ingester_platform = IngesterPlatformAPI("http://localhost:8080/api", self.auth)
        self.cleanup_files = []

    def test_api_usage(self):
        """End-to-end walkthrough of the provisioning workflow.

        Exercises schema inheritance, region/location/dataset creation,
        manual data entry, search, file ingestion, metadata entry, and
        finally disabling and deleting the datasets.
        """
#       User data that is created by filling out the provisioning interface workflow steps.
        #   General
        title = "Test project"
        data_manager = "A Person"
        project_lead = "Another Person"

        #   Metadata
        project_region = Region("Test Region", ((1, 1), (2, 2),(2,1), (1,1)))

        #   Methods & Datasets
        loc1 = Location(11.0, 11.0, "Test Site", 100)
        loc2 = Location(11.0, 11.0, "Test Site", 100)
        loc3 = Location(12.0, 11.0, "Test Site", 100)

        temp_work = self.ingester_platform.createUnitOfWork()
        temperature_schema = DataEntrySchema("Test Temp Schema")
        temperature_schema.addAttr(Double("temperature"))
        temp_work.post(temperature_schema)
        temp_work.commit()

        air_temperature_schema = DataEntrySchema("Air Temp Schema")
        air_temperature_schema.extends = [temperature_schema.id]
        air_temperature_schema = self.ingester_platform.post(air_temperature_schema)

        second_level_inheritence_schema = DataEntrySchema("Second Inheritence")
        second_level_inheritence_schema.extends = [air_temperature_schema.id]
        second_level_inheritence_schema = self.ingester_platform.post(second_level_inheritence_schema)

        # Check the name is set
        temperature_schema_1 = self.ingester_platform.getSchema(temperature_schema.id)
        self.assertIsNotNone(temperature_schema.name)
        self.assertEquals(temperature_schema.name, temperature_schema_1.name)

        file_schema = DataEntrySchema()
        file_schema.addAttr(FileDataType("file"))
        file_schema = self.ingester_platform.post(file_schema)

        dataset1 = Dataset(location=None, schema=temperature_schema.id)
        dataset2 = Dataset(location=None, schema=file_schema.id, data_source=PullDataSource("http://test.com", "file_handle", processing_script="file://d:/processing_scripts/awsome_processing.py"))

#        dataset3 = Dataset(None, file_schema, PullDataSource("http://test.com", "file_handle"), CustomSampling("file://d:/sampling_scripts/awsome_sampling.py"), "file://d:/processing_scripts/awsome_processing.py")

        self.cleanup_files.append(dataset2.data_source.processing_script)
#        self.cleanup_files.push(dataset3.sampling.script)
#        self.cleanup_files.push(dataset3.processing_script)

#       Provisioning admin accepts the submitted project
        work = self.ingester_platform.createUnitOfWork()

        work.post(project_region)    # Save the region

        loc1.region = project_region.id                  # Set the datasets location to use the projects region
        work.post(loc1)                        # Save the location
        dataset1.location = loc1.id                            # Set the datasets location
        work.post(dataset1)                # Save the dataset

        loc2.region = project_region.id
        work.post(loc2)
        dataset2.location = loc2.id
        work.post(dataset2)

        work.commit()

        # Region, location and dataset id's will be saved to the project within the provisioning system in some way


#       User searches for datasets

        # TODO: Nigel? - Define searching api
        found_dataset_id = dataset1.id                  # The dataset that has an extended file schema

#       User manually enters data
        timestamp = datetime.datetime.now()
        data_entry_1 = DataEntry(found_dataset_id, timestamp)
        data_entry_1['temperature'] = 27.8                # Add the extended schema items
        data_entry_1 = self.ingester_platform.post(data_entry_1)
        self.assertIsNotNone(data_entry_1.id)

        timestamp2 = timestamp + datetime.timedelta(seconds=1)
        data_entry_2 = DataEntry(found_dataset_id, timestamp2)
        data_entry_2['temperature'] = 27.8                # Add the extended schema items
        data_entry_2 = self.ingester_platform.post(data_entry_2)

        # Paging checks: two entries total, fetched with varying offset/limit.
        self.assertEquals(2, len(self.ingester_platform.search(DataEntrySearchCriteria(found_dataset_id), 0, 10).results))
        result = self.ingester_platform.search(DataEntrySearchCriteria(found_dataset_id), 0, 1)
        self.assertEquals(2, result.count)
        self.assertEquals(1, len(result.results))
        self.assertEquals(1, len(self.ingester_platform.search(DataEntrySearchCriteria(found_dataset_id), 1, 1).results))

        result = self.ingester_platform.search(DataEntrySearchCriteria(found_dataset_id), 2, 1)
        self.assertEquals(0, len(result.results))

        # Time-window checks: windows before/after the entries are empty,
        # a window spanning both timestamps returns both entries.
        self.assertEquals(0, len(self.ingester_platform.search(DataEntrySearchCriteria(found_dataset_id, 
                                 end_time=timestamp-datetime.timedelta(seconds=60)), 0, 10).results))
        self.assertEquals(0, len(self.ingester_platform.search(DataEntrySearchCriteria(found_dataset_id, 
                                 start_time=timestamp+datetime.timedelta(seconds=60)), 0, 10).results))
        self.assertEquals(2, len(self.ingester_platform.search(DataEntrySearchCriteria(found_dataset_id, 
                                 start_time=timestamp-datetime.timedelta(seconds=60),
                                 end_time=timestamp+datetime.timedelta(seconds=60)), 0, 10).results))

        # Ingest a file-typed data entry and stream it back.
        work = self.ingester_platform.createUnitOfWork()
        data_entry_3 = DataEntry(dataset2.id, datetime.datetime.now())
        data_entry_3['file'] = FileObject(f_handle=open(os.path.join(
                    os.path.dirname(jcudc24ingesterapi.__file__), "tests/test_ingest.xml")), 
                    mime_type="text/xml")
        work.post(data_entry_3)
        work.commit()
        self.assertIsNotNone(data_entry_3.id)

        f_in = self.ingester_platform.getDataEntryStream(dataset2.id, data_entry_3.id, "file")
        self.assertIsNotNone(f_in)
        data = f_in.read()
        f_in.close()
        self.assertLess(0, len(data), "Expected data in file")

#       User enters quality assurance metadata
        quality_metadata_schema = DatasetMetadataSchema()
        quality_metadata_schema.addAttr(String("unit"))
        quality_metadata_schema.addAttr(String("description"))
        quality_metadata_schema.addAttr(Double("value"))
        quality_metadata_schema = self.ingester_platform.post(quality_metadata_schema)

        entered_metadata = DatasetMetadataEntry(data_entry_1.dataset, quality_metadata_schema.id)
        entered_metadata['unit'] = "%"
        entered_metadata['description'] = "Percent error"
        entered_metadata['value'] = 0.98

        entered_metadata = self.ingester_platform.post(entered_metadata)

        # Now find that metadata
        results = self.ingester_platform.search(DatasetMetadataSearchCriteria(data_entry_1.dataset),0 , 10).results
        self.assertEqual(1, len(results))


        data_entry_md_schema = DataEntryMetadataSchema("test")
        data_entry_md_schema.addAttr(String("description"))
        data_entry_md_schema.addAttr(Double("value"))
        data_entry_md_schema = self.ingester_platform.post(data_entry_md_schema)
        calibration = DataEntryMetadataEntry(metadata_schema_id=int(data_entry_md_schema.id), dataset_id=dataset2.id, object_id=data_entry_3.id)
        calibration["description"] = "Test"
        calibration["value"] = 1.2

        calibration2 = DataEntryMetadataEntry(metadata_schema_id=int(data_entry_md_schema.id), dataset_id=dataset2.id, object_id=data_entry_3.id)
        calibration2["description"] = "Test2"
        calibration2["value"] = 2.3
        calibration2 = self.ingester_platform.post(calibration2)

        # NOTE(review): the hard-coded ids 81 and 3648 look environment-specific
        # rather than derived from objects created above -- confirm intent.
        calibrations = self.ingester_platform.search(DataEntryMetadataSearchCriteria(int(81), int(3648)), offset=0, limit=1000)
        self.assertEquals(1, len(calibrations.results))
        self.assertEquals(calibrations.results[0].schema_id, data_entry_md_schema.id)

        self.ingester_platform.delete(calibration2)
        self.ingester_platform.delete(calibration)
        self.ingester_platform.delete(data_entry_md_schema)

#       User changes sampling rate
# FIXME: This test is going to be changed to be done by editing the dataset
#        sampling_rate_changed = Metadata(dataset1.id, type(dataset1), SampleRateMetadataSchema())
#        sampling_rate_changed.change_time = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
#        sampling_rate_changed.sampling = CustomSampling("file://d:/sampling_scripts/awsome_sampling.py")
#
#        try:
#            sampling_rate_changed = self.ingester_platform.post(sampling_rate_changed)
#            assert(sampling_rate_changed.metadata_id is None, "Sampling rate change failed")
#        except:
#            assert(True, "Sampling rate change failed")

#       User wants some random metadata specific to their project
# FIXME: Not sure what use case this is trying to demonstrate
#        random_metadata_schema =  DataEntryMetadataSchema()
#        random_metadata_schema.addAttr('random_field', Double())

#        random_metadata = Metadata(data_entry.data_entry_id, type(data_entry), random_metadata_schema)
#        random_metadata.random_field = 1.5

#        try:
#            random_metadata = self.ingester_platform.post(random_metadata)
#            assert(random_metadata.metadata_id is None, "random_metadata failed")
#        except:
#            assert(True, "random_metadata failed")

#       User changes the data source of the dataset
        new_data_source = PullDataSource("http://test.com/new_data", "file_handle")
        dataset1.data_source = new_data_source
        dataset1 = self.ingester_platform.post(dataset1)
        self.assertNotEqual(None, dataset1)

#       External, 3rd party searches for data
        # TODO: external 3rd parties should be able to use the api to get data without authentication
        # TODO: I'm not sure exactly how this should work, but the search api could be open access (need spam limitations or something?)

#       Project is disabled/finished
        work = self.ingester_platform.createUnitOfWork()
        work.disable(dataset1.id)
        work.disable(dataset2.id)
        work.commit()

#       Project is obsolete and data should be deleted
        work = self.ingester_platform.createUnitOfWork()
        work.delete(dataset1.id)
        work.delete(dataset2.id)
        work.commit()

    def test_parent_schemas(self):
        """This test creates a nested schema with attributes provided at 2
        different levels. A data entry is saved, and then retrieved, and the
        values tested.
        """
        loc1 = self.ingester_platform.post(Location(11.0, 11.0, "Test Site", 100))

        temp_work = self.ingester_platform.createUnitOfWork()
        temperature_schema = DataEntrySchema("Test Temp Schema")
        temperature_schema.addAttr(Double("temperature"))
        temp_work.post(temperature_schema)
        temp_work.commit()

        air_temperature_schema = DataEntrySchema("Air Temp Schema")
        air_temperature_schema.extends = [temperature_schema.id]
        air_temperature_schema = self.ingester_platform.post(air_temperature_schema)

        instrument_schema = DataEntrySchema("Instrument Schema")
        instrument_schema.extends = [air_temperature_schema.id]
        instrument_schema.addAttr(Double("var2"))
        instrument_schema = self.ingester_platform.post(instrument_schema)

        dataset = Dataset(location=loc1.id, schema=instrument_schema.id)
        dataset = self.ingester_platform.post(dataset)

        work = self.ingester_platform.createUnitOfWork()
        data_entry = DataEntry(dataset.id, datetime.datetime.now())
        data_entry["temperature"] = 10
        data_entry["var2"] = 11
        work.post(data_entry)
        work.commit()

        data_entry_ret = self.ingester_platform.getDataEntry(dataset.id, data_entry.id)

        # Attributes from both inheritance levels must round-trip.
        self.assertEquals(data_entry["temperature"], data_entry_ret["temperature"])
        self.assertEquals(data_entry["var2"], data_entry_ret["var2"])


    def testMultiDatasetExtraction(self):
        """This test demonstrates use case #402.
        There are 2 datasets created, the first holds a datafile, and has a pull ingest occurring, along with 
        a configured custom script. The second dataset holds observation data, that will be extracted from the
        datafile in the first dataset.
        """
        temperature_schema = DataEntrySchema()
        temperature_schema.addAttr(Double("Temperature"))   
        temperature_schema = self.ingester_platform.post(temperature_schema)

        file_schema = DataEntrySchema()
        file_schema.addAttr(FileDataType("file"))
        file_schema = self.ingester_platform.post(file_schema)

        location = self.ingester_platform.post(Location(10.0, 11.0, "Test Site", 100))
        temp_dataset = Dataset(location=None, schema=temperature_schema.id)

        file_dataset = Dataset(location=None, schema=file_schema.id, data_source=PullDataSource("http://test.com", "file_handle", processing_script="file://d:/processing_scripts/awsome_processing.py"))


    def test_listeners(self):
        """Verify the attribute listener fires when a posted object gets an id."""
        # Use a list to beat the closure
        called = [False] 

        def loc_listener(obj, var, value):
            # The listener will be called when the object is posted
            # and when it is committed, so we want to filter out the 
            # post call
            if var == "_id" and value > 0:
                called.remove(False)
                called.append(True)

        loc = Location()
        loc.name = "Test Loc1"
        loc.set_listener(loc_listener)

        work = self.ingester_platform.createUnitOfWork()
        work.post(loc)
        work.commit()

        self.assertTrue(called[0])

    def tearDown(self):
        # Best-effort cleanup: reset the server state and remove any
        # processing scripts recorded by the tests.
        self.ingester_platform.reset()

        for f in self.cleanup_files:
            try:
                os.remove(f)
            except:
                print "failed to remove file: " + f
class SchemaTest(unittest.TestCase):
    """
    This test defines and checks that the Ingester API works the way the provisioning interface expects.
    """
    def setUp(self):
        self.auth = CredentialsAuthentication("casey", "password")
        self.ingester_platform = IngesterPlatformAPI(
            "http://localhost:8080/api", self.auth)
        self.schemas = []

    def compare_schema_attrs(self, attrs_src, attrs_dst):
        # make a copy
        attrs_dst = attrs_dst.copy()

        for attr in attrs_src:
            found = False
            for attr_dst in attrs_dst:
                if attr in attrs_dst:
                    del attrs_dst[attr]
                    found = True
                    break
            self.assertTrue(found, "Attribute not found " + attr)
        self.assertEquals(0, len(attrs_dst), "Extra attributes in destination")

    def test_data_metadata(self):
        work = self.ingester_platform.createUnitOfWork()
        schema = DataEntryMetadataSchema("Quality Assurance")
        schema.addAttr(Double("value"))
        schema.addAttr(String("description"))
        work.post(schema)
        work.commit()
        self.schemas.append(schema)

        ingested_schema = self.ingester_platform.getSchema(schema.id)
        self.compare_schema_attrs(ingested_schema.attrs, schema.attrs)
        self.assertEquals(ingested_schema.name, schema.name)

    def test_dataset_metadata(self):
        work = self.ingester_platform.createUnitOfWork()
        schema = DatasetMetadataSchema("Dataset Calibration")
        schema.addAttr(DateTime("date"))
        schema.addAttr(String("description"))
        work.post(schema)
        work.commit()
        self.schemas.append(schema)

        ingested_schema = self.ingester_platform.getSchema(schema.id)
        self.compare_schema_attrs(ingested_schema.attrs, schema.attrs)
        self.assertEquals(ingested_schema.name, schema.name)

    def test_data(self):
        work = self.ingester_platform.createUnitOfWork()
        schema = DataEntrySchema("Test123")
        schema.addAttr(Double("value"))
        schema.addAttr(String("description"))
        work.post(schema)
        work.commit()
        self.schemas.append(schema)

        ingested_schema = self.ingester_platform.getSchema(schema.id)
        self.compare_schema_attrs(ingested_schema.attrs, schema.attrs)
        self.assertEquals(ingested_schema.name, schema.name)

    def test_dup_data(self):
        work = self.ingester_platform.createUnitOfWork()
        schema = DataEntrySchema("Test123")
        schema.addAttr(Double("value"))
        schema.addAttr(String("description"))
        work.post(schema)
        work.commit()
        self.schemas.append(schema)

    def test_delete(self):
        work = self.ingester_platform.createUnitOfWork()
        for schema in self.schemas:
            work.delete(schema)
        work.commit()

        for schema in self.schemas:
            self.assertIsNone(self.ingester_platform.getSchema(schema.id))

    def tearDown(self):
        self.ingester_platform.close()
class ProvisioningInterfaceTest(unittest.TestCase):
    """
    This test defines and checks that the Ingester API works the way the provisioning interface expects.
    """
    def setUp(self):
        """Create an authenticated API client and a list of files to clean up."""
        credentials = CredentialsAuthentication("casey", "password")
        self.auth = credentials
        self.ingester_platform = IngesterPlatformAPI("http://localhost:8080/api",
                                                     credentials)
        self.cleanup_files = []

    def test_api_usage(self):
        #       User data that is created by filling out the provisioning interface workflow steps.
        #   General
        title = "Test project"
        data_manager = "A Person"
        project_lead = "Another Person"

        #   Metadata
        project_region = Region("Test Region",
                                ((1, 1), (2, 2), (2, 1), (1, 1)))

        #   Methods & Datasets
        loc1 = Location(11.0, 11.0, "Test Site", 100)
        loc2 = Location(11.0, 11.0, "Test Site", 100)
        loc3 = Location(12.0, 11.0, "Test Site", 100)

        temp_work = self.ingester_platform.createUnitOfWork()
        temperature_schema = DataEntrySchema("Test Temp Schema")
        temperature_schema.addAttr(Double("temperature"))
        temp_work.post(temperature_schema)
        temp_work.commit()

        air_temperature_schema = DataEntrySchema("Air Temp Schema")
        air_temperature_schema.extends = [temperature_schema.id]
        air_temperature_schema = self.ingester_platform.post(
            air_temperature_schema)

        second_level_inheritence_schema = DataEntrySchema("Second Inheritence")
        second_level_inheritence_schema.extends = [air_temperature_schema.id]
        second_level_inheritence_schema = self.ingester_platform.post(
            second_level_inheritence_schema)

        # Check the name is set
        temperature_schema_1 = self.ingester_platform.getSchema(
            temperature_schema.id)
        self.assertIsNotNone(temperature_schema.name)
        self.assertEquals(temperature_schema.name, temperature_schema_1.name)

        file_schema = DataEntrySchema()
        file_schema.addAttr(FileDataType("file"))
        file_schema = self.ingester_platform.post(file_schema)

        dataset1 = Dataset(location=None, schema=temperature_schema.id)
        dataset2 = Dataset(
            location=None,
            schema=file_schema.id,
            data_source=PullDataSource(
                "http://test.com",
                "file_handle",
                processing_script=
                "file://d:/processing_scripts/awsome_processing.py"))

        #        dataset3 = Dataset(None, file_schema, PullDataSource("http://test.com", "file_handle"), CustomSampling("file://d:/sampling_scripts/awsome_sampling.py"), "file://d:/processing_scripts/awsome_processing.py")

        self.cleanup_files.append(dataset2.data_source.processing_script)
        #        self.cleanup_files.push(dataset3.sampling.script)
        #        self.cleanup_files.push(dataset3.processing_script)

        #       Provisioning admin accepts the submitted project
        work = self.ingester_platform.createUnitOfWork()

        work.post(project_region)  # Save the region

        loc1.region = project_region.id  # Set the datasets location to use the projects region
        work.post(loc1)  # Save the location
        dataset1.location = loc1.id  # Set the datasets location
        work.post(dataset1)  # Save the dataset

        loc2.region = project_region.id
        work.post(loc2)
        dataset2.location = loc2.id
        work.post(dataset2)

        work.commit()

        # Region, location and dataset id's will be saved to the project within the provisioning system in some way

        #       User searches for datasets

        # TODO: Nigel? - Define searching api
        found_dataset_id = dataset1.id  # The dataset that has an extended file schema

        #       User manually enters data
        timestamp = datetime.datetime.now()
        data_entry_1 = DataEntry(found_dataset_id, timestamp)
        data_entry_1['temperature'] = 27.8  # Add the extended schema items
        data_entry_1 = self.ingester_platform.post(data_entry_1)
        self.assertIsNotNone(data_entry_1.id)

        timestamp2 = timestamp + datetime.timedelta(seconds=1)
        data_entry_2 = DataEntry(found_dataset_id, timestamp2)
        data_entry_2['temperature'] = 27.8  # Add the extended schema items
        data_entry_2 = self.ingester_platform.post(data_entry_2)

        self.assertEquals(
            2,
            len(
                self.ingester_platform.search(
                    DataEntrySearchCriteria(found_dataset_id), 0, 10).results))
        result = self.ingester_platform.search(
            DataEntrySearchCriteria(found_dataset_id), 0, 1)
        self.assertEquals(2, result.count)
        self.assertEquals(1, len(result.results))
        self.assertEquals(
            1,
            len(
                self.ingester_platform.search(
                    DataEntrySearchCriteria(found_dataset_id), 1, 1).results))

        result = self.ingester_platform.search(
            DataEntrySearchCriteria(found_dataset_id), 2, 1)
        self.assertEquals(0, len(result.results))

        self.assertEquals(
            0,
            len(
                self.ingester_platform.search(
                    DataEntrySearchCriteria(found_dataset_id,
                                            end_time=timestamp -
                                            datetime.timedelta(seconds=60)), 0,
                    10).results))
        self.assertEquals(
            0,
            len(
                self.ingester_platform.search(
                    DataEntrySearchCriteria(found_dataset_id,
                                            start_time=timestamp +
                                            datetime.timedelta(seconds=60)), 0,
                    10).results))
        self.assertEquals(
            2,
            len(
                self.ingester_platform.search(
                    DataEntrySearchCriteria(
                        found_dataset_id,
                        start_time=timestamp - datetime.timedelta(seconds=60),
                        end_time=timestamp + datetime.timedelta(seconds=60)),
                    0, 10).results))

        work = self.ingester_platform.createUnitOfWork()
        data_entry_3 = DataEntry(dataset2.id, datetime.datetime.now())
        data_entry_3['file'] = FileObject(f_handle=open(
            os.path.join(os.path.dirname(jcudc24ingesterapi.__file__),
                         "tests/test_ingest.xml")),
                                          mime_type="text/xml")
        work.post(data_entry_3)
        work.commit()
        self.assertIsNotNone(data_entry_3.id)

        f_in = self.ingester_platform.getDataEntryStream(
            dataset2.id, data_entry_3.id, "file")
        self.assertIsNotNone(f_in)
        data = f_in.read()
        f_in.close()
        self.assertLess(0, len(data), "Expected data in file")

        #       User enters quality assurance metadata
        quality_metadata_schema = DatasetMetadataSchema()
        quality_metadata_schema.addAttr(String("unit"))
        quality_metadata_schema.addAttr(String("description"))
        quality_metadata_schema.addAttr(Double("value"))
        quality_metadata_schema = self.ingester_platform.post(
            quality_metadata_schema)

        entered_metadata = DatasetMetadataEntry(data_entry_1.dataset,
                                                quality_metadata_schema.id)
        entered_metadata['unit'] = "%"
        entered_metadata['description'] = "Percent error"
        entered_metadata['value'] = 0.98

        entered_metadata = self.ingester_platform.post(entered_metadata)

        # Now find that metadata
        results = self.ingester_platform.search(
            DatasetMetadataSearchCriteria(data_entry_1.dataset), 0, 10).results
        self.assertEqual(1, len(results))

        data_entry_md_schema = DataEntryMetadataSchema("test")
        data_entry_md_schema.addAttr(String("description"))
        data_entry_md_schema.addAttr(Double("value"))
        data_entry_md_schema = self.ingester_platform.post(
            data_entry_md_schema)
        calibration = DataEntryMetadataEntry(metadata_schema_id=int(
            data_entry_md_schema.id),
                                             dataset_id=dataset2.id,
                                             object_id=data_entry_3.id)
        calibration["description"] = "Test"
        calibration["value"] = 1.2

        calibration2 = DataEntryMetadataEntry(metadata_schema_id=int(
            data_entry_md_schema.id),
                                              dataset_id=dataset2.id,
                                              object_id=data_entry_3.id)
        calibration2["description"] = "Test2"
        calibration2["value"] = 2.3
        calibration2 = self.ingester_platform.post(calibration2)

        calibrations = self.ingester_platform.search(
            DataEntryMetadataSearchCriteria(int(81), int(3648)),
            offset=0,
            limit=1000)
        self.assertEquals(1, len(calibrations.results))
        self.assertEquals(calibrations.results[0].schema_id,
                          data_entry_md_schema.id)

        self.ingester_platform.delete(calibration2)
        self.ingester_platform.delete(calibration)
        self.ingester_platform.delete(data_entry_md_schema)

        #       User changes sampling rate
        # FIXME: This test is going to be changed to be done by editing the dataset
        #        sampling_rate_changed = Metadata(dataset1.id, type(dataset1), SampleRateMetadataSchema())
        #        sampling_rate_changed.change_time = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
        #        sampling_rate_changed.sampling = CustomSampling("file://d:/sampling_scripts/awsome_sampling.py")
        #
        #        try:
        #            sampling_rate_changed = self.ingester_platform.post(sampling_rate_changed)
        #            assert(sampling_rate_changed.metadata_id is None, "Sampling rate change failed")
        #        except:
        #            assert(True, "Sampling rate change failed")

        #       User wants some random metadata specific to their project
        # FIXME: Not sure what use case this is trying to demonstrate
        #        random_metadata_schema =  DataEntryMetadataSchema()
        #        random_metadata_schema.addAttr('random_field', Double())

        #        random_metadata = Metadata(data_entry.data_entry_id, type(data_entry), random_metadata_schema)
        #        random_metadata.random_field = 1.5

        #        try:
        #            random_metadata = self.ingester_platform.post(random_metadata)
        #            assert(random_metadata.metadata_id is None, "random_metadata failed")
        #        except:
        #            assert(True, "random_metadata failed")

        #       User changes the data source of the dataset
        new_data_source = PullDataSource("http://test.com/new_data",
                                         "file_handle")
        dataset1.data_source = new_data_source
        dataset1 = self.ingester_platform.post(dataset1)
        self.assertNotEqual(None, dataset1)

        #       External, 3rd party searches for data
        # TODO: external 3rd parties should be able to use the api to get data without authentication
        # TODO: I'm not sure exactly how this should work, but the search api could be open access (need spam limitations or something?)

        #       Project is disabled/finished
        work = self.ingester_platform.createUnitOfWork()
        work.disable(dataset1.id)
        work.disable(dataset2.id)
        work.commit()

        #       Project is obsolete and data should be deleted
        work = self.ingester_platform.createUnitOfWork()
        work.delete(dataset1.id)
        work.delete(dataset2.id)
        work.commit()

    def test_parent_schemas(self):
        """Create a nested schema hierarchy with attributes declared at two
        different levels, save a data entry against the leaf schema, retrieve
        it, and check that both inherited and locally declared attribute
        values round-trip intact.
        """
        loc1 = self.ingester_platform.post(
            Location(11.0, 11.0, "Test Site", 100))

        # Base schema contributes the "temperature" attribute.
        temp_work = self.ingester_platform.createUnitOfWork()
        temperature_schema = DataEntrySchema("Test Temp Schema")
        temperature_schema.addAttr(Double("temperature"))
        temp_work.post(temperature_schema)
        temp_work.commit()

        # Intermediate schema declares no attributes of its own; it only
        # extends the base, exercising multi-level inheritance.
        air_temperature_schema = DataEntrySchema("Air Temp Schema")
        air_temperature_schema.extends = [temperature_schema.id]
        air_temperature_schema = self.ingester_platform.post(
            air_temperature_schema)

        # Leaf schema adds "var2" on top of the inherited chain.
        instrument_schema = DataEntrySchema("Instrument Schema")
        instrument_schema.extends = [air_temperature_schema.id]
        instrument_schema.addAttr(Double("var2"))
        instrument_schema = self.ingester_platform.post(instrument_schema)

        dataset = Dataset(location=loc1.id, schema=instrument_schema.id)
        dataset = self.ingester_platform.post(dataset)

        work = self.ingester_platform.createUnitOfWork()
        data_entry = DataEntry(dataset.id, datetime.datetime.now())
        data_entry["temperature"] = 10  # inherited from the base schema
        data_entry["var2"] = 11  # declared on the leaf schema
        work.post(data_entry)
        work.commit()

        data_entry_ret = self.ingester_platform.getDataEntry(
            dataset.id, data_entry.id)

        # assertEquals is a deprecated alias of assertEqual (removed in
        # Python 3.12); use the canonical name.
        self.assertEqual(data_entry["temperature"],
                         data_entry_ret["temperature"])
        self.assertEqual(data_entry["var2"], data_entry_ret["var2"])

    def testMultiDatasetExtraction(self):
        """Demonstrates use case #402.

        Two datasets are set up: the first holds a datafile, populated by a
        pull ingest with a custom processing script attached; the second
        holds observation data, which the script is meant to extract from
        the datafile in the first dataset.
        """
        # Schema for the extracted observation values.
        observation_schema = DataEntrySchema()
        observation_schema.addAttr(Double("Temperature"))
        observation_schema = self.ingester_platform.post(observation_schema)

        # Schema for the raw ingested datafile.
        raw_file_schema = DataEntrySchema()
        raw_file_schema.addAttr(FileDataType("file"))
        raw_file_schema = self.ingester_platform.post(raw_file_schema)

        site = self.ingester_platform.post(
            Location(10.0, 11.0, "Test Site", 100))

        # Second dataset: holds the observations extracted from the file.
        observation_dataset = Dataset(location=None,
                                      schema=observation_schema.id)

        # First dataset: pull ingest of the datafile plus the custom
        # processing script that performs the extraction.
        pull_source = PullDataSource(
            "http://test.com",
            "file_handle",
            processing_script=
            "file://d:/processing_scripts/awsome_processing.py")
        file_dataset = Dataset(location=None,
                               schema=raw_file_schema.id,
                               data_source=pull_source)

    def test_listeners(self):
        """Verify that an attribute listener attached to an object fires
        once the object is assigned a real id during unit-of-work commit."""
        # One-element list so the nested listener can mutate state that
        # outlives its own scope (pre-`nonlocal` closure workaround).
        called = [False]

        def on_attr_change(obj, attr, new_value):
            # The listener fires both when the object is posted and when
            # the commit assigns the real id; only react to an "_id"
            # update with a positive value (i.e. the commit).
            if attr != "_id":
                return
            if new_value > 0:
                called.remove(False)
                called.append(True)

        loc = Location()
        loc.name = "Test Loc1"
        loc.set_listener(on_attr_change)

        uow = self.ingester_platform.createUnitOfWork()
        uow.post(loc)
        uow.commit()

        self.assertTrue(called[0])

    def tearDown(self):
        """Reset the ingester platform and remove any files queued for
        cleanup during the test run."""
        self.ingester_platform.reset()

        for f in self.cleanup_files:
            try:
                os.remove(f)
            except OSError:
                # Best-effort cleanup: the file may already be gone or be
                # locked. A bare except here would also swallow
                # KeyboardInterrupt/SystemExit, so catch only what
                # os.remove raises, report, and keep removing the rest.
                # print() with a single argument is valid in both
                # Python 2 and 3.
                print("failed to remove file: " + f)