def test_appendfile(self):
        """
        Save the same dataset twice into one archive file and verify the
        second save appends along the time axis instead of overwriting.
        """
        ds = dap_tools.simple_grid_dataset()
        # NOTE(review): the empty file is created at fname + '.nc' but the
        # reads below pass fname without the suffix — presumably
        # read_netcdf_from_file supplies '.nc' itself; confirm, since
        # test_newfile passes the full '.nc' path.
        fname = os.path.join(TEST_ARCHIVE_PATH, ds.name)
        self.fname_test = fname

        # Pre-create an empty archive file so the persister writes into it.
        with open(fname + '.nc', "w"):
            pass

        p = persister.PersisterConsumer()

        # New file: first save must succeed (0 == success).
        retval = p._save_dap_dataset(ds, TEST_ARCHIVE_PATH)
        self.assertEqual(retval, 0)
        ds_r = dap_tools.read_netcdf_from_file(fname)
        self.assertIn(ds.name, ds_r.name)

        # Append: saving the same dataset again must also succeed.
        retval = p._save_dap_dataset(ds, TEST_ARCHIVE_PATH)
        self.assertEqual(retval, 0)
        ds_r = dap_tools.read_netcdf_from_file(fname)
        self.assertIn(ds.name, ds_r.name)
        # The original time values are still at the start of the file...
        barray = ds.grid.time[0:3] == ds_r.grid.time[0:3]
        # self.assert_ is a deprecated alias (removed in Python 3.12);
        # use assertTrue instead.
        self.assertTrue(barray.all())
        # ...and appear again in the appended region.
        barray = ds.grid.time[0:3] == ds_r.grid.time[4:7]
        self.assertTrue(barray.all())
    def test_appendfile(self):
        """
        Save the same dataset twice into one archive file and verify the
        second save appends along the time axis instead of overwriting.
        """
        ds = dap_tools.simple_grid_dataset()
        # NOTE(review): the empty file is created at fname + '.nc' but the
        # reads below pass fname without the suffix — presumably
        # read_netcdf_from_file supplies '.nc' itself; confirm, since
        # test_newfile passes the full '.nc' path.
        fname = os.path.join(TEST_ARCHIVE_PATH, ds.name)
        self.fname_test = fname

        # Pre-create an empty archive file so the persister writes into it.
        with open(fname + '.nc', "w"):
            pass

        p = persister.PersisterConsumer()

        # New file: first save must succeed (0 == success).
        retval = p._save_dap_dataset(ds, TEST_ARCHIVE_PATH)
        self.assertEqual(retval, 0)
        ds_r = dap_tools.read_netcdf_from_file(fname)
        self.assertIn(ds.name, ds_r.name)

        # Append: saving the same dataset again must also succeed.
        retval = p._save_dap_dataset(ds, TEST_ARCHIVE_PATH)
        self.assertEqual(retval, 0)
        ds_r = dap_tools.read_netcdf_from_file(fname)
        self.assertIn(ds.name, ds_r.name)
        # The original time values are still at the start of the file...
        barray = ds.grid.time[0:3] == ds_r.grid.time[0:3]
        # self.assert_ is a deprecated alias (removed in Python 3.12);
        # use assertTrue instead.
        self.assertTrue(barray.all())
        # ...and appear again in the appended region.
        barray = ds.grid.time[0:3] == ds_r.grid.time[4:7]
        self.assertTrue(barray.all())
 def setUp(self):
     """Create a fresh simple grid dataset fixture for each test."""
     # Commented-out example of constructing a custom dataset was removed;
     # the default simple_grid_dataset() is sufficient for these tests.
     self.ds1 = dap_tools.simple_grid_dataset()
 def setUp(self):
     """Create a fresh simple grid dataset fixture for each test."""
     # Commented-out example of constructing a custom dataset was removed;
     # the default simple_grid_dataset() is sufficient for these tests.
     self.ds1 = dap_tools.simple_grid_dataset()
    def setUp(self):
        """
        Build the integration fixture: start the capability container,
        declare a global fanout queue, create a sample grid dataset and an
        empty target archive file, then spawn a PersisterConsumer child
        attached to the queue.

        Stores on self: queue1, fname_test, child1, dc1, ds, fname.
        """
        yield self._start_container()
        #self.sup = yield self._spawn_processes(services)

        #Create a test queue
        queue1 = dataobject.create_unique_identity()
        queue_properties = {
            queue1: {
                'name_type': 'fanout',
                'args': {
                    'scope': 'global'
                }
            }
        }
        yield bootstrap.declare_messaging(queue_properties)
        self.queue1 = queue1

        # Create a dataset to test with
        ds = dap_tools.simple_grid_dataset()
        fname = os.path.join(TEST_ARCHIVE_PATH, ds.name + '.nc')
        self.fname_test = fname

        # Pre-create an empty archive file for the persister to write into.
        fid = open(fname, "w")
        fid.close()

        # Create a persister process
        pd1 = {
            'name': 'persister_number_1',
            'module': 'ion.services.dm.preservation.persister',
            'procclass': 'PersisterConsumer',
            'spawnargs': {
                'attach': self.queue1,
                'process parameters': {
                    'filename': fname
                }
            }
        }

        self.child1 = base_consumer.ConsumerDesc(**pd1)
        child1_id = yield self.test_sup.spawn_child(self.child1)

        # Don't do this - you can only get the instance in a test case -
        # this is not a valid pattern in OTP
        self.dc1 = self._get_procinstance(child1_id)
        # Make sure it is up and working!
        self.assertIn(self.queue1, self.dc1.dataReceivers)

        self.ds = ds
        self.fname = fname
    def test_nofile(self):
        """
        Test that the persister works against a new file
        """
        dataset = dap_tools.simple_grid_dataset()
        # Build the target path, but deliberately leave the file uncreated.
        fname = os.path.join(TEST_ARCHIVE_PATH, dataset.name)

        consumer = persister.PersisterConsumer()
        retval = consumer._save_dap_dataset(dataset, TEST_ARCHIVE_PATH)

        # With no pre-existing file the save must report failure (1).
        # Result is not Zero return value - change to exception?
        self.assertEqual(retval, 1)
    def test_nofile(self):
        """
        Verify the persister reports failure when the target archive file
        does not already exist.
        """
        ds = dap_tools.simple_grid_dataset()
        fname = os.path.join(TEST_ARCHIVE_PATH, ds.name)

        # Deliberately do NOT create the empty file here.

        p = persister.PersisterConsumer()

        retval = p._save_dap_dataset(ds, TEST_ARCHIVE_PATH)

        # Non-zero return value signals failure - change to exception?
        self.assertEqual(retval, 1)
    def test_newfile(self):
        """
        Test that the persister works against a new file
        """
        dataset = dap_tools.simple_grid_dataset()
        fname = os.path.join(TEST_ARCHIVE_PATH, dataset.name + '.nc')
        self.fname_test = fname

        # Pre-create an empty target file for the persister to fill.
        handle = open(fname, "w")
        handle.close()

        consumer = persister.PersisterConsumer()
        retval = consumer._save_dap_dataset(dataset, TEST_ARCHIVE_PATH)

        # Zero return value means the save succeeded.
        self.assertEqual(retval, 0)

        # Read the archive back and confirm the dataset name round-trips.
        result = dap_tools.read_netcdf_from_file(fname)
        self.assertIn(dataset.name, result.name)
    def test_newfile(self):
        """
        Verify the persister can save a dataset into a pre-existing
        (empty) archive file and that the contents read back correctly.
        """
        ds = dap_tools.simple_grid_dataset()
        fname = os.path.join(TEST_ARCHIVE_PATH, ds.name + '.nc')
        self.fname_test = fname

        # Pre-create an empty target file for the persister to fill;
        # 'with' guarantees the handle is closed.
        with open(fname, "w"):
            pass

        p = persister.PersisterConsumer()

        retval = p._save_dap_dataset(ds, TEST_ARCHIVE_PATH)

        # Zero return value means the save succeeded.
        self.assertEqual(retval, 0)

        # Read the archive back and confirm the dataset name round-trips.
        ds_r = dap_tools.read_netcdf_from_file(fname)

        self.assertIn(ds.name, ds_r.name)
    def setUp(self):
        """
        Build the integration fixture: start the capability container,
        declare a global fanout queue, create a sample grid dataset and an
        empty target archive file, then spawn a PersisterConsumer child
        attached to the queue.

        Stores on self: queue1, fname_test, child1, dc1, ds, fname.
        """
        yield self._start_container()
        #self.sup = yield self._spawn_processes(services)

        # Create a test queue
        queue1 = dataobject.create_unique_identity()
        queue_properties = {
            queue1: {
                'name_type': 'fanout',
                'args': {
                    'scope': 'global'
                }
            }
        }
        yield bootstrap.declare_messaging(queue_properties)
        self.queue1 = queue1

        # Create a dataset to test with
        ds = dap_tools.simple_grid_dataset()
        fname = os.path.join(TEST_ARCHIVE_PATH, ds.name + '.nc')
        self.fname_test = fname

        # Pre-create an empty archive file for the persister to write into.
        fid = open(fname, "w")
        fid.close()

        # Create a persister process
        pd1 = {
            'name': 'persister_number_1',
            'module': 'ion.services.dm.preservation.persister',
            'procclass': 'PersisterConsumer',
            'spawnargs': {
                'attach': self.queue1,
                'process parameters': {
                    'filename': fname
                }
            }
        }

        self.child1 = base_consumer.ConsumerDesc(**pd1)
        child1_id = yield self.test_sup.spawn_child(self.child1)

        # Don't do this - you can only get the instance in a test case -
        # this is not a valid pattern in OTP
        self.dc1 = self._get_procinstance(child1_id)
        # Make sure it is up and working!
        self.assertIn(self.queue1, self.dc1.dataReceivers)

        self.ds = ds
        self.fname = fname