def export_from_datastore(output_key, output, target):
    """
    :param output_key: a pair (ds_key, fmt)
    :param output: an Output instance
    :param target: a directory, temporary when called from the engine server
    """
    ds_key, fmt = output_key
    assert ds_key == output.ds_key, (ds_key, output.ds_key)
    datadir = os.path.dirname(output.oq_job.ds_calc_dir)
    dstore = DataStore(output.oq_job.id, datadir, mode="r")
    dstore.export_dir = target
    try:
        exported = ds_export((output.ds_key, fmt), dstore)
    except KeyError:
        raise DataStoreExportError(
            "Could not export %s in %s" % (output.ds_key, fmt))
    if not exported:
        raise DataStoreExportError(
            "Nothing to export for %s" % output.ds_key)
    elif len(exported) > 1:
        # NB: I am hiding the archive by starting its name with a '.',
        # to avoid confusing the users, since the unzipped files are
        # already in the target directory; the archive is used internally
        # by the WebUI, so it must be there; it would be nice not to
        # generate it when not using the WebUI, but I will leave that
        # feature for after the removal of the old calculators
        archname = "." + output.ds_key + "-" + fmt + ".zip"
        zipfiles(exported, os.path.join(target, archname))
        return os.path.join(target, archname)
    else:  # single file
        return exported[0]
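# Hedged usage sketch, not part of the function above: the multi-file
# branch bundles the exported files into a dot-prefixed ("hidden") zip
# archive. The stdlib-only helper below reproduces that convention under
# the assumption that `zipfiles` behaves like a plain zip-and-write;
# the helper name `zip_hidden` is hypothetical.
import os
import zipfile


def zip_hidden(exported, target, ds_key, fmt):
    archname = "." + ds_key + "-" + fmt + ".zip"
    archpath = os.path.join(target, archname)
    with zipfile.ZipFile(archpath, "w") as z:
        for fname in exported:
            # store each file flat, without its directory prefix
            z.write(fname, os.path.basename(fname))
    return archpath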
def test_spatial_correlation(self):
    expected = {sc1: [0.99, 0.41], sc2: [0.99, 0.64], sc3: [0.99, 0.22]}
    for case in expected:
        self.run_calc(case.__file__, 'job.ini')
        oq = self.calc.oqparam
        self.assertEqual(list(oq.imtls), ['PGA'])
        dstore = DataStore(self.calc.datastore.calc_id)
        gmf_by_rupid = groupby(
            dstore['gmfs/col00'].value,
            lambda row: row['idx'],
            lambda rows: [row['BooreAtkinson2008']['PGA'] for row in rows])
        dstore.close()
        gmvs_site_1 = []
        gmvs_site_2 = []
        for rupid, gmf in gmf_by_rupid.items():
            gmvs_site_1.append(gmf[0])
            gmvs_site_2.append(gmf[1])
        joint_prob_0_5 = joint_prob_of_occurrence(
            gmvs_site_1, gmvs_site_2, 0.5, oq.investigation_time,
            oq.ses_per_logic_tree_path)
        joint_prob_1_0 = joint_prob_of_occurrence(
            gmvs_site_1, gmvs_site_2, 1.0, oq.investigation_time,
            oq.ses_per_logic_tree_path)
        p05, p10 = expected[case]
        numpy.testing.assert_almost_equal(joint_prob_0_5, p05, decimal=1)
        numpy.testing.assert_almost_equal(joint_prob_1_0, p10, decimal=1)
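# Illustrative helper, hypothetical and not the library's
# joint_prob_of_occurrence: given the ground motion values observed at
# two sites over `num_ses` stochastic event sets, estimate the
# probability that both sites see a value close to `gmv` within one
# investigation time, assuming Poissonian occurrence of the joint events.
import math


def joint_prob_sketch(gmvs_site_1, gmvs_site_2, gmv, num_ses, delta=0.1):
    half = delta / 2.
    # count the event sets where both sites are within delta of gmv
    count = sum(1 for v1, v2 in zip(gmvs_site_1, gmvs_site_2)
                if abs(v1 - gmv) <= half and abs(v2 - gmv) <= half)
    # Poisson assumption: P(at least one joint occurrence) = 1 - exp(-rate)
    return 1 - math.exp(-count / float(num_ses))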
def test_spatial_correlation(self):
    expected = {sc1: [0.99, 0.41], sc2: [0.99, 0.64], sc3: [0.99, 0.22]}
    for case in expected:
        self.run_calc(case.__file__, 'job.ini')
        oq = self.calc.oqparam
        self.assertEqual(list(oq.imtls), ['PGA'])
        dstore = DataStore(self.calc.datastore.calc_id)
        gmf_by_rupid = groupby(
            dstore['gmfs/col00'].value,
            lambda row: row['idx'],
            lambda rows: [row['BooreAtkinson2008']['PGA'] for row in rows])
        dstore.close()
        gmvs_site_1 = []
        gmvs_site_2 = []
        for rupid, gmf in gmf_by_rupid.items():  # iteritems is Python 2 only
            gmvs_site_1.append(gmf[0])
            gmvs_site_2.append(gmf[1])
        joint_prob_0_5 = joint_prob_of_occurrence(
            gmvs_site_1, gmvs_site_2, 0.5, oq.investigation_time,
            oq.ses_per_logic_tree_path)
        joint_prob_1_0 = joint_prob_of_occurrence(
            gmvs_site_1, gmvs_site_2, 1.0, oq.investigation_time,
            oq.ses_per_logic_tree_path)
        p05, p10 = expected[case]
        numpy.testing.assert_almost_equal(joint_prob_0_5, p05, decimal=1)
        numpy.testing.assert_almost_equal(joint_prob_1_0, p10, decimal=1)
class DataStoreTestCase(unittest.TestCase):
    def setUp(self):
        self.dstore = DataStore()

    def tearDown(self):
        self.dstore.clear()

    def test_pik(self):
        # store pickleable Python objects
        self.dstore['key1'] = 'value1'
        self.assertEqual(len(self.dstore), 1)
        self.dstore['key2'] = 'value2'
        self.assertEqual(list(self.dstore), ['key1', 'key2'])
        del self.dstore['key2']
        self.assertEqual(list(self.dstore), ['key1'])
        self.assertEqual(self.dstore['key1'], 'value1')

        # store and retrieve a callable
        self.dstore['key1_upper'] = key1_upper
        self.assertEqual(self.dstore['key1_upper'], 'VALUE1')

    def test_hdf5(self):
        # optional test, run only if h5py is available
        try:
            import h5py
        except ImportError:
            raise unittest.SkipTest

        # store numpy arrays as hdf5 files
        self.assertEqual(len(self.dstore), 0)
        self.dstore['/key1'] = value1 = numpy.array(['a', 'b'])
        self.dstore['/key2'] = numpy.array([1, 2])
        self.assertEqual(list(self.dstore), ['/key1', '/key2'])
        del self.dstore['/key2']
        self.assertEqual(list(self.dstore), ['/key1'])
        numpy.testing.assert_equal(self.dstore['/key1'], value1)
        self.assertGreater(self.dstore.getsize('/key1'), 0)
        self.assertGreater(self.dstore.getsize(), 0)

        # test creating and populating a dset
        dset = self.dstore.hdf5.create_dataset('/dset', shape=(4, 2),
                                               dtype=int)
        dset[0] = [1, 2]
        dset[3] = [4, 5]
        numpy.testing.assert_equal(
            self.dstore['/dset'][:], [[1, 2], [0, 0], [0, 0], [4, 5]])

        # notice: it is not possible to store non-arrays
        with self.assertRaises(ValueError):
            self.dstore['/key1'] = 'value1'
class DataStoreTestCase(unittest.TestCase):
    def setUp(self):
        self.dstore = DataStore()

    def tearDown(self):
        self.dstore.clear()

    def test_pik(self):
        # store pickleable Python objects
        self.dstore['key1'] = 'value1'
        self.assertEqual(len(self.dstore), 1)
        self.dstore['key2'] = 'value2'
        self.assertEqual(list(self.dstore), ['key1', 'key2'])
        del self.dstore['key2']
        self.assertEqual(list(self.dstore), ['key1'])
        self.assertEqual(self.dstore['key1'], 'value1')

        # test a datastore view
        self.assertEqual(view('key1_upper', self.dstore), 'VALUE1')

    def test_hdf5(self):
        # optional test, run only if h5py is available
        try:
            import h5py
        except ImportError:
            raise unittest.SkipTest

        # store numpy arrays as hdf5 files
        self.assertEqual(len(self.dstore), 0)
        self.dstore['/key1'] = value1 = numpy.array(['a', 'b'])
        self.dstore['/key2'] = numpy.array([1, 2])
        self.assertEqual(list(self.dstore), ['key1', 'key2'])
        del self.dstore['/key2']
        self.assertEqual(list(self.dstore), ['key1'])
        numpy.testing.assert_equal(self.dstore['key1'], value1)
        self.assertGreater(self.dstore.getsize('key1'), 0)
        self.assertGreater(self.dstore.getsize(), 0)

        # test creating and populating a dset
        dset = self.dstore.hdf5.create_dataset('dset', shape=(4, 2),
                                               dtype=int)
        dset[0] = [1, 2]
        dset[3] = [4, 5]
        numpy.testing.assert_equal(self.dstore['dset'][:],
                                   [[1, 2], [0, 0], [0, 0], [4, 5]])

        # it is possible to store twice the same key (work around a bug)
        self.dstore['key1'] = 'value1'
def test_parent(self):
    # copy the attributes of the parent datastore onto the child
    # datastore, without overriding the attributes with the same name
    self.dstore.attrs['a'] = 2
    parent = DataStore(params=[('a', 1), ('b', 2)])
    self.dstore.set_parent(parent)
    attrs = sorted(self.dstore.attrs.items())
    self.assertEqual(attrs, [('a', 2), ('b', 2)])
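# Hedged sketch of the semantics exercised by test_parent above: the
# child's attributes take precedence over the parent's attributes with
# the same name, i.e. a dict merge where the child is applied last.
parent_attrs = dict(a=1, b=2)
child_attrs = dict(a=2)
merged = dict(parent_attrs, **child_attrs)
assert sorted(merged.items()) == [('a', 2), ('b', 2)]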
class DataStoreTestCase(unittest.TestCase):
    def setUp(self):
        self.dstore = DataStore()

    def tearDown(self):
        self.dstore.clear()

    def test_pik(self):
        # store pickleable Python objects
        self.dstore['key1'] = 'value1'
        self.assertEqual(len(self.dstore), 1)
        self.dstore['key2'] = 'value2'
        self.assertEqual(list(self.dstore), ['key1', 'key2'])
        del self.dstore['key2']
        self.assertEqual(list(self.dstore), ['key1'])
        self.assertEqual(self.dstore['key1'], 'value1')

        # test a datastore view
        self.assertEqual(view('key1_upper', self.dstore), 'VALUE1')

    def test_hdf5(self):
        # store numpy arrays as hdf5 files
        self.assertEqual(len(self.dstore), 0)
        self.dstore['/key1'] = value1 = numpy.array(['a', 'b'])
        self.dstore['/key2'] = numpy.array([1, 2])
        self.assertEqual(list(self.dstore), ['key1', 'key2'])
        del self.dstore['/key2']
        self.assertEqual(list(self.dstore), ['key1'])
        numpy.testing.assert_equal(self.dstore['key1'], value1)
        self.assertGreater(self.dstore.getsize('key1'), 0)
        self.assertGreater(self.dstore.getsize(), 0)

        # test creating and populating a dset
        dset = self.dstore.hdf5.create_dataset('dset', shape=(4, 2),
                                               dtype=int)
        dset[0] = [1, 2]
        dset[3] = [4, 5]
        numpy.testing.assert_equal(self.dstore['dset'][:],
                                   [[1, 2], [0, 0], [0, 0], [4, 5]])

        # it is possible to store twice the same key (work around a bug)
        self.dstore['key1'] = 'value1'

        # test `in` functionality with composite keys
        self.dstore['a/b'] = 42
        self.assertTrue('a/b' in self.dstore)

    def test_parent(self):
        # copy the attributes of the parent datastore onto the child
        # datastore, without overriding the attributes with the same name
        self.dstore.attrs['a'] = 2
        parent = DataStore(params=[('a', 1), ('b', 2)])
        self.dstore.set_parent(parent)
        attrs = sorted(self.dstore.attrs.items())
        self.assertEqual(attrs, [('a', 2), ('b', 2)])

    def test_export_path(self):
        path = self.dstore.export_path('hello.txt')
        mo = re.match(r'\./hello_\d+', path)
        self.assertIsNotNone(mo)
def test_case_1(self):
    # test for the fatalities
    self.run_calc(case_1.__file__, 'job_ebr.ini')
    ds = DataStore(self.calc.datastore.calc_id,
                   export_dir=self.calc.datastore.export_dir)
    fnames = export(('assetcol', 'csv'), ds) + export(
        ('event_loss_table-rlzs', 'csv'), ds)
    for fname in fnames:
        self.assertEqualFiles('expected/' + os.path.basename(fname), fname)
def test_spatial_correlation(self):
    expected = {sc1: [0.99, 0.41], sc2: [0.99, 0.64], sc3: [0.99, 0.22]}
    for case in expected:
        self.run_calc(case.__file__, 'job.ini')
        oq = self.calc.oqparam
        self.assertEqual(list(oq.imtls), ['PGA'])
        dstore = DataStore(self.calc.datastore.calc_id)
        gmfa = dstore['gmf_data/1']['BooreAtkinson2008']['PGA']
        dstore.close()
        gmvs_site_1 = gmfa[:, 0]
        gmvs_site_2 = gmfa[:, 1]
        joint_prob_0_5 = joint_prob_of_occurrence(
            gmvs_site_1, gmvs_site_2, 0.5, oq.investigation_time,
            oq.ses_per_logic_tree_path)
        joint_prob_1_0 = joint_prob_of_occurrence(
            gmvs_site_1, gmvs_site_2, 1.0, oq.investigation_time,
            oq.ses_per_logic_tree_path)
        p05, p10 = expected[case]
        numpy.testing.assert_almost_equal(joint_prob_0_5, p05, decimal=1)
        numpy.testing.assert_almost_equal(joint_prob_1_0, p10, decimal=1)
def get_mesh(oqparam):
    """
    Extract the mesh of points to compute from the sites,
    the sites_csv, or the region.

    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    """
    if oqparam.sites:
        lons, lats = zip(*sorted(oqparam.sites))
        return geo.Mesh(numpy.array(lons), numpy.array(lats))
    elif 'sites' in oqparam.inputs:
        csv_data = open(oqparam.inputs['sites'], 'r').read()
        coords = valid.coordinates(
            csv_data.strip().replace(',', ' ').replace('\n', ','))
        lons, lats = zip(*sorted(coords))
        return geo.Mesh(numpy.array(lons), numpy.array(lats))
    elif oqparam.region:
        # close the linear polygon ring by appending the first
        # point to the end
        firstpoint = geo.Point(*oqparam.region[0])
        points = [geo.Point(*xy) for xy in oqparam.region] + [firstpoint]
        try:
            mesh = geo.Polygon(points).discretize(
                oqparam.region_grid_spacing)
            lons, lats = zip(*sorted(zip(mesh.lons, mesh.lats)))
            return geo.Mesh(numpy.array(lons), numpy.array(lats))
        except Exception:
            raise ValueError(
                'Could not discretize region %(region)s with grid spacing '
                '%(region_grid_spacing)s' % vars(oqparam))
    elif 'gmfs' in oqparam.inputs:
        return get_gmfs(oqparam)[0].mesh
    elif oqparam.hazard_calculation_id:
        sitemesh = DataStore(oqparam.hazard_calculation_id)['sitemesh']
        return geo.Mesh(sitemesh['lon'], sitemesh['lat'])
    elif 'exposure' in oqparam.inputs:
        # the mesh is extracted from get_sitecol_assets
        return
    elif 'site_model' in oqparam.inputs:
        coords = [(param.lon, param.lat)
                  for param in get_site_model(oqparam)]
        lons, lats = zip(*sorted(coords))
        return geo.Mesh(numpy.array(lons), numpy.array(lats))
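# Minimal sketch of the coordinate handling shared by the branches of
# get_mesh above (the helper name is hypothetical): the (lon, lat) pairs
# are sorted and split into the parallel arrays that geo.Mesh expects.
import numpy


def coords_to_arrays(coords):
    lons, lats = zip(*sorted(coords))
    return numpy.array(lons), numpy.array(lats)


# e.g. coords_to_arrays([(10.1, 45.0), (10.0, 45.2)]) returns
# (array([10., 10.1]), array([45.2, 45.]))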
class DataStoreTestCase(unittest.TestCase):
    def setUp(self):
        self.dstore = DataStore()

    def tearDown(self):
        self.dstore.clear()

    def test_pik(self):
        # store pickleable Python objects
        self.dstore["key1"] = "value1"
        self.assertEqual(len(self.dstore), 1)
        self.dstore["key2"] = "value2"
        self.assertEqual(list(self.dstore), ["key1", "key2"])
        del self.dstore["key2"]
        self.assertEqual(list(self.dstore), ["key1"])
        self.assertEqual(self.dstore["key1"], "value1")

    def test_hdf5(self):
        # store numpy arrays as hdf5 files
        self.assertEqual(len(self.dstore), 0)
        self.dstore["/key1"] = value1 = numpy.array(["a", "b"], dtype=bytes)
        self.dstore["/key2"] = numpy.array([1, 2])
        self.assertEqual(list(self.dstore), ["key1", "key2"])
        del self.dstore["/key2"]
        self.assertEqual(list(self.dstore), ["key1"])
        numpy.testing.assert_equal(self.dstore["key1"], value1)
        self.assertGreater(self.dstore.getsize("key1"), 0)
        self.assertGreater(self.dstore.getsize(), 0)

        # test creating and populating a dset
        dset = self.dstore.hdf5.create_dataset("dset", shape=(4, 2),
                                               dtype=int)
        dset[0] = [1, 2]
        dset[3] = [4, 5]
        numpy.testing.assert_equal(self.dstore["dset"][:],
                                   [[1, 2], [0, 0], [0, 0], [4, 5]])

        # it is possible to store twice the same key (work around a bug)
        self.dstore["key1"] = "value1"

        # test `in` functionality with composite keys
        self.dstore["a/b"] = 42
        self.assertTrue("a/b" in self.dstore)

    def test_parent(self):
        # copy the attributes of the parent datastore onto the child
        # datastore, without overriding the attributes with the same name
        self.dstore.attrs["a"] = 2
        parent = DataStore(params=[("a", 1), ("b", 2)])
        self.dstore.set_parent(parent)
        attrs = sorted(self.dstore.attrs.items())
        self.assertEqual(attrs, [("a", 2), ("b", 2)])

    def test_export_path(self):
        path = self.dstore.export_path("hello.txt")
        mo = re.match(r"\./hello_\d+", path)
        self.assertIsNotNone(mo)

    def test_read(self):
        # case of a non-existing directory
        with self.assertRaises(IOError):
            read(42, datadir="/fake/directory")
        # case of a non-existing file
        with self.assertRaises(IOError):
            read(42, datadir="/tmp")
        # case of no read permission
        tmp = tempfile.mkdtemp()
        fname = os.path.join(tmp, "calc_42.hdf5")
        open(fname, "w").write("")
        os.chmod(fname, 0)
        with self.assertRaises(IOError) as ctx:
            read(42, datadir=tmp)
        self.assertIn("Permission denied:", str(ctx.exception))
def setUp(self):
    self.dstore = DataStore()
class DataStoreTestCase(unittest.TestCase):
    def setUp(self):
        self.dstore = DataStore()

    def tearDown(self):
        self.dstore.clear()

    def test_pik(self):
        # store pickleable Python objects
        self.dstore['key1'] = 'value1'
        self.assertEqual(len(self.dstore), 1)
        self.dstore['key2'] = 'value2'
        self.assertEqual(list(self.dstore), ['key1', 'key2'])
        del self.dstore['key2']
        self.assertEqual(list(self.dstore), ['key1'])
        self.assertEqual(self.dstore['key1'], 'value1')

        # test a datastore view
        self.assertEqual(view('key1_upper', self.dstore), 'VALUE1')

    def test_hdf5(self):
        # store numpy arrays as hdf5 files
        self.assertEqual(len(self.dstore), 0)
        self.dstore['/key1'] = value1 = numpy.array(['a', 'b'], dtype=bytes)
        self.dstore['/key2'] = numpy.array([1, 2])
        self.assertEqual(list(self.dstore), ['key1', 'key2'])
        del self.dstore['/key2']
        self.assertEqual(list(self.dstore), ['key1'])
        numpy.testing.assert_equal(self.dstore['key1'], value1)
        self.assertGreater(self.dstore.getsize('key1'), 0)
        self.assertGreater(self.dstore.getsize(), 0)

        # test creating and populating a dset
        dset = self.dstore.hdf5.create_dataset('dset', shape=(4, 2),
                                               dtype=int)
        dset[0] = [1, 2]
        dset[3] = [4, 5]
        numpy.testing.assert_equal(
            self.dstore['dset'][:], [[1, 2], [0, 0], [0, 0], [4, 5]])

        # it is possible to store twice the same key (work around a bug)
        self.dstore['key1'] = 'value1'

        # test `in` functionality with composite keys
        self.dstore['a/b'] = 42
        self.assertTrue('a/b' in self.dstore)

    def test_parent(self):
        # copy the attributes of the parent datastore onto the child
        # datastore, without overriding the attributes with the same name
        self.dstore.attrs['a'] = 2
        parent = DataStore(params=[('a', 1), ('b', 2)])
        self.dstore.set_parent(parent)
        attrs = sorted(self.dstore.attrs.items())
        self.assertEqual(attrs, [('a', 2), ('b', 2)])

    def test_export_path(self):
        path = self.dstore.export_path('hello.txt')
        mo = re.match(r'\./hello_\d+', path)
        self.assertIsNotNone(mo)

    def test_read(self):
        # case of a non-existing directory
        with self.assertRaises(IOError):
            read(42, datadir='/fake/directory')
        # case of a non-existing file
        with self.assertRaises(IOError):
            read(42, datadir='/tmp')
        # case of no read permission
        tmp = tempfile.mkdtemp()
        fname = os.path.join(tmp, 'calc_42.hdf5')
        open(fname, 'w').write('')
        os.chmod(fname, 0)
        with self.assertRaises(IOError) as ctx:
            read(42, datadir=tmp)
        self.assertIn('Permission denied:', str(ctx.exception))
class DataStoreTestCase(unittest.TestCase):
    def setUp(self):
        self.dstore = DataStore()

    def tearDown(self):
        self.dstore.clear()

    def test_pik(self):
        # store pickleable Python objects
        self.dstore['key1'] = 'value1'
        self.assertEqual(len(self.dstore), 1)
        self.dstore['key2'] = 'value2'
        self.assertEqual(list(self.dstore), ['key1', 'key2'])
        del self.dstore['key2']
        self.assertEqual(list(self.dstore), ['key1'])
        self.assertEqual(self.dstore['key1'], 'value1')

    def test_hdf5(self):
        # store numpy arrays as hdf5 files
        self.assertEqual(len(self.dstore), 0)
        self.dstore['/key1'] = value1 = numpy.array(['a', 'b'], dtype=bytes)
        self.dstore['/key2'] = numpy.array([1, 2])
        self.assertEqual(list(self.dstore), ['key1', 'key2'])
        del self.dstore['/key2']
        self.assertEqual(list(self.dstore), ['key1'])
        numpy.testing.assert_equal(self.dstore['key1'], value1)
        self.assertGreater(self.dstore.getsize('key1'), 0)
        self.assertGreater(self.dstore.getsize(), 0)

        # test creating and populating a dset
        dset = self.dstore.hdf5.create_dataset('dset', shape=(4, 2),
                                               dtype=int)
        dset[0] = [1, 2]
        dset[3] = [4, 5]
        numpy.testing.assert_equal(self.dstore['dset'][:],
                                   [[1, 2], [0, 0], [0, 0], [4, 5]])

        # it is possible to store twice the same key (work around a bug)
        self.dstore['key1'] = 'value1'

        # test `in` functionality with composite keys
        self.dstore['a/b'] = 42
        self.assertTrue('a/b' in self.dstore)

    def test_export_path(self):
        path = self.dstore.export_path('hello.txt', tempfile.mkdtemp())
        mo = re.search(r'hello_\d+', path)
        self.assertIsNotNone(mo)

    def test_read(self):
        # case of a non-existing directory
        with self.assertRaises(OSError):
            read(42, datadir='/fake/directory')
        # case of a non-existing file
        with self.assertRaises(IOError):
            read(42, datadir='/tmp')
        # case of no read permission
        tmp = tempfile.mkdtemp()
        fname = os.path.join(tmp, 'calc_42.hdf5')
        open(fname, 'w').write('')
        os.chmod(fname, 0)
        with self.assertRaises(IOError) as ctx:
            read(42, datadir=tmp)
        self.assertIn('permission denied', str(ctx.exception).lower())
class DataStoreTestCase(unittest.TestCase):
    def setUp(self):
        self.dstore = DataStore()

    def tearDown(self):
        self.dstore.clear()

    def test_pik(self):
        # store pickleable Python objects
        self.dstore['key1'] = 'value1'
        self.assertEqual(len(self.dstore), 1)
        self.dstore['key2'] = 'value2'
        self.assertEqual(list(self.dstore), ['key1', 'key2'])
        del self.dstore['key2']
        self.assertEqual(list(self.dstore), ['key1'])
        self.assertEqual(self.dstore['key1'], 'value1')

        # test a datastore view
        self.assertEqual(view('key1_upper', self.dstore), 'VALUE1')

    def test_hdf5(self):
        # store numpy arrays as hdf5 files
        self.assertEqual(len(self.dstore), 0)
        self.dstore['/key1'] = value1 = numpy.array(['a', 'b'])
        self.dstore['/key2'] = numpy.array([1, 2])
        self.assertEqual(list(self.dstore), ['key1', 'key2'])
        del self.dstore['/key2']
        self.assertEqual(list(self.dstore), ['key1'])
        numpy.testing.assert_equal(self.dstore['key1'], value1)
        self.assertGreater(self.dstore.getsize('key1'), 0)
        self.assertGreater(self.dstore.getsize(), 0)

        # test creating and populating a dset
        dset = self.dstore.hdf5.create_dataset('dset', shape=(4, 2),
                                               dtype=int)
        dset[0] = [1, 2]
        dset[3] = [4, 5]
        numpy.testing.assert_equal(
            self.dstore['dset'][:], [[1, 2], [0, 0], [0, 0], [4, 5]])

        # it is possible to store twice the same key (work around a bug)
        self.dstore['key1'] = 'value1'

        # test `in` functionality with composite keys
        self.dstore['a/b'] = 42
        self.assertTrue('a/b' in self.dstore)

    def test_parent(self):
        # copy the attributes of the parent datastore onto the child
        # datastore, without overriding the attributes with the same name
        self.dstore.attrs['a'] = 2
        parent = DataStore(params=[('a', 1), ('b', 2)])
        self.dstore.set_parent(parent)
        attrs = sorted(self.dstore.attrs.items())
        self.assertEqual(attrs, [('a', 2), ('b', 2)])

    def test_export_path(self):
        path = self.dstore.export_path('hello')
        mo = re.match(r'\./hello_\d+', path)
        self.assertIsNotNone(mo)
def setUp(self):
    self.dstore = DataStore.new()
class DataStoreTestCase(unittest.TestCase):
    def setUp(self):
        self.dstore = DataStore()

    def tearDown(self):
        self.dstore.clear()

    def test_pik(self):
        # store pickleable Python objects
        self.dstore['key1'] = 'value1'
        self.assertEqual(len(self.dstore), 1)
        self.dstore['key2'] = 'value2'
        self.assertEqual(list(self.dstore), ['key1', 'key2'])
        del self.dstore['key2']
        self.assertEqual(list(self.dstore), ['key1'])
        self.assertEqual(self.dstore['key1'], 'value1')

        # test a datastore view
        self.assertEqual(view('key1_upper', self.dstore), 'VALUE1')

    def test_hdf5(self):
        # store numpy arrays as hdf5 files
        self.assertEqual(len(self.dstore), 0)
        self.dstore['/key1'] = value1 = numpy.array(['a', 'b'])
        self.dstore['/key2'] = numpy.array([1, 2])
        self.assertEqual(list(self.dstore), ['key1', 'key2'])
        del self.dstore['/key2']
        self.assertEqual(list(self.dstore), ['key1'])
        numpy.testing.assert_equal(self.dstore['key1'], value1)
        self.assertGreater(self.dstore.getsize('key1'), 0)
        self.assertGreater(self.dstore.getsize(), 0)

        # test creating and populating a dset
        dset = self.dstore.hdf5.create_dataset('dset', shape=(4, 2),
                                               dtype=int)
        dset[0] = [1, 2]
        dset[3] = [4, 5]
        numpy.testing.assert_equal(self.dstore['dset'][:],
                                   [[1, 2], [0, 0], [0, 0], [4, 5]])

        # it is possible to store twice the same key (work around a bug)
        self.dstore['key1'] = 'value1'

        # test `in` functionality with composite keys
        self.dstore['a/b'] = 42
        self.assertTrue('a/b' in self.dstore)

    def test_parent(self):
        # copy the attributes of the parent datastore onto the child
        # datastore, without overriding the attributes with the same name
        self.dstore.attrs['a'] = 2
        parent = DataStore(params=[('a', 1), ('b', 2)])
        self.dstore.set_parent(parent)
        attrs = sorted(self.dstore.attrs.items())
        self.assertEqual(attrs, [('a', 2), ('b', 2)])

    def test_export_path(self):
        path = self.dstore.export_path('hello.txt')
        mo = re.match(r'\./hello_\d+', path)
        self.assertIsNotNone(mo)

    def test_read(self):
        # case of a non-existing directory
        with self.assertRaises(IOError):
            read(42, datadir='/fake/directory')
        # case of a non-existing file
        with self.assertRaises(IOError):
            read(42, datadir='/tmp')
        # case of no read permission
        tmp = tempfile.mkdtemp()
        fname = os.path.join(tmp, 'calc_42.hdf5')
        open(fname, 'w').write('')
        os.chmod(fname, 0)
        with self.assertRaises(IOError) as ctx:
            read(42, datadir=tmp)
        self.assertIn('Permission denied:', str(ctx.exception))