def test_Dataset_constructor():
    """ Build a Dataset from a generator of Mapping items and check its length.

    Returns the constructed Dataset so other tests can reuse it.
    """
    from rhizoscan.datastructure import Mapping
    from rhizoscan.root.pipeline.dataset import Dataset

    # one Mapping per key, with a nested sub-Mapping
    items = (Mapping(a=k / 2, b=k / 3, __key__=k, sub=Mapping(item=0))
             for k in range(_test_len))
    ds = Dataset(items)

    assert len(ds) == _test_len, \
        'incorrect item number (%d/%d)' % (len(ds), _test_len)
    return ds
def test_mapping_read():
    """ Load the reference 'zen' Mapping file and verify its content. """
    import os
    from rhizoscan.datastructure import Mapping

    filename = os.path.abspath('test/data/zen.map')

    mapping = Mapping()
    mapping.load(filename)

    # NOTE(review): has_key here is presumably a Mapping method, not dict's
    assert mapping.has_key('zen'), "missing attribute 'zen'"
    assert len(mapping.zen) == 836
def test_mapping_io():
    """ Round-trip a Mapping through dump()/load() and check the loader content. """
    from rhizoscan.datastructure import Mapping
    from tempfile import mkdtemp
    import os
    # work in a throw-away directory, removed in the finally clause
    dname = mkdtemp()
    fname = os.path.join(dname, 'test_mapping.save')
    try:
        m = Mapping(a=1, b=2)
        m.set_file(fname)
        # 'c' is set with store=True: presumably saved in its own file — TODO confirm
        m.set('c', Mapping(c=3), store=True)

        # the Mapping should report the file/directory it was attached to
        url = m.get_file().get_url()
        ctn = m.get_file().get_container()
        assert url == fname, 'not the correct file:' + repr(url) + '!=' + fname
        assert ctn == dname, 'not the correct directory:' + repr(
            ctn) + '!=' + dname

        # restrict the loader to attribute 'a' only, then dump to disk
        m.__loader_attributes__ = ['a']
        loader = m.dump()

        # file/directory bookkeeping must be unchanged after dump()
        url = m.get_file().get_url()
        ctn = m.get_file().get_container()
        assert url == fname, 'not the correct file:' + repr(url) + '!=' + fname
        assert ctn == dname, 'not the correct directory:' + repr(
            ctn) + '!=' + dname
        assert m.get_file().exists(), 'storage file does not exist:' + repr(
            m.get_file())

        # only 'a' was listed in __loader_attributes__, so only 'a' is kept
        assert 'a' in loader.__dict__.keys()
        assert 'b' not in loader.__dict__.keys()
        assert 'c' not in loader.__dict__.keys()

        # loading the dump restores every key, including the stored 'c'
        n = loader.load()
        assert all(map(hasattr, [n] * 3, ['a', 'b', 'c']))  # all keys reloaded
        assert n.a == m.a and n.b == m.b and n.get('c').c == m.c.c
        #todo: check private attributes are suitable
    finally:
        import shutil
        shutil.rmtree(dname)
def test_mapping_move():
    """ Check a stored Mapping is still loadable after its directory is moved. """
    from rhizoscan.datastructure import Data, Mapping
    from tempfile import mkdtemp
    import os
    import shutil
    # two sibling directories inside a throw-away parent
    dname = mkdtemp()
    dname1 = os.path.join(dname, 'dir1')
    dname2 = os.path.join(dname, 'dir2')
    fname1 = os.path.join(dname1, 'test_mapping.save')
    fname2 = os.path.join(dname2, 'test_mapping.save')
    try:
        # store a Mapping (with one store=True sub-Mapping) into dir1
        m1 = Mapping(a=1, b=2)
        m1.set_file(fname1)
        m1.set('c', Mapping(c=3), store=True)
        loader = m1.dump()

        # simulate a user moving the whole storage directory
        shutil.copytree(dname1, dname2)
        shutil.rmtree(dname1)  # delete initial data

        m2 = Data.load(fname2)
        # the reloaded Mapping should point at its *new* location
        assert m2.get_file().get_url() == fname2
        assert m2.get_file().get_container() == dname2
        assert all(map(hasattr, [m2] * 3, ['a', 'b', 'c']))  # all keys reloaded
        assert m1.a == m2.a and m2.get('c').c == m1.c.c

        # a newly stored attribute should be saved relative to the new location
        m2.set('d', Mapping(value=42), store=True)
        d_url = m2.d.get_file().get_url()
        d_fname = os.path.splitext(m2.d.get_file().get_url(full=False))[0]
        assert d_fname == 'test_mapping_d', d_fname
        assert Data.load(d_url).value == 42, d_url
    finally:
        shutil.rmtree(dname)
def __store__(self):
    """ Return a copy of it-self and call recursively __store__ on all
    contained objects that have the __store__ method, such as Data objects.

    Note: This is what is really saved by the 'save' method.
    """
    # NOTE(review): assumes _Mapping.__store__ returns a copy WITHOUT the
    # list items, so appending every item below does not duplicate — confirm
    s = _Mapping.__store__(self)
    for value in self:
        # replace storable items by their storage form
        if hasattr(value, '__store__'):
            value = value.__store__()
        s.append(value)
    return s
def __store__(self):
    """ Return a storable copy where the 'auto' and 'ref' items are
    replaced by their parent-store form.
    """
    dup = self.__copy__()
    for attr in ('auto', 'ref'):
        stored = [getattr(dup, attr)[i].__parent_store__()
                  for i in range(len(getattr(dup, attr)))]
        setattr(dup, attr, stored)
    return _Mapping.__store__(dup)
def __repr__(self):
    """ Show the _rsa and _ref attributes, then the base Mapping repr. """
    header = 'rsa:%s\nref:%s\n' % (self._rsa, self._ref)
    return header + _Mapping.__repr__(self)
def arabidopsis_pipeline(output=None):
    """ Run the arabidopsis root pipeline on the test image and check results.

    If `output` is given, results are stored to file and the serialized
    files are checked too. Returns the pipeline namespace Mapping.
    """
    from rhizoscan.root.pipeline.arabidopsis import pipeline
    from rhizoscan.datastructure import Mapping
    # `os` and `image_file` are expected at module level — defined elsewhere
    filename = os.path.abspath(image_file)
    assert os.path.exists(
        filename), "could not find test image file:" + filename

    # pipeline input namespace: image + segmentation/tree parameters
    d = Mapping(filename=filename, plant_number=2, fg_smooth=1,
                border_width=.08, leaf_bbox=[0, 0, 1, .4],
                root_max_radius=5, verbose=1)
    if output:
        # attach storage so selected outputs are saved to file
        d.set_file(output, storage=True)
        pipeline.run(namespace=d,
                     store=['pmask', 'rmask', 'seed_map', 'tree', 'rsa'])
    else:
        pipeline.run(namespace=d)

    # every pipeline stage must have filled its output slot
    assert d.has_key('image'), "pipeline did not compute 'image'"
    assert d.has_key('pmask'), "pipeline did not compute 'pmask'"
    assert d.has_key('rmask'), "pipeline did not compute 'rmask'"
    assert d.has_key('seed_map'), "pipeline did not compute 'seed_map'"
    assert d.has_key('graph'), "pipeline did not compute 'graph'"
    assert d.has_key('tree'), "pipeline did not compute 'tree'"
    assert d.has_key('rsa'), "pipeline did not compute 'rsa'"

    # test tree
    import numpy as np
    t = d.tree
    assert t.axe.number() == 8, "not the correct number of axes:" + str(
        t.axe.number())
    # problem: there is an axe with only a seed segment ??!
    assert (np.unique(t.axe.plant) == [
        0, 1, 2
    ]).all(), "not the correct number of plants" + str(np.unique(t.axe.plant))
    # axe positions along their parent, compared to reference values
    pos_on_parent = t.axe.position_on_parent()
    assert abs(np.sort(pos_on_parent[:8]) - [0, 0, 0, 35, 36, 76, 95, 129]
               ).max() < 2, 'incorrect axe position_on_parent'

    # test mtg
    g = d.rsa
    plant_number = len(g.vertices(scale=1))
    axe_number = len(g.vertices(scale=2))
    assert plant_number == 2, "not the correction number of plants in mtg" + str(
        plant_number)
    assert axe_number == 7, "not the correction number of axes in mtg" + str(
        axe_number)

    if output:
        # test rsml serialization: reload the stored rsa and compare sizes
        from rhizoscan.root.graph.mtg import RSMLSerializer
        with d.rsa.__file_object__.entry.open('r') as f:
            t = RSMLSerializer().load(f)
        assert len(g.vertices()) == len(t.vertices())

        # test file extension of each stored output
        def ext(attr):
            return d[attr].__file_object__.get_extension()
        assert ext(
            'pmask') == '.png', "stored pmask has not the '.png' extension"
        assert ext(
            'rmask') == '.png', "stored rmask has not the '.png' extension"
        assert ext('seed_map'
                   ) == '.png', "stored seed_map has not the '.png' extension"
        assert ext(
            'tree') == '.pickle', "stored tree has not the '.pickle' extension"
        assert ext(
            'rsa') == '.rsml', "stored rsa has not the '.rsml' extension"

    return d
def __store__(self):
    """ Return a storable copy where each tc_list entry is replaced by
    its parent-store form.
    """
    clone = self.__copy__()
    stored = []
    for entry in clone.tc_list:
        stored.append(entry.__parent_store__())
    clone.tc_list = stored
    return _Mapping.__store__(clone)
def clear(self):
    """ replace trees by their loader """
    for name in ('ref', 'cmp'):
        # check the raw stored value; swap in a loader unless already one
        if not _Mapping.is_loader(self.__dict__[name]):
            setattr(self, name, getattr(self, name).get_loader())
def clear_temporary_attribute(self):
    """ Clear the temporary attributes, then call clear() to swap
    trees for their loader. """
    _Mapping.clear_temporary_attribute(self)
    self.clear()