def fetch_particle_space(self, **kwargs):
    """Property getter for particle_space.

    Resolves the data URI via the parent (optionally for a given
    'index' keyword), invalidates the cached particle space whenever
    the URI or the index changed, and lazily (re)loads it from the
    spatiocyte file.
    """
    # Examine cache: both the URI and the frame index form the cache key.
    uri = self.parent.request_data(UriSpec, **kwargs)
    index = kwargs.get('index', 0)
    if not (self._uri == uri and self._index == index):
        # Cache key changed: drop the stale space and remember the key.
        self._particle_space = None
        self._uri = uri
        # BUGFIX: self._index was never updated before, so the cache
        # was considered stale (and reloaded) on every subsequent call
        # once a non-initial index had been requested.
        self._index = index
    if not self._particle_space:
        # Cache miss: (re)load from the file behind the URI.
        if uri is None:
            return
        debug('spatiocyte data uri=%s' % uri)
        try:
            parsed = urlparse(uri)
            fullpath = parsed.netloc + parsed.path
            self._particle_space = self.load_spatiocyte_file(fullpath, index)
        except IOError as e:
            # Best-effort: leave the cache empty on I/O failure.
            warning('Failed to open %s: %s', fullpath, str(e))
def selected_data(self):
    """Return the HDF5 data chunk at the current index cursor.

    Returns None (with a warning) when the cursor is outside the
    valid window [-n_data, n_data).
    """
    # BUGFIX/idiom: the original used `in range(-n, n)`, which builds
    # an O(n) list (Python 2) just to bounds-check an integer.
    if -self.n_data <= self.index_cursor < self.n_data:
        debug("retrieving data at index=%s" % self.index_cursor)
        return self.parent.request_data(Hdf5DataSpec, index=self.index_cursor)
    warning('Index cursor is set to wrong value.')
    return None
def fetch_particle_space(self, **kwargs):
    """Property getter for particle_space.

    The cached particle space is keyed on (uri, index); a change in
    either invalidates the cache, after which the spatiocyte file is
    reloaded lazily.
    """
    # Examine cache validity.
    uri = self.parent.request_data(UriSpec, **kwargs)
    index = kwargs.get('index', 0)
    if not (self._uri == uri and self._index == index):
        self._particle_space = None
        self._uri = uri
        # BUGFIX: record the index as part of the cache key; it was
        # never stored before, forcing a reload on every call whenever
        # a non-initial index was in use.
        self._index = index
    if not self._particle_space:
        # self._particle_space is unset -- reload it.
        if uri is None:
            return
        debug('spatiocyte data uri=%s' % uri)
        try:
            parsed = urlparse(uri)
            fullpath = parsed.netloc + parsed.path
            self._particle_space = self.load_spatiocyte_file(
                fullpath, index)
        except IOError as e:
            # Leave the cache empty; callers treat a missing space as
            # "no data".
            warning('Failed to open %s: %s', fullpath, str(e))
def selected_data(self):
    """Return the HDF5 data chunk addressed by the index cursor.

    A cursor outside [-n_data, n_data) yields None and a warning.
    """
    cursor = self.index_cursor
    # BUGFIX/idiom: direct comparison instead of `in range(...)`,
    # which materializes an O(n) list on Python 2.
    if not (-self.n_data <= cursor < self.n_data):
        warning('Index cursor is set to wrong value.')
        return None
    debug("retrieving data at index=%s" % cursor)
    return self.parent.request_data(Hdf5DataSpec, index=cursor)
def time_course(self):
    """Return a dict mapping species name -> list of (time, count) pairs.

    Walks every HDF5 chunk offered by the parent and, per species,
    records the number of particles present at each timestamp.
    Malformed chunks are skipped with a warning.
    """
    n_items = self.parent.request_data(NumberOfItemsSpec)
    populations = {}
    for i in range(n_items):
        chunk = self.parent.request_data(Hdf5DataSpec, index=i)
        if chunk is None:
            warning("No data available for index=%d." % (i))
            # BUGFIX: previously fell through and crashed on chunk.get().
            continue
        data_section = chunk.get("/data")
        if data_section is None or len(data_section.values()) < 1:
            warning("Invalid Data section for index=%d." % (i))
            continue
        data = list(data_section.values())[0]
        ts = data.attrs.get("t")
        particles = data.get("particles")
        if ts is None or particles is None:
            warning("Invalid timestamp/particles")
            # BUGFIX: previously continued with ts/particles == None.
            continue
        species_section = chunk.get("/species")
        if species_section is None:
            warning("No species table for index=%d" % (i))
            continue
        # Iterate over the species table: (id, name, radius, D-value).
        for species_id, name, radius, dvalue in species_section:
            # Count particles belonging to this species.
            n_particles = sum(particles["species_id"] == species_id)
            bin_ = populations.setdefault(name, [])
            bin_.append((ts, n_particles))
    # Debug leftover `print populations` replaced with proper logging.
    debug("populations=%s" % populations)
    return populations
def time_course(self):
    """Return a dict mapping species name -> list of (time, count) pairs.

    Each HDF5 chunk contributes one timestamp; per species the number
    of matching particles is appended. Invalid chunks are skipped with
    a warning instead of aborting the whole scan.
    """
    n_items = self.parent.request_data(NumberOfItemsSpec)
    populations = {}
    for i in range(n_items):
        chunk = self.parent.request_data(Hdf5DataSpec, index=i)
        if chunk is None:
            warning('No data available for index=%d.' % (i))
            # BUGFIX: skip the chunk; the old code crashed on
            # chunk.get() right after this warning.
            continue
        data_section = chunk.get('/data')
        if data_section is None or len(data_section.values()) < 1:
            warning('Invalid Data section for index=%d.' % (i))
            continue
        data = list(data_section.values())[0]
        ts = data.attrs.get('t')
        particles = data.get('particles')
        if ts is None or particles is None:
            warning('Invalid timestamp/particles')
            # BUGFIX: skip instead of processing None values below.
            continue
        species_section = chunk.get('/species')
        if species_section is None:
            warning('No species table for index=%d' % (i))
            continue
        # Species table rows are (id, name, radius, D-value).
        for species_id, name, radius, dvalue in species_section:
            n_particles = sum(particles['species_id'] == species_id)
            populations.setdefault(name, []).append((ts, n_particles))
    # Stray Py2 `print populations` replaced with a debug() call.
    debug('populations=%s' % populations)
    return populations
def hdf5_data(self):
    """Property getter for hdf5_data (HDF5 File cached per URI)."""
    # Examine cache: keyed on the URI reported by the parent.
    uri = self.parent.request_data(UriSpec)
    if self._uri != uri:
        # BUGFIX: close the previously opened HDF5 file before
        # dropping the reference -- the old handle used to leak.
        if self._hdf5_data is not None:
            try:
                self._hdf5_data.close()
            except Exception:
                # Best-effort cleanup; never let close() mask the reload.
                pass
        self._hdf5_data = None
        self._uri = uri
    if not self._hdf5_data:
        # Cache miss: open the file behind the URI, if it exists.
        if uri is None:
            return
        debug('hdf5 data uri=%s' % uri)
        try:
            parsed = urlparse(uri)
            fullpath = parsed.netloc + parsed.path
            if os.path.exists(fullpath):
                self._hdf5_data = File(fullpath)
        except IOError as e:
            # Best-effort: leave the cache empty on I/O failure.
            warning('Failed to open %s: %s', fullpath, str(e))
def hdf5_data(self):
    """Property getter for hdf5_data.

    Caches an open HDF5 File keyed on the URI reported by the parent;
    a URI change closes and discards the old file, after which the new
    one is opened lazily.
    """
    uri = self.parent.request_data(UriSpec)
    if self._uri != uri:
        # BUGFIX: the previously opened File was dropped without being
        # closed, leaking the OS handle on every URI change.
        old = self._hdf5_data
        if old is not None:
            try:
                old.close()
            except Exception:
                pass
        self._hdf5_data = None
        self._uri = uri
    if not self._hdf5_data:
        if uri is None:
            return
        debug('hdf5 data uri=%s' % uri)
        try:
            parsed = urlparse(uri)
            fullpath = parsed.netloc + parsed.path
            if os.path.exists(fullpath):
                self._hdf5_data = File(fullpath)
        except IOError as e:
            warning('Failed to open %s: %s', fullpath, str(e))