def envelope_domain(self):
    """Return a meq domain object enveloping the full extent of the parmtable.

    Each non-empty axis contributes its (min, max) range, keyed by axis id.
    """
    bounds = {
        mequtils.get_axis_id(iaxis): stats.minmax
        for iaxis, stats in enumerate(self._axis_stats)
        if not stats.empty()
    }
    return meq.gen_domain(**bounds)
def __init__(self, slice_axes, domain_subset):
    """Build a slicing from a list of axes to be incorporated in the slice.

    Axes named in slice_axes (by number or by axis id), and axes whose
    subset is None, are marked with a None placeholder in every slice;
    all other axes are iterated over, producing the cross-product of
    their subset indices.
    """
    list.__init__(self)
    slicings = [[]]
    for iaxis, axis_subset in enumerate(domain_subset):
        sliced = (axis_subset is None
                  or iaxis in slice_axes
                  or mequtils.get_axis_id(iaxis) in slice_axes)
        if sliced:
            # sliced axis: single placeholder entry per slice
            slicings = [entry + [None] for entry in slicings]
        else:
            # iterated axis: fan out over every subset index
            slicings = [entry + [cell] for entry in slicings for cell in axis_subset]
    self[:] = slicings
def __init__(self, slice_axes, domain_subset):
    """Make a slicing from a list of axes to be incorporated in the slice.

    All axes not listed in slice_axes (and whose subset is not None) are
    iterated over; listed axes get a None placeholder in every slice.
    """
    list.__init__(self)
    self.append([])
    for iaxis, axis_subset in enumerate(domain_subset):
        if axis_subset is not None and iaxis not in slice_axes \
                and mequtils.get_axis_id(iaxis) not in slice_axes:
            # iterated axis: extend every partial slice with each index
            self[:] = [prefix + [i] for prefix in self for i in axis_subset]
        else:
            # sliced axis: mark with a placeholder
            self[:] = [prefix + [None] for prefix in self]
def envelope_cells(self, **num_cells):
    """Return a regularly-spaced cells object enveloping the whole parmtable.

    The number of points along each axis defaults to the number of
    subdomains along that axis (overridable via num_<axis> keywords), but
    the cells do not necessarily follow the structure of the subdomains
    if those are overlapping, spaced out, or irregular.
    """
    dom = self.envelope_domain()
    shape = {}
    for iaxis, stats in enumerate(self._axis_stats):
        if stats.empty():
            continue
        key = 'num_' + str(mequtils.get_axis_id(iaxis)).lower()
        shape[key] = num_cells.get(key, len(stats.cells))
    return meq.gen_cells(dom, **shape)
def subdomain_cells(self):
    """Return a cells object enveloping the whole parmtable, containing
    a cell matching every subdomain along each non-empty axis.
    """
    dom = self.envelope_domain()
    cells = meq.gen_cells(dom)
    # (removed unused local 'kw' from original)
    for iaxis, stats in enumerate(self._axis_stats):
        if not stats.empty():
            # stats.cells maps cell centre -> cell size; emit axes in sorted grid order
            grid = sorted(stats.cells.keys())
            meq.add_cells_axis(cells, mequtils.get_axis_id(iaxis),
                               grid=grid,
                               cell_size=[stats.cells[x0] for x0 in grid])
    return cells
def subdomain_cells(self):
    """Return a cells object enveloping the whole parmtable, containing
    a cell matching every subdomain along each non-empty axis.
    """
    dom = self.envelope_domain()
    cells = meq.gen_cells(dom)
    # note: dead local 'kw = {}' from the original was removed
    for iaxis, stats in enumerate(self._axis_stats):
        if not stats.empty():
            # stats.cells maps cell centre -> cell size; axes added in grid order
            grid = sorted(stats.cells.keys())
            meq.add_cells_axis(cells, mequtils.get_axis_id(iaxis),
                               grid=grid,
                               cell_size=[stats.cells[x0] for x0 in grid])
    return cells
def _make_axis_index(self):
    """Builds up various indices based on content of the parmtable.

    Loads a pickled cache (ParmTab.cache inside the table directory) if it
    is newer than the funklet data; otherwise regenerates axis statistics,
    domain/cell indices and funklet-name components, and rewrites the cache.
    """
    # check if cache is up-to-date
    cachepath = os.path.join(self.filename, 'ParmTab.cache')
    funkpath = os.path.join(self.filename, 'funklets')
    self.mtime = os.path.getmtime(funkpath) if os.path.exists(funkpath) else time.time()
    try:
        has_cache = os.path.getmtime(cachepath) >= self.mtime
        if not has_cache:
            dprintf(2, "cache is out of date, will regenerate\n")
    except Exception:  # was bare except: don't swallow KeyboardInterrupt/SystemExit
        dprintf(0, "%s: os.path.getmtime() throws exception, assuming cache is out of date\n", self.filename)
        has_cache = False
    # try to load the cache if so
    t0 = time.time()
    if has_cache:
        try:
            dprintf(2, "loading index cache\n")
            # use a context manager so the file handle is always closed
            with open(cachepath, "rb") as cachefile:
                self._funklet_names, self._domain_list, self._axis_stats, self._name_components, \
                    self._domain_fullset, self._domain_cell_index, self._domain_reverse_index \
                    = pickle.load(cachefile)
            dprintf(2, "elapsed time: %f seconds\n", time.time() - t0)
            t0 = time.time()
            return
        except Exception:  # was bare except; any load error falls back to regeneration
            if verbosity.get_verbose() > 0:
                traceback.print_exc()
            dprintf(0, "%s: error reading cached stats, regenerating\n", self.filename)
            has_cache = False
    # no cache, so regenerate everything
    if not has_cache:
        self._axis_stats = [_AxisStats(mequtils.get_axis_id(i)) for i in range(mequtils.max_axis)]
        pt = self.parmtable()
        dprintf(2, "loading domain list\n")
        self._domain_list = pt.domain_list()
        dprintf(2, "elapsed time: %f seconds\n", time.time() - t0)
        t0 = time.time()
        dprintf(2, "collecting axis stats\n")
        self._axes = {}
        for domain in self._domain_list:
            for axis, rng in domain.items():
                if str(axis) != 'axis_map':
                    self._axis_stats[mequtils.get_axis_number(axis)].add_cell(*rng)
        dprintf(2, "elapsed time: %f seconds\n", time.time() - t0)
        t0 = time.time()
        dprintf(2, "finalizing axis stats\n")
        self._domain_fullset = [None] * mequtils.max_axis
        for iaxis, stats in enumerate(self._axis_stats):
            stats.update()
            if not stats.empty():
                self._domain_fullset[iaxis] = list(range(len(stats.cells)))
                dprintf(2, "axis %s: %d unique cells from %g to %g\n",
                        stats.name, len(stats.cells), *stats.minmax)
        dprintf(2, "elapsed time: %f seconds\n", time.time() - t0)
        t0 = time.time()
        dprintf(2, "making subdomain indices\n")
        # now make a subdomain index
        self._domain_cell_index = [0] * len(self._domain_list)
        self._domain_reverse_index = {}
        for idom, domain in enumerate(self._domain_list):
            index = [None] * mequtils.max_axis
            for axis, rng in domain.items():
                if str(axis) != 'axis_map':
                    iaxis = mequtils.get_axis_number(axis)
                    index[iaxis] = self._axis_stats[iaxis].lookup_cell(*rng)
            # insert into domain_cells_index and domain_reverse_index
            index = tuple(index)
            self._domain_cell_index[idom] = index
            self._domain_reverse_index[index] = idom
        dprintf(2, "elapsed time: %f seconds\n", time.time() - t0)
        t0 = time.time()
        dprintf(2, "loading funklet name list\n")
        self._funklet_names = list(pt.name_list())
        dprintf(2, "elapsed time: %f seconds\n", time.time() - t0)
        t0 = time.time()
        dprintf(2, "computing funklet indices\n")
        self._name_components = {}
        for name in self._funklet_names:
            for i, token in enumerate(name.split(':')):
                self._name_components.setdefault(i, set()).add(token)
        self._name_components = [self._name_components[i] for i in range(len(self._name_components))]
        for i, values in enumerate(self._name_components):
            dprintf(2, "component %d: %s\n", i, ' '.join(values))
        dprintf(2, "elapsed time: %f seconds\n", time.time() - t0)
        t0 = time.time()
        dprintf(2, "writing cache\n")
        try:
            # context manager ensures the cache file is flushed and closed
            with open(cachepath, 'wb') as cachefile:
                pickle.dump((
                    self._funklet_names, self._domain_list, self._axis_stats, self._name_components,
                    self._domain_fullset, self._domain_cell_index, self._domain_reverse_index
                ), cachefile)
        except Exception:  # was bare except; cache write is best-effort
            if verbosity.get_verbose() > 0:
                traceback.print_exc()
            dprintf(0, "%s: error writing stats to cache, will probably regenerate next time\n", self.filename)
        dprintf(2, "elapsed time: %f seconds\n", time.time() - t0)
        t0 = time.time()
def _make_axis_index(self):
    """Builds up various indices based on content of the parmtable.

    Loads a pickled cache (ParmTab.cache inside the table directory) if it
    is newer than the funklet data; otherwise regenerates axis statistics,
    domain/cell indices and funklet-name components, and rewrites the cache.
    """
    # check if cache is up-to-date
    cachepath = os.path.join(self.filename, 'ParmTab.cache')
    funkpath = os.path.join(self.filename, 'funklets')
    self.mtime = os.path.getmtime(funkpath) if os.path.exists(funkpath) else time.time()
    try:
        has_cache = os.path.getmtime(cachepath) >= self.mtime
        if not has_cache:
            dprintf(2, "cache is out of date, will regenerate\n")
    except Exception:  # was bare except: don't swallow KeyboardInterrupt/SystemExit
        dprintf(0, "%s: os.path.getmtime() throws exception, assuming cache is out of date\n", self.filename)
        has_cache = False
    # try to load the cache if so
    t0 = time.time()
    if has_cache:
        try:
            dprintf(2, "loading index cache\n")
            # BUGFIX: original used the Python-2-only builtin file(cachepath) in
            # text mode; py3 pickle requires open(..., "rb"), and a context
            # manager guarantees the handle is closed
            with open(cachepath, "rb") as cachefile:
                self._funklet_names, self._domain_list, self._axis_stats, self._name_components, \
                    self._domain_fullset, self._domain_cell_index, self._domain_reverse_index \
                    = pickle.load(cachefile)
            dprintf(2, "elapsed time: %f seconds\n", time.time() - t0)
            t0 = time.time()
            return
        except Exception:  # was bare except; any load error falls back to regeneration
            if verbosity.get_verbose() > 0:
                traceback.print_exc()
            dprintf(0, "%s: error reading cached stats, regenerating\n", self.filename)
            has_cache = False
    # no cache, so regenerate everything
    if not has_cache:
        self._axis_stats = [_AxisStats(mequtils.get_axis_id(i)) for i in range(mequtils.max_axis)]
        pt = self.parmtable()
        dprintf(2, "loading domain list\n")
        self._domain_list = pt.domain_list()
        dprintf(2, "elapsed time: %f seconds\n", time.time() - t0)
        t0 = time.time()
        dprintf(2, "collecting axis stats\n")
        self._axes = {}
        for domain in self._domain_list:
            for axis, rng in domain.items():
                if str(axis) != 'axis_map':
                    self._axis_stats[mequtils.get_axis_number(axis)].add_cell(*rng)
        dprintf(2, "elapsed time: %f seconds\n", time.time() - t0)
        t0 = time.time()
        dprintf(2, "finalizing axis stats\n")
        self._domain_fullset = [None] * mequtils.max_axis
        for iaxis, stats in enumerate(self._axis_stats):
            stats.update()
            if not stats.empty():
                self._domain_fullset[iaxis] = list(range(len(stats.cells)))
                dprintf(2, "axis %s: %d unique cells from %g to %g\n",
                        stats.name, len(stats.cells), *stats.minmax)
        dprintf(2, "elapsed time: %f seconds\n", time.time() - t0)
        t0 = time.time()
        dprintf(2, "making subdomain indices\n")
        # now make a subdomain index
        self._domain_cell_index = [0] * len(self._domain_list)
        self._domain_reverse_index = {}
        for idom, domain in enumerate(self._domain_list):
            index = [None] * mequtils.max_axis
            for axis, rng in domain.items():
                if str(axis) != 'axis_map':
                    iaxis = mequtils.get_axis_number(axis)
                    index[iaxis] = self._axis_stats[iaxis].lookup_cell(*rng)
            # insert into domain_cells_index and domain_reverse_index
            index = tuple(index)
            self._domain_cell_index[idom] = index
            self._domain_reverse_index[index] = idom
        dprintf(2, "elapsed time: %f seconds\n", time.time() - t0)
        t0 = time.time()
        dprintf(2, "loading funklet name list\n")
        self._funklet_names = list(pt.name_list())
        dprintf(2, "elapsed time: %f seconds\n", time.time() - t0)
        t0 = time.time()
        dprintf(2, "computing funklet indices\n")
        self._name_components = {}
        for name in self._funklet_names:
            for i, token in enumerate(name.split(':')):
                self._name_components.setdefault(i, set()).add(token)
        self._name_components = [self._name_components[i] for i in range(len(self._name_components))]
        for i, values in enumerate(self._name_components):
            dprintf(2, "component %d: %s\n", i, ' '.join(values))
        dprintf(2, "elapsed time: %f seconds\n", time.time() - t0)
        t0 = time.time()
        dprintf(2, "writing cache\n")
        try:
            # BUGFIX: original wrote via file(cachepath, 'w'); pickle output
            # must be binary ('wb') and the handle should be closed
            with open(cachepath, 'wb') as cachefile:
                pickle.dump((
                    self._funklet_names, self._domain_list, self._axis_stats, self._name_components,
                    self._domain_fullset, self._domain_cell_index, self._domain_reverse_index
                ), cachefile)
        except Exception:  # was bare except; cache write is best-effort
            if verbosity.get_verbose() > 0:
                traceback.print_exc()
            dprintf(0, "%s: error writing stats to cache, will probably regenerate next time\n", self.filename)
        dprintf(2, "elapsed time: %f seconds\n", time.time() - t0)
        t0 = time.time()
entity_list = makeParmEntityList(name_list)
if options.list:
    # BUGFIX: this chunk used Python-2-only 'print' statements and
    # dict.iterkeys(); converted to Python 3 (print() calls, iter(d.keys()))
    # to match the rest of the file.
    print("\nTable contains %d funklets. We can plot the following things:\n" % len(name_list))
    for i, (entity_dict, ncomp, func, props) in enumerate(entity_list):
        names = namesToSetNotation(iter(entity_dict.keys()))
        # assemble "/plottable" or "/{p1,p2,...}" suffix for the listing
        plts = ",".join([p[0] for p in props.plottables])
        plts = plts and ("/{%s}" % plts if len(props.plottables) > 1 else "/%s" % plts)
        print(" %s: %s%s%s" % (props.name, names, plts, ("" if i else " (plotted by default)")))
    print("\nValid axes are:\n")
    cells = pt.envelope_cells()
    for iaxis in range(mequtils.max_axis):
        axis = mequtils.get_axis_id(iaxis)
        grid = cells.grid.get(axis)
        if grid is not None:
            print(" %s: %d points from %g to %g" % (axis, len(grid), grid[0], grid[-1]))
    print("")
    sys.exit(0)
#
# parse plot specifications
#
import fnmatch
import re
# This will contain a definitive list of all plots. Each plot is specified as a
# [description,entity_list,entity_list2,scatterplot]. entity_list2 is valid for dual plots,
# Open the parmtable and enumerate its funklets / plottable entities.
pt = ParmTables.open(tabname)
name_list = pt.parmtable().name_list()
entity_list = makeParmEntityList(name_list)
if options.list:
    # --list mode: print what can be plotted, then the valid axes, and exit.
    print("\nTable contains %d funklets. We can plot the following things:\n" % len(name_list))
    for i, (entity_dict, ncomp, func, props) in enumerate(entity_list):
        names = namesToSetNotation(iter(entity_dict.keys()))
        # assemble "/plottable" or "/{p1,p2,...}" suffix for the listing
        plts = ",".join([p[0] for p in props.plottables])
        plts = plts and ("/{%s}" % plts if len(props.plottables) > 1 else "/%s" % plts)
        # first entity (i == 0) is the one plotted by default
        print(" %s: %s%s%s" % (props.name, names, plts, ("" if i else " (plotted by default)")))
    print("\nValid axes are:\n")
    cells = pt.envelope_cells()
    for iaxis in range(mequtils.max_axis):
        axis = mequtils.get_axis_id(iaxis)
        grid = cells.grid.get(axis)
        if grid is not None:
            print(" %s: %d points from %g to %g" % (axis, len(grid), grid[0], grid[-1]))
    print("")
    sys.exit(0)
#
# parse plot specifications
#
import fnmatch
import re
# This will contain a definitive list of all plots. Each plot is specified as a
# [description,entity_list,entity_list2,scatterplot]. entity_list2 is valid for dual plots,
# and is None for a single plot.