class NdArrayExpr(Expr):
  _shape = Tuple
  sparse = Bool
  dtype = PythonValue(None, desc="np.type or type")
  tile_hint = PythonValue(None, desc="Tuple or None")
  reduce_fn = PythonValue(None, desc="Function or None")

  def pretty_str(self):
    return 'DistArray[%d](%s, %s, hint=%s)' % (self.expr_id, self.shape,
                                               np.dtype(self.dtype).name,
                                               self.tile_hint)

  def visit(self, visitor):
    return expr_like(self,
                     _shape=visitor.visit(self.shape),
                     dtype=visitor.visit(self.dtype),
                     tile_hint=self.tile_hint,
                     sparse=self.sparse,
                     reduce_fn=self.reduce_fn)

  def dependencies(self):
    return {}

  def compute_shape(self):
    return self._shape

  def _evaluate(self, ctx, deps):
    shape = self._shape
    dtype = self.dtype
    tile_hint = self.tile_hint

    return distarray.create(shape, dtype,
                            reducer=self.reduce_fn,
                            tile_hint=tile_hint,
                            sparse=self.sparse)
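# --- Illustrative usage (not part of the original source) ---
# A minimal sketch of how NdArrayExpr is typically built and evaluated.  It
# assumes a running cluster context (blob_ctx) is available before
# .evaluate() is called; front-end helpers normally construct this expression
# for you, so the direct construction below is for illustration only.
#
#   a = NdArrayExpr(_shape=(1000, 1000), dtype=np.float32,
#                   sparse=False, tile_hint=(500, 500))
#   a.compute_shape()      # -> (1000, 1000), known without evaluation
#   dist = a.evaluate()    # materializes an empty DistArray on the cluster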
class FilterExpr(Expr):
  '''Represents an indexing operation.

  Attributes:
    src: `Expr` to index into
    idx: `tuple` (for slicing) or `Expr` (for bool/integer indexing)
  '''
  src = Instance(Expr)
  idx = PythonValue(None, desc="Tuple or Expr")

  def __init__(self, *args, **kw):
    super(FilterExpr, self).__init__(*args, **kw)
    assert not isinstance(self.src, ListExpr)
    assert not isinstance(self.idx, ListExpr)
    assert not isinstance(self.idx, TupleExpr)

  def compute_shape(self):
    if isinstance(self.idx, (int, slice, tuple)):
      src_shape = self.src.compute_shape()
      ex = extent.from_shape(src_shape)
      slice_ex = extent.compute_slice(ex, self.idx)
      return slice_ex.shape
    else:
      raise NotShapeable

  def _evaluate(self, ctx, deps):
    src = deps['src']
    idx = deps['idx']

    assert not isinstance(idx, list)
    util.log_debug('Evaluating index: %s', idx)
    return eval_index(ctx, src, idx)
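# --- Illustrative usage (not part of the original source) ---
# Per the docstring above, a non-tuple index (e.g. a boolean Expr) routes
# through FilterExpr.  A hedged sketch, assuming `x` is an existing Expr and a
# cluster context is active:
#
#   mask = x > 0            # Expr.__gt__ builds a lazy elementwise comparison
#   positives = x[mask]     # Expr.__getitem__ returns FilterExpr(src=x, idx=mask)
#   positives.shape         # compute_shape raises NotShapeable (idx is an Expr),
#                           # so Expr.shape falls back to evaluating the filter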
class ShuffleExpr(Expr):
  array = PythonValue(None, desc="DistArray or Expr")
  map_fn = Function
  target = PythonValue(None, desc="DistArray or Expr")
  cost_hint = PythonValue(None, desc='Dict or None')
  shape_hint = PythonValue(None, desc='Tuple or None')
  fn_kw = PythonValue(None, desc='DictExpr')

  def __str__(self):
    cost_str = '{ %s }' % ',\n'.join(
        ['%s: %s' % (hash(k), v) for k, v in self.cost_hint.iteritems()])
    return 'shuffle[%d](%s, %s, %s, %s)' % (self.expr_id, self.map_fn,
                                            self.array, cost_str, self.fn_kw)

  def _evaluate(self, ctx, deps):
    v = deps['array']
    fn_kw = deps['fn_kw']
    target = deps['target']

    #util.log_debug('Evaluating shuffle. source: %s, target %s, keywords: %s',
    #               v, target, fn_kw)

    map_fn = self.map_fn

    if target is not None:
      v.foreach_tile(mapper_fn=target_mapper,
                     kw=dict(map_fn=map_fn, source=v,
                             target=target, fn_kw=fn_kw))
      return target
    else:
      return v.map_to_array(mapper_fn=notarget_mapper,
                            kw=dict(source=v, map_fn=map_fn, fn_kw=fn_kw))

  def compute_shape(self):
    if self.target is not None:
      return self.target.shape
    elif self.shape_hint is not None:
      return self.shape_hint
    else:
      # We don't know the shape after shuffle.
      raise NotShapeable
class WriteArrayExpr(Expr):
  array = PythonValue(None, desc="DistArray or Expr")
  src_slices = PythonValue(None, desc="Slices or a tuple of slices")
  data = PythonValue(None, desc="np.ndarray or Expr")
  dst_slices = PythonValue(None, desc="Slices or a tuple of slices")

  def __str__(self):
    return 'WriteArrayExpr[%d] %s %s' % (self.expr_id, self.array, self.data)

  def _evaluate(self, ctx, deps):
    array = deps['array']
    src_slices = deps['src_slices']
    data = deps['data']
    dst_slices = deps['dst_slices']

    sregion = extent.from_slice(src_slices, array.shape)
    if isinstance(data, np.ndarray) or sp.issparse(data):
      if sregion.shape == data.shape:
        array.update(sregion, data)
      else:
        array.update(sregion, data[dst_slices])
    elif isinstance(data, distarray.DistArray):
      dst_slice = Slice(data, dst_slices)
      Assert.eq(sregion.shape, dst_slice.shape)
      array.foreach_tile(mapper_fn=_write_mapper,
                         kw={'source': array,
                             'sregion': sregion,
                             'dst_slice': dst_slice})
    else:
      raise TypeError

    return array

  def compute_shape(self):
    return self.array.shape
class SliceExpr(base.Expr):
  '''Represents an indexing operation.

  Attributes:
    src: `Expr` to index into
    idx: `tuple` (for slicing) or `Expr` (for bool/integer indexing)
    broadcast_to: shape to broadcast to before slicing
  '''
  src = Instance(base.Expr)
  idx = PythonValue(None, desc="Tuple or Expr")
  broadcast_to = PythonValue

  def __init__(self, *args, **kw):
    super(SliceExpr, self).__init__(*args, **kw)
    assert not isinstance(self.src, base.ListExpr)
    assert not isinstance(self.idx, base.ListExpr)
    assert not isinstance(self.idx, base.TupleExpr)

  def compute_shape(self):
    if isinstance(self.idx, (int, long, slice, tuple)):
      src_shape = self.src.compute_shape()
      ex = extent.from_shape(src_shape)
      slice_ex = extent.compute_slice(ex, self.idx)
      return slice_ex.shape
    else:
      raise base.NotShapeable

  def _evaluate(self, ctx, deps):
    '''Index an array by a slice.

    Args:
      ctx: `BlobCtx`
      src: `DistArray` to read from
      idx: int or tuple

    Returns:
      Slice: The result of src[idx]
    '''
    src = deps['src']
    idx = deps['idx']

    assert not isinstance(idx, list)
    util.log_debug('Evaluating slice: %s', idx)
    if self.broadcast_to is not None:
      new_shape = self.broadcast_to
      if src.shape != new_shape:
        src = broadcast.Broadcast(src, new_shape)
    return Slice(src, idx)
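# --- Illustrative usage (not part of the original source) ---
# A sketch of how tuple/slice indexing reaches SliceExpr (see Expr.__getitem__
# below).  `x` is assumed to be an existing 2-D Expr:
#
#   view = x[10:20, :]      # tuple of slices -> SliceExpr(src=x, idx=(slice(10, 20), slice(None)))
#   row = x[3]              # integer index -> SliceExpr wrapped in a ReshapeExpr
#                           # that fixes up the result shape
#   view.compute_shape()    # computable without evaluation: (10, x.shape[1])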
class TransposeExpr(Expr):
  array = Instance(Expr)
  tile_hint = PythonValue(None, desc="Tuple or None")

  def __str__(self):
    return 'Transpose[%d] %s' % (self.expr_id, self.array)

  def _evaluate(self, ctx, deps):
    v = deps['array']
    return Transpose(v)

  def compute_shape(self):
    # May raise NotShapeable
    return self.array.shape[::-1]
class ReshapeExpr(Expr):
  array = Instance(Expr)
  new_shape = Tuple
  tile_hint = PythonValue(None, desc="None or Tuple")

  def __str__(self):
    return 'Reshape[%d] %s to %s' % (self.expr_id, self.array, self.new_shape)

  def _evaluate(self, ctx, deps):
    v = deps['array']
    shape = deps['new_shape']
    return Reshape(v, shape, self.tile_hint)

  def compute_shape(self):
    return self.new_shape
class TileOpExpr(Expr):
  array = PythonValue(None, desc="DistArray or Expr")
  map_fn = Function
  fn_kw = Instance(DictExpr)

  def __str__(self):
    return 'tile_operation[%d](%s, %s)' % (self.expr_id, self.map_fn, self.array)

  def _evaluate(self, ctx, deps):
    v = deps['array']
    fn_kw = deps['fn_kw']

    util.log_info('Keywords: %s', fn_kw)

    map_fn = self.map_fn
    return v.foreach_tile(mapper_fn=tile_op_mapper,
                          kw=dict(map_fn=map_fn, source=v, fn_kw=fn_kw))

  def compute_shape(self):
    # We don't know the shape after the tile operation.
    raise NotShapeable
class ReduceExpr(Expr):
  children = Instance(ListExpr)
  child_to_var = Instance(list)
  axis = PythonValue(None, desc="Integer or None")
  dtype_fn = Function
  op = Instance(LocalExpr)
  accumulate_fn = PythonValue(None, desc="Function or ReduceExpr")
  tile_hint = PythonValue(None, desc="Tuple or None")

  def __init__(self, *args, **kw):
    super(ReduceExpr, self).__init__(*args, **kw)
    assert self.dtype_fn is not None
    assert isinstance(self.children, ListExpr)

  def compute_shape(self):
    shapes = [i.shape for i in self.children]
    child_shape = collections.defaultdict(int)
    for s in shapes:
      for i, v in enumerate(s):
        child_shape[i] = max(child_shape[i], v)
    input_shape = tuple([child_shape[i] for i in range(len(child_shape))])
    return extent.shape_for_reduction(input_shape, self.axis)

  def pretty_str(self):
    return 'Reduce(%s, axis=%s, %s, hint=%s)' % (self.op.fn.__name__,
                                                 self.axis,
                                                 indent(self.children.pretty_str()),
                                                 self.tile_hint)

  def _evaluate(self, ctx, deps):
    children = deps['children']
    child_to_var = deps['child_to_var']
    axis = deps['axis']
    op = deps['op']
    tile_accum = deps['accumulate_fn']

    children = broadcast.broadcast(children)
    largest = distarray.largest_value(children)
    dtype = deps['dtype_fn'](children[0])

    # util.log_info('Reducer: %s', op)
    # util.log_info('Combiner: %s', tile_accum)
    # util.log_info('Reducing %s over axis %s', children, axis)

    shape = extent.shape_for_reduction(children[0].shape, axis)
    output_array = distarray.create(shape, dtype,
                                    reducer=tile_accum,
                                    tile_hint=self.tile_hint)

    # util.log_info('Reducing into array %s', output_array)
    largest.foreach_tile(_reduce_mapper, kw={'children': children,
                                             'child_to_var': child_to_var,
                                             'op': op,
                                             'axis': axis,
                                             'output': output_array})

    return output_array
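# --- Worked shape example (not part of the original source) ---
# compute_shape() merges the children's shapes dimension-by-dimension (taking
# the maximum extent) and then applies shape_for_reduction, which drops the
# reduced axis.  For example:
#
#   children shapes (4, 5) and (4, 1)
#     -> input_shape == (4, 5)
#     -> extent.shape_for_reduction((4, 5), axis=0) == (5,)
#     -> extent.shape_for_reduction((4, 5), axis=1) == (4,)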
class Expr(Node):
  '''
  Base class for all expressions.

  `Expr` objects capture user operations.

  An expression can have one or more dependencies, which must be evaluated
  before the expression itself.

  Expressions may be evaluated (using `Expr.evaluate`); the result of
  evaluating an expression is cached until the expression itself is reclaimed.
  '''
  expr_id = PythonValue(None, desc="Integer or None")
  shape_cache = PythonValue(None, desc="List, Tuple or None")
  stack_trace = Instance(ExprTrace)

  # should evaluation of this object be cached
  needs_cache = True

  optimized_expr = None

  @property
  def ndim(self):
    return len(self.shape)

  def load_data(self, cached_result):
    #util.log_info('expr:%s load_data from not checkpoint node', self.expr_id)
    return None

  def cache(self):
    '''
    Return a cached value for this `Expr`.

    If a cached value is not available, or the cached array is invalid
    (missing tiles), returns None.
    '''
    result = eval_cache.get(self.expr_id)
    if result is not None and len(result.bad_tiles) == 0:
      return result
    return self.load_data(result)

    # get distarray from eval_cache
    # check if still valid
    #   if valid, return
    #   if not valid: check for disk data
    #     if disk data: load bad tiles back
    #     else: return None
    #return eval_cache.get(self.expr_id, None)

  def dependencies(self):
    '''
    Returns:
      Dictionary mapping from name to `Expr`.
    '''
    return dict([(k, getattr(self, k)) for k in self.members])

  def compute_shape(self):
    '''
    Compute the shape of this expression.

    If the shape is not available (data dependent), raises `NotShapeable`.

    Returns:
      tuple: Shape of this expression.
    '''
    raise NotShapeable

  def visit(self, visitor):
    '''
    Apply visitor to all children of this node, returning a new `Expr` of the
    same type.

    :param visitor: `OptimizePass`
    '''
    deps = {}
    for k in self.members:
      deps[k] = visitor.visit(getattr(self, k))

    return expr_like(self, **deps)

  def __repr__(self):
    return self.pretty_str()

  def pretty_str(self):
    '''Return a pretty representation of this node, suitable for showing to users.

    By default, this returns the debug representation.
    '''
    return self.debug_str()
    #raise NotImplementedError, type(self)

  def __del__(self):
    eval_cache.deregister(self.expr_id)

  def __init__(self, *args, **kw):
    super(Expr, self).__init__(*args, **kw)
    #assert self.expr_id is not None
    if self.expr_id is None:
      self.expr_id = unique_id.next()
    else:
      Assert.isinstance(self.expr_id, int)

    if self.stack_trace is None:
      self.stack_trace = ExprTrace()

    eval_cache.register(self.expr_id)
    self.needs_cache = self.needs_cache and FLAGS.opt_expression_cache

  def evaluate(self):
    '''
    Evaluate an `Expr`.

    Dependencies are evaluated prior to evaluating the expression.

    The result of the evaluation is stored in the expression cache; future
    calls to evaluate will return the cached value.

    Returns:
      DistArray:
    '''
    cache = self.cache()
    if cache is not None:
      util.log_debug('Retrieving %d from cache' % self.expr_id)
      return cache

    ctx = blob_ctx.get()
    #util.log_info('Evaluating deps for %s', prim)
    deps = {}
    for k, vs in self.dependencies().iteritems():
      if isinstance(vs, Expr):
        deps[k] = vs.evaluate()
      else:
        #assert not isinstance(vs, (dict, list)), vs
        deps[k] = vs

    try:
      value = self._evaluate(ctx, deps)
      #value = self.optimized()._evaluate(ctx, deps)
    except TimeoutException:
      util.log_info('%s %d need to retry', self.__class__, self.expr_id)
      return self.evaluate()
    except Exception:
      print >> sys.stderr, 'Error executing expression'
      self.stack_trace.dump()
      raise

    if self.needs_cache:
      #util.log_info('Caching %s -> %s', prim.expr_id, value)
      eval_cache.set(self.expr_id, value)

    return value

  def _evaluate(self, ctx, deps):
    '''
    Evaluate this expression.

    Args:
      ctx: `BlobCtx` for interacting with the cluster
      deps (dict): Map from name to `DistArray` or scalar.
    '''
    raise NotImplementedError

  def __hash__(self):
    return self.expr_id

  def typename(self):
    return self.__class__.__name__

  def __add__(self, other):
    return _map(self, other, fn=np.add)

  def __sub__(self, other):
    return _map(self, other, fn=np.subtract)

  def __mul__(self, other):
    '''
    Multiply 2 expressions.

    :param other: `Expr`
    '''
    return _map(self, other, fn=np.multiply)

  def __mod__(self, other):
    return _map(self, other, fn=np.mod)

  def __div__(self, other):
    return _map(self, other, fn=np.divide)

  def __eq__(self, other):
    return _map(self, other, fn=np.equal)

  def __ne__(self, other):
    return _map(self, other, fn=np.not_equal)

  def __lt__(self, other):
    return _map(self, other, fn=np.less)

  def __gt__(self, other):
    return _map(self, other, fn=np.greater)

  def __and__(self, other):
    return _map(self, other, fn=np.logical_and)

  def __or__(self, other):
    return _map(self, other, fn=np.logical_or)

  def __xor__(self, other):
    return _map(self, other, fn=np.logical_xor)

  def __pow__(self, other):
    return _map(self, other, fn=np.power)

  def __neg__(self):
    return _map(self, fn=np.negative)

  def __rsub__(self, other):
    return _map(other, self, fn=np.subtract)

  def __radd__(self, other):
    return _map(other, self, fn=np.add)

  def __rmul__(self, other):
    return _map(other, self, fn=np.multiply)

  def __rdiv__(self, other):
    return _map(other, self, fn=np.divide)

  def reshape(self, new_shape):
    '''
    Return a new array with shape ``new_shape``, and data from this array.

    :param new_shape: `tuple` with same total size as original shape.
    '''
    from . import reshape
    return reshape(self, new_shape)

  def __getitem__(self, idx):
    from .slice import SliceExpr
    from .filter import FilterExpr
    from .reshape import ReshapeExpr

    if isinstance(idx, (int, tuple, slice)):
      is_del_dim = False
      del_dim = list()
      if isinstance(idx, tuple):
        for x in xrange(len(idx)):
          if isinstance(idx[x], int):
            is_del_dim = True
            del_dim.append(x)

      if isinstance(idx, int) or is_del_dim or (isinstance(idx, tuple) and (newaxis in idx)):
        # The shape has to be updated.
        if isinstance(idx, tuple):
          new_shape = tuple([slice(x, None, None) if x == -1 else x
                             for x in idx if not x == newaxis])
        else:
          new_shape = idx

        ret = SliceExpr(src=self, idx=new_shape)

        new_shape = []
        if isinstance(idx, tuple):
          shape_ptr = idx_ptr = 0
          while shape_ptr < len(ret.shape) or idx_ptr < len(idx):
            if idx_ptr < len(idx) and idx[idx_ptr] == newaxis:
              new_shape.append(1)
            else:
              new_shape.append(ret.shape[shape_ptr])
              shape_ptr += 1
            idx_ptr += 1
        else:
          new_shape = list(ret.shape)
          del_dim.append(0)

        # Delete dimensions if needed.
        if is_del_dim:
          for i in del_dim:
            new_shape.pop(i)

        return ReshapeExpr(array=ret, new_shape=new_shape)
      else:
        # This means it's just a simple slice op.
        return SliceExpr(src=self, idx=idx)
    else:
      return FilterExpr(src=self, idx=idx)

  def __setitem__(self, k, val):
    raise Exception('Expressions are read-only.')

  @property
  def shape(self):
    '''Try to compute the shape of this expression.

    If the value has been computed already this always succeeds.

    :rtype: `tuple`
    '''
    cache = self.cache()
    if cache is not None:
      return cache.shape

    if self.shape_cache is None:
      try:
        self.shape_cache = self.compute_shape()
      except NotShapeable:
        util.log_debug('Not shapeable: %s', self)
        self.shape_cache = evaluate(self).shape

    return self.shape_cache

  @property
  def size(self):
    return np.prod(self.shape)

  def optimized(self):
    '''
    Return an optimized version of this expression graph.

    :rtype: `Expr`
    '''
    # If the expr has been optimized, return the cached optimized expr.
    #return optimized_dag(self)
    if self.optimized_expr is None:
      self.optimized_expr = optimized_dag(self)
      self.optimized_expr.optimized_expr = self.optimized_expr
      return self.optimized_expr
    else:
      return self.optimized_expr

  def glom(self):
    '''
    Evaluate this expression and convert the resulting distributed array into
    a Numpy array.

    :rtype: `np.ndarray`
    '''
    return glom(self)

  def __reduce__(self):
    return evaluate(self).__reduce__()
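# --- Illustrative usage (not part of the original source) ---
# The operator overloads above build a lazy expression DAG; nothing runs until
# evaluate()/glom() is called.  A hedged sketch, assuming `a` and `b` are
# existing Exprs of compatible shape and a cluster context is active:
#
#   c = (a + b) * 2         # builds map expressions via _map; no computation yet
#   d = c[0:10]             # lazy SliceExpr over the map result
#   local = d.glom()        # evaluate on the cluster, gather to a local np.ndarray
#   local2 = d.glom()       # reuses the cached evaluation when expression
#                           # caching (needs_cache / FLAGS.opt_expression_cache)
#                           # is enabled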
class RayTraceModel(HasQueue):
    scene = Instance(SceneModel, (), {'background': (1, 1, 0.8)},
                     transient=True)

    optics = List(Traceable)
    sources = List(BaseRaySource)
    probes = List(Probe)
    constraints = List(BaseConstraint)
    results = List(Result)

    optical_path = Float(0.0, transient=True)

    all_faces = List(ctracer.Face,
                     desc="global list of all faces, created automatically "
                          "when a tracing operation is initiated. Ray end_face_idx "
                          "can be used to index this list")

    face_sets = List(ctracer.FaceList,
                     desc="list of FaceLists extracted from all "
                          "optics when a tracing operation is initiated")

    update = Event()  #triggers a tracing operation
    _updating = Bool(False)  #indicating that tracing is in progress
    update_complete = Event()

    _hold_off = Bool(False)  #Blocks tracing (while model parameters are altered)
    _update_requested = Bool(False)

    Self = self
    ShellObj = PythonValue({}, transient=True)

    recursion_limit = Int(200, desc="maximum number of refractions or reflections")

    save_btn = Button("Save scene")
    filename = File()

    def load_from_yaml(self, filename):
        with open(filename, 'r') as fobj:
            model = yaml.load(fobj)
        print(model)
        self.optics = model['components']
        self.sources = model['sources']
        self.results = model['results']
        self.update = True

    def save_as_yaml(self, filename=None):
        if filename is None:
            filename = self.filename
            if self.filename is None:
                raise IOError("no preset filename")
        model = {"components": list(self.optics),
                 "sources": list(self.sources),
                 "results": list(self.results)}
        with open(filename, 'w') as fobj:
            yaml.dump(model, fobj)
        self.filename = filename

    @on_trait_change("optics[]")
    def on_optics_change(self, obj, name, removed, opticList):
        #print("adding", opticList, removed, name)
        scene = self.scene
        #del scene.actor_list[:]
        for o in opticList:
            scene.add_actors(o.get_actors(scene))
        for o in removed:
            try:
                scene.remove_actors(o.get_actors(scene))
            except:
                pass

        for optic in opticList:
            optic.on_trait_change(self.trace_all, "update")
            optic.on_trait_change(self.render_vtk, "render")
        for optic in removed:
            optic.on_trait_change(self.trace_all, "update", remove=True)
            optic.on_trait_change(self.render_vtk, "render", remove=True)
        self.trace_all()

    def _rays_changed(self, rayList):
        scene = self.scene
        sources = [o.pipeline for o in rayList]
        mappers = [tvtk.PolyDataMapper(input_connection=s.output_port)
                   for s in sources]
        actors = [tvtk.Actor(mapper=m) for m in mappers]
        for actor in actors:
            property = actor.property
            property.color = (1, 0.5, 0)
        scene.add_actors(actors)
        self.trace_all()

    def _probes_changed(self, probeList):
        scene = self.scene
        #del scene.actor_list[:]
        for p in probeList:
            scene.add_actors(p.get_actors(scene))
        for probe in probeList:
            probe.on_trait_change(self.update_probes, "update")
            probe.on_trait_change(self.render_vtk, "render")
        self.trace_all()

    def _constraints_changed(self, constraintsList):
        for constraint in constraintsList:
            constraint.on_trait_change(self.trace_all, "update")
        self.trace_all()

    def _results_changed(self, resultsList):
        pass  #not yet sure what we need to do here

    def update_probes(self):
        if self.scene is not None:
            self.render_vtk()

    def trace_all(self):
        if self._hold_off:
            self._update_requested = True
            return
        if not self._updating:
            self._updating = True
            self.update = True

    @contextmanager
    def hold_off(self):
        """Provides a context in which tracing operations are blocked, so the
        user can edit parameters of the model without triggering multiple
        traces.  A final trace occurs on exiting the context if required."""
        self._hold_off = True
        try:
            yield
        except:
            self._update_requested = False
            raise
        finally:
            self._hold_off = False
            if self._update_requested:
                self._update_requested = False
                self.trace_all()

    @on_trait_change("update", dispatch="queued")
    def do_update(self):
        optics = self.optics
        #print "trace", next(counter)
        if optics is not None:
            self.prepare_to_trace()
            for o in optics:
                o.intersections = []
            for ray_source in self.sources:
                self.trace_ray_source(ray_source, optics)
            for o in optics:
                o.update_complete()
            for r in self.results:
                try:
                    r.calc_result(self)
                except:
                    traceback.print_exc()
        self.render_vtk()
        self._updating = False

    def trace_detail(self, async=False):
        optics = [o.clone_traits() for o in self.optics]
        for child, parent in zip(optics, self.optics):
            child.shadow_parent = parent

        sources = [s.clone_traits() for s in self.sources]
        for child, parent in zip(sources, self.sources):
            child.shadow_parent = parent

        probes = [p.clone_traits() for p in self.probes]
        for child, parent in zip(probes, self.probes):
            child.shadow_parent = parent

        if async:
            self.thd = threading.Thread(target=self.async_trace,
                                        args=(optics, sources, probes))
            self.thd.start()
        else:
            self.async_trace(optics, sources, probes)
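# --- Illustrative usage (not part of the original source) ---
# hold_off() is intended for batching parameter edits: traces requested while
# the context is active are deferred and coalesced into a single trace on
# exit.  A sketch with hypothetical component and attribute names:
#
#   model = RayTraceModel(optics=[lens], sources=[beam])
#   with model.hold_off():
#       lens.centre = (0.0, 0.0, 10.0)
#       lens.diameter = 25.4        # no trace triggered yet
#   # leaving the context calls trace_all() once, if any update was requested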
class DotExpr(Expr):
  matrix_a = PythonValue(None, desc="np.ndarray or Expr")
  matrix_b = PythonValue(None, desc="np.ndarray or Expr")
  tile_hint = PythonValue(None, desc="Tuple or None")

  def __str__(self):
    return 'Dot[%s, %s, %s]' % (self.matrix_a, self.matrix_b, self.tile_hint)

  def compute_shape(self):
    # May raise NotShapeable
    if len(self.matrix_a.shape) == 1 and len(self.matrix_b.shape) == 1:
      # vector * vector = scalar
      return (1, )
    elif len(self.matrix_a.shape) > 1 and len(self.matrix_b.shape) == 1:
      # array * vector = vector
      return (self.matrix_a.shape[0], )
    elif len(self.matrix_a.shape) > 1 and len(self.matrix_b.shape) > 1:
      # array * array = array
      return (self.matrix_a.shape[0], self.matrix_b.shape[1])
    else:
      raise ValueError

  def _evaluate(self, ctx, deps):
    av = deps['matrix_a']
    bv = deps['matrix_b']
    nptype = isinstance(bv, np.ndarray)
    dot2d = False

    tile_hint = self.tile_hint
    if len(av.shape) == 1 and len(bv.shape) == 1:
      if av.shape[0] != bv.shape[0]:
        raise ValueError("objects are not aligned")
      # vector * vector = scalar
      shape = (1, )
    elif len(av.shape) > 1 and len(bv.shape) == 1:
      # array * vector = vector
      if av.shape[1] != bv.shape[0]:
        raise ValueError("objects are not aligned")
      shape = (av.shape[0], )
    elif len(av.shape) > 1 and len(bv.shape) > 1:
      # array * array = array
      shape = (av.shape[0], bv.shape[1])
      if tile_hint is None:
        tile_hint = (av.shape[0], bv.shape[1])

    if nptype:
      target = distarray.create(shape, dtype=av.dtype,
                                tile_hint=tile_hint,
                                reducer=np.add)
      fn_kw = dict(numpy_data=bv)
      av.foreach_tile(mapper_fn=target_mapper,
                      kw=dict(source=av, map_fn=_dot_numpy,
                              target=target, fn_kw=fn_kw))
    else:
      sparse = (av.sparse and bv.sparse)
      target = distarray.create(shape, dtype=av.dtype,
                                tile_hint=tile_hint,
                                reducer=np.add,
                                sparse=sparse)
      fn_kw = dict(av=av, bv=bv)
      av.foreach_tile(mapper_fn=target_mapper,
                      kw=dict(map_fn=_dot_mapper, source=av,
                              target=target, fn_kw=fn_kw))

    return target
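# --- Shape-rule sanity check (not part of the original source) ---
# compute_shape() mirrors numpy's dot shape rules, except that a vector-vector
# product is reported as (1,) rather than a 0-d scalar:
#
#   np.dot(np.ones(3), np.ones(3)).shape            == ()       # DotExpr: (1,)
#   np.dot(np.ones((4, 3)), np.ones(3)).shape       == (4,)     # DotExpr: (4,)
#   np.dot(np.ones((4, 3)), np.ones((3, 2))).shape  == (4, 2)   # DotExpr: (4, 2)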