def promote_dataspecs(in_specs):
    """Combine several input DataSpecs into a single promoted DataSpec.

    Rules (mirroring NumPy promotion):
      * valtype is 'array' if any input is an array, otherwise 'scalar';
      * dims are taken from the array input with the most dimensions
        (broadcast-compatibility is assumed, not checked — see +++ note);
      * dtype is the NumPy result type of all inputs, where scalar inputs
        participate as 0-d values so scalar/array promotion rules apply.

    Parameters
    ----------
    in_specs : iterable of DataSpec
        Specs with ``valtype`` ('array'/'scalar'), ``dtype`` and ``dims``.

    Returns
    -------
    DataSpec
        The promoted spec. Returns a 'scalar' spec with dtype ``None`` for
        an empty input.
    """
    curvtype = 'scalar'
    curdtype = None
    dims = []
    for ispec in in_specs:
        if ispec.valtype == 'array':
            curvtype = 'array'
            # Promotion probe: a 1-element array of this spec's dtype.
            # BUGFIX: the original used np.ndarray(...), which leaves the
            # element UNINITIALISED; under NumPy's legacy value-based
            # casting the garbage value could nondeterministically change
            # the promoted dtype.  np.zeros gives a deterministic probe.
            comp = np.zeros((1,), dtype=ispec.dtype)
            #+++ Doesn't actually check if compatible, just assumes
            # we can broadcast to larger arrays....
            if len(ispec.dims) > len(dims):
                dims = ispec.dims
        else:
            # Scalar probe: a 0-d element, so scalar-vs-array promotion
            # rules still apply exactly as in the original code.
            comp = np.zeros((1,), dtype=ispec.dtype)[0]
        if curdtype is None:
            curdtype = comp
        else:
            curdtype = np.result_type(curdtype, comp)
    # BUGFIX: with exactly one input spec the loop never calls
    # np.result_type, so the original returned the raw probe array/scalar
    # instead of a dtype.  Normalise so the spec always carries a dtype.
    if curdtype is not None:
        curdtype = np.result_type(curdtype)
    return DataSpec(curvtype, dims, curdtype)
def get_dataspec(self):
    """Describe this node's output: a time/lat/lon array whose dtype is
    taken from the underlying mapped variable."""
    return DataSpec(
        'array',
        ['time', 'latitude', 'longitude'],
        self.fm.mapped_var.dtype,
    )
def get_dataspec(self):
    """Describe this node's output: a time/lat/lon array using this
    node's own dtype."""
    axes = ['time', 'latitude', 'longitude']
    return DataSpec('array', axes, self.dtype)
def get_dataspec(self):
    """Describe this node's output: an array over this node's dims, with
    the dtype of the wrapped value."""
    out_dims = self.dims
    out_dtype = self.value.dtype
    return DataSpec('array', out_dims, out_dtype)
def get_dataspec(self):
    """Describe this node's output: an array over hypsometric percentile
    and the spatial grid, using this node's dtype."""
    axes = ['hypsometric_percentile', 'latitude', 'longitude']
    return DataSpec('array', axes, self.dtype)
def get_dataspec(self):
    """Describe this node's output: a 2-D spatial (lat/lon) array using
    this node's dtype."""
    return DataSpec(
        'array',
        ['latitude', 'longitude'],
        self.dtype,
    )
def get_dataspec(self):
    """Describe this node's output: a 2-D spatial (lat/lon) array of
    64-bit floats."""
    axes = ['latitude', 'longitude']
    return DataSpec('array', axes, np.float64)
def get_dataspec(self):
    """Describe this node's output: a 64-bit float array over this
    node's own dims."""
    out_dims = self.dims
    return DataSpec('array', out_dims, np.float64)
def get_dataspec(self):
    """Describe this node's output: a dimensionless scalar whose dtype is
    the Python type of the held value."""
    scalar_type = type(self.value)
    return DataSpec('scalar', [], scalar_type)
def run(self,input_map,output_map,period,extent): #periods,chunks):
    '''
    Should be the basis for new-style sim server
    Currently no file output, but runs inputgraph/model quite happily...

    Orchestrates a full multiprocess simulation:
      1. split the spatial extent into chunks and the period into
         (annual) sub-periods;
      2. partition the input mapping into I/O nodes (read by a reader
         process) and worker nodes (evaluated in model workers);
      3. build dataspecs and shared-memory buffer groups for inputs,
         model states and outputs;
      4. spawn reader / N workers / writer / control processes wired
         together with multiprocessing queues;
      5. pump the writer's log queue until it reports 'terminate', then
         shut everything down in order and re-raise on worker failure.

    Parameters (as used below — semantics inferred from calls, confirm
    against callers):
      input_map  : mapping of input keys to graph nodes
      output_map : mapping of output keys to graph nodes
      period     : overall simulation period (split with dt.split_period)
      extent     : spatial extent (split with extents.subdivide_extent)
    '''
    import time
    start = time.time()
    # Spatial chunks and (annual, 'a') time sub-periods define the unit
    # of work handed to each worker.
    chunks = extents.subdivide_extent(extent,self.spatial_chunk)
    periods = dt.split_period(period,'a')
    self.logger.info("Getting I/O dataspecs...")
    #+++ Document rescaling separately, don't just change the graph behind the scenes...
    #mapping = graph.map_rescaling_nodes(input_map.mapping,extent)
    mapping = input_map
    # Restrict the graph to just the inputs the model actually needs.
    filtered = graph.get_input_tree(self.model.get_input_keys(),mapping)
    input_nodes = {}
    worker_nodes = {}
    output_nodes = {}
    # Nodes with an 'io' property are read by the reader process; in the
    # worker graph they are replaced with const(None) placeholders (the
    # real data arrives via shared-memory buffers).
    for k,v in filtered.items():
        if 'io' in v.properties:
            input_nodes[k] = v
            worker_nodes[k] = nodes.const(None)
        else:
            worker_nodes[k] = v
    # Outputs flagged 'from_model' are produced by the model itself.
    for k,v in output_map.items():
        try:
            if v.properties['io'] == 'from_model':
                output_nodes[k] = v
        # NOTE(review): bare except — presumably guarding nodes without a
        # .properties attribute (AttributeError, per the original note),
        # but as written it swallows everything.
        except: # AttributeError:
            pass
            # print("EXCEPTION",k,v)
    igraph = graph.ExecutionGraph(input_nodes)
    self._set_max_dims(igraph)
    input_dspecs = igraph.get_dataspecs(True)
    #+++ No guarantee this will close files. Put in separate function?
    del igraph
    model_dspecs = graph.ExecutionGraph(mapping).get_dataspecs(True)
    output_dspecs = graph.OutputGraph(output_nodes).get_dataspecs(True)
    self.model.init_shared(model_dspecs)
    ### initialise output ncfiles
    self.logger.info("Initialising output files...")
    outgraph = graph.OutputGraph(output_map)
    outgraph.initialise(period,extent)
    #+++ Can we guarantee that statespecs will be 64bit for recycling?
    # NWORKERS = 2
    # READ_AHEAD = 1
    # All model states share one per-cell float64 spec; states are also
    # injected into the input specs under 'init_<state>' keys so the
    # reader can supply initial values.
    sspec = DataSpec('array',['cell'],np.float64)
    state_specs = {}
    for k in self.model.get_state_keys():
        init_k = 'init_' + k
        input_dspecs[init_k] = sspec
        state_specs[k] = sspec
    self.logger.info("Building buffers...")
    # Buffer counts: inputs/outputs sized for workers + read-ahead;
    # states doubled (current + recycled next-period state).
    input_bufs = create_managed_buffergroups(input_dspecs,self.max_dims,self.num_workers+self.read_ahead)
    state_bufs = create_managed_buffergroups(state_specs,self.max_dims,self.num_workers*2+self.read_ahead)
    output_bufs = create_managed_buffergroups(output_dspecs,self.max_dims,self.num_workers+self.read_ahead)
    all_buffers = dict(inputs=input_bufs,states=state_bufs,outputs=output_bufs)
    # NOTE(review): smc is never used after construction — presumably the
    # constructor registers this process with the shared-memory manager;
    # confirm before removing.
    smc = SharedMemClient(all_buffers,False)
    # Queue topology:
    #   control_master : children -> ControlMaster
    #   control_status : ControlMaster -> this process (final status)
    #   state_returnq  : workers -> reader (state buffer recycling)
    #   chunkq         : reader -> workers (work items)
    #   chunkoutq      : workers -> writer (results)
    control_master = mp.Queue()
    control_status = mp.Queue()
    state_returnq =mp.Queue()
    chunkq = mp.Queue()
    chunkoutq = mp.Queue()
    reader_inq = dict(control=mp.Queue(),state_return=state_returnq)
    reader_outq = dict(control=control_master,chunks=chunkq)
    writer_inq = dict(control=mp.Queue(),chunks=chunkoutq)
    writer_outq = dict(control=control_master,log=mp.Queue()) #,chunks=chunkq)
    child_control_qs = [reader_inq['control'],writer_inq['control'],writer_outq['log']]
    self.logger.info("Running simulation...")
    workers = []
    for w in range(self.num_workers):
        worker_inq = dict(control=mp.Queue(),chunks=chunkq)
        worker_outq = dict(control=control_master,state_return=state_returnq,chunks=chunkoutq)
        worker_p = mg.ModelGraphRunner(worker_inq,worker_outq,all_buffers,chunks,periods,worker_nodes,self.model)
        worker_p.start()
        workers.append(worker_p)
        child_control_qs.append(worker_inq['control'])
    control = ControlMaster(control_master, control_status, child_control_qs)
    control.start()
    reader_p = input_reader.InputGraphRunner(reader_inq,reader_outq,all_buffers,chunks,periods,input_nodes,self.model.get_state_keys())
    reader_p.start()
    writer_p = writer.OutputGraphRunner(writer_inq,writer_outq,all_buffers,chunks,periods,output_map)
    writer_p.start()
    # Main-loop heartbeat: relay writer log messages until the writer
    # announces 'terminate' (i.e. all output has been written).
    log = True
    while log:
        msg = writer_outq['log'].get()
        if msg['subject'] == 'terminate':
            log = False
        else:
            self.logger.info(msg['subject'])
    # Ordered shutdown: writer first, then workers, reader, control.
    writer_p.join()
    for w in workers:
        w.qin['control'].put(message('terminate'))
        # control_master.get_nowait()
        w.join()
    reader_inq['control'].put(message('terminate'))
    control_master.put(message('finished'))
    problem = False
    # ControlMaster reports whether any child raised.
    msg = control_status.get()
    if msg['subject'] == 'exception_raised':
        problem = True
    control.join()
    reader_p.join()
    if problem:
        raise Exception("Problem detected")
    self.logger.info("elapsed time: %.2f",time.time() - start)
def get_dataspec(self):
    """Describe this node's output: a time/lat/lon array of 32-bit
    floats.

    NOTE: dtype is hard-coded to float32; the original's commented-out
    ``self.v.dtype`` suggests it may eventually come from the variable.
    """
    return DataSpec(
        'array',
        ['time', 'latitude', 'longitude'],
        np.float32,  # deliberately fixed, not self.v.dtype
    )
def get_dataspec(self):
    """Describe this node's output: an array whose dims mirror the
    coordinate system's dimension names, using this node's dtype."""
    dim_names = []
    for dim in self.cs.dimensions:
        dim_names.append(dim.name)
    return DataSpec('array', dim_names, self.dtype)