def vis_hook(step, t, state):
    """Per-step visualization/timing callback.

    Every ``options.vis_every`` steps, prints the relative L2 error of the
    pressure field against the reference solution and writes a VTK file.
    Every ``progress_every`` steps, prints wall-clock timing and — for the
    CL/loopy compute engines — GFlops/GBytes rates derived from the
    accumulated OpenCL profiling events.

    :param step: 0-based time step index
    :param t: current simulation time
    :param state: sequence of field arrays; the last entry is pressure
    """
    if options.vis_every and step % options.vis_every == 0:
        p = state[-1]
        if not isinstance(p, np.ndarray):
            # device array (has .get()) -> transfer to host first
            p = p.get()

        ref_p = soln(t)[-1]
        # relative L2 error against the reference solution
        print(la.norm(p - ref_p) / la.norm(ref_p))

        vis.write_vtk("out-%04d.vtu" % step,
                      [("pressure", p), ("ref_pressure", ref_p)])

    from time import time as wall_time
    progress_every = 3
    # start the clock a few reporting periods in, so startup cost
    # does not skew the per-step timing
    start_timing_at_step = 3 * progress_every
    if step % progress_every == 0:
        if step == start_timing_at_step:
            start_time[0] = wall_time()
        elif step > start_timing_at_step:
            elapsed = wall_time() - start_time[0]
            timed_steps = step - start_timing_at_step
            time_per_step = elapsed / timed_steps

            line = ("step=%d, sim_time=%f, elapsed wall time=%.2f s, "
                    "time per step=%f s"
                    % (step, t, elapsed, time_per_step))
            print(line)

        if options.comp_engine in ["cl", "loopy"]:
            # make sure all profiled kernels have completed
            for evt in cl_info.volume_events:
                evt.wait()
            for evt in cl_info.surface_events:
                evt.wait()

            if cl_info.volume_events:
                # mean kernel duration in seconds (profile stamps are ns)
                vol_time = 1e-9 * sum(
                        evt.profile.END - evt.profile.START
                        for evt in cl_info.volume_events) \
                        / len(cl_info.volume_events)
                print(
                        "volume: %.4g GFlops/s "
                        "%.4g GBytes/s time/step: %.3g s"
                        % (
                            rhs_obj.volume_flops / vol_time * 1e-9,
                            rhs_obj.volume_bytes / vol_time * 1e-9,
                            vol_time * 5)  # for RK stages
                        )
            if cl_info.surface_events:
                surf_time = 1e-9 * sum(
                        evt.profile.END - evt.profile.START
                        for evt in cl_info.surface_events) \
                        / len(cl_info.surface_events)
                print("surface: %.4g GFlops/s time/step: %.3g s" % (
                    rhs_obj.surface_flops / surf_time * 1e-9,
                    surf_time * 5))

            # reset accumulators for the next reporting period
            del cl_info.volume_events[:]
            del cl_info.surface_events[:]
def vis_hook(step, t, state):
    """Per-step visualization/timing callback for the (Hx, Hy, Ez) solver.

    Every ``options.vis_every`` steps, pushes the Ez field into the mayavi
    surface plot; every ``progress_every`` steps, prints a timing line and
    (with CL enabled) an achieved-GFlops estimate.

    :param step: 0-based time step index
    :param t: current simulation time
    :param state: (Hx, Hy, Ez) field arrays (device arrays when options.cl)
    """
    if options.vis_every and step % options.vis_every == 0:
        if options.cl:
            # fetch fields from the compute device
            Hx, Hy, Ez = [d.from_dev(x) for x in state]
        else:
            Hx, Hy, Ez = state

        vis_mesh.mlab_source.z = Ez.ravel()

        # update colors, too (expensive)
        if options.update_colors:
            vis_mesh.mlab_source.scalars = Ez.ravel()

    from time import time as wall_time
    progress_every = 20
    start_timing_at_step = progress_every
    # use progress_every instead of duplicating the literal 20
    if step % progress_every == 0:
        if options.cl:
            # drain the queue so wall-clock timing is meaningful
            d.queue.finish()
        if step == start_timing_at_step:
            start_time[0] = wall_time()
        elif step > start_timing_at_step:
            elapsed = wall_time() - start_time[0]
            timed_steps = step - start_timing_at_step
            time_per_step = elapsed / timed_steps
            line = ("step=%d, sim_time=%f, elapsed wall time=%.2f s, "
                    "time per step=%f s" % (
                        step, t, elapsed, time_per_step))
            if options.cl:
                # NOTE(review): factor 5 presumably counts RK stages — confirm
                flops = 5 * (inner_rhs.flops + 4 * d.K * d.ldis.Np)
                line += " %f gflops/s" % (flops / time_per_step / 1e9)

            print(line)
def vis_hook(step, t, state):
    """Per-step visualization/timing callback for the (Hx, Hy, Ez) solver.

    Every ``options.vis_every`` steps, pushes the Ez field into the mayavi
    surface plot; every ``progress_every`` steps, prints a timing line and
    (with CL enabled) an achieved-GFlops estimate.

    :param step: 0-based time step index
    :param t: current simulation time
    :param state: (Hx, Hy, Ez) field arrays (device arrays when options.cl)
    """
    if options.vis_every and step % options.vis_every == 0:
        if options.cl:
            # fetch fields from the compute device
            Hx, Hy, Ez = [d.from_dev(x) for x in state]
        else:
            Hx, Hy, Ez = state

        vis_mesh.mlab_source.z = Ez.ravel()

        # update colors, too (expensive)
        if options.update_colors:
            vis_mesh.mlab_source.scalars = Ez.ravel()

    from time import time as wall_time
    progress_every = 20
    start_timing_at_step = progress_every
    # use progress_every instead of duplicating the literal 20
    if step % progress_every == 0:
        if options.cl:
            # drain the queue so wall-clock timing is meaningful
            d.queue.finish()
        if step == start_timing_at_step:
            start_time[0] = wall_time()
        elif step > start_timing_at_step:
            elapsed = wall_time() - start_time[0]
            timed_steps = step - start_timing_at_step
            time_per_step = elapsed / timed_steps
            line = ("step=%d, sim_time=%f, elapsed wall time=%.2f s, "
                    "time per step=%f s" % (
                        step, t, elapsed, time_per_step))
            if options.cl:
                # NOTE(review): factor 5 presumably counts RK stages — confirm
                flops = 5 * (inner_rhs.flops + 4 * d.K * d.ldis.Np)
                line += " %f gflops/s" % (flops / time_per_step / 1e9)

            print(line)
def run_command(par):
    """Run one STN/GPe population simulation and save its plots.

    `par` is the parameter dict for the run; output file names encode the
    two swept synaptic conductances (in nS).
    """
    t0 = wall_time()
    results = simulate_STN_GPe_population(par)

    syn = par['par_syn']
    tag = "{:.6f}-{:.6f}".format(syn['g_StoG'] / b2.nS, syn['g_GtoG'] / b2.nS)
    print("{:s} done in {:10.3f}".format(tag, wall_time() - t0))

    # to_npz(results, subname="d-{}".format(tag),
    #        save_voltages=1, width=50*b2.ms)

    plot_voltage(results, indices=[0, 1, 2], filename="v-{}".format(tag))
    # NOTE(review): `par_sim` is not defined in this function — presumably a
    # module-level global; confirm it should not be `par['par_sim']`.
    plot_raster(results, filename="sp-{}".format(tag), par=par_sim)
def timed(msg, stream=None):
    """Generator-based context manager timing its `with` body (for
    interactive use).

    Yields a TimingResults object whose `dt` attribute is filled in with
    the elapsed wall time once the body finishes.  When `msg` is not None,
    a progress message is written to `stream` (default: stderr).
    """
    announce = msg is not None
    if announce:
        if stream is None:
            from sys import stderr as stream
        stream.write(msg + '... ')
        stream.flush()

    begin = wall_time()
    outcome = TimingResults()
    yield outcome
    outcome.dt = wall_time() - begin

    if announce:
        stream.write('done in %s\n' % format_duration(outcome.dt))
        stream.flush()
def load_events(self, file, startTime=0, endTime=None):
    """Load an event file and add everything to this model.

    Each line is matched as ``time: unit: [Minor<kind>:] rest``.
    MinorTrace lines become BlobEvents, MinorInst/MinorLine lines are
    forwarded to the model, and unmatched-type lines accumulate as
    comments that are attached to events when the timestamp changes.
    Lines before `startTime` are skipped; parsing stops once the time
    exceeds `endTime` (when given).
    """
    def update_comments(comments, time):
        # Add a list of comments to an existing event, if there is one at
        # the given time, or create a new, correctly-timed, event from
        # the last event and attach the comments to that
        for commentUnit, commentRest in comments:
            event = self.find_unit_event_by_time(commentUnit, time)
            # Find an event to which this comment can be attached
            if event is None:
                # No older event, make a new empty one
                event = BlobEvent(commentUnit, time, {})
                self.add_unit_event(event)
            elif event.time != time:
                # Copy the old event and make a new one with the right
                # time and comment
                newEvent = BlobEvent(commentUnit, time, event.pairs)
                newEvent.visuals = dict(event.visuals)
                event = newEvent
                self.add_unit_event(event)
            event.comments.append(commentRest)

    self.clear_events()

    # A negative time will *always* be different from an event time
    time = -1
    time_events = {}
    last_time_lines = {}
    minor_trace_line_count = 0
    comments = []
    default_colour = [[colours.unknownColour]]
    next_progress_print_event_count = 1000

    if not os.access(file, os.R_OK):
        print('Can\'t open file', file)
        exit(1)
    else:
        print('Opening file', file)

    f = open(file)

    start_wall_time = wall_time()

    # Skip leading events
    still_skipping = True
    l = f.readline()
    while l and still_skipping:
        match = re.match('^\s*(\d+):', l)
        if match is not None:
            event_time = match.groups()
            if int(event_time[0]) >= startTime:
                still_skipping = False
            else:
                l = f.readline()
        else:
            l = f.readline()

    match_line_re = re.compile(
        '^\s*(\d+):\s*([\w\.]+):\s*(Minor\w+:)?\s*(.*)$')

    # Parse each line of the events file, accumulating comments to be
    # attached to MinorTrace events when the time changes
    reached_end_time = False
    while not reached_end_time and l:
        match = match_line_re.match(l)
        if match is not None:
            event_time, unit, line_type, rest = match.groups()
            event_time = int(event_time)

            # Strip the configured unit-name prefix (and a following dot)
            unit = re.sub('^' + self.unitNamePrefix +
                          '\.?(.*)$', '\\1', unit)

            # When the time changes, resolve comments
            if event_time != time:
                # progress report roughly every 1000 parsed events
                if self.numEvents > next_progress_print_event_count:
                    print(('Parsed to time: %d' % event_time))
                    next_progress_print_event_count = (
                        self.numEvents + 1000)
                update_comments(comments, time)
                comments = []
                time = event_time

            if line_type is None:
                # Treat this line as just a 'comment'
                comments.append((unit, rest))
            elif line_type == 'MinorTrace:':
                minor_trace_line_count += 1
                # Only insert this event if it's not the same as
                # the last event we saw for this unit
                if last_time_lines.get(unit, None) != rest:
                    event = BlobEvent(unit, event_time, {})
                    pairs = parse.parse_pairs(rest)
                    event.pairs = pairs

                    # Try to decode the colour data for this event
                    blobs = self.unitNameToBlobs.get(unit, [])
                    for blob in blobs:
                        if blob.visualDecoder is not None:
                            event.visuals[blob.picChar] = (
                                blob.visualDecoder(pairs))

                    self.add_unit_event(event)
                    last_time_lines[unit] = rest
            elif line_type == 'MinorInst:':
                self.add_minor_inst(rest)
            elif line_type == 'MinorLine:':
                self.add_minor_line(rest)

        if endTime is not None and time > endTime:
            reached_end_time = True

        l = f.readline()

    # Flush comments gathered for the final timestamp
    update_comments(comments, time)
    self.extract_times()
    f.close()

    end_wall_time = wall_time()

    print('Total events:', minor_trace_line_count, 'unique events:',
          self.numEvents)
    print('Time to parse:', end_wall_time - start_wall_time)
def vis_hook(step, t, state):
    """Per-step visualization/timing callback.

    Every ``options.vis_every`` steps, prints the relative L2 error of the
    pressure field against the reference solution and writes a VTK file.
    Every ``progress_every`` steps, prints wall-clock timing and — for the
    CL/loopy compute engines — GFlops/GBytes rates derived from the
    accumulated OpenCL profiling events.

    :param step: 0-based time step index
    :param t: current simulation time
    :param state: sequence of field arrays; the last entry is pressure
    """
    if options.vis_every and step % options.vis_every == 0:
        p = state[-1]
        if not isinstance(p, np.ndarray):
            # device array (has .get()) -> transfer to host first
            p = p.get()

        ref_p = soln(t)[-1]
        # relative L2 error against the reference solution
        print(la.norm(p - ref_p) / la.norm(ref_p))

        vis.write_vtk("out-%04d.vtu" % step,
                      [("pressure", p), ("ref_pressure", ref_p)])

    from time import time as wall_time
    progress_every = 3
    # start the clock a few reporting periods in, so startup cost
    # does not skew the per-step timing
    start_timing_at_step = 3 * progress_every
    if step % progress_every == 0:
        if step == start_timing_at_step:
            start_time[0] = wall_time()
        elif step > start_timing_at_step:
            elapsed = wall_time() - start_time[0]
            timed_steps = step - start_timing_at_step
            time_per_step = elapsed / timed_steps

            line = ("step=%d, sim_time=%f, elapsed wall time=%.2f s, "
                    "time per step=%f s"
                    % (step, t, elapsed, time_per_step))
            print(line)

        if options.comp_engine in ["cl", "loopy"]:
            # make sure all profiled kernels have completed
            for evt in cl_info.volume_events:
                evt.wait()
            for evt in cl_info.surface_events:
                evt.wait()

            if cl_info.volume_events:
                # mean kernel duration in seconds (profile stamps are ns)
                vol_time = 1e-9 * sum(
                        evt.profile.END - evt.profile.START
                        for evt in cl_info.volume_events) \
                        / len(cl_info.volume_events)
                print(
                        "volume: %.4g GFlops/s "
                        "%.4g GBytes/s time/step: %.3g s"
                        % (
                            rhs_obj.volume_flops / vol_time * 1e-9,
                            rhs_obj.volume_bytes / vol_time * 1e-9,
                            vol_time * 5)  # for RK stages
                        )
            if cl_info.surface_events:
                surf_time = 1e-9 * sum(
                        evt.profile.END - evt.profile.START
                        for evt in cl_info.surface_events) \
                        / len(cl_info.surface_events)
                print("surface: %.4g GFlops/s time/step: %.3g s" % (
                    rhs_obj.surface_flops / surf_time * 1e-9,
                    surf_time * 5))

            # reset accumulators for the next reporting period
            del cl_info.volume_events[:]
            del cl_info.surface_events[:]
G = NeuronGroup(1, 'dv/dt = -v/tau : 1', method='euler') G.v = 1 mon = StateMonitor(G, 'v', record=0) net = Network() net.add(G, mon) net.run(100 * ms) res = (mon.t / ms, mon.v[0]) device.reinit() print(f'FINISHED {pid}') return res if __name__ == "__main__": start_time = wall_time() n_jobs = 4 tau_values = np.arange(10) * ms + 5 * ms results = Parallel(n_jobs=n_jobs)(map(delayed(run_sim), tau_values)) print("Done in {:10.3f}".format(wall_time() - start_time)) for tau_value, (t, v) in zip(tau_values, results): plt.plot(t, v, label=str(tau_value)) plt.legend() plt.show()
def load_events(self, file, startTime=0, endTime=None):
    """Load an event file and add everything to this model.

    Each line is matched as ``time: unit: [Minor<kind>:] rest``.
    MinorTrace lines become BlobEvents, MinorInst/MinorLine lines are
    forwarded to the model, and unmatched-type lines accumulate as
    comments that are attached to events when the timestamp changes.
    Lines before `startTime` are skipped; parsing stops once the time
    exceeds `endTime` (when given).
    """
    def update_comments(comments, time):
        # Add a list of comments to an existing event, if there is one at
        # the given time, or create a new, correctly-timed, event from
        # the last event and attach the comments to that
        for commentUnit, commentRest in comments:
            event = self.find_unit_event_by_time(commentUnit, time)
            # Find an event to which this comment can be attached
            if event is None:
                # No older event, make a new empty one
                event = BlobEvent(commentUnit, time, {})
                self.add_unit_event(event)
            elif event.time != time:
                # Copy the old event and make a new one with the right
                # time and comment
                newEvent = BlobEvent(commentUnit, time, event.pairs)
                newEvent.visuals = dict(event.visuals)
                event = newEvent
                self.add_unit_event(event)
            event.comments.append(commentRest)

    self.clear_events()

    # A negative time will *always* be different from an event time
    time = -1
    last_time_lines = {}
    minor_trace_line_count = 0
    comments = []
    next_progress_print_event_count = 1000

    if not os.access(file, os.R_OK):
        print('Can\'t open file', file)
        exit(1)
    else:
        print('Opening file', file)

    f = open(file)

    start_wall_time = wall_time()

    # Skip leading events
    still_skipping = True
    l = f.readline()
    while l and still_skipping:
        match = re.match(r'^\s*(\d+):', l)
        if match is not None:
            event_time = match.groups()
            if int(event_time[0]) >= startTime:
                still_skipping = False
            else:
                l = f.readline()
        else:
            l = f.readline()

    match_line_re = re.compile(
        r'^\s*(\d+):\s*([\w\.]+):\s*(Minor\w+:)?\s*(.*)$')

    # Parse each line of the events file, accumulating comments to be
    # attached to MinorTrace events when the time changes
    reached_end_time = False
    while not reached_end_time and l:
        match = match_line_re.match(l)
        if match is not None:
            event_time, unit, line_type, rest = match.groups()
            event_time = int(event_time)

            # Strip the configured unit-name prefix (and a following dot)
            unit = re.sub('^' + self.unitNamePrefix +
                          r'\.?(.*)$', r'\1', unit)

            # When the time changes, resolve comments
            if event_time != time:
                # progress report roughly every 1000 parsed events
                if self.numEvents > next_progress_print_event_count:
                    print('Parsed to time: %d' % event_time)
                    next_progress_print_event_count = (
                        self.numEvents + 1000)
                update_comments(comments, time)
                comments = []
                time = event_time

            if line_type is None:
                # Treat this line as just a 'comment'
                comments.append((unit, rest))
            elif line_type == 'MinorTrace:':
                minor_trace_line_count += 1
                # Only insert this event if it's not the same as
                # the last event we saw for this unit
                if last_time_lines.get(unit, None) != rest:
                    event = BlobEvent(unit, event_time, {})
                    pairs = parse.parse_pairs(rest)
                    event.pairs = pairs

                    # Try to decode the colour data for this event
                    blobs = self.unitNameToBlobs.get(unit, [])
                    for blob in blobs:
                        if blob.visualDecoder is not None:
                            event.visuals[blob.picChar] = (
                                blob.visualDecoder(pairs))

                    self.add_unit_event(event)
                    last_time_lines[unit] = rest
            elif line_type == 'MinorInst:':
                self.add_minor_inst(rest)
            elif line_type == 'MinorLine:':
                self.add_minor_line(rest)

        if endTime is not None and time > endTime:
            reached_end_time = True

        l = f.readline()

    # Flush comments gathered for the final timestamp
    update_comments(comments, time)
    self.extract_times()
    f.close()

    end_wall_time = wall_time()

    print('Total events:', minor_trace_line_count, 'unique events:',
          self.numEvents)
    print('Time to parse:', end_wall_time - start_wall_time)
"par_s": par_s, "par_g": par_g } g_StoG = np.linspace(0.01, 0.1, 6) g_GtoG = np.linspace(0.0, 0.1, 6) par_syn['g_GtoS'] = 2.5 * b2.nS RUN_IN_SERIAL = False RUN_IN_PARALLEL = True n_jobs = 4 # --------------------------------------------------------------- if RUN_IN_PARALLEL: start_time = wall_time() args = [] for i in range(len(g_StoG)): for j in range(len(g_GtoG)): par_syn['g_StoG'] = g_StoG[i] * b2.nS par_syn['g_GtoG'] = g_GtoG[j] * b2.nS args.append(deepcopy(params)) Parallel(n_jobs=n_jobs)(map(delayed(run_command), args)) display_time(wall_time() - start_time) # --------------------------------------------------------------- if RUN_IN_SERIAL: for i in range(len(g_StoG)):
G = NeuronGroup(1, 'dv/dt = -v/tau : 1', method='euler') G.v = 1 mon = StateMonitor(G, 'v', record=0) net = Network() net.add(G, mon) net.run(100 * ms) res = (mon.t / ms, mon.v[0]) device.reinit() print(f'FINISHED {pid}') return res if __name__ == "__main__": start_time = wall_time() n_jobs = 4 tau_values = np.arange(10) * ms + 5 * ms results = Parallel(n_jobs=n_jobs)(map(delayed(run_sim), tau_values)) print(f"Done in {wall_time() - start_time:10.3f}") for tau_value, (t, v) in zip(tau_values, results): plt.plot(t, v, label=str(tau_value)) plt.legend() plt.show()