def main(): """ create small network with synapse loss. The synapse loss happens due to a maximum syndriver chain length of 5 and only 4 denmems per neuron. After mapping, the synapse loss per projection is evaluated and plotted for one projection. The sum of lost synapses per projection is compared to the overall synapse loss returnd by the mapping stats. """ marocco = PyMarocco() marocco.neuron_placement.default_neuron_size(4) marocco.synapse_routing.driver_chain_length(5) marocco.continue_despite_synapse_loss = True marocco.calib_backend = PyMarocco.CalibBackend.Default marocco.neuron_placement.skip_hicanns_without_neuron_blacklisting(False) pynn.setup(marocco=marocco) neuron = pynn.Population(50, pynn.IF_cond_exp) source = pynn.Population(50, pynn.SpikeSourcePoisson, {'rate' : 2}) connector = pynn.FixedProbabilityConnector( allow_self_connections=True, p_connect=0.5, weights=0.00425) proj_stim = pynn.Projection(source, neuron, connector, target="excitatory") proj_rec = pynn.Projection(neuron, neuron, connector, target="excitatory") pynn.run(1) print marocco.stats total_syns = 0 lost_syns = 0 for proj in [proj_stim, proj_rec]: l,t = projectionwise_synapse_loss(proj, marocco) total_syns += t lost_syns += l assert total_syns == marocco.stats.getSynapses() assert lost_syns == marocco.stats.getSynapseLoss() plot_projectionwise_synapse_loss(proj_stim, marocco) pynn.end()
def main(): """ create small network with synapse loss. The synapse loss happens due to a maximum syndriver chain length of 5 and only 4 denmems per neuron. After mapping, the synapse loss per projection is evaluated and plotted for one projection. The sum of lost synapses per projection is compared to the overall synapse loss returnd by the mapping stats. """ marocco = PyMarocco() marocco.neuron_placement.default_neuron_size(4) marocco.synapse_routing.driver_chain_length(5) marocco.continue_despite_synapse_loss = True marocco.calib_backend = PyMarocco.CalibBackend.Default marocco.neuron_placement.skip_hicanns_without_neuron_blacklisting(False) pynn.setup(marocco=marocco) neuron = pynn.Population(50, pynn.IF_cond_exp) source = pynn.Population(50, pynn.SpikeSourcePoisson, {'rate': 2}) connector = pynn.FixedProbabilityConnector(allow_self_connections=True, p_connect=0.5, weights=0.00425) proj_stim = pynn.Projection(source, neuron, connector, target="excitatory") proj_rec = pynn.Projection(neuron, neuron, connector, target="excitatory") pynn.run(1) print marocco.stats total_syns = 0 lost_syns = 0 for proj in [proj_stim, proj_rec]: l, t = projectionwise_synapse_loss(proj, marocco) total_syns += t lost_syns += l assert total_syns == marocco.stats.getSynapses() assert lost_syns == marocco.stats.getSynapseLoss() plot_projectionwise_synapse_loss(proj_stim, marocco) pynn.end()
############################################################################
wafer = int(os.environ.get("WAFER", 33))

marocco = PyMarocco()
marocco.backend = PyMarocco.Hardware
marocco.default_wafer = C.Wafer(wafer)
runtime = Runtime(marocco.default_wafer)

# calib_path = "/wang/data/calibration/brainscales/WIP-2018-09-18"
# marocco.calib_path = calib_path
# marocco.defects.path = marocco.calib_path

marocco.verification = PyMarocco.Skip
marocco.checkl1locking = PyMarocco.SkipCheck
marocco.continue_despite_synapse_loss = True

SYNAPSE_DECODER_DISABLED_SYNAPSE = HICANN.SynapseDecoder(1)

### ====================== NETWORK CONSTRUCTION =========================== ###

sim.setup(timestep=1.0, min_delay=1.0, marocco=marocco, marocco_runtime=runtime)

e_rev = 92  # mV
# e_rev = 500.0  # mV

base_params = {
    # 'cm': 0.1,         # nF
    # 'v_reset': -70.,   # mV
    # 'v_rest': -65.,    # mV
    # 'v_thresh': -55.,  # mV
    # 'tau_m': 20.,      # ms
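# Usage sketch (the script name below is a placeholder): the target wafer
# module is selected via the WAFER environment variable read above, falling
# back to wafer 33 when the variable is unset, e.g.
#
#   WAFER=30 python hardware_run.py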
def main():
    parser = argparse.ArgumentParser()
    # scale factor of the whole network compared to the original one
    parser.add_argument('--scale', default=0.01, type=float)
    # size of one neuron in hw neurons
    parser.add_argument('--n_size', default=4, type=int)
    parser.add_argument('--k_scale', type=float)  # scale of connections
    # wafer defects that should be considered in the mapping
    parser.add_argument('--wafer', '-w', type=int, default=24)
    # specific path where the defect parts of the wafer are saved
    # if nothing specified, current defects of the given wafer are used
    parser.add_argument('--defects_path', type=str)
    # str2bool: see the helper sketched after main()
    parser.add_argument('--ignore_blacklisting', type=str2bool, nargs='?',
                        default=False, const=True)
    parser.add_argument('--name', type=str,
                        default='cortical_column_network')  # name
    parser.add_argument('--placer', type=str, default='byNeuron')
    parser.add_argument('--seed', default=0, type=int)
    args = parser.parse_args()

    # k_scale is set to "scale" by default
    if not args.k_scale:
        args.k_scale = args.scale

    taskname = "scale{}_k-scale{}_nsize{}_wafer{}_ignoreBlacklisting{}".format(
        args.scale, args.k_scale, args.n_size, args.wafer,
        args.ignore_blacklisting)

    marocco = PyMarocco()
    marocco.neuron_placement.default_neuron_size(args.n_size)

    if args.ignore_blacklisting:
        marocco.defects.backend = Defects.Backend.Without
    else:
        marocco.defects.backend = Defects.Backend.XML

    marocco.skip_mapping = False
    marocco.backend = PyMarocco.Without
    marocco.continue_despite_synapse_loss = True
    marocco.default_wafer = C.Wafer(args.wafer)  # give wafer args
    marocco.calib_backend = PyMarocco.CalibBackend.Default
    marocco.calib_path = "/wang/data/calibration/brainscales/default"
    if args.defects_path:
        marocco.defects.path = args.defects_path
    else:
        marocco.defects.path = "/wang/data/commissioning/BSS-1/rackplace/" + str(
            args.wafer) + "/derived_plus_calib_blacklisting/current"

    # c 4189 no specification
    # taskname += "_c4189_"

    # merger routing strategy
    marocco.merger_routing.strategy(  # is now default
        marocco.merger_routing.minimize_as_possible)
    # taskname += "_minimAsPoss"

    '''
    # placement strategy
    user_strat = placer()
    taskname += "_placer"
    '''

    if args.placer == "byNeuron":
        user_strat = placer_neuron_cluster()  # cluster by neurons
        taskname += "_byNeuron"
        marocco.neuron_placement.default_placement_strategy(user_strat)

    if args.placer == "byEnum":
        user_strat = placer_enum_IDasc()  # place in ascending enum order
        taskname += "_byEnum"
        marocco.neuron_placement.default_placement_strategy(user_strat)

    if args.placer == "constrained":
        # needed for 5720 with patch set 36 (best results) or ps 50
        from pymarocco_runtime import ConstrainedNeuronClusterer \
            as placer_neuron_resizer
        user_strat = placer_neuron_resizer()
        taskname += "_constrained"
        marocco.neuron_placement.default_placement_strategy(user_strat)

    # give marocco the format of the results file
    taskname += str(datetime.now())
    marocco.persist = "results_{}_{}.xml.gz".format(args.name, taskname)

    start = datetime.now()
    r = CorticalNetwork(marocco, scale=args.scale, k_scale=args.k_scale,
                        seed=args.seed)
    r.build()
    mid = datetime.now()

    try:
        r.run()
        totsynapses = marocco.stats.getSynapses()
        totneurons = marocco.stats.getNumNeurons()
        lostsynapses = marocco.stats.getSynapseLoss()
        lostsynapsesl1 = marocco.stats.getSynapseLossAfterL1Routing()
        perPopulation = r.getLoss(marocco)
        print("Losses: ", lostsynapses, " of ", totsynapses,
              " L1Loss:", lostsynapsesl1,
              " Relative:", lostsynapses / float(totsynapses))
    except RuntimeError as err:
        # couldn't place all populations
        totsynapses = 1
        totneurons = 1
        lostsynapses = 1
        lostsynapsesl1 = 1
        # 'perPopulation' is only assigned in the try block above; default it
        # here so that building the result dict below cannot raise a NameError.
        perPopulation = {}
        logger.error(err)

    end = datetime.now()
    print("time:", end - start)

    result = {
        "model": args.name,
        "task": taskname,
        "scale": args.scale,
        "k_scale": args.k_scale,
        "n_size": args.n_size,
        "wafer": args.wafer,
        "ignore_blacklisting": args.ignore_blacklisting,
        "timestamp": datetime.now().isoformat(),
        "placer": args.placer,
        "perPopulation": perPopulation,
        "results": [
            {"type": "performance",
             "name": "setup_time",
             "value": (end - mid).total_seconds(),
             "units": "s",
             "measure": "time"},
            {"type": "performance",
             "name": "total_time",
             "value": (end - start).total_seconds(),
             "units": "s",
             "measure": "time"},
            {"type": "performance",
             "name": "synapses",
             "value": totsynapses},
            {"type": "performance",
             "name": "neurons",
             "value": totneurons},
            {"type": "performance",
             "name": "synapse_loss",
             "value": lostsynapses},
            {"type": "performance",
             "name": "synapse_loss_after_l1",
             "value": lostsynapsesl1}]
    }

    with open("{}_{}_results.json".format(result["model"], result["task"]),
              'w') as outfile:
        json.dump(result, outfile)
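# The parser above uses a str2bool helper that is not defined in this snippet.
# A minimal sketch (the exact behaviour in the original script may differ):
def str2bool(value):
    """Parse common textual representations of a boolean command line flag."""
    if isinstance(value, bool):
        return value
    if value.lower() in ('yes', 'true', 't', 'y', '1'):
        return True
    if value.lower() in ('no', 'false', 'f', 'n', '0'):
        return False
    raise argparse.ArgumentTypeError('boolean value expected')


# For illustration only, the emitted results file then has roughly this shape
# (all numbers below are made-up placeholder values):
#
# {
#   "model": "cortical_column_network",
#   "task": "scale0.01_k-scale0.01_nsize4_wafer24_ignoreBlacklistingFalse_byNeuron...",
#   "scale": 0.01,
#   "perPopulation": {},
#   "results": [
#     {"type": "performance", "name": "setup_time", "value": 12.3,
#      "units": "s", "measure": "time"},
#     {"type": "performance", "name": "synapses", "value": 1000},
#     {"type": "performance", "name": "synapse_loss", "value": 50}
#   ]
# }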