def plot_lat_vs_util(all_tpts, all_lats, fln, ylb, num_racks, ylm=None,
                     odr=path.join(PROGDIR, "graphs"), lbs=23):
    """Plot latency against average circuit utilization.

    Args:
        all_tpts: One list of throughput samples (Gbps) per series.
        all_lats: One list of latency samples (us) per series.
        fln: Output filename, without extension.
        ylb: Y-axis label prefix (e.g. "Median").
        num_racks: Rack count used to convert throughput to utilization.
        ylm: Optional y-axis upper limit; defaults to 1.2x the max latency.
        odr: Output directory for the generated PDF.
        lbs: Legend font size.
    """
    # Calculate utilization (%) for every throughput sample.
    all_utls = [[get_util_for_tpt(tpt_Gbps_c, num_racks)
                 for tpt_Gbps_c in tpts_Gbps_c]
                for tpts_Gbps_c in all_tpts]
    options = dotmap.DotMap()
    options.legend.options.fontsize = lbs
    # Trim the legend to however many series were actually provided.
    options.legend.options.labels = [
        "Static buffers (vary size)",
        "Dynamic buffers (vary $\\tau$)",
        "Dynamic buffers + reTCP (vary $\\tau$)"
    ][:len(all_tpts)]
    options.output_fn = path.join(odr, "{}.pdf".format(fln))
    options.plot_type = "LINE"
    # range() instead of xrange() -- xrange is a NameError on Python 3.
    options.series_options = [
        dotmap.DotMap(marker="o", markersize=10, linewidth=5)
        for _ in range(len(all_utls))
    ]
    options.x.label.xlabel = "Average circuit utilization (%)"
    # "\\mu" (same string value as the old "\mu") avoids the invalid
    # escape-sequence SyntaxWarning on modern Python.
    options.y.label.ylabel = "{} latency ($\\mu$s)".format(ylb)
    ylm = (ylm if ylm is not None
           else 1.2 * max([max(line) for line in all_lats]))
    options.y.limits = [0, ylm]
    simpleplotlib.plot(all_utls, all_lats, options)
def plot_lat(keys, latencies, fln, ylb, ylm=None, xlr=0, xtk_locs=None,
             odr=path.join(PROGDIR, "graphs"), flt=lambda key: True):
    """Plot all/circuit/packet latency against buffer size or resize time.

    Args:
        keys: X values (buffer sizes or resize times), sortable as ints.
        latencies: One (all, circuit, packet) latency tuple per key.
        fln: Output filename, without extension; "static" in the name
            selects buffer-size labeling.
        ylb: Y-axis label prefix (e.g. "Median").
        ylm: Optional y-axis upper limit; defaults to 1.2x the max latency.
        xlr: X tick label rotation (degrees).
        xtk_locs: Optional explicit y-axis tick locations.
        odr: Output directory for the generated PDF.
        flt: Predicate selecting which keys to keep.
    """
    # Sort the data based on the x-values (keys).
    keys, latencies = zip(
        *sorted(zip(keys, latencies), key=lambda p: int(p[0])))
    # Filter out keys rejected by flt().
    keys, latencies = zip(*[(k, l) for k, l in zip(keys, latencies)
                            if flt(k)])
    # One copy of the x values per latency column (all/circuit/packet).
    x = [keys for _ in range(len(latencies[0]))]
    # list(zip(...)) so the transposed data can be indexed and iterated
    # repeatedly on Python 3, where zip() returns a one-shot iterator.
    y = list(zip(*latencies))
    print("")
    print("raw latency data for: {}".format(fln))
    print("{}:".format(ylb.strip("\n")))
    print("    all: {}".format(", ".join(
        ["({}: {})".format(a, b) for a, b in zip(x[0], y[0])])))
    # ", " separator here too -- the original used ". ", inconsistent with
    # the "all" and "packet" lines.
    print("    circuit: {}".format(", ".join(
        ["({}: {})".format(a, b) for a, b in zip(x[1], y[1])])))
    print("    packet: {}".format(", ".join(
        ["({}: {})".format(a, b) for a, b in zip(x[2], y[2])])))
    print("")
    options = dotmap.DotMap()
    options.legend.options.loc = "upper left"
    options.legend.options.labels = ["both NWs", "circuit NW", "packet NW"]
    options.legend.options.fontsize = 20
    options.output_fn = path.join(odr, "{}.pdf".format(fln))
    options.plot_type = "LINE"
    options.series_options = [
        dotmap.DotMap(marker="o", markersize=10, linewidth=5)
        for _ in range(len(x))
    ]
    options.x.label.fontsize = options.y.label.fontsize = 20
    options.x.label.xlabel = "Buffer size (packets)" if "static" in fln \
        else "Early buffer resizing ($\\mu$s)"
    options.x.ticks.major.options.labelsize = \
        options.y.ticks.major.options.labelsize = 20
    options.x.ticks.major.labels = \
        dotmap.DotMap(locations=[4, 8, 16, 32, 64, 128]) \
        if "static" in fln else dotmap.DotMap(locations=keys)
    # NOTE(review): xtk_locs is applied to the *y* ticks despite its name;
    # preserved as-is -- confirm against callers.
    if xtk_locs is not None:
        options.y.ticks.major.labels = dotmap.DotMap(locations=xtk_locs)
    options.x.ticks.major.labels.options.rotation = xlr
    options.x.ticks.major.labels.options.rotation_mode = "anchor"
    options.x.ticks.major.labels.options.horizontalalignment = \
        "center" if xlr == 0 else "right"
    options.y.label.ylabel = "{} latency ($\\mu$s)".format(ylb)
    ylm = ylm if ylm is not None else 1.2 * max([max(line) for line in y])
    options.y.limits = [0, ylm]
    simpleplotlib.plot(x, y, options)
def __init__(self):
    """Set up NAICS-code slice boundaries and the data directory."""
    # Character slices of a NAICS_CODE string for each classification
    # level. In future use collections.namedtuple INSTEAD OF dotmap.
    self.sector = dotmap.DotMap({'start': 0, 'end': 2})
    self.subsector = dotmap.DotMap({'start': 0, 'end': 3})
    self.group = dotmap.DotMap({'start': 0, 'end': 4})
    # Directory in which the NAICS data is stored.
    self.path = os.path.join('naics')
def odic(indic):
    """Build a response DotMap skeleton for the given request DotMap."""
    out = dotmap.DotMap()
    # Echo back which request and transaction this responds to.
    out.response = indic.request
    out.trans_id = indic.trans_id
    # Timestamp in compact YYYYmmddHHMMSS form.
    out.ts = time.strftime("%Y%m%d%H%M%S")
    # Defaults: empty result payload, success code, no message.
    out.result = dotmap.DotMap()
    out.err_code = 0
    out.msg = None
    return out
def BasicConnectivity():
    """Continuously poll every Modbus slave and log per-slave liveness.

    Runs forever: each sweep reads one register from each slave,
    marks it ok/failed, then logs elapsed time, the alive count, and
    every slave's status/error count.
    """
    slaves = []
    # COM19 is a Windows serial port -- presumably the RS-485 adapter.
    modbus = my_modbus.ModBus(port='COM19')
    # Slave IDs are 1-based on Modbus.
    for slave_id in range(1, SLAVE_COUNT + 1):
        slaves.append(
            dotmap.DotMap(slave_id=slave_id, reg_count=1, is_ok=True,
                          errors=0))
    last_time = time.time()
    while True:
        for slave in slaves:
            if SPLAY:
                # Stagger requests by up to 100 ms to avoid lockstep polling.
                time.sleep(random.random() / 10)
            res = modbus.read_registers(slave)
            if not _ok(res):
                slave.is_ok = False
                slave.errors += 1
            else:
                slave.is_ok = True
        # One log line per sweep: sweep duration, alive count, statuses.
        logging.info(
            "[{:5.5}] ".format(time.time() - last_time) +
            "[alive {:2}] ".format(
                sum([1 for slave in slaves if slave.is_ok])) + str([
                    "S{} {} {!r:5}".format(slave.slave_id, slave.is_ok,
                                           slave.errors)
                    for slave in slaves
                ]))
        last_time = time.time()
def test_summarize(shared_tempdir):
    """Smoke-test summarize.run() against the whole-genome demo job."""
    job_dir = os.path.join('output', 'hiv1-lanl-whole')
    with chdir(shared_tempdir):
        summarize.run(dotmap.DotMap({'job_dir': job_dir, 'top_n': 1}))
def __init__(self, track):
    """Create the car sprite, HUD labels and run state for `track`."""
    # Load the car image and anchor it at its center so rotation
    # happens around the middle of the sprite.
    script_dir = dirname(__file__)
    path = join(script_dir, 'car.png')
    car_image = pyglet.image.load(path)
    car_image.anchor_x = int(car_image.width / 2)
    car_image.anchor_y = int(car_image.height / 2)
    self.track = track
    self.car = pyglet.sprite.Sprite(car_image,
                                    group=config.car_group,
                                    batch=config.batch,
                                    x=car_image.width)
    # Align with the first track segment; +90 compensates for the
    # sprite's base orientation.
    self.car.rotation = track.segments[0].coordinates[0].angle + 90
    self.accelerate = 0
    # Recent motion vectors; consumed by the movement code for inertia.
    self.previous_direction = [
        dotmap.DotMap(x=0, y=0, rotation=self.car.rotation)
    ]
    self.rolling = True
    self.drifting = False
    self.speed = 0
    self.speed_factor = 1
    # Speedometer label pinned near a window corner relative to the car.
    # speed * 21.6 converts internal units to km/h -- TODO confirm factor.
    self.speed_label = pyglet.text.Label(
        f"{round(self.speed * 21.6, 2)} km/h",
        font_name='Arial',
        font_size=12,
        color=(0, 0, 0, 255),
        x=self.car.x + config.window.width // 2 - 50,
        y=self.car.y - config.window.height // 2 + 100,
        anchor_x='right',
        anchor_y='bottom',
        group=config.text_group,
        batch=config.batch)
    # Label shown at the car when the run finishes; empty until then.
    self.finish_label = pyglet.text.Label("",
                                          font_name='Arial',
                                          font_size=12,
                                          color=(0, 0, 0, 255),
                                          x=self.car.x,
                                          y=self.car.y,
                                          anchor_x='center',
                                          anchor_y='center',
                                          group=config.text_group,
                                          batch=config.batch)
    self.steer = 0
    self.segment = 0        # index of the current track segment
    self.distance = 0
    self.checkpoints = []
    self.finish = -1
    self.stop = False
    self.started = False
    self.follow = False
    self.timer = time.monotonic()
    # Replay recording: track parameters plus per-frame entries.
    self.replay = {
        "track": {
            "length": self.track.length,
            "seed": self.track.seed,
            "corners": self.track.corners
        },
        "frames": [],
        "distance": 0,
        "time": -1
    }
def main(): params = dotmap.DotMap() #### Define meta settings: executable, etc... #### withSimulation = False params.metaparams = define_meta_parameters(not withSimulation) ##### Define simulation settings: #### params.baseParams = define_base_simulation_parameters(params.metaparams) params.extendedParams = define_extended_simulation_parameters( params.metaparams, params.baseParams) params.dependentParams = define_dependent_simulation_parameters() ##### Rearrange them: ##### params.flatParamLists = helpers.parameters.flattenExtendedParamsets( params.metaparams, params.baseParams, params.extendedParams) params.allsimparams = helpers.parameters.crossAllParamsets( params.baseParams, params.flatParamLists.copy()) #nestedPrint(params.allsimparams) ##### Update Dependent parameters: ##### helpers.parameters.adjustDependentParameters(params) #nestedPrint(params.allsimparams) ##### Run simulation(s) ##### helpers.simulation.run_simulation(params, withSimulation) ##### Plot results ##### make_figures(params)
def main():
    """Define parameters, (re)run simulations and produce figures.

    Set existingSimfoldername to reuse an existing datafig folder; new
    simulations are launched only when it is None.
    """
    params = dotmap.DotMap()
    #### Define meta settings: executable, etc... ####
    #existingSimfoldername = None
    existingSimfoldername = 'sim2016-09-07_trial15'
    params.metaparams = define_meta_parameters(existingSimfoldername)
    ##### Define simulation settings: ####
    params.baseParams = define_base_simulation_parameters(params.metaparams)
    params.extendedParams = define_extended_simulation_parameters(
        params.metaparams, params.baseParams)
    params.dependentParams = define_dependent_simulation_parameters()
    ##### Rearrange them: #####
    params.flatParamLists = helpers.parameters.flattenExtendedParamsets(
        params.metaparams, params.baseParams, params.extendedParams)
    params.allsimparams = helpers.parameters.crossAllParamsets(
        params.baseParams, params.flatParamLists.copy())
    #nestedPrint(params.allsimparams)
    ##### Update Dependent parameters: #####
    helpers.parameters.adjustDependentParameters(params)
    #nestedPrint(params.allsimparams)
    ##### Run simulation(s) #####
    # "is None" (identity) rather than "== None" (equality), per PEP 8.
    helpers.simulation.run_simulation(params, existingSimfoldername is None)
    helpers.simulation.rerun_missing_simulations(params)
    ##### Plot results #####
    make_figures(params)
def init_config(self, config_path=None):
    """Load a JSON config file into self.config as a DotMap.

    Args:
        config_path: Path to the config file; defaults to ./config.json
            in the current working directory.
    """
    if config_path is None:
        config_path = os.path.join(os.path.curdir, "config.json")
    with open(config_path) as f:
        # json.load() parses directly from the file object -- no need to
        # read the whole file into a string first.
        config = json.load(f)
    self.config = dotmap.DotMap(config)
def define_meta_parameters(existingSimfoldername=None):
    """Build the meta parameters: executable, output paths, repetitions.

    Args:
        existingSimfoldername: Reuse this datafig folder name if given;
            otherwise a fresh unique folder name is generated.
    """
    #### Define meta settings: executable, etc... ####
    basefolder = './datafig/'
    metaparams = dotmap.DotMap()
    # The executable is reached via a symlink in the current folder.
    metaparams.executable_path = ''
    metaparams.executable_file = 'sim_json_symlink'
    if existingSimfoldername:
        metaparams.datafig_basename = existingSimfoldername
    else:
        metaparams.datafig_basename = \
            helpers.simulation.find_unique_foldername(basefolder)
    # Data, cache and figure outputs all live under the same base folder.
    metaparams.data_path = basefolder + metaparams.datafig_basename + '/data/'
    metaparams.data_basename = metaparams.datafig_basename
    metaparams.cache_path = (basefolder + metaparams.datafig_basename +
                             '/cache/')
    metaparams.cache_basename = metaparams.datafig_basename
    metaparams.figures_path = (basefolder + metaparams.datafig_basename +
                               '/figures/')
    metaparams.figures_basename = metaparams.data_basename
    metaparams.numRepetitions = 10
    # range() instead of xrange() -- xrange is a NameError on Python 3.
    for repetitionID in range(metaparams.numRepetitions):
        metaparams.repetitionFoldernames[repetitionID] = 'repetition_' + str(
            repetitionID + 1)
    return metaparams
def parameters(self, elements: requests.models.Response):
    """
    :param elements: The HTTP response whose body is the input YAML file
    :return: A dot map of the parameters in the YAML file; and
        supplementary parameters (CSV reader kwargs, data directory)
    """
    text = yaml.safe_load(elements.text)
    var = dotmap.DotMap(text)
    # Minimal verification of the contents of the YAML
    self.ascertain(var)
    # Reader keyword arguments for the file at var.source.metadataFileURL:
    # read only the strings column, as str. Presumably consumed by a
    # pandas CSV reader -- confirm against the caller.
    var.kwargs = {
        'usecols': [var.source.fileStringsField],
        'header': 0,
        'encoding': 'UTF-8',
        'dtype': {
            var.source.fileStringsField: 'str'
        }
    }
    # Hence, add default directories
    var.data = os.path.join(self.root, 'data')
    return var
def define_extended_simulation_parameters(metaparams, baseParams):
    """The helpers of baseParams that should be re-run with a list of
    settings each. Node paths of extendedParams have to match those in
    baseParams.
    """
    extendedParams = dotmap.DotMap()
    # Sweep the output projection multiplier over 0.2 .. 4.0 (step 0.2).
    extendedParams.neurongroups.outputs.projMult = np.r_[0.2:4.2:0.2]
    # Sweep the maximum connection weight over the same grid.
    extendedParams.connectionsets.con1.maximumweight = np.r_[0.2:4.2:0.2]
    return extendedParams
def check_classify_model(model, tempdir, check_results=True):
    """Run classification with `model` on the demo HIV-1 genomes and,
    when check_results is True, assert the expected per-file subtypes.
    """
    with tempfile.TemporaryDirectory() as genomes_dir:
        # fetch and extract genomes
        archive_filename = os.path.join('demo', 'hiv1-genomes.zip')
        with zipfile.ZipFile(archive_filename, 'r') as archive:
            archive.extractall(genomes_dir)
        # run classification
        with chdir(tempdir):
            args = dotmap.DotMap({
                'model': model,
                'files': genomes_dir,
                'disable_avx': True
            })
            classify.run(args)
            # check results -- presumably classify.run() writes
            # results.json into the current directory (tempdir).
            with open('results.json', 'r') as f:
                results = json.load(f)
            if check_results:
                assert results == {
                    'A1.fasta': 'A1',
                    'A6.fasta': 'A6',
                    'B.fasta': 'B',
                    'C.fasta': 'C',
                }
def calculate_position(self):
    """Advance the car one physics step and update camera and labels.

    No-op while the car is stationary.
    """
    if self.speed == 0:
        return
    # Decompose the current speed along the car's heading.
    y = self.speed * math.cos(math.radians(self.car.rotation))
    x = self.speed * math.sin(math.radians(self.car.rotation))
    # Inertia: recent direction vectors contribute with increasing
    # weight ((i + 1) / 30), newest weighted most.
    for i in range(len(self.previous_direction)):
        direction = self.previous_direction[i]
        self.car.x += (direction.x * (i + 1) / 30)
        self.car.y += (direction.y * (i + 1) / 30)
    # Plus half of this frame's direct motion.
    self.car.x += (x * 0.5)
    self.car.y += (y * 0.5)
    # Remember this frame's direction; keep only the last 5 entries.
    self.previous_direction.append(
        dotmap.DotMap(x=x, y=y, rotation=self.car.rotation))
    if len(self.previous_direction) > 5:
        self.previous_direction.pop(0)
    # Scroll the camera opposite to the motion so the car stays centered,
    # and move the HUD label along with the view.
    config.camera_position[0] -= x
    config.camera_position[1] -= y
    glTranslatef(-x, -y, 0)
    self.speed_label.x += x
    self.speed_label.y += y
    self.calculate_checkpoint()
    self.set_segments_visible()
    # On collision: stop the car and record progress for the replay.
    if self.calculate_collision():
        self.speed = 0
        self.stop = True
        self.replay["distance"] = self.segment
        self.handle_stop()
def dmstr(dat):
    '''DotMap from a String.

    Parses dat as JSON; any parse failure falls back to an empty DotMap
    instead of raising.
    '''
    try:
        # json.loads() is the idiomatic form of JSONDecoder().decode().
        dic = json.loads(str(dat))
    except ValueError:
        # Only catch malformed JSON (json.JSONDecodeError subclasses
        # ValueError) -- a bare except would also swallow
        # KeyboardInterrupt and genuine programming errors.
        dic = {}
    return dotmap.DotMap(dic)
def idic():
    """Parse the current request body into a DotMap.

    Returns an empty DotMap (after logging the traceback) if reading or
    parsing the body fails.
    """
    try:
        # Treat an empty body as an empty JSON object.
        payload = request.body.read() or "{}"
        dic = dmstr(payload)
        return dic
    except Exception:
        # "except Exception", not a bare except, so KeyboardInterrupt and
        # SystemExit still propagate; the failure is logged, not hidden.
        traceback.print_exc()
        return dotmap.DotMap()
def test_filter_exam(self):
    """Filtering the exam list by name still returns the exam."""
    exam = ExamFactory()
    response = self.client.get(reverse("concierge:exam-list") + '?name=111')
    self.assertEqual(200, response.status_code)
    payload = dotmap.DotMap(response.json())
    self.assertEqual(payload.results[0].name, exam.name)
def dotan(self, req=None):
    '''Translate a tunnel record into an LSP response DotMap.

    req: {
        "uid": "...",
        "user_data": {
            "tunnel_name": "...", "path": "...", "status": "...",
            "hop_list": [...],
            "create_info": {"uid": ..., "from_router_name": ...,
                            "hop_list": [...], ...}
        }
    }
    resp: {"lsps": [{uid, from_router_name, to_router_name, bandwidth,
                     to_router_uid, from_router_uid, name, hop_list,
                     path, status, priority, delay, user_data}]}
    '''
    userdata = req["args"]["user_data"]
    ci = userdata["create_info"]
    # Most response fields come straight from create_info...
    ci_fields = ("uid", "from_router_name", "to_router_name", "bandwidth",
                 "to_router_uid", "from_router_uid", "name", "hop_list",
                 "priority", "delay")
    lsp = {field: ci[field] for field in ci_fields}
    # ...while path/status reflect the tunnel's current state, and the
    # raw user_data is passed through verbatim.
    lsp["path"] = userdata["path"]
    lsp["status"] = userdata["status"]
    lsp["user_data"] = userdata
    return dotmap.DotMap({"lsps": [lsp]})
def test_train_model(shared_tempdir):
    """Smoke-test run_job.run() on the small HIV-1 training fixture."""
    root_dir = os.getcwd()
    # Resolve fixture paths before changing directory.
    job_file = os.path.join(root_dir, 'tests', 'fixtures',
                            'hiv1-lanl-small.yml')
    settings_file = os.path.join(root_dir, 'demo', 'settings.yml')
    with chdir(shared_tempdir):
        run_job.run(dotmap.DotMap({
            'job_file': job_file,
            'settings_file': settings_file,
            'disable_avx': True
        }))
def test_operator(self):
    """GET operator-detail returns the username and abbreviation."""
    detail_url = reverse('concierge:operator-detail',
                         kwargs={'pk': self.operator.pk})
    response = self.client.get(detail_url)
    self.assertEqual(response.status_code, 200)
    payload = dotmap.DotMap(response.json())
    self.assertEqual(self.operator.user.username, payload.user.username)
    self.assertEqual('J', payload.abbreviation)
def test_load_patient(self):
    """GET patient-detail returns the patient's full name."""
    patient = PatientFactory()
    detail_url = reverse('concierge:patient-detail',
                         kwargs={'pk': patient.pk})
    response = self.client.get(detail_url)
    self.assertEqual(200, response.status_code)
    payload = dotmap.DotMap(response.json())
    self.assertEqual('user lastname', payload.full_name)
def translateRule(theAttractorStrengthIndicator,
                  theAttractorLocationIndicator, theMeanSlope):
    """Translate attractor indicators into slope/offset rule parameters."""
    strength = theAttractorStrengthIndicator
    location = theAttractorLocationIndicator
    params = dotmap.DotMap()
    # Causal and anticausal slopes deviate symmetrically from the mean
    # slope by the attractor strength.
    params.slope_Causal = theMeanSlope - strength
    params.slope_Anticausal = theMeanSlope + strength
    # Offsets place the 0.5 crossing at the attractor location.
    params.offset_Causal = 0.5 - params.slope_Causal * location
    params.offset_Anticausal = 0.5 - params.slope_Anticausal * location
    return params
def process_webhook(self, webhook_content):
    """Route an incoming webhook payload to its type-specific handler."""
    content = dotmap.DotMap(webhook_content)
    # Dispatch on object_kind; unknown kinds are silently ignored,
    # matching the original if-chain.
    handlers = {
        "merge_request": self.process_mergerequest,
        "pipeline": self.process_pipeline,
        "note": self.process_note,
    }
    handler = handlers.get(content.object_kind)
    if handler is not None:
        handler(content)
def load_YAML(fileObject):
    """Parse a YAML config stream into a DotMap.

    Raises:
        exceptions.ConfigurationError: on any YAML syntax error.
    """
    try:
        config = yaml.safe_load(fileObject)
    except yaml.composer.ComposerError as e:
        log.error(f"YAML composer error {e}")
        raise exceptions.ConfigurationError(e) from e
    except yaml.scanner.ScannerError as e:
        log.error(f"YAML scanner error {e}")
        raise exceptions.ConfigurationError(e) from e
    except yaml.YAMLError as e:
        # Catch-all for the remaining PyYAML error classes (parser and
        # reader errors), which previously escaped as raw YAMLError
        # instead of ConfigurationError.
        log.error(f"YAML error {e}")
        raise exceptions.ConfigurationError(e) from e
    return dotmap.DotMap(config)
def docopt_plus(doc_string, version_message):
    '''docopt.docopt() returns a dict object. This converts it to a DotMap
    object, which allows access to keys via a 'dot' notation (i.e., like
    that of namedtuple, except that values can be changed).
    '''
    parsed = docopt.docopt(doc_string, version=version_message)
    # Strip ALL dashes from keys ("--dry-run" -> "dryrun") so every
    # option name is usable with DotMap's dot notation.
    cleaned = {key.replace('-', ''): value for key, value in parsed.items()}
    return dotmap.DotMap(cleaned)
def plot_util_vs_latency(tpts, latencies, fln):
    """Plot median or 99th-percentile latency against circuit utilization.

    Args:
        tpts: One list of throughput samples per series.
        latencies: One list of latency tuples per series; only the first
            latency column of each is plotted.
        fln: Filename hint; "99" in the name selects the 99th-percentile
            labels, limits and output path.
    """
    # Convert throughputs to utilization (%), capped at 100.
    x = [[min(j / (0.9 * 1. / (python_config.NUM_RACKS - 1) *
                   python_config.CIRCUIT_BW_Gbps) * 100, 100.0)
          for j in t] for t in tpts]
    # list(zip(...)) so [0] works on Python 3, where zip() returns a
    # non-subscriptable iterator.
    y = [list(zip(*l))[0] for l in latencies]
    options = dotmap.DotMap()
    options.plot_type = "LINE"
    options.legend.options.labels = [
        "Static buffers (vary size)", "Dynamic buffers (vary $\\tau$)",
        "reTCP", "reTCP + dynamic buffers (vary $\\tau$)"
    ]
    options.legend.options.fontsize = 19
    # range() instead of xrange() -- xrange is a NameError on Python 3.
    options.series_options = [
        dotmap.DotMap(marker="o", markersize=10, linewidth=5)
        for _ in range(len(x))
    ]
    # The reTCP series (index 2) is drawn with scatter-style "x" markers.
    options.series_options[2].marker = "x"
    options.series_options[2].s = 100
    del options.series_options[2].markersize
    options.series_options[2].zorder = 10
    options.output_fn = \
        path.join(PROGDIR, "graphs", "throughput_vs_latency99.pdf") \
        if "99" in fln \
        else path.join(PROGDIR, "graphs", "throughput_vs_latency.pdf")
    options.x.label.xlabel = "Circuit utilization (%)"
    options.y.label.ylabel = "99th percent. latency ($\\mu$s)" \
        if "99" in fln else "Median latency ($\\mu$s)"
    options.y.limits = [0, 1000] if "99" in fln else [0, 600]
    options.y.ticks.major.labels = \
        dotmap.DotMap(locations=[0, 200, 400, 600, 800, 1000]) \
        if "99" in fln else \
        dotmap.DotMap(locations=[0, 100, 200, 300, 400, 500, 600])
    simpleplotlib.plot(x, y, options)
def define_meta_parameters():
    """Build meta parameters: executable, data/figure paths, repetitions."""
    #### Define meta settings: executable, etc... ####
    metaparams = dotmap.DotMap()
    metaparams.executable_path = '../build/debug/examples/'
    metaparams.executable_file = 'sim_simon5'
    # Data and figure outputs are grouped per executable under ./datafig/.
    metaparams.data_path = ('./datafig/' + metaparams.executable_file +
                            '.data/')
    metaparams.data_basename = 'simon5'
    metaparams.figures_path = ('./datafig/' + metaparams.executable_file +
                               '.figures/')
    metaparams.figures_basename = metaparams.data_basename
    metaparams.numRepetitions = 8
    # range() instead of xrange() -- xrange is a NameError on Python 3.
    for repetitionID in range(metaparams.numRepetitions):
        metaparams.repetitionFoldernames[repetitionID] = 'repetition_' + str(
            repetitionID + 1)
    return metaparams
def __init__(self, toml_config, env_path=None):
    """Load the TOML config and the environment variables.

    Args:
        toml_config: Path to the TOML configuration file.
        env_path: Path to the .env file; if None, load_dotenv will try
            to find it by itself.

    Raises:
        FileNotFoundError: if toml_config does not exist.
    """
    self.toml_config = toml_config
    if not os.path.exists(toml_config):
        # FileNotFoundError (still an Exception subclass, so existing
        # handlers keep working) and a message that says TOML -- the old
        # message incorrectly called it an INI file.
        raise FileNotFoundError(f"Couldn't find TOML file: {toml_config}")
    sub_conf = self._preprocess(toml.load(self.toml_config))
    self.conf = dotmap.DotMap(sub_conf)
    self.env = {}
    load_dotenv(verbose=True, dotenv_path=env_path)
    logging.info(f"Loading env vars from: {env_path}.")
    # Mirror the selected environment variables under upper-cased keys;
    # missing variables are stored as None.
    for key in self.ENV_KEYS:
        self.env[key.upper()] = os.getenv(key)
    logging.info(f"Loaded: {self.env}.")
def define_extended_simulation_parameters(metaparams, baseParams):
    """The helpers of baseParams that should be re-run with a list of
    settings each. Node paths of extendedParams have to match those in
    baseParams.
    """
    extendedParams = dotmap.DotMap()
    # DotMap auto-creates intermediate nodes, so this handle refers to
    # the same nested object inside extendedParams.
    wdep = extendedParams.connectionsets.con1.stdprule.weightdependence
    # Attractor strength sweep, denser near zero.
    wdep.attractorStrengthIndicator = [
        0.0, 0.025, 0.05, 0.075, 0.1, 0.2, 0.3, 0.4, 0.5
    ]
    # Attractor location sweep: -0.2 upward in steps of 0.1 (below 0.4).
    wdep.attractorLocationIndicator = np.r_[-0.2:0.4:0.1]
    # Drift-compensation strides: 11 log-spaced values spanning
    # [1e-7, 1e-4], rounded to 8 decimals.
    extendedParams.connectionsets.con1.driftcompensation.stride = np.round(
        np.logspace(np.log10(0.0000001), np.log10(0.0001), num=11), 8)
    return extendedParams