def on_step_routes_complete(self, start_time, electrode_ids):
    '''
    Callback function executed when all concurrent routes for a step have
    completed a single run.

    If repeats are requested, either through repeat counts or a repeat
    duration, *cycle* routes (i.e., routes that terminate at the start
    electrode) will repeat as necessary.
    '''
    step_options = self.get_step_options()
    step_duration_s = (datetime.now() -
                       self.step_start_time).total_seconds()
    if ((step_options['repeat_duration_s'] > 0 and step_duration_s <
         step_options['repeat_duration_s']) or
            (self.repeat_i + 1 < step_options['route_repeats'])):
        # Either repeat duration has not been met, or the specified number
        # of repetitions has not been met.  Execute another iteration of
        # the routes.
        self.repeat_i += 1
        df_routes = self.get_routes()
        self.route_controller.execute_routes(
            df_routes, step_options['transition_duration_ms'],
            trail_length=step_options['trail_length'],
            cyclic=True, acyclic=False,
            on_complete=self.on_step_routes_complete,
            on_error=self.on_error)
    else:
        logger.info('Completed routes (%s repeats in %ss)',
                    self.repeat_i + 1, si_format(step_duration_s))
        # Transitions along all droplet routes have been processed.
        # Signal step has completed and reset plugin step state.
        emit_signal('on_step_complete', [self.name, None])
def _print_starting_conds(instance):
    tname = _get_textid('technology_type')
    hours = float(len(instance.t))
    techtotal = [0] * len(instance.all_tech)
    nperz = [0] * len(instance.all_tech)
    idx = list(instance.all_tech)
    for z in instance.zones:
        for n in instance.gen_tech_per_zone[z]:
            techtotal[idx.index(n)] += value(instance.gen_cap_initial[z, n])
            nperz[idx.index(n)] += 1
        for s in instance.stor_tech_per_zone[z]:
            techtotal[idx.index(s)] += value(instance.stor_cap_initial[z, s])
            nperz[idx.index(s)] += 1
        for h in instance.hyb_tech_per_zone[z]:
            techtotal[idx.index(h)] += value(instance.hyb_cap_initial[z, h])
            nperz[idx.index(h)] += 1

    NEMcap = sum(techtotal)
    print("NEM Starting Capacity: %sW" % (
        si_format(NEMcap * 1e6, precision=2)
    ))
    for j in instance.all_tech:
        if techtotal[idx.index(j)] > 0:
            print("%17s: %7sW" % (
                tname[j],
                si_format(techtotal[idx.index(j)] * 1e6, precision=1)
            ))
def naget():
    natype = request.args.get('natype')
    message = {}
    try:
        start_val, start_unit = si_format(float(NA[natype].linfreq(
            nabench[natype])[1]['START']), precision=1).split(" ")
        stop_val, stop_unit = si_format(float(NA[natype].linfreq(
            nabench[natype])[1]['STOP']), precision=1).split(" ")
        stop_conversion = si_parse("1%s" % stop_unit) / si_parse(
            "1%s" % start_unit)  # equalizing both unit-range
        message['start-frequency'] = "%s %sHz" % (start_val,
                                                  start_unit)  # start-frequency
        message['stop-frequency'] = "%s %sHz" % (
            float(stop_val) * stop_conversion, start_unit)  # stop-frequency
        message['step-points'] = int(NA[natype].sweep(
            nabench[natype])[1]['POINTS']) - 1  # step-points in waveform
        message['power'] = "%.1f dBm" % float(NA[natype].power(
            nabench[natype])[1]['LEVEL'])  # power (fixed unit)
        message['ifb'] = si_format(
            float(NA[natype].ifbw(nabench[natype])[1]['BANDWIDTH']),
            precision=0) + "Hz"  # ifb (adjusted by si_prefix)
        message['s21'] = int('S21' in NA[natype].getrace(nabench[natype]))
    except:
        # raise
        message = dict(status='%s is not connected' % natype)
    return jsonify(message=message)
def dcamplifiersense():
    state = Amp.state
    if state:
        Amp.sensehardpanel()
        VSP = '%.1f' % Amp.VSupplyP[0]
        VSN = '%.1f' % Amp.VSupplyN[0]
        Sym = Amp.Symmetry
        BM = Amp.BiasMode
        Rb = si_format(Amp.Rb, precision=0).replace(' ', '').upper()
        Div = si_format(Amp.Division, precision=0).replace(' ', '').upper()
        Vg1, Vg2 = Amp.VgMode1, Amp.VgMode2
        gain1 = si_format(Amp.VGain1, precision=0).replace(' ', '').upper()
        gain2 = si_format(Amp.VGain2, precision=0).replace(' ', '').upper()
    else:
        VSP, VSN, Sym, BM, Rb, Div, gain1, gain2, Vg1, Vg2 = \
            None, None, None, None, None, None, None, None, None, None
        print('DC disconnected')
    return jsonify(state=state, VSP=VSP, VSN=VSN, Sym=Sym, BM=BM, Rb=Rb,
                   Div=Div, Vg1=Vg1, Vg2=Vg2, gain1=gain1, gain2=gain2)
def tiltedWashboardU(EJKBT, IbiasArr, ax):
    phi = np.arange(-0.1 * np.pi, 8 * np.pi, 0.025 * np.pi)
    UArr = []
    for Ibias in IbiasArr:
        U = (-EJKBT * const.k * np.cos(phi) -
             const.h / 2 / np.pi / 2 / const.e * Ibias * phi)
        UArr.append(U)
    ax.set_title('Tilted washboard', fontsize=16, fontweight='bold')
    ax.set_xlabel('$\phi$ (pi)')
    ax.set_ylabel('U (K)')
    i = 0
    for i, U in enumerate(UArr):
        ax.plot(phi / np.pi, U / const.k,
                label='I$_b$ =' + format(si_format(IbiasArr[i])) + 'A')
    ax.text(0.82, 0.94, 'E$_J$ =' + format(si_format(EJKBT)) + 'K\n',
            verticalalignment='bottom', horizontalalignment='left',
            transform=ax.transAxes, color='black', fontsize=12)
    ax.grid(True)
    ax.legend()
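# Hedged usage sketch for tiltedWashboardU() above.  With EJKBT = 1 K the
# critical current is roughly 2*e*k_B*E_J/hbar ~ 42 nA, so the bias currents
# below (illustrative assumptions, not from the original source) keep the
# tilt sub-critical and the washboard still shows local minima.
import matplotlib.pyplot as plt

fig, ax = plt.subplots(figsize=(8, 5))
tiltedWashboardU(EJKBT=1.0, IbiasArr=[0.0, 10e-9, 30e-9], ax=ax)
plt.show()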
def sgget():
    sgtype = request.args.get('sgtype')
    message = {}
    try:
        message['frequency'] = si_format(
            float(SG[sgtype].frequency(sgbench[sgtype])[1]['CW']),
            precision=3) + "Hz"  # frequency
        message['power'] = si_format(
            float(SG[sgtype].power(sgbench[sgtype])[1]['AMPLITUDE']),
            precision=1) + "dBm"  # power
        message['rfoutput'] = int(
            SG[sgtype].rfoutput(sgbench[sgtype])[1]['STATE'])  # rf output
    except:
        message = dict(status='%s is not connected' % sgtype)
    return jsonify(message=message)
def test_callback(info, score):
    if plot:
        dbplot(net.layers[0].w.get_value().T.reshape(-1, 28, 28), 'w0',
               cornertext='Epoch {}'.format(info.epoch))
    if swap_mlp:
        all_layer_sizes = [dataset.input_size] + hidden_sizes + [dataset.target_size]
        fwd_ops = [info.sample * d1 * d2
                   for d1, d2 in zip(all_layer_sizes[:-1], all_layer_sizes[1:])]
        back_ops = [info.sample * d1 * d2
                    for d1, d2 in zip(all_layer_sizes[:-1], all_layer_sizes[1:])]
        update_ops = [info.sample * d1 * d2
                      for d1, d2 in zip(all_layer_sizes[:-1], all_layer_sizes[1:])]
    else:
        fwd_ops = [layer_.fwd_op_count.get_value() for layer_ in net.layers]
        back_ops = [layer_.back_op_count.get_value() for layer_ in net.layers]
        update_ops = [layer_.update_op_count.get_value() for layer_ in net.layers]
    if info.epoch != 0:
        with IndentPrint('Mean Ops by epoch {}'.format(info.epoch)):
            print 'Fwd: {}'.format([si_format(ops / info.epoch, format_str='{value} {prefix}Ops')
                                    for ops in fwd_ops])
            print 'Back: {}'.format([si_format(ops / info.epoch, format_str='{value} {prefix}Ops')
                                     for ops in back_ops])
            print 'Update: {}'.format([si_format(ops / info.epoch, format_str='{value} {prefix}Ops')
                                       for ops in update_ops])
    if (info.epoch > max(0.5, 2 * test_period) and not swap_mlp
            and score.get_score('train', 'noise_free') < 20):
        raise Exception("This horse ain't goin' nowhere.")
    op_count_info.append((info, (fwd_ops, back_ops, update_ops)))
def wait_for_gui_process(self, retry_count=20, retry_duration_s=1):
    '''
    .. versionchanged:: 2.7.2
        Do not execute `refresh_gui()` while waiting for response from
        `hub_execute()`.
    '''
    start = datetime.now()
    for i in xrange(retry_count):
        try:
            hub_execute(self.name, 'ping', timeout_s=5, silent=True)
        except Exception:
            logger.debug('[wait_for_gui_process] failed (%d of %d)', i + 1,
                         retry_count, exc_info=True)
        else:
            logger.info('[wait_for_gui_process] success (%d of %d)', i + 1,
                        retry_count)
            self.alive_timestamp = datetime.now()
            return
        for j in xrange(10):
            time.sleep(retry_duration_s / 10.)
            refresh_gui()
    raise IOError('Timed out after %ss waiting for GUI process to connect '
                  'to hub.' % si_format((datetime.now() -
                                         start).total_seconds()))
def relabel_axis(axis, value_array, n_points=5, format_str='{:.2g}'):
    ticks = np.round(np.linspace(0, len(value_array) - 1,
                                 num=n_points)).astype('int')
    axis.set_ticks(ticks)
    if format_str == 'SI':
        axis.set_ticklabels([si_format(t, format_str='{value}{prefix}')
                             for t in value_array[ticks]])
    else:
        axis.set_ticklabels([format_str.format(t)
                             for t in value_array[ticks]])
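# A small sketch of how relabel_axis() above might be used: relabel the x
# axis of an image plot whose pixel index corresponds to a frequency sweep.
# The sweep range, random data and use of matplotlib are assumptions.
import matplotlib.pyplot as plt
import numpy as np

freqs = np.linspace(4e9, 8e9, 201)      # swept values along x
image = np.random.rand(101, 201)
fig, ax = plt.subplots()
ax.imshow(image, aspect='auto')
relabel_axis(ax.xaxis, freqs, n_points=5, format_str='SI')
# Tick labels now read e.g. '4.0G', '5.0G', ... instead of pixel indices.
plt.show()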
def leakage_sweep(voltage_out, dmm, to=1e-3, nump=101, wt=0.1,
                  ithaco_impedance=20):
    id, plot = qcm.linear1d(voltage_out.voltage, 0, to, nump, wt,
                            dmm.ithaco_current, setback=True)
    data = load_by_id(id)
    plot.close()
    yoko_param = voltage_out.voltage.full_name
    dmm_param = dmm.ithaco_current.full_name
    setpoints = np.array(data.get_data(yoko_param)).T[0]
    values = np.array(data.get_data(dmm_param)).T[0]
    fit, res, _, _, _ = np.polyfit(values, setpoints, 1, full=True)
    fit[0] = abs(fit[0])
    print("Resistance was: {}Ohms".format(
        si_prefix.si_format(fit[0] - ithaco_impedance, precision=3)))
    print("Residuals were: {}".format(res))
    return fit, res
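# A self-contained sketch of the resistance estimate used in leakage_sweep()
# above: fit V = R*I + offset with np.polyfit and report the slope with an
# SI prefix.  The synthetic 1 MOhm device and the 20 Ohm series impedance
# are assumptions for illustration only.
import numpy as np
import si_prefix

setpoints = np.linspace(0, 1e-3, 101)                     # applied voltage (V)
values = setpoints / 1e6 + 1e-12 * np.random.randn(101)   # measured current (A)

fit, res, _, _, _ = np.polyfit(values, setpoints, 1, full=True)
fit[0] = abs(fit[0])
print("Resistance was: {}Ohms".format(
    si_prefix.si_format(fit[0] - 20, precision=3)))        # approx. '1.000 M'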
def mouseMoved(self, evt):
    vb = self.plot.vb
    pos = evt[0]  ## using signal proxy turns original arguments into a tuple
    if self.plot.sceneBoundingRect().contains(pos):
        mousePoint = vb.mapSceneToView(pos)
        index = int(mousePoint.x())
        x_val = mousePoint.x()
        if self.logmode['x'] == True:
            x_val = 10**x_val
        y_val = mousePoint.y()
        if self.logmode['y'] == True:
            y_val = 10**y_val
        self.label.setText("x={}, y={}".format(
            si_format(x_val, 3) + format_unit(self.ds.x.unit),
            si_format(y_val, 3) + format_unit(self.ds.y.unit)))
def tiltedWashboardU(EJKBT, IbiasArr, ax):
    phi = np.arange(-0.1*np.pi, 8*np.pi, 0.025*np.pi)
    UArr = []
    for Ibias in IbiasArr:
        U = -EJKBT*const.k*np.cos(phi) - const.h /2/np.pi /2/const.e *Ibias *phi
        UArr.append(U)
    ax.set_xlabel('$\phi$ (pi)')
    ax.set_ylabel('U (K)')
    i = 0
    for i, U in enumerate(UArr):
        ax.plot(phi/np.pi, U/const.k,
                label='I$_b$ =' + format(si_format(IbiasArr[i])) + 'A')

# def QQstar(srclst):
#     JJplst = toJJplst(srclst)[0].transpose().drop('Device').transpose().astype(float)
#     srclst = srclst.transpose().drop('Device').drop('Mat.').drop('dsgn').transpose().astype(float)
#     Q = JJplst['Q']
#     lst = [0]*len(srclst['R0ZF'])
#     for i, v in enumerate(srclst['R0ZF']):
#         if v < 1000:
#             lst[i] = 0
#         else:
#             lst[i] = v
#     R0 = lst
#     freqPlasma = JJplst['$\omega_p$']*1e9 /2/np.pi
#     C_JJ = JJplst['C_JJQP']*1e-15
#     Q_star = freqPlasma * R0/srclst['#ser']*srclst['#par'] *C_JJ
#     return [Q, Q_star]
def print_objlist(objlist, func_table):
    print "<obj id> <base address> <size> <lifetime> <alloc ctx> <ctx name>"
    for obj in objlist:
        objid, addr, size, alloc_ctx, alloc_ctx_addr, alloc_time,\
            free_ctx, free_ctx_addr, free_time = obj

        # Resolve an address if corresponding symbol is found
        symbol = hex(alloc_ctx_addr)
        for (fstart, fend, fname) in func_table:
            if alloc_ctx_addr >= fstart and alloc_ctx_addr < fend:
                symbol = fname
                break

        # Lifetime of objects
        time = "%.2f s" % ((free_time - alloc_time) / (1000 * 1000 * 1000))
        if free_time == -1:
            time = "INF"

        prstring = str(objid).rjust(8)
        prstring += ' ' + hex(addr).rjust(14)
        prstring += ' (' + si_format(size).rjust(7) + 'B)'
        prstring += ' ' + time.rjust(9)
        prstring += ' ' + str(alloc_ctx).rjust(9)
        prstring += ' ' + symbol
        print prstring
def JJpar(RN=1e3, sizeX=0.2e-6, sizeY=0.2e-6, metalTHK=250e-10, T=20e-3,
          Nser=1, Npar=1, C_shunt=1e-20, ezread=False, EunHz=False,
          material='Al'):
    TcdX = {'Al': 1.34, 'Nb': 9.20, 'Pb': 0, 'V': 4.0, 'Sn': 3.72,
            'Nb_Ono1987': 4.18}
    SCgap = 1.764*k*TcdX[material]
    JJarea = sizeX*sizeY + (sizeX+sizeY)*metalTHK
    RN_JJ = RN /Nser *Npar
    Rs_JJ = RN_JJ * JJarea
    I_ABT = pi /2/e *SCgap /RN_JJ *np.tanh(SCgap /2/k/T)
    EJ_JJ = phi0 /2/pi *I_ABT
    LK_JJ = phi0 /2/pi /I_ABT
    C_JJ = 50e-15 *JJarea*1e12
    EC_JJ = e**2 /2/C_JJ  # gives EC;QP
    C_0 = C_shunt
    C_tot = C_JJ + C_0
    EC_tot = e**2 /2/C_tot
    Z_JJ = np.sqrt(LK_JJ/C_tot)
    EJoC_JJ = EJ_JJ/EC_JJ
    EJoC_to = EJ_JJ/EC_tot
    omegaP = 1/np.sqrt(LK_JJ*C_tot)
    omegaP = omegaP/2/pi
    omegaRC = 1/RN_JJ/C_tot
    Q = omegaP/omegaRC
    Q = np.sqrt(2*pi/phi0 *I_ABT *RN_JJ**2 *C_tot)

    if EunHz:
        u = h
    else:
        u = k
    key = ['RN_JJ', 'Rs_JJ', 'I_AB', 'EJ', 'LK_JJ', 'C_JJ', 'Z', 'EC_tot',
           'w_p', 'Q', 'EJ/EC_to']
    unit = ['ohm', '$\Omega m^2$', 'A', 'K', 'H', 'F', 'ohm', 'K', 'Hz',
            '', '']
    lst = [RN_JJ, Rs_JJ, I_ABT, EJ_JJ/u, LK_JJ, C_JJ, Z_JJ, EC_tot/u,
           omegaP, Q, EJoC_to]
    if EunHz:
        unit[3] = 'Hz'
        unit[7] = 'Hz'

    JJparDFM = pd.DataFrame(data=list(zip(*[lst])), index=key).transpose()
    JJparUNI = pd.DataFrame(data=dict(zip(key, unit)), index=[0])
    JJparUNI = dict(zip(key, unit))
    if ezread:
        l = []
        for i, v in JJparDFM.iloc[0].items():
            l += ['{}{}'.format(si_format(v), JJparUNI[i])]
        l = pd.Series(l)
        l.index = JJparDFM.columns.tolist()
        l = l.to_frame().transpose()
        return l
    else:
        return [JJparDFM, JJparUNI]
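# Example calls for JJpar() above (parameter values are illustrative
# assumptions).  With ezread=True the function returns a one-row DataFrame of
# SI-formatted strings, e.g. '1.0 kohm' for RN_JJ.
table = JJpar(RN=1e3, sizeX=0.2e-6, sizeY=0.2e-6, T=20e-3, ezread=True)
print(table)

# The raw numeric results and their units are returned when ezread=False.
values, units = JJpar(RN=1e3, ezread=False)
print(values['w_p'], units['w_p'])   # plasma frequency and its unit ('Hz')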
def format_test_voltage_results(results, figure_path=None):
    '''
    .. versionadded:: 1.28

    .. versionchanged:: 1.30
        Format measured/target voltages as a table.

    Parameters
    ----------
    results : dict
        Results from :func:`dropbot.hardware_test.test_voltage`.
    figure_path : str, optional
        If specified, include summary figure reference in text and write
        figure image to specified path.

        Filepath must have web browser compatible image extension (e.g.,
        ``.jpg``, ``.png``).

    Returns
    -------
    str
        Summary of :func:`dropbot.hardware_test.test_voltage` results in
        Markdown format.

        If :data:`figure_path` was specified, summary figure is written to
        the specified path.
    '''
    voltages = pd.DataFrame(np.column_stack([results['target_voltage'],
                                             results['measured_voltage']]),
                            columns=['target', 'measured'])

    # Calculate the average rms error
    error = voltages['measured'] - voltages['target']
    rms_error = 100 * np.sqrt(np.mean((error / voltages['target'])**2))

    if figure_path:
        figure_path = ph.path(figure_path).realpath()
        # Make parent directories if they don't exist.
        figure_path.parent.makedirs_p()
        axis = plot_test_voltage_results(results)
        fig = axis.get_figure()
        fig.tight_layout()
        fig.savefig(figure_path, bbox_inches='tight')

    template = jinja2.Template(r'''
# Test voltage results: #

 - **Output voltages**:

{{ voltages.T|string|indent(8, True) }}

 - **Root-mean-squared (RMS) error**: {{ '{:.1f}'.format(rms_error) }}%
{%- if figure_path %}

![Measured vs target voltage]({{ figure_path }})
{%- endif %}
'''.strip())

    return template.render(results=results,
                           voltages=voltages.applymap(lambda x: '%sV' %
                                                      si.si_format(x)),
                           rms_error=rms_error,
                           figure_path=figure_path).strip()
async def handle_page(request: 'aiohttp.web.Request') -> dict:
    session = await aiohttp_session.get_session(request)
    data = db.select_by_uuid(request.match_info.get('image_uuid', None))
    if data is None or not os.path.isfile(os.path.join(upload_dir,
                                                       data.get('path'))):
        raise aiohttp.web.HTTPNotFound()
    try:
        image = Image.open(os.path.join(upload_dir, data.get('path')))
    except:
        raise aiohttp.web.HTTPInternalServerError()
    data['stat'] = os.stat(os.path.join(upload_dir, data.get('path')))
    if image.format.lower() in ('jpg', 'jpeg'):
        exif = image._getexif() if image._getexif() is not None else dict()
        data = {
            **{ExifTags.TAGS.get(tag, tag): value
               for tag, value in exif.items()},
            **data
        }
        data['Focal'] = int(data.get('FocalLength', (0, 1))[0] /
                            data.get('FocalLength', (0, 1))[1])
        data['Opening'] = round(data.get('FNumber', (0, 1))[0] /
                                data.get('FNumber', (0, 1))[1], 1)
    data['root'], data['extension'] = os.path.splitext(data.get('path'))
    data['width'], data['height'], data['info'], data['format'] = \
        image.width, image.height, image.info, image.format
    data['resolution'] = round(data.get('width', 0) * data.get('height', 0) /
                               1000000, 1)
    data['weight'] = si_prefix.si_format(data['stat'].st_size, precision=1)
    try:
        data['localtime'] = time.strptime(data.get('DateTime', ''),
                                          '%Y:%m:%d %H:%M:%S')
    except ValueError:
        data['localtime'] = time.localtime(data['stat'].st_ctime)
    data['date'] = time.strftime('%d %B %Y', data['localtime'])
    data['time'] = time.strftime('%a, %H:%M', data['localtime'])
    return {
        'is_authenticated': session.get('token', None) == token,
        'token_is_not_set': token is False,
        'data': data,
    }
def bodeplot(f, g, p, with_fc=False):
    fig, (ax1, ax2) = plt.subplots(2, 1, figsize=(10, 6))
    fig.subplots_adjust(hspace=0.5)
    ax1.set_title("Bode plot")
    ax1.set_ylabel('gain (dB)')
    ax1.set_xlabel('f (Hz)')
    ax1.grid(True)
    ax1.semilogx(f, g, 'C2')
    ax1.axhline(y=-3.0)
    if with_fc:
        try:
            # find fc at -3db
            yreduced = np.array(g) - (-3.0)
            freduced = interpolate.UnivariateSpline(f, yreduced, s=0)
            fc = freduced.roots()[0]
            ax1.scatter([fc], [-3.0], c='red')
            ax1.annotate("fc=" + si_format(fc, precision=2) + "Hz",
                         xy=(fc, -3.0), xytext=(fc, -2.5))
        except:
            print("Warning: can't find fc")
    ax2.set_ylabel('phase (°)')
    ax2.set_xlabel('f (Hz)')
    ax2.semilogx(f, p, 'C2')
    ax2.grid(True)
    return plt.show()
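# A runnable sketch feeding bodeplot() above with a first-order RC low-pass
# response.  The component values are arbitrary assumptions; the -3 dB
# annotation should land near fc = 1 / (2*pi*R*C), about 1.6 kHz here.
import numpy as np

R, C = 10e3, 10e-9                          # 10 kOhm, 10 nF
f = np.logspace(1, 6, 500)
H = 1.0 / (1.0 + 1j * 2 * np.pi * f * R * C)
g = 20 * np.log10(np.abs(H))                # gain in dB
p = np.degrees(np.angle(H))                 # phase in degrees
bodeplot(f, g, p, with_fc=True)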
def build(self, start=0, stop=None):
    if (stop is None):
        stop = PlanetGenerator.numberSteps
    for k in range(stop, PlanetGenerator.numberSteps):
        self.construction[k] = {}
    if start == 0:
        self.planet = Planet(self.parameters)
        self.random = np.random.RandomState(self.seed)
    else:
        if (not self._saveIntermediates):
            warn("Intermediate states were not saved. Building the "
                 "structure from scratch.")
            self.random = np.random.RandomState(self.seed)
            self.build(start=0, stop=stop)
            return
        else:
            self.planet = self.construction[start - 1]["planet"]
            self.random = self.construction[start - 1]["random"]
    for i, (callback, desc) in enumerate(
            zip(PlanetGenerator.callbacks[start:stop],
                PlanetGenerator.callbacksDescription[start:stop])):
        print(desc + "...", end="")
        duration = timeit(lambda: callback(self.planet, self), number=1)
        if (self._saveIntermediates):
            self.construction[i]["planet"] = deepcopy(self.planet)
            self.construction[i]["random"] = deepcopy(self.random)
            self.construction[i]["duration"] = duration
        print(" Done in {}s".format(si_format(duration)))
def _printcapacity(instance):
    tname = _get_textid('technology_type')
    hours = float(len(instance.t))
    techtotal = [0] * len(instance.all_tech)
    disptotal = [0] * len(instance.all_tech)
    capftotal = [0] * len(instance.all_tech)
    nperz = [0] * len(instance.all_tech)
    idx = list(instance.all_tech)
    for z in instance.zones:
        for n in instance.gen_tech_per_zone[z]:
            techtotal[idx.index(n)] += value(instance.gen_cap_op[z, n])
            disptotal[idx.index(n)] += value(sum(instance.gen_disp[z, n, t]
                                                 for t in instance.t))
            capftotal[idx.index(n)] += value(sum(instance.gen_cap_factor[z, n, t]
                                                 for t in instance.t))
            nperz[idx.index(n)] += 1
        for s in instance.stor_tech_per_zone[z]:
            techtotal[idx.index(s)] += value(instance.stor_cap_op[z, s])
            disptotal[idx.index(s)] += value(sum(instance.stor_disp[z, s, t]
                                                 for t in instance.t))
            capftotal[idx.index(s)] += 0.5 * hours
            nperz[idx.index(s)] += 1
        for h in instance.hyb_tech_per_zone[z]:
            techtotal[idx.index(h)] += value(instance.hyb_cap_op[z, h])
            disptotal[idx.index(h)] += value(sum(instance.hyb_disp[z, h, t]
                                                 for t in instance.t))
            capftotal[idx.index(h)] += value(sum(instance.hyb_cap_factor[z, h, t]
                                                 for t in instance.t))
            nperz[idx.index(h)] += 1

    NEMcap = sum(techtotal)
    NEMdis = sum(disptotal)

    print("NEM Capacity total: %sW\tNEM Dispatch total: %sWh" % (
        si_format(NEMcap * 1e6, precision=2),
        si_format(NEMdis * 1e6, precision=2)
    ))

    for j in instance.all_tech:
        if techtotal[idx.index(j)] > 0:
            print("%17s: %7sW | dispatch: %7sWh | avg cap factor: %.2f(%.2f)" % (
                tname[j],
                si_format(techtotal[idx.index(j)] * 1e6, precision=1),
                si_format(disptotal[idx.index(j)] * 1e6, precision=1),
                disptotal[idx.index(j)] / hours / techtotal[idx.index(j)],
                capftotal[idx.index(j)] / hours / nperz[idx.index(j)]
            ))
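# The capacity and dispatch totals above are stored in MW/MWh, hence the 1e6
# scale factor before SI formatting.  A quick standalone check of that
# convention (the capacity figure below is an arbitrary assumption):
from si_prefix import si_format

cap_mw = 47123.0   # total capacity in MW
print("NEM Capacity total: %sW" % si_format(cap_mw * 1e6, precision=2))
# -> NEM Capacity total: 47.12 GW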
def tuning_para_table(df_para):
    return html.Div([
        dbc.Row(
            [
                dbc.Label("Tuning Parameters:", className="ml-3"),
            ],
            className="mt-3",
        ),
        dbc.Row([
            dbc.Col(
                dash_table.DataTable(
                    columns=[{
                        "name": c,
                        "id": f"step{s+1}-{c}"
                    } for c in df_para.columns[0:3]],
                    data=[{
                        f"step{s+1}-Parameters": row[0],
                        f"step{s+1}-Min": si_format(row[1], precision=2),
                        f"step{s+1}-Max": si_format(row[2], precision=2),
                    } for i, row in df_para.iterrows() if row[3] == s + 1],
                    editable=True,
                    style_cell={
                        "minWidth": 45,
                        "maxWidth": 45,
                        "width": 45
                    },
                    style_data_conditional=[{
                        "if": {
                            "column_id": f"step{s+1}-Parameters",
                        },
                        "color": "black",
                        "fontWeight": "bold",
                        "backgroundColor": color,
                        "backgroundOpacity": 0.5,
                    }],
                ),
                className="m-3",
            ) for s, color in enumerate(
                ["#fe9a9a", "#b8ecfe", "#aefec2", "#fef8ae"])
        ])
    ])
def start_single_measure():
    print('e4980al_gui_support.start_single_measure')
    muszer.set_trig_mode("HOLD")
    meas_mode = meas1.get()
    muszer.meas_type(meas_mode)
    muszer.set_meas_voltage(float(voltage.get()))
    muszer.set_meas_freq(float(freq.get()))
    muszer.disp_page("MEASurement")
    muszer.trigger()
    l = muszer.fetch()
    print('{value1}{unit1}, {value2}{unit2}'.format(
        value1=si_format(l[0]), unit1=units_in_mode(meas_mode)[0],
        value2=si_format(l[1]), unit2=units_in_mode(meas_mode)[1]))
    sys.stdout.flush()
def transfer_liquid(aproxy, channels, **kwargs):
    '''
    Transfer liquid from tail n-1 channels to head n-1 channels.

        xxxx... -> ...xxxx

    where ``x`` denotes liquid and ``.`` denotes an empty electrode.

    This is accomplished through two separate actuations:

     1. Actuate all but the **last** channel in ``channels``.
     2. Actuate all but the **first** channel in ``channels``.

    Actuation **(1)** is applied until a steady-state capacitance is
    reached.  At this point, the measured capacitance is recorded as a
    target threshold.  Actuation **(2)** is then applied until the target
    threshold capacitance from actuation **(1)** is reached.
    '''
    messages_ = []
    try:
        tail_channels_i = list(channels[:-1])
        route_i = list(it.chain(*(c if isinstance(c, collections.Sequence)
                                  else [c] for c in tail_channels_i)))
        print('\r%-50s' % ('Wait for steady state: %s' % list(route_i)),
              end='')
        messages = yield asyncio\
            .From(actuate(aproxy, route_i,
                          ft.partial(test_steady_state_, **kwargs)))
        messages_.append({'channels': tuple(route_i), 'messages': messages})

        target_capacitance_i = ((float(len(channels) - 1) / len(channels)) *
                                messages[-1]['new_value'])

        head_channels_i = list(channels[1:])
        print('\r%-50s' % ('Wait for target capacitance of: %sF' %
                           si.si_format(target_capacitance_i)), end='')
        route_i = list(it.chain(*(c if isinstance(c, collections.Sequence)
                                  else [c] for c in head_channels_i)))

        def test_threshold(messages):
            df = pd.DataFrame(messages[-5:])
            return df.new_value.median() >= target_capacitance_i

        messages = yield asyncio\
            .From(actuate(aproxy, route_i, test_threshold))
        messages_.append({'channels': tuple(route_i), 'messages': messages})
    except (asyncio.CancelledError, asyncio.TimeoutError):
        raise TransferTimeout(channels)
    raise asyncio.Return(messages_)
def format_name_item(func_name, arg_name, arg_value):
    '''
    format the name of a single argument of a function.

    Args:
        func_name (str) : name of the function
        arg_name (str) : name of the argument
        arg_value (any) : value of the argument provided to the function
    '''
    if arg_name == 'segment' or arg_value is None:
        return ''

    unit_type = 'V'
    multiplier = 1e-3
    if arg_name.startswith('t_'):
        unit_type = 'ns'
        multiplier = 1
    if arg_name.startswith('f_'):
        unit_type = 'GHz'
        multiplier = 1e-9

    if isinstance(arg_value, numbers.Number):
        return si_format(arg_value * multiplier, precision=1) + unit_type
    if isinstance(arg_value, str):
        return arg_value
    if isinstance(arg_value, loop_obj):
        axis = tuple(arg_value.axis)
        if len(axis) == 1:
            axis = axis[0]
        return 'VAR ' + str(axis)
    if dataclasses.is_dataclass(arg_value):
        items = '[ '
        key_values_pairs = list(arg_value.__dict__.items())
        for i in range(len(key_values_pairs)):
            if not key_values_pairs[i][0].startswith('_'):  # neglect private variables
                items += str(key_values_pairs[i][0]) + ' '
                items += format_name_item(func_name, key_values_pairs[i][0],
                                          key_values_pairs[i][1]) + ' '
        return items + ' ]'
    if isinstance(arg_value, tuple):
        items = []
        for i in arg_value:
            items.append(format_name_item(func_name, arg_name, i))
        return str(tuple(items))

    raise ValueError(
        f'Invalid input provided for function {func_name}. Valid input '
        f'arguments are Numeric/loop_obj types or tuples of those '
        f'(arg = {arg_name}, val = {arg_value})')
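# Hypothetical calls illustrating format_name_item() above; the unit and
# multiplier are chosen purely from the argument-name prefix, so times are
# assumed to be given in ns, frequencies in Hz and voltages in mV.  The
# function name 'ramp' is an arbitrary placeholder.
print(format_name_item('ramp', 't_pulse', 120))     # -> '120.0 ns'
print(format_name_item('ramp', 'f_drive', 5.4e9))   # -> '5.4 GHz'
print(format_name_item('ramp', 'amplitude', 250))   # -> '250.0 mV'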
def calibrate_sheet_capacitance(target_force, *args):
    '''Calibrate sheet capacitance with liquid present

    **NOTE** Prior to running the following cell:

     - _at least_ one electrode **MUST** be **actuated**
     - all actuated electrodes **MUST** be completely covered with liquid

    It may be helpful to use the interactive figure UI to manipulate liquid
    until the above criteria are met.

    This function performs the following steps:

     1. Measure **total capacitance** across **all actuated electrodes**
     2. Compute sheet capacitance with liquid present ($\Omega_L$) based
        on nominal areas of actuated electrodes from `chip_file`
     3. Compute voltage to match 25 μN of force, where
        $F = 10^3 \cdot 0.5 \cdot \Omega_L \cdot V^2$
     4. Set DropBot voltage to match target of 25 μN force.
    '''
    proxy = DropBotMqttProxy.from_uri('dropbot', aproxy.__client__._host)
    name = 'liquid'
    states = proxy.state_of_channels
    channels = states[states > 0].index.tolist()
    electrodes_by_id = pd.Series(chip_info_mm['electrodes'],
                                 index=(e['id'] for e in
                                        chip_info_mm['electrodes']))
    actuated_area = (electrodes_by_id[channel_electrodes[channels]]
                     .map(lambda x: x['area'])).sum()
    capacitance = pd.Series(proxy.capacitance(0)
                            for i in range(20)).median()
    sheet_capacitance = capacitance / actuated_area
    message = ('Measured %s sheet capacitance: %sF/%.1f mm^2 = %sF/mm^2' %
               (name, si.si_format(capacitance), actuated_area,
                si.si_format(sheet_capacitance)))
    print(message)
    voltage = np.sqrt(target_force / (1e3 * 0.5 * sheet_capacitance))
    return sheet_capacitance, voltage
def details_price_textual(self) -> str:
    sd = self.details_price()
    si_volume = si_format(sd.volume, precision=2)
    si_cap = si_format(round_currency_scalar(sd.market_capitalization),
                       precision=2)
    si_volume_f = float(si_volume[:-1])
    si_cap_f = float(si_cap[:-1])
    data = [{
        conf.LOCAL['currency']: round_currency_scalar(sd.price),
        '24h H': round_currency_scalar(sd.price_24h_high),
        '24h L': round_currency_scalar(sd.price_24h_low),
        '52w H': round_currency_scalar(sd.price_52w_high),
        '52w L': round_currency_scalar(sd.price_52w_low),
        '% 1h': round_percent(sd.percent_change_1h),
        '% 24h': round_percent(sd.percent_change_24h),
        '% 7d': round_percent(sd.percent_change_7d),
        '% 30d': round_percent(sd.percent_change_30d),
        '% 52w': round_percent(sd.percent_change_52w),
        '% YTD': round_percent(sd.percent_change_ytd),
        'Vol': si_volume_f,
        'Cap': si_cap_f,
        # 'Recom': sd.recommendation
    }]

    # Creates pandas DataFrame by passing
    # Lists of dictionaries and row index.
    df = pd.DataFrame(data, index=[self.symbol])
    df_T = df.T
    table_fmt = simple_separated_format(': ')
    text = tabulate(df_T, tablefmt=table_fmt, colalign=('right', 'decimal'))
    text = text.replace(str(si_volume_f), si_volume)
    text = text.replace(str(si_cap_f), si_cap)
    text = ''.join((f' {self.symbol}\n',
                    f'{text}\n',
                    f'Recom: {sd.recommendation}'))
    return text
def _print_retirements(instance):
    tname = _get_textid('technology_type')
    hours = float(len(instance.t))
    techtotal = [0] * len(instance.all_tech)
    nperz = [0] * len(instance.all_tech)
    idx = list(instance.all_tech)
    for z in instance.zones:
        for n in instance.gen_tech_per_zone[z]:
            techtotal[idx.index(n)] += value(instance.gen_cap_ret[z, n])
            nperz[idx.index(n)] += 1

    NEMcap = sum(techtotal)
    print("NEM Retired Capacity: %sW" % (
        si_format(NEMcap * 1e6, precision=2)
    ))
    for j in instance.all_tech:
        if techtotal[idx.index(j)] > 0:
            print("%17s: %7sW" % (
                tname[j],
                si_format(techtotal[idx.index(j)] * 1e6, precision=1)
            ))
def infographic(items, **kwargs):
    """Create an infographic 'plot'.

    :param items: 4-tuples of (label, value, icon, unit); the label should
        be a one or two word description, the value the headline number,
        and the icon the name of a fontawesome icon. If `value` is numeric,
        it will be normalised by use of an SI suffix for display after
        which `unit` will be appended. If `value` is a string it will be
        used as is.
    :param kwargs: kwargs for bokeh gridplot.

    ..note:: If `bootstrap_fontawesome` has not already been called, the
        function will load the required fonts, however they will not
        display the first time a Jupyter labs cell is run. If using the
        `InfoGraphItems` helper class, this wrinkle will be taken care of
        provided the helper is initiated in a previous cell.
    """
    plots = list()
    seen = set()
    for label, value, icon, unit in items:
        if label in seen:
            continue
        if not isinstance(value, str):
            value = si_format(value) + unit
        seen.add(label)
        width, height = 175, 100
        aspect = height / width
        p = figure(
            output_backend='webgl',
            plot_width=width, plot_height=height,
            title=None, toolbar_location=None)
        p.axis.visible = False
        p.grid.visible = False
        p.outline_line_color = None
        p.rect([0.5], [0.5], [1.0], [1.0], fill_color="#2171b5")
        p.x_range = Range1d(start=0.1, end=0.9, bounds=(0.1, 0.9))
        p.y_range = Range1d(start=0.1, end=0.9, bounds=(0.1, 0.9))
        p.add_layout(
            Label(
                x=0.15, y=0.45, text=value,
                text_color="#DEEBF7", text_font_size="24px"))
        p.add_layout(
            Label(
                x=0.15, y=0.2, text=label,
                text_color="#C6DBEF", text_font_size="16px"))
        image = fa_icons.rgba(icon, 75, color='#6BAED6')
        p.image_rgba(image=[image], x=0.6, y=0.4, dw=0.25, dh=0.25 / aspect)
        plots.append(p)
    defaults = {'toolbar_location': None, "ncols": len(items)}
    defaults.update(kwargs)
    return gridplot(plots, **defaults)
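# A small hedged example driving infographic() above from a Jupyter cell.
# Icon names follow fontawesome, and `fa_icons` is assumed to be provided by
# the surrounding module; the labels, values and ncols setting below are
# assumptions for illustration only.
from bokeh.io import output_notebook, show

output_notebook()
items = [
    ('Reads', 1234567, 'angle-up', ''),
    ('Yield', 9.87e9, 'signal', 'b'),
    ('Mean quality', '13.2', 'award', ''),   # string values are used as-is
]
show(infographic(items, ncols=3))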
def tiltedWashboardU(EJKBT, IbiasArr, ax):
    phi = np.arange(-0.1 * np.pi, 8 * np.pi, 0.025 * np.pi)
    UArr = []
    for Ibias in IbiasArr:
        U = (-EJKBT * const.k * np.cos(phi) -
             const.h / 2 / np.pi / 2 / const.e * Ibias * phi)
        UArr.append(U)
    ax.set_xlabel('$\phi$ (pi)')
    ax.set_ylabel('U (K)')
    i = 0
    for i, U in enumerate(UArr):
        ax.plot(phi / np.pi, U / const.k,
                label='I$_b$ =' + format(si_format(IbiasArr[i])) + 'A')
def format_duration(seconds):
    '''
    Formats a float interpreted as seconds as a sensible time duration.

    :param seconds:
    :return:
    '''
    if seconds < 60:
        return si_format(seconds, precision=1, format_str='{value}{prefix}s')
    elif seconds < _seconds_in_day:
        res = str(datetime.timedelta(seconds=seconds))
        if len(res.split(".")) > 1:
            return ".".join(res.split(".")[:-1])
        else:
            return res
    else:
        days = seconds // _seconds_in_day
        return '{:d}d,{}'.format(days,
                                 format_duration(seconds % _seconds_in_day))
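# A minimal usage sketch for format_duration() above, assuming
# _seconds_in_day = 24 * 60 * 60 and si_prefix's default rounding; the
# commented results show the general shape of the output.
print(format_duration(0.00042))   # e.g. '420.0µs'  (SI prefix below 60 s)
print(format_duration(42.0))      # e.g. '42.0s'
print(format_duration(4242))      # e.g. '1:10:42'  (timedelta below 1 day)
print(format_duration(90090))     # e.g. '1d,1:01:30'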
def add_assembly_information(self):
    if self.analysis.genome is None:
        return

    with self.doc.create(Subsection('Assembly statistics', numbering=False)):
        with self.doc.create(Tabular('ll', width=2)) as table:
            table.add_row(('Contigs', len(self.analysis.genome)))

            genome_size = 0
            for i in self.analysis.genome:
                genome_size += len(i.seq)
            genome_size = si_format(genome_size, precision=1)
            table.add_row(('Assembly size', genome_size))

        self.doc.append(VerticalSpace("10pt"))
def wait_for_gui_process(self, retry_count=20, retry_duration_s=1):
    start = datetime.now()
    for i in xrange(retry_count):
        try:
            hub_execute(self.name, 'ping',
                        wait_func=lambda *args: refresh_gui(), timeout_s=5,
                        silent=True)
        except:
            logger.debug('[wait_for_gui_process] failed (%d of %d)', i + 1,
                         retry_count, exc_info=True)
        else:
            logger.info('[wait_for_gui_process] success (%d of %d)', i + 1,
                        retry_count)
            self.alive_timestamp = datetime.now()
            return
        for j in xrange(10):
            time.sleep(retry_duration_s / 10.)
            refresh_gui()
    raise IOError('Timed out after %ss waiting for GUI process to connect '
                  'to hub.' % si_format((datetime.now() -
                                         start).total_seconds()))
def set_property(column, cell_renderer, list_store, iter, store_i):
    cell_renderer.set_property('text', si_format(list_store[iter][store_i],
                                                 digits))
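# A sketch of how a cell-data callback like set_property() above is wired
# into a PyGTK tree view; the model layout, the `digits` global and the
# column index passed as user data are assumptions.
import gtk

digits = 2
list_store = gtk.ListStore(float)
for capacitance in (4.7e-9, 2.2e-6, 1.5e-3):
    list_store.append([capacitance])

column = gtk.TreeViewColumn('Value')
renderer = gtk.CellRendererText()
column.pack_start(renderer, True)
# Gtk calls set_property(column, renderer, model, iter, 0) for every row,
# so each cell is rendered as e.g. '4.70 n'.
column.set_cell_data_func(renderer, set_property, 0)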
        1e8,
        1e9,  # Billion
]:
    i = int(i)
    loops = max(1, 1000000 / i)
    subject = 'a' * i
    print('{:>16n} {:>8} '.format(i, loops), sep='', end='')
    sys.stdout.flush()

    # re
    times = timeit.repeat("p.match('%s')" % subject,
                          setup="import re; p=re.compile('(a)*')",
                          repeat=3, number=loops)
    quickest = min(times) / loops
    print('{:>8} '.format(si_prefix.si_format(quickest, precision=1)),
          sep='', end='')
    sys.stdout.flush()

    # regex
    try:
        times = timeit.repeat("p.match('%s')" % subject,
                              setup="import regex; p=regex.compile('(a)*')",
                              repeat=3, number=loops)
    except regex.error:
        print('{:^8} '.format('error'), sep='', end='')
    else:
        quickest = min(times) / loops
        print('{:>8} '.format(si_prefix.si_format(quickest, precision=1)),
              sep='', end='')
    sys.stdout.flush()

    # ppeg
def execute_actuation(self, static_states, dynamic_states, duration_s):
    '''
    .. versionadded:: 2.25

    XXX Coroutine XXX

    Execute specified *static* and *dynamic* electrode actuations.

    Parameters
    ----------
    static_states : pandas.Series
        Static electrode actuation states, indexed by electrode ID, (e.g.,
        `"electrode001"`).
    dynamic_states : pandas.Series
        Dynamic electrode actuation states, indexed by electrode ID.
    duration_s : float
        Actuation duration (in seconds).

        If not specified, use value from step options.

    Returns
    -------
    dict
        Response with fields:

        - ``start``: actuation start timestamp (`datetime.datetime`).
        - ``end``: actuation start timestamp (`datetime.datetime`).
        - ``actuated_electrodes``: actuated electrode IDs (`list`).

    See Also
    --------
    execute_actuations

    .. versionchanged:: 2.25
        Still apply for specified duration even if _no electrodes_ are
        specified for actuation.

    .. versionchanged:: 2.28.2
        Allow user to optionally ignore failed actuations.
    '''
    # Notify other ZMQ plugins that `dynamic_electrodes_states` have
    # changed.
    @sync(gtk_threadsafe)
    def notify_dynamic_states(dynamic_electrode_states):
        try:
            return hub_execute(self.name, 'set_dynamic_electrode_states',
                               electrode_states=dynamic_electrode_states)
        except Exception as exception:
            _L().warning(str(exception), exc_info=True)
            return None

    response = yield asyncio.From(notify_dynamic_states(dynamic_states))

    static_electrodes_to_actuate = set(static_states[static_states > 0].index)
    dynamic_electrodes_to_actuate = set(dynamic_states[dynamic_states > 0]
                                        .index)

    electrodes_to_actuate = (dynamic_electrodes_to_actuate |
                             static_electrodes_to_actuate)

    # Execute `set_electrode_states` command through ZeroMQ plugin
    # API to notify electrode actuator plugins (i.e., plugins
    # implementing the `IElectrodeActuator` interface) of the
    # electrodes to actuate.
    s_electrodes_to_actuate = \
        pd.Series(True, index=sorted(electrodes_to_actuate))

    step_options = self.get_step_options()
    voltage = step_options['Voltage (V)']
    frequency = step_options['Frequency (Hz)']

    def set_waveform(key, value):
        try:
            result = emit_signal("set_%s" % key, value,
                                 interface=IWaveformGenerator)
            if result:
                return result
        except Exception as exception:
            result = exception

        if not key in self.warnings_ignoring:
            response = ignorable_warning(title='Warning: failed to set '
                                         '%s' % key, text='No waveform '
                                         'generators available to set '
                                         '<b>%s</b>.' % key, use_markup=True)
            if response['always']:
                self.warnings_ignoring[key] = response['ignore']
            ignore = response['ignore']
        else:
            ignore = self.warnings_ignoring[key]

        if not ignore:
            return RuntimeError('No waveform generators available to set '
                                '%s to %s' % (key, value))

    for key, value, unit in (('frequency', frequency, 'Hz'),
                             ('voltage', voltage, 'V')):
        # Apply waveform in main (i.e., Gtk) thread.
        waveform_result = \
            yield asyncio.From(sync(gtk_threadsafe)
                               (ft.partial(set_waveform, key, value))())

        if isinstance(waveform_result, Exception):
            raise waveform_result
        elif waveform_result:
            _L().info('%s set to %s%s (plugins: `%s`)', key,
                      si.si_format(value), unit, waveform_result.keys())

    electrode_actuators = emit_signal('on_actuation_request',
                                      args=[s_electrodes_to_actuate,
                                            duration_s],
                                      interface=IElectrodeActuator)

    if not electrode_actuators:
        if not 'actuators' in self.warnings_ignoring:
            @sync(gtk_threadsafe)
            def _warning():
                return ignorable_warning(title='Warning: failed to '
                                         'actuate all electrodes',
                                         text='No electrode actuators '
                                         'registered to '
                                         '<b>actuate</b>: <tt>%s</tt>' %
                                         list(electrodes_to_actuate),
                                         use_markup=True)

            response = yield asyncio.From(_warning())

            if response['always']:
                self.warnings_ignoring['actuators'] = response['ignore']
            ignore = response['ignore']
        else:
            ignore = self.warnings_ignoring['actuators']

        if not ignore:
            raise RuntimeError('No electrode actuators registered to '
                               'actuate: `%s`' %
                               list(electrodes_to_actuate))
        else:
            # Simulate actuation by waiting for specified duration.
            yield asyncio.From(asyncio.sleep(duration_s))
    else:
        actuation_tasks = electrode_actuators.values()

        # Wait for actuations to complete.
        start = dt.datetime.now()
        done, pending = yield asyncio.From(asyncio.wait(actuation_tasks))
        end = dt.datetime.now()

        actuated_electrodes = set()
        exceptions = []

        def _error_message(use_markup=True):
            missing_electrodes = (electrodes_to_actuate -
                                  actuated_electrodes)
            messages = []
            monospace_format = '<tt>%s</tt>' if use_markup else '%s'

            if missing_electrodes:
                messages.append('Failed to actuate the following '
                                'electrodes: %s' %
                                ', '.join(monospace_format % e
                                          for e in missing_electrodes))
            if len(exceptions) == 1:
                messages.append('Actuation error: ' + monospace_format %
                                exceptions[0])
            elif exceptions:
                messages.append('Actuation errors:\n%s' %
                                '\n'.join(' - ' + monospace_format % e
                                          for e in exceptions))
            return '\n\n'.join(messages)

        @sync(gtk_threadsafe)
        def _warning():
            return ignorable_warning(title='Warning: actuation error',
                                     text=_error_message(),
                                     use_markup=True)

        for d in done:
            try:
                actuated_electrodes.update(d.result())
            except Exception as exception:
                # Actuation error occurred.  Save exception and check
                # remaining responses from actuators.
                exceptions.append(exception)

        if (electrodes_to_actuate - actuated_electrodes) or exceptions:
            if not 'actuate' in self.warnings_ignoring:
                response = yield asyncio.From(_warning())

                if response['always']:
                    self.warnings_ignoring['actuate'] = \
                        response['ignore']
                ignore = response['ignore']
            else:
                ignore = self.warnings_ignoring['actuate']

            if not ignore:
                raise RuntimeError(_error_message(use_markup=False))
            else:
                _L().info('Ignored actuation error(s): `%s`', exceptions)
                # Simulate actuation by waiting for remaining duration.
                remaining_duration = (duration_s -
                                      (dt.datetime.now() -
                                       start).total_seconds())
                if remaining_duration > 0:
                    yield asyncio.From(asyncio.sleep(remaining_duration))
        else:
            # Requested actuations were completed successfully.
            _L().info('actuation completed (actuated electrodes: %s)',
                      actuated_electrodes)

        raise asyncio.Return({'start': start, 'end': end,
                              'actuated_electrodes':
                              sorted(actuated_electrodes)})
def read_packet(read_func, timeout_s=None, poll_s=0.001):
    '''
    Read packet from specified callback function.

    Blocks until full packet is read (or exception occurs).

    .. versionadded:: 0.14

    Parameters
    ----------
    read_func : function
        Callback function.  Must return ``bytes``.
    timeout_s : float, optional
        Number of seconds to wait for full packet.

        By default, block until packet is received.
    poll_s : float, optional
        Time to wait between calls to :func:`read_func`.

    Returns
    -------
    cPacket
        Parsed packet.

    Raises
    ------
    RuntimeError
        If specified time out is reached before a packet is received.
    Exception
        If an exception is encountered while reading or parsing, the
        exception is raised.
    '''
    from .NadaMq import cPacketParser

    # Record start time.
    start = dt.datetime.utcnow()

    stop_request = threading.Event()
    packet_ready = threading.Event()
    parse_error = threading.Event()
    result = {}

    def _do_read(read_func, output):
        parser = cPacketParser()
        try:
            while True:
                data = read_func()
                if data:
                    result_ = parser.parse(np.fromstring(data,
                                                         dtype='uint8'))
                    if result_ is not False:
                        output['response'] = result_
                        packet_ready.set()
                        break
                if stop_request.wait(poll_s):
                    break
        except Exception as exception:
            # Exception occurred while reading/parsing.  Store exception
            # and report to calling thread.
            parse_error._exception = exception
            parse_error.set()

    # Start background thread to read data.
    thread = threading.Thread(target=_do_read, args=(read_func, result))
    thread.daemon = True
    thread.start()

    # Create combined event to wait for either a completed packet or an
    # error.
    complete = OrEvent(packet_ready, parse_error)

    if not complete.wait(timeout=timeout_s):
        stop_request.set()
        raise RuntimeError('Timed out waiting for packet (after %ss).' %
                           si.si_format((dt.datetime.utcnow() -
                                         start).total_seconds()))
    elif parse_error.is_set():
        # Exception occurred while reading/parsing.
        raise parse_error._exception
    return result['response']
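# Hypothetical use of read_packet() above with a pyserial connection; the
# port name, baud rate and timeout are assumptions.  The read callback only
# needs to return whatever bytes are currently available (possibly empty).
import serial

device = serial.Serial('/dev/ttyUSB0', baudrate=115200, timeout=0)
packet = read_packet(lambda: device.read(device.in_waiting or 1),
                     timeout_s=5)
print(packet)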
def run(self):
    try:
        # g-force changed enough?
        newgforce = self.flight().g_force
        if (abs(newgforce - self.lastgforce) > 0.01):
            self.lastgforce = newgforce
            newgforce = min(abs(newgforce), 5)
            newgforce = int(newgforce * 255 / 5)
            gforcecommand = "A0=" + str(newgforce) + "\n"
            # print(gforcecommand)
            myserwrite(gforcecommand.encode())

        # LCD
        if self.lcdmode == 0:  # switch in middle = Orbit
            val = self.orbit.apoapsis_altitude
            if val >= 0:
                fval = si_format(val, precision=2).rjust(7)[:7] + " "
            else:
                fval = si_format(val, precision=2).rjust(8)[:8]
            line = "P0=A:" + fval + time_format(self.orbit.time_to_apoapsis) + "\n"
            val = self.orbit.periapsis_altitude
            if val >= 0:
                fval = si_format(val, precision=2).rjust(7)[:7] + " "
            else:
                fval = si_format(val, precision=2).rjust(8)[:8]
            line = line + "P1=P:" + fval + time_format(self.orbit.time_to_periapsis) + "\n"
        elif self.lcdmode == 1:  # switch on right = Landing Altitude+Speed
            fval = si_format(self.flight().surface_altitude, precision=3).rjust(8)[:8]
            line = "P0=ALT:" + fval + "m\nP1=V:" + chr(2)
            # print(str(ss)+"\t"+str(vs)+"\t"+str(self.flightstream().g_force)+"\t"+str(self.flight().g_force))
            fval = si_format(abs(self.flightstream().horizontal_speed), precision=0).rjust(5)[:5]
            line = line + fval + " " + chr(3)
            fval = si_format(abs(self.flightstream().vertical_speed), precision=0).rjust(5)[:5]
            line = line + fval + chr(1) + "\n"
        elif self.lcdmode == 2:  # switch on left = Target(?)
            line = "P0=Mode 1 Left\nP1=Target mode\n"
            line = ("P0=Ecct.:" + str(round(self.orbit.eccentricity, 3)) +
                    "\nP1=Incl.:" + str(round(self.orbit.inclination * 180 / pi, 2)) +
                    chr(223) + "\n")
        # print("mode"+str(self.lcdmode)+" "+line)
        myserwrite(bytes([x for x in map(ord, line)]))

        # OLED orbit
        if (time.time() - self.lastoledtime) > 2:  # every 2s
            self.lastoledtime = time.time()
            if self.oledmode == 0:  # switch in middle = Orbit
                cx = int(128 / 2)
                cy = int(16 + (64 - 16) / 2)
                sx = self.orbit.semi_major_axis
                sy = self.orbit.semi_minor_axis
                # print("sx="+str(sx)+"\tsy="+str(sy)+"\n")
                try:
                    scalex = sx / cx
                    scaley = sy / ((64 - 16) / 2)
                    scale = min(scalex, scaley)
                    sx = int(sx / scale)
                    sy = int(sy / scale)
                    if sx == 0:
                        sx = 1
                    if sx >= 64:
                        sx = 63
                    if sy == 0:
                        sy = 1
                    if sy >= 48:
                        sy = 47
                    line = "O5 " + str(int(cx - sx / 2)) + " " + str(cy) + " " + str(sx) + " "
                    line = line + "6 " + str(cx) + " " + str(int(cy - sy / 2)) + " " + str(sy) + " "
                    line = line + "9 " + str(cx) + " " + str(cy) + " " + str(int(sx / 3)) + " " + str(int(sy / 3)) + " "
                    ec = pi / 2 - self.orbit.inclination
                    sx = int(cx + 24 * sin(ec))
                    sy = int(cy - 24 * cos(ec))
                    line = line + "7 " + str(cx) + " " + str(cy) + " " + str(sx) + " " + str(sy) + "\n"
                except ValueError:
                    line = self.lastoledline
            elif self.oledmode == 1:
                line = "O1 10 10 Mode1\\ \n"
            elif self.oledmode == 2:
                line = "O1 10 10 Mode2\\ \n"
            # print("omode"+str(self.oledmode)+"\\"+line+"\\")
            if line != self.lastoledline:
                self.lastoledline = line
                myserwrite(line.encode())
    except krpc.error.RPCError:
        pass
elif args.command == 'search':
    try:
        plugin_name, releases = search(plugin_package=args.plugin,
                                       server_url=args.server_url)
        release_info = OrderedDict()
        release_info['plugin_name'] = [plugin_name] + ((len(releases) - 1) *
                                                       [''])
        release_info['version'] = releases.keys()
        for k in ['upload_time', 'size']:
            release_info[k] = [r[k] for r in releases.values()]
        release_info['upload_time'] = map(lambda timestamp:
                                          dt.datetime
                                          .strptime(timestamp,
                                                    r'%Y-%m-%dT'
                                                    r'%H:%M:%S.%f')
                                          .strftime('%Y-%m-%d %H:%M'),
                                          release_info['upload_time'])
        release_info['size'] = map(lambda s: si.si_format(s, precision=0,
                                                          format_str=
                                                          '{value} {prefix}B'),
                                   release_info['size'])
        print '\n' + pformat_dict(release_info)
    except KeyError, exception:
        print >> sys.stderr, exception.message
elif args.command == 'uninstall':
    for plugin_i in args.plugin:
        uninstall(plugin_package=plugin_i,
                  plugins_directory=args.plugins_directory)
def _test_si_format(value, result):
    '''
    .. versionadded:: 1.0
    '''
    eq_(si_format(value, 2), result)
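# A hypothetical nose-style generator test driving _test_si_format() above;
# the expected strings assume si_prefix's default format_str of
# '{value} {prefix}' (note the trailing space when there is no prefix).
def test_si_format():
    for value, expected in [(1e-9, '1.00 n'),
                            (1234.56, '1.23 k'),
                            (42.0, '42.00 ')]:
        yield _test_si_format, value, expected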