def test_snapshot_insert(self):
    record = ['idhere', 1, '*']
    s = Snapshot(record)
    record = ['another id', 2, '*']
    s.insert(record)
    self.assertEqual(s.acceptor.record_list[1], record)
    self.assertEqual(s.blocks.last_node, s.alives_entries[record[0]])
def __init__(self, sdir, snap_lims, cosmological=0, periodic_bound_fix=False,
             dust_depl=False, statistic='average'):
    self.sdir = sdir
    self.stat = statistic
    self.snap_lims = snap_lims
    self.num_snaps = (snap_lims[1]+1) - snap_lims[0]
    self.cosmological = cosmological
    self.time = np.zeros(self.num_snaps)
    if self.cosmological:
        self.redshift = np.zeros(self.num_snaps)
    self.Flag_DustDepl = dust_depl
    # In case the sim was non-cosmological and used periodic BC which causes
    # galaxy to be split between the 4 corners of the box
    self.pb_fix = False
    if periodic_bound_fix and cosmological == 0:
        self.pb_fix = True
    # Determines if you want to look at the Snapshot/Halo/Disk
    self.setHalo = False
    self.setDisk = False
    # Load the first snapshot to check needed array sizes
    sp = Snapshot(self.sdir, self.snap_lims[0], cosmological=self.cosmological,
                  periodic_bound_fix=self.pb_fix)
    self.dust_impl = sp.dust_impl
    self.m = np.zeros(self.num_snaps)
    self.z = np.zeros(self.num_snaps)
    self.dz = np.zeros(self.num_snaps)
    self.spec = np.zeros([self.num_snaps, sp.Flag_DustSpecies])
    self.source = np.zeros([self.num_snaps, 4])
    return
def getSnapshots(self, sessionId):
    """
    Get a list of Snapshot objects.

    Parameters
    ----------
    sessionId: string
        An identifier for a user session.

    Returns
    -------
    list
        A list of Snapshot objects.
    """
    snapshots = self.con.cmdTagList("getSnapshotObjects", sessionId=sessionId)
    snapshotObjects = []
    if (snapshots[0] != ""):
        for snapshot in snapshots:
            j = json.loads(snapshot)
            snapObj = Snapshot(sessionId, j['type'], j['index'], j['Snapshot'],
                               j['layout'], j['preferences'], j['data'],
                               j['description'], j['dateCreated'], self.con)
            snapshotObjects.append(snapObj)
    return snapshotObjects
def test_snapshot_init(self):
    record = ['idhere', 1, '*']
    s = Snapshot(record)
    self.assertIsInstance(s, Snapshot)
    self.assertIsInstance(s.blocks, Linked_list)
    self.assertIsInstance(s.blocks.last_node.block, block)
    self.assertIsInstance(s.acceptor, block)
def get_balance(self, args):
    if not args.markets:
        logging.error("You must use --markets argument to specify markets")
        sys.exit(2)
    pmarkets = args.markets.split(",")
    pmarketsi = []
    for pmarket in pmarkets:
        exec('import brokers.' + pmarket.lower())
        market = eval('brokers.' + pmarket.lower() + '.Broker' + pmarket + '()')
        pmarketsi.append(market)
    snapshot = Snapshot()
    while True:
        total_btc = 0.
        total_bch = 0.
        for market in pmarketsi:
            market.get_balances()
            print(market)
            total_btc += market.btc_balance
            total_bch += market.bch_balance
            snapshot.snapshot_balance(market.name[7:], market.btc_balance, market.bch_balance)
        snapshot.snapshot_balance('ALL', total_btc, total_bch)
        time.sleep(60 * 10)
def onSnapshot(self, evt):
    s = Snapshot(self, self.settings)
    if s.wasSuccessful():
        s.Show()
    else:
        dlg = wx.MessageDialog(self, "Error Taking Picture", 'Camera Error',
                               wx.OK | wx.ICON_ERROR)
        dlg.ShowModal()
        dlg.Destroy()
def __init__(self, ip_addr='localhost', num_channels=4, fs=800e6,
             logger=logging.getLogger(__name__)):
    """The interface to a ROACH cross correlator

    Keyword arguments:
    ip_addr -- IP address (or hostname) of the ROACH. (default: localhost)
    num_channels -- antennas in the correlator. (default: 4)
    fs -- sample frequency of antennas. (default: 800e6; 800 MHz)
    logger -- logger to use. (default: new default logger)
    """
    self.logger = logger
    self.fpga = corr.katcp_wrapper.FpgaClient(ip_addr)
    time.sleep(0.1)
    self.num_channels = num_channels
    self.fs = np.float64(fs)
    self.cross_combinations = list(itertools.combinations(range(num_channels), 2))
    # [(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)]
    self.control_register = ControlRegister(
        self.fpga, self.logger.getChild('control_reg'))
    self.set_accumulation_len(100)
    self.re_sync()
    self.control_register.allow_trigger()  # necessary as Correlations auto fetch signal
    # only 0x0 has been implemented
    # self.auto_combinations = [(x, x) for x in range(num_channels)]
    #     # [(0, 0), (1, 1), (2, 2), (3, 3)]
    self.auto_combinations = [(0, 0)]
    self.frequency_correlations = {}
    for comb in (self.cross_combinations + self.auto_combinations):
        self.frequency_correlations[comb] = Correlation(
            fpga=self.fpga,
            comb=comb,
            f_start=0,
            f_stop=fs / 2,
            logger=self.logger.getChild("{a}x{b}".format(a=comb[0], b=comb[1])))
    self.time_domain_snap = Snapshot(
        fpga=self.fpga,
        name='dram_snapshot',
        dtype=np.int8,
        cvalue=False,
        logger=self.logger.getChild('time_domain_snap'))
    self.upsample_factor = 100
    self.subsignal_length_max = 2**17
    self.time_domain_padding = 100
    self.time_domain_calibration_values = None
    self.time_domain_calibration_cable_values = None
    self.control_register.block_trigger()
def test_snapshot_query(self):
    record = ['idhere', 1, '*']
    s = Snapshot(record, cap=2, ut=0.6)
    record = ['another id', 2, '*']
    s.insert(record)
    record = ['onemore id', 2, '*']
    s.insert(record)
    self.assertEqual(s.tsquery(3), {'SP', 'idhere', 'another id', 'onemore id'})
    self.assertEqual(s.tsquery(1), {'SP', 'idhere'})
    s.delete('another id', 5)
    self.assertEqual(s.tsquery(6), {'idhere', 'onemore id', 'SP'})
    self.assertEqual(s.trquery(1, 3), s.trquery(1, 2))
    self.assertEqual(s.trquery(1, 3), s.tsquery(1) | s.tsquery(2))
def __init__(self, fpga, comb, f_start, f_stop, logger=logging.getLogger(__name__)):
    """ f_start and f_stop must be in Hz """
    self.logger = logger
    snap_name = "snap_{a}x{b}".format(a=comb[0], b=comb[1])
    self.snapshot0 = Snapshot(
        fpga,
        "{name}_0".format(name=snap_name),
        dtype='>i8',
        cvalue=True,
        logger=self.logger.getChild("{name}_0".format(name=snap_name)))
    self.snapshot1 = Snapshot(
        fpga,
        "{name}_1".format(name=snap_name),
        dtype='>i8',
        cvalue=True,
        logger=self.logger.getChild("{name}_1".format(name=snap_name)))
    self.f_start = np.uint64(f_start)
    self.f_stop = np.uint64(f_stop)
    # This will change from None to an array of phase offsets for each frequency bin
    # if calibration gets applied at a later stage. It is an array of phases
    # introduced by the system, so a positive value means the system introduces a
    # phase shift between comb[0] and comb[1]; in other words, comb[1] is
    # artificially delayed.
    self.calibration_phase_offsets = None
    self.calibration_cable_length_offsets = None
    self.arm()
    self.fetch_signal()
    self.frequency_bins = np.linspace(start=self.f_start,
                                      stop=self.f_stop,
                                      num=len(self.signal),
                                      endpoint=False)
def flatten_by_timestamp(snapshots_log):
    by_timestamp = collections.defaultdict(dict)
    for snapshot in snapshots_log:
        timestamp = snapshot.timestamp
        measurements = snapshot.measurements
        for measurement_name, measurement_result in measurements.iteritems():
            by_timestamp[timestamp][measurement_name] = measurement_result
    timestamps_in_order = sorted(by_timestamp.keys())
    return [
        Snapshot(timestamp, by_timestamp[timestamp])
        for timestamp in timestamps_in_order
    ]
def _snapshotList(self, snapshots, location):
    if not snapshots:
        return []
    snap_list = []
    for snapshot in snapshots:
        location = location + '/'
        snap = Snapshot(snapshot, location)
        snap_list.append(snap)
        snap_list = snap_list + self._snapshotList(
            snapshot.childSnapshotList, location + snapshot.name)
    return snap_list
def trajectory_from_mdtraj(mdtrajectory, simple_topology=False):
    """
    Construct a Trajectory object from an mdtraj.Trajectory object

    Parameters
    ----------
    mdtrajectory : mdtraj.Trajectory
        Input mdtraj.Trajectory
    simple_topology : bool
        if `True` only a simple topology with n_atoms will be created.
        This cannot be used with complex CVs but loads and stores very fast

    Returns
    -------
    openpathsampling.engines.Trajectory
        the constructed Trajectory instance
    """
    trajectory = Trajectory()
    empty_kinetics = Snapshot.KineticContainer(
        velocities=u.Quantity(np.zeros(mdtrajectory.xyz[0].shape),
                              u.nanometer / u.picosecond))
    if simple_topology:
        topology = Topology(*mdtrajectory.xyz[0].shape)
    else:
        topology = MDTrajTopology(mdtrajectory.topology)

    engine = TopologyEngine(topology)

    for frame_num in range(len(mdtrajectory)):
        # mdtraj trajectories only have coordinates and box_vectors
        coord = u.Quantity(mdtrajectory.xyz[frame_num], u.nanometers)
        if mdtrajectory.unitcell_vectors is not None:
            box_v = u.Quantity(mdtrajectory.unitcell_vectors[frame_num],
                               u.nanometers)
        else:
            box_v = None
        statics = Snapshot.StaticContainer(coordinates=coord, box_vectors=box_v)

        snap = Snapshot(statics=statics, kinetics=empty_kinetics, engine=engine)
        trajectory.append(snap)

    return trajectory
def load_total(self):
    for i, snum in enumerate(np.arange(self.snap_lims[0], self.snap_lims[1]+1)):
        sp = Snapshot(self.sdir, snum, cosmological=self.cosmological,
                      periodic_bound_fix=self.pb_fix)
        self.time[i] = sp.time
        if self.cosmological:
            self.redshift[i] = sp.redshift
        if self.setHalo:
            gal = sp.loadhalo(**self.kwargs)
        if self.setDisk:
            gal = sp.loaddisk(**self.kwargs)
        gas = gal.loadpart(0)
        self.z[i] = np.nansum(gas.z[:,0]*gas.m)/np.nansum(gas.m)
        self.dz[i] = np.nansum(gas.dz[:,0]*gas.m)/np.nansum(gas.z[:,0]*gas.m)
        self.spec[i] = np.nansum(gas.spec*gas.m[:,np.newaxis], axis=0)/np.nansum(gas.dz[:,0]*gas.m)
        self.source[i] = np.nansum(gas.dzs*gas.dz[:,0]*gas.m[:,np.newaxis], axis=0)/np.nansum(gas.dz[:,0]*gas.m)
    return
def test_snapshot_delete(self):
    record = ['idhere', 1, '*']
    s = Snapshot(record, cap=2, ut=0.6)
    record = ['another id', 2, '*']
    s.insert(record)
    s.delete('another id', 5)
    self.assertEqual(
        s.blocks.last_node.previous_node.Pce_node.block.record_list[1][2], 5)
    self.assertEqual(
        s.blocks.last_node.previous_node.Pce_node.block.time_interval, [1, 5])
    self.assertTrue(s.blocks.last_node.previous_node.Pce_node.block.isfull)
    self.assertTrue(s.blocks.last_node.previous_node.Pce_node.block.isunderflow)
    self.assertEqual(s.blocks.last_node.block.usage, 1)
    self.assertFalse('another id' in s.alives_entries)
def __init__(self, verification: Instruction = None):
    self.enable = Signal(reset=1)
    self.addr = Signal(16)
    self.din = Signal(8)
    self.dout = Signal(8)
    self.RWB = Signal(reset=1)  # 1 = read, 0 = write

    # registers
    self.reg = Registers()
    self.tmp = Signal(8)  # temp signal when reading 16 bits

    # internal exec state
    self.opcode = Signal(8)
    self.cycle = Signal(4, reset=1)

    # formal verification
    self.verification = verification
    self.snapshot = Snapshot()
def load_average(self):
    for i, snum in enumerate(np.arange(self.snap_lims[0], self.snap_lims[1]+1)):
        sp = Snapshot(self.sdir, snum, cosmological=self.cosmological,
                      periodic_bound_fix=self.pb_fix)
        self.time[i] = sp.time
        if self.cosmological:
            self.redshift[i] = sp.redshift
        if self.setHalo:
            gal = sp.loadhalo(**self.kwargs)
        if self.setDisk:
            gal = sp.loaddisk(**self.kwargs)
        gas = gal.loadpart(0)
        self.z[i] = weighted_percentile(gas.z[:,0], percentiles=[50],
                                        weights=gas.m, ingore_invalid=True)
        self.dz[i] = weighted_percentile(gas.dz[:,0]/gas.z[:,0], percentiles=[50],
                                         weights=gas.m, ingore_invalid=True)
        for j in range(sp.Flag_DustSpecies):
            self.spec[i,j] = weighted_percentile(gas.spec[:,j]/gas.dz[:,0],
                                                 percentiles=[50], weights=gas.m,
                                                 ingore_invalid=True)
        for j in range(4):
            self.source[i,j] = weighted_percentile(gas.dzs[:,j], percentiles=[50],
                                                   weights=gas.m, ingore_invalid=True)
    return
async def main():
    snapshot = Snapshot()
    web.start_http_server_in_thread(port=8000)
    while True:
        print('Getting snapshot...')
        await snapshot.update()
        dBFS = scan()
        VU_METER.set(dBFS)
        an = analysis()
        misc_bitrate = 0
        for pid, data in an['pids'].items():
            if pid in [564, 768]:
                continue
            misc_bitrate += int(data['bitrate'])
        MISC_BITRATE.set(misc_bitrate)
        VIDEO_BITRATE.set(an['pids'][564]['bitrate'])
        AUDIO_BITRATE.set(an['pids'][768]['bitrate'])
        await asyncio.sleep(5)
def main():
    fname = 'snapshot_test.txt'
    print('Start reading and indexing file...')
    start = time.time()
    with open(fname, 'r') as f:
        line = f.readline().split()
        Sindex = Snapshot([line[1], int(line[0]), '*'], cap=CAPACITY, ut=UTILIZE)
        for l in f:
            line = l.split()
            op = line[2]
            if (op == 'b'):
                Sindex.insert([line[1], int(line[0]), '*'], cap=CAPACITY, ut=UTILIZE)
            elif (op == 'd'):
                Sindex.delete(line[1], int(line[0]))
    duration = time.time() - start
    print('')
    print('Index done in: {0:.2f}s'.format(duration))
    print('')
    print('Enter the query type (ts or tr) and time')
    print('timeslice query format: ts time_instance')
    print('timerange query format: tr min_time max_time')
    print('Enter anything else to quit')
    print('')
    for line in sys.stdin:
        qtype, *t = line.split()
        if (qtype == 'ts'):
            start = time.time()
            result = Sindex.tsquery(int(t[0]))
            duration = time.time() - start
        elif (qtype == 'tr'):
            start = time.time()
            result = Sindex.trquery(int(t[0]), int(t[1]))
            duration = time.time() - start
        else:
            quit()
        print('result: ', result)
        print('')
        print('Get time result in {0:.5f}s'.format(duration))
def newSnapshot(self, sessionId, saveName, saveLayout, savePreferences, saveData,
                description):
    """
    Create a new Snapshot object that can be saved.

    Parameters
    ----------
    sessionId: string
        An identifier for a user session.
    saveName: string
        The name of the snapshot.
    saveLayout: boolean
        True if the layout is to be saved; false otherwise.
    savePreferences: boolean
        True if the preferences are to be saved; false otherwise.
    saveData: boolean
        True if the data is to be saved; false otherwise.
    description: string
        Descriptive information about the snapshot.

    Returns
    -------
    Snapshot
        A new instance of the Snapshot class.
    """
    snapshot = Snapshot(sessionId=sessionId, snapshotType='', index=0,
                        saveName=saveName, layout=saveLayout,
                        preferences=savePreferences, data=saveData,
                        description=description, dateCreated='',
                        connection=self.con)
    return snapshot
def measure(measurements_to_extract, as_of, download_dir, measurements_log):
    measurement_results = {}
    for measurement in measurements_to_extract:
        logging.info({
            'message': 'Attempting measurement',
            'as-of': timestamp_utils.to_string(as_of),
            'measurement.name': measurement.name,
            'page.name': measurement.page.name,
            'page.url': measurement.page.url
        })
        page_directory = page_download_directory(download_dir, measurement.page.name)
        page_filename = timestamped_filename(page_directory, as_of)
        if not os.path.exists(page_filename):
            logging.error({
                'message': "Couldn't find downloaded content",
                'filename': page_filename
            })
            continue
        content = file(page_filename).read()
        try:
            result = measurement.parse(content)
            logging.info({
                'message': 'Recording measured result',
                'as-of': timestamp_utils.to_string(as_of),
                'measurement.name': measurement.name,
                'measurement-result': result
            })
            measurement_results[measurement.name] = result
        except BaseException as e:
            logging.info({'message': 'Measurement failed', 'exception': e})
    append_to_log(initialize_measurement_log(measurements_log),
                  Snapshot(as_of, measurement_results))
def differences_of(snapshots):
    def diff_measurement_name(measurement_name):
        return measurement_name + '.diff'

    measurements_to_diff = all_measurement_names(snapshots)
    answer = []
    for row_before, row_after in zip(snapshots, snapshots[1:]):
        diff = {}
        for measurement in measurements_to_diff:
            before = row_before.measurements[measurement]
            after = row_after.measurements[measurement]
            if (after is not None) and (before is not None):
                diff_measurement = after - before
            else:
                diff_measurement = None
            diff[diff_measurement_name(measurement)] = diff_measurement
        answer.append(Snapshot(row_after.timestamp, diff))
    return answer
def get_balance(self, args):
    if not args.markets:
        logging.error("You must use --markets argument to specify markets")
        sys.exit(2)
    pmarkets = args.markets.split(",")
    brokers = create_brokers(pmarkets)
    snapshot = Snapshot()
    while True:
        total_btc = 0.
        total_bch = 0.
        for market in brokers.values():
            market.get_balances()
            print(market)
            total_btc += market.btc_balance
            total_bch += market.bch_balance
            snapshot.snapshot_balance(market.name[7:], market.btc_balance, market.bch_balance)
        snapshot.snapshot_balance('ALL', total_btc, total_bch)
        time.sleep(60 * 10)
def init_vipr_cli_components(self):
    import common as vipr_utils
    vipr_utils.COOKIE = None

    from exportgroup import ExportGroup
    from host import Host
    from hostinitiators import HostInitiator
    from snapshot import Snapshot
    from virtualarray import VirtualArray
    from volume import Volume

    # instantiate a few vipr cli objects for later use
    self.volume_obj = Volume(self.configuration.vipr_hostname,
                             self.configuration.vipr_port)
    self.exportgroup_obj = ExportGroup(self.configuration.vipr_hostname,
                                       self.configuration.vipr_port)
    self.host_obj = Host(self.configuration.vipr_hostname,
                         self.configuration.vipr_port)
    self.hostinitiator_obj = HostInitiator(self.configuration.vipr_hostname,
                                           self.configuration.vipr_port)
    self.varray_obj = VirtualArray(self.configuration.vipr_hostname,
                                   self.configuration.vipr_port)
    self.snapshot_obj = Snapshot(self.configuration.vipr_hostname,
                                 self.configuration.vipr_port)
df = get_dataflow(
    annot_path='%s/annotations/person_keypoints_%s2017.json' % (DATA_DIR, args.dataset),
    img_dir='%s/%s2017/' % (DATA_DIR, args.dataset))
train_samples = df.size()
print('Collected %d val samples...' % train_samples)
train_df = batch_dataflow(df, batch_size, time_steps=args.time_steps, format=__format)
train_gen = gen(train_df)

print(model.inputs[0].get_shape())
# print(model.outputs)

from snapshot import Snapshot

model.fit_generator(train_gen,
                    steps_per_epoch=train_samples // batch_size,
                    epochs=args.epochs,
                    callbacks=[
                        lrate,
                        Snapshot(args.name, train_gen, __format, stills=True)
                    ],
                    use_multiprocessing=False,
                    initial_epoch=0,
                    verbose=1)
def main():
    snapshot = Snapshot()
    snapshot.process_pods()
def current_snapshot(self):
    snap_pos = self.positions
    snap_vel = self.velocities
    return Snapshot(coordinates=np.array([snap_pos]),
                    velocities=np.array([snap_vel]),
                    engine=self)
from algorithms.takaffoli import takaffoli
from algorithms.greene import greene
from algorithms.louvain_modified import louvain_modified
from algorithms.tiles import tiles
from algorithms.multistep import ms_sum, ms_avg
from helpers import build_sum_graph
from snapshot import Snapshot
import louvain

snapshots = []
communities = []

# create snapshots from graph and extract communities from each one
for i in range(1, 13):
    s = Snapshot(i, "enron2001.txt")
    cs = louvain.find_partition(s.get_graph(), louvain.ModularityVertexPartition)
    snapshots.append(s)
    clusters = []
    for c in cs:
        if len(c) > 1:
            community = s.get_vertices(c)
            clusters.append(community)
    communities.append(clusters)

# add any algorithm execution here:
# greene and takaffoli take communities, while everything else takes snapshots
    x = Dropout(0.2)(x)
    x = Flatten()(x)
    x = Dense(128, activation='relu')(x)
    x = Dropout(0.2)(x)
    x_out = Dense(10, activation='softmax')(x)
    return Model(inputs=tensor, outputs=x_out)


model = get_model(Input(shape=(28, 28, 1)))
model.compile(
    optimizer=SGD(lr=0.1, momentum=0.9, nesterov=True),
    loss='categorical_crossentropy',
    metrics=['accuracy']
)

cbs = [Snapshot('snapshots', nb_epochs=6, verbose=1, nb_cycles=2)]

model.fit(
    x=x_train, y=y_train,
    verbose=1,
    batch_size=124,
    epochs=5,
    callbacks=cbs
)

del model

# Loading the ensemble
print('Loading ensemble...')
keep_last = 2


def load_ensemble(folder, keep_last=None):
def snapshot():
    snapshot = Snapshot()
    return snapshot
train_gen = gen(train_df)
train_samples = df.size()
print(' [*] Collected %d val samples...' % train_samples)
print(model.input)
print(model.outputs)

loaded = 0
for layer in model.layers:
    if isinstance(layer, Conv2D):
        wmat = np.load('../tf/ver1/model/weights/%s_matrix.npy' % layer.name)
        bias = np.load('../tf/ver1/model/weights/%s_bias.npy' % layer.name)
        try:
            layer.set_weights([wmat, bias])
            loaded += 1
        except:
            print(' [x] failed to load: %s' % layer.name)
assert loaded != 0
print(' [*] Loaded %d weights' % loaded)

model.fit_generator(train_gen,
                    steps_per_epoch=train_samples // batch_size,
                    epochs=args.epochs,
                    callbacks=[lrate, Snapshot(args.name, train_gen)],
                    # callbacks=[lrate],
                    use_multiprocessing=False,
                    initial_epoch=args.last_epoch,
                    verbose=1)