def main():
    handleArgs(sys.argv)
    data = Dump(__DUMP__)
    stockData = data.getStocks()
    myStocks = []
    for sd in stockData:
        newStock = Stock(sd['name'], sd['holding'])
        myStocks.append(newStock)

    print(trade.connect())

    if __CLOSE__ == myStocks[0].getTime():
        print('Market is closed')
        return

    mylog.baseLog('Open for the day')
    while __CLOSE__ != myStocks[0].getTime():
        for stock in myStocks:
            stock.update()
            indicator = checkStock(stock)
            if indicator > 0:
                stock.updateHolding(trade.buy(stock))
            elif indicator < 0:
                stock.updateHolding(trade.sell(stock))

    toJson = []
    for stock in myStocks:
        mylog.baseLog('{n} closed at {v}'.format(n=stock.getName(),
                                                 v=stock.getLast()))
        stockDump = stock.dump()
        stockDump['holding'] = -1
        toJson.append(stockDump)
    print(toJson)
    data.outDump({'stocks': toJson})
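# A hedged sketch of what the checkStock() indicator used by main() could
# look like; the original does not show it. This hypothetical version
# assumes Stock exposes a getHistory() helper returning a list of past
# prices (an assumption, not shown above) and signals via a simple
# moving-average crossover: positive to buy, negative to sell, zero to hold.
def checkStock(stock, short_window=5, long_window=20):
    history = stock.getHistory()   # hypothetical: list of recent prices
    if len(history) < long_window:
        return 0                   # not enough data; hold
    short_avg = sum(history[-short_window:]) / short_window
    long_avg = sum(history[-long_window:]) / long_window
    if short_avg > long_avg:
        return 1                   # short-term momentum up: buy signal
    if short_avg < long_avg:
        return -1                  # short-term momentum down: sell signal
    return 0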
def save(self, path: str):
    '''Save the collected data metrics, writing one Dump per task.'''
    if not self._metrics:
        raise NoMetricsException('Metrics is not loaded')
    if len(self._tasks) == 0:
        raise NoTasksException('Tasks is not defined')
    for t in self._tasks:
        metrics = self._metrics_store[t.title()]
        d = Dump(t, metrics)
        d.save(path)
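# A minimal usage sketch for save() above, under stated assumptions: the
# method lives on a hypothetical metrics container (here called
# MetricsReport, a made-up name), self._tasks holds task objects whose
# title() keys self._metrics_store, and Dump(task, metrics).save(path)
# writes one file per task.
report = MetricsReport()            # hypothetical container class
report.load_metrics('run-42')       # hypothetical loader
try:
    report.save('/tmp/metrics')     # one Dump written per task
except (NoMetricsException, NoTasksException) as err:
    print('nothing to save: {}'.format(err))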
def construct_main_loop(name, task_name, patch_shape, batch_size,
                        n_spatial_dims, n_patches, n_epochs,
                        learning_rate, hyperparameters, **kwargs):
    name = "%s_%s" % (name, task_name)
    hyperparameters["name"] = name
    task = get_task(**hyperparameters)
    hyperparameters["n_channels"] = task.n_channels

    x_uncentered, y = task.get_variables()
    x = task.preprocess(x_uncentered)
    # this is a theano variable; it may depend on the batch
    hyperparameters["image_shape"] = x.shape[-n_spatial_dims:]

    ram = construct_model(task=task, **hyperparameters)
    ram.initialize()

    hs = ram.compute(x, n_patches)
    cost = ram.emitter.cost(hs, y, n_patches)
    cost.name = "cost"

    print "setting up main loop..."
    graph = ComputationGraph(cost)
    uselessflunky = Model(cost)
    algorithm = GradientDescent(cost=cost,
                                parameters=graph.parameters,
                                step_rule=Adam(learning_rate=learning_rate))
    monitors = construct_monitors(x=x, x_uncentered=x_uncentered, y=y,
                                  hs=hs, cost=cost, algorithm=algorithm,
                                  task=task, model=uselessflunky, ram=ram,
                                  graph=graph, **hyperparameters)
    main_loop = MainLoop(
        data_stream=task.get_stream("train"),
        algorithm=algorithm,
        extensions=(monitors +
                    [FinishAfter(after_n_epochs=n_epochs),
                     DumpMinimum(name + '_best',
                                 channel_name='valid_error_rate'),
                     Dump(name + '_dump', every_n_epochs=10),
                     #Checkpoint(name+'_checkpoint.pkl', every_n_epochs=10, on_interrupt=False),
                     ProgressBar(),
                     Timing(),
                     Printing(),
                     PrintingTo(name + "_log")]),
        model=uselessflunky)
    return main_loop
import math
import sys

from dump import Dump


def gen_msd(log, dr):
    # parse the two trajectory frames to compare
    a0 = Dump("t0.lammpstrj")
    b0 = a0.parser()
    a1 = Dump("t1.lammpstrj")
    b1 = a1.parser()
    n0 = len(b0.atoms)
    n1 = len(b1.atoms)
    log.write("t0 has %d atoms\n" % n0)
    log.write("t1 has %d atoms\n" % n1)
    if n0 != n1:
        sys.stderr.write("Atom numbers are different!\n")
        sys.stderr.flush()
        sys.exit()
    else:
        natom = n0

    neb_atoms = []
    o = open("detail.dat", "w")
    for i in range(natom):
        # squared displacement of atom i between the two frames
        r2 = 0
        for j in range(3):
            d = b0.atoms[i].x[j] - b1.atoms[i].x[j]
            r2 += d * d
        if r2 > dr * dr:
            neb_atoms.append(b1.atoms[i])
            o.write("%d\t%.4f\t%.4f\t%.4f\t%.4f\n"
                    % (i + 1, math.sqrt(r2),
                       b1.atoms[i].x[0], b1.atoms[i].x[1], b1.atoms[i].x[2]))
    o.close()

    o = open("final.coords", "w")
    o.write("%d\n" % len(neb_atoms))
    for i in neb_atoms:
        o.write("%d\t%.4f\t%.4f\t%.4f\n" % (i.an, i.x[0], i.x[1], i.x[2]))
    o.close()
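# A hedged usage sketch for gen_msd() above: it expects an open, writable
# log handle and a displacement threshold dr in the trajectory's length
# units. "t0.lammpstrj" and "t1.lammpstrj" must already exist in the
# working directory; the log file name and threshold below are only
# illustrative.
if __name__ == "__main__":
    with open("msd.log", "w") as log:   # hypothetical log file name
        gen_msd(log, dr=0.5)            # flag atoms displaced by more than 0.5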
def __run_case__(self, flow, caseName, maxitimes, minitimes, caseflow,
                 notsuccess_handler):
    if flow.info['mode'] == 'maxi':
        times = maxitimes
    else:
        times = minitimes
    return LJson.SetCurrRunDataInFlow(CaseName=caseName), FOR(times)(
        SWITCH()[
            NOT(*caseflow),
            Failes(caseName),
            Dump(flow.dm.runde),
            notsuccess_handler,
        ][
            Passes,
            Wait(5)
        ]
    ), SwitchMDevice
def produce_ast(input_file, errorlog, log=True):
    """ Parse an input string to produce an AST.
    """
    vmsg(v, "Parsing file '{}'".format(infile if infile else 'stdin'))

    # Set up logging if we need to
    if log:
        logging.basicConfig(
            level=logging.DEBUG,
            filename=defs.PARSE_LOG_FILE,
            filemode="w",
            format="%(filename)10s:%(lineno)4d:%(message)s")
        logger = logging.getLogger()
    else:
        logger = 0

    # Create the parser and produce the AST
    parser = Parser(errorlog, lex_optimise=True,
                    yacc_debug=False, yacc_optimise=False)
    ast = parser.parse(input_file, infile, debug=logger)
    if errorlog.any():
        raise QuietError()

    # Perform parsing only
    if parse_only:
        raise SystemExit()

    # Display (dump) the AST
    if print_ast:
        ast.accept(Dump())
        raise SystemExit()

    # Display (pretty-print) the AST
    if pprint_raw_ast:
        Printer().walk_program(ast)
        raise SystemExit()

    return ast
from block import dumpBlock
from dump import Dump
from output_conf import toXyz, toPdb

lmpfile = "dump.lammpstrj"
sepfile = "dump.sep"
dt = 1

# split the dump file with multiple configurations into separate dump files
nframe = dumpBlock(lmpfile, sepfile, dt)
nframe += 1

for i in range(0, nframe, dt):
#for i in range(10):
    a = Dump("%s%05d.dump" % (sepfile, i))
    b = a.parser()
    #toXyz(b, "xyz%05d.xyz" % i)
    #b.sortXYZ("z")
    toPdb(b, "pdb%05d.pdb" % i)
#        for j in range(i+1, n):
#            if isASCII(words[j]) == False:  # Chinese word
#                #r_cn = words[j]
#                for k in range(len(twords)):
#                    if syn.isSyn(twords[k], words[j]):
#                        scores[k][1] += 1
#
#        for k in range(len(twords)):
#            if scores[k][1] > 0:
##                features['#TRAN_%s' % twords[k]] = 1
#                features['%s' % twords[k]] = 1
#
#    return features

dump = Dump()


def getTranslateFeaturesByLM(p, dict, lm):
    features = {}
    # basic features
    for w in p.words:
        features[w] = 1
    pmiFreqWords = getPMIFrequencyWords()

    # translate features
    words = [w.lower() for w in p.content.split()]
    n = len(words)
    for i in range(n):
        w = words[i]
        if dict.isIn(w):  # is English word
#!/usr/bin/env python3.3
# -*- coding: utf-8 -*-
########################################################
# dump2avp is used for converting bochs dump files to avp #
########################################################

import sys

from dump import Dump
import tracer
from logging import info, debug, error, warning, critical

dump = Dump(sys.argv[1:])
dump.Parse_dump()
#dump.print_arch_regs(tracer.major_dump_result)  # for debug
#dump.print_tr7(tracer.major_dump_initial)
#dump.print_msrs(tracer.major_dump_initial)
#dump.print_fpu(tracer.major_dump_initial)
#dump.print_mmx_sse_avx(tracer.major_dump_initial)
#dump.print_apic(tracer.major_dump_initial)
dump.gen_avp()
def __init__(self, num=None, param_file=None, path='./'):
    self.dump = Dump(num, param_file, path)
    self.param = self.dump.param
class DumpID(object):
    """ Write better doc strings!

        TODO: Debug for different simulations
    """

    def __init__(self, num=None, param_file=None, path='./'):
        self.dump = Dump(num, param_file, path)
        self.param = self.dump.param

    def read_fields(self):
        """ Wrapper for the Dump method read_fields

            Args:
                None
            Returns:
                dict: dictionary containing E and B fields read from the
                    dump files

            NOTE: We include this because it is common to call only DumpID
            when we still need the field values
        """
        return self.dump.read_fields()

    def get_part_in_box(self, r=[1., 1.], dx=[.5, .5],
                        par=False, species=None, tags=False):
        """ Takes a box defined by its center position r and its widths dx
            and gets the particle data
        """
        r0 = [1., 1., .5]
        dx0 = [.5, .5, 1.]
        for c, (r_i, dx_i) in enumerate(zip(r, dx)):
            r0[c] = r_i
            dx0[c] = dx_i

        if species is None:
            parts = {'i': [], 'e': []}
        else:
            parts = {species: []}

        if par:
            if self._is_2D():
                if 'fields' not in self.__dict__:
                    print 'Reading Fields...'
                    self.fields = self.read_fields()
            else:
                if 'fields' not in self.__dict__:
                    print 'Reading Fields...'
                    self.fields = self._get_fld_index_in_zplane(r0[2], dx0[2])
                elif r0[2] - dx0[2]/2. > self.fields['zz'][0] and \
                     r0[2] + dx0[2]/2. < self.fields['zz'][-1]:
                    print 'Reading Fields...'
                    self.fields = self._get_fld_index_in_zplane(r0[2], dx0[2])

        dump_and_index = self._get_procs_in_box(r0[0], dx0[0],
                                                r0[1], dx0[1],
                                                r0[2], dx0[2])
        for d in dump_and_index:
            print 'Reading Parts from p3d-{0}.{1}...'.format(d, self.dump.num)
            data = self.dump.read_particles(d,
                                            wanted_procs=dump_and_index[d],
                                            tags=tags)
            for sp in parts:
                parts[sp] += [data[sp][g] for g in dump_and_index[d]]

        #if tags:
        #    parts = self._combine_parts_and_tags(parts)

        for sp in parts:
            for c, p in enumerate(parts[sp]):
                print '    Trimming {0} from {1}...'.format(sp, c)
                parts[sp][c] = self._trim_parts(p, r0, dx0)
            parts[sp] = np.hstack(parts[sp])
            if par:
                parts[sp] = self._rotate_parts(parts[sp], r0, dx0)

        if len(parts.keys()) == 1:
            parts = parts[parts.keys()[0]]
        return parts

    def _combine_parts_and_tags(self, parts):
        """ Merge the particle tags and phase space values

            There is a small chance that this may be slow! So maybe come
            back and try to fix it, if it needs it.

            !!!WARNING!!!: This is now defunct because of changes made in
            the dump.py file.
        """
        bind_parts = {}
        for sp in parts:
            part_dtype = parts[sp][0][0].dtype.descr
            tag_dtype = [('tag', parts[sp][0][1].dtype)]
            new_dtype = np.dtype(part_dtype + tag_dtype)
            bind_parts[sp] = []
            for g, (phase_space, tags) in enumerate(parts[sp]):
                pts = np.empty(np.size(phase_space), dtype=new_dtype)
                for fld in pts.dtype.fields:
                    if fld == 'tag':
                        pts[fld] = tags
                    else:
                        pts[fld] = phase_space[fld]
                bind_parts[sp].append(pts)
        return bind_parts

    def _rotate_parts(self, p0, r0, dx0):
        b0, e0 = self._interp_fields(r0)
        #pdb.set_trace()
        if np.sum(e0**2) < np.spacing(10):
            # Sometimes there are just no E fields
            exb = np.cross(b0, np.array([0., 1., 0.]))
        else:
            exb = np.cross(b0, e0)
        exb = exb / np.sqrt(np.sum(exb**2))
        bbb = b0 / np.sqrt(np.sum(b0**2))
        beb = np.cross(bbb, exb)

        ntype = p0.dtype['vx'].type
        extra_dt = [('v0', ntype), ('v1', ntype), ('v2', ntype)]
        new_dt = np.dtype(p0.dtype.descr + extra_dt)
        p1 = np.zeros(p0.shape, dtype=new_dt)
        #for v in ['x', 'y', 'z', 'vx', 'vy', 'vz']:
        for v in p0.dtype.fields:
            p1[v] = p0[v]
        for v, ehat in zip(('v0', 'v1', 'v2'), (bbb, exb, beb)):
            p1[v] = ehat[0]*p0['vx'] + ehat[1]*p0['vy'] + ehat[2]*p0['vz']
        return p1

    def _interp_fields(self, r0):
        if self._is_2D():
            sim_lens = [self.param['l' + v] for v in ['x', 'y']]
            r0 = r0[:2]
        else:
            sim_lens = [self.param['l' + v] for v in ['x', 'y', 'z']]
            r0 = r0[:3]
        b0 = np.empty(3)
        e0 = np.empty(3)
        for g, fld in enumerate(['bx', 'by', 'bz']):
            b0[g] = interp_field(self.fields[fld], r0, sim_lens)
        for g, fld in enumerate(['ex', 'ey', 'ez']):
            e0[g] = interp_field(self.fields[fld], r0, sim_lens)
        return b0, e0
        #bx_intp = interp_field()
        #raise NotImplementedError()

    def _trim_parts(self, p0, r0, dx0):
        if self._is_2D():
            vrng = ['x', 'y']
        else:
            vrng = ['x', 'y', 'z']
        for c, v in enumerate(vrng):
            to_mask = np.where((r0[c] - dx0[c]/2. <= p0[v]) &
                               (p0[v] <= r0[c] + dx0[c]/2.))
            p0 = p0[to_mask]
        return p0

    def _is_2D(self):
        if self.param['pez'] * self.param['nz'] == 1:
            return True
        else:
            return False

    def _get_fld_index_in_zplane(self, z0, dz):
        """ I think the way fields are stored is like this: if you have
            points in the z direction, every dump file has
            pez*nz/nchannels fields on it.

            Example: pez = 16, nz = 32, nchannels = 128, so
                p3d-001.00 has z = [0,1,2,3]*dgrid
                p3d-002.00 has z = [4,5,6,7]*dgrid
                etc.

            So all we really want to do is find what z our point
            corresponds to in index space
        """
        zmin = z0 - dz / 2.
        zmax = z0 + dz / 2.
        ind_min = self._z_to_index(zmin)
        ind_max = self._z_to_index(zmax)
        index = range(ind_min, ind_max + 1)
        flds = self.dump.read_fields(index)
        # We need to alter the zz array to account for this
        idz = (self.param['pez'] * self.param['nz']) / self.param['nchannels']
        sub_zz_index = []
        for ind in index:
            sub_zz_index += range((ind - 1) * idz, ind * idz)
        flds['zz'] = flds['zz'][sub_zz_index]
        return flds

    def _get_procs_in_box(self, x0, dx, y0, dy, z0, dz):
        """ Takes the real r position and returns which dump file the
            particles corresponding to that position will be on, as well
            as the index position in the list of processors on that dump
            file.
        """
        proc_dx = np.array([self.param['lx'] / self.param['pex'],
                            self.param['ly'] / self.param['pey'],
                            self.param['lz'] / self.param['pez']])
        r0 = np.array([x0, y0, z0])
        dx = np.array([dx, dy, dz])
        r0_rng = []
        for c in range(3):
            r0_rng.append(np.arange(r0[c] - dx[c] / 2.,
                                    r0[c] + dx[c] / 2.,
                                    proc_dx[c]))
            if r0_rng[c][-1] < r0[c] + dx[c] / 2.:
                r0_rng[c] = np.hstack((r0_rng[c], r0[c] + dx[c] / 2.))
        #print 'r0_rng = ', r0_rng
        p0_rng = []
        for x in r0_rng[0]:
            for y in r0_rng[1]:
                for z in r0_rng[2]:
                    p0_rng.append(self._r0_to_proc(x, y, z))
        print p0_rng
        p0_rng = set(p0_rng)  # This removes duplicates
        di_dict = {}
        for p in p0_rng:
            d = self._proc_to_dumplocation(*p)
            if d[0] in di_dict:
                di_dict[d[0]].append(d[1])
            else:
                di_dict[d[0]] = [d[1]]
        for k in di_dict:
            di_dict[k].sort()
            di_dict[k] = list(set(di_dict[k]))
        return di_dict

    def _z_to_index(self, z0):
        if (self.param['pez'] * self.param['nz']) % self.param['nchannels'] != 0:
            raise NotImplementedError()
        lz = self.param['lz']
        dz = lz / 1. / self.param['pez'] / self.param['nz']
        idz = (self.param['pez'] * self.param['nz']) / self.param['nchannels']
        err_msg = '{0} value {1} is outside of the simulation boundary '\
                  '[0.,{2}]. Setting {0} = {3}'
        if self._is_2D():
            ind = 1
        else:
            if z0 < 0.:
                print err_msg.format('Z', z0, lz, 0.)
                ind = 1
            elif z0 >= lz:
                print err_msg.format('Z', z0, lz, lz)
                ind = self.param['nchannels']
            else:
                ind = (int(np.floor(z0 / dz))) // idz + 1
        return ind

    def _r0_to_proc(self, x0, y0, z0):
        """ Returns the px, py, pz processor for given values of x, y and z
        """
        lx = self.param['lx']
        ly = self.param['ly']
        lz = self.param['lz']
        err_msg = '{0} value {1} is outside of the simulation boundary '\
                  '[0.,{2}]. Setting {0} = {3}'
        if x0 < 0.:
            print err_msg.format('X', x0, lx, 0.)
            px = 1
        elif x0 > lx:
            print err_msg.format('X', x0, lx, lx)
            px = self.param['pex']
        else:
            px = int(np.floor(x0 / self.param['lx'] * self.param['pex'])) + 1
        if y0 < 0.:
            print err_msg.format('Y', y0, ly, 0.)
            py = 1
        elif y0 > ly:
            print err_msg.format('Y', y0, ly, ly)
            py = self.param['pey']
        else:
            py = int(np.floor(y0 / self.param['ly'] * self.param['pey'])) + 1
        if self._is_2D():
            pz = 1
        else:
            if z0 < 0.:
                print err_msg.format('Z', z0, lz, 0.)
                pz = 1
            elif z0 > lz:
                print err_msg.format('Z', z0, lz, lz)
                pz = self.param['pez']
            else:
                pz = int(np.floor(z0 / self.param['lz'] * self.param['pez'])) + 1
        return px, py, pz

    def _proc_to_dumplocation(self, px, py, pz):
        """ Returns the dump index (di), as well as the position in the
            array returned by _get_particles(dump_index=di)

            Big Note: There are two ways Marc stores procs on dump files,
            an old way and a new way, and we need to distinguish which way
            we are using.
                Old Way: scan over Y, scan over X, then scan over Z
                New Way: scan over X, scan over Y, then scan over Z
        """
        pex = self.param['pex']
        pey = self.param['pey']
        pez = self.param['pez']
        nch = self.param['nchannels']
        if pex * pey * pez % nch != 0:
            raise NotImplementedError()
        dump_IO_version = 'V1'
        if self.param.has_key('USE_IO_V2'):
            warn_msg = 'USE_IO_V2 not properly coded at this time! '\
                       'Use at your own risk!'
            dump_IO_version = 'V2'
            warnings.warn(warn_msg)
        if dump_IO_version == 'V1':
            #print 'Using IO V1...'
            N = (px - 1) % nch + 1
            R = (pz - 1) * (pex / nch) * pey + (pex / nch) * (py - 1) + \
                (px - 1) / nch
        else:  # dump_IO_version == 'V2'
            #print 'Using IO V2...'
            npes_per_dump = pex * pey * pez / nch
            pe = (pz - 1) * pex * pey + (py - 1) * pex + (px - 1)
            N = pe / npes_per_dump + 1
            R = pe % npes_per_dump
        return _num_to_ext(N), R
from block import dumpBlock
from dump import Dump
from output_conf import toXyz, toPdb, toPoscar, toReaxLammps

lmpfile = "dump.lammpstrj"
sepfile = "dump.sep"
dt = 1

# split the dump file with multiple configurations into separate dump files
nframe = dumpBlock(lmpfile, sepfile, dt)
nframe += 1

for i in range(0, nframe, dt):
#for i in range(10):
    a = Dump("%s%05d.dump" % (sepfile, i))
    b = a.parser()
    b.assignAtomTypes()
    b.assignEleTypes()
    b.toFrac()
    toXyz(b, "xyz%05d.xyz" % i)
    #b.sortXYZ("z")
    toPdb(b, "pdb%05d.pdb" % i)
    toReaxLammps(b, "lammps.data")
    toPoscar(b)
def construct_main_loop(name, task_name, patch_shape, batch_size,
                        n_spatial_dims, n_patches, n_epochs,
                        learning_rate, hyperparameters, **kwargs):
    name = "%s_%s" % (name, task_name)
    hyperparameters["name"] = name
    task = get_task(**hyperparameters)
    hyperparameters["n_channels"] = task.n_channels

    theano.config.compute_test_value = "warn"

    x, x_shape, y = task.get_variables()

    ram = construct_model(task=task, **hyperparameters)
    ram.initialize()

    states = []
    states.append(ram.compute_initial_state(x, x_shape, as_dict=True))
    n_steps = n_patches - 1
    for i in xrange(n_steps):
        states.append(ram.apply(x, x_shape, as_dict=True, **states[-1]))

    emitter = task.get_emitter(input_dim=ram.get_dim("states"),
                               **hyperparameters)
    emitter.initialize()

    cost = emitter.cost(states[-1]["states"], y, n_patches)
    cost.name = "cost"

    print "setting up main loop..."
    graph = ComputationGraph(cost)
    uselessflunky = Model(cost)
    algorithm = GradientDescent(cost=cost,
                                parameters=graph.parameters,
                                step_rule=CompositeRule([
                                    StepClipping(1.),
                                    Adam(learning_rate=learning_rate)]))
    monitors = construct_monitors(x=x, x_shape=x_shape, y=y, cost=cost,
                                  algorithm=algorithm, task=task,
                                  model=uselessflunky, ram=ram, graph=graph,
                                  **hyperparameters)
    main_loop = MainLoop(
        data_stream=task.get_stream("train"),
        algorithm=algorithm,
        extensions=(monitors +
                    [FinishAfter(after_n_epochs=n_epochs),
                     DumpMinimum(name + '_best',
                                 channel_name='valid_error_rate'),
                     Dump(name + '_dump', every_n_epochs=10),
                     #Checkpoint(name+'_checkpoint.pkl', every_n_epochs=10, on_interrupt=False),
                     ProgressBar(),
                     Timing(),
                     Printing(),
                     PrintingTo(name + "_log")]),
        model=uselessflunky)
    return main_loop
from pybricks.hubs import EV3Brick
from pybricks.ev3devices import (Motor, TouchSensor, ColorSensor,
                                 InfraredSensor, UltrasonicSensor,
                                 GyroSensor)
from pybricks.parameters import Port, Stop, Direction, Button, Color
from pybricks.tools import wait, StopWatch, DataLog
from pybricks.robotics import DriveBase
from pybricks.media.ev3dev import SoundFile, ImageFile

from door import Door
from dump import Dump

## SET UP THE DRIVE CHASSIS ##
ev3 = EV3Brick()
l_DriveMotor = Motor(Port.B)
r_DriveMotor = Motor(Port.C)
driver = DriveBase(l_DriveMotor, r_DriveMotor, 68.8, 111)

## CREATE THE DOOR ##
d_motor = Motor(Port.A)
door = Door(d_motor, 180)

## CREATE THE DUMP ##
dumpMotor = Motor(Port.D)
dumper = Dump(dumpMotor, 165)

## CREATE THE COLOR SENSORS ##
r_color = ColorSensor(Port.S4)
l_color = ColorSensor(Port.S1)

## SET UP THE SENSORS ##
gyro = GyroSensor(Port.S2)
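## A hedged sketch of how the hardware set up above might be driven. The
## DriveBase calls (straight, turn) and wait are standard pybricks API;
## door.open() and dumper.dump() are assumptions, since the custom Door
## and Dump classes are not shown here.
driver.straight(250)   # drive forward 250 mm
driver.turn(90)        # turn 90 degrees clockwise
#door.open()           # hypothetical Door method
#dumper.dump()         # hypothetical Dump method
wait(1000)             # pause 1 second (pybricks wait takes milliseconds)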
def dump(self):
    """Dump Bugzilla data to PyTables."""
    dump = Dump(config=self.config, cache=self.cache)
    dump.run()