Example 1
def detos(scrip):
	osize = ""
	st_dat   = datetime.today().strftime('%Y-%m-%d')
	nw_dat   = datetime.today().strftime('%H:%M')
	st_dat   = st_dat+' 09:15:00'
	stqry    = "SELECT * FROM `"+scrip+"` WHERE `time` > '"+st_dat+"'"
	dp_cnt   = s.rcnt(stqry)
	dp_delta = datetime.strptime(nw_dat+":00",'%H:%M:%S') - datetime.strptime('09:15:00','%H:%M:%S')
	dp_req   = int(dp_delta.seconds/60)
	if dp_req > 375:
		dp_req = 375
	dp_mis   = int(dp_req - dp_cnt)
	#Decide the output size: "full" when the table is empty or far behind,
	#"compact" for small gaps, "NONE" when no data points are missing
	if dp_mis >= 100 or dp_cnt == 0:
		osize = "full"
	if 1 <= dp_mis < 100:
		osize = "compact"
	if dp_mis < 1:
		osize = "NONE"

	c.pr("I","Data Points Availiable -> "+str(dp_cnt) +" Data Points Required -> "+str(dp_req)+" Data Points Missing -> "+str(dp_mis)+" Output Size -> "+osize,1)
	sys.exit()
	return osize
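A quick arithmetic check of the sizing logic above (a full trading session from 09:15 to 15:30 is 375 one-minute candles; the time below is illustrative only):

from datetime import datetime

# At 10:05 the session has run 50 minutes, so 50 one-minute data points are required.
delta = datetime.strptime('10:05:00', '%H:%M:%S') - datetime.strptime('09:15:00', '%H:%M:%S')
assert int(delta.seconds / 60) == 50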
Example 2
def process_data(data,scrip):
	#Check if meta data is returned
	if "Meta Data" in data:
		c.pr("I","Results Fetched Successfully",1)
		ac_data   = data["Time Series (1min)"]
		time_keys = ac_data.keys()
		data_map  = {}
		for tk in time_keys:
			dt_str,tstamp 		= fetch_time(tk)
			opn,hig,low,clo,vlm = fetch_param(ac_data[tk])
			#print("EST Time -> "+tk+"   IST Time -> "+dt_str+"  Time Stamp -> "+tstamp+"  Open -> "+opn+"  High -> "+hig+"  Low -> "+low+" Close -> "+clo+ " Volume -> "+vlm)
			#sys.exit()
			data_map[tstamp]	  = {}
			data_map[tstamp]['D'] = dt_str
			data_map[tstamp]['O'] = opn
			data_map[tstamp]['L'] = low
			data_map[tstamp]['H'] = hig
			data_map[tstamp]['C'] = clo
			data_map[tstamp]['V'] = vlm
		store_data(data_map,scrip)
	else:
		if "NONE" not in data:
			c.pr("I","Results Fetch Failed",1)
			print(data)
		
	return
Example 3
def mkcells(gidinfo):
    timeit()
    for gid in gidinfo:
        x, y, z = gidinfo[gid]
        cell = h.Cell()
        gidinfo[gid] = CellInfo(cell)

        # The cell shape is actually an arc and the area is fastidious with
        # respect to all 6 sides, but the length is treated as the straight-line
        # distance between org points (interior corners in the circumferential
        # direction). Set diam so the area is correct, including the end areas.
        cell.soma.pt3dclear()
        cell.soma.pt3dadd(x, y, z, 1.)
        ilayer, icircle, ipt = gid2org(gid)
        x1, y1, z1 = xyz(ilayer, icircle, ipt + 1)
        cell.soma.pt3dadd(x1, y1, z1, 1.)
        length = cell.soma.L
        area = sum(mkgap.cell_side_areas(gid))
        diam = area / pi / length
        cell.soma.diam = diam
        assert (isclose(cell.soma(.5).area(), area, abs_tol=area * 1e-5))

        cell.position(x, y, z)
        pc.set_gid2node(gid, rank)
        nc = cell.connect2target(None)
        pc.cell(gid, nc)
    x = pc.allreduce(len(gidinfo), 1)
    pr("Global number of real cells is %d" % x)
    timeit("mkcells")
Example 4
def fetch_csv(scrip,typ,year,month):
	sname 	   = scrip
	data_map   = {}
	if typ == "FUT":
		sname = sname + "_F1"
	fpath = "C:\\Users\\ssadiq\\Downloads\\oneminutedata\\"+year+"\\"+month+"\\NIFTY50_"+month+year+"\\"+sname+".txt"
	if os.path.exists(fpath):
		c.pr("I","File Path Exists -> "+fpath,1)
		fobj  = open(fpath,"r")
		lines = (fobj.read()).split("\n")
		for line in lines:
			tmp_str  = line.split(",")
			if len(tmp_str) == 9:
				dt	   = tmp_str[1][:4] +"-"+tmp_str[1][4:6]+"-"+tmp_str[1][6:8]+" "+tmp_str[2]+":00"
				o 	   = tmp_str[3]
				h 	   = tmp_str[4]
				l 	   = tmp_str[5]
				cl 	   = tmp_str[6]
				v      = tmp_str[7]
				dt_obj = datetime.strptime(dt, "%Y-%m-%d %H:%M:%S")
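				# mktime() returns a float such as 1549014960.0; dropping the last two
				# characters keeps the integer epoch seconds as a string key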
				tstamp = str(time.mktime(dt_obj.timetuple()))[:-2]

				data_map[tstamp]	  = {}
				data_map[tstamp]['D'] = dt
				data_map[tstamp]['O'] = o
				data_map[tstamp]['L'] = l
				data_map[tstamp]['H'] = h
				data_map[tstamp]['C'] = cl
				data_map[tstamp]['V'] = v
				#qry    = "('"+dt+"','"+tstamp+"',"+o+","+l+","+h+","+cl+","+v+")"
				#ret.append(qry)
	else:
		c.pr("W","File Path Does Not Exists -> "+fpath,1)
	return data_map
Example 5
def sanitize(data_map,scrip):
	c.pr("I","Sanitizing Data For "+scrip,1)
	final_map = []
	db_data   = s.sql_hash(scrip,"timestamp","volume","")
	for tk in data_map:
		if tk not in db_data:
			final_map.append(tk)
	return final_map
Example 6
def store_data(data_map,scrip):
	#Returns the records which are not in the DB
	final_map = sanitize(data_map,scrip)
	c.pr("I","Storing Data For "+scrip,1)
	sql_hash = []
	for key in final_map:
		sql_ins = "('"+data_map[key]['D']+"','"+key+"',"+data_map[key]['O']+","+data_map[key]['L']+","+data_map[key]['H']+","+data_map[key]['C']+","+data_map[key]['V']+")"
		sql_hash.append(sql_ins)
	s.sql_insert(scrip,"time,timestamp,open,low,high,close,volume",sql_hash,500)
	return 
Example 7
def purkstim():
    r = []
    if rank == 0:
        for gid in gidinfo:  # break, so just the first and all its connections
            r.append(gid)
            for gid2 in gidinfo[gid].gaps:
                r.append(gid2)
            break
    pr("purkstim" + str(r))
    r = h.Vector(r)
    pc.broadcast(r, 0)
    return r
Example 8
def print_param():
  import sys
  types = [type(True), type(1), type(1.0), type((1,2,3))]
  try:
    p = sys.modules['param']
    for name in dir(p):
      if '__' not in name:
        val = getattr(p, name)
        if type(val) in types or "purkinje_spec" in name:
          pr("%s = %s"%(name, str(val)))
  except Exception:
    print('Error in param.print_param')
Example 9
def fetch_local_data():
	for scrip in scrips:
		typs = {"EQ":scrip,"FUT":scrip+"_FUT"}
		c.pr("I","Processing For scrip --> "+scrip,1)
		for typ in typs:
			#months = ['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC']
			months = ['FEB']
			for mon in months:
				data = fetch_csv(scrip,typ,"2019",mon)
				if len(data):
					tbl = typs[typ]
					store_data(data,tbl)
					clean_data(tbl)
	return
Example 10
def main():
    args = parser.parse_args()
    data = np.loadtxt(args.input, delimiter=',')
    t = data[:, 0]
    if args.has_truth:
        X = data[:, 1:-1]
        y = data[:, -1].astype(np.uint8)
    else:
        X = data[:, 1:]
        y = None

    with open(args.model, 'rb') as model_file:
        model = pickle.load(model_file)

    y_hat = evaluate(X, model, args.min_support)

    if y is not None:
        p, r = common.pr(y_hat, y)
        print('Threshold = %f' % model['thresh'])
        print('  p = %f' % p)
        print('  r = %f' % r)

    np.savetxt(args.output,
               np.stack([t, y_hat], axis=1),
               delimiter=',',
               fmt='%.18g')
Example 11
def special_gap_params():
    try:
        f = open('Connections_Other_Information.txt')
    except:
        pr('No special gap parameters')
        return
    gidpair2ncon = {}
    for ncon in connections:
        gidpair2ncon[connections[ncon]] = ncon
    for line in f:
        #gid1, gid2, gmin, gmax, gvar, tc, tcvar, drift
        info = [float(x) for x in line.split()]
        for i in range(2):
            info[i] = int(info[i])
        for i in range(2):
            special_gap_params2(i, info, gidpair2ncon)
    f.close()
Example 12
def mkgaps(gidinfo, gaps):
    timeit()
    mark = set()
    for gapinfo in gaps.values():
        gg = (gapinfo.gid1, gapinfo.gid2)
        id = gapinfo.id
        mkhalfgap(gg[0], gg[1], id, gidinfo, mark)
        mkhalfgap(gg[1], gg[0], -id, gidinfo, mark)
    pc.setup_transfer()

    x = 0
    for cell in gidinfo.values():
        x += len(cell.gaps)
    x = pc.allreduce(x, 1)
    pr("Global number of halfgap is %d" % x)

    timeit("mkgaps")
Example 13
def check_tables():
	c.pr("I","Checking If Destination Table's Exists",0)
	for scrip in scrips:
		#c.pr("I","Checking For Table "+scrip,1)
		qry = "SELECT * FROM information_schema.tables WHERE table_schema = 'stocki'  AND table_name = '"+scrip+"' LIMIT 1;"
		if s.rcnt(qry):
			c.pr("I",scrip+" Table Exists",1)
		else:
			c.pr("I",scrip+" Table Needs To Be Created",1)
			s.create_table(scrip,"time:DT,timestamp:VC:15,open:FL,low:FL,high:FL,close:FL,volume:IN")
		
		qry = "SELECT * FROM information_schema.tables WHERE table_schema = 'stocki'  AND table_name = '"+scrip+"_FUT' LIMIT 1;"
		if s.rcnt(qry):
			c.pr("I",scrip+"_FUT Table Exists",1)
		else:
			c.pr("I",scrip+"_FUT Table Needs To Be Created",1)
			s.create_table(scrip+"_FUT","time:DT,timestamp:VC:15,open:FL,low:FL,high:FL,close:FL,volume:IN")
	return
Example 14
def setallgaps(meang, interval, drift):
    npurkgap = 0
    for gid1, cellinfo in gidinfo.items():
        for gid2, gap in cellinfo.gaps.items():
            gapinfo = mkgap.gaps[(gid1, gid2) if gid1 < gid2 else (gid2, gid1)]
            area = gapinfo.area
            g = mkgap.abscond(area, meang)
            if is_purkinje_gap(gapinfo.gid1, gapinfo.gid2):
                g *= param.purkinje_gap_factor
                cellinfo.is_purk = True
                npurkgap += 1
            gap.meang = g
            gap.gmax = g
            gap.gmin = g
            gap.g = g
            gap.rg = interval
            gap.drift = drift
    npurkgap = pc.allreduce(npurkgap, 1)
    pr("number of purkinje gaps is %d" % (npurkgap / 2))
Example 15
def init_sim(sims,rans,st_id):
    c.pr("I","Initializing Simulation",0)
    max_threads = 5
    if len(sims) < max_threads:
        max_threads = len(sims)

    for x in range(max_threads):
        t = threading.Thread(target=threader)
        t.daemon = True
        t.start()
    
    for key in sims:
        q.put(sims[key])
    
    for key in rans:
        q.put(rans[key])
    q.join()
    c.pr("I","Simulation Finished",1)
    display_stats(st_id)
    return
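The worker target threader is not shown in this example; a minimal placeholder consistent with the queue pattern above (an assumption for illustration, not the original implementation) could be:

def threader():
    # Each daemon thread pulls a prepared simulation dict off the shared queue q,
    # runs it, and marks the task done so q.join() can return.
    while True:
        sim_data = q.get()
        try:
            simulate(sim_data)
        finally:
            q.task_done()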
Example 16
def sql_insert(table, keys, data, limit):
    c.pr(
        "I", "Initiating Insert Operation On Table -> " + table +
        " Query Limit -> " + str(limit) + " Columns -> " + str(len(data)), 0)
    ctr = 0
    dap = ""
    for clm in data:
        if ctr == limit:
            dap = dap[1:]
            qry = "INSERT INTO `" + table + "` (" + keys + ") VALUES " + dap
            execQuery(qry)
            dap = ""
            ctr = 0
        ctr = ctr + 1
        dap = dap + "," + clm
    if ctr > 0:  # flush any remaining rows
        dap = dap[1:]
        qry = "INSERT INTO `" + table + "` (" + keys + ") VALUES " + dap
        execQuery(qry)
    return
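A minimal usage sketch of the batching behaviour above (table name and row values are hypothetical): with a limit of 2 and three rows, one INSERT is issued for the first two rows and the trailing flush issues a second INSERT for the remaining row.

rows = [
    "('2019-02-01 09:16:00','1548993960',100.0,99.5,100.5,100.2,1200)",
    "('2019-02-01 09:17:00','1548994020',100.2,100.0,100.6,100.4,900)",
    "('2019-02-01 09:18:00','1548994080',100.4,100.1,100.9,100.7,1500)",
]
sql_insert("RELIANCE", "time,timestamp,open,low,high,close,volume", rows, 2)
# -> INSERT INTO `RELIANCE` (time,...,volume) VALUES (row 1),(row 2)
# -> INSERT INTO `RELIANCE` (time,...,volume) VALUES (row 3)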
Example 17
def fetch_json(scrip):
	c.pr("I","Fetching Data For Scrip "+scrip,1)
	#Determine output size full or compact
	osize = detos(scrip)
	data  = {}
	if osize != "NONE":
		API_LINK = "https://www.alphavantage.co/query?function=TIME_SERIES_INTRADAY&interval=1min&apikey=MCAF9B429I44328U&symbol="+scrips[scrip]['search']+"&outputsize="+osize
		c.pr("I","API Link -> "+API_LINK,1)	
		try:
			r = req.get(API_LINK)
			if(r.status_code == 200):
				data = r.json()
			else:
				c.pr("I","Unable to Fetch Data HTTP STATUS CODE -> "+ r.status_code,0)
		except Exception as e:
			c.pr("E","Exception Occured "+str(e),0)
	else:
		c.pr("I","No Need To Call API As Data Points Are Populated",1)
		data['NONE'] = 1
	return data
Example 18
def cellconread():
  timeit()
  # new Heart-3D paraboloid organization
  global ncon, ncell, connections
  import cellorg, mkgap
  from cellorg import sim_layers, sim_circles
  ncell = cellorg.ngid
  #old way iterating over all possible cells takes 5.4 seconds
  for gid in range(rank, ncell, nhost):
    ilayer, icircle, ipt = cellorg.gid2org(gid)
    if icircle < cellorg.ncircle[ilayer] - 1:
      if cellorg.is_simulated(ilayer, icircle, ipt):
        xyz = cellorg.xyz(ilayer, icircle, ipt)
        gidinfo[gid] = xyz
  '''
  #new way iterating only over cells that exist takes
  import param as p
  for ilayer in sim_layers:
    for icircle in sim_circles[ilayer]:
      i0 = cellorg.angle2ipt(p.simulation_angledeg[0]*2*pi/360, ilayer, icircle)
      i1 = cellorg.angle2ipt(p.simulation_angledeg[1]*2*pi/360, ilayer, icircle)
      for ipt in range(i0, i1+1):
        if cellorg.is_simulated(ilayer, icircle, ipt):
          gid = cellorg.org2gid(ilayer, icircle, ipt)
          if gid%nhost == rank:
            gidinfo[gid] = cellorg.xyz(ilayer, icircle, ipt)
  '''
  timeit("gidinfo setup")

  for gid in gidinfo:
    # because of floating round-off error which may or may not create
    # a gap with area close to 0, guarantee gap pairs by only creating
    # gaps where gid1 < gid2
    mkgap.gaps_for_gid(gid)
  n = int(pc.allreduce(n_triang_zero(), 1))
  pr("accurate_triang_area calculation returned zero %d times" % n)
  timeit("connections determined")

  # for parallel, copy gid2 gaps to ranks that need them
  mkgap.gaps_gid2_copy()
  connections = mkgap.gaps
Example 19
def create_table(name, schema):
    c.pr("I", "Creating Table " + name, 1)
    var_map = {
        "FL": "FLOAT",
        "IN": "INT(11)",
        "DT": "DATETIME",
        "TS": "TIMESTAMP",
        "VC": "VARCHAR",
        "TX": "TEXT"
    }
    query = "CREATE TABLE `" + name + "` ("
    scharr = schema.split(",")
    for sch in scharr:
        tmp = sch.split(":")
        col = tmp[0]
        dt = tmp[1]
        if dt == "VC":
            size = tmp[2]
            query = query + "`" + col + "` " + var_map[
                dt] + "(" + size + ") NOT NULL,"
        else:
            query = query + "`" + col + "` " + var_map[dt] + " NOT NULL,\n"
    query = query.rstrip(",\n") + ")"  # strip the trailing separator regardless of the last column type

    qry = "SELECT * FROM information_schema.tables WHERE table_schema = 'stocki'  AND table_name = '" + name + "' LIMIT 1;"
    execQuery(query)
    if rcnt(qry):
        c.pr("I", str(name + " Table Created"), 1)
        execQuery("UPDATE scrips SET status='YES' WHERE scrip='" + name + "'")
    else:
        c.pr("W", str(name + " Table Not Created"), 1)
    cursor.close()
    del cursor
    db_obj.close()
    return
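For reference, a sketch of how the schema mini-language above expands (using the column schema seen elsewhere in this module; the table name is hypothetical):

# create_table("RELIANCE", "time:DT,timestamp:VC:15,open:FL,low:FL,high:FL,close:FL,volume:IN")
# builds roughly:
#   CREATE TABLE `RELIANCE` (`time` DATETIME NOT NULL,
#                            `timestamp` VARCHAR(15) NOT NULL,
#                            `open` FLOAT NOT NULL,
#                            `low` FLOAT NOT NULL,
#                            `high` FLOAT NOT NULL,
#                            `close` FLOAT NOT NULL,
#                            `volume` INT(11) NOT NULL)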
Example 20
def refresh():
  cls()
  pr('Listening for clients...')

  if not clients:
    return

  for i in range(0, len(clients)):
    pr('➡️ Client %d: %s' % (i, clients[i]))

  pr('Press ctrl + c to continue')
Example 21
def ohl(capital, star_param, cdata, typ):
    max_dp = star_param['MAX']
    start = star_param['START']
    thr = star_param['THR']
    var = star_param['VAR']
    sl = star_param['SL']
    t1 = star_param['T1']
    t2 = star_param['T2']
    st_id = star_param['ID']
    scr = star_param['SC']
    sims = {}
    scrips = {}
    data = {}
    c.pr(
        "I", "Initialiazing Strategy OHL Max DP -> " + str(max_dp) +
        " Staring Data Point -> " + str(start), 0)
    #Fetch Scrips
    if scr == "ALL":
        scrips = c.load_scrips()
    else:
        scrips[scr] = 1
    #Fetch Data
    for scrip in scrips:
        if scr != "ALL":
            if len(cdata):
                data = cdata
            else:
                data = c.fetch_scrip_data(scrip, start, 0)
        else:
            data = c.fetch_scrip_data(scrip, start, 0)
        spl_data = c.split_data(data, 36000)
        for ctr in spl_data:
            rddata = collections.OrderedDict(sorted(spl_data[ctr].items()))
            iddata = c.intrafy(rddata)
            sim_key, sim_data = ohl_process(iddata, thr, var, scrip, capital,
                                            max_dp, sl, t1, t2, st_id)
            if sim_key:
                sims[sim_key + "_" + scrip] = sim_data
    #Call Simulations
    if len(sims):
        rans = randomize(spl_data, sims, start, "09:31:00", "15:10:00", "OHL",
                         capital, sl, t1, t2, st_id)
        c.pr("I", str(len(sims)) + " Actual Simulations Will Be Performed", 1)
        c.pr("I", str(len(rans)) + " Random Simulations Will Be Performed", 1)
        if typ == "B":
            for key in sims:
                sim.simulate(sims[key])
            for key in rans:
                sim.simulate(rans[key])
        else:
            sim.init_sim(sims, rans, st_id)
    return
Example 22
def main():
    data = np.loadtxt(args.input, delimiter=',')
    X = common.preprocess(data[:, 1:-1])
    y = data[:, -1].astype(np.uint8)

    model = ng3.Ng3Model()
    model.fit(X, y, monotonicity=[ng3.Ng3Model.NON_INCREASING, ng3.Ng3Model.NON_INCREASING])
    df = model.predict(X)

    for i, model_part in enumerate(model._models):
        plt.figure()
        plt.plot(model_part._xs, model_part._ys)
        plt.title('Feature %d' % i)
        plt.savefig('feature_%d.png' % i)

    xx, yy = np.meshgrid(
        np.linspace(min(X[:, 0]), max(X[:, 0]), 1000),
        np.linspace(min(X[:, 1]), max(X[:, 1]), 1000))
    Z = model.predict(np.hstack([xx.ravel().reshape(-1, 1), yy.ravel().reshape(-1, 1)]))
    Z = Z.reshape(xx.shape)
    plt.figure()
    plt.contourf(xx, yy, Z)
    plt.title('Decision surface')
    plt.savefig('decision.png')
    plt.scatter(X[y==0, 0], X[y==0, 1], c='b', marker='+')
    plt.scatter(X[y==1, 0], X[y==1, 1], c='r', marker='+')
    plt.savefig('decision_with_points.png')

    thresh = np.percentile(df[y == 1], 100 * (1 - args.recall))
    p, r = common.pr(df > thresh, y)
    print('Threshold = %f' % thresh)
    print('  p = %f' % p)
    print('  r = %f' % r)

    with open(args.output, 'wb') as output_file:
        pickle.dump({
            'model': model,
            'thresh': thresh,
        }, output_file)
Example 23
def fix_missing_entries(scrip):
	c.pr("I","Fixing Missing Entries For Scrip "+scrip,1)
	uniq_dates = s.sql_array("SELECT DISTINCT CAST(`time` AS DATE) AS dateonly FROM `"+scrip+"`","dateonly")
	for date in uniq_dates:
		dp_req = fetch_dp_req(str(date),scrip)
		db_dp  = s.sql_hash(scrip,"timestamp","close","WHERE `time` BETWEEN '"+str(date)+" 09:16:00' AND '"+str(date)+" 15:30:00'")
		dp_cur = len(db_dp)
		dp_mis = (dp_req - dp_cur)
		dp_map = {}
		if dp_mis > 1:
			c.pr("I","DATE --> "+str(date)+" DP REQ --> "+str(dp_req)+" DP CUR --> "+str(dp_cur)+" DP MIS --> "+str(dp_mis),1)
			#Here We attempt to fix DP
			dp_min  = int(c.get_timestamp(str(date)+" 09:16:00"))
			dp_max  = int(c.get_timestamp(str(date)+" 15:30:00"))
			#c.pr("I","DP MIN ---> "+str(dp_min)+"  DP MAX ---> "+str(dp_max),1)
			dp_chk  = dp_min
			ctr = 1
			dp_last = 0
			while dp_chk != (dp_max+60):
				if not str(dp_chk) in db_dp:
					#If MIN AND CHK Are Same
					if dp_chk == dp_min:
						 c.pr("I",str(dp_chk)+" ---> MIN MISSING",1)
						 #exit()
					else:
						if str((dp_chk - 60)) in db_dp:
							#Case Where Previous Data point exists
							dp_prev = db_dp[str((dp_chk - 60))]['close']
							#print(str(dp_chk)+"  ---> PREV PRESENT"+" DP PREV ---> "+str(dp_prev))
							dp_map[str(dp_chk)] = process_missing(dp_prev,dp_chk)
						else:
							#print(str(dp_chk)+"  ---> PREV MISSING"+" DP PREV ---> "+str(dp_last))
							if dp_last:
								dp_prev = db_dp[str(dp_last)]['close']
								dp_map[str(dp_chk)] = process_missing(dp_prev,dp_chk)
							#print(str(dp_chk)+"  ---> PREV MISSING"+" DP PREV ---> "+str(dp_prev))
				else:
					dp_last = dp_chk			
				dp_chk  = (dp_chk+60)
			if len(dp_map):
				store_data(dp_map,scrip)
	return
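The helper process_missing() is not shown here; a plausible placeholder consistent with how its result is consumed above (keys D/O/L/H/C/V, as stored by store_data) would synthesize a flat one-minute candle from the previous close. The helper names below are assumptions for illustration only.

def process_missing(dp_prev, dp_chk):
	# Build a synthetic candle for the missing minute: open/high/low/close all
	# equal to the previous close, with zero volume.
	return {
		'D': c.get_date(dp_chk),  # assumed helper mapping the epoch back to 'YYYY-MM-DD HH:MM:SS'
		'O': str(dp_prev),
		'H': str(dp_prev),
		'L': str(dp_prev),
		'C': str(dp_prev),
		'V': "0",
	}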
Example 24
def test_pr():
    profile = motif_profile(motifs_example)
    assert_almost_equal(pr('ACGGGGATTACC', profile), 0.0008)
    assert_almost_equal(pr('TCGGGGATTTCC', profile), 0.0205)
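The expected values in this test match the standard profile-probability definition; a minimal sketch (assuming `profile` maps each nucleotide to its per-column probabilities) of what pr(kmer, profile) is expected to compute:

def pr_sketch(kmer, profile):
    # Probability of the k-mer under the profile matrix: the product of the
    # profile entry for each character at its position.
    p = 1.0
    for i, ch in enumerate(kmer):
        p *= profile[ch][i]
    return p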
Example 25
def validate_uninstr_lines():
        """
        Validations to perform on the uninstrumented vmlinux objdump lines.
        """
        if objdump_uninstr.JOPP_FUNCTION_NOP_SPACERS:
            # Assume that the key might change and require return-address reencryption.  This
            # means we need to have all copies of x30 either in x30 itself, or saved in memory
            # and pointed to by a frame pointer.
            #
            # In particular, we can't allow return-addresses being saved in callee registers
            # as is done in some low-level assembly routines, since when the key changes these
            # registers will become invalid and not be re-encrypted.
            #
            # Look for and warn about:
            #
            # mov <rd>, x30
            # ...
            # ret <rd>
            mov_ret_errors = []
            nop_spacer_errors = []
            missing_asm_annot_errors = []
            c_func_br_errors = []
            ldp_spacer_error_funcs = set([])
            stp_spacer_error_funcs = set([])
            ldp_spacer_errors = []
            stp_spacer_errors = []
            atomic_prologue_errors = []
            atomic_prologue_error_funcs = set([])
            for func_i, func, lines, last_insns in objdump_uninstr.each_func_lines(num_last_insns=2, with_func_i=True):
                mov_registers = set([])
                ret_registers = set([])
                is_c_func = func in c_functions
                saw_br = False
                #if objdump_uninstr.JOPP_FUNCTION_NOP_SPACERS and \
                        #not instrument.skip_func(func, skip, skip_asm) and func in asm_functions:
                    #if any(not re.search('\tnop$', l) for l in last_insns if l is not None):
                        #nop_spacer_errors.append(lines)
                for i, line in enumerate(lines, start=func_i):
                    def slice_lines(start, end):
                        return lines[start-func_i:end-func_i]
                    m = re.search(r'mov\t(?P<mov_register>{register_re}), x30'.format(register_re=register_re), line)
                    if m and m.group('mov_register') != 'sp':
                        mov_registers.add(m.group('mov_register'))
                        continue
                    m = re.search(r'ret\t(?P<ret_register>{register_re})'.format(register_re=register_re), line)
                    if m:
                        ret_registers.add(m.group('ret_register'))
                        continue
                    m = re.search(r'ldp\tx29,\s+x30,', line)
                    if m:
                        for l in lines[i+1:i+3]:
                            if not re.search(r'nop$', l):
                                ldp_spacer_errors.append(lines)
                                ldp_spacer_error_funcs.add(func)
                                break
                        continue
                    m = re.search(r'stp\tx29,\s+x30,', line)
                    if m and func not in skip_stp:
                        missing_nop = False
                        for l in slice_lines(i-1, i):
                            if not re.search(r'nop$', l):
                                stp_spacer_errors.append(lines)
                                stp_spacer_error_funcs.add(func)
                                missing_nop = True
                                break
                        if missing_nop:
                            continue
                        if func == '__kvm_vcpu_run':
                            pr({'func':func})
                        mov_j, movx29_insn = instrument.find_add_x29_x30_imm(objdump_uninstr, func, func_i, i)
                        for l in slice_lines(i+1, mov_j):
                            if func not in atomic_prologue_error_funcs and re.search(r'\b(x29|sp)\b', insn_text(l)):
                                atomic_prologue_errors.append(lines)
                                atomic_prologue_error_funcs.add(func)
                                break
                        continue
                # End of function; check for errors in that function, and if so, preserve its output.
                if len(mov_registers.intersection(ret_registers)) > 0 and func not in skip_save_lr_to_stack:
                    mov_ret_errors.append(lines)

            errmsg(c_func_br_errors, """
            Saw a C function in vmlinux without information about the number of arguments it takes.

            We need to know this to zero registers on BLR jumps.
            """)

            errmsg(missing_asm_annot_errors, """
            Saw an assembly routine(s) that hasn't been annotated with the number of
            general purpose registers it uses.

            Change ENTRY to FUNC_ENTRY for these assembly functions.
            """)

            errmsg(nop_spacer_errors, """
            Saw an assembly routine(s) that doesn't have 2 nop instructions immediately
            before the function label.

            We need these for any function that might be the target of a blr instruction!
            """)

            errmsg(mov_ret_errors, """
            Saw an assembly routine(s) saving LR into a register instead of on the stack.
            This would prevent us from re-encrypting it properly!
            Modify these routine(s) to save LR on the stack and adjust the frame pointer (like in prologues of C functions).
            e.g.
            stp	x29, x30, [sp,#-16]!
            mov	x29, sp
            ...
            ldp	x29, x30, [sp],#16
            ret

            NOTE: We're only reporting functions found in the compiled vmlinux
            (gcc might remove dead code that needs patching as well)
            """)
            errmsg(ldp_spacer_errors, """
            Saw a function with ldp x29, x30 but without 2 nops following it.
            Either add an LDP_SPACER to this, use the right compiler, or make an exception.
            """)
            errmsg(stp_spacer_errors, """
            Saw a function with stp x29, x30 but without 1 nop before it.
            Either add an STP_SPACER to this, use the right compiler, or make an exception.
            """)
            warmsg(atomic_prologue_errors, """
            Saw a function prologue with:
            <func>:
                stp x29, x30, ...
                (insns)
                add x29, sp, #...

            BUT, one of the "(insns)" mentions either x29 or sp, so it might not be safe to turn this into:

            <func>:
                stp x29, x30, ...
                add x29, sp, #...
                (insns)
            """)
Example 26
    socks.append(s)
    clients.append(str(a))
    refresh()

  except KeyboardInterrupt:
    o = _input('\rEnter option') # \r clears print of ctrl + c (^C)
    o = int(o)

    if o == -1:
      continue

    if o == -2:
      sys.exit()

    if o not in range(0, len(socks)):
      pr('⚠️ Index out of range')
      continue

    sock = socks[o]
    client = clients[o]
    cipher = get_cipher()
    decipher = get_cipher()
    active = True

    Send(sock, cipher, 'pwd')

  while active:
    data = Receive(sock, decipher)

    if not data:
      pr('⚠️ Client disconnected')
Example 27
layer_offset = [0]
for ilayer in range(p.n_layer):
    for npt in npts[ilayer]:
        circle_offset[ilayer].append(circle_offset[ilayer][-1] + npt)
    layer_offset.append(layer_offset[-1] + circle_offset[ilayer][-1])

# simulation range limits for layers, circles
sim_layers = range(nlayer)[p.simulation_layers[0]:p.simulation_layers[1]]
sim_circles = []
for ilayer in sim_layers:
    sim_circles.append(
        range(
            ncircle[ilayer])[p.simulation_circles[0]:p.simulation_circles[1]])

timeit("abstract model definition")
pr("ngid = %d" % ngid)


def org2gid(ilayer, icircle, ipt):
    gid = layer_offset[ilayer] + circle_offset[ilayer][
        icircle] + ipt % npts[ilayer][icircle]
    return gid


def gid2org(gid):
    ilayer = gid2layer(gid)
    r = gid - layer_offset[ilayer]
    icircle = gid2org_help_circle(ilayer, r)
    ipt = r - circle_offset[ilayer][icircle]
    return ilayer, icircle, ipt
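A self-contained toy sketch (not the original cellorg module; npts and the lookup helpers below are illustrative assumptions) showing that the offset bookkeeping above makes org2gid and gid2org inverses of each other:

npts = [[4, 6], [8]]              # 2 hypothetical layers: circles of 4 and 6 points, then one of 8
circle_offset = [[0] for _ in npts]
layer_offset = [0]
for ilayer, layer in enumerate(npts):
    for npt in layer:
        circle_offset[ilayer].append(circle_offset[ilayer][-1] + npt)
    layer_offset.append(layer_offset[-1] + circle_offset[ilayer][-1])

def org2gid(ilayer, icircle, ipt):
    return layer_offset[ilayer] + circle_offset[ilayer][icircle] + ipt % npts[ilayer][icircle]

def gid2org(gid):
    ilayer = max(i for i, off in enumerate(layer_offset[:-1]) if off <= gid)
    r = gid - layer_offset[ilayer]
    icircle = max(i for i, off in enumerate(circle_offset[ilayer][:-1]) if off <= r)
    ipt = r - circle_offset[ilayer][icircle]
    return ilayer, icircle, ipt

for gid in range(layer_offset[-1]):
    assert org2gid(*gid2org(gid)) == gid  # round trip holds for every valid gid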
Example 28
def clean_data(scrip):
	c.pr("I","Performing Clean Up Opearations For Scrip "+scrip,1)
	fix_missing_entries(scrip)
	return
Example 29
def load_scrips():
	global scrips
	c.pr("I","Loading All Scrips",0)
	scrips = s.sql_hash("scrips","scrip","sector:status:is_fetch:search","")
	return
Example 30
def test_pr():
    profile = motif_profile(motifs_example)
    assert_almost_equal(pr('ACGGGGATTACC', profile), 0.0008)
    assert_almost_equal(pr('TCGGGGATTTCC', profile), 0.0205)
Example 31
def simulate(sim_data):
    scrip    = sim_data['SC']
    trans    = sim_data['TP']
    capt     = sim_data['CP']
    sl       = sim_data['SL']
    tar1     = sim_data['T1']
    tar2     = sim_data['T2']
    start    = sim_data['TS']
    end_time = sim_data['EN']
    sim_name = sim_data['NM']
    sim_type = sim_data['ST']
    str_id   = sim_data['ID']
    data     = sim_data['DATA']
    sl_val  = 0
    t1_val  = 0
    t2_val  = 0
    t1_vol  = 0
    t2_vol  = 0
    vol     = 0
    results = {}
    entry   = 0
    status  = ""
    end     = c.get_timestamp(c.get_only_date(start)+" "+end_time)
    sim_id  = c.gen_id("sim_tracker","sim_id")
    
    c.pr("I","Starting simulation for [SIM ID -> "+sim_id+"] [Scrip -> "+scrip +"] [Type -> "+sim_type+"] [Transaction -> "+trans+"] [Entry Point ->  "+c.get_date(start)+"] [Capital -> "+str(capt)+"] [T1 -> "+str(tar1)+"%] [T2 -> "+str(tar2)+"%] [SL -> "+str(sl)+"%]",1)
    #Step 1 Load the Scrip
   
    #data  = c.fetch_scrip_data(scrip,start,end)
    #data  = c.fetch_scrip_cache(cdata,start,end)
    tkeys = list(data.keys())
    tkeys.sort()
    tctr = 0
    for tk in tkeys:
        if tk == start:
            break
        else:
            tctr += 1
    tkeys = tkeys[tctr:]
    #Step 2 Take entry at the entry point at average price of first data candle
    entry   = tkeys[0]
    ep_data = data[tkeys[0]]
    #Removing key which corresponds to EP
      
    tkeys.pop(0)
    avg_ent = round((ep_data['open'] + ep_data['close'] + ep_data['high'] + ep_data['low'])/4,1)
    #Step 3 Calculate the volume which can be undertaken
    vol = math.floor(capt/avg_ent)
    #Step 4 Calculate SL/T1/T2 after entry
    if trans == "SELL":
        sl_val =  round(avg_ent + (round((avg_ent * sl),1)),1)
        t1_val =  round(avg_ent - (round((avg_ent * tar1),1)),1)
        t2_val =  round(avg_ent - (round((avg_ent * tar2),1)),1)
    
    if trans == "BUY":
        sl_val =  round(avg_ent - (round((avg_ent * sl),1)),1)
        t1_val =  round(avg_ent + (round((avg_ent * tar1),1)),1)
        t2_val =  round(avg_ent + (round((avg_ent * tar2),1)),1)

    #Calculate Volume split
    t1_vol = math.ceil(vol * 0.7)
    t2_vol = vol - t1_vol
    
    #Step 4.1 Record the simulation data in DB
    sim_query = "INSERT INTO sim_tracker VALUES ('"+sim_id+"','"+str_id+"','"+scrip+"','"+sim_type+"','"+trans+"',"+str(capt)+","+str(tar1)+","+str(tar2)+","+str(sl)+","+str(t1_vol)+","+str(t2_vol)+",'"+start+"','"+end+"')"
    s.execQuery(sim_query)
    #c.pr("I","First Candle [Open "+str(ep_data['open'])+"] [Low "+str(ep_data['low'])+"] [High "+str(ep_data['high'])+"] [Close "+str(ep_data['close'])+"]",1)
    c.pr("I","[EP AVG(OLHC) "+str(avg_ent)+"] [SL "+str(sl_val)+"] [T1 "+str(t1_val)+"] [T2 "+str(t2_val)+"] [Vol "+str(vol)+"] [T1 Vol "+str(t1_vol)+"] [T2 Vol "+str(t2_vol)+"]" ,1)

    #Step 5 Loop through time keys and check for condition
    for key in tkeys:
        #Check if there is volume to sell
        if vol:
            ep_data = data[key]
            avg_prc = round((ep_data['open'] + ep_data['close'] + ep_data['high'] + ep_data['low'])/4,1)
            if trans == "SELL":
                #Check if this did hit SL
                if sl_val >= avg_prc:
                    if t1_vol:
                        if avg_prc <= t1_val:
                            #c.pr("I","Volume Is At "+str(vol)+" On "+c.get_time(key)+" AVG Price "+str(avg_prc)+ " T1 Hit -> Yes" ,1)
                            results[key]       = {}
                            results[key]['EN'] = avg_ent
                            results[key]['EX'] = avg_prc
                            results[key]['VL'] = t1_vol
                            results[key]['ST'] = "T1H"
                            vol                = vol - t1_vol
                            t1_vol             = 0
                            
                    if t1_vol == 0 and t2_vol:
                        if avg_prc <= t2_val:
                            #c.pr("I","Volume Is At "+str(vol)+" On "+c.get_time(key)+" AVG Price "+str(avg_prc)+ " T2 Hit -> Yes" ,1)
                            if key in results:
                                results[key]['VL']  += t2_vol
                                results[key]['ST']  = "T2H"
                                vol                 = vol - t2_vol 
                                t2_vol              = 0
                            else:
                                results[key]       = {}
                                results[key]['EN'] = avg_ent
                                results[key]['EX'] = avg_prc
                                results[key]['VL'] = t2_vol
                                results[key]['ST'] = "T2H"
                                vol                = vol - t2_vol 
                                t2_vol             = 0    
                              
                else:  
                    #c.pr("I","Volume Is At "+str(vol)+" On "+c.get_time(key)+" AVG Price "+str(avg_prc)+ " SL Hit -> Yes" ,1)
                    results[key]       = {}
                    results[key]['EN'] = avg_ent
                    results[key]['EX'] = avg_prc
                    results[key]['VL'] = vol
                    results[key]['ST'] = "SLH"
                    vol                = 0
            #exit()
            if trans == "BUY":
                if sl_val <= avg_prc:
                    if t1_vol:
                        if avg_prc >= t1_val:
                            #c.pr("I","Volume Is At "+str(vol)+" On "+c.get_time(key)+" AVG Price "+str(avg_prc)+ " T1 Hit -> Yes" ,1)
                            results[key]       = {}
                            results[key]['EN'] = avg_ent
                            results[key]['EX'] = avg_prc
                            results[key]['VL'] = t1_vol
                            results[key]['ST'] = "T1H"
                            vol                = vol - t1_vol
                            t1_vol             = 0
                            
                    if t1_vol == 0 and t2_vol:
                        if avg_prc >= t2_val:
                            #c.pr("I","Volume Is At "+str(vol)+" On "+c.get_time(key)+" AVG Price "+str(avg_prc)+ " T2 Hit -> Yes" ,1)
                            if key in results:
                                
                                results[key]['VL']  += t2_vol
                                results[key]['ST']  = "T2H"
                                vol                 = vol - t2_vol 
                                t2_vol              = 0
                            else:
                                results[key]       = {}
                                results[key]['EN'] = avg_ent
                                results[key]['EX'] = avg_prc
                                results[key]['VL'] = t2_vol
                                results[key]['ST'] = "T2H"
                                vol                = vol - t2_vol 
                                t2_vol             = 0    
                              
                else:  
                    #c.pr("I","Volume Is At "+str(vol)+" On "+c.get_time(key)+" AVG Price "+str(avg_prc)+ " SL Hit -> Yes" ,1)
                    results[key]       = {}
                    results[key]['EN'] = avg_ent
                    results[key]['EX'] = avg_prc
                    results[key]['VL'] = vol
                    results[key]['ST'] = "SLH"
                    vol                = 0

        else:
            c.pr("I","Ending Simulations As Volume is 0",1)
            break   

    #If the volume is still there at 3:10 square off at 3:10
    if vol:
        #c.pr("I","Squaring of Position At 03:10 PM",1)
        ed_data = data[key]
        avg_ext = round((ed_data['open'] + ed_data['close'] + ed_data['high'] + ed_data['low'])/4,1)
        results[key]       = {}
        results[key]['EN'] = avg_ent
        results[key]['EX'] = avg_ext
        results[key]['VL'] = vol
        results[key]['ST'] = "SQF"

    #Step 6. Display Result
    c.pr("I","Simulation Resuts",1)
    for res in results:
        PL = 0
        if trans == "BUY":
            PL = round(((results[res]['EX'] - results[res]['EN']) * results[res]['VL']),1)
        if trans == "SELL":
            PL = round(((results[res]['EN'] - results[res]['EX']) * results[res]['VL']),1)

        c.pr("I","[ET -> "+c.get_time(entry)+"] [EP -> "+str(results[res]['EN'])+"] [ET -> "+c.get_time(res)+"] [XP -> "+str(results[res]['EX'])+"] [Volume -> "+str(results[res]['VL'])+"] [P/L -> "+str(PL)+"] [Status -> "+results[res]['ST']+"]",1)
        res_query = "INSERT INTO sim_results VALUES ('"+sim_id+"',"+str(start)+","+res+","+str(results[res]['EN'])+","+str(results[res]['EX'])+","+str(results[res]['VL'])+","+str(PL)+",'"+results[res]['ST']+"')"
        s.execQuery(res_query)
    c.pr("I","--------------------------------------------------------",1)
    return