def episode_menu():
    et_tz = pytz.timezone('US/Eastern')
    date_et = common.get_date() if vars.params.get('custom_date', False) else utils.tznow(et_tz).date()

    # Avoid possible caching by using query string
    epg_url = 'https://nlnbamdnyc-a.akamaihd.net/fs/nba/feeds/epg/%d/%d_%d.js?t=%d' % (
        date_et.year, date_et.month, date_et.day, time.time())
    response = utils.fetch(epg_url)
    g_epg = json.loads(response[response.find('['):])

    for epg_item in g_epg:
        entry = epg_item['entry']

        start_et_hours, start_et_minutes = map(int, entry['start'].split(':'))
        duration_hours, duration_minutes = map(int, entry['duration'].split(':'))

        dt_et = et_tz.localize(datetime.datetime(date_et.year, date_et.month, date_et.day,
                                                 start_et_hours, start_et_minutes))
        dt_utc = dt_et.astimezone(pytz.utc)

        start_timestamp = int((dt_utc - datetime.datetime(1970, 1, 1, tzinfo=pytz.utc)).total_seconds()) * 1000  # in milliseconds
        duration = (duration_hours * 60 + duration_minutes) * 60 * 1000  # in milliseconds

        params = {
            'start_timestamp': start_timestamp,
            'duration': duration,
        }
        utils.log(params, xbmc.LOGDEBUG)

        name = '%s %s: %s' % (
            entry['start'], dt_et.tzname(),
            entry['showTitle'] if entry['showTitle'] else entry['title'])
        common.addListItem(name, '', 'nba_tv_play_episode', iconimage=entry['image'], customparams=params)
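# A minimal standalone sketch (not part of the addon above) of the ET-to-UTC epoch-millisecond
# conversion used in episode_menu, with a hypothetical 19:30 ET start and a "02:00" duration.
import datetime
import pytz

et_tz = pytz.timezone('US/Eastern')
dt_et = et_tz.localize(datetime.datetime(2023, 1, 15, 19, 30))   # assumed sample date/time
dt_utc = dt_et.astimezone(pytz.utc)
epoch = datetime.datetime(1970, 1, 1, tzinfo=pytz.utc)
start_timestamp = int((dt_utc - epoch).total_seconds()) * 1000   # milliseconds since the Unix epoch
duration = (2 * 60 + 0) * 60 * 1000                              # "hh:mm" duration -> milliseconds
print(start_timestamp, duration)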
def create_program_summary(program):
    summary = "%s, %s - %s\n\n%s" % (
        IDS_TODAY_LABEL if common.is_time_today(program.start_time) else common.get_date(program.start_time),
        common.get_time(program.start_time),
        common.get_time(program.start_time + program.duration),
        create_summary(program))
    return summary
def chooseGameMenu(mode, video_type, date2Use=None):
    try:
        if mode == "selectdate":
            date = common.get_date()
        elif mode == "oldseason":
            date = date2Use
        else:
            date = utils.nowEST()

        utils.log("current date (america timezone) is %s" % str(date), xbmc.LOGDEBUG)

        # Starts on mondays
        day = date.isoweekday()
        date = date - timedelta(day - 1)
        if mode == "lastweek":
            date = date - timedelta(7)

        addGamesLinks(date, video_type)

        # Can't sort the games list correctly because XBMC treats file items and directory
        # items differently and puts directory first, then file items (home/away feeds
        # require a directory item while only-home-feed games is a file item)
        #xbmcplugin.addSortMethod(handle=int(sys.argv[1]), sortMethod=xbmcplugin.SORT_METHOD_DATE)
    except:
        xbmcplugin.endOfDirectory(handle=int(sys.argv[1]), succeeded=False)
    return None
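# A small illustration (not from the addon) of the week-start arithmetic used above:
# isoweekday() is 1 for Monday, so subtracting (isoweekday() - 1) days snaps any date
# back to the Monday of its week.
from datetime import date, timedelta

d = date(2023, 1, 19)                       # assumed sample date: a Thursday (isoweekday 4)
monday = d - timedelta(d.isoweekday() - 1)  # -> date(2023, 1, 16), that week's Monday
print(monday)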
def date(self):
    next_update = self.py3.time_in(5)
    color, date = common.get_date()
    return {
        'cached_until': next_update,
        'composite': [{'full_text': date, 'color': color}]
    }
def ordered_page():
    if not session.get('id_user'):
        return redirect('/user_login/')

    if request.method == 'POST':
        if not session.get('cart'):
            # "You have not added at least one dish"
            session['error'] = 'Вы не добавили хотя бы 1 блюдо'
            return redirect('/cart/')
        form = OrderedForm()
        user_data = db.session.query(User).get(session['id_user'])
        order = Order(address=form.address.data,
                      phone=form.phone.data,
                      date=get_date(),
                      mail=user_data.email,
                      amount=form.amount.data,
                      status='Обрабатывается')  # "Processing"
        db.session.add(order)
        order.users.append(user_data)  # save user order
        for food_id in get_food_list():  # save list of ordered foods in db
            order.foods.append(food_id)
        db.session.commit()
        session.pop('cart')  # drop cart
        return render_template('ordered.html')

    if session.get('id_user'):
        return redirect('/account/')
    return redirect('/')
def chooseGameMenu(mode, video_type, date2Use=None):
    try:
        if mode == "selectdate":
            date = common.get_date()
        elif mode == "oldseason":
            date = date2Use
        else:
            date = utils.nowEST()
        utils.log("current date (america timezone) is %s" % date, xbmc.LOGDEBUG)

        # Starts on mondays
        day = date.isoweekday()  #2 = tuesday
        date = date - timedelta(day - 1)

        if vars.use_alternative_archive_menu:
            playlist = None
            if 'playlist' in mode:
                playlist = xbmc.PlayList(1)
                playlist.clear()
            if '4-10' in mode:
                if day <= 5:
                    date = date - timedelta(7)
                addGamesLinks(date, video_type, playlist)
                if day <= 5 and day > 1:  #no need to query empty list when day < 2
                    date = date + timedelta(7)
                    addGamesLinks(date, video_type, playlist)
            else:
                #to counter empty list on mondays for 'this week'
                #playlist.add("", xbmcgui.ListItem(str(date)))
                if day < 2:
                    date = date - timedelta(7)
                nr_weeks = 1
                if "last2weeks" in mode or 'playlist2w' in mode:
                    nr_weeks = 2
                elif "last3weeks" in mode or 'playlist3w' in mode:
                    nr_weeks = 3
                for n in range(nr_weeks, 0, -1):
                    date1 = date - timedelta(7 * (n - 1))
                    addGamesLinks(date1, video_type, playlist)
        else:
            if mode == "lastweek":
                date = date - timedelta(7)
            addGamesLinks(date, video_type)

        # Can't sort the games list correctly because XBMC treats file items and directory
        # items differently and puts directory first, then file items (home/away feeds
        # require a directory item while only-home-feed games is a file item)
        #xbmcplugin.addSortMethod(handle=int(sys.argv[1]), sortMethod=xbmcplugin.SORT_METHOD_DATE)
    except Exception as ee:
        # Close the listing before re-raising so Kodi is not left with an open directory
        xbmcplugin.endOfDirectory(handle=int(sys.argv[1]), succeeded=False)
        raise ee
    return None
def output_contact(conn, backup_extractor, is_group, contact_id, contact_name, your_name):
    reset_colors()
    html = open(os.path.join(OUTPUT_DIR, '%s.html' % sanitize_filename(contact_name)),
                'w', encoding="utf-8")
    html.write(TEMPLATEBEGINNING % ("WhatsApp",))
    c = conn.cursor()
    c.execute("SELECT {} FROM ZWAMESSAGE WHERE ZFROMJID=? OR ZTOJID=?;".format(FIELDS),
              (contact_id, contact_id))
    for row in c:
        mdatetime = get_date(row[2])
        mtext = get_text(conn, backup_extractor, row)
        mtext = mtext.replace("\n", "<br>\n")
        mfrom, color = get_from(conn, is_group, contact_id, contact_name, your_name, row)
        html.write((ROWTEMPLATE % (color, mdatetime, mfrom, mtext)))
    html.write(TEMPLATEEND)
    html.close()
def output_contact(conn, contact_conn, backup_extractor, chat_id, your_name):
    reset_colors()
    contact_name = str(chat_id)
    html = open(get_filename(conn, contact_conn, chat_id), 'w', encoding="utf-8")
    html.write(TEMPLATEBEGINNING % ("SMS/iMessage",))
    c = conn.cursor()
    c.execute("SELECT {} FROM message WHERE ROWID in ".format(FIELDS) + \
              "(SELECT message_id FROM chat_message_join WHERE chat_id=?);", (chat_id,))
    for row in c:
        mid, mtext, mdate, is_from_me, handle_id, has_attachment = row
        if has_attachment:
            mtext = handle_media(conn, backup_extractor, mid, mtext)
        mtext = mtext.replace("\n", "<br>\n")
        mdatetime = get_date(mdate)
        mfrom = your_name if is_from_me else get_contact_name(conn, contact_conn, handle_id)
        color = COLORS[0] if is_from_me else get_color(handle_id)
        html.write((ROWTEMPLATE % (color, mdatetime, mfrom, mtext)))
    html.write(TEMPLATEEND)
    html.close()
def output_contact(conn, contact_conn, backup_extractor, chat_id, your_name):
    reset_colors()
    contact_name = str(chat_id)
    html = open(get_filename(conn, contact_conn, chat_id), 'w', encoding="utf-8")
    html.write(TEMPLATEBEGINNING % ("SMS/iMessage",))
    c = conn.cursor()
    c.execute("SELECT {} FROM message WHERE ROWID in ".format(FIELDS) + \
              "(SELECT message_id FROM chat_message_join WHERE chat_id=?);", (chat_id,))
    for row in c:
        mid, mtext, mdate, is_from_me, handle_id, has_attachment = row
        if mtext is None:
            mtext = ""
        if has_attachment:
            mtext = handle_media(conn, backup_extractor, mid, mtext)
        mtext = mtext.replace("\n", "<br>\n")
        mdatetime = get_date(mdate)
        mfrom = your_name if is_from_me else get_contact_name(conn, contact_conn, handle_id)
        color = COLORS[0] if is_from_me else get_color(handle_id)
        html.write((ROWTEMPLATE % (color, mdatetime, mfrom, mtext)))
    html.write(TEMPLATEEND)
    html.close()
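# get_date() is defined elsewhere in this script. Purely as an illustration (an assumption,
# not the author's implementation): iMessage stores the `date` column as seconds -- or
# nanoseconds on newer macOS versions -- since 2001-01-01, so a conversion could look
# roughly like this.
import datetime

APPLE_EPOCH = datetime.datetime(2001, 1, 1)

def get_date_sketch(mdate):
    if mdate > 10 ** 12:          # nanosecond-precision timestamps on newer databases (assumed heuristic)
        mdate = mdate / 10 ** 9
    return (APPLE_EPOCH + datetime.timedelta(seconds=mdate)).strftime("%Y-%m-%d %H:%M:%S")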
def simulate(sim_data):
    scrip = sim_data['SC']
    trans = sim_data['TP']
    capt = sim_data['CP']
    sl = sim_data['SL']
    tar1 = sim_data['T1']
    tar2 = sim_data['T2']
    start = sim_data['TS']
    end_time = sim_data['EN']
    sim_name = sim_data['NM']
    sim_type = sim_data['ST']
    str_id = sim_data['ID']
    data = sim_data['DATA']
    sl_val = 0
    t1_val = 0
    t2_val = 0
    t1_vol = 0
    t2_vol = 0
    vol = 0
    results = {}
    entry = 0
    status = ""
    end = c.get_timestamp(c.get_only_date(start)+" "+end_time)
    sim_id = c.gen_id("sim_tracker", "sim_id")
    c.pr("I", "Starting simulation for [SIM ID -> "+sim_id+"] [Scrip -> "+scrip+"] [Type -> "+sim_type+"] [Transaction -> "+trans+"] [Entry Point -> "+c.get_date(start)+"] [Capital -> "+str(capt)+"] [T1 -> "+str(tar1)+"%] [T2 -> "+str(tar2)+"%] [SL -> "+str(sl)+"%]", 1)

    #Step 1 Load the Scrip
    #data = c.fetch_scrip_data(scrip,start,end)
    #data = c.fetch_scrip_cache(cdata,start,end)
    tkeys = list(data.keys())
    tkeys.sort()
    tctr = 0
    for tk in tkeys:
        if tk == start:
            break
        else:
            tctr += 1
    tkeys = tkeys[tctr:]

    #Step 2 Take entry at the entry point at average price of first data candle
    entry = tkeys[0]
    ep_data = data[tkeys[0]]
    #Removing key which corresponds to EP
    tkeys.pop(0)
    avg_ent = round((ep_data['open'] + ep_data['close'] + ep_data['high'] + ep_data['low'])/4, 1)

    #Step 3 Calculate the volume which can be undertaken
    vol = math.floor(capt/avg_ent)

    #Step 4 Calculate SL/T1/T2 after entry
    if trans == "SELL":
        sl_val = round(avg_ent + (round((avg_ent * sl), 1)), 1)
        t1_val = round(avg_ent - (round((avg_ent * tar1), 1)), 1)
        t2_val = round(avg_ent - (round((avg_ent * tar2), 1)), 1)
    if trans == "BUY":
        sl_val = round(avg_ent - (round((avg_ent * sl), 1)), 1)
        t1_val = round(avg_ent + (round((avg_ent * tar1), 1)), 1)
        t2_val = round(avg_ent + (round((avg_ent * tar2), 1)), 1)

    #Calculate Volume split
    t1_vol = math.ceil(vol * 0.7)
    t2_vol = vol - t1_vol

    #Step 4.1 Record the simulation data in DB
    sim_query = "INSERT INTO sim_tracker VALUES ('"+sim_id+"','"+str_id+"','"+scrip+"','"+sim_type+"','"+trans+"',"+str(capt)+","+str(tar1)+","+str(tar2)+","+str(sl)+","+str(t1_vol)+","+str(t2_vol)+",'"+start+"','"+end+"')"
    s.execQuery(sim_query)
    #c.pr("I","First Candle [Open "+str(ep_data['open'])+"] [Low "+str(ep_data['low'])+"] [High "+str(ep_data['high'])+"] [Close "+str(ep_data['close'])+"]",1)
    c.pr("I", "[EP AVG(OLHC) "+str(avg_ent)+"] [SL "+str(sl_val)+"] [T1 "+str(t1_val)+"] [T2 "+str(t2_val)+"] [Vol "+str(vol)+"] [T1 Vol "+str(t1_vol)+"] [T2 Vol "+str(t2_vol)+"]", 1)

    #Step 5 Loop through time keys and check for condition
    for key in tkeys:
        #Check if there is volume to sell
        if vol:
            ep_data = data[key]
            avg_prc = round((ep_data['open'] + ep_data['close'] + ep_data['high'] + ep_data['low'])/4, 1)
            if trans == "SELL":
                #Check if this did hit SL
                if sl_val >= avg_prc:
                    if t1_vol:
                        if avg_prc <= t1_val:
                            #c.pr("I","Volume Is At "+str(vol)+" On "+c.get_time(key)+" AVG Price "+str(avg_prc)+" T1 Hit -> Yes",1)
                            results[key] = {}
                            results[key]['EN'] = avg_ent
                            results[key]['EX'] = avg_prc
                            results[key]['VL'] = t1_vol
                            results[key]['ST'] = "T1H"
                            vol = vol - t1_vol
                            t1_vol = 0
                    if t1_vol == 0 and t2_vol:
                        if avg_prc <= t2_val:
                            #c.pr("I","Volume Is At "+str(vol)+" On "+c.get_time(key)+" AVG Price "+str(avg_prc)+" T2 Hit -> Yes",1)
                            if key in results:
                                results[key]['VL'] += t2_vol
                                results[key]['ST'] = "T2H"
                                vol = vol - t2_vol
                                t2_vol = 0
                            else:
                                results[key] = {}
                                results[key]['EN'] = avg_ent
                                results[key]['EX'] = avg_prc
                                results[key]['VL'] = t2_vol
                                results[key]['ST'] = "T2H"
                                vol = vol - t2_vol
                                t2_vol = 0
                else:
                    #c.pr("I","Volume Is At "+str(vol)+" On "+c.get_time(key)+" AVG Price "+str(avg_prc)+" SL Hit -> Yes",1)
                    results[key] = {}
                    results[key]['EN'] = avg_ent
                    results[key]['EX'] = avg_prc
                    results[key]['VL'] = vol
                    results[key]['ST'] = "SLH"
                    vol = 0
                    #exit()
            if trans == "BUY":
                if sl_val <= avg_prc:
                    if t1_vol:
                        if avg_prc >= t1_val:
                            #c.pr("I","Volume Is At "+str(vol)+" On "+c.get_time(key)+" AVG Price "+str(avg_prc)+" T1 Hit -> Yes",1)
                            results[key] = {}
                            results[key]['EN'] = avg_ent
                            results[key]['EX'] = avg_prc
                            results[key]['VL'] = t1_vol
                            results[key]['ST'] = "T1H"
                            vol = vol - t1_vol
                            t1_vol = 0
                    if t1_vol == 0 and t2_vol:
                        if avg_prc >= t2_val:
                            #c.pr("I","Volume Is At "+str(vol)+" On "+c.get_time(key)+" AVG Price "+str(avg_prc)+" T2 Hit -> Yes",1)
                            if key in results:
                                results[key]['VL'] += t2_vol
                                results[key]['ST'] = "T2H"
                                vol = vol - t2_vol
                                t2_vol = 0
                            else:
                                results[key] = {}
                                results[key]['EN'] = avg_ent
                                results[key]['EX'] = avg_prc
                                results[key]['VL'] = t2_vol
                                results[key]['ST'] = "T2H"
                                vol = vol - t2_vol
                                t2_vol = 0
                else:
                    #c.pr("I","Volume Is At "+str(vol)+" On "+c.get_time(key)+" AVG Price "+str(avg_prc)+" SL Hit -> Yes",1)
                    results[key] = {}
                    results[key]['EN'] = avg_ent
                    results[key]['EX'] = avg_prc
                    results[key]['VL'] = vol
                    results[key]['ST'] = "SLH"
                    vol = 0
        else:
            c.pr("I", "Ending Simulations As Volume is 0", 1)
            break

    #If the volume is still there at 3:10 square off at 3:10
    if vol:
        #c.pr("I","Squaring off Position At 03:10 PM",1)
        ed_data = data[key]
        avg_ext = round((ed_data['open'] + ed_data['close'] + ed_data['high'] + ed_data['low'])/4, 1)
        results[key] = {}
        results[key]['EN'] = avg_ent
        results[key]['EX'] = avg_ext
        results[key]['VL'] = vol
        results[key]['ST'] = "SQF"

    #Step 6. Display Result
    c.pr("I", "Simulation Results", 1)
    for res in results:
        PL = 0
        if trans == "BUY":
            PL = round(((results[res]['EX'] - results[res]['EN']) * results[res]['VL']), 1)
        if trans == "SELL":
            PL = round(((results[res]['EN'] - results[res]['EX']) * results[res]['VL']), 1)
        c.pr("I", "[ET -> "+c.get_time(entry)+"] [EP -> "+str(results[res]['EN'])+"] [ET -> "+c.get_time(res)+"] [XP -> "+str(results[res]['EX'])+"] [Volume -> "+str(results[res]['VL'])+"] [P/L -> "+str(PL)+"] [Status -> "+results[res]['ST']+"]", 1)
        res_query = "INSERT INTO sim_results VALUES ('"+sim_id+"',"+str(start)+","+res+","+str(results[res]['EN'])+","+str(results[res]['EX'])+","+str(results[res]['VL'])+","+str(PL)+",'"+results[res]['ST']+"')"
        s.execQuery(res_query)
    c.pr("I", "--------------------------------------------------------", 1)
    return
def orb_process(data, capital, star_param, scrip, sl, t1, t2, st_id):
    sim_data = {}
    spl_data = {}
    sim_key = 0
    ctr = 1
    breakout = 0  #0 -> No Breakout 1 -> Up range 2 -> Down Range
    candle = (star_param['CL'] * 5)  # Size of a candle
    vol_chg = star_param['VC']  # Change of Volume
    time_frm = star_param['TF']  # Number of Candles
    break_per = star_param['BP']  # % of breakout
    max_can = star_param['MC']  # Max candles to be checked

    #Data dictionary contains data for a given day
    #Split the candle into chunks
    c.pr("I", "Candle -> "+str(candle)+" Volume Change -> "+str(vol_chg)+" Time Frame -> "+str(time_frm)+" Breakout Percentage -> "+str(break_per)+" Max Candles To Check -> "+str(max_can), 1)
    spl_data = c.chunk_time(data, candle)
    up_range, down_range, avg_vol = get_opr_range(spl_data, time_frm)
    req_vol = int(avg_vol * vol_chg)
    c.pr("I", "Up Range -> "+str(up_range)+" Down Range -> "+str(down_range)+" Average Volume -> "+str(avg_vol)+" Required Volume -> "+str(req_vol), 1)

    for sd in spl_data:
        if ctr <= max_can:
            close = spl_data[sd]['close']
            vol = spl_data[sd]['volume']
            vchk = False
            #Check if the volume is more than the threshold
            if vol > req_vol:
                #Volume is more than threshold
                vchk = True
            #Check if the close is greater than the up range or lower than the down range.
            #Upper Breakout
            if close >= up_range:
                c.pr("I", "[Counter -> "+str(ctr)+"] [Date -> "+str(c.get_date(sd))+"] [Up Range -> "+str(up_range)+"] [Down Range -> "+str(down_range)+"] [Close -> "+str(close)+"] [Present Volume -> "+str(vol)+"] [Action -> Buy]", 1)
                sim_key = sd
                sim_data['SC'] = scrip
                sim_data['TP'] = "BUY"
                sim_data['SL'] = sl
                sim_data['T1'] = t1
                sim_data['T2'] = t2
                sim_data['CP'] = capital
                sim_data['TS'] = sim_key
                sim_data['EN'] = "15:10:00"
                sim_data['NM'] = "ORB"
                sim_data['ST'] = "ACT"
                sim_data['ID'] = st_id
                break
            #Lower Breakout
            if close <= down_range:
                c.pr("I", "[Counter -> "+str(ctr)+"] [Date -> "+str(c.get_date(sd))+"] [Up Range -> "+str(up_range)+"] [Down Range -> "+str(down_range)+"] [Close -> "+str(close)+"] [Present Volume -> "+str(vol)+"] [Action -> Sell]", 1)
                sim_key = sd
                sim_data['SC'] = scrip
                sim_data['TP'] = "SELL"
                sim_data['SL'] = sl
                sim_data['T1'] = t1
                sim_data['T2'] = t2
                sim_data['CP'] = capital
                sim_data['TS'] = sim_key
                sim_data['EN'] = "15:10:00"
                sim_data['NM'] = "ORB"
                sim_data['ST'] = "ACT"
                sim_data['ID'] = st_id
                break
        ctr = ctr + 1
    sim_data['DATA'] = data
    return sim_key, sim_data
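# get_opr_range() is not shown in this snippet; a hedged sketch of what it appears to do,
# based on how orb_process() calls it (opening-range high/low and average volume over the
# first `time_frm` chunked candles). This is an assumption, not the author's code, and it
# assumes each chunk carries 'high', 'low' and 'volume' keys.
def get_opr_range_sketch(spl_data, time_frm):
    keys = sorted(spl_data.keys())[:time_frm]
    up_range = max(spl_data[k]['high'] for k in keys)
    down_range = min(spl_data[k]['low'] for k in keys)
    avg_vol = sum(spl_data[k]['volume'] for k in keys) / len(keys)
    return up_range, down_range, avg_vol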
def display_stats(st_id):
    #Step 1 Get the list of stocks traded with strategy
    sim_data = s.sql_hash("sim_tracker", "sim_id", "scrip:capital:type:transaction:capital", "WHERE strategy_id='"+st_id+"' ORDER BY transaction")
    sim_ids = list(sim_data.keys())
    sim_ids_str = str(sim_ids).replace("[", "(").replace("]", ")")
    sim_map = {}
    query = "SELECT * FROM sim_results WHERE sim_id IN "+sim_ids_str
    res_map = {}
    sum_map = {}
    db_obj = s.sql_conn()
    cursor = db_obj.cursor()
    try:
        cursor.execute(query)
        results = cursor.fetchall()
        for row in results:
            if row[0] not in sum_map:
                sum_map[row[0]] = {}
                sum_map[row[0]]['SC'] = sim_data[row[0]]['scrip']
                sum_map[row[0]]['TR'] = sim_data[row[0]]['transaction']
                sum_map[row[0]]['ST'] = sim_data[row[0]]['type']
                sum_map[row[0]]['EP'] = "9999999999"  #MIN OF ALL DP
                sum_map[row[0]]['XP'] = "0"  #MAX OF ALL DP
                sum_map[row[0]]['T1H'] = {}
                sum_map[row[0]]['T1H']['EP'] = 0
                sum_map[row[0]]['T1H']['XP'] = 0
                sum_map[row[0]]['T1H']['VL'] = 0
                sum_map[row[0]]['T2H'] = {}
                sum_map[row[0]]['T2H']['EP'] = 0
                sum_map[row[0]]['T2H']['XP'] = 0
                sum_map[row[0]]['T2H']['VL'] = 0
                sum_map[row[0]]['SLH'] = {}
                sum_map[row[0]]['SLH']['EP'] = 0
                sum_map[row[0]]['SLH']['XP'] = 0
                sum_map[row[0]]['SLH']['VL'] = 0
                sum_map[row[0]]['SQF'] = {}
                sum_map[row[0]]['SQF']['EP'] = 0
                sum_map[row[0]]['SQF']['XP'] = 0
                sum_map[row[0]]['SQF']['VL'] = 0
                sum_map[row[0]]['PL'] = 0
                sum_map[row[0]]['VL'] = 0
            if int(sum_map[row[0]]['EP']) > int(row[1]):
                sum_map[row[0]]['EP'] = row[1]
            if int(sum_map[row[0]]['XP']) < int(row[2]):
                sum_map[row[0]]['XP'] = row[2]
            sum_map[row[0]]['PL'] += row[6]
            sum_map[row[0]]['VL'] += row[5]
            sum_map[row[0]][row[7]]['EP'] = row[3]
            sum_map[row[0]][row[7]]['XP'] = row[4]
            sum_map[row[0]][row[7]]['VL'] = row[5]
            if row[0] not in sim_map:
                sim_map[row[0]] = {}
            if row[7] not in sim_map[row[0]]:
                sim_map[row[0]][row[7]] = {}
                sim_map[row[0]][row[7]]['EN'] = row[1]
                sim_map[row[0]][row[7]]['XT'] = row[2]
                sim_map[row[0]][row[7]]['EP'] = row[3]
                sim_map[row[0]][row[7]]['XP'] = row[4]
                sim_map[row[0]][row[7]]['VL'] = row[5]
                sim_map[row[0]][row[7]]['PL'] = row[6]
    except (sql.Error, sql.Warning) as e:
        print("-E- Query Failed")
        print(e)
        db_obj.rollback()

    for sim_id in sim_data:
        scrip = sim_data[sim_id]['scrip']
        capital = sim_data[sim_id]['capital']
        stype = sim_data[sim_id]['type']
        trans = sim_data[sim_id]['transaction']
        if scrip not in res_map:
            res_map[scrip] = {}
            res_map[scrip]['ACT'] = {}
            res_map[scrip]['RAN'] = {}
        if trans not in res_map[scrip][stype]:
            res_map[scrip][stype][trans] = {}
            res_map[scrip][stype][trans]['CP'] = 0
            res_map[scrip][stype][trans]['TD'] = 0
            res_map[scrip][stype][trans]['SR'] = 0
            res_map[scrip][stype][trans]['PL'] = 0
            res_map[scrip][stype][trans]['WN'] = 0
            res_map[scrip][stype][trans]['LS'] = 0
        res_map[scrip][stype][trans]['CP'] = capital
        res_map[scrip][stype][trans]['TD'] += 1
        trade_stat = 0
        for ts in sim_map[sim_id]:
            res_map[scrip][stype][trans]['PL'] += sim_map[sim_id][ts]['PL']
            if sim_map[sim_id][ts]['PL'] > 0:
                trade_stat += 1
            else:
                trade_stat -= 1
        if trade_stat > 0:
            res_map[scrip][stype][trans]['WN'] += 1
        else:
            res_map[scrip][stype][trans]['LS'] += 1
        res_map[scrip][stype][trans]['SR'] = round((res_map[scrip][stype][trans]['WN']/res_map[scrip][stype][trans]['TD']) * 100, 2)

    print("----------------------------------------------------------------------------------------------------------------------------------------")
    print("| Simulation Summary |")
    print("----------------------------------------------------------------------------------------------------------------------------------------")
    print("| Scrip | Simulation | Transaction | Capital | Sims | Wins | Losses | Success % | P/L | Exit Capital |")
    print("----------------------------------------------------------------------------------------------------------------------------------------")
    for scrip in res_map.keys():
        for sim in res_map[scrip].keys():
            for trans in res_map[scrip][sim].keys():
                #print(trans)
                #c.dump(res_map[scrip][sim])
                msg = "|"+gs(scrip, 21)+"|"+gs(sim, 12)+"|"+gs(trans, 15)+"|"+gs(str(res_map[scrip][sim][trans]['CP']), 12)+"|"
                msg = msg+gs(str(res_map[scrip][sim][trans]['TD']), 9)+"|"+gs(str(res_map[scrip][sim][trans]['WN']), 9)+"|"
                msg = msg+gs(str(res_map[scrip][sim][trans]['LS']), 8)+"|"+gs(str(res_map[scrip][sim][trans]['SR'])+"%", 12)+"|"
                msg = msg+gs(str(round(res_map[scrip][sim][trans]['PL'], 3)), 12)+"|"
                msg = msg+gs(str(round(res_map[scrip][sim][trans]['PL'] + res_map[scrip][sim][trans]['CP'], 2)), 15)+"|"
                print(msg)
    print("----------------------------------------------------------------------------------------------------------------------------------------")

    print("\n--------------------------------------------------------------------------------------------------------------------------------------------------------")
    print("| Detailed Summary Actual |")
    print("--------------------------------------------------------------------------------------------------------------------------------------------------------")
    print("| Scrip | Date | Entry | Exit | Trans | Vlm | T1 | T2 | SL | SQ | P/L |")
    print("--------------------------------------------------------------------------------------------------------------------------------------------------------")
    #c.dump(sum_map)
    STC = ['T1H', 'T2H', 'SLH', 'SQF']
    sel_act = ""
    sel_ran = ""
    ranmsg = ""
    for sim in sum_map:
        msg = "|"+gs(sum_map[sim]['SC'], 11)+"|"
        msg += gs(c.get_date(sum_map[sim]['EP'])[0:10], 12)+"|"
        msg += gs(c.get_date(sum_map[sim]['EP'])[11:-3], 7)+"|"
        msg += gs(c.get_date(sum_map[sim]['XP'])[11:-3], 8)+"|"
        msg += gs(sum_map[sim]['TR'], 7)+"|"
        msg += gs(str(sum_map[sim]['VL']), 5)+"|"
        for ST in STC:
            if sum_map[sim][ST]['VL']:
                tst = str(sum_map[sim][ST]['VL'])+" "+str(sum_map[sim][ST]['EP'])+" "+str(sum_map[sim][ST]['XP'])
                msg += gs(tst, 20)+"|"
            else:
                msg += gs("NONE", 20)+"|"
        msg += gs(str(round(sum_map[sim]['PL'], 2)), 10)+"|"
        if sum_map[sim]['ST'] == "ACT":
            if sum_map[sim]['TR'] == "BUY":
                print(msg)
            else:
                sel_act += msg+"\n"
        else:
            if sum_map[sim]['TR'] == "BUY":
                ranmsg += msg+"\n"
            else:
                sel_ran += msg+"\n"
    print(sel_act[0:-1])
    print("--------------------------------------------------------------------------------------------------------------------------------------------------------")

    print("\n--------------------------------------------------------------------------------------------------------------------------------------------------------")
    print("| Random Walk Summary Actual |")
    print("--------------------------------------------------------------------------------------------------------------------------------------------------------")
    print("| Scrip | Date | Entry | Exit | Trans | Vlm | T1 | T2 | SL | SQ | P/L |")
    print("--------------------------------------------------------------------------------------------------------------------------------------------------------")
    print(ranmsg[0:-1])
    print(sel_ran[0:-1])
    print("--------------------------------------------------------------------------------------------------------------------------------------------------------")
    cursor.close()
    del cursor
    db_obj.close()
    return
for assemble_name_in_excel in package_dict.keys():
    if assemble_name_in_excel not in assemble_name_list_in_db:
        print "sheet name setting error"
        sys.exit(1)

# Start packaging the APK files; only package when the new_package column in the Excel sheet is "yes"
start_time = datetime.datetime.now()
# Root directory
root_dir = 'D:/deploy/AndroidStudio-Project/native5.0_pro'
# Get the SVN revision number of the current working copy
wc_svn_number = common.get_wc_svn_number(root_dir)
# Output directory for the generated APKs
apk_root = '%s/apk/%s' % (root_dir, common.get_date())
# Project directory
project_root = '%s/ivp50_pro' % root_dir
os.chdir(project_root)

# Start packaging the APKs
print
print "---------------------------"
print " Found %s apk need package" % apk_number
print "---------------------------"
for assemble_name, every_platform_channel_list in package_dict.iteritems():
    for channel_package_info_dict in every_platform_channel_list:
        channel_package_info_dict['package_date_time'] = common.get_date_time()
        channel_package_info_dict['platform'] = assemble_name.lower()
# -*- coding: utf-8 -*-
import dataiku
from dataiku import pandasutils as pdu
import pandas as pd
import numpy as np
import os
import census_resources
import common
from dataiku.customrecipe import *
import feature_selection
import census_metadata
import random

process_date = common.get_date()

#------------------------------------------- SETTINGS
P_COLUMN_STATES = get_recipe_config()['param_column_state']  ### column indicating the state to be created
P_COLUMN_STATES_LOWER = True  ### check box: do we need to lower the state code? 'DC' to 'dc'?
P_STATES_TYPE_NAME = get_recipe_config()['param_state_format']  ### type of state label: is it DC or DistrictOfColumbia?

## input dataset content
P_ID_COL = get_recipe_config()['param_column_id']
P_CENSUS_LEVEL_COLUMN = get_recipe_config()['param_census_level_column']  #'block_group' ### Column containing a level to be considered. If empty: ALL

# US Census content
P_CENSUS_LEVEL = get_recipe_config()['param_census_level']  #'BLOCK_GROUP' #'TRACT' #...
P_CENSUS_CONTENT = get_recipe_config()[
def ohl_process(data, thr, var, scrip, capital, max_dp, sl, t1, t2, st_id):
    #Here you have to identify O -> H or O -> L for first 15 mins with variance of var
    keys = list(data.keys())
    opn = data[keys[0]]['open']
    hig = data[keys[0]]['high']
    low = data[keys[0]]['low']
    avg_opn = ((opn + hig + low) / 3)
    avg_opn = opn
    topn = round(avg_opn + (avg_opn * var), 1)
    bopn = round(avg_opn - (avg_opn * var), 1)
    sim_data = {}
    ran_data = {}
    sim_key = 0

    #Check if open = High
    ctr = 1
    cthr = 0
    while ctr != max_dp:
        time = c.get_date(keys[ctr])
        copn = data[keys[ctr]]['open']
        chig = data[keys[ctr]]['high']
        clow = data[keys[ctr]]['low']
        cavg = ((copn + chig + clow) / 3)
        if bopn > cavg:
            cthr = cthr + 1
        ctr = ctr + 1
    if cthr >= thr:
        sim_key = keys[ctr]
        sim_data['SC'] = scrip
        sim_data['TP'] = "SELL"
        sim_data['SL'] = sl
        sim_data['T1'] = t1
        sim_data['T2'] = t2
        sim_data['CP'] = capital
        sim_data['TS'] = sim_key
        sim_data['EN'] = "15:10:00"
        sim_data['NM'] = "OHL"
        sim_data['ST'] = "ACT"
        sim_data['ID'] = st_id

    #Check Open = LOW
    ctr = 1
    cthr = 0
    while ctr != max_dp:
        time = c.get_date(keys[ctr])
        copn = data[keys[ctr]]['open']
        chig = data[keys[ctr]]['high']
        clow = data[keys[ctr]]['low']
        cavg = ((copn + chig + clow) / 3)
        if topn < cavg:
            cthr = cthr + 1
        ctr = ctr + 1
    if cthr >= thr:
        sim_key = keys[ctr]
        sim_data['SC'] = scrip
        sim_data['TP'] = "BUY"
        sim_data['SL'] = sl
        sim_data['T1'] = t1
        sim_data['T2'] = t2
        sim_data['CP'] = capital
        sim_data['TS'] = sim_key
        sim_data['EN'] = "15:10:00"
        sim_data['NM'] = "OHL"
        sim_data['ST'] = "ACT"
        sim_data['ID'] = st_id

    sim_data['DATA'] = data
    return sim_key, sim_data
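# Small numeric illustration (assumed values, not from market data) of the open-high/open-low
# band used in ohl_process(): with an opening price of 100.0 and var = 0.002, candles whose
# (O+H+L)/3 average stays below bopn count toward the SELL threshold, and above topn toward BUY.
avg_opn = 100.0
var = 0.002
topn = round(avg_opn + avg_opn * var, 1)   # 100.2
bopn = round(avg_opn - avg_opn * var, 1)   # 99.8
cavg = (99.5 + 99.9 + 99.2) / 3            # ~99.53 for a sample candle
print(topn, bopn, cavg < bopn)             # True -> this candle counts toward the SELL threshold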