Example #1
def appSaveMat(match_id):
    # Skip matches whose feature vectors were already cached as <match_id>.npy.
    if os.path.exists(str(match_id) + '.npy'):
        return 0
    m_match = spider.get_match(match_id)
    #if isStart(m_match.match_time):
    #  return 0
    match_url = 'http://odds.500.com/fenxi/ouzhi-' + str(match_id) + '.shtml'
    content = spider.url_get(match_url, "gb2312")
    (home_url, away_url) = netdata.get_Team_url(content)
    Odds_Vec = netdata.get_now_all_odds(content)
    # Fetch each team's history once and reuse the result tuple
    # instead of scraping the same page twice.
    home_res = BSc.getTeamMatDis(home_url, Odds_Vec[0], Odds_Vec[2], 1)
    away_res = BSc.getTeamMatDis(away_url, Odds_Vec[2], Odds_Vec[0], 0)
    home_rv = home_res[0]
    away_rv = away_res[0]
    Pre_Vec_Home = home_rv.get_pre_vec()
    Pre_Vec_Away = away_rv.get_pre_vec()
    betfair_data = fm.getBetfairsth(match_id)
    Betfair_Vec = betfair_data[0]
    Company_Vec = betfair_data[1]
    Home_EG_Vec = [home_rv.get_average_goal(), 0, away_rv.get_average_lose()]
    Away_EG_Vec = [away_rv.get_average_goal(), 0, home_rv.get_average_lose()]
    (home_this_goals, home_this_loses) = home_res[4:6]
    (away_this_goals, away_this_loses) = away_res[4:6]
    Home_VG_Vec = [np.var(home_this_goals), 0, np.var(away_this_loses)]
    Away_VG_Vec = [np.var(away_this_goals), 0, np.var(home_this_loses)]
    return [list(Pre_Vec_Home), list(Pre_Vec_Away), list(Odds_Vec), Betfair_Vec,
            Company_Vec, Home_EG_Vec, Away_EG_Vec, Home_VG_Vec, Away_VG_Vec]
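appSaveMat checks for `<match_id>.npy` but never writes it. A minimal caller could persist the returned feature list with `numpy.save`; the wrapper name `cacheMat` below is hypothetical.

def cacheMat(match_id):
    # Hypothetical helper: build the feature list and cache it as <match_id>.npy,
    # so the os.path.exists() check in appSaveMat() short-circuits on the next run.
    mat = appSaveMat(match_id)
    if mat != 0:
        np.save(str(match_id) + '.npy', np.array(mat, dtype=object))
    return mat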
Example #2
def getTeamMatDis(team_url, wodd, lodd, ha_now):
    content = spider.url_get(team_url + '/teamfixture', "gb2312")
    Reses = netdata.get_TeamMatchHistory(content)  #1*30
    Odds = netdata.get_TeamOddsHistory(content)  #1*30
    HA = netdata.get_TeamHAHistory(content)  #1*30
    #PGR      = trans.PG_to_R(Reses[0:len(Reses)-1])
    (PGR_g, PGR_l) = netdata.get_TeamGoalHistory(content)  #1*30
    #del PGR_g[0];del PGR_g[1];del PGR_l[0];del PGR_l[1];
    if len(PGR_g) < 20:
        return -1
    PGO = trans.ODD_to_O_29(Odds[0:len(Odds)])  #1*29
    FOR = trans.PG_to_O_29(np.array(PGR_g) - np.array(PGR_l), Odds)  #1*29
    del PGR_g[0]
    del PGR_l[0]
    del HA[0]
    Mat = trans.RFOH_to_StaticMat(PGR_g, PGR_l, PGO, HA, FOR)
    #print Mat
    for_now = trans.FOR_29(PGR_g[-1] - PGR_l[-1], Odds[-1])
    #print for_now
    odd_now = trans.OD(lodd / wodd)
    #ha_now  =
    #print odd_now
    #print for_now
    #return np.array(Mat[odd_now][for_now])
    #rv      = (Mat[odd_now][for_now][ha_now],wodd)
    # if rv:
    #     pass
    last_3_goals = PGR_g[-3:]
    last_3_losts = PGR_l[-3:]
    (this_goals, this_loses) = get_This_Goals(PGR_g, PGR_l, PGO, FOR, HA,
                                              odd_now, for_now, ha_now)
    return (Mat[odd_now][for_now][ha_now], wodd, last_3_goals, last_3_losts,
            this_goals, this_loses)
Example #3
def getTeamMatDis(team_url, wodd, lodd, ha_now):
    content = spider.url_get(team_url + '/teamfixture', "gb2312")
    Reses = netdata.get_TeamMatchHistory(content)  #1*30
    Odds = netdata.get_TeamOddsHistory(content)  #1*30
    HA = netdata.get_TeamHAHistory(content)  #1*30
    #PGR      = trans.PG_to_R(Reses[0:len(Reses)-1])
    (PGR_g, PGR_l) = netdata.get_TeamGoalHistory(content)  #1*30
    #del PGR_g[0];del PGR_g[1];del PGR_l[0];del PGR_l[1];
    if len(PGR_g) < 20:
        return -1
    PGO = trans.ODD_to_O_29(Odds[0:len(Odds)])  #1*29
    FOR = trans.PG_to_O_29(np.array(PGR_g) - np.array(PGR_l))  #1*29
    del PGR_g[0]
    del PGR_l[0]
    del HA[0]
    Mat = trans.RFOH_to_StaticMat(PGR_g, PGR_l, PGO, HA, FOR)
    #print Mat
    for_now = trans.FOR_29(PGR_g[-1] - PGR_l[-1])
    #print for_now
    odd_now = trans.OD(lodd / wodd)
    #ha_now  =
    #print odd_now
    #print for_now
    #return np.array(Mat[odd_now][for_now])
    rv = (Mat[odd_now][for_now][ha_now], wodd)
    # if rv:
    #     pass
    # print '>>>>>>>>>>>>'
    # print 'win odds: ' + str(wodd)
    # print 'expected goals: ' + str(rv.get_average_goal())
    # print 'expected goals conceded: ' + str(rv.get_average_lose())
    # print " --- " + str(rv.get_pre_vec()) + " --- confidence (matches): " + str(rv.num_matches)
    return rv
def getAsiaOdds_test(match_id):
    url = 'http://odds.500.com/fenxi/yazhi-' + str(match_id) + '.shtml'
    content = spider.url_get(url, "gb2312")
    index_r = re.compile(
        r'class="ying">(.*)</td>|class="ping">(.*)</td>|class="shu">(.*)</td>|class="">(.*)</td>'
    )
    indexes = notEm_Aodds(index_r.findall(content)[0:25])
    return indexes
Example #5
def toRedis(match_id,pool):
    m_match = spider.get_match(match_id)
    match_url = 'http://odds.500.com/fenxi/shuju-'+ str(match_id) +'.shtml'
    content   = spider.url_get(match_url,"gb2312")
    WebSuggest = netdata.get_Suggest(content)
    (home_url,away_url) = netdata.get_Team_url(content)
    odd_url   = 'http://odds.500.com/fenxi/ouzhi-'+ str(match_id) +'.shtml'
    content   = spider.url_get(odd_url,"gb2312")
    (wodd,lodd) = netdata.get_now_Odds(content)
    (home_rv,home_rv_wodd)=getTeamMatDis(home_url,wodd,lodd)
    (away_rv,away_rv_wodd)=getTeamMatDis(away_url,lodd,wodd)
    print '==============================================='
    (BigBall,SmaBall,MySuggest)=SelShow(home_rv,away_rv)
    show(home_rv,home_rv_wodd)
    show(away_rv,away_rv_wodd)
    home_vec = home_rv.get_pre_vec()
    away_vec = away_rv.get_pre_vec()
    match_dict = {"web":m_match.match_link, 
                  "name":m_match.match_name, 
                  "hometeam":m_match.host_team, 
                  "awayteam":m_match.guest_team, 
                  "time":m_match.match_time,
                  "homeodd":str(wodd),
                  "awayodd":str(lodd),
                  "homenum":str(home_rv.num_matches),
                  "awaynum":str(away_rv.num_matches),
                  "homepregoal":str(home_rv.get_average_goal()),
                  "awaypregoal":str(away_rv.get_average_goal()),
                  "homeprelose":str(home_rv.get_average_lose()),
                  "awayprelose":str(away_rv.get_average_lose()),
                  "biggoal":str(BigBall),
                  "smagoal":str(SmaBall),
                  "hpl":str(home_vec[0]),
                  "hpd":str(home_vec[1]),
                  "hpw":str(home_vec[2]),
                  "apl":str(away_vec[0]),
                  "apd":str(away_vec[1]),
                  "apw":str(away_vec[2]),
                  "websuggest":WebSuggest,
                  "mysuggest":str(MySuggest),
                  "homerealgoal":'null',
                  "awayrealgoal":'null'}  
    #pool = redis.ConnectionPool(host='127.0.0.1',port=6379)
    #pool = redis.ConnectionPool(host='115.28.138.54',port=6379)
    rd = redis.StrictRedis(connection_pool=pool)
    # Key the hash by the match id passed to this function rather than by sys.argv,
    # so it also works when not invoked from the command line.
    rd.hmset(str(match_id), match_dict)
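A minimal way to drive toRedis, reusing the connection-pool settings from the commented-out lines above; assuming a local Redis instance and the match id as the first command-line argument.

if __name__ == '__main__':
    # Assumption: local Redis on the default port, match id passed on the CLI.
    pool = redis.ConnectionPool(host='127.0.0.1', port=6379)
    toRedis(sys.argv[1], pool)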
Example #6
def app(match_id):
    #match_id  = sys.argv[1]
    m_match = spider.get_match(match_id)
    if isStart(m_match.match_time):
        print 'Match already started'
        return 0
    m_match.display()
    match_url = 'http://odds.500.com/fenxi/shuju-'+ str(match_id) +'.shtml'
    content   = spider.url_get(match_url,"gb2312")
    WebSuggest = netdata.get_Suggest(content)
    (home_url,away_url) = netdata.get_Team_url(content)
    odd_url   = 'http://odds.500.com/fenxi/ouzhi-'+ str(match_id) +'.shtml'
    content   = spider.url_get(odd_url,"gb2312")
    (wodd,lodd) = netdata.get_now_Odds(content)
    #print home_url;print wodd;print lodd;
    (home_rv,home_rv_wodd,home_last_3_goals,home_last_3_losts,home_this_goals,home_this_loses)=BSc.getTeamMatDis(home_url,wodd,lodd,1)
    (away_rv,away_rv_wodd,away_last_3_goals,away_last_3_losts,away_this_goals,away_this_loses)=BSc.getTeamMatDis(away_url,lodd,wodd,0)
    if home_rv.num_matches<1 or away_rv.num_matches<1:
       print '          '
       print '          '
       return 0
    print '==============================================='
    SelShow(home_rv,away_rv)
    show(home_rv,home_rv_wodd,home_this_goals,home_this_loses)
    show(away_rv,away_rv_wodd,away_this_goals,away_this_loses)
    print '====================== ⚽ ======================'
    print m_match.host_team + ' goals in last 3 matches: ' + str(home_last_3_goals) + ' conceded in last 3 matches: ' + str(home_last_3_losts)
    print m_match.guest_team + ' goals in last 3 matches: ' + str(away_last_3_goals) + ' conceded in last 3 matches: ' + str(away_last_3_losts)
    print '=============== 🐷 Macau tips 🐷 ================='
    print WebSuggest
    showMarket(match_id)
    #showOddsChangePre(match_id)
    print '===============  Macau Asian handicap  ================='
    appA.AppUseAomen(match_id)
    print '===============  Macau opening line  ================='
    appA.AppUseAomen_s(match_id)
    print '===============  Wedd over/under  ================='
    #fw.AppUseWeddOdds(match_id)
    #os.system('python AppUseWedd.py ' + str(match_id) )
    appW.AppUseWedd(match_id)
    print '===============  Wedd opening line  ================='
    appW.AppUseWedd_s(match_id)
    #os.system('./AutobetTool.sh ' + str(match_id) )
    appSaveToCSV(match_id)
    print '          '
    print '          '
def test(match_id):
    #match_id  = sys.argv[1]
    m_match = spider.get_match(match_id)
    m_match.display()
    match_url = 'http://odds.500.com/fenxi/shuju-'+ str(match_id) +'.shtml'
    content   = spider.url_get(match_url,"gb2312")
    (home_url,away_url) = netdata.get_Team_url(content)
    odd_url   = 'http://odds.500.com/fenxi/ouzhi-'+ str(match_id) +'.shtml'
    content   = spider.url_get(odd_url,"gb2312")
    (wodd,lodd) = netdata.get_now_Odds(content)
    (home_rv,home_rv_wodd,home_goal)=getTeamMatDis(home_url,wodd,lodd,1)
    (away_rv,away_rv_wodd,away_goal)=getTeamMatDis(away_url,lodd,wodd,0)
    print '==============================================='
    SelShow(home_rv,away_rv)
    show(home_rv,home_rv_wodd)
    show(away_rv,away_rv_wodd)
    print '          '
    print '          '
def getBosswl_test(match_id):
    url = 'http://odds.500.com/fenxi/touzhu-' + str(match_id) + '.shtml'
    content = spider.url_get(url, "gb2312")
    index_r = re.compile(r'<td>(.*)</td>')
    indexes = index_r.findall(content)
    index_w = indexes.index('盈亏指数')  # locate the "profit/loss index" column header on the page
    return [
        indexes[index_w + 10], indexes[index_w + 19], indexes[index_w + 29]
    ]
def getOriAndNowOdds(match_id):
    #onclick="OZ.r(this)" style="cursor:pointer" > 2.50</td>
    url = 'http://odds.500.com/fenxi/ouzhi-' + str(match_id) + '.shtml'
    content = spider.url_get(url, "gb2312")
    #index_r = re.compile(r'0">(.*)</td>|0" class="">(.*)</td>')
    index_r = re.compile(r'style="cursor:pointer" >(.*)</td>')
    #index_r = re.compile(r'style="cursor:pointer"(.*)</td>')
    indexes = index_r.findall(content)[0:6]
    return indexes
def get_match_res(match_id):
    m_match = spider.get_match(match_id)
    match_url = 'http://odds.500.com/fenxi/shuju-' + str(match_id) + '.shtml'
    goal_res_r = re.compile(r'<strong>(.*):(.*)</strong>')
    content = spider.url_get(match_url, "gb2312")
    goal_res = goal_res_r.findall(content)
    if len(goal_res) == 0:
        return 0
    res = [float(goal_res[0][0]), float(goal_res[0][1])]
    return res
def getRes(team_url, wodd, lodd):
    content = spider.url_get(team_url + 'teamfixture', "gb2312")
    Reses = netdata.get_TeamMatchHistory(content)
    Odds = netdata.get_TeamOddsHistory(content)
    O1 = trans.PG_to_O(Reses)
    O2 = trans.ODD_to_O(Odds)
    I = trans.RS_to_I(Reses)
    known_O1 = [trans.FOR(Reses[len(Reses) - 2], Reses[len(Reses) - 1])]
    known_O2 = [trans.OD(lodd / wodd)]
    return mk.makeAverageRes(O1, O2, I, known_O1, known_O2)
def getBsLotnum_test(match_id):
    #class="tb_tdul_pan ying">
    #class="tb_tdul_pan ">
    url = 'http://odds.500.com/fenxi/daxiao-' + str(match_id) + '.shtml'
    content = spider.url_get(url, "gb2312")
    #index_r = re.compile(r'0">(.*)</td>|0" class="">(.*)</td>')
    index_r = re.compile(
        r'class="tb_tdul_pan ">(.*)</td>|class="tb_tdul_pan ying">(.*)</td>|class="tb_tdul_pan ping">(.*)</td>'
    )
    indexes = notEm(index_r.findall(content)[0:10])
    return indexes
Example #13
def print_match_res(match_id):
    m_match = spider.get_match(match_id)
    match_url = 'http://odds.500.com/fenxi/shuju-' + str(match_id) + '.shtml'
    res_r = re.compile(r'<strong>(.*):(.*)</strong>')
    content = spider.url_get(match_url, "gb2312")
    res = res_r.findall(content)

    print m_match.match_name
    print m_match.match_time
    print m_match.host_team + ' ' + res[0][0] + ':' + res[0][1] + ' ' + m_match.guest_team
    print ' '
def getKellyindex_test(match_id):
    #<td row="1" class="" width="33.3%" >0.91</td>
    #<td row="1" class="">0.94</td>
    url = 'http://odds.500.com/fenxi/ouzhi-' + str(match_id) + '.shtml'
    content = spider.url_get(url, "gb2312")
    index_r = re.compile(
        r'<td row="1" class="" width="33.3%" >(.*)</td>|<td row="1" class="">(.*)</td>|<td row="1" width="33.3%"  class=" ying">(.*)</td>|<td row="1" width="33.3%"  class=" ping">(.*)</td>|<td row="1"  class=" ping">(.*)</td>|<td row="1"  class=" ying">(.*)</td>'
    )
    #print index_r.findall(content)[0:6]
    indexes = notEm(index_r.findall(content)[0:6])
    return indexes
def getMainMatches():
    #<a href="/fenxi/shuju-702834.shtml" target="_blank" id="link147">数据</a>
    #<td align="center"><a href="/fenxi/shuju-631067.shtml" target="_blank">数据
    content = spider.url_get("http://odds.500.com/ouzhi.php?cid=0&type=2",
                             "gb2312")
    #url_r = re.compile(r'<td align="center"><a href="/fenxi/shuju-(.*).shtml"')
    url_r = re.compile(
        r'</td><td align="center"><a href="/fenxi/shuju-(.*?).shtml" target="_blank">数据</a><br /><a href='
    )  # '数据' is the literal "data" link label on the listing page
    Match_URLs = url_r.findall(content)
    return Match_URLs[0]
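For context, a driver might feed the scraped id into the per-match report; pairing getMainMatches with app() from Example #6 is an assumption here, since the two functions come from different examples.

if __name__ == '__main__':
    # Sketch only: report on the first match listed on the Euro-odds overview page.
    main_match_id = getMainMatches()
    app(main_match_id)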
Example #16
def HD_GetMatch(match_id):
    m_match = spider.get_match(match_id)
    match_url = 'http://odds.500.com/fenxi/shuju-' + str(match_id) + '.shtml'
    content = spider.url_get(match_url, "gb2312")
    WebSuggest = netdata.get_Suggest(content)
    (home_url, away_url) = netdata.get_Team_url(content)
    odd_url = 'http://odds.500.com/fenxi/ouzhi-' + str(match_id) + '.shtml'
    content = spider.url_get(odd_url, "gb2312")
    (wodd, lodd) = netdata.get_now_Odds(content)
    (home_rv, home_rv_wodd, home_goal) = getTeamMatDis(home_url, wodd, lodd, 1)
    (away_rv, away_rv_wodd, away_goal) = getTeamMatDis(away_url, lodd, wodd, 0)
    home_vec = home_rv.get_pre_vec()
    away_vec = away_rv.get_pre_vec()
    if home_rv.num_matches < 1 or away_rv.num_matches < 1:
        return 0
    (BigBall, SmaBall) = BSc.CalGoal(home_rv.get_average_goal(),
                                     home_rv.get_average_lose(),
                                     away_rv.get_average_goal(),
                                     away_rv.get_average_lose())
    BS_IfRight = Task_BSJudge(BigBall, SmaBall, home_goal, away_goal)
    return (home_rv.num_matches, away_rv.num_matches, home_vec, away_vec,
            BigBall, SmaBall, wodd, lodd, home_goal, away_goal)
Example #17
def get_TeamMatchHistory_test():
    """
    <span class="lblue">负</span>
    0:<span class="lred">1</span>
    <span class="lred">2</span>:1
    """
    content = spider.url_get("http://liansai.500.com/team/2440/teamfixture/",
                             "gb2312")
    #print str(content)
    History_Arr = get_TeamMatchHistory(content)
    # for m in History_Arr:
    # 	print re.compile(r'>(.*)').findall(m)[0]
    print History_Arr
def getAsiaLotnum_test(match_id):
    #<td row="1" ref="-0.250" class="">平手/半球</td>
    #row="1" ref="-0.250">平手/半球</td>
    #row="1" ref="1.250">受一球/球半<font color="blue"> 降</font></td>
    #ref="1.000" class="">受一球</td>
    #row="1" ref="0.000">平手<font color="red"> 升</font></td>
    url = 'http://odds.500.com/fenxi/yazhi-' + str(match_id) + '.shtml'
    content = spider.url_get(url, "gb2312")
    #index_r = re.compile(r'0">(.*)</td>|0" class="">(.*)</td>')
    index_r = re.compile(
        r'row="1" (.*)0">(.*)</td>|<td row="1"(.*)0" class="">(.*)</td>')
    indexes = notEm_x(index_r.findall(content)[0:10])
    return indexes
Example #19
def test(match_id):
    #match_id  = sys.argv[1]
    m_match = spider.get_match(match_id)
    # if isStart(m_match.match_time):
    #    print 'Match already started'
    #    return 0
    m_match.display()
    match_url = 'http://odds.500.com/fenxi/shuju-' + str(match_id) + '.shtml'
    content = spider.url_get(match_url, "gb2312")
    WebSuggest = netdata.get_Suggest(content)
    (home_url, away_url) = netdata.get_Team_url(content)
    odd_url = 'http://odds.500.com/fenxi/ouzhi-' + str(match_id) + '.shtml'
    content = spider.url_get(odd_url, "gb2312")
    (wodd, lodd) = netdata.get_now_Odds(content)
    (home_rv, home_rv_wodd, home_goal) = getTeamMatDis(home_url, wodd, lodd, 1)
    (away_rv, away_rv_wodd, away_goal) = getTeamMatDis(away_url, lodd, wodd, 0)
    print '==============================================='
    SelShow(home_rv, away_rv)
    show(home_rv, home_rv_wodd)
    show(away_rv, away_rv_wodd)
    print '===============  Macau tips  ================='
    print WebSuggest
    if home_rv.num_matches == 0 or away_rv.num_matches == 0:
        print '          '
        print '          '
        print '++++++++++++++++         REAL             ++++++++++++++++'
        print m_match.host_team + ' ' + str(home_goal) + ':' + str(
            away_goal) + ' ' + m_match.guest_team
        print '          '
        print '          '
        return 0
    mark.showMarket(match_id)
    print '          '
    print '++++++++++++++++         REAL             ++++++++++++++++'
    print m_match.host_team + ' ' + str(home_goal) + ':' + str(
        away_goal) + ' ' + m_match.guest_team
    print '          '
    print '          '
Example #20
def getTeamMatDis(team_url, wodd, lodd):
    content = spider.url_get(team_url + '/teamfixture', "gb2312")
    Reses = netdata.get_TeamMatchHistory(content)
    Odds = netdata.get_TeamOddsHistory(content)
    if len(Reses) < 20:
        return -1
    PGR = trans.PG_to_R(Reses[0:len(Reses) - 1])
    PGO = trans.ODD_to_O(Odds[0:len(Odds) - 1])
    FOR = trans.PG_to_O(Reses[0:len(Reses) - 1])
    Mat = trans.RFO_to_StaticMat(PGR, PGO, FOR)
    #print Mat
    for_now = trans.FOR(Reses[len(Reses) - 2], Reses[len(Reses) - 1])
    odd_now = trans.OD(lodd / wodd)
    #print odd_now
    return np.array(Mat[odd_now][for_now])
Example #21
def test(match_id):
    #match_id  = sys.argv[1]
    m_match = spider.get_match(match_id)
    if isStart(m_match.match_time):
        print 'Match already started'
        return 0
    m_match.display()
    match_url = 'http://odds.500.com/fenxi/shuju-' + str(match_id) + '.shtml'
    content = spider.url_get(match_url, "gb2312")
    WebSuggest = netdata.get_Suggest(content)
    (home_url, away_url) = netdata.get_Team_url(content)
    odd_url = 'http://odds.500.com/fenxi/ouzhi-' + str(match_id) + '.shtml'
    content = spider.url_get(odd_url, "gb2312")
    (wodd, lodd) = netdata.get_now_Odds(content)
    (home_rv, home_rv_wodd) = getTeamMatDis(home_url, wodd, lodd, 1)
    (away_rv, away_rv_wodd) = getTeamMatDis(away_url, lodd, wodd, 0)
    print '==============================================='
    SelShow(home_rv, away_rv)
    show(home_rv, home_rv_wodd)
    show(away_rv, away_rv_wodd)
    print '===============  Macau tips  ================='
    print WebSuggest
    print '          '
    print '          '
def getOriAndNowOdds_Res(match_id):
    #onclick="OZ.r(this)" style="cursor:pointer" > 2.50</td>
    #<strong>0:0</strong>
    url = 'http://odds.500.com/fenxi/ouzhi-' + str(match_id) + '.shtml'
    content = spider.url_get(url, "gb2312")
    #index_r = re.compile(r'0">(.*)</td>|0" class="">(.*)</td>')
    index_r = re.compile(r'style="cursor:pointer" >(.*)</td>')
    res_r = re.compile(r'<strong>(.*):(.*)</strong>')
    #index_r = re.compile(r'style="cursor:pointer"(.*)</td>')
    indexes = index_r.findall(content)[0:6]
    res = res_r.findall(content)
    if len(res) < 1:
        return 0
    if len(res[0]) < 2:
        return 0
    return (indexes, [res[0][0], res[0][1]])
Example #23
def get_urls_of_searched_paper(words):
    #http://xueshu.baidu.com/s?wd=machine+learning&rsv_bp=0&tn=SE_baiduxueshu_c1gjeupa&rsv_spt=3&ie=utf-8&f=3&rsv_sug2=1&sc_f_para=sc_tasktype%3D%7BfirstSimpleSearch%7D&rsp=0
    #http://xueshu.baidu.com/s?wd=machine+learning&pn=10&tn=SE_baiduxueshu_c1gjeupa&ie=utf-8&f=3&sc_f_para=sc_tasktype%3D%7BfirstSimpleSearch%7D&sc_hit=1
    search_url = 'http://xueshu.baidu.com/s?wd=' + words.replace(
        ' ', '+'
    ) + '&rsv_bp=0&tn=SE_baiduxueshu_c1gjeupa&rsv_spt=3&ie=utf-8&f=3&rsv_sug2=1&sc_f_para=sc_tasktype%3D%7BfirstSimpleSearch%7D&rsp=0'
    content = spider.url_get(search_url, "gb2312")
    #save('xueshu.baidu',str(content))
    papers_urls_r = re.compile(
        r's?wd=paperuri(.*)tn=SE_baiduxueshu_c1gjeupa&ie=utf-8')
    papers_urls = papers_urls_r.findall(content)
    for i in range(len(papers_urls)):
        papers_urls[i] = ('http://xueshu.baidu.com/s?wd=paperuri' +
                          papers_urls[i] +
                          'tn=SE_baiduxueshu_c1gjeupa&amp;ie=utf-8').replace(
                              'amp;', '')
    return papers_urls
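A minimal usage sketch for the paper-search helper; the query string is purely illustrative.

if __name__ == '__main__':
    # Print the result-page URL for each paper found for an example query.
    for paper_url in get_urls_of_searched_paper('machine learning'):
        print paper_url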
def getBetfairSug_test(match_id):
    url = 'http://odds.500.com/fenxi/touzhu-' + str(match_id) + '.shtml'
    content = spider.url_get(url, "gb2312")
    index_r = re.compile(r'<td colspan="10">(.*)</td>')
    indexes = index_r.findall(content)
    #index_w = indexes.index('数据提点')
    return [
        indexes[0].replace('<em class="ying">',
                           '').replace('<em class="shu">',
                                       '').replace('</em>', ''),
        indexes[1].replace('<em class="ying">',
                           '').replace('<em class="shu">',
                                       '').replace('</em>', ''),
        indexes[2].replace('<em class="ying">',
                           '').replace('<em class="shu">',
                                       '').replace('</em>', '')
    ]
def getTeamMatTest(team_url):
    content = spider.url_get(team_url + '/teamfixture', "gb2312")
    Reses = netdata.get_TeamMatchHistory(content)
    Odds = netdata.get_TeamOddsHistory(content)
    if len(Reses) < 20:
        return -1
    PGR = trans.PG_to_R(Reses[0:len(Reses) - 2])
    PGO = trans.ODD_to_O(Odds[0:len(Odds) - 2])
    FOR = trans.PG_to_O(Reses[0:len(Reses) - 2])
    Mat = trans.RFO_to_StaticMat(PGR, PGO, FOR)
    for_test = trans.FOR(Reses[len(Reses) - 3], Reses[len(Reses) - 2])
    odd_test = trans.OD(Odds[len(Reses) - 1])
    res_test = trans.RS(Reses[len(Reses) - 1])
    if Mat[odd_test][for_test].sum() == 0:
        return -1  # stat loses; no data in this cell
    elif Mat[odd_test][for_test][res_test] / Mat[odd_test][for_test].sum() >= 0.5:
        return 1  # positive instance
    else:
        return 0  # negative instance
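Because getTeamMatTest labels each team page as 1 (positive), 0 (negative) or -1 (no data), a rough back-test over a list of team URLs is straightforward; `team_urls` and `backtestStaticMat` below are hypothetical names for illustration.

def backtestStaticMat(team_urls):
    # Sketch: fraction of usable team pages where the static matrix picked the
    # realized result, skipping -1 (not enough history / empty matrix cell).
    labels = [getTeamMatTest(u) for u in team_urls]
    usable = [x for x in labels if x != -1]
    if not usable:
        return 0.0
    return float(sum(usable)) / len(usable)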
Example #26
def getAppointBSOddsAndRes_notstart(match_id,company):
    url     = 'http://odds.500.com/fenxi/daxiao-'+str(match_id)+'.shtml'
    content = spider.url_get(url,"gb2312")
    #print content
    #title="伟德"><span class="
    content_r = re.compile(r'title="'+company+r'"><span class="(.*)')
    real_content = content_r.findall(content.replace('\r','').replace('\n',''))
    #">3.5</td>
    odds_r   = re.compile(r'class="ying">(.*?)</td>|class="ping">(.*?)</td>|class="shu">(.*?)</td>|class="">(.*?)</td>')  #odds
    res_r    = re.compile(r'<strong>(.*):(.*)</strong>')            #goals
    pan_r    = re.compile(r'class="tb_tdul_pan ">(.*?)</td>|class="tb_tdul_pan ying">(.*?)</td>|class="tb_tdul_pan ping">(.*?)</td>') #pan
    #print real_content[0]
    if real_content==[]:
       return 0
    odds     = notArrow_odds(odds_r.findall(real_content[0])[0:4])
    #odds     = odds_r.findall(real_content[0])[0:4]
    pans     = notArrow_odds(pan_r.findall(real_content[0])[0:2])
    res      = res_r.findall(content)
    return (odds,pans,[0,0])    
Example #27
def getTeamMatDis(team_url):
    content = spider.url_get(team_url + '/teamfixture', "gb2312")
    Reses = netdata.get_TeamMatchHistory(content)
    Odds = netdata.get_TeamOddsHistory(content)
    if len(Reses) < 20:
        return -1
    PGR = trans.PG_to_R(Reses[0:len(Reses) - 2])
    PGO = trans.ODD_to_O(Odds[0:len(Odds) - 2])
    FOR = trans.PG_to_O(Reses[0:len(Reses) - 2])
    Mat = trans.RFO_to_StaticMat(PGR, PGO, FOR)
    for_test = trans.FOR(Reses[len(Reses) - 3], Reses[len(Reses) - 2])
    odd_test = trans.OD(Odds[len(Reses) - 1])
    res_test = trans.RS(Reses[len(Reses) - 1])
    # print Reses
    # print Odds
    # print PGR
    # print PGO
    # print FOR
    # print (for_test,odd_test,res_test)
    print str(Mat[odd_test][for_test]) + ("  +++  ") + str(res_test)
def getAppointOddsAndRes(match_id, company):
    url = 'http://odds.500.com/fenxi/ouzhi-' + str(match_id) + '.shtml'
    content = spider.url_get(url, "gb2312")
    #index_r = re.compile(r'0">(.*)</td>|0" class="">(.*)</td>')
    content_r = re.compile(company + r'<span class="gray">(.*)')
    #content_r = re.compile(r'(.*)'+company)
    real_content = content_r.findall(
        content.replace('\r', '').replace('\n', ''))
    #print real_content
    index_r = re.compile(r'style="cursor:pointer" >(.*?)</td>\t')
    #index_r   = re.compile(r'style="cursor:pointer" >(.*)</td>\t              <td row="1" width="33.3%"  klfc="0.12"   onclick="OZ.r(this)" style="cursor:pointer" >(.*)</td>\t              <td row="1"   klfc="14.41" \t\t\t   onclick="OZ.r(this)" style="cursor:pointer" >(.*)</td>\t            </tr>\t            <tr>\t              <td row="1" width="33.3%"  klfc="1.12" class="" onclick="OZ.r(this)" style="cursor:pointer" >(.*)</td>\t              <td row="1" width="33.3%"  klfc="3.38" class="bg-a" onclick="OZ.r(this)" style="cursor:pointer" >(.*)</td>\t              <td row="1" klfc="36.28"                class="bg-b" onclick="OZ.r(this)" style="cursor:pointer" >(.*)</td>')
    res_r = re.compile(r'<strong>(.*):(.*)</strong>')
    indexes = index_r.findall(real_content[0])[0:6]
    #print indexes
    res = res_r.findall(content)
    if len(res) < 1:
        return 0
    if len(res[0]) < 2:
        return 0
    return (indexes, [res[0][0], res[0][1]])
Example #29
def getAppointASOddsAndRes_notstart(match_id,company):
    url     = 'http://odds.500.com/fenxi/yazhi-'+str(match_id)+'.shtml'
    content = spider.url_get(url,"gb2312")
    #everything_r--------
    company_r    = re.compile(r'<p><a href="http://(.*)" title="(.*)"><span class="(.*)"')
    res_r        = re.compile(r'<strong>(.*):(.*)</strong>')            #goals
    odds_away_r  = re.compile(r'<td row="(.*)" width="(.*)" class="(.*)">(.*)</td>')
    odds_home_r  = re.compile(r'<td width="(.*)"row="(.*)" class="(.*)">(.*)</td>')
    pan_r  = re.compile(r'row="(.*)" ref="(.*?)">(.*?)<')
    #Get-----------------
    CompaniesArr = reTurpleToList(company_r.findall(content),1)
    if company not in CompaniesArr:
       return 0
    OddsHomeArr  = reTurpleToList(odds_home_r.findall(content),3)
    OddsAwayArr  = reTurpleToList(odds_away_r.findall(content),3)
    PansArr      = reTurpleToList(pan_r.findall(content),2)
    #Find----------------   
    indexOfCom   = CompaniesArr.index(company)
    res          = res_r.findall(content)
    #(['0.77', '1.05', '2.40', '0.30'], ['2.5', '3.5'], ['2', '1'])
    #['1.000', '0.890', '0.700', '1.250'], ['\xe5\xb9\xb3\xe6\x89\x8b/\xe5\x8d\x8a\xe7\x90\x83', '\xe5\x8f\x97\xe5\xb9\xb3\xe6\x89\x8b/\xe5\x8d\x8a\xe7\x90\x83'], ['2', '1']
    return ([OddsHomeArr[2 * indexOfCom].replace('↑', '').replace('↓', ''),
             OddsAwayArr[2 * indexOfCom].replace('↑', '').replace('↓', ''),
             OddsHomeArr[2 * indexOfCom + 1],
             OddsAwayArr[2 * indexOfCom + 1]],
            [PansArr[2 * indexOfCom], PansArr[2 * indexOfCom + 1]],
            [0, 0])
def WritePre(match_id):
    match_url = 'http://odds.500.com/fenxi/shuju-' + str(match_id) + '.shtml'
    content = spider.url_get(match_url, "gb2312")
    if notOver(content):
        toRedis(match_id)