Example No. 1
def AppSpiderAomenBSodds(month, lastday):
    """Collect the '澳门' (Macau) odds and results for every day of the given month."""
    Lines = []
    for day in range(1, lastday + 1):
        # zero-pad the day so dates look like "2014-3-05" / "2014-3-15"
        match_ids = spider.crawl_match_list_by_date(str(today.year) + "-" + str(month) + "-" + ("%02d" % day))
        print 'Date: ' + str(month) + '/' + str(day)
        print str(len(match_ids)) + ' matches that day, downloading...'
        pbar = ProgressBar().start()
        total = len(match_ids)
        i = 1
        for match_id in match_ids:
            info = getAppointASOddsAndRes(match_id, '澳门')   # '澳门' = Macau; 0 means the odds were unavailable
            if info == 0:
                continue
            pbar.update(int(100 * (float(i) / total)))
            Lines.append((np.float64(np.array(info[0] + info[2])), info[1]))
            i += 1
        pbar.finish()
    return Lines
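A minimal usage sketch, assuming the module-level `today`, `spider`, `numpy as np`, `progressbar.ProgressBar` and `getAppointASOddsAndRes` objects the function body relies on:

# Hypothetical call: Macau odds for every match in March (a 31-day month);
# each entry of `lines` is an (odds vector, result) pair.
lines = AppSpiderAomenBSodds(3, 31)
print str(len(lines)) + ' matches collected'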
Example No. 2
def TestMultiMatches(how_many_days_ago):
    #pool = redis.ConnectionPool(host='115.28.138.54', port=6379)
    pool = redis.ConnectionPool(host='127.0.0.1', port=6379)
    # timedelta (assumes the datetime module is imported alongside `today`)
    # keeps the look-back date valid across month boundaries
    target = today - datetime.timedelta(days=how_many_days_ago)
    m_match_ids = spider.crawl_match_list_by_date(
        str(target.year) + "-" + str(target.month) + "-" + str(target.day))
    print len(m_match_ids)
    match_ids_writed = []
    for match_id in m_match_ids:
        if toRedis(match_id, pool) != 0:   # 0 is treated as a failed write
            match_ids_writed.append(match_id)
    return match_ids_writed
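A hedged usage sketch, assuming a redis-server listening on 127.0.0.1:6379 and the `toRedis` helper referenced above:

# Hypothetical run: push yesterday's match ids into Redis and report how many were written.
written = TestMultiMatches(1)
print str(len(written)) + ' match ids written to Redis'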
Example No. 3
def makeDataXY(days):
    """Build (DataX, DataY): X is the scaled drift between the two odds triples
    returned by getOriAndNowOdds_Res, Y the WDL label of the goal difference."""
    target = today - datetime.timedelta(days=days)   # valid across month boundaries
    match_ids = spider.crawl_match_list_by_date(
        str(target.year) + "-" + str(target.month) + "-" + str(target.day))
    DataX = []
    DataY = []
    for match_id in match_ids:
        info = fm.getOriAndNowOdds_Res(match_id)
        if info == 0:
            continue
        odds = np.float64(np.array(info[0]))          # six odds: opening (0:3) and current (3:6)
        DataX.append(10.0 * (odds[0:3] - odds[3:6]) / odds[3:6])   # scaled relative odds drift
        score = np.int32(np.array(info[1]))
        DataY.append(WDL(score[0] - score[1]))        # label from the goal difference
    return (DataX, DataY)
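A minimal usage sketch, assuming the same module-level `spider`, `fm`, `np`, `WDL` and `today` objects the function references:

# Hypothetical call: matches from two days ago; X holds the scaled odds drift,
# Y the label derived from the goal difference.
X, Y = makeDataXY(2)
print str(len(X)) + ' samples collected'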
Example No. 4
def AppSpiderWeddBSodds(day1, day2):
    """Collect the '伟德' odds and results for November days [day1, day2)."""
    Lines = []
    for day in range(day1, day2):
        # "%02d" keeps the zero padding correct for days >= 10 as well
        match_ids = spider.crawl_match_list_by_date(str(today.year) + "-11-" + ("%02d" % day))
        print str(len(match_ids)) + ' matches that day, downloading...'
        pbar = ProgressBar().start()
        total = len(match_ids)
        i = 1
        for match_id in match_ids:
            info = getAppointBSOddsAndRes(match_id, '伟德')
            if info == 0:
                continue
            pbar.update(int(100 * (float(i) / total)))
            Lines.append((np.float64(np.array(info[0] + info[2])), info[1]))
            i += 1
        pbar.finish()
    return Lines
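A usage sketch, under the same module-level assumptions as Example No. 1:

# Hypothetical call: '伟德' odds for November 1st through 9th.
lines = AppSpiderWeddBSodds(1, 10)
print str(len(lines)) + ' odds/result pairs'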
Example No. 5
def HD_main(how_many_days_ago):
    """Crawl the last N days of matches and pickle the collected per-match infos."""
    import pickle as pk
    History = []
    exnum = 0
    while how_many_days_ago > 0:
        # timedelta keeps the date valid across month boundaries
        target = today - datetime.timedelta(days=how_many_days_ago)
        m_match_ids = spider.crawl_match_list_by_date(
            str(target.year) + "-" + str(target.month) + "-" + str(target.day))
        print len(m_match_ids)
        for match_id in m_match_ids:
            Infos = HD_GetMatch(match_id)
            if Infos != 0:          # 0 marks a match that could not be fetched
                History.append(Infos)
                exnum += 1
        how_many_days_ago -= 1
    output = open('history_data.pkl', 'wb')
    pk.dump(History, output)
    output.close()
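A sketch of reading the dump back, assuming only the standard pickle module:

import pickle
HD_main(7)                      # crawl the last 7 days and write history_data.pkl
with open('history_data.pkl', 'rb') as f:
    history = pickle.load(f)
print str(len(history)) + ' matches in history_data.pkl'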
Example No. 6
def makeDataXYtrain_forPsql(day1, day2, company):
    """Build rows ready for a PostgreSQL insert: match id, odds, result,
    teams, match name, company and match time, for September days [day1, day2)."""
    Lines = []
    for day in range(day1, day2):
        # zero-pad the day so the date format matches the other crawl helpers
        match_ids = spider.crawl_match_list_by_date(str(today.year) + "-09-" + ("%02d" % day))
        print str(len(match_ids)) + ' matches that day, downloading...'
        pbar = ProgressBar().start()
        total = len(match_ids)
        i = 1
        for match_id in match_ids:
            info = fm.getAppointOddsAndRes(match_id, company)
            if info == 0:
                continue
            pbar.update(int(100 * (float(i) / total)))
            m_match = spider.get_match(match_id)
            Lines.append(tuple([match_id] + info[0] + [info[1][0], info[1][1]] +
                               [m_match.host_team, m_match.guest_team, m_match.match_name,
                                company, m_match.match_time]))
            i += 1
        pbar.finish()
    return Lines
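A usage sketch; the 'Bet365' company name is the one used in the next example, and the row layout is the one assembled above:

# Hypothetical call: Bet365 rows for September 1st through 9th, ready for an INSERT.
rows = makeDataXYtrain_forPsql(1, 10, 'Bet365')
if rows:
    print rows[0]   # (match_id, odds..., result, host_team, guest_team, match_name, company, match_time)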
Example No. 7
def makeDataXYtrain_sc(day1, day2):
    """Build (DataX, DataY) from Bet365 odds for September days [day1, day2):
    each X row is the match id plus the raw odds, each Y row the two result values."""
    DataX = []
    DataY = []
    for day in range(day1, day2):
        match_ids = spider.crawl_match_list_by_date(str(today.year) + "-09-" + ("%02d" % day))
        print str(len(match_ids)) + ' matches that day, downloading...'
        pbar = ProgressBar().start()
        total = len(match_ids)
        i = 1
        for match_id in match_ids:
            info = fm.getAppointOddsAndRes(match_id, 'Bet365')
            if info == 0:
                continue
            pbar.update(int(100 * (float(i) / total)))
            DataX.append([match_id] + info[0])
            DataY.append([np.int32(np.array(info[1])[0]), np.int32(np.array(info[1])[1])])
            i += 1
        pbar.finish()
    return (DataX, DataY)
Example No. 8
def Task_Main(how_many_days_ago):
    """Replay the last N days of matches and print the hit ratios of the WDL and BS bets."""
    WDL_Bet = []
    BS_Bet = []
    exnum = 0
    while how_many_days_ago > 0:
        # timedelta keeps the date valid across month boundaries
        target = today - datetime.timedelta(days=how_many_days_ago)
        m_match_ids = spider.crawl_match_list_by_date(
            str(target.year) + "-" + str(target.month) + "-" + str(target.day))
        print len(m_match_ids)
        for match_id in m_match_ids:
            Infos = Task_GetMatch(match_id)
            if Infos != 0:
                WDL_Bet.append(Infos[0])
                BS_Bet.append(Infos[1])
                if Infos[0] != 1:
                    exnum += 1
        how_many_days_ago -= 1
    print 'exnum: ' + str(exnum)
    # ratio of 3-valued outcomes among the 3s and 0s; skip if the sample is empty
    if WDL_Bet.count(3) + WDL_Bet.count(0) > 0:
        print float(WDL_Bet.count(3)) / (WDL_Bet.count(3) + WDL_Bet.count(0))
    if BS_Bet.count(3) + BS_Bet.count(0) > 0:
        print float(BS_Bet.count(3)) / (BS_Bet.count(3) + BS_Bet.count(0))
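A usage sketch, assuming `Task_GetMatch`, `spider` and `today` exist at module level as the function implies:

# Hypothetical run: evaluate the last 7 days of bets and print the hit ratios.
Task_Main(7)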
Example No. 9
""" soccer bet main function file """

__author__ = 'hanss401'

import spider
import portfoliomodel
import time
import sys
import os
import spiderForBSBall as sp
import TestMarket as test
import datetime

reload(sys)
sys.setdefaultencoding('utf8')

#m_match_ids = spider.crawl_match_list()
#m_match_ids = sp.getAllMatches()
#m_match_ids = sp.getMainMatches()
today = datetime.datetime.now()
# timedelta keeps "two days ago" valid across month boundaries
the_day = today - datetime.timedelta(days=2)
m_match_ids = spider.crawl_match_list_by_date(
    str(the_day.year) + "-" + str(the_day.month) + "-" + str(the_day.day))

for m_match_id in m_match_ids:
    print m_match_id
    #m_match = spider.get_match(m_match_id)
    #m_match.display()
    #os.system('python AppHABsSel.py ' + str(m_match_id) )
    test.test(m_match_id)
    time.sleep(2)
Example No. 10
#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" find best return rate company """

__author__ = 'ggstar'

import spider
import time
import lottery

match_all_ids = []

for day in range(13, 20, 1):
    match_ids = spider.crawl_match_list_by_date("2014-07-" + str(day))

    match_all_ids += match_ids

item_list = []

seq = 0

company_map = {}

for match_id in match_all_ids:
    seq += 1
    print seq, match_id

    match = spider.get_match(match_id)

    for item in match.item_arr:
        if item.company in company_map: