def download_latest(): if not util.is_trade_day(): return latest_day = util.get_today() re_download = True pagesize = 88 stocks = load_all_stocks() count = len(stocks) params = [s[0] for s in stocks] pagecount = int(math.ceil(count / pagesize)) dir_today = '%s%s/' % (config.daily_data_dir, latest_day) print "download 下载文件" for i in range(0, pagecount + 1): print i url = const_base_url + ','.join( params[i * pagesize:(i + 1) * pagesize]) lfile = '%s%s.csv' % (dir_today, i) if re_download: if not os.path.exists(dir_today): os.mkdir(dir_today) if os.path.exists(lfile): os.remove(lfile) try: browser.downad_and_save(url, lfile) except Exception, e: print str(e)
def download_latest(): if not util.is_trade_day(): return latest_day = util.get_today() re_download = True pagesize = 88 stocks = load_all_stocks() count = len(stocks) params = [s[0] for s in stocks] pagecount = int(math.ceil(count/pagesize)) dir_today = '%s%s/' %(config.daily_data_dir,latest_day) print "download 下载文件" for i in range(0,pagecount+1): print i url = const_base_url + ','.join(params[i*pagesize:(i+1)*pagesize]) lfile = '%s%s.csv' %(dir_today,i) if re_download: if not os.path.exists(dir_today): os.mkdir(dir_today) if os.path.exists(lfile): os.remove(lfile) try: browser.downad_and_save(url,lfile) except Exception,e: print str(e)
def run():
    """On a trading day: download the latest data, load today's stocks,
    and hand them to jump_p.  Does nothing on non-trading days."""
    if not util.is_trade_day():
        return
    download.download_latest()
    today = util.get_today()
    daily_stocks = load_daily_stocks(today)
    jump_p(daily_stocks)
def run():
    """Download the latest data and invoke jump_p with today's date string.
    Exits immediately when today is not a trading day."""
    if not util.is_trade_day():
        return
    download.download_latest()
    jump_p(util.get_today())