Example #1
def run():
    if not comm.is_trade_day(): return

    stocks = da.stockbaseinfos.load_all_stocks()
    params = ['%s%s' % (s.pinyin2, s.stock_no) for s in stocks]

    count = len(params)
    pagesize = 88
    # Python 2 "/" truncates integers, so use a float operand for a real ceil
    pagecount = int(math.ceil(count / float(pagesize)))
    print pagesize, count, pagecount

    for i in range(pagecount):
        url = const_base_url + ','.join(params[i*pagesize:(i+1)*pagesize])
        print i,url
        lfile = get_local_file_name(i)
        browser.downad_and_save(url,lfile)
        rows = comm.parse_daily_data(lfile)
        
        try:
            for r in rows:
                file_history_process.add_new_record(r)
        except Exception,ex:
            loger.error('file_history_process add_new_record ' + str(ex))
        
        try:
            da.stockdaily_cud.import_rows(rows)            
        except Exception,ex:
            loger.error('stockdaily_cud import_rows ' + str(ex))
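browser.downad_and_save is used by every example here, but its body is not shown. A minimal sketch of what such a download-and-save helper could look like in Python 2, assuming it only fetches the URL and writes the response body to the local path (the real implementation may add retries, headers, or encoding handling):

# Sketch only: not the original browser.downad_and_save implementation.
import os
import urllib2

def downad_and_save(url, lfile):
    # make sure the target directory exists before writing
    ldir = os.path.dirname(lfile)
    if ldir and not os.path.exists(ldir):
        os.makedirs(ldir)
    resp = urllib2.urlopen(url, timeout=30)
    try:
        data = resp.read()
    finally:
        resp.close()
    with open(lfile, 'wb') as f:
        f.write(data)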
Example #2
def run():
    lfile = get_local_file_name()
    loger.info(lfile)

    # generate url
    observe_stocks = load_high_stocks()

    last_stocks_rate_range_stocknos = [r.stock_no for r in observe_stocks if r.prate > 0.03 and r.prate < 0.07]

    stocks = observe_stocks + da.stockbaseinfos.load_by_stocknos(buy_stocknos)  # load_by_stocknos also adds the stocks already bought
    params = ["%s%s" % (s.pinyin2, s.stock_no) for s in stocks]
    params = list(set(params))
    url = config.const_base_url + ",".join(params)

    browser.downad_and_save(url, lfile)
    rows = comm.parse_daily_data(lfile)
    for r in rows:
        r.should_sell = "sell" if float(r.close_price) < float(r.last_close) * 0.98 else "..."
        r.last = [s for s in stocks if s.stock_no == r.stock_no][0]
        r.last_in_range = r.stock_no in last_stocks_rate_range_stocknos

    content = send_reports_withT(rows)
    with open(get_suggest_local_file_name(), "w") as f:
        f.write(content)
        f.close()
    # send email
    subject = "stock_%s" % (datetime.datetime.now().strftime("%m%d_%H%M")[0:-1])
    util.emailsmtp.sendmail(subject, content, ["*****@*****.**"])  # ,'*****@*****.**'
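util.emailsmtp.sendmail is another helper that is not shown here. A hedged sketch using only the standard library, assuming a plain-text body and a reachable SMTP server; smtp_host and sender below are placeholders, not values from the original project:

# Sketch only: the real util.emailsmtp.sendmail may differ.
import smtplib
from email.mime.text import MIMEText

def sendmail(subject, content, receivers, smtp_host='localhost', sender='report@localhost'):
    msg = MIMEText(content, 'plain', 'utf-8')
    msg['Subject'] = subject
    msg['From'] = sender
    msg['To'] = ', '.join(receivers)
    server = smtplib.SMTP(smtp_host)
    try:
        server.sendmail(sender, receivers, msg.as_string())
    finally:
        server.quit()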
Example #3
def run():
    if not comm.is_trade_day(): return

    stocks = da.stockbaseinfos.load_all_stocks()
    params = ['%s%s' % (s.pinyin2, s.stock_no) for s in stocks]

    count = len(params)
    pagesize = 88
    # Python 2 "/" truncates integers, so use a float operand for a real ceil
    pagecount = int(math.ceil(count / float(pagesize)))
    print pagesize, count, pagecount

    for i in range(pagecount):
        url = const_base_url + ','.join(
            params[i * pagesize:(i + 1) * pagesize])
        print i, url
        break  # debug stop: only the first page URL is printed; the code below never runs

        lfile = get_local_file_name(i)
        browser.downad_and_save(url, lfile)
        rows = comm.parse_daily_data(lfile)

        try:
            for r in rows:
                file_history_process.add_new_record(r)
        except Exception, ex:
            loger.error('file_history_process add_new_record ' + str(ex))

        try:
            da.stockdaily_cud.import_rows(rows)
        except Exception, ex:
            loger.error('stockdaily_cud import_rows ' + str(ex))
Example #4
def run():
    lfile = get_local_file_name()
    loger.info(lfile)

    #generate url
    observe_stocks = load_high_stocks()

    last_stocks_rate_range_stocknos = [
        r.stock_no for r in observe_stocks if r.prate > 0.03 and r.prate < 0.07
    ]

    stocks = observe_stocks + da.stockbaseinfos.load_by_stocknos(
        buy_stocknos)  # load_by_stocknos also adds the stocks already bought
    params = ['%s%s' % (s.pinyin2, s.stock_no) for s in stocks]
    params = list(set(params))
    url = config.const_base_url + ','.join(params)

    browser.downad_and_save(url, lfile)
    rows = comm.parse_daily_data(lfile)
    for r in rows:
        r.should_sell = 'sell' if float(
            r.close_price) < float(r.last_close) * 0.98 else '...'
        r.last = [s for s in stocks if s.stock_no == r.stock_no][0]
        r.last_in_range = r.stock_no in last_stocks_rate_range_stocknos

    content = send_reports_withT(rows)
    with open(get_suggest_local_file_name(), 'w') as f:
        f.write(content)
    #send email
    subject = 'stock_%s' % (
        datetime.datetime.now().strftime('%m%d_%H%M')[0:-1])
    util.emailsmtp.sendmail(subject, content,
                            ['*****@*****.**'])  #,'*****@*****.**'
Example #5
def run():
    lfile = get_local_file_name()
    loger.info(lfile)

    #generate url
    observe_stocks = load_cyb_stocks()
    stocks = observe_stocks  #+ da.stockbaseinfos.load_by_stocknos(buy_stocknos) #load_by_stocknos 
    
    params = list(set(['%s%s' % (s.pinyin2, s.stock_no) for s in stocks]))
    url = config.const_base_url + ','.join(params)
    browser.downad_and_save(url,lfile)
    rows = comm.parse_daily_data(lfile)

    last_stocks_rate_range_stocknos = [r.stock_no for r in observe_stocks if r.prate > 0.03 and r.prate < 0.07]

    for r in rows:
        r.should_sell = 'sell' if float(r.close_price) < float(r.last_close)*0.98 else '...'
        r.last = [s for s in stocks if s.stock_no == r.stock_no][0]
        r.last_in_range = r.stock_no in last_stocks_rate_range_stocknos

    
    content = send_reports_withT(rows)
    
    with open(get_suggest_local_file_name(), 'w') as f:
        f.write(content)

    #send email
    subject = 'stockCYB_%s' % (datetime.datetime.now().strftime('%m%d_%H%M')[0:-1])
    util.emailsmtp.sendmail(subject, content, ['*****@*****.**'])  # ,'*****@*****.**'
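Examples #2, #4 and #5 all rely on comm.parse_daily_data returning objects with stock_no, close_price and last_close attributes. A purely illustrative sketch, assuming one comma-separated line per stock in the downloaded file; the real field order depends on the quote source behind config.const_base_url and is not known from these examples:

# Illustrative only: field positions here are assumptions, not the real format.
class DailyRow(object):
    def __init__(self, stock_no, close_price, last_close):
        self.stock_no = stock_no
        self.close_price = close_price
        self.last_close = last_close

def parse_daily_data(lfile):
    rows = []
    with open(lfile) as f:
        for line in f:
            parts = line.strip().split(',')
            if len(parts) < 3:
                continue  # skip blank or malformed lines
            # assumed order: stock code, latest close, previous close
            rows.append(DailyRow(parts[0], parts[1], parts[2]))
    return rows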
Example #6
def download(params):
    url = get_url(params)
    print url
    lfile = get_local_file_name(params)
    if not os.path.exists(lfile):
        loger.info(url)
        browser.downad_and_save(url,lfile)
        #req = urllib.urlretrieve(url,lfile)  #try..catch ... logging?
    print lfile
    return lfile
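The download helper above skips work when the local file already exists; get_url and get_local_file_name are assumed helpers whose bodies are not shown. One plausible sketch, with const_base_url and const_root_local standing in for the project's config values:

# Hypothetical helpers; the originals are not shown in these examples.
import hashlib
import os

def get_url(params):
    # join the per-stock codes into a single quote request URL
    return const_base_url + ','.join(params)

def get_local_file_name(params):
    # derive a stable file name from the parameter list so that download()
    # can reuse a file it has already fetched
    key = hashlib.md5(','.join(sorted(params))).hexdigest()
    return os.path.join(const_root_local, 'daily', key + '.txt')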
Example #7
def download_and_parse_data(stock):
    scode = '%s.%s' % (stock.stock_no,stock.market_code_yahoo)
    url = '%ss=%s' % (const_root_url,scode)
    lfile = '%s/dailyh/%s.csv' %(const_root_local,scode)
    try:
        if not os.path.exists(lfile):
            print url
            loger.info("downloading " + url)
            browser.downad_and_save(url,lfile)
        rows = comm.parse_history_data(lfile)
        return rows
    except Exception,e:
        loger.error(url + " " + str(e) )
Example #8
def download_and_parse_data(stock):
    scode = '%s.%s' % (stock.stock_no, stock.market_code_yahoo)
    url = '%ss=%s' % (const_root_url, scode)
    lfile = '%s/dailyh/%s.csv' % (const_root_local, scode)
    try:
        if not os.path.exists(lfile):
            print url
            loger.info("downloading " + url)
            browser.downad_and_save(url, lfile)
        rows = comm.parse_history_data(lfile)
        return rows
    except Exception, e:
        loger.error(url + " " + str(e))