Example #1
def run():
    if not comm.is_trade_day(): return

    stocks = da.stockbaseinfos.load_all_stocks()
    params = ['%s%s' % (s.pinyin2, s.stock_no) for s in stocks]

    count = len(params)
    pagesize = 88
    pagecount = int(math.ceil(count / float(pagesize)))  # float division so the ceiling actually rounds up
    print pagesize, count, pagecount

    for i in range(pagecount):
        url = const_base_url + ','.join(params[i * pagesize:(i + 1) * pagesize])
        print i, url
        lfile = get_local_file_name(i)
        browser.downad_and_save(url, lfile)
        rows = comm.parse_daily_data(lfile)
        
        try:
            for r in rows:
                file_history_process.add_new_record(r)
        except Exception as ex:
            loger.error('file_history_process add_new_record ' + str(ex))
        
        try:
            da.stockdaily_cud.import_rows(rows)
        except Exception as ex:
            loger.error('stockdaily_cud import_rows ' + str(ex))
Example #2
def run():
    lfile = get_local_file_name()
    loger.info(lfile)

    # generate url
    observe_stocks = load_high_stocks()

    last_stocks_rate_range_stocknos = [r.stock_no for r in observe_stocks if 0.03 < r.prate < 0.07]

    stocks = observe_stocks + da.stockbaseinfos.load_by_stocknos(buy_stocknos)  # load_by_stocknos additionally adds the stocks already bought
    params = ["%s%s" % (s.pinyin2, s.stock_no) for s in stocks]
    params = list(set(params))
    url = config.const_base_url + ",".join(params)

    browser.downad_and_save(url, lfile)
    rows = comm.parse_daily_data(lfile)
    for r in rows:
        r.should_sell = "sell" if float(r.close_price) < float(r.last_close) * 0.98 else "..."
        r.last = [s for s in stocks if s.stock_no == r.stock_no][0]
        r.last_in_range = r.stock_no in last_stocks_rate_range_stocknos

    content = send_reports_withT(rows)
    with open(get_suggest_local_file_name(), "w") as f:
        f.write(content)
    # send email
    subject = "stock_%s" % (datetime.datetime.now().strftime("%m%d_%H%M")[0:-1])
    util.emailsmtp.sendmail(subject, content, ["*****@*****.**"])  # ,'*****@*****.**'
Example #3
def run():
    if not comm.is_trade_day(): return

    stocks = da.stockbaseinfos.load_all_stocks()
    params = ['%s%s' % (s.pinyin2, s.stock_no) for s in stocks]

    count = len(params)
    pagesize = 88
    pagecount = int(math.ceil(count / float(pagesize)))  # float division so the ceiling actually rounds up
    print pagesize, count, pagecount

    for i in range(pagecount):
        url = const_base_url + ','.join(
            params[i * pagesize:(i + 1) * pagesize])
        print i, url
        break  # stops after printing the first URL; the download and import below are never reached

        lfile = get_local_file_name(i)
        browser.downad_and_save(url, lfile)
        rows = comm.parse_daily_data(lfile)

        try:
            for r in rows:
                file_history_process.add_new_record(r)
        except Exception as ex:
            loger.error('file_history_process add_new_record ' + str(ex))

        try:
            da.stockdaily_cud.import_rows(rows)
        except Exception as ex:
            loger.error('stockdaily_cud import_rows ' + str(ex))
Example #4
def run():
    lfile = get_local_file_name()
    loger.info(lfile)

    #generate url
    observe_stocks = load_high_stocks()

    last_stocks_rate_range_stocknos = [
        r.stock_no for r in observe_stocks if 0.03 < r.prate < 0.07
    ]

    stocks = observe_stocks + da.stockbaseinfos.load_by_stocknos(
        buy_stocknos)  # load_by_stocknos additionally adds the stocks already bought
    params = ['%s%s' % (s.pinyin2, s.stock_no) for s in stocks]
    params = list(set(params))
    url = config.const_base_url + ','.join(params)

    browser.downad_and_save(url, lfile)
    rows = comm.parse_daily_data(lfile)
    for r in rows:
        r.should_sell = 'sell' if float(r.close_price) < float(r.last_close) * 0.98 else '...'
        r.last = [s for s in stocks if s.stock_no == r.stock_no][0]
        r.last_in_range = r.stock_no in last_stocks_rate_range_stocknos

    content = send_reports_withT(rows)
    with open(get_suggest_local_file_name(), 'w') as f:
        f.write(content)
    #send email
    subject = 'stock_%s' % (
        datetime.datetime.now().strftime('%m%d_%H%M')[0:-1])
    util.emailsmtp.sendmail(subject, content,
                            ['*****@*****.**'])  #,'*****@*****.**'
Example #5
def run():
    lfile = get_local_file_name()
    loger.info(lfile)

    #generate url
    observe_stocks = load_cyb_stocks()
    stocks = observe_stocks  # + da.stockbaseinfos.load_by_stocknos(buy_stocknos) would additionally add the stocks already bought
    
    params = list(set(['%s%s' % (s.pinyin2, s.stock_no) for s in stocks]))
    url = config.const_base_url + ','.join(params)
    browser.downad_and_save(url, lfile)
    rows = comm.parse_daily_data(lfile)

    last_stocks_rate_range_stocknos = [r.stock_no for r in observe_stocks if 0.03 < r.prate < 0.07]

    for r in rows:
        r.should_sell = 'sell' if float(r.close_price) < float(r.last_close) * 0.98 else '...'
        r.last = [s for s in stocks if s.stock_no == r.stock_no][0]
        r.last_in_range = r.stock_no in last_stocks_rate_range_stocknos

    
    content = send_reports_withT(rows)
    
    with open(get_suggest_local_file_name(), 'w') as f:
        f.write(content)

    #send email
    subject = 'stockCYB_%s' % (datetime.datetime.now().strftime('%m%d_%H%M')[0:-1])
    util.emailsmtp.sendmail(subject, content, ['*****@*****.**'])  # ,'*****@*****.**'
Example #6
def merge_daily_data(trade_date):
    path = '%s/daily/' % (const_root_local)
    filenames = os.listdir(path)
    rows = []
    for f in filenames:
        if not f.startswith(trade_date):
            continue
        rows = rows + comm.parse_daily_data(path + f)

    content = '\n'.join([
        '%s,%s,%s,%s,%s,%s,%s' % (r.market_codes.pinyin, r.stock_no, r.open_price,
                                  r.close_price, r.high_price, r.low_price, r.volume)
        for r in rows if r.volume > 0
    ])
    lfile = '%s/daily_/%s.csv' % (const_root_local, trade_date)
    with open(lfile, 'w') as outfile:  # avoid shadowing the builtin name 'file'
        outfile.write(content)
Example #7
def merge_daily_data(trade_date):
    path = '%s/daily/' % (const_root_local)
    filenames = os.listdir(path)
    rows = []
    for f in filenames:
        if not f.startswith(trade_date):
            continue
        rows = rows + comm.parse_daily_data(path + f)

    content = '\n'.join([
        '%s,%s,%s,%s,%s,%s,%s' %
        (r.market_codes.pinyin, r.stock_no, r.open_price, r.close_price,
         r.high_price, r.low_price, r.volume) for r in rows if r.volume > 0
    ])
    lfile = '%s/daily_/%s.csv' % (const_root_local, trade_date)
    with open(lfile, 'w') as outfile:  # avoid shadowing the builtin name 'file'
        outfile.write(content)
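
A minimal sketch of how merge_daily_data might be driven, reusing the comm.is_trade_day() guard from the examples above. The '%Y%m%d' prefix format and the scheduler entry point are assumptions for illustration; the snippets do not confirm how the daily file names are actually prefixed.

# Hypothetical driver for merge_daily_data; the '%Y%m%d' prefix is an assumed
# file-naming convention, so adjust it to whatever get_local_file_name() writes.
import datetime

def run():
    if not comm.is_trade_day():  # same trade-day guard as Example #1
        return
    trade_date = datetime.datetime.now().strftime('%Y%m%d')  # assumed date prefix
    merge_daily_data(trade_date)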