def __init__(self):
    """Initialize logging, open the database (SQLite stub on Windows,
    real credentials elsewhere) and reset session state."""
    config.init_log()
    # On Windows ('nt') fall back to the local SQLite test database with the
    # SqlTest dialect; otherwise connect with the configured credentials.
    self.db = (sql_db.DataBase('../test/test_database.lite', True, sql = sql_db.SqlTest)
               if os.name == 'nt'
               else sql_db.DataBase(config.sql_credentials, True))
    self.login = ''
    self.__index = 0
#!/usr/bin/env python # -*- coding: utf-8 -*- import web import urllib import datetime import os import BeautifulSoup from config import dbr,dbw,const_root_local,init_log from util import browser import da const_root_url = 'http://app.finance.ifeng.com/list/stock.php?' const_market_codes = 'ha,sa,hb,sb,zxb,cyb,zs' loger = init_log("stock_base_infos") def get_url(params): return const_root_url + '&'.join(["%s=%s" % (k,v) for k,v in params.items()]) def get_local_file_name(params): today_path = '%s/base_%s' % (const_root_local,params['date'].strftime('%Y%m%d')) if not os.path.exists(today_path): os.mkdir(today_path) return '%s/%s_%s.html' % (today_path,params['t'],params['p']) fsegs = '_'.join(["%s-%s" % (k,v) for k,v in params.items()]) local_file = params['date'].strftime('%Y%m%d') + '_' + fsegs + '.html' lfile = '%s/%s' %(const_root_local,local_file) return lfile def download(params):
return True def main(self): existed_points = PointBuilder().build() providers = self.get_providers() clients = self.get_clients() allowed_points = self.get_allowed(clients, providers) self.acct(existed_points, clients, providers) self.auth(allowed_points, existed_points) self.dhcp(clients) def log_except(exc): stream = StringIO.StringIO() traceback.print_exception(getattr(exc,"__class__"), exc, sys.exc_traceback, None, stream) logging.error(stream.getvalue()) if __name__ == '__main__': config.init_log() try: if len(sys.argv) > 1: rate_mode = sys.argv[1] else: rate_mode = None AAA(rate_mode).main() except Exception, e: logging.error( 'Unhandled exception in AAA:' ) log_except( e )
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import web
import da
from config import dbr, dbw, const_root_local, init_log
import datetime

loger = init_log("daily_compute")


def get_stock_daily_infos(stock_no):
    """Return all traded daily records (volume > 0) for *stock_no*, newest first."""
    rows = dbr.select('stock_daily_records',
                      where="stock_no=$stock_no and volume>0",
                      order="date desc",
                      vars=locals())
    return list(rows)


def update(stock_no, date, raise_drop, raise_drop_rate, volume_updown, volume_updown_rate):
    """Write computed price-change and volume-change deltas back onto the
    daily record identified by (stock_no, date)."""
    dbw.update('stock_daily_records',
               raise_drop=raise_drop,
               raise_drop_rate=raise_drop_rate,
               volume_updown=volume_updown,
               volume_updown_rate=volume_updown_rate,
               where="stock_no=$stock_no and date=$date",
               vars=locals())


def update_v2(l):
    # NOTE(review): truncated in this chunk — only the initial purge of the
    # temp table is visible here (the full version appears in another chunk).
    dbw.delete('stock_daily_records_tmp', where="pk_id>0", vars=locals())
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import web
import math
import da
import re
import datetime
from decimal import *
import config
from config import const_root_local, init_log, dbr, dbw
import comm
import util
from util import browser

loger = init_log("suggest")

# http://www.cnblogs.com/kingwolfofsky/archive/2011/08/14/2138081.html


def get_current_hhmm():
    """Current wall-clock time as an integer HHMM (e.g. 931 for 09:31)."""
    return int(datetime.datetime.now().strftime("%Y%m%d%H%M")[8:])


def load_high_stocks():
    """Stocks outside the 'sb' market whose 188-day high was set today."""
    # Earlier, stricter filter kept for reference:
    # 'high_date_90=trade_date and high_date_188=trade_date and close=high and open<>close'
    results = dbr.select("stock_base_infos",
                         where="high_date_188=trade_date and market_code<>'sb'")
    return list(results)


def get_last_count(field_name):
    # NOTE(review): truncated in this chunk — only the SQL template is visible.
    sql = "SELECT count(*) as count FROM `stock_base_infos` where %s=trade_date;" % (field_name)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import web
import math
import da
import re
import datetime
from util import browser
from decimal import *
from config import const_root_local,init_log
import comm

loger = init_log("stock_daily_import")

const_base_url="http://hq.sinajs.cn/list="


def get_local_file_name(index):
    """Local path for today's raw quote file number *index*."""
    day = datetime.datetime.now().strftime('%Y%m%d')
    return '%s/daily/%s_%s.txt' %(const_root_local,day,index)


# Extracts the numeric stock code from keys like "_sh600000=".
regex = re.compile(r"_[a-z]{2}([\d]+)=")


def parse_data_and_import_to_db(lfile,i):
    """Parse one downloaded quote file and import its rows.

    NOTE(review): truncated in this chunk — only the setup is visible.
    """
    today = datetime.datetime.now().strftime('%Y-%m-%d')
    pkids = da.dailyrecords.load_pkids(today)
    with open(lfile,'rb') as f:
        lines = f.readlines()
        # BUG FIX: a redundant f.close() inside the `with` block was removed —
        # the context manager already closes the file on exit.
    rows=[]
#!/usr/bin/env python # -*- coding: utf-8 -*- import web from config import dbr,dbw,const_root_local,init_log from datetime import datetime,date,timedelta import os import random loger = init_log("trading") def load_dates_stock(stock_no,buy_date,hold_days): r = dbr.select('stock_daily_records', where="stock_no=$stock_no and volume>0 and date>$buy_date", offset=0,limit=hold_days+1,order="date asc", vars=locals()) return list(r) def buy_and_sell(strategy_id,strategy_batch_no,stock_no,buy_date,hold_days=1,buy_price='open_price',sell_price='open_price',trade_hands=1): stocks = load_dates_stock(stock_no,buy_date,hold_days) if len(stocks)< hold_days + 1: raise Exception('no-sell-date-stock-info') buy_stock = stocks[0] sell_stock = stocks[-1] earnings = ( sell_stock[sell_price] - buy_stock[buy_price] )* trade_hands * 100 earnings_rate = ( sell_stock[sell_price] - buy_stock[buy_price] )/ buy_stock[buy_price] * 100 dbw.insert('trading_records', strategy_id = strategy_id, strategy_batch_no = strategy_batch_no, buy_or_sell = 0, stock_no = buy_stock.stock_no,
#!/usr/bin/env python # -*- coding: utf-8 -*- import da from config import dbr,dbw,const_root_local,init_log loger = init_log("max_min_date") types = {'date':'date','price':'decimal(8,2)','volume':''} def gen_new_field_sql(): print "ALTER TABLE `stock_base_infos`" start_field = 'trade_date' for d in (7,30,90,188,365,730): for t in ('high','low'): for s in ('price','date','volume'): field_type = 'date' if s=='date' else 'decimal(8,2)' print "ADD COLUMN `%s_%s_%s` %s NULL AFTER `%s`," % (t,s,d,field_type,start_field) start_field = '%s_%s_%s' % (t,s,d) #ADD COLUMN `high_date_7` date NULL AFTER `trade_date`, #ADD COLUMN `high_price_7` decimal(8,2) NULL AFTER `high_date_7`; def update_stock_high_low(stock_no): d={} for day in (7,30,90,188,365,730): d.update(da.dailyrecords.load_max_min(stock_no,day)) da.stockbaseinfos.update_high_low(stock_no,d) def run(): stocks = dbr.select('stock_base_infos',what='stock_no,market_code,market_code_yahoo,pinyin2') for s in stocks: try:
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import web
import math
import da
import re
import datetime
from decimal import *
import config
from config import const_root_local, init_log, dbr, dbw
import comm
import util
from util import browser

loger = init_log("suggest")

#http://www.cnblogs.com/kingwolfofsky/archive/2011/08/14/2138081.html


def get_current_hhmm():
    """Current wall-clock time as an integer HHMM (e.g. 931 for 09:31)."""
    stamp = datetime.datetime.now().strftime('%Y%m%d%H%M')
    return int(stamp[8:])


def load_high_stocks():
    """Stocks outside the 'sb' market whose 188-day high was set today."""
    # Earlier, stricter filter kept for reference:
    #'high_date_90=trade_date and high_date_188=trade_date and close=high and open<>close'
    return list(dbr.select(
        'stock_base_infos',
        where="high_date_188=trade_date and market_code<>'sb'"))
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import web
import math
import da
import re
import datetime
from decimal import *
import config
from config import const_root_local,init_log,dbr,dbw
import comm
import util
from util import browser

loger = init_log("cyb")

#http://www.cnblogs.com/kingwolfofsky/archive/2011/08/14/2138081.html


def get_current_hhmm():
    """Current wall-clock time as an integer HHMM (e.g. 931 for 09:31)."""
    stamp = datetime.datetime.now().strftime('%Y%m%d%H%M')
    return int(stamp[8:])


def load_cyb_stocks():
    """All ChiNext ('cyb') stocks, ordered by descending prate."""
    return list(dbr.select('stock_base_infos',
                           where="market_code='cyb'",
                           order='prate desc'))


def get_last_count(field_name):
    """Count of stocks whose *field_name* column equals today's trade_date."""
    sql="SELECT count(*) as count FROM `stock_base_infos` where %s=trade_date;" % (field_name)
    rows = list(dbr.query(sql))
    return rows[0].count


def get_local_file_name():
    # NOTE(review): truncated in this chunk — only the timestamp is visible.
    strHM = datetime.datetime.now().strftime('%Y%m%d_%H%M')
strfields = 'date,stock_no,open_price,high_price,low_price,close_price,volume,amount,adj_close,raise_drop,raise_drop_rate,is_traday,volume_updown,volume_updown_rate,create_date,last_update' fields = strfields.split(',') sql = "replace into forecast_backup.z_%s set %s" % ( record.stock_no, ','.join([ "%s='%s'" % (k, v) for k, v in record.items() if k in fields and v is not None ])) dbw.query(sql) def delete_daily_record(pk_id): dbw.delete('stock_daily_records', where='pk_id=$pk_id', vars=locals()) loger = init_log("remove_data") def run(end_date): rows = load_daily_records() for r in rows: if r.date > end_date: print 'date big than 2013-1-1' return False break try: replace_z_records(r) delete_daily_record(r.pk_id) except Exception, ex: print '%s %s' % (r.pk_id, str(ex)) loger.error('%s %s' % (r.pk_id, str(ex)))
#!/usr/bin/python
import sys, os, logging, time, re
import SocketServer
import BaseHTTPServer
import CGIHTTPServer

sys.path.append(os.path.join(os.path.dirname(sys.argv[0]), '../cfg'))
import config
config.init_log()


#class HTTPMultiThreadServer(SocketServer.ThreadingMixIn, BaseHTTPServer.HTTPServer):
class HTTPMultiThreadServer(BaseHTTPServer.HTTPServer):
    pass


class HTTPRequestHandler(CGIHTTPServer.CGIHTTPRequestHandler):
    # Cached numeric uid of the configured web user (-1 when unresolvable).
    web_user = None

    @staticmethod
    def web_user_id():
        """Resolve and cache the uid of config.web_user; -1 if the pwd
        module is unavailable or the user does not exist."""
        if HTTPRequestHandler.web_user:
            return HTTPRequestHandler.web_user
        try:
            import pwd
            HTTPRequestHandler.web_user = pwd.getpwnam(config.web_user)[2]
        except (ImportError, KeyError):
            # BUG FIX: the original `except ImportError, KeyError:` bound the
            # caught ImportError to the name KeyError (Python 2 comma syntax)
            # and never caught the KeyError that getpwnam raises for an
            # unknown user. A tuple catches both as intended.
            HTTPRequestHandler.web_user = -1
        return HTTPRequestHandler.web_user

    def log_error(self, *args):
        # NOTE(review): truncated in this chunk.
        msg = args[0]%args[1:]
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import web
from config import dbr,dbw,const_root_local,init_log
from util import browser
import datetime
import os
import csv
import da,comm

#doc:http://www.bizeway.net/read.php?317
#http://table.finance.yahoo.com/table.csv?s=000001.sz
loger = init_log("stock_history_daily")

const_root_url = "http://table.finance.yahoo.com/table.csv?"


def load_stock_dates(stock_no):
    """All dates (as 'YYYY-MM-DD' strings) already recorded for *stock_no*."""
    rows = dbr.select('stock_daily_records',what='date',where="stock_no=$stock_no", vars=locals())
    return [row.date.strftime('%Y-%m-%d') for row in rows]


def import_stock_daily_data(market_code,stock_no,data):
    """Import downloaded daily rows for *stock_no*, skipping dates already
    present in the database.

    NOTE(review): truncated in this chunk — only the start of the loop is visible.
    """
    stock_dates = load_stock_dates(stock_no)
    max_date = max(stock_dates) if stock_dates else '1900-01-01'
    l=[]
    for row in data:
        #if row['date'] <= max_date: break
        if row['date'] in stock_dates:
            continue
        row['stock_market_no'] = market_code
#!/usr/bin/env python # -*- coding: utf-8 -*- import web from config import dbr, dbw, const_root_local, init_log from util import browser import datetime import os import csv import da, comm #doc:http://www.bizeway.net/read.php?317 #http://table.finance.yahoo.com/table.csv?s=000001.sz loger = init_log("stock_history_daily") const_root_url = "http://table.finance.yahoo.com/table.csv?" def load_stock_dates(stock_no): rows = dbr.select('stock_daily_records', what='date', where="stock_no=$stock_no", vars=locals()) dates = [row.date.strftime('%Y-%m-%d') for row in rows] return dates def import_stock_daily_data(market_code, stock_no, data): stock_dates = load_stock_dates(stock_no) max_date = max(stock_dates) if stock_dates else '1900-01-01' l = [] for row in data:
# raise_drop,raise_drop_rate,is_traday,volume_updown,volume_updown_rate,create_date,last_update strfields = "date,stock_no,open_price,high_price,low_price,close_price,volume,amount,adj_close,raise_drop,raise_drop_rate,is_traday,volume_updown,volume_updown_rate,create_date,last_update" fields = strfields.split(",") sql = "replace into forecast_backup.z_%s set %s" % ( record.stock_no, ",".join(["%s='%s'" % (k, v) for k, v in record.items() if k in fields and v is not None]), ) dbw.query(sql) def delete_daily_record(pk_id): dbw.delete("stock_daily_records", where="pk_id=$pk_id", vars=locals()) loger = init_log("remove_data") def run(end_date): rows = load_daily_records() for r in rows: if r.date > end_date: print "date big than 2013-1-1" return False break try: replace_z_records(r) delete_daily_record(r.pk_id) except Exception, ex: print "%s %s" % (r.pk_id, str(ex)) loger.error("%s %s" % (r.pk_id, str(ex)))
#!/usr/bin/env python # -*- coding: utf-8 -*- import web import math import da import re import datetime from util import browser from decimal import * from config import const_root_local, init_log import comm loger = init_log("stock_daily_import") const_base_url = "http://hq.sinajs.cn/list=" # http://hq.sinajs.cn/list=sh600000,sh600004,sh600005,sh600006,sh600007,sh600008,sh600009,sh600010,sh600011,sh600012,sh600015,sh600016,sh600017,sh600018,sh600019,sh600020,sh600021,sh600022,sh600026,sh600027,sh600028,sh600029,sh600030,sh600031,sh600033,sh600035,sh600036,sh600037,sh600038,sh600039,sh600048,sh600050,sh600052,sh600053,sh600054,sh600055,sh600056,sh600057,sh600059,sh600060,sh600061,sh600062,sh600063,sh600064,sh600066,sh600067,sh600068,sh600069,sh600070,sh600071,sh600072,sh600073,sh600075,sh600076,sh600077,sh600078,sh600079,sh600080,sh600081,sh600082,sh600083,sh600084,sh600085,sh600086,sh600088,sh600089,sh600090,sh600091,sh600093,sh600094,sh600095,sh600096,sh600097,sh600098,sh600099,sh600100,sh600101,sh600103,sh600104,sh600105,sh600106,sh600107,sh600108,sh600109,sh600110,sh600111,sh600112,sh600113 def get_local_file_name(index): day = datetime.datetime.now().strftime('%Y%m%d') return '%s/daily/%s_%s.txt' % (const_root_local, day, index) regex = re.compile("_[a-z]{2}([\d]+)=") def parse_data_and_import_to_db(lfile, i): today = datetime.datetime.now().strftime('%Y-%m-%d') pkids = da.dailyrecords.load_pkids(today) with open(lfile, 'rb') as f:
... '/Users/cosyman/Downloads/bs2/car.p.zip') ... None :param file1: :param file2: :param target: :return: """ bsdiff4.file_patch(src_path, dst_path, patch_path) #bsdiffcmd.file_patch(src_path, dst_path, patch_path) def verfiy_patch_file(src_path, dst_path, patch_path): patched_new_packge = src_path + ".new" patch_file(src_path, patched_new_packge, patch_path) if filecmp.cmp(patched_new_packge, dst_path, shallow=1): logger.debug('revert verfiy have successed') os.remove(patched_new_packge) return True else: logger.error( '[ERROR] %s revert verfiy have fail, please check package', src_path) return False if __name__ == '__main__': config.init_log()
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import web
import da
from config import dbr,dbw,const_root_local,init_log
import datetime

loger = init_log("daily_compute")


def get_stock_daily_infos(stock_no):
    """Return all traded daily records (volume > 0) for *stock_no*, newest first."""
    result = dbr.select('stock_daily_records',
                        where="stock_no=$stock_no and volume>0",
                        order="date desc",
                        vars=locals())
    return list(result)


def update(stock_no,date,raise_drop,raise_drop_rate,volume_updown,volume_updown_rate):
    """Write computed price-change and volume-change deltas back onto the
    daily record identified by (stock_no, date)."""
    dbw.update('stock_daily_records',
               raise_drop = raise_drop,
               raise_drop_rate = raise_drop_rate,
               volume_updown = volume_updown,
               volume_updown_rate = volume_updown_rate,
               where="stock_no=$stock_no and date=$date",
               vars=locals())


def update_v2(l):
    """Bulk variant of update(): load all computed rows *l* into a temp
    table and apply them to stock_daily_records with a single joined UPDATE."""
    # Clear any rows left over from a previous run.
    dbw.delete('stock_daily_records_tmp',where="pk_id>0",vars=locals())
    dbw.supports_multiple_insert = True
    dbw.multiple_insert('stock_daily_records_tmp',l)
    # Join temp rows onto the live table by primary key and copy the deltas.
    dbw.query('''update stock_daily_records a,stock_daily_records_tmp t set a.raise_drop=t.raise_drop, a.raise_drop_rate=t.raise_drop_rate, a.volume_updown=t.volume_updown, a.volume_updown_rate=t.volume_updown_rate, a.last_update = t.last_update where a.pk_id=t.pk_id''')