def update_stock_list(self):
    """Fetch the full stock list, query details for every code concurrently,
    and write the codes that pass the sanity checks to the configured
    stock_code file, one code per line.

    Exits the process via sys.exit() if a detail record has an unparsable
    static-PE field (treated as a fatal data error).
    """
    # Fetch all stock codes, then their details in parallel.
    stocks = self._list_stock()
    stock_details_list = con_exec(self._list_details, stocks)
    stock_code = ApplicatoinConfig().get_config_item(
        'stock_file', 'stock_code')
    with open(stock_code, 'w') as f:
        for stock_details in stock_details_list:
            self.logger.info('get code: ' + stock_details[1] + ' ' +
                             stock_details[0])
            # Skip entries whose detail lookup failed upstream.
            if stock_details[0].startswith('error'):
                continue
            try:
                # Skip stocks with an empty or zero static-PE field.
                if stock_details[2] == '' or float(stock_details[2]) == 0:
                    continue
            except Exception as e:
                # Unparsable PE value: log and abort the whole run.
                self.logger.error('cratical error' + '\t' + stock_details[1])
                print('cratical error' + '\t' + stock_details[1])
                sys.exit()
            f.write(stock_details[1] + '\n')
            f.flush()
def filter(self, low_static_pe, high_static_pe, low_dyn_pe, hi_dyn_pe,
           low_pb, high_pb, low_value, high_value):
    """Filter the stock_detail file by PE/PB/market-value ranges and write
    the surviving lines to the stock_detail_filtered file.

    A None bound means "no constraint on that side".  Lines with fewer than
    7 tab-separated fields or unparsable numeric fields are skipped.
    Lines whose static PE exceeds their dynamic PE are also dropped.
    (NOTE: the method name shadows the builtin ``filter`` but is part of the
    public interface, so it is kept.)
    """
    stock_detail_file = ApplicatoinConfig().get_config_item(
        'stock_file', 'stock_detail')
    with open(stock_detail_file, 'r') as f1:
        lines = f1.readlines()
    lines = ''.join(lines).split('\n')
    stock_detail_filtered = ApplicatoinConfig().get_config_item(
        'stock_file', 'stock_detail_filtered')

    def out_of_range(value, low, high):
        # True when value violates an explicit bound; None disables a bound.
        return ((low is not None and value < low) or
                (high is not None and value > high))

    with open(stock_detail_filtered, 'w') as f2:
        for line in lines:
            items = line.split('\t')
            if len(items) < 7:
                continue
            # Parse each numeric field exactly once; skip malformed lines
            # instead of crashing (the original raised on a bad float).
            try:
                static_pe = float(items[3])
                dyn_pe = float(items[4])
                pb = float(items[5])
                value = float(items[6])
            except ValueError:
                continue
            if out_of_range(static_pe, low_static_pe, high_static_pe):
                continue
            if out_of_range(dyn_pe, low_dyn_pe, hi_dyn_pe):
                continue
            if out_of_range(pb, low_pb, high_pb):
                continue
            if out_of_range(value, low_value, high_value):
                continue
            # Keep only stocks whose static PE does not exceed dynamic PE.
            if static_pe > dyn_pe:
                continue
            f2.write(line + '\n')
def stocks_to_txt(self):
    """Read stock codes from the stock_code file, fetch their details
    concurrently, and write one tab-separated detail line per stock to the
    stock_detail file (seq, name, code-without-exchange-prefix, static PE,
    dynamic PE, PB, currency value).

    Exits the process via sys.exit() on an unparsable static-PE field.
    """
    stock_code = ApplicatoinConfig().get_config_item(
        'stock_file', 'stock_code')
    with open(stock_code, 'r') as f1:
        lines = f1.readlines()
    lines = ''.join(lines).split('\n')
    del lines[-1]  # drop the empty entry created by the trailing newline
    stock_detail = ApplicatoinConfig().get_config_item(
        'stock_file', 'stock_detail')
    stock_details_list = con_exec(self._list_details, lines)
    with open(stock_detail, 'w') as f:
        seq = 0
        for stock_details in stock_details_list:
            self.logger.info('get code: ' + stock_details[1] + ' ' +
                             stock_details[0])
            # Skip entries whose detail lookup failed upstream.
            if stock_details[0].startswith('error'):
                continue
            try:
                # Skip stocks with an empty or zero static-PE field.
                if stock_details[2] == '' or float(stock_details[2]) == 0:
                    continue
            except Exception as e:
                self.logger.error('cratical error' + '\t' + stock_details[1])
                print('cratical error' + '\t' + stock_details[1])
                sys.exit()
            seq += 1
            # stock_details[1][2:] strips the 2-char exchange prefix (sh/sz).
            f.write('\t'.join((str(seq), stock_details[0],
                               stock_details[1][2:], stock_details[2],
                               stock_details[3], stock_details[4],
                               stock_details[5])) + '\n')
            f.flush()
def backup_log_file():
    """Rotate the configured log file: rename the current file with a
    timestamp suffix and keep only the three most recent backups."""
    log_file = ApplicatoinConfig().get_config_item('config', 'log_file')
    if not os.path.isfile(log_file):
        return
    timestamp = datetime.datetime.now().__str__().split(
        '.')[0].replace(' ', '_').replace(':', '_')
    shutil.move(log_file, log_file + '_' + timestamp)
    base_path = os.path.dirname(log_file)
    # Derive the backup prefix from the configured log file name rather
    # than hard-coding 'stock.log2_', so renaming the log file in the
    # config keeps rotation working (same pattern as update_hitory_data).
    prefix = os.path.basename(log_file) + '_'
    backups = [name for name in os.listdir(base_path)
               if name.startswith(prefix)]
    if len(backups) > 3:
        backups.sort()
        # Timestamp suffixes sort lexicographically == chronologically,
        # so everything before the last three is the oldest.
        for item in backups[0:-3]:
            os.remove(base_path + os.path.sep + item)
def init():
    """Configure the 'default' logger: DEBUG-level output through a
    concurrent rotating file handler (10 MB per file, up to 50 backups),
    with a verbose format including process/thread ids."""
    log_path = ApplicatoinConfig().get_config_item('config', 'log_file')
    verbose_formatter = {
        'format': "[%(asctime)s] %(levelname)s [%(filename)s:%(lineno)s:%(funcName)s]:%(process)d:%(thread)d %(message)s",
        'datefmt': "%Y-%m-%d %H:%M:%S"
    }
    handler_table = {
        'null': {
            'level': 'DEBUG',
            'class': 'logging.NullHandler',
        },
        'console': {
            'level': 'DEBUG',
            'class': 'logging.StreamHandler',
            'formatter': 'verbose'
        },
        'file': {
            'level': 'DEBUG',
            'class': 'cloghandler.ConcurrentRotatingFileHandler',
            # Split the log once a file reaches 10 MB.
            'maxBytes': 1024 * 1024 * 10,
            # Keep at most 50 rotated files.
            'backupCount': 50,
            # If delay is true, file opening is deferred until the
            # first call to emit().
            'filename': log_path,
            'formatter': 'verbose'
        }
    }
    logging.config.dictConfig({
        'version': 1,
        'disable_existing_loggers': True,
        'supress_abs_warn': True,
        'formatters': {
            'verbose': verbose_formatter,
            'simple2': {
                'format': '%(levelname)s %(message)s'
            },
        },
        'handlers': handler_table,
        'loggers': {
            'default': {
                'handlers': ['file', ],
                'level': 'DEBUG',
            },
        }
    })
def update_hitory_data(self, mode):
    """Re-download history data for every stock code in the stock_code file.

    The existing history directory is archived under a timestamp suffix and
    only the three most recent archives are kept.  In 'slow' mode each
    stock is downloaded sequentially with up to 10 retries per code,
    sleeping 55s between failures.  (NOTE: the method name keeps the
    original 'hitory' typo because callers depend on it.)
    """
    stock_code = ApplicatoinConfig().get_config_item(
        'stock_file', 'stock_code')
    with open(stock_code, 'r') as f1:
        lines = f1.readlines()
    lines = ''.join(lines).split('\n')
    del lines[-1]  # drop the empty entry created by the trailing newline
    if os.path.exists(Dealor.history_path):
        # Archive the current history directory with a timestamp suffix.
        timestamp = datetime.datetime.now().__str__().split(
            '.')[0].replace(' ', '_').replace(':', '_')
        shutil.move(Dealor.history_path,
                    Dealor.history_path + '_' + timestamp)
        parent = os.path.dirname(Dealor.history_path)
        base_name = os.path.basename(Dealor.history_path)
        archives = [name for name in os.listdir(parent)
                    if name.startswith(base_name + '_')]
        if len(archives) > 3:
            archives.sort()
            # Timestamp suffixes sort chronologically; drop all but the
            # newest three archives.
            for item in archives[0:-3]:
                shutil.rmtree(parent + os.path.sep + item)
    os.mkdir(Dealor.history_path)
    if mode == 'slow':
        for line in lines:
            retry_times = 10
            for i in range(0, retry_times):
                try:
                    self._download_history_data_slow(line)
                    self.logger.info(
                        'download sotck %s at %s' %
                        (str(line), str(datetime.datetime.now())))
                    break
                except Exception as e:
                    self.logger.error('_download_history_data retry: ' +
                                      str(i) + ' stock:' + str(line) +
                                      ' ' + e.__str__())
                    # Back off before retrying this stock.
                    time.sleep(55)
def clear_log_file():
    """Delete the configured log file if it is present."""
    path = ApplicatoinConfig().get_config_item('config', 'log_file')
    if os.path.exists(path):
        os.remove(path)
# -*- coding:utf-8 -*- __all__ = ['base_path'] import os import shutil import datetime import logging.config from lib.config import ApplicatoinConfig base_path = os.path.dirname(ApplicatoinConfig().get_config_item('config', 'log_file')) if not os.path.isdir(base_path): os.mkdir(base_path) def init(): logging.config.dictConfig({ 'version': 1, 'disable_existing_loggers': True, 'supress_abs_warn' : True, 'formatters': { 'verbose': { 'format': "[%(asctime)s] %(levelname)s [%(filename)s:%(lineno)s:%(funcName)s]:%(process)d:%(thread)d %(message)s", 'datefmt': "%Y-%m-%d %H:%M:%S" }, 'simple2': { 'format': '%(levelname)s %(message)s' }, }, 'handlers': { 'null': { 'level': 'DEBUG', 'class': 'logging.NullHandler',
# -*- coding:utf-8 -*- __all__ = ['proxies', 'auth', 'process_num', 'http_timeout'] import requests import logging from requests import Response from requests.auth import HTTPProxyAuth from multiprocessing import Pool from lib.config import ApplicatoinConfig proxies ={"http":"http://proxy.huawei.com:8080","https":"https://proxy,huawei.com:8080"} auth = HTTPProxyAuth('k00399859', 'qgmmztmn_6') proxies = None auth = None process_num = int(ApplicatoinConfig().get_config_item('config', 'default_process_num')) http_timeout = float(ApplicatoinConfig().get_config_item('config', 'http_timeout')) def request_timeout(url, timout=http_timeout): logger = logging.getLogger('default') while True: try: response = Response() if proxies is not None: response = requests.get(url, proxies=proxies, auth=auth) else: response = requests.get(url, timeout=timout) break except requests.exceptions.Timeout, e: logger.info('Connect timeout ' + url)
def indexor_filter(self, code_list_src, dir=None):
    """Compute MACD/KDJ indicators per stock and write golden-cross hits.

    code_list_src: 'stock_detail' takes codes from the detail file;
    'history_data' takes them from the history directory listing.
    dir: history-data directory; defaults to the configured history_path.
    (NOTE: 'dir' shadows the builtin but is a public parameter name.)
    Raises Exception for any other code_list_src value.

    Hits are written to the macd_filter, kdj_filter and (when both cross)
    all_indexor_filter files -- the full detail line when known, else the
    bare code.
    """
    # Map code -> its full detail line so output lines can carry details.
    stock_detail = ApplicatoinConfig().get_config_item(
        'stock_file', 'stock_detail')
    with open(stock_detail, 'r') as f:
        detail_lines = f.readlines()
    code_dtl_map = {}
    for detail_line in detail_lines:
        code_dtl_map[detail_line.split('\t')[2]] = detail_line
    if code_list_src == 'stock_detail':
        lines = list(code_dtl_map.keys())
        dir = ApplicatoinConfig().get_config_item('stock_file',
                                                  'history_path')
    elif code_list_src == 'history_data':
        if dir is None:
            dir = ApplicatoinConfig().get_config_item(
                'stock_file', 'history_path')
        # History file names are '<code>.<ext>'; keep only the code part.
        lines = [name.split('.')[0] for name in os.listdir(dir)]
    else:
        raise Exception('Unexpected Error')
    # 'with' guarantees all three output files are closed (and flushed)
    # even if an indicator computation raises; the original leaked the
    # handles on any exception in the loop.
    with open(ApplicatoinConfig().get_config_item('stock_file',
                                                  'macd_filter'), 'w') as f1, \
         open(ApplicatoinConfig().get_config_item('stock_file',
                                                  'kdj_filter'), 'w') as f2, \
         open(ApplicatoinConfig().get_config_item(
             'stock_file', 'all_indexor_filter'), 'w') as f3:

        def write_hit(out, code):
            # Prefer the full detail line; fall back to the bare code.
            if code_dtl_map.get(code):
                out.write(code_dtl_map.get(code))
            else:
                out.write(code + '\n')

        for code in lines:
            self.logger.debug('cal single stock indexor ' + code)
            k_value, d_value, j_value, diff, dea9, macd = \
                self.single_stock_indexor(code, dir)
            macd_flag = False
            kdj_flag = False
            if self._gold_branch(diff, dea9):
                write_hit(f1, code)
                macd_flag = True
            if self._gold_branch(k_value, d_value):
                write_hit(f2, code)
                kdj_flag = True
            if macd_flag and kdj_flag:
                write_hit(f3, code)
class Dealor(object):
    """Fetches the stock list from eastmoney and per-stock quote details
    from qt.gtimg.cn for the rest of the tool."""
    logger = logging.getLogger('default')
    history_path = ApplicatoinConfig().get_config_item('stock_file',
                                                       'history_path')

    def __init__(self):
        self.logger.info('start stock %s', str(datetime.datetime.now()))

    def _list_stock(self):
        """Scrape the full stock list page and return codes like 'sh600000'.

        Excludes codes whose numeric part starts with 900 (B shares),
        1, 20 or 30.  Raises Exception on a non-200 response.
        """
        response = request_timeout(
            "http://quote.eastmoney.com/stock_list.html", 10)
        if response.status_code != 200:
            raise Exception("request error: " + str(response.status_code) +
                            " " + response.content)
        soup = BeautifulSoup(response.content)
        tags = soup.find_all('a')
        # NOTE(review): this positional slice depends on the exact page
        # layout and will silently break if the page changes.
        tags = tags[386:-44]
        print(len(tags))
        stock_list = []
        for item in tags:
            if not item.get('href'):
                continue
            # href looks like '.../<exchange><number>.<ext>'; split out the
            # 2-char exchange prefix and the numeric code.
            stock = re.split('/', item['href'])[3].split('.')[0]
            loc = stock[0:2]
            num = stock[2:]
            # Skip B shares and non-target boards (tuple form replaces the
            # original chain of four startswith checks).
            if num.startswith(('900', '1', '20', '30')):
                continue
            stock_code = (loc + num).encode('ascii', 'ignore')
            stock_list.append(stock_code)
        return stock_list

    def _list_details(self, code):
        """Query qt.gtimg.cn for one stock code.

        Returns (name, code, static_pe, dyn_pe, pb, currency_value) on
        success; an error string on HTTP failure; ('error', code, 0) for an
        unknown code; ('error: stock is stopped', code, 0) when suspended.
        Exits the process on an unexpectedly-shaped response body.
        """
        start_time = time.time()
        response = request_timeout('http://qt.gtimg.cn/q=' + code)
        self.logger.debug('request time:\t' + code + '\t' +
                          str(time.time() - start_time))
        if response.status_code != 200:
            return ("error: " + str(response.status_code) + " " +
                    response.content)
        if response.content.startswith('pv_none_match=1;') or \
                response.content.startswith('v_pv_none_match'):
            return ('error', code, 0)
        try:
            details = re.split('~', response.content)
            # details[5] is the current price; '0.00' means trading halted.
            if details[5] == '0.00':
                return ('error: stock is stopped', code, 0)
            name = details[1]
            code = code.encode('ascii', 'ignore')
            # Assumes the static-PE field carries a 3-char trailing suffix
            # that must be stripped -- TODO confirm against live responses.
            static_pe = details[53][0:-3]
            dym_pe = details[52]
            pb = details[46]
            currency_value = details[44]
        except Exception as e:
            logging.error('response is unexpected:' + response.content +
                          'code: ' + code)
            sys.exit()
        return name, code, static_pe, dym_pe, pb, currency_value
import re import time import shutil import logging import csv from math import fabs import datetime import copy from bs4 import BeautifulSoup import tushare from lib.util import request_timeout, con_exec, max, min from lib.config import ApplicatoinConfig precision = float(ApplicatoinConfig().get_config_item('config', 'precision')) class Dealor(object): logger = logging.getLogger('default') history_path = ApplicatoinConfig().get_config_item('stock_file', 'history_path') def __init__(self): self.logger.info('start stock %s', str(datetime.datetime.now())) pass def _list_stock(self): response = request_timeout( "http://quote.eastmoney.com/stock_list.html", 10) if response.status_code != 200: