def send_notification(self):
    """Periodically notify the systemd watchdog that this service is healthy.

    Loops forever with a period of ``self.interval - 1`` seconds: while the
    main thread is alive it probes the local HTTP endpoint and sends
    WATCHDOG=1 only on a 200 response.  Disables itself quietly when the
    keep-alive configuration or the systemd bindings are missing.
    """
    try:
        from systemd.daemon import notify
        # never set(); used purely as an interruptible periodic sleep
        event = threading.Event()
        # send first notification on init
        logger.debug('[Watchdog]... everything is ok')
        notify('WATCHDOG=1')
        while not event.wait(self.interval - 1):
            main_thread_alive = threading.main_thread().is_alive()
            logger.debug(
                '[Watchdog] is alive {}'.format(main_thread_alive))
            if main_thread_alive:
                logger.debug('[Watchdog]...')
                # probe our own HTTP server before reporting healthy
                url = settings.config_http['bind']
                resp = requests.get(url)
                if resp.status_code == 200:
                    logger.debug('[Watchdog] everything is ok')
                    notify('WATCHDOG=1')
                else:
                    logger.warning(
                        f'[Watchdog] Watchdog not sent. Response status: {resp.status_code}; '
                        f'Response: {resp.__dict__}')
            else:
                # main thread dead: stop notifying (presumably so systemd
                # restarts the unit — confirm against the unit file)
                logger.critical(f'[Watchdog] Main thread is not alive.')
    except (KeyError, TypeError, ValueError):
        # raised while reading interval/config: watchdog not configured
        logger.info('[Watchdog] not enabled, keep_alive missing')
    except ImportError:
        logger.warn('[Watchdog] systemd not imported {}'.format(
            traceback.format_exc(limit=5)))
    except:
        logger.alert('[Watchdog] Unexpected exception {}'.format(
            traceback.format_exc(limit=5)))
def validate_quote(quote_id):
    """Retrieve a quote from the platform API and return its JSON body.

    Returns the decoded payload on HTTP 200; False when quote_id is falsy
    or on any request/HTTP failure.
    """
    try:
        headers = {
            "Authorization": f"Bearer {settings.block['access_token']}",
            "Accept": "application/json",
        }
        if not quote_id:
            logger.warning(f"[validate_quote] Invalid quote_id")
        else:
            url = f"{settings.api_server_full}/applications/{settings.client_id}/quotes/{quote_id}"
            resp = requests.get(url, headers=headers)
            if int(resp.status_code) == 200:
                return resp.json()
            logger.verbose(
                f"[validate_quote] Received response code [{resp.status_code}]"
            )
            raise InvalidRequestException(
                f"Failed to retrieve quote for {quote_id}")
    except (OSError, InvalidRequestException) as e:
        logger.warning(
            f'[validate_quote] Error while making request to platform: {e}')
    except Exception:
        logger.alert(
            f"[validate_quote] Unexpected error: {traceback.format_exc(limit=5)}"
        )
    return False
def start(self):
    """Generate and persist the two Dragon-Tiger board (龙虎榜) news items.

    Skips non-trading days; builds one item for the largest institutional
    net buy and one for the most institutional seats, saves both, then
    pushes a DingTalk notification.
    """
    if LOCAL:
        self._create_table()
    is_trading = self.is_trading_day(self.day)
    if not is_trading:
        logger.warning("非交易日")  # not a trading day
        return
    datas = self.get_resp_datas()
    if not datas:
        logger.warning("接口异常, 请检查")  # upstream API failed, please check
        return
    data1, data2 = self.sort_datas(datas)
    item1 = self.gene_content_maxnetbuy(data1)  # largest institutional net buy
    item2 = self.gene_content_maxinsup(data2)   # most institutional seats
    print(item1)
    print(item2)
    self._target_init()
    self._save(self.target_client, item1, self.target_table, self.fields)
    self._save(self.target_client, item2, self.target_table, self.fields)
    # TODO
    self.ding("龙虎榜-机构净买额最大: \n {}\n\n 龙虎榜-机构席位最多: \n{}".format(
        item1, item2))
def get_response(self, endpoint_conf, credentials, channel_id, cred_key):
    """Perform one polling request described by endpoint_conf.

    Returns a dict holding the JSON response, the channel id and the
    credentials on HTTP 200; an empty dict on any other status.
    """
    url = endpoint_conf['url']
    http_method = endpoint_conf['method']
    payload = endpoint_conf.get('data')
    query_params = endpoint_conf.get('params')
    # substitute the device id (last path segment of the credential key)
    if '{device_id}' in url:
        url = self.replace_device_id(url, cred_key.split('/')[-1])
    response = requests.request(http_method,
                                url,
                                params=query_params,
                                data=payload,
                                headers=self.authorization(credentials))
    if response.status_code != requests.codes.ok:
        logger.warning(
            f'[Polling] Error in polling request: CHANNEL_ID: {channel_id}; '
            f'URL: {url}; RESPONSE: {response}')
        return {}
    logger.info('[Polling] polling request successful with {}'.format(
        cred_key))
    return {
        'response': response.json(),
        'channel_id': channel_id,
        'credentials': credentials
    }
def docker_run_spider(self, spider_name, spider_file_path, restart=False):
    """Ensure the docker container for `spider_name` is running.

    Starts an exited container, optionally restarts a running one, and
    creates the container from the spider image when it does not exist.

    :param spider_name: container name / spider identifier
    :param spider_file_path: path of the spider script inside the image
    :param restart: restart the container when it is already running
    """
    local_int = 1 if LOCAL else 0
    try:
        spider_container = self.docker_containers_col.get(spider_name)
    except Exception:
        # BUG FIX: the bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; only real lookup failures (e.g.
        # docker.errors.NotFound) should mean "no container yet".
        spider_container = None
    if spider_container:
        spider_status = spider_container.status
        logger.info("{} spider status: {}".format(spider_name,
                                                  spider_status))
        if spider_status in ("exited", ):
            spider_container.start()
        elif spider_status in ("running", ):
            if restart:
                spider_container.restart()
        else:
            logger.warning("other status: {}".format(spider_status))
    else:
        self.docker_containers_col.run(
            "registry.cn-shenzhen.aliyuncs.com/jzdev/jzdata/spi:v1",
            environment={"LOCAL": local_int},
            name='{}'.format(spider_name),
            command='python {}'.format(spider_file_path),
            detach=True,  # run detached (daemon mode)
        )
def validate_channel(channel_id):
    """Fetch the channel document for channel_id from the platform.

    Returns the decoded JSON payload on HTTP 200; {} when channel_id is
    falsy or on any request/HTTP failure.
    """
    try:
        headers = {
            "Authorization": f"Bearer {settings.block['access_token']}",
            "Accept": "application/json",
        }
        if not channel_id:
            logger.warning(
                f"[validate_channel] Invalid channel_id: {channel_id}")
        else:
            url = f"{settings.api_server_full}/channels/{channel_id}"
            resp = requests.get(url, headers=headers)
            if int(resp.status_code) == 200:
                return resp.json()
            logger.verbose(
                f"[validate_channel] Received response code [{resp.status_code}]"
            )
            raise ChannelTemplateNotFound(
                f"Failed to retrieve channel_template_id for {channel_id}")
    except (OSError, ChannelTemplateNotFound) as e:
        logger.warning(
            f'[validate_channel] Error while making request to platform: {e}')
    except Exception:
        logger.alert(
            f"[validate_channel] Unexpected error get_channel_template: {traceback.format_exc(limit=5)}"
        )
    return {}
def get_list_page(self, list_url):
    """Fetch a listing page and return its body text, or None on failure."""
    response = self.get(list_url)
    logger.info("List resp: {}".format(response))
    if not (response and response.status_code == 200):
        logger.warning(response)
        return None
    return response.text
def get_device_quotes(self, device_id):
    """Return the stored quotes record for device_id, or None if absent."""
    key = "/".join(["device-quotes", device_id])
    rows = self.query(key)
    if rows:
        return rows[0]
    logger.warning("[DB] No quotes found for device {}".format(key))
    return None
def wrapper(*args, **kwargs):
    """Run the scheduled job, logging any exception instead of propagating.

    When cancel_on_failure is set, the first failure cancels the job by
    returning schedule.CancelJob.
    """
    try:
        return job_func(*args, **kwargs)
    except:
        logger.warning(traceback.format_exc())
        # send a DingTalk alert here
        if cancel_on_failure:
            logger.warning("异常 任务结束: {}".format(schedule.CancelJob))
            # NOTE(review): schedule.cancel_job expects the Job object, not
            # the wrapped function — confirm this call actually cancels;
            # returning CancelJob below is what reliably cancels the job.
            schedule.cancel_job(job_func)
            return schedule.CancelJob
def wrapper(*args, **kwargs):
    """Run the scheduled job, logging any exception instead of propagating.

    When cancel_on_failure is set, the first failure cancels the job by
    returning schedule.CancelJob.
    """
    try:
        return job_func(*args, **kwargs)
    except:
        logger.warning(traceback.format_exc())
        # sentry.captureException(exc_info=True)
        if cancel_on_failure:
            logger.warning("异常 任务结束: {}".format(schedule.CancelJob))
            # NOTE(review): schedule.cancel_job expects the Job object, not
            # the wrapped function — confirm this call actually cancels;
            # returning CancelJob below is what reliably cancels the job.
            schedule.cancel_job(job_func)
            return schedule.CancelJob
def get_channel_status(self, channel_id):
    """Return the stored status record for channel_id, or None if absent."""
    key = "/".join(['status-channels', channel_id])
    rows = self.query(key)
    if rows:
        return rows[0]
    logger.warning("[DB] No status found for channel {}".format(key))
    return None
def handle_receive_token(self, received_data, client_id, owner_id):
    """Exchange received auth data for credentials and persist them.

    Returns HTTP 200 when credentials were stored, 401 otherwise.
    """
    credentials = self.implementer.auth_response(received_data)
    credentials = self._create_expiration_date(credentials)
    if not credentials:
        logger.warning("No credentials to be stored!")
        return Response(status=401)
    self.db.set_credentials(credentials, client_id, owner_id)
    return Response(status=200)
def diff_quarters(self, _quarter_this, _quarter_last):
    """Compare two quarters' database records and compute trigger metrics.

    Loads the raw figures for both quarters, computes the year-over-year
    revenue and net-profit change ratios, then dispatches to the matching
    trigger handler (large gain, gain, reduced gain, gain→loss, loss→gain,
    reduced loss, increased loss).
    """
    # fetch raw data for the previous and the current period
    ret_this, ret_last = self.get_quarter_info(_quarter_this), self.get_quarter_info(_quarter_last)
    logger.debug("本期: \n{}\n".format(pprint.pformat(ret_this)))
    logger.debug("上期: \n{}\n".format(pprint.pformat(ret_last)))
    if not ret_this or not ret_last:
        return
    # # [temporary] intercept the data for testing
    # ret_last = {}
    # ret_this = {}
    # revenue change ratio, computed from the raw figures
    operatingrevenue_this, operatingrevenue_last = ret_this.get("OperatingRevenue"), ret_last.get("OperatingRevenue")
    try:
        r_threshold = (operatingrevenue_this - operatingrevenue_last) / operatingrevenue_last
    except decimal.DivisionByZero:
        logger.warning("计算除 0 ")  # division by zero
        return
    logger.debug("营业额同比计算值: {}".format(r_threshold))
    # trigger condition: net-profit change ratio, from the raw figures
    netprofit_this, netprofit_last = ret_this.get("NPParentCompanyOwners"), ret_last.get("NPParentCompanyOwners")
    try:
        threshold = (netprofit_this - netprofit_last) / netprofit_last
    except decimal.DivisionByZero:
        logger.warning("计算除 0")  # division by zero
        return
    logger.debug("归属于母公司净利润同比计算值: {}".format(threshold))
    # dispatch by the sign/size of the net-profit change
    if netprofit_this > 0 and netprofit_last > 0:
        if threshold >= 0.5:
            # profitable both periods, growth >= 50% -> large gain trigger
            self.inc_50(ret_this, ret_last, threshold, r_threshold)
        elif 0 < threshold < 0.5:
            # profitable both periods, growth below 50% -> gain trigger
            self.inc(ret_this, ret_last, threshold, r_threshold)
        elif threshold < 0:
            # profitable both periods, profit shrank -> reduced-gain trigger
            self.reduce(ret_this, ret_last, threshold, r_threshold)
    elif netprofit_this < 0 and netprofit_last > 0:
        # previous period profitable, current in loss -> gain-to-loss
        self.gain_to_loss(ret_this, ret_last, threshold, r_threshold)
    elif netprofit_this > 0 and netprofit_last < 0:
        # previous period in loss, current profitable -> loss-to-gain
        self.loss_to_gain(ret_this, ret_last, threshold, r_threshold)
    elif netprofit_this < 0 and netprofit_last < 0 and abs(netprofit_this) < abs(netprofit_last):
        # both in loss, loss shrank -> reduced-loss trigger
        self.ease_loss(ret_this, ret_last, threshold, r_threshold)
    elif netprofit_this < 0 and netprofit_last < 0 and abs(netprofit_this) > abs(netprofit_last):
        if threshold > 0.5:
            # both in loss, loss grew by more than 50%
            self.intensify_loss_50(ret_this, ret_last, threshold, r_threshold)
        else:
            # both in loss, loss grew by no more than 50%
            self.intensify_loss(ret_this, ret_last, threshold, r_threshold)
def start(self, _now):
    """Track northbound capital flow for the current trading day.

    Persists the closing inflow once the session is over, then scans the
    realtime series for threshold-breaking points; when a new point breaks
    a larger absolute threshold within 5 minutes of a smaller one, the
    smaller row is deleted.
    """
    if LOCAL:
        self._create_table()
    _start = datetime.datetime(_now.year, _now.month, _now.day, 9, 30, 0)
    _end = datetime.datetime(_now.year, _now.month, _now.day, 15, 0, 0)
    client = self._init_pool(self.product_cfg)
    if _now > _end:
        # after the close: persist the day's final northbound inflow
        item = self.get_final_data(_end)
        if item:
            self._save(client, item, self.target_table, self.fields)
    positive, negative = self.fetch_data(_start, _now)
    positive.update(negative)
    for key, value in positive.items():
        if value:
            item = self.produce(key, value)
            if item in self.invalid_items:
                continue
            # Product requirement: whenever a new point breaks a larger
            # absolute threshold, delete the smaller-threshold row created
            # within the previous 5 minutes.
            # TODO: invalid_items keeps growing without bound
            _threshold = item.get("Threshold")
            _dt = item.get("DateTime")
            before_dt = _dt - datetime.timedelta(minutes=5)
            delete_item = None
            if _threshold > 0:
                sql = '''select * from {} where Date = '{}' \
                and DateTime >= '{}' \
                and DateTime <= '{}' and Threshold < {} limit 1; '''.format(
                    self.target_table, item.get("Date"), before_dt, _dt,
                    _threshold)
                delete_item = client.select_one(sql)
            elif _threshold < 0:
                sql = '''select * from {} where Date = '{}' \
                and DateTime >= '{}' \
                and DateTime <= '{}' and Threshold > {} limit 1; '''.format(
                    self.target_table, item.get("Date"), before_dt, _dt,
                    _threshold)
                delete_item = client.select_one(sql)
            if delete_item:
                sql = 'delete from {} where id = {}; '.format(
                    self.target_table, delete_item.pop("id"))
                client.delete(sql)
                logger.warning("删除数据: {}".format(delete_item))
                client.end()
                self.invalid_items.append(delete_item)
            self._save(client, item, self.target_table, self.fields)
    client.dispose()
def read_wbs(tcs_path):
    """Read test cases from a list of workbook path tuples.

    :param tcs_path: list whose elements are 1-tuples (path,) or
        2-tuples (path, sheet)
    :return: list of dicts, one per test case
    """
    logger.debug('tcs_path: ' + str(tcs_path))
    cases = []
    for entry in tcs_path:
        if len(entry) in (1, 2):
            sheet = entry[1] if len(entry) == 2 else None
            cases.extend(read_wb(entry[0], sheet))
        else:
            logger.warning('序列的长度错误: ' + str(entry))
    return cases
def start(self):
    """Fetch today's full net-purchase ranking and store it as one JSON row.

    Grows the request count until a single call returns the whole market,
    decodes each row's float value, then saves the rank map and sends a
    DingTalk notification.
    """
    # skip non-trading days
    is_trading = self.is_trading_day(self.day)
    if not is_trading:
        logger.warning("非交易日")  # not a trading day
        return
    # create the target table
    self._create_table()
    _count = 4000
    # without knowing today's universe size, grow the request window until
    # one response holds all rows
    while True:
        rank = Rank.sync_get_rank_net_purchase_by_code(
            self.client, offset=0, count=_count,
            stock_code_array=["$$沪深A股"])
        print(len(rank.row))
        if len(rank.row) < _count:
            break
        else:
            _count += 100
    rank_map = {}
    rank_num = 1
    for one in rank.row:
        for i in one.data:
            if i.type == 1:
                # type 1 payload is a little-endian float
                item = {}
                secu_code = one.stock_code[2:]  # strip market prefix
                item['value'] = struct.unpack("<f", i.value)[0]
                item['secu_code'] = secu_code
                rank_map[rank_num] = item
                rank_num += 1
            elif i.type == 3:
                # type 3 payload is utf-8 text
                print(bytes.fromhex(i.value.hex()).decode("utf-8"))
    data = {
        "Date": self.data_day,
        "DayRank": json.dumps(rank_map, ensure_ascii=False)
    }
    self._save(self.target_client, data, self.target_table,
               ["Date", "DayRank"])
    self.ding('每日主力净买个股排行 json 数据已插入,数量{}'.format(
        len(list(rank_map.keys()))))
def launch_server(self):
    """Run the TCP accept loop, handing each connection to a worker thread.

    Binds to the address/port from settings.config_tcp, accepts forever,
    and on any error waits `self.retry_wait` seconds and recreates the
    server via self.kickoff().

    Raises:
        TCPServerNotFoundException: when ip_address/port are missing.
    """
    logger.notice('Starting TCP server')
    # BUG FIX: the config was stored in a local variable while every
    # subsequent access reads self.tcp_settings; keep it on the instance.
    self.tcp_settings = settings.config_tcp
    if 'ip_address' not in self.tcp_settings or 'port' not in self.tcp_settings:
        raise TCPServerNotFoundException(
            "TCP server address or port not found in config file")
    # Create a TCP/IP socket
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # Bind the socket to the port
    server_address = (self.tcp_settings['ip_address'],
                      int(self.tcp_settings['port']))
    logger.info(
        f'starting up on {server_address[0]} port {server_address[1]}')
    try:
        sock.bind(server_address)
        # Listen for incoming connections
        sock.listen(1)
        thread_list = []
        while True:
            # reap finished handler threads once the pool limit is reached
            if len(thread_list) >= self.tcp_settings.get(
                    'thread_pool_limit', DEFAULT_TCP_POOL_LIMIT):
                self._clear_threads(thread_list)
            # Wait for a connection
            logger.info('Waiting for connection')
            connection, client_address = sock.accept()
            thread_ = threading.Thread(target=self.handle_connection,
                                       args=(connection, client_address))
            thread_.start()
            thread_list.append(thread_)
            self._clear_threads(thread_list)
    except OSError as e:
        logger.critical(
            f"Error connecting TCP. Probably because address already in use. "
            f"Will try to reconnect in {self.retry_wait}; Error: {e}")
    except Exception as e:
        logger.alert(
            f"Unexpected error while open TCP socket: {e}; {traceback.format_exc(limit=5)}"
        )
    finally:
        # the accept loop only exits on error: back off, then re-kick
        time.sleep(self.retry_wait)
        logger.warning("Recreating TCP server")
        self.kickoff()
def get_channel_id(self, device_id):
    """Resolve the channel id for device_id, falling back to the legacy store.

    A legacy hit is written back to the current key; returns None when the
    device is unknown everywhere.
    """
    key = "/".join(['channel-devices', device_id])
    rows = self.query(key)
    if rows:
        return rows[0]
    legacy = self.__get_channel_old(device_id)
    if not legacy:
        logger.warning(
            "[DB] No channel found for device {}".format(key))
        return None
    self.set_channel_id(device_id, legacy, True)
    return legacy
def get_device_id(self, channel_id):
    """Resolve the device id for channel_id, falling back to the legacy store.

    A legacy hit is written back to the current key; returns None when the
    channel is unknown everywhere.
    """
    key = "/".join(['device-channels', channel_id])
    rows = self.query(key)
    if rows:
        return rows[0]
    legacy = self.__get_device_old(channel_id)
    if not legacy:
        logger.warning(
            "[DB] No device found for channel {}".format(key))
        return None
    self.set_device_id(channel_id, legacy, True)
    return legacy
def set_props(json_obj, prop_statements: str):
    """Evaluate prop statements against json_obj into the `props` store.

    Each non-empty line of prop_statements maps a key to a JSON path; the
    resolved value is stringified into the module-level `props` dict.  A
    list-valued result aborts the run.  When ini.debug is set, props is
    dumped to ini.props_path afterwards.
    """
    for line in prop_statements.split('\n'):
        if not line:
            continue
        key, json_path = parse_prop_statement(line)
        logger.info('key={},json_path={}'.format(key, json_path))
        resolved = get_by_json_path(json_path, json_obj)
        logger.info('value=' + str(resolved))
        if isinstance(resolved, list):
            logger.error("value只能是单个的值,不能是数组!")
            return
        if props.get(key) is not None:
            logger.warning('props[{}]已经存在,将被覆盖。原值={},新值={}'.format(
                key, props[key], str(resolved)))
        props[key] = str(resolved)
    if ini.debug:
        with ini.props_path.open(mode='w+', encoding='utf-8') as out:
            json.dump(props, out)
def read_xlsx(self, dt: datetime.datetime):
    """Parse the daily quotation xlsx for `dt` and batch-save its rows.

    Reads sheet '股票行情', skips days with fewer than 10 rows, resolves
    each security's internal code, then removes the source file.
    """
    dt = dt.strftime("%Y-%m-%d")
    dirname, filename = os.path.split(os.path.abspath(__file__))
    file_path = os.path.join(dirname, "./data_dir/{}.xlsx".format(dt))
    wb = xlrd.open_workbook(file_path)
    # sheet_names = wb.sheet_names()
    ws = wb.sheet_by_name('股票行情')  # "stock quotations" sheet
    _rows = ws.nrows
    print(">>> ", _rows)
    if _rows < 10:
        logger.warning("{} 当天无数据".format(dt))  # no data for that day
        self.rm_file(file_path)
        return
    client = self._init_pool(self.spider_cfg)
    items = []
    for idx in range(1, _rows):  # row 0 is the header row
        _line = ws.row_values(idx)
        # print(_line)
        item = dict()
        item['TradingDay'] = _line[0]  # trading date
        secu_code = _line[1]
        item['SecuCode'] = secu_code  # security code
        inner_code = self.get_inner_code(secu_code)
        if not inner_code:
            continue
            # raise
        item['InnerCode'] = inner_code
        item['SecuAbbr'] = _line[2]  # security abbreviation
        item['PrevClose'] = float(_line[3])  # previous close
        item['Close'] = float(_line[4])  # today's close
        item['RiseFall'] = float(_line[5])  # rise/fall (%)
        amount = _line[6]
        item['Amount'] = self._re_amount(amount)  # turnover (yuan)
        item['PERatio'] = self._re_amount(_line[7])  # P/E ratio
        items.append(item)
        # self._save(client, item, self.table_name, self.fields)
    logger.info(len(items))
    self._batch_save(client, items, self.table_name, self.fields)
    self.rm_file(file_path)
    client.dispose()
def start(self):
    """Generate the market-open movement news item for today.

    Picks the three best-performing blocks (no rise threshold any more),
    builds the news content, saves it, and pushes a DingTalk notification.
    """
    # skip non-trading days
    is_trading = self.is_trading_day(self.day)
    if not is_trading:
        logger.warning('非交易日')  # not a trading day
        return
    # create the target table
    if LOCAL:
        self._create_table()
    all_block_stats_map = self.get_all_block_stats(self.day)
    # print(pprint.pformat(all_block_stats_map))
    # empty on non-trading days
    if not all_block_stats_map:
        logger.warning("未获取到交易日的 block 信息")  # no block info for the day
        return
    all_block_codes = list(all_block_stats_map.keys())
    # print(all_block_codes)
    # Previous requirement: first pick blocks with realtime rise > 1.5%,
    # then intersect with the full block list.
    # Updated requirement: take the top-3 risers of all blocks; no realtime
    # rise threshold any more.
    # block_rise_map = self.get_block_rise_map(all_block_codes)
    # print(pprint.pformat(block_rise_map))
    # (this realtime feed does return data on non-trading days)
    first3_block_rise_map = self.get_first3_rise_map(all_block_codes)
    print(pprint.pformat(first3_block_rise_map))
    final = self.get_content(all_block_stats_map, first3_block_rise_map)
    print(pprint.pformat(final))
    if not final:
        return
    self._target_init()
    ret = self._save(self.target_client, final, self.target_table,
                     ['Title', 'Date', 'Content', 'NewsType', 'NewsJson'])
    if ret:
        self.ding("开盘异动资讯生成:\n{}".format(pprint.pformat(final)))
def rename_key(self, new_key, old_key):
    """Copy old_key's value to new_key, then delete old_key.

    Returns True only when both the create and the delete succeeded; None
    when an unexpected error was logged.
    """
    try:
        created = False
        deleted = False
        value = self.get_key(old_key)
        if value:
            created = self.set_key(new_key, value)
            if not created:
                logger.warning(
                    f"[DB] error while creating {new_key} to database.")
        else:
            logger.warning(f"[DB] Key {old_key} not found in database.")
        if created:
            deleted = self.delete_key(old_key)
            if not deleted:
                logger.warning(
                    f"[DB] error while deleting {old_key} from database.")
        if created and deleted:
            logger.info(
                f"[DB] Key {old_key} renamed to {new_key} successfully.")
        return created and deleted
    except Exception:
        logger.error(
            f"[DB] Failed to rename key {old_key} to {new_key}. {traceback.format_exc(limit=5)}"
        )
def get(self, url):
    """requests.get wrapper with optional proxy rotation and retries.

    Without a proxy, performs a single direct request.  With a proxy,
    retries up to 10 times: returns the Response on HTTP 200, None on 404
    or after exhausting the attempts; other statuses are retried.
    """
    if not self.use_proxy:
        return requests.get(url, headers=self.headers)
    count = 0
    while True:
        count += 1
        if count > 10:
            return None
        try:
            # BUG FIX: requests expects the proxies mapping to be keyed by
            # URL scheme; the previous {"proxy": ...} key was silently
            # ignored, so requests never went through the proxy.
            proxy_url = self._get_proxy()
            proxies = {"http": proxy_url, "https": proxy_url}
            resp = requests.get(url, headers=self.headers, proxies=proxies)
        except Exception:
            # narrowed from a bare `except:` so Ctrl-C still works
            traceback.print_exc()
            time.sleep(0.5)
        else:
            if resp.status_code == 200:
                return resp
            elif resp.status_code == 404:
                return None
            else:
                logger.warning("Status Code: {}".format(resp.status_code))
                time.sleep(1)
def get_credentials_with_key(self, client_id, owner_id, channel_id=None):
    """Look up credentials, trying progressively broader keys.

    Order: client+owner+channel, owner+channel, client+owner, then the
    legacy store.  Returns (credentials, credentials_key), or
    ({}, credentials_key) when nothing is found anywhere.  A hit on the
    client+owner key is copied to the channel-scoped key when channel_id
    is given.
    """
    data = None
    credentials_key = None
    if channel_id:
        # most specific key first: client + owner + channel
        credentials_key = "/".join([
            'credential-clients', client_id, 'owners', owner_id, 'channels',
            channel_id
        ])
        data = self.query(credentials_key)
        if not data:
            # fall back to owner + channel
            credentials_key = "/".join(
                ['credential-owners', owner_id, 'channels', channel_id])
            data = self.query(credentials_key)
    if not data:
        # fall back to client + owner (also the entry point when no
        # channel_id was given)
        credentials_key = "/".join(
            ['credential-clients', client_id, 'owners', owner_id])
        data = self.query(credentials_key)
        if not data:
            # last resort: legacy storage layout
            data = self.__get_credentials_old(client_id, owner_id,
                                              channel_id)
            if not data:
                logger.warning("[DB] No credentials found!")
                return {}, credentials_key
            else:
                # legacy store returns the record itself, not a result list
                data = [data]
        elif channel_id:
            # found under client+owner: cache under the channel-scoped key
            self.set_credentials(data[0], client_id, owner_id, channel_id)
    credentials = data[0]
    logger.debug("[DB] Credentials Found! {}".format(credentials_key))
    return credentials, credentials_key
def load_xlsx(self, dt: datetime.datetime):
    """Download the daily detail file for one day.

    :param dt: the day to download, e.g. 2020-05-06
    :return: path of the downloaded file on success; None when the day has
        no data or the download timed out
    :raises Exception: on any other download failure
    """
    dt = dt.strftime("%Y-%m-%d")
    url = self.base_url.format(dt, dt, random.random())
    dirname, filename = os.path.split(os.path.abspath(__file__))
    file_path = os.path.join(dirname, "./data_dir/{}.xlsx".format(dt))
    try:
        urlretrieve(url, file_path, self.callbackfunc)
    except urllib.error.HTTPError:
        # server has no file for this day
        logger.warning("不存在这一天的数据{}".format(dt))
    except TimeoutError:
        logger.warning("超时 {} ".format(dt))
    except Exception as e:
        logger.warning("下载失败 : {}".format(e))
        # BUG FIX: re-raise the original exception instead of a bare
        # `raise Exception`, which discarded all error details.
        raise
    else:
        return file_path
def start(self):
    """Crawl one page of search results for the keyword and save the articles.

    For each listed article, fetches the detail page, extracts the body and
    batch-saves all collected items.
    """
    self._spider_init()
    items = []
    # only go back 1 page per keyword
    for page in range(1, 2):
        list_url = self.start_url + urlencode(
            self.make_query_params(self.key, page))
        logger.info(list_url)
        list_page = self.get_list_page(list_url)
        if not list_page:
            logger.warning("列表页请求失败")  # list page request failed
            return
        list_infos = self.parse_list(list_page)
        if not list_infos:
            logger.warning("列表页面数据解析失败")  # list page parse failed
            return
        for data in list_infos:
            item = dict()
            item['code'] = self.key
            link = data.get("ArticleUrl")
            item['link'] = link
            item['title'] = data.get("Title")
            item['pub_date'] = data.get("ShowTime")
            detail_page = self.get_detail(link)
            if not detail_page:
                logger.warning(f"详情页解析失败{link}")  # detail page failed
                continue
            article = self.parse_detail(detail_page)
            item['article'] = article
            print(item)
            items.append(item)
            # throttle between detail requests
            time.sleep(3)
    print(f'数据个数{len(items)}')
    ret = self._batch_save(self.spider_client, items, self.table_name,
                           self.fields)
    print(f'入库个数{ret}')
def history(self):
    """Backfill historical data. [Deprecated: rise/fall data is realtime-only.]

    Iterates every day from 2020-01-01 to 2020-06-29 and generates/saves
    the daily content for each trading day.
    """
    start_day = datetime.datetime(2020, 1, 1)
    end_day = datetime.datetime(2020, 6, 29)
    _day = start_day
    while _day <= end_day:
        print(_day)
        # skip non-trading days
        is_trading = self.is_trading_day(_day)
        if not is_trading:
            logger.warning("{}非交易日".format(_day))  # not a trading day
        else:
            all_block_stats_map = self.get_all_block_stats(_day)
            if not all_block_stats_map:
                logger.warning("未获取到交易日 {} 的 block 信息".format(_day))
            else:
                all_block_codes = list(all_block_stats_map.keys())
                block_rise_map = self.get_block_rise_map(all_block_codes)
                final = self.get_content(all_block_stats_map, block_rise_map)
                if not final:
                    logger.warning("今日{}无数据生成".format(_day))
                else:
                    ret = self._save(self.target_client, final,
                                     self.target_table, [
                                         'Title', 'Date', 'Content',
                                         'NewsType', 'NewsJson'
                                     ])
                    if ret:
                        logger.info("{}数据保存成功".format(_day))
        print()
        print()
        _day += datetime.timedelta(days=1)
def process_items(self, datas): secu_codes = self.a_secucategory_codes # 对首次发布的数据再次进行筛选 # (1) A 股票 # (2) 存在聚源内部编码 # (3) 可查询到当日的收盘价以及涨跌幅 items = [] for data in datas: trd_code = data.get("trd_code") if not trd_code in secu_codes: logger.info("非A股") continue secu_sht = data.get("secu_sht") com_name = data.get("com_name") rat_desc = data.get("rat_desc") item = dict() if len(trd_code) < 6: trd_code = (6 - len(trd_code)) * "0" + trd_code item["SecuCode"] = trd_code item['SecuAbbr'] = secu_sht inner_code = self.get_inner_code_bysecu(trd_code) if not inner_code: continue sql = '''select Close, ChangePercActual from {} where InnerCode = {} and Date <= '{}' order by Date desc limit 1; '''.format(self.idx_table, inner_code, self.day) ret = self.dc_client.select_one(sql) if not ret: logger.info("{} {} 无法查询到 {} 的收盘价以及涨跌幅".format( trd_code, secu_sht, self.day)) continue _close = self.re_decimal_data(ret.get("Close")) changepercactual = self.re_decimal_data( ret.get("ChangePercActual")) content = self.content_format.format(secu_sht, trd_code, com_name, rat_desc, _close, changepercactual) item['Close'] = _close item['ChangePercActual'] = changepercactual item['Content'] = content logger.debug(item) items.append(item) if len(items) == 0: logger.warning("{} 无符合条件的首次发布数据".format(self.day)) return title = self.title_format[:-2].format(self.day.month, self.day.day, len(items)) content = '' for item in items: content += (item.get("Content") + "\n") ret = dict() ret["Date"] = self._today ret['NewsType'] = 5 ret['Title'] = title ret["Content"] = content self._target_init() self._save(self.target_client, ret, self.target_table, self.fields)
def start(self):
    """Generate the "consecutive limit-up stocks at today's auction" news item.

    Pulls yesterday's limit-up list, decodes the packed per-stock fields,
    keeps only stocks with 2+ consecutive limit-ups, formats the auction
    performance text, saves it and sends a DingTalk notification.
    """
    # create the target table
    if LOCAL:
        self._create_table()
    # skip non-A-share trading days
    is_trading = self.is_trading_day(self.today_str)
    if not is_trading:
        logger.warning("非A股交易日")  # not an A-share trading day
        return
    rank = Rank.sync_get_limit_up_lb_count(
        self.client, offset=0, count=1000,
        stock_code_array=["$$主题猎手-昨日涨停"]
        # client,
        # offset=0,
        # count=10,
        # stock_code_array=["$$今日涨停"],
    )
    # values arrive in this order per row:
    fields = [
        "lb_count",           # consecutive limit-up count
        'rise_close_amount',  # sealed amount at limit-up
        'rise_scope',         # rise percentage
        'limit_up_amount',    # turnover at limit-up
        'current_price',      # latest price
        'limit_up_price',     # limit-up price
        'limit_down_price',   # limit-down price
        'update_time',        # update time
        'open',               # opening price
        'pre_close',          # previous close
        'code',               # security code
    ]
    items = []
    for one in rank.row:
        code = one.stock_code
        values = []
        for i in one.data:
            # decode the packed value according to its wire type
            if i.type == 1:
                value = struct.unpack("<f", i.value)[0]
            elif i.type == 4:
                value = struct.unpack("<i", i.value)[0]
            elif i.type == 2:
                value = struct.unpack("<d", i.value)[0]
            elif i.type == 3:
                value = bytes.fromhex(i.value.hex()).decode("utf-8")
            else:
                raise ValueError
            values.append(value)
        values.append(code)
        item = dict(zip(fields, values))
        items.append(item)
        if item.get("lb_count") < 2:
            # rows are ordered by lb_count: drop the first non-qualifying
            # row and stop scanning
            items.remove(item)
            break
    # print(len(items))
    # for item in items:
    #     print(item)
    title = '连板股今日竞价表现'
    content = '连板股今日竞价表现如下:\n'
    for item in items:
        secu_code = item.get("code")[2:]  # strip the market prefix
        secu_abbr = self.get_juyuan_codeinfo(secu_code)[1]
        if item.get("current_price") == item.get("limit_up_price"):
            logger.debug("涨停")  # sealed at limit-up
            content += '{}连板个股{}({})集合竞价涨停封板,封板金额为{},成交金额为{};\n'.format(
                item.get("lb_count"), secu_abbr, secu_code,
                self.re_money_data(item.get('rise_close_amount')),
                self.re_money_data(item.get("limit_up_amount")))
        else:
            # classify the open as flat / lower / higher vs previous close
            # print(item.get("open"), type(item.get("open")))
            # print(item.get("pre_close"), type(item.get("open")))
            if item.get("open") == item.get("pre_close"):
                rise_str = '平开'
            elif item.get("open") < item.get("pre_close"):
                rise_str = "低开"
            else:
                rise_str = '高开'
            logger.debug(rise_str)
            content += '{}连板{}({}){}{}%,成交金额为{};\n'.format(
                item.get("lb_count"), secu_abbr, secu_code, rise_str,
                self.re_decimal_data(abs(item.get("rise_scope"))),
                self.re_money_data(item.get("limit_up_amount")))
    # print(content)
    final = dict()
    final['Title'] = title
    final['Content'] = content
    # the auction closes at 09:25 and this job runs at 09:25 daily
    final['Date'] = "{} {}".format(self.today_str,
                                   items[0].get("update_time"))
    final['NewsType'] = 2
    print(pprint.pformat(final))
    self._target_init()
    self._save(self.target_client, final, self.target_table, self.fields)
    self.ding("连板股今日竞价表现: \n{}".format(pprint.pformat(final)))
import numpy as np import PyQt4.QtGui as qt # load regular expression package (for parsing of energy from file name) import re from base import logger #### load packages for available file types #### formats_available = ['IMG'] try: import pyfits formats_available.append("FITS") except: logger.warning("The pyfits package is not properly installed.") try: import Image formats_available.append("PIL") except: logger.warning("The Image package (Python Imaging Library) is not properly installed.") class ImageLoader(object): """ Abstract base class for a class loading LEED images. Subclasses need to provide - get_image(image_path) Subclasses may override (default: from filename with regex) - get_energy(image_path) """