class WINDDIFF(object):
    """Read-only holder for wind-difference alert thresholds.

    Values are read once, at class-definition time, from the [winddiffs]
    section of the module-level ``config`` parser.
    """

    # Minimum interval between consecutive diff alerts.
    MIN_DIFF_ALERT = config.getint("winddiffs", "mindiffinterval")
    # Interval used for "important" diff alerts.
    IMPORTANT_DIFF_ALERT = config.getint("winddiffs", "importantdiffinterval")
    # Minimum wind strength that triggers an alert.
    # NOTE(review): the config key spells "strenght" — keep the typo unless the
    # config file itself is migrated at the same time.
    MIN_ALERT = config.getint("winddiffs", "min_wind_strenght_alert")
    # How often wind readings are checked (units not visible here — presumably
    # minutes; confirm against the scheduler that consumes it).
    WIND_CHECK_INTERVAL = config.getint("winddiffs", "wind_check_interval")

    def __setattr__(self, *_):
        # Silently ignore any instance attribute assignment so the class
        # members above cannot be shadowed per-instance. (Rebinding them on
        # the class itself is still possible.)
        pass
async def connect(self, hostname, port):
    """Connect to the configured proxy server and forward the target address.

    hostname/port describe the destination requested by the local client;
    they are packed and sent to the remote ProxyClient once the connection
    is up. Returns False on connection failure (after closing our side),
    None on success.
    """
    # Normalize hostname to bytes: str has .encode, bytes raises
    # AttributeError and is passed through unchanged.
    try:
        hostname = hostname.encode('utf-8')
    except AttributeError:
        pass
    # config
    server = config.get('default', 'server')
    server_port = config.getint('default', 'server_port')
    loop = asyncio.get_event_loop()
    # Establish the connection to the ProxyClient.
    try:
        transport, client = await loop.create_connection(
            ProxyClient, server, server_port)
    # Connection failed.
    except Exception:
        logging.error('Could not connect server: {}:{}'.format(
            server, server_port))
        if self.transport.can_write_eof():
            self.transport.write_eof()
        return False
    # Bind server_transport and transport to each other so the two
    # protocol instances can relay data both ways.
    client.server_transport = self.transport
    self.client_transport = transport
    # Send the address info: hostname length, hostname bytes, then port.
    self.client_transport.write(
        pack('!i%ssH' % len(hostname), len(hostname), hostname, port))
def get_zoom_graph(x, y, sf_trigger):
    """Build the zoom-graph figure dict: one Scattergl markers trace.

    Marker color/size come from the [ZOOM_GRAPH] config section.
    ``sf_trigger`` is accepted for the callback signature but unused here.
    """
    marker_style = dict(
        color=config.get('ZOOM_GRAPH', 'POINTS_COLOR'),
        size=config.getint('ZOOM_GRAPH', 'POINTS_SIZE'),
    )
    points_trace = go.Scattergl(x=x, y=y, mode='markers', marker=marker_style)
    return {'data': [points_trace], 'layout': []}
async def connect(self, hostname, port):
    """Connect to the configured proxy server and forward the target address.

    Variant of the handshake above without the hostname-encoding step, so
    ``hostname`` is assumed to already be bytes. Returns False on connection
    failure (after closing our side), None on success.
    """
    # config
    server = config.get('default', 'server')
    server_port = config.getint('default', 'server_port')
    loop = asyncio.get_event_loop()
    # Establish the connection to the ProxyClient.
    try:
        transport, client = await loop.create_connection(ProxyClient, server,
                                                         server_port)
    # Connection failed.
    except Exception:
        logging.error('Could not connect server: {}:{}'.format(server,
                                                               server_port))
        if self.transport.can_write_eof():
            self.transport.write_eof()
        return False
    # Bind server_transport and transport to each other for two-way relaying.
    client.server_transport = self.transport
    self.client_transport = transport
    # Send the address info: hostname length, hostname bytes, then port.
    self.client_transport.write(
        pack('!i%ssH' % len(hostname), len(hostname), hostname, port))
# --- fragment: tail of a connect() method; its enclosing def is not in view ---
self.transport.write_eof()
return False
# Bind server_transport and transport to each other for two-way relaying.
client.server_transport = self.transport
self.client_transport = transport
# Send the address info: hostname length, hostname bytes, then port.
self.client_transport.write(
    pack('!i%ssH' % len(hostname), len(hostname), hostname, port))

if __name__ == '__main__':
    # config: debug flag plus the local listen address/port.
    debug = config.getboolean('default', 'debug')
    local = config.get('default', 'local')
    local_port = config.getint('default', 'local_port')
    if debug:
        debug_level = logging.DEBUG
    else:
        debug_level = logging.ERROR
    # log: root logger and asyncio's logger follow the same level.
    logging.basicConfig(
        level=debug_level,
        format='%(threadName)10s %(asctime)s %(levelname)-8s %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S',
        filemode='a+')
    logging.getLogger('asyncio').setLevel(debug_level)
    loop = asyncio.get_event_loop()
# --- fragment: tail of a connect() method; its enclosing def is not in view ---
self.transport.write_eof()
return False
# Bind server_transport and transport to each other for two-way relaying.
client.server_transport = self.transport
self.client_transport = transport
# Send the address info: hostname length, hostname bytes, then port.
self.client_transport.write(
    pack('!i%ssH' % len(hostname), len(hostname), hostname, port))

if __name__ == '__main__':
    # config: debug flag plus the local listen address/port.
    debug = config.getboolean('default', 'debug')
    local = config.get('default', 'local')
    local_port = config.getint('default', 'local_port')
    if debug:
        debug_level = logging.DEBUG
    else:
        debug_level = logging.ERROR
    # log: root logger and asyncio's logger follow the same level.
    logging.basicConfig(level=debug_level,
                        format='%(threadName)10s %(asctime)s %(levelname)-8s %(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        filemode='a+')
    logging.getLogger('asyncio').setLevel(debug_level)
    loop = asyncio.get_event_loop()
    # Enable asyncio's debug mode alongside verbose logging.
    if debug:
        loop.set_debug(enabled=True)
# --- fragment: closing brace of a dict literal started above (not in view) ---
}

# Silence werkzeug's per-request logging below WARNING.
log = logging.getLogger('werkzeug')
log.setLevel(logging.WARNING)

# Flask-Caching attached to the Dash server; start from a clean cache.
cache = Cache()
cache.init_app(app.server, config=CACHE_CONFIG)
cache.clear()

stateRecorder = StateRecorder()
# NOTE(review): this instance rebinds (shadows) the FitRecorder class name —
# the class is no longer reachable under that name after this line.
FitRecorder = FitRecorder()

app.scripts.config.serve_locally = True
# Needed because callbacks reference components created dynamically.
app.config['suppress_callback_exceptions'] = True

# Max number of points rendered via Scattergl in the zoom graph.
scattergl_limit = config.getint('ZOOM_GRAPH', 'POINTS_LIMIT')

# Module-level UI/fit state, mutated by callbacks defined elsewhere.
sf = []
start_date_int = None
fit_func = None
confirmed_fit_func = None
shadow_shape = None
zoomStartPoint = None
refPoint_x = None
fit_start_value_x = None
fit_end_value_x = None
refPoint_y = None
fit_start_value_y = None
fit_end_value_y = None
fileName = None
sf_trigger = 1
# --- fragment: tail of a SOCKS5 request handler; its enclosing def is not in view ---
client.server_transport = self.transport
self.client_transport = transport
client.hostname = hostname
# Reply to the browser: SOCKS5 success (0x05 0x00), IPv4 address type (0x01),
# followed by the bound address and port of our outgoing socket.
hostip, port = transport.get_extra_info('sockname')
host = unpack("!I", socket.inet_aton(hostip))[0]
self.transport.write(
    pack('!BBBBIH', 0x05, 0x00, 0x00, 0x01, host, port))

if __name__ == '__main__':
    # config: debug flag plus the remote server address/port.
    debug = config.getboolean('default', 'debug')
    server = config.get('default', 'server')
    server_port = config.getint('default', 'server_port')
    if debug:
        debug_level = logging.DEBUG
    else:
        debug_level = logging.ERROR
    # log: root logger and asyncio's logger follow the same level.
    logging.basicConfig(
        level=debug_level,
        format='%(threadName)10s %(asctime)s %(levelname)-8s %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S',
        filemode='a+')
    logging.getLogger('asyncio').setLevel(debug_level)
    loop = asyncio.get_event_loop()
def scrapWebData(html, info):
    """Populate *info* with wind/weather readings scraped from *html*.

    The parser branch is selected by ``info.infoSourceName``; each branch
    fills the subset of fields its source publishes (readDateTime, windDir,
    windAvg/windGust or windStrength, temperatures, barometer pressure).
    Unknown sources are ignored.
    """
    if info.infoSourceName == consts.SOURCEREAD.PRIGAL:
        bUseWebData = True
        # First we get the read date and time from the web scrap.
        readDate = html.find("h3", attrs={
            "class": "inf-time-date rel-gradient english"
        }).text
        readTime = html.find("p", attrs={
            "class": "inf-time-time english"
        }).text
        info.readDateTime = readDate + " " + readTime
        # If the scrap's timestamp is older than the check interval, the web
        # feed is stale (occasionally sensor reads are not updated for a long
        # time); optionally fall back to OCR data extracted from an image.
        if getMinDiff(datetime.datetime.now(),
                      info.readDateTime) > config.getint(
                          "winddiffs", "wind_check_interval"):
            if config.getboolean("vision", "use_gcp_vision"):
                texts = getDataFromImage(get_url("botimagecommands", "psum"))
                if texts is not None and len(texts) > 0:
                    # Only switch off web data once OCR actually returned text,
                    # so `texts` is always bound when bUseWebData is False.
                    bUseWebData = False
        info.windDir = (html.find("div", attrs={
            "class": "inf-wind-direction"
        }).contents[0].text if bUseWebData else texts["dir"])
        info.infoDate = (info.infoDate if bUseWebData else texts["readDate"])
        # BUG FIX: previously this read info.infoDate on the web-data path,
        # clobbering the time field with the date. Mirror the self-assignment
        # pattern of infoDate above.
        info.infoTime = (info.infoTime if bUseWebData else texts["readTime"])
        info.windStrength = (html.find(
            "div", attrs={
                "class": "inf-wind-strength"
            }).contents[0].text if bUseWebData else str(texts["wind"]) +
            " - " + str(texts["gusts"]))
        # Barometer pressure is only available from the OCR source.
        info.barometerPreasure = (None if bUseWebData else texts["barometer"])
    elif info.infoSourceName == consts.SOURCEREAD.EILAT_METEO_TECH:
        # All readings live in the single highlighted table row.
        tr = html.find("tr", attrs={"bgcolor": "#ccffff"})
        info.windDir = str(tr.contents[7].contents[0].contents[0])
        info.readDateTime = str(
            tr.contents[1].contents[0].contents[0].contents[0])
        info.windAvg = tr.contents[8].contents[0].contents[0]
        info.windGust = tr.contents[9].contents[0].contents[0]
        info.waterTemp = tr.contents[11].contents[0].contents[0]
        info.barometerPreasure = tr.contents[5].contents[0].contents[0]
    elif info.infoSourceName == consts.SOURCEREAD.DOR_NACHSHOLIM:
        info.readDateTime = html.find("span", attrs={
            "id": "latestConditionsQtip"
        }).contents[0]
        tbody = html.find(
            "tbody", attrs={
                "id": "hobolink-latest-conditions-form:conditions-tree_data"
            })
        # Deep positional digs into the HOBOlink conditions table; fragile if
        # the page layout changes.
        info.Temp = str(tbody.contents[1].contents[0].contents[10].contents[1].
                        contents[1].contents[0].contents[0])
        info.windAvg = str(tbody.contents[4].contents[0].contents[10].
                           contents[1].contents[1].contents[0].contents[0])
        info.windGust = str(tbody.contents[6].contents[0].contents[10].
                            contents[1].contents[1].contents[0].contents[0])
        info.windDir = str(tbody.contents[5].contents[0].contents[10].
                           contents[1].contents[1].contents[0])
        info.barometerPreasure = str(
            tbody.contents[9].contents[0].contents[10].contents[1].contents[1].
            contents[0].contents[0])
    elif info.infoSourceName == consts.SOURCEREAD.SURFO:
        div = html.find("div", attrs={"class": "w_line firstline"})
        info.readDateTime = str(div.contents[0].contents[0])
        info.Temp = str(div.contents[5].contents[0])
        info.windAvg = str(div.contents[2].contents[0])
        info.windGust = str(div.contents[3].contents[0])
        info.windDir = str(div.contents[1].contents[0])
        updatedate = html.find("span", attrs={"id": "ContentPlaceHolder1_date"})
        info.infoDate = updatedate.contents[0]
    else:
        # Unknown source: leave info untouched.
        pass
import time

from huobi.model.generic.symbol import Symbol
from wampy.constants import DEFAULT_REALM, DEFAULT_ROLES, DEFAULT_TIMEOUT
from wampy.peers.clients import Client
from wampy.roles.callee import callee
from wampy.roles.subscriber import subscribe

from utils.logging import quite_logger
from market import MarketClient
from dataset.redis import Redis
from target import Target
from user import User
from utils import config, get_target_time, logger

# Trading parameters loaded from the [setting] section of the shared config.
DEALER_NUM = config.getint('setting', 'DealerNum')    # number of dealer clients
WATCHER_NUM = config.getint('setting', 'WatcherNum')  # number of watcher clients
# Hold duration before selling — units not visible here (presumably seconds;
# confirm against the consumer).
SELL_AFTER = config.getfloat('setting', 'SellAfter')
MAX_BUY = config.getint('setting', 'MaxBuy')
SELL_RATE = config.getfloat('setting', 'SellRate')
SECOND_SELL_RATE = config.getfloat('setting', 'SecondSellRate')

# WAMP router endpoint assembled from host/port config.
WS_HOST = config.get('setting', 'WsHost')
WS_PORT = config.getint('setting', 'WsPort')
WS_URL = f'ws://{WS_HOST}:{WS_PORT}'
# Sleep (seconds) between high-sell attempts.
HIGH_SELL_SLEEP = 1


class Topic:
    # Pub/sub topic names shared by the clients.
    # NOTE(review): the class may define more topics beyond this view.
    CLIENT_INFO = 'CLIENT_INFO'
from sqlalchemy import Column, create_engine, VARCHAR, INTEGER, REAL, TEXT, func
from sqlalchemy.orm import sessionmaker, Session
from sqlalchemy.ext.declarative import declarative_base

from utils import config, user_config

# Postgres connection settings: host/port from the shared [setting] section,
# password from the user-specific config.
PGHOST = config.get('setting', 'PGHost')
PGPORT = config.getint('setting', 'PGPort')
# NOTE(review): placeholder/redacted username — confirm where the real value
# is supposed to come from.
PGUSER = '******'
PGPASSWORD = user_config.get('setting', 'PGPassword')
PGNAME = 'goodmorning'

Base = declarative_base()
# Cache of generated per-day Trade classes (filled elsewhere).
TRADE_CLASS = {}
# Milliseconds in one day.
MS_IN_DAY = 60 * 60 * 24 * 1000


def create_Trade(day):
    """Define a per-day Trade ORM class mapped to table ``trade_<day>``.

    With a falsy *day* the class maps to the plain 'trade' table.
    """
    class Trade(Base):
        __tablename__ = f'trade_{day}' if day else 'trade'

        id = Column(INTEGER, primary_key=True)
        symbol = Column(VARCHAR(10))
        ts = Column(VARCHAR(20))
        price = Column(REAL)
        amount = Column(REAL)
        direction = Column(VARCHAR(5))

        @staticmethod
        def from_redis(key, value):
            # Decode the raw redis bytes before parsing.
            key = key.decode('utf-8')
            value = value.decode('utf-8')
            # NOTE(review): body continues beyond this view (truncated here).
from huobi.model.market.trade_detail_event import TradeDetailEvent
from huobi.model.market.trade_detail import TradeDetail
from retry import retry

from market import MarketClient
from utils import config, kill_all_threads, logger
from websocket_handler import replace_watch_dog, WatchDog

# Strategy thresholds loaded from the [setting] section of the shared config.
BOOT_RATE = config.getfloat('setting', 'BootRate')
END_RATE = config.getfloat('setting', 'EndRate')
MIN_VOL = config.getfloat('setting', 'MinVol')
SELL_AFTER = config.getfloat('setting', 'SellAfter')
MAX_WAIT = config.getfloat('setting', 'MaxWait')
MAX_BUY_BACK_RATE = config.getfloat('setting', 'MaxBuyBackRate')
WATCHER_TASK_NUM = config.getint('setting', 'WatcherTaskNum')
WATCHER_SLEEP = config.getint('setting', 'WatcherSleep')


def check_buy_signal(client: WatcherClient, symbol, vol, open_, price, now,
                     boot_price, end_price, start_time, max_back):
    """Send a buy signal when *price* lies strictly between the boot and end prices.

    Symbols with volume below MIN_VOL or pull-back above MAX_BUY_BACK_RATE are
    skipped. Errors from the client call are logged, not raised, so one bad
    send cannot kill the watcher loop.
    """
    if vol < MIN_VOL or max_back > MAX_BUY_BACK_RATE:
        return
    if boot_price < price < end_price:
        try:
            client.send_buy_signal(symbol, price, open_, now, vol, start_time)
        except Exception as e:
            logger.error(e)
# --- fragment: tail of a SOCKS5 request handler; its enclosing def is not in view ---
client.server_transport = self.transport
self.client_transport = transport
client.hostname = hostname
# Reply to the browser: SOCKS5 success (0x05 0x00), IPv4 address type (0x01),
# followed by the bound address and port of our outgoing socket.
hostip, port = transport.get_extra_info('sockname')
host = unpack("!I", socket.inet_aton(hostip))[0]
self.transport.write(
    pack('!BBBBIH', 0x05, 0x00, 0x00, 0x01, host, port))

if __name__ == '__main__':
    # config: debug flag plus the remote server address/port.
    debug = config.getboolean('default', 'debug')
    server = config.get('default', 'server')
    server_port = config.getint('default', 'server_port')
    if debug:
        debug_level = logging.DEBUG
    else:
        debug_level = logging.ERROR
    # log: root logger and asyncio's logger follow the same level.
    logging.basicConfig(level=debug_level,
                        format='%(threadName)10s %(asctime)s %(levelname)-8s %(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        filemode='a+')
    logging.getLogger('asyncio').setLevel(debug_level)
    loop = asyncio.get_event_loop()
    # Enable asyncio's debug mode alongside verbose logging.
    if debug:
        loop.set_debug(enabled=True)
import redis
import time

from utils import config, user_config

# Redis connection settings: host/port from the shared [setting] section,
# password from the user-specific config.
RHOST = config.get('setting', 'RHost')
RPORT = config.getint('setting', 'RPort')
RPASSWORD = user_config.get('setting', 'RPassword')


class Redis(redis.StrictRedis):
    """StrictRedis preconfigured with this project's host/port/password defaults.

    Every keyword of StrictRedis is re-exposed so callers can still override
    anything; only host/port/password gain project defaults.
    """

    def __init__(self, host=RHOST, port=RPORT, db=0, password=RPASSWORD,
                 socket_timeout=None, socket_connect_timeout=None,
                 socket_keepalive=None, socket_keepalive_options=None,
                 connection_pool=None, unix_socket_path=None,
                 encoding='utf-8', encoding_errors='strict',
                 charset=None, errors=None, decode_responses=False,
                 retry_on_timeout=False, ssl=False, ssl_keyfile=None,
                 ssl_certfile=None, ssl_cert_reqs='required',
                 ssl_ca_certs=None, ssl_check_hostname=False,
                 max_connections=None, single_connection_client=False,
                 health_check_interval=0, client_name=None, username=None):
        # Pure pass-through to StrictRedis with the project defaults applied.
        super().__init__(host=host, port=port, db=db, password=password,
                         socket_timeout=socket_timeout,
                         socket_connect_timeout=socket_connect_timeout,
                         socket_keepalive=socket_keepalive,
                         socket_keepalive_options=socket_keepalive_options,
                         connection_pool=connection_pool,
                         unix_socket_path=unix_socket_path,
                         encoding=encoding,
                         encoding_errors=encoding_errors,
                         charset=charset, errors=errors,
                         decode_responses=decode_responses,
                         retry_on_timeout=retry_on_timeout,
                         ssl=ssl, ssl_keyfile=ssl_keyfile,
                         # NOTE(review): the call continues beyond this view
                         # (source truncated mid-expression).