def main():
    """Bootstrap the market server: load config, set up logging, open the
    market database and start the I2TCP server.

    Reads ``CONFIG`` (JSON) for ``log_filename``, ``log_level``,
    ``server_port`` and ``dyn_key``; binds the module-level ``LOGGER`` and
    ``MODLOGGER`` globals before any component that logs is constructed.
    """
    global LOGGER, MODLOGGER
    head = "main"
    ECHO.print("[{}] [INFO] [{}] initializing environment".format(
        time.strftime("%y-%m-%d %H:%M:%S"), head))
    ECHO.buttom_print("initializing environment...")
    path_fixer(CONFIG)
    # `with` closes the file on exit; the original's explicit f.close() was
    # redundant and has been removed.
    with open(CONFIG, "r") as f:
        conf = json.load(f)
    log_file = conf["log_filename"]
    log_level = conf["log_level"]
    path_fixer(log_file)
    LOGGER = logger(filename=log_file, echo=False, level=log_level)
    LOGGER.INFO("[{}] initializing".format(head))
    MODLOGGER = ModLogger(logger=LOGGER, echo=ECHO.print)
    MODLOGGER.DEBUG("[{}] building database connection...".format(head))
    ECHO.buttom_print("initializing database connection...")
    db_api = MarketDB()
    MODLOGGER.INFO("[{}] successfully built connection with \"{}\"".format(
        head, db_api.db_file))
    MODLOGGER.DEBUG("[{}] starting server...".format(head))
    port = conf["server_port"]
    key = conf["dyn_key"]
    server = I2TCPserver(key=key, port=port, logger=MODLOGGER)
    server.start()
def rnn_init():
    """Ensure model file paths exist and, when imported as a module (not run
    as a script), load the captcha CNN into the module-level CAPTCHA_CNN.

    NOTE(review): the original declared only ``global ETH_RNN`` (never used
    here) while assigning ``CAPTCHA_CNN`` as a local, so the loaded model was
    discarded when the function returned. ``CAPTCHA_CNN`` is now declared
    global so the assignment persists; ``ETH_RNN`` is kept in the statement
    for compatibility with other code that may rely on it.
    """
    global ETH_RNN, CAPTCHA_CNN
    paths = [MODEL_FILE]
    for i in paths:
        path_fixer(i)
    print("initializing...")
    # skip the (slow) model load when this module is executed as a script
    if __name__ != "__main__":
        CAPTCHA_CNN = customNN(NAME)
        CAPTCHA_CNN.load_model(MODEL_FILE)
        print("captcha CNN fully connected")
def rnn_init():
    """Verify required paths exist, then build and initialize the
    module-level DATASET from the market database."""
    global ETH_RNN, DATASET
    for required_path in [MODEL_FILE]:
        path_fixer(required_path)
    print("initializing database...")
    market = MarketDB()
    DATASET = DatasetBase(market,
                          sample_time_ms=SAMPLE_TIME_MS,
                          set_size=SAMPLE_SIZE,
                          label_size=PREDICT_SIZE,
                          use_index_json=True,
                          index_json=DATASET_INDEX)
    DATASET.init_dataset()
def __init__(self, config=MARKET_CONFIG, echo=db_echo, database=None):
    """Open (creating tables if needed) the market SQLite database.

    Parameters
    ----------
    config : str
        Path to the market JSON config; must contain ``"database"`` and
        ``"monitoring_markets"`` keys.
    echo : bool
        Verbosity flag, stored on the instance as ``self.echo``.
    database : str or None
        Explicit database file path; overrides the config value when given.

    Raises
    ------
    KeyError
        If the config file lacks a required key (original behavior: any
        failure while reading the config is re-raised as KeyError).
    """
    path_fixer(config)
    with open(config) as conf:
        config = json.load(conf)
    try:
        self.db_file = config["database"] if database is None else database
        path_fixer(self.db_file)
        # normalize market names once so all later lookups are case-stable
        self.monitoring = [m.upper() for m in config["monitoring_markets"]]
    except Exception as err:
        raise KeyError("failed to load market config, {}".format(err)) from err
    self.db = sql.SqliteDB(self.db_file)
    self.db.connect()
    self.db.switch_autocommit()
    self.all_tables = {}        # table name -> table handle
    self.all_tablenames = []    # populated by get_db_info()
    self.echo = echo
    self.offsets = {}           # table name -> current row offset
    self.buffer = {}            # MARKET -> pending rows
    self.live = False
    self.get_db_info()
    # create any missing per-market tables, then refresh the table listing
    for ele in self.monitoring:
        if ele.upper() not in self.all_tablenames:
            self.create_market_db(ele)
    self.get_db_info()
    for ele in self.all_tablenames:
        table = self.db.select_table(ele)
        self.all_tables[ele] = table
        self.offsets[ele] = self.get_offset(table)
        self.buffer[ele.upper()] = []
def _load_model(net, model_path, head):
    """Load *model_path* into *net*, logging the outcome.

    Returns True on failure (so callers can accumulate a failure flag),
    False on success. Factored out of main(): the three CNNs were loaded
    with three identical try/except blocks.
    """
    try:
        net.load_model(model_path)
        LOG.DEBUG("{} loaded model \"{}\"".format(head, model_path))
        return False
    except Exception as err:
        LOG.ERROR("{} failed to load model \"{}\", {}".format(
            head, model_path, err))
        return True


def main():
    # initialization
    global LOG, DB, CONF, BOT
    head = "[main]"
    LOG = logger()
    if not os.path.exists(CONFIG_PATH):
        LOG.WARNING(
            "{} no config file was created, generating default config".format(
                head))
        putils.path_fixer(CONFIG_PATH)
        write_conf(CONFIG_PATH, DEFAULT_CONFIG)
        LOG.CRITICAL("{} edit the config first".format(head))
        return 1
    else:
        # `with` ensures the handle is closed; the original leaked it via
        # json.loads(open(...).read())
        with open(CONFIG_PATH, "r") as cf:
            CONF = json.load(cf)
    # path safety check
    paths = [
        CONFIG_PATH,
        CONF["bot_configs"]["msg_database"],
        CONF["bot_configs"]["pic_database"],
        CONF["bot_configs"]["file_database"],
        CONF["bot_configs"]["log"],
        CONF["lewdityapi"]["nsfw_painting_model"],
        CONF["lewdityapi"]["nsfw_photo_model"],
        CONF["lewdityapi"]["classification_model"]
    ]
    for ele in paths:
        ret = putils.path_fixer(ele)
        if ret:
            LOG.WARNING("{} path to \"{}\" does not exist, fixed".format(
                head, ele))
    LOG = logger(CONF["bot_configs"]["log"])
    LOG.INFO("{} initializing...".format(head))

    # lewdity API
    global TypeClassificationCNN
    global PaintingClassificationCNN
    global PhotoClassificationCNN
    failure = False
    TypeClassificationCNN = customNN("Pic_Type_Classification")
    failure = _load_model(TypeClassificationCNN,
                          CONF["lewdityapi"]["classification_model"],
                          head) or failure
    PaintingClassificationCNN = customNN("Painting_Classification")
    failure = _load_model(PaintingClassificationCNN,
                          CONF["lewdityapi"]["nsfw_painting_model"],
                          head) or failure
    PhotoClassificationCNN = customNN("Photo_Classification")
    failure = _load_model(PhotoClassificationCNN,
                          CONF["lewdityapi"]["nsfw_photo_model"],
                          head) or failure
    if failure:
        LOG.WARNING("{} failed to initialize lewdity API".format(head))
    else:
        LOG.INFO("{} lewdity API initialized".format(head))

    # qqbot api (BOT is already declared global at the top of main;
    # the original's second `global BOT` was redundant)
    failure = False
    try:
        configs = {
            "URL": CONF["qqapi"]["httpapi_address"],
            "ID": CONF["qqapi"]["bot_id"],
            "QQID": CONF["qqapi"]["qq_id"],
            "Nick": CONF["qqapi"]["nickname"],
            "HeartbeatHost": CONF["qqapi"]["watchdog"]["host"],
            "HeartbeatPort": CONF["qqapi"]["watchdog"]["port"],
            "TimeKey": CONF["qqapi"]["watchdog"]["dynkey"],
            "QQframeRestartThreshold":
                CONF["qqapi"]["watchdog"]["QQframe_timeout"]
        }
    except Exception as err:
        LOG.WARNING(
            "{} failed to load qqbot configs, please check your configs,"
            " {}".format(head, err))
        failure = True
    try:
        BOT = CodyAPI(config=configs,
                      heartbeatClass=heartbeatControl(log=LOG))
        LOG.DEBUG("{} qqbot HTTPAPI url: {}".format(head, BOT.url))
        LOG.DEBUG("{} qqbot QQID: {}".format(head, BOT.qqid))
        LOG.DEBUG("{} codyapi initialized".format(head))
    except Exception as err:
        LOG.WARNING("{} failed to initialize CodyAPI, {}".format(head, err))
        failure = True
    try:
        session_id = BOT.allocateSession()
        LOG.DEBUG("{} allocated qqbot session ID: {}".format(head, session_id))
    except Exception as err:
        LOG.WARNING("{} failed to allocate session, {}".format(head, err))
    try:
        BOT.heartbeatClass.startHeartbeat()
        LOG.DEBUG("{} watchdog heartbeat started".format(head))
    except Exception as err:
        LOG.WARNING("{} failed to start watchdog, {}".format(head, err))
        failure = True
    if failure:
        LOG.CRITICAL("{} failed to initialize qqbot api".format(head))
        return 1
    else:
        LOG.INFO("{} qqbot API initialized".format(head))
def __init__(self, market_config=MARKET_CONFIG, huobi_config=HUOBI_CONFIG,
             watchdog_threshold=10, db_api=None):
    """Initialize the market updater: load market + Huobi API configs,
    estimate the local-to-exchange clock offset, and prime per-market stats.

    Parameters
    ----------
    market_config : str
        Path to the market JSON config ("monitoring_markets" key required).
    huobi_config : str
        Path to the Huobi API JSON config ("timeout", "api", "fallback_api").
    watchdog_threshold : int
        Stored threshold for the watchdog logic.
    db_api : object or None
        Optional database API handle, stored as-is.

    Raises
    ------
    KeyError
        If either config file lacks a required key.
    """
    path_fixer(market_config)
    path_fixer(huobi_config)
    with open(market_config) as conf:
        config = json.load(conf)
    try:
        self.monitoring = config["monitoring_markets"]
    except Exception as err:
        raise KeyError("failed to load market config, {}".format(err))

    def _proxies_of(section):
        # Mirror http/https onto each other when only one is configured;
        # return None when neither is set. Factored out: the original
        # duplicated this logic for "api" and "fallback_api".
        prox = section["proxies"]
        if prox["http"] == "" and prox["https"] == "":
            return None
        if prox["http"] == "":
            prox["http"] = prox["https"]
        if prox["https"] == "":
            prox["https"] = prox["http"]
        return prox

    with open(huobi_config) as conf:
        config = json.load(conf)
    try:
        self.timeout = config["timeout"]
        self.url = config["api"]["url"]
        self.fallback_url = config["fallback_api"]["url"]
        self.proxies = _proxies_of(config["api"])
        self.fallback_proxies = _proxies_of(config["fallback_api"])
    except Exception as err:
        raise KeyError("failed to load huobi api config, {}".format(err))
    self.live = False
    self.food = {}
    self.watchdog_threshold = watchdog_threshold
    self.watchdog_int_flag = {}
    self.db_api = db_api
    self.statics = {}
    self.timestamp_offset = 0
    try:
        if self.proxies is None:
            gen_clt = GenericClient(url=self.url, timeout=self.timeout)
        else:
            gen_clt = GenericClient(url=self.url, timeout=self.timeout,
                                    proxies=self.proxies)
        # 20 decreasingly-weighted samples to converge the clock offset
        for i in range(20):
            cloud_ts = gen_clt.get_exchange_timestamp()
            self.timestamp_offset -= (self.get_timestamp() - cloud_ts) * (
                (20 - i) / 20)
    except Exception:
        # was a bare `except:` — narrowed so KeyboardInterrupt/SystemExit
        # are no longer swallowed; any API failure falls back here
        if self.fallback_proxies is None:
            gen_clt = GenericClient(url=self.fallback_url,
                                    timeout=self.timeout)
        else:
            gen_clt = GenericClient(url=self.fallback_url,
                                    timeout=self.timeout,
                                    proxies=self.fallback_proxies)
        for i in range(20):
            cloud_ts = gen_clt.get_exchange_timestamp()
            self.timestamp_offset -= (self.get_timestamp() - cloud_ts) * (
                (20 - i) / 20)
    ECHO.print("[updater] [init] info: timestamp offset fixing: {}".format(
        self.timestamp_offset))
    cloud_ts = gen_clt.get_exchange_timestamp()
    fixed_ts = self.get_timestamp()
    # print the captured fixed_ts (the original assigned it but re-sampled
    # the clock in the message, leaving fixed_ts unused)
    ECHO.print(
        "[updater] [init] debug: huobi cloud timestamp: {}, fixed timestamp: {}"
        .format(cloud_ts, fixed_ts))
    for ele in self.monitoring:
        self.statics.update(
            {ele.upper(): {
                "price": -1,
                "avg_cost_1min": 0.0,
                "ping": 0
            }})
    self.fallbacked = False