def check_process_count(v=True, ret=False):
    """Validate the configured worker-process count.

    The count is ``os.cpu_count() * Config.get('thread.instance_multiplier')``.
    A non-positive count is fatal; a fractional count is accepted only when it
    is whole-valued (and is then converted to int), fatal otherwise.

    Args:
        v: suppresses warnings when True.
           NOTE(review): warnings fire only when ``v`` is False, which reads
           inverted for a "verbose" flag — confirm intended polarity.
        ret: when True, return the validated count instead of True.

    Returns:
        int or bool: the process count if ``ret`` is truthy, else True.
    """
    process_count = os.cpu_count() * Config.get('thread.instance_multiplier')
    if process_count <= 0:
        # A zero/negative multiplier can never make sense — abort immediately.
        Fatal('process count cannot be %s' % process_count,
              'incorrect config magic process number of %s' % process_count,
              'instance_multiplier=%s' %
              (process_count / os.cpu_count())).stop()
    elif process_count >= 32:
        if not v:
            do_warning('thread', 'magic process number is extremely large.',
                       'count=%s' % process_count)
    if not isinstance(process_count, int):
        # Multiplier may be a float; accept it only when whole-valued.
        try:
            if round(process_count, 0) == process_count:
                process_count = int(process_count)
            else:
                # NOTE(review): this message reports the multiplier
                # (count / cpu_count) while the TypeError branch reports the
                # raw count — confirm which is intended.
                Fatal(
                    'process count cannot be float.',
                    'incorrect config magic process number of %s' %
                    (process_count / os.cpu_count())).stop()
        except TypeError as error:
            # round() raised: the configured multiplier is not numeric at all.
            Fatal(
                'process count cannot be float.',
                'incorrect config magic process number of %s' % process_count,
                'incorrect type: %s' % type(process_count),
                '%s' % error).stop()
        if not v:
            do_warning(
                'thread',
                'magic process instance multiplier number was processed as float.'
            )
    return process_count if ret else True
def __init__(self):
    # Status ID: process-safe shared integer counter.
    self._id = Value('i', 0)
    # Manager-backed lists shared across worker processes —
    # presumably events currently in flight vs. already finished
    # (confirm against the methods that append to them).
    self._activeStatus = Manager().list()
    self._doneStatus = Manager().list()
    # Cap on retained status entries, taken from engine config.
    self._cachesize = Config()._Engine_cacheSize
    # logger
    self._logger = Logger()
def dashboard(self):
    """Heimdall, find!

    Requests every wordlist entry appended to the target URL, writing hits
    (HTTP 200) and misses to files under ``output/<target>/<date>/``.
    """
    Color.println("{+} User-Agent: %s" % self._user_agent['User-Agent'])
    # Format the target URL as simple and select the output directory.
    url_simple = Config.target_simple(self._url)
    path_out = os.path.realpath(f"output/{url_simple}/{date_now}/")
    # Create the output directory (raises if it already exists — one run
    # per timestamped directory).
    os.makedirs(path_out)
    Color.println("{+} Output: '%s'" % path_out)
    # Write the attack specifications to "info.txt"; the context manager
    # guarantees the handle is closed even if a write fails.
    with open(os.path.realpath(f"{path_out}/info.txt"), 'w') as output_info:
        output_info.writelines(f"[+] URL (Target): {self._url}\n"
                               f"[+] Proxy: {self._proxy}\n"
                               f"[+] User-Agent: {self._user_agent}\n"
                               f"[+] Output: {path_out}\n\n"
                               f"[+] Wordlist: {self._wordlist}")
    # Request loop against the target.
    Color.println("\n{+} {G}Heimdall, find the dashboard!{W}\n")
    for link in self._wordlist:
        target = self._url + link.rstrip("\n")
        request = get(target, proxies=self._proxy, headers=self._user_agent)
        if request.status_code == 200:
            # Append candidates that answered 200 to "sites-found.txt".
            with open(os.path.realpath(f"{path_out}/sites-found.txt"),
                      'a') as found:
                found.write("\n" + target)
            Color.println("{+} {G}%s{W}" % target)
        else:
            # Everything else goes to "sites-not-found.txt".
            with open(os.path.realpath(f"{path_out}/sites-not-found.txt"),
                      'a') as not_found:
                not_found.write("\n" + target)
            Color.println("{-} %s" % target)
def __init__(self):
    # Config init: engine polling interval and process cap.
    self.__epoch = Config()._Engine_epoch
    self.__maxProcess = Config()._Engine_maxProcess
    # Pending events, kept in separate queues per priority level.
    self.__lowEnventQueue = Queue()
    self.__mediumEventQueue = Queue()
    self.__highEventQueue = Queue()
    # Engine on/off switch (process-shared boolean).
    self.__active = Value('b', False)
    # Handler registry:
    # {'event1': [handler1, handler2], 'event2': [handler3, ..., handler4]}
    self.__handlers = {}
    # Pool of started handler processes — bounds the number of live
    # processes and lets shutdown clean up anything still running.
    self.__processPool = Manager().list()
    # Execution status of dispatched events.
    self.__status = Status()
    # Main process driving the event loop.
    self.__mainProcess = Process(target=self.__run)
    # logger
    self.__logger = Logger()
def handle_fdreq(path):
    """Return the entry names of directory *path*, or [] on any failure.

    In non-verbose sessions, permission and missing-path failures are
    additionally reported through IndexingError.
    """
    try:
        return [entry.name for entry in os.scandir(path)]
    # The original tested `e is PermissionError` (instance vs. class —
    # always False), so the reporting below never ran; specific except
    # clauses restore the evident intent.
    except PermissionError:
        # covers weird windows hidden folder mechanics
        if not Config.get_session("verbose"):
            # NOTE(review): IndexingError is instantiated, not raised —
            # presumably its constructor performs the reporting; confirm.
            IndexingError(path, 'permissions')
        return []
    except FileNotFoundError:
        if not Config.get_session("verbose"):
            IndexingError(path, 'un-loadable')
        return []
    except Exception:
        # Preserve the original catch-all: a scan failure is never fatal.
        return []
def dashboard(self) -> None:
    """Heimdall, Dashboard!

    Prepares the timestamped output directory for a dashboard hunt and
    records the attack specification in "info.txt".
    """
    Color.println("{+} Follow redirects: %s" % self._no_redirects)
    Color.println("{+} User-Agent: %s" % self._user_agent['User-Agent'])
    # Format the target URL as simple and select the output directory.
    url_simple = Config.target_simple(self._url)
    self.path_out = os.path.realpath(f"output/{url_simple}/{date_now}/")
    # Create the output directory.
    os.makedirs(self.path_out)
    Color.println("{+} Output: '%s'" % self.path_out)
    # Write the attack specifications to "info.txt"; the context manager
    # guarantees the handle is closed even if a write fails.
    with open(os.path.realpath(f"{self.path_out}/info.txt"),
              'w') as output_info:
        output_info.writelines(f"[+] URL (Target): {self._url}\n"
                               f"[+] Proxy: {self._proxy}\n"
                               f"[+] User-Agent: {self._user_agent}\n"
                               f"[+] Allow-Redirects: {self._no_redirects}\n"
                               f"[+] Output: {self.path_out}\n\n"
                               f"[+] Wordlist: {self._wordlist}")
class Logger(object):
    """Project-wide logger.

    Writes to a daily-rotated log file at the configured level and, when
    debug mode is enabled, mirrors colourised output to the console. Any
    failure while logging is re-raised as LogException.
    """

    # debug configuration
    __debug = Config()._Debug_debug
    __debug_level = Config()._Debug_level
    # log configuration
    __log_type = Config()._Log_type
    __log_url = Config()._Log_url
    __log_level = Config()._Log_level
    # Plain formatter for the log file.
    __file_formatter = logging.Formatter(
        '%(asctime)s pid=%(process)d %(levelname)-4s: %(message)s')
    # Colourised formatter for the console.
    __console_formatter = colorlog.ColoredFormatter(
        "%(log_color)s%(asctime)s pid=%(process)d %(levelname)-4s: %(reset)s%(blue)s%(message)s",
        datefmt=None,
        reset=True,
        log_colors={
            'DEBUG': 'cyan',
            'INFO': 'green',
            'WARNING': 'yellow',
            'ERROR': 'red',
            'CRITICAL': 'red,bg_white'
        },
        secondary_log_colors={},
        style='%')
    # Map config level names onto stdlib logging levels.
    __level = {
        'DEBUG': logging.DEBUG,
        'INFO': logging.INFO,
        'WARNING': logging.WARNING,
        'ERROR': logging.ERROR,
        'CRITICAL': logging.CRITICAL
    }

    def __init__(self):
        # File handler: rotate daily, keep one week of backups, emit at the
        # configured file log level.
        handler = logging.handlers.TimedRotatingFileHandler(Logger.__log_url,
                                                            when="d",
                                                            interval=1,
                                                            backupCount=7)
        handler.setFormatter(Logger.__file_formatter)
        handler.setLevel(Logger.__level[Logger.__log_level])
        # Console handler: coloured output at the configured debug level.
        console = logging.StreamHandler()
        console.setFormatter(Logger.__console_formatter)
        console.setLevel(Logger.__level[Logger.__debug_level])
        # Replace any handlers installed by earlier instances so repeated
        # construction does not duplicate log lines.
        self._logger = logging.getLogger(Logger.__log_type)
        self._logger.handlers = []
        self._logger.addHandler(handler)
        if Logger.__debug:
            self._logger.addHandler(console)
        self._logger.setLevel(logging.DEBUG)

    def _log(self, method, msg):
        # Single choke point: delegate to the stdlib logger and convert any
        # failure into the project's LogException.
        try:
            method(msg)
        except Exception as err:
            raise LogException(err)

    def debug(self, msg):
        self._log(self._logger.debug, msg)

    def info(self, msg):
        self._log(self._logger.info, msg)

    def warn(self, msg):
        # logging.Logger.warn is deprecated; call warning() internally while
        # keeping this method's public name for existing callers.
        self._log(self._logger.warning, msg)

    def error(self, msg):
        self._log(self._logger.error, msg)

    def critical(self, msg):
        self._log(self._logger.critical, msg)
# -*- coding: utf-8 -*- import os import sys import unittest sys.path.append(os.getcwd()) from src.core.coin.huobi import Huobi from src.core.config import Config from src.core.util.log import Logger # proxies _proxies = Config()._Proxies_url if Config()._Proxies_proxies else None # Huobi _Huobi_exchange = Config()._Huobi_exchange _Huobi_api_key = Config()._Huobi_api_key _Huobi_api_secret = Config()._Huobi_api_secret _Huobi_acct_id = Config()._Huobi_acct_id huobi = Huobi(_Huobi_exchange, _Huobi_api_key, _Huobi_api_secret, _Huobi_acct_id, _proxies) logger = Logger() class TestHuobi(unittest.TestCase): def test_getConfig(self): res = huobi.getConfig() logger.debug(res) self.assertEqual(res["exchange"], _Huobi_exchange)
# Check for available updates.
update = Update()
if args.update and update.verify(args.update):
    update.upgrade()
# Use an instance, matching the access style used elsewhere in the project:
# read on the class, the attribute would be the raw descriptor/attribute
# object and therefore always truthy, making this check meaningless.
conf = Config()
if conf.get_automatic_verify_upgrades and not args.update:
    update.verify(args.update)

# Activates the "helper()" method if no targets are passed in the arguments.
if not args.url:
    String.helper()
    exit()
else:
    # Format the target URL accordingly.
    args.url = Config.target(args.url)

# Instance the "Request" class.
user_agent = UserAgent(args)
# Generates a random User-Agent.
args.user_agent = user_agent.run()

# Formats the selected proxy; fall back to a random proxy only when
# explicitly requested.
proxy = Proxy(args)
if args.proxy is not None:
    args.proxy = proxy.format_proxy()
else:
    if args.random_proxy:
        args.proxy = proxy.random_proxy()
# -*- coding: utf-8 -*- from src.core.config import Config # CCAT signal SIGNAL_AUTO = Config()._Signal_auto SIGNAL_BASECOIN = Config()._Main_baseCoin SIGNAL_SIGNALS = Config()._Signal_signals SIGNAL_MAX_NUM = 10 # CCAT calc CALC_ZERO_NUMBER = 0.00000001
# -*- coding: utf-8 -*- import os import sys import unittest sys.path.append(os.getcwd()) from src.core.coin.binance import Binance from src.core.config import Config from src.core.util.log import Logger # proxies _proxies = Config()._Proxies_url if Config()._Proxies_proxies else None # Binance _Binance_exchange = Config()._Binance_exchange _Binance_api_key = Config()._Binance_api_key _Binance_api_secret = Config()._Binance_api_secret binance = Binance(_Binance_exchange, _Binance_api_key, _Binance_api_secret, _proxies) logger = Logger() class TestBinance(unittest.TestCase): def test_getConfig(self): res = binance.getConfig() logger.debug(res) self.assertEqual(res["exchange"], _Binance_exchange) def test_setProxy(self):
""" Get the Heimdall settings, updates and pass it on to the Strings class. """ String = Strings() """ Print the banner along with Heimdall specifications. """ if not args.no_logo: String.banner() String.banner_description() """ Check for available updates. """ conf = Config() update = Update() if args.update and update.verify(args.update): update.upgrade() if conf.get_automatic_verify_upgrades and not args.update: update.verify(args.update) """ Activates the "helper()" method if no targets are passed in the arguments. """ if not args.url: String.helper() exit() else:
# -*- coding: utf-8 -*- import os import sys import unittest sys.path.append(os.getcwd()) from src.core.coin.okex import Okex from src.core.config import Config from src.core.util.log import Logger # proxies _proxies = Config()._Proxies_url if Config()._Proxies_proxies else None # Okex _Okex_exchange = Config()._Okex_exchange _Okex_api_key = Config()._Okex_api_key _Okex_api_secret = Config()._Okex_api_secret _Okex_passphrase = Config()._Okex_passphrase okex = Okex(_Okex_exchange, _Okex_api_key, _Okex_api_secret, _Okex_passphrase, _proxies) logger = Logger() class TestOkex(unittest.TestCase): def test_getConfig(self): res = okex.getConfig() logger.debug(res) self.assertEqual(res["exchange"], _Okex_exchange)
import logging

from src.core.config import Config, Fatal

__author__ = "Alexander Fedotov <*****@*****.**>"
__company__ = "(C) Wasabi & Co. All rights reserved."

# Debug mode: log everything to the configured file. Otherwise bind a
# disabled logger so later code can call `logger` unconditionally.
if Config.get("debug"):
    # String equality, not identity: the original `is ""` only worked by
    # accident of CPython string interning (and is a SyntaxWarning today).
    if Config.get("log_file") == "":
        Fatal("Debug mode enabled, but no log file provided")
    logging.basicConfig(
        level=logging.DEBUG,
        format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
        datefmt='%m-%d %H:%M',
        filename=Config.get('log_file'),
        filemode='w')
    # NOTE(review): this binds a FileHandler (not a Logger) to the name
    # `logger`; kept as-is for compatibility with existing users — both
    # objects support setLevel() below. Confirm downstream usage.
    logger = logging.FileHandler(Config.get('log_file'))
else:
    logger = logging.getLogger("main-logger")
    logger.disabled = True

logger.setLevel(logging.DEBUG)

# Now, we can log to the root logger, or any other logger. First the root...
logging.info('connected to debug console v0.17.')

# Now, define a couple of other loggers which might represent areas in your
# application:
def __init__(self):
    # config param: engine tick and per-operation timeout.
    self._epoch = Config()._Router_epoch
    self._timeout = Config()._Router_timeout
    # Minimum refresh intervals for the market-data streams.
    self._marketKlineInterval = Config()._Main_marketKlineInterval
    self._marketTickerInterval = Config()._Main_marketTickerInterval
    self._statisticJudgeMarketTickerInterval = Config()._Main_statisticJudgeMarketTickerInterval
    # Per-stage async toggles with their matching synchronous timeouts.
    self._asyncAccount = Config()._Main_asyncAccount
    self._syncAccountTimeout = Config()._Main_syncAccountTimeout
    self._asyncMarketKline = Config()._Main_asyncMarketKline
    self._syncMarketKlineTimeout = Config()._Main_syncMarketKlineTimeout
    self._asyncMarketDepth = Config()._Main_asyncMarketDepth
    self._syncMarketDepthTimeout = Config()._Main_syncMarketDepthTimeout
    self._asyncMarketTicker = Config()._Main_asyncMarketTicker
    self._syncMarketTickerTimeout = Config()._Main_syncMarketTickerTimeout
    self._asyncJudge = Config()._Main_asyncJudge
    self._syncJudgeTimeout = Config()._Main_syncJudgeTimeout
    self._asyncBacktest = Config()._Main_asyncBacktest
    self._syncBacktestTimeout = Config()._Main_syncBacktestTimeout
    self._asyncOrder = Config()._Main_asyncOrder
    self._syncOrderTimeout = Config()._Main_syncOrderTimeout
    self._asyncStatistic = Config()._Main_asyncStatistic
    self._syncStatisticTimeout = Config()._Main_syncStatisticTimeout
    # class instance: event engine plus its collaborators.
    self._eventEngine = EventEngine()
    self._sender = Sender(self._eventEngine)
    self._handler = Handler(self._eventEngine)
    self._register = Register(self._eventEngine, self._handler)
    self._util = Util(self._eventEngine, self._sender)
    # logger
    self._logger = Logger()
    # router param: run state and last-refresh bookkeeping.
    self._start = False
    self._startTime = time.time()
    # NOTE(review): unlike ticker/statistic below, the kline update time is
    # NOT backdated by its interval (the backdating variant was left
    # commented out) — confirm the first kline refresh is meant to wait.
    self._marketKlineUpdateTime = time.time()
    # self._marketKlineUpdateTime = time.time() - self._marketKlineInterval
    self._marketTickerUpdateTime = time.time() - self._marketTickerInterval
    self._statisticJudgeMarketTickerUpdateTime = time.time(
    ) - self._statisticJudgeMarketTickerInterval
# -*- coding: utf-8 -*- from src.core.config import Config from src.core.util.helper import MyTemplate # CCAT signals SIGNAL_BACKTEST = 'backtest' SIGNAL_ORDER = 'order' # CCAT types timeWindow TYPE_DIS_TIMEWINDOW = Config()._Main_typeDisTimeWindow * 1000 TYPE_TRA_TIMEWINDOW = Config()._Main_typeTraTimeWindow * 1000 TYPE_PAIR_TIMEWINDOW = Config()._Main_typePairTimeWindow * 1000 # CCAT types Threshold TYPE_DIS_THRESHOLD = Config()._Main_typeDisThreshold TYPE_TRA_THRESHOLD = Config()._Main_typeTraThreshold TYPE_PAIR_THRESHOLD = Config()._Main_typePairThreshold # CCAT types TYPE_DIS = 'dis' TYPE_TRA = 'tra' TYPE_PAIR = 'pair' # event status QUEUE_STATUS_EVENT = "queue" ACTIVE_STATUS_EVENT = "active" DONE_STATUS_EVENT = "done" # event priority LOW_PRIORITY_EVENT = "low" # p.start()