def do_crawler(self,para): print "do_crawler" #create crawler engin crawler_engine = Engine() #start engine crawler_engine.start( ) #stop engin crawler_engine.stop()
def _run_and_plot(self, plot_index, num_plots, name, match_maker, environment):
    """Simulate self.num_rounds rounds, then print and plot statistics
    for the players inside the configured MMR band."""
    simulation = Engine(match_maker, environment)
    for _ in range(self.num_rounds):
        simulation.one_round()
    # Restrict stats to the MMR band this runner is configured for.
    band = simulation.players_with_mmr_between(self.min_mmr, self.max_mmr)
    stats = simulation.statistics(band)
    Runner._print_stats(name, stats)
    Runner._plot(plot_index, num_plots, stats)
def main(): import argparse parser = argparse.ArgumentParser() parser.add_argument('traintsv') parser.add_argument('generalintentscsv') parser.add_argument('testtsv') args = parser.parse_args() data = read_tsv(args.testtsv) # initialize engine engine = Engine(args.traintsv, args.generalintentscsv) correct_entities = 0. correct_intents = 0. correct_response = 0. from fuzzywuzzy import fuzz # Begin evaluation for i, line in enumerate(data): print "Query ", i query = line[0] response = line[1] intent = line[2] entities = underscore_entities(line[3].split(',')) result = engine.process_message(i, query) pprint(line) pprint(result) if len(result['entities']) >= 1 and set(entities).issubset( result['entities']): correct_entities += 1 else: print "Entities mismatch" print set(entities) print result['entities'] if intent == result['intent']: correct_intents += 1 else: print "Intent mismatch" if result['response'] is not None and \ fuzz.partial_ratio(response, result['response']) > 50: correct_response += 1 else: print "Response mismatch" print "Entity score {} Intent score {} Response score {}".format( correct_entities / len(data), correct_intents / len(data), correct_response / len(data)) sys.stdout.flush()
def run(self, match_maker: MatchMaker, mmr_engine: MmrEngine,
        environment: Environment, skip_rounds: int = 0):
    """Build the matchmaking Engine, register self for its callbacks,
    then enter the main loop (optionally fast-forwarding rounds)."""
    self.engine = Engine(match_maker, mmr_engine, environment)
    # Subscribe to game-finished and lobby-found events.
    self.engine._on_game_finished_listeners.append(self)
    self.engine._on_lobby_found_listeners.append(self)
    self.environment = environment
    self._main_loop(skip_rounds)
def __init__(self, ctx, **kwargs):
    """Switch node: builds per-neighbor ports, a flow table, and a
    controller-like routing Engine."""
    super().__init__(ctx, **kwargs)
    self.id = kwargs.get("id")  # networkx node id
    self.label = kwargs.get("label", "NoLabelSet")  # name in topology
    self.x = kwargs.get("x")  # x coordinate in topology
    self.y = kwargs.get("y")  # y coordinate in topology

    # One Port object per neighbor of this switch; these are used to
    # store and access port-related statistics.
    self.ports = {}
    for cnt, n in enumerate(ctx.topo.graph.neighbors(self.id)):
        port = Port(cnt)
        port.target = n
        port.link_in = ctx.topo.graph.edges[n, self.id]['_link']
        port.link_out = ctx.topo.graph.edges[self.id, n]['_link']
        self.ports[(n, self.id)] = port

    # Flow table object for this switch.
    self.flowtable = FlowTable(self)

    # The switch logic lives inside the engine; this is similar to
    # connecting a switch to a controller.
    self.engine = kwargs.get("engine", Engine(self.ctx, **kwargs))  # routing engine

    # Delegation counters.
    self.cnt_backdelegations = 0
    self.cnt_adddelegations = 0
class Crawler():
    """Thin wrapper around core.engine.Engine exposing crawl()/stop()."""

    def __init__(self):
        # The Engine import resolves relative to ./global_vars;
        # the cwd is restored immediately afterwards.
        os.chdir('./global_vars')
        from core.engine import Engine
        os.chdir('..')
        self.engine = Engine()

    def _start_engine(self):
        self.engine.start()

    def _stop_engine(self):
        self.engine.stop()

    def crawl(self):
        # Entrance method.
        self._start_engine()

    def stop(self):
        self._stop_engine()
def init(self, spider_type, task_name):
    """Register every application service in the container, initialise
    the facade and the engine, and return self for chaining."""
    self.__spider_type = spider_type
    self.__task_name = task_name
    self.__container = Container()
    # Core services.
    self.set('app', self)
    self.set('browser', Browser())
    self.set('facade', Facade())
    Facade().init(self)
    # Managers.
    self.set('component', ComponentManager())
    self.set('for', ForManager())
    self.set('global', GlobalManager())
    self.set('hook', HookManager())
    # Configuration and engine.
    self.set('config', Config())
    self.set('engine', Engine())
    Engine().init(spider_type)
    return self
def main():
    """Run the crawler engine until the user presses enter (Python 2)."""
    try:
        # Create the crawler engine.
        crawler_engine = Engine()
        # Start the engine.
        crawler_engine.start()
        # Hold the main thread here; any input finishes the run.
        raw_input("")
        # Stop the engine.
        crawler_engine.stop()
    except Exception as e:
        # Log().debug(e)
        # NOTE(review): exits 0 even on error — presumably deliberate
        # best-effort shutdown; confirm before changing.
        sys.exit(0)
def __init__(self, size=24, selection_size=6, game_number=5, folder="generations",
             ideal_generation=1000, idle_limit=100, league: LEAGUE = LEAGUE.WOOD3,
             restart_generation=20, visualize=False, batch_size=os.cpu_count(),
             save_every=10, print_game=False):
    """Set up a GA population: sizing, bookkeeping, output folder, and
    one simulation engine per batch slot (or a single GUI engine)."""
    assert size > selection_size
    assert not (size % batch_size), f'Population Size ({size}) must be divisible by Batch Size ({batch_size})'
    assert not ((size // batch_size) % 2), f'Batch Data must be an even number ({(size // batch_size)})'

    self.batch_size = batch_size
    self.generation = 0
    self.simulated = 0
    self.restart_limit = restart_generation
    self.size = size
    self.selection_size = selection_size
    self.folder = folder
    self.ideal_generation = ideal_generation
    self.visualize = visualize
    self.save_every = save_every
    self.print_game = print_game

    if not os.path.exists(f'./ai/{folder}'):
        os.mkdir(f'./ai/{folder}')

    if visualize:
        # GUI mode runs exactly one slowed-down engine.
        from core.gui import GUIEngine
        self.batch_size = 1
        self.num_game = 1
        self.engines = [GUIEngine(league=league, silence=False, idle_limit=idle_limit, sleep=0.1)]
    else:
        print(
            f'Initializing Population with size({size}) selection({selection_size}) batch({batch_size}) league({league.name})'
        )
        self.engines = [
            Engine(league=league, silence=True, idle_limit=idle_limit)
            for _ in range(batch_size)
        ]
        # Seed every engine with two randomized AI players.
        for engine in self.engines:
            engine.add_player(AIBot, randomize=True)
            engine.add_player(AIBot, randomize=True)
        self.num_game = game_number

    # Per-individual bookkeeping for the current generation.
    self.generation_data = [None] * self.size
    self.results = [None] * self.size
    self.current_index = 0
def shell_main():
    """Interactive shell: collect crawl tasks from stdin, run the Engine,
    and stream results until it stops."""
    print(""" 输入 url, 响应数, 最大搜索页数, 空格隔开, 回车结束一次输入 比如\n http://h.nimingban.com/f/%E7%BB%BC%E5%90%88%E7%89%881 20 10\n 连续两次回车开始程序 """)
    tasks = []
    # An empty line terminates task entry.
    while True:
        ip = input()
        if not ip:
            break
        url, response_gt, max_page = ip.split()
        tasks.append({
            'url': url,
            'response_gt': int(response_gt),
            'max_page': int(max_page),
        })

    engine = Engine()
    engine.set_init_tasks(tasks)
    engine.start()
    print('-----------Engine Start----------')

    # Poll for results until the engine finishes.
    while engine.is_running:
        rs = engine.get_one_result()
        if not rs:
            time.sleep(0.5)
            continue
        print(rs.link, 'image', rs.image_url or 'None')
        print(rs.text)
        print('-' * 40)
    print('------------Engine Stop---------------')
def do_crawler(self, para): print "do_crawler" #create crawler engin crawler_engine = Engine() #start engine crawler_engine.start() #stop engin crawler_engine.stop()
def main() -> None:
    """Boot the engine, run the console UI until exit, then shut down."""
    engine = Engine()
    engine.load_session_from_json('../config/project.json')
    interface = ConsoleInterface(engine)
    # Blocks this thread until the exit command has been given; the
    # sequencer already runs in its own (started) thread, so this is
    # the desired behaviour.
    interface.start_interface()
    # After the UI has stopped, shut down the engine (which stops the
    # sequencer thread).
    engine.shut_down()
def main():
    """Parse the target URL option, then crawl it with forms enabled."""
    usage = "usage: %prog [options]"
    parser = OptionParser(usage=usage)
    parser.add_option("-u", "--url", dest="url", help="target URL")
    (options, args) = parser.parse_args()
    # options.url=targetUrl
    if options.url is None:
        parser.print_help()
        exit()
    target = Target(options.url)
    scanner = Engine(target)
    # Fixed scan configuration: crawl links and forms, single thread.
    scanner.addOption("crawl", True)
    scanner.addOption("forms", True)
    scanner.addOption("threads", 1)
    if scanner.start():
        exit()
class Game:
    """
    Description:
        Provides a game entity that represents the entirety of the game.
        Game is composed of an engine to render graphics and maintain the
        frame rate, and a state which handles game logic.
    """

    # NOTE(review): class-level attributes — engine and state are shared
    # by every Game instance; confirm this is intended.
    engine = Engine()
    state = State()

    def __init__(self):
        pass

    def run(self) -> None:
        """
        Runs the game loop. Calls run_event from state to signal that the
        next event should be executed. should_run returns false when the
        "X" in the top right corner of the game window is clicked.
        """
        while self.engine.should_run():
            self.engine.clear_screen()
            self.state.run_event(self.engine.get_dt())
            self.engine.update_screen(self.state.get_objects_to_render(),
                                      self.state.get_sprites())
# Re-order this batch's outputs by annotation index and accumulate them.
batch_indexs = [idx - min_idx for idx in state['sample']['batch_anno_idxs']]
sorted_segments = [state['output'][i] for i in batch_indexs]
state['sorted_segments_list'].extend(sorted_segments)

def on_test_end(state):
    """Evaluate accumulated predictions and print loss plus result table."""
    if config.VERBOSE:
        state['progress_bar'].close()
        print()
    annotations = test_dataset.annotations
    state['Rank@N,mIoU@M'], state['miou'] = eval_predictions(
        state['sorted_segments_list'], annotations, verbose=True)
    loss_message = '\ntest loss {:.4f}'.format(state['loss_meter'].avg)
    print(loss_message)
    state['loss_meter'].reset()
    test_table = display_results(state['Rank@N,mIoU@M'], state['miou'],
                                 'performance on testing set')
    table_message = '\n' + test_table
    print(table_message)
    # save_scores(state['sorted_segments_list'], annotations, config.DATASET.NAME, args.split)

# Wire the hooks into a fresh Engine and run the test split.
engine = Engine()
engine.hooks['on_test_start'] = on_test_start
engine.hooks['on_test_forward'] = on_test_forward
engine.hooks['on_test_end'] = on_test_end
engine.test(network, dataloader, args.split)
# Entry point for the application
import os
import json
from pathlib import Path

from core.engine import Engine

if __name__ == '__main__':
    # Load the JSON configuration from ./config relative to the cwd.
    cfg_path = Path(f'{os.getcwd()}/config/config.json')
    with open(cfg_path) as config_file:
        config = json.load(config_file)
    # NOTE(review): two separate Engine() calls — presumably Engine is a
    # singleton, otherwise the initialised instance is discarded; confirm.
    Engine().init(config)
    Engine().run()
def main():
    """Build an Engine and run it."""
    Engine().run()
def perform(self, engine: Engine, command: str) -> str:
    """Handle a 'load session' command.

    The command is expected as '<op> <path>': everything after the first
    three characters is treated as the file path. If the file exists, the
    session is loaded into the engine.

    Returns a human-readable status message.
    """
    file_path = command[3:]
    if isfile(file_path):
        engine.load_session_from_json(file_path)
        return f'Loaded json session from {file_path}'
    # Fixed message: was "because it the file does not exits".
    return f'Couldn\'t load json from {file_path}, because the file does not exist'
def main():
    """Start the download engine and run until a key is pressed (Python 2)."""
    manager, port, downloaders = parseCommandLineArgs()
    engine = Engine(downloaders, manager, port)
    engine.start()
    # Block the main thread until the user responds, then shut down.
    raw_input("press any key to stop....\n")
    engine.stop()
def __init__(self, outfile=None):
    """Baidu search wrapper: delegate to the base Engine with the
    fixed '[baidu]' search name."""
    Engine.__init__(self, '[baidu]', outfile)
def main(): banner() usage = "usage: %prog [options]" parser = OptionParser(usage=usage) parser.add_option("-u", "--url", dest="url", help="target URL") parser.add_option("--post", dest="post", default=False, action="store_true", help="try a post request to target url") parser.add_option("--data", dest="post_data", help="posta data to use") parser.add_option("--threads", dest="threads", default=1, help="number of threads") parser.add_option("--http-proxy", dest="http_proxy", help="scan behind given proxy (format: 127.0.0.1:80)") parser.add_option("--tor", dest="tor", default=False, action="store_true", help="scan behind default Tor") parser.add_option("--crawl", dest="crawl", default=False, action="store_true", help="crawl target url for other links to test") parser.add_option("--forms", dest="forms", default=False, action="store_true", help="crawl target url looking for forms to test") parser.add_option("--user-agent", dest="user_agent", help="provide an user agent") parser.add_option("--random-agent", dest="random_agent", default=False, action="store_true", help="perform scan with random user agents") parser.add_option("--cookie", dest="cookie", help="use a cookie to perform scans") parser.add_option("--dom", dest="dom", default=False, action="store_true", help="basic heuristic to detect dom xss") (options, args) = parser.parse_args() if options.url is None: parser.print_help() exit() # Build a first target print "[+] TARGET: %s" % options.url if options.post is True: print " |- METHOD: POST" if options.post_data is not None: print " |- POST data: %s" % options.post_data t = Target(options.url, method='POST', data=options.post_data) else: error('No POST data specified: use --data', ' |- ') exit() else: print " |- METHOD: GET" t = Target(options.url) # Build a scanner s = Engine(t) # Lets parse options for some proxy setting if options.http_proxy is not None and options.tor is True: error('No --tor and --http-proxy together!', ' |- ') exit() elif options.tor is 
False and options.http_proxy is not None: s.addOption("http-proxy", options.http_proxy) print " |- PROXY: %s" % options.http_proxy elif options.tor is True: s.addOption("http-proxy", "127.0.0.1:8118") print " |- PROXY: 127.0.0.1:8118" # User Agent option provided? if options.user_agent is not None and options.random_agent is True: error('No --user-agent and --random-agent together!', ' |- ') elif options.random_agent is False and options.user_agent is not None: s.addOption("ua", options.user_agent) print " |- USER-AGENT: %s" % options.user_agent elif options.random_agent is True: s.addOption("ua", "RANDOM") print " |- USER-AGENT: RANDOM" # Cookies? if options.cookie is not None: s.addOption("cookie", options.cookie) print " |- COOKIE: %s" % options.cookie # Do you want to crawl? if options.crawl is True: s.addOption("crawl", True) # Do you want to crawl forms? if options.forms is True: s.addOption("forms", True) # Dom scan? if options.dom is True: s.addOption("dom", True) # How many threads? s.addOption("threads", int(options.threads)) # Start the scanning if s.start(): exit()
def __init__(self):
    """Wire up the database, the engine, and the engine's service."""
    self.database = Database()
    self.engine = Engine()
    # Convenience alias for the engine's service object.
    self.service = self.engine.service
def __init__(self):
    """Create the DB schema, prepare an Email query, and expose the
    engine's service."""
    self.db = Database()
    self.db.create_schema()
    # Pre-built query over the Email table.
    self.query = self.db.session.query(Email)
    self.engine = Engine()
    self.service = self.engine.service
# Manually force the trade mode to backtest (live/paper kept for reference).
args.backtest = True
# args.live = True
# args.paper = True

# usage
# print(arg_parser.format_help())

# Print the current options.
print(args)

# Check whether a trade mode is provided, otherwise exit.
# NOTE(review): dead in practice — args.backtest was just forced True above.
if not args.backtest and not args.live and not args.paper:
    print(
        "Missing trade mode argument (backtest, paper or live). See --help for more details."
    )
    exit(0)
else:
    # Initialize and run the Engine.
    engine = Engine(trade_mode_input='backtest', plot_input=True, strategy='bumblebee')
    engine.run()
    # problems with exit

    # Downloaded data: pandas dataframe.
    data = engine.history
    print(data.columns)
    print(data.shape)
    print(data.describe())
    # dates = data.date
def __init__(self):
    """Import Engine relative to ./global_vars, restore the cwd, and
    keep the engine instance."""
    os.chdir('./global_vars')
    from core.engine import Engine
    os.chdir('..')
    self.engine = Engine()
# -*- coding: utf-8 -*-
import os
import sys
import json
from core import watson as w, solr
import psycopg2
import csv
import urlparse
from core.engine import Engine
from autocorrect import spell

# Watson conversation clients for the two deployments.
watson = w.ConversationAPI(w.graduate_affairs_2_config())
demo_watson = w.ConversationAPI(w.rohan_admissions_config())

# QA engine backed by the training data files.
engine = Engine('training/question-answers-2016-11-27.tsv',
                'training/general-intents.csv')

import requests
import flask
from flask import Flask, request
from flask import render_template
from flask_bootstrap import Bootstrap


def create_app():
    """Create the Flask application with Bootstrap support."""
    app = Flask(__name__)
    Bootstrap(app)
    return app
from flask import Flask
from core.engine import Engine
import os

## Create app
app = Flask(__name__)

## Create engine
engine = Engine()

## Look up the current path
APP_ROOT = os.path.dirname(os.path.abspath(__file__))

## Specify the upload target directory
app.config['UPLOAD_FOLDER'] = os.path.join(APP_ROOT, 'dynamic')

## Import the views (side-effect import registers the routes)
from webapp import views
if config.VERBOSE:
    state['progress_bar'].update(1)
state['loss_meter'].update(state['loss'].item(), 1)

# Re-order this batch's outputs by annotation index and accumulate them.
min_idx = min(state['sample']['batch_anno_idxs'])
batch_indexs = [idx - min_idx for idx in state['sample']['batch_anno_idxs']]
sorted_segments = [state['output'][i] for i in batch_indexs]
state['sorted_segments_list'].extend(sorted_segments)

def on_test_end(state):
    """Evaluate accumulated predictions against the dataset annotations."""
    annotations = state['iterator'].dataset.annotations
    state['Rank@N,mIoU@M'], state['miou'] = eval.eval_predictions(
        state['sorted_segments_list'], annotations, verbose=False)
    if config.VERBOSE:
        state['progress_bar'].close()

# Wire all hooks into a fresh Engine and start training.
engine = Engine()
engine.hooks['on_start'] = on_start
engine.hooks['on_forward'] = on_forward
engine.hooks['on_update'] = on_update
engine.hooks['on_end'] = on_end
engine.hooks['on_test_start'] = on_test_start
engine.hooks['on_test_forward'] = on_test_forward
engine.hooks['on_test_end'] = on_test_end
engine.train(network, iterator('train'), maxepoch=config.TRAIN.MAX_EPOCH,
             optimizer=optimizer, scheduler=scheduler)
def perform(self, engine: Engine, command: str) -> str:
    """Handle a 'save session' command: everything after the 3-char
    prefix is the destination path. Returns a status message."""
    target_path = command[3:]
    engine.export_session_to_json(target_path)
    return f'Saved session as json to {target_path}'
class Demo(OnGameFinishedListener, OnLobbyFoundListener):
    """Pygame visualisation of the matchmaking engine's queue: one bar
    per queuing player, coloured by wait time."""

    def __init__(self, width=800, height=600, bar_height=10, wait_ms=100,
                 bg_color=(50, 50, 50), text_color=(255, 255, 255)):
        pygame.init()
        self.width = width
        self.height = height
        self.bar_height = bar_height
        self.screen = pygame.display.set_mode((self.width, self.height))
        self.engine = None
        self.environment = None
        self.wait_ms = wait_ms  # delay between simulated rounds
        self.bg_color = bg_color
        self.font = pygame.font.Font(None, 22)
        self.text_color = text_color
        self.num_playing = 0
        self._currently_skipping_rounds = False

    def on_lobby_found(self, team_1: List[Queuer], team_2: List[Queuer]) -> None:
        """Log per-game MMR/skill/wait stats unless fast-forwarding."""
        self.num_playing += TEAM_SIZE * 2
        avg_mmr_diff = int(avg_mmr([q.player for q in team_2]) - avg_mmr([q.player for q in team_1]))
        max_mmr_d = max_mmr_diff([q.player for q in team_1], [q.player for q in team_2])
        names = [q.player.name for q in team_1 + team_2]
        skills = [self.environment.get_player_skill(name) for name in names]
        max_skill_d = max(skills) - min(skills)
        if not self._currently_skipping_rounds:
            print("New game: max diff: " + str(max_mmr_d)
                  + ", avg diff: " + str(avg_mmr_diff)
                  + ", max skill diff: " + str(max_skill_d)
                  + ", avg wait: " + str(avg([q.waited for q in team_1 + team_2])))

    def on_game_finished(self, game: Game) -> None:
        self.num_playing -= TEAM_SIZE * 2

    def run(self, match_maker: MatchMaker, mmr_engine: MmrEngine,
            environment: Environment, skip_rounds: int = 0):
        """Build the Engine, subscribe to its events, and start the loop."""
        self.engine = Engine(match_maker, mmr_engine, environment)
        self.engine._on_game_finished_listeners.append(self)
        self.engine._on_lobby_found_listeners.append(self)
        self.environment = environment
        self._main_loop(skip_rounds)

    def _main_loop(self, skip_rounds: int):
        # Fast-forward the requested rounds without logging each game.
        self._currently_skipping_rounds = True
        for i in range(skip_rounds):
            self.engine.one_round()
        self._currently_skipping_rounds = False
        self._render()
        pygame.time.wait(1000)
        while True:
            for event in pygame.event.get():
                if event.type == pygame.QUIT:
                    sys.exit()
                elif event.type == pygame.KEYDOWN:
                    if event.key == pygame.K_DOWN:
                        # Slow the simulation down.
                        if self.wait_ms == 0:
                            self.wait_ms = 2
                        else:
                            self.wait_ms = int(self.wait_ms * 1.5)
                    elif event.key == pygame.K_UP:
                        # Speed the simulation up.
                        self.wait_ms = int(self.wait_ms / 1.5)
            self.engine.one_round()
            pygame.time.wait(self.wait_ms)
            self._render()

    def _render(self):
        """Draw the queue: one bar per queuer (width ~ MMR, colour ~ wait
        time, white tick at the player's true skill) plus a status line."""
        pygame.draw.rect(self.screen, self.bg_color, (0, 0, self.width, self.height))
        for i, queuer in enumerate(self.engine.queue()):
            width_divider = 8
            w = queuer.player.mmr / width_divider
            line_x = self.environment.get_player_skill(queuer.player.name) / width_divider
            line_color = (255, 255, 255)
            x = 10
            space = 1
            top_space = 50
            y = top_space + i * (self.bar_height + space)
            rect = (x, y, w, self.bar_height)
            # Colour shifts blue -> red as wait time grows, capped at red.
            if queuer.waited < 255:
                color = (queuer.waited, 0, 255 - queuer.waited)
            else:
                color = (255, 0, 0)
            pygame.draw.rect(self.screen, color, rect)
            pygame.draw.rect(self.screen, line_color, (x + line_x, y, 3, self.bar_height))
        longest_wait = 0
        if len(self.engine.queue()) > 0:
            longest_wait = self.engine.queue()[0].waited
        self._render_text(str(len(self.engine.queue())) + " queueing", 10)
        self._render_text("longest: " + str(longest_wait) + "s", 150)
        self._render_text(str(len(self.engine._data_store.replays)) + " games played", 310)
        self._render_text(str(self.wait_ms) + "ms / round", 450)
        self._render_text(str(self.num_playing) + " playing", 610)
        pygame.display.flip()

    def _render_text(self, msg, x):
        text = self.font.render(msg, 1, self.text_color)
        textpos = text.get_rect()
        textpos.x = x
        textpos.y = 10
        self.screen.blit(text, textpos)
def main():
    """Print the parsed args, then run the engine."""
    print(args)
    Engine().run()
# project_dir/main.py
# from core.engine import Engine  # import the engine
#
# from .spiders.baidu import BaiduSpider
# from .spiders.douban import DoubanSpider
# from .pipelines import BaiduPipeline, DoubanPipeline
# # newly added here
# from .spider_middlewares import TestDownloaderMiddleware1, TestDownloaderMiddleware2
# from .downloader_middlewares import TestSpiderMiddleware1, TestSpiderMiddleware2

# if __name__ == '__main__':
#     baidu_spider = BaiduSpider()  # instantiate the spider object
#     douban_spider = DoubanSpider()  # instantiate the spider object
#     spiders = {BaiduSpider.name: baidu_spider, DoubanSpider.name: douban_spider}
#     pipelines = [BaiduPipeline(), DoubanPipeline()]  # the pipelines
#     spider_mids = [TestSpiderMiddleware1(), TestSpiderMiddleware2()]  # multiple spider middlewares
#     downloader_mids = [TestDownloaderMiddleware1(), TestDownloaderMiddleware2()]  # multiple downloader middlewares
#     # engine = Engine(spiders)  # pass in the spider objects
#     engine = Engine(spiders, pipelines=pipelines,
#                     spider_mids=spider_mids, downloader_mids=downloader_mids)
#     engine.start()  # start the engine

# project_dir/main.py
from core.engine import Engine  # import the engine

if __name__ == '__main__':
    engine = Engine()  # create the engine object
    engine.start()     # start the engine
def run():
    """Create a Config-driven game engine and run it."""
    from core.engine import Engine
    game_config = Config()
    Engine(game_config).run()
args = arg_parser.parse_known_args()[0]

# Manually force the trade mode to backtest (live/paper kept for reference).
args.backtest = True
# args.live = True
# args.paper = True

# usage
# print(arg_parser.format_help())

# Print the current options.
print(args)

# Check whether a trade mode is provided, otherwise exit.
# NOTE(review): dead in practice — args.backtest was just forced True above.
if not args.backtest and not args.live and not args.paper:
    print(
        "Missing trade mode argument (backtest, paper or live). See --help for more details."
    )
    exit(0)
else:
    # Initialize and run the Engine.
    engine = Engine(trade_mode_input='backtest', plot_input=True)
    engine.run()
    # problems with exit

    # Downloaded data: pandas dataframe.
    data = engine.history
    print(data.columns)
    print(data.shape)
    print(data.describe())
    # dates = data.date
Create a Scene that store objects, the scene must include the optimised data structure. Create a camera. Create a light source. Add objects to the Scene. Generate data structure for the Scene with optimize(). Create an engine object with that contains methods for ray tracings. Give the scene to the engine that outputs a picture. Show the picture. """ if __name__ == "__main__": engine = Engine(500, 500) camera = Camera([0, 0, 0], [1, 0, 0], [0, -1, 0], fov=1) #camera = ThinLensCamera([0,0,0], [1,0,0], [0,-1,0], radius=0.0, focal_distance=6, fov=1) #light = Light([10,0,10]) #light = LightProb("hdrmap/grace_probe.pfm") light = LightProb("hdrmap/stpeters_probe.pfm") scene = Scene(light) #scene.addobject(Sphere([0,0,0], 1000, anti=True)) #scene.addobject(Triangle(([-1000, -1000, 200], [1000,-1000, 200], [1000,1000, 200]), color=[255,0,0])) #scene.addobject(Triangle(([-1000, -1000, 200], [1000,1000, 200], [-1000, 1000, 200])))
from core.engine import Engine

# NOTE(review): each call presumably runs the named flow for its side
# effects — the instances are discarded; confirm this is intended.
Engine("Login")
Engine("Home")
def main(): banner() usage = "usage: %prog [options]" parser = OptionParser(usage=usage) parser.add_option("-u", "--url", dest="url", help="target URL") parser.add_option("--post", dest="post", default=False, action="store_true", help="try a post request to target url") parser.add_option("--data", dest="post_data", help="posta data to use") parser.add_option("--threads", dest="threads", default=1, help="number of threads") parser.add_option("--http-proxy", dest="http_proxy", help="scan behind given proxy (format: 127.0.0.1:80)") parser.add_option("--tor", dest="tor", default=False, action="store_true", help="scan behind default Tor") parser.add_option("--crawl", dest="crawl", default=False, action="store_true", help="crawl target url for other links to test") parser.add_option("--forms", dest="forms", default=False, action="store_true", help="crawl target url looking for forms to test") parser.add_option("--user-agent", dest="user_agent", help="provide an user agent") parser.add_option("--random-agent", dest="random_agent", default=False, action="store_true", help="perform scan with random user agents") parser.add_option("--cookie", dest="cookie", help="use a cookie to perform scans") parser.add_option("--dom", dest="dom", default=False, action="store_true", help="basic heuristic to detect dom xss") (options, args) = parser.parse_args() if options.url is None: parser.print_help() exit() # Build a first target print "[+] TARGET: %s" % options.url if options.post is True: print " |- METHOD: POST" if options.post_data is not None: print " |- POST data: %s" % options.post_data t = Target(options.url, method = 'POST', data = options.post_data) else: error('No POST data specified: use --data', ' |- ') exit() else: print " |- METHOD: GET" t = Target(options.url) # Build a scanner s = Engine(t) # Lets parse options for some proxy setting if options.http_proxy is not None and options.tor is True: error('No --tor and --http-proxy together!', ' |- ') exit() elif options.tor 
is False and options.http_proxy is not None: s.addOption("http-proxy", options.http_proxy) print " |- PROXY: %s" % options.http_proxy elif options.tor is True: s.addOption("http-proxy", "127.0.0.1:8118") print " |- PROXY: 127.0.0.1:8118" # User Agent option provided? if options.user_agent is not None and options.random_agent is True: error('No --user-agent and --random-agent together!', ' |- ') elif options.random_agent is False and options.user_agent is not None: s.addOption("ua", options.user_agent) print " |- USER-AGENT: %s" % options.user_agent elif options.random_agent is True: s.addOption("ua", "RANDOM") print " |- USER-AGENT: RANDOM" # Cookies? if options.cookie is not None: s.addOption("cookie", options.cookie) print " |- COOKIE: %s" % options.cookie # Do you want to crawl? if options.crawl is True: s.addOption("crawl", True) # Do you want to crawl forms? if options.forms is True: s.addOption("forms", True) # Dom scan? if options.dom is True: s.addOption("dom", True) # How many threads? s.addOption("threads", int(options.threads)) # Start the scanning if s.start(): exit()
def main(args):
    """Run the engine with the CLI args and the bundled config file."""
    Engine(args, 'config.ini').run()