def gulp(*args, **kwargs):
    with cd(env.deploy_path):
        colors.blue("Starting gulp build")
        run("gulp clean")
        run("gulp build --production")
        colors.green("Done.")
def restart(*args, **kwargs):
    with cd(env.deploy_path):
        colors.blue("Restarting application group")
        run('supervisorctl restart {0}:*'.format(env.project_name))
        status()
        colors.green("Done.")
def update_python_tools(*args, **kwargs):
    with cd(env.deploy_path):
        colors.blue("Updating Python tools")
        venv_run('easy_install --upgrade pip')
        venv_run('pip install --no-input --exists-action=i --use-wheel --upgrade setuptools wheel')
        colors.green("Done.")
def dump_db(*args, **kwargs):
    with cd(env.deploy_path):
        colors.blue("Dumping database")
        dbdump_extra_option = '--pgpass' if 'postgresql' in get_database_engine() else ''
        venv_run('python src/manage.py dbdump --destination=data/backup %s' % dbdump_extra_option)
        colors.green("Done.")
def clean(upgrade=False, *args, **kwargs):
    with cd(env.deploy_path):
        colors.blue("Cleaning Django project")
        venv_run('python src/manage.py clearsessions')
        venv_run('python src/manage.py clear_cache')
        venv_run('python src/manage.py clean_pyc --optimize --path=src/')
        venv_run('python src/manage.py compile_pyc --path=src/')
        colors.green("Done.")
def npm(upgrade=False, *args, **kwargs):
    with cd(env.deploy_path):
        colors.blue("Installing node_modules")
        run("npm prune")
        run("npm install --no-color --link --no-optional --only=dev --rebuild-bundle=false")
        if upgrade:
            run("npm update --no-color --link --no-optional --only=dev --rebuild-bundle=false")
        colors.green("Done.")
def backup(*args, **kwargs):
    with cd(env.deploy_path):
        colors.blue("Creating backup")
        run("mkdir -p data/deployment_backup")
        now_time = strftime("%Y-%m-%d_%H.%M.%S", gmtime())
        venv_run(
            "python src/manage.py dumpdata --format json --all --indent=3 "
            "--output data/deployment_backup/%s-dump.json" % now_time)
        colors.green("Done.")
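# Illustrative note (not part of the original tasks): a fixture produced by
# backup() is a standard Django JSON dump, so it could later be restored with
# Django's built-in loaddata command. The path below is a placeholder for the
# timestamped filename that backup() generates:
#
#   venv_run('python src/manage.py loaddata data/deployment_backup/<timestamp>-dump.json')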
def graceful_restart(*args, **kwargs):
    with cd(env.deploy_path):
        colors.blue("Restarting Gunicorn with HUP signal")
        run('supervisorctl pid {0}:{0}_gunicorn | xargs kill -s HUP'.format(env.project_name))
        if env.celery_enabled:
            colors.blue("Restarting Celery with HUP signal")
            run('supervisorctl pid {0}:{0}_celeryd | xargs kill -s HUP'.format(env.project_name))
            run('supervisorctl pid {0}:{0}_celerybeat | xargs kill -s HUP'.format(env.project_name))
        colors.green("Done.")
def check(*args, **kwargs):
    colors.blue("Checking local project")
    with settings(warn_only=True):
        local("git status --porcelain")
    local("python src/manage.py check --deploy")
    with settings(warn_only=True):
        local("python src/manage.py validate_templates")
    local("python src/manage.py test --noinput")
    colors.green("Done.")
def get_dumps(delete=False, *args, **kwargs):
    delete = fab_arg_to_bool(delete)
    with cd(env.deploy_path):
        colors.blue("Rsyncing local backups with remote")
        rsync_project(local_dir='data/', remote_dir='data/backup',
                      exclude=['.git*', 'cache*', 'filer_*'],
                      delete=delete, upload=False)
        colors.green("Done.")
def rebuild_staticfiles(*args, **kwargs):
    if not confirm('Are you sure you want to rebuild all staticfiles?', default=False):
        abort('Deployment cancelled')
    with cd(env.deploy_path):
        colors.blue("Rebuilding staticfiles")
        run("rm -r data/static")
        venv_run('python src/manage.py collectstatic --noinput')
        run('bower install --config.interactive=false')
        gulp()
        venv_run('python src/manage.py compress')
        colors.green("Done.")
def deploy(upgrade=False, *args, **kwargs):
    start = time.time()
    _print_simple_table('Deployment started')
    upgrade = fab_arg_to_bool(upgrade)

    check()

    with cd(env.deploy_path):
        # Create backup
        # dump_db()

        # Source code
        colors.blue("Pulling from git")
        run('git reset --hard')
        run('git checkout {0}'.format(env.source_branch))
        run('git pull --no-edit origin {0}'.format(env.source_branch))

        # Dependencies
        npm(upgrade)

        colors.blue("Installing bower dependencies")
        with settings(warn_only=True):  # Bower may not be installed
            run('bower prune --config.interactive=false')  # Uninstalls local extraneous packages.
        run('bower %s --config.interactive=false' % ('update' if upgrade else 'install'))
        gulp()

        colors.blue("Installing pip dependencies")
        venv_run('pip install --no-input --exists-action=i -r requirements/production.txt --use-wheel %s'
                 % ('--upgrade' if upgrade else ''))

        # Django tasks
        colors.blue("Running Django commands")
        venv_run('python src/manage.py collectstatic --noinput')
        venv_run('python src/manage.py migrate')
        venv_run('python src/manage.py compress')
        clean()
        venv_run('python src/manage.py compilemessages')

        graceful_restart() if env.graceful_restart else restart()
        status()
        check_urls()

    total_time_msg = "Deployed :)\nTotal time: {0} seconds.".format(time.time() - start)
    _print_simple_table(total_time_msg)
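# Example invocations (illustrative only; assumes the standard Fabric 1.x CLI
# with hosts set via env.hosts or the -H flag -- adjust host names to taste):
#
#   fab deploy                        # regular deployment of env.source_branch
#   fab deploy:upgrade=True           # deployment that also upgrades npm/bower/pip dependencies
#   fab status -H user@example.com    # check supervisor and system service status on one host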
def status(*args, **kwargs):
    with cd(env.deploy_path):
        colors.blue("Retrieving status")
        run('supervisorctl status | grep "{0}"'.format(env.project_name))

        watched_services = [
            'nginx',
            'supervisor',
        ]
        db_engine = get_database_engine()
        if 'postgresql' in db_engine:
            watched_services.append('postgresql')
        elif 'mysql' in db_engine:
            watched_services.append('mysql')

        for service in watched_services:
            run('service {} status'.format(service))
        colors.green("Done.")
def start(baseUrl, seedUrl):
    # Clear the referer log (reffer.txt)
    f = open("reffer.txt", "w")
    f.close()

    #seed = Request(base='http://192.168.42.131/dvwa/index.php',url='http://192.168.42.131/dvwa/index.php',method='get')
    seed = request.Request(base=baseUrl, url=seedUrl, timeout=config.conf['connTimeout'], query={}, method='get')
    #seed = request.Request(base='http://192.168.42.132/dvwa/',url='http://192.168.42.132/dvwa/',query={},method='get')
    colors.blue('Seed URL: %s\n' % seed._url)
    logfileName = create_logfile(seed._url)
    cookie = getCookie(seed._url)

    # Begin crawling from the seed URL
    tup = urlparse.urlparse(seed._url)
    netloc = tup.netloc
    count = 0
    q = Queue.Queue()
    bf = bloomFilter.BloomFilter(0.001, 100000)

    # Read referers back from reffer.txt
    '''
    reffer = readReffer()
    reqSet = []
    reqSet.append(seed)
    reqSet.extend(reffer)
    for i in reqSet:
        q.put(i)
        bf.insert(i._url)
    '''
    q.put(seed)
    bf.insert(seed._url)

    nums = config.conf['MaxThread']
    pool = ThreadPool(nums)
    begin = time.time()
    while not q.empty():
        req = q.get()
        req._cookies = cookie
        reqs = crawler.crawl(req, tree)
        if req._query != {} and is_tree_full(req._url, tree):  #if req._query != {}:
            count += 1
            print 'URL: ', req._BFUrl, ' ', req._source
            pool.add_task(startCheck, req, logfileName)
        for x in reqs:
            if not bf.exist(x._BFUrl):
                bf.insert(x._BFUrl)
                q.put(x)
    pool.destroy()
    end = time.time()

    f = open(logfileName, 'r')
    colors.blue('\nScan results:\n\n')
    x = f.read()
    colors.green(x)
    colors.blue('\nScan results saved to "%s"\n\n' % (os.getcwd() + '/' + logfileName))
    cost = end - begin
    print "Elapsed time: %f seconds" % cost
    print "Number of URLs tested:", count
    f.close()

    f = open(logfileName, 'a')
    f.write(advice())
    f.close()

    os.system('ps -ef | grep -v grep | grep proxy.py | awk \'{print $2}\'|xargs kill -9')
'''
        elif len(v) > 4 and v[0:4].lower() == 'http':
            config.conf['url'] = v
        elif k in ('--thread',):
            config.conf['MaxThread'] = int(v)
        elif k in ('-e', '--eqlimit'):
            config.conf['EqLimit'] = float(v)
        elif k in ('-t', '--timeout'):
            config.conf['connTimeout'] = float(v)
        elif k in ('-m', '--maxnode'):
            #MaxNode = v
            config.conf['MaxNode'] = int(v)
        else:
            print 'Invalid option!'
            sys.exit(3)

    if config.conf['url'] == '':
        colors.red('Please provide a URL!')
        Usage()
        sys.exit()
    else:
        colors.blue('Starting scan\n')
        start(config.conf['url'], config.conf['url'])


if len(sys.argv) == 1:
    colors.red('Please provide a URL!')
    Usage()
    sys.exit()
else:
    main(sys.argv)