Example 1
 def __init__(self, isThread=1):
     TaskTool.__init__(self, isThread)
     # DEBUG
     self.logger = initLog('/root/log/baozhang/logs/sniffertask.log', 1,
                           False, 'sniffertask')
     self.sqlTool = Sqldatatask.getObject()  # set the number of threads
     # fetch nmap's parameters
     self.sniffer = sniffertool.SniffrtTool(logger=self.logger)
     self.config = config.Config
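
The snippets in this listing come from several different codebases, so initLog is called with two, three or four positional arguments. For orientation only, here is a minimal sketch of a helper compatible with the most common call form, initLog(path, level, to_console, name); the level-to-verbosity mapping and the meaning of the boolean flag are assumptions inferred from the call sites, not the actual implementation used by these projects.

import logging
import os


def initLog(path, level=2, to_console=True, name=None):
    """Hypothetical reconstruction of the initLog helper used in these examples.

    Assumed semantics (not confirmed by the source): level 1 -> DEBUG,
    anything else -> INFO; to_console mirrors the log to stderr.
    """
    log = logging.getLogger(name or path)
    if log.handlers:                       # already initialised, reuse it
        return log
    log.setLevel(logging.DEBUG if level == 1 else logging.INFO)

    fmt = logging.Formatter('%(asctime)s %(name)s %(levelname)s %(message)s')

    log_dir = os.path.dirname(path)
    if log_dir and not os.path.isdir(log_dir):
        os.makedirs(log_dir)               # make sure the log directory exists
    file_handler = logging.FileHandler(path)
    file_handler.setFormatter(fmt)
    log.addHandler(file_handler)

    if to_console:
        console_handler = logging.StreamHandler()
        console_handler.setFormatter(fmt)
        log.addHandler(console_handler)
    return log

Under these assumptions, the call in Example 1, initLog('/root/log/baozhang/logs/sniffertask.log', 1, False, 'sniffertask'), would create a DEBUG-level file logger named 'sniffertask' with no console output.
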
Example 2
 def __init__(self,isThread=1,deamon=True):
     TaskTool.__init__(self,isThread,deamon=deamon)
     import Sqldatatask
     self.logger = initLog('logs/portScantask.log', 2, True,'portscantask')
     self.sqlTool=Sqldatatask.getObject()
     self.connectpool=connectpool.getObject()
     self.portscan=portscantool.Portscantool()
     self.config=config.Config
     self.set_deal_num(15)
Example 3
 def __init__(self, isThread=1, deamon=False):
     TaskTool.__init__(self,isThread,deamon=deamon)
     import Sqldatatask
     self.logger = initLog('/root/log/baozhang/logs/portScantask.log', 2, False,'portscantask')
     self.sqlTool = Sqldatatask.getObject()  # init DB manager
     # init ConnectPool's parameters, e.g. proxy_address, proxy_name... (configured in webconfig.py)
     self.connectpool = connectpool.getObject()
     # timeout:8, config:xxx, socketclient:None
     self.portscan = portscantool.Portscantool()
     self.config = config.Config
     self.set_deal_num(30)
Example 4
	def __init__(self):
		temp=config.Config
		self.__host = temp.host
		self.__user=temp.username
		self.__passwd=temp.passwd
		self.__db=temp.database
		self.__port=temp.port
		self.__charset=temp.charset
		self.__cachemax=temp.cachemax
		self.__cachemin=temp.cachemin
		self.__conn=None
		self.__cur=None
		self.logger = initLog('logs/sqltool.log', 2, True,'sqltool')
Example 5
 def __init__(self):
     temp = config.Config
     self.__host = temp.host
     self.__user = temp.username
     self.__passwd = temp.passwd
     self.__db = temp.database
     self.__port = temp.port
     self.__charset = temp.charset
     self.__cachemax = temp.cachemax
     self.__cachemin = temp.cachemin
     self.__conn = None
     self.__cur = None
     self.logger = initLog('logs/sqltool.log', 2, True, 'sqltool')
Example 6
def monMain():
    """Tests Unitaires
    print(isFileToBeIncluded(['*'], ['.odt', ".docx"], "/home/nicolas/Documents/fichier.docx"))
    print(isFileToBeIncluded(['*'], ['.odt', ".docx"], "/home/nicolas/Documents/fichier.txt"))
    print(isFileToBeIncluded(['.odt', ".docx"], ['*'], "/home/nicolas/Documents/fichier.docx"))
    print(isFileToBeIncluded(['.odt', ".docx"], ['*'], "/home/nicolas/Documents/fichier.txt"))
    """

    # === Initialize the variables ===
    ARGS = prftp.initVariables()
    buffer = init(ARGS)
    connectFTP = buffer[0]
    INCLUDES = buffer[1]
    EXCLUDES = buffer[2]
    arbrePrecedent = buffer[3]
    STARTINGLEVEL = buffer[4]

    # === Initialize the logger ===
    MAIN_LOGGER = logger.initLog(ARGS.logPath, ARGS.logConf)
    # write the initial parameters to the logger
    prftp.logArgs(ARGS, MAIN_LOGGER)

    # === Main loop ===
    loop(ARGS, MAIN_LOGGER, arbrePrecedent, STARTINGLEVEL, connectFTP, INCLUDES, EXCLUDES)
Example 7
# -*- coding:utf-8 -*-

from django.shortcuts import render
from django.shortcuts import render_to_response
from django.http import HttpResponse

from nmaptoolbackground.control import portcontrol
from spidertool import webtool
from fontsearch.control import mapcontrol

import json
import time
import traceback

from logger import initLog
logger = initLog('/root/log/baozhang/logs/search.log', 1, False)

timeout = 60 * 20


def indexpage(request):
    username = request.COOKIES.get('username', '')
    return render_to_response('fontsearchview/search.html', {
        'data': '',
        'username': username
    })


def mainpage(request):
    content = request.GET.get('searchcontent', '')
    page = request.GET.get('page', '0')
Example 8
#!/usr/bin/python
#coding:utf-8
import sys
reload(sys)

sys.setdefaultencoding('utf8')
from elasticsearch_dsl.query import MultiMatch, Match
from datetime import datetime
from elasticsearch_dsl import DocType, String, Date, Integer, MultiSearch, Search, Q
from elasticsearch_dsl.connections import connections
import mapping
from logger import initLog
import chardet
logger = initLog('logs/elastic.log', 2, True)
# Define a default Elasticsearch client
# connections.create_connection(hosts=['localhost'])
import base64


def decodestr(msg):

    chardit1 = chardet.detect(msg)

    try:

        if chardit1['encoding'] == 'utf-8':
            return msg
        else:
            if chardit1['encoding'] == 'ISO-8859-2':
                return msg
            else:
Example 9
 def __init__(self, poolsize=10):
     self.__connect_pool = Queue.Queue(maxsize=poolsize)  # connection pool
     self.connectTool = connecttool.ConnectTool()
     self.logger = initLog("logs/connectpool.log", 2, True, "connectpool")
Example 10
#!/usr/bin/env python
import os ,random,re
import logger
logge = logger.initLog("log.log", 2, True,'a')
def readpath(path):
	rootpath=path
	files = os.listdir(path)  
	for i in xrange(len(files)):
		files[i]=rootpath+'/'+files[i]
	return files
def locatecode(path,targetpath):
	sourcepath=path
	with open(path,'r') as fileitem:
		content = fileitem.readlines()  
	func_assign_line=0
	func_audit_line=0
	func_main_line=0
	func_targetfile=None
	func_def_line=0
	params=None
	service=None
	def_line=[]
	targetpath=targetpath
	security_label=None
	targetfilepath=None
	for line in  xrange(len(content)):

		if 'curl' in content[line] and line<func_audit_line:
			error(' may fail,need change label curl not in audit ',sourcepath,targetfilepath)
		if 'def ' in content[line] :
			func_def_line=line
Example 11
 def __init__(self, poolsize=10):
     self.__connect_pool = Queue.Queue(maxsize=poolsize)  # connection pool
     # enable the proxy
     self.connectTool = connecttool.ConnectTool()
     self.logger = initLog('/root/log/baozhang/logs/connectpool.log', 2,
                           False, 'connectpool')
Example 12
def monMain():
    ARGS = initVariables()
    MAIN_LOGGER = logger.initLog(ARGS.logPath, ARGS.logConf)
    print(ARGS)
Example 13
def getloghandle():
	global DBlog
	if DBlog is None:
		DBlog=initLog('logs/sqltool.log', 2, True,'sqltool')
	return DBlog
Example 14
def monMain():
    MAIN_LOGGER = logger.initLog("", "rsyncFTP.conf")
    dp = "/home/nicolas/Documents"
    startinglevel = dp.count(os.sep)
    arbrePrecedent = createSurveyList(list(os.walk(dp)), startinglevel, 2)
    loop(MAIN_LOGGER, 10, -1, arbrePrecedent, dp, startinglevel)
Example 15
#!/usr/bin/python
# coding:utf-8
import sys

reload(sys)

sys.setdefaultencoding("utf8")
from elasticsearch_dsl.query import MultiMatch, Match
from datetime import datetime
from elasticsearch_dsl import DocType, String, Date, Integer, MultiSearch, Search, Q
from elasticsearch_dsl.connections import connections
import mapping
from logger import initLog
import chardet

logger = initLog("logs/elastic.log", 2, True)
# Define a default Elasticsearch client
connections.create_connection(hosts=["localhost"])
import base64


def decodestr(msg):

    chardit1 = chardet.detect(msg)

    try:
        # print chardit1['encoding'],msg.decode('gbk')
        if chardit1["encoding"] == "utf-8":
            return msg
        else:
            if chardit1["encoding"] == "ISO-8859-2":
Example 16
def getloghandle():
    global DBlog
    if DBlog is None:
        DBlog = initLog('logs/sqltool.log', 2, True, 'sqltool')
    return DBlog
Example 17
def getloghandle():
    global DBlog
    if DBlog is None:
        locate = os.path.split(os.path.realpath(__file__))[0]
        DBlog = initLog(locate + '/logs/sqltool.log', 2, True, 'sqltool')
    return DBlog
Example 18
#!/usr/bin/python
#coding:utf-8
import sys;
reload(sys);

sys.setdefaultencoding('utf8');
from elasticsearch_dsl.query import MultiMatch, Match
from datetime import datetime
from elasticsearch_dsl import DocType, Date, Integer,MultiSearch,Search,Q
from elasticsearch_dsl.connections import connections
import mapping
from logger import initLog
import chardet
logger = initLog('/root/log/baozhang/logs/elastic.log', 2, True)

import base64

def ipsearch(page='0',dic=None,content=None):
    limitpage=15
    validresult=False
    orderlabel=0
    orderarray = []
    print ("======================ipestool::ipsearch() dic:%s, content:%s======================"%(dict, content))
    if content is not None:
        # MultiMatch(fields=['ip', 'name', 'product', 'script', 'detail', 'head', 'hackresults', 'keywords', 'disclosure'], query=u'database')
        q = Q("multi_match", query=content, fields=['ip', 'city','vendor',
                'isp' ,'region' ,'area', 'country'  ,'updatetime','county' ,'osfamily'  ])
    # accessed via a GET request to /ipsearch, passing in a dict (apparently containing only an ip)
    else:
        searcharray=[]
        keys = dic.keys()
Example 19
    def __init__(self,isThread=1):
        TaskTool.__init__(self,isThread)
        self.logger = initLog('logs/sniffertask.log', 2, True,'sniffertask')

        self.sniffer= sniffertool.SniffrtTool(logger=self.logger)
Example 20
 def __init__(self,isThread=1):
     TaskTool.__init__(self,isThread)
     self.logger = initLog('logs/sniffertask.log', 2, True,'sniffertask')
     self.sqlTool = Sqldatatask.getObject()
     self.sniffer= sniffertool.SniffrtTool(logger=self.logger)
     self.config=config.Config
Example 21
def aaaa():
	from logger import initLog
	logger = initLog('portScantask.log', 2, True,'asd')

	logger.info('%s port scan: task running %s', 1,2)
Example 22
#!/usr/bin/python
#coding:utf-8
import sys;
reload(sys);

sys.setdefaultencoding('utf8');
from elasticsearch_dsl.query import MultiMatch, Match
from datetime import datetime
from elasticsearch_dsl import DocType, String, Date, Integer,MultiSearch,Search,Q
from elasticsearch_dsl.connections import connections
import mapping
from logger import initLog
import chardet
logger = initLog('logs/elastic.log', 2, True)

import base64
def ipsearch(page='0',dic=None,content=None):

    limitpage=15
    validresult=False
    orderlabel=0
    orderarray = []
    if content is not None:
        q=Q("multi_match", query=content, fields=['ip', 'city','vendor',
                'isp' ,'region' ,'country'  ,'updatetime','county' ,'osfamily'  ])

    else:
        searcharray=[]
        keys=dic.keys()
        orderlabel=0
Example 23
 def __init__(self, poolsize=10):
     self.__connect_pool = Queue.Queue(maxsize=poolsize)  # connection pool
     self.connectTool = connecttool.ConnectTool()
     self.logger = initLog('logs/connectpool.log', 2, True, 'connectpool')
Example 24
 def __init__(self, isThread=1):
     TaskTool.__init__(self, isThread)
     self.logger = initLog('logs/sniffertask.log', 2, True, 'sniffertask')
     self.sqlTool = Sqldatatask.getObject()
     self.sniffer = sniffertool.SniffrtTool(logger=self.logger)
     self.config = config.Config
Example 25
#!/usr/bin/env python
import os, random, re
import logger

logge = logger.initLog("log.log", 2, True, 'a')


def readpath(path):
    rootpath = path
    files = os.listdir(path)
    for i in xrange(len(files)):
        files[i] = rootpath + '/' + files[i]
    return files


def locatecode(path, targetpath):
    sourcepath = path
    with open(path, 'r') as fileitem:
        content = fileitem.readlines()
    func_assign_line = 0
    func_audit_line = 0
    func_main_line = 0
    func_targetfile = None
    func_def_line = 0
    params = None
    service = None
    def_line = []
    targetpath = targetpath
    security_label = None
    targetfilepath = None
    for line in xrange(len(content)):
Example 26
def getloghandle():
    global DBlog
    if DBlog is None:
        DBlog = initLog('/root/log/baozhang/logs/sqltool.log', 1, False,
                        'sqltool')
    return DBlog
Example 27
import json
import urllib2
from logger import initLog


API_KEY = '05743125acff7a93105c937106fe6fb9'
OPENER = urllib2.build_opener()
COUNT = 10000
API_URL='http://api.douban.com/shuo/users/%s/followers?api_key=%s&count=%s'
LOGGING=initLog()
MEMCACHE={}


class DBRankException(Exception):
    pass


class MasterException(DBRankException):
    pass

class UserDoesNotExistError(DBRankException):
    pass


class ConnectError(DBRankException):
    pass


def memcache(function):
    def _wrap(*args,**kwargs):
        print args