Example #1
def taskFunc(schedid, searchcond, startime, endtime, warnornot, warncondop,
             warncondval, warnlevel, saveresult):
    """
    计划任务的实际功能
    """

    if postdata != None and postdata.is_master() == False:
        return

    execTime = datetime.datetime.now()
    #print 'Task start time: %s' % execTime.strftime( '%Y-%m-%d %H:%M:%S' )

    # Convert searchstart and searchend (originally strings) to absolute times
    searchstart = scheduletime.fmtRelTime(startime, execTime)
    #print 'Condition start time: %s' % searchstart.strftime( '%Y-%m-%d %H:%M:%S' )
    searchend = scheduletime.fmtRelTime(endtime, execTime)
    #print 'Condition end time: %s' % searchend.strftime( '%Y-%m-%d %H:%M:%S' )

    try:
        es = esdata.ESData(configFile)
        # Count the records matching the search condition
        cnt = es.Count(searchcond, searchstart, searchend)
        #print 'Count of search: %s' % cnt

        if saveresult > 0:
            #lst = es.Search( searchcond, searchstart, searchend, fields = DEFAULTFIELDS, size = saveresult )
            lst = es.Search(searchcond,
                            searchstart,
                            searchend,
                            fields=None,
                            size=saveresult)
            #print 'Length of search list: %s' % len( lst )

        # Insert a row into t_schedresult and return its id (resultid)
        resultid = postdata.newSchedResult(schedid, execTime, searchcond,
                                           searchstart, searchend, cnt,
                                           saveresult)

        warnid = 0
        if warnornot.upper() != 'N':
            if ( warncondop == '>' and cnt > warncondval ) or \
                    ( warncondop == '>=' and cnt >= warncondval ) or \
                    ( warncondop == '<=' and cnt <= warncondval ) or \
                    ( warncondop == '<' and cnt < warncondval ) or \
                    ( warncondop == '=' and cnt == warncondval ):
                # Insert an alert row into t_warn and return its id (warnid)
                warnid = postdata.newWarn(schedid, execTime, warnlevel,
                                          searchcond, searchstart, searchend,
                                          cnt)

        # Insert rows into t_resultdetail
        if saveresult > 0 and len(lst) > 0:
            postdata.newResultDetail(warnid, resultid, lst)
    except Exception, e:
        conf = sys_config.SysConfig(configFile)
        _logFile = conf.getConfig("tasksched", "logFile")
        _instance = conf.getConfig("tasksched", "instanceName")
        sys_log.SysLog(_logFile,
                       _instance).writeLog("error",
                                           str(traceback.format_exc()))
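
The alert check in Example #1 spells out each comparison operator by hand; below is a minimal sketch of the same decision driven by a lookup table (the helper name should_warn is hypothetical; the operator strings and parameter names follow the example):

import operator

# Map the operator strings stored in warncondop to comparison functions.
_OPS = {
    '>': operator.gt,
    '>=': operator.ge,
    '<=': operator.le,
    '<': operator.lt,
    '=': operator.eq,
}

def should_warn(warnornot, warncondop, cnt, warncondval):
    # Return True when the alert condition from Example #1 is met.
    if warnornot.upper() == 'N':
        return False
    cmp_func = _OPS.get(warncondop)
    return cmp_func is not None and cmp_func(cnt, warncondval)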
Example #2
def run(start_type = None):
    global dicTask, dicJob, postdata
    
    conf = sys_config.SysConfig(configFile)
    # PID file name
    pidFile = conf.getConfig("pullfile", "pidFile")

    if start_type == None:
        if os.path.exists(pidFile):
            os.remove(pidFile)
        pFile = open(pidFile, "w")
        pFile.write(str(os.getpid()))
        pFile.close()

    # Create the postdata object
    postdata = postgrespullfile.PostgresData( configFile )

    # Clear tables t_pfnew, t_pfupd and t_pfdel
    postdata.clearSchedule()

    # Read the active file-pull tasks
    lstTask = postdata.getTask()

    if lstTask != None and len( lstTask ) > 0:
        # Create and start the file-pull tasks
        #print "Length of lstTask is: %s" % len( lstTask )
        createTask( lstTask )

    while True:
        # Sleep for 60 seconds
        time.sleep( LOOPTIME )

        # Read file-pull tasks that have changed
        lstNew = postdata.getNewTask()
        lstUpd = postdata.getUpdTask()
        lstDel = postdata.getDelTask()

        # Clear tables t_pfnew, t_pfupd and t_pfdel
        postdata.clearSchedule()

        # Create new file-pull tasks
        if lstNew != None and len( lstNew ) > 0:
            createTask( lstNew )

        # Update existing file-pull tasks
        if lstUpd != None and len( lstUpd ) > 0:
            # Stop first
            deleteTask( lstUpd )
            # Then recreate
            createTask( lstUpd )

        # Delete file-pull tasks
        if lstDel != None and len( lstDel ) > 0:
            deleteTask( lstDel )
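
Example #2 and the later run() variants repeat the same PID-file bookkeeping at startup; a minimal sketch of that pattern as a standalone helper (the function name is an assumption, not part of the source):

import os

def write_pid_file(pid_file):
    # Overwrite pid_file with the current process ID, as run() does when start_type is None.
    if os.path.exists(pid_file):
        os.remove(pid_file)
    with open(pid_file, "w") as f:
        f.write(str(os.getpid()))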
Example #3
def run(start_type=None):
    global dicTask, postdata

    conf = sys_config.SysConfig(configFile)
    # PID file name
    pidFile = conf.getConfig("dataextract", "pidFile")

    if start_type == None:
        if os.path.exists(pidFile):
            os.remove(pidFile)
        pFile = open(pidFile, "w")
        pFile.write(str(os.getpid()))
        pFile.close()

    # Create the postdata object
    postdata = postgresdataextract.PostgresData(configFile)

    # Clear tables t_dbnew, t_dbupd and t_dbdel
    postdata.clearDataExtraction()

    # Read the active database extraction tasks; the extraction tasks are passed in directly here
    lstTask = postdata.getDataExtractions()

    if lstTask != None and len(lstTask) > 0:
        # Create and start the database extraction tasks
        createTask(lstTask)

    while True:
        # Sleep
        time.sleep(LOOPTIME)

        # Read data-extraction tasks that have changed
        lstNew = postdata.getNewDataExtraction()
        lstUpd = postdata.getUpdDataExtraction()
        lstDel = postdata.getDelDataExtraction()

        # Clear tables t_dbnew, t_dbupd and t_dbdel
        postdata.clearDataExtraction()

        # Create new data-extraction tasks
        if lstNew != None and len(lstNew) > 0:
            createTask(lstNew)

        # Update existing data-extraction tasks
        if lstUpd != None and len(lstUpd) > 0:
            # Stop first
            deleteTask(lstUpd)
            # Then recreate
            createTask(lstUpd)

        # Delete data-extraction tasks
        if lstDel != None and len(lstDel) > 0:
            deleteTask(lstDel)
Example #4
def run(start_type = None):
    global postdata

    conf = sys_config.SysConfig(configFile)
    # PID file name
    pidFile = conf.getConfig("datastats", "pidFile")

    if start_type == None:
        if os.path.exists(pidFile):
            os.remove(pidFile)
        pFile = open(pidFile, "w")
        pFile.write(str(os.getpid()))
        pFile.close()

    # Create the postdata object
    postdata = postgrestats.PostgresData(configFile)
    argus = [ postdata ]

    sched = Scheduler(standalone = True)

    sched.add_cron_job(funcNoon, year=cron_noon[0], month=cron_noon[1], \
            week=cron_noon[2], day_of_week=cron_noon[3], day=cron_noon[4], \
            hour=cron_noon[5], minute=cron_noon[6], second=cron_noon[7], args=argus)
    sched.add_cron_job(funcHour, year=cron_hour[0], month=cron_hour[1], \
            week=cron_hour[2], day_of_week=cron_hour[3], day=cron_hour[4], \
            hour=cron_hour[5], minute=cron_hour[6], second=cron_hour[7], args=argus)
    sched.add_cron_job(funcMidnight, year=cron_midnight[0], month=cron_midnight[1], \
            week=cron_midnight[2], day_of_week=cron_midnight[3], day=cron_midnight[4], \
            hour=cron_midnight[5], minute=cron_midnight[6], second=cron_midnight[7], args=argus)
    # Custom dashboard statistics service
    sched.add_cron_job(stats_dashboard.stats_dashboard, year=cron_dashboard[0], month=cron_dashboard[1], \
            week=cron_dashboard[2], day_of_week=cron_dashboard[3], day=cron_dashboard[4], \
            hour=cron_dashboard[5], minute=cron_dashboard[6], second=cron_dashboard[7], args=argus)

    # Every few minutes (5 by default), check whether the original pcap files should be deleted
    interval_chkdevice = conf.getConfig("datastats", "intervalCheckDevice")
    if interval_chkdevice == None:
        interval_chkdevice = 5
    else:
        interval_chkdevice = int(interval_chkdevice)

    sched.add_interval_job(chkdevice.checkDevice, weeks=0, days=0, hours=0, minutes=interval_chkdevice, seconds=0, args=argus)

    try:
        sched.start()
    except (KeyboardInterrupt, SystemExit):
        pass
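
Example #4 indexes eight-element cron specifications defined elsewhere in the module in the order (year, month, week, day_of_week, day, hour, minute, second); a hedged illustration of the expected shape (the concrete values are placeholders for demonstration, not taken from the source):

# Hypothetical layout: [year, month, week, day_of_week, day, hour, minute, second]
cron_noon = ['*', '*', '*', '*', '*', '12', '0', '0']   # fire every day at 12:00:00
cron_hour = ['*', '*', '*', '*', '*', '*', '0', '0']    # fire at the top of every hour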
Example #5
def run(start_type=None):

    # PID file name and configuration
    conf = sys_config.SysConfig(configFile)
    pidFile = conf.getConfig("dataextract", "pidFile")
    # Write the process ID to the file
    if start_type is None:
        if os.path.exists(pidFile):
            os.remove(pidFile)
        pFile = open(pidFile, "w")
        pFile.write(str(os.getpid()))
        pFile.close()
    # Pass in the task list (a list) and the database connections (a dict)
    dic_tasklist = globalvariable.dic_tasklist
    dic_connection = globalvariable.dic_connection

    # If the task list is not None and not empty, execute the tasks
    if dic_tasklist is not None and len(dic_tasklist) > 0:
        createTask(dic_tasklist, dic_connection)
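
Example #5 takes its work from a shared globalvariable module rather than from the database; a hedged illustration of the shape that module is assumed to expose (the names follow the example, the contents are placeholders):

# Hypothetical contents of globalvariable.py, inferred from how Example #5 uses it.
dic_tasklist = []       # list of extraction task definitions
dic_connection = {}     # mapping of connection name -> connection settings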
Example #6
    def __init__(self, cfgFile=None):
        self._configFile = cfgFile
        if self._configFile == None:
            #self._configFile = sys_config.getDir() + "/conf/" + CONFIG_FILE
            self._configFile = os.path.split(
                os.path.realpath(__file__))[0] + "/conf/" + CONFIG_FILE
            if os.path.exists(self._configFile) == False:
                self._configFile = "/opt/mdstack/conf/mdstackd/" + CONFIG_FILE

        conf = sys_config.SysConfig(self._configFile)

        eshost = conf.getConfig("elasticsearch", "esHost")
        esport = conf.getConfig("elasticsearch", "esPort")
        self._config = eshost + ':' + esport

        # Log file
        self._logFile = conf.getConfig("dataextract", "logFile")
        # Instance name
        self._instance = conf.getConfig("dataextract", "instanceName")
Example #7
    def __init__( self, cfgFile = None ):
        if cfgFile == None:
            #cfgFile = sys_config.getDir() + "/conf/" + CONFIG_FILE
            cfgFile = os.path.split(os.path.realpath(__file__))[0] + "/conf/" + CONFIG_FILE
            if os.path.exists(cfgFile) == False:
                cfgFile = "/opt/mdstack/conf/mdstackd/" + CONFIG_FILE

        conf = sys_config.SysConfig( cfgFile )
        
        # Database connection settings
        self._dbhost = conf.getConfig( "postgresql", "dbHost" )
        self._dbport = conf.getConfig( "postgresql", "dbPort" )
        self._dbname = conf.getConfig( "postgresql", "dbName" )
        self._dbuser = conf.getConfig( "postgresql", "dbUser" )
        self._dbpwd = conf.getConfig( "postgresql", "dbPwd" )

        # Log file
        self._logFile = conf.getConfig( "dataextract", "logFile" )

        # Instance name
        self._instance = conf.getConfig( "dataextract", "instanceName" )
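
Examples #6 and #7 read the same INI-style configuration file through sys_config.SysConfig; a minimal sketch of the equivalent lookups with the Python 2 standard ConfigParser (the helper name and return shape are assumptions, the section and key names follow the examples):

import ConfigParser  # Python 2 standard library module

def read_db_settings(cfg_file):
    # Read the postgresql connection keys that Example #7 expects.
    parser = ConfigParser.ConfigParser()
    parser.read(cfg_file)
    return {
        'host': parser.get('postgresql', 'dbHost'),
        'port': parser.get('postgresql', 'dbPort'),
        'name': parser.get('postgresql', 'dbName'),
        'user': parser.get('postgresql', 'dbUser'),
        'pwd': parser.get('postgresql', 'dbPwd'),
    }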
Example #8
def taskFunc( pfid, groupid, configpath, logsource, protocol, port, username, userpass, fpath, files ):
    """
    文件提取任务的实际功能
    """

    execTime = datetime.datetime.now()
    #print 'Task start time: %s' % execTime.strftime( '%Y-%m-%d %H:%M:%S' )
    #print 'Task function parameters: %s' %( [pfid, groupid, configpath, logsource, protocol, port, username, userpass, fpath, files] )
    
    # Read configuration
    conf = sys_config.SysConfig( configFile )
    rootPath = conf.getConfig( "pullfile", "rootPath" )
    tmpPath = os.path.join( rootPath, 'tmp' )

    # Read the group name
    groupName = postdata.getGroupName( groupid )

    if username == None:
        username = ''
    if userpass == None:
        userpass = ''
    else:
        #userpass = userpass 
        userpass = base64.decodestring( userpass )

    obj = None
    if protocol.lower() == 'ftp':
        # FTP
        if port == None:
            port = 21
        obj = fileftp.FileFtp( username, userpass, logsource, port )
    elif protocol.lower() == 'samba':
        # SMB
        if port == None:
            port = 139
        obj = filesmb.FileSMB( username, userpass, logsource, port )
    elif protocol.lower() == 'ssh':
        # SSH
        if port == None:
            port = 22
        obj = filessh.FileSSH( {'username': username, 'password': userpass}, logsource, port )

    if obj != None:
        lstFile = obj.GetFileList( os.path.join( fpath, files ) )
        for remoteFile in lstFile:
            lstName = os.path.split( remoteFile )
            localFile = os.path.join( tmpPath, lstName[1] )
            obj.DownLoadFile( localFile, remoteFile )
            if os.path.exists( localFile ):
                hashvalue = getFileHash( localFile )
                if postdata.existsFileHash( hashvalue ):
                    os.remove( localFile )
                else:
                    #lastmoditime = datetime.datetime.now()
                    lastmoditime = obj.getFileModiTime( remoteFile )
                    postdata.insFileHash( hashvalue, lstName[1], lastmoditime, configpath, logsource )
                    fileName = hashvalue
                    lstExt = lstName[1].split('.')
                    if len(lstExt) > 1:
                        fileName = hashvalue + '.' + lstExt[len(lstExt) - 1]
                    
                    dstPath = os.path.join( rootPath, 'pull', groupName, execTime.strftime('%Y%m%d'), logsource.replace(':','-'), configpath )
                    if os.path.isdir( dstPath ) == False:
                        os.makedirs( dstPath )

                    os.rename( localFile, os.path.join( dstPath, fileName ) )
 
        obj.close()
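
Example #8 relies on a getFileHash helper that is not shown; a minimal sketch of how such a content hash might be computed with hashlib (an assumption, not the project's actual implementation):

import hashlib

def get_file_hash(path, chunk_size=65536):
    # Hex MD5 digest of a file, read in chunks to keep memory use flat.
    md5 = hashlib.md5()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            md5.update(chunk)
    return md5.hexdigest()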
Example #9
def taskFunc(dbid, conname, tbname, idxname, reclimit, inctype, timefld,
             incrfld, msgfld, curpos, curpos_stime, curpos_etime):
    """
    数据提取进程实际功能
    """

    # Fetch the database connection information
    # The return value is None or a dict with the keys ( conname, conntype, hostname, port, dbname, username, userpass, usepooling )
    connInfo = postdata.getConnInfo(conname)

    es = esdataextract.ESData(configFile)

    dbDic = {}
    dbDic['dbhost'] = connInfo["hostname"]
    dbDic['dbport'] = str(connInfo["port"])
    dbDic['dbname'] = connInfo["dbname"]
    dbDic['dbuser'] = connInfo["username"]
    #dbDic['dbpass'] = connInfo["userpass"]
    dbDic['dbpass'] = base64.decodestring(connInfo["userpass"])

    dbDic['tbname'] = tbname
    dbDic['reclimit'] = str(reclimit)
    dbDic['inctype'] = inctype
    dbDic['timefld'] = timefld
    dbDic['incrfld'] = incrfld
    dbDic['msgfld'] = msgfld

    dbfac = dbfactory.DbFactory(dbDic)
    db = dbfac.factory(connInfo["conntype"])

    # Read the database fields to extract
    lstFlds = []
    n = 0
    while lstFlds == None or len(lstFlds) == 0:
        # The return value is None or a list of dicts with the keys ( fldid, fldsrc, fldout )
        lstFlds = db.getTableStructure()
        n += 1
        if n == 12:
            n = 0
            conf = sys_config.SysConfig(configFile)
            _logFile = conf.getConfig("dataextract", "logFile")
            _instance = conf.getConfig("dataextract", "instanceName")
            sys_log.SysLog(_logFile, _instance).writeLog(
                "error", "tbname: " + tbname + "  Not found fields!")
        time.sleep(5)
    # Save the fields and their data types; this should eventually be handled by the web page
    postdata.saveDbFieldList(dbid, lstFlds)
    # If the imported fields need to change, edit the t_dbfields table by hand and use the method below
    #lstFlds = postdata.getDbFieldList(dbid)

    flds = []
    for fld in lstFlds:
        flds.append(fld["fldsrc"])
        # Determine the data type of the incremental field: number, string or time
        if (fld["fldsrc"]).lower() == incrfld.lower():
            incrfld_type = fld["fldout"]
            if curpos == None or curpos == "":
                if incrfld_type == "number":
                    curpos = "0"
                elif incrfld_type == "string":
                    curpos = " "
                elif incrfld_type == "time":
                    curpos = "1980-01-01 00:00:00"
                else:
                    curpos = " "

    if db != None:
        # Loop reading the log data
        while True:
            try:
                lst = db.getData(flds, incrfld_type, curpos, curpos_stime,
                                 curpos_etime)
                print "Start fetch data(t_database.dbid=" + str(
                    dbid) + "): records=" + str(
                        len(lst)), datetime.datetime.now()

                if lst == None or len(lst) == 0:
                    # 1 = primary-key increment; 2 = time increment; 0 = one-time import
                    if inctype == 2:
                        if incrfld_type == "number":
                            curpos = "0"
                        elif incrfld_type == "string":
                            curpos = " "
                        elif incrfld_type == "time":
                            curpos = "1980-01-01 00:00:00"
                        else:
                            curpos = " "

                        if curpos_stime == '1980-01-01 00:00:00':
                            curpos_stime = curpos_etime
                            curpos_etime = db.getMinTime()
                        else:
                            curpos_stime = curpos_etime
                            curpos_etime = db.getMaxTime()

                            # If the time gap is too large, advance the end date gradually to keep the data volume (and sort time) manageable
                            t1 = datetime.datetime.strptime(
                                curpos_stime, '%Y-%m-%d %H:%M:%S')
                            t2 = datetime.datetime.strptime(
                                curpos_etime, '%Y-%m-%d %H:%M:%S')
                            delta = t2 - t1
                            if delta.days > 2:
                                curpos_etime = (t1 + datetime.timedelta(1)
                                                ).strftime('%Y-%m-%d %H:%M:%S')
                    elif inctype == 0:
                        return
                    time.sleep(DELAYTIME)
                else:
                    # Take the last row of data
                    row = lst[len(lst) - 1]
                    # Take the primary-key value of the last row
                    curpos = row[len(flds)]

                    # Update the position for the next extraction
                    postdata.updCurPosition(dbid, curpos, curpos_stime,
                                            curpos_etime)
                    # Write the data to ES
                    es.bulkInsData(lst, inctype, timefld, lstFlds, incrfld,
                                   tbname, idxname)

                    #if len(lst) < reclimit:
                    #    time.sleep( DELAYTIME )

                # Traffic-checkpoint test
                if tbname == "b_bk_jgcl":
                    conf = sys_config.SysConfig(configFile)
                    _logFile = conf.getConfig("dataextract", "logFile")
                    _instance = conf.getConfig("dataextract", "instanceName")
                    sys_log.SysLog(_logFile, _instance).writeLog(
                        "error", "=====|" + tbname + "|" + str(time.time()) +
                        "|" + str(curpos) + "|" + str(curpos_stime) + "|" +
                        str(curpos_etime))
            except Exception, e:
                conf = sys_config.SysConfig(configFile)
                _logFile = conf.getConfig("dataextract", "logFile")
                _instance = conf.getConfig("dataextract", "instanceName")
                sys_log.SysLog(_logFile, _instance).writeLog(
                    "error", "tbname" + " --- " + str(traceback.format_exc()))