Пример #1
0
    def __init__(self, es_node="192.168.133.53", es_port="6775", timeout=None):
        """Resolve the Elasticsearch endpoint and open client/connection.

        Node/port are read from the parsed config's ``elasticSearch`` section;
        if those keys are absent we fall back to the ``es_node``/``es_port``
        arguments.

        :param es_node: fallback ES host used when config lacks the key.
        :param es_port: fallback ES port used when config lacks the key.
        :param timeout: connection timeout forwarded to ``_es_connection``.
            (BUG FIX: ``timeout`` was previously an undefined name here,
            which raised NameError at L12; it is now a keyword parameter
            defaulting to None so existing callers are unaffected.)
        """
        try:
            # Single config read instead of two; KeyError from either lookup
            # falls through to the argument defaults, as before.
            es_conf = configParsed()['elasticSearch']
            self.es_node = es_conf['ip']
            self.es_port = es_conf['port']
        except KeyError:
            self.es_port = es_port
            self.es_node = es_node

        # NOTE(review): ``_get_es_client`` is stored without being called —
        # presumably it is a property or the call site invokes it later;
        # confirm before "fixing".
        self.es_client = self._get_es_client
        self.es_conn = self._es_connection(timeout)
Пример #2
0
    def startInteractiveHunt(self):
        """Walk every configured pipeline and dispatch it by type.

        For each entry under config['pipeLine'] this merges the per-flow
        hunt parameters into the defaults, then hands the pipeline to
        ``runValidPipeline`` (type 'interactive') or ``runLivePipe``
        (type 'live').

        Raises:
            Exception: when a flow references a hunt/log combination that
                has no entry under config['defaultInputParams'].
        """
        config = basicUtils.configParsed()

        #huntInputParams = config['defaultInputParams']
        pipeLine = config['pipeLine']
        pipeline = ''
        defaults = config['defaultInputParams']
        for pipe, val in pipeLine.items():
            # Per-instance pipeline counter (read elsewhere, e.g. runLivePipe).
            self.count = self.count + 1
            print("Working on pipeLine : %s " % pipe)
            print("Pipe Line Type : %s " % val['type'])
            print("Log to be taken : %s " % val['log'])
            print("")
            # NOTE(review): this map result is never consumed — the join that
            # used it is commented out below; dead computation (Py2-only
            # tuple-unpacking lambda).
            pipeline = map(lambda (x, y): y['huntName'], val['flow'].items())
            for k, v in val['flow'].items():
                try:
                    # Merge flow-specific overrides into the shared defaults
                    # (mutates ``defaults`` in place for later dispatch).
                    defaults[v['huntName']][val['log']].update(v)
                except KeyError:
                    raise Exception(
                        "Default Params Not present for %s for hunt type : %s "
                        % (val['log'], v['huntName']))

            #pipeline = ':'.join(pipeline)
            if val['type'].lower() == 'interactive':
                print "Interactive Type of request made for the log : %s " % val[
                    'log'] + val['prefix']
                #val['flow']
                #defaults[val['log']]
                self.runValidPipeline(val, defaults)
            elif val['type'].lower() == 'live':
                print "Live Type of request made for log : %s " % val[
                    'log'] + val['prefix']
                self.runLivePipe(val, defaults)
Пример #3
0
    def get_permanent_datasources(self):
        """Return names of configured, ingesting datasources.

        A datasource from ``self.response_list`` qualifies when it reports an
        ``averageKfps`` property AND its name matches a configured pipeline's
        ``log + prefix``.

        BUG FIX: the original condition was ``line['name'] in config or
        True`` — the trailing ``or True`` (an apparent debug override) made
        the config-membership check dead code, so every datasource with
        ``averageKfps`` was returned. The override has been removed.

        :returns: list of matching datasource names.
        """
        # Portable comprehension replaces the Py2-only tuple-unpacking lambda.
        expected = set(
            entry['log'] + entry['prefix']
            for entry in configParsed()['pipeLine'].values()
        )

        names = []
        for line in self.response_list:
            if 'averageKfps' in line['properties'] and line['name'] in expected:
                names.append(line['name'])
        return names
Пример #4
0
 def __init__(self):
     config = configParsed()
     self.redisPortHost = findKey('redisHostAndPort', config['pipeLine'])
     self.redisPortHost = list(self.redisPortHost)
     if len(self.redisPortHost) == 0:
         print "Taking redis connection info from default config"
         self.redisPortHost = list(
             findKey('redisHostAndPort', config['defaultInputParams']))[0]
     else:
         self.redisPortHost = self.redisPortHost[0]
     self.connect()
Пример #5
0
 def __init__(self, ip='192.168.133.53', port='6773', json_folder=None):
     """Initialise the hunt API client with config-driven endpoint state.

     :param ip: unused here — kept for interface compatibility; the URL is
         taken from config['interactivehunt']['url'] instead.
     :param port: unused here — kept for interface compatibility.
     :param json_folder: directory holding request JSON fixtures; defaults
         to ``<cwd>/Test_Data/json_folder/``.
         (BUG FIX: an explicitly passed ``json_folder`` was previously
         ignored — only the ``None`` branch ever assigned
         ``self.json_folder``. The default behaviour is unchanged.)
     """
     super (HuntApi, self).__init__ ()
     self.tooldir = os.getcwd()
     if json_folder is None:
         json_folder = self.tooldir + "/Test_Data/json_folder/"
     self.json_folder = json_folder
     self.headers = {"Content-Type": "application/json"}
     conf = configParsed()
     ip_port = conf['interactivehunt']['url']
     self.url = ip_port
     # Mutable per-request state, populated as calls are made.
     self.current_request = None
     self.current_response = {}
     self.outputdataset = None
     self.driverId = None
     self.enrichment_name = None
     self.liveJson = []
 def get_section(file, mysection='ingestion'):
     """Return one top-level section of the parsed configuration.

     :param file: unused — kept for interface compatibility with callers
         (it also shadows the ``file`` builtin; renaming would change the
         keyword interface, so it is left as-is).
     :param mysection: config section name to return.
     :returns: the requested section mapping.
     :raises KeyError: if ``mysection`` is not present in the config.
     """
     # BUG FIX: an unterminated ``'''parser = ConfigParser...`` literal sat
     # after the return — unreachable and syntactically broken; removed.
     config = configParsed()
     return config[mysection]
Пример #7
0
    def runLivePipe(self, pipe, params):
        """Drive a 'live' hunt pipeline end to end.

        Submits the pipeline to the live-hunt API, pushes fresher logs
        through ingestion, waits for the datasource end-time to advance,
        then pause/plays the hunt and compares dev vs QA results.

        :param pipe: one pipeline entry from config['pipeLine'] (needs
            'prefix', 'log', 'flow' keys).
        :param params: merged default input params keyed by hunt name.
        """
        #self.runValidPipeline(pipe,params)
        # Unique-ish hunt name: prefix + log + instance counter + random id.
        current_huntname = pipe['prefix'] + pipe['log'] + str(
            self.count) + "_" + "%05d" % randint(1, 99999)
        liveCallBody = self.runValidPipeline(pipe, params)
        dataset = pipe['log'] + pipe['prefix']
        api = basicUtils.configParsed(
        )['liveHunt']['url'] + basicUtils.configParsed()['liveHunt']['api']
        testObj = Enrichment()
        headers = basicUtils.configParsed()['liveHunt']['header']
        result = testObj.execute_request("post",
                                         api,
                                         data=liveCallBody,
                                         headers=headers)
        outputDataSet = result.json()['output']
        devobj = LaunchHunt()
        # NOTE(review): no parentheses — this binds the method/property
        # object, which is then indexed below; works only if
        # ``calculate_datasource_properties`` is a property. Confirm.
        dataSetMetaInfo = Metadataservice().calculate_datasource_properties
        try:
            # Start/end times of the output dataset; (-1, -1) when absent.
            ST, ET = dataSetMetaInfo[outputDataSet][0:2]
        except KeyError:
            ST, ET = (-1, -1)
        print("Pushing the Logs:")
        config = basicUtils.configParsed()
        ingestionConfig = config['ingestion']
        logPathServer = ingestionConfig['inputlogspath']
        logPathLocal = ingestionConfig['localdatapath']
        logType = pipe['log']
        logPathLocal += '/' + logType
        # NOTE(review): shell=True with an interpolated path — fine for a
        # trusted config, but os.listdir would avoid the shell entirely.
        output = subprocess.Popen('ls %s ' % logPathLocal,
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.PIPE,
                                  shell=True).communicate()
        output = output[0].split('\n')
        # Parse "<TYPE>_dd_mm_YYYY_HH_MM.log" filenames into "dd:mm:YYYY:HH:MM".
        output = map(lambda x: x.split('_'), output)
        output = map(lambda x: map(lambda y: y.split('.')[0], x), output)
        output = map(lambda x: x[1:], output)
        output = map(lambda x: ':'.join(x), output)
        pattern = '%d:%m:%Y:%H:%M'
        import time
        output = filter(lambda x: not x == '', output)
        epochOutput = map(
            lambda x: int(time.mktime(time.strptime(x, pattern))), output)
        # Pair each epoch timestamp with its reconstructed log filename.
        output = map(
            lambda x:
            (int(time.mktime(time.strptime(x, pattern))), logType.upper() + '_'
             + '_'.join(x.split(':')) + '.log'), output)
        # Keep only logs newer than the dataset's current end time.
        epochOutput = filter(lambda x: x > ET, epochOutput)
        output = filter(lambda (x, y): x in epochOutput, output)
        output = map(lambda (x, y): y, output)
        if len(output) == 0:
            raise Exception("No futher logs present for this")
        else:
            Ingestion().push_logs()
        # Poll until ingestion advances the dataset's end time past ET.
        while True:
            dataSetMetaInfo = Metadataservice().calculate_datasource_properties
            newET = dataSetMetaInfo[dataset][1]
            if newET > ET:
                break
            print "New Data Not yet injested Waiting for 60 sec"
            time.sleep(60)

        # NOTE(review): json.dumps returns a str; the item assignment on the
        # next line raises TypeError. Likely the dumps should happen after
        # setting 'huntName' (same issue repeated in the Play section).
        jsonBody = json.dumps(
            basicUtils.configParsed()['liveHunt']['pause']['jsonBody'])
        jsonBody['huntName'] = outputDataSet
        testObj.getsource(dataset, ST, ET)
        #Pause
        testObj.execute_request(
            'post',
            basicUtils.configParsed()['liveHunt']['pause']['api'],
            basicUtils.configParsed()['liveHunt']['header'],
            data=jsonBody)
        devData = Metadataservice().get_static_dataset(outputDataSet)

        #Play
        jsonBody = json.dumps(
            basicUtils.configParsed()['liveHunt']['pause']['jsonBody'])
        jsonBody['huntName'] = outputDataSet
        testObj.execute_request(
            'post',
            basicUtils.configParsed()['liveHunt']['play']['api'],
            basicUtils.configParsed()['liveHunt']['header'],
            data=jsonBody)
        # Run every hunt function in the flow; qa_result keeps only the last
        # one — presumably intentional for single-flow pipes; confirm.
        for k, v in pipe['flow'].items():
            qa_result = testObj.getHuntFunction(v['huntName'])(
                testObj, params[v['huntName']][pipe['log']])
        comparator(devData, qa_result)