Example #1
def dumpOrgDict(orgDict, outputFilename):
    outputF = fileWriter(outputFilename)
    ret_str = ""
    for ip, org in orgDict.items():
        ret_str += "%s\t%s\n" % (ip, org)
        # print() already appends a newline, so no explicit "\n" is needed here.
        print("%s\t%s" % (ip, org))
    outputF.writeString(ret_str)
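
The example above and all of the ones that follow write through a small fileWriter helper (and many read through a matching fileReader), neither of which is shown in these snippets. A minimal sketch, assuming only the usage visible here (a writer built from a path with writeString and close, and a reader that yields newline-stripped lines), might look like this; the project's actual implementation may differ:

# Hypothetical sketch of the I/O helpers assumed by these examples; the real
# fileReader/fileWriter in the source project may differ.
class fileWriter:
    def __init__(self, filename):
        # One output file per writer instance.
        self.f = open(filename, "w")

    def writeString(self, s):
        # The examples pass a single, fully formatted string.
        self.f.write(s)

    def close(self):
        self.f.close()


def fileReader(filename):
    # Yield each line without its trailing newline, matching how the examples
    # split lines on "\t" and re-append "\n" when needed.
    with open(filename) as f:
        for line in f:
            yield line.rstrip("\n")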
Example #2
def dumpHourlyesponseList(filename, outputFilename):
    responseLineList = getHourlyResponseLess(filename)
    outputFile = fileWriter(outputFilename)
    outputStr = ""
    for responseLine in responseLineList:
        outputStr = outputStr + responseLine
    outputFile.writeString(outputStr)
Example #3
def dumpUniqueIPList(cookie, filenameA, filenameB, outputFilename):
    ipList = getNewIPList(cookie, filenameA, filenameB)
    outputF = fileWriter(outputFilename)
    ret_str = ""
    for ip in ipList:
        ret_str += "%s\n" % ip
    outputF.writeString(ret_str)
Example #4
def getHourlySubnetOccurredCount(inComing, outGoing, inComingOutput, outGoingOutput):
    inSubnetPools, outSubnetPools = doSubnetOccurredCount(inComing, outGoing)
    inRet_str = ""
    outRet_str = ""
    if inSubnetPools is not None:
        for key in inSubnetPools.keys():
            inRet_str += "%d\t%d\n" % (key, inSubnetPools[key])
        outputF = fileWriter(inComingOutput)
        outputF.writeString(inRet_str)
    if outSubnetPools is not None:
        for key in outSubnetPools.keys():
            outRet_str += "%d\t%d\n" % (key, outSubnetPools[key])
        outputF = fileWriter(outGoingOutput)
        outputF.writeString(outRet_str)

    return inRet_str, outRet_str
Example #5
def dumpHourlyesponseList(filename, outputFilename):
    responseList = getHourlyValidResponse(filename)
    outputFile = fileWriter(outputFilename)
    outputStr = ""
    for response in responseList:
        outputStr = outputStr + response + "\n"
    outputFile.writeString(outputStr)
Example #6
    def getDailyTypeCount(self):
        folderType = self.folderType + "/%s/" % self.date
        srcCountStr = self.getDailyTypeCount_AsString(folderType)

        outputFileType = self.folderType + "/typeCounter_%s.log" % self.date
        outputF = fileWriter(outputFileType)

        outputF.writeString(srcCountStr)
Example #7
    def getDailySrcCount(self):
        foldername = self.foldername + "/%s/" % self.date
        srcCountStr = self.getDailySrcCount_AsString(foldername)

        outputFilename = self.foldername + "/srcCounter_%s.log" % self.date
        outputF = fileWriter(outputFilename)

        outputF.writeString(srcCountStr)
Example #8
def dumpCounter(counter, outputFilename):
    ret_src = ""
    counter = OrderedDict(counter.most_common())
    for elem in counter.keys():
        ret_src += "%s\t%d\n" % (elem, counter[elem])

    outputF = fileWriter(outputFilename)
    outputF.writeString(ret_src)
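
A hedged usage sketch for dumpCounter: it expects a collections.Counter (so that most_common() is available) plus an output path; the counter contents and file name below are purely illustrative.

from collections import Counter

# Illustrative only: count destination IPs, then dump them sorted by frequency.
dstCounter = Counter(["136.159.2.1", "136.159.2.4", "136.159.2.1"])
dumpCounter(dstCounter, "dstCounter_example.log")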
Example #9
def dumpParticalIP(filename, outputFilename):
    outputStr = ""
    file = fileReader(filename)
    for line in file:
        # Note: despite the srcIP name, this reads the dstIP column of the line.
        srcIP = line.split("\t")[FieldToLoc["dstIP"]]
        particalSrcP = ".".join(srcIP.split(".")[2:4])
        outputStr = outputStr + particalSrcP + "\n"
    outputFile = fileWriter(outputFilename)
    outputFile.writeString(outputStr)
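
This example and several later ones index tab-separated log lines through a FieldToLoc mapping defined elsewhere in the project (Examples #26 and #28 use analogous per-log mappings named connFTL, dnsFTL, and weirdFTL). A hypothetical sketch of the idea; the real column order is not shown in these snippets:

# Hypothetical column layout; the project's actual FieldToLoc may differ.
FieldToLoc = {
    "timestamp": 0,
    "srcIP": 1,
    "dstIP": 2,
    "type": 3,
    "query": 4,
}
# Usage, as in the examples: dstIP = line.split("\t")[FieldToLoc["dstIP"]]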
Example #10
    def getDailyCount(self):
        foldername = self.foldername + "/%s/" % self.date
        CountStr = self.getDailyCount_AsString(foldername)

        outputFilename = self.foldername + "/%sCounterBy%s=%s_%s.log" % (
            self.toField, self.filterField, self.filterVal, self.date)
        outputF = fileWriter(outputFilename)

        outputF.writeString(CountStr)
Example #11
def getHourlyNBSTATRow(filename, NBSTATFoldername):
    file = fileReader(filename)
    NBSTATFilename = NBSTATFoldername + "/" + filename.split("/")[-1]
    dump_str = ""
    for line in file:
        type = line.split("\t")[FieldToLoc["type"]]
        if type == "NBSTAT":
            dump_str += (line + "\n")
    campusFile = fileWriter(NBSTATFilename)
    campusFile.writeString(dump_str)
Example #12
    def dumpSpecifiedCount(self, fieldName):
        certainCounter = OrderedDict(
            self.getSpecifiedCount(fieldName).most_common())
        outputFilename = ".".join(
            self.filename.split(".")[:-1]) + "_%sCounter.log" % fieldName
        outputF = fileWriter(outputFilename)
        ret_str = ""
        for elem in certainCounter.keys():
            ret_str += "%s\t%d\n" % (elem, certainCounter[elem])
        outputF.writeString(ret_str)
Example #13
def getHourlyPhysRow(filename, PhysFoldername):
    file = fileReader(filename)
    PhysFilename = PhysFoldername + "/" + filename.split("/")[-1]
    PhysDNSList = ["136.159.51.4", "136.159.51.5", "136.159.52.10"]
    dump_str = ""
    for line in file:
        dstIP = line.split("\t")[FieldToLoc["dstIP"]]
        if dstIP in PhysDNSList:
            dump_str += (line + "\n")
    PhysFile = fileWriter(PhysFilename)
    PhysFile.writeString(dump_str)
Example #14
def getHourlyCPSCRow(filename, cpscFoldername):
    file = fileReader(filename)
    cpscFilename = cpscFoldername + "/" + filename.split("/")[-1]
    cpscDNSList = ["136.159.2.1", "136.159.2.4"]
    dump_str = ""
    for line in file:
        dstIP = line.split("\t")[FieldToLoc["dstIP"]]
        if dstIP in cpscDNSList:
            dump_str += (line + "\n")
    cpscFile = fileWriter(cpscFilename)
    cpscFile.writeString(dump_str)
Example #15
def getHourlyUnknownRow(filename, UnknownFoldername):
    file = fileReader(filename)
    UnknownFilename = UnknownFoldername + "/" + filename.split("/")[-1]
    UnknownDNSList = ["136.159.205.37", "136.159.205.38", "136.159.205.39"]
    dump_str = ""
    for line in file:
        dstIP = line.split("\t")[FieldToLoc["dstIP"]]
        if dstIP in UnknownDNSList:
            dump_str += (line + "\n")
    UnknownFile = fileWriter(UnknownFilename)
    UnknownFile.writeString(dump_str)
Example #16
def getHourlyCampusNewRow(filename, CampusNewFoldername):
    file = fileReader(filename)
    CampusNewFilename = CampusNewFoldername + "/" + filename.split("/")[-1]
    CampusNewDNSList = ["136.159.222.2", "136.159.222.10"]
    dump_str = ""
    for line in file:
        dstIP = line.split("\t")[FieldToLoc["dstIP"]]
        if dstIP in CampusNewDNSList:
            dump_str += (line + "\n")
    CampusNewFile = fileWriter(CampusNewFilename)
    CampusNewFile.writeString(dump_str)
Example #17
def getHourlyAkamaiRow(filename, AkamaiFoldername):
    file = fileReader(filename)
    AkamaiFilename = AkamaiFoldername + "/" + filename.split("/")[-1]
    AkamaiDNSList = ["136.159.222.244"]
    dump_str = ""
    for line in file:
        dstIP = line.split("\t")[FieldToLoc["dstIP"]]
        if dstIP in AkamaiDNSList:
            dump_str += (line + "\n")
    AkamaiFile = fileWriter(AkamaiFilename)
    AkamaiFile.writeString(dump_str)
Example #18
def getHourlyCampusOneRow(filename, campusOneFoldername):
    file = fileReader(filename)
    campusOneFilename = campusOneFoldername + "/" + filename.split("/")[-1]
    campusOneDNSList = ["136.159.1.21"]
    dump_str = ""
    for line in file:
        dstIP = line.split("\t")[FieldToLoc["dstIP"]]
        if dstIP in campusOneDNSList:
            dump_str += (line + "\n")
    campusFile = fileWriter(campusOneFilename)
    campusFile.writeString(dump_str)
Example #19
def getHourlyAuroralRow(filename, AuroralFoldername):
    file = fileReader(filename)
    AuroralFilename = AuroralFoldername + "/" + filename.split("/")[-1]
    AuroralDNSList = ["136.159.142.4", "136.159.142.5"]
    dump_str = ""
    for line in file:
        dstIP = line.split("\t")[FieldToLoc["dstIP"]]
        if dstIP in AuroralDNSList:
            dump_str += (line + "\n")
    AuroralFile = fileWriter(AuroralFilename)
    AuroralFile.writeString(dump_str)
Example #20
def dumpParticalIP(filename, outputFilename):
    outputStr = ""
    file = fileReader(filename)
    for line in file:
        query = line.split("\t")[FieldToLoc["query"]]
        if ".in-addr.arpa" in query:
            fullIP = getNormByReverse(query)
            particalIP = ".".join(fullIP.split(".")[2:4])
            if particalIP != "" and "." in particalIP:
                outputStr = outputStr + particalIP + "\n"
    outputFile = fileWriter(outputFilename)
    outputFile.writeString(outputStr)
Example #21
    def dumpCount(self):
        outputFilename = "../../analResult/batchedWork/%s.log" % (
            self.outputname)
        outputF = fileWriter(outputFilename)
        for key in self.staticCount.keys():
            valuestr = ""
            for value in self.staticCount[key]:
                valuestr += "%d\t" % (value)
            outputF.writeString("%s\t%s\n" % (key, valuestr))
        outputF.close()
        currentDT = datetime.datetime.now()
        print("All Job Done: %s. At: %s" % (self.taskname, str(currentDT)))
Example #22
def dumpDailySrcIPCount(date):
    dailySrcIPCounter = Counter()
    for time in range(24):
        # Generate Hourly Report
        dump_str = ""
        time = str(time).zfill(2)
        filename = "../../result/NBSTAT/%s/%s_%02s.log" % (date, date, time)
        outputFoldername = "../../result/NBSTAT/srcIPAnalysis/%s" % date
        if not os.path.exists(outputFoldername):
            os.makedirs(outputFoldername)
        outputFilename = "%s/srcIP_%s_%s.log" % (outputFoldername, date, time)
        srcIPCounter = getHourlySrcIPCount(filename)
        for srcIP, count in srcIPCounter.most_common(50):
            dump_str = dump_str + str(srcIP) + "\t" + str(count) + "\n"
        campusFile = fileWriter(outputFilename)
        campusFile.writeString(dump_str)
        dailySrcIPCounter += srcIPCounter
    # Generate Daily Summary
    dump_str = ""
    dailySummaryFilename = "../../result/NBSTAT/srcIPAnalysis/srcIP_dailySummary_%s.log" % date
    for srcIP, count in dailySrcIPCounter.most_common(100):
        dump_str = dump_str + str(srcIP) + "\t" + str(count) + "\n"
    campusFile = fileWriter(dailySummaryFilename)
    campusFile.writeString(dump_str)
Example #23
def getHourlylessPopRow(filename, lessPopFoldername):
    file = fileReader(filename)
    lessPopFilename = lessPopFoldername + "/" + filename.split("/")[-1]
    PopDNSList = [
        "136.159.222.2", "136.159.222.10", "136.159.222.244", "136.159.205.37",
        "136.159.205.38", "136.159.205.39", "136.159.2.1", "136.159.2.4",
        "136.159.1.21", "136.159.34.201", "136.159.142.4", "136.159.142.5",
        "136.159.51.4", "136.159.51.5", "136.159.52.10"
    ]
    dump_str = ""
    for line in file:
        dstIP = line.split("\t")[FieldToLoc["dstIP"]]
        if dstIP not in PopDNSList:
            dump_str += (line + "\n")
    lessPopFile = fileWriter(lessPopFilename)
    lessPopFile.writeString(dump_str)
Example #24
def getNewIPRow(cookie, filenameA, filenameB, outputFilename):
    newIPList = getNewIPList(cookie, filenameA, filenameB)
    newIPList.sort()
    file = fileReader(filenameA)
    if "src" in cookie:
        fieldName = "srcIP"
    else:
        fieldName = "dstIP"
    ret_str = ""
    for line in file:
        ip = line.split("\t")[FieldToLoc[fieldName]]
        if ip in newIPList:
            ret_str += "%s\n" % line

    outputF = fileWriter(outputFilename)
    outputF.writeString(ret_str)
Example #25
def dumpDailyCount(date, cookie):
    countDict = getDailyCount(date, cookie)
    __exp_countDict = __exp_getDailyCount(date, cookie)
    str_out = ""
    # Get filtered result
    for hour in sorted(countDict.keys()):
        str_out += "%s\t%d\n" % (hour, countDict[hour])
    # Get original (unfiltered) result, iterating its own keys in sorted order
    for hour in sorted(__exp_countDict.keys()):
        str_out += "%s\t%d\n" % (hour, __exp_countDict[hour])
    # dump/write result
    outputFoldername = "../../result_summary/connectionCount/To%s/" % (cookie)
    if not os.path.exists(outputFoldername):
        os.makedirs(outputFoldername)
    outputFilename = outputFoldername + "%s.log" % (date)
    outputFile = fileWriter(outputFilename)
    outputFile.writeString(str_out)
Example #26
def batchedDump(direction, date):
    def dictSelection(checkedIP):
        if checkedIP in Campus1List:
            targetDict = campus1Dict
        elif checkedIP in Campus2List:
            targetDict = campus2Dict
        elif checkedIP in AkamaiList:
            targetDict = akamaiDict
        elif checkedIP in CPSCList:
            targetDict = cpscDict
        elif checkedIP in WebpaxList:
            targetDict = webpaxDict

        else:
            targetDict = othersDict
        return targetDict

    print("Start date: %s" % date)
    errorLog = "../../result/error.log"
    errorOut = fileWriter(errorLog)

    connFoldername = "../../data/conn_%sbound/%s/" % (direction, date)
    dnsFoldername = "../../data/dns_%sbound/%s/" % (direction, date)
    weirdFoldername = "../../data/weird_%sbound/%s/" % (direction, date)


    # CLNS Lists and the corresponding IPs
    AkamaiList = ["136.159.222.244"]
    Campus1List = ["136.159.222.2"]
    Campus2List = ["136.159.222.10"]
    CPSCList = ["136.159.5.75", "136.159.5.76"]
    WebpaxList = ["136.159.190.37"]

    # Others

    for hour in range(0, 24):

        # Init CLNS related dicts
        akamaiDict = {}
        campus1Dict = {}
        campus2Dict = {}
        cpscDict = {}
        webpaxDict = {}

        othersDict = {}
        print("Start task: %s" % hour)

        hour = str(hour).zfill(2)
        connFilename = connFoldername + "%s_%s.log" % (date, hour)
        dnsFilename = dnsFoldername + "%s_%s.log" % (date, hour)
        weirdFilename = weirdFoldername + "%s_%s.log" % (date, hour)

        # handle all conn logs and init all the structures.
        connfile = fileReader(connFilename)
        for line in connfile:
            line_list = line.strip().split("\t")
            uid = line_list[connFTL["uid"]]
            checkedIP = line_list[connFTL["srcIP"]]
            # get the proper dict to store this info
            targetDict = dictSelection(checkedIP)
            targetDict[uid] = {"ts"   : line_list[connFTL["timestamp"]],
                                "addr" : (line_list[connFTL["srcIP"]],
                                       line_list[connFTL["srcPort"]],
                                       line_list[connFTL["dstIP"]],
                                       line_list[connFTL["dstPort"]]),
                                "conn" : None,
                                "dns"  : None,
                                "weird": None
                                }
            connList = [line_list[connFTL["duration"]], line_list[connFTL["sentByte"]],
                        line_list[connFTL["recvByte"]], line_list[connFTL["endFlag"]]]
            # Note: targetDict[uid] was just recreated above, so "conn" is always
            # None here and this duplicate-uid warning can never actually fire.
            if targetDict[uid]["conn"]:
                print("duplicated uid! date:%s_%s, uid=%s" % (date, hour, uid))
            else:
                targetDict[uid]["conn"] = connList

        # handle all the dns files
        dnsfile = fileReader(dnsFilename)
        for line in dnsfile:
            line_list = line.strip().split("\t")
            uid = line_list[dnsFTL["uid"]]
            checkedIP = line_list[dnsFTL["srcIP"]]
            targetDict = dictSelection(checkedIP)
            dnsList = [line_list[dnsFTL["transID"]], line_list[dnsFTL["rtt"]],
                       line_list[dnsFTL["query"]], line_list[dnsFTL["answers"]],
                       line_list[dnsFTL["ttls"]], line_list[dnsFTL["type"]],
                       line_list[dnsFTL["error"]]]
            try:
                if not targetDict[uid]["dns"]:
                    targetDict[uid]["dns"] = []
                    targetDict[uid]["dns"].append(dnsList)
                else:
                    # Remove duplicated dns traces.
                    existTIDList = [dnsr[0] for dnsr in targetDict[uid]["dns"]]
                    tID = dnsList[0]
                    if tID not in existTIDList:
                        targetDict[uid]["dns"].append(dnsList)
            except KeyError as keyE:
                # errorOut.writeString("DNS UID Not Found: %s.\n" % uid)
                pass

        # handle all the weird files
        weirdfile = fileReader(weirdFilename)
        for line in weirdfile:
            line_list = line.strip().split("\t")
            uid = line_list[weirdFTL["uid"]]
            checkedIP = line_list[weirdFTL["srcIP"]]
            targetDict = dictSelection(checkedIP)
            weirdList = [line_list[weirdFTL["weirdName"]], line_list[weirdFTL["addl"]],
                         line_list[weirdFTL["notice"]], line_list[weirdFTL["peer"]]]
            try:
                if not targetDict[uid]["weird"]:
                    targetDict[uid]["weird"] = []
                targetDict[uid]["weird"].append(weirdList)
            except KeyError as keyE:
                # errorOut.writeString("Weird UID Not Found: %s.\n" % uid)
                pass
        print("Finish task: %s" % hour)

        print("Start dump: %s\n" % date)

        # output all the dicts as json file.
        outputilename = "%s_%s.log" % (date, hour)
        akamaiOutputFolder = "../../struct/outakamai/%s/" % (date)
        if not os.path.exists(akamaiOutputFolder):
            os.makedirs(akamaiOutputFolder)
        with open(akamaiOutputFolder+outputilename, 'a') as f:
            json.dump(akamaiDict, f)

        campus1OutputFolder = "../../struct/outcampus1/%s/" % (date)
        if not os.path.exists(campus1OutputFolder):
            os.makedirs(campus1OutputFolder)
        with open(campus1OutputFolder+outputilename, 'a') as f:
            json.dump(campus1Dict, f)

        campus2OutputFolder = "../../struct/outcampus2/%s/" % (date)
        if not os.path.exists(campus2OutputFolder):
            os.makedirs(campus2OutputFolder)
        with open(campus2OutputFolder+outputilename, 'a') as f:
            json.dump(campus2Dict, f)

        cpscOutputFolder = "../../struct/outcpsc/%s/" % (date)
        if not os.path.exists(cpscOutputFolder):
            os.makedirs(cpscOutputFolder)
        with open(cpscOutputFolder+outputilename, 'a') as f:
            json.dump(cpscDict, f)

        webpaxOutputFolder = "../../struct/outwebpax/%s/" % (date)
        if not os.path.exists(webpaxOutputFolder):
            os.makedirs(webpaxOutputFolder)
        with open(webpaxOutputFolder+outputilename, 'a') as f:
            json.dump(webpaxDict, f)

        othersOutputFolder = "../../struct/outothers/%s/" % (date)
        if not os.path.exists(othersOutputFolder):
            os.makedirs(othersOutputFolder)
        with open(othersOutputFolder+outputilename, 'a') as f:
            json.dump(othersDict, f)

    errorOut.close()
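
A hedged invocation sketch for batchedDump above: direction is interpolated into path templates such as "../../data/conn_%sbound/", so a value like "out" (or "in" for the variant in Example #28) is expected, and the date follows the YYYY-MM-DD format used elsewhere in these examples; the specific values below are assumptions.

# Illustrative call; the direction and date values are assumptions inferred from
# the "conn_%sbound" path template and the date format seen in other examples.
batchedDump("out", "2018-03-07")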
Example #27
def doDailySrcCount(foldername, date):
    files = batchFileReader(foldername, cookie=date)
    srcCounter = Counter()
    for line in files:
        srcIP = line.split("\t")[FieldToLoc["srcIP"]]
        srcCounter[srcIP] += 1
    return srcCounter


def getDailySrcCount(foldername, date):
    return doDailySrcCount(foldername, date)


def getDailySrcCount_AsString(foldername, date):
    srcCounter = doDailySrcCount(foldername, date)
    srcCounter = OrderedDict(srcCounter.most_common())
    ret_str = ""
    for key in srcCounter.keys():
        ret_str += "%s\t%d\n" % (key, srcCounter[key])

    return ret_str


if __name__ == '__main__':
    date = "2018-03-07"
    foldername = "../../result/Blackhole/%s/" % date
    srcCountStr = getDailySrcCount_AsString(foldername, date)

    outputFilename = "../../result/Blackhole/srcCounter_%s.log" % date
    outputF = fileWriter(outputFilename)

    outputF.writeString(srcCountStr)
Example #28
def batchedDump(direction, date):
    def dictSelection(checkedIP):
        if checkedIP in CampusList:
            targetDict = campusDict
        elif checkedIP in AkamaiList:
            targetDict = akamaiDict
        elif checkedIP in CPSCList:
            targetDict = cpscDict
        elif checkedIP in Unknown205List:
            targetDict = unknown205Dict
        elif checkedIP in CampusNewList:
            targetDict = campusNewDict
        elif checkedIP in PhysList:
            targetDict = physDict
        elif checkedIP in AuroraList:
            targetDict = auroraDict
        else:
            targetDict = othersDict
        return targetDict

    print("Start date: %s" % date)
    errorLog = "../../result/error.log"
    errorOut = fileWriter(errorLog)

    connFoldername = "../../data/conn_%sbound/%s/" % (direction, date)
    dnsFoldername = "../../data/dns_%sbound/%s/" % (direction, date)
    weirdFoldername = "../../data/weird_%sbound/%s/" % (direction, date)

    # CLNS Lists and the corresponding IPs
    AkamaiList = ["136.159.222.244"]
    CampusList = ["136.159.1.21", "136.159.34.201"]
    CampusNewList = ["136.159.222.2", "136.159.222.10"]
    CPSCList = ["136.159.2.1", "136.159.2.4"]
    PhysList = ["136.159.51.4", "136.159.51.5", "136.159.52.10"]
    AuroraList = ["136.159.142.4", "136.159.142.5"]

    # UNS Lists
    Unknown205List = ["136.159.205.37", "136.159.205.38", "136.159.205.39"]
    # Others

    for hour in range(0, 24):
        # Init CLNS related dicts
        akamaiDict = {}
        campusDict = {}
        campusNewDict = {}
        cpscDict = {}
        physDict = {}
        auroraDict = {}

        # Init UNS related dicts
        unknown205Dict = {}
        othersDict = {}
        print("Start task: %s" % hour)

        hour = str(hour).zfill(2)
        connFilename = connFoldername + "%s_%s.log" % (date, hour)
        dnsFilename = dnsFoldername + "%s_%s.log" % (date, hour)
        weirdFilename = weirdFoldername + "%s_%s.log" % (date, hour)

        # handle all conn logs and init all the structures.
        connfile = fileReader(connFilename)
        for line in connfile:
            line_list = line.strip().split("\t")
            uid = line_list[connFTL["uid"]]
            checkedIP = line_list[connFTL["dstIP"]]
            # get the proper dict to store this info
            targetDict = dictSelection(checkedIP)
            targetDict[uid] = {
                "ts":
                line_list[connFTL["timestamp"]],
                "addr":
                (line_list[connFTL["srcIP"]], line_list[connFTL["srcPort"]],
                 line_list[connFTL["dstIP"]], line_list[connFTL["dstPort"]]),
                "conn":
                None,
                "dns":
                None,
                "weird":
                None
            }
            connList = [
                line_list[connFTL["duration"]], line_list[connFTL["sentByte"]],
                line_list[connFTL["recvByte"]], line_list[connFTL["endFlag"]]
            ]
            # Note: targetDict[uid] was just recreated above, so "conn" is always
            # None here and this duplicate-uid warning can never actually fire.
            if targetDict[uid]["conn"]:
                print("duplicated uid! date:%s_%s, uid=%s" % (date, hour, uid))
            else:
                targetDict[uid]["conn"] = connList

        # handle all the dns files
        # Done: duplicate DNS traces are now skipped here. Duplicates are detected
        # by comparing the transID field; this may also drop some DNS retransmission
        # messages, but retransmissions are rare enough that the approach is acceptable.
        dnsfile = fileReader(dnsFilename)
        for line in dnsfile:
            line_list = line.strip().split("\t")
            uid = line_list[dnsFTL["uid"]]
            checkedIP = line_list[dnsFTL["dstIP"]]
            targetDict = dictSelection(checkedIP)
            dnsList = [
                line_list[dnsFTL["transID"]], line_list[dnsFTL["rtt"]],
                line_list[dnsFTL["query"]], line_list[dnsFTL["answers"]],
                line_list[dnsFTL["ttls"]], line_list[dnsFTL["type"]],
                line_list[dnsFTL["error"]]
            ]
            try:
                if not targetDict[uid]["dns"]:
                    targetDict[uid]["dns"] = []
                    # Done: add transID check here to avoid the duplication.
                    targetDict[uid]["dns"].append(dnsList)
                else:
                    existTIDList = [dnsr[0] for dnsr in targetDict[uid]["dns"]]
                    tID = dnsList[0]
                    if tID not in existTIDList:
                        targetDict[uid]["dns"].append(dnsList)

            except KeyError as keyE:
                # errorOut.writeString("DNS UID Not Found: %s.\n" % uid)
                pass
        # handle all the weird files
        weirdfile = fileReader(weirdFilename)
        for line in weirdfile:
            line_list = line.strip().split("\t")
            uid = line_list[weirdFTL["uid"]]
            checkedIP = line_list[weirdFTL["dstIP"]]
            targetDict = dictSelection(checkedIP)
            weirdList = [
                line_list[weirdFTL["weirdName"]], line_list[weirdFTL["addl"]],
                line_list[weirdFTL["notice"]], line_list[weirdFTL["peer"]]
            ]
            try:
                if not targetDict[uid]["weird"]:
                    targetDict[uid]["weird"] = []
                targetDict[uid]["weird"].append(weirdList)
            except KeyError as keyE:
                # errorOut.writeString("Weird UID Not Found: %s.\n" % uid)
                pass
        print("Finish task: %s" % hour)

        print("Start dump: %s\n" % date)

        # output all the dicts as json file.
        outputilename = "%s_%s.log" % (date, hour)
        akamaiOutputFolder = "../../struct/inakamai/%s/" % (date)
        if not os.path.exists(akamaiOutputFolder):
            os.makedirs(akamaiOutputFolder)
        with open(akamaiOutputFolder + outputilename, 'a') as f:
            json.dump(akamaiDict, f)

        campusOutputFolder = "../../struct/incampus/%s/" % (date)
        if not os.path.exists(campusOutputFolder):
            os.makedirs(campusOutputFolder)
        with open(campusOutputFolder + outputilename, 'a') as f:
            json.dump(campusDict, f)

        campusNewOutputFolder = "../../struct/incampusNew/%s/" % (date)
        if not os.path.exists(campusNewOutputFolder):
            os.makedirs(campusNewOutputFolder)
        with open(campusNewOutputFolder + outputilename, 'a') as f:
            json.dump(campusNewDict, f)

        cpscOutputFolder = "../../struct/incpsc/%s/" % (date)
        if not os.path.exists(cpscOutputFolder):
            os.makedirs(cpscOutputFolder)
        with open(cpscOutputFolder + outputilename, 'a') as f:
            json.dump(cpscDict, f)

        physOutputFolder = "../../struct/inphys/%s/" % (date)
        if not os.path.exists(physOutputFolder):
            os.makedirs(physOutputFolder)
        with open(physOutputFolder + outputilename, 'a') as f:
            json.dump(physDict, f)

        auroraOutputFolder = "../../struct/inaurora/%s/" % (date)
        if not os.path.exists(auroraOutputFolder):
            os.makedirs(auroraOutputFolder)
        with open(auroraOutputFolder + outputilename, 'a') as f:
            json.dump(auroraDict, f)

        unknown205OutputFolder = "../../struct/inunknown205/%s/" % (date)
        if not os.path.exists(unknown205OutputFolder):
            os.makedirs(unknown205OutputFolder)
        with open(unknown205OutputFolder + outputilename, 'a') as f:
            json.dump(unknown205Dict, f)

        othersOutputFolder = "../../struct/inothers/%s/" % (date)
        if not os.path.exists(othersOutputFolder):
            os.makedirs(othersOutputFolder)
        with open(othersOutputFolder + outputilename, 'a') as f:
            json.dump(othersDict, f)
Example #29
def dumpDailyOrgCount(outputFoldername, ret_str, date):
    outputFilename = outputFoldername + "dailyOrgCount_%s.log" % date
    outputF = fileWriter(outputFilename)
    outputF.writeString(ret_str)
Example #30
def dumpHourlyData(filename, ns1OutputFilename, ns2OutputFilename):
    ret_str_ns1, ret_str_ns2 = getHourlyData_AsString(filename)
    ns1OutputF = fileWriter(ns1OutputFilename)
    ns1OutputF.writeString(ret_str_ns1)
    ns2OutputF = fileWriter(ns2OutputFilename)
    ns2OutputF.writeString(ret_str_ns2)