def entrance(configurefile):
    """Back up one index, then optionally delete it from the cluster and
    compress its on-disc snapshot data.

    Expected argv layout (values read from fixed positions):
        sys.argv[2] -- name of the index to back up
        sys.argv[4] -- "true" to compress the snapshot data afterwards
        sys.argv[6] -- "true" to delete the index after a successful backup

    configurefile: path passed through to confmodel.readconf().
    """
    try:
        indexstr = sys.argv[2]
        willcompress = sys.argv[4]
        willdelete = sys.argv[6]
    except IndexError:
        # Not enough command-line arguments: show usage and bail out.
        # (Only IndexError is caught -- a bare except would hide real bugs.)
        help(configurefile)
        return
    conf = confmodel.readconf(configurefile)
    if not needBackup(conf['host'], conf['port'], indexstr):
        # Empty index -- nothing to snapshot.
        print("index( %s ) has no data,so not need to backup it" % (indexstr))
        return
    res = backup(conf['host'], conf['port'], conf['repositoryname'], indexstr)
    if res:
        if willdelete == "true":
            deleteIndex(conf['host'], conf['port'], indexstr)
        if willcompress == "true":
            compress(conf['repositorylocation'], indexstr)
def entrance(configurefile):
    """Remove the compressed archive and snapshot metadata for one index.

    sys.argv[1] names the index whose zipped backup data should be purged
    from the snapshot repository on disc.

    configurefile: path passed through to confmodel.readconf().
    """
    # Validate the command line before doing any work (the original read
    # the configuration file even when it was about to print usage and exit).
    if len(sys.argv) < 2:
        help()
        return
    indexstr = sys.argv[1]
    conf = confmodel.readconf(configurefile)
    rmZipData(conf['repositorylocation'], indexstr)
    modifySnapshot(conf['repositorylocation'], indexstr)
def entrance(configurefile):
    """Restore one index from its compressed on-disc backup.

    sys.argv[1] names the index to restore. On a successful restore the
    snapshot is deleted from the repository and the unpacked index
    directory is removed from disc.

    configurefile: path passed through to confmodel.readconf().
    """
    if len(sys.argv) < 2:
        help()
        return
    indexstr = sys.argv[1]
    conf = confmodel.readconf(configurefile)
    if not decompress(conf['repositorylocation'], indexstr):
        print("failed to decompress backup( %s ) in disc" % (indexstr))
        return
    res = restore(conf['host'], conf['port'], conf['repositoryname'], indexstr)
    if res:
        deleteBackup(conf['host'], conf['port'], conf['repositoryname'],
                     indexstr)
        os.chdir(conf['repositorylocation'])
        # Remove the unpacked snapshot directory. An argument list with
        # shell=False (the subprocess.call default) avoids shell injection
        # through the index name, which comes straight from the command line.
        import subprocess
        subprocess.call(["/bin/rm", "-rf", "indices/%s" % (indexstr)])
def entrance(configurefile):
    """Analyse yesterday's DNS meta files and push the records into
    Elasticsearch.

    For yesterday's date this function:
      1. creates a per-day logger under conf['log_dir'],
      2. lists the meta files under conf['data_dir']/<yesterday>/,
      3. initialises the daily index "dns<YYYYMMDD>" with two types
         ("querydata" keyed on "question", "dnsdata" keyed on "name"),
      4. streams each file's query records into "querydata" in batches
         while accumulating aggregated records in a DnsTree,
      5. flushes the aggregated tree into "dnsdata" once at the end.

    configurefile: path passed through to confmodel.readconf().
    """
    conf = confmodel.readconf(configurefile)
    today = datetime.date.today()
    yesterday = today - datetime.timedelta(days=1)
    logger = logmodel.createlogger(conf['log_dir'] + str(yesterday) + ".log")
    logger.debug("read configure file successfully")
    metafiles = metaAnalyzer.getpathlist(
        conf['data_dir'] + str(yesterday) + "/", logger)
    indexstr = "dns" + yesterday.strftime("%Y%m%d")
    elasticmodel.initIndex(conf['host'], conf['port'], indexstr, logger)
    elasticmodel.initType(conf['host'], conf['port'], indexstr,
                          "querydata", "question", logger)
    elasticmodel.initType(conf['host'], conf['port'], indexstr,
                          "dnsdata", "name", logger)
    conn = elasticmodel.getconnect(conf['host'], conf['port'])
    # Continue document numbering after anything already present, so a
    # re-run of the same day does not overwrite earlier batches.
    query_startid = elasticmodel.docsize(conn, indexstr, "querydata") + 1
    data_startid = elasticmodel.docsize(conn, indexstr, "dnsdata") + 1
    logger.debug("start analysing file and putting data into elasticsearch")
    query_total = 0
    treemap = treemodel.DnsTree()
    # `metafile`, not `file`: avoid shadowing the Python 2 builtin.
    for metafile in metafiles:
        datalist = metaAnalyzer.readfile(metafile, logger, treemap)
        query_total += len(datalist)
        query_startid = elasticmodel.addbatch(conn, indexstr, "querydata",
                                              datalist, conf['batch_size'],
                                              query_startid, logger)
        # Release the per-file record list before reading the next file.
        del datalist[:]
    # The tree aggregates across all files, so it is flushed only once.
    reslist = treemap.listFormat()
    data_total = len(reslist)
    elasticmodel.addbatch(conn, indexstr, "dnsdata", reslist,
                          conf['batch_size'], data_startid, logger)
    del reslist[:]
    logger.debug("All done")
    logger.debug("%s querydata pushed : %d" % (indexstr, query_total))
    logger.debug("%s dnsdata pushed : %d" % (indexstr, data_total))
def entrance(configurefile):
    """Register the snapshot repository described by *configurefile*.

    Reads the configuration and forwards the connection, repository and
    throttling settings to initRepository().
    """
    settings = confmodel.readconf(configurefile)
    initRepository(settings['host'],
                   settings['port'],
                   settings['repositoryname'],
                   settings['repositorylocation'],
                   settings['snapshotSpeed'],
                   settings['restoreSpeed'])