Пример #1
0
def testNoeud(n, p, listAlgo):
    """
    Count the branching-tree nodes visited by each branching algorithm
    listed in listAlgo.

    Args:
        n: numpy array of graph sizes to test.
        p: numpy array (same size as n) with the matching edge
            probabilities.
        listAlgo: list of (methodName, kwargsDict) tuples; methodName
            names the graph method to call and kwargsDict maps its
            argument names to values.

    Returns:
        res: numpy array of shape (len(listAlgo), n.size) where entry
            (i, j) is the number of nodes visited by listAlgo[i] on a
            random graph with n[j] vertices and edge probability p[j].
    """
    assert n.size == p.size
    res = np.zeros((len(listAlgo), n.size), dtype=int)
    progressBar = ProgressBar(maxValue=n.size * len(listAlgo))

    for col in range(n.size):
        for row, (algoName, algoArgs) in enumerate(listAlgo):
            progressBar.update(col * len(listAlgo) + row + 1)
            graphe = Graphe(nbSommets=n[col], probaArete=p[col])
            _, visited = getattr(graphe, algoName)(**algoArgs)
            res[row, col] = visited
    print("")

    return res
Пример #2
0
def test():
    """Read a 3D matrix file and report line/element/None statistics.

    Prompts for the file iteration number, scans the matrix rows
    (skipping the two header lines) and prints how many elements are
    the string "None" versus real values.
    """
    p = int(input("Enter file iteration: "))
    fileName = "3D Maps/3D Matrix(" + str(p) + ").txt"
    lineCount = 0
    elemCount = 0
    noneCount = 0
    pbar = ProgressBar(getFileSize(fileName), "Reading in...")
    # 'with' guarantees the file is closed even if parsing fails
    with open(fileName) as file:
        for line in file:
            lineCount += 1
            if lineCount > 2:  # first two lines are header metadata
                vals = line.rstrip().split(",")
                # split() yields strings, so only the literal "None"
                # can occur (the old `element == None` branch was dead)
                noneCount += vals.count("None")
                elemCount += len(vals)
            pbar.update()
    print()
    print("Lines in file............= ", lineCount)
    print("Elements in file.........= ", elemCount)
    print("Number of None types.....= ", noneCount)
    print("Number of not None types.= ", elemCount - noneCount)

    input("**Hit ENTER to close**")  #keeps the terminal open
    return
Пример #3
0
def main():
    """Split a syslog-style file into per-syshost files.

    Scans the file named in sys.argv[1] for lines tagged
    ``syshost:<name>`` and appends each matching line to the output
    handle MY_FILEH keeps for that syshost, reporting progress by
    bytes consumed.
    """
    fileName = sys.argv[1]
    fileH = MY_FILEH(fileName)
    syshostPattern = re.compile(r'syshost:([^ ]*) ')
    fnSz = os.path.getsize(fileName)

    pbar = ProgressBar(maxVal=fnSz)
    bytesRead = 0
    startTime = time.time()

    with open(fileName) as f:
        for line in f:
            bytesRead += len(line)
            pbar.update(bytesRead)
            match = syshostPattern.search(line)
            if match:
                fh = fileH.get_file_handle(match.group(1))
                fh.write(line)

    fileH.close_file_handles()
    pbar.fini()
    endTime = time.time()
    print("Time: ", time.strftime("%T", time.gmtime(endTime - startTime)))
Пример #4
0
    def trainingHelper(self, trainingData, trainingLabels, iterations):
        """
        Run the perceptron update rule over the training set.

        Each datum is scored against every label's weight vector; when
        the highest-scoring label disagrees with the true label, the
        true label's weights are increased by the datum's feature
        values and the wrongly-chosen label's weights are decreased.

        Keyword Arguments:
        trainingData -- training data for the perceptron
        trainingLabels -- labels for the associated training data
        iterations -- desired number of iterations over the training dataset.
        """
        for iteration in range(iterations):
            progressBar = ProgressBar(100, len(trainingData), "Learning Weights, Iteration {0} of {1}"
                .format(iteration + 1, iterations))
            for idx in range(len(trainingData)):
                progressBar.update(idx)

                scores = util.Counter()
                datum = trainingData[idx]

                # Score each label: dot product of its weight vector
                # with the datum's feature values.
                for label in self.legalLabels:
                    total = 0
                    for feature, value in datum.items():
                        total += self.weights[label][feature] * value
                    scores[label] = total

                # Mistake-driven update: reward the true label, punish
                # the wrongly predicted one.
                guess = scores.argMax()
                truth = trainingLabels[idx]
                if guess != truth:
                    for feature, value in datum.items():
                        self.weights[truth][feature] += value
                        self.weights[guess][feature] -= value
            progressBar.clear()
Пример #5
0
def testTempsPfunc(nMax, p, nomMethode, fois = 10, **kwargs):
    """
    Measure the average running time of a graph-cover algorithm where
    the edge probability is a function of n.

    Works exactly like testTempsN, except that `p` is a callable
    mapping a vertex count n to the edge probability for that n.

    Args:
        nMax: largest number of vertices to test.
        p: function taking a value of n and returning the edge
            probability for that n.
        nomMethode: name of the graph method to benchmark.
        fois (optional): repetitions per (n, p) pair.
        **kwargs (optional): extra arguments forwarded to the method.

    Returns:
        res: numpy array of 10 mean running times (seconds), one per n.
        n: numpy array of the 10 vertex counts used.
    """
    n = np.linspace(nMax / 10, nMax, 10, dtype=int)
    res = np.zeros(n.shape)
    progressBar = ProgressBar(maxValue=n.size * fois)

    for idx in range(n.size):
        for rep in range(fois):
            progressBar.update(idx * fois + rep + 1)
            graphe = Graphe(nbSommets=n[idx], probaArete=p(n[idx]))
            debut = t.process_time()
            getattr(graphe, nomMethode)(**kwargs)
            res[idx] += t.process_time() - debut
    print("")

    return res / fois, n
Пример #6
0
def main():
  """Split a syslog-style file into per-syshost files.

  Reads the file named in sys.argv[1], finds lines tagged
  'syshost:<name>' and appends each one to the output handle that
  MY_FILEH keeps for that syshost, with a progress bar driven by the
  number of bytes consumed.
  """
  fileName       = sys.argv[1]
  fileH          = MY_FILEH(fileName)
  syshostPattern = re.compile(r'syshost:([^ ]*) ')
  fnSz           = os.path.getsize(fileName)

  pbar           = ProgressBar(maxVal=fnSz)
  sz             = 0
  t1             = time.time()

  with open(fileName) as f:
    for line in f:
      sz += len(line)  # progress is measured in bytes consumed
      pbar.update(sz)
      m = syshostPattern.search(line)
      if (not m):
        continue  # line carries no syshost tag; skip it
      syshost = m.group(1)
      fh = fileH.get_file_handle(syshost)
      fh.write(line)

  fileH.close_file_handles()
  pbar.fini()
  t2 = time.time()
  print("Time: ", time.strftime("%T", time.gmtime(t2-t1)))
Пример #7
0
    def classify(self, data):
        """
        Label each datum with the highest-activation perceptron label.

        Keyword Arguments:
        data -- the test data to classify
        """
        guesses = []
        progressBar = ProgressBar(100, len(data), "Classifying Data")
        for idx, datum in enumerate(data):
            progressBar.update(idx)
            scores = util.Counter()

            # Activation for a label is the dot product of its weight
            # vector with the datum's feature values.
            for label in self.legalLabels:
                scores[label] = sum(self.weights[label][feature] * value
                                    for feature, value in datum.items())

            # The predicted label is the one with the largest activation.
            guesses.append(scores.argMax())
        progressBar.clear()
        return guesses
Пример #8
0
def maxMatrix(it):
    """Scan a matrix file and return its extreme altitudes.

    Args:
        it: path to the matrix file to read.

    Returns:
        (altMax, altMin): the largest value in the file, and the
        smallest value above the 2.4384 threshold (presumably a minimum
        altitude of interest in metres -- TODO confirm with callers).
    """
    altMax = None
    altMin = None

    pbar = ProgressBar(getFileSize(it), "Reading in data...")

    # 'with' guarantees the file is closed even if parsing fails.
    # (The original also tracked the line number of the maximum but
    # never used it; that dead bookkeeping is dropped here.)
    with open(it) as file:
        for line in file:
            for element in line.rstrip().split(','):
                value = float(element)  # convert once per element
                # smallest value above the threshold
                # (first assignment uses >, later ones >= -- preserved as-is)
                if altMin is None:
                    if value > 2.4384:
                        altMin = value
                elif value < altMin and value >= 2.4384:
                    altMin = value
                # overall maximum
                if altMax is None or value > altMax:
                    altMax = value
            pbar.update()
    return altMax, altMin
Пример #9
0
def searchForBestAccuracy(y, pred, tries=100, step=1):
    """Search over smoothing-window sizes for the best filter accuracy.

    For each window size in range(1, tries, step), runs the
    majority-vote Filter over `pred`, measures accuracy against `y`,
    and keeps the best window. Saves an accuracy-vs-window plot to
    'accurLocal.png' and returns the best window size.

    NOTE(review): Python 2 code (xrange, dict.iteritems, print
    statements) -- do not mix with the Python 3 snippets nearby.
    """
    pb = ProgressBar(len(range(1, tries, step)))
    accs = [calculateAccuracy(y, pred)]  # window 0 = unfiltered accuracy
    x = [0]
    bestwindow = 0
    bestacc = 0.0
    for window in xrange(1, tries, step):
        f = Filter(pred, window)
        newp = []
        for target, lwin in f:
            dc = Counter(lwin)
            # majority label within the window replaces the prediction
            target = max(dc.iteritems(), key=operator.itemgetter(1))[0]
            newp.append(target)

        accuracy = calculateAccuracy(y, newp)
        accs.append(accuracy)
        if accuracy > bestacc:
            bestacc = accuracy
            bestwindow = window
        pb.update()
    x.extend(range(1, tries, step))
    #print len(x), len(accs)
    plt.plot(x, accs)
    plt.savefig('accurLocal.png', bbox_inches='tight')
    print
    print 'Best window:', bestwindow
    print 'Best accuracy:', bestacc
    return bestwindow
Пример #10
0
def main():
    """
    Walks the list of users via the passwd_generator and load the
    link and run files.

    Pushes the command line onto the XALT_Stack, then loads link.*.json
    and run.*.json records found under args.datadir into the XALT
    database, optionally deleting the JSON files afterwards.
    """

    # Push command line on to XALT_Stack
    sA = []
    sA.append("CommandLine:")
    for v in sys.argv:
        sA.append('"' + v + '"')
    XALT_Stack.push(" ".join(sA))

    args = CmdLineOptions().execute()
    xalt = XALTdb(dbConfigFn(args.dbname))

    # progress bar sized by the number of passwd entries
    num = int(capture("getent passwd | wc -l"))
    pbar = ProgressBar(maxVal=num)
    icnt = 0

    t1 = time.time()

    rmapT = Rmap(args.rmapD).reverseMapT()

    iuser = 0   # data directories processed
    lnkCnt = 0  # link records loaded
    runCnt = 0  # run records loaded

    xaltDir = args.datadir
    if (os.path.isdir(xaltDir)):
        iuser += 1
        linkFnA = files_in_tree(xaltDir, "*/link.*.json")
        XALT_Stack.push("link_json_to_db()")
        lnkCnt += link_json_to_db(xalt, args.listFn, rmapT, linkFnA)
        XALT_Stack.pop()
        if (args.delete):
            remove_files(linkFnA)
            #remove_files(files_in_tree(xaltDir, "*/.link.*.json"))

        runFnA = files_in_tree(xaltDir, "*/run.*.json")
        XALT_Stack.push("run_json_to_db()")
        runCnt += run_json_to_db(xalt, args.listFn, rmapT, runFnA)
        XALT_Stack.pop()
        if (args.delete):
            remove_files(runFnA)
            #remove_files(files_in_tree(xaltDir, "*/.run.*.json"))
    icnt += 1
    pbar.update(icnt)

    xalt.connect().close()
    pbar.fini()
    t2 = time.time()
    rt = t2 - t1
    if (args.timer):
        print("Time: ", time.strftime("%T", time.gmtime(rt)))

    print("num users: ", iuser, ", num links: ", lnkCnt, ", num runs: ",
          runCnt)
Пример #11
0
def main():
  """
  Walks the list of users via the passwd_generator and load the
  link and run files.

  Pushes the command line onto the XALT_Stack, then loads link.*.json
  and run.*.json records found under args.datadir into the XALT
  database, optionally deleting the JSON files afterwards.
  """

  # Push command line on to XALT_Stack
  sA = []
  sA.append("CommandLine:")
  for v in sys.argv:
    sA.append('"'+v+'"')
  XALT_Stack.push(" ".join(sA))

  args   = CmdLineOptions().execute()
  xalt   = XALTdb(dbConfigFn(args.dbname))

  # progress bar sized by the number of passwd entries
  num    = int(capture("getent passwd | wc -l"))
  pbar   = ProgressBar(maxVal=num)
  icnt   = 0

  t1     = time.time()

  rmapT  = Rmap(args.rmapD).reverseMapT()

  iuser  = 0   # data directories processed
  lnkCnt = 0   # link records loaded
  runCnt = 0   # run records loaded

  xaltDir = args.datadir
  if (os.path.isdir(xaltDir)):
    iuser   += 1
    linkFnA  = files_in_tree(xaltDir, "*/link.*.json")
    XALT_Stack.push("link_json_to_db()")
    lnkCnt  += link_json_to_db(xalt, args.listFn, rmapT, linkFnA)
    XALT_Stack.pop()
    if (args.delete):
      remove_files(linkFnA)
      #remove_files(files_in_tree(xaltDir, "*/.link.*.json"))

    runFnA   = files_in_tree(xaltDir, "*/run.*.json")
    XALT_Stack.push("run_json_to_db()")
    runCnt  += run_json_to_db(xalt, args.listFn, rmapT, runFnA)
    XALT_Stack.pop()
    if (args.delete):
      remove_files(runFnA)
      #remove_files(files_in_tree(xaltDir, "*/.run.*.json"))
  icnt += 1
  pbar.update(icnt)

  xalt.connect().close()
  pbar.fini()
  t2 = time.time()
  rt = t2 - t1
  if (args.timer):
    print("Time: ", time.strftime("%T", time.gmtime(rt)))

  print("num users: ", iuser, ", num links: ", lnkCnt, ", num runs: ", runCnt)
Пример #12
0
def generateActions(backupDataSet, config):
	"""Build the list of backup Actions for one scanned data set.

	Walks backupDataSet.fileDirSet and, for each entry, decides whether
	to copy, hardlink or delete it based on where it exists (source
	only, both, or compare only) and on the configured backup mode,
	updating the global `stats` counters along the way.

	Args:
		backupDataSet: scanned data set exposing sourceDir, compareDir
			and fileDirSet.
		config: backup configuration mapping; keys used here are
			"mode", "versioned", "compare_with_last_backup" and
			"compare_method".

	Returns:
		list of Action objects describing the work to perform.
	"""
	inNewDir = None
	actions = []
	progbar = ProgressBar(50, 1000, len(backupDataSet.fileDirSet))
	
	for i, element in enumerate(backupDataSet.fileDirSet):
		progbar.update(i)

		# source\compare: present in the source only -> copy
		if element.inSourceDir and not element.inCompareDir:
			stats.files_to_copy += 1
			stats.bytes_to_copy += element.fileSize
			# entries under a directory already marked new inherit its flag
			if inNewDir != None and element.path.startswith(inNewDir):
				actions.append(Action("copy", element.isDirectory, name=element.path, htmlFlags="inNewDir"))
			else:
				if element.isDirectory:
					inNewDir = element.path
					actions.append(Action("copy", True, name=element.path, htmlFlags="Folder"))
				else:
					actions.append(Action("copy", False, name=element.path))

		# source&compare: present in both
		elif element.inSourceDir and element.inCompareDir:
			if element.isDirectory:
				if config["versioned"] and config["compare_with_last_backup"]:
					# Formerly, only empty directories were created. This step was changed, as we want to create all directories
					# explicitly for setting their modification times later
					if dirEmpty(os.path.join(backupDataSet.sourceDir, element.path)):
						actions.append(Action("copy", True, name=element.path, htmlFlags="emptyFolder"))
					else:
						actions.append(Action("copy", True, name=element.path, htmlFlags="Folder"))
			else:
				# same content -> hardlink (only in hardlink mode)
				if filesEq(os.path.join(backupDataSet.sourceDir, element.path), os.path.join(backupDataSet.compareDir, element.path), config["compare_method"]):
					if config["mode"] == "hardlink":
						actions.append(Action("hardlink", False, name=element.path))
						stats.files_to_hardlink += 1
						stats.bytes_to_hardlink += element.fileSize
				# different content -> copy
				else:
					actions.append(Action("copy", False, name=element.path))
					stats.files_to_copy += 1
					stats.bytes_to_copy += element.fileSize

		# compare\source: present in the compare dir only -> delete in mirror mode
		elif not element.inSourceDir and element.inCompareDir:
			if config["mode"] == "mirror":
				if not config["compare_with_last_backup"] or not config["versioned"]:
					actions.append(Action("delete", element.isDirectory, name=element.path))
					stats.files_to_delete += 1
					stats.bytes_to_delete += element.fileSize
	print("") # so the progress output from before ends with a new line
	return actions
	
Пример #13
0
def main():
    """Post-process a scan-result JSON-lines file.

    Reads args.input line by line, optionally enriching each JSON
    record (reverse DNS, whois, HTTP/HTTPS/SSH protocol data), stamps
    it with the run date and schema version, and writes it to
    args.output. With --zmap-log the input is instead parsed as a zmap
    log and dumped as a single JSON document.

    NOTE(review): input/output handles are never closed explicitly;
    they are only released at interpreter exit. The
    `json.dumps(..., encoding=...)` fallback is Python 2 only.
    """
    args = argument_parser()
    progress_bar = ProgressBar(args.input)
    date = parse_date(args.date)

    input = open(args.input, 'r')
    output = open(args.output, 'w')

    # zmap-log mode short-circuits the per-line pipeline entirely
    if args.zmap_log:
        log_dict = zmap_log(args.port, date, input)

        output.write(json.dumps(log_dict))
        sys.exit(0)

    progress_bar.start()

    for line in input:
        progress_bar.update(1)
        data = json.loads(line)

        # drop records that are just scan errors
        if args.clean_errors and clean_json(data):
            continue

        if args.dns_reverse:
            data = dns_reverse(data)

        if args.whois:
            data = ip_whois(data)

        if args.http:
            data = http_protocol(data, args.old_data)

        if args.https:
            data = https_protocol(data, date)

        if args.ssh:
            data = ssh_protocol()

        data['date'] = date.strftime("%Y-%m-%d")
        data['schema_version'] = '1.0'

        # fall back to latin1 for records json cannot encode directly
        try:
            output.write(json.dumps(data) + '\n')
        except UnicodeDecodeError:
            output.write(json.dumps(data, encoding='latin1') + '\n')

    progress_bar.finish()
Пример #14
0
def makeMatrix(y, x):
    """Create a y-by-x matrix (list of lists) filled with None.

    Args:
        y: number of rows.
        x: number of columns.

    Returns:
        A list of y rows, each an independent list of x None entries.
    """
    matrix = []
    pbar = ProgressBar(y, "Creating Matrix...")

    for _ in range(y):
        # A fresh list per row so rows do not share storage.
        matrix.append([None] * x)
        pbar.update()
    return matrix
Пример #15
0
def readFile(it):
    """Read 3D matrix iteration `it` from disk into a list of lists.

    Args:
        it: iteration number identifying the matrix file.

    Returns:
        The matrix as a list of rows, each a list of floats.
    """
    matrix = []
    fileT = "./3D Maps/3D Matrix("+str(it)+").txt"
    size = getFileSize(fileT)
    heading = "Reading in matrix("+str(it)+")..."
    pbar = ProgressBar(size, heading)

    # Fix: the original never closed the file; 'with' guarantees it.
    with open(fileT) as file:
        for line in file:
            matrix.append([float(element) for element in line.rstrip().split(',')])
            pbar.update()
    return matrix
Пример #16
0
  def convert_save(self, filename):
    """Convert *filename* in place with a progress bar.

    Every line is run through self.__parse and written to a temporary
    file (<filename>.<convertTo>); on success the temporary file
    replaces the original. Source and temp handles are always closed.
    """
    widgets = ['    ', Bar('#'), ' ', BarLabel('Parsing "%s" ....' % filename, 'Parsing "%s" Done' % filename), ' | ',Percentage(),' | ', ETA(), '    ']
    tmpname = filename + '.' + self.convertTo
    fr = open(filename)
    fw = open(tmpname, 'w')
    bar = ProgressBar(widgets=widgets, maxval=os.path.getsize(filename)).start()
    try:
      # readline-based loop so fr.tell() stays usable for progress
      line = fr.readline()
      while line:
        fw.write(self.__parse(line))
        bar.update(fr.tell())
        line = fr.readline()

      shutil.move(tmpname, filename)
      bar.finish()
    finally:
      fr.close()
      fw.close()
Пример #17
0
def main():
    """Load per-user XALT link/run JSON files into the XALT database.

    Walks every user from passwd_generator(), loads link.*.json and
    run.*.json records found under ~/.xalt.d into the database, and
    optionally deletes the JSON files afterwards.
    """

    args = CmdLineOptions().execute()
    xalt = XALTdb(ConfigFn)

    # progress bar sized by the number of passwd entries
    num = int(capture("getent passwd | wc -l"))
    pbar = ProgressBar(maxVal=num)
    icnt = 0

    t1 = time.time()

    rmapT = Rmap(args.rmapD).reverseMapT()

    iuser = 0   # users with an .xalt.d directory
    lnkCnt = 0  # link records loaded
    runCnt = 0  # run records loaded

    for user, hdir in passwd_generator():
        xaltDir = os.path.join(hdir, ".xalt.d")
        if (os.path.isdir(xaltDir)):
            iuser += 1
            linkFnA = files_in_tree(xaltDir, "*/link.*.json")
            lnkCnt += link_json_to_db(xalt, user, rmapT, linkFnA)
            if (args.delete):
                remove_files(linkFnA)
                remove_files(files_in_tree(xaltDir, "*/.link.*.json"))

            runFnA = files_in_tree(xaltDir, "*/run.*.json")
            runCnt += run_json_to_db(xalt, user, rmapT, runFnA)
            if (args.delete):
                remove_files(runFnA)
                remove_files(files_in_tree(xaltDir, "*/.run.*.json"))
        icnt += 1
        pbar.update(icnt)

    xalt.connect().close()
    pbar.fini()
    t2 = time.time()
    rt = t2 - t1
    if (args.timer):
        print("Time: ", time.strftime("%T", time.gmtime(rt)))

    print("num users: ", iuser, ", num links: ", lnkCnt, ", num runs: ",
          runCnt)
Пример #18
0
def main():
    """Compare altitude extremes between raw drone data and a matrix file.

    Prompts for a drone-data file and a matrix iteration, scans the raw
    data for the min/max of the third column's absolute value, then
    prints those alongside the extremes reported by maxMatrix().
    """
    altMax = None
    altMin = None
    fileName = "./Drone Data/" + input("Enter file name: ")
    it = input("Enter iteration: ")
    it = "./3D Maps/3D Matrix(" + it + ").txt"

    pbar = ProgressBar(getFileSize(fileName), "Reading in data...")

    # 'with' guarantees the data file is closed even on error
    with open(fileName) as file:
        for line in file:
            # each comma field may look like "label value"; keep the last token
            vals = [element.split(' ')[-1] for element in line.rstrip().split(',')]

            # third column holds the altitude -- TODO confirm schema
            altitude = abs(float(vals[2]))
            if altMax is None:
                altMax = altitude
                altMin = altitude
            elif altitude > altMax:
                altMax = altitude
            elif altitude < altMin:
                altMin = altitude

            pbar.update()  #updates the prgress bar

    maxM, minM = maxMatrix(it)

    print("Max altitude in matrix.= ", maxM)
    print("Min altitude in matrix.= ", minM)
    print("Max altitude...........= ", altMax)
    print("Min altitude...........= ", altMin)

    input("**Hit ENTER to close**")  #keeps the terminal open
    return
Пример #19
0
def main():
  """Load per-user XALT link/run JSON files into the XALT database.

  Walks every user from passwd_generator(), loads link.*.json and
  run.*.json records found under ~/.xalt.d into the database, and
  optionally deletes the JSON files afterwards.
  """

  args   = CmdLineOptions().execute()
  xalt   = XALTdb(ConfigFn)

  # progress bar sized by the number of passwd entries
  num    = int(capture("getent passwd | wc -l"))
  pbar   = ProgressBar(maxVal=num)
  icnt   = 0

  t1     = time.time()

  rmapT  = Rmap(args.rmapD).reverseMapT()

  iuser  = 0   # users with an .xalt.d directory
  lnkCnt = 0   # link records loaded
  runCnt = 0   # run records loaded

  for user, hdir in passwd_generator():
    xaltDir = os.path.join(hdir,".xalt.d")
    if (os.path.isdir(xaltDir)):
      iuser   += 1
      linkFnA  = files_in_tree(xaltDir, "*/link.*.json")
      lnkCnt  += link_json_to_db(xalt, user, rmapT, linkFnA)
      if (args.delete):
        remove_files(linkFnA)
        remove_files(files_in_tree(xaltDir, "*/.link.*.json"))

      runFnA   = files_in_tree(xaltDir, "*/run.*.json")
      runCnt  += run_json_to_db(xalt, user, rmapT, runFnA)
      if (args.delete):
        remove_files(runFnA)
        remove_files(files_in_tree(xaltDir, "*/.run.*.json"))
    icnt += 1
    pbar.update(icnt)

  xalt.connect().close()
  pbar.fini()
  t2 = time.time()
  rt = t2 - t1
  if (args.timer):
    print("Time: ", time.strftime("%T", time.gmtime(rt)))

  print("num users: ", iuser, ", num links: ", lnkCnt, ", num runs: ", runCnt)
Пример #20
0
    def classify(self, testData):
        """
        classify() classifies each data item in the input by finding
        the label with the highest log-joint probability.

        Posterior distributions are also stored in self.posteriors for
        later data analysis.

        Keyword Arguments:
        testData -- the test data to classify
        """
        guesses = []
        self.posteriors = [
        ]  # Log posteriors are stored for later data analysis.
        # (unused `counter` and `size` locals from the original removed)
        progressBar = ProgressBar(100, len(testData), "Classifying Data")
        for index, datum in enumerate(testData):
            progressBar.update(index)
            posterior = self.calculateLogJointProbabilities(datum)
            guesses.append(posterior.argMax())
            self.posteriors.append(posterior)
        progressBar.clear()
        return guesses
Пример #21
0
def backgroundSubtraction(inputfile):
    """
    Receives a file containing paths to images in jpg format and their
    labels. The background is subtracted from each image and the new
    image is saved into a `bg<SIZE_IMG>` folder. A new path file
    (`pathbg_label.txt`) is generated containing the path to the image
    without the background and its respective label.

    Parameters:
    -----------
    inputfile : string
        path file consumed by `pathfiles.genDictFromPaths()`

    Notes:
    ------
    The dictionary generated by `pathfiles.genDictFromPaths()` has the form:
        dic {id_data: {activity: [(img, class)]}}
    """
    fout = open(join(PATH, 'pathbg_label.txt'), 'w')
    dic, n = pathfiles.genDictFromPaths(inputfile)
    pb = ProgressBar(n)

    for data in sorted(dic):
        for activity in dic[data]:
            imgs = sorted(dict(dic[data][activity]).keys())
            # train the MOG2 subtractor on the first 100 frames of the activity
            fgbg = cv2.BackgroundSubtractorMOG2()
            fgbg = trainBackground(fgbg, data, activity, imgs[:100])

            for idimg, y in sorted(dic[data][activity]):
                path_img = join(PATH, 'data' + str(data), activity,
                                'img' + str(SIZE_IMG),
                                str(idimg) + '.jpg')
                dirbg = join(PATH, 'data' + str(data), activity,
                             'bg' + str(SIZE_IMG))
                if not isdir(dirbg):
                    os.mkdir(dirbg)
                path_out = join(dirbg, str(idimg) + '.jpg')
                # apply the trained subtractor and save the foreground mask
                img = cv2.imread(path_img, 1)
                fgmask = fgbg.apply(img)
                cv2.imwrite(path_out, fgmask)
                fout.write('%s %d\n' % (path_out, y))
                pb.update()
Пример #22
0
def testEcart(nMax, p, nomMethode, fois = 10):
    """
    Measure the gap between an approximation algorithm and the exact
    solution for graph cover.

    The approximate algorithm is named by `nomMethode`; the exact
    reference is the improved branching algorithm (vertices taken by
    decreasing degree, with degree-1 elimination). Random graphs with
    n vertices are generated for 10 values of n from nMax/10 up to
    nMax, with constant edge probability p; each (n, p) pair is
    repeated `fois` times.

    Args:
        nMax: largest number of vertices to test.
        p: constant edge probability.
        nomMethode: name of the approximate method to test.
        fois (optional): repetitions per value of n.

    Returns:
        res: numpy array of shape (10, fois); entry (i, j) is the
            ratio between the size of the solution found by nomMethode
            and the exact solution, on a graph with n[i] vertices at
            run j.
        n: numpy array of the 10 vertex counts used.
    """
    n = np.linspace(nMax / 10, nMax, 10, dtype=int)
    res = np.zeros((n.size, fois))
    progressBar = ProgressBar(maxValue=n.size * fois)

    for idx in range(n.size):
        for rep in range(fois):
            progressBar.update(idx * fois + rep + 1)
            graphe = Graphe(nbSommets=n[idx], probaArete=p)
            approx = getattr(graphe, nomMethode)()
            exact, _ = graphe.algoBranchementAmeliore(sommetMax = True, elimDegre1 = True)
            # ratio of cover sizes; an empty exact cover counts as 1
            res[idx, rep] = len(approx) / len(exact) if len(exact) != 0 else 1
    print("")

    return res, n
Пример #23
0
def findAllMissing(
        table,
        acc=0):  # acc = 0 is normal accuracy, acc = 1 is high accuracy
    """Fill every missing entry of *table* in place via solveMissing().

    With acc == 1 a second full pass is run over the table for higher
    accuracy (and the progress bar is sized accordingly).

    Returns the (mutated) table.
    """
    size = len(table) * 2 if acc == 1 else len(table)
    pbar = ProgressBar(size, "Packing Matrix...")

    # one pass normally; a second identical pass in high-accuracy mode
    passes = 2 if acc == 1 else 1
    for _ in range(passes):
        for pos in range(len(table)):
            table[pos] = solveMissing(table, pos)
            pbar.update()
    return table
Пример #24
0
def defineFeatures(imageList, chop):
    """
    Build binary (white vs. non-white) pixel features for each image.

    A pixel feature at (x, y) is 0 when the pixel value is 0 (white)
    and 1 otherwise. The outer `chop` pixels of every image are
    skipped for faster (but less accurate) classification.
    """
    featureList = []
    features = []
    progressBar = ProgressBar(100, len(imageList),
                              "Getting Features for Images")
    for idx, image in enumerate(imageList):
        progressBar.update(idx)

        # 0 for a white pixel, 1 for anything else
        imgFeature = Counter()
        for row in range(chop, len(image) - chop):
            for col in range(chop, len(image[row]) - chop):
                imgFeature[(row, col)] = 0 if image[row][col] == 0 else 1

        featureList.append(imgFeature)

    progressBar.clear()

    # Collect the (x, y) positions once, for use by the perceptron and
    # Naive Bayes classifiers.
    if imageList:
        image = imageList[0]
        for row in range(chop, len(image) - chop):
            for col in range(chop, len(image[row]) - chop):
                features.append((row, col))

    return featureList, features
Пример #25
0
def testTempsN(nMax, p, nomMethode, fois = 10, **kwargs):
    """
    Measure the average running time of a graph-cover algorithm.

    Random graphs with n vertices are generated for 10 values of n
    from nMax/10 up to nMax, with constant edge probability p. For
    each (n, p) pair the time is averaged over `fois` graphs.

    Args:
        nMax: largest number of vertices to test.
        p: constant edge probability.
        nomMethode: name of the graph method to benchmark.
        fois (optional): repetitions per (n, p) pair.
        **kwargs (optional): extra arguments forwarded to the method.

    Returns:
        res: numpy array of 10 mean running times (seconds), one per n.
        n: numpy array of the 10 vertex counts used.
    """
    n = np.linspace(nMax / 10, nMax, 10, dtype=int)
    res = np.zeros(n.shape)
    progressBar = ProgressBar(maxValue=n.size * fois)

    for idx in range(n.size):
        for rep in range(fois):
            progressBar.update(idx * fois + rep + 1)
            graphe = Graphe(nbSommets=n[idx], probaArete=p)
            debut = t.process_time()
            getattr(graphe, nomMethode)(**kwargs)
            res[idx] += t.process_time() - debut
    print("")

    return res / fois, n
Пример #26
0
def searchForBest(inputfile, tries=100, step=2):
    """Search over smoothing-window sizes for the best filter accuracy.

    Reads (path, label, prediction) triples from `inputfile`, then for
    each window size in range(1, tries, step) runs the majority-vote
    Filter over the predictions, measuring accuracy against the
    labels. Saves an accuracy-vs-window plot to 'accur.png' and
    returns the best window size.

    NOTE(review): Python 2 code (xrange, dict.iteritems, print
    statements) -- do not mix with the Python 3 snippets nearby.
    """
    paths, ys, preds = [], [], []
    with open(inputfile) as fin:
        for n, line in enumerate(fin):
            path, y, pred = line.strip().split()
            paths.append(path)
            ys.append(y)
            preds.append(pred)

    pb = ProgressBar(len(range(1, tries, step)))
    accs = [calculateAccuracy(ys, preds)]  # window 0 = unfiltered accuracy
    x = [0]
    bestwindow = 0
    bestacc = 0.0
    for window in xrange(1, tries, step):
        f = Filter(preds, window)
        newp = []
        for target, lwin in f:
            dc = Counter(lwin)
            # majority label within the window replaces the prediction
            target = max(dc.iteritems(), key=operator.itemgetter(1))[0]
            newp.append(target)

        accuracy = calculateAccuracy(ys, newp)
        accs.append(accuracy)
        if accuracy > bestacc:
            bestacc = accuracy
            bestwindow = window
        pb.update()
    x.extend(range(1, tries, step))
    #print len(x), len(accs)
    plt.plot(x, accs)
    plt.savefig('accur.png', bbox_inches='tight')
    print
    print 'Best window:', bestwindow
    print 'Best accuracy:', bestacc
    return bestwindow
Пример #27
0
def main():
  """
  Read XALT link/run records from a syslog file into the XALT db.

  Each syslog line containing "XALT_LOGGING" carries a record of the form
  "link:<syshost>:<base64-json>" or "run:<syshost>:<base64-json>".
  Decoded records are stored via XALTdb.link_to_db() / run_to_db();
  undecodable entries are counted (badCnt) and skipped, since they may
  have been truncated at the syslog message size limit.
  """

  # Push the command line onto the XALT stack for error reporting.
  sA = []
  sA.append("CommandLine:")
  for v in sys.argv:
    sA.append('"'+v+'"')
  XALT_Stack.push(" ".join(sA))

  args   = CmdLineOptions().execute()
  xalt   = XALTdb(ConfigFn)

  syslogFile  = args.syslog

  # should add a check if file exists
  num    = int(capture("cat "+syslogFile+" | wc -l"))
  icnt   = 0

  t1     = time.time()

  rmapT  = Rmap(args.rmapD).reverseMapT()

  lnkCnt = 0
  runCnt = 0
  badCnt = 0
  count  = 0

  if num != 0:
    pbar   = ProgressBar(maxVal=num)
    if (os.path.isfile(syslogFile)):
      f = open(syslogFile, 'r')
      for line in f:
        if "XALT_LOGGING" in line:

          i = line.find("link:")
          if i == -1:
            i = line.find("run:")
          if i == -1:
            continue   # did not find a link or run, continue to next line

          array   = line[i:].split(":")
          kind    = array[0].strip()    # "link" or "run" (renamed from 'type'; shadowed builtin)
          syshost = array[1].strip()
          try:
            resultT = json.loads(base64.b64decode(array[2]))
            XALT_Stack.push("XALT_LOGGING: " + kind + " " + syshost)

            if (kind == "link"):
              XALT_Stack.push("link_to_db()")
              xalt.link_to_db(rmapT, resultT)
              XALT_Stack.pop()
              lnkCnt += 1
            elif (kind == "run"):
              XALT_Stack.push("run_to_db()")
              xalt.run_to_db(rmapT, resultT)
              XALT_Stack.pop()
              runCnt += 1
            else:
              print("Error in xalt_syslog_to_db")
            XALT_Stack.pop()
          except Exception:
            # Was a bare 'except:' — narrowed so KeyboardInterrupt/SystemExit
            # are no longer swallowed.  Decode/DB failures are counted only.
            badCnt += 1
            # figure out length of array[2], as it might be
            # near the syslog message size limit
            strg = array[2]
            lens = len(strg)
            print("xalt_syslog_to_db: undecodable entry!  length: ", lens)

        count += 1
        pbar.update(count)
      f.close()   # bug fix: the file handle was never closed

    pbar.fini()

  t2 = time.time()
  rt = t2 - t1
  if (args.timer):
    print("Time: ", time.strftime("%T", time.gmtime(rt)))
  print("total processed : ", count, ", num links: ", lnkCnt, ", num runs: ", runCnt, ", badCnt: ", badCnt)
Пример #28
0
    def trainAndTune(self, trainingData, trainingLabels, validationData,
                     validationLabels, kgrid):
        """
        trainAndTune() trains the classifier by collecting counts over the training data and choosing the smoothing parameter among the choices in kgrid by
        using the validation data. This method should store the right parameters
        as a side-effect and should return the best smoothing parameters.

        Keyword Arguments:
        trainingData -- training data for the perceptron
        trainingLabels -- labels for the associated training data
        validationData -- validation data for the perceptron tuning function
        validationLabels -- labels for the associated validation data
        kgrid -- a list of possible k values to try for smoothing

        Returns the chosen k (also stored as self.k).  Side effects: sets
        self.probLabel (label priors) and self.probFeature (per-label
        feature probabilities for the winning k).

        NOTE(review): Python 2 code (print statements).
        """

        # We begin by creating the prior probabilities for each of the labels
        # and the features based on the counts in the training data.
        countLabel = util.Counter()  # in form k = label, v = numOfL
        countFeature = util.Counter()
        # We begin looking over the training data here.
        progressBar = ProgressBar(100, len(trainingData), "Counting Data")
        for i in range(len(trainingData)):
            # update our progress bar
            progressBar.update(i)

            label = trainingLabels[i]
            # Labels are counted at each point they are seen here.
            countLabel[label] += 1
            # Then, if we haven't seen the label, we add it to the feature counter.
            if label not in countFeature:
                countFeature[label] = util.Counter()
            # Finally, we loop over the features for each datum and add each feature once
            # for each occurrence.
            for feature in trainingData[i]:
                countFeature[label][feature] += trainingData[i][feature]
        progressBar.clear()

        # P(label) prior: normalized label counts.
        self.probLabel = copy.deepcopy(countLabel)
        self.probLabel.normalize()

        # At this point we have the counts, and we want to see what level of smoothing
        # increases our accuracy the most over the training set. Essentially, we just
        # create all of the probabilities from the feature counts while adding the smoothing
        # and classify the training data each time and pick whatever was most accurate.
        kClassifications = util.Counter()
        probForK = util.Counter()
        numCorrectK = util.Counter()
        print "Validation Accuracy"
        print "==================="
        for k in kgrid:
            # make counter for probabilities for each k
            probForK[k] = util.Counter()
            # make counters for probabilities for each label
            for label in self.legalLabels:
                probForK[k][label] = util.Counter()
                # find probability of each feature given each label
                progressBar = ProgressBar(
                    100, len(self.features),
                    "Getting Probabilities for Features, Label {0}".format(
                        label))
                for index, feature in enumerate(self.features):
                    progressBar.update(index)
                    if countFeature[label] != 0:
                        # Laplace smoothing: (count + k) / (labelCount + k * numFeatures)
                        probForK[k][label][feature] = float(
                            countFeature[label][feature] +
                            k) / (countLabel[label] + k * len(self.features))
                progressBar.clear()

            # set probabilities for features and classify validation data
            self.probFeature = probForK[k]
            classificationLabels = self.classify(validationData)

            # check how much of the data was classified correctly
            correct = 0
            for i in range(len(classificationLabels)):
                if classificationLabels[i] == validationLabels[i]:
                    correct += 1

            # print accuracy for each k
            print "k = {0}, number of correct classifications = {1}".format(
                k, correct)
            # store the number of correct classifications for k value
            numCorrectK[k] = correct

        # pick k from our list of possible k values
        self.k = None
        for k in numCorrectK:
            # find k with the highest number of correct classifications
            # if there is a tie, use a lower k value
            if (self.k == None or numCorrectK[self.k] < numCorrectK[k]
                    or (numCorrectK[self.k] == numCorrectK[k] and k < self.k)):
                self.k = k
        self.probFeature = probForK[self.k]

        # print final choice for k
        print "K chosen = {0}".format(self.k)

        return self.k
Пример #29
0
def executeActionList(dataSet):
    """Apply the actions (copy / delete / hardlink) recorded for *dataSet*,
    then re-apply directory modification timestamps.

    Phase 2 must be separate because copying into a directory resets that
    directory's mtime.  Per-action errors are logged and counted in
    stats.backup_errors; processing always continues with the next action.
    """
    logging.info("Applying actions for the target \"" + dataSet.name + "\"")
    if len(dataSet.actions) == 0:
        logging.warning("There is nothing to do for the target \"" +
                        dataSet.name + "\"")
        return

    os.makedirs(dataSet.targetDir, exist_ok=True)
    progbar = ProgressBar(50, 1000, len(dataSet.actions))
    # Phase 1: apply the actions
    for i, action in enumerate(dataSet.actions):
        progbar.update(i)

        actionType = action["type"]
        params = action["params"]
        try:
            if actionType == "copy":
                fromPath = os.path.join(dataSet.sourceDir, params["name"])
                toPath = os.path.join(dataSet.targetDir, params["name"])
                logging.debug('copy from "' + fromPath + '" to "' + toPath +
                              '"')
                #TODO: remove the manual checks for isFile etc., switch to action["isDir"]
                if os.path.isfile(fromPath):
                    os.makedirs(os.path.dirname(toPath), exist_ok=True)
                    shutil.copy2(fromPath, toPath)
                    # If copy2 doesn't fail, getsize shouldn't either
                    stats.bytes_copied += os.path.getsize(fromPath)
                    stats.files_copied += 1
                elif os.path.isdir(fromPath):
                    os.makedirs(toPath, exist_ok=True)
                else:
                    # We know there is a problem, because isfile and isdir both return false. Most likely permissions or a missing file,
                    # in which case the error handling is done in the permission check. If not, throw a general error
                    accessible, _ = filesize_and_permission_check(fromPath)
                    if accessible:
                        logging.error(
                            "Entry \"" + fromPath +
                            "\" exists but is neither a file nor a directory.")
                        stats.backup_errors += 1
            elif actionType == "delete":
                path = os.path.join(dataSet.targetDir, params["name"])
                logging.debug('delete file "' + path + '"')
                # Bug fix: count a deletion only when something was actually
                # removed; the original incremented stats.files_deleted even
                # when the path did not exist.
                if os.path.isfile(path):
                    stats.bytes_deleted += os.path.getsize(path)
                    os.remove(path)
                    stats.files_deleted += 1
                elif os.path.isdir(path):
                    shutil.rmtree(path)
                    stats.files_deleted += 1
            elif actionType == "hardlink":
                fromPath = os.path.join(dataSet.compareDir, params["name"])
                toPath = os.path.join(dataSet.targetDir, params["name"])
                logging.debug('hardlink from "' + fromPath + '" to "' +
                              toPath + '"')
                toDirectory = os.path.dirname(toPath)
                os.makedirs(toDirectory, exist_ok=True)
                hardlink(fromPath, toPath)
                # If hardlink doesn't fail, getsize shouldn't either
                stats.bytes_hardlinked += os.path.getsize(fromPath)
                stats.files_hardlinked += 1
            else:
                logging.error("Unknown action type: " + actionType)
        except Exception as e:
            logging.error(e)
            stats.backup_errors += 1
    print("")  # so the progress output from before ends with a new line

    # Phase 2: Set the modification timestamps for all directories
    # This has to be done in a separate step, as copying into a directory will reset its modification timestamp
    logging.info(
        "Applying directory modification timestamps for the target \"" +
        dataSet.name + "\"")
    progbar.update(0)
    for i, action in enumerate(dataSet.actions):
        progbar.update(i)
        params = action["params"]
        if not action["isDir"]:
            continue
        try:
            fromPath = os.path.join(dataSet.sourceDir, params["name"])
            toPath = os.path.join(dataSet.targetDir, params["name"])
            logging.debug('set modtime for "' + toPath + '"')
            modTime = os.path.getmtime(fromPath)
            os.utime(toPath, (modTime, modTime))
        except Exception as e:
            logging.error(e)
            stats.backup_errors += 1
    print("")  # so the progress output from before ends with a new line
Пример #30
0
    # NOTE(review): fragment — the enclosing 'def' header is missing from this
    # chunk; weights_path, tested_models, caffe_command, caffe_model,
    # caffe_gpu, caffe_iterations and output_file are defined outside this view.
    qt = os.listdir(weights_path)
    # Presumably half the directory entries are .caffemodel files — TODO confirm.
    pb = ProgressBar(len(qt) / 2)
    for k, weight_file in enumerate(sorted(qt)):

        if "caffemodel" not in weight_file:
            continue

        # The iteration number is the last integer embedded in the file name.
        iteration = re.findall("\d+", weight_file)[-1]

        if iteration in tested_models:
            #		print iteration, "model already tested. Skip this model!"
            continue

#		print "-----> Processing model", iteration, "<-----"
#update_progress((k+1)/2,len(qt)/2)
        pb.update()
        caffe_weights = "-weights " + os.path.join(weights_path,
                                                   weight_file) + " "
        command = caffe_command + caffe_model + caffe_weights + caffe_gpu + caffe_iterations
        output_file.write("Testing model " + weight_file + "\n")
        # Run the caffe test command, merging stderr into stdout so both
        # streams can be scraped from one pipe.
        p = subprocess.Popen(command,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT,
                             shell=True)
        loss = []
        accuracy = []
        while True:
            line = p.stdout.readline()
            if not line:
                break
            line = line.strip()
Пример #31
0
def main():
    """Build a 2D altitude matrix from drone GPS data and write 3D map files.

    Reads comma-separated lines — inferred from the vals[0..2] indexing to be
    latitude, longitude, altitude (TODO confirm against the data files) —
    computes lat/long bounds and the max altitude, bins the points into a
    matrix at the requested resolution, fills missing cells via
    findAllMissing(), and writes the result with writeBack().
    """
    ACCURACY = 1            #controls the accuracy when solving for missing data points
                            #1 = high accuracy 
                            #0 = standard accuracy    
    longs = set()       #unique longitudes
    latMin = None       #minimum latitudes
    longMin = None      #minimum longitude
    latMax = None       #maximum latitude
    longMax = None      #maximum longitude
    altMax = None       #maximum altitude
    x = 0               #current line number
    size = 0
    interval = float(input("Enter the inches per pixel: "))
    fileName = input("Enter file name: ")
    fileName = "./Drone Data/" + fileName      
    
    size = getFileSize(fileName)
    
    pbar = ProgressBar(size,"Reading in data...")  
    
    #reads in the data from the file
    file = open(fileName)
    for line in file:   
        points = []
        vals = line.rstrip().split(',')
        #for element in vals:
            #temp = element.split(' ')
            #points.append(temp[len(temp)-1])
        #vals = points        
        longs.add(round(float(vals[1]),2))       #places unique longs in a set          
        
        #determines the max and min latitudes
        if latMax == None:
            latMax = abs(round(float(vals[0]),2))
            latMin = abs(round(float(vals[0]),2))
        elif abs(round(float(vals[0]),2)) > latMax:
            latMax = abs(round(float(vals[0]),2))
        elif abs(round(float(vals[0]),2)) < latMin:
            latMin = abs(round(float(vals[0]),2))        
        
        #determines the max and min longitudes
        if longMax == None:
            longMax = abs(round(float(vals[1]),2))
            longMin = abs(round(float(vals[1]),2))
        elif abs(round(float(vals[1]),2)) > longMax:
            longMax = abs(round(float(vals[1]),2))
        elif abs(round(float(vals[1]),2)) < longMin:
            longMin = abs(round(float(vals[1]),2))
            
        #determine the max altitude
        if altMax == None:
            altMax = abs(float(vals[2]))
        elif abs(float(vals[2])) > altMax:
            altMax = abs(float(vals[2]))
            
        pbar.update()       #updates the prgress bar
    file.close()            #closes the file 
    
    # Second pass: bin each point into the matrix at the chosen resolution.
    file = open(fileName)
    interval = 5 - int(1/interval)  
    if interval == 0:
        interval = 1
    matrix = makeMatrix(int(((round((float(latMax)-float(latMin)),2) * 100))/interval)+1,int(((round((float(longMax)-float(longMin)),2) * 100))/interval)+1)
    pbar = ProgressBar(size,"Filling matrix...")
    count = 0
    num = 0
    
    for line in file:
        points = []
        vals = line.rstrip().split(',')
        #for element in vals:
            #temp = element.split(' ')
            #points.append(temp[len(temp)-1])
        #vals = points
        x = int((round((abs(float(vals[0])) - float(latMin)),2) * 100))
        y = int((round((abs(float(vals[1])) - float(longMin)),2) * 100))
        try:
            # Out-of-range points raise IndexError and are counted in 'count'.
            matrix[int(x/interval)][int(y/interval)] = altMax - abs(float(vals[2]))
            num += 1
        except:
            count = count + 1
            
        pbar.update()
    file.close()
    
    matrix = findAllMissing(matrix,ACCURACY)    
    writeBack(matrix)
    
    print()
    print("Number missed...................= ",count)
    print("Number added....................= ",num)
    print("Number of alts..................= ",size)
    print("Size of the matrix..............= ",(len(matrix) * len(matrix[0])))
    print("Matrix height...................= ",len(matrix) - 1)
    print("Matrix width....................= ",len(matrix[0]) - 1)    
    print("Difference in latitude..........= ",int(round((float(latMax)-float(latMin)),2) * 100))
    print("Difference in longitude.........= ",int(round(float((longMax)-float(longMin)),2) * 100))    
    print("Average dist between points.....= ", interval," cm(s)") 
    print("Average number of Longs per alt.= ",round(size/len(longs),2))
    print(latMin,"--->",latMax)
    print(longMin,"--->",longMax)    
    
    a = input("**Hit ENTER to close**")         #keeps the terminal open    
    return
Пример #32
0
def writeBack(matrix):
    """Write the altitude matrix and its index lines to the 3D Maps files.

    Creates "./3D Maps/3D Latitudes(<it>).txt" and "3D Longitudes(<it>).txt"
    (a single comma-separated index line each: 0..rows-1 / 0..cols-1) and
    "3D Matrix(<it>).txt" (one comma-separated line per matrix row), where
    <it> comes from getIteration().  A ProgressBar is ticked once per value
    written.

    Bug fix: the original called ``file.close`` without parentheses for the
    first two files, so they were never closed; ``with`` blocks now
    guarantee close/flush.
    """
    size = len(matrix) + len(matrix[0]) + int((len(matrix) * len(matrix[0])))
    pbar = ProgressBar(size,"Writing points to file...")    #initializes the progress bar

    #gets the appropriate file name
    it = str(getIteration())

    #writes the latitude index line to the text file
    parts = []
    for i in range(len(matrix)):
        parts.append(str(i))
        pbar.update()
    with open("./3D Maps/3D Latitudes(" + it + ").txt", "w+") as file:
        file.write(",".join(parts) + "\n")

    #writes the longitude index line to the text file
    parts = []
    for i in range(len(matrix[0])):
        parts.append(str(i))
        pbar.update()
    with open("./3D Maps/3D Longitudes(" + it + ").txt", "w+") as file:
        file.write(",".join(parts) + "\n")

    #writes the matrix to the text file, one comma-separated line per row
    with open("./3D Maps/3D Matrix(" + it + ").txt", "w+") as file:
        for row in matrix:
            cells = []
            for element in row:
                cells.append(str(element))
                pbar.update()
            file.write(",".join(cells) + "\n")

    temp = "Done. 3D Map("+it+") has been created. You may now close the program."
    print(temp)
    return
Пример #33
0
def main():
    """
    Read XALT records from syslog (plus any leftover file) into the XALT db.

    Two passes over the same files:
      1. parse every "run" record and register it with a Filter to gather
         per-job statistics;
      2. parse again and load link / run / pkg records into the database,
         applying the filter to "run" records.
    Unparseable or undecodable lines are counted and skipped; remaining
    partial records are written back via parseSyslog.writeRecordT().
    """

    # Push the command line onto the XALT stack for error reporting.
    sA = []
    sA.append("CommandLine:")
    for v in sys.argv:
        sA.append('"' + v + '"')
    XALT_Stack.push(" ".join(sA))

    args = CmdLineOptions().execute()
    xalt = XALTdb(args.confFn)
    syslogFile = args.syslog

    icnt = 0
    t1 = time.time()

    try:
        rmapT = Rmap(args.rmapD).reverseMapT()
    except Exception as e:
        print(e, file=sys.stderr)
        print("Failed to read reverseMap file -> exiting")
        sys.exit(1)

    lnkCnt = 0
    pkgCnt = 0
    runCnt = 0
    badCnt = 0
    count = 0

    recordT = {}

    # Process the leftover file (partial records from a previous run) first.
    fnA = [args.leftover, syslogFile]

    parseSyslog = ParseSyslog(args.leftover)

    #-----------------------------
    # Figure out size in bytes.

    fnSz = 0
    for fn in fnA:
        if (not os.path.isfile(fn)):
            continue
        fnSz += os.path.getsize(fn)

    #----------------------------------------------------------
    # Count the number and sum the run_time for all scalar jobs

    filter = Filter(100)   # NOTE: shadows the builtin 'filter' in this scope
    pbar = ProgressBar(maxVal=fnSz, fd=sys.stdout)
    for fn in fnA:
        if (not os.path.isfile(fn)):
            continue

        old = (fn == args.leftover)

        lineNo = 0
        f = open(fn, 'r')
        for line in f:
            lineNo += 1
            # Progress is tracked in bytes consumed, not lines.
            count += len(line)
            pbar.update(count)
            if (not ("XALT_LOGGING" in line)):
                continue
            try:
                t, done = parseSyslog.parse(line, args.syshost, old)
            except Exception as e:
                #print(e, file=sys.stderr)
                #print("lineNo:",lineNo,"file:",fn,"line:",line, file=sys.stderr)
                #print("Now continuing processing!", file=sys.stderr)
                continue

            # Pass 1 only cares about completed "run" records.
            if (not done or t['kind'] != "run"):
                continue

            ##################################
            # If the json conversion fails,
            # then ignore record and keep going
            value = False

            try:
                value = json.loads(t['value'])
                filter.register(value)
            except Exception as e:
                #print("fn:",fn,"line:",lineNo,"value:",t['value'],file=sys.stderr)
                continue

        f.close()
    pbar.fini()

    filter.report_stats()

    # Pass 2: re-parse everything and load records into the database.
    badsyslog = 0
    count = 0
    parseSyslog = ParseSyslog(args.leftover)
    pbar = ProgressBar(maxVal=max(fnSz, 1), fd=sys.stdout)
    for fn in fnA:
        if (not os.path.isfile(fn)):
            continue

        old = (fn == args.leftover)

        f = open(fn, 'r')
        for line in f:
            count += len(line)
            pbar.update(count)
            if (not ("XALT_LOGGING" in line)):
                continue
            try:
                t, done = parseSyslog.parse(line, args.syshost, old)
            except Exception as e:
                badsyslog += 1
                continue

            if (not done):
                continue

            ##################################
            # If the json conversion fails,
            # then ignore record and keep going
            try:
                value = json.loads(t['value'])
            except Exception as e:
                continue

            try:
                XALT_Stack.push("XALT_LOGGING: " + t['kind'] + " " +
                                t['syshost'])

                if (t['kind'] == "link"):
                    XALT_Stack.push("link_to_db()")
                    xalt.link_to_db(rmapT, value)
                    XALT_Stack.pop()
                    lnkCnt += 1
                elif (t['kind'] == "run"):
                    # Only runs accepted by the pass-1 filter are stored.
                    if (filter.apply(value)):
                        XALT_Stack.push("run_to_db()")
                        xalt.run_to_db(rmapT, value)
                        XALT_Stack.pop()
                        runCnt += 1
                elif (t['kind'] == "pkg"):
                    XALT_Stack.push("pkg_to_db()")
                    xalt.pkg_to_db(t['syshost'], value)
                    XALT_Stack.pop()
                    pkgCnt += 1
                else:
                    print("Error in xalt_syslog_to_db", file=sys.stderr)
                XALT_Stack.pop()
            except Exception as e:
                print(e, file=sys.stderr)
                badCnt += 1

        f.close()

    pbar.fini()

    t2 = time.time()
    rt = t2 - t1
    if (args.timer):
        print("Time: ", time.strftime("%T", time.gmtime(rt)))
    print("total processed : ", count, ", num links: ", lnkCnt, ", num runs: ",
          runCnt, ", pkgCnt: ", pkgCnt, ", badCnt: ", badCnt, ", badsyslog: ",
          badsyslog)

    # if there is anything left in recordT file write it out to the leftover file.
    parseSyslog.writeRecordT()
Пример #34
0
def main():
  """
  Read XALT link/run records from a syslog file (plus any leftover file
  from a previous run) into the XALT db, then rewrite the leftover file
  with records that are still incomplete.
  """

  # Push the command line onto the XALT stack for error reporting.
  sA = []
  sA.append("CommandLine:")
  for v in sys.argv:
    sA.append('"'+v+'"')
  XALT_Stack.push(" ".join(sA))

  args       = CmdLineOptions().execute()
  xalt       = XALTdb(dbConfigFn(args.dbname))
  syslogFile = args.syslog

  # should add a check if file exists
  num    = int(capture("cat "+syslogFile+" | wc -l"))
  icnt   = 0

  t1     = time.time()

  rmapT  = Rmap(args.rmapD).reverseMapT()

  lnkCnt = 0
  runCnt = 0
  badCnt = 0
  count  = 0

  # recordT accumulates partially-assembled (multi-part) syslog records.
  recordT = {}

  if (num == 0):
    return

  pbar   = ProgressBar(maxVal=num)

  # Process the leftover file (if present) before the fresh syslog file.
  fnA = [ args.leftover, syslogFile ]

  for fn in fnA:
    if (not os.path.isfile(fn)):
      continue

    f=open(fn, 'r')
    for line in f:
      if (not ("XALT_LOGGING" in line)):
        continue
      t, done = parseSyslog(line, recordT)
      if (not done):
        continue

      try:
        XALT_Stack.push("XALT_LOGGING: " + t['kind'] + " " + t['syshost'])

        if ( t['kind'] == "link" ):
          XALT_Stack.push("link_to_db()")
          xalt.link_to_db(rmapT, json.loads(t['value']))
          XALT_Stack.pop()
          lnkCnt += 1
        elif ( t['kind'] == "run" ):
          XALT_Stack.push("run_to_db()")
          xalt.run_to_db(rmapT, json.loads(t['value']))
          XALT_Stack.pop()
          runCnt += 1
        else:
          print("Error in xalt_syslog_to_db", file=sys.stderr)
        XALT_Stack.pop()
      except Exception as e:
        print(e, file=sys.stderr)
        badCnt += 1

      count += 1
      pbar.update(count)

    f.close()

  pbar.fini()

  t2 = time.time()
  rt = t2 - t1
  if (args.timer):
    print("Time: ", time.strftime("%T", time.gmtime(rt)))
  print("total processed : ", count, ", num links: ", lnkCnt, ", num runs: ", runCnt, ", badCnt: ", badCnt)

  # Keep the previous leftover file as a .old backup before rewriting it.
  leftover = args.leftover
  if (os.path.isfile(leftover)):
    os.rename(leftover, leftover + ".old")

  # if there is anything left in recordT file write it out to the leftover file.

  if (recordT):
    f = open(leftover, "w")
    for key in recordT:
      r = recordT[key]
      s = r.prt("XALT_LOGGING V=2", key)
      f.write(s)
    f.close()
Пример #35
0
def main():
    """
    Read XALT link/run records from a syslog file (plus any leftover file
    from a previous run) into the XALT db, then rewrite the leftover file
    with records that are still incomplete.
    """

    # Push the command line onto the XALT stack for error reporting.
    sA = []
    sA.append("CommandLine:")
    for v in sys.argv:
        sA.append('"' + v + '"')
    XALT_Stack.push(" ".join(sA))

    args = CmdLineOptions().execute()
    xalt = XALTdb(dbConfigFn(args.dbname))
    syslogFile = args.syslog

    # should add a check if file exists
    num = int(capture("cat " + syslogFile + " | wc -l"))
    icnt = 0

    t1 = time.time()

    rmapT = Rmap(args.rmapD).reverseMapT()

    lnkCnt = 0
    runCnt = 0
    badCnt = 0
    count = 0

    # recordT accumulates partially-assembled (multi-part) syslog records.
    recordT = {}

    if (num == 0):
        return

    pbar = ProgressBar(maxVal=num)

    # Process the leftover file (if present) before the fresh syslog file.
    fnA = [args.leftover, syslogFile]

    for fn in fnA:
        if (not os.path.isfile(fn)):
            continue

        f = open(fn, 'r')
        for line in f:
            if (not ("XALT_LOGGING" in line)):
                continue
            t, done = parseSyslog(line, recordT)
            if (not done):
                continue

            try:
                XALT_Stack.push("XALT_LOGGING: " + t['kind'] + " " +
                                t['syshost'])

                if (t['kind'] == "link"):
                    XALT_Stack.push("link_to_db()")
                    xalt.link_to_db(rmapT, json.loads(t['value']))
                    XALT_Stack.pop()
                    lnkCnt += 1
                elif (t['kind'] == "run"):
                    XALT_Stack.push("run_to_db()")
                    xalt.run_to_db(rmapT, json.loads(t['value']))
                    XALT_Stack.pop()
                    runCnt += 1
                else:
                    print("Error in xalt_syslog_to_db", file=sys.stderr)
                XALT_Stack.pop()
            except Exception as e:
                print(e, file=sys.stderr)
                badCnt += 1

            count += 1
            pbar.update(count)

        f.close()

    pbar.fini()

    t2 = time.time()
    rt = t2 - t1
    if (args.timer):
        print("Time: ", time.strftime("%T", time.gmtime(rt)))
    print("total processed : ", count, ", num links: ", lnkCnt, ", num runs: ",
          runCnt, ", badCnt: ", badCnt)

    # Keep the previous leftover file as a .old backup before rewriting it.
    leftover = args.leftover
    if (os.path.isfile(leftover)):
        os.rename(leftover, leftover + ".old")

    # if there is anything left in recordT file write it out to the leftover file.

    if (recordT):
        f = open(leftover, "w")
        for key in recordT:
            r = recordT[key]
            s = r.prt("XALT_LOGGING V=2", key)
            f.write(s)
        f.close()
Пример #36
0
def main():
  """
  Walks the list of users via the passwd_generator and loads each user's
  link, run and pkg json files into the XALT db.
  """

  # Find transmission style
  transmission = os.environ.get("XALT_TRANSMISSION_STYLE")
  if (not transmission):
    transmission = "@XALT_TRANSMISSION_STYLE@"

  if (not transmission):
    transmission = "file"

  transmission = transmission.lower()


  # Push command line on to XALT_Stack
  sA = []
  sA.append("CommandLine:")
  for v in sys.argv:
    sA.append('"'+v+'"')
  XALT_Stack.push(" ".join(sA))

  args   = CmdLineOptions().execute()
  xalt   = XALTdb(args.confFn)

  num    = int(capture("getent passwd | wc -l"))
  pbar   = ProgressBar(maxVal=num)
  icnt   = 0

  t1     = time.time()

  rmapT  = Rmap(args.rmapD).reverseMapT()

  iuser  = 0
  lnkCnt = 0
  runCnt = 0
  pkgCnt = 0

  for user, hdir in passwd_generator():
    # Bug fix: this line was garbled in the source
    # ('XALT_Stack.push("User: "******"link")', a syntax error that also
    # left xaltDir undefined) — restored to push the user tag and build
    # the per-user link directory.
    XALT_Stack.push("User: " + user)
    xaltDir = build_xaltDir(user, hdir, transmission, "link")
    if (os.path.isdir(xaltDir)):
      iuser   += 1
      linkFnA  = files_in_tree(xaltDir, "*/link." + args.syshost + ".*.json")
      XALT_Stack.push("link_json_to_db()")
      lnkCnt  += link_json_to_db(xalt, args.listFn, rmapT, args.delete, linkFnA)
      XALT_Stack.pop()

    xaltDir = build_xaltDir(user, hdir, transmission, "run")
    if (os.path.isdir(xaltDir)):
      runFnA   = files_in_tree(xaltDir, "*/run." + args.syshost + ".*.json")
      XALT_Stack.push("run_json_to_db()")
      runCnt  += run_json_to_db(xalt, args.listFn, rmapT, args.delete, runFnA)
      XALT_Stack.pop()

    xaltDir = build_xaltDir(user, hdir, transmission, "pkg")
    if (os.path.isdir(xaltDir)):
      pkgFnA   = files_in_tree(xaltDir, "*/pkg." + args.syshost + ".*.json")
      XALT_Stack.push("pkg_json_to_db()")
      pkgCnt  += pkg_json_to_db(xalt, args.listFn, args.syshost, args.delete, pkgFnA)
      XALT_Stack.pop()


    icnt += 1
    v = XALT_Stack.pop()
    carp("User",v)
    pbar.update(icnt)

  xalt.connect().close()
  pbar.fini()
  t2 = time.time()
  rt = t2 - t1
  if (args.timer):
    print("Time: ", time.strftime("%T", time.gmtime(rt)))

  print("num users: ", iuser, ", num links: ", lnkCnt, ", num pkgs: ", pkgCnt, ", num runs: ", runCnt)
Пример #37
0
def main():
    """
    Read XALT records from a syslog file into the XALT db.

    Each syslog line containing "XALT_LOGGING" is expected to carry a
    record of the form ``...link:<syshost>:<base64-json>`` or
    ``...run:<syshost>:<base64-json>``.  The base64 payload is decoded,
    parsed as JSON and inserted via XALTdb.  Undecodable entries are
    counted (``badCnt``) but otherwise skipped.
    """

    # Record the command line on the XALT debug stack.
    sA = ["CommandLine:"]
    for v in sys.argv:
        sA.append('"' + v + '"')
    XALT_Stack.push(" ".join(sA))

    args = CmdLineOptions().execute()
    xalt = XALTdb(ConfigFn)

    syslogFile = args.syslog

    t1 = time.time()

    rmapT = Rmap(args.rmapD).reverseMapT()

    lnkCnt = 0
    runCnt = 0
    badCnt = 0
    count = 0
    num = 0

    # Check that the file exists BEFORE shelling out to count its lines;
    # the original ran "cat ... | wc -l" first, which fails on a missing
    # file.  (This addresses the original "should add a check" comment.)
    if os.path.isfile(syslogFile):
        num = int(capture("cat " + syslogFile + " | wc -l"))

    if num != 0:
        pbar = ProgressBar(maxVal=num)
        # "with" guarantees the file is closed (the original leaked it).
        with open(syslogFile, 'r') as f:
            for line in f:
                # Count every line up front so the progress bar stays
                # accurate even when a line is skipped below.  (The
                # original's "continue" bypassed the counter.)
                count += 1
                pbar.update(count)

                if "XALT_LOGGING" not in line:
                    continue

                # Locate the record-type marker ("link:" or "run:").
                i = line.find("link:")
                if i == -1:
                    i = line.find("run:")
                if i == -1:
                    continue  # neither a link nor a run record

                array = line[i:].split(":")
                kind = array[0].strip()      # "link" or "run" (renamed: 'type' shadowed the builtin)
                syshost = array[1].strip()
                try:
                    resultT = json.loads(base64.b64decode(array[2]))
                    XALT_Stack.push("XALT_LOGGING: " + kind + " " + syshost)

                    if (kind == "link"):
                        XALT_Stack.push("link_to_db()")
                        xalt.link_to_db(rmapT, resultT)
                        XALT_Stack.pop()
                        lnkCnt += 1
                    elif (kind == "run"):
                        XALT_Stack.push("run_to_db()")
                        xalt.run_to_db(rmapT, resultT)
                        XALT_Stack.pop()
                        runCnt += 1
                    else:
                        # Defensive: find() above should make this unreachable.
                        print("Error in xalt_syslog_to_db")
                    XALT_Stack.pop()
                except Exception:
                    # Narrowed from a bare "except:" so Ctrl-C / SystemExit
                    # still propagate.  Undecodable entries are usually
                    # truncated by the syslog message-size limit; report
                    # the payload length to help diagnose that.
                    badCnt += 1
                    strg = array[2]
                    lens = len(strg)
                    print(
                        "xalt_syslog_to_db: undecodable entry!  length: ",
                        lens)

        pbar.fini()

    #  what should be done if there are errors?
    #    what went wrong?
    #    how do we restart?
    #
    #  xalt.connect().close()

    t2 = time.time()
    rt = t2 - t1
    if (args.timer):
        print("Time: ", time.strftime("%T", time.gmtime(rt)))

    print("total processed : ", count, ", num links: ", lnkCnt, ", num runs: ",
          runCnt, ", badCnt: ", badCnt)