Example #1
0
 def test_parse_next(self):
     """parseNext consumes exactly one ad and leaves the rest of the stream unread."""
     # New-style (bracketed) ads: only the first ad is consumed.
     # NOTE(review): writing str to a TemporaryFile is Python-2-only — the
     # py3-compatible variant writes bytes and re-wraps the fd; confirm the
     # intended interpreter before porting.
     tf = tempfile.TemporaryFile()
     tf.write("[foo = 1] [bar = 2]")
     tf.seek(0)
     ad = classad.parseNext(tf)
     self.assertEqual(len(ad), 1)
     self.assertEqual(ad["foo"], 1)
     # assertEquals is a deprecated alias (removed in Python 3.12);
     # use assertEqual consistently, as the other assertions here do.
     self.assertEqual(" [bar = 2]", tf.read())
     # Old-style ads separated by a blank line: parsing stops at the separator.
     tf = tempfile.TemporaryFile()
     tf.write("-----\nfoo = 1\n\nbar = 2\n")
     tf.seek(0)
     ad = classad.parseNext(tf)
     self.assertEqual(len(ad), 1)
     self.assertEqual(ad["foo"], 1)
     self.assertEqual("bar = 2\n", tf.read())
Example #2
0
 def test_parse_next(self):
     """parseNext consumes exactly one ad and leaves the rest of the stream unread."""
     # New-style (bracketed) ads: only the first ad is consumed.
     # NOTE(review): writing str to a TemporaryFile is Python-2-only — the
     # py3-compatible variant writes bytes and re-wraps the fd; confirm the
     # intended interpreter before porting.
     tf = tempfile.TemporaryFile()
     tf.write("[foo = 1] [bar = 2]")
     tf.seek(0)
     ad = classad.parseNext(tf)
     self.assertEqual(len(ad), 1)
     self.assertEqual(ad["foo"], 1)
     # assertEquals is a deprecated alias (removed in Python 3.12);
     # use assertEqual consistently, as the other assertions here do.
     self.assertEqual(" [bar = 2]", tf.read())
     # Old-style ads separated by a blank line: parsing stops at the separator.
     tf = tempfile.TemporaryFile()
     tf.write("-----\nfoo = 1\n\nbar = 2\n")
     tf.seek(0)
     ad = classad.parseNext(tf)
     self.assertEqual(len(ad), 1)
     self.assertEqual(ad["foo"], 1)
     self.assertEqual("bar = 2\n", tf.read())
Example #3
0
def parse_classad(classad_file):
    """Read job ads from *classad_file* and group them by cluster.

    Returns a list of (cluster_ad, proc_ads) tuples, one per ClusterId seen.
    cluster_ad is seeded from the first ad of the cluster (after ClusterId
    and ProcId are stripped); proc_ads collects (ad, 1) pairs for every ad
    in that cluster.
    """
    all_ads = []

    # ClusterId of the cluster currently being accumulated; 0 means "none yet".
    current_cluster_id = 0

    while True:
        try:
            ad = classad.parseNext(classad_file)
        except StopIteration:
            # We are done
            break
        
        # Start a new cluster grouping whenever the ClusterId changes.
        # NOTE(review): this assumes the first ad carries a non-zero ClusterId;
        # otherwise cluster_ad/proc_ads below are unbound (NameError). Confirm
        # against the producer of these ads.
        if 'ClusterId' in ad and ad['ClusterId'] != current_cluster_id:
            current_cluster_id = ad['ClusterId']

            # create ad objects, add to the output, and keep updating the objects
            cluster_ad = classad.ClassAd()
            proc_ads = []
            all_ads.append((cluster_ad, proc_ads))

        # Adjust ad contents
        # NOTE(review): raises KeyError if ClusterId/ProcId are absent —
        # presumably condor_submit dry-run output always includes both; verify.
        del ad['ClusterId']
        del ad['ProcId']

        if 'x509userproxy' in ad:
            # condor_submit dry-run / dump do not evaluate the proxy contents
            set_x509_attributes(ad)

        # The first ad of a cluster seeds the shared cluster ad.
        if len(cluster_ad) == 0:
            cluster_ad.update(ad)

        proc_ads.append((ad, 1))

    return all_ads
Example #4
0
 def test_parse_next(self):
     """parseNext consumes one ad and leaves the remainder of the stream unread."""
     # New-style (bracketed) ad list: only the first ad should be parsed.
     source = tempfile.TemporaryFile()
     source.write(b"[foo = 1] [bar = 2]")
     source.seek(0)
     if sys.version_info > (3, ):
         # Re-wrap the binary fd as a text-mode file; keep the original
         # object alive so the descriptor is not closed by GC.
         source, keepalive = open(source.fileno()), source
     parsed = classad.parseNext(source)
     self.assertEqual(len(parsed), 1)
     self.assertEqual(parsed["foo"], 1)
     self.assertEqual(" [bar = 2]", source.read())
     # Old-style ads separated by a blank line: parsing stops at the separator.
     source = tempfile.TemporaryFile()
     source.write(b"-----\nfoo = 1\n\nbar = 2\n")
     source.seek(0)
     if sys.version_info > (3, ):
         source, keepalive = open(source.fileno()), source
     parsed = classad.parseNext(source)
     self.assertEqual(len(parsed), 1)
     self.assertEqual(parsed["foo"], 1)
     self.assertEqual("bar = 2\n", source.read())
Example #5
0
 def test_old_classad_v2(self):
     """Every 'key = value' line in the old-format ad file must round-trip
     through parseNext, and the parsed ad must contain no extra keys."""
     # Context managers close the handles deterministically — the original
     # leaked both file descriptors until garbage collection.
     with open("tests/test.old.ad") as ad_file:
         ad = classad.parseNext(ad_file)
     with open("tests/test.old.ad") as ad_file:
         contents = ad_file.read()
     keys = []
     for line in contents.splitlines():
         info = line.split(" = ")
         if len(info) != 2:
             continue
         self.assertTrue(info[0] in ad)
         # repr() is the idiomatic spelling of __repr__().
         self.assertEqual(repr(ad.lookup(info[0])), info[1])
         keys.append(info[0])
     # No keys beyond those present in the file.
     for key in ad:
         self.assertTrue(key in keys)
Example #6
0
 def test_old_classad_v2(self):
     """Every 'key = value' line in the old-format ad file must round-trip
     through parseNext, and the parsed ad must contain no extra keys."""
     # Context managers close the handles deterministically — the original
     # leaked both file descriptors until garbage collection.
     with open("tests/test.old.ad") as ad_file:
         ad = classad.parseNext(ad_file)
     with open("tests/test.old.ad") as ad_file:
         contents = ad_file.read()
     keys = []
     for line in contents.splitlines():
         info = line.split(" = ")
         if len(info) != 2:
             continue
         self.assertTrue(info[0] in ad)
         # repr() is the idiomatic spelling of __repr__().
         self.assertEqual(repr(ad.lookup(info[0])), info[1])
         keys.append(info[0])
     # No keys beyond those present in the file.
     for key in ad:
         self.assertTrue(key in keys)
def test_can_parse_ads_across_pipes(ad_string, parser):
    """
    The parser must be set manually. Auto-discovery won't work on a pipe because
    it can't rewind using seek.
    """
    read_end, write_end = os.pipe()

    # Write the whole ad into the pipe, then close so the reader sees EOF.
    with open(write_end, mode="w") as writer:
        writer.write(ad_string)

    with open(read_end, mode="r") as reader:
        ad = classad.parseNext(reader, parser)

    assert ad["foo"] == 1
Example #8
0
 def test_pipes(self):
     """parseNext must work on non-seekable streams when a parser is given,
     and must refuse parser auto-detection on them."""
     # One regression we saw in the ClassAd library is the new
     # parsing routines would fail if tell/seek was non-functional.
     r, w = os.pipe()
     rfd = os.fdopen(r, 'r')
     wfd = os.fdopen(w, 'w')
     wfd.write("[foo = 1]")
     wfd.close()
     # With an explicit parser, a pipe works even though it cannot seek.
     ad = classad.parseNext(rfd, parser=classad.Parser.New)
     # assertEquals is a deprecated alias (removed in Python 3.12).
     self.assertEqual(tuple(dict(ad).items()), (('foo', 1),))
     self.assertRaises(StopIteration, classad.parseNext, rfd, classad.Parser.New)
     rfd.close()
     r, w = os.pipe()
     rfd = os.fdopen(r, 'r')
     wfd = os.fdopen(w, 'w')
     wfd.write("[foo = 1]")
     wfd.close()
     # Without an explicit parser, auto-detection needs seek() and must fail.
     self.assertRaises(ValueError, classad.parseNext, rfd)
     rfd.close()
Example #9
0
 def test_pipes(self):
     # Regression check: the new parsing routines must cope with streams
     # whose tell/seek are non-functional (OS pipes).
     read_end, write_end = os.pipe()
     reader = os.fdopen(read_end, 'r')
     writer = os.fdopen(write_end, 'w')
     writer.write("[foo = 1]")
     writer.close()
     # An explicit parser makes the non-seekable stream usable.
     parsed = classad.parseNext(reader, parser=classad.Parser.New)
     self.assertEqual(tuple(dict(parsed).items()), (('foo', 1), ))
     self.assertRaises(StopIteration, classad.parseNext, reader,
                       classad.Parser.New)
     reader.close()
     read_end, write_end = os.pipe()
     reader = os.fdopen(read_end, 'r')
     writer = os.fdopen(write_end, 'w')
     writer.write("[foo = 1]")
     writer.close()
     # Parser auto-detection requires seek(), so it must raise on a pipe.
     self.assertRaises(ValueError, classad.parseNext, reader)
     reader.close()
# close all the opening files
job_ID_history.close()
condor_history_log.close()

# Parse the generated condor history log file, evaluate each attribute,
# drop undefined ones, de-duplicate on GlobalJobId, and append the result
# to the processed log. Renamed from `input`/`output`, which shadowed the
# Python builtins of the same names.
history_in = open("/home/bockelman/zzhang/ELK_stack/condor_history_log_backup/" + \
              "condor_history_" + timestamp_str + ".log", "r")
processed_out = open("/var/log/condor_history/p_condor_history_" + \
              timestamp_str + ".log", "a+")

global_job_id_set = set()
while True:
  try:
    # Keep the try body minimal: only parseNext signals end-of-stream.
    ad = classad.parseNext(history_in)
  except StopIteration:
    break
  # Iterate over a snapshot of the keys — deleting entries while iterating
  # the ad directly can invalidate the iterator.
  for k in list(ad.keys()):
    value = ad.eval(k)  # evaluate once instead of twice per key
    if value is classad.Value.Undefined:
      del ad[k]
    else:
      ad[k] = value
  if "GlobalJobId" in ad:
    if ad["GlobalJobId"] in global_job_id_set:
      continue  # duplicate job: skip the write
    global_job_id_set.add(ad["GlobalJobId"])
  # Ads without a GlobalJobId are always written, matching the original flow.
  processed_out.write(ad.printOld() + "\n")

# Close the handles the original script leaked.
history_in.close()
processed_out.close()
Example #11
0
# Persist the checkpoint, then post-process the raw condor history log.
job_ID_history = open(checkpoint_path, "w")
job_ID_history.write(output)

# close all the opening files
job_ID_history.close()
condor_history_log.close()

# parse the generated condor history log file and evaluate attributes
# write to new log files
# NOTE(review): `input` and `output` shadow the Python builtin `input` and
# rebind the `output` string written to the checkpoint above.
input = open(raw_log, "r")
output = open(processed_log, "a+")

global_job_id_set = set()
while True:
  # NOTE(review): the matching except clause (presumably `except
  # StopIteration: break`, as in the sibling script) appears truncated
  # in this excerpt — confirm against the full source.
  try:
    ad = classad.parseNext(input)
    # Evaluate every attribute; drop the ones that evaluate to Undefined.
    for k in ad:
      if ad.eval(k) is classad.Value.Undefined:
        del ad[k]
      else:
        ad[k] = ad.eval(k)
    # De-duplicate on GlobalJobId; ads without one are always written.
    if "GlobalJobId" in ad.keys():
      if ad["GlobalJobId"] in global_job_id_set:
        pass
      else:
        global_job_id_set.add(ad["GlobalJobId"])
        check_data_locality(ad)
        output.write(ad.printOld()+"\n")
    else:
      check_data_locality(ad)
      output.write(ad.printOld()+"\n")
def test_parse_next_ad_from_file_like_object(ad_file):
    """Consecutive parseNext calls on one handle yield successive ads."""
    with ad_file.open(mode="r") as handle:
        first = classad.parseNext(handle)
        second = classad.parseNext(handle)

    assert first["foo"] == "bar"
    assert second["foo"] == "wiz"
def test_parse_next_ad_from_string(ad_string):
    """parseNext advances through a string source between calls."""
    first = classad.parseNext(ad_string)
    second = classad.parseNext(ad_string)

    assert first["foo"] == "bar"
    assert second["foo"] == "wiz"