Example #1
 def testTransferOne(self):
     createJDS.createTestSet(createJDS.jsonFileData,
                             self.initKwargs[0],
                             rootDir=self.testMoveFrom)
     storage = JDS.JsonDumpStorage(self.testDir, **self.initKwargs[0])
     oldStorage = JDS.JsonDumpStorage(self.testMoveFrom,
                                      **self.initKwargs[0])
     itemNumber = 0
     xmas = datetime.datetime(2001, 12, 25, 12, 25, tzinfo=UTC)
     for id in createJDS.jsonFileData.keys():
         createLinks = 0 == itemNumber % 2
         removeOld = 0 == itemNumber % 3
         itemNumber += 1
         storage.transferOne(id,
                             oldStorage,
                             createLinks=createLinks,
                             removeOld=removeOld,
                             aDate=xmas)
         try:
             storage.getJson(id)
         except Exception, x:
             print '(%s): %s' % (type(x), x)
             assert False, 'Expected to find a transferred json file for %s' % id
         if createLinks:
             self.__hasLinkOrFail(storage, id)
             self.__hasDatePathOrFail(storage, id, xmas)
         if removeOld:
             assert_raises(OSError, oldStorage.getJson, id)
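The test above (and several later examples) calls private helpers __hasLinkOrFail and __hasDatePathOrFail that none of these excerpts define. A minimal sketch of what such helpers might verify, assuming the nameBranch/dateBranch layout visible in the other examples (hypothetical stand-ins, not the originals):

import os

def hasLinkOrFail(storage, uuid):
    # pass iff some symlink named after the uuid exists under the name branch
    for base, dirs, files in os.walk(storage.nameBranch):
        for entry in dirs + files:
            if entry == uuid and os.path.islink(os.path.join(base, entry)):
                return
    assert False, 'Expected a symlink for %s under %s' % (uuid, storage.nameBranch)

def hasDatePathOrFail(storage, uuid, dt):
    # pass iff some directory under the date branch encodes dt's date
    part = os.sep.join(('%04d' % dt.year, '%02d' % dt.month, '%02d' % dt.day))
    found = any(part in base for base, dirs, files in os.walk(storage.dateBranch))
    assert found, 'Expected a %s date path under %s for %s' % (part, storage.dateBranch, uuid)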
Example #2
 def testDestructiveDateWalkNotNow(self):
   createJDS.createTestSet(self.currenttimes,self.initKwargs[1],self.testDir)
   storage = JDS.JsonDumpStorage(self.testDir,**self.initKwargs[1])
   seenids = []
   for id in storage.destructiveDateWalk():
     seenids.append(id)
   assert [] == seenids
 def testMove(self):
   createJDS.createTestSet(createJDS.jsonFileData,self.initKwargs[3],self.testDir)
   storage = JDS.JsonDumpStorage(self.testDir,**self.initKwargs[3])
   os.mkdir(self.testMoveTo)
   for uuid in createJDS.jsonFileData.keys():
     storage.move(uuid,os.path.join('.','TEST-MOVETO'))
   allfiles = []
   alllinks = []
   for dir, dirs, files in os.walk(self.testDir):
     for file in files:
       allfiles.append(file)
       if os.path.islink(os.path.join(dir,file)):
         alllinks.append(file)
     for d in dirs:
       if os.path.islink(os.path.join(dir,d)):
         alllinks.append(d)
   assert [] == allfiles, 'Expect that all moved files are gone, but found %s' % allfiles
    assert [] == alllinks, 'Expect that all links are gone, but found %s' % alllinks
   allfiles = []
   alllinks = []
   expectedFiles = [x+storage.jsonSuffix for x in createJDS.jsonFileData.keys() ]
   expectedFiles.extend([x+storage.dumpSuffix for x in createJDS.jsonFileData.keys() ])
   for dir, dirs, files in os.walk(os.path.join('.','TEST-MOVETO')):
     for file in files:
       allfiles.append(file)
       assert file in expectedFiles, 'Expect that each moved file will be expected but found %s' % file
       if os.path.islink(os.path.join(dir,file)): alllinks.append(file)
     for d in dirs:
       if os.path.islink(os.path.join(dir,d)): alllinks.append(d)
   assert [] == alllinks, 'Expect no links in the move-to directory, but found %s' % alllinks
   for file in expectedFiles:
     assert file in allfiles, 'Expect that every file will be moved but did not find %s' % file
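The walk-and-collect idiom in testMove recurs almost verbatim in the testRemove and testRemoveRemovesOnlyDate examples below. A small helper could consolidate it; a sketch, not part of the original suite:

import os

def collectFilesAndLinks(root):
    # return (allfiles, alllinks): every file name below root, plus every
    # file or directory entry that is actually a symlink
    allfiles, alllinks = [], []
    for base, dirs, files in os.walk(root):
        for f in files:
            allfiles.append(f)
            if os.path.islink(os.path.join(base, f)):
                alllinks.append(f)
        for d in dirs:
            if os.path.islink(os.path.join(base, d)):
                alllinks.append(d)
    return allfiles, alllinks

# usage: allfiles, alllinks = collectFilesAndLinks(self.testDir)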
Example #4
    def testRemove(self):
        createJDS.createTestSet(createJDS.jsonFileData, self.initKwargs[2],
                                self.testDir)
        storage = JDS.JsonDumpStorage(self.testDir, **self.initKwargs[2])
        counter = 0
        for uuid in createJDS.jsonFileData.keys():
            if 0 == counter % 3:
                # test that we don't throw for missing links
                storage.markAsSeen(uuid)
            if 1 == counter % 3:
                # test that we don't throw for one missing file
                if 0 == counter % 2:
                    os.unlink(storage.getDump(uuid))
                else:
                    os.unlink(storage.getJson(uuid))
            if 2 == counter % 3:
                # test that we don't throw for both missing files, but with links
                os.unlink(storage.getJson(uuid))
                os.unlink(storage.getDump(uuid))
            storage.remove(uuid)
            counter += 1
        allfiles = []
        alllinks = []
        for dir, dirs, files in os.walk(self.testDir):
            for file in files:
                allfiles.append(file)
                if os.path.islink(os.path.join(dir, file)):
                    alllinks.append(file)
            for d in dirs:
                if os.path.islink(os.path.join(dir, d)):
                    alllinks.append(d)

        assert [] == allfiles, 'Expect that all removed files are gone, but found %s' % allfiles
        assert [] == alllinks, 'Expect that all links are gone, but found %s' % alllinks
        assert_raises(JDS.NoSuchUuidFound, storage.remove, "bogusdatax3yymmdd")
Example #5
 def markAsSeen(self):
     createJDS.createTestSet(createJDS.jsonFileData, self.initKwargs[3],
                             self.testDir)
     storage = JDS.JsonDumpStorage(self.testDir, **self.initKwargs[3])
     for uuid, data in createJDS.jsonFileData.items():
         assert os.path.islink(
             os.sep.join(
                 (storage.dateBranch, data[3],
                  uuid))), 'Expect a link from date to name for %s' % uuid
         assert os.path.islink(
             os.sep.join(
                 (storage.nameBranch, data[2],
                  uuid))), 'Expect link from name to timed for %s' % uuid
         storage.markAsSeen(uuid)  # the call under test; without it the asserts below contradict those above
         assert not os.path.islink(
             os.sep.join(
                 (storage.dateBranch, data[3],
                  uuid))), 'Expect no link from date to name for %s' % uuid
         assert not os.path.islink(
             os.sep.join(
                 (storage.nameBranch, data[2],
                  uuid))), 'Expect no link from name to date for %s' % uuid
     try:
         storage.markAsSeen(createJDS.jsonBadUuid)
         assert False, 'Expect to throw IOError from attempt to markAsSeen(non-existent-uuid)'
     except IOError:
         assert True, 'Got expected error from attempt to markAsSeen(non-existent-uuid)'
     except Exception, e:
         assert False, 'Got unexpected error %s from attempt to markAsSeen(non-existent-uuid)' % e
  def testTransferMany(self):
    createJDS.createTestSet(createJDS.jsonFileData,self.initKwargs[0],rootDir=self.testMoveFrom)
    oldStorage = JDS.JsonDumpStorage(self.testMoveFrom, **self.initKwargs[0])
    itemNumber = 0
    xmas = DT.datetime(2001,12,25,12,25)
    hasLinks = {}
    for id in createJDS.jsonFileData.keys():
      hasLinks[id] = True
      if 0 == itemNumber % 2:
        oldStorage.markAsSeen(id)
        self.__hasNoLinkOrFail(oldStorage,id)
        hasLinks[id] = False
      itemNumber += 1  # advance the alternation; the counter was otherwise never incremented

    opts = ((False,True),(True,False),(False,False)) #copyLinks, makeNewLinks
    targets = (self.testMoveTo, self.testMoveToAlt,self.testDir)
    assert len(opts) == len(targets), "set of opts must be one-to-one with set of targets, or fail"
    for i in range(len(opts)):
      aDate = None
      if opts[i][1]: aDate = xmas
      storage = JDS.JsonDumpStorage(targets[i], **self.initKwargs[0])
      storage.transferMany(createJDS.jsonFileData.keys(),oldStorage,copyLinksBoolean=opts[i][0],makeNewDateLinksBoolean=opts[i][1],aDate=aDate)
      for id in createJDS.jsonFileData.keys():
        try:
          storage.getJson(id)
        except Exception,x:
          print '(%s): %s'%(type(x),x)
          assert False, 'Expected to find a transferred json file for %s' % id
        if opts[i][1] or hasLinks[id]:
          self.__hasLinkOrFail(storage,id)
          if opts[i][1]:
            self.__hasDatePathOrFail(storage,xmas)
        if not opts[i][1] and not hasLinks[id]:
          self.__hasNoLinkOrFail(storage,id)
Example #9
 def testCleanUpCompletedAndFailedJobs_WithSaves(self):
   """
   testCleanUpCompletedAndFailedJobs_WithSaves(self):
   The default config asks for successful and failed jobs to be saved
   """
   global me
   cursor = self.connection.cursor()
   dbtestutil.fillProcessorTable(cursor,4)
   m = monitor.Monitor(me.config)
   createJDS.createTestSet(createJDS.jsonFileData,jsonKwargs={'logger':me.logger},rootDir=me.config.storageRoot)
   runInOtherProcess(m.standardJobAllocationLoop, stopCondition=(lambda : self.jobsAllocated() == 14))
   started = dbtestutil.datetimeNow(cursor)
   self.connection.commit()
   completed = started + dt.timedelta(microseconds=100)
   idTimesAndSuccessSeq = [
     [started,completed,True,1],
     [started,completed,True,3],
     [started,completed,True,5],
     [started,completed,True,11],
     [started,None,False,2],
     [started,None,False,4],
     [started,None,False,8],
     [started,None,False,12],
     ]
   dbCon,dbCur = m.getDatabaseConnectionPair()
   try:
     jobdata = self.setJobSuccess(dbCur,idTimesAndSuccessSeq)
     m.cleanUpCompletedAndFailedJobs()
   finally:
     m.databaseConnectionPool.cleanup()
   successSave = set()
   failSave = set()
   expectSuccessSave = set()
   expectFailSave = set()
   remainBehind = set()
   for dir, dirs, files in os.walk(me.config.storageRoot):
     remainBehind.update(os.path.splitext(x)[0] for x in files)
   for d in idTimesAndSuccessSeq:
     if d[2]:
       expectSuccessSave.add(d[3])
     else:
       expectFailSave.add(d[3])
   for dir,dirs,files in os.walk(me.config.saveSuccessfulMinidumpsTo):
     successSave.update((os.path.splitext(x)[0] for x in files))
   for dir,dirs,files in os.walk(me.config.saveFailedMinidumpsTo):
     failSave.update((os.path.splitext(x)[0] for x in files))
   for x in jobdata:
     if None == x[2]:
       assert not x[1] in failSave and not x[1] in successSave, "if we didn't set success state for %s, then it wasn't copied"%(x[1])
       assert x[1] in remainBehind, "if we didn't set success state for %s, then it should remain behind"%(x[1])
        assert not x[0] in expectFailSave and not x[0] in expectSuccessSave, "database should match expectations for id=%s"%(x[0])
      elif True == x[2]:
        assert not x[1] in failSave and x[1] in successSave, "if we set success for %s, it is copied to %s"%(x[1],me.config.saveSuccessfulMinidumpsTo)
        assert not x[0] in expectFailSave and x[0] in expectSuccessSave, "database should match expectations for id=%s"%(x[0])
        assert not x[1] in remainBehind, "if we did set success state for %s, then it should not remain behind"%(x[1])
      elif False == x[2]:
        assert x[1] in failSave and not x[1] in successSave, "if we set failure for %s, it is copied to %s"%(x[1],me.config.saveFailedMinidumpsTo)
        assert x[0] in expectFailSave and not x[0] in expectSuccessSave, "database should match expectations for id=%s"%(x[0])
       assert not x[1] in remainBehind, "if we did set success state for %s, then it should not remain behind"%(x[1])
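setJobSuccess is another helper these monitor tests rely on without showing it. Judging from how jobdata is consumed (x[0] a job id, x[1] a uuid, x[2] the success flag, None for jobs never stamped), it presumably updates the jobs table and returns every row; a hypothetical sketch:

def setJobSuccess(cursor, idTimesAndSuccessSeq):
    # assumed behavior: stamp the listed job rows, then hand back
    # (id, uuid, success) for all jobs so the test can verify each one
    sql = ("UPDATE jobs SET starteddatetime = %s, completeddatetime = %s, "
           "success = %s WHERE id = %s")
    for started, completed, success, jobId in idTimesAndSuccessSeq:
        cursor.execute(sql, (started, completed, success, jobId))
    cursor.execute("SELECT id, uuid, success FROM jobs ORDER BY id")
    return cursor.fetchall()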
Example #10
 def testRemoveBadUuidFromJsonDumpStorage(self):
   """
   testRemoveBadUuidFromJsonDumpStorage(self):
   This just wraps JsonDumpStorage. Assure we aren't futzing up the wrap (fail with non-exist uuid)
   """
   global me
   createJDS.createTestSet(createJDS.jsonFileData,jsonKwargs={'logger':me.logger},rootDir=me.config.storageRoot)
   mon = monitor.Monitor(me.config)
   badUuid = '0bad0bad-0bad-6666-9999-0bad20001025'
   assert_raises(monitor.UuidNotFoundException,mon.removeUuidFromJsonDumpStorage,badUuid)
 def testDestructiveDateWalk(self):
   createJDS.createTestSet(createJDS.jsonFileData,self.initKwargs[0],self.testDir)
   storage = JDS.JsonDumpStorage(self.testDir,**self.initKwargs[0])
   uuids = createJDS.jsonFileData.keys()
   seenids = []
   for id in storage.destructiveDateWalk():
      assert id in uuids, 'Expect that %s is among the uuids we stored' % id
     seenids.append(id)
   for id in uuids:
     assert id in seenids, 'Expect that we found every uuid we stored (%s) from %s' % (id,seenids)
   assert not os.listdir(storage.dateBranch), 'Expect that destructive walk will remove all date links, and their dirs'
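For reference, the contract the destructiveDateWalk tests rely on: yield every uuid linked under the date branch, deleting each date link and pruning emptied directories along the way. An illustrative sketch, not the real JsonDumpStorage implementation:

import os

def destructiveDateWalkSketch(storage):
    # walk bottom-up so emptied directories can be pruned as we go
    for base, dirs, files in os.walk(storage.dateBranch, topdown=False):
        for d in dirs:
            path = os.path.join(base, d)
            if os.path.islink(path):  # date links are named after the uuid
                os.unlink(path)
                yield d
        if base != storage.dateBranch and not os.listdir(base):
            os.rmdir(base)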
Example #12
 def testRemoveAlsoNames(self):
   """testJsonDumpStorage:TestJsonDumpStorage.testRemoveAlsoNames(self)
   Try to remove them all, and check that they are indeed all gone.
   """
   createJDS.createTestSet(createJDS.jsonFileData,self.initKwargs[2],self.testDir)
   kwargs = self.initKwargs[2]
   kwargs['cleanIndexDirectories'] = 'True'
   storage = JDS.JsonDumpStorage(self.testDir,**kwargs)
   for uuid,data in createJDS.jsonFileData.items():
     storage.remove(uuid)
   assert not os.listdir(storage.root), 'Expected them all to go, but %s'%(os.listdir(storage.root))
Example #13
 def testGetDump(self):
   createJDS.createTestSet(createJDS.jsonFileData,self.initKwargs[1],self.testDir)
   storage = JDS.JsonDumpStorage(self.testDir,**self.initKwargs[1])
   for uuid,data in createJDS.jsonFileData.items():
     expected = os.sep.join((storage.nameBranch,data[2],uuid+storage.dumpSuffix))
     got =  storage.getDump(uuid)
     assert expected == got, 'Expected dump file %s, got %s' % (expected,got)
    try:
      storage.getDump(createJDS.jsonBadUuid)
      assert False, 'Should throw OSError from attempt to getDump(non-existent-uuid)'
    except OSError,e:
      assert True
  def testRemoveOlderThan(self):
    createJDS.createTestSet(createJDS.jsonFileData,self.initKwargs[0],self.testDir)
    storage = JDS.JsonDumpStorage(self.testDir,**self.initKwargs[0])
    cutoff = DT.datetime(2008,12,26,05,0)
    youngkeys = [x for x,d in createJDS.jsonFileData.items() if DT.datetime(*[int(i) for i in d[0].split('-')]) >= cutoff]
    oldkeys = [x for x,d in createJDS.jsonFileData.items() if DT.datetime(*[int(i) for i in d[0].split('-')]) < cutoff]

    for k in youngkeys:
      assert k in createJDS.jsonFileData.keys(),"Expected %s in %s"%(k,createJDS.jsonFileData.keys())
    for k in oldkeys:
      assert k in createJDS.jsonFileData.keys()
    for k in createJDS.jsonFileData.keys():
      assert (k in youngkeys or k in oldkeys)
    storage.removeOlderThan(cutoff)
    seenuuid = {}
    seendirs = []
    for dir,dirs,files in os.walk(storage.nameBranch):
      for f in files:
        if os.path.islink(os.path.join(dir,f)):
          uuid = os.path.splitext(f)[0]
          seenuuid[uuid] = True
          assert uuid in youngkeys, 'File: Expect that each remaining link has a young uuid, got %s' % uuid
          assert not uuid in oldkeys, 'File: Expect no remaining link has old uuid, got %s' % uuid
      for d in dirs:
        if os.path.islink(os.path.join(dir,d)):
          uuid = os.path.splitext(d)[0]
          seenuuid[uuid] = True
          assert uuid in youngkeys, 'Dir: Expect that each remaining link has a young uuid, got %s' % uuid
          assert not uuid in oldkeys, 'Dir: Expect no remaining link has old uuid, got %s' % uuid
    for id in oldkeys:
      assert not id in seenuuid,'Expect that no old key is found, but %s' % id
    for id in youngkeys:
      assert id in seenuuid, 'Expect that every new key is found, but %s' % id

    seenuuid = {}
    seendirs = []
    for dir, dirs, files in os.walk(storage.dateBranch):
      for f in files:
        uuid = os.path.splitext(f)[0]
        seenuuid[uuid] = True
        assert uuid in youngkeys, 'Expect that each remaining file has a young uuid, got %s' % uuid
        assert not uuid in oldkeys, 'Expect no remaining file has old uuid, got %s' % uuid
      for d in dirs:
        uuid = os.path.splitext(d)[0]
        if '-' in uuid:
          seenuuid[uuid] = True
          assert uuid in youngkeys, 'Expect that each remaining file has a young uuid, got %s' % uuid
          assert not uuid in oldkeys, 'Expect no remaining file has old uuid, got %s' % uuid
    for id in oldkeys:
      assert not id in seenuuid,'Expect that no old key is found but %s' % id
    for id in youngkeys:
      assert id in seenuuid, 'Expect that every new key is found, but %s' % id
      assert os.path.isdir(os.path.join(storage.dateBranch,createJDS.jsonFileData[id][3]))
Example #16
 def testRemoveWithBadlyFormattedDateLink(self):
   createJDS.createTestSet(createJDS.jsonFileData,self.initKwargs[2],self.testDir)
   storage = JDS.JsonDumpStorage(self.testDir,**self.initKwargs[2])
   uuid = createJDS.jsonFileData.keys()[0]
   head, json_unused = os.path.split(storage.getJson(uuid))
   target = os.readlink(os.path.join(head,uuid))
   idx = target.index('/date/')
   target = "%s%s%s" %(target[:idx+6],target[idx+7:idx+10],target[idx+10:])
   os.unlink(os.path.join(head,uuid))
   os.symlink(target,os.path.join(head,uuid))
   #print "LINK:%s"%(os.readlink(os.path.join(head,uuid)))
   # assure that we don't throw for a badly formatted path
   storage.remove(uuid)
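The slicing above drops the single character at idx+6, i.e. the first digit after '/date/', which is what makes the rewritten link target badly formatted. A quick illustration with an assumed target shape:

target = '../../date/2008/12/25/webhead'  # shape assumed for illustration
idx = target.index('/date/')
mangled = "%s%s%s" % (target[:idx+6], target[idx+7:idx+10], target[idx+10:])
assert mangled == '../../date/008/12/25/webhead'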
Example #18
 def testDestructiveDateWalk(self):
   createJDS.createTestSet(createJDS.jsonFileData,self.initKwargs[0],self.testDir)
   storage = JDS.JsonDumpStorage(self.testDir,**self.initKwargs[0])
   uuids = createJDS.jsonFileData.keys()
   #uuidsSet = set(uuids)
   seenids = set()
   for id in storage.destructiveDateWalk():
     assert id in uuids, 'Expect that %s is among the uuids we stored\n%s' % (id,uuids)
     seenids.add(id)
   for id in uuids:
     assert id in seenids, 'Expect that we found every uuid we stored (%s) from %s' % (id,seenids)
   daily = os.listdir(storage.root)
   for d in daily:
     assert not storage.dateName in os.listdir(os.path.join(storage.root,d)), 'Expected all date subdirs to be gone, but %s'%d
Example #19
 def testGetDump(self):
   createJDS.createTestSet(createJDS.jsonFileData,self.initKwargs[1],self.testDir)
   storage = JDS.JsonDumpStorage(self.testDir,**self.initKwargs[1])
   for uuid,data in createJDS.jsonFileData.items():
     dateparts = data[0].split('-')
     daily = "%4d%02d%02d"%tuple([int(x) for x in dateparts[:3]])
     expected = os.sep.join((storage.root,daily,storage.indexName,data[2],uuid+storage.dumpSuffix))
     got =  storage.getDump(uuid)
     assert expected == got, 'Expected dump file %s, got %s' % (expected,got)
    try:
      storage.getDump(createJDS.jsonBadUuid)
      assert False, 'Should throw OSError from attempt to getDump(non-existent-uuid)'
    except OSError,e:
      assert True
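The daily directory name used in the expected path is just the first three components of the fixture's date key run through the '%4d%02d%02d' format:

dateKey = '2007-10-25-05-04'  # sample fixture date field
daily = "%4d%02d%02d" % tuple(int(x) for x in dateKey.split('-')[:3])
assert daily == '20071025'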
Example #20
 def testRemoveGoodUuidFromJsonDumpStorage(self):
   """
   testRemoveGoodUuidFromJsonDumpStorage(self):
   This really just wraps JsonDumpStorage call. Assure we aren't futzing up the wrap (succeed with existing uuids)
   """
   global me
   createJDS.createTestSet(createJDS.jsonFileData,jsonKwargs={'logger':me.logger},rootDir=me.config.storageRoot)
   createJDS.createTestSet(createJDS.jsonMoreData,jsonKwargs={'logger':me.logger},rootDir=me.config.deferredStorageRoot)
   mon = monitor.Monitor(me.config)
    goodUuid = '0b781b88-ecbe-4cc4-dead-6bbb20081225'
   # this should work the first time...
   mon.removeUuidFromJsonDumpStorage(goodUuid)
   # ... and then fail the second time
   assert_raises(monitor.UuidNotFoundException,mon.removeUuidFromJsonDumpStorage, goodUuid)
Example #24
 def testMarkAsSeen(self):
   """testNewJsonDumpStorage:TestJsonDumpStorage.testMarkAsSeen()
   somewhat bogus test: Doesn't look for failure modes
   """
   createJDS.createTestSet(createJDS.jsonFileData,self.initKwargs[0],rootDir=self.testDir)
   storage = JDS.JsonDumpStorage(self.testDir,**self.initKwargs[2])
   for ooid in createJDS.jsonFileData.keys():
     namePath,parts = storage.namePath(ooid)
     linkInName = os.path.join(namePath,ooid)
     assert os.path.islink(linkInName), 'expected %s as link'%linkInName
     dpath = os.path.join(namePath,os.readlink(linkInName))
     linkInDate = os.path.join(dpath,ooid)
     assert os.path.islink(linkInDate), 'expected %s as link'%linkInDate
     storage.markAsSeen(ooid)
     assert not os.path.exists(linkInName), 'expected %s gone'%linkInName
     assert not os.path.exists(linkInDate), 'expected %s gone'%linkInDate
Example #26
 def testLookForPriorityJobsInJsonDumpStorage(self):
   """
   testLookForPriorityJobsInJsonDumpStorage(self):
     assure that we can find each uuid in standard and deferred storage
     assure that we do not find any bogus uuid
     assure that found uuids are added to jobs table with priority 1, and priority_jobs_NNN table for processor id NNN
   """
   global me
   m = monitor.Monitor(me.config)
   createJDS.createTestSet(createJDS.jsonFileData,jsonKwargs={'logger':me.logger},rootDir=me.config.storageRoot)
   createJDS.createTestSet(createJDS.jsonMoreData,jsonKwargs={'logger':me.logger},rootDir=me.config.deferredStorageRoot)
   normUuids = createJDS.jsonFileData.keys()
   defUuids =  createJDS.jsonMoreData.keys()
   allUuids = []
   allUuids.extend(normUuids)
   allUuids.extend(defUuids)
   badUuid = '0bad0bad-0bad-6666-9999-0bad20001025'
   dbCon,dbCur = m.getDatabaseConnectionPair()
   try:
     numProcessors = 5
     dbtestutil.fillProcessorTable(self.connection.cursor(),numProcessors)
     self.markLog()
     m.lookForPriorityJobsInJsonDumpStorage(dbCur,allUuids)
     assert [] == allUuids, 'Expect that all the uuids were found and removed from the looked for "set"'
     m.lookForPriorityJobsInJsonDumpStorage(dbCur,(badUuid,))
     self.markLog()
     seg = self.extractLogSegment()
     getIdAndPrioritySql = "SELECT owner,priority from jobs WHERE uuid = %s"
     getCountSql = "SELECT count(*) from %s"
     idCounts = dict( ( (x,0) for x in range(1,numProcessors+1) ) )
     allUuids.extend(normUuids)
     allUuids.extend(defUuids)
     for uuid in allUuids:
       dbCur.execute(getIdAndPrioritySql,(uuid,))
       procid,pri = dbCur.fetchone()
       assert 1 == pri, 'Expected priority of 1 for %s, but got %s'%(uuid,pri)
       idCounts[procid] += 1
     dbCur.execute(getIdAndPrioritySql,(badUuid,))
      assert not dbCur.fetchone(), "Expect no jobs-table entry for badUuid"
     for id,expectCount in idCounts.items():
       dbCur.execute(getCountSql%('priority_jobs_%s'%id))
       seenCount = dbCur.fetchone()[0]
       assert expectCount == seenCount, 'Expected %s, got %s as count in priority_jobs_%s'%(expectCount,seenCount,id)
   finally:
     m.databaseConnectionPool.cleanup()
Example #27
    def testRemoveRemovesOnlyDate(self):
        createJDS.createTestSet(createJDS.jsonFileData, self.initKwargs[2],
                                self.testDir)
        storage = JDS.JsonDumpStorage(self.testDir, **self.initKwargs[2])
        dailies = set([])
        expectedSubs = []
        alldirs = []
        allfiles = []
        alllinks = []
        for uuid, data in createJDS.jsonFileData.items():
            dailies.add(''.join(data[0].split('-')[:3]))
            storage.remove(uuid)
        for day in dailies:
            for dir, dirs, files in os.walk(
                    os.sep.join((storage.root, day, storage.dateName))):
                for file in files:
                    allfiles.append(file)
                    if os.path.islink(os.path.join(dir, file)):
                        alllinks.append(file)
                for d in dirs:
                    if os.path.islink(os.path.join(dir, d)):
                        alllinks.append(d)
                    alldirs.append(os.path.join(dir, d))
        assert [] == allfiles, 'Expect that all removed files are gone, but found %s' % allfiles
        assert [] == alllinks, 'Expect that all links are gone, but found %s' % alllinks
        assert [] == alldirs, 'Expect that all date dirs are gone, but found %s' % alldirs

        for day in dailies:
            for dir, dirs, files in os.walk(
                    os.sep.join((storage.root, day, storage.indexName))):
                for file in files:
                    allfiles.append(file)
                    if os.path.islink(os.path.join(dir, file)):
                        alllinks.append(file)
                for d in dirs:
                    if os.path.islink(os.path.join(dir, d)):
                        alllinks.append(d)
                    alldirs.append(os.path.join(dir, d))
        assert [] == allfiles, 'Expect that all removed files are gone, but found %s' % allfiles
        assert [] == alllinks, 'Expect that all links are gone, but found %s' % alllinks
        for sub in expectedSubs:
            assert sub in alldirs, "Expect each subdirectory is still there, but didn't find %s" % sub
 def testTransferOne(self):
   createJDS.createTestSet(createJDS.jsonFileData,self.initKwargs[0],rootDir=self.testMoveFrom)
   storage = JDS.JsonDumpStorage(self.testDir,**self.initKwargs[0])
   oldStorage = JDS.JsonDumpStorage(self.testMoveFrom, **self.initKwargs[0])
   itemNumber = 0
   xmas = DT.datetime(2001,12,25,12,25)
   for id in createJDS.jsonFileData.keys():
     #case 0: copyLinks = True, makeNewDateLinks = False and there are links
     #case 1: copyLinks = True, makeNewDateLinks = False  and there are no links
     #case 2: makeNewDateLinks = True and there were existing date links
     #case 3: makeNewDateLinks = True and there were no existing date links
     copyLinks = True
     makeNewLinks = False
     removeOldLink = False
     newDate = None
     if 0 == itemNumber % 4:
       pass
     elif 1 == itemNumber % 4:
       removeOldLink = True
     elif 2 == itemNumber % 4:
       makeNewLinks = True
       newDate = xmas
     elif 3 == itemNumber % 4:
       removeOldLink = True
       makeNewLinks = True
       newDate = xmas
     itemNumber += 1
      if removeOldLink:
       oldStorage.markAsSeen(id)
       self.__hasNoLinkOrFail(oldStorage,id)
     storage.transferOne(id,oldStorage,copyLinksBoolean=copyLinks,makeNewDateLinksBoolean=makeNewLinks,aDate=newDate)
     try:
       storage.getJson(id)
     except Exception,x:
       print '(%s): %s'%(type(x),x)
       assert False, 'Expected to find a transferred json file for %s' % id
     if makeNewLinks or not removeOldLink:
       self.__hasLinkOrFail(storage,id)
       if makeNewLinks:
         self.__hasDatePathOrFail(storage,xmas)
     if not makeNewLinks and removeOldLink:
       self.__hasNoLinkOrFail(storage,id)
Example #29
  def testGetStorageFor(self):
    """
    testGetStorageFor(self):
    Test that the wrapper for JsonDumpStorage doesn't twist things incorrectly
    """
    global me
    self.markLog()
    createJDS.createTestSet(createJDS.jsonFileData,jsonKwargs={'logger':me.logger},rootDir=me.config.storageRoot)
    createJDS.createTestSet(createJDS.jsonMoreData,jsonKwargs={'logger':me.logger},rootDir=me.config.deferredStorageRoot)
    mon = monitor.Monitor(me.config)
    assert_raises(monitor.UuidNotFoundException,mon.getStorageFor,'nothing')
    expected = me.config.storageRoot.rstrip(os.sep)
    got = mon.getStorageFor('0bba929f-8721-460c-dead-a43c20071025').root
    assert expected == got, 'Expected [%s] got [%s]'%(expected,got)

    expected = me.config.deferredStorageRoot.rstrip(os.sep)
    got = mon.getStorageFor('29adfb61-f75b-11dc-b6be-001320081225').root
    assert expected == got, 'Expected [%s] got [%s]'%(expected,got)
    self.markLog()
    assert [] == self.extractLogSegment(), 'expected no logging for this test'
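Worth noting (an observation about these fixtures, not a documented contract): the last eight characters of each test uuid encode a YYYYMMDD date, which is how one uuid resolves to storageRoot and the other to deferredStorageRoot here:

standard = '0bba929f-8721-460c-dead-a43c20071025'
deferred = '29adfb61-f75b-11dc-b6be-001320081225'
assert standard[-8:] == '20071025' and deferred[-8:] == '20081225'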
Example #30
 def testUuidInJsonDumpStorage(self):
   """
   testUuidInJsonDumpStorage(self):
   Test that the wrapper for JsonDumpStorage isn't all twisted up:
     assure we find something in normal and deferred storage, and miss something that isn't there
     do NOT test that the 'markAsSeen' actually works: That should be testJsonDumpStorage's job
   """
   global me
   m = monitor.Monitor(me.config)
   createJDS.createTestSet(createJDS.jsonFileData,jsonKwargs={'logger':me.logger},rootDir=me.config.storageRoot)
   createJDS.createTestSet(createJDS.jsonMoreData,jsonKwargs={'logger':me.logger},rootDir=me.config.deferredStorageRoot)
   self.markLog()
   badUuid = '0bad0bad-0bad-6666-9999-0bad20001025'
   goodUuid = '0bba929f-8721-460c-dead-a43c20071025'
   defUuid = '29adfb61-f75b-11dc-b6be-001320081225'
   assert m.uuidInJsonDumpStorage(goodUuid), 'Dunno how that happened'
   assert m.uuidInJsonDumpStorage(defUuid), 'Dunno how that happened'
   assert not m.uuidInJsonDumpStorage(badUuid), 'Dunno how that happened'
   self.markLog()
   seg = self.extractLogSegment()
   assert [] == seg, "Shouldn't log for success or failure"
 def testRemoveAlsoNames(self):
   createJDS.createTestSet(createJDS.jsonFileData,self.initKwargs[2],self.testDir)
   kwargs = self.initKwargs[2]
   kwargs['cleanIndexDirectories'] = 'True'
   storage = JDS.JsonDumpStorage(self.testDir,**kwargs)
   expectedSubs = []
   fullSubs = []
   for uuid in createJDS.jsonFileData.keys():
     storage.remove(uuid)
     exp = os.path.join(storage.nameBranch,createJDS.jsonFileData[uuid][2][:2])
     if not exp in expectedSubs:
       expectedSubs.append(exp)
     fullSubs.append(os.path.join(storage.nameBranch,createJDS.jsonFileData[uuid][2]))
   alldirs = []
   for dir, dirs, files in os.walk(storage.nameBranch):
     for d in dirs:
       alldirs.append(os.path.join(dir,d))
   for d in fullSubs:
     assert not d in alldirs, 'Expected %s NOT in alldirs, but it was' % d
   for d in expectedSubs:
     assert d in alldirs, 'Expected %s in all dirs(%s) but it was not' % (d,alldirs)
Example #33
 def testNewEntryDirectoryOverflow(self):
   ''' tests that we write new date links in appropriate overflow dir when we get too many in the regular dir'''
   createJDS.createTestSet(createJDS.jsonFileData,self.initKwargs[3],self.testDir)
   storage = JDS.JsonDumpStorage(self.testDir,maxDirectoryEntries=3,**self.initKwargs[3])
   for uuid,data in createJDS.jsonTooMany.items():
     abspathpart = data[3]
     datetimedata = [int(x) for x in data[0].split('-')]
     storage.newEntry(uuid,webheadHostName=data[1],timestamp = DT.datetime(*datetimedata))
   datePathUpOne = os.path.join(storage.dateBranch,abspathpart)
   webheads = os.listdir(datePathUpOne)
   assert 3 == len(webheads)
   for datePath in [os.path.join(datePathUpOne,x) for x in webheads]:
     assert 3 >= len(os.listdir(datePath))
   storage2 = JDS.JsonDumpStorage(self.testDir,maxDirectoryEntries=3, **self.initKwargs[3])
   for uuid,data in createJDS.jsonMoreData.items():
     abspathpart = data[3]
     datetimedata = [int(x) for x in data[0].split('-')]
     storage2.newEntry(uuid,webheadHostName=data[1],timestamp = DT.datetime(*datetimedata))
   webheads = os.listdir(datePathUpOne)
   assert 4 == len(webheads)
   for datePath in [os.path.join(datePathUpOne,x) for x in webheads]:
     assert 3 >= len(os.listdir(datePath))
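The overflow behavior asserted above amounts to: once a webhead directory holds maxDirectoryEntries items, a sibling with the next numeric suffix is opened. A hypothetical illustration of the naming rule, not the JDS internals:

def nextOverflowName(current):
    # 'webhead02_0' -> 'webhead02_1'
    stem, n = current.rsplit('_', 1)
    return '%s_%d' % (stem, int(n) + 1)

assert nextOverflowName('webhead02_0') == 'webhead02_1'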
  def testRemoveRemovesOnlyDate(self):
    createJDS.createTestSet(createJDS.jsonFileData,self.initKwargs[2],self.testDir)
    storage = JDS.JsonDumpStorage(self.testDir,**self.initKwargs[2])
    expectedSubs = []
    for uuid in createJDS.jsonFileData.keys():
      storage.remove(uuid)
      expectedSubs.append(os.path.join(storage.nameBranch,createJDS.jsonFileData[uuid][2]))
    allfiles = []
    alllinks = []
    alldirs = []
    for dir, dirs, files in os.walk(storage.dateBranch):
      for file in files:
        allfiles.append(file)
        if os.path.islink(os.path.join(dir,file)):
          alllinks.append(file)
      for d in dirs:
        if os.path.islink(os.path.join(dir,d)):
          alllinks.append(d)
        alldirs.append(os.path.join(dir,d))
    assert [] == allfiles, 'Expect that all removed files are gone, but found %s' % allfiles
     assert [] == alllinks, 'Expect that all links are gone, but found %s' % alllinks
    assert [] == alldirs, 'Expect that all date dirs are gone, but found %s' % alldirs

    for dir,dirs,files in os.walk(storage.nameBranch):
      for file in files:
        allfiles.append(file)
        if os.path.islink(os.path.join(dir,file)):
          alllinks.append(file)
      for d in dirs:
        if os.path.islink(os.path.join(dir,d)):
          alllinks.append(d)
        alldirs.append(os.path.join(dir,d))
    assert [] == allfiles, 'Expect that all removed files are gone, but found %s' % allfiles
    assert [] == alllinks, 'Expect that all links are gone, but found %s' % alllinks
    for sub in expectedSubs:
      assert sub in alldirs, "Expect each subdirectory is still there, but didn't find %s" % sub
Example #36
 def testCleanUpCompletedAndFailedJobs_WithoutSaves(self):
   """
   testCleanUpCompletedAndFailedJobs_WithoutSaves(self):
   First, dynamically set config to not save successful or failed jobs. They are still removed from the file system
   """
   global me
   cc = copy.copy(me.config)
   cursor = self.connection.cursor()
   dbtestutil.fillProcessorTable(cursor,4)
   for conf in ['saveSuccessfulMinidumpsTo','saveFailedMinidumpsTo']:
     cc[conf] = ''
   m = monitor.Monitor(cc)
   createJDS.createTestSet(createJDS.jsonFileData,jsonKwargs={'logger':me.logger},rootDir=me.config.storageRoot)
   runInOtherProcess(m.standardJobAllocationLoop, stopCondition=(lambda : self.jobsAllocated() == 14))
   started = dbtestutil.datetimeNow(cursor)
   self.connection.commit()
   completed = started + dt.timedelta(microseconds=100)
   idTimesAndSuccessSeq = [
     [started,completed,True,1],
     [started,completed,True,3],
     [started,completed,True,5],
     [started,completed,True,11],
     [started,None,False,2],
     [started,None,False,4],
     [started,None,False,8],
     [started,None,False,12],
     ]
   dbCon,dbCur = m.getDatabaseConnectionPair()
   try:
     jobdata = self.setJobSuccess(dbCur,idTimesAndSuccessSeq)
     m.cleanUpCompletedAndFailedJobs()
   finally:
     m.databaseConnectionPool.cleanup()
   successSave = set()
   failSave = set()
   expectSuccessSave = set()
   expectFailSave = set()
   for d in idTimesAndSuccessSeq:
     if d[2]:
       expectSuccessSave.add(d[3])
     else:
       expectFailSave.add(d[3])
   for dir,dirs,files in os.walk(me.config.saveSuccessfulMinidumpsTo):
     successSave.update((os.path.splitext(x)[0] for x in files))
   for dir,dirs,files in os.walk(me.config.saveFailedMinidumpsTo):
     failSave.update((os.path.splitext(x)[0] for x in files))
   remainBehind = set()
   for dir, dirs, files in os.walk(me.config.storageRoot):
     remainBehind.update(os.path.splitext(x)[0] for x in files)
   assert len(successSave) == 0, "We expect not to save any successful jobs with this setting"
   assert len(failSave) == 0, "We expect not to save any failed jobs with this setting"
   for x in jobdata:
      if None == x[2]:
        assert not x[0] in expectFailSave and not x[0] in expectSuccessSave, "database should match expectations for id=%s"%(x[0])
        assert x[1] in remainBehind, "if we didn't set success state for %s, then it should remain behind"%(x[1])
      elif True == x[2]:
        assert not x[1] in remainBehind, "if we did set success state for %s, then it should not remain behind"%(x[1])
        assert not x[0] in expectFailSave and x[0] in expectSuccessSave, "database should match expectations for id=%s"%(x[0])
      elif False == x[2]:
        assert not x[1] in remainBehind, "if we did set success state for %s, then it should not remain behind"%(x[1])
        assert x[0] in expectFailSave and not x[0] in expectSuccessSave, "database should match expectations for id=%s"%(x[0])
Example #37
def testCreateTestSet():
  testDir = "./TEST_CREATE_DIR"
  try:
    shutil.rmtree(testDir)
  except:
    pass
  assert not os.path.exists(testDir)
  try:
    createJDS.createTestSet({},{},testDir)
    assert os.path.isdir(testDir)
  finally:
    try:
      shutil.rmtree(testDir)
    except:
      pass

  expected = {
    '%s/20071025/date/05'%testDir:(set(['04']), set([])),
    '%s/20071025/date'%testDir:(set(['05']), set([])),
    '%s/20071025/name/0b/ba/61/c5'%testDir:(set(['0bba61c5-dfc3-43e7-effe-8afd20071025']), set(['0bba61c5-dfc3-43e7-effe-8afd20071025.dump', '0bba61c5-dfc3-43e7-effe-8afd20071025.json'])),
    '%s/20071025/name/0b'%testDir:(set(['ba']), set([])),
    '%s/20071025/date/05/04'%testDir:(set(['webhead02_0']), set([])),
    '%s/20071025/name/0b/ba/61'%testDir:(set(['c5']), set([])),
    '%s/20071025'%testDir:(set(['date', 'name']), set([])),
    '%s/20071025/date/05/04/webhead02_0'%testDir:(set(['0bba61c5-dfc3-43e7-effe-8afd20071025']), set([])),
    '%s/20071025/name'%testDir:(set(['0b']), set([])),
    '%s'%testDir:(set(['20071025']), set([])),
    '%s/20071025/name/0b/ba'%testDir:(set(['61']), set([])),
    }
  minSet = {'0bba61c5-dfc3-43e7-effe-8afd20071025': ('2007-10-25-05-04','webhead02','0b/ba/61/c5','2007/10/25/05/00/webhead02_0')}
  try:
    createJDS.createTestSet(minSet,{},testDir)
    got = {}
    for dirpath, dirnames, filenames in os.walk(testDir):
      got[dirpath] = (set(dirnames),set(filenames))
    if expected != got:
      print
      for k, v in expected.items():
        print '   X %s: %s'%(k,v)
        if k in got:
          if got[k] == expected[k]:
            print '   G %s: %s'%(k,got[k])
          else:
            print 'xx G %s: %s'%(k,got[k])
        else:
          print 'xx G %s: (IS MISSING)'%(k)
      for k,v in got.items():
        if not k in expected:
          print '++ G %s: %s'%(k,v)
    assert expected == got
    f = open(os.path.join(testDir,'20071025/name/0b/ba/61/c5/0bba61c5-dfc3-43e7-effe-8afd20071025.dump'))
    data = f.readlines()
    assert 1 == len(data)
    assert 'dump test of 0bba61c5-dfc3-43e7-effe-8afd20071025' == data[0].strip()
    f.close()
    f = open(os.path.join(testDir,'20071025/name/0b/ba/61/c5/0bba61c5-dfc3-43e7-effe-8afd20071025.json'))
    data = f.readlines()
    assert 1 == len(data)
    assert 'json test of 0bba61c5-dfc3-43e7-effe-8afd20071025' == data[0].strip()
    f.close()
  finally:
    try:
      shutil.rmtree(testDir)
    except:
      pass

  try:
    createJDS.createTestSet(minSet,{'jsonIsEmpty':True},testDir)
    f = open(os.path.join(testDir,'20071025/name/0b/ba/61/c5/0bba61c5-dfc3-43e7-effe-8afd20071025.dump'))
    data = f.readlines()
    assert 1 == len(data)
    assert 'dump test of 0bba61c5-dfc3-43e7-effe-8afd20071025' == data[0].strip()
    f.close()
    f = open(os.path.join(testDir,'20071025/name/0b/ba/61/c5/0bba61c5-dfc3-43e7-effe-8afd20071025.json'))
    data = f.readlines()
    assert 0 == len(data)
    f.close()
  finally:
    try:
      shutil.rmtree(testDir)
    except:
      pass

  try:
    createJDS.createTestSet(minSet,{'jsonIsBogus':False, 'jsonFileGenerator':'default'},testDir)
    f = open(os.path.join(testDir,'20071025/name/0b/ba/61/c5/0bba61c5-dfc3-43e7-effe-8afd20071025.dump'))
    data = f.readlines()
    assert 1 == len(data)
    assert 'dump test of 0bba61c5-dfc3-43e7-effe-8afd20071025' == data[0].strip()
    f.close()
    f = open(os.path.join(testDir,'20071025/name/0b/ba/61/c5/0bba61c5-dfc3-43e7-effe-8afd20071025.json'))
    data = f.readlines()
    assert 1 == len(data)
    expect='{"BuildID": "bogusBuildID-00", "Version": "bogusVersion-00", "ProductName": "bogusName-00"}'
    assert expect == data[0].strip()
    f.close()
  finally:
    try:
      shutil.rmtree(testDir)
    except:
      pass

  try:
    createJDS.createTestSet(minSet,{'jsonIsBogus':False},testDir)
    f = open(os.path.join(testDir,'20071025/name/0b/ba/61/c5/0bba61c5-dfc3-43e7-effe-8afd20071025.dump'))
    data = f.readlines()
    assert 1 == len(data)
    assert 'dump test of 0bba61c5-dfc3-43e7-effe-8afd20071025' == data[0].strip()
    f.close()
    f = open(os.path.join(testDir,'20071025/name/0b/ba/61/c5/0bba61c5-dfc3-43e7-effe-8afd20071025.json'))
    data = f.readlines()
    assert 1 == len(data)
    expect='{"what": "legal json, bad contents", "uuid": "0bba61c5-dfc3-43e7-effe-8afd20071025"}'
    assert expect == data[0].strip()
    f.close()
  finally:
    try:
      shutil.rmtree(testDir)
    except:
      pass
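Taken together, these examples pin down how the fixture builder is called, though none of them documents it directly. An inferred summary (assumptions from usage, not the createJDS docs):

# createTestSet(dataMap, kwargs, rootDir) writes one .json/.dump pair per
# uuid key; the 4-tuple fields appear to mean:
minSet = {'0bba61c5-dfc3-43e7-effe-8afd20071025':
            ('2007-10-25-05-04',               # date-time key of the crash
             'webhead02',                      # submitting webhead
             '0b/ba/61/c5',                    # radix path under name/
             '2007/10/25/05/00/webhead02_0')}  # path under date/
# kwargs tweak the json content: jsonIsEmpty, jsonIsBogus, jsonFileGenerator, logger
createJDS.createTestSet(minSet, {'jsonIsEmpty': True}, './TEST_CREATE_DIR')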