Example 1
 def test_success(self):
     synchroniser = PilotCStoJSONSynchronizer()
     synchroniser.pilotFileServer = 'value'
     res = synchroniser._syncJSONFile()
     assert res['OK'], res['Message']
     # ensure pilot.json was "uploaded"
     assert 'pilot.json' in synchroniser._checksumDict
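
The DIRAC calls used throughout these examples return the standard S_OK/S_ERROR result dictionary, which is why the assertions keep repeating the res['OK'], res['Message'] pattern. A tiny helper such as the one below (purely illustrative, not part of DIRAC or of the test above) keeps that pattern in one place:

def assert_ok(res):
    """Fail with the DIRAC error message when a result dictionary is not OK."""
    assert res['OK'], res.get('Message', 'no Message in result')

# usage, mirroring the test above:
# assert_ok(synchroniser._syncJSONFile())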
Example 2
    def export_commitNewData(self, sData):
        global gPilotSynchronizer
        credDict = self.getRemoteCredentials()
        if 'DN' not in credDict or 'username' not in credDict:
            return S_ERROR("You must be authenticated!")
        res = gServiceInterface.updateConfiguration(sData,
                                                    credDict['username'])
        if not res['OK']:
            return res

        # Check the flag for updating the pilot 3 JSON file
        updatePilotCStoJSONFileFlag = self.srv_getCSOption(
            'UpdatePilotCStoJSONFile', False)
        if updatePilotCStoJSONFileFlag and gServiceInterface.isMaster():
            if gPilotSynchronizer is None:
                try:
                    # This import is only needed by the master CS service; making it conditional avoids
                    # requiring the git client to be preinstalled on all the servers running CS slaves
                    from DIRAC.WorkloadManagementSystem.Utilities.PilotCStoJSONSynchronizer import PilotCStoJSONSynchronizer
                except ImportError as exc:
                    self.log.exception(
                        "Failed to import PilotCStoJSONSynchronizer",
                        repr(exc))
                    return S_ERROR(
                        DErrno.EIMPERR,
                        'Failed to import PilotCStoJSONSynchronizer')
                gPilotSynchronizer = PilotCStoJSONSynchronizer()
            return gPilotSynchronizer.sync()

        return res
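
The handler above relies on a module-level gPilotSynchronizer that starts as None and is created lazily the first time a commit actually requires a sync, so slave CS servers never trigger the import. A minimal sketch of that pattern in isolation (the helper name is illustrative; only PilotCStoJSONSynchronizer comes from DIRAC):

gPilotSynchronizer = None  # module-level cache, filled on first use


def _getPilotSynchronizer():
    """Import and instantiate the synchronizer only when it is first needed,
    avoiding the git-client dependency on servers that never sync."""
    global gPilotSynchronizer
    if gPilotSynchronizer is None:
        from DIRAC.WorkloadManagementSystem.Utilities.PilotCStoJSONSynchronizer import PilotCStoJSONSynchronizer
        gPilotSynchronizer = PilotCStoJSONSynchronizer()
    return gPilotSynchronizer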
Example 3
# stdlib modules used below; Script, includeMasterCS and setNoMasterCS
# are provided by the rest of the script this snippet is taken from
import hashlib
import json
import os


def main():
    global includeMasterCS
    Script.registerSwitch("n", "noMasterCS", "do not include master CS",
                          setNoMasterCS)
    Script.parseCommandLine()

    from DIRAC import gLogger, exit as DIRACExit
    from DIRAC.WorkloadManagementSystem.Utilities.PilotCStoJSONSynchronizer import PilotCStoJSONSynchronizer

    ps = PilotCStoJSONSynchronizer()

    gLogger.verbose("Parameters for this sync:")
    gLogger.verbose("repo=" + ps.pilotRepo)
    gLogger.verbose("VO repo=" + ps.pilotVORepo)
    gLogger.verbose("projectDir=" + ps.projectDir)
    gLogger.verbose("pilotScriptsPath=" + ps.pilotScriptPath)
    gLogger.verbose("pilotVOScriptsPath=" + ps.pilotVOScriptPath)
    gLogger.verbose("pilotRepoBranch=" + ps.pilotRepoBranch)
    gLogger.verbose("pilotVORepoBranch=" + ps.pilotVORepoBranch)

    # pilot.json
    res = ps.getCSDict(includeMasterCS=includeMasterCS)
    if not res['OK']:
        DIRACExit(1)
    pilotDict = res['Value']
    print(json.dumps(
        pilotDict, indent=4,
        sort_keys=True))  # just print here as formatting is important
    with open('pilot.json', 'w') as jf:
        json.dump(pilotDict, jf)

    # pilot files
    res = ps.syncScripts()
    if not res['OK']:
        DIRACExit(1)
    gLogger.always(res['Value'])
    tarPath, tarFiles = res['Value']

    allFiles = [tarPath] + tarFiles + ['pilot.json']

    # checksums
    checksumDict = {}
    for pFile in allFiles:
        filename = os.path.basename(pFile)
        with open(pFile, 'rb') as fp:
            checksumDict[filename] = hashlib.sha512(fp.read()).hexdigest()

    cksPath = 'checksums.sha512'
    with open(cksPath, 'wt') as chksums:
        for filename, chksum in sorted(checksumDict.items()):
            # same as the output from sha512sum commands
            chksums.write('%s  %s\n' % (chksum, filename))

    allFiles = allFiles + [cksPath]

    print(allFiles)
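
Since the script writes each line in the same "<digest>  <filename>" layout as the sha512sum tool, the resulting checksums.sha512 can be verified with sha512sum -c, or with a short Python check like the sketch below (it assumes the listed files sit next to the checksum file):

import hashlib
import os


def verify_checksums(cksPath='checksums.sha512'):
    """Recompute the sha512 digest of every file listed in the checksum file
    and return the names of any that do not match."""
    baseDir = os.path.dirname(os.path.abspath(cksPath))
    failures = []
    with open(cksPath) as chksums:
        for line in chksums:
            if not line.strip():
                continue
            digest, filename = line.rstrip('\n').split('  ', 1)
            with open(os.path.join(baseDir, filename), 'rb') as fp:
                if hashlib.sha512(fp.read()).hexdigest() != digest:
                    failures.append(filename)
    return failures

# after a successful sync, verify_checksums() should return an empty list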
Example 4
  def export_commitNewData(self, sData):
    global gPilotSynchronizer
    credDict = self.getRemoteCredentials()
    if 'DN' not in credDict or 'username' not in credDict:
      return S_ERROR("You must be authenticated!")
    res = gServiceInterface.updateConfiguration(sData, credDict['username'])
    if not res['OK']:
      return res

    # Check the flag for updating the pilot 3 JSON file
    if self.srv_getCSOption('UpdatePilotCStoJSONFile', False) and gServiceInterface.isMaster():
      if gPilotSynchronizer is None:
        try:
          # This import is only needed by the master CS service; making it conditional avoids
          # requiring the git client to be preinstalled on all the servers running CS slaves
          from DIRAC.WorkloadManagementSystem.Utilities.PilotCStoJSONSynchronizer import PilotCStoJSONSynchronizer
        except ImportError as exc:
          self.log.exception("Failed to import PilotCStoJSONSynchronizer", repr(exc))
          return S_ERROR(DErrno.EIMPERR, 'Failed to import PilotCStoJSONSynchronizer')
        gPilotSynchronizer = PilotCStoJSONSynchronizer()
      return gPilotSynchronizer.sync()

    return res
Example 5
 def test_syncchecksum(self):
     # If the hashes need to be changed because the pilot or the test.cfg file changes, they should be
     # regenerated with the sha512sum command line tool, and the files should be checked for correctness
     expectedHash = '00e67a2d45e2c2508a935500a4765e1a5f1ce661f23c1fb329987c8211bde754ed' + \
                    '79f6b02cdeabd429979a82014c474c5ce2f46a879f17e2a6ce4bcac683e2e4'
     expectedPJHash = '6f0a45fc703ca03ad3f277173c3464dafcaad2b85fa6643f66a02721e69233cd' + \
                      'ec6c50c75eb779740788b5211f740631944f8d703d4149f6526d149c761c02f4'
     synchroniser = PilotCStoJSONSynchronizer()
     synchroniser.pilotFileServer = 'value'
     synchroniser._checksumFile(self.testCfgFileName)
     res = synchroniser._syncJSONFile()
     assert res['OK'], res['Message']
     synchroniser._syncChecksum()
     assert self.testCfgFileName in synchroniser._checksumDict
     assert synchroniser._checksumDict[self.testCfgFileName] == expectedHash
     with open('checksums.sha512', 'rt') as chksums:
         checksumLines = chksums.read().split('\n')
     assert checksumLines[0] == '%s  %s' % (expectedPJHash, 'pilot.json'), \
         'pilot.json content: ' + repr(open('pilot.json', 'rb').read())
     assert checksumLines[1] == '%s  %s' % (expectedHash, self.testCfgFileName)
     # this tests if the checksums file was also "uploaded"
     assert 'checksums.sha512' in list(synchroniser._checksumDict)
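
The comment in this test notes that the expected hashes have to be regenerated with the sha512sum command line tool whenever pilot.json or the test configuration file changes. The same value can be produced from Python, which is convenient when refreshing the test constants; this helper is only a sketch and not part of the test suite:

import hashlib


def expected_hash(path):
    """Return the sha512 hex digest of a file, identical to the digest
    printed by `sha512sum <path>`."""
    with open(path, 'rb') as fp:
        return hashlib.sha512(fp.read()).hexdigest()

# e.g. print(expected_hash('pilot.json')) to refresh expectedPJHash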
Example 6
 def test_success(self):
     synchroniser = PilotCStoJSONSynchronizer()
     res = synchroniser._syncJSONFile()
     self.assertTrue(res['OK'])
Example 7
 def test_success(self):
     synchroniser = PilotCStoJSONSynchronizer()
     res = synchroniser.getCSDict()
     assert res["OK"], res["Message"]
     res = synchroniser.getCSDict(includeMasterCS=False)
     assert res["OK"], res["Message"]
Example 8
    def execute(self):
        """cycle"""

        ps = PilotCStoJSONSynchronizer()
        ps.workDir = self.workingDirectory

        self.log.verbose("Parameters for this sync:")
        self.log.verbose("repo=" + ps.pilotRepo)
        self.log.verbose("VO repo=" + ps.pilotVORepo)
        self.log.verbose("projectDir=" + ps.projectDir)
        self.log.verbose("pilotScriptsPath=" + ps.pilotScriptPath)
        self.log.verbose("pilotVOScriptsPath=" + ps.pilotVOScriptPath)
        self.log.verbose("pilotRepoBranch=" + ps.pilotRepoBranch)
        self.log.verbose("pilotVORepoBranch=" + ps.pilotVORepoBranch)

        # pilot.json
        res = ps.getCSDict(includeMasterCS=self.includeMasterCS)
        if not res["OK"]:
            return res
        pilotDict = res["Value"]
        print(json.dumps(pilotDict, indent=4, sort_keys=True))  # just print here as formatting is important
        with open(os.path.join(self.workingDirectory, "pilot.json"), "w") as jf:
            json.dump(pilotDict, jf)

        # pilot files
        res = ps.syncScripts()
        if not res["OK"]:
            return res
        tarPath, tarFiles = res["Value"]

        allFiles = [tarPath] + tarFiles + [os.path.join(self.workingDirectory, "pilot.json")]

        # checksums
        checksumDict = {}
        for pFile in allFiles:
            filename = os.path.basename(pFile)
            with open(pFile, "rb") as fp:
                checksumDict[filename] = hashlib.sha512(fp.read()).hexdigest()

        cksPath = os.path.join(self.workingDirectory, "checksums.sha512")
        with open(cksPath, "wt") as chksums:
            for filename, chksum in sorted(checksumDict.items()):
                # same as the output from sha512sum commands
                chksums.write("%s  %s\n" % (chksum, filename))

        allFiles = list(set(allFiles + [cksPath]))

        if self.saveDir:
            # Moving files to the correct location
            self.log.info("Moving pilot files", "to %s" % self.saveDir)
            for tf in allFiles:
                # this overrides the destinations
                shutil.move(tf, os.path.join(self.saveDir, os.path.basename(tf)))

        # Attempt to upload the files to each of the configured locations
        for server in self.uploadLocations:
            self.log.info("Attempting to upload", "to %s" % server)
            if server.startswith("https://"):
                for tf in allFiles:
                    res = requests.put(server, data=tf, verify=self.casLocation, cert=self.certAndKeyLocation)
                    if res.status_code not in (200, 202):
                        self.log.error("Could not upload", "to %s: status %s" % (server, res.status_code))
            else:  # Assumes this is a DIRAC SE
                for tf in allFiles:
                    res = DataManager().put(lfn=tf, fileName=tf, diracSE=server)
                    if not res["OK"]:
                        self.log.error("Could not upload", "to %s: %s" % (server, res["Message"]))

        return S_OK()
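
The execute() cycle above uses a handful of attributes (workingDirectory, saveDir, uploadLocations, includeMasterCS, casLocation, certAndKeyLocation) that the agent would normally fill in from its options during initialisation. A purely hypothetical initialize() sketch is shown below; it assumes the standard DIRAC AgentModule helpers (am_getOption, am_getWorkDirectory), and the option names and defaults are illustrative, not the agent's actual configuration schema:

    def initialize(self):
        """Hypothetical initialisation for the same agent class: populate the
        attributes used by execute() from agent options (names and defaults
        are illustrative)."""
        self.workingDirectory = self.am_getOption("WorkDirectory", self.am_getWorkDirectory())
        self.saveDir = self.am_getOption("SaveDirectory", "")
        self.uploadLocations = self.am_getOption("UploadLocations", [])
        self.includeMasterCS = self.am_getOption("IncludeMasterCS", True)
        self.casLocation = self.am_getOption("CAsLocation", "")
        self.certAndKeyLocation = self.am_getOption("CertAndKey", ())
        return S_OK()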
Example 9
 def test_success(self):
   synchroniser = PilotCStoJSONSynchronizer()
   res = synchroniser.sync()
   self.assertTrue(res['OK'])