Example #1
    def testUploadDownload(self):
        if 'UFCURL' in os.environ:
            currdir = getTestBase()
            upfile = path.join(currdir, 'WMCore_t/Services_t/UserFileCache_t/test_file.tgz') #file to upload
            ufc = UserFileCache({'endpoint':os.environ['UFCURL']})

            #named upload/download
            res = ufc.upload(upfile, 'name_publish.tgz')
            ufc.download(name=res['name'], output='name_publish.tgz')

            #hashkey upload/download
            res = ufc.upload(upfile)
            ufc.download(res['hashkey'], output='pippo_publish_down.tgz')
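These test methods appear to come from WMCore's UserFileCache unit tests. To read them as self-contained snippets, they assume roughly the following imports (a minimal sketch; the exact module paths for getTestBase and UserFileCache are assumptions inferred from the identifiers used, not taken from the examples):

    # Minimal import sketch for the test examples; module paths are assumed.
    import os
    import filecmp
    from os import path

    from WMQuality.TestInit import getTestBase                              # assumed location of getTestBase
    from WMCore.Services.UserFileCache.UserFileCache import UserFileCache   # assumed location of UserFileCache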
Example #2
    def testUploadDownload(self):
        if "UFCURL" in os.environ:
            currdir = getTestBase()
            upfile = path.join(currdir, "WMCore_t/Services_t/UserFileCache_t/test_file.tgz")  # file to upload
            ufc = UserFileCache({"endpoint": os.environ["UFCURL"]})

            # named upload/download
            res = ufc.upload(upfile, "name_publish.tgz")
            ufc.download(name=res["name"], output="name_publish.tgz")

            # hashkey upload/download
            res = ufc.upload(upfile)
            ufc.download(res["hashkey"], output="pippo_publish_down.tgz")
Example #3
    def testUploadDownload(self):
        if 'UFCURL' in os.environ:
            currdir = getTestBase()
            upfile = path.join(
                currdir, 'WMCore_t/Services_t/UserFileCache_t/test_file.tgz'
            )  # file to upload
            upfileLog = path.join(
                currdir, 'WMCore_t/Services_t/UserFileCache_t/uplog.txt'
            )  # log file to upload
            ufc = UserFileCache({
                'endpoint': os.environ['UFCURL'],
                'pycurl': True
            })

            #hashkey upload/download
            res = ufc.upload(upfile)
            ufc.download(res['hashkey'], output='pippo_publish_down.tgz')

            #hashkey deletion
            ufc.removeFile(res['hashkey'])

            #log upload/download
            res = ufc.uploadLog(upfileLog)
            ufc.downloadLog(upfileLog, upfileLog + '.downloaded')
            self.assertTrue(filecmp.cmp(upfileLog, upfileLog + '.downloaded'))
Example #4
def uploadPublishWorkflow(config, workflow, ufcEndpoint, workDir):
    """
    Write out and upload to the UFC a JSON file
    with all the info needed to publish this dataset later
    """
    retok, proxyfile = getProxy(config, workflow.dn, workflow.vogroup, workflow.vorole)
    if not retok:
        logging.info("Cannot get the user's proxy")
        return False

    ufc = UserFileCache({'endpoint': ufcEndpoint, 'cert': proxyfile, 'key': proxyfile})

    # Skip tasks ending in LogCollect, they have nothing interesting.
    taskNameParts = workflow.task.split('/')
    if taskNameParts.pop() in ['LogCollect']:
        logging.info('Skipping LogCollect task')
        return False
    logging.info('Generating JSON for publication of %s of type %s' % (workflow.name, workflow.wfType))

    myThread = threading.currentThread()

    dbsDaoFactory = DAOFactory(package="WMComponent.DBS3Buffer",
                               logger=myThread.logger, dbinterface=myThread.dbi)
    findFiles = dbsDaoFactory(classname="LoadFilesByWorkflow")

    # Fetch and filter the files to the ones we actually need
    uploadDatasets = {}
    uploadFiles = findFiles.execute(workflowName = workflow.name)
    for fileInfo in uploadFiles:
        datasetName = fileInfo['datasetPath']
        if datasetName not in uploadDatasets:
            uploadDatasets[datasetName] = []
        uploadDatasets[datasetName].append(fileInfo)

    if not uploadDatasets:
        logging.info('No datasets found to upload.')
        return False

    # Write JSON file and then create tarball with it
    baseName = '%s_publish.tgz' % workflow.name
    jsonName = os.path.join(workDir, '%s_publish.json' % workflow.name)
    tgzName = os.path.join(workDir, baseName)
    with open(jsonName, 'w') as jsonFile:
        json.dump(uploadDatasets, fp=jsonFile, cls=FileEncoder, indent=2)

    # Only in 2.7 does tarfile become usable as context manager
    tgzFile = tarfile.open(name=tgzName, mode='w:gz')
    tgzFile.add(jsonName)
    tgzFile.close()

    result = ufc.upload(fileName=tgzName, name=baseName)
    logging.debug('Upload result %s' % result)
    # If this doesn't work, the exception will propagate up and block archiving the task
    logging.info('Uploaded with name %s and hashkey %s' % (result['name'], result['hashkey']))
    return
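For context, the tarball uploaded by uploadPublishWorkflow is stored under the name '<workflow>_publish.tgz', so a later publication step could fetch it back with the named download shown in Example #1. A hedged sketch, assuming the same UserFileCache configuration (workflowName, ufcEndpoint and proxyfile are placeholders):

    # Hypothetical retrieval of the publication tarball uploaded above.
    import tarfile

    ufc = UserFileCache({'endpoint': ufcEndpoint, 'cert': proxyfile, 'key': proxyfile})
    baseName = '%s_publish.tgz' % workflowName
    ufc.download(name=baseName, output=baseName)   # named download, as in Example #1

    # Extract the JSON that maps dataset paths to their file records.
    tgz = tarfile.open(baseName, 'r:gz')
    tgz.extractall()
    tgz.close()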
Example #5
    def upload(self):
        """
        Upload the tarball to the File Cache
        """
        self.close()
        archiveName = self.tarfile.name
        serverUrl = ""
        self.logger.debug(" uploading archive to cache %s " % archiveName)
        ufc = UserFileCache({'endpoint': self.config.JobType.filecacheurl})
        result = ufc.upload(archiveName)
        if 'hashkey' not in result:
            self.logger.error("Failed to upload source files: %s" % str(result))
            raise CachefileNotFoundException
        return self.config.JobType.filecacheurl, str(result['hashkey']) + '.tar.gz', self.checksum
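The method above returns a three-tuple of the cache URL, the tarball name derived from the hashkey, and the checksum. A minimal caller sketch (the 'tarball' object is a hypothetical instance of the class that defines upload()):

    # Hypothetical caller of the upload() method shown above.
    filecacheurl, tarballName, checksum = tarball.upload()
    print("uploaded %s to %s (checksum %s)" % (tarballName, filecacheurl, checksum))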
Example #6
    def testUploadDownload(self):
        if 'UFCURL' in os.environ:
            currdir = getTestBase()
            upfile = path.join(currdir, 'WMCore_t/Services_t/UserFileCache_t/test_file.tgz') # file to upload
            upfileLog = path.join(currdir, 'WMCore_t/Services_t/UserFileCache_t/uplog.txt') # log file to upload
            ufc = UserFileCache({'endpoint':os.environ['UFCURL']})

            #hashkey upload/download
            res = ufc.upload(upfile)
            ufc.download(res['hashkey'], output='pippo_publish_down.tgz')

            #log upload/download
            res = ufc.uploadLog(upfileLog)
            ufc.downloadLog(upfileLog, upfileLog+'.downloaded')
            self.assertTrue(filecmp.cmp(upfileLog, upfileLog+'.downloaded'))
Example #7
    def testUploadDownload(self):
        if "UFCURL" in os.environ:
            currdir = getTestBase()
            upfile = path.join(currdir, "WMCore_t/Services_t/UserFileCache_t/test_file.tgz")  # file to upload
            upfileLog = path.join(currdir, "WMCore_t/Services_t/UserFileCache_t/uplog.txt")  # file to upload
            ufc = UserFileCache({"endpoint": os.environ["UFCURL"], "pycurl": True})

            # hashkey upload/download
            res = ufc.upload(upfile)
            ufc.download(res["hashkey"], output="pippo_publish_down.tgz")

            # hashkey deletion
            ufc.removeFile(res["hashkey"])

            # log upload/download
            res = ufc.uploadLog(upfileLog)
            ufc.downloadLog(upfileLog, upfileLog + ".downloaded")
            self.assertTrue(filecmp.cmp(upfileLog, upfileLog + ".downloaded"))
Example #8
    def uploadSandboxToUFC(self, url, sandbox=None, name="YAATFile"):
        if not sandbox:
            sandbox = self.sandbox
        req = UserFileCache({'endpoint': url})
        return req.upload(sandbox, name)
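Judging from the upload return values in Examples #1 and #4, the result of uploadSandboxToUFC is the dictionary returned by UserFileCache.upload, carrying the stored name and hashkey. A hedged usage sketch ('stager', the URL, and the sandbox name are placeholders):

    # Hypothetical call; the endpoint URL and the stager object are placeholders.
    result = stager.uploadSandboxToUFC('https://example.invalid/userfilecache', name='task_sandbox.tgz')
    print(result['name'], result['hashkey'])   # keys as seen in the other examples' upload results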
Example #9
def uploadPublishWorkflow(config, workflow, ufcEndpoint, workDir):
    """
    Write out and upload to the UFC a JSON file
    with all the info needed to publish this dataset later
    """
    retok, proxyfile = getProxy(config, workflow.dn, workflow.vogroup,
                                workflow.vorole)
    if not retok:
        logging.info("Cannot get the user's proxy")
        return False

    ufc = UserFileCache({
        'endpoint': ufcEndpoint,
        'cert': proxyfile,
        'key': proxyfile
    })

    # Skip tasks ending in LogCollect, they have nothing interesting.
    taskNameParts = workflow.task.split('/')
    if taskNameParts.pop() in ['LogCollect']:
        logging.info('Skipping LogCollect task')
        return False
    logging.info('Generating JSON for publication of %s of type %s' %
                 (workflow.name, workflow.wfType))

    myThread = threading.currentThread()

    dbsDaoFactory = DAOFactory(package="WMComponent.DBS3Buffer",
                               logger=myThread.logger,
                               dbinterface=myThread.dbi)
    findFiles = dbsDaoFactory(classname="LoadFilesByWorkflow")

    # Fetch and filter the files to the ones we actually need
    uploadDatasets = {}
    uploadFiles = findFiles.execute(workflowName=workflow.name)
    for fileInfo in uploadFiles:
        datasetName = fileInfo['datasetPath']
        if datasetName not in uploadDatasets:
            uploadDatasets[datasetName] = []
        uploadDatasets[datasetName].append(fileInfo)

    if not uploadDatasets:
        logging.info('No datasets found to upload.')
        return False

    # Write JSON file and then create tarball with it
    baseName = '%s_publish.tgz' % workflow.name
    jsonName = os.path.join(workDir, '%s_publish.json' % workflow.name)
    tgzName = os.path.join(workDir, baseName)
    with open(jsonName, 'w') as jsonFile:
        json.dump(uploadDatasets, fp=jsonFile, cls=FileEncoder, indent=2)

    # Only in 2.7 does tarfile become usable as context manager
    tgzFile = tarfile.open(name=tgzName, mode='w:gz')
    tgzFile.add(jsonName)
    tgzFile.close()

    result = ufc.upload(fileName=tgzName, name=baseName)
    logging.debug('Upload result %s' % result)
    # If this doesn't work, the exception will propagate up and block archiving the task
    logging.info('Uploaded with name %s and hashkey %s' %
                 (result['name'], result['hashkey']))
    return