Example #1
    def _copy_file(self, source, destination, usercfg, log):
        '''Copy a file from source to destination'''

        log.info('Copying {0} to {1}'.format(source, destination))
        source_datapoint = arc_utils.DataPoint(str(source), usercfg)
        destination_datapoint = arc_utils.DataPoint(str(destination), usercfg)
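
        # configure the mover: no automatic retries, passive data
        # connections, and no encryption on the data channel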
        dm = arc.DataMover()
        dm.retry(False)
        dm.passive(True)
        dm.secure(False)

        status = dm.Transfer(source_datapoint.h, destination_datapoint.h,
                             arc.FileCache(), arc.URLMap())
        return status
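
A minimal usage sketch for this helper, assuming the ARC client Python bindings are installed and credentials are already configured; the stager instance and the file URLs are hypothetical, not part of the source. The falsy-status check mirrors the error handling in Example #4.

import logging

import arc

logging.basicConfig(level=logging.INFO)
log = logging.getLogger('copy-example')
usercfg = arc.UserConfig()  # assumes a working credential setup

# 'stager' stands for an instance of the class defining _copy_file
status = stager._copy_file('file:///tmp/in.dat', 'file:///tmp/out.dat',
                           usercfg, log)
if not status:
    log.error('Copy failed: {0}'.format(str(status)))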
Example #2
    def _delete_file(self, filename, usercfg, log):
        '''Delete a remote file on ARC CE'''
        log.info('Deleting {0}'.format(filename))
        datapoint = arc_utils.DataPoint(str(filename), usercfg)
        datapoint.h.SetSecure(False)
        status = datapoint.h.Remove()
        return status
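
The status returned by Remove() follows the same convention: falsy on failure. A short usage sketch, reusing the usercfg and log from the sketch under Example #1 and a hypothetical remote URL:

status = stager._delete_file(
    'gsiftp://ce.example.org/jobs/abc123/output.root', usercfg, log)
if not status:
    log.error('Delete failed: {0}'.format(str(status)))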
Example #3
    def _list_url_recursive(self, url, log, fname='', filelist=None):
        '''List ARC job directory recursively to find all files'''

        # use None instead of a mutable default so calls don't share state
        if filelist is None:
            filelist = []
        dp = arc_utils.DataPoint(url+'/'+fname, self.userconfig)
        files, status = dp.h.List(arc.DataPoint.INFO_TYPE_NAME | arc.DataPoint.INFO_TYPE_TYPE)
        if not status:
            log.warning("Failed listing %s/%s" % (url, fname))
            return filelist
        for f in files:
            if f.GetType() == f.file_type_file:
                filelist.append((fname+'/'+f.GetName()).strip('/'))
            elif f.GetType() == f.file_type_dir:
                filelist = self._list_url_recursive(
                    url, log, (fname+'/'+str(f.GetName())).strip('/'), filelist)
        return filelist
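
Defaulting filelist to None rather than [] keeps repeated calls from accumulating into one shared list. A usage sketch with a hypothetical job session URL; self.userconfig must be a valid user configuration on the owning object:

for path in stager._list_url_recursive(
        'gsiftp://ce.example.org/jobs/abc123', log):
    log.info('Found remote file: {0}'.format(path))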
Example #4
    def _download_outputs(self, files, logdir, jobid, pandaid, userconfig,
                          log):
        '''Download the output files given in the semicolon-separated files argument'''

        # construct datapoint object, initialising connection. Use the same
        # object until base URL changes. TODO group by base URL.

        datapoint = arc_utils.DataPoint(str(jobid), userconfig)
        dp = datapoint.h
        dm = arc.DataMover()
        dm.retry(False)
        dm.passive(True)
        dm.secure(False)
        fetched = []
        notfetched = []
        notfetchedretry = []

        # create required local log dirs
        try:
            os.makedirs(logdir, 0o755)
        except OSError as e:
            if e.errno != errno.EEXIST or not os.path.isdir(logdir):
                log.warning('Failed to create directory {0}: {1}'.format(
                    logdir, os.strerror(e.errno)))
                notfetched.append(jobid)
                return (fetched, notfetched, notfetchedretry)

        tmpdldir = os.path.join(self.tmpdir, pandaid)
        try:
            os.makedirs(tmpdldir, 0o755)
        except OSError as e:
            if e.errno != errno.EEXIST or not os.path.isdir(tmpdldir):
                log.warning('Failed to create directory {0}: {1}'.format(
                    tmpdldir, os.strerror(e.errno)))
                notfetched.append(jobid)
                return (fetched, notfetched, notfetchedretry)

        filelist = files.split(';')
        if re.search(r'[\*\[\]\?]', files):
            # found wildcard, need to get sessiondir list
            remotefiles = self._list_url_recursive(jobid, log)
            expandedfiles = []
            for wcf in filelist:
                if re.search(r'[\*\[\]\?]', wcf):
                    # only match wildcards in matching dirs
                    expandedfiles += [
                        rf for rf in remotefiles if fnmatch.fnmatch(rf, wcf)
                        and os.path.dirname(rf) == os.path.dirname(wcf)
                    ]
                else:
                    expandedfiles.append(wcf)
            # remove duplicates from wildcard matching through set
            filelist = list(set(expandedfiles))

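        # map each output to a local path: 'gmlog/errors' becomes
        # <pandaid>.log, other *.log files become <pandaid>.out, and
        # everything else goes into the tmp download directory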
        for f in filelist:
            if f == 'gmlog/errors':
                localfile = os.path.join(logdir, '%s.log' % pandaid)
            elif '.log' in f:
                localfile = os.path.join(logdir, '%s.out' % pandaid)
            else:
                localfile = os.path.join(tmpdldir, f)

            remotefile = arc.URL(str(jobid + '/' + f))
            dp.SetURL(remotefile)
            localdp = arc_utils.DataPoint(str(localfile), userconfig)
            # do the copy
            status = dm.Transfer(dp, localdp.h, arc.FileCache(), arc.URLMap())
            # tmp fix for globus error which is always retried
            if not status and 'File unavailable' not in str(status):
                if status.Retryable():
                    log.warning('Failed to download but will retry {0}: {1}'.format(
                        dp.GetURL().str(), str(status)))
                    notfetchedretry.append(jobid)
                else:
                    log.error('Failed to download with permanent failure {0}: {1}'.format(
                        dp.GetURL().str(), str(status)))
                    notfetched.append(jobid)
            else:
                os.chmod(localfile, 0o644)
                log.info('Downloaded {0}'.format(dp.GetURL().str()))

        if jobid not in notfetched and jobid not in notfetchedretry:
            fetched.append(jobid)

        return (fetched, notfetched, notfetchedretry)
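
A usage sketch for the downloader. The job URL, panda ID, and directories are illustrative, and the defining module is assumed to import os, errno, re, fnmatch, arc and arc_utils; the first argument is the semicolon-separated file list that files.split(';') expects:

fetched, notfetched, notfetchedretry = stager._download_outputs(
    'gmlog/errors;log.tgz;output.root',
    '/var/log/panda/joblogs',
    'gsiftp://ce.example.org/jobs/abc123',
    '4263556789',
    usercfg,
    log)
if notfetchedretry:
    log.warning('Transient failures, will retry: {0}'.format(notfetchedretry))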