Example 1
    def _presigned_putFile(self, urls, sourceSize=0):
        """Upload a local file.

        .. warning:: no 3rd party copy possible

        :param urls: dictionary { urls : localFile }
        :param sourceSize: size of the file in bytes. Mandatory for third party copy (WHY ???)
                             Also, this parameter makes it essentially a non-bulk operation for
                             third party copy, unless all files have the same size...
        :returns: * Successful dict: { path : size }
                  * Failed dict: { path : error message }
                  * S_ERROR in case of argument problems
        """

        log = LOG.getSubLogger("putFile")

        failed = {}
        successful = {}

        # Construct a dict <url:{x-amz-meta-checksum: adler32}>
        # it needs to be passed to createPresignedUrl
        urlAdlers = {url: {"x-amz-meta-checksum": fileAdler(src_file)} for url, src_file in urls.items()}

        res = self.S3GatewayClient.createPresignedUrl(self.name, "put_object", urlAdlers)
        if not res["OK"]:
            return res

        failed.update(res["Value"]["Failed"])

        # Contains <url: presignedResponse>
        presignedResponses = res["Value"]["Successful"]

        for dest_url, presignedResponse in presignedResponses.items():

            src_file = urls[dest_url]

            try:
                cks = fileAdler(src_file)
                if not cks:
                    log.warn("Cannot get ADLER32 checksum for %s" % src_file)

                presignedURL = presignedResponse["url"]
                presignedFields = presignedResponse["fields"]
                with open(src_file, "rb") as src_fd:
                    # files = {'file': (dest_key, src_fd)}
                    files = {"file": src_fd}
                    response = requests.post(presignedURL, data=presignedFields, files=files)

                    if not response.ok:
                        raise Exception(response.reason)

                successful[dest_url] = os.path.getsize(src_file)

            except Exception as e:
                failed[dest_url] = repr(e)

        return S_OK({"Failed": failed, "Successful": successful})
Example 2
  def getFileMetadata(self, candidateFiles):
    """Returns the candidate file dictionary with associated metadata.

    :param dict candidateFiles: The input candidate files dictionary has the structure: {'lfn':'','path':'','workflowSE':''}
       This also assumes the files are in the current working directory.
    :return: S_OK with File Metadata, S_ERROR
    """
    #Retrieve the POOL File GUID(s) for any final output files
    self.log.info('Will search for POOL GUIDs for: %s' %(', '.join(candidateFiles.keys())))
    pfnGUIDs = {}
    generated = []
    for fname in candidateFiles.keys():
      guid = makeGuid(fname)
      pfnGUIDs[fname] = guid
      generated.append(fname)
    pfnGUID = S_OK(pfnGUIDs)
    pfnGUID['generated'] = generated
    #pfnGUID = getGUID(candidateFiles.keys())
    #if not pfnGUID['OK']:
    #  self.log.error('PoolXMLFile failed to determine POOL GUID(s) for output file list, these will be generated by \
    #                   the DataManager',pfnGUID['Message'])
    #  for fileName in candidateFiles.keys():
    #    candidateFiles[fileName]['guid']=''
    #if pfnGUID['generated']:
    self.log.debug('Generated GUID(s) for the following files ', ', '.join(pfnGUID['generated']))
    #else:
    #  self.log.info('GUIDs found for all specified POOL files: %s' %(string.join(candidateFiles.keys(),', ')))

    for pfn, guid in pfnGUID['Value'].items():
      candidateFiles[pfn]['GUID'] = guid

    #Get all additional metadata about the file necessary for requests
    final = {}
    for fileName, metadata in candidateFiles.items():
      fileDict = {}
      fileDict['LFN'] = metadata['lfn']
      fileDict['Size'] = os.path.getsize(fileName)
      adler = fileAdler(fileName)
      # 'Addler' is a historical misspelling kept for backward compatibility
      fileDict['Addler'] = adler
      fileDict['ADLER32'] = adler
      fileDict['Checksum'] = adler
      fileDict['ChecksumType'] = "ADLER32"
      fileDict['GUID'] = metadata['GUID']
      fileDict['Status'] = "Waiting"

      final[fileName] = metadata
      final[fileName]['filedict'] = fileDict
      final[fileName]['localpath'] = '%s/%s' % (os.getcwd(), fileName)

    #Sanity check all final candidate metadata keys are present (return S_ERROR if not)
    mandatoryKeys = ['GUID', 'filedict'] #filedict is used for requests (this method adds guid and filedict)
    for fileName, metadata in final.items():
      for key in mandatoryKeys:
        if key not in metadata:
          return S_ERROR('File %s has missing %s' % (fileName, key))

    return S_OK(final)
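Note: a minimal consumer sketch for the method above, assuming only the S_OK/S_ERROR convention visible in the example; the module argument and the candidate input are hypothetical:

def summarise_candidates(module, candidates):
    # module is any object providing getFileMetadata as defined above
    res = module.getFileMetadata(candidates)
    if not res['OK']:
        return 'getFileMetadata failed: %s' % res['Message']
    lines = []
    for fileName, meta in res['Value'].items():
        fd = meta['filedict']  # per-file dict assembled by getFileMetadata
        lines.append('%s lfn=%s size=%s adler=%s' % (fileName, fd['LFN'], fd['Size'], fd['Checksum']))
    return '\n'.join(lines)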
Example 4
  def __stat( path ):
    """  Issue a stat call and format it the dirac way, and add the checksum

      :param self: self reference
      :param path: path on the storage
      :returns Successful S_OK(metadataDict) or S_ERROR
    """
    try:
      statInfo = os.stat( path )
      metadataDict = {}
  
      metadataDict['ModTime'] = statInfo.st_mtime
      metadataDict['Size'] = statInfo.st_size
      metadataDict['Mode'] = stat.S_IMODE( statInfo.st_mode )
      metadataDict['Directory'] = bool( stat.S_ISDIR( statInfo.st_mode ) )
      isFile = bool( stat.S_ISREG( statInfo.st_mode ) )
      metadataDict['File'] = isFile

      cks = ""
      if isFile:
        cks = fileAdler( path )

      metadataDict['Checksum'] = cks if cks else ""

      # FIXME: only here for compatibility with SRM until multi protocol is properly handled
      metadataDict['Cached'] = 1
      metadataDict['Migrated'] = 0
      metadataDict['Lost'] = 0
      metadataDict['Unavailable'] = 0

    except OSError as ose:
      return S_ERROR( str( ose ) )

    return S_OK( metadataDict )
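Note: a short sketch of how a metadata dictionary in this shape can be used to verify a local copy of a file; the helper name is ours, and only the 'Size' and 'Checksum' keys from the example are assumed:

import os

from DIRAC.Core.Utilities.Adler import fileAdler

def verify_local_copy(path, metadataDict):
    # Hypothetical helper: cheap size comparison first, checksum second
    if os.path.getsize(path) != metadataDict['Size']:
        return False
    expected = metadataDict.get('Checksum')
    if not expected:
        return True  # no checksum recorded: the size check has to suffice
    return fileAdler(path) == expected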
Example 5
def registerFilesTree(catalog, lfn, pfn):
    global processedFiles
    fileDict = {}
    dirlist = os.listdir(pfn)
    for entry in dirlist:
        entrypfn = "%s/%s" % (pfn, entry)
        entrylfn = "%s/%s" % (lfn, entry)
        if os.path.isdir(entrypfn):
            registerFilesTree(catalog, entrylfn, entrypfn)
            continue
        infoDict = {}
        infoDict['PFN'] = entrypfn
        try:
            size = os.stat(entrypfn).st_size
            infoDict['Size'] = int(size)
        except OSError:
            print "Setting size to 0 because stat failed for " + entrypfn
            infoDict['Size'] = 0
        infoDict['SE'] = 'EISCAT-disk'
        # make a UUID based on the host ID and current time
        #infoDict['GUID'] = uuid.uuid1().int
        infoDict['GUID'] = str(uuid.uuid1().hex)
        infoDict['Checksum'] = ''
        if registerChecksum:
            infoDict['Checksum'] = str(fileAdler(entrypfn))

        fileDict[entrylfn] = infoDict

    # bulk files registration in the current level
    if fileDict:
        print "Registering %d files in %s directory" % (len(fileDict), pfn)
        registerFiles(catalog, fileDict)
        processedFiles += len(fileDict)
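Note: for reference, one entry of the fileDict payload assembled above looks like this before it is handed to registerFiles; all values are illustrative:

import uuid

fileDict = {
    '/eiscat/2010/file.bz2': {          # LFN used as the key
        'PFN': '/data/2010/file.bz2',   # physical path on disk
        'Size': 4096,                   # bytes; 0 when the stat call failed
        'SE': 'EISCAT-disk',            # storage element name
        'GUID': uuid.uuid1().hex,       # host/time based GUID, as in the example
        'Checksum': '',                 # Adler-32 hex string when registerChecksum is set
    }
}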
Example 6
    def __stat(path):
        """  Issue a stat call and format it the dirac way, and add the checksum

      :param self: self reference
      :param path: path on the storage
      :returns Successful S_OK(metadataDict) or S_ERROR
    """
        try:
            statInfo = os.stat(path)
            metadataDict = {}

            metadataDict['ModTime'] = statInfo.st_mtime
            metadataDict['Size'] = statInfo.st_size
            metadataDict['Mode'] = stat.S_IMODE(statInfo.st_mode)
            metadataDict['Directory'] = bool(stat.S_ISDIR(statInfo.st_mode))
            isFile = bool(stat.S_ISREG(statInfo.st_mode))
            metadataDict['File'] = isFile

            cks = ""
            if isFile:
                cks = fileAdler(path)

            metadataDict['Checksum'] = cks if cks else ""

            # FIXME: only here for compatibility with SRM until multi protocol is properly handled
            metadataDict['Cached'] = 1
            metadataDict['Migrated'] = 0
            metadataDict['Lost'] = 0
            metadataDict['Unavailable'] = 0

        except OSError as ose:
            return S_ERROR(str(ose))

        return S_OK(metadataDict)
Example 7
    def getFileMetadata(self, candidateFiles):
        """Returns the candidate file dictionary with associated metadata.
    
    @param candidateFiles: The input candidate files dictionary has the structure:
    {'lfn':'','path':'','workflowSE':''}
       
    This also assumes the files are in the current working directory.
    @return: File Metadata
    """
        # Retrieve the POOL File GUID(s) for any final output files
        self.log.info("Will search for POOL GUIDs for: %s" % (", ".join(candidateFiles.keys())))
        pfnGUIDs = {}
        generated = []
        for fname in candidateFiles.keys():
            guid = makeGuid(fname)
            pfnGUIDs[fname] = guid
            generated.append(fname)
        pfnGUID = S_OK(pfnGUIDs)
        pfnGUID["generated"] = generated
        # pfnGUID = getGUID(candidateFiles.keys())
        # if not pfnGUID['OK']:
        #  self.log.error('PoolXMLFile failed to determine POOL GUID(s) for output file list, these will be generated by \
        #                   the ReplicaManager',pfnGUID['Message'])
        #  for fileName in candidateFiles.keys():
        #    candidateFiles[fileName]['guid']=''
        # if pfnGUID['generated']:
        self.log.debug("Generated GUID(s) for the following files ", ", ".join(pfnGUID["generated"]))
        # else:
        #  self.log.info('GUIDs found for all specified POOL files: %s' %(string.join(candidateFiles.keys(),', ')))

        for pfn, guid in pfnGUID["Value"].items():
            candidateFiles[pfn]["guid"] = guid

        # Get all additional metadata about the file necessary for requests
        final = {}
        for fileName, metadata in candidateFiles.items():
            fileDict = {}
            fileDict["LFN"] = metadata["lfn"]
            fileDict["Size"] = os.path.getsize(fileName)
            fileDict["Addler"] = fileAdler(fileName)
            fileDict["GUID"] = metadata["guid"]
            fileDict["Status"] = "Waiting"

            final[fileName] = metadata
            final[fileName]["filedict"] = fileDict
            final[fileName]["localpath"] = "%s/%s" % (os.getcwd(), fileName)

        # Sanity check all final candidate metadata keys are present (return S_ERROR if not)
        mandatoryKeys = ["guid", "filedict"]  # filedict is used for requests (this method adds guid and filedict)
        for fileName, metadata in final.items():
            for key in mandatoryKeys:
                if key not in metadata:
                    return S_ERROR("File %s has missing %s" % (fileName, key))

        return S_OK(final)
Example 8
    def getFileMetadata(self, candidateFiles):
        """Returns the candidate file dictionary with associated metadata.
        
        @param candidateFiles: The input candidate files dictionary has the structure:
        {'lfn':'','path':'','workflowSE':''}
           
        This also assumes the files are in the current working directory.
        @return: File Metadata
        """
        #Retrieve the POOL File GUID(s) for any final output files
        self.log.info('Will search GUIDs for: %s' %
                      (', '.join(candidateFiles.keys())))
        pfnGUIDs = {}
        generated = []
        for fname in candidateFiles.keys():
            guid = makeGuid(fname)
            pfnGUIDs[fname] = guid
            generated.append(fname)
        pfnGUID = S_OK(pfnGUIDs)
        pfnGUID['generated'] = generated

        self.log.debug('Generated GUID(s) for the following files ',
                       ', '.join(pfnGUID['generated']))

        for pfn, guid in pfnGUID['Value'].items():
            candidateFiles[pfn]['GUID'] = guid

        #Get all additional metadata about the file necessary for requests
        final = {}
        for fileName, metadata in candidateFiles.items():
            fileDict = {}
            fileDict['LFN'] = metadata['lfn']
            fileDict['Size'] = os.path.getsize(fileName)
            fileDict['Addler'] = fileAdler(fileName)
            fileDict['GUID'] = metadata['GUID']
            fileDict['Status'] = "Waiting"

            final[fileName] = metadata
            final[fileName]['filedict'] = fileDict
            final[fileName]['localpath'] = '%s/%s' % (os.getcwd(), fileName)

        gLogger.verbose("Full file dict", str(final))

        #Sanity check all final candidate metadata keys are present (return S_ERROR if not)
        mandatoryKeys = ['GUID', 'filedict']  #filedict is used for requests (this method adds guid and filedict)
        for fileName, metadata in final.items():
            for key in mandatoryKeys:
                if key not in metadata:
                    return S_ERROR('File %s has missing %s' % (fileName, key))

        return S_OK(final)
Example 9
  def getFileMetadata(self, candidateFiles):
    """ Returns the candidate file dictionary with associated metadata.

        The input candidate files dictionary has the structure:
        {'foo_1.txt': {'lfn': '/lhcb/MC/2010/DST/00012345/0001/foo_1.txt',
                       'type': 'txt',
                       'workflowSE': SE1},
        'bar_2.py': {'lfn': '/lhcb/MC/2010/DST/00012345/0001/bar_2.py',
                     'type': 'py',
                     'workflowSE': 'SE2'},
        }

        this also assumes the files are in the current working directory.
    """
    # Retrieve the POOL File GUID(s) for any final output files
    self.log.info('Will search for POOL GUIDs for: %s' % (', '.join(candidateFiles.keys())))
    pfnGUID = getGUID(candidateFiles.keys())
    if not pfnGUID['OK']:
      self.log.error('''PoolXMLFile failed to determine POOL GUID(s) for output file list,
      these will be generated by the DataManager''', pfnGUID['Message'])
      for fileName in candidateFiles.keys():
        candidateFiles[fileName]['guid'] = ''
    elif pfnGUID['generated']:
      self.log.warn('PoolXMLFile generated GUID(s) for the following files ', ', '.join(pfnGUID['generated']))
    else:
      self.log.info('GUIDs found for all specified POOL files: %s' % (', '.join(candidateFiles.keys())))

    for pfn, guid in pfnGUID['Value'].iteritems():
      candidateFiles[pfn]['guid'] = guid

    # Get all additional metadata about the file necessary for requests
    final = {}
    for fileName, metadata in candidateFiles.iteritems():
      fileDict = {}
      fileDict['LFN'] = metadata['lfn']
      fileDict['Size'] = os.path.getsize(fileName)
      fileDict['Checksum'] = fileAdler(fileName)
      fileDict['ChecksumType'] = 'ADLER32'
      fileDict['GUID'] = metadata['guid']
      fileDict['Status'] = 'Waiting'

      final[fileName] = metadata
      final[fileName]['filedict'] = fileDict
      final[fileName]['localpath'] = '%s/%s' % (os.getcwd(), fileName)

    # Sanity check all final candidate metadata keys are present (raise RuntimeError if not)
    mandatoryKeys = ['guid', 'filedict']  # filedict is used for requests (this method adds guid and filedict)
    for fileName, metadata in final.iteritems():
      for key in mandatoryKeys:
        if key not in metadata:
          raise RuntimeError("File %s has missing %s" % (fileName, key))

    return final
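Note: unlike the other variants in this collection, this version raises RuntimeError and returns a plain dictionary instead of S_OK/S_ERROR. A thin adapter can restore the convention for callers that expect it; a sketch under that assumption:

from DIRAC import S_OK, S_ERROR

def getFileMetadataSafe(module, candidateFiles):
    # Hypothetical adapter around the raising variant above
    try:
        return S_OK(module.getFileMetadata(candidateFiles))
    except RuntimeError as e:
        return S_ERROR(str(e))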
Example 10
    def getFileMetadata(self, candidateFiles):
        """Returns the candidate file dictionary with associated metadata.
        
        @param candidateFiles: The input candidate files dictionary has the structure:
        {'lfn':'','path':'','workflowSE':''}
           
        This also assumes the files are in the current working directory.
        @return: File Metadata
        """
        #Retrieve the POOL File GUID(s) for any final output files
        self.log.info('Will search GUIDs for: %s' %(', '.join(candidateFiles.keys())))
        pfnGUIDs = {}
        generated = []
        for fname in candidateFiles.keys():
            guid = makeGuid(fname)
            pfnGUIDs[fname] = guid
            generated.append(fname)
        pfnGUID = S_OK(pfnGUIDs)
        pfnGUID['generated'] = generated

        self.log.debug('Generated GUID(s) for the following files ', ', '.join(pfnGUID['generated']))

        for pfn, guid in pfnGUID['Value'].items():
            candidateFiles[pfn]['GUID'] = guid
        
        #Get all additional metadata about the file necessary for requests
        final = {}
        for fileName, metadata in candidateFiles.items():
            fileDict = {}
            fileDict['LFN'] = metadata['lfn']
            fileDict['Size'] = os.path.getsize(fileName)
            fileDict['Addler'] = fileAdler(fileName)
            fileDict['GUID'] = metadata['GUID']
            fileDict['Status'] = "Waiting"   
          
            final[fileName] = metadata
            final[fileName]['filedict'] = fileDict
            final[fileName]['localpath'] = '%s/%s' % (os.getcwd(), fileName)  
        
        gLogger.verbose("Full file dict", str(final))
        
        #Sanity check all final candidate metadata keys are present (return S_ERROR if not)
        mandatoryKeys = ['GUID', 'filedict'] #filedict is used for requests (this method adds guid and filedict)
        for fileName, metadata in final.items():
            for key in mandatoryKeys:
                if key not in metadata:
                    return S_ERROR('File %s has missing %s' % (fileName, key))
        
        return S_OK(final)
Example 11
def main():
    dm = DataManager()

    fileTupleBuffer = []

    counter = 0
    for f in files:
        counter += 1

        if not f.startswith('/cefs'):
            gLogger.error('File must be under "/cefs"')
            return 1

        lfn = '/cepc/lustre-ro' + f

        result = fcc.isFile(lfn)
        if result['OK'] and lfn in result['Value']['Successful'] and result['Value']['Successful'][lfn]:
            continue

        size = os.path.getsize(f)
        adler32 = fileAdler(f)
        guid = makeGuid()
        fileTuple = (lfn, f, size, _se, guid, adler32)
        fileTupleBuffer.append(fileTuple)
        gLogger.debug('Register to lfn: %s' % lfn)
        gLogger.debug('fileTuple: %s' % (fileTuple, ))

        if len(fileTupleBuffer) >= _bufferSize:
            result = dm.registerFile(fileTupleBuffer)
            print('register result', result)

            if not result['OK']:
                gLogger.error('Register file failed')
                return 1
            del fileTupleBuffer[:]
            gLogger.debug('%s files registered' % counter)

    if fileTupleBuffer:
        result = dm.registerFile(fileTupleBuffer)
        print('register result', result)

        if not result['OK']:
            gLogger.error('Register file failed')
            return 1
        del fileTupleBuffer[:]

    gLogger.info('In total %s files registered' % counter)
    return 0
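Note: the tuples buffered above must keep a fixed field order for DataManager.registerFile. Spelled out with illustrative values (the order is taken from this example, not from separate documentation):

fileTuple = (
    '/cepc/lustre-ro/cefs/data/run001.root',  # LFN
    '/cefs/data/run001.root',                 # local file path
    1024,                                     # size in bytes
    'SOME-SE',                                # storage element (the _se above)
    'e6c69ff0c5a44f4fb22dd8cbad3a9b2e',       # GUID from makeGuid()
    '0a1b2c3d',                               # Adler-32 checksum from fileAdler()
)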
Example 12
 def files(self, userName, userGroup):
     """get list of files in user domain"""
     files = []
     for i in range(10):
         fname = "/tmp/testUserFile-%s" % i
         if userGroup == "dteam_user":
             lfn = "/lhcb/user/%s/%s/%s" % (userName[0], userName, fname.split("/")[-1])
         else:
             lfn = "/lhcb/certification/test/rmsdms/%s" % fname.split("/")[-1]
         fh = open(fname, "w+")
         for i in range(100):
             fh.write(str(random.randint(0, i)))
         fh.close()
         size = os.stat(fname).st_size
         checksum = fileAdler(fname)
         guid = makeGuid(fname)
         files.append((fname, lfn, size, checksum, guid))
     return files
Example 13
 def files( self, userName, userGroup ):
   """ get list of files in user domain """
   files = []
   for i in range( 10 ):
     fname = "/tmp/testUserFile-%s" % i
     if userGroup == "lhcb_user":
       lfn = "/lhcb/user/%s/%s/%s" % ( userName[0], userName, fname.split( "/" )[-1] )
     else:
       lfn = "/lhcb/certification/test/rmsdms/%s" % fname.split( "/" )[-1]
     fh = open( fname, "w+" )
     for i in range( 100 ):
       fh.write( str( random.randint( 0, i ) ) )
     fh.close()
     size = os.stat( fname ).st_size
     checksum = fileAdler( fname )
     guid = makeGuid( fname )
     files.append( ( fname, lfn, size, checksum, guid ) )
   return files
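Note: a hedged sketch of how the (fname, lfn, size, checksum, guid) tuples produced by this fixture are typically consumed; it assumes DataManager.putAndRegister accepts guid and checksum keyword arguments, and the SE name is illustrative:

from DIRAC import S_OK
from DIRAC.DataManagementSystem.Client.DataManager import DataManager

def upload_files(fixture, userName, userGroup, se='CERN-USER'):
    # Hypothetical consumer of the files() fixture defined above
    dm = DataManager()
    for fname, lfn, size, checksum, guid in fixture.files(userName, userGroup):
        res = dm.putAndRegister(lfn, fname, se, guid=guid, checksum=checksum)
        if not res['OK']:
            return res  # propagate the first failure
    return S_OK()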
Example 14
def main():
    # Registering arguments will automatically add their description to the help menu
    Script.registerArgument(["File:     File Name"])
    _, files = Script.parseCommandLine(ignoreErrors=False)

    exitCode = 0

    import DIRAC
    from DIRAC.Core.Utilities.Adler import fileAdler

    for fa in files:
        adler = fileAdler(fa)
        if adler:
            print(fa.rjust(100), adler.ljust(10))  # pylint: disable=no-member
        else:
            print("ERROR %s: Failed to get adler" % fa)
            exitCode = 2

    DIRAC.exit(exitCode)
Example 15
def main():
    Script.parseCommandLine(ignoreErrors=False)
    files = Script.getPositionalArgs()
    if len(files) == 0:
        Script.showHelp()

    exitCode = 0

    import DIRAC
    from DIRAC.Core.Utilities.Adler import fileAdler

    for fa in files:
        adler = fileAdler(fa)
        if adler:
            print(fa.rjust(100), adler.ljust(10))  # pylint: disable=no-member
        else:
            print('ERROR %s: Failed to get adler' % fa)
            exitCode = 2

    DIRAC.exit(exitCode)
Example 16
    def _direct_putFile(self, urls, sourceSize=0):
        """Upload a local file.

        .. warning:: no 3rd party copy possible

        :param urls: dictionary { urls : localFile }
        :param sourceSize: size of the file in bytes. Mandatory for third party copy (WHY ???)
                             Also, this parameter makes it essentially a non-bulk operation for
                             third party copy, unless all files have the same size...
        :returns: * Successful dict: { path : size }
                  * Failed dict: { path : error message }
                  * S_ERROR in case of argument problems
        """

        log = LOG.getSubLogger("putFile")

        # the @_extractKeyFromS3Path decorator transformed the URLs into keys
        keys = urls

        failed = {}
        successful = {}

        for dest_key, src_file in keys.items():
            try:
                cks = fileAdler(src_file)
                if not cks:
                    log.warn("Cannot get ADLER32 checksum for %s" % src_file)

                with open(src_file, "rb") as src_fd:
                    self.s3_client.put_object(Body=src_fd,
                                              Bucket=self.bucketName,
                                              Key=dest_key,
                                              Metadata={"Checksum": cks})

                successful[dest_key] = os.path.getsize(src_file)

            except Exception as e:
                failed[dest_key] = repr(e)

        return S_OK({"Failed": failed, "Successful": successful})
Example 17
    def __getFileStat(path):
        """ Get the file stat information
    """
        resultDict = {}
        try:
            statTuple = os.stat(path)
        except OSError as x:
            if str(x).find('No such file') >= 0:
                resultDict['Exists'] = False
                return S_OK(resultDict)
            return S_ERROR('Failed to get metadata for %s' % path)

        resultDict['Exists'] = True
        mode = statTuple[stat.ST_MODE]
        resultDict['Type'] = "File"
        resultDict['File'] = True
        resultDict['Directory'] = False
        if stat.S_ISDIR(mode):
            resultDict['Type'] = "Directory"
            resultDict['File'] = False
            resultDict['Directory'] = True
        resultDict['Size'] = statTuple[stat.ST_SIZE]
        resultDict['TimeStamps'] = (statTuple[stat.ST_ATIME],
                                    statTuple[stat.ST_MTIME],
                                    statTuple[stat.ST_CTIME])
        resultDict['Cached'] = 1
        resultDict['Migrated'] = 0
        resultDict['Lost'] = 0
        resultDict['Unavailable'] = 0
        resultDict['Mode'] = stat.S_IMODE(mode)

        if resultDict['File']:
            cks = fileAdler(path)
            resultDict['Checksum'] = cks

        resultDict = StorageBase._addCommonMetadata(resultDict)

        return S_OK(resultDict)
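Note: the 'No such file' substring match in the except branch above is sensitive to the exact message wording; comparing errno is the robust equivalent. A condensed sketch of just the existence part (the helper name is ours):

import errno
import os

from DIRAC import S_OK, S_ERROR

def stat_exists(path):
    # Hypothetical variant: detect a missing file via errno, not message text
    try:
        os.stat(path)
    except OSError as x:
        if x.errno == errno.ENOENT:
            return S_OK({'Exists': False})
        return S_ERROR('Failed to get metadata for %s' % path)
    return S_OK({'Exists': True})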
Example 18
    def __getFileStat(path):
        """Get the file stat information"""
        resultDict = {}
        try:
            statTuple = os.stat(path)
        except OSError as x:
            if str(x).find("No such file") >= 0:
                resultDict["Exists"] = False
                return S_OK(resultDict)
            return S_ERROR("Failed to get metadata for %s" % path)

        resultDict["Exists"] = True
        mode = statTuple[stat.ST_MODE]
        resultDict["Type"] = "File"
        resultDict["File"] = True
        resultDict["Directory"] = False
        if stat.S_ISDIR(mode):
            resultDict["Type"] = "Directory"
            resultDict["File"] = False
            resultDict["Directory"] = True
        resultDict["Size"] = statTuple[stat.ST_SIZE]
        resultDict["TimeStamps"] = (statTuple[stat.ST_ATIME],
                                    statTuple[stat.ST_MTIME],
                                    statTuple[stat.ST_CTIME])
        resultDict["Cached"] = 1
        resultDict["Migrated"] = 0
        resultDict["Lost"] = 0
        resultDict["Unavailable"] = 0
        resultDict["Mode"] = stat.S_IMODE(mode)

        if resultDict["File"]:
            cks = fileAdler(path)
            resultDict["Checksum"] = cks

        resultDict = StorageBase._addCommonMetadata(resultDict)

        return S_OK(resultDict)
Example 19
  def __getFileStat(path):
    """ Get the file stat information
    """
    resultDict = {}
    try:
      statTuple = os.stat(path)
    except OSError as x:
      if str(x).find('No such file') >= 0:
        resultDict['Exists'] = False
        return S_OK(resultDict)
      return S_ERROR('Failed to get metadata for %s' % path)

    resultDict['Exists'] = True
    mode = statTuple[stat.ST_MODE]
    resultDict['Type'] = "File"
    resultDict['File'] = True
    resultDict['Directory'] = False
    if stat.S_ISDIR(mode):
      resultDict['Type'] = "Directory"
      resultDict['File'] = False
      resultDict['Directory'] = True
    resultDict['Size'] = statTuple[stat.ST_SIZE]
    resultDict['TimeStamps'] = (statTuple[stat.ST_ATIME], statTuple[stat.ST_MTIME], statTuple[stat.ST_CTIME])
    resultDict['Cached'] = 1
    resultDict['Migrated'] = 0
    resultDict['Lost'] = 0
    resultDict['Unavailable'] = 0
    resultDict['Mode'] = stat.S_IMODE(mode)

    if resultDict['File']:
      cks = fileAdler(path)
      resultDict['Checksum'] = cks

    resultDict = StorageBase._addCommonMetadata(resultDict)

    return S_OK(resultDict)
Example 20
        if lfn in lfnQuery:
            if counter % 1000 == 0:
                gLogger.notice('Skip file in query counter: %s' % counter)
            continue

        if existCheck:
            result = fcc.isFile(lfn)
            if result['OK'] and lfn in result['Value']['Successful'] and result['Value']['Successful'][lfn]:
                if counter % 1000 == 0:
                    gLogger.notice('Skip file existed counter: %s' % counter)
                continue

        size = os.path.getsize(fullFn)
        adler32 = fileAdler(fullFn)
        guid = makeGuid()
        fileTuple = (lfn, fullFn, size, toSE, guid, adler32)
        fileTupleBuffer.append(fileTuple)
        gLogger.debug('Register to lfn: %s' % lfn)
        gLogger.debug('fileTuple: %s' % (fileTuple, ))

        if len(fileTupleBuffer) >= bufferSize:
            result = dm.registerFile(fileTupleBuffer)
            if not result['OK']:
                gLogger.error('Can not register %s' % fullFn)
                exit(1)
            del fileTupleBuffer[:]
            gLogger.notice('%s files registered' % counter)

if fileTupleBuffer:
Example 21
    def testWorkflow(self):
        """ This perform a complete workflow puting, removing, stating files and directories
    """

        putDir = {
            os.path.join(DESTINATION_PATH, 'Workflow/FolderA'):
            os.path.join(self.LOCAL_PATH, 'Workflow/FolderA'),
            os.path.join(DESTINATION_PATH, 'Workflow/FolderB'):
            os.path.join(self.LOCAL_PATH, 'Workflow/FolderB')
        }

        createDir = [
            os.path.join(DESTINATION_PATH, 'Workflow/FolderA/FolderAA'),
            os.path.join(DESTINATION_PATH, 'Workflow/FolderA/FolderABA'),
            os.path.join(DESTINATION_PATH, 'Workflow/FolderA/FolderAAB')
        ]

        putFile = {
            os.path.join(DESTINATION_PATH, 'Workflow/FolderA/File1'):
            os.path.join(self.LOCAL_PATH, 'Workflow/File1'),
            os.path.join(DESTINATION_PATH, 'Workflow/FolderAA/File1'):
            os.path.join(self.LOCAL_PATH, 'Workflow/File1'),
            os.path.join(DESTINATION_PATH, 'Workflow/FolderBB/File2'):
            os.path.join(self.LOCAL_PATH, 'Workflow/File2'),
            os.path.join(DESTINATION_PATH, 'Workflow/FolderB/File2'):
            os.path.join(self.LOCAL_PATH, 'Workflow/File2'),
            os.path.join(DESTINATION_PATH, 'Workflow/File3'):
            os.path.join(self.LOCAL_PATH, 'Workflow/File3')
        }

        isFile = {
            os.path.join(DESTINATION_PATH, 'Workflow/FolderA/File1'):
            os.path.join(self.LOCAL_PATH, 'Workflow/File1'),
            os.path.join(DESTINATION_PATH, 'Workflow/FolderB/FileB'):
            os.path.join(self.LOCAL_PATH, 'Workflow/FolderB/FileB'),
        }

        listDir = [
            os.path.join(DESTINATION_PATH, 'Workflow'),
            os.path.join(DESTINATION_PATH, 'Workflow/FolderA'),
            os.path.join(DESTINATION_PATH, 'Workflow/FolderB')
        ]

        getDir = [
            os.path.join(DESTINATION_PATH, 'Workflow/FolderA'),
            os.path.join(DESTINATION_PATH, 'Workflow/FolderB')
        ]

        removeFile = [os.path.join(DESTINATION_PATH, 'Workflow/FolderA/File1')]
        rmdir = [os.path.join(DESTINATION_PATH, 'Workflow')]

        ##### Computing local adler and size #####

        fileAdlers = {}
        fileSizes = {}

        for lfn, localFn in isFile.iteritems():
            fileAdlers[lfn] = fileAdler(localFn)
            fileSizes[lfn] = getSize(localFn)

        ########## uploading directory #############
        res = self.writeSE.putDirectory(putDir)
        self.assertEqual(res['OK'], True)
        # time.sleep(5)
        res = self.readSE.listDirectory(listDir)
        self.assertEqual(
            any(
                os.path.join(DESTINATION_PATH, 'Workflow/FolderA/FileA') in
                dictKey for dictKey in res['Value']['Successful'][os.path.join(
                    DESTINATION_PATH, 'Workflow/FolderA')]['Files'].keys()),
            True)
        self.assertEqual(
            any(
                os.path.join(DESTINATION_PATH, 'Workflow/FolderB/FileB') in
                dictKey for dictKey in res['Value']['Successful'][os.path.join(
                    DESTINATION_PATH, 'Workflow/FolderB')]['Files'].keys()),
            True)

        ########## createDir #############
        res = self.writeSE.createDirectory(createDir)
        self.assertEqual(res['OK'], True)
        res = res['Value']
        self.assertEqual(res['Successful'][createDir[0]], True)
        self.assertEqual(res['Successful'][createDir[1]], True)
        self.assertEqual(res['Successful'][createDir[2]], True)

        ######## putFile ########
        res = self.writeSE.putFile(putFile)
        self.assertEqual(res['OK'], True)
        # time.sleep(5)
        res = self.readSE.isFile(isFile)
        self.assertEqual(res['OK'], True)
        self.assertTrue(
            all([x for x in res['Value']['Successful'].itervalues()]))
        # self.assertEqual( res['Value']['Successful'][isFile[0]], True )
        # self.assertEqual( res['Value']['Successful'][isFile[1]], True )

        ######## getMetadata ###########
        res = self.readSE.getFileMetadata(isFile)
        self.assertEqual(res['OK'], True)
        res = res['Value']['Successful']
        self.assertEqual(
            any(path in resKey for path in isFile for resKey in res.keys()),
            True)

        # Checking that the checksums and sizes are correct
        for lfn in isFile:
            self.assertEqual(res[lfn]['Checksum'], fileAdlers[lfn])
            self.assertEqual(res[lfn]['Size'], fileSizes[lfn])

        ####### getDirectory ######
        res = self.readSE.getDirectory(getDir,
                                       os.path.join(self.LOCAL_PATH, 'getDir'))
        self.assertEqual(res['OK'], True)
        res = res['Value']
        self.assertEqual(
            any(getDir[0] in dictKey for dictKey in res['Successful']), True)
        self.assertEqual(
            any(getDir[1] in dictKey for dictKey in res['Successful']), True)

        ###### removeFile ##########
        res = self.writeSE.removeFile(removeFile)
        self.assertEqual(res['OK'], True)
        res = self.readSE.exists(removeFile)
        self.assertEqual(res['OK'], True)
        self.assertEqual(res['Value']['Successful'][removeFile[0]], False)

        ###### remove non existing file #####
        res = self.writeSE.removeFile(removeFile)
        self.assertEqual(res['OK'], True)
        res = self.readSE.exists(removeFile)
        self.assertEqual(res['OK'], True)
        self.assertEqual(res['Value']['Successful'][removeFile[0]], False)

        ########### removing directory  ###########
        res = self.writeSE.removeDirectory(rmdir, True)

        res = self.readSE.exists(rmdir)
        self.assertEqual(res['OK'], True)
        self.assertEqual(res['Value']['Successful'][rmdir[0]], False)
Example 22
def setuptest(request):
    global local_path, putDir, createDir, putFile, isFile, listDir,\
        getDir, getFile, rmDir, removeFile, se, filesInFolderAandB, fileAdlers, fileSizes
    local_path = tempfile.mkdtemp()

    # create the local structure
    workPath = os.path.join(local_path, 'Workflow')
    os.mkdir(workPath)

    os.mkdir(os.path.join(workPath, 'FolderA'))
    with open(os.path.join(workPath, 'FolderA', 'FileA'), 'w') as f:
        f.write(_mul('FileA'))

    os.mkdir(os.path.join(workPath, 'FolderA', 'FolderAA'))
    with open(os.path.join(workPath, 'FolderA', 'FolderAA', 'FileAA'),
              'w') as f:
        f.write(_mul('FileAA'))

    os.mkdir(os.path.join(workPath, 'FolderB'))
    with open(os.path.join(workPath, 'FolderB', 'FileB'), 'w') as f:
        f.write(_mul('FileB'))

    for fn in ["File1", "File2", "File3"]:
        with open(os.path.join(workPath, fn), 'w') as f:
            f.write(_mul(fn))

    se = StorageElement(STORAGE_NAME)

    putDir = {
        os.path.join(DESTINATION_PATH, 'Workflow/FolderA'):
        os.path.join(local_path, 'Workflow/FolderA'),
        os.path.join(DESTINATION_PATH, 'Workflow/FolderB'):
        os.path.join(local_path, 'Workflow/FolderB')
    }

    createDir = [
        os.path.join(DESTINATION_PATH, 'Workflow/FolderA/FolderAA'),
        os.path.join(DESTINATION_PATH, 'Workflow/FolderA/FolderABA'),
        os.path.join(DESTINATION_PATH, 'Workflow/FolderA/FolderAAB')
    ]

    putFile = {
        os.path.join(DESTINATION_PATH, 'Workflow/FolderA/File1'):
        os.path.join(local_path, 'Workflow/File1'),
        os.path.join(DESTINATION_PATH, 'Workflow/FolderAA/File1'):
        os.path.join(local_path, 'Workflow/File1'),
        os.path.join(DESTINATION_PATH, 'Workflow/FolderBB/File2'):
        os.path.join(local_path, 'Workflow/File2'),
        os.path.join(DESTINATION_PATH, 'Workflow/FolderB/File2'):
        os.path.join(local_path, 'Workflow/File2'),
        os.path.join(DESTINATION_PATH, 'Workflow/File3'):
        os.path.join(local_path, 'Workflow/File3')
    }

    isFile = putFile.keys()

    listDir = [
        os.path.join(DESTINATION_PATH, 'Workflow'),
        os.path.join(DESTINATION_PATH, 'Workflow/FolderA'),
        os.path.join(DESTINATION_PATH, 'Workflow/FolderB')
    ]

    getDir = [
        os.path.join(DESTINATION_PATH, 'Workflow/FolderA'),
        os.path.join(DESTINATION_PATH, 'Workflow/FolderB')
    ]

    removeFile = [os.path.join(DESTINATION_PATH, 'Workflow/FolderA/File1')]
    rmdir = [os.path.join(DESTINATION_PATH, 'Workflow')]

    # This list is used to check for existence of files
    # after uploading the directory: they should NOT exist.
    # Uploading a directory does not work.
    filesInFolderAandB = []
    for dirName in ('Workflow/FolderA', 'Workflow/FolderB'):
        for root, _dirs, files in os.walk(os.path.join(local_path, dirName)):
            for fn in files:
                filesInFolderAandB.append(
                    os.path.join(DESTINATION_PATH,
                                 root.replace(local_path, '').strip('/'), fn))
    filesInFolderAandB = dict.fromkeys(filesInFolderAandB, False)

    fileAdlers = {}
    fileSizes = {}

    for lfn, localFn in putFile.iteritems():
        fileAdlers[lfn] = fileAdler(localFn)
        fileSizes[lfn] = getSize(localFn)

    clearDirectory(se, local_path, DESTINATION_PATH)

    def teardown():
        print("Cleaning local test")
        shutil.rmtree(local_path)
        clearDirectory(se, local_path, DESTINATION_PATH)

    request.addfinalizer(teardown)
    return local_path, random.randint(0, 100)  # provide the fixture value
Example 23
########################################################################
"""
  Calculate adler32 of the supplied file
"""
__RCSID__ = "$Id$"
import DIRAC
from DIRAC.Core.Utilities.Adler import fileAdler
from DIRAC.Core.Base import Script

Script.setUsageMessage('\n'.join([
    __doc__.split('\n')[1], 'Usage:',
    '  %s [option|cfgfile] ... File ...' % Script.scriptName, 'Arguments:',
    '  File:     File Name'
]))
Script.parseCommandLine(ignoreErrors=False)
files = Script.getPositionalArgs()
if len(files) == 0:
    Script.showHelp()

exitCode = 0

for fa in files:
    adler = fileAdler(fa)
    if adler:
        print fa.rjust(100), adler.ljust(10)
    else:
        print 'ERROR %s: Failed to get adler' % fa
        exitCode = 2

DIRAC.exit(exitCode)
Example 24
  from DIRAC.RequestManagementSystem.Client.Request import Request
  from DIRAC.RequestManagementSystem.Client.Operation import Operation
  from DIRAC.RequestManagementSystem.Client.File import File
  from DIRAC.RequestManagementSystem.Client.ReqClient import ReqClient
  from DIRAC.Core.Utilities.Adler import fileAdler

  if not os.path.exists( PFN ):
    gLogger.error( "%s does not exist" % PFN )
    DIRAC.exit( -1 )
  if not os.path.isfile( PFN ):
    gLogger.error( "%s is not a file" % PFN )
    DIRAC.exit( -1 )

  PFN = os.path.abspath( PFN )
  size = os.path.getsize( PFN )
  adler32 = fileAdler( PFN )

  request = Request()
  request.RequestName = requestName

  putAndRegister = Operation()
  putAndRegister.Type = "PutAndRegister"
  putAndRegister.TargetSE = targetSE
  opFile = File()
  opFile.LFN = LFN
  opFile.PFN = PFN
  opFile.Size = size
  opFile.Checksum = adler32
  opFile.ChecksumType = "ADLER32"
  putAndRegister.addFile( opFile )
Example 25
########################################################################
"""
  Calculate adler32 of the supplied file
"""
__RCSID__ = "$Id$"
import DIRAC
from DIRAC.Core.Utilities.Adler import fileAdler
from DIRAC.Core.Base import Script

Script.setUsageMessage('\n'.join([
    __doc__.split('\n')[1], 'Usage:',
    '  %s [option|cfgfile] ... File ...' % Script.scriptName, 'Arguments:',
    '  File:     File Name'
]))
Script.parseCommandLine(ignoreErrors=False)
files = Script.getPositionalArgs()
if len(files) == 0:
    Script.showHelp()

exitCode = 0

for fname in files:  # avoid shadowing the builtin 'file'
    adler = fileAdler(fname)
    if adler:
        print fname.rjust(100), adler.ljust(10)
    else:
        print 'ERROR %s: Failed to get adler' % fname
        exitCode = 2

DIRAC.exit(exitCode)
Example 26

  from DIRAC.RequestManagementSystem.Client.Request import Request
  from DIRAC.RequestManagementSystem.Client.Operation import Operation
  from DIRAC.RequestManagementSystem.Client.File import File
  from DIRAC.RequestManagementSystem.Client.ReqClient import ReqClient
  from DIRAC.Core.Utilities.Adler import fileAdler

  if not os.path.exists( PFN ):
    gLogger.error( "%s does not exist" % PFN )
    DIRAC.exit( -1 )
  if not os.path.isfile( PFN ):
    gLogger.error( "%s is not a file" % PFN )
    DIRAC.exit( -1 )

  PFN = os.path.abspath( PFN )
  size = os.path.getsize( PFN )
  adler32 = fileAdler( PFN )

  request = Request()
  request.RequestName = requestName

  putAndRegister = Operation()
  putAndRegister.Type = "PutAndRegister"
  putAndRegister.TargetSE = targetSE
  opFile = File()
  opFile.LFN = LFN
  opFile.PFN = PFN
  opFile.Size = size
  opFile.Checksum = adler32
  opFile.ChecksumType = "ADLER32"
  putAndRegister.addFile( opFile )
  request.addOperation( putAndRegister )
Example 27

########################################################################
"""
  Calculate adler32 of the supplied file
"""
__RCSID__ = "$Id$"
import DIRAC
from DIRAC.Core.Utilities.Adler     import fileAdler
from DIRAC.Core.Base                import Script

Script.setUsageMessage( '\n'.join( [ __doc__.split( '\n' )[1],
                                     'Usage:',
                                     '  %s [option|cfgfile] ... File ...' % Script.scriptName,
                                     'Arguments:',
                                     '  File:     File Name' ] ) )
Script.parseCommandLine( ignoreErrors = False )
files = Script.getPositionalArgs()
if len( files ) == 0:
  Script.showHelp()

exitCode = 0

for fa in files:
  adler = fileAdler( fa )
  if adler:
    print fa.rjust( 100 ), adler.ljust( 10 )
  else:
    print 'ERROR %s: Failed to get adler' % fa
    exitCode = 2

DIRAC.exit( exitCode )
Example 28
########################################################################
"""
  Calculate adler32 of the supplied file
"""
__RCSID__ = "2b9038b (2010-12-15 07:24:22 +0000) Ricardo Graciani <*****@*****.**>"
import DIRAC
from DIRAC.Core.Utilities.Adler     import fileAdler
from DIRAC.Core.Base                import Script

Script.setUsageMessage( '\n'.join( [ __doc__.split( '\n' )[1],
                                     'Usage:',
                                     '  %s [option|cfgfile] ... File ...' % Script.scriptName,
                                     'Arguments:',
                                     '  File:     File Name' ] ) )
Script.parseCommandLine( ignoreErrors = False )
files = Script.getPositionalArgs()
if len( files ) == 0:
  Script.showHelp()

exitCode = 0

for fname in files:  # avoid shadowing the builtin 'file'
  adler = fileAdler( fname )
  if adler:
    print fname.rjust( 100 ), adler.ljust( 10 )
  else:
    print 'ERROR %s: Failed to get adler' % fname
    exitCode = 2

DIRAC.exit( exitCode )
Example 29

  def testWorkflow(self):
    """ This performs a complete workflow: putting, removing, and stating files and directories
    """

    putDir = {os.path.join(DESTINATION_PATH,
                           'Workflow/FolderA'): os.path.join(self.LOCAL_PATH,
                                                             'Workflow/FolderA'),
              os.path.join(DESTINATION_PATH,
                           'Workflow/FolderB'): os.path.join(self.LOCAL_PATH,
                                                             'Workflow/FolderB')}

    createDir = [os.path.join(DESTINATION_PATH, 'Workflow/FolderA/FolderAA'),
                 os.path.join(DESTINATION_PATH, 'Workflow/FolderA/FolderABA'),
                 os.path.join(DESTINATION_PATH, 'Workflow/FolderA/FolderAAB')
                 ]

    putFile = {os.path.join(DESTINATION_PATH,
                            'Workflow/FolderA/File1'): os.path.join(self.LOCAL_PATH,
                                                                    'Workflow/File1'),
               os.path.join(DESTINATION_PATH,
                            'Workflow/FolderAA/File1'): os.path.join(self.LOCAL_PATH,
                                                                     'Workflow/File1'),
               os.path.join(DESTINATION_PATH,
                            'Workflow/FolderBB/File2'): os.path.join(self.LOCAL_PATH,
                                                                     'Workflow/File2'),
               os.path.join(DESTINATION_PATH,
                            'Workflow/FolderB/File2'): os.path.join(self.LOCAL_PATH,
                                                                    'Workflow/File2'),
               os.path.join(DESTINATION_PATH,
                            'Workflow/File3'): os.path.join(self.LOCAL_PATH,
                                                            'Workflow/File3')}

    isFile = {os.path.join(DESTINATION_PATH,
                           'Workflow/FolderA/File1'): os.path.join(self.LOCAL_PATH,
                                                                   'Workflow/File1'),
              os.path.join(DESTINATION_PATH,
                           'Workflow/FolderB/FileB'): os.path.join(self.LOCAL_PATH,
                                                                   'Workflow/FolderB/FileB'),
              }

    listDir = [os.path.join(DESTINATION_PATH, 'Workflow'),
               os.path.join(DESTINATION_PATH, 'Workflow/FolderA'),
               os.path.join(DESTINATION_PATH, 'Workflow/FolderB')
               ]

    getDir = [os.path.join(DESTINATION_PATH, 'Workflow/FolderA'),
              os.path.join(DESTINATION_PATH, 'Workflow/FolderB')
              ]

    removeFile = [os.path.join(DESTINATION_PATH, 'Workflow/FolderA/File1')]
    rmdir = [os.path.join(DESTINATION_PATH, 'Workflow')]

    ##### Computing local adler and size #####

    fileAdlers = {}
    fileSizes = {}

    for lfn, localFn in isFile.iteritems():
      fileAdlers[lfn] = fileAdler(localFn)
      fileSizes[lfn] = getSize(localFn)

    ########## uploading directory #############
    res = self.writeSE.putDirectory(putDir)
    self.assertEqual(res['OK'], True)
    # time.sleep(5)
    res = self.readSE.listDirectory(listDir)
    self.assertEqual(any(os.path.join(DESTINATION_PATH, 'Workflow/FolderA/FileA') in dictKey for dictKey in
                         res['Value']['Successful'][os.path.join(DESTINATION_PATH, 'Workflow/FolderA')]['Files'].keys()), True)
    self.assertEqual(any(os.path.join(DESTINATION_PATH, 'Workflow/FolderB/FileB') in dictKey for dictKey in
                         res['Value']['Successful'][os.path.join(DESTINATION_PATH, 'Workflow/FolderB')]['Files'].keys()), True)

    ########## createDir #############
    res = self.writeSE.createDirectory(createDir)
    self.assertEqual(res['OK'], True)
    res = res['Value']
    self.assertEqual(res['Successful'][createDir[0]], True)
    self.assertEqual(res['Successful'][createDir[1]], True)
    self.assertEqual(res['Successful'][createDir[2]], True)

    ######## putFile ########
    res = self.writeSE.putFile(putFile)
    self.assertEqual(res['OK'], True)
    # time.sleep(5)
    res = self.readSE.isFile(isFile)
    self.assertEqual(res['OK'], True)
    self.assertTrue(all([x for x in res['Value']['Successful'].itervalues()]))
    # self.assertEqual( res['Value']['Successful'][isFile[0]], True )
    # self.assertEqual( res['Value']['Successful'][isFile[1]], True )

    ######## getMetadata ###########
    res = self.readSE.getFileMetadata(isFile)
    self.assertEqual(res['OK'], True)
    res = res['Value']['Successful']
    self.assertEqual(any(path in resKey for path in isFile for resKey in res.keys()), True)

    # Checking that the checksums and sizes are correct
    for lfn in isFile:
      self.assertEqual(res[lfn]['Checksum'], fileAdlers[lfn])
      self.assertEqual(res[lfn]['Size'], fileSizes[lfn])

    ####### getDirectory ######
    res = self.readSE.getDirectory(getDir, os.path.join(self.LOCAL_PATH, 'getDir'))
    self.assertEqual(res['OK'], True)
    res = res['Value']
    self.assertEqual(any(getDir[0] in dictKey for dictKey in res['Successful']), True)
    self.assertEqual(any(getDir[1] in dictKey for dictKey in res['Successful']), True)

    ###### removeFile ##########
    res = self.writeSE.removeFile(removeFile)
    self.assertEqual(res['OK'], True)
    res = self.readSE.exists(removeFile)
    self.assertEqual(res['OK'], True)
    self.assertEqual(res['Value']['Successful'][removeFile[0]], False)

    ###### remove non existing file #####
    res = self.writeSE.removeFile(removeFile)
    self.assertEqual(res['OK'], True)
    res = self.readSE.exists(removeFile)
    self.assertEqual(res['OK'], True)
    self.assertEqual(res['Value']['Successful'][removeFile[0]], False)

    ########### removing directory  ###########
    res = self.writeSE.removeDirectory(rmdir, True)

    res = self.readSE.exists(rmdir)
    self.assertEqual(res['OK'], True)
    self.assertEqual(res['Value']['Successful'][rmdir[0]], False)
Example 30

def main():
    from DIRAC.Core.Base.Script import parseCommandLine
    parseCommandLine()

    import DIRAC
    from DIRAC import gLogger

    args = Script.getPositionalArgs()

    requestName = None
    LFN = None
    PFN = None
    targetSE = None
    if len(args) != 4:
        Script.showHelp()
    else:
        requestName = args[0]
        LFN = args[1]
        PFN = args[2]
        targetSE = args[3]

    if not os.path.isabs(LFN):
        gLogger.error("LFN should be absolute path!!!")
        DIRAC.exit(-1)

    gLogger.info("will create request '%s' with 'PutAndRegister' "
                 "operation using %s pfn and %s target SE" %
                 (requestName, PFN, targetSE))

    from DIRAC.RequestManagementSystem.Client.Request import Request
    from DIRAC.RequestManagementSystem.Client.Operation import Operation
    from DIRAC.RequestManagementSystem.Client.File import File
    from DIRAC.RequestManagementSystem.Client.ReqClient import ReqClient
    from DIRAC.Core.Utilities.Adler import fileAdler

    if not os.path.exists(PFN):
        gLogger.error("%s does not exist" % PFN)
        DIRAC.exit(-1)
    if not os.path.isfile(PFN):
        gLogger.error("%s is not a file" % PFN)
        DIRAC.exit(-1)

    PFN = os.path.abspath(PFN)
    size = os.path.getsize(PFN)
    adler32 = fileAdler(PFN)

    request = Request()
    request.RequestName = requestName

    putAndRegister = Operation()
    putAndRegister.Type = "PutAndRegister"
    putAndRegister.TargetSE = targetSE
    opFile = File()
    opFile.LFN = LFN
    opFile.PFN = PFN
    opFile.Size = size
    opFile.Checksum = adler32
    opFile.ChecksumType = "ADLER32"
    putAndRegister.addFile(opFile)
    request.addOperation(putAndRegister)
    reqClient = ReqClient()
    putRequest = reqClient.putRequest(request)
    if not putRequest["OK"]:
        gLogger.error("unable to put request '%s': %s" %
                      (requestName, putRequest["Message"]))
        DIRAC.exit(-1)

    gLogger.always("Request '%s' has been put to ReqDB for execution." %
                   requestName)
    gLogger.always(
        "You can monitor its status using command: 'dirac-rms-request %s'" %
        requestName)
    DIRAC.exit(0)
Example 31

def main():
    # Registering arguments will automatically add their description to the help menu
    Script.registerArgument("requestName:  a request name")
    Script.registerArgument("LFN:          logical file name")
    Script.registerArgument("localFile:    local file you want to put")
    Script.registerArgument("targetSE:     target SE")
    Script.parseCommandLine()

    import DIRAC
    from DIRAC import gLogger

    # parseCommandLine show help when mandatory arguments are not specified or incorrect argument
    requestName, LFN, PFN, targetSE = Script.getPositionalArgs(group=True)

    if not os.path.isabs(LFN):
        gLogger.error("LFN should be absolute path!!!")
        DIRAC.exit(-1)

    gLogger.info("will create request '%s' with 'PutAndRegister' "
                 "operation using %s pfn and %s target SE" %
                 (requestName, PFN, targetSE))

    from DIRAC.RequestManagementSystem.Client.Request import Request
    from DIRAC.RequestManagementSystem.Client.Operation import Operation
    from DIRAC.RequestManagementSystem.Client.File import File
    from DIRAC.RequestManagementSystem.Client.ReqClient import ReqClient
    from DIRAC.Core.Utilities.Adler import fileAdler

    if not os.path.exists(PFN):
        gLogger.error("%s does not exist" % PFN)
        DIRAC.exit(-1)
    if not os.path.isfile(PFN):
        gLogger.error("%s is not a file" % PFN)
        DIRAC.exit(-1)

    PFN = os.path.abspath(PFN)
    size = os.path.getsize(PFN)
    adler32 = fileAdler(PFN)

    request = Request()
    request.RequestName = requestName

    putAndRegister = Operation()
    putAndRegister.Type = "PutAndRegister"
    putAndRegister.TargetSE = targetSE
    opFile = File()
    opFile.LFN = LFN
    opFile.PFN = PFN
    opFile.Size = size
    opFile.Checksum = adler32
    opFile.ChecksumType = "ADLER32"
    putAndRegister.addFile(opFile)
    request.addOperation(putAndRegister)
    reqClient = ReqClient()
    putRequest = reqClient.putRequest(request)
    if not putRequest["OK"]:
        gLogger.error("unable to put request '%s': %s" %
                      (requestName, putRequest["Message"]))
        DIRAC.exit(-1)

    gLogger.always("Request '%s' has been put to ReqDB for execution." %
                   requestName)
    gLogger.always(
        "You can monitor its status using command: 'dirac-rms-request %s'" %
        requestName)
    DIRAC.exit(0)
Example 32
def setuptest(request):
  global local_path, putDir, createDir, putFile, isFile, listDir,\
      getDir, getFile, rmDir, removeFile, se, filesInFolderAandB, fileAdlers, fileSizes
  local_path = tempfile.mkdtemp()

  # create the local structure
  workPath = os.path.join(local_path, 'Workflow')
  os.mkdir(workPath)

  os.mkdir(os.path.join(workPath, 'FolderA'))
  with open(os.path.join(workPath, 'FolderA', 'FileA'), 'w') as f:
    f.write(_mul('FileA'))

  os.mkdir(os.path.join(workPath, 'FolderA', 'FolderAA'))
  with open(os.path.join(workPath, 'FolderA', 'FolderAA', 'FileAA'), 'w') as f:
    f.write(_mul('FileAA'))

  os.mkdir(os.path.join(workPath, 'FolderB'))
  with open(os.path.join(workPath, 'FolderB', 'FileB'), 'w') as f:
    f.write(_mul('FileB'))

  for fn in ["File1", "File2", "File3"]:
    with open(os.path.join(workPath, fn), 'w') as f:
      f.write(_mul(fn))

  se = StorageElement(STORAGE_NAME)

  putDir = {
      os.path.join(DESTINATION_PATH, 'Workflow/FolderA'):
          os.path.join(local_path, 'Workflow/FolderA'),
      os.path.join(DESTINATION_PATH, 'Workflow/FolderB'):
          os.path.join(local_path, 'Workflow/FolderB'),
  }

  createDir = [os.path.join(DESTINATION_PATH, 'Workflow/FolderA/FolderAA'),
               os.path.join(DESTINATION_PATH, 'Workflow/FolderA/FolderABA'),
               os.path.join(DESTINATION_PATH, 'Workflow/FolderA/FolderAAB')
               ]

  putFile = {
      os.path.join(DESTINATION_PATH, 'Workflow/FolderA/File1'):
          os.path.join(local_path, 'Workflow/File1'),
      os.path.join(DESTINATION_PATH, 'Workflow/FolderAA/File1'):
          os.path.join(local_path, 'Workflow/File1'),
      os.path.join(DESTINATION_PATH, 'Workflow/FolderBB/File2'):
          os.path.join(local_path, 'Workflow/File2'),
      os.path.join(DESTINATION_PATH, 'Workflow/FolderB/File2'):
          os.path.join(local_path, 'Workflow/File2'),
      os.path.join(DESTINATION_PATH, 'Workflow/File3'):
          os.path.join(local_path, 'Workflow/File3'),
  }

  isFile = list(putFile)

  listDir = [os.path.join(DESTINATION_PATH, 'Workflow'),
             os.path.join(DESTINATION_PATH, 'Workflow/FolderA'),
             os.path.join(DESTINATION_PATH, 'Workflow/FolderB')
             ]

  getDir = [os.path.join(DESTINATION_PATH, 'Workflow/FolderA'),
            os.path.join(DESTINATION_PATH, 'Workflow/FolderB')
            ]

  removeFile = [os.path.join(DESTINATION_PATH, 'Workflow/FolderA/File1')]
  rmDir = [os.path.join(DESTINATION_PATH, 'Workflow')]

  # This list is used to check for the existence of files after uploading
  # the directory: they should NOT exist, since uploading a whole
  # directory does not work.
  filesInFolderAandB = []
  for dirName in ('Workflow/FolderA', 'Workflow/FolderB'):
    for root, _dirs, files in os.walk(os.path.join(local_path, dirName)):
      for fn in files:
        filesInFolderAandB.append(
            os.path.join(
                DESTINATION_PATH,
                root.replace(
                    local_path,
                    '').strip('/'),
                fn))
  filesInFolderAandB = dict.fromkeys(filesInFolderAandB, False)

  fileAdlers = {}
  fileSizes = {}

  for lfn, localFn in putFile.items():
    fileAdlers[lfn] = fileAdler(localFn)
    fileSizes[lfn] = getSize(localFn)

  clearDirectory(se, local_path, DESTINATION_PATH)

  def teardown():
    print("Cleaning local test")
    shutil.rmtree(local_path)
    clearDirectory(se, local_path, DESTINATION_PATH)

  request.addfinalizer(teardown)
  return local_path, random.randint(0, 100)  # provide the fixture value
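
For context, a test consuming this fixture might look like the sketch below; the pytest registration and the test body are illustrative assumptions, not part of the original example. se.putFile takes the {destination: local file} mapping and returns the usual DIRAC structure S_OK({'Successful': {...}, 'Failed': {...}}).

import pytest

# Hypothetical wiring: register the function above as a fixture.
setuptest = pytest.fixture(setuptest)

def test_putFile(setuptest):
    res = se.putFile(putFile)
    assert res['OK'], res
    # every destination should appear in 'Successful' with its file size
    for dest_url in putFile:
        assert res['Value']['Successful'].get(dest_url) == fileSizes[dest_url]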
Example n. 33
class StorageElementHandler( RequestHandler ):
  """
  .. class:: StorageElementHandler

  """

  def __confirmToken( self, token, path, mode ):
    """ Confirm the access rights for the path in a given mode
    """
    # Not yet implemented
    return True

  @staticmethod
  def __checkForDiskSpace( dpath, size ):
    """ Check if the directory dpath can accommodate 'size' volume of data
    """
    stats = os.statvfs( dpath )
    dsize = stats.f_bsize * stats.f_bavail
    maxStorageSizeBytes = MAX_STORAGE_SIZE * 1024 * 1024
    return min( dsize, maxStorageSizeBytes ) > size

  def __resolveFileID( self, fileID ):
    """ get path to file for a given :fileID: """

    port = self.getCSOption( 'Port', '' )
    if not port:
      return ''

    if ":%s" % port in fileID:
      loc = fileID.find( ":%s" % port )
      if loc >= 0:
        fileID = fileID[loc + len( ":%s" % port ):]

    serviceName = self.serviceInfoDict['serviceName']
    loc = fileID.find( serviceName )
    if loc >= 0:
      fileID = fileID[loc + len( serviceName ):]

    loc = fileID.find( '?=' )
    if loc >= 0:
      fileID = fileID[loc + 2:]

    if fileID.find( BASE_PATH ) == 0:
      return fileID
    while fileID and fileID[0] == '/':
      fileID = fileID[1:]
    return os.path.join( BASE_PATH, fileID )

  @staticmethod
  def __getFileStat( path ):
    """ Get the file stat information
    """
    resultDict = {}
    try:
      statTuple = os.stat( path )
    except OSError as x:
      if str( x ).find( 'No such file' ) >= 0:
        resultDict['Exists'] = False
        return S_OK( resultDict )
      else:
        return S_ERROR( 'Failed to get metadata for %s' % path )

    resultDict['Exists'] = True
    mode = statTuple[ST_MODE]
    resultDict['Type'] = "File"
    resultDict['File'] = True
    resultDict['Directory'] = False
    if S_ISDIR( mode ):
      resultDict['Type'] = "Directory"
      resultDict['File'] = False
      resultDict['Directory'] = True
    resultDict['Size'] = statTuple[ST_SIZE]
    resultDict['TimeStamps'] = ( statTuple[ST_ATIME], statTuple[ST_MTIME], statTuple[ST_CTIME] )
    resultDict['Cached'] = 1
    resultDict['Migrated'] = 0
    resultDict['Lost'] = 0
    resultDict['Unavailable'] = 0
    resultDict['Mode'] = S_IMODE( mode )

    if resultDict['File']:
      cks = fileAdler( path )
      resultDict['Checksum'] = cks

    resultDict = StorageBase._addCommonMetadata( resultDict )

    return S_OK( resultDict )
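
The checksum filled in above comes from DIRAC's fileAdler helper. As a rough, self-contained sketch of what such a helper does, built on the standard library's zlib.adler32 (an illustration, not DIRAC's actual implementation):

import zlib

def adler32_of_file(path, chunk_size=1024 * 1024):
    """Compute the ADLER32 checksum of a file, returned as 8 hex digits."""
    checksum = 1  # adler32 starts from 1 by definition
    with open(path, 'rb') as fd:
        for chunk in iter(lambda: fd.read(chunk_size), b''):
            checksum = zlib.adler32(chunk, checksum)
    return "%08x" % (checksum & 0xFFFFFFFF)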
Example n. 34
def main():
    dm = DataManager()

    fileTupleBuffer = []

    res = getProxyInfo( False, False )
    if not res['OK']:
        gLogger.error( "Failed to get client proxy information.", res['Message'] )
        DIRAC.exit( 2 ) 
    proxyInfo = res['Value']
    if proxyInfo['secondsLeft'] == 0:
        gLogger.error( "Proxy expired" )
        DIRAC.exit( 2 ) 
    username = proxyInfo['username']
    vo = ''
    if 'group' in proxyInfo:
        vo = getVOForGroup( proxyInfo['group'] )

    counter = 0
    for f in files:
        counter += 1

        local_f = f
        if not f.startswith('/cefs'):
            # if the file to register is not under /cefs, use put-and-register
            folder_name = os.path.basename(os.path.dirname(f))
            lfn = '/cepc/user/%s/%s/jsub/' % (username[0], username) + folder_name + '/' + os.path.basename(f)
            do_put_and_register = True
        else:
            lfn = '/cepc/lustre-ro' + os.path.abspath(f)
            do_put_and_register = False

        result = fcc.isFile(lfn)
        if result['OK'] and lfn in result['Value']['Successful'] and result['Value']['Successful'][lfn]:
            continue

        size = os.path.getsize(f)
        adler32 = fileAdler(f)
        guid = makeGuid()
        fileTuple = (lfn, local_f, size, _se, guid, adler32)
        fileTupleBuffer.append(fileTuple)
        gLogger.debug('Register to lfn: %s' % lfn)
        gLogger.debug('fileTuple: %s' % (fileTuple,))

        if len(fileTupleBuffer) >= _bufferSize:
            if do_put_and_register:
                # NB: putAndRegister uploads a single file, so only the most
                # recent (lfn, local_f, guid) is handled here; the
                # registerFile branch consumes the whole buffer at once.
                result = dm.putAndRegister(lfn, local_f, _se, guid, overwrite=overwrite)
            else:
                result = dm.registerFile(fileTupleBuffer)
            gLogger.debug('register result: %s' % (result,))
            if not result['OK']:
                gLogger.error('Register file failed', result['Message'])
                return 1
            del fileTupleBuffer[:]
            gLogger.debug('%s files registered' % counter)

    if fileTupleBuffer:
        if do_put_and_register:
            result = dm.putAndRegister(lfn, local_f, _se, guid, overwrite=overwrite)
        else:
            result = dm.registerFile(fileTupleBuffer)
        gLogger.debug('register result: %s' % (result,))
        if not result['OK']:
            gLogger.error('Register file failed', result['Message'])
            return 1
        del fileTupleBuffer[:]

    gLogger.info('In total %s files registered' % counter)
    return 0
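
For reference, dm.registerFile consumes (lfn, pfn, size, storageElement, guid, checksum) tuples like the ones buffered above. A minimal single-file sketch; the local path, LFN and SE name below are placeholders, not values from the original example:

from DIRAC.Core.Utilities.Adler import fileAdler
from DIRAC.Core.Utilities.File import getSize, makeGuid
from DIRAC.DataManagementSystem.Client.DataManager import DataManager

local_file = '/cefs/some/file'        # placeholder local path
lfn = '/cepc/lustre-ro' + local_file  # placeholder LFN

fileTuple = (lfn, local_file, getSize(local_file), 'SOME-SE',
             makeGuid(local_file), fileAdler(local_file))
res = DataManager().registerFile([fileTuple])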
Example n. 35
def setuptest(request):
    global local_path, download_dir, putDir, createDir, putFile, isFile, listDir, getDir, getFile, rmDir, removeFile, se, filesInFolderAandB, fileAdlers, fileSizes
    local_path = tempfile.mkdtemp()
    download_dir = os.path.join(local_path, "getFile")
    os.mkdir(download_dir)

    # create the local structure
    workPath = os.path.join(local_path, "Workflow")
    os.mkdir(workPath)

    os.mkdir(os.path.join(workPath, "FolderA"))
    with open(os.path.join(workPath, "FolderA", "FileA"), "w") as f:
        f.write(_mul("FileA"))

    os.mkdir(os.path.join(workPath, "FolderA", "FolderAA"))
    with open(os.path.join(workPath, "FolderA", "FolderAA", "FileAA"),
              "w") as f:
        f.write(_mul("FileAA"))

    os.mkdir(os.path.join(workPath, "FolderB"))
    with open(os.path.join(workPath, "FolderB", "FileB"), "w") as f:
        f.write(_mul("FileB"))

    for fn in ["File1", "File2", "File3"]:
        with open(os.path.join(workPath, fn), "w") as f:
            f.write(_mul(fn))

    # request.param yields each SE name in turn
    se = StorageElement(request.param)

    putDir = {
        os.path.join(DESTINATION_PATH, "Workflow/FolderA"):
        os.path.join(local_path, "Workflow/FolderA"),
        os.path.join(DESTINATION_PATH, "Workflow/FolderB"):
        os.path.join(local_path, "Workflow/FolderB"),
    }

    createDir = [
        os.path.join(DESTINATION_PATH, "Workflow/FolderA/FolderAA"),
        os.path.join(DESTINATION_PATH, "Workflow/FolderA/FolderABA"),
        os.path.join(DESTINATION_PATH, "Workflow/FolderA/FolderAAB"),
    ]

    putFile = {
        os.path.join(DESTINATION_PATH, "Workflow/FolderA/File1"):
        os.path.join(local_path, "Workflow/File1"),
        os.path.join(DESTINATION_PATH, "Workflow/FolderAA/File1"):
        os.path.join(local_path, "Workflow/File1"),
        os.path.join(DESTINATION_PATH, "Workflow/FolderBB/File2"):
        os.path.join(local_path, "Workflow/File2"),
        os.path.join(DESTINATION_PATH, "Workflow/FolderB/File2"):
        os.path.join(local_path, "Workflow/File2"),
        os.path.join(DESTINATION_PATH, "Workflow/File3"):
        os.path.join(local_path, "Workflow/File3"),
    }

    isFile = list(putFile)

    listDir = [
        os.path.join(DESTINATION_PATH, "Workflow"),
        os.path.join(DESTINATION_PATH, "Workflow/FolderA"),
        os.path.join(DESTINATION_PATH, "Workflow/FolderB"),
    ]

    getDir = [
        os.path.join(DESTINATION_PATH, "Workflow/FolderA"),
        os.path.join(DESTINATION_PATH, "Workflow/FolderB")
    ]

    removeFile = [os.path.join(DESTINATION_PATH, "Workflow/FolderA/File1")]
    rmDir = [os.path.join(DESTINATION_PATH, "Workflow")]

    # This list is used to check for the existence of files after uploading
    # the directory: they should NOT exist, since uploading a whole
    # directory does not work.
    filesInFolderAandB = []
    for dirName in ("Workflow/FolderA", "Workflow/FolderB"):
        for root, _dirs, files in os.walk(os.path.join(local_path, dirName)):
            for fn in files:
                filesInFolderAandB.append(
                    os.path.join(DESTINATION_PATH,
                                 root.replace(local_path, "").strip("/"), fn))
    filesInFolderAandB = dict.fromkeys(filesInFolderAandB, False)

    fileAdlers = {}
    fileSizes = {}

    for lfn, localFn in putFile.items():
        fileAdlers[lfn] = fileAdler(localFn)
        fileSizes[lfn] = getSize(localFn)

    clearDirectory(se, local_path, DESTINATION_PATH)

    def teardown():
        print("Cleaning local test")
        shutil.rmtree(local_path)
        clearDirectory(se, local_path, DESTINATION_PATH)

    request.addfinalizer(teardown)
    return local_path, random.randint(0, 100)  # provide the fixture value
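
The request.param used above implies the fixture is parametrized over the storage elements to test. A plausible registration, where the scope and the SE names are illustrative assumptions:

import pytest

# Hypothetical wiring: run every test once per storage element.
setuptest = pytest.fixture(scope="module", params=["SE-1", "SE-2"])(setuptest)

def test_createDir(setuptest):
    # se was built from request.param inside the fixture
    res = se.createDirectory(createDir)
    assert res["OK"], res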