def web_index(self):
    """Render the base (root) template of the web portal.

    Reads optional request arguments (url_state, view, theme, open_app)
    and hands them, together with the session data, to root.tpl.
    """
    # Render base template
    data = self.getSessionData()

    # NOTE(review): these values come straight from the request and are
    # reflected into the rendered page; later revisions pass them through
    # an XSS filter — consider doing the same here.
    # dict.has_key() was removed in Python 3; the `in` operator is
    # equivalent and works on both Python 2 and 3.
    url_state = ""
    if "url_state" in self.request.arguments and len(self.request.arguments["url_state"][0]) > 0:
        url_state = self.request.arguments["url_state"][0]

    view_name = Conf.getTheme()
    if "view" in self.request.arguments and len(self.request.arguments["view"][0]) > 0:
        view_name = self.request.arguments["view"][0]

    # Default ExtJS theme unless the request selects a known alternative
    theme_name = "ext-all-gray"
    if "theme" in self.request.arguments and len(self.request.arguments["theme"][0]) > 0:
        if self.request.arguments["theme"][0] == "Neptune":
            theme_name = "ext-all-neptune"
        if self.request.arguments["theme"][0] == "Classic":
            theme_name = "ext-all"

    open_app = ""
    if "open_app" in self.request.arguments and len(self.request.arguments["open_app"][0]) > 0:
        open_app = self.request.arguments["open_app"][0].strip()

    # Current logger level, lowercased for the JS side
    level = str(gLogger.getLevel()).lower()
    self.render("root.tpl",
                base_url=data['baseURL'],
                _dev=Conf.devMode(),
                ext_version=data['extVersion'],
                url_state=url_state,
                extensions=data['extensions'],
                credentials=data['user'],
                title=Conf.getTitle(),
                theme=theme_name,
                root_url=Conf.rootURL(),
                view=view_name,
                open_app=open_app,
                debug_level=level)
def __init__(self, params):
    """Collect every path and setting needed to compile the ExtJS web resources."""
    self.__params = params
    self.__extVersion = '4.2.1.883'
    # this directory will contain all the resources required by ExtJS
    self.__extDir = 'extjs'
    if self.__params.extjspath is not None:
        self.__sdkDir = params.extjspath
    else:
        self.__sdkDir = '/opt/dirac/extjs/ext-4.2.1.883'
    destination = self.__params.destination
    self.__webAppPath = os.path.join(destination, 'WebAppDIRAC', 'WebApp')
    self.__staticPaths = [os.path.join(self.__webAppPath, 'static')]
    # Extensions keep their own static files next to the core ones
    if self.__params.name != 'WebAppDIRAC':
        self.__staticPaths.append(os.path.join(destination, self.__params.name, 'WebApp', 'static'))
    coreJsParts = (("static", "core", "js", "utils"), ("static", "core", "js", "core"))
    self.__classPaths = [os.path.join(self.__webAppPath, *parts) for parts in coreJsParts]
    sdkParent = os.path.dirname(self.__sdkDir)
    self.__classPaths.append(os.path.join(sdkParent, "examples", "ux"))
    self.__classPaths.append(os.path.join(sdkParent, "examples", "ux", "form"))
    self.__sdkPath = os.path.join(self.__sdkDir, "src")
    self.__extjsDirsToCopy = [os.path.join(sdkParent, "resources")]
    self.__extjsFilesToCopy = [os.path.join(sdkParent, "ext-all-dev.js")]
    # "true"/"false" string consumed by the JS side
    self.__debugFlag = str(gLogger.getLevel() in ('DEBUG', 'VERBOSE', 'INFO')).lower()
    self.__compileTemplate = os.path.join(destination, 'WebAppDIRAC', "Lib", "CompileTemplates")
    # this place will be used, if sencha cmd is not available
    self.__senchacmddir = os.path.join(rootPath, "sbin", "Sencha", "Cmd")
    self.__senchaVersion = "v6.5.0.180"
    self.__appDependency = {}
    self.__dependencySection = "Dependencies"
def __init__(self, pool_size=15):
    """c'tor

    :param self: self reference
    :param pool_size: size of the connection pool to the DB
    """
    self.log = gLogger.getSubLogger('FTS3DB')

    # Initialize the connection info
    self.__getDBConnectionInfo('DataManagement/FTS3DB')

    # Echo every SQL statement only when running at DEBUG level
    runDebug = (gLogger.getLevel() == 'DEBUG')
    self.engine = create_engine(
        'mysql://%s:%s@%s:%s/%s' % (self.dbUser, self.dbPass, self.dbHost, self.dbPort, self.dbName),
        echo=runDebug,
        pool_size=pool_size,
        # Recycle pooled connections before MySQL's idle timeout closes them,
        # avoiding "MySQL server has gone away" errors (consistent with the
        # other DB constructors in this project)
        pool_recycle=3600)

    metadata.bind = self.engine
    self.dbSession = sessionmaker(bind=self.engine)
def __init__(self, pool_size=15, url=None):
    """c'tor

    :param self: self reference
    :param pool_size: size of the connection pool to the DB
    """
    self.log = gLogger.getSubLogger("FTS3DB")

    if not url:
        # Initialize the connection info
        self.__getDBConnectionInfo("DataManagement/FTS3DB")
        url = "mysql://%s:%s@%s:%s/%s" % (self.dbUser, self.dbPass, self.dbHost, self.dbPort, self.dbName)

    # Echo SQL statements only at DEBUG level; recycle pooled connections
    # hourly so MySQL's idle timeout does not leave stale handles.
    runDebug = gLogger.getLevel() == "DEBUG"
    self.engine = create_engine(url, echo=runDebug, pool_size=pool_size, pool_recycle=3600)

    metadata.bind = self.engine
    self.dbSession = sessionmaker(bind=self.engine)
def __init__(self, pool_size=15):
    """c'tor

    :param self: self reference
    :param pool_size: size of the connection pool to the DB
    """
    self.log = gLogger.getSubLogger('FTS3DB')

    # Initialize the connection info
    self.__getDBConnectionInfo('DataManagement/FTS3DB')

    # Echo SQL only at DEBUG level; recycle connections hourly to dodge
    # MySQL's idle-connection timeout.
    echoSQL = gLogger.getLevel() == 'DEBUG'
    connString = 'mysql://%s:%s@%s:%s/%s' % (self.dbUser, self.dbPass, self.dbHost, self.dbPort, self.dbName)
    self.engine = create_engine(connString, echo=echoSQL, pool_size=pool_size, pool_recycle=3600)

    metadata.bind = self.engine
    self.dbSession = sessionmaker(bind=self.engine)
def __init__(self):
    """Gather the paths and settings needed to compile the installed web app."""
    self.__extVersion = SessionData.getExtJSVersion()
    self.__staticPaths = HandlerMgr().getPaths("static")
    self.__extensions = getInstalledExtensions()
    webAppPath = os.path.dirname(self.__staticPaths[-1])
    self.__webAppPath = webAppPath
    extPath = os.path.join(webAppPath, "static", "extjs", self.__extVersion)
    self.__extPath = extPath
    # SDK sources live under the versioned ExtJS directory
    self.__sdkPath = os.path.join(extPath, "src")
    self.__appDependency = CompilerHelper().getAppDependencies()
    classPaths = []
    for parts in (("static", "core", "js", "utils"), ("static", "core", "js", "core")):
        classPaths.append(os.path.join(webAppPath, *parts))
    classPaths.append(os.path.join(extPath, "examples", "ux", "form"))
    self.__classPaths = classPaths
    # "true"/"false" string consumed by the JS side
    self.__debugFlag = str(gLogger.getLevel() in ('DEBUG', 'VERBOSE', 'INFO')).lower()
    self.__inDir = os.path.join(os.path.dirname(webAppPath), "Lib", "CompileTemplates")
    self.__senchacmddir = os.path.join(rootPath, "sbin", "Sencha", "Cmd")
    self.__senchaVersion = "v6.5.0.180"
def web_index(self):
    """Render the base (root) template of the web portal.

    Optional request arguments (url_state, view, theme, open_app) are read,
    passed through xss_filter where they are echoed back, and handed to
    root.tpl together with branding (icon, background, logo, welcome text).
    """
    # Render base template
    data = self.getSessionData()

    # dict.has_key() was removed in Python 3; `in` works on both 2 and 3.
    url_state = ""
    if "url_state" in self.request.arguments and len(self.request.arguments["url_state"][0]) > 0:
        url_state = xss_filter(self.request.arguments["url_state"][0])

    view_name = Conf.getTheme()
    if "view" in self.request.arguments and len(self.request.arguments["view"][0]) > 0:
        view_name = xss_filter(self.request.arguments["view"][0])

    # Default ExtJS theme unless the request selects a known alternative
    theme_name = "ext-all-gray"
    if "theme" in self.request.arguments and len(self.request.arguments["theme"][0]) > 0:
        if self.request.arguments["theme"][0] == "Neptune":
            theme_name = "ext-all-neptune"
        if self.request.arguments["theme"][0] == "Classic":
            theme_name = "ext-all"

    open_app = ""
    if "open_app" in self.request.arguments and len(self.request.arguments["open_app"][0]) > 0:
        open_app = xss_filter(self.request.arguments["open_app"][0].strip())

    icon = data['baseURL'] + Conf.getIcon()
    background = data['baseURL'] + Conf.getBackgroud()
    logo = data['baseURL'] + Conf.getLogo()

    # Optional welcome text: best-effort read, missing file is only a warning
    welcomeFile = Conf.getWelcome()
    welcome = ''
    if welcomeFile:
        try:
            with open(welcomeFile, 'r') as f:
                welcome = f.read().replace('\n', '')
        except Exception:
            # was a bare `except:` — that would also swallow SystemExit /
            # KeyboardInterrupt; keep the best-effort behaviour but narrower
            gLogger.warn('Welcome page not found here: %s' % welcomeFile)

    level = str(gLogger.getLevel()).lower()
    self.render("root.tpl",
                iconUrl=icon,
                base_url=data['baseURL'],
                _dev=Conf.devMode(),
                ext_version=data['extVersion'],
                url_state=url_state,
                extensions=data['extensions'],
                credentials=data['user'],
                title=Conf.getTitle(),
                theme=theme_name,
                root_url=Conf.rootURL(),
                view=view_name,
                open_app=open_app,
                debug_level=level,
                welcome=welcome,
                backgroundImage=background,
                logo=logo)
def __init__(self, params):
    # Compile-time configuration for building the ExtJS-based web UI.
    self.__params = params
    # ExtJS SDK version bundled with WebAppDIRAC
    self.__extVersion = '4.2.1.883'
    self.__extDir = 'extjs'  # this directory will contain all the resources required by ExtJS
    # SDK location: explicit --extjspath wins, otherwise the standard install path
    self.__sdkDir = params.extjspath if self.__params.extjspath is not None else '/opt/dirac/extjs/ext-4.2.1.883'
    self.__webAppPath = os.path.join(self.__params.destination, 'WebAppDIRAC', 'WebApp')
    self.__staticPaths = [os.path.join(self.__webAppPath, 'static')]
    # Extensions ship their own static files next to the core ones
    if self.__params.name != 'WebAppDIRAC':
        self.__staticPaths.append(
            os.path.join(self.__params.destination, self.__params.name, 'WebApp', 'static'))
    # Class search paths for the Sencha compiler: core JS plus ExtJS ux examples
    self.__classPaths = [
        os.path.join(self.__webAppPath, *p)
        for p in (("static", "core", "js", "utils"), ("static", "core", "js", "core"))
    ]
    self.__classPaths.append(
        os.path.join(os.path.dirname(self.__sdkDir), "examples", "ux"))
    self.__classPaths.append(
        os.path.join(os.path.dirname(self.__sdkDir), "examples", "ux", "form"))
    self.__sdkPath = os.path.join(self.__sdkDir, "src")
    # ExtJS resources/files copied alongside the compiled output
    self.__extjsDirsToCopy = [
        os.path.join(os.path.dirname(self.__sdkDir), "resources")
    ]
    self.__extjsFilesToCopy = [
        os.path.join(os.path.dirname(self.__sdkDir), "ext-all-dev.js")
    ]
    # "true"/"false" string — presumably consumed by the JS side; TODO confirm
    self.__debugFlag = str(gLogger.getLevel() in ('DEBUG', 'VERBOSE', 'INFO')).lower()
    self.__compileTemplate = os.path.join(self.__params.destination, 'WebAppDIRAC',
                                          "Lib", "CompileTemplates")
    # this place will be used, if sencha cmd is not available
    self.__senchacmddir = os.path.join(rootPath, "sbin", "Sencha", "Cmd")
    self.__senchaVersion = "v6.5.0.180"
    self.__appDependency = {}
    self.__dependencySection = "Dependencies"
def __init__(self):
    """Gather the paths needed to compile the installed web application."""
    self.__extVersion = SessionData.getExtJSVersion()
    self.__staticPaths = HandlerMgr().getPaths("static")
    self.__extensions = getInstalledExtensions()
    webAppPath = os.path.dirname(self.__staticPaths[-1])
    self.__webAppPath = webAppPath
    extPath = os.path.join(webAppPath, "static", "extjs", self.__extVersion)
    self.__extPath = extPath
    # SDK sources live under the versioned ExtJS directory
    self.__sdkPath = os.path.join(extPath, "src")
    classPaths = []
    for parts in (("static", "core", "js", "utils"), ("static", "core", "js", "core")):
        classPaths.append(os.path.join(webAppPath, *parts))
    classPaths.append(os.path.join(extPath, "examples", "ux", "form"))
    self.__classPaths = classPaths
    # "true"/"false" string consumed by the JS side
    self.__debugFlag = str(gLogger.getLevel() in ('DEBUG', 'VERBOSE', 'INFO')).lower()
    self.__inDir = os.path.join(os.path.dirname(webAppPath), "Lib", "CompileTemplates")
def _getFilesFromDirectoryScan(self, dirs):
    """ calls dm.getFilesFromDirectory

    :param dirs: directory (or list of directories) to scan
    :returns: list of LFNs found (empty list on error)
    """
    # Silence expected per-directory noise while scanning
    level = gLogger.getLevel()
    gLogger.setLevel('FATAL')
    try:
        res = self.dm.getFilesFromDirectory(dirs)
    finally:
        # Restore the previous log level even if the call raises;
        # otherwise the whole process would stay muted at FATAL
        gLogger.setLevel(level)
    if not res['OK']:
        # A missing directory is an expected condition, not an error
        if 'No such file or directory' not in res['Message']:
            gLogger.error("Error getting files from directories %s:" % dirs, res['Message'])
        return []
    return res['Value'] if res['Value'] else []
def web_index(self):
    """Render the base (root) template of the web portal.

    Reads optional request arguments (url_state, view, theme, open_app)
    and hands them, together with session data and the portal icon, to
    root.tpl.
    """
    # Render base template
    data = self.getSessionData()

    # NOTE(review): these values are reflected into the page unfiltered;
    # later revisions pass them through an XSS filter.
    # dict.has_key() was removed in Python 3; `in` works on both 2 and 3.
    url_state = ""
    if "url_state" in self.request.arguments and len(self.request.arguments["url_state"][0]) > 0:
        url_state = self.request.arguments["url_state"][0]

    view_name = Conf.getTheme()
    if "view" in self.request.arguments and len(self.request.arguments["view"][0]) > 0:
        view_name = self.request.arguments["view"][0]

    # Default ExtJS theme unless the request selects a known alternative
    theme_name = "ext-all-gray"
    if "theme" in self.request.arguments and len(self.request.arguments["theme"][0]) > 0:
        if self.request.arguments["theme"][0] == "Neptune":
            theme_name = "ext-all-neptune"
        if self.request.arguments["theme"][0] == "Classic":
            theme_name = "ext-all"

    open_app = ""
    if "open_app" in self.request.arguments and len(self.request.arguments["open_app"][0]) > 0:
        open_app = self.request.arguments["open_app"][0].strip()

    icon = data['baseURL'] + Conf.getIcon()
    level = str(gLogger.getLevel()).lower()
    self.render("root.tpl",
                iconUrl=icon,
                base_url=data['baseURL'],
                _dev=Conf.devMode(),
                ext_version=data['extVersion'],
                url_state=url_state,
                extensions=data['extensions'],
                credentials=data['user'],
                title=Conf.getTitle(),
                theme=theme_name,
                root_url=Conf.rootURL(),
                view=view_name,
                open_app=open_app,
                debug_level=level)
def _getFilesFromDirectoryScan(self, dirs):
    """calls dm.getFilesFromDirectory

    :param dirs: directory (or list of directories) to scan
    :returns: list of LFNs found (empty list on error)
    """
    # Silence expected per-directory noise while scanning
    level = gLogger.getLevel()
    gLogger.setLevel("FATAL")
    try:
        res = self.dataManager.getFilesFromDirectory(dirs)
    finally:
        # Restore the previous log level even if the call raises;
        # otherwise the whole process would stay muted at FATAL
        gLogger.setLevel(level)
    if not res["OK"]:
        # A missing directory is an expected condition, not an error
        if "No such file or directory" not in res["Message"]:
            gLogger.error(
                "Error getting files from directories %s:" % dirs, res["Message"])
        return []
    return res["Value"] if res["Value"] else []
def __init__(self):
    """c'tor

    :param self: self reference
    """
    self.log = gLogger.getSubLogger('RequestDB')

    # Initialize the connection info
    self.__getDBConnectionInfo('RequestManagement/ReqDB')

    # Echo every SQL statement only when running at DEBUG level
    runDebug = (gLogger.getLevel() == 'DEBUG')
    self.engine = create_engine(
        'mysql://%s:%s@%s:%s/%s' % (self.dbUser, self.dbPass, self.dbHost, self.dbPort, self.dbName),
        echo=runDebug,
        # Recycle pooled connections before MySQL's idle timeout closes
        # them, avoiding "MySQL server has gone away" errors (consistent
        # with the other DB constructors in this project)
        pool_recycle=3600)

    metadata.bind = self.engine
    self.DBSession = sessionmaker(bind=self.engine)
def __init__(self):
    """Constructor: read the DB connection parameters and build the engine.

    :param self: self reference
    """
    self.log = gLogger.getSubLogger('RequestDB')
    # Fetch host/port/credentials for the database
    self.__getDBConnectionInfo('RequestManagement/ReqDB')
    # Echo SQL statements only at DEBUG level
    debugSQL = gLogger.getLevel() == 'DEBUG'
    connString = 'mysql://%s:%s@%s:%s/%s' % (self.dbUser, self.dbPass, self.dbHost, self.dbPort, self.dbName)
    self.engine = create_engine(connString, echo=debugSQL)
    metadata.bind = self.engine
    self.DBSession = sessionmaker(bind=self.engine)
def web_index(self):
    """Render the base (root) template of the web portal.

    Optional request arguments (url_state, view, theme, open_app) are read,
    passed through xss_filter where they are echoed back, and handed to
    root.tpl together with branding (icon, background, logo, welcome text).
    """
    # Render base template
    data = self.getSessionData()

    # dict.has_key() was removed in Python 3; `in` works on both 2 and 3.
    url_state = ""
    if "url_state" in self.request.arguments and len(self.request.arguments["url_state"][0]) > 0:
        url_state = xss_filter(self.request.arguments["url_state"][0])

    view_name = Conf.getTheme()
    if "view" in self.request.arguments and len(self.request.arguments["view"][0]) > 0:
        view_name = xss_filter(self.request.arguments["view"][0])

    # Default ExtJS theme unless the request selects a known alternative
    theme_name = "ext-all-gray"
    if "theme" in self.request.arguments and len(self.request.arguments["theme"][0]) > 0:
        if self.request.arguments["theme"][0] == "Neptune":
            theme_name = "ext-all-neptune"
        if self.request.arguments["theme"][0] == "Classic":
            theme_name = "ext-all"

    open_app = ""
    if "open_app" in self.request.arguments and len(self.request.arguments["open_app"][0]) > 0:
        open_app = xss_filter(self.request.arguments["open_app"][0].strip())

    icon = data['baseURL'] + Conf.getIcon()
    background = data['baseURL'] + Conf.getBackgroud()
    logo = data['baseURL'] + Conf.getLogo()

    # Optional welcome text: best-effort read, missing file is only a warning
    welcomeFile = Conf.getWelcome()
    welcome = ''
    if welcomeFile:
        try:
            with open(welcomeFile, 'r') as f:
                welcome = f.read().replace('\n', '')
        except Exception:
            # was a bare `except:` — that would also swallow SystemExit /
            # KeyboardInterrupt; keep the best-effort behaviour but narrower
            gLogger.warn('Welcome page not found here: %s' % welcomeFile)

    level = str(gLogger.getLevel()).lower()
    self.render("root.tpl",
                iconUrl=icon,
                base_url=data['baseURL'],
                _dev=Conf.devMode(),
                ext_version=data['extVersion'],
                url_state=url_state,
                extensions=data['extensions'],
                credentials=data['user'],
                title=Conf.getTitle(),
                theme=theme_name,
                root_url=Conf.rootURL(),
                view=view_name,
                open_app=open_app,
                debug_level=level,
                welcome=welcome,
                backgroundImage=background,
                logo=logo)
def __init__(self):
    """Constructor: read the DB connection parameters, build the engine
    and create the schema.

    :param self: self reference
    """
    self.log = gLogger.getSubLogger('ProvenanceDB')
    # Initialize the connection info
    self.__getDBConnectionInfo('DataManagement/ProvenanceDB')
    # Echo SQL statements only at DEBUG level
    debugSQL = gLogger.getLevel() == 'DEBUG'
    connString = 'postgresql://%s:%s@%s:%s/%s' % (self.dbUser, self.dbPass, self.dbHost, self.dbPort, self.dbName)
    self.engine = create_engine(connString, echo=debugSQL)
    self.sessionMaker_o = sessionmaker(bind=self.engine)
    self.inspector = Inspector.from_engine(self.engine)
    # These are the list of tables that will be created.
    self.__initializeDB()
def __init__(self):
    """Constructor: read the DB connection parameters and build the engine.

    :param self: self reference
    """
    self.log = gLogger.getSubLogger("RequestDB")
    # Initialize the connection info
    self.__getDBConnectionInfo("RequestManagement/ReqDB")
    # Echo SQL statements only at DEBUG level
    echoSQL = gLogger.getLevel() == "DEBUG"
    dbUrl = "mysql://%s:%s@%s:%s/%s" % (self.dbUser, self.dbPass, self.dbHost, self.dbPort, self.dbName)
    # Recycle pooled connections hourly so MySQL's idle timeout does not
    # leave stale handles
    self.engine = create_engine(dbUrl, echo=echoSQL, pool_recycle=3600)
    metadata.bind = self.engine
    self.DBSession = sessionmaker(bind=self.engine)
def compareChecksum(self, lfns):
    """Compare the checksum of each file in the FC with the checksums of its
    physical replicas.

    :param lfns: iterable of LFNs to check
    :returns: S_OK with a dictionary of sub-dictionaries keyed by
        'AllReplicasCorrupted', 'SomeReplicasCorrupted', 'MissingReplica',
        'MissingAllReplicas' and 'NoReplicas'; S_ERROR on catalog failure
    """
    retDict = {
        "AllReplicasCorrupted": {},
        "SomeReplicasCorrupted": {},
        "MissingReplica": {},
        "MissingAllReplicas": {},
        "NoReplicas": {},
    }

    chunkSize = 100
    replicas = {}
    setLfns = set(lfns)
    # Reuse replica info already cached on this instance
    cachedLfns = setLfns & set(self.cachedReplicas)
    for lfn in cachedLfns:
        replicas[lfn] = self.cachedReplicas[lfn]
    lfnsLeft = list(setLfns - cachedLfns)
    if lfnsLeft:
        self.__write("Get replicas for %d files (chunks of %d): " % (len(lfnsLeft), chunkSize))
        for lfnChunk in breakListIntoChunks(lfnsLeft, chunkSize):
            self.__write(".")
            replicasRes = self.fileCatalog.getReplicas(lfnChunk)
            if not replicasRes["OK"]:
                gLogger.error("error: %s" % replicasRes["Message"])
                return S_ERROR(errno.ENOENT, "error: %s" % replicasRes["Message"])
            replicasRes = replicasRes["Value"]
            # Files the catalog could not resolve have no replicas at all
            if replicasRes["Failed"]:
                retDict["NoReplicas"].update(replicasRes["Failed"])
            replicas.update(replicasRes["Successful"])

    self.__write("Get FC metadata for %d files to be checked: " % len(lfns))
    metadata = {}
    for lfnChunk in breakListIntoChunks(replicas, chunkSize):
        self.__write(".")
        res = self.fileCatalog.getFileMetadata(lfnChunk)
        if not res["OK"]:
            return S_ERROR(errno.ENOENT, "error %s" % res["Message"])
        metadata.update(res["Value"]["Successful"])

    gLogger.notice("Check existence and compare checksum file by file...")
    csDict = {}
    seFiles = {}
    # Reverse the LFN->SE dictionary
    nReps = 0
    for lfn in replicas:
        # Remember the catalog checksum per LFN for later comparison
        csDict.setdefault(lfn, {})["FCChecksum"] = metadata.get(
            lfn, {}).get("Checksum")
        for se in replicas[lfn]:
            seFiles.setdefault(se, []).append(lfn)
            nReps += 1

    gLogger.notice("Getting checksum of %d replicas in %d SEs" % (nReps, len(seFiles)))
    checkSum = {}
    lfnNotExisting = {}
    lfnNoInfo = {}
    # Mute the logger while querying the SEs (failures are handled locally);
    # NOTE(review): not exception-safe — if a call raises, the level stays FATAL
    logLevel = gLogger.getLevel()
    gLogger.setLevel("FATAL")
    for num, se in enumerate(sorted(seFiles)):
        self.__write("\n%d. At %s (%d files): " % (num, se, len(seFiles[se])))
        oSe = StorageElement(se)
        notFound = 0
        for surlChunk in breakListIntoChunks(seFiles[se], chunkSize):
            self.__write(".")
            metadata = oSe.getFileMetadata(surlChunk)
            if not metadata["OK"]:
                gLogger.error(
                    "Error: getFileMetadata returns %s. Ignore those replicas" % (metadata["Message"]))
                # Remove from list of replicas as we don't know whether it is OK or
                # not
                for lfn in seFiles[se]:
                    lfnNoInfo.setdefault(lfn, []).append(se)
            else:
                metadata = metadata["Value"]
                notFound += len(metadata["Failed"])
                for lfn in metadata["Failed"]:
                    lfnNotExisting.setdefault(lfn, []).append(se)
                for lfn in metadata["Successful"]:
                    checkSum.setdefault(
                        lfn, {})[se] = metadata["Successful"][lfn]["Checksum"]
        if notFound:
            gLogger.error("%d files not found" % notFound)
    gLogger.setLevel(logLevel)

    gLogger.notice("Verifying checksum of %d files" % len(replicas))
    for lfn in replicas:
        # get the lfn checksum from the FC
        replicaDict = replicas[lfn]
        oneGoodReplica = False
        allGoodReplicas = True
        fcChecksum = csDict[lfn].pop("FCChecksum")
        for se in replicaDict:
            # If replica doesn't exist skip check
            if se in lfnNotExisting.get(lfn, []):
                allGoodReplicas = False
                continue
            if se in lfnNoInfo.get(lfn, []):
                # If there is no info, a priori it could be good
                oneGoodReplica = True
                continue
            # get the surls metadata and compare the checksum
            surlChecksum = checkSum.get(lfn, {}).get(se, "")
            if not surlChecksum or not compareAdler(
                    fcChecksum, surlChecksum):
                # if fcChecksum does not match surlChecksum
                csDict[lfn][se] = {"PFNChecksum": surlChecksum}
                gLogger.info(
                    "ERROR!! checksum mismatch at %s for LFN %s: FC checksum: %s , PFN checksum : %s "
                    % (se, lfn, fcChecksum, surlChecksum))
                allGoodReplicas = False
            else:
                oneGoodReplica = True
        # Classify the file from what we saw across its replicas
        if not oneGoodReplica:
            if lfn in lfnNotExisting:
                gLogger.info("=> All replicas are missing", lfn)
                retDict["MissingAllReplicas"][lfn] = "All"
            else:
                gLogger.info("=> All replicas have bad checksum", lfn)
                retDict["AllReplicasCorrupted"][lfn] = csDict[lfn]
        elif not allGoodReplicas:
            if lfn in lfnNotExisting:
                gLogger.info("=> At least one replica missing", lfn)
                retDict["MissingReplica"][lfn] = lfnNotExisting[lfn]
            else:
                gLogger.info("=> At least one replica with good Checksum", lfn)
                retDict["SomeReplicasCorrupted"][lfn] = csDict[lfn]

    return S_OK(retDict)
def compareChecksum(self, lfns):
    """Compare the checksum of each file in the FC with the checksums of its
    physical replicas.

    :param lfns: iterable of LFNs to check
    :returns: S_OK with a dictionary of sub-dictionaries keyed by
        'AllReplicasCorrupted', 'SomeReplicasCorrupted', 'MissingReplica',
        'MissingAllReplicas' and 'NoReplicas'; S_ERROR on catalog failure
    """
    retDict = {'AllReplicasCorrupted': {},
               'SomeReplicasCorrupted': {},
               'MissingReplica': {},
               'MissingAllReplicas': {},
               'NoReplicas': {}}

    chunkSize = 100
    replicas = {}
    setLfns = set(lfns)
    # Reuse replica info already cached on this instance
    cachedLfns = setLfns & set(self.cachedReplicas)
    for lfn in cachedLfns:
        replicas[lfn] = self.cachedReplicas[lfn]
    lfnsLeft = list(setLfns - cachedLfns)
    if lfnsLeft:
        self.__write("Get replicas for %d files (chunks of %d): " % (len(lfnsLeft), chunkSize))
        for lfnChunk in breakListIntoChunks(lfnsLeft, chunkSize):
            self.__write('.')
            replicasRes = self.fileCatalog.getReplicas(lfnChunk)
            if not replicasRes['OK']:
                gLogger.error("error: %s" % replicasRes['Message'])
                return S_ERROR(errno.ENOENT, "error: %s" % replicasRes['Message'])
            replicasRes = replicasRes['Value']
            # Files the catalog could not resolve have no replicas at all
            if replicasRes['Failed']:
                retDict['NoReplicas'].update(replicasRes['Failed'])
            replicas.update(replicasRes['Successful'])

    self.__write("Get FC metadata for %d files to be checked: " % len(lfns))
    metadata = {}
    for lfnChunk in breakListIntoChunks(replicas, chunkSize):
        self.__write('.')
        res = self.fileCatalog.getFileMetadata(lfnChunk)
        if not res['OK']:
            return S_ERROR(errno.ENOENT, "error %s" % res['Message'])
        metadata.update(res['Value']['Successful'])

    gLogger.notice("Check existence and compare checksum file by file...")
    csDict = {}
    seFiles = {}
    # Reverse the LFN->SE dictionary
    nReps = 0
    for lfn in replicas:
        # Remember the catalog checksum per LFN for later comparison
        csDict.setdefault(lfn, {})['LFCChecksum'] = metadata.get(
            lfn, {}).get('Checksum')
        for se in replicas[lfn]:
            seFiles.setdefault(se, []).append(lfn)
            nReps += 1

    gLogger.notice('Getting checksum of %d replicas in %d SEs' % (nReps, len(seFiles)))
    checkSum = {}
    lfnNotExisting = {}
    lfnNoInfo = {}
    # Mute the logger while querying the SEs (failures are handled locally);
    # NOTE(review): not exception-safe — if a call raises, the level stays FATAL
    logLevel = gLogger.getLevel()
    gLogger.setLevel('FATAL')
    for num, se in enumerate(sorted(seFiles)):
        self.__write('\n%d. At %s (%d files): ' % (num, se, len(seFiles[se])))
        oSe = StorageElement(se)
        notFound = 0
        for surlChunk in breakListIntoChunks(seFiles[se], chunkSize):
            self.__write('.')
            metadata = oSe.getFileMetadata(surlChunk)
            if not metadata['OK']:
                gLogger.error("Error: getFileMetadata returns %s. Ignore those replicas" % (
                    metadata['Message']))
                # Remove from list of replicas as we don't know whether it is OK or
                # not
                for lfn in seFiles[se]:
                    lfnNoInfo.setdefault(lfn, []).append(se)
            else:
                metadata = metadata['Value']
                notFound += len(metadata['Failed'])
                for lfn in metadata['Failed']:
                    lfnNotExisting.setdefault(lfn, []).append(se)
                for lfn in metadata['Successful']:
                    checkSum.setdefault(
                        lfn, {})[se] = metadata['Successful'][lfn]['Checksum']
        if notFound:
            gLogger.error('%d files not found' % notFound)
    gLogger.setLevel(logLevel)

    gLogger.notice('Verifying checksum of %d files' % len(replicas))
    for lfn in replicas:
        # get the lfn checksum from the FC
        replicaDict = replicas[lfn]
        oneGoodReplica = False
        allGoodReplicas = True
        lfcChecksum = csDict[lfn].pop('LFCChecksum')
        for se in replicaDict:
            # If replica doesn't exist skip check
            if se in lfnNotExisting.get(lfn, []):
                allGoodReplicas = False
                continue
            if se in lfnNoInfo.get(lfn, []):
                # If there is no info, a priori it could be good
                oneGoodReplica = True
                continue
            # get the surls metadata and compare the checksum
            surlChecksum = checkSum.get(lfn, {}).get(se, '')
            if not surlChecksum or not compareAdler(lfcChecksum, surlChecksum):
                # if lfcChecksum does not match surlChecksum
                csDict[lfn][se] = {'PFNChecksum': surlChecksum}
                gLogger.info("ERROR!! checksum mismatch at %s for LFN %s: LFC checksum: %s , PFN checksum : %s "
                             % (se, lfn, lfcChecksum, surlChecksum))
                allGoodReplicas = False
            else:
                oneGoodReplica = True
        # Classify the file from what we saw across its replicas
        if not oneGoodReplica:
            if lfn in lfnNotExisting:
                gLogger.info("=> All replicas are missing", lfn)
                retDict['MissingAllReplicas'][lfn] = 'All'
            else:
                gLogger.info("=> All replicas have bad checksum", lfn)
                retDict['AllReplicasCorrupted'][lfn] = csDict[lfn]
        elif not allGoodReplicas:
            if lfn in lfnNotExisting:
                gLogger.info("=> At least one replica missing", lfn)
                retDict['MissingReplica'][lfn] = lfnNotExisting[lfn]
            else:
                gLogger.info("=> At least one replica with good Checksum", lfn)
                retDict['SomeReplicasCorrupted'][lfn] = csDict[lfn]

    return S_OK(retDict)