def iiCheckMetadataXmlForSchemaIdentifier(rule_args, callback, rei):
    """Scan every research and vault metadata XML file and check its schema identifier."""
    rods_zone = session_vars.get_map(rei)["client_user"]["irods_zone"]

    callback.writeString("stdout", "[METADATA] Start check for schema identifiers.\n")

    # Find all research and vault collections, ordered by COLL_ID.
    rows = genquery.row_iterator(
        "ORDER(COLL_ID), COLL_NAME",
        "COLL_NAME like '/%s/home/%%' AND DATA_NAME like 'yoda-metadata%%xml'" % (rods_zone),
        genquery.AS_LIST, callback
    )

    # Check each collection in batch.
    for row in rows:
        coll_id = int(row[0])
        coll_name = row[1]
        path_parts = coll_name.split('/')
        group_name = path_parts[3]

        if 'research-' in group_name:
            checkMetadataXmlForSchemaIdentifier(
                callback, rods_zone, coll_name, group_name, "yoda-metadata.xml")
        elif 'vault-' in group_name:
            # Get vault package path (first five path components).
            vault_package = '/'.join(path_parts[:5])
            data_name = getLatestVaultMetadataXml(callback, vault_package)
            if data_name != "":
                checkMetadataXmlForSchemaIdentifier(
                    callback, rods_zone, vault_package, group_name, data_name)
            else:
                callback.writeLine("stdout", "Missing metadata file: %s" % (vault_package))

    callback.writeString("stdout", "[METADATA] Finished check for schema identifiers.\n")
def getUnpreservableFiles(callback, rei, folder, list):
    """Return the file extensions in *folder* that are not on a preservable-formats list.

    :param callback: iRODS rule callback
    :param rei: rule execution instance (used to determine the client zone)
    :param folder: collection path whose files are scanned (prefix match)
    :param list: name of the preservable file formats list, without the
                 .json extension (parameter name shadows the builtin `list`;
                 kept unchanged for interface compatibility)
    :returns: dict with key 'formats' holding the unique unpreservable
              extensions, lowercase, without leading dot, in first-seen order
    """
    rods_zone = session_vars.get_map(rei)["client_user"]["irods_zone"]

    # Retrieve JSON list of preservable file formats.
    # (local renamed from `json` to avoid shadowing the json module)
    formats_json = parseJson(
        callback, "/" + rods_zone + "/yoda/file_formats/" + list + ".json")
    # Set membership is O(1) per lookup instead of O(n) on the raw list.
    preservable_formats = set(formats_json['formats'])

    # Retrieve all files in collection.
    iter = genquery.row_iterator(
        "DATA_NAME, COLL_NAME",
        "COLL_NAME like '%s%%'" % (folder),
        genquery.AS_LIST, callback)

    # Collect each unpreservable extension once, preserving first-seen order
    # (replaces the original append-then-dedup O(n^2) pass).
    unpreservable = []
    seen = set()
    for row in iter:
        filename, file_extension = os.path.splitext(row[0])
        # Convert to lowercase and remove dot.
        file_extension = (file_extension.lower())[1:]
        if file_extension not in preservable_formats and file_extension not in seen:
            seen.add(file_extension)
            unpreservable.append(file_extension)

    return {'formats': unpreservable}
def storeJsonPayload(cbk, rei, jobUUID='', computeResource='', jsonString=None):
    """Store a JSON payload as config.json under the client's home collection.

    Creates the (sub)collection, then — when *jsonString* is given — creates
    a config.json data object on *computeResource* and writes the payload.

    :param cbk: iRODS rule callback
    :param rei: rule execution instance (supplies zone and user name)
    :param jobUUID: optional subcollection appended to the home path
    :param computeResource: destination resource for the data object
    :param jsonString: JSON content to write; nothing is written when None
    :returns: path of the created config file, or "" on failure / no payload
    """
    map_ = session_vars.get_map(rei)['client_user']
    colln = '/{irods_zone}/home/{user_name}'.format(**map_)
    if jobUUID:
        colln += "/{}".format(jobUUID)

    retv = cbk.msiCollCreate(colln, "0", 0)
    cbk.writeLine('stdout', 'coll_create -> {}'.format(retv['code']))
    cbk.writeLine('stdout', 'coln = {} '.format(colln))

    config_file_name = ""
    if jsonString is not None:
        descriptor = ""
        config_file_name = colln + "/config.json"
        try:
            create_rv = cbk.msiDataObjCreate(
                config_file_name,
                "forceFlag=++++destRescName={}".format(computeResource), 0)
            descriptor = create_rv['arguments'][2]
        except Exception:
            # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
            # are no longer swallowed; the failure path below still runs.
            config_file_name = ""

        if type(descriptor) is not int:
            # Create failed (or returned a non-integer descriptor);
            # removed a dead `pass` that preceded this log line.
            cbk.writeLine("serverLog", "Could not create JSON data object")
        else:
            try:
                write_rv = cbk.msiDataObjWrite(descriptor, jsonString, 0)
                bytesWritten = write_rv['arguments'][2]
                cbk.writeLine("stdout", "{} bytes written".format(bytesWritten))
                close_rv = cbk.msiDataObjClose(descriptor, 0)
                cbk.writeLine(
                    "stdout",
                    "descriptor {} closed -> {}".format(
                        descriptor, close_rv['arguments'][1]))
            except Exception:
                # Best-effort write: failures are deliberately ignored,
                # but no longer via a bare `except:`.
                pass

    return config_file_name
def pythonRuleEnginePluginTest(rule_args, callback, rei):
    """Debug-log every rule argument with its index, then dump the session variable map."""
    for index, arg in enumerate(rule_args):
        logger.debug(str(index) + ":" + str(arg))
    logging.info("rei:")
    sv = session_vars.get_map(rei)
    logging.info("svs:%s" % sv)
def groupUserExists(rule_args, callback, rei):
    """Set rule_args[3] to "true" when the user belongs to (or may read) the group.

    rule_args: [0] group name, [1] user (zone appended when missing),
               [2] "false" = members only, otherwise members or readers,
               [3] out: "true"/"false".
    """
    groups = getGroupData(callback)
    user = rule_args[1]
    if '#' not in user:
        import session_vars
        zone = session_vars.get_map(rei)["client_user"]["irods_zone"]
        user = user + "#" + zone

    wanted = rule_args[0]
    if rule_args[2] == "false":
        matches = [g for g in groups
                   if wanted == g["name"] and user in g["members"]]
    else:
        matches = [g for g in groups
                   if wanted == g["name"]
                   and (user in g["read"] or user in g["members"])]

    rule_args[3] = "true" if len(matches) == 1 else "false"
def acPostProcForPut(rule_args, callback, rei):
    """Post-put hook: extract EXIF tags from the uploaded file and attach them as metadata.

    Reads the physical replica path from the session, pulls EXIF tags with
    exifread, joins them as '%'-separated key=value pairs, and adds them to
    the logical object path via the callback.
    """
    sv = session_vars.get_map(rei)
    phypath = sv['data_object']['file_path']
    objpath = sv['data_object']['object_path']

    exiflist = []
    with open(phypath, 'rb') as f:
        tags = exifread.process_file(f, details=False)
        # Skip thumbnails and binary maker notes; keep readable tag pairs.
        # items() replaces iteritems(), which is Python-2-only and raises
        # AttributeError under Python 3; items() works on both.
        for (k, v) in tags.items():
            if k not in ('JPEGThumbnail', 'TIFFThumbnail', 'Filename', 'EXIF MakerNote'):
                exifpair = '{0}={1}'.format(k, v)
                exiflist.append(exifpair)
    exifstring = '%'.join(exiflist)

    callback.add_metadata_to_objpath(exifstring, objpath, '-d')
    callback.writeLine('serverLog', 'PYTHON - acPostProcForPut() complete')
def iiCheckMetadataXmlForSchemaUpdates(rule_args, callback, rei):
    """Run one batch of metadata schema-update checks, rescheduling itself until done.

    rule_args: [0] starting COLL_ID, [1] batch size, [2] pause (seconds),
               [3] delay before the next batch (seconds).
    """
    coll_id = int(rule_args[0])
    batch = int(rule_args[1])
    pause = float(rule_args[2])
    delay = int(rule_args[3])
    rods_zone = session_vars.get_map(rei)["client_user"]["irods_zone"]

    # Check one batch of metadata schemas; a return of 0 means no more work.
    next_id = checkMetadataXmlForSchemaUpdatesBatch(
        callback, rods_zone, coll_id, batch, pause)

    if next_id != 0:
        # Schedule the next batch after a delay.
        callback.delayExec(
            "<PLUSET>%ds</PLUSET>" % delay,
            "iiCheckMetadataXmlForSchemaUpdates('%d', '%d', '%f', '%d')"
            % (next_id, batch, pause, delay),
            "")
def uuCheckVaultIntegrity(rule_args, callback, rei):
    """Verify one batch of vault data objects, rescheduling itself until the vault is done.

    rule_args: [0] starting DATA_ID, [1] batch size, [2] pause (seconds),
               [3] delay before the next batch (seconds).
    """
    data_id = int(rule_args[0])
    batch = int(rule_args[1])
    pause = float(rule_args[2])
    delay = int(rule_args[3])
    rods_zone = session_vars.get_map(rei)["client_user"]["irods_zone"]

    # Check one batch of vault data; 0 signals the whole vault has been checked.
    data_id = checkVaultIntegrityBatch(callback, rods_zone, data_id, batch, pause)
    if data_id == 0:
        return

    # More work remains: schedule the next batch after a delay.
    callback.delayExec(
        "<PLUSET>%ds</PLUSET>" % delay,
        "uuCheckVaultIntegrity('%d', '%d', '%f', '%d')"
        % (data_id, batch, pause, delay),
        "")
def getPreservableFormatsLists(callback, rei):
    """Collect all preservable file formats lists stored on the system.

    :param callback: iRODS rule callback
    :param rei: rule execution instance (used to determine the client zone)
    :returns: dict with key 'lists' mapping list name (filename without
              .json) to its parsed JSON content
    """
    preservableLists = {}
    # (removed an unused `zoneName = ""` local)
    rods_zone = session_vars.get_map(rei)["client_user"]["irods_zone"]

    # Retrieve all preservable file formats lists on the system.
    iter = genquery.row_iterator(
        "DATA_NAME, COLL_NAME",
        "COLL_NAME = '/{}/yoda/file_formats' AND DATA_NAME like '%%.json'".
        format(rods_zone),
        genquery.AS_LIST, callback)

    for row in iter:
        data_name = row[0]
        coll_name = row[1]

        # The list is named after the file, without its .json extension.
        filename, file_extension = os.path.splitext(data_name)
        # Local renamed from `json` to avoid shadowing the json module.
        list_content = parseJson(callback, coll_name + "/" + data_name)

        # Add to list of preservable file formats.
        preservableLists[filename] = list_content

    return {'lists': preservableLists}
def __init__(self, rei=None, event=""):
    """Capture the session's object path, client user name and zone for *event*."""
    self.event = event
    session_map = session_vars.get_map(rei)
    data_object = session_map['data_object']
    client_user = session_map['client_user']
    self.filePath = data_object['object_path']
    self.clientUser = client_user['user_name']
    self.zone = client_user['irods_zone']
def get_user_name(callback, rei):
    """Return the client user name from the session map, or "" when unavailable.

    :param callback: iRODS rule callback (unused; kept for rule signature)
    :param rei: rule execution instance queried via session_vars
    :returns: user name string, or "" if the session map is missing/incomplete
    """
    try:
        return session_vars.get_map(rei)['client_user']['user_name']
    except Exception:
        # Narrowed from a bare `except:` (which also swallowed SystemExit and
        # KeyboardInterrupt); still falls back to "" on any lookup failure.
        return ''