from custom_handlers import get_script_logger
import archivematicaFunctions

# NOTE(review): shutil, os and sys are used below but their imports are not
# visible in this chunk — presumably imported earlier in the file; confirm.


def _move_file(src, dst, exit_on_error=True):
    # Move src to dst. On IOError either re-raise (the default) or just log
    # and continue, so optional items (e.g. metadata that may not exist in
    # every bag) can be moved best-effort.
    print 'Moving', src, 'to', dst
    try:
        shutil.move(src, dst)
    except IOError:
        print 'Could not move', src
        if exit_on_error:
            raise


if __name__ == '__main__':
    logger = get_script_logger(
        "archivematica.mcp.client.restructureBagAIPToSIP")

    # The SIP being restructured; its path is the only CLI argument used here.
    sip_path = sys.argv[1]

    # Move everything out of data directory
    for item in os.listdir(os.path.join(sip_path, 'data')):
        src = os.path.join(sip_path, 'data', item)
        dst = os.path.join(sip_path, item)
        _move_file(src, dst)
    # data/ must now be empty; rmdir raises if anything was left behind.
    os.rmdir(os.path.join(sip_path, 'data'))

    # Move metadata and logs out of objects if they exist
    # (exit_on_error=False: these are optional, a failed move is tolerated).
    src = os.path.join(sip_path, 'objects', 'metadata')
    dst = os.path.join(sip_path, 'metadata')
    _move_file(src, dst, exit_on_error=False)
from django.db import transaction import django django.setup() # dashboard from main.models import File, Transfer # archivematicaCommon from custom_handlers import get_script_logger from fileOperations import addFileToTransfer from fileOperations import addFileToSIP import metsrw import namespaces as ns logger = get_script_logger("archivematica.mcp.client.assignFileUUID") def find_mets_file(unit_path): """ Return the location of the original METS in a Archivematica AIP transfer. """ src = os.path.join(unit_path, "metadata") mets_paths = glob.glob(os.path.join(src, "METS.*.xml")) if len(mets_paths) == 1: return mets_paths[0] elif len(mets_paths) == 0: raise Exception("No METS file found in %s" % src) else: raise Exception("Multiple METS files found in %s: %r" %
# @author Joseph Perry <*****@*****.**> import uuid import shutil import os import sys # dashboard from main.models import File # archivematicaCommon import archivematicaFunctions from custom_handlers import get_script_logger import databaseFunctions if __name__ == '__main__': logger = get_script_logger( "archivematica.mcp.client.createSIPfromTRIMTransferContainers") objectsDirectory = sys.argv[1] transferName = sys.argv[2] transferUUID = sys.argv[3] processingDirectory = sys.argv[4] autoProcessSIPDirectory = sys.argv[5] sharedPath = sys.argv[6] transfer_objects_directory = '%transferDirectory%objects' for container in os.listdir(objectsDirectory): sipUUID = uuid.uuid4().__str__() containerPath = os.path.join(objectsDirectory, container) if not os.path.isdir(containerPath): print >> sys.stderr, "file (not container) found: ", container continue
import sys
import subprocess
import os
import uuid

# archivematicaCommon
from custom_handlers import get_script_logger
from fileOperations import updateFileLocation
from archivematicaFunctions import unicodeToStr
import sanitizeNames

# dashboard
from main.models import File

if __name__ == '__main__':
    logger = get_script_logger("archivematica.mcp.client.sanitizeObjectNames")

    objectsDirectory = sys.argv[1]  # the directory to run sanitization on.
    sipUUID = sys.argv[2]
    date = sys.argv[3]
    taskUUID = sys.argv[4]
    groupType = sys.argv[5]
    # Wrap the group type in MCP placeholder delimiters,
    # e.g. "SIPDirectory" -> "%SIPDirectory%".
    groupType = "%%%s%%" % (groupType)
    groupSQL = sys.argv[6]  # NOTE(review): not used in this chunk; presumably used below.
    sipPath = sys.argv[7]  # the unit path
    groupID = sipUUID
    #relativeReplacement = "%sobjects/" % (groupType) #"%SIPDirectory%objects/"
    # Re-express the objects directory relative to the unit, using the
    # placeholder prefix instead of the absolute unit path.
    relativeReplacement = objectsDirectory.replace(
        sipPath, groupType, 1)  #"%SIPDirectory%objects/"
# dashboard from main.models import UnitVariable # archivematicaCommon from custom_handlers import get_script_logger import elasticSearchFunctions import storageService as storage_service import identifier_functions import django django.setup() from django.conf import settings as mcpclient_settings logger = get_script_logger("archivematica.mcp.client.indexAIP") def get_identifiers(job, sip_path): """Get additional identifiers to index.""" identifiers = [] # MODS mods_paths = glob( "{}/submissionDocumentation/**/mods/*.xml".format(sip_path)) for mods in mods_paths: identifiers.extend( identifier_functions.extract_identifiers_from_mods(mods)) # Islandora identifier islandora_path = glob(
import uuid # databaseFunctions requires Django to be set up import django django.setup() from django.utils import timezone # archivematicaCommon from archivematicaFunctions import get_file_checksum from custom_handlers import get_script_logger import databaseFunctions from main.models import Agent, File import metsrw logger = get_script_logger("archivematica.mcp.client.parse_dataverse_mets") transfer_objects_directory = "%transferDirectory%objects" class ParseDataverseError(Exception): """Exception class for failures that might occur during the execution of this script. """ def get_db_objects(job, mets, transfer_uuid): """ Get DB objects for files in METS. This also validates that files exist for each file asserted in the structMap
tool_output = 'Standard Output="{}"; Standard Error="{}"'.format( std_out, std_err) databaseFunctions.insertIntoEvents( eventType='compression', eventDetail=tool_info, eventOutcomeDetailNote=tool_output, fileUUID=file_uuid, ) update_unit(sip_uuid, compressed_location) return exit_code if __name__ == '__main__': logger = get_script_logger("archivematica.mcp.client.compressAIP") parser = argparse.ArgumentParser(description='Compress an AIP.') parser.add_argument('compression', type=str, help='%AIPCompressionAlgorithm%') parser.add_argument('compression_level', type=str, help='%AIPCompressionLevel%') parser.add_argument('sip_directory', type=str, help='%SIPDirectory%') parser.add_argument('sip_name', type=str, help='%SIPName%') parser.add_argument('sip_uuid', type=str, help='%SIPUUID%') args = parser.parse_args() sys.exit( compress_aip(args.compression, args.compression_level, args.sip_directory, args.sip_name, args.sip_uuid))
# Note that linkTaskManagerFiles.py will take the highest exit code it has seen # from all tasks and will use that as the exit code of the job as a whole. SUCCESS_CODE = 0 NOT_APPLICABLE_CODE = 0 FAIL_CODE = 1 def main(file_path, file_uuid, sip_uuid, shared_path, file_type): setup_dicts(mcpclient_settings) policy_checker = PolicyChecker( file_path, file_uuid, sip_uuid, shared_path, file_type) return policy_checker.check() LOGGER = get_script_logger("archivematica.mcp.client.policyCheck") class PolicyChecker(object): """Checks whether a given file conforms to all of the MediaConch policies that the system is configured to run against that type of file, given the file's format and its purpose, i.e., whether it is intended for access or preservation. Usage involves initializing on a file and then calling the ``check`` method. """ def __init__(self, file_path, file_uuid, sip_uuid, shared_path, file_type): self.file_path = file_path self.file_uuid = file_uuid self.sip_uuid = sip_uuid
import os # archivematicaCommon from custom_handlers import get_script_logger import django import scandir django.setup() from django.db import connection from django.db import transaction # dashboard from main.models import File, FileID, FileFormatVersion logger = get_script_logger( "archivematica.mcp.client.setMaildirFileGrpUseAndFileIDs") def get_files(sip_uuid, current_location, removed_time=0): return File.objects.filter(uuid=sip_uuid, current_location=current_location, removedtime=removed_time) def insert_file_format_version(file_uuid, description): sql = """ INSERT INTO {file_format_version} (fileUUID, fileID) VALUES (%s, ( SELECT pk FROM {file_id} WHERE
elif fileUUID and not fileGrpUUID: fileOperations.updateFileGrpUse( fileUUID, "TRIM container metadata") os.removedirs(src) else: destDir = "metadata" if item == "manifest.txt": destDir = "metadata/submissionDocumentation" dst = os.path.join(unitPath, destDir, item) fileOperations.updateFileLocation2(src, dst, unitPath, unitIdentifier, unitIdentifierType, unitPathReplaceWith) files = fileOperations.getFileUUIDLike(dst, unitPath, unitIdentifier, unitIdentifierType, unitPathReplaceWith) for key, value in files.iteritems(): fileUUID = value fileOperations.updateFileGrpUse(fileUUID, "TRIM metadata") if __name__ == '__main__': logger = get_script_logger( "archivematica.mcp.client.trimRestructureForCompliance") transferUUID = sys.argv[1] transferName = sys.argv[2] transferPath = sys.argv[3] restructureTRIMForComplianceFileUUIDsAssigned(transferPath, transferUUID)
args = rd.to_gnu_options exitstatus, stdout, stderr = executeOrRun(rule.command.script_type, script, arguments=args) if exitstatus != 0: succeeded = False output_path = rd.replace(rule.command.output_location)[0] relative_path = output_path.replace(rd['%SIPDirectory%'], '%SIPDirectory%') event = insert_transcription_event(exitstatus, file_uuid, rule, relative_path) if os.path.isfile(output_path): insert_file_into_database(file_uuid, rd['%SIPUUID%'], event, rule, output_path, relative_path) return 0 if succeeded else 1 if __name__ == '__main__': logger = get_script_logger("archivematica.mcp.client.transcribeFile") task_uuid = sys.argv[1] file_uuid = sys.argv[2] transcribe = sys.argv[3] if transcribe == 'False': print('Skipping transcription') sys.exit(0) sys.exit(main(task_uuid, file_uuid))
""" import argparse from functools import wraps from django.db import transaction import django django.setup() # dashboard from main.models import DashboardSetting, File, Identifier # archivematicaCommon import bindpid from custom_handlers import get_script_logger from archivematicaFunctions import str2bool logger = get_script_logger('archivematica.mcp.client.bind_pid') class BindPIDException(Exception): """If I am raised, return 1.""" exit_code = 1 class BindPIDWarning(Exception): """If I am raised, return 0.""" exit_code = 0 def exit_on_known_exception(func): """Decorator that makes this module's ``main`` function cleaner by handling early exiting by catching particular exceptions.
import django from django.conf import settings as mcpclient_settings from django.contrib.auth.models import User from django.core.mail import send_mail from django.db import transaction from django.template import Context, Template from main.models import File, Job, Report, SIP, Task import components.helpers as helpers from custom_handlers import get_script_logger django.setup() logger = get_script_logger('archivematica.mcp.client.normalizeReport') # Based on http://leemunroe.github.io/responsive-html-email-template/email.html EMAIL_TEMPLATE = """ <!doctype html> <html> <head> <meta name="viewport" content="width=device-width"> <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"> <title>Normalization failure report - failures detected!</title> </head> <body> <table cellpadding="10" width="100%"> <tr> <td></td> <td>
# @package Archivematica # @subpackage archivematicaClientScript # @author Mike Cantelon <*****@*****.**> import sys # elasticSearchFunctions requires Django to be set up import django django.setup() # archivematicaCommon from custom_handlers import get_script_logger import elasticSearchFunctions from django.conf import settings as mcpclient_settings logger = get_script_logger( 'archivematica.mcp.client.elasticSearchIndexProcessTransfer') if __name__ == '__main__': if not mcpclient_settings.SEARCH_ENABLED: logger.info('Skipping indexing: indexing is currently disabled.') sys.exit(0) transfer_path = sys.argv[1] transfer_uuid = sys.argv[2] try: status = sys.argv[3] except IndexError: status = '' elasticSearchFunctions.setup_reading_from_conf(mcpclient_settings) client = elasticSearchFunctions.get_client()
""" Returns True if this package has already been extracted, False otherwise. """ # Look for files in a directory that starts with the package name files = File.objects.filter(transfer=f.transfer, currentlocation__startswith=f.currentlocation, removedtime__isnull=True).exclude(uuid=f.uuid) # Check for unpacking events that reference the package if Event.objects.filter(file_uuid__in=files, event_type='unpacking', event_detail__contains=f.currentlocation).exists(): return True return False def main(sip_uuid): transfer = Transfer.objects.get(uuid=sip_uuid) for f in transfer.file_set.filter(removedtime__isnull=True).iterator(): if is_extractable(f) and not already_extracted(f): print(f.currentlocation, 'is extractable and has not yet been extracted.') return 0 print('No extractable files found.') return 1 if __name__ == '__main__': logger = get_script_logger("archivematica.mcp.client.hasPackages") sys.exit(main(sys.argv[1]))
import sys
from uuid import uuid4

# storageService requires Django to be set up
import django
django.setup()

from metsrw.plugins import premisrw

from main.models import UnitVariable, Event, Agent, DublinCore

# archivematicaCommon
from custom_handlers import get_script_logger
import storageService as storage_service
from archivematicaFunctions import escape

LOGGER = get_script_logger("archivematica.mcp.client.storeAIP")


def get_upload_dip_path(aip_path):
    """Return ``aip_path`` with its first 'uploadedDIPs' path segment
    rewritten to 'uploadDIP'; any later occurrences are left untouched.
    The result is always anchored with a leading path separator.
    """
    segments = os.path.normpath(aip_path).split(os.path.sep)
    try:
        # Only the first matching segment is rewritten.
        segments[segments.index('uploadedDIPs')] = 'uploadDIP'
    except ValueError:
        # No 'uploadedDIPs' segment present; path passes through unchanged.
        pass
    return os.path.sep + os.path.join(*segments)
for src, dst in update: eventDetail = "" eventOutcomeDetailNote = "moved from=\"" + src + "\"; moved to=\"" + dst + "\"" updateFileLocation( src, dst, "movement", date, eventDetail, sipUUID=SIPUUID, eventOutcomeDetailNote=eventOutcomeDetailNote) return exitCode if __name__ == '__main__': logger = get_script_logger( "archivematica.mcp.client.checkForAccessDirectory") parser = OptionParser() #'--SIPDirectory "%SIPDirectory%" --accessDirectory "objects/access/" --objectsDirectory "objects" --DIPDirectory "DIP" -c' parser.add_option("-s", "--SIPDirectory", action="store", dest="SIPDirectory", default="") parser.add_option("-u", "--SIPUUID", action="store", dest="SIPUUID", default="") parser.add_option("-a", "--accessDirectory",
import shutil
import os
import sys

import django
django.setup()

# dashboard
from main.models import File, Directory, SIP, Transfer

# archivematicaCommon
import archivematicaFunctions
from custom_handlers import get_script_logger
import databaseFunctions

if __name__ == '__main__':
    logger = get_script_logger(
        "archivematica.mcp.client.createSIPFromTransferObjects")

    # Positional arguments supplied by the MCP job runner.
    objectsDirectory = sys.argv[1]
    transferName = sys.argv[2]
    transferUUID = sys.argv[3]
    processingDirectory = sys.argv[4]
    autoProcessSIPDirectory = sys.argv[5]
    sharedPath = sys.argv[6]

    # The SIP inherits the transfer's name; it is assembled under the
    # processing directory and later lands in the auto-process SIP directory.
    sipName = transferName
    tmpSIPDir = os.path.join(processingDirectory, sipName) + "/"
    destSIPDir = os.path.join(autoProcessSIPDirectory, sipName) + "/"
    # Lay out the standard Archivematica SIP directory skeleton.
    archivematicaFunctions.create_structured_directory(
        tmpSIPDir, manual_normalization=False)

    # If transfer is a reingested AIP, then pass that info to the SIP
# { "eventOutcomeInformation": "fail", # "eventOutcomeDetailNote": "format=\"Not detected\"; result=\"Not well-formed\"" } output = ast.literal_eval(stdout) event_detail = 'program="{tool.description}"; version="{tool.version}"'.format( tool=rule.command.tool) print('Creating validation event for {} ({})'.format( file_path, file_uuid)) databaseFunctions.insertIntoEvents( fileUUID=file_uuid, eventType='validation', eventDetail=event_detail, eventOutcome=output.get('eventOutcomeInformation'), eventOutcomeDetailNote=output.get('eventOutcomeDetailNote'), ) if failed: return -1 else: return 0 if __name__ == '__main__': logger = get_script_logger("archivematica.mcp.client.validateFile") file_path = sys.argv[1] file_uuid = sys.argv[2] sip_uuid = sys.argv[3] sys.exit(main(file_path, file_uuid, sip_uuid))
# use universal newline mode to support unusual newlines, like \r with open(metadataCSVFilePath, 'rbU') as f: reader = csv.reader(f) # Parse first row as header header = reader.next() # Strip filename column, strip whitespace from header values header = [h.strip() for h in header[1:]] # Parse data for row in reader: if not row: continue entry_name = row[0] if entry_name.endswith("/"): entry_name = entry_name[:-1] # Strip file/dir name from values row = row[1:] values = archivematicaFunctions.OrderedListsDict(zip(header, row)) if entry_name in metadata and metadata[entry_name] != values: print >> sys.stderr, 'Metadata for', entry_name, 'being overwritten. Old:', metadata[ entry_name], 'New:', values metadata[entry_name] = values return collections.OrderedDict(metadata) # Return a normal OrderedDict if __name__ == '__main__': logger = get_script_logger( "archivematica.mcp.client.createMETSMetadataCSV") parseMetadata(sys.argv[1])
import django from django.conf import settings as mcpclient_settings from django.contrib.auth.models import User from django.core.mail import send_mail from django.db import connection from django.db import transaction from main.models import Job, Report from custom_handlers import get_script_logger from externals.HTML import HTML django.setup() logger = get_script_logger("archivematica.mcp.client.emailFailReport") COLORS = { Job.STATUS_COMPLETED_SUCCESSFULLY: "#dff0d8", Job.STATUS_FAILED: "#f2dede", "default": "yellow", } def get_emails_from_dashboard_users(): return User.objects.filter( is_active=True, userprofile__system_emails=True).values_list("email", flat=True) def send_email(subject, to, content):
(ffv, created) = FileFormatVersion.objects.get_or_create( file_uuid=file_, defaults={'format_version': version}) if not created: # Update the version if it wasn't created new ffv.format_version = version ffv.save() print "{} identified as a {}".format(file_path, version.description) write_identification_event(file_uuid, command, format=version.pronom_id) write_file_id(file_uuid, format=version, output=output) return 0 if __name__ == '__main__': logger = get_script_logger("archivematica.mcp.client.identifyFileFormat") parser = argparse.ArgumentParser(description='Identify file formats.') parser.add_argument('idcommand', type=str, help='%IDCommand%') parser.add_argument('file_path', type=str, help='%relativeLocation%') parser.add_argument('file_uuid', type=str, help='%fileUUID%') parser.add_argument( '--disable-reidentify', action='store_true', help='Disable identification if it has already happened for this file.' ) args = parser.parse_args() sys.exit( main(args.idcommand, args.file_path, args.file_uuid, args.disable_reidentify))
sourceType="aip creation", use='metadata') # To make this work with the createMETS2 (for SIPs) databaseFunctions.insertIntoDerivations(file_uuid, file_uuid) # Insert the count of AIPs in the AIC into UnitVariables, so it can be # indexed later UnitVariable.objects.create(unittype="SIP", unituuid=aic['uuid'], variable="AIPsinAIC", variablevalue=str(len(aips))) def create_aic_mets(aic_uuid, aic_dir): aips = get_aip_info(aic_dir) aic = {'dir': aic_dir, 'uuid': aic_uuid} create_mets_file(aic, aips) if __name__ == '__main__': logger = get_script_logger("archivematica.mcp.client.createAICMETS") parser = argparse.ArgumentParser(description='') parser.add_argument('aic_uuid', action='store', type=str, help="%SIPUUID%") parser.add_argument('aic_dir', action='store', type=str, help="%SIPDirectory%") args = parser.parse_args() create_aic_mets(args.aic_uuid, args.aic_dir)
def updateDB(dst, transferUUID):
    # Persist the transfer's new current location (expressed with the
    # %sharedPath% placeholder) in the dashboard database.
    Transfer.objects.filter(uuid=transferUUID).update(currentlocation=dst)


def moveSIP(src, dst, transferUUID, sharedDirectoryPath):
    # Move a transfer on disk and record its new location in the database.
    # NOTE(review): Transfer, renameAsSudo, os and sys are not imported in
    # this chunk — presumably imported earlier in the file; confirm.
    # os.rename(src, dst)
    if src.endswith("/"):
        src = src[:-1]

    # Build the DB-facing destination: swap the absolute shared directory
    # prefix for the %sharedPath% placeholder.
    dest = dst.replace(sharedDirectoryPath, "%sharedPath%", 1)
    # When dst names a directory (trailing "/" or "/."), the unit ends up
    # inside it under its own basename.
    if dest.endswith("/"):
        dest = os.path.join(dest, os.path.basename(src))
    if dest.endswith("/."):
        dest = os.path.join(dest[:-1], os.path.basename(src))

    # Directories are stored with a trailing slash by convention.
    if os.path.isdir(src):
        dest += "/"

    # The DB is updated before the filesystem move is attempted.
    # NOTE(review): if renameAsSudo fails, the DB already points at the new
    # location — looks intentional (pre-existing behavior), but confirm.
    updateDB(dest, transferUUID)
    renameAsSudo(src, dst)


if __name__ == '__main__':
    logger = get_script_logger("archivematica.mcp.client.moveTransfer")

    src = sys.argv[1]
    dst = sys.argv[2]
    transferUUID = sys.argv[3]
    sharedDirectoryPath = sys.argv[4]
    moveSIP(src, dst, transferUUID, sharedDirectoryPath)
from fpr.models import FPCommand from main.models import Directory, FileFormatVersion, File, Transfer # archivematicaCommon from custom_handlers import get_script_logger from executeOrRunSubProcess import executeOrRun from databaseFunctions import fileWasRemoved from fileOperations import addFileToTransfer, updateSizeAndChecksum, rename from archivematicaFunctions import get_dir_uuids, format_subdir_path # clientScripts from has_packages import already_extracted file_path_cache = {} logger = get_script_logger("archivematica.mcp.client.extractContents") TRANSFER_DIRECTORY = "%transferDirectory%" def temporary_directory(file_path, date): if file_path_cache.get(file_path): return file_path_cache[file_path] else: path = file_path + '-' + date file_path_cache[file_path] = path return path def tree(root): for dirpath, __, files in os.walk(root):
import requests import django django.setup() from django.conf import settings as mcpclient_settings from django.db import transaction # dashboard from main import models # archivematicaCommon from custom_handlers import get_script_logger import elasticSearchFunctions import storageService as storage_service logger = get_script_logger("archivematica.mcp.client.post_store_aip_hook") COMPLETED = 0 NO_ACTION = 1 ERROR = 2 def dspace_handle_to_archivesspace(job, sip_uuid): """Fetch the DSpace handle from the Storage Service and send to ArchivesSpace.""" # Get association to ArchivesSpace if it exists try: digital_object = models.ArchivesSpaceDigitalObject.objects.get(sip_id=sip_uuid) except models.ArchivesSpaceDigitalObject.DoesNotExist: job.pyprint('SIP', sip_uuid, 'not associated with an ArchivesSpace component') return NO_ACTION job.pyprint('Digital Object', digital_object.remoteid, 'for SIP', digital_object.sip_id, 'found')
try: etree.fromstring(stdout) insertIntoFPCommandOutput(file_uuid, stdout, rule.uuid) print('Saved XML output for command "{}" ({})'.format( rule.command.description, rule.command.uuid)) except etree.XMLSyntaxError: failed = True print( 'XML output for command "{}" ({}) was not valid XML; not saving to database' .format(rule.command.description, rule.command.uuid), file=sys.stderr) else: print( 'Tool output for command "{}" ({}) is not XML; not saving to database' .format(rule.command.description, rule.command.uuid), file=sys.stderr) if failed: return -1 else: return 0 if __name__ == '__main__': logger = get_script_logger("archivematica.mcp.client.characterizeFile") file_path = sys.argv[1] file_uuid = sys.argv[2] sip_uuid = sys.argv[3] sys.exit(main(file_path, file_uuid, sip_uuid))
# # You should have received a copy of the GNU General Public License # along with Archivematica. If not, see <http://www.gnu.org/licenses/>. """Maps Dataverse specific elements into the AIP METS file generated on ingest. """ from __future__ import print_function import sys import archivematicaFunctions from custom_handlers import get_script_logger import metsrw # Create a module level logger. logger = get_script_logger("archivematica.mcp.client.createMETSDataverse") def create_dataverse_sip_dmdsec(job, sip_path): """ Return SIP-level Dataverse dmdSecs for inclusion in the AIP METS. :param str sip_path: ... :return: List of dmdSec Elements """ logger.info("Create dataverse sip dmdsec %s", sip_path) # Retrieve METS.xml from the file system. metadata_mets_paths = archivematicaFunctions.find_metadata_files( sip_path, "METS.xml", only_transfers=True) if not metadata_mets_paths: return []
import shutil

import django
django.setup()

from main.models import Transfer, SIP
from verifyBAG import verify_bag

# archivematicaCommon
from archivematicaFunctions import (create_structured_directory,
                                    reconstruct_empty_directories,
                                    REQUIRED_DIRECTORIES, OPTIONAL_FILES)
from custom_handlers import get_script_logger

logger = get_script_logger('archivematica.mcp.client.restructureForCompliance')


def _move_file(src, dst, exit_on_error=True):
    # Move src to dst. On IOError either re-raise (the default) or just
    # report and continue, allowing best-effort moves of optional items.
    logger.info('Moving %s to %s', src, dst)
    try:
        shutil.move(src, dst)
    except IOError:
        print('Could not move', src)
        if exit_on_error:
            raise


def restructure_transfer(unit_path):
    # Bring a transfer into the standard Archivematica layout.
    # NOTE(review): the function body continues beyond this chunk; only the
    # first step is visible here.
    # Create required directories
    create_structured_directory(unit_path, printing=True)
# (at your option) any later version. # # Archivematica is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Archivematica. If not, see <http://www.gnu.org/licenses/>. # @package Archivematica # @subpackage archivematicaClientScript # @author Joseph Perry <*****@*****.**> import sys import django django.setup() # dashboard from main.models import Transfer # archivematicaCommon from custom_handlers import get_script_logger if __name__ == '__main__': logger = get_script_logger("archivematica.mcp.client.setTransferType") transferUUID = sys.argv[1] transferType = sys.argv[2] Transfer.objects.filter(uuid=transferUUID, type__isnull=False).exclude(type="Archivematica AIP").update(type=transferType)
mdwrap = etree.SubElement(sourcemd, ns.metsBNS + 'mdWrap', {'MDTYPE': 'OTHER', 'OTHERMDTYPE': 'BagIt'}) xmldata = etree.SubElement(mdwrap, ns.metsBNS + 'xmlData') bag_metadata = etree.SubElement(xmldata, "transfer_metadata") for key, value in bagdata.iteritems(): try: bag_tag = etree.SubElement(bag_metadata, key) except ValueError: print >> sys.stderr, "Skipping bag key {}; not a valid XML tag name".format(key) continue bag_tag.text = value return el if __name__ == '__main__': logger = get_script_logger("archivematica.mcp.client.createMETS2") from optparse import OptionParser parser = OptionParser() parser.add_option("-s", "--baseDirectoryPath", action="store", dest="baseDirectoryPath", default="") parser.add_option("-b", "--baseDirectoryPathString", action="store", dest="baseDirectoryPathString", default="SIPDirectory") #transferDirectory/ parser.add_option("-f", "--fileGroupIdentifier", action="store", dest="fileGroupIdentifier", default="") #transferUUID/sipUUID parser.add_option("-t", "--fileGroupType", action="store", dest="fileGroupType", default="sipUUID") # parser.add_option("-x", "--xmlFile", action="store", dest="xmlFile", default="") parser.add_option("-a", "--amdSec", action="store_true", dest="amdSec", default=False) (opts, args) = parser.parse_args() baseDirectoryPath = opts.baseDirectoryPath XMLFile = opts.xmlFile includeAmdSec = opts.amdSec baseDirectoryPathString = "%%%s%%" % (opts.baseDirectoryPathString)