def main(args_):
    '''
    Aggregate technical metadata for every AV file in a package and append
    one PBCore CSV record describing the package as a whole.
    '''
    # if multiple files are present, this script will treat them as a single
    # instantiation/representation and get aggregate metadata about the whole
    # package. For now, this will be a clumsy implementation - the first file
    # will provide most metadata. Things like duration/bitrate/filesize
    # will be calculated as a whole.
    # Although another way would be that every call is looped, and
    # this could catch files that should not be in the package, eg. a 4:2:2
    # file in a 4:2:0 package..
    # yup - do it that way!
    args = parse_args(args_)
    all_files = ififuncs.recursive_file_list(args.input)
    # silence stays True until a file with an audio track is found.
    silence = True
    if args.user:
        user = args.user
    else:
        user = ififuncs.get_user()
    acquisition_type = ''
    if args.acquisition_type:
        acquisition_type = ififuncs.get_acquisition_type(
            args.acquisition_type)[0]
    # The non-UUID4 directory name is treated as the instantiation identifier.
    # NOTE(review): instantiationIdentif is unbound below if every entry in
    # args.input validates as a UUID4 — confirm upstream guarantees one OE dir.
    for dirs in os.listdir(args.input):
        if ififuncs.validate_uuid4(dirs) is None:
            instantiationIdentif = dirs
    Accession_Number = get_accession_number(args.input)
    if args.reference:
        Reference_Number = args.reference.upper()
    else:
        Reference_Number = get_reference_number(args.input)
    if args.p:
        # NOTE(review): metadata_dir/logs_dir are only bound if folders named
        # 'metadata' and 'logs' exist under args.input — verify against caller.
        for root, _, filenames in os.walk(args.input):
            if os.path.basename(root) == 'metadata':
                metadata_dir = root
            elif os.path.basename(root) == 'logs':
                logs_dir = root
        csv_filename = os.path.join(
            metadata_dir, Accession_Number + '_pbcore.csv')
        sipcreator_log = os.path.join(
            logs_dir, instantiationIdentif + '_sip_log.log')
        ififuncs.generate_log(sipcreator_log,
                              'EVENT = makepbcore.py started')
        ififuncs.generate_log(
            sipcreator_log,
            'eventDetail=makepbcore.py %s' % ififuncs.get_script_version('makepbcore.py'))
        ififuncs.generate_log(sipcreator_log,
                              'Command line arguments: %s' % args)
        ififuncs.generate_log(sipcreator_log,
                              'EVENT = agentName=%s' % user)
    else:
        # Fallback CSV name when not running in package (-p) mode.
        csv_filename = 'blaa.csv'
    print ' - Metadata will be stored in %s' % csv_filename
    # NOTE(review): md5_manifest/sha512_manifest may be unbound later if the
    # manifests are missing from args.input — confirm packages always have them.
    for filenames in os.listdir(args.input):
        if '_manifest.md5' in filenames:
            md5_manifest = os.path.join(args.input, filenames)
        elif 'manifest-sha512.txt' in filenames:
            sha512_manifest = os.path.join(args.input, filenames)
    make_csv(csv_filename)
    # Aggregate accumulators across all files in the package.
    ms = 0
    FrameCount = 0
    instantFileSize_byte = 0
    instantFileSize_gigs = 0
    # Per-file value lists: used later to warn when files in the package
    # disagree on any technical attribute.
    scan_types = []
    matrix_list = []
    transfer_list = []
    colour_primaries_list = []
    color_spaces = []
    chroma = []
    frame_sizes = []
    par_list = []
    container_list = []
    fps_list = []
    sample_rate_list = []
    track_count_list = []
    interlace_list = []
    compression_list = []
    pix_fmt_list = []
    audio_fmt_list = []
    audio_codecid_list = []
    audio_codec_list = []
    au_bitdepth_list = []
    video_codecid_list = []
    video_codec_version_list = []
    video_codec_profile_list = []
    timecode_list = []
    channels_list = []
    for source in all_files:
        # Extract a PBCore2 XML report per file via the mediainfo CLI.
        metadata = subprocess.check_output(
            ['mediainfo', '--Output=PBCore2', source])
        root = etree.fromstring(metadata)
        # py2 print statement: parses as print((' - Analysing %s') % source).
        print(' - Analysing %s') % source
        pbcore_namespace = root.xpath('namespace-uri(.)')
        track_type = root.xpath('//ns:essenceTrackType',
                                namespaces={'ns': pbcore_namespace})
        if len(track_type) > 0:
            for track in track_type:
                if track.text == 'Video':
                    essenceTrackEncodvid = get_metadata(
                        "ns:essenceTrackEncoding",
                        track.getparent(), pbcore_namespace)
                    vcodec_attributes = get_attributes(track.getparent(),
                                                       pbcore_namespace)
                elif track.text == 'Audio':
                    silence = False
                    essenceTrackEncod_au = get_metadata(
                        "//ns:essenceTrackEncoding",
                        track.getparent(), pbcore_namespace)
                    audio_codec_list.append(essenceTrackEncod_au)
                    acodec_attributes = get_attributes(track.getparent(),
                                                       pbcore_namespace)
                    # Codec id attribute may be absent — fall back to 'n/a'.
                    try:
                        audio_codecid = acodec_attributes['ref']
                    except KeyError:
                        audio_codecid = 'n/a'
                    essenceTrackSampling = ififuncs.get_mediainfo(
                        'samplerate',
                        '--inform=Audio;%SamplingRate_String%', source)
                    sample_rate_list.append(essenceTrackSampling)
                    essenceBitDepth_au = get_metadata(
                        "//ns:essenceTrackBitDepth", root, pbcore_namespace)
                    audio_codecid_list.append(audio_codecid)
                    au_bitdepth_list.append(essenceBitDepth_au)
                    channels = get_metadata(
                        "//ns:essenceTrackAnnotation[@annotationType='Channel(s)']",
                        track.getparent(), pbcore_namespace)
                    channels_list.append(channels)
        ScanType = get_metadata(
            "//ns:essenceTrackAnnotation[@annotationType='ScanType']",
            root, pbcore_namespace)
        scan_types.append(ScanType)
        matrix_coefficients = get_metadata(
            "//ns:essenceTrackAnnotation[@annotationType='matrix_coefficients']",
            root, pbcore_namespace)
        timecode_source, starting_timecode = get_timecode(
            pbcore_namespace, root, source)
        timecode_list.append(starting_timecode)
        matrix_list.append(matrix_coefficients)
        transfer_characteris = get_metadata(
            "//ns:essenceTrackAnnotation[@annotationType='transfer_characteristics']",
            root, pbcore_namespace)
        transfer_list.append(transfer_characteris)
        colour_primaries = get_metadata(
            "//ns:essenceTrackAnnotation[@annotationType='color_primaries']",
            root, pbcore_namespace)
        colour_primaries_list.append(colour_primaries)
        # Aggregate totals across the whole package.
        FrameCount += int(
            get_metadata(
                "//ns:essenceTrackAnnotation[@annotationType='FrameCount']",
                root, pbcore_namespace))
        instantFileSize_byte += int(
            get_metadata("//ns:instantiationFileSize", root,
                         pbcore_namespace))
        # Overall bitrate in Mbps, rounded to 2 decimal places.
        instantDataRate = round(
            float(
                ififuncs.get_mediainfo(
                    'OverallBitRate',
                    '--inform=General;%OverallBitRate%', source)) / 1000 / 1000, 2)
        instantTracks = ififuncs.get_number_of_tracks(source)
        track_count_list.append(instantTracks)
        ms += ififuncs.get_milliseconds(source)
        ColorSpace = get_metadata(
            "//ns:essenceTrackAnnotation[@annotationType='ColorSpace']",
            root, pbcore_namespace)
        color_spaces.append(ColorSpace)
        ChromaSubsampling = get_metadata(
            "//ns:essenceTrackAnnotation[@annotationType='ChromaSubsampling']",
            root, pbcore_namespace)
        chroma.append(ChromaSubsampling)
        instantMediaty = get_metadata("//ns:instantiationMediaType",
                                      root, pbcore_namespace)
        essenceFrameSize = get_metadata("//ns:essenceTrackFrameSize",
                                        root, pbcore_namespace)
        frame_sizes.append(essenceFrameSize)
        PixelAspectRatio = get_metadata(
            "//ns:essenceTrackAnnotation[@annotationType='PixelAspectRatio']",
            root, pbcore_namespace)
        par_list.append(PixelAspectRatio)
        instantiationStandar = get_metadata(
            "//ns:instantiationAnnotation[@annotationType='Format']",
            root, pbcore_namespace)
        container_list.append(instantiationStandar)
        essenceFrameRate = get_metadata("//ns:essenceTrackFrameRate",
                                        root, pbcore_namespace)
        fps_list.append(essenceFrameRate)
        essenceAspectRatio = ififuncs.get_mediainfo(
            'DAR', '--inform=Video;%DisplayAspectRatio_String%', source)
        Interlacement = get_metadata(
            "//ns:instantiationAnnotation[@annotationType='Interlacement']",
            root, pbcore_namespace)
        interlace_list.append(Interlacement)
        Compression_Mode = get_metadata(
            "//ns:instantiationAnnotation[@annotationType='Compression_Mode']",
            root, pbcore_namespace)
        colour_range = get_metadata(
            "//ns:essenceTrackAnnotation[@annotationType='colour_range']",
            root, pbcore_namespace)
        format_version = get_metadata(
            "//ns:instantiationAnnotation[@annotationType='Format_Version']",
            root, pbcore_namespace)
        app_company_name = get_metadata(
            "//ns:instantiationAnnotation[@annotationType='Encoded_Application_CompanyName']",
            root, pbcore_namespace)
        app_name = get_metadata(
            "//ns:instantiationAnnotation[@annotationType='Encoded_Application_Name']",
            root, pbcore_namespace)
        app_version = get_metadata(
            "//ns:instantiationAnnotation[@annotationType='Encoded_Application_Version']",
            root, pbcore_namespace)
        library_name = get_metadata(
            "//ns:instantiationAnnotation[@annotationType='Encoded_Library_Name']",
            root, pbcore_namespace)
        library_version = get_metadata(
            "//ns:instantiationAnnotation[@annotationType='Encoded_Library_Version']",
            root, pbcore_namespace)
        compression_list.append(Compression_Mode)
        instantiationDate_mo = get_metadata(
            "//ns:instantiationDate[@dateType='file modification']",
            root, pbcore_namespace)
        instantDate_other = 'n/a'
        instantDate_type = 'n/a'
        pix_fmt = ififuncs.get_ffmpeg_fmt(source, 'video')
        pix_fmt_list.append(pix_fmt)
        audio_fmt = ififuncs.get_ffmpeg_fmt(source, 'audio')
        audio_fmt_list.append(audio_fmt)
        # Audio-less packages get placeholder audio values.
        if silence:
            audio_codecid = 'n/a'
            essenceBitDepth_au = 'n/a'
            essenceTrackEncod_au = 'n/a'
            essenceTrackSampling = 'n/a'
        # NOTE(review): vcodec_attributes is unbound if no Video track was
        # found for this source — an audio-only package would raise here.
        video_codecid = vcodec_attributes['ref']
        video_codecid_list.append(video_codecid)
        try:
            video_codec_version = vcodec_attributes['version']
        except KeyError:
            video_codec_version = 'n/a'
        try:
            # Strips a leading label from the annotation to leave the profile.
            video_codec_profile = vcodec_attributes['annotation'][8:]
        except KeyError:
            video_codec_profile = 'n/a'
        video_codec_version_list.append(video_codec_version)
        video_codec_profile_list.append(video_codec_profile)
    # Warn if any per-file attribute differs between files in the package.
    metadata_error = ''
    metadata_list = [
        scan_types, matrix_list, transfer_list, colour_primaries_list,
        color_spaces, chroma, frame_sizes, par_list, container_list,
        fps_list, sample_rate_list, track_count_list, interlace_list,
        compression_list, pix_fmt_list, audio_fmt_list, audio_codecid_list,
        audio_codec_list, au_bitdepth_list, video_codecid_list,
        video_codec_version_list, video_codec_profile_list, channels_list,
        timecode_list
    ]
    for i in metadata_list:
        if len(set(i)) > 1:
            metadata_error += 'WARNING - Your metadata values are not the same for all files: %s\n' % set(
                i)
            print metadata_error
            if args.p:
                ififuncs.generate_log(
                    sipcreator_log,
                    'EVENT = Metadata mismatch - Your metadata values are not the same for all files: %s' % set(i))
    tc = ififuncs.convert_millis(ms)
    # NOTE(review): hard-coded 25 fps for duration conversion — confirm all
    # packages are PAL-rate.
    instantiationDuratio = ififuncs.convert_timecode(25, tc)
    if args.donor:
        Donor = args.donor
    else:
        Donor = ''
    Edited_By = user
    Date_Created = ''
    Date_Last_Modified = ''
    Film_Or_Tape = 'Digital AV Object'
    Date_Of_Donation = ''
    reproduction_creator = ''
    if args.acquisition_type:
        if acquisition_type == 'Reproduction':
            # NOTE(review): instantiationDate_mo is set inside the file loop —
            # unbound here if all_files was empty.
            Date_Of_Donation = instantiationDate_mo.split('T')[0]
            # if a reproduction, then there's no Donor/transfer of title.
            Donor = 'n/a'
            if ififuncs.find_concat_user(
                    sipcreator_log) == 'Aoife Fitzmaurice':
                reproduction_creator = 'Aoife Fitzmaurice (2016)'
            elif ififuncs.find_concat_user(
                    sipcreator_log) == 'Kieran O\'Leary':
                reproduction_creator = 'Kieran O\'Leary (2013)'
    Habitat = ''
    backup_habitat = ''
    Type_Of_Deposit = acquisition_type
    if args.depositor_reference:
        Depositor_Reference = args.depositor_reference
    else:
        Depositor_Reference = ''
    Master_Viewing = 'Preservation Object'
    Language_Version = ''
    Condition_Rating = ''
    Companion_Elements = ''
    TTape_Origin = args.parent
    EditedNew = user
    FIO = 'In'
    CollectionTitle = ''
    Created_By = user
    instantTimeStart = 'n/a'
    instantFileSize_gigs = round(
        float(instantFileSize_byte) / 1024 / 1024 / 1024, 3)
    instantColors = 'n/a'
    instantLanguage = 'n/a'
    instantAltMo = 'n/a'
    essenceBitDepth_vid = ififuncs.get_mediainfo('duration',
                                                 '--inform=Video;%BitDepth%',
                                                 source)
    instantiationChanCon = 'n/a'
    colour_range = colour_range
    format_version = format_version
    TimeCode_FirstFrame = process_mixed_values(timecode_list)
    TimeCode_Source = timecode_source
    reproduction_reason = ''
    dig_object_descrip = ififuncs.get_digital_object_descriptor(args.input)
    # Write the single aggregate record. Column order must match make_csv().
    ififuncs.append_csv(csv_filename, [
        Reference_Number, Donor, Edited_By, Date_Created, Date_Last_Modified,
        Film_Or_Tape, Date_Of_Donation, Accession_Number, Habitat,
        backup_habitat, TTape_Origin, Type_Of_Deposit, Depositor_Reference,
        Master_Viewing, Language_Version, Condition_Rating,
        Companion_Elements, EditedNew, FIO, CollectionTitle, Created_By,
        instantiationIdentif, instantDate_other, instantDate_type,
        instantiationDate_mo, instantiationStandar, instantMediaty,
        instantFileSize_byte, instantFileSize_gigs, instantTimeStart,
        instantDataRate, instantTracks, instantColors, instantLanguage,
        instantAltMo, essenceTrackEncodvid, essenceFrameRate,
        essenceTrackSampling, essenceBitDepth_vid, essenceFrameSize,
        essenceAspectRatio, essenceTrackEncod_au, essenceBitDepth_au,
        instantiationDuratio, instantiationChanCon, PixelAspectRatio,
        FrameCount, ColorSpace, ChromaSubsampling, ScanType, Interlacement,
        Compression_Mode, colour_primaries, transfer_characteris,
        matrix_coefficients, pix_fmt, audio_fmt, audio_codecid,
        video_codecid, video_codec_version, video_codec_profile, channels,
        colour_range, format_version, TimeCode_FirstFrame, TimeCode_Source,
        app_company_name, app_name, app_version, library_name,
        library_version, reproduction_creator, reproduction_reason,
        dig_object_descrip,
    ])
    if args.p:
        # Log, then fold the new CSV into both package manifests.
        ififuncs.generate_log(
            sipcreator_log,
            'EVENT = Metadata extraction - eventDetail=Technical record creation using PBCore, eventOutcome=%s, agentName=makepbcore' % (csv_filename))
        ififuncs.generate_log(sipcreator_log,
                              'EVENT = makepbcore.py finished')
        ififuncs.checksum_replace(md5_manifest, sipcreator_log, 'md5')
        ififuncs.checksum_replace(sha512_manifest, sipcreator_log, 'sha512')
        ififuncs.manifest_update(md5_manifest, csv_filename)
        print ' - Updating %s with %s' % (md5_manifest, csv_filename)
        ififuncs.sha512_update(sha512_manifest, csv_filename)
        print ' - Updating %s with %s' % (sha512_manifest, csv_filename)
    print metadata_error
def main(args_):
    '''
    Batch process packages by running accession.py and makepbcore.py

    Walks the packages selected by initial_check(), writes an updated
    filmographic CSV (with reference numbers inserted) to the desktop logs
    dir, then runs accession.py per package and collates the results.
    '''
    args = parse_args(args_)
    oe_list = []
    if args.csv:
        for line_item in ififuncs.extract_metadata(args.csv)[0]:
            oe_number = line_item['Object Entry'].lower()
            # this transforms OE-#### to oe####
            transformed_oe = oe_number[:2] + oe_number[3:]
            oe_list.append(transformed_oe)
    if args.reference:
        reference_number = get_filmographic_number(args.reference)
    else:
        reference_number = ififuncs.get_reference_number()
    # Interactive prompts; answers are ignored later for Reproductions.
    donor = ififuncs.ask_question(
        'Who is the source of acquisition, as appears on the donor agreement? This will not affect Reproductions.'
    )
    depositor_reference = ififuncs.ask_question(
        'What is the donor/depositor number? This will not affect Reproductions.'
    )
    acquisition_type = ififuncs.get_acquisition_type('')
    user = ififuncs.get_user()
    accession_number = get_number(args)
    # Numeric part of aaaNNNN, used as the starting accession counter.
    accession_digits = int(accession_number[3:])
    to_accession = initial_check(args, accession_digits, oe_list,
                                 reference_number)
    register = accession.make_register()
    if args.csv:
        desktop_logs_dir = ififuncs.make_desktop_logs_dir()
        if args.dryrun:
            new_csv_filename = time.strftime(
                "%Y-%m-%dT%H_%M_%S_DRYRUN_SHEET_PLEASE_DO_NOT_INGEST_JUST_IGNORE_COMPLETELY"
            ) + os.path.basename(args.csv)
        else:
            new_csv_filename = time.strftime(
                "%Y-%m-%dT%H_%M_%S_") + os.path.basename(args.csv)
        new_csv = os.path.join(desktop_logs_dir, new_csv_filename)
        filmographic_dict, headers = ififuncs.extract_metadata(args.csv)
        # Copy each package's assigned reference number into the matching
        # filmographic record (matched on the OE-#### Object Entry form).
        for oe_package in to_accession:
            for filmographic_record in filmographic_dict:
                if os.path.basename(oe_package).upper(
                )[:2] + '-' + os.path.basename(
                        oe_package)[2:] == filmographic_record['Object Entry']:
                    filmographic_record['Reference Number'] = to_accession[
                        oe_package][1]
        with open(new_csv, 'w') as csvfile:
            fieldnames = headers
            # Removes Object Entry from headings as it's not needed in database.
            del fieldnames[1]
            writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
            writer.writeheader()
            for i in filmographic_dict:
                i.pop('Object Entry', None)
                # Only include records that have reference numbers
                if not i['Reference Number'] == '':
                    writer.writerow(i)
    if args.dryrun:
        sys.exit()
    proceed = ififuncs.ask_yes_no('Do you want to proceed?')
    if proceed == 'Y':
        for package in sorted(to_accession.keys(), key=natural_keys):
            # NOTE(review): new_csv is unbound here when -csv was not
            # supplied — confirm -csv is effectively mandatory for this path.
            accession_cmd = [
                package, '-user', user, '-pbcore', '-f', '-number',
                to_accession[package][0], '-reference',
                to_accession[package][1], '-register', register, '-csv',
                new_csv
            ]
            # A 3-element entry marks a Reproduction (acquisition type 13);
            # its parent is resolved via order.main().
            if len(to_accession[package]) == 3:
                accession_cmd.extend(['-acquisition_type', '13'])
                accession_cmd.extend(['-parent', order.main(package)])
            else:
                accession_cmd.extend(['-donor', donor])
                accession_cmd.extend(
                    ['-depositor_reference', depositor_reference])
                accession_cmd.extend(
                    ['-acquisition_type', acquisition_type[2]])
            print accession_cmd
            accession.main(accession_cmd)
    collated_pbcore = gather_metadata(args.input)
    sorted_filepath = ififuncs.sort_csv(register, 'accession number')
    print '\nA helper accessions register has been generated in order to help with registration - located here: %s' % sorted_filepath
    print '\nA modified filmographic CSV has been generated with added reference numbers - located here: %s' % new_csv
    print '\nA collated CSV consisting of each PBCore report has been generated for batch database import - located here: %s' % collated_pbcore
def main(args_):
    '''
    Batch process packages by running accession.py and makepbcore.py

    Supports two workflows: a filmographic CSV workflow and an OE CSV
    workflow (-oe_csv, which requires -filmographic as well).
    '''
    args = parse_args(args_)
    oe_list = []
    if args.oe_csv:
        if not args.filmographic:
            print(
                ' - batchaccession.py - ERROR\n - No -filmographic argument supplied. This is mandatory when using the -oe_csv option. \n - Exiting..'
            )
            sys.exit()
        oe_csv_extraction = ififuncs.extract_metadata(args.oe_csv)
        initial_oe_list = oe_csv_extraction[0]
        oe_dicts = process_oe_csv(oe_csv_extraction, args.input)
        # temp hack while we're performing both workflows
        helper_csv = args.oe_csv
    elif args.filmographic:
        initial_oe_list = ififuncs.extract_metadata(args.filmographic)[0]
        # temp hack while we're performing both workflows
        helper_csv = args.filmographic
    if args.oe_csv or args.filmographic:
        for line_item in ififuncs.extract_metadata(helper_csv)[0]:
            # The two CSV flavours use different column headings.
            try:
                oe_number = line_item['Object Entry'].lower()
            except KeyError:
                oe_number = line_item['OE No.'].lower()
            # this transforms OE-#### to oe####
            transformed_oe = oe_number[:2] + oe_number[3:]
            oe_list.append(transformed_oe)
    if not args.oe_csv:
        # No need to ask for the reference number if the OE csv option is supplied.
        # The assumption here is that the OE csv contains the reference numbers though.
        if args.reference:
            reference_number = get_filmographic_number(args.reference)
        else:
            reference_number = ififuncs.get_reference_number()
    donor = ififuncs.ask_question(
        'Who is the source of acquisition, as appears on the donor agreement? This will not affect Reproductions.'
    )
    depositor_reference = ififuncs.ask_question(
        'What is the donor/depositor number? This will not affect Reproductions.'
    )
    acquisition_type = ififuncs.get_acquisition_type('')
    user = ififuncs.get_user()
    accession_number = get_number(args)
    accession_digits = int(accession_number[3:])
    if not args.oe_csv:
        to_accession = initial_check(args, accession_digits, oe_list,
                                     reference_number)
    else:
        # Build the accession map directly from the OE CSV records:
        # source_path -> [accession number, reference number, parent].
        to_accession = {}
        for oe_record in oe_dicts:
            if os.path.isdir(oe_record['source_path']):
                to_accession[oe_record['source_path']] = [
                    'aaa' + str(accession_digits).zfill(4),
                    oe_record['reference number'], oe_record['parent']
                ]
                accession_digits += 1
    for success in sorted(to_accession.keys()):
        print('%s will be accessioned as %s' % (success,
                                                to_accession[success]))
    register = accession.make_register()
    if args.filmographic:
        desktop_logs_dir = ififuncs.make_desktop_logs_dir()
        if args.dryrun:
            new_csv_filename = time.strftime(
                "%Y-%m-%dT%H_%M_%S_DRYRUN_SHEET_PLEASE_DO_NOT_INGEST_JUST_IGNORE_COMPLETELY"
            ) + os.path.basename(args.filmographic)
        else:
            new_csv_filename = time.strftime(
                "%Y-%m-%dT%H_%M_%S_") + os.path.basename(args.filmographic)
        new_csv = os.path.join(desktop_logs_dir, new_csv_filename)
        if not args.oe_csv:
            filmographic_dict, headers = ififuncs.extract_metadata(
                args.filmographic)
            # Insert the assigned reference numbers into the filmographic
            # records (matched on the OE-#### Object Entry form).
            for oe_package in to_accession:
                for filmographic_record in filmographic_dict:
                    if os.path.basename(oe_package).upper(
                    )[:2] + '-' + os.path.basename(oe_package)[
                            2:] == filmographic_record['Object Entry']:
                        filmographic_record['Reference Number'] = to_accession[
                            oe_package][1]
            get_filmographic_titles(to_accession, filmographic_dict)
            with open(new_csv, 'w') as csvfile:
                fieldnames = headers
                # Removes Object Entry from headings as it's not needed in database.
                del fieldnames[1]
                writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
                writer.writeheader()
                for i in filmographic_dict:
                    i.pop('Object Entry', None)
                    # Only include records that have reference numbers
                    if not i['Reference Number'] == '':
                        writer.writerow(i)
    if args.dryrun:
        sys.exit()
    proceed = ififuncs.ask_yes_no('Do you want to proceed?')
    if args.oe_csv:
        # OE CSV workflow reuses the supplied filmographic CSV as-is.
        new_csv = args.filmographic
    if proceed == 'Y':
        for package in sorted(to_accession.keys(), key=natural_keys):
            accession_cmd = [
                package, '-user', user, '-pbcore', '-f', '-number',
                to_accession[package][0], '-reference',
                to_accession[package][1], '-register', register, '-csv',
                new_csv
            ]
            # A 3-element entry marks a Reproduction (acquisition type 13).
            if len(to_accession[package]) == 3:
                accession_cmd.extend(['-acquisition_type', '13'])
                if args.oe_csv:
                    accession_cmd.extend(
                        ['-parent', to_accession[package][2]])
                else:
                    accession_cmd.extend(['-parent', order.main(package)])
            else:
                accession_cmd.extend(['-donor', donor])
                accession_cmd.extend(
                    ['-depositor_reference', depositor_reference])
                accession_cmd.extend(
                    ['-acquisition_type', acquisition_type[2]])
            print accession_cmd
            accession.main(accession_cmd)
    collated_pbcore = gather_metadata(args.input)
    sorted_filepath = ififuncs.sort_csv(register, 'accession number')
    print '\nA helper accessions register has been generated in order to help with registration - located here: %s' % sorted_filepath
    print '\nA modified filmographic CSV has been generated with added reference numbers - located here: %s' % new_csv
    print '\nA collated CSV consisting of each PBCore report has been generated for batch database import - located here: %s' % collated_pbcore
def main(args_):
    '''
    Launch all the functions for creating an IFI SIP.

    Moves/zips the source objects into the SIP layout, extracts technical
    metadata, builds MD5/SHA512 manifests, and optionally (-accession)
    chains straight into accession.py.

    Returns:
        (new_log_textfile, new_manifest_textfile)
    '''
    args = parse_args(args_)
    start = datetime.datetime.now()
    inputs = args.i
    if args.d:
        # DCP processing requires clairmeta — bail out early if missing.
        try:
            import clairmeta
            clairmeta_version = clairmeta.__version__
        except ImportError:
            print(
                'Exiting as Clairmeta is not installed. If there is a case for not using clairmeta, please let me know and i can make a workaround'
            )
            sys.exit()
    print(args)
    user = ififuncs.determine_user(args)
    object_entry = get_object_entry(args)
    sip_path = make_folder_path(os.path.join(args.o), args, object_entry)
    uuid, uuid_event = determine_uuid(args, sip_path)
    new_log_textfile = os.path.join(sip_path,
                                    'logs' + '/' + uuid + '_sip_log.log')
    if args.d:
        content_title = create_content_title_text(sip_path, args)
    ififuncs.generate_log(new_log_textfile, 'EVENT = sipcreator.py started')
    ififuncs.generate_log(
        new_log_textfile,
        'eventDetail=sipcreator.py %s' % ififuncs.get_script_version('sipcreator.py'))
    ififuncs.generate_log(new_log_textfile,
                          'Command line arguments: %s' % args)
    ififuncs.generate_log(new_log_textfile, 'EVENT = agentName=%s' % user)
    ififuncs.generate_log(new_log_textfile, uuid_event)
    if not args.sc:
        ififuncs.generate_log(
            new_log_textfile, 'EVENT = eventType=Identifier assignement,'
            ' eventIdentifierType=object entry, value=%s' % object_entry)
    metadata_dir = os.path.join(sip_path, 'metadata')
    supplemental_dir = os.path.join(metadata_dir, 'supplemental')
    logs_dir = os.path.join(sip_path, 'logs')
    if args.accession:
        # Gather everything accession.py will need up-front, interactively.
        accession_number = ififuncs.get_accession_number()
        reference_number = ififuncs.get_reference_number()
        parent = ififuncs.ask_question(
            'What is the parent record? eg MV 1234. Enter n/a if this is a born digital acquisition with no parent.'
        )
        donor = ififuncs.ask_question(
            'Who is the source of acquisition, as appears on the donor agreement? This will not affect Reproductions.'
        )
        depositor_reference = ififuncs.ask_question(
            'What is the donor/depositor number? This will not affect Reproductions.'
        )
        acquisition_type = ififuncs.get_acquisition_type('')
        donation_date = ififuncs.ask_question(
            'When was the donation date in DD/MM/YYYY format? Eg. 31/12/1999 - Unfortunately this is NOT using ISO 8601.'
        )
    if args.zip:
        # Zip workflow: mediainfo/dfxml of the source, MD5 the source tree,
        # then pack it into <uuid>.zip inside objects/.
        inputxml, inputtracexml, dfxml = ififuncs.generate_mediainfo_xmls(
            inputs[0], args.o, uuid, new_log_textfile)
        if args.manifest:
            shutil.copy(
                args.manifest,
                args.manifest.replace('_manifest.md5', '_manifest-md5.txt'))
            source_manifest = args.manifest.replace('_manifest.md5',
                                                    '_manifest-md5.txt')
        else:
            source_manifest = os.path.join(
                args.o,
                os.path.basename(args.i[0]) + '_manifest-md5.txt')
            ififuncs.generate_log(
                new_log_textfile,
                'EVENT = message digest calculation, status=started, eventType=messageDigestCalculation, agentName=hashlib, eventDetail=MD5 checksum of source files within ZIP'
            )
            ififuncs.hashlib_manifest(args.i[0], source_manifest,
                                      os.path.dirname(args.i[0]))
            ififuncs.generate_log(
                new_log_textfile,
                'EVENT = message digest calculation, status=finished, eventType=messageDigestCalculation, agentName=hashlib, eventDetail=MD5 checksum of source files within ZIP'
            )
        ififuncs.generate_log(
            new_log_textfile,
            'EVENT = packing, status=started, eventType=packing, agentName=makezip.py, eventDetail=Source object to be packed=%s' % inputs[0])
        makezip_judgement, zip_file = makezip.main([
            '-i', inputs[0], '-o',
            os.path.join(sip_path, 'objects'), '-basename', uuid + '.zip'
        ])
        ififuncs.generate_log(
            new_log_textfile,
            'EVENT = packing, status=finished, eventType=packing, agentName=makezip.py, eventDetail=Source object packed into=%s' % zip_file)
        # makezip returns None when the zip verified lossless.
        if makezip_judgement is None:
            judgement = 'lossless'
        else:
            judgement = makezip_judgement
        ififuncs.generate_log(
            new_log_textfile,
            'EVENT = losslessness verification, status=finished, eventType=messageDigestCalculation, agentName=makezip.py, eventDetail=embedded crc32 checksum validation, eventOutcome=%s' % judgement)
        # NOTE(review): this losslessness-verification event is logged twice
        # with identical text — possibly one was meant to be status=started.
        ififuncs.generate_log(
            new_log_textfile,
            'EVENT = losslessness verification, status=finished, eventType=messageDigestCalculation, agentName=makezip.py, eventDetail=embedded crc32 checksum validation, eventOutcome=%s' % judgement)
    else:
        log_names = move_files(inputs, sip_path, args, user)
    ififuncs.get_technical_metadata(sip_path, new_log_textfile)
    ififuncs.hashlib_manifest(metadata_dir,
                              metadata_dir + '/metadata_manifest.md5',
                              metadata_dir)
    if args.sc:
        normalise_objects_manifest(sip_path)
    new_manifest_textfile = consolidate_manifests(sip_path, 'objects',
                                                  new_log_textfile)
    if args.zip:
        ififuncs.generate_log(
            new_log_textfile,
            'EVENT = Message Digest Calculation, status=started, eventType=message digest calculation, eventDetail=%s module=hashlib' % zip_file)
        ififuncs.manifest_update(new_manifest_textfile, zip_file)
        ififuncs.generate_log(
            new_log_textfile,
            'EVENT = Message Digest Calculation, status=finished, eventType=message digest calculation, eventDetail=%s module=hashlib' % zip_file)
    consolidate_manifests(sip_path, 'metadata', new_log_textfile)
    ififuncs.hashlib_append(logs_dir, new_manifest_textfile,
                            os.path.dirname(os.path.dirname(logs_dir)))
    if args.supplement:
        os.makedirs(supplemental_dir)
        supplement_cmd = [
            '-i', args.supplement, '-user', user, '-new_folder',
            supplemental_dir,
            os.path.dirname(sip_path), '-copy'
        ]
        package_update.main(supplement_cmd)
    if args.zip:
        # NOTE(review): if -supplement was also given, supplemental_dir
        # already exists and this os.makedirs() raises OSError — confirm the
        # two options are mutually exclusive.
        os.makedirs(supplemental_dir)
        supplement_cmd = [
            '-i', [inputxml, inputtracexml, dfxml, source_manifest], '-user',
            user, '-new_folder', supplemental_dir,
            os.path.dirname(sip_path), '-copy'
        ]
        package_update.main(supplement_cmd)
    if args.sc:
        print('Generating Digital Forensics XML')
        dfxml = accession.make_dfxml(args, sip_path, uuid)
        ififuncs.generate_log(
            new_log_textfile,
            'EVENT = Metadata extraction - eventDetail=File system metadata extraction using Digital Forensics XML, eventOutcome=%s, agentName=makedfxml' % (dfxml))
        ififuncs.manifest_update(new_manifest_textfile, dfxml)
        sha512_log = manifest.main([sip_path, '-sha512', '-s'])
        sha512_manifest = os.path.join(
            os.path.dirname(sip_path), uuid + '_manifest-sha512.txt')
        ififuncs.merge_logs_append(sha512_log, new_log_textfile,
                                   new_manifest_textfile)
        ififuncs.checksum_replace(sha512_manifest, new_log_textfile,
                                  'sha512')
        os.remove(sha512_log)
    ififuncs.sort_manifest(new_manifest_textfile)
    if not args.quiet:
        # log_names only exists on the non-zip path.
        if 'log_names' in locals():
            log_report(log_names)
        finish = datetime.datetime.now()
        print('\n- %s ran this script at %s and it finished at %s' %
              (user, start, finish))
    if args.d:
        process_dcp(sip_path, content_title, args, new_manifest_textfile,
                    new_log_textfile, metadata_dir, clairmeta_version)
    if args.accession:
        register = accession.make_register()
        filmographic_dict = ififuncs.extract_metadata(args.filmo_csv)[0]
        # Find the title for this reference number in the filmographic CSV.
        # NOTE(review): 'title' is unbound below if no record matches —
        # confirm the reference number is always present in filmo_csv.
        for filmographic_record in filmographic_dict:
            if filmographic_record['Reference Number'].lower(
            ) == reference_number.lower():
                if filmographic_record['Title'] == '':
                    title = filmographic_record[
                        'TitleSeries'] + '; ' + filmographic_record['EpisodeNo']
                else:
                    title = filmographic_record['Title']
        oe_register = make_oe_register()
        ififuncs.append_csv(
            oe_register,
            (object_entry.upper()[:2] + '-' + object_entry[2:],
             donation_date, '1', '', title, donor, acquisition_type[1],
             accession_number, 'Representation of %s|Reproduction of %s' %
             (reference_number, parent), ''))
        accession_cmd = [
            os.path.dirname(sip_path), '-user', user, '-f', '-number',
            accession_number, '-reference', reference_number, '-register',
            register, '-filmo_csv', args.filmo_csv, '-pbcore'
        ]
        if not parent.lower() == 'n/a':
            accession_cmd.extend(['-parent', parent])
        accession_cmd.extend(['-donor', donor])
        accession_cmd.extend(['-depositor_reference', depositor_reference])
        accession_cmd.extend(['-acquisition_type', acquisition_type[2]])
        accession_cmd.extend(['-donation_date', donation_date])
        print(accession_cmd)
        accession.main(accession_cmd)
    return new_log_textfile, new_manifest_textfile
def main(args_):
    '''
    Launches the various functions that will accession a package

    Renames the Object Entry folder to the accession number, appends a row
    to the accessions register, regenerates manifests/logs/DFXML, and
    optionally inserts filmographic metadata and a PBCore record.
    '''
    args = parse_args(args_)
    source = args.input
    uuid_directory = ififuncs.check_for_sip([source])
    if uuid_directory is not None:
        oe_path = os.path.dirname(uuid_directory)
        oe_number = os.path.basename(oe_path)
        if args.user:
            user = args.user
        else:
            user = ififuncs.get_user()
        # Validate -number (must be aaa + 4 digits), else prompt interactively.
        if args.number:
            if args.number[:3] != 'aaa':
                print(
                    'First three characters must be \'aaa\' and last four characters must be four digits'
                )
                accession_number = ififuncs.get_accession_number()
            elif len(args.number[3:]) != 4:
                accession_number = ififuncs.get_accession_number()
                print(
                    'First three characters must be \'aaa\' and last four characters must be four digits'
                )
            elif not args.number[3:].isdigit():
                accession_number = ififuncs.get_accession_number()
                print(
                    'First three characters must be \'aaa\' and last four characters must be four digits'
                )
            else:
                accession_number = args.number
        else:
            accession_number = ififuncs.get_accession_number()
        if args.reference:
            Reference_Number = args.reference.upper()
        else:
            Reference_Number = ififuncs.get_reference_number()
        if args.acquisition_type:
            acquisition_type = ififuncs.get_acquisition_type(
                args.acquisition_type)
            print(acquisition_type)
        accession_path = os.path.join(
            os.path.dirname(oe_path), accession_number)
        uuid = os.path.basename(uuid_directory)
        new_uuid_path = os.path.join(accession_path, uuid)
        logs_dir = os.path.join(new_uuid_path, 'logs')
        sipcreator_log = os.path.join(logs_dir, uuid) + '_sip_log.log'
        if args.force:
            proceed = 'Y'
        else:
            proceed = ififuncs.ask_yes_no('Do you want to rename %s with %s' %
                                          (oe_number, accession_number))
        if proceed == 'Y':
            os.rename(oe_path, accession_path)
        # NOTE(review): everything below assumes the rename happened — if the
        # user answered 'N', the paths computed above do not exist.
        if args.register:
            register = args.register
        else:
            register = make_register()
        ififuncs.append_csv(register,
                            (oe_number.upper()[:2] + '-' + oe_number[2:],
                             accession_number, '', '', '', '', '', ''))
        ififuncs.generate_log(sipcreator_log, 'EVENT = accession.py started')
        ififuncs.generate_log(
            sipcreator_log,
            'eventDetail=accession.py %s' % ififuncs.get_script_version('accession.py'))
        ififuncs.generate_log(sipcreator_log,
                              'Command line arguments: %s' % args)
        ififuncs.generate_log(sipcreator_log, 'EVENT = agentName=%s' % user)
        ififuncs.generate_log(
            sipcreator_log, 'EVENT = eventType=Identifier assignment,'
            ' eventIdentifierType=accession number, value=%s' % accession_number)
        ififuncs.generate_log(
            sipcreator_log, 'EVENT = eventType=accession,'
            ' eventIdentifierType=accession number, value=%s' % accession_number)
        sip_manifest = os.path.join(accession_path, uuid) + '_manifest.md5'
        sha512_log = manifest.main([new_uuid_path, '-sha512', '-s'])
        sha512_manifest = os.path.join(
            os.path.dirname(new_uuid_path), uuid + '_manifest-sha512.txt')
        ififuncs.merge_logs_append(sha512_log, sipcreator_log, sip_manifest)
        os.remove(sha512_log)
        print('Generating Digital Forensics XML')
        dfxml = make_dfxml(args, new_uuid_path, uuid)
        ififuncs.generate_log(
            sipcreator_log,
            'EVENT = Metadata extraction - eventDetail=File system metadata extraction using Digital Forensics XML, eventOutcome=%s, agentName=makedfxml' % (dfxml))
        # this is inefficient. The script should not have to ask for reference
        # number twice if someone wants to insert the filmographic but do not
        # want to make the pbcore csv, perhaps because the latter already exists.
        if args.filmo_csv:
            metadata_dir = os.path.join(new_uuid_path, 'metadata')
            # A '+'-joined reference number means multiple filmographic records.
            if '+' in Reference_Number:
                reference_list = Reference_Number.split('+')
            else:
                reference_list = [Reference_Number]
            for ref in reference_list:
                package_filmographic = os.path.join(
                    metadata_dir, ref + '_filmographic.csv')
                insert_filmographic(args.filmo_csv, ref,
                                    package_filmographic)
                ififuncs.generate_log(
                    sipcreator_log,
                    'EVENT = Metadata extraction - eventDetail=Filmographic descriptive metadata added to metadata folder, eventOutcome=%s, agentName=accession.py' % (package_filmographic))
                ififuncs.manifest_update(sip_manifest, package_filmographic)
                ififuncs.sha512_update(sha512_manifest,
                                       package_filmographic)
                print(
                    'Filmographic descriptive metadata added to metadata folder'
                )
        ififuncs.generate_log(sipcreator_log,
                              'EVENT = accession.py finished')
        ififuncs.checksum_replace(sip_manifest, sipcreator_log, 'md5')
        ififuncs.checksum_replace(sha512_manifest, sipcreator_log, 'sha512')
        ififuncs.manifest_update(sip_manifest, dfxml)
        ififuncs.sha512_update(sha512_manifest, dfxml)
        if args.pbcore:
            # NOTE(review): reference_list is only bound when -filmo_csv was
            # supplied — -pbcore without -filmo_csv raises NameError here.
            for ref in reference_list:
                makepbcore_cmd = [
                    accession_path, '-p', '-user', user, '-reference', ref
                ]
                if args.parent:
                    makepbcore_cmd.extend(['-parent', args.parent])
                if args.acquisition_type:
                    makepbcore_cmd.extend(
                        ['-acquisition_type', args.acquisition_type])
                if args.donor:
                    makepbcore_cmd.extend(['-donor', args.donor])
                # NOTE(review): guard below re-checks args.donor but forwards
                # args.depositor_reference — looks like a copy-paste slip;
                # confirm it should test args.depositor_reference instead.
                if args.donor:
                    makepbcore_cmd.extend(
                        ['-depositor_reference', args.depositor_reference])
                if args.donation_date:
                    makepbcore_cmd.extend(
                        ['-donation_date', args.donation_date])
                makepbcore.main(makepbcore_cmd)
    else:
        print(
            'not a valid package. The input should include a package that has been through Object Entry'
        )
def main(args_):
    """Build a single technical/PBCore CSV record for an AV package.

    All files found under ``args.input`` are treated as one
    instantiation/representation: per-file values are collected into lists
    and aggregate values (duration, filesize, frame count) are summed.
    When ``-p`` is passed, the CSV and a SIP log are written into the
    package's metadata/logs folders and the package manifests are updated.

    :param args_: list of command-line arguments passed to ``parse_args``.
    """
    # if multiple file are present, this script will treat them as a single
    # instantiation/representation and get aggregate metadata about the whole
    # package. For now, this will be a clumsy implementation - the first file
    # will provide most metadata. Things like duration/bitrate/filesize
    # will be calculated as a whole.
    # Although another way would be that every call is looped, and if
    # this could catch files that should not be in the package, eg. a 4:2:2
    # file in a 4:2:0 package..
    # yup - do it that way!
    args = parse_args(args_)
    all_files = ififuncs.recursive_file_list(args.input)
    # silence -> no audio track seen; audio_only -> no video track seen.
    # Both start True and are flipped while scanning tracks below.
    silence = True
    audio_only = True
    if args.user:
        user = args.user
    else:
        user = ififuncs.get_user()
    acquisition_type = ''
    if args.acquisition_type:
        acquisition_type = ififuncs.get_acquisition_type(
            args.acquisition_type)[0]
    # The object-entry directory whose name is NOT a UUID supplies the
    # instantiation identifier (validate_uuid4 returns None on non-UUIDs).
    instantiationIdentif = ''
    for dirs in os.listdir(args.input):
        if ififuncs.validate_uuid4(dirs) is None:
            instantiationIdentif = dirs
    Accession_Number = get_accession_number(args.input)
    if args.reference:
        Reference_Number = args.reference.upper()
    else:
        Reference_Number = get_reference_number(args.input)
    if args.p:
        # Locate the package's metadata/ and logs/ directories.
        # NOTE(review): if neither directory exists, metadata_dir/logs_dir
        # are never bound and the joins below raise NameError — presumably
        # -p is only used on well-formed packages; confirm.
        for root, _, filenames in os.walk(args.input):
            if os.path.basename(root) == 'metadata':
                metadata_dir = root
            elif os.path.basename(root) == 'logs':
                logs_dir = root
        csv_filename = os.path.join(
            metadata_dir,
            Accession_Number + '_%s_pbcore.csv' % Reference_Number)
        sipcreator_log = os.path.join(
            logs_dir, instantiationIdentif + '_sip_log.log')
        ififuncs.generate_log(sipcreator_log,
                              'EVENT = makepbcore.py started')
        ififuncs.generate_log(
            sipcreator_log, 'eventDetail=makepbcore.py %s' %
            ififuncs.get_script_version('makepbcore.py'))
        ififuncs.generate_log(sipcreator_log,
                              'Command line arguments: %s' % args)
        ififuncs.generate_log(sipcreator_log,
                              'EVENT = agentName=%s' % user)
    else:
        # Placeholder output name when not writing into a package.
        csv_filename = 'blaa.csv'
    print((' - Metadata will be stored in %s' % csv_filename))
    # Find the package checksum manifests so they can be refreshed after
    # the CSV is added (only used on the args.p path at the bottom).
    for filenames in os.listdir(args.input):
        if '_manifest.md5' in filenames:
            md5_manifest = os.path.join(args.input, filenames)
        elif 'manifest-sha512.txt' in filenames:
            sha512_manifest = os.path.join(args.input, filenames)
    make_csv(csv_filename)
    # Aggregate accumulators across all files in the package.
    ms = 0
    FrameCount = 0
    instantFileSize_byte = 0
    instantFileSize_gigs = 0
    # Per-file value lists; each is later checked for mismatches and some
    # are collapsed via process_mixed_values().
    scan_types = []
    matrix_list = []
    transfer_list = []
    colour_primaries_list = []
    color_spaces = []
    chroma = []
    frame_sizes = []
    par_list = []
    container_list = []
    fps_list = []
    sample_rate_list = []
    track_count_list = []
    interlace_list = []
    compression_list = []
    pix_fmt_list = []
    audio_fmt_list = []
    audio_codecid_list = []
    audio_codec_list = []
    au_bitdepth_list = []
    # NOTE(review): video_codecid_list is never appended to in this
    # function (its appends survive only inside the commented-out
    # triple-quoted block below), so it stays empty and its mismatch
    # check is a no-op — confirm whether that is intended.
    video_codecid_list = []
    video_codec_version_list = []
    video_codec_profile_list = []
    timecode_list = []
    channels_list = []
    # An AS-11 STL subtitle sidecar adds an extra pseudo-track entry.
    stl = False
    subtitle_check = ififuncs.get_digital_object_descriptor(args.input)
    if 'STL' in subtitle_check:
        stl = True
    for source in all_files:
        # Two mediainfo runs per file: PBCore2 output (root) and plain
        # XML output (new_root); both trees are queried below.
        metadata = subprocess.check_output(
            ['mediainfo', '--Output=PBCore2', source])
        new_metadata = subprocess.check_output(
            ['mediainfo', '--Output=XML', source])
        try:
            root = etree.fromstring(metadata)
            new_root = etree.fromstring(new_metadata)
        except lxml.etree.XMLSyntaxError:
            # mediainfo on Windows can emit cp1252; re-encode to utf-8
            # before parsing.
            print('Windows encoding detected - transforming into utf-8')
            root = etree.fromstring(metadata.decode('cp1252').encode('utf-8'))
            new_root = etree.fromstring(
                new_metadata.decode('cp1252').encode('utf-8'))
        print(((' - Analysing %s') % source))
        pbcore_namespace = root.xpath('namespace-uri(.)')
        mediainfo_namespace = new_root.xpath('namespace-uri(.)')
        # NOTE(review): track_type is computed but never used — the loop
        # below iterates new_track_type instead; likely leftover from the
        # PBCore-based track walk.
        track_type = root.xpath('//ns:essenceTrackType',
                                namespaces={'ns': pbcore_namespace})
        new_track_type = new_root.xpath('//ns:track',
                                        namespaces={'ns': mediainfo_namespace})
        if len(new_track_type) > 0:
            for track in new_track_type:
                if track.attrib['type'] == 'Video':
                    audio_only = False
                    essenceTrackEncodvid = ififuncs.get_metadata(
                        "ns:Format", track, mediainfo_namespace)
                    #vcodec_attributes = get_attributes(track.getparent(), pbcore_namespace)
                    #vcodec_attributes = 'TODO'
                    video_codecid = ififuncs.get_metadata(
                        "ns:CodecID", track, mediainfo_namespace)
                    video_codec_version = ififuncs.get_metadata(
                        "ns:Format_Version", track, mediainfo_namespace)
                    video_codec_profile = ififuncs.get_metadata(
                        "ns:Format_Profile", track, mediainfo_namespace)
                    video_codec_version_list.append(video_codec_version)
                    video_codec_profile_list.append(video_codec_profile)
                elif track.attrib['type'] == 'Audio':
                    silence = False
                    essenceTrackEncod_au = ififuncs.get_metadata(
                        "ns:Format", track, mediainfo_namespace)
                    audio_codec_list.append(essenceTrackEncod_au)
                    #acodec_attributes = get_attributes(track.getparent(), pbcore_namespace)
                    audio_codecid = ififuncs.get_metadata(
                        "ns:CodecID", track, mediainfo_namespace)
                    essenceTrackSampling = ififuncs.get_mediainfo(
                        'samplerate',
                        '--inform=Audio;%SamplingRate_String%', source)
                    sample_rate_list.append(essenceTrackSampling)
                    essenceBitDepth_au = ififuncs.get_metadata(
                        "ns:BitDepth", track, mediainfo_namespace)
                    audio_codecid_list.append(audio_codecid)
                    au_bitdepth_list.append(essenceBitDepth_au)
                    channels = ififuncs.get_metadata(
                        "//ns:Channels", track, mediainfo_namespace)
                    channels_list.append(channels)
        if audio_only:
            # No video track in any file so far: stub the video fields.
            essenceTrackEncodvid = 'n/a'
            video_codecid = 'n/a'
            video_codec_version = 'n/a'
            video_codec_profile = 'n/a'
        ScanType = ififuncs.get_metadata(
            "//ns:ScanType", new_root, mediainfo_namespace)
        scan_types.append(ScanType)
        matrix_coefficients = ififuncs.get_metadata(
            "//ns:matrix_coefficients", new_root, mediainfo_namespace)
        timecode_source, starting_timecode = get_timecode(
            pbcore_namespace, root, source)
        timecode_list.append(starting_timecode)
        matrix_list.append(matrix_coefficients)
        transfer_characteris = ififuncs.get_metadata(
            "//ns:transfer_characteristics", new_root, mediainfo_namespace)
        transfer_list.append(transfer_characteris)
        colour_primaries = ififuncs.get_metadata(
            "//ns:colour_primaries", new_root, mediainfo_namespace)
        colour_primaries_list.append(colour_primaries)
        try:
            if audio_only:
                FrameCount = 'n/a'
            else:
                # increment if multiple objects are present
                try:
                    FrameCount += int(
                        ififuncs.get_metadata(
                            "//ns:FrameCount", new_root, mediainfo_namespace))
                except ValueError:
                    # don't increment if multiple values are returned as str
                    FrameCount = ififuncs.get_metadata(
                        "//ns:FrameCount", new_root, mediainfo_namespace)
        except TypeError:
            # workaround for silent pic in DCP
            FrameCount = 'n/a'
        instantFileSize_byte += int(
            ififuncs.get_metadata(
                "//ns:FileSize", new_root, mediainfo_namespace))
        # Overall bitrate, converted from bits/s to Mbit/s.
        instantDataRate = round(
            float(
                ififuncs.get_mediainfo(
                    'OverallBitRate',
                    '--inform=General;%OverallBitRate%', source))
            / 1000 / 1000, 2)
        instantTracks = ififuncs.get_number_of_tracks(source)
        track_count_list.append(instantTracks)
        if stl is True:
            track_count_list.append('STL sidecar')
        ms += ififuncs.get_milliseconds(source)
        ColorSpace = ififuncs.get_metadata(
            "//ns:ColorSpace", new_root, mediainfo_namespace)
        color_spaces.append(ColorSpace)
        # NOTE(review): bare get_metadata (module-level helper) is mixed
        # with ififuncs.get_metadata in this region — confirm both resolve
        # and behave identically for these queries.
        ChromaSubsampling = get_metadata(
            "//ns:ChromaSubsampling", new_root, mediainfo_namespace)
        chroma.append(ChromaSubsampling)
        instantMediaty = get_metadata(
            "//ns:instantiationMediaType", root, pbcore_namespace)
        if audio_only:
            essenceFrameSize = 'n/a'
        else:
            essenceFrameSize = get_metadata(
                "//ns:essenceTrackFrameSize", root, pbcore_namespace)
        frame_sizes.append(essenceFrameSize)
        PixelAspectRatio = ififuncs.get_metadata(
            "//ns:PixelAspectRatio", new_root, mediainfo_namespace)
        par_list.append(PixelAspectRatio)
        # The 'General' track carries container-level values.
        general_root = new_root.xpath(
            "//ns:track[@type='General']",
            namespaces={'ns': mediainfo_namespace})[0]
        instantiationStandar = ififuncs.get_metadata(
            "ns:Format", general_root, mediainfo_namespace)
        container_list.append(instantiationStandar)
        essenceFrameRate = ififuncs.get_metadata(
            "//ns:FrameRate", new_root, mediainfo_namespace)
        fps_list.append(essenceFrameRate)
        essenceAspectRatio = ififuncs.get_mediainfo(
            'DAR', '--inform=Video;%DisplayAspectRatio_String%', source)
        Interlacement = ififuncs.get_metadata(
            "//ns:ScanOrder", new_root, mediainfo_namespace)
        # FFV1/MKV seems to have this scanorder metadata here rather
        # than Interlacement
        # FFV1/MKV is the only example I've seen so far that behaves like this :|
        # It could be that Interlacement is set at a codec level for FFV1, but others are
        # declared at the container level..
        if Interlacement == 'n/a':
            Interlacement = get_metadata(
                "//ns:essenceTrackAnnotation[@annotationType='ScanOrder']",
                root, pbcore_namespace)
        interlace_list.append(Interlacement)
        Compression_Mode = ififuncs.get_metadata(
            "//ns:Compression_Mode", new_root, mediainfo_namespace)
        colour_range = ififuncs.get_metadata(
            "//ns:colour_range", new_root, mediainfo_namespace)
        # this needs to be clarified as it exists in general and codec
        format_version = ififuncs.get_metadata(
            "ns:Format_Version", general_root, mediainfo_namespace)
        app_company_name = ififuncs.get_metadata(
            "//ns:Encoded_Application_CompanyName", new_root,
            mediainfo_namespace)
        app_name = ififuncs.get_metadata(
            "//ns:Encoded_Application_Name", new_root, mediainfo_namespace)
        app_version = ififuncs.get_metadata(
            "//ns:Encoded_Application_Version", new_root, mediainfo_namespace)
        library_name = ififuncs.get_metadata(
            "//ns:Encoded_Library_Name", new_root, mediainfo_namespace)
        if library_name == 'n/a':
            # Fall back to the container-level Encoded_Library field.
            library_name = ififuncs.get_metadata(
                "//ns:Encoded_Library", general_root, mediainfo_namespace)
        library_version = ififuncs.get_metadata(
            "//ns:Encoded_Library_Version", new_root, mediainfo_namespace)
        compression_list.append(Compression_Mode)
        instantiationDate_mo = get_metadata(
            "//ns:instantiationDate[@dateType='file modification']",
            root, pbcore_namespace)
        instantDate_other = 'n/a'
        instantDate_type = 'n/a'
        pix_fmt = ififuncs.get_ffmpeg_fmt(source, 'video')
        pix_fmt_list.append(pix_fmt)
        audio_fmt = ififuncs.get_ffmpeg_fmt(source, 'audio')
        audio_fmt_list.append(audio_fmt)
        # NOTE(review): the 'duration' label looks mislabelled — the
        # inform string actually queries the video BitDepth; presumably
        # the first argument is ignored by get_mediainfo. Confirm.
        essenceBitDepth_vid = ififuncs.get_mediainfo(
            'duration', '--inform=Video;%BitDepth%', source)
        if silence:
            # No audio track found in any file: stub the audio fields.
            audio_codecid = 'n/a'
            essenceBitDepth_au = 'n/a'
            essenceTrackEncod_au = 'n/a'
            essenceTrackSampling = 'n/a'
            channels = 'n/a'
        '''
        video_codecid = vcodec_attributes['ref']
        video_codecid_list.append(video_codecid)
        try:
            video_codec_version = vcodec_attributes['version']
        except KeyError:
            video_codec_version = 'n/a'
        try:
            video_codec_profile = vcodec_attributes['annotation'][8:]
        except KeyError:
            video_codec_profile = 'n/a'
        '''
    # Warn when any per-file metadata list holds more than one distinct
    # value across the package (can be a false positive for DCPs with
    # atomised audio/video).
    metadata_error = ''
    metadata_list = [
        scan_types, matrix_list, transfer_list, colour_primaries_list,
        color_spaces, chroma, frame_sizes, par_list, container_list,
        fps_list, sample_rate_list, track_count_list, interlace_list,
        compression_list, pix_fmt_list, audio_fmt_list, audio_codecid_list,
        audio_codec_list, au_bitdepth_list, video_codecid_list,
        video_codec_version_list, video_codec_profile_list, channels_list,
        timecode_list
    ]
    for i in metadata_list:
        if len(set(i)) > 1:
            metadata_error += 'WARNING - Your metadata values are not the same for all files - but this could be a false positive if dealing with atomised audio and video as with DCP: %s\n' % set(
                i)
            if args.p:
                ififuncs.generate_log(
                    sipcreator_log,
                    'EVENT = Metadata mismatch - Your metadata values are not the same for all files - but this could be a false positive if dealing with atomised audio and video as with DCP: %s' % set(i))
    print(metadata_error)
    # Total duration: milliseconds -> timecode, assuming 25fps.
    tc = ififuncs.convert_millis(ms)
    instantiationDuratio = ififuncs.convert_timecode(25, tc)
    if args.donor:
        Donor = args.donor
    else:
        Donor = ''
    Edited_By = user
    Date_Created = ''
    Date_Last_Modified = ''
    Film_Or_Tape = 'Digital AV Object'
    Date_Of_Donation = ''
    if args.reproduction_creator:
        reproduction_creator = args.reproduction_creator
    else:
        reproduction_creator = ''
    if args.acquisition_type:
        if acquisition_type == 'Reproduction':
            # Donation date derives from the file-modification date
            # (ISO timestamp; keep the date portion only).
            Date_Of_Donation = instantiationDate_mo.split('T')[0]
            # if a reproduction, then there's no Donor/transfer of title.
            Donor = 'n/a'
        else:
            Date_Of_Donation = args.donation_date
    Habitat = ''
    backup_habitat = ''
    Type_Of_Deposit = acquisition_type
    if args.depositor_reference:
        Depositor_Reference = args.depositor_reference
    else:
        Depositor_Reference = ''
    Master_Viewing = 'Preservation Object'
    Language_Version = ''
    Condition_Rating = ''
    Companion_Elements = ''
    TTape_Origin = args.parent
    EditedNew = user
    FIO = 'In'
    CollectionTitle = ''
    Created_By = user
    instantTimeStart = 'n/a'
    instantFileSize_gigs = round(
        float(instantFileSize_byte) / 1024 / 1024 / 1024, 3)
    instantColors = 'n/a'
    instantLanguage = 'n/a'
    instantAltMo = 'n/a'
    instantiationChanCon = 'n/a'
    '''
    no idea why these are here
    colour_range = colour_range
    format_version = format_version
    '''
    # Collapse the per-file lists into single CSV-ready values.
    TimeCode_FirstFrame = process_mixed_values(timecode_list)
    pix_fmt = process_mixed_values(pix_fmt_list)
    audio_fmt = process_mixed_values(audio_fmt_list)
    instantTracks = process_mixed_values(track_count_list)
    TimeCode_Source = timecode_source
    reproduction_reason = ''
    dig_object_descrip = ififuncs.get_digital_object_descriptor(args.input)
    if 'STL' in dig_object_descrip:
        dig_object_descrip = 'AS-11 package'
    # DCPs override many values via check_dcp() using the CPL.
    dcp_check = ififuncs.find_cpl(args.input)
    if dcp_check is not None:
        essenceFrameSize, ChromaSubsampling, ColorSpace, FrameCount, essenceAspectRatio, instantiationDuratio, PixelAspectRatio, ScanType, dig_object_descrip, instantTracks, instantDataRate, essenceBitDepth_vid, instantMediaty = check_dcp(
            dcp_check)
    ififuncs.append_csv(csv_filename, [
        Reference_Number, Donor, Edited_By, Date_Created, Date_Last_Modified,
        Film_Or_Tape, Date_Of_Donation, Accession_Number, Habitat,
        backup_habitat, TTape_Origin, Type_Of_Deposit, Depositor_Reference,
        Master_Viewing, Language_Version, Condition_Rating,
        Companion_Elements, EditedNew, FIO, CollectionTitle, Created_By,
        instantiationIdentif, instantDate_other, instantDate_type,
        instantiationDate_mo, instantiationStandar, instantMediaty,
        instantFileSize_byte, instantFileSize_gigs, instantTimeStart,
        instantDataRate, instantTracks, instantColors, instantLanguage,
        instantAltMo, essenceTrackEncodvid, essenceFrameRate,
        essenceTrackSampling, essenceBitDepth_vid, essenceFrameSize,
        essenceAspectRatio, essenceTrackEncod_au, essenceBitDepth_au,
        instantiationDuratio, instantiationChanCon, PixelAspectRatio,
        FrameCount, ColorSpace, ChromaSubsampling, ScanType, Interlacement,
        Compression_Mode, colour_primaries, transfer_characteris,
        matrix_coefficients, pix_fmt, audio_fmt, audio_codecid,
        video_codecid, video_codec_version, video_codec_profile, channels,
        colour_range, format_version, TimeCode_FirstFrame, TimeCode_Source,
        app_company_name, app_name, app_version, library_name,
        library_version, reproduction_creator, reproduction_reason,
        dig_object_descrip,
    ])
    if args.p:
        # Log the event and refresh both package manifests so the new CSV
        # and updated log are covered by the checksums.
        ififuncs.generate_log(
            sipcreator_log,
            'EVENT = Metadata extraction - eventDetail=Technical record creation using PBCore, eventOutcome=%s, agentName=makepbcore' % (csv_filename))
        ififuncs.generate_log(sipcreator_log,
                              'EVENT = makepbcore.py finished')
        ififuncs.checksum_replace(md5_manifest, sipcreator_log, 'md5')
        ififuncs.checksum_replace(sha512_manifest, sipcreator_log, 'sha512')
        ififuncs.manifest_update(md5_manifest, csv_filename)
        print((' - Updating %s with %s' % (md5_manifest, csv_filename)))
        ififuncs.sha512_update(sha512_manifest, csv_filename)
        print((' - Updating %s with %s' % (sha512_manifest, csv_filename)))
    print(metadata_error)