def delete_do_instance():
    """Remove digital-object instances from archival objects listed in a CSV.

    CSV columns: archival_object_uri, digital_object_uri. When the
    digital_object_uri column is empty, every instance whose
    instance_type is 'digital_object' is removed; otherwise only the
    instance referencing that specific digital object is removed. Each
    update result is written to the output text file and printed.
    """
    values = admin.login()
    csvfile = admin.opencsv()
    txtfile = admin.opentxt()
    for row in csvfile:
        archival_object_uri = row[0]
        digital_object_uri = row[1]
        try:
            archival_object_json = requests.get(
                values[0] + archival_object_uri, headers=values[1]).json()
            # Iterate over a copy so removals don't skip elements.
            instance_list = list(archival_object_json['instances'])
            if digital_object_uri == '':
                for instance in instance_list:
                    if instance['instance_type'] == 'digital_object':
                        archival_object_json['instances'].remove(instance)
            else:
                for instance in instance_list:
                    if 'digital_object' in instance:
                        if instance['digital_object'] == {'ref': digital_object_uri}:
                            archival_object_json['instances'].remove(instance)
            # POST and logging hoisted out of the branches: both branches
            # performed the identical update sequence.
            archival_object_data = json.dumps(archival_object_json)
            archival_object_update = requests.post(
                values[0] + archival_object_uri, headers=values[1],
                data=archival_object_data).json()
            admin.writetxt(txtfile, archival_object_update)
            print(archival_object_update)
        # Was a bare `except:`, which also swallows SystemExit and
        # KeyboardInterrupt; narrow to Exception so the loop still
        # continues past per-row failures but stays interruptible.
        except Exception:
            txtfile.write('error, could not update ' + str(archival_object_uri))
            continue
def create_container_profiles():
    """Create a container profile in ArchivesSpace for each CSV row.

    CSV columns: name, extent_dimension, height, width, depth,
    dimension_units. Prints each API response; error logging TBD.
    """
    login = admin.login()
    api_url, headers = login[0], login[1]
    for row in admin.opencsv():
        # Assemble the container-profile JSON straight from the row.
        profile = {
            'jsonmodel_type': 'container_profile',
            'name': row[0],
            'extent_dimension': row[1],
            'height': row[2],
            'width': row[3],
            'depth': row[4],
            'dimension_units': row[5],
        }
        response = requests.post(api_url + '/container_profiles',
                                 headers=headers,
                                 data=json.dumps(profile)).json()
        # Echo progress to the console - will add an error log as well.
        print(response)
def create_resources():
    """Create resource records in ArchivesSpace from rows of a CSV.

    CSV columns: repo_uri, identifier, title, language, level,
    begin_date, end_date, date_type, date_label, extent_type,
    extent_portion, extent_number, container_summary.
    """
    login = admin.login()
    api_url, headers = login[0], login[1]
    for row in admin.opencsv():
        repo_uri = row[0]
        resource = {
            'jsonmodel_type': 'resource',
            'id_0': row[1],
            'title': row[2],
            'language': row[3],
            'level': row[4],
            'dates': [{
                'jsonmodel_type': 'date',
                'begin': row[5],
                'end': row[6],
                'date_type': row[7],
                'label': row[8],
            }],
            'extents': [{
                'jsonmodel_type': 'extent',
                'extent_type': row[9],
                'portion': row[10],
                'number': row[11],
                'container_summary': row[12],
            }],
            'repository': {'ref': repo_uri},
        }
        payload = json.dumps(resource)
        print(payload)
        result = requests.post(api_url + repo_uri + '/resources',
                               headers=headers, data=payload).json()
        print(result)
def create_instances():
    """Attach a top-container instance to each archival object in a CSV.

    CSV columns: archival_object_uri, top_container_uri, child_type,
    child_indicator, grandchild_type, grandchild_indicator,
    instance_type. Sub-container levels 2 and 3 are included only when
    their respective type columns are non-empty.
    """
    login = admin.login()
    api_url, headers = login[0], login[1]
    for row in admin.opencsv():
        ao_uri = row[0]
        ao_json = requests.get(api_url + ao_uri, headers=headers).json()
        # Build the sub_container incrementally instead of choosing
        # between three full dict literals.
        sub_container = {
            "jsonmodel_type": "sub_container",
            "top_container": {"ref": row[1]},
        }
        if row[4] != '':
            # Grandchild present implies the child level is present too.
            sub_container["indicator_2"] = row[3]
            sub_container["type_2"] = row[2]
            sub_container["indicator_3"] = row[5]
            sub_container["type_3"] = row[4]
        elif row[2] != '':
            sub_container["indicator_2"] = row[3]
            sub_container["type_2"] = row[2]
        new_instance = {
            "jsonmodel_type": "instance",
            "instance_type": row[6],
            "sub_container": sub_container,
        }
        ao_json["instances"].append(new_instance)
        result = requests.post(api_url + ao_uri, headers=headers,
                               data=json.dumps(ao_json)).json()
        print(result)
def delete_subrecord_components():
    """Issue a DELETE for every record URI listed in the CSV.

    Each record is fetched first and its current JSON is sent as the
    DELETE request body; the API response is printed.
    """
    login = admin.login()
    api_url, headers = login[0], login[1]
    for row in admin.opencsv():
        uri = row[0]
        current = requests.get(api_url + uri, headers=headers).json()
        result = requests.delete(api_url + uri, headers=headers,
                                 data=json.dumps(current)).json()
        print(result)
def create_note_bibliography():
    """Fetch each record listed in the CSV and re-POST it unchanged.

    NOTE(review): despite the name, nothing is added to the record
    between the GET and the POST — confirm whether a bibliography note
    was meant to be appended here before relying on this function.
    """
    login = admin.login()
    api_url, headers = login[0], login[1]
    for row in admin.opencsv():
        uri = row[0]
        record = requests.get(api_url + uri, headers=headers).json()
        result = requests.post(api_url + uri, headers=headers,
                               data=json.dumps(record)).json()
        print(result)
def create_repositories():
    """Create a repository in ArchivesSpace for each CSV row.

    CSV columns: repo_name. Prints each API response.
    """
    values = admin.login()
    csvfile = admin.opencsv()
    for row in csvfile:
        repo_name = row[0]
        record_json = {'repo_name': repo_name}
        record_data = json.dumps(record_json)
        # BUG FIX: this used requests.delete, which would (at best) fail
        # and (at worst) delete data — creation must be a POST.
        record_update = requests.post(values[0] + '/repositories',
                                      headers=values[1],
                                      data=record_data).json()
        print(record_update)
def create_top_containers():
    """Create top containers in ArchivesSpace from rows of a CSV.

    CSV columns: barcode, indicator, container_profile_uri, locations,
    start_date, repo_num. The barcode field is included only when
    non-empty. Prints each API response.
    """
    values = admin.login()
    csvfile = admin.opencsv()
    for row in csvfile:
        barcode = row[0]
        indicator = row[1]
        container_profile_uri = row[2]
        locations = row[3]
        start_date = row[4]
        repo_num = row[5]
        # The two original branches were identical except for the
        # 'barcode' key; build the payload once and add it conditionally.
        create_tc = {
            'jsonmodel_type': 'top_container',
            'indicator': indicator,
            'container_profile': {'ref': container_profile_uri},
            'container_locations': [{
                'jsonmodel_type': 'container_location',
                'status': 'current',
                'start_date': start_date,
                'ref': locations,
            }],
            'repository': {'ref': repo_num},
        }
        if barcode != '':
            create_tc['barcode'] = barcode
        tcdata = json.dumps(create_tc)
        tcupdate = requests.post(values[0] + repo_num + '/top_containers',
                                 headers=values[1], data=tcdata).json()
        print(tcupdate)
def link_agent():
    """Append an agent link to each description record listed in a CSV.

    CSV columns: agent_uri, description_uri. Prints each API response.
    """
    login = admin.login()
    api_url, headers = login[0], login[1]
    for row in admin.opencsv():
        agent_uri, description_uri = row[0], row[1]
        record = requests.get(api_url + description_uri,
                              headers=headers).json()
        record['linked_agents'].append({'ref': agent_uri})
        result = requests.post(api_url + description_uri, headers=headers,
                               data=json.dumps(record)).json()
        print(result)
def create_external_documents():
    """Append an external_document stub to each record listed in a CSV.

    NOTE(review): the appended document has no title or location —
    confirm the API accepts an empty external_document, or whether
    further fields were meant to come from the CSV.
    """
    login = admin.login()
    api_url, headers = login[0], login[1]
    for row in admin.opencsv():
        uri = row[0]
        record = requests.get(api_url + uri, headers=headers).json()
        record['external_documents'].append(
            {'jsonmodel_type': 'external_document'})
        result = requests.post(api_url + uri, headers=headers,
                               data=json.dumps(record)).json()
        print(result)
def update_record_component():
    """Set one top-level field on each record listed in a CSV.

    CSV columns: record_uri, component_to_update (field name),
    updated_text (new value). Prints each API response.
    """
    login = admin.login()
    api_url, headers = login[0], login[1]
    for row in admin.opencsv():
        uri, field, new_value = row[0], row[1], row[2]
        record = requests.get(api_url + uri, headers=headers).json()
        record[field] = new_value
        result = requests.post(api_url + uri, headers=headers,
                               data=json.dumps(record)).json()
        print(result)
def update_subject_component():
    """Set one top-level field on each subject listed in a CSV.

    CSV columns: subject_uri (the subject id, appended to '/subjects/'),
    component_to_update (field name), updated_text (new value).
    """
    values = admin.login()
    csvfile = admin.opencsv()
    for row in csvfile:
        subject_uri = row[0]
        component_to_update = row[1]
        updated_text = row[2]
        subject_json = requests.get(values[0] + '/subjects/' + subject_uri,
                                    headers=values[1]).json()
        subject_json[component_to_update] = updated_text
        subject_data = json.dumps(subject_json)
        # BUG FIX: the GET built its URL with '/subjects/' but the POST
        # used the bare CSV value, so the update went to the wrong URL.
        # Post back to the same URL the record was fetched from.
        subject_update = requests.post(values[0] + '/subjects/' + subject_uri,
                                       headers=values[1],
                                       data=subject_data).json()
        print(subject_update)
def create_file_versions():
    """Append a file version to each record listed in a CSV.

    CSV columns: record_uri, file_uri. Prints each API response.
    """
    login = admin.login()
    api_url, headers = login[0], login[1]
    for row in admin.opencsv():
        uri, file_uri = row[0], row[1]
        record = requests.get(api_url + uri, headers=headers).json()
        record['file_versions'].append({
            'jsonmodel_type': 'file_version',
            'file_uri': file_uri,
        })
        result = requests.post(api_url + uri, headers=headers,
                               data=json.dumps(record)).json()
        print(result)
def create_digital_objects():
    """Create a digital object in ArchivesSpace for each CSV row.

    CSV columns: repo_uri, digital_object_id, title. Prints each API
    response.
    """
    login = admin.login()
    api_url, headers = login[0], login[1]
    for row in admin.opencsv():
        repo_uri = row[0]
        digital_object = {
            'jsonmodel_type': 'digital_object',
            'digital_object_id': row[1],
            'title': row[2],
            'repository': {'ref': repo_uri},
        }
        result = requests.post(api_url + repo_uri + '/digital_objects',
                               headers=headers,
                               data=json.dumps(digital_object)).json()
        print(result)
def link_records():
    """Append a linked record to each classification listed in a CSV.

    CSV columns: classification (URI), record (URI to link). Prints the
    modified JSON and the API response.
    """
    login = admin.login()
    api_url, headers = login[0], login[1]
    for row in admin.opencsv():
        classification_uri, record_uri = row[0], row[1]
        classification = requests.get(api_url + classification_uri,
                                      headers=headers).json()
        classification['linked_records'].append({'ref': record_uri})
        print(classification)
        result = requests.post(api_url + classification_uri, headers=headers,
                               data=json.dumps(classification)).json()
        print(result)
def get_rids():
    """Translate lines of a text file to resource-description URIs.

    Builds a value->key lookup from the CSV (column 0 = id, column 1 =
    value), then for each stripped input line writes the matching
    resource_descriptions URI to the output file.

    NOTE: the repository number 12 is hard-coded — confirm before use
    against a different repository.
    """
    csvfile = admin.opencsv()
    txtinput = admin.opentxtin()
    txtoutput = admin.opentxt()
    new_dict = {}
    for row in csvfile:
        new_dict[row[0]] = row[1]
    for line in txtinput:
        line = line.rstrip()
        # BUG FIX: indexing [0] on an empty match list raised IndexError
        # and aborted the whole run; skip (and report) unmatched lines.
        match = next((k for k, v in new_dict.items() if line == v), None)
        if match is None:
            print('no id found for ' + line)
            continue
        txtoutput.write('/repositories/12/resource_descriptions/'
                        + match + '\n')
def create_top_level():
    """Create a top-level classification term for each CSV row.

    CSV columns: identifier, title, description, repository (repo URI).
    Prints each API response.
    """
    values = admin.login()
    csvfile = admin.opencsv()
    for row in csvfile:
        identifier = row[0]
        title = row[1]
        description = row[2]
        repository = row[3]
        new_class_term = {"identifier": identifier,
                          "title": title,
                          "description": description,
                          "publish": True,
                          "repository": {'ref': repository}}
        new_class_json = json.dumps(new_class_term)
        # CONSISTENCY FIX: the endpoint was hard-coded to
        # '/repositories/12' even though each row carries its own
        # repository URI; post to the row's repository like the other
        # create_* functions in this file do.
        new_class_post = requests.post(values[0] + repository
                                       + '/classifications',
                                       headers=values[1],
                                       data=new_class_json).json()
        print(new_class_post)
def create_dig_object_components():
    """Create a digital object component for each CSV row.

    CSV columns: repo_uri, parent_uri, component_id, title. Prints each
    API response.
    """
    login = admin.login()
    api_url, headers = login[0], login[1]
    for row in admin.opencsv():
        repo_uri = row[0]
        component = {
            'jsonmodel_type': 'digital_object_component',
            'component_id': row[2],
            'title': row[3],
            'parent': {'ref': row[1]},
            'repository': {'ref': repo_uri},
        }
        result = requests.post(
            api_url + repo_uri + '/digital_object_components',
            headers=headers, data=json.dumps(component)).json()
        print(result)
def create_accessions():
    """Create an accession record for each CSV row.

    CSV columns: repo_uri, identifier, title, accession_date. Prints
    each API response.
    """
    login = admin.login()
    api_url, headers = login[0], login[1]
    for row in admin.opencsv():
        repo_uri = row[0]
        accession = {
            'jsonmodel_type': 'accession',
            'id_0': row[1],
            'title': row[2],
            'accession_date': row[3],
            'repository': {'ref': repo_uri},
        }
        result = requests.post(api_url + repo_uri + '/accessions',
                               headers=headers,
                               data=json.dumps(accession)).json()
        print(result)
def update_tc_component():
    """Set one top-level field on each top container listed in a CSV.

    CSV columns: top_container_uri, component_to_update (field name),
    update_text (new value). Prints each API response.
    """
    login = admin.login()
    api_url, headers = login[0], login[1]
    for row in admin.opencsv():
        tc_uri, field, new_value = row[0], row[1], row[2]
        container = requests.get(api_url + tc_uri, headers=headers).json()
        container[field] = new_value
        result = requests.post(api_url + tc_uri, headers=headers,
                               data=json.dumps(container)).json()
        print(result)
def update_record_pub_status():
    """Publish or unpublish each record listed in a CSV.

    CSV columns: record_uri, updated_status ('1' publishes, '0'
    unpublishes; any other value leaves the record unchanged). Prints
    each API response.
    """
    login = admin.login()
    api_url, headers = login[0], login[1]
    for row in admin.opencsv():
        uri, status = row[0], row[1]
        record = requests.get(api_url + uri, headers=headers).json()
        if status in ('1', '0'):
            record['publish'] = (status == '1')
        result = requests.post(api_url + uri, headers=headers,
                               data=json.dumps(record)).json()
        print(result)
def delete_records():
    """DELETE each record listed in a CSV, logging results to a file.

    CSV columns: record_uri. The record is fetched first and its JSON
    sent as the DELETE body; successes and failures are written to the
    output text file.
    """
    values = admin.login()
    csvfile = admin.opencsv()
    txtfile = admin.opentxt()
    for row in csvfile:
        record_uri = row[0]
        try:
            record_json = requests.get(values[0] + record_uri,
                                       headers=values[1]).json()
            record_data = json.dumps(record_json)
            delete = requests.delete(values[0] + record_uri,
                                     headers=values[1],
                                     data=record_data).json()
            admin.writetxt(txtfile, delete)
            print(delete)
        # Was a bare `except:`, which also swallows SystemExit and
        # KeyboardInterrupt; narrow to Exception so a Ctrl-C still
        # stops the batch.
        except Exception:
            txtfile.write('error, could not delete ' + str(record_uri))
            continue
def create_singlepart_notes():
    """Append a single-part note to each record listed in a CSV.

    CSV columns: record_uri, note_text, note_type. Each update is
    written to the output text file and printed.
    """
    login = admin.login()
    api_url, headers = login[0], login[1]
    logfile = admin.opentxt()
    for row in admin.opencsv():
        uri = row[0]
        record = requests.get(api_url + uri, headers=headers).json()
        record['notes'].append({
            'jsonmodel_type': 'note_singlepart',
            'content': [row[1]],
            'type': row[2],
        })
        result = requests.post(api_url + uri, headers=headers,
                               data=json.dumps(record)).json()
        admin.writetxt(logfile, result)
        print(result)
def update_note_pub_status():
    """Publish or unpublish a specific note on each record in a CSV.

    CSV columns: uri, persistent_id (of the note to change),
    updated_status ('1' publishes, '0' unpublishes). Prints each API
    response.
    """
    values = admin.login()
    csvfile = admin.opencsv()
    for row in csvfile:
        uri = row[0]
        persistent_id = row[1]
        updated_status = row[2]
        # BUG FIX: the local variable was named `json`, shadowing the
        # json module, so the later json.dumps(...) call raised
        # AttributeError. Renamed to record_json.
        record_json = requests.get(values[0] + uri,
                                   headers=values[1]).json()
        for note in record_json['notes']:
            if note['persistent_id'] == persistent_id:
                if updated_status == '1':
                    note['publish'] = True
                elif updated_status == '0':
                    note['publish'] = False
        data = json.dumps(record_json)
        ao_update = requests.post(values[0] + uri, headers=values[1],
                                  data=data).json()
        print(ao_update)
def create_extents():
    """Append an extent subrecord to each record listed in a CSV.

    CSV columns: record_uri, container_summary, extent_type, number,
    portion. Prints each API response.
    """
    login = admin.login()
    api_url, headers = login[0], login[1]
    for row in admin.opencsv():
        uri = row[0]
        record = requests.get(api_url + uri, headers=headers).json()
        record['extents'].append({
            'jsonmodel_type': 'extent',
            'container_summary': row[1],
            'extent_type': row[2],
            'number': row[3],
            'portion': row[4],
        })
        result = requests.post(api_url + uri, headers=headers,
                               data=json.dumps(record)).json()
        print(result)
def update_note_component():
    """Update one field on every note of each resource listed in a CSV.

    CSV columns: resource_uri, component_to_update (field name),
    update_text (new value). For multipart notes the field is set on
    the first subnote; for singlepart notes it is set on the note
    itself, wrapped in a list. Prints each API response.
    """
    login = admin.login()
    api_url, headers = login[0], login[1]
    for row in admin.opencsv():
        uri, field, new_value = row[0], row[1], row[2]
        resource = requests.get(api_url + uri, headers=headers).json()
        for note in resource['notes']:
            kind = note['jsonmodel_type']
            if kind == 'note_multipart':
                # Assumes the relevant content lives in subnote 0 —
                # TODO confirm for notes with multiple subnotes.
                note['subnotes'][0][field] = new_value
            elif kind == 'note_singlepart':
                note[field] = [new_value]
        result = requests.post(api_url + uri, headers=headers,
                               data=json.dumps(resource)).json()
        # admin.writetxt(txtfile, resource_update)
        print(result)
def create_locations():
    """Create a location in ArchivesSpace for each CSV row.

    CSV columns: building, room, coordinate_1_label,
    coordinate_1_indicator, coordinate_2_label, coordinate_2_indicator,
    coordinate_3_label, coordinate_3_indicator, location_profile. The
    location_profile ref is included only when non-empty. Prints each
    API response.
    """
    values = admin.login()
    csvfile = admin.opencsv()
    for row in csvfile:
        building = row[0]
        room = row[1]
        coordinate_1_label = row[2]
        coordinate_1_indicator = row[3]
        coordinate_2_label = row[4]
        coordinate_2_indicator = row[5]
        coordinate_3_label = row[6]
        coordinate_3_indicator = row[7]
        location_profile = row[8]
        # The two original branches were identical except for the
        # 'location_profile' key; build once and add it conditionally.
        new_location = {
            'jsonmodel_type': 'location',
            'building': building,
            'room': room,
            'coordinate_1_label': coordinate_1_label,
            'coordinate_1_indicator': coordinate_1_indicator,
            'coordinate_2_label': coordinate_2_label,
            'coordinate_2_indicator': coordinate_2_indicator,
            'coordinate_3_label': coordinate_3_label,
            'coordinate_3_indicator': coordinate_3_indicator,
        }
        if location_profile != '':
            new_location['location_profile'] = {'ref': location_profile}
        location_data = json.dumps(new_location)
        create_location = requests.post(values[0] + '/locations',
                                        headers=values[1],
                                        data=location_data).json()
        print(create_location)
def delete_notes():
    """Blank out notes matching a persistent_id on each listed resource.

    CSV columns: resource_uri, persistent_id. A matching note is
    emptied in place with dict.clear() (leaving an empty object in the
    notes list), then the resource is re-posted. Each update is logged
    to the output text file and printed.
    """
    login = admin.login()
    api_url, headers = login[0], login[1]
    logfile = admin.opentxt()
    for row in admin.opencsv():
        uri, persistent_id = row[0], row[1]
        resource = requests.get(api_url + uri, headers=headers).json()
        for field, valuelist in resource.items():
            if field != 'notes':
                continue
            for note in valuelist:
                # Snapshot the note's items so clear() doesn't disturb
                # the iteration.
                snapshot = dict(note)
                for _, candidate in snapshot.items():
                    if candidate == persistent_id:
                        note.clear()
        result = requests.post(api_url + uri, headers=headers,
                               data=json.dumps(resource)).json()
        admin.writetxt(logfile, result)
        print(result)
def create_multipart_notes():
    """Append a published multipart note to each record listed in a CSV.

    CSV columns: record_uri, note_text, note_type. Each update is
    written to the output text file and printed. If the record has no
    'notes' list the append is skipped (best effort) and a message is
    printed, matching the original behavior.
    """
    values = admin.login()
    csvfile = admin.opencsv()
    txtfile = admin.opentxt()
    for row in csvfile:
        record_uri = row[0]
        note_text = row[1]
        note_type = row[2]
        record_json = requests.get(values[0] + record_uri,
                                   headers=values[1]).json()
        new_note = {"jsonmodel_type": "note_multipart",
                    "subnotes": [{'content': note_text,
                                  'jsonmodel_type': 'note_text',
                                  'publish': True}],
                    'type': note_type,
                    'publish': True}
        try:
            record_json['notes'].append(new_note)
        # Was a bare `except:`; narrow to the failures an append on a
        # fetched JSON dict can realistically produce (missing key /
        # 'notes' not being a list) so real errors aren't hidden.
        except (KeyError, AttributeError):
            print('note did not append')
        record_data = json.dumps(record_json)
        record_update = requests.post(values[0] + record_uri,
                                      headers=values[1],
                                      data=record_data).json()
        admin.writetxt(txtfile, record_update)
        print(record_update)
def create_archival_objects():
    """Create archival objects from rows of a CSV.

    CSV columns: resource_uri, repo_uri, title, level,
    parent_series_uri, component_id. When parent_series_uri is
    non-empty the object is created as a child of that series with its
    component_id; otherwise a top-level object is created. Prints each
    API response.
    """
    values = admin.login()
    csvfile = admin.opencsv()
    for row in csvfile:
        resource_uri = row[0]
        repo_uri = row[1]
        title = row[2]
        level = row[3]
        parent_series_uri = row[4]
        component_id = row[5]
        # BUG FIX: the original branches were inverted — a non-empty
        # parent_series_uri produced a payload WITHOUT the parent, while
        # an empty one sent 'parent': {'ref': ''}. The parent (and
        # component_id) now go with the non-empty case.
        if parent_series_uri != '':
            new_ao = {'component_id': component_id,
                      'title': title,
                      'level': level,
                      'repository': {'ref': repo_uri},
                      'resource': {'ref': resource_uri},
                      'parent': {'ref': parent_series_uri},
                      'jsonmodel_type': 'archival_object'}
        else:
            new_ao = {'title': title,
                      'level': level,
                      'repository': {'ref': repo_uri},
                      'resource': {'ref': resource_uri},
                      'jsonmodel_type': 'archival_object'}
        ao_data = json.dumps(new_ao)
        ao_create = requests.post(values[0] + repo_uri + '/archival_objects',
                                  headers=values[1], data=ao_data).json()
        print(ao_create)