def fix_tags_no_path(entity):
    """Remap each file tag's UUID from the project copy to the published copy.

    NOTE(review): relies on `project_id`, `pub_dict`, `BaseFileResource`,
    `service_account` and `LOG` from an enclosing scope -- presumably a nested
    helper inside a publication-fixup routine; confirm before extracting.
    """
    if entity['name'] == 'designsafe.project':
        # Project-level entities carry no fileObjs; walk the project root
        # listing instead and remap tags file-by-file.
        proj_other = BaseFileResource.listing(
            service_account(),
            system="project-{}".format(entity['uuid']),
            path="")
        for child in proj_other.children:
            try:
                pub_file = BaseFileResource.listing(
                    service_account(),
                    system="designsafe.storage.published",
                    path="{}{}".format(project_id, child.path))
                proj_file = BaseFileResource.listing(
                    service_account(),
                    system="project-{}".format(entity['uuid']),
                    path=child.path)
                # Swap the tag's uuid from the project file to its
                # published counterpart.
                for tag in entity['value']['fileTags']:
                    if tag['fileUuid'] == proj_file.uuid:
                        tag['fileUuid'] = pub_file.uuid
            except Exception as err:
                # Best-effort: log and skip files that fail to resolve.
                LOG.info('error: {}'.format(err))
                continue
    else:
        for fobj in entity['fileObjs']:
            try:
                pub_file = BaseFileResource.listing(
                    service_account(),
                    system="designsafe.storage.published",
                    path="{}{}".format(project_id, fobj['path']))
                proj_file = BaseFileResource.listing(
                    service_account(),
                    system="project-{}".format(pub_dict['project']['uuid']),
                    path=fobj['path'])
                for tag in entity['value']['fileTags']:
                    if tag['fileUuid'] == proj_file.uuid:
                        tag['fileUuid'] = pub_file.uuid
            except Exception as err:
                LOG.info('error: {}'.format(err))
                continue
def create_meta(path, system, meta):
    """Create a metadata record for a single file."""
    body = file_meta_obj(path=path, system=system, meta=meta)
    service_account().meta.addMetadata(body=json.dumps(body))
def query_file_meta(system, path):
    """Return every metadata object whose path starts with *path* and whose
    system matches *system* exactly.

    Results are pulled from the metadata service in pages of 300 until a
    short page signals the end.
    """
    sa_client = service_account()
    query = json.dumps({
        "name": "designsafe.file",
        "value.system": system,
        "value.path": os.path.join('/', path, '*'),
    })
    page_size = 300
    results = []
    offset = 0
    while True:
        page = sa_client.meta.listMetadata(
            q=query, limit=page_size, offset=offset)
        results.extend(page)
        offset += page_size
        if len(page) != page_size:
            # A short (or empty) page means there is nothing left to fetch.
            break
    return results
def publish_resource(project_id, entity_uuid=None):
    """Publish a resource.

    Retrieves a project and/or an entity and sets any saved DOIs as
    published.  Fails silently when no DOIs are saved.  The project id is
    required because this function also flips the locally saved publication
    to ``"published"`` status so it appears in the published listing.

    :param str project_id: Project Id to publish.
    :param str entity_uuid: Entity uuid to publish.
    """
    mgr = ProjectsManager(service_account())
    prj = mgr.get_project_by_id(project_id)
    entity = mgr.get_entity_by_uuid(entity_uuid) if entity_uuid else None

    # Publish project DOIs first, then any entity DOIs.
    responses = [DataciteManager.publish_doi(doi) for doi in prj.dois]
    if entity:
        responses.extend(
            DataciteManager.publish_doi(doi) for doi in entity.dois)

    pub = BaseESPublication(project_id=project_id)
    pub.update(status='published')

    for res in responses:
        LOG.info("DOI published: %(doi)s", {"doi": res['data']['id']})
    return responses
def _populate_entities_in_publication(entity, publication):
    """Populate related (reverse-field) entities in a publication dict.

    :param entity: Entity resource instance.
    :param dict publication: Publication dict, mutated in place.
    """
    mgr = ProjectsManager(service_account())
    reverse_field_attrs = entity._meta._reverse_fields
    reverse_fields = []
    for attr in reverse_field_attrs:
        try:
            field = getattr(entity, attr)
        except AttributeError:
            LOG.exception("No field '%(attr)s' in '%(ent)s'", {
                "attr": attr,
                "ent": entity
            })
            # BUG FIX: the append below previously ran even after the
            # exception, re-appending the *previous* iteration's field (or
            # raising NameError on the first attr).  Skip this attr instead.
            continue
        reverse_fields.append(field.rel_cls.model_name)
    for field_name in reverse_fields:
        for pent in publication.get(FIELD_MAP[field_name], []):
            ent = mgr.get_entity_by_uuid(pent['uuid'])
            ent_dict = ent.to_body_dict()
            _delete_unused_fields(ent_dict)
            pent.update(ent_dict)
def freeze_project_and_entity_metadata(project_id, entity_uuids=None):
    """Freeze project and entity metadata.

    Given a project id and entity uuids (should be main entities) this
    function retrieves all metadata related to these entities and stores it
    into Elasticsearch as
    :class:`~designafe.libs.elasticsearch.docs.publications.BaseESPublication`

    :param str project_id: Project id.
    :param list entity_uuids: List of entity uuid strings.
    """
    mgr = ProjectsManager(service_account())
    prj = mgr.get_project_by_id(project_id)
    pub_doc = BaseESPublication(project_id=project_id)
    publication = pub_doc.to_dict()

    if entity_uuids:
        # Clear any existing entities in the publication before rebuilding.
        entity = mgr.get_entity_by_uuid(entity_uuids[0])
        pub_entities_field_name = FIELD_MAP[entity.name]
        publication[pub_entities_field_name] = []

        for ent_uuid in entity_uuids:
            entity = mgr.get_entity_by_uuid(ent_uuid)
            if entity:
                # BUG FIX: to_body_dict() was previously called *before* the
                # None check, so a missing entity raised AttributeError
                # instead of being skipped.
                entity_json = entity.to_body_dict()
                pub_entities_field_name = FIELD_MAP[entity.name]
                # Copy authors up to the publication level and strip the
                # per-entity list; helpers below re-shape them.
                publication['authors'] = entity_json['value']['authors'][:]
                entity_json['authors'] = []
                _populate_entities_in_publication(entity, publication)
                _transform_authors(entity_json, publication)
                if entity_json['value']['dois']:
                    # Latest saved DOI wins.
                    entity_json['doi'] = entity_json['value']['dois'][-1]
                _delete_unused_fields(entity_json)
                publication[pub_entities_field_name].append(entity_json)

    prj_json = prj.to_body_dict()
    _delete_unused_fields(prj_json)

    # Key swap for historical awardNumber vs awardNumbers inconsistency.
    award_number = publication.get('project', {}).get('value', {}).pop(
        'awardNumber', []) or []
    if not isinstance(award_number, list):
        award_number = []
    prj_json['value']['awardNumbers'] = award_number
    prj_json['value'].pop('awardNumber', None)

    if publication.get('project'):
        publication['project'].update(prj_json)
    else:
        publication['project'] = prj_json

    pub_doc.update(**publication)
    return pub_doc
def publish_resource(project_id, entity_uuids=None, publish_dois=False, revision=None):
    """Publish a resource.

    Retrieves a project and/or an entity and set any saved DOIs
    as published. If no DOIs are saved in the specified project or
    entity it will fail silently. We need to specify the project id
    because we need to set the status of the locally saved publication
    to `"published"` that way it shows up in the published listing.

    If publish_dois is False Datacite will keep the newly created DOIs
    in "DRAFT" status, and not "PUBLISHED". A DOI on DataCite can only
    be deleted if it is in "DRAFT" status. Once a DOI is set to
    "PUBLISHED" or "RESERVED" it can't be deleted.

    :param str project_id: Project Id to publish.
    :param list entity_uuids: list of str Entity uuids to publish.
    :param bool publish_dois: Whether to flip DOIs to published on DataCite.
    :param int revision: Revision number to publish.
    """
    es_client = new_es_client()
    # If revision number passed, set status to "published" for specified revision and
    # set status to "revised" for old versions
    mgr = ProjectsManager(service_account())
    prj = mgr.get_project_by_id(project_id)
    responses = []
    if publish_dois:
        if entity_uuids:
            for ent_uuid in entity_uuids:
                entity = None
                if ent_uuid:
                    entity = mgr.get_entity_by_uuid(ent_uuid)
                if entity:
                    for doi in entity.dois:
                        res = DataciteManager.publish_doi(doi)
                        responses.append(res)
        # Project-level DOIs are published after the entity DOIs.
        for doi in prj.dois:
            res = DataciteManager.publish_doi(doi)
            responses.append(res)
    pub = BaseESPublication(project_id=project_id, revision=revision, using=es_client)
    pub.update(status='published', using=es_client)
    IndexedPublication._index.refresh(using=es_client)
    if revision:
        # Revising a publication sets the status of the previous document to 'archived'
        # NOTE(review): `revision - 1 if revision > 2 else 0` means revision 2
        # archives revision 0 (the base document) rather than revision 1 --
        # confirm this mapping is intended.
        last_revision = revision - 1 if revision > 2 else 0
        archived_pub = BaseESPublication(project_id=project_id, revision=last_revision)
        archived_pub.update(status='archived')
    for res in responses:
        logger.info(
            "DOI published: %(doi)s",
            {"doi": res['data']['id']}
        )
    return responses
def mission_reserve_xml(publication, project, mission, authors_details=None, mis_doi=None):
    """Build the DataCite XML for a Field Recon mission and reserve/update its DOI.

    Returns a ``(doi, ark, xml_obj)`` tuple.  Side effect: each collection
    and report dict in *publication* is updated in place with its full
    entity body.
    """
    xml_obj = _mission_required_xml(authors_details, mission,
                                    publication['created'], mis_doi)
    if not mis_doi:
        # No DOI yet: reserve one pointing at the entity's landing page.
        reserve_res = _reserve_doi(
            xml_obj,
            ENTITY_TARGET_BASE.format(
                project_id=publication['project']['value']['projectId'],
                entity_uuid=mission.uuid))
        doi = reserve_res
        ark = doi
    else:
        doi = mis_doi
        ark = mis_doi
    doi = doi.strip()
    ark = ark.strip()
    identifier = xml_obj.find('identifier')
    identifier.text = doi
    resource = xml_obj
    subjects = ET.SubElement(resource, 'subjects')
    mis_type = ET.SubElement(subjects, 'subject')
    mis_type.text = project.nh_event.title()
    # Record each collection title as a subject and inline its full body
    # into the publication dict.
    for collection_dict in publication['collections']:
        collection = Collection.manager().get(service_account(),
                                              uuid=collection_dict['uuid'])
        collection_subj = ET.SubElement(subjects, 'subject')
        collection_subj.text = collection.title
        collection_dict.update(collection.to_body_dict())
    for report_dict in publication['reports']:
        report = Report.manager().get(service_account(),
                                      uuid=report_dict['uuid'])
        report_subj = ET.SubElement(subjects, 'subject')
        report_subj.text = report.title
        report_dict.update(report.to_body_dict())
    _update_doi(doi, xml_obj)
    return (doi, ark, xml_obj)
def get_context_data(self, **kwargs):
    """Update context data to add the publication and its citation fields."""
    context = super(DataDepotPublishedView, self).get_context_data(**kwargs)
    logger.info('Get context Data')
    pub = BaseESPublication(project_id=kwargs['project_id'].strip('/'))
    logger.debug('pub: %s', pub.to_dict())
    context['projectId'] = pub.projectId
    context['citation_title'] = pub.project.value.title
    context['citation_date'] = pub.created
    # FIX: compare to None with `is not None` (PEP 8), not `!= None`.
    if pub.project.value.to_dict().get('dois') is not None:
        # Newer publications keep a list of DOIs on the project value.
        context['doi'] = pub.project.value.dois[0]
    elif pub.project.to_dict().get('doi') is not None:
        # Older publications keep a single DOI on the project.
        context['doi'] = pub.project.doi
    context['keywords'] = pub.project.value.keywords.split(',')
    if 'users' in pub.to_dict():
        context['authors'] = [{
            'full_name': '{last_name}, {first_name}'.format(
                last_name=user['last_name'],
                first_name=user['first_name']
            ),
            'institution': getattr(getattr(user, 'profile', ''),
                                   'institution', '')
        } for user in getattr(pub, 'users', [])]
    elif 'authors' in pub.to_dict():
        context['authors'] = [{
            'full_name': '{last_name}, {first_name}'.format(
                last_name=author['lname'],
                first_name=author['fname']
            ),
            'institution': getattr(author, 'inst', '')
        } for author in getattr(pub, 'authors', [])]
    else:
        # Fall back to the project's team ordering for author display.
        context['authors'] = [{
            'full_name': '{last_name}, {first_name}'.format(
                last_name=author['lname'],
                first_name=author['fname']
            ),
            'institution': getattr(author, 'inst', '')
        } for author in getattr(pub.project.value, 'teamOrder', [])]
    context['publication'] = pub
    context['description'] = pub.project.value.description
    context['experiments'] = getattr(pub, 'experimentsList', [])
    context['missions'] = getattr(pub, 'missions', [])
    context['reports'] = getattr(pub, 'reports', [])
    context['simulations'] = getattr(pub, 'simulations', [])
    context['hybrid_simulations'] = getattr(pub, 'hybrid_simulations', [])
    proj = ProjectsManager(service_account()).get_project_by_id(pub.projectId)
    context['dc_json'] = json.dumps(proj.to_dataset_json())
    if self.request.user.is_authenticated:
        context['angular_init'] = json.dumps({
            'authenticated': True,
        })
    else:
        context['angular_init'] = json.dumps({
            'authenticated': False,
        })
    return context
def amend_publication(project_id, amendments=None, authors=None, revision=None):
    """Amend a Publication.

    Update amendable fields on a publication and the corresponding DataCite
    records.  These changes do not produce a new version of a publication,
    but they do allow for limited changes to a published project.

    :param str project_id: Project id to amend.
    :param dict amendments: Mapping of entity uuid -> amended entity dict.
    :param dict authors: Mapping of entity uuid -> author list (or, for
        "Other" projects, the team order list).
    :param int revision: Revision number to amend.
    """
    es_client = new_es_client()
    mgr = ProjectsManager(service_account())
    prj = mgr.get_project_by_id(project_id)
    pub = BaseESPublication(project_id=project_id, revision=revision,
                            using=es_client)
    prj_dict = prj.to_body_dict()
    pub_dict = pub.to_dict()

    _delete_unused_fields(prj_dict)

    # FIX: `amendments` defaults to None; normalize so the membership test
    # below cannot raise TypeError.
    amendments = amendments or {}

    if pub.project.value.projectType != 'other':
        pub_entity_uuids = pub.entities()
        for uuid in pub_entity_uuids:
            if uuid in amendments:
                entity = amendments[uuid]
            else:
                entity = mgr.get_entity_by_uuid(uuid)
                entity = entity.to_body_dict()
                _delete_unused_fields(entity)
            for pub_ent in pub_dict[FIELD_MAP[entity['name']]]:
                if pub_ent['uuid'] == entity['uuid']:
                    for key in entity['value']:
                        # Main entities carry 'dois'; everything else is a
                        # sub-entity with its own unamendable field list.
                        ent_type = ('entity' if 'dois' in entity['value']
                                    else 'subentity')
                        if key not in UNAMENDABLE_FIELDS[ent_type]:
                            pub_ent['value'][key] = entity['value'][key]
                    if 'authors' in entity['value']:
                        pub_ent['value']['authors'] = authors[entity['uuid']]
                        _set_authors(pub_ent, pub_dict)

    # weird key swap for old issues with awardnumber(s)
    award_number = prj.award_number or []
    if not isinstance(award_number, list):
        award_number = []
    prj_dict['value']['awardNumbers'] = award_number
    prj_dict['value'].pop('awardNumber', None)

    for key in prj_dict['value']:
        if key not in UNAMENDABLE_FIELDS['project']:
            pub_dict['project']['value'][key] = prj_dict['value'][key]
    if authors and prj_dict['value']['projectType'] == 'other':
        pub_dict['project']['value']['teamOrder'] = authors

    pub.update(**pub_dict)
    IndexedPublication._index.refresh(using=es_client)
    return pub
def fix_tags_path(entity):
    """Point each file tag at the UUID of its published copy.

    Tags whose published file cannot be listed are logged and left alone.
    """
    for tag in entity['value']['fileTags']:
        try:
            published_path = "{}{}".format(project_id, tag['path'])
            pub_file = BaseFileResource.listing(
                service_account(),
                system="designsafe.storage.published",
                path=published_path)
            tag['fileUuid'] = pub_file.uuid
        except Exception as err:
            logger.info('error: {}'.format(err))
            continue
def __init__(self, user):
    """Initialize.

    :param user: Django user instance.
    """
    self.user = user
    # Authenticated users act through their own OAuth client; everyone
    # else falls back to the service account.
    if user.is_authenticated:
        client = user.agave_oauth.client
    else:
        client = service_account()
    self._ac = client
    self._mp = MongoProjectsHelper(client)
    self._pm = ProjectsManager(client)
def project_by_uuid(uuid, prj_type):
    """Retrieve a project, dispatching on its project type.

    :param str uuid: Project uuid.
    :param str prj_type: 'experimental', 'simulation' or 'hybrid_simulation';
        anything else falls back to :class:`BaseProject`.
    """
    agv = service_account()
    if prj_type == 'experimental':
        project = ExperimentalProject.manager().get(agv, uuid=uuid)
    elif prj_type == 'simulation':
        project = SimulationProject.manager().get(agv, uuid=uuid)
    elif prj_type == 'hybrid_simulation':
        # BUG FIX: this branch previously re-tested 'simulation', which made
        # the HybridSimulationProject case unreachable.
        project = HybridSimulationProject.manager().get(agv, uuid=uuid)
    else:
        project = BaseProject.manager().get(agv, uuid=uuid)
    return project
def copy_meta(src_system, src_path, dest_system, dest_path):
    """Check for and copy file metadata record(s) to a new system/path.

    Every record under *src_path* on *src_system* is duplicated with its
    path rebased onto *dest_path* and its system set to *dest_system*,
    then pushed in one bulk create.
    """
    sa_client = service_account()
    copies = []
    meta_listing = query_file_meta(system=src_system, path=src_path)
    for meta in meta_listing:
        meta_copy = file_meta_obj(
            path=meta.value['path'].replace(src_path, dest_path),
            system=dest_system,
            meta=meta['value'])
        copies.append(meta_copy)
    if copies:
        # Only hit the service when there is something to create.
        sa_client.meta.bulkCreate(body=json.dumps(copies))
def update_meta(src_system, src_path, dest_system, dest_path):
    """Check for and update file metadata record(s) to a new system/path.

    Rewrites ``system``, ``path`` and ``basePath`` on every record under
    *src_path* and pushes the changes in one bulk update.
    """
    sa_client = service_account()
    updates = []
    meta_listing = query_file_meta(system=src_system, path=src_path)
    for meta in meta_listing:
        meta.value['system'] = dest_system
        meta.value['path'] = meta.value['path'].replace(src_path, dest_path)
        meta.value['basePath'] = meta.value['basePath'].replace(
            os.path.dirname(src_path), os.path.dirname(dest_path))
        updates.append({"uuid": meta.uuid, "update": meta})
    if updates:
        # Only hit the service when there is something to update.
        sa_client.meta.bulkUpdate(body=json.dumps(updates))
def publish_resource(project_id, entity_uuids=None, publish_dois=False):
    """Publish a resource.

    Retrieves a project and/or an entity and sets any saved DOIs as
    published.  Fails silently when no DOIs are saved.  The project id is
    required because this function also flips the locally saved publication
    to ``"published"`` status so it appears in the published listing.

    If publish_dois is False, DataCite keeps the newly created DOIs in
    "DRAFT" status instead of "PUBLISHED".  A DOI can only be deleted while
    in "DRAFT"; once "PUBLISHED" or "RESERVED" it can't be deleted.

    :param str project_id: Project Id to publish.
    :param list entity_uuids: list of str Entity uuids to publish.
    """
    mgr = ProjectsManager(service_account())
    prj = mgr.get_project_by_id(project_id)
    responses = []
    if publish_dois:
        # FIX: entity_uuids defaults to None; guard before iterating so
        # project-only publishes don't raise TypeError.
        if entity_uuids:
            for ent_uuid in entity_uuids:
                entity = None
                if ent_uuid:
                    entity = mgr.get_entity_by_uuid(ent_uuid)
                if entity:
                    for doi in entity.dois:
                        res = DataciteManager.publish_doi(doi)
                        responses.append(res)
        for doi in prj.dois:
            res = DataciteManager.publish_doi(doi)
            responses.append(res)
    pub = BaseESPublication(project_id=project_id)
    pub.update(status='published')
    for res in responses:
        LOG.info(
            "DOI published: %(doi)s",
            {"doi": res['data']['id']}
        )
    return responses
def create_metadata():
    """Write DataCite metadata JSON for the publication to ``metadata_path``.

    NOTE(review): relies on `pub`, `metadata_path`, `logger`, `TARGET_BASE`
    and `ENTITY_TARGET_BASE` from an enclosing scope -- presumably a nested
    helper in an archiving task; confirm before extracting.
    """
    mgr = ProjectsManager(service_account())
    pub_dict = pub._wrapped.to_dict()
    meta_dict = {}
    # Maps project type to the publication field holding its main entities.
    entity_type_map = {
        'experimental': 'experimentsList',
        'simulation': 'simulations',
        'hybrid_simulation': 'hybrid_simulations',
        'field_recon': 'missions',
    }
    project_uuid = pub_dict['project']['uuid']
    try:
        logger.debug("Creating metadata for {}".format(pub.projectId))
        if pub_dict['project']['value']['projectType'] in entity_type_map:
            ent_type = entity_type_map[pub_dict['project']['value']
                                       ['projectType']]
            entity_uuids = []
            if ent_type in pub_dict.keys():
                entity_uuids = [x['uuid'] for x in pub_dict[ent_type]]
            meta_dict = mgr.get_entity_by_uuid(
                project_uuid).to_datacite_json()
            meta_dict['published_resources'] = []
            meta_dict['url'] = TARGET_BASE.format(
                project_id=pub_dict['project_id'])
            for uuid in entity_uuids:
                entity = mgr.get_entity_by_uuid(uuid)
                ent_json = entity.to_datacite_json()
                ent_json['doi'] = entity.dois[0]
                ent_json['url'] = ENTITY_TARGET_BASE.format(
                    project_id=pub_dict['project_id'], entity_uuid=uuid)
                meta_dict['published_resources'].append(ent_json)
        else:
            # "Other"-type projects carry the DOI on the project itself.
            project = mgr.get_entity_by_uuid(project_uuid)
            meta_dict = project.to_datacite_json()
            meta_dict['doi'] = project.dois[0]
            meta_dict['url'] = TARGET_BASE.format(
                project_id=pub_dict['project_id'])
        with open(metadata_path, 'w') as meta_file:
            json.dump(meta_dict, meta_file)
    except Exception:
        # FIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; narrow to Exception and keep the traceback log.
        logger.exception("Failed to create metadata!")
def draft_publication(
        project_id,
        main_entity_uuid=None,
        project_doi=None,
        main_entity_doi=None,
        upsert_project_doi=False,
        upsert_main_entity_doi=True,
):
    """Reserve a publication.

    A publication is reserved by creating a DOI through Datacite. For some
    of the projects a DOI is only created for the main entity e.g. Mission
    or Simulation. For some other projects we also (or only) get a DOI for
    the project.

    - If :param:`project_doi` and/or :param:`main_entity_doi` values are
      given then those dois will be updated (or created if they don't exist
      in datacite).
    - If :param:`upsert_project_doi` and/or :param:`upsert_main_entity_doi`
      are set to `True` then any saved DOIs will be updated (even if there's
      multiple unless a specific DOI is given). If there are no saved DOIs
      then a new DOI will be created. Meaning, it will act as update or
      insert.
    - If :param:`project_id` is given **but** :param:`main_entity_uuid` is
      ``None`` then a project DOI will be created or updated.

    .. warning:: This function only creates a *Draft* DOI and not a public
        one.
    .. warning:: An entity *might* have multiple DOIs, if this is the case
        and :param:`upsert_project_doi` or :param:`upsert_main_entity_doi`
        are set to True then *all* saved dois will be updated.
    .. note:: In theory a single resource *should not* have multiple DOIs
        but we don't know how this will change in the future, hence, we are
        supporting multiple DOIs.
    .. note:: If no :param:`main_entity_uuid` is given then a project DOI
        will be created.

    :param str project_id: Project Id
    :param str main_entity_uuid: Uuid of main entity.
    :param str project_doi: Custom doi for project.
    :param str main_entity_doi: Custom doi for main entity.
    :param bool upsert_project_doi: Update or insert project doi.
    :param bool upsert_main_entity_doi: Update or insert main entity doi.
    """
    mgr = ProjectsManager(service_account())
    prj = mgr.get_project_by_id(project_id)
    entity = None
    if main_entity_uuid:
        entity = mgr.get_entity_by_uuid(main_entity_uuid)
    else:
        # No main entity: the project itself must get the DOI.
        upsert_project_doi = True

    responses = []
    prj_url = TARGET_BASE.format(project_id=project_id)
    if entity:
        entity_url = ENTITY_TARGET_BASE.format(project_id=project_id,
                                               entity_uuid=main_entity_uuid)
    prj_datacite_json = prj.to_datacite_json()
    prj_datacite_json['url'] = prj_url
    if entity:
        ent_datacite_json = entity.to_datacite_json()
        ent_datacite_json['url'] = entity_url

    # --- Project DOI: explicit doi > existing saved dois > brand new doi.
    if upsert_project_doi and project_doi:
        prj_res = DataciteManager.create_or_update_doi(prj_datacite_json,
                                                       project_doi)
        prj.dois += [project_doi]
        prj.dois = list(set(prj.dois))  # de-duplicate saved DOIs
        prj.save(service_account())
        responses.append(prj_res)
    elif upsert_project_doi and prj.dois:
        for doi in prj.dois:
            prj_res = DataciteManager.create_or_update_doi(
                prj_datacite_json, doi)
            responses.append(prj_res)
    elif upsert_project_doi and not prj.dois:
        prj_res = DataciteManager.create_or_update_doi(prj_datacite_json)
        prj.dois += [prj_res['data']['id']]
        prj.save(service_account())
        responses.append(prj_res)

    # --- Main entity DOI: same precedence as the project DOI above.
    if entity and upsert_main_entity_doi and main_entity_doi:
        me_res = DataciteManager.create_or_update_doi(ent_datacite_json,
                                                      main_entity_doi)
        entity.dois += [main_entity_doi]
        entity.dois = list(set(entity.dois))
        entity.save(service_account())
        responses.append(me_res)
    elif entity and upsert_main_entity_doi and entity.dois:
        for doi in entity.dois:
            me_res = DataciteManager.create_or_update_doi(
                ent_datacite_json, doi)
            responses.append(me_res)
    elif entity and upsert_main_entity_doi and not entity.dois:
        me_res = DataciteManager.create_or_update_doi(ent_datacite_json)
        entity.dois += [me_res['data']['id']]
        entity.save(service_account())
        responses.append(me_res)

    for res in responses:
        LOG.info("DOI created or updated: %(doi)s",
                 {"doi": res['data']['id']})
    return responses
def experiment_reserve_xml(publication, project, experiment, authors_details=None, exp_doi=None):
    """Build the DataCite XML for an experiment and reserve/update its DOI.

    Returns a ``(doi, ark, xml_obj)`` tuple.  Side effect: every related
    entity dict in *publication* (events, model configs, sensor lists,
    reports, analyses) is updated in place with its full entity body.
    """
    xml_obj = _experiment_required_xml(authors_details, experiment,
                                       publication['created'], exp_doi)
    if not exp_doi:
        # No DOI yet: reserve one pointing at the entity's landing page.
        reserve_res = _reserve_doi(
            xml_obj,
            ENTITY_TARGET_BASE.format(
                project_id=publication['project']['value']['projectId'],
                entity_uuid=experiment.uuid))
        doi = reserve_res
        ark = doi
    else:
        doi = exp_doi
        ark = exp_doi
    doi = doi.strip()
    ark = ark.strip()
    identifier = xml_obj.find('identifier')
    identifier.text = doi
    resource = xml_obj
    # The experimental facility is recorded as a hosting institution.
    contributors = ET.SubElement(resource, 'contributors')
    contributor = ET.SubElement(contributors, 'contributor')
    contributor.attrib['contributorType'] = 'HostingInstitution'
    name = ET.SubElement(contributor, 'contributorName')
    name.text = experiment.experimental_facility
    subjects = ET.SubElement(resource, 'subjects')
    exp_type = ET.SubElement(subjects, 'subject')
    exp_type.text = experiment.experimental_facility.title()
    eq_type = ET.SubElement(subjects, 'subject')
    eq_type.text = experiment.equipment_type
    # Each related entity contributes its title as a subject and gets its
    # full body inlined into the publication dict.
    for event_dict in publication['eventsList']:
        event = Event.manager().get(service_account(),
                                    uuid=event_dict['uuid'])
        event_subj = ET.SubElement(subjects, 'subject')
        event_subj.text = event.title
        event_dict.update(event.to_body_dict())
    for mcf_dict in publication['modelConfigs']:
        mcf = ModelConfig.manager().get(service_account(),
                                        uuid=mcf_dict['uuid'])
        mcf_subj = ET.SubElement(subjects, 'subject')
        mcf_subj.text = mcf.title
        mcf_dict.update(mcf.to_body_dict())
    for slt_dict in publication['sensorLists']:
        slt = SensorList.manager().get(service_account(),
                                       uuid=slt_dict['uuid'])
        slt_subj = ET.SubElement(subjects, 'subject')
        slt_subj.text = slt.title
        slt_dict.update(slt.to_body_dict())
    for report_dict in publication.get('reportsList', []):
        report = Report.manager().get(service_account(),
                                      uuid=report_dict['uuid'])
        report_subj = ET.SubElement(subjects, 'subject')
        report_subj.text = report.title
        report_dict.update(report.to_body_dict())
    for analysis_dict in publication.get('analysisList', []):
        analysis = Analysis.manager().get(service_account(),
                                          uuid=analysis_dict['uuid'])
        analysis_subj = ET.SubElement(subjects, 'subject')
        analysis_subj.text = analysis.title
        analysis_dict.update(analysis.to_body_dict())
    _update_doi(doi, xml_obj)
    return (doi, ark, xml_obj)
def simulation_reserve_xml(publication, project, simulation, authors_details=None, sim_doi=None):
    """Build the DataCite XML for a simulation and reserve/update its DOI.

    Returns a ``(doi, ark, xml_obj)`` tuple.  Side effect: every related
    entity dict in *publication* (models, inputs, outputs, reports,
    analyses) is updated in place with its full entity body.
    """
    xml_obj = _simulation_required_xml(authors_details, simulation,
                                       publication['created'], sim_doi)
    if not sim_doi:
        # No DOI yet: reserve one pointing at the entity's landing page.
        reserve_res = _reserve_doi(
            xml_obj,
            ENTITY_TARGET_BASE.format(
                project_id=publication['project']['value']['projectId'],
                entity_uuid=simulation.uuid))
        doi = reserve_res
        ark = doi
    else:
        doi = sim_doi
        ark = sim_doi
    doi = doi.strip()
    ark = ark.strip()
    identifier = xml_obj.find('identifier')
    identifier.text = doi
    resource = xml_obj
    subjects = ET.SubElement(resource, 'subjects')
    sim_type = ET.SubElement(subjects, 'subject')
    sim_type.text = simulation.simulation_type.title()
    # Each related entity contributes its title as a subject and gets its
    # full body inlined into the publication dict.
    for model_dict in publication['models']:
        model = SimModel.manager().get(service_account(),
                                       uuid=model_dict['uuid'])
        model_subj = ET.SubElement(subjects, 'subject')
        model_subj.text = model.title
        model_dict.update(model.to_body_dict())
    for input_dict in publication['inputs']:
        sim_input = SimInput.manager().get(service_account(),
                                           uuid=input_dict['uuid'])
        input_subj = ET.SubElement(subjects, 'subject')
        input_subj.text = sim_input.title
        input_dict.update(sim_input.to_body_dict())
    for output_dict in publication['outputs']:
        output = SimOutput.manager().get(service_account(),
                                         uuid=output_dict['uuid'])
        output_subj = ET.SubElement(subjects, 'subject')
        output_subj.text = output.title
        output_dict.update(output.to_body_dict())
    for report_dict in publication.get('reports', []):
        report = SimReport.manager().get(service_account(),
                                         uuid=report_dict['uuid'])
        report_subj = ET.SubElement(subjects, 'subject')
        report_subj.text = report.title
        report_dict.update(report.to_body_dict())
    for analysis_dict in publication.get('analysiss', []):
        analysis = SimAnalysis.manager().get(service_account(),
                                             uuid=analysis_dict['uuid'])
        analysis_subj = ET.SubElement(subjects, 'subject')
        analysis_subj.text = analysis.title
        analysis_dict.update(analysis.to_body_dict())
    _update_doi(doi, xml_obj)
    return (doi, ark, xml_obj)
def hybrid_simulation_reserve_xml(publication, project, hybrid_simulation, authors_details, hybrid_sim_doi):
    """Build the DataCite XML for a hybrid simulation and reserve/update its DOI.

    Returns a ``(doi, ark, xml_obj)`` tuple.  Side effect: every related
    entity dict in *publication* (global models, coordinators,
    substructures, outputs, reports, analyses) is updated in place with its
    full entity body.
    """
    xml_obj = _simulation_required_xml(authors_details, hybrid_simulation,
                                       publication['created'],
                                       hybrid_sim_doi)
    if not hybrid_sim_doi:
        # No DOI yet: reserve one pointing at the entity's landing page.
        reserve_res = _reserve_doi(
            xml_obj,
            ENTITY_TARGET_BASE.format(
                project_id=publication['project']['value']['projectId'],
                entity_uuid=hybrid_simulation.uuid))
        doi = reserve_res
        ark = doi
    else:
        doi = hybrid_sim_doi
        ark = hybrid_sim_doi
    doi = doi.strip()
    ark = ark.strip()
    identifier = xml_obj.find('identifier')
    identifier.text = doi
    resource = xml_obj
    subjects = ET.SubElement(resource, 'subjects')
    sim_type = ET.SubElement(subjects, 'subject')
    sim_type.text = hybrid_simulation.simulation_type.title()
    # Each related entity contributes its title as a subject and gets its
    # full body inlined into the publication dict.
    for global_model_dict in publication['global_models']:
        global_model = GlobalModel.manager().get(
            service_account(), uuid=global_model_dict['uuid'])
        global_model_subj = ET.SubElement(subjects, 'subject')
        global_model_subj.text = global_model.title
        global_model_dict.update(global_model.to_body_dict())
    for coordinator_dict in publication['coordinators']:
        coordinator = Coordinator.manager().get(
            service_account(), uuid=coordinator_dict['uuid'])
        coordinator_subj = ET.SubElement(subjects, 'subject')
        coordinator_subj.text = coordinator.title
        coordinator_dict.update(coordinator.to_body_dict())
    for sim_substructure_dict in publication['sim_substructures']:
        sim_substructure = SimSubstructure.manager().get(
            service_account(), uuid=sim_substructure_dict['uuid'])
        sim_substructure_subj = ET.SubElement(subjects, 'subject')
        sim_substructure_subj.text = sim_substructure.title
        sim_substructure_dict.update(sim_substructure.to_body_dict())
    for exp_substructure_dict in publication['exp_substructures']:
        exp_substructure = ExpSubstructure.manager().get(
            service_account(), uuid=exp_substructure_dict['uuid'])
        exp_substructure_subj = ET.SubElement(subjects, 'subject')
        exp_substructure_subj.text = exp_substructure.title
        exp_substructure_dict.update(exp_substructure.to_body_dict())
    for coordinator_output_dict in publication['coordinator_outputs']:
        coordinator_output = CoordinatorOutput.manager().get(
            service_account(), uuid=coordinator_output_dict['uuid'])
        coordinator_output_subj = ET.SubElement(subjects, 'subject')
        coordinator_output_subj.text = coordinator_output.title
        coordinator_output_dict.update(coordinator_output.to_body_dict())
    for sim_output_dict in publication['sim_outputs']:
        sim_output = SimOutput.manager().get(
            service_account(), uuid=sim_output_dict['uuid'])
        sim_output_subj = ET.SubElement(subjects, 'subject')
        sim_output_subj.text = sim_output.title
        sim_output_dict.update(sim_output.to_body_dict())
    for exp_output_dict in publication['exp_outputs']:
        exp_output = ExpOutput.manager().get(
            service_account(), uuid=exp_output_dict['uuid'])
        exp_output_subj = ET.SubElement(subjects, 'subject')
        exp_output_subj.text = exp_output.title
        exp_output_dict.update(exp_output.to_body_dict())
    for report_dict in publication.get('reports', []):
        report = HybridReport.manager().get(
            service_account(), uuid=report_dict['uuid'])
        report_subj = ET.SubElement(subjects, 'subject')
        report_subj.text = report.title
        report_dict.update(report.to_body_dict())
    for analysis_dict in publication.get('analysiss', []):
        analysis = HybridAnalysis.manager().get(
            service_account(), uuid=analysis_dict['uuid'])
        analysis_subj = ET.SubElement(subjects, 'subject')
        analysis_subj.text = analysis.title
        analysis_dict.update(analysis.to_body_dict())
    _update_doi(doi, xml_obj)
    return (doi, ark, xml_obj)
def freeze_project_and_entity_metadata(project_id, entity_uuids=None):
    """Freeze project and entity metadata.

    Given a project id and an entity uuid (should be a main entity) this
    function retrieves all metadata related to these entities and stores it
    into Elasticsearch as
    :class:`~designafe.libs.elasticsearch.docs.publications.BaseESPublication`

    When publishing for the first time or publishing over an existing
    publication. We will clear any existing entities (if any) from the
    published metadata. We'll use entity_uuids (the entities getting DOIs)
    to rebuild the rest of the publication. These entities usually do not
    have files associated to them (except published reports/documents).

    :param str project_id: Project id.
    :param list of entity_uuid strings: Entity uuids.
    """
    mgr = ProjectsManager(service_account())
    prj = mgr.get_project_by_id(project_id)
    pub_doc = BaseESPublication(project_id=project_id)
    publication = pub_doc.to_dict()
    if entity_uuids:
        # clear any existing sub entities in publication and keep updated
        # fileObjs
        fields_to_clear = []
        entities_with_files = []
        for key in list(FIELD_MAP.keys()):
            if FIELD_MAP[key] in list(publication.keys()):
                fields_to_clear.append(FIELD_MAP[key])
        fields_to_clear = set(fields_to_clear)
        for field in fields_to_clear:
            for ent in publication[field]:
                if 'fileObjs' in ent:
                    # Preserve file listings so they can be restored on the
                    # rebuilt entities below.
                    entities_with_files.append(ent)
                if ent['uuid'] in entity_uuids:
                    # Rebinding publication[field] does not disturb this
                    # loop; the iterator holds the old list.
                    publication[field] = []
        for ent_uuid in entity_uuids:
            entity = None
            entity = mgr.get_entity_by_uuid(ent_uuid)
            if entity:
                entity_json = entity.to_body_dict()
                pub_entities_field_name = FIELD_MAP[entity.name]
                # Restore the previously published fileObjs for this entity.
                for e in entities_with_files:
                    if e['uuid'] == entity_json['uuid']:
                        entity_json['fileObjs'] = e['fileObjs']
                publication['authors'] = list(entity_json['value']['authors'])
                entity_json['authors'] = []
                _populate_entities_in_publication(entity, publication)
                _transform_authors(entity_json, publication)
                if entity_json['value']['dois']:
                    # Latest saved DOI wins.
                    entity_json['doi'] = entity_json['value']['dois'][-1]
                _delete_unused_fields(entity_json)
                publication[pub_entities_field_name].append(entity_json)
    prj_json = prj.to_body_dict()
    _delete_unused_fields(prj_json)
    # Key swap for historical awardNumber vs awardNumbers inconsistency.
    award_number = publication.get('project', {}).get('value', {}).pop(
        'awardNumber', []
    ) or []
    if not isinstance(award_number, list):
        award_number = []
    prj_json['value']['awardNumbers'] = award_number
    prj_json['value'].pop('awardNumber', None)
    if publication.get('project'):
        publication['project'].update(prj_json)
    else:
        publication['project'] = prj_json
    pub_doc.update(**publication)
    return pub_doc
def _strip_entity_body(entity):
    """Return ``entity.to_body_dict()`` cleaned for embedding in a publication.

    Removes private (``_``-prefixed) keys, reverse-relation (``*_set``) keys,
    and the per-entity ``value['authors']`` list (authors are tracked on the
    publication-level dicts instead).
    """
    body = entity.to_body_dict()
    body['value'].pop('authors', '')
    for key in [k for k in body if k.startswith('_') or k.endswith('_set')]:
        body.pop(key)
    return body


def reserve_publication(publication):
    """Reserve DOIs for a publication and mark them public via the EZID/ARK flow.

    Builds the project-level XML, then one XML per main entity (experiments,
    simulations, hybrid simulations, or field-recon missions depending on the
    project type), cross-links project and entity DOIs, and pushes each DOI
    with ``status='public'``.

    Side effects: mutates ``publication`` in place — sets ``doi`` on the
    project dict and on each main-entity dict, and overwrites each entity
    dict with its cleaned body dict.

    :param dict publication: Publication payload containing ``project`` and
        the per-type entity lists (``experimentsList``, ``simulations``,
        ``hybrid_simulations``, ``missions``).
    :returns: The mutated ``publication`` dict.
    """
    project = project_by_uuid(
        publication['project']['uuid'],
        publication['project']['value']['projectType']
    )
    # NOTE: get_or_craete_authors is the helper's actual (misspelled) name.
    proj_doi, proj_ark, proj_xml = project_reserve_xml(
        publication, project, get_or_craete_authors(publication))
    logger.debug('proj_doi: %s', proj_doi)
    logger.debug('proj_ark: %s', proj_ark)
    logger.debug('proj_xml: %s', proj_xml)
    exps_dois = []
    anl_dois = []
    sim_dois = []
    mis_dois = []
    xmls = {proj_doi: proj_xml}
    publication['project']['doi'] = proj_doi

    if project.project_type.lower() == 'experimental':
        for pexp in publication['experimentsList']:
            exp = Experiment.manager().get(service_account(), uuid=pexp['uuid'])
            exp_doi = pexp.get('doi', '')
            authors = pexp['authors']
            exp_doi, exp_ark, exp_xml = experiment_reserve_xml(
                publication, project, exp, authors, exp_doi)
            add_related(exp_xml, [proj_doi])
            exps_dois.append(exp_doi)
            pexp.update(_strip_entity_body(exp))
            pexp['doi'] = exp_doi
            xmls[exp_doi] = exp_xml
            logger.debug('exp_doi: %s', exp_doi)
            logger.debug('exp_ark: %s', exp_ark)
            logger.debug('exp_xml: %s', exp_xml)
        add_related(proj_xml, exps_dois + anl_dois)
        for _doi in [proj_doi] + exps_dois + anl_dois:
            logger.debug('Final project doi: %s', _doi)
            _update_doi(_doi, xmls[_doi], status='public')
    elif project.project_type.lower() == 'simulation':
        for psim in publication['simulations']:
            sim = Simulation.manager().get(service_account(), uuid=psim['uuid'])
            sim_doi = psim.get('doi')
            authors = psim['authors']
            sim_doi, sim_ark, sim_xml = simulation_reserve_xml(
                publication, project, sim, authors, sim_doi)
            add_related(sim_xml, [proj_doi])
            sim_dois.append(sim_doi)
            psim.update(_strip_entity_body(sim))
            psim['doi'] = sim_doi
            xmls[sim_doi] = sim_xml
            logger.debug('sim_doi: %s', sim_doi)
            logger.debug('sim_ark: %s', sim_ark)
            logger.debug('sim_xml: %s', sim_xml)
        add_related(proj_xml, sim_dois)
        for _doi in [proj_doi] + sim_dois:
            logger.debug('DOI: %s', _doi)
            _update_doi(_doi, xmls[_doi], status='public')
    elif project.project_type.lower() == 'hybrid_simulation':
        for psim in publication.get('hybrid_simulations', []):
            sim = HybridSimulation.manager().get(
                service_account(), uuid=psim['uuid'])
            sim_doi = psim.get('doi')
            authors = psim['authors']
            sim_doi, sim_ark, sim_xml = hybrid_simulation_reserve_xml(
                publication, project, sim, authors, sim_doi)
            add_related(sim_xml, [proj_doi])
            sim_dois.append(sim_doi)
            psim.update(_strip_entity_body(sim))
            psim['doi'] = sim_doi
            xmls[sim_doi] = sim_xml
            logger.debug('sim_doi: %s', sim_doi)
            logger.debug('sim_ark: %s', sim_ark)
            logger.debug('sim_xml: %s', sim_xml)
        add_related(proj_xml, sim_dois)
        for _doi in [proj_doi] + sim_dois:
            logger.debug('DOI: %s', _doi)
            _update_doi(_doi, xmls[_doi], status='public')
    elif project.project_type.lower() == 'field_recon':
        for pmis in publication.get('missions', []):
            # BUG FIX: the original fetched the mission with psim['uuid'],
            # a name that is undefined in this branch (the loop variable
            # here is pmis), which raised NameError for field-recon projects.
            mission = Mission.manager().get(
                service_account(), uuid=pmis['uuid'])
            mis_doi = pmis.get('doi')
            authors = pmis['authors']
            mis_doi, mis_ark, mis_xml = mission_reserve_xml(
                publication, project, mission, authors, mis_doi)
            add_related(mis_xml, [proj_doi])
            mis_dois.append(mis_doi)
            pmis.update(_strip_entity_body(mission))
            pmis['doi'] = mis_doi
            xmls[mis_doi] = mis_xml
            logger.debug('mis_doi: %s', mis_doi)
            logger.debug('mis_ark: %s', mis_ark)
            logger.debug('mis_xml: %s', mis_xml)
        add_related(proj_xml, mis_dois)
        for _doi in [proj_doi] + mis_dois:
            logger.debug('DOI: %s', _doi)
            _update_doi(_doi, xmls[_doi], status='public')
    else:
        # Project types with no main entities: publish the project DOI only.
        _update_doi(proj_doi, xmls[proj_doi], status='public')
    return publication
def draft_publication(
        project_id,
        main_entity_uuids=None,
        project_doi=None,
        main_entity_doi=None,
        upsert_project_doi=False,
        upsert_main_entity_doi=True,
        revision=None,
        revised_authors=None
):
    """Reserve a publication.

    A publication is reserved by creating a DOI through Datacite. For some of
    the projects a DOI is only created for the main entity e.g. Mission or
    Simulation. For some other projects we also (or only) get a DOI for the
    project.

    - If :param:`project_doi` and/or :param:`main_entity_doi` values are given
      then those dois will be updated (or created if they don't exist in
      datacite).
    - If :param:`upsert_project_doi` and/or :param:`upsert_main_entity_doi`
      are set to `True` then any saved DOIs will be updated (even if there's
      multiple unless a specific DOI is given). If there are no saved DOIs then
      a new DOI will be created. Meaning, it will act as update or insert.
    - If :param:`project_id` is given **but** :param:`main_entity_uuids` is
      ``None`` then a project DOI will be created or updated.

    .. warning:: This function only creates a *Draft* DOI and not a public
        one.
    .. warning:: An entity *might* have multiple DOIs, if this is the case and
        :param:`upsert_project_doi` or :param:`upsert_main_entity_doi` are set
        to True then *all* saved dois will be updated.
    .. note:: In theory a single resource *should not* have multiple DOIs but
        we don't know how this will change in the future, hence, we are
        supporting multiple DOIs.
    .. note:: If no :param:`main_entity_uuids` is given then a project DOI
        will be created.

    :param str project_id: Project Id
    :param list main_entity_uuids: uuid strings of main entities.
    :param str project_doi: Custom doi for project.
    :param str main_entity_doi: Custom doi for main entity.
    :param bool upsert_project_doi: Update or insert project doi.
    :param bool upsert_main_entity_doi: Update or insert main entity doi.
    :param int revision: Revision number when drafting a new version of an
        already-published project; pulls titles/authors from the saved
        publication and appends version links to the DataCite payload.
    :param revised_authors: Replacement authors for the revision (per-entity
        mapping keyed by uuid, or the project team order).
    """
    mgr = ProjectsManager(service_account())
    prj = mgr.get_project_by_id(project_id)
    responses = []
    if main_entity_uuids:
        ### Draft Entity DOI(s) ###
        pub = BaseESPublication(project_id=project_id, revision=revision)
        for ent_uuid in main_entity_uuids:
            entity = mgr.get_entity_by_uuid(ent_uuid)
            if entity:
                if revision:
                    # Revisions keep the originally published title/authors;
                    # the landing URL carries the version suffix.
                    entity_url = ENTITY_TARGET_BASE.format(
                        project_id='{}v{}'.format(project_id, revision),
                        entity_uuid=ent_uuid
                    )
                    original_entities = getattr(pub, FIELD_MAP[entity.name])
                    pub_ent = next(
                        ent for ent in original_entities
                        if ent.uuid == ent_uuid
                    )
                    entity.title = pub_ent.value.title
                    entity.authors = revised_authors[ent_uuid]
                else:
                    entity_url = ENTITY_TARGET_BASE.format(
                        project_id=project_id,
                        entity_uuid=ent_uuid
                    )
                ent_datacite_json = entity.to_datacite_json()
                ent_datacite_json['url'] = entity_url
                # ent_datacite_json['version'] = str(revision)
                # omitting version number per Maria
                if upsert_main_entity_doi and main_entity_doi:
                    # A specific DOI was supplied: upsert exactly that one and
                    # record it on the entity (deduplicated).
                    me_res = DataciteManager.create_or_update_doi(
                        ent_datacite_json,
                        main_entity_doi
                    )
                    entity.dois += [main_entity_doi]
                    entity.dois = list(set(entity.dois))
                    entity.save(service_account())
                    responses.append(me_res)
                elif upsert_main_entity_doi and entity.dois:
                    # Update every DOI already saved on the entity.
                    for doi in entity.dois:
                        me_res = DataciteManager.create_or_update_doi(
                            ent_datacite_json,
                            doi
                        )
                        responses.append(me_res)
                elif upsert_main_entity_doi and not entity.dois:
                    # No saved DOI: create a fresh draft and save it.
                    me_res = DataciteManager.create_or_update_doi(
                        ent_datacite_json
                    )
                    entity.dois += [me_res['data']['id']]
                    entity.save(service_account())
                    responses.append(me_res)
    else:
        ### Draft Project DOI ###
        upsert_project_doi = True
        if revision:
            # Versions should not update certain fields
            # Add version number to DataCite info
            prj_url = TARGET_BASE.format(
                project_id='{}v{}'.format(project_id, revision))
            pub = BaseESPublication(project_id=project_id, revision=revision)
            prj.title = pub.project.value.title
            prj.team_order = pub.project.value.teamOrder
            if revised_authors:
                prj.team_order = revised_authors
            prj_datacite_json = prj.to_datacite_json()
            prj_datacite_json['url'] = prj_url
            prj_datacite_json['version'] = str(revision)
            # append links to previous versions in DOI...
            related_identifiers = []
            for ver in range(1, revision):
                # v1 was published under the bare project id; later versions
                # carry a "vN" suffix. (Renamed from `id`, which shadowed the
                # builtin.)
                prev_id = (
                    '{}v{}'.format(project_id, ver) if ver != 1
                    else project_id
                )
                related_identifiers.append(
                    {
                        'relatedIdentifierType': 'URL',
                        'relationType': 'IsNewVersionOf',
                        'relatedIdentifier': TARGET_BASE.format(
                            project_id=prev_id),
                    }
                )
            prj_datacite_json['relatedIdentifiers'] = related_identifiers
        else:
            # format project for publication
            prj_url = TARGET_BASE.format(project_id=project_id)
            prj_datacite_json = prj.to_datacite_json()
            prj_datacite_json['url'] = prj_url

        if upsert_project_doi and project_doi:
            prj_res = DataciteManager.create_or_update_doi(
                prj_datacite_json,
                project_doi
            )
            prj.dois += [project_doi]
            prj.dois = list(set(prj.dois))
            prj.save(service_account())
            responses.append(prj_res)
        elif upsert_project_doi and prj.dois:
            for doi in prj.dois:
                prj_res = DataciteManager.create_or_update_doi(
                    prj_datacite_json,
                    doi
                )
                responses.append(prj_res)
        elif upsert_project_doi and not prj.dois:
            prj_res = DataciteManager.create_or_update_doi(prj_datacite_json)
            prj.dois += [prj_res['data']['id']]
            prj.save(service_account())
            responses.append(prj_res)

    for res in responses:
        logger.info(
            "DOI created or updated: %(doi)s",
            {"doi": res['data']['id']}
        )
    return responses