def create_dataset_user(user):
    """
    After dataset validation, add a personal AF and JDD so the user can add new data.
    """
    af_desc_and_name = "Cadre d'acquisition personnel de {name} {surname}".format(
        name=user["nom_role"], surname=user["prenom_role"]
    )
    # actor roles: "6" = data producer, "1" = main contact (ROLE_ACTEUR nomenclature)
    af_productor = CorAcquisitionFrameworkActor(
        id_role=user["id_role"],
        id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature(
            "ROLE_ACTEUR", "6"
        ),
    )
    af_contact = CorAcquisitionFrameworkActor(
        id_role=user["id_role"],
        id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature(
            "ROLE_ACTEUR", "1"
        ),
    )
    new_af = TAcquisitionFramework(
        acquisition_framework_name=af_desc_and_name,
        acquisition_framework_desc=af_desc_and_name
        + " - auto-créé via la demande de création de compte",
        acquisition_framework_start_date=datetime.datetime.now(),
    )
    new_af.cor_af_actor = [af_productor, af_contact]
    DB.session.add(new_af)
    DB.session.commit()

    ds_desc_and_name = "Jeu de données personnel de {name} {surname}".format(
        name=user["nom_role"], surname=user["prenom_role"]
    )
    ds_productor = CorDatasetActor(
        id_role=user["id_role"],
        id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature(
            "ROLE_ACTEUR", "6"
        ),
    )
    ds_contact = CorDatasetActor(
        id_role=user["id_role"],
        id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature(
            "ROLE_ACTEUR", "1"
        ),
    )
    # add the new JDD: terrestrial and marine domains default to True
    new_dataset = TDatasets(
        id_acquisition_framework=new_af.id_acquisition_framework,
        dataset_name=ds_desc_and_name,
        dataset_shortname=ds_desc_and_name
        + " - auto-créé via la demande de création de compte",
        dataset_desc=ds_desc_and_name,
        marine_domain=True,
        terrestrial_domain=True,
    )
    new_dataset.cor_dataset_actor = [ds_productor, ds_contact]
    DB.session.add(new_dataset)
    DB.session.commit()
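
# A minimal sketch of the `user` mapping create_dataset_user reads, based solely on
# the keys accessed above; the values are illustrative assumptions, not taken from
# the source.
example_user = {
    "id_role": 42,           # hypothetical role id
    "nom_role": "Dupont",    # hypothetical last name
    "prenom_role": "Camille" # hypothetical first name
}
# create_dataset_user(example_user)  # would run inside an app/DB session context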
def post_jdd_from_user(id_user=None, id_organism=None):
    """ Post a jdd from the mtd XML """
    xml_jdd = get_jdd_by_user_id(id_user)
    if xml_jdd:
        dataset_list = parse_jdd_xml(xml_jdd)
        dataset_list_model = []
        for ds in dataset_list:
            new_af = post_acquisition_framework(
                uuid=ds['uuid_acquisition_framework'],
                id_user=id_user,
                id_organism=id_organism
            )
            ds['id_acquisition_framework'] = new_af['id_acquisition_framework']
            ds.pop('uuid_acquisition_framework')
            # get the id of the dataset to check if it already exists
            id_dataset = TDatasets.get_id(ds['unique_dataset_id'])
            ds['id_dataset'] = id_dataset
            dataset = TDatasets(**ds)
            # id_role in cor_dataset_actor
            actor = CorDatasetActor(
                id_role=id_user,
                id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature(
                    'ROLE_ACTEUR', '1')
            )
            dataset.cor_dataset_actor.append(actor)
            # id_organism in cor_dataset_actor
            if id_organism:
                actor = CorDatasetActor(
                    id_organism=id_organism,
                    id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature(
                        'ROLE_ACTEUR', '1')
                )
                dataset.cor_dataset_actor.append(actor)
            dataset_list_model.append(dataset)
            try:
                if id_dataset:
                    DB.session.merge(dataset)
                else:
                    DB.session.add(dataset)
                DB.session.commit()
                DB.session.flush()
                # TODO catch db error ?
            except SQLAlchemyError as e:
                DB.session.rollback()
                error_msg = """ Error posting JDD {} \n\n Trace: \n {} """.format(
                    ds['unique_dataset_id'], e)
                log.error(error_msg)
                raise GeonatureApiError(error_msg)
        return [d.as_dict() for d in dataset_list_model]
    return {'message': 'Not found'}, 404
def post_dataset(info_role):
    """
    Post a dataset

    .. :quickref: Metadata;
    """
    data = dict(request.get_json())
    cor_dataset_actor = data.pop("cor_dataset_actor")
    modules = data.pop("modules")
    dataset = TDatasets(**data)
    for cor in cor_dataset_actor:
        # remove id_cda if None, otherwise merge() does not work well
        if "id_cda" in cor and cor.get("id_cda") is None:
            cor.pop("id_cda")
        dataset.cor_dataset_actor.append(CorDatasetActor(**cor))
    # set the modules relationship from the posted module ids
    modules_obj = DB.session.query(TModules).filter(TModules.id_module.in_(modules)).all()
    dataset.modules = modules_obj
    if dataset.id_dataset:
        DB.session.merge(dataset)
    else:
        # add id_digitiser only on creation
        dataset.id_digitizer = info_role.id_role
        DB.session.add(dataset)
    DB.session.commit()
    return dataset.as_dict(True)
def post_dataset(info_role):
    """
    Post a dataset

    .. :quickref: Metadata;
    """
    if info_role.value_filter == "0":
        raise InsufficientRightsError(
            'User "{}" cannot "{}" a dataset'.format(
                info_role.id_role, info_role.code_action
            ),
            403,
        )
    data = dict(request.get_json())
    cor_dataset_actor = data.pop("cor_dataset_actor")
    modules = data.pop("modules")
    dataset = TDatasets(**data)
    for cor in cor_dataset_actor:
        # remove id_cda if None, otherwise merge() does not work well
        if "id_cda" in cor and cor["id_cda"] is None:
            cor.pop("id_cda")
        dataset.cor_dataset_actor.append(CorDatasetActor(**cor))
    # set the modules relationship from the posted module ids
    modules_obj = (
        DB.session.query(TModules).filter(TModules.id_module.in_(modules)).all()
    )
    dataset.modules = modules_obj
    if dataset.id_dataset:
        DB.session.merge(dataset)
    else:
        DB.session.add(dataset)
    DB.session.commit()
    return dataset.as_dict(True)
def post_dataset(info_role):
    if info_role.value_filter == "0":
        raise InsufficientRightsError(
            'User "{}" cannot "{}" a dataset'.format(
                info_role.id_role, info_role.code_action
            ),
            403,
        )
    data = dict(request.get_json())
    cor_dataset_actor = data.pop("cor_dataset_actor")
    dataset = TDatasets(**data)
    for cor in cor_dataset_actor:
        # remove id_cda if None, otherwise merge() does not work well
        if "id_cda" in cor and cor["id_cda"] is None:
            cor.pop("id_cda")
        dataset.cor_dataset_actor.append(CorDatasetActor(**cor))
    if dataset.id_dataset:
        DB.session.merge(dataset)
    else:
        DB.session.add(dataset)
    DB.session.commit()
    return dataset.as_dict(True)
def post_dataset():
    data = dict(request.get_json())
    cor_dataset_actor = data.pop('cor_dataset_actor')
    dataset = TDatasets(**data)
    for cor in cor_dataset_actor:
        dataset.cor_dataset_actor.append(CorDatasetActor(**cor))
    if dataset.id_dataset:
        DB.session.merge(dataset)
    else:
        DB.session.add(dataset)
    DB.session.commit()
    return dataset.as_dict(True)
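
# A minimal sketch of a JSON body this handler might receive, assuming a Flask test
# client and a route such as "/meta/dataset" (both the route and the field values
# below are illustrative assumptions, not taken from the source). The payload mirrors
# the keys the handler reads: TDatasets columns plus a "cor_dataset_actor" list used
# to build CorDatasetActor rows.
example_payload = {
    "dataset_name": "Jeu de données test",
    "dataset_shortname": "JDD test",
    "dataset_desc": "Jeu de données test",
    "marine_domain": False,
    "terrestrial_domain": True,
    "cor_dataset_actor": [
        # hypothetical ids for the actor role (role id and ROLE_ACTEUR nomenclature id)
        {"id_role": 1, "id_nomenclature_actor_role": 394},
    ],
}
# response = client.post("/meta/dataset", json=example_payload)  # hypothetical route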
def create_dataset(name, digitizer=None):
    # `af` and `principal_actor_role` are expected to be defined in the enclosing scope
    with db.session.begin_nested():
        dataset = TDatasets(
            id_acquisition_framework=af.id_acquisition_framework,
            dataset_name=name,
            dataset_shortname=name,
            dataset_desc=name,
            marine_domain=True,
            terrestrial_domain=True,
            id_digitizer=digitizer.id_role if digitizer else None,
        )
        db.session.add(dataset)
        if digitizer and digitizer.organisme:
            actor = CorDatasetActor(
                organism=digitizer.organisme,
                nomenclature_actor_role=principal_actor_role,
            )
            dataset.cor_dataset_actor.append(actor)
    return dataset
def create_cor_object_actors(actors, new_object):
    """
    Create a new cor_dataset_actor/cor_acquisition_framework_actor object for the JDD/AF

    Input:
        actors (list): list of all actors related to the JDD/AF
        new_object: JDD or AF
    """
    for act in actors:
        # person = None
        # id_person = None
        org = None
        id_organism = None

        # For the moment we do not match the user with the actor provided by the XML -> only the organism.
        # If the email of the contact Person was provided in the XML file, we try to link it to the t_role table.
        # if act["email"]:
        #     # We first check if the Person's email exists in the t_role table
        #     person = (
        #         DB.session.query(User)
        #         .filter(User.email == act["email"])
        #         .first()
        #     )
        #     # If not, we create it as a new Person in the t_role table and get its ID back
        #     if not person:
        #         if act["uuid_organism"]:
        #             org = (
        #                 DB.session.query(BibOrganismes)
        #                 .filter(BibOrganismes.uuid_organisme == act["uuid_organism"])
        #                 .first()
        #             )
        #         person = {
        #             "id_role": None,
        #             "nom_role": act["name"],
        #             "email": act["email"],
        #         }
        #         if org:
        #             person['id_organisme'] = org.id_organisme
        #         resp = users.insert_role(person)
        #         id_person = json.loads(resp.data.decode('utf-8'))['id_role']
        #     else:
        #         id_person = person.id_role

        # If information about the Organism is provided, we try to link it to the bib_organismes table
        if act["uuid_organism"] or act["organism"]:
            # UUID is currently only present in JDD XML files.
            # Filtering on UUID is preferable when available, since it avoids duplicates caused by name changes.
            if act["uuid_organism"]:
                org = (
                    DB.session.query(BibOrganismes)
                    .filter(BibOrganismes.uuid_organisme == act["uuid_organism"])
                    .first()
                )
            else:
                org = (
                    DB.session.query(BibOrganismes)
                    .filter(BibOrganismes.nom_organisme == act["organism"])
                    .first()
                )
            # If no matching Organism exists in the bib_organismes table, we add it
            if not org:
                org = BibOrganismes(
                    **{
                        "nom_organisme": act["organism"],
                        "uuid_organisme": act["uuid_organism"],
                    }
                )
                DB.session.add(org)
                DB.session.commit()
            id_organism = org.id_organisme

        # If at least the Person or the Organism was provided for the actor in the XML file,
        # we build the data for the correlation
        if id_organism:
            dict_cor = {
                "id_organism": id_organism,
                "id_nomenclature_actor_role": func.ref_nomenclatures.get_id_nomenclature(
                    "ROLE_ACTEUR", act["actor_role"]
                ),
            }
            # We finally build the correlation corresponding to the JDD/AF,
            # skipping actors already attached to the object
            if isinstance(new_object, TAcquisitionFramework):
                if not any(
                    map(
                        lambda cafa: dict_cor["id_organism"] == cafa.id_organism
                        and act["actor_role"]
                        == cafa.id_nomenclature_actor_role.clauses.clauses[1].value,
                        new_object.cor_af_actor,
                    )
                ):
                    cor_actor = CorAcquisitionFrameworkActor(**dict_cor)
                    new_object.cor_af_actor.append(cor_actor)
            elif isinstance(new_object, TDatasets):
                if not any(
                    map(
                        lambda ca: dict_cor["id_organism"] == ca.id_organism
                        and act["actor_role"]
                        == ca.id_nomenclature_actor_role.clauses.clauses[1].value,
                        new_object.cor_dataset_actor,
                    )
                ):
                    cor_actor = CorDatasetActor(**dict_cor)
                    new_object.cor_dataset_actor.append(cor_actor)
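
# A minimal sketch of the `actors` structure create_cor_object_actors appears to
# expect, inferred from the keys read above ("email", "name", "uuid_organism",
# "organism", "actor_role"); the concrete values are illustrative assumptions.
example_actors = [
    {
        "email": "contact@example.org",                          # hypothetical
        "name": "Nom du contact",                                # hypothetical
        "uuid_organism": "11111111-2222-3333-4444-555555555555", # hypothetical
        "organism": "Mon organisme",                             # hypothetical
        "actor_role": "1",  # ROLE_ACTEUR nomenclature code
    },
]
# create_cor_object_actors(example_actors, new_af)  # new_af: a TAcquisitionFramework instance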
def create_dataset_user(user):
    """
    After dataset validation, add a personal AF and JDD so the user can add new data.
    """
    af_desc_and_name = "Cadre d'acquisition personnel de {name} {surname}".format(
        name=user["nom_role"], surname=user["prenom_role"]
    )
    # actor roles: "6" = data producer, "1" = main contact (ROLE_ACTEUR nomenclature)
    af_productor = CorAcquisitionFrameworkActor(
        id_role=user["id_role"],
        id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature(
            "ROLE_ACTEUR", "6"
        ),
    )
    af_contact = CorAcquisitionFrameworkActor(
        id_role=user["id_role"],
        id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature(
            "ROLE_ACTEUR", "1"
        ),
    )
    new_af = TAcquisitionFramework(
        acquisition_framework_name=af_desc_and_name,
        acquisition_framework_desc=af_desc_and_name
        + " - auto-créé via la demande de création de compte",
        acquisition_framework_start_date=datetime.datetime.now(),
    )
    new_af.cor_af_actor = [af_productor, af_contact]
    db.session.add(new_af)

    ds_desc_and_name = "Jeu de données personnel de {name} {surname}".format(
        name=user["nom_role"], surname=user["prenom_role"]
    )
    ds_productor = CorDatasetActor(
        id_role=user["id_role"],
        id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature(
            "ROLE_ACTEUR", "6"
        ),
    )
    ds_contact = CorDatasetActor(
        id_role=user["id_role"],
        id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature(
            "ROLE_ACTEUR", "1"
        ),
    )
    # add the new JDD: terrestrial and marine domains default to True
    new_dataset = TDatasets(
        acquisition_framework=new_af,
        dataset_name=ds_desc_and_name,
        dataset_shortname=ds_desc_and_name
        + " - auto-créé via la demande de création de compte",
        dataset_desc=ds_desc_and_name,
        marine_domain=True,
        terrestrial_domain=True,
    )
    new_dataset.cor_dataset_actor = [ds_productor, ds_contact]
    db.session.add(new_dataset)

    # associate the dataset with the modules configured for account management
    for module_code in current_app.config["ACCOUNT_MANAGEMENT"]["DATASET_MODULES_ASSOCIATION"]:
        module = TModules.query.filter_by(module_code=module_code).one_or_none()
        if module is None:
            warn(
                "Module code '{}' does not exist, can not associate dataset.".format(
                    module_code
                )
            )
            continue
        new_dataset.modules.append(module)

    db.session.commit()
def post_jdd_from_user(id_user=None, id_organism=None):
    """ Post a jdd from the mtd XML """
    xml_jdd = get_jdd_by_user_id(id_user)
    dataset_list_model = []
    if xml_jdd:
        dataset_list = parse_jdd_xml(xml_jdd)
        posted_af_uuid = {}
        for ds in dataset_list:
            # avoid fetching, posting or merging the same acquisition framework several times
            if ds['uuid_acquisition_framework'] not in posted_af_uuid:
                new_af = post_acquisition_framework(
                    uuid=ds['uuid_acquisition_framework'],
                    id_user=id_user,
                    id_organism=id_organism)
                # build a cache dict like {'<uuid>': id_acquisition_framework}
                posted_af_uuid[ds['uuid_acquisition_framework']] = new_af[
                    'id_acquisition_framework']
            # get the id from the uuid
            ds['id_acquisition_framework'] = posted_af_uuid.get(
                ds['uuid_acquisition_framework'])
            ds.pop('uuid_acquisition_framework')
            # get the id of the dataset to check whether it already exists
            id_dataset = TDatasets.get_id(ds['unique_dataset_id'])
            ds['id_dataset'] = id_dataset
            dataset = TDatasets(**ds)
            # if the dataset already exists
            if id_dataset:
                # check if the actor already exists
                actor_role = CorDatasetActor.get_actor(
                    id_dataset=id_dataset,
                    id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature(
                        'ROLE_ACTEUR', '1'),
                    id_role=id_user)
                if actor_role is None:
                    actor = CorDatasetActor(
                        id_role=id_user,
                        id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature(
                            'ROLE_ACTEUR', '1'))
                    dataset.cor_dataset_actor.append(actor)
                organism_role = None
                if id_organism:
                    organism_role = CorDatasetActor.get_actor(
                        id_dataset=id_dataset,
                        id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature(
                            'ROLE_ACTEUR', '1'),
                        id_organism=id_organism)
                    if organism_role is None:
                        actor = CorDatasetActor(
                            id_organism=id_organism,
                            id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature(
                                'ROLE_ACTEUR', '1'))
                        dataset.cor_dataset_actor.append(actor)
                # finally merge
                DB.session.merge(dataset)
            # if the dataset is not already in the database
            else:
                actor = CorDatasetActor(
                    id_role=id_user,
                    id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature(
                        'ROLE_ACTEUR', '1'))
                dataset.cor_dataset_actor.append(actor)
                # id_organism in cor_dataset_actor
                if id_organism:
                    actor = CorDatasetActor(
                        id_organism=id_organism,
                        id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature(
                            'ROLE_ACTEUR', '1'))
                    dataset.cor_dataset_actor.append(actor)
                DB.session.add(dataset)
            try:
                DB.session.commit()
                dataset_list_model.append(dataset)
            except SQLAlchemyError as e:
                # roll back the failed transaction before reporting the error
                DB.session.rollback()
                error_msg = """ Error posting JDD {} \n\n Trace: \n {} """.format(
                    ds['unique_dataset_id'], e)
                log.error(error_msg)
                raise GeonatureApiError(error_msg)
        return [d.as_dict() for d in dataset_list_model]
    return {'message': 'Not found'}, 404
def post_jdd_from_user(id_user=None, id_organism=None):
    """ Post a jdd from the mtd XML """
    xml_jdd = get_jdd_by_user_id(id_user)
    dataset_list_model = []
    if xml_jdd:
        dataset_list = parse_jdd_xml(xml_jdd)
        posted_af_uuid = {}
        for ds in dataset_list:
            # avoid fetching, posting or merging the same acquisition framework several times
            if ds["uuid_acquisition_framework"] not in posted_af_uuid:
                new_af = post_acquisition_framework(
                    uuid=ds["uuid_acquisition_framework"],
                    id_user=id_user,
                    id_organism=id_organism,
                )
                # build a cache dict like {'<uuid>': id_acquisition_framework}
                posted_af_uuid[ds["uuid_acquisition_framework"]] = new_af[
                    "id_acquisition_framework"]
            # get the id from the uuid
            ds["id_acquisition_framework"] = posted_af_uuid.get(
                ds["uuid_acquisition_framework"])
            ds.pop("uuid_acquisition_framework")
            # get the id of the dataset to check whether it already exists
            id_dataset = TDatasets.get_id(ds["unique_dataset_id"])
            ds["id_dataset"] = id_dataset
            # resolve nomenclature codes to ids
            for key, value in ds.items():
                if key.startswith("id_nomenclature"):
                    ds[key] = func.ref_nomenclatures.get_id_nomenclature(
                        NOMENCLATURE_MAPPING.get(key), value)
            # set validable = True
            ds["validable"] = True
            dataset = TDatasets(**ds)
            # if the dataset already exists
            if id_dataset:
                # check if the actor already exists
                actor_role = CorDatasetActor.get_actor(
                    id_dataset=id_dataset,
                    id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature(
                        "ROLE_ACTEUR", "1"),
                    id_role=id_user,
                )
                if actor_role is None:
                    actor = CorDatasetActor(
                        id_role=id_user,
                        id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature(
                            "ROLE_ACTEUR", "1"),
                    )
                    dataset.cor_dataset_actor.append(actor)
                organism_role = None
                if id_organism:
                    organism_role = CorDatasetActor.get_actor(
                        id_dataset=id_dataset,
                        id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature(
                            "ROLE_ACTEUR", "1"),
                        id_organism=id_organism,
                    )
                    if organism_role is None:
                        actor = CorDatasetActor(
                            id_organism=id_organism,
                            id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature(
                                "ROLE_ACTEUR", "1"),
                        )
                        dataset.cor_dataset_actor.append(actor)
                add_dataset_module(dataset)
                # finally merge
                DB.session.merge(dataset)
            # if the dataset is not already in the database
            else:
                actor = CorDatasetActor(
                    id_role=id_user,
                    id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature(
                        "ROLE_ACTEUR", "1"),
                )
                dataset.cor_dataset_actor.append(actor)
                # id_organism in cor_dataset_actor
                if id_organism:
                    actor = CorDatasetActor(
                        id_organism=id_organism,
                        id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature(
                            "ROLE_ACTEUR", "1"),
                    )
                    dataset.cor_dataset_actor.append(actor)
                add_dataset_module(dataset)
                DB.session.add(dataset)
            try:
                DB.session.commit()
                DB.session.flush()
                dataset_list_model.append(dataset)
            except SQLAlchemyError as e:
                DB.session.rollback()
                error_msg = """ Error posting JDD {} \n\n Trace: \n {} """.format(
                    ds["unique_dataset_id"], e)
                log.error(error_msg)
                raise GeonatureApiError(error_msg)
        return [d.as_dict() for d in dataset_list_model]
    return {"message": "Not found"}, 404
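
# The nomenclature-resolution loop above assumes a module-level NOMENCLATURE_MAPPING
# dict mapping a TDatasets nomenclature column name to its nomenclature type code.
# A hypothetical sketch of its shape (the entries below are illustrative assumptions,
# not taken from the source above):
NOMENCLATURE_MAPPING = {
    "id_nomenclature_data_type": "DATA_TYP",          # assumed mapping
    "id_nomenclature_dataset_objectif": "JDD_OBJECTIFS",  # assumed mapping
}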
def post_jdd_from_user(id_user=None, id_organism=None):
    """ Post a jdd from the mtd XML """
    xml_jdd = get_jdd_by_user_id(id_user)
    dataset_list_model = []
    if xml_jdd:
        dataset_list = parse_jdd_xml(xml_jdd)
        posted_af_uuid = {}
        for ds in dataset_list:
            # avoid fetching, posting or merging the same acquisition framework several times
            if ds['uuid_acquisition_framework'] not in posted_af_uuid:
                new_af = post_acquisition_framework(
                    uuid=ds['uuid_acquisition_framework'],
                    id_user=id_user,
                    id_organism=id_organism
                )
                # build a cache dict like {'<uuid>': id_acquisition_framework}
                posted_af_uuid[ds['uuid_acquisition_framework']] = new_af['id_acquisition_framework']
            # get the id from the uuid
            ds['id_acquisition_framework'] = posted_af_uuid.get(ds['uuid_acquisition_framework'])
            ds.pop('uuid_acquisition_framework')
            # get the id of the dataset to check whether it already exists
            id_dataset = TDatasets.get_id(ds['unique_dataset_id'])
            ds['id_dataset'] = id_dataset
            dataset = TDatasets(**ds)
            # if the dataset already exists
            if id_dataset:
                # check if the actor already exists
                actor_role = CorDatasetActor.get_actor(
                    id_dataset=id_dataset,
                    id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature('ROLE_ACTEUR', '1'),
                    id_role=id_user
                )
                if actor_role is None:
                    actor = CorDatasetActor(
                        id_role=id_user,
                        id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature('ROLE_ACTEUR', '1')
                    )
                    dataset.cor_dataset_actor.append(actor)
                organism_role = None
                if id_organism:
                    organism_role = CorDatasetActor.get_actor(
                        id_dataset=id_dataset,
                        id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature('ROLE_ACTEUR', '1'),
                        id_organism=id_organism
                    )
                    if organism_role is None:
                        actor = CorDatasetActor(
                            id_organism=id_organism,
                            id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature('ROLE_ACTEUR', '1')
                        )
                        dataset.cor_dataset_actor.append(actor)
                # finally merge
                DB.session.merge(dataset)
            # if the dataset is not already in the database
            else:
                actor = CorDatasetActor(
                    id_role=id_user,
                    id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature('ROLE_ACTEUR', '1')
                )
                dataset.cor_dataset_actor.append(actor)
                # id_organism in cor_dataset_actor
                if id_organism:
                    actor = CorDatasetActor(
                        id_organism=id_organism,
                        id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature('ROLE_ACTEUR', '1')
                    )
                    dataset.cor_dataset_actor.append(actor)
                DB.session.add(dataset)
            try:
                DB.session.commit()
                dataset_list_model.append(dataset)
            except SQLAlchemyError as e:
                # roll back the failed transaction before reporting the error
                DB.session.rollback()
                error_msg = """ Error posting JDD {} \n\n Trace: \n {} """.format(ds['unique_dataset_id'], e)
                log.error(error_msg)
                raise GeonatureApiError(error_msg)
        return [d.as_dict() for d in dataset_list_model]
    return {'message': 'Not found'}, 404