def post_jdd_from_user(id_user=None, id_organism=None):
    """Post a JDD (dataset) from the MTD XML."""
    xml_jdd = get_jdd_by_user_id(id_user)
    if xml_jdd:
        dataset_list = parse_jdd_xml(xml_jdd)
        dataset_list_model = []
        for ds in dataset_list:
            new_af = post_acquisition_framework(
                uuid=ds['uuid_acquisition_framework'],
                id_user=id_user,
                id_organism=id_organism
            )
            ds['id_acquisition_framework'] = new_af['id_acquisition_framework']
            ds.pop('uuid_acquisition_framework')
            # get the id of the dataset to check whether it already exists
            id_dataset = TDatasets.get_id(ds['unique_dataset_id'])
            ds['id_dataset'] = id_dataset
            dataset = TDatasets(**ds)
            # add the user (id_role) as an actor in cor_dataset_actor
            actor = CorDatasetActor(
                id_role=id_user,
                id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature('ROLE_ACTEUR', '1')
            )
            dataset.cor_dataset_actor.append(actor)
            # add the organism (id_organism) as an actor in cor_dataset_actor
            if id_organism:
                actor = CorDatasetActor(
                    id_organism=id_organism,
                    id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature('ROLE_ACTEUR', '1')
                )
                dataset.cor_dataset_actor.append(actor)
            dataset_list_model.append(dataset)
            try:
                # merge if the dataset already exists, otherwise add it
                if id_dataset:
                    DB.session.merge(dataset)
                else:
                    DB.session.add(dataset)
                DB.session.commit()
                DB.session.flush()
            except SQLAlchemyError as e:
                DB.session.rollback()
                error_msg = "Error posting JDD {}\nTrace:\n{}".format(
                    ds['unique_dataset_id'], e
                )
                log.error(error_msg)
                raise GeonatureApiError(error_msg)
        return [d.as_dict() for d in dataset_list_model]
    return {'message': 'Not found'}, 404
def import_all_dataset_af_and_actors(table_name):
    file_handler = logging.FileHandler('/tmp/uuid_ca.txt')
    file_handler.setLevel(logging.CRITICAL)
    log.addHandler(file_handler)
    datasets = DB.engine.execute(f"SELECT * FROM {table_name}")
    for d in datasets:
        xml_jdd = get_jdd_by_uuid(str(d.unique_dataset_id))
        if xml_jdd:
            ds_list = parse_jdd_xml(xml_jdd)
            if ds_list:
                ds = ds_list[0]
                # get user info from id_digitizer
                inpn_user = get_user_from_id_inpn_ws(ds['id_digitizer'])
                if inpn_user:
                    # insert the user found from id_digitizer
                    insert_user_and_org(inpn_user)
                    actors = ds.pop("actors")
                    new_af = post_acquisition_framework(
                        uuid=ds["uuid_acquisition_framework"],
                    )
                    # get the id from the uuid
                    ds["id_acquisition_framework"] = new_af['id_acquisition_framework']
                    log.critical(str(new_af['id_acquisition_framework']) + ",")
                    ds.pop("uuid_acquisition_framework")
                    # get the id of the dataset to check whether it already exists
                    id_dataset = TDatasets.get_id(ds["unique_dataset_id"])
                    ds["id_dataset"] = id_dataset
                    # resolve nomenclatures
                    ds_copy = copy(ds)
                    for key, value in ds_copy.items():
                        if key.startswith("id_nomenclature"):
                            if value is not None:
                                ds[key] = func.ref_nomenclatures.get_id_nomenclature(
                                    NOMENCLATURE_MAPPING.get(key), value)
                            else:
                                ds.pop(key)
                    # set validable = true
                    ds["validable"] = True
                    dataset = TDatasets(**ds)
                    # if the dataset already exists
                    if id_dataset:
                        # delete existing cor_dataset_actor rows
                        dataset.id_dataset = id_dataset
                        delete_q = CorDatasetActor.__table__.delete().where(
                            CorDatasetActor.id_dataset == id_dataset)
                        DB.session.execute(delete_q)
                        DB.session.commit()
                        # create the correlation links
                        create_cor_object_actors(actors, dataset)
                        add_dataset_module(dataset)
                        DB.session.merge(dataset)
                    # it's a new dataset
                    else:
                        # set the dataset as active
                        dataset.active = True
                        # create the correlation links
                        create_cor_object_actors(actors, dataset)
                        add_dataset_module(dataset)
                        # add the new dataset
                        DB.session.add(dataset)
                    # try to commit
                    try:
                        DB.session.commit()
                    except SQLAlchemyError as e:
                        error_msg = "Error posting a dataset\nTrace:\n{}".format(e)
                        print(error_msg)
                else:
                    print("NO USER FOUND")
            else:
                print("NO JDD IN XML ????")
        else:
            print("JDD NOT FOUND")
def post_jdd_from_user(id_user=None):
    """Post a JDD (dataset) from the MTD XML."""
    xml_jdd = get_jdd_by_user_id(id_user)
    if xml_jdd:
        dataset_list = parse_jdd_xml(xml_jdd)
        posted_af_uuid = {}
        for ds in dataset_list:
            actors = ds.pop("actors")
            # avoid fetching, posting or merging the same acquisition framework multiple times
            if ds["uuid_acquisition_framework"] not in posted_af_uuid:
                new_af = post_acquisition_framework(
                    uuid=ds["uuid_acquisition_framework"],
                )
                # build a cache dict like {<uuid>: id_acquisition_framework}
                posted_af_uuid[ds["uuid_acquisition_framework"]] = new_af[
                    "id_acquisition_framework"]
            # get the id from the uuid
            ds["id_acquisition_framework"] = posted_af_uuid.get(
                ds["uuid_acquisition_framework"])
            ds.pop("uuid_acquisition_framework")
            # get the id of the dataset to check whether it already exists
            id_dataset = TDatasets.get_id(ds["unique_dataset_id"])
            ds["id_dataset"] = id_dataset
            # resolve nomenclatures
            ds_copy = copy(ds)
            for key, value in ds_copy.items():
                if key.startswith("id_nomenclature"):
                    response = DB.session.query(
                        func.ref_nomenclatures.get_id_nomenclature(
                            NOMENCLATURE_MAPPING.get(key), value)).one_or_none()
                    if response and response[0]:
                        ds[key] = response[0]
                    else:
                        ds.pop(key)
            # set validable = true
            ds["validable"] = True
            dataset = TDatasets(**ds)
            # if the dataset already exists
            if id_dataset:
                # delete existing cor_dataset_actor rows
                dataset.id_dataset = id_dataset
                delete_q = CorDatasetActor.__table__.delete().where(
                    CorDatasetActor.id_dataset == id_dataset)
                DB.session.execute(delete_q)
                DB.session.commit()
                # create the correlation links
                create_cor_object_actors(actors, dataset)
                add_dataset_module(dataset)
                DB.session.merge(dataset)
            # it's a new dataset
            else:
                # set the dataset as active
                dataset.active = True
                # create the correlation links
                create_cor_object_actors(actors, dataset)
                add_dataset_module(dataset)
                # add the new dataset
                DB.session.add(dataset)
            # try to commit
            try:
                DB.session.commit()
            except SQLAlchemyError as e:
                error_msg = "Error posting a dataset\nTrace:\n{}".format(e)
                log.error(error_msg)
def post_jdd_from_user(id_user=None, id_organism=None):
    """Post a JDD (dataset) from the MTD XML."""
    xml_jdd = get_jdd_by_user_id(id_user)
    dataset_list_model = []
    if xml_jdd:
        dataset_list = parse_jdd_xml(xml_jdd)
        posted_af_uuid = {}
        for ds in dataset_list:
            # avoid fetching, posting or merging the same acquisition framework multiple times
            if ds['uuid_acquisition_framework'] not in posted_af_uuid:
                new_af = post_acquisition_framework(
                    uuid=ds['uuid_acquisition_framework'],
                    id_user=id_user,
                    id_organism=id_organism)
                # build a cache dict like {<uuid>: id_acquisition_framework}
                posted_af_uuid[ds['uuid_acquisition_framework']] = new_af[
                    'id_acquisition_framework']
            # get the id from the uuid
            ds['id_acquisition_framework'] = posted_af_uuid.get(
                ds['uuid_acquisition_framework'])
            ds.pop('uuid_acquisition_framework')
            # get the id of the dataset to check whether it already exists
            id_dataset = TDatasets.get_id(ds['unique_dataset_id'])
            ds['id_dataset'] = id_dataset
            dataset = TDatasets(**ds)
            # if the dataset already exists
            if id_dataset:
                # check if the user actor already exists
                actor_role = CorDatasetActor.get_actor(
                    id_dataset=id_dataset,
                    id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature(
                        'ROLE_ACTEUR', '1'),
                    id_role=id_user)
                if actor_role is None:
                    actor = CorDatasetActor(
                        id_role=id_user,
                        id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature(
                            'ROLE_ACTEUR', '1'))
                    dataset.cor_dataset_actor.append(actor)
                # check if the organism actor already exists
                organism_role = None
                if id_organism:
                    organism_role = CorDatasetActor.get_actor(
                        id_dataset=id_dataset,
                        id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature(
                            'ROLE_ACTEUR', '1'),
                        id_organism=id_organism)
                    if organism_role is None:
                        actor = CorDatasetActor(
                            id_organism=id_organism,
                            id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature(
                                'ROLE_ACTEUR', '1'))
                        dataset.cor_dataset_actor.append(actor)
                # finally, merge
                DB.session.merge(dataset)
            # the dataset is not yet in the database
            else:
                actor = CorDatasetActor(
                    id_role=id_user,
                    id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature(
                        'ROLE_ACTEUR', '1'))
                dataset.cor_dataset_actor.append(actor)
                # id_organism in cor_dataset_actor
                if id_organism:
                    actor = CorDatasetActor(
                        id_organism=id_organism,
                        id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature(
                            'ROLE_ACTEUR', '1'))
                    dataset.cor_dataset_actor.append(actor)
                DB.session.add(dataset)
            try:
                DB.session.commit()
                dataset_list_model.append(dataset)
            except SQLAlchemyError as e:
                DB.session.rollback()
                error_msg = "Error posting JDD {}\nTrace:\n{}".format(
                    ds['unique_dataset_id'], e)
                log.error(error_msg)
                raise GeonatureApiError(error_msg)
        return [d.as_dict() for d in dataset_list_model]
    return {'message': 'Not found'}, 404
def export_sinp(info_role):
    """ Return the data (CSV) in SINP format from the pr_occtax.export_occtax_sinp view.
    If no parameter is given, return the data of all the datasets allowed for the user.
    params:
        - id_dataset : integer
        - uuid_dataset: uuid
    """
    viewSINP = GenericTable('export_occtax_dlb', 'pr_occtax', None)
    q = DB.session.query(viewSINP.tableDef)
    params = request.args
    allowed_datasets = TDatasets.get_user_datasets(info_role)
    # if params are empty and the user is not admin,
    # get the data of all allowed datasets
    if not params.get('id_dataset') and not params.get('uuid_dataset'):
        if info_role.tag_object_code != '3':
            allowed_uuid = (
                str(TDatasets.get_uuid(id_dataset))
                for id_dataset in allowed_datasets
            )
            q = q.filter(viewSINP.tableDef.columns.jddId.in_(allowed_uuid))
    # filter by dataset id or uuid
    else:
        if 'id_dataset' in params:
            id_dataset = int(params['id_dataset'])
            uuid_dataset = TDatasets.get_uuid(id_dataset)
        elif 'uuid_dataset' in params:
            id_dataset = TDatasets.get_id(params['uuid_dataset'])
            uuid_dataset = params['uuid_dataset']
        # if data_scope is 1 or 2, check that the requested dataset is allowed
        if (info_role.tag_object_code == '1'
                or info_role.tag_object_code == '2'):
            if id_dataset not in allowed_datasets:
                raise InsufficientRightsError(
                    'User "{}" cannot export dataset no "{}"'.format(
                        info_role.id_role, id_dataset),
                    403)
            elif info_role.tag_object_code == '1':
                # join on TCounting, TOccurrence, TReleve and corRoleOccurrence
                # to get the users
                q = q.outerjoin(
                    CorCountingOccurrence,
                    viewSINP.tableDef.columns.permId ==
                    CorCountingOccurrence.unique_id_sinp_occtax
                ).join(
                    TOccurrencesOccurrence,
                    CorCountingOccurrence.id_occurrence_occtax ==
                    TOccurrencesOccurrence.id_occurrence_occtax
                ).join(
                    TRelevesOccurrence,
                    TOccurrencesOccurrence.id_releve_occtax ==
                    TRelevesOccurrence.id_releve_occtax
                ).outerjoin(
                    corRoleRelevesOccurrence,
                    TRelevesOccurrence.id_releve_occtax ==
                    corRoleRelevesOccurrence.columns.id_releve_occtax
                )
                q = q.filter(or_(
                    corRoleRelevesOccurrence.columns.id_role == info_role.id_role,
                    TRelevesOccurrence.id_digitiser == info_role.id_role
                ))
        q = q.filter(viewSINP.tableDef.columns.jddId == str(uuid_dataset))
    data = q.all()
    export_columns = blueprint.config['export_columns']
    file_name = datetime.datetime.now().strftime('%Y-%m-%d-%Hh%Mm%S')
    return (
        filemanager.removeDisallowedFilenameChars(file_name),
        [viewSINP.as_dict(d) for d in data],
        export_columns,
        ';'
    )
def post_jdd_from_user(id_user=None, id_organism=None):
    """Post a JDD (dataset) from the MTD XML."""
    xml_jdd = get_jdd_by_user_id(id_user)
    dataset_list_model = []
    if xml_jdd:
        dataset_list = parse_jdd_xml(xml_jdd)
        posted_af_uuid = {}
        for ds in dataset_list:
            # avoid fetching, posting or merging the same acquisition framework multiple times
            if ds["uuid_acquisition_framework"] not in posted_af_uuid:
                new_af = post_acquisition_framework(
                    uuid=ds["uuid_acquisition_framework"],
                    id_user=id_user,
                    id_organism=id_organism,
                )
                # build a cache dict like {<uuid>: id_acquisition_framework}
                posted_af_uuid[ds["uuid_acquisition_framework"]] = new_af[
                    "id_acquisition_framework"]
            # get the id from the uuid
            ds["id_acquisition_framework"] = posted_af_uuid.get(
                ds["uuid_acquisition_framework"])
            ds.pop("uuid_acquisition_framework")
            # get the id of the dataset to check whether it already exists
            id_dataset = TDatasets.get_id(ds["unique_dataset_id"])
            ds["id_dataset"] = id_dataset
            # resolve nomenclatures
            for key, value in ds.items():
                if key.startswith("id_nomenclature"):
                    ds[key] = func.ref_nomenclatures.get_id_nomenclature(
                        NOMENCLATURE_MAPPING.get(key), value)
            # set validable = true
            ds["validable"] = True
            dataset = TDatasets(**ds)
            # if the dataset already exists
            if id_dataset:
                # check if the user actor already exists
                actor_role = CorDatasetActor.get_actor(
                    id_dataset=id_dataset,
                    id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature(
                        "ROLE_ACTEUR", "1"),
                    id_role=id_user,
                )
                if actor_role is None:
                    actor = CorDatasetActor(
                        id_role=id_user,
                        id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature(
                            "ROLE_ACTEUR", "1"),
                    )
                    dataset.cor_dataset_actor.append(actor)
                # check if the organism actor already exists
                organism_role = None
                if id_organism:
                    organism_role = CorDatasetActor.get_actor(
                        id_dataset=id_dataset,
                        id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature(
                            "ROLE_ACTEUR", "1"),
                        id_organism=id_organism,
                    )
                    if organism_role is None:
                        actor = CorDatasetActor(
                            id_organism=id_organism,
                            id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature(
                                "ROLE_ACTEUR", "1"),
                        )
                        dataset.cor_dataset_actor.append(actor)
                add_dataset_module(dataset)
                # finally, merge
                DB.session.merge(dataset)
            # the dataset is not yet in the database
            else:
                actor = CorDatasetActor(
                    id_role=id_user,
                    id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature(
                        "ROLE_ACTEUR", "1"),
                )
                dataset.cor_dataset_actor.append(actor)
                # id_organism in cor_dataset_actor
                if id_organism:
                    actor = CorDatasetActor(
                        id_organism=id_organism,
                        id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature(
                            "ROLE_ACTEUR", "1"),
                    )
                    dataset.cor_dataset_actor.append(actor)
                add_dataset_module(dataset)
                DB.session.add(dataset)
            try:
                DB.session.commit()
                DB.session.flush()
                dataset_list_model.append(dataset)
            except SQLAlchemyError as e:
                DB.session.rollback()
                error_msg = "Error posting JDD {}\nTrace:\n{}".format(
                    ds["unique_dataset_id"], e)
                log.error(error_msg)
                raise GeonatureApiError(error_msg)
        return [d.as_dict() for d in dataset_list_model]
    return {"message": "Not found"}, 404
def post_jdd_from_user(id_user=None, id_organism=None):
    """Post a JDD (dataset) from the MTD XML."""
    xml_jdd = get_jdd_by_user_id(id_user)
    dataset_list_model = []
    if xml_jdd:
        dataset_list = parse_jdd_xml(xml_jdd)
        posted_af_uuid = {}
        for ds in dataset_list:
            # avoid fetching, posting or merging the same acquisition framework multiple times
            if ds['uuid_acquisition_framework'] not in posted_af_uuid:
                new_af = post_acquisition_framework(
                    uuid=ds['uuid_acquisition_framework'],
                    id_user=id_user,
                    id_organism=id_organism
                )
                # build a cache dict like {<uuid>: id_acquisition_framework}
                posted_af_uuid[ds['uuid_acquisition_framework']] = new_af['id_acquisition_framework']
            # get the id from the uuid
            ds['id_acquisition_framework'] = posted_af_uuid.get(ds['uuid_acquisition_framework'])
            ds.pop('uuid_acquisition_framework')
            # get the id of the dataset to check whether it already exists
            id_dataset = TDatasets.get_id(ds['unique_dataset_id'])
            ds['id_dataset'] = id_dataset
            dataset = TDatasets(**ds)
            # if the dataset already exists
            if id_dataset:
                # check if the user actor already exists
                actor_role = CorDatasetActor.get_actor(
                    id_dataset=id_dataset,
                    id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature('ROLE_ACTEUR', '1'),
                    id_role=id_user
                )
                if actor_role is None:
                    actor = CorDatasetActor(
                        id_role=id_user,
                        id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature('ROLE_ACTEUR', '1')
                    )
                    dataset.cor_dataset_actor.append(actor)
                # check if the organism actor already exists
                organism_role = None
                if id_organism:
                    organism_role = CorDatasetActor.get_actor(
                        id_dataset=id_dataset,
                        id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature('ROLE_ACTEUR', '1'),
                        id_organism=id_organism
                    )
                    if organism_role is None:
                        actor = CorDatasetActor(
                            id_organism=id_organism,
                            id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature('ROLE_ACTEUR', '1')
                        )
                        dataset.cor_dataset_actor.append(actor)
                # finally, merge
                DB.session.merge(dataset)
            # the dataset is not yet in the database
            else:
                actor = CorDatasetActor(
                    id_role=id_user,
                    id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature('ROLE_ACTEUR', '1')
                )
                dataset.cor_dataset_actor.append(actor)
                # id_organism in cor_dataset_actor
                if id_organism:
                    actor = CorDatasetActor(
                        id_organism=id_organism,
                        id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature('ROLE_ACTEUR', '1')
                    )
                    dataset.cor_dataset_actor.append(actor)
                DB.session.add(dataset)
            try:
                DB.session.commit()
                dataset_list_model.append(dataset)
            except SQLAlchemyError as e:
                DB.session.rollback()
                error_msg = "Error posting JDD {}\nTrace:\n{}".format(ds['unique_dataset_id'], e)
                log.error(error_msg)
                raise GeonatureApiError(error_msg)
        return [d.as_dict() for d in dataset_list_model]
    return {'message': 'Not found'}, 404
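
# A minimal usage sketch, not the actual GeoNature endpoint: it assumes this
# module is importable from a Flask application and that authentication /
# retrieval of the connected user happens elsewhere. The blueprint name and
# route path below are illustrative assumptions only.
from flask import Blueprint, jsonify

mtd_sketch = Blueprint("mtd_sketch", __name__)


@mtd_sketch.route("/mtd/sync/<int:id_user>", methods=["POST"])
def sync_user_datasets(id_user):
    # id_organism would normally come from the connected user's organism;
    # it is left to None in this sketch.
    result = post_jdd_from_user(id_user=id_user, id_organism=None)
    if isinstance(result, tuple):
        # (payload, status_code) when no JDD was found for the user
        return jsonify(result[0]), result[1]
    return jsonify(result)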