Example 1
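(Both examples below assume the context of their original module: logging and copy from the standard library, func and SQLAlchemyError from SQLAlchemy, plus the application's DB session, log logger, the TDatasets and CorDatasetActor models, the NOMENCLATURE_MAPPING dict, and the MTD helper functions they call.)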
def import_all_dataset_af_and_actors(table_name):
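    # write the ids of created acquisition frameworks to /tmp/uuid_ca.txt;
    # the CRITICAL level keeps ordinary log output out of that file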
    file_handler = logging.FileHandler('/tmp/uuid_ca.txt')
    file_handler.setLevel(logging.CRITICAL)
    log.addHandler(file_handler)
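    # table_name is interpolated directly into the query:
    # it must come from trusted code, never from user input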
    datasets = DB.engine.execute(f"SELECT * FROM {table_name}")
    for d in datasets:
        xml_jdd = get_jdd_by_uuid(str(d.unique_dataset_id))
        if xml_jdd:
            ds_list = parse_jdd_xml(xml_jdd)
            if ds_list:
                ds = ds_list[0]
                # get user info from the id_digitizer via the INPN web service
                inpn_user = get_user_from_id_inpn_ws(ds['id_digitizer'])
                if inpn_user:
                    # insert the digitizer user and their organism
                    insert_user_and_org(inpn_user)
                    actors = ds.pop("actors")
                    # post the acquisition framework from its UUID
                    new_af = post_acquisition_framework(
                        uuid=ds["uuid_acquisition_framework"])
                    # get the id from the uuid
                    ds["id_acquisition_framework"] = new_af[
                        'id_acquisition_framework']
                    log.critical(str(new_af['id_acquisition_framework']) + ",")
                    ds.pop("uuid_acquisition_framework")
                    # get the dataset id to check whether it already exists
                    id_dataset = TDatasets.get_id(ds["unique_dataset_id"])
                    ds["id_dataset"] = id_dataset
                    # resolve nomenclature codes to ids
                    ds_copy = copy(ds)
                    for key, value in ds_copy.items():
                        if key.startswith("id_nomenclature"):
                            if value is not None:
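                                # func.<schema>.<fn> builds a server-side SQL expression;
                                # PostgreSQL resolves the nomenclature id at insert time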
                                ds[key] = func.ref_nomenclatures.get_id_nomenclature(
                                    NOMENCLATURE_MAPPING.get(key), value)
                            else:
                                ds.pop(key)

                    # set validable = true
                    ds["validable"] = True
                    dataset = TDatasets(**ds)
                    # if the dataset already exists
                    if id_dataset:
                        dataset.id_dataset = id_dataset
                        # delete existing cor_dataset_actor rows; they are recreated below
                        delete_q = CorDatasetActor.__table__.delete().where(
                            CorDatasetActor.id_dataset == id_dataset)
                        DB.session.execute(delete_q)
                        DB.session.commit()

                        # create the correlation links
                        create_cor_object_actors(actors, dataset)
                        add_dataset_module(dataset)
                        DB.session.merge(dataset)

                    # it's a new dataset
                    else:
                        # set the dataset as active
                        dataset.active = True
                        # create the correlation links
                        create_cor_object_actors(actors, dataset)
                        add_dataset_module(dataset)
                        # Add the new DS
                        DB.session.add(dataset)
                    # try to commit
                    try:
                        DB.session.commit()
                    except SQLAlchemyError as e:
                        # roll back so the session stays usable for the next dataset
                        DB.session.rollback()
                        error_msg = "Error posting a dataset\nTrace:\n{}\n\n".format(e)
                        print(error_msg)
                else:
                    print("NO USER FOUND")
            else:
                "NO JDD IN XML ????"
        else:
            print("JDD NOT FOUND")
Example 2
def post_jdd_from_user(id_user=None):
    """ Post a jdd from the mtd XML"""
    xml_jdd = None
    xml_jdd = get_jdd_by_user_id(id_user)
    if xml_jdd:
        dataset_list = parse_jdd_xml(xml_jdd)
        posted_af_uuid = {}
        for ds in dataset_list:
            actors = ds.pop("actors")
            # avoid fetching, posting or merging the same acquisition framework multiple times
            if ds["uuid_acquisition_framework"] not in posted_af_uuid:
                new_af = post_acquisition_framework(
                    uuid=ds["uuid_acquisition_framework"])
                # cache the result as {<uuid>: <id_acquisition_framework>}
                posted_af_uuid[ds["uuid_acquisition_framework"]] = new_af[
                    "id_acquisition_framework"]
            # get the id from the uuid
            ds["id_acquisition_framework"] = posted_af_uuid.get(
                ds["uuid_acquisition_framework"])

            ds.pop("uuid_acquisition_framework")
            # get the dataset id to check whether it already exists
            id_dataset = TDatasets.get_id(ds["unique_dataset_id"])
            ds["id_dataset"] = id_dataset
            # resolve nomenclature codes to ids
            ds_copy = copy(ds)
            for key, value in ds_copy.items():
                if key.startswith("id_nomenclature"):
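                    # unlike Example 1, the nomenclature id is resolved immediately
                    # by querying the DB function through the session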
                    response = DB.session.query(
                        func.ref_nomenclatures.get_id_nomenclature(
                            NOMENCLATURE_MAPPING.get(key),
                            value)).one_or_none()
                    if response and response[0]:
                        ds[key] = response[0]
                    else:
                        ds.pop(key)

            # set validable = true
            ds["validable"] = True
            dataset = TDatasets(**ds)
            # if the dataset already exists
            if id_dataset:
                dataset.id_dataset = id_dataset
                # delete existing cor_dataset_actor rows; they are recreated below
                delete_q = CorDatasetActor.__table__.delete().where(
                    CorDatasetActor.id_dataset == id_dataset)
                DB.session.execute(delete_q)
                DB.session.commit()

                # create the correlation links
                create_cor_object_actors(actors, dataset)
                add_dataset_module(dataset)
                DB.session.merge(dataset)

            # it's a new dataset
            else:
                # set the dataset as active
                dataset.active = True
                # create the correlation links
                create_cor_object_actors(actors, dataset)
                add_dataset_module(dataset)
                # Add the new DS
                DB.session.add(dataset)
            # try to commit
            try:
                DB.session.commit()
            except SQLAlchemyError as e:
                # roll back so the session stays usable for the next dataset
                DB.session.rollback()
                error_msg = "Error posting a dataset\nTrace:\n{}\n\n".format(e)
                log.error(error_msg)
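For context, post_jdd_from_user could be exposed over HTTP; a minimal sketch, assuming a Flask Blueprint (the blueprint name, URL and response are assumptions, not the original routing):

from flask import Blueprint

routes = Blueprint("mtd", __name__)  # hypothetical blueprint

@routes.route("/mtd/jdd/<int:id_user>", methods=["POST"])
def import_jdd_from_user(id_user):
    # trigger the MTD import for this user, then acknowledge
    post_jdd_from_user(id_user=id_user)
    return "OK"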