Example #1
def test_import_submission(celery_session_worker, partners, gdpr_roles,
                           can_defer_constraint_checks):
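    # End-to-end check of DishSubmissionImporter: import a DISH submission JSON
    # fixture and verify the resulting Dataset, its project and its custodians.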
    VIP = factories.VIPGroup()
    reinhard = factories.UserFactory.create(first_name='Rene',
                                            last_name='Sahoo',
                                            groups=[VIP])

    factories.UserFactory.create(first_name='Elgin',
                                 last_name='Gray',
                                 groups=[VIP])
    factories.UserFactory.create(first_name='Rob',
                                 last_name='Blue',
                                 groups=[VIP])
    elixir_project = factories.ProjectFactory.create(
        acronym='ELIXIR', title='ELIXIR', local_custodians=[reinhard])
    dataset_json = os.path.join(TEST_DATA_PATH, "ELX_LU_SUB-1.json")

    with open(dataset_json, "r") as file_with_dataset:
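        # Parse the submission fixture and import it under the 'ELIXIR' project.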
        importer = DishSubmissionImporter(elixir_project.title)
        importer.import_json(file_with_dataset.read(), True, True)
    assert 1 == Dataset.objects.all().count()
    # assert 2 == Project.objects.all().count()
    dataset = Dataset.objects.first()
    assert 'ELX_LU_SUB-1' == dataset.title
    # assert 2 == dataset.data_declarations.all().count()
    # TODO finalise Submission importer once elixir-dcp i.e. DISH goes into production.
    # Mapping from DISH to DAISY not yet complete...
    assert 'ELIXIR' == dataset.project.title
    assert 2 == dataset.local_custodians.all().count()
    assert ["Elgin Gray", "Rob Blue"] == [
        custodian.full_name for custodian in dataset.local_custodians.all()
    ]
Example #2
def test_export_datasets(celery_session_worker, contact_types, partners,
                         gdpr_roles, storage_resources,
                         can_defer_constraint_checks):
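    # Exercise DatasetsExporter: create a project with one dataset, export to an
    # in-memory buffer and check the serialised JSON items.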

    VIP = factories.VIPGroup()
    rebecca = factories.UserFactory.create(first_name='Rebecca',
                                           last_name='Kafe',
                                           groups=[VIP])
    embury = factories.UserFactory.create(first_name='Embury',
                                          last_name='Bask')

    a_project = factories.ProjectFactory.create(
        acronym='Test_PRJ',
        title='Title of test project.',
        local_custodians=[rebecca, embury])
    a_dataset = factories.DatasetFactory.create(
        title='A test dataset',
        project=a_project,
        local_custodians=[rebecca, embury])

    # Export every dataset to JSON held in an in-memory StringIO buffer.
    exp = DatasetsExporter()
    buffer = exp.export_to_buffer(StringIO())

    export_data = json.loads(buffer.getvalue())
    dataset_dicts = export_data['items']
    assert 1 == len(dataset_dicts)
    assert "A test dataset" == dataset_dicts[0]['name']
    assert "Test_PRJ" == dataset_dicts[0]['project']
Example #3
def test_import_datasets(celery_session_worker, storage_resources, partners,
                         gdpr_roles, can_defer_constraint_checks):
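    # Import the datasets.json fixture with DatasetsImporter and verify the
    # datasets it creates, together with their custodians, locations and shares.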
    VIP = factories.VIPGroup()

    factories.UserFactory.create(first_name='Igor',
                                 last_name='Teal',
                                 groups=[VIP])
    factories.UserFactory.create(first_name='Joanne',
                                 last_name='Swift',
                                 groups=[VIP])
    factories.UserFactory.create(first_name='Elgin',
                                 last_name='Gray',
                                 groups=[VIP])
    factories.UserFactory.create(first_name='Paul',
                                 last_name='Mauve',
                                 groups=[VIP])
    factories.UserFactory.create(first_name='Rob',
                                 last_name='Blue',
                                 groups=[VIP])
    factories.UserFactory.create(first_name='Ali',
                                 last_name='Gator',
                                 groups=[VIP])

    data_file = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                             "../data/datasets.json")
    with open(data_file, "r") as f:
        importer = DatasetsImporter()
        importer.import_json(f.read(), True)
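    # The fixture describes five datasets spread over four projects.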
    assert 5 == Dataset.objects.all().count()
    assert 4 == Project.objects.all().count()

    d1 = Dataset.objects.filter(title='ABCD data').first()
    assert ["Igor Teal"] == [
        custodian.full_name for custodian in d1.local_custodians.all()
    ]
    assert 1 == d1.data_locations.all().count()
    shares = d1.shares.all()
    share1 = shares[0]
    assert 1 == shares.count()

    d2 = Dataset.objects.filter(title='Hypertension data').first()
    assert ["Joanne Swift"] == [
        employee.full_name for employee in d2.local_custodians.all()
    ]
    assert "Hypertension" == d2.project.acronym
    assert 1 == d2.data_locations.all().count()

    d3 = Dataset.objects.filter(title='MDPDP data').first()
    assert ["Rob Blue"] == [
        employee.full_name for employee in d3.local_custodians.all()
    ]
    assert 2 == d3.data_locations.all().count()

    d4 = Dataset.objects.filter(title='PD data').first()
    assert ["Ali Gator"] == [
        employee.full_name for employee in d4.local_custodians.all()
    ]
    assert 7 == d4.data_locations.all().count()
Example #4
def test_import_projects(celery_session_worker, contact_types, partners):
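    # Import the projects.json fixture with ProjectsImporter and verify the two
    # projects it creates, including custodians, personnel and publications.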

    VIP = factories.VIPGroup()

    factories.UserFactory.create(first_name='Julia',
                                 last_name='Crayon',
                                 groups=[VIP])
    factories.UserFactory.create(first_name='Rebecca',
                                 last_name='Kafe',
                                 groups=[VIP])
    factories.UserFactory.create(first_name='Embury', last_name='Bask')

    factories.UserFactory.create(first_name='Colman',
                                 last_name='Level',
                                 groups=[VIP])
    factories.UserFactory.create(first_name='Nic',
                                 last_name='Purple',
                                 groups=[VIP])
    factories.UserFactory.create(first_name='James', last_name='BK')

    projects_json = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                 "../data/projects.json")
    importer = ProjectsImporter()
    importer.import_json_file(projects_json, True)

    projects = Project.objects.all()
    assert 2 == projects.count()
    project1 = Project.objects.filter(
        acronym='In vitro disease modeling').first()
    assert ["Joanne Swift", "Rebecca Kafe"] == [
        custodian.full_name for custodian in project1.local_custodians.all()
    ]
    assert 1 == project1.company_personnel.count()
    assert project1.has_cner is False
    assert project1.has_erp is True
    assert ["Embury Bask"] == [
        employee.full_name for employee in project1.company_personnel.all()
    ]
    assert "test notes 123" == project1.erp_notes
    assert 2 == project1.publications.count()

    project2 = Project.objects.filter(acronym='CCCC deficiency').first()
    assert ["Colman Level"] == [
        custodian.full_name for custodian in project2.local_custodians.all()
    ]
    assert 3 == project2.company_personnel.count()
    assert 1 == project2.publications.count()
    # 2016-11-01
    assert 2016 == project2.start_date.year
    assert 11 == project2.start_date.month
    assert 1 == project2.start_date.day
Example #5
def test_import_datadecs(celery_session_worker, contact_types, partners,
                         gdpr_roles, storage_resources,
                         can_defer_constraint_checks):
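    # Two-stage import: datasets first, then the data declarations that
    # reference them; verify the declaration/contract/partner linkage.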

    VIP = factories.VIPGroup()

    factories.UserFactory.create(first_name='Igor',
                                 last_name='Teal',
                                 groups=[VIP])
    factories.UserFactory.create(first_name='Joanne',
                                 last_name='Swift',
                                 groups=[VIP])
    factories.UserFactory.create(first_name='Elgin',
                                 last_name='Gray',
                                 groups=[VIP])
    factories.UserFactory.create(first_name='Paul',
                                 last_name='Mauve',
                                 groups=[VIP])
    factories.UserFactory.create(first_name='Rene',
                                 last_name='Sahoo',
                                 groups=[VIP])
    factories.UserFactory.create(first_name='Rob',
                                 last_name='Blue',
                                 groups=[VIP])

    dataset_file = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                "../data/datasets.json")
    with open(dataset_file, "r") as f:
        importer = DatasetsImporter()
        importer.import_json(f.read(), True)

    datadec_file = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                "../data/datadecs.json")
    with open(datadec_file, "r") as f:
        importer = DatadecsImporter()
        importer.import_json(f.read(), True)

    dsets = Dataset.objects.all()
    assert 5 == dsets.count()

    ddecs = DataDeclaration.objects.all()
    hyper_data = ddecs[1]
    assert 'Hypertension-ABC disease' == hyper_data.title
    contract = hyper_data.contract
    first_partner_role = contract.partners_roles.first()
    assert first_partner_role.contacts.count() > 0
    assert "Alberto" == first_partner_role.contacts.first().first_name
    assert "Pico" == first_partner_role.contacts.first().last_name
    assert "Hypertension" == contract.project.acronym
    assert "ELU_I_94" == first_partner_role.partner.elu_accession
Example #6
def test_export_projects(celery_session_worker, contact_types, partners,
                         gdpr_roles, storage_resources,
                         can_defer_constraint_checks):
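    # Exercise ProjectsExporter: create two projects, export them to an
    # in-memory buffer and validate the serialised items against the schema.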

    VIP = factories.VIPGroup()

    rebecca = factories.UserFactory.create(first_name='Rebecca', last_name='Kafe', groups=[VIP])
    embury = factories.UserFactory.create(first_name='Embury', last_name='Bask')

    a_project = factories.ProjectFactory.create(
        acronym='Test_PRJ',
        title='Title of test project.',
        local_custodians=[rebecca, embury])
    another_project = factories.ProjectFactory.create(
        acronym='Another PRJ',
        title='Title of another test project.',
        local_custodians=[rebecca, embury])

    exp = ProjectsExporter()
    buffer = exp.export_to_buffer(StringIO())

    export_data = json.loads(buffer.getvalue())
    project_dicts = export_data['items']
    assert 2 == len(project_dicts)

    assert "Title of test project." ==  project_dicts[0]['name']
    assert 2 == len(project_dicts[0]['contacts'])

    #TODO add check of more fields

    schema = ProjectJSONSchemaValidator()
    assert schema.validate_items(project_dicts)
Example #7
def test_import_datasets(celery_session_worker, storage_resources,
                         contact_types, data_types, partners, gdpr_roles,
                         can_defer_constraint_checks):
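    # Variant of the datasets import test that also checks the data declarations
    # created from the same fixture, including an end-of-storage date.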
    VIP = factories.VIPGroup()

    factories.UserFactory.create(first_name='Igor',
                                 last_name='Teal',
                                 groups=[VIP],
                                 email="*****@*****.**")
    factories.UserFactory.create(first_name='Joanne',
                                 last_name='Swift',
                                 groups=[VIP],
                                 email="*****@*****.**")
    factories.UserFactory.create(first_name='Elgin',
                                 last_name='Gray',
                                 groups=[VIP],
                                 email="*****@*****.**")
    factories.UserFactory.create(first_name='Paul',
                                 last_name='Mauve',
                                 groups=[VIP],
                                 email="*****@*****.**")
    factories.UserFactory.create(first_name='Rob',
                                 last_name='Blue',
                                 groups=[VIP],
                                 email="*****@*****.**")
    factories.UserFactory.create(first_name='Ali',
                                 last_name='Gator',
                                 groups=[VIP],
                                 email="*****@*****.**")

    data_file = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                             "../data/datasets.json")
    importer = DatasetsImporter()
    importer.import_json_file(data_file, True)

    assert 5 == Dataset.objects.all().count()
    assert 4 == Project.objects.all().count()

    d1 = Dataset.objects.filter(title='ABCD data').first()
    assert ["Igor Teal"] == [
        custodian.full_name for custodian in d1.local_custodians.all()
    ]
    assert 1 == d1.data_locations.all().count()
    shares = d1.shares.all()
    share1 = shares[0]
    assert 1 == shares.count()

    d2 = Dataset.objects.filter(title='Hypertension data').first()
    assert ["Joanne Swift"] == [
        employee.full_name for employee in d2.local_custodians.all()
    ]
    assert "Hypertension" == d2.project.acronym
    assert 1 == d2.data_locations.all().count()

    d3 = Dataset.objects.filter(title='MDPDP data').first()
    assert ["Rob Blue"] == [
        employee.full_name for employee in d3.local_custodians.all()
    ]
    assert 2 == d3.data_locations.all().count()

    d4 = Dataset.objects.filter(title='PD data').first()
    assert ["Ali Gator"] == [
        employee.full_name for employee in d4.local_custodians.all()
    ]
    assert 7 == d4.data_locations.all().count()

    ddecs = DataDeclaration.objects.all()
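    # Importing datasets.json also created the data declarations checked below.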
    assert 5 == ddecs.count()

    ddec = DataDeclaration.objects.get(title='XYZ')
    assert "2030-05-10" == ddec.end_of_storage_duration.strftime("%Y-%m-%d")