Example #1
    def test_add_data_sample_ko(self):
        url = reverse('substrapp:data_sample-list')

        # missing datamanager
        data = {'data_manager_keys': ['toto']}
        extra = {
            'HTTP_ACCEPT': 'application/json;version=0.0',
        }

        response = self.client.post(url, data, format='multipart', **extra)
        r = response.json()
        self.assertEqual(
            r['message'],
            "One or more datamanager keys provided do not exist in local database. "
            "Please create them before. DataManager keys: ['toto']")
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

        self.add_default_data_manager()

        # missing local storage field
        data = {
            'data_manager_keys': [get_hash(self.data_description)],
            'test_only': True,
        }
        response = self.client.post(url, data, format='multipart', **extra)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

        # missing ledger field
        data = {
            'data_manager_keys': [get_hash(self.data_description)],
            'file': self.script,
        }
        response = self.client.post(url, data, format='multipart', **extra)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
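Every example on this page calls get_hash from substrapp.utils, whose body is not shown. Below is a minimal sketch of what it plausibly does, inferred from the call sites (a path or file-like object, an optional second key argument as in Examples #4 and #5, and 64-character hex results per Example #23); this is an assumption, not the actual substra-backend implementation:

import hashlib

def get_hash(file, key=None):
    # hypothetical sketch: SHA-256 hex digest of a file's content,
    # optionally mixed with a key (e.g. a traintuple key)
    if isinstance(file, str):
        with open(file, 'rb') as f:
            data = f.read()
    else:
        file.seek(0)
        data = file.read()
    sha = hashlib.sha256(data)
    if key is not None:
        sha.update(key.encode())
    return sha.hexdigest()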
Example #2
def prepare_opener(directory, tuple_):
    """Prepare opener for tuple execution."""
    from substrapp.models import DataManager
    dataset_key = tuple_['dataset']['key']
    data_opener_checksum = tuple_['dataset']['opener_checksum']

    datamanager = DataManager.objects.get(key=dataset_key)

    # verify that local storage opener file exists
    if not os.path.isfile(datamanager.data_opener.path):
        raise Exception(
            f'DataOpener file ({datamanager.data_opener.path}) is missing in local storage')

    # verify that local db opener file is not corrupted
    if get_hash(datamanager.data_opener.path) != data_opener_checksum:
        raise Exception(
            'DataOpener checksum in Subtuple is not the same as in local db')

    opener_dst_path = path.join(directory, 'opener/__init__.py')
    if not os.path.exists(opener_dst_path):
        os.symlink(datamanager.data_opener.path, opener_dst_path)
    else:
        # verify that local subtuple data opener file is not corrupted
        if get_hash(opener_dst_path) != data_opener_checksum:
            raise Exception(
                'DataOpener checksum in Subtuple is not the same as in local medias'
            )
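A hypothetical call for the function above; the tuple_ payload shape mirrors the lookups in prepare_opener, and the placeholder values are assumptions:

tuple_ = {
    'dataset': {
        'key': 'a' * 64,               # DataManager primary key in the local db
        'opener_checksum': 'b' * 64,   # expected hash of the opener file
    }
}
prepare_opener('/tmp/subtuple_dir', tuple_)  # symlinks opener/__init__.py if absent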
Example #3
    def create(self, validated_data):
        instance = self.initial_data.get('instance')
        name = validated_data.get('name')
        permissions = validated_data.get('permissions')

        # TODO, create a datamigration with new Site domain name when we will know the name of the final website
        # current_site = Site.objects.get_current()
        request = self.context.get('request', None)
        protocol = 'https://' if request is not None and request.is_secure() else 'http://'
        host = '' if request is None else request.get_host()

        args = '"%(name)s", "%(algoHash)s", "%(storageAddress)s", "%(descriptionHash)s", "%(descriptionStorageAddress)s", "%(permissions)s"' % {
            'name': name,
            'algoHash': get_hash(instance.file),
            'storageAddress': protocol + host + reverse('substrapp:algo-file', args=[instance.pk]),
            'descriptionHash': get_hash(instance.description),
            'descriptionStorageAddress': protocol + host + reverse('substrapp:algo-description', args=[instance.pk]),
            'permissions': permissions
        }

        if getattr(settings, 'LEDGER_SYNC_ENABLED'):
            return createLedgerAlgo(args, instance.pkhash, sync=True)
        else:
            # use a celery task, as we are in an http request transaction
            createLedgerAlgoAsync.delay(args, instance.pkhash)
            data = {
                'message': 'Algo added in local db waiting for validation. '
                           'The substra network has been notified for adding this Algo'
            }
            st = status.HTTP_202_ACCEPTED
            return data, st
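Examples #3, #9 and #28 all branch on the same flag: when LEDGER_SYNC_ENABLED is true, the ledger call blocks the HTTP request; otherwise a Celery task is queued and 202 Accepted is returned. A sketch of the toggle, assuming only that the setting exists (getattr above reads it without a default):

# settings.py (sketch)
LEDGER_SYNC_ENABLED = True   # call the ledger synchronously inside the request
# set to False to enqueue createLedgerAlgoAsync and answer HTTP 202 immediately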
Example #4
def put_model(subtuple, subtuple_directory, model_content):
    if model_content is not None:
        from django.core.exceptions import ObjectDoesNotExist
        from substrapp.models import Model

        model_dst_path = path.join(
            subtuple_directory, f'model/{subtuple["model"]["traintupleKey"]}')

        try:
            model = Model.objects.get(pk=subtuple['model']['hash'])
        except ObjectDoesNotExist:  # not in local db, write it to local disk
            with open(model_dst_path, 'wb') as f:
                f.write(model_content)
        else:
            # verify that the local db model file is not corrupted
            if get_hash(model.file.path,
                        subtuple['model']['traintupleKey']) != subtuple['model']['hash']:
                raise Exception(
                    'Model Hash in Subtuple is not the same as in local db')

            if not os.path.exists(model_dst_path):
                os.link(model.file.path, model_dst_path)
            else:
                # verify that the local subtuple model file is not corrupted
                if get_hash(model_dst_path,
                            subtuple['model']['traintupleKey']) != subtuple['model']['hash']:
                    raise Exception(
                        'Model Hash in Subtuple is not the same as in local medias')
Example #5
def _put_model(subtuple_directory,
               model_content,
               model_hash,
               traintuple_hash,
               filename_prefix=''):
    if not model_content:
        raise Exception('Model content should not be empty')

    from substrapp.models import Model

    # store a model in local subtuple directory from input model content
    model_dst_path = path.join(subtuple_directory,
                               f'model/{filename_prefix}{traintuple_hash}')
    model = None
    try:
        model = Model.objects.get(pk=model_hash)
    except ObjectDoesNotExist:  # write it to local disk
        with open(model_dst_path, 'wb') as f:
            f.write(model_content)
    else:
        # verify that local db model file is not corrupted
        if get_hash(model.file.path, traintuple_hash) != model_hash:
            raise Exception(
                'Model Hash in Subtuple is not the same as in local db')

        if not os.path.exists(model_dst_path):
            os.link(model.file.path, model_dst_path)
        else:
            # verify that local subtuple model file is not corrupted
            if get_hash(model_dst_path, traintuple_hash) != model_hash:
                raise Exception(
                    'Model Hash in Subtuple is not the same as in local medias'
                )
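Compared with put_model in Example #4, this variant rejects empty content, catches ObjectDoesNotExist explicitly, and supports a filename prefix. A hypothetical call with placeholder values:

model_bytes = b'serialized model weights'  # placeholder content
_put_model('/tmp/subtuple_dir', model_bytes,
           model_hash='c' * 64, traintuple_hash='d' * 64,
           filename_prefix='head_')  # writes model/head_ddd...ddd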
Example #6
    def test_composite_algo_create(self):
        url = reverse('substrapp:composite_algo-list')

        dir_path = os.path.dirname(os.path.realpath(__file__))

        composite_algo_path = os.path.join(dir_path, '../../../../fixtures/chunantes/algos/algo3/algo.tar.gz')
        description_path = os.path.join(dir_path, '../../../../fixtures/chunantes/algos/algo3/description.md')

        pkhash = get_hash(composite_algo_path)

        data = {'name': 'Composite Algo',
                'file': open(composite_algo_path, 'rb'),
                'description': open(description_path, 'rb'),
                'objective_key': get_hash(os.path.join(
                    dir_path, '../../../../fixtures/chunantes/objectives/objective0/description.md')),
                'permissions_public': True,
                'permissions_authorized_ids': []}

        with mock.patch.object(LedgerCompositeAlgoSerializer, 'create') as mcreate:

            mcreate.return_value = {}

            response = self.client.post(url, data=data, format='multipart', **self.extra)

        self.assertEqual(response.data['pkhash'], pkhash)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)

        data['description'].close()
        data['file'].close()
Example #7
    def create(self, channel_name, validated_data):
        instance = self.initial_data.get('instance')
        name = validated_data.get('name')
        permissions = validated_data.get('permissions')
        metadata = validated_data.get('metadata')

        # TODO, create a datamigration with new Site domain name when we will know the name of the final website
        current_site = getattr(settings, "DEFAULT_DOMAIN")

        args = {
            'key': instance.key,
            'name': name,
            'checksum': get_hash(instance.file),
            'storage_address': current_site + reverse(
                'substrapp:composite_algo-file', args=[instance.key]),
            'description_checksum': get_hash(instance.description),
            'description_storage_address': current_site + reverse(
                'substrapp:composite_algo-description', args=[instance.key]),
            'permissions': {
                'process': {
                    'public': permissions.get('public'),
                    'authorized_ids': permissions.get('authorized_ids'),
                }
            },
            'metadata': metadata,
        }
        return ledger.assets.create_compositealgo(channel_name, args, instance.key)
Example #8
    def get_default_algo_data_zip(self):
        expected_hash = get_hash(self.algo_zip)

        data = {
            'file': self.algo_zip,
            'description': self.data_description,  # fake it
            'name': 'super top algo',
            'objective_key': get_hash(self.objective_description),
            'permissions_public': True,
            'permissions_authorized_ids': [],
        }

        return expected_hash, data
Example #9
    def create(self, validated_data):
        instance = self.initial_data.get('instance')
        name = validated_data.get('name')
        metrics_name = validated_data.get('metrics_name')
        permissions = validated_data.get('permissions')
        test_data_manager_key = validated_data.get('test_data_manager_key', '')
        test_data_sample_keys = validated_data.get('test_data_sample_keys', [])

        # TODO, create a datamigration with new Site domain name when we will know the name of the final website
        current_site = getattr(settings, "DEFAULT_DOMAIN")

        args = {
            'name': name,
            'descriptionHash': get_hash(instance.description),
            'descriptionStorageAddress': current_site + reverse(
                'substrapp:objective-description', args=[instance.pk]),
            'metricsName': metrics_name,
            'metricsHash': get_hash(instance.metrics),
            'metricsStorageAddress': current_site + reverse(
                'substrapp:objective-metrics', args=[instance.pk]),
            'testDataset': {
                'dataManagerKey': test_data_manager_key,
                'dataSampleKeys': test_data_sample_keys,
            },
            'permissions': {
                'process': {
                    'public': permissions.get('public'),
                    'authorizedIDs': permissions.get('authorized_ids'),
                }
            }
        }

        if getattr(settings, 'LEDGER_SYNC_ENABLED'):
            data = createLedgerObjective(args, instance.pkhash, sync=True)
        else:
            # use a celery task, as we are in an http request transaction
            createLedgerObjectiveAsync.delay(args, instance.pkhash)
            data = {
                'message': 'Objective added in local db waiting for validation. '
                           'The substra network has been notified for adding this Objective'
            }

        return data
Example #10
    def test_bulk_add_data_sample_sync_ok(self):

        self.add_default_data_manager()

        url = reverse('substrapp:data_sample-list')

        file_mock = MagicMock(spec=InMemoryUploadedFile)
        file_mock2 = MagicMock(spec=InMemoryUploadedFile)
        file_mock.name = 'foo.zip'
        file_mock2.name = 'bar.zip'
        file_mock.read = MagicMock(return_value=self.data_file.read())
        file_mock2.read = MagicMock(return_value=self.data_file_2.read())

        data = {
            file_mock.name: file_mock,
            file_mock2.name: file_mock2,
            'data_manager_keys': [
                get_hash(self.data_data_opener),
                get_hash(self.data_data_opener2),
            ],
            'test_only': True,
        }
        extra = {
            'HTTP_ACCEPT': 'application/json;version=0.0',
        }

        with mock.patch(
                'substrapp.serializers.ledger.datasample.util.create_ledger_assets'
        ) as mcreate_ledger_assets:
            self.data_file.seek(0)
            self.data_file_2.seek(0)
            ledger_data = {
                'pkhash': [get_dir_hash(file_mock),
                           get_dir_hash(file_mock2)],
                'validated': True
            }
            mcreate_ledger_assets.return_value = ledger_data

            response = self.client.post(url, data, format='multipart', **extra)
            r = response.json()

            self.assertEqual(len(r), 2)
            self.assertEqual(r[0]['pkhash'], get_dir_hash(file_mock))
            self.assertTrue(r[0]['path'].endswith(
                f'/datasamples/{get_dir_hash(file_mock)}'))
            self.assertEqual(response.status_code, status.HTTP_201_CREATED)
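get_dir_hash, used on the mocked uploads above, is also not shown on this page. Since data samples are stored as directories (see the /datasamples/ path assertion), a plausible sketch hashes the archive's uncompressed content so that identical files yield the same pkhash regardless of archive metadata; this is inferred from usage, not the real implementation:

import hashlib
import os
import tempfile
import zipfile

def get_dir_hash(archive_file):
    # hypothetical sketch: deterministic hash over the extracted file contents
    archive_file.seek(0)
    sha = hashlib.sha256()
    with tempfile.TemporaryDirectory() as tmp:
        with zipfile.ZipFile(archive_file) as zf:
            zf.extractall(tmp)
        for root, _, files in sorted(os.walk(tmp)):
            for name in sorted(files):
                with open(os.path.join(root, name), 'rb') as f:
                    sha.update(f.read())
    return sha.hexdigest()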
Example #11
    def test_uncompress_content_zip(self):
        filename = 'algo.py'
        filepath = os.path.join(self.subtuple_path, filename)
        with open(filepath, 'w') as f:
            f.write('Hello World')
        self.assertTrue(os.path.exists(filepath))

        zipname = 'sample.zip'
        zippath = os.path.join(self.subtuple_path, zipname)
        with zipfile.ZipFile(zippath, mode='w') as zf:
            zf.write(filepath, arcname=filename)
        self.assertTrue(os.path.exists(zippath))

        subtuple_key = 'testkey'
        subtuple = {'key': subtuple_key, 'algo': 'testalgo'}

        with mock.patch('substrapp.tasks.tasks.get_hash') as mget_hash:
            with open(zippath, 'rb') as content:
                mget_hash.return_value = get_hash(zippath)
                uncompress_content(
                    content.read(),
                    os.path.join(self.subtuple_path,
                                 f'subtuple/{subtuple["key"]}/'))

        self.assertTrue(
            os.path.exists(
                os.path.join(self.subtuple_path,
                             f'subtuple/{subtuple["key"]}/{filename}')))
Example #12
    def test_bulkcreatedatasample_datamanager_do_not_exist(self):

        dir_path = os.path.dirname(os.path.realpath(__file__))
        data_manager_keys = [
            get_hash(
                os.path.join(
                    dir_path,
                    '../../../fixtures/owkin/datamanagers/datamanager0/opener.py'
                ))
        ]

        data = {
            'files': ['./foo'],
            'data_manager_keys': data_manager_keys,
            'test_only': False
        }

        err = StringIO()
        sys.stderr = err
        call_command('bulkcreatedatasample', json.dumps(data))

        output = err.getvalue().strip()

        wanted_output = f"One or more datamanager keys provided do not exist in local database. " \
                        f"Please create them before. DataManager keys: {data_manager_keys}"

        self.assertEqual(wanted_output, output)
Example #13
    def _create(self, request, data_opener):

        try:
            checksum = get_hash(data_opener)
            key = checksum
        except Exception as e:
            raise ValidationException(e.args, '(not computed)',
                                      status.HTTP_400_BAD_REQUEST)

        serializer = self.get_serializer(
            data={
                'key': key,
                'data_opener': data_opener,
                'description': request.data.get('description'),
                'name': request.data.get('name'),
                'checksum': checksum
            })

        try:
            serializer.is_valid(raise_exception=True)
        except Exception as e:
            # the key has been computed at this point, so report it
            raise ValidationException(e.args, key, status.HTTP_400_BAD_REQUEST)
        else:
            # create on ledger + db
            return self.commit(serializer, request)
Example #14
    def test_add_data_sample_ko_ledger_invalid(self):
        url = reverse('substrapp:data_sample-list')

        self.add_default_data_manager()

        file_mock = MagicMock(spec=InMemoryUploadedFile)
        file_mock.name = 'foo.zip'
        file_mock.read = MagicMock(return_value=self.data_file.file.read())

        data = {
            'file': file_mock,
            'data_manager_keys': [get_hash(self.data_data_opener)],
            'test_only': True,
        }
        extra = {
            'HTTP_ACCEPT': 'application/json;version=0.0',
        }

        with mock.patch.object(zipfile, 'is_zipfile') as mis_zipfile, \
                mock.patch('substrapp.views.datasample.LedgerDataSampleSerializer',
                           spec=True) as mLedgerDataSampleSerializer:
            mocked_LedgerDataSampleSerializer = MagicMock()
            mocked_LedgerDataSampleSerializer.is_valid.return_value = False
            mocked_LedgerDataSampleSerializer.errors = 'Failed'
            mLedgerDataSampleSerializer.return_value = mocked_LedgerDataSampleSerializer

            mis_zipfile.return_value = True
            response = self.client.post(url, data, format='multipart', **extra)
            r = response.json()
            self.assertEqual(r['message'],
                             "[ErrorDetail(string='Failed', code='invalid')]")
            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
Example #15
    def test_add_data_sample_ko_serializer_invalid(self):
        url = reverse('substrapp:data_sample-list')

        self.add_default_data_manager()

        file_mock = MagicMock(spec=InMemoryUploadedFile)
        file_mock.name = 'foo.zip'
        file_mock.read = MagicMock(return_value=self.data_file.read())

        data = {
            'file': file_mock,
            'data_manager_keys': [get_hash(self.data_data_opener)],
            'test_only': True,
        }
        extra = {
            'HTTP_ACCEPT': 'application/json;version=0.0',
        }

        with mock.patch.object(zipfile, 'is_zipfile') as mis_zipfile, \
                mock.patch.object(DataSampleViewSet, 'get_serializer') as mget_serializer:
            mocked_serializer = MagicMock(DataSampleSerializer)
            mocked_serializer.is_valid.return_value = True
            mocked_serializer.save.side_effect = Exception('Failed')
            mget_serializer.return_value = mocked_serializer

            mis_zipfile.return_value = True

            response = self.client.post(url, data, format='multipart', **extra)
            r = response.json()
            self.assertEqual(r['message'], "Failed")
            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
Example #16
    def test_put_algo_tar(self):
        algo_content = self.algo.read()
        subtuple_key = get_hash(self.algo)

        subtuple = {'key': subtuple_key, 'algo': 'testalgo'}

        with mock.patch('substrapp.tasks.get_hash') as mget_hash:
            mget_hash.return_value = subtuple_key
            put_algo(
                os.path.join(self.subtuple_path,
                             f'subtuple/{subtuple["key"]}/'), algo_content)

        def tree_printer(root):
            # debug helper: prints the extracted tree (not used by the assertions)
            for root, dirs, files in os.walk(root):
                for d in dirs:
                    print(os.path.join(root, d))
                for f in files:
                    print(os.path.join(root, f))

        self.assertTrue(
            os.path.exists(
                os.path.join(self.subtuple_path,
                             f'subtuple/{subtuple["key"]}/algo.py')))
        self.assertTrue(
            os.path.exists(
                os.path.join(self.subtuple_path,
                             f'subtuple/{subtuple["key"]}/Dockerfile')))
Example #17
    def test_add_data_sample_ko_already_exists(self):
        url = reverse('substrapp:data_sample-list')

        self.add_default_data_manager()

        file_mock = MagicMock(spec=InMemoryUploadedFile)
        file_mock.name = 'foo.zip'
        file_mock.read = MagicMock(return_value=self.data_file.file.read())
        file_mock.open = MagicMock(return_value=file_mock)

        _, datasamples_path_from_file = store_datasamples_archive(file_mock)

        d = DataSample(path=datasamples_path_from_file)
        # trigger pre save
        d.save()

        data = {
            'file': file_mock,
            'data_manager_keys': [get_hash(self.data_data_opener)],
            'test_only': True,
        }
        extra = {
            'HTTP_ACCEPT': 'application/json;version=0.0',
        }

        with mock.patch.object(zipfile, 'is_zipfile') as mis_zipfile:
            mis_zipfile.return_value = True
            response = self.client.post(url, data, format='multipart', **extra)
            r = response.json()
            self.assertEqual(r['message'], [[{
                'pkhash': ['data sample with this pkhash already exists.']
            }]])
            self.assertEqual(response.status_code, status.HTTP_409_CONFLICT)
Example #18
    def test_put_opener(self):

        filepath = os.path.join(self.subtuple_path, self.script_filename)
        with open(filepath, 'w') as f:
            f.write(self.script.read())
        self.assertTrue(os.path.exists(filepath))

        opener_directory = os.path.join(self.subtuple_path, 'opener')
        create_directory(opener_directory)

        with mock.patch('substrapp.models.DataManager.objects.get') as mget:
            mget.return_value = FakeDataManager(filepath)

            # test fail
            with self.assertRaises(Exception):
                put_opener({'dataset': {
                    'openerHash': 'HASH'
                }}, self.subtuple_path)

            # test work
            put_opener({'dataset': {
                'openerHash': get_hash(filepath)
            }}, self.subtuple_path)

        self.assertTrue(
            os.path.exists(os.path.join(opener_directory, 'opener.py')))
Example #19
    def test_get_objective(self):
        metrics_content = self.script.read().encode('utf-8')
        objective_hash = get_hash(self.script)

        with mock.patch('substrapp.models.Objective.objects.get') as mget:

            mget.return_value = FakeObjective()

            objective = get_objective(
                {'objective': {
                    'hash': objective_hash,
                    'metrics': ''
                }})
            self.assertTrue(isinstance(objective, bytes))
            self.assertEqual(objective, b'foo')

        with mock.patch('substrapp.tasks.utils.get_remote_file_content') as mget_remote_file, \
                mock.patch('substrapp.tasks.tasks.get_object_from_ledger'), \
                mock.patch('substrapp.tasks.utils.authenticate_worker'),\
                mock.patch('substrapp.models.Objective.objects.update_or_create') as mupdate_or_create:

            mget.return_value = FakeObjective()
            mget_remote_file.return_value = metrics_content
            mupdate_or_create.return_value = FakeObjective(), True

            objective = get_objective(
                {'objective': {
                    'hash': objective_hash,
                    'metrics': ''
                }})
            self.assertTrue(isinstance(objective, bytes))
            self.assertEqual(objective, b'foo')
Example #20
    def test_get_algo(self):
        algo_content = self.algo.read()
        algo_hash = get_hash(self.algo)

        with mock.patch('substrapp.tasks.get_remote_file') as mget_remote_file:
            mget_remote_file.return_value = algo_content, algo_hash
            self.assertEqual((algo_content, algo_hash), get_algo({'algo': ''}))
Example #21
    def test_get_remote_file(self):
        content = str(self.script.read())
        remote_file = {
            'storageAddress': 'localhost',
            'hash': compute_hash(content)
        }

        with mock.patch(
                'substrapp.utils.get_computed_hash') as mget_computed_hash:
            pkhash = compute_hash(content)
            mget_computed_hash.return_value = content, pkhash

            content_remote, pkhash_remote = get_remote_file(remote_file)
            self.assertEqual(pkhash_remote, get_hash(self.script))
            self.assertEqual(content_remote, content)

        with mock.patch(
                'substrapp.utils.get_computed_hash') as mget_computed_hash:
            content = content + ' FAIL'
            pkhash = compute_hash(content)
            mget_computed_hash.return_value = content, pkhash

            with self.assertRaises(Exception):
                get_remote_file(
                    remote_file)  # contents (by pkhash) are different
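compute_hash differs from get_hash in that it hashes in-memory content rather than a file. A short sketch consistent with the assumed get_hash above (again an assumption, not the real substrapp.utils code):

import hashlib

def compute_hash(content, key=None):
    # hypothetical counterpart of get_hash for already-read content
    if isinstance(content, str):
        content = content.encode()
    sha = hashlib.sha256(content)
    if key is not None:
        sha.update(key.encode())
    return sha.hexdigest()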
Example #22
    def test_add_data_sample_ko_408(self):
        url = reverse('substrapp:data_sample-list')

        self.add_default_data_manager()

        file_mock = MagicMock(spec=InMemoryUploadedFile)
        file_mock.name = 'foo.zip'
        file_mock.read = MagicMock(return_value=self.data_file.file.read())
        file_mock.open = MagicMock(return_value=file_mock)

        data = {
            'file': file_mock,
            'data_manager_keys': [get_hash(self.data_data_opener)],
            'test_only': True,
        }
        extra = {
            'HTTP_ACCEPT': 'application/json;version=0.0',
        }

        with mock.patch.object(zipfile, 'is_zipfile') as mis_zipfile, \
                mock.patch.object(LedgerDataSampleSerializer, 'create') as mcreate:
            mcreate.side_effect = LedgerTimeout('Timeout')
            mis_zipfile.return_value = True
            response = self.client.post(url, data, format='multipart', **extra)
            r = response.json()
            self.assertEqual(r['message'], {
                'pkhash': [get_dir_hash(file_mock)],
                'validated': False
            })
            self.assertEqual(response.status_code,
                             status.HTTP_408_REQUEST_TIMEOUT)
Example #23
    def test_datamanager_retrieve_fail(self):

        dir_path = os.path.dirname(os.path.realpath(__file__))
        url = reverse('substrapp:data_manager-list')

        # PK hash < 64 chars
        search_params = '42303efa663015e729159833a12ffb510ff/'
        response = self.client.get(url + search_params, **self.extra)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

        # PK hash not hexadecimal
        search_params = 'X' * 64 + '/'
        response = self.client.get(url + search_params, **self.extra)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

        with mock.patch('substrapp.views.datamanager.get_object_from_ledger'
                        ) as mget_object_from_ledger:
            mget_object_from_ledger.side_effect = LedgerError('TEST')

            file_hash = get_hash(
                os.path.join(
                    dir_path,
                    "../../../../fixtures/owkin/objectives/objective0/description.md"
                ))
            search_params = f'{file_hash}/'
            response = self.client.get(url + search_params, **self.extra)
            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
Example #24
    def test_create_model(self):
        modelfile, _ = get_sample_model()
        model = Model.objects.create(file=modelfile)
        self.assertEqual(model.pkhash, get_hash(modelfile))
        self.assertFalse(model.validated)
        self.assertIn(f'pkhash {model.pkhash}', str(model))
        self.assertIn(f'validated {model.validated}', str(model))
Example #25
    def test_create_algo(self):
        script, _ = get_sample_script()
        algo = Algo.objects.create(file=script)
        self.assertEqual(algo.pkhash, get_hash(script))
        self.assertFalse(algo.validated)
        self.assertIn(f'pkhash {algo.pkhash}', str(algo))
        self.assertIn(f'validated {algo.validated}', str(algo))
Example #26
    def test_add_algo_ko(self):
        url = reverse('substrapp:algo-list')

        # non existing associated objective
        data = {
            'file': self.algo,
            'description': self.data_description,
            'name': 'super top algo',
            'objective_key': 'non existing objectivexxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
            'permissions_public': True,
            'permissions_authorized_ids': [],
        }
        extra = {
            'HTTP_ACCEPT': 'application/json;version=0.0',
        }

        with mock.patch.object(LedgerAlgoSerializer, 'create') as mcreate:
            mcreate.side_effect = LedgerError(
                'Fail to add algo. Objective does not exist')

            response = self.client.post(url, data, format='multipart', **extra)
            r = response.json()
            self.assertIn('does not exist', r['message'])
            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

            Objective.objects.create(description=self.objective_description,
                                     metrics=self.objective_metrics)

            # missing local storage field
            data = {
                'name': 'super top algo',
                'objective_key': get_hash(self.objective_description),
                'permissions_public': True,
                'permissions_authorized_ids': [],
            }
            response = self.client.post(url, data, format='multipart', **extra)
            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

            # missing ledger field
            data = {
                'file': self.algo,
                'description': self.data_description,
                'objective_key': get_hash(self.objective_description),
            }
            response = self.client.post(url, data, format='multipart', **extra)
            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
Example #27
    def test_objective_create(self):
        url = reverse('substrapp:objective-list')

        dir_path = os.path.dirname(os.path.realpath(__file__))

        objective_path = os.path.join(
            dir_path, '../../../../fixtures/owkin/objectives/objective0/')

        description_path = os.path.join(objective_path, 'description.md')

        metrics_path = os.path.join(MEDIA_ROOT, 'metrics.zip')

        zip_folder(objective_path, metrics_path)

        pkhash = get_hash(description_path)

        test_data_manager_key = get_hash(
            os.path.join(
                dir_path,
                '../../../../fixtures/owkin/datamanagers/datamanager0/opener.py'
            ))

        data = {
            'name': 'Simplified skin lesion classification',
            'description': open(description_path, 'rb'),
            'metrics_name': 'macro-average recall',
            'metrics': open(metrics_path, 'rb'),
            'permissions_public': True,
            'permissions_authorized_ids': [],
            'test_data_sample_keys': self.test_data_sample_keys,
            'test_data_manager_key': test_data_manager_key
        }

        with mock.patch.object(LedgerObjectiveSerializer, 'create') as mcreate:

            mcreate.return_value = {}

            response = self.client.post(url,
                                        data=data,
                                        format='multipart',
                                        **self.extra)

        self.assertEqual(response.data['pkhash'], pkhash)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)

        data['description'].close()
        data['metrics'].close()
Example #28
    def create(self, validated_data):
        instance = self.initial_data.get('instance')
        name = validated_data.get('name')
        metrics_name = validated_data.get('metrics_name')
        permissions = validated_data.get('permissions')
        test_data_manager_key = validated_data.get('test_data_manager_key', '')
        test_data_sample_keys = validated_data.get('test_data_sample_keys', [])

        # TODO, create a datamigration with new Site domain name when we will know the name of the final website
        # current_site = Site.objects.get_current()
        request = self.context.get('request', None)
        protocol = 'https://' if request is not None and request.is_secure() else 'http://'
        host = '' if request is None else request.get_host()

        args = '"%(name)s", "%(descriptionHash)s", "%(descriptionStorageAddress)s", "%(metricsName)s", "%(metricsHash)s", "%(metricsStorageAddress)s", "%(testDataSample)s", "%(permissions)s"' % {
            'name':
            name,
            'descriptionHash':
            get_hash(instance.description),
            'descriptionStorageAddress':
            protocol + host +
            reverse('substrapp:objective-description', args=[instance.pk]),
            'metricsName':
            metrics_name,
            'metricsHash':
            get_hash(instance.metrics),
            'metricsStorageAddress':
            protocol + host +
            reverse('substrapp:objective-metrics', args=[instance.pk]),
            'testDataSample':
            f'{test_data_manager_key}:{",".join([x for x in test_data_sample_keys])}',
            'permissions':
            permissions
        }

        if getattr(settings, 'LEDGER_SYNC_ENABLED'):
            return createLedgerObjective(args, instance.pkhash, sync=True)
        else:
            # use a celery task, as we are in an http request transaction
            createLedgerObjectiveAsync.delay(args, instance.pkhash)
            data = {
                'message': 'Objective added in local db waiting for validation. '
                           'The substra network has been notified for adding this Objective'
            }
            st = status.HTTP_202_ACCEPTED
            return data, st
Example #29
    def test_create_datamanager(self):
        description, _, data_opener, _ = get_sample_datamanager()
        datamanager = DataManager.objects.create(description=description,
                                                 data_opener=data_opener,
                                                 name="slides_opener")
        self.assertEqual(datamanager.pkhash, get_hash(data_opener))
        self.assertFalse(datamanager.validated)
        self.assertIn(f'pkhash {datamanager.pkhash}', str(datamanager))
        self.assertIn(f'name {datamanager.name}', str(datamanager))
Example #30
    def test_create_objective(self):
        description, _, metrics, _ = get_sample_objective()
        objective = Objective.objects.create(description=description,
                                             metrics=metrics)

        self.assertEqual(objective.pkhash, get_hash(description))
        self.assertFalse(objective.validated)
        self.assertIn(f'pkhash {objective.pkhash}', str(objective))
        self.assertIn(f'validated {objective.validated}', str(objective))
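Examples #24, #25, #29 and #30 all assert that pkhash equals get_hash of the asset's primary file, and Example #17 mentions that saving a DataSample triggers a pre-save step. A hedged sketch of how such a hook could be wired; the signal receiver and the module path of get_hash are assumptions:

from django.db.models.signals import pre_save
from django.dispatch import receiver

from substrapp.models import Model
from substrapp.utils import get_hash  # module path assumed

@receiver(pre_save, sender=Model)  # hypothetical: one receiver per asset type
def fill_pkhash(sender, instance, **kwargs):
    # compute the primary-key hash from the file content before the row is saved
    if not instance.pkhash:
        instance.pkhash = get_hash(instance.file)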