Example 1
    def post(self):
        """
        Creates a dataset and links it to the token
        :return:
        """
        required_privileges = [
            Privileges.CREATE_DATASET, Privileges.EDIT_DATASET,
            Privileges.ADMIN_EDIT_TOKEN
        ]

        _, token = self.token_parser.parse_args(
            required_any_token_privileges=required_privileges)
        kwargs = self.post_parser.parse_args()
        if 'tags' in request.json:
            kwargs['tags'] = request.json['tags']  # quick fix for the tag-splitting bug.

        dataset = DatasetFactory(token).create_dataset(**kwargs)

        self.session.flush()

        TokenFactory(token).link_datasets(token.token_gui, [dataset])

        self.session.flush()

        result = dataset.serialize()

        return result, 201
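
A minimal usage sketch of the factory flow this handler wraps (a sketch only: it assumes an open storage session and a token that already carries the required privileges; the keyword arguments mirror the ones used in the tests further below):

def create_and_link(session, token):
    # Create the dataset on behalf of the token (CREATE_DATASET privilege assumed).
    dataset = DatasetFactory(token).create_dataset(
        url_prefix="creator", title="Creator dataset",
        description="Dataset example creator", reference="Unknown")
    session.flush()

    # Link the new dataset to the token that created it.
    TokenFactory(token).link_datasets(token.token_gui, [dataset])
    session.flush()

    return dataset.serialize()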
Example 2
    def post(self, token_prefix, dataset_prefix, dest_token_gui):
        required_privileges = [
            Privileges.RO_WATCH_DATASET, Privileges.ADMIN_EDIT_TOKEN
        ]
        _, token = self.token_parser.parse_args(
            request, required_any_token_privileges=required_privileges)

        kwargs = self.post_parser.parse_args()

        dest_token = TokenDAO.query.get(token_gui=dest_token_gui)

        dataset_url_prefix = "{}/{}".format(token_prefix, dataset_prefix)

        if dest_token is None:
            abort(404, message="Token not found.")

        if 'options' in request.json:
            options = request.json['options']
        else:
            options = None

        kwargs['options'] = options
        kwargs['tags'] = request.json['tags']

        dataset = DatasetFactory(dest_token).fork_dataset(
            dataset_url_prefix, token, **kwargs)

        TokenFactory(dest_token).link_datasets(dest_token.token_gui, [dataset])

        self.session.flush()

        result = dataset.serialize()

        return result
Example 3
    def test_dataset_fork(self):
        """
        Factory can fork a dataset
        :return:
        """
        viewer = TokenDAO("normal user only view dataset", 2, 10, "viewer", privileges=Privileges.RO_WATCH_DATASET)
        creator = TokenDAO("normal user privileged", 2, 10, "creator", privileges=Privileges.CREATE_DATASET)

        d = DatasetDAO("viewer/dataset", "dataset", "description for dataset", "none", ["d1", "d2"])

        self.session.flush()

        element1 = DatasetElementDAO("a", "a", None, "noneaa", ["taga"], dataset=d)
        element2 = DatasetElementDAO("b", "b", None, "nonebb", ["tagb"], dataset=d)

        self.session.flush()

        d = d.update()

        self.assertEqual(len(d.elements), 2)

        viewer = viewer.link_dataset(d)
        d.update()

        # Creator can clone it from viewer.
        forked_dataset = DatasetFactory(creator).fork_dataset(d.url_prefix, viewer, title="dataset_cloned",
                                             url_prefix="dataset", description="desc", reference="none",
                                             tags=["d2", "d1"])

        self.assertEqual(forked_dataset.url_prefix, "creator/dataset")
        self.assertEqual(forked_dataset.title, "dataset_cloned")
        self.assertEqual(forked_dataset.description, "desc")
        self.assertEqual(forked_dataset.tags, ["d2", "d1"])

        self.assertEqual(len(forked_dataset.elements), 2)

        elements_titles = [element1.title, element2.title]

        self.assertIn(forked_dataset.elements[0].title, elements_titles)
        self.assertIn(forked_dataset.elements[1].title, elements_titles)

        # viewer can NOT clone it from creator.
        with self.assertRaises(Unauthorized) as ex:
            forked_dataset = DatasetFactory(viewer).fork_dataset(forked_dataset.url_prefix, creator, title="dataset_cloned",
                                                 url_prefix="dataset", description="desc", reference="none",
                                                 tags=["d2", "d1"])

        # Dataset can be forked omitting some options
        forked_dataset2 = DatasetFactory(creator).fork_dataset(d.url_prefix, viewer,
                                             url_prefix="dataset2")

        self.assertEqual(forked_dataset2.title, d.title)
        self.assertEqual(forked_dataset2.description, d.description)
        self.assertEqual(forked_dataset2.tags, d.tags)
        self.assertEqual(forked_dataset2.reference, d.reference)
Example 4
    def get(self, token_prefix, dataset_prefix):
        required_privileges = [
            Privileges.RO_WATCH_DATASET, Privileges.ADMIN_EDIT_TOKEN
        ]

        _, token = self.token_parser.parse_args(
            required_any_token_privileges=required_privileges)
        full_dataset_url_prefix = "{}/{}".format(token_prefix, dataset_prefix)

        dataset = DatasetFactory(token).get_dataset(full_dataset_url_prefix)
        result = dataset.serialize()

        return result, 200
Example 5
    def testDatasetCreationLimit(self):
        """
        Factory limits creation of dataset depending on the token used to create it.
        """
        creator = TokenDAO("normal user privileged", 1, 1, "user1", privileges=Privileges.CREATE_DATASET)

        # Creator should not be able to create more than 1 dataset
        dataset1 = DatasetFactory(creator).create_dataset(url_prefix="creator", title="Creator dataset", description="Dataset example creator", reference="Unknown")

        creator = creator.link_dataset(dataset1)

        self.session.flush()
        with self.assertRaises(Unauthorized) as ex:
            dataset2 = DatasetFactory(creator).create_dataset(url_prefix="creator2", title="Creator dataset2", description="Dataset2 example creator", reference="Unknown")
Example 6
    def delete(self, token_prefix, dataset_prefix):
        """
        """
        required_privileges = [
            Privileges.DESTROY_ELEMENTS,
            Privileges.ADMIN_EDIT_TOKEN
        ]

        _, token = self.token_parser.parse_args(required_any_token_privileges=required_privileges)
        args = self.delete_parser.parse_args()

        full_prefix = "{}/{}".format(token_prefix, dataset_prefix)

        dataset = DatasetFactory(token).get_dataset(full_prefix)

        # The 'elements' key is required and validated by the delete_parser, so it is
        # guaranteed to exist in the request JSON.
        elements_ids = request.json['elements']

        # Note that if no elements are provided (length 0), the whole dataset is purged;
        # this is the clear() behavior.

        DatasetElementFactory(token, dataset).destroy_elements([ObjectId(x) for x in elements_ids])

        self.session.flush()

        return 200
Example 7
    def get(self, token_prefix, dataset_prefix):
        """
        Retrieves specific dataset elements from a given dataset.
        Accepts parameters:
            elements. It will retrieve the info from the specified array of elements IDs.
        :return:
        """
        required_privileges = [
            Privileges.RO_WATCH_DATASET,
            Privileges.ADMIN_EDIT_TOKEN
        ]

        _, token = self.token_parser.parse_args(required_any_token_privileges=required_privileges)
        args = self.get_parser.parse_args()

        full_prefix = "{}/{}".format(token_prefix, dataset_prefix)

        dataset = DatasetFactory(token).get_dataset(full_prefix)

        # The 'elements' key is required and validated by the get_parser, so it is
        # guaranteed to exist in the request JSON.
        elements_ids = request.json['elements']

        elements_info = DatasetElementFactory(token, dataset).get_specific_elements_info([ObjectId(x) for x in elements_ids])

        result = [element.serialize() for element in elements_info]

        return result
Example 8
    def post(self, token_prefix, dataset_prefix):
        """
        Creates the element's header into the dataset.
        :return:
        """
        required_privileges = [
            Privileges.CREATE_DATASET,
            Privileges.EDIT_DATASET,
            Privileges.ADMIN_EDIT_TOKEN
        ]

        _, token = self.token_parser.parse_args(required_any_token_privileges=required_privileges)
        kwargs = self.post_parser.parse_args()

        if "content" not in kwargs and "http_ref" not in kwargs:
            kwargs["http_ref"] = "unknown"

        if 'tags' in request.json:
            kwargs['tags'] = request.json['tags']  # quick fix for the tag-splitting bug.

        full_prefix = "{}/{}".format(token_prefix, dataset_prefix)

        dataset = DatasetFactory(token).get_dataset(full_prefix)

        element = DatasetElementFactory(token, dataset).create_element(**kwargs)

        self.session.flush()

        result = str(element._id)

        return result, 201
Example 9
    def get(self, token_prefix, dataset_prefix):
        required_privileges = [
            Privileges.RO_WATCH_DATASET, Privileges.ADMIN_EDIT_TOKEN
        ]

        _, token = self.token_parser.parse_args(
            required_any_token_privileges=required_privileges)
        full_dataset_url_prefix = "{}/{}".format(token_prefix, dataset_prefix)

        dataset = DatasetFactory(token).get_dataset(full_dataset_url_prefix)

        total_size = global_config.get_storage().get_files_size(
            [l.file_ref_id for l in dataset.elements])
        dataset.size = total_size
        self.session.flush()

        return total_size, 200
Example 10
    def test_dataset_modification(self):
        """
        Factory can modify datasets
        :return:
        """
        dataset = DatasetDAO("foo/hello", "notitle", "desc", "ref")
        dataset2 = DatasetDAO("bar/hello", "notitle", "desc", "ref")

        token_unprivileged = TokenDAO("unprivileged", 0, 0, "bar")
        token_privileged = TokenDAO("privileged", 0, 0, "bar", privileges=Privileges.EDIT_DATASET)
        token_admin = TokenDAO("admin", 0, 0, "bar", privileges=Privileges.ADMIN_EDIT_TOKEN)

        self.session.flush()

        # Unprivileged token cannot modify dataset
        with self.assertRaises(Unauthorized) as ex:
            DatasetFactory(token_unprivileged).edit_dataset(dataset.url_prefix, title="hello")

        # Privileged token cannot modify dataset if not in same url prefix
        with self.assertRaises(Unauthorized) as ex:
            DatasetFactory(token_privileged).edit_dataset(dataset.url_prefix, title="hello")

        # Privileged token cannot modify the dataset yet, even within the same url prefix,
        # because the dataset is not linked with the token.
        with self.assertRaises(Unauthorized) as ex:
            dataset2 = DatasetFactory(token_privileged).edit_dataset(dataset2.url_prefix, title="hello")

        # Once the dataset is linked, the edit succeeds:
        token_privileged = token_privileged.link_dataset(dataset2)
        dataset2 = DatasetFactory(token_privileged).edit_dataset(dataset2.url_prefix, title="hello")

        self.assertEqual(dataset2.title, "hello")

        # Admin token can modify any dataset
        dataset = DatasetFactory(token_admin).edit_dataset(dataset.url_prefix, title="hello2")
        self.assertEqual(dataset.title, "hello2")

        # Privileged token can modify the dataset part of the url prefix (not the token part)
        dataset2 = DatasetFactory(token_privileged).edit_dataset(dataset2.url_prefix, url_prefix="new_prefix")
        self.assertEqual(dataset2.url_prefix, "bar/new_prefix")

        with self.assertRaises(BadRequest) as ex:
            dataset2 = DatasetFactory(token_privileged).edit_dataset(dataset2.url_prefix, url_prefix="bar2/new_prefix")

        # Admin can modify url prefix without problems
        dataset2 = DatasetFactory(token_admin).edit_dataset(dataset2.url_prefix, url_prefix="bar2/new_prefix")

        self.assertEqual(dataset2.url_prefix, "bar2/new_prefix")
Example 11
    def delete(self, token_prefix, dataset_prefix):
        required_privileges = [
            Privileges.DESTROY_DATASET, Privileges.ADMIN_DESTROY_TOKEN
        ]

        _, token = self.token_parser.parse_args(
            required_any_token_privileges=required_privileges)
        full_dataset_url_prefix = "{}/{}".format(token_prefix, dataset_prefix)

        DatasetFactory(token).destroy_dataset(full_dataset_url_prefix)

        self.session.flush()

        return "Done", 200
Example 12
    def get(self, token_prefix, dataset_prefix, element_id):
        required_privileges = [
            Privileges.RO_WATCH_DATASET,
            Privileges.ADMIN_EDIT_TOKEN
        ]

        _, token = self.token_parser.parse_args(required_any_token_privileges=required_privileges)
        full_dataset_url_prefix = "{}/{}".format(token_prefix, dataset_prefix)

        dataset = DatasetFactory(token).get_dataset(full_dataset_url_prefix)

        content = DatasetElementFactory(token, dataset).get_element_content(ObjectId(element_id))

        return send_file(BytesIO(content), mimetype="application/octet-stream")
Example 13
    def test_dataset_destruction(self):
        """
        Factory can destroy datasets
        :return:
        """
        anonymous = TokenDAO("Anonymous", 1, 1, "anonymous")
        watcher = TokenDAO("normal user", 1, 1, "user1")
        creator = TokenDAO("normal user privileged", 1, 1, "user1", privileges=Privileges.CREATE_DATASET)
        creator2 = TokenDAO("normal user privileged", 1, 1, "user2", privileges=Privileges.CREATE_DATASET)
        destructor = TokenDAO("normal user privileged", 1, 1, "user1", privileges=Privileges.DESTROY_DATASET)
        admin = TokenDAO("admin user", 1, 1, "admin", privileges=Privileges.ADMIN_CREATE_TOKEN + Privileges.ADMIN_EDIT_TOKEN + Privileges.ADMIN_DESTROY_TOKEN)

        dataset = DatasetFactory(creator).create_dataset(url_prefix="creator", title="Creator dataset", description="Dataset example creator", reference="Unknown")
        dataset2 = DatasetFactory(creator2).create_dataset(url_prefix="creator", title="Creator dataset", description="Dataset example creator", reference="Unknown")
        self.session.flush()

        # Anonymous or watcher can't destroy datasets
        with self.assertRaises(Unauthorized) as ex:
            DatasetFactory(anonymous).destroy_dataset(url_prefix="user1/creator")
        with self.assertRaises(Unauthorized) as ex:
            DatasetFactory(watcher).destroy_dataset(url_prefix="user1/creator")

        # creator can't destroy a dataset
        with self.assertRaises(Unauthorized) as ex:
            DatasetFactory(creator).destroy_dataset(url_prefix="user1/creator")

        # destructor can't destroy other's datasets
        with self.assertRaises(Unauthorized) as ex:
            DatasetFactory(destructor).destroy_dataset(url_prefix="user2/creator")

        # destructor can destroy datasets within its own url-prefix
        DatasetFactory(destructor).destroy_dataset(url_prefix="user1/creator")

        self.session.flush()
        self.session.clear()

        dataset = DatasetDAO.query.get(url_prefix="user1/creator")
        self.assertIsNone(dataset)

        # admin can destroy datasets within any url-prefix
        DatasetFactory(admin).destroy_dataset(url_prefix="user2/creator")

        self.session.flush()
        self.session.clear()

        dataset = DatasetDAO.query.get(url_prefix="user2/creator")
        self.assertIsNone(dataset)
Example 14
    def delete(self, token_prefix, dataset_prefix, element_id):
        required_privileges = [
            Privileges.DESTROY_ELEMENTS,
            Privileges.ADMIN_DESTROY_TOKEN
        ]

        _, token = self.token_parser.parse_args(required_any_token_privileges=required_privileges)
        full_dataset_url_prefix = "{}/{}".format(token_prefix, dataset_prefix)

        dataset = DatasetFactory(token).get_dataset(full_dataset_url_prefix)

        DatasetElementFactory(token, dataset).destroy_element(ObjectId(element_id))

        self.session.flush()

        return "Done", 200
Example 15
    def test_dataset_retrieval(self):
        """
        Factory can retrieve datasets.
        :return:
        """
        anonymous = TokenDAO("Anonymous", 1, 1, "anonymous")
        watcher = TokenDAO("normal user", 1, 1, "user1", privileges=0)
        creator = TokenDAO("normal user privileged", 1, 1, "user1", privileges=Privileges.CREATE_DATASET)
        creator2 = TokenDAO("normal user privileged", 1, 1, "user2", privileges=Privileges.CREATE_DATASET)
        admin = TokenDAO("admin user", 1, 1, "admin", privileges=Privileges.ADMIN_CREATE_TOKEN + Privileges.ADMIN_EDIT_TOKEN + Privileges.ADMIN_DESTROY_TOKEN)

        dataset = DatasetFactory(creator).create_dataset(url_prefix="creator", title="Creator dataset", description="Dataset example creator", reference="Unknown")
        dataset2 = DatasetFactory(creator2).create_dataset(url_prefix="creator", title="Creator dataset", description="Dataset example creator", reference="Unknown")
        self.session.flush()

        # anonymous should not be able to get info from the dataset
        with self.assertRaises(Unauthorized) as ex:
            dataset3 = DatasetFactory(anonymous).get_dataset(dataset.url_prefix)

        # watcher should not be able to get info from the dataset
        with self.assertRaises(Unauthorized) as ex:
            dataset3 = DatasetFactory(watcher).get_dataset(dataset.url_prefix)

        # creator should not be able to get info from the dataset
        with self.assertRaises(Unauthorized) as ex:
            dataset3 = DatasetFactory(creator).get_dataset(dataset.url_prefix)

        # admin should be able to get info from the dataset
        dataset3 = DatasetFactory(admin).get_dataset(dataset.url_prefix)
        self.assertEqual(dataset3.url_prefix, dataset.url_prefix)

        anonymous = anonymous.link_dataset(dataset)

        # anonymous should now be able to get info from the dataset
        dataset3 = DatasetFactory(anonymous).get_dataset(dataset.url_prefix)
        self.assertEqual(dataset3.url_prefix, dataset.url_prefix)

        # The privilege RO_WATCH_DATASET is always required except for admin

        watcher = watcher.link_dataset(dataset)
        with self.assertRaises(Unauthorized) as ex:
            dataset3 = DatasetFactory(watcher).get_dataset(dataset.url_prefix)
Example 16
    def put(self, token_prefix, dataset_prefix, element_id):
        required_privileges = [
            Privileges.EDIT_ELEMENTS,
            Privileges.ADMIN_EDIT_TOKEN
        ]

        _, token = self.token_parser.parse_args(required_any_token_privileges=required_privileges)

        full_dataset_url_prefix = "{}/{}".format(token_prefix, dataset_prefix)

        dataset = DatasetFactory(token).get_dataset(full_dataset_url_prefix)

        content = request.stream.read()
        DatasetElementFactory(token, dataset).edit_element(ObjectId(element_id), content=content)

        self.session.flush()

        return "Done", 200
Example 17
    def put(self, token_prefix, dataset_prefix):
        required_privileges = [
            Privileges.EDIT_ELEMENTS,
            Privileges.ADMIN_EDIT_TOKEN
        ]

        _, token = self.token_parser.parse_args(required_any_token_privileges=required_privileges)
        full_dataset_url_prefix = "{}/{}".format(token_prefix, dataset_prefix)

        content = request.stream.read()
        packet = PyZip().from_bytes(content)

        dataset = DatasetFactory(token).get_dataset(full_dataset_url_prefix)

        crafted_request = {ObjectId(k): {'content': v} for k, v in packet.items()}
        DatasetElementFactory(token, dataset).edit_elements(crafted_request)

        self.session.flush()

        return "Done", 200
Example 18
    def patch(self, token_prefix, dataset_prefix):
        required_privileges = [
            Privileges.EDIT_DATASET, Privileges.ADMIN_EDIT_TOKEN
        ]

        _, token = self.token_parser.parse_args(
            required_any_token_privileges=required_privileges)
        full_dataset_url_prefix = "{}/{}".format(token_prefix, dataset_prefix)

        kwargs = self.patch_parser.parse_args()

        if "tags" in request.json:
            kwargs['tags'] = request.json[
                'tags']  # fast fix for split-bug of the tags.

        kwargs = {k: v for k, v in kwargs.items() if v is not None}

        DatasetFactory(token).edit_dataset(full_dataset_url_prefix, **kwargs)

        self.session.flush()

        return "Done", 200
Example 19
    def get(self, token_prefix, dataset_prefix):
        """
        Retrieves dataset elements from a given dataset.
        Accepts parameters:
            page. It will strip the results to `global_config.get_page_size()` elements per page.
            elements. It will retrieve the info from the specified array of elements IDs rather than the page.
            options. result's find options.
        :return:
        """
        required_privileges = [
            Privileges.RO_WATCH_DATASET,
            Privileges.ADMIN_EDIT_TOKEN
        ]

        _, token = self.token_parser.parse_args(required_any_token_privileges=required_privileges)
        args = self.get_parser.parse_args()

        full_prefix = "{}/{}".format(token_prefix, dataset_prefix)

        dataset = DatasetFactory(token).get_dataset(full_prefix)

        try:
            page_size = int(args['page-size'])
        except ValueError as ex:
            page_size = None
            abort(400, message="The page-size must be an integer.")

        page = args['page']

        if 'options' in request.json:
            options = request.json['options']
        else:
            options = None

        elements_info = DatasetElementFactory(token, dataset).get_elements_info(page, options=options, page_size=page_size)

        result = [element.serialize() for element in elements_info]

        return result
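
A hypothetical client call for this listing endpoint; the base URL and route layout are assumptions and authentication is omitted, only the page, page-size and options parameters come from the handler above:

import requests  # hypothetical client; token handling is left out of this sketch

response = requests.get(
    "http://localhost:5000/user1/creator/elements",  # assumed route layout
    params={"page": 0, "page-size": 20},             # parsed by get_parser above
    json={"options": {}},                            # optional find options read from request.json
)
elements = response.json()                           # list of serialized elements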
Example 20
    def get(self, token_prefix, dataset_prefix):
        required_privileges = [
            Privileges.RO_WATCH_DATASET,
            Privileges.ADMIN_EDIT_TOKEN
        ]
        _, token = self.token_parser.parse_args(required_any_token_privileges=required_privileges)
        full_dataset_url_prefix = "{}/{}".format(token_prefix, dataset_prefix)

        kwargs = self.get_parser.parse_args()

        if 'elements' in request.json:
            kwargs['elements'] = request.json['elements']  # quick fix for the split-bug (same issue as with tags).

        elements = kwargs['elements']

        dataset = DatasetFactory(token).get_dataset(full_dataset_url_prefix)

        elements_content = DatasetElementFactory(token, dataset).get_elements_content([ObjectId(id) for id in elements])

        packet = PyZip(elements_content)

        return send_file(BytesIO(packet.to_bytes()), mimetype="application/octet-stream")
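
The handler above streams the requested contents back as a PyZip packet; a hypothetical client-side sketch for unpacking the response body (same pyzip import assumption as before):

from pyzip import PyZip  # assumed import path, as above

def unpack_contents(response_bytes):
    # Rebuild the packet from the raw response body and map element ids back to their bytes.
    packet = PyZip().from_bytes(response_bytes)
    return {element_id: content for element_id, content in packet.items()}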
Example 21
    def post(self, token_prefix, dataset_prefix):

        required_privileges = [
            Privileges.ADD_ELEMENTS,
            Privileges.ADMIN_EDIT_TOKEN
        ]

        _, token = self.token_parser.parse_args(required_any_token_privileges=required_privileges)
        args = self.post_parser.parse_args()

        full_prefix = "{}/{}".format(token_prefix, dataset_prefix)

        dataset = DatasetFactory(token).get_dataset(full_prefix)

        # The 'elements' key is required and validated by the post_parser, so it is
        # guaranteed to exist in the request JSON.
        elements_kwargs = request.json['elements']

        elements_created = DatasetElementFactory(token, dataset).create_elements(elements_kwargs)

        self.session.flush()

        result = [element.serialize() for element in elements_created]

        return result
Example 22
    def patch(self, token_prefix, dataset_prefix):

        required_privileges = [
            Privileges.EDIT_ELEMENTS,
            Privileges.ADMIN_EDIT_TOKEN
        ]

        _, token = self.token_parser.parse_args(required_any_token_privileges=required_privileges)
        args = self.post_parser.parse_args()

        full_prefix = "{}/{}".format(token_prefix, dataset_prefix)

        dataset = DatasetFactory(token).get_dataset(full_prefix)

        # The 'elements' key is required and validated by the post_parser, so it is
        # guaranteed to exist in the request JSON.
        elements_kwargs = request.json['elements']

        edited_elements = DatasetElementFactory(token, dataset).edit_elements({ObjectId(k): v for k, v in elements_kwargs.items()})

        self.session.flush()

        result = [element.serialize() for element in edited_elements]

        return result
Example 23
    def patch(self, token_prefix, dataset_prefix, element_id):
        required_privileges = [
            Privileges.EDIT_ELEMENTS,
            Privileges.ADMIN_EDIT_TOKEN
        ]

        _, token = self.token_parser.parse_args(required_any_token_privileges=required_privileges)

        kwargs = self.patch_parser.parse_args()

        if "tags" in request.json:
            kwargs['tags'] = request.json['tags']  # fast fix for split-bug of the tags.

        kwargs = {k:v for k, v in kwargs.items() if v is not None}

        full_dataset_url_prefix = "{}/{}".format(token_prefix, dataset_prefix)

        dataset = DatasetFactory(token).get_dataset(full_dataset_url_prefix)

        DatasetElementFactory(token, dataset).edit_element(ObjectId(element_id), **kwargs)

        self.session.flush()

        return "Done", 200
Example 24
    def test_dataset_creation(self):
        """
        Factory can create datasets
        :return:
        """
        anonymous = TokenDAO("Anonymous", 1, 1, "anonymous")
        watcher = TokenDAO("normal user", 1, 1, "user1")
        creator = TokenDAO("normal user privileged", 1, 1, "user1", privileges=Privileges.CREATE_DATASET)
        admin = TokenDAO("admin user", 1, 1, "admin", privileges=Privileges.ADMIN_CREATE_TOKEN + Privileges.ADMIN_EDIT_TOKEN + Privileges.ADMIN_DESTROY_TOKEN)

        # Anonymous or watcher can't create datasets
        with self.assertRaises(Unauthorized) as ex:
            dataset = DatasetFactory(anonymous).create_dataset(url_prefix="anonymous", title="Anonymous dataset", description="Dataset example anonymous", reference="Unknown")
        with self.assertRaises(Unauthorized) as ex:
            dataset = DatasetFactory(watcher).create_dataset(url_prefix="watcher", title="Watcher dataset", description="Dataset example watcher", reference="Unknown")

        # Creator can create a dataset
        dataset = DatasetFactory(creator).create_dataset(url_prefix="creator", title="Creator dataset", description="Dataset example creator", reference="Unknown")
        self.assertEqual(join_prefixes(creator.url_prefix, "creator"), dataset.url_prefix)
        self.assertEqual(dataset.description, "Dataset example creator")

        # Not all prefixes allowed (for example, "/" char is protected)
        illegal_chars = "/*;:,.ç´`+Ǩ^><¿?'¡¿!\"·$%&/()@~¬"

        for illegal_char in illegal_chars:
            with self.assertRaises(BadRequest) as ex:
                dataset = DatasetFactory(creator).create_dataset(url_prefix="creator{}da".format(illegal_char), title="Creator dataset", description="Dataset example creator", reference="Unknown")

        # Admin can create dataset
        dataset = DatasetFactory(admin).create_dataset(url_prefix="admin", title="Admin dataset", description="Dataset example admin", reference="Unknown")
        self.assertEqual(join_prefixes(admin.url_prefix, "admin"), dataset.url_prefix)
        self.assertEqual(dataset.description, "Dataset example admin")

        # Admin can create dataset on other's url prefixes
        dataset = DatasetFactory(admin).create_dataset(url_prefix="user1/admin", title="Admin dataset", description="Dataset example admin", reference="Unknown")
        self.assertEqual(join_prefixes(creator.url_prefix, "admin"), dataset.url_prefix)
        self.assertEqual(dataset.description, "Dataset example admin")

    def test_dataset_fork_element_modification(self):
        """
        Elements modified from a forked dataset do not affect the main dataset.
        :return:
        """
        editor = TokenDAO("normal user privileged with link", 100, 200, "user1",
                     privileges=Privileges.RO_WATCH_DATASET + Privileges.CREATE_DATASET + Privileges.EDIT_DATASET +
                                Privileges.ADD_ELEMENTS + Privileges.EDIT_ELEMENTS + Privileges.DESTROY_ELEMENTS
                 )

        main_dataset = DatasetFactory(editor).create_dataset(url_prefix="foobar", title="foo", description="bar",
                                                             reference="none", tags=["a"])

        editor = editor.link_dataset(main_dataset)

        elements_proto = [{
            'title': 't{}'.format(i),
            'description': 'desc{}'.format(i),
            'http_ref': 'none',
            'tags': ['none'],
            'content': "content{}".format(i).encode()
        } for i in range(4)]

        elements = [DatasetElementFactory(editor, main_dataset).create_element(**element_proto) for element_proto in elements_proto]
        self.session.flush()

        self.assertEqual(len(elements), len(main_dataset.elements))
        forked_dataset = DatasetFactory(editor).fork_dataset(main_dataset.url_prefix, editor, url_prefix="foo")
        editor.link_dataset(forked_dataset)
        self.session.flush()
        self.assertEqual(len(forked_dataset.elements), len(main_dataset.elements))
        DatasetElementFactory(editor, forked_dataset).destroy_element(forked_dataset.elements[0]._id)

        self.session.flush()
        self.assertEqual(len(forked_dataset.elements), len(main_dataset.elements)-1)
        self.assertEqual(forked_dataset.elements[0].title, "t1")

        DatasetElementFactory(editor, main_dataset).destroy_element(main_dataset.elements[1]._id)

        self.session.flush()
        self.assertEqual(len(forked_dataset.elements), len(main_dataset.elements))
        self.assertEqual(forked_dataset.elements[0].title, "t1")
        self.assertEqual(forked_dataset.elements[1].title, "t2")
        self.assertEqual(forked_dataset.elements[2].title, "t3")
        self.assertEqual(main_dataset.elements[0].title, "t0")
        self.assertEqual(main_dataset.elements[1].title, "t2")
        self.assertEqual(main_dataset.elements[2].title, "t3")

        DatasetElementFactory(editor, forked_dataset).edit_element(main_dataset.elements[2]._id, title='tc2')
        self.session.flush()

        self.assertEqual(main_dataset.elements[2].title, "t3")
        self.assertEqual(forked_dataset.elements[2].title, "tc2")

        DatasetElementFactory(editor, main_dataset).edit_element(main_dataset.elements[1]._id, title='tc')
        self.session.flush()

        self.assertEqual(main_dataset.elements[1].title, "tc")
        self.assertEqual(forked_dataset.elements[1].title, "t2")

        main_dataset = DatasetFactory(editor).create_dataset(url_prefix="foobar2", title="foo", description="bar",
                                                             reference="none", tags=["a"])

        editor.link_dataset(main_dataset)
        self.session.flush()

        elements_proto = [{
            'title': 't{}'.format(i),
            'description': 'desc{}'.format(i),
            'http_ref': 'none',
            'tags': ['none'],
            'content': "content{}".format(i).encode()
        } for i in range(4)]

        elements = [DatasetElementFactory(editor, main_dataset).create_element(**element_proto) for element_proto in elements_proto]
        self.session.flush()

        forked_dataset2 = DatasetFactory(editor).fork_dataset(main_dataset.url_prefix, editor, url_prefix="bar")
        self.session.flush()
        editor.link_dataset(forked_dataset2)
        self.session.flush()
        mod_proto = {
            main_dataset.elements[0]._id: dict(title="t3"),
            main_dataset.elements[1]._id: dict(title="t3_2"),
        }

        self.assertEqual(forked_dataset2.elements[0]._id, main_dataset.elements[0]._id)
        self.assertEqual(forked_dataset2.elements[1]._id, main_dataset.elements[1]._id)
        self.assertEqual(forked_dataset2.elements[2]._id, main_dataset.elements[2]._id)
        self.assertEqual(forked_dataset2.elements[3]._id, main_dataset.elements[3]._id)

        DatasetElementFactory(editor, main_dataset).edit_elements(mod_proto)
        self.session.flush()
        self.assertEqual(forked_dataset2.elements[0].title, "t0")
        self.assertEqual(forked_dataset2.elements[1].title, "t1")
        self.assertEqual(forked_dataset2.elements[2].title, "t2")
        self.assertEqual(forked_dataset2.elements[3].title, "t3")
        self.assertEqual(main_dataset.elements[0].title, "t3")
        self.assertEqual(main_dataset.elements[1].title, "t3_2")
        self.assertEqual(main_dataset.elements[2].title, "t2")
        self.assertEqual(main_dataset.elements[3].title, "t3")

        self.assertNotEqual(forked_dataset2.elements[0]._id, main_dataset.elements[0]._id)
        self.assertNotEqual(forked_dataset2.elements[1]._id, main_dataset.elements[1]._id)
        self.assertEqual(forked_dataset2.elements[2]._id, main_dataset.elements[2]._id)
        self.assertEqual(forked_dataset2.elements[3]._id, main_dataset.elements[3]._id)

        mod_proto = {
            forked_dataset2.elements[2]._id: dict(title="t4"),
            forked_dataset2.elements[3]._id: dict(title="t4_2"),
        }
        DatasetElementFactory(editor, forked_dataset2).edit_elements(mod_proto)

        self.assertNotEqual(forked_dataset2.elements[0]._id, main_dataset.elements[0]._id)
        self.assertNotEqual(forked_dataset2.elements[1]._id, main_dataset.elements[1]._id)
        self.assertNotEqual(forked_dataset2.elements[2]._id, main_dataset.elements[2]._id)
        self.assertNotEqual(forked_dataset2.elements[3]._id, main_dataset.elements[3]._id)
        self.assertEqual(forked_dataset2.elements[2].title, "t4")
        self.assertEqual(forked_dataset2.elements[3].title, "t4_2")
        self.assertEqual(main_dataset.elements[2].title, "t2")
        self.assertEqual(main_dataset.elements[3].title, "t3")