Example #1
    def create(self, folder_id=None, auto_upload=True, chunksize=100000, verbose=False):
        """Creates a new dataset.
        Args:
            folder_id (str, optional): ID of the shared folder that the dataset should be created within. If `None`,
                defaults to the user's My Reports folder.
            auto_upload: If True, automatically uploads the data used to create the dataset definition to the dataset.
                If False, simply creates the dataset but does not upload data to it.
            chunksize (int, optional): Number of rows to transmit to the server with each request when uploading.
            verbose: If True, prints status information about the dataset upload.
        """

        if folder_id is not None:
            self._folder_id = folder_id
        else:
            self._folder_id = ""

        # generate model of the dataset
        self.__build_model()

        # makes request to create the dataset
        response = datasets.create_multitable_dataset(connection=self._connection, body=self.__model)

        response_json = response.json()
        self._dataset_id = response_json['id']

        if verbose:
            print("Created dataset '{}' with ID: '{}'.".format(*[self._name, self._dataset_id]))

        # if desired, automatically upload and publish the data to the new dataset
        if auto_upload:
            self.update(chunksize=chunksize)
            self.publish()
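
Example #1 is a class method, so calling it presupposes a dataset object that already holds a connection and at least one staged table. A minimal usage sketch in Python follows; the Connection and Dataset constructors and the add_table signature are assumptions based on typical older mstrio usage and are not part of the snippet above:

import pandas as pd
from mstrio import microstrategy

# assumed setup: authenticate against the environment (URL and credentials are placeholders)
conn = microstrategy.Connection(base_url="https://example.com/MicroStrategyLibrary/api",
                                username="user", password="password", project_name="Project")
conn.connect()

# stage a small DataFrame as a single-table dataset
df = pd.DataFrame({"id": [1, 2, 3], "sales": [100.0, 250.5, 80.25]})
ds = microstrategy.Dataset(connection=conn, name="Sales Sample")  # constructor signature is an assumption
ds.add_table(name="sales", data_frame=df, update_policy="add")    # add_table signature is an assumption

# create in My Reports (folder_id=None), then upload and publish in 100,000-row chunks
ds.create(folder_id=None, auto_upload=True, chunksize=100000, verbose=True)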
Example #2
    def create(self,
               folder_id=None,
               auto_upload=True,
               chunksize=100000,
               progress_bar=True,
               verbose=False):
        """Creates a new dataset.
        Args:
            folder_id (str, optional): ID of the shared folder that the dataset should be created within. If `None`,
                defaults to the user's My Reports folder.
            auto_upload: If True, automatically uploads the data used to create the dataset definition to the dataset.
                If False, simply creates the dataset but does not upload data to it.
            chunksize (int, optional): Number of rows to transmit to the server with each request when uploading.
            progress_bar(bool, optional): If True (default), show the upload progress bar.
            verbose: If True, prints status information about the dataset upload.
        """

        if folder_id is not None:
            self._folder_id = folder_id
        else:
            self._folder_id = ""

        # generate model of the dataset
        self.__build_model()

        # makes request to create the dataset
        response = datasets.create_multitable_dataset(
            connection=self._connection, body=self.__model)

        if not response.ok:
            self.__response_handler(response=response,
                                    msg="Error creating new dataset model.")
        else:
            response_json = response.json()
            self._dataset_id = response_json['id']

            if verbose:
                print("Created dataset '{}' with ID: '{}'.".format(
                    *[self._name, self._dataset_id]))

        # if desired, automatically upload and publish the data to the new dataset
        if auto_upload:
            self.update(chunksize=chunksize, progress_bar=progress_bar)
            self.publish()

            # poll the publish status until the dataset reports status 1 (published)
            status = 6  # initial status placeholder; overwritten by the first poll
            while status != 1:
                pub = datasets.publish_status(connection=self._connection,
                                              dataset_id=self._dataset_id,
                                              session_id=self._session_id)
                if not pub.ok:
                    self.__response_handler(
                        response=pub, msg="Error publishing the dataset.")
                    break
                else:
                    pub = pub.json()
                    status = pub['status']
                    if status == 1:
                        break
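
Example #2 differs mainly in the progress_bar flag, the error handling around the create request, and the loop that polls the publish status after publish(). A short sketch of calling this variant in a non-interactive job, with the progress bar suppressed (object setup as in the sketch under Example #1):

# create the dataset quietly: no progress bar, no status prints
ds.create(folder_id=None,
          auto_upload=True,
          chunksize=50000,      # smaller chunks can help on constrained networks
          progress_bar=False,
          verbose=False)

Note that the publish-status loop in this variant polls the server in a tight loop; in practice inserting a short time.sleep() between polls would reduce request load.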
Example #3
    def create(self,
               folder_id: str = None,
               auto_upload: bool = True,
               auto_publish: bool = True,
               chunksize: int = 100000) -> None:
        """Create a new super cube and initialize cube object after successful
        creation. This function does not return new super cube, but it updates
        object inplace.

        Args:
            folder_id (str, optional): ID of the shared folder in which the
                super cube will be created. If `None`, defaults to the user's
                My Reports folder.
            auto_upload (bool, optional): If True, automatically uploads the
                data to the I-Server. If False, simply creates the super cube
                definition but does not upload data to it.
            auto_publish (bool, optional): If True, automatically publishes the
                data used to create the super cube definition. If False, simply
                creates the super cube but does not publish it. To publish the
                super cube, data has to be uploaded first.
            chunksize (int, optional): Number of rows to transmit to the
                I-Server with each request when uploading.
        """
        if auto_publish and not auto_upload:
            raise ValueError(
                "Data needs to be uploaded to the I-Server before the super cube can be published."
            )

        if folder_id is not None:
            self._folder_id = folder_id
        else:
            self._folder_id = ""

        # generate model of the super cube
        self.__build_model()

        # makes request to create the super cube
        response = datasets.create_multitable_dataset(
            connection=self._connection, body=self.__model)
        self._set_object(**response.json())

        if config.verbose:
            print("Created super cube '{}' with ID: '{}'.".format(
                *[self.name, self._id]))

        if auto_upload:
            self.update(chunksize=chunksize, auto_publish=auto_publish)

        # after creating super cube fetch definition and create filter object
        self._get_definition()
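
Example #3 belongs to the newer SuperCube class, updates the object in place, and raises a ValueError if auto_publish is requested without auto_upload. A minimal usage sketch; the import paths, SuperCube constructor, and add_table signature are assumptions based on typical current mstrio-py usage:

import pandas as pd
from mstrio.connection import Connection            # import paths are assumptions
from mstrio.project_objects import SuperCube

conn = Connection(base_url="https://example.com/MicroStrategyLibrary/api",
                  username="user", password="password", project_name="Project")

df = pd.DataFrame({"region": ["North", "South"], "revenue": [1200.0, 950.0]})

cube = SuperCube(connection=conn, name="Revenue Cube")  # constructor signature is an assumption
cube.add_table(name="revenue", data_frame=df, update_policy="replace")

# upload and publish in one call; create() returns None and updates the object in place
cube.create(folder_id=None, auto_upload=True, auto_publish=True, chunksize=100000)

As documented in the method itself, calling create(auto_upload=False, auto_publish=True) raises a ValueError, because data must be uploaded before it can be published.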
Example #4
    def create(self,
               folder_id: str = None,
               auto_upload: bool = True,
               auto_publish: bool = True,
               chunksize: int = 100000):
        """Creates a new dataset.

        Args:
            folder_id (str, optional): ID of the shared folder that the dataset
                should be created within. If `None`, defaults to the user's
                My Reports folder.
            auto_upload (bool, optional): If True, automatically uploads the
                data to the I-Server. If False, simply creates the dataset
                definition but does not upload data to it.
            auto_publish (bool, optional): If True, automatically publishes the
                data used to create the dataset definition. If False, simply
                creates the dataset but does not publish it. To publish the
                dataset, data has to be uploaded first.
            chunksize (int, optional): Number of rows to transmit to the
                I-Server with each request when uploading.
        """
        if auto_publish and not auto_upload:
            helper.exception_handler(
                "Data needs to be uploaded to the I-Server before the dataset can be published.",
                ValueError)

        if folder_id is not None:
            self._folder_id = folder_id
        else:
            self._folder_id = ""

        # generate model of the dataset
        self.__build_model()

        # makes request to create the dataset
        response = datasets.create_multitable_dataset(
            connection=self._connection, body=self.__model)

        response_json = response.json()
        self._dataset_id = response_json['id']

        if self.verbose:
            print("Created dataset '{}' with ID: '{}'.".format(
                *[self.name, self._dataset_id]))

        if auto_upload:
            self.update(chunksize=chunksize, auto_publish=auto_publish)
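
Example #4 exposes the same upload/publish split through auto_upload and auto_publish, but routes the precondition error through helper.exception_handler. A sketch of deferring the publish step, assuming the same Dataset object setup as under Example #1 and that the class exposes a publish() method (one is called in Examples #1 and #2):

# upload the data now, but hold off on publishing
ds.create(folder_id="ABC123FOLDERID",  # placeholder folder ID for illustration
          auto_upload=True,
          auto_publish=False,
          chunksize=100000)

# ...later, once the uploaded data has been verified, publish explicitly
ds.publish()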