def start_build(self, build_type, *, server_address=None, orchestrator_task=None):
    """Trigger an ElastiCube build (Windows only).

    For Linux deployments use the data model build APIs instead.

    Args:
        build_type (str): The build type (SchemaChanges, Accumulate, or Entire)
        server_address (str): (Optional) The server address of the ElastiCube.
            Set this to your server ip if this method fails without it set.
        orchestrator_task (str): (Optional) The orchestrator task
    """
    PySenseUtils.validate_version(self.py_client, SisenseVersion.Version.WINDOWS, 'start_build')
    address = server_address or self.server_address
    endpoint = 'api/elasticubes/{}/{}/startBuild'.format(address, self.get_title(url_encoded=True))
    self.py_client.connector.rest_call(
        'post', endpoint,
        query_params={'type': build_type, 'orchestratorTask': orchestrator_task})
def import_schema(self, path, *, title=None, target_data_model=None):
    """Import a schema (smodel) file from disk.

    Sisense does not support this in Windows.

    Can be used to update an existing data model by passing the model to
    update as target_data_model.

    To add a new model with a new title: import_schema(path, title='New Title')
    To update an existing model: import_schema(path, target_data_model=old_data_model)

    If updating an existing data model, no modifications to title will happen.

    Args:
        path: The path to the schema smodel file
        title: (Optional) Title to give the data model
        target_data_model: (Optional) The data model to update.
    """
    PySenseUtils.validate_version(self, SisenseVersion.Version.LINUX, 'import_schema')
    target_id = None
    if target_data_model is not None:
        target_id = target_data_model.get_oid()
    resp_json = self.connector.rest_call(
        'post', 'api/v2/datamodel-imports/schema',
        query_params={'title': title, 'datamodelId': target_id},
        json_payload=PySenseUtils.read_json(path))
    return PySenseDataModel.DataModel(self, resp_json)
def start_build(self, build_type, *, row_limit=None):
    """Kick off a build of this data model (Linux only).

    Args:
        build_type (str): Type of build (schema_changes, by_table, full, publish)
        row_limit (int): (Optional) Number of rows to build

    Returns:
        BuildTask: The build task object for the build

    Raises:
        PySenseException: If build_type is not a recognized build type
    """
    PySenseUtils.validate_version(self.py_client, SisenseVersion.Version.LINUX, 'start_build')
    normalized_type = build_type.lower()
    if normalized_type not in ('schema_changes', 'by_table', 'full', 'publish'):
        raise PySenseException.PySenseException('Unsupported build type {}'.format(normalized_type))
    payload = {'datamodelId': self.get_oid(), 'buildType': normalized_type}
    if row_limit is not None:
        payload['rowLimit'] = row_limit
    resp_json = self.py_client.connector.rest_call('post', 'api/v2/builds', json_payload=payload)
    return PySenseBuildTask.BuildTask(self.py_client, resp_json)
def get_data_models(self, *, title=None, fields=None, sort=None, limit=None, skip=None):
    """Gets data model schemas

    Linux Only

    If fields is specified, PySense may experience issues.

    To get all data models: get_data_models()
    To get a data model called PySense: get_data_models(title='PySense')

    Args:
        title (str): (Optional) Datamodel Title to search for
        fields (list[str]): (Optional) A whitelist of fields to return for each object in the response.
        sort (str): (Optional) A field by which the results should be sorted.
            Results will be sorted in ascending order by default, or descending
            if the field name is prefixed by -.
        limit (int): (Optional) Number of results to be returned from the data set.
            This field must be used with the skip parameter, and is intended for paging.
        skip (int): (Optional) Number of results to skip from the start of the data set.
            This parameter must be used with the limit parameter, and is intended for paging.

    Returns:
        list[DataModel]: The data models found, or None if a title was given
            and nothing matched
    """
    PySenseUtils.validate_version(self, SisenseVersion.Version.LINUX, 'get_data_models')
    query_params = {
        'title': title,
        'fields': fields,
        'sort': sort,
        'limit': limit,
        'skip': skip
    }
    data_models = self.connector.rest_call('get', 'api/v2/datamodels/schema', query_params=query_params)
    if title is not None:
        # NOTE(review): the whole response is handed to DataModel and wrapped in a
        # one-element list — presumably the endpoint returns a single schema object
        # (not an array) when filtered by title; confirm against the API response.
        if data_models is not None and len(data_models) > 0:
            return [PySenseDataModel.DataModel(self, data_models)]
        else:
            return None
    else:
        # No title filter: response is a list of schema objects, wrap each one.
        ret_arr = []
        for data_model in data_models:
            ret_arr.append(PySenseDataModel.DataModel(self, data_model))
        return ret_arr
def delete_data_models(self, data_models):
    """Deletes the given data models

    Args:
        data_models: One to many data models to delete
    """
    PySenseUtils.validate_version(self, SisenseVersion.Version.LINUX, 'delete_data_model')
    for model in PySenseUtils.make_iterable(data_models):
        endpoint = 'api/v2/datamodels/{}'.format(model.get_oid())
        self.connector.rest_call('delete', endpoint)
def cancel_build(self):
    """Cancels all builds for this data model.

    Only supported on Linux.
    """
    PySenseUtils.validate_version(self.py_client, SisenseVersion.Version.LINUX, 'cancel_build')
    self.py_client.connector.rest_call(
        'delete', 'api/v2/builds', query_params={'datamodelId': self.get_oid()})
def get_created_date(self):
    """Returns the creation datetime of the dashboard

    Returns:
        datetime: The created datetime
    """
    created = self.json['created']
    return PySenseUtils.sisense_time_to_python(created)
def authenticate_by_file(config_file):
    """Authenticate against Sisense using a YAML config file.

    Do not call directly. Call from PySense.

    The file must contain 'host' and 'version', plus either 'token' or a
    'username'/'password' pair. Optional keys: 'debug' (defaults to False)
    and 'verify' (defaults to True).

    Args:
        config_file (str): Path to the YAML configuration file

    Returns:
        The client returned by authenticate_by_password or authenticate_by_token.
    """
    with open(config_file, 'r') as yml_file:
        cfg = yaml.safe_load(yml_file)
    # dict.get with a default replaces the 'x' if 'x' in cfg else d pattern.
    debug = cfg.get('debug', False)
    verify = cfg.get('verify', True)
    token = cfg.get('token')
    host = PySenseUtils.format_host(cfg['host'])
    if token is None:
        # No token supplied: fall back to username/password authentication.
        return authenticate_by_password(host, cfg['username'], cfg['password'], cfg['version'],
                                        debug=debug, verify=verify, param_dict=cfg)
    else:
        return authenticate_by_token(host, token, cfg['version'],
                                     debug=debug, verify=verify, param_dict=cfg)
def remove_shares(self, shares):
    """Unshare a cube from the given groups and users.

    To unshare a cube we have to:
    - Query for whom the cube is currently shared with
    - Delete the users/groups we want to unshare with
    - Re upload the reduced share

    Args:
        shares (list[Group,User]): Users and groups to unshare the cube to

    Raises:
        PySenseException: If a share has no id
    """
    remaining_shares = self.get_shares_json()
    remaining_ids = [entry['partyId'] for entry in remaining_shares]
    for share in PySenseUtils.make_iterable(shares):
        share_id = share.get_id()
        if share_id is None:
            raise PySenseException.PySenseException('No id found for {}'.format(share))
        if share_id in remaining_ids:
            # Drop the matching entry, keeping both lists index-aligned.
            idx = remaining_ids.index(share_id)
            del remaining_shares[idx]
            del remaining_ids[idx]
    self.py_client.connector.rest_call(
        'put',
        'api/elasticubes/{}/{}/permissions'.format(
            self.server_address, self.get_title(url_encoded=True)),
        json_payload=remaining_shares)
def get_connections(self, *, provider=None, sort=None, skip=None, limit=None):
    """Returns all the connections

    Args:
        provider (list[str]): Type or list of types to filter for
        sort (str): Field by which the results should be sorted.
            Ascending by default, descending if prefixed by -
        skip (int): Number of results to skip from the start of the data set.
            Skip is to be used with the limit parameter for paging
        limit (int): How many results should be returned.
            limit is to be used with the skip parameter for paging
    """
    providers = PySenseUtils.make_iterable(provider)
    resp_json = self.connector.rest_call(
        'get', 'api/v1/connection',
        query_params={'sort': sort, 'skip': skip, 'limit': limit})
    connections = [PySenseConnection.Connection(self, item) for item in resp_json]
    if len(providers) == 0:
        # No provider filter requested: return everything.
        return connections
    return [conn for conn in connections if conn.get_provider() in providers]
def import_dashboards(self, path, *, action='overwrite', republish=True):
    """Import dashboard file from path

    Can be used to update an existing dashboard.

    Args:
        path (str): The path to the dash file
        action (str): Determines if the dashboard should be overwritten
        republish (bool): Whether to republish after import

    Returns:
        list[Dashboard]: The newly added dashboards
    """
    dash_json = PySenseUtils.read_json(path)
    # The bulk endpoint expects an array, so wrap a single dashboard json.
    payload = dash_json if isinstance(dash_json, list) else [dash_json]
    result_json = self.connector.rest_call(
        'post', 'api/v1/dashboards/import/bulk',
        query_params={'action': action, 'republish': republish},
        json_payload=payload)
    # 'succeded' [sic] is the exact key returned by the server.
    return [PySenseDashboard.Dashboard(self, item) for item in result_json["succeded"]]
def get_last_update(self):
    """Get the plugins last update time.

    Returns:
        datetime: The time the plugin was last updated
    """
    last_update = self.json['lastUpdate']
    return PySenseUtils.sisense_time_to_python(last_update)
def get_last_login(self):
    """Returns the time the user last logged in.

    Returns:
        datetime: The datetime the user last logged in
    """
    last_login = self.json['lastLogin']
    return PySenseUtils.sisense_time_to_python(last_login)
def delete_groups(self, groups):
    """Delete groups.

    Args:
        groups (list[Group]): Groups to delete
    """
    for group in PySenseUtils.make_iterable(groups):
        endpoint = 'api/groups/{}'.format(group.get_id())
        self.connector.rest_call('delete', endpoint)
def delete_connections(self, connections):
    """Deletes the given PySense connections

    Args:
        connections (list[Connection]): The connections to delete
    """
    for connection in PySenseUtils.make_iterable(connections):
        endpoint = 'api/v1/connection/{}'.format(connection.get_oid())
        self.connector.rest_call('delete', endpoint)
def restart_cube(self, *, server_address=None):
    """Restart the ElastiCube (Windows only).

    Args:
        server_address (str): (Optional) The server address of the ElastiCube.
            Set this to your server ip if this method fails without it set.
    """
    PySenseUtils.validate_version(self.py_client, SisenseVersion.Version.WINDOWS, 'restart_cube')
    address = server_address or self.server_address
    endpoint = 'api/elasticubes/{}/{}/restart'.format(address, self.get_title(url_encoded=True))
    self.py_client.connector.rest_call('post', endpoint)
def delete_users(self, users):
    """Deletes the specified users

    Args:
        users: Users to delete
    """
    for user in PySenseUtils.make_iterable(users):
        endpoint = 'api/v1/users/{}'.format(user.get_id())
        self.connector.rest_call('delete', endpoint)
def add_share(self, shares, rule, subscribe, *, share_cube=True, admin_access=None):
    """Share a dashboard to a new group or user.

    If the dashboard is already shared with the user or group, nothing happens.

    By default gives query permission to the cube as well.
    Set share_cube to false to not update cube shares.

    Args:
        shares (list[Group,User]): One to many PySense Groups or Users
        rule (str): The permission of the user on the dashboard (view, edit, etc)
        subscribe (bool): Whether to subscribe the user to reports
        share_cube (bool): (Optional) If set to false user will not get dashboard results.
        admin_access (bool): (Optional) Set to true if logged in as admin and
            getting unowned dashboard
    """
    curr_shares = self.get_shares_json(admin_access=admin_access)
    existing_ids = {entry['shareId'] for entry in curr_shares['sharesTo']}
    for share in PySenseUtils.make_iterable(shares):
        share_id = share.get_id()
        # Skip anything already shared (including duplicates within `shares`).
        if share_id in existing_ids:
            continue
        if isinstance(share, PySenseUser.User):
            party_type = 'user'
        elif isinstance(share, PySenseGroup.Group):
            party_type = 'group'
        else:
            # Only User and Group instances can be shared to.
            continue
        curr_shares['sharesTo'].append({
            'shareId': share_id,
            'type': party_type,
            'rule': rule,
            'subscribe': subscribe
        })
        existing_ids.add(share_id)
    self.py_client.connector.rest_call(
        'post', 'api/shares/dashboard/{}'.format(self.get_oid()),
        json_payload=curr_shares, query_params={'adminAccess': admin_access})
    if share_cube:
        self.get_datasource().add_share(shares)
def add_data_model(self, data_model, *, title=None, target_data_model=None):
    """Adds a new data model to the instance.

    Sisense does not support this in Windows.

    Can be used to update an existing data model by passing the model to
    update as target_data_model.

    To add a new model with a new title: add_data_model(model_to_add, title='New Title')
    To update an existing model: add_data_model(new_data_model, target_data_model=old_data_model)

    If updating an existing data model, no modifications to title will happen.

    Args:
        data_model (DataModel): The PySense DataModel object to import
        title (str): (Optional) Title to give the data model
        target_data_model (DataModel): (Optional) The data model to update.

    Returns:
        DataModel: The newly added data model
    """
    PySenseUtils.validate_version(self, SisenseVersion.Version.LINUX, 'add_data_model')
    target_id = None
    if target_data_model is not None:
        target_id = target_data_model.get_oid()
    resp_json = self.connector.rest_call(
        'post', 'api/v2/datamodel-imports/schema',
        query_params={'title': title, 'datamodelId': target_id},
        json_payload=data_model.get_schema_json())
    return PySenseDataModel.DataModel(self, resp_json)
def delete_widget(self, widgets):
    """Deletes widgets from its dashboard.

    Args:
        widgets (list[Widget]): Widgets to delete
    """
    for widget in PySenseUtils.make_iterable(widgets):
        endpoint = 'api/v1/dashboards/{}/widgets/{}'.format(self.get_oid(), widget.get_oid())
        self.py_client.connector.rest_call('delete', endpoint)
    # Refresh cached dashboard state after the deletions.
    self._reset()
def remap_field(self, old_table, old_column, new_table, new_column):
    """Remaps all widgets and filters from the old table.old column to new table.new column

    Does not work with date fields

    Args:
        old_table (str): The old table name
        old_column (str): The old column name
        new_table (str): The new table name
        new_column (str): The new column name
    """
    # Remap every widget first, then the dashboard-level filters.
    for widget in self.get_widgets():
        widget.remap_field(old_table, old_column, new_table, new_column)
    dashboard_filters = self.json['filters']
    for dashboard_filter in dashboard_filters:
        PySenseUtils.update_jaql(old_table, old_column, new_table, new_column,
                                 dashboard_filter['jaql'])
    self.py_client.connector.rest_call(
        'patch', 'api/v1/dashboards/{}'.format(self.get_oid()),
        json_payload={"filters": dashboard_filters})
def get_last_opened(self):
    """Returns the last opened time for the dashboard

    Returns:
        datetime: The last opened datetime, or None if the dashboard json
            has no 'lastOpened' entry
    """
    if 'lastOpened' not in self.json:
        return None
    return PySenseUtils.sisense_time_to_python(self.json['lastOpened'])
def add_user(self, users):
    """Adds users to group.

    Args:
        users (list[User]): The users to add to the group
    """
    user_ids = [user.get_id() for user in PySenseUtils.make_iterable(users)]
    self.py_client.connector.rest_call(
        'post', 'api/groups/{}/users'.format(self.get_id()), json_payload=user_ids)
def remove_user(self, users):
    """Remove users from group

    Args:
        users (list[User]): Users to remove from the group
    """
    user_ids = [user.get_id() for user in PySenseUtils.make_iterable(users)]
    self.py_client.connector.rest_call(
        'delete', 'api/groups/{}/users'.format(self.get_id()), json_payload=user_ids)
def export_to_sdata(self, path):
    """Download data model as an sdata file.

    Only supported on Linux.

    Args:
        path (str): Path to save location of the sdata file.

    Returns:
        str: The path of the created file
    """
    PySenseUtils.validate_version(self.py_client, SisenseVersion.Version.LINUX, 'export_to_sdata')
    # raw=True streams the response body straight to the given path.
    self.py_client.connector.rest_call(
        'get', '/api/v2/datamodel-exports/stream/full',
        query_params={'datamodelId': self.get_oid()}, path=path, raw=True)
    return path
def delete_blox_actions(self, actions):
    """Deletes the blox actions

    Args:
        actions (list[BloxAction]): Actions to delete
    """
    for action in PySenseUtils.make_iterable(actions):
        # Deletion is keyed by the action's type, sent as a post payload.
        self.connector.rest_call(
            'post', 'api/v1/deleteCustomAction/Blox',
            json_payload={"type": action.get_type()})
def remap_field(self, old_table, old_column, new_table, new_column):
    """Remaps all usages of old_table and old_column in a widget to new_table and new_column respectively

    Walks every panel in the widget metadata, rewrites the jaql entries in
    place via PySenseUtils.update_jaql, then patches the widget on the server.

    Args:
        old_table: The old table name
        old_column: The old column name
        new_table: The new table name
        new_column: The new column name
    """
    panels = self.json["metadata"]["panels"]
    for panel in panels:
        items = panel["items"]
        if panel["name"] == "rows":
            for item in items:
                if "jaql" in item:
                    PySenseUtils.update_jaql(old_table, old_column, new_table, new_column, item["jaql"])
                # Row items may also carry a bracketed field id, which is
                # rewritten to "[new_table.new_column]" whenever present.
                if "field" in item and "id" in item["field"]:
                    item["field"]["id"] = "[{}.{}]".format(new_table, new_column)
        if panel["name"] == "values":
            # Value items keep their jaql entries inside a 'context' mapping.
            for item in items:
                if "jaql" in item and "context" in item["jaql"]:
                    for context in item["jaql"]["context"]:
                        PySenseUtils.update_jaql(old_table, old_column, new_table, new_column,
                                                 item["jaql"]["context"][context])
        if panel["name"] in ["columns", "filters"]:
            for item in items:
                if "jaql" in item:
                    PySenseUtils.update_jaql(old_table, old_column, new_table, new_column, item["jaql"])
    # Push the mutated metadata back to the server.
    update_json = {"metadata": self.json["metadata"]}
    self.py_client.connector.rest_call('patch', 'api/v1/dashboards/{}/widgets/{}'
                                       .format(self.get_dashboard_id(), self.get_oid()),
                                       json_payload=update_json)
def __init__(self, host, token, debug, verify):
    """Initialize a REST connector.

    Args:
        host (str): The host string, the base of the url to call
        token (JSON): The authorization header
        debug (bool): Whether to print debug messages about rest requests
        verify (bool): Whether to use SSL Certificate Verification
    """
    # Normalize the host once so every later rest call can use it directly.
    self.host = PySenseUtils.format_host(host)
    self.debug = debug
    self.verify = verify
    self.token = token
def get_data_model(self):
    """Returns the data model object for the cube.

    Linux only.

    Returns:
        DataModel: The data model for the cube
    """
    PySenseUtils.validate_version(self.py_client, SisenseVersion.Version.LINUX, 'get_model')
    resp_json = self.py_client.connector.rest_call(
        'get', 'api/v2/datamodel-exports/schema',
        query_params={'datamodelId': self.get_oid(), 'type': 'schema-latest'})
    # Attach the cube's oid to the exported schema before wrapping it.
    resp_json['oid'] = self.get_oid()
    return PySenseDataModel.DataModel(self.py_client, resp_json)
def set_connection(self, connection):
    """Sets a new connection

    Replaces the current connection with the new one.
    Use to change the source of a table.

    Args:
        connection (Connection): The new Connection object to set for this data set
    """
    connection_json = connection.json
    # Strip identity/audit fields from the connection json before sending.
    PySenseUtils.strip_json(connection_json, ['id', '_id', 'owner', 'lastUpdated'])
    payload = {
        'name': self.get_full_name(),
        'type': self.get_type(),
        'connection': connection_json
    }
    endpoint = 'api/v2/datamodels/{}/schema/datasets/{}'.format(
        self.data_model.get_oid(), self.get_oid())
    self.py_client.connector.rest_call('patch', endpoint, json_payload=payload)
    self._sync_data_set()