    def test_restserversettings_auth(self):
        conn = microstrategy.Connection(base_url=self.BASE_URL, username=self.USERNAME,
                                        password=self.PASSWORD, project_name=self.PROJECT_NAME,
                                        auth=HTTPBasicAuth(self.BASIC_USERNAME, self.BASIC_PASSWORD),
                                        ssl_verify=False)
        conn.connect()
        response = restserversettings_auth(conn, verbose=True)
        print(response.content)

    def test_trust_delete_trustrelationship(self):
        conn = microstrategy.Connection(base_url=self.BASE_URL, username=self.USERNAME,
                                        password=self.PASSWORD, project_name=self.PROJECT_NAME,
                                        auth=HTTPBasicAuth(self.BASIC_USERNAME, self.BASIC_PASSWORD),
                                        ssl_verify=False)
        conn.connect()
        response = Trust(conn).delete_relationship()
        print(response.content)
Example #3
    def test_report(self, mock_get):

        conn = microstrategy.Connection(base_url=BASE_URL, username=USERNAME,
                                        password=PASSWORD, project_name=PROJECT_NAME)

        mock_get.return_value.status_code = 200

        response = reports.report_definition(conn, report_id=REPORT_ID)

        self.assertEqual(response.status_code, 200)
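# The `mock_get` parameter in the snippet above implies a @patch decorator and an
# enclosing TestCase that were dropped when the example was clipped. Below is a
# minimal sketch of that wiring; the patch target ('mstrio.api.reports.requests.get')
# and every constant are placeholders, not the original fixture, and the target
# depends on how the tested module imports requests in your mstrio version.
from unittest import TestCase
from unittest.mock import Mock, patch

from mstrio import microstrategy
from mstrio.api import reports


class ReportsRequestSketch(TestCase):

    # assumed patch target -- adjust to wherever the module under test resolves requests.get
    @patch('mstrio.api.reports.requests.get')
    def test_report_definition_status(self, mock_get):
        conn = microstrategy.Connection(base_url='https://example.com/MicroStrategyLibrary/api',
                                        username='user', password='pass',
                                        project_name='MicroStrategy Tutorial')
        mock_get.return_value = Mock(status_code=200)
        response = reports.report_definition(conn, report_id='0123456789ABCDEF')
        self.assertEqual(response.status_code, 200)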
Example #4
    def test_sessions(self, mock_put):

        conn = microstrategy.Connection(base_url=BASE_URL, username=USERNAME,
                                        password=PASSWORD, project_name=PROJECT_NAME)

        mock_put.return_value.status_code = 200

        response = authentication.sessions(conn)

        self.assertEqual(response.status_code, 200)
Example #5
    def test_report_instance(self, mock_post):

        conn = microstrategy.Connection(base_url=BASE_URL, username=USERNAME,
                                        password=PASSWORD, project_name=PROJECT_NAME)

        mock_post.return_value.status_code = 200

        response = reports.report_instance(conn, report_id=REPORT_ID, offset=OFFSET, limit=LIMIT)

        self.assertEqual(response.status_code, 200)
Example #6
    def test_compatibility(self):
        connection = microstrategy.Connection(base_url='http://mocked.url.com',
                                              username='******',
                                              password='******')
        tested_cube = cube.Cube(parallel=False,
                                connection=connection,
                                cube_id=self.cube_id)
        tested_cube.apply_filters(metrics=['7B708EF011EA4C20098D0080EFF5EA25'])
        tested_cube.to_dataframe()
        df = tested_cube.dataframe
        self.assertIsInstance(df, pd.DataFrame)
Example #7
    def test_delete_dataset(self, mock_delete):
        conn = microstrategy.Connection(base_url=BASE_URL,
                                        username=USERNAME,
                                        password=PASSWORD,
                                        project_name=PROJECT_NAME)

        mock_delete.return_value.status_code = 200

        response = datasets.delete_dataset(conn, dataset_id=DATASET_ID)

        self.assertEqual(response.status_code, 200)
Example #8
    def test_login(self, mock_post):
        conn = microstrategy.Connection(base_url=self.BASE_URL,
                                        username=self.USERNAME,
                                        password=self.PASSWORD,
                                        project_name=self.PROJECT_NAME)

        mock_post.return_value.status_code = 200

        response = authentication.login(conn)

        self.assertEqual(response.status_code, 200)
Example #9
    def test_cube_instance(self, mock_post):

        conn = microstrategy.Connection(base_url=BASE_URL,
                                        username=USERNAME,
                                        password=PASSWORD,
                                        project_name=PROJECT_NAME)

        mock_post.return_value.status_code = 200

        response = cubes.cube_instance(conn, cube_id=CUBE_ID)

        self.assertEqual(response.status_code, 200)
Example #10
    def test_projects(self, mock_get):

        conn = microstrategy.Connection(base_url=BASE_URL,
                                        username=USERNAME,
                                        password=PASSWORD,
                                        project_name=PROJECT_NAME)

        mock_get.return_value.status_code = 200

        response = projects.projects(conn, "test error msg")

        self.assertEqual(response.status_code, 200)
Example #11
    def test_create_dataset(self, mock_post):

        conn = microstrategy.Connection(base_url=BASE_URL,
                                        username=USERNAME,
                                        password=PASSWORD,
                                        project_name=PROJECT_NAME)

        mock_post.return_value.status_code = 200

        response = datasets.create_dataset(conn, body=JSON_BODY)

        self.assertEqual(response.status_code, 200)
Example #12
    def test_login_app_code_present(self, mock_post):
        """Checks that application code is present in request body."""
        conn = microstrategy.Connection(base_url=self.BASE_URL,
                                        username=self.USERNAME,
                                        password=self.PASSWORD,
                                        project_name=self.PROJECT_NAME)

        mock_post.return_value.status_code = 200
        response = authentication.login(conn)

        self.assertEqual(response.status_code, 200)
        self.assertEqual(mock_post.call_args[1]['data']["applicationType"],
                         self.APPCODE)
Example #13
    def test_logout(self, mock_post):
        """Validate logout method and that cookies are passed in request."""
        conn = microstrategy.Connection(base_url=self.BASE_URL,
                                        username=self.USERNAME,
                                        password=self.PASSWORD,
                                        project_name=self.PROJECT_NAME)
        conn.cookies = self.COOKIES

        mock_post.return_value.status_code = 200
        response = authentication.logout(conn)

        self.assertEqual(response.status_code, 200)
        self.assertEqual(mock_post.call_args[1]['cookies'], self.COOKIES)
Example #14
    def test_update_dataset(self, mock_patch):

        conn = microstrategy.Connection(base_url=BASE_URL,
                                        username=USERNAME,
                                        password=PASSWORD,
                                        project_name=PROJECT_NAME)

        mock_patch.return_value.status_code = 200

        response = datasets.update_dataset(conn,
                                           dataset_id=DATASET_ID,
                                           table_name=TABLE_NAME,
                                           update_policy=UPDATE_POLICY,
                                           body=JSON_BODY)

        self.assertEqual(response.status_code, 200)
Example #15
def push2cube():

    global value_list
    global tst_out
    global out_filename1
    global this_prefix

    usrname = entry11.get()
    passwd = entry12.get()

    print('\nStarting ' + '\x1b[6;30;42m' + 'PUSH DATAFRAME TO MSTR CUBE: ' +
          '\x1b[0m')
    blz.tic()

    datasetName = this_prefix + '_cube'
    tableName = this_prefix + '_table'
    cubeinfo_name = this_prefix + '_cubeinfo'

    # Authentication request and connect to the Rally Analytics project
    conn = microstrategy.Connection(base_url=baseURL,
                                    login_mode=16,
                                    username=usrname,
                                    password=passwd,
                                    project_name=projName)
    conn.connect()

    print("Connect to " + baseURL)

    # if the cube does not exist, acquire Data Set Id & Table Id, and create a new cube
    newDatasetId, newTableId = conn.create_dataset(data_frame=df_data2,
                                                   dataset_name=datasetName,
                                                   table_name=tableName)
    # Store Data Set Id and Table Id locally
    cubeInfoFile = open(cubeinfo_name, 'w')
    cubeInfoFile.write(newDatasetId + '\n')
    cubeInfoFile.write(newTableId)
    cubeInfoFile.close()
    print("CREATE Cube on URL: " + baseURL[:-25])
    print('[ Dataset Name: ' + datasetName + ' \ Cube ID = ' + newDatasetId +
          ']   [Table Name: ' + tableName + ' \ Table ID = ' + newTableId +
          ' ]')
    blz.toc()
    print(
        '\x1b[1;33m' +
        "Done with [Output to MSTR Cube for Dossier Reporting (without PA)]" +
        '\x1b[0m')
Example #16
    def test_login_app_code_dynamic(self, mock_post):
        """Tests that changing the app code works."""
        conn = microstrategy.Connection(base_url=self.BASE_URL,
                                        username=self.USERNAME,
                                        password=self.PASSWORD,
                                        project_name=self.PROJECT_NAME)

        app_code = 99
        conn.application_code = app_code

        mock_post.return_value.status_code = 200
        response = authentication.login(conn)

        self.assertEqual(response.status_code, 200)
        self.assertEqual(conn.application_code, app_code)
        self.assertEqual(mock_post.call_args[1]['data']["applicationType"],
                         app_code)
Example #17
# Important prerequisites: when using this script, you need to
# 1. Install the python package mstrio-py
# 2. Enable internet (Kaggle beta feature)

# mstr initialization
mstr_username = '******'  #changeme
mstr_password = '******'  #changeme
mstr_library_api_url = 'https://yourserver.com/MicroStrategyLibrary/api'  #changeme
mstr_project_name = 'MicroStrategy Tutorial'

import os
import pandas as pd  # data processing, CSV file I/O (e.g. pd.read_csv)
from mstrio import microstrategy  # MicroStrategy library for sending data to in-memory cubes

mstr_conn = microstrategy.Connection(base_url=mstr_library_api_url,
                                     username=mstr_username,
                                     password=mstr_password,
                                     project_name=mstr_project_name)
print('1. Connecting to MicroStrategy')
mstr_conn.connect()

# Loop through all datasets added to the Kaggle Kernel
print('2. Starting pushing datasets')
for dirname, dirnames, filenames in os.walk('../input'):
    # loop through all files included in the dataset
    print('3. Dataset: ' + dirname)
    for filename in filenames:
        print('  Filename: ' + os.path.join(dirname, filename))
        dataset_name = filename.replace('.csv', '')  # use the file name (without extension) as the cube name
        print('  Target Cube Name: ' + dataset_name)
        table_name = dataset_name
        # read the CSV into a DataFrame and push it as an in-memory cube
        df = pd.read_csv(os.path.join(dirname, filename))
        newDatasetId, newTableId = mstr_conn.create_dataset(
            data_frame=df, dataset_name=dataset_name, table_name=table_name)
Example #18
# mstr initialization
username = '******'  # ex: hchadeisson
password = '******'  # ex: password
base_url = 'PARAM3'  # ex: https://env-112094.customer.cloud.microstrategy.com/MicroStrategyLibrary/api
project_name = 'PARAM4'  # ex: MicroStrategy Tutorial

# -*- coding: utf-8 -*-
import dataiku
import pandas as pd, numpy as np
from mstrio import microstrategy  # MicroStrategy library for sending data to in-memory cubes
from dataiku import pandasutils as pdu

# Connect to MicroStrategy
conn = microstrategy.Connection(base_url=base_url,
                                username=username,
                                password=password,
                                project_name=project_name)
conn.connect()

# Read recipe inputs
revenue_prediction = dataiku.Dataset("revenue_prediction")
revenue_prediction_df = revenue_prediction.get_dataframe()

# Send Data to MicroStrategy
newDatasetId, newTableId = conn.create_dataset(
    data_frame=revenue_prediction_df,
    dataset_name="dataiku_prediction",
    table_name="dataiku_prediction")
Example #19
def run_query(str_query_type, str_table_name, str_url, str_app_token, str_user_name, str_password, str_webservice_id, str_cube_id, str_where_clause):

    import pandas as pd
    import datetime
    from sodapy import Socrata
    from dataservices.utils.config_funcs import read_config_value

    client = Socrata(str_url,
                    str_app_token,
                    username=str_user_name,
                    password=str_password
                    )

    print("	" + str_table_name + " query started at: " + str(datetime.datetime.now()))

    print("	  using query: '" + str_where_clause + "'")

    # query the webservice using the appropriate where clause
    results = client.get(str_webservice_id, where=str_where_clause, limit=99999999)
    client.close()

    # Convert results to pandas DataFrame
    df = pd.DataFrame.from_records(results)
    if len(df.index) > 0:

        # convert series to the correct types
        # right now mstrio does not import datetime values ... so use this formula in Mstr to create a new datetime attribute: ToDateTime(LeftStr(date@ID, 10))
        # df[["date"]] = df[["date"]].apply(pd.to_datetime)

        # print("columns grouped by data type: " + str(df.columns.to_series().groupby(df.dtypes).groups))

        print("	  connecting to MicroStrategy at: " + str(datetime.datetime.now()))

        from mstrio import microstrategy
        conn = microstrategy.Connection(
            base_url=read_config_value('MSTR_LIBRARY', 'rest_api_url'),
            username=read_config_value('MSTR_LIBRARY', 'username'),
            password=read_config_value('MSTR_LIBRARY', 'password'),
            project_name=read_config_value('MSTR_LIBRARY', 'project'))
        conn.connect()

        df = pd.DataFrame(df, columns=df.columns.values)

        # query mstrio differently depending on the type of update
        if str_query_type == "create":  # creates should use the create_dataset method

            new_dataset_id, newTableId = conn.create_dataset(data_frame=df, dataset_name=str_table_name,
                                                             table_name=str_table_name)

        elif str_query_type == "update" or str_query_type == "scheduled":  # updates or scheduled should use the update method

            conn.update_dataset(data_frame=df, dataset_id=str_cube_id, table_name=str_table_name, update_policy='add')
            new_dataset_id = str_cube_id

        else:

            conn.close()
            print("	mstrio not updated, was this in error??")
            return ""

        conn.close()
        print("	  MicroStrategy update completed at: " + str(datetime.datetime.now()))
        return str(new_dataset_id)

    else:

        print("    query returned no results")
        return ""
Example #20
from tkinter import *
from PIL import ImageTk, Image
from mstrio import microstrategy

ANIMAL_STATS_CUBE_ID = '13D00F8811E9328F86670080EFA5E658'
ANIMAL_WORDS_CUBE_ID = 'A83DFB8611E932A783C60080EF554555'

# Establish connection to MicroStrategy API
conn = microstrategy.Connection(base_url="https://env-132697.customer.cloud.mic\
rostrategy.com/MicroStrategyLibrary/api"                                        , username="******", \
password="******", project_name="MicroStrategy Tutorial")
conn.connect()


def get_query(query):
    global consta
    # Check for empty String
    if len(query) == 0:
        return False

    conn.connect()

    # Get first cube
    asc_df = conn.get_cube(cube_id=ANIMAL_STATS_CUBE_ID)
    # Check if any results for query
    res = asc_df.loc[asc_df['Animal'] == query.title()]

    if res.empty:
        print("No results for that query")
        return False
    else:
def push2cube_nopa():

    global value_list
    global tst_out
    global out_filename1
    global isLDAP
    global this_prefix

    FMT1 = '%Y-%m-%d %H:%M:%S'

    print('\nStarting ' + '\x1b[6;30;42m' +
          'PUSH DATAFRAME TO MSTR CUBE (WO. PA): ' + '\x1b[0m')
    blz.tic()

    df_cube = pd.read_csv(out_filename1)
    df_cube['PID'] = df_cube['PID'].apply(str)

    # datasetName = this_prefix + 'err_n' + value_list[6] + '_nopa'
    # tableName = this_prefix + 'ErrorRank_n' + value_list[6] + '_nopa'
    # cubeinfo_name = 'Cube Info_' + this_prefix + 'n' + value_list[6] + '_nopa.txt'
    # datasetName0 = this_prefix + 'cube' + value_list[3] + '_n' + value_list[6] + '_nopa'

    datasetName = this_prefix + 'err_nopa'
    tableName = this_prefix + 'ErrorRank_nopa'
    cubeinfo_name = 'Cube Info_' + this_prefix + '_nopa.txt'
    datasetName0 = this_prefix + 'cube' + value_list[3] + '_nopa'

    # Authentication request and connect to the Rally Analytics project
    # is LDAP login (1) or standard user (0)
    if isLDAP == 1:
        conn = microstrategy.Connection(base_url=baseURL,
                                        login_mode=16,
                                        username=value_list[0],
                                        password=value_list[1],
                                        project_name=projName)
    else:
        conn = microstrategy.Connection(base_url=baseURL,
                                        username=value_list[0],
                                        password=value_list[1],
                                        project_name=projName)
    conn.connect()

    print("Connect to " + baseURL)

    # Create a new cube or use the existing cube
    if var1.get() == 1:
        # if the cube does not exist, acquire Data Set Id & Table Id, and create a new cube
        newDatasetId, newTableId = conn.create_dataset(
            data_frame=df_cube, dataset_name=datasetName, table_name=tableName)
        # Store Data Set Id and Table Id locally
        cubeInfoFile = open(cubeinfo_name, 'w')
        cubeInfoFile.write(newDatasetId + '\n')
        cubeInfoFile.write(newTableId)
        cubeInfoFile.close()
        print("CREATE Cube on URL: " + baseURL[:-25])
        print('[ Dataset Name: ' + datasetName + ' \ Cube ID = ' +
              newDatasetId + ']   [Table Name: ' + tableName +
              ' \ Table ID = ' + newTableId + ' ]')
    else:
        # Read saved cube IDs

        cubeInfoFile = open(cubeinfo_name, 'r')
        datasetID = cubeInfoFile.read().splitlines()
        cubeInfoFile.close()
        # Establish cube connection
        conn.update_dataset(data_frame=df_cube,
                            dataset_id=datasetID[0],
                            table_name=tableName,
                            update_policy='add')
        print("UPDATE Cube on URL: " + baseURL[:-25])
        print("Dataset Name " + datasetName + "[Cube ID: " + datasetID[0] +
              "   Table Name: " + tableName + "]")

    print("CREATE a backup cube: " + datasetName0)
    newDatasetId0, newTableId0 = conn.create_dataset(data_frame=df_cube,
                                                     dataset_name=datasetName0,
                                                     table_name=tableName)
    blz.toc()
    print(
        '\x1b[1;33m' +
        "Done with [Output to MSTR Cube for Dossier Reporting (without PA)]" +
        '\x1b[0m')
def push2cube_pa():

    # global value_list
    global df_final
    global baseURL
    global projName
    global out_filename0

    print('\nStarting ' + '\x1b[6;30;42m' +
          'PUSH DATAFRAME TO MSTR CUBE (W. PA): ' + '\x1b[0m')
    blz.tic()
    df_cube = pd.read_csv(out_filename0)
    df_cube['PID'] = df_cube['PID'].apply(str)

    datasetName = 'DemoTest_n' + value_list[6] + '_pa'
    tableName = 'ErrorRank_demo_n' + value_list[6] + '_pa'
    cubeinfoName = 'demoInfo_n' + value_list[6] + '_pa.txt'
    datasetName0 = 'DemoTest_' + value_list[3] + '_n' + value_list[6] + '_pa'

    isNewCube = False
    if value_list[2] == '':
        isNewCube = True

    # Authentication request and connect to the Rally Analytics project
    conn = microstrategy.Connection(base_url=baseURL,
                                    login_mode=16,
                                    username=value_list[0],
                                    password=value_list[1],
                                    project_name=projName)
    conn.connect()

    print("Connect to " + baseURL)

    if var1.get() == 1:
        # if the cube does not exist, acquire Data Set Id & Table Id, and create a new cube
        newDatasetId, newTableId = conn.create_dataset(
            data_frame=df_cube, dataset_name=datasetName, table_name=tableName)

        # Store Data Set Id and Table Id locally
        cubeInfoFile = open(cubeinfoName, 'w')
        cubeInfoFile.write(newDatasetId + '\n')
        cubeInfoFile.write(newTableId)
        cubeInfoFile.close()
        print("CREATE Cube on URL: " + baseURL[:-25])
        print('[ Dataset Name: ' + datasetName + ' \ Cube ID = ' +
              newDatasetId + ']   [Table Name: ' + tableName +
              ' \ Table ID = ' + newTableId + ' ]')
    else:
        # Read saved cube IDs
        cubeInfoFile = open(cubeinfoName, 'r')
        datasetID = cubeInfoFile.read().splitlines()
        cubeInfoFile.close()
        # Establish cube connection
        conn.update_dataset(data_frame=df_cube,
                            dataset_id=datasetID[0],
                            table_name=tableName,
                            update_policy='add')
        print("UPDATE Cube on URL: " + baseURL[:-25])
        print("Dataset Name " + datasetName + "[Cube ID: " + datasetID[0] +
              "   Table Name: " + tableName + "]")

    print("CREATE a backup cube: " + datasetName0)
    newDatasetId0, newTableId0 = conn.create_dataset(data_frame=df_cube,
                                                     dataset_name=datasetName0,
                                                     table_name=tableName)
    blz.toc()
    print('\x1b[1;33m' +
          "Done with [Output to MSTR Cube for Dossier Reporting (with PA)]" +
          '\x1b[0m')