Esempio n. 1
0
    def test_can_save_tds(self):
        """Saving a .tds in place persists a changed connection dbname."""
        path = self.tds_file.name
        datasource = Datasource.from_file(path)
        datasource.connections[0].dbname = 'newdb.test.tsi.lan'
        datasource.save()

        reloaded = Datasource.from_file(path)
        self.assertEqual(reloaded.connections[0].dbname, 'newdb.test.tsi.lan')
    def test_can_save_tds(self):
        """An in-place save() round-trips an edited dbname."""
        edited = Datasource.from_file(self.tds_file.name)
        edited.connections[0].dbname = 'newdb'
        edited.save()

        # Re-open the same file and confirm the change stuck.
        result = Datasource.from_file(self.tds_file.name)
        self.assertEqual(result.connections[0].dbname, 'newdb')
Esempio n. 3
0
    def test_can_open_tdsx_and_save_changes(self):
        """Edits to a .tdsx connection persist after an in-place save."""
        packaged = Datasource.from_file(self.tdsx_file.name)
        packaged.connections[0].server = 'newdb.test.tsi.lan'
        packaged.save()

        reloaded = Datasource.from_file(self.tdsx_file.name)
        self.assertEqual(reloaded.connections[0].server, 'newdb.test.tsi.lan')
    def test_can_open_tdsx_and_save_changes(self):
        """A .tdsx can be opened, edited and saved back in place."""
        tdsx = Datasource.from_file(self.tdsx_file.name)
        tdsx.connections[0].server = 'newdb'
        tdsx.save()

        result = Datasource.from_file(self.tdsx_file.name)
        self.assertEqual(result.connections[0].server, 'newdb')
Esempio n. 5
0
    def test_can_open_tdsx_and_save_as_changes(self):
        """save_as() writes an independent .tdsx copy containing the edit."""
        new_tdsx_filename = 'newtdsx.tdsx'
        try:
            original_wb = Datasource.from_file(self.tdsx_file.name)
            original_wb.connections[0].server = 'newdb.test.tsi.lan'
            original_wb.save_as(new_tdsx_filename)

            new_wb = Datasource.from_file(new_tdsx_filename)
            self.assertEqual(new_wb.connections[0].server, 'newdb.test.tsi.lan')
        finally:
            # Always remove the generated copy; the previous version leaked
            # the file whenever the assertion failed.
            if os.path.exists(new_tdsx_filename):
                os.unlink(new_tdsx_filename)
    def test_can_open_tdsx_and_save_as_changes(self):
        """save_as() on a .tdsx produces a new file carrying the edit."""
        new_tdsx_filename = 'newtdsx.tdsx'
        try:
            original_wb = Datasource.from_file(self.tdsx_file.name)
            original_wb.connections[0].server = 'newdb'
            original_wb.save_as(new_tdsx_filename)

            new_wb = Datasource.from_file(new_tdsx_filename)
            self.assertEqual(new_wb.connections[0].server, 'newdb')
        finally:
            # Clean up even on failure; the original unconditionally unlinked
            # only after a passing assertion, leaking the file otherwise.
            if os.path.exists(new_tdsx_filename):
                os.unlink(new_tdsx_filename)
    def test_can_save_as_tds(self):
        """save_as() round-trips a modified dbname through a new .tds file."""
        source_dir = os.path.dirname(self.tds_file.name)
        source_base = os.path.basename(self.tds_file.name)
        new_filename = os.path.join(source_dir, "new_{}".format(source_base))

        try:
            original_tds = Datasource.from_file(self.tds_file.name)
            original_tds.connections[0].dbname = 'newdb'
            original_tds.save_as(new_filename)

            new_tds = Datasource.from_file(new_filename)
            self.assertEqual(new_tds.connections[0].dbname, 'newdb')
        finally:
            # Remove the generated file regardless of the outcome.
            if os.path.exists(new_filename):
                os.unlink(new_filename)
    def test_can_save_as_tds(self):
        """Saving under a new name preserves a modified connection dbname."""
        new_filename = os.path.join(
            os.path.dirname(self.tds_file.name),
            'new_' + os.path.basename(self.tds_file.name))

        try:
            datasource = Datasource.from_file(self.tds_file.name)
            datasource.connections[0].dbname = 'newdb'
            datasource.save_as(new_filename)

            self.assertEqual(
                Datasource.from_file(new_filename).connections[0].dbname,
                'newdb')
        finally:
            if os.path.exists(new_filename):
                os.unlink(new_filename)
    def test_datasource_can_remove_caption(self):
        """Deleting the caption attribute persists as an empty caption."""
        filename = self.get_temp_file('test_datasource_can_remove_caption')
        del self.ds.caption
        self.ds.save_as(filename)

        reloaded = Datasource.from_file(filename)
        self.assertIsNotNone(reloaded)
        self.assertEqual(reloaded.caption, '')
Esempio n. 10
0
    def test_datasource_can_remove_caption(self):
        """A removed caption reads back as the empty string."""
        filename = self.get_temp_file('test_datasource_can_remove_caption')
        del self.ds.caption
        self.ds.save_as(filename)

        saved_copy = Datasource.from_file(filename)
        self.assertIsNotNone(saved_copy)
        self.assertEqual(saved_copy.caption, '')
Esempio n. 11
0
    def test_save_has_xml_declaration(self):
        """save() writes the file with an explicit XML declaration first."""
        original_tds = Datasource.from_file(self.tds_file.name)
        original_tds.connections[0].dbname = 'newdb'

        original_tds.save()

        # The declaration says utf-8, so read with that encoding explicitly
        # instead of relying on the platform default encoding.
        with open(self.tds_file.name, encoding='utf-8') as f:
            first_line = f.readline().strip()  # first line should be xml tag
            self.assertEqual(
                first_line, "<?xml version='1.0' encoding='utf-8'?>")
Esempio n. 12
0
    def test_save_has_xml_declaration(self):
        """The first line written by save() is the XML declaration."""
        original_tds = Datasource.from_file(self.tds_file.name)
        original_tds.connections[0].dbname = 'newdb.test.tsi.lan'

        original_tds.save()

        # Open with the declared encoding rather than the platform default.
        with open(self.tds_file.name, encoding='utf-8') as f:
            first_line = f.readline().strip()  # first line should be xml tag
            self.assertEqual(first_line,
                             "<?xml version='1.0' encoding='utf-8'?>")
Esempio n. 13
0
def prepare_datasources(xml_root):
    """Return a Datasource wrapper for every child of the <datasources>
    element under *xml_root*.

    Returns an empty list when the document has no <datasources> element
    (or when that element has no children).
    """
    datasource_elements = xml_root.find('datasources')
    if datasource_elements is None:
        return []
    # Wrap each datasource element so callers can update it via the API.
    return [Datasource(element) for element in datasource_elements]
Esempio n. 14
0
    def test_can_create_datasource_from_connections(self):
        """A datasource built from two connections exposes both, in order."""
        conn1 = Connection.from_attributes(
            server='a', dbname='b', username='******', dbclass='mysql',
            authentication='d')
        conn2 = Connection.from_attributes(
            server='1', dbname='2', username='******', dbclass='mysql',
            port='1337', authentication='7')
        ds = Datasource.from_connections('test', connections=[conn1, conn2])

        # The first connection never set a port, so it stays None.
        self.assertEqual(ds.connections[0].server, 'a')
        self.assertIsNone(ds.connections[0].port)
        self.assertEqual(ds.connections[1].server, '1')
        self.assertEqual(ds.connections[1].port, '1337')
Esempio n. 15
0
    def test_set_last_refresh_increment_value(self):
        """An increment value written to the last refresh event survives a
        save/load round trip."""
        filename = os.path.join(TEST_ASSET_DIR, 'set_last_refresh_increment_value.tds')
        newdate = '#2003-03-03#'

        self.ds.extract.refresh.refresh_events[-1].increment_value = newdate
        try:
            self.ds.save_as(filename)
            ds_new = Datasource.from_file(filename)
            # Assert on the same (last) event that was modified above; the
            # original asserted on index 1, which only coincidentally matched
            # when the file held exactly two refresh events.
            self.assertEqual(
                dt.datetime.strptime(newdate, '#%Y-%m-%d#'),
                dt.datetime.strptime(
                    ds_new.extract.refresh.refresh_events[-1].increment_value,
                    '#%Y-%m-%d#'))
        finally:
            if os.path.exists(filename):
                os.unlink(filename)
Esempio n. 16
0
    def _prepare_datasources(xml_root):
        datasources = []

        # loop through our datasources and append
        datasource_elements = xml_root.find('datasources')
        if datasource_elements is None:
            return []

        for datasource in datasource_elements:
            ds = Datasource(datasource)
            datasources.append(ds)

        return datasources
def print_info_ds(file_name):
    """Print a summary of a .tds file: its name and version, each
    connection's database details, and every field with its type, formula,
    default aggregation and description."""
    ############################################################
    # Step 2)  Open the .tds we want to inspect
    ############################################################
    sourceTDS = Datasource.from_file(file_name)

    ############################################################
    # Step 3)  Print out all of the fields and what type they are
    ############################################################
    print('----------------------------------------------------------')
    print('-- Info for our .tds:')
    print('--   name:\t{0}'.format(sourceTDS.name))
    print('--   version:\t{0}'.format(sourceTDS.version))
    print('----------------------------------------------------------')

    print('----------------------------------------------------------')
    print('--- {} total connections in this datasource'.format(
        len(sourceTDS.connections)))
    print('----------------------------------------------------------')

    # NOTE(review): `count` is unused here, so enumerate() is unnecessary.
    for count, field in enumerate(sourceTDS.connections):
        print(field.dbname)
        print(field.server)
        print(field.username)
        print(field.authentication)

    ############################################################
    # Step 4)  Print the total field using on each datasource,
    #          the fields and what type they are
    ############################################################
    print('----------------------------------------------------------')
    print('--- {} total fields in this datasource'.format(len(
        sourceTDS.fields)))
    print('----------------------------------------------------------')
    for count, field in enumerate(sourceTDS.fields.values()):
        print('{:>4}: {} is a {}'.format(count + 1, field.name,
                                         field.datatype))
        blank_line = False
        if field.calculation:
            print('      the formula is {}'.format(field.calculation))
            blank_line = True
        if field.default_aggregation:
            print('      the default aggregation is {}'.format(
                field.default_aggregation))
            blank_line = True
        if field.description:
            print('      the description is {}'.format(field.description))

        # Separate fields that printed extra detail with a blank line.
        if blank_line:
            print('')
    print('----------------------------------------------------------')
Esempio n. 18
0
    def test_can_create_datasource_from_connections(self):
        """from_connections() keeps each connection's attributes in order."""
        first = Connection.from_attributes(server='a',
                                           dbname='b',
                                           username='******',
                                           dbclass='mysql',
                                           authentication='d')
        second = Connection.from_attributes(server='1',
                                            dbname='2',
                                            username='******',
                                            dbclass='mysql',
                                            authentication='7')
        ds = Datasource.from_connections('test', connections=[first, second])

        self.assertEqual(ds.connections[0].server, 'a')
        self.assertEqual(ds.connections[1].server, '1')
def main(argv):
    """Update connection attributes of a Tableau datasource file in place.

    Usage: update_datasource.py -f <filename> -u <username> -s <server>
           -p <port> -d <dbname>

    Only the options actually supplied are applied to the first connection;
    the file is then saved in place.
    """
    server = None
    port = None
    filename = None
    username = None
    # dbname was previously never initialized, so running without -d/--dbname
    # crashed with a NameError at the print below.
    dbname = None
    # NOTE(review): the long-option list was mangled by credential scrubbing
    # in the scraped source; restored to one entry per option.
    opts, args = getopt.getopt(
        argv, "hs:p:f:u:d:",
        ["server=", "port=", "filename=", "username=", "dbname="])
    for opt, arg in opts:
        if opt == '-h':
            print('update_datasource.py -f <filename> -u <username> -s <server> -p <port> -d <dbname>')
            sys.exit()
        elif opt in ("-s", "--server"):
            server = arg
        elif opt in ("-p", "--port"):
            port = arg
        elif opt in ("-f", "--filename"):
            filename = arg
        elif opt in ("-u", "--username"):
            username = arg
        elif opt in ("-d", "--dbname"):
            dbname = arg

    print('filename  is ', filename)
    print('username is ', username)
    print('server is ', server)
    print('port is ', port)
    print('dbname is ', dbname)

    datasource = Datasource.from_file(filename)

    # Apply only the attributes that were supplied on the command line.
    if username:
        datasource.connections[0].username = username
    if server:
        datasource.connections[0].server = server
    if port:
        datasource.connections[0].port = port
    if dbname:
        datasource.connections[0].dbname = dbname

    datasource.save()
Esempio n. 20
0
def download_datasources(tableau_server, tableau_authentication, directory,
                         **context):
    """Download every datasource listed by the upstream Airflow task,
    extract its SQL object name, server and database, and return the joined
    rows as JSON. Only rows whose database is FI_DM_EBI are kept.
    """
    datasource_dataframe = (pd.read_json(context['task_instance'].xcom_pull(
        task_ids='join_postgres_data')).reset_index().drop(['index'], axis=1))
    frames = []

    tableau_server.auth.sign_in(tableau_authentication)
    for i in range(len(datasource_dataframe)):
        ds_id = datasource_dataframe['id'][i]
        filename = directory / (ds_id + '.tdsx')

        tableau_server.datasources.download(ds_id,
                                            filepath=filename.as_posix(),
                                            include_extract=False)
        tds = Datasource.from_file(filename=filename.as_posix())
        # NOTE(review): relies on a private attribute of Datasource.
        ds_xml = tds._datasourceXML

        df_i = datasource_dataframe.iloc[[i]]

        # Prefer a table relation; fall back to a stored procedure.
        sql_object = (ds_xml.find('connection').find('relation').get('table'))

        if not sql_object:
            sql_object = (
                ds_xml.find('connection').find('relation').get('stored-proc'))

        df_i = df_i.assign(
            sql_object_name=sql_object.replace('[dbo].[', '').replace(']', ''))
        df_i = df_i.assign(server=tds.connections[0].server)
        df_i = df_i.assign(db=tds.connections[0].dbname)

        frames.append(df_i)

    tableau_server.auth.sign_out()

    # DataFrame.append was removed in pandas 2.0; concatenate once after the
    # loop instead (also avoids quadratic copying on every iteration).
    df = pd.concat(frames) if frames else pd.DataFrame()

    df['db'] = df['db'].str.upper()

    df = df[df.db == 'FI_DM_EBI'].reset_index().drop(['index'], axis=1)
    return df.to_json()
Esempio n. 21
0
from tableaudocumentapi import Datasource

# Inspect the fields, extract fields and generated query of a .tds file.
sourceTDS = Datasource.from_file('Sales.tds')
print(sourceTDS.fields)
print(sourceTDS.extract_fields)

print(sourceTDS.get_query())

# Repeat the query inspection for a second datasource file.
sourceTDS = Datasource.from_file('Cash Register.tds')

print(sourceTDS.get_query())
Esempio n. 22
0
 def test_description_unicode(self):
     """Field descriptions survive loading a file with unicode content."""
     datasource = Datasource.from_file(TEST_UNICODE_FILE)
     description = datasource.fields['A'].description
     self.assertIsNotNone(description)
Esempio n. 23
0
# Pull the per-customer credential rows from the database and fan them out
# into the parallel lists used below.
cur.execute(
    "SELECT uniqueField, schema, DB_Username, DB_Password, Server_Username FROM tablename"
)

for row in cur.fetchall():
    uniqueField.append(row[0])
    schema.append(row[1])
    db_username.append(row[2])
    db_password.append(row[3])
    server_username.append(row[4])
    counter = counter + 1
db.close()

###Creating duplicate copies of workbooks###
sourceWB = Workbook('Base.twbx')
sourceDS = Datasource.from_file('Base.tdsx')

# For each row: repoint every connection in the base workbook and base
# datasource at that row's schema/user, then save a per-row copy of each.
for i in range(0, len(uniqueField)):
    for x in sourceWB.datasources:
        for j in x.connections:
            j.dbname = schema[i]
            j.username = db_username[i]
    for j in sourceDS.connections:
        j.dbname = schema[i]
        j.username = db_username[i]
    #Saving the workbook and datasource
    Workbook.save_as(sourceWB, uniqueField[i] + '.twbx')
    Datasource.save_as(sourceDS, uniqueField[i] + '.tdsx')

###Creating sites, projects and users if they don't exist, and publishing the workbooks###
server = TSC.Server('server')
Esempio n. 24
0
 def test_can_extract_connection(self):
     """The datasource parses its connections into a list of Connections."""
     datasource = Datasource.from_file(self.tds_file.name)
     self.assertIsInstance(datasource.connections, list)
     self.assertIsInstance(datasource.connections[0], Connection)
 def setUp(self):
     """Load the shared .tds fixture and reset the cleanup registry."""
     self.to_delete = set()
     self.ds = Datasource.from_file(TEST_TDS_FILE)
Esempio n. 26
0
 def test_can_open_tdsx(self):
     """A packaged .tdsx opens with usable connections and a name."""
     datasource = Datasource.from_file(self.tdsx_file.name)
     self.assertTrue(datasource.connections)
     self.assertTrue(datasource.name)
Esempio n. 27
0
 def test_can_extract_datasource_from_file(self):
     """Name and version are read correctly from the sqlserver fixture."""
     datasource = Datasource.from_file(self.tds_file.name)
     self.assertEqual(datasource.name, 'sqlserver.17u3bqc16tjtxn14e2hxh19tyvpo')
     self.assertEqual(datasource.version, '9.3')
Esempio n. 28
0
############################################################
# Step 1)  Use Datasource object from the Document API
############################################################
from tableaudocumentapi import Datasource

############################################################
# Step 2)  Open the .tds we want to inspect
############################################################
sourceTDS = Datasource.from_file('World.tds')

############################################################
# Step 3)  Print out all of the fields and what type they are
############################################################
print('----------------------------------------------------------')
print('--- {} total fields in this datasource'.format(len(sourceTDS.fields)))
print('----------------------------------------------------------')
for count, field in enumerate(sourceTDS.fields.values()):
    print('{:>4}: {} is a {}'.format(count+1, field.name, field.datatype))
    blank_line = False
    if field.calculation:
        print('      the formula is {}'.format(field.calculation))
        blank_line = True
    if field.default_aggregation:
        print('      the default aggregation is {}'.format(field.default_aggregation))
        blank_line = True

    # Separate fields that printed extra detail with a blank line.
    if blank_line:
        print('')
print('----------------------------------------------------------')
 def test_exception_when_datasource_given_twbx(self):
     """Opening a packaged workbook as a datasource raises an error."""
     with self.assertRaises(TableauInvalidFileException):
         # The return value was bound to an unused local; just call it.
         Datasource.from_file(TABLEAU_10_TWBX)
Esempio n. 30
0
from tableaudocumentapi import Datasource
from tableaudocumentapi import Field
from tableaudocumentapi import Connection

# NOTE(review): Python 2 script (print statements); Workbook is assumed to
# be imported earlier in the original file.
sourceWB = Workbook('TestCase4.twbx')

db = ""
sourceDB = ""
count = 1

#Importing Datasource object from the workbook
for j in sourceWB.datasources:
    # NOTE(review): count is never incremented, so every iteration prints
    # "Datasource 1".
    print "Datasource " + str(count)
    for x in j.connections:
        db = x.dbclass
    sourceDB = Datasource.from_connections(db, j.connections)

    #Printing information about the datasource
    print "Connection information :"
    print sourceDB.connections
    print ""
    print "Datasource caption :"
    print sourceDB.caption
    print ""
    print "Tableau version :"
    print sourceDB.version
    print ""
    sourceDB.caption = "abc"
    print sourceDB.caption

    #Datasource.save (sourceDB)
Esempio n. 31
0
 def test_can_extract_datasource_from_file(self):
     """The federated sample file reports its expected name and version."""
     datasource = Datasource.from_file(self.tds_file.name)
     self.assertEqual(datasource.name, 'federated.1s4nxn20cywkdv13ql0yk0g1mpdx')
     self.assertEqual(datasource.version, '10.0')
def datasource_prepare(server, project, ds):
    """Function that prepares the data source on the given server in the given project:
    - get the functional ordered column and the last update value of the reference table
    - clean up the hyper extract by deleting to be refreshed data
    - set the last refresh value to be applied in the next incremental update of the hyper extract

    Relies on the module-level ``projects`` and ``updates`` mappings plus the
    ``config`` and ``WORK_DIR`` globals defined elsewhere in this file.
    """
    global projects
    global updates

    p = projects[project]
    # Scan every datasource on the server until the requested one is found.
    for datasource in tsc.Pager(server.datasources):
        logging.debug("{0} ({1})".format(datasource.name,
                                         datasource.project_name))
        if datasource.name == ds and datasource.project_name == project:
            # NOTE(review): three arguments are passed but the format string
            # has only two placeholders, so datasource.id is never printed.
            logging.info("{0}: {1}".format(datasource.name,
                                           datasource.project_name,
                                           datasource.id))
            ds_file = server.datasources.download(datasource.id,
                                                  filepath=WORK_DIR,
                                                  include_extract=True)
            # A .tdsx is a zip archive; unpack it so the hyper file is
            # reachable on disk.
            if zipfile.is_zipfile(ds_file):
                with zipfile.ZipFile(ds_file) as zf:
                    zf.extractall()
            tds = Datasource.from_file(ds_file)
            # Bail out unless this is a hyper extract with refresh metadata.
            if not tds.has_extract():
                logging.error(f"datasource {ds} does not contain an extract")
                return
            if tds.extract.connection.dbclass != 'hyper':
                logging.error(
                    f"datasource {ds} is not based on a hyper extract")
                return
            if not tds.extract.has_refresh():
                logging.error(
                    f"datasource {ds} does not have refresh information")
                return
            database = tds.connections[0].dbname

            update_value = updates['datasources'][ds]['last_update_value']
            if update_value is None or update_value == "":
                logging.error(
                    f"datasource {ds} does not have a last update value set, please provide one"
                )
                return

            functional_ordered_column_value_min, functional_ordered_column_value_previous, last_update_value = get_database_values(
                database, ds, update_value)
            hyper_file = tds.extract.connection.dbname
            # Delete the rows that the next incremental refresh will reload.
            rows_affected = hyper_prepare(
                hyper_file,
                config['datasources'][ds]['functional_ordered_column'],
                functional_ordered_column_value_min)
            logging.info(
                f"datasource {ds} with hyper file {hyper_file}: {rows_affected} rows were deleted"
            )
            # Record where the next incremental refresh should resume.
            tds.extract.refresh.refresh_events[
                -1].increment_value = functional_ordered_column_value_previous
            tds.save_as(ds_file)
            credentials = ConnectionCredentials(
                config['databases'][database]['args']['user'],
                config['databases'][database]['args']['password'],
                embed=True)
            # Republish the prepared datasource, overwriting the original.
            new_ds = tsc.DatasourceItem(p.id)
            new_ds.name = ds
            server.datasources.publish(new_ds,
                                       ds_file,
                                       mode=tsc.Server.PublishMode.Overwrite,
                                       connection_credentials=credentials)
            updates['datasources'][ds]['last_update_value'] = last_update_value
 def setUp(self):
     """Load the shared .tds fixture before each test."""
     self.ds = Datasource.from_file(TEST_TDS_FILE)
Esempio n. 34
0
 def test_can_open_tdsx(self):
     """Opening a .tdsx yields a named datasource with connections."""
     tdsx = Datasource.from_file(self.tdsx_file.name)
     self.assertTrue(tdsx.name)
     self.assertTrue(tdsx.connections)
Esempio n. 35
0
 def test_can_extract_datasource_from_file(self):
     """Datasource metadata matches the federated sample fixture."""
     ds = Datasource.from_file(self.tds_file.name)
     expected = ('federated.1s4nxn20cywkdv13ql0yk0g1mpdx', '10.0')
     self.assertEqual((ds.name, ds.version), expected)
Esempio n. 36
0
 def setUp(self):
     """Load the shared .tds fixture before each test."""
     self.ds = Datasource.from_file(TEST_TDS_FILE)
Esempio n. 37
0
#The Order Date field has been renamed to 'abc', and contains a calculation for profit ratio.
from tableaudocumentapi import Workbook
from tableaudocumentapi import Datasource
from tableaudocumentapi import Field
from tableaudocumentapi import Connection

# NOTE(review): Python 2 script (print statements).
sourceDB = Datasource.from_file('Test.tdsx')
print "Datasource name :"
print sourceDB.name
print ""
print "Tableau version :"
print sourceDB.version
print ""
print "Connection information :"
print sourceDB.connections
print ""
# Change the caption and save the datasource back to disk.
sourceDB.caption = "Modified data source"
Datasource.save (sourceDB)
print "Caption post saving:"
print sourceDB.caption
print ""
#Field information that is originally commented out
#print sourceDB.fields


#Printing information about all fields
print "Information about fields in this order (name, id, caption, alias, datatype, role, calculation, is quantitative?, is ordinal?, is nominal?, worksheets used in, default aggregation :"
for x in sourceDB.fields.values():
	print x.name
	print x.id
	print x.caption
Esempio n. 38
0
from tableaudocumentapi import Workbook
from tableaudocumentapi import Datasource
from tableaudocumentapi import Field
from tableaudocumentapi import Connection

import pprint as pp

# Load a workbook and a packaged datasource for the inspection below.
sourceWB = Workbook('ZenoWorkbook.twb')
sourceDB = Datasource.from_file('Beatlog (BeatlogV1).tdsx')

print("Filename: " + sourceWB.filename)
print(sourceWB.worksheets)
pp.pprint(sourceWB.datasources)

def listTDS(sourceTDS):
    """Print a short banner with the datasource's name and version."""
    divider = '----------------------------------------------------------'
    print(divider)
    print('-- Info for our .tds:')
    print('--   name:\t{0}'.format(sourceTDS.name))
    print('--   version:\t{0}'.format(sourceTDS.version))
    print(divider)
    return


# Dump the loaded datasource's connections and field map for inspection.
pp.pprint(sourceDB.connections)

pp.pprint(sourceDB.fields)


def showFields(sourceTDS):
    print('----------------------------------------------------------')
Esempio n. 39
0
 def test_can_extract_datasource_from_file(self):
     """The sqlserver fixture reports its expected name and version."""
     loaded = Datasource.from_file(self.tds_file.name)
     self.assertEqual(loaded.name, 'sqlserver.17u3bqc16tjtxn14e2hxh19tyvpo')
     self.assertEqual(loaded.version, '9.3')
Esempio n. 40
0
 def test_exception_when_datasource_given_twbx(self):
     """A .twbx workbook given to Datasource.from_file raises an error."""
     with self.assertRaises(TableauInvalidFileException):
         # The return value was bound to an unused local; just call it.
         Datasource.from_file(TABLEAU_10_TWBX)
 def test_description_unicode(self):
     """A unicode .tds file loads with field descriptions intact."""
     loaded = Datasource.from_file(TEST_UNICODE_FILE)
     self.assertIsNotNone(loaded.fields['A'].description)
Esempio n. 42
0
def main():
    """Publish one or more Tableau datasource files to a server project,
    repointing each file's single connection at the database given on the
    command line before uploading."""
    parser = argparse.ArgumentParser(
        description='Publish datasource to server')
    parser.add_argument('--host', '-H', required=True, help='database host')
    parser.add_argument('--port', required=True, help='database port')
    parser.add_argument('--database',
                        '-D',
                        required=True,
                        help='database name')
    parser.add_argument('--login',
                        '-L',
                        required=True,
                        help='login to sign into database')
    parser.add_argument('-P',
                        required=True,
                        help='password to sign into database')
    parser.add_argument('--server',
                        '-s',
                        required=True,
                        help='server to publish to')
    parser.add_argument('--site', '-S', default=None)
    parser.add_argument('--project', default=None)
    parser.add_argument('--username',
                        '-u',
                        required=True,
                        help='username to sign into server')
    parser.add_argument('-p', '--password', required=True, default=None)
    parser.add_argument('--directory', '-d', required=True, default='migrated')

    # NOTE(review): the help text says "error by default" but the actual
    # default below is 'warning'.
    parser.add_argument('--logging-level',
                        '-l',
                        choices=['debug', 'info', 'error'],
                        default='warning',
                        help='desired logging level (set to error by default)')

    parser.add_argument('datasource',
                        help='one or more datasources to publish',
                        nargs='+')

    args = parser.parse_args()

    # Set logging level based on user input, or error by default
    logging_level = getattr(logging, args.logging_level.upper())
    logging.basicConfig(level=logging_level)

    tableau_auth = TSC.TableauAuth(args.username, args.password)
    server = TSC.Server(args.server)

    overwrite_true = TSC.Server.PublishMode.Overwrite

    with server.auth.sign_in(tableau_auth):
        server.use_server_version()

        # Resolve the target project by name; fail fast when it is missing.
        all_projects, _ = server.projects.get()
        project = next(
            (project
             for project in all_projects if project.name == args.project),
            None)

        if project is None:
            error = "project {0} can not be found".format(args.project)
            raise LookupError(error)

        for ds in args.datasource:
            tds = Datasource.from_file(ds)
            if len(tds.connections) > 1:
                error = "only single connection data sources are supported at this time"
                raise ValueError(error)
            # An extract must travel inside a packaged .tdsx file.
            if tds.has_extract() and (os.path.splitext(os.path.basename(ds))[1]
                                      != '.tdsx'):
                error = "datasource {0} has an extract defined, but has not been saved as a .tdsx file".format(
                    ds)
                raise ValueError(error)
            # Warn when an "empty extract" flag parameter is set to true.
            extract_flag = [
                p for p in tds.parameters
                if re.search('empty.*extract', p.caption, re.IGNORECASE)
            ]
            if len(extract_flag) > 0 and extract_flag[0].value == 'true':
                warning = "datasource {0} has an empty extract parameter which is set to true".format(
                    ds)
                logging.warning(warning)
            # Repoint the single connection at the requested database.
            tds.connections[0].dbname = args.database
            tds.connections[0].server = args.host
            tds.connections[0].port = args.port
            tds.connections[0].username = args.login
            filename = os.path.basename(ds)
            filename_short = os.path.splitext(filename)[0]
            file_extension = os.path.splitext(filename)[1][1:]
            new_ds_name = "{0}_{1}.{2}".format(filename_short, args.database,
                                               file_extension)
            new_ds_name = os.path.join(args.directory, new_ds_name)
            tds.save_as(new_ds_name)
            creds = ConnectionCredentials(args.login, args.P, embed=True)
            new_ds = TSC.DatasourceItem(project.id)
            new_ds.name = filename_short
            try:
                new_ds = server.datasources.publish(
                    new_ds,
                    new_ds_name,
                    mode=overwrite_true,
                    connection_credentials=creds)
            except TSC.server.endpoint.exceptions.ServerResponseError:
                # Retry against the 2.4 REST API when the server rejects the
                # first publish attempt.
                server.version = '2.4'
                new_ds = server.datasources.publish(
                    new_ds,
                    new_ds_name,
                    mode=overwrite_true,
                    connection_credentials=creds)
            print("data source {0} published ID: {1}".format(
                new_ds.name, new_ds.id))
Esempio n. 43
0
 def test_can_extract_connection(self):
     """The datasource exposes its connections as Connection instances."""
     connections = Datasource.from_file(self.tds_file.name).connections
     self.assertIsInstance(connections, list)
     self.assertIsInstance(connections[0], Connection)
Esempio n. 44
0
 def setUp(self):
     """Load the shared .tds fixture and reset the set of files to delete."""
     self.ds = Datasource.from_file(TEST_TDS_FILE)
     self.to_delete = set()