Example #1
def frames_to_hyper(
    dict_of_frames: Dict[pantab_types.TableType, pd.DataFrame],
    database: Union[str, pathlib.Path],
    table_mode: str = "w",
    *,
    hyper_process: Optional[tab_api.HyperProcess] = None,
) -> None:
    """See api.rst for documentation."""
    _validate_table_mode(table_mode)

    with ensure_hyper_process(hyper_process) as hpe:
        tmp_db = pathlib.Path(tempfile.gettempdir()) / f"{uuid.uuid4()}.hyper"

        if table_mode == "a" and pathlib.Path(database).exists():
            shutil.copy(database, tmp_db)

        with tab_api.Connection(
                hpe.endpoint, tmp_db,
                tab_api.CreateMode.CREATE_IF_NOT_EXISTS) as connection:
            for table, df in dict_of_frames.items():
                _insert_frame(df,
                              connection=connection,
                              table=table,
                              table_mode=table_mode)

        # In Python 3.9+ we can just pass the path object, but due to bpo 32689
        # and subsequent typeshed changes it is easier to just pass as str for now
        shutil.move(str(tmp_db), database)
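
For orientation, a minimal usage sketch of the public entry point defined above, assuming pantab is installed. The frame contents and file name are invented for illustration; only the signature comes from the example.

import pandas as pd
import pantab

# Frames keyed by the table each one should be written to; plain strings are
# accepted wherever a TableName is expected.
frames = {
    "animals": pd.DataFrame({"name": ["dog", "cat"], "legs": [4, 4]}),
    "people": pd.DataFrame({"name": ["alice", "bob"], "age": [30, 40]}),
}

# table_mode="w" (the default) replaces example.hyper; "a" appends to existing tables.
pantab.frames_to_hyper(frames, "example.hyper", table_mode="w")
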
Example #2
def frames_to_hyper(
    dict_of_frames: Dict[pantab_types.TableType, pd.DataFrame],
    database: Union[str, pathlib.Path],
    table_mode: str = "w",
) -> None:
    """See api.rst for documentation."""
    _validate_table_mode(table_mode)

    with tab_api.HyperProcess(
            tab_api.Telemetry.DO_NOT_SEND_USAGE_DATA_TO_TABLEAU) as hpe:
        tmp_db = pathlib.Path(tempfile.gettempdir()) / f"{uuid.uuid4()}.hyper"

        if table_mode == "a" and pathlib.Path(database).exists():
            shutil.copy(database, tmp_db)

        with tab_api.Connection(
                hpe.endpoint, tmp_db,
                tab_api.CreateMode.CREATE_IF_NOT_EXISTS) as connection:
            for table, df in dict_of_frames.items():
                _insert_frame(df,
                              connection=connection,
                              table=table,
                              table_mode=table_mode)

        shutil.move(tmp_db, database)
Example #3
def frames_from_hyper(
    source: Union[str, pathlib.Path, tab_api.Connection],
    *,
    hyper_process: Optional[tab_api.HyperProcess] = None,
) -> Dict[tab_api.TableName, pd.DataFrame]:
    """See api.rst for documentation."""
    result: Dict[TableType, pd.DataFrame] = {}

    if isinstance(source, tab_api.Connection):
        forbid_hyper_process(hyper_process)
        connection = source
        for schema in connection.catalog.get_schema_names():
            for table in connection.catalog.get_table_names(schema=schema):
                result[table] = _read_table(connection=connection, table=table)
    else:
        with tempfile.TemporaryDirectory() as tmp_dir, ensure_hyper_process(
                hyper_process) as hpe:
            tmp_db = shutil.copy(source, tmp_dir)
            with tab_api.Connection(hpe.endpoint, tmp_db) as connection:
                for schema in connection.catalog.get_schema_names():
                    for table in connection.catalog.get_table_names(
                            schema=schema):
                        result[table] = _read_table(connection=connection,
                                                    table=table)

    return result
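
A hedged sketch of the hyper_process keyword shown above: reusing one externally managed HyperProcess across several reads avoids paying the process start-up cost per call. The file name and telemetry choice are illustrative.

import pantab
import tableauhyperapi as tab_api

with tab_api.HyperProcess(tab_api.Telemetry.DO_NOT_SEND_USAGE_DATA_TO_TABLEAU) as hyper:
    # Returns a dict mapping each TableName in the file to a DataFrame.
    tables = pantab.frames_from_hyper("example.hyper", hyper_process=hyper)
    for name, df in tables.items():
        print(name, df.shape)
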
Example #4
def frame_to_hyper(
    df: pd.DataFrame,
    database: Union[str, pathlib.Path],
    *,
    table: pantab_types.TableType,
    table_mode: str = "w",
) -> None:
    """See api.rst for documentation"""
    with tab_api.HyperProcess(
            tab_api.Telemetry.DO_NOT_SEND_USAGE_DATA_TO_TABLEAU) as hpe:
        tmp_db = pathlib.Path(tempfile.gettempdir()) / f"{uuid.uuid4()}.hyper"

        if table_mode == "a" and pathlib.Path(database).exists():
            shutil.copy(database, tmp_db)

        with tab_api.Connection(
                hpe.endpoint, tmp_db,
                tab_api.CreateMode.CREATE_IF_NOT_EXISTS) as connection:
            _insert_frame(df,
                          connection=connection,
                          table=table,
                          table_mode=table_mode)

        # In Python 3.9+ we can just pass the path object, but due to bpo 32689
        # and subsequent typeshed changes it is easier to just pass as str for now
        shutil.move(str(tmp_db), database)
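
A small usage sketch for the single-frame writer, assuming pantab is installed; the DataFrame and file name are invented for illustration.

import pandas as pd
import pantab

df = pd.DataFrame({"city": ["Oslo", "Lima"], "population": [709_000, 9_750_000]})

# table is keyword-only; a bare string lands in the default schema.
pantab.frame_to_hyper(df, "cities.hyper", table="cities")
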
Example #5
def workbook_owners_publish():

    workbook_owners_dict = workbook_owners()

    with hyp.HyperProcess(hyp.Telemetry.SEND_USAGE_DATA_TO_TABLEAU) as hyper:
        print("The HyperProcess has started.")

        with hyp.Connection(hyper.endpoint, '../data/workbook_owners.hyper',
                            hyp.CreateMode.CREATE_AND_REPLACE) as connection:
            print("The connection to the Hyper file is open.")

            connection.catalog.create_schema('Extract')

            table = hyp.TableDefinition(hyp.TableName('Extract', 'Extract'), [
                hyp.TableDefinition.Column('workbook_name',
                                           hyp.SqlType.text()),
                hyp.TableDefinition.Column('owner', hyp.SqlType.text())
            ])

            print("The table is defined.")

            connection.catalog.create_table(table)

            with hyp.Inserter(connection, table) as inserter:

                for i in workbook_owners_dict:

                    inserter.add_row([i['workbook_name'], i['owner']])

                inserter.execute()

            print("The data was added to the table.")
        print("The connection to the Hyper extract file is closed.")
    print("The HyperProcess has shut down.")
Example #6
def frame_from_hyper(database: Union[str, pathlib.Path], *,
                     table: TableType) -> pd.DataFrame:
    """See api.rst for documentation"""
    with tempfile.TemporaryDirectory() as tmp_dir, tab_api.HyperProcess(
            tab_api.Telemetry.DO_NOT_SEND_USAGE_DATA_TO_TABLEAU) as hpe:
        tmp_db = shutil.copy(database, tmp_dir)
        with tab_api.Connection(hpe.endpoint, tmp_db) as connection:
            return _read_table(connection=connection, table=table)
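
A one-line usage sketch of the reader above, with made-up file and table names:

import pantab

# Reads one table from the .hyper file back into a DataFrame; table is keyword-only.
df = pantab.frame_from_hyper("cities.hyper", table="cities")
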
Example #7
def frame_from_hyper(
    database: Union[str, pathlib.Path],
    *,
    table: TableType,
    hyper_process: Optional[tab_api.HyperProcess] = None,
) -> pd.DataFrame:
    """See api.rst for documentation"""

    with tempfile.TemporaryDirectory() as tmp_dir, ensure_hyper_process(
            hyper_process) as hpe:
        tmp_db = shutil.copy(database, tmp_dir)
        with tab_api.Connection(hpe.endpoint, tmp_db) as connection:
            return _read_table(connection=connection, table=table)
Example #8
def frame_from_hyper_query(
    database: Union[str, pathlib.Path],
    query: str,
    *,
    hyper: Optional[tab_api.HyperProcess] = None,
) -> pd.DataFrame:
    """See api.rst for documentation."""
    with tempfile.TemporaryDirectory() as tmp_dir, ensure_hyper_process(
            hyper) as hpe:
        tmp_db = shutil.copy(database, tmp_dir)
        with tab_api.Connection(hpe.endpoint, tmp_db) as connection:
            with connection.execute_query(query) as result:
                return _read_query_result(result, None)
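
A hedged sketch of the query-based reader: the SQL runs inside Hyper and only the result comes back as a DataFrame. Table and column names here carry over the assumptions from the earlier sketches.

import pantab

# Filtering happens in Hyper; the result set is returned as a DataFrame.
query = "SELECT city, population FROM cities WHERE population > 1000000"
result = pantab.frame_from_hyper_query("cities.hyper", query)
print(result)
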
Example #9
def main(argv: Optional[List[str]] = None) -> int:
    if argv is None:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser()
    parser.add_argument("input_json_filepaths", type=Path, nargs="+")
    args_ns = parser.parse_args(argv)

    input_json_filepaths = args_ns.input_json_filepaths
    if len(input_json_filepaths) > 1:
        raise NotImplementedError("Only one input supported at this time")

    # create a dict of scan ids to GraphSets. This contains all of the data in the provided input.
    scan_ids_graph_sets: Dict[int, GraphSet] = {
        scan_id: GraphSet.from_json_file(filepath)
        for scan_id, filepath in enumerate(input_json_filepaths)
    }

    # discover tables which need to be created by iterating over resources and finding the maximum
    # set of predicates used for each type
    table_defns = build_table_defns(scan_ids_graph_sets.values())

    # build data
    table_names_datas = build_data(scan_ids_graph_sets.values(), table_defns)

    table_names_tables: Dict[str, tableauhyperapi.TableDefinition] = {}
    with tableauhyperapi.HyperProcess(
            telemetry=tableauhyperapi.Telemetry.DO_NOT_SEND_USAGE_DATA_TO_TABLEAU
    ) as hyper:
        with tableauhyperapi.Connection(
                endpoint=hyper.endpoint,
                database="altimeter.hyper",
                create_mode=tableauhyperapi.CreateMode.CREATE_AND_REPLACE,
        ) as connection:
            # create tables
            for table_name, columns in table_defns.items():
                table = tableauhyperapi.TableDefinition(
                    table_name=table_name,
                    columns=[column.to_hyper() for column in columns])
                connection.catalog.create_table(table)
                table_names_tables[table_name] = table

            for table_name, datas in table_names_datas.items():
                with tableauhyperapi.Inserter(
                        connection,
                        table_names_tables[table_name]) as inserter:
                    inserter.add_rows(datas)
                    inserter.execute()

    return 0
Example #10
def frames_from_hyper(
    database: Union[str, pathlib.Path]
) -> Dict[tab_api.TableName, pd.DataFrame]:
    """See api.rst for documentation."""
    result: Dict[TableType, pd.DataFrame] = {}
    with tempfile.TemporaryDirectory() as tmp_dir, tab_api.HyperProcess(
        tab_api.Telemetry.DO_NOT_SEND_USAGE_DATA_TO_TABLEAU
    ) as hpe:
        tmp_db = shutil.copy(database, tmp_dir)
        with tab_api.Connection(hpe.endpoint, tmp_db) as connection:
            for schema in connection.catalog.get_schema_names():
                for table in connection.catalog.get_table_names(schema=schema):
                    result[table] = _read_table(connection=connection, table=table)

    return result
# This utility script helps generate those files which can be
# incorporated into testing

import tableauhyperapi as tab_api

if __name__ == "__main__":

    table = tab_api.TableDefinition(
        table_name=tab_api.TableName("public", "table"),
        columns=[
            tab_api.TableDefinition.Column(
                name="Non-Nullable String",
                type=tab_api.SqlType.text(),
                nullability=tab_api.NOT_NULLABLE,
            )
        ],
    )

    with tab_api.HyperProcess(
            telemetry=tab_api.Telemetry.DO_NOT_SEND_USAGE_DATA_TO_TABLEAU) as hyper:
        with tab_api.Connection(
                endpoint=hyper.endpoint,
                database="non_pantab_writeable.hyper",
                create_mode=tab_api.CreateMode.CREATE_AND_REPLACE,
        ) as connection:
            connection.catalog.create_table(table_definition=table)

            with tab_api.Inserter(connection, table) as inserter:
                inserter.add_rows([["row1"], ["row2"]])
                inserter.execute()