Example 1
def return_schemas_in_db(request: SchemaListRequest):
    """Returns the full list of schemas in a database."""
    DUMMY_CONNECTOR_CREDS.update({"database": request.dict()["database"]})
    try:
        connector = PostgresConnector(DUMMY_CONNECTOR_CREDS)
        schemas = connector.get_schemas_in_db()
    except sqlalchemy.exc.OperationalError as e:
        raise DbMissingError(message=str(e)) from e
    return ListOfSchemas(**{"schemas": schemas})
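Example 1 leans on several names defined elsewhere in the service. A minimal sketch of what they might look like, assuming pydantic models and a plain custom exception; only the names come from the snippet, the bodies below are guesses:

from typing import List

from pydantic import BaseModel

# Hypothetical stand-ins -- the snippet only shows these names being used.
DUMMY_CONNECTOR_CREDS = {
    "host": "localhost",
    "username": "******",
    "password": "******",
    "port": "5432",
}

class SchemaListRequest(BaseModel):
    database: str

class ListOfSchemas(BaseModel):
    schemas: List[str]

class DbMissingError(Exception):
    """Raised when the requested database cannot be reached."""

    def __init__(self, message):
        super().__init__(message)
        self.message = message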
Example 2
def return_table_content_from_sql(request: SqlRequest):
    """Returns table content from a sql statement.

    It must be a select otherwise we error.
    """
    try:
        connector = PostgresConnector(DUMMY_CONNECTOR_CREDS)
        table_content_output = connector.return_result_from_raw_sql(
            request.dict()["sql_string"])
    except NotImplementedError as e:
        raise NotImplementedQueryType(message=str(e)) from e
    except Exception as e:
        raise UnhandledException(message=str(e)) from e
    return TableContent(**table_content_output)
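The connector method behind Example 2 is not shown; the docstring only promises that non-SELECT statements fail. A minimal sketch of such a guard, assuming the connector holds a SQLAlchemy engine and that TableContent expects a columns/rows payload (both are assumptions; the real return_result_from_raw_sql may differ):

import sqlalchemy

def return_result_from_raw_sql(self, sql_string):
    """Runs a raw SQL string, refusing anything that is not a SELECT."""
    if not sql_string.lstrip().lower().startswith("select"):
        raise NotImplementedError("Only SELECT statements are supported.")
    with self.engine.connect() as conn:  # assumes a SQLAlchemy engine attribute
        result = conn.execute(sqlalchemy.text(sql_string))
        columns = list(result.keys())
        rows = [list(row) for row in result]
    # Assumed payload shape for TableContent(**...)
    return {"columns": columns, "rows": rows}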
Example 3
def return_table_content(request: TableContentRequest) -> TableContent:
    """Dummy"""
    connector = PostgresConnector({
        "host": "localhost",
        "username": "******",
        "password": "******",
        "database": "anytool_test_db",
        "port": "5432",
    })
    table_content_payload = connector.show_table_contents(
        target_schema=request.dict()["target_schema"],
        target_table=request.dict()["target_table"],
    )
    return table_content_payload
Example 4
    def save_table_to_datamart(self, df, table):
        """Saves the content of a dataframe to the Datamart database.
        Args:
            df (data frame):    data frame with selected data
            table (string):     name of the table to save to in the Datamart database
        Returns:
            None
        """
        try:
            connector = PostgresConnector(EndPoint.PROTESTS_DATAMART.value)
            connector.write_to_db(df, table, self.mode)
        except IOError as err:
            sys.stderr.write(
                "IO error when saving to database: {0}".format(err))
            logging.warning(
                "IO error when saving to database: {0}".format(err))
Example 5
def update_table_content(request: UpdateTableRequest):
    """Updates a table with new content."""
    connector = PostgresConnector({
        "host": "localhost",
        "username": "******",
        "password": "******",
        "database": "anytool_test_db",
        "port": "5432",
    })
    try:
        connector.update_table(
            target_schema=request.dict()["target_schema"],
            target_table=request.dict()["target_table"],
            payload=request.dict()["payload"],
        )
    except KeyError as e:
        raise TableKeyError(message=str(e)) from e
Example 6
def return_tables_in_schema(request: SchemaRequest) -> ListOfTables:
    """Returns a list of tables given a schema name in the database.

    Args:
        request (SchemaRequest): JSON payload of the shape {"schema_name": "public"}

    Returns:
        ListOfTables: JSON payload of the shape {"tables": ["list", "of", "tables"]}
    """
    connector = PostgresConnector({
        "host": "localhost",
        "username": "******",
        "password": "******",
        "database": "anytool_test_db",
        "port": "5432",
    })
    tables = connector.get_tables_in_db_schema(
        target_schema=request.dict()["schema_name"])
    return ListOfTables(**{"tables": tables})
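For reference, a hedged sketch of the request and response models Example 6 assumes, written as ordinary pydantic models matching the shapes described in the docstring (the field names come from the snippet; the real models may carry more validation):

from typing import List

from pydantic import BaseModel

class SchemaRequest(BaseModel):
    schema_name: str

class ListOfTables(BaseModel):
    tables: List[str]

# Example call, assuming a reachable database:
# response = return_tables_in_schema(SchemaRequest(schema_name="public"))
# print(response.tables)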
Example 7
    def write_df_to_db(self, data_frame, schema_type):
        """
        Writes data frame object contents to the main and delta tables (central data storage).
        Correct table name is selected based on the schema type.
        Args:
            data_frame (data frame):    Spark data frame object with transformed data.
            schema_type (string):       Specifies which schema to be used for data transformation
        Returns:
            None.
        """
        db_table = None
        delta_table = None

        if schema_type == "event":
            db_table = self.events_table
            delta_table = self.events_delta_table
        elif schema_type == "mention":
            db_table = self.mentions_table
            delta_table = self.mentions_delta_table
        elif schema_type == "gkg":
            db_table = self.gkg_table
            delta_table = self.gkg_delta_table
        else:
            # Fail fast instead of passing table=None to write_to_db below.
            raise ValueError("Unknown schema type: {0}".format(schema_type))

        try:
            connector = PostgresConnector(EndPoint.CENTRAL_STORAGE.value)
            connector.write_to_db(data_frame, db_table, self.mode)
            connector.write_to_db(data_frame, delta_table, self.mode)
        except IOError as err:
            sys.stderr.write("IO error when saving to database: {0}".format(err))
            logging.warning("IO error when saving to database: {0}".format(err))
Example 8
from postgres_connector import PostgresConnector

db_connector = PostgresConnector("postgres", "localhost", "5432", "nba",
                                 "postgresql://localhost/nba")
db_connection = db_connector.create_database_connection()
db_session = db_connector.create_database_session()
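Example 8 passes five positional arguments and then asks the connector for a connection and a session. A minimal sketch of a class with that surface, assuming SQLAlchemy underneath; the parameter order is inferred from the call, and the real class may differ:

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

class PostgresConnector:
    # Parameter order inferred from the call above.
    def __init__(self, user, host, port, database, url):
        self.url = url
        self.engine = create_engine(url)

    def create_database_connection(self):
        """Returns a raw SQLAlchemy connection."""
        return self.engine.connect()

    def create_database_session(self):
        """Returns a new ORM session bound to the engine."""
        Session = sessionmaker(bind=self.engine)
        return Session()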
Example 9
def main():
    """
    Data loading process
    """

    # Connect to database
    connector = PostgresConnector()
    connector.connect()

    # Recreate tables
    connector.drop_tables()
    connector.create_tables()

    # Load all data
    DataLoader.load_data(connector)

    # Run some test queries
    rec = connector.query_ipv4('1.0.1.66')
    print('1.0.1.66 =>', rec)
    rec = connector.query_ipv6('9c:1e::95:69:9d:41')
    print('9c:1e::95:69:9d:41 =>', rec)

    # Close db connection
    connector.disconnect()
Example 10
    def read_data_from_datamart(self, query):
        """Reads the result of a query from the Datamart database into a Spark dataframe."""
        connector = PostgresConnector(EndPoint.PROTESTS_DATAMART.value)
        df = connector.read_from_db(self.spark, query)
        return df
Example 11
    def read_data_from_central_storage(self, query):
        """Reads the result of a query from the central storage database into a Spark dataframe."""
        connector = PostgresConnector(EndPoint.CENTRAL_STORAGE.value)
        df = connector.read_from_db(self.spark, query)
        return df
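Examples 4, 7, 10, and 11 funnel Spark dataframes through write_to_db and read_from_db, neither of which is shown. A plausible sketch using Spark's JDBC data source, assuming the endpoint value is a credentials dict like the EndPoint sketch above (everything here is an assumption except the two method names and their call shapes):

class PostgresConnector:
    def __init__(self, creds):
        self.creds = creds
        self.jdbc_url = "jdbc:postgresql://{host}:{port}/{database}".format(**creds)

    def read_from_db(self, spark, query):
        """Runs a query through Spark's JDBC source and returns a dataframe."""
        return (spark.read.format("jdbc")
                .option("url", self.jdbc_url)
                .option("query", query)
                .option("user", self.creds["username"])
                .option("password", self.creds["password"])
                .option("driver", "org.postgresql.Driver")
                .load())

    def write_to_db(self, df, table, mode):
        """Writes a dataframe to the given table; mode is e.g. 'append' or 'overwrite'."""
        (df.write.format("jdbc")
         .option("url", self.jdbc_url)
         .option("dbtable", table)
         .option("user", self.creds["username"])
         .option("password", self.creds["password"])
         .option("driver", "org.postgresql.Driver")
         .mode(mode)
         .save())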