# Assumed Airflow 1.x-style imports for this snippet (not shown in the original listing):
import logging

from airflow import models
from airflow.settings import Session


def initialize_etl_example():
    logging.info('Creating connections, pool and sql path')

    session = Session()

    def create_new_conn(session, attributes):
        new_conn = models.Connection()
        new_conn.conn_id = attributes.get("conn_id")
        new_conn.conn_type = attributes.get('conn_type')
        new_conn.host = attributes.get('host')
        new_conn.port = attributes.get('port')
        new_conn.schema = attributes.get('schema')
        new_conn.login = attributes.get('login')
        new_conn.set_password(attributes.get('password'))

        session.add(new_conn)
        session.commit()

    create_new_conn(
        session, {
            "conn_id": "mssql",
            "conn_type": "MS SQL Server",
            "host": "mssql",
            "port": 1433,
            "schema": "master",
            "login": "******",
            "password": "******"
        })

    create_new_conn(
        session, {
            "conn_id": "postgres_dwh",
            "conn_type": "postgres",
            "host": "postgres",
            "port": 5432,
            "schema": "dwh",
            "login": "******",
            "password": "******"
        })

    new_var = models.Variable()
    new_var.key = "sql_path"
    new_var.set_val("/usr/local/airflow/sql")
    session.add(new_var)
    session.commit()

    new_pool = models.Pool()
    new_pool.pool = "postgres_dwh"
    new_pool.slots = 10
    new_pool.description = "Allows max. 10 connections to the DWH"

    session.add(new_pool)
    session.commit()

    session.close()
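For context, here is a minimal sketch of how a DAG could consume the connection, pool and
variable created above. It is not part of the original example: the DAG id, schedule and
SQL file name are illustrative, and Airflow 1.x-style import paths are assumed.

from datetime import datetime

from airflow import DAG
from airflow.models import Variable
from airflow.operators.postgres_operator import PostgresOperator

# Resolve SQL templates from the "sql_path" variable set by initialize_etl_example().
with DAG(dag_id="etl_example_dag",                       # hypothetical DAG id
         start_date=datetime(2020, 1, 1),
         schedule_interval=None,
         template_searchpath=Variable.get("sql_path")) as dag:
    load_dwh = PostgresOperator(
        task_id="load_dwh",
        postgres_conn_id="postgres_dwh",                 # connection created above
        pool="postgres_dwh",                             # pool created above (10 slots)
        sql="process_order_fact.sql",                    # hypothetical template under sql_path
    )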
Example #2
def setUp(self):
    self.pools = []
    for i in range(2):
        name = 'experimental_%s' % (i + 1)
        pool = models.Pool(
            pool=name,
            slots=i,
            description=name,
        )
        self.pools.append(pool)
    with create_session() as session:
        session.add_all(self.pools)
Example #3
def setUp(self):
    clear_db_pools()
    self.pools = [Pool.get_default_pool()]
    for i in range(self.USER_POOL_COUNT):
        name = f'experimental_{i + 1}'
        pool = models.Pool(
            pool=name,
            slots=i,
            description=name,
        )
        self.pools.append(pool)
    with create_session() as session:
        session.add_all(self.pools)
Example #4
def setUp(self):
    self.session = settings.Session()
    self.pools = []
    for i in range(2):
        name = 'experimental_%s' % (i + 1)
        pool = models.Pool(
            pool=name,
            slots=i,
            description=name,
        )
        self.session.add(pool)
        self.pools.append(pool)
    self.session.commit()
Example #5
def initialize_etl_example():
    logging.info('Creating connections, pool and sql path')

    session = Session()

    def create_new_conn(session, attributes):
        new_conn = models.Connection()
        new_conn.conn_id = attributes.get("conn_id")
        new_conn.conn_type = attributes.get('conn_type')
        new_conn.host = attributes.get('host')
        new_conn.port = attributes.get('port')
        new_conn.schema = attributes.get('schema')
        new_conn.login = attributes.get('login')
        new_conn.set_password(attributes.get('password'))

        session.add(new_conn)
        session.commit()

    create_new_conn(session,
                    {"conn_id": "postgres_oltp",
                     "conn_type": "postgres",
                     "host": "postgres",
                     "port": 5432,
                     "schema": "orders",
                     "login": "******",
                     "password": "******"})

    # Changed from dwh_svc_account to db_owner.
    create_new_conn(session,
                    {"conn_id": "postgres_dwh",
                     "conn_type": "postgres",
                     "host": "postgres",
                     "port": 5432,
                     "schema": "dwh",
                     "login": "******",
                     "password": "******"})
#   Variable setting was moved to the startup batch script to avoid console error
#   messages when loading the DAGs that reference this variable.
#   new_var = models.Variable()
#   new_var.key = "sql_path"
#   new_var.set_val("/usr/local/airflow/sql")
#   session.add(new_var)
#   session.commit()
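#   (A possible startup-batch equivalent, assuming the Airflow 1.10 CLI is available:
#       airflow variables --set sql_path /usr/local/airflow/sql
#    the exact invocation depends on the Airflow version in use.)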

    new_pool = models.Pool()
    new_pool.pool = "postgres_dwh"
    new_pool.slots = 10
    new_pool.description = "Allows max. 10 connections to the DWH"

    session.add(new_pool)
    session.commit()
    session.close()
Example #6
def setUp(self):
    self.session = settings.Session()
    clear_db_pools()
    self.pools = [Pool.get_default_pool()]
    for i in range(self.USER_POOL_COUNT):
        name = 'experimental_%s' % (i + 1)
        pool = models.Pool(
            pool=name,
            slots=i,
            description=name,
        )
        self.session.add(pool)
        self.pools.append(pool)
    self.session.commit()
Example #7
# Assumed Airflow 1.x-style imports for this snippet (not shown in the original listing):
import logging

from airflow import models
from airflow.settings import Session


def generate_config():
    logging.info('Creating connections, pool and sql path')

    session = Session()

    def create_new_conn(session, attributes):
        if session.query(models.Connection).filter(
                models.Connection.conn_id == attributes.get(
                    "conn_id")).count() == 0:
            new_conn = models.Connection()
            new_conn.conn_id = attributes.get("conn_id")
            new_conn.conn_type = attributes.get('conn_type')
            new_conn.host = attributes.get('host')
            new_conn.port = attributes.get('port')
            new_conn.schema = attributes.get('schema')
            new_conn.login = attributes.get('login')
            new_conn.set_password(attributes.get('password'))
            session.add(new_conn)
            session.commit()
        else:
            logging.info('Connection {} already exists'.format(
                attributes.get("conn_id")))

    create_new_conn(
        session, {
            "conn_id": "mysql_oltp",
            "conn_type": "mysql",
            "host": "host.docker.internal",
            "port": 3306,
            "schema": "employees",
            "login": "******",
            "password": "******"
        })

    create_new_conn(
        session, {
            "conn_id": "mysql_dwh",
            "conn_type": "mysql",
            "host": "host.docker.internal",
            "port": 3306,
            "schema": "dwh",
            "login": "******",
            "password": "******"
        })

    create_new_conn(
        session, {
            "conn_id": "postgres_oltp",
            "conn_type": "postgres",
            "host": "host.docker.internal",
            "port": 5432,
            "schema": "dwh",
            "login": "******",
            "password": "******"
        })

    create_new_conn(
        session, {
            "conn_id": "postgres_dwh",
            "conn_type": "postgres",
            "host": "host.docker.internal",
            "port": 5432,
            "schema": "dwh",
            "login": "******",
            "password": "******"
        })

    if session.query(models.Variable).filter(
            models.Variable.key == "sql_template_paths").count() == 0:
        new_var = models.Variable()
        new_var.key = "sql_template_paths"
        new_var.set_val("./sql_templates")
        session.add(new_var)
        session.commit()
    else:
        logging.info('Variable sql_template_paths already exists')

    if session.query(
            models.Pool).filter(models.Pool.pool == "mysql_dwh").count() == 0:
        new_pool = models.Pool()
        new_pool.pool = "mysql_dwh"
        new_pool.slots = 10
        new_pool.description = "Allows max. 10 connections to the DWH"
        session.add(new_pool)
        session.commit()
    else:
        logging.info('Pool mysql_dwh already exists')

    session.close()
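A minimal way to run generate_config() as a one-off initialization step is sketched below;
invoking it from a container entrypoint or startup script is an assumption, not part of the
original listing.

if __name__ == "__main__":
    # Run once at startup, e.g. from the scheduler container's entrypoint script.
    generate_config()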