def get_bigquery_credentials(cls, profile_credentials):
    method = profile_credentials.method
    creds = GoogleServiceAccountCredentials.Credentials

    if method == BigQueryConnectionMethod.OAUTH:
        credentials, _ = get_bigquery_defaults(scopes=cls.SCOPE)
        return credentials

    elif method == BigQueryConnectionMethod.SERVICE_ACCOUNT:
        keyfile = profile_credentials.keyfile
        return creds.from_service_account_file(keyfile, scopes=cls.SCOPE)

    elif method == BigQueryConnectionMethod.SERVICE_ACCOUNT_JSON:
        details = profile_credentials.keyfile_json
        return creds.from_service_account_info(details, scopes=cls.SCOPE)

    elif method == BigQueryConnectionMethod.OAUTH_SECRETS:
        return GoogleCredentials.Credentials(
            token=profile_credentials.token,
            refresh_token=profile_credentials.refresh_token,
            client_id=profile_credentials.client_id,
            client_secret=profile_credentials.client_secret,
            token_uri=profile_credentials.token_uri,
            scopes=cls.SCOPE,
        )

    error = 'Invalid `method` in profile: "{}"'.format(method)
    raise FailedToConnectException(error)
def open(cls, connection):
    if connection.state == 'open':
        logger.debug('Connection is already open, skipping open.')
        return connection

    try:
        handle = cls.get_bigquery_client(connection.credentials)
    except google.auth.exceptions.DefaultCredentialsError:
        logger.info("Please log into GCP to continue")
        gcloud.setup_default_credentials()
        handle = cls.get_bigquery_client(connection.credentials)
    except Exception as e:
        logger.debug("Got an error when attempting to create a bigquery "
                     "client: '{}'".format(e))
        connection.handle = None
        connection.state = 'fail'
        raise FailedToConnectException(str(e))

    connection.handle = handle
    connection.state = 'open'
    return connection
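# A minimal standalone sketch (not dbt code) of the fallback the open() above
# relies on: google.auth.default() looks up Application Default Credentials
# and raises DefaultCredentialsError when none are configured, which is the
# signal to run an interactive setup step and retry once. The function name
# `load_adc` is invented for illustration.
import google.auth
import google.auth.exceptions


def load_adc(scopes):
    try:
        # Returns (credentials, project_id) when ADC are available.
        credentials, project_id = google.auth.default(scopes=scopes)
        return credentials, project_id
    except google.auth.exceptions.DefaultCredentialsError:
        # e.g. `gcloud auth application-default login` has not been run yet;
        # a caller could trigger that setup here and then retry.
        return None, None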
def open(cls, connection):
    if connection.state == 'open':
        logger.debug('Connection is already open, skipping open.')
        return connection

    try:
        creds = connection.credentials

        handle = snowflake.connector.connect(
            account=creds.account,
            user=creds.user,
            database=creds.database,
            schema=creds.schema,
            warehouse=creds.warehouse,
            role=creds.role,
            autocommit=False,
            client_session_keep_alive=creds.client_session_keep_alive,
            application='dbt',
            **creds.auth_args()
        )

        connection.handle = handle
        connection.state = 'open'
    except snowflake.connector.errors.Error as e:
        logger.debug("Got an error when attempting to open a snowflake "
                     "connection: '{}'".format(e))
        connection.handle = None
        connection.state = 'fail'
        raise FailedToConnectException(str(e))

    return connection
def exception_handler(self, sql):
    try:
        yield
    except snowflake.connector.errors.ProgrammingError as e:
        msg = str(e)

        logger.debug('Snowflake query id: {}'.format(e.sfqid))
        logger.debug('Snowflake error: {}'.format(msg))

        if 'Empty SQL statement' in msg:
            logger.debug("got empty sql statement, moving on")
        elif 'This session does not have a current database' in msg:
            self.release()
            raise FailedToConnectException(
                ('{}\n\nThis error sometimes occurs when invalid '
                 'credentials are provided, or when your default role '
                 'does not have access to use the specified database. '
                 'Please double check your profile and try again.'
                 ).format(msg))
        else:
            self.release()
            raise DatabaseException(msg)
    except Exception as e:
        if isinstance(e, snowflake.connector.errors.Error):
            logger.debug('Snowflake query id: {}'.format(e.sfqid))

        logger.debug("Error running SQL: {}", sql)
        logger.debug("Rolling back transaction.")
        self.release()
        if isinstance(e, RuntimeException):
            # during a sql query, an internal to dbt exception was raised.
            # this sounds a lot like a signal handler and probably has
            # useful information, so raise it without modification.
            raise
        raise RuntimeException(str(e)) from e
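# The bare `yield` above implies exception_handler() is used as a
# generator-based context manager (decorated with contextlib.contextmanager
# in the surrounding class). A minimal self-contained sketch of that pattern,
# with the class and exception names invented for illustration:
from contextlib import contextmanager


class DatabaseException(Exception):
    """Stand-in for the adapter's DatabaseException."""


class DemoConnectionManager:
    @contextmanager
    def exception_handler(self, sql):
        # Control passes to the caller's `with` body at the yield; any
        # exception raised there propagates back here and is translated.
        try:
            yield
        except Exception as e:
            raise DatabaseException(f"Error running SQL: {sql}") from e


# Usage: wrap query execution so driver errors surface as DatabaseException.
mgr = DemoConnectionManager()
with mgr.exception_handler("select 1"):
    pass  # cursor.execute("select 1") would go here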
def open(cls, connection: Connection) -> Connection:
    if connection.state == "open":
        logger.debug("Connection is already open, skipping open.")
        return connection

    try:
        creds: AthenaCredentials = connection.credentials

        handle = AthenaConnection(
            s3_staging_dir=creds.s3_staging_dir,
            region_name=creds.region_name,
            schema_name=creds.schema,
            work_group=creds.work_group,
            cursor_class=AthenaCursor,
            formatter=AthenaParameterFormatter(),
            poll_interval=creds.poll_interval,
            profile_name=creds.aws_profile_name,
        )

        connection.state = "open"
        connection.handle = handle
    except Exception as e:
        logger.debug("Got an error when attempting to open an Athena "
                     "connection: '{}'".format(e))
        connection.handle = None
        connection.state = "fail"
        raise FailedToConnectException(str(e))

    return connection
def open(cls, connection):
    if connection.state == "open":
        logger.debug("Connection is already open, skipping open.")
        return connection

    credentials = connection.credentials
    schemas = connection.credentials.schemas.split(',')

    try:
        # sqlite3.connect() takes the database path as its first argument
        # (there is no `host` keyword); here the `host` field of the
        # credentials holds the path to the main database file.
        handle: sqlite3.Connection = sqlite3.connect(credentials.host)
        cursor = handle.cursor()

        for schema in schemas:
            schema_file, schema_name = schema, schema.replace('.db', '')
            cursor.execute(f"attach '{schema_file}' as {schema_name}")

        connection.state = "open"
        connection.handle = handle
        return connection
    except sqlite3.Error as e:
        logger.debug(
            "Got an error when attempting to open a sqlite3 connection: '%s'", e)
        connection.handle = None
        connection.state = "fail"
        raise FailedToConnectException(str(e))
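# A standalone sketch (not dbt code) of the ATTACH mechanism the sqlite
# open() methods rely on: ATTACH makes a second database addressable under a
# schema-like alias, so tables can be qualified as alias.table_name. The
# alias `analytics` and the in-memory databases are just for illustration.
import sqlite3

conn = sqlite3.connect(":memory:")
cur = conn.cursor()
cur.execute("attach ':memory:' as analytics")
cur.execute("create table analytics.users (id integer)")
cur.execute("insert into analytics.users values (1)")
print(cur.execute("select count(*) from analytics.users").fetchone())  # (1,)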
def open(cls, connection: Connection) -> Connection:
    if connection.state == 'open':
        logger.debug('Connection is already open, skipping open.')
        return connection

    credentials = connection.credentials

    try:
        connection_string = '{}:{}/{}'
        handle = oracle.connect(
            credentials.username,
            credentials.password,
            connection_string.format(credentials.host,
                                     credentials.port,
                                     credentials.database),
            mode=oracle.SYSDBA if credentials.as_sysdba else oracle.DEFAULT_AUTH
        )
        connection.state = 'open'
        connection.handle = handle

        if credentials.nls_date_format is not None:
            handle.cursor().execute(
                f"ALTER SESSION SET NLS_DATE_FORMAT = '{credentials.nls_date_format}'")
            handle.cursor().execute(
                f"ALTER SESSION SET NLS_TIMESTAMP_FORMAT = '{credentials.nls_date_format}XFF'")
            handle.cursor().execute(
                f"ALTER SESSION SET NLS_TIMESTAMP_TZ_FORMAT = '{credentials.nls_date_format}XFF TZR'")
    except Exception as e:
        logger.debug(
            f"Got an error when attempting to open an Oracle connection. "
            f"Connection string = {credentials.connection_string}. Error: {e}"
        )
        connection.handle = None
        connection.state = 'fail'
        raise FailedToConnectException(
            f"{str(e)} using connection string {credentials.connection_string}")

    return connection
def get_bigquery_credentials(cls, profile_credentials):
    method = profile_credentials.method
    creds = service_account.Credentials

    if method == BigQueryConnectionMethod.OAUTH:
        credentials, project_id = google.auth.default(scopes=cls.SCOPE)
        return credentials

    elif method == BigQueryConnectionMethod.SERVICE_ACCOUNT:
        keyfile = profile_credentials.keyfile
        return creds.from_service_account_file(keyfile, scopes=cls.SCOPE)

    elif method == BigQueryConnectionMethod.SERVICE_ACCOUNT_JSON:
        details = profile_credentials.keyfile_json
        return creds.from_service_account_info(details, scopes=cls.SCOPE)

    error = 'Invalid `method` in profile: "{}"'.format(method)
    raise FailedToConnectException(error)
def open(cls, connection: Connection):
    if connection.state == "open":
        logger.debug("Connection is already open, skipping open.")
        return connection

    credentials: SQLiteCredentials = connection.credentials

    schemas_and_paths = {}
    for schema, path in credentials.schemas_and_paths.items():
        # Make .db file path absolute
        schemas_and_paths[schema] = os.path.abspath(path)

    try:
        if 'main' in schemas_and_paths:
            handle: sqlite3.Connection = sqlite3.connect(
                schemas_and_paths['main'])
        else:
            raise FailedToConnectException(
                "at least one schema must be called 'main'")

        if len(credentials.extensions) > 0:
            handle.enable_load_extension(True)

        for ext_path in credentials.extensions:
            handle.load_extension(ext_path)

        cursor = handle.cursor()

        attached = []
        for schema in set(schemas_and_paths.keys()) - set(['main']):
            path = schemas_and_paths[schema]
            cursor.execute(f"attach '{path}' as '{schema}'")
            attached.append(schema)

        for path in glob.glob(
                os.path.join(credentials.schema_directory, "*.db")):
            abs_path = os.path.abspath(path)

            # if file was already attached from being defined in
            # schemas_and_paths, ignore it
            if abs_path not in schemas_and_paths.values():
                schema = os.path.basename(path)[:-3]

                # has schema name been used already?
                if schema not in attached:
                    cursor.execute(f"attach '{path}' as '{schema}'")
                else:
                    raise FailedToConnectException(
                        f"found {path} while scanning schema_directory, but "
                        f"cannot attach it as '{schema}' because that schema "
                        f"name is already defined in schemas_and_paths. "
                        f"fix your ~/.dbt/profiles.yml file")

        # # uncomment these lines to print out SQL: this only happens if statement is successful
        # handle.set_trace_callback(print)
        # sqlite3.enable_callback_tracebacks(True)

        connection.state = "open"
        connection.handle = handle
        return connection
    except sqlite3.Error as e:
        logger.debug(
            "Got an error when attempting to open a sqlite3 connection: '%s'", e)
        connection.handle = None
        connection.state = "fail"
        raise FailedToConnectException(str(e))
    except Exception as e:
        print(f"Unknown error opening SQLite connection: {e}")
        raise e
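# A short standalone sketch (not dbt code) of why the open() above tracks
# already-attached schema names: sqlite refuses to attach a second database
# under an alias that is already in use, so the adapter raises a clearer
# FailedToConnectException before hitting this driver error. The alias
# `extra` and the in-memory databases are just for illustration.
import sqlite3

conn = sqlite3.connect(":memory:")
cur = conn.cursor()
cur.execute("attach ':memory:' as extra")
try:
    cur.execute("attach ':memory:' as extra")
except sqlite3.OperationalError as e:
    print(e)  # e.g. "database extra is already in use"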