def test_tunnel_without_password(self, ssh_mock):
    """A hook built from explicit args (key file, no password) must forward
    exactly those settings to the SSHTunnelForwarder constructor."""
    ssh_hook = SSHHook(
        remote_host='remote_host',
        port='port',
        username='******',
        timeout=10,
        key_file='fake.file',
    )
    with ssh_hook.get_tunnel(1234):
        ssh_mock.assert_called_once_with(
            'remote_host',
            ssh_port='port',
            ssh_username='******',
            ssh_pkey='fake.file',
            ssh_proxy=None,
            local_bind_address=('localhost',),
            remote_bind_address=('localhost', 1234),
            host_pkey_directories=[],
            logger=ssh_hook.log,
        )
def test_tunnel_with_private_key(self, ssh_mock):
    """A hook whose connection extra carries a private key must pass that
    key (TEST_PKEY) through to the SSHTunnelForwarder constructor."""
    tunnel_hook = SSHHook(
        ssh_conn_id=self.CONN_SSH_WITH_PRIVATE_KEY_EXTRA,
        remote_host='remote_host',
        port='port',
        username='******',
        timeout=10,
    )
    with tunnel_hook.get_tunnel(1234):
        ssh_mock.assert_called_once_with(
            'remote_host',
            ssh_port='port',
            ssh_username='******',
            ssh_pkey=TEST_PKEY,
            ssh_proxy=None,
            local_bind_address=('localhost',),
            remote_bind_address=('localhost', 1234),
            host_pkey_directories=[],
            logger=tunnel_hook.log,
        )
def _record_statistics(ts, postgres_conn_id, ssh_conn_id, **context):
    """Fetch GitHub repo statistics and persist one record per repo to Postgres.

    Connects to Postgres through an SSH tunnel, which is why PostgresHook
    cannot be used: the local TCP port is assigned dynamically by the tunnel.

    Args:
        ts: Timestamp to record the statistics under (Airflow templated ``ts``).
        postgres_conn_id: Airflow connection id for the Postgres database.
        ssh_conn_id: Airflow connection id for the SSH bastion host.
        **context: Remaining Airflow task context (unused).
    """
    # Can't use PostgresHook because our TCP port is likely to be dynamic
    # because we are connecting through an SSH Tunnel
    (pg_secret, ) = secrets.get_connections(postgres_conn_id)
    (gh_token, ) = secrets.get_connections("github_metrics_token")
    ssh_conn = SSHHook(ssh_conn_id=ssh_conn_id)
    tunnel = ssh_conn.get_tunnel(remote_port=pg_secret.port,
                                 remote_host=pg_secret.host)
    # BUGFIX: the previous explicit tunnel.start() before the ``with`` block is
    # redundant -- SSHTunnelForwarder.__enter__ starts the tunnel itself -- and
    # it started the tunnel outside the context manager's cleanup scope.
    with tunnel:
        log(f"Connected SSH Tunnel: {tunnel}")
        # Airflow conflates dbname and schema, even though they are very different in PG
        constr = (
            f"host=localhost user={pg_secret.login} dbname={pg_secret.schema} "
            f"port={tunnel.local_bind_port} password={pg_secret.password}")
        # It's important to wrap this connection in a try/finally block, otherwise
        # we can cause a deadlock with the SSHTunnel
        conn = psycopg2.connect(constr)
        try:
            log(f"Connected to Postgres: {conn}")
            cur = conn.cursor()
            # ensure_pg_table(cur)
            stats_retriever = GitHubStatsRetriever(gh_token.password)
            for owner, repo in TRACKED_REPOS:
                log(f"Recording repo stats for {owner}/{repo}")
                stats = stats_retriever.get_repo_stats(owner, repo)
                log(stats)
                save_record_to_pg(cur, ts, f"{owner}/{repo}", stats)
            # Single commit so all repos land atomically.
            conn.commit()
            log("Transaction committed")
            cur.close()
        finally:
            # conn.close() also releases any cursors still open on this
            # connection, so the cursor cannot leak past this point.
            conn.close()