예제 #1
0
    def test_exec_success_json(self, gcs_hook_mock_class, oracle_hook_mock_class):
        """Execute the operator in JSON mode and verify the single uploaded file."""
        operator = OracleToGCSOperator(
            task_id=TASK_ID,
            oracle_conn_id=ORACLE_CONN_ID,
            sql=SQL,
            bucket=BUCKET,
            filename=JSON_FILENAME,
        )

        oracle_hook_mock = oracle_hook_mock_class.return_value
        # get_conn().cursor() always resolves to the same child mock, so bind it once.
        cursor_mock = oracle_hook_mock.get_conn.return_value.cursor.return_value
        cursor_mock.__iter__.return_value = iter(ROWS)
        cursor_mock.description = CURSOR_DESCRIPTION

        def _assert_upload(bucket, obj, tmp_filename, mime_type=None, gzip=False):
            # Every upload must target the expected bucket/object with the JSON
            # mime type and the expected gzip setting, carrying the NDJSON rows.
            self.assertEqual(BUCKET, bucket)
            self.assertEqual(JSON_FILENAME.format(0), obj)
            self.assertEqual('application/json', mime_type)
            self.assertEqual(GZIP, gzip)
            with open(tmp_filename, 'rb') as file:
                self.assertEqual(b''.join(NDJSON_LINES), file.read())

        gcs_hook_mock_class.return_value.upload.side_effect = _assert_upload

        operator.execute(None)

        oracle_hook_mock_class.assert_called_once_with(oracle_conn_id=ORACLE_CONN_ID)
        cursor_mock.execute.assert_called_once_with(SQL)
예제 #2
0
    def test_file_splitting(self, gcs_hook_mock_class, oracle_hook_mock_class):
        """Test that ndjson is split by approx_max_file_size_bytes param."""
        oracle_hook_mock = oracle_hook_mock_class.return_value
        # get_conn().cursor() always resolves to the same child mock, so bind it once.
        cursor_mock = oracle_hook_mock.get_conn.return_value.cursor.return_value
        cursor_mock.__iter__.return_value = iter(ROWS)
        cursor_mock.description = CURSOR_DESCRIPTION

        # First chunk holds two NDJSON rows, the overflow row lands in chunk 1.
        expected_upload = {
            JSON_FILENAME.format(0): b''.join(NDJSON_LINES[:2]),
            JSON_FILENAME.format(1): NDJSON_LINES[2],
        }

        def _assert_upload(bucket, obj, tmp_filename, mime_type=None, gzip=False):
            # Each uploaded object must carry exactly the bytes planned for it.
            self.assertEqual(BUCKET, bucket)
            self.assertEqual('application/json', mime_type)
            self.assertEqual(GZIP, gzip)
            with open(tmp_filename, 'rb') as file:
                self.assertEqual(expected_upload[obj], file.read())

        gcs_hook_mock_class.return_value.upload.side_effect = _assert_upload

        # Cap the file size at the size of the first chunk to force a split.
        operator = OracleToGCSOperator(
            task_id=TASK_ID,
            sql=SQL,
            bucket=BUCKET,
            filename=JSON_FILENAME,
            approx_max_file_size_bytes=len(expected_upload[JSON_FILENAME.format(0)]),
        )
        operator.execute(None)
예제 #3
0
 def test_init(self):
     """Test OracleToGCSOperator instance is properly initialized.

     The docstring previously referenced the operator's old name
     (OracleToGoogleCloudStorageOperator); it now matches the class
     actually under test.
     """
     op = OracleToGCSOperator(task_id=TASK_ID,
                              sql=SQL,
                              bucket=BUCKET,
                              filename=JSON_FILENAME)
     # Constructor arguments must be stored verbatim on the operator.
     self.assertEqual(op.task_id, TASK_ID)
     self.assertEqual(op.sql, SQL)
     self.assertEqual(op.bucket, BUCKET)
     self.assertEqual(op.filename, JSON_FILENAME)
예제 #4
0
 def test_init(self):
     """Test OracleToGCSOperator instance is properly initialized.

     The docstring previously referenced the operator's old name
     (OracleToGoogleCloudStorageOperator); it now matches the class
     actually under test.
     """
     op = OracleToGCSOperator(task_id=TASK_ID,
                              sql=SQL,
                              bucket=BUCKET,
                              filename=JSON_FILENAME)
     # Constructor arguments must be stored verbatim on the operator.
     assert op.task_id == TASK_ID
     assert op.sql == SQL
     assert op.bucket == BUCKET
     assert op.filename == JSON_FILENAME
예제 #5
0
    def test_schema_file(self, gcs_hook_mock_class, oracle_hook_mock_class):
        """Test writing schema files."""
        oracle_hook_mock = oracle_hook_mock_class.return_value
        # get_conn().cursor() always resolves to the same child mock, so bind it once.
        cursor_mock = oracle_hook_mock.get_conn.return_value.cursor.return_value
        cursor_mock.__iter__.return_value = iter(ROWS)
        cursor_mock.description = CURSOR_DESCRIPTION

        def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip):  # pylint: disable=unused-argument
            # Only the schema object is inspected; the data upload is covered
            # by other tests.
            if obj != SCHEMA_FILENAME:
                return
            with open(tmp_filename, 'rb') as file:
                self.assertEqual(b''.join(SCHEMA_JSON), file.read())

        gcs_hook_mock_class.return_value.upload.side_effect = _assert_upload

        operator = OracleToGCSOperator(
            task_id=TASK_ID,
            sql=SQL,
            bucket=BUCKET,
            filename=JSON_FILENAME,
            schema_filename=SCHEMA_FILENAME,
        )
        operator.execute(None)

        # once for the file and once for the schema
        self.assertEqual(2, gcs_hook_mock_class.return_value.upload.call_count)
예제 #6
0
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

import os
from datetime import datetime

from airflow import models
from airflow.providers.google.cloud.transfers.oracle_to_gcs import OracleToGCSOperator

# Destination bucket can be overridden via the GCP_GCS_BUCKET env var.
GCS_BUCKET = os.environ.get("GCP_GCS_BUCKET", "example-airflow-oracle-gcs")
FILENAME = 'test_file'

SQL_QUERY = "SELECT * from test_table"

# Example DAG: export the result of an Oracle query to a GCS object.
with models.DAG(
    'example_oracle_to_gcs',
    schedule_interval=None,  # manually triggered only
    start_date=datetime(2021, 1, 1),
    catchup=False,
    tags=['example'],
) as dag:
    # [START howto_operator_oracle_to_gcs]
    upload = OracleToGCSOperator(
        task_id='oracle_to_gcs',
        sql=SQL_QUERY,
        bucket=GCS_BUCKET,
        filename=FILENAME,
        export_format='csv',
    )
    # [END howto_operator_oracle_to_gcs]