def test_exec_success(self, gcs_hook_mock_class, pg_hook_mock_class):
        """Test the execute function in case where the run is successful."""
        op = PostgresToGoogleCloudStorageOperator(
            task_id=TASK_ID,
            postgres_conn_id=POSTGRES_CONN_ID,
            sql=SQL,
            bucket=BUCKET,
            filename=FILENAME)

        pg_hook_mock = pg_hook_mock_class.return_value
        pg_hook_mock.get_conn().cursor().__iter__.return_value = iter(ROWS)
        pg_hook_mock.get_conn().cursor().description = CURSOR_DESCRIPTION

        gcs_hook_mock = gcs_hook_mock_class.return_value

        def _assert_upload(bucket, obj, tmp_filename, content_type):
            self.assertEqual(BUCKET, bucket)
            self.assertEqual(FILENAME.format(0), obj)
            self.assertEqual('application/json', content_type)
            with open(tmp_filename, 'rb') as f:
                self.assertEqual(b''.join(NDJSON_LINES), f.read())

        gcs_hook_mock.upload.side_effect = _assert_upload

        op.execute(None)

        pg_hook_mock_class.assert_called_once_with(
            postgres_conn_id=POSTGRES_CONN_ID)
        pg_hook_mock.get_conn().cursor().execute.assert_called_once_with(
            SQL, None)
    def test_file_splitting(self, gcs_hook_mock_class):
        """Ensure NDJSON output is chunked per approx_max_file_size_bytes."""

        mock_gcs_hook = gcs_hook_mock_class.return_value
        expected_chunks = {
            FILENAME.format(0): b''.join(NDJSON_LINES[:2]),
            FILENAME.format(1): NDJSON_LINES[2],
        }

        def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip):
            # Each uploaded chunk must match its precomputed slice,
            # uncompressed and with the JSON mime type.
            self.assertEqual(bucket, BUCKET)
            self.assertEqual(mime_type, 'application/json')
            self.assertFalse(gzip)
            with open(tmp_filename, 'rb') as uploaded:
                self.assertEqual(uploaded.read(), expected_chunks[obj])

        mock_gcs_hook.upload.side_effect = _assert_upload

        # Cap the file size at exactly the first chunk so the third line
        # spills into a second object.
        first_chunk_size = len(expected_chunks[FILENAME.format(0)])
        operator = PostgresToGoogleCloudStorageOperator(
            task_id=TASK_ID,
            sql=SQL,
            bucket=BUCKET,
            filename=FILENAME,
            approx_max_file_size_bytes=first_chunk_size)
        operator.execute(None)
    def test_file_splitting(self, gcs_hook_mock_class, pg_hook_mock_class):
        """Ensure NDJSON output is chunked per approx_max_file_size_bytes."""
        mock_pg_hook = pg_hook_mock_class.return_value
        # get_conn()/cursor() return the same child mock on every call,
        # so a single local configures the cursor.
        mock_cursor = mock_pg_hook.get_conn().cursor()
        mock_cursor.__iter__.return_value = iter(ROWS)
        mock_cursor.description = CURSOR_DESCRIPTION

        mock_gcs_hook = gcs_hook_mock_class.return_value
        expected_chunks = {
            FILENAME.format(0): b''.join(NDJSON_LINES[:2]),
            FILENAME.format(1): NDJSON_LINES[2],
        }

        def _assert_upload(bucket, obj, tmp_filename, content_type):
            # Each uploaded chunk must match its precomputed slice.
            self.assertEqual(bucket, BUCKET)
            self.assertEqual(content_type, 'application/json')
            with open(tmp_filename, 'rb') as uploaded:
                self.assertEqual(uploaded.read(), expected_chunks[obj])

        mock_gcs_hook.upload.side_effect = _assert_upload

        # Cap the file size at exactly the first chunk so the third line
        # spills into a second object.
        first_chunk_size = len(expected_chunks[FILENAME.format(0)])
        operator = PostgresToGoogleCloudStorageOperator(
            task_id=TASK_ID,
            sql=SQL,
            bucket=BUCKET,
            filename=FILENAME,
            approx_max_file_size_bytes=first_chunk_size)
        operator.execute(None)
    def test_schema_file(self, gcs_hook_mock_class, pg_hook_mock_class):
        """Verify the generated schema object contains SCHEMA_JSON."""
        mock_pg_hook = pg_hook_mock_class.return_value
        # Configure the (memoized) cursor mock once through a local.
        mock_cursor = mock_pg_hook.get_conn().cursor()
        mock_cursor.__iter__.return_value = iter(ROWS)
        mock_cursor.description = CURSOR_DESCRIPTION

        mock_gcs_hook = gcs_hook_mock_class.return_value

        def _assert_upload(bucket, obj, tmp_filename, content_type):
            # Only the schema object's payload is inspected; the data
            # upload passes through unchecked.
            if obj == SCHEMA_FILENAME:
                with open(tmp_filename, 'rb') as uploaded:
                    self.assertEqual(uploaded.read(), SCHEMA_JSON)

        mock_gcs_hook.upload.side_effect = _assert_upload

        operator = PostgresToGoogleCloudStorageOperator(
            task_id=TASK_ID,
            sql=SQL,
            bucket=BUCKET,
            filename=FILENAME,
            schema_filename=SCHEMA_FILENAME)
        operator.execute(None)

        # One upload for the data file plus one for the schema file.
        self.assertEqual(mock_gcs_hook.upload.call_count, 2)
    def test_file_splitting(self, gcs_hook_mock_class, pg_hook_mock_class):
        """Ensure NDJSON output is chunked per approx_max_file_size_bytes."""
        mock_pg_hook = pg_hook_mock_class.return_value
        # Mocked get_conn()/cursor() memoize their return value, so one
        # local suffices to configure the cursor.
        mock_cursor = mock_pg_hook.get_conn().cursor()
        mock_cursor.__iter__.return_value = iter(ROWS)
        mock_cursor.description = CURSOR_DESCRIPTION

        mock_gcs_hook = gcs_hook_mock_class.return_value
        expected_chunks = {
            FILENAME.format(0): b''.join(NDJSON_LINES[:2]),
            FILENAME.format(1): NDJSON_LINES[2],
        }

        def _assert_upload(bucket, obj, tmp_filename, content_type):
            # Every chunk that gets uploaded must match its expected slice.
            self.assertEqual(bucket, BUCKET)
            self.assertEqual(content_type, 'application/json')
            with open(tmp_filename, 'rb') as uploaded:
                self.assertEqual(uploaded.read(), expected_chunks[obj])

        mock_gcs_hook.upload.side_effect = _assert_upload

        # Size limit equals the first chunk, forcing a second object for
        # the remaining line.
        first_chunk_size = len(expected_chunks[FILENAME.format(0)])
        operator = PostgresToGoogleCloudStorageOperator(
            task_id=TASK_ID,
            sql=SQL,
            bucket=BUCKET,
            filename=FILENAME,
            approx_max_file_size_bytes=first_chunk_size)
        operator.execute(None)
    def test_exec_success(self, gcs_hook_mock_class, pg_hook_mock_class):
        """Verify a successful run uploads the full NDJSON payload as one object."""
        operator = PostgresToGoogleCloudStorageOperator(
            task_id=TASK_ID,
            postgres_conn_id=POSTGRES_CONN_ID,
            sql=SQL,
            bucket=BUCKET,
            filename=FILENAME)

        mock_pg_hook = pg_hook_mock_class.return_value
        # The mocked get_conn()/cursor() chain memoizes its return value,
        # so configuring it through one local is equivalent.
        mock_cursor = mock_pg_hook.get_conn().cursor()
        mock_cursor.__iter__.return_value = iter(ROWS)
        mock_cursor.description = CURSOR_DESCRIPTION

        mock_gcs_hook = gcs_hook_mock_class.return_value

        def _assert_upload(bucket, obj, tmp_filename, content_type):
            # Exactly one chunk, carrying every NDJSON line.
            self.assertEqual(bucket, BUCKET)
            self.assertEqual(obj, FILENAME.format(0))
            self.assertEqual(content_type, 'application/json')
            with open(tmp_filename, 'rb') as uploaded:
                self.assertEqual(uploaded.read(), b''.join(NDJSON_LINES))

        mock_gcs_hook.upload.side_effect = _assert_upload

        operator.execute(None)

        # Hook construction and query execution each happen exactly once.
        pg_hook_mock_class.assert_called_once_with(postgres_conn_id=POSTGRES_CONN_ID)
        mock_cursor.execute.assert_called_once_with(SQL, None)
    def test_schema_file(self, gcs_hook_mock_class, pg_hook_mock_class):
        """Verify the generated schema object contains SCHEMA_JSON."""
        mock_pg_hook = pg_hook_mock_class.return_value
        # One local for the memoized cursor mock keeps setup compact.
        mock_cursor = mock_pg_hook.get_conn().cursor()
        mock_cursor.__iter__.return_value = iter(ROWS)
        mock_cursor.description = CURSOR_DESCRIPTION

        mock_gcs_hook = gcs_hook_mock_class.return_value

        def _assert_upload(bucket, obj, tmp_filename, content_type):
            # Only the schema upload's payload is checked here.
            if obj == SCHEMA_FILENAME:
                with open(tmp_filename, 'rb') as uploaded:
                    self.assertEqual(uploaded.read(), SCHEMA_JSON)

        mock_gcs_hook.upload.side_effect = _assert_upload

        operator = PostgresToGoogleCloudStorageOperator(
            task_id=TASK_ID,
            sql=SQL,
            bucket=BUCKET,
            filename=FILENAME,
            schema_filename=SCHEMA_FILENAME)
        operator.execute(None)

        # One upload for the data file plus one for the schema file.
        self.assertEqual(mock_gcs_hook.upload.call_count, 2)
    def test_empty_query(self, gcs_hook_mock_class):
        """A query yielding zero rows must not trigger any GCS upload."""
        mock_gcs_hook = gcs_hook_mock_class.return_value

        operator = PostgresToGoogleCloudStorageOperator(
            task_id=TASK_ID,
            sql='SELECT * FROM postgres_to_gcs_operator_empty',
            bucket=BUCKET,
            filename=FILENAME)
        operator.execute(None)

        assert not mock_gcs_hook.upload.called, 'No data means no files in the bucket'
    def test_empty_query(self, gcs_hook_mock_class):
        """A query yielding zero rows must not trigger any GCS upload."""
        mock_gcs_hook = gcs_hook_mock_class.return_value

        operator = PostgresToGoogleCloudStorageOperator(
            task_id=TASK_ID,
            sql='SELECT * FROM postgres_to_gcs_operator_empty',
            bucket=BUCKET,
            filename=FILENAME)
        operator.execute(None)

        assert not mock_gcs_hook.upload.called, 'No data means no files in the bucket'
    def test_exec_success(self, gcs_hook_mock_class):
        """The whole result set should be uploaded as a single NDJSON object."""
        operator = PostgresToGoogleCloudStorageOperator(
            task_id=TASK_ID,
            postgres_conn_id=POSTGRES_CONN_ID,
            sql=SQL,
            bucket=BUCKET,
            filename=FILENAME)

        mock_gcs_hook = gcs_hook_mock_class.return_value

        def _assert_upload(bucket, obj, tmp_filename, content_type):
            # A single chunk carrying every NDJSON line is expected.
            self.assertEqual(bucket, BUCKET)
            self.assertEqual(obj, FILENAME.format(0))
            self.assertEqual(content_type, 'application/json')
            with open(tmp_filename, 'rb') as uploaded:
                self.assertEqual(uploaded.read(), b''.join(NDJSON_LINES))

        mock_gcs_hook.upload.side_effect = _assert_upload

        operator.execute(None)
    def test_exec_success(self, gcs_hook_mock_class):
        """The whole result set should be uploaded as a single NDJSON object."""
        operator = PostgresToGoogleCloudStorageOperator(
            task_id=TASK_ID,
            postgres_conn_id=POSTGRES_CONN_ID,
            sql=SQL,
            bucket=BUCKET,
            filename=FILENAME)

        mock_gcs_hook = gcs_hook_mock_class.return_value

        def _assert_upload(bucket, obj, tmp_filename, content_type):
            # A single chunk carrying every NDJSON line is expected.
            self.assertEqual(bucket, BUCKET)
            self.assertEqual(obj, FILENAME.format(0))
            self.assertEqual(content_type, 'application/json')
            with open(tmp_filename, 'rb') as uploaded:
                self.assertEqual(uploaded.read(), b''.join(NDJSON_LINES))

        mock_gcs_hook.upload.side_effect = _assert_upload

        operator.execute(None)
    def test_schema_file(self, gcs_hook_mock_class):
        """Verify the schema upload carries the expected SCHEMA_JSON body."""

        mock_gcs_hook = gcs_hook_mock_class.return_value

        def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip):  # pylint: disable=unused-argument
            # Only the schema object's payload is verified; data uploads
            # pass through unchecked.
            if obj == SCHEMA_FILENAME:
                with open(tmp_filename, 'rb') as uploaded:
                    self.assertEqual(uploaded.read(), SCHEMA_JSON)

        mock_gcs_hook.upload.side_effect = _assert_upload

        operator = PostgresToGoogleCloudStorageOperator(
            task_id=TASK_ID,
            sql=SQL,
            bucket=BUCKET,
            filename=FILENAME,
            schema_filename=SCHEMA_FILENAME)
        operator.execute(None)

        # One upload for the data file plus one for the schema file.
        self.assertEqual(mock_gcs_hook.upload.call_count, 2)