Code example #1: rejecting recursive `**` glob patterns
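The three test methods below all take a `mock_hook` argument, so they are presumably methods of a test class in which `AzureDataLakeHook` is patched. A minimal sketch of that shared scaffolding, where the patch target, constant values, and class name are assumptions rather than values taken from this document:

 import unittest
 from unittest import mock

 import pytest

 from airflow.exceptions import AirflowException
 from airflow.providers.microsoft.azure.transfers.local_to_adls import (
     LocalToAzureDataLakeStorageOperator,
 )

 TASK_ID = 'test-adls-upload-operator'  # assumed value
 LOCAL_PATH = 'test/*'                  # assumed value
 BAD_LOCAL_PATH = 'test/**'             # assumed value; a recursive glob
 REMOTE_PATH = 'TEST-DIR'               # assumed value


 class TestLocalToAzureDataLakeStorageOperator(unittest.TestCase):
     # Each test method below is assumed to carry a decorator along
     # these lines, which injects `mock_hook`:
     #
     # @mock.patch(
     #     'airflow.providers.microsoft.azure.transfers.local_to_adls'
     #     '.AzureDataLakeHook'
     # )
     ...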
 def test_execute_raises_for_bad_glob_val(self, mock_hook):
     operator = LocalToAzureDataLakeStorageOperator(
         task_id=TASK_ID,
         local_path=BAD_LOCAL_PATH,
         remote_path=REMOTE_PATH)
     with pytest.raises(AirflowException) as ctx:
         operator.execute(None)
     assert str(ctx.value) == "Recursive glob patterns using `**` are not supported"
Code example #2: a successful upload with default options
 def test_execute_success(self, mock_hook):
     operator = LocalToAzureDataLakeStorageOperator(
         task_id=TASK_ID,
         local_path=LOCAL_PATH,
         remote_path=REMOTE_PATH)
     operator.execute(None)
     mock_hook.return_value.upload_file.assert_called_once_with(
         local_path=LOCAL_PATH,
         remote_path=REMOTE_PATH,
         nthreads=64,
         overwrite=True,
         buffersize=4194304,
         blocksize=4194304,
     )
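The assertion pins a set of default upload parameters: 64 threads, overwrite enabled, and 4194304-byte (4 MiB) buffer and block sizes. Presumably these mirror constructor defaults with the same keyword names, so an explicit construction would look like the sketch below; the names and defaults are inferred from the asserted kwargs, not from the operator's documented signature:

 # Explicit construction with the defaults the test asserts.
 operator = LocalToAzureDataLakeStorageOperator(
     task_id=TASK_ID,
     local_path=LOCAL_PATH,
     remote_path=REMOTE_PATH,
     nthreads=64,                   # asserted default thread count
     overwrite=True,                # asserted default overwrite behaviour
     buffersize=4 * 1024 * 1024,    # 4194304 bytes
     blocksize=4 * 1024 * 1024,     # 4194304 bytes
 )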
Code example #3: forwarding extra_upload_options to the hook
 def test_extra_options_is_passed(self, mock_hook):
     operator = LocalToAzureDataLakeStorageOperator(
         task_id=TASK_ID,
         local_path=LOCAL_PATH,
         remote_path=REMOTE_PATH,
         extra_upload_options={'run': False},
     )
     operator.execute(None)
     mock_hook.return_value.upload_file.assert_called_once_with(
         local_path=LOCAL_PATH,
         remote_path=REMOTE_PATH,
         nthreads=64,
         overwrite=True,
         buffersize=4194304,
         blocksize=4194304,
         run=False,  # extra upload options
     )
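The extra `run=False` keyword in the assertion suggests the operator unpacks `extra_upload_options` on top of its base kwargs when calling `upload_file`. A minimal stand-in consistent with all three tests; this is a sketch, not the provider's actual source, and the hook import path and `azure_data_lake_conn_id` parameter are assumptions:

 from airflow.exceptions import AirflowException
 from airflow.models import BaseOperator
 from airflow.providers.microsoft.azure.hooks.azure_data_lake import AzureDataLakeHook


 class SketchLocalToADLSOperator(BaseOperator):
     # Hypothetical re-creation of the behaviour the tests exercise.
     def __init__(self, *, local_path, remote_path, nthreads=64,
                  overwrite=True, buffersize=4194304, blocksize=4194304,
                  extra_upload_options=None,
                  azure_data_lake_conn_id='azure_data_lake_default', **kwargs):
         super().__init__(**kwargs)
         self.local_path = local_path
         self.remote_path = remote_path
         self.nthreads = nthreads
         self.overwrite = overwrite
         self.buffersize = buffersize
         self.blocksize = blocksize
         self.extra_upload_options = extra_upload_options
         self.azure_data_lake_conn_id = azure_data_lake_conn_id

     def execute(self, context):
         # Guard from example #1: reject recursive globs up front.
         if '**' in self.local_path:
             raise AirflowException("Recursive glob patterns using `**` are not supported")
         hook = AzureDataLakeHook(azure_data_lake_conn_id=self.azure_data_lake_conn_id)
         # Base kwargs first, then user-supplied extras unpacked on top,
         # matching the `run=False` seen in the assertion above.
         hook.upload_file(
             local_path=self.local_path,
             remote_path=self.remote_path,
             nthreads=self.nthreads,
             overwrite=self.overwrite,
             buffersize=self.buffersize,
             blocksize=self.blocksize,
             **(self.extra_upload_options or {}),
         )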
Code example #4: an example DAG using the operator

import os

from airflow import models
from airflow.providers.microsoft.azure.transfers.local_to_adls import LocalToAzureDataLakeStorageOperator
from airflow.utils.dates import days_ago

LOCAL_FILE_PATH = os.environ.get("LOCAL_FILE_PATH", 'localfile.txt')
REMOTE_FILE_PATH = os.environ.get("REMOTE_FILE_PATH", 'remote')


with models.DAG(
    "example_local_to_adls",
    start_date=days_ago(1),
    schedule_interval=None,
    tags=['example'],
) as dag:
    # [START howto_operator_local_to_adls]
    upload_file = LocalToAzureDataLakeStorageOperator(
        task_id='upload_task',
        local_path=LOCAL_FILE_PATH,
        remote_path=REMOTE_FILE_PATH,
    )
    # [END howto_operator_local_to_adls]
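For a quick check outside the scheduler, the task can be executed directly, mirroring the `operator.execute(None)` calls in the tests above. This assumes a valid Azure Data Lake connection is configured in the environment (the operator presumably falls back to a default connection id such as `azure_data_lake_default`):

if __name__ == "__main__":
    # Ad-hoc local run of the upload task; requires a working
    # Azure Data Lake connection and bypasses scheduling entirely.
    upload_file.execute(context=None)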