Example #1
def move_blobs_to_processing(**context):
    results = blob_service.connection.list_blobs(
        input_container, processing_file_prefix)
    blobs_moved = 0
    blob_urls = []
    for blob in results:
        print("\t Blob name: " + blob.name)
        # Generate a SAS token for blob access
        blob_input_url = blob_service.connection.make_blob_url(
            input_container,
            blob.name,
            sas_token=blob_service.connection.generate_blob_shared_access_signature(
                input_container,
                blob.name,
                permission=BlobPermissions(read=True),
                expiry=datetime.utcnow() + timedelta(days=5)))

        print("\t SAS URL:{}".format(blob_input_url))
        # Copy the blob to the processing container
        blob_service.connection.copy_blob(
            output_container, blob.name, blob_input_url, requires_sync=True)

        # Generate a SAS token for the now-moved blob, for downstream DAGs
        blob_output_url = blob_service.connection.make_blob_url(
            output_container,
            blob.name,
            sas_token=blob_service.connection.generate_blob_shared_access_signature(
                output_container,
                blob.name,
                permission=BlobPermissions(read=True),
                expiry=datetime.utcnow() + timedelta(hours=1)))

        blobs_moved += 1
        blob_urls.append(blob_output_url)

        def trigger_processing_dag(context, dag_run_obj):
            dag_run_obj.payload = {
                "image_url": blob_output_url,
            }
            return dag_run_obj

        TriggerDagRunOperator(
            task_id="trigger_processing",
            trigger_dag_id="image_processing",
            python_callable=trigger_processing_dag,
            dag=dag
        ).execute(context)

        # Delete the source blob to complete the move
        blob_service.connection.delete_blob(input_container, blob.name)

    return blob_urls
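This snippet targets the deprecated azure-storage 2.x API. A minimal sketch of the same read-only SAS URL with the current azure-storage-blob v12 package (account, key, container, and blob names are placeholders):

from datetime import datetime, timedelta
from azure.storage.blob import BlobSasPermissions, generate_blob_sas

account_name = '<storage-account>'   # placeholder
account_key = '<account-key>'        # placeholder

sas = generate_blob_sas(
    account_name=account_name,
    container_name='input',          # hypothetical container
    blob_name='example.jpg',         # hypothetical blob
    account_key=account_key,
    permission=BlobSasPermissions(read=True),
    expiry=datetime.utcnow() + timedelta(days=5))

# v12 has no make_blob_url helper; the URL is assembled directly
blob_url = 'https://{}.blob.core.windows.net/{}/{}?{}'.format(
    account_name, 'input', 'example.jpg', sas)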
Example #2
def _get_files_from_bfs(cli_ctx, bfs, path, expiry):
    """Returns a list of files and directories under given path on mounted blob container.

    :param models.AzureBlobFileSystemReference bfs: blob file system reference.
    :param str path: path to list files from.
    :param int expiry: SAS expiration time in minutes.
    """
    from azure.storage.blob import BlockBlobService
    from azure.storage.blob.models import Blob, BlobPermissions
    result = []
    service = BlockBlobService(bfs.account_name, _get_storage_account_key(cli_ctx, bfs.account_name, None))
    effective_path = _get_path_for_storage(path)
    folders = set()
    for b in service.list_blobs(bfs.container_name, effective_path + '/', delimiter='/'):
        if isinstance(b, Blob):
            name = os.path.basename(b.name)
            sas = service.generate_blob_shared_access_signature(
                bfs.container_name, b.name, BlobPermissions(read=True),
                expiry=datetime.datetime.utcnow() + datetime.timedelta(minutes=expiry))
            result.append(
                LogFile(
                    name, service.make_blob_url(bfs.container_name, b.name, 'https', sas),
                    False, b.properties.content_length))
        else:
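            # b is a BlobPrefix like 'path/folder/'; [-2] picks the folder name before the trailing '/'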
            name = b.name.split('/')[-2]
            folders.add(name)
            result.append(LogFile(name, None, True, None))
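    # drop file entries that share a name with a virtual folder at this level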
    result = [f for f in result if f.is_directory or f.name not in folders]
    return result
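The isinstance(b, Blob) check matters because, with delimiter='/', list_blobs interleaves Blob entries with BlobPrefix entries for virtual folders. A minimal sketch of that behavior in isolation (credentials and names are placeholders):

from azure.storage.blob import BlockBlobService
from azure.storage.blob.models import Blob

service = BlockBlobService('<account>', '<key>')  # placeholder credentials
for item in service.list_blobs('logs', prefix='job-1/', delimiter='/'):
    if isinstance(item, Blob):
        print('file:', item.name)
    else:
        print('folder:', item.name)  # a BlobPrefix such as 'job-1/sub/'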
Example #3
def _create_short_lived_file_sas(account_name, account_key, share, directory_name, file_name):
    # if dir is empty string change it to None
    directory_name = directory_name if directory_name else None
    expiry = (datetime.utcnow() + timedelta(days=1)).strftime('%Y-%m-%dT%H:%M:%SZ')
    sas = SharedAccessSignature(account_name, account_key)
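    # BlobPermissions(read=True) serializes to 'r'; FilePermissions would be the matching class for a file SAS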
    return sas.generate_file(share, directory_name=directory_name, file_name=file_name,
                             permission=BlobPermissions(read=True), expiry=expiry, protocol='https')
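A hedged usage sketch: the returned token is appended to the file endpoint URL by the caller (account, share, and path below are placeholders):

sas = _create_short_lived_file_sas('<account>', '<key>', 'myshare', 'logs', 'run.txt')
url = 'https://{}.file.core.windows.net/{}/{}/{}?{}'.format(
    '<account>', 'myshare', 'logs', 'run.txt', sas)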
Example #4
def _create_short_lived_blob_sas(account_name, account_key, container, blob):
    expiry = (datetime.utcnow() +
              timedelta(days=1)).strftime('%Y-%m-%dT%H:%M:%SZ')
    sas = SharedAccessSignature(account_name, account_key)
    return sas.generate_blob(container,
                             blob,
                             permission=BlobPermissions(read=True),
                             expiry=expiry,
                             protocol='https')
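As with the file variant, the caller appends the token to the blob URL; a sketch with placeholder names:

sas = _create_short_lived_blob_sas('<account>', '<key>', 'images', 'photo.png')
url = 'https://{}.blob.core.windows.net/{}/{}?{}'.format(
    '<account>', 'images', 'photo.png', sas)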
Example #5
def create_short_lived_container_sas(account_name, account_key, container):
    from datetime import datetime, timedelta
    from azure.storage.sharedaccesssignature import SharedAccessSignature
    from azure.storage.blob.models import BlobPermissions

    expiry = (datetime.utcnow() +
              timedelta(days=1)).strftime('%Y-%m-%dT%H:%M:%SZ')
    sas = SharedAccessSignature(account_name, account_key)
    return sas.generate_container(container,
                                  permission=BlobPermissions(read=True),
                                  expiry=expiry,
                                  protocol='https')
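For reference, a minimal sketch of the same one-day read-only container SAS with the current azure-storage-blob v12 package (account, key, and container name are placeholders):

from datetime import datetime, timedelta
from azure.storage.blob import ContainerSasPermissions, generate_container_sas

sas = generate_container_sas(
    account_name='<account>',        # placeholder
    container_name='builds',         # hypothetical container
    account_key='<key>',             # placeholder
    permission=ContainerSasPermissions(read=True),
    expiry=datetime.utcnow() + timedelta(days=1))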
Example #6
def get_sas_url(name):

    baseblobservice = BaseBlobService(account_name='', account_key='')

    sasToken = baseblobservice.generate_blob_shared_access_signature(
        container_name='images',
        blob_name=name,
        permission=BlobPermissions(read=True, write=True, create=True),
        start=datetime.utcnow() - timedelta(hours=2),  # backdated to tolerate clock skew; SAS times are UTC
        expiry=datetime.utcnow() + timedelta(hours=2))

    sasurl = baseblobservice.make_blob_url('images', name, sas_token=sasToken)

    return sasurl
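Because this token also grants write and create, the same URL can be used to upload directly over REST. A minimal sketch (the blob name is hypothetical; the x-ms-blob-type header is required by the Put Blob operation):

import requests

url = get_sas_url('photo.png')  # hypothetical blob name in the 'images' container
with open('photo.png', 'rb') as f:
    response = requests.put(url, data=f, headers={'x-ms-blob-type': 'BlockBlob'})
response.raise_for_status()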
Example #7
    def _create_read_sas(cls, client, share=None, container=None):
        if (share and container) or (not share and not container):
            raise ValueError('set either share or container')

        if share:
            return client.generate_share_shared_access_signature(
                share,
                FilePermissions(read=True),
                datetime.utcnow() + timedelta(minutes=15))
        elif container:
            return client.generate_container_shared_access_signature(
                container,
                BlobPermissions(read=True),
                datetime.utcnow() + timedelta(minutes=15))
Example #8
def blob_service_generate_blob_shared_access_signature(account_name,
                                                       storage_key,
                                                       container_name,
                                                       blob_name):
    blob_permissions = BlobPermissions(read=True)
    # SAS times are interpreted as UTC, so utcnow avoids local-clock offsets
    start = datetime.utcnow()
    expiry = datetime.utcnow() + timedelta(days=365)

    block_blob_service = get_block_blob_service(account_name, storage_key)
    return block_blob_service.generate_blob_shared_access_signature(
        container_name,
        blob_name,
        permission=blob_permissions,
        expiry=expiry,
        start=start,
        protocol='https')
Example #9
def refresh_test(job_id: str):
    import requests
    storage = get_blob_storage_client()

    test_run = DbTestRun.query.filter_by(id=job_id).first()
    if not test_run:
        return "Test run job not found", 404

    test_run_job = get_job(job_id)
    test_run.state = test_run_job.state.value

    if test_run_job.state == JobState.completed:
        for task in list_tasks(job_id):
            if task.id == 'test-creator':
                continue

            test_case = DbTestCase.query.filter_by(
                id=DbTestCase.get_full_name(task, test_run)).one_or_none()
            if test_case:
                continue

            test_case = DbTestCase(task, test_run)

            if not test_case.passed:
                # only load the output of failed tests, for performance reasons
                container_name = 'output'
                blob_name = '/'.join([job_id, task.id, 'stdout.txt'])  # blob names always use '/' separators
                sas = storage.generate_blob_shared_access_signature(
                    container_name,
                    blob_name,
                    permission=BlobPermissions(read=True),
                    protocol='https',
                    expiry=(datetime.utcnow() + timedelta(hours=1)))
                url = storage.make_blob_url(container_name,
                                            blob_name,
                                            sas_token=sas,
                                            protocol='https')

                response = requests.request('GET', url)
                # keep only the test output, trimming fixed-size wrapper lines at the head and tail of the log
                test_case.output = '\n'.join(response.text.split('\n')[58:-3])

            db.session.add(test_case)

    db.session.commit()

    return redirect(url_for('test', job_id=job_id))
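Since the storage client already holds the account key, the output could also be fetched without minting a SAS. A sketch assuming get_blob_storage_client returns a 2.x BlockBlobService, as the calls above suggest:

blob = storage.get_blob_to_text(container_name, blob_name)
test_case.output = '\n'.join(blob.content.split('\n')[58:-3])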
Example #10
def api_hook():
    import requests
    from morocco.core import get_batch_client, get_blob_storage_client

    if request.headers.get('X-Batch-Event') == 'test.finished':
        event = DbWebhookEvent(source='batch',
                               content=request.data.decode('utf-8'))
        db.session.add(event)
        db.session.commit()

        job_id = request.form.get('job_id')
        task_id = request.form.get('task_id')

        test_run_job = get_job(job_id)
        test_run = DbTestRun.query.filter_by(id=job_id).first()
        test_run.state = test_run_job.state.value

        task = get_batch_client().task.get(job_id, task_id)
        test_case = DbTestCase(task, test_run)

        storage = get_blob_storage_client()
        if not test_case.passed:
            # only load the output of failed tests, for performance reasons
            container_name = 'output'
            blob_name = '/'.join([job_id, task.id, 'stdout.txt'])  # blob names always use '/' separators
            sas = storage.generate_blob_shared_access_signature(
                container_name,
                blob_name,
                permission=BlobPermissions(read=True),
                protocol='https',
                expiry=(datetime.utcnow() + timedelta(hours=1)))
            url = storage.make_blob_url(container_name,
                                        blob_name,
                                        sas_token=sas,
                                        protocol='https')

            response = requests.request('GET', url)
            # keep only the test output, trimming fixed-size wrapper lines at the head and tail of the log
            test_case.output = '\n'.join(response.text.split('\n')[58:-3])

        db.session.add(test_case)
        db.session.commit()

        return 'Update {} {}'.format(job_id, task_id), 200

    return 'Unknown event', 400
Example #11
    def test_batch_copy_blob_with_sas(self):
        # create sas token for read permission on the source blob container
        sas_token = self._blob_service.generate_container_shared_access_signature(
            self._test_source_container,
            BlobPermissions(read=True),
            datetime.utcnow() + timedelta(minutes=15))

        cmd_template = 'storage file copy start-batch' \
                       ' --source-container {}' \
                       ' --destination-share {}' \
                       ' --account-name {}' \
                       ' --account-key {}' \
                       ' --source-sas {}'

        _cli_main(cmd_template.format(self._test_source_container,
                                      self._test_target_share,
                                      self._file_service.account_name,
                                      self._file_service.account_key,
                                      sas_token))

        files = list(glob_files_remotely(self._file_service, self._test_target_share, pattern=None))
        assert len(files) == 41
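glob_files_remotely is a helper from the same test suite; with the plain 2.x SDK, the copied files on the target share could be enumerated like this (credentials and share name are placeholders):

from azure.storage.file import FileService

fs = FileService('<account>', '<key>')  # placeholder credentials
copied = list(fs.list_directories_and_files('<target-share>'))
print(len(copied))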
Example #12
def sync_build(commit: dict = None, sha: str = None, create_job=False):
    from datetime import datetime, timedelta

    from azure.batch.models import BatchErrorException, JobState
    from azure.storage.blob.models import BlobPermissions

    from morocco.core.services import (get_source_control_commit,
                                       get_source_control_commits,
                                       get_batch_client,
                                       get_blob_storage_client)
    from morocco.main import DbBuild, db
    from morocco.batch import create_build_job

    if not commit and not sha:
        raise ValueError('Missing commit')

    if not commit:
        if sha == '<latest>':
            commit = get_source_control_commits()[0]
        else:
            commit = get_source_control_commit(sha)

    sha = commit['sha']

    build_record = DbBuild.query.filter_by(id=sha).one_or_none()
    if build_record:
        build_record.update_commit(commit)
    else:
        build_record = DbBuild(commit=commit)
        db.session.add(build_record)

    try:
        batch_client = get_batch_client()
        batch_job = batch_client.job.get(sha)
        if create_job and batch_job.state == JobState.completed:
            batch_client.job.delete(sha)
            batch_job = create_build_job(sha)
    except BatchErrorException:
        if create_job:
            batch_job = create_build_job(sha)
        else:
            batch_job = None

    if batch_job:
        # the build job may already be deleted; it is not required to keep the data in sync
        build_task = get_batch_client().task.get(job_id=sha, task_id='build')
        if not build_task:
            return 'Cloud task for the build is not found', 400
        build_record.state = build_task.state.value

    storage = get_blob_storage_client()
    blob = 'azure-cli-{}.tar'.format(sha)
    if storage.exists(container_name='builds', blob_name=blob):
        build_record.build_download_url = storage.make_blob_url(
            'builds',
            blob_name=blob,
            protocol='https',
            sas_token=storage.generate_blob_shared_access_signature(
                'builds',
                blob,
                BlobPermissions(read=True),
                expiry=datetime.utcnow() + timedelta(days=365)))

    db.session.commit()

    return build_record
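A usage sketch, assuming it runs inside the Flask application context this module expects; '<latest>' resolves to the newest commit per the branch above:

build = sync_build(sha='<latest>')
print(build.state, build.build_download_url)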
Example #13
block_blob_service = BlockBlobService(
    account_name='<account-name>',  # account name truncated in the source snippet
    account_key=
    'b/qWJCuFxdUD4A9Y6erFvXwqMcUBNJz+MAHHADXWN4v+8JRMxMfIW+nqeGKfUFhP1xcb5GJzA2OSuVEs3rVr0Q=='
)

block_blob_service.create_blob_from_path(
    'addresses',
    'zoom_0.mp4',
    'zoom_0.mp4',
    content_settings=ContentSettings(content_type='video/mp4'))

# get the blob URL
# block_blob_service.set_container_acl("addresses",{"AccessPolicy": "abc"})
sas_token = block_blob_service.generate_blob_shared_access_signature(
    "addresses",
    "zoom_0.mp4",
    permission=BlobPermissions(read=True),  # equivalent to BlobPermissions.READ
    expiry='2020-10-12',
    start=None)

url = block_blob_service.make_blob_url(
    "addresses",
    "zoom_0.mp4",
    sas_token=sas_token,
)

conn = http.client.HTTPSConnection("videobreakdown.azure-api.net")

headers = {
    'ocp-apim-subscription-key': "d7739fcadadc4280a02cbd6482f5ef86",
    'content-type': "multipart/form-data",
    'cache-control': "no-cache",