コード例 #1
0
    def create_target_container(self, data=None):
        """Create a uniquely named target container and return a SAS URL.

        In offline (recorded) test runs nothing is created and a
        placeholder string is returned instead.
        """
        # Offline/recorded runs never touch live storage.
        if not self.is_live:
            return "dummy_string"

        name = "target" + str(uuid.uuid4())
        self.target_container_name = name
        client = ContainerClient(self.storage_endpoint, name, self.storage_key)
        client.create_container()
        if data:
            self.upload_documents(data, client)
        # "wl" => write + list permissions on the generated SAS.
        return self.generate_sas_url(name, "wl")
コード例 #2
0
    def test_create_client_for_emulator(self):
        """Verify ContainerClient parses the local storage-emulator URL
        (host:port followed by the account name in the path), both via
        the constructor and via ``from_container_url``."""
        container_client = ContainerClient(
            account_url='http://127.0.0.1:1000/devstoreaccount1',
            container_name='newcontainer',
            credential=
            'Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw=='
        )

        self.assertEqual(container_client.container_name, "newcontainer")
        self.assertEqual(container_client.account_name, "devstoreaccount1")

        # Bug fix: the client returned by from_container_url was
        # previously discarded and the old instance re-asserted, so this
        # second half of the test exercised nothing.
        container_client = ContainerClient.from_container_url(
            'http://127.0.0.1:1000/devstoreaccount1/newcontainer')
        self.assertEqual(container_client.container_name, "newcontainer")
        self.assertEqual(container_client.account_name, "devstoreaccount1")
コード例 #3
0
 def create_source_container(self, data, blob_prefix=""):
     """Create a source container, upload *data* into it and return a
     read/list SAS URL for it.

     ``data`` may be a single blob payload or a list of payloads; each
     payload is stored under a random ``.txt`` blob name prefixed with
     ``blob_prefix``.
     """
     name = "src" + str(uuid.uuid4())
     client = ContainerClient(self.storage_endpoint, name, self.storage_key)
     client.create_container()
     # Normalise to a list so single payloads and batches share one path.
     payloads = data if isinstance(data, list) else [data]
     for payload in payloads:
         client.upload_blob(name=blob_prefix + str(uuid.uuid4()) + ".txt",
                            data=payload)
     return self.generate_sas_url(name, "rl")
コード例 #4
0
ファイル: status.py プロジェクト: zanachka/city-scrapers-core
    def update_status_svg(self, spider, svg):
        """Upload *svg* as ``<spider.name>.svg`` to the configured Azure
        status container, overwriting any previous version."""
        from azure.storage.blob import ContainerClient, ContentSettings

        account = self.crawler.settings.get("AZURE_ACCOUNT_NAME")
        client = ContainerClient(
            "{}.blob.core.windows.net".format(account),
            self.crawler.settings.get("CITY_SCRAPERS_STATUS_CONTAINER"),
            credential=self.crawler.settings.get("AZURE_ACCOUNT_KEY"),
        )
        # no-cache so the status badge refreshes right after each crawl.
        svg_settings = ContentSettings(content_type="image/svg+xml",
                                       cache_control="no-cache")
        client.upload_blob(
            "{}.svg".format(spider.name),
            svg,
            content_settings=svg_settings,
            overwrite=True,
        )
コード例 #5
0
def copy_output(step_id, env):
    """Copy the parallel-run-step output blob into a date-stamped
    destination blob within the scoring output container."""
    account_url = f'https://{env.scoring_datastore_storage_name}.blob.core.windows.net'
    src_blob_name = f'azureml/{step_id}/{env.scoring_datastore_storage_name}_out/parallel_run_step.txt'
    src_blob_url = f'{account_url}/{env.scoring_datastore_output_container}/{src_blob_name}'
    container_client = ContainerClient(account_url=account_url,
                                       container_name=env.scoring_datastore_output_container,
                                       credential=env.scoring_datastore_access_key)
    props = container_client.get_blob_client(src_blob_name).get_blob_properties()

    # Destination path: <last-modified date>/<name>_<HH_MM_SS_mmm>.<ext>
    dest_folder = props.last_modified.date().isoformat()
    file_time = (props.last_modified.time()
                 .isoformat('milliseconds')
                 .replace(':', '_')
                 .replace('.', '_'))
    parts = env.scoring_datastore_output_filename.split('.')
    dest_blob_name = f'{dest_folder}/{parts[0]}_{file_time}.{parts[1]}'
    # Server-side copy; no data flows through this process.
    container_client.get_blob_client(dest_blob_name).start_copy_from_url(src_blob_url)
コード例 #6
0
def list_blobs(account_name: str, account_key: str,
               container_name: str) -> List[BlobProperties]:
    """ List the blobs in an Azure Storage Blob Container.

    :param account_name: Azure Storage account name.
    :param account_key: Azure Storage account key.
    :param container_name: the name of the container to list the blobs in.
    :return: a list of the blobs that were found in the container.
    """

    account_url = make_account_url(account_name)
    client: ContainerClient = ContainerClient(account_url,
                                              container_name,
                                              credential=account_key)
    # Materialise the paged iterator directly; the previous
    # `[b for b in blobs]` was an element-by-element copy (ruff PERF402).
    return list(client.list_blobs())
コード例 #7
0
    def __init__(self, crawler, output_format):
        """Build a ContainerClient from the scrapy ``FEED_URI`` setting.

        The parsing below implies a URI of the form
        ``<8-char scheme>//<account_name>:<account_key>@<container>/...``;
        the first 8 characters are skipped, the credential pair sits
        before the ``@`` and the container name directly after it.
        """
        from azure.storage.blob import ContainerClient

        feed_uri = crawler.settings.get("FEED_URI")
        # "<name>:<key>" between the scheme prefix and the "@".
        account_name, account_key = feed_uri[8::].split("@")[0].split(":")
        self.spider = crawler.spider
        # First path component after "@" is the container name.
        self.container = feed_uri.split("@")[1].split("/")[0]
        self.container_client = ContainerClient(
            "{}.blob.core.windows.net".format(account_name),
            self.container,
            credential=account_key,
        )
        # strftime-style pattern used to prefix feed blobs by date.
        self.feed_prefix = crawler.settings.get(
            "CITY_SCRAPERS_DIFF_FEED_PREFIX", "%Y/%m/%d"
        )
        super().__init__(crawler, output_format)
コード例 #8
0
    def __init__(self, uri):
        """Parse a ``<scheme>//name:key@container/path`` style URI and
        open a ContainerClient for its container."""
        from azure.storage.blob import ContainerClient

        # Everything after "@" is "<container>/<blob path...>".
        path_parts = uri.split("@")[1].split("/")
        # The 8-char scheme prefix is skipped before reading credentials.
        account_name, account_key = uri[8::].split("@")[0].split(":")

        self.account_name = account_name
        self.account_key = account_key
        self.container = path_parts[0]
        self.filename = "/".join(path_parts[1:])
        self.container_client = ContainerClient(
            "{}.blob.core.windows.net".format(self.account_name),
            self.container,
            credential=self.account_key,
        )
コード例 #9
0
def copy_output(args):
    """Upload the local parallel_run_step.txt to a date/time-stamped
    blob in the scoring container."""
    print("Output : {}".format(args.output_path))
    account_url = "https://mlopsxebiaamlsa.blob.core.windows.net"
    container_client = ContainerClient(account_url, args.score_container,
                                       args.scoring_datastore_key)

    # Destination name: <today>/<base>_<HH_MM_SS_mmm>.<ext>
    dest_folder = date.today().isoformat()
    now_time = datetime.now(timezone.utc).time()
    file_time = (now_time.isoformat("milliseconds")
                 .replace(":", "_")
                 .replace(".", "_"))
    name_parts = args.scoring_output_filename.split(".")
    dest_blob_name = "{}/{}_{}.{}".format(dest_folder, name_parts[0],
                                          file_time, name_parts[1])

    blob_client = container_client.get_blob_client(dest_blob_name)
    source_path = os.path.join(args.output_path, "parallel_run_step.txt")
    with open(source_path, "rb") as score_file:
        blob_client.upload_blob(score_file, blob_type="BlockBlob")
コード例 #10
0
 def _reinit_session(self):
     """
     Create a new session and (re)build ``self.container_client``.
     """
     # Credential resolution order: explicit credential, connection
     # string, SAS token, shared key, then DefaultAzureCredential.
     if self.credential:
         # Any supplied credential takes precedence over the environment
         credential = self.credential
     elif "AZURE_STORAGE_CONNECTION_STRING" in os.environ:
         logging.info("Authenticating to Azure with connection string")
         # A connection string embeds the endpoint, so the client is
         # built directly and no requests session is configured.
         self.container_client = ContainerClient.from_connection_string(
             conn_str=os.getenv("AZURE_STORAGE_CONNECTION_STRING"),
             container_name=self.bucket_name,
         )
         return
     else:
         if "AZURE_STORAGE_SAS_TOKEN" in os.environ:
             logging.info("Authenticating to Azure with SAS token")
             credential = os.getenv("AZURE_STORAGE_SAS_TOKEN")
         elif "AZURE_STORAGE_KEY" in os.environ:
             logging.info("Authenticating to Azure with shared key")
             credential = os.getenv("AZURE_STORAGE_KEY")
         else:
             logging.info(
                 "Authenticating to Azure with default credentials")
             # azure-identity is not part of azure-storage-blob so only import
             # it if needed
             try:
                 from azure.identity import DefaultAzureCredential
             except ImportError:
                 raise SystemExit(
                     "Missing required python module: azure-identity")
             credential = DefaultAzureCredential()
     # Share one pooled HTTPS connection set across SDK requests.
     session = requests.Session()
     adapter = requests.adapters.HTTPAdapter(
         pool_maxsize=self.REQUESTS_POOL_MAXSIZE)
     session.mount("https://", adapter)
     self.container_client = ContainerClient(
         account_url=self.account_url,
         container_name=self.bucket_name,
         credential=credential,
         max_single_put_size=self.max_single_put_size,
         max_block_size=self.max_block_size,
         session=session,
     )
コード例 #11
0
 def _container_client(self, custom_domain=None, connection_string=None):
     """Return a ContainerClient for ``self.azure_container``.

     When ``connection_string`` (an account URL) is not supplied, one is
     built from the protocol, account name and domain; ``custom_domain``
     replaces the default ``blob.core.windows.net`` host.
     """
     account_domain = ("blob.core.windows.net"
                       if custom_domain is None else custom_domain)
     if connection_string is None:
         connection_string = "{}://{}.{}".format(self.azure_protocol,
                                                 self.account_name,
                                                 account_domain)
     # First configured secret wins: shared key, SAS token, then token
     # credential; None means anonymous access.
     credential = (self.account_key or self.sas_token
                   or self.token_credential or None)
     return ContainerClient(connection_string,
                            self.azure_container,
                            credential=credential)
コード例 #12
0
def _azure_get_configs(layer: "Layer") -> List[str]:
    """Return the names of all opta configs stored in the terraform
    backend container, excluding *layer*'s own config."""
    providers = layer.gen_providers(0)
    credentials = Azure.get_credentials()
    backend = providers["terraform"]["backend"]["azurerm"]

    storage_client = ContainerClient(
        account_url=f"https://{backend['storage_account_name']}.blob.core.windows.net",
        container_name=backend["container_name"],
        credential=credentials,
    )
    prefix = "opta_config/"
    # Strip the folder prefix to get bare config (layer) names.
    configs = [
        blob.name[len(prefix):]
        for blob in storage_client.list_blobs(name_starts_with=prefix)
    ]
    if layer.name in configs:
        configs.remove(layer.name)
    return configs
コード例 #13
0
ファイル: azure.py プロジェクト: run-x/opta
    def delete_opta_config(self) -> None:
        """Delete this layer's opta config blob (including snapshots)
        from the terraform backend container; a missing blob is treated
        as already deleted."""
        providers = self.layer.gen_providers(0)
        credentials = self.get_credentials()

        backend = providers["terraform"]["backend"]["azurerm"]
        account_url = (
            f"https://{backend['storage_account_name']}.blob.core.windows.net"
        )

        storage_client = ContainerClient(
            account_url=account_url,
            container_name=backend["container_name"],
            credential=credentials,
        )
        try:
            storage_client.delete_blob(
                f"opta_config/{self.layer.name}", delete_snapshots="include")
        except ResourceNotFoundError:
            logger.info("Remote opta config was already deleted")
コード例 #14
0
def generate_writable_container_sas(account_name: str,
                                    account_key: str,
                                    container_name: str,
                                    access_duration_hrs: float,
                                    account_url: Optional[str] = None) -> str:
    """Creates a container and returns a SAS URI with read/write/list
    permissions.

    Args:
        account_name: str, name of blob storage account
        account_key: str, account SAS token or account shared access key
        container_name: str, name of container to create, must not match an
            existing container in the given storage account
        access_duration_hrs: float
        account_url: str, optional, defaults to default Azure Storage URL

    Returns: str, URL to newly created container

    Raises: azure.core.exceptions.ResourceExistsError, if container already
        exists

    NOTE: This method currently fails on non-default Azure Storage URLs. The
    initializer for ContainerClient() assumes the default Azure Storage URL
    format, which is a bug that has been reported here:
        https://github.com/Azure/azure-sdk-for-python/issues/12568
    """
    if account_url is None:
        account_url = build_azure_storage_uri(account=account_name)
    # Use the client as a context manager so its HTTP transport is
    # closed once the container has been created (matches the other
    # copy of this helper in this codebase).
    with ContainerClient(account_url=account_url,
                         container_name=container_name,
                         credential=account_key) as container_client:
        container_client.create_container()

    permissions = ContainerSasPermissions(read=True, write=True, list=True)
    container_sas_token = generate_container_sas(
        account_name=account_name,
        container_name=container_name,
        account_key=account_key,
        permission=permissions,
        expiry=datetime.utcnow() + timedelta(hours=access_duration_hrs))

    return f'{account_url}/{container_name}?{container_sas_token}'
コード例 #15
0
ファイル: azure.py プロジェクト: run-x/opta
    def upload_opta_config(self) -> None:
        """Serialise this layer's structured config and upload it to
        ``opta_config/<layer name>`` in the terraform backend container,
        overwriting any existing blob."""
        providers = self.layer.gen_providers(0)
        credentials = self.get_credentials()

        backend = providers["terraform"]["backend"]["azurerm"]
        account_url = (
            f"https://{backend['storage_account_name']}.blob.core.windows.net"
        )

        storage_client = ContainerClient(
            account_url=account_url,
            container_name=backend["container_name"],
            credential=credentials,
        )
        storage_client.upload_blob(
            name=f"opta_config/{self.layer.name}",
            data=json.dumps(self.layer.structured_config()),
            overwrite=True,
        )
コード例 #16
0
def get_preprocessed_file_from_azure():
    """Download the last-listed blob from the preprocessed-transcripts
    container into ``local_path`` and return its file name.

    NOTE(review): if the container is empty this still attempts a
    download with an empty blob name, exactly as the original did —
    confirm whether that case can occur.
    """
    service = ContainerClient(
        account_url=cfg.azure_details['account_url'],
        container_name=cfg.azure_details['storage_preprocessed_transcripts'],
        credential=cfg.azure_details['azure_storage_account_key'])
    # Keep the name of the final blob yielded by the listing.
    blob_name = ''
    for blob in service.list_blobs():
        blob_name = blob.name

    # Create the BlobServiceClient object which will be used to create a
    # container client. (Removed unused locals from the original:
    # ``full_path_to_file`` and the hard-coded ``container_name`` were
    # never read.)
    blob_service_client = BlobServiceClient.from_connection_string(
        cfg.azure_details['account_connection_string'])
    local_file_name = blob_name
    blob_client = blob_service_client.get_blob_client(
        container=cfg.azure_details['storage_preprocessed_transcripts'],
        blob=local_file_name)
    download_file_path = os.path.join(local_path, local_file_name)

    with open(download_file_path, "wb") as download_file:
        download_file.write(blob_client.download_blob().readall())

    return local_file_name
コード例 #17
0
ファイル: containers.py プロジェクト: nharper285/onefuzz
def get_container_sas_url_service(
    client: ContainerClient,
    *,
    read: bool = False,
    write: bool = False,
    delete: bool = False,
    list_: bool = False,
    delete_previous_version: bool = False,
    tag: bool = False,
    days: int = 30,
    hours: int = 0,
    minutes: int = 0,
) -> str:
    """Build a SAS URL for *client*'s container carrying the requested
    permissions, valid for the given days/hours/minutes window."""
    account_name = client.account_name
    container_name = client.container_name
    account_key = get_storage_account_name_key_by_name(account_name)

    permission = ContainerSasPermissions(
        read=read,
        write=write,
        delete=delete,
        list=list_,
        delete_previous_version=delete_previous_version,
        tag=tag,
    )
    window_start, window_expiry = sas_time_window(
        days=days, hours=hours, minutes=minutes)
    sas_token = generate_container_sas(
        account_name,
        container_name,
        account_key=account_key,
        permission=permission,
        start=window_start,
        expiry=window_expiry,
    )

    # A throwaway client is built only to render the fully-formed URL.
    sas_client = ContainerClient(
        get_url(account_name),
        container_name=container_name,
        credential=sas_token,
    )
    return cast(str, sas_client.url)
コード例 #18
0
 def __init__(self,
              account_name,
              account_key,
              container,
              listfiles,
              dir_out,
              nm_out,
              extension,
              previous_task,
              *args,
              **kwargs):
     """Operator that consolidates files from an Azure blob container.

     Stores the file list, output location/name and extension, plus a
     ContainerClient bound to ``container`` on the given account.
     """
     super(ConsolidateServicoOperator, self).__init__(*args, **kwargs)
     account_url = f"https://{account_name}.blob.core.windows.net/"
     self.client = ContainerClient(account_url=account_url,
                                   credential=account_key,
                                   container_name=container)
     self.listfiles = listfiles
     self.extension = extension
     self.dir_out = dir_out
     self.nm_out = nm_out
     self.previous_task = previous_task
     self.temp_dir = _TEMP_FILE
コード例 #19
0
    def __init__(
        self, account_url,  # type: str
        file_system_name,  # type: str
        credential=None,  # type: Optional[Any]
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """Create a client for one ADLS Gen2 file system.

        The account URL is normalised (scheme added if missing) and also
        converted to its blob-endpoint twin, because file-system
        operations are served through an internal ContainerClient.
        """
        try:
            if not account_url.lower().startswith('http'):
                account_url = "https://" + account_url
        except AttributeError:
            raise ValueError("account URL must be a string.")
        parsed_url = urlparse(account_url.rstrip('/'))
        if not file_system_name:
            raise ValueError("Please specify a file system name.")
        if not parsed_url.netloc:
            raise ValueError("Invalid URL: {}".format(account_url))

        # Matching blob endpoint for the given dfs endpoint.
        blob_account_url = convert_dfs_url_to_blob_url(account_url)
        # TODO: add self.account_url to base_client and remove _blob_account_url
        self._blob_account_url = blob_account_url

        # Internal override of endpoint hosts (primary/secondary).
        datalake_hosts = kwargs.pop('_hosts', None)
        blob_hosts = None
        if datalake_hosts:
            blob_primary_account_url = convert_dfs_url_to_blob_url(datalake_hosts[LocationMode.PRIMARY])
            blob_hosts = {LocationMode.PRIMARY: blob_primary_account_url, LocationMode.SECONDARY: ""}
        self._container_client = ContainerClient(blob_account_url, file_system_name,
                                                 credential=credential, _hosts=blob_hosts, **kwargs)

        # A SAS token embedded in the URL query wins for signing.
        _, sas_token = parse_query(parsed_url.query)
        self.file_system_name = file_system_name
        self._query_str, self._raw_credential = self._format_query_string(sas_token, credential)

        super(FileSystemClient, self).__init__(parsed_url, service='dfs', credential=self._raw_credential,
                                               _hosts=datalake_hosts, **kwargs)
        # ADLS doesn't support secondary endpoint, make sure it's empty
        self._hosts[LocationMode.SECONDARY] = ""
        self._client = DataLakeStorageClient(self.url, file_system_name, None, pipeline=self._pipeline)
コード例 #20
0
ファイル: containers.py プロジェクト: ranweiler/onefuzz
def get_container_sas_url_service(
    client: ContainerClient,
    *,
    read: bool = False,
    write: bool = False,
    delete: bool = False,
    list_: bool = False,
    delete_previous_version: bool = False,
    tag: bool = False,
    duration: datetime.timedelta = CONTAINER_SAS_DEFAULT_DURATION,
) -> str:
    """Return a SAS URL for *client*'s container carrying the requested
    permissions and valid for *duration*."""
    account_name = client.account_name
    container_name = client.container_name
    account_key = get_storage_account_name_key_by_name(account_name)

    window_start, window_expiry = sas_time_window(duration)
    permissions = ContainerSasPermissions(
        read=read,
        write=write,
        delete=delete,
        list=list_,
        delete_previous_version=delete_previous_version,
        tag=tag,
    )
    sas_token = generate_container_sas(
        account_name,
        container_name,
        account_key=account_key,
        permission=permissions,
        start=window_start,
        expiry=window_expiry,
    )

    # A throwaway client is built only to render the fully-formed URL.
    sas_client = ContainerClient(
        get_url(account_name),
        container_name=container_name,
        credential=sas_token,
    )
    return cast(str, sas_client.url)
コード例 #21
0
ファイル: view.py プロジェクト: yasir002/demo
def deleted_orders(request):
    """
    Access all deleted orders from 'deleted-orders' container,
    splits the order number and submission date from order's
    name and lists them
    :param request:
    :return:
    """
    container_client = ContainerClient(storage_url,
                                       'deleted-orders',
                                       credential=None)
    # Each blob name encodes "<order number>.<creation date>".
    order_list = []
    for order in container_client.list_blobs():
        name_parts = order['name'].split('.')
        order_list.append([name_parts[0], name_parts[1]])
    order_list.sort(reverse=True)
    # Prepend a 1-based row number to each entry for display.
    # (Removed the dead `i` counter from the original; it was
    # incremented but never read.)
    for row_number, order in enumerate(order_list, start=1):
        order.insert(0, row_number)
    page = request.GET.get('page', 1)
    paginator = Paginator(order_list, 4)
    orders = paginator.page(page)
    context = {
        'deleted_orders': orders,
        'redirect_url': 'deleted_orders',
    }
    print(request.COOKIES)
    return render(request, 'deleted_orders.html', context)
コード例 #22
0
ファイル: sas_blob_utils.py プロジェクト: tobbas75/ai4eutils
def generate_writable_container_sas(account_name: str,
                                    account_key: str,
                                    container_name: str,
                                    access_duration_hrs: float,
                                    account_url: Optional[str] = None) -> str:
    """
    Creates a container and returns a SAS URI with read/write/list
    permissions.

    Args:
        account_name: str, name of blob storage account
        account_key: str, account SAS token or account shared access key
        container_name: str, name of container to create, must not match an
            existing container in the given storage account
        access_duration_hrs: float
        account_url: str, optional, defaults to default Azure Storage URL

    Returns: str, URL to newly created container

    Raises: azure.core.exceptions.ResourceExistsError, if container already
        exists
    """
    if account_url is None:
        account_url = build_azure_storage_uri(account=account_name)

    # Create the container, releasing the client as soon as it is done.
    with ContainerClient(account_url=account_url,
                         container_name=container_name,
                         credential=account_key) as container_client:
        container_client.create_container()

    expiry_time = datetime.utcnow() + timedelta(hours=access_duration_hrs)
    sas_token = generate_container_sas(
        account_name=account_name,
        container_name=container_name,
        account_key=account_key,
        permission=ContainerSasPermissions(read=True, write=True, list=True),
        expiry=expiry_time)

    return f'{account_url}/{container_name}?{sas_token}'
コード例 #23
0
    def test_invalid_api_version(self):
        """Each client constructor must reject an unknown ``api_version``
        with a ValueError whose message names the unsupported value."""
        with pytest.raises(ValueError) as error:
            BlobServiceClient(
                "https://foo.blob.core.windows.net/account",
                credential="fake_key",
                api_version="foo")
        self.assertTrue(str(error.value).startswith("Unsupported API version 'foo'."))

        with pytest.raises(ValueError) as error:
            ContainerClient(
                "https://foo.blob.core.windows.net/account",
                self.container_name,
                credential="fake_key",
                api_version="foo")
        self.assertTrue(str(error.value).startswith("Unsupported API version 'foo'."))

        with pytest.raises(ValueError) as error:
            BlobClient(
                "https://foo.blob.core.windows.net/account",
                self.container_name,
                self._get_blob_reference(),
                credential="fake_key",
                api_version="foo")
        self.assertTrue(str(error.value).startswith("Unsupported API version 'foo'."))
コード例 #24
0
def main(args):
    """Download a GoPro MP4 blob, extract its GPS track via an external
    node script, and upload the GPS JSON plus the video to
    date-partitioned (YYYY/MM/DD) paths in their containers.

    NOTE(review): ``credential`` and ``local_video_filename`` are names
    defined elsewhere in this module — confirm they are in scope.
    """
    blob_account_url = f"https://{args.storageaccountname}.blob.core.windows.net"

    input_blob = BlobClient.from_blob_url(args.url)
    _, filename = os.path.split(input_blob.blob_name)
    root, ext = os.path.splitext(filename)
    print(f"root: {root}")

    # Only MP4 inputs are supported.
    assert ext.lower() == ".mp4"

    gps_output_container = ContainerClient(account_url=blob_account_url,
                                           container_name="gpsdata",
                                           credential=credential)
    assert gps_output_container.exists()

    video_upload_container = ContainerClient(
        account_url=blob_account_url,
        container_name="inputvideos",
        credential=credential,
    )
    assert video_upload_container.exists()

    # Download blob into "input.mp4"
    print("Downloading video file")
    with open(local_video_filename, "wb") as fh:
        input_blob.download_blob().readinto(fh)

    # GET CREATION DATE FROM FILE
    print("Probing File")
    probe = ffmpeg.probe(filename="input.mp4")
    creation_time = probe["format"]["tags"]["creation_time"]
    creation_datetime = datetime.datetime.strptime(creation_time,
                                                   "%Y-%m-%dT%H:%M:%S.%fZ")
    print(f"Creation time: {creation_datetime}")

    # STORE JSON TO YYYY/DD/MM/GOPROFILENAME.JSON
    target_folder = os.path.join(
        str(creation_datetime.year),
        str(creation_datetime.month),
        str(creation_datetime.day),
    )
    gps_json_output_filename = os.path.join(target_folder, f"{root}.json")
    video_upload_filename = os.path.join(target_folder, f"{root}.MP4")
    print(f"JSON FILENAME: {gps_json_output_filename}")

    # Extract GPS data into gps_json_output_filename
    # NOTE(review): process.js presumably reads "input.mp4" and writes
    # "out.json" in the working directory — confirm against the script.
    print("Extracting GPS data")
    result = subprocess.check_output("node process.js", shell=True)
    print(result)

    # Upload json file to gps data container
    try:
        with open("out.json", "rb") as fh:
            extracted_gps = json.load(fh)
            print("extraction successful")

        gps_output_blob = gps_output_container.upload_blob(
            name=gps_json_output_filename,
            data=json.dumps(extracted_gps),
            overwrite=True,
        )
        print(f"file '{gps_json_output_filename}' uploaded.")
        print(gps_output_blob.get_blob_properties())
    except ResourceExistsError:
        print("File already exists")

    # upload to inputvideos folder
    with open(local_video_filename, "rb") as data:
        length = os.path.getsize(local_video_filename)
        gps_output_blob = video_upload_container.upload_blob(
            name=video_upload_filename,
            data=data,
            length=length,
            overwrite=True,
        )
コード例 #25
0
def check_subscription(tenant_id, tenant_name, sub_id, sub_name, creds):
    """Scan one Azure subscription for publicly accessible blob containers.

    Walks every resource group and storage account, retrieves account
    keys, finds public containers, counts their files by extension and
    writes one CSV row per public container to
    ``public-containers-<date>.csv``.

    NOTE(review): relies on module-level helpers/constants defined
    elsewhere in this file: ``check_storage_account``,
    ``count_files_extensions``, ``write_csv``, ``ENDPOINT_URL``,
    ``CONTAINER_URL`` and ``EXTENSIONS``.
    """
    print("\n\t[*] Checking subscription {}:".format(sub_name), flush=True)

    storage_client = StorageManagementClient(creds, sub_id)

    # Obtain the management object for resources
    resource_client = ResourceManagementClient(creds, sub_id)

    # Retrieve the list of resource groups
    group_list = resource_client.resource_groups.list()
    resource_groups = [group.name for group in list(group_list)]
    print("\t\t[+] Found {} resource groups".format(len(resource_groups)),
          flush=True)
    # Maps resource group -> {storage account name -> account key}.
    group_to_names_dict = {group: dict() for group in resource_groups}

    accounts_counter = 0
    for group in resource_groups:
        for item in storage_client.storage_accounts.list_by_resource_group(
                group):
            accounts_counter += 1
            group_to_names_dict[group][item.name] = ''

    print("\t\t[+] Found {} storage accounts".format(accounts_counter),
          flush=True)

    # Fill in each account's primary key; abort the whole scan if the
    # caller lacks permission to read keys.
    for group in resource_groups:
        for account in group_to_names_dict[group].keys():
            try:
                storage_keys = storage_client.storage_accounts.list_keys(
                    group, account)
                storage_keys = {v.key_name: v.value for v in storage_keys.keys}
                group_to_names_dict[group][account] = storage_keys['key1']

            except azure.core.exceptions.HttpResponseError:
                print(
                    "\t\t[-] User do not have permissions to retrieve storage accounts keys in the given"
                    " subscription",
                    flush=True)
                print("\t\t    Can not scan storage accounts", flush=True)
                return

    output_list = list()

    # Build one CSV row per public container found.
    for group in resource_groups:
        for account in group_to_names_dict[group].keys():
            key = group_to_names_dict[group][account]
            public_containers = check_storage_account(account, key)

            for cont in public_containers:
                access_level = cont.public_access
                container_client = ContainerClient(
                    ENDPOINT_URL.format(account), cont.name, credential=key)
                files = [f.name for f in container_client.list_blobs()]
                ext_dict = count_files_extensions(files, EXTENSIONS)
                row = [
                    tenant_id, tenant_name, sub_id, sub_name, group, account,
                    cont.name, access_level,
                    CONTAINER_URL.format(account, cont.name),
                    len(files)
                ]

                for ext in ext_dict.keys():
                    row.append(ext_dict[ext])

                output_list.append(row)

    print("\t\t[+] Scanned all storage accounts successfully", flush=True)

    if len(output_list) > 0:
        print("\t\t[+] Found {} PUBLIC containers".format(len(output_list)),
              flush=True)
    else:
        print("\t\t[+] No PUBLIC containers found")

    header = [
        "Tenant ID", "Tenant Name", "Subscription ID", "Subscription Name",
        "Resource Group", "Storage Account", "Container",
        "Public Access Level", "URL", "Total Files"
    ]

    # One extension-count column per tracked extension, plus "others".
    for ext in EXTENSIONS:
        header.append(ext)

    header.append("others")
    write_csv('public-containers-{}.csv'.format(date.today()), header,
              output_list)
コード例 #26
0
    def test_create_service_with_custom_account_endpoint_path(self):
        """Clients built against a custom (path-style) account endpoint
        must expose no account name or credential and must keep the
        custom path in ``primary_hostname`` and ``url``."""
        account_name = "blobstorage"
        account_key = "blobkey"
        custom_account_url = "http://local-machine:11002/custom/account/path/" + self.sas_token
        for service_type in SERVICES.items():
            conn_string = 'DefaultEndpointsProtocol=http;AccountName={};AccountKey={};BlobEndpoint={};'.format(
                account_name, account_key, custom_account_url)

            # Act
            service = service_type[0].from_connection_string(
                conn_string, container_name="foo", blob_name="bar")

            # Assert
            self.assertEqual(service.account_name, account_name)
            self.assertEqual(service.credential.account_name, account_name)
            self.assertEqual(service.credential.account_key, account_key)
            self.assertEqual(service.primary_hostname,
                             'local-machine:11002/custom/account/path')

        # Direct construction (no connection string): no account info.
        service = BlobServiceClient(account_url=custom_account_url)
        self.assertEqual(service.account_name, None)
        self.assertEqual(service.credential, None)
        self.assertEqual(service.primary_hostname,
                         'local-machine:11002/custom/account/path')
        self.assertTrue(
            service.url.startswith(
                'http://local-machine:11002/custom/account/path/?'))

        service = ContainerClient(account_url=custom_account_url,
                                  container_name="foo")
        self.assertEqual(service.account_name, None)
        self.assertEqual(service.container_name, "foo")
        self.assertEqual(service.credential, None)
        self.assertEqual(service.primary_hostname,
                         'local-machine:11002/custom/account/path')
        self.assertTrue(
            service.url.startswith(
                'http://local-machine:11002/custom/account/path/foo?'))

        # from_container_url must strip the query string from .url.
        service = ContainerClient.from_container_url(
            "http://local-machine:11002/custom/account/path/foo?query=value")
        self.assertEqual(service.account_name, None)
        self.assertEqual(service.container_name, "foo")
        self.assertEqual(service.credential, None)
        self.assertEqual(service.primary_hostname,
                         'local-machine:11002/custom/account/path')
        self.assertEqual(service.url,
                         'http://local-machine:11002/custom/account/path/foo')

        service = BlobClient(account_url=custom_account_url,
                             container_name="foo",
                             blob_name="bar",
                             snapshot="baz")
        self.assertEqual(service.account_name, None)
        self.assertEqual(service.container_name, "foo")
        self.assertEqual(service.blob_name, "bar")
        self.assertEqual(service.snapshot, "baz")
        self.assertEqual(service.credential, None)
        self.assertEqual(service.primary_hostname,
                         'local-machine:11002/custom/account/path')
        self.assertTrue(
            service.url.startswith(
                'http://local-machine:11002/custom/account/path/foo/bar?snapshot=baz&'
            ))

        # from_blob_url keeps the snapshot but drops other query params.
        service = BlobClient.from_blob_url(
            "http://local-machine:11002/custom/account/path/foo/bar?snapshot=baz&query=value"
        )
        self.assertEqual(service.account_name, None)
        self.assertEqual(service.container_name, "foo")
        self.assertEqual(service.blob_name, "bar")
        self.assertEqual(service.snapshot, "baz")
        self.assertEqual(service.credential, None)
        self.assertEqual(service.primary_hostname,
                         'local-machine:11002/custom/account/path')
        self.assertEqual(
            service.url,
            'http://local-machine:11002/custom/account/path/foo/bar?snapshot=baz'
        )
0
import random
import numpy as np
import cv2
import imageio
from matplotlib import patches
from matplotlib.patches import Polygon
import matplotlib.pyplot as plt
from datetime import datetime
import zlib
import base64
from mrcnn.model import MaskRCNN
from mrcnn.utils import resize_image

# Base endpoint of the Azure Storage account that holds the "facts" data.
account_url = "https://hecdf.blob.core.windows.net"
# Container-scoped client for the facts container.
# NOTE(review): `facts_container` and `facts_sas_token` are not defined in
# this chunk — presumably module-level config defined elsewhere; confirm.
facts_blob_service = ContainerClient(account_url=account_url,
                                     container_name=facts_container,
                                     credential=facts_sas_token)


# ===============================
# INLINES
# ===============================


def get_people_local_path(sample_id: str, directory_path: str) -> str:
    """Return the local path of the per-sample people JSON file."""
    filename = f"{sample_id}_people.json"
    return os.path.join(directory_path, filename)


def get_poly_local_path(sample_id: str, directory_path: str) -> str:
    """Return the local path of the per-sample polygon JSON file."""
    filename = f"{sample_id}_poly.json"
    return os.path.join(directory_path, filename)
コード例 #28
0
ファイル: __init__.py プロジェクト: Lucas1988/ExcelToSql
def main(myblob: func.InputStream):
    try:

        logging.info('Python blob trigger function processed a request.')

        account_name = "###" # confidential
        account_key = "###" # confidential
        top_level_container_name = "###" # confidential
        blob_service = ContainerClient(account_url=account_name, container_name=top_level_container_name, credential=account_key)

        # Make connection with Azure SQL database

        server = 'datatrust-ff.database.windows.net' 
        database = 'DataTrust' 
        username = '******' 
        password = '******' 
        cnxn = pyodbc.connect('DRIVER={ODBC Driver 17 for SQL Server};SERVER='+server+';PORT=1433;DATABASE='+database+';UID='+username+';PWD='+ password)
        cursor = cnxn.cursor()
        cursor.execute("SELECT @@version;")
        row = cursor.fetchone() 
        while row:
            print(row[0])
            row = cursor.fetchone()

        # Download xlsx-files from Azure blob storage

        logging.info("\nList blobs in the container")
        generator = blob_service.list_blobs()
        for blob in generator:
            if blob.name.endswith('.xlsx'):
                logging.info("\t Blob name: " + blob.name)
                file_name = re.sub('.*/', '', blob.name)
                xlsx_file = open(file_name, 'wb')
                b = blob_service.download_blob(blob)
                b.readinto(xlsx_file)
                #xlsx_file.write(b)
                xlsx_file = open(file_name, 'rb')
                data = xlsx_file.read()
                data = pd.read_excel(data)
                headers = list(data.columns.values)
                list_values = data.values.tolist()
                blob_name = blob.name

                tableName = re.sub('.xlsx', '', blob_name)
                tableName = re.sub('^.*/', '', tableName)
                tableName = re.sub(' ', '_', tableName)

                # If table exists: remove and rewrite

                try:
                    cursor.execute("DROP TABLE dbo." + tableName)
                except:
                    print('Table does not exist yet')

                # Create new table

                query_string = 'CREATE TABLE dbo.' + tableName + ' ('

                # Add columns to table

                columns = ''
                for i in range(len(headers)):
                    headers[i] = re.sub('[ /-]', '_', str(headers[i]))
                    headers[i] = re.sub("[\(\)€'\.,]", '', str(headers[i]))
                    columns += headers[i] + ', '
                    if i == len(headers) - 1:
                        query_string += '\n' + headers[i] + ' VARCHAR(1000)'
                    else:
                        query_string += '\n' + headers[i] + ' VARCHAR(1000),'
                query_string += '\n);'
                query_string = re.sub('[/-]', '', query_string)
                cursor.execute(query_string)

                # Add rows to table

                query_string = "INSERT INTO dbo." + tableName + "(" + columns[:-2] +") VALUES "
                for row in range(len(list_values)):
                    list_values[row] = [str(i) for i in list_values[row]]
                    row_new = []
                    for item in list_values[row]:
                        item = re.sub('[\(\)\r\n\,\'\-]', '', item)
                        item = "'" + item + "'"
                        row_new.append(item)

                    row_new = ','.join(row_new)
                    if (row + 1) % 1000 == 0 or row + 1 == len(list_values):
                        query_string += '(' + row_new + ');'
                        print(query_string)
                        cursor.execute(query_string)
                        query_string = "INSERT INTO dbo." + tableName + "(" + columns[:-2] +") VALUES "
                    else:
                        query_string += '(' + row_new + '),'
                    
                cnxn.commit()

    except Exception as e:
        logging.exception(e)
コード例 #29
0
from azure.storage.blob import ContainerClient
import numpy as np
import io
import cv2
import time
import matplotlib.pyplot as plt

# Dataset website: http://theairlab.org/tartanair-dataset/
# Public endpoint of the TartanAir dataset's Azure Storage account.
account_url = 'https://tartanair.blob.core.windows.net/'
container_name = 'tartanair-release1'

# credential=None — presumably the container allows anonymous (public) read
# access; confirm before attempting any authenticated operation.
container_client = ContainerClient(account_url=account_url,
                                 container_name=container_name,
                                 credential=None)


def get_environment_list():
    '''
    List all the environments shown in the root directory
    '''
    # walk_blobs() at the container root yields one prefix per top-level
    # "folder"; collect their names.
    return [env_prefix.name for env_prefix in container_client.walk_blobs()]


def get_trajectory_list(envname, easy_hard='Easy'):
    '''
    List all the trajectory folders, which is named as 'P0XX'
    '''
コード例 #30
0
def sample_batch_translation_with_storage():
    """End-to-end sample: upload a document to source blob storage, run a
    batch document-translation job (target language: Spanish), and download
    the translated result from target blob storage.

    Requires environment variables AZURE_DOCUMENT_TRANSLATION_ENDPOINT/_KEY
    and the AZURE_STORAGE_SOURCE_* / AZURE_STORAGE_TARGET_* endpoint,
    account-name, container-name and key settings.

    Raises:
        SystemExit: when the translation job ends in a failed state.
    """
    import os
    from azure.core.credentials import AzureKeyCredential
    from azure.ai.documenttranslation import (DocumentTranslationClient,
                                              DocumentTranslationInput,
                                              TranslationTarget)
    from azure.storage.blob import ContainerClient, generate_container_sas, ContainerSasPermissions

    endpoint = os.environ["AZURE_DOCUMENT_TRANSLATION_ENDPOINT"]
    key = os.environ["AZURE_DOCUMENT_TRANSLATION_KEY"]
    source_storage_endpoint = os.environ["AZURE_STORAGE_SOURCE_ENDPOINT"]
    source_storage_account_name = os.environ[
        "AZURE_STORAGE_SOURCE_ACCOUNT_NAME"]
    source_storage_container_name = os.environ[
        "AZURE_STORAGE_SOURCE_CONTAINER_NAME"]
    source_storage_key = os.environ["AZURE_STORAGE_SOURCE_KEY"]
    target_storage_endpoint = os.environ["AZURE_STORAGE_TARGET_ENDPOINT"]
    target_storage_account_name = os.environ[
        "AZURE_STORAGE_TARGET_ACCOUNT_NAME"]
    target_storage_container_name = os.environ[
        "AZURE_STORAGE_TARGET_CONTAINER_NAME"]
    target_storage_key = os.environ["AZURE_STORAGE_TARGET_KEY"]

    translation_client = DocumentTranslationClient(endpoint,
                                                   AzureKeyCredential(key))

    # Upload the document to be translated into the source container.
    container_client = ContainerClient(
        source_storage_endpoint,
        container_name=source_storage_container_name,
        credential=source_storage_key)

    with open("document.txt", "rb") as doc:
        container_client.upload_blob("document.txt", doc)

    # The service reads from the source container ("rl") and writes the
    # translated output into the target container ("rlwd").
    source_container_sas = generate_container_sas(
        account_name=source_storage_account_name,
        container_name=source_storage_container_name,
        account_key=source_storage_key,
        permission=ContainerSasPermissions.from_string("rl"))

    target_container_sas = generate_container_sas(
        account_name=target_storage_account_name,
        container_name=target_storage_container_name,
        account_key=target_storage_key,
        permission=ContainerSasPermissions.from_string("rlwd"))

    source_container_url = source_storage_endpoint + "/" + source_storage_container_name + "?" + source_container_sas
    target_container_url = target_storage_endpoint + "/" + target_storage_container_name + "?" + target_container_sas

    translation_inputs = [
        DocumentTranslationInput(source_url=source_container_url,
                                 targets=[
                                     TranslationTarget(
                                         target_url=target_container_url,
                                         language_code="es")
                                 ],
                                 prefix="document")
    ]

    job_detail = translation_client.create_translation_job(translation_inputs)
    job_result = translation_client.wait_until_done(job_detail.id)

    if job_result.status == "Succeeded":
        print("We translated our documents!")
        if job_result.documents_failed_count > 0:
            check_documents(translation_client, job_result.id)

    elif job_result.status in ["Failed", "ValidationFailed"]:
        if job_result.error:
            print("Translation job failed: {}: {}".format(
                job_result.error.code, job_result.error.message))
        check_documents(translation_client, job_result.id)
        # BUG FIX: exit() is an interactive-site helper and is not
        # guaranteed to exist in all runtimes; raise SystemExit directly.
        raise SystemExit(1)

    # BUG FIX: the original built a ContainerClient from account-key
    # credentials and immediately discarded it by calling
    # from_container_url on the instance; from_container_url is a
    # classmethod, so build the SAS-authenticated client directly.
    target_container_client = ContainerClient.from_container_url(
        target_container_url)

    with open("translated.txt", "wb") as my_blob:
        download_stream = target_container_client.download_blob("document.txt")
        my_blob.write(download_stream.readall())