Example #1
    def copy(self, source_path, dest_path, account=None, group_name=None):
        """Copy file from a path to another path. The azure url format is https://myaccount.blob.core.windows.net/mycontainer/myblob.
         Args:
             source_path(str): The path of the file to be copied.
             dest_path(str): The destination path where the file is going to be allocated.
         Raises:
             :exc:`~..DriverError`: if the file is not uploaded correctly.
        """
        if 'core.windows.net' not in source_path and 'core.windows.net' not in dest_path:
            self.logger.error(
                "Source or destination must be a azure storage url (format "
                "https://myaccount.blob.core.windows.net/mycontainer/myblob")
            raise DriverError

        # Check if source exists and can read
        if 'core.windows.net' in source_path:
            parse_url = _parse_url(source_path)
            key = self.storage_client.storage_accounts.list_keys(
                self.resource_group_name, parse_url.account).keys[0].value
            if parse_url.file_type == 'blob':
                bs = BlockBlobService(account_name=parse_url.account,
                                      account_key=key)
                return bs.get_blob_to_path(parse_url.container_or_share_name,
                                           parse_url.file, dest_path)
            elif parse_url.file_type == 'file':
                fs = FileService(account_name=parse_url.account,
                                 account_key=key)
                return fs.get_file_to_path(parse_url.container_or_share_name,
                                           parse_url.path, parse_url.file,
                                           dest_path)
            else:
                raise ValueError(
                    "This azure storage type is not valid. It should be blob or file."
                )
        else:
            parse_url = _parse_url(dest_path)
            key = self.storage_client.storage_accounts.list_keys(
                self.resource_group_name, parse_url.account).keys[0].value
            if parse_url.file_type == 'blob':
                bs = BlockBlobService(account_name=parse_url.account,
                                      account_key=key)
                return bs.create_blob_from_path(
                    parse_url.container_or_share_name, parse_url.file,
                    source_path)
            elif parse_url.file_type == 'file':
                fs = FileService(account_name=parse_url.account,
                                 account_key=key)
                return fs.create_file_from_path(
                    parse_url.container_or_share_name, parse_url.path,
                    parse_url.file, source_path)
            else:
                raise ValueError(
                    "This azure storage type is not valid. It should be blob or file."
                )
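
The copy example above (and the delete method in Example #28) depends on a private _parse_url helper that is not shown. A minimal sketch of what it might return, assuming the attribute names used above (account, file_type, container_or_share_name, path, file) and the documented URL layout; this is an illustration, not the original helper:

from collections import namedtuple
from urllib.parse import urlparse

ParsedUrl = namedtuple('ParsedUrl',
                       'account file_type container_or_share_name path file')

def _parse_url(url):
    # e.g. https://myaccount.file.core.windows.net/myshare/dir1/name.txt
    parts = urlparse(url)
    account, file_type = parts.netloc.split('.')[:2]  # 'myaccount', 'file' or 'blob'
    segments = parts.path.lstrip('/').split('/')
    path = '/'.join(segments[1:-1]) or None           # intermediate directories
    return ParsedUrl(account, file_type, segments[0], path, segments[-1])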
Example #2
def shares():
    # Create Container and Share
    global storage_account_key, blob_service, blob_share, file_service, file_share
    sak = storage_client.storage_accounts.list_keys(resourcegroupname,
                                                    storageaccountname)
    storage_account_key = sak.keys[0].value
    cloudstorage_client = CloudStorageAccount(storageaccountname,
                                              storage_account_key)
    blob_service = cloudstorage_client.create_block_blob_service()
    blob_share = blob_service.create_container(
        sharename, public_access=PublicAccess.Container)
    file_service = FileService(account_name=storageaccountname,
                               account_key=storage_account_key)
    file_share = file_service.create_share(sharename)
    # Copy Setup Files to Container and Share
    blob_service.create_blob_from_path(
        sharename,
        filename,
        filename,
    )
    file_service.create_file_from_path(
        sharename,
        '',
        filename,
        filename,
    )
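
Note that public_access=PublicAccess.Container makes every blob in the container anonymously readable. The same policy can be applied to a container that already exists, as Example #22 below also does:

# Grant anonymous read access to an existing container and its blob listing.
blob_service.set_container_acl(sharename, public_access=PublicAccess.Container)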
Example #3
 def file_srv(self):
     if (self._file_srv is None or self.storage_acc_name
             is not None) or self._changed_properties["storage_acc_name"]:
         self._file_srv = FileService(account_name=self.storage_acc_name,
                                      account_key=self.storage_acc_key_1)
         self._changed_properties["storage_acc_name"] = False
     return self._file_srv
Example #4
    def test_sas_access_file(self):
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        file_name = self._create_file()

        token = self.fs.generate_file_shared_access_signature(
            self.share_name,
            None,
            file_name,
            permission=FilePermissions.READ,
            expiry=datetime.utcnow() + timedelta(hours=1),
        )

        # Act
        service = FileService(
            self.settings.STORAGE_ACCOUNT_NAME,
            sas_token=token,
            request_session=requests.Session(),
        )
        self._set_test_proxy(service, self.settings)
        result = service.get_file_to_bytes(self.share_name, None, file_name)

        # Assert
        self.assertEqual(self.short_byte_data, result.content)
Example #5
File: custom.py  Project: zackliu/azure-cli
def _configure_auto_storage(cli_ctx, location):
    """Configures auto storage account for the cluster

    :param str location: location for the auto-storage account.
    :return (str, str): a tuple with auto storage account name and key.
    """
    from azure.mgmt.resource.resources.models import ResourceGroup
    from azure.storage.file import FileService
    from azure.storage.blob import BlockBlobService
    resource_group = _get_auto_storage_resource_group()
    resource_client = get_mgmt_service_client(cli_ctx, ResourceType.MGMT_RESOURCE_RESOURCES)
    if resource_client.resource_groups.check_existence(resource_group):
        logger.warning('BatchAI will use existing %s resource group for auto-storage account',
                       resource_group)
    else:
        logger.warning('Creating %s resource group for auto-storage account', resource_group)
        resource_client.resource_groups.create_or_update(
            resource_group, ResourceGroup(location=location))
    storage_client = _get_storage_management_client(cli_ctx)
    account = None
    for a in storage_client.storage_accounts.list_by_resource_group(resource_group):
        if a.primary_location == location.lower().replace(' ', ''):
            account = a.name
            logger.warning('Using existing %s storage account as an auto-storage account', account)
            break
    if account is None:
        account = _create_auto_storage_account(storage_client, resource_group, location)
        logger.warning('Created auto storage account %s', account)
    key = _get_storage_account_key(cli_ctx, account, None)
    file_service = FileService(account, key)
    file_service.create_share(AUTO_STORAGE_SHARE_NAME, fail_on_exist=False)
    blob_service = BlockBlobService(account, key)
    blob_service.create_container(AUTO_STORAGE_CONTAINER_NAME, fail_on_exist=False)
    return account, key
Example #6
def main(mytimer: func.TimerRequest) -> None:
    utc_timestamp = datetime.datetime.utcnow().replace(
        tzinfo=datetime.timezone.utc).isoformat()

    if mytimer.past_due:
        logging.info('The timer is past due!')

    logging.info('Python timer trigger function ran at %s', utc_timestamp)
    # https://docs.microsoft.com/en-us/azure/storage/files/storage-python-how-to-use-file-storage
    file_service = FileService(
        account_name='nyctaxistorageacc',
        account_key=
        '26+AjeiwRNGlLNa6P4J8+xOdReRs/xqNIM2qS4JYr0ZeFQ1C9aTF45IiaGnPXgG7KCg498Q8bcfsRNvCOCaC3A=='
    )
    if not os.path.exists('data'):
        os.makedirs('data')
    for i in range(1, 13):
        file_service.get_file_to_path('nyc-taxi-db',
                                      None,
                                      '%02d-cnt.npy' % i,
                                      'data/%02d-cnt.npy' % i,
                                      open_mode='wb')
        file_service.get_file_to_path('nyc-taxi-db',
                                      None,
                                      '%02d-total-fare.npy' % i,
                                      'data/%02d-total-fare.npy' % i,
                                      open_mode='wb')
    logging.info('Successfully updated model.')
Example #7
def create_snapshot(file_share, directory_name, file_name, container_name, correlation_guid=None):
    # A str(uuid.uuid4()) default would be evaluated once at definition time
    # and shared across calls; generate a fresh GUID per call instead.
    if correlation_guid is None:
        correlation_guid = str(uuid.uuid4())
    file_service = FileService(account_name=STORAGE_ACCOUNT_NAME, account_key=STORAGE_ACCOUNT_KEY)
    blob_service = BlockBlobService(account_name=STORAGE_ACCOUNT_NAME, account_key=STORAGE_ACCOUNT_KEY)
    file_sas_token = file_service.generate_file_shared_access_signature(
        file_share,
        directory_name,
        file_name,
        permission=FilePermissions.READ,
        expiry=datetime.utcnow() + timedelta(minutes=10))  # SAS times are interpreted as UTC

    file_url = file_service.make_file_url(file_share, directory_name, file_name, sas_token=file_sas_token)

    blob_name = '{0}/{1}/{2}'.format(correlation_guid, directory_name, file_name)
    blob_service.create_container(container_name)

    try:
        blob_service.copy_blob(container_name, blob_name, file_url)
    except Exception as e:
        raise ValueError('Missing file ' + file_name) from e

    blob_sas_token = blob_service.generate_blob_shared_access_signature(
        container_name,
        blob_name,
        permission=BlobPermissions.READ,
        expiry=datetime.utcnow() + timedelta(days=1000))

    return blob_service.make_blob_url(container_name, blob_name, sas_token=blob_sas_token)
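
A hedged usage sketch for create_snapshot; the share, directory, file, and container names are placeholders:

# Server-side copy of myshare/reports/output.csv into the 'snapshots'
# container; returns a read-only blob URL valid for roughly 1000 days.
url = create_snapshot('myshare', 'reports', 'output.csv', 'snapshots')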
Example #8
    def file_sas(self):
        share_name = self._create_share()
        self.service.create_directory(share_name, 'dir1')
        self.service.create_file_from_text(share_name, 'dir1', 'file1',
                                           b'hello world')

        # Read access only to this particular file
        # Expires in an hour
        token = self.service.generate_file_shared_access_signature(
            share_name,
            'dir1',
            'file1',
            FilePermissions.READ,
            datetime.utcnow() + timedelta(hours=1),
        )

        # Create a service and use the SAS
        sas_service = FileService(
            account_name=self.account.account_name,
            sas_token=token,
        )

        file = sas_service.get_file_to_text(share_name, 'dir1', 'file1')
        content = file.content  # hello world

        self.service.delete_share(share_name)
Example #9
def create_and_attach_file_storage(cfg, ws):
    if len(cfg.DataReference.localDirectoryFilesList) > 0:
        for ref in cfg.DataReference.localDirectoryFilesList:
            log.info("Attempting to create file share '%s' on storage account '%s'.", ref.remoteFileShare, ref.storageAccountName)
            file_service = FileService(ref.storageAccountName, ref.storageAccountKey)
            created = file_service.create_share(ref.remoteFileShare, fail_on_exist=False)
            if created:
                log.info("File share '%s' on storage account '%s' created.", ref.remoteFileShare, ref.storageAccountName)
            else:
                log.info("File share '%s' on storage account '%s' already existed.", ref.remoteFileShare, ref.storageAccountName)
            # Get most recent list of datastores linked to current workspace
            datastores = ws.datastores()
            # Validate if share_ds is created
            ds = None if ref.dataref_id not in datastores else Datastore(workspace = ws, name = ref.dataref_id)
            # Register the DS to the workspace
            if ds:
                if ds.account_name == ref.storageAccountName and ds.container_name == ref.remoteFileShare:
                    recreate = False
                else:
                    recreate = True
                    # also remove the existing reference
                    ds.unregister()
            else:
                recreate = True
            if recreate:
                log.info('Registering file share "{}" to AML datastore for AML workspace "{}" under datastore id "{}".'.format(ref.remoteFileShare, ws.name, ref.dataref_id))
                ds = Datastore.register_azure_file_share(workspace=ws,
                                                         datastore_name=ref.dataref_id,
                                                         file_share_name=ref.remoteFileShare,
                                                         account_name=ref.storageAccountName,
                                                         account_key=ref.storageAccountKey,
                                                         overwrite=True)
            else:
                log.info('File share "{}" under AML workspace "{}" already registered under datastore id "{}".'.format(ref.remoteFileShare, ws.name, ref.dataref_id))
Example #10
File: main.py  Project: ilovecee/ef
def delete_result(filename):
  AzureStorageAccount = 'effiles'
  key = 'axLykwdLsUwKTDY5flU6ivGrt9obV38k2UMVDCSpLYE3K6jAkwsjWOThQydhuMSWHfx6lTq102gdkas/GyKhEA=='
  down_path = 'results'
  path1 = 'efficientfrontier'
  file_service = FileService(account_name = AzureStorageAccount, account_key = key)
  file_service.delete_file(path1, down_path, filename)
  # Create Cursor
  cur = mysql.connection.cursor()

  # Execute
  cur.execute('DELETE FROM result_files WHERE filename = %s', [filename])

  # Commit to DB
  mysql.connection.commit()

  # Close connection
  cur.close()

  target = os.path.join(APP_ROOT, 'results/')
  destination = os.path.join(target, filename)
  
  if os.path.exists(destination):
    os.remove(destination)


  flash('File Deleted', 'success')
    
  return redirect(url_for('results'))
Example #11
def main():
    global CONFIG
    CONFIG = load_config(os.getenv("WONDERCOMPUTECONFIG"))
    for param, val in CONFIG.items():
        print(param + ":", val)
    CONFIG["stub"] = new_client()
    CONFIG["repo_storage"] = Path(CONFIG["repo_storage"]).expanduser()
    CONFIG["file_service"] = FileService(
        account_name=CONFIG["azurefs_acc_name"],
        account_key=CONFIG["azurefs_acc_key"])

    last_work = time.time()
    if os.getenv("DEBUG") == "True":
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.INFO)
    while True:
        sleep_at_work(last_work)
        try:
            logging.debug("Knock, knock, wonderland")
            pulled_jobs = CONFIG["stub"].PullPendingJobs(
                ListJobsRequest(how_many=1, kind='hyperopt'))
            for job in pulled_jobs.jobs:
                last_work = time.time()
                logging.info("Gotcha!Learning...JOB_ID={}\n".format(job.id))
                process_job(job)
                logging.info("Processed:\n{}".format(job))
        except Exception as exc:
            logging.warning(exc)
Example #12
 def file_storage_connect(self):
     self.file_storage_url = self.get_property('fs_server', 'general')
     self.file_storage_user = self.get_property('fs_username', 'general')
     self.file_storage_pwd = self.get_property('fs_password', 'general')
     self.file_storage_share = self.get_property('fs_share', 'general')
     self.file_storage_dir = self.get_property('fs_directory_prefix',
                                               'general')
     self.file_service = FileService(account_name=self.file_storage_user,
                                     account_key=self.file_storage_pwd)
     try:
         if self.file_service.exists(self.file_storage_share):
             print(
                 'Connection to Azure file storage successfully established...'
             )
             if len(self.file_storage_dir) > 0 and not self.file_service.exists(
                     self.file_storage_share,
                     directory_name=self.file_storage_dir):
                 subdirs = self.file_storage_dir.split('/')
                 subdirfull = ""
                 for subdir in subdirs:
                     subdirfull += subdir
                     self.file_service.create_directory(
                         self.file_storage_share, subdirfull)
                     subdirfull += "/"
                 print('Created directory: ' + self.file_storage_dir)
         else:
             print(
                 'Failed to connect to Azure file storage, share does not exist: '
                 + self.file_storage_share)
     except Exception as ex:
         print('Error connecting to Azure file storage: ', ex)
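
The level-by-level loop above (it reappears in Examples #19, #20, and #24) exists because FileService.create_directory creates only one directory level at a time. A standalone sketch of the same idea, assuming an already-connected FileService instance:

def ensure_directory_path(file_service, share_name, dir_path):
    # Walk 'a/b/c' and create each prefix in turn; fail_on_exist=False
    # makes levels that already exist a no-op instead of an error.
    partial = ""
    for segment in dir_path.strip('/').split('/'):
        partial = segment if not partial else partial + '/' + segment
        file_service.create_directory(share_name, partial, fail_on_exist=False)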
Example #13
def run(job, **kwargs):
    resource = kwargs.get('resource')
    create_custom_fields_as_needed()

    storage_account = '{{ storage_account }}'
    file = "{{ file }}"
    azure_storage_file_share_name = '{{ azure_storage_file_share_name }}'
    file_name = Path(file).name
    if file.startswith(settings.MEDIA_URL):
        set_progress("Converting relative URL to filesystem path")
        file = file.replace(settings.MEDIA_URL, settings.MEDIA_ROOT)

    account_key = Resource.objects.filter(name__icontains='{{ storage_account }}')[0].azure_account_key
    fallback_account_key = Resource.objects.filter(name__icontains="{{ storage_account }}")[0].azure_account_key_fallback

    set_progress("Connecting To Azure...")
    file_service = FileService(account_name=storage_account, account_key=account_key)

    set_progress('Creating a file share...')
    file_service.create_share(share_name=azure_storage_file_share_name, quota=1)

    set_progress('Creating a file...')
    if file_service.exists(share_name=azure_storage_file_share_name, file_name=file_name, directory_name=''):
        file_service.create_file_from_path(share_name=azure_storage_file_share_name, file_name=file_name, directory_name='', local_file_path=file)
        return "WARNING", "File with this name already exists", "The file will be updated."
    else:
        file_service.create_file_from_path(share_name=azure_storage_file_share_name, file_name=file_name, directory_name='', local_file_path=file)
        resource.name = azure_storage_file_share_name + '-' + file_name
        resource.azure_storage_account_name = storage_account
        resource.azure_account_key = account_key
        resource.azure_account_key_fallback = fallback_account_key
        resource.azure_storage_file_share_name = azure_storage_file_share_name
        resource.azure_storage_file_name = file_name
        resource.save()
    return "Success", "The File has succesfully been uploaded", ""
Example #14
 def onRecordingComplete(self):
     print("recording completed {}".format(self.filename))
     file_service = FileService(account_name=cfg.storageAc,
                                account_key=cfg.accountkey)
     file_service.create_file_from_path(cfg.fileShare, None,
                                        '{}.flv'.format(self.filename),
                                        'temp/{}.flv'.format(self.filename))
     sharedAccessStorage = SharedAccessSignature(cfg.storageAc,
                                                 cfg.accountkey)
     sasKey = sharedAccessStorage.generate_file(
         cfg.fileShare,
         file_name='{}.flv'.format(self.filename),
         permission=FilePermissions.READ,
         start=datetime.utcnow(),
         expiry=datetime.utcnow() + timedelta(minutes=5))
     downloadLink = cfg.downloadLinkFormat.format(cfg.storageAcUrl,
                                                  cfg.fileShare,
                                                  self.filename, sasKey)
     self.recorderSock.emit('recordingcomplete', {
         'sid': self.sid,
         'download': downloadLink
     })
     os.remove('temp/{}.flv'.format(self.filename))
     time.sleep(cfg.timeBeforeFileDelete)
     file_service.delete_file(cfg.fileShare, None,
                              '{}.flv'.format(self.filename))
     return
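
cfg.downloadLinkFormat is not shown; given the four arguments passed to .format() above, it is presumably something along these lines (hypothetical values):

# Hypothetical config matching downloadLinkFormat.format(storageAcUrl,
# fileShare, filename, sasKey) in onRecordingComplete above.
downloadLinkFormat = '{}/{}/{}.flv?{}'
storageAcUrl = 'https://myaccount.file.core.windows.net'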
Example #15
def fileService():
    file_service = FileService(
        account_name="jonesabinostorage",
        account_key=
        "QvgR5kwDrFN4OYkWp+s3S9QAaSDhky9RuUPMMw0QgfdZEnx7LG9WfiByFhHO+aNYaWKiMp31G86Ltz5fvDNJKA=="
    )
    return file_service
Example #16
 def get_conn(self) -> FileService:
     """Return the FileService object."""
     prefix = "extra__azure_fileshare__"
     if self._conn:
         return self._conn
     conn = self.get_connection(self.conn_id)
     service_options_with_prefix = conn.extra_dejson
     service_options = {}
     for key, value in service_options_with_prefix.items():
         # in case dedicated FileShareHook is used, the connection will use the extras from UI.
         # in case deprecated wasb hook is used, the old extras will work as well
         if key.startswith(prefix):
             if value != '':
                 service_options[key[len(prefix) :]] = value
             else:
                 # warn if the deprecated wasb_connection is used
                 warnings.warn(
                     "You are using deprecated connection for AzureFileShareHook."
                     " Please change it to `Azure FileShare`.",
                     DeprecationWarning,
                 )
         else:
             service_options[key] = value
             # warn if the old non-prefixed value is used
             warnings.warn(
                 "You are using deprecated connection for AzureFileShareHook."
                 " Please change it to `Azure FileShare`.",
                 DeprecationWarning,
             )
     self._conn = FileService(account_name=conn.login, account_key=conn.password, **service_options)
     return self._conn
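
For reference, the prefixed extras this hook strips would sit in the Airflow connection roughly as below (values are placeholders); after stripping, they are passed straight through as FileService keyword arguments:

# Hypothetical connection extras as stored for the Azure FileShare connection.
extra_dejson = {
    "extra__azure_fileshare__protocol": "https",
    "extra__azure_fileshare__endpoint_suffix": "core.windows.net",
}
# Result: FileService(account_name=conn.login, account_key=conn.password,
#                     protocol="https", endpoint_suffix="core.windows.net")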
Example #17
    def sas_with_signed_identifiers(self):
        share_name = self._create_share()
        self.service.create_directory(share_name, 'dir1')
        self.service.create_file_from_text(share_name, 'dir1', 'file1',
                                           b'hello world')

        # Set access policy on share
        access_policy = AccessPolicy(permission=SharePermissions.READ,
                                     expiry=datetime.utcnow() +
                                     timedelta(hours=1))
        identifiers = {'id': access_policy}
        acl = self.service.set_share_acl(share_name, identifiers)

        # Wait 30 seconds for acl to propagate
        time.sleep(30)

        # Indicates to use the access policy set on the share
        token = self.service.generate_share_shared_access_signature(share_name,
                                                                    id='id')

        # Create a service and use the SAS
        sas_service = FileService(
            account_name=self.account.account_name,
            sas_token=token,
        )

        file = sas_service.get_file_to_text(share_name, 'dir1', 'file1')
        content = file.content  # hello world

        self.service.delete_share(share_name)
Example #18
    def _get_file_service(self, storage_client, group_name, storage_name):
        """Get Azure file service for given storage

        :param storage_client: azure.mgmt.storage.StorageManagementClient instance
        :param group_name: (str) the name of the resource group on Azure
        :param storage_name: (str) the name of the storage on Azure
        :return: azure.storage.file.FileService instance
        """
        cached_key = (group_name, storage_name)
        file_service = self._cached_file_services.get(cached_key)

        if file_service is None:
            with self._file_services_lock:
                file_service = self._cached_file_services.get(cached_key)
                if file_service is None:
                    account_key = self._get_storage_account_key(
                        storage_client=storage_client,
                        group_name=group_name,
                        storage_name=storage_name)

                    file_service = FileService(account_name=storage_name,
                                               account_key=account_key)
                    self._cached_file_services[cached_key] = file_service

        return file_service
Example #19
    def upload_file(fname, fpath, coref):
        daemonname = fname.split(".")[0]
        i = 0
        fail_msg = ""

        while True:
            try:
                svc = FileService(account_name=acctname, account_key=acctkey)

                l = [sonicversion, asicname, daemonname, hostname]
                e = []
                while len(e) != len(l):
                    e.append(l[len(e)])
                    svc.create_directory(sharename, "/".join(e))

                log_debug("Remote dir created: " + "/".join(e))

                svc.create_file_from_path(sharename, "/".join(l), fname, fpath)
                log_debug("Remote file created: name{} path{}".format(
                    fname, fpath))
                newcoref = os.path.dirname(
                    coref) + "/" + UPLOAD_PREFIX + os.path.basename(coref)
                os.rename(coref, newcoref)
                break

            except Exception as ex:
                log_err("core uploader failed: Failed during upload (" +
                        coref + ") err: (" + str(ex) + ") retry:" + str(i))
                if not os.path.exists(fpath):
                    break
                i += 1
                time.sleep(PAUSE_ON_FAIL)
Example #20
    def upload_file(fname, fpath):
        daemonname = fname.split(".")[0]
        i = 0
        fail_msg = ""

        while i <= MAX_RETRIES:
            try:
                svc = FileService(account_name=acctname, account_key=acctkey)

                l = [sonicversion, asicname, daemonname, hostname]
                e = []
                while len(e) != len(l):
                    e.append(l[len(e)])
                    svc.create_directory(sharename, "/".join(e))

                log_debug("Remote dir created: " + "/".join(e))

                svc.create_file_from_path(sharename, "/".join(l), fname, fpath)
                log_debug("Remote file created: name{} path{}".format(
                    fname, fpath))
                break

            except Exception as ex:
                log_err("core uploader failed: Failed during upload (" +
                        str(ex) + ")")
                fail_msg = str(ex)
                i += 1
                if i >= MAX_RETRIES:
                    raise Exception("Failed while uploading. msg(" + fail_msg +
                                    ") after " + str(i) + " retries")
                time.sleep(PAUSE_ON_FAIL)
Example #21
def getLatestModel(customer, modelName, storage_account_name,
                   storage_account_key):
    fileService = FileService(account_name=storage_account_name,
                              account_key=storage_account_key)
    if fileService.exists('trainedmodels', customer):
        modelTimestampArr = []
        files = fileService.list_directories_and_files('trainedmodels',
                                                       customer + '/' +
                                                       modelName,
                                                       prefix=modelName)

        for file in files:
            date = file.name.split('.')[0].split('_')[1]
            modelTimestampArr.append(date)

        latestModelFileName = modelName + '_' + max(modelTimestampArr) + '.pkl'
        print(latestModelFileName)

        file = fileService.get_file_to_bytes('trainedmodels',
                                             customer + '/' + modelName,
                                             latestModelFileName)
        model = pickle.loads(file.content)['model']
        return model
    else:
        print('Customer or model not found.')
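
A hedged usage sketch (names are placeholders); getLatestModel expects files named <modelName>_<timestamp>.pkl under trainedmodels/<customer>/<modelName> and returns the unpickled 'model' entry, or None when the customer or model is missing:

model = getLatestModel('contoso', 'churnmodel',
                       storage_account_name, storage_account_key)
if model is not None:
    print(type(model))  # whatever estimator object was pickled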
Example #22
    def move_to_storage_account(self, file, storage='blob'):
        from hugme.__key__ import acc, key
        from datetime import datetime

        print(datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
              'Moving final file to {} storage...'.format(storage))

        if storage == 'blob':
            from azure.storage.blob import BlockBlobService, PublicAccess

            block_blob_service = BlockBlobService(account_name=acc,
                                                  account_key=key)
            block_blob_service.set_container_acl(
                'final', public_access=PublicAccess.Container)
            block_blob_service.create_blob_from_path(
                container_name='consumidorgov',
                blob_name='comparativo',
                file_path='consumidor_gov\\data\\' + file,
            )

        elif storage == 'files':
            from azure.storage.file import FileService

            file_service = FileService(account_name=acc, account_key=key)
            file_service.create_file_from_path(
                share_name='complains',
                directory_name='hugme',
                file_name='base.csv',
                local_file_path='' + file,
            )

        else:
            return False
Example #23
 def get_conn(self) -> FileService:
     """Return the FileService object."""
     if not self._conn:
         conn = self.get_connection(self.conn_id)
         service_options = conn.extra_dejson
         self._conn = FileService(account_name=conn.login, account_key=conn.password, **service_options)
     return self._conn
Example #24
def file_storage_connect():
    global file_service
    global file_storage_dir
    global file_storage_share
    global overwrite_remote_files
    file_storage_url = dbparameters['fs_server'].strip()
    file_storage_user = dbparameters['fs_username'].strip()
    file_storage_pwd = dbparameters['fs_password'].strip()
    file_storage_share = dbparameters['fs_share'].strip()
    file_storage_dir = dbparameters['fs_directory_prefix'].strip()
    overwrite_remote_files = dbparameters['overwrite_remote_files'].strip()

    file_service = FileService(account_name=file_storage_user,
                               account_key=file_storage_pwd)
    try:
        if file_service.exists(file_storage_share):
            print(
                'Connection to Azure file storage successfully established...')
            if len(file_storage_dir) > 0 and not file_service.exists(
                    file_storage_share, directory_name=file_storage_dir):
                subdirs = file_storage_dir.split('/')
                subdirfull = ""
                for subdir in subdirs:
                    subdirfull += subdir
                    file_service.create_directory(file_storage_share,
                                                  subdirfull)
                    subdirfull += "/"
                print('Created directory: ' + file_storage_dir)
        else:
            print(
                'Failed to connect to Azure file storage, share does not exist: '
                + file_storage_share)
    except Exception as ex:
        print('Error connecting to Azure file storage: ', ex)
Example #25
File: custom.py  Project: zackliu/azure-cli
def _get_files_from_afs(cli_ctx, afs, path, expiry):
    """Returns a list of files and directories under given path on mounted Azure File share.

    :param models.AzureFileShareReference afs: Azure file share reference.
    :param str path: path to list files from.
    :param int expiry: SAS expiration time in minutes.
    """
    from azure.storage.file import FileService
    from azure.storage.file.models import File, FilePermissions
    result = []
    service = FileService(afs.account_name, _get_storage_account_key(cli_ctx, afs.account_name, None))
    share_name = afs.azure_file_url.split('/')[-1]
    effective_path = _get_path_for_storage(path)
    if not service.exists(share_name, effective_path):
        return result
    for f in service.list_directories_and_files(share_name, effective_path):
        if isinstance(f, File):
            sas = service.generate_file_shared_access_signature(
                share_name, effective_path, f.name, permission=FilePermissions(read=True),
                expiry=datetime.datetime.utcnow() + datetime.timedelta(minutes=expiry))
            result.append(
                LogFile(
                    f.name, service.make_file_url(share_name, effective_path, f.name, 'https', sas),
                    False, f.properties.content_length))
        else:
            result.append(LogFile(f.name, None, True, None))
    return result
Example #26
 def store_service(self):
     if self._account_type == 'FileStore':
         return FileService(account_name=self._account_name,
                            account_key=self._account_key)
     elif self._account_type == 'BlobStore':
         return BlockBlobService(account_name=self._account_name,
                                 account_key=self._account_key)
Example #27
    def test_sas_signed_identifier(self):
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        file_name = self._create_file()

        access_policy = AccessPolicy()
        access_policy.start = '2011-10-11'
        access_policy.expiry = '2018-10-12'
        access_policy.permission = FilePermissions.READ
        identifiers = {'testid': access_policy}

        resp = self.fs.set_share_acl(self.share_name, identifiers)

        token = self.fs.generate_file_shared_access_signature(self.share_name,
                                                              None,
                                                              file_name,
                                                              id='testid')

        # Act
        service = FileService(
            self.settings.STORAGE_ACCOUNT_NAME,
            sas_token=token,
            request_session=requests.Session(),
        )
        self._set_test_proxy(service, self.settings)
        result = service.get_file_to_bytes(self.share_name, None, file_name)

        # Assert
        self.assertEqual(self.short_byte_data, result.content)
Example #28
 def delete(self, remote_file):
     """Delete file from the cloud. The azure url format is https://myaccount.blob.core.windows.net/mycontainer/myblob.
      Args:
          remote_file(str): The path of the file to be deleted.
      Raises:
          :exc:`~..DriverError`: if the file is not uploaded correctly.
     """
     if 'core.windows.net' not in remote_file:
         self.logger.error(
             "Source or destination must be a azure storage url (format "
             "https://myaccount.blob.core.windows.net/mycontainer/myblob")
         raise DriverError
     parse_url = _parse_url(remote_file)
     key = self.storage_client.storage_accounts.list_keys(
         self.resource_group_name, parse_url.account).keys[0].value
     if parse_url.file_type == 'blob':
         bs = BlockBlobService(account_name=parse_url.account,
                               account_key=key)
         return bs.delete_blob(parse_url.container_or_share_name,
                               parse_url.file)
     elif parse_url.file_type == 'file':
         fs = FileService(account_name=parse_url.account, account_key=key)
         return fs.delete_file(parse_url.container_or_share_name,
                               parse_url.path, parse_url.file)
     else:
         raise ValueError(
             "This azure storage type is not valid. It should be blob or file."
         )
Example #29
def transfer_fileshare_to_blob(config, fileshare_uri, output_model_name):
    ''' NB -- transfer proceeds via local temporary file! '''
    file_service = FileService(config.storage_account_name,
                               config.storage_account_key)
    blob_service = BlockBlobService(config.storage_account_name,
                                    config.storage_account_key)
    blob_service.create_container(config.container_trained_models)
    blob_service.create_container(config.predictions_container)

    uri_core = fileshare_uri.split('.file.core.windows.net/')[1].split('?')[0]
    fields = uri_core.split('/')
    fileshare = fields.pop(0)
    subdirectory = '/'.join(fields[:-1])
    file_name = '{}/{}'.format(output_model_name, fields[-1])

    with TemporaryFile() as f:
        file_service.get_file_to_stream(share_name=fileshare,
                                        directory_name=subdirectory,
                                        file_name=fields[-1],
                                        stream=f)
        f.seek(0)
        if 'predictions' in fields[-1]:
            blob_service.create_blob_from_stream(
                config.predictions_container,
                '{}_predictions_test_set.csv'.format(output_model_name), f)
        else:
            blob_service.create_blob_from_stream(
                config.container_trained_models, file_name, f)

    return
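
A hedged usage sketch (URI and model name are placeholders); the function expects a SAS URI of the form https://<account>.file.core.windows.net/<share>/<dir>/<file>?<sas>:

fileshare_uri = ('https://myaccount.file.core.windows.net/'
                 'myshare/runs/predictions.csv?sv=...')  # placeholder SAS URI
transfer_fileshare_to_blob(config, fileshare_uri, 'model_v3')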
Example #30
def get_backup(gw_account_name, gw_account_key, gw_account_share, backup_local_path):
    """Download directories and files from $account_name to local $backup_local_path using Azure FileService"""

    print('\nRunning get_backup from the {} and file share {} to local path {}.\n'.format(gw_account_name, gw_account_share, backup_local_path))

    file_service = FileService(account_name=gw_account_name, account_key=gw_account_key)
    share_dirs_list = file_service.list_directories_and_files(gw_account_share)

    for share_dir_name in share_dirs_list:

        backup_local_dir = os.path.join(backup_local_path, share_dir_name.name)

        if not os.path.isdir(backup_local_dir):
            print('Local backup directory {} not found, creating...'.format(backup_local_dir))
            os.makedirs(backup_local_dir)

        share_files_list = file_service.list_directories_and_files(gw_account_share, share_dir_name.name)
        for share_file in share_files_list:
            try:
                print('Getting file: {}'.format(os.path.join('/', share_dir_name.name, share_file.name)))
                # example:
                # file_service.get_file_to_path('gwdevproxydata', 'datanginx-conf.d', 'jm-gw-proxy-dev.domain.tld.conf', '/tmp/jm-gw-proxy-dev.domain.tld.conf-out')
                file_service.get_file_to_path(gw_account_share, share_dir_name.name, share_file.name, os.path.join(backup_local_dir, share_file.name))
            # skip the /data/datahtml/.well-known dir on the master host
            except azure.common.AzureMissingResourceHttpError as e:
                print('\nWARNING: {}\n'.format(e))