Example #1
    def upload_file(fname, fpath, coref):
        daemonname = fname.split(".")[0]
        i = 0
        fail_msg = ""

        while True:
            try:
                svc = FileService(account_name=acctname, account_key=acctkey)

                l = [sonicversion, asicname, daemonname, hostname]
                e = []
                while len(e) != len(l):
                    e.append(l[len(e)])
                    svc.create_directory(sharename, "/".join(e))

                log_debug("Remote dir created: " + "/".join(e))

                svc.create_file_from_path(sharename, "/".join(l), fname, fpath)
                log_debug("Remote file created: name{} path{}".format(
                    fname, fpath))
                newcoref = os.path.dirname(
                    coref) + "/" + UPLOAD_PREFIX + os.path.basename(coref)
                os.rename(coref, newcoref)
                break

            except Exception as ex:
                log_err("core uploader failed: Failed during upload (" +
                        coref + ") err: (" + str(ex) + ") retry:" + str(i))
                if not os.path.exists(fpath):
                    break
                i += 1
                time.sleep(PAUSE_ON_FAIL)
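Note: these examples use the legacy azure-storage-file package, whose FileService API has since been superseded by azure-storage-file-share. For comparison, a minimal sketch of the same kind of upload with the newer SDK (account URL, share, key, and paths are placeholders):

    # Sketch only: upload one local file with the current azure-storage-file-share SDK.
    from azure.storage.fileshare import ShareFileClient

    client = ShareFileClient(
        account_url="https://<account>.file.core.windows.net",
        share_name="<share>",
        file_path="dir/file.txt",  # remote path inside the share
        credential="<account-key>")
    with open("file.txt", "rb") as src:
        client.upload_file(src)  # creates or overwrites the remote file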
Example #2
def main(files):

    configfile = os.path.dirname(os.path.abspath(__file__)) + '/upload.config'
    print(configfile)

    if not os.path.isfile(configfile):
        print("Settings not found. Please create an upload.config file with the Azure file share account name, access key, file share name, and folder. Each value should be on its own line.")
        exit()
    if len(files) < 1:
        print("No files provided for upload. Please supply file paths as arguments.")
        exit()

    # get settings - account, key, share, and folder in subsequent lines
    with open(configfile,"r") as config:
        settings = config.readlines()
        azure_account = settings[0].rstrip()
        azure_key = settings[1].rstrip()
        share = settings[2].rstrip()
        folder = settings[3].rstrip()

    file_service = FileService(account_name=azure_account, account_key=azure_key)

    # Arguments should just be an array of filenames.
    timestamp_suffix = datetime.now().strftime("%Y%m%d-%H%M_")
    for file in files:
        if not os.path.isfile(file):
            print(file, "not found")
        else:
            print("Uploading:", file)
            stampedfile = timestamp_suffix + os.path.basename(file)
            file_service.create_file_from_path(share, folder, stampedfile, file, progress_callback=progress)
            print(stampedfile," uploaded")
Example #3
    def upload_file(fname, fpath):
        daemonname = fname.split(".")[0]
        i = 0
        fail_msg = ""

        while i <= MAX_RETRIES:
            try:
                svc = FileService(account_name=acctname, account_key=acctkey)

                l = [sonicversion, asicname, daemonname, hostname]
                e = []
                while len(e) != len(l):
                    e.append(l[len(e)])
                    svc.create_directory(sharename, "/".join(e))

                log_debug("Remote dir created: " + "/".join(e))

                svc.create_file_from_path(sharename, "/".join(l), fname, fpath)
                log_debug("Remote file created: name{} path{}".format(
                    fname, fpath))
                break

            except Exception as ex:
                log_err("core uploader failed: Failed during upload (" +
                        str(ex) + ")")
                fail_msg = str(ex)
                i += 1
                if i >= MAX_RETRIES:
                    raise Exception("Failed while uploading. msg(" + fail_msg +
                                    ") after " + str(i) + " retries")
                time.sleep(PAUSE_ON_FAIL)
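The inner while loop in Examples #1 and #3 exists because create_directory is not recursive: each prefix of the remote path has to be created separately. A more direct equivalent, assuming the same FileService instance and names as above:

    # Sketch: create each prefix of the remote path, tolerating existing dirs.
    def ensure_remote_dirs(svc, sharename, parts):
        for depth in range(1, len(parts) + 1):
            # "a", then "a/b", then "a/b/c", ...
            svc.create_directory(sharename, "/".join(parts[:depth]),
                                 fail_on_exist=False)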
Example #4
    def move_to_storage_account(self, file, storage='blob'):
        from hugme.__key__ import acc, key
        from datetime import datetime

        print(datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
              'Moving final file to {} storage...'.format(storage))

        if storage == 'blob':
            from azure.storage.blob import BlockBlobService, PublicAccess

            block_blob_service = BlockBlobService(account_name=acc,
                                                  account_key=key)
            block_blob_service.set_container_acl(
                'final', public_access=PublicAccess.Container)
            block_blob_service.create_blob_from_path(
                container_name='consumidorgov',
                blob_name='comparativo',
                file_path='consumidor_gov\\data\\' + file,
            )

        elif storage == 'files':
            from azure.storage.file import FileService

            file_service = FileService(account_name=acc, account_key=key)
            file_service.create_file_from_path(
                share_name='complains',
                directory_name='hugme',
                file_name='base.csv',
                local_file_path=file,
            )

        else:
            return False
Example #5
 def onRecordingComplete(self):
     print("recording completed {}".format(self.filename))
     file_service = FileService(account_name=cfg.storageAc,
                                account_key=cfg.accountkey)
     file_service.create_file_from_path(cfg.fileShare, None,
                                        '{}.flv'.format(self.filename),
                                        'temp/{}.flv'.format(self.filename))
     sharedAccessStorage = SharedAccessSignature(cfg.storageAc,
                                                 cfg.accountkey)
     sasKey = sharedAccessStorage.generate_file(
         cfg.fileShare,
         file_name='{}.flv'.format(self.filename),
         permission=FilePermissions.READ,
         start=datetime.utcnow(),
         expiry=datetime.utcnow() + timedelta(minutes=5))
     downloadLink = cfg.downloadLinkFormat.format(cfg.storageAcUrl,
                                                  cfg.fileShare,
                                                  self.filename, sasKey)
     self.recorderSock.emit('recordingcomplete', {
         'sid': self.sid,
         'download': downloadLink
     })
     os.remove('temp/{}.flv'.format(self.filename))
     time.sleep(cfg.timeBeforeFileDelete)
     file_service.delete_file(cfg.fileShare, None,
                              '{}.flv'.format(self.filename))
     return
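The value returned by generate_file is only the SAS query string; cfg.downloadLinkFormat (not shown in this example) presumably assembles the full URL. A hypothetical format matching the four arguments passed above:

    # Hypothetical value; the real cfg.downloadLinkFormat is not part of this example.
    downloadLinkFormat = '{}/{}/{}.flv?{}'  # storageAcUrl / fileShare / filename ? sasKey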
Example #6
def run(job, **kwargs):
    resource = kwargs.get('resource')
    create_custom_fields_as_needed()

    storage_account = '{{ storage_account }}'
    file = "{{ file }}"
    azure_storage_file_share_name = '{{ azure_storage_file_share_name }}'
    file_name = Path(file).name
    if file.startswith(settings.MEDIA_URL):
        set_progress("Converting relative URL to filesystem path")
        file = file.replace(settings.MEDIA_URL, settings.MEDIA_ROOT)

    account_key = Resource.objects.filter(name__icontains='{{ storage_account }}')[0].azure_account_key
    fallback_account_key = Resource.objects.filter(name__icontains="{{ storage_account }}")[0].azure_account_key_fallback

    set_progress("Connecting To Azure...")
    file_service = FileService(account_name=storage_account, account_key=account_key)

    set_progress('Creating a file share...')
    file_service.create_share(share_name=azure_storage_file_share_name, quota=1)

    set_progress('Creating a file...')
    file_exists = file_service.exists(share_name=azure_storage_file_share_name, file_name=file_name, directory_name='')
    file_service.create_file_from_path(share_name=azure_storage_file_share_name, file_name=file_name, directory_name='', local_file_path=file)
    if file_exists:
        return "WARNING", "File with this name already exists", "The file will be updated."

    resource.name = azure_storage_file_share_name + '-' + file_name
    resource.azure_storage_account_name = storage_account
    resource.azure_account_key = account_key
    resource.azure_account_key_fallback = fallback_account_key
    resource.azure_storage_file_share_name = azure_storage_file_share_name
    resource.azure_storage_file_name = file_name
    resource.save()
    return "Success", "The file has successfully been uploaded", ""
Example #7
def shares():
    # Create Container and Share
    global storage_account_key, blob_service, blob_share, file_service, file_share
    sak = storage_client.storage_accounts.list_keys(resourcegroupname,
                                                    storageaccountname)
    storage_account_key = sak.keys[0].value
    cloudstorage_client = CloudStorageAccount(storageaccountname,
                                              storage_account_key)
    blob_service = cloudstorage_client.create_block_blob_service()
    blob_share = blob_service.create_container(
        sharename, public_access=PublicAccess.Container)
    file_service = FileService(account_name=storageaccountname,
                               account_key=storage_account_key)
    file_share = file_service.create_share(sharename)
    # Copy Setup Files to Container and Share
    blob_service.create_blob_from_path(
        sharename,
        filename,
        filename,
    )
    file_service.create_file_from_path(
        sharename,
        '',
        filename,
        filename,
    )
Example #8
 def upload(path: str):
     from azure.storage.file import FileService
     service = FileService(account_name=config['account_name'],
                           account_key=config['account_key'])
     # list_directories_and_files yields entry objects, so compare their names
     if shared_directory not in (entry.name for entry in
                                 service.list_directories_and_files(
                                     config['share_name'])):
         service.create_directory(config['share_name'], shared_directory)
     service.create_file_from_path(config['share_name'], shared_directory,
                                   path.split('/')[-1], path)
Example #9
def create_azure_fileshare(share_prefix, account_name, account_key):
    """
    Generate a unique share name to avoid overlaps in shared infra
    :param share_prefix:
    :param account_name:
    :param account_key:
    :return:
    """

    # FIXME - Need to remove hardcoded directory path below

    d_dir = './WebInDeploy/bootstrap'
    share_name = "{0}-{1}".format(share_prefix.lower(), str(uuid.uuid4()))
    print('using share_name of: {}'.format(share_name))

    # archive_file_path = _create_archive_directory(files, share_prefix)

    try:
        # ignore SSL warnings - bad form, but SSL Decrypt causes issues with this
        s = requests.Session()
        s.verify = False

        file_service = FileService(account_name=account_name,
                                   account_key=account_key,
                                   request_session=s)

        # print(file_service)
        if not file_service.exists(share_name):
            file_service.create_share(share_name)

        for d in ['config', 'content', 'software', 'license']:
            print('creating directory of type: {}'.format(d))
            if not file_service.exists(share_name, directory_name=d):
                file_service.create_directory(share_name, d)

            # FIXME - We only handle bootstrap files.  May need to handle other dirs

            if d == 'config':
                for filename in os.listdir(d_dir):
                    print('creating file: {0}'.format(filename))
                    file_service.create_file_from_path(
                        share_name, d, filename, os.path.join(d_dir, filename))

    except AttributeError as ae:
        # this can be returned on bad auth information
        print(ae)
        return "Authentication or other error creating bootstrap file_share in Azure"

    except AzureException as ahe:
        print(ahe)
        return str(ahe)
    except ValueError as ve:
        print(ve)
        return str(ve)

    print('all done')
    return share_name
Example #10
class Azure:
    def __init__(self, ac, key, fileshare):
        self.account_name = ac
        self.account_key = key
        self.fileshare_name = fileshare

        #Create a FileService that is used to call the File Service for the storage account
        self.file_service = FileService(account_name=ac, account_key=key)

        return

    def List_directory(self, directory_path):
        self.generator = self.file_service.list_directories_and_files(
            directory_path)
        print("Files in the directory: " + directory_path)
        for file_or_dir in self.generator:
            print("\t File/Directory name: " + file_or_dir.name)
        return

    def Download(self, loc_directory, fileshare_directory_name):
        local_path = os.path.expanduser("~/" + loc_directory)
        print("\nDownloading the following files to " + local_path)
        # materialize the listing so it can be iterated twice
        entries = list(self.file_service.list_directories_and_files(
            self.fileshare_name + "/" + fileshare_directory_name))
        for file_or_dir in entries:
            print("\t File/Directory name: " + file_or_dir.name)
        for file_or_dir in entries:
            #full_path_to_file2 = os.path.join(local_path, file_or_dir.name)
            self.file_service.get_file_to_path(self.fileshare_name,
                                               fileshare_directory_name,
                                               file_or_dir.name,
                                               local_path + file_or_dir.name)
        print("\nFiles downloaded to " + local_path)
        return

    def Upload(self, loc_directory, fileshare_directory_name):
        local_path = os.path.expanduser("~/" + loc_directory)
        print("\nUploading the following files to " + fileshare_directory_name)
        entries = os.listdir(local_path)
        # for entry in entries:
        #     print(entry)
        for entry in entries:
            self.file_service.create_file_from_path(
                self.fileshare_name,  # fileshare name
                fileshare_directory_name,  # directory on the share to upload into
                entry,  # name of the file that is created
                local_path + entry,  # local file that is uploaded
                content_settings=ContentSettings(
                    content_type='application/vnd.ms-excel'))
        print("The following files have been uploaded")
        # list the files now on the fileshare
        self.List_directory(self.fileshare_name + "/" +
                            fileshare_directory_name)
Example #11
def file():
    static_dir_path = "D:\home\site\wwwroot\static"
    static_file_dir_path = static_dir_path + '\\' + 'files'
    account_name = 'hanastragetest'
    account_key = 'acount_key'
    root_share_name = 'root'
    share_name = 'images'
    directory_url = 'https://hanastragetest.file.core.windows.net/' + root_share_name + '/' + share_name

    # create local save directory
    if not os.path.exists(static_file_dir_path):
        os.mkdir(static_file_dir_path)

    file_service = FileService(account_name=account_name,
                               account_key=account_key)
    # create share
    file_service.create_share(root_share_name)

    # create directory
    file_service.create_directory(root_share_name, share_name)

    files = os.listdir(static_dir_path)
    for file in files:
        # delete
        if file_service.exists(root_share_name, share_name, file):
            file_service.delete_file(root_share_name, share_name, file)

        # file upload into the 'images' directory
        file_service.create_file_from_path(
            root_share_name,
            share_name,  # directory name under the share root
            file,
            static_dir_path + '\\' + file,
            content_settings=ContentSettings(content_type='image/png'))

    files_and_dirs = list(file_service.list_directories_and_files(
        root_share_name, share_name))

    html = ""
    for file_or_dir in files_and_dirs:
        # file download; list entries are objects, so use .name
        file_save_path = static_file_dir_path + '\\' + file_or_dir.name
        file_service.get_file_to_path(root_share_name, share_name,
                                      file_or_dir.name, file_save_path)
        html = "{}<img src='{}'>".format(html, file_save_path)

    result = {
        "result": True,
        "data": {
            "file_or_dir_name":
            [file_or_dir.name for file_or_dir in files_and_dirs]
        }
    }
    return make_response(json.dumps(result, ensure_ascii=False) + html)
Example #12
def update_datanginxconfd(gw_account_name, gw_account_key, gw_account_share):

    """Upload data from cloned repo to the GW Storage account into the datanginx-conf.d directory with overwriting"""

    print('\nRunning update_confd to the {} and file share {} to the path datanginx-conf.d.\n'.format(gw_account_name, gw_account_share))

    file_service = FileService(account_name=gw_account_name, account_key=gw_account_key)

    configs = glob.glob('*.conf')
    for config in configs:
        print('Uploading config: {}'.format(config))
        file_service.create_file_from_path(gw_account_share, 'datanginx-conf.d', config, config)
Example #13
def store_trained_model_in_azure(model):
    file_service = FileService(
        account_name='soilhumiditydata293s',
        account_key=
        '4PSsEO1xBAIdq3/MppWm+t6eYHi+CWhVn6xNZ6i4mLVgm50K8+NK6lA94v8MxG0bvVEfYCvsv1suxCyCnUYd0A=='
    )

    file_service.delete_file('model', None, 'model')

    file_service.create_file_from_path(
        'model', None, 'model',
        '/fs/student/aditya_wadaskar/iot/ML_training/model')
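Note that delete_file raises AzureMissingResourceHttpError if no previous model exists on the share. A guarded variant, assuming the same legacy SDK:

    # Sketch: only delete the old model if it is actually there.
    if file_service.exists('model', directory_name=None, file_name='model'):
        file_service.delete_file('model', None, 'model')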
Example #14
def num3():
    file = request.form['filename']
    file += ".jpg"
    file_service = FileService(
        account_name='mystorge',
        account_key=
        '0T4f/dzyV7AIw4a9bevK5ysML0qP55CEWEqJyJWXyr6fKRxowLq8tL7mep/MfSc//mcQggeH1+K79A4HUDug3w=='
    )
    file_service.create_file_from_path(
        'image1',
        None,
        file,
        file,
        content_settings=ContentSettings(content_type='image/jpeg'))
    return "<h1> File uploaded successfully</h1>"
Example #15
    def upload_file(self, path):
        """Upload a file into the default share on the storage account.

        If the share doesn't exist, create it first.
        """
        file_service = FileService(
            account_name=self.account.name,
            account_key=self.key,
        )
        file_service.create_share(self.default_share)
        file_service.create_file_from_path(
            self.default_share,
            None,
            os.path.basename(path),
            path,
        )
        return '/'.join([self.default_share, os.path.basename(path)])
Example #16
def create_azure_fileshare(files, share_prefix, account_name, account_key):
    # generate a unique share name to avoid overlaps in shared infra
    share_name = "{0}-{1}".format(share_prefix.lower(), str(uuid.uuid4()))
    print('using share_name of: {}'.format(share_name))

    archive_file_path = _create_archive_directory(files, share_prefix)

    try:
        # ignore SSL warnings - bad form, but SSL Decrypt causes issues with this
        s = requests.Session()
        s.verify = False

        file_service = FileService(account_name=account_name,
                                   account_key=account_key,
                                   request_session=s)

        # print(file_service)
        if not file_service.exists(share_name):
            file_service.create_share(share_name)

        for d in ['config', 'content', 'software', 'license']:
            print('creating directory of type: {}'.format(d))
            if not file_service.exists(share_name, directory_name=d):
                file_service.create_directory(share_name, d)

            d_dir = os.path.join(archive_file_path, d)
            for filename in os.listdir(d_dir):
                print('creating file: {0}'.format(filename))
                file_service.create_file_from_path(
                    share_name, d, filename, os.path.join(d_dir, filename))

    except AttributeError as ae:
        # this can be returned on bad auth information
        print(ae)
        return "Authentication or other error creating bootstrap file_share in Azure"

    except AzureException as ahe:
        print(ahe)
        return str(ahe)
    except ValueError as ve:
        print(ve)
        return str(ve)

    print('all done')
    return 'Azure file-share {} created successfully'.format(share_name)
Example #17
def upload_scripts(config, job_name, filenames):
    service = FileService(config.storage_account['name'],
                          config.storage_account['key'])
    service.create_directory(config.fileshare_name,
                             job_name,
                             fail_on_exist=False)
    def transfer_file(fname):
        service.create_file_from_path(
            config.fileshare_name, job_name, os.path.basename(fname), fname)

    for filename in filenames:
        transfer_file(filename)
Example #18
    def copy(self, source_path, dest_path, account=None, group_name=None):
        """Copy file from a path to another path. The azure url format is https://myaccount.blob.core.windows.net/mycontainer/myblob.
         Args:
             source_path(str): The path of the file to be copied.
             dest_path(str): The destination path where the file is going to be allocated.
         Raises:
             :exc:`~..DriverError`: if the file is not uploaded correctly.
        """
        if 'core.windows.net' not in source_path and 'core.windows.net' not in dest_path:
            self.logger.error(
                "Source or destination must be an Azure storage URL (format "
                "https://myaccount.blob.core.windows.net/mycontainer/myblob)")
            raise DriverError

        # Check if source exists and can read
        if 'core.windows.net' in source_path:
            parse_url = _parse_url(source_path)
            key = self.storage_client.storage_accounts.list_keys(
                self.resource_group_name, parse_url.account).keys[0].value
            if parse_url.file_type == 'blob':
                bs = BlockBlobService(account_name=parse_url.account,
                                      account_key=key)
                return bs.get_blob_to_path(parse_url.container_or_share_name,
                                           parse_url.file, dest_path)
            elif parse_url.file_type == 'file':
                fs = FileService(account_name=parse_url.account,
                                 account_key=key)
                return fs.get_file_to_path(parse_url.container_or_share_name,
                                           parse_url.path, parse_url.file,
                                           dest_path)
            else:
                raise ValueError(
                    "This azure storage type is not valid. It should be blob or file."
                )
        else:
            parse_url = _parse_url(dest_path)
            key = self.storage_client.storage_accounts.list_keys(
                self.resource_group_name, parse_url.account).keys[0].value
            if parse_url.file_type == 'blob':
                bs = BlockBlobService(account_name=parse_url.account,
                                      account_key=key)
                return bs.create_blob_from_path(
                    parse_url.container_or_share_name, parse_url.file,
                    source_path)
            elif parse_url.file_type == 'file':
                fs = FileService(account_name=parse_url.account,
                                 account_key=key)
                return fs.create_file_from_path(
                    parse_url.container_or_share_name, parse_url.path,
                    parse_url.file, source_path)
            else:
                raise ValueError(
                    "This azure storage type is not valid. It should be blob or file."
                )
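The _parse_url helper is not shown in this example. Judging from the attributes used above (account, file_type, container_or_share_name, path, file), a hypothetical implementation might look like this:

    # Hypothetical sketch of _parse_url, inferred from its call sites above.
    from collections import namedtuple
    from urllib.parse import urlparse

    ParsedUrl = namedtuple(
        'ParsedUrl', 'account file_type container_or_share_name path file')

    def _parse_url(url):
        parsed = urlparse(url)  # e.g. https://acct.blob.core.windows.net/container/dir/name
        account, file_type = parsed.netloc.split('.')[:2]  # 'acct', then 'blob' or 'file'
        container, *dirs, name = parsed.path.lstrip('/').split('/')
        return ParsedUrl(account, file_type, container, '/'.join(dirs) or None, name)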
Example #19
    def saveChanges(self):
        if (self.currentState != Application.DONE_SCANNING):
            return

        gfwlistBackupFile = self.azureFileShareFileName + '.' + datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S") + '.bk'
        # messagebox.showinfo('You are about to save the changes', 'Are you sure?')
        result = messagebox.askquestion('You are about to save the changes', 'Are you sure?', icon='warning')
        if result == 'yes':
            # this is for copying file locally: copyfile(self.gfwlistFile, gfwlistBackupFile)
            azureFileService = FileService(account_name=self.azureAccountName, account_key=self.azureAccountKey)
            sourceUrl = 'https://%s.%s/%s/%s/%s' % (self.azureAccountName, self.azureFileServiceDomain, self.azureFileShareName, self.azureFileShareFileDir, self.azureFileShareFileName)
            azureFileService.copy_file(self.azureFileShareName, self.azureFileShareFileDir, gfwlistBackupFile, sourceUrl)
            # the following is for writing the file locally
            with open(self.gfwlistFile, 'w') as f:
                f.write(self.sectionBeforeRules.getvalue())
                f.write(',\n'.join('  "' + str(e) + '"' for e in self.listBox.get(0, END)))
                f.write('\n')
                f.write(self.sectionAfterRules.getvalue())
            
            # then write it to the file share
            azureFileService.create_file_from_path(self.azureFileShareName, self.azureFileShareFileDir, self.azureFileShareFileName, self.gfwlistFile)
Example #20
File: main.py Project: ilovecee/ef
def upload():
  AzureStorageAccount = 'effiles'
  key = 'axLykwdLsUwKTDY5flU6ivGrt9obV38k2UMVDCSpLYE3K6jAkwsjWOThQydhuMSWHfx6lTq102gdkas/GyKhEA=='
  up_path = 'uploads'
  path1 = 'efficientfrontier'
  file_service = FileService(account_name = AzureStorageAccount, account_key = key)

  target = os.path.join(APP_ROOT, 'uploads/')

  if request.method == 'POST':
    if not os.path.isdir(target):
      os.mkdir(target)

    for file in request.files.getlist("file"):
      filename = file.filename
      destination = '/'.join([target, filename])
      #print (destination)
      file.save(destination)       # Save the file to local server
      now = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
      filename_new = os.path.splitext(filename)[0] + '_' + now + os.path.splitext(filename)[1]
      file_service.create_file_from_path(path1, up_path, filename_new, destination)    # Upload the file to Azure Storage Account
      # Create Cursor
      cur = mysql.connection.cursor()

      # Execute
      cur.execute('INSERT INTO files_uploaded (filename, USER, AzureAccount, AzureShare, Directory) VALUES(%s, %s, %s, %s, %s)', (filename_new, session['email'], AzureStorageAccount, path1, up_path))

      # Commit to DB
      mysql.connection.commit()

      # Close connection
      cur.close()

      flash('File Uploaded', 'success')
      os.remove(destination)  # Remove locally saved file

  return redirect(url_for('files'))
Example #21
def main(req: func.HttpRequest) -> func.HttpResponse:
    acc_name = req.params.get('account_name')
    acc_key = req.params.get('account_key')
    container_name = req.params.get('container_name')
    blob_name = req.params.get('blob_name')

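    # Note: despite the blob-style parameter names, FileService operates on an
    # Azure *file share*; container_name is used here as the share name.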
    file_service = FileService(account_name=acc_name, account_key=acc_key)

    download_path = "/tmp/" + blob_name
    file_service.get_file_to_path(container_name, None, blob_name,
                                  download_path)

    logging.info("Downloading blob to " + download_path)

    latency, upload_path = video_processing(blob_name, download_path)

    file_service.create_file_from_path(container_name, None,
                                       upload_path.split("/")[FILE_PATH_INDEX],
                                       upload_path)

    logging.info(latency)
    return func.HttpResponse(str(latency))
Example #22
def _main(args):
    with open(args.config_file_path) as data_file:
        file_service = FileService(account_name=args.afs_storage_account,
                                   account_key=args.afs_storage_account_key,
                                   endpoint_suffix=args.afs_endpoint_suffix)
        config_json = json.load(data_file)
        if _check_setups_local(config_json['Installers'], args.ignore):
            for installer in config_json['Installers']:
                print("Uploading {} to AFS ".format(installer['SourcePath']))
                file_service.create_file_from_path(args.afs_name, None,
                                                   installer['RemotePath'],
                                                   installer['SourcePath'])
                if installer['IsMsi'] is False:
                    for patch in installer['Patches']:
                        patch_file_name = os.path.basename(patch)
                        print("Uploading {} to AFS ".format(patch_file_name))
                        file_service.create_file_from_path(
                            args.afs_name, None, patch_file_name, patch)
                #archive_command = r'"{}" cp {} {}'.format(args.azcopy_path, installer['SourcePath'], args.afs_sas_uri)
                #print(archive_command)
                #subprocess.call(archive_command, shell=True)
        else:
            sys.exit(1)
Example #23
class ModelGymClient:
    config = {}
    project_root = ""
    project_name = ""
    user = ""

    def __init__(self,
                 config=None,
                 config_path=MODELGYM_CONFIG["default_config_path"]):
        if config_path:
            self.config = self.__config_by_path(config_path)
        if type(config) is dict:
            self.config.update(config)
        else:
            if config:
                raise TypeError("config must be dictionary!")

        project_root = Path(self.config["local_project_root"]).expanduser()
        self.project_root = project_root
        self.project_name = Path(self.project_root.parts[-1])
        if not project_root.is_dir():
            project_root.mkdir(parents=True, exist_ok=True)
        user_folder = self.project_root / self.config["user"]
        self.user = self.config["user"]
        if not user_folder.is_dir():
            user_folder.mkdir(parents=True, exist_ok=True)

        # self.stub = new_client()
        self.file_service = FileService(
            account_name=self.config['azurefs_acc_name'],
            account_key=self.config['azurefs_acc_key'])
        self.afs_share = self.config['azurefs_share']
        self.__get_client_transport_credentials(
            str(Path(self.config["client_cert"]).expanduser()),
            str(Path(self.config["client_key"]).expanduser()),
            str(Path(self.config["ca_cert"]).expanduser()))
        self.channel = grpc.secure_channel(
            self.config["connect_to"],
            self.creds,
            options=(
                ('grpc.max_send_message_length',
                 self.config["max_msg_size_megabytes"]),
                ('grpc.max_receive_message_length',
                 self.config["max_msg_size_megabytes"]),
            ))
        self.stub = wonderland_pb2_grpc.WonderlandStub(self.channel)
        self.check_user()

    def check_user(self):
        list_folder = self.file_service.list_directories_and_files(
            self.afs_share)
        for folder in list_folder:
            if self.user == folder.name:
                return True
        self.file_service.create_directory(share_name=self.afs_share,
                                           directory_name=self.user)
        return True

    def __get_client_transport_credentials(self, client_cert_path,
                                           client_key_path, ca_cert_path):
        client_cert_path = Path(client_cert_path).expanduser()
        client_key_path = Path(client_key_path).expanduser()
        ca_cert_path = Path(ca_cert_path).expanduser()
        path_ok = [
            client_cert_path.exists(),
            client_key_path.exists(),
            ca_cert_path.exists()
        ]
        if not all(path_ok):
            raise ValueError("One of credentials files does not exist")
        self.creds = grpc.ssl_channel_credentials(
            ca_cert_path.read_bytes(), client_key_path.read_bytes(),
            client_cert_path.read_bytes())

    def __config_by_path(self, path):
        path = Path(path).expanduser()
        if path.exists():
            with path.open() as file:
                config = yaml.safe_load(file)
            return config
        else:
            raise FileNotFoundError(
                "Config {} doesn't exist !!! Check ~/.wonder/config.yaml".
                format(path))

    def eval_model(self, model_info, data_path):
        model_path = self.send_model(model_info)
        job = Job(input=json.dumps({
            "model_path": str(model_path),
            "data_path": str(data_path)
        }),
                  kind="hyperopt")
        job = self.stub.CreateJob(job)
        self.stub.GetJob(RequestWithId(id=job.id))
        return job.id

    def gather_results(self, job_id_list, timeout):
        job_completed = {job_id: Job.PENDING for job_id in job_id_list}
        deadline = time.time() + timeout
        while True:
            time.sleep(5)
            for id in job_id_list:
                job = self.stub.GetJob(RequestWithId(id=id))
                job_completed[id] = job.status
            if not any(s in job_completed.values()
                       for s in (Job.PENDING, Job.RUNNING, Job.PULLED)):
                break
            if time.time() > deadline:
                print("Timeout was expired!")
                break

        results = []
        for i, id in enumerate(job_id_list):
            job = self.stub.GetJob(RequestWithId(id=id))
            if job.status == Job.COMPLETED:
                results += [{}]
            else:
                results.append(None)
            files = {}
            if job.output != "":
                files = json.loads(job.output)
            for file, path in files.items():
                self.file_service.get_file_to_path(
                    share_name=self.afs_share,
                    directory_name=Path(path).parent,
                    file_name=Path(path).name,
                    file_path=str(self.project_root / path))
                if file == 'output':
                    with open(self.project_root / path, "r") as f:
                        results[i]['output'] = json.load(f)
                if file == 'result_model_path':
                    results[i]['result_model_path'] = self.project_root / path
                if file == 'error':
                    with open(self.project_root / path, "r") as f:
                        logging.warning(f.read())
        return results

    def send_model(self, model_info):
        folder = "model-" + ''.join([
            random.choice(string.ascii_letters + string.digits)
            for _ in range(12)
        ])
        model_path = self.project_root / self.user / folder / MODELGYM_CONFIG[
            "model_file"]
        try:
            model_folder = model_path.parent
            model_folder.mkdir()
        except FileExistsError:
            logging.warning("Model folder {} already exists!".format(model_folder))
        except FileNotFoundError:
            logging.warning(
                "Model folder {} is missing!".format(model_folder))
        with (model_path).open(mode="w") as file:
            json.dump(model_info, file, cls=NumpyEncoder)
        afs_path = Path(self.user) / folder / MODELGYM_CONFIG["model_file"]
        self.file_service.create_directory(share_name=self.afs_share,
                                           directory_name=afs_path.parent)
        self.file_service.create_file_from_path(share_name=self.afs_share,
                                                directory_name=afs_path.parent,
                                                file_name=afs_path.name,
                                                local_file_path=model_path,
                                                max_connections=cpu_count())
        return afs_path

    def send_data(self, data_path, push_data=False):
        """
        Copy data to the AFS DATA directory.

        :param data_path: <string>. Specify your data path as a string.
        :return: path in the AFS share.
        """
        logging.info("Sending data to AFS")
        checksum = get_data_hash(data_path)[:10]
        data_folder = time.strftime("%Y-%m-%d-%H.%M") + '-' + checksum
        afs_path = Path(MODELGYM_CONFIG["data_folder"]
                        ) / data_folder / MODELGYM_CONFIG["data_file"]

        list_folder = self.file_service.list_directories_and_files(
            self.afs_share, directory_name="DATA")
        for folder in list_folder:
            if checksum == folder.name[-10:]:
                logging.info("Folder for data already exist!")
                afs_path = Path(
                    "DATA") / folder.name / MODELGYM_CONFIG["data_file"]
                logging.info("Data is in the AFS {}".format(folder.name))
                if push_data:
                    logging.warning("Rewriting data")
                    afs_path = Path(
                        MODELGYM_CONFIG["data_folder"]
                    ) / folder.name / MODELGYM_CONFIG["data_file"]
                else:
                    return afs_path
        self.file_service.create_directory(share_name=self.afs_share,
                                           directory_name=afs_path.parent)
        self.file_service.create_file_from_path(share_name=self.afs_share,
                                                directory_name=afs_path.parent,
                                                file_name=afs_path.name,
                                                local_file_path=data_path,
                                                max_connections=cpu_count(),
                                                progress_callback=logbar)
        logging.info("Sending is over")
        return afs_path

    def from_project_root_path(self, path):
        path = Path(path)
        # if not path.exists():
        # logging.warning("{} is missing !!".format(path))
        try:
            relative_path = path.relative_to(self.project_root.parent)
            return str(relative_path)
        except ValueError:
            logging.warning("Path doesn't have project_root folder {}".format(
                self.project_root))
Example #24
class StorageHelper(object):
    """Handle details related to a single storage account and share.
    Instantiate this object with information sufficient to
    uniquely identify a storage account and a file share within it.
    Then .account can be used to retrieve the Azure SDK for Python
    object corresponding to the account, and .key can be used to
    get an access key for it.
    For both those properties, if the value mentioned doesn't exist,
    it will be created upon first property access.
    """
    def __init__(self,
                 client_data,
                 resource_helper,
                 name,
                 account=None,
                 default_share='share'):
        self.name = name
        self.default_share = default_share
        self._account = account
        self._key = os.environ.get('AZURE_STORAGE_KEY')
        self.resource_helper = resource_helper
        self.client = StorageManagementClient(*client_data)
        self.file_service = FileService(
            account_name=self.account.name,
            account_key=self.key,
        )

    @property
    def account(self):
        """Return the managed StorageAccounts object.
        If no such account exists, create it first.
        """
        if self._account is None:
            print('Creating storage account...')
            # Error to create storage account if it already exists!
            name_check = self.client.storage_accounts.check_name_availability(
                self.name)
            if name_check.name_available:
                storage_creation = self.client.storage_accounts.create(
                    self.resource_helper.group.name, self.name,
                    StorageAccountCreateParameters(
                        sku=StorageAccountSku(StorageSkuName.standard_lrs),
                        kind=StorageKind.storage,
                        location=self.resource_helper.group.location,
                    ))
                storage = storage_creation.result()
            else:
                try:
                    storage = self.client.storage_accounts.get_properties(
                        self.resource_helper.group.name, self.name)
                except CloudError:
                    print('Storage account {} already exists'
                          ' in a resource group other than {}.'.format(
                              self.name, self.resource_helper.group.name))
            print('Got storage account:', storage.name)
            self._account = storage
        return self._account

    @property
    def key(self):
        """Get the first available storage key.
        This will crash if there are no available storage keys,
        which is unlikely since two are created along with a storage account.
        """
        if self._key is None:
            storage_keys = self.client.storage_accounts.list_keys(
                self.resource_helper.group.name, self.account.name)
            self._key = next(iter(storage_keys.keys)).value
        return self._key

    def upload_file(self, path, sharename):
        """Upload a file into a share on the storage account.
        Falls back to the default share when sharename is None.
        """

        self.file_service.create_file_from_path(
            self.default_share if sharename is None else sharename,
            None,
            os.path.basename(path),
            path,
        )
        return '/'.join([self.default_share, os.path.basename(path)])

    def download_file(self, sharename, filename):
        self.file_service.get_file_to_path(sharename, None, filename, filename)

    def delete_file(self, sharename, filename):
        self.file_service.delete_file(sharename, None, filename)

    def create_share(self, sharename):
        self.file_service.create_share(sharename)

    def create_directory(self, sharename, directoryname):
        self.file_service.create_directory(sharename, directoryname)

    def list_directories_and_files(self, sharename):
        generator = self.file_service.list_directories_and_files(sharename)
        return [file_or_dir.name for file_or_dir in generator]

    def list_shares(self):
        shares = list(self.file_service.list_shares(include_snapshots=True))
        sharelist = [fileshare.name for fileshare in shares]
        print(sharelist)
        return sharelist
Example #25
class AFSLoader:
    def __init__(self, local_root: Path, afs_creds: dict = None):
        if afs_creds is None:
            afs_creds = get_afs_creds()
        self.afs_name = afs_creds["AFS_NAME"]
        self.afs_key = afs_creds["AFS_KEY"]
        self.afs_share = afs_creds["AFS_SHARE"]
        self.file_service = FileService(account_name=self.afs_name,
                                        account_key=self.afs_key)
        self.local_root = Path(local_root)

    def get_afs_creds(self):
        return self.afs_name, self.afs_key, self.afs_share

    def upload_data_afs(self, data_path: Path, push_data: bool = False):
        """
        Copy data to the AFS directory.

        :param data_path: <Path>. Specify your path to the local data folder.
        :param push_data: if True, re-upload the data even if it already exists.
        :return: path of the directory in the AFS share.
        """
        logging.info("Sending data to AFS")
        checksum = md5_dir(data_path)[:10]
        afs_path = time.strftime("%Y-%m-%d-%H.%M") + '-' + checksum

        list_folder = self.file_service.list_directories_and_files(
            self.afs_share)
        for folder in list_folder:
            if checksum == folder.name[-10:]:
                logging.info("Folder for data already exist!")
                afs_path = folder.name
                logging.info("Data is in the AFS {}".format(folder.name))
                if push_data:
                    logging.warning("Rewriting data")
                    afs_path = folder.name
                else:
                    return afs_path
        self.file_service.create_directory(share_name=self.afs_share,
                                           directory_name=afs_path)

        for file in Path(data_path).iterdir():
            progress_callback = lambda current, total: logbar(
                current, total, f"Uploading {file.name}")
            self.file_service.create_file_from_path(
                share_name=self.afs_share,
                directory_name=afs_path,
                file_name=file.name,
                local_file_path=str(file),
                max_connections=cpu_count(),
                progress_callback=progress_callback)
        logging.info("Sending is over")
        return afs_path

    def download_data_afs(self, afs_path: Path, dst_path: Path = None):
        afs_path = Path(afs_path)
        if not dst_path:
            assert self.local_root is not None
            dst_path = self.local_root

        list_folder = self.file_service.list_directories_and_files(
            self.afs_share, directory_name=afs_path)
        try:
            os.mkdir(self.local_root / afs_path)
        except FileExistsError:
            print(f"Directory {self.local_root / afs_path} was rewritten ")
        for file in list_folder:
            progress_callback = lambda current, total: logbar(
                current, total, f"Downloading {file.name}")
            self.file_service.get_file_to_path(
                share_name=self.afs_share,
                directory_name=afs_path,
                file_name=file.name,
                file_path=str(dst_path / afs_path / file.name),
                progress_callback=progress_callback)
Example #26
file_service = FileService(account_name=STORAGE_ACCOUNT_NAME, account_key=STORAGE_ACCOUNT_KEY)
file_service.create_share(share_name='azureml-project', quota=1)
file_service.create_share(share_name='azureml-share', quota=1)

file_service.create_directory('azureml-share', 'Solution1')
file_service.create_directory('azureml-share', 'Solution2')

block_blob_service = BlockBlobService(account_name=STORAGE_ACCOUNT_NAME, account_key=STORAGE_ACCOUNT_KEY)

container_name = 'telemetry'
block_blob_service.create_container(container_name)

source = os.environ['AML_ASSETS_URL']
dest = 'azureml_project.zip'

urllib.request.urlretrieve(source, dest)

with zipfile.ZipFile(dest,"r") as zip_ref:
    zip_ref.extractall("azureml-project")

for root, dirs, files in os.walk('azureml-project', topdown=True):
    directory = os.path.relpath(root, 'azureml-project')
    if directory != '.':
        file_service.create_directory('azureml-project', directory)
    for f in files:
        file_service.create_file_from_path(
            'azureml-project',
            directory,
            f,
            os.path.join(root, f))
Example #27
# Note: this snippet began mid-function; the enclosing helper, called below as
# create_directories(upload_directory, file_service), is reconstructed here.
def create_directories(directory_path, file_service):
    split_dir = directory_path.split('\\')
    for i in range(1, len(split_dir)+1, 1):
        combined_dir = '\\'.join(split_dir[:i])
        file_service.create_directory(NOTEBOOK_CONFIG['file_share_name'], combined_dir, fail_on_exist=False)

for root, directories, files in os.walk('Share'):
    for file in files:
        regex_pattern = '{0}[\\\\]?'.format('Share').replace('\\', '\\\\')
        upload_directory = re.sub(regex_pattern, '', root)
        print('Uploading {0} to {1}...'.format(os.path.join(root, file), upload_directory))
        if len(upload_directory) == 0:
            upload_directory = None
        if upload_directory is not None:
            create_directories(upload_directory, file_service)
        file_service.create_file_from_path(
            NOTEBOOK_CONFIG['file_share_name'],
            upload_directory,
            file,
            os.path.join(root, file))

block_blob_service = BlockBlobService(account_name = NOTEBOOK_CONFIG['storage_account_name'], account_key = NOTEBOOK_CONFIG['storage_account_key'])
block_blob_service.create_container('prereq', public_access = PublicAccess.Container)

for root, directories, files in os.walk('Blob'):
    for file in files:
        block_blob_service.create_blob_from_path(
            'prereq',
            file,
            os.path.join(root, file))

# create image
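Examples #26 and #27 both mirror a local directory tree onto a file share with os.walk. A consolidated sketch of that pattern, assuming a legacy FileService instance `svc` and POSIX-style remote separators:

    # Sketch: upload a local tree to a share, creating remote dirs level by level.
    import os

    def upload_tree(svc, share, local_root):
        for root, _dirs, files in os.walk(local_root):
            rel = os.path.relpath(root, local_root)
            remote_dir = None if rel == '.' else rel.replace(os.sep, '/')
            if remote_dir:
                parts = remote_dir.split('/')
                for i in range(1, len(parts) + 1):  # create_directory is not recursive
                    svc.create_directory(share, '/'.join(parts[:i]),
                                         fail_on_exist=False)
            for name in files:
                svc.create_file_from_path(share, remote_dir, name,
                                          os.path.join(root, name))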
Example #28
containername = os.environ['AZURE_CONTAINER_NAME']
subscription_id = os.environ['AZURE_SUBSCRIPTION_ID']
resource_group_params = {'location' : location}
sku = 'standard_ragrs'
kind = 'BlobStorage'
storage_account_params = {'sku': sku, 'kind': kind, 'location': location}

# Configure Credentials
credentials = ServicePrincipalCredentials(client_id=os.environ['AZURE_CLIENT_ID'],secret=os.environ['AZURE_CLIENT_SECRET'],tenant=os.environ['AZURE_TENANT_ID'])
resource_client = ResourceManagementClient(credentials, subscription_id)
storage_client = StorageManagementClient(credentials, subscription_id)

# Create Resource Group & Storage Account
resource_client.resource_groups.create_or_update(resourcegroupname, resource_group_params)
create_sa = storage_client.storage_accounts.create(resourcegroupname, storageaccountname, {'location':'eastus','kind':'storage','sku':{'name':'standard_ragrs'}})
create_sa.wait()

# Create Container
sak = storage_client.storage_accounts.list_keys(resourcegroupname, storageaccountname)
storageaccountkey = sak.keys[0].value
cloud_storage_account = CloudStorageAccount(storageaccountname, storageaccountkey)
blob_service = cloud_storage_account.create_block_blob_service()
blob_service.create_container(containername,public_access=PublicAccess.Blob)

# Copy Files
file_service = FileService(account_name=storageaccountname, account_key=storageaccountkey)
file_service.create_share(containername)
file_service.create_directory(containername, 'directory1')
file_service.create_file_from_path(containername, 'directory1', '55224azuresetup.ps1', '55224azuresetup.ps1')

Example #29
class AzureFileWriter(FilebaseBaseWriter):
    """
    Writes items to azure file shares. It is a File Based writer, so it has filebase
    option available

        - account_name (str)
            Public access name of the azure account.

        - account_key (str)
            Public access key to the azure account.

        - share (str)
            File share name.

        - filebase (str)
            Base path to store the items in the share.

    """

    supported_options = {
        "account_name": {"type": six.string_types, "env_fallback": "EXPORTERS_AZUREWRITER_NAME"},
        "account_key": {"type": six.string_types, "env_fallback": "EXPORTERS_AZUREWRITER_KEY"},
        "share": {"type": six.string_types},
    }

    def __init__(self, options, meta, *args, **kw):
        from azure.storage.file import FileService

        super(AzureFileWriter, self).__init__(options, meta, *args, **kw)
        account_name = self.read_option("account_name")
        account_key = self.read_option("account_key")
        self.azure_service = FileService(account_name, account_key)
        self.share = self.read_option("share")
        self.azure_service.create_share(self.share)
        self.logger.info("AzureWriter has been initiated." "Writing to share {}".format(self.share))
        self.set_metadata("files_counter", Counter())
        self.set_metadata("files_written", [])

    def write(self, dump_path, group_key=None, file_name=None):
        if group_key is None:
            group_key = []
        self._write_file(dump_path, group_key, file_name)

    def _update_metadata(self, dump_path, filebase_path, file_name):
        buffer_info = self.write_buffer.metadata[dump_path]
        file_info = {
            "file_name": file_name,
            "filebase_path": filebase_path,
            "size": buffer_info["size"],
            "number_of_records": buffer_info["number_of_records"],
        }
        files_written = self.get_metadata("files_written")
        files_written.append(file_info)
        self.set_metadata("files_written", files_written)
        self.get_metadata("files_counter")[filebase_path] += 1

    def _ensure_path(self, filebase):
        path = filebase.split("/")
        folders_added = []
        for sub_path in path:
            folders_added.append(sub_path)
            parent = "/".join(folders_added)
            self.azure_service.create_directory(self.share, parent)

    @retry_long
    def _write_file(self, dump_path, group_key, file_name=None):
        filebase_path, file_name = self.create_filebase_name(group_key, file_name=file_name)
        self._ensure_path(filebase_path)
        self.azure_service.create_file_from_path(self.share, filebase_path, file_name, dump_path, max_connections=5)
        self._update_metadata(dump_path, filebase_path, file_name)

    def get_file_suffix(self, path, prefix):
        number_of_keys = self.get_metadata("files_counter").get(path, 0)
        suffix = "{}".format(str(number_of_keys))
        return suffix

    def _check_write_consistency(self):
        from azure.common import AzureMissingResourceHttpError

        for file_info in self.get_metadata("files_written"):
            try:
                afile = self.azure_service.get_file_properties(
                    self.share, file_info["filebase_path"], file_info["file_name"]
                )
                file_size = afile.properties.content_length
                if str(file_size) != str(file_info["size"]):
                    raise InconsistentWriteState(
                        "File {} has unexpected size. (expected {} - got {})".format(
                            file_info["file_name"], file_info["size"], file_size
                        )
                    )
            except AzureMissingResourceHttpError:
                raise InconsistentWriteState("Missing file {}".format(file_info["file_name"]))
        self.logger.info("Consistency check passed")
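The supported_options above imply a writer configuration along these lines (a hypothetical example; the name and key are placeholders and could instead come from the EXPORTERS_AZUREWRITER_* environment variables):

    options = {
        'account_name': 'myaccount',
        'account_key': '<account-key>',
        'share': 'exports',
        'filebase': 'dumps/%Y-%m-%d/',  # filebase is provided by FilebaseBaseWriter
    }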
Example #30
)

# Copy Setup Files to Container and Share
blob_service.create_blob_from_path(
    containername,
    configfilename,
    configfilename,
)
blob_service.create_blob_from_path(
    containername,
    zipfilename,
    zipfilename,
)
file_service.create_file_from_path(
    containername,
    '',
    configfilename,
    configfilename,
)
file_service.create_file_from_path(
    containername,
    '',
    zipfilename,
    zipfilename,
)


# Create Public IP Address
def create_public_ip_address(network_client):
    public_ip_addess_params = {
        'location': location,
        'public_ip_allocation_method': 'Dynamic'
Example #31
        if step % 100 == 0:
            print(step,
                  sess.run([cost, accuracy], feed_dict={
                      X: x_data,
                      Y: y_data
                  }))

    saver.save(
        sess, "./saver/save.{}.ckpt".format(
            datetime.datetime.now().strftime("%Y-%m-%d_%H%M%S")))
    saver.save(sess, "./saver/save.last.ckpt")

    pred = sess.run(prediction, feed_dict={X: x_data})

    pw, pb = sess.run([W, b])
    result = {'W': pw.tolist(), 'b': pb.tolist()}
    print(result)

    with open(RESULT_FILE, 'w') as outfile:
        json.dump(result, outfile)

    for p, y in zip(pred, y_data.flatten()):
        print("[{}] Prediction: {} True Y: {}".format(p == int(y), p, int(y)))

file_service.create_file_from_path(FILE_SHARE, None, RESULT_FILE, RESULT_FILE)

for file in os.listdir(SAVER_FOLDER):
    print(file)
    file_service.create_file_from_path(FILE_SHARE, "saver", file,
                                       SAVER_FOLDER + "/" + file)
Example #32
class AzureFileWriter(FilebaseBaseWriter):
    """
    Writes items to azure file shares. It is a File Based writer, so it has filebase
    option available

        - account_name (str)
            Public access name of the azure account.

        - account_key (str)
            Public access key to the azure account.

        - share (str)
            File share name.

        - filebase (str)
            Base path to store the items in the share.

    """
    supported_options = {
        'account_name': {
            'type': six.string_types,
            'env_fallback': 'EXPORTERS_AZUREWRITER_NAME'
        },
        'account_key': {
            'type': six.string_types,
            'env_fallback': 'EXPORTERS_AZUREWRITER_KEY'
        },
        'share': {
            'type': six.string_types
        }
    }

    def __init__(self, options, meta, *args, **kw):
        from azure.storage.file import FileService
        super(AzureFileWriter, self).__init__(options, meta, *args, **kw)
        account_name = self.read_option('account_name')
        account_key = self.read_option('account_key')
        self.azure_service = FileService(account_name, account_key)
        self.share = self.read_option('share')
        self.azure_service.create_share(self.share)
        self.logger.info('AzureWriter has been initiated. '
                         'Writing to share {}'.format(self.share))
        self.set_metadata('files_counter', Counter())
        self.set_metadata('files_written', [])

    def write(self, dump_path, group_key=None, file_name=None):
        if group_key is None:
            group_key = []
        self._write_file(dump_path, group_key, file_name)

    def _update_metadata(self, dump_path, filebase_path, file_name):
        buffer_info = self.write_buffer.metadata[dump_path]
        file_info = {
            'file_name': file_name,
            'filebase_path': filebase_path,
            'size': buffer_info['size'],
            'number_of_records': buffer_info['number_of_records']
        }
        files_written = self.get_metadata('files_written')
        files_written.append(file_info)
        self.set_metadata('files_written', files_written)
        self.get_metadata('files_counter')[filebase_path] += 1

    def _ensure_path(self, filebase):
        path = filebase.split('/')
        folders_added = []
        for sub_path in path:
            folders_added.append(sub_path)
            parent = '/'.join(folders_added)
            self.azure_service.create_directory(self.share, parent)

    @retry_long
    def _write_file(self, dump_path, group_key, file_name=None):
        filebase_path, file_name = self.create_filebase_name(
            group_key, file_name=file_name)
        self._ensure_path(filebase_path)
        self.azure_service.create_file_from_path(
            self.share,
            filebase_path,
            file_name,
            dump_path,
            max_connections=5,
        )
        self._update_metadata(dump_path, filebase_path, file_name)

    def get_file_suffix(self, path, prefix):
        number_of_keys = self.get_metadata('files_counter').get(path, 0)
        suffix = '{}'.format(str(number_of_keys))
        return suffix

    def _check_write_consistency(self):
        from azure.common import AzureMissingResourceHttpError
        for file_info in self.get_metadata('files_written'):
            try:
                afile = self.azure_service.get_file_properties(
                    self.share, file_info['filebase_path'],
                    file_info['file_name'])
                file_size = afile.properties.content_length
                if str(file_size) != str(file_info['size']):
                    raise InconsistentWriteState(
                        'File {} has unexpected size. (expected {} - got {})'.
                        format(file_info['file_name'], file_info['size'],
                               file_size))
            except AzureMissingResourceHttpError:
                raise InconsistentWriteState('Missing file {}'.format(
                    file_info['file_name']))
        self.logger.info('Consistency check passed')