Example 1
def get_backup(gw_account_name, gw_account_key, gw_account_share, backup_local_path):

    """Upload directories and files from $account_name to local $backup_local_path using Azure FileService"""

    print('\nRunning get_backup from the {} and file share {} to local path {}.\n'.format(gw_account_name, gw_account_share, backup_local_path))

    file_service = FileService(account_name=gw_account_name, account_key=gw_account_key)
    share_dirs_list = file_service.list_directories_and_files(gw_account_share)

    for share_dir_name in share_dirs_list:

        backup_local_dir = os.path.join(backup_local_path, share_dir_name.name)

        if not os.path.isdir(backup_local_dir):
            print('Local backup directory {} not found, creating...'.format(backup_local_dir))
            os.makedirs(backup_local_dir)

        share_files_list = file_service.list_directories_and_files(gw_account_share, share_dir_name.name)
        for share_file in share_files_list:
            try:
                print('Getting file: {}'.format(os.path.join('/', share_dir_name.name, share_file.name)))
                # example:
                # file_service.get_file_to_path('gwdevproxydata', 'datanginx-conf.d', 'jm-gw-proxy-dev.domain.tld.conf', '/tmp/jm-gw-proxy-dev.domain.tld.conf-out')
                file_service.get_file_to_path(gw_account_share, share_dir_name.name, share_file.name, os.path.join(backup_local_dir, share_file.name))
            # skip the /data/datahtml/.well-known dir on the master host (missing resources are only warned about)
            except azure.common.AzureMissingResourceHttpError as e:
                print('\nWARNING: {}\n'.format(e))
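A minimal invocation sketch for get_backup (the account name, key, and local path below are placeholders, not values from the original source; the share name reuses the one from the comment above):

# Hypothetical values for illustration only.
get_backup(gw_account_name='mystorageaccount',
           gw_account_key='<account-key>',
           gw_account_share='gwdevproxydata',
           backup_local_path='/var/backups/azure-files')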
Example 2
class Azure:
    def __init__(self, ac, key, fileshare):
        self.account_name = ac
        self.account_key = key
        self.fileshare_name = fileshare

        #Create a FileService that is used to call the File Service for the storage account
        self.file_service = FileService(account_name=ac, account_key=key)

        return

    def List_directory(self, directory_path):
        self.generator = self.file_service.list_directories_and_files(
            directory_path)
        print("Files in the directory: " + directory_path)
        for file_or_dir in self.generator:
            print("\t File/Directory name: " + file_or_dir.name)
        return

    def Download(self, loc_directory, fileshare_directory_name):
        local_path = os.path.expanduser("~/" + loc_directory)
        print("\nDownloading the following files to " + local_path)
        self.generator = self.file_service.list_directories_and_files(
            self.fileshare_name + "/" + fileshare_directory_name)
        for file_or_dir in self.generator:
            print("\t File/Directory name: " + file_or_dir.name)
            self.file_service.get_file_to_path(self.fileshare_name,
                                               fileshare_directory_name,
                                               file_or_dir.name,
                                               os.path.join(local_path, file_or_dir.name))
        print("\nFiles downloaded to " + local_path)
        return

    def Upload(self, loc_directory, fileshare_directory_name):
        local_path = os.path.expanduser("~/" + loc_directory)
        self.generator = self.file_service.list_directories_and_files(
            self.fileshare_name + "/" + fileshare_directory_name)
        print("\nUploading the following files to " + fileshare_directory_name)
        entries = os.listdir(local_path)
        # for entry in entries:
        #     print(entry)
        for entry in entries:
            self.file_service.create_file_from_path(
                self.fileshare_name,  #Fileshare name
                fileshare_directory_name,  # directory on the share to upload into
                entry,  # name of the file that is created
                os.path.join(local_path, entry),  # local file that needs to be uploaded
                content_settings=ContentSettings(
                    content_type='application/vnd.ms-excel'))
        print("The followig files have been uploaded")
        #listing the files in the fileshare_name
        obj.List_directory(fileshare_name + "/" +
                           fileshare_directory_name_upload)
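A short usage sketch for the Azure wrapper above (account name, key, share, and directory names are placeholders):

# Hypothetical example values.
azure = Azure('mystorageaccount', '<account-key>', 'myshare')
azure.List_directory('myshare/reports')    # list one directory on the share
azure.Download('downloads', 'reports')     # copy the share directory into ~/downloads
azure.Upload('exports', 'reports')         # push ~/exports/* to the share directory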
Example 3
def getLatestModel(customer, modelName, storage_account_name,
                   storage_account_key):
    fileService = FileService(account_name=storage_account_name,
                              account_key=storage_account_key)
    if fileService.exists('trainedmodels', customer):
        modelTimestampArr = []
        files = fileService.list_directories_and_files('trainedmodels',
                                                       customer + '/' +
                                                       modelName,
                                                       prefix=modelName)

        for file in files:
            date = file.name.split('.')[0].split('_')[1]
            modelTimestampArr.append(date)

        latestModelFileName = modelName + '_' + max(modelTimestampArr) + '.pkl'
        print(latestModelFileName)

        file = fileService.get_file_to_bytes('trainedmodels',
                                             customer + '/' + modelName,
                                             latestModelFileName)
        model = pickle.loads(file.content)['model']
        return model
    else:
        print('Customer or model not found.')
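A hedged example call (customer, model name, and storage account details are placeholders; 'trainedmodels' is the share name hard-coded in the function):

# Hypothetical call for illustration only.
model = getLatestModel('contoso', 'churn', 'mystorageaccount', '<account-key>')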
Example 4
def _get_files_from_afs(cli_ctx, afs, path, expiry):
    """Returns a list of files and directories under given path on mounted Azure File share.

    :param models.AzureFileShareReference afs: Azure file share reference.
    :param str path: path to list files from.
    :param int expiry: SAS expiration time in minutes.
    """
    from azure.storage.file import FileService
    from azure.storage.file.models import File, FilePermissions
    result = []
    service = FileService(afs.account_name, _get_storage_account_key(cli_ctx, afs.account_name, None))
    share_name = afs.azure_file_url.split('/')[-1]
    effective_path = _get_path_for_storage(path)
    if not service.exists(share_name, effective_path):
        return result
    for f in service.list_directories_and_files(share_name, effective_path):
        if isinstance(f, File):
            sas = service.generate_file_shared_access_signature(
                share_name, effective_path, f.name, permission=FilePermissions(read=True),
                expiry=datetime.datetime.utcnow() + datetime.timedelta(minutes=expiry))
            result.append(
                LogFile(
                    f.name, service.make_file_url(share_name, effective_path, f.name, 'https', sas),
                    False, f.properties.content_length))
        else:
            result.append(LogFile(f.name, None, True, None))
    return result
Example 5
 def upload(path: str):
     from azure.storage.file import FileService
     service = FileService(account_name=config['account_name'],
                           account_key=config['account_key'])
     if shared_directory not in (entry.name for entry in
                                 service.list_directories_and_files(config['share_name'])):
         service.create_directory(config['share_name'], shared_directory)
     service.create_file_from_path(config['share_name'], shared_directory,
                                   path.split('/')[-1], path)
Example 6
def file():
    static_dir_path = "D:\home\site\wwwroot\static"
    static_file_dir_path = static_dir_path + '\\' + 'files'
    account_name = 'hanastragetest'
    account_key = 'account_key'
    root_share_name = 'root'
    share_name = 'images'
    directory_url = 'https://hanastragetest.file.core.windows.net/' + root_share_name + '/' + share_name

    # create local save directory
    if not os.path.exists(static_file_dir_path):
        os.mkdir(static_file_dir_path)

    file_service = FileService(account_name=account_name,
                               account_key=account_key)
    # create share
    file_service.create_share(root_share_name)

    # create directory
    file_service.create_directory(root_share_name, share_name)

    files = os.listdir(static_dir_path)
    for file in files:
        # delete
        if file_service.exists(root_share_name, share_name, file):
            file_service.delete_file(root_share_name, share_name, file)

        # file upload
        file_service.create_file_from_path(
            root_share_name,
        share_name,  # directory under the root share to create the file in
            file,
            static_dir_path + '\\' + file,
            content_settings=ContentSettings(content_type='image/png'))

    generator = file_service.list_directories_and_files(
        root_share_name, share_name)

    html = ""
    for file in generator:
        # file download
        file_save_path = static_file_dir_path + '\\' + file
        file_service.get_file_to_path(root_share_name, share_name, file,
                                      file_save_path)
        html = "{}<img src='{}'>".format(html, file_save_path)

    result = {
        "result": True,
        "data": {
            "file_or_dir_name":
            [file_or_dir.name for file_or_dir in generator]
        }
    }
    return make_response(json.dumps(result, ensure_ascii=False) + html)
Example 7
def num1():
    t = {}
    i = 1
    file_service = FileService(
        account_name='mystorge',
        account_key=
        '0T4f/dzyV7AIw4a9bevK5ysML0qP55CEWEqJyJWXyr6fKRxowLq8tL7mep/MfSc//mcQggeH1+K79A4HUDug3w=='
    )
    generator = file_service.list_directories_and_files('image1')
    for file_or_dir in generator:
        t[i] = file_or_dir.name
        i += 1
    return render_template("table.html", t=t)
Example 8
def clean(shared_directory, config_path, remove_directory):
    config_path = os.path.expanduser(config_path)
    with open(config_path) as f:
        config = json.load(f)

    from azure.storage.file import FileService
    service = FileService(account_name=config['account_name'],
                          account_key=config['account_key'])
    if service.exists(config['share_name'], shared_directory):
        for file in service.list_directories_and_files(config['share_name'],
                                                       shared_directory):
            service.delete_file(config['share_name'], shared_directory,
                                file.name)
        if remove_directory:
            service.delete_directory(config['share_name'], shared_directory)
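A hedged example call (the directory name and config path are hypothetical; the JSON config must provide the account_name, account_key, and share_name keys read above):

# Deletes every file in 'old-results' on the share, then the directory itself.
clean('old-results', '~/azure_share_config.json', remove_directory=True)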
Example 9
def download_data():
    file_service = FileService(
        account_name='personimagedata',
        account_key=
        'RNlmOs2+a0l0Vb2Ogxi3fSMA/LKsHgutsgvQE6p/Nu5OrXhmma8XNfnT3qFobkpGqYTecz75MXrmkoTThScJ+w=='
    )
    generator = file_service.list_directories_and_files(SERVER_FOLDER)
    downloaded_files = set(
        filename for filename in os.listdir(os.getcwd() + '/' + DATAFILE))
    for file_or_dir in generator:
        print(file_or_dir.name)
        if file_or_dir.name not in downloaded_files:
            file_service.get_file_to_path(SERVER_FOLDER, None,
                                          file_or_dir.name,
                                          DATAFILE + file_or_dir.name)
Example 10
def load_data(source_dir='./data/final_project',
              scale_configs=True,
              data_from='local'):

    data_dim = 1
    configs = []
    learning_curves = []

    if data_from == 'local':
        for fn in glob.glob(os.path.join(source_dir, "*.json")):
            with open(fn, 'r') as fh:
                tmp = json.load(fh)
                configs.append(tmp['config'])  # list of dicts
                learning_curves.append(tmp['learning_curve'])
    elif data_from == 'azure':
        file_service = FileService(
            account_name='jochenstorage',
            account_key=
            '7onD5l6X5dfmfUC+gXJCnKRn5AwPRXz8lEDaIsTVH5Di5y0wFTWgitc6Rq2TV85Zjx+EOMlRJYsxFHt1R41qPA=='
        )
        generator = file_service.list_directories_and_files(
            'jochenfileshare/dlproject_data/final_project')
        for file_or_dir in generator:
            # print(file_or_dir.name)
            myfile = file_service.get_file_to_bytes(
                'jochenfileshare', 'dlproject_data/final_project',
                file_or_dir.name)
            tmp = json.loads(myfile.content.decode("utf-8"))
            # tmp = json.loads(myfile.content)
            configs.append(tmp['config'])  # list of dicts
            learning_curves.append(tmp['learning_curve'])
    else:
        print("invalid source ", data_from)

    configs = configs_to_arr(configs)  # from list of dicts to np.array

    if scale_configs:
        print("scaling configuration data")
        configs = preprocessing.scale(configs)

    lcs = np.array(learning_curves)
    Y = lcs[:, -1]

    # Keras LSTM expects data as [sample_no, timesteps, feature_no (X.shape[1]) ]
    lcs = lcs.reshape(lcs.shape[0], lcs.shape[1], data_dim)
    Y = Y.reshape(Y.shape[0], 1)

    return configs, lcs, Y
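An example call with the defaults shown above (local JSON files; pass data_from='azure' to read the same data from the file share instead):

configs, lcs, Y = load_data(source_dir='./data/final_project',
                            scale_configs=True,
                            data_from='local')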
Example 11
def discover_resources(**kwargs):
    discovered_azure_sql = []
    for handler in AzureARMHandler.objects.all():
        set_progress('Connecting to Azure storage \
        files for handler: {}'.format(handler))
        credentials = ServicePrincipalCredentials(
            client_id=handler.client_id,
            secret=handler.secret,
            tenant=handler.tenant_id
        )
        azure_client = storage.StorageManagementClient(
            credentials, handler.serviceaccount)
        azure_resources_client = resources.ResourceManagementClient(
            credentials, handler.serviceaccount)

        for resource_group in azure_resources_client.resource_groups.list():
            try:
                for st in azure_client.storage_accounts.list_by_resource_group(resource_group.name)._get_next().json()['value']:
                    res = azure_client.storage_accounts.list_keys(
                        resource_group.name, st['name'])
                    keys = res.keys
                    file_service = FileService(
                        account_name=st['name'], account_key=keys[1].value)
                    for share in file_service.list_shares():
                        for file in file_service.list_directories_and_files(share_name=share.name).items:
                            if type(file) is File:
                                discovered_azure_sql.append(
                                    {
                                        'name': share.name + '-' + file.name,
                                        'azure_storage_file_name': file.name,
                                        'azure_file_identifier': share.name + '-' + file.name,
                                        'azure_storage_file_share_name': share.name,
                                        'resource_group_name': resource_group.name,
                                        'azure_rh_id': handler.id,
                                        'azure_storage_account_name': st['name'],
                                        'azure_account_key': keys[0].value,
                                        'azure_account_key_fallback': keys[1].value
                                    }
                                )
            except Exception:
                continue

    return discovered_azure_sql
Example 12
 def cleanupBackups(self):
     result = messagebox.askquestion('You are about to save the changes', 'Are you sure?', icon='warning')
     if result == 'yes':
         # files = []
         # for (dirpath, dirname, filenames) in walk(self.gfwlistFileDir):
         #     files.extend(filenames)
         # for f in files:
         #     print (f)
         
         # the following is for cleaning up files locally
         # bkups = glob.glob(os.path.join(self.gfwlistFileDir, '*.bk'))
         # for f in bkups[:len(bkups)-1]:
         #     os.remove(f)
         
         azureFileService = FileService(account_name=self.azureAccountName, account_key=self.azureAccountKey)
         generator = azureFileService.list_directories_and_files(self.azureFileShareName, self.azureFileShareFileDir)
         for fileOrDir in generator:
             if (fileOrDir.name.endswith('.bk')):
                 azureFileService.delete_file(self.azureFileShareName, self.azureFileShareFileDir, fileOrDir.name)
Example 13
def downloadStoredFiles(config, accountKey, sourceDir, targetDir):
    fs = FileService(account_name=config['STORAGE_ACCOUNT'],
                     account_key=accountKey)
    storageLoc = config['STORAGE_LOCATION']
    if not path.exists(targetDir):
        makedirs(targetDir)
    print(
        f'\nFileService: reading files from Azure Storage location="{storageLoc}" directory="{sourceDir}"'
    )
    if not fs.exists(storageLoc, sourceDir):
        return
    dirsFiles = fs.list_directories_and_files(storageLoc, sourceDir)
    fileNames = [
        df.name for df in dirsFiles
        if df.name.endswith('.txt') or df.name.endswith('.csv')
    ]
    for fname in fileNames:
        if path.exists(path.join(targetDir, fname)):
            print(f'already got file={fname}')
        else:
            print(f'downloading file={fname}')
            fs.get_file_to_path(storageLoc, sourceDir, fname,
                                path.join(targetDir, fname))
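A hedged example call (the config values, key, and paths are placeholders; STORAGE_ACCOUNT and STORAGE_LOCATION match the keys the function reads):

config = {'STORAGE_ACCOUNT': 'mystorageaccount', 'STORAGE_LOCATION': 'myshare'}
downloadStoredFiles(config, '<account-key>', 'exports', './downloads')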
Example 14
 def list(self, container_or_share_name, container=None, account=None):
     """List the blobs/files inside a container/share_name.
      Args:
          container_or_share_name(str): Name of the container/share_name where we want to list the blobs/files.
           container(bool): flag to know if you are listing blobs (True) or files (False).
          account(str): The name of the storage account.
     """
     key = self.storage_client.storage_accounts.list_keys(
         self.resource_group_name, account).keys[0].value
     if container:
         bs = BlockBlobService(account_name=account, account_key=key)
         container_list = []
         for i in bs.list_blobs(container_or_share_name).items:
             container_list.append(i.name)
         return container_list
     elif container is False:
         fs = FileService(account_name=account, account_key=key)
         container_list = []
         for i in fs.list_directories_and_files(
                 container_or_share_name).items:
             container_list.append(i.name)
         return container_list
     else:
         raise ValueError("You have to pass a value for container param")
Example 15
import configparser
from azure.storage.file import FileService

configs = configparser.ConfigParser()
configs.read("C:/ws/filescripts/config.ini")

storage_key = configs.get("Azure", "storage-key")
share_name = configs.get("Azure", "share-name")
storage_name = configs.get("Azure", "storage-name")

remote_data_path = 'prod/WeeklyReports'
local_data_path = 'C:/data/ford/WeeklyReports/'

file_service = FileService(account_name=share_name, account_key=storage_key)
file_service.set_proxy(host='localhost', port='3128')

shares = list(
    file_service.list_directories_and_files(storage_name, remote_data_path))
file_list = list(map(lambda x: x.name, shares))

for fn in file_list:
    file_service.get_file_to_path(storage_name, remote_data_path, fn,
                                  local_data_path + fn)
# fn = 'MonthlyReport_June_2017.txt'
# file_service.get_file_to_path(storage_name, remote_data_path, fn, local_data_path+fn)
Example 16
#connect to your storage account
from azure.storage.file import FileService
from azure.storage.file import ContentSettings
import os

# Set up
directory = os.fsencode('C:/Users/luopa/Desktop/Cognitive/result')
file_service = FileService(
    account_name='eyc3blob',
    account_key=
    '8lWwXYYTbRtpg02HRd/DiVrSfyF4xNqtGmUzIy1GkLLOG1tXsXGLZG2KbDCz3XerLM1xPV4nl62YgmhAYAAUTQ=='
)

# Create result directory
# file_service.create_directory(share_name='eyc3file/speech', directory_name='result')

# Looping through the documents
generator = file_service.list_directories_and_files('eyc3file/speech')
for file in os.listdir(directory):
    filename = os.fsdecode(file)
    print(filename)
    file_service.create_file_from_path(
        share_name='eyc3file/speech',
        directory_name='result',
        file_name=filename,
        local_file_path='C:/Users/luopa/Desktop/Cognitive/result' + '/' +
        filename)
Example 17
class StorageHelper(object):
    """Handle details related to a single storage account and share.
    Instantiate this object with information sufficient to
    uniquely identify a storage account and a file share within it.
    Then .account can be used to retrieve the Azure SDK for Python
    object corresponding to the account, and .key can be used to
    get an access key for it.
    For both those properties, if the value mentioned doesn't exist,
    it will be created upon first property access.
    """
    def __init__(self,
                 client_data,
                 resource_helper,
                 name,
                 account=None,
                 default_share='share'):
        self.name = name
        self.default_share = default_share
        self._account = account
        self._key = os.environ.get('AZURE_STORAGE_KEY')
        self.resource_helper = resource_helper
        self.client = StorageManagementClient(*client_data)
        self.file_service = FileService(
            account_name=self.account.name,
            account_key=self.key,
        )

    @property
    def account(self):
        """Return the managed StorageAccounts object.
        If no such account exists, create it first.
        """
        if self._account is None:
            print('Creating storage account...')
            # Error to create storage account if it already exists!
            name_check = self.client.storage_accounts.check_name_availability(
                self.name)
            if name_check.name_available:
                storage_creation = self.client.storage_accounts.create(
                    self.resource_helper.group.name, self.name,
                    StorageAccountCreateParameters(
                        sku=StorageAccountSku(StorageSkuName.standard_lrs),
                        kind=StorageKind.storage,
                        location=self.resource_helper.group.location,
                    ))
                storage = storage_creation.result()
            else:
                try:
                    storage = self.client.storage_accounts.get_properties(
                        self.resource_helper.group.name, self.name)
                except CloudError:
                    print('Storage account {} already exists'
                          ' in a resource group other than {}.'.format(
                              self.name, self.resource_helper.group.name))
            print('Got storage account:', storage.name)
            self._account = storage
        return self._account

    @property
    def key(self):
        """Get the first available storage key.
        This will crash if there are no available storage keys,
        which is unlikely since two are created along with a storage account.
        """
        if self._key is None:
            storage_keys = self.client.storage_accounts.list_keys(
                self.resource_helper.group.name, self.account.name)
            self._key = next(iter(storage_keys.keys)).value
        return self._key

    def upload_file(self, path, sharename):
        """Upload a file into the default share on the storage account.
        If the share doesn't exist, create it first.
        """

        self.file_service.create_file_from_path(
            self.default_share if sharename is None else sharename,
            None,
            os.path.basename(path),
            path,
        )
        return '/'.join([self.default_share, os.path.basename(path)])

    def download_file(self, sharename, filename):
        self.file_service.get_file_to_path(sharename, None, filename, filename)

    def delete_file(self, sharename, filename):
        self.file_service.delete_file(sharename, None, filename)

    def create_share(self, sharename):
        self.file_service.create_share(sharename)

    def create_directory(self, sharename, directoryname):
        self.file_service.create_directory(sharename, directoryname)

    def list_directories_and_files(self, sharename):
        generator = self.file_service.list_directories_and_files(sharename)
        return [file_or_dir.name for file_or_dir in generator]

    def list_shares(self):
        shares = list(self.file_service.list_shares(include_snapshots=True))
        sharelist = [fileshare.name for fileshare in shares]
        print(sharelist)
        return sharelist
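A minimal wiring sketch for StorageHelper (the credentials, subscription id, and resource_helper object are assumed to come from elsewhere in the sample and are hypothetical here):

# Hypothetical objects: 'credentials' and 'subscription_id' feed StorageManagementClient,
# 'resource_helper' exposes .group.name and .group.location.
helper = StorageHelper((credentials, subscription_id), resource_helper, 'mystorageacct')
helper.create_share('share')
remote_path = helper.upload_file('/tmp/report.csv', 'share')
print(helper.list_directories_and_files('share'))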
Example 18
file_service = FileService(account_name='covidmodels', account_key='')

data = []
labels = []

import os
kaggle_data = os.listdir('./kaggle_data/')
azure_data = file_service.list_directories_and_files('covid-share/data')

for i, file_or_dir in enumerate(azure_data):
    print(f'COVID - {file_or_dir.name} | NORMAL - {kaggle_data[i]}')

    # Getting file from storage
    byte_data = file_service.get_file_to_bytes('covid-share', 'data',
                                               file_or_dir.name).content
    np_bytes = np.frombuffer(byte_data, np.uint8)

    # Reshape
    az_img = cv2.imdecode(np_bytes, cv2.IMREAD_COLOR)
    az_img = cv2.resize(az_img, (224, 224))

    data.append(az_img)
    labels.append(1)
Example 19
class AzureFileManager():
    def __init__(self):
        # fetch config data
        conf = Configuration()
        # create Azure File share service
        self.file_service = FileService(
            account_name=conf.account_name, account_key=conf.account_key)
        # set azure share file name (container)
        self.file_share = conf.file_share

    def upload_file(self, upload_path, file_path):
        if not os.path.isfile(file_path):
            print("Your file is not exists, check your file path and try again.")
            return
        filename = os.path.basename(file_path)
        # strip quotes from the path; an empty path ("" or '') becomes upload_path=None, which uploads to the root directory
        upload_path = upload_path.strip().replace("'", '').replace('"', '')
        # remove start and end / or \
        if upload_path.endswith('/') or upload_path.endswith('\\'):
            upload_path = upload_path[:-1]
        if upload_path.startswith('/') or upload_path.startswith('\\'):
            upload_path = upload_path[1:]
        # sanity check
        upload_path = upload_path if len(upload_path) >= 1 else None

        print("Start uploading...")
        try:
            # create sub directories
            self.create_sub_directories(upload_path)
            # upload
            self.file_service.create_file_from_path(
                share_name=self.file_share,  # file_share name in azure
                directory_name=upload_path,  # server directories address. None => root directory
                file_name=filename,          # Name of file to create in azure
                local_file_path=file_path)
            print("'{0}' has been successfully uploaded".format(filename))
        except Exception:
            print("Failed to upload '{0}', please try again".format(filename))

    def download_file(self, file_path):
        """ download file from azure, enter file path in azure """
        # check file path was not empty
        file_path = file_path.strip().replace("'", '').replace('"', '')
        if len(file_path) == 0:
            print("Please enter a file path")
            return
        filename = os.path.basename(file_path)
        dir_path = os.path.dirname(file_path)
        # if parent path was not available, use None => root directory
        dir_path = dir_path if dir_path else None

        print("Downloading...")
        try:
            self.file_service.get_file_to_path(
                share_name=self.file_share,
                directory_name=dir_path,  # The path to the directory in azure
                file_name=filename,  # Name of existing file in azure
                # Path of file to write to local machine
                file_path="{0}".format(filename))
            print(
                "'{0}' has been successfully downloaded and saved in current directory.".format(filename))
        except Exception:
            print("Failed to download '{0}', either file doesn't exist or you are offline.".format(
                filename))

    def get_list_of_files(self, dir_name=None):
        """ show list of all files and all directories in azure"""
        generator = self.file_service.list_directories_and_files(
            share_name=self.file_share,
            directory_name=dir_name)
        parent = "" if dir_name == None else dir_name
        for file_or_dir in generator:
            # crude name-based heuristic: a name that breaks the simple letters/digits
            # pattern (e.g. one containing a file-extension dot) is treated as a file,
            # everything else is recursed into as a directory
            if not re.match(r"(.[a-z]*[A-Z]*[0-9]*)$", file_or_dir.name):
                # file
                if len(parent) == 0:
                    print(file_or_dir.name)
                else:
                    print("{0}/{1}".format(parent, file_or_dir.name))
            else:
                # dir
                if len(parent) == 0:
                    self.get_list_of_files(file_or_dir.name)
                else:
                    self.get_list_of_files(
                        "{0}/{1}".format(parent, file_or_dir.name))

    def create_sub_directories(self, path):
        """ create sub directories in Azure """
        if path is None:
            return
        dirs = os.path.normpath(path).split(os.path.sep)
        parent = ''
        for dir in dirs:
            parent += dir if len(parent) == 0 else '/'+dir
            self.file_service.create_directory(self.file_share, parent)
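A short usage sketch (assumes a Configuration object that provides account_name, account_key, and file_share; the paths below are placeholders):

manager = AzureFileManager()
manager.upload_file('reports/2021', '/tmp/report.csv')   # creates reports/2021 on the share, then uploads
manager.get_list_of_files()                              # recursive listing from the share root
manager.download_file('reports/2021/report.csv')         # saved into the current working directory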
Example 20
from azure.storage.file import FileService

file_service = FileService(account_name="account_name",
                           account_key="account_key")

# List the files and directories on the share
generator = file_service.list_directories_and_files('my-file')
for file_or_dir in generator:
    print(file_or_dir.name)

# Upload the local test.txt to Azure Storage
from azure.storage.file import ContentSettings
file_service.create_file_from_path(
    'my-file',
    None,
    'test.txt',
    'test.txt',
    content_settings=ContentSettings(content_type='text/plain'))
Example 21
class AFSLoader():
    def __init__(self, local_root: Path, afs_creds: dict = None):
        if afs_creds is None:
            afs_creds = get_afs_creds()
        self.afs_name = afs_creds["AFS_NAME"]
        self.afs_key = afs_creds["AFS_KEY"]
        self.afs_share = afs_creds["AFS_SHARE"]
        self.file_service = FileService(account_name=self.afs_name,
                                        account_key=self.afs_key)
        self.local_root = Path(local_root)

    def get_afs_creds(self):
        return self.afs_name, self.afs_key, self.afs_share

    def upload_data_afs(self, data_path: Path, push_data: bool = False):
        """
        Copy data to the AFS directory.

        :param data_path: <Path>. Specify your path to the local data folder.
        :param push_data: if True, re-upload (overwrite) the data even if it already exists.
        :return: path of the directory in the AFS share.
        """
        logging.info("Sending data to AFS")
        checksum = md5_dir(data_path)[:10]
        afs_path = time.strftime("%Y-%m-%d-%H.%M") + '-' + checksum

        list_folder = self.file_service.list_directories_and_files(
            self.afs_share)
        for folder in list_folder:
            if checksum == folder.name[-10:]:
                logging.info("Folder for data already exist!")
                afs_path = folder.name
                logging.info("Data is in the AFS {}".format(folder.name))
                if push_data:
                    logging.warning("Rewriting data")
                    afs_path = folder.name
                else:
                    return afs_path
        self.file_service.create_directory(share_name=self.afs_share,
                                           directory_name=afs_path)

        for file in Path(data_path).iterdir():
            progress_callback = lambda current, total: logbar(
                current, total, f"Uploading {file.name}")
            self.file_service.create_file_from_path(
                share_name=self.afs_share,
                directory_name=afs_path,
                file_name=file.name,
                local_file_path=str(file),
                max_connections=cpu_count(),
                progress_callback=progress_callback)
        logging.info("Sending is over")
        return afs_path

    def download_data_afs(self, afs_path: Path, dst_path: Path = None):
        afs_path = Path(afs_path)
        if not dst_path:
            assert self.local_root is not None
            dst_path = self.local_root

        list_folder = self.file_service.list_directories_and_files(
            self.afs_share, directory_name=afs_path)
        try:
            os.mkdir(self.local_root / afs_path)
        except FileExistsError:
            print(f"Directory {self.local_root / afs_path} was rewritten ")
        for file in list_folder:
            progress_callback = lambda current, total: logbar(
                current, total, f"Downloading {file.name}")
            self.file_service.get_file_to_path(
                share_name=self.afs_share,
                directory_name=afs_path,
                file_name=file.name,
                file_path=str(dst_path / afs_path / file.name),
                progress_callback=progress_callback)
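A hedged usage sketch (the credentials dict is a placeholder; md5_dir and logbar are helpers from the surrounding module):

from pathlib import Path

creds = {"AFS_NAME": "myaccount", "AFS_KEY": "<account-key>", "AFS_SHARE": "myshare"}
loader = AFSLoader(local_root="./afs_cache", afs_creds=creds)
remote_dir = loader.upload_data_afs(Path("./data"))   # e.g. '2021-05-01-12.30-<checksum>'
loader.download_data_afs(remote_dir)                  # mirrors the folder under ./afs_cache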
Example 22
  print("Copy des fichiers de sauvegarde sur le répertoire Microsoft AZURE OK !")
  logging.debug("Copy des fichiers de sauvegarde sur le répertoire Microsoft AZURE OK !") # warning 
#  syslog.syslog(syslog.LOG_DEBUG,"Copy des fichiers de sauvegarde sur le répertoire Microsoft AZURE OK !") # warning 

# suppression des fichiers de sauvegarde

  os.remove(repertoire_de_sauvegarde+"/save_"+str(BACKUP_DATE)+"db.sql")
  print("Suppression du fichier "+BACKUP_DATE+"db.sql")
  os.remove(repertoire_de_sauvegarde+"/save_"+str(BACKUP_DATE)+".tar.bz2")
  print("Suppression du fichier "+BACKUP_DATE+".tar.bz2")

# Liste des fichiers ou répertoires de Microsoft AZURE et suppression des anciennes sauvegardes en fonction du nombre de jour 

  print("Liste des sauvegardes: ")
  list_file = file_service.list_directories_and_files(AZURE_REP_BKP)
  for file_or_dir in list_file:
    if ('save_'+str(BACKUP_DATE_OLD)) in file_or_dir.name:
      file_service.delete_file(AZURE_REP_BKP,'save_'+str(BACKUP_DATE_OLD),'save_'+str(BACKUP_DATE_OLD)+'db.sql')
      file_service.delete_file(AZURE_REP_BKP,'save_'+str(BACKUP_DATE_OLD),'save_'+str(BACKUP_DATE_OLD)+'.tar.bz2')
      file_service.delete_directory(AZURE_REP_BKP,'save_'+str(BACKUP_DATE_OLD))
    else:
      print("")
      print(file_or_dir.name)
      logging.warning(file_or_dir.name) # warning 
#      syslog.syslog(syslog.LOG_WARNING, file_or_dir.name) # warning 

  print("")
  print("La sauvegarde c'est terminé correctement !")
  logging.warning("La sauvegarde c'est terminé correctement !") # warning
#  syslog.syslog(syslog.LOG_WARNING,"La sauvegarde c'est terminé correctement !") # warning
Example 23
class ModelGymClient:
    config = {}
    project_root = ""
    project_name = ""
    user = ""

    def __init__(self,
                 config=None,
                 config_path=MODELGYM_CONFIG["default_config_path"]):
        if config_path:
            self.config = self.__config_by_path(config_path)
        if type(config) is dict:
            self.config.update(config)
        else:
            if config:
                raise TypeError("config must be dictionary!")

        project_root = Path(self.config["local_project_root"]).expanduser()
        self.project_root = project_root
        self.project_name = Path(self.project_root.parts[-1])
        if not project_root.is_dir():
            project_root.mkdir(parents=True, exist_ok=True)
        user_folder = self.project_root / self.config["user"]
        self.user = self.config["user"]
        if not user_folder.is_dir():
            user_folder.mkdir(parents=True, exist_ok=True)

        # self.stub = new_client()
        self.file_service = FileService(
            account_name=self.config['azurefs_acc_name'],
            account_key=self.config['azurefs_acc_key'])
        self.afs_share = self.config['azurefs_share']
        self.__get_client_transport_credentials(
            str(Path(self.config["client_cert"]).expanduser()),
            str(Path(self.config["client_key"]).expanduser()),
            str(Path(self.config["ca_cert"]).expanduser()))
        self.channel = grpc.secure_channel(
            self.config["connect_to"],
            self.creds,
            options=(
                ('grpc.max_send_message_length',
                 self.config["max_msg_size_megabytes"]),
                ('grpc.max_receive_message_length',
                 self.config["max_msg_size_megabytes"]),
            ))
        self.stub = wonderland_pb2_grpc.WonderlandStub(self.channel)
        self.check_user()

    def check_user(self):
        list_folder = self.file_service.list_directories_and_files(
            self.afs_share)
        for folder in list_folder:
            if self.user == folder.name:
                return True
        self.file_service.create_directory(share_name=self.afs_share,
                                           directory_name=self.user)
        return True

    def __get_client_transport_credentials(self, client_cert_path,
                                           client_key_path, ca_cert_path):
        client_cert_path = Path(client_cert_path).expanduser()
        client_key_path = Path(client_key_path).expanduser()
        ca_cert_path = Path(ca_cert_path).expanduser()
        path_ok = [
            client_cert_path.exists(),
            client_key_path.exists(),
            ca_cert_path.exists()
        ]
        if not all(path_ok):
            raise ValueError("One of credentials files does not exist")
        self.creds = grpc.ssl_channel_credentials(
            ca_cert_path.read_bytes(), client_key_path.read_bytes(),
            client_cert_path.read_bytes())

    def __config_by_path(self, path):
        path = Path(path).expanduser()
        if path.exists():
            with path.open() as file:
                config = yaml.safe_load(file)
            return config
        else:
            raise FileNotFoundError(
                "Config {} doesn't exist !!! Check ~/.wonder/config.yaml".
                format(path))

    def eval_model(self, model_info, data_path):
        model_path = self.send_model(model_info)
        job = Job(input=json.dumps({
            "model_path": str(model_path),
            "data_path": str(data_path)
        }),
                  kind="hyperopt")
        job = self.stub.CreateJob(job)
        self.stub.GetJob(RequestWithId(id=job.id))
        return job.id

    def gather_results(self, job_id_list, timeout):
        job_completed = {job_id: Job.PENDING for job_id in job_id_list}
        deadline = time.time() + timeout
        while True:
            time.sleep(5)
            for id in job_id_list:
                job = self.stub.GetJob(RequestWithId(id=id))
                job_completed[id] = job.status
            if not any(s in job_completed.values()
                       for s in (Job.PENDING, Job.RUNNING, Job.PULLED)):
                break
            if time.time() > deadline:
                print("Timeout was expired!")
                break

        results = []
        for i, id in enumerate(job_id_list):
            job = self.stub.GetJob(RequestWithId(id=id))
            if job.status == Job.COMPLETED:
                results += [{}]
            else:
                results.append(None)
            files = {}
            if job.output != "":
                files = json.loads(job.output)
            for file, path in files.items():
                self.file_service.get_file_to_path(
                    share_name=self.afs_share,
                    directory_name=Path(path).parent,
                    file_name=Path(path).name,
                    file_path=str(self.project_root / path))
                if file == 'output':
                    with open(self.project_root / path, "r") as f:
                        results[i]['output'] = json.load(f)
                if file == 'result_model_path':
                    results[i]['result_model_path'] = self.project_root / path
                if file == 'error':
                    with open(self.project_root / path, "r") as f:
                        logging.warning(f.read())
        return results

    def send_model(self, model_info):
        folder = "model-" + ''.join([
            random.choice(string.ascii_letters + string.digits)
            for _ in range(12)
        ])
        model_path = self.project_root / self.user / folder / MODELGYM_CONFIG[
            "model_file"]
        try:
            model_folder = model_path.parent
            model_folder.mkdir()
        except FileExistsError:
            logging.warning("Model folder {} is exist !".format(model_folder))
        except FileNotFoundError:
            logging.warning(
                "Model folder {} is missing !".format(model_folder))
        with (model_path).open(mode="w") as file:
            json.dump(model_info, file, cls=NumpyEncoder)
        afs_path = Path(self.user) / folder / MODELGYM_CONFIG["model_file"]
        self.file_service.create_directory(share_name=self.afs_share,
                                           directory_name=afs_path.parent)
        self.file_service.create_file_from_path(share_name=self.afs_share,
                                                directory_name=afs_path.parent,
                                                file_name=afs_path.name,
                                                local_file_path=model_path,
                                                max_connections=cpu_count())
        return afs_path

    def send_data(self, data_path, push_data=False):
        """
        Copy data to the AFS DATA directory.

        :param data_path: <string>. Specify your data path as a string.
        :return: path in the AFS share.
        """
        logging.info("Sending data to AFS")
        checksum = get_data_hash(data_path)[:10]
        data_folder = time.strftime("%Y-%m-%d-%H.%M") + '-' + checksum
        afs_path = Path(MODELGYM_CONFIG["data_folder"]
                        ) / data_folder / MODELGYM_CONFIG["data_file"]

        list_folder = self.file_service.list_directories_and_files(
            self.afs_share, directory_name="DATA")
        for folder in list_folder:
            if checksum == folder.name[-10:]:
                logging.info("Folder for data already exist!")
                afs_path = Path(
                    "DATA") / folder.name / MODELGYM_CONFIG["data_file"]
                logging.info("Data is in the AFS {}".format(folder.name))
                if push_data:
                    logging.warning("Rewriting data")
                    afs_path = Path(
                        MODELGYM_CONFIG["data_folder"]
                    ) / folder.name / MODELGYM_CONFIG["data_file"]
                else:
                    return afs_path
        self.file_service.create_directory(share_name=self.afs_share,
                                           directory_name=afs_path.parent)
        self.file_service.create_file_from_path(share_name=self.afs_share,
                                                directory_name=afs_path.parent,
                                                file_name=afs_path.name,
                                                local_file_path=data_path,
                                                max_connections=cpu_count(),
                                                progress_callback=logbar)
        logging.info("Sending is over")
        return afs_path

    def from_project_root_path(self, path):
        path = Path(path)
        # if not path.exists():
        # logging.warning("{} is missing !!".format(path))
        try:
            relative_path = path.relative_to(self.project_root.parent)
            return str(relative_path)
        except ValueError:
            logging.warning("Path doesn't have project_root folder {}".format(
                self.project_root))
Example 24
    # Check if AZURE_BACKUP_FOLDER exists, if not create it
    if not file_service.exists(AZURE_SHARE_NAME, AZURE_BACKUP_FOLDER):
        file_service.create_directory(AZURE_SHARE_NAME, AZURE_BACKUP_FOLDER)

    # Upload
    print("uploading to: '%s/%s/%s'" %
          (AZURE_SHARE_NAME, AZURE_BACKUP_FOLDER, FILENAME))
    file_service.create_file_from_path(AZURE_SHARE_NAME,
                                       AZURE_BACKUP_FOLDER,
                                       FILENAME,
                                       FILENAME,
                                       progress_callback=upload_callback)

    # Cleaning Backup Files
    backup_files = file_service.list_directories_and_files(
        AZURE_SHARE_NAME, AZURE_BACKUP_FOLDER)
    filenames = []
    for file in backup_files:
        filenames.append(file.name)

    files_to_delete = []
    if len(filenames) >= AZURE_KEEP_BACKUPS:
        files_to_delete = filenames[:(len(filenames) - AZURE_KEEP_BACKUPS)]
        for file in files_to_delete:
            file_service.delete_file(AZURE_SHARE_NAME, AZURE_BACKUP_FOLDER,
                                     file)
    end = time.time()
    duration = int(end - start)
    slack_message(success=True, duration=duration, deleted=files_to_delete)
except Exception as e:
    print(e)