Example 1
def file_list(load):
    '''
    Return a list of all files on the file server in a specified environment
    '''
    ret = []
    # Default to a dict, since the value is indexed by saltenv name below
    envs = __opts__.get('azurefs_envs', {})
    storage_conn = azure.get_storage_conn(opts=envs[load['saltenv']])
    result = azure.list_blobs(
        storage_conn=storage_conn,
        container=load['saltenv'],
    )
    for blob in result:
        ret.append(blob)
    return ret
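
For context: __opts__ is injected by the Salt loader and azure refers to the
backend's Azure helper module, so the function is not standalone. A minimal
sketch of how it could be driven, with hypothetical stubs standing in for the
real helpers and configuration (names and return shapes here are assumptions,
for illustration only):

import types

# Hypothetical stand-ins for the Salt loader dunder and the Azure helper
azure = types.SimpleNamespace(
    get_storage_conn=lambda opts: object(),
    list_blobs=lambda storage_conn, container: {'top.sls': {}, 'web/init.sls': {}},
)
__opts__ = {'azurefs_envs': {'base': {'account': 'acct', 'key': 'key'}}}

print(file_list({'saltenv': 'base'}))  # ['top.sls', 'web/init.sls']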
Example 2
def dir_list(load):
    '''
    Return a list of all directories on the master
    '''
    ret = []
    # Default to a dict, since the value is indexed by saltenv name below
    envs = __opts__.get('azurefs_envs', {})
    storage_conn = azure.get_storage_conn(opts=envs[load['saltenv']])
    result = azure.list_blobs(
        storage_conn=storage_conn,
        container=load['saltenv'],
    )
    for blob in result:
        # Blobs at the container root have no parent directory to record
        if '/' not in blob:
            continue
        # Record only the blob's immediate parent directory
        comps = blob.split('/')
        path = '/'.join(comps[:-1])
        if path not in ret:
            ret.append(path)
    return ret
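
Note that only each blob's immediate parent is recorded: a blob named
web/app/init.sls contributes web/app, but not web, unless some other blob sits
directly under web/. The path arithmetic in isolation:

blob = 'web/app/init.sls'
comps = blob.split('/')         # ['web', 'app', 'init.sls']
print('/'.join(comps[:-1]))     # web/app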
Example 3
def update():
    '''
    When we are asked to update (at a regular interval), reap the cache
    '''
    base_dir = os.path.join(salt.syspaths.CACHE_DIR, 'azure')
    if not os.path.isdir(base_dir):
        os.makedirs(base_dir)

    try:
        salt.fileserver.reap_fileserver_cache_dir(
            os.path.join(base_dir, 'hash'),
            find_file
        )
    except (IOError, OSError):
        # Hash file won't exist if no files have yet been served up
        pass

    data_dict = {}
    if os.listdir(base_dir):
        # Find out what the latest file is, so that we only update files more
        # recent than that, and not the entire filesystem

        all_files = []
        for root, _, files in os.walk(base_dir):
            for fn_ in files:
                full_path = os.path.join(root, fn_)
                all_files.append([
                    os.path.getmtime(full_path),
                    full_path,
                ])
        if all_files:
            # Newest file first
            all_files.sort(reverse=True)
            latest_stamp = os.path.getmtime(all_files[0][1])
            format_stamp = time.strftime(
                '%Y-%m-%d %H:%M:%S', time.localtime(latest_stamp)
            )

        # format_stamp is computed above but never consumed; the commented-out
        # query below appears to be a leftover from a different backend
        #data_dict={'sysparm_query': 'sys_updated_on > {0}'.format(format_stamp)}

    # Pull in any files that have changed
    # Default to a dict, since the value is indexed by saltenv name below
    envs = __opts__.get('azurefs_envs', {})
    for env in envs:
        storage_conn = azure.get_storage_conn(opts=envs[env])
        result = azure.list_blobs(
            storage_conn=storage_conn,
            container=env,
        )

        # Write out any new files to disk
        for blob in result:
            file_name = os.path.join(base_dir, blob)

            # Make sure the parent directory exists first
            file_path = os.path.dirname(file_name)
            if not os.path.exists(file_path):
                os.makedirs(file_path)

            # Write out the file
            azure.get_blob(
                storage_conn=storage_conn,
                container=env,
                name=blob,
                local_path=file_name,
            )

            # Parse the blob's HTTP-style Last-Modified value so it can be
            # mirrored onto the cached file's atime/mtime below
            time_stamp = time.mktime(
                time.strptime(
                    result[blob]['properties']['last_modified'][0],
                    '%a, %d %b %Y %H:%M:%S %Z'
                ),
            )
            os.utime(file_name, (time_stamp, time_stamp))
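
The final os.utime() call stamps the cached copy with the blob's remote
last-modified time, so later comparisons against the cache reflect when the
blob changed rather than when it was downloaded. All three examples also
assume a master configuration mapping each saltenv to Azure storage
credentials, roughly of this shape (the key names are assumptions here, not a
confirmed schema):

# Sketch of what __opts__['azurefs_envs'] is expected to contain: one entry
# per saltenv, holding whatever azure.get_storage_conn() needs to connect
azurefs_envs = {
    'base': {
        'storage_account': 'mystorage',   # hypothetical account name
        'storage_key': '<access-key>',    # hypothetical credential
    },
}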