Example #1
import os
import sys

import prettytable
from dotenv import load_dotenv, find_dotenv

import azure_client


def human_readable(num, suffix='B'):
    # Standard binary-prefix formatter, e.g. human_readable(123456789) -> '117.7MiB'.
    # (Head of this example was truncated; reconstructed from the surviving lines below.)
    for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
        if abs(num) < 1024.0:
            return "%3.1f%s%s" % (num, unit, suffix)
        num /= 1024.0
    return "%.1f%s%s" % (num, 'Yi', suffix)


if len(sys.argv) != 2:
    print(
        f'Usage: {__file__} container_name\n\nWill list all files in container "container_name" with tier and size'
    )
    sys.exit(1)

load_dotenv(find_dotenv())

AZURE_URL, AZURE_KEY = os.getenv("AZURE_URL"), os.getenv("AZURE_KEY")
SERVICE = azure_client.connect_service(AZURE_URL, AZURE_KEY)
CONTAINER = azure_client.connect_container(SERVICE, sys.argv[1], create=False)
BLOB_INFO = azure_client.get_blob_list_information(CONTAINER)

TABLE = prettytable.PrettyTable()
TABLE.field_names = ['Filename', 'Tier', 'Size']
TABLE.align = 'l'

total_size = 0

for fn, bt, sz in BLOB_INFO:
    total_size += sz
    # Truncate very long blob names so the table stays readable.
    fn = '.../' + fn[-75:] if len(fn) > 75 else fn
    TABLE.add_row([fn, bt, human_readable(sz)])

TABLE.add_row(['TOTALS', 'NA', human_readable(total_size)])

print(TABLE)
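
The get_blob_list_information helper belongs to the project-local azure_client package and its body is not shown here. Judging only from how BLOB_INFO is consumed above (one (name, tier, size) tuple per blob), a minimal sketch on top of the official azure-storage-blob SDK might look like the following; the function body is an assumption, not the project's actual implementation.

from azure.storage.blob import ContainerClient


def get_blob_list_information(container: ContainerClient):
    # Sketch only: yield (blob name, access tier, size in bytes) for every blob,
    # matching the "for fn, bt, sz in BLOB_INFO" loop above.
    for props in container.list_blobs():
        yield props.name, props.blob_tier, props.size
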
Example #2
import argparse
import logging
import os
import pathlib
from glob import glob

from dotenv import load_dotenv, find_dotenv

import azure_client
from azure_client.logger import log, formatter

load_dotenv(find_dotenv())
AZURE_URL, AZURE_KEY = os.getenv("AZURE_URL"), os.getenv("AZURE_KEY")

# The parser description and the exact flags for --container and --folder are assumptions;
# the top of this example is truncated and only their use further down is visible.
PARSER = argparse.ArgumentParser(
    description='Upload a local folder to an Azure container')
PARSER.add_argument('--container', '-c', required=True)
PARSER.add_argument('--folder', '-f', required=True)
PARSER.add_argument('--strip-base-folder', '-s', action='store_true')
PARSER.add_argument('--workers', '-w', default=1, type=int)
PARSER.add_argument('--debug', '-d', action='store_true')
PARSER.add_argument('--logfile', '-l')
ARGS = PARSER.parse_args()

if ARGS.logfile:
    handler = logging.FileHandler(ARGS.logfile)
    handler.setFormatter(formatter)
    log.addHandler(handler)

if ARGS.debug:
    log.setLevel(logging.DEBUG)
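
# azure_client.logger is project-local; from this usage it presumably exposes a module-level
# log object plus a shared formatter, so the optional --logfile handler above emits records
# in the same format as whatever default handler the module installs.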

SERVICE = azure_client.connect_service(AZURE_URL, AZURE_KEY)
CONTAINER = azure_client.connect_container(SERVICE, ARGS.container)

BLOB_FILENAMES = azure_client.get_blob_manifest(CONTAINER)

# Walk the folder and collect file names, stripping the absolute path prefix before the upload
# to Azure, and optionally cutting off the first folder name when --strip-base-folder is set.
# The flag exists because the container might be named the same as the folder, and it would be
# silly to have a container called e.g. "movies" whose only top-level folder is also "movies".
ABS_FOLDER = pathlib.Path(ARGS.folder)
BASE_ABS_FOLDER = pathlib.Path(ARGS.folder).parents[0]
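# Illustration with assumed paths (not from the original): for --folder /data/movies,
# ABS_FOLDER is /data/movies and BASE_ABS_FOLDER is /data, so /data/movies/action/film.mkv
# is uploaded as movies/action/film.mkv, or as action/film.mkv with --strip-base-folder.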

file_list = [
    path
    for root, _dirs, _files in os.walk(ARGS.folder)
    for path in glob(os.path.join(root, '*'))
    if not os.path.isdir(path)
]

if ARGS.folder.startswith('/'):
    FINAL_FOLDER = str(pathlib.Path(ARGS.folder)).split(str(BASE_ABS_FOLDER) + os.sep)[1]
    azure_filename_list = [ x.split(str(BASE_ABS_FOLDER) + os.sep)[-1] for x in file_list]
    if ARGS.strip_base_folder:
        # Assumed reconstruction -- the original example is cut off here. Drop the leading
        # folder name from each blob name so the container is not nested as
        # container/<same-folder-name>/... .
        azure_filename_list = [x.split(os.sep, 1)[-1] for x in azure_filename_list]
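
This excerpt stops before the upload itself, and the --workers option suggests the original parallelises it. Assuming CONTAINER is an azure-storage-blob ContainerClient (as its use in the other examples suggests), a simplified sequential version of the upload could look like this; the pairing of local paths with blob names and the overwrite flag are assumptions rather than code from the original.

for local_path, blob_name in zip(file_list, azure_filename_list):
    with open(local_path, 'rb') as data:
        # ContainerClient.upload_blob pushes a single blob under the given name.
        CONTAINER.upload_blob(name=blob_name, data=data, overwrite=True)
        log.info(f'Uploaded {local_path} as {blob_name}')
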
Example #3

import argparse
import os
import queue

from dotenv import load_dotenv, find_dotenv

import azure_client
from azure_client.logger import log, formatter

PARSER = argparse.ArgumentParser(
    description='Rehydrate/dehydrate an archive blob')
PARSER.add_argument('--container', '-c', required=True)
PARSER.add_argument('--tier', '-t', default='Cool')
PARSER.add_argument('--priority', '-p', default='Standard')
PARSER.add_argument('--workers', '-w', default=1, type=int)
ARGS = PARSER.parse_args()

load_dotenv(find_dotenv())

AZURE_URL, AZURE_KEY = os.getenv("AZURE_URL"), os.getenv("AZURE_KEY")
SERVICE = azure_client.connect_service(AZURE_URL, AZURE_KEY)
CONTAINER = azure_client.connect_container(SERVICE,
                                           ARGS.container,
                                           create=False)
BLOB_FILENAMES = azure_client.get_blob_manifest(CONTAINER)

q = queue.Queue()


def worker():
    while True:
        try:
            # Pull the next blob name off the queue and look up its current access tier.
            file = q.get()
            BLOB = CONTAINER.get_blob_client(file)
            BLOB_INFO = BLOB.get_blob_properties()
            if BLOB_INFO.blob_tier == ARGS.tier:
                log.info(f'File {file} is already at tier {ARGS.tier}')
                q.task_done()
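
The excerpt ends inside the worker, so the branch that actually changes the tier and the code that starts the threads are not shown. Purely as a sketch of the usual queue-plus-threads pattern (not the original script's code), the --workers option would typically be honoured like this:

import threading

# Start the requested number of daemon worker threads, all reading from the shared queue.
for _ in range(ARGS.workers):
    threading.Thread(target=worker, daemon=True).start()

# Enqueue every blob name, then block until each item has been acknowledged with q.task_done().
for filename in BLOB_FILENAMES:
    q.put(filename)
q.join()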