def lsvault(args):
    region = args.region
    glacierconn = glaciercorecalls.GlacierConnection(args.aws_access_key,
                                                     args.aws_secret_key,
                                                     region=region)

    response = glacierconn.list_vaults()
    table = None
    while True:
        parse_response(response)
        jdata = json.loads(response.read())
        if response.status == 200 and len(jdata['VaultList']) > 0:
            if not table:
                headers = sorted(jdata['VaultList'][0].keys())
                table = PrettyTable(headers)
            for entry in jdata['VaultList']:
                table.add_row([
                    locale.format('%d', entry[k], grouping=True)
                    if k == 'SizeInBytes' else entry[k] for k in headers
                ])
            if jdata['Marker']:
                response = glacierconn.list_vaults(jdata['Marker'])
            else:
                break
        else:
            break

    if table:
        table.sortby = "VaultName"
        print table
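# Several handlers above and below call a parse_response() helper that is not
# shown in these examples. A minimal sketch of what it is assumed to do --
# report any non-2xx status from the Glacier REST call (the real helper in
# this codebase may print more detail):
def parse_response(response):
    if response.status not in (200, 201, 202, 204):
        print "HTTP %s: %s" % (response.status, response.reason)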
def getarchive(args):
    region = args.region
    vault = args.vault
    archive = args.archive
    filename = args.filename

    glacierconn = glaciercorecalls.GlacierConnection(args.aws_access_key,
                                                     args.aws_secret_key,
                                                     region=region)
    gv = glaciercorecalls.GlacierVault(glacierconn, vault)

    jobs = gv.list_jobs()
    found = False
    for job in gv.job_list:
        if job['ArchiveId'] == archive:
            found = True
            # no need to start another archive retrieval
            if filename or not job['Completed']:
                print "ArchiveId: ", archive
            if job['Completed']:
                job2 = glaciercorecalls.GlacierJob(gv, job_id=job['JobId'])
                if filename:
                    ffile = open(filename, "w")
                    # Call get_output() once; calling it inside the loop
                    # would issue a fresh job-output request for every chunk.
                    output = job2.get_output()
                    for part in iter((lambda: output.read(READ_PART_SIZE)),
                                     ''):
                        ffile.write(part)
                    ffile.close()
                else:
                    print job2.get_output().read()
                return
    if not found:
        job = gv.retrieve_archive(archive)
        print "Started"
def deletearchive(args):
    region = args.region
    vault = args.vault
    archive = args.archive
    BOOKKEEPING = args.bookkeeping
    BOOKKEEPING_DOMAIN_NAME = args.bookkeeping_domain_name

    if BOOKKEEPING:
        sdb_conn = boto.connect_sdb(aws_access_key_id=args.aws_access_key,
                                    aws_secret_access_key=args.aws_secret_key)
        domain_name = BOOKKEEPING_DOMAIN_NAME
        try:
            domain = sdb_conn.get_domain(domain_name, validate=True)
        except boto.exception.SDBResponseError:
            domain = sdb_conn.create_domain(domain_name)

    glacierconn = glaciercorecalls.GlacierConnection(args.aws_access_key,
                                                     args.aws_secret_key,
                                                     region=region)
    gv = glaciercorecalls.GlacierVault(glacierconn, vault)

    parse_response(gv.delete_archive(archive))

    if BOOKKEEPING:
        # Remove the matching bookkeeping entries from SimpleDB; without this
        # guard, `domain` would be undefined when bookkeeping is disabled.
        # TODO: can't find a method for counting right now
        query = 'select * from `%s` where archive_id="%s"' % (
            BOOKKEEPING_DOMAIN_NAME, archive)
        items = domain.select(query)
        for item in items:
            domain.delete_item(item)
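# Regarding the TODO above: SimpleDB can count matches directly, so the
# bookkeeping rows could be counted with a query like the sketch below,
# reusing the domain and archive names from deletearchive() (assumption --
# untested against this codebase):
def count_bookkeeping_rows(domain, domain_name, archive):
    query = 'select count(*) from `%s` where archive_id="%s"' % (
        domain_name, archive)
    for row in domain.select(query):
        return int(row['Count'])
    return 0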
def download(args):
    region = args.region
    vault = args.vault
    filename = args.filename
    out_file = args.out_file

    if not filename:
        raise Exception(u"You have to pass in the file name or a search "
                        u"term from its description to search the archives.")

    args.search_term = filename
    items = search(args, print_results=False)

    n_items = 0
    if not items:
        print "Sorry, didn't find anything."
        return False

    print "Region\tVault\tFilename\tArchive ID"
    for item in items:
        n_items += 1
        archive = item['archive_id']
        vault = item['vault']
        print "%s\t%s\t%s\t%s" % (item['region'], item['vault'],
                                  item['filename'], item['archive_id'])

    if n_items > 1:
        print ("You need to identify the file uniquely with the region, "
               "vault or filename parameters. If that is not enough, use "
               "getarchive to specify exactly which archive you want.")

        return False

    glacierconn = glaciercorecalls.GlacierConnection(args.aws_access_key,
                                                     args.aws_secret_key,
                                                     region=region)
    gv = glaciercorecalls.GlacierVault(glacierconn, vault)

    jobs = gv.list_jobs()
    found = False
    for job in gv.job_list:
        if job['ArchiveId'] == archive:
            found = True
            # no need to start another archive retrieval
            if not job['Completed']:
                print "Waiting for Amazon Glacier to assemble the archive."
            if job['Completed']:
                job2 = glaciercorecalls.GlacierJob(gv, job_id=job['JobId'])
                if out_file:
                    ffile = open(out_file, "w")
                    ffile.write(job2.get_output().read())
                    ffile.close()
                else:
                    print job2.get_output().read()
            return True
    if not found:
        job = gv.retrieve_archive(archive)
        print "Started"
def inventory(args):
    region = args.region
    vault = args.vault
    force = args.force
    BOOKKEEPING = args.bookkeeping
    BOOKKEEPING_DOMAIN_NAME = args.bookkeeping_domain_name

    glacierconn = glaciercorecalls.GlacierConnection(args.aws_access_key,
                                                     args.aws_secret_key,
                                                     region=region)
    gv = glaciercorecalls.GlacierVault(glacierconn, vault)
    if force:
        job = gv.retrieve_inventory(format="JSON")
        return True
    try:
        gv.list_jobs()
        inventory_retrievals_done = []
        for job in gv.job_list:
            if job['Action'] == "InventoryRetrieval" and job[
                    'StatusCode'] == "Succeeded":
                d = dateutil.parser.parse(
                    job['CompletionDate']).replace(tzinfo=pytz.utc)
                job['inventory_date'] = d
                inventory_retrievals_done += [job]

        if len(inventory_retrievals_done):
            inventory_retrievals_done.sort(key=lambda i: i['inventory_date'],
                                           reverse=True)
            job = inventory_retrievals_done[0]
            # Track the newest inventory's date; after the loop above, d
            # still holds the date of whichever job was processed last.
            d = job['inventory_date']
            print "Inventory with JobId:", job['JobId']
            job = glaciercorecalls.GlacierJob(gv, job_id=job['JobId'])
            inventory = json.loads(job.get_output().read())

            if BOOKKEEPING:
                sdb_conn = boto.connect_sdb(
                    aws_access_key_id=args.aws_access_key,
                    aws_secret_access_key=args.aws_secret_key)
                domain_name = BOOKKEEPING_DOMAIN_NAME
                try:
                    domain = sdb_conn.get_domain(domain_name, validate=True)
                except boto.exception.SDBResponseError:
                    domain = sdb_conn.create_domain(domain_name)

                d = dateutil.parser.parse(
                    inventory['InventoryDate']).replace(tzinfo=pytz.utc)
                item = domain.put_attributes("%s" % (d, ), inventory)

            if ((datetime.datetime.utcnow().replace(tzinfo=pytz.utc) - d).days
                    > 1):
                gv.retrieve_inventory(format="JSON")

            render_inventory(inventory)
        else:
            job = gv.retrieve_inventory(format="JSON")
    except Exception, e:
        print "exception: ", e
        print json.loads(e[1])['message']
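# render_inventory() is not shown in these examples. A minimal sketch based on
# the fields Glacier returns in a JSON inventory (assumption -- the real
# helper may render a PrettyTable instead):
def render_inventory(inventory):
    print "Inventory of vault:", inventory['VaultARN']
    print "Inventory date:", inventory['InventoryDate']
    for archive in inventory['ArchiveList']:
        print "%s\t%s\t%s\t%s" % (archive['ArchiveId'],
                                  archive['ArchiveDescription'],
                                  archive['CreationDate'], archive['Size'])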
def abortmultipart(args):
    vault_name = args.vault
    region = args.region

    glacierconn = glaciercorecalls.GlacierConnection(args.aws_access_key,
                                                     args.aws_secret_key,
                                                     region=region)

    if check_vault_name(vault_name):
        response = glaciercorecalls.GlacierVault(
            glacierconn, vault_name).abort_multipart(args.uploadId)
        parse_response(response)
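# check_vault_name() is assumed by several handlers here. Glacier vault names
# must be 1 to 255 characters from a-z, A-Z, 0-9, '_', '-' and '.'; a minimal
# sketch (assumption -- the real helper may report errors differently):
import re

def check_vault_name(name):
    if not 0 < len(name) <= 255:
        print "Vault name has to be between 1 and 255 characters long."
        return False
    if not re.match(r'^[A-Za-z0-9_.\-]+$', name):
        print "Vault name may only contain a-z, A-Z, 0-9, '_', '-' and '.'."
        return False
    return True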
def rmvault(args):
    vault_name = args.vault
    region = args.region

    glacierconn = glaciercorecalls.GlacierConnection(args.aws_access_key,
                                                     args.aws_secret_key,
                                                     region=region)

    if check_vault_name(vault_name):
        response = glaciercorecalls.GlacierVault(glacierconn,
                                                 vault_name).delete_vault()
        parse_response(response)
def mkvault(args):
    vault_name = args.vault
    region = args.region

    glacierconn = glaciercorecalls.GlacierConnection(args.aws_access_key,
                                                     args.aws_secret_key,
                                                     region=region)

    if check_vault_name(vault_name):
        response = glaciercorecalls.GlacierVault(glacierconn,
                                                 vault_name).create_vault()
        parse_response(response)
        print response.getheader("Location")
def describejob(args):
    vault = args.vault
    jobid = args.jobid
    region = args.region
    glacierconn = glaciercorecalls.GlacierConnection(args.aws_access_key,
                                                     args.aws_secret_key,
                                                     region=region)

    gv = glaciercorecalls.GlacierVault(glacierconn, vault)
    gj = glaciercorecalls.GlacierJob(gv, job_id=jobid)
    gj.job_status()
    print "Archive ID: %s\nJob ID: %s\nCreated: %s\nStatus: %s\n" % (
        gj.archive_id, jobid, gj.created, gj.status_code)
def lsvault(args):
    region = args.region
    glacierconn = glaciercorecalls.GlacierConnection(args.aws_access_key,
                                                     args.aws_secret_key,
                                                     region=region)

    response = glacierconn.list_vaults()
    parse_response(response)
    jdata = json.loads(response.read())
    vault_list = jdata['VaultList']
    print "Vault name\tARN\tCreated\tSize"
    for vault in vault_list:
        print "%s\t%s\t%s\t%s" % (vault['VaultName'], vault['VaultARN'],
                                  vault['CreationDate'], vault['SizeInBytes'])
def listjobs(args):
    vault_name = args.vault
    region = args.region

    glacierconn = glaciercorecalls.GlacierConnection(args.aws_access_key,
                                                     args.aws_secret_key,
                                                     region=region)

    gv = glaciercorecalls.GlacierVault(glacierconn, name=vault_name)
    response = gv.list_jobs()
    parse_response(response)
    print "Action\tArchive ID\tStatus\tInitiated\tVaultARN\tJob ID"
    for job in gv.job_list:
        print "%s\t%s\t%s\t%s\t%s\t%s" % (
            job['Action'], job['ArchiveId'], job['StatusCode'],
            job['CreationDate'], job['VaultARN'], job['JobId'])
def lsvault(args):
    region = args.region
    glacierconn = glaciercorecalls.GlacierConnection(args.aws_access_key,
                                                     args.aws_secret_key,
                                                     region=region)

    response = glacierconn.list_vaults()
    parse_response(response)
    jdata = json.loads(response.read())
    vault_list = jdata['VaultList']
    table = PrettyTable(["Vault name", "ARN", "Created", "Size"])
    for vault in vault_list:
        table.add_row([
            vault['VaultName'], vault['VaultARN'], vault['CreationDate'],
            locale.format('%d', vault['SizeInBytes'], grouping=True)
        ])
    table.sortby = "Vault name"
    print table
def listjobs(args):
    vault_name = args.vault
    region = args.region

    glacierconn = glaciercorecalls.GlacierConnection(args.aws_access_key,
                                                     args.aws_secret_key,
                                                     region=region)

    gv = glaciercorecalls.GlacierVault(glacierconn, name=vault_name)
    response = gv.list_jobs()
    parse_response(response)
    table = PrettyTable(
        ["Action", "Archive ID", "Status", "Initiated", "VaultARN", "Job ID"])
    for job in gv.job_list:
        table.add_row([
            job['Action'], job['ArchiveId'], job['StatusCode'],
            job['CreationDate'], job['VaultARN'], job['JobId']
        ])
    print table
def describevault(args):
    vault_name = args.vault
    region = args.region

    glacierconn = glaciercorecalls.GlacierConnection(args.aws_access_key,
                                                     args.aws_secret_key,
                                                     region=region)

    if check_vault_name(vault_name):
        response = glaciercorecalls.GlacierVault(glacierconn,
                                                 vault_name).describe_vault()
        parse_response(response)
        jdata = json.loads(response.read())
        table = PrettyTable(
            ["LastInventory", "Archives", "Size", "ARN", "Created"])
        table.add_row([
            jdata['LastInventoryDate'], jdata['NumberOfArchives'],
            locale.format('%d', jdata['SizeInBytes'], grouping=True),
            jdata['VaultARN'], jdata['CreationDate']
        ])
        print table
def listmultiparts(args):
    vault_name = args.vault
    region = args.region

    glacierconn = glaciercorecalls.GlacierConnection(args.aws_access_key,
                                                     args.aws_secret_key,
                                                     region=region)

    if check_vault_name(vault_name):
        response = glaciercorecalls.GlacierVault(
            glacierconn, vault_name).list_multipart_uploads()
        parse_response(response)
        jdata = json.loads(response.read())
        print "Marker: ", jdata['Marker']
        if len(jdata['UploadsList']) > 0:
            headers = sorted(jdata['UploadsList'][0].keys())
            table = PrettyTable(headers)
            for entry in jdata['UploadsList']:
                table.add_row([
                    locale.format('%d', entry[k], grouping=True)
                    if k == 'PartSizeInBytes' else entry[k] for k in headers
                ])
            print table
def putarchive(args):
    region = args.region
    vault = args.vault
    filename = args.filename
    description = args.description
    stdin = args.stdin
    BOOKKEEPING = args.bookkeeping
    BOOKKEEPING_DOMAIN_NAME = args.bookkeeping_domain_name

    glacierconn = glaciercorecalls.GlacierConnection(args.aws_access_key,
                                                     args.aws_secret_key,
                                                     region=region)

    if BOOKKEEPING:
        sdb_conn = boto.connect_sdb(aws_access_key_id=args.aws_access_key,
                                    aws_secret_access_key=args.aws_secret_key)
        domain_name = BOOKKEEPING_DOMAIN_NAME
        try:
            domain = sdb_conn.get_domain(domain_name, validate=True)
        except boto.exception.SDBResponseError:
            domain = sdb_conn.create_domain(domain_name)

    if description:
        description = " ".join(description)
    else:
        description = filename

    if check_description(description):
        reader = None
        writer = glaciercorecalls.GlacierWriter(glacierconn,
                                                vault,
                                                description=description)

        # If a filename was given, read from it; otherwise fall back to
        # stdin if there is data waiting there.
        if not stdin:
            try:
                reader = open(filename, 'rb')
            except IOError:
                print "Couldn't access the file given."
                return False
        elif select.select([sys.stdin], [], [], 0.0)[0]:
            reader = sys.stdin
        else:
            print "Nothing to upload."
            return False

        # Read the file in chunks so we don't fill up memory.
        for part in iter((lambda: reader.read(READ_PART_SIZE)), ''):
            writer.write(part)
            progress(
                '\rWrote %s bytes.' %
                (locale.format('%d', writer.uploaded_size, grouping=True)))
        writer.close()
        progress('\rWrote %s bytes.\n' %
                 (locale.format('%d', writer.uploaded_size, grouping=True)))

        archive_id = writer.get_archive_id()
        location = writer.get_location()
        sha256hash = writer.get_hash()
        if BOOKKEEPING:
            file_attrs = {
                'region': region,
                'vault': vault,
                'filename': filename,
                'archive_id': archive_id,
                'location': location,
                'description': description,
                'date':
                '%s' % datetime.datetime.utcnow().replace(tzinfo=pytz.utc),
                'hash': sha256hash
            }

            if args.name:
                file_attrs['filename'] = args.name
            elif stdin:
                file_attrs['filename'] = description

            domain.put_attributes(file_attrs['filename'], file_attrs)

        print "Created archive with ID: ", archive_id
        print "Archive SHA256 hash: ", sha256hash
def putarchive(args):
    region = args.region
    vault = args.vault
    filename = args.filename
    description = args.description
    stdin = args.stdin
    BOOKKEEPING = args.bookkeeping
    BOOKKEEPING_DOMAIN_NAME = args.bookkeeping_domain_name

    glacierconn = glaciercorecalls.GlacierConnection(args.aws_access_key,
                                                     args.aws_secret_key,
                                                     region=region)

    if BOOKKEEPING:
        sdb_conn = boto.connect_sdb(aws_access_key_id=args.aws_access_key,
                                    aws_secret_access_key=args.aws_secret_key)
        domain_name = BOOKKEEPING_DOMAIN_NAME
        try:
            domain = sdb_conn.get_domain(domain_name, validate=True)
        except boto.exception.SDBResponseError:
            domain = sdb_conn.create_domain(domain_name)

    if description:
        description = " ".join(description)
    else:
        description = filename

    if check_description(description):
        reader = None

        # If a filename was given, read from it; otherwise fall back to
        # stdin if there is data waiting there.
        if not stdin:
            try:
                reader = open(filename, 'rb')
                total_size = os.path.getsize(filename)
            except IOError:
                print "Couldn't access the file given."
                return False
        elif select.select([sys.stdin], [], [], 0.0)[0]:
            reader = sys.stdin
            total_size = 0
        else:
            print "Nothing to upload."
            return False

        if args.partsize < 0:
            # User did not specify part_size. Compute the optimal value:
            # Glacier allows at most 10,000 parts per upload, so pick the
            # smallest power-of-two part size (in MB) that fits the file.
            if total_size > 0:
                part_size = max(
                    1, next_power_of_2(total_size / (1024 * 1024 * 10000)))
            else:
                part_size = glaciercorecalls.GlacierWriter.DEFAULT_PART_SIZE / 1024 / 1024
        else:
            part_size = next_power_of_2(args.partsize)

        if total_size > part_size * 1024 * 1024 * 10000:
            # User specified a value that is too small. Adjust.
            part_size = next_power_of_2(total_size / (1024 * 1024 * 10000))

        writer = glaciercorecalls.GlacierWriter(glacierconn,
                                                vault,
                                                description=description,
                                                part_size=(part_size * 1024 *
                                                           1024))

        # Read the file in chunks so we don't fill up memory.
        start_time = current_time = previous_time = time.time()
        for part in iter((lambda: reader.read(READ_PART_SIZE)), ''):

            writer.write(part)

            if total_size > 0:
                # Calculate transfer rates in bytes per second.
                current_time = time.time()
                current_rate = int(READ_PART_SIZE /
                                   (current_time - previous_time))
                overall_rate = int(writer.uploaded_size /
                                   (current_time - start_time))

                # Estimate finish time, based on overall transfer rate.
                if overall_rate > 0:
                    time_left = (total_size -
                                 writer.uploaded_size) / overall_rate
                    eta = time.strftime(
                        "%H:%M:%S", time.localtime(current_time + time_left))
                else:
                    time_left = "Unknown"
                    eta = "Unknown"

                progress(
                    '\rWrote %s of %s (%s%%). Rate %s/s, average %s/s, eta %s.'
                    % (size_fmt(writer.uploaded_size), size_fmt(total_size),
                       int(100 * writer.uploaded_size / total_size),
                       size_fmt(current_rate, 2), size_fmt(overall_rate,
                                                           2), eta))

            else:
                progress(
                    '\rWrote %s bytes.' %
                    (locale.format('%d', writer.uploaded_size, grouping=True)))

            previous_time = current_time

        writer.close()
        current_time = time.time()
        if total_size > 0:
            progress('\rWrote %s of %s bytes (%s%%). Transfer rate %s.\n' %
                     (locale.format('%d', writer.uploaded_size, grouping=True),
                      locale.format('%d', total_size, grouping=True),
                      int(100 * writer.uploaded_size / total_size),
                      locale.format('%d', overall_rate, grouping=True)))
        else:
            progress(
                '\rWrote %s bytes.\n' %
                (locale.format('%d', writer.uploaded_size, grouping=True)))

        archive_id = writer.get_archive_id()
        location = writer.get_location()
        sha256hash = writer.get_hash()
        if BOOKKEEPING:
            file_attrs = {
                'region': region,
                'vault': vault,
                'filename': filename,
                'archive_id': archive_id,
                'location': location,
                'description': description,
                'date':
                '%s' % datetime.datetime.utcnow().replace(tzinfo=pytz.utc),
                'hash': sha256hash
            }

            if args.name:
                file_attrs['filename'] = args.name
            elif stdin:
                file_attrs['filename'] = description

            domain.put_attributes(file_attrs['filename'], file_attrs)

        print "Created archive with ID: ", archive_id
        print "Archive SHA256 tree hash: ", sha256hash