Example #1
    def _connect_glacier(self):
        '''
        Returns a boto.glacier.layer1.Layer1 object
        '''
        # delete these two lines after testing
        from boto.glacier.layer1 import Layer1
        return Layer1()
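Calling Layer1() with no arguments, as in the helper above, leaves credential and region discovery to boto's defaults (environment variables or the boto config file). Below is a minimal sketch of the same helper with the connection parameters passed explicitly; the key, secret, and region values are placeholders, not values taken from the original project.

    def _connect_glacier(self):
        '''
        Returns a boto.glacier.layer1.Layer1 object built from explicit settings
        '''
        from boto.glacier.layer1 import Layer1
        # Placeholder credentials and region: substitute your own,
        # or omit them to fall back to boto's configuration chain.
        return Layer1(aws_access_key_id='AKIA...EXAMPLE',
                      aws_secret_access_key='SECRET...EXAMPLE',
                      region_name='us-west-2')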
Example #2
def test_describe_job():
    conn = Layer1(region_name="us-west-2")
    vault_name = "my_vault"
    conn.create_vault(vault_name)
    archive_id = conn.upload_archive(
        vault_name, "some stuff", "", "", "some description")
    job_response = conn.initiate_job(vault_name, {
        "ArchiveId": archive_id,
        "Type": "archive-retrieval",
    })
    job_id = job_response['JobId']

    job = conn.describe_job(vault_name, job_id)
    json.loads(job.read().decode("utf-8")).should.equal({
        'CompletionDate': '2013-03-20T17:03:43.221Z',
        'VaultARN': None,
        'RetrievalByteRange': None,
        'SHA256TreeHash': None,
        'Completed': True,
        'InventorySizeInBytes': '0',
        'JobId': job_id,
        'Action': 'InventoryRetrieval',
        'JobDescription': None,
        'SNSTopic': None,
        'ArchiveSizeInBytes': 0,
        'ArchiveId': archive_id,
        'ArchiveSHA256TreeHash': None,
        'CreationDate': '2013-03-20T17:03:43.221Z',
        'StatusMessage': None,
        'StatusCode': 'Succeeded',
    })
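These job tests appear to come from moto's test suite; the mock decorator and imports were presumably stripped when the snippet was extracted. A minimal scaffold sketch, assuming moto's mock_glacier decorator and the sure assertion library are what the original project used (both are assumptions):

from boto.glacier.layer1 import Layer1
from moto import mock_glacier
import sure  # noqa: enables the .should assertion style used in these tests

@mock_glacier
def test_create_vault():
    # Runs entirely against moto's in-memory Glacier backend
    conn = Layer1(region_name="us-west-2")
    conn.create_vault("my_vault")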
Example #3
def glacier_vault_inv():
    # Submit a vault inventory job
    init_database()
    init_glconfig()
    timestamp = strftime("%Y-%m-%d %H:%M:%S", gmtime())
    user = getpass.getuser()

    try:
        c.execute('SELECT * FROM jobs')
    except sqlite3.OperationalError:
        c.execute(
            'CREATE TABLE jobs (ID integer primary key, TIMESTAMP text, USER text, VAULT text, JOBID text)'
        )
        conn.commit()

    try:
        glacier_connect = Layer1(aws_access_key_id=key,
                                 aws_secret_access_key=secret,
                                 region_name=region)
        job = glacier_connect.initiate_job(
            vault, {
                'Description': 'inventory-job',
                'Type': 'inventory-retrieval',
                'Format': 'JSON'
            })
        print 'Inventory Job ID: ' + str(job['JobId'])
        c.execute(
            'INSERT INTO jobs (TIMESTAMP,USER,VAULT,JOBID) VALUES (?,?,?,?)',
            (timestamp, user, vault, job['JobId']))
        conn.commit()
    except boto.glacier.exceptions.UnexpectedHTTPResponseError as e:
        error_stamp('upper')
        print e
        error_stamp('lower')
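The inventory job submitted above is asynchronous; Glacier typically takes hours to prepare an inventory. A minimal follow-up sketch, assuming the same key, secret, region and vault globals used by glacier_vault_inv(); the function name and polling interval are made up for illustration:

def glacier_wait_for_inventory(job_id, poll_seconds=900):
    import time
    glacier_connect = Layer1(aws_access_key_id=key,
                             aws_secret_access_key=secret,
                             region_name=region)
    # Poll until Glacier reports the job as completed
    while not glacier_connect.describe_job(vault, job_id)['Completed']:
        time.sleep(poll_seconds)
    # For inventory jobs the output parses as JSON with an ArchiveList
    inventory = glacier_connect.get_job_output(vault, job_id)
    for archive in inventory['ArchiveList']:
        print '{:<25} {:<20} {:<100}'.format(
            archive['CreationDate'], archive['Size'],
            archive['ArchiveDescription'])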
Example #4
def uploadToGlacier(tempTarFile=None,
                    DEBUG_MODE=False,
                    GLACIER_VAULT=None,
                    SECRET_ACCESS_KEY=None,
                    ACCESS_KEY=None,
                    GLACIER_REALM=None):
    global logger

    if not tempTarFile:
        return 0
    # Establish a connection to the Glacier
    glacier_vault_in = None
    my_archive = None
    archive_id = None
    try:
        #my_glacier = GlacierConnection(ACCESS_KEY,SECRET_ACCESS_KEY,region=GLACIER_REALM)
        my_glacier = Layer1(aws_access_key_id=ACCESS_KEY,
                            aws_secret_access_key=SECRET_ACCESS_KEY,
                            region_name=GLACIER_REALM)
        if DEBUG_MODE:
            logger.debug("Glacier Connection: %s" % my_glacier)
        # Create a new vault (not necessary if you already have one!)
        if GLACIER_VAULT:
            #glacier_vault_in = my_glacier.get_vault(GLACIER_VAULT)
            #vaults = my_glacier.get_all_vaults()
            vaults = my_glacier.list_vaults()
            # list_vaults() returns a dict; the existing vault names are under 'VaultList'
            existing_vaults = [v['VaultName'] for v in vaults.get('VaultList', [])]
            glacier_vault_in = None
            if GLACIER_VAULT not in existing_vaults:
                glacier_vault_in = my_glacier.create_vault(GLACIER_VAULT)
        else:
            GLACIER_VAULT = id_generator(size=16)
            glacier_vault_in = my_glacier.create_vault(GLACIER_VAULT)

        if DEBUG_MODE:
            logger.debug("Glacier Vault: %s" % glacier_vault_in)

        #my_archive = GlacierArchive(tempTarFile)
        uploader = ConcurrentUploader(my_glacier, GLACIER_VAULT,
                                      64 * 1024 * 1024)

        if DEBUG_MODE:
            #logger.debug("Archive created in mem: %s " % my_archive)
            logger.debug("Archive created in mem:%s" % uploader)

        #glacier_vault_in.upload(my_archive)
        archive_id = uploader.upload(tempTarFile, tempTarFile)
        if DEBUG_MODE:
            logger.info("upload created: %s" % glacier_vault_in)
    except Exception as exc:
        if len(exc.args) == 2:
            x, y = exc.args
            errstr = None
            try:
                errstr = json.loads(y.read())
            except Exception:
                errstr = y
            logger.error("Error in glacier upload %s" % errstr)
        else:
            logger.error("Error in glacier upload %s" % (exc,))
Example #5
    def get_vault_metadata(self):
        glacier_layer1 = Layer1(region_name=self.region_name)

        print("operation starting...")

        vault_metadata = glacier_layer1.describe_vault(self.target_vault_name)

        print("Success! vault metadata: %s" % vault_metadata)
Example #6
def init_handlers_from_config():
    """
  Use the config to create handlers for Amazon Glacier
  """
    for region in glacier.regions():
        WG.handlers[region.name] = Layer1(
            aws_access_key_id=WG.app.config["AWS_ACCESS_KEY"],
            aws_secret_access_key=WG.app.config["AWS_SECRET_ACCESS_KEY"],
            region_name=region.name)
Example #7
    def get_job_output(self, job_id=None):
        glacier_layer1 = Layer1(region_name=self.region_name)

        print("operation starting...")

        output = glacier_layer1.get_job_output(self.target_vault_name, job_id)
        print(output)

        return output
Example #8
def archive_delete():
    init_database()
    init_glconfig()

    archive_id = raw_input('Enter archive to be deleted: ')
    glacier_connect = Layer1(aws_access_key_id=key,
                             aws_secret_access_key=secret,
                             region_name=region)
    glacier_connect.delete_archive(vault, archive_id)
Example #9
    def _connect_glacier(self):
        '''
        Returns a boto.glacier.layer1.Layer1 object
        '''
        # commented out for testing:
        #return __node__['ec2']['connect_glacier']()

        # delete these two lines after testing
        from boto.glacier.layer1 import Layer1
        return Layer1()
Example #10
def glacier_vault_create():
    # Create Glacier Vaults
    init_database()
    init_glconfig()

    print 'Creating vault: ' + vault
    glacier_connect = Layer1(aws_access_key_id=key,
                             aws_secret_access_key=secret,
                             region_name=region)
    glacier_connect.create_vault(vault)
Example #11
def validate_glacier(access_key, secret_access_key, region):
    """
  Validate connection to Amazon Glacier
  If the function executes without errors, all is well.
  """
    tst = Layer1(aws_access_key_id=access_key,
                 aws_secret_access_key=secret_access_key,
                 region_name=region)
    a = tst.list_vaults()
    tst.close()
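A hypothetical caller for the validator above; the credentials and region are placeholders, and the assumption here is that a failed check surfaces as boto's UnexpectedHTTPResponseError (other exceptions are possible):

from boto.glacier.exceptions import UnexpectedHTTPResponseError

try:
    validate_glacier('AKIA...EXAMPLE', 'SECRET...EXAMPLE', 'us-west-2')
    print("Glacier credentials look valid")
except UnexpectedHTTPResponseError as e:
    print("Glacier validation failed: %s" % e)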
Example #12
    def list_jobs(self, job_id=None):
        glacier_layer1 = Layer1(region_name=self.region_name)

        print("operation starting...")

        if job_id is not None:
            print(glacier_layer1.describe_job(self.target_vault_name, job_id))
        else:
            print(glacier_layer1.list_jobs(self.target_vault_name,
                                           completed=False))
Example #13
    def upload_archive(self, file_name):
        glacier_layer1 = Layer1(region_name=self.region_name)

        uploader = ConcurrentUploader(glacier_layer1, self.target_vault_name,
                                      32 * 1024 * 1024)

        print("operation starting...")

        archive_id = uploader.upload(file_name, file_name)

        print("Success! archive id: '%s'" % archive_id)
Example #14
def test_init_glacier_job():
    conn = Layer1(region_name="us-west-2")
    vault_name = "my_vault"
    conn.create_vault(vault_name)
    archive_id = conn.upload_archive(vault_name, "some stuff", "", "", "some description")

    job_response = conn.initiate_job(vault_name, {
        "ArchiveId": archive_id,
        "Type": "archive-retrieval",
    })
    job_id = job_response['JobId']
    job_response['Location'].should.equal("//vaults/my_vault/jobs/{0}".format(job_id))
Example #15
    def test_initialiate_multipart_upload(self):
        # Create a vault, initiate a multipart upload,
        # then cancel it.
        glacier = Layer1()
        glacier.create_vault('l1testvault')
        self.addCleanup(glacier.delete_vault, 'l1testvault')
        upload_id = glacier.initiate_multipart_upload(
            'l1testvault', 4 * 1024 * 1024, 'double  spaces  here')['UploadId']
        self.addCleanup(glacier.abort_multipart_upload, 'l1testvault',
                        upload_id)
        response = glacier.list_multipart_uploads('l1testvault')['UploadsList']
        self.assertEqual(len(response), 1)
        self.assertEqual(response[0]['MultipartUploadId'], upload_id)
Example #16
def test_get_job_output():
    conn = Layer1(region_name="us-west-2")
    vault_name = "my_vault"
    conn.create_vault(vault_name)
    archive_response = conn.upload_archive(vault_name, "some stuff", "", "", "some description")
    archive_id = archive_response['ArchiveId']
    job_response = conn.initiate_job(vault_name, {
        "ArchiveId": archive_id,
        "Type": "archive-retrieval",
    })
    job_id = job_response['JobId']

    output = conn.get_job_output(vault_name, job_id)
    output.read().decode("utf-8").should.equal("some stuff")
Example #17
    def initiate_archive_retrieval_job(self, archive_id):
        glacier_layer1 = Layer1(region_name=self.region_name)

        print("operation starting...")

        job_id = glacier_layer1.initiate_job(
            self.target_vault_name, {
                "Description": "download-archive-job",
                "Type": "archive-retrieval",
                "ArchiveId": archive_id
            })

        print("Success! job id: %s" % (job_id, ))

        return job_id
Example #18
    def initiate_vault_inventory_job(self):
        glacier_layer1 = Layer1(region_name=self.region_name)

        print("operation starting...")

        job_id = glacier_layer1.initiate_job(
            self.target_vault_name, {
                "Description": "inventory-job",
                "Type": "inventory-retrieval",
                "Format": "JSON"
            })

        print("Success! inventory job id: %s" % (job_id, ))

        return job_id
Example #19
def test_list_glacier_jobs():
    conn = Layer1(region_name="us-west-2")
    vault_name = "my_vault"
    conn.create_vault(vault_name)
    archive_id1 = conn.upload_archive(vault_name, "some stuff", "", "", "some description")['ArchiveId']
    archive_id2 = conn.upload_archive(vault_name, "some other stuff", "", "", "some description")['ArchiveId']

    conn.initiate_job(vault_name, {
        "ArchiveId": archive_id1,
        "Type": "archive-retrieval",
    })
    conn.initiate_job(vault_name, {
        "ArchiveId": archive_id2,
        "Type": "archive-retrieval",
    })

    jobs = conn.list_jobs(vault_name)
    len(jobs['JobList']).should.equal(2)
Example #20
def glacier_vault_delete():
    # Delete Glacier Vaults
    init_database()
    init_glconfig()

    print 'Deleting vault: ' + vault
    glacier_connect = Layer1(aws_access_key_id=key,
                             aws_secret_access_key=secret,
                             region_name=region)
    try:
        glacier_connect.delete_vault(vault)
        c.execute('DELETE FROM config WHERE vault=(?)', (vault, ))
        conn.commit()
    except:
        error_stamp('upper')
        print 'Cannot delete vault ' + vault
        print 'It may contain archives that need to be deleted first'
        error_stamp('lower')
Example #21
def test_describe_job():
    conn = Layer1(region_name="us-west-2")
    vault_name = "my_vault"
    conn.create_vault(vault_name)
    archive_id = conn.upload_archive(
        vault_name, "some stuff", "", "", "some description")
    job_response = conn.initiate_job(vault_name, {
        "ArchiveId": archive_id,
        "Type": "archive-retrieval",
    })
    job_id = job_response['JobId']

    job = conn.describe_job(vault_name, job_id)
    joboutput = json.loads(job.read().decode("utf-8"))
    
    joboutput.should.have.key('Tier').which.should.equal('Standard')
    joboutput.should.have.key('StatusCode').which.should.equal('InProgress')
    joboutput.should.have.key('VaultARN').which.should.equal('arn:aws:glacier:RegionInfo:us-west-2:012345678901:vaults/my_vault')
Example #22
def test_describe_job():
    conn = Layer1(region_name="us-west-2")
    vault_name = "my_vault"
    conn.create_vault(vault_name)
    archive_id = conn.upload_archive(vault_name, "some stuff", "", "",
                                     "some description")
    job_response = conn.initiate_job(vault_name, {
        "ArchiveId": archive_id,
        "Type": "archive-retrieval"
    })
    job_id = job_response["JobId"]

    job = conn.describe_job(vault_name, job_id)
    joboutput = json.loads(job.read().decode("utf-8"))

    joboutput.should.have.key("Tier").which.should.equal("Standard")
    joboutput.should.have.key("StatusCode").which.should.equal("InProgress")
    joboutput.should.have.key("VaultARN").which.should.equal(
        f"arn:aws:glacier:us-west-2:{ACCOUNT_ID}:vaults/my_vault")
Example #23
def download_file(command):
  """
  Takes a command and uses it to download the file.
  """
  if 'action' not in command or command['action']!='DOWNLOAD':
    raise ValueError("Command not of type DOWNLOAD")
  ret={}
  handler = Layer1(aws_access_key_id = command['access_key'],aws_secret_access_key = command['secret_access_key'],region_name=command['region_name'])
  f=open(os.path.join(ddir,command['file_name']),'wb')
  num_chunks = int(math.ceil(command['file_size'] / float(dchunk)))
  print "Downloading file %s"%command['file_name']
  for i in xrange(num_chunks):
    byte_range = ((i * dchunk), ((i + 1) * dchunk) - 1)
    dload = handler.get_job_output(command['vault_name'],command['job_id'],byte_range)
    f.write(dload.read())
    print "%g %%"%(100*float(i)/float(num_chunks))
  f.close()
  print "100 %"
  print "Completed."
  return {}
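A hypothetical command dict illustrating the fields download_file() reads (every value below is a placeholder; ddir and dchunk are module-level settings defined elsewhere in the original script):

command = {
    'action': 'DOWNLOAD',
    'access_key': 'AKIA...EXAMPLE',
    'secret_access_key': 'SECRET...EXAMPLE',
    'region_name': 'us-west-2',
    'vault_name': 'my_vault',
    'job_id': 'job id returned by an archive-retrieval job',
    'file_name': 'backup.tar.gz',
    'file_size': 123456789,
}
download_file(command)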
Example #24
def upload_file(command):
  """
  Uploads a file from the local machine that is specified in the given command.
  """
  if 'action' not in command or command['action']!="UPLOAD":
    raise ValueError("Command not of type UPLOAD")
  if 'file_pattern' not in command: 
    raise ValueError("Missing file pattern")
  path = command['file_pattern'] 
  if not os.path.exists(path):
    raise ValueError("No valid file for upload found")
  returner={}
  handler = Layer1(aws_access_key_id = command['access_key'],aws_secret_access_key = command['secret_access_key'],region_name=command['region_name'])
  uploader = ConcurrentUploader(handler,command['vault_name'],part_size=uchunk)
  file_size = os.path.getsize(path)
  if file_size==0:
    raise ValueError("File is empty.  Nothing to upload.")
  csum = chunkedmd5(path)
  itime=time.time()
  file_name = os.path.basename(path)
  machine_id = str(command['target']) if client_name == '' else client_name+' ('+str(command['target']) + ')'
  #Construct a meaningful description object for the file
  #The limits are that the description can be no more than 1024 characters in length and must use only ascii characters between 32 and 126 (i.e., 32<=ord(char)<=126)
  dscrip = command['description']+'\\n'
  dscrip = dscrip + "Uploaded at "+str(itime)+'\\n'+ "Full path "+str(path)+'\\n'+ "File size "+str(file_size)+'\\n' + "MD5 "+str(csum)+'\\n' + "Source machine id "+machine_id+'\\n'
  print "Uploading file %s"%file_name
  #Put some validation stuff here...
  #Do the upload
  archive_id = uploader.upload(path,dscrip)
  print "Completed successfully.  Archive ID: %s"%archive_id
  #Done the upload, send the bastard back
  returner['archive_id'] = archive_id
  returner['description'] = dscrip
  returner['file_name'] = file_name
  returner['true_path'] = path
  returner['file_size'] = file_size
  returner['md5sum'] = csum
  returner['insert_time']=int(itime)
  returner['region_name']=command['region_name']
  returner['vault_name'] = command['vault_name']
  return returner
Example #25
                                            ".tar.gz ~/vol/* &> /dev/null",
                                            shell=True)
                            print "Archive done for " + volid
                            subprocess.call("sudo umount ~/vol/", shell=True)
                        except Exception as e:
                            logger.error("Error in archive: " + str(e))

                        # Upload archive file to AWS Glacier
                        try:
                            vault_name = str(data['vault_name'])
                            part_size = int(data['part_size'])
                            num_threads = int(data['num_threads'])
                            print "Backing archive of " + volid + " to Glacier"
                            glacierconn = Layer1(
                                aws_access_key_id=data['aws_access_key_id'],
                                aws_secret_access_key=data[
                                    'aws_secret_access_key'],
                                region_name=data['region'])
                            vault = glacierconn.describe_vault(vault_name)
                            uploader = ConcurrentUploader(
                                glacierconn,
                                vault_name,
                                part_size=part_size,
                                num_threads=num_threads)
                            archive_id = uploader.upload(
                                (archive_file + ".tar.gz"), "first upload")
                            print "Download the archive using: " + archive_id
                        except Exception as e:
                            logger.error("Glacier error: " + str(e))

        # Detach the volume from temporary instance and associate it back to
Example #26
def glacier_vault_inv_out():
    # Read the inventory of the vault (The job requesting the inventory must be submitted first)
    init_database()

    try:
        c.execute('SELECT * FROM jobs')
        rows = c.fetchall()
        if len(rows) > 0:
            print('\r')
            print 'Existing Jobs:'
            print '{:<22} {:<15} {:<50} {:<100}'.format(
                'Time Submitted', 'User', 'Vault', 'Amazon Job ID')
            for row in rows:
                print '{:<22} {:<15} {:<50} {:<100}'.format(
                    row[0], row[1], row[2], row[3])
            print('\r')
            gl_id = 0
            gl_id = raw_input(
                'Choose a Job to get inventory? (default: %s): ' %
                gl_id) or gl_id
            gl_id = int(gl_id)

            if gl_id != 0:
                c.execute(
                    'SELECT jobs.jobid,jobs.vault,config.keyid,config.seckey,config.region FROM jobs INNER JOIN config ON jobs.vault = config.vault WHERE jobs.id =(?)',
                    (gl_id, ))
                x = c.fetchall()
                job = x[0][0]
                vault = x[0][1]
                key = x[0][2]
                secret = x[0][3]
                region = x[0][4]
                try:
                    glacier_connect = Layer1(aws_access_key_id=key,
                                             aws_secret_access_key=secret,
                                             region_name=region)
                    get_job = glacier_connect.get_job_output(vault, job)
                    archives = get_job['ArchiveList']
                    for i in archives:
                        print 'Archive ID: ' + str(i['ArchiveId'])
                        print '{:<25} {:<20} {:<100}'.format(
                            'Creation Date', 'Size', 'Archive')
                        print '{:<25} {:<20} {:<100}'.format(
                            i['CreationDate'], i['Size'],
                            i['ArchiveDescription']) + '\n'
                except boto.glacier.exceptions.UnexpectedHTTPResponseError as e:
                    error_stamp('upper')
                    print e
                    error_stamp('lower')
                    get_job = glacier_connect.list_jobs(vault, completed=False)
                    jobs = get_job['JobList']
                    print 'Listing currently active jobs:'
                    print 'Request ID: ' + str(get_job['RequestId']) + '\n'
                    for i in jobs:
                        print 'Job ID: ' + str(i['JobId'])
                        print 'Created: ' + str(i['CreationDate'])
                        print 'Status: ' + str(i['StatusCode'])
                        print 'Vault ARN: ' + str(i['VaultARN']) + '\n'
                    error_stamp('lower')
                gl_id = 1
            else:
                print str('-') * 121
                print 'ERROR: That is not a valid choice.'
                print str('-') * 121
    except Exception:
        print 'An error has occurred'
Example #27
    def __init__(self, *args, **kwargs):
        # Accept a passed-in Layer1, mainly to allow easier testing
        if "layer1" in kwargs:
            self.layer1 = kwargs["layer1"]
        else:
            self.layer1 = Layer1(*args, **kwargs)
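The keyword hook above makes it easy to inject a test double in place of a real Layer1 connection. A minimal sketch, assuming the surrounding class is importable as GlacierWrapper (the real class name is not shown in this snippet):

class StubLayer1(object):
    # Hand-rolled stand-in that records calls instead of talking to AWS
    def __init__(self):
        self.calls = []

    def list_vaults(self):
        self.calls.append('list_vaults')
        return {'VaultList': []}

wrapper = GlacierWrapper(layer1=StubLayer1())
assert isinstance(wrapper.layer1, StubLayer1)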
Example #28
from boto.glacier.layer1 import Layer1
from boto.glacier.concurrent import ConcurrentUploader
import sys
import os.path

# XXX: replace these with your credentials
ACCESS_KEY = "AWS_ACCESS_KEY"
SECRET_KEY = "AWS_SECRET_KEY"
VAULT_NAME = "VAULT_NAME"
REGION_NAME = 'us-west-2'

try:
    backup_file = sys.argv[1]
except IndexError:
    sys.stderr.write("Usage: {} <file to backup>\n".format(sys.argv[0]))
    sys.exit(1)
if not os.path.isfile(backup_file):
    sys.stderr.write("Bad upload file {}\n".format(backup_file))
    sys.exit(2)

glacier_layer1 = Layer1(aws_access_key_id=ACCESS_KEY,
                        aws_secret_access_key=SECRET_KEY,
                        region_name=REGION_NAME)

uploader = ConcurrentUploader(glacier_layer1, VAULT_NAME, 32*1024*1024)

sys.stdout.write("Uploading {} as {}...".format(
    backup_file, os.path.basename(backup_file)))
archive_id = uploader.upload(backup_file, os.path.basename(backup_file))
sys.stdout.write("done\n")
Example #29
    def delete_archive(self, archive_id):
        glacier_layer1 = Layer1(region_name=self.region_name)

        print("operation starting...")

        print(glacier_layer1.delete_archive(self.target_vault_name, archive_id))
Example #30
import sys
import os.path

from boto.glacier.layer1 import Layer1
from boto.glacier.vault import Vault
from boto.glacier.concurrent import ConcurrentUploader

access_key_id = None
secret_key = None
target_vault_name = 'backups'
region_name = 'us-west-2'
fname = sys.argv[1]

if not os.path.isfile(fname):
    print("Can't find the file to upload!")
    sys.exit(-1)

glacier_layer1 = Layer1(aws_access_key_id=access_key_id,
                        aws_secret_access_key=secret_key,
                        region_name=region_name)

uploader = ConcurrentUploader(glacier_layer1, target_vault_name,
                              32 * 1024 * 1024)

print("operation starting...")

archive_id = uploader.upload(fname, fname)

print("Success! archive id: '%s'" % (archive_id))