def shred(f):
    """Securely erase *f* by overwriting and unlinking it with shred(1).

    f -- path of the file to destroy

    Best-effort: failures are logged via LOG, never raised, so a missing
    file or missing shred binary cannot abort a larger cleanup loop.
    """
    try:
        # -f force permissions, -u unlink after overwrite, -z final zero pass
        process = subprocess.Popen(['shred', '-fuz', f], shell=False)
    except OSError as e:
        # shred binary not installed / not on PATH -- log instead of crashing
        LOG.error("Could not run shred on %s: %s" % (f, e))
        return
    if process.wait() == 0:  # wait for shred to complete, check return code
        LOG.info("Shredded %s" % f)
    else:  # some kind of error occurred; log
        LOG.error("Shredding %s failed: shred returned %s" % (f, process.returncode))
def encrypt(source_file, destination_dir=TEMPORARY_DIR, key=PUBLIC_KEY_ID):
    '''
    GPG-encrypts source_file with key, saving encrypted file to destination_dir

    source_file -- absolute path of file to encrypt
    destination_dir -- absolute path of directory to save encrypted file in
    key -- keyid of public key to use; must be in gpg keyring

    Returns path to the encrypted file

    Raises AssertionError if key is not in the keyring or encryption
    produced no output file; logs (but does not raise) IOError from
    opening source_file.
    '''
    # Init GPG
    gpg = gnupg.GPG()  # Defaults to current user's $HOME/.gnupg
    public_keys = gpg.list_keys()
    assert key in [k['keyid'] for k in public_keys], \
        "Could not find PUBLIC_KEY_ID in keyring"

    # Build encrypted filename and path
    e_filename = os.path.basename(source_file) + ".gpg"
    ef_path = os.path.join(destination_dir, e_filename)

    try:
        # "with" guarantees the handle is closed even if encrypt_file
        # raises something other than IOError (the old code leaked it)
        with open(source_file, 'rb') as fp:
            gpg.encrypt_file(
                fp,             # file object to encrypt
                key,            # public key of recipient
                output=ef_path  # path to encrypted file
            )
    except IOError as e:
        LOG.error(e)

    # Hack - unfortunately, when GPG fails to encrypt a file, it prints an
    # error message to the console but does not provide a specific error
    # that python-gnupg can use. So we need to double check.
    assert os.path.exists(ef_path), \
        "GPG encryption failed -- check the public key."
    LOG.info("Encrypted %s -> %s" % (source_file, ef_path))
    return ef_path
def upload_to_s3(local_file, bucket_name=AWS_BUCKET, key_name=None, acl='private'):
    '''
    Uploads local_file to bucket on Amazon S3

    local_file -- path of the file to upload
    bucket_name -- target S3 bucket (created if it does not exist)
    key_name -- the "filename" on Amazon S3; defaults to the local file's name
    acl -- canned ACL string applied to the uploaded key
    '''
    # Connect to Amazon S3 and obtain the target bucket
    connection = S3Connection(AWS_ACCESS_KEY, AWS_SECRET_KEY)
    bucket = connection.create_bucket(bucket_name)

    # Choose the remote name: explicit key_name wins, else the basename
    s3_key = Key(bucket)
    s3_key.key = key_name if key_name else local_file.split("/")[-1]

    # encrypt_key=True for AES-256 encryption while at rest on Amazon's servers
    s3_key.set_contents_from_filename(local_file, encrypt_key=True)
    s3_key.set_acl(acl)
    LOG.info("Uploaded %s to S3 bucket %s" % (local_file, bucket_name))