def test_storing_file_on_s3(mocker, infile): """ Writes contect from one file-like to s3 """ client = mocker.Mock() storage.s3(client, infile, "bucket", "file-name") client.upload_fileobj.assert_called_with(infile, "bucket", "file-name")
def main():
    import time
    import boto3
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    timestamp = time.strftime("%Y-%m-%dT%H:%M", time.localtime())
    file_name = pgdump.dump_file_name(args.url, timestamp)
    if args.driver == 's3':
        client = boto3.client('s3')
        print(f"Backing database up to {args.destination} in S3 as {file_name}")
        storage.s3(client, dump.stdout, args.destination, file_name)
    else:
        if args.destination:
            outfile = open(args.destination, 'wb')
        else:
            outfile = open(file_name, 'wb')
        print(f"Backing database up locally to {outfile.name}")
        storage.local(dump.stdout, outfile)
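The main() variants here all lean on pgdump.dump and pgdump.dump_file_name. A minimal sketch of that module, assuming pg_dump is on the PATH and the database name is the path component of the URL:

# pgbackup/pgdump.py -- a sketch under the assumptions stated above
import subprocess
from urllib.parse import urlparse

def dump(url):
    # Spawn pg_dump and expose its stdout as a readable pipe
    return subprocess.Popen(['pg_dump', url], stdout=subprocess.PIPE)

def dump_file_name(url, timestamp=None):
    # Derive the file name from the database name in the URL path
    db_name = urlparse(url).path[1:]
    if timestamp:
        return f"{db_name}-{timestamp}.sql"
    return f"{db_name}.sql"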
def test_storing_file_s3(mocker, infile):
    # Writes content from one readable to S3
    client = mocker.Mock()
    storage.s3(client, infile, "bucket", "file-name")
    client.upload_fileobj.assert_called_with(infile, "bucket", "file-name")
def main():
    import boto3
    from pgbackup import storage, pgdump
    import sys
    import time

    arg = create_parser().parse_args()
    timestamp = time.strftime("%Y-%d-%b", time.localtime())
    if not arg.destination:
        filename = pgdump.dump_file_name(arg.url, timestamp)
    else:
        filename = arg.destination
    try:
        dump = pgdump.dump(arg.url)
        if arg.driver == 's3':
            client = boto3.client('s3')
            storage.s3(client, dump.stdout, arg.destination, filename)
        else:
            outfile = open(filename, 'wb')
            storage.local(dump.stdout, outfile)
            outfile.close()
    except OSError as err:
        print(f"Error writing backup: {err}")
        sys.exit(1)
def test_storing_file_on_s3(mocker, infile):
    '''
    Writes content from file-like to S3
    '''
    client = mocker.Mock()
    storage.s3(client, infile, 'bucket', 'file-name')
    client.upload_fileobj.assert_called_with(infile, 'bucket', 'file-name')
def test_storing_file_on_s3(mocker, infile): """ Writes content from one readable to S3. """ client = mocker.Mock() storage.s3(client, infile, "python-academy", "file-name") client.upload_fileobj.assert_called_with(infile, "python-academy", "file-name")
def test_storing_file_on_s3(mocker, infile): """ Writes content from one file-like to S3 """ client = mocker.Mock() storage.s3(client, infile, "bucket", "file-name") client.upload_fileobj.assert_called_with(infile, "bucket", "file-name")
def test_storage_file_on_s3(mocker, infile):
    """
    Writes content from one file-like to S3
    """
    client = mocker.Mock()
    # Patching is redundant on a Mock, which already stubs every method,
    # but it makes the intercepted call explicit
    mocker.patch.object(client, "upload_fileobj")
    storage.s3(client, infile, 'bucket-name', 'file-name')
    client.upload_fileobj.assert_called_with(infile, "bucket-name", "file-name")
def test_storing_file_on_s3(mocker, input_file):
    """
    Writes content from readable to S3
    """
    client = mocker.Mock()
    storage.s3(client, input_file, "bucket", "file-name")
    client.upload_fileobj.assert_called_with(input_file, "bucket", "file-name")
def main():
    import boto3
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    if args.driver == 's3':
        client = boto3.client('s3')
        # TODO: create a better name based on the database name and the date
        storage.s3(client, dump.stdout, args.destination, 'example.sql')
    else:
        outfile = open(args.destination, 'wb')
        storage.local(dump.stdout, outfile)
def main():
    import boto3
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    if args.driver == 's3':
        client = boto3.client('s3')
        storage.s3(client, dump.stdout, args.destination, 'example.sql')
    else:
        outfile = open(args.destination, 'wb')
        storage.local(dump.stdout, outfile)
def test_storage_file_on_s3(mocker): """ Writes content from one file-like to s3 """ # Creates mock object client and uses it as an arg for s3 client = mocker.Mock() mocker.patch.object(client, "upload_fileobj") storage.s3(client, infile, 'bucket-name', 'file-name') client.upload_fileobj.assert_called_with(infile, "bucket-name", "file-name")
def test_storing_file_on_s3(mocker, infile): """ writes content from one file-like to s3 """ # duct-typing; only real requirement is that the client object it receives adheres to a certain method signature (it has an upload_fileobj method) # we mock the client object client = mocker.Mock() # ideal usage storage.s3(client, infile, "bucket", "file-name") client.upload_fileobj.assert_called_with(infile, "bucket", "file-name")
def main():
    import time
    import boto3
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    if args.driver == 's3':
        client = boto3.client('s3')
        timestamp = time.strftime("%Y-%m", time.localtime())
        file_name = pgdump.dump_file_name(args.url, timestamp)
        storage.s3(client, dump.stdout, args.destination, file_name)
    else:
        outfile = open(args.destination, 'wb')
        storage.local(dump.stdout, outfile)
def main():
    import time
    import boto3
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    if args.driver == 's3':
        client = boto3.client('s3')
        timestamp = time.strftime('%Y-%m-%dT%H:%M:%S')
        filename = pgdump.dump_file_name(args.url, timestamp)
        print(f'Backing up database to S3 as {filename}')
        storage.s3(client, dump.stdout, args.destination, filename)
    else:
        outfile = open(args.destination, 'wb')
        print(f'Backing up database locally to {args.destination}')
        storage.local(dump.stdout, outfile)
def main():
    import boto3
    import time
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    sqldump = pgdump.dump(args.url)
    if args.driver == 's3':
        client = boto3.client('s3')
        timestamp = time.strftime("%Y-%m-%dT%H:%M:%S", time.localtime())
        file_name = pgdump.dump_file_name(args.url, timestamp)
        print(f"Backing DB up to {args.destination} in S3 as {file_name}")
        storage.s3(client, sqldump.stdout, args.destination, file_name)
    else:
        outfile = open(args.destination, 'wb')
        print(f"Backing DB up to {args.destination} locally")
        storage.local(sqldump.stdout, outfile)
def main(): import boto3 import time from pgbackup import pgdump, storage args = create_parser().parse_args() dump = pgdump.dump(args.url) if args.driver == 's3': client = boto3.client('s3') timestamp = time.strftime("%Y-%m-%dT%H:%M", time.localtime()) file_name = pgdump.dump_file_name(args.url, timestamp) print("backing database up to %s in S3 as %s" % (args.url, file_name)) storage.s3(client, dump.stdout, args.destination, file_name) else: outfile = open(args.destination, 'w') print("backing up db locally to %s" % outfile.name) storage.local(dump.stdout, outfile)
def main(): import boto3 import time from pgbackup import pgdump, storage args = create_parser().parse_args() dump = pgdump.dump(args.url) if args.driver == 's3': client = boto3.client('s3') timestamp = time.strftime("%Y-%m-%dT%H:%M:%S", time.localtime()) file_name = pgdump.dump_file_name(args.url, timestamp) print("Backup database up to " + args.destination + " in s3 as " + file_name) storage.s3(client, dump.stdout, args.destination, file_name) else: outfile = open(args.destination, 'wb') print("Backup database locally to " + args.destination) storage.local(dump.stdout, outfile)
def main():
    # Main entry point that sequences the functions declared above
    import time
    import boto3
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    if args.driver == 's3':
        client = boto3.client('s3', region_name='us-east-1')
        timestamp = time.strftime("%Y-%m-%dT%H:%M", time.localtime())
        file_name = pgdump.dump_file_name(args.url, timestamp)
        print(f"Backing database up to {args.destination} in S3 as {file_name}")
        storage.s3(client, dump.stdout, args.destination, file_name)
    else:
        outfile = open(args.destination, 'wb')
        print(f"Backing database up locally to {outfile.name}")
        storage.local(dump.stdout, outfile)
def main():
    import boto3
    from pgbackup import pgdump, storage
    import time

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    if args.driver == 's3':
        client = boto3.client('s3')
        timestamp = time.strftime("%Y-%m-%dT%H:%M", time.localtime())
        file_name = pgdump.dump_file_name(args.url, timestamp)
        print(f"Backing up db to {args.destination} in S3 with filename: {file_name}")
        storage.s3(client, dump.stdout, args.destination, file_name)
    else:
        outfile = open(args.destination, 'wb')
        print(f"Backing up db locally with the name: {outfile.name}")
        storage.local(dump.stdout, outfile)
def main():
    # import the boto3 dependency only after the main function is called
    import boto3
    import time
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    if args.driver == "s3":
        client = boto3.client("s3")
        timestamp = time.strftime("%Y-%m-%dT%H:%M", time.localtime())
        file_name = pgdump.dump_file_name(args.url, timestamp)
        print("backing up the database to s3 bucket %s as %s" % (args.destination, file_name))
        storage.s3(client, dump.stdout, args.destination, file_name)
    else:
        # open in binary mode: the dump's stdout yields bytes
        outfile = open(args.destination, "wb")
        print("backing up the database locally to %s" % outfile.name)
        storage.local(dump.stdout, outfile)
def main():
    import time
    import boto3
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    if args.driver == 's3':
        timestamp = time.strftime('%Y-%m-%dT%H-%M', time.localtime())
        file_name = pgdump.dump_file_name(args.url, timestamp)
        # profile name could instead be passed in from CLI options
        session = boto3.Session(profile_name='acg')
        client = session.client('s3')
        print(f"[I] Backing database up to {args.destination} in S3 as {file_name}")
        storage.s3(client, dump.stdout, args.destination, file_name)
    else:
        outfile = open(args.destination, 'wb')
        print(f"Backing database up locally to {outfile.name}")
        storage.local(dump.stdout, outfile)
def main():
    import boto3
    import time
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    print(f"Got dump using url {args.url}")
    if args.driver == 's3':
        print(f"Uploading to S3\nBucket name {args.destination}")
        timestamp = time.strftime("%Y-%m-%dT%H%M%S", time.localtime())
        file_name = pgdump.dump_file_name(args.url, timestamp)
        client = boto3.client('s3')
        print(f"Created client; dumping database to S3 as file name {file_name}")
        storage.s3(client, dump.stdout, args.destination, file_name)
        print("Dumped to S3")
    else:
        print(f"Uploading locally\nDestination file {args.destination}")
        outfile = open(args.destination, 'wb')
        print("Opened destination file for writing")
        storage.local(dump.stdout, outfile)
        print("Dumped database to destination file")
def main():
    import boto3
    import time
    from pgbackup import pgdump, storage

    # Parse arguments; no error handling is needed here
    args = create_parser().parse_args()
    # Use the url from the parsed args; we already have a dump at this point
    dump = pgdump.dump(args.url)
    if args.driver == 's3':
        client = boto3.client('s3')
        # Apply a timestamp to the file name
        timestamp = time.strftime("%Y-%m-%dT%H-%M", time.localtime())
        file_name = pgdump.dump_file_name(args.url, timestamp)
        # Pass the client in
        print(f"Backing database up to {args.destination} in S3 as {file_name}")
        storage.s3(client, dump.stdout, args.destination, file_name)
    else:
        # Local save: transfer contents from dump.stdout into outfile and close both
        outfile = open(args.destination, 'wb')
        print(f"Backing database up locally to {outfile.name}")
        storage.local(dump.stdout, outfile)
def main():
    import boto3
    from pgbackup import storage, pgdump
    import time

    arg = create_parser().parse_args()
    dump = pgdump.dump(arg.url)
    if arg.driver == 's3':
        client = boto3.client('s3')
        timestamp = time.strftime('%Y-%m-%dT%H:%M', time.localtime())
        file_name = pgdump.dump_file_name(arg.url, timestamp)
        print(f"Backing up data to the S3 bucket {arg.destination} with the name {file_name}")
        storage.s3(client, dump.stdout, arg.destination, file_name)
    else:
        outfile = open(arg.destination, 'w+b')
        print(f"Backing up database locally at {arg.destination} with the name {outfile.name}")
        storage.local(dump.stdout, outfile)
from argparse import ArgumentParser

def create_parser():
    # create_parser builds and returns a configured ArgumentParser
    parser = ArgumentParser(description="""
    Back up PostgreSQL databases locally or to AWS S3.
    """)
    parser.add_argument("url", help="URL of database to backup")
    # Optional arguments can be referenced by name
    parser.add_argument("--driver", '-d',
                        help="how & where to store backup",
                        nargs=2,
                        metavar=("DRIVER", "DESTINATION"),
                        action=DriverAction,
                        required=True)
    return parser

def main():
    import boto3
    import time
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    if args.driver == 's3':
        client = boto3.client('s3')
        # https://docs.python.org/3/library/time.html#time.strftime
        timestamp = time.strftime("%Y-%m-%dT%H-%M", time.localtime())
        file_name = pgdump.dump_file_name(args.url, timestamp)
        print(f"Backing database up to {args.destination} in S3 as {file_name}")
        storage.s3(client, dump.stdout, args.destination, file_name)
    else:
        outfile = open(args.destination, 'wb')
        storage.local(dump.stdout, outfile)

# Install the package in editable mode from the local directory:
# $ pip install -e .
def main():
    import boto3
    import time
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    try:
        dump = pgdump.dump(args.url)
    except OSError as err:
        print(f'ERROR: ErrorOnDump: {err}')
    if args.driver == 's3':
        client = boto3.client('s3')
        timestamp = time.strftime("%Y-%m-%dT%H:%M:%S", time.localtime())
        file_name = pgdump.dump_file_name(args.url, timestamp)
        try:
            print(f'Backing up database to {args.destination} on S3 as {file_name}')
            storage.s3(client, dump.stdout, args.destination, file_name)
        except Exception:
            print('ERROR: ErrorOnS3Dump: No such bucket')
    else:
        outfile = open(args.destination, 'wb')
        print(f'Backing up database to {args.destination} on local storage')
        storage.local(dump.stdout, outfile)
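Catching every exception around the upload hides unrelated failures. A narrower sketch for the inner try block, assuming botocore is installed alongside boto3 and reusing the names bound in the main() above:

from botocore.exceptions import ClientError

try:
    storage.s3(client, dump.stdout, args.destination, file_name)
except ClientError as err:
    # Surfaces only S3-side failures (e.g. a missing bucket)
    # instead of swallowing every exception
    print(f'ERROR: ErrorOnS3Dump: {err}')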
import boto3
from pgbackup import storage

client = boto3.client('s3')
infile = open('dump.sql', 'rb')
storage.s3(client, infile, 'qngo-db-backups', infile.name)
from argparse import ArgumentParser

def create_parser():
    parser = ArgumentParser(description="""
    Back up PostgreSQL databases locally or to AWS S3.
    """)
    parser.add_argument("url", help="URL of database to backup")
    parser.add_argument("--driver", '-d',
                        help="how & where to store backup",
                        nargs=2,
                        metavar=("DRIVER", "DESTINATION"),
                        action=DriverAction,
                        required=True)
    return parser

def main():
    import time
    import boto3
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    if args.driver == 's3':
        client = boto3.client('s3')
        timestamp = time.strftime("%Y-%m-%dT%H:%M", time.localtime())
        file_name = pgdump.dump_file_name(args.url, timestamp)
        storage.s3(client, dump.stdout, args.destination, file_name)
    else:
        outfile = open(args.destination, 'wb')
        print(f"Backing database up locally to {outfile.name}")
        storage.local(dump.stdout, outfile)
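Both create_parser variants reference a custom DriverAction argparse action. A minimal sketch of what it might look like, splitting the two values captured by nargs=2 into separate attributes:

from argparse import Action

class DriverAction(Action):
    def __call__(self, parser, namespace, values, option_string=None):
        # values holds the (DRIVER, DESTINATION) pair from nargs=2
        driver, destination = values
        namespace.driver = driver.lower()
        namespace.destination = destination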
def test_storing_file_on_s3(mocker, infile):
    # Writes content
    client = mocker.Mock()
    storage.s3(client, infile, "bucket", "file_name")
    client.upload_fileobj.assert_called_with(infile, "bucket", "file_name")
from pgbackup import storage
import boto3

client = boto3.client('s3')
infile = open('example.txt', 'rb')
storage.s3(client, infile, 'pgbackup-swapnasheel', infile.name)
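For symmetry with the S3 usage examples above, a sketch of the local driver and its usage. This assumes storage.local simply streams one file-like into another and closes both; the file names are placeholders:

# pgbackup/storage.py -- a minimal sketch of the local driver
def local(infile, outfile):
    # Copy all bytes from the readable to the writable, then clean up
    outfile.write(infile.read())
    outfile.close()
    infile.close()

# Usage mirroring the S3 examples above
infile = open('example.txt', 'rb')
outfile = open('example-copy.txt', 'wb')
local(infile, outfile)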