def main():
    import boto3
    from pgbackup import storage, pgdump
    import sys
    import time

    arg = create_parser().parse_args()
    timestamp = time.strftime("%Y-%d-%b", time.localtime())
    if not arg.destination:
        filename = pgdump.dump_file_name(arg.url, timestamp)
    else:
        filename = arg.destination
    try:
        dump = pgdump.pg_dump(arg.url)
        if arg.driver == 's3':
            client = boto3.client('s3')
            storage.s3(client, dump.stdout, arg.destination, filename)
        else:
            outfile = open(filename, 'wb')
            storage.local(dump.stdout, outfile)
            outfile.close()
    except OSError as err:
        print(f"Error: {err}")
        sys.exit(1)

def test_dump_file_with_timestamp():
    """
    pgdump.dump_file_name returns the name of the database with timestamp
    """
    timestamp = "2019-12-28T13:14:14"
    assert pgdump.dump_file_name(url, timestamp) == f"db_one-{timestamp}.sql"

def test_dump_file_name_with_timestamp():
    """
    pgdump.dump_file_name returns the name of the database with timestamp
    """
    timestamp = "2017-12-03T13:14:10"
    assert pgdump.dump_file_name(url, timestamp) == "sample-2017-12-03T13:14:10.sql"

def test_dump_file_name_with_timestamp():
    """
    pgdump.dump_file_name returns the name of the database with timestamp
    """
    timestamp = "2019-01-01T13:14:10"
    assert pgdump.dump_file_name(url, timestamp) == "db_one-2019-01-01T13:14:10.sql"

def test_dump_file_name_with_timestamp():
    """
    pgdump.dump_file_name returns the name of the database with the timestamp
    """
    timestamp = "2018-12-03T13:14"
    assert pgdump.dump_file_name(url, timestamp) == "db_one-2018-12-03T13:14.sql"

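# The tests above all exercise pgdump.dump_file_name, whose body never
# appears in these snippets. A minimal sketch of a matching implementation,
# assuming the database name is the final path segment of a postgres:// URL:
def dump_file_name(url, timestamp=None):
    db_name = url.split("/")[-1]  # e.g. "db_one" from "...:5432/db_one"
    if timestamp:
        return f"{db_name}-{timestamp}.sql"
    return f"{db_name}.sql"
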
def main():
    import time
    import boto3
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    timestamp = time.strftime("%Y-%m-%dT%H:%M", time.localtime())
    file_name = pgdump.dump_file_name(args.url, timestamp)
    if args.driver == 's3':
        client = boto3.client('s3')
        print(f"Backing database up to {args.destination} in S3 as {file_name}")
        storage.s3(client, dump.stdout, args.destination, file_name)
    else:
        if args.destination:
            outfile = open(args.destination, 'wb')
        else:
            outfile = open(file_name, 'wb')
        print(f"Backing database up locally to {outfile.name}")
        storage.local(dump.stdout, outfile)

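# Each main() variant expects pgdump.dump to return an object exposing a
# readable .stdout stream. A minimal sketch, assuming the helper shells out
# to the pg_dump CLI via subprocess.Popen (the real body isn't shown here):
import subprocess
import sys

def dump(url):
    try:
        # pg_dump writes the backup to stdout; callers stream it from the pipe
        return subprocess.Popen(['pg_dump', url], stdout=subprocess.PIPE)
    except OSError as err:
        print(f"Error: {err}")
        sys.exit(1)
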
def test_dump_file_name_with_timestamp():
    """
    pgdump.dump_file_name returns the name of the database with timestamp.
    """
    timestamp = "2021-03-03T16:25:10"
    assert pgdump.dump_file_name(url, timestamp) == "db_one-2021-03-03T16:25:10.sql"

def test_dump_file_name_with_timestamp():
    """
    pgdump.dump_file_name returns the name of the database with timestamp
    """
    timestamp = "2020-10-03T15:25:00"
    assert pgdump.dump_file_name(url, timestamp) == "db_one-" + timestamp + ".sql"

def test_dump_file_name_with_timestamp():
    """
    pgdump.dump_file_name returns the name of the database with timestamp
    """
    timestamp = "2017-12-03T13:14:10"
    assert pgdump.dump_file_name(url, timestamp) == ("db_one-%s.sql" % timestamp)

def test_dump_file_name_with_timestamp():
    """
    pgdump.dump_file_name returns the name of the database with timestamp
    appended.
    """
    timestamp = "2017-12-03T13:14"
    assert pgdump.dump_file_name(url, timestamp) == "db_one-2017-12-03T13:14.sql"

def test_dump_file_name_with_timestamp():
    """
    pgdump.dump_file_name returns the name of the database with timestamp
    """
    timestamp = "2019-06-23T14:45:10"
    assert pgdump.dump_file_name(url, timestamp) == "db_one-2019-06-23T14:45:10.sql"

def test_dump_file_name_with_timestamp():
    """
    pgdump.dump_file_name returns the name of the database with timestamp
    appended to it
    """
    timestamp = "2019-03-31T21:40"
    assert pgdump.dump_file_name(url, timestamp) == "db_one-2019-03-31T21:40.sql"

def test_dump_file_name_with_timestamp():
    """
    pgdump.dump_file_name returns the file name with timestamp
    """
    import time
    now = time.strftime("%Y-%m-%dT%H:%M", time.localtime())
    file_name = pgdump.dump_file_name(url, now)
    assert file_name == f'db_one-{now}.sql'

def test_dump_file_name_with_timestamp():
    """
    pgdump.dump_file_name returns the name of the database with timestamp
    appended.
    """
    timestamp = "2018-03-06T11:21"
    assert pgdump.dump_file_name(url, timestamp) == "db_one-2018-03-06T11:21.sql"

def test_dump_file_name_with_timestamp():
    """
    pgdump.dump_file_name returns the name of the database with timestamp
    appended
    """
    timestamp = "2018-05-21T22:03"
    assert pgdump.dump_file_name(url, timestamp) == "db_name-2018-05-21T22:03.sql"

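# Every test above references a module-level `url` fixture that the snippets
# omit. Most of them compare against "db_one", so a plausible definition is
# the following (host and credentials are purely illustrative):
url = "postgres://bob:password@example.com:5432/db_one"
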
def main():
    import time
    import boto3
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    if args.driver == 's3':
        client = boto3.client('s3')
        timestamp = time.strftime("%Y-%m", time.localtime())
        file_name = pgdump.dump_file_name(args.url, timestamp)
        storage.s3(client, dump.stdout, args.destination, file_name)
    else:
        outfile = open(args.destination, 'wb')
        storage.local(dump.stdout, outfile)

def main():
    import time
    import boto3
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    if args.driver == 's3':
        client = boto3.client('s3')
        timestamp = time.strftime('%Y-%m-%dT%H:%M:%S')
        filename = pgdump.dump_file_name(args.url, timestamp)
        print(f'Backing up database to S3 as {filename}')
        storage.s3(client, dump.stdout, args.destination, filename)
    else:
        outfile = open(args.destination, 'wb')
        print(f'Backing up database locally to {args.destination}')
        storage.local(dump.stdout, outfile)

def main():
    import boto3
    import time
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    if args.driver == 's3':
        client = boto3.client('s3')
        timestamp = time.strftime("%Y-%m-%dT%H:%M", time.localtime())
        file_name = pgdump.dump_file_name(args.url, timestamp)
        print("backing database up to %s in S3 as %s" % (args.destination, file_name))
        storage.s3(client, dump.stdout, args.destination, file_name)
    else:
        outfile = open(args.destination, 'wb')  # binary mode: dump.stdout yields bytes
        print("backing up db locally to %s" % outfile.name)
        storage.local(dump.stdout, outfile)

def main():
    import boto3
    import time
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    sqldump = pgdump.dump(args.url)
    if args.driver == 's3':
        client = boto3.client('s3')
        timestamp = time.strftime("%Y-%m-%dT%H:%M:%S", time.localtime())
        file_name = pgdump.dump_file_name(args.url, timestamp)
        print(f"Backing DB up to {args.destination} in S3 as {file_name}")
        storage.s3(client, sqldump.stdout, args.destination, file_name)
    else:
        outfile = open(args.destination, 'wb')
        print(f"Backing DB up to {args.destination} locally")
        storage.local(sqldump.stdout, outfile)

def main():
    import boto3
    import time
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    if args.driver == 's3':
        client = boto3.client('s3')
        timestamp = time.strftime("%Y-%m-%dT%H:%M:%S", time.localtime())
        file_name = pgdump.dump_file_name(args.url, timestamp)
        print("Backing database up to " + args.destination + " in S3 as " + file_name)
        storage.s3(client, dump.stdout, args.destination, file_name)
    else:
        outfile = open(args.destination, 'wb')
        print("Backing database up locally to " + args.destination)
        storage.local(dump.stdout, outfile)

def main():
    # main entry point: runs the previously defined helpers in order
    import time
    import boto3
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    if args.driver == 's3':
        client = boto3.client('s3', region_name='us-east-1')
        timestamp = time.strftime("%Y-%m-%dT%H:%M", time.localtime())
        file_name = pgdump.dump_file_name(args.url, timestamp)
        print(f"Backing database up to {args.destination} in S3 as {file_name}")
        storage.s3(client, dump.stdout, args.destination, file_name)
    else:
        outfile = open(args.destination, 'wb')
        print(f"Backing database up locally to {outfile.name}")
        storage.local(dump.stdout, outfile)

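# Both branches above delegate the byte transfer to pgbackup.storage. A
# minimal sketch of the two helpers, assuming s3 wraps boto3's
# upload_fileobj (these bodies are assumptions, not shown in the snippets):
def local(infile, outfile):
    outfile.write(infile.read())  # copy the dump stream into the local file
    outfile.close()
    infile.close()

def s3(client, infile, bucket, name):
    # upload_fileobj streams a readable file-like object into the bucket
    client.upload_fileobj(infile, bucket, name)
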
def main():
    import boto3
    import time
    from pgbackup import storage, pgdump

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    if args.driver == 's3':
        client = boto3.client('s3')
        timestamp = time.strftime("%m-%d-%YT%H:%M", time.localtime())
        file_name = pgdump.dump_file_name(args.url, timestamp)
        print(f"Backing database up to {args.destination} in S3 as {file_name}")
        # TODO: create a better name based on the database name and the date
        storage.s3(client, dump.stdout, args.destination, file_name)
    else:
        outfile = open(args.destination, 'wb')
        print(f"Backing database up locally to {outfile.name}")
        storage.local(dump.stdout, outfile)

def main():
    import boto3
    import time
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    if args.driver == 's3':
        client = boto3.client('s3')
        timestamp = time.strftime("%Y-%m-%dT%H:%M", time.localtime())
        file_name = pgdump.dump_file_name(args.url, timestamp)
        print(f"Backing up db to {args.destination} in S3 with filename: {file_name}")
        storage.s3(client, dump.stdout, args.destination, file_name)
    else:
        outfile = open(args.destination, 'wb')
        print(f"Backing up db locally with the name: {outfile.name}")
        storage.local(dump.stdout, outfile)

def main():
    # import the boto3 dependency only after the main function is called
    import boto3
    import time
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    if args.driver == "s3":
        client = boto3.client("s3")
        timestamp = time.strftime("%Y-%m-%dT%H:%M", time.localtime())
        file_name = pgdump.dump_file_name(args.url, timestamp)
        print("backing up the database to s3 bucket %s as %s" % (args.destination, file_name))
        storage.s3(client, dump.stdout, args.destination, file_name)
    else:
        outfile = open(args.destination, "wb")  # binary mode: dump.stdout yields bytes
        print("backing up the database locally to %s" % outfile.name)
        storage.local(dump.stdout, outfile)

def main():
    import time
    import boto3
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    if args.driver == 's3':
        timestamp = time.strftime('%Y-%m-%dT%H-%M', time.localtime())
        file_name = pgdump.dump_file_name(args.url, timestamp)
        session = boto3.Session(profile_name='acg')  # could be passed in from CLI options
        client = session.client('s3')
        print(f"[I] Backing database up to {args.destination} in S3 as {file_name}")
        storage.s3(client, dump.stdout, args.destination, file_name)
    else:
        outfile = open(args.destination, 'wb')
        print(f"Backing database up locally to {outfile.name}")
        storage.local(dump.stdout, outfile)

def main():
    import time
    from pgbackup import pgdump, storage
    from pgbackup.remotes import aws

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    if args.driver == 's3':
        now = time.strftime("%Y-%m-%dT%H:%M", time.localtime())
        file_name = pgdump.dump_file_name(args.url, timestamp=now)
        s3_client = aws.Remote()
        print(f'Backing database up to AWS S3 as {file_name}...')
        storage.remote(s3_client, dump.stdout, args.destination, file_name)
    else:
        local_file = open(args.destination, 'wb')
        print('Backing database up to local directory')
        storage.local(dump.stdout, local_file)
    print('Done!')

def main():
    import time
    from google.cloud import storage
    from pgbackup import pgdump, storage as st

    args = create_parser().parse_args()
    dump = pgdump.dump(args.host, args.database, args.user, args.port)
    if args.driver == 'googlestorage':
        storage_client = storage.Client.from_service_account_json(
            "fastAI-c0782c1262dd.json")
        timestamp = time.strftime("%Y-%m-%dT%H:%M", time.localtime())
        file_name = pgdump.dump_file_name(args.database, timestamp)
        print(f"Backing database up to {args.destination} in Google Cloud Storage as {file_name}")
        st.google_storage(storage_client, dump.stdout, args.destination, file_name)
    else:
        outfile = open(args.destination, 'wb')
        print(f"Backing database up locally to {outfile.name}")
        st.local(dump.stdout, outfile)

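# The variant above swaps S3 for Google Cloud Storage via an
# st.google_storage helper that isn't shown. A sketch, assuming it wraps
# the google-cloud-storage client's Blob.upload_from_file (this body is an
# assumption):
def google_storage(client, infile, bucket_name, file_name):
    bucket = client.bucket(bucket_name)  # reference to the target bucket
    blob = bucket.blob(file_name)        # object that will hold the dump
    blob.upload_from_file(infile)        # stream the dump's stdout into it
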
def main():
    import boto3
    import time
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    print(f"Got dump using url {args.url}")
    if args.driver == 's3':
        print(f"Uploading to s3\nBucket name {args.destination}")
        timestamp = time.strftime("%Y-%m-%dT%H%M%S", time.localtime())
        file_name = pgdump.dump_file_name(args.url, timestamp)
        client = boto3.client('s3')
        print(f"Created client; dump database to s3 as file name {file_name}")
        storage.s3(client, dump.stdout, args.destination, file_name)
        print("dumped to s3")
    else:
        print(f"Uploading locally\nDestination file {args.destination}")
        outfile = open(args.destination, 'wb')
        print("opened destination file for writing")
        storage.local(dump.stdout, outfile)
        print("dumped database to destination file")

def main():
    import oci
    import time
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    if args.driver == 'oci':
        config = oci.config.from_file()
        object_storage = oci.object_storage.ObjectStorageClient(config)
        namespace = object_storage.get_namespace().data
        outfile = dump.communicate()[0]  # buffers the entire dump in memory
        timestamp = time.strftime("%Y-%m-%dT%H:%M", time.localtime())
        file_name = pgdump.dump_file_name(args.url, timestamp)
        object_storage.put_object(namespace, args.destination, file_name, outfile)
    else:
        outfile = open(args.destination, 'wb')
        print(f"Backing database up locally to {outfile.name}")
        storage.local(dump.stdout, outfile)

def main():
    import boto3
    import time
    from pgbackup import pgdump, storage

    # parse arguments; no extra error handling is needed here
    args = create_parser().parse_args()
    # run pg_dump against the URL from the parsed args; we have the dump stream now
    dump = pgdump.dump(args.url)
    if args.driver == 's3':
        client = boto3.client('s3')
        # apply a timestamp to build the backup's file name
        timestamp = time.strftime("%Y-%m-%dT%H-%M", time.localtime())
        file_name = pgdump.dump_file_name(args.url, timestamp)
        print(f"Backing database up to {args.destination} in S3 as {file_name}")
        storage.s3(client, dump.stdout, args.destination, file_name)
    else:
        # local save: transfer contents from dump.stdout into outfile, then close both
        outfile = open(args.destination, 'wb')
        print(f"Backing database locally to {outfile.name}")
        storage.local(dump.stdout, outfile)

def create_parser():
    # create_parser returns a configured ArgumentParser
    from argparse import ArgumentParser

    parser = ArgumentParser(description="""
    Back up PostgreSQL database locally or to AWS S3.
    """)
    parser.add_argument("url", help="URL of database to backup")
    # --driver takes two values: how and where to store the backup
    parser.add_argument("--driver", '-d',
                        help="how & where to store backup",
                        nargs=2,
                        metavar=("DRIVER", "DESTINATION"),
                        action=DriverAction,
                        required=True)
    return parser


def main():
    import boto3
    import time
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    if args.driver == 's3':
        client = boto3.client('s3')
        # https://docs.python.org/3/library/time.html#time.strftime
        timestamp = time.strftime("%Y-%m-%dT%H-%M", time.localtime())
        file_name = pgdump.dump_file_name(args.url, timestamp)
        print(f"Backing database up to {args.destination} in S3 as {file_name}")
        # TODO: create a better name based on the database name and the date
        storage.s3(client, dump.stdout, args.destination, file_name)
    else:
        outfile = open(args.destination, 'wb')
        storage.local(dump.stdout, outfile)

# Install the package from the local checkout in editable mode:
# $ pip install -e .

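# The --driver flag above relies on a custom DriverAction that none of these
# snippets define. A minimal sketch, assuming it splits the two nargs=2
# values into separate driver/destination attributes (the body here is an
# assumption):
from argparse import Action

class DriverAction(Action):
    def __call__(self, parser, namespace, values, option_string=None):
        driver, destination = values
        namespace.driver = driver.lower()  # normalize for checks like == 's3'
        namespace.destination = destination
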
def test_dump_file_name_with_timestamp():
    """
    pgdump.dump_file_name returns the name of the database with timestamp
    """
    timestamp = "2017-12-03T13:14:10"
    assert pgdump.dump_file_name(url, timestamp) == f"db_one-{timestamp}.sql"

def test_dump_file_name_without_timestamp():
    """
    pgdump.dump_file_name returns the name of the database
    """
    assert pgdump.dump_file_name(url) == "db_one.sql"

def test_dump_filename_with_timestamp():
    """pgdump.dump_file_name returns the name of the DB with timestamp"""
    timestamp = "2018-08-15T12:10:11"
    assert pgdump.dump_file_name(url, timestamp) == f"db_one-{timestamp}.sql"