def main():
    import time
    import boto3
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    timestamp = time.strftime("%Y-%m-%dT%H:%M", time.localtime())
    file_name = pgdump.dump_file_name(args.url, timestamp)
    if args.driver == 's3':
        client = boto3.client('s3')
        print(f"Backing database up to {args.destination} in S3 as {file_name}")
        storage.s3(client, dump.stdout, args.destination, file_name)
    else:
        if args.destination:
            outfile = open(args.destination, 'wb')
        else:
            outfile = open(file_name, 'wb')
        print(f"Backing database up locally to {outfile.name}")
        storage.local(dump.stdout, outfile)

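Every main() here leans on pgbackup.pgdump for two helpers: dump(url), which returns a process whose stdout streams the backup, and dump_file_name(url, timestamp), which derives a file name from the database URL. The module itself isn't shown in these snippets, so the following is only a minimal sketch; the pg_dump-on-PATH call and the URL parsing are assumptions.

# Hypothetical pgbackup/pgdump.py, reconstructed from how it is called above.
import subprocess

def dump(url):
    # Assumption: pg_dump is on PATH. Stream its output through a pipe so
    # callers can read dump.stdout without buffering the whole backup.
    return subprocess.Popen(['pg_dump', url], stdout=subprocess.PIPE)

def dump_file_name(url, timestamp=None):
    # Assumption: the database name is the last path segment of the URL,
    # e.g. postgres://host:5432/sample -> sample-<timestamp>.sql
    db_name = url.split('/')[-1].split('?')[0]
    if timestamp:
        return f"{db_name}-{timestamp}.sql"
    return f"{db_name}.sql"
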
def main():
    import sys
    import time
    import boto3
    from pgbackup import storage, pgdump

    arg = create_parser().parse_args()
    timestamp = time.strftime("%Y-%d-%b", time.localtime())
    if not arg.destination:
        filename = pgdump.dump_file_name(arg.url, timestamp)
    else:
        filename = arg.destination
    try:
        dump = pgdump.pg_dump(arg.url)
        if arg.driver == 's3':
            client = boto3.client('s3')
            storage.s3(client, dump.stdout, arg.destination, filename)
        else:
            outfile = open(filename, 'wb')
            storage.local(dump.stdout, outfile)
            outfile.close()
    except OSError as err:
        print(f"Error: {err}")
        sys.exit(1)

def test_storing_file_locally(infile):
    """
    Writes content from one file-like to another
    """
    outfile = tempfile.NamedTemporaryFile(delete=False)
    storage.local(infile, outfile)
    with open(outfile.name, 'rb') as f:
        assert f.read() == b"Testing"

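Several of these tests take an infile parameter, which pytest resolves as a fixture; none of the snippets show it, so here is a minimal sketch of what a conftest.py (or test-module) fixture producing b"Testing" might look like. The name and placement are assumptions inferred from the assertions.

import tempfile

import pytest

@pytest.fixture
def infile():
    # Hypothetical fixture: a readable file-like object pre-loaded with the
    # exact bytes the assertions expect.
    f = tempfile.TemporaryFile('r+b')
    f.write(b"Testing")
    f.seek(0)
    return f
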
def test_storing_file_locally(infile):
    """
    Writes content from one file-like to another
    """
    outfile = tempfile.NamedTemporaryFile(delete=False)
    storage.local(infile, outfile)
    with open(outfile.name, 'rb') as f:
        assert f.read() == b'Testing'

def test_storing_file_locally(infile):
    """
    Writes contents from one file-like to another
    """
    # NamedTemporaryFile is guaranteed to have a visible name in the file system
    outfile = tempfile.NamedTemporaryFile(delete=False)
    storage.local(infile, outfile)
    with open(outfile.name, 'rb') as f:
        assert f.read() == b"Testing"

def test_storing_file_locally(infile):
    """
    Write the content from a file-like (pipe) to another file
    """
    outfile = tempfile.NamedTemporaryFile(delete=False)
    storage.local(infile, outfile)
    with open(outfile.name) as f:
        assert f.read() == "Testing"

def test_storing_file_locally():
    infile = tempfile.TemporaryFile('r+b')
    infile.write(b"Testing")
    infile.seek(0)
    outfile = tempfile.NamedTemporaryFile(delete=False)
    storage.local(infile, outfile)
    with open(outfile.name, 'rb') as f:
        assert f.read() == b"Testing"

def test_storing_file_locally(infile):
    """
    Write content from one file-like to another
    """
    # Expects a file to read from and one to write to
    outfile = tempfile.NamedTemporaryFile(delete=False)
    storage.local(infile, outfile)
    with open(outfile.name) as f:
        assert f.read() == "Testing"

def main():
    import boto3
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    if args.driver == 's3':
        client = boto3.client('s3')
        # TODO: create a better name based on the database name and the date
        storage.s3(client, dump.stdout, args.destination, 'example.sql')
    else:
        outfile = open(args.destination, 'wb')
        storage.local(dump.stdout, outfile)

def main():
    import boto3
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    if args.driver == 's3':
        client = boto3.client('s3')
        storage.s3(client, dump.stdout, args.destination, 'example.sql')
    else:
        outfile = open(args.destination, 'wb')
        storage.local(dump.stdout, outfile)

def test_storing_file_locally(source_file):
    """
    Writes content from one file to another
    """
    dest_file = tempfile.NamedTemporaryFile(delete=False)
    local(source_file, dest_file)
    with open(dest_file.name, 'rb') as f:
        content = f.read()
        assert content == b'Testing'

def test_storage_file_locally():
    """
    Writes content from a temp file to another
    """
    infile = tempfile.TemporaryFile('r+b')
    infile.write(b'testing')
    infile.seek(0)
    outfile = tempfile.NamedTemporaryFile(delete=False)
    storage.local(infile, outfile)
    with open(outfile.name, 'rb') as f:
        assert f.read() == b'testing'

def test_storing_file_locally():
    """
    Writes content from one file-like object to another.
    """
    infile = tempfile.TemporaryFile('r+b')
    infile.write(b'Testing')
    infile.seek(0)
    outfile = tempfile.NamedTemporaryFile(delete=False)
    storage.local(infile, outfile)
    with open(outfile.name, 'rb') as f:
        assert f.read() == b'Testing'

def test_storing_file_locally():
    """
    Write content from one file-like to another
    """
    # Use binary mode so the copy into the binary-mode NamedTemporaryFile works
    infile = tempfile.TemporaryFile('r+b')
    infile.write(b"Testing")
    infile.seek(0)
    outfile = tempfile.NamedTemporaryFile(delete=False)
    storage.local(infile, outfile)
    with open(outfile.name) as f:
        assert f.read() == "Testing"

def test_storing_file_locally(infile):
    """
    Writes content from one file-like to another
    """
    # delete=False because if we leave it as the default of True,
    # then it's going to be deleted as soon as we close the file.
    # We want our local method to close both of the files that
    # it's given once it's completed.
    outfile = tempfile.NamedTemporaryFile(delete=False)
    storage.local(infile, outfile)
    with open(outfile.name, 'rb') as f:
        assert f.read() == b'Testing'

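The comment above spells out the contract these tests check storage.local against: copy everything from the source file-like into the destination, then close both handles. The real pgbackup/storage.py isn't shown here, so the following is only a minimal sketch that would satisfy the tests.

# Hypothetical pgbackup/storage.py local helper, reconstructed from the tests above.
def local(infile, outfile):
    # Copy the full contents across, then close both handles; the
    # NamedTemporaryFile above survives only because of delete=False.
    outfile.write(infile.read())
    outfile.close()
    infile.close()
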
def main():
    import time
    import boto3
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    if args.driver == 's3':
        client = boto3.client('s3')
        timestamp = time.strftime("%Y-%m", time.localtime())
        file_name = pgdump.dump_file_name(args.url, timestamp)
        storage.s3(client, dump.stdout, args.destination, file_name)
    else:
        outfile = open(args.destination, 'wb')
        storage.local(dump.stdout, outfile)

def main():
    import time
    import boto3
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    if args.driver == 's3':
        client = boto3.client('s3')
        timestamp = time.strftime('%Y-%m-%dT%H:%M:%S')
        filename = pgdump.dump_file_name(args.url, timestamp)
        print(f'Backing up database to S3 as {filename}')
        storage.s3(client, dump.stdout, args.destination, filename)
    else:
        outfile = open(args.destination, 'wb')
        print(f'Backing up database locally to {args.destination}')
        storage.local(dump.stdout, outfile)

def main():
    import boto3
    import time
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    sqldump = pgdump.dump(args.url)
    if args.driver == 's3':
        client = boto3.client('s3')
        timestamp = time.strftime("%Y-%m-%dT%H:%M:%S", time.localtime())
        file_name = pgdump.dump_file_name(args.url, timestamp)
        print(f"Backing DB up to {args.destination} in S3 as {file_name}")
        storage.s3(client, sqldump.stdout, args.destination, file_name)
    else:
        outfile = open(args.destination, 'wb')
        print(f"Backing DB up to {args.destination} locally")
        storage.local(sqldump.stdout, outfile)

def main():
    import boto3
    import time
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    if args.driver == 's3':
        client = boto3.client('s3')
        timestamp = time.strftime("%Y-%m-%dT%H:%M", time.localtime())
        file_name = pgdump.dump_file_name(args.url, timestamp)
        print("backing database up to %s in S3 as %s" % (args.destination, file_name))
        storage.s3(client, dump.stdout, args.destination, file_name)
    else:
        outfile = open(args.destination, 'wb')
        print("backing up db locally to %s" % outfile.name)
        storage.local(dump.stdout, outfile)

def main():
    import boto3
    import time
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    if args.driver == 's3':
        client = boto3.client('s3')
        timestamp = time.strftime("%Y-%m-%dT%H:%M:%S", time.localtime())
        file_name = pgdump.dump_file_name(args.url, timestamp)
        print("Backing database up to " + args.destination + " in S3 as " + file_name)
        storage.s3(client, dump.stdout, args.destination, file_name)
    else:
        outfile = open(args.destination, 'wb')
        print("Backing database up locally to " + args.destination)
        storage.local(dump.stdout, outfile)

def main():
    import boto3
    from pgbackup import pgdump, storage
    import time

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    if args.driver == 's3':
        client = boto3.client('s3')
        timestamp = time.strftime("%Y-%m-%dT%H:%M", time.localtime())
        file_name = pgdump.dump_file_name(args.url, timestamp)
        print(f"Backing up db to {args.destination} in S3 with filename: {file_name}")
        storage.s3(client, dump.stdout, args.destination, file_name)
    else:
        outfile = open(args.destination, 'wb')
        print(f"Backing up db locally with the name: {outfile.name}")
        storage.local(dump.stdout, outfile)

def main():
    # Main function that drives the execution order of the functions declared above
    import time
    import boto3
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    if args.driver == 's3':
        client = boto3.client('s3', region_name='us-east-1')
        timestamp = time.strftime("%Y-%m-%dT%H:%M", time.localtime())
        file_name = pgdump.dump_file_name(args.url, timestamp)
        print(f"Backing database up to {args.destination} in S3 as {file_name}")
        storage.s3(client, dump.stdout, args.destination, file_name)
    else:
        outfile = open(args.destination, 'wb')
        print(f"Backing database up locally to {outfile.name}")
        storage.local(dump.stdout, outfile)

def main():
    # Import the boto3 dependency only after the main function is called
    import boto3
    import time
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    if args.driver == "s3":
        client = boto3.client("s3")
        timestamp = time.strftime("%Y-%m-%dT%H:%M", time.localtime())
        file_name = pgdump.dump_file_name(args.url, timestamp)
        print("backing up the database to s3 bucket %s as %s" % (args.destination, file_name))
        storage.s3(client, dump.stdout, args.destination, file_name)
    else:
        outfile = open(args.destination, "wb")
        print("backing up the database locally to %s" % outfile.name)
        storage.local(dump.stdout, outfile)

def main():
    import time
    from pgbackup import pgdump, storage
    from pgbackup.remotes import aws

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    if args.driver == 's3':
        now = time.strftime("%Y-%m-%dT%H:%M", time.localtime())
        file_name = pgdump.dump_file_name(args.url, timestamp=now)
        s3_client = aws.Remote()
        print(f'Backing database up to AWS S3 as {file_name}...')
        storage.remote(s3_client, dump.stdout, args.destination, file_name)
    else:
        local_file = open(args.destination, 'wb')
        print('Backing database up to local directory')
        storage.local(dump.stdout, local_file)
    print('Done!')

def main():
    import time
    import boto3
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    if args.driver == 's3':
        timestamp = time.strftime('%Y-%m-%dT%H-%M', time.localtime())
        file_name = pgdump.dump_file_name(args.url, timestamp)
        # The profile name could instead be passed in from the CLI options
        session = boto3.Session(profile_name='acg')
        client = session.client('s3')
        print(f"[I] Backing database up to {args.destination} in S3 as {file_name}")
        storage.s3(client, dump.stdout, args.destination, file_name)
    else:
        outfile = open(args.destination, 'wb')
        print(f"Backing database up locally to {outfile.name}")
        storage.local(dump.stdout, outfile)

def main():
    import time
    from google.cloud import storage
    from pgbackup import pgdump, storage as st

    args = create_parser().parse_args()
    dump = pgdump.dump(args.host, args.database, args.user, args.port)
    if args.driver == 'googlestorage':
        storage_client = storage.Client.from_service_account_json(
            "fastAI-c0782c1262dd.json")
        timestamp = time.strftime("%Y-%m-%dT%H:%M", time.localtime())
        file_name = pgdump.dump_file_name(args.database, timestamp)
        print(f"Backing database up to {args.destination} in Google Cloud Storage as {file_name}")
        st.google_storage(storage_client, dump.stdout, args.destination, file_name)
    else:
        outfile = open(args.destination, 'wb')
        print(f"Backing database up locally to {outfile.name}")
        st.local(dump.stdout, outfile)

def main():
    import boto3
    import time
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    print(f"Got dump using url {args.url}")
    if args.driver == 's3':
        print(f"Uploading to S3\nBucket name {args.destination}")
        timestamp = time.strftime("%Y-%m-%dT%H%M%S", time.localtime())
        file_name = pgdump.dump_file_name(args.url, timestamp)
        client = boto3.client('s3')
        print(f"Created client; dump database to S3 as file name {file_name}")
        storage.s3(client, dump.stdout, args.destination, file_name)
        print("Dumped to S3")
    else:
        print(f"Uploading locally\nDestination file {args.destination}")
        outfile = open(args.destination, 'wb')
        print("Opened destination file for writing")
        storage.local(dump.stdout, outfile)
        print("Dumped database to destination file")

def main():
    import oci
    import time
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    if args.driver == 'oci':
        config = oci.config.from_file()
        object_storage = oci.object_storage.ObjectStorageClient(config)
        namespace = object_storage.get_namespace().data
        # Collect the full dump in memory; put_object takes the body directly
        contents = dump.communicate()[0]
        timestamp = time.strftime("%Y-%m-%dT%H:%M", time.localtime())
        file_name = pgdump.dump_file_name(args.url, timestamp)
        object_storage.put_object(namespace, args.destination, file_name, contents)
    else:
        outfile = open(args.destination, 'wb')
        print(f"Backing database up locally to {outfile.name}")
        storage.local(dump.stdout, outfile)

def main():
    import boto3
    import time
    from pgbackup import pgdump, storage

    # Parse arguments; no error handling is needed here
    args = create_parser().parse_args()
    # Start the dump using the URL from the parsed args
    dump = pgdump.dump(args.url)
    if args.driver == 's3':
        client = boto3.client('s3')
        # Apply a timestamp to the generated file name
        timestamp = time.strftime("%Y-%m-%dT%H-%M", time.localtime())
        file_name = pgdump.dump_file_name(args.url, timestamp)
        print(f"Backing database up to {args.destination} in S3 as {file_name}")
        storage.s3(client, dump.stdout, args.destination, file_name)
    else:
        # Local save: transfer contents from dump.stdout into outfile and close them both
        outfile = open(args.destination, 'wb')
        print(f"Backing database locally to {outfile.name}")
        storage.local(dump.stdout, outfile)

def main():
    import boto3
    from pgbackup import storage, pgdump
    import time

    arg = create_parser().parse_args()
    dump = pgdump.dump(arg.url)
    if arg.driver == 's3':
        client = boto3.client('s3')
        timestamp = time.strftime('%Y-%m-%dT%H:%M', time.localtime())
        file_name = pgdump.dump_file_name(arg.url, timestamp)
        print(f"Backing up data to the S3 bucket {arg.destination} with the name {file_name}")
        storage.s3(client, dump.stdout, arg.destination, file_name)
    else:
        outfile = open(arg.destination, 'w+b')
        print(f"Backing up database locally at {arg.destination} with the name {outfile.name}")
        storage.local(dump.stdout, outfile)

from argparse import ArgumentParser


def create_parser():
    # create_parser builds and returns a configured ArgumentParser
    parser = ArgumentParser(description="""
    Back up PostgreSQL database locally or to AWS S3.
    """)
    parser.add_argument("url", help="URL of database to backup")
    # Optional arguments like --driver are matched by name rather than position;
    # DriverAction is a custom argparse Action defined elsewhere in the package
    parser.add_argument("--driver", '-d',
                        help="how & where to store backup",
                        nargs=2,
                        metavar=("DRIVER", "DESTINATION"),
                        action=DriverAction,
                        required=True)
    return parser


def main():
    import boto3
    import time
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    dump = pgdump.dump(args.url)
    if args.driver == 's3':
        client = boto3.client('s3')
        # https://docs.python.org/3/library/time.html#time.strftime
        timestamp = time.strftime("%Y-%m-%dT%H-%M", time.localtime())
        file_name = pgdump.dump_file_name(args.url, timestamp)
        print(f"Backing database up to {args.destination} in S3 as {file_name}")
        storage.s3(client, dump.stdout, args.destination, file_name)
    else:
        outfile = open(args.destination, 'wb')
        storage.local(dump.stdout, outfile)

# Joe@Joe-PC MINGW64 ~/Documents/GitHub/PythonDeveloper/argparserpackage (master)
# $ pip install -e .
# The -e flag installs the package in editable mode from the local directory

def main():
    import boto3
    import sys
    import time
    from pgbackup import pgdump, storage

    args = create_parser().parse_args()
    try:
        dump = pgdump.dump(args.url)
    except OSError as err:
        print(f'ERROR: ErrorOnDump: {err}')
        sys.exit(1)
    if args.driver == 's3':
        client = boto3.client('s3')
        timestamp = time.strftime("%Y-%m-%dT%H:%M:%S", time.localtime())
        file_name = pgdump.dump_file_name(args.url, timestamp)
        try:
            print(f'Backing up database to {args.destination} on S3 as {file_name}')
            storage.s3(client, dump.stdout, args.destination, file_name)
        except Exception as err:
            print(f'ERROR: ErrorOnS3Dump: {err}')
    else:
        outfile = open(args.destination, 'wb')
        print(f'Backing up database to {args.destination} on local storage')
        storage.local(dump.stdout, outfile)

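Every S3 branch above calls storage.s3(client, infile, bucket, name) without showing the module. A minimal sketch follows, assuming it simply wraps boto3's upload_fileobj, which streams a file-like object (such as pg_dump's stdout pipe) to a key in a bucket; the real implementation may differ.

# Hypothetical pgbackup/storage.py upload helper, reconstructed from the calls above.
def s3(client, infile, bucket, name):
    # upload_fileobj reads the file-like object in chunks and uploads it
    # to the given bucket under the given key, without buffering the whole dump.
    client.upload_fileobj(infile, bucket, name)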