def backup_postgres(site_name, dbname, dbuser, tmpdir='/tmp'):
    """Dump a PostgreSQL database, GPG-encrypt it and upload it to S3.

    One backup per day: if today's object already exists in the bucket,
    the function prints a notice and returns without dumping.

    :param site_name: bucket sub-folder the backup is stored under
    :param dbname: name of the database passed to pg_dump
    :param dbuser: user pg_dump connects as (host is fixed to 127.0.0.1)
    :param tmpdir: local scratch directory for the encrypted dump
                   (default '/tmp', matching backup_files)
    """
    # Function-local import, mirroring backup_files(): the module level
    # does not necessarily import these names, and the original body
    # used strftime/gmtime without any visible import.
    from time import gmtime, strftime

    date = strftime("%Y.%m.%d", gmtime())
    gpg_file = 'sql-%s.sql.gz.gpg' % date
    local_file = "%s/vakap-%s" % (tmpdir, gpg_file)
    s3_dest = "s3://%s/%s/%s" % (env.s3_bucket, site_name, gpg_file)

    # Guard clause: skip work if today's backup is already in the bucket.
    if s3_file_exists(s3_dest):
        print(" - File exists: %s. Skipping." % s3_dest)
        return

    print(" - Dumping and encrypting database: %s" % dbname)
    # Dump, compress and encrypt in one pipeline so the plaintext dump
    # never touches disk; only the .gpg file is written locally.
    run("""pg_dump --host=127.0.0.1 --clean --username={dbuser} {dbname} \
| gzip | gpg --encrypt --recipient {key} > {local_file}"""
        .format(dbuser=dbuser, dbname=dbname, key=env.gpg_key,
                local_file=local_file))
    s3_upload(local_file, s3_dest)
def backup_files(site_name, path, tmpdir='/tmp'):
    """Tar, GPG-encrypt and upload a directory tree to S3.

    Runs inside *path* on the remote host. If a ``current`` entry exists
    there (capistrano-style layout), only that subtree is archived;
    otherwise the whole directory is. Today's backup is skipped when it
    is already present in the bucket.

    :param site_name: bucket sub-folder the backup is stored under
    :param path: remote directory to archive
    :param tmpdir: local scratch directory for the encrypted archive
    """
    from time import gmtime, strftime

    with cd(path):
        stamp = strftime("%Y.%m.%d", gmtime())
        gpg_file = 'files-%s.tgz.gpg' % stamp
        local_file = "%s/vakap-%s" % (tmpdir, gpg_file)
        s3_dest = "s3://%s/%s/%s" % (env.s3_bucket, site_name, gpg_file)

        # One backup per day: bail out early when it already exists.
        if s3_file_exists(s3_dest):
            print(" - File exists: %s. Skipping" % s3_dest)
            return

        print(" - Taring and gziping directory: %s => %s" % (path, tmpdir))
        # Archive 'current' when present, the whole directory otherwise;
        # -h dereferences symlinks so the release behind 'current' is
        # what actually lands in the tarball.
        target = 'current' if file_exists('current') else '.'
        run("tar czh %s | gpg --encrypt --recipient %s > %s"
            % (target, env.gpg_key, local_file))
        s3_upload(local_file, s3_dest)