def _test_cloud_store_filename(self, config, metadata_provider, provider):
    """ Store file as encrypted data to cloud.

    Backs up four files (three of them copies of data2.txt stored under
    different cloud paths), checks that listing reports the stored paths,
    retrieves each file back and compares contents, then deletes the
    cloud copies and the local scratch files.
    """
    database = MetaDataDB(config)
    database.drop()
    cloud = Cloud(config, metadata_provider, provider, database).connect()
    # open() instead of the deprecated file() builtin; close handles
    # deterministically instead of leaking them.
    with open("testdata/data1.txt") as f:
        data1 = f.read()
    with open("testdata/data2.txt") as f:
        data2 = f.read()
    metadata1 = cloud.store_from_filename(
        "testdata/data1.txt", "testdata/data1.txt")
    metadata2 = cloud.store_from_filename(
        "testdata/data2.txt", "testdata/data2.txt")
    metadata3 = cloud.store_from_filename(
        "testdata/data2.txt", "testdata/data3.txt")
    metadata4 = cloud.store_from_filename(
        "testdata/data2.txt", "testdata/data4.txt")
    # (metadata, expected content, cloud path, local restore target)
    fixtures = [
        (metadata1, data1, "testdata/data1.txt", "testdata/new_data1.txt"),
        (metadata2, data2, "testdata/data2.txt", "testdata/new_data2.txt"),
        (metadata3, data2, "testdata/data3.txt", "testdata/new_data3.txt"),
        (metadata4, data2, "testdata/data4.txt", "testdata/new_data4.txt"),
    ]
    # Every stored key must list back with the cloud path it was stored
    # under.
    expected_paths = dict(
        (metadata["key"], path) for metadata, _, path, _ in fixtures)
    for metadata in cloud.list():
        if metadata["key"] in expected_paths:
            self.assertEqual(expected_paths[metadata["key"]],
                             metadata["path"])
    # Round-trip every file back to disk.
    for metadata, _, _, local_path in fixtures:
        cloud.retrieve_to_filename(metadata, local_path)
    for metadata, content, cloud_path, local_path in fixtures:
        with open(local_path) as f:
            self.assertEqual(content, f.read())
        self.assertEqual(cloud_path, metadata["path"])
    for metadata, _, _, _ in fixtures:
        cloud.delete(metadata)
    cloud.disconnect()
    for _, _, _, local_path in fixtures:
        os.remove(local_path)
def main(): """ Main function for `GPGBackup` tool. """ args = parse_args() if args.version: show_version() config = None metadata_provider = None provider = None try: config = Config(args.config) except ConfigError as e: error_exit(e) metadata_bucket = config.config.get("metadata", "bucket") data_bucket = config.config.get("data", "bucket") # Initialize cloud provider and metadata database. if args.provider == "amazon-s3": metadata_provider = amazon.S3(config, metadata_bucket) provider = amazon.S3( config, data_bucket, encryption_method=args.encryption_method) elif args.provider == "sftp": metadata_provider = sftp.Sftp(config, metadata_bucket) provider = sftp.Sftp( config, data_bucket, encryption_method=args.encryption_method) else: error_exit("Unknown cloud provider: {0}".format(args.provider)) cloud = Cloud(config, metadata_provider, provider, MetaDataDB(config)) input_file = None output_file = None if args.inputfile: input_file = args.inputfile if args.outputfile: output_file = args.outputfile exit_value = 0 try: if args.command == "list": metadata_list = cloud.list() if len(metadata_list) == 0: print "No files found." sys.exit(0) show_files(metadata_list, args.verbose) elif args.command == "list-cloud-keys": # This is a utility command to list keys in cloud. cloud.connect() msg = "Cloud metadata keys: " + str(cloud.metadata_provider) print msg print "=" * len(msg) for metadata in cloud.metadata_provider.list_keys().values(): print "Key: {name}\nSize: {size}\n" \ "Last modified: {last_modified}\n".format(**metadata) msg = "Cloud data keys: " + str(cloud.provider) print msg print "=" * len(msg) for metadata in cloud.provider.list_keys().values(): print "Key: {name}\nSize: {size}\n" \ "Last modified: {last_modified}\n".format(**metadata) cloud.disconnect() elif args.command == "list-cloud-data": # This is a utility command to list raw data in cloud. 
cloud.connect() msg = "Cloud metadata: " + str(cloud.metadata_provider) print msg print "=" * len(msg) for k, data in cloud.metadata_provider.list().items(): print "Key:", k print "Data:", data msg = "Cloud data: " + str(cloud.provider) print msg print "=" * len(msg) for k, data in cloud.provider.list().items(): print "Key:", k print "Data:", data cloud.disconnect() elif args.command == "sync": cloud.connect() cloud.sync() cloud.disconnect() metadata_list = cloud.list() if len(metadata_list) == 0: print "No files found." sys.exit(0) show_files(metadata_list, args.verbose) elif args.command == "backup": if not input_file: error_exit("Local filename not given.") if not output_file: output_file = input_file if os.path.isdir(input_file): cloud.connect() if not backup_directory(cloud, input_file, output_file): print "File already exists: {0}".format(output_file) exit_value = 1 cloud.disconnect() sys.exit(exit_value) elif os.path.isfile(input_file) or os.path.islink(input_file): cloud.connect() if not backup_file(cloud, input_file, output_file): print "File already exists: {0}".format(output_file) exit_value = 1 cloud.disconnect() sys.exit(exit_value) else: error_exit("No such file or directory: '{0}'".format(input_file)) elif args.command == "restore": if not input_file: error_exit("Cloud filename not given.") input_file = os.path.normpath(input_file) if output_file: output_file = os.path.normpath(output_file) # Get the list of files. cloud_list = cloud.list() # First, check whether we have an exact match. cloud.connect() for metadata in cloud_list: if metadata["path"] != input_file: continue if not output_file: output_file = input_file print "Restoring file:", input_file, "->", output_file cloud.retrieve_to_filename(metadata, output_file) cloud.disconnect() sys.exit(0) # Then, try to find all files, that have the same directory. 
file_found = False for metadata in cloud_list: if not metadata["path"].startswith(input_file + "/"): continue file_found = True if not output_file: local_file = metadata["path"] else: local_file = output_file + "/" + metadata["path"] print "Restoring file:", metadata["path"], "->", local_file cloud.retrieve_to_filename(metadata, local_file) cloud.disconnect() if file_found: sys.exit(0) error_exit("File not found: " + input_file) elif args.command == "remove": if not input_file: error_exit("Cloud filename not given.") # Get the list of files. cloud_list = cloud.list() # First, check whether we have an exact match. cloud.connect() for metadata in cloud_list: if metadata["path"] != input_file: continue print "Removing file:", input_file cloud.delete(metadata) cloud.disconnect() sys.exit(0) # Then, try to find all files, that have the same directory. file_found = False for metadata in cloud_list: if not metadata["path"].startswith(input_file + "/"): continue file_found = True print "Removing file:", metadata["path"] cloud.delete(metadata) cloud.disconnect() if file_found: sys.exit(0) error_exit("File not found: " + input_file) else: error_exit("Unknown command: {0}".format(args.command)) except Exception as e: cloud.disconnect() error_exit(e)