def storage_mk_dir(cls, folders):
    """Creates a folder inside a data storage for every path in ``folders``.

    Each path is resolved against the list of available storages; lookup
    failures and already-existing folders are reported and skipped, while
    API-level errors terminate the command with exit code 1.
    """
    try:
        available_buckets = list(DataStorage.list())
    except (ConfigNotFoundError, requests.exceptions.RequestException, RuntimeError, ValueError) as error:
        click.echo('Error: %s' % str(error), err=True)
        sys.exit(1)
    for original_path in folders:
        # Resolve the path into (error, storage identifier, path inside storage).
        lookup_error, bucket_identifier, relative_path = \
            DataStorageWrapper.get_data_storage_item_path_info(original_path, available_buckets)
        if lookup_error is not None:
            click.echo(lookup_error, err=True)
            continue
        # An empty relative path means the folder resolves to the storage root.
        if not relative_path:
            click.echo('Cannot create folder \'{}\': already exists'.format(original_path), err=True)
            continue
        click.echo('Creating folder {}...'.format(original_path), nl=False)
        try:
            creation_result = DataStorage.create_folder(bucket_identifier, relative_path)
        except (ConfigNotFoundError, requests.exceptions.RequestException, RuntimeError, ValueError) as error:
            click.echo('Error: %s' % str(error), err=True)
            sys.exit(1)
        if creation_result is not None:
            if creation_result.error is None:
                click.echo('done.')
            else:
                click.echo('failed.')
                click.echo(creation_result.error, err=True)
def delete_object_tags(cls, path, tags, version):
    """Deletes the given tag keys from a data storage object.

    :param path: storage URI of the object whose tags are removed
    :param tags: collection of tag keys to delete; required
    :param version: object version to operate on (None for the latest)

    Exits with code 1 when no tags are supplied or the API call fails.
    """
    if not tags:
        click.echo("Error: Missing argument \"tags\"", err=True)
        sys.exit(1)
    try:
        root_bucket, relative_path = DataStorage.load_from_uri(path)
        DataStorage.delete_object_tags(root_bucket.identifier, relative_path, tags, version)
    # 'Exception' instead of 'BaseException' so KeyboardInterrupt/SystemExit
    # propagate; 'str(e)' instead of 'e.message' - the 'message' attribute is
    # deprecated since Python 2.6 and does not exist on Python 3 exceptions.
    except Exception as e:
        click.echo(str(e), err=True)
        sys.exit(1)
def set_object_tags(cls, path, tags, version):
    """Sets key=value tags on a data storage object.

    :param path: storage URI of the object to tag
    :param tags: input key=value pairs, converted via convert_input_pairs_to_json
    :param version: object version to tag (None for the latest)

    Exits with code 1 when the API call fails or returns no updated tags.
    """
    try:
        root_bucket, relative_path = DataStorage.load_from_uri(path)
        updated_tags = DataStorage.set_object_tags(root_bucket.identifier, relative_path,
                                                   cls.convert_input_pairs_to_json(tags), version)
        if not updated_tags:
            raise RuntimeError("Failed to set tags for path '{}'.".format(path))
    # 'Exception' instead of 'BaseException' so KeyboardInterrupt/SystemExit
    # propagate; 'str(e)' instead of 'e.message' - the 'message' attribute is
    # deprecated since Python 2.6 and does not exist on Python 3 exceptions.
    except Exception as e:
        click.echo(str(e), err=True)
        sys.exit(1)
def get_object_tags(cls, path, version):
    """Prints the tags of a data storage object as a table.

    :param path: storage URI of the object
    :param version: object version to inspect (None for the latest)

    Exits with code 1 when the API call fails.
    """
    try:
        root_bucket, relative_path = DataStorage.load_from_uri(path)
        tags = DataStorage.get_object_tags(root_bucket.identifier, relative_path, version)
        if not tags:
            click.echo("No tags available for path '{}'.".format(path))
        else:
            click.echo(cls.create_table(tags))
    # 'Exception' instead of 'BaseException' so KeyboardInterrupt/SystemExit
    # propagate; 'str(e)' instead of 'e.message' - the 'message' attribute is
    # deprecated since Python 2.6 and does not exist on Python 3 exceptions.
    except Exception as e:
        click.echo(str(e), err=True)
        sys.exit(1)
def save_data_storage(cls, name, description, sts_duration, lts_duration, versioning,
                      backup_duration, type, parent_folder, on_cloud, path):
    """Registers a new data storage, optionally placing it under an existing folder.

    When ``parent_folder`` is given it must resolve to a readable folder,
    otherwise the command exits with code 1; API failures also exit with 1.
    """
    parent_id = None
    if parent_folder:
        parent = Folder.load(parent_folder)
        if parent is None:
            click.echo("Error: Directory with name '{}' not found! "
                       "Check if it exists and you have permission to read it".format(parent_folder),
                       err=True)
            sys.exit(1)
        parent_id = parent.id
    try:
        DataStorage.save(name, path, description, sts_duration, lts_duration, versioning,
                         backup_duration, type, parent_id, on_cloud)
    except (ConfigNotFoundError, requests.exceptions.RequestException, RuntimeError, ValueError) as error:
        click.echo('Error: %s' % str(error), err=True)
        sys.exit(1)
def mvtodir(cls, name, directory):
    """Moves a data storage into the given folder; ``'/'`` means the root.

    :param name: name of the data storage to move
    :param directory: target folder, given either as a path or a bare name

    Exits with code 1 when the folder does not exist or the API call fails.
    """
    folder_id = None
    try:
        # '!=' instead of 'is not': identity comparison against a string
        # literal depends on interning and is a SyntaxWarning on modern
        # Pythons - equality is what is meant here.
        if directory != "/":
            if os.path.split(directory)[0]:
                # The argument contains a path separator - resolve it as a path.
                folder = Folder.load(directory)
            else:
                folder = Folder.load_by_name(directory)
            if folder is None:
                click.echo("Directory with name {} does not exist!".format(directory), err=True)
                sys.exit(1)
            folder_id = folder.id
        DataStorage.mvtodir(name, folder_id)
    except (ConfigNotFoundError, requests.exceptions.RequestException, RuntimeError, ValueError) as error:
        click.echo('Error: %s' % str(error), err=True)
        sys.exit(1)
def delete(cls, name, on_cloud, yes):
    """Deletes a data storage, asking for confirmation unless ``yes`` is set.

    ``on_cloud`` additionally removes the underlying cloud bucket; a declined
    confirmation aborts, and API failures exit with code 1.
    """
    if not yes:
        if on_cloud:
            prompt = ('Are you sure you want to delete datastorage {} '
                      'and also delete it from a cloud?'.format(name))
        else:
            prompt = 'Are you sure you want to delete datastorage {}?'.format(name)
        click.confirm(prompt, abort=True)
    try:
        DataStorage.delete(name, on_cloud)
    except (ConfigNotFoundError, requests.exceptions.RequestException, RuntimeError, ValueError) as error:
        click.echo('Error: %s' % str(error), err=True)
        sys.exit(1)
def refresh():
    """Fetches fresh temporary credentials for the current transfer.

    Closes over source_bucket_id/destination_bucket_id/command/versioning
    from the enclosing scope and returns the credential mapping
    (presumably consumed by a refreshable-credentials provider - the keys
    match botocore's expected metadata; confirm against the caller).
    """
    fresh = DataStorage.get_temporary_credentials(source_bucket_id, destination_bucket_id,
                                                  command, versioning=versioning)
    return {
        'access_key': fresh.access_key_id,
        'secret_key': fresh.secret_key,
        'token': fresh.session_token,
        'expiry_time': fresh.expiration,
        'region_name': fresh.region,
    }
def test_save_storage(self, mock):
    """Checks that DataStorage.save posts to datastorage/save and parses the response."""
    policy_payload = mock_storage_policy(self.backup_duration, self.lts_duration,
                                         self.sts_duration, self.versioning)
    mock.post(mocked_url("datastorage/save"),
              text=mock_datastorage(self.storage_id, self.name, self.path,
                                    self.type, policy_payload))
    actual = DataStorage.save(self.name, self.path, self.description,
                              self.sts_duration, self.lts_duration, self.versioning,
                              self.backup_duration, self.type,
                              self.parent_folder_id, self.on_cloud)
    expected_policy = build_storage_policy(self.backup_duration, self.lts_duration,
                                           self.sts_duration, self.versioning)
    expected = build_storage_model(identifier=self.storage_id, name=self.name,
                                   path=self.path, storage_type=self.type,
                                   policy=expected_policy)
    assert_storages(actual, expected)
def view_acl(cls, identifier, object_type):
    """ View object permissions """
    try:
        # Translate the user-supplied name into the numeric/internal id the
        # permissions API expects for the given object type.
        if object_type == 'pipeline':
            identifier = Pipeline.get(identifier, load_storage_rules=False,
                                      load_run_parameters=False,
                                      load_versions=False).identifier
        elif object_type == 'folder':
            identifier = Folder.load(identifier).id
        elif object_type == 'data_storage':
            identifier = DataStorage.get(identifier).identifier
        permissions_list = User.get_permissions(identifier, object_type)
        if not permissions_list:
            click.echo('No user permissions are configured')
            return
        permissions_table = prettytable.PrettyTable()
        permissions_table.field_names = ["SID", "Principal", "Allow", "Deny"]
        permissions_table.align = "r"
        for permission in permissions_list:
            permissions_table.add_row([permission.name,
                                       permission.principal,
                                       permission.get_allowed_permissions_description(),
                                       permission.get_denied_permissions_description()])
        click.echo(permissions_table)
        click.echo()
    except ConfigNotFoundError as config_not_found_error:
        click.echo(str(config_not_found_error), err=True)
    except requests.exceptions.RequestException as http_error:
        click.echo('Http error: {}'.format(str(http_error)), err=True)
    # RuntimeError and ValueError were handled identically - one clause.
    except (RuntimeError, ValueError) as error:
        click.echo('Error: {}'.format(str(error)), err=True)
def test_policy(self, mock):
    """Checks that DataStorage.policy finds the storage and posts the updated policy."""
    payload = mock_datastorage(self.storage_id, self.name, self.path, self.type,
                               mock_storage_policy(self.backup_duration, self.lts_duration,
                                                   self.sts_duration, self.versioning))
    mock.get(mocked_url("datastorage/find"), text=payload)
    mock.post(mocked_url("datastorage/policy"), text=payload)
    actual = DataStorage.policy(self.name, self.sts_duration, self.lts_duration,
                                self.backup_duration, self.versioning)
    expected = build_storage_model(identifier=self.storage_id, name=self.name,
                                   path=self.path, storage_type=self.type,
                                   policy=build_storage_policy(self.backup_duration,
                                                               self.lts_duration,
                                                               self.sts_duration,
                                                               self.versioning))
    assert_storages(actual, expected)
def storage_list(cls, path, show_details, show_versions, recursive, page, show_all):
    """Lists storage contents.

    :param path: storage URI to list; when falsy, all storages are listed briefly
    :param show_details: render a detailed table instead of bare names
    :param show_versions: include object versions (requires storage versioning)
    :param recursive: list recursively
    :param page: page size for listing
    :param show_all: do not stop at the page limit
    """
    if not path:
        # No argument - list brief details of all buckets.
        cls.__print_data_storage_contents(None, None, show_details, recursive, show_all=show_all)
        return
    try:
        root_bucket, original_path = DataStorage.load_from_uri(path)
    except (ConfigNotFoundError, requests.exceptions.RequestException, RuntimeError, ValueError) as error:
        click.echo('Error: %s' % str(error), err=True)
        sys.exit(1)
    # Check for a missing storage BEFORE touching its policy: previously the
    # versioning check ran first and dereferenced 'root_bucket.policy' on a
    # potentially-None bucket, raising AttributeError instead of reporting
    # that the path was not found.
    if root_bucket is None:
        click.echo('Storage path "{}" was not found'.format(path), err=True)
        sys.exit(1)
    if show_versions and not root_bucket.policy.versioning_enabled:
        click.echo('Error: versioning is not enabled for storage.', err=True)
        sys.exit(1)
    relative_path = original_path if original_path != '/' else ''
    cls.__print_data_storage_contents(root_bucket, relative_path, show_details, recursive,
                                      page_size=page, show_versions=show_versions,
                                      show_all=show_all)
def set_acl(cls, identifier, object_type, sid, group, allow, deny, inherit):
    """ Set object permissions

    :param identifier: object name/id, resolved to the internal id per type
    :param object_type: one of 'pipeline', 'folder', 'data_storage'
    :param sid: user or group name whose permissions are modified
    :param group: True when ``sid`` names a group rather than a principal
    :param allow/deny/inherit: strings of permission letters ('r', 'w', 'x')
        to move into the corresponding group; each letter may appear in at
        most one of the three
    """
    try:
        # Translate the user-supplied name into the internal identifier.
        if object_type == 'pipeline':
            model = Pipeline.get(identifier, load_storage_rules=False,
                                 load_run_parameters=False, load_versions=False)
            identifier = model.identifier
        elif object_type == 'folder':
            model = Folder.load(identifier)
            identifier = model.id
        elif object_type == 'data_storage':
            model = DataStorage.get(identifier)
            identifier = model.identifier
        all_permissions = User.get_permissions(identifier, object_type)
        # Materialized as a list: the previous filter(...) result was passed
        # to len() and indexed, which breaks on Python 3 where 'filter'
        # returns a lazy iterator.
        user_permissions = [permission for permission in all_permissions
                            if permission.name.lower() == sid.lower()
                            and permission.principal != group]
        user_mask = 0
        if len(user_permissions) == 1:
            # Start from the existing mask so untouched bits are preserved.
            user_mask = user_permissions[0].mask
        if allow is None and deny is None and inherit is None:
            raise RuntimeError('You must specify at least one permission')
        # Per-permission bit layout: one 'allow' bit and one 'deny' bit;
        # 'inherit' clears both. 'group' is the mask covering both bits.
        permissions_masks = {
            'r': {'allow': 1, 'deny': 1 << 1, 'inherit': 0, 'group': 1 | 1 << 1},
            'w': {'allow': 1 << 2, 'deny': 1 << 3, 'inherit': 0, 'group': 1 << 2 | 1 << 3},
            'x': {'allow': 1 << 4, 'deny': 1 << 5, 'inherit': 0, 'group': 1 << 4 | 1 << 5}
        }

        def check_permission(permission):
            # A permission letter may be requested in at most one of the
            # allow/deny/inherit groups.
            exists_in_allow = allow is not None and permission.lower() in allow.lower()
            exists_in_deny = deny is not None and permission.lower() in deny.lower()
            exists_in_inherit = inherit is not None and permission.lower() in inherit.lower()
            if exists_in_allow + exists_in_deny + exists_in_inherit > 1:
                raise RuntimeError(
                    'You cannot set permission (\'{}\') in multiple groups'.format(permission))

        check_permission('r')
        check_permission('w')
        check_permission('x')

        def modify_permissions_group(mask, permissions_group_mask, permission_mask):
            # Clear this permission's two bits, then set the requested bit(s).
            permissions_clear_mask = (1 | 1 << 1 | 1 << 2 | 1 << 3 | 1 << 4 | 1 << 5) \
                ^ permissions_group_mask
            return (mask & permissions_clear_mask) | permission_mask

        def modify_permissions(mask, permissions_group_name, permissions):
            if permissions is not None:
                for permission in permissions:
                    if permission.lower() not in permissions_masks:
                        raise RuntimeError('Unknown permission \'{}\''.format(permission))
                    else:
                        permissions_group_mask = permissions_masks[permission.lower()]['group']
                        permission_mask = permissions_masks[permission.lower()][permissions_group_name]
                        mask = modify_permissions_group(mask, permissions_group_mask, permission_mask)
            return mask

        user_mask = modify_permissions(user_mask, 'allow', allow)
        user_mask = modify_permissions(user_mask, 'deny', deny)
        user_mask = modify_permissions(user_mask, 'inherit', inherit)
        User.grant_permission(identifier, object_type, sid, not group, user_mask)
        click.echo('Permissions set')
    except ConfigNotFoundError as config_not_found_error:
        click.echo(str(config_not_found_error), err=True)
    except requests.exceptions.RequestException as http_error:
        click.echo('Http error: {}'.format(str(http_error)), err=True)
    except RuntimeError as runtime_error:
        click.echo('Error: {}'.format(str(runtime_error)), err=True)
    except ValueError as value_error:
        click.echo('Error: {}'.format(str(value_error)), err=True)
def __print_data_storage_contents(cls, bucket_model, relative_path, show_details, recursive,
                                  page_size=None, show_versions=False, show_all=False):
    """Prints the contents of one storage, or brief details of all storages.

    :param bucket_model: storage to list; None lists every available storage
    :param relative_path: path inside the storage to list from
    :param show_details: render a detailed table instead of bare names
    :param recursive: list recursively
    :param page_size: page size passed to the listing manager
    :param show_versions: append a Version column and per-version rows
    :param show_all: do not stop at the page limit
    """
    items = []
    header = None
    if bucket_model is not None:
        try:
            wrapper = S3BucketWrapper(bucket_model, relative_path)
            manager = S3BucketOperations.get_list_manager(wrapper, show_versions=show_versions)
            items = manager.list_items(relative_path, recursive=recursive,
                                       page_size=page_size, show_all=show_all)
        except (ConfigNotFoundError, requests.exceptions.RequestException,
                RuntimeError, ValueError) as error:
            click.echo('Error: %s' % str(error), err=True)
            sys.exit(1)
    else:
        # If no argument is specified - list brief details of all buckets
        try:
            items = list(DataStorage.list())
            if not items:
                click.echo("No datastorages available.")
                sys.exit(0)
        except (ConfigNotFoundError, requests.exceptions.RequestException,
                RuntimeError, ValueError) as error:
            click.echo('Error: %s' % str(error), err=True)
            sys.exit(1)
    # NOTE(review): 'header' is never reassigned above, so this branch looks
    # unreachable from here - possibly a leftover; confirm before removing.
    if recursive and header is not None:
        click.echo(header)
    if show_details:
        items_table = prettytable.PrettyTable()
        fields = ["Type", "Labels", "Modified", "Size", "Name"]
        if show_versions:
            fields.append("Version")
        items_table.field_names = fields
        items_table.align = "l"
        items_table.border = False
        items_table.padding_width = 2
        items_table.align['Size'] = 'r'
        for item in items:
            name = item.name
            changed = ''
            size = ''
            labels = ''
            # Storage entries of type 's3' are shown by their full path.
            if item.type is not None and item.type.lower() == 's3':
                name = item.path
            if item.changed is not None:
                if bucket_model is None:
                    # need to wrap into datetime since bucket listing returns str
                    item_datetime = datetime.datetime.strptime(item.changed, '%Y-%m-%d %H:%M:%S')
                else:
                    item_datetime = item.changed
                changed = item_datetime.strftime('%Y-%m-%d %H:%M:%S')
            if item.size is not None:
                size = item.size
            if item.labels is not None and len(item.labels) > 0:
                labels = ', '.join(map(lambda i: i.value, item.labels))
            # Delete markers are rendered as '-File'.
            item_type = "-File" if item.delete_marker else item.type
            row = [item_type, labels, changed, size, name]
            if show_versions:
                # The latest-state row carries no version label of its own.
                row.append('')
            items_table.add_row(row)
            if show_versions and item.type == 'File':
                for version in item.versions:
                    version_type = "-File" if version.delete_marker else "+File"
                    version_label = "{} (latest)".format(version.version) \
                        if version.latest else version.version
                    labels = ', '.join(map(lambda i: i.value, version.labels))
                    size = '' if version.size is None else version.size
                    row = [version_type, labels,
                           version.changed.strftime('%Y-%m-%d %H:%M:%S'),
                           size, name, version_label]
                    items_table.add_row(row)
        click.echo(items_table)
        click.echo()
    else:
        # Brief mode: tab-separated paths on a single line.
        for item in items:
            click.echo('{}\t\t'.format(item.path), nl=False)
        click.echo()
def policy(cls, storage_name, sts_duration, lts_duration, backup_duration, versioning):
    """Updates the lifecycle/backup/versioning policy of a data storage.

    API failures are printed to stderr and terminate with exit code 1.
    """
    try:
        DataStorage.policy(storage_name, sts_duration, lts_duration,
                           backup_duration, versioning)
    except (ConfigNotFoundError, requests.exceptions.RequestException, RuntimeError, ValueError) as error:
        click.echo(str(error), err=True)
        sys.exit(1)