class Arguments:
    """GraphQL mutation arguments: the UUID of the comment to act on."""

    comment = graphene.UUID(required=True)
class Queries(graphene.ObjectType):
    ''' All available GraphQL queries. '''

    # -- Field declarations -------------------------------------------------

    # super-admin only
    agent = graphene.Field(
        Agent,
        agent_id=graphene.String(required=True))

    # super-admin only
    agent_list = graphene.Field(
        AgentList,
        limit=graphene.Int(required=True),
        offset=graphene.Int(required=True),
        # ordering customization
        order_key=graphene.String(),
        order_asc=graphene.Boolean(),
        # filters
        scaling_group=graphene.String(),
        status=graphene.String(),
    )

    # super-admin only
    agents = graphene.List(  # legacy non-paginated list
        Agent,
        scaling_group=graphene.String(),
        status=graphene.String())

    domain = graphene.Field(Domain, name=graphene.String())

    # super-admin only
    domains = graphene.List(Domain, is_active=graphene.Boolean())

    group = graphene.Field(Group, id=graphene.UUID(required=True))
    groups = graphene.List(Group, domain_name=graphene.String(),
                           is_active=graphene.Boolean())

    image = graphene.Field(Image, reference=graphene.String(required=True))
    images = graphene.List(
        Image,
        is_installed=graphene.Boolean(),
        is_operation=graphene.Boolean(),
    )

    user = graphene.Field(User, domain_name=graphene.String(),
                          email=graphene.String())
    user_from_uuid = graphene.Field(User, domain_name=graphene.String(),
                                    user_id=graphene.ID())
    users = graphene.List(  # legacy non-paginated list
        User,
        domain_name=graphene.String(),
        group_id=graphene.UUID(),
        is_active=graphene.Boolean(),
        status=graphene.String())
    user_list = graphene.Field(
        UserList,
        limit=graphene.Int(required=True),
        offset=graphene.Int(required=True),
        # ordering customization
        order_key=graphene.String(),
        order_asc=graphene.Boolean(),
        # filters
        domain_name=graphene.String(),
        group_id=graphene.UUID(),
        is_active=graphene.Boolean(),
        status=graphene.String())

    keypair = graphene.Field(KeyPair, domain_name=graphene.String(),
                             access_key=graphene.String())
    keypairs = graphene.List(  # legacy non-paginated list
        KeyPair,
        domain_name=graphene.String(),
        email=graphene.String(),
        is_active=graphene.Boolean())
    keypair_list = graphene.Field(
        KeyPairList,
        limit=graphene.Int(required=True),
        offset=graphene.Int(required=True),
        # ordering customization
        order_key=graphene.String(),
        order_asc=graphene.Boolean(),
        # filters
        domain_name=graphene.String(),
        email=graphene.String(),
        is_active=graphene.Boolean())

    # NOTE: maybe add keypairs_from_user_id?

    keypair_resource_policy = graphene.Field(KeyPairResourcePolicy,
                                             name=graphene.String())
    keypair_resource_policies = graphene.List(KeyPairResourcePolicy)

    resource_preset = graphene.Field(ResourcePreset, name=graphene.String())
    resource_presets = graphene.List(ResourcePreset)

    # super-admin only
    scaling_group = graphene.Field(ScalingGroup, name=graphene.String())

    # super-admin only
    scaling_groups = graphene.List(ScalingGroup, name=graphene.String(),
                                   is_active=graphene.Boolean())

    # super-admin only
    scaling_groups_for_domain = graphene.List(
        ScalingGroup,
        domain=graphene.String(required=True),
        is_active=graphene.Boolean())

    # super-admin only
    scaling_groups_for_user_group = graphene.List(
        ScalingGroup,
        user_group=graphene.String(required=True),
        is_active=graphene.Boolean())

    # super-admin only
    scaling_groups_for_keypair = graphene.List(
        ScalingGroup,
        access_key=graphene.String(required=True),
        is_active=graphene.Boolean())

    vfolder_list = graphene.Field(  # paginated list
        VirtualFolderList,
        limit=graphene.Int(required=True),
        offset=graphene.Int(required=True),
        # ordering customization
        order_key=graphene.String(),
        order_asc=graphene.Boolean(),
        # filters
        domain_name=graphene.String(),
        group_id=graphene.UUID(),
        access_key=graphene.String())  # must be empty for user requests

    vfolders = graphene.List(  # legacy non-paginated list
        VirtualFolder,
        domain_name=graphene.String(),
        group_id=graphene.String(),
        access_key=graphene.String())  # must be empty for user requests

    compute_session = graphene.Field(
        ComputeSession,
        id=graphene.UUID(required=True),
    )

    compute_container = graphene.Field(
        ComputeContainer,
        id=graphene.UUID(required=True),
    )

    compute_session_list = graphene.Field(
        ComputeSessionList,
        limit=graphene.Int(required=True),
        offset=graphene.Int(required=True),
        # ordering customization
        order_key=graphene.String(),
        order_asc=graphene.Boolean(),
        # filters
        domain_name=graphene.String(),
        group_id=graphene.String(),
        access_key=graphene.String(),
        status=graphene.String(),
    )

    compute_container_list = graphene.Field(
        ComputeContainerList,
        limit=graphene.Int(required=True),
        offset=graphene.Int(required=True),
        # ordering customization
        order_key=graphene.String(),
        order_asc=graphene.Boolean(),
        # filters
        session_id=graphene.ID(required=True),
        role=graphene.String(),
    )

    legacy_compute_session_list = graphene.Field(
        LegacyComputeSessionList,
        limit=graphene.Int(required=True),
        offset=graphene.Int(required=True),
        # ordering customization
        order_key=graphene.String(),
        order_asc=graphene.Boolean(),
        # filters
        domain_name=graphene.String(),
        group_id=graphene.String(),
        access_key=graphene.String(),
        status=graphene.String(),
    )

    legacy_compute_session = graphene.Field(
        LegacyComputeSession,
        sess_id=graphene.String(required=True),
        domain_name=graphene.String(),
        access_key=graphene.String(),
    )

    # -- Resolvers ----------------------------------------------------------

    @staticmethod
    @privileged_query(UserRole.SUPERADMIN)
    async def resolve_agent(executor, info, agent_id):
        """Fetch a single agent by its ID (superadmin only)."""
        manager = info.context['dlmgr']
        loader = manager.get_loader('Agent', status=None)
        return await loader.load(agent_id)

    @staticmethod
    @privileged_query(UserRole.SUPERADMIN)
    async def resolve_agents(executor, info, *, scaling_group=None, status=None):
        """Legacy non-paginated agent listing (superadmin only)."""
        return await Agent.load_all(info.context,
                                    scaling_group=scaling_group,
                                    status=status)

    @staticmethod
    @privileged_query(UserRole.SUPERADMIN)
    async def resolve_agent_list(
        executor, info, limit, offset, *,
        scaling_group=None,
        status=None,
        order_key=None, order_asc=None,
    ):
        """Paginated agent listing with total count (superadmin only)."""
        total_count = await Agent.load_count(
            info.context,
            scaling_group=scaling_group,
            status=status,
        )
        agent_list = await Agent.load_slice(
            info.context, limit, offset,
            scaling_group=scaling_group,
            status=status,
            order_key=order_key,
            order_asc=order_asc,
        )
        return AgentList(agent_list, total_count)

    @staticmethod
    async def resolve_domain(executor, info, *, name=None):
        """Fetch a domain; non-superadmins may only see their own domain."""
        manager = info.context['dlmgr']
        name = info.context['user']['domain_name'] if name is None else name
        if info.context['user']['role'] != UserRole.SUPERADMIN:
            if name != info.context['user']['domain_name']:
                # prevent querying other domains if not superadmin
                raise GenericNotFound('no such domain')
        loader = manager.get_loader('Domain.by_name')
        return await loader.load(name)

    @staticmethod
    @privileged_query(UserRole.SUPERADMIN)
    async def resolve_domains(executor, info, *, is_active=None):
        """List all domains (superadmin only)."""
        return await Domain.load_all(info.context, is_active=is_active)

    @staticmethod
    async def resolve_group(executor, info, id):
        """Fetch a group by UUID, enforcing domain/membership boundaries."""
        client_role = info.context['user']['role']
        client_domain = info.context['user']['domain_name']
        client_user_id = info.context['user']['uuid']
        manager = info.context['dlmgr']
        loader = manager.get_loader('Group.by_id')
        group = await loader.load(id)
        if client_role == UserRole.SUPERADMIN:
            pass
        elif client_role == UserRole.ADMIN:
            if group.domain_name != client_domain:
                raise InsufficientPrivilege
        elif client_role == UserRole.USER:
            client_groups = await Group.get_groups_for_user(
                info.context, client_user_id)
            if group.id not in (g.id for g in client_groups):
                raise InsufficientPrivilege
        else:
            raise InvalidAPIParameters('Unknown client role')
        return group

    @staticmethod
    async def resolve_groups(executor, info, *, domain_name=None, is_active=None):
        """List groups visible to the caller (scope depends on role)."""
        client_role = info.context['user']['role']
        client_domain = info.context['user']['domain_name']
        client_user_id = info.context['user']['uuid']
        if client_role == UserRole.SUPERADMIN:
            pass
        elif client_role == UserRole.ADMIN:
            if domain_name is not None and domain_name != client_domain:
                raise InsufficientPrivilege
            domain_name = client_domain
        elif client_role == UserRole.USER:
            # Plain users only see the groups they belong to.
            return await Group.get_groups_for_user(info.context, client_user_id)
        else:
            raise InvalidAPIParameters('Unknown client role')
        return await Group.load_all(info.context,
                                    domain_name=domain_name,
                                    is_active=is_active)

    @staticmethod
    async def resolve_image(executor, info, reference):
        """Fetch a single image, filtered by the caller's domain allowance."""
        client_role = info.context['user']['role']
        client_domain = info.context['user']['domain_name']
        item = await Image.load_item(info.context, reference)
        if client_role == UserRole.SUPERADMIN:
            pass
        elif client_role in (UserRole.ADMIN, UserRole.USER):
            items = await Image.filter_allowed(info.context, [item], client_domain)
            if not items:
                raise ImageNotFound
            item = items[0]
        else:
            raise InvalidAPIParameters('Unknown client role')
        return item

    @staticmethod
    async def resolve_images(executor, info, is_installed=None, is_operation=False):
        """List images, filtered by the caller's domain allowance."""
        client_role = info.context['user']['role']
        client_domain = info.context['user']['domain_name']
        items = await Image.load_all(info.context,
                                     is_installed=is_installed,
                                     is_operation=is_operation)
        if client_role == UserRole.SUPERADMIN:
            pass
        elif client_role in (UserRole.ADMIN, UserRole.USER):
            items = await Image.filter_allowed(
                info.context, items, client_domain,
                is_installed=is_installed,
                is_operation=is_operation)
        else:
            raise InvalidAPIParameters('Unknown client role')
        return items

    @staticmethod
    @scoped_query(autofill_user=True, user_key='email')
    async def resolve_user(executor, info, *, domain_name=None, email=None):
        """Fetch a user by email (defaults to the caller via scoped_query)."""
        manager = info.context['dlmgr']
        loader = manager.get_loader('User.by_email', domain_name=domain_name)
        return await loader.load(email)

    @staticmethod
    @scoped_query(autofill_user=True, user_key='user_id')
    async def resolve_user_from_uuid(executor, info, *,
                                     domain_name=None, user_id=None):
        """Fetch a user by UUID (defaults to the caller via scoped_query)."""
        manager = info.context['dlmgr']
        loader = manager.get_loader('User.by_uuid', domain_name=domain_name)
        # user_id is retrieved as string since it's a GraphQL's generic ID field
        user_uuid = uuid.UUID(user_id) if isinstance(user_id, str) else user_id
        return await loader.load(user_uuid)

    @staticmethod
    async def resolve_users(executor, info, *, domain_name=None, group_id=None,
                            is_active=None, status=None):
        """Legacy non-paginated user listing (admins only)."""
        from .user import UserRole  # deferred import (avoids circular import)
        client_role = info.context['user']['role']
        client_domain = info.context['user']['domain_name']
        if client_role == UserRole.SUPERADMIN:
            pass
        elif client_role == UserRole.ADMIN:
            if domain_name is not None and domain_name != client_domain:
                raise InsufficientPrivilege
            domain_name = client_domain
        elif client_role == UserRole.USER:
            # Users cannot query other users.
            raise InsufficientPrivilege()
        else:
            raise InvalidAPIParameters('Unknown client role')
        return await User.load_all(info.context,
                                   domain_name=domain_name,
                                   group_id=group_id,
                                   is_active=is_active,
                                   status=status,
                                   limit=100)

    @staticmethod
    async def resolve_user_list(
        executor, info, limit, offset, *,
        domain_name=None,
        group_id=None,
        is_active=None,
        status=None,
        order_key=None, order_asc=None,
    ):
        """Paginated user listing with total count (admins only)."""
        from .user import UserRole  # deferred import (avoids circular import)
        client_role = info.context['user']['role']
        client_domain = info.context['user']['domain_name']
        if client_role == UserRole.SUPERADMIN:
            pass
        elif client_role == UserRole.ADMIN:
            if domain_name is not None and domain_name != client_domain:
                raise InsufficientPrivilege
            domain_name = client_domain
        elif client_role == UserRole.USER:
            # Users cannot query other users.
            raise InsufficientPrivilege()
        else:
            raise InvalidAPIParameters('Unknown client role')
        total_count = await User.load_count(
            info.context,
            domain_name=domain_name,
            group_id=group_id,
            is_active=is_active,
            status=status,
        )
        user_list = await User.load_slice(
            info.context, limit, offset,
            domain_name=domain_name,
            group_id=group_id,
            is_active=is_active,
            status=status,
            order_key=order_key,
            order_asc=order_asc,
        )
        return UserList(user_list, total_count)

    @staticmethod
    @scoped_query(autofill_user=True, user_key='access_key')
    async def resolve_keypair(executor, info, *, domain_name=None, access_key=None):
        """Fetch a keypair by access key (defaults to the caller's)."""
        manager = info.context['dlmgr']
        loader = manager.get_loader('KeyPair.by_ak', domain_name=domain_name)
        return await loader.load(access_key)

    @staticmethod
    @scoped_query(autofill_user=False, user_key='email')
    async def resolve_keypairs(executor, info, *, domain_name=None, email=None,
                               is_active=None):
        """Legacy non-paginated keypair listing, optionally scoped by email."""
        if email is None:
            return await KeyPair.load_all(info.context,
                                          domain_name=domain_name,
                                          is_active=is_active,
                                          limit=100)
        else:
            manager = info.context['dlmgr']
            loader = manager.get_loader('KeyPair.by_email',
                                        domain_name=domain_name,
                                        is_active=is_active)
            return await loader.load(email)

    @staticmethod
    @scoped_query(autofill_user=False, user_key='email')
    async def resolve_keypair_list(
        executor, info, limit, offset, *,
        domain_name=None,
        email=None,
        is_active=None,
        order_key=None, order_asc=None,
    ):
        """Paginated keypair listing with total count."""
        total_count = await KeyPair.load_count(
            info.context,
            domain_name=domain_name,
            email=email,
            is_active=is_active,
        )
        keypair_list = await KeyPair.load_slice(
            info.context, limit, offset,
            domain_name=domain_name,
            email=email,
            is_active=is_active,
            order_key=order_key,
            order_asc=order_asc,
        )
        return KeyPairList(keypair_list, total_count)

    @staticmethod
    async def resolve_keypair_resource_policy(executor, info, name=None):
        """Fetch a resource policy by name, or the caller's own if omitted."""
        manager = info.context['dlmgr']
        client_access_key = info.context['access_key']
        if name is None:
            loader = manager.get_loader('KeyPairResourcePolicy.by_ak')
            return await loader.load(client_access_key)
        else:
            loader = manager.get_loader('KeyPairResourcePolicy.by_name')
            return await loader.load(name)

    @staticmethod
    async def resolve_keypair_resource_policies(executor, info):
        """List resource policies visible to the caller's role."""
        client_role = info.context['user']['role']
        client_access_key = info.context['access_key']
        if client_role == UserRole.SUPERADMIN:
            return await KeyPairResourcePolicy.load_all(info.context)
        elif client_role == UserRole.ADMIN:
            # TODO: filter resource policies by domains?
            return await KeyPairResourcePolicy.load_all(info.context)
        elif client_role == UserRole.USER:
            return await KeyPairResourcePolicy.load_all_user(
                info.context, client_access_key)
        else:
            raise InvalidAPIParameters('Unknown client role')

    @staticmethod
    async def resolve_resource_preset(executor, info, name):
        """Fetch a resource preset by name."""
        manager = info.context['dlmgr']
        loader = manager.get_loader('ResourcePreset.by_name')
        return await loader.load(name)

    @staticmethod
    async def resolve_resource_presets(executor, info):
        """List all resource presets."""
        return await ResourcePreset.load_all(info.context)

    @staticmethod
    @privileged_query(UserRole.SUPERADMIN)
    async def resolve_scaling_group(executor, info, name):
        """Fetch a scaling group by name (superadmin only)."""
        manager = info.context['dlmgr']
        loader = manager.get_loader('ScalingGroup.by_name')
        return await loader.load(name)

    @staticmethod
    @privileged_query(UserRole.SUPERADMIN)
    async def resolve_scaling_groups(executor, info, is_active=None):
        """List all scaling groups (superadmin only)."""
        return await ScalingGroup.load_all(info.context, is_active=is_active)

    @staticmethod
    @privileged_query(UserRole.SUPERADMIN)
    async def resolve_scaling_groups_for_domain(executor, info, domain,
                                                is_active=None):
        """List scaling groups allowed for a domain (superadmin only)."""
        return await ScalingGroup.load_by_domain(
            info.context, domain, is_active=is_active)

    # FIX: renamed from `resolve_scaling_groups_for_group` — graphene binds
    # resolvers by the `resolve_<field_name>` convention, and the field is
    # declared as `scaling_groups_for_user_group`, so the old name was never
    # invoked and the field silently resolved to null.
    @staticmethod
    @privileged_query(UserRole.SUPERADMIN)
    async def resolve_scaling_groups_for_user_group(executor, info, user_group,
                                                    is_active=None):
        """List scaling groups allowed for a user group (superadmin only)."""
        return await ScalingGroup.load_by_group(
            info.context, user_group, is_active=is_active)

    @staticmethod
    @privileged_query(UserRole.SUPERADMIN)
    async def resolve_scaling_groups_for_keypair(executor, info, access_key,
                                                 is_active=None):
        """List scaling groups allowed for a keypair (superadmin only)."""
        return await ScalingGroup.load_by_keypair(
            info.context, access_key, is_active=is_active)

    @staticmethod
    @scoped_query(autofill_user=False, user_key='user_id')
    async def resolve_vfolder_list(executor, info, limit, offset, *,
                                   domain_name=None, group_id=None, user_id=None,
                                   order_key=None, order_asc=None):
        """Paginated virtual folder listing with total count."""
        # TODO: adopt the generic queryfilter language
        total_count = await VirtualFolder.load_count(
            info.context,
            domain_name=domain_name,  # scope
            group_id=group_id,        # scope
            user_id=user_id,          # scope
        )
        items = await VirtualFolder.load_slice(
            info.context, limit, offset,
            domain_name=domain_name,  # scope
            group_id=group_id,        # scope
            user_id=user_id,          # scope
            order_key=order_key,      # order
            order_asc=order_asc,      # order
        )
        return VirtualFolderList(items, total_count)

    @staticmethod
    @scoped_query(autofill_user=False, user_key='access_key')
    async def resolve_compute_container_list(
        executor, info, limit, offset, *,
        session_id=None,
        role=None,
        domain_name=None,
        group_id=None,
        access_key=None,
        order_key=None, order_asc=None,
    ):
        """Paginated container listing for a session with total count."""
        # TODO: adopt the generic queryfilter language
        total_count = await ComputeContainer.load_count(
            info.context,
            session_id,               # filter (mandatory)
            cluster_role=role,        # filter
            domain_name=domain_name,  # scope
            group_id=group_id,        # scope
            access_key=access_key,    # scope
        )
        items = await ComputeContainer.load_slice(
            info.context, limit, offset,  # slice
            session_id,               # filter (mandatory)
            cluster_role=role,        # filter
            domain_name=domain_name,  # scope
            group_id=group_id,        # scope
            access_key=access_key,    # scope
            order_key=order_key,      # order
            order_asc=order_asc,      # order
        )
        return ComputeContainerList(items, total_count)

    # FIX: the field declares its argument as `id=graphene.UUID(required=True)`
    # but the resolver took a positional `container_id`, so graphene's
    # keyword-based invocation raised TypeError.  Also accept the
    # `domain_name`/`access_key` keywords like the sibling
    # resolve_compute_session does, since the same @scoped_query decorator is
    # applied (it presumably injects them — the loader currently does not use
    # them, matching the original behavior).
    @staticmethod
    @scoped_query(autofill_user=False, user_key='access_key')
    async def resolve_compute_container(
        executor, info, id, *,
        domain_name=None,
        access_key=None,
    ):
        """Fetch a single container by its UUID."""
        # We need to check the group membership of the designated kernel,
        # but practically a user cannot guess the IDs of kernels launched
        # by other users and in other groups.
        # Let's just protect the domain/user boundary here.
        manager = info.context['dlmgr']
        loader = manager.get_loader('ComputeContainer.detail')
        return await loader.load(id)

    @staticmethod
    @scoped_query(autofill_user=False, user_key='access_key')
    async def resolve_compute_session_list(
        executor, info, limit, offset, *,
        domain_name=None,
        group_id=None,
        access_key=None,
        status=None,
        order_key=None, order_asc=None,
    ):
        """Paginated session listing with total count."""
        total_count = await ComputeSession.load_count(
            info.context,
            status=status,            # filter
            domain_name=domain_name,  # scope
            group_id=group_id,        # scope
            access_key=access_key,    # scope
        )
        items = await ComputeSession.load_slice(
            info.context, limit, offset,  # slice
            status=status,            # filter
            domain_name=domain_name,  # scope
            group_id=group_id,        # scope
            access_key=access_key,    # scope
            order_key=order_key,      # order
            order_asc=order_asc,      # order
        )
        return ComputeSessionList(items, total_count)

    @staticmethod
    @scoped_query(autofill_user=False, user_key='access_key')
    async def resolve_compute_session(
        executor, info, id, *,
        domain_name=None,
        access_key=None,
    ):
        """Fetch a single session by its UUID."""
        # We need to check the group membership of the designated kernel,
        # but practically a user cannot guess the IDs of kernels launched
        # by other users and in other groups.
        # Let's just protect the domain/user boundary here.
        manager = info.context['dlmgr']
        loader = manager.get_loader('ComputeSession.detail',
                                    domain_name=domain_name,
                                    access_key=access_key)
        return await loader.load(id)

    @staticmethod
    @scoped_query(autofill_user=False, user_key='access_key')
    async def resolve_legacy_compute_session_list(
        executor, info, limit, offset, *,
        domain_name=None,
        group_id=None,
        access_key=None,
        status=None,
        order_key=None, order_asc=None,
    ):
        """Paginated legacy session listing with total count."""
        total_count = await LegacyComputeSession.load_count(
            info.context,
            domain_name=domain_name,
            group_id=group_id,
            access_key=access_key,
            status=status)
        items = await LegacyComputeSession.load_slice(
            info.context, limit, offset,
            domain_name=domain_name,
            group_id=group_id,
            access_key=access_key,
            status=status,
            order_key=order_key,
            order_asc=order_asc)
        return LegacyComputeSessionList(items, total_count)

    @staticmethod
    @scoped_query(autofill_user=False, user_key='access_key')
    async def resolve_legacy_compute_session(
        executor, info, sess_id, *,
        domain_name=None,
        access_key=None,
        status=None,
    ):
        """Fetch a legacy session by its (non-unique) session name."""
        # We need to check the group membership of the designated kernel,
        # but practically a user cannot guess the IDs of kernels launched
        # by other users and in other groups.
        # Let's just protect the domain/user boundary here.
        manager = info.context['dlmgr']
        loader = manager.get_loader('LegacyComputeSession.detail',
                                    domain_name=domain_name,
                                    access_key=access_key,
                                    status=status)
        matches = await loader.load(sess_id)
        if len(matches) == 0:
            return None
        elif len(matches) == 1:
            return matches[0]
        else:
            raise TooManyKernelsFound
class Arguments:
    """GraphQL mutation arguments: the UUID of the plan to act on."""

    plan_id = graphene.UUID(required=True)
class Arguments:
    """GraphQL mutation arguments: the clone-source UUID, exposed as `id`."""

    # Exposed in the schema under the name 'id' rather than 'target_id'.
    target_id = graphene.UUID(
        name='id',
        required=True,
        description="ID of the target to be cloned",
    )
class Query(graphene.ObjectType):
    # Root GraphQL query type for the photo library: libraries, gear
    # (cameras/lenses), EXIF value lists, photos, tags, and settings.
    # NOTE(review): @login_required is applied inconsistently (only on
    # all_cameras / all_photos / map_photos resolvers) — confirm whether the
    # other resolvers are protected elsewhere (e.g. middleware).
    all_libraries = graphene.List(LibraryType)
    camera = graphene.Field(CameraType, id=graphene.UUID(), make=graphene.String(), model=graphene.String())
    all_cameras = graphene.List(CameraType, library_id=graphene.UUID())
    lens = graphene.Field(LensType, id=graphene.UUID(), name=graphene.String())
    all_lenses = graphene.List(LensType, library_id=graphene.UUID())
    # Distinct EXIF value lists, each scoped to one library.
    all_apertures = graphene.List(graphene.Float, library_id=graphene.UUID())
    all_exposures = graphene.List(graphene.String, library_id=graphene.UUID())
    all_iso_speeds = graphene.List(graphene.Int, library_id=graphene.UUID())
    all_focal_lengths = graphene.List(graphene.Float, library_id=graphene.UUID())
    all_metering_modes = graphene.List(graphene.String, library_id=graphene.UUID())
    all_drive_modes = graphene.List(graphene.String, library_id=graphene.UUID())
    all_shooting_modes = graphene.List(graphene.String, library_id=graphene.UUID())
    photo = graphene.Field(PhotoNode, id=graphene.UUID())
    all_photos = DjangoFilterConnectionField(PhotoNode, filterset_class=PhotoFilter)
    map_photos = DjangoFilterConnectionField(PhotoNode, filterset_class=PhotoFilter)
    # Tag lists by type code ('L'ocation, 'O'bject, 'P'erson, 'C'olor,
    # 'S'tyle, 'G'eneric); multi_filter narrows to tags used by a filtered
    # photo set.
    all_location_tags = graphene.List(LocationTagType, library_id=graphene.UUID(), multi_filter=graphene.String())
    all_object_tags = graphene.List(ObjectTagType, library_id=graphene.UUID(), multi_filter=graphene.String())
    all_person_tags = graphene.List(PersonTagType, library_id=graphene.UUID(), multi_filter=graphene.String())
    all_color_tags = graphene.List(ColorTagType, library_id=graphene.UUID(), multi_filter=graphene.String())
    all_style_tags = graphene.List(StyleTagType, library_id=graphene.UUID(), multi_filter=graphene.String())
    # NOTE(review): generic ('G') tags reuse LocationTagType — looks like a
    # copy-paste; confirm a dedicated type is not intended.
    all_generic_tags = graphene.List(LocationTagType, library_id=graphene.UUID(), multi_filter=graphene.String())
    library_setting = graphene.Field(LibrarySetting, library_id=graphene.UUID())
    photo_file_metadata = graphene.Field(PhotoMetadataFields, photo_file_id=graphene.UUID())

    def resolve_all_libraries(self, info, **kwargs):
        """Return the libraries the requesting user belongs to."""
        user = info.context.user
        return Library.objects.filter(users__user=user)

    def resolve_camera(self, info, **kwargs):
        """Look up one camera by primary key, or by (make, model) pair."""
        id = kwargs.get('id')
        make = kwargs.get('make')
        model = kwargs.get('model')
        if id is not None:
            # NOTE(review): .get() raises Camera.DoesNotExist for unknown ids
            # rather than returning None — confirm this is intended.
            return Camera.objects.get(pk=id)
        if make is not None and model is not None:
            return Camera.objects.get(make=make, model=model)
        return None

    @login_required
    def resolve_all_cameras(self, info, **kwargs):
        """Return cameras of one library, limited to the user's libraries."""
        user = info.context.user
        return Camera.objects.filter(library__users__user=user, library__id=kwargs.get('library_id'))

    def resolve_lens(self, info, **kwargs):
        """Look up one lens by primary key, or by exact name."""
        id = kwargs.get('id')
        name = kwargs.get('name')
        if id is not None:
            return Lens.objects.get(pk=id)
        if name is not None:
            return Lens.objects.get(name=name)
        return None

    def resolve_all_lenses(self, info, **kwargs):
        """Return lenses of one library, limited to the user's libraries."""
        user = info.context.user
        return Lens.objects.filter(library__users__user=user, library__id=kwargs.get('library_id'))

    def resolve_all_apertures(self, info, **kwargs):
        """Distinct, sorted aperture values used by photos in the library."""
        user = info.context.user
        return Photo.objects.filter(
            library__users__user=user,
            library__id=kwargs.get('library_id')).exclude(
                aperture__isnull=True).values_list(
                    'aperture', flat=True).distinct().order_by('aperture')

    def resolve_all_exposures(self, info, **kwargs):
        """Distinct exposure strings, sorted numerically by duration."""
        user = info.context.user
        photo_list = Photo.objects.filter(
            library__users__user=user,
            library__id=kwargs.get('library_id')).exclude(
                exposure__isnull=True).values_list(
                    'exposure', flat=True).distinct().order_by('exposure')
        # Sort fractions like "1/250" by their numeric value.
        # NOTE(review): for values without '/', the key becomes x/x == 1.0 for
        # every such entry — confirm whether plain float(i) was intended.
        return sorted(photo_list, key=lambda i: float(i.split('/')[0]) / float(
            i.split('/')[1] if '/' in i else i))

    def resolve_all_iso_speeds(self, info, **kwargs):
        """Distinct, sorted ISO speed values used by photos in the library."""
        user = info.context.user
        return Photo.objects.filter(
            library__users__user=user,
            library__id=kwargs.get('library_id')).exclude(
                iso_speed__isnull=True).values_list(
                    'iso_speed', flat=True).distinct().order_by('iso_speed')

    def resolve_all_focal_lengths(self, info, **kwargs):
        """Distinct, sorted focal lengths used by photos in the library."""
        user = info.context.user
        return Photo.objects.filter(
            library__users__user=user,
            library__id=kwargs.get('library_id')).exclude(
                focal_length__isnull=True).values_list(
                    'focal_length', flat=True).distinct().order_by('focal_length')

    def resolve_all_metering_modes(self, info, **kwargs):
        """Distinct, sorted metering modes used by photos in the library."""
        user = info.context.user
        return Photo.objects.filter(
            library__users__user=user,
            library__id=kwargs.get('library_id')).exclude(
                metering_mode__isnull=True).values_list(
                    'metering_mode', flat=True).distinct().order_by('metering_mode')

    def resolve_all_drive_modes(self, info, **kwargs):
        """Distinct, sorted drive modes used by photos in the library."""
        user = info.context.user
        return Photo.objects.filter(
            library__users__user=user,
            library__id=kwargs.get('library_id')).exclude(
                drive_mode__isnull=True).values_list(
                    'drive_mode', flat=True).distinct().order_by('drive_mode')

    def resolve_all_shooting_modes(self, info, **kwargs):
        """Distinct, sorted shooting modes used by photos in the library."""
        user = info.context.user
        return Photo.objects.filter(
            library__users__user=user,
            library__id=kwargs.get('library_id')).exclude(
                shooting_mode__isnull=True).values_list(
                    'shooting_mode', flat=True).distinct().order_by('shooting_mode')

    def resolve_photo(self, info, **kwargs):
        """Look up one photo by primary key; None when no id is given."""
        id = kwargs.get('id')
        if id is not None:
            return Photo.objects.get(pk=id)
        return None

    @login_required
    def resolve_all_photos(self, info, **kwargs):
        """Return every photo in any of the requesting user's libraries."""
        user = info.context.user
        return Photo.objects.filter(library__users__user=user)

    @login_required
    def resolve_map_photos(self, info, **kwargs):
        """Return the user's photos that carry GPS coordinates."""
        user = info.context.user
        # NOTE(review): exclude() with both kwargs only drops rows where
        # latitude AND longitude are null; a photo with exactly one null
        # coordinate still passes — confirm whether either-null should be
        # excluded for map display.
        return Photo.objects.filter(library__users__user=user).exclude(
            latitude__isnull=True, longitude__isnull=True)

    def resolve_all_location_tags(self, info, **kwargs):
        """Location ('L') tags of a library, optionally narrowed by filters."""
        user = info.context.user
        if kwargs.get('multi_filter'):
            if not kwargs.get('library_id'):
                raise GraphQLError('library_id not supplied!')
            filters = kwargs.get('multi_filter').split(' ')
            photos_list = filter_photos_queryset(
                filters, Photo.objects.filter(library__users__user=user),
                kwargs.get('library_id'))
            return Tag.objects.filter(
                library__users__user=user,
                library__id=kwargs.get('library_id'), type='L',
                photo_tags__photo__in=photos_list).distinct()
        return Tag.objects.filter(library__users__user=user, library__id=kwargs.get('library_id'), type='L')

    def resolve_all_object_tags(self, info, **kwargs):
        """Object ('O') tags of a library, optionally narrowed by filters."""
        user = info.context.user
        if kwargs.get('multi_filter'):
            if not kwargs.get('library_id'):
                raise GraphQLError('library_id not supplied!')
            filters = kwargs.get('multi_filter').split(' ')
            photos_list = filter_photos_queryset(
                filters, Photo.objects.filter(library__users__user=user),
                kwargs.get('library_id'))
            return Tag.objects.filter(
                library__users__user=user,
                library__id=kwargs.get('library_id'), type='O',
                photo_tags__photo__in=photos_list).distinct()
        return Tag.objects.filter(library__users__user=user, library__id=kwargs.get('library_id'), type='O')

    def resolve_all_person_tags(self, info, **kwargs):
        """Person ('P') tags of a library, optionally narrowed by filters."""
        user = info.context.user
        if kwargs.get('multi_filter'):
            if not kwargs.get('library_id'):
                raise GraphQLError('library_id not supplied!')
            filters = kwargs.get('multi_filter').split(' ')
            photos_list = filter_photos_queryset(
                filters, Photo.objects.filter(library__users__user=user),
                kwargs.get('library_id'))
            return Tag.objects.filter(
                library__users__user=user,
                library__id=kwargs.get('library_id'), type='P',
                photo_tags__photo__in=photos_list).distinct()
        return Tag.objects.filter(library__users__user=user, library__id=kwargs.get('library_id'), type='P')

    def resolve_all_color_tags(self, info, **kwargs):
        """Color ('C') tags of a library, optionally narrowed by filters."""
        user = info.context.user
        if kwargs.get('multi_filter'):
            if not kwargs.get('library_id'):
                raise GraphQLError('library_id not supplied!')
            filters = kwargs.get('multi_filter').split(' ')
            photos_list = filter_photos_queryset(
                filters, Photo.objects.filter(library__users__user=user),
                kwargs.get('library_id'))
            return Tag.objects.filter(
                library__users__user=user,
                library__id=kwargs.get('library_id'), type='C',
                photo_tags__photo__in=photos_list).distinct()
        return Tag.objects.filter(library__users__user=user, library__id=kwargs.get('library_id'), type='C')

    def resolve_all_style_tags(self, info, **kwargs):
        """Style ('S') tags of a library, optionally narrowed by filters."""
        user = info.context.user
        if kwargs.get('multi_filter'):
            if not kwargs.get('library_id'):
                raise GraphQLError('library_id not supplied!')
            filters = kwargs.get('multi_filter').split(' ')
            photos_list = filter_photos_queryset(
                filters, Photo.objects.filter(library__users__user=user),
                kwargs.get('library_id'))
            return Tag.objects.filter(
                library__users__user=user,
                library__id=kwargs.get('library_id'), type='S',
                photo_tags__photo__in=photos_list).distinct()
        return Tag.objects.filter(library__users__user=user, library__id=kwargs.get('library_id'), type='S')

    def resolve_all_generic_tags(self, info, **kwargs):
        """Generic ('G') tags of a library, optionally narrowed by filters."""
        user = info.context.user
        if kwargs.get('multi_filter'):
            if not kwargs.get('library_id'):
                raise GraphQLError('library_id not supplied!')
            filters = kwargs.get('multi_filter').split(' ')
            photos_list = filter_photos_queryset(
                filters, Photo.objects.filter(library__users__user=user),
                kwargs.get('library_id'))
            return Tag.objects.filter(
                library__users__user=user,
                library__id=kwargs.get('library_id'), type='G',
                photo_tags__photo__in=photos_list).distinct()
        return Tag.objects.filter(library__users__user=user, library__id=kwargs.get('library_id'), type='G')

    def resolve_library_setting(self, info, **kwargs):
        """Api for library setting query."""
        # always pass a dictionary for `library_setting`
        user = info.context.user
        # Only library owners may read settings.
        libraries = Library.objects.filter(users__user=user, users__owner=True, id=kwargs.get('library_id'))
        if libraries:
            library_obj = libraries[0]
            # NOTE(review): assumes the library has at least one path; an
            # empty paths relation would raise IndexError here.
            library_path = library_obj.paths.all()[0]
            return {"library": library_obj, "source_folder": library_path.path}
        raise Exception('User is not the owner of library!')

    def resolve_photo_file_metadata(self, info, **kwargs):
        """Return metadata for photofile."""
        photo_file = PhotoFile.objects.filter(id=kwargs.get('photo_file_id'))
        # Only attempt to read metadata when the record exists and the file
        # is still present on disk.
        if photo_file and os.path.exists(photo_file[0].path):
            metadata = PhotoMetadata(photo_file[0].path)
            return {'data': metadata.get_all(), 'ok': True}
        return {'ok': False}
def __init__(self):
    """Configure the field: resolves a Policy selected by a required `id` UUID."""
    policy_id_arg = graphene.UUID(required=True, name='id')
    super().__init__(Policy, policy_id=policy_id_arg, resolver=self.resolve)
def __init__(self):
    """Configure the field: resolves a User selected by a required `id` UUID."""
    user_id_arg = graphene.UUID(required=True, name='id')
    super().__init__(User, user_id=user_id_arg, resolver=self.resolve)
class Arguments:
    """GraphQL mutation arguments: the UUID of the collection to act on."""

    collection = graphene.UUID(required=True)
class SessionCommons:
    """Shared graphene fields and resolvers for compute-session types.

    Static columns are populated from a DB row via ``from_row``.  Live
    statistics are fetched from the ``redis_stat`` connection found in the
    GraphQL execution context while ``self.status`` is in ``LIVE_STATUS``;
    for terminated sessions the last values stored in the DB row are
    returned instead (coerced to zero when absent via ``zero_if_none``).
    """

    sess_id = graphene.String()
    id = graphene.UUID()
    role = graphene.String()
    lang = graphene.String()
    status = graphene.String()
    status_info = graphene.String()
    created_at = GQLDateTime()
    terminated_at = GQLDateTime()
    agent = graphene.String()
    container_id = graphene.String()
    mem_slot = graphene.Int()
    cpu_slot = graphene.Float()
    gpu_slot = graphene.Float()
    num_queries = graphene.Int()
    # Live statistics (see resolvers below).
    # NOTE(review): cpu_used is declared Int but its resolver may return a
    # float for live sessions — confirm whether Float was intended.
    cpu_used = graphene.Int()
    mem_max_bytes = graphene.Int()
    mem_cur_bytes = graphene.Int()
    net_rx_bytes = graphene.Int()
    net_tx_bytes = graphene.Int()
    io_read_bytes = graphene.Int()
    io_write_bytes = graphene.Int()
    io_max_scratch_size = graphene.Int()
    io_cur_scratch_size = graphene.Int()

    async def resolve_cpu_used(self, info):
        # Instances built without stats (e.g. partial rows) have no status.
        if not hasattr(self, 'status'):
            return None
        if self.status not in LIVE_STATUS:
            return zero_if_none(self.cpu_used)
        rs = info.context['redis_stat']
        ret = await rs.hget(str(self.id), 'cpu_used')
        return float(ret) if ret is not None else 0

    async def resolve_mem_max_bytes(self, info):
        if not hasattr(self, 'status'):
            return None
        if self.status not in LIVE_STATUS:
            return zero_if_none(self.mem_max_bytes)
        rs = info.context['redis_stat']
        ret = await rs.hget(str(self.id), 'mem_max_bytes')
        return int(ret) if ret is not None else 0

    async def resolve_mem_cur_bytes(self, info):
        if not hasattr(self, 'status'):
            return None
        # Current usage is meaningless for a dead session: always 0,
        # not the stored DB value.
        if self.status not in LIVE_STATUS:
            return 0
        rs = info.context['redis_stat']
        ret = await rs.hget(str(self.id), 'mem_cur_bytes')
        return int(ret) if ret is not None else 0

    async def resolve_net_rx_bytes(self, info):
        if not hasattr(self, 'status'):
            return None
        if self.status not in LIVE_STATUS:
            return zero_if_none(self.net_rx_bytes)
        rs = info.context['redis_stat']
        ret = await rs.hget(str(self.id), 'net_rx_bytes')
        return int(ret) if ret is not None else 0

    async def resolve_net_tx_bytes(self, info):
        if not hasattr(self, 'status'):
            return None
        if self.status not in LIVE_STATUS:
            return zero_if_none(self.net_tx_bytes)
        rs = info.context['redis_stat']
        ret = await rs.hget(str(self.id), 'net_tx_bytes')
        return int(ret) if ret is not None else 0

    async def resolve_io_read_bytes(self, info):
        if not hasattr(self, 'status'):
            return None
        if self.status not in LIVE_STATUS:
            return zero_if_none(self.io_read_bytes)
        rs = info.context['redis_stat']
        ret = await rs.hget(str(self.id), 'io_read_bytes')
        return int(ret) if ret is not None else 0

    async def resolve_io_write_bytes(self, info):
        if not hasattr(self, 'status'):
            return None
        if self.status not in LIVE_STATUS:
            return zero_if_none(self.io_write_bytes)
        rs = info.context['redis_stat']
        ret = await rs.hget(str(self.id), 'io_write_bytes')
        return int(ret) if ret is not None else 0

    async def resolve_io_max_scratch_size(self, info):
        if not hasattr(self, 'status'):
            return None
        if self.status not in LIVE_STATUS:
            return zero_if_none(self.io_max_scratch_size)
        rs = info.context['redis_stat']
        ret = await rs.hget(str(self.id), 'io_max_scratch_size')
        return int(ret) if ret is not None else 0

    async def resolve_io_cur_scratch_size(self, info):
        if not hasattr(self, 'status'):
            return None
        # Like mem_cur_bytes: a "current" metric, so 0 once terminated.
        if self.status not in LIVE_STATUS:
            return 0
        rs = info.context['redis_stat']
        ret = await rs.hget(str(self.id), 'io_cur_scratch_size')
        return int(ret) if ret is not None else 0

    @classmethod
    def from_row(cls, row):
        """Build an instance from a DB row mapping; None row yields None."""
        if row is None:
            return None
        props = {
            'sess_id': row['sess_id'],
            'id': row['id'],
            'role': row['role'],
            'lang': row['lang'],
            'status': row['status'],
            'status_info': row['status_info'],
            'created_at': row['created_at'],
            'terminated_at': row['terminated_at'],
            'agent': row['agent'],
            'container_id': row['container_id'],
            'mem_slot': row['mem_slot'],
            'cpu_slot': row['cpu_slot'],
            'gpu_slot': row['gpu_slot'],
            'num_queries': row['num_queries'],
            # live statistics
            # NOTE: currently graphene always uses resolve methods!
            'cpu_used': row['cpu_used'],
            'mem_max_bytes': row['mem_max_bytes'],
            'mem_cur_bytes': 0,
            'net_rx_bytes': row['net_rx_bytes'],
            'net_tx_bytes': row['net_tx_bytes'],
            'io_read_bytes': row['io_read_bytes'],
            'io_write_bytes': row['io_write_bytes'],
            'io_max_scratch_size': row['io_max_scratch_size'],
            'io_cur_scratch_size': 0,
        }
        return cls(**props)
class Arguments:
    # Policy to act on, exposed in the schema as `id`.
    policy_id = graphene.UUID(required=True, name='id')
class Arguments:
    # Policy to act on, exposed in the schema as `id`.
    policy_id = graphene.UUID(required=True, name='id')
    # Optional flag; presumably selects permanent deletion (skip
    # trashcan) when true — TODO confirm against the GMP semantics.
    ultimate = graphene.Boolean(required=False)
class ContractCounterInputType(OpenIMISMutation.Input):
    """Input for the contract "counter" mutation: just the contract UUID."""
    # UUID of the contract to process.
    id = graphene.UUID(required=True)
class ContractApproveInputType(OpenIMISMutation.Input):
    """Input for the contract approval mutation: just the contract UUID."""
    # UUID of the contract to approve.
    id = graphene.UUID(required=True)
class ContractDetailsCreateFromInsureeInputType(OpenIMISMutation.Input):
    """Input for creating contract details from a policy-holder insuree."""
    # Target contract.
    contract_id = graphene.UUID(required=True)
    # Policy-holder insuree record to create the details from.
    policy_holder_insuree_id = graphene.UUID(required=True)
class Arguments:
    # UUID of the item to operate on.
    item = graphene.UUID(required=True)
    # Optional new name for the item.
    name = graphene.String(required=False)
    # Optional collection to associate the item with.
    collection = graphene.UUID(required=False)
class Report(graphene.ObjectType):
    """The Report object type. It can be accessed with getReport() and
    getReports().

    Resolvers read from two XML elements attached to the root object:
    ``root.outer_report`` (entity metadata: id, name, owner, timestamps)
    and ``root.inner_report`` (scan content: hosts, ports, results,
    errors, counts).

    Args:
        timestamp (DateTime): Timestamp for this report
        timezone (str): Timezone
        timezone_abbreviation (str): Timezone abbreviation
        ports_count (CountType): Port counts
        ports (List(ReportPort)): Ports involved in this report
        permissions (List(Permission)): Permissions for this report
        results_count (ReportResultCount): Result counts
        results (List(Result)): Results in this report
        severity (ReportSeverity): Severity of this report
        hosts (List(ReportHost)): Hosts for this report
        scan_start (DateTime): Start time of the scan
        scan_end (DateTime): End time of the scan
        error_count (CountType): Error count
        errors (List(Error)): Errors occurred
        report_format (str): Format of this report
    """

    uuid = graphene.UUID(name='id')
    name = graphene.String()
    owner = graphene.String()
    comment = graphene.String()
    report_format = graphene.Field(BaseObjectType)
    creation_time = graphene.DateTime()
    modification_time = graphene.DateTime()
    writable = graphene.Boolean()
    in_use = graphene.Boolean()
    task = graphene.Field(Task, description="The task for this report")
    delta_report = graphene.Field(
        DeltaReport, description="The delta report information"
    )
    user_tags = graphene.Field(EntityUserTags)
    scan_run_status = graphene.String(description="Scan status of report")
    scan_start = graphene.DateTime()
    scan_end = graphene.DateTime()
    hosts_count = graphene.Field(CountType, description="Host counts")
    hosts = graphene.List(ReportHost, description="The hosts for this report")
    closed_cves = graphene.Field(ReportEntities, description="Closed CVE count")
    vulnerabilities = graphene.Field(
        ReportEntities, description="Vulnerability count"
    )
    operating_systems = graphene.Field(
        ReportEntities, description="Operating system count"
    )
    applications = graphene.Field(
        ReportEntities, description="Application count"
    )
    tls_certificates = graphene.Field(
        ReportEntities, description="TLS certificate count"
    )
    ports_count = graphene.Field(CountType, description="Port counts")
    ports = graphene.List(ReportPort, description="The ports in this report")
    results_count = graphene.Field(
        ReportResultCount, description="Result counts"
    )
    results = graphene.List(Result, description="The results for this report")
    severity = graphene.Field(ReportSeverity)
    error_count = graphene.Field(CountType)
    errors = graphene.List(Error)
    permissions = graphene.List(Permission)
    timestamp = graphene.DateTime()
    timezone = graphene.String()
    timezone_abbreviation = graphene.String()
    # NOTE: the original declared `uuid` and `name` a second time here with
    # identical definitions; the duplicate declarations were removed.

    @staticmethod
    def resolve_uuid(root, _info):
        return parse_uuid(root.outer_report.get('id'))

    @staticmethod
    def resolve_name(root, _info):
        return get_text_from_element(root.outer_report, 'name')

    @staticmethod
    def resolve_owner(root, _info):
        return get_owner(root.outer_report)

    @staticmethod
    def resolve_comment(root, _info):
        return get_text_from_element(root.outer_report, 'comment')

    @staticmethod
    def resolve_creation_time(root, _info):
        return get_datetime_from_element(root.outer_report, 'creation_time')

    @staticmethod
    def resolve_modification_time(root, _info):
        return get_datetime_from_element(root.outer_report, 'modification_time')

    @staticmethod
    def resolve_writable(root, _info):
        return get_boolean_from_element(root.outer_report, 'writable')

    @staticmethod
    def resolve_in_use(root, _info):
        return get_boolean_from_element(root.outer_report, 'in_use')

    # `find` already returns None when the element is absent, so the
    # single-element resolvers below return its result directly.

    @staticmethod
    def resolve_user_tags(root, _info):
        return root.inner_report.find('user_tags')

    @staticmethod
    def resolve_delta_report(root, _info):
        return root.inner_report.find('delta/report')

    @staticmethod
    def resolve_report_format(root, _info):
        return root.outer_report.find('report_format')

    @staticmethod
    def resolve_closed_cves(root, _info):
        return root.inner_report.find('closed_cves')

    @staticmethod
    def resolve_task(root, _info):
        return root.inner_report.find('task')

    @staticmethod
    def resolve_permissions(root, _info):
        # Return the list of <permission> children, or None when absent/empty.
        permissions = root.inner_report.find('permissions')
        if permissions is not None:
            permissions = permissions.findall('permission')
            if len(permissions) > 0:
                return permissions
        return None

    @staticmethod
    def resolve_scan_run_status(root, _info):
        return get_text_from_element(root.inner_report, 'scan_run_status')

    @staticmethod
    def resolve_timezone(root, _info):
        return get_text_from_element(root.inner_report, 'timezone')

    @staticmethod
    def resolve_timezone_abbreviation(root, _info):
        return get_text_from_element(root.inner_report, 'timezone_abbrev')

    @staticmethod
    def resolve_timestamp(root, _info):
        return get_datetime_from_element(root.inner_report, 'timestamp')

    @staticmethod
    def resolve_scan_start(root, _info):
        return get_datetime_from_element(root.inner_report, 'scan_start')

    @staticmethod
    def resolve_scan_end(root, _info):
        return get_datetime_from_element(root.inner_report, 'scan_end')

    @staticmethod
    def resolve_severity(root, _info):
        return root.inner_report.find('severity')

    @staticmethod
    def resolve_applications(root, _info):
        return root.inner_report.find('apps')

    @staticmethod
    def resolve_tls_certificates(root, _info):
        return root.inner_report.find('ssl_certs')

    @staticmethod
    def resolve_operating_systems(root, _info):
        return root.inner_report.find('os')

    @staticmethod
    def resolve_vulnerabilities(root, _info):
        return root.inner_report.find('vulns')

    @staticmethod
    def resolve_ports_count(root, _info):
        return root.inner_report.find('ports')

    @staticmethod
    def resolve_ports(root, _info):
        # Return the list of <port> children, or None when absent/empty.
        ports = root.inner_report.find('ports')
        if ports is not None:
            ports = ports.findall('port')
            if len(ports) > 0:
                return ports
        return None

    @staticmethod
    def resolve_hosts_count(root, _info):
        return root.inner_report.find('hosts')

    @staticmethod
    def resolve_hosts(root, _info):
        # <host> elements sit directly under the inner report.
        hosts = root.inner_report.findall('host')
        if hosts is not None and len(hosts) > 0:
            return hosts
        return None

    @staticmethod
    def resolve_results(root, _info):
        # Return the list of <result> children, or None when absent/empty.
        results = root.inner_report.find('results')
        if results is not None:
            results = results.findall('result')
            if len(results) > 0:
                return results
        return None

    @staticmethod
    def resolve_results_count(root, _info):
        return root.inner_report.find('result_count')

    @staticmethod
    def resolve_error_count(root, _info):
        errors = root.inner_report.find('errors')
        if errors is not None:
            return errors.find('count')
        return None

    @staticmethod
    def resolve_errors(root, _info):
        # Return the list of <error> children, or None when absent/empty.
        errors = root.inner_report.find('errors')
        if errors is not None:
            errors = errors.findall('error')
            if len(errors) > 0:
                return errors
        return None
class ContributionPlanBundleDetailsReplaceInputType(ReplaceInputType):
    """Input for replacing a contribution-plan-bundle details record."""
    # Replacement contribution plan; absent means keep the current one —
    # TODO confirm against the mutation handler.
    contribution_plan_id = graphene.UUID(required=False)
    # Validity window of the replacement; open-ended when date_valid_to
    # is omitted.
    date_valid_from = graphene.Date(required=True)
    date_valid_to = graphene.Date(required=False)
class Arguments:
    # UUID of the user this mutation targets.
    user = graphene.UUID(required=True)
class Override(graphene.ObjectType):
    """GraphQL type for a GMP override entity.

    Resolvers take the override XML element as ``root`` and delegate to
    the shared element-parsing helpers; fields without an explicit
    resolver fall back to ``find_resolver`` via Meta.
    """

    class Meta:
        default_resolver = find_resolver

    uuid = graphene.UUID(name='id')
    creation_time = graphene.DateTime()
    modification_time = graphene.DateTime()
    active = graphene.Boolean()
    in_use = graphene.Boolean()
    orphan = graphene.Boolean()
    writable = graphene.Boolean()
    hosts = graphene.List(graphene.String)
    name = graphene.String()
    owner = graphene.String()
    text = graphene.String()
    severity = SeverityType()
    new_severity = SeverityType()
    nvt = graphene.Field(NVT)

    # NOTE: graphene passes the root value as the first positional argument,
    # so these resolvers intentionally take `root` instead of `self`.

    def resolve_uuid(root, _info):
        return parse_uuid(root.get('id'))

    def resolve_owner(root, _info):
        return get_owner(root)

    def resolve_creation_time(root, _info):
        return get_datetime_from_element(root, 'creation_time')

    def resolve_modification_time(root, _info):
        return get_datetime_from_element(root, 'modification_time')

    def resolve_writable(root, _info):
        return get_boolean_from_element(root, 'writable')

    def resolve_in_use(root, _info):
        return get_boolean_from_element(root, 'in_use')

    def resolve_active(root, _info):
        return get_boolean_from_element(root, 'active')

    def resolve_orphan(root, _info):
        return get_boolean_from_element(root, 'orphan')

    def resolve_name(root, _info):
        # An override's display name is the name of its NVT sub-element.
        nvt = get_subelement(root, 'nvt')
        return get_text_from_element(nvt, 'name')

    def resolve_text(root, _info):
        return get_text_from_element(root, 'text')

    def resolve_hosts(root, _info):
        # Hosts are stored as a single comma-separated string.
        hosts = get_text_from_element(root, 'hosts')
        if hosts is None:
            return []
        return hosts.split(',')

    def resolve_severity(root, _info):
        return get_text_from_element(root, 'severity')

    def resolve_new_severity(root, _info):
        return get_text_from_element(root, 'new_severity')
class _EnsembleMixin:
    """Shared graphene fields/resolvers for ensemble object types.

    Field types referencing other graphql modules use lazy string paths
    to avoid import cycles.
    """

    id = gr.UUID(required=True)
    size = gr.Int(required=True)
    active_realizations = gr.List(gr.Int, required=False)
    time_created = gr.DateTime()
    time_updated = gr.DateTime()
    experiment = gr.Field(
        "ert_shared.dark_storage.graphql.experiments.CreateExperiment")
    parameter_names = gr.List(gr.String, required=True)
    response_names = gr.List(gr.String, required=True)
    userdata = gr.JSONString(required=True)
    children = gr.List("ert_shared.dark_storage.graphql.updates.Update")
    parent = gr.Field("ert_shared.dark_storage.graphql.updates.Update")

    @staticmethod
    def resolve_id(root: Any, info: "ResolveInfo") -> UUID:
        return get_id("ensemble", root)

    @staticmethod
    def resolve_size(root: Any, info: "ResolveInfo") -> int:
        return get_size(get_res_from_info(info))

    @staticmethod
    def resolve_active_realizations(root: Any, info: "ResolveInfo") -> List[int]:
        return get_active_realizations(get_res_from_info(info), root)

    @staticmethod
    def resolve_time_created(root: Any, info: "ResolveInfo") -> datetime:
        # NOTE(review): returns the current time, not a stored creation
        # time — looks like a placeholder; confirm before relying on it.
        return datetime.now()

    @staticmethod
    def resolve_time_updated(root: Any, info: "ResolveInfo") -> datetime:
        # Not supported by the dark-storage backend.
        raise NotImplementedError

    @staticmethod
    def resolve_time_experiment(root: Any, info: "ResolveInfo") -> "Experiment":
        # NOTE(review): no `time_experiment` field exists above, so graphene
        # never calls this; possibly a misnamed leftover — verify.
        raise NotImplementedError

    @staticmethod
    def resolve_parameter_names(root: Any, info: "ResolveInfo") -> List[str]:
        return ensemble_parameter_names(get_res_from_info(info))

    @staticmethod
    def resolve_response_names(root: Any, info: "ResolveInfo") -> List[str]:
        return get_response_names(get_res_from_info(info))

    @staticmethod
    def resolve_userdata(root: Any, info: "ResolveInfo") -> Any:
        return {"name": root}

    @staticmethod
    def resolve_children(root: Any, info: "ResolveInfo") -> List["Update"]:
        # Ensembles expose no update children in this backend.
        return []

    @staticmethod
    def resolve_parent(root: Any, info: "ResolveInfo") -> "Update":
        return None

    @staticmethod
    def resolve_experiment(root: Any, info: "ResolveInfo") -> "Experiment":
        return "default"
class Arguments:
    # Target to delete, exposed in the schema as `id`.
    target_id = graphene.UUID(
        required=True,
        name='id',
        description='ID of the target to be deleted',
    )
class VirtualFolder(graphene.ObjectType):
    """GraphQL type for a virtual folder (vfolder) record.

    Rows come from the ``vfolders`` table; loader classmethods join
    against ``users`` so results can be filtered by the owner's domain.
    """

    class Meta:
        interfaces = (Item, )

    host = graphene.String()
    name = graphene.String()
    user = graphene.UUID()       # User.id
    group = graphene.UUID()      # Group.id
    creator = graphene.String()  # User.email
    unmanaged_path = graphene.String()
    usage_mode = graphene.String()
    permission = graphene.String()
    ownership_type = graphene.String()
    max_files = graphene.Int()
    max_size = graphene.Int()
    created_at = GQLDateTime()
    last_used = GQLDateTime()

    num_files = graphene.Int()
    cur_size = BigInt()
    # num_attached = graphene.Int()

    @classmethod
    def from_row(cls, row):
        """Build an instance from a DB row mapping; None row yields None."""
        if row is None:
            return None
        return cls(
            id=row['id'],
            host=row['host'],
            name=row['name'],
            user=row['user'],
            group=row['group'],
            creator=row['creator'],
            unmanaged_path=row['unmanaged_path'],
            usage_mode=row['usage_mode'],
            permission=row['permission'],
            ownership_type=row['ownership_type'],
            max_files=row['max_files'],
            max_size=row['max_size'],    # in KiB
            created_at=row['created_at'],
            last_used=row['last_used'],
            # num_attached=row['num_attached'],
        )

    async def resolve_num_files(self, info):
        # TODO: measure on-the-fly
        return 0

    async def resolve_cur_size(self, info):
        # TODO: measure on-the-fly
        return 0

    @classmethod
    async def load_count(cls, context, *,
                         domain_name=None, group_id=None, user_id=None):
        """Count vfolders matching the given optional filters."""
        from .user import users
        async with context['dbpool'].acquire() as conn:
            j = sa.join(vfolders, users, vfolders.c.user == users.c.uuid)
            query = (
                sa.select([sa.func.count(vfolders.c.id)])
                .select_from(j)
                .as_scalar()
            )
            if domain_name is not None:
                query = query.where(users.c.domain_name == domain_name)
            if group_id is not None:
                query = query.where(vfolders.c.group == group_id)
            if user_id is not None:
                query = query.where(vfolders.c.user == user_id)
            result = await conn.execute(query)
            count = await result.fetchone()
            return count[0]

    @classmethod
    async def load_slice(cls, context, limit, offset, *,
                         domain_name=None, group_id=None, user_id=None,
                         order_key=None, order_asc=None):
        """Load one page of vfolders, optionally filtered and ordered.

        Defaults to ordering by ``created_at`` when no order_key is given.
        """
        from .user import users
        async with context['dbpool'].acquire() as conn:
            if order_key is None:
                _ordering = vfolders.c.created_at
            else:
                _order_func = sa.asc if order_asc else sa.desc
                _ordering = _order_func(getattr(vfolders.c, order_key))
            j = sa.join(vfolders, users, vfolders.c.user == users.c.uuid)
            query = (
                sa.select([vfolders])
                .select_from(j)
                .order_by(_ordering)
                .limit(limit)
                .offset(offset)
            )
            if domain_name is not None:
                query = query.where(users.c.domain_name == domain_name)
            if group_id is not None:
                query = query.where(vfolders.c.group == group_id)
            if user_id is not None:
                query = query.where(vfolders.c.user == user_id)
            # BUGFIX: from_row(cls, row) takes only the row; the original
            # passed `context` as well, which would raise TypeError.
            return [
                cls.from_row(r) async for r in conn.execute(query)
            ]

    @classmethod
    async def batch_load_by_user(cls, context, user_uuids, *,
                                 domain_name=None, group_id=None):
        """Batch-load vfolders grouped by owner UUID (for dataloaders)."""
        from .user import users
        async with context['dbpool'].acquire() as conn:
            # TODO: num_attached count group-by
            j = sa.join(vfolders, users, vfolders.c.user == users.c.uuid)
            query = (sa.select([vfolders]).select_from(j).where(
                vfolders.c.user.in_(user_uuids)).order_by(
                    sa.desc(vfolders.c.created_at)))
            if domain_name is not None:
                query = query.where(users.c.domain_name == domain_name)
            if group_id is not None:
                query = query.where(vfolders.c.group == group_id)
            return await batch_multiresult(context, conn, query, cls,
                                           user_uuids,
                                           lambda row: row['user'])
class CreateTarget(graphene.Mutation):
    """Create a target via GMP and return its new UUID."""

    class Arguments:
        input_object = CreateTargetInput(
            required=True,
            name='input',
            description='Input ObjectType for creating a new target',
        )

    target_id = graphene.UUID(name='id')

    @require_authentication
    def mutate(_root, info, input_object):
        def optional_id(value):
            # GMP expects credential IDs as strings; preserve None.
            return None if value is None else str(value)

        ssh_credential_id = optional_id(input_object.ssh_credential_id)
        # The SSH port only makes sense together with an SSH credential.
        ssh_credential_port = (
            input_object.ssh_credential_port
            if ssh_credential_id is not None
            else None
        )
        smb_credential_id = optional_id(input_object.smb_credential_id)
        snmp_credential_id = optional_id(input_object.snmp_credential_id)
        esxi_credential_id = optional_id(input_object.esxi_credential_id)

        gmp = get_gmp(info)
        response = gmp.create_target(
            input_object.name,
            alive_test=AliveTest.get(input_object.alive_test),
            hosts=input_object.hosts,
            exclude_hosts=input_object.exclude_hosts,
            comment=input_object.comment,
            ssh_credential_id=ssh_credential_id,
            ssh_credential_port=ssh_credential_port,
            smb_credential_id=smb_credential_id,
            snmp_credential_id=snmp_credential_id,
            esxi_credential_id=esxi_credential_id,
            allow_simultaneous_ips=input_object.allow_simultaneous_ips,
            reverse_lookup_only=input_object.reverse_lookup_only,
            reverse_lookup_unify=input_object.reverse_lookup_unify,
            # NOTE(review): unconditional str() — assumes port_list_id is
            # required in CreateTargetInput; verify it can never be None.
            port_list_id=str(input_object.port_list_id),
        )
        return CreateTarget(target_id=response.get('id'))
class CreateAudit(graphene.Mutation):
    """Creates a new audit. Call with createAudit.

    Args:
        input (CreateAuditInput): Input object for CreateAudit

    Returns the UUID of the created audit as ``id``.
    """

    class Arguments:
        input_object = CreateAuditInput(required=True, name='input')

    audit_id = graphene.UUID(name='id')

    @require_authentication
    def mutate(root, info, input_object):
        name = input_object.name
        alterable = input_object.alterable
        schedule_periods = input_object.schedule_periods
        comment = input_object.comment

        # Convert UUID lists/values to the strings GMP expects,
        # preserving None for optional values.
        if input_object.alert_ids is not None:
            alert_ids = [str(alert_id) for alert_id in input_object.alert_ids]
        else:
            alert_ids = None
        if input_object.observers is not None:
            observers = [str(observer) for observer in input_object.observers]
        else:
            observers = None
        schedule_id = (str(input_object.schedule_id)
                       if input_object.schedule_id is not None else None)
        scanner_id = (str(input_object.scanner_id)
                      if input_object.scanner_id is not None else None)
        target_id = (str(input_object.target_id)
                     if input_object.target_id is not None else None)
        policy_id = (str(input_object.policy_id)
                     if input_object.policy_id is not None else None)

        if input_object.hosts_ordering is not None:
            hosts_ordering = get_hosts_ordering_from_string(
                input_object.hosts_ordering)
        else:
            hosts_ordering = None

        # Task preferences are passed as a plain dict of GMP pref names.
        preferences = {}
        if input_object.apply_overrides is not None:
            preferences['assets_apply_overrides'] = (
                "yes" if input_object.apply_overrides == 1 else "no")
        if input_object.min_qod is not None:
            preferences['assets_min_qod'] = input_object.min_qod
        if input_object.auto_delete is not None:
            preferences['auto_delete'] = input_object.auto_delete
        if input_object.auto_delete_data is not None:
            preferences['auto_delete_data'] = input_object.auto_delete_data
        if input_object.in_assets is not None:
            preferences['in_assets'] = ("yes" if input_object.in_assets == 1
                                        else "no")
        # max_checks/max_hosts/source_iface (and hosts_ordering) apply to
        # OpenVAS Default scanners only (scanner_type == 2).
        if input_object.scanner_type == 2:
            if input_object.max_checks is not None:
                preferences['max_checks'] = input_object.max_checks
            if input_object.max_hosts is not None:
                preferences['max_hosts'] = input_object.max_hosts
            if input_object.source_iface is not None:
                preferences['source_iface'] = input_object.source_iface
        else:
            hosts_ordering = None

        gmp = get_gmp(info)

        # BUGFIX: policy_id/target_id/scanner_id are already str-or-None;
        # the original wrapped them in str() again, turning None into the
        # literal string 'None' and masking missing-argument errors.
        resp = gmp.create_audit(
            name,
            policy_id,
            target_id,
            scanner_id,
            alterable=alterable,
            comment=comment,
            alert_ids=alert_ids,
            hosts_ordering=hosts_ordering,
            schedule_id=schedule_id,
            schedule_periods=schedule_periods,
            observers=observers,
            preferences=preferences,
        )
        return CreateAudit(audit_id=resp.get('id'))
class Arguments:
    # Trashcan entity to restore, exposed in the schema as `id`.
    restore_id = graphene.UUID(
        required=True,
        name='id',
        description='UUID of the entity to restore from the trashcan.',
    )
class ModifyAuditInput(graphene.InputObjectType):
    """Input object for modifyAudit.

    Args:
        id (UUID): UUID of audit to modify.
        name (str, optional): The name of the audit.
        policy_id (UUID, optional): UUID of policy to use by the audit;
            OpenVAS Default scanners only
        target_id (UUID, optional): UUID of target to be scanned
        scanner_id (UUID, optional): UUID of scanner to use
        alert_ids (List(UUID), optional): List of UUIDs for alerts to
            be applied to the audit
        alterable (bool, optional): Whether the audit is alterable.
        apply_overrides (bool, optional): Whether to apply overrides
        auto_delete (str, optional): Whether to automatically delete
            reports, And if yes, "keep", if no, "no"
        auto_delete_data (int, optional): if auto_delete is "keep", how
            many of the latest reports to keep
        comment (str, optional): The comment on the audit.
        hosts_ordering (str, optional): The order hosts are scanned in;
            OpenVAS Default scanners only
        in_assets (bool, optional): Whether to add the audit's results
            to assets
        max_checks (int, optional): Maximum concurrently executed NVTs
            per host; OpenVAS Default scanners only
        max_hosts (int, optional): Maximum concurrently scanned hosts;
            OpenVAS Default scanners only
        observers (Observers, optional): List of names or ids of users
            which should be allowed to observe this audit
        min_qod (int, optional): Minimum quality of detection
        scanner_type (int, optional): Type of scanner, 1-5
        schedule_id (UUID, optional): UUID of a schedule when the audit
            should be run.
        schedule_periods (int, optional): A limit to the number of times
            the audit will be scheduled, or 0 for no limit.
        source_iface (str, optional): Network Source Interface;
            OpenVAS Default scanners only
    """

    audit_id = graphene.UUID(required=True,
                             description="UUID of audit to modify.",
                             name='id')
    name = graphene.String(description="Audit name.")
    policy_id = graphene.UUID(description=("UUID of policy. "
                                           "OpenVAS Default scanners only."))
    target_id = graphene.UUID(description="UUID of target.")
    scanner_id = graphene.UUID(description="UUID of scanner.")
    alert_ids = graphene.List(graphene.UUID,
                              description="List of UUIDs for alerts.")
    alterable = graphene.Boolean(description="Whether the audit is alterable.")
    apply_overrides = graphene.Boolean(
        description="Whether to apply overrides.")
    auto_delete = graphene.String(
        description=("Whether to automatically delete reports, "
                     "if yes, 'keep', if no, 'no'"
                     ))  # will be enum or bool once frontend is implemented
    auto_delete_data = graphene.Int(
        description=("if auto_delete is 'keep', "
                     "how many of the latest reports to keep"))
    comment = graphene.String(description="Audit comment.")
    hosts_ordering = graphene.String(
        description=("The order hosts are scanned in; "
                     "OpenVAS Default scanners only."))
    in_assets = graphene.Boolean(
        description="Whether to add the audit's results to assets.")
    max_checks = graphene.Int(
        description=("Maximum concurrently executed NVTs per host; "
                     "OpenVAS Default scanners only."))
    max_hosts = graphene.Int(
        description=("Maximum concurrently scanned hosts; "
                     "OpenVAS Default scanners only."))
    min_qod = graphene.Int(description="Minimum quality of detection.")
    observers = graphene.List(graphene.String)
    scanner_type = graphene.Int(description="Type of scanner, 1-5."
                                )  # will be enum once frontend is implemented
    schedule_id = graphene.UUID(
        description="UUID of a schedule when the audit should be run.")
    schedule_periods = graphene.Int(
        description=("A limit to the number of times the "
                     "audit will be scheduled, or 0 for no limit."))
    source_iface = graphene.String(
        description=("Network Source Interface; "
                     "OpenVAS Default scanners only"))
def __init__(self):
    """Configure this field to resolve a single ScanConfig.

    Exposes one required GraphQL argument, ``id`` (UUID), mapped to the
    ``config_id`` keyword, and delegates fetching to ``self.resolve``.
    """
    super().__init__(
        ScanConfig,
        config_id=graphene.UUID(required=True, name='id'),
        resolver=self.resolve,
    )
class Arguments:
    # Audit to act on, exposed in the schema as `id`.
    audit_id = graphene.UUID(required=True, name='id')
class Arguments:
    # Port range to act on, exposed in the schema as `id`.
    port_range_id = graphene.UUID(required=True, name='id')
class GraphContentInterface(graphene.Interface):
    """Common fields shared by content object types (id, title, abstract)."""
    id = graphene.UUID()
    title = graphene.String()
    # Abstract in both raw markdown and rendered form.
    abstract_md = graphene.String()
    abstract = graphene.String()
    is_published = graphene.Boolean()