class ArticleType(graphene.ObjectType):
    """Defines an Article GraphQL object."""

    class Meta:
        interfaces = (graphene.relay.Node, )

    title = graphene.String()
    publication_date = graphene.DateTime()
    post_author = graphene.Field(
        UserType, description='User that published the article.')
    article_authors = graphene.List(graphene.String)
    abstract = graphene.String()
    body = graphene.String()
    pro_votes = graphene.Int()
    cons_votes = graphene.Int()
    references = graphene.String()
    questions = graphene.ConnectionField(
        'black_mage.schema.QuestionConnection')
    # TODO add tags
    # TODO reports
    similar_suggestions = graphene.ConnectionField(
        'black_mage.schema.SimilarSuggestionConnection')

    def resolve_article_authors(self, info, **kwargs):
        # Authors are stored as one ';'-separated string; split() already
        # returns a list, so the previous identity comprehension was redundant.
        return self.article_authors.split(';')

    def resolve_questions(self, info, **kwargs):
        # Delegates to the Django reverse accessor for related questions.
        return self.question_set.all()

    def resolve_similar_suggestions(self, info, **kwargs):
        return self.similar_suggestions.all()
class PortalType(graphene.ObjectType):
    """Defines a GraphQL Portal object."""

    class Meta:
        interfaces = (graphene.relay.Node, )

    name = graphene.String()
    founding_datetime = graphene.DateTime()
    topics = graphene.ConnectionField('black_mage.schema.TopicConnection')
    news = graphene.ConnectionField('black_mage.schema.NewsConnection')
    rules = graphene.ConnectionField('black_mage.schema.RuleConnection')
    members = graphene.ConnectionField(UserConnection)
    is_public = graphene.Boolean()
    # TODO - add Chat
    # TODO - add Tags
    owner = graphene.Field(UserType)

    def resolve_topics(self, info, **kwargs):
        # Queries the Django reverse accessor `topic_set`.
        return self.topic_set.all()

    def resolve_rules(self, info, **kwargs):
        # Queries the Django reverse accessor `rule_set`.
        return self.rule_set.all()

    def resolve_news(self, info, **kwargs):
        # Queries the Django reverse accessor `news_set`.
        return self.news_set.all()

    def resolve_members(self, info, **kwargs):
        # presumably `users` is a M2M of member users — confirm on the model.
        return self.users.all()
class Query(graphene.ObjectType):
    """Root queries for organizations, their invites and memberships.

    Every resolver scopes results to the requesting user.
    """

    organizations = graphene.ConnectionField(OrganizationConnection)
    organization = graphene.Field(OrganizationNode, id=graphene.String())
    organization_invites = graphene.ConnectionField(
        OrganizationInviteConnection)
    organization_invite = graphene.Field(OrganizationInviteNode,
                                         id=graphene.String())
    organization_memberships = graphene.ConnectionField(
        OrganizationMembershipConnection)
    organization_membership = graphene.Field(OrganizationMembershipNode,
                                             id=graphene.String())

    def resolve_organizations(self, info, **kwargs):
        # Only organizations the requesting user belongs to.
        queryset = Organization.objects.filter(members=info.context.user)
        return gql_optimizer.query(queryset, info)

    # TODO 403 or 404?
    def resolve_organization(self, info, **kwargs):
        org_id = kwargs.get('id')
        if not info.context.user.is_organization_member(org_id):
            raise PermissionDeniedException
        queryset = Organization.objects.filter(id=org_id)
        return gql_optimizer.query(queryset, info).first()

    def resolve_organization_invites(self, info, **kwargs):
        queryset = OrganizationInvite.objects.filter(user=info.context.user)
        return gql_optimizer.query(queryset, info)

    def resolve_organization_invite(self, info, **kwargs):
        queryset = OrganizationInvite.objects.filter(
            id=kwargs.get('id'), user=info.context.user)
        return gql_optimizer.query(queryset, info).first()

    def resolve_organization_memberships(self, info, **kwargs):
        queryset = OrganizationMembership.objects.filter(
            user=info.context.user)
        return gql_optimizer.query(queryset, info)

    def resolve_organization_membership(self, info, **kwargs):
        queryset = OrganizationMembership.objects.filter(
            id=kwargs.get('id'), user=info.context.user)
        return gql_optimizer.query(queryset, info).first()
class Job(graphene.ObjectType):
    """GraphQL view of an RQ job, including its status log."""

    id = graphene.String()
    name = graphene.String()
    description = graphene.String()
    status = graphene.String()
    enqueued_at = graphene.String()
    started_at = graphene.String()
    finished_at = graphene.String()
    log = graphene.ConnectionField(StatusLogEntryConnection,
                                   newest_first=graphene.Boolean())

    @classmethod
    def from_rq_job_instance(cls, rq_job):
        """Build a Job from an RQ job instance; returns None for None input."""
        if rq_job is None:
            return None
        return cls(id=to_global_id('Job', rq_job.id),
                   name=rq_job.meta['name'],
                   description=rq_job.description,
                   status=rq_job.get_status(),
                   enqueued_at=rq_job.enqueued_at,
                   started_at=rq_job.started_at,
                   finished_at=rq_job.ended_at)

    def resolve_log(self, args, context, info):
        """Return the job's log entries, optionally newest-first."""
        _, job_id = from_global_id(self.id)
        # args.get() already yields a falsy None when the key is absent,
        # so the previous `'newest_first' in args and ...` check was redundant.
        if args.get('newest_first'):
            raw_log_entries = log_store.get_all(job_id, reverse=True)
        else:
            raw_log_entries = log_store.get_all(job_id)
        return [
            StatusLogEntry(timestamp=timestamp,
                           message=message,
                           progress=progress)
            for timestamp, message, progress in raw_log_entries
        ]
class AuthorType(graphene.ObjectType):
    """GraphQL type for an Author, exposed as a Relay node."""

    class Meta:
        interfaces = (graphene.Node, )

    first_name = graphene.String()
    last_name = graphene.String()
    twitter_account = graphene.String()
    full_name = graphene.String(
        args={
            'display': graphene.Argument(
                AuthorDisplayNameEnum,
                required=True,
                description='Display format to use for Full Name of Author - default FIRST_LAST.'
            )
        }
    )
    stories = graphene.ConnectionField('api.query.story.StoryConnection')

    @staticmethod
    def resolve_stories(root: Author, info: graphene.ResolveInfo,
                        **_) -> Promise[List[Story]]:
        # Batched through a dataloader to avoid N+1 queries.
        return info.context.loaders.stories_from_author.load(root.id)

    @classmethod
    def is_type_of(cls, root: Any, _: graphene.ResolveInfo) -> bool:
        # Lets graphene map Author instances to this type.
        return isinstance(root, Author)

    @classmethod
    def get_node(cls, info: graphene.ResolveInfo,
                 id_: str) -> Promise[Author]:
        # Relay node lookup; ids arrive as strings, loader keys are ints.
        return info.context.loaders.author.load(int(id_))

    @staticmethod
    def resolve_full_name(root: Author, info: graphene.ResolveInfo, display):
        # Formatting is delegated to the model's full_name(display).
        return root.full_name(display)
class Task(graphene.ObjectType):
    """GraphQL view of an analysis/postprocess task and its jobs."""

    id = graphene.ID()
    name = graphene.String()
    description = graphene.String()
    # TODO check why tasktype is always null in grapqhl responses
    task_type = graphene.Enum.from_enum(TaskType)()
    current_status = graphene.Enum.from_enum(TaskState)()
    current_message = graphene.String()
    jobs = graphene.ConnectionField(JobConnection)
    # TODO add reference id? (Analysis --> Timestamp ID, Postprocess --> Analysis ID)

    @classmethod
    def from_task_object(cls, task):
        """Build a Task from a scheduler task object.

        task.jobs maps job names to (possibly None) RQ job instances; the
        original iterated keys and re-indexed, so .values() is equivalent.
        """
        job_list = [
            Job.from_rq_job_instance(job_inst)
            for job_inst in task.jobs.values()
            if job_inst is not None
        ]
        # Renamed from `type`, which shadowed the builtin.
        kind = (TaskType.ANALYSIS
                if isinstance(task, AnalysisTask) else TaskType.POSTPROCESS)
        return cls(id=to_global_id('Task', task.key),
                   name=task.name,
                   description=task.description,
                   task_type=kind,
                   current_status=task.state,
                   current_message=task.fetch_message(),
                   jobs=job_list)

    def resolve_current_status(self, args, context, info):
        # Expose the enum's underlying value, not the enum member.
        return self.current_status.value
class PersonNode(DjangoObjectType):
    """Relay node for Person with a generic `relationships` connection."""

    relationships = graphene.ConnectionField(lambda: PersonNode,
                                             relationship=graphene.Argument(
                                                 Relationship, required=True))

    class Meta:
        model = Person
        only_fields = ['name', 'family', 'residence']
        interfaces = (relay.Node, )

    def resolve_relationships(self, args, context, info):
        """Return this person's relatives for the requested relationship kind."""
        rel = args['relationship']
        if rel == 'parent':
            return Person.objects.filter(children=self)
        elif rel == 'grandparent':
            return Person.objects.filter(children__children=self)
        elif rel == 'child':
            return Person.objects.filter(parents=self)
        elif rel == 'spouse':
            # Original fell through to the empty-list return when unset.
            return [self.spouse] if self.spouse else []
        elif rel == 'grandchild':
            return Person.objects.filter(parents__parents=self)
        elif rel == 'sibling':
            # People sharing any of this person's parents, minus self.
            return Person.objects.filter(
                parents=self.parents.all()).exclude(pk=self.pk)
        return []
class PrivateQuery(KonnektQuery, PublicQuery):
    """Query fields available to authenticated users."""

    viewer = graphene.Field(UserNode)
    nodes = graphene.ConnectionField(
        SearchResultConnection,
        query=graphene.String(description='Value to search for'),
        node_type=NodeType(required=True))
    topic = DjangoFilterConnectionField(TopicNode)
    profile = DjangoFilterConnectionField(UserProfileNode)
    topics_by_user = DjangoConnectionField(TopicNode)

    def resolve_viewer(self, info, *args):
        """Return the node for the logged-in user; raise when anonymous."""
        user = info.context.user
        if not user.is_authenticated:
            raise Exception('Not logged in!')
        return UserNode.get_node(info, id=user.id)

    def resolve_nodes(self, info, query=None, node_type=None, first=None,
                      last=None, before=None, after=None):
        # TODO: Add logic to paginate search based on first, last, before and after params
        # NOTE(review): compares the node_type argument against the node
        # class itself — confirm NodeType's values really are these classes.
        node = UserProfileNode if node_type == UserProfileNode else TopicNode
        return node.search(query, info)

    def resolve_topics_by_user(self, info):
        """Topics containing an answer authored by the current user."""
        user = info.context.user.userprofile
        topics = Topic.objects.filter(answer__author=user)
        return topics
class TranslationQueries(graphene.ObjectType):
    """Queries for translatable objects, selected by kind."""

    translations = graphene.ConnectionField(
        TranslatableItemConnection,
        description='Returns list of all translatable items of a given kind.',
        kind=graphene.Argument(TranslatableKinds,
                               required=True,
                               description='Kind of objects to retrieve.'))

    def resolve_translations(self, info, kind, **kwargs):
        """Dispatch to the per-kind resolver; unknown kinds yield None."""
        dispatch = {
            TranslatableKinds.PRODUCT:
                lambda: resolve_products(info),
            TranslatableKinds.COLLECTION:
                lambda: resolve_collections(info, query=None),
            TranslatableKinds.CATEGORY:
                lambda: resolve_categories(info, query=None),
            TranslatableKinds.PAGE:
                lambda: resolve_pages(info, query=None),
            TranslatableKinds.SHIPPING_METHOD:
                lambda: resolve_shipping_methods(info),
            TranslatableKinds.VOUCHER:
                lambda: resolve_vouchers(info, query=None),
            TranslatableKinds.ATTRIBUTE:
                lambda: resolve_attributes(info),
            TranslatableKinds.ATTRIBUTE_VALUE:
                lambda: resolve_attribute_values(info),
            TranslatableKinds.VARIANT:
                lambda: resolve_product_variants(info),
            TranslatableKinds.MENU_ITEM:
                lambda: resolve_menu_items(info, query=None),
        }
        handler = dispatch.get(kind)
        # Mirrors the original if/elif ladder: no match falls through to None.
        return handler() if handler is not None else None
class NewsType(graphene.ObjectType):
    """Represents a News item."""

    class Meta:
        interfaces = (graphene.relay.Node, )

    # Attributes
    title = graphene.String(description='News title.')
    body = graphene.String(description='News main content.')
    pro_votes = graphene.Int(
        description='Positive votes this news has received.')
    # Description typo fixed: "otes" -> "votes" (user-facing schema text).
    cons_votes = graphene.Int(
        description='Negative votes this news has received.')
    publication_date = graphene.DateTime(description='Publish datetime.')
    author = graphene.Field(UserType, description='News author.')
    portal = graphene.Field(PortalType)
    similar_suggestions = graphene.ConnectionField(
        'black_mage.schema.SimilarSuggestionConnection')
    # TODO question

    def resolve_portal(self, info, **kwargs):
        # The model stores the portal under a differently named field.
        return self.portal_reference

    def resolve_similar_suggestions(self, info, **kwargs):
        return self.similar_suggestions.all()
class PrivateQuery(KonnektQuery, PublicQuery):
    """Query fields available only to authenticated users."""

    viewer = graphene.Field(UserNode)
    search = graphene.ConnectionField(SearchResultConnection,
                                      query=graphene.String(
                                          description='Value to search for',
                                          required=True),
                                      node_type=NodeType(required=True))

    def resolve_viewer(self, info, *args):
        # Node for the requesting user.
        return UserNode.get_node(info, id=info.context.user.id)

    def resolve_search(self, info, query=None, node_type=None, first=None,
                       last=None, before=None, after=None):
        # TODO: Add logic to paginate search based on first, last, before and after params
        if node_type != UserProfileNode:
            # Only user-profile search is implemented.
            return []
        results = UserProfileNode.search(query, info)
        return results[:first] if first else results
class Analysis(SQLAlchemyObjectType):
    """GraphQL type for an analysis run, backed by AnalysisModel."""

    class Meta:
        model = AnalysisModel
        interfaces = (Node,)

    sample_groups = SQLAlchemyConnectionField(SampleGroup)
    pipeline = graphene.Field(Pipeline)
    snapshots = graphene.ConnectionField(lambda: Snapshot)

    def resolve_sample_groups(self, args, context, info):
        """Sample groups whose plants have snapshots in a timestamp of this analysis."""
        return db.session.query(SampleGroupModel) \
            .join(PlantModel) \
            .join(SnapshotModel, SnapshotModel.plant_id == PlantModel.id) \
            .join(TimestampModel, SnapshotModel.timestamp_id == TimestampModel.id) \
            .options(
                contains_eager("plants"),
                contains_eager("plants.snapshots")
            ).filter(TimestampModel.analyses.any(AnalysisModel.id == self.id))

    def resolve_pipeline(self, args, context, info):
        """Fetch the IAP pipeline for this analysis.

        The previous try/except caught NotFoundError and UnavailableError
        only to re-raise them unchanged — a no-op that has been removed;
        those exceptions still propagate to the caller exactly as before.
        """
        identity = get_jwt_identity()
        pipeline = get_iap_pipeline(identity.get('username'),
                                    self.pipeline_id)
        return Pipeline.from_grpc_type(pipeline)

    def resolve_snapshots(self, args, context, info):
        """All snapshots sharing this analysis's timestamp."""
        return db.session.query(SnapshotModel).filter(
            SnapshotModel.timestamp_id == self.timestamp_id).all()
class Query(graphene.ObjectType):
    """Root query exposing the story connection and Relay node lookup."""

    stories = graphene.ConnectionField(StoryConnection)
    node = graphene.Node.Field()

    @staticmethod
    def resolve_stories(root, _: graphene.ResolveInfo,
                        **_kwargs) -> Iterable[Story]:
        # Hand the full queryset to the connection field for pagination.
        return Story.objects.all()
class Page(interface_cls):
    """GraphQL type for a Wagtail page.

    NOTE: the set of fields depends on the module-level RELAY flag, which
    is evaluated once at class-definition time.
    """

    if not RELAY:
        # use opaque ids in Relay
        id = graphene.Int(required=True)
    title = graphene.String(required=True)
    url_path = graphene.String()
    content_type = graphene.String()
    slug = graphene.String(required=True)
    path = graphene.String()
    depth = graphene.Int()
    seoTitle = graphene.String()
    numchild = graphene.Int()
    revision = graphene.Int()
    first_published_at = graphene.DateTime()
    last_published_at = graphene.DateTime()
    latest_revision_created_at = graphene.DateTime()
    live = graphene.Boolean()
    go_live_at = graphene.DateTime()
    expire_at = graphene.DateTime()
    expired = graphene.Boolean()
    locked = graphene.Boolean()
    draft_title = graphene.String()
    has_unpublished_changes = graphene.Boolean()

    def resolve_content_type(self, _info: ResolveInfo):
        """Return "app_label.ModelName" for this page's concrete model."""
        # Also cached on the instance as a side effect.
        self.content_type = ContentType.objects.get_for_model(self)
        return self.content_type.app_label + '.' + self.content_type.model_class(
        ).__name__

    @classmethod
    def resolve_type(cls, instance, info: ResolveInfo) -> 'Page':
        """Map a concrete page instance to its registered GraphQL type."""
        mdl = ContentType.objects.get_for_model(instance).model_class()
        try:
            model = registry.pages[mdl]
        except KeyError:  # pragma: no cover
            raise ValueError("Model %s is not a registered GraphQL type" % mdl)
        return model

    def resolve_url_path(self, info: ResolveInfo) -> str:
        """URL path relative to the current site's prefix, minus trailing slash."""
        self.url_path = cast(str, self.url_path)
        url_prefix = url_prefix_for_site(info)
        # Strip the site prefix only when the path actually starts with it.
        url = self.url_path if not self.url_path.startswith(
            url_prefix) else self.url_path[len(url_prefix):]
        return url.rstrip('/')

    if RELAY:
        children = graphene.ConnectionField(lambda *x: PageConnection)
    else:
        children = graphene.List(lambda *x: Page)

    def resolve_children(self, info: ResolveInfo, **_kwargs):
        """Live, permission-filtered direct children ordered by tree path."""
        query = wagtailPage.objects.child_of(self)
        return with_page_permissions(
            info.context, query.specific()).live().order_by('path').all()
class Query(graphene.ObjectType):
    """Root queries for boards and board memberships.

    Every resolver scopes results to the requesting user.
    """

    boards = graphene.ConnectionField(BoardConnection)
    board = graphene.Field(BoardNode, id=graphene.String())
    board_memberships = graphene.ConnectionField(BoardMembershipConnection)
    board_membership = graphene.Field(BoardMembershipNode,
                                      id=graphene.String())

    def resolve_boards(self, info, **kwargs):
        # Boards the requesting user belongs to via organization membership.
        queryset = Board.objects.filter(
            board_memberships__organization_membership__user=info.context.
            user)
        return gql_optimizer.query(queryset, info)

    # TODO 403 or 404?
    def resolve_board(self, info, **kwargs):
        board_id = kwargs.get('id')
        if not info.context.user.is_board_member(board_id):
            raise PermissionDeniedException
        queryset = Board.objects.filter(id=board_id)
        return gql_optimizer.query(queryset, info).first()

    def resolve_board_memberships(self, info, **kwargs):
        queryset = BoardMembership.objects.filter(
            organization_membership__user=info.context.user)
        return gql_optimizer.query(queryset, info)

    def resolve_board_membership(self, info, **kwargs):
        queryset = BoardMembership.objects.filter(
            id=kwargs.get('id'),
            organization_membership__user=info.context.user)
        return gql_optimizer.query(queryset, info).first()
class PlayerNode(DjangoObjectType):
    """Relay node for Player; the hand is visible only to its owner."""

    class Meta:
        model = Player
        interfaces = (relay.Node,)

    hand = graphene.ConnectionField(
        TileConnection, player_id=graphene.String(required=True)
    )

    def resolve_hand(self, info, player_id):
        # Guard: other players get an empty queryset instead of the tiles.
        if str(self.id) != player_id:
            return Tile.objects.none()
        return self.hand.order_by("order")
class HouseNode(DjangoObjectType):
    """Relay node for House with a computed display name and its residents."""

    # Human-readable name derived from the model's __str__.
    name = graphene.String()
    residents = graphene.ConnectionField(lambda: PersonNode)

    class Meta:
        model = House
        only_fields = ['street', 'number']
        interfaces = (relay.Node, )

    def resolve_name(self, args, context, info):
        # Delegates to House.__str__ — presumably street/number; confirm in model.
        return str(self)

    def resolve_residents(self, args, context, info):
        # Django reverse accessor from Person to House.
        return self.person_set.all()
class Product(graphene.ObjectType):
    """GraphQL type for a product and its related info records."""

    class Meta:
        interfaces = (relay.Node, )

    name = graphene.String()
    info = graphene.ConnectionField(ProductInfoConnection)

    def resolve_info(self, info):
        # NB: the resolver's `info` argument (graphene ResolveInfo) shadows
        # the `info` field declared above.
        return list(
            models.ProductInfo.query.filter(
                models.ProductInfo.product == self))

    @classmethod
    def get_node(cls, info, id):
        """Relay node lookup by primary key.

        First parameter renamed from `self` to `cls`: this is a
        classmethod, so it receives the class object, not an instance.
        """
        return models.Product.query.get(id)
class PostprocessingStack(graphene.ObjectType):
    """GraphQL type mirroring a gRPC postprocessing stack."""

    id = graphene.String()  # TODO use graphene ID
    name = graphene.String()
    description = graphene.String()
    scripts = graphene.ConnectionField(PostprocessingScriptConnection)

    @classmethod
    def from_grpc_type(cls, grpc_stack_instance):
        """Build an instance from its gRPC counterpart."""
        # Comprehension replaces the manual append loop.
        scripts = [
            PostprocessingScript.from_grpc_type(grpc_script)
            for grpc_script in grpc_stack_instance.scripts
        ]
        return cls(id=grpc_stack_instance.id,
                   name=grpc_stack_instance.name,
                   description=grpc_stack_instance.description,
                   scripts=scripts)
class MemberQuery(graphene.ObjectType):
    """Root query fields for member data."""

    node = graphene.Node.Field()
    members = graphene.ConnectionField(MemberConnection)
    me = graphene.Field(MemberType)

    @staticmethod
    @superuser_required
    def resolve_members(root: None, info: graphene.ResolveInfo, **kwargs):
        """Return every member; access restricted by @superuser_required."""
        return models.Member.objects.all()

    @staticmethod
    @login_required
    def resolve_me(root, info):
        """Return the Member record of the authenticated user.

        @login_required already rejects anonymous users, so the old
        commented-out is_anonymous check was dead code and was removed.
        """
        return info.context.user.member
class StoryType(graphene.ObjectType):
    """GraphQL type for a Story, exposed as a Relay node."""

    class Meta:
        interfaces = (graphene.Node, )

    title = graphene.String()
    subtitle = graphene.String()
    description = graphene.String()
    published_year = graphene.String()
    author_name = graphene.String(
        deprecation_reason='Use `AuthorType.fullName`.',
        args={
            'display': graphene.Argument(
                AuthorDisplayNameEnum,
                default_value=AuthorDisplayNameEnum.FIRST_LAST,
                description=
                'Display format to use for Full Name of Author - default FIRST_LAST.'
            )
        })
    author = graphene.Field('api.query.author.AuthorType')
    passages = graphene.ConnectionField('api.query.passage.PassageConnection')

    @staticmethod
    def resolve_author_name(root: Story, info: graphene.ResolveInfo, display):
        # Deprecated: kept for compatibility, formats via the author model.
        return root.author.full_name(display)

    @staticmethod
    def resolve_author(root: Story,
                       info: graphene.ResolveInfo) -> Promise[Author]:
        # Batched through a dataloader to avoid N+1 queries.
        return info.context.loaders.author.load(root.author_id)

    @staticmethod
    def resolve_passages(root: Story, info: graphene.ResolveInfo,
                         **_) -> Promise[List[Passage]]:
        return info.context.loaders.passage_from_story.load(root.id)

    @classmethod
    def is_type_of(cls, root: Any, _: graphene.ResolveInfo) -> bool:
        # Lets graphene map Story instances to this type.
        return isinstance(root, Story)

    @classmethod
    def get_node(cls, info: graphene.ResolveInfo,
                 id_: str) -> Promise[Story]:
        # Relay node lookup; ids arrive as strings, loader keys are ints.
        return info.context.loaders.story.load(int(id_))
class CharacterType(graphene.ObjectType):
    """GraphQL type for a Character, exposed as a Relay node."""

    class Meta:
        interfaces = (graphene.Node, )

    name = graphene.String()
    in_passages = graphene.ConnectionField(
        'api.query.passage.PassageConnection')

    @staticmethod
    def resolve_in_passages(root: Character, info: graphene.ResolveInfo,
                            **_) -> Promise[List[Passage]]:
        # Passages where this character is the point-of-view character.
        return info.context.loaders.passage_from_pov_character.load(root.id)

    @classmethod
    def is_type_of(cls, root: Any, _: graphene.ResolveInfo) -> bool:
        # Lets graphene map Character instances to this type.
        return isinstance(root, Character)

    @classmethod
    def get_node(cls, info: graphene.ResolveInfo,
                 id_: str) -> Promise[Character]:
        # Relay node lookup; ids arrive as strings, loader keys are ints.
        return info.context.loaders.character.load(int(id_))
class Snapshot(SQLAlchemyObjectType):
    """GraphQL type for a snapshot, with its images and related analyses."""

    class Meta:
        model = SnapshotModel
        interfaces = (Node, )

    images = SQLAlchemyConnectionField(Image, with_type=graphene.String())
    analyses = graphene.ConnectionField(lambda: Analysis)

    def resolve_images(self, args, context, info):
        """Images of this snapshot, optionally restricted to one type.

        The old `conds` list indirection could only ever hold one
        condition, so the filter is now applied directly.
        """
        query = db.session.query(ImageModel).filter(
            ImageModel.snapshot_id == self.id)
        if 'with_type' in args:
            query = query.filter(ImageModel.type == args.get('with_type'))
        return query.all()

    def resolve_analyses(self, args, context, info):
        """Analyses attached to the same timestamp as this snapshot."""
        return db.session.query(AnalysisModel).filter(
            AnalysisModel.timestamp_id == self.timestamp_id).all()
class Query(graphene.ObjectType):
    """Root queries for categories, comments, tickets and assignments.

    Every resolver scopes results to boards the requesting user belongs to.
    """

    categories = graphene.ConnectionField(CategoryConnection)
    category = graphene.Field(CategoryNode, id=graphene.String())
    comments = graphene.ConnectionField(CommentConnection)
    comment = graphene.Field(CommentNode, id=graphene.String())
    tickets = graphene.ConnectionField(TicketConnection)
    ticket = graphene.Field(TicketNode, id=graphene.String())
    ticket_assignments = graphene.ConnectionField(TicketAssignmentConnection)
    ticket_assignment = graphene.Field(TicketAssignmentNode,
                                       id=graphene.String())

    def resolve_categories(self, info, **kwargs):
        return gql_optimizer.query(
            Category.objects.filter(board__members__user=info.context.user),
            info,
        )

    def resolve_category(self, info, **kwargs):
        return gql_optimizer.query(
            Category.objects.filter(id=kwargs.get('id'),
                                    board__members__user=info.context.user),
            info,
        ).first()

    def resolve_comments(self, info, **kwargs):
        return gql_optimizer.query(
            Comment.objects.filter(
                ticket__category__board__members__user=info.context.user),
            info,
        )

    def resolve_comment(self, info, **kwargs):
        return gql_optimizer.query(
            Comment.objects.filter(
                id=kwargs.get('id'),
                ticket__category__board__members__user=info.context.user),
            info,
        ).first()

    def resolve_tickets(self, info, **kwargs):
        return gql_optimizer.query(
            Ticket.objects.filter(
                category__board__members__user=info.context.user),
            info,
        )

    def resolve_ticket(self, info, **kwargs):
        return gql_optimizer.query(
            Ticket.objects.filter(
                id=kwargs.get('id'),
                category__board__members__user=info.context.user),
            info,
        ).first()

    def resolve_ticket_assignments(self, info, **kwargs):
        return gql_optimizer.query(
            TicketAssignment.objects.filter(
                assignee__organization_membership__user=info.context.user),
            info,
        )

    def resolve_ticket_assignment(self, info, **kwargs):
        # BUGFIX: this resolver previously queried Ticket with ticket-scoped
        # filters (copy-paste from resolve_ticket), so the `ticketAssignment`
        # field returned a Ticket instead of a TicketAssignment. It now
        # queries TicketAssignment scoped exactly like the plural resolver.
        return gql_optimizer.query(
            TicketAssignment.objects.filter(
                id=kwargs.get('id'),
                assignee__organization_membership__user=info.context.user),
            info,
        ).first()
class Environment(graphene.ObjectType, interfaces=(graphene.relay.Node, GitRepository)):
    """A type that represents the Environment for a LabBook"""

    # Lazily-populated cache of the base component's fields, so
    # ComponentManager is only consulted once per instance.
    _base_component_data = None

    # Docker image status for this project.
    # (The previous comment, "The name of the current branch", was stale.)
    image_status = graphene.Field(ImageStatus)

    # Run state
    container_status = graphene.Field(ContainerStatus)

    # The LabBook's Base Component
    base = graphene.Field(BaseComponent)

    # The LabBook's Base Component's latest revision
    base_latest_revision = graphene.Int()

    # The LabBook's Package manager installed dependencies
    package_dependencies = graphene.ConnectionField(PackageComponentConnection)

    # A custom docker snippet to be run after all other dependencies and bases have been added.
    docker_snippet = graphene.String()

    # A mapping that enumerates where secrets files should be mapped into the Project container.
    secrets_file_mapping = graphene.ConnectionField(
        SecretFileMappingConnection)

    # A list of bundled apps
    bundled_apps = graphene.List(BundledApp)

    @classmethod
    def get_node(cls, info, id):
        """Method to resolve the object based on it's Node ID"""
        # Parse the key: node ids have the form "<owner>&<name>".
        owner, name = id.split("&")
        return Environment(id=f"{owner}&{name}", name=name, owner=owner)

    def resolve_id(self, info):
        """Resolve the unique Node id for this object"""
        if not self.owner or not self.name:
            raise ValueError(
                "Resolving a Environment Node ID requires owner and name to be set"
            )
        return f"{self.owner}&{self.name}"

    def helper_resolve_image_status(self, labbook):
        """Helper to resolve the image status of a labbook.

        Combines the Docker daemon's view (does the image exist?) with the
        dispatcher's build_image job states (failed/started/queued).
        """
        labbook_image_key = infer_docker_image_name(
            labbook_name=self.name,
            owner=self.owner,
            username=get_logged_in_username())

        dispatcher = Dispatcher()
        lb_jobs = [
            dispatcher.query_task(j.job_key)
            for j in dispatcher.get_jobs_for_labbook(labbook.key)
        ]

        for j in lb_jobs:
            logger.debug("Current job for labbook: status {}, meta {}".format(
                j.status, j.meta))

        # First, check if image exists or not -- The first step of building an image untags any existing ones.
        # Therefore, we know that if one exists, there most likely is not one being built.
        try:
            client = get_docker_client()
            client.images.get(labbook_image_key)
            image_status = ImageStatus.EXISTS
        except (ImageNotFound, requests.exceptions.ConnectionError):
            image_status = ImageStatus.DOES_NOT_EXIST

        if any([
                j.status == 'failed' and j.meta.get('method') == 'build_image'
                for j in lb_jobs
        ]):
            logger.debug("Image status for {} is BUILD_FAILED".format(
                labbook.key))
            if image_status == ImageStatus.EXISTS:
                # The indication that there's a failed job is probably lingering from a while back, so don't
                # change the status to FAILED. Only do that if there is no Docker image.
                logger.debug(
                    f'Got failed build_image for labbook {labbook.key}, but image exists.'
                )
            else:
                image_status = ImageStatus.BUILD_FAILED

        if any([
                j.status in ['started']
                and j.meta.get('method') == 'build_image' for j in lb_jobs
        ]):
            logger.debug(
                f"Image status for {labbook.key} is BUILD_IN_PROGRESS")
            # build_image being in progress takes precedence over if image already exists (unlikely event).
            if image_status == ImageStatus.EXISTS:
                logger.warning(
                    f'Got build_image for labbook {labbook.key}, but image exists.'
                )
            image_status = ImageStatus.BUILD_IN_PROGRESS

        if any([
                j.status in ['queued']
                and j.meta.get('method') == 'build_image' for j in lb_jobs
        ]):
            logger.warning(
                f"build_image for {labbook.key} stuck in queued state")
            image_status = ImageStatus.BUILD_QUEUED

        return image_status.value

    def resolve_image_status(self, info):
        """Resolve the image_status field"""
        return info.context.labbook_loader.load(
            f"{get_logged_in_username()}&{self.owner}&{self.name}").then(
                lambda labbook: self.helper_resolve_image_status(labbook))

    def resolve_container_status(self, info):
        """Resolve the container_status field.

        (The original docstring said "image_status" — copy-paste slip.)
        """
        # Check if the container is running by looking up the container
        labbook_key = infer_docker_image_name(
            labbook_name=self.name,
            owner=self.owner,
            username=get_logged_in_username())

        try:
            client = get_docker_client()
            container = client.containers.get(labbook_key)
            if container.status == "running":
                container_status = ContainerStatus.RUNNING
            else:
                container_status = ContainerStatus.NOT_RUNNING
        except (NotFound, requests.exceptions.ConnectionError):
            container_status = ContainerStatus.NOT_RUNNING

        return container_status.value

    def helper_resolve_base(self, labbook):
        """Helper to resolve the base component object"""
        # Get base image data from the LabBook (cached on the instance).
        if not self._base_component_data:
            cm = ComponentManager(labbook)
            self._base_component_data = cm.base_fields

        return BaseComponent(
            id=
            f"{self._base_component_data['repository']}&{self._base_component_data['id']}&"
            f"{self._base_component_data['revision']}",
            repository=self._base_component_data['repository'],
            component_id=self._base_component_data['id'],
            revision=int(self._base_component_data['revision']),
            _component_data=self._base_component_data)

    def resolve_base(self, info):
        """Method to get the LabBook's base component

        Args:
            info: graphene resolve info (provides the labbook dataloader)

        Returns:
            Promise resolving to a BaseComponent
        """
        return info.context.labbook_loader.load(
            f"{get_logged_in_username()}&{self.owner}&{self.name}").then(
                lambda labbook: self.helper_resolve_base(labbook))

    def helper_resolve_base_latest_revision(self, labbook) -> int:
        """Helper to resolve the base component's latest revision"""
        cm = ComponentManager(labbook)
        if not self._base_component_data:
            self._base_component_data = cm.base_fields

        available_bases = cm.bases.get_base_versions(
            self._base_component_data['repository'],
            self._base_component_data['id'])
        # The first item in the available bases list is the latest base. This is a list of tuples (revision, base data)
        latest_revision, _ = available_bases[0]
        return int(latest_revision)

    def resolve_base_latest_revision(self, info):
        """Method to get the LabBook's base component's latest revision

        Args:
            info: graphene resolve info (provides the labbook dataloader)

        Returns:
            Promise resolving to an int revision number
        """
        return info.context.labbook_loader.load(
            f"{get_logged_in_username()}&{self.owner}&{self.name}").then(
                lambda labbook: self.helper_resolve_base_latest_revision(
                    labbook))

    @staticmethod
    def helper_resolve_package_dependencies(labbook, kwargs):
        """Helper to resolve the packages as a relay connection."""
        cm = ComponentManager(labbook)
        edges = cm.get_component_list("package_manager")

        if edges:
            # Cursors are the base64-encoded list index of each edge.
            cursors = [
                base64.b64encode(
                    "{}".format(cnt).encode("UTF-8")).decode("UTF-8")
                for cnt, x in enumerate(edges)
            ]

            # Process slicing and cursor args
            lbc = ListBasedConnection(edges, cursors, kwargs)
            lbc.apply()

            # Create dataloader
            keys = [f"{k['manager']}&{k['package']}" for k in lbc.edges]
            vd = PackageDataloader(keys, labbook, get_logged_in_username())

            # Get DevEnv instances
            edge_objs = []
            for edge, cursor in zip(lbc.edges, lbc.cursors):
                edge_objs.append(
                    PackageComponentConnection.Edge(node=PackageComponent(
                        _dataloader=vd,
                        manager=edge['manager'],
                        package=edge['package'],
                        version=edge['version'],
                        from_base=edge['from_base'],
                        is_valid=True,
                        schema=edge['schema']),
                                                    cursor=cursor))

            return PackageComponentConnection(edges=edge_objs,
                                              page_info=lbc.page_info)
        else:
            return PackageComponentConnection(
                edges=[],
                page_info=graphene.relay.PageInfo(has_next_page=False,
                                                  has_previous_page=False))

    def resolve_package_dependencies(self, info, **kwargs):
        """Method to get the LabBook's package manager dependencies

        Args:
            info: graphene resolve info (provides the labbook dataloader)

        Returns:
            Promise resolving to a PackageComponentConnection
        """
        return info.context.labbook_loader.load(
            f"{get_logged_in_username()}&{self.owner}&{self.name}").then(
                lambda labbook: self.helper_resolve_package_dependencies(
                    labbook, kwargs))

    @staticmethod
    def helper_resolve_docker_snippet(labbook):
        """Helper to get custom docker snippet"""
        cm = ComponentManager(labbook)
        docker_components = cm.get_component_list('docker')
        if len(docker_components) == 1:
            return '\n'.join(docker_components[0]['content'])
        elif len(docker_components) > 1:
            # NOTE(review): "custdom" typo preserved — this is a runtime
            # error string, not a comment.
            raise ValueError(
                'There should only be one custdom docker component')
        else:
            return ""

    def resolve_docker_snippet(self, info):
        """Method to resolve the docker snippet for this labbook. Right now only 1 snippet is supported"""
        return info.context.labbook_loader.load(
            f"{get_logged_in_username()}&{self.owner}&{self.name}").then(
                lambda labbook: self.helper_resolve_docker_snippet(labbook))

    @staticmethod
    def helper_resolve_secrets_file_mapping(labbook, kwargs):
        """Helper building a relay connection over the secret-file mount map."""
        secrets_store = SecretStore(labbook, get_logged_in_username())
        edges = secrets_store.secret_map.keys()

        if edges:
            # Cursors are the base64-encoded list index of each edge.
            cursors = [
                base64.b64encode(
                    "{}".format(cnt).encode("UTF-8")).decode("UTF-8")
                for cnt, x in enumerate(edges)
            ]

            # Process slicing and cursor args
            lbc = ListBasedConnection(edges, cursors, kwargs)
            lbc.apply()

            # Get DevEnv instances
            edge_objs = []
            for edge, cursor in zip(lbc.edges, lbc.cursors):
                node_obj = SecretFileMapping(owner=labbook.owner,
                                             name=labbook.name,
                                             filename=edge,
                                             mount_path=secrets_store[edge])
                edge_objs.append(
                    SecretFileMappingConnection.Edge(node=node_obj,
                                                     cursor=cursor))
            return SecretFileMappingConnection(edges=edge_objs,
                                               page_info=lbc.page_info)
        else:
            pi = graphene.relay.PageInfo(has_next_page=False,
                                         has_previous_page=False)
            return SecretFileMappingConnection(edges=[], page_info=pi)

    def resolve_secrets_file_mapping(self, info, **kwargs):
        # Loads the labbook asynchronously, then builds the connection.
        return info.context.labbook_loader.load(
            f"{get_logged_in_username()}&{self.owner}&{self.name}").then(
                lambda labbook: self.helper_resolve_secrets_file_mapping(
                    labbook, kwargs))

    def helper_resolve_bundled_apps(self, labbook):
        """Helper to get list of BundledApp objects"""
        bam = BundledAppManager(labbook)
        apps = bam.get_bundled_apps()
        return [
            BundledApp(name=self.name, owner=self.owner, app_name=x)
            for x in apps
        ]

    def resolve_bundled_apps(self, info):
        """Method to resolve the bundled apps for this labbook"""
        return info.context.labbook_loader.load(
            f"{get_logged_in_username()}&{self.owner}&{self.name}").then(
                lambda labbook: self.helper_resolve_bundled_apps(labbook))
class Query(object):
    """Root query for the puzzle-hunt schema.

    Exposes filterable connections for every model plus single-node
    lookups and a union of a puzzle's dialogues and hints.
    """

    # {{{2 connections
    all_users = DjangoFilterConnectionField(
        UserNode, orderBy=graphene.List(of_type=graphene.String))
    all_awards = DjangoFilterConnectionField(
        AwardNode, orderBy=graphene.List(of_type=graphene.String))
    all_award_applications = DjangoFilterConnectionField(
        AwardApplicationNode, orderBy=graphene.List(of_type=graphene.String))
    all_userawards = DjangoFilterConnectionField(
        UserAwardNode, orderBy=graphene.List(of_type=graphene.String))
    all_puzzles = graphene.ConnectionField(
        PuzzleConnection,
        orderBy=graphene.List(of_type=graphene.String),
        user=graphene.ID(),
        status=graphene.Float(),
        status__gt=graphene.Float(),
        created__year=graphene.Int(),
        created__month=graphene.Int(),
        limit=graphene.Int(),
        offset=graphene.Int())
    all_dialogues = DjangoFilterConnectionField(
        DialogueNode, orderBy=graphene.List(of_type=graphene.String))
    all_chatmessages = DjangoFilterConnectionField(
        ChatMessageNode,
        orderBy=graphene.List(of_type=graphene.String),
        chatroomName=graphene.String())
    all_chatrooms = DjangoFilterConnectionField(ChatRoomNode)
    all_favorite_chatrooms = DjangoFilterConnectionField(FavoriteChatRoomNode)
    all_comments = DjangoFilterConnectionField(
        CommentNode, orderBy=graphene.List(of_type=graphene.String))
    all_stars = DjangoFilterConnectionField(
        StarNode, orderBy=graphene.List(of_type=graphene.String))
    all_bookmarks = DjangoFilterConnectionField(
        BookmarkNode, orderBy=graphene.List(of_type=graphene.String))

    # {{{2 nodes
    user = relay.Node.Field(UserNode)
    award = relay.Node.Field(AwardNode)
    useraward = relay.Node.Field(UserAwardNode)
    puzzle = relay.Node.Field(PuzzleNode)
    hint = relay.Node.Field(HintNode)
    dialogue = relay.Node.Field(DialogueNode)
    chatmessage = relay.Node.Field(ChatMessageNode)
    comment = relay.Node.Field(CommentNode)
    star = relay.Node.Field(StarNode)
    bookmark = relay.Node.Field(BookmarkNode)

    # {{{2 unions
    puzzle_show_union = relay.ConnectionField(PuzzleShowUnionConnection,
                                              id=graphene.ID(required=True))

    # {{{2 resolves
    # {{{3 resolve all
    def resolve_all_users(self, info, **kwargs):
        orderBy = kwargs.get("orderBy", None)
        return resolveOrderBy(User.objects, orderBy)

    def resolve_all_awards(self, info, **kwargs):
        orderBy = kwargs.get("orderBy", None)
        return resolveOrderBy(Award.objects, orderBy)

    def resolve_all_award_applications(self, info, **kwargs):
        # NOTE: this resolver was previously defined twice with identical
        # bodies; the duplicate (which silently shadowed this one) has been
        # removed.
        orderBy = kwargs.get("orderBy", None)
        return resolveOrderBy(AwardApplication.objects, orderBy)

    def resolve_all_userawards(self, info, **kwargs):
        orderBy = kwargs.get("orderBy", None)
        return resolveOrderBy(UserAward.objects, orderBy)

    def resolve_all_puzzles(self, info, **kwargs):
        orderBy = kwargs.get("orderBy", [])
        limit = kwargs.get("limit", None)
        offset = kwargs.get("offset", None)
        qs = Puzzle.objects.all()
        # Annotations are only added when the corresponding sort key is
        # requested, to avoid needless aggregate joins.
        if "starCount" in orderBy or "-starCount" in orderBy:
            qs = qs.annotate(starCount=Count("star"))
        if "starSum" in orderBy or "-starSum" in orderBy:
            qs = qs.annotate(starSum=Sum("star__value"))
        if "commentCount" in orderBy or "-commentCount" in orderBy:
            qs = qs.annotate(commentCount=Count("comment"))
        qs = resolveOrderBy(qs, orderBy)
        qs = resolveFilter(qs,
                           kwargs,
                           filters=[
                               "status", "status__gt", "created__year",
                               "created__month"
                           ],
                           filter_fields={"user": User})
        # Total count is taken BEFORE limit/offset so clients can paginate.
        total_count = qs.count()
        qs = resolveLimitOffset(qs, limit, offset)
        return PuzzleConnection(
            total_count=total_count,
            edges=[PuzzleConnection.Edge(node=node) for node in qs])

    def resolve_all_dialogues(self, info, **kwargs):
        orderBy = kwargs.get("orderBy", None)
        return resolveOrderBy(Dialogue.objects, orderBy)

    def resolve_all_chatmessages(self, info, **kwargs):
        orderBy = kwargs.get("orderBy", None)
        chatroomName = kwargs.get("chatroomName", None)
        qs = resolveOrderBy(ChatMessage.objects, orderBy)
        if chatroomName:
            chatroom = ChatRoom.objects.get(name=chatroomName)
            return qs.filter(chatroom=chatroom)
        return qs

    def resolve_all_comments(self, info, **kwargs):
        orderBy = kwargs.get("orderBy", None)
        return resolveOrderBy(Comment.objects, orderBy)

    def resolve_all_stars(self, info, **kwargs):
        orderBy = kwargs.get("orderBy", None)
        return resolveOrderBy(Star.objects, orderBy)

    def resolve_all_bookmarks(self, info, **kwargs):
        orderBy = kwargs.get("orderBy", None)
        return resolveOrderBy(Bookmark.objects, orderBy)

    # {{{3 resolve union
    def resolve_puzzle_show_union(self, info, **kwargs):
        """Return a puzzle's dialogues and hints merged chronologically."""
        _, puzzleId = from_global_id(kwargs["id"])
        puzzle = Puzzle.objects.get(id=puzzleId)
        dialogue_list = Dialogue.objects.filter(puzzle__exact=puzzle)
        hint_list = Hint.objects.filter(puzzle__exact=puzzle)
        return sorted(chain(dialogue_list, hint_list),
                      key=lambda x: x.created)
class Query(graphene.ObjectType):
    """Root query for the phenotyping schema.

    Single-object lookups go through relay nodes or ID-keyed fields;
    list fields are SQLAlchemy connections with ad-hoc filter arguments.
    """

    node = relay.Node.Field()
    experiment = relay.Node.Field(Experiment)
    plant = relay.Node.Field(Plant)
    sample_group = relay.Node.Field(SampleGroup)
    snapshot = relay.Node.Field(Snapshot)
    timestamp = relay.Node.Field(Timestamp)
    image = relay.Node.Field(Image)
    analysis = relay.Node.Field(Analysis)
    postprocess = relay.Node.Field(Postprocess)
    postprocessing_task = graphene.Field(Task,
                                         id=graphene.NonNull(graphene.ID))
    analysis_task = graphene.Field(Task, id=graphene.NonNull(graphene.ID))
    job = graphene.Field(Job, id=graphene.NonNull(graphene.ID))
    analysis_job = graphene.Field(Job, id=graphene.NonNull(graphene.ID))
    postprocessing_job = graphene.Field(Job, id=graphene.NonNull(graphene.ID))
    experiments = SQLAlchemyConnectionField(Experiment,
                                            with_name=graphene.String(),
                                            with_scientist=graphene.String())
    plants = SQLAlchemyConnectionField(Plant)
    sample_groups = SQLAlchemyConnectionField(SampleGroup,
                                              for_timestamp=graphene.ID(),
                                              for_analysis=graphene.ID(),
                                              for_postprocess=graphene.ID())
    snapshots = SQLAlchemyConnectionField(
        Snapshot,
        for_timestamp=graphene.ID(),
        with_camera_position=graphene.String(),
        with_measurement_tool=graphene.String(),
        for_open_timestamp=graphene.Boolean(),
        for_plant=graphene.ID())
    timestamps = SQLAlchemyConnectionField(Timestamp,
                                           for_experiment=graphene.ID(),
                                           ordered=graphene.Boolean())
    images = SQLAlchemyConnectionField(Image, for_snapshot=graphene.ID())
    analyses = SQLAlchemyConnectionField(Analysis, for_timestamp=graphene.ID())
    postprocessings = SQLAlchemyConnectionField(Postprocess,
                                                for_analysis=graphene.ID())
    postprocessing_stacks = graphene.ConnectionField(
        PostprocessingStackConnection, unused_for_analysis=graphene.ID())
    pipelines = graphene.ConnectionField(PipelineConnection,
                                         unused_for_timestamp=graphene.ID())
    analysis_tasks = graphene.ConnectionField(TaskConnection)
    postprocessing_tasks = graphene.ConnectionField(TaskConnection)

    # TODO Use Exceptions all over the schema and adapt clients to use them
    def resolve_analysis_task(self, args, context, info):
        """Look up an analysis task in redis by its relay global id."""
        _, key = from_global_id(args.get('id'))
        task = AnalysisTask.from_key(redis_db, key)
        return Task.from_task_object(task)

    def resolve_postprocessing_task(self, args, context, info):
        """Look up a postprocessing task in redis by its relay global id."""
        _, key = from_global_id(args.get('id'))
        task = PostprocessingTask.from_key(redis_db, key)
        return Task.from_task_object(task)

    def resolve_job(self, args, context, info):
        """Fetch an RQ job directly from redis by its relay global id."""
        _, job_id = from_global_id(args.get('id'))
        return Job.from_rq_job_instance(RQJob.fetch(job_id, redis_db))

    def resolve_analysis_job(self, args, context, info):
        """Fetch a job from the analysis queue by its relay global id."""
        _, job_id = from_global_id(args.get('id'))
        return Job.from_rq_job_instance(analysis_job_queue.fetch_job(job_id))

    def resolve_postprocessing_job(self, args, context, info):
        """Fetch a job from the postprocessing queue by its relay global id."""
        _, job_id = from_global_id(args.get('id'))
        return Job.from_rq_job_instance(
            postprocessing_job_queue.fetch_job(job_id))

    def resolve_experiments(self, args, context, info):
        """Return experiments visible to the caller.

        Non-admins are always restricted to their own experiments;
        admins may additionally filter by scientist and by name.

        Raises:
            GraphQLError: when a non-admin passes ``with_scientist``.
        """
        conds = list()
        query = db.session.query(ExperimentModel)
        identity = get_jwt_identity()
        if not is_admin(identity) and 'with_scientist' in args:
            raise GraphQLError(
                'Filtering by scientist is only allowed as admin')
        if 'with_scientist' in args:
            scientist = args.get('with_scientist')
            # BUG FIX: this previously did `conds[0] = ...` on an EMPTY list,
            # which raised IndexError for every admin scientist filter.
            conds.append(
                ExperimentModel.scientist.like('{}'.format(scientist)))
        else:
            conds.append(
                ExperimentModel.scientist.like('{}'.format(
                    identity.get('username'))))
        if 'with_name' in args:
            name = args.get('with_name')
            conds.append(ExperimentModel.name.like('{}'.format(name)))
        for cond in conds:
            query = query.filter(cond)
        return query.all()

    def resolve_timestamps(self, args, context, info):
        """Return timestamps, optionally for one experiment and ordered."""
        cond = True
        if 'for_experiment' in args:
            experiment_id = args.get('for_experiment')
            _, experiment_db_id = from_global_id(experiment_id)
            cond = (TimestampModel.experiment_id == experiment_db_id)
        # `ordered` is a graphene Boolean, so a plain truthiness test is
        # equivalent to the old `== True` comparison.
        if 'ordered' in args and args.get('ordered'):
            return db.session.query(TimestampModel).filter(cond).order_by(
                TimestampModel.created_at).all()
        return db.session.query(TimestampModel).filter(cond).all()

    def resolve_snapshots(self, args, context, info):
        """Return snapshots matching all supplied filter arguments."""
        conds = list()
        query = db.session.query(SnapshotModel)
        if 'for_open_timestamp' in args:
            # Needs the timestamp join to test its `completed` flag.
            query = query.join(TimestampModel)
            conds.append(TimestampModel.completed.is_(False))
        if 'for_timestamp' in args:
            timestamp_id = args.get('for_timestamp')
            _, timestamp_db_id = from_global_id(timestamp_id)
            conds.append((SnapshotModel.timestamp_id == timestamp_db_id))
        if 'for_plant' in args:
            _, plant_db_id = from_global_id(args.get('for_plant'))
            conds.append((SnapshotModel.plant_id == plant_db_id))
        if 'with_camera_position' in args:
            conds.append((SnapshotModel.camera_position == args.get(
                'with_camera_position')))
        if 'with_measurement_tool' in args:
            conds.append((SnapshotModel.measurement_tool == args.get(
                'with_measurement_tool')))
        for cond in conds:
            query = query.filter(cond)
        return query.all()

    def resolve_images(self, args, context, info):
        """Return images, optionally restricted to one snapshot."""
        cond = True
        if 'for_snapshot' in args:
            snapshot_id = args.get('for_snapshot')
            _, snapshot_db_id = from_global_id(snapshot_id)
            cond = (ImageModel.snapshot_id == snapshot_db_id)
        # TODO add filter for type
        return db.session.query(ImageModel).filter(cond).all()

    def resolve_analyses(self, args, context, info):
        """Return analyses, optionally restricted to one timestamp."""
        cond = True
        if 'for_timestamp' in args:
            timestamp_id = args.get('for_timestamp')
            _, timestamp_db_id = from_global_id(timestamp_id)
            cond = (AnalysisModel.timestamp_id == timestamp_db_id)
        return db.session.query(AnalysisModel).filter(cond).all()

    def resolve_postprocessings(self, args, context, info):
        """Return postprocessings, optionally restricted to one analysis."""
        cond = True
        if 'for_analysis' in args:
            analysis_id = args.get('for_analysis')
            _, analysis_db_id = from_global_id(analysis_id)
            cond = (PostprocessModel.analysis_id == analysis_db_id)
        return db.session.query(PostprocessModel).filter(cond).all()

    def resolve_sample_groups(self, args, context, info):
        """Return sample groups for a timestamp, analysis or postprocess.

        Each filtered branch eagerly loads the matching plants/snapshots
        so the nested connections don't re-query.
        """
        cond = True
        if 'for_timestamp' in args:
            timestamp_id = args.get('for_timestamp')
            _, timestamp_db_id = from_global_id(timestamp_id)
            groups = db.session.query(SampleGroupModel) \
                .join(PlantModel) \
                .join(SnapshotModel, and_(
                    SnapshotModel.plant_id == PlantModel.id,
                    SnapshotModel.timestamp_id == timestamp_db_id)
                ) \
                .options(
                    contains_eager("plants"),
                    contains_eager("plants.snapshots"),
                )
            return groups.all()
        elif 'for_analysis' in args:
            analysis_id = args.get('for_analysis')
            _, analysis_db_id = from_global_id(analysis_id)
            groups = db.session.query(SampleGroupModel) \
                .join(PlantModel) \
                .join(SnapshotModel, and_(
                    SnapshotModel.plant_id == PlantModel.id,
                    SnapshotModel.analyses.any(
                        AnalysisModel.id == analysis_db_id)
                )) \
                .join(AnalysisModel,
                      SnapshotModel.analyses.any(
                          AnalysisModel.id == analysis_db_id)) \
                .options(
                    contains_eager("plants"),
                    contains_eager("plants.snapshots"),
                )
            return groups.all()
        elif 'for_postprocess' in args:
            postprocess_id = args.get('for_postprocess')
            _, postprocess_db_id = from_global_id(postprocess_id)
            groups = db.session.query(SampleGroupModel) \
                .join(PlantModel) \
                .join(SnapshotModel, and_(
                    SnapshotModel.plant_id == PlantModel.id,
                    SnapshotModel.postprocesses.any(
                        PostprocessModel.id == postprocess_db_id)
                )) \
                .join(PostprocessModel,
                      # BUG FIX: was `SnapshotModel.analyses.any(...)` — a
                      # copy-paste from the analysis branch; the condition
                      # above correctly uses `postprocesses`.
                      SnapshotModel.postprocesses.any(
                          PostprocessModel.id == postprocess_db_id)) \
                .options(
                    contains_eager("plants"),
                    contains_eager("plants.snapshots"),
                )
            return groups.all()
        return db.session.query(SampleGroupModel).filter(cond).all()

    def resolve_postprocessing_stacks(self, args, context, info):
        """Return postprocessing stacks not yet applied to an analysis.

        Without ``unused_for_analysis`` every stack known to the gRPC
        service is returned.
        """
        try:
            identity = get_jwt_identity()
            grpc_stacks = get_postprocessing_stacks(identity.get('username'))
            analysis_db_id = None
            snapshot_hash = None
            if 'unused_for_analysis' in args:
                analysis_id = args.get('unused_for_analysis')
                _, analysis_db_id = from_global_id(analysis_id)
                snapshots = db.session.query(SnapshotModel).join(TimestampModel) \
                    .filter(SnapshotModel.excluded == False) \
                    .filter(TimestampModel.analyses.any(AnalysisModel.id == analysis_db_id)) \
                    .all()
                snapshot_hash = PostprocessModel.calculate_snapshot_hash(
                    snapshots)
            stacks = list()
            for stack in grpc_stacks:
                postprocess = None
                if analysis_db_id is not None:
                    # Skip stacks already run for this analysis on the same
                    # snapshot set.
                    postprocess = db.session.query(PostprocessModel) \
                        .filter(PostprocessModel.analysis_id == analysis_db_id) \
                        .filter(PostprocessModel.postprocessing_stack_id == stack.id) \
                        .filter(PostprocessModel.snapshot_hash == snapshot_hash) \
                        .first()
                if postprocess is None:
                    stacks.append(PostprocessingStack.from_grpc_type(stack))
            return stacks
        except UnavailableError:
            # Propagate gRPC unavailability to the GraphQL error layer.
            raise

    def resolve_pipelines(self, args, context, info):
        """Return pipelines not yet used for a timestamp (all if no filter)."""
        try:
            identity = get_jwt_identity()
            grpc_pipelines = get_iap_pipelines(identity.get('username'))
            timestamp_db_id = None
            if 'unused_for_timestamp' in args:
                timestamp_id = args.get('unused_for_timestamp')
                _, timestamp_db_id = from_global_id(timestamp_id)
            pipelines = list()
            for pipeline in grpc_pipelines:
                analysis = None
                if timestamp_db_id is not None:
                    # Skip pipelines already analysed for this timestamp.
                    analysis = db.session.query(AnalysisModel).filter(
                        AnalysisModel.timestamp_id == timestamp_db_id).filter(
                            AnalysisModel.pipeline_id == pipeline.id).first()
                if analysis is None:
                    pipelines.append(Pipeline.from_grpc_type(pipeline))
            return pipelines
        except UnavailableError:
            # Propagate gRPC unavailability to the GraphQL error layer.
            raise

    def resolve_analysis_tasks(self, args, context, info):
        """Return all scheduled analysis tasks for the current user."""
        identity = get_jwt_identity()
        username = identity.get('username')
        if username is None and not current_app.config['PRODUCTION']:
            username = '******'
        tasks = get_analysis_task_scheduler().fetch_all_tasks(username)
        return [Task.from_task_object(task) for task in tasks]

    def resolve_postprocessing_tasks(self, args, context, info):
        """Return all scheduled postprocessing tasks for the current user."""
        identity = get_jwt_identity()
        username = identity.get('username')
        if username is None and not current_app.config['PRODUCTION']:
            username = '******'
        tasks = get_postprocess_task_scheduler().fetch_all_tasks(username)
        return [Task.from_task_object(task) for task in tasks]
class Query(graphene.ObjectType):
    """Root GraphQL query: relay node lookup plus a products connection."""

    node = relay.Node.Field()
    products = graphene.ConnectionField(ProductConnection)

    def resolve_products(self, info):
        """Return every Product row for the `products` connection."""
        product_query = models.Product.query
        return product_query.all()
class Mixin:
    # Query mixin exposing wagtail pages: listing, single-page lookup
    # (by id or url, optionally at a specific revision), admin previews,
    # and the "show in menus" list. All reads are filtered through
    # with_page_permissions and restricted to live pages.
    if RELAY:
        # Relay-style pagination when enabled project-wide.
        pages = graphene.ConnectionField(PageConnection)
    else:
        pages = graphene.List(Page,
                              token=graphene.String(required=False),
                              parent=graphene.Int())
    page = graphene.Field(
        Page,
        token=graphene.String(required=False),
        id=graphene.Int(),
        url=graphene.String(),
        revision=graphene.Int(),
    )
    preview = graphene.Field(
        Page,
        token=graphene.String(required=False),
        id=graphene.Int(required=True),
    )
    preview_add = graphene.Field(
        Page,
        token=graphene.String(required=False),
        app_name=graphene.String(),
        model_name=graphene.String(),
        parent=graphene.Int(required=True),
    )

    @login_required
    def resolve_pages(self, info: ResolveInfo, parent: int = None,
                      **_kwargs):
        """Return all live pages the caller may see, ordered by path.

        When *parent* is given, only children of that page are returned.

        Raises:
            ValueError: if *parent* does not match an existing page id.
        """
        query = wagtailPage.objects
        # prefetch specific type pages: select_related each page-type
        # field that the incoming GraphQL selection actually asks for.
        selections = set(
            camelcase_to_underscore(f.name.value)
            for f in info.field_asts[0].selection_set.selections
            if not isinstance(f, InlineFragment))
        for pf in registry.page_prefetch_fields.intersection(selections):
            query = query.select_related(pf)
        if parent is not None:
            parent_page = wagtailPage.objects.filter(id=parent).first()
            if parent_page is None:
                raise ValueError(f'Page id={parent} not found.')
            query = query.child_of(parent_page)
        return with_page_permissions(
            info.context,
            query.specific()).live().order_by('path').all()

    def resolve_page(self, info: ResolveInfo, id: int = None,
                     url: str = None, revision: int = None, **_kwargs):
        """Return one live page by *id* or *url*, or None if not visible.

        With *revision* set, the page content at that revision is
        returned (-1 means the latest revision).

        Raises:
            ValueError: if neither id nor url is given, or the requested
                revision does not exist.
        """
        # NOTE(review): unlike the sibling resolvers this one has no
        # @login_required — presumably intentional for public page reads;
        # confirm.
        query = wagtailPage.objects
        if id is not None:
            query = query.filter(id=id)
        elif url is not None:
            url_prefix = url_prefix_for_site(info)
            # Normalize to wagtail's trailing-slash url_path form.
            query = query.filter(url_path=url_prefix + url.rstrip('/') +
                                 '/')
        else:  # pragma: no cover
            raise ValueError("One of 'id' or 'url' must be specified")
        page = with_page_permissions(info.context,
                                     query.specific()).live().first()
        if page is None:
            return None
        if revision is not None:
            if revision == -1:
                rev = page.get_latest_revision()
            else:
                rev = page.revisions.filter(id=revision).first()
            if not rev:
                raise ValueError("Revision %d doesn't exist" % revision)
            # Serve the revision's content, tagging it with its id so the
            # client can tell which revision it got.
            page = rev.as_page_object()
            page.revision = rev.id
            return page
        return page

    @login_required
    def resolve_preview(self, info: ResolveInfo, id: int,
                        **_kwargs):  # pragma: no cover
        """Render the wagtail admin edit-preview for page *id*."""
        from wagtail.admin.views.pages import PreviewOnEdit
        request = info.context
        view = PreviewOnEdit(args=('%d' % id, ), request=request)
        return _resolve_preview(request, view)

    @login_required
    def resolve_preview_add(self, info: ResolveInfo,
                            app_name: str = 'wagtailcore',
                            model_name: str = 'page',
                            parent: int = None,
                            **_kwargs):  # pragma: no cover
        """Render the admin create-preview for a new page under *parent*."""
        from wagtail.admin.views.pages import PreviewOnCreate
        request = info.context
        view = PreviewOnCreate(args=(app_name, model_name, str(parent)),
                               request=request)
        page = _resolve_preview(request, view)
        page.id = 0  # force an id, since our schema assumes page.id is an Int!
        return page

    # Show in Menu
    show_in_menus = graphene.List(
        Page,
        token=graphene.String(required=False),
    )

    @login_required
    def resolve_show_in_menus(self, info: ResolveInfo, **_kwargs):
        """Return live menu pages (show_in_menus=True) ordered by path."""
        return with_page_permissions(
            info.context,
            wagtailPage.objects.filter(
                show_in_menus=True)).live().order_by('path')