class NameNode(graphene.ObjectType):
    name = graphene.String()
    note = graphene.String()
    start_date = graphene.String()
    end_date = graphene.String()
class Input:
    client_mutation_label = graphene.String(max_length=255, required=False)
    client_mutation_details = graphene.List(graphene.String)
class UpdateInvoiceInput(graphene.InputObjectType):
    number = graphene.String(description="Invoice number")
    url = graphene.String(description="URL of an invoice to download.")
class Attribute(ModelObjectType):
    id = graphene.GlobalID(required=True)
    input_type = AttributeInputTypeEnum(description=AttributeDescriptions.INPUT_TYPE)
    entity_type = AttributeEntityTypeEnum(
        description=AttributeDescriptions.ENTITY_TYPE, required=False
    )
    name = graphene.String(description=AttributeDescriptions.NAME)
    slug = graphene.String(description=AttributeDescriptions.SLUG)
    type = AttributeTypeEnum(description=AttributeDescriptions.TYPE)
    unit = MeasurementUnitsEnum(description=AttributeDescriptions.UNIT)
    choices = FilterConnectionField(
        AttributeValueCountableConnection,
        sort_by=AttributeChoicesSortingInput(description="Sort attribute choices."),
        filter=AttributeValueFilterInput(
            description="Filtering options for attribute choices."
        ),
        description=AttributeDescriptions.VALUES,
    )
    value_required = graphene.Boolean(
        description=AttributeDescriptions.VALUE_REQUIRED, required=True
    )
    visible_in_storefront = graphene.Boolean(
        description=AttributeDescriptions.VISIBLE_IN_STOREFRONT, required=True
    )
    filterable_in_storefront = graphene.Boolean(
        description=AttributeDescriptions.FILTERABLE_IN_STOREFRONT, required=True
    )
    filterable_in_dashboard = graphene.Boolean(
        description=AttributeDescriptions.FILTERABLE_IN_DASHBOARD, required=True
    )
    available_in_grid = graphene.Boolean(
        description=AttributeDescriptions.AVAILABLE_IN_GRID, required=True
    )
    translation = TranslationField(AttributeTranslation, type_name="attribute")
    storefront_search_position = graphene.Int(
        description=AttributeDescriptions.STOREFRONT_SEARCH_POSITION, required=True
    )
    with_choices = graphene.Boolean(
        description=AttributeDescriptions.WITH_CHOICES, required=True
    )
    product_types = ConnectionField(
        "saleor.graphql.product.types.ProductTypeCountableConnection",
        required=True,
    )
    product_variant_types = ConnectionField(
        "saleor.graphql.product.types.ProductTypeCountableConnection",
        required=True,
    )

    class Meta:
        description = (
            "Custom attribute of a product. Attributes can be assigned to products and "
            "variants at the product type level."
        )
        interfaces = [graphene.relay.Node, ObjectWithMetadata]
        model = models.Attribute

    @staticmethod
    def resolve_choices(root: models.Attribute, info, **kwargs):
        if root.input_type in AttributeInputType.TYPES_WITH_CHOICES:
            qs = cast(QuerySet[models.AttributeValue], root.values.all())
        else:
            qs = cast(
                QuerySet[models.AttributeValue], models.AttributeValue.objects.none()
            )

        qs = filter_connection_queryset(qs, kwargs)
        return create_connection_slice(
            qs, info, kwargs, AttributeValueCountableConnection
        )

    @staticmethod
    @check_attribute_required_permissions()
    def resolve_value_required(root: models.Attribute, *_args):
        return root.value_required

    @staticmethod
    @check_attribute_required_permissions()
    def resolve_visible_in_storefront(root: models.Attribute, *_args):
        return root.visible_in_storefront

    @staticmethod
    @check_attribute_required_permissions()
    def resolve_filterable_in_storefront(root: models.Attribute, *_args):
        return root.filterable_in_storefront

    @staticmethod
    @check_attribute_required_permissions()
    def resolve_filterable_in_dashboard(root: models.Attribute, *_args):
        return root.filterable_in_dashboard

    @staticmethod
    @check_attribute_required_permissions()
    def resolve_storefront_search_position(root: models.Attribute, *_args):
        return root.storefront_search_position

    @staticmethod
    @check_attribute_required_permissions()
    def resolve_available_in_grid(root: models.Attribute, *_args):
        return root.available_in_grid

    @staticmethod
    def resolve_with_choices(root: models.Attribute, *_args):
        return root.input_type in AttributeInputType.TYPES_WITH_CHOICES

    @staticmethod
    def resolve_product_types(root: models.Attribute, info, **kwargs):
        from ..product.types import ProductTypeCountableConnection

        qs = root.product_types.all()
        return create_connection_slice(qs, info, kwargs, ProductTypeCountableConnection)

    @staticmethod
    def resolve_product_variant_types(root: models.Attribute, info, **kwargs):
        from ..product.types import ProductTypeCountableConnection

        qs = root.product_variant_types.all()
        return create_connection_slice(qs, info, kwargs, ProductTypeCountableConnection)
class Dataset(graphene.ObjectType, interfaces=(graphene.relay.Node, GitRepository)):
    """A type representing a Dataset and all of its contents.

    Datasets are uniquely identified by both the "owner" and the "name" of the Dataset.
    """

    # Store collaborator data so it is only fetched once per request
    _collaborators = None

    # A short description of the dataset limited to 140 UTF-8 characters
    description = graphene.String()

    # The DatasetType for this dataset
    dataset_type = graphene.Field(DatasetType)

    # Data schema version of this dataset. It may be behind the most recent version and
    # need to be upgraded.
    schema_version = graphene.Int()

    # Creation date/timestamp in UTC in ISO format
    created_on_utc = graphene.types.datetime.DateTime()

    # List of collaborators
    collaborators = graphene.List(Collaborator)

    # A boolean indicating if the current user can manage collaborators
    can_manage_collaborators = graphene.Boolean()

    # Last modified date/timestamp in UTC in ISO format
    modified_on_utc = graphene.types.datetime.DateTime()

    # Connection to Activity Entries
    activity_records = graphene.relay.ConnectionField(ActivityConnection)

    # List of all files and directories within the section
    all_files = graphene.relay.ConnectionField(DatasetFileConnection)

    # Access a detail record directly, which is useful when fetching detail items
    detail_record = graphene.Field(ActivityDetailObject, key=graphene.String())
    detail_records = graphene.List(ActivityDetailObject, keys=graphene.List(graphene.String))

    visibility = graphene.String()

    # Get the URL of the remote origin
    default_remote = graphene.String()

    # Overview Information
    overview = graphene.Field(DatasetOverview)

    # Temporary commits-behind field before full branching is supported, used to
    # indicate if a dataset is out of date
    commits_behind = graphene.Int()

    @classmethod
    def get_node(cls, info, id):
        """Method to resolve the object based on its Node ID"""
        # Parse the key
        owner, name = id.split("&")

        return Dataset(id="{}&{}".format(owner, name), name=name, owner=owner)

    def resolve_id(self, info):
        """Resolve the unique Node id for this object"""
        if not self.id:
            if not self.owner or not self.name:
                raise ValueError(
                    "Resolving a Dataset Node ID requires owner and name to be set")
            self.id = f"{self.owner}&{self.name}"

        return self.id

    def resolve_overview(self, info):
        """Resolve the overview information for this dataset"""
        return DatasetOverview(id=f"{self.owner}&{self.name}",
                               owner=self.owner,
                               name=self.name)

    def resolve_description(self, info):
        """Return the dataset description, loading it via the dataset loader if it is
        not already set"""
        if not self.description:
            return info.context.dataset_loader.load(
                f"{get_logged_in_username()}&{self.owner}&{self.name}").then(
                    lambda dataset: dataset.description)

        return self.description

    def resolve_schema_version(self, info):
        """Return the data schema version of this dataset"""
        return info.context.dataset_loader.load(
            f"{get_logged_in_username()}&{self.owner}&{self.name}").then(
                lambda dataset: dataset.schema)

    def resolve_created_on_utc(self, info):
        """Return the creation timestamp (if available - otherwise empty string)

        Args:
            info:

        Returns:

        """
        return info.context.dataset_loader.load(
            f"{get_logged_in_username()}&{self.owner}&{self.name}").then(
                lambda dataset: dataset.creation_date)

    def _fetch_collaborators(self, dataset, info):
        """Helper method to fetch this dataset's collaborators

        Args:
            info: The graphene info object for this request
        """
        # TODO: Future work will look up remote in LabBook data, allowing user to select remote.
        default_remote = dataset.client_config.config['git']['default_remote']
        admin_service = None
        for remote in dataset.client_config.config['git']['remotes']:
            if default_remote == remote:
                admin_service = dataset.client_config.config['git']['remotes'][remote]['admin_service']
                break

        # Extract valid Bearer token
        if "HTTP_AUTHORIZATION" in info.context.headers.environ:
            token = parse_token(info.context.headers.environ["HTTP_AUTHORIZATION"])
        else:
            raise ValueError(
                "Authorization header not provided. Must have a valid session to query for collaborators")

        # Get collaborators from remote service
        mgr = GitLabManager(default_remote, admin_service, token)
        try:
            self._collaborators = [
                Collaborator(owner=self.owner,
                             name=self.name,
                             collaborator_username=c[1],
                             permission=ProjectPermissions(c[2]).name)
                for c in mgr.get_collaborators(self.owner, self.name)
            ]
        except ValueError:
            # If ValueError raised, assume repo doesn't exist yet
            self._collaborators = []

    def helper_resolve_collaborators(self, dataset, info):
        """Helper method to fetch this dataset's collaborators and generate the
        resulting list of collaborators

        Args:
            info: The graphene info object for this request
        """
        self._fetch_collaborators(dataset, info)
        return self._collaborators

    def resolve_collaborators(self, info):
        """Method to get the list of collaborators for a dataset

        Args:
            info:

        Returns:

        """
        if self._collaborators is None:
            # If here, put the fetch for collaborators in the promise
            return info.context.dataset_loader.load(
                f"{get_logged_in_username()}&{self.owner}&{self.name}").then(
                    lambda dataset: self.helper_resolve_collaborators(dataset, info))
        else:
            return self._collaborators

    def helper_resolve_can_manage_collaborators(self, dataset, info):
        """Helper method to fetch this dataset's collaborators and check if the current
        user can manage collaborators

        Args:
            info: The graphene info object for this request
        """
        self._fetch_collaborators(dataset, info)

        username = get_logged_in_username()
        for c in self._collaborators:
            if c.collaborator_username == username:
                if c.permission == ProjectPermissions.OWNER.name:
                    return True
        return False

    def resolve_can_manage_collaborators(self, info):
        """Method to check if the user is the "owner" of the dataset and can manage
        collaborators

        Args:
            info:

        Returns:

        """
        if self._collaborators is None:
            # If here, put the fetch for collaborators in the promise
            return info.context.dataset_loader.load(
                f"{get_logged_in_username()}&{self.owner}&{self.name}").then(
                    lambda dataset: self.helper_resolve_can_manage_collaborators(dataset, info))

        username = get_logged_in_username()
        for c in self._collaborators:
            if c.collaborator_username == username and c.permission == ProjectPermissions.OWNER.name:
                return True
        return False

    def resolve_modified_on_utc(self, info):
        """Return the last modified timestamp (if available - otherwise empty string)

        Args:
            info:

        Returns:

        """
        return info.context.dataset_loader.load(
            f"{get_logged_in_username()}&{self.owner}&{self.name}").then(
                lambda dataset: dataset.modified_on)

    def helper_resolve_activity_records(self, dataset, kwargs):
        """Helper method to generate ActivityRecord objects and populate the connection"""
        # Create instance of ActivityStore for this dataset
        store = ActivityStore(dataset)

        if kwargs.get('before') or kwargs.get('last'):
            raise ValueError(
                "Only `after` and `first` arguments are supported when paging activity records")

        # Get edges and cursors
        edges = store.get_activity_records(after=kwargs.get('after'),
                                           first=kwargs.get('first'))
        if edges:
            cursors = [x.commit for x in edges]
        else:
            cursors = []

        # Get ActivityRecordObject instances
        edge_objs = []
        for edge, cursor in zip(edges, cursors):
            edge_objs.append(
                ActivityConnection.Edge(
                    node=ActivityRecordObject(
                        id=f"dataset&{self.owner}&{self.name}&{edge.commit}",
                        owner=self.owner,
                        name=self.name,
                        _repository_type='dataset',
                        commit=edge.commit,
                        _activity_record=edge),
                    cursor=cursor))

        # Create page info based on first commit. Since only paging backwards right now,
        # just check for commit
        if edges:
            has_next_page = True

            # Get the message of the linked commit and check if it is the non-activity
            # record dataset creation commit
            if len(edges) > 1:
                if edges[-2].linked_commit != "no-linked-commit":
                    linked_msg = dataset.git.log_entry(edges[-2].linked_commit)['message']
                    if linked_msg == f"Creating new empty Dataset: {dataset.name}" and "_GTM_ACTIVITY_" not in linked_msg:
                        # if you get here, this is the first activity record
                        has_next_page = False

            end_cursor = cursors[-1]
        else:
            has_next_page = False
            end_cursor = None

        page_info = graphene.relay.PageInfo(has_next_page=has_next_page,
                                            has_previous_page=False,
                                            end_cursor=end_cursor)

        return ActivityConnection(edges=edge_objs, page_info=page_info)

    def resolve_activity_records(self, info, **kwargs):
        """Method to page through this dataset's activity records

        Args:
            kwargs:
            info:

        Returns:

        """
        return info.context.dataset_loader.load(
            f"{get_logged_in_username()}&{self.owner}&{self.name}").then(
                lambda dataset: self.helper_resolve_activity_records(dataset, kwargs))

    def resolve_detail_record(self, info, key):
        """Method to resolve the detail record object

        Args:
            info:
            key:

        Returns:

        """
        return ActivityDetailObject(
            id=f"dataset&{self.owner}&{self.name}&{key}",
            owner=self.owner,
            name=self.name,
            _repository_type='dataset',
            key=key)

    def resolve_detail_records(self, info, keys):
        """Method to resolve multiple detail record objects

        Args:
            info:
            keys:

        Returns:

        """
        return [ActivityDetailObject(
            id=f"dataset&{self.owner}&{self.name}&{key}",
            owner=self.owner,
            name=self.name,
            _repository_type='dataset',
            key=key) for key in keys]

    def resolve_dataset_type(self, info, **kwargs):
        """Method to resolve a DatasetType object for the Dataset

        Args:
            info:

        Returns:

        """
        return info.context.dataset_loader.load(
            f"{get_logged_in_username()}&{self.owner}&{self.name}").then(
                lambda dataset: DatasetType(id=dataset.storage_type,
                                            storage_type=dataset.storage_type))

    def helper_resolve_all_files(self, dataset, kwargs):
        """Helper method to populate the DatasetFileConnection"""
        manifest = Manifest(dataset, get_logged_in_username())

        if "after" in kwargs:
            after_index = int(base64.b64decode(kwargs["after"]))
        else:
            after_index = 0

        # Generate naive cursors
        edges, indexes = manifest.list(first=kwargs.get("first"),
                                       after_index=after_index)
        cursors = [base64.b64encode("{}".format(x).encode("UTF-8")).decode("UTF-8")
                   for x in indexes]

        edge_objs = []
        for edge, cursor in zip(edges, cursors):
            create_data = {"owner": self.owner,
                           "name": self.name,
                           "key": edge['key'],
                           "_file_info": edge}
            edge_objs.append(DatasetFileConnection.Edge(node=DatasetFile(**create_data),
                                                        cursor=cursor))

        has_previous_page = False
        has_next_page = len(edges) > 0
        start_cursor = None
        end_cursor = None
        if cursors:
            start_cursor = cursors[0]
            end_cursor = cursors[-1]
            if indexes[-1] == len(manifest.manifest) - 1:
                has_next_page = False

        if kwargs.get("after"):
            if int(base64.b64decode(kwargs["after"])) > 0:
                has_previous_page = True

        page_info = graphene.relay.PageInfo(has_next_page=has_next_page,
                                            has_previous_page=has_previous_page,
                                            start_cursor=start_cursor,
                                            end_cursor=end_cursor)

        return DatasetFileConnection(edges=edge_objs, page_info=page_info)

    def resolve_all_files(self, info, **kwargs):
        """Resolver for getting all files in a Dataset"""
        return info.context.dataset_loader.load(
            f"{get_logged_in_username()}&{self.owner}&{self.name}").then(
                lambda dataset: self.helper_resolve_all_files(dataset, kwargs))

    @staticmethod
    def helper_resolve_visibility(dataset, info):
        # TODO: Future work will look up remote in Dataset data, allowing user to select remote.
        default_remote = dataset.client_config.config['git']['default_remote']
        admin_service = None
        for remote in dataset.client_config.config['git']['remotes']:
            if default_remote == remote:
                admin_service = dataset.client_config.config['git']['remotes'][remote]['admin_service']
                break

        # Extract valid Bearer token
        if "HTTP_AUTHORIZATION" in info.context.headers.environ:
            token = parse_token(info.context.headers.environ["HTTP_AUTHORIZATION"])
        else:
            raise ValueError(
                "Authorization header not provided. Must have a valid session to query for collaborators")

        # Get repository visibility from remote service
        mgr = GitLabManager(default_remote, admin_service, token)
        try:
            owner = InventoryManager().query_owner(dataset)
            d = mgr.repo_details(namespace=owner, repository_name=dataset.name)
            return d.get('visibility')
        except GitLabException:
            return "local"

    def resolve_visibility(self, info):
        """Return a string indicating the visibility of the project from GitLab:
        "public", "private", or "internal"."""
        return info.context.dataset_loader.load(
            f"{get_logged_in_username()}&{self.owner}&{self.name}").then(
                lambda dataset: self.helper_resolve_visibility(dataset, info))

    @staticmethod
    def helper_resolve_default_remote(dataset):
        """Helper to extract the default remote from a dataset"""
        remotes = dataset.git.list_remotes()
        if remotes:
            url = [x['url'] for x in remotes if x['name'] == 'origin']
            if url:
                url = url[0]
                if "http" in url and url[-4:] != ".git":
                    logger.warning(f"Fixing remote URL format: {dataset.name}: {url}")
                    url = f"{url}.git"
                return url
            else:
                logger.warning(f"There exist remotes in {str(dataset)}, but no origin found.")
        return None

    def resolve_default_remote(self, info):
        """Return the URL of the remote origin for this dataset, if one exists

        Args:
            info:

        Returns:

        """
        return info.context.dataset_loader.load(
            f"{get_logged_in_username()}&{self.owner}&{self.name}").then(
                lambda dataset: self.helper_resolve_default_remote(dataset))

    @staticmethod
    def helper_resolve_commits_behind(dataset):
        """Temporary helper to get the commits behind for a dataset. Used for linked
        datasets to see if they are out of date"""
        bm = BranchManager(dataset)
        bm.fetch()
        return bm.get_commits_behind(branch_name='master')

    def resolve_commits_behind(self, info):
        """Method to get the commits behind for a dataset. Used for linked datasets to
        see if they are out of date

        Args:
            info:

        Returns:

        """
        return info.context.dataset_loader.load(
            f"{get_logged_in_username()}&{self.owner}&{self.name}").then(
                lambda dataset: self.helper_resolve_commits_behind(dataset))
class SlideDeck(graphene.ObjectType):
    """Defines the SlideDeck type."""

    url = graphene.String()
class Arguments:
    username = graphene.String(required=True)
    confirmation_code = graphene.String(required=True)
class Input:
    url = graphene.String()
    description = graphene.String()
class UserAttribute:
    id = graphene.GlobalID(description="ID of the user.")
    email = graphene.String(description="Email address of the user.")
    password = graphene.String(description="Password of the user.")
class Arguments:
    username = graphene.String()
class User(graphene.ObjectType):
    id = graphene.ID()
    username = graphene.String()
    last_login = graphene.DateTime(required=False)
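# Illustrative sketch (not part of the original snippets): one way the User type above
# could be exposed from a query field. The in-memory `record` lookup is a hypothetical
# stand-in for whatever data source a real schema would use.
class UserQuery(graphene.ObjectType):
    user = graphene.Field(User, username=graphene.String(required=True))

    def resolve_user(self, info, username):
        # Look the user up in a hypothetical store and map it onto the graphene type;
        # a real resolver would query a database or service instead.
        record = {"id": "1", "username": username, "last_login": None}
        return User(id=record["id"],
                    username=record["username"],
                    last_login=record["last_login"])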
class Arguments:
    post_id = graphene.Int(required=True)
    title = graphene.String()
    content = graphene.String()
    thumb = graphene.String()
class Arguments:
    title = graphene.String()
    content = graphene.String()
    thumb = graphene.String()
class OCDBaseNode(graphene.ObjectType):
    id = graphene.String()
    created_at = graphene.String()
    updated_at = graphene.String()
    extras = graphene.String()
class Arguments(object):
    token = graphene.String()
class Arguments:
    username = graphene.String(required=True)
    password = graphene.String(required=True)
class LemmaInput(graphene.InputObjectType):
    ID = graphene.ID()
    lemma = graphene.String()
    pos = graphene.String()
    language = graphene.String()
class FulfillmentUpdateTrackingInput(graphene.InputObjectType):
    tracking_number = graphene.String(description='Fulfillment tracking number')
    notify_customer = graphene.Boolean(
        description='If true, send an email notification to the customer.')
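# Illustrative sketch (not part of the original snippets): how an InputObjectType such
# as FulfillmentUpdateTrackingInput is typically consumed by a graphene mutation. The
# mutation class, its `ok` payload field, and the lack of any persistence are
# assumptions; only the Arguments wiring is the point here.
class FulfillmentUpdateTracking(graphene.Mutation):
    class Arguments:
        id = graphene.ID(required=True)
        input = FulfillmentUpdateTrackingInput(required=True)

    ok = graphene.Boolean()

    def mutate(self, info, id, input):
        # `input` arrives as an object exposing .tracking_number and .notify_customer;
        # a real implementation would load the fulfillment and persist the change.
        tracking_number = input.tracking_number
        notify_customer = bool(input.notify_customer)
        return FulfillmentUpdateTracking(ok=True)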
class SignupConfirmResponse(graphene.ObjectType):
    is_successful = graphene.Boolean()
    error = graphene.String()
class Input:
    visibility_option = graphene.String()
    tags = graphene.String()
    public_institution_date_id = graphene.ID(required=True)
    mutation_option = graphene.Int(required=True)
class Query(graphene.ObjectType):
    base = graphene.String()
class FileType(graphene.ObjectType):
    class Meta:
        interfaces = (relay.Node, )

    path = graphene.String()
    contents = graphene.String()
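# Illustrative sketch (not part of the original snippets): types that list relay.Node in
# Meta.interfaces usually also implement get_node so the top-level `node(id: ...)` field
# can rehydrate them. Treating the node id as a file path and reading contents from disk
# are assumptions made only for this example.
class FileNode(graphene.ObjectType):
    class Meta:
        interfaces = (relay.Node, )

    path = graphene.String()
    contents = graphene.String()

    @classmethod
    def get_node(cls, info, id):
        # In this sketch the decoded global id is simply the file path.
        with open(id) as f:
            return cls(path=id, contents=f.read())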
class AttributeValue(ModelObjectType):
    id = graphene.GlobalID(required=True)
    name = graphene.String(description=AttributeValueDescriptions.NAME)
    slug = graphene.String(description=AttributeValueDescriptions.SLUG)
    value = graphene.String(description=AttributeValueDescriptions.VALUE)
    translation = TranslationField(AttributeValueTranslation, type_name="attribute value")
    input_type = AttributeInputTypeEnum(description=AttributeDescriptions.INPUT_TYPE)
    reference = graphene.ID(description="The ID of the attribute reference.")
    file = graphene.Field(
        File, description=AttributeValueDescriptions.FILE, required=False
    )
    rich_text = JSONString(
        description=AttributeValueDescriptions.RICH_TEXT, required=False
    )
    boolean = graphene.Boolean(
        description=AttributeValueDescriptions.BOOLEAN, required=False
    )
    date = graphene.Date(description=AttributeValueDescriptions.DATE, required=False)
    date_time = graphene.DateTime(
        description=AttributeValueDescriptions.DATE_TIME, required=False
    )

    class Meta:
        description = "Represents a value of an attribute."
        interfaces = [graphene.relay.Node]
        model = models.AttributeValue

    @staticmethod
    @traced_resolver
    def resolve_input_type(root: models.AttributeValue, info, *_args):
        def _resolve_input_type(attribute):
            requester = get_user_or_app_from_context(info.context)
            if attribute.type == AttributeType.PAGE_TYPE:
                if requester.has_perm(PagePermissions.MANAGE_PAGES):
                    return attribute.input_type
                raise PermissionDenied(permissions=[PagePermissions.MANAGE_PAGES])
            elif requester.has_perm(ProductPermissions.MANAGE_PRODUCTS):
                return attribute.input_type
            raise PermissionDenied(permissions=[ProductPermissions.MANAGE_PRODUCTS])

        return (
            AttributesByAttributeId(info.context)
            .load(root.attribute_id)
            .then(_resolve_input_type)
        )

    @staticmethod
    def resolve_file(root: models.AttributeValue, *_args):
        if not root.file_url:
            return
        return File(url=root.file_url, content_type=root.content_type)

    @staticmethod
    def resolve_reference(root: models.AttributeValue, info, **_kwargs):
        def prepare_reference(attribute):
            if attribute.input_type != AttributeInputType.REFERENCE:
                return
            if attribute.entity_type == AttributeEntityType.PAGE:
                reference_pk = root.reference_page_id
            elif attribute.entity_type == AttributeEntityType.PRODUCT:
                reference_pk = root.reference_product_id
            else:
                return

            reference_id = graphene.Node.to_global_id(attribute.entity_type, reference_pk)
            return reference_id

        return (
            AttributesByAttributeId(info.context)
            .load(root.attribute_id)
            .then(prepare_reference)
        )

    @staticmethod
    def resolve_date_time(root: models.AttributeValue, info, **_kwargs):
        def _resolve_date(attribute):
            if attribute.input_type == AttributeInputType.DATE_TIME:
                return root.date_time
            return None

        return (
            AttributesByAttributeId(info.context)
            .load(root.attribute_id)
            .then(_resolve_date)
        )

    @staticmethod
    def resolve_date(root: models.AttributeValue, info, **_kwargs):
        def _resolve_date(attribute):
            if attribute.input_type == AttributeInputType.DATE:
                return root.date_time
            return None

        return (
            AttributesByAttributeId(info.context)
            .load(root.attribute_id)
            .then(_resolve_date)
        )
class Query(graphene.ObjectType):
    # pyre-fixme[4]: Attribute must be annotated.
    node = relay.Node.Field()
    issues = relay.ConnectionField(
        IssueConnection,
        codes=graphene.List(graphene.Int, default_value=["%"]),
        callables=graphene.List(graphene.String, default_value=["%"]),
        file_names=graphene.List(graphene.String, default_value=["%"]),
        min_trace_length_to_sinks=graphene.Int(),
        max_trace_length_to_sinks=graphene.Int(),
        min_trace_length_to_sources=graphene.Int(),
        max_trace_length_to_sources=graphene.Int(),
        issue_id=graphene.Int(),
    )

    trace = relay.ConnectionField(TraceFrameConnection, issue_id=graphene.ID())
    initial_trace_frames = relay.ConnectionField(
        TraceFrameConnection, issue_id=graphene.Int(), kind=graphene.String()
    )
    next_trace_frames = relay.ConnectionField(
        TraceFrameConnection,
        issue_id=graphene.Int(),
        frame_id=graphene.Int(),
        kind=graphene.String(),
    )

    # Typeahead data.
    codes = relay.ConnectionField(CodeConnection)
    paths = relay.ConnectionField(PathConnection)
    callables = relay.ConnectionField(CallableConnection)

    file = relay.ConnectionField(FileConnection, path=graphene.String())

    filters = relay.ConnectionField(FilterConnection)

    def resolve_issues(
        self,
        info: ResolveInfo,
        codes: List[int],
        callables: List[str],
        file_names: List[str],
        min_trace_length_to_sinks: Optional[int] = None,
        max_trace_length_to_sinks: Optional[int] = None,
        min_trace_length_to_sources: Optional[int] = None,
        max_trace_length_to_sources: Optional[int] = None,
        issue_id: Optional[int] = None,
        **kwargs: Any,
    ) -> List[IssueQueryResult]:
        session = get_session(info.context)
        run_id = Query.latest_run_id(session)

        builder = (
            issues.Query(session, run_id)
            .where_codes_is_any_of(codes)
            .where_callables_is_any_of(callables)
            .where_file_names_is_any_of(file_names)
            .where_trace_length_to_sinks(
                min_trace_length_to_sinks, max_trace_length_to_sinks
            )
            .where_trace_length_to_sources(
                min_trace_length_to_sources, max_trace_length_to_sources
            )
            .where_issue_id_is(issue_id)
        )

        return builder.get()

    def resolve_trace(
        self, info: ResolveInfo, issue_id: DBID, **args: Any
    ) -> List[TraceFrameQueryResult]:
        session = info.context.get("session")

        run_id = DBID(Query.latest_run_id(session))

        issue = issues.Query(session, run_id).where_issue_id_is(int(issue_id)).get()[0]
        leaf_kinds = Query.all_leaf_kinds(session)

        builder = issues.Query(session, run_id)
        sources = builder.get_leaves_issue_instance(
            session, int(issue.id), SharedTextKind.SOURCE
        )
        sinks = builder.get_leaves_issue_instance(
            session, int(issue.id), SharedTextKind.SINK
        )

        postcondition_navigation = trace.Query(session).navigate_trace_frames(
            leaf_kinds,
            run_id,
            sources,
            sinks,
            trace.Query(session).initial_trace_frames(
                int(issue.id), TraceKind.POSTCONDITION
            ),
        )
        precondition_navigation = trace.Query(session).navigate_trace_frames(
            leaf_kinds,
            run_id,
            sources,
            sinks,
            trace.Query(session).initial_trace_frames(
                int(issue.id), TraceKind.PRECONDITION
            ),
        )

        trace_frames = (
            [frame_tuple[0] for frame_tuple in reversed(postcondition_navigation)]
            + [
                TraceFrameQueryResult(
                    id=DBID(0),
                    caller="",
                    caller_port="",
                    callee=issue.callable,
                    callee_port="root",
                    filename=issue.filename,
                    callee_location=issue.location,
                )
            ]
            + [frame_tuple[0] for frame_tuple in precondition_navigation]
        )

        return [
            frame._replace(
                file_content=Query().resolve_file(info, path=frame.filename)
            )
            for frame in trace_frames
            if frame.filename
        ]

    def resolve_initial_trace_frames(
        self, info: ResolveInfo, issue_id: int, kind: str
    ) -> List[TraceFrameQueryResult]:
        session = info.context.get("session")
        return trace.Query(session).initial_trace_frames(
            issue_id, TraceKind.create_from_string(kind)
        )

    def resolve_next_trace_frames(
        self, info: ResolveInfo, issue_id: int, frame_id: int, kind: str
    ) -> List[TraceFrameQueryResult]:
        session = info.context.get("session")

        leaf_kinds = Query.all_leaf_kinds(session)

        run_id = DBID(Query.latest_run_id(session))

        trace_kind = TraceKind.create_from_string(kind)
        if trace_kind == TraceKind.POSTCONDITION:
            leaf_kind = issues.Query(session, run_id).get_leaves_issue_instance(
                session, issue_id, SharedTextKind.SOURCE
            )
        elif trace_kind == TraceKind.PRECONDITION:
            leaf_kind = issues.Query(session, run_id).get_leaves_issue_instance(
                session, issue_id, SharedTextKind.SINK
            )

        trace_frame = session.query(TraceFrame).get(frame_id)
        if trace_frame is None:
            raise ValueError(f"`{frame_id}` is not a valid trace frame id")

        return trace.Query(session).next_trace_frames(
            leaf_kinds, run_id, leaf_kind, trace_frame, visited_ids=set()
        )

    def resolve_codes(self, info: ResolveInfo) -> List[typeahead.Code]:
        session = info.context["session"]
        return typeahead.all_codes(session)

    def resolve_paths(self, info: ResolveInfo) -> List[typeahead.Path]:
        session = info.context["session"]
        return typeahead.all_paths(session)

    def resolve_callables(self, info: ResolveInfo) -> List[typeahead.Callable]:
        session = info.context["session"]
        return typeahead.all_callables(session)

    def resolve_file(self, info: ResolveInfo, path: str, **kwargs: Any) -> List[File]:
        if ".." in path:
            raise FileNotFoundError("Attempted directory traversal")

        source_directory = Path(info.context.get("source_directory") or os.getcwd())
        contents = (source_directory / path).read_text()
        return [File(path=path, contents=contents)]

    def resolve_filters(self, info: ResolveInfo) -> List[filters_module.Filter]:
        session = info.context["session"]
        return filters_module.all_filters(session)

    @staticmethod
    def all_leaf_kinds(
        session: Session,
    ) -> Tuple[Dict[int, str], Dict[int, str], Dict[int, str]]:
        return (
            {
                int(id): contents
                for id, contents in session.query(
                    SharedText.id, SharedText.contents
                ).filter(SharedText.kind == SharedTextKind.SOURCE)
            },
            {
                int(id): contents
                for id, contents in session.query(
                    SharedText.id, SharedText.contents
                ).filter(SharedText.kind == SharedTextKind.SINK)
            },
            {
                int(id): contents
                for id, contents in session.query(
                    SharedText.id, SharedText.contents
                ).filter(SharedText.kind == SharedTextKind.FEATURE)
            },
        )

    @staticmethod
    def latest_run_id(session: Session) -> DBID:
        return DBID(
            session.query(func.max(Run.id))
            .filter(Run.status == RunStatus.FINISHED)
            .scalar()
        )
class Arguments:
    url = graphene.String()
    description = graphene.String()
class MessageField(graphene.ObjectType):
    message = graphene.String()
class Query(graphene.ObjectType):
    hello = graphene.String()

    def resolve_hello(self, info):
        return models.querySome()
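# Illustrative sketch (not part of the original snippets): wiring the Query type above
# into a schema and executing it. This assumes models.querySome() returns a string,
# which is all graphene needs to serialize the `hello` field.
schema = graphene.Schema(query=Query)
result = schema.execute("{ hello }")
print(result.data)  # e.g. {"hello": "..."} when models.querySome() returns a string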
class Arguments(object):
    username = graphene.String()
    password = graphene.String()
class InvoiceCreateInput(graphene.InputObjectType):
    number = graphene.String(required=True, description="Invoice number.")
    url = graphene.String(required=True, description="URL of an invoice to download.")
class LinkNode(graphene.ObjectType):
    note = graphene.String()
    url = graphene.String()