Example 1
class Feat(graphene.ObjectType):

    # the weight of the feature
    weight = graphene.Float()
    # a feature is a list of weighted words
    feature = graphene.List(Word)
Example 2
class Query(graphene.ObjectType):
    all_buildings = graphene.List(lambda: Building)
    building_by_id = graphene.Field(Building,
                                    required=True,
                                    osm_id=graphene.Float(required=True))
    buildings_json = graphene.Field(lambda: generic.GenericScalar)
    commerce_json = graphene.Field(lambda: generic.GenericScalar)
    all_commerce = graphene.List(lambda: Commerce)
    commerce_by_id = graphene.Field(Commerce,
                                    required=True,
                                    id=graphene.Int(required=True))

    all_users = graphene.List(User)
    all_messages = graphene.List(Message)

    protected = graphene.String(token=graphene.String())
    my_user = graphene.Field(lambda: User, token=graphene.String())

    @jwt_required
    def resolve_protected(self, context, **kwargs):
        print(get_jwt_identity())
        return ("qqqq")

    @jwt_required
    def resolve_my_user(self, context, **kwargs):
        user_query = User.get_query(context)
        print(get_jwt_identity())
        user = user_query.filter(UserModel.id == get_jwt_identity()).first()
        return user

    def resolve_commerce_json(self, args):
        return GeoJson_commerce

    def resolve_buildings_json(self, args):
        return GeoJson

    def resolve_all_users(self, context, **kwargs):
        user_query = User.get_query(context)
        return user_query

    def resolve_all_messages(self, context, **kwargs):
        message_query = Message.get_query(context)
        return message_query

    def resolve_building_by_id(self, info, osm_id):
        return OSM_ID_2_BUILDING[osm_id]

    def resolve_all_buildings(self, info):
        ab = []
        for k in OSM_ID_2_BUILDING:
            ab.append(SafeDict(OSM_ID_2_BUILDING[k]))
        print(ab)
        return ab

    def resolve_commerce_by_id(self, inf, id):
        return ID_COMMERCE[id]

    def resolve_all_commerce(self, info):
        ab = []
        for k in ID_COMMERCE:
            ab.append(SafeDict(ID_COMMERCE[k]))
        print(ab)
        return ab
Example 3
class Word(graphene.ObjectType):

    weight = graphene.Float()
    word = graphene.String()
Example 4
class ProductQueries(graphene.ObjectType):
    attributes = PrefetchingConnectionField(
        Attribute,
        description='List of the shop\'s attributes.',
        query=graphene.String(description=DESCRIPTIONS['attributes']),
        in_category=graphene.Argument(
            graphene.ID,
            description=dedent(
                '''Return attributes for products belonging to the given
                category.''')),
        in_collection=graphene.Argument(
            graphene.ID,
            description=dedent(
                '''Return attributes for products belonging to the given
                collection.''')),
    )
    categories = PrefetchingConnectionField(
        Category,
        query=graphene.String(description=DESCRIPTIONS['category']),
        level=graphene.Argument(graphene.Int),
        description='List of the shop\'s categories.')
    category = graphene.Field(Category,
                              id=graphene.Argument(graphene.ID, required=True),
                              description='Lookup a category by ID.')
    collection = graphene.Field(Collection,
                                id=graphene.Argument(graphene.ID,
                                                     required=True),
                                description='Lookup a collection by ID.')
    collections = PrefetchingConnectionField(
        Collection,
        query=graphene.String(description=DESCRIPTIONS['collection']),
        description='List of the shop\'s collections.')
    product = graphene.Field(Product,
                             id=graphene.Argument(graphene.ID, required=True),
                             description='Lookup a product by ID.')
    products = PrefetchingConnectionField(
        Product,
        attributes=graphene.List(AttributeScalar,
                                 description='Filter products by attributes.'),
        categories=graphene.List(graphene.ID,
                                 description='Filter products by category.'),
        collections=graphene.List(
            graphene.ID, description='Filter products by collections.'),
        price_lte=graphene.Float(description=dedent(
            '''Filter by price less than or equal to the given value.''')),
        price_gte=graphene.Float(description=dedent('''
                Filter by price greater than or equal to the given value.''')),
        sort_by=graphene.Argument(ProductOrder, description='Sort products.'),
        stock_availability=graphene.Argument(
            StockAvailability,
            description='Filter products by the stock availability'),
        query=graphene.String(description=DESCRIPTIONS['product']),
        description='List of the shop\'s products.')
    product_type = graphene.Field(ProductType,
                                  id=graphene.Argument(graphene.ID,
                                                       required=True),
                                  description='Lookup a product type by ID.')
    product_types = PrefetchingConnectionField(
        ProductType, description='List of the shop\'s product types.')
    product_variant = graphene.Field(ProductVariant,
                                     id=graphene.Argument(graphene.ID,
                                                          required=True),
                                     description='Lookup a variant by ID.')
    product_variants = PrefetchingConnectionField(
        ProductVariant,
        ids=graphene.List(graphene.ID),
        description='Lookup multiple variants by ID')
    report_product_sales = PrefetchingConnectionField(
        ProductVariant,
        period=graphene.Argument(ReportingPeriod,
                                 required=True,
                                 description='Span of time.'),
        description='List of top selling products.')

    def resolve_attributes(self,
                           info,
                           in_category=None,
                           in_collection=None,
                           query=None,
                           **kwargs):
        return resolve_attributes(info, in_category, in_collection, query)

    def resolve_categories(self, info, level=None, query=None, **kwargs):
        return resolve_categories(info, level=level, query=query)

    def resolve_category(self, info, id):
        return graphene.Node.get_node_from_global_id(info, id, Category)

    def resolve_collection(self, info, id):
        return graphene.Node.get_node_from_global_id(info, id, Collection)

    def resolve_collections(self, info, query=None, **kwargs):
        return resolve_collections(info, query)

    def resolve_product(self, info, id):
        return graphene.Node.get_node_from_global_id(info, id, Product)

    def resolve_products(self, info, **kwargs):
        return resolve_products(info, **kwargs)

    def resolve_product_type(self, info, id):
        return graphene.Node.get_node_from_global_id(info, id, ProductType)

    def resolve_product_types(self, info, **kwargs):
        return resolve_product_types(info)

    def resolve_product_variant(self, info, id):
        return graphene.Node.get_node_from_global_id(info, id, ProductVariant)

    def resolve_product_variants(self, info, ids=None, **kwargs):
        return resolve_product_variants(info, ids)

    @permission_required(['order.manage_orders', 'product.manage_products'])
    def resolve_report_product_sales(self, info, period, **kwargs):
        return resolve_report_product_sales(info, period)
Example 5
class FaceEmotion(graphene.ObjectType):
    title = graphene.String()
    factor = graphene.Float()
Example 6
class PostInterface(SQLAlchemyInterface):
    __doc__ = docs.PostInterface.__doc__

    class Meta:
        model = models.Post
        only_fields = ('creator', 'message_classifier')
        # Don't add id to only_fields in an interface, or the id of Post
        # will be just the primary key, not the base64 type:id.

    creation_date = DateTime(description=docs.PostInterface.creation_date)
    modification_date = DateTime(
        description=docs.PostInterface.modification_date)
    subject = graphene.String(lang=graphene.String(),
                              description=docs.PostInterface.subject)
    body = graphene.String(lang=graphene.String(),
                           description=docs.PostInterface.body)
    subject_entries = graphene.List(
        LangStringEntry,
        lang=graphene.String(),
        description=docs.PostInterface.subject_entries)
    body_entries = graphene.List(LangStringEntry,
                                 lang=graphene.String(),
                                 description=docs.PostInterface.body_entries)
    sentiment_counts = graphene.Field(
        SentimentCounts, description=docs.PostInterface.sentiment_counts)
    my_sentiment = graphene.Field(type=SentimentTypes,
                                  description=docs.PostInterface.my_sentiment)
    indirect_idea_content_links = graphene.List(
        IdeaContentLink,
        description=docs.PostInterface.indirect_idea_content_links)
    extracts = graphene.List(Extract, description=docs.PostInterface.extracts)
    parent_id = graphene.ID(description=docs.PostInterface.parent_id)
    db_id = graphene.Int(description=docs.PostInterface.db_id)
    body_mime_type = graphene.String(
        required=True, description=docs.PostInterface.body_mime_type)
    publication_state = graphene.Field(
        type=PublicationStates,
        description=docs.PostInterface.publication_state)
    attachments = graphene.List(Attachment,
                                description=docs.PostInterface.attachments)
    original_locale = graphene.String(
        description=docs.PostInterface.original_locale)
    publishes_synthesis = graphene.Field(
        lambda: Synthesis, description=docs.PostInterface.publishes_synthesis)
    type = graphene.String(description=docs.PostInterface.type)
    discussion_id = graphene.String(
        description=docs.PostInterface.discussion_id)
    modified = graphene.Boolean(description=docs.PostInterface.modified)
    parent_post_creator = graphene.Field(
        lambda: AgentProfile,
        description=docs.PostInterface.parent_post_creator)
    parent_extract_id = graphene.ID(
        description=docs.PostInterface.parent_extract_id)
    keywords = graphene.List(TagResult,
                             description=docs.PostInterface.keywords)
    nlp_sentiment = graphene.Float(
        description=docs.PostInterface.nlp_sentiment)
    tags = graphene.List(Tag, description=docs.PostInterface.tags)

    def resolve_db_id(self, args, context, info):
        return self.id

    def resolve_extracts(self, args, context, info):
        return self.db.query(models.Extract).join(
            models.Content, models.Extract.content == self).options(
                joinedload(models.Extract.text_fragment_identifiers)).order_by(
                    models.Extract.creation_date).all()

    def resolve_subject(self, args, context, info):
        # Use self.subject and not self.get_subject() because we still
        # want the subject even when the post is deleted.
        subject = resolve_langstring(self.subject, args.get('lang'))
        return subject

    def resolve_body(self, args, context, info):
        body = resolve_langstring(self.get_body(), args.get('lang'))
        return body

    def resolve_parent_post_creator(self, args, context, info):
        if self.parent_id:
            post = models.Content.get(self.parent_id)
            return post.creator

    @staticmethod
    @abort_transaction_on_exception
    def _maybe_translate(post, locale, request):
        if request.authenticated_userid == Everyone:
            # anonymous cannot trigger translations
            return
        if locale:
            lpc = LanguagePreferenceCollectionWithDefault(locale)
        else:
            lpc = LanguagePreferenceCollection.getCurrent(request)
        for ls in (post.body, post.subject):
            source_locale = ls.first_original().locale_code
            pref = lpc.find_locale(source_locale)
            target_locale = pref.translate_to_locale
            if not target_locale:
                continue
            target_locale = target_locale.code
            if not ls.closest_entry(target_locale):
                post.maybe_translate(lpc)
        # flush so abort_transaction_on_exception decorator can catch the error
        post.db.flush()

    def resolve_subject_entries(self, args, context, info):
        # Use self.subject and not self.get_subject() because we still
        # want the subject even when the post is deleted.
        PostInterface._maybe_translate(self, args.get('lang'), context)
        subject = resolve_best_langstring_entries(self.subject,
                                                  args.get('lang'))
        return subject

    def resolve_body_entries(self, args, context, info):
        PostInterface._maybe_translate(self, args.get('lang'), context)
        body = resolve_best_langstring_entries(self.get_body(),
                                               args.get('lang'))
        return body

    def resolve_sentiment_counts(self, args, context, info):
        # Get the sentiment counts from the cache if it exists instead of
        # triggering an SQL query.
        cache = getattr(context, 'sentiment_counts_by_post_id', None)
        if cache is not None:
            sentiment_counts = {
                name: 0
                for name in models.SentimentOfPost.all_sentiments
            }
            sentiment_counts.update(cache[self.id])
        else:
            sentiment_counts = self.sentiment_counts

        return SentimentCounts(
            dont_understand=sentiment_counts['dont_understand'],
            disagree=sentiment_counts['disagree'],
            like=sentiment_counts['like'],
            more_info=sentiment_counts['more_info'],
        )

    def resolve_my_sentiment(self, args, context, info):
        my_sentiment = self.my_sentiment
        if my_sentiment is None:
            return None

        return my_sentiment.name.upper()

    def resolve_indirect_idea_content_links(self, args, context, info):
        # example:
        #  {'@id': 'local:IdeaContentLink/101',
        #   '@type': 'Extract',
        #   'created': '2014-04-25T17:51:52Z',
        #   'idCreator': 'local:AgentProfile/152',
        #   'idIdea': 'local:Idea/52',
        #   'idPost': 'local:Content/1467'},
        # for @type == 'Extract', idIdea may be None
        # @type == 'IdeaRelatedPostLink' for idea links
        links = [
            IdeaContentLink(
                idea_id=models.Idea.get_database_id(link['idIdea']),
                post_id=models.Post.get_database_id(link['idPost']),
                type=link['@type'],
                creation_date=link['created'],
                creator_id=link['idCreator'])
            for link in self.indirect_idea_content_links_with_cache()
        ]
        # only return links with the IdeaRelatedPostLink type
        return [link for link in links if link.type == 'IdeaRelatedPostLink']

    def resolve_parent_id(self, args, context, info):
        if self.parent_id is None:
            return None

        return models.Post.graphene_id_for(self.parent_id)

    def resolve_body_mime_type(self, args, context, info):
        return self.get_body_mime_type()

    def resolve_publication_state(self, args, context, info):
        return self.publication_state.name

    def resolve_original_locale(self, args, context, info):
        entry = self.body.first_original()
        if entry:
            return entry.locale_code

        return u''

    def resolve_type(self, args, context, info):
        return self.__class__.__name__

    def resolve_modified(self, args, context, info):
        return self.get_modification_date() > self.creation_date

    def resolve_parent_extract_id(self, args, context, info):
        if self.parent_extract_id is None:
            return None

        return models.Extract.graphene_id_for(self.parent_extract_id)

    def resolve_keywords(self, args, context, info):
        return [
            TagResult(score=r.score, value=r.value, count=r.count)
            for r in self.nlp_keywords()
        ]

    def resolve_nlp_sentiment(self, args, context, info):
        sentiments = self.watson_sentiments
        if sentiments:
            # assume only one for now
            return sentiments[0].sentiment

    def resolve_tags(self, args, context, info):
        return self.tags
Example 7
class GrapheneRun(graphene.ObjectType):
    id = graphene.NonNull(graphene.ID)
    runId = graphene.NonNull(graphene.String)
    # Nullable because of historical runs
    pipelineSnapshotId = graphene.String()
    repositoryOrigin = graphene.Field(GrapheneRepositoryOrigin)
    status = graphene.NonNull(GrapheneRunStatus)
    pipeline = graphene.NonNull(GraphenePipelineReference)
    pipelineName = graphene.NonNull(graphene.String)
    jobName = graphene.NonNull(graphene.String)
    solidSelection = graphene.List(graphene.NonNull(graphene.String))
    resolvedOpSelection = graphene.List(graphene.NonNull(graphene.String))
    stats = graphene.NonNull(GrapheneRunStatsSnapshotOrError)
    stepStats = non_null_list(GrapheneRunStepStats)
    computeLogs = graphene.Field(
        graphene.NonNull(GrapheneComputeLogs),
        stepKey=graphene.Argument(graphene.NonNull(graphene.String)),
        description="""
        Compute logs are the stdout/stderr logs for a given solid step computation
        """,
    )
    executionPlan = graphene.Field(GrapheneExecutionPlan)
    stepKeysToExecute = graphene.List(graphene.NonNull(graphene.String))
    runConfigYaml = graphene.NonNull(graphene.String)
    runConfig = graphene.NonNull(GrapheneRunConfigData)
    mode = graphene.NonNull(graphene.String)
    tags = non_null_list(GraphenePipelineTag)
    rootRunId = graphene.Field(graphene.String)
    parentRunId = graphene.Field(graphene.String)
    canTerminate = graphene.NonNull(graphene.Boolean)
    assets = non_null_list(GrapheneAsset)
    events = graphene.Field(
        non_null_list(GrapheneDagsterRunEvent),
        after=graphene.Argument(GrapheneCursor),
    )
    startTime = graphene.Float()
    endTime = graphene.Float()
    updateTime = graphene.Float()

    class Meta:
        interfaces = (GraphenePipelineRun,)
        name = "Run"

    def __init__(self, record):
        check.inst_param(record, "record", RunRecord)
        pipeline_run = record.pipeline_run
        super().__init__(
            runId=pipeline_run.run_id,
            status=PipelineRunStatus(pipeline_run.status),
            mode=pipeline_run.mode,
        )
        self._pipeline_run = pipeline_run
        self._run_record = record
        self._run_stats = None

    def resolve_id(self, _graphene_info):
        return self._pipeline_run.run_id

    def resolve_repositoryOrigin(self, _graphene_info):
        return (
            GrapheneRepositoryOrigin(
                self._pipeline_run.external_pipeline_origin.external_repository_origin
            )
            if self._pipeline_run.external_pipeline_origin
            else None
        )

    def resolve_pipeline(self, graphene_info):
        return get_pipeline_reference_or_raise(graphene_info, self._pipeline_run)

    def resolve_pipelineName(self, _graphene_info):
        return self._pipeline_run.pipeline_name

    def resolve_jobName(self, _graphene_info):
        return self._pipeline_run.pipeline_name

    def resolve_solidSelection(self, _graphene_info):
        return self._pipeline_run.solid_selection

    def resolve_resolvedOpSelection(self, _graphene_info):
        return self._pipeline_run.solids_to_execute

    def resolve_pipelineSnapshotId(self, _graphene_info):
        return self._pipeline_run.pipeline_snapshot_id

    def resolve_stats(self, graphene_info):
        return get_stats(graphene_info, self.run_id)

    def resolve_stepStats(self, graphene_info):
        return get_step_stats(graphene_info, self.run_id)

    def resolve_computeLogs(self, _graphene_info, stepKey):
        return GrapheneComputeLogs(runId=self.run_id, stepKey=stepKey)

    def resolve_executionPlan(self, graphene_info):
        if not (
            self._pipeline_run.execution_plan_snapshot_id
            and self._pipeline_run.pipeline_snapshot_id
        ):
            return None

        instance = graphene_info.context.instance

        execution_plan_snapshot = instance.get_execution_plan_snapshot(
            self._pipeline_run.execution_plan_snapshot_id
        )
        return (
            GrapheneExecutionPlan(
                ExternalExecutionPlan(execution_plan_snapshot=execution_plan_snapshot)
            )
            if execution_plan_snapshot
            else None
        )

    def resolve_stepKeysToExecute(self, _graphene_info):
        return self._pipeline_run.step_keys_to_execute

    def resolve_runConfigYaml(self, _graphene_info):
        return yaml.dump(
            self._pipeline_run.run_config, default_flow_style=False, allow_unicode=True
        )

    def resolve_runConfig(self, _graphene_info):
        return self._pipeline_run.run_config

    def resolve_tags(self, _graphene_info):
        return [
            GraphenePipelineTag(key=key, value=value)
            for key, value in self._pipeline_run.tags.items()
            if get_tag_type(key) != TagType.HIDDEN
        ]

    def resolve_rootRunId(self, _graphene_info):
        return self._pipeline_run.root_run_id

    def resolve_parentRunId(self, _graphene_info):
        return self._pipeline_run.parent_run_id

    @property
    def run_id(self):
        return self.runId

    def resolve_canTerminate(self, graphene_info):
        # short circuit if the pipeline run is in a terminal state
        if self._pipeline_run.is_finished:
            return False
        return graphene_info.context.instance.run_coordinator.can_cancel_run(self.run_id)

    def resolve_assets(self, graphene_info):
        return get_assets_for_run_id(graphene_info, self.run_id)

    def resolve_events(self, graphene_info, after=-1):
        events = graphene_info.context.instance.logs_after(self.run_id, cursor=after)
        return [from_event_record(event, self._pipeline_run.pipeline_name) for event in events]

    def _get_run_record(self, instance):
        if not self._run_record:
            self._run_record = instance.get_run_records(PipelineRunsFilter(run_ids=[self.run_id]))[
                0
            ]
        return self._run_record

    def resolve_startTime(self, graphene_info):
        run_record = self._get_run_record(graphene_info.context.instance)
        # If a user has not migrated in 0.13.15, run_record will not have
        # start_time and end_time, so it is necessary to fill this data using
        # run_stats. Since we potentially make this call multiple times, we
        # cache the result.
        if run_record.start_time is None and self._pipeline_run.status in STARTED_STATUSES:
            if self._run_stats is None or self._run_stats.start_time is None:
                self._run_stats = graphene_info.context.instance.get_run_stats(self.runId)
            return self._run_stats.start_time
        return run_record.start_time

    def resolve_endTime(self, graphene_info):
        run_record = self._get_run_record(graphene_info.context.instance)
        if run_record.end_time is None and self._pipeline_run.status in COMPLETED_STATUSES:
            if self._run_stats is None or self._run_stats.end_time is None:
                self._run_stats = graphene_info.context.instance.get_run_stats(self.runId)
            return self._run_stats.end_time
        return run_record.end_time

    def resolve_updateTime(self, graphene_info):
        run_record = self._get_run_record(graphene_info.context.instance)
        return datetime_as_float(run_record.update_timestamp)
Example 8
class Arguments:
    name = graphene.String()
    year = graphene.Int()
    note_imdb = graphene.Float()
    genre = graphene.String()
    duration = graphene.Int()
Example 9
class SubRecipeInput(graphene.InputObjectType):
    id = graphene.ID()
    child_recipe = graphene.ID()
    parent_recipe = graphene.ID()
    quantity = graphene.Float()
    measurement = graphene.String()
Example 10
class Weight(graphene.ObjectType):
    unit = WeightUnitsEnum(description="Weight unit.", required=True)
    value = graphene.Float(description="Weight value.", required=True)

    class Meta:
        description = "Represents weight value in a specific weight unit."
Example 11
class PriceRangeInput(graphene.InputObjectType):
    gte = graphene.Float(description="Price greater than or equal to.", required=False)
    lte = graphene.Float(description="Price less than or equal to.", required=False)
Example 12
class Sale(ChannelContextTypeWithMetadata, ModelObjectType):
    id = graphene.GlobalID(required=True)
    name = graphene.String(required=True)
    type = SaleType(required=True)
    start_date = graphene.DateTime(required=True)
    end_date = graphene.DateTime()
    created = graphene.DateTime(required=True)
    updated_at = graphene.DateTime(required=True)
    categories = ConnectionField(
        CategoryCountableConnection,
        description="List of categories this sale applies to.",
    )
    collections = ConnectionField(
        CollectionCountableConnection,
        description="List of collections this sale applies to.",
    )
    products = ConnectionField(
        ProductCountableConnection,
        description="List of products this sale applies to.")
    variants = ConnectionField(
        ProductVariantCountableConnection,
        description=
        f"{ADDED_IN_31} List of product variants this sale applies to.",
    )
    translation = TranslationField(
        SaleTranslation,
        type_name="sale",
        resolver=ChannelContextType.resolve_translation,
    )
    channel_listings = graphene.List(
        graphene.NonNull(SaleChannelListing),
        description="List of channels available for the sale.",
    )
    discount_value = graphene.Float(description="Sale value.")
    currency = graphene.String(description="Currency code for sale.")

    class Meta:
        default_resolver = ChannelContextType.resolver_with_context
        description = (
            "Sales allow creating discounts for categories, collections or products "
            "and are visible to all the customers.")
        interfaces = [relay.Node, ObjectWithMetadata]
        model = models.Sale

    @staticmethod
    def resolve_categories(root: ChannelContext[models.Sale], info, *_args,
                           **kwargs):
        qs = root.node.categories.all()
        return create_connection_slice(qs, info, kwargs,
                                       CategoryCountableConnection)

    @staticmethod
    @permission_required(DiscountPermissions.MANAGE_DISCOUNTS)
    def resolve_channel_listings(root: ChannelContext[models.Sale], info,
                                 **_kwargs):
        return SaleChannelListingBySaleIdLoader(info.context).load(
            root.node.id)

    @staticmethod
    @permission_required(DiscountPermissions.MANAGE_DISCOUNTS)
    def resolve_collections(root: ChannelContext[models.Sale], info, *_args,
                            **kwargs):
        qs = root.node.collections.all()
        qs = ChannelQsContext(qs=qs, channel_slug=root.channel_slug)
        return create_connection_slice(qs, info, kwargs,
                                       CollectionCountableConnection)

    @staticmethod
    @permission_required(DiscountPermissions.MANAGE_DISCOUNTS)
    def resolve_products(root: ChannelContext[models.Sale], info, **kwargs):
        qs = root.node.products.all()
        qs = ChannelQsContext(qs=qs, channel_slug=root.channel_slug)
        return create_connection_slice(qs, info, kwargs,
                                       ProductCountableConnection)

    @staticmethod
    @permission_required(DiscountPermissions.MANAGE_DISCOUNTS)
    def resolve_variants(root: ChannelContext[models.Sale], info, **kwargs):
        qs = root.node.variants.all()
        qs = ChannelQsContext(qs=qs, channel_slug=root.channel_slug)
        return create_connection_slice(qs, info, kwargs,
                                       ProductVariantCountableConnection)

    @staticmethod
    def resolve_discount_value(root: ChannelContext[models.Sale], info,
                               **_kwargs):
        if not root.channel_slug:
            return None

        return (SaleChannelListingBySaleIdAndChanneSlugLoader(
            info.context).load(
                (root.node.id, root.channel_slug
                 )).then(lambda channel_listing: channel_listing.discount_value
                         if channel_listing else None))

    @staticmethod
    def resolve_currency(root: ChannelContext[models.Sale], info, **_kwargs):
        if not root.channel_slug:
            return None

        return (SaleChannelListingBySaleIdAndChanneSlugLoader(
            info.context).load(
                (root.node.id, root.channel_slug
                 )).then(lambda channel_listing: channel_listing.currency
                         if channel_listing else None))
Example 13
class Voucher(ChannelContextTypeWithMetadata, ModelObjectType):
    id = graphene.GlobalID(required=True)
    name = graphene.String()
    code = graphene.String(required=True)
    usage_limit = graphene.Int()
    used = graphene.Int(required=True)
    start_date = graphene.DateTime(required=True)
    end_date = graphene.DateTime()
    apply_once_per_order = graphene.Boolean(required=True)
    apply_once_per_customer = graphene.Boolean(required=True)
    only_for_staff = graphene.Boolean(required=True)
    min_checkout_items_quantity = graphene.Int()
    categories = ConnectionField(
        CategoryCountableConnection,
        description="List of categories this voucher applies to.",
    )
    collections = ConnectionField(
        CollectionCountableConnection,
        description="List of collections this voucher applies to.",
    )
    products = ConnectionField(
        ProductCountableConnection,
        description="List of products this voucher applies to.",
    )
    variants = ConnectionField(
        ProductVariantCountableConnection,
        description=
        f"{ADDED_IN_31} List of product variants this voucher applies to.",
    )
    countries = graphene.List(
        types.CountryDisplay,
        description="List of countries available for the shipping voucher.",
    )
    translation = TranslationField(
        VoucherTranslation,
        type_name="voucher",
        resolver=ChannelContextType.resolve_translation,
    )
    discount_value_type = DiscountValueTypeEnum(
        description=
        "Determines a type of discount for voucher - value or percentage",
        required=True,
    )
    discount_value = graphene.Float(description="Voucher value.")
    currency = graphene.String(description="Currency code for voucher.")
    min_spent = graphene.Field(
        Money, description="Minimum order value to apply voucher.")
    type = VoucherTypeEnum(description="Determines a type of voucher.",
                           required=True)
    channel_listings = graphene.List(
        graphene.NonNull(VoucherChannelListing),
        description="List of availability in channels for the voucher.",
    )

    class Meta:
        default_resolver = ChannelContextType.resolver_with_context
        description = (
            "Vouchers allow giving discounts to particular customers on categories, "
            "collections or specific products. They can be used during checkout by "
            "providing valid voucher codes.")
        interfaces = [relay.Node, ObjectWithMetadata]
        model = models.Voucher

    @staticmethod
    def resolve_categories(root: ChannelContext[models.Voucher], info, *_args,
                           **kwargs):
        qs = root.node.categories.all()
        return create_connection_slice(qs, info, kwargs,
                                       CategoryCountableConnection)

    @staticmethod
    @permission_required(DiscountPermissions.MANAGE_DISCOUNTS)
    def resolve_collections(root: ChannelContext[models.Voucher], info, *_args,
                            **kwargs):
        qs = root.node.collections.all()
        qs = ChannelQsContext(qs=qs, channel_slug=root.channel_slug)
        return create_connection_slice(qs, info, kwargs,
                                       CollectionCountableConnection)

    @staticmethod
    @permission_required(DiscountPermissions.MANAGE_DISCOUNTS)
    def resolve_products(root: ChannelContext[models.Voucher], info, **kwargs):
        qs = root.node.products.all()
        qs = ChannelQsContext(qs=qs, channel_slug=root.channel_slug)
        return create_connection_slice(qs, info, kwargs,
                                       ProductCountableConnection)

    @staticmethod
    @permission_required(DiscountPermissions.MANAGE_DISCOUNTS)
    def resolve_variants(root: ChannelContext[models.Voucher], info, **kwargs):
        qs = root.node.variants.all()
        qs = ChannelQsContext(qs=qs, channel_slug=root.channel_slug)
        return create_connection_slice(qs, info, kwargs,
                                       ProductVariantCountableConnection)

    @staticmethod
    def resolve_countries(root: ChannelContext[models.Voucher], *_args,
                          **_kwargs):
        return [
            types.CountryDisplay(code=country.code, country=country.name)
            for country in root.node.countries
        ]

    @staticmethod
    def resolve_discount_value(root: ChannelContext[models.Voucher], info,
                               **_kwargs):
        if not root.channel_slug:
            return None

        return (VoucherChannelListingByVoucherIdAndChanneSlugLoader(
            info.context).load(
                (root.node.id, root.channel_slug
                 )).then(lambda channel_listing: channel_listing.discount_value
                         if channel_listing else None))

    @staticmethod
    def resolve_currency(root: ChannelContext[models.Voucher], info,
                         **_kwargs):
        if not root.channel_slug:
            return None

        return (VoucherChannelListingByVoucherIdAndChanneSlugLoader(
            info.context).load(
                (root.node.id, root.channel_slug
                 )).then(lambda channel_listing: channel_listing.currency
                         if channel_listing else None))

    @staticmethod
    def resolve_min_spent(root: ChannelContext[models.Voucher], info,
                          **_kwargs):
        if not root.channel_slug:
            return None

        return (VoucherChannelListingByVoucherIdAndChanneSlugLoader(
            info.context).load(
                (root.node.id, root.channel_slug
                 )).then(lambda channel_listing: channel_listing.min_spent
                         if channel_listing else None))

    @staticmethod
    @permission_required(DiscountPermissions.MANAGE_DISCOUNTS)
    def resolve_channel_listings(root: ChannelContext[models.Voucher], info,
                                 **_kwargs):
        return VoucherChannelListingByVoucherIdLoader(info.context).load(
            root.node.id)
Example 14
class Sale(ChannelContextTypeWithMetadata, CountableDjangoObjectType):
    categories = PrefetchingConnectionField(
        Category, description="List of categories this sale applies to.")
    collections = ChannelContextFilterConnectionField(
        Collection, description="List of collections this sale applies to.")
    products = ChannelContextFilterConnectionField(
        Product, description="List of products this sale applies to.")
    translation = TranslationField(
        SaleTranslation,
        type_name="sale",
        resolver=ChannelContextType.resolve_translation,
    )
    channel_listings = graphene.List(
        graphene.NonNull(SaleChannelListing),
        description="List of channels available for the sale.",
    )
    discount_value = graphene.Float(description="Sale value.")
    currency = graphene.String(description="Currency code for sale.")

    class Meta:
        default_resolver = ChannelContextType.resolver_with_context
        description = (
            "Sales allow creating discounts for categories, collections or products "
            "and are visible to all the customers.")
        interfaces = [relay.Node, ObjectWithMetadata]
        model = models.Sale
        only_fields = ["end_date", "id", "name", "start_date", "type"]

    @staticmethod
    def resolve_categories(root: ChannelContext[models.Sale], *_args,
                           **_kwargs):
        return root.node.categories.all()

    @staticmethod
    @permission_required(DiscountPermissions.MANAGE_DISCOUNTS)
    def resolve_channel_listings(root: ChannelContext[models.Sale], info,
                                 **_kwargs):
        return SaleChannelListingBySaleIdLoader(info.context).load(
            root.node.id)

    @staticmethod
    @permission_required(DiscountPermissions.MANAGE_DISCOUNTS)
    def resolve_collections(root: ChannelContext[models.Sale], info, *_args,
                            **_kwargs):
        qs = root.node.collections.all()
        return ChannelQsContext(qs=qs, channel_slug=root.channel_slug)

    @staticmethod
    @permission_required(DiscountPermissions.MANAGE_DISCOUNTS)
    def resolve_products(root: ChannelContext[models.Sale], info, **_kwargs):
        qs = root.node.products.all()
        return ChannelQsContext(qs=qs, channel_slug=root.channel_slug)

    @staticmethod
    def resolve_discount_value(root: ChannelContext[models.Sale], info,
                               **_kwargs):
        if not root.channel_slug:
            return None

        return (SaleChannelListingBySaleIdAndChanneSlugLoader(
            info.context).load(
                (root.node.id, root.channel_slug
                 )).then(lambda channel_listing: channel_listing.discount_value
                         if channel_listing else None))

    @staticmethod
    def resolve_currency(root: ChannelContext[models.Sale], info, **_kwargs):
        if not root.channel_slug:
            return None

        return (SaleChannelListingBySaleIdAndChanneSlugLoader(
            info.context).load(
                (root.node.id, root.channel_slug
                 )).then(lambda channel_listing: channel_listing.currency
                         if channel_listing else None))
Example 15
class Edge(graphene.ObjectType):
    """Weighted edges, connections between feature and document nodes."""
    source = graphene.String()
    target = graphene.String()
    weight = graphene.Float()
Example 16
class CaptacaoType(graphene.ObjectType, Resolvers):
    valor = graphene.Float()
    data_recibo = graphene.String(description='Data em formato aaaa-mm-dd')
    cgccpf = graphene.String()
    nome_projeto = graphene.String()
    nome_doador = graphene.String()
Example 17
class Arguments:
    invoice_id = graphene.ID()
    date = graphene.String()
    method = graphene.Int()
    amount = graphene.Float()
Example 18
class ProjetoType(CommonFields, graphene.ObjectType, Resolvers):
    nome = graphene.String()
    providencia = graphene.String()
    PRONAC = graphene.String(description='Pronac do projeto associado')
    IdPRONAC = graphene.String()
    UF = graphene.String(description="Unidade Federativa")
    data_inicio = graphene.String(description='Data em formato aaaa-mm-dd')
    data_termino = graphene.String(description='Data em formato aaaa-mm-dd')
    ano_projeto = graphene.String()
    acessibilidade = graphene.String()
    objetivos = graphene.String()
    justificativa = graphene.String()
    democratizacao = graphene.String()
    etapa = graphene.String()
    ficha_tecnica = graphene.String()
    resumo = graphene.String()
    sinopse = graphene.String()
    impacto_ambiental = graphene.String()
    especificacao_tecnica = graphene.String()
    estrategia_execucao = graphene.String()
    municipio = graphene.String()
    proponente = graphene.String()
    cgccpf = graphene.String()
    area = graphene.String()
    segmento = graphene.String()
    situacao = graphene.String()
    mecanismo = graphene.String()
    enquadramento = graphene.String()
    valor_solicitado = graphene.Float()
    outras_fontes = graphene.Float()
    valor_captado = graphene.Float()
    valor_proposta = graphene.Float()
    valor_aprovado = graphene.Float()
    valor_projeto = graphene.Float()

    query_warning = (
        "*CUIDADO* - "
        "Utilize apenas para um único projeto(com filtro de pronac) ou "
        "quantidade mínima de projetos.")
    #  Detail info
    certidoes_negativas = graphene.List(CertidoesNegativasType,
                                        description=query_warning)
    divulgacoes = graphene.List(DivulgacaoType,
                                description=query_warning)
    deslocamentos = graphene.List(DeslocamentoType,
                                  description=query_warning)
    distribuicoes = graphene.List(DistribuicaoType,
                                  description=query_warning)
    readequacoes = graphene.List(ReadequacaoType,
                                 description=query_warning)
    prorrogacoes = graphene.List(ProrrogacaoType,
                                 description=query_warning)

    relacoes_pagamentos = graphene.List(RelacaoPagamentoType,
                                        description=query_warning)
    relatorios_fiscos = graphene.List(RelatorioFiscoType,
                                      description=query_warning)
    bens_de_capital = graphene.List(BensDeCapitalType,
                                    description=query_warning)
    captacoes = graphene.List(CaptacaoType,
                              description=query_warning)
Example 19
class IngredientBlock(DefaultStreamBlock):
    name = graphene.String()
    quantity = graphene.Float()
    unit = graphene.String()
Example 20
class LegacyComputeSession(graphene.ObjectType):
    """
    Represents a main session.
    """
    class Meta:
        interfaces = (Item, )

    tag = graphene.String()  # Only for ComputeSession
    sess_id = graphene.String()  # legacy
    sess_type = graphene.String()  # legacy
    session_name = graphene.String()
    session_type = graphene.String()
    role = graphene.String()
    image = graphene.String()
    registry = graphene.String()
    domain_name = graphene.String()
    group_name = graphene.String()
    group_id = graphene.UUID()
    scaling_group = graphene.String()
    user_uuid = graphene.UUID()
    access_key = graphene.String()

    status = graphene.String()
    status_changed = GQLDateTime()
    status_info = graphene.String()
    created_at = GQLDateTime()
    terminated_at = GQLDateTime()
    startup_command = graphene.String()
    result = graphene.String()

    # hidable fields by configuration
    agent = graphene.String()
    container_id = graphene.String()

    service_ports = graphene.JSONString()

    occupied_slots = graphene.JSONString()
    occupied_shares = graphene.JSONString()
    mounts = graphene.List(lambda: graphene.List(lambda: graphene.String))
    resource_opts = graphene.JSONString()

    num_queries = BigInt()
    live_stat = graphene.JSONString()
    last_stat = graphene.JSONString()

    user_email = graphene.String()

    # Legacy fields
    lang = graphene.String()
    mem_slot = graphene.Int()
    cpu_slot = graphene.Float()
    gpu_slot = graphene.Float()
    tpu_slot = graphene.Float()
    cpu_used = BigInt()
    cpu_using = graphene.Float()
    mem_max_bytes = BigInt()
    mem_cur_bytes = BigInt()
    net_rx_bytes = BigInt()
    net_tx_bytes = BigInt()
    io_read_bytes = BigInt()
    io_write_bytes = BigInt()
    io_max_scratch_size = BigInt()
    io_cur_scratch_size = BigInt()

    @classmethod
    async def _resolve_live_stat(cls, redis_stat, kernel_id):
        cstat = await redis.execute_with_retries(
            lambda: redis_stat.get(kernel_id, encoding=None))
        if cstat is not None:
            cstat = msgpack.unpackb(cstat)
        return cstat

    async def resolve_live_stat(self, info: graphene.ResolveInfo):
        if not hasattr(self, 'status'):
            return None
        rs = info.context['redis_stat']
        if self.status not in LIVE_STATUS:
            return self.last_stat
        else:
            return await type(self)._resolve_live_stat(rs, str(self.id))

    async def _resolve_legacy_metric(
        self,
        info: graphene.ResolveInfo,
        metric_key,
        metric_field,
        convert_type,
    ):
        if not hasattr(self, 'status'):
            return None
        rs = info.context['redis_stat']
        if self.status not in LIVE_STATUS:
            if self.last_stat is None:
                return convert_type(0)
            metric = self.last_stat.get(metric_key)
            if metric is None:
                return convert_type(0)
            value = metric.get(metric_field)
            if value is None:
                return convert_type(0)
            return convert_type(value)
        else:
            kstat = await type(self)._resolve_live_stat(rs, str(self.id))
            if kstat is None:
                return convert_type(0)
            metric = kstat.get(metric_key)
            if metric is None:
                return convert_type(0)
            value = metric.get(metric_field)
            if value is None:
                return convert_type(0)
            return convert_type(value)

    async def resolve_cpu_used(self, info: graphene.ResolveInfo):
        return await self._resolve_legacy_metric(info, 'cpu_used', 'current',
                                                 float)

    async def resolve_cpu_using(self, info: graphene.ResolveInfo):
        return await self._resolve_legacy_metric(info, 'cpu_util', 'pct',
                                                 float)

    async def resolve_mem_max_bytes(self, info: graphene.ResolveInfo):
        return await self._resolve_legacy_metric(info, 'mem', 'stats.max', int)

    async def resolve_mem_cur_bytes(self, info: graphene.ResolveInfo):
        return await self._resolve_legacy_metric(info, 'mem', 'current', int)

    async def resolve_net_rx_bytes(self, info: graphene.ResolveInfo):
        return await self._resolve_legacy_metric(info, 'net_rx', 'stats.rate',
                                                 int)

    async def resolve_net_tx_bytes(self, info: graphene.ResolveInfo):
        return await self._resolve_legacy_metric(info, 'net_tx', 'stats.rate',
                                                 int)

    async def resolve_io_read_bytes(self, info: graphene.ResolveInfo):
        return await self._resolve_legacy_metric(info, 'io_read', 'current',
                                                 int)

    async def resolve_io_write_bytes(self, info: graphene.ResolveInfo):
        return await self._resolve_legacy_metric(info, 'io_write', 'current',
                                                 int)

    async def resolve_io_max_scratch_size(self, info: graphene.ResolveInfo):
        return await self._resolve_legacy_metric(info, 'io_scratch_size',
                                                 'stats.max', int)

    async def resolve_io_cur_scratch_size(self, info: graphene.ResolveInfo):
        return await self._resolve_legacy_metric(info, 'io_scratch_size',
                                                 'current', int)

    @classmethod
    def parse_row(cls, context, row):
        assert row is not None
        from .user import UserRole
        mega = 2**20
        is_superadmin = (context['user']['role'] == UserRole.SUPERADMIN)
        if is_superadmin:
            hide_agents = False
        else:
            hide_agents = context['config']['manager']['hide-agents']
        return {
            'id': row['id'],
            'sess_id': row['session_name'],  # legacy, will be deprecated
            'sess_type': row['session_type'].name,  # legacy, will be deprecated
            'session_name': row['session_name'],
            'session_type': row['session_type'].name,
            'role': row['role'],
            'tag': row['tag'],
            'image': row['image'],
            'registry': row['registry'],
            # group.name (group is omitted since use_labels=True is not used)
            'group_name': row['name'],
            'domain_name': row['domain_name'],
            'group_id': row['group_id'],
            'scaling_group': row['scaling_group'],
            'user_uuid': row['user_uuid'],
            'access_key': row['access_key'],
            'status': row['status'].name,
            'status_changed': row['status_changed'],
            'status_info': row['status_info'],
            'created_at': row['created_at'],
            'terminated_at': row['terminated_at'],
            'startup_command': row['startup_command'],
            'result': row['result'].name,
            'service_ports': row['service_ports'],
            'occupied_slots': row['occupied_slots'].to_json(),
            'mounts': row['mounts'],
            'resource_opts': row['resource_opts'],
            'num_queries': row['num_queries'],
            # optionally hidden
            'agent': row['agent'] if not hide_agents else None,
            'container_id': row['container_id'] if not hide_agents else None,
            # live_stat is resolved by Graphene
            'last_stat': row['last_stat'],
            'user_email': row['email'],
            # Legacy fields
            # NOTE: currently graphene always uses resolve methods!
            'cpu_used': 0,
            'mem_max_bytes': 0,
            'mem_cur_bytes': 0,
            'net_rx_bytes': 0,
            'net_tx_bytes': 0,
            'io_read_bytes': 0,
            'io_write_bytes': 0,
            'io_max_scratch_size': 0,
            'io_cur_scratch_size': 0,
            'lang': row['image'],
            'occupied_shares': row['occupied_shares'],
            'mem_slot': BinarySize.from_str(row['occupied_slots'].get('mem', 0)) // mega,
            'cpu_slot': float(row['occupied_slots'].get('cpu', 0)),
            'gpu_slot': float(row['occupied_slots'].get('cuda.device', 0)),
            'tpu_slot': float(row['occupied_slots'].get('tpu.device', 0)),
        }

    @classmethod
    def from_row(cls, context, row):
        if row is None:
            return None
        props = cls.parse_row(context, row)
        return cls(**props)

    @classmethod
    async def load_count(cls,
                         context,
                         *,
                         domain_name=None,
                         group_id=None,
                         access_key=None,
                         status=None):
        if isinstance(status, str):
            status_list = [KernelStatus[s] for s in status.split(',')]
        elif isinstance(status, KernelStatus):
            status_list = [status]
        async with context['dbpool'].acquire() as conn:
            query = (sa.select([sa.func.count(
                kernels.c.session_id)]).select_from(kernels).where(
                    kernels.c.cluster_role == DEFAULT_ROLE).as_scalar())
            if domain_name is not None:
                query = query.where(kernels.c.domain_name == domain_name)
            if group_id is not None:
                query = query.where(kernels.c.group_id == group_id)
            if access_key is not None:
                query = query.where(kernels.c.access_key == access_key)
            if status is not None:
                query = query.where(kernels.c.status.in_(status_list))
            result = await conn.execute(query)
            return await result.scalar()

    @classmethod
    async def load_slice(cls,
                         context,
                         limit,
                         offset,
                         *,
                         domain_name=None,
                         group_id=None,
                         access_key=None,
                         status=None,
                         order_key=None,
                         order_asc=None):
        if isinstance(status, str):
            status_list = [KernelStatus[s] for s in status.split(',')]
        elif isinstance(status, KernelStatus):
            status_list = [status]
        async with context['dbpool'].acquire() as conn:
            if order_key is None:
                _ordering = DEFAULT_SESSION_ORDERING
            else:
                _order_func = sa.asc if order_asc else sa.desc
                _ordering = [_order_func(getattr(kernels.c, order_key))]
            j = (kernels.join(groups, groups.c.id == kernels.c.group_id).join(
                users, users.c.uuid == kernels.c.user_uuid))
            query = (sa.select(
                [kernels, groups.c.name, users.c.email]).select_from(j).where(
                    kernels.c.cluster_role == DEFAULT_ROLE).order_by(
                        *_ordering).limit(limit).offset(offset))
            if domain_name is not None:
                query = query.where(kernels.c.domain_name == domain_name)
            if group_id is not None:
                query = query.where(kernels.c.group_id == group_id)
            if access_key is not None:
                query = query.where(kernels.c.access_key == access_key)
            if status is not None:
                query = query.where(kernels.c.status.in_(status_list))
            return [
                cls.from_row(context, r) async for r in conn.execute(query)
            ]

    @classmethod
    async def batch_load(cls,
                         context,
                         access_keys,
                         *,
                         domain_name=None,
                         group_id=None,
                         status=None):
        async with context['dbpool'].acquire() as conn:
            j = (kernels.join(groups, groups.c.id == kernels.c.group_id).join(
                users, users.c.uuid == kernels.c.user_uuid))
            query = (sa.select([
                kernels, groups.c.name, users.c.email
            ]).select_from(j).where((kernels.c.access_key.in_(access_keys)) & (
                kernels.c.cluster_role == DEFAULT_ROLE)).order_by(
                    sa.desc(
                        sa.func.greatest(
                            kernels.c.created_at,
                            kernels.c.terminated_at,
                            kernels.c.status_changed,
                        ))).limit(100))
            if domain_name is not None:
                query = query.where(kernels.c.domain_name == domain_name)
            if group_id is not None:
                query = query.where(kernels.c.group_id == group_id)
            if status is not None:
                query = query.where(kernels.c.status == status)
            return await batch_result(
                context,
                conn,
                query,
                cls,
                access_keys,
                lambda row: row['access_key'],
            )

    @classmethod
    async def batch_load_detail(cls,
                                context,
                                sess_ids,
                                *,
                                domain_name=None,
                                access_key=None,
                                status=None):
        async with context['dbpool'].acquire() as conn:
            status_list = []
            if isinstance(status, str):
                status_list = [KernelStatus[s] for s in status.split(',')]
            elif isinstance(status, KernelStatus):
                status_list = [status]
            elif status is None:
                status_list = [KernelStatus['RUNNING']]
            j = (kernels.join(groups, groups.c.id == kernels.c.group_id).join(
                users, users.c.uuid == kernels.c.user_uuid))
            query = (sa.select([
                kernels, groups.c.name, users.c.email
            ]).select_from(j).where((kernels.c.cluster_role == DEFAULT_ROLE)
                                    & (kernels.c.session_id.in_(sess_ids))))
            if domain_name is not None:
                query = query.where(kernels.c.domain_name == domain_name)
            if access_key is not None:
                query = query.where(kernels.c.access_key == access_key)
            if status_list:
                query = query.where(kernels.c.status.in_(status_list))
            return await batch_multiresult(
                context,
                conn,
                query,
                cls,
                sess_ids,
                lambda row: row['sess_id'],
            )
Example 21
class BusStopInput(graphene.InputObjectType):
    id = graphene.ID()
    stop_name = graphene.String()
    lat = graphene.Float()
    lon = graphene.Float()
Example 22
def convert_field_to_float(field, registry=None):
    return graphene.Float(
        description=get_field_description(field, registry), required=field.required
    )
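The examples above only declare graphene.Float fields inside larger schemas; a minimal self-contained sketch of how such a field behaves end to end is given below. The type and field names (Measurement, value, unit) are invented for illustration and do not come from any of the projects quoted above; the sketch assumes a standard graphene 2.x/3.x installation.

import graphene


class Measurement(graphene.ObjectType):
    # graphene.Float maps to the GraphQL Float scalar (double precision).
    value = graphene.Float(description="Measured value.", required=True)
    unit = graphene.String(description="Unit of measure.")


class Query(graphene.ObjectType):
    measurement = graphene.Field(Measurement)

    def resolve_measurement(root, info):
        # Any numeric value works here; graphene serializes it with float().
        return Measurement(value=21.5, unit="celsius")


schema = graphene.Schema(query=Query)
result = schema.execute("{ measurement { value unit } }")
print(result.data)  # {'measurement': {'value': 21.5, 'unit': 'celsius'}}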