class Arguments:
    # GraphQL arguments for an invitation-link style mutation (the enclosing
    # mutation is outside this view).
    max_count = graphene.Int(required=True)   # how many times the link may be used
    lifespan = graphene.Int(required=True)    # validity period of the link
    accounts_list = graphene.JSONString()
    custom_link = graphene.String(required=True)
    link = graphene.String(required=False)  # string derived from url.
    link_to_profile = graphene.Boolean(required=False)
    is_toush_profile = graphene.Boolean(required=True)
    is_toush_feed = graphene.Boolean(required=True)
    is_toush_events = graphene.Boolean(required=True)
    # NOTE(review): the fields below are Booleans — presumably toggles that
    # select which profile fields are requested/shown, not the values
    # themselves. Confirm against the consuming mutation.
    username = graphene.Boolean(required=False)
    first_name = graphene.Boolean(required=False)
    last_name = graphene.Boolean(required=False)
    contact_phone = graphene.Boolean(required=False)
    # profile_picture =
    # banner =
    # color scheme =
    contact_email = graphene.Boolean(required=False)
    message = graphene.String()
class InvitationType(CsdDjangoObjectType):
    """GraphQL type for an Invitation, with token-validation helper fields."""

    class Meta:
        model = Invitation
        fields = (
            'id',
            'expires',
            'created',
            'updated',
            'state',
            'invitation_type',
            'message',
            'invitee_email',
        )
        interfaces = (gql.Node,)
        csd_filter_fields = {'state': ['exact', 'in']}

    is_valid = graphene.Boolean()
    invalid_reason = graphene.String()
    accept_page = graphene.String()
    payload = graphene.JSONString()

    def resolve_payload(self, info):
        # Expose the encoded payload with camelCase keys for GraphQL clients.
        return convert_dict_underscore_to_camel(self.encoded_payload)

    @queried_by_token
    def resolve_is_valid(self, info, checked_invitation):
        return checked_invitation['is_valid']

    @queried_by_token
    def resolve_invalid_reason(self, info, checked_invitation):
        # Only invalid invitations carry an error reason.
        valid = checked_invitation['is_valid']
        return None if valid else checked_invitation['error']

    @queried_by_token
    def resolve_accept_page(self, info, checked_invitation):
        # Only valid invitations expose an accept page.
        valid = checked_invitation['is_valid']
        return checked_invitation['accept_page'] if valid else None
class PageTranslation(BaseTranslationType):
    """Translation of a static page."""

    content_json = graphene.JSONString(
        description="Translated description of the page (JSON).",
        deprecation_reason=(
            f"{DEPRECATED_IN_3X_FIELD} Use the `content` field instead."
        ),
    )

    class Meta:
        model = page_models.PageTranslation
        interfaces = [graphene.relay.Node]
        only_fields = [
            "content",
            "id",
            "seo_description",
            "seo_title",
            "title",
        ]

    @staticmethod
    def resolve_content_json(root: page_models.PageTranslation, _info):
        # Deprecated field: an empty JSON object stands in for missing content.
        if root.content is None:
            return {}
        return root.content
class ProductTranslatableContent(CountableDjangoObjectType):
    """Translatable content of a product, plus access to the product itself."""

    description_json = graphene.JSONString(
        description="Description of the product (JSON).",
        deprecation_reason=(
            f"{DEPRECATED_IN_3X_FIELD} Use the `description` field instead."),
    )
    translation = TranslationField(ProductTranslation, type_name="product")
    product = graphene.Field(
        "saleor.graphql.product.types.products.Product",
        description="Represents an individual item for sale in the storefront.",
        deprecation_reason=(
            f"{DEPRECATED_IN_3X_FIELD} Get model fields from the root level queries."
        ),
    )
    attribute_values = graphene.List(
        graphene.NonNull(AttributeValueTranslatableContent),
        required=True,
        description="List of product attribute values that can be translated.",
    )

    class Meta:
        model = product_models.Product
        interfaces = [graphene.relay.Node]
        only_fields = EXTENDED_TRANSLATABLE_FIELDS

    @staticmethod
    def resolve_product(root: product_models.Product, info):
        # Products are channel-aware; expose this one without a bound channel.
        return ChannelContext(node=root, channel_slug=None)

    @staticmethod
    def resolve_description_json(root: product_models.Product, _info):
        # Deprecated field: fall back to an empty JSON object.
        if root.description is None:
            return {}
        return root.description

    @staticmethod
    def resolve_attribute_values(root: product_models.Product, info):
        # Batched via a dataloader, then mapped to translatable values.
        loader = SelectedAttributesByProductIdLoader(info.context)
        return loader.load(root.id).then(get_translatable_attribute_values)
class OnChatMessageSentAsync(channels_graphql_ws.Subscription):
    """Test GraphQL subscription with async resolvers.

    Subscribe to receive messages by user ID.
    """

    # pylint: disable=arguments-differ

    # Payload delivered to subscribers.
    event = graphene.JSONString()

    class Arguments:
        """That is how subscription arguments are defined."""

        user_id = UserId()

    async def subscribe(self, info, user_id=None):
        """Specify subscription groups when client subscribes."""
        del info
        assert self is None, "Root `self` expected to be `None`!"
        # Subscribe to the group corresponding to the user.
        # (Fixed PEP 8 idiom: `user_id is not None`, not `not user_id is None`.)
        if user_id is not None:
            return [f"user_{user_id}"]
        # Subscribe to default group.
        return []

    async def publish(self, info, user_id):
        """Publish query result to the subscribers."""
        del info
        event = {"user_id": user_id, "payload": self}
        return OnChatMessageSentAsync(event=event)

    @classmethod
    async def notify(cls, user_id, message):
        """Example of the `notify` classmethod usage."""
        # Find the subscription group for user.
        group = None if user_id is None else f"user_{user_id}"
        await super().broadcast(group=group, payload=message)
class PageTranslatableContent(CountableDjangoObjectType):
    """Translatable content of a static page, plus access to the page itself."""

    content_json = graphene.JSONString(
        description="Content of the page (JSON).",
        deprecation_reason=(
            "Will be removed in Saleor 4.0. Use the `content` field instead."),
    )
    translation = TranslationField(PageTranslation, type_name="page")
    page = graphene.Field(
        "saleor.graphql.page.types.Page",
        # Fix: the description was a tuple of two strings (trailing commas
        # inside the parentheses); graphene expects a single string, so the
        # adjacent literals are now concatenated.
        description=(
            "A static page that can be manually added by a shop operator "
            "through the dashboard."
        ),
        deprecation_reason=("Will be removed in Saleor 4.0. "
                            "Get model fields from the root level."),
    )

    class Meta:
        model = page_models.Page
        interfaces = [graphene.relay.Node]
        only_fields = [
            "content",
            "id",
            "seo_description",
            "seo_title",
            "title",
        ]

    @staticmethod
    @traced_resolver
    def resolve_page(root: page_models.Page, info):
        # Respect page visibility rules for the requesting user.
        return (
            page_models.Page.objects.visible_to_user(info.context.user)
            .filter(pk=root.id)
            .first()
        )

    @staticmethod
    def resolve_content_json(root: page_models.Page, _info):
        # Deprecated field: fall back to an empty JSON object.
        content = root.content
        return content if content is not None else {}
class ProductInput(graphene.InputObjectType):
    # Input payload for product create/update mutations.
    attributes = graphene.List(
        AttributeValueInput, description='List of attributes.')
    publication_date = graphene.types.datetime.Date(
        description='Publication date. ISO 8601 standard.')
    category = graphene.ID(
        description='ID of the product\'s category.', name='category')
    charge_taxes = graphene.Boolean(
        description='Determine if taxes are being charged for the product.')
    collections = graphene.List(
        graphene.ID,
        description='List of IDs of collections that the product belongs to.',
        name='collections')
    description = graphene.String(
        description='Product description (HTML/text).')
    description_json = graphene.JSONString(
        description='Product description (JSON).')
    is_published = graphene.Boolean(
        description='Determines if product is visible to customers.')
    name = graphene.String(description='Product name.')
    price = Decimal(description='Product price.')
    tax_rate = TaxRateType(description='Tax rate.')
    seo = SeoInput(description='Search engine optimization fields.')
    weight = WeightScalar(
        description='Weight of the Product.', required=False)
    # NOTE(review): textwrap.dedent() is a no-op when the first line of the
    # triple-quoted string has no leading whitespace, as below — confirm the
    # rendered descriptions look as intended before changing anything here.
    sku = graphene.String(
        description=dedent("""Stock keeping unit of a product. Note: this field is only used if a product doesn't use variants."""))
    quantity = graphene.Int(
        description=dedent("""The total quantity of a product available for sale. Note: this field is only used if a product doesn't use variants."""))
    track_inventory = graphene.Boolean(
        description=dedent("""Determines if the inventory of this product should be tracked. If false, the quantity won't change when customers buy this item. Note: this field is only used if a product doesn't use variants."""))
class Task(Node, graphene.Interface):
    """GraphQL interface shared by all concrete task types."""

    id = graphene.ID(required=True)
    created_at = graphene.DateTime(required=True)
    modified_at = graphene.DateTime(required=True)
    created_by_user = graphene.String()
    created_by_group = graphene.String()
    slug = graphene.String(required=True)
    name = graphene.String(required=True)
    description = graphene.String()
    is_archived = graphene.Boolean(required=True)
    address_groups = GroupJexl()
    meta = graphene.JSONString(required=True)
    is_multiple_instance = graphene.Boolean(required=True)

    @classmethod
    def resolve_type(cls, instance, info):
        # Dispatch on the model instance's `type`; an unknown type raises
        # KeyError, matching the original behavior.
        type_map = {
            models.Task.TYPE_SIMPLE: SimpleTask,
            models.Task.TYPE_COMPLETE_WORKFLOW_FORM: CompleteWorkflowFormTask,
            models.Task.TYPE_COMPLETE_TASK_FORM: CompleteTaskFormTask,
        }
        return type_map[instance.type]
class RootQuery(graphene.ObjectType):
    """Api root query."""

    request = graphene.JSONString(
        service_name=graphene.String(default_value=app.label),
        query=graphene.String())

    @staticmethod
    async def resolve_request(root, info, service_name, query):
        """Proxy *query* to the named service and return its JSON response."""
        handler = info.context['handler']
        services = handler.settings['services_meta']
        metadata = next(
            (meta for meta in services if meta['name'] == service_name),
            None,
        )
        if metadata is None:
            # Unknown service name: answer with an empty JSON object.
            return {}
        response = await AsyncHTTPClient().fetch(
            metadata['public_api_url'],
            method=handler.request.method,
            body=json.dumps({'query': query}),
            headers=handler.request.headers)
        return json.loads(to_unicode(response.body))
class AttributeValueInput(graphene.InputObjectType):
    # Input describing one attribute assignment. Which of the value-carrying
    # fields (values/file/references/rich_text) applies presumably depends on
    # the attribute's input type — confirm with the consuming mutation.
    id = graphene.ID(description="ID of the selected attribute.")
    values = graphene.List(
        graphene.String,
        required=False,
        description=(
            "The value or slug of an attribute to resolve. "
            "If the passed value is non-existent, it will be created."),
    )
    file = graphene.String(
        required=False,
        description=
        "URL of the file attribute. Every time, a new value is created.",
    )
    content_type = graphene.String(required=False,
                                   description="File content type.")
    references = graphene.List(
        graphene.NonNull(graphene.ID),
        description="List of entity IDs that will be used as references.",
        required=False,
    )
    rich_text = graphene.JSONString(required=False,
                                    description="Text content in JSON format.")
class StoreInput(graphene.InputObjectType):
    # Input payload for creating/updating a store together with its owner
    # account credentials and address details.
    name = graphene.String(description="Store name.", required=True)
    first_name = graphene.String(description="Given name.", required=True)
    last_name = graphene.String(description="Family name.", required=True)
    email = graphene.String(description="The email address of the user.",
                            required=True)
    password = graphene.String(description="Password.", required=True)
    description = graphene.JSONString(
        description="Store full description (JSON).")
    phone = graphene.String(description="Phone number.")
    acreage = graphene.Float(description="Store acreage")
    latlong = graphene.String(description="latlong has format lat,long")
    seo = SeoInput(description="Search engine optimization fields.")
    background_image = Upload(description="Background image file.")
    background_image_alt = graphene.String(
        description="Alt text for a stores media.")
    # Address fields.
    company_name = graphene.String(description="Company or organization.")
    street_address_1 = graphene.String(description="Address.")
    street_address_2 = graphene.String(description="Address.")
    city = graphene.String(description="City.")
    city_area = graphene.String(description="District.")
    postal_code = graphene.String(description="Postal code.")
    country = CountryCodeEnum(description="Country.")
    country_area = graphene.String(description="State or province.")
class ShippingMethodTranslatableContent(ModelObjectType):
    """Translatable content of a shipping method."""

    id = graphene.GlobalID(required=True)
    name = graphene.String(required=True)
    description = graphene.JSONString()
    translation = TranslationField(ShippingMethodTranslation,
                                   type_name="shipping method")
    shipping_method = graphene.Field(
        "saleor.graphql.shipping.types.ShippingMethodType",
        # Fix: the concatenated literals previously rendered a double space
        # ("orders  to them") in the public schema description.
        description=(
            "Shipping method are the methods you'll use to get customer's "
            "orders to them. They are directly exposed to the customers."),
        deprecation_reason=(
            f"{DEPRECATED_IN_3X_FIELD} Get model fields from the root level queries."
        ),
    )

    class Meta:
        model = shipping_models.ShippingMethod
        interfaces = [graphene.relay.Node]

    @staticmethod
    @permission_required(ShippingPermissions.MANAGE_SHIPPING)
    def resolve_shipping_method(root: shipping_models.ShippingMethod, _info):
        # Shipping methods are channel-aware; expose without a bound channel.
        return ChannelContext(node=root, channel_slug=None)
class ResourcePreset(graphene.ObjectType):
    """GraphQL object exposing a named resource preset."""

    name = graphene.String()
    resource_slots = graphene.JSONString()

    @classmethod
    def from_row(cls, context, row):
        """Build an instance from a DB row; a missing row maps to None."""
        if row is None:
            return None
        return cls(
            name=row['name'],
            resource_slots=row['resource_slots'].to_json(),
        )

    @classmethod
    async def load_all(cls, context):
        """Fetch every resource preset from the database."""
        async with context['dbpool'].acquire() as conn:
            query = sa.select([resource_presets]).select_from(resource_presets)
            result = await conn.execute(query)
            rows = await result.fetchall()
            return [cls.from_row(context, row) for row in rows]

    @classmethod
    async def batch_load_by_name(cls, context, names):
        """Fetch presets for *names*, preserving input order (missing -> None)."""
        async with context['dbpool'].acquire() as conn:
            query = (
                sa.select([resource_presets])
                .select_from(resource_presets)
                .where(resource_presets.c.name.in_(names))
                .order_by(resource_presets.c.name)
            )
            # Seed with None so absent names keep their slot in the output.
            objs_per_key = OrderedDict((name, None) for name in names)
            async for row in conn.execute(query):
                objs_per_key[row.name] = cls.from_row(context, row)
            return tuple(objs_per_key.values())
class Category(CountableDjangoObjectType):
    description_json = graphene.JSONString(
        description="Description of the category (JSON).",
        deprecation_reason=(
            "Will be removed in Saleor 4.0. Use the `description` field instead."
        ),
    )
    ancestors = PrefetchingConnectionField(
        lambda: Category, description="List of ancestors of the category."
    )
    products = ChannelContextFilterConnectionField(
        Product,
        channel=graphene.String(
            description="Slug of a channel for which the data should be returned."
        ),
        description="List of products in the category.",
    )
    url = graphene.String(
        description="The storefront's URL for the category.",
        deprecation_reason="This field will be removed after 2020-07-31.",
    )
    children = PrefetchingConnectionField(
        lambda: Category, description="List of children of the category."
    )
    background_image = graphene.Field(
        Image, size=graphene.Int(description="Size of the image.")
    )
    translation = TranslationField(CategoryTranslation, type_name="category")

    class Meta:
        description = (
            "Represents a single category of products. Categories allow to organize "
            "products in a tree-hierarchies which can be used for navigation in the "
            "storefront."
        )
        only_fields = [
            "description",
            "id",
            "level",
            "name",
            "parent",
            "seo_description",
            "seo_title",
            "slug",
        ]
        interfaces = [relay.Node, ObjectWithMetadata]
        model = models.Category

    @staticmethod
    def resolve_ancestors(root: models.Category, info, **_kwargs):
        return root.get_ancestors()

    @staticmethod
    def resolve_description_json(root: models.Category, info):
        # Deprecated field: fall back to an empty JSON object.
        description = root.description
        return description if description is not None else {}

    @staticmethod
    def resolve_background_image(root: models.Category, info, size=None,
                                 **_kwargs):
        # Implicitly returns None when no background image is set.
        if root.background_image:
            return Image.get_adjusted(
                image=root.background_image,
                alt=root.background_image_alt,
                size=size,
                rendition_key_set="background_images",
                info=info,
            )

    @staticmethod
    def resolve_children(root: models.Category, info, **_kwargs):
        # Batched via a dataloader to avoid N+1 queries.
        return CategoryChildrenByCategoryIdLoader(info.context).load(root.pk)

    @staticmethod
    def resolve_url(root: models.Category, _info):
        # Deprecated field: always empty.
        return ""

    @staticmethod
    def resolve_products(root: models.Category, info, channel=None, **_kwargs):
        # Products of this category and all of its descendants. Non-staff
        # requestors are restricted to products published and visible in the
        # (possibly defaulted) channel; staff may filter by channel explicitly.
        requestor = get_user_or_app_from_context(info.context)
        is_staff = requestor_is_staff_member_or_app(requestor)
        tree = root.get_descendants(include_self=True)
        if channel is None and not is_staff:
            channel = get_default_channel_slug_or_graphql_error()
        qs = models.Product.objects.all()
        if not is_staff:
            qs = (
                qs.published(channel)
                .annotate_visible_in_listings(channel)
                .exclude(
                    visible_in_listings=False,
                )
            )
        if channel and is_staff:
            qs = qs.filter(channel_listings__channel__slug=channel)
        qs = qs.filter(category__in=tree)
        return ChannelQsContext(qs=qs, channel_slug=channel)

    @staticmethod
    def __resolve_reference(root, _info, **_kwargs):
        # Apollo Federation entity resolver.
        return graphene.Node.get_node_from_global_id(_info, root.id)
class Collection(ChannelContextTypeWithMetadata, CountableDjangoObjectType):
    description_json = graphene.JSONString(
        description="Description of the collection (JSON).",
        deprecation_reason=(
            "Will be removed in Saleor 4.0. Use the `description` field instead."
        ),
    )
    products = ChannelContextFilterConnectionField(
        Product,
        filter=ProductFilterInput(description="Filtering options for products."),
        sort_by=ProductOrder(description="Sort products."),
        description="List of products in this collection.",
    )
    background_image = graphene.Field(
        Image, size=graphene.Int(description="Size of the image.")
    )
    translation = TranslationField(
        CollectionTranslation,
        type_name="collection",
        resolver=ChannelContextType.resolve_translation,
    )
    channel_listings = graphene.List(
        graphene.NonNull(CollectionChannelListing),
        description="List of channels in which the collection is available.",
    )

    class Meta:
        default_resolver = ChannelContextType.resolver_with_context
        description = "Represents a collection of products."
        only_fields = [
            "description",
            "id",
            "name",
            "seo_description",
            "seo_title",
            "slug",
        ]
        interfaces = [relay.Node, ObjectWithMetadata]
        model = models.Collection

    @staticmethod
    def resolve_background_image(
        root: ChannelContext[models.Collection], info, size=None, **_kwargs
    ):
        # Implicitly returns None when no background image is set.
        if root.node.background_image:
            node = root.node
            return Image.get_adjusted(
                image=node.background_image,
                alt=node.background_image_alt,
                size=size,
                rendition_key_set="background_images",
                info=info,
            )

    @staticmethod
    def resolve_products(root: ChannelContext[models.Collection], info, **kwargs):
        # Restrict products to those the requestor may see in this channel.
        requestor = get_user_or_app_from_context(info.context)
        qs = root.node.products.visible_to_user(requestor, root.channel_slug)
        return ChannelQsContext(qs=qs, channel_slug=root.channel_slug)

    @staticmethod
    @permission_required(ProductPermissions.MANAGE_PRODUCTS)
    def resolve_channel_listings(root: ChannelContext[models.Collection], info):
        # Batched via a dataloader; requires product-management permission.
        return CollectionChannelListingByCollectionIdLoader(info.context).load(
            root.node.id
        )

    @staticmethod
    def __resolve_reference(root, _info, **_kwargs):
        # Apollo Federation entity resolver.
        return graphene.Node.get_node_from_global_id(_info, root.id)

    @staticmethod
    def resolve_description_json(root: ChannelContext[models.Collection], info):
        # Deprecated field: fall back to an empty JSON object.
        description = root.node.description
        return description if description is not None else {}
class MetaExchange(graphene.Interface):
    # Common interface for exchange types: identity plus credentialed
    # queries for balances, orders and tickers.
    # NOTE(review): api_key/api_secret travel as plain GraphQL arguments,
    # which may end up in request logs — confirm the transport/logging policy.
    name = graphene.String()
    instruments = graphene.JSONString()
    balance = graphene.List(Balance,
                            api_key=graphene.String(),
                            api_secret=graphene.String())
    orders = graphene.List(Order,
                           api_key=graphene.String(),
                           api_secret=graphene.String(),
                           order_type=graphene.String())
    tickers = graphene.List(Ticker,
                            api_key=graphene.String(),
                            api_secret=graphene.String())
class CheckoutComplete(BaseMutation):
    order = graphene.Field(Order, description="Placed order.")
    confirmation_needed = graphene.Boolean(
        required=True,
        default_value=False,
        description=("Set to true if payment needs to be confirmed"
                     " before checkout is complete."),
    )
    confirmation_data = graphene.JSONString(
        required=False,
        description=(
            "Confirmation data used to process additional authorization steps."
        ),
    )

    class Arguments:
        checkout_id = graphene.ID(description="Checkout ID.", required=True)
        store_source = graphene.Boolean(
            default_value=False,
            description=(
                "Determines whether to store the payment source for future usage."
            ),
        )
        redirect_url = graphene.String(
            required=False,
            description=("URL of a view where users should be redirected to "
                         "see the order details. URL in RFC 1808 format."),
        )
        payment_data = graphene.JSONString(
            required=False,
            description=(
                "Client-side generated data required to finalize the payment."
            ),
        )

    class Meta:
        description = (
            "Completes the checkout. As a result a new order is created and "
            "a payment charge is made. This action requires a successful "
            "payment before it can be performed. "
            "In case additional confirmation step as 3D secure is required "
            "confirmationNeeded flag will be set to True and no order created "
            "until payment is confirmed with second call of this mutation.")
        error_type_class = CheckoutError
        error_type_field = "checkout_errors"

    @classmethod
    def perform_mutation(cls, _root, info, checkout_id, store_source, **data):
        # Completes the checkout inside a single transaction; the checkout row
        # is locked with select_for_update so that concurrent completion
        # attempts for the same checkout are serialized.
        tracking_code = analytics.get_client_id(info.context)
        with transaction_with_commit_on_errors():
            try:
                checkout = cls.get_node_or_error(
                    info,
                    checkout_id,
                    only_type=Checkout,
                    field="checkout_id",
                    qs=models.Checkout.objects.select_for_update(
                        of=("self", )).prefetch_related(
                            "gift_cards",
                            "lines__variant__product",
                            Prefetch(
                                "payments",
                                queryset=payment_models.Payment.objects.
                                prefetch_related("order__lines"),
                            ),
                        ).select_related("shipping_method__shipping_zone"),
                )
            except ValidationError as e:
                # The checkout may already have been converted into an order
                # (e.g. a retried request): look the order up by its token.
                checkout_token = from_global_id_strict_type(
                    checkout_id, Checkout, field="checkout_id")
                order = order_models.Order.objects.get_by_checkout_token(
                    checkout_token)
                if order:
                    # The order is already created. We return it as a success
                    # checkoutComplete response. Order is anonymized for not logged in
                    # user
                    return CheckoutComplete(order=order,
                                            confirmation_needed=False,
                                            confirmation_data={})
                raise e
            order, action_required, action_data = complete_checkout(
                checkout=checkout,
                payment_data=data.get("payment_data", {}),
                store_source=store_source,
                discounts=info.context.discounts,
                user=info.context.user,
                tracking_code=tracking_code,
                redirect_url=data.get("redirect_url"),
            )
        # If gateway returns information that additional steps are required we need
        # to inform the frontend and pass all required data
        return CheckoutComplete(
            order=order,
            confirmation_needed=action_required,
            confirmation_data=action_data,
        )
class UserDefaultSelectionInput(graphene.InputObjectType):
    # Input for saving a user's default view selection for a course.
    course_id = graphene.ID()
    canvas_course_id = graphene.ID()
    default_view_type = graphene.String(required=True)
    default_view_value = graphene.JSONString(required=True)
class PageTranslationInput(SeoTranslationInput):
    # Translated page fields; SEO translation fields come from the base input.
    title = graphene.String()
    content = graphene.JSONString()  # rich-text content as JSON
class LegacyComputeSession(graphene.ObjectType):
    """
    Represents a master session.
    """

    class Meta:
        interfaces = (Item, )

    tag = graphene.String()  # Only for ComputeSession
    sess_id = graphene.String()    # legacy
    sess_type = graphene.String()  # legacy
    session_name = graphene.String()
    session_type = graphene.String()
    role = graphene.String()
    image = graphene.String()
    registry = graphene.String()
    domain_name = graphene.String()
    group_name = graphene.String()
    group_id = graphene.UUID()
    scaling_group = graphene.String()
    user_uuid = graphene.UUID()
    access_key = graphene.String()
    status = graphene.String()
    status_changed = GQLDateTime()
    status_info = graphene.String()
    created_at = GQLDateTime()
    terminated_at = GQLDateTime()
    startup_command = graphene.String()
    result = graphene.String()

    # hidable fields by configuration
    agent = graphene.String()
    container_id = graphene.String()

    service_ports = graphene.JSONString()
    occupied_slots = graphene.JSONString()
    occupied_shares = graphene.JSONString()
    mounts = graphene.List(lambda: graphene.List(lambda: graphene.String))
    resource_opts = graphene.JSONString()
    num_queries = BigInt()
    live_stat = graphene.JSONString()
    last_stat = graphene.JSONString()
    user_email = graphene.String()

    # Legacy fields
    lang = graphene.String()
    mem_slot = graphene.Int()
    cpu_slot = graphene.Float()
    gpu_slot = graphene.Float()
    tpu_slot = graphene.Float()
    cpu_used = BigInt()
    cpu_using = graphene.Float()
    mem_max_bytes = BigInt()
    mem_cur_bytes = BigInt()
    net_rx_bytes = BigInt()
    net_tx_bytes = BigInt()
    io_read_bytes = BigInt()
    io_write_bytes = BigInt()
    io_max_scratch_size = BigInt()
    io_cur_scratch_size = BigInt()

    @classmethod
    async def _resolve_live_stat(cls, redis_stat, kernel_id):
        # Fetch the live statistics blob for a kernel from Redis and
        # unpack it (msgpack); returns None when no blob is stored.
        cstat = await redis.execute_with_retries(
            lambda: redis_stat.get(kernel_id, encoding=None))
        if cstat is not None:
            cstat = msgpack.unpackb(cstat)
        return cstat

    async def resolve_live_stat(self, info: graphene.ResolveInfo):
        # Live sessions read from Redis; terminated ones fall back to the
        # last statistics snapshot stored in the DB row.
        if not hasattr(self, 'status'):
            return None
        rs = info.context['redis_stat']
        if self.status not in LIVE_STATUS:
            return self.last_stat
        else:
            return await type(self)._resolve_live_stat(rs, str(self.id))

    async def _resolve_legacy_metric(
        self,
        info: graphene.ResolveInfo,
        metric_key,
        metric_field,
        convert_type,
    ):
        # Shared resolver for the legacy numeric fields: pull the metric
        # from the last snapshot (terminated) or the live Redis stat
        # (running), defaulting to convert_type(0) at every missing level.
        if not hasattr(self, 'status'):
            return None
        rs = info.context['redis_stat']
        if self.status not in LIVE_STATUS:
            if self.last_stat is None:
                return convert_type(0)
            metric = self.last_stat.get(metric_key)
            if metric is None:
                return convert_type(0)
            value = metric.get(metric_field)
            if value is None:
                return convert_type(0)
            return convert_type(value)
        else:
            kstat = await type(self)._resolve_live_stat(rs, str(self.id))
            if kstat is None:
                return convert_type(0)
            metric = kstat.get(metric_key)
            if metric is None:
                return convert_type(0)
            value = metric.get(metric_field)
            if value is None:
                return convert_type(0)
            return convert_type(value)

    async def resolve_cpu_used(self, info: graphene.ResolveInfo):
        return await self._resolve_legacy_metric(info, 'cpu_used', 'current', float)

    async def resolve_cpu_using(self, info: graphene.ResolveInfo):
        return await self._resolve_legacy_metric(info, 'cpu_util', 'pct', float)

    async def resolve_mem_max_bytes(self, info: graphene.ResolveInfo):
        return await self._resolve_legacy_metric(info, 'mem', 'stats.max', int)

    async def resolve_mem_cur_bytes(self, info: graphene.ResolveInfo):
        return await self._resolve_legacy_metric(info, 'mem', 'current', int)

    async def resolve_net_rx_bytes(self, info: graphene.ResolveInfo):
        return await self._resolve_legacy_metric(info, 'net_rx', 'stats.rate', int)

    async def resolve_net_tx_bytes(self, info: graphene.ResolveInfo):
        return await self._resolve_legacy_metric(info, 'net_tx', 'stats.rate', int)

    async def resolve_io_read_bytes(self, info: graphene.ResolveInfo):
        return await self._resolve_legacy_metric(info, 'io_read', 'current', int)

    async def resolve_io_write_bytes(self, info: graphene.ResolveInfo):
        return await self._resolve_legacy_metric(info, 'io_write', 'current', int)

    async def resolve_io_max_scratch_size(self, info: graphene.ResolveInfo):
        return await self._resolve_legacy_metric(info, 'io_scratch_size', 'stats.max', int)

    async def resolve_io_cur_scratch_size(self, info: graphene.ResolveInfo):
        return await self._resolve_legacy_metric(info, 'io_scratch_size', 'current', int)

    @classmethod
    def parse_row(cls, context, row):
        # Translate a joined kernels/groups/users DB row into the keyword
        # arguments for this object type. Agent-related fields are hidden
        # from non-superadmins when configured.
        assert row is not None
        from .user import UserRole
        mega = 2 ** 20
        is_superadmin = (context['user']['role'] == UserRole.SUPERADMIN)
        if is_superadmin:
            hide_agents = False
        else:
            hide_agents = context['config']['manager']['hide-agents']
        return {
            'sess_id': row['sess_id'],           # legacy, will be deprecated
            'sess_type': row['sess_type'].name,  # legacy, will be deprecated
            'session_name': row['sess_id'],
            'session_type': row['sess_type'].name,
            'id': row['id'],  # legacy, will be replaced with session UUID
            'role': row['role'],
            'tag': row['tag'],
            'image': row['image'],
            'registry': row['registry'],
            'domain_name': row['domain_name'],
            'group_name': row['name'],  # group.name (group is omitted since use_labels=True is not used)
            'group_id': row['group_id'],
            'scaling_group': row['scaling_group'],
            'user_uuid': row['user_uuid'],
            'access_key': row['access_key'],
            'status': row['status'].name,
            'status_changed': row['status_changed'],
            'status_info': row['status_info'],
            'created_at': row['created_at'],
            'terminated_at': row['terminated_at'],
            'startup_command': row['startup_command'],
            'result': row['result'].name,
            'service_ports': row['service_ports'],
            'occupied_slots': row['occupied_slots'].to_json(),
            'mounts': row['mounts'],
            'resource_opts': row['resource_opts'],
            'num_queries': row['num_queries'],
            # optionally hidden
            'agent': row['agent'] if not hide_agents else None,
            'container_id': row['container_id'] if not hide_agents else None,
            # live_stat is resolved by Graphene
            'last_stat': row['last_stat'],
            'user_email': row['email'],
            # Legacy fields
            # NOTE: currently graphene always uses resolve methods!
            'cpu_used': 0,
            'mem_max_bytes': 0,
            'mem_cur_bytes': 0,
            'net_rx_bytes': 0,
            'net_tx_bytes': 0,
            'io_read_bytes': 0,
            'io_write_bytes': 0,
            'io_max_scratch_size': 0,
            'io_cur_scratch_size': 0,
            'lang': row['image'],
            'occupied_shares': row['occupied_shares'],
            'mem_slot': BinarySize.from_str(
                row['occupied_slots'].get('mem', 0)) // mega,
            'cpu_slot': float(row['occupied_slots'].get('cpu', 0)),
            'gpu_slot': float(row['occupied_slots'].get('cuda.device', 0)),
            'tpu_slot': float(row['occupied_slots'].get('tpu.device', 0)),
        }

    @classmethod
    def from_row(cls, context, row):
        # Row-to-object factory; a missing row maps to None.
        if row is None:
            return None
        props = cls.parse_row(context, row)
        return cls(**props)

    @classmethod
    async def load_count(cls, context, *,
                         domain_name=None, group_id=None,
                         access_key=None, status=None):
        # Count master-role kernels matching the optional filters.
        # NOTE(review): status_list is only bound when status is a str or a
        # KernelStatus; any other non-None status would raise NameError below.
        if isinstance(status, str):
            status_list = [KernelStatus[s] for s in status.split(',')]
        elif isinstance(status, KernelStatus):
            status_list = [status]
        async with context['dbpool'].acquire() as conn:
            # NOTE(review): .where() is chained after .as_scalar() — this
            # relies on the installed SQLAlchemy version accepting it; verify.
            query = (
                sa.select([sa.func.count(kernels.c.sess_id)])
                .select_from(kernels)
                .where(kernels.c.role == 'master')
                .as_scalar()
            )
            if domain_name is not None:
                query = query.where(kernels.c.domain_name == domain_name)
            if group_id is not None:
                query = query.where(kernels.c.group_id == group_id)
            if access_key is not None:
                query = query.where(kernels.c.access_key == access_key)
            if status is not None:
                query = query.where(kernels.c.status.in_(status_list))
            result = await conn.execute(query)
            count = await result.fetchone()
            return count[0]

    @classmethod
    async def load_slice(cls, context, limit, offset, *,
                         domain_name=None, group_id=None,
                         access_key=None, status=None,
                         order_key=None, order_asc=None):
        # Paginated listing of master-role kernels with optional filters
        # and ordering (defaults to DEFAULT_SESSION_ORDERING).
        # NOTE(review): same status_list binding caveat as load_count.
        if isinstance(status, str):
            status_list = [KernelStatus[s] for s in status.split(',')]
        elif isinstance(status, KernelStatus):
            status_list = [status]
        async with context['dbpool'].acquire() as conn:
            if order_key is None:
                _ordering = DEFAULT_SESSION_ORDERING
            else:
                _order_func = sa.asc if order_asc else sa.desc
                _ordering = [_order_func(getattr(kernels.c, order_key))]
            j = (kernels.join(groups, groups.c.id == kernels.c.group_id)
                 .join(users, users.c.uuid == kernels.c.user_uuid))
            query = (
                sa.select([kernels, groups.c.name, users.c.email])
                .select_from(j)
                .where(kernels.c.role == 'master')
                .order_by(*_ordering)
                .limit(limit)
                .offset(offset)
            )
            if domain_name is not None:
                query = query.where(kernels.c.domain_name == domain_name)
            if group_id is not None:
                query = query.where(kernels.c.group_id == group_id)
            if access_key is not None:
                query = query.where(kernels.c.access_key == access_key)
            if status is not None:
                query = query.where(kernels.c.status.in_(status_list))
            return [cls.from_row(context, r) async for r in conn.execute(query)]

    @classmethod
    async def batch_load(cls, context, access_keys, *,
                         domain_name=None, group_id=None, status=None):
        # Batched lookup by access key: most recently active sessions first,
        # capped at 100 rows. Results are grouped per access key upstream.
        async with context['dbpool'].acquire() as conn:
            j = (kernels.join(groups, groups.c.id == kernels.c.group_id)
                 .join(users, users.c.uuid == kernels.c.user_uuid))
            query = (
                sa.select([kernels, groups.c.name, users.c.email])
                .select_from(j)
                .where(
                    (kernels.c.access_key.in_(access_keys)) &
                    (kernels.c.role == 'master')
                )
                .order_by(
                    sa.desc(sa.func.greatest(
                        kernels.c.created_at,
                        kernels.c.terminated_at,
                        kernels.c.status_changed,
                    ))
                )
                .limit(100))
            if domain_name is not None:
                query = query.where(kernels.c.domain_name == domain_name)
            if group_id is not None:
                query = query.where(kernels.c.group_id == group_id)
            if status is not None:
                # NOTE(review): unlike the other loaders this compares the raw
                # status value directly (no str -> KernelStatus parsing).
                query = query.where(kernels.c.status == status)
            return await batch_result(
                context, conn, query, cls,
                access_keys, lambda row: row['access_key'],
            )

    @classmethod
    async def batch_load_detail(cls, context, sess_ids, *,
                                domain_name=None, access_key=None,
                                status=None):
        # Batched lookup by session ID; defaults to RUNNING sessions when no
        # status filter is given.
        async with context['dbpool'].acquire() as conn:
            status_list = []
            if isinstance(status, str):
                status_list = [KernelStatus[s] for s in status.split(',')]
            elif isinstance(status, KernelStatus):
                status_list = [status]
            elif status is None:
                status_list = [KernelStatus['RUNNING']]
            j = (kernels.join(groups, groups.c.id == kernels.c.group_id)
                 .join(users, users.c.uuid == kernels.c.user_uuid))
            query = (sa.select([kernels, groups.c.name, users.c.email])
                     .select_from(j)
                     .where((kernels.c.role == 'master') &
                            (kernels.c.sess_id.in_(sess_ids))))
            if domain_name is not None:
                query = query.where(kernels.c.domain_name == domain_name)
            if access_key is not None:
                query = query.where(kernels.c.access_key == access_key)
            if status_list:
                query = query.where(kernels.c.status.in_(status_list))
            return await batch_multiresult(
                context, conn, query, cls,
                sess_ids, lambda row: row['sess_id'],
            )
class ComputeContainer(graphene.ObjectType):
    """GraphQL type for a single kernel container within a compute session."""

    class Meta:
        interfaces = (Item, )

    # identity
    role = graphene.String()
    hostname = graphene.String()
    session_id = graphene.UUID()    # owner session

    # image
    image = graphene.String()
    registry = graphene.String()

    # status
    status = graphene.String()
    status_changed = GQLDateTime()
    status_info = graphene.String()
    created_at = GQLDateTime()
    terminated_at = GQLDateTime()
    starts_at = GQLDateTime()

    # resources
    agent = graphene.String()
    container_id = graphene.String()
    resource_opts = graphene.JSONString()
    occupied_slots = graphene.JSONString()
    live_stat = graphene.JSONString()
    last_stat = graphene.JSONString()

    @classmethod
    def parse_row(cls, context, row):
        """Convert a kernels-table row into keyword args for this type.

        Agent-related fields are hidden from non-superadmins when the
        manager config sets ``hide-agents``.
        """
        assert row is not None
        from .user import UserRole
        is_superadmin = (context['user']['role'] == UserRole.SUPERADMIN)
        if is_superadmin:
            hide_agents = False
        else:
            hide_agents = context['config']['manager']['hide-agents']
        return {
            # identity
            'id': row['id'],
            'role': row['role'],
            'hostname': None,          # TODO: implement
            'session_id': row['id'],   # master container's ID == session ID
            # image
            'image': row['image'],
            'registry': row['registry'],
            # status
            'status': row['status'].name,
            'status_changed': row['status_changed'],
            'status_info': row['status_info'],
            'created_at': row['created_at'],
            'terminated_at': row['terminated_at'],
            'starts_at': row['starts_at'],
            'occupied_slots': row['occupied_slots'].to_json(),
            # resources (masked for non-superadmins when hide-agents is set)
            'agent': row['agent'] if not hide_agents else None,
            'container_id': row['container_id'] if not hide_agents else None,
            'resource_opts': row['resource_opts'],
            # statistics
            # live_stat is resolved by Graphene
            'last_stat': row['last_stat'],
        }

    @classmethod
    def from_row(cls, context, row):
        """Build an instance from a DB row; None row maps to None."""
        if row is None:
            return None
        props = cls.parse_row(context, row)
        return cls(**props)

    async def resolve_live_stat(self, info: graphene.ResolveInfo):
        """Fetch live statistics from Redis for live containers.

        Falls back to the last recorded stats for non-live containers.
        """
        if not hasattr(self, 'status'):
            return None
        rs = info.context['redis_stat']
        if self.status in LIVE_STATUS:
            raw_live_stat = await redis.execute_with_retries(
                lambda: rs.get(str(self.id), encoding=None))
            if raw_live_stat is not None:
                live_stat = msgpack.unpackb(raw_live_stat)
                return live_stat
            return None
        else:
            return self.last_stat

    @classmethod
    async def load_count(cls, context, session_id, *,
                         role=None,
                         domain_name=None, group_id=None,
                         access_key=None):
        """Count containers of a session, with optional filters.

        FIX: the original applied ``.as_scalar()`` to the select and then
        chained ``.where(...)`` onto the resulting scalar-subquery wrapper,
        which is not a generative Select and fails as soon as any filter
        argument is supplied.  The count select is now built fully and
        executed directly.
        """
        async with context['dbpool'].acquire() as conn:
            query = (
                sa.select([sa.func.count(kernels.c.id)])
                .select_from(kernels)
                # TODO: use "owner session ID" when we implement multi-container session
                .where(kernels.c.id == session_id)
            )
            if role is not None:
                query = query.where(kernels.c.role == role)
            if domain_name is not None:
                query = query.where(kernels.c.domain_name == domain_name)
            if group_id is not None:
                query = query.where(kernels.c.group_id == group_id)
            if access_key is not None:
                query = query.where(kernels.c.access_key == access_key)
            result = await conn.execute(query)
            count = await result.fetchone()
            return count[0]

    @classmethod
    async def load_slice(cls, context, limit, offset, session_id, *,
                         role=None,
                         domain_name=None, group_id=None,
                         access_key=None,
                         order_key=None, order_asc=None):
        """Load a paginated slice of a session's containers."""
        async with context['dbpool'].acquire() as conn:
            if order_key is None:
                _ordering = DEFAULT_SESSION_ORDERING
            else:
                _order_func = sa.asc if order_asc else sa.desc
                _ordering = [_order_func(getattr(kernels.c, order_key))]
            j = (
                kernels
                .join(groups, groups.c.id == kernels.c.group_id)
                .join(users, users.c.uuid == kernels.c.user_uuid)
            )
            query = (
                sa.select([kernels, groups.c.name, users.c.email])
                .select_from(j)
                # TODO: use "owner session ID" when we implement multi-container session
                .where(kernels.c.id == session_id)
                .order_by(*_ordering)
                .limit(limit)
                .offset(offset)
            )
            if role is not None:
                query = query.where(kernels.c.role == role)
            if domain_name is not None:
                query = query.where(kernels.c.domain_name == domain_name)
            if group_id is not None:
                query = query.where(kernels.c.group_id == group_id)
            if access_key is not None:
                query = query.where(kernels.c.access_key == access_key)
            return [cls.from_row(context, r) async for r in conn.execute(query)]

    @classmethod
    async def batch_load_by_session(cls, context, session_ids):
        """Dataloader batch: containers grouped by their session IDs."""
        async with context['dbpool'].acquire() as conn:
            query = (
                sa.select([kernels])
                .select_from(kernels)
                # TODO: use "owner session ID" when we implement multi-container session
                .where(kernels.c.id.in_(session_ids))
            )
            return await batch_multiresult(
                context, conn, query, cls,
                session_ids, lambda row: row['id'],
            )

    @classmethod
    async def batch_load_detail(cls, context, container_ids, *,
                                domain_name=None, access_key=None):
        """Dataloader batch: one container per container ID, with filters."""
        async with context['dbpool'].acquire() as conn:
            j = (
                kernels
                .join(groups, groups.c.id == kernels.c.group_id)
                .join(users, users.c.uuid == kernels.c.user_uuid)
            )
            query = (
                sa.select([kernels, groups.c.name, users.c.email])
                .select_from(j)
                .where(
                    (kernels.c.id.in_(container_ids))
                ))
            if domain_name is not None:
                query = query.where(kernels.c.domain_name == domain_name)
            if access_key is not None:
                query = query.where(kernels.c.access_key == access_key)
            return await batch_result(
                context, conn, query, cls,
                container_ids, lambda row: row['id'],
            )
class Product(ChannelContextTypeWithMetadata, CountableDjangoObjectType):
    """GraphQL type for a sellable product, resolved per sales channel.

    Most resolvers receive a ``ChannelContext`` wrapper: ``root.node`` is the
    Django model instance and ``root.channel_slug`` selects the channel whose
    listings/prices apply.  Resolvers return dataloader promises wherever
    possible to batch DB access.
    """

    description_json = graphene.JSONString(
        description="Description of the product (JSON).",
        deprecation_reason=(
            "Will be removed in Saleor 4.0. Use the `description` field instead."
        ),
    )
    url = graphene.String(
        description="The storefront URL for the product.",
        required=True,
        deprecation_reason="This field will be removed after 2020-07-31.",
    )
    thumbnail = graphene.Field(
        Image,
        description="The main thumbnail for a product.",
        size=graphene.Argument(graphene.Int, description="Size of thumbnail."),
    )
    pricing = graphene.Field(
        ProductPricingInfo,
        address=destination_address_argument,
        description=(
            "Lists the storefront product's pricing, the current price and discounts, "
            "only meant for displaying."
        ),
    )
    is_available = graphene.Boolean(
        address=destination_address_argument,
        description="Whether the product is in stock and visible or not.",
    )
    tax_type = graphene.Field(
        TaxType, description="A type of tax. Assigned by enabled tax gateway"
    )
    attributes = graphene.List(
        graphene.NonNull(SelectedAttribute),
        required=True,
        description="List of attributes assigned to this product.",
    )
    channel_listings = graphene.List(
        graphene.NonNull(ProductChannelListing),
        description="List of availability in channels for the product.",
    )
    media_by_id = graphene.Field(
        graphene.NonNull(lambda: ProductMedia),
        id=graphene.Argument(graphene.ID, description="ID of a product media."),
        description="Get a single product media by ID.",
    )
    image_by_id = graphene.Field(
        lambda: ProductImage,
        id=graphene.Argument(graphene.ID, description="ID of a product image."),
        description="Get a single product image by ID.",
        deprecation_reason=(
            "Will be removed in Saleor 4.0. Use the `mediaById` field instead."
        ),
    )
    variants = graphene.List(
        ProductVariant, description="List of variants for the product."
    )
    media = graphene.List(
        graphene.NonNull(lambda: ProductMedia),
        description="List of media for the product.",
    )
    images = graphene.List(
        lambda: ProductImage,
        description="List of images for the product.",
        deprecation_reason=(
            "Will be removed in Saleor 4.0. Use the `media` field instead."
        ),
    )
    collections = graphene.List(
        lambda: Collection, description="List of collections for the product."
    )
    translation = TranslationField(
        ProductTranslation,
        type_name="product",
        resolver=ChannelContextType.resolve_translation,
    )
    available_for_purchase = graphene.Date(
        description="Date when product is available for purchase. "
    )
    is_available_for_purchase = graphene.Boolean(
        description="Whether the product is available for purchase."
    )

    class Meta:
        default_resolver = ChannelContextType.resolver_with_context
        description = "Represents an individual item for sale in the storefront."
        interfaces = [relay.Node, ObjectWithMetadata]
        model = models.Product
        only_fields = [
            "category",
            "charge_taxes",
            "description",
            "id",
            "name",
            "slug",
            "product_type",
            "seo_description",
            "seo_title",
            "updated_at",
            "weight",
            "default_variant",
            "rating",
        ]

    @staticmethod
    def resolve_default_variant(root: ChannelContext[models.Product], info):
        # Load the default variant lazily and re-wrap it in the same
        # channel context as the parent product.
        default_variant_id = root.node.default_variant_id
        if default_variant_id is None:
            return None

        def return_default_variant_with_channel_context(variant):
            return ChannelContext(node=variant, channel_slug=root.channel_slug)

        return (
            ProductVariantByIdLoader(info.context)
            .load(default_variant_id)
            .then(return_default_variant_with_channel_context)
        )

    @staticmethod
    def resolve_category(root: ChannelContext[models.Product], info):
        category_id = root.node.category_id
        if category_id is None:
            return None
        return CategoryByIdLoader(info.context).load(category_id)

    @staticmethod
    def resolve_description_json(root: ChannelContext[models.Product], info):
        # Deprecated alias of `description`; empty dict stands in for NULL.
        description = root.node.description
        return description if description is not None else {}

    @staticmethod
    def resolve_tax_type(root: ChannelContext[models.Product], info):
        # Tax code/description come from the active tax plugin.
        tax_data = info.context.plugins.get_tax_code_from_object_meta(root.node)
        return TaxType(tax_code=tax_data.code, description=tax_data.description)

    @staticmethod
    def resolve_thumbnail(root: ChannelContext[models.Product], info, *, size=255):
        # First media item is the thumbnail; oEmbed media use the provider's
        # thumbnail URL, uploaded images go through the thumbnailer.
        def return_first_thumbnail(product_media):
            if product_media:
                image = product_media[0]
                oembed_data = image.oembed_data

                if oembed_data.get("thumbnail_url"):
                    return Image(
                        alt=oembed_data["title"], url=oembed_data["thumbnail_url"]
                    )

                url = get_product_image_thumbnail(image, size, method="thumbnail")
                alt = image.alt
                return Image(alt=alt, url=info.context.build_absolute_uri(url))
            return None

        return (
            MediaByProductIdLoader(info.context)
            .load(root.node.id)
            .then(return_first_thumbnail)
        )

    @staticmethod
    def resolve_url(*_args):
        # Deprecated field; intentionally returns an empty string.
        return ""

    @staticmethod
    def resolve_pricing(root: ChannelContext[models.Product], info, address=None):
        # Gather every pricing input (listings, variants, collections,
        # channel, discounts) through chained dataloaders, then compute the
        # availability/pricing struct once all promises resolve.
        if not root.channel_slug:
            return None

        country_code = get_user_country_context(
            address, info.context.site.settings.company_address
        )

        context = info.context
        channel_slug = str(root.channel_slug)
        product_channel_listing = ProductChannelListingByProductIdAndChannelSlugLoader(
            context
        ).load((root.node.id, channel_slug))
        variants = ProductVariantsByProductIdLoader(context).load(root.node.id)
        variants_channel_listing = (
            VariantsChannelListingByProductIdAndChannelSlugLoader(context).load(
                (root.node.id, channel_slug)
            )
        )
        collections = CollectionsByProductIdLoader(context).load(root.node.id)
        channel = ChannelBySlugLoader(context).load(channel_slug)

        def calculate_pricing_info(discounts):
            def calculate_pricing_with_channel(channel):
                def calculate_pricing_with_product_channel_listings(
                    product_channel_listing,
                ):
                    def calculate_pricing_with_variants(variants):
                        def calculate_pricing_with_variants_channel_listings(
                            variants_channel_listing,
                        ):
                            def calculate_pricing_with_collections(collections):
                                if not variants_channel_listing:
                                    return None
                                availability = get_product_availability(
                                    product=root.node,
                                    product_channel_listing=product_channel_listing,
                                    variants=variants,
                                    variants_channel_listing=variants_channel_listing,
                                    collections=collections,
                                    discounts=discounts,
                                    channel=channel,
                                    manager=context.plugins,
                                    country=Country(country_code),
                                    local_currency=get_currency_for_country(
                                        country_code
                                    ),
                                )
                                return ProductPricingInfo(**asdict(availability))

                            return collections.then(calculate_pricing_with_collections)

                        return variants_channel_listing.then(
                            calculate_pricing_with_variants_channel_listings
                        )

                    return variants.then(calculate_pricing_with_variants)

                return product_channel_listing.then(
                    calculate_pricing_with_product_channel_listings
                )

            return channel.then(calculate_pricing_with_channel)

        return (
            DiscountsByDateTimeLoader(context)
            .load(info.context.request_time)
            .then(calculate_pricing_info)
        )

    @staticmethod
    def resolve_is_available(root: ChannelContext[models.Product], info, address=None):
        # Available means: visible in the channel listing AND in stock for
        # the destination country.
        if not root.channel_slug:
            return None

        channel_slug = str(root.channel_slug)
        country_code = get_user_country_context(
            address, info.context.site.settings.company_address
        )

        def calculate_is_available(product_channel_listing):
            in_stock = is_product_in_stock(root.node, country_code, channel_slug)
            is_visible = False
            if product_channel_listing:
                is_visible = product_channel_listing.is_available_for_purchase()
            return is_visible and in_stock

        return (
            ProductChannelListingByProductIdAndChannelSlugLoader(info.context)
            .load((root.node.id, channel_slug))
            .then(calculate_is_available)
        )

    @staticmethod
    def resolve_attributes(root: ChannelContext[models.Product], info):
        return SelectedAttributesByProductIdLoader(info.context).load(root.node.id)

    @staticmethod
    def resolve_media_by_id(root: ChannelContext[models.Product], info, id):
        pk = get_database_id(info, id, ProductMedia)
        try:
            return root.node.media.get(pk=pk)
        except models.ProductMedia.DoesNotExist:
            raise GraphQLError("Product media not found.")

    @staticmethod
    def resolve_image_by_id(root: ChannelContext[models.Product], info, id):
        # Deprecated alias of `media_by_id` with an image-specific error.
        pk = get_database_id(info, id, ProductMedia)
        try:
            return root.node.media.get(pk=pk)
        except models.ProductMedia.DoesNotExist:
            raise GraphQLError("Product image not found.")

    @staticmethod
    def resolve_media(root: ChannelContext[models.Product], info, **_kwargs):
        return MediaByProductIdLoader(info.context).load(root.node.id)

    @staticmethod
    def resolve_images(root: ChannelContext[models.Product], info, **_kwargs):
        return ImagesByProductIdLoader(info.context).load(root.node.id)

    @staticmethod
    def resolve_variants(root: ChannelContext[models.Product], info, **_kwargs):
        # Staff see all variants (optionally narrowed by channel); customers
        # only see variants available in the current channel.
        requestor = get_user_or_app_from_context(info.context)
        is_staff = requestor_is_staff_member_or_app(requestor)
        if is_staff and not root.channel_slug:
            variants = ProductVariantsByProductIdLoader(info.context).load(root.node.id)
        elif is_staff and root.channel_slug:
            variants = ProductVariantsByProductIdAndChannel(info.context).load(
                (root.node.id, root.channel_slug)
            )
        else:
            variants = AvailableProductVariantsByProductIdAndChannel(info.context).load(
                (root.node.id, root.channel_slug)
            )

        def map_channel_context(variants):
            return [
                ChannelContext(node=variant, channel_slug=root.channel_slug)
                for variant in variants
            ]

        return variants.then(map_channel_context)

    @staticmethod
    @permission_required(ProductPermissions.MANAGE_PRODUCTS)
    def resolve_channel_listings(root: ChannelContext[models.Product], info, **_kwargs):
        return ProductChannelListingByProductIdLoader(info.context).load(root.node.id)

    @staticmethod
    def resolve_collections(root: ChannelContext[models.Product], info, **_kwargs):
        # Staff see every collection; customers only those whose channel
        # listing is visible in the current channel.
        requestor = get_user_or_app_from_context(info.context)
        is_staff = requestor_is_staff_member_or_app(requestor)

        def return_collections(collections):
            if is_staff:
                return [
                    ChannelContext(node=collection, channel_slug=root.channel_slug)
                    for collection in collections
                ]

            dataloader_keys = [
                (collection.id, str(root.channel_slug)) for collection in collections
            ]
            CollectionChannelListingLoader = (
                CollectionChannelListingByCollectionIdAndChannelSlugLoader
            )
            channel_listings = CollectionChannelListingLoader(info.context).load_many(
                dataloader_keys
            )

            def return_visible_collections(channel_listings):
                visible_collections = []
                channel_listings_dict = {
                    channel_listing.collection_id: channel_listing
                    for channel_listing in channel_listings
                    if channel_listing
                }

                for collection in collections:
                    channel_listing = channel_listings_dict.get(collection.id)
                    if channel_listing and channel_listing.is_visible:
                        visible_collections.append(collection)

                return [
                    ChannelContext(node=collection, channel_slug=root.channel_slug)
                    for collection in visible_collections
                ]

            return channel_listings.then(return_visible_collections)

        return (
            CollectionsByProductIdLoader(info.context)
            .load(root.node.id)
            .then(return_collections)
        )

    @staticmethod
    def __resolve_reference(root: ChannelContext[models.Product], _info, **_kwargs):
        # Apollo-federation entity resolver.
        return graphene.Node.get_node_from_global_id(_info, root.node.id)

    @staticmethod
    def resolve_weight(root: ChannelContext[models.Product], _info, **_kwargs):
        return convert_weight_to_default_weight_unit(root.node.weight)

    @staticmethod
    def resolve_is_available_for_purchase(root: ChannelContext[models.Product], info):
        if not root.channel_slug:
            return None
        channel_slug = str(root.channel_slug)

        def calculate_is_available_for_purchase(product_channel_listing):
            if not product_channel_listing:
                return None
            return product_channel_listing.is_available_for_purchase()

        return (
            ProductChannelListingByProductIdAndChannelSlugLoader(info.context)
            .load((root.node.id, channel_slug))
            .then(calculate_is_available_for_purchase)
        )

    @staticmethod
    def resolve_available_for_purchase(root: ChannelContext[models.Product], info):
        if not root.channel_slug:
            return None
        channel_slug = str(root.channel_slug)

        def calculate_available_for_purchase(product_channel_listing):
            if not product_channel_listing:
                return None
            return product_channel_listing.available_for_purchase

        return (
            ProductChannelListingByProductIdAndChannelSlugLoader(info.context)
            .load((root.node.id, channel_slug))
            .then(calculate_available_for_purchase)
        )

    @staticmethod
    def resolve_product_type(root: ChannelContext[models.Product], info):
        return ProductTypeByIdLoader(info.context).load(root.node.product_type_id)
class Query(Formula1Query):
    """Root query extension exposing a static greeting field."""

    # NOTE(review): the resolver yields a plain string, so a JSONString
    # field serves it JSON-encoded (quoted); `graphene.String()` looks
    # intended — confirm before changing the public schema.
    greeting = graphene.JSONString()

    def resolve_greeting(root, info):
        # Static value; no request state is consulted.
        message = "Hello, World!"
        return message
class ActionType(graphene.ObjectType):
    """GraphQL object pairing an action's type tag with its JSON payload."""

    # `ActionTypes` is declared elsewhere in the project — presumably an
    # enum of allowed action kinds; verify against its definition.
    type = graphene.Field(ActionTypes)
    # Opaque, action-specific data serialized as JSON.
    payload = graphene.JSONString()
class ComputeSession(graphene.ObjectType):
    """GraphQL type for a compute session (master kernel + its containers)."""

    class Meta:
        interfaces = (Item, )

    # identity
    tag = graphene.String()
    name = graphene.String()
    type = graphene.String()

    # image
    image = graphene.String()     # image for the master
    registry = graphene.String()  # image registry for the master
    cluster_template = graphene.String()

    # ownership
    domain_name = graphene.String()
    group_name = graphene.String()
    group_id = graphene.UUID()
    user_email = graphene.String()
    user_id = graphene.UUID()
    access_key = graphene.String()
    created_user_email = graphene.String()
    created_user_id = graphene.UUID()

    # status
    status = graphene.String()
    status_changed = GQLDateTime()
    status_info = graphene.String()
    created_at = GQLDateTime()
    terminated_at = GQLDateTime()
    starts_at = GQLDateTime()
    startup_command = graphene.String()
    result = graphene.String()

    # resources
    resource_opts = graphene.JSONString()
    scaling_group = graphene.String()
    service_ports = graphene.JSONString()
    mounts = graphene.List(lambda: graphene.String)
    occupied_slots = graphene.JSONString()

    # statistics
    num_queries = BigInt()

    # owned containers (aka kernels)
    containers = graphene.List(lambda: ComputeContainer)

    # relations
    dependencies = graphene.List(lambda: ComputeSession)

    @classmethod
    def parse_row(cls, context, row):
        """Convert a joined kernels/groups/users row into field values."""
        assert row is not None
        return {
            # identity
            'id': row['id'],
            'tag': row['tag'],
            'name': row['sess_id'],
            'type': row['sess_type'].name,
            # image
            'image': row['image'],
            'registry': row['registry'],
            'cluster_template': None,   # TODO: implement
            # ownership
            'domain_name': row['domain_name'],
            'group_name': row['name'],  # group.name (group is omitted since use_labels=True is not used)
            'group_id': row['group_id'],
            'user_email': row['email'],
            'user_id': row['user_uuid'],
            'access_key': row['access_key'],
            'created_user_email': None,  # TODO: implement
            'created_user_id': None,     # TODO: implement
            # status
            'status': row['status'].name,
            'status_changed': row['status_changed'],
            'status_info': row['status_info'],
            'created_at': row['created_at'],
            'terminated_at': row['terminated_at'],
            'starts_at': row['starts_at'],
            'startup_command': row['startup_command'],
            'result': row['result'].name,
            # resources
            'resource_opts': row['resource_opts'],
            'scaling_group': row['scaling_group'],
            'service_ports': row['service_ports'],
            'mounts': row['mounts'],
            'occupied_slots': row['occupied_slots'].to_json(),  # TODO: sum of owned containers
            # statistics
            'num_queries': row['num_queries'],
        }

    @classmethod
    def from_row(cls, context: Mapping[str, Any], row: RowProxy) \
            -> Optional[ComputeSession]:
        """Build an instance from a DB row; None row maps to None."""
        if row is None:
            return None
        props = cls.parse_row(context, row)
        return cls(**props)

    async def resolve_containers(
        self,
        info: graphene.ResolveInfo,
    ) -> Iterable[ComputeContainer]:
        """Resolve owned containers through the per-request dataloader."""
        manager = info.context['dlmgr']
        loader = manager.get_loader('ComputeContainer.by_session')
        return await loader.load(self.id)

    async def resolve_dependencies(
        self,
        info: graphene.ResolveInfo,
    ) -> Iterable[ComputeSession]:
        """Resolve sessions this session depends on via the dataloader."""
        manager = info.context['dlmgr']
        loader = manager.get_loader('ComputeSession.by_dependency')
        return await loader.load(self.id)

    @classmethod
    async def load_count(cls, context, *,
                         domain_name=None, group_id=None,
                         access_key=None, status=None):
        """Count master kernels matching the given filters.

        FIX: the original chained ``.where(...)`` calls after
        ``.as_scalar()``, which wraps the select into a non-generative
        scalar subquery and breaks whenever a filter is supplied; the
        count select is now built fully and executed directly.
        ``status_list`` is also initialized up-front so an unexpected
        ``status`` type cannot trigger a NameError.
        """
        status_list = None
        if isinstance(status, str):
            status_list = [KernelStatus[s] for s in status.split(',')]
        elif isinstance(status, KernelStatus):
            status_list = [status]
        async with context['dbpool'].acquire() as conn:
            query = (
                sa.select([sa.func.count(kernels.c.id)])
                .select_from(kernels)
                .where(kernels.c.role == 'master')
            )
            if domain_name is not None:
                query = query.where(kernels.c.domain_name == domain_name)
            if group_id is not None:
                query = query.where(kernels.c.group_id == group_id)
            if access_key is not None:
                query = query.where(kernels.c.access_key == access_key)
            if status is not None:
                query = query.where(kernels.c.status.in_(status_list))
            result = await conn.execute(query)
            count = await result.fetchone()
            return count[0]

    @classmethod
    async def load_slice(cls, context, limit, offset, *,
                         domain_name=None, group_id=None,
                         access_key=None, status=None,
                         order_key=None, order_asc=None):
        """Load a paginated, optionally filtered/ordered session list."""
        status_list = None  # guard against unexpected `status` types
        if isinstance(status, str):
            status_list = [KernelStatus[s] for s in status.split(',')]
        elif isinstance(status, KernelStatus):
            status_list = [status]
        async with context['dbpool'].acquire() as conn:
            if order_key is None:
                _ordering = DEFAULT_SESSION_ORDERING
            else:
                _order_func = sa.asc if order_asc else sa.desc
                _ordering = [_order_func(getattr(kernels.c, order_key))]
            j = (
                kernels
                .join(groups, groups.c.id == kernels.c.group_id)
                .join(users, users.c.uuid == kernels.c.user_uuid)
            )
            query = (
                sa.select([kernels, groups.c.name, users.c.email])
                .select_from(j)
                .where(kernels.c.role == 'master')
                .order_by(*_ordering)
                .limit(limit)
                .offset(offset)
            )
            if domain_name is not None:
                query = query.where(kernels.c.domain_name == domain_name)
            if group_id is not None:
                query = query.where(kernels.c.group_id == group_id)
            if access_key is not None:
                query = query.where(kernels.c.access_key == access_key)
            if status is not None:
                query = query.where(kernels.c.status.in_(status_list))
            return [cls.from_row(context, r) async for r in conn.execute(query)]

    @classmethod
    async def batch_load_by_dependency(cls, context, session_ids):
        """Dataloader batch: sessions that the given sessions depend on."""
        async with context['dbpool'].acquire() as conn:
            j = sa.join(
                kernels, kernel_dependencies,
                kernels.c.id == kernel_dependencies.c.depends_on,
            )
            query = (
                sa.select([kernels])
                .select_from(j)
                .where(
                    (kernels.c.role == 'master') &
                    (kernel_dependencies.c.kernel_id.in_(session_ids))
                )
            )
            return await batch_multiresult(
                context, conn, query, cls,
                session_ids, lambda row: row['id'],
            )

    @classmethod
    async def batch_load_detail(cls, context, session_ids, *,
                                domain_name=None, access_key=None):
        """Dataloader batch: one master-kernel row per session ID."""
        async with context['dbpool'].acquire() as conn:
            j = (
                kernels
                .join(groups, groups.c.id == kernels.c.group_id)
                .join(users, users.c.uuid == kernels.c.user_uuid)
            )
            query = (
                sa.select([kernels, groups.c.name, users.c.email])
                .select_from(j)
                .where(
                    (kernels.c.role == 'master') &
                    (kernels.c.id.in_(session_ids))
                ))
            if domain_name is not None:
                query = query.where(kernels.c.domain_name == domain_name)
            if access_key is not None:
                query = query.where(kernels.c.access_key == access_key)
            return await batch_result(
                context, conn, query, cls,
                session_ids, lambda row: row['id'],
            )
class AttributeValueTranslationInput(NameTranslationInput):
    """Translation input for an attribute value, adding rich-text content."""

    # Rich-text body as JSON — presumably the same editor format used by
    # other rich-text fields in this schema; verify against callers.
    rich_text = graphene.JSONString()
class CheckoutComplete(BaseMutation):
    """Mutation that finalizes a checkout into an order.

    Either places the order (payment already successful) or reports that an
    additional confirmation step (e.g. 3-D Secure) is required before the
    order can be created.
    """

    order = graphene.Field(Order, description="Placed order.")
    confirmation_needed = graphene.Boolean(
        required=True,
        default_value=False,
        description=("Set to true if payment needs to be confirmed"
                     " before checkout is complete."),
    )
    confirmation_data = graphene.JSONString(
        required=False,
        description=(
            "Confirmation data used to process additional authorization steps."
        ),
    )

    class Arguments:
        checkout_id = graphene.ID(description="Checkout ID.", required=True)
        store_source = graphene.Boolean(
            default_value=False,
            description=
            ("Determines whether to store the payment source for future usage."
             ),
        )
        redirect_url = graphene.String(
            required=False,
            description=("URL of a view where users should be redirected to "
                         "see the order details. URL in RFC 1808 format."),
        )
        payment_data = graphene.JSONString(
            required=False,
            description=(
                "Client-side generated data required to finalize the payment."
            ),
        )

    class Meta:
        description = (
            "Completes the checkout. As a result a new order is created and "
            "a payment charge is made. This action requires a successful "
            "payment before it can be performed. "
            "In case additional confirmation step as 3D secure is required "
            "confirmationNeeded flag will be set to True and no order created "
            "until payment is confirmed with second call of this mutation.")
        error_type_class = CheckoutError
        error_type_field = "checkout_errors"

    @classmethod
    def perform_mutation(cls, _root, info, checkout_id, store_source, **data):
        # Everything runs in a transaction that commits even on errors, so
        # partial state (e.g. payment records) is preserved for retries.
        tracking_code = analytics.get_client_id(info.context)
        with transaction_with_commit_on_errors():
            try:
                checkout = cls.get_node_or_error(
                    info,
                    checkout_id,
                    only_type=Checkout,
                    field="checkout_id",
                )
            except ValidationError as e:
                # Checkout lookup failed — it may have already been turned
                # into an order (idempotent completion): look up an order by
                # the checkout token and return it if found.
                _type, checkout_token = from_global_id_or_error(
                    checkout_id, only_type=Checkout, field="checkout_id")

                order = order_models.Order.objects.get_by_checkout_token(
                    checkout_token)
                if order:
                    if not order.channel.is_active:
                        raise ValidationError({
                            "channel": ValidationError(
                                "Cannot complete checkout with inactive channel.",
                                code=CheckoutErrorCode.CHANNEL_INACTIVE.value,
                            )
                        })
                    # The order is already created. We return it as a success
                    # checkoutComplete response. Order is anonymized for not logged in
                    # user
                    return CheckoutComplete(
                        order=order, confirmation_needed=False,
                        confirmation_data={})
                raise e

            manager = info.context.plugins
            lines = fetch_checkout_lines(checkout)
            # Reject completion if any line's variant is unavailable in the
            # checkout's channel.
            variants_id = {line.variant.id for line in lines}
            validate_variants_available_in_channel(
                variants_id,
                checkout.channel,
                CheckoutErrorCode.UNAVAILABLE_VARIANT_IN_CHANNEL,
            )
            checkout_info = fetch_checkout_info(
                checkout, lines, info.context.discounts, manager)
            order, action_required, action_data = complete_checkout(
                manager=manager,
                checkout_info=checkout_info,
                lines=lines,
                payment_data=data.get("payment_data", {}),
                store_source=store_source,
                discounts=info.context.discounts,
                user=info.context.user,
                site_settings=info.context.site.settings,
                tracking_code=tracking_code,
                redirect_url=data.get("redirect_url"),
            )
        # If gateway returns information that additional steps are required we need
        # to inform the frontend and pass all required data
        return CheckoutComplete(
            order=order,
            confirmation_needed=action_required,
            confirmation_data=action_data,
        )
class ShippingPriceTranslationInput(NameTranslationInput):
    """Translation input for a shipping method, adding a JSON description."""

    description = graphene.JSONString(
        description="Translated shipping method description (JSON).")
class TranslationInput(NameTranslationInput, SeoTranslationInput):
    """Combined translation input: name and SEO fields plus a description."""

    # Rich description as JSON — same format as the entity's `description`
    # field, presumably; verify against the translated model.
    description = graphene.JSONString()
class Arguments:
    # Mutation arguments (enclosing mutation class is outside this chunk).
    # All fields are optional (no `required=True`).
    name = graphene.String()          # display name
    length = graphene.Int()           # numeric length — units unspecified here; TODO confirm
    geometry = graphene.JSONString()  # geometry payload as JSON (format not shown here)