class DjangoDebug(ObjectType):
    """Debug payload exposing the SQL executed while resolving a query."""

    class Meta:
        description = "Debugging information for the current query."

    # One entry per SQL statement run for this API query.
    sql = List(DjangoDebugSQL, description="Executed SQL queries for this API query.")
class Arguments:
    """Mutation arguments: the workflows to target plus the fixed command."""

    # Command string sent to the workflow(s); defaults to 'nudge'.
    command = String(default_value='nudge')
    workflows = List(String, required=True)
class Arguments:
    """Mutation arguments for changing workflow logging verbosity."""

    workflows = List(String, required=True)
    command = String(default_value='set_verbosity')
    # NOTE(review): `NORMAL` is not a standard Python logging level name —
    # confirm the receiving end accepts it.
    level = String(description="""levels: `INFO`, `WARNING`, `NORMAL`, `CRITICAL`, `ERROR`, `DEBUG`""", required=True)
parts = [None] * (4 - dil_count) + parts parts += [state] return tuple(parts) # ** Query Related **# # Field args (i.e. for queries etc): class SortArgs(InputObjectType): keys = List(String, default_value=['id']) reverse = Boolean(default_value=False) jobs_args = dict( ids=List(ID, default_value=[]), exids=List(ID, default_value=[]), states=List(String, default_value=[]), exstates=List(String, default_value=[]), sort=SortArgs(default_value=None), ) all_jobs_args = dict( workflows=List(ID, default_value=[]), exworkflows=List(ID, default_value=[]), ids=List(ID, default_value=[]), exids=List(ID, default_value=[]), states=List(String, default_value=[]), exstates=List(String, default_value=[]), sort=SortArgs(default_value=None), )
class SortArgs(InputObjectType):
    """Input type describing how query results should be sorted."""

    # Field names used as sort keys (defaults to sorting by id).
    keys = List(String, default_value=['id'])
    # Descending order when True.
    reverse = Boolean(default_value=False)
class Arguments:
    """Mutation arguments: the set of primary keys to operate on."""

    pk_set = List(ID)
class NFTS(ObjectType):
    """GraphQL type describing an NFT entry."""

    # Token metadata URI payload(s).
    uri = List(JSONString)
    # Address string — resolvers pass either a wallet address or "Global";
    # TODO confirm intended semantics.
    address = String()
    # Resolved image URLs.
    images = List(String)
    name = String()
class BaseMutation(GraphqlPermissionMixin, MutationErrorHandler, ObjectType):
    """Base class wiring a Django model to create/update/delete mutations.

    Subclasses provide ``_meta.model`` (via ``__init_subclass_with_meta__``
    options) and optionally an ``Arguments`` inner class; this base builds the
    generated input types, the output field, and the create/update/delete
    resolver entry points.
    """

    lookup_field_description = None  # description for ID field
    lookup_field = None        # model field used to look objects up (defaults to pk)
    lookup_url_kwarg = None    # argument name carrying the lookup value, if different
    input_field_name = None    # when set, all input is nested under this one argument
    input_type_name = None     # explicit name for the generated input type
    output_field_name = None   # name of the response field holding the mutated object
    output_field_description = None

    @classmethod
    def __init_subclass_with_meta__(cls, name=None, _meta=None, convert_choices_to_enum=True, **options):
        # Derive the response type name and validate the required _meta/model,
        # then stash the extra Arguments props on _meta for later bundling.
        name = name or '{}Response'.format(cls.__name__)
        assert _meta, "{} _meta instance is required".format(cls.__name__)
        model = getattr(_meta, "model", None)
        assert model, "model is missing in _meta - {}".format(cls.__name__)
        arguments_props = cls._get_argument_fields()
        _meta.arguments_props = arguments_props
        _meta.convert_choices_to_enum = convert_choices_to_enum
        super(BaseMutation, cls).__init_subclass_with_meta__(_meta=_meta, **options, name=name)

    class Meta:
        abstract = True

    # Fields present on every mutation response.
    ok = Boolean(
        description="Boolean field that return mutation result request.")
    errors = List(ErrorType, description="Errors list for the field")

    @classmethod
    def _get_update_input_type_name(cls):
        """Name of the generated input type for update mutations."""
        model = cls.get_model()
        default = to_camel_case("{}_Update_{}".format(
            cls.__name__, model._meta.model_name.capitalize()))
        return cls.input_type_name or default

    @classmethod
    def _get_create_input_type_name(cls):
        """Name of the generated input type for create mutations."""
        model = cls.get_model()
        default = to_camel_case("{}_Create_{}".format(
            cls.__name__, model._meta.model_name.capitalize()))
        return cls.input_type_name or default

    @classmethod
    def get_errors(cls, errors):
        """Build a failed-mutation response carrying ``errors``."""
        extra_types = cls.get_extra_types(obj=None, info=None)
        errors_dict = {
            cls.output_field_name: None,
            "ok": False,
            "errors": errors
        }
        extra_types.update(errors_dict)
        return cls(**extra_types)

    @classmethod
    def get_extra_types(cls, obj, info):
        """
        define values for any extra types provided on your mutation class
        :param obj: Object
        :param info: graphene info instance
        :return: `Dict()`
        """
        return dict()

    @classmethod
    def perform_mutate(cls, obj, info):
        """Build a successful response exposing ``obj`` on the output field."""
        extra_types = cls.get_extra_types(obj, info)
        resp = {cls.output_field_name: obj, "ok": True, "errors": None}
        extra_types.update(resp)
        return cls(**extra_types)

    @classmethod
    def save(cls, serialized_obj, **kwargs):
        # NOTE(review): the message has no '{}' placeholder, so .format()
        # silently drops the class name.
        raise NotImplementedError(
            '`save` method needs to be implemented'.format(cls.__name__))

    @classmethod
    def get_lookup_field_name(cls):
        """Model field name used for object lookup (pk name by default)."""
        model = cls.get_model()
        return cls.lookup_field or model._meta.pk.name

    @classmethod
    def get_model(cls):
        """Return the Django model attached via ``_meta``."""
        model = cls._meta.model
        return model

    @classmethod
    def _get_output_type(cls):
        """Pop and return a user-supplied ``Output`` type, if any."""
        output_field = getattr(cls, "Output", None)
        if output_field:
            delattr(cls, "Output")
        return output_field

    @classmethod
    def _bundle_all_arguments(cls, args_type, input_fields):
        """Merge generated input fields with the extra ``Arguments`` props.

        When ``input_field_name`` is set, all input is nested under one
        required argument of type ``args_type``; otherwise the fields are
        flattened into individual arguments.
        """
        input_field_name = cls.input_field_name
        if input_field_name:
            arguments = OrderedDict(
                {input_field_name: Argument(args_type, required=True)})
            arguments.update(cls._meta.arguments_props)
            return arguments
        argument = OrderedDict({
            i: Argument(type_=t.type,
                        description=t.description,
                        name=t.name,
                        required=getattr(t, 'required', None))
            for i, t in input_fields.items()
        })
        argument.update(cls._meta.arguments_props)
        return argument

    @classmethod
    def _get_output_fields(cls, model, only_fields, exclude_fields):
        """Resolve (or generate on the fly) the output object type."""
        output_type = gd_registry().get_type_for_model(
            model) or gde_registry().get_type_for_model(model)
        if not output_type:
            # No registered type for this model yet: build one.
            factory_kwargs = {
                "model": model,
                'only_fields': only_fields,
                'exclude_fields': exclude_fields,
                'skip_registry': False,
                'description': cls.output_field_description
            }
            output_type = factory_type("output", DjangoObjectType,
                                       **factory_kwargs)
        output = Field(
            output_type,
            description=cls.output_field_description or
            "Result can `{}` or `Null` if any error message(s)".format(
                model._meta.model_name.capitalize()))
        return output

    @classmethod
    def _get_argument_fields(cls):
        """Collect props from ``Arguments`` (or the deprecated ``Input``)."""
        input_class = getattr(cls, "Arguments", None)
        if not input_class:
            input_class = getattr(cls, "Input", None)
            if input_class:
                warn_deprecation((
                    "Please use {name}.Arguments instead of {name}.Input."
                    "Input is now only used in ClientMutationID.\nRead more: "
                    "https://github.com/graphql-python/graphene/blob/2.0/UPGRADE-v2.0.md#mutation-input"
                ).format(name=cls.__name__))
        arguments_props = {}
        if input_class:
            arguments_props = props(input_class)
        return arguments_props

    @classmethod
    def base_args_setup(cls):
        # NOTE(review): the message has no '{}' placeholder, so .format()
        # silently drops the class name.
        raise NotImplementedError(
            '`base_args_setup` needs to be implemented'.format(cls.__name__))

    @classmethod
    def _init_create_args(cls):
        """Build the argument dict for a create mutation."""
        cls._action = CREATE
        input_fields = cls.base_args_setup()
        argument_type = type(
            cls._get_create_input_type_name(),
            (InputObjectType, ),
            OrderedDict(input_fields),
        )
        return cls._bundle_all_arguments(argument_type,
                                         input_fields=input_fields)

    @classmethod
    def _init_update_args(cls):
        """Build the argument dict for an update mutation (adds the pk arg)."""
        cls._action = UPDATE
        input_fields = cls.base_args_setup()
        pk_name = cls.lookup_url_kwarg or cls.get_lookup_field_name()
        # Only inject the pk argument when the subclass has not declared it.
        if not input_fields.get(pk_name) and not cls._meta.arguments_props.get(
                pk_name):
            input_fields.update({
                pk_name:
                Argument(ID,
                         required=True,
                         description=cls.lookup_field_description or
                         "Django object unique identification field")
            })
        argument_type = type(
            cls._get_update_input_type_name(),
            (InputObjectType, ),
            OrderedDict(input_fields),
        )
        return cls._bundle_all_arguments(argument_type,
                                         input_fields=input_fields)

    @classmethod
    def _init_delete_args(cls):
        """Build the argument dict for a delete mutation (pk argument only)."""
        cls._action = DELETE
        pk_name = cls.lookup_url_kwarg or cls.get_lookup_field_name()
        input_fields = OrderedDict({
            pk_name:
            Argument(ID,
                     required=True,
                     description=cls.lookup_field_description or
                     "Django object unique identification field")
        })
        argument = input_fields
        argument.update(cls._meta.arguments_props)
        return argument

    def __build_input_data(self, root, info, **kwargs):
        """Extract the input payload from kwargs, merging uploaded files."""
        data = {}
        if self.input_field_name:
            data = kwargs.get(self.input_field_name)
        else:
            data.update(**kwargs)
        request_type = info.context.META.get("CONTENT_TYPE", "")
        if "multipart/form-data" in request_type:
            # For multipart requests, uploaded files are part of the payload.
            data.update(
                {name: value for name, value in info.context.FILES.items()})
        return data

    def create(self, root, info, **kwargs):
        """Create resolver: check perms, build data, delegate to perform_create."""
        self.check_permissions(request=info.context)
        try:
            data = self.__build_input_data(root, info, **kwargs)
            obj = self.perform_create(root, info, data, **kwargs)
            assert obj is not None, (
                '`perform_create()` did not return an object instance.')
            return self.perform_mutate(obj, info)
        except Exception as e:
            return self._handle_errors(e)

    def update(self, root, info, **kwargs):
        """Update resolver: look the object up, check perms, perform update."""
        self.check_permissions(request=info.context)
        try:
            data = self.__build_input_data(root, info, **kwargs)
            existing_obj = self.get_object(root, info, data=data, **kwargs)
            if existing_obj:
                self.check_object_permissions(request=info.context,
                                              obj=existing_obj)
                obj = self.perform_update(root=root,
                                          info=info,
                                          data=data,
                                          instance=existing_obj,
                                          **kwargs)
                assert obj is not None, (
                    '`perform_update()` did not return an object instance.')
                return self.perform_mutate(obj, info)
            else:
                pk = data.get(self.get_lookup_field_name())
                errors = self.construct_error(
                    field="id",
                    message="A {} obj with id: {} do not exist".format(
                        self.get_model().__name__, pk))
                return self.get_errors(errors)
        except Exception as e:
            return self._handle_errors(e)

    def delete(self, root, info, **kwargs):
        """Delete resolver: look the object up, check perms, perform delete."""
        self.check_permissions(request=info.context)
        try:
            pk = kwargs.get(self.get_lookup_field_name())
            data = self.__build_input_data(root, info, **kwargs)
            old_obj = self.get_object(root, info, data, **kwargs)
            if old_obj:
                self.check_object_permissions(request=info.context,
                                              obj=old_obj)
                self.perform_delete(info=info, obj=old_obj, **kwargs)
                # Re-attach the pk so the response can still identify the
                # deleted row (Django clears the pk after delete()).
                if not old_obj.id:
                    old_obj.id = pk
                return self.perform_mutate(old_obj, info)
            else:
                errors = self.construct_error(
                    field="id",
                    message="A {} obj with id: {} do not exist".format(
                        self.get_model().__name__, pk))
                return self.get_errors(errors)
        except Exception as e:
            return self._handle_errors(e)

    def get_object(self, root, info, data, **kwargs):
        """Fetch the target object (or None) via the configured lookup field."""
        look_up_field = self.get_lookup_field_name()
        lookup_url_kwarg = self.lookup_url_kwarg or look_up_field
        lookup_url_kwarg_value = data.get(lookup_url_kwarg) or kwargs.get(
            lookup_url_kwarg)
        filter_kwargs = {look_up_field: lookup_url_kwarg_value}
        return get_Object_or_None(self.get_model(), **filter_kwargs)

    @classmethod
    def resolver_wrapper(cls, resolver):
        """Wrap a resolver so each call runs on a fresh mutation instance."""
        def wrap(*args, **kwargs):
            instance = cls()
            # Dispatch to the instance method named after the resolver
            # (create / update / delete).
            instance._action = resolver.__name__
            resolver_func = getattr(instance, instance._action)
            return resolver_func(*args, **kwargs)

        return wrap

    @classmethod
    def Field(cls,
              args,
              name=None,
              description=None,
              deprecation_reason=None,
              required=False,
              resolver=None,
              **kwargs):
        """
        Mount instance of mutation Field.
        """
        return Field(cls._meta.output,
                     args=args,
                     name=name,
                     description=description or cls._meta.description,
                     deprecation_reason=deprecation_reason,
                     required=required,
                     resolver=cls.resolver_wrapper(resolver),
                     **kwargs)
class PartnerAllocations(ObjectType):
    """Per-partner time allocation summary."""

    id = ID()
    name = String()
    code = String()
    allocated_time = Field(AllocatedTime)
    science_time = List(ScienceTime)
class GQLInputAddress(InputObjectType):
    """GeoJSON-style geometry input: a type string plus flat coordinates."""

    # Geometry type, presumably "Point" — TODO confirm against callers.
    type = String(required=True)
    coordinates = List(Float, required=True)
class CommonAttributes(object):
    """Mixin of fields shared by several content types."""

    title = String(required=True)
    content = String()
    images = List(String)
class GQLInputCoverageArea(InputObjectType):
    """GeoJSON-style area input with four levels of coordinate nesting."""

    type = String(required=True)
    # Nesting depth matches a GeoJSON MultiPolygon:
    # polygons -> rings -> positions -> floats. TODO confirm.
    coordinates = List(List(List(List(Float, required=True))))
class Query(object):
    """Root queries for the tutoring app: users by role, schools, notes,
    and instructor schedule data."""

    # Single-object fields.
    note = Field(NoteType, note_id=ID())
    student = Field(StudentType, user_id=ID(), email=String())
    school = Field(SchoolType, school_id=ID(), name=String())
    parent = Field(ParentType, user_id=ID(), email=String())
    instructor = Field(InstructorType, user_id=ID(), email=String())
    admin = Field(AdminType, user_id=ID(), email=String())
    user_info = Field(UserInfoType, user_id=ID(), user_name=String())
    user_type = Field(String, user_name=String())
    email_from_token = Field(String, token=String())

    # List fields.
    notes = List(NoteType, user_id=ID(required=True))
    students = List(StudentType, grade=ID())
    schools = List(SchoolType, district=String())
    parents = List(ParentType)
    instructors = List(InstructorType, subject=String())
    admins = List(AdminType, admin_type=String())
    user_infos = List(UserInfoType, user_ids=List(ID))
    instructor_ooo = List(InstructorOutOfOfficeType,
                          instructor_id=ID(required=True))
    instructor_availability = List(InstructorAvailabilityType,
                                   instructor_id=ID(required=True))

    @login_required
    def resolve_note(self, info, **kwargs):
        """Return the note with ``note_id``, or None when no id is given.

        NOTE(review): ``.get`` raises ``DoesNotExist`` for unknown ids.
        """
        note_id = kwargs.get('note_id')
        if note_id:
            return Note.objects.get(id=note_id)
        return None

    @login_required
    def resolve_student(self, info, **kwargs):
        """Look a student up by user id, falling back to email."""
        user_id = kwargs.get('user_id')
        email = kwargs.get('email')
        if user_id:
            return Student.objects.get(user=user_id)
        if email:
            return Student.objects.get(user__email=email)
        return None

    @login_required
    def resolve_school(self, info, **kwargs):
        """Look a school up by id, falling back to exact name."""
        school_id = kwargs.get('school_id')
        name = kwargs.get('name')
        if school_id:
            return School.objects.get(id=school_id)
        if name:
            return School.objects.get(name=name)
        return None

    @login_required
    def resolve_parent(self, info, **kwargs):
        """Look a parent up by user id, falling back to email."""
        user_id = kwargs.get('user_id')
        email = kwargs.get('email')
        if user_id:
            return Parent.objects.get(user=user_id)
        if email:
            return Parent.objects.get(user__email=email)
        return None

    @login_required
    def resolve_instructor(self, info, **kwargs):
        """Look an instructor up by user id, falling back to email."""
        user_id = kwargs.get('user_id')
        email = kwargs.get('email')
        if user_id:
            return Instructor.objects.get(user=user_id)
        if email:
            return Instructor.objects.get(user__email=email)
        return None

    @login_required
    def resolve_admin(self, info, **kwargs):
        """Look an admin up by user id, falling back to email."""
        user_id = kwargs.get('user_id')
        email = kwargs.get('email')
        if user_id:
            return Admin.objects.get(user=user_id)
        if email:
            return Admin.objects.get(user__email=email)
        return None

    # NOTE(review): not @login_required — presumably used during login to
    # decide the account type; confirm it is meant to be public.
    def resolve_user_type(self, info, **kwargs):
        """Return the role name for the account whose email matches
        ``user_name``, checking each role model in turn."""
        user_name = kwargs.get('user_name')
        if user_name:
            if Student.objects.filter(user__email=user_name).exists():
                return "Student"
            if Instructor.objects.filter(user__email=user_name).exists():
                return "Instructor"
            if Parent.objects.filter(user__email=user_name).exists():
                return "Parent"
            if Admin.objects.filter(user__email=user_name).exists():
                return "Admin"
        return None

    @login_required
    def resolve_user_info(self, info, **kwargs):
        """Return the role-specific profile for a user, matching by email
        first, then by user id."""
        user_id = kwargs.get('user_id')
        user_name = kwargs.get('user_name')
        if user_name:
            if Student.objects.filter(user__email=user_name).exists():
                return Student.objects.get(user__email=user_name)
            if Instructor.objects.filter(user__email=user_name).exists():
                return Instructor.objects.get(user__email=user_name)
            if Parent.objects.filter(user__email=user_name).exists():
                return Parent.objects.get(user__email=user_name)
            if Admin.objects.filter(user__email=user_name).exists():
                return Admin.objects.get(user__email=user_name)
        if user_id:
            if Student.objects.filter(user=user_id).exists():
                return Student.objects.get(user=user_id)
            if Instructor.objects.filter(user=user_id).exists():
                return Instructor.objects.get(user=user_id)
            if Parent.objects.filter(user=user_id).exists():
                return Parent.objects.get(user=user_id)
            if Admin.objects.filter(user=user_id).exists():
                return Admin.objects.get(user=user_id)
        return None

    @login_required
    def resolve_notes(self, info, **kwargs):
        """All notes belonging to ``user_id``."""
        user_id = kwargs.get('user_id')
        return Note.objects.filter(user=user_id)

    # NOTE(review): unlike the neighbouring list resolvers this one is not
    # @login_required — confirm whether intentional.
    def resolve_students(self, info, **kwargs):
        """Students, optionally filtered by grade."""
        grade = kwargs.get('grade')
        if grade:
            return Student.objects.filter(grade=grade)
        return Student.objects.all()

    @login_required
    def resolve_schools(self, info, **kwargs):
        """Schools, optionally filtered by district."""
        district = kwargs.get('district')
        queryset = School.objects
        if district:
            queryset = queryset.filter(district=district)
        return queryset.all()

    @login_required
    def resolve_admins(self, info, **kwargs):
        """Admins, optionally filtered by admin type."""
        admin_type = kwargs.get('admin_type')
        if admin_type:
            return Admin.objects.filter(admin_type=admin_type)
        return Admin.objects.all()

    # NOTE(review): not @login_required — confirm whether intentional.
    def resolve_parents(self, info, **kwargs):
        """All parents."""
        return Parent.objects.all()

    @login_required
    def resolve_instructors(self, info, **kwargs):
        """All instructors."""
        return Instructor.objects.all()

    @login_required
    def resolve_instructor_ooo(self, info, **kwargs):
        """Out-of-office entries for one instructor."""
        instructor_id = kwargs.get('instructor_id')
        return InstructorOutOfOffice.objects.filter(instructor=instructor_id)

    @login_required
    def resolve_instructor_availability(self, info, **kwargs):
        """Availability entries for one instructor."""
        instructor_id = kwargs.get('instructor_id')
        return InstructorAvailability.objects.filter(instructor=instructor_id)

    # NOTE(review): not @login_required — confirm whether intentional.
    def resolve_user_infos(self, info, user_ids):
        """Role-specific profiles for each id in ``user_ids``; an id that
        matches several roles is appended once per matching role."""
        user_list = []
        for user_id in user_ids:
            if Student.objects.filter(user=user_id).exists():
                user_list.append(Student.objects.get(user=user_id))
            if Instructor.objects.filter(user=user_id).exists():
                user_list.append(Instructor.objects.get(user=user_id))
            if Parent.objects.filter(user=user_id).exists():
                user_list.append(Parent.objects.get(user=user_id))
            if Admin.objects.filter(user=user_id).exists():
                user_list.append(Admin.objects.get(user=user_id))
        return user_list

    def resolve_email_from_token(self, info, token):
        """Decode an HS256 JWT with the project secret and return its
        ``email`` claim (raises on invalid/expired tokens)."""
        return jwt.decode(token, settings.SECRET_KEY, algorithms=['HS256'])["email"]
class ParentType(DjangoObjectType):
    """GraphQL type for the Parent model, exposing its student id list."""

    # `source` pulls the value from Parent.student_list on the instance.
    student_list = List(ID, source='student_list')

    class Meta:
        model = Parent
def convert_scalar_list_to_list(type, attribute, registry=None):
    """Convert a scalar-list attribute into a graphene ``List(String)`` field,
    carrying the attribute's name as the field description."""
    field_description = attribute.attr_name
    return List(String, description=field_description)
class Arguments:
    """Mutation arguments: the text to translate plus target language codes."""

    content = String(required=True)
    target_languages = List(String, required=True)
class Arguments:
    """Mutation arguments: an optional mode plus the primary keys to act on."""

    mode = String()
    pk_set = List(ID)
class Project(ObjectType):  # noqa pylint: disable=too-many-instance-attributes
    """Formstack Project Class."""

    name = String()
    findings = List(Finding)
    has_forces = Boolean()
    open_vulnerabilities = Int()
    closed_vulnerabilities = Int()
    current_month_authors = Int()
    current_month_commits = Int()
    subscription = String()
    comments = List(Comment)
    tags = List(String)
    deletion_date = String()
    pending_closing_check = Int()
    last_closing_vuln = Int()
    max_severity = Float()
    max_open_severity = Float()
    mean_remediate = Int()
    total_findings = Int()
    users = List(User)
    total_treatment = GenericScalar()
    drafts = List(Finding)
    description = String()
    remediated_over_time = String()
    events = List(Event)
    user_deletion = String()

    def __init__(self, project_name, description=''):
        """Class constructor."""
        self.name = project_name
        self.subscription = ''
        self.comments = []
        self.tags = []
        self.deletion_date = ''
        self.open_vulnerabilities = 0
        self.closed_vulnerabilities = 0
        self.current_month_authors = 0
        self.current_month_commits = 0
        self.pending_closing_check = 0
        self.last_closing_vuln = 0
        self.max_severity = 0.0
        self.max_open_severity = 0.0
        self.mean_remediate = 0
        self.total_findings = 0
        self.total_treatment = {}
        self.description = description
        self.remediated_over_time = []

    def __str__(self):
        """String representation of entity."""
        return self.name

    def resolve_name(self, info):
        """Resolve name attribute."""
        del info
        return self.name

    @get_entity_cache
    def resolve_remediated_over_time(self, info):
        "Resolve remediated over time"
        del info
        remediated_over_time = project_domain.get_attributes(self.name, ['remediated_over_time'])
        remediate_over_time_decimal = remediated_over_time.get('remediated_over_time', {})
        # Keep only the last 12 weeks of each series.
        remediated_twelve_weeks = [lst_rem[-12:] for lst_rem in remediate_over_time_decimal]
        # use_decimal is a simplejson extension: Decimal values are
        # serialized natively.
        self.remediated_over_time = json.dumps(
            remediated_twelve_weeks, use_decimal=True)
        return self.remediated_over_time

    def resolve_has_forces(self, info):
        """Resolve if the project has the Forces service."""
        del info
        attributes = project_domain.get_attributes(self.name, ['has_forces'])
        self.has_forces = attributes.get('has_forces', False)
        return self.has_forces

    @get_entity_cache
    def resolve_findings(self, info):
        """Resolve findings attribute."""
        util.cloudwatch_log(info.context, 'Security: Access to {project} '
                            'findings'.format(project=self.name))
        finding_ids = finding_domain.filter_deleted_findings(
            project_domain.list_findings(self.name))
        findings_loader = info.context.loaders['finding']
        # Batched load; DELETED findings are filtered from the promise result.
        self.findings = findings_loader.load_many(finding_ids).then(
            lambda findings: [finding for finding in findings
                              if finding.current_state != 'DELETED'])
        return self.findings

    @get_entity_cache
    def resolve_open_vulnerabilities(self, info):
        """Resolve open vulnerabilities attribute."""
        finding_ids = finding_domain.filter_deleted_findings(
            project_domain.list_findings(self.name))
        vulns_loader = info.context.loaders['vulnerability']
        # Count vulns currently open and either not pending approval or
        # already approved at least once.
        self.open_vulnerabilities = vulns_loader.load_many(finding_ids).then(
            lambda findings: sum([
                len([vuln for vuln in vulns
                     if vuln_domain.get_current_state(vuln) == 'open' and
                     (vuln.current_approval_status != 'PENDING' or
                      vuln.last_approved_status)])
                for vulns in findings
            ]))
        return self.open_vulnerabilities

    @get_entity_cache
    def resolve_closed_vulnerabilities(self, info):
        """Resolve closed vulnerabilities attribute."""
        finding_ids = finding_domain.filter_deleted_findings(
            project_domain.list_findings(self.name))
        vulns_loader = info.context.loaders['vulnerability']
        self.closed_vulnerabilities = vulns_loader.load_many(finding_ids).then(
            lambda findings: sum([
                len([vuln for vuln in vulns
                     if vuln_domain.get_current_state(vuln) == 'closed' and
                     (vuln.current_approval_status != 'PENDING' or
                      vuln.last_approved_status)])
                for vulns in findings
            ]))
        return self.closed_vulnerabilities

    @get_entity_cache
    def resolve_pending_closing_check(self, info):
        """Resolve pending closing check attribute."""
        del info
        self.pending_closing_check = project_domain.get_pending_closing_check(
            self.name)
        return self.pending_closing_check

    @get_entity_cache
    def resolve_last_closing_vuln(self, info):
        """Resolve days since last closing vuln attribute."""
        del info
        last_closing_vuln = project_domain.get_attributes(self.name, ['last_closing_date'])
        self.last_closing_vuln = last_closing_vuln.get('last_closing_date', 0)
        return self.last_closing_vuln

    @get_entity_cache
    def resolve_max_severity(self, info):
        """Resolve maximum severity attribute."""
        finding_ids = finding_domain.filter_deleted_findings(
            project_domain.list_findings(self.name))
        findings_loader = info.context.loaders['finding']
        # NOTE(review): max() raises on an empty sequence if every loaded
        # finding is DELETED while the list itself is non-empty — confirm.
        self.max_severity = findings_loader.load_many(finding_ids).then(
            lambda findings: max([
                finding.severity_score for finding in findings
                if finding.current_state != 'DELETED'])
            if findings else 0)
        return self.max_severity

    @get_entity_cache
    def resolve_max_open_severity(self, info):
        """Resolve maximum severity in open vulnerability attribute."""
        del info
        max_open_severity = project_domain.get_attributes(self.name, ['max_open_severity'])
        self.max_open_severity = max_open_severity.get('max_open_severity', 0)
        return self.max_open_severity

    @get_entity_cache
    def resolve_mean_remediate(self, info):
        """Resolve mean to remediate a vulnerability attribute."""
        del info
        mean_remediate = project_domain.get_attributes(self.name, ['mean_remediate'])
        self.mean_remediate = mean_remediate.get('mean_remediate', 0)
        return self.mean_remediate

    @get_entity_cache
    def resolve_total_findings(self, info):
        """Resolve total findings attribute."""
        finding_ids = finding_domain.filter_deleted_findings(
            project_domain.list_findings(self.name))
        findings_loader = info.context.loaders['finding']
        findings = findings_loader.load_many(finding_ids).then(
            lambda findings: [finding for finding in findings
                              if finding.current_state != 'DELETED'])
        # Chain a second .then to count the filtered findings lazily.
        self.total_findings = findings.then(len)
        return self.total_findings

    @get_entity_cache
    def resolve_total_treatment(self, info):
        """Resolve total treatment attribute."""
        del info
        total_treatment = project_domain.get_attributes(self.name, ['total_treatment'])
        total_treatment_decimal = total_treatment.get('total_treatment', {})
        # use_decimal is a simplejson extension for Decimal serialization.
        self.total_treatment = json.dumps(
            total_treatment_decimal, use_decimal=True)
        return self.total_treatment

    @get_entity_cache
    def resolve_current_month_authors(self, info):
        """Resolve current month authors attribute."""
        del info
        self.current_month_authors = \
            project_domain.get_current_month_authors(self.name)
        return self.current_month_authors

    @get_entity_cache
    def resolve_current_month_commits(self, info):
        """Resolve current month commits attribute."""
        del info
        self.current_month_commits = \
            project_domain.get_current_month_commits(self.name)
        return self.current_month_commits

    @get_entity_cache
    def resolve_subscription(self, info):
        """Resolve subscription attribute."""
        del info
        project_info = project_domain.get_attributes(self.name, ['type'])
        if project_info:
            self.subscription = project_info.get('type', '')
        else:
            self.subscription = ''
        return self.subscription

    @get_entity_cache
    def resolve_deletion_date(self, info):
        """Resolve deletion date attribute."""
        del info
        self.deletion_date = ''
        historic_deletion = project_domain.get_historic_deletion(self.name)
        if historic_deletion:
            # The most recent history entry wins.
            self.deletion_date = historic_deletion[-1].get('deletion_date', '')
        return self.deletion_date

    @get_entity_cache
    def resolve_user_deletion(self, info):
        """Resolve the user who performed the last deletion."""
        del info
        self.user_deletion = ''
        historic_deletion = project_domain.get_historic_deletion(self.name)
        if historic_deletion and historic_deletion[-1].get('deletion_date'):
            self.user_deletion = historic_deletion[-1].get('user', '')
        return self.user_deletion

    @enforce_authz
    def resolve_comments(self, info):
        """Resolve comments visible to the requesting user's role."""
        user_data = util.get_jwt_content(info.context)
        curr_user_role = get_user_role(user_data)
        self.comments = [
            Comment(**comment) for comment in project_domain.list_comments(
                self.name, curr_user_role)]
        return self.comments

    @get_entity_cache
    def resolve_tags(self, info):
        """ Resolve project tags """
        del info
        project_data = project_domain.get_attributes(self.name, ['tag'])
        self.tags = project_data['tag'] if project_data and 'tag' in project_data else []
        return self.tags

    @enforce_authz
    @get_entity_cache
    def resolve_users(self, info):
        """ Resolve project users """
        init_email_list = project_domain.get_users(self.name)
        # Presumably filters out the requester's own email — TODO confirm
        # against util.user_email_filter.
        user_email_list = util.user_email_filter(
            init_email_list, util.get_jwt_content(info.context)['user_email'])
        self.users = [User(self.name, user_email)
                      for user_email in user_email_list
                      if user_domain.get_data(user_email, 'role')
                      in ['customer', 'customeradmin']]
        return self.users

    @enforce_authz
    def resolve_drafts(self, info):
        """ Resolve drafts attribute """
        util.cloudwatch_log(info.context, 'Security: Access to {project} '
                            'drafts'.format(project=self.name))
        finding_ids = finding_domain.filter_deleted_findings(
            project_domain.list_drafts(self.name))
        findings_loader = info.context.loaders['finding']
        self.drafts = findings_loader.load_many(finding_ids).then(
            lambda drafts: [draft for draft in drafts
                            if draft.current_state != 'DELETED'])
        return self.drafts

    @enforce_authz
    def resolve_events(self, info):
        """ Resolve project events """
        util.cloudwatch_log(
            info.context, f'Security: Access to {self.name} events')
        event_ids = project_domain.list_events(self.name)
        events_loader = info.context.loaders['event']
        self.events = events_loader.load_many(event_ids)
        return self.events

    def resolve_description(self, info):
        """ Resolve project description """
        del info
        return self.description
class MovieQuery(ObjectType):
    """Query type exposing movie search backed by the Elasticsearch
    ``movies`` index."""

    movie_list = None
    get_movie = Field(List(MovieType),
                      id=Int(),
                      title=String(),
                      genre_name=String(),
                      genre_id=Int(),
                      company_id=Int(),
                      company_name=String())

    async def resolve_get_movie(self, info, id=None, title=None,
                                genre_name=None, genre_id=None,
                                company_id=None, company_name=None):
        """Search movies by any combination of the optional filters.

        With no filters given, up to 1000 movies are returned.  Falsy filter
        values (0, "") are treated as absent, matching the original
        truthiness checks.
        """
        # Map each provided filter onto its Elasticsearch document field.
        filters = [
            ("id", id),
            ("title", title),
            ("genres.name", genre_name),
            ("genres.id", genre_id),
            ("production_companies.id", company_id),
            ("production_companies.name", company_name),
        ]
        query_args = [{"match": {field: value}}
                      for field, value in filters if value]

        # One bool/must query when filters are present, match_all otherwise —
        # this replaces two near-identical search-and-extract branches.
        if query_args:
            query = {"bool": {"must": query_args}}
        else:
            query = {"match_all": {}}

        res = es.search(index="movies", body={"size": 1000, "query": query})
        return [hit['_source'] for hit in res['hits']['hits']]
class Device(TangoNodeType, Interface):
    """This class represent a device."""

    name = String()
    state = String()
    properties = List(DeviceProperty, pattern=String())
    attributes = List(DeviceAttribute, pattern=String())
    commands = List(DeviceCommand, pattern=String())
    server = Field(DeviceInfo)
    # device_class = String()
    # server = String()
    pid = Int()
    started_date = String()
    stopped_date = String()
    exported = Boolean()

    def resolve_state(self, info):
        """This method fetch the state of the device.

        :return: State of the device.
        :rtype: str
        """
        try:
            proxy = proxies.get(self.name)
            return proxy.state()
        except (PyTango.DevFailed, PyTango.ConnectionFailed,
                PyTango.CommunicationFailed, PyTango.DeviceUnlocked):
            # Known communication failures degrade to a sentinel state.
            return "UNKNOWN"
        except Exception as e:
            # Any other error is surfaced as its message string.
            return str(e)

    def resolve_properties(self, info, pattern="*"):
        """This method fetch the properties of the device.

        :param pattern: Pattern for filtering the result. Returns only
                        properties that matches the pattern.
        :type pattern: str

        :return: List of properties for the device.
        :rtype: List of DeviceProperty
        """
        props = db.get_device_property_list(self.name, pattern)
        return [DeviceProperty(name=p, device=self.name) for p in props]

    def resolve_attributes(self, info, pattern="*"):
        """This method fetch all the attributes and its' properties of a device.

        :param pattern: Pattern for filtering the result. Returns only
                        properties that match the pattern.
        :type pattern: str

        :return: List of attributes of the device.
        :rtype: List of DeviceAttribute
        """
        proxy = proxies.get(self.name)
        attr_infos = proxy.attribute_list_query()
        # Shell-style pattern matched case-insensitively against attr names.
        rule = re.compile(fnmatch.translate(pattern), re.IGNORECASE)
        sorted_info = sorted(attr_infos, key=attrgetter("name"))
        result = []

        # TODO: Ensure that result is passed properly, refresh mutable
        # arguments copy or pointer ...? Tests are passing ...
        def append_to_result(result, klass, attr_info):
            # WT_UNKNOWN is mapped onto the READ_WITH_WRITE label.
            if attr_info.writable == PyTango._tango.AttrWriteType.WT_UNKNOWN:
                wt = 'READ_WITH_WRITE'
            else:
                wt = attr_info.writable
            # "Not specified" marker strings are normalised to None.
            result.append(
                klass(name=attr_info.name,
                      device=self.name,
                      writable=wt,
                      datatype=PyTango.CmdArgType.values[attr_info.data_type],
                      dataformat=attr_info.data_format,
                      label=attr_info.label,
                      unit=attr_info.unit,
                      description=attr_info.description,
                      displevel=attr_info.disp_level,
                      minvalue=None if attr_info.min_value == "Not specified"
                      else attr_info.min_value,
                      maxvalue=None if attr_info.max_value == "Not specified"
                      else attr_info.max_value,
                      minalarm=None if attr_info.min_alarm == "Not specified"
                      else attr_info.min_alarm,
                      maxalarm=None if attr_info.max_alarm == "Not specified"
                      else attr_info.max_alarm))

        for attr_info in sorted_info:
            if rule.match(attr_info.name):
                # Pick the attribute subtype from the data format.
                if str(attr_info.data_format) == "SCALAR":
                    append_to_result(result, ScalarDeviceAttribute, attr_info)
                if str(attr_info.data_format) == "SPECTRUM":
                    append_to_result(result, SpectrumDeviceAttribute, attr_info)
                if str(attr_info.data_format) == "IMAGE":
                    append_to_result(result, ImageDeviceAttribute, attr_info)
        return result

    def resolve_commands(self, info, pattern="*"):
        """This method fetch all the commands of a device.

        :param pattern: Pattern for filtering of the result. Returns only
                        commands that match the pattern.
        :type pattern: str

        :return: List of commands of the device.
        :rtype: List of DeviceCommand
        """
        proxy = proxies.get(self.name)
        cmd_infos = proxy.command_list_query()
        rule = re.compile(fnmatch.translate(pattern), re.IGNORECASE)

        def create_device_command(cmd_info):
            return DeviceCommand(name=cmd_info.cmd_name,
                                 tag=cmd_info.cmd_tag,
                                 displevel=cmd_info.disp_level,
                                 intype=cmd_info.in_type,
                                 intypedesc=cmd_info.in_type_desc,
                                 outtype=cmd_info.out_type,
                                 outtypedesc=cmd_info.out_type_desc)

        return [
            create_device_command(a)
            for a in sorted(cmd_infos, key=attrgetter("cmd_name"))
            if rule.match(a.cmd_name)
        ]

    def resolve_server(self, info):
        """ This method fetch the server infomation of a device.

        :return: List server info of a device.
        :rtype: List of DeviceInfo
        """
        proxy = proxies.get(self.name)
        dev_info = proxy.info()
        return DeviceInfo(id=dev_info.server_id, host=dev_info.server_host)

    def resolve_exported(self, info):
        """ This method fetch the infomation about the device if it is
        exported or not.

        :return: True if exported, False otherwise.
        :rtype: bool
        """
        return self.info.exported

    def resolve_pid(self, info):
        """Process id of the device server (from the cached device info)."""
        return self.info.pid

    def resolve_started_date(self, info):
        """Start date of the device server (from the cached device info)."""
        return self.info.started_date

    def resolve_stopped_date(self, info):
        """Stop date of the device server (from the cached device info)."""
        return self.info.stopped_date

    @property
    def info(self):
        """This method fetch all the information of a device."""
        # Cached on first access: one DB round-trip per Device instance.
        if not hasattr(self, "_info"):
            self._info = db.get_device_info(self.name)
        return self._info
class Query(ObjectType):
    """NFT gallery queries plus gallery-management fields."""

    vp = List(NFTS, wa=String())
    getglobalgallery = List(NFTS)
    getlatestgallery = List(NFTS)
    getusergallery = List(NFTS, wa=String())
    # NOTE(review): no resolve_globalnfts is defined for this field.
    globalnfts = List(NFTS)
    random = List(NFTS)
    # NOTE(review): these three fields mutate state from a query type;
    # consider mutations instead.
    addtoglobal = List(Boolean, wa=String(), tkid=String())
    addtousergallery = List(Boolean, us=String(), wa=String(), tkid=String())
    removefromusergallery = List(Boolean, us=String(), wa=String(), tkid=String())

    def resolve_vp(self, info, wa):
        """Metadata for the token referenced by ``wa``."""
        contract_address, token_id = get_address(wa)
        uri, image_links = get_uri(contract_address, token_id, wa)
        # NOTE(review): NFTS declares `name`, not `owner`, so the `owner`
        # entry is never exposed and `name` resolves to null — confirm intent.
        stuff = {
            "uri": uri,
            "address": wa,
            "images": image_links,
            "owner": "You"
        }
        return [stuff]

    def resolve_random(self, info):
        """Latest OpenSea token (despite the field name being `random`)."""
        # contract_address, token_id, owner_address, owner = get_random_address()
        contract_address, token_id = get_latest_opensea()
        uri, image_links = get_uri(contract_address, token_id, "")
        stuff = {"uri": uri, "address": "", "images": image_links, "name": ""}
        return [stuff]

    def resolve_getglobalgallery(self, info):
        """Contents of the shared global gallery."""
        uri, image_links = get_global_gallery()
        print(uri)  # NOTE(review): debug prints left in
        print(image_links)
        stuff = {
            "uri": uri,
            "address": "Global",
            "images": image_links,
            "owner": "Users"
        }
        return [stuff]

    def resolve_getusergallery(self, info, wa):
        """Gallery contents for wallet ``wa``."""
        uri, image_links = get_user_gallery(wa)
        stuff = {"uri": uri, "address": wa, "images": image_links, "owner": wa}
        return [stuff]

    def resolve_addtoglobal(self, info, wa, tkid):
        """Add token ``tkid`` of ``wa`` to the global gallery (side effect)."""
        print(wa, tkid)  # NOTE(review): debug print left in
        # tkid arrives as a string; the backend expects an int.
        tkid = int(tkid)
        create_nft(wa, tkid)
        return [True]

    def resolve_addtousergallery(self, info, us, wa, tkid):
        """Add a token to user ``us``'s gallery (side effect)."""
        print(us, wa, tkid)  # NOTE(review): debug print left in
        tkid = int(tkid)
        add_to_gallery(us, wa, tkid)
        return [True]

    def resolve_removefromusergallery(self, info, us, wa, tkid):
        """Remove a token from user ``us``'s gallery (side effect)."""
        print(us, wa, tkid)  # NOTE(review): debug print left in
        tkid = int(tkid)
        remove_from_gallery(us, wa, tkid)
        return [True]

    def resolve_getlatestgallery(self, info):
        """Most recently updated gallery contents."""
        uri, images = get_latest_gallery()
        stuff = {
            "uri": uri,
            "address": "Global",
            "images": images,
            "owner": "Users"
        }
        return [stuff]
def __init__(self, _type, *args, **kwargs):
    # Wrap the element type in a graphene List before handing everything
    # else straight through to the parent field constructor.
    wrapped_type = List(_type)
    super(MongoengineListField, self).__init__(wrapped_type, *args, **kwargs)
class Arguments:
    # GraphQL arguments for the "put_external_trigger" workflow command.
    # Target workflow identifiers; at least one must be supplied.
    workflows = List(String, required=True)
    # Fixed command name dispatched to the workflow service.
    command = String(default_value='put_external_trigger')
    # Payload of the external trigger event.
    event_message = String(required=True)
    # Identifier used to de-duplicate/track the event.
    event_id = String(required=True)
class SearchAnswersFilterType(InputObjectType):
    """Lookup type to search in answers."""
    # Restrict the search to these questions (presumably question
    # identifiers/slugs — TODO confirm against the resolver).
    questions = List(graphene.String)
    # Value to search for; GenericScalar accepts any JSON scalar.
    value = graphene.types.generic.GenericScalar(required=True)
    # Optional comparison mode; backend default applies when omitted.
    lookup = SearchLookupMode(required=False)
class Arguments:
    # GraphQL arguments for the "expire_broadcast" workflow command.
    # Target workflow identifiers; at least one must be supplied.
    workflows = List(String, required=True)
    # Fixed command name dispatched to the workflow service.
    command = String(default_value='expire_broadcast')
    # Expiry cutoff (format not visible here — presumably a cycle
    # point/date string; TODO confirm).
    cutoff = String(description="""String""")
class Query(ObjectType):
    # Root query type for the EIP explorer: single EIP / paginated EIP
    # and commit listings / aggregate stats and tag/category/status lists.
    stats = Field(Stats)
    eip = Field(EIP, eip_id=ID(required=True))
    eips = List(
        EIP,
        limit=Int(default_value=100),
        offset=Int(default_value=0),
        tag=String(),
        category=String(),
        status=String(),
        search=String(),
    )
    commits = List(
        Commit,
        limit=Int(default_value=100),
        offset=Int(default_value=0),
        eip_id=Int(),
        search=String(),
    )
    errors = List(Error)
    tags = List(Tag, eip_id=ID())
    categories = List(Category)
    statuses = List(Status)

    def resolve_eip(_, info, eip_id):
        # Single EIP by id (tags eagerly loaded), or None when absent.
        with yield_session() as sess:
            found = sess.query(DBEIP).filter(DBEIP.eip_id == eip_id)
            return found.options(joinedload('tags')).one_or_none()

    def resolve_eips(_, info, limit, offset, tag=None, category=None,
                     status=None, search=None):
        # Exactly one filter wins, in priority order: tag, category,
        # status, full-text search, then the unfiltered listing.
        with yield_session() as sess:
            if tag:
                base = sess.query(DBEIP).filter(DBEIP.tags.any(tag_name=tag))
            elif category:
                base = sess.query(DBEIP).filter(
                    DBEIP.category == category.upper())
            elif status:
                base = sess.query(DBEIP).filter(
                    DBEIP.status == status.upper())
            elif search:
                base = DBEIP.search(sess, search)
            else:
                base = sess.query(DBEIP)
            # Shared ordering/pagination tail for every branch.
            return (base.order_by(DBEIP.eip_id)
                    .limit(limit).offset(offset)
                    .options(joinedload('tags')).all())

    def resolve_commits(_, info, limit, offset, eip_id=None, search=None):
        # Commits newest-first; `search` is only honoured without `eip_id`.
        with yield_session() as sess:
            if eip_id:
                # TODO: add search with eip_id
                base = sess.query(DBCommit).filter(
                    DBCommit.eips.any(eip_id=eip_id))
            elif search:
                base = DBCommit.search(sess, search)
            else:
                base = sess.query(DBCommit)
            return (base.order_by(DBCommit.committed_date.desc())
                    .limit(limit).offset(offset).all())

    def resolve_errors(_, info):
        # Full error table; no pagination.
        with yield_session() as sess:
            return sess.query(DBError).all()

    def resolve_stats(_, info):
        # Aggregate totals exposed as an attribute-accessible mapping.
        totals = {
            'eips': get_total_eips(),
            'commits': get_total_commits(),
            'contributors': get_total_committers(),
            'errors': get_total_errors(),
        }
        return AttrDict(totals)

    def resolve_tags(_, info, eip_id=None):
        # Tags of one EIP when an id is given, otherwise every tag.
        return get_eip_tags(eip_id) if eip_id else get_all_tags()

    def resolve_categories(_, info):
        return get_categories_with_totals()

    def resolve_statuses(_, info):
        return get_statuses_with_totals()
class Arguments:
    # GraphQL arguments for the "reload_suite" workflow command.
    # Target workflow identifiers; at least one must be supplied.
    workflows = List(String, required=True)
    # Fixed command name dispatched to the workflow service.
    command = String(default_value='reload_suite')
def __init__(self, of_type, exclude_fields=(), include_fields=(),
             operator=None, order_by=(), *args, **kwargs):
    """Build a filterable/sortable List field for a SQLAlchemy-backed type.

    Exposes one GraphQL argument per model column (whitelisted by
    ``include_fields``, then pruned by ``exclude_fields``) plus
    ``operator`` and ``order_by`` arguments, and installs a resolver
    that translates those arguments into a SQLAlchemy query.
    """
    columns_dict = self.build_columns_dict(of_type)
    if include_fields:
        # Whitelist takes precedence: keep only the requested columns.
        columns_dict = {k: columns_dict[k] for k in include_fields}
    for exclude_field in exclude_fields:
        if exclude_field in columns_dict.keys():
            del columns_dict[exclude_field]
    # Every surviving column becomes a field argument.
    kwargs.update(**columns_dict)
    kwargs['operator'] = String(
        description="Operator to use for filtering")
    kwargs['order_by'] = List(
        String, description="Fields to use for results ordering")
    # Defaults captured by the closure below; per-request arguments can
    # override both of them.
    default_operator = get_operator_function(operator)
    if isinstance(order_by, six.string_types):
        # Accept a single column name as shorthand for a 1-tuple.
        order_by = (order_by, )
    default_order_by = order_by

    def filters_resolver(self, info, **kwargs):
        # Resolver: filter on every kwarg that matches a model column,
        # then apply ordering ("col asc"/"col desc"/plain "col").
        operator = default_operator
        if 'operator' in kwargs:
            operator = get_operator_function(kwargs['operator'])
        query = of_type.get_query(info)
        for (k, v) in kwargs.items():
            if hasattr(of_type._meta.model, k):
                query = query.filter(
                    operator(
                        get_snake_or_camel_attr(of_type._meta.model, k),
                        v))
        order_by = default_order_by
        if 'order_by' in kwargs:
            order_by = kwargs['order_by']
        for order_by_item in order_by:
            if order_by_item.lower().endswith(' asc'):
                order_by_item = order_by_item[:-len(' asc')]
                query = query.order_by(
                    get_snake_or_camel_attr(of_type._meta.model,
                                            order_by_item).asc())
            elif order_by_item.lower().endswith(' desc'):
                order_by_item = order_by_item[:-len(' desc')]
                query = query.order_by(
                    get_snake_or_camel_attr(of_type._meta.model,
                                            order_by_item).desc())
            else:
                query = query.order_by(
                    get_snake_or_camel_attr(of_type._meta.model,
                                            order_by_item))
        # Optional per-field hook: a `query_<field>` method on the parent
        # type may transform (or replace) the query before execution.
        query_transformer = getattr(info.parent_type.graphene_type,
                                    'query_' + info.field_name, False)
        if callable(query_transformer):
            transformed_query = query_transformer(
                info.parent_type.graphene_type(), info, query, **kwargs)
            if transformed_query:
                query = transformed_query
        return query.all()
    kwargs['resolver'] = filters_resolver
    super(SQLAlchemyList, self).__init__(of_type, *args, **kwargs)
class Query(ObjectType):
    """Query objects for GraphQL API"""
    # FIX: resolver signatures previously used the mutable default
    # `sort=list()` — one shared list per process.  Replaced with the
    # immutable `sort=()`, which unpacks identically in `order_by(*sort)`.
    node = relay.Node.Field()
    # gets all entries
    product_class_list = SQLAlchemyConnectionField(
        schemas.ProductClass,
        sort=Argument(utils.sort_enum_for([
            models.ProductClass, models.ProductType
        ])),
        search=SearchProductClass(),
        before=String(), after=String(), first=Int(), last=Int()
    )
    pipelines_list = SQLAlchemyConnectionField(
        schemas.Pipelines,
        sort=Argument(utils.sort_enum_for([
            models.Pipelines, models.PipelineStage,
            models.GroupPypelines, models.TgUser
        ])),
        search=SearchPipelinesList(),
        before=String(), after=String(), first=Int(), last=Int())
    modules_list = SQLAlchemyConnectionField(
        schemas.Modules,
        sort=Argument(utils.sort_enum_for([
            models.Modules, models.TgUser, models.Pipelines
        ])),
        search=SearchModulesList(),
        before=String(), after=String(), first=Int(), last=Int()
    )
    group_pypelines_list = SQLAlchemyConnectionField(schemas.GroupPypelines)
    pipelines_modules_list = SQLAlchemyConnectionField(
        schemas.PipelinesModules)
    pipeline_stage_list = SQLAlchemyConnectionField(schemas.PipelineStage)
    pipeline_status_list = SQLAlchemyConnectionField(schemas.PipelineStatus)
    product_type_list = SQLAlchemyConnectionField(
        schemas.ProductType,
        sort=Argument(utils.sort_enum_for(models.ProductType)),
        search=SearchProductType(),
        before=String(), after=String(), first=Int(), last=Int()
    )
    mask_list = SQLAlchemyConnectionField(schemas.Mask)
    map_list = SQLAlchemyConnectionField(schemas.Map)
    catalog_list = SQLAlchemyConnectionField(schemas.Catalog)
    filters_list = SQLAlchemyConnectionField(schemas.Filters)
    session_list = SQLAlchemyConnectionField(schemas.Session)
    # gets list by filters
    release_tag_list = SQLAlchemyConnectionField(
        schemas.ReleaseTag,
        only_available=Boolean(),
        sort=Argument(utils.sort_enum_for([models.ReleaseTag,
                                           models.Fields])),
        search=SearchReleaseTag(),
        before=String(), after=String(), first=Int(), last=Int()
    )
    fields_list = SQLAlchemyConnectionField(
        schemas.Fields,
        only_available=Boolean(),
        sort=Argument(utils.sort_enum_for([
            models.Fields, models.ReleaseTag
        ])),
        search=SearchFieldList(),
        before=String(), after=String(), first=Int(), last=Int()
    )
    processes_list = SQLAlchemyConnectionField(
        schemas.Processes,
        all_instances=Boolean(), running=Boolean(), failure=Boolean(),
        success=Boolean(), published=Boolean(), removed=Boolean(),
        saved=Boolean(),
        sort=Argument(utils.sort_enum_for([
            models.Processes, models.TgUser, models.Fields,
            models.ReleaseTag, models.ProcessStatus
        ])),
        search=SearchProcessList(),
        before=String(), after=String(), first=Int(), last=Int()
    )
    products_list = SQLAlchemyConnectionField(
        schemas.Products,
        tag_id=Int(), field_id=Int(), type_id=Int(), class_id=Int(),
        display_name=String(), band=String(),
        sort=Argument(utils.sort_enum_for([
            models.Products, models.ReleaseTag, models.Fields,
            models.ProductClass, models.ProductType
        ])),
        search=SearchProductsList(),
        before=String(), after=String(), first=Int(), last=Int()
    )
    # gets by field unique
    product_class_by_class_name = Field(
        lambda: schemas.ProductClass, name=String())
    pipelines_by_name = Field(lambda: schemas.Pipelines, name=String())
    modules_by_name = Field(lambda: schemas.Modules, name=String())
    process_by_process_id = Field(lambda: schemas.Processes,
                                  process_id=Int())
    product_by_product_id = Field(lambda: schemas.Products,
                                  product_id=Int())
    # gets list by unique field
    fields_by_tag_id = List(
        lambda: schemas.Fields, tag_id=Int(), only_available=Boolean()
    )
    pipelines_by_stage_id_and_tag_id_and_field_id = relay.ConnectionField(
        schemas.PipelinesExecutionConnection,
        stage_id=Int(), tag_id=Int(), field_id=Int(),
        before=String(), after=String(), first=Int(), last=Int())
    pipelines_by_stage_id = relay.ConnectionField(
        schemas.PipelinesStageConnection,
        stage_id=Int(),
        before=String(), after=String(), first=Int(), last=Int())
    processes_by_tag_id_and_field_id_and_pipeline_id = List(
        lambda: schemas.Processes,
        pipeline_id=Int(), tag_id=Int(), field_id=Int(),
    )
    products_by_process_id = List(lambda: schemas.Products,
                                  process_id=Int())
    process_components_by_process_id = List(
        lambda: schemas.ProcessComponent, process_id=Int()
    )
    comments_by_process_id = List(lambda: schemas.Comments,
                                  process_id=Int())
    fields_by_tagname = List(lambda: schemas.Fields, tagname=String())
    product_class_by_type_id = List(
        lambda: schemas.ProductClass, type_id=Int())
    git_info = relay.ConnectionField(schemas.GitInfoConnection)
    time_profile = relay.ConnectionField(
        schemas.TimeProfileConnection, process_id=Int())
    output_products_by_pipeline = relay.ConnectionField(
        schemas.ProductsByPipelineConnection, pipeline_id=Int())
    input_products_by_pipeline = relay.ConnectionField(
        schemas.ProductsByPipelineConnection, pipeline_id=Int())

    def resolve_product_class_list(self, info, sort=(), search=None,
                                   **args):
        # Join ProductType only when sorting/searching touches it.
        query = schemas.ProductClass.get_query(info)
        tables = utils.selected_tables_from_arguments(sort)
        if utils.is_valid_search(search):
            tables = tables.union(utils.selected_tables_from_arguments(
                search['columns']
            ))
        if 'product_type' in tables:
            query = query.join(models.ProductType)
        _filters = utils.prepare_sqlalchemy_filters_casting_columns_to_str(
            search)
        if _filters:
            query = query.filter(or_(*_filters))
        return query.order_by(*sort)

    def resolve_pipelines_list(self, info, sort=(), search=None, **args):
        # Joins are added lazily, based on which tables the sort/search
        # arguments reference.
        query = schemas.Pipelines.get_query(info)
        tables = utils.selected_tables_from_arguments(sort)
        if utils.is_valid_search(search):
            tables = tables.union(
                utils.selected_tables_from_arguments(
                    search['columns']
                )
            )
        if 'tg_user' in tables:
            query = query.join(models.TgUser)
        if 'group_pypelines' in tables:
            query = query.join(models.GroupPypelines)
        if 'pipeline_stage' in tables:
            query = query.join(models.PipelineStage)
        _filters = utils.prepare_sqlalchemy_filters_casting_columns_to_str(
            search)
        if _filters:
            query = query.filter(or_(*_filters))
        return query.distinct().order_by(*sort)

    def resolve_modules_list(self, info, sort=(), search=None, **args):
        query = schemas.Modules.get_query(info)
        tables = utils.selected_tables_from_arguments(sort)
        if utils.is_valid_search(search):
            tables = tables.union(
                utils.selected_tables_from_arguments(
                    search['columns']))
        if 'tg_user' in tables:
            query = query.join(models.TgUser)
        if 'pipelines' in tables:
            # Modules reach Pipelines through the association table.
            query = query.join(models.PipelinesModules)
            query = query.join(models.Pipelines)
        _filters = utils.prepare_sqlalchemy_filters_casting_columns_to_str(
            search)
        if _filters:
            query = query.filter(or_(*_filters))
        return query.distinct().order_by(*sort)

    def resolve_product_type_list(
            self, info, sort=(), search=None, **args):
        query = schemas.ProductType.get_query(info)
        _filters = utils.prepare_sqlalchemy_filters_casting_columns_to_str(
            search)
        if _filters:
            query = query.filter(or_(*_filters))
        return query.order_by(*sort)

    def resolve_pipelines_by_stage_id_and_tag_id_and_field_id(
            self, info, stage_id=None, tag_id=None, field_id=None, **args):
        # Subquery aggregates per-pipeline process counts and the id of
        # the most recent process; the outer query then attaches that
        # process' timing and status.
        sub_query = db_session.query(
            func.distinct(models.Pipelines.pipeline_id).label(
                'pipeline_id'),
            models.Pipelines.name.label('pipeline_name'),
            models.Pipelines.display_name.label('pipeline_display_name'),
            models.Pipelines.pipeline_status_id.label(
                'pipeline_status_id'),
            models.PipelineStage.display_name.label('stage_display_name'),
            func.count(func.distinct(models.Processes.process_id)).label(
                'process_count'),
            func.max(models.Processes.process_id).label('last_process_id'),
        ).select_from(
            models.Pipelines
        ).join(
            models.Pipelines.processes
        ).join(
            models.PipelineStage
        ).join(
            models.ProcessStatus
        ).group_by(
            models.Pipelines.pipeline_id,
            models.PipelineStage.pipeline_stage_id
        ).order_by(
            models.PipelineStage.display_name,
            models.Pipelines.display_name
        )
        _filters = list()
        # _filters.append(models.Processes.flag_removed == False)
        _filters.append(models.Processes.instance == INSTANCE)
        # The link between the table processes and release_tag table
        # depends on the table fields
        if field_id or tag_id:
            sub_query = sub_query.join(
                models.ProcessFields).join(
                models.Fields)
        if field_id:
            _filters.append(models.ProcessFields.field_id == field_id)
        if tag_id:
            sub_query = sub_query.join(models.ReleaseTag)
            _filters.append(models.ReleaseTag.tag_id == tag_id)
        if stage_id:
            _filters.append(models.Pipelines.pipeline_stage_id == stage_id)
        sub_query = sub_query.filter(and_(*_filters)).subquery()
        query = db_session.query(
            sub_query,
            models.Processes.start_time.label('last_process_start_time'),
            models.Processes.end_time.label('last_process_end_time'),
            models.ProcessStatus.name.label('last_process_status'),
        ).join(
            sub_query,
            models.Processes.process_id == sub_query.c.last_process_id
        ).join(
            models.ProcessStatus).all()
        result = list()
        for row in query:
            result.append(schemas.PipelinesExecution(**row._asdict()))
        return result

    def resolve_pipelines_by_stage_id(
            self, info, stage_id=None, **args):
        query = db_session.query(
            func.distinct(models.Pipelines.pipeline_id).label(
                'pipeline_id'),
            models.Pipelines.name.label('pipeline_name'),
            models.Pipelines.display_name.label('pipeline_display_name'),
            models.Pipelines.pipeline_status_id.label(
                'pipeline_status_id'),
            models.PipelineStage.display_name.label('stage_display_name'),
        ).select_from(
            models.Pipelines
        ).join(
            models.PipelineStage
        ).group_by(
            models.Pipelines.pipeline_id,
            models.PipelineStage.pipeline_stage_id
        ).filter(
            models.Pipelines.pipeline_stage_id == stage_id
        ).order_by(
            models.PipelineStage.display_name,
            models.Pipelines.display_name
        )
        result = list()
        for row in query.all():
            result.append(schemas.PipelinesStage(**row._asdict()))
        return result

    def resolve_processes_by_tag_id_and_field_id_and_pipeline_id(
            self, info, pipeline_id, tag_id=None, field_id=None):
        query = schemas.Processes.get_query(
            info
        ).join(
            models.ProcessPipeline
        ).join(
            models.ProcessStatus
        ).order_by(
            models.Processes.process_id.desc()
        )
        _filters = list()
        _filters.append(models.ProcessPipeline.pipeline_id == pipeline_id)
        # _filters.append(models.Processes.flag_removed == False)
        _filters.append(models.Processes.instance == INSTANCE)
        if field_id or tag_id:
            query = query.join(
                models.ProcessFields).join(
                models.Fields)
        if field_id:
            _filters.append(models.ProcessFields.field_id == field_id)
        if tag_id:
            query = query.join(models.ReleaseTag)
            _filters.append(models.ReleaseTag.tag_id == tag_id)
        return query.filter(and_(*_filters))

    def resolve_products_by_process_id(self, info, process_id):
        query = schemas.Products.get_query(info)
        return query.filter_by(
            process_id=process_id
        ).order_by(
            models.Products.product_id
        )

    def resolve_process_components_by_process_id(self, info, process_id):
        query = schemas.ProcessComponent.get_query(info)
        return query.filter_by(
            process_id=process_id
        ).order_by(
            models.ProcessComponent.module_id
        )

    def resolve_comments_by_process_id(self, info, process_id):
        query = schemas.Comments.get_query(info)
        return query.filter_by(
            process_id=process_id
        ).order_by(
            models.Comments.date
        )

    def resolve_process_by_process_id(self, info, process_id):
        query = schemas.Processes.get_query(info)
        return query.filter_by(
            process_id=process_id
        ).order_by(
            models.Processes.process_id
        ).one_or_none()

    def resolve_product_by_product_id(self, info, product_id):
        query = schemas.Products.get_query(info)
        return query.filter_by(
            product_id=product_id
        ).order_by(
            models.Products.product_id
        ).one_or_none()

    def resolve_release_tag_list(self, info, only_available=True,
                                 sort=(), search=None, **args):
        """ Returns available release list(default behavior)
        Arguments:
            info -- is the resolver info.
        Keyword Arguments:
            only_available {bool} -- if False returns all entries
                regardless of status. (default: {True})
            sort {list} -- columns list to sorting.
                e.g.: ["name_asc"]
        """
        query = schemas.ReleaseTag.get_query(info)
        query = query.join(models.Fields)
        _filters = utils.prepare_sqlalchemy_filters_casting_columns_to_str(
            search)
        if _filters:
            query = query.filter(or_(*_filters))
        if only_available:
            query = query.filter_by(status=True)
        return query.order_by(*sort)

    def resolve_fields_list(self, info, only_available=True, sort=(),
                            search=None, **args):
        """ Returns available field list (default behavior)
        Arguments:
            info -- is the resolver info.
        Keyword Arguments:
            only_available {bool} -- if False returns all entries
                regardless of status. (default: {True})
            sort {list} -- columns list to sorting.
                e.g.: ["field_name_asc"]
        """
        query = schemas.Fields.get_query(info)
        if only_available:
            query = query.filter_by(status=True)
        tables = utils.selected_tables_from_arguments(sort)
        if utils.is_valid_search(search):
            tables = tables.union(utils.selected_tables_from_arguments(
                search['columns']
            ))
        if 'release_tag' in tables:
            query = query.join(models.ReleaseTag)
        _filters = utils.prepare_sqlalchemy_filters_casting_columns_to_str(
            search)
        if _filters:
            query = query.filter(or_(*_filters))
        return query.order_by(*sort)

    def resolve_processes_list(
            self, info, all_instances=None, running=None, failure=None,
            success=None, published=None, saved=None, removed=False,
            sort=(), search=None, **args):
        # Each tri-state flag (True/False/None) narrows the query only
        # when explicitly set by the caller.
        query = schemas.Processes.get_query(info)
        if removed is False:
            query = query.filter_by(flag_removed=False)
        else:
            query = query.filter(models.Processes.flag_removed.isnot(False))
        if not all_instances:
            query = query.filter_by(instance=INSTANCE)
        if running is True:
            query = query.filter(models.Processes.end_time.is_(None))
        elif running is False:
            query = query.filter(models.Processes.end_time.isnot(None))
        if failure is True:
            query = query.filter(models.Processes.status_id == 3)
        if success is True:
            query = query.filter(models.Processes.status_id == 1)
        if published is True:
            query = query.filter(
                models.Processes.published_date.isnot(None))
        elif published is False:
            query = query.filter(models.Processes.published_date.is_(None))
        # NOTE(review): this filter is redundant — the `removed is False`
        # branch above already applied the complementary condition.
        if removed is True:
            query = query.filter(models.Processes.flag_removed.isnot(False))
        if saved is True:
            query = query.join(models.SavedProcesses)
        elif saved is False:
            query = query.outerjoin(
                models.SavedProcesses
            ).filter(models.SavedProcesses.process_id.is_(None))
        query = query.join(models.ProcessStatus)
        query = query.outerjoin(models.ProcessFields).outerjoin(
            models.Fields)
        query = query.outerjoin(models.ReleaseTag)
        query = query.join(models.Session).join(models.TgUser)
        _filters = utils.prepare_sqlalchemy_filters_casting_columns_to_str(
            search)
        if _filters:
            query = query.filter(or_(*_filters))
        return query.distinct().order_by(*sort)

    def resolve_products_list(
            self, info, tag_id=None, field_id=None, type_id=None,
            class_id=None, display_name=None, band=None, search=None,
            sort=(), **args):
        query = schemas.Products.get_query(info)
        query = query.join(models.ProductField)
        query = query.outerjoin(models.Tables)
        query = query.join(models.Processes)
        # `== False` (not `.is_(False)`) kept intentionally: it renders as
        # an equality comparison, which excludes NULLs differently.
        query = query.filter(models.Processes.flag_removed == False)
        query = query.outerjoin(
            models.Mask, models.Mask.table_id == models.Tables.table_id)
        query = query.outerjoin(
            models.Map, models.Map.table_id == models.Tables.table_id)
        query = query.outerjoin(
            models.Fields,
            models.ProductField.field_id == models.Fields.field_id)
        query = query.outerjoin(
            models.ReleaseTag,
            models.Fields.release_tag_id == models.ReleaseTag.tag_id)
        query = query.join(models.ProductClass)
        query = query.join(models.ProductType)
        if tag_id:
            query = query.filter(models.ReleaseTag.tag_id == tag_id)
        if field_id:
            query = query.filter(models.Fields.field_id == field_id)
        if band:
            query = query.filter(
                or_(models.Mask.filter == band, models.Map.filter == band))
        if class_id:
            query = query.filter(models.ProductClass.class_id == class_id)
        if display_name:
            query = query.filter(
                models.Products.display_name == display_name)
        if type_id:
            query = query.filter(models.ProductType.type_id == type_id)
        _filters = utils.prepare_sqlalchemy_filters_casting_columns_to_str(
            search)
        if _filters:
            query = query.filter(or_(*_filters))
        return query.distinct().order_by(*sort)

    def resolve_fields_by_tag_id(self, info, tag_id, only_available=True):
        query = schemas.Fields.get_query(info)
        if only_available:
            query = query.filter_by(status=True)
        return query.filter(models.Fields.release_tag_id == tag_id)

    def resolve_fields_by_tagname(self, info, tagname):
        query = schemas.Fields.get_query(info)
        query = query.join(models.ReleaseTag)
        return query.filter(models.ReleaseTag.name == tagname)

    def resolve_product_class_by_class_name(self, info, name):
        query = schemas.ProductClass.get_query(info)
        return query.filter(
            models.ProductClass.class_name == name).one_or_none()

    def resolve_product_class_by_type_id(self, info, type_id):
        query = schemas.ProductClass.get_query(info)
        query = query.join(models.ProductType)
        return query.filter(models.ProductType.type_id == type_id)

    def resolve_pipelines_by_name(self, info, name):
        query = schemas.Pipelines.get_query(info)
        return query.filter(models.Pipelines.name == name).one_or_none()

    def resolve_modules_by_name(self, info, name):
        query = schemas.Modules.get_query(info)
        return query.filter(models.Modules.name == name).one_or_none()

    def resolve_git_info(self, info, **args):
        # Shells out to git in the server's working directory; all values
        # are decoded bytes from the git CLI.
        current_branch = subprocess.check_output(
            ["git", "rev-parse", "--abbrev-ref", "HEAD"]).strip()
        last_commit = subprocess.check_output(
            ["git", "log", "-1", "--format=%H"]).strip()
        last_commit_date = subprocess.check_output(
            ["git", "log", "-1", "--format=%cd"]).strip()
        last_commit_author = subprocess.check_output(
            ["git", "log", "-1", "--pretty=format:%an"]).strip()
        last_tag = subprocess.check_output(
            ["git", "describe", "--tags"]).strip()
        return [schemas.GitInfo(
            current_branch=current_branch.decode("utf-8"),
            last_commit=last_commit.decode("utf-8"),
            last_commit_date=last_commit_date.decode("utf-8"),
            last_commit_author=last_commit_author.decode("utf-8"),
            last_tag=last_tag.decode("utf-8")
        )]

    def resolve_time_profile(self, info, process_id, **args):
        # One TimeProfile entry per module, each carrying its job runs.
        l_modules = list()
        modules = db_session.query(
            func.distinct(models.Modules.module_id).label('module_id'),
            models.Modules.name.label('name'),
            models.Modules.display_name.label('display_name')
        ).select_from(
            models.JobRuns
        ).join(
            models.Modules
        ).filter(
            models.JobRuns.process_id == process_id
        ).all()
        for module in modules:
            query = db_session.query(
                models.JobRuns.hid.label('hid'),
                models.JobRuns.start_time.label('start_time'),
                models.JobRuns.end_time.label('end_time')
            ).select_from(
                models.JobRuns
            ).join(
                models.Modules
            ).filter(
                models.JobRuns.process_id == process_id,
                models.Modules.module_id == module.module_id
            ).all()
            jobs = list()
            for row in query:
                jobs.append(schemas.JobRuns(**row._asdict()))
            l_modules.append(schemas.TimeProfile(
                display_name=module.display_name,
                module_name=module.name,
                jobs=jobs
            ))
        return l_modules

    def resolve_output_products_by_pipeline(self, info, pipeline_id,
                                            **args):
        l_modules = list()
        query = db_session.query(
            func.distinct(models.Modules.module_id).label('module_id'),
            models.Modules.name.label('module_name'),
            models.Modules.display_name.label('display_name'),
            models.Modules.version.label('version'),
            models.Modules.version_date.label('version_date'),
        ).select_from(
            models.Modules
        ).join(
            models.ModuleOutput
        ).join(
            models.PipelinesModules
        ).filter(
            models.PipelinesModules.pipeline_id == pipeline_id
        )
        for module in query.all():
            query = db_session.query(
                models.ProductClass.display_name
            ).select_from(
                models.ProductClass
            ).join(
                models.ModuleOutput
            ).filter(
                models.ModuleOutput.module_id == module.module_id
            )
            _products = list()
            for row in query.all():
                _products.append(row.display_name)
            l_modules.append(schemas.ProductsByPipeline(
                display_name=module.display_name,
                module_name=module.module_name,
                version=module.version,
                version_date=module.version_date,
                products=_products
            ))
        return l_modules

    def resolve_input_products_by_pipeline(self, info, pipeline_id,
                                           **args):
        # Inputs come from two sources: explicit pipeline_input rows and
        # module_input rows; the former take precedence per module.
        l_modules = list()
        l_modules_from_pipeline_input = set()
        # get modules from pipeline_input
        query = db_session.query(
            func.distinct(models.PipelineInput.module_id).label(
                'module_id'),
            models.Modules.name.label('module_name'),
            models.Modules.display_name.label('display_name'),
            models.Modules.version.label('version'),
            models.Modules.version_date.label('version_date'),
        ).select_from(
            models.PipelineInput
        ).join(
            models.Modules
        ).filter(
            models.PipelineInput.pipeline_id == pipeline_id
        )
        for module in query.all():
            l_modules_from_pipeline_input.add(module.module_id)
            query = db_session.query(
                models.ProductClass.display_name
            ).select_from(
                models.ProductClass
            ).join(
                models.PipelineInput
            ).filter(
                and_(
                    models.PipelineInput.module_id == module.module_id,
                    models.PipelineInput.pipeline_id == pipeline_id
                )
            )
            _products = list()
            for row in query.all():
                _products.append(row.display_name)
            l_modules.append(schemas.ProductsByPipeline(
                display_name=module.display_name,
                module_name=module.module_name,
                version=module.version,
                version_date=module.version_date,
                products=_products
            ))
        # get modules from module_input
        query = db_session.query(
            func.distinct(models.Modules.module_id).label('module_id'),
            models.Modules.name.label('module_name'),
            models.Modules.display_name.label('display_name'),
            models.Modules.version.label('version'),
            models.Modules.version_date.label('version_date'),
        ).select_from(
            models.Modules
        ).join(
            models.ModuleInput
        ).join(
            models.PipelinesModules
        ).join(
            models.Pipelines
        ).filter(
            models.Pipelines.pipeline_id == pipeline_id
        )
        for module in query.all():
            if module.module_id in l_modules_from_pipeline_input:
                continue
            query = db_session.query(
                models.ProductClass.display_name
            ).select_from(
                models.ProductClass
            ).join(
                models.ModuleInput
            ).filter(
                models.ModuleInput.module_id == module.module_id
            )
            _products = list()
            for row in query.all():
                _products.append(row.display_name)
            l_modules.append(schemas.ProductsByPipeline(
                display_name=module.display_name,
                module_name=module.module_name,
                version=module.version,
                version_date=module.version_date,
                products=_products
            ))
        return l_modules
class LogTypeResults(graphene.ObjectType):
    # Result wrapper pairing a page of log entries with a total count.
    # The returned page of log entries.
    results = List(LogType, required=True)
    # Presumably the total number of matching entries across all pages
    # (may exceed len(results)) — TODO confirm against the resolver.
    total = Int()