def activity_stream_delete(sender, instance, **kwargs):
    """Record an activity stream entry for a model deletion."""
    if not activity_stream_enabled:
        return
    # Inventory deletion runs in the task system rather than the
    # request/response cycle; triggering this handler there can hit
    # db-integrity race conditions, so an entry is only recorded when
    # Inventory.schedule_deletion explicitly passes the flag.
    summary = {}
    if isinstance(instance, Inventory):
        if not kwargs.get('inventory_delete_flag', False):
            return
        # Roll the soon-to-be-deleted children into one coalesced record.
        summary['coalesced_data'] = {
            'hosts_deleted': instance.hosts.count(),
            'groups_deleted': instance.groups.count()
        }
    elif isinstance(instance, (Host, Group)) and instance.inventory.pending_deletion:
        # Accounted for by the parent inventory's entry, above.
        return
    # Skip deferred model classes.
    if getattr(type(instance), '_deferred', False):
        return
    summary.update(model_to_dict(instance, model_serializer_mapping()))
    object1 = camelcase_to_underscore(instance.__class__.__name__)
    # Never write a real OAuth2 token value into the activity stream.
    if type(instance) == OAuth2AccessToken:
        summary['token'] = CENSOR_VALUE
    entry = get_activity_stream_class()(
        operation='delete',
        changes=json.dumps(summary),
        object1=object1,
        actor=get_current_user_or_none())
    entry.save()
    connection.on_commit(lambda: emit_activity_stream_change(entry))
def activity_stream_create(sender, instance, created, **kwargs):
    """Record an activity stream entry when a new object is created."""
    if not (created and activity_stream_enabled):
        return
    # TODO: remove deprecated_group conditional in 3.3
    # Skip recording any inventory source directly associated with a group.
    if isinstance(instance, InventorySource) and instance.deprecated_group:
        return
    # Skip deferred model classes.
    if getattr(type(instance), '_deferred', False):
        return
    object1 = camelcase_to_underscore(instance.__class__.__name__)
    changes = model_to_dict(instance, model_serializer_mapping)
    # Special case where Job survey password variables need to be hidden
    if type(instance) == Job and 'extra_vars' in changes:
        changes['extra_vars'] = instance.display_extra_vars()
    # Never write a real OAuth2 token value into the activity stream.
    if type(instance) == OAuth2AccessToken:
        changes['token'] = '*************'
    entry = ActivityStream(operation='create',
                           object1=object1,
                           changes=json.dumps(changes),
                           actor=get_current_user_or_none())
    # TODO: Weird situation where cascade SETNULL doesn't work
    # it might actually be a good idea to remove all of these FK references since
    # we don't really use them anyway.
    if instance._meta.model_name != 'setting':  # Is not conf.Setting instance
        entry.save()
        getattr(entry, object1).add(instance)
    else:
        entry.setting = conf_to_dict(instance)
        entry.save()
def activity_stream_update(sender, instance, **kwargs):
    """Record an activity stream entry describing a change to an existing object."""
    if instance.id is None:
        return
    if not activity_stream_enabled:
        return
    # Diff against the currently-persisted row; nothing to do if it is gone.
    try:
        old = sender.objects.get(id=instance.id)
    except sender.DoesNotExist:
        return
    changes = model_instance_diff(old, instance, model_serializer_mapping())
    if changes is None:
        # No tracked fields changed.
        return
    # Skip deferred model classes.
    if getattr(type(instance), '_deferred', False):
        return
    object1 = camelcase_to_underscore(instance.__class__.__name__)
    entry = get_activity_stream_class()(
        operation='update',
        object1=object1,
        changes=json.dumps(changes),
        actor=get_current_user_or_none())
    if instance._meta.model_name != 'setting':  # Is not conf.Setting instance
        entry.save()
        getattr(entry, object1).add(instance.pk)
    else:
        entry.setting = conf_to_dict(instance)
        entry.save()
    connection.on_commit(lambda: emit_activity_stream_change(entry))
def counts(since):
    """Gather point-in-time object counts for analytics.

    The *since* parameter is part of the collector calling convention;
    the tallies here are always current totals, so it is unused.
    """
    data = {}
    tracked = (models.Organization, models.Team, models.User,
               models.Inventory, models.Credential, models.Project,
               models.JobTemplate, models.WorkflowJobTemplate,
               models.Host, models.Schedule,
               models.CustomInventoryScript, models.NotificationTemplate)
    for cls in tracked:
        data[camelcase_to_underscore(cls.__name__)] = cls.objects.count()
    # Count custom virtualenvs, excluding the stock 'ansible' venv.
    venvs = get_custom_venv_choices()
    data['custom_virtualenvs'] = len([path for path in venvs
                                      if os.path.basename(path.rstrip('/')) != 'ansible'])
    # Break inventories down by kind; '' denotes a normal (manual) inventory.
    inv_counts = dict(models.Inventory.objects.order_by().values_list('kind').annotate(Count('kind')))
    inv_counts['normal'] = inv_counts.get('', 0)
    inv_counts.pop('', None)
    inv_counts['smart'] = inv_counts.get('smart', 0)
    data['inventories'] = inv_counts
    data['unified_job'] = models.UnifiedJob.objects.exclude(launch_type='sync').count()  # excludes implicit project_updates
    data['active_host_count'] = models.Host.objects.active_count()
    # Sessions without a user membership are counted as anonymous.
    active_sessions = Session.objects.filter(expire_date__gte=now()).count()
    active_user_sessions = models.UserSessionMembership.objects.select_related('session').filter(session__expire_date__gte=now()).count()
    data['active_sessions'] = active_sessions
    data['active_user_sessions'] = active_user_sessions
    data['active_anonymous_sessions'] = active_sessions - active_user_sessions
    data['running_jobs'] = models.UnifiedJob.objects.exclude(launch_type='sync').filter(status__in=('running', 'waiting',)).count()
    data['pending_jobs'] = models.UnifiedJob.objects.exclude(launch_type='sync').filter(status__in=('pending',)).count()
    return data
def activity_stream_create(sender, instance, created, **kwargs):
    """Record an activity stream entry when a new object is created."""
    if not (created and activity_stream_enabled):
        return
    # Skip deferred model classes.
    if getattr(type(instance), '_deferred', False):
        return
    object1 = camelcase_to_underscore(instance.__class__.__name__)
    changes = model_to_dict(instance, model_serializer_mapping())
    # Special case where Job survey password variables need to be hidden
    if type(instance) == Job:
        changes['credentials'] = ['{} ({})'.format(c.name, c.id) for c in instance.credentials.iterator()]
        changes['labels'] = [lbl.name for lbl in instance.labels.iterator()]
        if 'extra_vars' in changes:
            changes['extra_vars'] = instance.display_extra_vars()
    # Never write a real OAuth2 token value into the activity stream.
    if type(instance) == OAuth2AccessToken:
        changes['token'] = CENSOR_VALUE
    entry = get_activity_stream_class()(operation='create',
                                        object1=object1,
                                        changes=json.dumps(changes),
                                        actor=get_current_user_or_none())
    # TODO: Weird situation where cascade SETNULL doesn't work
    # it might actually be a good idea to remove all of these FK references since
    # we don't really use them anyway.
    if instance._meta.model_name != 'setting':  # Is not conf.Setting instance
        entry.save()
        getattr(entry, object1).add(instance.pk)
    else:
        entry.setting = conf_to_dict(instance)
        entry.save()
    connection.on_commit(lambda: emit_activity_stream_change(entry))
def test_instance_group_order_persistence(get, post, admin, source_model):
    """Re-associating instance groups must preserve the association order."""
    # create several instance groups in random order
    total = 5
    shuffled_pks = list(range(total))
    random.shuffle(shuffled_pks)
    groups = [InstanceGroup.objects.create(name='iso-%d' % i) for i in shuffled_pks]
    view_name = camelcase_to_underscore(source_model.__class__.__name__)
    url = reverse('api:{}_instance_groups_list'.format(view_name),
                  kwargs={'pk': source_model.pk})
    # associate them all
    for ig in groups:
        post(url, {'associate': True, 'id': ig.id}, admin, expect=204)
    for _ in range(10):
        # remove them all
        for ig in groups:
            post(url, {'disassociate': True, 'id': ig.id}, admin, expect=204)
        assert get(url, admin).data['count'] == 0
        # add them all back in a fresh random order
        reordered = sorted(groups, key=lambda x: random.random())
        for ig in reordered:
            post(url, {'associate': True, 'id': ig.id}, admin, expect=204)
        listing = get(url, admin)
        assert listing.data['count'] == total
        assert [row['name'] for row in listing.data['results']] == [ig.name for ig in reordered]
def activity_stream_delete(sender, instance, **kwargs):
    """Record an activity stream entry for a deleted object.

    Serializes the instance's fields into a JSON ``changes`` blob on a new
    ActivityStream row; several object types are deliberately skipped (see
    inline comments).
    """
    if not activity_stream_enabled:
        return
    # TODO: remove deprecated_group conditional in 3.3
    # Skip recording any inventory source directly associated with a group.
    if isinstance(instance, InventorySource) and instance.deprecated_group:
        return
    # Inventory delete happens in the task system rather than request-response-cycle.
    # If we trigger this handler there we may fall into db-integrity-related race conditions.
    # So we add flag verification to prevent normal signal handling. This function will be
    # explicitly called with flag on in Inventory.schedule_deletion.
    if isinstance(instance, Inventory) and not kwargs.get(
            'inventory_delete_flag', False):
        return
    # Skip deferred model classes.
    _type = type(instance)
    if getattr(_type, '_deferred', False):
        return
    changes = model_to_dict(instance)
    object1 = camelcase_to_underscore(instance.__class__.__name__)
    # Never write a real OAuth2 token value into the activity stream.
    if type(instance) == OAuth2AccessToken:
        changes['token'] = TOKEN_CENSOR
    activity_entry = ActivityStream(operation='delete',
                                    changes=json.dumps(changes),
                                    object1=object1,
                                    actor=get_current_user_or_none())
    activity_entry.save()
def description(self):
    """Render this view's markdown description.

    Builds a template search path from the view's MRO (most specific class
    first) and renders the first template found; a deprecation banner is
    prepended when the context flags the view as deprecated.
    """
    candidates = ['api/%s.md' % camelcase_to_underscore(klass.__name__)
                  for klass in inspect.getmro(type(self))]
    context = self.get_description_context()
    rendered = render_to_string(candidates, context)
    if context.get('deprecated') and context.get('swagger_method') is None:
        # render deprecation messages at the very top
        rendered = '\n'.join([render_to_string('api/_deprecated.md', context), rendered])
    return rendered
def counts(since, **kwargs):
    """Gather point-in-time object counts for analytics.

    The *since*/**kwargs** parameters are part of the collector calling
    convention; the tallies here are always current totals, so they are
    unused.
    """
    data = {}
    for cls in (
            models.Organization,
            models.Team,
            models.User,
            models.Inventory,
            models.Credential,
            models.Project,
            models.JobTemplate,
            models.WorkflowJobTemplate,
            models.Host,
            models.Schedule,
            models.NotificationTemplate,
    ):
        data[camelcase_to_underscore(cls.__name__)] = cls.objects.count()
    # Break inventories down by kind; '' denotes a normal (manual) inventory.
    inv_counts = dict(
        models.Inventory.objects.order_by().values_list('kind').annotate(
            Count('kind')))
    inv_counts['normal'] = inv_counts.get('', 0)
    inv_counts.pop('', None)
    inv_counts['smart'] = inv_counts.get('smart', 0)
    data['inventories'] = inv_counts
    data['unified_job'] = models.UnifiedJob.objects.exclude(
        launch_type='sync').count()  # excludes implicit project_updates
    data['active_host_count'] = models.Host.objects.active_count()
    # Sessions without a user membership are counted as anonymous.
    active_sessions = Session.objects.filter(expire_date__gte=now()).count()
    active_user_sessions = models.UserSessionMembership.objects.select_related(
        'session').filter(session__expire_date__gte=now()).count()
    data['active_sessions'] = active_sessions
    data['active_user_sessions'] = active_user_sessions
    data['active_anonymous_sessions'] = active_sessions - active_user_sessions
    data['running_jobs'] = models.UnifiedJob.objects.exclude(
        launch_type='sync').filter(status__in=(
            'running',
            'waiting',
        )).count()
    data['pending_jobs'] = models.UnifiedJob.objects.exclude(
        launch_type='sync').filter(status__in=('pending', )).count()
    if connection.vendor == 'postgresql':
        with connection.cursor() as cursor:
            # Pass the database name as a bound parameter instead of
            # interpolating it into the SQL string: this is robust against
            # names containing quotes and avoids string-built SQL entirely.
            cursor.execute(
                "select count(*) from pg_stat_activity where datname = %s",
                (connection.settings_dict['NAME'],),
            )
            data['database_connections'] = cursor.fetchone()[0]
    else:
        # We should be using postgresql, but if we do that change that ever we should change the below value
        data['database_connections'] = 1
    return data
def reverse_gfk(content_object, request):
    """Compute a reverse URL for a GenericForeignKey target.

    Returns a dictionary of the form
        { '<type>': reverse(<type detail>) }
    for example
        { 'organization': '/api/v1/organizations/1/' }

    An empty dict is returned when there is no target object or it has no
    ``get_absolute_url`` method.
    """
    if content_object is None or not hasattr(content_object, 'get_absolute_url'):
        return {}
    key = camelcase_to_underscore(content_object.__class__.__name__)
    return {key: content_object.get_absolute_url(request=request)}
def emit_event_detail(event):
    """Broadcast one job-event payload over the channel layer.

    When live UI updates are disabled, only events in MINIMAL_EVENTS are
    still broadcast.
    """
    if settings.UI_LIVE_UPDATES_ENABLED is False and event.event not in MINIMAL_EVENTS:
        return
    cls = event.__class__
    # Map the concrete event class to the FK field naming its parent job.
    relation = {
        JobEvent: 'job_id',
        AdHocCommandEvent: 'ad_hoc_command_id',
        ProjectUpdateEvent: 'project_update_id',
        InventoryUpdateEvent: 'inventory_update_id',
        SystemJobEvent: 'system_job_id',
    }[cls]
    # Only job and ad hoc command events have a detail URL.
    url = ''
    if isinstance(event, JobEvent):
        url = '/api/v2/job_events/{}'.format(event.id)
    if isinstance(event, AdHocCommandEvent):
        url = '/api/v2/ad_hoc_command_events/{}'.format(event.id)
    group = camelcase_to_underscore(cls.__name__) + 's'
    timestamp = event.created.isoformat()
    payload = {
        'id': event.id,
        relation.replace('_id', ''): getattr(event, relation),
        'created': timestamp,
        'modified': timestamp,
        'group_name': group,
        'url': url,
        'stdout': event.stdout,
        'counter': event.counter,
        'uuid': event.uuid,
        'parent_uuid': getattr(event, 'parent_uuid', ''),
        'start_line': event.start_line,
        'end_line': event.end_line,
        'event': event.event,
        'event_data': getattr(event, 'event_data', {}),
        'failed': event.failed,
        'changed': event.changed,
        'event_level': getattr(event, 'event_level', ''),
        'play': getattr(event, 'play', ''),
        'role': getattr(event, 'role', ''),
        'task': getattr(event, 'task', ''),
    }
    channel = '-'.join([group, str(getattr(event, relation))])
    consumers.emit_channel_notification(channel, payload)
def get_description(self, request, html=False):
    """Render this view's markdown description for *request*.

    Marks the context as deprecated when the requested API version is
    older than the default, and prepends a deprecation banner in that
    case.
    """
    self.request = request
    candidates = ['api/%s.md' % camelcase_to_underscore(klass.__name__)
                  for klass in inspect.getmro(type(self))]
    context = self.get_description_context()
    # "v2" -> 2
    default_version = int(settings.REST_FRAMEWORK['DEFAULT_VERSION'].lstrip('v'))
    request_version = get_request_version(self.request)
    if request_version is not None and request_version < default_version:
        context['deprecated'] = True
    rendered = render_to_string(candidates, context)
    if context.get('deprecated') and context.get('swagger_method') is None:
        # render deprecation messages at the very top
        rendered = '\n'.join([render_to_string('api/_deprecated.md', context), rendered])
    return rendered
def activity_stream_update(sender, instance, **kwargs):
    """Record an activity stream entry describing a change to an existing object."""
    if instance.id is None:
        return
    if not activity_stream_enabled:
        return
    # Diff against the currently-persisted row; nothing to do if it is gone.
    try:
        old = sender.objects.get(id=instance.id)
    except sender.DoesNotExist:
        return
    changes = model_instance_diff(old, instance, model_serializer_mapping)
    if changes is None:
        # No tracked fields changed.
        return
    object1 = camelcase_to_underscore(instance.__class__.__name__)
    entry = ActivityStream(operation='update',
                           object1=object1,
                           changes=json.dumps(changes),
                           actor=get_current_user_or_none())
    entry.save()
    if instance._meta.model_name != 'setting':  # Is not conf.Setting instance
        getattr(entry, object1).add(instance)
def activity_stream_associate(sender, instance, **kwargs):
    """Record activity stream entries for many-to-many (dis)association events.

    Expects m2m_changed-style keyword arguments ('action', 'model', 'pk_set',
    and optionally 'role' for RBAC changes); only the 'pre_add' and
    'pre_remove' phases are recorded. One ActivityStream row is created per
    related pk in 'pk_set'.
    """
    if not activity_stream_enabled:
        return
    if kwargs['action'] in ['pre_add', 'pre_remove']:
        if kwargs['action'] == 'pre_add':
            action = 'associate'
        elif kwargs['action'] == 'pre_remove':
            action = 'disassociate'
        else:
            return
        obj1 = instance
        # Skip deferred model classes.
        _type = type(instance)
        if getattr(_type, '_deferred', False):
            return
        object1 = camelcase_to_underscore(obj1.__class__.__name__)
        # Dotted path of the through model, stored as the relationship type.
        obj_rel = sender.__module__ + "." + sender.__name__
        for entity_acted in kwargs['pk_set']:
            obj2 = kwargs['model']
            obj2_id = entity_acted
            # The related row may already be gone; skip it quietly.
            obj2_actual = obj2.objects.filter(id=obj2_id)
            if not obj2_actual.exists():
                continue
            obj2_actual = obj2_actual[0]
            _type = type(obj2_actual)
            if getattr(_type, '_deferred', False):
                return
            # For Role objects, record the resource the role points at
            # rather than the Role row itself.
            if isinstance(obj2_actual, Role) and obj2_actual.content_object is not None:
                obj2_actual = obj2_actual.content_object
                object2 = camelcase_to_underscore(
                    obj2_actual.__class__.__name__)
            else:
                object2 = camelcase_to_underscore(obj2.__name__)
            # Skip recording any inventory source, or system job template changes here.
            if isinstance(obj1, InventorySource) or isinstance(
                    obj2_actual, InventorySource):
                continue
            if isinstance(obj1, SystemJobTemplate) or isinstance(
                    obj2_actual, SystemJobTemplate):
                continue
            if isinstance(obj1, SystemJob) or isinstance(
                    obj2_actual, SystemJob):
                continue
            activity_entry = get_activity_stream_class()(
                changes=json.dumps(
                    dict(object1=object1,
                         object1_pk=obj1.pk,
                         object2=object2,
                         object2_pk=obj2_id,
                         action=action,
                         relationship=obj_rel)),
                operation=action,
                object1=object1,
                object2=object2,
                object_relationship_type=obj_rel,
                actor=get_current_user_or_none())
            activity_entry.save()
            # Link both sides of the association to the new entry.
            getattr(activity_entry, object1).add(obj1.pk)
            getattr(activity_entry, object2).add(obj2_actual.pk)
            # Record the role for RBAC changes
            if 'role' in kwargs:
                role = kwargs['role']
                # Replace the through-model path with the role's own
                # dotted identity (module.Class.role_field).
                if role.content_object is not None:
                    obj_rel = '.'.join([
                        role.content_object.__module__,
                        role.content_object.__class__.__name__,
                        role.role_field
                    ])
                # If the m2m is from the User side we need to
                # set the content_object of the Role for our entry.
                if type(instance) == User and role.content_object is not None:
                    getattr(activity_entry,
                            role.content_type.name.replace(' ', '_')).add(
                                role.content_object)
                activity_entry.role.add(role)
                activity_entry.object_relationship_type = obj_rel
                activity_entry.save()
            # Broadcast only after the surrounding transaction commits.
            connection.on_commit(
                lambda: emit_activity_stream_change(activity_entry))
# NOTE(review): this chunk begins mid-method — the `if` condition opened
# above the visible portion of the file (it appears to test whether the
# submitted LOG_AGGREGATOR_PASSWORD is the '$encrypted$' placeholder —
# TODO confirm against the full file).
                '').startswith('$encrypted$'):
            # Placeholder was submitted: substitute the stored password so
            # the connectivity test runs with the real credential.
            serializer.validated_data['LOG_AGGREGATOR_PASSWORD'] = getattr(
                settings, 'LOG_AGGREGATOR_PASSWORD', '')
        try:
            # Build a throwaway settings object from the validated payload
            # and run the logging connectivity test against it.
            class MockSettings:
                pass
            mock_settings = MockSettings()
            for k, v in serializer.validated_data.items():
                setattr(mock_settings, k, v)
            AWXProxyHandler().perform_test(custom_settings=mock_settings)
            # UDP is fire-and-forget: 201 signals the message was sent,
            # without confirming delivery.
            if mock_settings.LOG_AGGREGATOR_PROTOCOL.upper() == 'UDP':
                return Response(status=status.HTTP_201_CREATED)
        except LoggingConnectivityException as e:
            return Response({'error': str(e)},
                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)
        return Response(status=status.HTTP_200_OK)


# Create view functions for all of the class-based views to simplify inclusion
# in URL patterns and reverse URL lookups, converting CamelCase names to
# lowercase_with_underscore (e.g. MyView.as_view() becomes my_view).
this_module = sys.modules[__name__]
for attr, value in list(locals().items()):
    if isinstance(value, type) and issubclass(value, APIView):
        name = camelcase_to_underscore(attr)
        view = value.as_view()
        setattr(this_module, name, view)