class InvalidDefaultModel(models.Model):
    # NOTE: `default={}` is a shared mutable default — Django's system check
    # (fields.E010) flags exactly this. Given the class name, the defect is
    # intentional: this model exists to exercise that check in a test.
    field = JSONField(default={})
class UnallowedModel(models.Model):
    # Minimal fixture with a bare JSONField; presumably used by tests for
    # backends where JSON fields are not allowed — TODO confirm against the
    # surrounding test suite.
    field = JSONField()
class Service(ResourceBase):
    """Service Class to represent remote Geo Web Services"""

    # Kind of remote service (choices come from enumerations.SERVICE_TYPES).
    type = models.CharField(
        max_length=10,
        choices=enumerations.SERVICE_TYPES
    )
    # How the service's resources are brought into this instance.
    method = models.CharField(
        max_length=1,
        choices=(
            (enumerations.LOCAL, _('Local')),
            (enumerations.CASCADED, _('Cascaded')),
            (enumerations.HARVESTED, _('Harvested')),
            (enumerations.INDEXED, _('Indexed')),
            (enumerations.LIVE, _('Live')),
            (enumerations.OPENGEOPORTAL, _('OpenGeoPortal'))
        )
    )
    # with service, version and request etc stripped off
    base_url = models.URLField(
        unique=True,
        db_index=True
    )
    # When set, service_url is routed through the local proxy view instead
    # of hitting base_url directly (see the service_url property).
    proxy_base = models.URLField(
        null=True,
        blank=True
    )
    version = models.CharField(
        max_length=10,
        null=True,
        blank=True
    )
    # Should force to slug?
    name = models.CharField(
        max_length=255,
        unique=True,
        db_index=True
    )
    description = models.CharField(
        max_length=255,
        null=True,
        blank=True
    )
    # NOTE(review): `False` is passed positionally, which makes it the
    # verbose_name. It was presumably `verify_exists=False` from a very old
    # Django version — confirm and clean up (same pattern on workspace_ref).
    online_resource = models.URLField(
        False,
        null=True,
        blank=True
    )
    fees = models.CharField(
        max_length=1000,
        null=True,
        blank=True
    )
    access_constraints = models.CharField(
        max_length=255,
        null=True,
        blank=True
    )
    connection_params = models.TextField(
        null=True,
        blank=True
    )
    # Pre-encoded query string appended to base_url when building service_url.
    extra_queryparams = models.TextField(
        null=True,
        blank=True
    )
    operations = JSONField(
        default=dict,
        null=True,
        blank=True
    )
    # NOTE(review): credentials are stored in plain text fields — consider
    # encrypting or moving to a secrets store.
    username = models.CharField(
        max_length=50,
        null=True,
        blank=True
    )
    password = models.CharField(
        max_length=50,
        null=True,
        blank=True
    )
    api_key = models.CharField(
        max_length=255,
        null=True,
        blank=True
    )
    # NOTE(review): same positional-False quirk as online_resource above.
    workspace_ref = models.URLField(
        False,
        null=True,
        blank=True
    )
    store_ref = models.URLField(
        null=True,
        blank=True
    )
    resources_ref = models.URLField(
        null=True,
        blank=True
    )
    profiles = models.ManyToManyField(
        settings.AUTH_USER_MODEL,
        through='ServiceProfileRole'
    )
    # Time of the first failed probe and number of retries since then.
    first_noanswer = models.DateTimeField(
        null=True,
        blank=True
    )
    noanswer_retries = models.PositiveIntegerField(
        null=True,
        blank=True
    )
    external_id = models.IntegerField(
        null=True,
        blank=True
    )
    parent = models.ForeignKey(
        'services.Service',
        null=True,
        blank=True,
        on_delete=models.CASCADE,
        related_name='service_set'
    )
    # Last HTTP status observed by probe_service(); defaults to 200 (OK).
    probe = models.IntegerField(
        default=200
    )

    # Supported Capabilities

    def __str__(self):
        return str(self.name)

    @property
    def service_url(self):
        # Rebuild base_url with extra_queryparams as the query string;
        # if a proxy base is configured, route through the local proxy view.
        parsed_url = urlparse(self.base_url)
        encoded_get_args = self.extra_queryparams
        service_url = ParseResult(
            parsed_url.scheme, parsed_url.netloc, parsed_url.path,
            parsed_url.params, encoded_get_args, parsed_url.fragment
        )
        service_url = service_url.geturl() if not self.proxy_base else urljoin(
            settings.SITEURL, reverse('service_proxy', args=[self.id]))
        return service_url

    @property
    def ptype(self):
        # Return the gxp ptype that should be used to display layers
        return enumerations.GXP_PTYPES[self.type] if self.type else None

    @property
    def service_type(self):
        # Human-readable label for self.type. Raises IndexError if the stored
        # value is not in enumerations.SERVICE_TYPES.
        return [x for x in enumerations.SERVICE_TYPES if x[0] == self.type][0][1]

    def get_absolute_url(self):
        return '/services/%i' % self.id

    def probe_service(self):
        """Hit service_url and return the HTTP status code (404 on any error).

        Any failure (network error, DNS, timeout, …) is collapsed to 404.
        """
        from geonode.utils import http_client
        try:
            resp, content = http_client.request(self.service_url)
            return resp.status_code
        except Exception:
            return 404

    class Meta:
        # custom permissions,
        # change and delete are standard in django-guardian
        permissions = (
            ('add_resourcebase_from_service', 'Can add resources to Service'),
            ('change_resourcebase_metadata', 'Can change resources metadata'),
        )
class AbstractScannerResult(ModelBase):
    """Abstract base model holding the raw output of one scanner run.

    Concrete subclasses are expected to declare a ``matched_rules``
    many-to-many field (``save()`` and ``run_action()`` look it up via
    ``_meta`` / ``cls.matched_rules``).
    """

    # Store the "raw" results of a scanner.
    results = JSONField(default=list)
    scanner = models.PositiveSmallIntegerField(choices=SCANNERS.items())
    has_matches = models.BooleanField(null=True)
    state = models.PositiveSmallIntegerField(
        choices=RESULT_STATES.items(), null=True, blank=True, default=UNKNOWN
    )
    version = models.ForeignKey(
        'versions.Version',
        related_name='%(class)ss',
        on_delete=models.CASCADE,
        null=True,
    )

    class Meta(ModelBase.Meta):
        abstract = True
        indexes = [
            models.Index(fields=('has_matches',)),
            models.Index(fields=('state',)),
        ]

    def add_yara_result(self, rule, tags=None, meta=None):
        """This method is used to store a Yara result."""
        self.results.append({
            'rule': rule,
            'tags': tags or [],
            'meta': meta or {},
        })

    def extract_rule_names(self):
        """This method parses the raw results and returns the (matched)
        rule names. Not all scanners have rules that necessarily match."""
        if self.scanner == YARA:
            return sorted({result['rule'] for result in self.results})
        if self.scanner == CUSTOMS and 'matchedRules' in self.results:
            return self.results['matchedRules']
        # We do not have support for the remaining scanners (yet).
        return []

    def save(self, *args, **kwargs):
        """Persist the result, then link the active rules that matched.

        ``has_matches`` is computed before the first save so it is stored
        in the same row; the M2M rows can only be added after the instance
        has a primary key.
        """
        rule_model = self._meta.get_field('matched_rules').related_model
        matched_rules = rule_model.objects.filter(
            scanner=self.scanner,
            name__in=self.extract_rule_names(),
            # See: https://github.com/mozilla/addons-server/issues/13143
            is_active=True,
        )
        self.has_matches = bool(matched_rules)
        # Save the instance first...
        super().save(*args, **kwargs)
        # ...then add the associated rules.
        for scanner_rule in matched_rules:
            self.matched_rules.add(scanner_rule)

    def get_scanner_name(self):
        return SCANNERS.get(self.scanner)

    def get_pretty_results(self):
        return json.dumps(self.results, indent=2)

    def get_files_by_matched_rules(self):
        """Map each matched rule name to the list of files it matched."""
        res = defaultdict(list)
        # FIX: compare scanner ids with `==`, not `is` — identity comparison
        # on ints only works by CPython's small-int caching accident.
        if self.scanner == YARA:
            for item in self.results:
                res[item['rule']].append(item['meta'].get('filename', '???'))
        elif self.scanner == CUSTOMS:
            scanMap = self.results.get('scanMap', {})
            for filename, rules in scanMap.items():
                for ruleId, data in rules.items():
                    if data.get('RULE_HAS_MATCHED', False):
                        res[ruleId].append(filename)
        return res

    def can_report_feedback(self):
        return self.state == UNKNOWN and self.scanner not in [WAT, MAD]

    def can_revert_feedback(self):
        return self.state != UNKNOWN and self.scanner not in [WAT, MAD]

    def get_git_repository(self):
        return {
            CUSTOMS: settings.CUSTOMS_GIT_REPOSITORY,
            YARA: settings.YARA_GIT_REPOSITORY,
        }.get(self.scanner)

    @classmethod
    def run_action(cls, version):
        """Try to find and execute an action for a given version, based on
        the scanner results and associated rules.

        If an action is found, it is run synchronously from this method,
        not in a task.
        """
        log.info('Checking rules and actions for version %s.', version.pk)

        try:
            mad_result = cls.objects.filter(version=version, scanner=MAD).get()
            customs = mad_result.results.get('scanners', {}).get('customs', {})
            customs_score = customs.get('score', 0.5)
            customs_models_agree = customs.get('result_details', {}).get(
                'models_agree', True)

            # Flag extreme scores or model disagreement for human review.
            if (customs_score <= 0.01 or customs_score >= 0.99 or
                    not customs_models_agree):
                log.info('Flagging version %s for human review by MAD.',
                         version.pk)
                _flag_for_human_review_by_scanner(version, MAD)
        except cls.DoesNotExist:
            log.info('No MAD scanner result for version %s.', version.pk)

        rule_model = cls.matched_rules.rel.model
        result_query_name = cls._meta.get_field(
            'matched_rules').related_query_name()
        # Pick the most severe action among active rules matched for this
        # version (`-action` sorts highest action id first).
        rule = (
            rule_model.objects.filter(**{
                f'{result_query_name}__version': version,
                'is_active': True,
            }).order_by(
                # The `-` sign means descending order.
                '-action').first())

        if not rule:
            log.info('No action to execute for version %s.', version.pk)
            return

        action_id = rule.action
        action_name = ACTIONS.get(action_id, None)

        if not action_name:
            raise Exception('invalid action %s' % action_id)

        ACTION_FUNCTIONS = {
            NO_ACTION: _no_action,
            FLAG_FOR_HUMAN_REVIEW: _flag_for_human_review,
            DELAY_AUTO_APPROVAL: _delay_auto_approval,
            DELAY_AUTO_APPROVAL_INDEFINITELY:
                _delay_auto_approval_indefinitely,
            DELAY_AUTO_APPROVAL_INDEFINITELY_AND_RESTRICT:
                (_delay_auto_approval_indefinitely_and_restrict),
        }
        action_function = ACTION_FUNCTIONS.get(action_id, None)

        if not action_function:
            raise Exception('no implementation for action %s' % action_id)

        # We have a valid action to execute, so let's do it!
        log.info('Starting action "%s" for version %s.', action_name,
                 version.pk)
        action_function(version)
        log.info('Ending action "%s" for version %s.', action_name, version.pk)
class ChatRoom(models.Model):
    """Model for storing the room information related to the template bot

    Attributes:
        uuid (uuid): Unique ID for a generated chat room
        created_on (datetime): Time of room creation
        current_state (str): Represents the current state of the bot (redundant)
        bot_id (uuid): Bot ID which maps to an existing bot from
            `apps.chatbox.models.Chatbox`
        bot_is_active (bool): Is the bot active or not?
        num_msgs (int): Total number of messages for the current room
    """

    # Per-admin sequential visitor counter; assigned in save(new_visitor=True).
    visitor_id = models.PositiveIntegerField(default=1, db_column='visitor_id')
    room_id = models.UUIDField(primary_key=True, default=uuid.uuid4,
                               editable=False, db_column='room_id')
    created_on = models.DateTimeField(_('chatroom created on'),
                                      default=current_utc_time)
    room_name = models.CharField(max_length=1000, null=True)
    messages = JSONField(db_column='messages', default=list)
    variables = JSONField(db_column='variables', default=dict)
    bot_id = models.UUIDField(db_column='bot_id')
    chatbot_type = models.CharField(max_length=10, db_column='chatbot_type',
                                    default='website')
    bot_is_active = models.BooleanField(default=False,
                                        db_column='bot_is_active')
    num_msgs = models.PositiveIntegerField(default=0, db_column='num_msgs')
    is_lead = models.BooleanField(default=False, db_column='is_lead')
    STATUS_LIST = (('pending', 'Pending'), ('resolve', 'Resolve'),
                   ('abandoned', 'Abandoned'), ('private_note', 'Private Note'),
                   ('critical', 'Critical'))
    status = models.CharField(max_length=25, choices=STATUS_LIST,
                              default='pending', null=True, blank=True)
    # True once an operator has taken over from the bot.
    takeover = models.BooleanField(default=False)
    end_time = models.DateTimeField(db_column='end_time', null=True)
    recent_messages = JSONField(db_column='recent_messages', default=list)
    admin_id = models.IntegerField(null=True, blank=True)
    utm_source = models.CharField(max_length=25, null=True, blank=True)
    utm_medium = models.CharField(max_length=25, null=True, blank=True)
    utm_campaign = models.CharField(max_length=25, null=True, blank=True)
    utm_term = models.CharField(max_length=25, null=True, blank=True)
    utm_content = models.CharField(max_length=25, null=True, blank=True)
    website_url = models.URLField(max_length=255, blank=True,
                                  db_column="website_url")
    channel_id = models.CharField(max_length=255, blank=True,
                                  db_column="channel_id")
    ASSIGNMENT_TYPE = (
        ('', 'NONE'),
        ('automatic', 'Automatic'),
        ('manual', 'Manual'),
    )
    assignment_type = models.CharField(max_length=10, choices=ASSIGNMENT_TYPE,
                                       default='', null=True)
    assigned_operator = models.EmailField(_('email address'), null=True,
                                          blank=True)
    team_assignment_type = models.CharField(max_length=10,
                                            choices=ASSIGNMENT_TYPE,
                                            default='', null=True)
    assigned_team = models.IntegerField(null=True, blank=True)
    end_chat = models.BooleanField(default=False)
    updated_on = models.DateTimeField(db_column='updated_on', null=True)

    def save(self, *args, **kwargs):
        """Save the room, handling several custom keyword flags.

        All non-Django kwargs ('new_visitor', 'preview', 'standalone',
        'send_update', 'all_team', 'assigned_operator', 'assignment_type',
        'assigner', 'one_to_one', 'operator_partner') are consumed (deleted)
        here before delegating to Model.save(). When send_update is truthy,
        a room snapshot is broadcast afterwards — via a Celery task if
        settings.CELERY_TASK is True, otherwise synchronously over Django
        Channels groups.

        NOTE(review): self.assigned_team_name is assigned below but no such
        field is declared on this model — it appears to be a transient,
        non-persisted attribute used only for the broadcast; confirm.
        """
        # --- visitor numbering: allocate the next per-admin visitor id when
        # --- creating a brand-new room.
        if 'new_visitor' in kwargs:
            try:
                if kwargs['new_visitor'] == True:
                    # Only on initial INSERT, never on updates.
                    if self._state.adding:
                        if 'using' in kwargs:
                            # Honor an explicit database alias.
                            last_id = ChatRoom.objects.using(
                                kwargs['using']).filter(
                                    admin_id=self.admin_id).aggregate(
                                        largest=models.Max('visitor_id'))
                            last_id = last_id['largest']
                        else:
                            last_id = ChatRoom.objects.filter(
                                admin_id=self.admin_id).aggregate(
                                    largest=models.Max('visitor_id'))
                            last_id = last_id['largest']
                        if last_id is not None:
                            # NOTE(review): read-then-increment is racy under
                            # concurrent room creation — confirm acceptable.
                            self.visitor_id = last_id + 1
                            self.room_name = f"Visitor{self.visitor_id}"
                        else:
                            self.visitor_id = 1
                            self.room_name = f"Visitor{self.visitor_id}"
            except Exception as ex:
                print(ex)
            finally:
                del kwargs['new_visitor']

        # --- channel tagging for preview / standalone-page sessions.
        if 'preview' in kwargs:
            try:
                if kwargs['preview'] == True:
                    self.channel_id = 'preview'
            finally:
                del kwargs['preview']
        if 'standalone' in kwargs:
            try:
                if kwargs['standalone'] == True:
                    self.channel_id = 'standalone page'
            finally:
                del kwargs['standalone']

        # --- flags controlling the post-save broadcast.
        send_update = False
        if 'send_update' in kwargs and kwargs['send_update'] == True:
            print("SENDING UPDATE..........")
            del kwargs['send_update']
            send_update = True
        if 'all_team' in kwargs:
            all_team = kwargs['all_team']
            del kwargs['all_team']
        else:
            all_team = False

        # --- operator / team assignment. A single operator assigns by email;
        # --- multiple operators are treated as a team assignment.
        if 'assigned_operator' in kwargs:
            assigned_operator = kwargs['assigned_operator']
            if len(assigned_operator) == 1:
                if hasattr(assigned_operator[0], 'email'):
                    self.assigned_operator = assigned_operator[0].email
                    # self.assigned_team_name = assigned_operator[0].team_member.name
                # if 'is_team_assignment' in kwargs and kwargs['is_team_assignment'] == True:
                #     self.assigned_team_name = self.assigned_operator.team_member.name
                #     del kwargs['is_team_assignment']
                # else:
                self.assigned_team_name = None
                logger.info(f'operator_here---->{assigned_operator}')
            else:
                self.assigned_team = assigned_operator.first().team_member.id
                if all_team:
                    # '<All>' is a sentinel meaning "every team" downstream.
                    self.assigned_team_name = '<All>'
                    self.assigned_operator = '<All>'
                else:
                    self.assigned_team_name = assigned_operator.first(
                    ).team_member.name
                    self.assigned_operator = assigned_operator.first(
                    ).team_member.name
            del kwargs['assigned_operator']
        else:
            assigned_operator = None
            self.assigned_team_name = None

        if 'assignment_type' in kwargs:
            self.assignment_type = kwargs['assignment_type']
            del kwargs['assignment_type']
        if 'assigner' in kwargs:
            assigner = kwargs['assigner']
            del kwargs['assigner']
        else:
            assigner = None
        if 'one_to_one' in kwargs:
            # NOTE(review): only the key's presence matters — the value passed
            # for 'one_to_one' is ignored; confirm that is intended.
            one_to_one = True
            del kwargs['one_to_one']
        else:
            one_to_one = False
        if 'operator_partner' in kwargs:
            # NOTE(review): consumed but never used below — confirm dead flag.
            operator_partner = kwargs['operator_partner']
            del kwargs['operator_partner']
        else:
            operator_partner = False

        # All custom kwargs have been stripped; safe to call the real save().
        super(ChatRoom, self).save(*args, **kwargs)

        if send_update:
            # Snapshot of the fields broadcast to listing channels; UUID and
            # datetime fields are stringified for JSON transport.
            fields = (
                'bot_id',
                'room_id',
                'room_name',
                'created_on',
                'updated_on',
                'bot_is_active',
                'variables',
                'status',
                'takeover',
                'assignment_type',
                'assigned_operator',
            )
            if hasattr(settings, 'CELERY_TASK') and settings.CELERY_TASK == True:
                # Use Celery
                try:
                    user = User.objects.get(pk=self.admin_id)
                    # NOTE(review): 'uuidassigned_operator_id' is almost
                    # certainly a corrupted edit of `user.uuid` (the non-Celery
                    # branch below uses `user.uuid`); as written this raises
                    # AttributeError, which the broad except swallows — fix.
                    owner_id = user.uuidassigned_operator_id
                    bot = Chatbox.objects.get(pk=self.bot_id)
                    if owner_id is not None:
                        field_dict = {
                            field: getattr(self, field)
                            if field not in ('room_id', 'bot_id', 'created_on',
                                             'updated_on')
                            else str(getattr(self, field))
                            for field in fields
                        }
                        field_dict['bot_type'] = bot.chatbot_type
                        field_dict['is_deleted'] = bot.is_deleted
                        field_dict['owner'] = bot.owner.email
                        if self.assigned_team_name == '<All>':
                            team_list_queryset = Teams.objects.filter(
                                owner__id=self.admin_id).values_list('name',
                                                                     flat=True)
                            # team_list = list(map(str, team_list_queryset))
                            team_list = []
                            for team in team_list_queryset:
                                team_list.append(str(team))
                        else:
                            team_list = [self.assigned_team_name]
                        _ = tasks.send_update.delay(str(owner_id), field_dict,
                                                    team_name=team_list)
                except Exception as ex:
                    print(ex)
            else:
                # Status has changed. This is an update
                try:
                    channel_layer = get_channel_layer()
                    user = User.objects.get(pk=self.admin_id)
                    owner_id = user.uuid
                    print('OWNER_ID', owner_id)
                    bot = Chatbox.objects.get(pk=self.bot_id)
                    if owner_id is not None:
                        field_dict = {
                            field: getattr(self, field)
                            if field not in ('room_id', 'bot_id', 'created_on',
                                             'updated_on')
                            else str(getattr(self, field))
                            for field in fields
                        }
                        # Add bot_info
                        field_dict['bot_type'] = bot.chatbot_type
                        field_dict['is_deleted'] = bot.is_deleted
                        field_dict['owner'] = bot.owner.email
                        if assigned_operator is not None:
                            # Update operator <-> owner mappings
                            if self.room_id is not None and self.status is not None:
                                assigned_team_op = [
                                    str(op_uuid.uuid)
                                    for op_uuid in assigned_operator
                                ]
                                if self.assigned_team_name is not None:
                                    # Team assignment: first notify the room's
                                    # previous team(s) that the bot is inactive
                                    # for them, then hand off to the new team.
                                    previous_team = cache.get(
                                        f'TEAM_{self.room_id}')
                                    logger.info(
                                        f'Previous Team---->{previous_team}')
                                    if previous_team is not None:
                                        dummy_fields = field_dict.copy()
                                        dummy_fields['bot_is_active'] = False
                                        for team in previous_team:
                                            _team = team.replace(" ", "")
                                            async_to_sync(
                                                channel_layer.group_send
                                            )('team_{}_{}'.format(
                                                owner_id, str(_team)), {
                                                    'type':
                                                        'listing_channel_event',
                                                    **dummy_fields,
                                                })
                                    else:
                                        # No previous team: notify operators
                                        # currently mapped to this room.
                                        client = cache.get(
                                            f"CLIENT_MAP_{owner_id}")
                                        if client is not None and str(
                                                self.room_id) in client:
                                            operators = client[str(
                                                self.room_id)]
                                            dummy_fields = field_dict.copy()
                                            dummy_fields[
                                                'bot_is_active'] = False
                                            for op in operators:
                                                async_to_sync(
                                                    channel_layer.group_send
                                                )(f'listing_channel_{str(op)}', {
                                                    'type':
                                                        'listing_channel_event',
                                                    **dummy_fields
                                                })
                                    if self.assigned_team_name == '<All>':
                                        team_list_queryset = Teams.objects.filter(
                                            owner__id=self.admin_id
                                        ).values_list('name', flat=True)
                                        # team_list = list(map(str, team_list_queryset))
                                        team_list = []
                                        for team in team_list_queryset:
                                            team_list.append(str(team))
                                    else:
                                        team_list = [self.assigned_team_name]
                                    tasks.update_operator_mappings(
                                        str(owner_id), assigned_team_op,
                                        str(self.room_id), self.status,
                                        team_list)
                                else:
                                    # Direct (non-team) assignment: notify the
                                    # operators currently mapped to this room.
                                    client = cache.get(
                                        f"CLIENT_MAP_{owner_id}")
                                    if client is not None and str(
                                            self.room_id) in client:
                                        operators = client[str(self.room_id)]
                                        dummy_fields = field_dict.copy()
                                        dummy_fields['bot_is_active'] = False
                                        for op in operators:
                                            async_to_sync(
                                                channel_layer.group_send
                                            )(f'listing_channel_{str(op)}', {
                                                'type':
                                                    'listing_channel_event',
                                                **dummy_fields
                                            })
                                        logger.info(
                                            f"checkout operator--->{client[str(self.room_id)]}"
                                        )
                                    tasks.update_operator_mappings(
                                        str(owner_id), assigned_team_op,
                                        str(self.room_id), self.status)
                        else:
                            pass
                        # Always notify the owner's own listing channel.
                        async_to_sync(channel_layer.group_send)(
                            f'listing_channel_{owner_id}', {
                                'type': 'listing_channel_event',
                                **field_dict
                            })
                        # Fan out to the room's mapped operators or team(s).
                        client_map = cache.get(f"CLIENT_MAP_{owner_id}")
                        operator_team_name = cache.get(f"TEAM_{self.room_id}")
                        if client_map is not None and str(
                                self.room_id) in client_map:
                            operator_id = client_map[str(self.room_id)]
                            if assigner is not None and str(
                                    assigner.uuid) not in operator_id:
                                operator_id.append(str(assigner.uuid))
                            logger.info(
                                f'operator_assignment--->team...{operator_team_name}'
                            )
                            if operator_team_name is not None and not one_to_one:
                                logger.info(
                                    f'operator_assignment--->team...{operator_team_name}'
                                )
                                for team in operator_team_name:
                                    _team = team.replace(" ", "")
                                    async_to_sync(channel_layer.group_send)(
                                        'team_{}_{}'.format(
                                            owner_id, str(_team)), {
                                                'type':
                                                    'listing_channel_event',
                                                **field_dict,
                                            })
                            else:
                                if len(operator_id) != 0:
                                    logger.info(
                                        f'operator_assignment--->operator')
                                    for operator in operator_id:
                                        async_to_sync(
                                            channel_layer.group_send)(
                                                'listing_channel_' +
                                                str(operator), {
                                                    'type':
                                                        'listing_channel_event',
                                                    **field_dict,
                                                })
                except Exception as ex:
                    print(ex)
class CaseTestPlanModel(models.Model):
    """A scheduled test plan: which cases to run, from which git branch,
    against which environment, optionally on a crontab schedule.
    """

    # Display name of the plan.
    name = models.CharField(verbose_name="计划名称", max_length=30, null=False,
                            help_text="测试计划名")
    description = models.TextField(verbose_name="描述", null=True, blank=True,
                                   default='', help_text="描述说明")
    # Whether the plan's cases run in parallel.
    parallel = models.BooleanField(verbose_name="是否并行方式执行", default=False,
                                   help_text="是否并行执行")
    # Scheduling: enable the timer and provide a crontab expression.
    timer_enable = models.BooleanField(verbose_name="是否启用定时器", default=False,
                                       help_text="是否启用定时器")
    crontab = models.CharField(verbose_name="crontab", max_length=300,
                               null=True, blank=True, help_text="crontab")
    # External identifier of the plan (distinct from the DB primary key).
    plan_id = models.CharField(verbose_name="计划id", max_length=50, null=False,
                               help_text="计划编号")
    # Collection of case paths to execute, stored as JSON.
    case_paths = JSONField(verbose_name="case路径集合", help_text="case路径集合",
                           null=False)
    env_file = models.CharField(verbose_name="环境配置文件路径", max_length=300,
                                null=True, blank=True, help_text="环境配置文件路径")
    project_id = models.IntegerField(verbose_name="项目id", null=False,
                                     help_text="项目id")
    # Source location: gitlab host, project and branch the cases come from.
    gitlab_url = models.CharField(max_length=100, verbose_name="gitlab地址",
                                  help_text="gitlab地址")
    gitlab_project_name = models.CharField(max_length=100,
                                           verbose_name="gitlab项目名",
                                           help_text="gitlab项目名")
    branch_name = models.CharField(max_length=20, verbose_name="当前分支",
                                   help_text="分支名称")
    create_user = models.CharField(verbose_name="创建人", max_length=30,
                                   null=False, help_text="创建人")
    create_date = models.DateTimeField(verbose_name="创建时间", auto_now_add=True,
                                       help_text="创建时间")
    update_date = models.DateTimeField(verbose_name="更新时间", auto_now=True,
                                       help_text="更新时间")

    class Meta:
        db_table = "case_test_plan"
        verbose_name = verbose_name_plural = "case测试计划"

    def __str__(self):
        return self.name
class Model(models.Model):
    # Fixture: an explicit `default=None` (as opposed to no default at all) —
    # presumably exercised by nearby JSONField default-handling tests; confirm.
    field = JSONField(default=None)
def test_validation_error(self):
    """A value that is not JSON-serializable (a UUID) fails clean()."""
    expected_message = "Value must be valid JSON."
    non_serializable = uuid.UUID("{d85e2076-b67c-4ee7-8c3a-2bf5a2cc2475}")
    json_field = JSONField()
    with self.assertRaisesMessage(ValidationError, expected_message):
        json_field.clean({"uuid": non_serializable}, None)
class Model(models.Model):
    # Minimal fixture: a JSONField with no options.
    field = JSONField()
class Model(models.Model):
    field = JSONField()

    class Meta:
        # Only materialize this model on database backends that actually
        # support JSON columns.
        required_db_features = {"supports_json_field"}
class Model(models.Model):
    field = JSONField()

    class Meta:
        # Default ordering by a JSON key lookup ('value' inside the field) —
        # presumably a fixture for ordering-by-JSON-path checks; confirm.
        ordering = ["field__value"]
class JSONModel(models.Model):
    # Simple fixture holding a single JSON value.
    value = JSONField()
class LogEntry(models.Model):
    """
    Represents an entry in the audit log. The content type is saved along
    with the textual and numeric (if available) primary key, as well as the
    textual representation of the object when it was saved. It holds the
    action performed and the fields that were changed in the transaction.

    If AuditlogMiddleware is used, the actor will be set automatically.
    Keep in mind that editing / re-saving LogEntry instances may set the
    actor to a wrong value - editing LogEntry instances is not recommended
    (and it should not be necessary).
    """

    class Action:
        """
        The actions that Auditlog distinguishes: creating, updating and
        deleting objects. Viewing objects is not logged. The values of the
        actions are numeric, a higher integer value means a more intrusive
        action. This may be useful in some cases when comparing actions
        because the ``__lt``, ``__lte``, ``__gt``, ``__gte`` lookup filters
        can be used in queries.

        The valid actions are :py:attr:`Action.CREATE`,
        :py:attr:`Action.UPDATE` and :py:attr:`Action.DELETE`.
        """

        CREATE = 0
        UPDATE = 1
        DELETE = 2

        choices = (
            (CREATE, _("create")),
            (UPDATE, _("update")),
            (DELETE, _("delete")),
        )

    content_type = models.ForeignKey(
        to="contenttypes.ContentType",
        on_delete=models.CASCADE,
        related_name="+",
        verbose_name=_("content type"),
    )
    object_pk = models.CharField(
        db_index=True, max_length=255, verbose_name=_("object pk")
    )
    object_id = models.BigIntegerField(
        blank=True, db_index=True, null=True, verbose_name=_("object id")
    )
    object_repr = models.TextField(verbose_name=_("object representation"))
    action = models.PositiveSmallIntegerField(
        choices=Action.choices, verbose_name=_("action"), db_index=True
    )
    # JSON-encoded {field_name: [old, new]} mapping (see changes_dict).
    changes = models.TextField(blank=True, verbose_name=_("change message"))
    actor = models.ForeignKey(
        to=settings.AUTH_USER_MODEL,
        on_delete=models.SET_NULL,
        blank=True,
        null=True,
        related_name="+",
        verbose_name=_("actor"),
    )
    remote_addr = models.GenericIPAddressField(
        blank=True, null=True, verbose_name=_("remote address")
    )
    timestamp = models.DateTimeField(auto_now_add=True,
                                     verbose_name=_("timestamp"))
    additional_data = JSONField(
        blank=True, null=True, verbose_name=_("additional data")
    )

    objects = LogEntryManager()

    class Meta:
        get_latest_by = "timestamp"
        ordering = ["-timestamp"]
        verbose_name = _("log entry")
        verbose_name_plural = _("log entries")

    def __str__(self):
        if self.action == self.Action.CREATE:
            fstring = _("Created {repr:s}")
        elif self.action == self.Action.UPDATE:
            fstring = _("Updated {repr:s}")
        elif self.action == self.Action.DELETE:
            fstring = _("Deleted {repr:s}")
        else:
            fstring = _("Logged {repr:s}")

        return fstring.format(repr=self.object_repr)

    @property
    def changes_dict(self):
        """
        :return: The changes recorded in this log entry as a dictionary
            object.
        """
        try:
            return json.loads(self.changes)
        except ValueError:
            # Empty or malformed JSON — treat as "no changes recorded".
            return {}

    @property
    def changes_str(self, colon=": ", arrow=" \u2192 ", separator="; "):
        """
        Return the changes recorded in this log entry as a string. The
        formatting of the string can be customized by setting alternate
        values for colon, arrow and separator. If the formatting is still
        not satisfying, please use :py:func:`LogEntry.changes_dict` and
        format the string yourself.

        NOTE(review): as a @property the parameters can never actually be
        supplied by callers; they behave as constants. Kept for backward
        compatibility.

        :param colon: The string to place between the field name and the
            values.
        :param arrow: The string to place between each old and new value.
        :param separator: The string to place between each field.

        :return: A readable string of the changes in this log entry.
        """
        substrings = []

        for field, values in self.changes_dict.items():
            substring = "{field_name:s}{colon:s}{old:s}{arrow:s}{new:s}".format(
                field_name=field,
                colon=colon,
                old=values[0],
                arrow=arrow,
                new=values[1],
            )
            substrings.append(substring)

        return separator.join(substrings)

    @property
    def changes_display_dict(self):
        """
        :return: The changes recorded in this log entry intended for display
            to users as a dictionary object.
        """
        # Get the model and model_fields
        from auditlog.registry import auditlog

        model = self.content_type.model_class()
        model_fields = auditlog.get_model_fields(model._meta.model)
        changes_display_dict = {}
        # grab the changes_dict and iterate through
        for field_name, values in self.changes_dict.items():
            # try to get the field attribute on the model
            try:
                field = model._meta.get_field(field_name)
            except FieldDoesNotExist:
                # Field no longer exists on the model: show raw values.
                changes_display_dict[field_name] = values
                continue
            values_display = []
            # handle choices fields and Postgres ArrayField to get human
            # readable version
            choices_dict = None
            if getattr(field, "choices") and len(field.choices) > 0:
                choices_dict = dict(field.choices)
            if (
                hasattr(field, "base_field")
                and isinstance(field.base_field, Field)
                and getattr(field.base_field, "choices")
                and len(field.base_field.choices) > 0
            ):
                choices_dict = dict(field.base_field.choices)

            if choices_dict:
                for value in values:
                    # FIX: was a bare `except:` plus a duplicate ValueError
                    # handler doing the same thing; a single
                    # `except Exception` keeps the behavior without also
                    # swallowing KeyboardInterrupt/SystemExit.
                    try:
                        value = ast.literal_eval(value)
                        # FIX: `type(value) is [].__class__` -> isinstance.
                        if isinstance(value, list):
                            values_display.append(
                                ", ".join(
                                    [choices_dict.get(val, "None")
                                     for val in value]
                                )
                            )
                        else:
                            values_display.append(
                                choices_dict.get(value, "None"))
                    except Exception:
                        values_display.append(choices_dict.get(value, "None"))
            else:
                try:
                    field_type = field.get_internal_type()
                except AttributeError:
                    # if the field is a relationship it has no internal type
                    # and exclude it
                    continue
                for value in values:
                    # handle case where field is a datetime, date, or time
                    # type
                    if field_type in ["DateTimeField", "DateField",
                                      "TimeField"]:
                        try:
                            value = parser.parse(value)
                            if field_type == "DateField":
                                value = value.date()
                            elif field_type == "TimeField":
                                value = value.time()
                            elif field_type == "DateTimeField":
                                value = value.replace(tzinfo=timezone.utc)
                                value = value.astimezone(
                                    gettz(settings.TIME_ZONE))
                            value = formats.localize(value)
                        except ValueError:
                            pass
                    # check if length is longer than 140 and truncate with
                    # ellipsis. FIX: guard with isinstance(str) — JSON values
                    # may be numbers/booleans, and len() on those raised
                    # TypeError, crashing the whole property.
                    if isinstance(value, str) and len(value) > 140:
                        value = "{}...".format(value[:140])

                    values_display.append(value)

            verbose_name = model_fields["mapping_fields"].get(
                field.name, getattr(field, "verbose_name", field.name)
            )
            changes_display_dict[verbose_name] = values_display
        return changes_display_dict
def test_invalid_encoder(self):
    """Passing an encoder *instance* (rather than a class) is rejected."""
    expected = "The encoder parameter must be a callable object."
    with self.assertRaisesMessage(ValueError, expected):
        JSONField(encoder=DjangoJSONEncoder())
class Model(models.Model):
    # Fixture using a callable default — the recommended way to default a
    # JSONField (each instance gets a fresh value, unlike `default={}`).
    field = JSONField(default=callable_default)
def test_invalid_decoder(self):
    """Passing a decoder *instance* (rather than a class) is rejected."""
    expected = "The decoder parameter must be a callable object."
    with self.assertRaisesMessage(ValueError, expected):
        JSONField(decoder=CustomJSONDecoder())
class BlocklistSubmission(ModelBase): SIGNOFF_PENDING = 0 SIGNOFF_APPROVED = 1 SIGNOFF_REJECTED = 2 SIGNOFF_AUTOAPPROVED = 3 SIGNOFF_PUBLISHED = 4 SIGNOFF_STATES = { SIGNOFF_PENDING: 'Pending', SIGNOFF_APPROVED: 'Approved', SIGNOFF_REJECTED: 'Rejected', SIGNOFF_AUTOAPPROVED: 'No Sign-off', SIGNOFF_PUBLISHED: 'Published to Blocks', } SIGNOFF_STATES_FINISHED = ( SIGNOFF_REJECTED, SIGNOFF_PUBLISHED, ) ACTION_ADDCHANGE = 0 ACTION_DELETE = 1 ACTIONS = { ACTION_ADDCHANGE: 'Add/Change', ACTION_DELETE: 'Delete', } FakeBlock = namedtuple('FakeBlock', ('id', 'guid', 'min_version', 'max_version', 'is_imported_from_legacy_regex', 'current_adu')) action = models.SmallIntegerField(choices=ACTIONS.items(), default=ACTION_ADDCHANGE) input_guids = models.TextField() to_block = JSONField(default=list) min_version = models.CharField(max_length=255, blank=False, default=Block.MIN) max_version = models.CharField(max_length=255, blank=False, default=Block.MAX) url = models.CharField(max_length=255, blank=True) reason = models.TextField(blank=True) updated_by = models.ForeignKey(UserProfile, null=True, on_delete=models.SET_NULL) legacy_id = models.BooleanField( default=False, help_text='Include in legacy xml blocklist too, as well as new v3', db_column='include_in_legacy', verbose_name='In legacy blocklist') signoff_by = models.ForeignKey(UserProfile, null=True, on_delete=models.SET_NULL, related_name='+') signoff_state = models.SmallIntegerField(choices=SIGNOFF_STATES.items(), default=SIGNOFF_PENDING) class Meta: db_table = 'blocklist_blocklistsubmission' def __str__(self): guids = splitlines(self.input_guids) repr = [', '.join(guids)] if self.url: repr.append(str(self.url)) if self.reason: repr.append(str(self.reason)) # A single uuid-style guid is 38, but otherwise these string limits # are pretty arbitrary and just so the str repr isn't _too_ long. trimmed = [rep if len(rep) < 40 else rep[0:37] + '...' 
for rep in repr] return f'{self.get_signoff_state_display()}: {"; ".join(trimmed)}' @property def in_legacy_blocklist(self): # This is for consitency with Block. Should be a boolean anyway. return bool(self.legacy_id) def get_changes_from_block(self, block): # return a dict with properties that are different from a given block, # as a dict of property_name: (old_value, new_value). changes = {} properties = ('min_version', 'max_version', 'url', 'reason', 'in_legacy_blocklist') for prop in properties: if getattr(self, prop) != getattr(block, prop): changes[prop] = (getattr(block, prop), getattr(self, prop)) return changes def clean(self): min_vint = addon_version_int(self.min_version) max_vint = addon_version_int(self.max_version) if min_vint > max_vint: raise ValidationError( _('Min version can not be greater than Max version')) def get_blocks_submitted(self, load_full_objects_threshold=1_000_000_000): blocks = self.block_set.all().order_by('id') if blocks.count() > load_full_objects_threshold: # If we'd be returning too many Block objects, fake them with the # minimum needed to display the link to the Block change page. 
blocks = [ self.FakeBlock( id=block.id, guid=block.guid, min_version=None, max_version=None, is_imported_from_legacy_regex=None, current_adu=None, ) for block in blocks ] return blocks def can_user_signoff(self, signoff_user): require_different_users = not settings.DEBUG different_users = (self.updated_by and signoff_user and self.updated_by != signoff_user) return not require_different_users or different_users def all_adu_safe(self): threshold = settings.DUAL_SIGNOFF_AVERAGE_DAILY_USERS_THRESHOLD return all((lambda du: du <= threshold)(block['average_daily_users']) for block in self.to_block) def has_version_changes(self): block_ids = [block['id'] for block in self.to_block] has_new_blocks = any(not id_ for id_ in block_ids) blocks_with_version_changes_qs = Block.objects.filter( id__in=block_ids).exclude(min_version=self.min_version, max_version=self.max_version) return has_new_blocks or blocks_with_version_changes_qs.exists() def update_if_signoff_not_needed(self): is_pending = self.signoff_state == self.SIGNOFF_PENDING add_action = self.action == self.ACTION_ADDCHANGE if ((is_pending and self.all_adu_safe()) or (is_pending and add_action and not self.has_version_changes())): self.update(signoff_state=self.SIGNOFF_AUTOAPPROVED) @property def is_submission_ready(self): """Has this submission been signed off, or sign-off isn't required.""" return (self.signoff_state == self.SIGNOFF_AUTOAPPROVED or (self.signoff_state == self.SIGNOFF_APPROVED and self.can_user_signoff(self.signoff_by))) def _serialize_blocks(self): def serialize_block(block): return { 'id': block.id, 'guid': block.guid, 'average_daily_users': block.current_adu, } processed = self.process_input_guids( self.input_guids, self.min_version, self.max_version, load_full_objects=False, filter_existing=(self.action == self.ACTION_ADDCHANGE)) return [ serialize_block(block) for block in processed.get('blocks', []) ] def save(self, *args, **kwargs): if self.input_guids and not self.to_block: # serialize blocks 
so we can save them as JSON self.to_block = self._serialize_blocks() super().save(*args, **kwargs) @classmethod def _get_fake_blocks_from_guids(cls, guids): addons = list( Block.get_addons_for_guids_qs(guids).values_list( 'guid', 'average_daily_users', named=True)) adu_lookup = { addon.guid: addon.average_daily_users for addon in addons } # And then any existing block instances block_qs = Block.objects.filter(guid__in=guids).values_list( 'id', 'guid', 'min_version', 'max_version', 'legacy_id', named=True) blocks = { block.guid: cls.FakeBlock( id=block.id, guid=block.guid, min_version=block.min_version, max_version=block.max_version, is_imported_from_legacy_regex=block.legacy_id.startswith('*'), current_adu=adu_lookup.get(block.guid, -1), ) for block in block_qs } for addon in addons: block = blocks.get(addon.guid) if block: # it's an existing block continue # create a new instance block = cls.FakeBlock( id=None, guid=addon.guid, min_version=Block.MIN, max_version=Block.MAX, is_imported_from_legacy_regex=False, current_adu=adu_lookup.get(addon.guid, -1), ) blocks[addon.guid] = block return list(blocks.values()) @classmethod def process_input_guids(cls, input_guids, v_min, v_max, *, load_full_objects=True, filter_existing=True): """Process a line-return separated list of guids into a list of invalid guids, a list of guids that are blocked already for v_min - vmax, and a list of Block instances - including new Blocks (unsaved) and existing partial Blocks. If `filter_existing` is False, all existing blocks are included. 
If `load_full_objects=False` is passed the Block instances are fake (namedtuples) with only minimal data available in the "Block" objects: Block.guid, Block.current_adu, Block.min_version, Block.max_version, Block.is_imported_from_legacy_regex """ all_guids = set(splitlines(input_guids)) unfiltered_blocks = (Block.get_blocks_from_guids(all_guids) if load_full_objects else cls._get_fake_blocks_from_guids(all_guids)) if len(all_guids) == 1 or not filter_existing: # We special case a single guid to always update it. blocks = unfiltered_blocks existing_guids = [] else: # unfiltered_blocks contains blocks that don't need to be updated. blocks = [ block for block in unfiltered_blocks if not block.id or block.min_version != v_min or block.max_version != v_max ] existing_guids = [ block.guid for block in unfiltered_blocks if block not in blocks ] blocks.sort(key=lambda block: block.current_adu, reverse=True) invalid_guids = list(all_guids - set(existing_guids) - {block.guid for block in blocks}) return { 'invalid_guids': invalid_guids, 'existing_guids': existing_guids, 'blocks': blocks, } def save_to_block_objects(self): assert self.is_submission_ready assert self.action == self.ACTION_ADDCHANGE submit_legacy_switch = waffle.switch_is_active( 'blocklist_legacy_submit') fields_to_set = [ 'min_version', 'max_version', 'url', 'reason', 'updated_by', ] all_guids_to_block = [block['guid'] for block in self.to_block] for guids_chunk in chunked(all_guids_to_block, 100): blocks = save_guids_to_blocks(guids_chunk, self, fields_to_set=fields_to_set) if submit_legacy_switch: if self.in_legacy_blocklist: legacy_publish_blocks(blocks) else: legacy_delete_blocks(blocks) self.save() self.update(signoff_state=self.SIGNOFF_PUBLISHED) def delete_block_objects(self): assert self.is_submission_ready assert self.action == self.ACTION_DELETE block_ids_to_delete = [block['id'] for block in self.to_block] submit_legacy_switch = waffle.switch_is_active( 'blocklist_legacy_submit') for ids_chunk in 
chunked(block_ids_to_delete, 100): blocks = list(Block.objects.filter(id__in=ids_chunk)) Block.preload_addon_versions(blocks) for block in blocks: block_activity_log_delete(block, submission_obj=self) if submit_legacy_switch: legacy_delete_blocks(blocks) self.save() Block.objects.filter(id__in=ids_chunk).delete() self.update(signoff_state=self.SIGNOFF_PUBLISHED) @classmethod def get_submissions_from_guid(cls, guid, excludes=SIGNOFF_STATES_FINISHED): return (cls.objects.exclude(signoff_state__in=excludes).filter( to_block__contains={'guid': guid}))
def test_custom_encoder(self): field = JSONField(encoder=DjangoJSONEncoder) value = uuid.UUID("{d85e2076-b67c-4ee7-8c3a-2bf5a2cc2475}") field.clean({"uuid": value}, None)
# Initial schema for the monitoring app: hosts, services, metrics, and the
# request/exception event tables.
class Migration(migrations.Migration):

    # NOTE(review): creates models with no dependencies - presumably the
    # app's initial migration; confirm `initial = True` isn't expected here.
    dependencies = []

    operations = [
        # Exceptions captured while handling a monitored request.
        migrations.CreateModel(
            name='ExceptionEvent',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False,
                                        auto_created=True, primary_key=True)),
                ('created', models.DateTimeField(db_index=True)),
                ('received', models.DateTimeField(db_index=True)),
                ('error_type', models.CharField(max_length=255,
                                                db_index=True)),
                ('error_data', models.TextField(default='')),
            ],
        ),
        # A machine being monitored.
        migrations.CreateModel(
            name='Host',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False,
                                        auto_created=True, primary_key=True)),
                ('name', models.CharField(unique=True, max_length=255)),
                ('ip', models.GenericIPAddressField()),
                ('active', models.BooleanField(default=True)),
            ],
        ),
        # A named metric (values live in MetricValue).
        migrations.CreateModel(
            name='Metric',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False,
                                        auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=255, db_index=True)),
            ],
        ),
        # Free-form label attached to metric values.
        migrations.CreateModel(
            name='MetricLabel',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False,
                                        auto_created=True, primary_key=True)),
                ('name', models.TextField(default='', blank=True)),
            ],
        ),
        # One observed metric value over a validity window; stored both as
        # text (`value`) and, when numeric, as a decimal (`value_num`).
        migrations.CreateModel(
            name='MetricValue',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False,
                                        auto_created=True, primary_key=True)),
                ('valid_from', models.DateTimeField(db_index=True)),
                ('valid_to', models.DateTimeField(db_index=True)),
                ('value', models.CharField(max_length=255)),
                ('value_num', models.DecimalField(default=None, null=True,
                                                  max_digits=16,
                                                  decimal_places=4,
                                                  blank=True)),
                ('value_raw', models.TextField(default=None, null=True,
                                               blank=True)),
                ('data', JSONField(default=dict)),
                ('label', models.ForeignKey(to='monitoring.MetricLabel',
                                            on_delete=models.CASCADE)),
            ],
        ),
        # A resource (layer, map, document, ...) metrics can refer to.
        migrations.CreateModel(
            name='MonitoredResource',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False,
                                        auto_created=True, primary_key=True)),
                ('name', models.CharField(default='', max_length=255,
                                          blank=True)),
                ('type', models.CharField(default='', max_length=255,
                                          choices=[('', 'No resource'),
                                                   ('layer', 'Layer'),
                                                   ('map', 'Map'),
                                                   ('document', 'Document'),
                                                   ('style', 'Style'),
                                                   ('admin', 'Admin'),
                                                   ('other', 'Other')])),
            ],
        ),
        # One monitored HTTP/OWS request with client and response details.
        migrations.CreateModel(
            name='RequestEvent',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False,
                                        auto_created=True, primary_key=True)),
                ('created', models.DateTimeField(db_index=True)),
                ('received', models.DateTimeField(db_index=True)),
                ('ows_type', models.CharField(default='other',
                                              max_length=255,
                                              choices=[('TMS', 'TMS'),
                                                       ('WMS-C', 'WMS-C'),
                                                       ('WMTS', 'WMTS'),
                                                       ('WCS', 'WCS'),
                                                       ('WFS', 'WFS'),
                                                       ('WMS', 'WMS'),
                                                       ('WPS', 'WPS'),
                                                       ('other', 'Other')])),
                ('host', models.CharField(default='', max_length=255,
                                          blank=True)),
                ('request_path', models.CharField(default='',
                                                  max_length=255)),
                ('resources', models.TextField(
                    default='',
                    help_text='Resources name (style, layer, document, map)',
                    blank=True)),
                ('request_method', models.CharField(
                    max_length=16,
                    choices=[('GET', 'GET'), ('POST', 'POST'),
                             ('HEAD', 'HEAD'), ('OPTIONS', 'OPTIONS'),
                             ('PUT', 'PUT'), ('DELETE', 'DELETE')])),
                ('response_status', models.PositiveIntegerField()),
                ('response_size', models.PositiveIntegerField(default=0)),
                ('response_time', models.PositiveIntegerField(
                    default=0, help_text='Response processing time in ms')),
                ('response_type', models.CharField(default='',
                                                   max_length=255,
                                                   null=True, blank=True)),
                ('user_agent', models.CharField(default=None,
                                                max_length=255,
                                                null=True, blank=True)),
                ('user_agent_family', models.CharField(default=None,
                                                       max_length=255,
                                                       null=True,
                                                       blank=True)),
                ('client_ip', models.GenericIPAddressField()),
                ('client_lat', models.DecimalField(default=None, null=True,
                                                   max_digits=8,
                                                   decimal_places=5,
                                                   blank=True)),
                ('client_lon', models.DecimalField(default=None, null=True,
                                                   max_digits=8,
                                                   decimal_places=5,
                                                   blank=True)),
                ('client_country', models.CharField(default=None,
                                                    max_length=255,
                                                    null=True, blank=True)),
                ('client_region', models.CharField(default=None,
                                                   max_length=255,
                                                   null=True, blank=True)),
                ('client_city', models.CharField(default=None,
                                                 max_length=255,
                                                 null=True, blank=True)),
                ('custom_id', models.CharField(default=None, max_length=255,
                                               null=True, db_index=True,
                                               blank=True)),
            ],
        ),
        # A service running on a Host, checked periodically.
        migrations.CreateModel(
            name='Service',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False,
                                        auto_created=True, primary_key=True)),
                ('name', models.CharField(unique=True, max_length=255)),
                # Default check interval: 60 seconds.
                ('check_interval', models.DurationField(
                    default=datetime.timedelta(0, 60))),
                ('last_check', models.DateTimeField(null=True, blank=True)),
                ('active', models.BooleanField(default=True)),
                ('notes', models.TextField(null=True, blank=True)),
                ('url', models.URLField(default='', null=True, blank=True)),
                ('host', models.ForeignKey(to='monitoring.Host',
                                           on_delete=models.CASCADE)),
            ],
        ),
        # The kind of service (GeoNode/GeoServer, service- or host-level).
        migrations.CreateModel(
            name='ServiceType',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False,
                                        auto_created=True, primary_key=True)),
                ('name', models.CharField(unique=True, max_length=255,
                                          choices=[
                                              ('geonode', 'GeoNode'),
                                              ('geoserver', 'GeoServer'),
                                              ('hostgeoserver',
                                               'Host (GeoServer)'),
                                              ('hostgeonode',
                                               'Host (GeoNode)')
                                          ])),
            ],
        ),
        # M2M-style join: which metrics apply to which service type.
        migrations.CreateModel(
            name='ServiceTypeMetric',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False,
                                        auto_created=True, primary_key=True)),
                ('metric', models.ForeignKey(to='monitoring.Metric',
                                             on_delete=models.CASCADE)),
                ('service_type', models.ForeignKey(
                    to='monitoring.ServiceType',
                    on_delete=models.CASCADE)),
            ],
        ),
        # Foreign keys added after all models exist, to avoid ordering
        # problems between the CreateModel operations above.
        migrations.AddField(
            model_name='service',
            name='service_type',
            field=models.ForeignKey(to='monitoring.ServiceType',
                                    on_delete=models.CASCADE),
        ),
        migrations.AddField(
            model_name='requestevent',
            name='service',
            field=models.ForeignKey(to='monitoring.Service',
                                    on_delete=models.CASCADE),
        ),
        # A resource is identified by its (name, type) pair.
        migrations.AlterUniqueTogether(
            name='monitoredresource',
            unique_together=set([('name', 'type')]),
        ),
        migrations.AddField(
            model_name='metricvalue',
            name='resource',
            field=models.ForeignKey(to='monitoring.MonitoredResource',
                                    on_delete=models.CASCADE),
        ),
        migrations.AddField(
            model_name='metricvalue',
            name='service',
            field=models.ForeignKey(to='monitoring.Service',
                                    on_delete=models.CASCADE),
        ),
        migrations.AddField(
            model_name='metricvalue',
            name='service_metric',
            field=models.ForeignKey(to='monitoring.ServiceTypeMetric',
                                    on_delete=models.CASCADE),
        ),
        migrations.AddField(
            model_name='exceptionevent',
            name='request',
            field=models.ForeignKey(to='monitoring.RequestEvent',
                                    on_delete=models.CASCADE),
        ),
        migrations.AddField(
            model_name='exceptionevent',
            name='service',
            field=models.ForeignKey(to='monitoring.Service',
                                    on_delete=models.CASCADE),
        ),
    ]