def __init__(self, profile):
    """Append one shot-count field per entry in the profile.

    ``profile`` is the number of shots; the generated fields are
    labelled "Schuss 1" through "Schuss <profile>".

    NOTE(review): no super().__init__() call here — confirm the base
    class performs its own initialisation elsewhere.
    """
    for index in range(profile):
        label = "Schuss {}".format(index + 1)
        self.fields.append(PositiveIntegerField(verbose_name=label))
class UniqueIdentifier(models.Model):
    """A named identifier attached to an arbitrary model instance.

    ``referent`` is a generic relation resolved through ``content_type``
    and ``object_id``; both halves are nullable, so an identifier may
    exist without (or outlive) its referent.
    """
    # Generic relation: resolved from content_type + object_id below.
    referent = GenericForeignKey()
    # Nullable so the identifier can survive deletion of the referent.
    content_type = ForeignKey(ContentType, null=True, blank=True, on_delete=models.CASCADE)
    object_id = PositiveIntegerField(null=True, blank=True)
    # Indexed creation timestamp, set once on insert.
    created = models.DateTimeField(db_index=True, auto_now_add=True)
    name = CharField(max_length=255)
class Program(Model):
    """
    A program is a single step in a pipeline, a program that's run.

    Example Command:

        command_line: samtools view -bS ${sam}.sam | samtools sort | samtools index ${sam}
        inputs: ${file}.sam
        outputs: @{file}.bam

    The {file} part in the original filename is taken from the input and used
    for the output filename. The output file may end up in a different
    directory.

        stampy.py -g ${fasta} -h ${fasta} -o @{file}.sam -f sam -M ${fastq1}.fastq ${fastq2}.fastq;
    """
    # Error message templates used while preparing input files.
    ER1 = "Input '%s' no file matches pattern '%s'"
    ER2 = "Input '%s' file '%s' doesn't exist."
    ER3 = "Input '%s' not available in inputs"

    # Matches ${name}suffix (input) and @{name}suffix (output) tokens in the
    # command line; a quoted name ("name") marks a literal filename.
    PARSER = re.compile(r'(?P<io>\$|\@)(?P<prefix>[^{]*)' \
        r'{(?P<literal>"?)(?P<name>[-\w]+)"?\}(?P<suffix>[^\s;|>]*)')

    name = CharField(max_length=128)
    description = TextField(null=True, blank=True,
        help_text='Describe the program and what it does in detail.')
    command_line = TextField(
        help_text='Write the command line using replacement syntax '
                  'for inputs and outputs')
    keep = BooleanField(default=True,
        help_text="Should the output files be kept or deleted.")
    wait_for_files = BooleanField(default=False,
        help_text="Wait for files, use this if the cluster is not syncing files before"
                  "running the job and causing errors.")
    files = ManyToManyField('ProgramFile', related_name='use_in', blank=True,
        help_text="Files used when running this program")
    test_files = ManyToManyField('ProgramFile', related_name='test_in',
        blank=True, help_text="Files to test just this program.")
    memory = CharField(max_length=16, default='1000M', choices=[
        ('500M', '500MB'),
        ('1G', '1GB'),
        ('2G', '2GB'),
        ('5G', '5GB'),
        ('10G', '10GB'),
        ('20G', '20GB'),
        ('50G', '50GB'),
        ('100G', '100GB'),
    ], help_text="Amount of memory to request")
    limit = CharField(
        max_length=12, null=True, blank=True,
        help_text="Specify amount of time to limit this process, "
                  "[days]-[hours]:[minutes]:[seconds]")
    threads = PositiveIntegerField(default=1)

    def __str__(self):
        return self.name

    def io(self, outputs=True, inputs=True):
        """Parse the command line for input and output variables.

        Inputs ALWAYS come out first and then outputs.

        - inputs - Should inputs be yielded, set to false to ignore
        - outputs - Should outputs be yielded, set to false to ignore

        Yields tuples of (io, prefix, literal, name, suffix, start, end):
        the regex groups of each match followed by its span.
        """
        # Stash the outputs so they can all be yielded after the inputs.
        outs = []
        for match in self.PARSER.finditer(self.command_line):
            data = match.groups() + match.span()
            if data[0] == '@' and outputs is True:
                outs.append(data)
            elif data[0] == '$' and inputs is True:
                yield data
        for data in outs:
            yield data

    def prepare_files(self, for_test=False, output_dir=None, **inputs):
        """
        Prepares the files by testing the command line's required filenames
        while causing an error if there are pipeline mismatches.

        This is the main call for making a ready to use command line from
        this program given a set of input filenames.

        - for_test   - If True, will use test_files as a source of input data
        - output_dir - Should be set to the location where output files
                       need to be saved to.
        - **inputs   - A dictionary of inputs named in the command line
                       which override any program files or test_files.

        Yields a ((io, name, start, end), filename) tuple suitable for a
        dictionary of both input and output filenames.

        Raises PrepareError if any input could not be resolved.
        """
        errors, files_out = [], {}
        # First take the files stored against this specific program.
        files_in = file_as_inputs(self.files)

        # Expose the pipeline bin directory (if configured) as input 'bin'.
        bins = getattr(settings, 'PIPELINE_BIN', None)
        if bins is not None:
            files_in['bin'] = [os.path.join(bins, d) for d in os.listdir(bins)]

        if output_dir is None:
            output_dir = '/tmp'

        # When running tests, we include inputs from the test files too.
        if for_test is True:
            file_as_inputs(self.test_files, files_in)

        # Override both files and test_files with inputs from the caller.
        # This is so programs can have reasonable defaults when running as
        # well as having either tests for the program or the whole pipeline.
        for name, value in inputs.items():
            if not isinstance(value, (tuple, list)):
                value = [value]
            for file_in in value:
                files_in[name].append(file_in)

        for (_io, prefix, literal, name, suffix, start, end) in self.io():
            try:
                if literal:
                    # Quoted names are used verbatim, no pattern matching.
                    fname = name + suffix
                else:
                    args = (_io, prefix, name, suffix)
                    fname = self.prepare_file(files_in, files_out, *args)
                # Bare filenames are placed into the output directory.
                if '/' not in fname:
                    fname = os.path.join(output_dir, fname)
                yield ((_io, name, start, end), fname)
            except ValueError as err:
                # Collect every error so they can be reported together.
                errors.append(str(err))

        if errors:
            raise PrepareError("Error preparing command: \n * " + \
                "\n * ".join(errors))

    def prepare_file(self, files_in, files_out, io, prefix, name, suffix):
        """Resolve one ${name}/@{name} token to a concrete filename.

        Inputs ('$') are matched against the available files; outputs
        ('@') are generated from the matched input's middle section.
        Raises ValueError when no candidate matches.
        """
        # Generic finder pattern
        pattern = "^%s(?P<name>[^/]*)%s$" % (prefix, suffix)

        # Get the file input from a list of available files.
        # It's reversed so the last added file is first.
        filenames = list(reversed(files_in[name]))
        for fname in filenames:
            ret = re.match(pattern, os.path.basename(fname))
            if ret:
                # Remember the middle section for output name generation.
                files_out[name] = ret.groupdict()['name']
                if io == '$':
                    return fname
            # For the pair-ended files, the file input is actually not a real
            # file input, but it needs to be made available to the output
            elif io == '@' and name not in files_out:
                files_out[name] = fname

        if io != '@':
            if not filenames:
                raise ValueError(self.ER3 % name)
            raise ValueError(self.ER1 % (name, pattern))

        # Populate / generate filename for output
        if name in files_out:
            # Make a new filename from the input's middle section
            # and the prefix and suffix named above.
            return ''.join([prefix, files_out[name], suffix])

        raise ValueError("Output '%s' unmatched from inputs." % name)

    def prepare_command(self, files):
        """
        Prepares the command line itself with the given files, all files
        must already be full paths prepared by prepare_files or a suitable
        test.
        """
        # Now we have inputs and outputs prepared, we can construct a command
        # With all the input and output filenames in the right places.
        cmd = self.command_line
        # Replace right-to-left so earlier match spans remain valid.
        for match in reversed(list(self.PARSER.finditer(cmd))):
            data = match.groupdict()
            key = (data['io'], data['name']) + match.span()
            if key not in files:
                raise PrepareError("Can't find file %s in %s" % (str(key), str(files)))
            filename = files[key]
            if ' ' in filename:
                # Quote paths containing spaces for the shell.
                filename = '"%s"' % filename
            # Replace the command line section that matches with our filename
            (start, end) = match.span()
            cmd = cmd[:start] + filename + cmd[end:]

        if self.wait_for_files:
            # Add a process to wait for files to exist
            template = "while [[ ! `head -c 10 \"{}\"` ]]; do sleep 30; done\n"
            for key in files:
                # It must be an input file that DOES NOT already exist.
                # BUG FIX: previously the condition ended with "or 1" (debug
                # leftover) which forced a wait-loop onto EVERY input file,
                # even those already present.
                if key[0] == '$' and not os.path.isfile(files[key]):
                    cmd = template.format(files[key]) + cmd

        # Do the line replacer after the matching to preserve file placements.
        cmd = cmd.replace('\r', '').replace('\n\n', '\n')
        cmd = cmd.replace(';\n', '<NL>')
        cmd = cmd.replace('\n', ' && ')
        cmd = cmd.replace('<NL>', '\n')
        return cmd
class Dog(ExportModelOperationsMixin("dog"), Model):
    """A dog record; model operations are exported to metrics under the
    "dog" label via ExportModelOperationsMixin."""
    # Unique display name for the dog.
    name = CharField(max_length=100, unique=True)
    breed = CharField(max_length=100, blank=True, null=True)  # optional
    # Optional; presumably age in years — TODO confirm unit.
    age = PositiveIntegerField(blank=True, null=True)
class RepairModifications(SoftDeletionModel):
    """An item consumed by a repair; inherits soft-deletion behaviour
    from SoftDeletionModel."""
    # The inventory item used in the repair; removed if the Item is deleted.
    item_used = ForeignKey(Item, on_delete=models.CASCADE)
    # NOTE(review): quantity vs amount semantics are not distinguishable
    # from this code — confirm against callers.
    quantity = PositiveIntegerField(null=True)
    amount = PositiveIntegerField(null=True)
class AbstractTemplateBase(Base):
    """Abstract billing template: a model used by a recurrent generator to
    spawn concrete billing entities (see create_entity())."""
    # Content type of the billing entity this template generates.
    ct = CTypeForeignKey(editable=False).set_tags(viewable=False)
    # Raw id of the status row in the target model's status table.
    status_id = PositiveIntegerField(editable=False).set_tags(viewable=False)  # TODO: avoid deletion of status

    # function_fields = Base.function_fields.new(_VerboseStatusField())

    creation_label = pgettext_lazy('billing', 'Create a template')
    save_label = pgettext_lazy('billing', 'Save the template')

    # Cache for the status instance resolved by get_verbose_status().
    _verbose_status_cache = None

    class Meta(Base.Meta):
        abstract = True
        verbose_name = pgettext_lazy('billing', 'Template')
        verbose_name_plural = pgettext_lazy('billing', 'Templates')

    def get_absolute_url(self):
        return reverse('billing__view_template', args=(self.id,))

    @staticmethod
    def get_clone_absolute_url():
        # Cloning is disabled for templates.
        return ''

    def get_edit_absolute_url(self):
        return reverse('billing__edit_template', args=(self.id,))

    def get_delete_absolute_url(self):
        # Means that TemplateBase can not be deleted directly
        # (because it is closely linked to its RecurrentGenerator)
        return ''

    @staticmethod
    def get_lv_absolute_url():
        return reverse('billing__list_templates')

    def get_verbose_status(self):
        """Deprecated. Return the display name of this template's status,
        caching the resolved status instance on the object."""
        warnings.warn('models.AbstractTemplateBase.get_verbose_status() is deprecated ; '
                      'use function_fields.TemplateBaseVerboseStatusField instead.',
                      DeprecationWarning
                     )
        vstatus = self._verbose_status_cache
        # Re-resolve when nothing is cached or the status id changed.
        if vstatus is None or vstatus.id != self.status_id:
            # Status model is looked up dynamically on the generated entity's
            # 'status' FK, since each entity type has its own status table.
            status_model = self.ct.model_class()._meta.get_field('status').remote_field.model
            try:
                vstatus = status_model.objects.get(id=self.status_id)
            except status_model.DoesNotExist as e:
                # Dangling status_id: log and fall back to an empty-named stub.
                logger.warning('Invalid status in TemplateBase(id=%s) [%s]', self.id, e)
                vstatus = status_model(id=self.status_id, name='')
            self._verbose_status_cache = vstatus
        return vstatus.name

    @property
    def verbose_status(self):
        # Deprecated property wrapper around get_verbose_status().
        warnings.warn('AbstractTemplateBase.verbose_status is deprecated '
                      '(see get_verbose_status() warning).',
                      DeprecationWarning
                     )
        return self.get_verbose_status()

    def create_entity(self):
        "This method is used by the generation job"
        instance_class = self.ct.model_class()
        instance = instance_class()
        # build() copies template data onto the new entity — see the
        # entity classes for specifics.
        instance.build(self)
        # Common rules for the recurrent generation of a "base" object for billing app.
        # See base's child for specific rules
        instance.generate_number()
        # Fixed 30-day expiration window. TODO: user configurable rules ???
        instance.expiration_date = instance.issuing_date + timedelta(days=30)
        instance.additional_info = self.additional_info
        instance.payment_terms = self.payment_terms
        instance.save()
        return instance
class SimRun(Model):
    ''' corresponds to a simulation run of a particular instrument - but
    SimRun is decoupled from the rest of the db '''
    # Denormalized owner/group/instrument info (stored as plain strings,
    # not FKs — deliberate decoupling per the class docstring).
    owner_username = CharField(max_length=200, blank=True, null=True)
    group_name = CharField(max_length=200, blank=True, null=True)
    instr_displayname = CharField(max_length=200, blank=True, null=True)

    # Simulation parameters.
    neutrons = PositiveIntegerField(default=1000000)
    seed = PositiveIntegerField(default=0)
    scanpoints = PositiveIntegerField(default=1)
    gravity = BooleanField(default=False)
    # JSON-encoded dict of simulation parameters; see params property.
    params_str = CharField(max_length=1000)
    force_run = BooleanField(default=False)
    enable_cachefrom = BooleanField(default=False)

    # meta-fields below this line
    created = DateTimeField('date created', default=timezone.now)
    started = DateTimeField('date started', blank=True, null=True)
    complete = DateTimeField('date complete', blank=True, null=True)
    failed = DateTimeField('date failed', blank=True, null=True)
    fail_str = CharField(max_length=1000, blank=True, null=True)
    data_folder = CharField(max_length=200, blank=True, null=True)
    # JSON-encoded lists, exposed via the properties below.
    plot_files_str = CharField(max_length=2000, default='[]')
    plot_files_log_str = CharField(max_length=2000, default='[]')
    data_files_str = CharField(max_length=2000, default='[]')

    # Each property below round-trips its *_str field through JSON so
    # callers work with Python lists/dicts instead of raw strings.
    @property
    def plot_files(self):
        return json.loads(self.plot_files_str)

    @plot_files.setter
    def plot_files(self, pf):
        self.plot_files_str = json.dumps(pf)

    @property
    def plot_files_log(self):
        return json.loads(self.plot_files_log_str)

    @plot_files_log.setter
    def plot_files_log(self, pf):
        self.plot_files_log_str = json.dumps(pf)

    @property
    def data_files(self):
        return json.loads(self.data_files_str)

    @data_files.setter
    def data_files(self, df):
        self.data_files_str = json.dumps(df)

    @property
    def params(self):
        return json.loads(self.params_str)

    @params.setter
    def params(self, p):
        self.params_str = json.dumps(p)

    def status(self):
        # Derived lifecycle state: checked in precedence order since a run
        # that completed also has 'started' set.
        if self.complete:
            return 'Complete'
        elif self.failed:
            return 'Error'
        elif self.started:
            return 'Running'
        else:
            return 'Init'

    def __str__(self):
        # e.g. "alice_TAS1_20240101_120000"
        return "%s_%s_%s" % (self.owner_username, self.instr_displayname,
                             str(self.created.strftime("%Y%m%d_%H%M%S")))
class PlaceFilterList(generics.ListAPIView):
    """List the places in the zoo."""
    # NOTE(review): dcount is annotated as a constant 0 — presumably a
    # placeholder the serializer/filter expects; confirm intent.
    queryset = Place.objects.annotate(
        dcount=Value(0, output_field=PositiveIntegerField()))
    serializer_class = PlaceFilterSerializer
    filterset_class = PlaceFilter
class Character(Model):
    """A tabletop RPG character sheet: stores base stats and derives
    combat numbers (AC, saves, attacks, loads) from levels, race,
    equipment and bonuses."""
    # I want every field of this Model to have a default so it can be left
    # empty during construction

    # Description fields
    name = CharField(max_length=128, blank=True, default='')
    alignment_law_axis = CharField(max_length=8,
                                   default=AlignmentLawAxis.TRUE.name,
                                   choices=enum_to_choices(AlignmentLawAxis))
    alignment_good_axis = CharField(max_length=8,
                                    default=AlignmentGoodAxis.NEUTRAL.name,
                                    choices=enum_to_choices(AlignmentGoodAxis))
    player = CharField(max_length=128, blank=True, default='')
    deity = CharField(max_length=128, blank=True, default='')
    homeland = CharField(max_length=128, blank=True, default='')
    race = ForeignKey(Race, null=True, default=None, on_delete=SET_NULL)
    size = CharField(max_length=16, default=Size.MEDIUM.name,
                     choices=enum_to_choices(Size))
    gender = CharField(max_length=8, default=Gender.MALE.name,
                       choices=enum_to_choices(Gender))
    age = PositiveIntegerField(default=0)
    height = PositiveIntegerField(default=0, help_text='expressed in inches')
    weight = PositiveIntegerField(default=0, help_text='expressed in pounds')
    hair = CharField(max_length=32, blank=True, default='')
    eyes = CharField(max_length=32, blank=True, default='')
    experience = PositiveIntegerField(
        default=0)  # TODO change so it can be tracked per session
    languages = ManyToManyField(Language, blank=True)

    def alignment_readable(self):
        # e.g. "Lawful Good" from the two stored axis names.
        return f'{AlignmentLawAxis[self.alignment_law_axis].value} {AlignmentGoodAxis[self.alignment_good_axis].value}'

    def size_readable(self):
        return Size[self.size].value

    def gender_readable(self):
        return Gender[self.gender].value

    def height_readable(self):
        # Inches -> feet'inches" display.
        return f'{self.height // 12}\' {self.height % 12}"'

    def weight_readable(self):
        return f'{self.weight} lbs.'

    # Base ability scores; effective scores add bonuses (see methods below).
    base_strength = PositiveIntegerField(default=10)
    base_dexterity = PositiveIntegerField(default=10)
    base_constitution = PositiveIntegerField(default=10)
    base_intelligence = PositiveIntegerField(default=10)
    base_wisdom = PositiveIntegerField(default=10)
    base_charisma = PositiveIntegerField(default=10)

    # Currency on hand.
    copper = PositiveIntegerField(default=0)
    silver = PositiveIntegerField(default=0)
    gold = PositiveIntegerField(default=0)
    platinum = PositiveIntegerField(default=0)

    def strength(self):
        return self.base_strength + get_total_bonus(self, 'strength')

    def strength_modifier(self):
        return get_modifier(self.strength())

    def dexterity(self):
        return self.base_dexterity + get_total_bonus(self, 'dexterity')

    def dexterity_modifier(self):
        return get_modifier(self.dexterity())

    def constitution(self):
        return self.base_constitution + get_total_bonus(self, 'constitution')

    def constitution_modifier(self):
        return get_modifier(self.constitution())

    def intelligence(self):
        return self.base_intelligence + get_total_bonus(self, 'intelligence')

    def intelligence_modifier(self):
        return get_modifier(self.intelligence())

    def wisdom(self):
        return self.base_wisdom + get_total_bonus(self, 'wisdom')

    def wisdom_modifier(self):
        return get_modifier(self.wisdom())

    def charisma(self):
        return self.base_charisma + get_total_bonus(self, 'charisma')

    def charisma_modifier(self):
        return get_modifier(self.charisma())

    # def class_and_level(self):
    #     levels = self.levels.values('character_class__name').annotate(Count("id")).order_by()
    #     output = ', '.join(f"{level['character_class__name']} {level['id__count']}" for level in levels)
    #     return f'{self.level()}: {output}'

    def class_and_level(self):
        # TODO this works but is hideous compared to the code above (which
        # only shows the classes in a different order than I would like
        # Map class name -> number of levels taken in it.
        levels = self.levels.values('character_class__name').annotate(
            Count("id")).order_by()
        levels = {
            level['character_class__name']: level['id__count']
            for level in levels
        }
        # Walk classes in level-taken order so the first class comes first.
        o = self.levels.values('character_class__name').order_by(
            'character_level')
        res = []
        for l in o:
            if l['character_class__name'] in levels:
                res.append(
                    f"{l['character_class__name']} {levels[l['character_class__name']]}"
                )
                del levels[l['character_class__name']]
        output = ', '.join(res)
        return f'{self.level()}: {output}'

    def level(self):
        return self.levels.count()

    def hit_dice(self):
        return self.level()

    def get_starting_hit_points(self):
        # TODO error handling, but I want to handle the specific error
        return self.levels.filter(
            character_level=1).get().character_class.hit_die

    def hit_points(self):
        # Starting HP + rolled HP + CON modifier per level + flat bonuses.
        return self.get_starting_hit_points() \
            + sum(roll.result for roll in self.roll_set.all()) \
            + get_modifier(self.constitution()) * self.level() \
            + get_total_bonus(self, 'hit points')

    def damage_resistance(self):
        return 0 + get_total_bonus(self, 'damage resistance')

    def initiative(self):
        return get_modifier(self.dexterity()) + get_total_bonus(
            self, 'initiative')

    def safe_race(self):
        # Fall back to Human when no race is set.
        return self.race or Race.objects.get(name='Human')

    def base_speed(self):
        return self.safe_race().base_speed + get_total_bonus(
            self, 'base speed')

    def speed_with_armor(self):
        return self.base_speed()  # TODO check gear for armor

    def fly_speed(self):
        # Best of racial fly speed and any granted fly speed, plus bonuses.
        speed = self.get_best_speed('fly')
        return max(self.safe_race().fly_speed, speed) + get_total_bonus(
            self, 'fly speed')

    def fly_maneuverability(self):
        try:
            # TODO will not work with more than 1 fly speed present
            return self.speed_set.filter(type='fly').get().maneuverability
        # NOTE(review): bare except silently swallows everything (including
        # MultipleObjectsReturned) — consider narrowing to DoesNotExist.
        except:
            return self.safe_race().fly_maneuverability

    def swim_speed(self):
        speed = self.get_best_speed('swim')
        return max(self.safe_race().swim_speed, speed) + get_total_bonus(
            self, 'swim speed')

    def climb_speed(self):
        speed = self.get_best_speed('climb')
        return max(self.safe_race().climb_speed, speed) + get_total_bonus(
            self, 'climb speed')

    def burrow_speed(self):
        speed = self.get_best_speed('burrow')
        return max(self.safe_race().burrow_speed, speed) + get_total_bonus(
            self, 'burrow speed')

    def get_best_speed(self, type):
        # TODO maneuverability
        try:
            return max(s.speed for s in self.speed_set.filter(type=type).all())
        except ValueError:
            # max() on an empty sequence: no speeds of this type.
            return 0

    # TODO refactor the 3 AC calculations into a coherent whole
    def armor_class(self):
        try:
            # Armor can cap the usable DEX modifier.
            max_dex = min(item.max_dex for item in self.ac_items()
                          if item.max_dex is not None)
            dexterity_modifier = min(max_dex, self.dexterity_modifier())
        except ValueError:
            # If there is no AC item with a max dex
            dexterity_modifier = self.dexterity_modifier()
        return 10 + dexterity_modifier + get_total_bonus(self, 'armor class')

    def armor_class_modifiers(self):
        return ''

    def armor_class_notes(self):
        #TODO make notes and modifier for everything, but make them show only when present
        return ', '.join(note.value
                         for note in self.note_set.filter(to='armor class'))

    def touch_armor_class(self):
        # Touch AC ignores armor, shield and natural armor bonuses.
        return self.armor_class() - get_total_bonus(
            self, 'armor class',
            bonus_types=['armor', 'shield', 'natural armor'])

    def flatfooted_armor_class(self):
        # Flat-footed AC drops DEX and dodge bonuses.
        return 10 + get_total_bonus(self, 'armor class') - get_total_bonus(
            self, 'armor class', bonus_types=['dodge'])

    def fortitude_save(self):
        # Sum per-class base saves, then add CON modifier and bonuses.
        levels = self.levels.values('character_class__id').annotate(
            Count("id")).order_by()
        base_save = 0
        for level in levels:
            character_class = CharacterClass.objects.filter(
                id=level['character_class__id']).get()
            base_save += self.get_base_save(
                character_class.good_fortitude_progression,
                level['id__count'], character_class.is_prestige_class)
        return base_save + self.constitution_modifier() + get_total_bonus(
            self, 'fortitude save')

    def reflex_save(self):
        # Same structure as fortitude_save(), keyed on DEX.
        levels = self.levels.values('character_class__id').annotate(
            Count("id")).order_by()
        base_save = 0
        for level in levels:
            character_class = CharacterClass.objects.filter(
                id=level['character_class__id']).get()
            base_save += self.get_base_save(
                character_class.good_reflex_progression, level['id__count'],
                character_class.is_prestige_class)
        return base_save + self.dexterity_modifier() + get_total_bonus(
            self, 'reflex save')

    def will_save(self):
        # Same structure as fortitude_save(), keyed on WIS.
        levels = self.levels.values('character_class__id').annotate(
            Count("id")).order_by()
        base_save = 0
        for level in levels:
            character_class = CharacterClass.objects.filter(
                id=level['character_class__id']).get()
            base_save += self.get_base_save(
                character_class.good_will_progression, level['id__count'],
                character_class.is_prestige_class)
        return base_save + self.wisdom_modifier() + get_total_bonus(
            self, 'will save')

    def get_base_save(self, good, level, prestige):
        # TODO move to backend
        # Base-save progression table: prestige classes scale differently.
        if prestige:
            if good:
                return level // 2
            return (level + 1) // 3
        if good:
            return 2 + (level // 2)
        return level // 3

    def immunities(self):
        # TODO distint?
        return ', '.join(immunity.to for immunity in self.immunity_set.all())

    def save_modifiers(self):
        return ', \n'.join(modifier.value
                           for modifier in self.conditionalmodifier_set.filter(
                               to='saves').all())

    def base_attack_bonus(self):
        # Sum each class's attack progression over its level count.
        levels = self.levels.values('character_class__id').annotate(
            Count("id")).order_by()
        base_attack = 0
        for level in levels:
            character_class = CharacterClass.objects.filter(
                id=level['character_class__id']).get()
            base_attack += self.get_base_attack(
                character_class.base_attack_progression, level['id__count'])
        return base_attack

    def get_base_attack(self, type, level):
        # TODO move to backend
        if type == 'full':
            return level
        if type == 'half':
            return level // 2
        # '3/4'
        return (level * 3) // 4

    def spell_resistance(self):
        return 0 + get_total_bonus(self, 'spell resistance')

    def combat_maneuver_bonus(self):
        return self.base_attack_bonus() + self.strength_modifier(
        ) + self.get_special_size_modifier(self.size)

    def get_special_size_modifier(self, size):
        # TODO move to backend and move mapping out of the function
        mod = {
            Size.FINE: -8,
            Size.DIMINUTIVE: -4,
            Size.TINY: -2,
            Size.SMALL: -1,
            Size.MEDIUM: 0,
            Size.LARGE: 1,
            Size.HUGE: 2,
            Size.GARGANTUAN: 4,
            Size.COLOSSAL: 8,
        }
        return mod[Size[size]]

    def combat_maneuver_defense(self):
        # CMD = 10 + BAB + STR + DEX + size, plus AC bonuses of the listed
        # types only (armor/shield/natural armor don't apply to CMD).
        return 10 + self.base_attack_bonus() + self.strength_modifier() + \
            self.dexterity_modifier() + \
            self.get_special_size_modifier(self.size) + get_total_bonus(
                self, 'armor class', bonus_types=[
                    'circumstance', 'deflection', 'dodge', 'insight', 'luck',
                    'morale', 'profane', 'sacred', 'penalty'
                ])

    def attacks(self):
        return self.attack_set.all()

    def full_attacks(self):
        return self.fullattack_set.all()

    def attacks_notes(self):
        return ', '.join(note.value
                         for note in self.note_set.filter(to='attacks'))

    def skill_bonus(self, skill):
        # +3 class-skill bonus applies only when the skill is trained.
        trained_bonus = 3 if (self.trained(skill)
                              and self.class_skill(skill)) else 0
        return self.rank_set.filter(to=skill).count() + trained_bonus + \
            self.get_ability_modifier(skill.key_ability) + \
            get_total_bonus(self, skill.name.lower())

    def get_ability_modifier(self, name):
        # Dispatch to e.g. strength_modifier() by ability name.
        return getattr(self, f'{name}_modifier')()

    def trained(self, skill):
        return self.rank_set.filter(to=skill).exists()

    def class_skill(self, skill):
        return self.classskill_set.filter(skill=skill).exists() or \
            self.levels.filter(character_class__class_skills=skill).exists()

    def languages_readable(self):
        return ', '.join(language.name for language in self.languages.all())

    def ac_items(self):
        return []  # TODO

    def light_load(self):
        return get_carrying_capacity(self.strength())[0]  # TODO size, pedalism

    def medium_load(self):
        return get_carrying_capacity(self.strength())[1]  # TODO size, pedalism

    def heavy_load(self):
        return get_carrying_capacity(self.strength())[2]  # TODO size, pedalism

    #def get_total_bonus(self, ...):
    #    return calculate_bonus(self.bonus_set.filter(...))

    def attack_bonus(self, specific_weapon):
        # TODO proficiency, weapon properties (enhancement)
        return self.base_attack_bonus() + self.strength_modifier(
        ) + get_total_bonus(self, 'attack')

    def critical(self, specific_weapon):
        return specific_weapon.critical()

    def damage_type(self, specific_weapon):
        return specific_weapon.damage_type()

    def range(self, specific_weapon):
        return specific_weapon.range()

    def ammunition(self, specific_weapon):
        return specific_weapon.ammunition

    def damage(self, specific_weapon):
        damage_bonus = self.strength_modifier() + get_total_bonus(
            self, 'damage')
        damage = specific_weapon.damage()
        # e.g. "1d8+3"
        return f'{damage[0]}d{damage[1]}+{damage_bonus}'

    def __str__(self) -> str:
        return self.name
class Game(Model):
    """A scoring game: teams, schedule window (start/end), mode/status,
    and the auto-created Gold and Gray service teams."""
    class Meta:
        verbose_name = '[Game] Game'
        verbose_name_plural = '[Game] Games'

    class Options:
        json = False

    objects = GameManager()
    name = CharField('Game Name', max_length=64)
    start = DateTimeField('Game Start', null=True)
    ports = ManyToManyField('scorebot_db.Port', blank=True)
    end = DateTimeField('Game Finish', null=True, blank=True)
    mode = PositiveIntegerField('Game Mode', default=0, choices=GAME_MODES)
    status = PositiveIntegerField('Game Status', default=0, choices=GAME_STATUS)
    settings = ForeignKey('scorebot_db.Settings', on_delete=SET_NULL,
                          null=True, blank=True, related_name='games')
    # Special teams created automatically in save() when missing.
    goldteam = OneToOneField('scorebot_db.Team', null=True, blank=True,
                             editable=False, on_delete=SET_NULL,
                             related_name='gold')
    grayteam = OneToOneField('scorebot_db.ScoreTeam', null=True, blank=True,
                             editable=False, on_delete=SET_NULL,
                             related_name='gray')

    def __str__(self):
        # Include whichever of start/end timestamps are set.
        if self.start is not None and self.end is not None:
            return '[Game] %s (%s - %s) %s - %s, Teams: %d' % (
                self.name, self.get_mode_display(), self.get_status_display(),
                self.start.strftime('%m/%d/%y %H:%M'),
                self.end.strftime('%m/%d/%y %H:%M'), self.teams.all().count())
        elif self.start is not None:
            return '[Game] %s (%s - %s) %s, Teams: %d' % (
                self.name, self.get_mode_display(), self.get_status_display(),
                self.start.strftime('%m/%d/%y %H:%M'),
                self.teams.all().count())
        return '[Game] %s (%s - %s) Teams: %d' % (
            self.name, self.get_mode_display(), self.get_status_display(),
            self.teams.all().count())

    def __len__(self):
        # Elapsed game time in seconds (0 if the game never started).
        # NOTE(review): timedelta.seconds ignores whole days — confirm games
        # never span more than 24h, otherwise .total_seconds() is intended.
        if self.start is not None and self.end is not None:
            return (self.end - self.start).seconds
        elif self.start is not None:
            return (now() - self.start).seconds
        return 0

    def get_path(self):
        return self.name

    def get_name(self):
        return self.name

    def __bool__(self):
        # A Game is truthy only while actively running.
        return self.status == GAME_RUNNING and self.start is not None

    def get_json(self):
        return {
            'name': self.name,
            'mode': self.get_mode_display(),
            'status': self.get_status_display(),
            'end': (self.end.isoformat() if self.end is not None else None),
            'start': (self.start.isoformat()
                      if self.start is not None else None),
            'teams': [
                team.get_json()
                for team in get('PlayerTeam').objects.filter(game=self)
            ]
        }

    def start_game(self):
        # Flip to running, stamp the start time and broadcast to the logs.
        self.status = GAME_RUNNING
        self.start = now()
        self.save()
        Events.info('Game "%s" was started!' % self.name)
        General.info('Game "%s" was started!' % self.name)
        self.event('Game %s has been started!' % self.name)

    def get_team_list(self):
        teams = get('ScoringTeam').objects.filter(game=self)
        return {
            'id': self.id,
            'name': self.name,
            'teams': [{
                'id': team.id,
                'name': team.name,
                'token': str(team.token.uid)
            } for team in teams]
        }

    def event(self, message):
        Events.debug('Event occured "%s".' % message)

    def get_setting(self, name):
        # Look the setting up on the linked Settings row, falling back to
        # the module-level defaults.
        General.error('Setting: "%s" requested!' % name)
        if self.settings is not None:
            try:
                return getattr(self.settings, name)
            except AttributeError:
                pass
        return GAME_SETTING_DEFAULT.get(name, None)

    def save(self, *args, **kwargs):
        # Save first so the Game has a pk, then lazily create the special
        # Gold/Gray teams and re-save to persist the new links.
        Model.save(self, *args, **kwargs)
        if self.goldteam is None:
            goldteam = new('Team', False)
            goldteam.game = self
            goldteam.name = 'Gold Team'
            goldteam.save()
            self.goldteam = goldteam
            Model.save(self, *args, **kwargs)
        if self.grayteam is None:
            grayteam = new('ScoreTeam', False)
            grayteam.game = self
            grayteam.name = 'Gray Team'
            grayteam.save()
            self.grayteam = grayteam
            Model.save(self, *args, **kwargs)

    def get_scoreboard(self, old=False):
        # Only the legacy ('old') scoreboard format is implemented.
        if old:
            return {
                'name': escape(self.name),
                'message': 'This is Scorebot',
                'mode': self.mode,
                'teams': [
                    team.get_scoreboard(old)
                    for team in get('PlayerTeam').objects.filter(game=self)
                ],
                'events': [],
                'credit': ''
            }
        return None
class Domain(CleanSave, TimestampedModel):
    """A `Domain`.

    :ivar name: The DNS suffix for this zone
    :ivar authoritative: MAAS manages this (forward) DNS zone.
    :ivar objects: An instance of the class :class:`DomainManager`.
    """

    class Meta(DefaultMeta):
        """Needed for South to recognize this model."""
        verbose_name = "Domain"
        verbose_name_plural = "Domains"

    objects = DomainManager()

    name = DomainNameField(
        max_length=256, editable=True, null=False, blank=False, unique=True,
        validators=[validate_domain_name])

    # We manage the forward zone.
    authoritative = NullBooleanField(
        default=True, db_index=True, editable=True)

    # Default TTL for this Domain.
    # If None and not overridden lower, then we will use the global default.
    ttl = PositiveIntegerField(default=None, null=True, blank=True)

    def update_kms_srv(self, kms_host=-1):
        """Create, update or delete the _vlmcs._tcp SRV record that points
        Windows clients at the KMS activation host."""
        # avoid recursive imports
        from maasserver.models import (
            DNSData,
            DNSResource,
        )
        # Since None and '' are both valid values, we use -1 as the "I want
        # the default value" indicator, and fetch the Config value
        # accordingly.
        if kms_host == -1:
            kms_host = Config.objects.get_config('windows_kms_host')
        if kms_host is None or kms_host == '':
            # No more Config.windows_kms_host, so we need to delete the kms
            # host entries that we may have created. The for loop is over 0
            # or 1 DNSResource records
            for dnsrr in self.dnsresource_set.filter(name='_vlmcs._tcp'):
                dnsrr.dnsdata_set.filter(
                    rrtype='SRV',
                    rrdata__startswith='0 0 1688 ').delete()
        else:
            # force kms_host to be an FQDN (with trailing dot.)
            validate_domain_name(kms_host)
            if not kms_host.endswith('.'):
                kms_host += '.'
            # The windows_kms_host config parameter only manages priority 0,
            # weight 0, port 1688. To do something different, use the
            # dnsresources api.
            srv_data = "0 0 1688 %s" % (kms_host)
            dnsrr, _ = DNSResource.objects.get_or_create(
                domain_id=self.id, name='_vlmcs._tcp', defaults={})
            srv, created = DNSData.objects.update_or_create(
                dnsresource_id=dnsrr.id, rrtype='SRV',
                rrdata__startswith="0 0 1688 ",
                defaults=dict(rrdata=srv_data))

    def get_base_ttl(self, rrtype, default_ttl):
        """Return the TTL to use for ``rrtype`` at this domain's apex."""
        # If there is a Resource Record set, which has a non-None TTL, then
        # it wins. Otherwise our ttl if we have one, or the passed-in
        # default.
        from maasserver.models import DNSData
        rrset = DNSData.objects.filter(
            rrtype=rrtype, ttl__isnull=False).filter(
            Q(dnsresource__name='@') | Q(dnsresource__name='')).filter(
            dnsresource__domain_id=self.id)
        if rrset.count() > 0:
            return rrset.first().ttl
        elif self.ttl is not None:
            return self.ttl
        else:
            return default_ttl

    @property
    def resource_count(self):
        """How many DNSResource names are attached to this domain."""
        from maasserver.models.dnsresource import DNSResource
        return DNSResource.objects.filter(domain_id=self.id).count()

    @property
    def resource_record_count(self):
        """How many total Resource Records come from non-Nodes."""
        count = 0
        for resource in self.dnsresource_set.all():
            count += len(resource.ip_addresses.all())
            count += len(resource.dnsdata_set.all())
        return count

    def add_delegations(self, mapping, ns_host_name, dns_ip_list, default_ttl):
        """Find any subdomains that need to be added to this domain, and add
        them.

        This function updates the mapping to add delegations and any needed
        glue records for any domains that are descendants of this one. These
        are not in the database, because they may be multi-lable (foo.bar.maas
        and maas are domains, but bar.maas isn't), and we don't want to allow
        multi-label elements in the model, due to the extreme complexity it
        introduces.
        """
        # Recursive includes.
        from maasserver.models.dnsresource import separate_fqdn
        subdomains = Domain.objects.filter(name__endswith="." + self.name)
        possible = subdomains[:]
        # Anything with an intervening domain should not be delegated from
        # this domain.
        for middle in possible:
            subdomains = subdomains.exclude(name__endswith="." + middle.name)
        for subdomain in subdomains:
            nsttl = subdomain.get_base_ttl('NS', default_ttl)
            ttl = subdomain.get_base_ttl('A', default_ttl)
            # Strip off this domain name from the end of the resource name.
            name = subdomain.name[:-len(self.name) - 1]
            # If we are authoritative for the subdomain, then generate the NS
            # and any needed glue records. These will automatically be in the
            # child zone.
            if subdomain.authoritative:
                mapping[name].rrset.add((nsttl, 'NS', ns_host_name))
                if ns_host_name.endswith("." + self.name):
                    # The ns_host_name lives in a subdomain of this
                    # subdomain, and we are authoritative for that. We need
                    # to add glue to this subdomain.
                    ns_name = separate_fqdn(ns_host_name, 'NS', self.name)[0]
                    for addr in dns_ip_list:
                        if IPAddress(addr).version == 4:
                            mapping[ns_name].rrset.add((ttl, 'A', addr))
                        else:
                            mapping[ns_name].rrset.add((ttl, 'AAAA', addr))
            # Also return any NS RRset from the dnsdata for the '@' label in
            # that zone. Add glue records for NS hosts as needed.
            for lhs in subdomain.dnsresource_set.filter(name='@'):
                for data in lhs.dnsdata_set.filter(rrtype='NS'):
                    mapping[name].rrset.add((ttl, data.rrtype, data.rrdata))
                    # Figure out if we need to add glue, and generate it if
                    # needed.
                    if data.rrdata == '@':
                        # This glue is the responsibility of the admin.
                        continue
                    if not data.rrdata.endswith("."):
                        # Non-qualified NSRR, append the domain.
                        fqdn = "%s.%s." % (data.rrdata, subdomain.name)
                    elif not data.rrdata.endswith("%s." % subdomain.name):
                        continue
                    else:
                        # NSRR is an FQDN in or under subdomain.
                        fqdn = data.rrdata
                    # If we get here, then the NS host is in subdomain, or
                    # some subdomain thereof, and is not '@' in the
                    # subdomain. Strip the trailing dot, and split the FQDN.
                    h_name, d_name = separate_fqdn(fqdn[:-1], 'NS')
                    # Make sure we look in the right domain for the
                    # addresses.
                    if d_name == subdomain.name:
                        nsrrset = subdomain.dnsresource_set.filter(
                            name=h_name)
                    else:
                        nsdomain = Domain.objects.filter(name=d_name)
                        if not nsdomain.exists():
                            continue
                        else:
                            nsdomain = nsdomain[0]
                            nsrrset = nsdomain.dnsresource_set.filter(
                                name=h_name)
                            h_name = fqdn[:-len(subdomain.name) - 2]
                    for nsrr in nsrrset:
                        for addr in nsrr.get_addresses():
                            if IPAddress(addr).version == 4:
                                mapping[h_name].rrset.add((ttl, 'A', addr))
                            else:
                                mapping[h_name].rrset.add((ttl, 'AAAA', addr))

    def __str__(self):
        return "name=%s" % self.get_name()

    def __unicode__(self):
        return "name=%s" % self.get_name()

    def is_default(self):
        """Is this the default domain?"""
        return self.id == 0

    def get_name(self):
        """Return the name of the domain."""
        return self.name

    def delete(self):
        # The default domain (id 0) must always exist.
        if self.is_default():
            raise ValidationError(
                "This domain is the default domain, it cannot be deleted.")
        super(Domain, self).delete()

    def save(self, *args, **kwargs):
        created = self.id is None
        super(Domain, self).save(*args, **kwargs)
        if created:
            self.update_kms_srv()
        # If there is a DNSResource in our parent domain that matches this
        # domain name, the migrate the DNSResource to the new domain.
        parent = Domain.objects.filter(
            name=".".join(self.name.split('.')[1:]))
        if parent.exists():
            me = parent[0].dnsresource_set.filter(
                name=self.name.split('.')[0])
            for rr in me:
                # Re-home the record at the new domain's apex.
                rr.name = '@'
                rr.domain = self
                rr.save()

    def clean_name(self):
        # Automatically strip any trailing dot from the domain name.
        if self.name is not None and self.name.endswith('.'):
            self.name = self.name[:-1]

    def clean(self, *args, **kwargs):
        super(Domain, self).clean(*args, **kwargs)
        self.clean_name()

    def render_json_for_related_rrdata(self, for_list=False):
        """Render a representation of this domain's related non-IP data,
        suitable for converting to JSON.

        :return: data"""
        from maasserver.models import (
            DNSData,
            StaticIPAddress,
        )
        rr_mapping = DNSData.objects.get_hostname_dnsdata_mapping(
            self, raw_ttl=True)
        # Smash the IP Addresses in the rrset mapping, so that the far end
        # only needs to worry about one thing.
        ip_mapping = StaticIPAddress.objects.get_hostname_ip_mapping(
            self, raw_ttl=True)
        for hostname, info in ip_mapping.items():
            # Strip this domain's suffix to get the bare hostname label.
            hostname = hostname[:-len(self.name) - 1]
            if info.system_id is not None:
                rr_mapping[hostname].system_id = info.system_id
                rr_mapping[hostname].node_type = info.node_type
            for ip in info.ips:
                if IPAddress(ip).version == 6:
                    rr_mapping[hostname].rrset.add((info.ttl, 'AAAA', ip))
                else:
                    rr_mapping[hostname].rrset.add((info.ttl, 'A', ip))
        data = []
        for hostname, info in rr_mapping.items():
            data += [{
                'name': hostname,
                'system_id': info.system_id,
                'node_type': info.node_type,
                'ttl': ttl,
                'rrtype': rrtype,
                'rrdata': rrdata
            } for ttl, rrtype, rrdata in info.rrset]
        return data
class Action(Model):
    """Audit-log entry: which user performed which action on which object.

    The target object is referenced loosely by model name plus primary key
    (no FK integrity is enforced on the target).
    """
    user = ForeignKey(User)
    action = CharField(max_length=50)
    # Loose generic reference: model/class name + primary key.
    object_type = CharField(max_length=50)
    object_id = PositiveIntegerField()
    # NOTE(review): auto_now refreshes on *every* save, so this is a
    # last-modified time, not a creation time — confirm auto_now_add was
    # not intended.
    timestamp = DateTimeField(auto_now=True)
class Event(Schedulable):
    '''
    An Event is a schedulable item with a conference model item as its
    payload.
    '''
    objects = InheritanceManager()
    eventitem = ForeignKey(EventItem,
                           on_delete=CASCADE,
                           related_name="scheduler_events")
    starttime = DateTimeField(blank=True)
    max_volunteer = PositiveIntegerField(default=0)
    approval_needed = BooleanField(default=False)
    max_commitments = PositiveIntegerField(default=0)

    def has_commitment_space(self, commitment_class_name):
        '''True when fewer commitments of the given class are booked on this
        event than max_commitments allows.'''
        from scheduler.models import Ordering
        return (Ordering.objects.filter(
            allocation__event=self,
            class_name=commitment_class_name).count() < self.max_commitments)

    @property
    def foreign_event_id(self):
        return self.eventitem.eventitem_id

    # New - fits scheduling API refactor
    def set_locations(self, locations):
        '''
        Takes a LIST of locations, removes all existing location settings
        and replaces them with the given list.  Locations are expected to be
        location items
        '''
        from scheduler.models import ResourceAllocation
        # Drop every existing Location allocation for this event.
        for assignment in self.resources_allocated.all():
            if assignment.resource.as_subtype.__class__.__name__ == "Location":
                assignment.delete()
        for location in locations:
            if location is not None:
                try:
                    loc = Location.objects.select_subclasses().get(
                        _item=location)
                # FIX: was a bare `except:` which also swallowed SystemExit /
                # KeyboardInterrupt; keep the best-effort fallback but only
                # for ordinary exceptions (e.g. Location.DoesNotExist).
                except Exception:
                    loc = Location(_item=location)
                    loc.save()
                ra = ResourceAllocation(resource=loc, event=self)
                ra.save()

    # New - from refactoring
    @property
    def people(self):
        '''Person wrappers for every Worker booked on this event.'''
        people = []
        for booking in self.resources_allocated.all():
            if booking.resource.as_subtype.__class__.__name__ == "Worker":
                person = Person(booking=booking)
                if hasattr(booking, 'label'):
                    person.label = booking.label.text
                people += [person]
        return people

    # New - from refactoring
    def allocate_person(self, person):
        '''
        allocated worker for the new model - right now, focused on create
        uses the Person from the data_transfer objects.

        Returns a BookingResponse carrying the allocation pk and any
        schedule-conflict / overbooking warnings.
        '''
        from scheduler.idd import get_schedule
        from scheduler.models import (
            Ordering,
            ResourceAllocation,
        )
        warnings = []
        # TODO is there a leak here? what happens to old workers
        # that aren't linked??
        if person.public_id:
            item = WorkerItem.objects.get(pk=person.public_id)
            worker = Worker(_item=item, role=person.role)
        else:
            worker = Worker(_item=person.user.profile, role=person.role)
        worker.save()
        if person.users:
            users = person.users
        else:
            users = [worker.workeritem.user_object]
        # Warn about any overlapping booking for each affected user, except
        # the booking being edited (person.booking_id) itself.
        for user in users:
            for conflict in get_schedule(
                    user=user,
                    start_time=self.start_time,
                    end_time=self.end_time).schedule_items:
                if not person.booking_id or (
                        person.booking_id != conflict.booking_id):
                    warnings += [Warning(code="SCHEDULE_CONFLICT",
                                         user=user,
                                         occurrence=conflict.event)]
        # Re-point an existing allocation, or create a fresh one.
        if person.booking_id:
            allocation = ResourceAllocation.objects.get(id=person.booking_id)
            allocation.resource = worker
            allocation.event = self
        else:
            allocation = ResourceAllocation(event=self, resource=worker)
        allocation.save()
        if person.commitment:
            ordering, created = Ordering.objects.get_or_create(
                allocation=allocation)
            if person.commitment.role is not None:
                ordering.role = person.commitment.role
            if person.commitment.order:
                ordering.order = person.commitment.order
            ordering.class_name = person.commitment.class_name
            ordering.class_id = person.commitment.class_id
            ordering.save()
        if self.extra_volunteers() > 0:
            warnings += [
                Warning(code="OCCURRENCE_OVERBOOKED",
                        details="Over booked by %s volunteers" % (
                            self.extra_volunteers()))]
        if person.label:
            # refactor
            from scheduler.models import Label
            label_rec, created = Label.objects.get_or_create(
                allocation=allocation)
            label_rec.text = person.label
            label_rec.save()
        return BookingResponse(warnings=warnings,
                               booking_id=allocation.pk,
                               occurrence=self)

    def role_count(self, role="Volunteer"):
        '''Number of workers booked on this event with the given role.'''
        allocations = self.resources_allocated.all()
        participants = allocations.filter(resource__worker__role=role).count()
        return participants

    @property
    def event_type_name(self):
        '''
        Get event type name.  Uses a database call
        '''
        return self.event_type.__name__

    @property
    def event_type(self):
        '''
        Get event's underlying type (ie, conference model)
        '''
        return type(self.as_subtype)

    @property
    def as_subtype(self):
        '''
        Get the representation of this Event as its underlying conference type
        '''
        return EventItem.objects.get_subclass(eventitem_id=self.eventitem_id)

    @property
    def duration(self):
        return self.eventitem.child().sched_duration

    def __str__(self):
        return self.eventitem.describe

    @property
    def location(self):
        '''The underlying item of the first Location allocated, or None.'''
        locations = Location.objects.filter(allocations__event=self)
        if len(locations) > 0:
            return locations[0]._item
        else:
            return None  # or what??

    def extra_volunteers(self):
        '''
        The difference between the max suggested # of volunteers and the
        actual number

        > 0 if there are too many volunteers for the max.  The number will
        be the # of people over booked (if there are 3 spaces, and 4
        volunteers, the value returned is 1)
        = 0 if it is at capacity
        < 0 if it is fewer than the max, the absolute value is the amount of
        space remaining (if there are 4 spaces, and 3 volunteers, the value
        will be -1)
        '''
        count = Worker.objects.filter(allocations__event=self,
                                      role='Volunteer').count()
        return count - self.max_volunteer

    # New with Scheduler API
    @property
    def labels(self):
        return self.eventlabel_set.values_list('text', flat=True)
class DisagProfile(Profile):
    """Configuration profile for a DISAG electronic target-evaluation machine.

    Each field maps to one key in the semicolon-separated configuration
    string produced by get_profile().
    """
    type_name = gettext_lazy("DISAG Profile")
    manual_profile = False
    # Target type (SCH key).
    sch_choices = (
        ("LG10", "LG 10er-Band"),
        ("LG5", "LG 5er-Band"),
        ("LGES", "LG Einzelscheibe"),
        ("LP", "LP"),
        ("ZS", "Zimmerstutzen 15m"),
        ("LS1", "Laufende Scheibe; ein Spiegel"),
        ("LS2", "Laufende Scheibe; doppel Spiegel"),
        ("KK5", "50m Scheibe"),
        ("GK10", "100m - Scheibe für Groß und Kleinkaliber"),
        ("GK5", "Kombischeibe 5-kreisig mit weißem Scheibenspiegel"),
        ("LPSF", "LP Schnellfeuer"),
        ("SCHFE", "Schnellfeuer- und Duell Scheibe."),
        ("USE1", "Benutzerdefiniert 1"),
        ("USE2", "Benutzerdefiniert 2"),
    )
    # Ring scoring mode (RIA key).
    ria_choices = (
        ("GR", "Ganze Ringe"),
        ("ZR", "Zehntel Ringe"),
        ("KR", "Keine Ringe"),
    )
    # Calibre (KAL key), optional.
    kal_choices = (
        ("22", "22"),
        ("6MM", "6MM"),
        ("6.5MM", "6.5MM"),
        ("7MM", "7MM"),
        ("30", "30"),
        ("303", "303"),
        ("8MM", "8MM"),
        ("32", "32"),
        ("33", "33"),
        ("9MM", "9MM"),
        ("357", "357"),
        ("36", "36"),
        ("38", "38"),
        ("40", "40"),
        ("44", "44"),
        ("45", "45"),
        ("50", "50"),
        ("52", "52"),
        ("54", "54"),
        ("58", "58"),
    )
    # Ring computation method (RIB key).
    rib_choices = (
        ("RB", "Ringberührungsmethode"),
        ("MI", "Schußlochmittelpunkt für Vorderlader."),
    )
    # Tie-breaker ("Teiler") evaluation (TEA key).
    tea_choices = (
        ("KT", "Keine Teilerwertung"),
        ("ZT", "Teilerwertung mit zehntel Teiler"),
        ("HT", "Teilerwertung mit hundertstel Teiler"),
    )
    sch = CharField(max_length=5, null=False, choices=sch_choices,
                    verbose_name="Scheibentype")
    ria = CharField(max_length=2, null=False, choices=ria_choices,
                    verbose_name="Ringauswertung")
    kal = CharField(max_length=5, null=True, choices=kal_choices,
                    verbose_name="Kalibereinstellung", blank=True)
    rib = CharField(max_length=2, null=False, choices=rib_choices,
                    verbose_name="Ringberechnung")
    tea = CharField(max_length=2, null=False, choices=tea_choices,
                    verbose_name="Teilerauswertung")
    teg = PositiveIntegerField(null=True, verbose_name="Teilergrenze",
                               blank=True)
    ssc = PositiveIntegerField(null=True,
                               verbose_name="Schußzahl pro Scheibe",
                               blank=True)
    seg = PositiveIntegerField(null=True, verbose_name="Schußzahl Gesamt",
                               blank=True)
    szi = PositiveIntegerField(null=True,
                               verbose_name="Schußzahl pro Zwischensumme",
                               blank=True)
    ksd = BooleanField(default=False, verbose_name="Kein Scheibenaufdruck")
    tem = BooleanField(default=False,
                       verbose_name="Teiler auf der Scheibe nur markieren")

    def get_profile(self, drt=None):
        """Build the machine configuration string.

        :param drt: optional DRT value appended as the last key.
        :return: ';'-joined "KEY=value" (or bare flag) entries.
        """
        config = list()
        config.append("SCH={}".format(self.sch))
        config.append("RIA={}".format(self.ria))
        if self.kal is not None:
            config.append("KAL={}".format(self.kal))
        config.append("RIB={}".format(self.rib))
        config.append("TEA={}".format(self.tea))
        if self.teg is not None:
            config.append("TEG={}".format(self.teg))
        if self.ssc is not None:
            config.append("SSC={}".format(self.ssc))
        if self.seg is not None:
            config.append("SEG={}".format(self.seg))
        if self.szi is not None:
            config.append("SZI={}".format(self.szi))
        if self.ksd:
            config.append("KSD")
        # BUG FIX: `tem` is a BooleanField (never None), so the previous
        # `if self.tem is not None:` appended TEM unconditionally.  Only emit
        # the flag when tem is actually True, mirroring the KSD handling.
        if self.tem:
            config.append("TEM")
        if drt:
            config.append("DRT={}".format(drt))
        return ";".join(config)

    def get_type(self):
        """Return the concrete class name (used as a type discriminator)."""
        return self.__class__.__name__
class Job(Model):
    """A job represents a work which has to be done in the 'background' (ie:
    another process than the processes which respond to the clients). They are
    useful for periodic tasks (eg: polling data, like emails, from another
    server) or long tasks (eg: generating a lot of data).

    The type of the job (see creme_core.creme_jobs.base.JobType) determines if
    the job is periodic, pseudo-periodic or not periodic.

    Periodic & pseudo-periodic (see JobType for the difference between them)
    Jobs must be 'system' Job:
     - they are created in 'populate' scripts.
     - they have no user.
     - they can not be deleted, but they can be disabled (see 'enabled' field).
     - periodic Jobs must have their 'periodicity' field filled.
     - pseudo-periodic Jobs should not have their 'periodicity' field filled,
       because it is useless ; the value settings.PSEUDO_PERIOD is used as
       security period instead.

    Not periodic Jobs are user Jobs:
     - they are dynamically created by a view.
     - they must have their 'user' filled; it correspond to the User which
       have created the Job, & who owns it. The Job should act with the
       credentials of this User.
     - A view which creates a Job should check settings.MAX_JOBS_PER_USER
       before creating a Job, and redirect to the jobs list view if the Job
       can not be created (tip: you can use Job.not_finished_jobs()).
     - They have to be deleted once they are finished, in order to create
       other user Jobs.

    The 'reference_run' field is always filled (in an automatic way at least),
    but does not means anything for not periodic Jobs ; in this case it is
    only the creation date, which is not very useful.  The 'reference_run' is
    used to compute the time of each execution, which must be something like:
        reference_run + N * periodicity
    """
    # Status values for the 'status' field.
    STATUS_WAIT = 1
    STATUS_ERROR = 10
    STATUS_OK = 20

    type_id = CharField(_('Type of job'), max_length=48, editable=False)
    user = CremeUserForeignKey(verbose_name=_('User'), null=True,
                               editable=False)
    enabled = BooleanField(_('Enabled'), default=True, editable=False)
    language = CharField(_('Language'), max_length=10, editable=False)
    # created = CreationDateTimeField(_('Creation date'))
    reference_run = DateTimeField(_('Reference run'))
    periodicity = DatePeriodField(_('Periodicity'), null=True)
    last_run = DateTimeField(_('Last run'), null=True, editable=False)
    # Number of errors of communication with the queue.
    ack_errors = PositiveIntegerField(default=0, editable=False)
    status = PositiveSmallIntegerField(
        _('Status'), editable=False, default=STATUS_WAIT,
        choices=(
            (STATUS_WAIT, _('Waiting')),
            (STATUS_ERROR, _('Error')),
            (STATUS_OK, _('Completed successfully')),
        ),
    )
    error = TextField(_('Error'), null=True, editable=False)
    # It stores the Job's parameters  # TODO: use a JSONField ?
    raw_data = TextField(editable=False)

    class Meta:
        app_label = 'creme_core'
        verbose_name = _('Job')
        verbose_name_plural = _('Jobs')
        ordering = ('id', )

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        if not self.language:
            self.language = get_language()
        self.__init_refreshing_cache()

    def __init_refreshing_cache(self):
        # Snapshot the fields refresh() compares against, so it can detect
        # changes since the last (re-)initialisation.
        self._old_periodicity = self.periodicity
        self._old_reference_run = self.reference_run
        self._old_enabled = self.enabled

    def __str__(self):
        return str(self.type)

    def __repr__(self):
        return '<Job type="{type}" id="{id}">'.format(type=self.type_id,
                                                      id=self.id)

    def get_absolute_url(self):
        return reverse('creme_core__job', args=(self.id, ))

    def get_delete_absolute_url(self):
        return reverse('creme_core__delete_job', args=(self.id, ))

    def get_edit_absolute_url(self):
        return reverse('creme_core__edit_job', args=(self.id, ))

    @property
    def data(self):
        # Parameters deserialised from raw_data.
        return jsonloads(self.raw_data)  # TODO: cache

    @data.setter
    def data(self, value):
        self.raw_data = jsondumps(value)

    @property
    def description(self):  # TODO: cache ?
        # Delegates to the JobType; falls back to an empty tuple on error so
        # a broken type never breaks the jobs list view.
        try:
            return self.type.get_description(self)
        except Exception:
            logger.exception(
                'Error when building the description of the job id="%s"',
                self.id)
        return ()

    def check_owner(self, user):
        """Return True when `user` may manage this job."""
        return user.is_superuser or self.user == user

    def check_owner_or_die(self, user):
        """Raise PermissionDenied when `user` may not manage this job."""
        if not self.check_owner(user):
            raise PermissionDenied('You are not the owner of this job')

    @property
    def is_finished(self):
        return self.status != self.STATUS_WAIT

    @classmethod
    def not_finished_jobs(cls, user):
        return cls.objects.filter(user=user, status=cls.STATUS_WAIT)

    @property
    def progress(self):
        # Returns None when the type is not registered.
        jtype = self.type
        if jtype is not None:
            return jtype.progress(self)

    @property
    def real_periodicity(self):
        # Pseudo-periodic system jobs fall back to the configured
        # PSEUDO_PERIOD security period.
        periodicity = self.periodicity
        if periodicity is None and self.user_id is None:
            periodicity = HoursPeriod(value=settings.PSEUDO_PERIOD)
        return periodicity

    def _update_ack_errors(self, incr):
        # Atomic DB-side increment; avoids racy read-modify-write.
        Job.objects.filter(id=self.id).update(
            ack_errors=F('ack_errors') + incr)

    def forget_ack_errors(self):
        self._update_ack_errors(-self.ack_errors)

    def get_config_form_class(self):
        "@see JobType.get_config_form_class()"
        jtype = self.type
        return jtype.get_config_form_class(self) if jtype is not None else None

    def refresh(self, force=False):
        """Ask to the JobManager to refresh the job if it's needed, because
        the next runs should be earlier, or disabled.
        @param force: Boolean ; <True> means the message is sent even if no
               field has changed.
        @return: Truthy when the queue reported an error.
        """
        from ..core.job import JobManagerQueue

        queue_error = False
        enabled = self.enabled
        reference_run = self.reference_run
        periodicity = self.periodicity

        if self._old_enabled != enabled or \
           self._old_reference_run != reference_run or \
           self._old_periodicity != periodicity or \
           force:
            # NB: we sent all the fields values in order to get a more robust
            #     system (even if a REFRESH-message is lost, the next one is
            #     complete).
            data = {
                'enabled': enabled,
                'reference_run': dt_to_ISO8601(reference_run),
            }
            if periodicity:
                data['periodicity'] = periodicity.as_dict()
            queue_error = JobManagerQueue.get_main_queue().refresh_job(
                self, data)
            self.__init_refreshing_cache()
        return queue_error

    def update(self, refresh_data, date_period_registry=date_period_registry):
        """Update the fields with information generated by refresh().
        Notice that the instance is not saved.
        @param refresh_data: Dictionary. See data sent on queue by refresh().
        @param date_period_registry: Instance of
               creme_core.utils.date_period.DatePeriodRegistry.
        @return: True if the instance has changed.
        """
        changed = False
        get = refresh_data.get

        enabled = get('enabled')
        if enabled is not None:
            if self.enabled != enabled:
                self.enabled = enabled
                changed = True

        ref_run_str = get('reference_run')
        if ref_run_str is not None:
            ref_run = dt_from_ISO8601(ref_run_str)
            if self.reference_run != ref_run:
                self.reference_run = ref_run
                changed = True

        periodicity_dict = get('periodicity')
        if periodicity_dict is not None:
            periodicity = date_period_registry.deserialize(periodicity_dict)
            if self.periodicity != periodicity:
                self.periodicity = periodicity
                changed = True

        return changed

    @atomic
    def save(self, *args, **kwargs):
        from ..core.job import JobManagerQueue

        created = self.pk is None

        if created and self.reference_run is None:
            self.reference_run = now()
            if self.user_id is None:
                # System job: align the reference run on the hour.
                self.reference_run = round_hour(self.reference_run)

        # super(Job, self).save(*args, **kwargs)
        super().save(*args, **kwargs)

        # Notify the queue: start user jobs on creation, refresh system jobs.
        queue_error = False
        if created:
            if self.user_id is not None:
                queue_error = JobManagerQueue.get_main_queue().start_job(self)
        elif self.user_id is None:  # System job
            queue_error = self.refresh()

        if queue_error:
            self._update_ack_errors(1)

    @property
    def stats(self):
        jtype = self.type
        return jtype.get_stats(self) if jtype is not None else []

    @property
    def type(self):
        # Resolved lazily through the registry (may return None for an
        # unknown type_id).
        from ..core.job import job_type_registry
        return job_type_registry.get(self.type_id)

    @type.setter
    def type(self, value):
        # TODO: check that it is in job_type_registry ?
        self.type_id = value.id
class MonitorProfile(Model):
    """Price-watch rule: fires when a product's price satisfies `comparator`
    against `price`, within a given watchlist."""
    product = ForeignKey('configs.AbstractProduct', on_delete=CASCADE,
                         verbose_name=_('Product'))
    watchlist = ForeignKey('watchlists.Watchlist', on_delete=CASCADE,
                           verbose_name=_('Watchlist'))
    type = ForeignKey('configs.Type', null=True, blank=True,
                      on_delete=SET_NULL, verbose_name=_('Type'))
    price = FloatField(verbose_name=_('Price'))
    comparator = CharField(max_length=6, default='__lt__',
                           choices=COMPARATOR_CHOICES,
                           verbose_name=_('Comparator'))
    color = CharField(max_length=20, default='danger', choices=COLOR_CHOICES,
                      verbose_name=_('Color'))
    info = TextField(null=True, blank=True, verbose_name=_('Monitor Info'))
    action = TextField(null=True, blank=True, verbose_name=_('Action'))
    period = TextField(null=True, blank=True, verbose_name=_('Period'))
    is_active = BooleanField(default=False, verbose_name=_('Is Active'))
    months = ManyToManyField('configs.Month',
                             verbose_name=_('Monitor Months'))
    always_display = BooleanField(default=False,
                                  verbose_name=_('Always Display'))
    row = PositiveIntegerField(null=True, blank=True, verbose_name=_('Row'))
    update_time = DateTimeField(auto_now=True, null=True, blank=True,
                                verbose_name=_('Updated'))

    class Meta:
        verbose_name = _('Monitor Profile')
        verbose_name_plural = _('Monitor Profile')

    def __str__(self):
        return str('product: %s, watchlist: %s, price: %s'
                   % (self.product.name, self.watchlist.name, self.price))

    def __unicode__(self):
        return str('product: %s, watchlist: %s, price: %s'
                   % (self.product.name, self.watchlist.name, self.price))

    def sibling(self):
        """Other profiles sharing this type/product/watchlist combination."""
        return MonitorProfile.objects.exclude(id=self.id).filter(
            type=self.type, product=self.product, watchlist=self.watchlist)

    def watchlist_items(self):
        """Watchlist items whose product is a child of this profile's
        product."""
        return WatchlistItem.objects.filter(product__parent=self.product)

    def product_list(self):
        """Products monitored by this profile; falls back to the profile's
        own product when the watchlist holds no matching items."""
        matches = WatchlistItem.objects.filter_by_product(
            product=self.product).filter(parent=self.watchlist)
        if not matches:
            return [self.product]
        return [entry.product for entry in matches]

    def sources(self):
        """Deduplicated sources over all matching watchlist items."""
        matches = WatchlistItem.objects.filter_by_product(
            product=self.product).filter(parent=self.watchlist)
        collected = []
        for entry in matches:
            collected += entry.sources.all()
        return list(set(collected))

    def active_compare(self, price):
        """True when `price` satisfies this profile's comparator against the
        threshold; None for an unrecognised comparator."""
        outcomes = {
            '__gt__': price > self.price,
            '__gte__': price >= self.price,
            '__lt__': price < self.price,
            '__lte__': price <= self.price,
        }
        return outcomes.get(self.comparator)

    @property
    def format_price(self):
        """Human-readable threshold, e.g. '<123' plus the product's price
        unit."""
        symbol = dict(COMPARATOR_CHOICES)[str(self.comparator)]
        return '{0}{1:g}{2}'.format(symbol, self.price,
                                    self.product.unit.price_unit)

    @property
    def less(self):
        # Comparators that bound prices from above.
        return ['__lt__', '__lte__']

    @property
    def greater(self):
        # Comparators that bound prices from below.
        return ['__gt__', '__gte__']

    @property
    def price_range(self):
        """[lower, upper] price interval covered by this profile, derived
        from the nearest sibling thresholds."""
        lower, upper = None, None
        if self.comparator in self.less:
            neighbours = self.sibling().filter(
                comparator__in=self.less).order_by('price')
            below = neighbours.filter(price__lt=self.price).last()
            lower = below.price if below else 0
            upper = self.price
        if self.comparator in self.greater:
            neighbours = self.sibling().filter(
                comparator__in=self.greater).order_by('price')
            above = neighbours.filter(price__gt=self.price).first()
            upper = above.price if above else 2 ** 50
            lower = self.price
        return [lower, upper]

    @property
    def low_price(self):
        return self.price_range[0]

    @property
    def up_price(self):
        return self.price_range[1]
class FailedUrl(Model):
    """A URL path that failed to resolve, with a counter of how many times
    the failure was seen (one row per distinct path)."""
    path = TextField(unique=True)
    # Incremented elsewhere each time the same path fails again.
    num_occurrences = PositiveIntegerField(default=0)
class Product(Model):
    """
    This model describes the products.
    """
    name = CharField(max_length=255, unique=True)
    # NOTE(review): populate_from=('name') is just the string 'name' (the
    # parentheses do not make a tuple) — works, but is misleading.
    product_slug = AutoSlugField(
        max_length=255,
        populate_from=('name'),
        unique=True,
        help_text=_('A short label, generally used in URLs.'))
    image = ImageField(upload_to='photos', verbose_name=_('product'))
    caption = CharField(
        max_length=255,
        blank=True,
        help_text=_('This is a short description on thumbnail'))
    description = TextField(_('description'), blank=True)
    quantity = PositiveIntegerField(null=True, default=0)
    old_price = DecimalField(null=True, max_digits=10, decimal_places=2,
                             blank=True)
    price = DecimalField(null=True, max_digits=10, decimal_places=2,
                         blank=True)
    is_active = BooleanField(default=True, verbose_name=_('active'))
    # NOTE(review): the verbose_names of is_featured ('in offer') and
    # is_offer ('featured') look swapped relative to the field names —
    # confirm intent before relying on the labels.
    is_featured = BooleanField(default=False, verbose_name=_('in offer'))
    is_offer = BooleanField(default=False, verbose_name=_('featured'),
                            help_text=_('Will display as product in offer.'))
    date_created = DateTimeField(_("Date created"), auto_now_add=True)
    date_updated = DateTimeField(auto_now=True)
    sub_category = ManyToManyField(Subcategory)
    # Managers
    objects = Manager()
    active = ActiveProductManager()
    featured = FeaturedProductManager()
    offer = OfferProductManager()

    def __str__(self):
        return self.name

    class Meta:
        verbose_name_plural = 'products'
        ordering = ['-date_created']

    def get_absolute_url(self):
        # Detail URL keyed by both slug and pk.
        return reverse('product-detail', kwargs={
            'product_slug': self.product_slug,
            'pk': self.id
        })
def test_PositiveIntegerField(self):
    """get_prep_value() must resolve a lazy value down to a plain int."""
    make_lazy_one = lazy(lambda: 1, int)
    prepared = PositiveIntegerField().get_prep_value(make_lazy_one())
    self.assertIsInstance(prepared, int)
class AggregationNode(CTENode, Model):
    """CTE tree node carrying a price, used to exercise aggregation over
    recursive queries."""
    price = PositiveIntegerField()
class Subscriber(Model):
    """
    TODO:
     - Create an "extra" JSON field to save custom-bussiness-related
       subscriber data. (using plan_id for this now)
     - Many ladiaria custom fields like "lento_pdf" should be removed
       (ladiaria will be using them in "extra").
     - Keep newsletters M2M relations for those newsletters that were
       discontinued (their publication or category have changed its
       has_newsletter attr from True to False) now this M2M rows are removed
       when the subscriber is saved, for example in the admin or by the user
       itself using the edit profile page.
    """
    contact_id = PositiveIntegerField(u'CRM id', unique=True, editable=True,
                                      blank=True, null=True)
    user = OneToOneField(User, verbose_name=u'usuario',
                         related_name='subscriber', blank=True, null=True)
    # TODO: consider removing this field, since it duplicates "first_name"
    #       of the django.contrib.auth.models.User model.
    name = CharField(u'nombre', max_length=255, validators=[alphanumeric])
    # These fields were added to unify the User and Subscriber information.
    address = CharField(u'dirección', max_length=255, blank=True, null=True)
    country = CharField(u'país', max_length=50, blank=True, null=True)
    city = CharField(u'ciudad', max_length=64, blank=True, null=True)
    province = CharField(u'departamento', max_length=20,
                         choices=settings.THEDAILY_PROVINCE_CHOICES,
                         blank=True, null=True)
    profile_photo = ImageField(upload_to='perfiles', blank=True, null=True)
    document = CharField(u'documento', max_length=50, blank=True, null=True)
    phone = CharField(u'teléfono', max_length=20)
    date_created = DateTimeField(u'fecha de registro', auto_now_add=True,
                                 editable=False)
    downloads = PositiveIntegerField(u'descargas', default=0, blank=True,
                                     null=True)
    pdf = BooleanField(default=False)
    lento_pdf = BooleanField(u'pdf L.', default=False)
    ruta = PositiveSmallIntegerField(blank=True, null=True)
    plan_id = TextField(blank=True, null=True)
    ruta_lento = PositiveSmallIntegerField(blank=True, null=True)
    ruta_fs = PositiveSmallIntegerField(blank=True, null=True)
    newsletters = ManyToManyField(Publication, blank=True,
                                  limit_choices_to={'has_newsletter': True})
    category_newsletters = ManyToManyField(
        Category, blank=True, limit_choices_to={'has_newsletter': True})
    allow_news = BooleanField(u'acepta novedades', default=True)
    allow_promotions = BooleanField(u'acepta promociones', default=True)
    allow_polls = BooleanField(u'acepta encuestas', default=True)
    # TODO: explain the utility of this field or remove it.
    subscription_mode = CharField(max_length=1, null=True, blank=True,
                                  default=None)
    last_paid_subscription = DateTimeField(u'Ultima subscripcion comienzo',
                                           null=True, blank=True)

    def save(self, *args, **kwargs):
        # Normalize the document to digits only before saving.
        if self.document:
            non_decimal = re.compile(r'[^\d]+')
            self.document = non_decimal.sub('', self.document)
        super(Subscriber, self).save(*args, **kwargs)

    def download(self, pdfinstance):
        """Record one download of `pdfinstance`: bump both the per-edition
        counter and the subscriber's global counter."""
        try:
            download = SubscriberEditionDownloads.objects.get(
                subscriber=self, edition=pdfinstance)
            download.downloads += 1
        except SubscriberEditionDownloads.DoesNotExist:
            download = SubscriberEditionDownloads()
            download.subscriber = self
            download.edition = pdfinstance
            download.downloads = 1
        download.save()
        self.downloads += 1
        self.save()

    # NOTE(review): default is evaluated at import time; a later change to
    # settings.DEFAULT_PUB would not be picked up — confirm acceptable.
    def is_subscriber(self, pub_slug=settings.DEFAULT_PUB):
        """True when this subscriber has access to publication `pub_slug`
        (staff users always pass; custom publications delegate to a
        configurable module)."""
        try:
            if self.user:
                if self.user.is_staff:
                    return True
                elif pub_slug in getattr(
                        settings,
                        'THEDAILY_IS_SUBSCRIBER_CUSTOM_PUBLICATIONS', ()):
                    is_subscriber_custom = __import__(
                        settings.THEDAILY_IS_SUBSCRIBER_CUSTOM_MODULE,
                        fromlist=['is_subscriber']).is_subscriber
                    return is_subscriber_custom(self, pub_slug)
                else:
                    return self.user.has_perm(
                        'thedaily.es_suscriptor_%s' % pub_slug)
        except User.DoesNotExist:
            # rare, but we saw once this exception happen
            pass
        return False

    def is_digital_only(self):
        """ Returns True only if this subcriber is subscribed only to the
        "digital" edition """
        # TODO 2nd release: implement
        return self.is_subscriber()

    def make_token(self):
        # Signed, timestamped token based on the username.
        return TimestampSigner().sign(self.user.username)

    def check_token(self, token):
        """Validate a token produced by make_token(); returns a bool."""
        try:
            key = '%s:%s' % (self.user.username, token)
            TimestampSigner().unsign(key, max_age=60 * 60 * 48)  # Valid 2 days
        except (BadSignature, SignatureExpired):
            return False
        return True

    def user_is_active(self):
        return self.user and self.user.is_active

    user_is_active.short_description = u'user act.'
    user_is_active.boolean = True

    def is_subscriber_any(self):
        """True if subscribed to at least one configured publication."""
        return any(
            self.is_subscriber(pub_slug) for pub_slug in getattr(
                settings, 'THEDAILY_IS_SUBSCRIBER_ANY',
                Publication.objects.values_list('slug', flat=True)))

    # NOTE(review): exclude_slugs=[] mutable defaults below are read-only in
    # these methods, so harmless — but `None` defaults would be safer.
    def get_publication_newsletters_ids(self, exclude_slugs=[]):
        return list(
            self.newsletters.filter(has_newsletter=True).exclude(
                slug__in=exclude_slugs).values_list('id', flat=True))

    def get_category_newsletters_ids(self, exclude_slugs=[]):
        return list(
            self.category_newsletters.filter(has_newsletter=True).exclude(
                slug__in=exclude_slugs).values_list('id', flat=True))

    def get_newsletters_slugs(self):
        return list(self.newsletters.values_list('slug', flat=True)) + \
            list(self.category_newsletters.values_list('slug', flat=True))

    def get_newsletters(self):
        return ', '.join(self.get_newsletters_slugs())

    get_newsletters.short_description = u'newsletters'

    def updatecrmuser_publication_newsletters(self, exclude_slugs=[]):
        # Best-effort push to the CRM; network failures are ignored.
        if self.contact_id:
            try:
                updatecrmuser(
                    self.contact_id, u'newsletters', json.dumps(
                        self.get_publication_newsletters_ids(exclude_slugs)))
            except requests.exceptions.RequestException:
                pass

    def updatecrmuser_category_newsletters(self, exclude_slugs=[]):
        # Best-effort push to the CRM; network failures are ignored.
        if self.contact_id:
            try:
                updatecrmuser(
                    self.contact_id, u'area_newsletters', json.dumps(
                        self.get_category_newsletters_ids(exclude_slugs)))
            except requests.exceptions.RequestException:
                pass

    def get_downloads(self, edition=None):
        """Download count for one edition, or the global counter when no
        edition is given."""
        if not edition:
            return self.downloads
        else:
            qs = self.edition_downloads.filter(edition=edition)
            if qs.count() == 0:
                return 0
            else:
                return qs[0].downloads

    def __unicode__(self):
        return self.name or self.get_full_name()

    def get_full_name(self):
        if not self.user.first_name and not self.user.last_name:
            return u"Usuario sin nombre"
        else:
            return self.user.get_full_name()

    def get_latest_article_visited(self):
        """ Returns info about the latest visit to an article from this
        subscriber. """
        # Search in mongodb first, if none found then search in db-table
        mdb = core_articleviewedby_mdb.posts.find({
            'user': self.user.id
        }).sort('viewed_at', pymongo.DESCENDING)
        if mdb.count():
            latest = mdb[0]
            return (latest.get('article'), latest.get('viewed_at'))
        else:
            try:
                latest = self.user.articleviewedby_set.latest('viewed_at')
            except ArticleViewedBy.DoesNotExist:
                # Implicitly returns None when nothing was ever visited.
                pass
            else:
                return (latest.article_id, latest.viewed_at)

    @property
    def user_email(self):
        return self.user.email if self.user else None

    @permalink
    def get_absolute_url(self):
        return '/admin/thedaily/subscriber/%i/' % self.id

    class Meta:
        verbose_name = u'suscriptor'
        permissions = (("es_suscriptor_%s" % settings.DEFAULT_PUB,
                        "Es suscriptor actualmente"), )
class ValueNamedNode(NamedNode):
    """Named tree node carrying a single non-negative value `v`."""
    v = PositiveIntegerField()
class ItemCategory(SoftDeletionModel):
    """Inventory category with stock levels; rows are soft-deleted rather
    than removed (see SoftDeletionModel)."""
    category = CharField(max_length=64, unique=True)
    # Prefix prepended to item codes in this category (max 3 chars).
    code_prefix = CharField(max_length=3)
    quantity = PositiveIntegerField()
    # Threshold below which stock is considered low.
    minimum_quantity = PositiveIntegerField()
class DNSData(CleanSave, TimestampedModel):
    """A `DNSData`.

    :ivar rrtype: Type of resource record
    :ivar rrdata: right-hand side of the DNS Resource Record.
    """

    class Meta(DefaultMeta):
        """Needed for South to recognize this model."""

        verbose_name = "DNSData"
        verbose_name_plural = "DNSData"

    objects = DNSDataManager()

    # Owning DNSResource (the record's left-hand side / label).
    dnsresource = ForeignKey(
        DNSResource,
        editable=True,
        blank=False,
        null=False,
        help_text="DNSResource which is the left-hand side.",
        on_delete=CASCADE,
    )

    # TTL for this resource. Should be the same for all records of the same
    # RRType on a given label. (BIND will complain and pick one if they are not
    # all the same.) If None, then we inherit from the parent Domain, or the
    # global default.
    ttl = PositiveIntegerField(default=None, null=True, blank=True)

    # Resource record type (e.g. CNAME, MX, SRV); normalized to uppercase in
    # clean_rrdata()/clean().
    rrtype = CharField(
        editable=True,
        max_length=8,
        blank=False,
        null=False,
        unique=False,
        validators=[validate_rrtype],
        help_text="Resource record type",
    )

    # Free-text right-hand side; format validated per-rrtype in clean_rrdata().
    rrdata = TextField(
        editable=True,
        blank=False,
        null=False,
        help_text="Entire right-hand side of the resource record.",
    )

    def __unicode__(self):
        return "%s %s" % (self.rrtype, self.rrdata)

    def __str__(self):
        return "%s %s" % (self.rrtype, self.rrdata)

    @property
    def fqdn(self):
        # The FQDN is owned by the DNSResource, not stored here.
        return self.dnsresource.fqdn

    def clean_rrdata(self, *args, **kwargs):
        """verify that the rrdata matches the spec for the resource
        type.

        Also uppercases ``self.rrtype`` as a side effect. Raises
        ``ValidationError`` with a type-specific message on mismatch.
        """
        self.rrtype = self.rrtype.upper()
        if self.rrtype == "CNAME":
            # Depending on the query, this can be quite a few different
            # things... Make sure it meets the more general case.
            if re.compile(CNAME_SPEC).search(self.rrdata) is None:
                raise ValidationError(INVALID_CNAME_MSG)
        elif self.rrtype == "SSHFP":
            # SSHFP is <algo> <fptype> <fingerprint>. Do minimal checking so
            # that we support future algorithms and types.
            spec = re.compile(
                r"^(?P<algo>[0-9]+)\s+(?P<fptype>[0-9]+)\s+(?P<fp>.*)$")
            res = spec.search(self.rrdata)
            if res is None:
                raise ValidationError(INVALID_SSHFP_MSG)
            # No further checking.
        elif self.rrtype == "TXT":
            # TXT is freeform, we simply pass it through
            pass
        elif self.rrtype == "MX":
            spec = re.compile(r"^(?P<pref>[0-9]+)\s+(?P<mxhost>.+)$")
            res = spec.search(self.rrdata)
            if res is None:
                raise ValidationError(INVALID_MX_MSG)
            pref = int(res.groupdict()["pref"])
            mxhost = res.groupdict()["mxhost"]
            # MX preference is a 16-bit unsigned value.
            if pref < 0 or pref > 65535:
                raise ValidationError(INVALID_MX_MSG)
            validate_domain_name(mxhost)
        elif self.rrtype == "NS":
            validate_domain_name(self.rrdata)
        elif self.rrtype == "SRV":
            spec = re.compile(
                r"^(?P<pri>[0-9]+)\s+(?P<weight>[0-9]+)\s+(?P<port>[0-9]+)\s+"
                r"(?P<target>.*)")
            res = spec.search(self.rrdata)
            if res is None:
                raise ValidationError(INVALID_SRV_MSG)
            srv_host = res.groupdict()["target"]
            pri = int(res.groupdict()["pri"])
            weight = int(res.groupdict()["weight"])
            port = int(res.groupdict()["port"])
            # Priority, weight and port are each 16-bit unsigned values.
            if pri < 0 or pri > 65535:
                raise ValidationError(INVALID_SRV_MSG)
            if weight < 0 or weight > 65535:
                raise ValidationError(INVALID_SRV_MSG)
            if port < 0 or port > 65535:
                raise ValidationError(INVALID_SRV_MSG)
            # srv_host can be '.', in which case "the service is decidedly not
            # available at this domain." Otherwise, it must be a valid name
            # for an Address RRSet.
            if srv_host != ".":
                validate_domain_name(srv_host)

    def clean(self, *args, **kwargs):
        self.clean_rrdata(*args, **kwargs)
        # Force uppercase for the RR Type names.
        self.rrtype = self.rrtype.upper()
        # make sure that we don't create things that we shouldn't.
        # CNAMEs can only exist as a single resource, and only if there are no
        # other resource records on the name. See how many CNAME and other
        # items that saving this would create, and reject things if needed.
        if self.id is None:
            # New row: enforce CNAME exclusivity before the insert.
            num_cname = DNSData.objects.filter(
                dnsresource_id=self.dnsresource_id,
                rrtype="CNAME").count()
            if self.rrtype == "CNAME":
                num_other = (DNSData.objects.filter(
                    dnsresource__id=self.dnsresource_id).exclude(
                        rrtype="CNAME").count())
                # account for ipaddresses
                num_other += self.dnsresource.ip_addresses.count()
                if num_other > 0:
                    raise ValidationError(CNAME_AND_OTHER_MSG)
                elif num_cname > 0:
                    raise ValidationError(MULTI_CNAME_MSG)
            else:
                if num_cname > 0:
                    raise ValidationError(CNAME_AND_OTHER_MSG)
        # Warn (but do not fail) when sibling records of the same RRset carry
        # a different TTL.
        rrset = DNSData.objects.filter(
            rrtype=self.rrtype,
            dnsresource_id=self.dnsresource.id).exclude(ttl=self.ttl)
        if rrset.count() > 0:
            maaslog.warning(DIFFERENT_TTL_MSG % (self.ttl, rrset.first().ttl))
        super().clean(*args, **kwargs)
class OutSourcedItems(SoftDeletionModel):
    """An outsourced inventory item; rows are soft-deleted (via
    SoftDeletionModel) rather than removed."""

    # Item name/description.
    item = CharField(max_length=64)
    # Number of units.
    quantity = PositiveIntegerField()
    # Per-unit price: up to 10 digits, 2 decimal places.
    unit_price = DecimalField(max_digits=10, decimal_places=2)
class SubqueryCount(Subquery):
    """Subquery expression whose SQL wraps the inner query in a
    ``SELECT count(*)``, so the expression resolves to a row count."""

    # The inner query is rendered into %(subquery)s and counted by the outer
    # SELECT; aliased as "sq" because some backends require a subquery alias.
    template = "(SELECT count(*) FROM (%(subquery)s) as sq)"
    # Counts are non-negative integers.
    output_field = PositiveIntegerField()
class Board(Model):
    """A board tied one-to-one to a root Post."""

    name = CharField(max_length=50)
    # PROTECT: the root post cannot be deleted while this board references it.
    root_post = OneToOneField(Post, on_delete=models.PROTECT)
    # Ordering rank, defaulting to 1 — presumably lower ranks sort first;
    # verify against callers.
    rank = PositiveIntegerField(default=1)
    # Integer category code; semantics not visible here — TODO confirm.
    category = IntegerField(default=0)
class Task(TimeStampedModel):
    """A rewarded task within a Campaign; interact() records a user
    interaction and credits the reward to the user's main wallet."""

    class Meta:
        verbose_name = _("Task")
        verbose_name_plural = _("Tasks")

    name = CharField(max_length=255)
    # Must match the parent campaign's kind (enforced in clean()).
    kind = CampaignKindField()
    # http://stackoverflow.com/questions/10052220/advantages-to-using-urlfield-over-textfield#comment49011703_10052288
    link = URLField(max_length=2000)
    # Amount credited to the user's wallet per interaction.
    reward = ProjectMoneyField()
    campaign = ForeignKey(
        Campaign,
        related_name='tasks',
        on_delete=CASCADE,
    )
    status = ActiveInactiveStatusField()
    # Cap on interactions; NOTE(review): the enforcement code in interact()
    # is commented out, so this cap is not currently applied — confirm intent.
    max_interactions = PositiveIntegerField(
        validators=[MinValueValidator(1)],
        verbose_name=_("Maximum allowed interactions"),
    )
    expired = BooleanField(default=False)

    def __str__(self):
        return f"{ActiveInactiveStatus(self.status).label} " \
               f"{self.reward} " \
               f"\"{self.name}\" " \
               f"(#{self.pk})"

    def interact(self, user: settings.AUTH_USER_MODEL):
        """Record an interaction by ``user`` and credit the task reward to
        their MAIN wallet.  Always returns True (the duplicate-interaction
        guard below is commented out)."""
        # if TaskStatus.objects.filter(task=self, user=user, interacted=True).exists():
        #     return False
        TaskStatus.objects.create(
            task=self,
            user=user,
            interacted=True,
        )
        # if self.max_interactions == self.task_status.all().count():
        #     self.status = ActiveInactiveStatus.INACTIVE
        # NOTE(review): .first() returns None when the user has no MAIN
        # wallet, making the next line raise AttributeError — confirm that a
        # MAIN wallet always exists for every user.
        wallet = Wallet.objects.filter(user=user, kind=WalletKind.MAIN).first()
        wallet.balance += self.reward
        wallet.save()
        return True

    def clean(self):
        self._validate_task_kind_matches_campaign_kind()

    def save(self, force_insert=False, force_update=False, using=None,
             update_fields=None):
        # Run full model validation (field validators + clean()) on every save.
        self.full_clean()
        super().save(force_insert, force_update, using, update_fields)

    def _validate_task_kind_matches_campaign_kind(self):
        # Business rule: a task's kind must equal its campaign's kind.
        if self.kind != self.campaign.kind:
            raise ValidationError(
                _("Task kind must match its campaign's."),
                code=BUSINESS_LOGIC_ERROR_CODE,
            )
class PipelineRun(TimeStampedModel):
    """
    A pipeline run is a representation of a single time when a given pipeline
    was submitted to the cluster. Each program attached to the pipeline
    creates a ProgramRun object attached to this PipelineRun

    each ProgramRun is a program submitted to the cluster with any returned
    values or printed strings attached.
    """
    name = SlugField(max_length=128, db_index=True)
    pipeline = ForeignKey(Pipeline, related_name='runs')
    run_as_test = PositiveIntegerField(null=True, blank=True,\
        help_text="Every (x) days, run this pipeline-run as a test.")
    # Newline-separated list of extra file paths; see clean_filenames().
    clean_files = TextField(
        null=True, blank=True,
        help_text="List of extra files to remove when job is finished")

    def __str__(self):
        return "Pipeline Run %s" % self.name

    def text_status(self):
        """Generate a textual status of the run"""
        def bitset(*args):
            """Turn a set of booleans into a bit array and then into an integer"""
            return int(
                ''.join(reversed([str(int(bool(arg))) for arg in args])), 2)
        # One glyph per program run, indexed into '_> =!!?!' by the
        # (is_submitted, is_complete, is_error) bit pattern.
        return ''.join([
            '_> =!!?!'[bitset(progrun.is_submitted, progrun.is_complete,
                              progrun.is_error)]
            for progrun in self.programs.all()
        ])

    def get_absolute_url(self):
        """Return a link to this pipeline run for review"""
        return reverse('pipeline:run', kwargs={'pk': self.pk})

    def clean_filenames(self):
        """Return a list of fully qualified cleanable filenames"""
        if (self.clean_files or "").strip():
            return self.clean_files.split("\n")
        return []

    def clean_the_files(self):
        """Deletes any of the files marked for cleaning"""
        for fname in self.clean_filenames():
            try:
                os.unlink(fname)
            except (OSError, IOError):
                # Best-effort: ignore files that are already gone/unremovable.
                pass

    def run(self, commit=True, **kwargs):
        """Run this pipeline run (creates ProgramRun objects)"""
        runs = []
        if not commit:
            # Dry run: collect would-be ProgramRuns here instead of saving.
            self.test_programs = []
        if 'clean_files' in kwargs:
            self.clean_files = '\n'.join(kwargs['clean_files'])
        if commit:
            self.save()
        for pipe in self.pipeline.programs.all():
            if commit:
                run, _ = ProgramRun.objects.get_or_create(
                    piperun=self, **pipe.prepare(self.pk))
            else:
                run = ProgramRun(piperun=self, **pipe.prepare(self.pk))
                self.test_programs.append(run)
            runs.append(run)
        # Submit each run knowing its neighbours (previous/follower), so job
        # dependencies can be chained.
        for prev, run, foll in tripplet(runs):
            if not run.is_submitted:
                if not run.submit(
                        commit=commit, previous=prev, follower=foll, **kwargs):
                    return False
            else:
                # Already submitted: check the existing job did not fail.
                data = get_job_manager().status(run.job_id, clean=False)
                if data.get('finished', None) and data.get('return', 1) != 1:
                    raise JobSubmissionError("Existing job already failed.")
            # Sort out the filenames for the next call in the chain
            for package, filename in run.program.prepare_files(**kwargs):
                name = package[1]
                if name in kwargs:
                    if isinstance(kwargs[name], list):
                        kwargs[name].append(filename)
                    else:
                        kwargs[name] = [kwargs[name], filename]
                else:
                    kwargs[name] = [filename]
        return True

    def all_programs(self):
        """Returns all the program runs with unrun pipelines appended"""
        ret = []
        runs = dict((p.program_id, p) for p in self.programs.all())
        for pipe in self.pipeline.programs.all():
            # Fall back to the pipeline program itself when no run exists yet.
            ret.append(runs.get(pipe.program_id, pipe))
        return ret

    def stop_all(self, msg='All Stopped'):
        """Forcefully stop all processes in this pipeline run"""
        return all([program.stop(msg=msg) for program in self.programs.all()])

    def update_all(self):
        """
        Update all pipeline project runs with their running status

        returns True if all programs are complete.
        False if any are still running.
        """
        qset = self.programs.filter(
            Q(is_submitted=False) | Q(is_complete=False))
        if all([program.update_status() for program in qset]):
            if qset.count():
                # Clean up step for all programs
                for program in self.programs.filter(program__keep=False):
                    program.delete_output_files()
                self.clean_the_files()
            return True
        return False

    def get_errors(self):
        """Return true if programs in the pipeline have errors"""
        self.update_all()
        qset = self.programs.filter(is_error=True)
        if qset.count() == 0:
            return None
        return '\n---\n'.join(qset.values_list('error_text', flat=True))
class Migration(migrations.Migration):
    """Create the benchmark tree models: an MPTT variant, a PathField-based
    TreePlace (with its trigger/index), and three treebeard variants."""

    dependencies = [
        ('tree', '0001_initial'),
    ]

    operations = [
        # Nested-set bookkeeping columns (lft/rght/tree_id/level) plus a
        # TreeForeignKey parent — the django-mptt storage layout.
        migrations.CreateModel(
            name='MPTTPlace',
            fields=[
                ('id', AutoField(verbose_name='ID', serialize=False,
                                 auto_created=True, primary_key=True)),
                ('name', CharField(max_length=50, unique=True,
                                   default=get_random_name)),
                ('lft', PositiveIntegerField(db_index=True, editable=False)),
                ('rght', PositiveIntegerField(db_index=True, editable=False)),
                ('tree_id', PositiveIntegerField(db_index=True,
                                                 editable=False)),
                ('level', PositiveIntegerField(db_index=True, editable=False)),
                ('parent', TreeForeignKey('self', blank=True, null=True)),
            ],
            managers=[
                ('_default_manager', Manager()),
            ],
        ),
        # Adjacency list (parent FK) plus a materialized PathField.
        migrations.CreateModel(
            name='TreePlace',
            fields=[
                ('id', AutoField(verbose_name='ID', serialize=False,
                                 auto_created=True, primary_key=True)),
                ('name', CharField(max_length=50, unique=True,
                                   default=get_random_name)),
                ('parent', ForeignKey('self', blank=True, null=True)),
                ('path', PathField()),
            ],
        ),
        # Database-side trigger and index maintaining TreePlace.path,
        # ordered by name.
        CreateTreeTrigger('TreePlace', order_by=('name', )),
        CreateTreeIndex('TreePlace'),
        # Plain adjacency-list model (parent FK only).
        migrations.CreateModel(
            name='TreebeardALPlace',
            fields=[
                ('id', AutoField(auto_created=True, primary_key=True,
                                 serialize=False, verbose_name='ID')),
                ('name', CharField(max_length=50, unique=True,
                                   default=get_random_name)),
                ('parent', ForeignKey('self', blank=True, null=True)),
            ],
        ),
        # Materialized-path model (path/depth/numchild columns).
        migrations.CreateModel(
            name='TreebeardMPPlace',
            fields=[
                ('id', AutoField(auto_created=True, primary_key=True,
                                 serialize=False, verbose_name='ID')),
                ('path', CharField(max_length=255, unique=True)),
                ('depth', PositiveIntegerField()),
                ('numchild', PositiveIntegerField(default=0)),
                ('name', CharField(max_length=50, unique=True,
                                   default=get_random_name)),
            ],
        ),
        # Nested-set model (lft/rgt/tree_id/depth columns).
        migrations.CreateModel(
            name='TreebeardNSPlace',
            fields=[
                ('id', AutoField(auto_created=True, primary_key=True,
                                 serialize=False, verbose_name='ID')),
                ('lft', PositiveIntegerField(db_index=True)),
                ('rgt', PositiveIntegerField(db_index=True)),
                ('tree_id', PositiveIntegerField(db_index=True)),
                ('depth', PositiveIntegerField(db_index=True)),
                ('name', CharField(max_length=50, unique=True,
                                   default=get_random_name)),
            ],
        ),
    ]