class Request(models.Model):
    """Abstract base class for a request of a Meeting, on either the Student
    or the Teacher side.

    A User can only have one active request at a time.  The ``created``
    timestamp makes it possible to prune old requests, and ``failed_matches``
    records Users that should not be matched with this request again.
    """

    user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, to_field='uuid')
    # Setting related_name to '+' --> no reverse relation from User necessary (for now)
    failed_matches = models.ManyToManyField(settings.AUTH_USER_MODEL, related_name='+', blank=True)
    created = models.DateTimeField(auto_now_add=True)
    is_manual_deleted = models.BooleanField(default=False)
    is_active = models.BooleanField(default=True)
    deactivated = models.DateTimeField(null=True)
    last_poll = models.DateTimeField(default=timezone.now)
    # How many are connected?
    connected_count = models.PositiveIntegerField(default=0)
    notifications = GenericRelation('notify.Notification')
    meeting = models.OneToOneField("roulette.Meeting", on_delete=models.SET_NULL,
                                   null=True, default=None, related_name='+')

    def _successful(self):
        # A request counts as successful when its meeting lasted >= 5 minutes.
        if self.meeting:
            return self.meeting.duration >= timedelta(minutes=5)
        return False
    _successful.boolean = True  # lets the Django admin render it as an icon
    successful = property(_successful)

    @property
    def duration(self):
        """How long the request has been (or was) active."""
        if self.deactivated is not None:
            return self.deactivated - self.created
        return timezone.now() - self.created

    def manual_delete(self):
        """Flag the request as manually deleted, then delete it.

        NOTE(review): the save() is immediately followed by delete(), so the
        flag is only visible to observers of the save/delete signals --
        confirm before simplifying.
        """
        self.is_manual_deleted = True
        self.save()
        self.delete()

    def deactivate(self, connection_lost=False):
        """Deactivate the request (idempotent) and its match, if any."""
        if self.is_active:
            self.is_active = False
            self.deactivated = timezone.now()
            self._deactivate_match(connection_lost)
            self.save()

    @abc.abstractmethod
    def _deactivate_match(self, connection_lost):
        # Fixed: an abstract hook should raise, not `return NotImplemented` --
        # NotImplemented is the sentinel for binary-operator dispatch, not a
        # substitute for NotImplementedError.  Since Django model metaclasses
        # don't enforce abstractmethod, a missing override now fails loudly
        # instead of silently returning a sentinel.
        raise NotImplementedError

    def get_match(self):
        """Return the single pending Match (not failed, not successful) or None."""
        try:
            # Single query instead of the original filter()-then-get() pair;
            # still raises MultipleObjectsReturned if the data is inconsistent.
            return self.match_set.get(failed=False, successful=False)
        except self.match_set.model.DoesNotExist:
            return None

    class Meta:
        abstract = True
class Migration(migrations.Migration):
    """Initial schema for the orders app.

    NOTE: historical migration -- do not edit field definitions; Django
    replays this file as-is to rebuild old database states.
    """

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('contenttypes', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Order',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('date', models.DateField(auto_now_add=True)),
                ('open', models.BooleanField(default=True)),
                ('last_change', models.DateTimeField(auto_now=True)),
                ('waiting_payment', models.BooleanField(default=False)),
            ],
            options={},
            bases=(models.Model, ),
        ),
        migrations.CreateModel(
            name='Invoice',
            fields=[
                # The order itself is the primary key: exactly one invoice per order.
                ('order', models.OneToOneField(primary_key=True, serialize=False, to='orders.Order')),
                ('date', models.DateField(auto_now_add=True)),
            ],
            options={},
            bases=(models.Model, ),
        ),
        migrations.CreateModel(
            name='OrderItem',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('description', models.CharField(max_length=150)),
                ('value', models.DecimalField(max_digits=6, decimal_places=2)),
                # Generic-relation target: content_type + object_id pair.
                ('object_id', models.PositiveIntegerField()),
                ('content_type', models.ForeignKey(to='contenttypes.ContentType')),
                ('order', models.ForeignKey(to='orders.Order')),
            ],
            options={},
            bases=(models.Model, ),
        ),
        migrations.CreateModel(
            name='Payment',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('date', models.DateField(auto_now_add=True)),
                ('payed', models.BooleanField(default=False)),
                ('payment_ref', models.CharField(max_length=150, null=True, blank=True)),
                ('value_inc', models.DecimalField(default=0, max_digits=6, decimal_places=2)),
                ('order', models.ForeignKey(related_name='payment', to='orders.Order')),
            ],
            options={},
            bases=(models.Model, ),
        ),
        migrations.CreateModel(
            name='PaymentType',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=50)),
                ('accounting', models.BooleanField(default=False)),
            ],
            options={},
            bases=(models.Model, ),
        ),
        migrations.CreateModel(
            name='Vat',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('valid_from', models.DateField()),
                ('name', models.CharField(max_length=25)),
                # 4 decimal places -- presumably stored as a fraction
                # (e.g. 0.2100 for 21%); confirm against application code.
                ('rate', models.DecimalField(default=0, max_digits=5, decimal_places=4)),
            ],
            options={},
            bases=(models.Model, ),
        ),
        # ForeignKeys added after the CreateModel operations so the target
        # models already exist when the columns are created.
        migrations.AddField(
            model_name='payment',
            name='type',
            field=models.ForeignKey(to='orders.PaymentType'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='orderitem',
            name='vat',
            field=models.ForeignKey(to='orders.Vat'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='order',
            name='ordered_by',
            field=models.ForeignKey(to=settings.AUTH_USER_MODEL),
            preserve_default=True,
        ),
    ]
class Post(models.Model):
    """A Post for an RSS feed

    .. attribute:: feed

        The feed which this is a post for.

    .. attribute:: title

        The title of the post.

    .. attribute:: link

        Link to the original article.

    .. attribute:: content

        The posts content in full-text/HTML.

    .. attribute:: guid

        The GUID for this post (unique for :class:`Feed`)

    .. attribute:: author

        Name of this posts author.

    .. attribute:: date_published

        The date this post was published.

    .. attribute:: date_updated

        The date this post was last changed/updated.

    .. attribute:: enclosures

        List of media attachments for this post.
    """

    feed = models.ForeignKey(Feed, null=False, blank=False)
    title = models.CharField(_(u"title"), max_length=1000)
    # using 2048 for long URLs, only work for MySQL 5.0.3 +
    link = models.URLField(_(u"link"), max_length=2048)
    content = models.TextField(_(u"content"), blank=True, null=True)
    # Full article text fetched from `link` (see retrieve_article_content()).
    article_content = models.TextField(
        _(u"article content"), blank=True, null=True,
        help_text=_('''The full article content retrieved from the URL.'''))
    # Cached length of article_content; maintained in save().
    article_content_length = models.PositiveIntegerField(
        blank=True, null=True, editable=False, db_index=True)
    article_content_error_code = models.CharField(
        max_length=25, blank=True, null=True, db_index=True,
        help_text=_('Any HTTP error code received while attempting to download the article.'))
    article_content_error_reason = models.CharField(
        max_length=200, blank=True, null=True,
        help_text=_('Any HTTP error reason received while attempting to download the article.'))
    # None = retrieval not attempted yet; True/False = outcome of last attempt.
    article_content_success = models.NullBooleanField(default=None, db_index=True)
    article_content_error = models.TextField(blank=True, null=True)
    # N-gram extraction bookkeeping (see extract_ngrams()/clear_ngrams()).
    article_ngrams_extracted = models.BooleanField(default=False, editable=False, db_index=True)
    article_ngrams_extracted_datetime = models.DateTimeField(
        blank=True, null=True, editable=False, db_index=True)
    article_ngram_counts = PickledObjectField(
        blank=True, null=True, compress=True, help_text=_('{ngram:count}'))
    guid = models.CharField(_(u"guid"), max_length=200, blank=True)
    author = models.CharField(_(u"author"), max_length=50, blank=True)
    date_published = models.DateField(_(u"date published"), db_index=True)
    date_updated = models.DateTimeField(_(u"date updated"))
    enclosures = models.ManyToManyField(Enclosure, blank=True)
    categories = models.ManyToManyField(Category)

    objects = PostManager()

    class Meta:
        # sorting on anything else than id is catastrophic for
        # performance
        # even an ordering by id is not smart
        # ordering = ["-id"]
        verbose_name = _(u"post")
        verbose_name_plural = _(u"posts")

    def auto_guid(self):
        """Automatically generate a new guid from the metadata available."""
        content = "|".join((self.title, self.link, self.author))
        content = content.encode('utf-8')
        # md5 is acceptable here: the digest is only an identifier,
        # not a security token.
        return hashlib.md5(content).hexdigest()

    def __str__(self):
        return u"%s" % self.title

    def save(self, *args, **kwargs):
        """Normalize article_content and keep its derived fields in sync."""
        old = None
        if self.id:
            # Re-fetch the stored row so content changes can be detected below.
            old = type(self).objects.get(id=self.id)
        self.article_content = (self.article_content or '').strip()
        if self.article_content:
            # Coerce to text, replacing any undecodable bytes.
            self.article_content = force_text(self.article_content, errors='replace')
        else:
            # Store NULL rather than an empty string.
            self.article_content = None
        self.article_content_length = len((self.article_content or '').strip())
        if old and old.article_content != self.article_content:
            # Content changed: mark the post for n-gram re-extraction.
            self.article_ngrams_extracted = False
        # self.article_content_success = bool((self.article_content or '').strip())
        # if self.article_content_success:
        # self.article_content_error = None
        super(Post, self).save(*args, **kwargs)

    @property
    def date_published_naturaldate(self):
        """Human-friendly rendering of date_published (as midnight UTC)."""
        date = self.date_published
        as_datetime = datetime(date.year, date.month, date.day, tzinfo=utc)
        return text_type(naturaldate(as_datetime))

    @property
    def date_updated_naturaldate(self):
        """Human-friendly rendering of date_updated."""
        return text_type(naturaldate(self.date_updated))

    def retrieve_article_content(self, force=False):
        """Download and store the full article text from `link`.

        No-op when content is already present, unless `force` is given.
        Clears any previous error state and saves the post.
        """
        assert settings.FEEDZ_ARTICLE_EXTRACTOR, 'No extractor specified.'
        extractor = get_article_extractor_func()
        if self.article_content and not force:
            return
        self.article_content = extractor(
            self.link,
            # only_mime_types=conf.GET_ARTICLE_CONTENT_ONLY_MIME_TYPES,
            # ignore_robotstxt=True,
            # userAgent=ua.random,
        )
        self.article_content_error_code = None
        self.article_content_error_reason = None
        self.article_content_success = bool((self.article_content or '').strip())
        self.article_content_error = None
        self.save()

    @classmethod
    def needs_update(cls, *args, **kwargs):
        """True when any post still lacks extracted n-grams."""
        return bool(Post.objects.all_ngramless().only('id').exists())

    # Marks this updater as partitionable into stripes (see do_update()).
    stripable = True

    @classmethod
    def do_update(cls, stripe=None, print_status=None, post_ids=None, force=False, *args, **kwargs):
        """Extract n-grams for all pending posts, optionally one stripe only.

        `stripe` partitions the id space via modulo so several workers can run
        in parallel; `force` re-processes posts already extracted.
        """
        # Disable query logging while churning through many rows.
        tmp_debug = settings.DEBUG
        settings.DEBUG = False
        print_status = print_status or (
            lambda message, count=0, total=0: sys.stdout.write(message + '\n'))
        try:
            stripe_num, stripe_mod = parse_stripe(stripe)
            if force:
                q = Post.objects.all_ngramable()
            else:
                q = Post.objects.all_ngramless()
            q = q.only('id')
            if post_ids:
                q = q.filter(id__in=post_ids)
            if stripe is not None:
                # Note, we need to escape modulo operator twice since QuerySet does
                # additional parameter interpolation.
                q = q.extra(
                    where=['((id %%%% %i) = %i)' % (stripe_mod, stripe_num)])
            total = q.count()
            i = 0
            print_status('%i total records.' % (total, ))
            for post in q.iterator():
                i += 1
                message = '%i of %i %.2f%%' % (i, total, i / float(total) * 100)
                print_status(message=message, total=total, count=i)
                post.extract_ngrams(force=force)
                # Keep memory bounded on long runs.
                reset_queries()
                gc.collect()
        finally:
            settings.DEBUG = tmp_debug

    @property
    def ngramable_text(self):
        """Feed content plus article content, as one text blob."""
        return (self.content or '') + ' ' + (self.article_content or '')

    @property
    def ngramable_tokens(self):
        """Lower-cased alphanumeric tokens extracted from ngramable_text."""
        content = self.ngramable_text
        text = content.strip().lower()
        text = re.sub(r'[^a-zA-Z0-9]+', ' ', text, flags=re.DOTALL)
        text = re.sub(r'[\s\t\n\r]+', ' ', text, flags=re.DOTALL)
        text = text.strip().split(' ')
        return text

    def get_ngrams(self, min_n=1, max_n=3, min_text_length=4):
        """
        Returns a dictionary of the form {ngram:occurrence_count}.
        """
        text = self.ngramable_tokens
        ngrams = []
        for n in range(min_n, max_n + 1):
            ngrams.extend(ngrams_iter(sequence=text, n=n))
        ngram_counts = {}
        for ngram in ngrams:
            ngram = (' '.join(ngram)).strip()
            # Skip very short ngrams; they carry little signal.
            if len(ngram) < min_text_length:
                continue
            ngram_counts.setdefault(ngram, 0)
            ngram_counts[ngram] += 1
        return ngram_counts

    @commit_on_success
    def extract_ngrams(self, force=False):
        """Compute and store ngram counts.

        Skipped when already extracted or when there is no article content,
        unless `force` is given.
        """
        if not force and (self.article_ngrams_extracted or not self.article_content_length):
            return
        ngram_counts = self.get_ngrams()
        self.article_ngram_counts = ngram_counts
        self.article_ngrams_extracted = True
        # NOTE(review): naive datetime.now() here while other code builds
        # tz-aware values (tzinfo=utc above) -- confirm USE_TZ expectations.
        self.article_ngrams_extracted_datetime = datetime.now()
        self.save()

    @classmethod
    @commit_on_success
    def clear_ngrams(cls, post_ids=None, force=False):
        """Reset ngram state for all (or the given) extracted posts."""
        q = cls.objects.filter(article_ngrams_extracted=True).only('id')
        if post_ids:
            q = q.filter(id__in=post_ids)
        total = q.count()
        i = 0
        # NOTE: the loop variable is (confusingly) named `self`, shadowing the
        # usual instance name; it is each Post being cleared.
        for self in q.iterator():
            i += 1
            if i == 1 or not i % 100 or i == total:
                sys.stdout.write('\rClearing ngrams %i of %i %.02f%%.'
                                 % (i, total, float(i) / total * 100))
                sys.stdout.flush()
                # Periodically commit so progress survives interruption.
                commit()
            self.article_ngram_counts = None
            self.article_ngrams_extracted = False
            self.article_ngrams_extracted_datetime = None
            self.save()
class Migration(migrations.Migration):
    """Initial schema for the collection app.

    NOTE: historical migration -- field definitions must not be edited;
    Django replays this file verbatim to rebuild old database states.
    """

    dependencies = [
        ('contenttypes', '0002_remove_content_type_name'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Collection',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('created', models.DateTimeField(auto_now_add=True, db_index=True)),
                ('updated', models.DateTimeField(auto_now=True, db_index=True)),
                ('uuid', models.UUIDField(default=uuid.uuid4, editable=False, db_index=True)),
                ('name', models.CharField(max_length=250, db_index=True)),
                ('slug', models.SlugField(editable=False, blank=True)),
                ('visibility', models.PositiveIntegerField(default=0, choices=[(0, 'private'), (1, 'public')])),
                ('description', models.TextField(null=True, blank=True)),
            ],
            options={
                'ordering': ('name', ),
                'verbose_name': 'Collection',
                'verbose_name_plural': 'Collections',
            },
        ),
        migrations.CreateModel(
            name='CollectionItem',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('uuid', models.UUIDField(default=uuid.uuid4, editable=False, db_index=True)),
                # Generic-relation target: content_type + object_id pair.
                ('object_id', models.PositiveIntegerField()),
                ('content_type', models.ForeignKey(to='contenttypes.ContentType')),
            ],
            options={
                'verbose_name': 'Collection Item',
                'verbose_name_plural': 'Collection Items',
            },
        ),
        migrations.CreateModel(
            name='CollectionMaintainer',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('collection', models.ForeignKey(to='collection.Collection')),
                ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='CollectionMember',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('created', models.DateTimeField(auto_now_add=True, db_index=True)),
                ('updated', models.DateTimeField(auto_now=True, db_index=True)),
                # SET_NULL keeps the membership row if the adding user is deleted.
                ('added_by', models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, blank=True, to=settings.AUTH_USER_MODEL, null=True)),
                ('collection', models.ForeignKey(related_name='members', to='collection.Collection')),
                ('item', models.ForeignKey(to='collection.CollectionItem')),
            ],
        ),
        # Many-to-many fields are added once both sides of each relation exist.
        migrations.AddField(
            model_name='collection',
            name='items',
            field=models.ManyToManyField(to='collection.CollectionItem', through='collection.CollectionMember', blank=True),
        ),
        migrations.AddField(
            model_name='collection',
            name='maintainers',
            field=models.ManyToManyField(to=settings.AUTH_USER_MODEL, through='collection.CollectionMaintainer', blank=True),
        ),
        migrations.AddField(
            model_name='collection',
            name='owner',
            field=models.ForeignKey(related_name='owned_collections', to=settings.AUTH_USER_MODEL, null=True),
        ),
        migrations.AlterUniqueTogether(
            name='collectionmember',
            unique_together=set([('collection', 'item')]),
        ),
    ]
class Profile(models.Model):
    """Public-facing profile attached 1:1 to a user account."""

    user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
    slug = sluggable_fields.SluggableField(
        decider=ProfileSlug, populate_from="name", slugify=slugify_user, unique=True
    )
    is_public = models.BooleanField(default=True)
    name = models.CharField(max_length=200, validators=[validate_sluggable_name])
    image = thumbnail.ImageField(
        upload_to=upload_path.auto_cleaned_path_stripped_uuid4, blank=True
    )
    city_or_town = models.CharField(max_length=100, blank=True)
    country = models.CharField(max_length=100, blank=True)
    linkedin_url = models.URLField(max_length=400, blank=True)
    facebook_url = models.URLField(max_length=400, blank=True)
    personal_website_url = models.URLField(max_length=400, blank=True)
    # Geocoded from city_or_town/country by geocode().
    lat = models.FloatField(null=True, blank=True, default=None)
    lon = models.FloatField(null=True, blank=True, default=None)
    cause_areas = postgres_fields.ArrayField(
        enum.EnumField(CauseArea), blank=True, default=list
    )
    cause_areas_other = models.TextField(
        blank=True, validators=[MaxLengthValidator(2000)]
    )
    # Tri-state: None = unanswered, True/False = explicit answer.
    available_to_volunteer = models.BooleanField(null=True, blank=True, default=None)
    open_to_job_offers = models.BooleanField(null=True, blank=True, default=None)
    expertise_areas = postgres_fields.ArrayField(
        enum.EnumField(ExpertiseArea), blank=True, default=list
    )
    expertise_areas_other = models.TextField(
        blank=True, validators=[MaxLengthValidator(2000)]
    )
    career_interest_areas = postgres_fields.ArrayField(
        enum.EnumField(ExpertiseArea), blank=True, default=list
    )
    available_as_speaker = models.BooleanField(null=True, blank=True, default=None)
    topics_i_speak_about = models.TextField(
        blank=True, validators=[MaxLengthValidator(2000)]
    )
    organisational_affiliations = postgres_fields.ArrayField(
        enum.EnumField(OrganisationalAffiliation), blank=True, default=list
    )
    summary = models.TextField(blank=True, validators=[MaxLengthValidator(2000)])
    giving_pledges = postgres_fields.ArrayField(
        enum.EnumField(GivingPledge), blank=True, default=list
    )
    local_groups = models.ManyToManyField(LocalGroup, through="Membership", blank=True)
    # Id of the record in the legacy system this profile was imported from.
    legacy_record = models.PositiveIntegerField(
        null=True, default=None, editable=False, unique=True
    )
    slugs = contenttypes_fields.GenericRelation(ProfileSlug)

    objects = ProfileManager()

    class Meta:
        ordering = ["name", "slug"]

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        return urls.reverse("profile", args=[self.slug])

    def geocode(self):
        """Resolve lat/lon from city_or_town + country, clearing them first.

        Returns self so the call can be chained before save().
        """
        self.lat = None
        self.lon = None
        if self.city_or_town and self.country:
            location = geocoders.Nominatim(timeout=10).geocode(
                f"{self.city_or_town}, {self.country}"
            )
            if location:
                self.lat = location.latitude
                self.lon = location.longitude
        return self

    def get_pretty_cause_areas(self):
        return prettify_property_list(
            CauseArea, self.cause_areas, self.cause_areas_other
        )

    def get_pretty_expertise(self):
        return prettify_property_list(
            ExpertiseArea, self.expertise_areas, self.expertise_areas_other
        )

    def get_pretty_career_interest_areas(self):
        return prettify_property_list(ExpertiseArea, self.career_interest_areas)

    def get_pretty_giving_pledges(self):
        if self.giving_pledges:
            return ", ".join(map(GivingPledge.label, self.giving_pledges))
        return "N/A"

    def get_pretty_organisational_affiliations(self):
        if self.organisational_affiliations:
            return ", ".join(
                map(OrganisationalAffiliation.label, self.organisational_affiliations)
            )
        return "N/A"

    def get_pretty_local_groups(self):
        # Fixed: a related manager is always truthy, so the original
        # `if self.local_groups:` could never reach the "N/A" branch;
        # test for actual rows instead.
        if self.local_groups.exists():
            return ", ".join(
                "{local_group}".format(local_group=x.name)
                for x in self.local_groups.all()
            )
        return "N/A"

    def write_data_export_zip(self, request, response):
        """Write a data export (JSON payload + profile image) into `response` as a zip."""
        with zipfile.ZipFile(response, mode="w") as zip_file:
            with zip_file.open(
                f"{self.slug}.json", mode="w"
            ) as json_binary_file, io.TextIOWrapper(json_binary_file) as json_file:
                json.dump(self._export_dict(request), json_file, indent=2)
            if self.image:
                with self.image.open() as image_src_file, zip_file.open(
                    self.slug + pathlib.PurePath(self.image.name).suffix, mode="w"
                ) as image_dst_file:
                    shutil.copyfileobj(image_src_file, image_dst_file)

    def _export_dict(self, request):
        """Build the JSON-serializable payload for write_data_export_zip()."""
        return {
            "email": self.user.email,
            "date_joined": self.user.date_joined.isoformat(),
            # Fixed: last_login is None for users who have never logged in;
            # the original raised AttributeError here.
            "last_login": (
                self.user.last_login.isoformat() if self.user.last_login else None
            ),
            "url": request.build_absolute_uri(self.get_absolute_url()),
            "is_public": self.is_public,
            "name": self.name,
            "city_or_town": self.city_or_town,
            "country": self.country,
            "cause_areas": list(map(CauseArea.label, self.cause_areas)),
            "cause_areas_other": self.cause_areas_other,
            "available_to_volunteer": self.available_to_volunteer,
            "open_to_job_offers": self.open_to_job_offers,
            "expertise_areas": list(map(ExpertiseArea.label, self.expertise_areas)),
            "expertise_areas_other": self.expertise_areas_other,
            "career_interest_areas": list(
                map(ExpertiseArea.label, self.career_interest_areas)
            ),
            "available_as_speaker": self.available_as_speaker,
            "topics_i_speak_about": self.topics_i_speak_about,
            "organisational_affiliations": list(
                map(
                    OrganisationalAffiliation.label,
                    self.organisational_affiliations,
                )
            ),
            "summary": self.summary,
            "giving_pledges": list(map(GivingPledge.label, self.giving_pledges)),
            # NOTE(review): `get_absolute_uri` (not ..._url) is assumed to be
            # LocalGroup's API; confirm the method name.
            "member_of_local_groups": [
                request.build_absolute_uri(local_group.get_absolute_uri())
                for local_group in self.local_groups.all()
            ],
            "organiser_of_local_groups": [
                request.build_absolute_uri(local_group.get_absolute_uri())
                for local_group in self.user.localgroup_set.all()
            ],
            "aliases": [
                request.build_absolute_uri(
                    urls.reverse("profile", kwargs={"slug": slug.slug})
                )
                for slug in self.slugs.filter(redirect=True)
            ],
            # None when there is no legacy record (short-circuiting `and`).
            "legacy_hub_url": (
                self.legacy_record
                and request.build_absolute_uri(
                    urls.reverse(
                        "profile_legacy",
                        kwargs={"legacy_record": self.legacy_record},
                    )
                )
            ),
        }

    def image_placeholder(self):
        # Deterministic placeholder avatar keyed off the pk.
        return f"Avatar{self.id % 10}.png"

    def has_cause_area_details(self):
        """True when any cause-area-related field is filled in."""
        return any(
            [
                len(self.cause_areas) > 0,
                len(self.cause_areas_other) > 0,
                len(self.giving_pledges) > 0,
                self.available_to_volunteer,
            ]
        )

    def has_career_details(self):
        """True when any career-related field is filled in."""
        return any(
            [
                len(self.expertise_areas),
                len(self.expertise_areas_other),
                self.open_to_job_offers,
            ]
        )

    def has_community_details(self):
        """True when any community-related field is filled in."""
        return any(
            [
                len(self.organisational_affiliations) > 0,
                self.local_groups.exists(),
                self.user.localgroup_set.exists(),
                self.available_as_speaker,
                len(self.topics_i_speak_about) > 0,
            ]
        )
class JobHostSummary(CreatedModifiedModel):
    """
    Per-host statistics for each job.
    """

    class Meta:
        app_label = 'main'
        unique_together = [('job', 'host_name')]
        verbose_name_plural = _('job host summaries')
        ordering = ('-pk', )

    job = models.ForeignKey(
        'Job',
        related_name='job_host_summaries',
        on_delete=models.CASCADE,
        editable=False,
    )
    # The host may be deleted after the job ran; SET_NULL plus the host_name
    # snapshot below preserves the summary.
    host = models.ForeignKey('Host', related_name='job_host_summaries', null=True,
                             default=None, on_delete=models.SET_NULL, editable=False)
    host_name = models.CharField(
        max_length=1024,
        default='',
        editable=False,
    )
    # Per-host task counters.
    changed = models.PositiveIntegerField(default=0, editable=False)
    dark = models.PositiveIntegerField(default=0, editable=False)
    failures = models.PositiveIntegerField(default=0, editable=False)
    ignored = models.PositiveIntegerField(default=0, editable=False)
    ok = models.PositiveIntegerField(default=0, editable=False)
    processed = models.PositiveIntegerField(default=0, editable=False)
    rescued = models.PositiveIntegerField(default=0, editable=False)
    skipped = models.PositiveIntegerField(default=0, editable=False)
    # Derived in save(): True when there were failures or unreachable (dark) hosts.
    failed = models.BooleanField(default=False, editable=False, db_index=True)

    def __str__(self):
        host = getattr_dne(self, 'host')
        hostname = host.name if host else 'N/A'
        return '%s changed=%d dark=%d failures=%d ignored=%d ok=%d processed=%d rescued=%d skipped=%s' % (
            hostname,
            self.changed,
            self.dark,
            self.failures,
            self.ignored,
            self.ok,
            self.processed,
            self.rescued,
            self.skipped,
        )

    def get_absolute_url(self, request=None):
        return reverse('api:job_host_summary_detail', kwargs={'pk': self.pk}, request=request)

    def save(self, *args, **kwargs):
        """Keep host_name and the derived `failed` flag in sync on every save.

        If update_fields has been specified, our derived field names are added
        to it so they are actually persisted; if it hasn't been specified,
        then we're just doing a normal save.
        """
        update_fields = kwargs.get('update_fields', [])
        if self.host is not None:
            self.host_name = self.host.name
            # Fixed: previously 'host_name' was never added to update_fields,
            # so save(update_fields=[...]) silently dropped the refreshed name.
            if 'host_name' not in update_fields:
                update_fields.append('host_name')
        self.failed = bool(self.dark or self.failures)
        # Membership check avoids appending a duplicate when the caller
        # reuses the same update_fields list across saves.
        if 'failed' not in update_fields:
            update_fields.append('failed')
        super(JobHostSummary, self).save(*args, **kwargs)
class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin,
          TaskManagerJobMixin, CustomVirtualEnvMixin, WebhookMixin):
    """
    A job applies a project (with playbook) to an inventory
    source with a given credential.  It represents a single invocation of
    ansible-playbook with the given parameters.
    """

    class Meta:
        app_label = 'main'
        ordering = ('id', )

    job_template = models.ForeignKey(
        'JobTemplate',
        related_name='jobs',
        blank=True,
        null=True,
        default=None,
        on_delete=models.SET_NULL,
    )
    # Hosts touched by this job; populated through the per-host summaries.
    hosts = models.ManyToManyField(
        'Host',
        related_name='jobs',
        editable=False,
        through='JobHostSummary',
    )
    artifacts = JSONBlob(
        default=dict,
        blank=True,
        editable=False,
    )
    scm_revision = models.CharField(
        max_length=1024,
        blank=True,
        default='',
        editable=False,
        verbose_name=_('SCM Revision'),
        help_text=_('The SCM Revision from the Project used for this job, if available'),
    )
    project_update = models.ForeignKey(
        'ProjectUpdate',
        blank=True,
        null=True,
        default=None,
        on_delete=models.SET_NULL,
        help_text=_('The SCM Refresh task used to make sure the playbooks were available for the job run'),
    )
    job_slice_number = models.PositiveIntegerField(
        blank=True,
        default=0,
        help_text=_(
            "If part of a sliced job, the ID of the inventory slice operated on. "
            "If not part of sliced job, parameter is not used."),
    )
    job_slice_count = models.PositiveIntegerField(
        blank=True,
        default=1,
        help_text=_(
            "If ran as part of sliced jobs, the total number of slices. "
            "If 1, job is not part of a sliced job."),
    )

    def _get_parent_field_name(self):
        # Unified-job plumbing: names the FK to the template this job came from.
        return 'job_template'

    @classmethod
    def _get_task_class(cls):
        # Imported lazily to avoid a circular import at module load time.
        from awx.main.tasks.jobs import RunJob
        return RunJob

    def _global_timeout_setting(self):
        return 'DEFAULT_JOB_TIMEOUT'

    @classmethod
    def _get_unified_job_template_class(cls):
        return JobTemplate

    def get_absolute_url(self, request=None):
        return reverse('api:job_detail', kwargs={'pk': self.pk}, request=request)

    def get_ui_url(self):
        return urljoin(settings.TOWER_URL_BASE, "/#/jobs/playbook/{}".format(self.pk))

    @property
    def event_class(self):
        # Older jobs may predate event-table partitioning.
        if self.has_unpartitioned_events:
            return UnpartitionedJobEvent
        return JobEvent

    def copy_unified_job(self, **new_prompts):
        # Needed for job slice relaunch consistency, do no re-spawn workflow job
        # target same slice as original job
        new_prompts['_prevent_slicing'] = True
        new_prompts.setdefault('_eager_fields', {})
        new_prompts['_eager_fields']['job_slice_number'] = self.job_slice_number
        new_prompts['_eager_fields']['job_slice_count'] = self.job_slice_count
        return super(Job, self).copy_unified_job(**new_prompts)

    def get_passwords_needed_to_start(self):
        return self.passwords_needed_to_start

    def _get_hosts(self, **kwargs):
        """Hosts of this job matching the extra filter kwargs."""
        # Resolve the Host model via the FK to avoid a direct import.
        Host = JobHostSummary._meta.get_field('host').related_model
        kwargs['job_host_summaries__job__pk'] = self.pk
        return Host.objects.filter(**kwargs)

    def retry_qs(self, status):
        """
        Returns Host queryset that will be used to produce the `limit`
        field in a retry on a subset of hosts
        """
        kwargs = {}
        if status == 'all':
            pass
        elif status == 'failed':
            # Special case for parity with Ansible .retry files
            kwargs['job_host_summaries__failed'] = True
        elif status in ['ok', 'changed', 'unreachable']:
            if status == 'unreachable':
                # Ansible's counter for unreachable hosts is named "dark".
                status_field = 'dark'
            else:
                status_field = status
            kwargs['job_host_summaries__{}__gt'.format(status_field)] = 0
        else:
            raise ParseError(
                _('{status_value} is not a valid status option.').format(
                    status_value=status))
        return self._get_hosts(**kwargs)

    @property
    def task_impact(self):
        """Capacity units this job consumes while running."""
        if self.launch_type == 'callback':
            count_hosts = 2
        else:
            # If for some reason we can't count the hosts then lets assume the impact as forks
            if self.inventory is not None:
                count_hosts = self.inventory.total_hosts
                if self.job_slice_count > 1:
                    # Integer division intentional
                    count_hosts = (
                        count_hosts + self.job_slice_count -
                        self.job_slice_number) // self.job_slice_count
            else:
                count_hosts = 5 if self.forks == 0 else self.forks
        return min(count_hosts, 5 if self.forks == 0 else self.forks) + 1

    @property
    def successful_hosts(self):
        return self._get_hosts(job_host_summaries__ok__gt=0)

    @property
    def failed_hosts(self):
        return self._get_hosts(job_host_summaries__failures__gt=0)

    @property
    def changed_hosts(self):
        return self._get_hosts(job_host_summaries__changed__gt=0)

    @property
    def dark_hosts(self):
        return self._get_hosts(job_host_summaries__dark__gt=0)

    @property
    def unreachable_hosts(self):
        # Alias: "dark" is Ansible's term for unreachable.
        return self.dark_hosts

    @property
    def skipped_hosts(self):
        return self._get_hosts(job_host_summaries__skipped__gt=0)

    @property
    def processed_hosts(self):
        return self._get_hosts(job_host_summaries__processed__gt=0)

    @property
    def ignored_hosts(self):
        return self._get_hosts(job_host_summaries__ignored__gt=0)

    @property
    def rescued_hosts(self):
        return self._get_hosts(job_host_summaries__rescued__gt=0)

    def notification_data(self, block=5):
        """Notification payload; may wait up to `block` seconds for summaries."""
        data = super(Job, self).notification_data()
        all_hosts = {}
        # NOTE: Probably related to job event slowness, remove at some point -matburt
        if block and self.status != 'running':
            summaries = self.job_host_summaries.all()
            # NOTE(review): len() evaluates and caches this queryset, so the
            # loop below never re-queries the database -- it can only count
            # `block` down without ever observing late-arriving rows.
            # Presumably a re-fetch per iteration was intended; confirm
            # before relying on the blocking behavior.
            while block > 0 and not len(summaries):
                time.sleep(1)
                block -= 1
        else:
            summaries = self.job_host_summaries.all()
        for h in self.job_host_summaries.all():
            all_hosts[h.host_name] = dict(
                failed=h.failed,
                changed=h.changed,
                dark=h.dark,
                failures=h.failures,
                ok=h.ok,
                processed=h.processed,
                skipped=h.skipped,
                rescued=h.rescued,
                ignored=h.ignored,
            )
        data.update(
            dict(
                inventory=self.inventory.name if self.inventory else None,
                project=self.project.name if self.project else None,
                playbook=self.playbook,
                credential=getattr(self.machine_credential, 'name', None),
                limit=self.limit,
                extra_vars=self.display_extra_vars(),
                hosts=all_hosts,
            ))
        return data

    def _resources_sufficient_for_launch(self):
        # Both an inventory and a project are required to launch.
        return not (self.inventory_id is None or self.project_id is None)

    def display_artifacts(self):
        """
        Hides artifacts if they are marked as no_log type artifacts.
        """
        artifacts = self.artifacts
        if artifacts.get('_ansible_no_log', False):
            return "$hidden due to Ansible no_log flag$"
        return artifacts

    @property
    def is_container_group_task(self):
        return bool(self.instance_group and self.instance_group.is_container_group)

    @property
    def preferred_instance_groups(self):
        """Instance groups to run on: template's first, then inventory's,
        then the organization's; falls back to the global groups."""
        if self.organization is not None:
            organization_groups = [
                x for x in self.organization.instance_groups.all()
            ]
        else:
            organization_groups = []
        if self.inventory is not None:
            inventory_groups = [
                x for x in self.inventory.instance_groups.all()
            ]
        else:
            inventory_groups = []
        if self.job_template is not None:
            template_groups = [
                x for x in self.job_template.instance_groups.all()
            ]
        else:
            template_groups = []
        selected_groups = template_groups + inventory_groups + organization_groups
        if not selected_groups:
            return self.global_instance_groups
        return selected_groups

    def awx_meta_vars(self):
        """Add job/project-specific variables to the base awx meta vars."""
        r = super(Job, self).awx_meta_vars()
        if self.project:
            for name in JOB_VARIABLE_PREFIXES:
                r['{}_project_revision'.format(name)] = self.project.scm_revision
                r['{}_project_scm_branch'.format(name)] = self.project.scm_branch
        if self.scm_branch:
            for name in JOB_VARIABLE_PREFIXES:
                r['{}_job_scm_branch'.format(name)] = self.scm_branch
        if self.job_template:
            for name in JOB_VARIABLE_PREFIXES:
                r['{}_job_template_id'.format(name)] = self.job_template.pk
                r['{}_job_template_name'.format(name)] = self.job_template.name
        return r

    '''
    JobNotificationMixin
    '''
    def get_notification_templates(self):
        if not self.job_template:
            return NotificationTemplate.objects.none()
        return self.job_template.notification_templates

    def get_notification_friendly_name(self):
        return "Job"

    # NOTE(review): mutable default argument; safe only because `only` is
    # never mutated here -- do not append to it.
    def _get_inventory_hosts(self, only=[
            'name', 'ansible_facts', 'ansible_facts_modified', 'modified',
            'inventory_id'
    ]):
        """Inventory hosts limited to the given columns ([] when no inventory)."""
        if not self.inventory:
            return []
        return self.inventory.hosts.only(*only)

    def start_job_fact_cache(self, destination, modification_times, timeout=None):
        """Write each host's cached facts into `destination` for the run.

        Records each file's mtime in `modification_times` so that
        finish_job_fact_cache() can detect which files the run changed.
        """
        self.log_lifecycle("start_job_fact_cache")
        os.makedirs(destination, mode=0o700)
        hosts = self._get_inventory_hosts()
        if timeout is None:
            timeout = settings.ANSIBLE_FACT_CACHE_TIMEOUT
        if timeout > 0:
            # exclude hosts with fact data older than `settings.ANSIBLE_FACT_CACHE_TIMEOUT seconds`
            timeout = now() - datetime.timedelta(seconds=timeout)
            hosts = hosts.filter(ansible_facts_modified__gte=timeout)
        for host in hosts:
            filepath = os.sep.join(map(str, [destination, host.name]))
            # Guard against path traversal via hostile host names.
            if not os.path.realpath(filepath).startswith(destination):
                system_tracking_logger.error(
                    'facts for host {} could not be cached'.format(
                        smart_str(host.name)))
                continue
            try:
                with codecs.open(filepath, 'w', encoding='utf-8') as f:
                    os.chmod(f.name, 0o600)
                    json.dump(host.ansible_facts, f)
            except IOError:
                system_tracking_logger.error(
                    'facts for host {} could not be cached'.format(
                        smart_str(host.name)))
                continue
            # make note of the time we wrote the file so we can check if it changed later
            modification_times[filepath] = os.path.getmtime(filepath)

    def finish_job_fact_cache(self, destination, modification_times):
        """Read fact files back after the run and persist changes to hosts."""
        self.log_lifecycle("finish_job_fact_cache")
        for host in self._get_inventory_hosts():
            filepath = os.sep.join(map(str, [destination, host.name]))
            # Same path-traversal guard as in start_job_fact_cache().
            if not os.path.realpath(filepath).startswith(destination):
                system_tracking_logger.error(
                    'facts for host {} could not be cached'.format(
                        smart_str(host.name)))
                continue
            if os.path.exists(filepath):
                # If the file changed since we wrote it pre-playbook run...
                modified = os.path.getmtime(filepath)
                if modified > modification_times.get(filepath, 0):
                    with codecs.open(filepath, 'r', encoding='utf-8') as f:
                        try:
                            ansible_facts = json.load(f)
                        except ValueError:
                            # Corrupt/partial JSON: leave the host untouched.
                            continue
                    host.ansible_facts = ansible_facts
                    host.ansible_facts_modified = now()
                    host.save()
                    system_tracking_logger.info(
                        'New fact for inventory {} host {}'.format(
                            smart_str(host.inventory.name),
                            smart_str(host.name)),
                        extra=dict(
                            inventory_id=host.inventory.id,
                            host_name=host.name,
                            ansible_facts=host.ansible_facts,
                            ansible_facts_modified=host.
                            ansible_facts_modified.isoformat(),
                            job_id=self.id,
                        ),
                    )
            else:
                # if the file goes missing, ansible removed it (likely via clear_facts)
                host.ansible_facts = {}
                host.ansible_facts_modified = now()
                system_tracking_logger.info(
                    'Facts cleared for inventory {} host {}'.format(
                        smart_str(host.inventory.name),
                        smart_str(host.name)))
                host.save()
class Application(models.Model):
    """A single submitted application to a Form, with scoring and RSVP state."""

    form = models.ForeignKey(Form, null=False, blank=False)
    # Sequential number of this application within its form (1-based).
    number = models.PositiveIntegerField(default=1, blank=True)
    created = models.DateTimeField(auto_now_add=True)
    state = models.CharField(
        max_length=50,
        choices=APPLICATION_STATES,
        verbose_name="State of the application",
        null=True,
        default='submitted'
    )
    email = models.EmailField(null=True, blank=True)
    newsletter_optin = models.BooleanField(default=False)
    rsvp_status = models.CharField(
        max_length=50,
        choices=RSVP_STATUSES,
        verbose_name="RSVP status",
        default='waiting'
    )
    # Lazily generated one-time codes embedded in RSVP yes/no links.
    rsvp_yes_code = models.CharField(max_length=24, null=True)
    rsvp_no_code = models.CharField(max_length=24, null=True)

    def save(self, *args, **kwargs):
        """Assign the next sequential number within the form on first save.

        NOTE(review): count()+1 is racy under concurrent submissions — two
        applications can get the same number; confirm whether a DB-level
        constraint or select_for_update is needed.
        """
        if self.pk is None:
            self.number = Application.objects.filter(form=self.form).count() + 1
        super(Application, self).save(*args, **kwargs)

    @property
    def average_score(self):
        """Return the average of all positive scores, or 0 if there are none."""
        scores = [s.score for s in self.scores.all() if (s.score and s.score > 0)]
        if not scores:
            return 0
        return sum(scores) / float(len(scores))

    def variance(self):
        """Return the sample variance of non-zero scores (0 for fewer than 2)."""
        data = [s.score for s in self.scores.all() if s.score]
        n = len(data)
        if n == 0:
            return 0
        c = sum(data) / float(len(data))
        if n < 2:
            return 0
        ss = sum((x - c) ** 2 for x in data)
        # compensated two-pass correction for floating point error
        ss -= sum((x - c) for x in data) ** 2 / len(data)
        assert not ss < 0, 'negative sum of square deviations: %f' % ss
        return ss / (n - 1)

    def stdev(self):
        """Return the sample standard deviation of non-zero scores."""
        return self.variance() ** 0.5

    def generate_code(self):
        """Return a random 24-character alphanumeric RSVP code.

        NOTE(review): uses `random`, which is not cryptographically strong;
        consider `secrets.choice` if these codes must be unguessable.
        """
        return ''.join([random.choice(string.ascii_letters + string.digits) for i in range(24)])

    def get_rsvp_yes_code(self):
        """Return the 'yes' RSVP code, generating and persisting it on first use."""
        if not self.rsvp_yes_code:
            self.rsvp_yes_code = self.generate_code()
            self.save()
        return self.rsvp_yes_code

    def get_rsvp_no_code(self):
        """Return the 'no' RSVP code, generating and persisting it on first use."""
        if not self.rsvp_no_code:
            self.rsvp_no_code = self.generate_code()
            self.save()
        return self.rsvp_no_code

    @classmethod
    def get_by_rsvp_code(cls, code, page):
        """Look up an application by RSVP code on the given page.

        Returns (application, 'yes'|'no') on a match, (None, None) otherwise.
        """
        try:
            application = cls.objects.get(rsvp_yes_code=code, form__page=page)
            return application, 'yes'
        except cls.DoesNotExist:
            try:
                application = cls.objects.get(rsvp_no_code=code, form__page=page)
                return application, 'no'
            except cls.DoesNotExist:
                return None, None

    @property
    def is_accepted(self):
        return self.state == 'accepted'

    def is_scored_by_user(self, user):
        """ Returns true if the given user has scored this application
        or false if they have not, or there is a zero score. """
        return self.scores.filter(user=user, score__gt=0).exists()

    def __str__(self):
        return str(self.pk)
class Migration(migrations.Migration):
    # Initial schema for the todo app: Item and List models plus their
    # relation and the (author, slug) uniqueness constraint on List.

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Item',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('modified', models.DateTimeField(auto_now=True)),
                ('condicion', models.BooleanField(default=True)),
                ('title', models.CharField(max_length=140)),
                ('due_date', models.DateField(blank=True, null=True)),
                ('completed', models.BooleanField(default=False)),
                ('completed_date', models.DateField(blank=True, null=True)),
                ('note', models.TextField()),
                ('priority', models.PositiveIntegerField(choices=[(1, 'high'), (2, 'medium'), (3, 'normal'), (4, 'low')])),
                ('active', models.BooleanField(default=True)),
                ('uuid', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False)),
                ('assigned_to', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='todo_assigned_to', to=settings.AUTH_USER_MODEL)),
                ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='created_by+', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ['priority'],
            },
        ),
        migrations.CreateModel(
            name='List',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('modified', models.DateTimeField(auto_now=True)),
                ('condicion', models.BooleanField(default=True)),
                ('name', models.CharField(max_length=60)),
                ('slug', models.SlugField(max_length=100)),
                ('priority', models.PositiveIntegerField(choices=[(1, 'high'), (2, 'medium'), (3, 'normal'), (4, 'low')])),
                ('active', models.BooleanField(default=True)),
                ('uuid', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False)),
                ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='created_by+', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name_plural': 'Lists',
                'ordering': ['name'],
            },
        ),
        migrations.AddField(
            model_name='item',
            name='list',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='todo.List'),
        ),
        migrations.AlterUniqueTogether(
            name='list',
            unique_together={('author', 'slug')},
        ),
    ]
class Migration(migrations.Migration):
    # Initial schema for the shop app: session-bound Cart, catalog models
    # (Category, Item, Tag, ItemAlternative), Order, and the Cart<->Item
    # through model CartItem.

    initial = True

    dependencies = [
        ('sessions', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Cart',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
            ],
            options={
                'verbose_name': 'cart',
                'verbose_name_plural': 'carts',
            },
        ),
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(blank=True, max_length=50, null=True, verbose_name='name')),
                ('slug', models.SlugField(editable=False, unique=True)),
                ('order', models.PositiveSmallIntegerField(blank=True, default=0, null=True, verbose_name='order')),
            ],
            options={
                'verbose_name': 'category',
                'verbose_name_plural': 'categories',
                'ordering': ('order',),
            },
        ),
        migrations.CreateModel(
            name='Item',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100, verbose_name='name')),
                ('slug', models.SlugField(editable=False, unique=True)),
                ('description', models.CharField(max_length=250, verbose_name='description')),
                ('price', models.PositiveIntegerField()),
                ('image', models.ImageField(upload_to='shop/', verbose_name='image')),
                ('available', models.BooleanField(default=True, verbose_name='available')),
                ('promoted', models.BooleanField(default=False, verbose_name='promoted')),
                ('added', models.DateTimeField(auto_now_add=True, verbose_name='date')),
                ('category', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='shop.Category', verbose_name='category')),
            ],
            options={
                'verbose_name': 'item',
                'verbose_name_plural': 'items',
                'ordering': ('-added',),
            },
        ),
        migrations.CreateModel(
            name='Tag',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=20, verbose_name='name')),
            ],
            options={
                'verbose_name': 'tag',
                'verbose_name_plural': 'tags',
            },
        ),
        migrations.CreateModel(
            name='Order',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100, verbose_name='Full name')),
                ('email', models.EmailField(max_length=254, verbose_name='Email')),
                ('gsm_number', models.CharField(blank=True, max_length=20, null=True, verbose_name='Phone number')),
                ('identity_number', models.CharField(max_length=11, verbose_name='Identity number')),
                ('address', models.CharField(max_length=200, verbose_name='Address')),
                ('city', models.CharField(max_length=50, verbose_name='City')),
                ('country', models.CharField(max_length=50, verbose_name='Country')),
                ('zipcode', models.CharField(max_length=6, verbose_name='Zip code')),
                ('notes', models.TextField(blank=True, null=True, verbose_name='notes')),
                ('time', models.DateTimeField(auto_now_add=True)),
                ('cart', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='shop.Cart', verbose_name='cart')),
            ],
        ),
        migrations.CreateModel(
            name='ItemAlternative',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('description', models.CharField(max_length=250, verbose_name='description')),
                ('price', models.DecimalField(blank=True, decimal_places=2, max_digits=6, null=True, verbose_name='price')),
                ('available', models.BooleanField(default=True, verbose_name='available')),
                ('item', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='shop.Item')),
            ],
            options={
                'verbose_name': 'alternative',
                'verbose_name_plural': 'alternatives',
            },
        ),
        migrations.AddField(
            model_name='item',
            name='tags',
            field=models.ManyToManyField(blank=True, to='shop.Tag', verbose_name='tags'),
        ),
        migrations.CreateModel(
            name='CartItem',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('added', models.DateTimeField(auto_now_add=True, verbose_name='date')),
                ('paid', models.BooleanField(default=False, verbose_name='paid')),
                ('cart', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='shop.Cart', verbose_name='cart')),
                ('item', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='shop.Item', verbose_name='item')),
            ],
        ),
        migrations.AddField(
            model_name='cart',
            name='items',
            field=models.ManyToManyField(through='shop.CartItem', to='shop.Item'),
        ),
        migrations.AddField(
            model_name='cart',
            name='session',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='sessions.Session', verbose_name='session'),
        ),
    ]
class Migration(migrations.Migration):
    # Initial schema for the orders app: customer, Order, OrderItem (a
    # personalized-letter order line) and ItemOption, plus the FKs wiring
    # them to each other and to shop.Product.

    initial = True

    dependencies = [
        ('shop', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='customer',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('first_name', models.CharField(max_length=50)),
                ('last_name', models.CharField(max_length=50)),
                ('email', models.EmailField(max_length=254)),
                ('address', models.CharField(max_length=250)),
                ('address2', models.CharField(blank=True, max_length=250, null=True)),
                ('address3', models.CharField(blank=True, max_length=250, null=True)),
                ('postal_code', models.CharField(max_length=20)),
                ('city', models.CharField(max_length=100)),
                # NOTE(review): stored values are the literal strings
                # 'option 1'/'option 2' while the default is 'UK' — the
                # default is not a valid choice; confirm against the model.
                ('country', models.CharField(choices=[('option 1', 'UK'), ('option 2', 'USA')], default='UK', max_length=10)),
            ],
            options={
                'ordering': ('-created', ),
            },
        ),
        migrations.CreateModel(
            name='ItemOption',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('price', models.DecimalField(decimal_places=2, max_digits=10)),
                ('quantity', models.PositiveIntegerField(default=1)),
                ('name', models.CharField(db_index=True, max_length=200)),
            ],
        ),
        migrations.CreateModel(
            name='Order',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('agree_terms', models.BooleanField(default=False)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('paid', models.BooleanField(default=False)),
            ],
            options={
                'ordering': ('-created', ),
            },
        ),
        migrations.CreateModel(
            name='OrderItem',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('price', models.DecimalField(decimal_places=2, max_digits=10)),
                ('total_price', models.DecimalField(decimal_places=2, max_digits=10, null=True)),
                ('quantity', models.PositiveIntegerField(default=1)),
                ('name', models.CharField(db_index=True, max_length=200)),
                ('sender_first_name', models.CharField(max_length=50)),
                ('sender_last_name', models.CharField(max_length=50)),
                ('sender_email', models.EmailField(max_length=254)),
                ('child_first_name', models.CharField(max_length=50)),
                ('child_last_name', models.CharField(max_length=50)),
                ('child_address', models.CharField(max_length=250)),
                ('child_address2', models.CharField(blank=True, max_length=250, null=True)),
                ('child_address3', models.CharField(blank=True, max_length=250, null=True)),
                ('child_postal_code', models.CharField(max_length=20)),
                ('child_city', models.CharField(max_length=100)),
                ('child_country', models.CharField(choices=[('UK', 'UK'), ('USA', 'USA')], default='UK', max_length=10)),
                # NOTE(review): default 'Boy' is not among the stored values
                # ('M'/'F'); confirm against the model.
                ('child_boy_girl', models.CharField(choices=[('M', 'Boy'), ('F', 'Girl')], default='Boy', max_length=10)),
                ('child_age', models.IntegerField(blank=True, null=True)),
                ('child_birth_year', models.IntegerField(default=2013)),
                # NOTE(review): choices store strings but default is int 1
                # for month/day below; confirm against the model.
                ('child_birth_month', models.CharField(choices=[('1', 'January'), ('2', 'February'), ('3', 'March'), ('4', 'April'), ('5', 'May'), ('6', 'June'), ('7', 'July'), ('8', 'August'), ('9', 'September'), ('10', 'October'), ('11', 'November'), ('12', 'December')], default=1, max_length=10)),
                ('child_birth_day', models.CharField(choices=[(str(d), str(d)) for d in range(1, 32)], default=1, max_length=10)),
                # NOTE(review): the 'Month' key appears twice (for 'Months'
                # and 'Month'); confirm against the model.
                ('child_age_time', models.CharField(choices=[('Years', 'Years'), ('Year', 'Year'), ('Month', 'Months'), ('Month', 'Month'), ('Days', 'Days')], default='Years', max_length=10)),
                ('child_relative_name', models.CharField(max_length=50)),
                ('child_friend_name', models.CharField(max_length=50)),
                ('child_present', models.CharField(max_length=200)),
                ('child_achievement', models.CharField(max_length=200)),
                ('letter_design', models.CharField(choices=[('1', 'letter1'), ('2', 'letter2'), ('3', 'letter3'), ('4', 'letter4'), ('5', 'letter5')], default='1', max_length=10)),
                ('pdf_download', models.BooleanField(default=False)),
                ('agree_terms', models.BooleanField(default=False)),
                ('order', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='items', to='orders.Order')),
                ('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='order_items', to='shop.Product')),
            ],
        ),
        migrations.AddField(
            model_name='itemoption',
            name='orderitem',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='order_option_id', to='orders.OrderItem'),
        ),
        migrations.AddField(
            model_name='itemoption',
            name='product',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='order_option', to='shop.Product'),
        ),
        migrations.AddField(
            model_name='customer',
            name='order',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='customer_order', to='orders.Order'),
        ),
    ]
class Migration(migrations.Migration):
    # Initial schema for the beer_tracker app: BeerModel and per-user
    # RateModel ratings.

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='BeerModel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(default='', max_length=254)),
                ('style', models.CharField(default='', max_length=254)),
                # NOTE(review): default='' on IntegerFields is not a valid
                # integer default; confirm against the model before use.
                ('ibu', models.IntegerField(default='')),
                ('calories', models.IntegerField(default='')),
                ('abv', models.IntegerField(default='')),
                ('location', models.CharField(default='', max_length=254)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('user', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name_plural': 'Beers',
            },
        ),
        migrations.CreateModel(
            name='RateModel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # NOTE(review): string defaults ('5', '10') vs integer
                # choices; confirm against the model.
                ('aroma', models.PositiveIntegerField(choices=[(5, 5), (4, 4), (3, 3), (2, 2), (1, 1)], default='5')),
                ('appearance', models.PositiveIntegerField(choices=[(5, 5), (4, 4), (3, 3), (2, 2), (1, 1)], default='5')),
                ('taste', models.PositiveIntegerField(choices=[(10, 10), (9, 9), (8, 8), (7, 7), (6, 6), (5, 5), (4, 4), (3, 3), (2, 2), (1, 1)], default='10')),
                ('beer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='beer_tracker.BeerModel')),
                ('user', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name_plural': 'Ratings',
            },
        ),
    ]
class Migration(migrations.Migration):
    # Initial schema for the courses app: Subject/Course/Module hierarchy,
    # the four content item types (Text, Video, Image, File), and the
    # generic-FK Content model tying items to modules.

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('contenttypes', '0002_remove_content_type_name'),
    ]

    operations = [
        migrations.CreateModel(
            name='Course',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=200)),
                ('slug', models.SlugField(max_length=200, unique=True)),
                ('overview', models.TextField(blank=True, null=True)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='courses_created', to=settings.AUTH_USER_MODEL)),
                ('students', models.ManyToManyField(blank=True, related_name='courses_joined', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ('-created',),
            },
        ),
        migrations.CreateModel(
            name='Subject',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=200)),
                ('slug', models.SlugField(max_length=200, unique=True)),
            ],
            options={
                'ordering': ('title',),
            },
        ),
        migrations.CreateModel(
            name='Video',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=250)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('url', models.URLField()),
                ('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='video_related', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='Text',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=250)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('content', models.TextField()),
                ('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='text_related', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='Module',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=200)),
                ('description', models.TextField(blank=True)),
                ('order', courses.fields.OrderField(blank=True)),
                ('course', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='modules', to='courses.Course')),
            ],
            options={
                'ordering': ['order'],
            },
        ),
        migrations.CreateModel(
            name='Image',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=250)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('file', models.FileField(upload_to='images')),
                ('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='image_related', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='File',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=250)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('file', models.FileField(upload_to='files')),
                ('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='file_related', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.AddField(
            model_name='course',
            name='subject',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='courses', to='courses.Subject'),
        ),
        migrations.CreateModel(
            name='Content',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('object_id', models.PositiveIntegerField()),
                ('order', courses.fields.OrderField(blank=True)),
                ('content_type', models.ForeignKey(limit_choices_to={'model__in': ('text', 'video', 'image', 'file')}, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')),
                ('module', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='contents', to='courses.Module')),
            ],
            options={
                'ordering': ['order'],
            },
        ),
    ]
class Delivery(models.Model):
    """ Model for a specific Delivery (past, current or future) """
    date = models.DateField()
    # NOTE: a `week_number` model field used to be declared here, but it was
    # silently shadowed by the `week_number()` method below at class-creation
    # time and thus never existed on the model; it has been removed.
    is_populated = models.BooleanField(default=False)
    # Cached parity flags, recomputed on every save from week_number().
    week_0 = models.BooleanField(default=False)
    week_1 = models.BooleanField(default=True)

    def save(self, *args, **kwargs):
        # keep the parity flags in sync with the (date-derived) week number
        self.week_0 = self.week_number() % 2 == 0
        self.week_1 = self.week_number() % 2 == 1
        super(Delivery, self).save(*args, **kwargs)

    def __str__(self):
        return "%s #%s" % (self.date, self.week_number())

    def week_number(self):
        """ return the week number. This is a custom numbering where
        2018-01-01 is week 0. ISO week numbering is not used to make sure
        customer keeps getting every second week at the turn of the year.
        """
        # antal dagar sedan 1 januari 2018
        # (days since January 1st 2018; the first week of 2018 is week 0,
        # by definition)
        days = (self.date - date(year=2018, month=1, day=1)).days
        return floor(days / 7)

    def is_week_1(self):
        """ return true if week_number is in group 1 """
        return self.week_number() % 2 == 1

    def is_week_0(self):
        """ return true if week_number is in group 0 """
        return self.week_number() % 2 == 0

    def forecast(self):
        """ return a queryset representing the customers who will receive
        this delivery based on frequency, cancellations etc.

        Raises AttributeError: can not forecast an already populated delivery.
        """
        if self.is_populated:
            raise AttributeError('Can not forecast an already populated delivery')
        if self.is_week_0():
            query = Customer.objects.filter(week_0=True)
        else:
            query = Customer.objects.filter(week_1=True)
        # drop customers who explicitly cancelled this delivery
        query = query.exclude(cancels__pk=self.pk)
        return query

    def populate(self):
        ''' "låser" kunder som ingår i denna leverans
        (locks in the customers included in this delivery)

        Raises AttributeError: when the delivery is in the past or already
        populated.
        '''
        if self.date < date.today():
            raise AttributeError('Can not populate delivery in the past')
        if self.is_populated:
            raise AttributeError('Can not populate an already populated delivery')
        for customer in self.forecast():
            delivery_reciver = DeliveryReciver(delivery=self, customer=customer)
            delivery_reciver.save()
        self.is_populated = True
        self.save()
class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, NotificationFieldsModel):
    '''
    Concrete base class for unified job templates.
    '''

    # status inherits from related jobs. Thus, status must be able to be set to any status that a job status is settable to.
    JOB_STATUS_CHOICES = [
        ('new', _('New')),  # Job has been created, but not started.
        ('pending', _('Pending')),  # Job has been queued, but is not yet running.
        ('waiting', _('Waiting')),  # Job is waiting on an update/dependency.
        ('running', _('Running')),  # Job is currently running.
        ('successful', _('Successful')),  # Job completed successfully.
        ('failed', _('Failed')),  # Job completed, but with failures.
        ('error', _('Error')),  # The job was unable to run.
        ('canceled', _('Canceled')),  # The job was canceled before completion.
    ]

    COMMON_STATUS_CHOICES = JOB_STATUS_CHOICES + [
        ('never updated', _('Never Updated')),  # A job has never been run using this template.
    ]

    PROJECT_STATUS_CHOICES = COMMON_STATUS_CHOICES + [
        ('ok', _('OK')),  # Project is not configured for SCM and path exists.
        ('missing', _('Missing')),  # Project path does not exist.
    ]

    INVENTORY_SOURCE_STATUS_CHOICES = COMMON_STATUS_CHOICES + [
        ('none', _('No External Source')),  # Inventory source is not configured to update from an external source.
    ]

    JOB_TEMPLATE_STATUS_CHOICES = COMMON_STATUS_CHOICES

    DEPRECATED_STATUS_CHOICES = [
        # No longer used for Project / Inventory Source:
        ('updating', _('Updating')),  # Same as running.
    ]

    # De-duplicated union of all of the above (later duplicates collapse
    # into earlier keys; OrderedDict preserves first-seen order).
    ALL_STATUS_CHOICES = OrderedDict(
        PROJECT_STATUS_CHOICES + INVENTORY_SOURCE_STATUS_CHOICES + JOB_TEMPLATE_STATUS_CHOICES + DEPRECATED_STATUS_CHOICES
    ).items()

    # NOTE: Working around a django-polymorphic issue: https://github.com/django-polymorphic/django-polymorphic/issues/229
    base_manager_name = 'base_objects'

    class Meta:
        app_label = 'main'
        # unique_together here is intentionally commented out. Please make sure sub-classes of this model
        # contain at least this uniqueness restriction: SOFT_UNIQUE_TOGETHER = [('polymorphic_ctype', 'name')]
        #unique_together = [('polymorphic_ctype', 'name')]

    old_pk = models.PositiveIntegerField(
        null=True,
        default=None,
        editable=False,
    )
    current_job = models.ForeignKey(
        'UnifiedJob',
        null=True,
        default=None,
        editable=False,
        related_name='%(class)s_as_current_job+',
        on_delete=models.SET_NULL,
    )
    last_job = models.ForeignKey(
        'UnifiedJob',
        null=True,
        default=None,
        editable=False,
        related_name='%(class)s_as_last_job+',
        on_delete=models.SET_NULL,
    )
    last_job_failed = models.BooleanField(
        default=False,
        editable=False,
    )
    last_job_run = models.DateTimeField(
        null=True,
        default=None,
        editable=False,
    )
    #on_missed_schedule = models.CharField(
    #    max_length=32,
    #    choices=[],
    #)
    next_job_run = models.DateTimeField(
        null=True,
        default=None,
        editable=False,
    )
    next_schedule = models.ForeignKey(  # Schedule entry responsible for next_job_run.
        'Schedule',
        null=True,
        default=None,
        editable=False,
        related_name='%(class)s_as_next_schedule+',
        on_delete=models.SET_NULL,
    )
    status = models.CharField(
        max_length=32,
        choices=ALL_STATUS_CHOICES,
        default='ok',
        editable=False,
    )
    credentials = models.ManyToManyField(
        'Credential',
        related_name='%(class)ss',
    )
    labels = models.ManyToManyField("Label", blank=True, related_name='%(class)s_labels')
    instance_groups = models.ManyToManyField(
        'InstanceGroup',
        blank=True,
    )

    def get_absolute_url(self, request=None):
        """Delegate to the real (polymorphic) subclass instance, if any."""
        real_instance = self.get_real_instance()
        if real_instance != self:
            return real_instance.get_absolute_url(request=request)
        else:
            return ''

    def unique_error_message(self, model_class, unique_check):
        # If polymorphic_ctype is part of a unique check, return a list of the
        # remaining fields instead of the error message.
        if len(unique_check) >= 2 and 'polymorphic_ctype' in unique_check:
            return [x for x in unique_check if x != 'polymorphic_ctype']
        else:
            return super(UnifiedJobTemplate, self).unique_error_message(model_class, unique_check)

    @classmethod
    def invalid_user_capabilities_prefetch_models(cls):
        if cls != UnifiedJobTemplate:
            return []
        return ['project', 'inventorysource', 'systemjobtemplate']

    @classmethod
    def _submodels_with_roles(cls):
        """Content type ids of subclasses that carry RBAC roles."""
        ujt_classes = [
            c for c in cls.__subclasses__()
            if c._meta.model_name not in ['inventorysource', 'systemjobtemplate']
        ]
        ct_dict = ContentType.objects.get_for_models(*ujt_classes)
        return [ct.id for ct in ct_dict.values()]

    @classmethod
    def accessible_pk_qs(cls, accessor, role_field):
        '''
        A re-implementation of accessible pk queryset for the "normal" unified JTs.
        Does not return inventory sources or system JTs, these should
        be handled inside of get_queryset where it is utilized.
        '''
        # do not use this if in a subclass
        if cls != UnifiedJobTemplate:
            return super(UnifiedJobTemplate, cls).accessible_pk_qs(accessor, role_field)
        return ResourceMixin._accessible_pk_qs(
            cls, accessor, role_field, content_types=cls._submodels_with_roles())

    def _perform_unique_checks(self, unique_checks):
        # Handle the list of unique fields returned above. Replace with an
        # appropriate error message for the remaining field(s) in the unique
        # check and cleanup the errors dictionary.
        errors = super(UnifiedJobTemplate, self)._perform_unique_checks(unique_checks)
        # Iterate over snapshots of the dict items: the loop body inserts new
        # keys (setdefault) and the cleanup pass deletes keys, either of which
        # would raise RuntimeError if done while iterating the live dict.
        for key, msgs in list(errors.items()):
            if key != NON_FIELD_ERRORS:
                continue
            for msg in msgs:
                if isinstance(msg, (list, tuple)):
                    if len(msg) == 1:
                        new_key = msg[0]
                    else:
                        new_key = NON_FIELD_ERRORS
                    model_class = self.get_real_concrete_instance_class()
                    errors.setdefault(new_key, []).append(
                        self.unique_error_message(model_class, msg))
            # keep only real messages under the original key
            errors[key] = [x for x in msgs if not isinstance(x, (list, tuple))]
        # drop any keys left with no messages
        for key in [k for k, v in errors.items() if not v]:
            del errors[key]
        return errors

    def validate_unique(self, exclude=None):
        # Make sure we set the polymorphic_ctype before validating, and omit
        # it from the list of excluded fields.
        self.pre_save_polymorphic()
        if exclude and 'polymorphic_ctype' in exclude:
            exclude = [x for x in exclude if x != 'polymorphic_ctype']
        return super(UnifiedJobTemplate, self).validate_unique(exclude)

    @property  # Alias for backwards compatibility.
    def current_update(self):
        return self.current_job

    @property  # Alias for backwards compatibility.
    def last_update(self):
        return self.last_job

    @property  # Alias for backwards compatibility.
    def last_update_failed(self):
        return self.last_job_failed

    @property  # Alias for backwards compatibility.
    def last_updated(self):
        return self.last_job_run

    def update_computed_fields(self):
        """Refresh next_schedule/next_job_run from the soonest enabled schedule."""
        Schedule = self._meta.get_field('schedules').related_model
        related_schedules = Schedule.objects.filter(
            enabled=True, unified_job_template=self,
            next_run__isnull=False).order_by('-next_run')
        if related_schedules.exists():
            self.next_schedule = related_schedules[0]
            self.next_job_run = related_schedules[0].next_run
            self.save(update_fields=['next_schedule', 'next_job_run'])

    def save(self, *args, **kwargs):
        # If update_fields has been specified, add our field names to it,
        # if it hasn't been specified, then we're just doing a normal save.
        update_fields = kwargs.get('update_fields', [])
        # Update status and last_updated fields.
        if not getattr(_inventory_updates, 'is_updating', False):
            updated_fields = self._set_status_and_last_job_run(save=False)
            for field in updated_fields:
                if field not in update_fields:
                    update_fields.append(field)
        # Do the actual save.
        try:
            super(UnifiedJobTemplate, self).save(*args, **kwargs)
        except ValueError:
            # A fix for https://trello.com/c/S4rU1F21
            # Does not resolve the root cause. Tis merely a bandaid.
            if 'scm_delete_on_next_update' in update_fields:
                update_fields.remove('scm_delete_on_next_update')
                super(UnifiedJobTemplate, self).save(*args, **kwargs)

    def _get_current_status(self):
        # Override in subclasses as needed.
        if self.current_job and self.current_job.status:
            return self.current_job.status
        elif not self.last_job:
            return 'never updated'
        elif self.last_job_failed:
            return 'failed'
        else:
            return 'successful'

    def _get_last_job_run(self):
        # Override in subclasses as needed.
        if self.last_job:
            return self.last_job.finished

    def _set_status_and_last_job_run(self, save=True):
        """Recompute status/last_job_run; returns the list of changed fields."""
        status = self._get_current_status()
        last_job_run = self._get_last_job_run()
        return self.update_fields(status=status, last_job_run=last_job_run, save=save)

    def _can_update(self):
        # Override in subclasses as needed.
        return False

    @property
    def can_update(self):
        return self._can_update()

    def update(self, **kwargs):
        """Launch a new job from this template if updating is allowed."""
        if self.can_update:
            unified_job = self.create_unified_job()
            unified_job.signal_start(**kwargs)
            return unified_job

    @classmethod
    def _get_unified_job_class(cls):
        '''
        Return subclass of UnifiedJob that is created from this template.
        '''
        raise NotImplementedError  # Implement in subclass.

    @classmethod
    def _get_unified_job_field_names(cls):
        '''
        Return field names that should be copied from template to new job.
        '''
        raise NotImplementedError  # Implement in subclass.
    @property
    def notification_templates(self):
        '''
        Return notification_templates relevant to this Unified Job Template
        '''
        # NOTE: Derived classes should implement
        return NotificationTemplate.objects.none()

    def create_unified_job(self, **kwargs):
        '''
        Create a new unified job based on this unified job template.

        ``kwargs`` may only contain field names returned by
        _get_unified_job_field_names(), plus the special keys
        'survey_passwords' and '_eager_fields' which are popped off first.
        Returns the saved unified job instance.
        '''
        new_job_passwords = kwargs.pop('survey_passwords', {})
        eager_fields = kwargs.pop('_eager_fields', None)
        unified_job_class = self._get_unified_job_class()
        fields = self._get_unified_job_field_names()
        # Reject any override that is not an allowed copy-field.
        unallowed_fields = set(kwargs.keys()) - set(fields)
        if unallowed_fields:
            raise Exception('Fields {} are not allowed as overrides.'.format(
                unallowed_fields))
        unified_job = copy_model_by_class(self, unified_job_class, fields,
                                          kwargs)
        # Eager fields are set directly on the new job, bypassing copy rules.
        if eager_fields:
            for fd, val in eager_fields.items():
                setattr(unified_job, fd, val)
        # Set the unified job template back-link on the job
        parent_field_name = unified_job_class._get_parent_field_name()
        setattr(unified_job, parent_field_name, self)
        # For JobTemplate-based jobs with surveys, add passwords to list for perma-redaction
        if hasattr(self, 'survey_spec') and getattr(self, 'survey_enabled',
                                                    False):
            for password in self.survey_password_variables():
                new_job_passwords[password] = REPLACE_STR
        if new_job_passwords:
            unified_job.survey_passwords = new_job_passwords
            kwargs['survey_passwords'] = new_job_passwords  # saved in config object for relaunch
        # Must save before copying many-to-many relationships below.
        unified_job.save()

        # Labels and credentials copied here
        if kwargs.get('credentials'):
            Credential = UnifiedJob._meta.get_field(
                'credentials').related_model
            cred_dict = Credential.unique_dict(self.credentials.all())
            prompted_dict = Credential.unique_dict(kwargs['credentials'])
            # combine prompted credentials with JT
            cred_dict.update(prompted_dict)
            kwargs['credentials'] = [cred for cred in cred_dict.values()]
        # Local import to avoid a circular import at module load time.
        from awx.main.signals import disable_activity_stream
        with disable_activity_stream():
            copy_m2m_relationships(self, unified_job, fields, kwargs=kwargs)
        if 'extra_vars' in kwargs:
            unified_job.handle_extra_data(kwargs['extra_vars'])
        if not getattr(self, '_deprecated_credential_launch', False):
            # Create record of provided prompts for relaunch and rescheduling
            unified_job.create_config_from_prompts(kwargs)
        return unified_job

    @classmethod
    def get_ask_mapping(cls):
        '''
        Creates dictionary that maps the unified job field (keys)
        to the field that enables prompting for the field (values)
        '''
        mapping = {}
        for field in cls._meta.fields:
            if isinstance(field, AskForField):
                mapping[field.allows_field] = field.name
        return mapping

    @classmethod
    def _get_unified_jt_copy_names(cls):
        # By default the copy uses the same field list as job creation;
        # subclasses may override.
        return cls._get_unified_job_field_names()

    def copy_unified_jt(self):
        '''
        Returns saved object, including related fields.
        Create a copy of this unified job template.
        '''
        unified_jt_class = self.__class__
        fields = self._get_unified_jt_copy_names()
        unified_jt = copy_model_by_class(self, unified_jt_class, fields, {})
        # Stamp the copy's name with the current time, replacing any previous
        # "@ HH:MM:SS" suffix from an earlier copy.
        time_now = now()
        unified_jt.name = unified_jt.name.split(
            '@', 1)[0] + ' @ ' + time_now.strftime('%I:%M:%S %p')
        unified_jt.save()
        copy_m2m_relationships(self, unified_jt, fields)
        return unified_jt

    def _accept_or_ignore_job_kwargs(self, **kwargs):
        '''
        Override in subclass if template accepts _any_ prompted params
        '''
        # Default behavior: reject every prompted field.
        errors = {}
        if kwargs:
            for field_name in kwargs.keys():
                errors[field_name] = [_("Field is not allowed on launch.")]
        return ({}, kwargs, errors)

    def accept_or_ignore_variables(self, data, errors=None):
        '''
        If subclasses accept any `variables` or `extra_vars`, they should
        define _accept_or_ignore_variables to place those variables in the
        accepted dict, according to the acceptance rules of the template.

        Returns a 3-tuple (accepted, rejected, errors).
        '''
        if errors is None:
            errors = {}
        if not isinstance(data, dict):
            try:
                data = parse_yaml_or_json(data, silent_failure=False)
            except ParseError as exc:
                errors['extra_vars'] = [str(exc)]
                return ({}, data, errors)
        if hasattr(self, '_accept_or_ignore_variables'):
            # SurveyJobTemplateMixin cannot override any methods because of
            # resolution order, forced by how metaclass processes fields,
            # thus the need for hasattr check
            return self._accept_or_ignore_variables(data, errors)
        elif data:
            errors['extra_vars'] = [
                _('Variables {list_of_keys} provided, but this template cannot accept variables.'
                  .format(list_of_keys=', '.join(data.keys())))
            ]
        return ({}, data, errors)
class TrackInList(Lara_apiModel, models.Model):
    """Associates a MusicTrack with a MusicList and counts its visits.

    Deleting either the list or the track cascades to this row.
    """
    # NOTE(review): assumes Lara_apiModel is an abstract mixin — confirm.
    musiclist = models.ForeignKey(MusicList, on_delete=models.CASCADE)
    musictrack = models.ForeignKey(MusicTrack, on_delete=models.CASCADE)
    # How many times this track was visited in the context of this list.
    visits = models.PositiveIntegerField(default=0)
class UnifiedJob(PolymorphicModel, PasswordFieldsModel,
                 CommonModelNameNotUnique, UnifiedJobTypeStringMixin,
                 TaskManagerUnifiedJobMixin):
    '''
    Concrete base class for unified job run by the task engine.
    '''

    # Status values are shared with the template model so the two stay in sync.
    STATUS_CHOICES = UnifiedJobTemplate.JOB_STATUS_CHOICES

    LAUNCH_TYPE_CHOICES = [
        ('manual', _('Manual')),          # Job was started manually by a user.
        ('relaunch', _('Relaunch')),      # Job was started via relaunch.
        ('callback', _('Callback')),      # Job was started via host callback.
        ('scheduled', _('Scheduled')),    # Job was started from a schedule.
        ('dependency', _('Dependency')),  # Job was started as a dependency of another job.
        ('workflow', _('Workflow')),      # Job was started from a workflow job.
        ('sync', _('Sync')),              # Job was started from a project sync.
        ('scm', _('SCM Update')),         # Job was created as an Inventory SCM sync.
    ]

    # Encrypted at rest via PasswordFieldsModel machinery.
    PASSWORD_FIELDS = ('start_args', )

    # NOTE: Working around a django-polymorphic issue:
    # https://github.com/django-polymorphic/django-polymorphic/issues/229
    base_manager_name = 'base_objects'

    class Meta:
        app_label = 'main'

    old_pk = models.PositiveIntegerField(
        null=True,
        default=None,
        editable=False,
    )
    unified_job_template = models.ForeignKey(
        'UnifiedJobTemplate',
        null=True,  # Some jobs can be run without a template.
        default=None,
        editable=False,
        related_name='%(class)s_unified_jobs',
        on_delete=models.SET_NULL,
    )
    launch_type = models.CharField(
        max_length=20,
        choices=LAUNCH_TYPE_CHOICES,
        default='manual',
        editable=False,
    )
    # Which schedule entry was responsible for starting this job.
    schedule = models.ForeignKey(
        'Schedule',
        null=True,
        default=None,
        editable=False,
        on_delete=models.SET_NULL,
    )
    dependent_jobs = models.ManyToManyField(
        'self',
        editable=False,
        related_name='%(class)s_blocked_jobs+',
    )
    execution_node = models.TextField(
        blank=True,
        default='',
        editable=False,
        help_text=_("The node the job executed on."),
    )
    notifications = models.ManyToManyField(
        'Notification',
        editable=False,
        related_name='%(class)s_notifications',
    )
    cancel_flag = models.BooleanField(
        blank=True,
        default=False,
        editable=False,
    )
    status = models.CharField(
        max_length=20,
        choices=STATUS_CHOICES,
        default='new',
        editable=False,
    )
    failed = models.BooleanField(
        default=False,
        editable=False,
    )
    started = models.DateTimeField(
        null=True,
        default=None,
        editable=False,
        help_text=_("The date and time the job was queued for starting."),
    )
    finished = models.DateTimeField(
        null=True,
        default=None,
        editable=False,
        help_text=_("The date and time the job finished execution."),
    )
    elapsed = models.DecimalField(
        max_digits=12,
        decimal_places=3,
        editable=False,
        help_text=_("Elapsed time in seconds that the job ran."),
    )
    job_args = prevent_search(
        models.TextField(
            blank=True,
            default='',
            editable=False,
        ))
    job_cwd = models.CharField(
        max_length=1024,
        blank=True,
        default='',
        editable=False,
    )
    job_env = prevent_search(
        JSONField(
            blank=True,
            # Use the dict callable, not a literal {}: a literal would be one
            # shared mutable instance across all model instances (Django
            # recommends a callable for mutable field defaults).
            default=dict,
            editable=False,
        ))
    job_explanation = models.TextField(
        blank=True,
        default='',
        editable=False,
        help_text=
        _("A status field to indicate the state of the job if it wasn't able to run and capture stdout"
          ),
    )
    start_args = prevent_search(
        models.TextField(
            blank=True,
            default='',
            editable=False,
        ))
    result_stdout_text = models.TextField(
        blank=True,
        default='',
        editable=False,
    )
    result_stdout_file = models.TextField(  # FilePathfield?
        blank=True,
        default='',
        editable=False,
    )
    result_traceback = models.TextField(
        blank=True,
        default='',
        editable=False,
    )
    celery_task_id = models.CharField(
        max_length=100,
        blank=True,
        default='',
        editable=False,
    )
    labels = models.ManyToManyField("Label",
                                    blank=True,
                                    related_name='%(class)s_labels')
    instance_group = models.ForeignKey(
        'InstanceGroup',
        blank=True,
        null=True,
        default=None,
        on_delete=models.SET_NULL,
        help_text=_('The Rampart/Instance group the job was run under'),
    )
    credentials = models.ManyToManyField(
        'Credential',
        related_name='%(class)ss',
    )

    def get_absolute_url(self, request=None):
        """Delegate to the concrete polymorphic instance, else empty string."""
        real_instance = self.get_real_instance()
        if real_instance != self:
            return real_instance.get_absolute_url(request=request)
        else:
            return ''

    def get_ui_url(self):
        """Delegate to the concrete polymorphic instance, else empty string."""
        real_instance = self.get_real_instance()
        if real_instance != self:
            return real_instance.get_ui_url()
        else:
            return ''

    @classmethod
    def _get_task_class(cls):
        raise NotImplementedError  # Implement in subclasses.

    @classmethod
    def supports_isolation(cls):
        # Whether this job type may run on an isolated instance.
        return False

    @classmethod
    def _get_parent_field_name(cls):
        return 'unified_job_template'  # Override in subclasses.

    @classmethod
    def _get_unified_job_template_class(cls):
        '''
        Return subclass of UnifiedJobTemplate that applies to this unified
        job.
        '''
        raise NotImplementedError  # Implement in subclass.
def _global_timeout_setting(self): "Override in child classes, None value indicates this is not configurable" return None def _resources_sufficient_for_launch(self): return True def __unicode__(self): return u'%s-%s-%s' % (self.created, self.id, self.status) @property def log_format(self): return '{} {} ({})'.format(get_type_for_model(type(self)), self.id, self.status) def _get_parent_instance(self): return getattr(self, self._get_parent_field_name(), None) def _update_parent_instance_no_save(self, parent_instance, update_fields=[]): def parent_instance_set(key, val): setattr(parent_instance, key, val) if key not in update_fields: update_fields.append(key) if parent_instance: if self.status in ('pending', 'waiting', 'running'): if parent_instance.current_job != self: parent_instance_set('current_job', self) # Update parent with all the 'good' states of it's child if parent_instance.status != self.status: parent_instance_set('status', self.status) elif self.status in ('successful', 'failed', 'error', 'canceled'): if parent_instance.current_job == self: parent_instance_set('current_job', None) parent_instance_set('last_job', self) parent_instance_set('last_job_failed', self.failed) return update_fields def _update_parent_instance(self): parent_instance = self._get_parent_instance() if parent_instance: update_fields = self._update_parent_instance_no_save( parent_instance) parent_instance.save(update_fields=update_fields) def save(self, *args, **kwargs): """Save the job, with current status, to the database. Ensure that all data is consistent before doing so. """ # If update_fields has been specified, add our field names to it, # if it hasn't been specified, then we're just doing a normal save. update_fields = kwargs.get('update_fields', []) # Get status before save... status_before = self.status or 'new' # If this job already exists in the database, retrieve a copy of # the job in its prior state. 
if self.pk: self_before = self.__class__.objects.get(pk=self.pk) if self_before.status != self.status: status_before = self_before.status # Sanity check: Is this a failure? Ensure that the failure value # matches the status. failed = bool(self.status in ('failed', 'error', 'canceled')) if self.failed != failed: self.failed = failed if 'failed' not in update_fields: update_fields.append('failed') # Sanity check: Has the job just started? If so, mark down its start # time. if self.status == 'running' and not self.started: self.started = now() if 'started' not in update_fields: update_fields.append('started') # Sanity check: Has the job just completed? If so, mark down its # completion time, and record its output to the database. if self.status in ('successful', 'failed', 'error', 'canceled') and not self.finished: # Record the `finished` time. self.finished = now() if 'finished' not in update_fields: update_fields.append('finished') # If we have a start and finished time, and haven't already calculated # out the time that elapsed, do so. if self.started and self.finished and not self.elapsed: td = self.finished - self.started elapsed = (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / (10**6 * 1.0) else: elapsed = 0.0 if self.elapsed != elapsed: self.elapsed = str(elapsed) if 'elapsed' not in update_fields: update_fields.append('elapsed') # Ensure that the job template information is current. if self.unified_job_template != self._get_parent_instance(): self.unified_job_template = self._get_parent_instance() if 'unified_job_template' not in update_fields: update_fields.append('unified_job_template') # Okay; we're done. Perform the actual save. result = super(UnifiedJob, self).save(*args, **kwargs) # If status changed, update the parent instance. if self.status != status_before: self._update_parent_instance() # Done. 
return result def delete(self): if self.result_stdout_file != "": try: os.remove(self.result_stdout_file) except Exception: pass super(UnifiedJob, self).delete() def copy_unified_job(self, limit=None): ''' Returns saved object, including related fields. Create a copy of this unified job for the purpose of relaunch ''' unified_job_class = self.__class__ unified_jt_class = self._get_unified_job_template_class() parent_field_name = unified_job_class._get_parent_field_name() fields = unified_jt_class._get_unified_job_field_names() + [ parent_field_name ] create_data = {"launch_type": "relaunch"} if limit: create_data["limit"] = limit prompts = self.launch_prompts() if self.unified_job_template and prompts: prompts['_eager_fields'] = create_data unified_job = self.unified_job_template.create_unified_job( **prompts) else: unified_job = copy_model_by_class(self, unified_job_class, fields, {}) for fd, val in create_data.items(): setattr(unified_job, fd, val) unified_job.save() # Labels coppied here copy_m2m_relationships(self, unified_job, fields) return unified_job def launch_prompts(self): ''' Return dictionary of prompts job was launched with returns None if unknown ''' JobLaunchConfig = self._meta.get_field('launch_config').related_model try: config = self.launch_config return config.prompts_dict() except JobLaunchConfig.DoesNotExist: return None def create_config_from_prompts(self, kwargs): ''' Create a launch configuration entry for this job, given prompts returns None if it can not be created ''' if self.unified_job_template is None: return None JobLaunchConfig = self._meta.get_field('launch_config').related_model config = JobLaunchConfig(job=self) valid_fields = self.unified_job_template.get_ask_mapping().keys() if hasattr(self, 'extra_vars'): valid_fields.extend(['survey_passwords', 'extra_vars']) for field_name, value in kwargs.items(): if field_name not in valid_fields: raise Exception( 'Unrecognized launch config field {}.'.format(field_name)) if field_name == 
'credentials': continue key = field_name if key == 'extra_vars': key = 'extra_data' setattr(config, key, value) config.save() job_creds = (set(kwargs.get('credentials', [])) - set(self.unified_job_template.credentials.all())) if job_creds: config.credentials.add(*job_creds) return config def result_stdout_raw_handle(self, attempt=0): """Return a file-like object containing the standard out of the job's result. """ msg = { 'pending': 'Waiting for results...', 'missing': 'stdout capture is missing', } if self.result_stdout_text: return StringIO(self.result_stdout_text) else: if not os.path.exists(self.result_stdout_file) or os.stat( self.result_stdout_file).st_size < 1: return StringIO(msg['missing' if self.finished else 'pending']) # There is a potential timing issue here, because another # process may be deleting the stdout file after it is written # to the database. # # Therefore, if we get an IOError (which generally means the # file does not exist), reload info from the database and # try again. try: return codecs.open(self.result_stdout_file, "r", encoding='utf-8') except IOError: if attempt < 3: self.result_stdout_text = type(self).objects.get( id=self.id).result_stdout_text return self.result_stdout_raw_handle(attempt=attempt + 1) else: return StringIO( msg['missing' if self.finished else 'pending']) def _escape_ascii(self, content): # Remove ANSI escape sequences used to embed event data. content = re.sub(r'\x1b\[K(?:[A-Za-z0-9+/=]+\x1b\[\d+D)+\x1b\[K', '', content) # Remove ANSI color escape sequences. 
content = re.sub(r'\x1b[^m]*m', '', content) return content def _result_stdout_raw(self, redact_sensitive=False, escape_ascii=False): content = self.result_stdout_raw_handle().read() if redact_sensitive: content = UriCleaner.remove_sensitive(content) if escape_ascii: content = self._escape_ascii(content) return content @property def result_stdout_raw(self): return self._result_stdout_raw() @property def result_stdout(self): return self._result_stdout_raw(escape_ascii=True) @property def result_stdout_size(self): try: return os.stat(self.result_stdout_file).st_size except Exception: return len(self.result_stdout) def _result_stdout_raw_limited(self, start_line=0, end_line=None, redact_sensitive=True, escape_ascii=False): return_buffer = u"" if end_line is not None: end_line = int(end_line) stdout_lines = self.result_stdout_raw_handle().readlines() absolute_end = len(stdout_lines) for line in stdout_lines[int(start_line):end_line]: return_buffer += line if int(start_line) < 0: start_actual = len(stdout_lines) + int(start_line) end_actual = len(stdout_lines) else: start_actual = int(start_line) if end_line is not None: end_actual = min(int(end_line), len(stdout_lines)) else: end_actual = len(stdout_lines) if redact_sensitive: return_buffer = UriCleaner.remove_sensitive(return_buffer) if escape_ascii: return_buffer = self._escape_ascii(return_buffer) return return_buffer, start_actual, end_actual, absolute_end def result_stdout_raw_limited(self, start_line=0, end_line=None, redact_sensitive=False): return self._result_stdout_raw_limited(start_line, end_line, redact_sensitive) def result_stdout_limited(self, start_line=0, end_line=None, redact_sensitive=False): return self._result_stdout_raw_limited(start_line, end_line, redact_sensitive, escape_ascii=True) @property def workflow_job_id(self): workflow_job = self.get_workflow_job() if workflow_job: return workflow_job.pk return None @property def spawned_by_workflow(self): return self.launch_type == 'workflow' def 
get_workflow_job(self): if self.spawned_by_workflow: try: return self.unified_job_node.workflow_job except UnifiedJob.unified_job_node.RelatedObjectDoesNotExist: pass return None @property def workflow_node_id(self): if self.spawned_by_workflow: try: return self.unified_job_node.pk except UnifiedJob.unified_job_node.RelatedObjectDoesNotExist: pass return None @property def celery_task(self): try: if self.celery_task_id: return TaskResult.objects.get(task_id=self.celery_task_id) except TaskResult.DoesNotExist: pass def get_passwords_needed_to_start(self): return [] def handle_extra_data(self, extra_data): if hasattr(self, 'extra_vars') and extra_data: extra_data_dict = {} try: extra_data_dict = parse_yaml_or_json(extra_data, silent_failure=False) except Exception as e: logger.warn("Exception deserializing extra vars: " + str(e)) evars = self.extra_vars_dict evars.update(extra_data_dict) self.update_fields(extra_vars=json.dumps(evars)) @property def can_start(self): return bool(self.status in ('new', 'waiting')) @property def can_schedule(self): if getattr(self, 'passwords_needed_to_start', None): return False JobLaunchConfig = self._meta.get_field('launch_config').related_model try: self.launch_config if self.unified_job_template is None: return False return True except JobLaunchConfig.DoesNotExist: return False @property def task_impact(self): raise NotImplementedError # Implement in subclass. 
def websocket_emit_data(self): ''' Return extra data that should be included when submitting data to the browser over the websocket connection ''' websocket_data = dict() if self.spawned_by_workflow: websocket_data.update( dict(workflow_job_id=self.workflow_job_id, workflow_node_id=self.workflow_node_id)) return websocket_data def _websocket_emit_status(self, status): try: status_data = dict(unified_job_id=self.id, status=status) if status == 'waiting': if self.instance_group: status_data[ 'instance_group_name'] = self.instance_group.name else: status_data['instance_group_name'] = None status_data.update(self.websocket_emit_data()) status_data['group_name'] = 'jobs' emit_channel_notification('jobs-status_changed', status_data) if self.spawned_by_workflow: status_data['group_name'] = "workflow_events" emit_channel_notification( 'workflow_events-' + str(self.workflow_job_id), status_data) except IOError: # includes socket errors logger.exception( '%s failed to emit channel msg about status change', self.log_format) def websocket_emit_status(self, status): connection.on_commit(lambda: self._websocket_emit_status(status)) def notification_data(self): return dict(id=self.id, name=self.name, url=self.get_ui_url(), created_by=smart_text(self.created_by), started=self.started.isoformat() if self.started is not None else None, finished=self.finished.isoformat() if self.finished is not None else None, status=self.status, traceback=self.result_traceback) def pre_start(self, **kwargs): if not self.can_start: self.job_explanation = u'%s is not in a startable state: %s, expecting one of %s' % ( self._meta.verbose_name, self.status, str(('new', 'waiting'))) self.save(update_fields=['job_explanation']) return (False, None) needed = self.get_passwords_needed_to_start() try: start_args = json.loads(decrypt_field(self, 'start_args')) except Exception: start_args = None if start_args in (None, ''): start_args = kwargs opts = dict([(field, start_args.get(field, '')) for field in 
needed]) if not all(opts.values()): missing_fields = ', '.join([k for k, v in opts.items() if not v]) self.job_explanation = u'Missing needed fields: %s.' % missing_fields self.save(update_fields=['job_explanation']) return (False, None) if 'extra_vars' in kwargs: self.handle_extra_data(kwargs['extra_vars']) return (True, opts) def start_celery_task(self, opts, error_callback, success_callback, queue): kwargs = { 'link_error': error_callback, 'link': success_callback, 'queue': None, 'task_id': None, } if not self.celery_task_id: raise RuntimeError("Expected celery_task_id to be set on model.") kwargs['task_id'] = self.celery_task_id task_class = self._get_task_class() from awx.main.models.ha import InstanceGroup ig = InstanceGroup.objects.get(name=queue) args = [self.pk] if ig.controller_id: if self.supports_isolation(): # case of jobs and ad hoc commands isolated_instance = ig.instances.order_by('-capacity').first() args.append(isolated_instance.hostname) else: # proj & inv updates, system jobs run on controller queue = ig.controller.name kwargs['queue'] = queue task_class().apply_async(args, opts, **kwargs) def start(self, error_callback, success_callback, **kwargs): ''' Start the task running via Celery. ''' (res, opts) = self.pre_start(**kwargs) if res: self.start_celery_task(opts, error_callback, success_callback) return res def signal_start(self, **kwargs): """Notify the task runner system to begin work on this task.""" # Sanity check: Are we able to start the job? If not, do not attempt # to do so. if not self.can_start: return False # Get any passwords or other data that are prerequisites to running # the job. needed = self.get_passwords_needed_to_start() opts = dict([(field, kwargs.get(field, '')) for field in needed]) if not all(opts.values()): return False # Sanity check: If we are running unit tests, then run synchronously. 
if getattr(settings, 'CELERY_UNIT_TEST', False): return self.start(None, None, **kwargs) # Save the pending status, and inform the SocketIO listener. self.update_fields(start_args=json.dumps(kwargs), status='pending') self.websocket_emit_status("pending") from awx.main.scheduler.tasks import run_job_launch connection.on_commit(lambda: run_job_launch.delay(self.id)) # Each type of unified job has a different Task class; get the # appropirate one. # task_type = get_type_for_model(self) # Actually tell the task runner to run this task. # FIXME: This will deadlock the task runner #from awx.main.tasks import notify_task_runner #notify_task_runner.delay({'id': self.id, 'metadata': kwargs, # 'task_type': task_type}) # Done! return True @property def can_cancel(self): return bool(self.status in CAN_CANCEL) def _force_cancel(self): # Update the status to 'canceled' if we can detect that the job # really isn't running (i.e. celery has crashed or forcefully # killed the worker). task_statuses = ('STARTED', 'SUCCESS', 'FAILED', 'RETRY', 'REVOKED') try: taskmeta = self.celery_task if not taskmeta or taskmeta.status not in task_statuses: return from celery import current_app i = current_app.control.inspect() for v in (i.active() or {}).values(): if taskmeta.task_id in [x['id'] for x in v]: return for v in (i.reserved() or {}).values(): if taskmeta.task_id in [x['id'] for x in v]: return for v in (i.revoked() or {}).values(): if taskmeta.task_id in [x['id'] for x in v]: return for v in (i.scheduled() or {}).values(): if taskmeta.task_id in [x['id'] for x in v]: return instance = self.__class__.objects.get(pk=self.pk) if instance.can_cancel: instance.status = 'canceled' update_fields = ['status'] if not instance.job_explanation: instance.job_explanation = 'Forced cancel' update_fields.append('job_explanation') instance.save(update_fields=update_fields) self.websocket_emit_status("canceled") except Exception: # FIXME: Log this exception! 
if settings.DEBUG: raise def _build_job_explanation(self): if not self.job_explanation: return 'Previous Task Canceled: {"job_type": "%s", "job_name": "%s", "job_id": "%s"}' % \ (self.model_to_str(), self.name, self.id) return None def cancel(self, job_explanation=None, is_chain=False): if self.can_cancel: if not is_chain: map( lambda x: x.cancel(job_explanation=self. _build_job_explanation(), is_chain=True), self.get_jobs_fail_chain()) if not self.cancel_flag: self.cancel_flag = True cancel_fields = ['cancel_flag'] if self.status in ('pending', 'waiting', 'new'): self.status = 'canceled' cancel_fields.append('status') if job_explanation is not None: self.job_explanation = job_explanation cancel_fields.append('job_explanation') self.save(update_fields=cancel_fields) self.websocket_emit_status("canceled") if settings.CELERY_BROKER_URL.startswith('amqp://'): self._force_cancel() return self.cancel_flag @property def preferred_instance_groups(self): ''' Return Instance/Rampart Groups preferred by this unified job templates ''' if not self.unified_job_template: return [] template_groups = [ x for x in self.unified_job_template.instance_groups.all() ] return template_groups @property def global_instance_groups(self): from awx.main.models.ha import InstanceGroup default_instance_group = InstanceGroup.objects.filter(name='tower') if default_instance_group.exists(): return [default_instance_group.first()] return [] def awx_meta_vars(self): ''' The result of this method is used as extra_vars of a job launched by AWX, for purposes of client playbook hooks ''' r = {} for name in ('awx', 'tower'): r['{}_job_id'.format(name)] = self.pk r['{}_job_launch_type'.format(name)] = self.launch_type if self.created_by: for name in ('awx', 'tower'): r['{}_user_id'.format(name)] = self.created_by.pk r['{}_user_name'.format(name)] = self.created_by.username else: wj = self.get_workflow_job() if wj: for name in ('awx', 'tower'): r['{}_workflow_job_id'.format(name)] = wj.pk 
r['{}_workflow_job_name'.format(name)] = wj.name if wj.created_by: for name in ('awx', 'tower'): r['{}_user_id'.format(name)] = wj.created_by.pk r['{}_user_name'.format(name)] = wj.created_by.username if self.schedule: for name in ('awx', 'tower'): r['{}_schedule_id'.format(name)] = self.schedule.pk r['{}_schedule_name'.format(name)] = self.schedule.name return r
class Migration(migrations.Migration):
    """Auto-generated initial migration for the oTree core app.

    Creates the core bookkeeping models (participants, sessions, wait-page
    completion markers, browser-bot plumbing, room lookups) and then wires up
    the foreign keys and composite indexes.  Do not edit operations by hand —
    regenerate with ``makemigrations`` instead.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        # --- model creation (FKs between these models are added below via
        #     AddField, so creation order here carries no dependencies) ---
        migrations.CreateModel(
            name='BrowserBotsLauncherSessionCode',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('code', models.CharField(max_length=10)),
                # unique=True on a default=True boolean enforces a singleton row
                ('is_only_record', models.BooleanField(default=True, unique=True)),
            ],
        ),
        migrations.CreateModel(
            name='ChatMessage',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('channel', models.CharField(max_length=255)),
                ('nickname', models.CharField(max_length=255)),
                ('body', models.TextField()),
                ('timestamp', models.FloatField(default=time.time)),
            ],
        ),
        migrations.CreateModel(
            name='CompletedGroupWaitPage',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('page_index', models.PositiveIntegerField()),
                ('id_in_subsession', models.PositiveIntegerField(default=0)),
            ],
        ),
        migrations.CreateModel(
            name='CompletedSubsessionWaitPage',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('page_index', models.PositiveIntegerField()),
            ],
        ),
        migrations.CreateModel(
            name='FailedSessionCreation',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('pre_create_id', models.CharField(db_index=True, max_length=100)),
                ('message', models.CharField(max_length=300)),
                ('traceback', models.TextField(default='')),
            ],
        ),
        migrations.CreateModel(
            name='PageCompletion',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('app_name', models.CharField(max_length=300)),
                ('page_index', models.PositiveIntegerField()),
                ('page_name', models.CharField(max_length=300)),
                ('time_stamp', models.PositiveIntegerField()),
                ('seconds_on_page', models.PositiveIntegerField()),
                ('subsession_pk', models.PositiveIntegerField()),
                ('auto_submitted', models.BooleanField()),
            ],
        ),
        migrations.CreateModel(
            name='PageTimeout',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('page_index', models.PositiveIntegerField()),
                ('expiration_time', models.FloatField()),
            ],
        ),
        migrations.CreateModel(
            name='Participant',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('vars', otree.db.serializedfields._PickleField(default=dict)),
                ('label', otree.db.models.StringField(max_length=50, null=True)),
                ('id_in_session', otree.db.models.PositiveIntegerField(null=True)),
                ('payoff', otree.db.models.CurrencyField(default=0, null=True)),
                ('time_started', otree.db.models.DateTimeField(null=True)),
                ('mturk_assignment_id', otree.db.models.StringField(max_length=50, null=True)),
                ('mturk_worker_id', otree.db.models.StringField(max_length=50, null=True)),
                ('_index_in_subsessions', otree.db.models.PositiveIntegerField(default=0, null=True)),
                ('_index_in_pages', otree.db.models.PositiveIntegerField(db_index=True, default=0, null=True)),
                ('_waiting_for_ids', otree.db.models.StringField(max_length=300, null=True)),
                ('code', otree.db.models.StringField(default=otree.common_internal.random_chars_8, max_length=16, null=True, unique=True)),
                ('visited', otree.db.models.BooleanField(choices=[(True, 'Yes'), (False, 'No')], db_index=True, default=False)),
                ('ip_address', otree.db.models.GenericIPAddressField(null=True)),
                ('_last_page_timestamp', otree.db.models.PositiveIntegerField(null=True)),
                ('_last_request_timestamp', otree.db.models.PositiveIntegerField(null=True)),
                ('is_on_wait_page', otree.db.models.BooleanField(choices=[(True, 'Yes'), (False, 'No')], default=False)),
                ('_current_page_name', otree.db.models.StringField(max_length=200, null=True, verbose_name='page')),
                ('_current_app_name', otree.db.models.StringField(max_length=200, null=True, verbose_name='app')),
                ('_round_number', otree.db.models.PositiveIntegerField(null=True)),
                ('_current_form_page_url', otree.db.models.URLField(null=True)),
                ('_max_page_index', otree.db.models.PositiveIntegerField(null=True)),
                ('_browser_bot_finished', otree.db.models.BooleanField(choices=[(True, 'Yes'), (False, 'No')], default=False)),
                ('_is_bot', otree.db.models.BooleanField(choices=[(True, 'Yes'), (False, 'No')], default=False)),
                ('is_browser_bot', otree.db.models.BooleanField(choices=[(True, 'Yes'), (False, 'No')], default=False)),
            ],
            options={
                'ordering': ['pk'],
            },
            bases=(otree.models.varsmixin._SaveTheChangeWithCustomFieldSupport, otree_save_the_change.mixins.SaveTheChange, models.Model),
        ),
        migrations.CreateModel(
            name='ParticipantLockModel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('participant_code', models.CharField(max_length=16, unique=True)),
                ('locked', models.BooleanField(default=False)),
            ],
        ),
        migrations.CreateModel(
            name='ParticipantRoomVisit',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('room_name', models.CharField(max_length=50)),
                ('participant_label', models.CharField(max_length=200)),
                ('tab_unique_id', models.CharField(max_length=20, unique=True)),
                ('last_updated', models.FloatField()),
            ],
        ),
        migrations.CreateModel(
            name='ParticipantToPlayerLookup',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('participant_code', models.CharField(max_length=20)),
                ('page_index', models.PositiveIntegerField()),
                ('app_name', models.CharField(max_length=300)),
                ('player_pk', models.PositiveIntegerField()),
                ('subsession_pk', models.PositiveIntegerField()),
                ('session_pk', models.PositiveIntegerField()),
                ('url', models.CharField(max_length=300)),
                ('participant', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='otree.Participant')),
            ],
        ),
        migrations.CreateModel(
            name='RoomToSession',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('room_name', models.CharField(max_length=255, unique=True)),
            ],
        ),
        migrations.CreateModel(
            name='Session',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('vars', otree.db.serializedfields._PickleField(default=dict)),
                ('config', otree.db.serializedfields._PickleField(default=dict, null=True)),
                ('label', otree.db.models.StringField(blank=True, help_text='For internal record-keeping', max_length=300, null=True)),
                ('experimenter_name', otree.db.models.StringField(blank=True, help_text='For internal record-keeping', max_length=300, null=True)),
                ('ready_for_browser', otree.db.models.BooleanField(choices=[(True, 'Yes'), (False, 'No')], default=False)),
                ('code', otree.db.models.StringField(default=otree.common_internal.random_chars_8, max_length=16, null=True, unique=True)),
                ('mturk_HITId', otree.db.models.StringField(blank=True, help_text='Hit id for this session on MTurk', max_length=300, null=True)),
                ('mturk_HITGroupId', otree.db.models.StringField(blank=True, help_text='Hit id for this session on MTurk', max_length=300, null=True)),
                ('mturk_num_participants', otree.db.models.IntegerField(default=-1, help_text='Number of participants on MTurk', null=True)),
                ('mturk_use_sandbox', otree.db.models.BooleanField(choices=[(True, 'Yes'), (False, 'No')], default=True, help_text='Should this session be created in mturk sandbox?')),
                ('archived', otree.db.models.BooleanField(choices=[(True, 'Yes'), (False, 'No')], db_index=True, default=False)),
                ('comment', otree.db.models.LongStringField(blank=True, null=True)),
                ('_anonymous_code', otree.db.models.StringField(db_index=True, default=otree.common_internal.random_chars_10, max_length=10, null=True)),
                ('_pre_create_id', otree.db.models.StringField(db_index=True, max_length=255, null=True)),
                ('is_demo', otree.db.models.BooleanField(choices=[(True, 'Yes'), (False, 'No')], default=False)),
                ('_admin_report_app_names', otree.db.models.LongStringField(default='', null=True)),
                ('_admin_report_num_rounds', otree.db.models.StringField(default='', max_length=255, null=True)),
                ('num_participants', otree.db.models.PositiveIntegerField(null=True)),
            ],
            options={
                'ordering': ['pk'],
            },
            bases=(otree.models.varsmixin._SaveTheChangeWithCustomFieldSupport, otree_save_the_change.mixins.SaveTheChange, models.Model),
        ),
        migrations.CreateModel(
            name='UndefinedFormModel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
            ],
        ),
        # --- foreign keys (added after all models exist) ---
        migrations.AddField(
            model_name='roomtosession',
            name='session',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='otree.Session'),
        ),
        migrations.AddField(
            model_name='participant',
            name='session',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='otree.Session'),
        ),
        migrations.AddField(
            model_name='pagetimeout',
            name='participant',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='otree.Participant'),
        ),
        migrations.AddField(
            model_name='pagecompletion',
            name='participant',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='otree.Participant'),
        ),
        migrations.AddField(
            model_name='pagecompletion',
            name='session',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='otree.Session'),
        ),
        migrations.AddField(
            model_name='completedsubsessionwaitpage',
            name='session',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='otree.Session'),
        ),
        migrations.AddField(
            model_name='completedgroupwaitpage',
            name='session',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='otree.Session'),
        ),
        migrations.AddField(
            model_name='chatmessage',
            name='participant',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='chat_messages_core', to='otree.Participant'),
        ),
        # --- composite uniqueness / index constraints ---
        migrations.AlterUniqueTogether(
            name='participanttoplayerlookup',
            unique_together=set([('participant', 'page_index')]),
        ),
        migrations.AlterIndexTogether(
            name='participanttoplayerlookup',
            index_together=set([('participant', 'page_index')]),
        ),
        migrations.AlterIndexTogether(
            name='participant',
            index_together=set([('session', 'mturk_worker_id', 'mturk_assignment_id')]),
        ),
        migrations.AlterIndexTogether(
            name='pagetimeout',
            index_together=set([('participant', 'page_index')]),
        ),
        migrations.AlterIndexTogether(
            name='completedsubsessionwaitpage',
            index_together=set([('page_index', 'session')]),
        ),
        migrations.AlterIndexTogether(
            name='completedgroupwaitpage',
            index_together=set([('page_index', 'session', 'id_in_subsession')]),
        ),
        migrations.AlterIndexTogether(
            name='chatmessage',
            index_together=set([('channel', 'timestamp')]),
        ),
    ]
class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin,
                  ResourceMixin, CustomVirtualEnvMixin, RelatedJobsMixin,
                  WebhookTemplateMixin):
    """
    A job template is a reusable job definition for applying a project (with
    playbook) to an inventory source with a given credential.
    """
    FIELDS_TO_PRESERVE_AT_COPY = [
        'labels', 'instance_groups', 'credentials', 'survey_spec'
    ]
    FIELDS_TO_DISCARD_AT_COPY = ['vault_credential', 'credential']
    # Enforced in validate_unique() rather than as a DB constraint, because
    # organization is inferred from the project after full_clean().
    SOFT_UNIQUE_TOGETHER = [('polymorphic_ctype', 'name', 'organization')]

    class Meta:
        app_label = 'main'
        ordering = ('name', )

    job_type = models.CharField(
        max_length=64,
        choices=NEW_JOB_TYPE_CHOICES,
        default='run',
    )
    host_config_key = prevent_search(
        models.CharField(
            max_length=1024,
            blank=True,
            default='',
        ))
    ask_diff_mode_on_launch = AskForField(
        blank=True,
        default=False,
    )
    ask_limit_on_launch = AskForField(
        blank=True,
        default=False,
    )
    ask_tags_on_launch = AskForField(blank=True,
                                     default=False,
                                     allows_field='job_tags')
    ask_skip_tags_on_launch = AskForField(
        blank=True,
        default=False,
    )
    ask_job_type_on_launch = AskForField(
        blank=True,
        default=False,
    )
    ask_verbosity_on_launch = AskForField(
        blank=True,
        default=False,
    )
    ask_inventory_on_launch = AskForField(
        blank=True,
        default=False,
    )
    ask_credential_on_launch = AskForField(blank=True,
                                           default=False,
                                           allows_field='credentials')
    ask_scm_branch_on_launch = AskForField(blank=True,
                                           default=False,
                                           allows_field='scm_branch')
    job_slice_count = models.PositiveIntegerField(
        blank=True,
        default=1,
        help_text=_(
            "The number of jobs to slice into at runtime. "
            "Will cause the Job Template to launch a workflow if value is greater than 1."
        ),
    )

    admin_role = ImplicitRoleField(
        parent_role=['organization.job_template_admin_role'])
    execute_role = ImplicitRoleField(
        parent_role=['admin_role', 'organization.execute_role'], )
    read_role = ImplicitRoleField(
        parent_role=[
            'organization.auditor_role',
            'inventory.organization.auditor_role',  # partial support for old inheritance via inventory
            'execute_role',
            'admin_role',
        ], )

    @classmethod
    def _get_unified_job_class(cls):
        """The concrete job model this template instantiates."""
        return Job

    @classmethod
    def _get_unified_job_field_names(cls):
        """Field names copied from the template onto a launched job."""
        return set(f.name for f in JobOptions._meta.fields) | set([
            'name', 'description', 'organization', 'survey_passwords',
            'labels', 'credentials', 'job_slice_number', 'job_slice_count',
            'execution_environment'
        ])

    @property
    def validation_errors(self):
        """
        Fields needed to start, which cannot be given on launch, invalid state.
        """
        validation_errors = {}
        if self.inventory is None and not self.ask_inventory_on_launch:
            validation_errors['inventory'] = [
                _("Job Template must provide 'inventory' or allow prompting for it."
                  ),
            ]
        if self.project is None:
            validation_errors['project'] = [
                _("Job Templates must have a project assigned."),
            ]
        return validation_errors

    @property
    def resources_needed_to_start(self):
        """Related resources (by FK id) still unset on this template."""
        return [
            fd for fd in ['project', 'inventory']
            if not getattr(self, '{}_id'.format(fd))
        ]

    def clean_forks(self):
        """Validate forks against the site-wide MAX_FORKS ceiling (0 = no cap)."""
        if settings.MAX_FORKS > 0 and self.forks > settings.MAX_FORKS:
            raise ValidationError(
                _(f'Maximum number of forks ({settings.MAX_FORKS}) exceeded.'))
        return self.forks

    def create_job(self, **kwargs):
        """
        Create a new job based on this template.
        """
        return self.create_unified_job(**kwargs)

    def get_effective_slice_ct(self, kwargs):
        """Slice count actually usable at launch: capped by the host count of
        the inventory that will be used (launch-prompted inventory wins)."""
        actual_inventory = self.inventory
        if self.ask_inventory_on_launch and 'inventory' in kwargs:
            actual_inventory = kwargs['inventory']
        if actual_inventory:
            return min(self.job_slice_count, actual_inventory.hosts.count())
        else:
            return self.job_slice_count

    def save(self, *args, **kwargs):
        """Keep organization in sync with the owning project's organization.

        If project is deleted for some reason, the old organization is kept to
        retain ownership for organization admins.
        """
        update_fields = kwargs.get('update_fields')
        if self.project and self.project.organization_id != self.organization_id:
            self.organization_id = self.project.organization_id
            # BUGFIX: the old code appended to the caller's update_fields list
            # in place, which mutated the caller's object and raised
            # AttributeError when a tuple was passed (tuples are valid for
            # Model.save(update_fields=...)).  Build a fresh list instead.
            # When no update_fields was given, all fields are saved anyway.
            if update_fields is not None and \
                    'organization' not in update_fields and \
                    'organization_id' not in update_fields:
                kwargs['update_fields'] = list(update_fields) + ['organization_id']
        return super(JobTemplate, self).save(*args, **kwargs)

    def validate_unique(self, exclude=None):
        """Custom over-ride for JT specifically
        because organization is inferred from project after full_clean is finished
        thus the organization field is not yet set when validation happens
        """
        errors = []
        for ut in JobTemplate.SOFT_UNIQUE_TOGETHER:
            kwargs = {'name': self.name}
            if self.project:
                kwargs['organization'] = self.project.organization_id
            else:
                kwargs['organization'] = None
            qs = JobTemplate.objects.filter(**kwargs)
            if self.pk:
                qs = qs.exclude(pk=self.pk)
            if qs.exists():
                # sorted() makes the message deterministic; joining an
                # unordered set produced a different field order per run.
                errors.append('%s with this (%s) combination already exists.' %
                              (JobTemplate.__name__,
                               ', '.join(sorted(set(ut) - {'polymorphic_ctype'}))))
        if errors:
            raise ValidationError(errors)

    def create_unified_job(self, **kwargs):
        """Launch a job from this template.

        When slicing is in effect (effective slice count > 1 and not
        explicitly prevented), a WorkflowJob is created instead, with one
        WorkflowJobNode per slice.
        """
        prevent_slicing = kwargs.pop('_prevent_slicing', False)
        slice_ct = self.get_effective_slice_ct(kwargs)
        slice_event = bool(slice_ct > 1 and (not prevent_slicing))
        if slice_event:
            # A Slice Job Template will generate a WorkflowJob rather than a Job
            from awx.main.models.workflow import WorkflowJobTemplate, WorkflowJobNode
            kwargs[
                '_unified_job_class'] = WorkflowJobTemplate._get_unified_job_class(
                )
            kwargs['_parent_field_name'] = "job_template"
            kwargs.setdefault('_eager_fields', {})
            kwargs['_eager_fields']['is_sliced_job'] = True
        elif self.job_slice_count > 1 and (not prevent_slicing):
            # Unique case where JT was set to slice but hosts not available
            kwargs.setdefault('_eager_fields', {})
            kwargs['_eager_fields']['job_slice_count'] = 1
        elif prevent_slicing:
            kwargs.setdefault('_eager_fields', {})
            kwargs['_eager_fields'].setdefault('job_slice_count', 1)
        job = super(JobTemplate, self).create_unified_job(**kwargs)
        if slice_event:
            for idx in range(slice_ct):
                create_kwargs = dict(workflow_job=job,
                                     unified_job_template=self,
                                     ancestor_artifacts=dict(job_slice=idx + 1))
                WorkflowJobNode.objects.create(**create_kwargs)
        return job

    def get_absolute_url(self, request=None):
        return reverse('api:job_template_detail',
                       kwargs={'pk': self.pk},
                       request=request)

    def can_start_without_user_input(self, callback_extra_vars=None):
        """
        Return whether job template can be used to start a new job without
        requiring any user input.
        """
        variables_needed = False
        if callback_extra_vars:
            extra_vars_dict = parse_yaml_or_json(callback_extra_vars)
            for var in self.variables_needed_to_start:
                if var not in extra_vars_dict:
                    variables_needed = True
                    break
        elif self.variables_needed_to_start:
            variables_needed = True
        prompting_needed = False
        # The behavior of provisioning callback should mimic
        # that of job template launch, so prompting_needed should
        # not block a provisioning callback from creating/launching jobs.
        if callback_extra_vars is None:
            for ask_field_name in set(self.get_ask_mapping().values()):
                if getattr(self, ask_field_name):
                    prompting_needed = True
                    break
        return not prompting_needed and not self.passwords_needed_to_start and not variables_needed

    def _accept_or_ignore_job_kwargs(self, **kwargs):
        """Partition launch-time kwargs into accepted prompts, rejected
        fields, and per-field error messages.

        Returns a (prompted_data, rejected_data, errors_dict) triple.
        """
        exclude_errors = kwargs.pop('_exclude_errors', [])
        prompted_data = {}
        rejected_data = {}
        accepted_vars, rejected_vars, errors_dict = self.accept_or_ignore_variables(
            kwargs.get('extra_vars', {}),
            _exclude_errors=exclude_errors,
            extra_passwords=kwargs.get('survey_passwords', {}))
        if accepted_vars:
            prompted_data['extra_vars'] = accepted_vars
        if rejected_vars:
            rejected_data['extra_vars'] = rejected_vars

        # Handle all the other fields that follow the simple prompting rule
        for field_name, ask_field_name in self.get_ask_mapping().items():
            if field_name not in kwargs or field_name == 'extra_vars' or kwargs[
                    field_name] is None:
                continue
            new_value = kwargs[field_name]
            old_value = getattr(self, field_name)

            field = self._meta.get_field(field_name)
            if isinstance(field, models.ManyToManyField):
                # For M2M prompts only genuinely-new members matter.
                old_value = set(old_value.all())
                new_value = set(kwargs[field_name]) - old_value
                if not new_value:
                    continue

            if new_value == old_value:
                # no-op case: Fields the same as template's value
                # counted as neither accepted or ignored
                continue
            elif field_name == 'scm_branch' and old_value == '' and self.project and new_value == self.project.scm_branch:
                # special case of "not provided" for branches
                # job template does not provide branch, runs with default branch
                continue
            elif getattr(self, ask_field_name):
                # Special case where prompts can be rejected based on project setting
                if field_name == 'scm_branch':
                    if not self.project:
                        rejected_data[field_name] = new_value
                        errors_dict[field_name] = _('Project is missing.')
                        continue
                    if kwargs[
                            'scm_branch'] != self.project.scm_branch and not self.project.allow_override:
                        rejected_data[field_name] = new_value
                        errors_dict[field_name] = _(
                            'Project does not allow override of branch.')
                        continue
                # accepted prompt
                prompted_data[field_name] = new_value
            else:
                # unprompted - template is not configured to accept field on launch
                rejected_data[field_name] = new_value
                # Not considered an error for manual launch, to support old
                # behavior of putting them in ignored_fields and launching anyway
                if 'prompts' not in exclude_errors:
                    errors_dict[field_name] = _(
                        'Field is not configured to prompt on launch.')

        if 'prompts' not in exclude_errors and (not getattr(
                self, 'ask_credential_on_launch',
                False)) and self.passwords_needed_to_start:
            errors_dict['passwords_needed_to_start'] = _(
                'Saved launch configurations cannot provide passwords needed to start.'
            )

        needed = self.resources_needed_to_start
        if needed:
            needed_errors = []
            for resource in needed:
                if resource in prompted_data:
                    continue
                needed_errors.append(
                    _("Job Template {} is missing or undefined.").format(
                        resource))
            if needed_errors:
                errors_dict['resources_needed_to_start'] = needed_errors

        return prompted_data, rejected_data, errors_dict

    @property
    def cache_timeout_blocked(self):
        """True when too many jobs from this template are already queued."""
        if Job.objects.filter(
                job_template=self,
                status__in=[
                    'pending', 'waiting', 'running'
                ]).count() >= getattr(settings, 'SCHEDULE_MAX_JOBS', 10):
            logger.error(
                "Job template %s could not be started because there are more than %s other jobs from that template waiting to run"
                % (self.name, getattr(settings, 'SCHEDULE_MAX_JOBS', 10)))
            return True
        return False

    def _can_update(self):
        return self.can_start_without_user_input()

    @property
    def notification_templates(self):
        # Return all notification_templates defined on the Job Template, on the Project, and on the Organization for each trigger type
        # TODO: Currently there is no org fk on project so this will need to be added once that is
        # available after the rbac pr
        base_notification_templates = NotificationTemplate.objects
        error_notification_templates = list(
            base_notification_templates.filter(
                unifiedjobtemplate_notification_templates_for_errors__in=[
                    self, self.project
                ]))
        started_notification_templates = list(
            base_notification_templates.filter(
                unifiedjobtemplate_notification_templates_for_started__in=[
                    self, self.project
                ]))
        success_notification_templates = list(
            base_notification_templates.filter(
                unifiedjobtemplate_notification_templates_for_success__in=[
                    self, self.project
                ]))
        # Get Organization NotificationTemplates (set() de-duplicates
        # templates attached at both template/project and org level)
        if self.organization is not None:
            error_notification_templates = set(
                error_notification_templates + list(
                    base_notification_templates.filter(
                        organization_notification_templates_for_errors=self.
                        organization)))
            started_notification_templates = set(
                started_notification_templates + list(
                    base_notification_templates.filter(
                        organization_notification_templates_for_started=self.
                        organization)))
            success_notification_templates = set(
                success_notification_templates + list(
                    base_notification_templates.filter(
                        organization_notification_templates_for_success=self.
                        organization)))
        return dict(error=list(error_notification_templates),
                    started=list(started_notification_templates),
                    success=list(success_notification_templates))

    '''
    RelatedJobsMixin
    '''

    def _get_related_jobs(self):
        return UnifiedJob.objects.filter(unified_job_template=self)
class Client(models.Model):
    """A person (or company) under care: identification, admission data,
    referrals, family relations and list-view presentation helpers."""
    id = UuidField(primary_key=True)
    person = models.OneToOneField(Person)
    idRecord = models.PositiveIntegerField()
    legacyRecord = models.CharField(max_length=15)
    admission_date = models.DateField(null=True)
    active = models.BooleanField(default=True)
    comments = models.TextField(blank=True)
    objects = ClientManager()
    payment_condition = models.ForeignKey('PaymentCondition')

    def __unicode__(self):
        # FIX: both branches of the previous is_company() conditional built
        # the exact same string, so the test was dead logic — collapsed to a
        # single expression (behavior unchanged).
        return u"%s" % (self.person.name.title(), )

    class Meta:
        ordering = ['person']
        permissions = (
            ("client_add", "Can add clients"),
            ("client_change", "Can change clients"),
            ("client_list", "Can list clients"),
            ("client_write", "Can write clients"),
        )

    def revision(self):
        """Latest reversion revision recorded for this client."""
        return reversion.get_for_object(self).order_by(
            '-revision__date_created').latest(
                'revision__date_created').revision

    def revision_created(self):
        """First (creation) reversion revision recorded for this client."""
        return reversion.get_for_object(self).order_by(
            'revision__date_created').latest('revision__date_created').revision

    def referrals_charged(self):
        """Referrals of this client still in charge (open)."""
        return self.referral_set.charged().filter(client=self)

    def referrals_discharged(self):
        """Referrals of this client already discharged."""
        return self.referral_set.discharged().filter(client=self)

    def employees(self):
        """Active client-employees of this client when it is a company."""
        from gestorpsi.person.models import CompanyClient
        return CompanyClient.objects.filter(
            company__person__client=self).filter(client__active=True)

    def family_members(self):
        """Return family relations of this client as a list of
        [member_id, member, relation_level, responsible, relation_id,
        active, comment] entries.

        member_id/relation_id: member_id is the id of the related client,
        relation_id (i.id) is the id of the Family relation row itself.
        """
        family_list = []
        for i in Family.objects.filter(Q(client=self) | Q(
                client_related=self)).order_by('-active', '-responsible',
                                               'relation_level',
                                               'client__person__name',
                                               'client_related__person__name'):
            responsible = False
            # Pick whichever side of the relation is NOT this client.
            # (Renamed locals: the old code shadowed the builtins `id` and
            # `dict`.)
            member_id = i.client.id if not i.client == self else i.client_related.id
            member = i.client if not i.client == self else i.client_related
            if not i.client == self:
                # Relation is stored from the other side: translate it with
                # the reverse mapping.
                relation_map = {}
                for x, y in FAMILY_RELATION_REVERSE:
                    relation_map[x] = y.__unicode__()
                relation_level = relation_map[i.relation_level]
            else:
                if i.responsible:
                    responsible = True
                relation_level = i.get_relation_level_display()
            family_list.append([
                member_id, member, relation_level, responsible, i.id,
                i.active, i.comment
            ])
        return family_list

    def family_members_active(self):
        """Subset of family_members() whose relation is still active
        (index 5 of each entry is the `active` flag)."""
        family_list = []
        for i in self.family_members():
            if i[5]:
                family_list.append(i)
        return family_list

    def is_company(self):
        # hasattr() already yields a bool; the old `True if ... else False`
        # wrapper was redundant.
        return hasattr(self.person, 'company')

    def is_active(self):
        return bool(self.active)

    def is_busy(self, start_time, end_time):
        '''
        check if client is busy in schedule for selected range
        filter 1: start time not in occurrence range
        filter 2: end time not in occurrence range
        filter 3: occurrence range are not between asked values
        note for exclude filters:
            presence = 4 -> occurrence unmarked
            presence = 5 -> occurrence rescheduled
        '''
        from gestorpsi.schedule.models import ScheduleOccurrence
        queryset = ScheduleOccurrence.objects.filter(event__referral__client=self) \
            .exclude(occurrenceconfirmation__presence=4) \
            .exclude(occurrenceconfirmation__presence=5)
        # PERF: .exists() issues a cheap EXISTS query instead of evaluating
        # (fetching) each queryset just to test its truthiness; the boolean
        # result is identical.
        return queryset.filter(start_time__lte=start_time,
                               end_time__gt=start_time).exists() or \
            queryset.filter(start_time__lt=end_time,
                            end_time__gte=end_time).exists() or \
            queryset.filter(start_time__gte=start_time,
                            end_time__lte=end_time).exists()

    def set_active(self):
        self.active = True
        self.save()

    def set_deactive(self):
        self.active = False
        self.save()

    # --- list-view presentation helpers -----------------------------------
    def list_item_title(self):
        return u"%s" % (self.person.name)

    def list_item_url(self):
        return "/client/%s/home/" % (self.pk)

    def list_item_title_aditional(self):
        # NOTE: method name keeps its historical spelling ("aditional") —
        # callers/templates reference it by name.
        if not self.person.age:
            return ""
        return u"%s %s" % (self.person.age, _(u"years"))

    def list_item_description(self):
        return u"%s" % (self.person.get_first_phone())

    def list_item_extra_links(self):
        html = ''
        for r in self.referrals_charged():
            html += u"<a title='%s' href='/client/%s/referral/%s/' \
                style='color:#%s;'><div class='service_name_html' \
                style='background-color:#%s;'>&nbsp;&nbsp;&nbsp;&nbsp;</div></a>"\
                % (r, self.pk, r.pk, r.service.font_color, r.service.color)
        html += '<a class="admit" href="/admission/%s/" \
            title="%s"><img src="/media/img/22/ico_reg.png"></a>'\
            % (self.pk, _('Admission Details'))
        return u"%s" % (html)
    list_item_extra_links.allow_tags = True
class JobOptions(BaseModel):
    """
    Common options for job templates and jobs.

    Abstract base: every field here is copied verbatim onto both JobTemplate
    and Job (via ``related_name='%(class)ss'`` the FK reverse accessors stay
    distinct per subclass).  Field declaration order is significant to Django
    (forms/migration state), so fields are kept in their original order.
    """
    class Meta:
        abstract = True

    # Show templated-file diffs in job output when enabled.
    diff_mode = models.BooleanField(
        default=False,
        help_text=
        _("If enabled, textual changes made to any templated files on the host are shown in the standard output"
          ),
    )
    job_type = models.CharField(
        max_length=64,
        choices=JOB_TYPE_CHOICES,
        default='run',
    )
    # Inventory/project are SET_NULL so deleting either does not cascade
    # into deleting jobs/templates that referenced them.
    inventory = models.ForeignKey(
        'Inventory',
        related_name='%(class)ss',
        blank=True,
        null=True,
        default=None,
        on_delete=models.SET_NULL,
    )
    project = models.ForeignKey(
        'Project',
        related_name='%(class)ss',
        null=True,
        default=None,
        blank=True,
        on_delete=models.SET_NULL,
    )
    playbook = models.CharField(
        max_length=1024,
        default='',
        blank=True,
    )
    scm_branch = models.CharField(
        max_length=1024,
        default='',
        blank=True,
        help_text=_(
            'Branch to use in job run. Project default used if blank. '
            'Only allowed if project allow_override field is set to true.'),
    )
    forks = models.PositiveIntegerField(
        blank=True,
        default=0,
    )
    limit = models.TextField(
        blank=True,
        default='',
    )
    verbosity = models.PositiveIntegerField(
        choices=VERBOSITY_CHOICES,
        blank=True,
        default=0,
    )
    # prevent_search: extra_vars may contain secrets and must not be
    # searchable/filterable through the API.
    extra_vars = prevent_search(
        accepts_json(models.TextField(
            blank=True,
            default='',
        )))
    job_tags = models.TextField(
        blank=True,
        default='',
    )
    force_handlers = models.BooleanField(
        blank=True,
        default=False,
    )
    skip_tags = models.CharField(
        max_length=1024,
        blank=True,
        default='',
    )
    start_at_task = models.CharField(
        max_length=1024,
        blank=True,
        default='',
    )
    become_enabled = models.BooleanField(default=False, )
    allow_simultaneous = models.BooleanField(default=False, )
    timeout = models.IntegerField(
        blank=True,
        default=0,
        help_text=
        _("The amount of time (in seconds) to run before the task is canceled."
          ),
    )
    use_fact_cache = models.BooleanField(
        default=False,
        help_text=
        _("If enabled, the service will act as an Ansible Fact Cache Plugin; persisting "
          "facts at the end of a playbook run to the database and caching facts for use by Ansible."
          ),
    )

    # Descriptor exposing extra_vars parsed into a dict.
    extra_vars_dict = VarsDictProperty('extra_vars', True)

    @property
    def machine_credential(self):
        """The (single) SSH machine credential attached, or None."""
        return self.credentials.filter(credential_type__kind='ssh').first()

    @property
    def network_credentials(self):
        """All attached network-type credentials."""
        return list(self.credentials.filter(credential_type__kind='net'))

    @property
    def cloud_credentials(self):
        """All attached cloud-type credentials."""
        return list(self.credentials.filter(credential_type__kind='cloud'))

    @property
    def vault_credentials(self):
        """All attached vault-type credentials."""
        return list(self.credentials.filter(credential_type__kind='vault'))

    @property
    def passwords_needed_to_start(self):
        '''Return list of password field names needed to start the job.'''
        needed = []
        # Unsaved credential objects can not require passwords
        if not self.pk:
            return needed
        for cred in self.credentials.all():
            needed.extend(cred.passwords_needed)
        return needed
class Migration(migrations.Migration):
    # Initial schema for the 'logistics' app: Activity (an order/delivery
    # record tied to a user and a seller), product catalog with categories,
    # sellers, rider/product reviews, cart items and favorites.

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Activity',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('ref_code', models.CharField(blank=True, max_length=15, null=True)),
                ('activity_type', models.CharField(max_length=125)),
                # Flat default shipping fee of 50 — currency/units not visible here.
                ('shipping', models.DecimalField(decimal_places=2, default=50, max_digits=10)),
                # NOTE(review): coordinates are stored as free-text CharFields,
                # not DecimalFields — loc1 (optional) vs loc2 (required).
                ('loc1_latitude', models.CharField(blank=True, max_length=25, null=True)),
                ('loc1_longitude', models.CharField(blank=True, max_length=25, null=True)),
                ('loc2_latitude', models.CharField(max_length=25)),
                ('loc2_longitude', models.CharField(max_length=25)),
                ('contact_number', models.CharField(blank=True, max_length=125, null=True)),
                # auth_id/capture_id look like payment-gateway references — confirm.
                ('auth_id', models.CharField(blank=True, max_length=125, null=True)),
                ('capture_id', models.CharField(blank=True, max_length=125, null=True)),
                ('cod', models.BooleanField(default=False)),
                ('pickup', models.BooleanField(default=False)),
                ('pickup_address', models.CharField(blank=True, max_length=125, null=True)),
                ('is_ordered', models.BooleanField(default=False)),
                ('date_ordered', models.DateTimeField(blank=True, null=True)),
                ('is_paid', models.BooleanField(default=False)),
                ('date_paid', models.DateTimeField(blank=True, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(blank=True, max_length=50, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='CategoryGroup',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(blank=True, max_length=50, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='Product',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=50, unique=True)),
                ('price', models.DecimalField(decimal_places=2, max_digits=30)),
                ('description', models.TextField(default='', max_length=4000)),
                ('thumbnail', models.ImageField(upload_to='photos/%Y/%m/%d/')),
                # Up to six optional gallery photos besides the thumbnail.
                ('photo_1', models.ImageField(blank=True, upload_to='photos/%Y/%m/%d/')),
                ('photo_2', models.ImageField(blank=True, upload_to='photos/%Y/%m/%d/')),
                ('photo_3', models.ImageField(blank=True, upload_to='photos/%Y/%m/%d/')),
                ('photo_4', models.ImageField(blank=True, upload_to='photos/%Y/%m/%d/')),
                ('photo_5', models.ImageField(blank=True, upload_to='photos/%Y/%m/%d/')),
                ('photo_6', models.ImageField(blank=True, upload_to='photos/%Y/%m/%d/')),
                # Optional time-boxed sale price.
                ('sale_price', models.DecimalField(blank=True, decimal_places=2, max_digits=30, null=True)),
                ('sale_price_start_date', models.DateTimeField(blank=True, default=None, null=True)),
                ('sale_price_end_date', models.DateTimeField(blank=True, default=None, null=True)),
                # Counters, presumably incremented by the views layer — confirm.
                ('views', models.PositiveIntegerField(default=0)),
                ('orders', models.PositiveIntegerField(default=0)),
                ('date_published', models.DateTimeField(blank=True, default=django.utils.timezone.now)),
                ('is_published', models.BooleanField(default=True)),
                ('categories', models.ManyToManyField(to='logistics.Category')),
            ],
        ),
        migrations.CreateModel(
            name='Seller',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=50)),
                ('contact', models.CharField(max_length=50)),
                ('loc1_latitude', models.CharField(blank=True, max_length=25, null=True)),
                ('loc1_longitude', models.CharField(blank=True, max_length=25, null=True)),
                ('banner', models.ImageField(blank=True, upload_to='photos/sellers/%Y/%m/%d/')),
            ],
        ),
        # Ratings are 1-character string choices '1'..'5', not integers.
        migrations.CreateModel(
            name='RiderReview',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('rating', models.CharField(choices=[('1', '1'), ('2', '2'), ('3', '3'), ('4', '4'), ('5', '5')], default='5', max_length=1)),
                ('comment', models.TextField(blank=True, max_length=4000, null=True)),
                ('rider', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='riders_reviews', to=settings.AUTH_USER_MODEL)),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='rider_reviews', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='ProductReview',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('rating', models.CharField(choices=[('1', '1'), ('2', '2'), ('3', '3'), ('4', '4'), ('5', '5')], default='5', max_length=1)),
                ('comment', models.TextField(blank=True, max_length=4000, null=True)),
                ('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='products_reviews', to='logistics.product')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='product_reviews', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # Product->Seller link added after both models exist; SET_NULL keeps
        # products when their seller is deleted.
        migrations.AddField(
            model_name='product',
            name='seller',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='products', to='logistics.seller'),
        ),
        # A line item: quantity 1-10, with order/delivery state snapshots.
        migrations.CreateModel(
            name='Item',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('quantity', models.PositiveIntegerField(default=1, validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(10)])),
                ('is_ordered', models.BooleanField(default=False)),
                ('date_ordered', models.DateTimeField(blank=True, null=True)),
                # Price captured at order time, independent of current product price.
                ('ordered_price', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('is_delivered', models.BooleanField(default=False)),
                ('date_delivered', models.DateTimeField(blank=True, null=True)),
                ('activity', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='activity', to='logistics.activity')),
                ('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='order_items', to='logistics.product')),
            ],
        ),
        migrations.CreateModel(
            name='Favorite',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='favorited_by', to='logistics.product')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='favorites', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # Each Category belongs to a CategoryGroup.
        migrations.AddField(
            model_name='category',
            name='category',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='category_group', to='logistics.categorygroup'),
        ),
        migrations.AddField(
            model_name='activity',
            name='store',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='logistics.seller'),
        ),
        migrations.AddField(
            model_name='activity',
            name='user',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='orders', to=settings.AUTH_USER_MODEL),
        ),
    ]
class Migration(migrations.Migration):
    # Shop app update: adds callback requests (ContactPerson), orders with
    # line items, and reworks product pricing
    # (price_no_sale/price_sale -> prev_price/price).

    dependencies = [
        ('shop', '0001_initial'),
    ]

    operations = [
        # "Contact us" callback requests; 'processed' flags a completed call.
        migrations.CreateModel(
            name='ContactPerson',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('processed', models.BooleanField(default=False, verbose_name='Перезвонили')),
                ('name', models.CharField(blank=True, max_length=200, null=True, verbose_name='Имя')),
                ('phone', models.CharField(blank=True, max_length=200, null=True, verbose_name='Номер Телефона')),
                ('date', models.DateTimeField(auto_now_add=True, verbose_name='Дата создания заявки')),
            ],
            options={
                'verbose_name': 'Заявка связаться с нами',
                'verbose_name_plural': 'Заявки связаться с нами',
                'ordering': ['-date'],
            },
        ),
        migrations.CreateModel(
            name='Order',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('processed', models.BooleanField(default=False, verbose_name='Обработан')),
                ('date', models.DateTimeField(auto_now_add=True, verbose_name='Дата')),
                ('customer', models.CharField(default='', max_length=200, verbose_name='Покупатель')),
                ('phone_number', models.CharField(default='', max_length=20, verbose_name='Номер телефона')),
                ('total', models.PositiveIntegerField(default=0, verbose_name='Сумма заказа')),
            ],
            options={
                'verbose_name': 'Заказ',
                'verbose_name_plural': 'Заказы',
                'ordering': ['-date'],
            },
        ),
        migrations.AlterModelOptions(
            name='productimage',
            options={
                'ordering': ['id'],
                'verbose_name': 'Изображение',
                'verbose_name_plural': 'Изображения'
            },
        ),
        # Drop the old sale/no-sale pair in favor of prev_price + price.
        migrations.RemoveField(
            model_name='product',
            name='price_no_sale',
        ),
        migrations.RemoveField(
            model_name='product',
            name='price_sale',
        ),
        migrations.AddField(
            model_name='product',
            name='prev_price',
            field=models.PositiveIntegerField(blank=True, default=None, null=True, verbose_name='Цена без скидки'),
        ),
        # NOTE(review): PositiveSmallIntegerField caps price at 32767 while
        # prev_price is a full PositiveIntegerField — confirm this is intended.
        migrations.AddField(
            model_name='product',
            name='price',
            field=models.PositiveSmallIntegerField(default=0, verbose_name='Цена'),
        ),
        # category re-typed as M2M (dropped and re-added; prior type not
        # visible here, existing links are lost in the conversion).
        migrations.RemoveField(
            model_name='product',
            name='category',
        ),
        migrations.AddField(
            model_name='product',
            name='category',
            field=models.ManyToManyField(to='shop.ProductCategory', verbose_name='Категории'),
        ),
        # Order line items snapshot name/size/price at purchase time.
        migrations.CreateModel(
            name='OrderItem',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(default='', max_length=300, verbose_name='Название')),
                ('size', models.CharField(default='', max_length=50, verbose_name='Размер')),
                ('price', models.PositiveIntegerField(default=0, verbose_name='Цена')),
                ('quantity', models.PositiveIntegerField(default=0, verbose_name='Количество')),
                ('subtotal', models.PositiveIntegerField(default=0, verbose_name='Сумма')),
                ('order', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='shop.Order', verbose_name='Заказ')),
            ],
        ),
    ]
class Migration(migrations.Migration): initial = True dependencies = [] operations = [ migrations.CreateModel( name='Image', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('isMain', models.BooleanField(default=False, verbose_name='Главное изображение')), ('image', models.ImageField(upload_to=shop.models.Image.get_picture_url, verbose_name='Изображение')), ], options={ 'verbose_name': 'Изображение', 'verbose_name_plural': 'Изображения', }, ), migrations.CreateModel( name='Product', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=250, verbose_name='Название')), ('price_without_sale', models.DecimalField(decimal_places=2, default=0.0, max_digits=10, verbose_name='Цена без скидки')), ('sale', models.PositiveIntegerField(default=0, verbose_name='Скидка')), ('price', models.DecimalField(decimal_places=2, default=0.0, max_digits=10, verbose_name='Цена')), ('slug', models.SlugField(max_length=250, unique=True, verbose_name='Slug')), ('description', models.TextField(verbose_name='Описание')), ('stock', models.PositiveIntegerField(default=0, verbose_name='На складе')), ('published', models.BooleanField(default=True, verbose_name='Активно')), ('created', models.DateTimeField(auto_now_add=True, verbose_name='Создано')), ('updated', models.DateTimeField(auto_now=True, verbose_name='Изменено')), ], options={ 'verbose_name': 'Product', 'verbose_name_plural': 'Products', }, ), migrations.AlterIndexTogether( name='product', index_together={('id', 'slug')}, ), migrations.AddField( model_name='image', name='product', field=models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, related_name='images', to='shop.Product', verbose_name='Товар'), ), ]
class Migration(migrations.Migration):
    # Users app: a generic Person model, several role models linked 1:1 to
    # the auth user (Psychologist, Manager, Doctor, Coordinator, Academic),
    # and multi-table-inheritance children of Person / common.Phone.

    dependencies = [
        ('common', '0002_auto_20201024_2246'),
        ('users', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Person',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # NOTE(review): only first_name is nullable; the other name
                # parts are required — confirm that is intended.
                ('first_name', models.CharField(max_length=100, null=True)),
                ('second_name', models.CharField(max_length=100)),
                ('last_name', models.CharField(max_length=100)),
                ('second_last_name', models.CharField(max_length=100)),
                ('created_at', models.DateTimeField(auto_now_add=True, null=True)),
                ('updated_at', models.DateTimeField(auto_now=True, null=True)),
            ],
            options={
                'verbose_name': 'Persona',
                'verbose_name_plural': 'Personas',
                'db_table': 'users_person',
                'default_permissions': (),
            },
        ),
        # Role models below share the same shape: optional extra name parts
        # plus a OneToOne link to the auth user.
        migrations.CreateModel(
            name='Psychologist',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('second_name', models.CharField(max_length=100, null=True)),
                ('second_last_name', models.CharField(max_length=100, null=True)),
                ('updated_at', models.DateTimeField(auto_now=True, null=True)),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'default_permissions': (),
            },
        ),
        migrations.CreateModel(
            name='ProfileImage',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('profile_image', models.ImageField(max_length=255, upload_to='profile_picture')),
                ('created_at', models.DateTimeField(auto_now_add=True, null=True)),
                # Plain FK (not OneToOne): a user may have several images.
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'db_table': 'users_profile_image',
                'default_permissions': (),
            },
        ),
        # Multi-table inheritance child of common.Phone (see bases).
        migrations.CreateModel(
            name='PersonPhone',
            fields=[
                ('phone_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='common.Phone')),
                ('order', models.PositiveIntegerField(default=1)),
                ('person', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='phones', to='users.Person')),
            ],
            options={
                'verbose_name': 'Teléfono de persona',
                'verbose_name_plural': 'Teléfonos de personas',
                'db_table': 'users_person_phone',
                'default_permissions': (),
            },
            bases=('common.phone',),
        ),
        migrations.CreateModel(
            name='Manager',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('second_name', models.CharField(max_length=100, null=True)),
                ('second_last_name', models.CharField(max_length=100, null=True)),
                ('updated_at', models.DateTimeField(auto_now=True, null=True)),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'default_permissions': (),
            },
        ),
        migrations.CreateModel(
            name='Doctor',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('second_name', models.CharField(max_length=100, null=True)),
                ('second_last_name', models.CharField(max_length=100, null=True)),
                ('updated_at', models.DateTimeField(auto_now=True, null=True)),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'default_permissions': (),
            },
        ),
        migrations.CreateModel(
            name='Coordinator',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('second_name', models.CharField(max_length=100, null=True)),
                ('second_last_name', models.CharField(max_length=100, null=True)),
                ('updated_at', models.DateTimeField(auto_now=True, null=True)),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'default_permissions': (),
            },
        ),
        migrations.CreateModel(
            name='Academic',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('second_name', models.CharField(max_length=100, null=True)),
                ('second_last_name', models.CharField(max_length=100, null=True)),
                ('updated_at', models.DateTimeField(auto_now=True, null=True)),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'default_permissions': (),
            },
        ),
        # Multi-table inheritance child of users.Person.
        migrations.CreateModel(
            name='Sibling',
            fields=[
                ('person_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='users.Person')),
                ('birth_date', models.DateField(blank=True, null=True)),
                ('academic_degree', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='common.AcademicDegree')),
                ('attitude', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='common.Attitude')),
                ('gender', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='common.Gender')),
                ('relationship', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='common.Relationship')),
            ],
            options={
                'verbose_name': 'Hermano',
                'verbose_name_plural': 'Hermanos',
                'db_table': 'users_sibling',
                'default_permissions': (),
            },
            bases=('users.person',),
        ),
        # Also a Person child; links an emergency contact to a user account.
        migrations.CreateModel(
            name='EmergencyContact',
            fields=[
                ('person_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='users.Person')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'default_permissions': (),
            },
            bases=('users.person',),
        ),
    ]
class Migration(migrations.Migration):
    # Initial schema for a study-notes app: Mistake (wrong-answer records)
    # and Summary (an MPTT tree of study summaries).

    initial = True

    dependencies = []

    operations = [
        migrations.CreateModel(
            name='Mistake',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                # Soft-delete flag, indexed for filtered queries.
                ('is_deleted', models.BooleanField(db_index=True, default=False, verbose_name='是否软删除')),
                ('index', models.CharField(default='', max_length=32, verbose_name='题号')),
                ('content', models.CharField(default='', max_length=1000, verbose_name='题目')),
                # Answer options stored as JSON (django-jsonfield).
                ('options', jsonfield.fields.JSONField(max_length=1000, verbose_name='选项内容')),
            ],
            options={
                'verbose_name': '错题',
                'verbose_name_plural': '错题',
            },
        ),
        migrations.CreateModel(
            name='Summary',
            fields=[
                ('creator', models.CharField(blank=True, default='system', max_length=64, null=True, verbose_name='创建人')),
                ('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('updated_by', models.CharField(blank=True, default='system', max_length=64, null=True, verbose_name='修改人')),
                ('end_at', models.DateTimeField(blank=True, null=True, verbose_name='结束时间')),
                ('is_deleted', models.BooleanField(db_index=True, default=False, verbose_name='是否软删除')),
                # Non-auto string primary key.
                ('id', models.CharField(max_length=80, primary_key=True, serialize=False, verbose_name='ID')),
                ('subject_title', models.CharField(default='', max_length=64, verbose_name='科目名称')),
                ('chapter', models.CharField(default='', max_length=32, verbose_name='章节号')),
                ('content', models.CharField(default='', max_length=1000, verbose_name='总结内容')),
                # MPTT nested-set bookkeeping fields (managed by django-mptt).
                ('lft', models.PositiveIntegerField(editable=False)),
                ('rght', models.PositiveIntegerField(editable=False)),
                ('tree_id', models.PositiveIntegerField(db_index=True, editable=False)),
                ('level', models.PositiveIntegerField(editable=False)),
                ('parent', mptt.fields.TreeForeignKey(
                    blank=True,
                    null=True,
                    on_delete=django.db.models.deletion.CASCADE,
                    related_name='children',
                    to='miscollection.Summary',
                    verbose_name='上一目录')),
            ],
            options={
                'verbose_name': '总结',
                'verbose_name_plural': '总结',
            },
            # Custom default manager named '_objects' — presumably filters
            # soft-deleted rows in the model; confirm against the model code.
            managers=[
                ('_objects', django.db.models.manager.Manager()),
            ],
        ),
    ]
class Migration(migrations.Migration):
    # Initial order app schema: Order (with optional coupon discount),
    # OrderItem line items, and OrderTransaction payment records.

    initial = True

    dependencies = [
        ('shop', '0001_initial'),
        ('coupon', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Order',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('first_name', models.CharField(max_length=50)),
                ('last_name', models.CharField(max_length=50)),
                ('email', models.EmailField(max_length=254)),
                ('address', models.CharField(max_length=250)),
                ('postal_code', models.CharField(max_length=20)),
                ('city', models.CharField(max_length=100)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('paid', models.BooleanField(default=False)),
                # Discount amount; upper bound 100000 suggests an absolute
                # value rather than a percentage — confirm against views.
                ('discount', models.IntegerField(
                    default=0,
                    validators=[
                        django.core.validators.MinValueValidator(0),
                        django.core.validators.MaxValueValidator(100000)
                    ])),
                # PROTECT: a coupon referenced by orders cannot be deleted.
                ('coupon', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='order_coupon', to='coupon.Coupon')),
            ],
            options={
                'ordering': ['-created'],
            },
        ),
        migrations.CreateModel(
            name='OrderItem',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Price snapshot at purchase time.
                ('price', models.DecimalField(decimal_places=2, max_digits=10)),
                ('quantity', models.PositiveIntegerField(default=1)),
                ('order', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='items', to='order.Order')),
                ('product', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='order_products', to='shop.Product')),
            ],
        ),
        migrations.CreateModel(
            name='OrderTransaction',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # NOTE(review): 'mechant_order_id' looks like a typo for
                # 'merchant_order_id'; renaming the column now needs a
                # follow-up migration, so the name is left untouched here.
                ('mechant_order_id', models.CharField(blank=True, max_length=120, null=True)),
                ('transaction_id', models.CharField(blank=True, max_length=120, null=True)),
                ('amount', models.PositiveIntegerField(default=0)),
                ('transaction_status', models.CharField(blank=True, max_length=220, null=True)),
                ('type', models.CharField(blank=True, max_length=120)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('order', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='order.Order')),
            ],
            options={
                'ordering': ['-created'],
            },
        ),
    ]
class Migration(migrations.Migration):
    # OpenStack service plugin schema: Flavor/Image catalog entries tied to
    # ServiceSettings, plus the service and its per-project link model.
    # Legacy Django 1.x style: bytes help_text, FKs without on_delete,
    # preserve_default on AddField.

    dependencies = [
        ('structure', '0014_servicesettings_options'),
    ]

    operations = [
        migrations.CreateModel(
            name='Flavor',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=150, verbose_name='name')),
                ('uuid', uuidfield.fields.UUIDField(unique=True, max_length=32, editable=False, blank=True)),
                # Identifier of the flavor on the OpenStack backend.
                ('backend_id', models.CharField(max_length=255, db_index=True)),
                ('cores', models.PositiveSmallIntegerField(help_text=b'Number of cores in a VM')),
                ('ram', models.PositiveIntegerField(help_text=b'Memory size in MiB')),
                ('disk', models.PositiveIntegerField(help_text=b'Root disk size in MiB')),
                ('settings', models.ForeignKey(related_name='+', blank=True, to='structure.ServiceSettings', null=True)),
            ],
            options={
                'abstract': False,
            },
            bases=(nodeconductor.logging.log.LoggableMixin, models.Model),
        ),
        migrations.CreateModel(
            name='Image',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=150, verbose_name='name')),
                ('uuid', uuidfield.fields.UUIDField(unique=True, max_length=32, editable=False, blank=True)),
                ('backend_id', models.CharField(max_length=255, db_index=True)),
                ('min_disk', models.PositiveIntegerField(default=0, help_text=b'Minimum disk size in MiB')),
                ('min_ram', models.PositiveIntegerField(default=0, help_text=b'Minimum memory size in MiB')),
                ('settings', models.ForeignKey(related_name='+', blank=True, to='structure.ServiceSettings', null=True)),
            ],
            options={
                'abstract': False,
            },
            bases=(models.Model,),
        ),
        # Multi-table inheritance child of structure.Service.
        migrations.CreateModel(
            name='OpenStackService',
            fields=[
                ('service_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='structure.Service')),
            ],
            options={
                'abstract': False,
            },
            bases=(nodeconductor.logging.log.LoggableMixin, 'structure.service'),
        ),
        # Through-model for the service<->project M2M, carrying tenant state.
        migrations.CreateModel(
            name='OpenStackServiceProjectLink',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                # FSM state: 1=Sync Scheduled, 2=Syncing, 3=In Sync, 4=Erred.
                ('state', django_fsm.FSMIntegerField(default=1, choices=[(1, 'Sync Scheduled'), (2, 'Syncing'), (3, 'In Sync'), (4, 'Erred')])),
                ('tenant_id', models.CharField(max_length=64, blank=True)),
                ('internal_network_id', models.CharField(max_length=64, blank=True)),
                ('availability_zone', models.CharField(help_text=b'Optional availability group. Will be used for all instances provisioned in this tenant', max_length=100, blank=True)),
                ('project', models.ForeignKey(to='structure.Project')),
                ('service', models.ForeignKey(to='openstack.OpenStackService')),
            ],
            options={
                'abstract': False,
            },
            bases=(nodeconductor.logging.log.LoggableMixin, models.Model),
        ),
        migrations.AddField(
            model_name='openstackservice',
            name='projects',
            field=models.ManyToManyField(related_name='openstack_services', through='openstack.OpenStackServiceProjectLink', to='structure.Project'),
            preserve_default=True,
        ),
    ]
class SeoMetadata(models.Model): view_name = models.CharField(verbose_name=_('View Name'), max_length=30, blank=True, null=True) content_type = models.ForeignKey( ContentType, null=True, blank=True, verbose_name=_('Model'), on_delete=models.deletion.CASCADE, ) object_id = models.PositiveIntegerField(null=True, blank=True, verbose_name=_('Id')) content_object = GenericForeignKey('content_type', 'object_id') lang_code = models.CharField(verbose_name=_('Language'), max_length=2, choices=settings.SEO_LANGUAGES, default=settings.DEFAULT_LANG_CODE) has_parameters = models.BooleanField( default=False, help_text=_("This indicates if the SEOMetadata path contains \ parameters.")) path = models.CharField(verbose_name=_('Path'), max_length=200, db_index=True, null=True, blank=False, help_text=_("This should be an absolute path, \ excluding the domain name. Example: '/foo/bar/'. \ You can also capture parameters using '{X}' \ notation, where X is a positive number.")) # SEO Info title = models.CharField( verbose_name=_('Title'), max_length=100, blank=False, null=True, help_text=_("Here you can make use of the parameters captured in the \ URL using the same '{X}' notation.")) description = models.CharField( verbose_name=_('Description'), max_length=200, blank=False, null=True, help_text=_("Here you can make use of the parameters captured in the \ URL using the same '{X}' notation.")) priority = models.IntegerField( verbose_name=_("Priority"), help_text=_("Priority when duplicated. Higher the most priority. \ Default 0"), blank=False, null=False, default=0) class Meta: verbose_name = _('SEO Path Metadata') verbose_name_plural = _('SEO Path Metadata') ordering = ('path', 'lang_code') def __str__(self): return "Language: %s | URL: %s" % (self.lang_code, self.path) def get_metadata(self): result = {} for item in settings.SEO_FIELDS: result[item] = getattr(self, item) return result
class Migration(migrations.Migration): dependencies = [ ('projects', '0017_add_project_attribute_image'), ] operations = [ migrations.AlterModelOptions( name='projectphase', options={ 'ordering': ('index', ), 'verbose_name': 'project phase', 'verbose_name_plural': 'project phases' }, ), migrations.AlterModelOptions( name='projectphasesection', options={ 'ordering': ('index', ), 'verbose_name': 'project phase section', 'verbose_name_plural': 'project phase sections' }, ), migrations.AlterModelOptions( name='projectphasesectionattribute', options={ 'ordering': ('index', ), 'verbose_name': 'project phase section attribute', 'verbose_name_plural': 'project phase section attributes' }, ), migrations.AlterModelOptions( name='attribute', options={ 'ordering': ('identifier', ), 'verbose_name': 'attribute', 'verbose_name_plural': 'attributes' }, ), migrations.AlterField( model_name='attribute', name='value_type', field=models.CharField(choices=[('integer', 'integer'), ('short_string', 'short string'), ('long_string', 'long string'), ('boolean', 'boolean'), ('date', 'date'), ('user', 'user'), ('geometry', 'geometry'), ('image', 'image')], max_length=64, verbose_name='value type'), ), migrations.AlterField( model_name='projectphase', name='index', field=models.PositiveIntegerField(verbose_name='index'), ), migrations.AlterField( model_name='projectphasesection', name='index', field=models.PositiveIntegerField(verbose_name='index'), ), migrations.AlterField( model_name='projectphasesectionattribute', name='index', field=models.PositiveIntegerField(verbose_name='index'), ), migrations.AlterUniqueTogether( name='projectphase', unique_together=set(), ), migrations.AlterUniqueTogether( name='projectphasesection', unique_together=set(), ), migrations.AlterUniqueTogether( name='projectphasesectionattribute', unique_together=set(), ), ]