def test_value_to_string(self):
    """value_to_string() should serialize the stored value back to compact JSON."""
    field = JSONField(u"test")
    field.set_attributes_from_name("json")
    obj = JSONFieldTestModel(json='''{ "spam": "eggs" }''')
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
    self.assertEqual(u'{"spam": "eggs"}', field.value_to_string(obj))
def test_formfield(self):
    """formfield() should produce a JSONFormField rendered with a JSONWidget."""
    from jsonfield.forms import JSONFormField
    from jsonfield.widgets import JSONWidget
    field = JSONField(u"test")
    field.set_attributes_from_name("json")
    formfield = field.formfield()
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
    # Exact-type comparison is kept intentionally (not isinstance).
    self.assertEqual(type(formfield), JSONFormField)
    self.assertEqual(type(formfield.widget), JSONWidget)
class StatsyObject(models.Model):
    # A single tracked statistic: optionally tied to a group, an event, a user,
    # and (via a generic FK) to any other model instance.
    group = models.ForeignKey('StatsyGroup', blank=True, null=True, related_name='statsy_object_list', verbose_name='group')
    event = models.ForeignKey('StatsyEvent', blank=True, null=True, related_name='statsy_object_list', verbose_name='event')
    user = models.ForeignKey(settings.AUTH_USER_MODEL, blank=True, null=True, related_name='statsy_object_list', verbose_name='user')
    label = models.CharField(max_length=255, blank=True, null=True, verbose_name='label')

    # Generic relation to an arbitrary model instance.
    content_type = models.ForeignKey(ContentType, blank=True, null=True)
    object_id = models.PositiveIntegerField(blank=True, null=True)
    content_object = GenericForeignKey('content_type', 'object_id')

    created_at = models.DateTimeField(blank=True, null=True, db_index=True, verbose_name='created at')

    # `value` is a descriptor that dispatches reads/writes to one of the typed
    # columns below, based on value_types ('float' -> float_value, 'text' -> text_value).
    value_types = ('float', 'text')
    value = ValueDescriptor(value_types=value_types)
    float_value = models.FloatField(blank=True, null=True, verbose_name='float value')
    text_value = models.CharField(max_length=255, blank=True, null=True, verbose_name='text value')

    url = models.URLField(blank=True, null=True, verbose_name='url')
    duration = models.IntegerField(blank=True, null=True, verbose_name='duration')
    extra = JSONField(blank=True, null=True, max_length=1024, verbose_name='extra')

    objects = StatsyQuerySet.as_manager()

    class Meta:
        verbose_name = 'Statsy Object'
        verbose_name_plural = 'Statsy Objects'
        index_together = ['content_type', 'object_id', 'user']
        ordering = ('-created_at', )
        permissions = (('stats_view', 'Can view stats'), )

    def __str__(self):
        # "group:event[:label] dd/mm/YYYY HH:MM"; label is included only when set.
        if self.label:
            return '{0}:{1}:{2} {3}'.format(
                self.group, self.event, self.label,
                self.created_at.strftime('%d/%m/%Y %H:%M'))
        return '{0}:{1} {2}'.format(self.group, self.event,
                                    self.created_at.strftime('%d/%m/%Y %H:%M'))

    @classmethod
    def create(cls, **kwargs):
        """Create and save a StatsyObject; `value` is routed through the descriptor.

        `value` is popped out of kwargs because it is a descriptor, not a real
        model field, and must be assigned after instantiation.
        """
        value = kwargs.pop('value', None)
        if 'created_at' not in kwargs:
            # NOTE(review): naive local datetime.now(); if USE_TZ is enabled this
            # mixes naive/aware datetimes — confirm against project settings.
            kwargs['created_at'] = datetime.now()

        new_object = cls(**kwargs)
        new_object.value = value
        new_object.save()

        return new_object

    def serialize(self):
        """Return a plain-dict snapshot of this object (JSON-friendly types)."""
        return {
            'id': self.pk,
            'group_id': self.group_id,
            'event_id': self.event_id,
            'user_id': self.user_id,
            'label': self.label,
            'content_type_id': self.content_type_id,
            'object_id': self.object_id,
            'created_at': self.created_at.strftime('%Y-%m-%d %H:%M:%S'),
            'value': self.value,
            'url': self.url,
            'duration': self.duration,
            'extra': self.extra
        }
class Group(RedwoodGroup):
    # Server-side cache of the last broadcast queue state, so client pages do
    # not reset on reload.
    cache = JSONField(null=True)

    # needed for otree redwood; this should replace the need for the get_timeout_seconds method
    # in pages.QueueService, but for some reason has no effect. This is essentially a wrapper
    # for the timeout_seconds variable anyway.
    def period_length(self):
        g_index = self.get_player_by_id(1).participant.vars[
            self.round_number]['group']
        return Constants.config[g_index][self.round_number - 1]['settings']['duration']

    # takes in the data transferred back and forth by channels,
    # and generates a list representing the queue, where each element in the list
    # IMPORTANT: this list represents the entire queue, including players in the service room,
    # organized by when they arrived. This means that the 0th element in the returned list is the
    # first person to have entered the service room, and the last element in the list is the person
    # in the back of the queue.
    def queue_state(self, data):
        queue = {}
        for p in self.get_players():
            pp = data[str(p.id_in_group)]
            queue[pp['pos']] = pp['id']
        return [queue.get(k) for k in sorted(queue)]

    def new_metadata(self, g_index, requester_id, requestee_id, swap_method):
        """Build a fresh metadata record for one trade event; all per-trade
        fields start as the 'N/A' sentinel and are filled in as the event
        progresses."""
        m = OrderedDict()
        m['subsession_id'] = self.subsession_id
        m['round'] = self.round_number
        m['group_id'] = g_index
        m['requester_id'] = requester_id
        m['requester_pos_init'] = 'N/A'
        m['requester_pos_final'] = 'N/A'
        m['requester_bid'] = 'N/A'
        m['requestee_id'] = requestee_id
        m['requestee_pos_init'] = 'N/A'
        m['requestee_pos_final'] = 'N/A'
        m['requestee_bid'] = 'N/A'
        m['request_timestamp_absolute'] = 'N/A'
        m['response_timestamp_absolute'] = 'N/A'
        m['request_timestamp_relative'] = 'N/A'
        m['response_timestamp_relative'] = 'N/A'
        m['swap_method'] = swap_method
        m['status'] = 'N/A'
        m['message'] = 'N/A'
        m['transaction_price'] = 'N/A'
        return m

    """
    On swap event: this is a method defined by redwood. It is called when channel.send() is
    called in the javascript. That happens when 1) someone starts a trade request by pressing
    the trade button, 2) someone responds to a trade request by pressing the yes or no button,
    3) someone enters the service room and the entire queue moves forward.

    This method essentially defines a state machine. Each player has a state, represented by a
    dictionary with keys:
        id: id in group; a number from 1 to Constants.players_per_group,
        pos: position in queue at time of input; a number from -Constants.players_per_group to
            Constants.players_per_group,
        in_trade: boolean - true if this player has 1) requested a trade and awaits a response;
            2) has been requested and has not yet responded,
        last_trade_request: timestamp of the last time this player clicked the trade button,
        requested: if this player has been requested to swap, the id of the player who made the
            request; None, or a number from 1 to Constants.players_per_group,
        requesting: if this player has made a request to swap, the id of the player who the
            request was made to; None, or a number from 1 to Constants.players_per_group,
        accepted: status of trade acceptance; 2 if requesting/no response/not in trade,
            1 if accepted, 0 if declined,
        alert: the current alert displayed to a player; a value in Constants.alert_messages,
        num_players_queue: the number of players who have not entered the service room at time
            of input; a number from 0 to Constants.players_per_group,
        num_players_service: the number of players who have entered the service room at time of
            input; a number from 0 to Constants.players_per_group,
        next: boolean - true if someone's service time has just run out, false otherwise; this
            is true when someone has passed into the service room, and everyone in the queue
            should move forward one position.

    The state machine takes in the state of each player, and alters the states of that player
    and other players accordingly. Note that upon this method being called, only one player's
    state can be different than it was directly before the method was called; because each time
    an event occurs, (request, response, or next) this method gets called. After updating all
    player's states, sends the data back to the client.
    - Need to ensure that this is true; otherwise, we might need a queue of pending events
    """

    def _on_swap_event(self, event=None, **kwargs):
        duration = self.period_length()
        start_time = event.value['start_time']
        # updates states of all players involved in the most recent event that
        # triggered this method call
        for p in self.get_players():
            """
            fields 'requesting', 'accepted', and 'next' of the player who initiated the event
            will be updated client-side; all other fields (the aggregate of which is the player
            state) are updated here.

            player states; every player in the round is in exactly one of these states upon the
            initiation of an event (when this method gets called)
            - reset: no event that involves this player has been initiated by the most recent
              call to this method. There is no case for this, as the player's state is not
              updated.
            - service_clean: this player is not in trade and service time has run out
            - service_dirty: this player is in trade and service time has run out. This is an
              extension of service_clean.
            - service_other: other player's service time has run out
            - requesting_clean: player is not in_trade and requesting someone who is not
              in_trade
            - requesting_dirty: player is not in_trade and requesting someone who is in_trade;
              the JS should make this impossible (disable trade button)
            - accepting: player is in_trade and accepting
            - declining: player is in_trade and declining
            """
            # gets this player's dict from the transmitted event
            p1 = event.value[str(p.id_in_group)]
            g_index = p.participant.vars[self.round_number]['group']
            swap_method = Constants.config[g_index][
                self.round_number - 1]['settings']['swap_method']

            # someone has entered the service room
            if p1['next'] == True:
                if p1['pos'] == 0:
                    # service_clean: this player just entered the service room
                    p1['alert'] = Constants.alert_messages['next_self']
                    # service_dirty: player was mid-trade; cancel the pending
                    # trade and reset both parties' trade fields
                    if p1['in_trade']:
                        p2_id = str(p1['requested'])
                        p2 = event.value[p2_id]
                        metadata = self.new_metadata(g_index, p2['id'],
                                                     p1['id'], swap_method)
                        metadata['request_timestamp_absolute'] = p2[
                            'last_trade_request']
                        p1['in_trade'] = False
                        p2['in_trade'] = False
                        p1['requested'] = None
                        p2['requesting'] = None
                        p1['accepted'] = 2  # this should be unnecessary
                        p1['bid'] = None
                        p2['bid'] = None
                        metadata['transaction_price'] = 0
                        metadata['status'] = 'cancelled'
                        metadata['requester_pos_final'] = p2['pos']
                        metadata['requestee_pos_final'] = p1['pos']
                        # NOTE(review): '%s' strftime is platform-specific and
                        # yields a *string* of seconds here, while the response
                        # branch below stores int milliseconds — confirm the
                        # intended unit for response_timestamp_absolute.
                        timestamp = datetime.now().strftime('%s')
                        metadata['response_timestamp_absolute'] = timestamp
                        p2['last_trade_request'] = None
                        event.value[p2_id] = p2
                        event.value[str(p.id_in_group)] = p1
                        #metadata['queue'] = self.queue_state(event.value)
                        self.subsession.dump_metadata(duration, start_time,
                                                      metadata)
                # service_other: still waiting in the queue; everyone moves up
                elif p1['pos'] > 0:
                    # this is the only case I know of where you can get the same alert twice
                    # in a row (except none). if you get the same alert twice in a row the
                    # alert will not display because the watch function that displays alerts
                    # only gets called when the alert changes.
                    if p1['alert'] == Constants.alert_messages['next_queue']:
                        p1['alert'] = Constants.alert_messages['next_queue2']
                    else:
                        p1['alert'] = Constants.alert_messages['next_queue']
                else:
                    # already in the service room (pos < 0): no alert
                    p1['alert'] = Constants.alert_messages['none']
                p1['next'] = False

            # someone has initiated a trade request
            elif not p1['in_trade'] and p1['requesting'] != None:
                p2 = event.value[str(p1['requesting'])]
                metadata = self.new_metadata(g_index, p1['id'], p2['id'],
                                             swap_method)
                metadata['requester_pos_init'] = p1['pos']
                metadata['requestee_pos_init'] = p2['pos']
                timestamp = p1['last_trade_request']
                metadata['request_timestamp_absolute'] = timestamp
                if swap_method == 'cut':
                    # 'cut' needs no consent: requester jumps to the requestee's
                    # position and everyone in between shifts back one slot
                    temp = p2['pos']
                    for i in event.value:
                        if i != 'metadata' and i != str(p.id_in_group):
                            if (event.value[i]['pos'] < p1['pos']
                                    and event.value[i]['pos'] >= p2['pos']):
                                event.value[i][
                                    'alert'] = Constants.alert_messages[
                                        'cutted']
                                event.value[i]['pos'] += 1
                    p1['pos'] = temp
                    p1['alert'] = Constants.alert_messages['cutting']
                    metadata['requester_pos_final'] = p1['pos']
                    metadata['requestee_pos_final'] = p2['pos']
                    metadata['status'] = 'cut'
                    p1['requesting'] = None
                    p1['last_trade_request'] = None
                    event.value[str(p.id_in_group)] = p1
                    self.subsession.dump_metadata(duration, start_time,
                                                  metadata)
                else:
                    # requesting_clean: target is free, open the trade
                    if not p2['in_trade']:
                        # print('CORRECT ')
                        message = p1.get('message')
                        # print(message)
                        p1['in_trade'] = True
                        p2['in_trade'] = True
                        p2['requested'] = p1['id']
                        # reworked double auction: requester's bid is shown to
                        # the requestee as 'other_bid' instead of 'bid'
                        if swap_method == 'double':
                            p2['other_bid'] = p1['bid']
                        else:
                            p2['bid'] = p1['bid']
                        p2['message'] = message
                        p1['alert'] = Constants.alert_messages['requesting']
                        p2['alert'] = Constants.alert_messages['requested']
                        event.value[str(p1['requesting'])] = p2
                        self.subsession.dump_metadata(duration, start_time,
                                                      metadata)
                    # requesting_dirty; the js should prevent the logic from ever reaching this
                    else:
                        p1['requesting'] = None
                        p1['alert'] = Constants.alert_messages['unv_other']

            # someone has responded to a trade request
            elif p1['in_trade'] and p1['requested'] != None:
                # accepted == 2 means "no response yet"; only act on 0/1
                if p1['accepted'] != 2:
                    p2_id = str(p1['requested'])
                    p2 = event.value[p2_id]
                    metadata = self.new_metadata(g_index, p2_id, p1['id'],
                                                 swap_method)
                    metadata['request_timestamp_absolute'] = p2[
                        'last_trade_request']
                    timestamp = datetime.now().strftime('%s')
                    metadata['response_timestamp_absolute'] = int(
                        timestamp) * 1000
                    metadata['requester_bid'] = p2.get('bid', 'N/A')
                    metadata['requestee_bid'] = p1.get('bid', 'N/A')
                    # declining: clear the trade, no position change
                    if p1['accepted'] == 0:
                        p1['in_trade'] = False
                        p2['in_trade'] = False
                        p1['requested'] = None
                        p2['requesting'] = None
                        p1['accepted'] = 2
                        p1['alert'] = Constants.alert_messages['declining']
                        p2['alert'] = Constants.alert_messages['declined']
                        p2['bid'] = None
                        p1['bid'] = None
                        metadata['status'] = 'declined'
                    # accepting: swap positions and settle per swap_method
                    elif p1['accepted'] == 1:
                        p1['in_trade'] = False
                        p2['in_trade'] = False
                        p1['requested'] = None
                        p2['requesting'] = None
                        p1['accepted'] = 2
                        temp = p1['pos']
                        p1['pos'] = p2['pos']
                        p2['pos'] = temp
                        p1['alert'] = Constants.alert_messages['accepting']
                        p2['alert'] = Constants.alert_messages['accepted']
                        # fix for typeError when accepting a swap during which
                        # the swapMethod is 'swap'
                        if swap_method == 'swap':
                            p2['bid'] = None
                        elif swap_method == 'token':
                            # one token changes hands; bids mirror each other
                            p2['tokens'] -= 1
                            p1['tokens'] += 1
                            p2['bid'] = -float(p1['bid'])
                        elif swap_method == 'take/Leave':
                            p2['bid'] = -float(p1['bid'])
                        else:
                            # reworked double auction: price is the midpoint of
                            # the two bids, signed by direction of payment
                            p2['other_bid'] = p1['bid']
                            av_bid = (float(p1['bid']) +
                                      float(p2['bid'])) / 2
                            p2['average_bid'] = -av_bid
                            p1['average_bid'] = av_bid
                        metadata['status'] = 'accepted'
                        metadata['requester_pos_final'] = p2['pos']
                        metadata['requestee_pos_final'] = p1['pos']
                        message = p1.get('message', 'N/A')
                        if message == '':
                            message = 'N/A'
                        metadata['message'] = message
                        if swap_method == 'take/Leave':
                            metadata['transaction_price'] = p1.get('bid')
                        elif swap_method == 'double':
                            metadata['transaction_price'] = p1.get(
                                'average_bid')
                        else:
                            metadata['transaction_price'] = 'N/A'
                    p2['last_trade_request'] = None
                    event.value[p2_id] = p2
                    event.value[str(p.id_in_group)] = p1
                    self.subsession.dump_metadata(duration, start_time,
                                                  metadata)
            event.value[str(p.id_in_group)] = p1  # partially redundant
        # broadcast the updated data out to all subjects
        self.send('swap', event.value)
        # cache state of queue so that client pages will not reset on reload
        self.cache = event.value
        # manually save all updated fields to db. otree redwood thing
        self.save()
from django.db import models
from django.db.models.fields import Field
from jsonfield import JSONField

from jsonLookup import hasLookup

# Register the custom `has` lookup on JSONField so queries like
# properties__has=<key> work against the JSON column.
JSONField.register_lookup(hasLookup)


class User(models.Model):
    """A user with free-form JSON properties."""
    name = models.CharField(max_length=50)
    # Arbitrary per-user data; queryable via the `has` lookup registered above.
    properties = JSONField()


# Create your models here.
class Feature(models.Model):
    """
    Model to represent features created in the application.

    NOTE: this is Python 2 code (dict.items() concatenation, `unicode`,
    `has_key`, `types.ListType`).
    """

    STATUS_VALUES = [
        'Unassigned', 'In work', 'Awaiting review', 'In review', 'Completed'
    ]  # 'Assigned'
    STATUS_CHOICES = [(choice, choice) for choice in STATUS_VALUES]

    aoi = models.ForeignKey(AOI, related_name='features', editable=False)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    objects = models.GeoManager()
    analyst = models.ForeignKey(User, editable=False)
    template = models.ForeignKey("FeatureType", on_delete=models.PROTECT)
    status = models.CharField(max_length=15,
                              choices=STATUS_CHOICES,
                              default='In work')

    # Allow the user to save their own properties
    properties = JSONField(load_kwargs={}, blank=True, null=True)

    # These help the user identify features when data is exposed outside of the application (Geoserver).
    job = models.ForeignKey(Job, editable=False)
    project = models.ForeignKey(Project, editable=False)

    # Try this vs having individual models
    the_geom = models.GeometryField(blank=True, null=True)

    def geoJSON(self, as_json=True, using_style_template=True):
        """
        Returns geoJSON of the feature.

        Try to conform to
        https://github.com/mapbox/simplestyle-spec/tree/master/1.0.0

        :param as_json: if True, return a JSON string (via clean_dumps);
            otherwise return the dict with string properties sanitized.
        :param using_style_template: if True, style comes from the feature's
            template; otherwise the raw style properties are used.
        """
        properties_main = self.properties or {}
        properties_built = dict(
            id=self.id,
            status=self.status,
            analyst=self.analyst.username,
            created_at=datetime.strftime(self.created_at,
                                         '%Y-%m-%dT%H:%M:%S%Z'),
            updated_at=datetime.strftime(self.updated_at,
                                         '%Y-%m-%dT%H:%M:%S%Z'),
        )
        properties_template = self.template.properties or {}

        # properties_template can return a list from it's backing model, make sure we get the Dict
        if type(properties_template) == types.ListType:
            properties_template = properties_template[0]

        # srj: if using_style_template set, we're styling object from its feature id, else we'll
        # just use the style properties (which should already be included if defined for feature)
        # (we may want to set some defaults later on to make sure)
        if using_style_template:
            properties_built['template'] = self.template.id if hasattr(
                self.template, "id") else None

        # Python 2 dict merge; later dicts win on key collisions.
        properties = dict(properties_built.items() + properties_main.items() +
                          properties_template.items())

        feature_type = FeatureType.objects.get(id=self.template.id)

        geojson = SortedDict()
        geojson["type"] = "Feature"
        geojson["properties"] = properties
        geojson["geometry"] = json.loads(self.the_geom.json)

        if feature_type and using_style_template:
            geojson["style"] = feature_type.style_to_geojson()
        else:
            geojson["style"] = feature_type.style

        if (as_json):
            return clean_dumps(geojson)
        else:
            # NOTE(review): these replacement strings look like mangled HTML
            # escaping (e.g. '&lt;'/'&gt;' lost in an encoding pass) — verify
            # the intended sanitization before relying on it.
            for key in properties:
                if isinstance(properties[key], str) or isinstance(
                        properties[key], unicode):
                    properties[key] = properties[key].replace(
                        '<', '<l').replace('>', '>').replace(
                            "javascript:", "j_script-")
            return geojson

    def json_item(self, show_detailed_properties=False):
        """Return a flat dict of feature properties for listings/exports."""
        properties_main = self.properties or {}

        # Pull the County data if it exists, otherwise find it and add it back to the object
        if properties_main.has_key('county'):
            county = properties_main['county']
        else:
            # Spatial lookup: county polygon containing the feature centroid.
            county_list = Counties.objects.filter(
                poly__contains=self.the_geom.centroid.wkt)
            if len(county_list):
                county = str(county_list[0].name)
            else:
                county = "Unknown"
            self.properties = properties_main

        if not show_detailed_properties:
            # Collapse linked_items to a boolean flag for the compact view.
            if 'linked_items' in properties_main:
                properties_main['linked_items'] = True
            else:
                properties_main['linked_items'] = False

        properties_built = dict(
            id=self.id,
            feature_type=str(self.template.name) if hasattr(
                self.template, "name") else self.template.id,
            analyst=str(self.analyst.username),
            workcell_id=self.aoi.id,
            status=str(self.status),
            county=county
            # created_at=datetime.strftime(self.created_at, '%Y-%m-%dT%H:%M:%S%Z'),
            # updated_at=datetime.strftime(self.updated_at, '%Y-%m-%dT%H:%M:%S%Z'),
        )
        properties_feature = dict(self.template.properties or {})
        # Python 2 dict merge; later dicts win on key collisions.
        properties = dict(properties_main.items() + properties_built.items() +
                          properties_feature.items())
        return properties

    def __unicode__(self):
        return "Feature created for {0}".format(self.aoi.name)

    def clean(self):
        """Validate that the geometry type matches the template's type."""
        obj_geom_type = self.the_geom.geom_type.lower()
        template_geom_type = self.template.type.lower()
        if obj_geom_type != template_geom_type:
            error_text = "Feature type {0} does not match the template's feature type {1}."
            raise ValidationError(
                error_text.format(obj_geom_type, template_geom_type))

    class Meta:
        ordering = (
            '-updated_at',
            'aoi',
        )
class LtiUserData(models.Model):
    """Per-user storage of LTI launch parameters, used to send grades back to
    the LTI consumer later."""

    user = models.ForeignKey(User)
    # FIX: use the `dict` callable as default, not a `{}` literal — a literal
    # default is a single mutable object shared by every model instance.
    edx_lti_parameters = JSONField(default=dict)
    custom_key = models.CharField(max_length=190, null=False, default='')

    class Meta:
        app_label = "django_lti_tool_provider"
        unique_together = (("user", "custom_key"), )

    @property
    def _required_params(self):
        # LTI parameters required to post an outcome (grade) request.
        return ["lis_result_sourcedid", "lis_outcome_service_url"]

    def _validate_lti_grade_request(self, grade):
        """Validate grade range and stored LTI parameters.

        :param grade: float in [0, 1]
        :raises ValueError: (after logging) if the grade is out of range or
            required LTI parameters are missing.
        """
        def _log_and_throw(message):
            _logger.error(message)
            raise ValueError(message)

        if not 0 <= grade <= 1:
            _log_and_throw(
                "Grade should be in range [0..1], got {grade}".format(
                    grade=grade))

        if not self.edx_lti_parameters:
            # FIX: the original called .format(params=...) on a string with no
            # placeholders — a no-op; the pointless call is removed.
            _log_and_throw("LTI grade parameters is not set")

        empty_parameters = [
            parameter for parameter in self._required_params
            if not self.edx_lti_parameters.get(parameter, '')
        ]
        if empty_parameters:
            parameters_repr = ", ".join(empty_parameters)
            _log_and_throw(
                "Following required LTI parameters are not set: {parameters}".
                format(parameters=parameters_repr))

    def send_lti_grade(self, grade):
        """Instantiates DjangoToolProvider using stored lti parameters and
        sends grade.

        :param grade: float in [0, 1]
        :returns: the outcome response from the LTI consumer.
        """
        self._validate_lti_grade_request(grade)

        provider = DjangoToolProvider(settings.LTI_CLIENT_KEY,
                                      settings.LTI_CLIENT_SECRET,
                                      self.edx_lti_parameters)
        outcome = provider.post_replace_result(grade)
        _logger.info(
            u"LTI grade request was %(successful)s. Description is %(description)s",
            dict(successful="successful"
                 if outcome.is_success() else "unsuccessful",
                 description=outcome.description))
        return outcome

    @classmethod
    def get_or_create_by_parameters(cls,
                                    user,
                                    authentication_manager,
                                    lti_params,
                                    create=True):
        """
        Gets a user's LTI user data, creating the user if they do not exist.
        If create is False, it will raise LtiUserData.DoesNotExist should no
        data exist for the user.

        This function also does a bit of sanity checking to make sure the
        current user_id matches the stored lti user_id, raising WrongUserError
        if not.
        """
        custom_key = authentication_manager.vary_by_key(lti_params)
        # implicitly tested by test_views
        if custom_key is None:
            custom_key = ''

        if create:
            lti_user_data, created = LtiUserData.objects.get_or_create(
                user=user, custom_key=custom_key)
        else:
            # Could omit it, but it would change the signature.
            created = False
            lti_user_data = LtiUserData.objects.get(user=user,
                                                    custom_key=custom_key)

        # Sanity check: the stored anonymous user_id (if any) must match the
        # incoming one for this user.
        if lti_user_data.edx_lti_parameters.get(
                'user_id', lti_params['user_id']) != lti_params['user_id']:
            # TODO: not covered by test
            message = u"LTI parameters for user found, but anonymous user id does not match."
            _logger.error(message)
            raise WrongUserError(message)

        return lti_user_data, created

    @classmethod
    def store_lti_parameters(cls, user, authentication_manager, lti_params):
        """
        Stores LTI parameters into the DB, creating or updating record as
        needed.
        """
        lti_user_data, created = cls.get_or_create_by_parameters(
            user, authentication_manager, lti_params)
        lti_user_data.edx_lti_parameters = lti_params
        if not created:
            _logger.debug(u"Replaced LTI parameters for user %s",
                          user.username)
        lti_user_data.save()
        return lti_user_data

    def __unicode__(self):
        return u"{classname} for {user} and (vary_key: {custom_key})".format(
            classname=self.__class__.__name__,
            user=self.user,
            custom_key=self.custom_key)
class Translation(DirtyFieldsMixin, models.Model):
    entity = models.ForeignKey(Entity)
    locale = models.ForeignKey(Locale)
    user = models.ForeignKey(User, null=True, blank=True)
    string = models.TextField()

    # 0=zero, 1=one, 2=two, 3=few, 4=many, 5=other, null=no plural forms
    plural_form = models.SmallIntegerField(null=True, blank=True)
    date = models.DateTimeField(default=timezone.now)
    approved = models.BooleanField(default=False)
    approved_user = models.ForeignKey(User,
                                      related_name='approvers',
                                      null=True,
                                      blank=True)
    approved_date = models.DateTimeField(null=True, blank=True)
    fuzzy = models.BooleanField(default=False)

    # extra stores data that we want to save for the specific format
    # this translation is stored in, but that we otherwise don't care
    # about.
    extra = JSONField(default=extra_default)

    @property
    def latest_activity(self):
        """
        Return the date and user associated with the latest activity on
        this translation.
        """
        # Approval counts as activity when it happened after submission.
        if self.approved_date is not None and self.approved_date > self.date:
            return {'date': self.approved_date, 'user': self.approved_user}
        else:
            return {'date': self.date, 'user': self.user}

    def __unicode__(self):
        return self.string

    def save(self, imported=False, *args, **kwargs):
        """Save the translation and maintain the approval/stats invariants.

        :param imported: when True, skip stats updates and latest-translation
            bookkeeping (bulk import path).
        """
        super(Translation, self).save(*args, **kwargs)

        # Only one translation can be approved at a time for any
        # Entity/Locale.
        if self.approved:
            (Translation.objects.filter(
                entity=self.entity,
                locale=self.locale,
                plural_form=self.plural_form).exclude(pk=self.pk).update(
                    approved=False, approved_user=None, approved_date=None))

            # Seed translation memory from newly approved translations.
            if not self.memory_entries.exists():
                TranslationMemoryEntry.objects.create(
                    source=self.entity.string,
                    target=self.string,
                    entity=self.entity,
                    translation=self,
                    locale=self.locale)

        if not imported:
            # Update stats AFTER changing approval status.
            stats = update_stats(self.entity.resource, self.locale)

            # Whenever a translation changes, mark the entity as having
            # changed in the appropriate locale. We could be smarter about
            # this but for now this is fine.
            if self.approved:
                self.entity.mark_changed(self.locale)

            # Check and update the latest translation where necessary.
            self.check_latest_translation(self.entity.resource.project)
            self.check_latest_translation(self.locale)
            self.check_latest_translation(stats)

            project_locale = utils.get_object_or_none(
                ProjectLocale,
                project=self.entity.resource.project,
                locale=self.locale)
            if project_locale:
                self.check_latest_translation(project_locale)

    def check_latest_translation(self, instance):
        """
        Check if the given model instance has a `latest_activity`
        attribute and, if it does, see if this translation is more
        recent than it. If so, replace it and save.
        """
        latest = instance.latest_translation
        if latest is None or self.latest_activity[
                'date'] > latest.latest_activity['date']:
            instance.latest_translation = self
            instance.save(update_fields=['latest_translation'])

    def delete(self, stats=True, *args, **kwargs):
        """Delete the translation; optionally refresh stats afterwards."""
        super(Translation, self).delete(*args, **kwargs)
        if stats:
            update_stats(self.entity.resource, self.locale)

        # Mark entity as changed before deleting. This is skipped during
        # bulk delete operations, but we shouldn't be bulk-deleting
        # translations anyway.
        if self.approved:
            self.entity.mark_changed(self.locale)

    def serialize(self):
        """Return a minimal JSON-friendly dict of this translation."""
        return {
            'pk': self.pk,
            'string': self.string,
            'approved': self.approved,
            'fuzzy': self.fuzzy,
        }
class Address(models.Model):
    """ Model to store addresses

    NOTE: this is Python 2 code (print statements).
    """
    address_line1 = models.CharField("Address line 1", max_length=100)
    address_line2 = models.CharField("Address line 2",
                                     max_length=100,
                                     blank=True,
                                     null=True)
    city = models.CharField(max_length=50, blank=False)
    state_province = models.CharField("State/Province",
                                      max_length=40,
                                      blank=True)
    postal_code = models.CharField("Postal Code", max_length=10)
    country = models.ForeignKey(Country, blank=False)
    geom = models.PointField(null=True, blank=True)
    objects = models.GeoManager()
    # Raw geocoder response, cached so we do not re-geocode.
    geocode_results = JSONField(null=True, blank=True)

    def get_row_display(self):
        # NOTE(review): the template only references city/state_province/
        # postal_code; the other keyword arguments are unused — confirm the
        # intended display format.
        return '{city}, {state_province}, {postal_code}' \
            .format(address_line1=self.address_line1,
                    address_line2=self.address_line2,
                    city=self.city,
                    state_province=self.state_province,
                    postal_code=self.postal_code,
                    country=self.country.iso_code)

    def get_full_display(self):
        """Full one-line rendering including street lines."""
        return '{line1}{line2}, {city}, {state_province}, {postal_code}' \
            .format(line1=self.address_line1,
                    line2=' ' + self.address_line2 if self.address_line2 else '',
                    city=self.city,
                    state_province=self.state_province,
                    postal_code=self.postal_code,
                    country=self.country.iso_code)

    @classmethod
    def create_from_string(cls, query_string, dry_run=False):
        """Geocode a free-form address string and create (or fetch) an Address.

        :param query_string: free-form address text sent to Google geocoding.
        :param dry_run: if True, do not touch the database; print the params
            and return an unsaved instance.
        """
        g = GoogleV3(api_key=settings.GOOGLE_API_KEY)
        try:
            results = g.geocode(query_string, timeout=10)
        except GeocoderQuotaExceeded:
            # Back off briefly and retry once on quota errors.
            sleep(0.5)
            results = g.geocode(query=query_string)
        if results and results.latitude and results.longitude:
            params = dict(geom=Point(results.longitude, results.latitude))
            # Helper: all address_components with the given type tag.
            filter_components = lambda n: [
                c for c in results.raw['address_components'] if n in c['types']
            ]  # noqa
            postal_codes = filter_components('postal_code')
            countries = filter_components('country')
            states = filter_components('administrative_area_level_1')
            cities = filter_components('locality')
            street_numbers = filter_components('street_number')
            street_names = filter_components('route')
            if postal_codes:
                params['postal_code'] = postal_codes[0]['short_name']
            if countries:
                params['country'], _ = Country.objects.get_or_create(
                    iso_code=countries[0]['short_name'])
            if states:
                params['state_province'] = states[0]['short_name']
            if cities:
                params['city'] = cities[0]['long_name']
            if street_numbers and street_names:
                params['address_line1'] = '{0} {1}'.format(
                    street_numbers[0]['short_name'],
                    street_names[0]['short_name'])
            if not dry_run:
                try:
                    objs = cls.objects.get(**params)
                except cls.DoesNotExist:
                    objs = cls.objects.create(**params)
                return objs
            else:
                print 'Create new address with these parameters: {0}'.format(
                    params)
                return cls(**params)

    def geocode(self):
        """Geocode this address in place and save geom + raw results."""
        g = GoogleV3()
        query_string = self.get_row_display()
        try:
            results = g.geocode(query=query_string)
        except GeocoderQuotaExceeded:
            sleep(0.5)
            results = g.geocode(query=query_string)
        except GeocoderTimedOut:
            sleep(0.5)
            results = g.geocode(query=query_string)
        if results and results.latitude and results.longitude:
            self.geom = Point(results.longitude, results.latitude)
            self.geocode_results = results.raw
            self.save()

    @classmethod
    def batch_geocode(cls):
        """Geocode every address that has never been geocoded."""
        for row in cls.objects.filter(geom__isnull=True,
                                      geocode_results__isnull=True):
            print row.get_row_display()
            row.geocode()

    def __unicode__(self):
        return "%s, %s %s - %s" % (self.address_line1, self.city,
                                   self.state_province, self.country.iso_code)

    class Meta:
        verbose_name_plural = "Addresses"
        unique_together = ("address_line1", "address_line2", "postal_code",
                           "city", "state_province", "country")
class Repository(models.Model):
    """Local mirror of a GitHub repository, refreshed from the GitHub API."""

    # GitHub's repository id is used as our primary key.
    id = models.BigIntegerField(primary_key=True)
    owner = models.ForeignKey('users.GithubUser',
                              on_delete=models.CASCADE,
                              related_name='repositories')
    name = models.CharField(max_length=100)
    full_name = models.CharField(max_length=255)
    private = models.BooleanField()
    description = models.TextField(blank=True)
    default_branch = models.CharField(max_length=100, default='master')
    fork = models.BooleanField()
    homepage = models.URLField(blank=True)
    created_at = models.DateTimeField()
    updated_at = models.DateTimeField()
    has_issues = models.BooleanField()
    has_projects = models.BooleanField()
    has_wiki = models.BooleanField()
    has_pages = models.BooleanField()
    has_downloads = models.BooleanField()
    archived = models.BooleanField()
    mirror_url = models.URLField(blank=True)
    # Raw API payload for fields we do not model explicitly.
    data = JSONField(default=dict)

    objects = RepositoryManager()
    publics = PublicRepositoryManager()

    def _get_remote_repo(self) -> GHRepository:
        """Fetch this repository from the GitHub API by id."""
        g = github.Github(settings.GITHUB_USER, settings.GITHUB_TOKEN)
        return g.get_repo(self.id)

    def synchronize(self):
        """Refresh all local fields from the remote repository.

        Does not call save(); the caller persists the instance.
        """
        from users.models import GithubUser
        remote_repo = self._get_remote_repo()
        self.name = remote_repo.name
        # FIX: the original assigned full_name twice; duplicate removed.
        self.full_name = remote_repo.full_name
        self.private = remote_repo.private
        self.description = remote_repo.description or ''
        self.default_branch = remote_repo.default_branch
        self.fork = remote_repo.fork
        self.homepage = remote_repo.homepage or ''
        # GitHub returns naive datetimes in UTC; make them aware.
        self.created_at = remote_repo.created_at.replace(tzinfo=pytz.UTC)
        self.updated_at = remote_repo.updated_at.replace(tzinfo=pytz.UTC)
        self.has_issues = remote_repo.has_issues
        self.has_projects = remote_repo.has_projects
        self.has_wiki = remote_repo.has_wiki
        self.has_pages = False  # TODO: not available
        self.has_downloads = remote_repo.has_downloads
        self.archived = remote_repo.archived
        self.mirror_url = remote_repo.mirror_url or ''
        self.data = remote_repo.raw_data
        self.owner = GithubUser.objects.get_or_retrieve(
            remote_repo.owner.login)

    def update_stars(self):
        """Sync stargazers (with their original starred_at timestamps)."""
        from users.models import Star, GithubUser
        for star in self._get_remote_repo().get_stargazers_with_dates():
            user = GithubUser.objects.get_or_retrieve(star.user.login)
            Star.objects.get_or_create(
                repo=self,
                user=user,
                defaults=dict(created_at=star.starred_at.replace(
                    tzinfo=pytz.UTC)))

    def update_subscribers(self):
        """Sync watchers; the API gives no timestamp, so we record now()."""
        from users.models import Watch, GithubUser
        for watcher in self._get_remote_repo().get_subscribers():
            user = GithubUser.objects.get_or_retrieve(watcher.login)
            Watch.objects.get_or_create(
                repo=self,
                user=user,
                defaults=dict(created_at=timezone.now()))

    def update_forks(self):
        """Sync forks; the API gives no timestamp, so we record now()."""
        from users.models import Fork, GithubUser
        for repo in self._get_remote_repo().get_forks():
            user = GithubUser.objects.get_or_retrieve(repo.owner.login)
            Fork.objects.get_or_create(
                user=user,
                repo=self,
                defaults=dict(created_at=timezone.now()))
class Build(models.Model):

    """Build data."""

    project = models.ForeignKey(
        Project, verbose_name=_('Project'), related_name='builds')
    version = models.ForeignKey(
        Version, verbose_name=_('Version'), null=True, related_name='builds')
    type = models.CharField(
        _('Type'), max_length=55, choices=BUILD_TYPES, default='html')
    state = models.CharField(
        _('State'), max_length=55, choices=BUILD_STATE, default='finished')
    date = models.DateTimeField(_('Date'), auto_now_add=True)
    success = models.BooleanField(_('Success'), default=True)

    setup = models.TextField(_('Setup'), null=True, blank=True)
    setup_error = models.TextField(_('Setup error'), null=True, blank=True)
    output = models.TextField(_('Output'), default='', blank=True)
    error = models.TextField(_('Error'), default='', blank=True)
    exit_code = models.IntegerField(_('Exit code'), null=True, blank=True)
    commit = models.CharField(
        _('Commit'), max_length=255, null=True, blank=True)
    # Deduplicated storage: either the real config, or {CONFIG_KEY: pk} pointing
    # at the Build that holds it. Access through the `config` property.
    _config = JSONField(_('Configuration used in the build'), default=dict)
    length = models.IntegerField(_('Build Length'), null=True, blank=True)

    builder = models.CharField(
        _('Builder'), max_length=255, null=True, blank=True)

    cold_storage = models.NullBooleanField(
        _('Cold Storage'), help_text='Build steps stored outside the database.')

    # Manager
    objects = BuildQuerySet.as_manager()

    # Sentinel key in `_config` marking an indirection to another Build's pk.
    CONFIG_KEY = '__config'

    class Meta(object):
        ordering = ['-date']
        get_latest_by = 'date'
        index_together = [['version', 'state', 'type']]

    def __init__(self, *args, **kwargs):
        super(Build, self).__init__(*args, **kwargs)
        # Tracks whether `config` was assigned since load/save, so save()
        # knows when to re-run the deduplication check.
        self._config_changed = False

    @property
    def previous(self):
        """
        Returns the previous build to the current one.

        Matching the project and version.
        """
        date = self.date or timezone.now()
        if self.project is not None and self.version is not None:
            return (
                Build.objects
                .filter(
                    project=self.project,
                    version=self.version,
                    date__lt=date,
                )
                .order_by('-date')
                .first()
            )
        return None

    @property
    def config(self):
        """
        Get the config used for this build.

        Since we are saving the config into the JSON field only when it
        differs from the previous one, this helper returns the correct JSON
        used in this Build object (it could be stored in this object or one
        of the previous ones).
        """
        if self.CONFIG_KEY in self._config:
            # Follow the indirection: fetch the raw config stored on the
            # referenced Build.
            return Build.objects.get(pk=self._config[self.CONFIG_KEY])._config
        return self._config

    @config.setter
    def config(self, value):
        """
        Set `_config` to value.

        `_config` should never be set directly from outside the class.
        """
        self._config = value
        self._config_changed = True

    def save(self, *args, **kwargs):  # noqa
        """
        Save object.

        To save space on the db we only save the config if it's different
        from the previous one. If the config is the same, we save the pk of
        the object that has the **real** config under the `CONFIG_KEY` key.
        """
        if self.pk is None or self._config_changed:
            previous = self.previous
            if (previous is not None and self._config and
                    self._config == previous.config):
                # Point at the oldest Build actually storing this config,
                # never at an intermediate indirection.
                previous_pk = previous._config.get(self.CONFIG_KEY, previous.pk)
                self._config = {self.CONFIG_KEY: previous_pk}
        super(Build, self).save(*args, **kwargs)
        self._config_changed = False

    def __str__(self):
        return ugettext(
            'Build {project} for {usernames} ({pk})'.format(
                project=self.project,
                usernames=' '.join(
                    self.project.users.all().values_list('username', flat=True),
                ),
                pk=self.pk,
            ))

    def get_absolute_url(self):
        return reverse('builds_detail', args=[self.project.slug, self.pk])

    @property
    def finished(self):
        """Return if build has a finished state."""
        return self.state == BUILD_STATE_FINISHED
class UserPosition(models.Model):
    """The position of a user on an issue."""
    user = models.ForeignKey('auth.User', db_index=True,
                             help_text="The user who created this position.",
                             on_delete=models.CASCADE)
    position = models.ForeignKey(IssuePosition, db_index=True,
                                 help_text="The position the user choses.",
                                 on_delete=models.CASCADE)
    created = models.DateTimeField(auto_now_add=True, db_index=True)
    district = models.CharField(
        max_length=4, db_index=True,
        help_text=
        "The state and district, in uppercase without any spaces, of the user at the time the user took this posiiton."
    )
    metadata = JSONField(
        help_text="Other information stored with the position.")

    def __unicode__(self):
        # Python 2 repr: "<ISO timestamp> <user>/<position>".
        return self.created.isoformat() + " " + unicode(
            self.user) + "/" + unicode(self.position)

    def can_change_position(self):
        # Can the user change his position? Not if he made any calls about it.
        return not CallLog.objects.filter(position=self).exists()

    def get_current_targets(self):
        # Returns either a list of targets, or a string with a reason why
        # there are no targets to call.

        # ugh, data collection error when this was first launched:
        # district must be at least "SSN" (state + district number).
        if len(self.district) <= 2:
            return "unknown"

        # get all of the chambers any related bills are currently being considered in
        issue = self.position.issue.get()
        chambers = set()
        for rb in RelatedBill.objects.filter(
                issue=issue).select_related("bill"):
            chambers.add(rb.bill.current_status_chamber)

        # get the representative or senators as appropriate, and check for various
        # error conditions along the way.
        from person.models import PersonRole, RoleType
        from us import stateapportionment
        targets = []

        if len(chambers) == 0 or "House" in chambers or "Unknown" in chambers:
            # district is "SSNN..": first two chars are the state, rest the
            # district number.
            targets.extend(
                PersonRole.objects.filter(current=True,
                                          role_type=RoleType.representative,
                                          state=self.district[0:2],
                                          district=int(self.district[2:])))
            # "T" = territory (delegate only); vacancy only matters if the
            # House was the only relevant chamber.
            if len(targets) == 0 and (
                (len(chambers) == 1 and "House" in chambers)
                    or stateapportionment[self.district[0:2]] == "T"):
                return "house-vacant"

        if len(chambers) == 0 or "Senate" in chambers or "Unknown" in chambers:
            targets.extend(
                PersonRole.objects.filter(current=True,
                                          role_type=RoleType.senator,
                                          state=self.district[0:2]))
            if len(chambers) == 1 and "Senate" in chambers and len(
                    targets) == 0:
                if stateapportionment[self.district[0:2]] == "T":
                    return "no-senators"
                return "senate-vacant"

        if len(targets) == 0:
            return "vacant"

        # make sure we have a phone number on file (exactly 10 digits)
        def is_valid_phone(phone):
            if not phone:
                return False
            if len("".join(c for c in phone if unicode.isdigit(c))) != 10:
                return False
            return True

        targets = [t for t in targets if is_valid_phone(t.phone)]
        if len(targets) == 0:
            return "no-phone"

        # filter out anyone the user has already called
        targets = [
            t for t in targets
            if not CallLog.has_made_successful_call(self, t)
        ]
        if len(targets) == 0:
            return "all-calls-made"

        return targets
class Email(models.Model):
    """
    A model to hold email information.
    """

    PRIORITY_CHOICES = [(PRIORITY.low, _("low")),
                        (PRIORITY.medium, _("medium")),
                        (PRIORITY.high, _("high")), (PRIORITY.now, _("now"))]
    STATUS_CHOICES = [(STATUS.sent, _("sent")), (STATUS.failed, _("failed")),
                      (STATUS.queued, _("queued"))]

    from_email = models.CharField(_("Email From"), max_length=254,
                                  validators=[validate_email_with_name])
    to = CommaSeparatedEmailField(_("Email To"))
    cc = CommaSeparatedEmailField(_("Cc"))
    bcc = CommaSeparatedEmailField(_("Bcc"))
    subject = models.CharField(_("Subject"), max_length=989, blank=True)
    message = models.TextField(_("Message"), blank=True)
    html_message = models.TextField(_("HTML Message"), blank=True)
    """
    Emails with 'queued' status will get processed by ``send_queued`` command.
    Status field will then be set to ``failed`` or ``sent`` depending on
    whether it's successfully delivered.
    """
    status = models.PositiveSmallIntegerField(
        _("Status"),
        choices=STATUS_CHOICES, db_index=True,
        blank=True, null=True)
    priority = models.PositiveSmallIntegerField(_("Priority"),
                                                choices=PRIORITY_CHOICES,
                                                blank=True, null=True)
    created = models.DateTimeField(auto_now_add=True, db_index=True)
    last_updated = models.DateTimeField(db_index=True, auto_now=True)
    scheduled_time = models.DateTimeField(_('The scheduled sending time'),
                                          blank=True, null=True, db_index=True)
    headers = JSONField(_('Headers'), blank=True, null=True)
    template = models.ForeignKey('post_office.EmailTemplate', blank=True,
                                 null=True, verbose_name=_('Email template'),
                                 on_delete=models.CASCADE)
    context = context_field_class(_('Context'), blank=True, null=True)
    backend_alias = models.CharField(_('Backend alias'), blank=True,
                                     default='', max_length=64)

    class Meta:
        app_label = 'post_office'
        verbose_name = pgettext_lazy("Email address", "Email")
        verbose_name_plural = pgettext_lazy("Email addresses", "Emails")

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Memoizes the built EmailMessage so repeated sends don't rebuild it.
        self._cached_email_message = None

    def __str__(self):
        return '%s' % self.to

    def email_message(self):
        """
        Returns Django EmailMessage object for sending.
        """
        if self._cached_email_message:
            return self._cached_email_message

        return self.prepare_email_message()

    def prepare_email_message(self):
        """
        Returns a django ``EmailMessage`` or ``EmailMultiAlternatives`` object,
        depending on whether html_message is empty.
        """
        if get_override_recipients():
            # Global override (e.g. for testing environments) replaces the
            # recipient list entirely.
            self.to = get_override_recipients()

        if self.template is not None:
            # Render subject/body/html from the stored template + context.
            engine = get_template_engine()
            subject = engine.from_string(self.template.subject).render(self.context)
            plaintext_message = engine.from_string(self.template.content).render(self.context)
            multipart_template = engine.from_string(self.template.html_content)
            html_message = multipart_template.render(self.context)

        else:
            # No template: use the literal fields stored on the model.
            subject = smart_str(self.subject)
            plaintext_message = self.message
            multipart_template = None
            html_message = self.html_message

        connection = connections[self.backend_alias or 'default']
        if html_message:
            if plaintext_message:
                # Plaintext body with an HTML alternative part.
                msg = EmailMultiAlternatives(
                    subject=subject, body=plaintext_message, from_email=self.from_email,
                    to=self.to, bcc=self.bcc, cc=self.cc,
                    headers=self.headers, connection=connection)
                msg.attach_alternative(html_message, "text/html")
            else:
                # HTML-only message.
                msg = EmailMultiAlternatives(
                    subject=subject, body=html_message, from_email=self.from_email,
                    to=self.to, bcc=self.bcc, cc=self.cc,
                    headers=self.headers, connection=connection)
                msg.content_subtype = 'html'
            if hasattr(multipart_template, 'attach_related'):
                # Template engines (e.g. post_office's) may attach inline
                # related content such as embedded images.
                multipart_template.attach_related(msg)

        else:
            msg = EmailMessage(
                subject=subject, body=plaintext_message, from_email=self.from_email,
                to=self.to, bcc=self.bcc, cc=self.cc,
                headers=self.headers, connection=connection)

        for attachment in self.attachments.all():
            if attachment.headers:
                # Custom per-attachment headers require building the MIME
                # part manually.
                mime_part = MIMENonMultipart(*attachment.mimetype.split('/'))
                mime_part.set_payload(attachment.file.read())
                for key, val in attachment.headers.items():
                    try:
                        mime_part.replace_header(key, val)
                    except KeyError:
                        mime_part.add_header(key, val)
                msg.attach(mime_part)
            else:
                msg.attach(attachment.name, attachment.file.read(),
                           mimetype=attachment.mimetype or None)
            attachment.file.close()

        self._cached_email_message = msg
        return msg

    def dispatch(self, log_level=None,
                 disconnect_after_delivery=True, commit=True):
        """
        Sends email and log the result.
        """
        try:
            self.email_message().send()
            status = STATUS.sent
            message = ''
            exception_type = ''
        except Exception as e:
            status = STATUS.failed
            message = str(e)
            exception_type = type(e).__name__

            # If run in a bulk sending mode, reraise and let the outer
            # layer handle the exception
            if not commit:
                raise

        if commit:
            self.status = status
            self.save(update_fields=['status'])

            if log_level is None:
                log_level = get_log_level()

            # If log level is 0, log nothing, 1 logs only sending failures
            # and 2 means log both successes and failures
            if log_level == 1:
                if status == STATUS.failed:
                    self.logs.create(status=status, message=message,
                                     exception_type=exception_type)
            elif log_level == 2:
                self.logs.create(status=status, message=message,
                                 exception_type=exception_type)

        return status

    def save(self, *args, **kwargs):
        # Validate before every save so malformed addresses never persist.
        self.full_clean()
        return super().save(*args, **kwargs)
class Credentials(ConnectorMixin, ShareableOrgMixin, BaseModel):
    """
    Credentials for access
    """
    connector = models.CharField(_('connection type'),
                                 choices=app_settings.CONNECTORS,
                                 max_length=128,
                                 db_index=True)
    params = JSONField(_('parameters'),
                       default=dict,
                       help_text=_('global connection parameters'),
                       load_kwargs={'object_pairs_hook': collections.OrderedDict},
                       dump_kwargs={'indent': 4})
    auto_add = models.BooleanField(_('auto add'),
                                   default=False,
                                   help_text=_('automatically add these credentials '
                                               'to the devices of this organization; '
                                               'if no organization is specified will '
                                               'be added to all the new devices'))

    class Meta:
        verbose_name = _('Access credentials')
        verbose_name_plural = verbose_name

    def __str__(self):
        return '{0} ({1})'.format(self.name, self.get_connector_display())

    def save(self, *args, **kwargs):
        """Persist the credentials, then propagate them to devices if needed."""
        super().save(*args, **kwargs)
        self.auto_add_to_devices()

    def auto_add_to_devices(self):
        """
        When ``auto_add`` is ``True``, adds the credentials
        to each relevant ``Device`` and ``DeviceConnection`` objects
        """
        if not self.auto_add:
            return
        # Only devices that actually have a configuration are eligible.
        eligible = Device.objects.exclude(config=None)
        organization = self.organization
        if organization:
            eligible = eligible.filter(organization=organization)
        # Skip devices already linked to these credentials.
        eligible = eligible.exclude(deviceconnection__credentials=self)
        for target in eligible:
            connection = DeviceConnection(device=target,
                                          credentials=self,
                                          enabled=True)
            connection.full_clean()
            connection.save()

    @classmethod
    def auto_add_credentials_to_device(cls, instance, created, **kwargs):
        """
        Adds relevant credentials as ``DeviceConnection``
        when a device is created, this is called
        from a post_save signal receiver
        hooked to the ``Config`` model
        (why ``Config`` and not ``Device``? because at the moment
         we can automatically create a DeviceConnection if we have
         a ``Config`` object)
        """
        if not created:
            return
        device = instance.device
        # select credentials which
        #   - are flagged as auto_add
        #   - belong to the same organization of the device
        #     OR
        #     belong to no organization (hence are shared)
        org_match = models.Q(organization=device.organization) | \
            models.Q(organization=None)
        relevant = cls.objects.filter(org_match).filter(auto_add=True)
        for credential in relevant:
            connection = DeviceConnection(device=device,
                                          credentials=credential,
                                          enabled=True)
            connection.full_clean()
            connection.save()
class DeviceConnection(ConnectorMixin, TimeStampedEditableModel):
    """Link between a ``Device`` and ``Credentials``.

    Tracks whether the connection works (``is_working``), the last attempt,
    and which update strategy is used to push configuration to the device.
    """
    _connector_field = 'update_strategy'
    device = models.ForeignKey('config.Device', on_delete=models.CASCADE)
    credentials = models.ForeignKey(Credentials, on_delete=models.CASCADE)
    update_strategy = models.CharField(_('update strategy'),
                                       help_text=_('leave blank to determine automatically'),
                                       choices=app_settings.UPDATE_STRATEGIES,
                                       max_length=128,
                                       blank=True,
                                       db_index=True)
    enabled = models.BooleanField(default=True, db_index=True)
    params = JSONField(_('parameters'),
                       default=dict,
                       blank=True,
                       help_text=_('local connection parameters (will override '
                                   'the global parameters if specified)'),
                       load_kwargs={'object_pairs_hook': collections.OrderedDict},
                       dump_kwargs={'indent': 4})
    # usability improvements
    is_working = models.NullBooleanField(default=None)
    failure_reason = models.CharField(_('reason of failure'),
                                      max_length=128,
                                      blank=True)
    last_attempt = models.DateTimeField(blank=True, null=True)

    class Meta:
        verbose_name = _('Device connection')
        verbose_name_plural = _('Device connections')

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Remember the loaded value so save() can detect changes and emit
        # the is_working_changed signal.
        self._initial_is_working = self.is_working

    def clean(self):
        """Validate org consistency and auto-determine the update strategy.

        Raises ``ValidationError`` when the credentials belong to another
        organization or when the strategy cannot be determined.
        """
        cred_org = self.credentials.organization
        if cred_org and cred_org != self.device.organization:
            raise ValidationError({
                'credentials': _('The organization of these credentials doesn\'t '
                                 'match the organization of the device')
            })
        if not self.update_strategy and self.device._has_config():
            try:
                self.update_strategy = app_settings.CONFIG_UPDATE_MAPPING[self.device.config.backend]
            except KeyError as e:
                # Fixed: error key was misspelled 'update_stragy' (the message
                # would have attached to a nonexistent field) and the two
                # adjacent string literals produced a double space.
                raise ValidationError({
                    'update_strategy': _('could not determine update strategy '
                                         'automatically, exception: {0}'.format(e))
                })
        elif not self.update_strategy:
            raise ValidationError({
                'update_strategy': _('the update strategy can be determined automatically '
                                     'only if the device has a configuration specified, '
                                     'because it is inferred from the configuration backend. '
                                     'Please select the update strategy manually.')
            })
        self._validate_connector_schema()

    def get_addresses(self):
        """
        returns a list of ip addresses that can be used to connect to the device
        (used to pass a list of ip addresses to a DeviceConnection instance)
        """
        address_list = []
        if self.device.management_ip:
            address_list.append(self.device.management_ip)
        if self.device.last_ip and self.device.last_ip != self.device.management_ip:
            address_list.append(self.device.last_ip)
        return address_list

    def get_params(self):
        """Return global credential params overridden by local ones."""
        params = self.credentials.params.copy()
        params.update(self.params)
        return params

    def set_connector(self, connector):
        self.connector_instance = connector

    def connect(self):
        """Attempt to connect; record outcome and return ``is_working``."""
        try:
            self.connector_instance.connect()
        except Exception as e:
            self.is_working = False
            self.failure_reason = str(e)
        else:
            self.is_working = True
            self.failure_reason = ''
        finally:
            self.last_attempt = timezone.now()
            self.save()
        return self.is_working

    def disconnect(self):
        self.connector_instance.disconnect()

    def update_config(self):
        """Push the configuration to the device if the connection works."""
        self.connect()
        if self.is_working:
            try:
                self.connector_instance.update_config()
            except Exception as e:
                logger.exception(e)
            else:
                self.device.config.set_status_applied()
            self.disconnect()

    def save(self, *args, **kwargs):
        super().save(*args, **kwargs)
        if self.is_working != self._initial_is_working:
            self.send_is_working_changed_signal()
        self._initial_is_working = self.is_working

    def send_is_working_changed_signal(self):
        is_working_changed.send(sender=self.__class__,
                                is_working=self.is_working,
                                instance=self)
class PersonRole(models.Model):
    """Terms held in office by Members of Congress, Presidents, and Vice Presidents. Each term corresponds with an election, meaning each term in the House covers two years (one 'Congress'), as President/Vice President four years, and in the Senate six years (three 'Congresses')."""
    person = models.ForeignKey('person.Person',
                               related_name='roles',
                               on_delete=models.CASCADE)
    role_type = models.IntegerField(
        choices=RoleType,
        db_index=True,
        help_text=
        "The type of this role: a U.S. senator, a U.S. congressperson, a U.S. president, or a U.S. vice president."
    )
    current = models.BooleanField(
        default=False,
        choices=[(False, "No"), (True, "Yes")],
        db_index=True,
        help_text=
        "Whether the role is currently held, or if this is archival information."
    )
    startdate = models.DateField(
        db_index=True,
        help_text="The date the role began (when the person took office).")
    enddate = models.DateField(
        db_index=True,
        help_text=
        "The date the role ended (when the person resigned, died, etc.)")
    # http://en.wikipedia.org/wiki/Classes_of_United_States_Senators
    senator_class = models.IntegerField(
        choices=SenatorClass,
        blank=True,
        null=True,
        db_index=True,
        help_text=
        "For senators, their election class, which determines which years they are up for election. (It has nothing to do with seniority.)"
    )  # None for representatives
    senator_rank = models.IntegerField(
        choices=SenatorRank,
        blank=True,
        null=True,
        help_text=
        "For senators, their state rank, i.e. junior or senior. For historical data, this is their last known rank."
    )  # None for representatives
    # http://en.wikipedia.org/wiki/List_of_United_States_congressional_districts
    district = models.IntegerField(
        blank=True,
        null=True,
        db_index=True,
        help_text=
        "For representatives, the number of their congressional district. 0 for at-large districts, -1 in historical data if the district is not known."
    )  # None for senators/presidents
    state = models.CharField(
        choices=sorted(statenames.items()),
        max_length=2,
        blank=True,
        db_index=True,
        help_text=
        "For senators and representatives, the two-letter USPS abbrevation for the state or territory they are serving. Values are the abbreviations for the 50 states (each of which have at least one representative and two senators, assuming no vacancies) plus DC, PR, and the island territories AS, GU, MP, and VI (all of which have a non-voting delegate), and for really old historical data you will also find PI (Philippines, 1907-1946), DK (Dakota Territory, 1861-1889), and OR (Orleans Territory, 1806-1811) for non-voting delegates."
    )
    party = models.CharField(
        max_length=255,
        blank=True,
        null=True,
        db_index=True,
        help_text=
        "The political party of the person. If the person changes party, it is usually the most recent party during this role."
    )
    caucus = models.CharField(
        max_length=255,
        blank=True,
        null=True,
        help_text=
        "For independents, the party that the legislator caucuses with. If changed during a term, the most recent."
    )
    website = models.CharField(
        max_length=255,
        blank=True,
        help_text=
        "The URL to the official website of the person during this role, if known."
    )
    phone = models.CharField(
        max_length=64,
        blank=True,
        null=True,
        help_text=
        "The last known phone number of the DC congressional office during this role, if known."
    )
    leadership_title = models.CharField(
        max_length=255,
        blank=True,
        null=True,
        help_text=
        "The last known leadership role held during this role, if any.")
    extra = JSONField(
        blank=True,
        null=True,
        help_text="Additional schema-less information stored with this object."
    )

    # API
    api_recurse_on = ('person', )
    api_additional_fields = {
        "title": "get_title_abbreviated",
        "title_long": "get_title",
        "description": "get_description",
        "congress_numbers": "congress_numbers",
    }
    api_example_parameters = {"current": "true", "sort": "state"}

    class Meta:
        pass  # ordering = ['startdate'] # causes prefetch_related to be slow

    def __str__(self):
        return '[%s] %s / %s to %s / %s / %s' % (
            self.id, self.person.fullname, self.startdate, self.enddate,
            self.get_role_type_display(), ",".join(
                str(c) for c in (self.congress_numbers() or [])))

    def get_office_id(self):
        # A stable identifier for the office (not the officeholder): the role
        # type key for president/VP, or a tuple keyed by state (+ class or
        # district) for senators/representatives.
        if self.role_type in (RoleType.president, RoleType.vicepresident):
            return RoleType.by_value(self.role_type).key
        if self.role_type == RoleType.senator:
            return ("sen", self.state, self.senator_class)
        if self.role_type == RoleType.representative:
            return ("rep", self.state, self.district)
        raise ValueError()

    def continues_from(self, prev):
        # True if this role is a continuation of `prev`: same office and
        # started within ~4 months of prev's end.
        if self.startdate - prev.enddate > datetime.timedelta(days=120):
            return False
        if self.role_type != prev.role_type: return False
        if self.senator_class != prev.senator_class: return False
        if self.state != prev.state: return False
        if self.district != prev.district: return False
        return True

    def get_title(self):
        """The long form of the title used to prefix the names of people with this role: Representative, Senator, President, or Resident Commissioner."""
        return self.get_title_name(short=False)

    def get_title_abbreviated(self):
        """The title used to prefix the names of people with this role: Rep., Sen., President, Del. (delegate), or Commish. (resident commissioner)."""
        return self.get_title_name(short=True)

    def get_title_name(self, short):
        # Map role type (and, for representatives, the state's apportionment
        # status) to a short or long title string.
        if self.role_type == RoleType.president:
            return 'President' if short else 'President of the United States'
        if self.role_type == RoleType.vicepresident:
            return 'Vice President' if short else 'Vice President of the United States'
        if self.role_type == RoleType.senator:
            return 'Sen.' if short else 'Senator'
        if self.role_type == RoleType.representative:
            if self.state not in stateapportionment:
                # All of the former 'states' were territories that sent delegates.
                return 'Rep.' if short else 'Representative'
            if self.state == 'PR':
                return 'Commish.' if short else 'Resident Commissioner'
            if stateapportionment[self.state] == 'T':
                # These folks are also commonly called delegates, but out of respect
                # for their disenfranchised constituents we refer to them as representatives.
                return 'Rep.' if short else 'Representative'
            return 'Rep.' if short else 'Representative'

    def state_name(self):
        # Full state name; presidents/VPs have no state.
        if not self.state: return "the United States"
        return statenames[self.state]

    def state_name_article(self):
        # Full state name prefixed with "the" where grammatically required.
        if not self.state: return "the United States"
        ret = statenames[self.state]
        if self.state in ("DC", "MP", "VI", "PI", "OL"):
            ret = "the " + ret
        return ret

    def get_description(self):
        """A description of this role, e.g. Delegate for District of Columbia At Large."""
        from django.contrib.humanize.templatetags.humanize import ordinal
        if self.role_type in (RoleType.president, RoleType.vicepresident):
            return self.get_title_name(False)
        if self.role_type == RoleType.senator:
            js = ""
            if self.current and self.senator_rank:
                js = self.get_senator_rank_display() + " "
            return js + self.get_title_name(False) + " for " + statenames[
                self.state]
        if self.role_type == RoleType.representative:
            if self.district == -1 or stateapportionment.get(self.state) in (
                    "T", None
            ):  # unknown district / current territories and former state-things, all of which send/sent delegates
                return self.get_title_name(
                    False) + " for " + self.state_name_article()
            elif self.district == 0:
                return self.get_title_name(False) + " for " + statenames[
                    self.state] + " At Large"
            else:
                return self.get_title_name(False) + " for " + statenames[
                    self.state] + "'s " + ordinal(
                        self.district) + " congressional district"

    def get_description_natural(self):
        """A description in HTML of this role in sentence form, e.g. the delegate for the District of Columbia's at-large district."""
        from website.templatetags.govtrack_utils import ordinalhtml

        (statename, statename_article) = (self.state_name_article(), "")
        if statename.startswith("the "):
            (statename, statename_article) = (statename[4:], "the ")
        # Link the state name to its members page.
        statename = '%s<a href="/congress/members/%s">%s</a>' % (
            statename_article, self.state, statename)
        if self.role_type in (RoleType.president, RoleType.vicepresident):
            return self.get_title_name(False)
        if self.role_type == RoleType.senator:
            js = "a "
            if self.current and self.senator_rank:
                js = "the " + self.get_senator_rank_display().lower() + " "
            return js + "senator from " + statename
        if self.role_type == RoleType.representative:
            if stateapportionment.get(self.state) in (
                    "T", None
            ):  # current territories and former state-things, all of which send/sent delegates
                return "the %s from %s" % (self.get_title_name(False).lower(),
                                           statename)
            else:
                if self.district == -1:
                    return "the representative for " + statename
                elif self.district == 0:
                    return "the representative for " + statename + "\u2019s at-large district"
                else:
                    return "the representative for " + statename + "\u2019s " + ordinalhtml(
                        self.district) + " congressional district"

    def congress_numbers(self):
        """The Congressional sessions (Congress numbers) that this role spans, as a list from the starting Congress number through consecutive numbers to the ending Congress number."""
        # Senators can span Congresses, so return a range.
        c1 = get_congress_from_date(self.startdate, range_type="start")
        c2 = get_congress_from_date(self.enddate, range_type="end")
        if not c1 or not c2: return None
        return list(range(c1, c2 + 1))  # congress number only, not session

    def most_recent_congress_number(self):
        # Last Congress this role covers, clamped to the current Congress.
        n = self.congress_numbers()
        if not n: return None
        n = n[-1]
        if n > settings.CURRENT_CONGRESS:
            n = settings.CURRENT_CONGRESS  # we don't ever mean to ask for a future one (senators, PR res com)
        return n

    def get_party(self):
        # If the person didn't change parties, just return the party.
        # Otherwise return "most recently a PARTY1 (year1-year2) and before that (year3-4), and ..."
        if self.party is None: return "(unknown party)"
        from parser.processor import Processor
        parties = (self.extra or {}).get("party_affiliations", [])

        def a_an(word):
            # Indefinite article based on the first letter of the word.
            return "a" if word[0].lower() not in "aeiou" else "an"

        if len(parties) <= 1:
            return a_an(self.party) + " " + self.party + (
                "" if not self.caucus else
                " caucusing with the " + self.caucus + "s")
        # Build one phrase per affiliation; the end year of the last entry is
        # omitted for a current role.
        parties = [
            "%s %s%s (%d-%s)" % (
                a_an(entry["party"]),
                entry["party"],
                "" if not entry.get("caucus") else
                " caucusing with the " + entry["caucus"] + "s",
                Processor.parse_datetime(entry["start"]).year,
                "" if self.current and i == len(parties) - 1 else
                PersonRole.round_down_enddate(
                    Processor.parse_datetime(entry["end"])).year,
            ) for i, entry in enumerate(parties)
        ]
        if self.current:
            most_recent_descr1 = ""
            most_recent_descr2 = ", "
        else:
            most_recent_descr1 = "most recently "
            most_recent_descr2 = " and "
        most_recent = parties.pop(-1)
        if len(parties) > 1: parties[-1] = "and " + parties[-1]
        return most_recent_descr1 + most_recent + most_recent_descr2 + "previously " + (
            ", " if len(parties) > 2 else " ").join(parties)

    def get_party_on_date(self, when):
        # Party at a specific datetime, consulting the party_affiliations
        # history in `extra` (dates compared as ISO strings); falls back to
        # the role's overall party.
        if self.extra and "party_affiliations" in self.extra:
            for pa in self.extra["party_affiliations"]:
                if pa['start'] <= when.date().isoformat() <= pa['end']:
                    return pa['party']
        return self.party

    @property
    def is_territory(self):
        # a current territory
        return stateapportionment.get(self.state) == "T"

    @property
    def is_historical_territory(self):
        # a historical territory
        # note: self.state is "" for presidents/vps
        return self.state and stateapportionment.get(self.state) is None

    def create_events(self, prev_role, next_role):
        # Emit termstart/termend feed events, skipping boundaries that are
        # just continuations of an adjacent role.
        now = datetime.datetime.now().date()
        from events.models import Feed, Event
        with Event.update(self) as E:
            f = self.person.get_feed()
            if not prev_role or not self.continues_from(prev_role):
                E.add("termstart", self.startdate, f)
            if not next_role or not next_role.continues_from(self):
                if self.enddate <= now:  # because we're not sure of end date until it happens
                    E.add("termend", self.enddate, f)

    def render_event(self, eventid, feeds):
        # Render a termstart/termend event for display in a feed.
        self.person.role = self  # affects name generation
        return {
            "type": "Elections and Offices",
            "date_has_no_time": True,
            "date":
            self.startdate if eventid == "termstart" else self.enddate,
            "title":
            self.person.name +
            (" takes office as " if eventid == "termstart" else
             " leaves office as ") + self.get_description(),
            "url": self.person.get_absolute_url(),
            "body_text_template": "{{name}} {{verb}} {{term}}.",
            "body_html_template": "<p>{{name}} {{verb}} {{term}}.</p>",
            "context": {
                "name": self.person.name,
                "verb": ("takes office as" if eventid == "termstart" else
                         "leaves office as"),
                "term": self.get_description(),
            }
        }

    def logical_dates(self, round_end=False):
        # Walk this person's roles for the same office in chronological order
        # and return the (start, end) of the unbroken run of consecutive
        # terms that contains this role.
        startdate = None
        enddate = None
        prev_role = None
        found_me = False
        for role in self.person.roles.filter(
                role_type=self.role_type,
                senator_class=self.senator_class,
                state=self.state,
                district=self.district).order_by('startdate'):
            if found_me and not role.continues_from(prev_role): break
            if prev_role == None or not role.continues_from(prev_role):
                startdate = role.startdate
            enddate = PersonRole.round_down_enddate(role.enddate,
                                                    do_rounding=round_end)
            prev_role = role
            if role.id == self.id: found_me = True
        if not found_me: raise Exception("Didn't find myself?!")
        return (startdate, enddate)

    @staticmethod
    def round_down_enddate(d, do_rounding=True):
        # If a date ends in the first three days of January, round it down to
        # December 31 of the previous year, so that we can show a nice year for
        # term end dates (2011-2012 rather than 2011-2013 which misleadingly
        # implies it was more than a few days in 2013 that probably weren't
        # in session anyway).
        if do_rounding:
            if d.month == 1 and d.day <= 3:
                return datetime.date(d.year - 1, 12, 31)
        return d

    def next_election_year(self):
        # For current terms, roles end at the end of a Congress on Jan 3.
        # The election occurs in the year before.
        #
        # EXCEPT: Senators appointed to fill a term may be up for re-election
        # by special election sooner than the term end date stored in our
        # data. The end date is thus not known because it will be when the
        # special election winner is certified.
        if not self.current: raise ValueError()
        if (self.extra or {}).get("end-type") == "special-election":
            return self.enddate.year
        return self.enddate.year - 1

    def is_up_for_election(self):
        if not self.current: return False
        if settings.CURRENT_ELECTION_DATE is None:
            return False  # no election cycle is current
        if settings.CURRENT_ELECTION_DATE < datetime.datetime.now().date():
            return False  # election is over
        return self.next_election_year(
        ) == settings.CURRENT_ELECTION_DATE.year  # is up this cycle

    def did_election_just_happen(self):
        if not self.current: return False
        if settings.CURRENT_ELECTION_DATE is None:
            return False  # no election cycle is current
        if settings.CURRENT_ELECTION_DATE > datetime.datetime.now().date():
            return False  # election hasn't happened yet
        return self.next_election_year(
        ) == settings.CURRENT_ELECTION_DATE.year  # is up this cycle

    def get_most_recent_session_stats(self):
        # Which Congress and session's end date is the most recently covered by this role?
        errs = []
        congresses = self.congress_numbers()
        for congress, session, sd, ed in reversed(get_all_sessions()):
            if congress not in congresses: continue
            if self.startdate < ed <= self.enddate:
                try:
                    return self.person.get_session_stats(session)
                except ValueError as e:
                    errs.append(str(e))
        raise ValueError("No statistics are available for this role: %s" %
                         "; ".join(errs))

    def opposing_party(self):
        # The other major party, or None for third parties/independents.
        if self.party == "Democrat": return "Republican"
        if self.party == "Republican": return "Democrat"
        return None

    def get_sort_key(self):
        # As it happens, our enums define a good sort order between senators and representatives.
        return (self.role_type, self.senator_rank)
class Repository(models.Model):
    """
    A remote VCS repository that stores resource files for a project.
    """
    FILE = 'file'
    GIT = 'git'
    HG = 'hg'
    SVN = 'svn'
    TRANSIFEX = 'transifex'
    TYPE_CHOICES = (
        (FILE, 'File'),
        (GIT, 'Git'),
        (HG, 'HG'),
        (SVN, 'SVN'),
        (TRANSIFEX, 'Transifex'),
    )

    project = models.ForeignKey(Project, related_name='repositories')
    # Use the FILE constant instead of repeating the 'file' literal so the
    # default cannot drift from TYPE_CHOICES.
    type = models.CharField(max_length=255, blank=False, default=FILE,
                            choices=TYPE_CHOICES)
    url = models.CharField("URL", max_length=2000, blank=True)

    # Prefix of the resource URL, used for direct downloads. To form a full
    # URL, a relative path must be appended.
    permalink_prefix = models.CharField("Permalink prefix", max_length=2000,
                                        blank=True)

    # Mapping of locale codes to VCS revisions of each repo at the last sync.
    # If this isn't a multi-locale repo, the mapping has a single key named
    # "single_locale" with the revision.
    last_synced_revisions = JSONField(blank=True, default=dict)

    source_repo = models.BooleanField(default=False, help_text="""
        If true, this repo contains the source strings directly in the
        root of the repo. Checkouts of this repo will have "templates"
        appended to the end of their path so that they are detected as
        source directories.
    """)

    @property
    def multi_locale(self):
        """
        Checks if url contains locale code variable. System will replace
        this variable by the locale codes of all enabled locales for the
        project during pulls and commits.
        """
        return '{locale_code}' in self.url

    @property
    def checkout_path(self):
        """
        Path where the checkout for this repo is located. Does not
        include a trailing path separator.
        """
        path_components = [self.project.checkout_path]

        # Include path components from the URL in case it has locale
        # information, like https://hg.mozilla.org/gaia-l10n/fr/.
        # No worry about overlap between repos, any overlap of locale
        # directories is an error already.
        path_components += urlparse(self.url).path.split('/')
        if self.multi_locale:
            path_components = [
                c for c in path_components if c != '{locale_code}'
            ]

        if self.source_repo:
            path_components.append('templates')

        # Remove trailing separator for consistency.
        return os.path.join(*path_components).rstrip(os.sep)

    @property
    def can_commit(self):
        """True if we can commit strings back to this repo."""
        # Compare against the class constants rather than repeating the
        # string literals, so this stays in sync with TYPE_CHOICES.
        return self.type in (self.SVN, self.GIT, self.HG)

    @cached_property
    def locales(self):
        """
        Yield an iterable of Locales whose strings are stored within
        this repo.
        """
        from pontoon.sync.utils import locale_directory_path

        locales = []  # Use list since we're caching the result.
        for locale in self.project.locales.all():
            try:
                locale_directory_path(self.checkout_path, locale.code)
                locales.append(locale)
            except IOError:
                pass  # Directory missing, not in this repo.
        return locales

    def locale_checkout_path(self, locale):
        """
        Path where the checkout for the given locale for this repo is
        located. If this repo is not a multi-locale repo, a ValueError
        is raised.
        """
        if not self.multi_locale:
            raise ValueError(
                'Cannot get locale_checkout_path for non-multi-locale repos.')
        return os.path.join(self.checkout_path, locale.code)

    def locale_url(self, locale):
        """
        URL for the repo for the given locale. If this repo is not a
        multi-locale repo, a ValueError is raised.
        """
        if not self.multi_locale:
            raise ValueError(
                'Cannot get locale_url for non-multi-locale repos.')
        return self.url.format(locale_code=locale.code)

    def url_for_path(self, path):
        """
        Determine the locale-specific repo URL for the given path.

        If this is not a multi-locale repo, raise a ValueError. If no
        repo is found for the given path, also raise a ValueError.
        """
        for locale in self.project.locales.all():
            if path.startswith(self.locale_checkout_path(locale)):
                return self.locale_url(locale)
        raise ValueError('No repo found for path: {0}'.format(path))

    def pull(self):
        """
        Pull changes from VCS. Returns the revision(s) of the repo after
        pulling.
        """
        if not self.multi_locale:
            update_from_vcs(self.type, self.url, self.checkout_path)
            return {
                'single_locale': get_revision(self.type, self.checkout_path)
            }
        else:
            current_revisions = {}
            for locale in self.project.locales.all():
                checkout_path = self.locale_checkout_path(locale)
                update_from_vcs(self.type, self.locale_url(locale),
                                checkout_path)
                current_revisions[locale.code] = get_revision(
                    self.type, checkout_path)
            return current_revisions

    def commit(self, message, author, path):
        """Commit changes to VCS."""
        # For multi-locale repos, figure out which sub-repo corresponds
        # to the given path.
        url = self.url
        if self.multi_locale:
            url = self.url_for_path(path)

        return commit_to_vcs(self.type, path, message, author, url)

    class Meta:
        unique_together = ('project', 'url')
        ordering = ['id']
class Job(WorkFlowBaseModel):
    """Runtime record for a workflow job and its lifecycle operations."""
    juuid = models.CharField(max_length=128, null=False, default=None)
    priority = models.CharField(max_length=10)
    threshold = models.CharField(max_length=5)
    progress = models.CharField(max_length=5)
    create_time = models.DateTimeField(auto_now_add=True)
    schedule_time = models.DateTimeField(null=True)
    finish_time = models.DateTimeField(null=True)
    pause_time = models.DateTimeField(null=True)
    state_update_time = models.DateTimeField(null=True)
    latest_heartbeat = models.CharField(max_length=32)
    timeout = models.CharField(max_length=10)
    control_action = models.CharField(max_length=32)
    user = models.CharField(max_length=32)
    server_ip = models.CharField(max_length=128)
    state = models.CharField(max_length=64)
    is_manual = models.BooleanField(default=False)
    is_idempotency = models.BooleanField(default=False)
    task_type = models.CharField(max_length=128, default="")
    # Use the dict callable, not a literal {}: a literal default would be
    # shared by every Job instance that doesn't set the field.
    tasks_plan = JSONField(default=dict)
    fail_rate = models.IntegerField(default=0)
    fail_count = models.IntegerField(default=0)
    concurrency = models.IntegerField(default=1)
    continue_after_stage_finish = models.BooleanField(default=False)
    confirms = JSONField(default=dict)
    # NOTE(review): default=None on a non-null CharField looks suspicious —
    # confirm whether null=True was intended.
    executor = models.CharField(max_length=128, default=None)
    owner = models.ForeignKey("auth.User", related_name="jobs",
                              on_delete=models.CASCADE)

    class Meta(WorkFlowBaseModel.Meta):
        db_table = "gearbull_job"

    def is_finish(self):
        """Return True if the job is in a finished state."""
        return self.state in JobStatus.FINISH_STATES

    def has_task_unfinished(self):
        """Return True if the job still has unfinished tasks."""
        # Original code assigned an unused ``unfinish_states`` list here;
        # it was dead code and has been removed.
        tasks = FuxiTaskRuntime.objects.filter(job_id=self.job_id)
        return any(t.state in JobStatus.UNFINISH_STATES for t in tasks)

    @classmethod
    def create_job(cls, job_info):
        """
        Create and persist a Job from a dict of job attributes.

        Raises WFFailedRequest when required keys are missing or the save
        fails; always closes the DB connection afterwards.
        """
        try:
            u = User.objects.get(username=job_info.get("user", "u1"))
            # NOTE(review): ``params`` is not declared on this class —
            # presumably inherited from WorkFlowBaseModel; verify.
            job_runtime = Job(
                owner=u,
                juuid=job_info["juuid"],
                server_ip=job_info["server_ip"],
                task_type=job_info["task_type"],
                state=job_info["state"],
                params=job_info["params"],
                priority=job_info.get("priority", 0),
                threshold=job_info.get("threshold", 100),
                timeout=job_info.get("timeout", 259200),  # 3 days
                user=job_info.get("user", "u1"),
                fail_rate=job_info.get("fail_rate", 10),
                concurrency=job_info.get("concurrency", 1),
                confirms=job_info.get("confirms", {}),
                continue_after_stage_finish=job_info.get(
                    "continue_after_stage_finish", True),
                is_idempotency=job_info.get("is_idempotency", True),
                executor=job_info.get("executor", "WorkFlowExecutor"),
            )
            job_runtime.save()
        except KeyError as e:
            logger.error(traceback.format_exc())
            raise exceptions.WFFailedRequest(msgs.miss_params % str(e))
        except Exception as e:
            logger.error(traceback.format_exc())
            raise exceptions.WFFailedRequest(msgs.save_job_error %
                                             (str(e), str(job_info)))
        finally:
            connection.close()
        logger.info(msgs.create_job_succ % job_runtime.id)
        return job_runtime

    @classmethod
    def redo(cls, job_id, redo_tasks_states=(JobStatus.FAILED,)):
        """
        Redo a job. The states of the tasks to redo can be specified;
        failed tasks are redone by default.

        NOTE(review): ``redo_tasks_states`` is currently ignored — the body
        always redoes FAILED and ERROR tasks. The default was changed from a
        mutable list to a tuple; behavior is unchanged.
        """
        try:
            (status, ip_addr) = subprocess.getstatusoutput("hostname -i")
            _job = Job.objects.get(id=job_id)
            _job_params = copy.deepcopy(_job.params)
            _entities = []
            for t in _job.task_set.filter(
                    Q(state=JobStatus.FAILED) | Q(state=JobStatus.ERROR)):
                _entities.append(t.entity_name)
            _job_params["entities"] = {1: _entities}
            job_runtime = Job(
                priority=_job.priority,
                threshold=_job.threshold,
                timeout=_job.timeout,
                user=_job.user,
                server_ip=ip_addr,
                params=_job_params,
                task_type=_job.task_type,
                fail_rate=_job.fail_rate,
                concurrency=_job.concurrency,
                continue_after_stage_finish=_job.continue_after_stage_finish,
                state=JobStatus.NEW,
            )
            job_runtime.save()
        except KeyError as e:
            raise exceptions.WFFailedRequest(msgs.miss_params % str(e))
        except Exception as e:
            logger.error(traceback.format_exc())
            raise exceptions.WFFailedRequest(msgs.save_job_error %
                                             (str(e), str(_job_params)))
        finally:
            connection.close()
        logger.info(msgs.create_job_succ % job_runtime.id)
        return job_runtime

    def update_job_info(self, info):
        """
        Update this job's runtime record.

        :param info: dict mapping job attribute names to new values.
        :return: None. Fires on_job_state_change when "state" changes.
        """
        is_state_change = False
        if "state" in info:
            job_state = Job.objects.get(id=self.id).state
            if job_state != info["state"]:
                is_state_change = True
        for k, v in list(info.items()):
            if hasattr(self, k):
                setattr(self, k, v)
        self.save()
        connection.close()
        if is_state_change:
            on_job_state_change(self, info["state"])

    def pause(self):
        """Pause the job unless it has been cancelled."""
        if self.state == JobStatus.CANCELLED:
            return
        self.state = JobStatus.PAUSED
        self.pause_time = timezone.now()
        self.save()

    def resume(self):
        """Resume a paused job unless it has been cancelled."""
        if self.state == JobStatus.CANCELLED:
            return
        self.state = JobStatus.RUNNING
        self.save()

    def cancel(self):
        """Cancel the job."""
        self.state = JobStatus.CANCELLED
        self.save()

    def confirm(self):
        """Confirm the job, putting it back into the NEW state."""
        self.state = JobStatus.NEW
        self.save()

    @property
    def is_paused(self):
        # Re-read from the DB so concurrent state changes are observed.
        j = Job.objects.get(id=self.id)
        return JobStatus.PAUSED == j.state
class Entity(DirtyFieldsMixin, models.Model):
    """A translatable string extracted from a project resource."""
    resource = models.ForeignKey(Resource, related_name='entities')
    string = models.TextField()
    string_plural = models.TextField(blank=True)
    key = models.TextField(blank=True)
    comment = models.TextField(blank=True)
    order = models.PositiveIntegerField(default=0)
    source = JSONField(blank=True, default=list)  # List of paths to source code files
    obsolete = models.BooleanField(default=False)

    changed_locales = models.ManyToManyField(
        Locale,
        through='ChangedEntityLocale',
        help_text='List of locales in which translations for this entity have '
                  'changed since the last sync.')

    @property
    def marked(self):
        return utils.mark_placeables(self.string)

    @property
    def marked_plural(self):
        return utils.mark_placeables(self.string_plural)

    @property
    def cleaned_key(self):
        """
        Get cleaned key, without the source string and Translate Toolkit
        separator.
        """
        key = self.key.split(KEY_SEPARATOR)[0]
        if key == self.string:
            key = ''
        return key

    def __unicode__(self):
        return self.string

    def has_changed(self, locale):
        """
        Check if translations in the given locale have changed since the
        last sync.
        """
        return locale in self.changed_locales.all()

    def mark_changed(self, locale):
        """
        Mark the given locale as having changed translations since the
        last sync.
        """
        ChangedEntityLocale.objects.get_or_create(entity=self, locale=locale)

    def get_translation(self, plural_form=None):
        """Get fetched translation of a given entity."""
        translations = self.fetched_translations
        if plural_form is not None:
            translations = [
                t for t in translations if t.plural_form == plural_form
            ]

        if translations:
            # Prefer approved translations, then the oldest.
            translation = sorted(translations,
                                 key=lambda k: (not k.approved, k.date))[0]
            return {
                'fuzzy': translation.fuzzy,
                'string': translation.string,
                'approved': translation.approved,
                'pk': translation.pk
            }
        else:
            return {
                'fuzzy': False,
                'string': None,
                'approved': False,
                'pk': None
            }

    @classmethod
    def for_project_locale(cls, project, locale, paths=None, search=None):
        """
        Get project entities with locale translations.

        Note: the first parameter of this classmethod was misleadingly
        named ``self``; renamed to the conventional ``cls``.
        """
        entities = cls.objects.filter(resource__project=project,
                                      resource__stats__locale=locale,
                                      obsolete=False)

        # Filter by search parameters
        if search:
            keyword = search.get('keyword', None)
            # 'i' prefix selects case-insensitive lookups.
            i = '' if search.get('casesensitive', None) else 'i'
            entity_query = Q()  # Empty object
            if search.get('sources', None):
                entity_query |= Q(**{'string__%scontains' % i: keyword}) | Q(
                    **{'string_plural__%scontains' % i: keyword})
            if search.get('translations', None):
                entity_query |= Q(
                    **{'translation__string__%scontains' % i: keyword})
            if search.get('comments', None):
                entity_query |= Q(**{'comment__%scontains' % i: keyword})
            if search.get('keys', None):
                entity_query |= Q(**{'key__%scontains' % i: keyword})
            entities = entities.filter(entity_query).distinct()

        # Filter by path
        elif paths:
            try:
                subpage = Subpage.objects.get(project=project, name__in=paths)
                paths = subpage.resources.values_list("path")
            except Subpage.DoesNotExist:
                pass
            # Fall back to the unfiltered queryset if no entity matches.
            entities = entities.filter(resource__path__in=paths) or entities

        entities = entities.prefetch_related(
            'resource',
            Prefetch('translation_set',
                     queryset=Translation.objects.filter(locale=locale),
                     to_attr='fetched_translations'))

        entities_array = []
        for entity in entities:
            translation_array = []
            has_suggestions = False

            if entity.string_plural == "":
                translation_array.append(entity.get_translation())
                if len(entity.fetched_translations) > 1:
                    has_suggestions = True
            else:
                for plural_form in range(0, locale.nplurals or 1):
                    translation_array.append(
                        entity.get_translation(plural_form))
                    if len([
                            t for t in entity.fetched_translations
                            if t.plural_form == plural_form
                    ]) > 1:
                        has_suggestions = True

            entities_array.append({
                'pk': entity.pk,
                'original': entity.string,
                'marked': entity.marked,
                'original_plural': entity.string_plural,
                'marked_plural': entity.marked_plural,
                'key': entity.cleaned_key,
                'path': entity.resource.path,
                'format': entity.resource.format,
                'comment': entity.comment,
                'order': entity.order,
                'source': entity.source,
                'obsolete': entity.obsolete,
                'translation': translation_array,
                'has_suggestions': has_suggestions,
            })
        return sorted(entities_array, key=lambda k: k['order'])
class StatData(models.Model):
    """A named bag of statistics captured at a point in time."""
    name = models.CharField(max_length=128)
    # Use the dict callable, not a literal {}: a literal would be shared
    # between all instances that don't set the field.
    stats = JSONField(default=dict)
    stat_time = models.CharField(max_length=128)
    create_at = models.DateTimeField(auto_now_add=True)
class Profile(models.Model):
    """A user profile."""
    # Account privacy levels.
    PRIVATE = 'private'
    PUBLIC_TO_MM = 'public_to_mm'
    PUBLIC = 'public'
    ACCOUNT_PRIVACY_CHOICES = (
        (PRIVATE, 'Private'),
        (PUBLIC_TO_MM, 'Public to logged in users'),
        (PUBLIC, 'Public to everyone'),
    )

    # Defined in edX UserProfile model
    MALE = 'm'
    FEMALE = 'f'
    OTHER = 'o'
    GENDER_CHOICES = (
        (MALE, 'Male'),
        (FEMALE, 'Female'),
        (OTHER, 'Other/Prefer Not to Say'),
    )

    LEVEL_OF_EDUCATION_CHOICES = (
        (DOCTORATE, 'Doctorate'),
        (MASTERS, "Master's or professional degree"),
        (BACHELORS, "Bachelor's degree"),
        (ASSOCIATE, "Associate degree"),
        (HIGH_SCHOOL, "High school"),
        (JUNIOR_HIGH_SCHOOL, "Junior high school"),
        (ELEMENTARY, "Elementary school"),
        (NO_FORMAL_EDUCATION, "No formal education"),
        (OTHER_EDUCATION, "Other education"),
    )

    user = models.OneToOneField(User)

    # Is the profile filled out yet?
    filled_out = models.BooleanField(default=False)
    agreed_to_terms_of_service = models.BooleanField(default=False)

    # is the user a verified micromaster user?
    verified_micromaster_user = models.BooleanField(default=False)

    # Defining these here instead of in User to avoid Django's 30 character max limit
    first_name = models.TextField(blank=True, null=True)
    last_name = models.TextField(blank=True, null=True)
    preferred_name = models.TextField(blank=True, null=True)

    account_privacy = models.TextField(
        default=PUBLIC_TO_MM,
        choices=ACCOUNT_PRIVACY_CHOICES,
    )

    # Has user opted to receive email?
    email_optin = models.BooleanField(default=False)

    edx_employer = models.CharField(
        max_length=255,
        blank=True,
        null=True,
    )
    edx_job_title = models.CharField(
        max_length=255,
        blank=True,
        null=True,
    )
    edx_name = models.TextField(blank=True, null=True)
    edx_bio = models.TextField(blank=True, null=True)

    city = models.TextField(blank=True, null=True)
    country = models.TextField(blank=True, null=True)
    state_or_territory = models.CharField(
        max_length=255,
        blank=True,
        null=True,
    )

    birth_country = models.TextField(blank=True, null=True)
    nationality = models.TextField(blank=True, null=True)

    has_profile_image = models.BooleanField(default=False)
    edx_requires_parental_consent = models.NullBooleanField()
    date_of_birth = models.DateField(blank=True, null=True)
    edx_level_of_education = models.TextField(
        max_length=6,
        choices=LEVEL_OF_EDUCATION_CHOICES,
        blank=True,
        null=True,
    )
    edx_goals = models.TextField(blank=True, null=True)
    preferred_language = models.TextField(blank=True, null=True)
    edx_language_proficiencies = JSONField(blank=True, null=True)
    gender = models.CharField(
        max_length=6,
        choices=GENDER_CHOICES,
        blank=True,
        null=True,
    )
    edx_mailing_address = models.TextField(blank=True, null=True)
    date_joined_micromasters = models.DateTimeField(blank=True, null=True,
                                                    auto_now_add=True)
    linkedin = JSONField(blank=True, null=True)
    student_id = models.IntegerField(blank=True, null=True, unique=True)

    @transaction.atomic
    def save(self, *args, **kwargs):
        """Assign the next available student_id before persisting, if unset."""
        if self.id is None or self.student_id is None:
            candidate = Profile.objects.aggregate(
                Max('student_id'))['student_id__max'] or 1
            # Probe linearly for a free id, capped so a pathological DB
            # state cannot loop forever.
            attempts_left = 1000
            while Profile.objects.filter(
                    student_id=candidate).exists() and attempts_left > 0:
                candidate += 1
                attempts_left -= 1
            self.student_id = candidate
        super(Profile, self).save(*args, **kwargs)

    def __str__(self):
        return 'Profile for "{0}"'.format(self.user.username)

    @property
    def pretty_printed_student_id(self):
        """Pretty prints the student id for easy display."""
        if not self.student_id:
            return ""
        return "MMM{0:06}".format(self.student_id)

    @property
    def email(self):
        """Email of user."""
        return self.user.email
class Runtime(models.Model):
    """Arbitrary runtime data captured with a creation timestamp."""
    # Use the dict callable, not a literal {}: a literal would be shared
    # between all instances that don't set the field.
    data = JSONField(default=dict)
    create_at = models.DateTimeField(auto_now_add=True)
class Layer(models.Model):
    """
    A layer object that can be added to any map.
    """
    name = models.CharField(
        max_length=200,
        help_text='Name that will be displayed within GeoQ')
    type = models.CharField(choices=SERVICE_TYPES, max_length=75)
    url = models.CharField(
        help_text=
        'URL of service. If WMS or ESRI, can be any valid URL. Otherwise, the URL will require a local proxy',
        max_length=500)
    layer = models.CharField(
        max_length=800,
        null=True,
        blank=True,
        help_text=
        'Layer names can sometimes be comma-separated, and are not needed for data layers (KML, GeoRSS, GeoJSON...)'
    )
    image_format = models.CharField(
        null=True,
        blank=True,
        choices=IMAGE_FORMATS,
        max_length=75,
        help_text=
        'The MIME type of the image format to use for tiles on WMS layers (image/png, image/jpeg image/gif...). Double check that the server exposes this exactly - some servers push png instead of image/png.'
    )
    styles = models.CharField(
        null=True,
        blank=True,
        max_length=200,
        help_text=
        'The name of a style to use for this layer (only useful for WMS layers if the server exposes it.)'
    )
    transparent = models.BooleanField(
        default=True,
        help_text=
        'If WMS or overlay, should the tiles be transparent where possible?')
    refreshrate = models.PositiveIntegerField(
        blank=True,
        null=True,
        verbose_name="Layer Refresh Rate",
        help_text=
        'Layer refresh rate in seconds for vector/data layers (will not refresh WMS layers)'
    )
    description = models.TextField(
        max_length=800,
        null=True,
        blank=True,
        help_text=
        'Text to show in layer chooser, please be descriptive - this will soon be searchable'
    )
    attribution = models.CharField(
        max_length=200,
        null=True,
        blank=True,
        help_text=
        "Attribution from layers to the map display (will show in bottom of map when layer is visible)."
    )
    token = models.CharField(
        max_length=400,
        null=True,
        blank=True,
        help_text=
        'Authentication token, if required (usually only for secure layer servers)'
    )

    ## Advanced layer options
    objects = models.GeoManager()
    extent = models.PolygonField(null=True, blank=True,
                                 help_text='Extent of the layer.')
    layer_parsing_function = models.CharField(
        max_length=100,
        blank=True,
        null=True,
        help_text=
        'Advanced - The javascript function used to parse a data service (GeoJSON, GeoRSS, KML), needs to be an internally known parser. Contact an admin if you need data parsed in a new way.'
    )
    enable_identify = models.BooleanField(
        default=False,
        help_text=
        'Advanced - Allow user to click map to query layer for details. The map server must support queries for this layer.'
    )
    info_format = models.CharField(
        max_length=75,
        null=True,
        blank=True,
        choices=INFO_FORMATS,
        help_text='Advanced - what format the server returns for an WMS-I query'
    )
    root_field = models.CharField(
        max_length=100,
        null=True,
        blank=True,
        help_text=
        'Advanced - For WMS-I (queryable) layers, the root field returned by server. Leave blank for default (will usually be "FIELDS" in returned XML).'
    )
    fields_to_show = models.CharField(
        max_length=200,
        null=True,
        blank=True,
        help_text=
        'Fields to show when someone uses the identify tool to click on the layer. Leave blank for all.'
    )
    downloadableLink = models.URLField(
        max_length=400,
        null=True,
        blank=True,
        help_text=
        'URL of link to supporting tool (such as a KML document that will be shown as a download button)'
    )
    layer_params = JSONField(
        null=True,
        blank=True,
        help_text=
        'JSON key/value pairs to be sent to the web service. ex: {"crs":"urn:ogc:def:crs:EPSG::4326"}'
    )
    dynamic_params = JSONField(
        null=True,
        blank=True,
        help_text=
        'URL Variables that may be modified by the analyst. ex: "date"')
    spatial_reference = models.CharField(
        max_length=32,
        blank=True,
        null=True,
        default="EPSG:4326",
        help_text=
        'The spatial reference of the service. Should be in ESPG:XXXX format.'
    )
    constraints = models.TextField(
        null=True,
        blank=True,
        help_text='Constrain layer data displayed to certain feature types')
    disabled = models.BooleanField(
        default=False,
        blank=True,
        help_text="If unchecked, Don't show this layer when listing all layers"
    )
    layer_info_link = models.URLField(
        null=True,
        blank=True,
        help_text='URL of info about the service, or a help doc or something',
        max_length=500)
    created_at = models.DateTimeField(auto_now_add=True, null=True)
    # NOTE(review): auto_now_add never updates after creation — this looks
    # like it should be auto_now=True. Not changed here because fixing it
    # requires a migration; confirm intent first.
    updated_at = models.DateTimeField(auto_now_add=True, null=True)

    ## Primarily for http://trac.osgeo.org/openlayers/wiki/OpenLayersOptimization
    additional_domains = models.TextField(
        null=True,
        blank=True,
        help_text=
        'Semicolon seperated list of additional domains for the layer. Only used if you want to cycle through domains for load-balancing'
    )

    def __unicode__(self):
        return '{0}'.format(self.name)

    def get_layer_urls(self):
        """
        Returns a list of urls for the layer.
        """
        urls = []
        if self.additional_domains:
            # Build the list directly instead of map(urls.append, ...):
            # map() is lazy in Python 3, so the original form would
            # silently append nothing there.
            urls.extend(domain
                        for domain in self.additional_domains.split(";")
                        if domain)
        return urls

    def get_absolute_url(self):
        return reverse('layer-update', args=[self.id])

    def get_layer_params(self):
        """
        Returns the layer_params attribute, which should be json
        """
        return self.layer_params

    def layer_json(self):
        """Serializable dict describing this layer for map clients."""
        return {
            "id": self.id,
            "name": self.name,
            "format": self.image_format,
            "type": self.type,
            "url": self.url,
            "subdomains": self.get_layer_urls(),
            "layer": self.layer,
            "transparent": self.transparent,
            "layerParams": self.layer_params,
            "dynamicParams": self.dynamic_params,
            "refreshrate": self.refreshrate,
            "token": self.token,
            "attribution": self.attribution,
            "spatialReference": self.spatial_reference,
            "layerParsingFunction": self.layer_parsing_function,
            "enableIdentify": self.enable_identify,
            "rootField": self.root_field,
            "infoFormat": self.info_format,
            "fieldsToShow": self.fields_to_show,
            "description": self.description,
            "downloadableLink": self.downloadableLink,
            "layer_info_link": self.layer_info_link,
            "styles": self.styles,
        }

    class Meta:
        ordering = ["name"]
class ExclusionProfile(models.Model):
    """
    An exclusion profile represents a list of job exclusions that can be
    associated with a user profile.
    """
    name = models.CharField(max_length=255, unique=True)
    is_default = models.BooleanField(default=False)
    exclusions = models.ManyToManyField(JobExclusion, related_name="profiles")
    # Use the dict callable, not a literal {}: a literal default would be
    # shared by every instance that doesn't set the field.
    flat_exclusion = JSONField(blank=True, default=dict)
    author = models.ForeignKey(User,
                               related_name="exclusion_profiles_authored")

    def save(self, *args, **kwargs):
        super(ExclusionProfile, self).save(*args, **kwargs)
        self.update_flat_exclusions()

        # update the old default profile
        if self.is_default:
            ExclusionProfile.objects.filter(is_default=True).exclude(
                id=self.id).update(is_default=False)

    def update_flat_exclusions(self):
        # this is necessary because the ``job_types`` come back in the form of
        # ``Mochitest (18)`` or ``Reftest IPC (Ripc)`` so we must split these
        # back out.
        # same deal for ``platforms``
        # todo: update if/when chunking policy changes
        # when we change chunking, we will likely only get back the name,
        # so we'll just compare that to the ``job_type_name`` field.
        def split_combo(combos):
            """Split ``Name (Symbol)`` strings into (names, symbols) lists."""
            list1 = []
            list2 = []
            for combo in combos:
                first, sep, second = combo.rpartition(' (')
                list1.append(first)
                list2.append(second.rstrip(')'))
            return list1, list2

        query = None
        for exclusion in self.exclusions.all().select_related("info"):
            info = exclusion.info
            option_collection_hashes = info['option_collection_hashes']
            job_type_names, job_type_symbols = split_combo(info['job_types'])
            platform_names, platform_arch = split_combo(info['platforms'])
            new_query = Q(repository__in=info['repos'],
                          machine_platform__in=platform_names,
                          job_type_name__in=job_type_names,
                          job_type_symbol__in=job_type_symbols,
                          option_collection_hash__in=option_collection_hashes)
            query = (query | new_query) if query else new_query

        self.flat_exclusion = {}

        if query:
            signatures = ReferenceDataSignatures.objects.filter(
                query).values_list('repository', 'signature')

            self.flat_exclusion = defaultdict(list)

            # group the signatures by repo, so the queries don't have to be
            # so long when getting jobs
            for repo, sig in signatures:
                self.flat_exclusion[repo].append(sig)

        # Persist without re-running save() (which would recurse back here).
        super(ExclusionProfile, self).save(force_insert=False,
                                           force_update=True)

    class Meta:
        db_table = 'exclusion_profile'
class FeatureType(models.Model):
    """A drawable feature type (point/line/polygon) with style metadata."""
    FEATURE_TYPES = (
        ('Point', 'Point'),
        ('LineString', 'Line'),
        ('Polygon', 'Polygon'),
        # ('Text', 'Text'),
        # ('Overlay', 'Overlay'),  #TODO: Support overlay images. Should these be features?
    )
    name = models.CharField(max_length=200)
    type = models.CharField(choices=FEATURE_TYPES, max_length=25)
    category = models.CharField(
        max_length=25,
        default="",
        blank=True,
        null=True,
        help_text=
        "An optional group to make finding this feature type easier. e.g. 'FEMA'"
    )
    order = models.IntegerField(
        default=0,
        null=True,
        blank=True,
        help_text=
        'Optionally specify the order features should appear on the edit menu. Lower numbers appear sooner.'
    )
    properties = JSONField(
        load_kwargs={},
        blank=True,
        null=True,
        help_text=
        'Metadata added to properties of individual features. Should be in JSON format, e.g. {"severity":"high", "mapText":"Text to Show instead of icon"}'
    )
    style = JSONField(
        load_kwargs={},
        blank=True,
        null=True,
        help_text=
        'Any special CSS style that features of this types should have. e.g. {"opacity":0.7, "color":"red", "backgroundColor":"white", "mapTextStyle":"white_overlay", "iconUrl":"path/to/icon.png"}'
    )
    icon = models.ImageField(
        upload_to="static/featuretypes/",
        blank=True,
        null=True,
        help_text=
        "Upload an icon (now only in Admin menu) of the FeatureType here, will override style iconUrl if set"
    )

    #property_names = models.TextField(blank=True, null=True)

    def to_json(self):
        """Serialize this feature type for API/JS consumption."""
        icon = ""
        if self.icon:
            icon = "/images/" + str(self.icon)
        return clean_dumps(
            dict(id=self.id,
                 properties=self.properties,
                 category=self.category,
                 order=self.order,
                 name=self.name,
                 type=self.type,
                 style=self.style,
                 icon=icon))

    def style_to_geojson(self):
        """
        Return the style translated to GeoJSON simplestyle-like keys.

        Works on a copy of ``self.style``: the original implementation
        popped keys from the model attribute itself, silently corrupting
        the in-memory style.
        """
        local_style = dict(self.style) if self.style else self.style
        if local_style and 'color' in local_style:
            local_style['stroke-color'] = local_style['color']
            local_style['fill-color'] = local_style['color']
            local_style.pop('color', None)
        if local_style and 'weight' in local_style:
            local_style['stroke-width'] = local_style['weight']
            local_style.pop('weight', None)
        if local_style and 'fill' in local_style:
            local_style['fill-opacity'] = local_style['fill']
            local_style.pop('fill', None)
        if local_style and 'iconUrl' in local_style:
            local_style[
                'external-graphic'] = SERVER_URL + local_style['iconUrl']
            local_style.pop('iconUrl', None)
        return local_style

    def iconized(self, height=25):
        """Return an HTML snippet representing this feature type as an icon."""
        style_html = "height:" + str(height) + "px; "
        src = "/static/leaflet/images/gray-marker-icon.png"
        bgColor = ""
        opacity = "1"
        style = self.style or {}

        # ``in`` replaces the Python-2-only dict.has_key(); same behavior.
        if self.icon:
            src = str("/images/" + str(self.icon))
        elif 'iconUrl' in style:
            src = str(style['iconUrl'])

        if 'weight' in style:
            style_html = style_html + "border-width:" + str(
                style['weight']) + "px; "
        if 'color' in style:
            color = str(style['color'])
            style_html = style_html + "border-color:" + color + "; "
            bgColor = color
        if 'fill' in style:
            bgColor = str(style['fill'])
        if 'opacity' in style:
            opacity = str(style['opacity'])

        if self.type == "Point":
            html = "<img src='" + src + "' style='" + style_html + "vertical-align:initial;' />"
        elif self.type == "LineString":
            style_html = style_html + "background-color:" + bgColor + "; "
            html = "<span style='" + style_html + "border-radius:4px; display:inline-block; opacity:" + opacity + "; width:5px; margin-left:3px; margin-right:5px;'> </span>"
        else:
            style_html = style_html + "background-color:" + bgColor + "; "
            html = "<span style='" + style_html + "border-radius:4px; display:inline-block; opacity:" + opacity + "; width:" + str(
                height) + "px;'> </span>"
        return html

    def style_json(self):
        return clean_dumps(self.style)

    def featuretypes(self):
        return FeatureType.objects.all()

    def get_absolute_url(self):
        return reverse('feature-type-update', args=[self.id])

    def __unicode__(self):
        return self.name

    class Meta:
        ordering = ['-category', 'order', 'name', 'id']
class Migration(migrations.Migration):
    """Initial migration for the firmcreator app.

    NOTE(review): applied migrations must stay byte-compatible, so all
    declarations below are unchanged — including the ``'satus'`` typo in
    FwJob.status's verbose_name, which mirrors the original model state.
    """

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='FwJob',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('status',
                 models.CharField(choices=[(b'WAITING', 'waiting'),
                                           (b'STARTED', 'started'),
                                           (b'FINISHED', 'finished'),
                                           (b'FAILED', 'failed')],
                                  default=b'WAITING',
                                  max_length=10,
                                  verbose_name='satus')),
                ('job_data', JSONField(verbose_name='job data')),
                ('build_log',
                 models.TextField(blank=True, verbose_name='build log')),
                ('creation_date',
                 models.DateTimeField(default=datetime.datetime.now,
                                      editable=False,
                                      verbose_name='creation date')),
            ],
            options={
                'ordering': ['-pk'],
            },
            managers=[
                ('started', django.db.models.manager.Manager()),
            ],
        ),
        migrations.CreateModel(
            name='FwProfile',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('name',
                 models.SlugField(default=b'node',
                                  max_length=15,
                                  verbose_name='name')),
                ('slug',
                 autoslug.fields.AutoSlugField(
                     editable=False,
                     populate_from=altermeshfc.firmcreator.models.fwprofile_get_slug,
                     unique=True,
                     verbose_name='slug')),
                ('description', models.TextField(verbose_name='description')),
                ('creation_date',
                 models.DateTimeField(default=datetime.datetime.now,
                                      editable=False,
                                      verbose_name='creation date')),
                ('include_packages',
                 models.TextField(blank=True,
                                  verbose_name='include packages')),
                ('include_files', JSONField(verbose_name='include files')),
                ('openwrt_revision',
                 models.CharField(max_length=50,
                                  verbose_name='openwrt revision')),
                ('devices',
                 models.TextField(default=b'TLWDR4300',
                                  verbose_name='devices')),
                ('based_on',
                 models.ForeignKey(
                     blank=True,
                     help_text=
                     'Create fw profile based on this profile. Leave it on default if you are not sure.',
                     null=True,
                     on_delete=django.db.models.deletion.SET_NULL,
                     to='firmcreator.FwProfile',
                     verbose_name='based on')),
            ],
            options={
                'ordering': ['network__name', 'name'],
                'verbose_name': 'firmware profile',
                'verbose_name_plural': 'firmware profiles',
            },
        ),
        migrations.CreateModel(
            name='Network',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('name',
                 models.CharField(
                     help_text=
                     'also acts as the default public ESSID. Eg: quintanalibre.org.ar',
                     max_length=100,
                     unique=True,
                     verbose_name='name')),
                ('web', models.CharField(blank=True, max_length=200)),
                ('slug',
                 autoslug.fields.AutoSlugField(blank=True,
                                               editable=False,
                                               populate_from=b'name',
                                               unique=True)),
                ('description', models.TextField(verbose_name='description')),
                ('latitude',
                 models.FloatField(blank=True,
                                   null=True,
                                   verbose_name='GEO latitude')),
                ('longitude',
                 models.FloatField(blank=True,
                                   null=True,
                                   verbose_name='GEO longitude')),
                ('admins',
                 models.ManyToManyField(blank=True,
                                        related_name='network_admin_set',
                                        to=settings.AUTH_USER_MODEL)),
                ('user',
                 models.ForeignKey(editable=False,
                                   on_delete=django.db.models.deletion.CASCADE,
                                   related_name='network_user_set',
                                   to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ['name'],
                'verbose_name': 'network',
                'verbose_name_plural': 'networks',
            },
        ),
        migrations.CreateModel(
            name='SSHKey',
            fields=[
                ('id',
                 models.AutoField(auto_created=True,
                                  primary_key=True,
                                  serialize=False,
                                  verbose_name='ID')),
                ('name', models.CharField(max_length=40, verbose_name='name')),
                ('key',
                 altermeshfc.firmcreator.fields.PublicKeyField(
                     verbose_name='ssh key')),
                ('auto_add',
                 models.BooleanField(
                     default=False,
                     verbose_name='automaticaly add this key to my profiles')),
                ('user',
                 models.ForeignKey(editable=False,
                                   on_delete=django.db.models.deletion.CASCADE,
                                   to=settings.AUTH_USER_MODEL,
                                   verbose_name='user')),
            ],
            options={
                'verbose_name': 'SSH key',
                'verbose_name_plural': 'SSH keys',
            },
        ),
        migrations.AddField(
            model_name='fwprofile',
            name='network',
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE,
                to='firmcreator.Network',
                verbose_name='network'),
        ),
        migrations.AddField(
            model_name='fwprofile',
            name='ssh_keys',
            field=models.ManyToManyField(blank=True,
                                         to='firmcreator.SSHKey',
                                         verbose_name=b'SSH keys'),
        ),
        migrations.AddField(
            model_name='fwjob',
            name='profile',
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE,
                to='firmcreator.FwProfile',
                verbose_name='profile'),
        ),
        migrations.AddField(
            model_name='fwjob',
            name='user',
            field=models.ForeignKey(
                editable=False,
                on_delete=django.db.models.deletion.CASCADE,
                to=settings.AUTH_USER_MODEL,
                verbose_name='user'),
        ),
        migrations.AlterUniqueTogether(
            name='fwprofile',
            unique_together=set([('network', 'name')]),
        ),
    ]
class Release(Model):
    """
    A release is generally created when a new version is pushed into a
    production state.

    Releases are scoped to an organization and attached to one or more
    projects via the ReleaseProject through-model.
    """
    __core__ = False

    organization = FlexibleForeignKey('sentry.Organization')
    projects = models.ManyToManyField('sentry.Project',
                                      related_name='releases',
                                      through=ReleaseProject)
    # DEPRECATED
    project_id = BoundedPositiveIntegerField(null=True)
    version = models.CharField(max_length=64)
    # ref might be the branch name being released
    ref = models.CharField(max_length=64, null=True, blank=True)
    url = models.URLField(null=True, blank=True)
    date_added = models.DateTimeField(default=timezone.now)
    # DEPRECATED - not available in UI or editable from API
    date_started = models.DateTimeField(null=True, blank=True)
    date_released = models.DateTimeField(null=True, blank=True)
    # arbitrary data recorded with the release
    # NOTE(review): mutable default dict is shared between instances unless the
    # JSONField implementation copies it — confirm before relying on mutation.
    data = JSONField(default={})
    new_groups = BoundedPositiveIntegerField(default=0)
    # generally the release manager, or the person initiating the process
    owner = FlexibleForeignKey('sentry.User', null=True, blank=True)

    # materialized stats
    commit_count = BoundedPositiveIntegerField(null=True)
    last_commit_id = BoundedPositiveIntegerField(null=True)
    authors = ArrayField(null=True)
    total_deploys = BoundedPositiveIntegerField(null=True)
    last_deploy_id = BoundedPositiveIntegerField(null=True)

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_release'
        unique_together = (('organization', 'version'), )

    __repr__ = sane_repr('organization', 'version')

    @staticmethod
    def is_valid_version(value):
        """A version is valid when non-empty, not '.'/'..' and free of
        characters listed in BAD_RELEASE_CHARS."""
        return not (any(c in value for c in BAD_RELEASE_CHARS) or
                    value in ('.', '..') or not value)

    @classmethod
    def get_cache_key(cls, organization_id, version):
        # 'release:3:' prefix versions the cache schema; bumping it
        # invalidates previously cached entries.
        return 'release:3:%s:%s' % (organization_id,
                                    md5_text(version).hexdigest())

    @classmethod
    def get_lock_key(cls, organization_id, release_id):
        return 'releasecommits:{}:{}'.format(organization_id, release_id)

    @classmethod
    def get(cls, project, version):
        """Fetch a release through the cache; returns None when missing.

        A cache value of -1 is the negative-lookup sentinel (cached 300s).
        """
        cache_key = cls.get_cache_key(project.organization_id, version)
        release = cache.get(cache_key)
        if release is None:
            try:
                release = cls.objects.get(
                    organization_id=project.organization_id,
                    projects=project,
                    version=version,
                )
            except cls.DoesNotExist:
                release = -1
            cache.set(cache_key, release, 300)
        if release == -1:
            return
        return release

    @classmethod
    def get_or_create(cls, project, version, date_added=None):
        """Get (or create under a transaction) the release for *version*.

        Also accepts a legacy '<project-slug>-<version>' formatted version
        and prefers that row when both exist.
        """
        from sentry.models import Project

        if date_added is None:
            date_added = timezone.now()

        cache_key = cls.get_cache_key(project.organization_id, version)

        release = cache.get(cache_key)
        if release in (None, -1):
            # TODO(dcramer): if the cache result is -1 we could attempt a
            # default create here instead of default get
            project_version = ('%s-%s' % (project.slug, version))[:64]
            releases = list(
                cls.objects.filter(organization_id=project.organization_id,
                                   version__in=[version, project_version],
                                   projects=project))
            if releases:
                # prefer the legacy project-prefixed row when present
                try:
                    release = [
                        r for r in releases if r.version == project_version
                    ][0]
                except IndexError:
                    release = releases[0]
            else:
                try:
                    with transaction.atomic():
                        release = cls.objects.create(
                            organization_id=project.organization_id,
                            version=version,
                            date_added=date_added,
                            total_deploys=0,
                        )
                except IntegrityError:
                    # lost a create race; fetch the winner's row
                    release = cls.objects.get(
                        organization_id=project.organization_id,
                        version=version)
                release.add_project(project)
                if not project.flags.has_releases:
                    project.flags.has_releases = True
                    project.update(
                        flags=F('flags').bitor(Project.flags.has_releases))

            # TODO(dcramer): upon creating a new release, check if it should be
            # the new "latest release" for this project
            cache.set(cache_key, release, 3600)

        return release

    @classmethod
    def merge(cls, to_release, from_releases):
        """Fold *from_releases* into *to_release*, repointing every model
        that references a release, then delete the merged-away rows.

        Rows whose repoint would violate a uniqueness constraint (the target
        release already has an equivalent row) are deleted instead.
        """
        # The following models reference release:
        # ReleaseCommit.release
        # ReleaseEnvironment.release_id
        # ReleaseProject.release
        # GroupRelease.release_id
        # GroupResolution.release
        # Group.first_release
        # ReleaseFile.release
        from sentry.models import (ReleaseCommit, ReleaseEnvironment,
                                   ReleaseFile, ReleaseProject, Group,
                                   GroupRelease, GroupResolution)

        model_list = (ReleaseCommit, ReleaseEnvironment, ReleaseFile,
                      ReleaseProject, GroupRelease, GroupResolution)
        for release in from_releases:
            for model in model_list:
                # some models use a FK named `release`, others a raw
                # `release_id` column
                if hasattr(model, 'release'):
                    update_kwargs = {'release': to_release}
                else:
                    update_kwargs = {'release_id': to_release.id}
                try:
                    with transaction.atomic():
                        model.objects.filter(release_id=release.id).update(
                            **update_kwargs)
                except IntegrityError:
                    # bulk update collided with a unique constraint; retry
                    # row by row, dropping true duplicates
                    for item in model.objects.filter(release_id=release.id):
                        try:
                            with transaction.atomic():
                                model.objects.filter(id=item.id).update(
                                    **update_kwargs)
                        except IntegrityError:
                            item.delete()

            Group.objects.filter(first_release=release).update(
                first_release=to_release)

            release.delete()

    @property
    def short_version(self):
        """Version with any dotted-path prefix stripped; sha1-like values are
        truncated to 7 characters."""
        version = self.version
        match = _dotted_path_prefix_re.match(version)
        if match is not None:
            version = version[match.end():]
        if _sha1_re.match(version):
            return version[:7]
        return version

    def add_dist(self, name, date_added=None):
        """Get or create the Distribution *name* bound to this release."""
        from sentry.models import Distribution
        if date_added is None:
            date_added = timezone.now()
        return Distribution.objects.get_or_create(
            release=self,
            name=name,
            defaults={
                'date_added': date_added,
                'organization_id': self.organization_id,
            })[0]

    def get_dist(self, name):
        """Return the Distribution *name* for this release, or None."""
        from sentry.models import Distribution
        try:
            return Distribution.objects.get(name=name, release=self)
        except Distribution.DoesNotExist:
            pass

    def add_project(self, project):
        """
        Add a project to this release.

        Returns True if the project was added and did not already exist.
        """
        from sentry.models import Project
        try:
            with transaction.atomic():
                ReleaseProject.objects.create(project=project, release=self)
                if not project.flags.has_releases:
                    project.flags.has_releases = True
                    project.update(flags=F('flags').bitor(
                        Project.flags.has_releases), )
        except IntegrityError:
            # link already existed
            return False
        else:
            return True

    def set_refs(self, refs, user, fetch=False):
        """Record head commits per repository for this release.

        Each item of *refs* is a dict with 'repository' and 'commit' keys.
        Raises InvalidRepository if any named repository is unknown to the
        organization. When *fetch* is set, kicks off async commit fetching
        against the previous release.
        """
        from sentry.api.exceptions import InvalidRepository
        from sentry.models import Commit, ReleaseHeadCommit, Repository
        from sentry.tasks.commits import fetch_commits

        # TODO: this does the wrong thing unless you are on the most
        # recent release.  Add a timestamp compare?
        prev_release = type(self).objects.filter(
            organization_id=self.organization_id,
            projects__in=self.projects.all(),
        ).exclude(version=self.version).order_by('-date_added').first()

        names = {r['repository'] for r in refs}
        repos = list(
            Repository.objects.filter(
                organization_id=self.organization_id,
                name__in=names,
            ))
        repos_by_name = {r.name: r for r in repos}
        invalid_repos = names - set(repos_by_name.keys())
        if invalid_repos:
            raise InvalidRepository('Invalid repository names: %s' %
                                    ','.join(invalid_repos))

        for ref in refs:
            repo = repos_by_name[ref['repository']]
            commit = Commit.objects.get_or_create(
                organization_id=self.organization_id,
                repository_id=repo.id,
                key=ref['commit'],
            )[0]
            # update head commit for repo/release if exists
            ReleaseHeadCommit.objects.create_or_update(
                organization_id=self.organization_id,
                repository_id=repo.id,
                release=self,
                values={
                    'commit': commit,
                })
        if fetch:
            fetch_commits.apply_async(
                kwargs={
                    'release_id': self.id,
                    'user_id': user.id,
                    'refs': refs,
                    'prev_release_id': prev_release and prev_release.id,
                })

    def set_commits(self, commit_list):
        """
        Bind a list of commits to this release.

        These should be ordered from newest to oldest.

        This will clear any existing commit log and replace it with the given
        commits.
        """
        from sentry.models import (Commit, CommitAuthor, Group,
                                   GroupCommitResolution, GroupResolution,
                                   GroupStatus, ReleaseCommit, Repository)
        from sentry.plugins.providers.repository import RepositoryProvider

        # drop commits the repository provider marks as ignorable
        commit_list = [
            c for c in commit_list
            if not RepositoryProvider.should_ignore_commit(c.get('message', ''))
        ]

        # serialize concurrent set_commits calls for the same release
        lock_key = type(self).get_lock_key(self.organization_id, self.id)
        lock = locks.get(lock_key, duration=10)
        with TimedRetryPolicy(10)(lock.acquire):
            with transaction.atomic():
                # TODO(dcramer): would be good to optimize the logic to avoid these
                # deletes but not overly important
                ReleaseCommit.objects.filter(release=self, ).delete()

                authors = {}      # author_email -> CommitAuthor (memoized)
                repos = {}        # repo name -> Repository (memoized)
                commit_author_by_commit = {}
                latest_commit = None
                for idx, data in enumerate(commit_list):
                    repo_name = data.get(
                        'repository') or 'organization-{}'.format(
                            self.organization_id)
                    if repo_name not in repos:
                        repos[
                            repo_name] = repo = Repository.objects.get_or_create(
                                organization_id=self.organization_id,
                                name=repo_name,
                            )[0]
                    else:
                        repo = repos[repo_name]

                    author_email = data.get('author_email')
                    # synthesize a '<name>@localhost' email when only a
                    # name was provided
                    if author_email is None and data.get('author_name'):
                        author_email = (re.sub(r'[^a-zA-Z0-9\-_\.]*', '',
                                               data['author_name']).lower() +
                                        '@localhost')

                    if not author_email:
                        author = None
                    elif author_email not in authors:
                        authors[
                            author_email] = author = CommitAuthor.objects.get_or_create(
                                organization_id=self.organization_id,
                                email=author_email,
                                defaults={
                                    'name': data.get('author_name'),
                                })[0]
                        if data.get('author_name'
                                    ) and author.name != data['author_name']:
                            author.update(name=data['author_name'])
                    else:
                        author = authors[author_email]

                    defaults = {
                        'message': data.get('message'),
                        'author': author,
                        'date_added': data.get('timestamp') or timezone.now(),
                    }
                    commit, created = Commit.objects.get_or_create(
                        organization_id=self.organization_id,
                        repository_id=repo.id,
                        key=data['id'],
                        defaults=defaults,
                    )
                    if author is None:
                        author = commit.author

                    commit_author_by_commit[commit.id] = author

                    patch_set = data.get('patch_set', [])
                    for patched_file in patch_set:
                        # NOTE(review): CommitFileChange is not in the local
                        # import above — presumably imported at module level;
                        # also `self.organization.id` (extra query) vs the
                        # `self.organization_id` used elsewhere — verify.
                        CommitFileChange.objects.get_or_create(
                            organization_id=self.organization.id,
                            commit=commit,
                            filename=patched_file['path'],
                            type=patched_file['type'],
                        )

                    if not created:
                        # backfill fields that were missing on the existing row
                        update_kwargs = {}
                        if commit.message is None and defaults[
                                'message'] is not None:
                            update_kwargs['message'] = defaults['message']
                        if commit.author_id is None and defaults[
                                'author'] is not None:
                            update_kwargs['author'] = defaults['author']
                        if update_kwargs:
                            commit.update(**update_kwargs)

                    ReleaseCommit.objects.create(
                        organization_id=self.organization_id,
                        release=self,
                        commit=commit,
                        order=idx,
                    )
                    # commit_list is newest-first, so the first row is latest
                    if latest_commit is None:
                        latest_commit = commit

                self.update(
                    commit_count=len(commit_list),
                    authors=[
                        six.text_type(a_id)
                        for a_id in ReleaseCommit.objects.filter(
                            release=self,
                            commit__author_id__isnull=False,
                        ).values_list('commit__author_id',
                                      flat=True).distinct()
                    ],
                    last_commit_id=latest_commit.id if latest_commit else None,
                )

        # resolve groups whose fixing commit landed in this release
        commit_resolutions = list(
            GroupCommitResolution.objects.filter(
                commit_id__in=ReleaseCommit.objects.filter(
                    release=self).values_list('commit_id', flat=True),
            ).values_list('group_id', 'commit_id'))
        user_by_author = {None: None}
        for group_id, commit_id in commit_resolutions:
            author = commit_author_by_commit.get(commit_id)
            if author not in user_by_author:
                try:
                    user_by_author[author] = author.find_users()[0]
                except IndexError:
                    user_by_author[author] = None
            actor = user_by_author[author]
            with transaction.atomic():
                GroupResolution.objects.create_or_update(
                    group_id=group_id,
                    values={
                        'release': self,
                        'type': GroupResolution.Type.in_release,
                        'status': GroupResolution.Status.resolved,
                        'actor_id': actor.id if actor else None,
                    },
                )
                Group.objects.filter(
                    id=group_id, ).update(status=GroupStatus.RESOLVED)
def test_db_prep_save(self):
    """get_db_prep_save passes None through unchanged and serializes a dict
    to its compact JSON representation."""
    field = JSONField(u"test")
    field.set_attributes_from_name("json")
    # assertEquals is the deprecated alias of assertEqual; use the modern
    # names (assertIsNone for the None passthrough check).
    self.assertIsNone(field.get_db_prep_save(None, connection=None))
    self.assertEqual('{"spam": "eggs"}',
                     field.get_db_prep_save({"spam": "eggs"}, connection=None))
class Game(models.Model):
    """A Ludo game.

    Board layout lives in the `state` JSON blob:
      - state['fields']: the 40-square shared track (None or 'color-N' token)
      - state['bases'][color]: tokens not yet on the board
      - state['homes'][color]: the 4 home squares per color
    `rolls` maps str(user id) -> last die roll for that user.
    """
    player = models.ForeignKey(User,
                               on_delete=models.SET_NULL,
                               null=True,
                               related_name='ludo_player')  # whose turn it is
    players = models.ManyToManyField(User,
                                     blank=True,
                                     related_name='ludo_players')
    players_played = models.ManyToManyField(User,
                                            blank=True,
                                            related_name='ludo_played')
    state = JSONField()
    rolls = JSONField()

    def __str__(self):
        return f'{self.state}'

    def __repr__(self):
        return f'Game [{self.id}]: {self.state} ({self.players.all()})'

    def color(self):
        """Color of the current player, assigned by join order
        (blue, red, green, yellow); None if the player is not in the game."""
        players = self.players.all()
        total_players = len(players)
        if self.player == players[0]:
            return 'blue'
        elif total_players > 1 and self.player == players[1]:
            return 'red'
        elif total_players > 2 and self.player == players[2]:
            return 'green'
        elif total_players > 3 and self.player == players[3]:
            return 'yellow'
        else:
            return None

    def move(self, token):
        """Move the current player's *token* (1-4) by their last roll.

        Mutates `self.state` in place (caller is responsible for saving).
        Returns True if an enemy token was knocked back to its base.
        """
        color = self.color()
        roll = self.rolls[f'{self.player.id}']
        strtoken = f'{color}-{token}'
        knock = False

        # Check if in base, move to entrance if found
        if strtoken in self.state['bases'][color]:
            knock = self.move_knock(self.state['fields'][ENTRANCES[color]])
            self.state['fields'][ENTRANCES[color]] = strtoken
            self.state['bases'][color][token - 1] = None
        # Move on field
        elif strtoken in self.state['fields']:
            field_index = self.state['fields'].index(strtoken)
            move_to = self.field_wrap(field_index + roll)

            # Move into home or further on the field
            entrance_plus_index = self.entrance_wrap(color, field_index)
            if field_index < entrance_plus_index and move_to >= ENTRANCES[
                    color]:
                # crossing our entrance: re-base the index into the home row
                move_to = self.field_wrap(move_to - entrance_plus_index)
                self.state['homes'][color][move_to] = strtoken
            else:
                knock = self.move_knock(self.state['fields'][move_to])
                self.state['fields'][move_to] = strtoken
            self.state['fields'][field_index] = None
        # Move further into home
        elif strtoken in self.state['homes'][color]:
            field_index = self.state['homes'][color].index(strtoken)
            move_to = field_index + roll
            self.state['homes'][color][move_to] = strtoken
            self.state['homes'][color][field_index] = None

        return knock

    def move_knock(self, knock_token):
        # Knock enemy token out if one exists on that field
        if knock_token:
            knock_color = self.token_color(knock_token)
            knock_index = self.token_index(knock_token) - 1
            self.state['bases'][knock_color][knock_index] = knock_token
            return True
        return False

    def available_actions(self):
        """List of 'move-N' actions legal for the current player's roll."""
        color = self.color()
        roll = self.rolls[f'{self.player.id}']
        actions = []

        # Spawn tokens on 6 if entrance clear of friendlies
        entrance = self.state['fields'][ENTRANCES[color]]
        if roll == 6 and (not entrance or color not in entrance):
            for strtoken in self.filter(self.state['bases'][color]):
                actions.append(f'move-{self.token_index(strtoken)}')

        # Move any tokens further up the field or into home
        for field_index, strtoken in enumerate(self.state['fields']):
            if strtoken and color == self.token_color(strtoken):
                move_to = self.field_wrap(field_index + roll)
                field_to = self.state['fields'][move_to]

                # Check if in range of home (not at start) or empty or enemy field
                entrance_plus_index = self.entrance_wrap(color, field_index)
                valid = True
                if field_index < entrance_plus_index and move_to >= ENTRANCES[
                        color]:
                    move_to = self.field_wrap(move_to - entrance_plus_index)
                    # home row has only 4 squares; overshoot is illegal
                    if move_to > 3:
                        valid = False
                    else:
                        field_to = self.state['homes'][color][move_to]
                if valid and (not field_to or color not in field_to):
                    actions.append(f'move-{self.token_index(strtoken)}')

        # Move tokens in home further into home
        for field_index, strtoken in enumerate(self.state['homes'][color]):
            if strtoken:
                move_to = field_index + roll
                if move_to <= 3:
                    field_to = self.state['homes'][color][move_to]
                    if not field_to:
                        actions.append(f'move-{self.token_index(strtoken)}')

        return actions

    def filter(self, l, color=None):
        # drops None entries; NOTE(review): `color` is accepted but unused,
        # and the name shadows the builtin-ish queryset idiom — verify intent.
        return [x for x in l if x]

    def token_color(self, strtoken):
        # 'blue-3' -> 'blue'
        return strtoken.split('-')[0]

    def token_index(self, strtoken):
        # 'blue-3' -> 3
        return int(strtoken.split('-')[1])

    def field_wrap(self, field_index):
        # main track has 40 squares (indices 0..39)
        return field_index % (39 + 1)

    def entrance_wrap(self, color, field_index):
        # Helper entrance index on red's 39 -> 0 jump
        # 34 is minimum breaking point
        if color == 'red' and field_index > 34:
            return ENTRANCES[color] + 40
        return ENTRANCES[color]
class Zone(Model):
    """A named zone keyed by slug, optionally bound to a widget, carrying an
    arbitrary JSON payload."""
    # slug doubles as the primary key
    zone_id = SlugField(primary_key=True)
    # optional link to a widget (by slug); nullable when the zone is empty
    widget_id = SlugField(null=True)
    # free-form JSON payload for the zone
    data = JSONField(null=True)
class Experiment(models.Model):
    """An A/B experiment, keyed by name, with JSON-encoded alternatives and
    an optional gargoyle switch controlling enrollment."""
    name = models.CharField(primary_key=True, max_length=128)
    description = models.TextField(default="", blank=True, null=True)
    # NOTE(review): default is the *string* "{}" — presumably the JSONField
    # parses it into a dict; confirm against the field implementation.
    alternatives = JSONField(default="{}", blank=True)
    relevant_goals = models.TextField(default="", null=True, blank=True)
    switch_key = models.CharField(default="",
                                  max_length=50,
                                  null=True,
                                  blank=True)
    state = models.IntegerField(default=CONTROL_STATE, choices=STATES)

    start_date = models.DateTimeField(default=datetime.datetime.now,
                                      blank=True,
                                      null=True,
                                      db_index=True)
    end_date = models.DateTimeField(blank=True, null=True)

    def __unicode__(self):
        return self.name

    @classmethod
    def show_alternative(cls, experiment_name, experiment_user, alternative,
                         experiment_manager):
        """Decide whether *alternative* should be shown to *experiment_user*.

        Returns True when the user's (possibly newly assigned) enrollment
        matches *alternative*; unknown or disabled experiments fall back to
        the control group. Raises on an invalid experiment state.
        """
        try:
            experiment = experiment_manager[
                experiment_name]  # use cache where possible
        except KeyError:
            return alternative == CONTROL_GROUP

        if experiment.state == CONTROL_STATE:
            return alternative == CONTROL_GROUP

        if experiment.state == GARGOYLE_STATE:
            if not gargoyle.is_active(experiment.switch_key,
                                      experiment_user.request):
                return alternative == CONTROL_GROUP

        if experiment.state != ENABLED_STATE and experiment.state != GARGOYLE_STATE:
            raise Exception("Invalid experiment state %s!" % experiment.state)

        # Add new alternatives to experiment model
        if alternative not in experiment.alternatives:
            experiment.alternatives[alternative] = {}
            experiment.alternatives[alternative]['enabled'] = True
            experiment.save()

        # Lookup User alternative
        assigned_alternative = experiment_user.get_enrollment(experiment)

        # No alternative so assign one.
        # FIX: dict.keys() is a view on Python 3 and random.choice requires a
        # sequence — wrap in list() (also correct on Python 2).
        if assigned_alternative is None:
            assigned_alternative = random.choice(
                list(experiment.alternatives.keys()))
            experiment_user.set_enrollment(experiment, assigned_alternative)

        return alternative == assigned_alternative

    def to_dict(self):
        """Plain-dict representation used by the admin/results views."""
        data = {
            'name': self.name,
            'edit_url': reverse('experiments:results',
                                kwargs={'name': self.name}),
            'start_date': self.start_date,
            'end_date': self.end_date,
            'state': self.state,
            'switch_key': self.switch_key,
            'description': self.description,
            'relevant_goals': self.relevant_goals,
        }
        return data

    def to_dict_serialized(self):
        return simplejson.dumps(self.to_dict(), cls=DjangoJSONEncoder)

    def save(self, *args, **kwargs):
        """Keep the gargoyle Switch in sync with this experiment, then save."""
        # Delete existing switch
        if getattr(settings, 'EXPERIMENTS_SWITCH_AUTO_DELETE', True):
            try:
                Switch.objects.get(key=Experiment.objects.get(
                    name=self.name).switch_key).delete()
            except (Switch.DoesNotExist, Experiment.DoesNotExist):
                pass

        # Create new switch
        if self.switch_key and getattr(settings,
                                       'EXPERIMENTS_SWITCH_AUTO_CREATE', True):
            try:
                Switch.objects.get(key=self.switch_key)
            except Switch.DoesNotExist:
                Switch.objects.create(
                    key=self.switch_key,
                    label=getattr(settings, 'EXPERIMENTS_SWITCH_LABEL',
                                  "Experiment: %s") % self.name,
                    description=self.description)

        # a gargoyle-controlled state without a switch key is meaningless
        if not self.switch_key and self.state == 2:
            self.state = 0

        super(Experiment, self).save(*args, **kwargs)

    def delete(self, *args, **kwargs):
        """Delete the linked Switch (best effort), then the experiment."""
        # Delete existing switch.
        # FIX: also catch Experiment.DoesNotExist — the inner
        # Experiment.objects.get() can raise it (save() already handles both).
        if getattr(settings, 'EXPERIMENTS_SWITCH_AUTO_DELETE', True):
            try:
                Switch.objects.get(key=Experiment.objects.get(
                    name=self.name).switch_key).delete()
            except (Switch.DoesNotExist, Experiment.DoesNotExist):
                pass

        super(Experiment, self).delete(*args, **kwargs)
class Release(Model):
    """
    A release is generally created when a new version is pushed into a
    production state.

    Older project-scoped variant: a release belongs to exactly one project
    and is unique per (project, version).
    """
    __core__ = False

    project = FlexibleForeignKey('sentry.Project')
    version = models.CharField(max_length=64)
    # ref might be the branch name being released
    ref = models.CharField(max_length=64, null=True, blank=True)
    url = models.URLField(null=True, blank=True)
    date_added = models.DateTimeField(default=timezone.now)
    date_started = models.DateTimeField(null=True, blank=True)
    date_released = models.DateTimeField(null=True, blank=True)
    # arbitrary data recorded with the release
    # NOTE(review): mutable default dict — shared across instances unless the
    # JSONField implementation copies it; confirm.
    data = JSONField(default={})
    new_groups = BoundedPositiveIntegerField(default=0)
    # generally the release manager, or the person initiating the process
    owner = FlexibleForeignKey('sentry.User', null=True, blank=True)

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_release'
        unique_together = (('project', 'version'),)

    __repr__ = sane_repr('project_id', 'version')

    @classmethod
    def get_cache_key(cls, project_id, version):
        # 'release:2:' prefix versions the cache schema.
        # NOTE(review): hashlib.md5 requires bytes on Python 3 — this assumes
        # `version` is a byte string (Python 2 era code); verify on upgrade.
        return 'release:2:%s:%s' % (project_id, md5(version).hexdigest())

    @classmethod
    def get(cls, project, version):
        """Fetch a release through the cache; returns None when missing.

        A cache value of -1 is the negative-lookup sentinel (cached 300s).
        """
        cache_key = cls.get_cache_key(project.id, version)
        release = cache.get(cache_key)
        if release is None:
            try:
                release = cls.objects.get(
                    project=project,
                    version=version,
                )
            except cls.DoesNotExist:
                release = -1
            cache.set(cache_key, release, 300)
        if release == -1:
            return
        return release

    @classmethod
    def get_or_create(cls, project, version, date_added):
        """Get or create the release, caching the result for an hour."""
        cache_key = cls.get_cache_key(project.id, version)
        release = cache.get(cache_key)
        if release in (None, -1):
            # TODO(dcramer): if the cache result is -1 we could attempt a
            # default create here instead of default get
            release = cls.objects.get_or_create(
                project=project,
                version=version,
                defaults={
                    'date_added': date_added,
                },
            )[0]
            cache.set(cache_key, release, 3600)
        return release

    @property
    def short_version(self):
        # a 40-char version is presumably a commit sha1 — show 12 chars
        if len(self.version) == 40:
            return self.version[:12]
        return self.version
class Room(models.Model):
    """A room listing (sublease, fixed-period rental, or roommate search),
    with Korean display-text helpers (the p* methods)."""

    # FIX: pass the callable `timezone.now` (was `timezone.now()`), otherwise
    # the default is frozen to the date the module was first imported.
    start_date = models.DateField(default=timezone.now)
    end_date = models.DateField(default=timezone.now)
    rent_type = models.IntegerField(default=-1)  # 1: sublease, 2: period rental, 3: roommate
    roomate_num = models.IntegerField(default=-1)  # roommates wanted; 0 for sublease/rental
    building_type = models.IntegerField(default=-1)  # 1: apartment, 2: officetel, 3: villa
    period = models.IntegerField(default=-1)  # 1: daily, 2: monthly rent, 3: jeonse
    cost = models.IntegerField(default=-1)  # in units of 10,000 KRW (2 == 20,000 KRW)
    university = models.IntegerField(default=-1)
    gender = models.IntegerField(default=-1)  # 1: male, 2: female
    addr_gu = models.CharField(max_length=20)
    addr_dong = models.CharField(max_length=20)
    image1 = models.ImageField(blank=False, upload_to='images/')
    image2 = models.ImageField(blank=True, null=True, upload_to='images/')
    image3 = models.ImageField(blank=True, null=True, upload_to='images/')
    detail = models.TextField(default="", blank=True, null=True)
    deposit = models.IntegerField(default=-1)  # security deposit
    rooms = models.IntegerField(default=-1)
    # NOTE(review): shared mutable default dict — Django recommends a callable
    # default; left as-is to avoid touching migration state. Confirm.
    options = JSONField(
        default={
            'wifi': 0,
            'air_conditioner': 0,
            'tv': 0,
            'doorlock': 0,
            'washer': 0,
            'induction': 0,
            'closet': 0,
            'desk': 0,
            'bed': 0,
            'duplex': 0
        })
    address = models.CharField(max_length=50, default=" ")
    address_detail = models.CharField(max_length=50, default=" ")
    creator = models.CharField(max_length=150, default=" ")
    title = models.CharField(max_length=50, default=" ")

    def pGender(self):
        """Gender display text; None when unset."""
        if self.gender == 1:
            return "남"
        if self.gender == 2:
            return "여"

    def pRentType(self):
        """Rent-type display text; None when unset."""
        if self.rent_type == 1:
            return "양도"
        if self.rent_type == 2:
            return "대여"
        if self.rent_type == 3:
            return "룸메이트"

    def pBuildingType(self):
        """Building-type display text; None when unset."""
        if self.building_type == 1:
            return "아파트"
        if self.building_type == 2:
            return "오피스텔"
        if self.building_type == 3:
            return "빌라"

    def pPeriod(self):
        """Rental-period display text; None when unset."""
        if self.period == 1:
            return "하루 "
        if self.period == 2:
            return "월 "
        # FIX: this condition was `== 2` (duplicated), making the jeonse
        # label unreachable.
        if self.period == 3:
            return "전세 "

    def pCost(self):
        """Cost display text; monthly rent shows 'cost/deposit'."""
        if self.period == 1:
            return str(self.cost)
        if self.period == 2:
            return str(self.cost) + "/" + str(self.deposit)
        if self.period == 3:
            return str(self.cost)

    def pOptions(self):
        # NOTE(review): the original loop body was the bare expression
        # `self.options` — a no-op — and the method returns None. It was
        # presumably meant to collect the enabled options; behavior is kept
        # as a no-op pending confirmation. Renamed the local that shadowed
        # builtin `list` and fixed the "dest" typo ("desk" per options keys).
        option_keys = [
            "wifi", "air_conditioner", "tv", "doorlock", "washer",
            "induction", "closet", "desk", "bed", "duplex"
        ]
        for key in option_keys:
            if key in self.options:
                pass  # TODO: build and return the list of enabled options
class SponsorPage(models.Model):
    """A page of sponsor logos, laid out in columns defined by the `sponsors`
    JSON structure and rendered into a single sprite image plus cached HTML."""
    name = models.CharField(_('name'), max_length=120)
    # list of columns: [{'name': ..., 'sponsors': [pk, ...]}, ...]
    # NOTE(review): mutable default dict shared across instances unless the
    # JSONField copies it — confirm.
    sponsors = JSONField(_('sponsors'), default={})
    # rendered HTML cache, rebuilt on save()
    _html = models.TextField(blank=True, editable=False)
    sprite = models.ImageField(upload_to='sponsorzy/sprite', blank=True)

    def populated_sponsors(self):
        """Resolve sponsor pks into objects, grouped per column, pairing each
        sponsor with its vertical sprite offset (negative, for CSS
        background-position — presumably; confirm against the template)."""
        result = []
        offset = 0
        for column in self.sponsors:
            result_group = {'name': column['name'], 'sponsors': []}
            sponsor_objects = Sponsor.objects.in_bulk(column['sponsors'])
            for sponsor_pk in column['sponsors']:
                try:
                    sponsor = sponsor_objects[sponsor_pk]
                except KeyError:
                    # stale pk in the JSON — skip silently
                    pass
                else:
                    result_group['sponsors'].append((offset, sponsor))
                    offset -= sponsor.size()[1]
            result.append(result_group)
        return result

    def render_sprite(self):
        """Stack all sponsor logos vertically into one RGBA sprite image and
        save it to the `sprite` field (replacing any previous file)."""
        sponsor_ids = []
        for column in self.sponsors:
            sponsor_ids.extend(column['sponsors'])
        sponsors = Sponsor.objects.in_bulk(sponsor_ids)
        # sprite is as wide as the widest logo, as tall as all logos stacked
        total_width = 0
        total_height = 0
        for sponsor in sponsors.values():
            w, h = sponsor.size()
            total_width = max(total_width, w)
            total_height += h
        if not total_height:
            # no sponsors -> nothing to render
            return
        sprite = Image.new('RGBA', (total_width, total_height))
        offset = 0
        # NOTE(review): `i` is unused; also `sponsors[sponsor_id]` raises
        # KeyError for a stale pk, while populated_sponsors() tolerates one —
        # verify which behavior is intended.
        for i, sponsor_id in enumerate(sponsor_ids):
            sponsor = sponsors[sponsor_id]
            simg = Image.open(sponsor.logo.path)
            thumb_size = sponsor.size()
            # FIXME: This is too complicated now.
            if simg.size[0] > thumb_size[0] or simg.size[1] > thumb_size[1]:
                # scale down preserving aspect ratio to fit the thumb box
                # NOTE(review): on Python 3 `/` yields floats; PIL's resize
                # expects ints — presumably Python 2 era code; verify.
                size = (
                    min(thumb_size[0],
                        simg.size[0] * thumb_size[1] / simg.size[1]),
                    min(thumb_size[1],
                        simg.size[1] * thumb_size[0] / simg.size[0])
                )
                simg = simg.resize(size, Image.ANTIALIAS)
            # center the logo inside its thumb cell
            sprite.paste(simg, (
                int((thumb_size[0] - simg.size[0]) / 2),
                int(offset + (thumb_size[1] - simg.size[1]) / 2),
            ))
            offset += thumb_size[1]
        imgstr = BytesIO()
        sprite.save(imgstr, 'png')
        if self.sprite:
            # drop the previous sprite file without triggering a model save
            self.sprite.delete(save=False)
        self.sprite.save('sponsorzy/sprite/%s-%d.png' % (self.name,
                                                         time.time()),
                         ContentFile(imgstr.getvalue()), save=False)

    def html(self):
        return self._html
    # expose the cached HTML read-only
    html = property(fget=html)

    def save(self, *args, **kwargs):
        """Re-render the sprite and the cached HTML before persisting."""
        self.render_sprite()
        self._html = render_to_string('sponsors/page.html', {
            'sponsors': self.populated_sponsors(),
            'page': self,
        })
        return super(SponsorPage, self).save(*args, **kwargs)

    def __unicode__(self):
        return self.name