class Ground(polymodel.PolyModel):
  '''Root class for stuff that's on the ground layer.'''

  # has a shape definition from open maps
  nodes = ndb.LocalStructuredProperty(Node, repeated=True)

  def package(self, packaged=None):
    # Avoid a shared mutable default argument.
    if packaged is None:
      packaged = {}
    packaged.update({
        'id': self.key.id(),
        'types': self.class_,
        'nodes': [node.package() for node in self.nodes]
    })
    try:
      subtype = packaged['subtype']
      packaged['types'].append(subtype)
      del packaged['subtype']
    except KeyError:
      pass
    return packaged
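# Usage sketch (illustrative, not from the source): a hypothetical subclass
# passes a 'subtype' hint into package(), which folds it into 'types' and
# drops the temporary key. Assumes the Node model and datastore context above.
class Tree(Ground):  # hypothetical subclass for illustration only
  def package(self, packaged=None):
    return super(Tree, self).package({'subtype': 'tree'})

# tree = Tree(nodes=[])
# tree.put()
# data = tree.package()  # data['types'] ends with 'tree'; no 'subtype' key left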
class AuthIPWhitelistAssignments(ndb.Model):
  """A singleton entity with "identity -> AuthIPWhitelist to use" mapping.

  Entity key is ip_whitelist_assignments_key(). Parent entity is root_key().

  See AuthIPWhitelist for more info about IP whitelists.
  """

  class Assignment(ndb.Model):
    # Identity name to limit by IP whitelist. Unique key in 'assignments' list.
    identity = IdentityProperty()
    # Name of IP whitelist to use (see AuthIPWhitelist).
    ip_whitelist = ndb.StringProperty()
    # Why the assignment was created.
    comment = ndb.StringProperty()
    # When the assignment was created.
    created_ts = ndb.DateTimeProperty(auto_now_add=True)
    # Who created the assignment.
    created_by = IdentityProperty()

  # Holds all the assignments.
  assignments = ndb.LocalStructuredProperty(Assignment, repeated=True)
class GlobalStats(NdbModel):
  NAMESPACE = NAMESPACE

  name = ndb.StringProperty()
  token_count = ndb.IntegerProperty()
  unlocked_count = ndb.IntegerProperty()
  value = ndb.FloatProperty()  # Value in dollar
  # Value per other currency
  currencies = ndb.LocalStructuredProperty(CurrencyValue, repeated=True)  # type: list[CurrencyValue]
  market_cap = ndb.ComputedProperty(
      lambda self: (self.value or 0) * self.unlocked_count, indexed=False)

  @property
  def id(self):
    return self.key.id().decode('utf-8')

  @classmethod
  def create_key(cls, currency):
    return ndb.Key(cls, currency, namespace=NAMESPACE)

  @classmethod
  def list(cls):
    return cls.query()
class InstanceTemplateRevision(ndb.Model):
  """A specific revision of an instance template in the config.

  Key:
    id: Checksum of the instance template config.
    parent: InstanceTemplate.
  """
  # List of ndb.Keys for the InstanceGroupManagers.
  active = ndb.KeyProperty(kind=InstanceGroupManager, repeated=True)
  # Enable external network with automatic IP assignment.
  auto_assign_external_ip = ndb.BooleanProperty(indexed=False)
  # rpc_messages.Dimensions describing instances created from this template.
  dimensions = msgprop.MessageProperty(rpc_messages.Dimensions)
  # Disk size in GiB for instances created from this template.
  disk_size_gb = ndb.IntegerProperty(indexed=False)
  # Disk type for instances created from this template.
  disk_type = ndb.StringProperty(indexed=False)
  # List of ndb.Keys for drained InstanceGroupManagers.
  drained = ndb.KeyProperty(kind=InstanceGroupManager, repeated=True)
  # Name of the image for instances created from this template.
  image_name = ndb.StringProperty(indexed=False)
  # Project containing the image specified by image_name.
  image_project = ndb.StringProperty(indexed=False)
  # GCE machine type for instances created from this template.
  machine_type = ndb.StringProperty(indexed=False)
  # Initial metadata to apply when creating instances from this template.
  metadata = ndb.JsonProperty()
  # Minimum CPU platform for instances created from this template.
  min_cpu_platform = ndb.StringProperty(indexed=False)
  # Network URL for this template.
  network_url = ndb.StringProperty(indexed=False)
  # Project to create the instance template in.
  project = ndb.StringProperty(indexed=False)
  # List of service accounts available to instances created from this template.
  service_accounts = ndb.LocalStructuredProperty(ServiceAccount, repeated=True)
  # Initial list of tags to apply when creating instances from this template.
  tags = ndb.StringProperty(indexed=False, repeated=True)
  # URL of the instance template created from this entity.
  url = ndb.StringProperty(indexed=False)
class Contact(EndpointsModel):
  """A person or group that can be used as a creator or a contact."""

  class Command(EndpointsModel):
    """A single menu command that is part of a Contact."""

    class CommandType(messages.Enum):
      TAKE_A_NOTE = 1
      POST_AN_UPDATE = 2

    type = msgprop.EnumProperty(CommandType, required=True)

  class ContactType(messages.Enum):
    INDIVIDUAL = 1
    GROUP = 2

  _message_fields_schema = ("id", "acceptCommands", "acceptTypes",
                            "displayName", "imageUrls", "phoneNumber",
                            "priority", "source", "type")

  user = EndpointsUserProperty(required=True, raise_unauthorized=True)
  acceptCommands = ndb.LocalStructuredProperty(Command, repeated=True)
  acceptTypes = ndb.StringProperty(repeated=True)
  displayName = ndb.StringProperty(required=True)
  imageUrls = ndb.StringProperty(repeated=True)
  phoneNumber = ndb.StringProperty()
  priority = ndb.IntegerProperty()
  source = ndb.StringProperty()
  speakableName = ndb.StringProperty()
  type = msgprop.EnumProperty(ContactType)

  def IdSet(self, value):
    if not isinstance(value, basestring):
      raise TypeError("ID must be a string.")
    self.UpdateFromKey(ndb.Key("User", self.user.email(), Contact, value))

  @EndpointsAliasProperty(setter=IdSet, required=True)
  def id(self):
    if self.key is not None:
      return self.key.pairs()[1][1]
class User(ndb.Model):
  """Authentication class
  - stores account credentials
  - stores game related bonuses
    - technologies
  - stores game related metadata
    - race
    - gold and turns
  """
  username = ndb.StringProperty(required=True, indexed=False)
  password = ndb.StringProperty(required=True, indexed=False,
                                validator=hash_password)
  date_created = ndb.DateTimeProperty(auto_now_add=True)
  date_updated = ndb.DateTimeProperty(auto_now=True)
  profile = ndb.LocalStructuredProperty(UserProfile, required=True)
  username_lc = ndb.ComputedProperty(lambda self: self.username.lower())

  @classmethod
  def get_by_password(cls, username, password):
    user = cls.query(cls.username_lc == username.lower()).get()
    if user is not None and user.check_password(password):
      return user
    return None

  @classmethod
  def exists(cls, username):
    query = cls.username_lc == username.lower()
    return cls.query(query).count() > 0

  @classmethod
  def new(cls, username, password, race):
    return cls(username=username, password=password,
               profile=UserProfile(race_id=race))

  def check_password(self, password):
    return security.check_password_hash(password, self.password)
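# Registration/login sketch (illustrative, not from the source). Passwords are
# hashed by the hash_password validator on assignment; get_by_password then
# verifies via security.check_password_hash. 'human' is a hypothetical race id.
def register_and_login(username, password):
  if User.exists(username):
    return None
  user = User.new(username, password, race='human')
  user.put()
  # Lookups are case-insensitive thanks to the username_lc computed property.
  return User.get_by_password(username.upper(), password)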
class Organization(ndb.Model):
  """Model that represents an organization."""

  #: Field storing identifier of the organization.
  org_id = ndb.StringProperty(
      required=True, verbose_name=translation.ugettext('Organization ID'))

  #: Field storing name of the organization.
  name = ndb.StringProperty(required=True,
                            verbose_name=translation.ugettext('Name'))
  name.group = translation.ugettext("1. Public Info")

  #: Description of the organization.
  description = ndb.TextProperty(required=True, indexed=False)

  #: URL to an image with organization logo.
  logo_url = ndb.StringProperty(indexed=False, validator=db.link_validator)

  #: Contact channels to the organization.
  contact = ndb.LocalStructuredProperty(contact_model.Contact,
                                        default=contact_model.Contact())

  #: Field storing a reference to the program in which
  #: the organization participates.
  program = ndb.KeyProperty(required=True)

  #: Status of the organization
  status = msgprop.EnumProperty(Status, required=True, default=Status.APPLYING)

  #: Specifies whether the organization has participated in the program before
  is_veteran = ndb.BooleanProperty(required=True, default=False)

  #: Collection of tags that describe the organization.
  tags = ndb.StringProperty(repeated=True)

  #: Main license that is used by the organization.
  license = ndb.StringProperty(choices=licenses.LICENSES)
class TagIndex(ndb.Model):
  """A custom index of builds by a tag.

  Entity key:
    Entity id is a build tag in the same "<key>:<value>" format that builds
    use. TagIndex has no parent.
  """

  MAX_ENTRY_COUNT = 1000

  # if incomplete, this TagIndex should not be used in search.
  # It is set to True if there are more than MAX_ENTRY_COUNT builds
  # for this tag.
  permanently_incomplete = ndb.BooleanProperty()

  # entries is a superset of all builds that have the tag equal to the id of
  # this entity. It may contain references to non-existent builds or builds
  # that do not actually have this tag; such builds must be ignored.
  #
  # It is sorted by build id in descending order.
  entries = ndb.LocalStructuredProperty(
      TagIndexEntry, repeated=True, indexed=False)
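# Maintenance sketch (illustrative, not from the source): add a build to the
# index, flipping it to permanently_incomplete once MAX_ENTRY_COUNT is
# exceeded. Assumes TagIndexEntry has a build_id field, as implied by the
# "sorted by build id" comment above.
@ndb.transactional
def add_to_tag_index(tag, build_id):
  idx = TagIndex.get_by_id(tag) or TagIndex(id=tag)
  if idx.permanently_incomplete:
    return
  idx.entries.append(TagIndexEntry(build_id=build_id))
  idx.entries.sort(key=lambda e: e.build_id, reverse=True)
  if len(idx.entries) > TagIndex.MAX_ENTRY_COUNT:
    idx.permanently_incomplete = True
    idx.entries = []
  idx.put()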
class Event(ndb.Model):
  name = ndb.StringProperty(indexed=True)
  alias = ndb.StringProperty(indexed=True)
  sport = ndb.StringProperty(indexed=True)
  caption = ndb.TextProperty()
  business_id = ndb.KeyProperty(kind='Business')  # updated for a parent event
  child_events = ndb.KeyProperty(kind='Event', repeated=True)  # updated for a parent event
  parent_event_id = ndb.KeyProperty(kind='Event', repeated=False)  # updated for child event
  playground_id = ndb.KeyProperty(kind='Playground', repeated=False)  # updated for child event
  description = ndb.TextProperty()  # updated for both parent and child event
  address = ndb.StructuredProperty(Address, repeated=False)
  contact_info = ndb.StructuredProperty(ContactInfo, repeated=False)  # updated for both parent and child event
  custom_info = ndb.LocalStructuredProperty(CustomInfo, repeated=True)
  start_datetime = ndb.DateTimeProperty(indexed=True)
  end_datetime = ndb.DateTimeProperty(indexed=True)
  review_stats = ndb.StructuredProperty(ReviewStats, repeated=False)
  status = ndb.IntegerProperty(indexed=True)  # 0 - pending creation, 1 - pending approval, 2 - approved
  featured = ndb.BooleanProperty()
  registration_open = ndb.BooleanProperty()
  owners = ndb.KeyProperty(kind=User, repeated=True)  # a list of users who have permission to update this event
  created_by = ndb.KeyProperty(kind=User)
  created_on = ndb.DateTimeProperty(auto_now_add=True)
  updated_by = ndb.KeyProperty(kind=User)
  updated_on = ndb.DateTimeProperty(auto_now=True)
class AuthIPWhitelistAssignments(ndb.Model, AuthVersionedEntityMixin):
  """A singleton entity with "identity -> AuthIPWhitelist to use" mapping.

  Entity key is ip_whitelist_assignments_key(). Parent entity is root_key().

  See AuthIPWhitelist for more info about IP whitelists.
  """
  # Disable useless in-process per-request cache.
  _use_cache = False

  class Assignment(ndb.Model):
    # Identity name to limit by IP whitelist. Unique key in 'assignments' list.
    identity = IdentityProperty()
    # Name of IP whitelist to use (see AuthIPWhitelist).
    ip_whitelist = ndb.StringProperty()
    # Why the assignment was created.
    comment = ndb.StringProperty()
    # When the assignment was created.
    created_ts = ndb.DateTimeProperty()
    # Who created the assignment.
    created_by = IdentityProperty()

  # Holds all the assignments.
  assignments = ndb.LocalStructuredProperty(Assignment, repeated=True)
class BaseSuspectedCL(ndb.Model):
  """Represents base information about a suspected cl."""

  # Repo or project name of the suspected CL, eg: chromium, etc.
  repo_name = ndb.StringProperty(indexed=True)

  # The Git hash revision of the suspected CL.
  revision = ndb.StringProperty(indexed=False)

  # The commit position of the suspected CL.
  # Might not be available for some repo.
  commit_position = ndb.IntegerProperty(indexed=False)

  # Time when the CL was identified as a suspect for the first time.
  identified_time = ndb.DateTimeProperty(indexed=True)

  # Time when the CL was updated.
  updated_time = ndb.DateTimeProperty(indexed=True)

  # The revert CL of this suspected CL.
  # Set only if Findit creates the reverting CL.
  revert_cl = ndb.LocalStructuredProperty(RevertCL, compressed=True)

  # A flag to indicate if revert is supposed to be done for this suspected CL.
  # It will be updated to True when Findit tries to revert it.
  should_be_reverted = ndb.BooleanProperty(indexed=True, default=False)

  # Status of the process of reverting culprit.
  revert_status = ndb.IntegerProperty(indexed=False, default=None)

  # The time the sheriff decided to take action on reverting this suspected CL.
  # If Findit's revert CL was committed, this will be the timestamp the sheriff
  # committed it. If the sheriff committed their own, this will be the
  # timestamp their revert CL was created. None if this suspected cl is a
  # false positive.
  sheriff_action_time = ndb.DateTimeProperty(indexed=False)

  # The reason why creating revert is skipped.
  skip_revert_reason = ndb.StringProperty(indexed=False)

  # The ID of the pipeline that is reverting the culprit, if any. This value
  # should be None if the culprit is not in the process of being reverted.
  revert_pipeline_id = ndb.StringProperty(indexed=False)

  # Time when the revert is created.
  revert_created_time = ndb.DateTimeProperty(indexed=True)

  # Status of the process of submitting revert.
  # The statuses are described in analysis_status.py
  revert_submission_status = ndb.IntegerProperty(indexed=False, default=None)

  # The ID of the pipeline that is submitting revert of the culprit, if any.
  # This value should be None if a revert is not being submitted.
  submit_revert_pipeline_id = ndb.StringProperty(indexed=False)

  # Time when the revert is committed.
  revert_committed_time = ndb.DateTimeProperty(indexed=True)

  # When the code-review of this culprit was notified.
  cr_notification_time = ndb.DateTimeProperty(indexed=True)

  # The status of code-review notification: None, RUNNING, COMPLETED, ERROR.
  cr_notification_status = ndb.IntegerProperty(indexed=True)

  @property
  def cr_notification_processed(self):
    return self.cr_notification_status in (analysis_status.COMPLETED,
                                           analysis_status.RUNNING)

  @property
  def cr_notified(self):
    return self.cr_notification_status == analysis_status.COMPLETED

  @property
  def revert_cl_url(self):
    return self.revert_cl.revert_cl_url if self.revert_cl else None

  @property
  def project_name(self):
    return self.repo_name

  @classmethod
  def _CreateKey(cls, repo_name, revision):  # pragma: no cover
    return ndb.Key(cls.__name__, '%s/%s' % (repo_name, revision))

  @classmethod
  def Create(cls, repo_name, revision, commit_position):  # pragma: no cover
    instance = cls(key=cls._CreateKey(repo_name, revision))
    instance.repo_name = repo_name
    instance.revision = revision
    instance.commit_position = commit_position
    instance.identified_time = time_util.GetUTCNow()
    return instance

  @classmethod
  def Get(cls, repo_name, revision):  # pragma: no cover
    return cls._CreateKey(repo_name, revision).get()

  def to_dict(self):
    """Overloads ndb.Model's to_dict() method to include @property fields."""
    result_dict = super(BaseSuspectedCL, self).to_dict()
    result_dict['cr_notification_processed'] = self.cr_notification_processed
    result_dict['cr_notified'] = self.cr_notified
    result_dict['revert_cl_url'] = self.revert_cl_url
    result_dict['project_name'] = self.repo_name
    result_dict['key'] = self.key.urlsafe()
    return result_dict

  def GetCulpritLink(self):  # pragma: no cover.
    raise NotImplementedError()

  def GenerateRevertReason(self,
                           build_id,
                           commit_position,
                           revision,
                           sample_step_name=None):  # pragma: no cover.
    raise NotImplementedError()
class Image(ndb.Model):
  image = ndb.BlobKeyProperty()
  comments = ndb.LocalStructuredProperty(Comment, repeated=True)
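# Behavior note (illustrative, not from the source): LocalStructuredProperty
# values are serialized opaquely inside the parent entity, so Comment fields
# cannot be filtered on in queries; the whole Image entity is read and written
# as one record. The Comment field names (author, text) and the entity id used
# below are assumptions.
img = Image.get_by_id(1234)  # hypothetical entity id
if img:
  img.comments.append(Comment(author='alice', text='nice shot'))
  img.put()  # rewrites the entity together with all embedded comments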
class Profile(ndb.Model):
  """Model that represents a profile that is registered on a per-program basis
  for a user.

  Parent:
    melange.models.user.User
  """

  #: A reference to program entity to which the profile corresponds.
  #: Each profile is created for exactly one program. If the same
  #: user participates in more than one of them, a separate profile must be
  #: created for each.
  program = ndb.KeyProperty(required=True)

  #: Required field storing a name that is to be displayed publicly.
  #: Can be a real name or a nick name or some other public alias.
  #: Public names can be any valid UTF-8 text.
  public_name = ndb.StringProperty(required=True)

  #: Required field storing first name of the profile. Can only be ASCII,
  #: not UTF-8 text, because it may be used as a shipping address
  #: and such characters may not be printable.
  first_name = ndb.StringProperty(required=True)

  #: Required field storing last name of the profile. Can only be ASCII,
  #: not UTF-8 text, because it may be used as a shipping address
  #: and such characters may not be printable.
  last_name = ndb.StringProperty(required=True)

  #: Optional field storing a URL to an image, for example a personal photo
  #: or a cartoon avatar. May be displayed publicly.
  photo_url = ndb.StringProperty(validator=db.link_validator)

  #: Optional field storing an identifier of Avatar figure which has been
  #: chosen by the profile.
  avatar = ndb.StringProperty(required=False)

  #: Contact options to the profile.
  contact = ndb.LocalStructuredProperty(
      contact_model.Contact, default=contact_model.Contact())

  #: Residential address of the registered profile. It is assumed that
  #: the person resides at this address.
  residential_address = ndb.StructuredProperty(
      address_model.Address, required=True)

  #: Shipping address of the registered profile. All possible program related
  #: packages will be sent to this address.
  shipping_address = ndb.StructuredProperty(address_model.Address)

  #: Birth date of the registered profile.
  birth_date = ndb.DateProperty(required=True)

  #: Field storing chosen T-Shirt style.
  tee_style = msgprop.EnumProperty(TeeStyle)

  #: Field storing chosen T-Shirt size.
  tee_size = msgprop.EnumProperty(TeeSize)

  #: Field storing gender of the registered profile.
  gender = msgprop.EnumProperty(Gender)

  #: Field storing answers to the question how the registered profile heard
  #: about the program.
  program_knowledge = ndb.TextProperty()

  #: Field storing student specific information which is relevant and set only
  #: if the registered profile has a student role for the program.
  student_data = ndb.StructuredProperty(StudentData)

  #: Field storing whether the registered profile has
  #: a student role for the program
  is_student = ndb.ComputedProperty(lambda self: bool(self.student_data))

  #: Field storing keys of rejected organizations on behalf of which
  #: the registered profile applied to the program. It indicates that the
  #: user administers such an organization, but it has not been accepted
  #: into the program.
  #: This field is used to distinguish between organization administrators
  #: for accepted organizations and users whose organizations unsuccessfully
  #: applied. The second group may still have some privileges.
  rejected_for = ndb.KeyProperty(repeated=True)

  #: Field storing keys of organizations for which the registered profile
  #: has a mentor role.
  #: This information is also stored in a connection entity between the
  #: specified organization and this profile.
  mentor_for = ndb.KeyProperty(repeated=True)

  #: Field storing whether the registered profile has a mentor
  #: role for at least one organization in the program.
  is_mentor = ndb.ComputedProperty(lambda self: bool(self.mentor_for))

  #: Field storing keys of organizations for which the registered profile
  #: has an organization administrator role.
  #: This information is also stored in a connection entity between the
  #: specified organization and this profile.
  #: Please note that organization administrator is considered a mentor as
  #: well. Therefore, each key, which is present in this field, can be also
  #: found in mentor_for field.
  admin_for = ndb.KeyProperty(repeated=True)

  #: Field storing whether the registered profile has an organization
  #: administrator role for at least one organization in the program.
  is_admin = ndb.ComputedProperty(lambda self: bool(self.admin_for))

  #: Field storing type of program wide messages that have been sent to
  #: the profile so far.
  sent_messages = msgprop.EnumProperty(MessageType, repeated=True)

  #: Field storing the status of the registered profile.
  status = msgprop.EnumProperty(Status, default=Status.ACTIVE)

  #: Field storing keys of Terms Of Service documents that have been accepted
  #: by the registered profile.
  accepted_tos = ndb.KeyProperty(repeated=True)

  #: Notification settings for the registered profile.
  notification_settings = ndb.StructuredProperty(
      NotificationSettings, required=True, default=NotificationSettings())

  @property
  def profile_id(self):
    """Unique identifier of the registered profile on per program basis.

    It is the same as the identifier of the underlying user entity. It means
    that all profiles for the same user for different programs hold the same
    identifier. May be displayed publicly and used as parts of various URLs
    that are specific to this profile.
    """
    return self.key.parent().id()

  @property
  def legal_name(self):
    """Full, legal name associated with the profile."""
    return '%s %s' % (self.first_name, self.last_name)

  @property
  def ship_to_address(self):
    """Address to which all program packages should be shipped."""
    if self.shipping_address:
      address = address_model.Address(**self.shipping_address.to_dict())
      if not address.name:
        address.name = self.legal_name
    else:
      address = address_model.Address(**self.residential_address.to_dict())
      address.name = self.legal_name
    return address
class TeamInvites(EndpointsModel):
  """ Class for message purposes only """
  _message_fields_schema = ("invites",)
  invites = ndb.LocalStructuredProperty(TeamInvite, repeated=True)
class Order(ndb.Model):
  # statuses
  CREATING = -2
  UNKNOWN = -1
  NOT_APPROVED = 1
  APPROVED = 2
  CLOSED = 3
  CANCELED = 4

  STATUS_MAPPING = {
      NOT_APPROVED: [
          u'не подтверждена',
          'waiting for confirmation',
          'not confirmed',
      ],
      APPROVED: [
          u'новая', 'new',
          u'ждет отправки',
          u'в пути', 'on the way',
          u'готовится', 'in progress',
          u'готово', 'ready',
      ],
      CLOSED: [
          u'закрыта', 'closed',
          u'доставлена', 'delivered',
      ],
      CANCELED: [
          u'отменена', 'cancelled',
      ],
      CREATING: [u'создается'],
  }

  PUSH_STATUSES = {
      UNKNOWN: u"Неизвестно",
      NOT_APPROVED: u"Ожидает подтверждения",
      APPROVED: u"Подтвержден",
      CANCELED: u"Отменен",
      CLOSED: u"Выполнен",
  }

  date = ndb.DateTimeProperty()
  sum = ndb.FloatProperty(indexed=False)
  initial_sum = ndb.FloatProperty(indexed=False)
  discount_sum = ndb.FloatProperty(default=0)
  bonus_sum = ndb.FloatProperty(default=0)
  items = ndb.JsonProperty()
  is_delivery = ndb.BooleanProperty(default=False)
  address = ndb.JsonProperty()
  venue_id = ndb.StringProperty()  # actually iiko organization id
  delivery_terminal_id = ndb.StringProperty()
  customer = ndb.KeyProperty()
  order_id = ndb.StringProperty()
  number = ndb.StringProperty()
  status = ndb.IntegerProperty(default=CREATING)
  comment = ndb.StringProperty(indexed=False)
  payment_type = ndb.StringProperty(indexed=False)
  alfa_order_id = ndb.StringProperty(indexed=False)
  source = ndb.StringProperty(choices=SOURCE_CHOICES, default=APP_SOURCE)
  created_in_iiko = ndb.DateTimeProperty()
  created = ndb.DateTimeProperty(auto_now_add=True)
  updated = ndb.DateTimeProperty(auto_now=True)
  cancel_requested = ndb.BooleanProperty(default=False, indexed=False)
  rate = ndb.LocalStructuredProperty(OrderRate)

  # TODO: Need to check English statuses (they may be incorrect).
  @classmethod
  def parse_status(cls, status):
    status = status.lower()
    for status_value, strings in cls.STATUS_MAPPING.items():
      for string in strings:
        if string in status:
          return status_value
    logging.warning("Unknown status: %s", status)
    return cls.UNKNOWN

  def set_status(self, status):
    self.status = self.parse_status(status)

  @classmethod
  def order_by_id(cls, order_id):
    return cls.query(cls.order_id == order_id).get()

  def to_dict(self):
    customer = self.customer.get()
    customer_id = customer.customer_id if customer else None
    serialized = {
        'customerId': customer_id,
        'restoId': self.key.id(),
        'orderId': self.order_id,
        'number': self.number,
        'status': self.status,
        'sum': self.sum,
        'items': self.items,
        'venueId': self.venue_id,
        'address': self.address,
    }
    return serialized

  def admin_dict(self, images_map):
    customer = self.customer.get()
    for item in self.items:
      item['images'] = images_map.get(item['id'], [])
    return {
        'order_id': self.order_id,
        'number': self.number,
        'address': self.address,
        'createdDate': int(time.mktime(self.created_in_iiko.timetuple())),
        'deliveryDate': int(time.mktime(self.date.timetuple())),
        'client_id': customer.customer_id,
        'phone': customer.phone,
        'client_name': customer.name,
        'client_custom_data': customer.custom_data,
        'comment': self.comment,
        'sum': self.sum,
        'items': self.items,
        'venue_id': self.delivery_terminal_id,
        'status': self.status,
        'cancel_requested': self.cancel_requested,
    }

  def get_change_logs(self):
    return OrderChangeLog.query(
        OrderChangeLog.order_id == self.order_id).order(
            -OrderChangeLog.created).fetch()

  @classmethod
  def load_from_object(cls, iiko_order):
    from methods.orders.change import do_load
    order_id = iiko_order['orderId']
    org_id = iiko_order['organization']
    order = cls.order_by_id(order_id)
    return do_load(order, order_id, org_id, iiko_order)

  @classmethod
  def load(cls, order_id, org_id):
    from methods.orders.change import do_load
    order = cls.order_by_id(order_id)
    return do_load(order, order_id, org_id)

  def reload(self):
    from methods.orders.change import do_load
    return do_load(self, self.order_id, self.venue_id)
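# Status-mapping sketch (illustrative, not from the source): incoming iiko
# status strings (Russian or English) are matched by substring against
# STATUS_MAPPING, falling back to UNKNOWN for anything unrecognized.
assert Order.parse_status(u'Готовится') == Order.APPROVED
assert Order.parse_status('Delivered') == Order.CLOSED
assert Order.parse_status('something odd') == Order.UNKNOWN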
class Seed(ndb.Model):
  # id = author:name
  placements = ndb.LocalStructuredProperty(Placement, repeated=True)
  flags = ndb.StringProperty(repeated=True)
  hidden = ndb.BooleanProperty(default=False)
  description = ndb.TextProperty()
  players = ndb.IntegerProperty(default=1)
  author = ndb.StringProperty(required=True)
  name = ndb.StringProperty(required=True)

  def mode(self):
    mode_opt = [
        int(f[5:]) for f in self.flags if f.lower().startswith("mode=")
    ]
    return GameMode(mode_opt[0]) if mode_opt else None

  def shared(self):
    shared_opt = [
        f[7:].replace("+", " ").split(" ")
        for f in self.flags
        if f.lower().startswith("shared=")
    ]
    return [share_map[i] for i in shared_opt[0]
            if i in share_map] if shared_opt else []

  @staticmethod
  def from_plando(lines, author, name, desc):
    s = Seed(id="%s:%s" % (author, name), name=name, author=author,
             description=desc)
    rawFlags, _, s.name = lines[0].partition("|")
    s.flags = [
        flag.replace(" ", "+") for flag in rawFlags.split(",")
        if not flag.lower().startswith("sync")
    ]
    for line in lines[1:]:
      loczone, _, stuffs = line.partition(":")
      loc, _, zone = loczone.partition("|")
      plc = Placement(location=loc, zone=zone)
      for stuff in stuffs.split(","):
        player, _, codeid = stuff.partition(".")
        if int(player) > s.players:
          s.players = int(player)
        code, _, id = codeid.partition("|")
        plc.stuff.append(Stuff(code=code, id=id, player=player))
      s.placements.append(plc)
    return s

  def to_plando_lines(self):
    outlines = ["%s|%s" % (",".join(self.flags), self.name)]
    for p in self.placements:
      outlines.append(p.location + ":" + ",".join(
          ["%s.%s|%s" % (s.player, s.code, s.id) for s in p.stuff]))
    return outlines

  def to_lines(self, player=1, extraFlags=[]):
    return ["%s|%s" % (",".join(extraFlags + self.flags), self.name)] + [
        "|".join((str(p.location), s.code, s.id, p.zone))
        for p in self.placements
        for s in p.stuff
        if int(s.player) == player
    ]
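# Round-trip sketch (illustrative, not from the source): a minimal plando with
# one placement. The location/code/id values ("100", "EX", "15", "KS", "1")
# and the zone name are made up; the line format follows from_plando above.
lines = [
    "mode=1,shared=keys+teleporters|Example Seed",
    "100|glades:1.EX|15,2.KS|1",
]
seed = Seed.from_plando(lines, author="ExampleAuthor", name="Example Seed",
                        desc="demo")
assert seed.players == 2
assert seed.to_plando_lines()[0].startswith("mode=1,shared=keys+teleporters|")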
class CompanyNew(ndb.Model):
  COFFEE_CITY = "02b1b1f7-4ec8-11e4-80cc-0025907e32e9"
  EMPATIKA_OLD = "95e4a970-b4ea-11e3-8bac-50465d4d1d14"
  EMPATIKA = "5cae16f4-4039-11e5-80d2-d8d38565926f"
  MIVAKO = "6a05d004-e03d-11e3-bae4-001b21b8a590"
  ORANGE_EXPRESS = "768c213e-5bc1-4135-baa3-45f719dbad7e"
  SUSHILAR = "a9d16dff-7680-43f1-b1a1-74784bc75f60"
  DIMASH = "d3b9ba12-ee62-11e4-80cf-d8d38565926f"
  TYKANO = "a637b109-218f-11e5-80c1-d8d385655247"
  BURGER_CLUB = "e7985b2c-a21b-11e4-80d2-0025907e32e9"
  PANDA = "09ac1efb-2578-11e5-80d2-d8d38565926f"
  SUSHI_TIME = "8b939502-9ec3-11e3-bae4-001b21b8a590"
  OMNOMNOM = "f3417644-308b-11e5-80c1-d8d385655247"
  HLEB = "986a6089-d0d7-11e5-80d8-d8d38565926f"
  CHAIHANA_LOUNGE = "d40c6833-6dda-11e5-80c1-d8d385655247"
  GIOTTO = "01e35456-40f8-11e5-80d2-d8d38565926f"
  HOUSE_MAFIA = "4a4b5985-a977-11e5-80d2-d8d38565926f"
  BON_APPETIT = "610ebd80-ada9-11e3-bae4-001b21b8a590"
  KUKSU = "4c015147-eaa4-11e5-80d8-d8d38565926f"

  iiko_login = ndb.StringProperty()
  platius_login = ndb.StringProperty()
  iiko_org_id = ndb.StringProperty()
  platius_org_id = ndb.StringProperty()
  address = ndb.StringProperty(indexed=False)
  latitude = ndb.FloatProperty(indexed=False)
  longitude = ndb.FloatProperty(indexed=False)
  delivery_types = ndb.KeyProperty(kind=DeliveryType, repeated=True)
  payment_types = ndb.KeyProperty(kind=PaymentType, repeated=True)
  menu_categories = ndb.StringProperty(repeated=True, indexed=False)
  # TODO REMOVE: part of user-agent to identify app in alfa handler
  app_name = ndb.StringProperty(repeated=True)
  app_title = ndb.StringProperty()
  alpha_login = ndb.StringProperty(indexed=False)
  alpha_pass = ndb.StringProperty(indexed=False)
  card_button_text = ndb.StringProperty()
  card_button_subtext = ndb.StringProperty()
  is_iiko_system = ndb.BooleanProperty(default=False)
  new_endpoints = ndb.BooleanProperty(default=False)
  invitation_settings = ndb.LocalStructuredProperty(InvitationSettings)
  branch_gift_enable = ndb.BooleanProperty(default=False)
  rbcn_mobi = ndb.StringProperty(indexed=False)
  auto_token = ndb.StringProperty(default='')
  review_enable = ndb.BooleanProperty(default=False)
  description = ndb.StringProperty()
  min_order_sum = ndb.IntegerProperty()
  email = ndb.StringProperty()
  support_emails = ndb.StringProperty(repeated=True)
  site = ndb.StringProperty()
  cities = ndb.StringProperty(repeated=True)
  phone = ndb.StringProperty()
  schedule = ndb.JsonProperty()
  holiday_schedule = ndb.StringProperty(indexed=False, default='')
  icon1 = ndb.BlobProperty()
  icon2 = ndb.BlobProperty()
  icon3 = ndb.BlobProperty()
  icon4 = ndb.BlobProperty()
  company_icon = ndb.BlobProperty()
  color = ndb.StringProperty()
  analytics_key = ndb.StringProperty()
  ios_push_channel = ndb.StringProperty()
  android_push_channel = ndb.StringProperty()
  additional_categories = ndb.LocalStructuredProperty(AdditionalCategory,
                                                      repeated=True)
  iiko_stop_lists_enabled = ndb.BooleanProperty(default=False)
  email_for_orders = ndb.StringProperty(indexed=False, repeated=True)

  @classmethod
  def get_payment_types(cls, venue_id):
    venue = cls.get_by_iiko_id(venue_id)
    output = []
    for item in ndb.get_multi(venue.payment_types):
      output.append(item.to_dict())
    return output

  def get_payment_type(self, type_id):
    if type_id:
      for item in ndb.get_multi(self.payment_types):
        if item.type_id == int(type_id):
          return item
    return None

  @classmethod
  def get_delivery_types(cls, company_id):
    company = cls.get_by_id(int(company_id))
    output = []
    for item in ndb.get_multi(company.delivery_types):
      output.append(item.to_dict())
    return output

  @classmethod
  def get_by_iiko_id(cls, iiko_org_id):
    return cls.query(cls.iiko_org_id == iiko_org_id).get()

  def get_timezone_offset(self):
    from methods import maps
    result = memcache.get('venue_%s_timezone' % self.iiko_org_id)
    if not result:
      result = maps.get_timezone_by_coords(self.latitude, self.longitude)
      memcache.set('venue_%s_timezone' % self.iiko_org_id, result,
                   time=24 * 3600)
    return result
class PostsubmitReport(ndb.Model):
  """Represents a postsubmit code coverage report."""

  # The Gitiles commit.
  gitiles_commit = ndb.StructuredProperty(
      GitilesCommit, indexed=True, required=True)

  # An optional increasing numeric number assigned to each commit.
  commit_position = ndb.IntegerProperty(indexed=True, required=False)

  # Timestamp when the commit was committed.
  commit_timestamp = ndb.DateTimeProperty(indexed=True, required=True)

  # TODO(crbug.com/939443): Make it required once data are backfilled.
  # Name of the luci builder that generates the data.
  bucket = ndb.StringProperty(indexed=True, required=False)
  builder = ndb.StringProperty(indexed=True, required=False)

  # Manifest of all the code checkouts when the coverage report is generated.
  # In descending order by the length of the relative path in the root
  # checkout.
  manifest = ndb.LocalStructuredProperty(
      DependencyRepository, repeated=True, indexed=False)

  # The top level coverage metric of the report.
  # For Clang based languages, the format is a list of 3 dictionaries that
  # correspond to 'line', 'function' and 'region' respectively, and each dict
  # has format: {'covered': 9526650, 'total': 12699841, 'name': u'|name|'}
  summary_metrics = ndb.JsonProperty(indexed=False, required=True)

  # The build id that uniquely identifies the build.
  build_id = ndb.IntegerProperty(indexed=False, required=True)

  # Used to control if a report is visible to the users, and the main use case
  # is to quarantine a 'bad' report. All the reports are visible to admins.
  visible = ndb.BooleanProperty(indexed=True, default=False, required=True)

  @classmethod
  def _CreateKey(cls, server_host, project, ref, revision, bucket, builder):
    return ndb.Key(
        cls, '%s$%s$%s$%s$%s$%s' %
        (server_host, project, ref, revision, bucket, builder))

  @classmethod
  def Create(cls,
             server_host,
             project,
             ref,
             revision,
             bucket,
             builder,
             commit_timestamp,
             manifest,
             summary_metrics,
             build_id,
             visible,
             commit_position=None):
    key = cls._CreateKey(server_host, project, ref, revision, bucket, builder)
    gitiles_commit = GitilesCommit(
        server_host=server_host, project=project, ref=ref, revision=revision)
    return cls(
        key=key,
        gitiles_commit=gitiles_commit,
        bucket=bucket,
        builder=builder,
        commit_position=commit_position,
        commit_timestamp=commit_timestamp,
        manifest=manifest,
        summary_metrics=summary_metrics,
        build_id=build_id,
        visible=visible)

  @classmethod
  def Get(cls, server_host, project, ref, revision, bucket, builder):
    entity = cls._CreateKey(server_host, project, ref, revision, bucket,
                            builder).get()
    if entity:
      return entity

    # TODO(crbug.com/939443): Remove following code once data are backfilled.
    legacy_key = ndb.Key(
        cls, '%s$%s$%s$%s' % (server_host, project, ref, revision))
    return legacy_key.get()
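# Key-format sketch (illustrative, not from the source): reports are keyed by
# '$'-joined components, so Get() can do a direct key lookup with no query.
# The host/project/ref/revision/builder values below are placeholders.
report = PostsubmitReport.Get(
    server_host='chromium.googlesource.com',
    project='chromium/src',
    ref='refs/heads/main',
    revision='deadbeef',
    bucket='ci',
    builder='linux-code-coverage')
# Equivalent direct key, per _CreateKey above:
# ndb.Key(PostsubmitReport,
#         'chromium.googlesource.com$chromium/src$refs/heads/main$deadbeef$'
#         'ci$linux-code-coverage')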
class LSPContact(ndb.Model):
  name = ndb.StringProperty()
  addresses = ndb.LocalStructuredProperty(SPAddress, repeated=True)
class FlakeAnalysisRequest(VersionedModel):
  """Represents a request to analyze a flake.

  The name of the flake will be the key, and the model is versioned.
  """

  # Name of the flake. Could be a step name, or a test name.
  # Assume there are no step and test with the same name.
  name = ndb.StringProperty(indexed=True)

  # Indicate whether the flake is a step or a test.
  is_step = ndb.BooleanProperty(indexed=True, default=True)

  # Indicate whether the flake is run on Swarming for some configuration.
  swarmed = ndb.BooleanProperty(indexed=False, default=False)

  # Indicate whether analysis on this flake is supported.
  supported = ndb.BooleanProperty(indexed=False, default=False)

  # The bug id for this flake on Monorail.
  bug_id = ndb.IntegerProperty(indexed=False)

  # The emails of users who request analysis of this flake.
  user_emails = ndb.StringProperty(indexed=False, repeated=True)

  # Whether the user emails have been obscured.
  user_emails_obscured = ndb.BooleanProperty(indexed=True, default=False)

  # When was the last edit of the email list.
  user_emails_last_edit = ndb.DateTimeProperty(indexed=True)

  # The build steps in which the flake occurred.
  build_steps = ndb.LocalStructuredProperty(
      BuildStep, compressed=True, repeated=True)

  # Executed analyses on different test configurations.
  analyses = ndb.KeyProperty(MasterFlakeAnalysis, repeated=True)

  # Arguments number differs from overridden method - pylint: disable=W0221
  @classmethod
  def Create(cls, name, is_step, bug_id):
    instance = super(cls, FlakeAnalysisRequest).Create(key=name)
    instance.name = name
    instance.is_step = is_step
    instance.bug_id = bug_id
    return instance

  def AddBuildStep(self, master_name, builder_name, build_number, step_name,
                   reported_time):
    """Adds a build step in which the flake is found."""
    for s in self.build_steps:
      if s.master_name == master_name and s.builder_name == builder_name:
        # For the same builder/tester, only analyze the earliest build.
        # TODO: re-evaluate cases that flakes might be re-introduced in
        # between.
        if s.build_number <= build_number:
          return False
        s.build_number = build_number
        s.reported_time = reported_time
        return True

    self.build_steps.append(
        BuildStep.Create(master_name, builder_name, build_number, step_name,
                         reported_time))
    return True

  def CopyFrom(self, other):
    """Copies all states from the given request."""
    assert isinstance(other, FlakeAnalysisRequest)
    self.is_step = other.is_step
    self.bug_id = other.bug_id
    self.user_emails = other.user_emails
    self.build_steps = other.build_steps
    self.analyses = other.analyses

  @property
  def on_cq(self):
    """Returns True if the flake is on Commit Queue."""
    return any(step.on_cq for step in self.build_steps)

  def _GetNormalizedConfigurationNames(self, master_name, builder_name):
    for build_step in self.build_steps:
      if ((build_step.master_name == master_name and
           build_step.builder_name == builder_name) or
          (build_step.wf_master_name == master_name and
           build_step.wf_builder_name == builder_name)):
        return build_step.wf_master_name, build_step.wf_builder_name
    return None, None

  def FindMatchingAnalysisForConfiguration(self, master_name, builder_name):
    # Returns the analysis that corresponds to the requested master and
    # builder.
    normalized_master_name, normalized_builder_name = (
        self._GetNormalizedConfigurationNames(master_name, builder_name))

    if not normalized_master_name or not normalized_builder_name:
      return None

    for analysis_key in self.analyses:
      analysis_master_name, analysis_builder_name = (
          MasterFlakeAnalysis.GetBuildConfigurationFromKey(analysis_key))
      if (analysis_master_name == normalized_master_name and
          analysis_builder_name == normalized_builder_name):
        return analysis_key.get()

    return None
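# Behavior sketch (illustrative, not from the source): for a given builder,
# only the earliest reported build is kept. The master/builder/step names are
# placeholders; BuildStep.Create is the factory referenced above.
request = FlakeAnalysisRequest.Create('fake_test', is_step=False, bug_id=None)
request.AddBuildStep('m', 'b', 120, 's', reported_time=None)
request.AddBuildStep('m', 'b', 100, 's', reported_time=None)  # earlier build wins
assert request.build_steps[0].build_number == 100
assert not request.AddBuildStep('m', 'b', 130, 's', reported_time=None)  # later build ignored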
class Sweeps(EndpointsModel):
  """ Class for message purposes only """
  _message_fields_schema = ("sweeps", "team_key")
  team_key = ndb.KeyProperty(kind=RoseboticsTeam)
  sweeps = ndb.LocalStructuredProperty(Sweep, repeated=True)
class DeliveryZone(ndb.Model):
  CITY = 0
  DISTRICT = 1
  ZONE = 2
  DEFAULT = 3
  NEAREST = 4
  RADIUS = 5
  SEARCH_TYPES = (CITY, DISTRICT, ZONE, DEFAULT, NEAREST, RADIUS)
  SEARCH_MAP = {
      CITY: u'По городу',
      DISTRICT: u'По району',
      ZONE: u'Собственная зона',
      DEFAULT: u'По умолчанию',
      NEAREST: u'Ближайшая',
      RADIUS: u'Радиус',
  }

  search_type = ndb.IntegerProperty(choices=SEARCH_TYPES, default=CITY)
  value = ndb.IntegerProperty()
  sequence_number = ndb.IntegerProperty()
  address = ndb.LocalStructuredProperty(Address)
  status = ndb.IntegerProperty(choices=STATUS_CHOICES,
                               default=STATUS_AVAILABLE)
  price = ndb.IntegerProperty(default=0)
  min_sum = ndb.IntegerProperty(default=0)
  free_delivery_sum = ndb.IntegerProperty()
  comment = ndb.StringProperty()
  geo_ribs = ndb.LocalStructuredProperty(GeoRib, repeated=True)

  @classmethod
  def get(cls, zone_key):
    from models.config.config import config, AUTO_APP, RESTO_APP
    from methods.proxy.resto.company import get_delivery_zone
    app_kind = config.APP_KIND
    if app_kind == AUTO_APP:
      return cls.get_by_id(zone_key.id())
    elif app_kind == RESTO_APP:
      return get_delivery_zone(zone_key)

  @staticmethod
  def generate_sequence_number():
    fastcounter.incr("delivery_zones", delta=100, update_interval=1)
    return fastcounter.get_count("delivery_zones") + random.randint(1, 100)

  @staticmethod
  def get_zones_in_order():
    return sorted([zone for zone in DeliveryZone.query().fetch()],
                  key=lambda zone: zone.sequence_number)

  def get_previous(self):
    zones = self.get_zones_in_order()
    index = zones.index(self)
    if index == 0:
      return None
    else:
      return zones[index - 1]

  def get_next(self):
    zones = self.get_zones_in_order()
    index = zones.index(self)
    if index == len(zones) - 1:
      return None
    else:
      return zones[index + 1]

  @property
  def polygon(self):
    points = []
    for rib in self.geo_ribs:
      points.append(rib.point1)
    return points

  def is_included(self, address):
    if not address.get('coordinates'):
      return False
    if not address['coordinates'].get('lat') or \
        not address['coordinates'].get('lon'):
      return False
    c = GeoPtProxy(lat=address['coordinates']['lat'],
                   lon=address['coordinates']['lon'])
    d = GeoPtProxy(lat=90.0, lon=180)
    amount = 0
    for rib in self.geo_ribs:
      a = GeoPtProxy(lat=rib.point1.lat, lon=rib.point1.lon)
      b = GeoPtProxy(lat=rib.point2.lat, lon=rib.point2.lon)
      result = GeoPtProxy.square(a, b, c) * GeoPtProxy.square(a, b, d) < 0.0 \
          and GeoPtProxy.square(c, d, a) * GeoPtProxy.square(c, d, b) < 0.0
      if result:
        amount += 1
    logging.info('address = %s' % address)
    logging.info('in zone = %s' % (amount % 2 == 1))
    return amount % 2 == 1
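# Geometry note (illustrative, not from the source): is_included() is an
# even-odd (ray casting) test. It draws a segment from the address point to a
# far-away point (lat 90, lon 180) and counts how many polygon edges
# (geo_ribs) it crosses; an odd count means the point is inside. A plain-tuple
# version of the same rule, with cross products in place of GeoPtProxy.square:
def _point_in_polygon(point, edges):
  def cross(p, q, r):  # signed area of the triangle p-q-r
    return (q[0] - p[0]) * (r[1] - p[1]) - (q[1] - p[1]) * (r[0] - p[0])
  far = (90.0, 180.0)
  crossings = 0
  for a, b in edges:
    # The segments (a, b) and (point, far) strictly intersect when each
    # segment's endpoints lie on opposite sides of the other segment.
    if (cross(a, b, point) * cross(a, b, far) < 0.0 and
        cross(point, far, a) * cross(point, far, b) < 0.0):
      crossings += 1
  return crossings % 2 == 1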
class TrackProgress(EndpointsModel):
  """ Class for message purposes only """
  _message_fields_schema = ("name", "progress", "unit_progress")
  name = ndb.StringProperty()
  progress = ndb.FloatProperty()
  unit_progress = ndb.LocalStructuredProperty(UnitProgress, repeated=True)
class DeliveryType(ndb.Model):
  MAX_DAYS = 7
  ONE_DAY_SEC = 86400

  MODE_SLOTS = 0
  MODE_EXACT_TIME = 1
  MODE_DATE_AND_SLOTS = 2
  MODE_DUAL = 3

  delivery_type = ndb.IntegerProperty(choices=DELIVERY_TYPES)
  status = ndb.IntegerProperty(choices=STATUS_CHOICES,
                               default=STATUS_UNAVAILABLE)
  min_time = ndb.IntegerProperty(default=0)
  max_time = ndb.IntegerProperty(default=ONE_DAY_SEC * MAX_DAYS)
  dual_mode = ndb.BooleanProperty(default=False)
  delivery_zones = ndb.KeyProperty(kind=DeliveryZone, repeated=True)
  delivery_slots = ndb.KeyProperty(kind=DeliverySlot, repeated=True)
  item_restrictions = ndb.KeyProperty(kind=MenuItem, repeated=True)
  category_restrictions = ndb.KeyProperty(kind=MenuCategory, repeated=True)
  schedule_restriction = ndb.LocalStructuredProperty(Schedule)
  today_schedule = ndb.LocalStructuredProperty(DaySchedule)
  default = ndb.BooleanProperty(default=False)

  @classmethod
  def create(cls, delivery_type):
    delivery = cls(id=delivery_type, delivery_type=delivery_type)
    delivery.put()
    return delivery

  def get_mode(self):
    if not self.delivery_slots:
      return self.MODE_EXACT_TIME
    slot_type = self.delivery_slots[0].get().slot_type
    if slot_type in (DeliverySlot.STRINGS, DeliverySlot.HOURS_FROM_MIDNIGHT):
      return self.MODE_DATE_AND_SLOTS
    if self.dual_mode:
      return self.MODE_DUAL
    return self.MODE_SLOTS

  def dict(self):
    mode = self.get_mode()
    return {
        'id': str(self.delivery_type),
        'name': DELIVERY_MAP[self.delivery_type],
        'time_picker_min': self.min_time,
        'time_picker_max': self.max_time,
        'slots': [
            slot.dict() for slot in sorted(
                [slot.get() for slot in self.delivery_slots],
                key=lambda x: x.value)
        ],
        'mode': mode,
        'time_required': mode in (self.MODE_EXACT_TIME,
                                  self.MODE_DATE_AND_SLOTS),
        'default': self.default,
    }
class Anomaly(internal_only_model.InternalOnlyModel): """Represents a change-point or step found in the data series for a test. An Anomaly can be an upward or downward change, and can represent an improvement or a regression. """ # Whether the alert should only be viewable by internal users. internal_only = ndb.BooleanProperty(indexed=True, default=False) # The time the alert fired. timestamp = ndb.DateTimeProperty(indexed=True, auto_now_add=True) # TODO(dberris): Remove these after migrating all issues to use the issues # repeated field, to allow an anomaly to be represented in multiple issues on # different Monorail projects. # === DEPRECATED START === # Note: -1 denotes an invalid alert and -2 an ignored alert. # By default, this is None, which denotes a non-triaged alert. bug_id = ndb.IntegerProperty(indexed=True) # This is the project to which an anomaly is associated with, in the issue # tracker service. project_id = ndb.StringProperty(indexed=True, default='chromium') # === DEPRECATED END === # AlertGroups used for grouping groups = ndb.KeyProperty(indexed=True, repeated=True) # This is the list of issues associated with the anomaly. We're doing this to # allow a single anomaly to be represented in multiple issues in different # issue trackers. issues = ndb.StructuredProperty(Issue, indexed=True, repeated=True) # This field aims to replace the 'bug_id' field serving as a state indicator. state = ndb.StringProperty( default='untriaged', choices=['untriaged', 'triaged', 'ignored', 'invalid']) # The subscribers who recieve alerts subscriptions = ndb.LocalStructuredProperty(Subscription, repeated=True) subscription_names = ndb.StringProperty(indexed=True, repeated=True) # Each Alert is related to one Test. test = ndb.KeyProperty(indexed=True) statistic = ndb.StringProperty(indexed=True) # We'd like to be able to query Alerts by Master, Bot, and Benchmark names. master_name = ndb.ComputedProperty( lambda self: utils.TestPath(self.test).split('/')[0], indexed=True) bot_name = ndb.ComputedProperty( lambda self: utils.TestPath(self.test).split('/')[1], indexed=True) benchmark_name = ndb.ComputedProperty( lambda self: utils.TestPath(self.test).split('/')[2], indexed=True) # Each Alert has a revision range it's associated with; however, # start_revision and end_revision could be the same. start_revision = ndb.IntegerProperty(indexed=True) end_revision = ndb.IntegerProperty(indexed=True) # The revisions to use for display, if different than point id. display_start = ndb.IntegerProperty(indexed=False) display_end = ndb.IntegerProperty(indexed=False) # Ownership data, mapping e-mails to the benchmark's owners' emails and # component as the benchmark's Monorail component ownership = ndb.JsonProperty() # The number of points before and after this anomaly that were looked at # when finding this anomaly. segment_size_before = ndb.IntegerProperty(indexed=False) segment_size_after = ndb.IntegerProperty(indexed=False) # The medians of the segments before and after the anomaly. median_before_anomaly = ndb.FloatProperty(indexed=False) median_after_anomaly = ndb.FloatProperty(indexed=False) # The standard deviation of the segments before the anomaly. std_dev_before_anomaly = ndb.FloatProperty(indexed=False) # The number of points that were used in the before/after segments. # This is also returned by FindAnomalies window_end_revision = ndb.IntegerProperty(indexed=False) # In order to estimate how likely it is that this anomaly is due to noise, # t-test may be performed on the points before and after. 
The t-statistic, # degrees of freedom, and p-value are potentially-useful intermediary results. t_statistic = ndb.FloatProperty(indexed=False) degrees_of_freedom = ndb.FloatProperty(indexed=False) p_value = ndb.FloatProperty(indexed=False) # Whether this anomaly represents an improvement; if false, this anomaly is # considered to be a regression. is_improvement = ndb.BooleanProperty(indexed=True, default=False) # Whether this anomaly recovered (i.e. if this is a step down, whether there # is a corresponding step up later on, or vice versa.) recovered = ndb.BooleanProperty(indexed=True, default=False) # If the TestMetadata alerted upon has a ref build, store the ref build. ref_test = ndb.KeyProperty(indexed=False) # The corresponding units from the TestMetaData entity. units = ndb.StringProperty(indexed=False) recipe_bisects = ndb.KeyProperty(repeated=True, indexed=False) pinpoint_bisects = ndb.StringProperty(repeated=True, indexed=False) # Additional Metadata # ==== # # Timestamps for the earliest and latest Row we used to determine whether this # is an anomaly. We use this to compute time-to-detection. earliest_input_timestamp = ndb.DateTimeProperty() latest_input_timestamp = ndb.DateTimeProperty() @property def percent_changed(self): """The percent change from before the anomaly to after.""" if self.median_before_anomaly == 0.0: return sys.float_info.max difference = self.median_after_anomaly - self.median_before_anomaly return 100 * difference / self.median_before_anomaly @property def absolute_delta(self): """The absolute change from before the anomaly to after.""" return self.median_after_anomaly - self.median_before_anomaly @property def direction(self): """Whether the change is numerically an increase or decrease.""" if self.median_before_anomaly < self.median_after_anomaly: return UP return DOWN def GetDisplayPercentChanged(self): """Gets a string showing the percent change.""" if abs(self.percent_changed) == sys.float_info.max: return FREAKIN_HUGE else: return '%.1f%%' % abs(self.percent_changed) def GetDisplayAbsoluteChanged(self): """Gets a string showing the absolute change.""" if abs(self.absolute_delta) == sys.float_info.max: return FREAKIN_HUGE else: return '%f' % abs(self.absolute_delta) def GetRefTestPath(self): if not self.ref_test: return None return utils.TestPath(self.ref_test) def SetIsImprovement(self, test=None): """Sets whether the alert is an improvement for the given test.""" if not test: test = self.GetTestMetadataKey().get() # |self.direction| is never equal to |UNKNOWN| (see the definition above) # so when the test improvement direction is |UNKNOWN|, |self.is_improvement| # will be False. self.is_improvement = (self.direction == test.improvement_direction) def GetTestMetadataKey(self): """Get the key for the TestMetadata entity of this alert. We are in the process of converting from Test entities to TestMetadata. Until this is done, it's possible that an alert may store either Test or TestMetadata in the 'test' KeyProperty. This gets the TestMetadata key regardless of what's stored. 
""" return utils.TestMetadataKey(self.test) @classmethod @ndb.tasklet def QueryAsync(cls, bot_name=None, bug_id=None, count_limit=0, deadline_seconds=50, inequality_property=None, is_improvement=None, key=None, keys_only=False, limit=100, master_name=None, max_end_revision=None, max_start_revision=None, max_timestamp=None, min_end_revision=None, min_start_revision=None, min_timestamp=None, recovered=None, subscriptions=None, start_cursor=None, test=None, test_keys=None, test_suite_name=None, project_id=None): if key: # This tasklet isn't allowed to catch the internal_only AssertionError. alert = yield ndb.Key(urlsafe=key).get_async() raise ndb.Return(([alert], None, 1)) # post_filters can cause results to be empty, depending on the shape of the # data and which filters are applied in the query and which filters are # applied after the query. Automatically chase cursors until some results # are found, but stay under the request timeout. results = [] deadline = time.time() + deadline_seconds while not results and time.time() < deadline: query = cls.query() equality_properties = [] if subscriptions: # Empty subscriptions is not allowed in query query = query.filter(cls.subscription_names.IN(subscriptions)) equality_properties.append('subscription_names') inequality_property = 'key' if is_improvement is not None: query = query.filter(cls.is_improvement == is_improvement) equality_properties.append('is_improvement') inequality_property = 'key' if bug_id is not None: if bug_id == '': query = query.filter(cls.bug_id == None) equality_properties.append('bug_id') inequality_property = 'key' elif bug_id != '*': query = query.filter(cls.bug_id == int(bug_id)) equality_properties.append('bug_id') inequality_property = 'key' # bug_id='*' translates to bug_id != None, which is handled with the # other inequality filters. 
if project_id is not None: query = query.filter(cls.project_id == project_id) equality_properties.append('project_id') inequality_property = 'key' if recovered is not None: query = query.filter(cls.recovered == recovered) equality_properties.append('recovered') inequality_property = 'key' if test or test_keys: if not test_keys: test_keys = [] if test: test_keys += [ utils.OldStyleTestKey(test), utils.TestMetadataKey(test) ] query = query.filter(cls.test.IN(test_keys)) query = query.order(cls.key) equality_properties.append('test') inequality_property = 'key' if master_name: query = query.filter(cls.master_name == master_name) equality_properties.append('master_name') inequality_property = 'key' if bot_name: query = query.filter(cls.bot_name == bot_name) equality_properties.append('bot_name') inequality_property = 'key' if test_suite_name: query = query.filter(cls.benchmark_name == test_suite_name) equality_properties.append('benchmark_name') inequality_property = 'key' query, post_filters = cls._InequalityFilters( query, equality_properties, inequality_property, bug_id, min_end_revision, max_end_revision, min_start_revision, max_start_revision, min_timestamp, max_timestamp) if post_filters: keys_only = False query = query.order(-cls.timestamp, cls.key) futures = [ query.fetch_page_async(limit, start_cursor=start_cursor, keys_only=keys_only) ] if count_limit: futures.append(query.count_async(count_limit)) query_duration = timing.WallTimeLogger('query_duration') with query_duration: yield futures results, start_cursor, more = futures[0].get_result() if count_limit: count = futures[1].get_result() else: count = len(results) logging.info('query_results_count=%d', len(results)) if results: logging.info('duration_per_result=%f', query_duration.seconds / len(results)) if post_filters: results = [ alert for alert in results if all( post_filter(alert) for post_filter in post_filters) ] if not more: start_cursor = None if not start_cursor: break raise ndb.Return((results, start_cursor, count)) @classmethod def _InequalityFilters(cls, query, equality_properties, inequality_property, bug_id, min_end_revision, max_end_revision, min_start_revision, max_start_revision, min_timestamp, max_timestamp): # A query cannot have more than one inequality filter. # inequality_property allows users to decide which property to filter in the # query, which can significantly affect performance. If other inequalities # are specified, they will be handled by post_filters. # If callers set inequality_property without actually specifying a # corresponding inequality filter, then reset the inequality_property and # compute it automatically as if it were not specified. if inequality_property == 'start_revision': if min_start_revision is None and max_start_revision is None: inequality_property = None elif inequality_property == 'end_revision': if min_end_revision is None and max_end_revision is None: inequality_property = None elif inequality_property == 'timestamp': if min_timestamp is None and max_timestamp is None: inequality_property = None elif inequality_property == 'bug_id': if bug_id != '*': inequality_property = None elif inequality_property == 'key': if equality_properties == [ 'subscription_names' ] and (min_start_revision or max_start_revision): # Use the composite index (subscription_names, start_revision, # -timestamp). See index.yaml. inequality_property = 'start_revision' else: inequality_property = None if inequality_property is None: # Compute a default inequality_property. 
# We prioritise the 'min' filters first because that lets us limit the # amount of data the Datastore instances might handle. if min_start_revision: inequality_property = 'start_revision' elif min_end_revision: inequality_property = 'end_revision' elif min_timestamp: inequality_property = 'timestamp' elif max_start_revision: inequality_property = 'start_revision' elif max_end_revision: inequality_property = 'end_revision' elif max_timestamp: inequality_property = 'timestamp' elif bug_id == '*': inequality_property = 'bug_id' post_filters = [] if not inequality_property: return query, post_filters if not datastore_hooks.IsUnalteredQueryPermitted(): # _DatastorePreHook will filter internal_only=False. index.yaml does not # specify indexes for `internal_only, $inequality_property, -timestamp`. # Use post_filters for all inequality properties. inequality_property = '' if bug_id == '*': if inequality_property == 'bug_id': logging.info('filter:bug_id!=None') query = query.filter(cls.bug_id != None).order(cls.bug_id) else: logging.info('post_filter:bug_id!=None') post_filters.append(lambda a: a.bug_id != None) # Apply the min filters before the max filters, because that lets us # optimise the query application for more recent data, reducing the amount # of data post-processing. if min_start_revision: min_start_revision = int(min_start_revision) if inequality_property == 'start_revision': logging.info('filter:min_start_revision=%d', min_start_revision) query = query.filter(cls.start_revision >= min_start_revision) query = query.order(cls.start_revision) else: logging.info('post_filter:min_start_revision=%d', min_start_revision) post_filters.append( lambda a: a.start_revision >= min_start_revision) if min_end_revision: min_end_revision = int(min_end_revision) if inequality_property == 'end_revision': logging.info('filter:min_end_revision=%d', min_end_revision) query = query.filter(cls.end_revision >= min_end_revision) query = query.order(cls.end_revision) else: logging.info('post_filter:min_end_revision=%d', min_end_revision) post_filters.append( lambda a: a.end_revision >= min_end_revision) if min_timestamp: if inequality_property == 'timestamp': logging.info('filter:min_timestamp=%d', time.mktime(min_timestamp.utctimetuple())) query = query.filter(cls.timestamp >= min_timestamp) else: logging.info('post_filter:min_timestamp=%d', time.mktime(min_timestamp.utctimetuple())) post_filters.append(lambda a: a.timestamp >= min_timestamp) if max_start_revision: max_start_revision = int(max_start_revision) if inequality_property == 'start_revision': logging.info('filter:max_start_revision=%d', max_start_revision) query = query.filter(cls.start_revision <= max_start_revision) query = query.order(-cls.start_revision) else: logging.info('post_filter:max_start_revision=%d', max_start_revision) post_filters.append( lambda a: a.start_revision <= max_start_revision) if max_end_revision: max_end_revision = int(max_end_revision) if inequality_property == 'end_revision': logging.info('filter:max_end_revision=%d', max_end_revision) query = query.filter(cls.end_revision <= max_end_revision) query = query.order(-cls.end_revision) else: logging.info('post_filter:max_end_revision=%d', max_end_revision) post_filters.append( lambda a: a.end_revision <= max_end_revision) if max_timestamp: if inequality_property == 'timestamp': logging.info('filter:max_timestamp=%d', time.mktime(max_timestamp.utctimetuple())) query = query.filter(cls.timestamp <= max_timestamp) else: logging.info('post_filter:max_timestamp=%d', 
time.mktime(max_timestamp.utctimetuple())) post_filters.append(lambda a: a.timestamp <= max_timestamp) return query, post_filters
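# --- Illustrative sketch (not part of the original module) -----------------
# The comments in _InequalityFilters note that a Datastore query may carry
# inequality filters on only one property; every other range condition has to
# be enforced in memory via post_filters. A minimal sketch of that pattern,
# assuming a hypothetical query over entities exposing `timestamp` and
# `end_revision` attributes:
def _example_single_inequality_query(cls, query, min_timestamp, max_end_revision):
    # Put the first range condition in the Datastore query itself.
    query = query.filter(cls.timestamp >= min_timestamp).order(cls.timestamp)
    # Any additional range condition becomes an in-memory post filter.
    post_filters = [lambda a: a.end_revision <= max_end_revision]
    results = query.fetch(100)
    return [a for a in results if all(f(a) for f in post_filters)]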
class Venue(ndb.Model): title = ndb.StringProperty(required=True, indexed=False) address = ndb.StructuredProperty(Address) description = ndb.StringProperty(indexed=False) extra_info = ndb.StringProperty() pic = ndb.StringProperty(indexed=False) coordinates = ndb.GeoPtProperty(required=True, indexed=False) schedule = ndb.LocalStructuredProperty(Schedule) time_break = ndb.LocalStructuredProperty(Schedule, repeated=True) delivery_types = ndb.LocalStructuredProperty(DeliveryType, repeated=True) phones = ndb.StringProperty(repeated=True) # use for sending sms emails = ndb.StringProperty(repeated=True) # use for sending email problem = ndb.StringProperty(indexed=False) active = ndb.BooleanProperty(required=True, default=False) type_deliveries = ndb.IntegerProperty(repeated=True) timezone_offset = ndb.IntegerProperty(default=3) # hours offset timezone_name = ndb.StringProperty() stop_lists = ndb.KeyProperty(kind=MenuItem, repeated=True) single_modifiers_stop_list = ndb.KeyProperty(kind=SingleModifier, repeated=True) group_choice_modifier_stop_list = ndb.KeyProperty(kind=GroupModifierChoice, repeated=True) promo_restrictions = ndb.KeyProperty(kind=Promo, repeated=True) payment_restrictions = ndb.KeyProperty(kind=PaymentType, repeated=True) wallet_restriction = ndb.BooleanProperty(default=False) default = ndb.BooleanProperty(default=False) legal = ndb.KeyProperty(LegalInfo) called_phone = ndb.StringProperty() @classmethod def get(cls, venue_id): from models.config.config import config, AUTO_APP, RESTO_APP, DOUBLEB_APP app_kind = config.APP_KIND if app_kind == AUTO_APP: return cls.get_by_id(int(venue_id)) elif app_kind in [RESTO_APP, DOUBLEB_APP]: if app_kind == DOUBLEB_APP: venue_id = int(venue_id) for venue in cls.fetch_venues(app_kind): if venue.key.id() == venue_id: return venue @classmethod def fetch_venues(cls, *args, **kwargs): from models.config.config import config, AUTO_APP, RESTO_APP, DOUBLEB_APP from methods.proxy.resto.venues import get_venues as resto_get_venues from methods.proxy.doubleb.venues import get_venues as doubleb_get_venues app_kind = config.APP_KIND if app_kind == AUTO_APP: return cls.query(*args, **kwargs).fetch() elif app_kind in [RESTO_APP, DOUBLEB_APP]: if app_kind == RESTO_APP: venues = resto_get_venues() else: venues = doubleb_get_venues() for venue in venues[:]: for name, value in kwargs.items(): if getattr(venue, name) != value: venues.remove(venue) return venues @classmethod def get_cities(cls): cities = [] for venue in cls.query(cls.active == True).fetch(): if venue.address.city not in cities: cities.append(venue.address.city) return cities def dynamic_info(self): items = [] for item in self.stop_lists: item = item.get() if item.status != STATUS_AVAILABLE and self.key in item.restrictions: continue items.append(str(item.key.id())) return { 'stop_list': { 'items': items, 'single_modifiers': [str(item.id()) for item in self.single_modifiers_stop_list], 'group_modifier_choices': [ str(item.get().choice_id) for item in self.group_choice_modifier_stop_list ] } } def get_delivery_type(self, delivery_type): for delivery in self.delivery_types: if delivery.delivery_type == delivery_type: return delivery def dict(self, user_location=None): distance = 0 if user_location: distance = location.distance(user_location, self.coordinates) dct = { 'id': str(self.key.id()), 'company_namespace': self.key.namespace(), 'distance': distance, 'title': self.title, 'address': self.description, 'pic': self.pic, 'lat': self.coordinates.lat, 'lon': self.coordinates.lon, 'coordinates': 
str(self.coordinates), 'is_open': self.is_open(), 'deliveries': [ delivery.dict() for delivery in self.delivery_types if delivery.status == STATUS_AVAILABLE ], 'schedule': self.schedule.dict() if self.schedule else [], 'schedule_str': self.schedule.get_days_str() if self.schedule else '', 'time_breaks': [time_break.dict() for time_break in self.time_break], 'time_breaks_str': [time_break.get_days_str() for time_break in self.time_break], 'called_phone': self.called_phone, 'extra_info': self.extra_info, 'text_color': 'FFFFFF', # for unified app } return dct def admin_dict(self): return { 'id': self.key.id(), 'title': self.title, 'address': self.description } @classmethod def get_suitable_venues(cls, city): return [ venue for venue in cls.query(cls.address.city == city.city).fetch() if venue.active ] def suitable_for_city(self, city): return self.address.city == city.city def is_open(self, minutes_offset=0): now = datetime.utcnow() + timedelta( minutes=minutes_offset) + timedelta(hours=self.timezone_offset) return working_hours.check(self.schedule, now) def update_address(self): from models.config.config import Config from methods import geocoder candidates = geocoder.get_houses_by_coordinates( self.coordinates.lat, self.coordinates.lon) if candidates: address = candidates[0] self.address = Address(**address['address']) cfg = Config.get() if self.address.country not in cfg.COUNTRIES: cfg.COUNTRIES.append(self.address.country) cfg.put() self.update_timezone() def update_timezone(self): from methods import timezone zone = timezone.get_time_zone(self.coordinates.lat, self.coordinates.lon) if zone: self.timezone_offset = zone['offset'] self.timezone_name = zone['name'] @classmethod def get_first_tz(cls): venue = cls.query().get() return venue.timezone_offset
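# --- Illustrative sketch (not part of the original model) ------------------
# Venue stores its timezone as a plain hour offset (timezone_offset), and
# is_open() shifts datetime.utcnow() by that offset before consulting
# working_hours.check(). A minimal sketch of the same local-time computation,
# assuming a hypothetical `venue` instance:
from datetime import datetime, timedelta

def _example_venue_local_time(venue, minutes_offset=0):
    # UTC now, shifted by the caller-supplied lookahead and the venue's offset.
    return (datetime.utcnow()
            + timedelta(minutes=minutes_offset)
            + timedelta(hours=venue.timezone_offset))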
class FlakeAnalysisRequest(VersionedModel): """Represents a request to analyze a flake. The name of the flake will be the key, and the model is versioned. """ # Name of the flake. Could be a step name, or a test name. # Assume no step and test share the same name. name = ndb.StringProperty(indexed=True) # Indicate whether the flake is a step or a test. is_step = ndb.BooleanProperty(indexed=True, default=True) # Indicate whether the flake is run on Swarming for some configuration. swarmed = ndb.BooleanProperty(indexed=False, default=False) # Indicate whether analysis on this flake is supported. supported = ndb.BooleanProperty(indexed=False, default=False) # The bug id for this flake on Monorail. bug_id = ndb.IntegerProperty(indexed=False) # The key to the Flake entity to be associated with the resulting analysis. flake_key = ndb.KeyProperty(Flake) # The reporter of this bug; use triggering_source as an enum to define this # value. The triggering source FINDIT_PIPELINES should be used when # Findit itself reports a bug. bug_reported_by = ndb.IntegerProperty(indexed=False) # The emails of users who request analysis of this flake. user_emails = ndb.StringProperty(indexed=False, repeated=True) # Whether the user emails have been obscured. user_emails_obscured = ndb.BooleanProperty(indexed=True, default=False) # When was the last edit of the email list. user_emails_last_edit = ndb.DateTimeProperty(indexed=True) # The build steps in which the flake occurred. build_steps = ndb.LocalStructuredProperty(BuildStep, compressed=True, repeated=True) # Executed analyses on different test configurations. analyses = ndb.KeyProperty(MasterFlakeAnalysis, repeated=True) # Arguments number differs from overridden method - pylint: disable=W0221 @classmethod def Create(cls, name, is_step, bug_id): instance = super(cls, FlakeAnalysisRequest).Create(key=name) instance.name = name instance.is_step = is_step instance.bug_id = bug_id return instance def AddBuildStep(self, master_name, builder_name, build_number, step_name, reported_time): """Adds a build step in which the flake is found.""" for s in self.build_steps: if s.master_name == master_name and s.builder_name == builder_name: # For the same builder/tester, only analyze the earliest build. # TODO: re-evaluate cases where flakes might be re-introduced in between. if s.build_number <= build_number: return False s.build_number = build_number s.reported_time = reported_time return True self.build_steps.append( BuildStep.Create(master_name, builder_name, build_number, step_name, reported_time)) return True def CopyFrom(self, other): """Copies all states from the given request.""" assert isinstance(other, FlakeAnalysisRequest) self.is_step = other.is_step self.bug_id = other.bug_id self.user_emails = other.user_emails self.build_steps = other.build_steps self.analyses = other.analyses @property def on_cq(self): """Returns True if the flake is on Commit Queue.""" return any(step.on_cq for step in self.build_steps) def _GetNormalizedConfigurationNames(self, master_name, builder_name): for build_step in self.build_steps: if ((build_step.master_name == master_name and build_step.builder_name == builder_name) or (build_step.wf_master_name == master_name and build_step.wf_builder_name == builder_name)): return build_step.wf_master_name, build_step.wf_builder_name return None, None def FindMatchingAnalysisForConfiguration(self, master_name, builder_name): # Returns the analysis that corresponds to the requested master and builder.
normalized_master_name, normalized_builder_name = ( self._GetNormalizedConfigurationNames(master_name, builder_name)) if not normalized_master_name or not normalized_builder_name: return None for analysis_key in self.analyses: analysis_master_name, analysis_builder_name = ( MasterFlakeAnalysis.GetBuildConfigurationFromKey(analysis_key)) if (analysis_master_name == normalized_master_name and analysis_builder_name == normalized_builder_name): return analysis_key.get() return None def Update(self, **kwargs): """Updates fields according to what's specified in kwargs. Fields specified in kwargs will be updated accordingly, while those not present in kwargs will be untouched. Args: **kwargs (dict): The keys in kwargs should match the name of the field you want to update, and the values should be the value you update it to. """ # TODO(crbug.com/772156): Refactor this into a base model. any_changes = False for arg, value in kwargs.iteritems(): current_value = getattr(self, arg, None) if current_value != value: setattr(self, arg, value) any_changes = True if any_changes: self.put()
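# --- Illustrative usage sketch (not part of the original module) -----------
# FlakeAnalysisRequest.Update() only calls put() when at least one of the
# given fields actually changes, so repeated calls with identical values do
# not issue redundant datastore writes. The flake name and bug id below are
# hypothetical:
request = FlakeAnalysisRequest.Create('fake_test_name', False, 12345)
request.Update(swarmed=True, supported=True)   # values change -> one put()
request.Update(swarmed=True, supported=True)   # nothing changes -> no put()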
class GameState(ndb.Model): """Per-game singleton to store the game state. Keyed on "team_id#channel_id". """ last_action = ndb.StringProperty(required=False) last_action_target = ndb.StringProperty(required=False) # One of: # READY # ACTED # CHALLENGED # CHALLENGE_LOST # CHALLENGE_LOSS_RESOLVED # BLOCKED # BLOCK_CHALLENGED # BLOCK_CHALLENGE_WON # BLOCK_CHALLENGE_LOST # CARDS_TAKEN status = ndb.StringProperty() challenger = ndb.StringProperty(required=False) blocker = ndb.StringProperty(required=False) blocked_with = ndb.StringProperty(required=False) last_timestamp = ndb.DateTimeProperty(auto_now=True) unused_cards = ndb.StructuredProperty(Card, repeated=True) # Next player first players = ndb.LocalStructuredProperty(Player, repeated=True) def remaining_players(self): return [player for player in self.players if not player.is_out()] def get_player(self, username): for player in self.players: if player.username == username: return player return None def next_player(self): return self.remaining_players()[0] def last_player(self): return self.remaining_players()[-1] def player_usernames(self): return [player.username for player in self.players] def winner(self): remaining_players = self.remaining_players() if len(remaining_players) == 1: return remaining_players[0].username return None def status_line(self): winner = self.winner() action_bit = "%s used %s" % (self.last_player().username, self.last_action) if winner: return "*%s has won!*" % winner elif self.status == 'READY': return "It's %s's turn." % self.next_player().username elif self.status == 'ACTED': return "%s." % action_bit elif self.status == 'CHALLENGED': return "%s, and %s challenged." % (action_bit, self.challenger) elif self.status == 'CHALLENGE_LOST': return ("%s, and %s's challenge failed. %s must flip a card." % (action_bit, self.challenger, self.challenger)) elif self.status == 'CHALLENGE_LOSS_RESOLVED': return "%s, and %s's challenge failed." % (action_bit, self.challenger) elif self.status == 'BLOCKED': return "%s, and %s blocked with a %s." % (action_bit, self.blocker, self.blocked_with) elif self.status == 'BLOCK_CHALLENGED': return "%s, %s blocked with a %s, and %s challenged." % ( action_bit, self.blocker, self.blocked_with, self.challenger) elif self.status == 'BLOCK_CHALLENGE_WON': return ( "%s, %s blocked with a %s, and %s's challenge was " "successful." % (action_bit, self.blocker, self.blocked_with, self.challenger)) elif self.status == 'BLOCK_CHALLENGE_LOST': return ("%s, %s blocked with a %s, and %s's challenge failed. " "%s must flip a card." % (action_bit, self.blocker, self.blocked_with, self.challenger, self.challenger)) elif self.status == 'CARDS_TAKEN': return "%s, and has taken cards." % action_bit else: raise ValueError("Unknown status %s" % self.status) def status_view(self, viewer=None): lines = [self.status_line()] for player in self.players: lines.append(player.view(public=(viewer != player))) return _join_messages(lines) # After calling any of the following, you must then put() self. @staticmethod def create(game_id, players): cards = [ Card(name=name, eliminated=False) for name in CARDS for _ in xrange(3) ] random.shuffle(cards) players = [ Player(username=player.lstrip('@'), money=2, cards=[cards.pop(), cards.pop()]) for player in players ] return GameState(id=game_id, status='READY', unused_cards=cards, players=players) # ACTIONS def take_action(self, player, action, target): # TODO(benkraft): don't let you target yourself. if player != self.next_player(): raise Misplay("It's not your turn! 
It's %s's turn." % self.next_player().username) elif not (self.status == 'READY' or self.status in ('ACTED', 'BLOCKED', 'CHALLENGE_LOSS_RESOLVED') and self.last_action not in ACTIONS_WITH_RESPONSE): # TODO(benkraft): say what we're waiting on raise Misplay("It's not time for the next person to go yet!") elif action not in ACTION_NAMES: raise Misplay("I've never heard of that action, try one of these: " "%s." % ' '.join(sorted(ACTIONS))) action = ACTION_NAMES[action] cost = ACTION_COSTS.get(action, 0) if player.money < cost: raise Misplay("You don't have enough money to do that; you need " "%s and only have %s." % (cost, player.money)) elif player.money >= 10 and action != 'coup': raise Misplay("You have 10 coins; you must coup.") elif action in ACTIONS_WITH_TARGETS: if not target: raise Misplay("That action needs a target.") if target.is_out(): raise Misplay("%s is out." % target.username) if action == 'steal' and not target.money: raise Misplay("You can't steal from someone with no money.") # Okay, we're ready to act. Finish up the last action. responses = [] responses.append(self._flush_action()) responses.append(self._begin_action(action, target)) responses.append(self._maybe_autoresolve_action()) return _join_messages(responses) def _flush_action(self): """Cannot be used for ACTIONS_WITH_RESPONSE.""" if not self.last_action: # If the last action has been flushed, this is a no-op. return if self.status == 'BLOCKED': # If the action was blocked, just clear it. text = "%s's %s was blocked." % (self.last_player().username, self.last_action) self._clear_action() return text if self.last_action in ACTION_GAINS: self.last_player().money += ACTION_GAINS[self.last_action] if self.last_action == 'steal': target = self.get_player(self.last_action_target) amount = min(2, target.money) target.money -= amount self.last_player().money += amount text = "%s's %s was completed successfully." % ( self.last_player().username, self.last_action) self._clear_action() return text def _clear_action(self): self.last_action = None self.last_action_target = None self.challenge_loser = None self.blocker = None self.blocked_with = None self.status = 'READY' def _begin_action(self, action, target): self.last_action = action self.last_action_target = target.username if target else None self.status = 'ACTED' # Costs get deducted immediately, since they happen no matter what; # gains get processed when the action succeeds. if action in ACTION_COSTS: self.next_player().money -= ACTION_COSTS[action] # Advance the turn self.players = self.players[1:] + [self.players[0]] while self.players[0].is_out(): self.players = self.players[1:] + [self.players[0]] if target: target_text = " on %s" % target.username else: target_text = "" # TODO(benkraft): a less awkward message (e.g. "benkraft stole from # %s") responses = [ "%s used %s%s!" % (self.last_player().username, action, target_text) ] if action in ACTION_CARDS: responses.append("If you wish to challenge, `/coup challenge`.") if action in ACTION_BLOCKS: responses.append("If you wish to block, " "`/coup block <with_card>`.") return _join_messages(responses) def _maybe_autoresolve_action(self, challenge_complete=False, block_complete=False): if self.last_action == 'income': # Don't bother saying it completed, that's obvious.
self._flush_action() return elif self.last_action == 'coup' or ( self.last_action == 'assassinate' and self.status == 'BLOCK_CHALLENGE_LOSS_RESOLVED'): target = self.get_player(self.last_action_target) if target.one_card(): return _join_messages([ self._flip_card(target, target.live_cards()[0]), self._flush_action() ]) if self.last_action in CARD_LOSS_ACTIONS: return "If you're ready to lose a card, `/coup lose <card>`." elif self.last_action == 'exchange': return "To pick up your cards, `/coup exchange`." elif (block_complete or challenge_complete and self.last_action not in ACTION_BLOCKS): return self._flush_action() # FLIPPING CARDS def _flip_card(self, player, card): card.eliminated = True # If this eliminated a player, and it was their turn, advance the turn. while self.players[0].is_out(): self.players = self.players[1:] + [self.players[0]] text = "%s flipped over a %s." % (player.username, card.name) winner = self.winner() if winner: return _join_messages([text, "%s wins!" % winner]) else: return text def _redeal_card(self, player, card_name): c = player.remove_card(card_name) self.unused_cards.append(c) random.shuffle(self.unused_cards) player.cards.append(self.unused_cards.pop()) return "%s flipped over a %s and drew a new card." % (player.username, card_name) # CHALLENGES def pose_challenge(self, challenger): # TODO(benkraft): make them say what to challenge, to prevent races? # TODO(benkraft): don't let you challenge yourself if challenger.is_out(): raise Misplay("You can't challenge after you've been eliminated") if self.status == 'ACTED' and self.last_action in ACTION_CARDS: self.status = 'CHALLENGED' verb = self.last_action elif self.status == 'BLOCKED': self.status = 'BLOCK_CHALLENGED' verb = 'block' else: raise Misplay("There's nothing to challenge.") self.challenger = challenger.username challengee = self._challengee() text = "%s has challenged %s's %s." % (challenger.username, challengee.username, verb) if challengee.one_card(): return _join_messages( [text, self._resolve_challenge(challengee.live_cards()[0])]) else: return _join_messages([ text, "%s, please flip a card with `/coup show <card>`." % challengee.username ]) def resolve_challenge(self, player, card_name): challengee = self._challengee() if (challengee != player or self.status not in ('CHALLENGED', 'BLOCK_CHALLENGED')): raise Misplay("You haven't been challenged.") card = challengee.find_live_card(card_name) if not card: raise Misplay("You don't have that card.") return self._resolve_challenge(card) def lose_challenge(self, player, card_name): challenger = self.get_player(self.challenger) card = challenger.find_live_card(card_name) if player != challenger: # TODO(benkraft): better error message here. raise Misplay("You haven't lost a challenge.") elif self.status not in ('CHALLENGE_LOST', 'BLOCK_CHALLENGE_LOST'): raise Misplay("You haven't lost a challenge.") elif not card: raise Misplay("You don't have that card.") text = self._flip_card(challenger, card) if self.status == 'CHALLENGE_LOST': self.status = 'CHALLENGE_LOSS_RESOLVED' if self.last_action in ACTION_BLOCKS: return _join_messages([ text, "If you wish to block, " "`/coup block <with_card>`." ]) else: return _join_messages([ text, self._maybe_autoresolve_action(challenge_complete=True) ]) else: # self.status == 'BLOCK_CHALLENGE_LOST' failed_text = "The %s was blocked." 
% self.last_action self._clear_action() return _join_messages([text, failed_text]) def _challengee(self): if self.status == 'CHALLENGED': return self.players[-1] else: return self.get_player(self.blocker) def _resolve_challenge(self, card): challengee = self._challengee() challenger = self.get_player(self.challenger) flip_card_text = ("%s, please flip a card with `/coup flip <card>`." % challenger.username) # TODO(benkraft): refactor to deduplicate? if self.status == 'CHALLENGED': if card.name == ACTION_CARDS[self.last_action]: self.status = 'CHALLENGE_LOST' redeal_text = self._redeal_card(challengee, card.name) if challenger.one_card(): return _join_messages([ redeal_text, self.lose_challenge(challenger, challenger.live_card_names()[0]) ]) else: return _join_messages([redeal_text, flip_card_text]) else: failed_text = "The %s failed." % self.last_action flip_text = self._flip_card(challengee, card) self._clear_action() return _join_messages([flip_text, failed_text]) else: # self.status == 'BLOCK_CHALLENGED' if card.name == self.blocked_with: self.status = 'BLOCK_CHALLENGE_LOST' redeal_text = self._redeal_card(challengee, card.name) if challenger.one_card(): return _join_messages([ redeal_text, self.lose_challenge(challenger, challenger.live_card_names()[0]) ]) else: return _join_messages([redeal_text, flip_card_text]) else: self.status = 'BLOCK_CHALLENGE_WON' flip_text = self._flip_card(challengee, card) return _join_messages([ flip_text, "The block failed.", self._maybe_autoresolve_action(block_complete=True) ]) # BLOCKS def pose_block(self, blocker, card_name): # TODO(benkraft): guess card if it's unique if self.status not in ('ACTED', 'CHALLENGE_LOSS_RESOLVED'): raise Misplay("You can't block right now.") elif self.last_action not in ACTION_BLOCKS: raise Misplay("%s can't be blocked." % self.last_action) elif self.last_player() == blocker: raise Misplay("You can't block yourself.") # Foreign aid can be blocked by anyone; steal and assassinate can only # be blocked by their targets. elif (self.last_action != 'foreignaid' and self.last_action_target != blocker.username): raise Misplay("Only the target of a %s can block it." % self.last_action) elif card_name not in ACTION_BLOCKS[self.last_action]: raise Misplay("You can't block %s with a %s." % (self.last_action, card_name)) self.status = 'BLOCKED' self.blocker = blocker.username self.blocked_with = card_name return ("%s has blocked %s's %s with a %s. If you wish to challenge, " "`/coup challenge`." % (blocker.username, self.last_player().username, self.last_action, card_name)) # AMBASSADOR def take_cards(self, player): # TODO(benkraft): this might not be true, if the state is READY if player != self.last_player(): raise Misplay("It's not your turn.") elif self.last_action != 'exchange': raise Misplay("You didn't exchange.") elif self.status not in ('ACTED', 'CHALLENGE_LOSS_RESOLVED', 'BLOCK_CHALLENGE_WON'): # TODO(benkraft): say why raise Misplay("You can't take your cards right now.") self.status = 'CARDS_TAKEN' random.shuffle(self.unused_cards) card1 = self.unused_cards.pop() card2 = self.unused_cards.pop() player.cards.extend([card1, card2]) return ("You got a %s and a %s. To choose which cards to return, " "`/coup return <card1> <card2>`." 
% (card1.name, card2.name)) def return_cards(self, player, card1_name, card2_name): # TODO(benkraft): this might not be true, if the state is READY if player != self.last_player(): raise Misplay("It's not your turn.") elif self.last_action != 'exchange': raise Misplay("You didn't exchange.") elif self.status != 'CARDS_TAKEN': raise Misplay("You didn't take cards! " "To take cards, `/coup take`.") elif (card1_name == card2_name and not player.live_card_names().count(card1_name) >= 2): raise Misplay("You don't have two %ss." % card1_name) for card in [card1_name, card2_name]: if not player.find_live_card(card): raise Misplay("You don't have a %s." % card) for card in [card1_name, card2_name]: player.remove_card(card) self._clear_action() return "%s returned their cards." % player.username # CARD LOSS def lose_card(self, player, card_name): if self.last_action not in CARD_LOSS_ACTIONS: raise Misplay("You don't need to lose a card now.") elif player.username != self.last_action_target: raise Misplay("You weren't the target of the %s." % self.last_action) elif self.status not in ('ACTED', 'CHALLENGE_LOSS_RESOLVED', 'BLOCK_CHALLENGE_WON'): raise Misplay("It's not time to flip a card yet.") card = player.find_live_card(card_name) if not card: raise Misplay("You don't have a %s." % card_name) text = self._flip_card(player, card) self._clear_action() return text
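# --- Illustrative usage sketch (not part of the original module) -----------
# GameState.create() deals two cards to each player and stores the turn
# order; after any mutating call the state must be put() explicitly (see the
# comment above create()). The team/channel id, the usernames, and the
# assumption that 'income' is a key of ACTION_NAMES are hypothetical:
state = GameState.create('T123#C456', ['@alice', '@bob'])
actor = state.next_player()
message = state.take_action(actor, 'income', None)  # untargeted action
state.put()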
class Post(PolyModel): """Model of a post.""" title = ndb.StringProperty() photo_url = ndb.StringProperty() pdf_files = ndb.JsonProperty() text = ndb.TextProperty() # user who is the author author = ndb.KeyProperty(kind="User", required=True) # institution to which this post belongs institution = ndb.KeyProperty(kind="Institution", required=True) state = ndb.StringProperty(choices=set([ 'draft', 'published', 'deleted' ]), default='published') # Comments of the post # Concurrency controlled by Transactions comments = ndb.JsonProperty(default={}) # Date and time of the post's creation publication_date = ndb.DateTimeProperty(auto_now_add=True) # user who last modified the post last_modified_by = ndb.KeyProperty(kind="User") # Date and time of the last modification last_modified_date = ndb.DateTimeProperty(auto_now=True) # Likes of Post likes = ndb.LocalStructuredProperty(Like, repeated=True) # Images uploaded uploaded_images = ndb.StringProperty(repeated=True) # Set when this post shares another post shared_post = ndb.KeyProperty(kind="Post") # Video URL video_url = ndb.StringProperty() # Set when this post shares an event shared_event = ndb.KeyProperty(kind="Event") # Users that are interested in the post subscribers = ndb.KeyProperty(kind="User", repeated=True) def create(post, data, author_key, institution_key): """Create a post and check required fields.""" post = post.createSharing(data) if (post.isCommonPost(data)): if not data.get('title'): raise FieldException("Title can not be empty") if not data.get('text'): raise FieldException("Text can not be empty") post.title = data.get('title') post.photo_url = data.get('photo_url') post.text = data.get('text') post.pdf_files = Utils.toJson(data.get('pdf_files')) post.last_modified_by = author_key post.author = author_key post.institution = institution_key post.video_url = data.get('video_url') post.subscribers = [author_key] return post def isCommonPost(post, data): """Return True if the post is not a shared post, a shared event or a survey.""" return post.shared_event is None and post.shared_post is None and data.get('type_survey') is None def createSharing(self, data): """Create a different type of post: a shared post or a shared event.""" if data.get('shared_event'): self.shared_event = ndb.Key(urlsafe=data["shared_event"]) elif data.get('shared_post'): self.shared_post = ndb.Key(urlsafe=data["shared_post"]) return self def make(post, host): """Create a personalized JSON representation of the post.""" publication_date = post.publication_date.isoformat() last_modified_date = post.last_modified_date.isoformat() author = post.author.get() last_modified_by = post.last_modified_by.get() institution = post.institution.get() post_dict = { 'title': post.title, 'text': post.text, 'author': author.name, 'author_img': author.photo_url, 'institution_name': institution.name, 'institution_image': institution.photo_url, 'likes': getLikesUri(post, host), 'number_of_likes': post.get_number_of_likes(), 'photo_url': post.photo_url, 'video_url': post.video_url, 'uploaded_images': post.uploaded_images, 'state': post.state, 'comments': getCommentsUri(post, host), 'number_of_comments': post.get_number_of_comment(), 'publication_date': publication_date, 'last_modified_date': last_modified_date, 'author_key': author.key.urlsafe(), 'last_modified_by': last_modified_by.name, 'institution_key': institution.key.urlsafe(), 'institution_state': institution.state, 'key': post.key.urlsafe(), 'pdf_files': post.pdf_files if post.pdf_files else [], 'subscribers': [subscriber.urlsafe() for subscriber in post.subscribers] } return post.modify_post(post_dict, host) def
make_comments(post): """Load author data for each comment and its replies.""" for comment in post.comments.values(): post.loadAuthor(comment) for reply in comment['replies'].values(): post.loadAuthor(reply) def loadAuthor(self, entity): author = ndb.Key(urlsafe=entity["author_key"]).get() entity["author_name"] = author.name entity["author_img"] = author.photo_url def modify_post(post, post_dict, host): """Adjust the personalized JSON if the post was deleted or shared.""" if(post.state == 'deleted'): post_dict['title'] = None post_dict['text'] = None if(post.shared_post): post = post.shared_post.get() post_dict['shared_post'] = post.make(host) if(post.shared_event): post_dict['shared_event'] = Event.make(post.shared_event.get()) return post_dict def get_comment(self, comment_id): """Get a comment by id.""" return self.comments.get(comment_id) def get_number_of_comment(self): """Get the number of comments.""" return len(self.comments) @ndb.transactional(retries=10) def add_comment(self, comment): """Add a comment to the post.""" post = self.key.get() post.comments[comment.id] = Utils.toJson(comment) post.put() def remove_comment(self, comment): """Remove a comment from the post.""" del self.comments[comment.get('id')] self.put() @ndb.transactional(retries=10) def reply_comment(self, reply, comment_id): """Add a reply to a comment of the post.""" comment = self.get_comment(comment_id) Utils._assert( not comment, "This comment has been deleted.", EntityException) replies = comment.get('replies') replies[reply.id] = Utils.toJson(reply) self.put() def get_like(self, author_key): """Get a like by author key.""" for like in self.likes: if like.author == author_key: return like def get_number_of_likes(self): """Get the number of likes in this post.""" return len(self.likes) @ndb.transactional(retries=10) def like_comment(self, user, comment_id=None, reply_id=None): """Increment one 'like' in comment or reply.""" post = self.key.get() comment = post.get_comment(comment_id) if reply_id: comment = comment.get('replies').get(reply_id) Utils._assert(comment is None, "This comment has been deleted.", NotAuthorizedException) likes = comment.get('likes') Utils._assert(user.key.urlsafe() in likes, "User already liked this comment", NotAuthorizedException) likes.append(user.key.urlsafe()) post.put() return comment @ndb.transactional(retries=10, xg=True) def like(self, author_key): """Increment one 'like' in post.""" post = self.key.get() user = author_key.get() Utils._assert(post.key in user.liked_posts, "User already liked this publication", NotAuthorizedException) if post.get_like(author_key) is None: like = Like() like.author = author_key like.id = Utils.getHash(like) post.likes.append(like) post.put() user.like_post(post.key) return post def dislike(self, author_key): """Decrease one 'like' in post.""" like = self.get_like(author_key) if like: self.likes.remove(like) self.put() def delete(self, user): """Mark the post as deleted and record who deleted it.""" self.last_modified_by = user.key self.state = 'deleted' self.put() def has_activity(self): """Check if the post has comments or likes.""" has_comments = len(self.comments) > 0 has_likes = len(self.likes) > 0 return has_comments or has_likes def can_edit(self): """Check if the post can be edited by the user.""" is_published = self.state == 'published' is_inst_active = self.institution.get().state == 'active' return (not self.has_activity()) and is_published and is_inst_active def add_subscriber(self, user): """Add a subscriber.""" if user.state == 'active': self.subscribers.append(user.key) self.put() def remove_subscriber(self, user): """Remove a subscriber.""" if
user.key in self.subscribers and self.author != user.key: self.subscribers.remove(user.key) self.put() def create_notification_message(self, user_key, current_institution_key, sender_institution_key=None): """ Create message that will be used in notification. user_key -- The user key that made the action. current_institution_key -- The institution that user was in the moment that made the action. sender_institution_key -- The institution by which the post was created, if it hasn't been defined yet, the sender institution should be the current institution. """ return create_message( sender_key= user_key, current_institution_key=current_institution_key, sender_institution_key=sender_institution_key or current_institution_key ) @staticmethod def is_hidden(post): """Check if the post is deleted and has no activity.""" has_no_comments = post.get('number_of_comments') == 0 has_no_likes = post.get('number_of_likes') == 0 is_deleted = post.get('state') == 'deleted' return is_deleted and has_no_comments and has_no_likes
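# --- Illustrative usage sketch (not part of the original module) -----------
# Post.like() runs in an ndb cross-group transaction (retries=10, xg=True)
# and re-reads the post by key before appending the Like, so concurrent likes
# from different users do not overwrite each other. The keys below are
# hypothetical:
post = post_key.get()
updated_post = post.like(author_key)  # transactional; returns the fresh entity
number_of_likes = updated_post.get_number_of_likes()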
class Placement(ndb.Model): location = ndb.StringProperty() zone = ndb.StringProperty() stuff = ndb.LocalStructuredProperty(Stuff, repeated=True)