class Sheila(StoredObject):
    # Fixture model exercising a wide range of field options:
    # defaults, validators, list fields, list_validate, and foreign refs.
    _id = fields.StringField(primary=True)
    _meta = {'optimistic': True}

    # Simple fields
    sheila_str = fields.StringField(default='sheila', validate=True, required=True)
    sheila_int = fields.IntegerField(default=7, validate=MaxValueValidator(9))
    sheila_now = fields.DateTimeField()
    sheila_url = fields.StringField(validate=URLValidator())
    sheila_foostop = fields.StringField(required=True, validate=RegexValidator(r'foo$'), list=True)
    created = fields.DateTimeField(auto_now_add=True)
    modified = fields.DateTimeField(auto_now=True)

    # List fields: `validate` applies per item, `list_validate` to the list.
    sheila_strs = fields.StringField(list=True, validate=MinLengthValidator(5), list_validate=MinLengthValidator(3))
    sheila_nows = fields.DateTimeField(list=True)  # , default=[])
    sheila_urls = fields.StringField(
        list=True,
        validate=[URLValidator(), MinLengthValidator(20)],
        list_validate=MinLengthValidator(2))
    sheila_ints = fields.IntegerField(list=True, validate=MinValueValidator(3), list_validate=MinLengthValidator(2))

    # Foreign fields
    sheila_ron = fields.ForeignField('Ron', backref='ron')
    sheila_rons = fields.ForeignField('Ron', backref='rons', list=True)
class Foo(StoredObject):
    # Minimal fixture covering each scalar field type plus a list field.
    _id = fields.IntegerField(primary=True)
    integer_field = fields.IntegerField()
    string_field = fields.StringField()
    datetime_field = fields.DateTimeField()
    float_field = fields.FloatField()
    list_field = fields.IntegerField(list=True)
class Schema(StoredObject):
    # Fixture with a record-level validator attached via _meta.
    _id = fields.IntegerField(primary=True)
    value1 = fields.IntegerField()
    value2 = fields.IntegerField()
    _meta = {
        'validators': [validate_schema],
    }
class V3(StoredObject):
    """Version 3 of a migratable schema (supersedes V2)."""
    _id = fields.StringField(_primary_key=True, index=True)
    my_string = fields.StringField()
    my_int = fields.IntegerField(default=5)
    my_number = fields.IntegerField()
    # Became required in this version.
    my_null = fields.StringField(required=True)

    _meta = {
        'optimistic': True,
        # BUG FIX: was `self.V2` -- `self` is undefined in a class body and
        # raises NameError at class-creation time; sibling schemas reference
        # the previous version by bare name (cf. V2's `'version_of': V1`).
        'version_of': V2,
        'version': 3,
        # NOTE: the original literal listed 'optimistic' twice; the duplicate
        # key (a silent no-op in a dict literal) has been removed.
    }
class Conference(StoredObject):
    #: Determines the email address for submission and the OSF url
    # Example: If endpoint is spsp2014, then submission email will be
    # [email protected] or [email protected] and the OSF url will
    # be osf.io/view/spsp2014
    endpoint = fields.StringField(primary=True, required=True, unique=True)
    #: Full name, e.g. "SPSP 2014"
    name = fields.StringField(required=True)
    info_url = fields.StringField(required=False, default=None)
    logo_url = fields.StringField(required=False, default=None)
    location = fields.StringField(required=False, default=None)
    start_date = fields.DateTimeField(default=None)
    end_date = fields.DateTimeField(default=None)
    active = fields.BooleanField(required=True)
    admins = fields.ForeignField('user', list=True, required=False, default=None)
    #: Whether to make submitted projects public
    public_projects = fields.BooleanField(required=False, default=True)
    poster = fields.BooleanField(default=True)
    talk = fields.BooleanField(default=True)
    # field_names are used to customize the text on the conference page, the categories
    # of submissions, and the email address to send material to.
    # The lambda default avoids sharing one mutable dict across instances.
    field_names = fields.DictionaryField(default=lambda: DEFAULT_FIELD_NAMES)
    # Cached number of submissions
    num_submissions = fields.IntegerField(default=0)

    @classmethod
    def get_by_endpoint(cls, endpoint, active=True):
        """Look up a conference by (case-insensitive) endpoint.

        :param endpoint: Conference endpoint string, e.g. 'spsp2014'
        :param active: When True, only match active conferences
        :raises ConferenceError: if no matching conference is found
        """
        query = Q('endpoint', 'iexact', endpoint)
        if active:
            query &= Q('active', 'eq', True)
        try:
            return Conference.find_one(query)
        except ModularOdmException:
            raise ConferenceError('Endpoint {0} not found'.format(endpoint))
class Ron(StoredObject):
    # Fixture model; note the unusual datetime primary key.
    _id = fields.DateTimeField(primary=True)
    ron_str = fields.StringField()
    ron_int = fields.IntegerField()
    ron_now = fields.DateTimeField()
class V3(StoredObject):
    """Version 3 of a migratable schema; backfills `my_null` on migration."""
    _id = fields.StringField(_primary_key=True, index=True)
    my_string = fields.StringField()
    my_int = fields.IntegerField(default=5)
    my_number = fields.IntegerField()
    # Became required in this version.
    my_null = fields.StringField(required=True)

    @classmethod
    def _migrate(cls, old, new):
        # `my_null` is required in V3; give pre-existing records a value.
        if not old.my_null:
            new.my_null = 'default'

    _meta = {
        'optimistic': True,
        # BUG FIX: was `self.V2` -- `self` is undefined in a class body and
        # raises NameError at class-creation time; sibling schemas reference
        # the previous version by bare name (cf. V2's `'version_of': V1`).
        'version_of': V2,
        'version': 3,
        # NOTE: the original literal listed 'optimistic' twice; the duplicate
        # key (a silent no-op in a dict literal) has been removed.
    }
class Schema1(StoredObject):
    # Version 1 of a migratable schema.
    _id = fields.StringField(primary=True)
    number = fields.IntegerField()
    deleted = fields.FloatField()

    _meta = {
        'optimistic': True,
        'version': 1,
    }
class Conference(StoredObject):
    #: Determines the email address for submission and the OSF url
    # Example: If endpoint is spsp2014, then submission email will be
    # [email protected] or [email protected] and the OSF url will
    # be osf.io/view/spsp2014
    endpoint = fields.StringField(primary=True, required=True, unique=True)
    #: Full name, e.g. "SPSP 2014"
    name = fields.StringField(required=True)
    info_url = fields.StringField(required=False, default=None)
    logo_url = fields.StringField(required=False, default=None)
    active = fields.BooleanField(required=True)
    admins = fields.ForeignField('user', list=True, required=False, default=None)
    #: Whether to make submitted projects public
    public_projects = fields.BooleanField(required=False, default=True)
    poster = fields.BooleanField(default=True)
    talk = fields.BooleanField(default=True)
    # field_names are used to customize the text on the conference page, the categories
    # of submissions, and the email address to send material to.
    # The lambda default avoids sharing one mutable dict across instances.
    field_names = fields.DictionaryField(
        default=lambda: {
            'submission1': 'poster',
            'submission2': 'talk',
            'submission1_plural': 'posters',
            'submission2_plural': 'talks',
            'meeting_title_type': 'Posters & Talks',
            'add_submission': 'poster or talk',
            'mail_subject': 'Presentation title',
            'mail_message_body': 'Presentation abstract (if any)',
            'mail_attachment': 'Your presentation file (e.g., PowerPoint, PDF, etc.)'
        })
    # Cached number of submissions
    num_submissions = fields.IntegerField(default=0)

    @classmethod
    def get_by_endpoint(cls, endpoint, active=True):
        """Look up a conference by (case-insensitive) endpoint.

        :param endpoint: Conference endpoint string, e.g. 'spsp2014'
        :param active: When True, only match active conferences
        :raises ConferenceError: if no matching conference is found
        """
        query = Q('endpoint', 'iexact', endpoint)
        if active:
            query &= Q('active', 'eq', True)
        try:
            return Conference.find_one(query)
        except ModularOdmException:
            raise ConferenceError('Endpoint {0} not found'.format(endpoint))
class OsfStorageFileVersion(StoredObject):
    """A version of an OsfStorageFileNode. contains information
    about where the file is located, hashes and datetimes
    """
    _id = fields.StringField(primary=True, default=lambda: str(bson.ObjectId()))
    creator = fields.ForeignField('user', required=True)

    # Date version record was created. This is the date displayed to the user.
    date_created = fields.DateTimeField(auto_now_add=True)

    # Dictionary specifying all information needed to locate file on backend
    # {
    #     'service': 'cloudfiles',  # required
    #     'container': 'osf',       # required
    #     'object': '20c53b',       # required
    #     'worker_url': '127.0.0.1',
    #     'worker_host': 'upload-service-1',
    # }
    location = fields.DictionaryField(validate=utils.validate_location)

    # Dictionary containing raw metadata from upload service response
    # {
    #     'size': 1024,                            # required
    #     'content_type': 'text/plain',            # required
    #     'date_modified': '2014-11-07T20:24:15',  # required
    #     'md5': 'd077f2',
    # }
    metadata = fields.DictionaryField()

    size = fields.IntegerField()
    content_type = fields.StringField()

    # Date file modified on third-party backend. Not displayed to user, since
    # this date may be earlier than the date of upload if the file already
    # exists on the backend
    date_modified = fields.DateTimeField()

    @property
    def location_hash(self):
        # Backend object key; identifies the stored blob.
        return self.location['object']

    def is_duplicate(self, other):
        """Whether `other` points at the same backend object."""
        return self.location_hash == other.location_hash

    def update_metadata(self, metadata):
        """Merge `metadata` into the record, derive size / content type /
        modified date, and save.

        :raises errors.MissingFieldError: if 'size' or 'modified' is absent
        """
        self.metadata.update(metadata)
        # contentType is optional; size and modified are required.
        self.content_type = self.metadata.get('contentType', None)
        try:
            self.size = self.metadata['size']
            self.date_modified = parse_date(self.metadata['modified'], ignoretz=True)
        except KeyError as err:
            raise errors.MissingFieldError(str(err))
        self.save()
class NodeWikiPage(GuidStoredObject):
    """A single version of a node's wiki page."""

    redirect_mode = 'redirect'

    _id = fields.StringField(primary=True)
    page_name = fields.StringField(validate=validate_page_name)
    version = fields.IntegerField()
    # NOTE(review): other models pass auto_now_add=True; here a callable is
    # passed -- confirm modular-odm treats a truthy callable the same way.
    date = fields.DateTimeField(auto_now_add=datetime.datetime.utcnow)
    is_current = fields.BooleanField()
    content = fields.StringField(default='')
    user = fields.ForeignField('user')
    node = fields.ForeignField('node')

    @property
    def deep_url(self):
        return '{}wiki/{}/'.format(self.node.deep_url, self.page_name)

    @property
    def url(self):
        return '{}wiki/{}/'.format(self.node.url, self.page_name)

    def html(self, node):
        """The cleaned HTML of the page"""
        sanitized_content = render_content(self.content, node=node)
        try:
            return linkify(
                sanitized_content,
                [
                    nofollow,
                ],
            )
        except TypeError:
            # linkify can fail on some inputs; fall back to sanitized HTML.
            logger.warning('Returning unlinkified content.')
            return sanitized_content

    def raw_text(self, node):
        """ The raw text of the page, suitable for using in a test search"""
        return sanitize(self.html(node), tags=[], strip=True)

    def save(self, *args, **kwargs):
        # Keep the node's search index in sync with wiki edits.
        rv = super(NodeWikiPage, self).save(*args, **kwargs)
        if self.node:
            self.node.update_search()
        return rv

    def rename(self, new_name, save=True):
        self.page_name = new_name
        if save:
            self.save()

    def to_json(self):
        return {}
class V2(StoredObject):
    """Version 2 of a migratable schema (supersedes V1)."""
    _id = fields.StringField(_primary_key=True, index=True)
    my_string = fields.StringField()
    my_int = fields.IntegerField(default=5)
    my_number = fields.IntegerField()
    my_null = fields.StringField(required=False)

    @classmethod
    def _migrate(cls, old, new):
        # Append a marker to migrated strings and coerce numbers to int.
        if old.my_string:
            new.my_string = old.my_string + 'yo'
        if old.my_number:
            new.my_number = int(old.my_number)

    _meta = {
        'optimistic': True,
        'version_of': V1,
        'version': 2,
        # NOTE: the original literal listed 'optimistic' twice; the duplicate
        # key (a silent no-op in a dict literal) has been removed.
    }
class ForwardNodeSettings(AddonNodeSettingsBase):
    # Settings for the "forward" addon: redirect a node to an external URL.
    url = fields.StringField(validate=URLValidator())
    label = fields.StringField(validate=sanitized)
    redirect_bool = fields.BooleanField(default=True, validate=True)
    # Delay before redirecting, clamped to 5-60 seconds by validators.
    redirect_secs = fields.IntegerField(
        default=15,
        validate=[MinValueValidator(5), MaxValueValidator(60)]
    )

    @property
    def link_text(self):
        # Fall back to the raw URL when no label was provided.
        return self.label if self.label else self.url
class Tag(StoredObject):
    # Fixture model exercising defaults, validators, and index flags.
    value = fields.StringField(primary=True, index=False)
    count = fields.StringField(default='c', validate=True, index=True)
    misc = fields.StringField(default='')
    misc2 = fields.StringField(default='')
    created = fields.DateTimeField(validate=True)
    modified = fields.DateTimeField(validate=True, auto_now=True)
    # NOTE(review): the list default is a shared mutable list; other models in
    # this file use a lambda for dict defaults -- confirm the field type
    # copies this per instance rather than sharing it.
    keywords = fields.StringField(
        default=['keywd1', 'keywd2'],
        validate=[MinLengthValidator(5), MaxLengthValidator(10)],
        list=True)
    mybool = fields.BooleanField(default=False)
    myint = fields.IntegerField()
    myfloat = fields.FloatField(required=True, default=4.5)
    myurl = fields.StringField(validate=URLValidator())
class OsfStorageFileVersion(StoredObject):
    """A stored version of a file: creator, backend location, and the raw
    metadata reported by the storage backend.
    """
    _id = oid_primary_key
    creator = fields.ForeignField('user', required=True)
    date_created = fields.DateTimeField(auto_now_add=True)

    # Dictionary specifying all information needed to locate file on backend
    # {
    #     'service': 'buttfiles',  # required
    #     'container': 'osf',      # required
    #     'object': '20c53b',      # required
    #     'worker_url': '127.0.0.1',
    #     'worker_host': 'upload-service-1',
    # }
    location = fields.DictionaryField(validate=validate_location)

    # Dictionary containing raw metadata from upload service response
    # {
    #     'size': 1024,                            # required
    #     'content_type': 'text/plain',            # required
    #     'date_modified': '2014-11-07T20:24:15',  # required
    #     'md5': 'd077f2',
    # }
    metadata = fields.DictionaryField()

    size = fields.IntegerField()
    content_type = fields.StringField()
    date_modified = fields.DateTimeField()

    @property
    def location_hash(self):
        # Backend object key; identifies the stored blob.
        return self.location['object']

    def is_duplicate(self, other):
        """Whether `other` points at the same backend object."""
        return self.location_hash == other.location_hash

    def update_metadata(self, metadata):
        """Merge `metadata` into the record, copy each parsed required value
        onto the matching model field, and save.

        :param metadata: dict of raw backend metadata
        :raises errors.MissingFieldError: if a required key is absent
        """
        self.metadata.update(metadata)
        for key, parser in metadata_fields.iteritems():
            try:
                value = metadata[key]
            except KeyError as err:
                # BUG FIX: previously raised the bare exception class with no
                # message, hiding WHICH key was missing; now matches the
                # sibling OsfStorageFileVersion.update_metadata, which raises
                # MissingFieldError(str(err)).
                raise errors.MissingFieldError(str(err))
            setattr(self, key, parser(value))
        self.save()
class Schema3(StoredObject):
    """Version 3 of a migratable schema; bumps `number` on (un)migration."""
    _id = fields.StringField(primary=True)
    name = fields.StringField(default='eman')
    number = fields.IntegerField()

    @classmethod
    def _migrate(cls, old, new):
        # FIX: first parameter of this @classmethod was misleadingly named
        # `self`; renamed to `cls` to match `_unmigrate` below. Positional
        # call behavior is unchanged.
        new.number = old.number + 1
        return new

    @classmethod
    def _unmigrate(cls, new, old):
        # Inverse of _migrate.
        old.number = new.number - 1
        return old

    _meta = {
        'optimistic': True,
        'version': 3,
        'version_of': Schema2,
    }
class Foo(TestObject):
    # Minimal fixture model.
    _id = fields.IntegerField()
class Baz(TestObject):
    # Fixture with a foreign reference to Foo plus a backref.
    _id = fields.IntegerField()
    ref = fields.ForeignField('foo', backref='food')
class NodeWikiPage(GuidStoredObject):
    """A single version of a node's wiki page."""

    _id = fields.StringField(primary=True)
    page_name = fields.StringField(validate=validate_page_name)
    version = fields.IntegerField()
    # NOTE(review): other models pass auto_now_add=True; here a callable is
    # passed -- confirm modular-odm treats a truthy callable the same way.
    date = fields.DateTimeField(auto_now_add=datetime.datetime.utcnow)
    is_current = fields.BooleanField()
    content = fields.StringField(default='')
    user = fields.ForeignField('user')
    node = fields.ForeignField('node')

    @property
    def deep_url(self):
        return '{}wiki/{}/'.format(self.node.deep_url, self.page_name)

    @property
    def url(self):
        return '{}wiki/{}/'.format(self.node.url, self.page_name)

    @property
    def rendered_before_update(self):
        # Whether this version predates the wiki renderer change.
        return self.date < WIKI_CHANGE_DATE

    def html(self, node):
        """The cleaned HTML of the page"""
        sanitized_content = render_content(self.content, node=node)
        try:
            return linkify(
                sanitized_content,
                [
                    nofollow,
                ],
            )
        except TypeError:
            # linkify can fail on some inputs; fall back to sanitized HTML.
            logger.warning('Returning unlinkified content.')
            return sanitized_content

    def raw_text(self, node):
        """ The raw text of the page, suitable for using in a test search"""
        return sanitize(self.html(node), tags=[], strip=True)

    def get_draft(self, node):
        """
        Return most recently edited version of wiki, whether that is the
        last saved version or the most recent sharejs draft.
        """
        db = wiki_utils.share_db()
        sharejs_uuid = wiki_utils.get_sharejs_uuid(node, self.page_name)

        doc_item = db['docs'].find_one({'_id': sharejs_uuid})
        if doc_item:
            sharejs_version = doc_item['_v']
            sharejs_timestamp = doc_item['_m']['mtime']
            sharejs_timestamp /= 1000  # Convert to appropriate units
            sharejs_date = datetime.datetime.utcfromtimestamp(
                sharejs_timestamp)

            # Prefer the draft only when it has edits newer than the save.
            if sharejs_version > 1 and sharejs_date > self.date:
                return doc_item['_data']

        return self.content

    def save(self, *args, **kwargs):
        # Keep the node's search index in sync with wiki edits.
        rv = super(NodeWikiPage, self).save(*args, **kwargs)
        if self.node:
            self.node.update_search()
        return rv

    def rename(self, new_name, save=True):
        self.page_name = new_name
        if save:
            self.save()

    def to_json(self):
        return {}
class SpamMixin(StoredObject):
    """Mixin to add to objects that can be marked as spam.
    """

    _meta = {'abstract': True}

    # Spam status values
    UNKNOWN = 0
    FLAGGED = 1
    SPAM = 2
    HAM = 4

    spam_status = fields.IntegerField(default=UNKNOWN, index=True)

    # Reports is a dict of reports keyed on reporting user
    # Each report is a dictionary including:
    #  - date: date reported
    #  - retracted: if a report has been retracted
    #  - category: What type of spam does the reporter believe this is
    #  - text: Comment on the comment
    reports = fields.DictionaryField(default=dict, validate=validate_reports)

    def flag_spam(self, save=False):
        # If ham and unedited then tell user that they should read it again
        if self.spam_status == self.UNKNOWN:
            self.spam_status = self.FLAGGED
        if save:
            self.save()

    def remove_flag(self, save=False):
        # Only clear the flag once every report has been retracted.
        if self.spam_status != self.FLAGGED:
            return
        for report in self.reports.values():
            if not report.get('retracted', True):
                return
        self.spam_status = self.UNKNOWN
        if save:
            self.save()

    def confirm_ham(self, save=False):
        self.spam_status = self.HAM
        if save:
            self.save()

    def confirm_spam(self, save=False):
        self.spam_status = self.SPAM
        if save:
            self.save()

    @property
    def is_spam(self):
        return self.spam_status == self.SPAM

    def report_abuse(self, user, save=False, **kwargs):
        """Report object is spam or other abuse of OSF

        :param user: User submitting report
        :param save: Save changes
        :param kwargs: Should include category and message
        :raises ValueError: if user is reporting self
        """
        if user == self.user:
            raise ValueError('User cannot report self.')
        self.flag_spam()
        report = {'date': datetime.utcnow(), 'retracted': False}
        report.update(kwargs)
        if 'text' not in report:
            report['text'] = None
        self.reports[user._id] = report
        if save:
            self.save()

    def retract_report(self, user, save=False):
        """Retract last report by user

        Only marks the last report as retracted because there could be
        history in how the object is edited that requires a user
        to flag or retract even if object is marked as HAM.
        :param user: User retracting
        :param save: Save changes
        :raises ValueError: if the user never reported this content
        """
        if user._id in self.reports:
            if not self.reports[user._id]['retracted']:
                self.reports[user._id]['retracted'] = True
                self.remove_flag()
        else:
            raise ValueError('User has not reported this content')
        if save:
            self.save()
class NodeWikiPage(GuidStoredObject, Commentable):
    """A single version of a node's wiki page (commentable)."""

    _id = fields.StringField(primary=True)
    page_name = fields.StringField(validate=validate_page_name)
    version = fields.IntegerField()
    # NOTE(review): other models pass auto_now_add=True; here a callable is
    # passed -- confirm modular-odm treats a truthy callable the same way.
    date = fields.DateTimeField(auto_now_add=datetime.datetime.utcnow)
    content = fields.StringField(default='')
    user = fields.ForeignField('user')
    node = fields.ForeignField('node')

    # For Django compatibility
    @property
    def pk(self):
        return self._id

    @property
    def is_current(self):
        # Current iff the node's pointer for this page name is this record.
        key = to_mongo_key(self.page_name)
        if key in self.node.wiki_pages_current:
            return self.node.wiki_pages_current[key] == self._id
        else:
            return False

    @property
    def deep_url(self):
        return '{}wiki/{}/'.format(self.node.deep_url, urllib.quote(self.page_name))

    @property
    def url(self):
        return '{}wiki/{}/'.format(self.node.url, urllib.quote(self.page_name))

    @property
    def rendered_before_update(self):
        # Whether this version predates the wiki renderer change.
        return self.date < WIKI_CHANGE_DATE

    # For Comment API compatibility
    @property
    def target_type(self):
        """The object "type" used in the OSF v2 API."""
        return 'wiki'

    @property
    def root_target_page(self):
        """The comment page type associated with NodeWikiPages."""
        return 'wiki'

    @property
    def is_deleted(self):
        # NOTE(review): uses mongo_utils.to_mongo_key while is_current calls a
        # bare to_mongo_key -- confirm both names resolve to the same helper.
        key = mongo_utils.to_mongo_key(self.page_name)
        return key not in self.node.wiki_pages_current

    @property
    def absolute_api_v2_url(self):
        path = '/wikis/{}/'.format(self._id)
        return api_v2_url(path)

    def belongs_to_node(self, node_id):
        """Check whether the wiki is attached to the specified node."""
        return self.node._id == node_id

    def get_extra_log_params(self, comment):
        return {'wiki': {'name': self.page_name, 'url': comment.get_comment_page_url()}}

    # used by django and DRF
    def get_absolute_url(self):
        return self.absolute_api_v2_url

    def html(self, node):
        """The cleaned HTML of the page"""
        sanitized_content = render_content(self.content, node=node)
        try:
            return linkify(
                sanitized_content,
                [nofollow, ],
            )
        except TypeError:
            # linkify can fail on some inputs; fall back to sanitized HTML.
            logger.warning('Returning unlinkified content.')
            return sanitized_content

    def raw_text(self, node):
        """ The raw text of the page, suitable for using in a test search"""
        return sanitize(self.html(node), tags=[], strip=True)

    def get_draft(self, node):
        """
        Return most recently edited version of wiki, whether that is the
        last saved version or the most recent sharejs draft.
        """
        db = wiki_utils.share_db()
        sharejs_uuid = wiki_utils.get_sharejs_uuid(node, self.page_name)

        doc_item = db['docs'].find_one({'_id': sharejs_uuid})
        if doc_item:
            sharejs_version = doc_item['_v']
            sharejs_timestamp = doc_item['_m']['mtime']
            sharejs_timestamp /= 1000  # Convert to appropriate units
            sharejs_date = datetime.datetime.utcfromtimestamp(sharejs_timestamp)

            # Prefer the draft only when it has edits newer than the save.
            if sharejs_version > 1 and sharejs_date > self.date:
                return doc_item['_data']

        return self.content

    def save(self, *args, **kwargs):
        # Keep the node's search index in sync with wiki edits.
        rv = super(NodeWikiPage, self).save(*args, **kwargs)
        if self.node:
            self.node.update_search()
        return rv

    def rename(self, new_name, save=True):
        self.page_name = new_name
        if save:
            self.save()

    def to_json(self):
        return {}

    def clone_wiki(self, node_id):
        """Clone a node wiki page.

        :param node: The Node of the cloned wiki page
        :return: The cloned wiki page
        """
        node = Node.load(node_id)
        if not node:
            raise ValueError('Invalid node')
        clone = self.clone()
        clone.node = node
        clone.user = self.user
        clone.save()
        return clone

    @classmethod
    def clone_wiki_versions(cls, node, copy, user, save=True):
        """Clone wiki pages for a forked or registered project.

        :param node: The Node that was forked/registered
        :param copy: The fork/registration
        :param user: The user who forked or registered the node
        :param save: Whether to save the fork/registration
        :return: copy
        """
        copy.wiki_pages_versions = {}
        copy.wiki_pages_current = {}

        for key in node.wiki_pages_versions:
            copy.wiki_pages_versions[key] = []
            for wiki_id in node.wiki_pages_versions[key]:
                node_wiki = NodeWikiPage.load(wiki_id)
                cloned_wiki = node_wiki.clone_wiki(copy._id)
                copy.wiki_pages_versions[key].append(cloned_wiki._id)
                # Carry the "current" pointer over to the clone.
                if node_wiki.is_current:
                    copy.wiki_pages_current[key] = cloned_wiki._id
        if save:
            copy.save()
        return copy
class Bar(TestObject):
    # Minimal fixture model.
    _id = fields.IntegerField()
class Foo(TestObject):
    # Fixture with a list of abstract (type-agnostic) foreign references.
    _id = fields.IntegerField()
    bars = fields.AbstractForeignField(list=True)
class Foo(TestObject):
    # Fixture with a list of foreign references to Bar.
    _id = fields.IntegerField()
    bars = fields.ForeignField('bar', list=True)
class Model(StoredObject):
    # Minimal fixture: integer primary key plus one string field.
    _id = fields.IntegerField(primary=True)
    value = fields.StringField()
class SpamMixin(StoredObject):
    """Mixin to add to objects that can be marked as spam.
    """

    _meta = {'abstract': True}

    # # Node fields that trigger an update to search on save
    # SPAM_UPDATE_FIELDS = {
    #     'spam_status',
    # }

    spam_status = fields.IntegerField(default=SpamStatus.UNKNOWN, index=True)
    # Tip string returned by the spam-check client on the last check.
    spam_pro_tip = fields.StringField(default=None)
    # Data representing the original spam indication
    # - author: author name
    # - author_email: email of the author
    # - content: data flagged
    # - headers: request headers
    #   - Remote-Addr: ip address from request
    #   - User-Agent: user agent from request
    #   - Referer: referrer header from request (typo +1, rtd)
    spam_data = fields.DictionaryField(default=dict)
    date_last_reported = fields.DateTimeField(default=None, index=True)

    # Reports is a dict of reports keyed on reporting user
    # Each report is a dictionary including:
    #  - date: date reported
    #  - retracted: if a report has been retracted
    #  - category: What type of spam does the reporter believe this is
    #  - text: Comment on the comment
    reports = fields.DictionaryField(default=dict, validate=_validate_reports)

    def flag_spam(self):
        # If ham and unedited then tell user that they should read it again
        if self.spam_status == SpamStatus.UNKNOWN:
            self.spam_status = SpamStatus.FLAGGED

    def remove_flag(self, save=False):
        # Only clear the flag once every report has been retracted.
        if self.spam_status != SpamStatus.FLAGGED:
            return
        for report in self.reports.values():
            if not report.get('retracted', True):
                return
        self.spam_status = SpamStatus.UNKNOWN
        if save:
            self.save()

    @property
    def is_spam(self):
        return self.spam_status == SpamStatus.SPAM

    @property
    def is_spammy(self):
        # Flagged OR confirmed spam.
        return self.spam_status in [SpamStatus.FLAGGED, SpamStatus.SPAM]

    def report_abuse(self, user, save=False, **kwargs):
        """Report object is spam or other abuse of OSF

        :param user: User submitting report
        :param save: Save changes
        :param kwargs: Should include category and message
        :raises ValueError: if user is reporting self
        """
        if user == self.user:
            raise ValueError('User cannot report self.')
        self.flag_spam()
        date = datetime.utcnow()
        report = {'date': date, 'retracted': False}
        report.update(kwargs)
        if 'text' not in report:
            report['text'] = None
        self.reports[user._id] = report
        self.date_last_reported = report['date']
        if save:
            self.save()

    def retract_report(self, user, save=False):
        """Retract last report by user

        Only marks the last report as retracted because there could be
        history in how the object is edited that requires a user
        to flag or retract even if object is marked as HAM.
        :param user: User retracting
        :param save: Save changes
        :raises ValueError: if the user never reported this content
        """
        if user._id in self.reports:
            if not self.reports[user._id]['retracted']:
                self.reports[user._id]['retracted'] = True
                self.remove_flag()
        else:
            raise ValueError('User has not reported this content')
        if save:
            self.save()

    def confirm_ham(self, save=False):
        # not all mixins will implement check spam pre-req, only submit ham when it was incorrectly flagged
        if settings.SPAM_CHECK_ENABLED and self.spam_data and self.spam_status in [SpamStatus.FLAGGED, SpamStatus.SPAM]:
            client = _get_client()
            client.submit_ham(
                user_ip=self.spam_data['headers']['Remote-Addr'],
                user_agent=self.spam_data['headers'].get('User-Agent'),
                referrer=self.spam_data['headers'].get('Referer'),
                comment_content=self.spam_data['content'],
                comment_author=self.spam_data['author'],
                comment_author_email=self.spam_data['author_email'],
            )
            logger.info('confirm_ham update sent')
        self.spam_status = SpamStatus.HAM
        if save:
            self.save()

    def confirm_spam(self, save=False):
        # not all mixins will implement check spam pre-req, only submit spam when it was incorrectly flagged
        if settings.SPAM_CHECK_ENABLED and self.spam_data and self.spam_status in [SpamStatus.UNKNOWN, SpamStatus.HAM]:
            client = _get_client()
            client.submit_spam(
                user_ip=self.spam_data['headers']['Remote-Addr'],
                user_agent=self.spam_data['headers'].get('User-Agent'),
                referrer=self.spam_data['headers'].get('Referer'),
                comment_content=self.spam_data['content'],
                comment_author=self.spam_data['author'],
                comment_author_email=self.spam_data['author_email'],
            )
            logger.info('confirm_spam update sent')
        self.spam_status = SpamStatus.SPAM
        if save:
            self.save()

    @abc.abstractmethod
    def check_spam(self, saved_fields, request_headers, save=False):
        """Must return is_spam"""
        pass

    def do_check_spam(self, author, author_email, content, request_headers):
        """Run the external spam check and record the request context.

        Returns the is_spam result; short-circuits for hand-set statuses.
        """
        if self.spam_status == SpamStatus.HAM:
            return False
        if self.is_spammy:
            return True

        client = _get_client()
        remote_addr = request_headers['Remote-Addr']
        user_agent = request_headers.get('User-Agent')
        referer = request_headers.get('Referer')
        is_spam, pro_tip = client.check_comment(
            user_ip=remote_addr,
            user_agent=user_agent,
            referrer=referer,
            comment_content=content,
            comment_author=author,
            comment_author_email=author_email)
        self.spam_pro_tip = pro_tip
        # Save the request context so ham/spam can be re-submitted later.
        self.spam_data['headers'] = {
            'Remote-Addr': remote_addr,
            'User-Agent': user_agent,
            'Referer': referer,
        }
        self.spam_data['content'] = content
        self.spam_data['author'] = author
        self.spam_data['author_email'] = author_email
        if is_spam:
            self.flag_spam()
        return is_spam
class Foo(StoredObject):
    # Fixture with two plain integer fields.
    _id = fields.IntegerField(required=True, primary=True)
    a = fields.IntegerField()
    b = fields.IntegerField()
class Foo(StoredObject):
    # Fixture tracking a `modified` boolean flag.
    _id = fields.IntegerField(primary=True)
    modified = fields.BooleanField(default=False)
class FileVersion(StoredObject):
    """A version of an OsfStorageFileNode. contains information
    about where the file is located, hashes and datetimes
    """
    _id = fields.StringField(primary=True, default=lambda: str(bson.ObjectId()))

    creator = fields.ForeignField('user')
    identifier = fields.StringField(required=True)

    # Date version record was created. This is the date displayed to the user.
    date_created = fields.DateTimeField(auto_now_add=True)

    # Dictionary specifying all information needed to locate file on backend
    # {
    #     'service': 'cloudfiles',  # required
    #     'container': 'osf',       # required
    #     'object': '20c53b',       # required
    #     'worker_url': '127.0.0.1',
    #     'worker_host': 'upload-service-1',
    # }
    location = fields.DictionaryField(default=None, validate=utils.validate_location)

    # Dictionary containing raw metadata from upload service response
    # {
    #     'size': 1024,                            # required
    #     'content_type': 'text/plain',            # required
    #     'date_modified': '2014-11-07T20:24:15',  # required
    #     'md5': 'd077f2',
    # }
    metadata = fields.DictionaryField()

    size = fields.IntegerField()
    content_type = fields.StringField()
    # Date file modified on third-party backend. Not displayed to user, since
    # this date may be earlier than the date of upload if the file already
    # exists on the backend
    date_modified = fields.DateTimeField()

    @property
    def location_hash(self):
        # Backend object key; identifies the stored blob.
        return self.location['object']

    @property
    def archive(self):
        return self.metadata.get('archive')

    def is_duplicate(self, other):
        """Whether `other` points at the same backend object."""
        return self.location_hash == other.location_hash

    def update_metadata(self, metadata, save=True):
        self.metadata.update(metadata)
        # metadata has no defined structure so only attempt to set attributes
        # If its are not in this callback it'll be in the next
        self.size = self.metadata.get('size', self.size)
        self.content_type = self.metadata.get('contentType', self.content_type)
        if self.metadata.get('modified') is not None:
            # TODO handle the timezone here the user that updates the file may see an
            # Incorrect version
            self.date_modified = parse_date(self.metadata['modified'], ignoretz=True)
        if save:
            self.save()

    def _find_matching_archive(self, save=True):
        """Find another version with the same sha256 as this file.
        If found copy its vault name and glacier id, no need to create additional backups.
        returns True if found otherwise false
        """
        if 'sha256' not in self.metadata:
            return False  # Dont bother searching for nothing

        if 'vault' in self.metadata and 'archive' in self.metadata:
            # Shouldn't ever happen, but we already have an archive
            return True  # We've found ourself

        qs = self.__class__.find(
            Q('_id', 'ne', self._id) &
            Q('metadata.vault', 'ne', None) &
            Q('metadata.archive', 'ne', None) &
            Q('metadata.sha256', 'eq', self.metadata['sha256'])).limit(1)
        if qs.count() < 1:
            return False
        other = qs[0]
        try:
            self.metadata['vault'] = other.metadata['vault']
            self.metadata['archive'] = other.metadata['archive']
        except KeyError:
            return False
        if save:
            self.save()
        return True
class BadgeAssertion(StoredObject):
    """Records the award of a badge to a node (openbadge-style assertion)."""

    _id = fields.StringField(default=lambda: str(ObjectId()))

    # Backrefs
    badge = fields.ForeignField('badge', backref='assertion')
    node = fields.ForeignField('node', backref='awarded')
    _awarder = fields.ForeignField('badgesusersettings')

    # Custom fields
    revoked = fields.BooleanField(default=False)
    reason = fields.StringField()

    # Required
    issued_on = fields.IntegerField(required=True)

    # Optional
    evidence = fields.StringField()
    expires = fields.StringField()

    @classmethod
    def create(cls, badge, node, evidence=None, save=True, awarder=None):
        """Build (and by default save) an assertion awarding `badge` to `node`.

        :param badge: Badge being awarded
        :param node: Node receiving the badge
        :param evidence: Optional evidence URL/text
        :param save: Persist the new assertion before returning
        :param awarder: Optional settings record of the awarding user
        :return: the new BadgeAssertion
        """
        b = cls()
        b.badge = badge
        b.node = node
        b.evidence = evidence
        # FIX: call utctimetuple() on the instance instead of the confusing
        # unbound-method form `datetime.utctimetuple(datetime.utcnow())`;
        # the resulting UTC epoch seconds are identical.
        b.issued_on = calendar.timegm(datetime.utcnow().utctimetuple())
        b._awarder = awarder
        if save:
            b.save()
        return b

    @property
    def issued_date(self):
        # Human-readable issue date derived from the stored epoch seconds.
        return datetime.fromtimestamp(self.issued_on).strftime('%Y/%m/%d')

    @property
    def verify(self):
        # BUG FIX: the getter previously declared an unused `vtype='hosted'`
        # parameter; property access can never supply an argument, so it was
        # dead code -- removed. The returned payload is unchanged.
        return {
            'type': 'hosted',
            'url': api_url_for('get_assertion_json', _absolute=True, aid=self._id)
        }

    @property
    def recipient(self):
        # NOTE(review): 'idenity' looks like a typo for 'identity', but the
        # key is part of the emitted openbadge payload -- do not rename
        # without checking downstream consumers.
        return {
            'idenity': self.node._id,
            'type': 'osfnode',  # TODO Could be an email?
            'hashed': False
        }

    @property
    def awarder(self):
        # System badges may carry an explicit awarder; otherwise fall back to
        # the badge's creator.
        if self.badge.is_system_badge and self._awarder:
            return self._awarder
        return self.badge.creator

    def to_json(self):
        return {
            'uid': self._id,
            'recipient': self.node._id,
            'badge': self.badge._id,
            'verify': self.verify,
            'issued_on': self.issued_date,
            'evidence': self.evidence,
            'expires': self.expires
        }

    def to_openbadge(self):
        return {
            'uid': self._id,
            'recipient': self.recipient,
            'badge': '{}{}/json/'.format(DOMAIN, self.badge._id),  # GUIDs Web url for
            'verify': self.verify,
            'issuedOn': self.issued_on,
            'evidence': self.evidence,
            'expires': self.expires
        }