class Conference(StoredObject):
    #: Determines the email address for submission and the OSF url
    # Example: If endpoint is spsp2014, then submission email will be
    # spsp2014-poster@osf.io or spsp2014-talk@osf.io and the OSF url will
    # be osf.io/view/spsp2014
    endpoint = fields.StringField(primary=True, required=True, unique=True)
    #: Full name, e.g. "SPSP 2014"
    name = fields.StringField(required=True)
    info_url = fields.StringField(required=False, default=None)
    logo_url = fields.StringField(required=False, default=None)
    active = fields.BooleanField(required=True)
    admins = fields.ForeignField('user', list=True, required=False, default=None)
    #: Whether to make submitted projects public
    public_projects = fields.BooleanField(required=False, default=True)
    poster = fields.BooleanField(default=True)
    talk = fields.BooleanField(default=True)

    @classmethod
    def get_by_endpoint(cls, endpoint, active=True):
        query = Q('endpoint', 'iexact', endpoint)
        if active:
            query &= Q('active', 'eq', True)
        try:
            return Conference.find_one(query)
        except ModularOdmException:
            raise ConferenceError('Endpoint {0} not found'.format(endpoint))

class ArchiveTarget(StoredObject):
    """Stores the results of archiving a single addon
    """
    _id = fields.StringField(
        primary=True,
        default=lambda: str(ObjectId())
    )

    # addon_short_name of target addon
    name = fields.StringField()

    status = fields.StringField(default=ARCHIVER_INITIATED)
    # <dict> representation of a website.archiver.AggregateStatResult
    # Format: {
    #     'target_id': <str>,
    #     'target_name': <str>,
    #     'targets': <list>(StatResult | AggregateStatResult),
    #     'num_files': <int>,
    #     'disk_usage': <float>,
    # }
    stat_result = fields.DictionaryField()
    errors = fields.StringField(list=True)

    def __repr__(self):
        return '<{0}(_id={1}, name={2}, status={3})>'.format(
            self.__class__.__name__,
            self._id,
            self.name,
            self.status
        )

class ForwardNodeSettings(AddonNodeSettingsBase):

    complete = True
    has_auth = True

    url = fields.StringField(validate=URLValidator())
    label = fields.StringField(validate=sanitized)

    @property
    def link_text(self):
        return self.label if self.label else self.url

    def on_delete(self):
        self.reset()

    def reset(self):
        self.url = None
        self.label = None

    def after_register(self, node, registration, user, save=True):
        clone = self.clone()
        clone.owner = registration
        clone.on_add()
        clone.save()
        return clone, None

class NodeLicenseRecord(StoredObject):

    _id = fields.StringField(primary=True, default=lambda: str(ObjectId()))

    node_license = fields.ForeignField('nodelicense', required=True)
    # Deliberately left as a StringField to support year ranges (e.g. 2012-2015)
    year = fields.StringField()
    copyright_holders = fields.StringField(list=True)

    @property
    def name(self):
        return self.node_license.name if self.node_license else None

    @property
    def text(self):
        return self.node_license.text if self.node_license else None

    @property
    def id(self):
        return self.node_license.id if self.node_license else None

    def to_json(self):
        return serialize_node_license_record(self)

    def copy(self):
        copied = NodeLicenseRecord(
            node_license=self.node_license,
            year=self.year,
            copyright_holders=self.copyright_holders
        )
        copied.save()
        return copied

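# Hedged usage sketch (not part of the original source): a registration can keep
# its own immutable license by copying the record rather than sharing the
# original. The helper name is illustrative; it only exercises copy() and the
# properties defined above.
def _example_copy_license_for_registration(license_record):
    copied = license_record.copy()  # copy() saves and returns an independent record
    return copied.name, copied.year, copied.copyright_holders
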
class KeyAuthContext(StoredObject, AuthContext):

    _id = fields.StringField(primary=True)
    can_provision = fields.BooleanField(default=False)
    can_create_repos = fields.BooleanField(default=False)
    full_name = fields.StringField()
    email = fields.StringField()

    def can_read_repo(self, repo_id):
        for field in ['admin_repos', 'read_repos', 'write_repos']:
            try:
                for ref in getattr(self, field).get('repometa', []):
                    if repo_id in getattr(self, field)['repometa'][ref]:
                        return True
            except AttributeError:
                pass
        return RepoMeta.load(repo_id).is_public

    def can_write_repo(self, repo_id):
        for field in ['admin_repos', 'write_repos']:
            try:
                for ref in getattr(self, field).get('repometa', []):
                    if repo_id in getattr(self, field)['repometa'][ref]:
                        return True
            except AttributeError:
                pass
        return False

    def __init__(self, *args, **kwargs):
        super(KeyAuthContext, self).__init__(*args, **kwargs)
        self._id = sha(
            str(SystemRandom().random())
        ).hexdigest()

class AddonS3UserSettings(AddonUserSettingsBase):

    access_key = fields.StringField()
    secret_key = fields.StringField()

    def to_json(self, user):
        ret = super(AddonS3UserSettings, self).to_json(user)
        ret['has_auth'] = self.has_auth
        if self.owner:
            ret['name'] = self.owner.display_full_name()
            ret['profile_url'] = self.owner.profile_url
        return ret

    @property
    def has_auth(self):
        return bool(self.access_key and self.secret_key)

    @property
    def is_valid(self):
        return utils.can_list(self.access_key, self.secret_key)

    def revoke_auth(self, auth=None, save=False):
        for node_settings in self.addons3nodesettings__authorized:
            node_settings.deauthorize(auth=auth, save=True)
        self.s3_osf_user, self.access_key, self.secret_key = None, None, None

        if save:
            self.save()
        return True

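# Hedged sketch (assumed, not part of the original source): gating S3 operations
# on the two checks defined above. has_auth only verifies that both keys are set,
# while is_valid actually calls out to S3 via utils.can_list, so it is the
# slower of the two.
def _example_s3_credentials_usable(user_settings):
    return user_settings.has_auth and user_settings.is_valid
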
class V1(StoredObject):

    _id = fields.StringField(_primary_key=True, index=True)
    my_string = fields.StringField()
    my_float = fields.FloatField()
    my_number = fields.FloatField()
    my_null = fields.StringField(required=False)

    _meta = {'optimistic': True, 'version': 1}

class AddonFigShareUserSettings(AddonUserSettingsBase):

    oauth_request_token = fields.StringField()
    oauth_request_token_secret = fields.StringField()
    oauth_access_token = fields.StringField()
    oauth_access_token_secret = fields.StringField()

    @property
    def has_auth(self):
        return self.oauth_access_token is not None

    def to_json(self, user):
        ret = super(AddonFigShareUserSettings, self).to_json(user)
        ret.update({
            'authorized': self.has_auth,
            'name': self.owner.display_full_name(),
            'profile_url': self.owner.profile_url,
        })
        return ret

    def remove_auth(self, save=False):
        self.oauth_access_token = None
        self.oauth_access_token_secret = None
        for node_settings in self.addonfigsharenodesettings__authorized:
            node_settings.deauthorize(auth=Auth(user=self.owner), save=True)
        if save:
            self.save()

    def delete(self, save=False):
        self.remove_auth(save=False)
        super(AddonFigShareUserSettings, self).delete(save=save)

class Conference(StoredObject):
    #: Determines the email address for submission and the OSF url
    # Example: If endpoint is spsp2014, then submission email will be
    # spsp2014-poster@osf.io or spsp2014-talk@osf.io and the OSF url will
    # be osf.io/view/spsp2014
    endpoint = fields.StringField(primary=True, required=True, unique=True)
    #: Full name, e.g. "SPSP 2014"
    name = fields.StringField(required=True)
    info_url = fields.StringField(required=False, default=None)
    logo_url = fields.StringField(required=False, default=None)
    location = fields.StringField(required=False, default=None)
    start_date = fields.DateTimeField(default=None)
    end_date = fields.DateTimeField(default=None)
    active = fields.BooleanField(required=True)
    admins = fields.ForeignField('user', list=True, required=False, default=None)
    #: Whether to make submitted projects public
    public_projects = fields.BooleanField(required=False, default=True)
    poster = fields.BooleanField(default=True)
    talk = fields.BooleanField(default=True)
    # field_names are used to customize the text on the conference page, the categories
    # of submissions, and the email address to send material to.
    field_names = fields.DictionaryField(default=lambda: DEFAULT_FIELD_NAMES)

    # Cached number of submissions
    num_submissions = fields.IntegerField(default=0)

    @classmethod
    def get_by_endpoint(cls, endpoint, active=True):
        query = Q('endpoint', 'iexact', endpoint)
        if active:
            query &= Q('active', 'eq', True)
        try:
            return Conference.find_one(query)
        except ModularOdmException:
            raise ConferenceError('Endpoint {0} not found'.format(endpoint))

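# Hedged usage sketch (not part of the original source): looking up a meeting by
# its endpoint with the classmethod defined above. The 'spsp2014' value is the
# illustrative endpoint from the comment; the helper name is hypothetical.
def _example_lookup_conference(endpoint='spsp2014'):
    try:
        return Conference.get_by_endpoint(endpoint, active=False)
    except ConferenceError:
        return None
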
class FigShareGuidFile(GuidFile):

    article_id = fields.StringField(index=True)
    file_id = fields.StringField(index=True)

    @property
    def waterbutler_path(self):
        if getattr(self.node.get_addon('figshare'), 'figshare_type', None) == 'project':
            return '/{}/{}'.format(self.article_id, self.file_id)
        return '/' + str(self.file_id)

    @property
    def provider(self):
        return 'figshare'

    def _exception_from_response(self, response):
        try:
            if response.json()['data']['extra']['status'] == 'drafts':
                self._metadata_cache = response.json()['data']
                raise fig_exceptions.FigshareIsDraftError(self)
        except KeyError:
            pass

        super(FigShareGuidFile, self)._exception_from_response(response)

    @property
    def version_identifier(self):
        return ''

    @property
    def unique_identifier(self):
        return '{}{}'.format(self.article_id, self.file_id)

class CitationStyle(StoredObject):
    """Persistent representation of a CSL style.

    These are parsed from .csl files, so that metadata fields can be indexed.
    """

    # The name of the citation file, sans extension
    _id = fields.StringField(primary=True)

    # The full title of the style
    title = fields.StringField(required=True)

    # Datetime the file was last parsed
    date_parsed = fields.DateTimeField(default=datetime.datetime.utcnow, required=True)

    short_title = fields.StringField(required=False)
    summary = fields.StringField(required=False)

    def to_json(self):
        return {
            'id': self._id,
            'title': self.title,
            'short_title': self.short_title,
            'summary': self.summary,
        }

class NotificationDigest(StoredObject):
    _id = fields.StringField(primary=True, default=lambda: str(ObjectId()))
    user_id = fields.StringField()
    timestamp = fields.DateTimeField()
    event = fields.StringField()
    message = fields.StringField()
    node_lineage = fields.StringField(list=True)

class OsfStorageGuidFile(GuidFile):
    """A reference back to an OsfStorageFileNode.

    path is the "waterbutler path" as well as the path used to look up a filenode

    GuidFile.path == FileNode.path == '/' + FileNode._id
    """
    provider = 'osfstorage'
    version_identifier = 'version'

    _path = fields.StringField(index=True)
    premigration_path = fields.StringField(index=True)
    path = fields.StringField(required=True, index=True)

    # Marker for invalid GUIDs that are associated with a node but not
    # part of a GUID's file tree, e.g. those generated by spiders
    _has_no_file_tree = fields.BooleanField(default=False)

    @classmethod
    def get_or_create(cls, node, path):
        try:
            return cls.find_one(Q('node', 'eq', node) & Q('path', 'eq', path)), False
        except NoResultsFound:
            # Create new
            new = cls(node=node, path=path)
            new.save()
            return new, True

    @property
    def waterbutler_path(self):
        return self.path

    @property
    def unique_identifier(self):
        return self._metadata_cache['extra']['version']

    @property
    def file_url(self):
        return os.path.join('osfstorage', 'files', self.path.lstrip('/'))

    def get_download_path(self, version_idx):
        url = furl.furl('/{0}/'.format(self._id))
        url.args.update({
            'action': 'download',
            'version': version_idx,
            'mode': 'render',
        })
        return url.url

    @property
    def extra(self):
        if not self._metadata_cache:
            return {}

        return {
            'fullPath': self._metadata_cache['extra']['fullPath'],
        }

class OsfStorageFileNode(StoredObject):

    _id = fields.StringField(primary=True, default=lambda: str(bson.ObjectId()))

    is_deleted = fields.BooleanField(default=False)
    name = fields.StringField(required=True, index=True)
    kind = fields.StringField(required=True, index=True)
    parent = fields.ForeignField('OsfStorageFileNode', index=True)
    versions = fields.ForeignField('OsfStorageFileVersion', list=True)
    node_settings = fields.ForeignField('OsfStorageNodeSettings', required=True, index=True)

    def materialized_path(self):
        def lineage():
            current = self
            while current:
                yield current
                current = current.parent

        path = os.path.join(*reversed([x.name for x in lineage()]))
        if self.kind == 'folder':
            return '/{}/'.format(path)
        return '/{}'.format(path)

    def append_file(self, name, save=True):
        assert self.kind == 'folder'
        child = OsfStorageFileNode(
            name=name,
            kind='file',
            parent=self,
            node_settings=self.node_settings
        )
        if save:
            child.save()
        return child

    def find_child_by_name(self, name):
        assert self.kind == 'folder'
        return self.__class__.find_one(
            Q('name', 'eq', name) &
            Q('kind', 'eq', 'file') &
            Q('parent', 'eq', self)
        )

    @property
    def path(self):
        return '/{}{}'.format(self._id, '/' if self.kind == 'folder' else '')

    def get_download_count(self, version=None):
        """
        :param int version: Optional one-based version index
        """
        parts = ['download', self.node_settings.owner._id, self._id]
        if version is not None:
            parts.append(version)
        page = ':'.join([format(part) for part in parts])
        _, count = get_basic_counters(page)
        return count or 0

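# Hedged usage sketch (assumed, not from the original source): creating a file
# under a folder node and reading both path representations defined above.
# `root` is assumed to be an existing OsfStorageFileNode with kind == 'folder';
# the file name is illustrative.
def _example_create_child(root, name='report.pdf'):
    child = root.append_file(name)  # persists a new 'file' child under root
    # path is id-based ('/<_id>'), materialized_path is name-based ('/a/b/report.pdf')
    return child.path, child.materialized_path()
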
class NodeLicense(StoredObject):

    _id = fields.StringField(primary=True, default=lambda: str(ObjectId()))

    id = fields.StringField(required=True, unique=True, editable=False)
    name = fields.StringField(required=True, unique=True)
    text = fields.StringField(required=True)
    properties = fields.StringField(list=True)

class OsfStorageTrashedFileNode(StoredObject):
    """The graveyard for all deleted OsfStorageFileNodes"""
    _id = fields.StringField(primary=True)
    name = fields.StringField(required=True, index=True)
    kind = fields.StringField(required=True, index=True)
    parent = fields.ForeignField('OsfStorageFileNode', index=True)
    versions = fields.ForeignField('OsfStorageFileVersion', list=True)
    node_settings = fields.ForeignField('OsfStorageNodeSettings', required=True, index=True)

class WikiPageVersion(StoredObject):
    _meta = {'optimistic': True}

    _id = fields.StringField(primary=True, index=True)
    modified_on = fields.DateTimeField()
    text = fields.StringField(default=[''], list=True)

    @property
    def content(self):
        return '\n'.join(self.text)

class User(StoredObject):
    _id = fields.StringField(primary=True)
    name = fields.StringField(required=True)
    date_created = fields.DateTimeField(auto_now_add=set_datetime)
    date_updated = fields.DateTimeField(auto_now=set_datetime)
    read_only = fields.StringField(editable=False)
    unique = fields.StringField(unique=True)

    _meta = {'optimistic': True}

class Identifier(StoredObject):
    """A persistent identifier model for DOIs, ARKs, and the like."""

    _id = fields.StringField(default=lambda: str(ObjectId()))
    # object to which the identifier points
    referent = fields.AbstractForeignField(required=True)
    # category: e.g. 'ark', 'doi'
    category = fields.StringField(required=True)
    # value: e.g. 'FK424601'
    value = fields.StringField(required=True)

class AddonDataverseUserSettings(AddonOAuthUserSettingsBase):

    oauth_provider = DataverseProvider
    serializer = serializer.DataverseSerializer

    # Legacy Fields
    api_token = fields.StringField()
    dataverse_username = fields.StringField()
    encrypted_password = fields.StringField()

class Tag(StoredObject):
    _id = fields.StringField(primary=True)
    date_created = fields.DateTimeField(validate=True, auto_now_add=True)
    date_modified = fields.DateTimeField(validate=True, auto_now=True)
    value = fields.StringField(default='default', validate=MinLengthValidator(5))
    keywords = fields.StringField(
        default=['keywd1', 'keywd2'],
        validate=MinLengthValidator(5),
        list=True
    )

    _meta = {'optimistic': True}

class ApiOAuth2Scope(StoredObject):
    """
    Store information about recognized OAuth2 scopes. Only scopes registered under this
    database model can be requested by third parties.
    """
    _id = fields.StringField(primary=True, default=lambda: str(ObjectId()))
    name = fields.StringField(unique=True, required=True, index=True)
    description = fields.StringField(required=True)
    is_active = fields.BooleanField(default=True, index=True)  # TODO: Add mechanism to deactivate a scope?

class OsfStorageFileVersion(StoredObject):
    """A version of an OsfStorageFileNode. Contains information about where the
    file is located, its hashes, and relevant datetimes.
    """

    _id = fields.StringField(primary=True, default=lambda: str(bson.ObjectId()))
    creator = fields.ForeignField('user', required=True)

    # Date version record was created. This is the date displayed to the user.
    date_created = fields.DateTimeField(auto_now_add=True)

    # Dictionary specifying all information needed to locate file on backend
    # {
    #     'service': 'cloudfiles',  # required
    #     'container': 'osf',       # required
    #     'object': '20c53b',       # required
    #     'worker_url': '127.0.0.1',
    #     'worker_host': 'upload-service-1',
    # }
    location = fields.DictionaryField(validate=utils.validate_location)

    # Dictionary containing raw metadata from upload service response
    # {
    #     'size': 1024,                            # required
    #     'content_type': 'text/plain',            # required
    #     'date_modified': '2014-11-07T20:24:15',  # required
    #     'md5': 'd077f2',
    # }
    metadata = fields.DictionaryField()

    size = fields.IntegerField()
    content_type = fields.StringField()
    # Date file modified on third-party backend. Not displayed to user, since
    # this date may be earlier than the date of upload if the file already
    # exists on the backend
    date_modified = fields.DateTimeField()

    @property
    def location_hash(self):
        return self.location['object']

    def is_duplicate(self, other):
        return self.location_hash == other.location_hash

    def update_metadata(self, metadata):
        self.metadata.update(metadata)
        self.content_type = self.metadata.get('contentType', None)
        try:
            self.size = self.metadata['size']
            self.date_modified = parse_date(self.metadata['modified'], ignoretz=True)
        except KeyError as err:
            raise errors.MissingFieldError(str(err))
        self.save()

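# Hedged sketch (assumed, not part of the original source): feeding
# update_metadata() a dict shaped like the payload the method actually reads
# ('size', 'modified', 'contentType'). The literal values are illustrative only.
def _example_record_metadata(version):
    version.update_metadata({
        'size': 1024,
        'contentType': 'text/plain',
        'modified': '2014-11-07T20:24:15',
    })  # raises errors.MissingFieldError if 'size' or 'modified' is absent
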
class NodeWikiPage(GuidStoredObject):

    redirect_mode = 'redirect'

    _id = fields.StringField(primary=True)

    page_name = fields.StringField(validate=validate_page_name)
    version = fields.IntegerField()
    date = fields.DateTimeField(auto_now_add=datetime.datetime.utcnow)
    is_current = fields.BooleanField()
    content = fields.StringField(default='')

    user = fields.ForeignField('user')
    node = fields.ForeignField('node')

    @property
    def deep_url(self):
        return '{}wiki/{}/'.format(self.node.deep_url, self.page_name)

    @property
    def url(self):
        return '{}wiki/{}/'.format(self.node.url, self.page_name)

    def html(self, node):
        """The cleaned HTML of the page"""
        sanitized_content = render_content(self.content, node=node)
        try:
            return linkify(
                sanitized_content,
                [
                    nofollow,
                ],
            )
        except TypeError:
            logger.warning('Returning unlinkified content.')
            return sanitized_content

    def raw_text(self, node):
        """The raw text of the page, suitable for use in a text search"""
        return sanitize(self.html(node), tags=[], strip=True)

    def save(self, *args, **kwargs):
        rv = super(NodeWikiPage, self).save(*args, **kwargs)
        if self.node:
            self.node.update_search()
        return rv

    def rename(self, new_name, save=True):
        self.page_name = new_name
        if save:
            self.save()

    def to_json(self):
        return {}

class TrashedFileNode(StoredObject):
    """The graveyard for all deleted FileNodes"""
    _id = fields.StringField(primary=True)

    last_touched = fields.DateTimeField()
    history = fields.DictionaryField(list=True)
    versions = fields.ForeignField('FileVersion', list=True)

    node = fields.ForeignField('node', required=True)
    parent = fields.AbstractForeignField(default=None)

    is_file = fields.BooleanField(default=True)
    provider = fields.StringField(required=True)

    name = fields.StringField(required=True)
    path = fields.StringField(required=True)
    materialized_path = fields.StringField(required=True)

    checkout = fields.AbstractForeignField('User')
    deleted_by = fields.AbstractForeignField('User')
    deleted_on = fields.DateTimeField(auto_now_add=True)
    tags = fields.ForeignField('Tag', list=True)

    @property
    def deep_url(self):
        """Allows deleted files to resolve to a view
        that will provide a nice error message and http.GONE
        """
        return self.node.web_url_for('addon_deleted_file', trashed_id=self._id)

    def restore(self, recursive=True, parent=None):
        """Recreate a StoredFileNode from the data in this object.
        Will re-point all guids and finally remove itself.

        :raises KeyExistsException:
        """
        data = self.to_storage()
        data.pop('deleted_on')
        data.pop('deleted_by')
        if parent:
            data['parent'] = parent._id
        elif data['parent']:
            # parent is an AbstractForeignField, so it gets stored as tuple
            data['parent'] = data['parent'][0]

        restored = FileNode.resolve_class(self.provider, int(self.is_file))(**data)
        if not restored.parent:
            raise ValueError('No parent to restore to')
        restored.save()

        if recursive:
            for child in TrashedFileNode.find(Q('parent', 'eq', self)):
                child.restore(recursive=recursive, parent=restored)

        TrashedFileNode.remove_one(self)
        return restored

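# Hedged usage sketch (assumed, not part of the original source): restoring a
# trashed file back into the file tree. Passing `parent` restores into a
# different folder; restore() raises ValueError when no parent can be resolved,
# as shown in the method above.
def _example_restore_trashed(trashed, new_parent=None):
    try:
        return trashed.restore(recursive=True, parent=new_parent)
    except ValueError:
        # nowhere to restore to; leave the record in the graveyard
        return None
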
class Conference(StoredObject):
    #: Determines the email address for submission and the OSF url
    # Example: If endpoint is spsp2014, then submission email will be
    # spsp2014-poster@osf.io or spsp2014-talk@osf.io and the OSF url will
    # be osf.io/view/spsp2014
    endpoint = fields.StringField(primary=True, required=True, unique=True)
    #: Full name, e.g. "SPSP 2014"
    name = fields.StringField(required=True)
    info_url = fields.StringField(required=False, default=None)
    logo_url = fields.StringField(required=False, default=None)
    active = fields.BooleanField(required=True)
    admins = fields.ForeignField('user', list=True, required=False, default=None)
    #: Whether to make submitted projects public
    public_projects = fields.BooleanField(required=False, default=True)
    poster = fields.BooleanField(default=True)
    talk = fields.BooleanField(default=True)
    # field_names are used to customize the text on the conference page, the categories
    # of submissions, and the email address to send material to.
    field_names = fields.DictionaryField(default=lambda: {
        'submission1': 'poster',
        'submission2': 'talk',
        'submission1_plural': 'posters',
        'submission2_plural': 'talks',
        'meeting_title_type': 'Posters & Talks',
        'add_submission': 'poster or talk',
        'mail_subject': 'Presentation title',
        'mail_message_body': 'Presentation abstract (if any)',
        'mail_attachment': 'Your presentation file (e.g., PowerPoint, PDF, etc.)'
    })

    # Cached number of submissions
    num_submissions = fields.IntegerField(default=0)

    @classmethod
    def get_by_endpoint(cls, endpoint, active=True):
        query = Q('endpoint', 'iexact', endpoint)
        if active:
            query &= Q('active', 'eq', True)
        try:
            return Conference.find_one(query)
        except ModularOdmException:
            raise ConferenceError('Endpoint {0} not found'.format(endpoint))

class QueuedMail(StoredObject):
    _id = fields.StringField(primary=True, default=lambda: str(bson.ObjectId()))
    user = fields.ForeignField('User', index=True, required=True)
    to_addr = fields.StringField()
    send_at = fields.DateTimeField(index=True, required=True)

    # string denoting the template, presend to be used. Has to be an index of queue_mail types
    email_type = fields.StringField(index=True, required=True)

    # dictionary with variables used to populate mako template and store information used in presends
    # Example:
    # self.data = {
    #     'nid': 'ShIpTo',
    #     'fullname': 'Florence Welch',
    # }
    data = fields.DictionaryField()
    sent_at = fields.DateTimeField(index=True)

    def send_mail(self):
        """
        Grabs the data from this email, checks for user subscription to help mails,
        constructs the mail object, and checks presend. Then attempts to send the
        email through send_mail().

        :return: boolean based on whether email was sent.
        """
        mail_struct = queue_mail_types[self.email_type]
        presend = mail_struct['presend'](self)
        mail = Mail(
            mail_struct['template'],
            subject=mail_struct['subject'],
            categories=mail_struct.get('categories', None)
        )
        self.data['osf_url'] = settings.DOMAIN
        if presend and self.user.is_active and self.user.osf_mailing_lists.get(settings.OSF_HELP_LIST):
            send_mail(self.to_addr or self.user.username, mail, mimetype='html', **(self.data or {}))
            self.sent_at = datetime.utcnow()
            self.save()
            return True
        else:
            self.__class__.remove_one(self)
            return False

    def find_sent_of_same_type_and_user(self):
        """
        Queries for all emails of the same type as self that were sent to the same
        user as self. Does not return emails that are still queued (unsent).

        :return: a list of those emails
        """
        return self.__class__.find(
            Q('email_type', 'eq', self.email_type) &
            Q('user', 'eq', self.user) &
            Q('sent_at', 'ne', None)
        )

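# Hedged usage sketch (assumed, not part of the original source): draining
# queued mail whose send_at has passed. It relies only on the modular-odm
# find()/Q() style already used above and on the `datetime` import assumed by
# send_mail(); the helper name and query are illustrative.
def _example_send_due_mail():
    sent = 0
    due = QueuedMail.find(
        Q('sent_at', 'eq', None) &
        Q('send_at', 'lt', datetime.utcnow())
    )
    for queued in due:
        if queued.send_mail():  # returns False (and removes itself) when presend fails
            sent += 1
    return sent
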
class ExternalAccount(StoredObject):
    """An account on an external service.

    Note that this object is not and should not be aware of what other objects
    are associated with it. This is by design, and this object should be kept as
    thin as possible, containing only those fields that must be stored in the
    database.

    The ``provider`` field is a de facto foreign key to an ``ExternalProvider``
    object, as providers are not stored in the database.
    """
    __indices__ = [
        {
            'key_or_list': [
                ('provider', pymongo.ASCENDING),
                ('provider_id', pymongo.ASCENDING),
            ],
            'unique': True,
        }
    ]

    _id = fields.StringField(default=lambda: str(ObjectId()), primary=True)

    # The OAuth credentials. One or both of these fields should be populated.
    # For OAuth1, this is usually the "oauth_token"
    # For OAuth2, this is usually the "access_token"
    oauth_key = fields.StringField()

    # For OAuth1, this is usually the "oauth_token_secret"
    # For OAuth2, this is not used
    oauth_secret = fields.StringField()

    # Used for OAuth2 only
    refresh_token = fields.StringField()
    expires_at = fields.DateTimeField()
    scopes = fields.StringField(list=True, default=lambda: list())

    # The `name` of the service
    # This lets us query for only accounts on a particular provider
    provider = fields.StringField(required=True)
    # The proper 'name' of the service
    # Needed for account serialization
    provider_name = fields.StringField(required=True)

    # The unique, persistent ID on the remote service.
    provider_id = fields.StringField()

    # The user's name on the external service
    display_name = fields.StringField()
    # A link to the user's profile on the external service
    profile_url = fields.StringField()

    def __repr__(self):
        return '<ExternalAccount: {}/{}>'.format(self.provider, self.provider_id)

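# Hedged sketch (assumed, not part of the original source): the compound unique
# index above means an account is naturally looked up by (provider, provider_id).
# The provider name 'github' and id '12345' are illustrative; NoResultsFound is
# the same exception used by get_or_create in OsfStorageGuidFile above.
def _example_find_external_account(provider='github', provider_id='12345'):
    try:
        return ExternalAccount.find_one(
            Q('provider', 'eq', provider) &
            Q('provider_id', 'eq', provider_id)
        )
    except NoResultsFound:
        return None
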
class Blog(StoredObject):
    _id = fields.StringField(primary=True, optimistic=True)
    body = fields.StringField(default='blog body')
    title = fields.StringField(default='asdfasdfasdf', validate=MinLengthValidator(8))
    tag = fields.ForeignField('Tag', backref='tagged')
    tags = fields.ForeignField('Tag', list=True, backref='taggeds')

    _meta = {
        'optimistic': True,
        'log_level': logging.DEBUG,
    }

class ForwardNodeSettings(AddonNodeSettingsBase):

    url = fields.StringField(validate=URLValidator())
    label = fields.StringField(validate=sanitized)
    redirect_bool = fields.BooleanField(default=True, validate=True)
    redirect_secs = fields.IntegerField(
        default=15,
        validate=[MinValueValidator(5), MaxValueValidator(60)]
    )

    @property
    def link_text(self):
        return self.label if self.label else self.url

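# Hedged sketch (assumed, not part of the original source): the shape of data a
# view might pull from these settings when rendering the forward page. The dict
# keys are illustrative, not the addon's real serializer.
def _example_forward_config(node_settings):
    return {
        'url': node_settings.url,
        'linkText': node_settings.link_text,
        'redirect': node_settings.redirect_bool,
        'redirectSecs': node_settings.redirect_secs,
    }
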