def setUp(self):
    """Build an in-memory (mim) datastore and a mapped `Basic` class whose
    mapper extension asserts the expected ORM state at each lifecycle hook."""
    self.datastore = DS.DataStore('mim:///', database='test_db')
    self.session = ORMSession(bind=self.datastore)

    class BasicMapperExtension(MapperExtension):
        # Each hook checks that the instance's ORM state matches the
        # phase of the flush it is called in.
        def before_insert(self, instance, state):
            assert state.status == 'new'

        def after_insert(self, instance, state):
            assert state.status == 'clean'

        def before_update(self, instance, state):
            assert state.status == 'dirty'

        def after_update(self, instance, state):
            assert state.status == 'clean'

    class Basic(MappedClass):
        class __mongometa__:
            name = 'basic'
            session = self.session
            extensions = [BasicMapperExtension]

        _id = FieldProperty(S.ObjectId)
        a = FieldProperty(int)
        b = FieldProperty([int])
        c = FieldProperty({'d': int, 'e': int})

    Mapper.compile_all()
    self.Basic = Basic
    # Start each test from an empty collection.
    self.session.remove(self.Basic)
def setUp(self):
    """Create a mim-backed thread-local session, one mapped `Parent` class,
    and three Ming-middleware-wrapped WSGI test apps."""
    self.datastore = DS.DataStore('mim:///', database='test_db')
    self.session = ThreadLocalORMSession(Session(bind=self.datastore))

    class Parent(MappedClass):
        class __mongometa__:
            name = 'parent'
            session = self.session

        _id = FieldProperty(S.ObjectId)

    Mapper.compile_all()
    self.Parent = Parent
    # Wrap each WSGI handler in MingMiddleware so the ORM session is
    # flushed/closed per request.
    for attr, handler in (
            ('create_app', self._wsgi_create_object),
            ('remove_app', self._wsgi_remove_object),
            ('remove_exc', self._wsgi_remove_object_exc)):
        setattr(self, attr, TestApp(MingMiddleware(handler)))
def command(self):
    """Ensure all MongoDB indexes declared on mapped classes exist.

    Collects index declarations from every Ming mapper, splits them into
    main-DB indexes (keyed by ODM session, then collection name) and
    project-DB indexes (keyed by collection name), then applies each set
    via ``self._update_indexes``.
    """
    from allura import model as M
    main_session_classes = [M.main_orm_session, M.repository_orm_session,
                            M.task_orm_session]
    if asbool(self.config.get('activitystream.recording.enabled', False)):
        # Optional dependency: only imported when activitystream is enabled.
        from activitystream.storage.mingstorage import activity_orm_session
        main_session_classes.append(activity_orm_session)
    self.basic_setup()
    main_indexes = defaultdict(lambda: defaultdict(list))  # by db, then collection name
    project_indexes = defaultdict(list)  # by collection name
    base.log.info('Collecting indexes...')
    for m in Mapper.all_mappers():
        mgr = m.collection.m
        cname = mgr.collection_name
        cls = m.mapped_class
        if cname is None:
            # Abstract mapped classes have no backing collection.
            base.log.info('... skipping abstract class %s', cls)
            continue
        base.log.info('... for class %s', cls)
        if session(cls) in main_session_classes:
            idx = main_indexes[session(cls)][cname]
        else:
            idx = project_indexes[cname]
        idx.extend(mgr.indexes)
    base.log.info('Updating indexes for main DB')
    # FIX: .items() instead of .iteritems() -- iteritems() is Python-2-only
    # and raises AttributeError under Python 3; .items() works on both.
    for odm_session, db_indexes in main_indexes.items():
        db = odm_session.impl.db
        for name, indexes in db_indexes.items():
            self._update_indexes(db[name], indexes)
    base.log.info('Updating indexes for project DB')
    db = M.project_doc_session.db
    base.log.info('... DB: %s', db)
    for name, indexes in project_indexes.items():
        self._update_indexes(db[name], indexes)
    base.log.info('Done updating indexes')
def command(self):
    """Recreate attachment thumbnails for every (or one named) project.

    Builds a map of top-level package -> project-session attachment
    classes, then for each project/app regenerates thumbnails via
    ``self.process_att_of_type``.  With ``--force``, pre-existing
    thumbnails are removed first.
    """
    from allura import model as M
    # self.basic_setup()
    existing_thumbs = 0
    base.log.info('Collecting application attachment classes')
    # Top-level package name -> list of BaseAttachment subclasses that
    # live in the project ORM session.
    package_model_map = {}
    for m in Mapper.all_mappers():
        sess = m.session
        cls = m.mapped_class
        if issubclass(cls, M.BaseAttachment):
            if sess is M.project_orm_session:
                package = cls.__module__.split('.', 1)[0]
                # setdefault replaces the get/append/store dance.
                package_model_map.setdefault(package, []).append(cls)
    if len(self.args) > 1:
        projects = M.Project.query.find({'shortname': self.args[1]})
    else:
        projects = M.Project.query.find()
    for p in projects:
        base.log.info('=' * 20)
        base.log.info("Processing project '%s'", p.shortname)
        c.project = p
        if self.options.force:
            existing_thumbs += M.BaseAttachment.query.find({'type': 'thumbnail'}).count()
            base.log.info('Removing %d current thumbnails (per --force)', existing_thumbs)
            M.BaseAttachment.query.remove({'type': 'thumbnail'})
        # ProjectFile's live in main collection (unlike File's)
        # M.ProjectFile.query.find({'app_config_id': None, 'type': 'attachment'}).all()
        for app in p.app_configs:
            base.log.info("Processing application '%s' mounted at '%s' of type '%s'",
                          app.options['mount_label'],
                          app.options['mount_point'],
                          app.tool_name)
            # Any application may contain DiscussionAttachment's, it has discussion_id field
            self.process_att_of_type(
                M.DiscussionAttachment,
                {'app_config_id': app._id, 'discussion_id': {'$ne': None}})
            # Otherwise, we'll take attachment classes belonging to app's package.
            # FIX: builtin next(...) instead of .next() -- the .next() method
            # is Python-2-only and was removed in Python 3.
            ep = next(iter_entry_points('allura', app.tool_name))
            app_package = ep.module_name.split('.', 1)[0]
            if app_package == 'allura':
                # Apps in allura known to not define own attachment types
                continue
            classes = package_model_map.get(app_package, [])
            for cls in classes:
                self.process_att_of_type(
                    cls, {'app_config_id': app._id, 'discussion_id': None})
        base.log.info('-' * 10)
    base.log.info('Recreated %d thumbs', self.created_thumbs)
    if self.options.force:
        if existing_thumbs != self.created_thumbs:
            base.log.warning(
                'There were %d thumbs before --force operation started, but %d recreated',
                existing_thumbs, self.created_thumbs)
    ThreadLocalORMSession.flush_all()
def build_model_inheritance_graph():
    """Return an inheritance graph of all Ming-mapped classes.

    :return: dict mapping each mapped class to a ``(parents, children)``
        pair of lists; only bases that are themselves mapped classes
        appear as parents/children.
    """
    graph = dict((m.mapped_class, ([], [])) for m in Mapper.all_mappers())
    # FIX: .items() instead of .iteritems() -- iteritems() is Python-2-only
    # and raises AttributeError under Python 3; .items() works on both.
    for cls, (parents, children) in graph.items():
        for parent_cls in cls.__bases__:
            # Ignore bases that are not mapped classes (e.g. object, mixins).
            if parent_cls not in graph:
                continue
            parents.append(parent_cls)
            graph[parent_cls][1].append(cls)
    return graph
def setUp(self):
    """Create a mim-backed ORM session and a `Basic` mapped class that
    exercises plain, list, dict, and missing-value field properties."""
    self.datastore = DS.DataStore('mim:///', database='test_db')
    self.session = ORMSession(bind=self.datastore)

    class Basic(MappedClass):
        class __mongometa__:
            name = 'basic'
            session = self.session

        _id = FieldProperty(S.ObjectId)
        a = FieldProperty(int)
        b = FieldProperty([int])
        c = FieldProperty({'d': int, 'e': int})
        # `d` yields None when missing; `e` stays S.Missing when missing.
        d = FieldPropertyWithMissingNone(str, if_missing=S.Missing)
        e = FieldProperty(str, if_missing=S.Missing)

    Mapper.compile_all()
    self.Basic = Basic
    # Ensure an empty collection before each test runs.
    self.session.remove(self.Basic)
def setUp(self):
    """Create a mim-backed session plus a one-to-many `Parent`/`Child`
    pair linked via ForeignIdProperty/RelationProperty."""
    self.datastore = DS.DataStore('mim:///', database='test_db')
    self.session = ORMSession(bind=self.datastore)

    class Parent(MappedClass):
        class __mongometa__:
            name = 'parent'
            session = self.session

        _id = FieldProperty(int)
        # Reverse side of Child.parent, resolved after compile_all().
        children = RelationProperty('Child')

    class Child(MappedClass):
        class __mongometa__:
            name = 'child'
            session = self.session

        _id = FieldProperty(int)
        parent_id = ForeignIdProperty('Parent')
        parent = RelationProperty('Parent')

    Mapper.compile_all()
    self.Child = Child
    self.Parent = Parent
def setUp(self):
    """Create a mim-backed document+ORM session pair and a polymorphic
    `Base`/`Derived` hierarchy discriminated on the 'type' field."""
    self.bind = DS.DataStore(master='mim:///', database='test_db')
    self.doc_session = Session(self.bind)
    self.orm_session = ORMSession(self.doc_session)

    class Base(MappedClass):
        class __mongometa__:
            name = 'test_doc'
            session = self.orm_session
            # Discriminator column and this class's identity value.
            polymorphic_on = 'type'
            polymorphic_identity = 'base'

        _id = FieldProperty(S.ObjectId)
        type = FieldProperty(str, if_missing='base')
        a = FieldProperty(int)

    class Derived(Base):
        class __mongometa__:
            polymorphic_identity = 'derived'

        type = FieldProperty(str, if_missing='derived')
        b = FieldProperty(int)

    Mapper.compile_all()
    self.Derived = Derived
    self.Base = Base
def command(self):
    """Ensure declared MongoDB indexes exist on the main DB and every
    project DB.

    Project DBs are discovered by walking all projects in chunks and
    de-duplicating on database URI; if no project DB was configured
    (e.g. during bootstrap), the default project DB is still indexed.
    """
    from allura import model as M
    self.basic_setup()
    main_indexes = defaultdict(
        lambda: defaultdict(list))  # by db, then collection name
    project_indexes = defaultdict(list)  # by collection name
    base.log.info('Collecting indexes...')
    for m in Mapper.all_mappers():
        mgr = m.collection.m
        cname = mgr.collection_name
        cls = m.mapped_class
        if cname is None:
            # Abstract mapped classes have no backing collection.
            base.log.info('... skipping abstract class %s', cls)
            continue
        base.log.info('... for class %s', cls)
        if session(cls) in (M.main_orm_session, M.repository_orm_session,
                            M.task_orm_session):
            idx = main_indexes[session(cls)][cname]
        else:
            idx = project_indexes[cname]
        idx.extend(mgr.indexes)
    base.log.info('Updating indexes for main DB')
    # FIX: .items() instead of .iteritems() -- iteritems() is Python-2-only
    # and raises AttributeError under Python 3; .items() works on both.
    for odm_session, db_indexes in main_indexes.items():
        db = odm_session.impl.db
        for name, indexes in db_indexes.items():
            self._update_indexes(db[name], indexes)
    base.log.info('Updating indexes for project DBs')
    configured_dbs = set()
    for projects in utils.chunked_find(M.Project):
        for p in projects:
            db = p.database_uri
            if db in configured_dbs:
                continue
            configured_dbs.add(db)
            # Setting c.project makes project_doc_session resolve to p's DB.
            c.project = p
            db = M.project_doc_session.db
            base.log.info('... DB: %s', db)
            for name, indexes in project_indexes.items():
                self._update_indexes(db[name], indexes)
    if not configured_dbs:
        # e.g. during bootstrap with no projects
        db = M.project_doc_session.db
        base.log.info('... default DB: %s', db)
        for name, indexes in project_indexes.items():
            self._update_indexes(db[name], indexes)
    base.log.info('Done updating indexes')
def command(self):
    """Ensure declared MongoDB indexes exist on the main DB and every
    project DB.

    Project DBs are discovered by walking all projects in chunks and
    de-duplicating on database URI; if no project DB was configured
    (e.g. during bootstrap), the default project DB is still indexed.
    """
    from allura import model as M
    self.basic_setup()
    main_indexes = defaultdict(lambda: defaultdict(list))  # by db, then collection name
    project_indexes = defaultdict(list)  # by collection name
    base.log.info('Collecting indexes...')
    for m in Mapper.all_mappers():
        mgr = m.collection.m
        cname = mgr.collection_name
        cls = m.mapped_class
        if cname is None:
            # Abstract mapped classes have no backing collection.
            base.log.info('... skipping abstract class %s', cls)
            continue
        base.log.info('... for class %s', cls)
        if session(cls) in (
                M.main_orm_session, M.repository_orm_session,
                M.task_orm_session):
            idx = main_indexes[session(cls)][cname]
        else:
            idx = project_indexes[cname]
        idx.extend(mgr.indexes)
    base.log.info('Updating indexes for main DB')
    # FIX: .items() instead of .iteritems() -- iteritems() is Python-2-only
    # and raises AttributeError under Python 3; .items() works on both.
    for odm_session, db_indexes in main_indexes.items():
        db = odm_session.impl.db
        for name, indexes in db_indexes.items():
            self._update_indexes(db[name], indexes)
    base.log.info('Updating indexes for project DBs')
    configured_dbs = set()
    for projects in utils.chunked_find(M.Project):
        for p in projects:
            db = p.database_uri
            if db in configured_dbs:
                continue
            configured_dbs.add(db)
            # Setting c.project makes project_doc_session resolve to p's DB.
            c.project = p
            db = M.project_doc_session.db
            base.log.info('... DB: %s', db)
            for name, indexes in project_indexes.items():
                self._update_indexes(db[name], indexes)
    if not configured_dbs:
        # e.g. during bootstrap with no projects
        db = M.project_doc_session.db
        base.log.info('... default DB: %s', db)
        for name, indexes in project_indexes.items():
            self._update_indexes(db[name], indexes)
    base.log.info('Done updating indexes')
def command(self):
    """Ensure all MongoDB indexes declared on mapped classes exist.

    Splits declared indexes into main-DB indexes (grouped by ODM session
    and collection name) and project-DB indexes (by collection name),
    then applies each group via ``self._update_indexes``.
    """
    from allura import model as M
    main_session_classes = [
        M.main_orm_session,
        M.repository_orm_session,
        M.task_orm_session,
        M.main_explicitflush_orm_session,
    ]
    if asbool(self.config.get('activitystream.recording.enabled', False)):
        # Optional dependency: only imported when activitystream is enabled.
        from activitystream.storage.mingstorage import activity_odm_session
        main_session_classes.append(activity_odm_session)
    self.basic_setup()
    main_indexes = defaultdict(lambda: defaultdict(list))  # db -> collection -> indexes
    project_indexes = defaultdict(list)  # collection -> indexes
    base.log.info('Collecting indexes...')
    for mapper in Mapper.all_mappers():
        manager = mapper.collection.m
        collection_name = manager.collection_name
        mapped_cls = mapper.mapped_class
        if collection_name is None:
            # Abstract mapped classes have no backing collection.
            base.log.info('... skipping abstract class %s', mapped_cls)
            continue
        base.log.info('... for class %s', mapped_cls)
        cls_session = session(mapped_cls)
        if cls_session in main_session_classes:
            bucket = main_indexes[cls_session][collection_name]
        else:
            bucket = project_indexes[collection_name]
        bucket.extend(manager.indexes)
    base.log.info('Updating indexes for main DB')
    for odm_session, db_indexes in six.iteritems(main_indexes):
        db = odm_session.impl.db
        for name, indexes in six.iteritems(db_indexes):
            self._update_indexes(db[name], indexes)
    base.log.info('Updating indexes for project DB')
    db = M.project_doc_session.db
    base.log.info('... DB: %s', db)
    for name, indexes in six.iteritems(project_indexes):
        self._update_indexes(db[name], indexes)
    base.log.info('Done updating indexes')
@property
def html_text(self):
    """A markdown processed version of the page text"""
    return g.markdown_wiki.cached_convert(self, 'text')

def authors(self):
    """All the users that have edited this page"""
    def uniq(users):
        # De-duplicate by username; keeps one id per distinct author.
        t = {}
        for user in users:
            t[user.username] = user.id
        # NOTE(review): on Python 3 this is a dict view, not a list --
        # confirm the downstream '$in' query accepts it, or wrap in list().
        return t.values()
    user_ids = uniq([r.author for r in self.history().all()])
    return User.query.find({'_id': {'$in': user_ids}}).all()

def delete(self):
    # Drop shortlinks pointing at this page, then soft-delete it.
    Shortlink.query.remove(dict(ref_id=self.index_id()))
    self.deleted = True
    # Timestamp suffix frees up the original title for re-use.
    suffix = " {:%Y-%m-%d %H:%M:%S.%f}".format(datetime.utcnow())
    self.title += suffix

class WikiAttachment(BaseAttachment):
    # Attachments on wiki pages; discriminated from other attachment
    # types by the polymorphic identity below.
    ArtifactType = Page

    class __mongometa__:
        polymorphic_identity = 'WikiAttachment'

    attachment_type = FieldProperty(str, if_missing='WikiAttachment')

Mapper.compile_all()
# Classify each changed path from the SVN log entry into the result buckets.
# NOTE(review): fragment -- `paths`, `start`, `end`, `result`, `h` are defined
# outside this view; `p` appears to support both item and attribute access.
for p in paths[start:end]:
    if p['copyfrom_path'] is not None:
        # Path has a copy source: candidate for a copy (or rename, below).
        result['copied'].append({
            'new': h.really_unicode(p.path),
            'old': h.really_unicode(p.copyfrom_path),
            'ratio': 1,
        })
    elif p['action'] == 'A':
        result['added'].append(h.really_unicode(p.path))
    elif p['action'] == 'D':
        result['removed'].append(h.really_unicode(p.path))
    elif p['action'] in ['M', 'R']:
        # 'R' means 'Replaced', i.e.
        # svn rm aaa.txt
        # echo "Completely new aaa!" > aaa.txt
        # svn add aaa.txt
        # svn commit -m "Replace aaa.txt"
        result['changed'].append(h.really_unicode(p.path))
# Promote copy+delete pairs to renames; iterate a slice copy because
# result['copied'] is mutated inside the loop.
for r in result['copied'][:]:
    if r['old'] in result['removed']:
        result['removed'].remove(r['old'])
        result['copied'].remove(r)
        result['renamed'].append(r)
    if r['new'] in result['added']:
        result['added'].remove(r['new'])
return result

Mapper.compile_all()