def update_neighborhood_acl(neighborhood_doc, init_doc):
    '''Convert nbhd admins users to --init-- project admins'''
    # options.test (module global, set by the -t/--test flag) makes this a
    # dry run: log intended changes, write nothing.
    if options.test:
        log.info('Update nbhd %s', neighborhood_doc['name'])
    if 'acl' not in neighborhood_doc:
        # 'acl' is deleted at the end of this function, so its absence means
        # this neighborhood has already been migrated.
        log.warning('Neighborhood %s already updated', neighborhood_doc['name'])
        return
    p = Object(init_doc)
    p.root_project = p
    r_anon = _project_role(init_doc['_id'], '*anonymous')
    r_auth = _project_role(init_doc['_id'], '*authenticated')
    r_admin = _project_role(init_doc['_id'], 'Admin')
    acl = neighborhood_doc['acl']
    new_acl = list(init_doc['acl'])
    assert acl['read'] == [None]  # nbhd should be public
    # Grant the --init-- project Admin role to every nbhd admin/moderator.
    for uid in acl['admin'] + acl['moderate']:
        u = c_user.find(dict(_id=uid)).next()
        if options.test:
            log.info('... grant nbhd admin to: %s', u['username'])
            continue
        role = _project_role(init_doc['_id'], user_id=uid)
        if r_admin['_id'] not in role['roles']:
            role['roles'].append(r_admin['_id'])
        c_project_role.save(role)
    _grant(new_acl, 'read', r_anon['_id'])
    _grant(new_acl, 'admin', r_admin['_id'])
    _grant(new_acl, 'register', r_admin['_id'])
    if acl['create'] == [ ]:
        # empty 'create' list == anyone could register projects here
        if options.test:
            log.info('grant register to auth')
        _grant(new_acl, 'register', r_auth['_id'])
    del neighborhood_doc['acl']
    if options.test:
        log.info('--- new init acl:\n%s\n%s\n---',
                 pformat(_format_acd(init_doc['acl'])),
                 pformat(map(_format_ace, new_acl)))
    init_doc['acl'] = new_acl
def refresh_tree_info(self, tree, seen, lazy=True):
    '''Build and save a TreeDoc for a git tree object, recursing into subtrees.

    :param tree: a GitPython-style tree object (has binsha/hexsha and entries)
    :param seen: set of binsha values already processed this run
    :param lazy: when True, skip trees whose binsha is already in ``seen``
    :return: the saved TreeDoc, or None when skipped
    '''
    from allura.model.repository import TreeDoc
    if lazy and tree.binsha in seen:
        return
    seen.add(tree.binsha)
    doc = TreeDoc(dict(
        _id=tree.hexsha,
        tree_ids=[],
        blob_ids=[],
        other_ids=[]))
    for o in tree:
        # submodules are not indexed at all
        if o.type == 'submodule':
            continue
        obj = Object(
            name=h.really_unicode(o.name),
            id=o.hexsha)
        if o.type == 'tree':
            self.refresh_tree_info(o, seen, lazy)
            doc.tree_ids.append(obj)
        elif o.type == 'blob':
            doc.blob_ids.append(obj)
        else:
            # anything else keeps its git type tag in other_ids
            obj.type = o.type
            doc.other_ids.append(obj)
    doc.m.save(safe=False)
    return doc
def by_name(self):
    '''Map each child entry's name to its entry object.

    Tree and blob entries are wrapped in an Object carrying a ``type`` tag
    ('tree' / 'blob'); other entries are returned as-is.  Later categories
    override earlier ones on a name collision (others, then trees, then blobs).
    '''
    result = Object()
    for entry in self.other_ids:
        result[entry.name] = entry
    for entry in self.tree_ids:
        result[entry.name] = Object(entry, type='tree')
    for entry in self.blob_ids:
        result[entry.name] = Object(entry, type='blob')
    return result
def test_get_set(self):
    '''Object supports both attribute and item access over the same data.'''
    d = dict(a=1, b=2)
    obj = Object(d, c=3)
    self.assertEqual(1, obj.a)
    self.assertEqual(1, obj['a'])
    self.assertEqual(3, obj.c)
    self.assertEqual(3, obj['c'])
    obj.d = 5
    self.assertEqual(5, obj['d'])
    self.assertEqual(5, obj.d)
    # an Object compares equal to a plain dict with the same contents
    self.assertEqual(obj, dict(a=1, b=2, c=3, d=5))
    # missing attribute raises AttributeError (not KeyError)
    self.assertRaises(AttributeError, getattr, obj, 'e')
def test_convert_post_content():
    '''convert_post_content rewrites mapped rel### tokens into file links,
    but only on whole-token matches (rel103a must not match rel103).'''
    nbhd = Object()
    nbhd.url_prefix = '/motorola/'
    text = '''rel100? or ?rel101 or rel102 or rel103a or rel104'''
    mapping = dict(
        rel100='rel/100/',
        rel101='rel/101/',
        rel102='rel/102/',
        rel103='rel/103/',
        rel104='rel/104/')
    converted = convert_post_content(mapping, 'foo', text, nbhd)
    assert 'href="/projects/foo.motorola/files/rel/100' in converted, converted
    assert 'href="/projects/foo.motorola/files/rel/101' in converted, converted
    assert 'href="/projects/foo.motorola/files/rel/102' in converted, converted
    # 'rel103a' in the text must not produce a rel/103 link
    assert 'href="/projects/foo.motorola/files/rel/103' not in converted, converted
    assert 'href="/projects/foo.motorola/files/rel/104' in converted, converted
def test_safe(self):
    '''make_safe() converts Decimal to float in place; tuples are rejected.'''
    now = datetime.now()
    oid = ObjectId()
    safe_obj = Object(
        a=[1,2,3],
        b=dict(a=12),
        c=[ 'foo', 1, 1L, 1.0, now, Decimal('0.3'), None, oid ])
    safe_obj.make_safe()
    # Decimal('0.3') becomes the float 0.3; all other values are unchanged.
    self.assertEqual(safe_obj, dict(
        a=[1,2,3],
        b=dict(a=12),
        c=[ 'foo', 1, 1L, 1.0, now, 0.3, None, oid ]))
    # a tuple is not a safe BSON value, so make_safe must raise
    unsafe_obj = Object(
        my_tuple=(1,2,3))
    self.assertRaises(AssertionError, unsafe_obj.make_safe)
def __init__(self, session, cls, ming_cursor, refresh=False, decorate=None):
    # Wrap a raw ming cursor for ODM iteration.  _options drives object
    # construction: refresh=True overwrites identity-mapped objects from the
    # DB; decorate, when set, post-processes each yielded object.
    self.session = session
    self.cls = cls
    self.mapper = mapper(cls)
    self.ming_cursor = ming_cursor
    self._options = Object(
        refresh=refresh,
        decorate=decorate,
        instrument=True)
def index(self, version=None, **kw):
    '''Show a post (GET, optionally a historical ``version``) or apply an edit (POST).'''
    c.post = self.W.post
    if request.method == 'POST':
        require_access(self.post, 'moderate')
        post_fields = self.W.edit_post.to_python(kw, None)
        file_info = post_fields.pop('file_info', None)
        self.post.add_multiple_attachments(file_info)
        for k, v in post_fields.iteritems():
            try:
                setattr(self.post, k, v)
            except AttributeError:
                # silently skip fields that cannot be set on the post
                continue
        self.post.edit_count = self.post.edit_count + 1
        self.post.last_edit_date = datetime.utcnow()
        self.post.last_edit_by_id = c.user._id
        self.thread.is_spam(self.post)  # run spam checker, nothing to do with result yet
        self.post.commit()
        g.director.create_activity(c.user, 'modified', self.post,
                                   target=self.post.thread.artifact or self.post.thread,
                                   related_nodes=[self.post.app_config.project],
                                   tags=['comment'])
        redirect(request.referer)
    elif request.method == 'GET':
        if self.post.deleted:
            raise exc.HTTPNotFound
        if version is not None:
            # look up the requested historical snapshot of this post
            HC = self.post.__mongometa__.history_class
            ss = HC.query.find({
                'artifact_id': self.post._id,
                'version': int(version)
            }).first()
            if not ss:
                raise exc.HTTPNotFound
            # present snapshot data, but borrow live metadata from the post
            post = Object(
                ss.data,
                acl=self.post.acl,
                author=self.post.author,
                url=self.post.url,
                thread=self.post.thread,
                reply_subject=self.post.reply_subject,
                attachments=self.post.attachments,
                related_artifacts=self.post.related_artifacts,
                parent_security_context=lambda: None,
                last_edit_by=lambda: self.post.last_edit_by())
        else:
            post = self.post
        return dict(discussion=self.post.discussion,
                    post=post)
def get_changes(self, oid):
    '''Return the list of paths changed by the commit identified by ``oid``.

    When ``svn log`` fails with a ClientError the commit is treated as
    having no changes (logged with traceback).
    '''
    rev = self._revision(oid)
    try:
        log_entry = self._svn.log(
            self._url,
            revision_start=rev,
            limit=1,
            discover_changed_paths=True)[0]
    except pysvn.ClientError:
        log.info('ClientError processing %r %r, treating as empty',
                 oid, self._repo, exc_info=True)
        # stand-in with an empty change list so the return below still works
        log_entry = Object(date='', message='', changed_paths=[])
    return [p.path for p in log_entry.changed_paths]
def compute_diffs(repo_id, tree_cache, rhs_ci):
    '''compute simple differences between a commit and its first parent

    Also refreshes last-commit data for the commit's tree.  ``tree_cache``
    (tree id -> TreeDoc) is mutated to hold only this commit's trees and is
    returned for reuse on the next call.
    '''
    if rhs_ci.tree_id is None:
        return tree_cache

    def _walk_tree(tree, tree_index):
        # yield the ids of every blob/other/tree under ``tree``, recursively
        for x in tree.blob_ids:
            yield x.id
        for x in tree.other_ids:
            yield x.id
        for x in tree.tree_ids:
            yield x.id
            for xx in _walk_tree(tree_index[x.id], tree_index):
                yield xx

    rhs_tree_ids = TreesDoc.m.get(_id=rhs_ci._id).tree_ids
    # diff against the first parent only
    if rhs_ci.parent_ids:
        lhs_ci = CommitDoc.m.get(_id=rhs_ci.parent_ids[0])
    else:
        lhs_ci = None
    if lhs_ci is not None:
        lhs_tree_ids = TreesDoc.m.get(_id=lhs_ci._id).tree_ids
    else:
        lhs_tree_ids = []
    # fetch only the trees not already cached
    new_tree_ids = [
        tid for tid in chain(lhs_tree_ids, rhs_tree_ids)
        if tid not in tree_cache]
    tree_index = dict(
        (t._id, t) for t in TreeDoc.m.find(dict(_id={'$in': new_tree_ids}),validate=False))
    tree_index.update(tree_cache)
    # keep only the RHS commit's trees cached for the next iteration
    rhs_tree_ids_set = set(rhs_tree_ids)
    tree_cache.clear()
    tree_cache.update(
        (id, t) for id,t in tree_index.iteritems() if id in rhs_tree_ids_set)
    rhs_tree = tree_index[rhs_ci.tree_id]
    if lhs_ci is None:
        # root commit: diff against an empty pseudo-tree
        lhs_tree = Object(_id=None, tree_ids=[], blob_ids=[], other_ids=[])
    else:
        lhs_tree = tree_index[lhs_ci.tree_id]
    differences = []
    commit_info = get_commit_info(rhs_ci)
    for name, lhs_id, rhs_id in _diff_trees(lhs_tree, rhs_tree, tree_index):
        differences.append(
            dict(name=name, lhs_id=lhs_id, rhs_id=rhs_id))
    # Set last commit data
    rhs_tree = tree_index[rhs_ci.tree_id]
    refresh_last_commit(repo_id, '/', rhs_tree, lhs_tree, None, commit_info)
    # Build the diffinfo
    di = DiffInfoDoc(dict(
        _id=rhs_ci._id,
        differences=differences))
    di.m.save()
    return tree_cache
def test_can_merge(self):
    '''can_merge() is True for a clean merge-tree, False when conflict markers appear.'''
    mr = mock.Mock(downstream_repo=Object(full_fs_path='downstream-url'),
                   source_branch='source-branch',
                   target_branch='target-branch',
                   downstream=mock.Mock(commit_id='cid'))
    git = mock.Mock()
    git.merge_tree.return_value = 'clean merge'
    self.repo._impl._git.git = git
    assert_equal(self.repo.can_merge(mr), True)
    # verify the exact git plumbing calls made by can_merge
    git.fetch.assert_called_once_with('downstream-url', 'source-branch')
    git.merge_base.assert_called_once_with('cid', 'target-branch')
    git.merge_tree.assert_called_once_with(git.merge_base.return_value,
                                           'target-branch', 'cid')
    # '+<<<<<<<' in merge-tree output marks a conflict
    git.merge_tree.return_value = '+<<<<<<<'
    assert_equal(self.repo.can_merge(mr), False)
def test_safe(self):
    '''make_safe() coerces Decimal values to float; tuples are rejected.'''
    now = datetime.now()
    oid = ObjectId()
    c = [ 'foo', 1, 1.0, now, Decimal('0.3'), None, oid ]
    if six.PY2:
        c = [ 'foo', 1, long(1), 1.0, now, Decimal('0.3'), None, oid ]
    safe_obj = Object(
        a=[1,2,3],
        b=dict(a=12),
        c=c)
    safe_obj.make_safe()
    # Build the expectation with the converted value (Decimal -> 0.3), as in
    # the sibling test that spells the expected list out literally.
    expected_c = [0.3 if isinstance(x, Decimal) else x for x in c]
    expected = Object(
        a=[1,2,3],
        b=dict(a=12),
        c=expected_c)
    # BUG FIX: assertTrue(expected, safe_obj) always passed -- the second
    # argument is just the failure message.  Actually compare the objects.
    self.assertEqual(expected, safe_obj)
    # a tuple is not a safe BSON value, so make_safe must raise
    unsafe_obj = Object(
        my_tuple=(1,2,3))
    self.assertRaises(AssertionError, unsafe_obj.make_safe)
def deserialize_project(datum, projectSchema, nbhd):
    # type: (dict, NewProjectSchema, Neighborhood) -> object
    '''Run ``datum`` through the schema and ensure a unique project shortname.'''
    p = projectSchema.deserialize(datum)
    p = Object(p)  # convert from dict to something with attr-access
    # generate a shortname, and try to make it unique
    if not p.shortname:
        max_shortname_len = 15  # maybe more depending on NeighborhoodProjectShortNameValidator impl, but this is safe
        shortname = orig_shortname = make_shortname(p.name, max_shortname_len)
        for i in range(1, 10):
            try:
                ProjectRegistrationProvider.get(
                ).shortname_validator.to_python(shortname, neighborhood=nbhd)
            except formencode.api.Invalid:
                # taken/invalid: append or substitute a digit suffix and retry
                if len(orig_shortname) == max_shortname_len - 1:
                    shortname = orig_shortname + str(i)
                else:
                    shortname = orig_shortname[:max_shortname_len - 1] + str(i)
            else:
                # we're good!
                break
        p.shortname = shortname
    return p
def _diff_trees(lhs, rhs, index, *path):
    '''Yield (fq_path, lhs_id, rhs_id) for every entry differing between two trees.

    ``index`` maps tree id -> tree doc; ``path`` holds ancestor names in
    reverse order, used to build fully-qualified paths.  lhs_id/rhs_id are
    None for added/removed entries respectively.
    '''
    def _fq(name):
        return '/'.join(reversed((name, ) + path))
    # Diff the trees (and keep deterministic order)
    rhs_tree_ids = OrderedDict((o.name, o.id) for o in rhs.tree_ids)
    for o in lhs.tree_ids:
        # remove so won't be picked up as added, below
        rhs_id = rhs_tree_ids.pop(o.name, None)
        if rhs_id == o.id:  # no change
            continue
        elif rhs_id is None:  # removed
            yield (_fq(o.name), o.id, None)
            # recurse against an empty tree so every child is reported removed
            rhs_tree = Object(_id=None, tree_ids=[], blob_ids=[], other_ids=[])
        else:  # changed
            rhs_tree = index[rhs_id]
        for difference in _diff_trees(index[o.id], rhs_tree, index, o.name, *path):
            yield difference
    for name, id in rhs_tree_ids.items():  # added
        yield (_fq(name), None, id)
        # recurse against an empty tree so every child is reported added
        lhs_tree = Object(_id=None, tree_ids=[], blob_ids=[], other_ids=[])
        for difference in _diff_trees(lhs_tree, index[id], index, name, *path):
            yield difference
    # Diff the blobs (and keep deterministic order)
    rhs_blob_ids = OrderedDict((o.name, o.id) for o in rhs.blob_ids)
    for o in lhs.blob_ids:
        rhs_id = rhs_blob_ids.pop(o.name, None)
        if rhs_id == o.id:
            continue  # no change
        elif rhs_id is None:
            yield (_fq(o.name), o.id, None)
        else:
            yield (_fq(o.name), o.id, rhs_id)
    for name, id in rhs_blob_ids.items():
        yield (_fq(name), None, id)
def update_neighborhood_acl(neighborhood_doc, init_doc):
    '''Convert nbhd admins users to --init-- project admins'''
    # options.test (module global, -t/--test flag) makes this a dry run.
    if options.test:
        log.info('Update nbhd %s', neighborhood_doc['name'])
    if 'acl' not in neighborhood_doc:
        # 'acl' is removed at the end of this function; its absence means
        # this neighborhood has already been migrated.
        log.warning('Neighborhood %s already updated', neighborhood_doc['name'])
        return
    p = Object(init_doc)
    p.root_project = p
    r_anon = _project_role(init_doc['_id'], '*anonymous')
    r_auth = _project_role(init_doc['_id'], '*authenticated')
    r_admin = _project_role(init_doc['_id'], 'Admin')
    acl = neighborhood_doc['acl']
    new_acl = list(init_doc['acl'])
    assert acl['read'] == [None]  # nbhd should be public
    # Grant the --init-- project Admin role to every nbhd admin/moderator.
    for uid in acl['admin'] + acl['moderate']:
        u = c_user.find(dict(_id=uid)).next()
        if options.test:
            log.info('... grant nbhd admin to: %s', u['username'])
            continue
        role = _project_role(init_doc['_id'], user_id=uid)
        if r_admin['_id'] not in role['roles']:
            role['roles'].append(r_admin['_id'])
        c_project_role.save(role)
    _grant(new_acl, 'read', r_anon['_id'])
    _grant(new_acl, 'admin', r_admin['_id'])
    _grant(new_acl, 'register', r_admin['_id'])
    if acl['create'] == []:
        # empty 'create' list == anyone could register projects here
        if options.test:
            log.info('grant register to auth')
        _grant(new_acl, 'register', r_auth['_id'])
    del neighborhood_doc['acl']
    if options.test:
        log.info('--- new init acl:\n%s\n%s\n---',
                 pformat(_format_acd(init_doc['acl'])),
                 pformat(map(_format_ace, new_acl)))
    init_doc['acl'] = new_acl
def test_no_drop(self):
    '''With clean=False, missing indexes are ensured but extra ones are kept.'''
    collection = Mock(name='collection')
    # existing indexes: the implicit _id index plus a compound (foo, bar)
    collection.index_information.return_value = {
        '_id_': {'key': '_id'},
        '_foo_bar': {'key': [('foo', 1), ('bar', 1)]},
    }
    # desired: a single-field index on foo
    indexes = [
        Mock(unique=False, index_spec=[('foo', 1)],
             index_options={'unique': False, 'sparse': False}),
    ]
    cmd = show_models.EnsureIndexCommand('ensure_index')
    cmd.options = Object(clean=False)
    cmd._update_indexes(collection, indexes)
    assert collection.ensure_index.called
    assert not collection.drop_index.called
def index(self, version=None, **kw):
    '''Show a post (GET, optionally a historical ``version``) or apply an edit (POST).'''
    c.post = self.W.post
    if request.method == 'POST':
        require_access(self.post, 'moderate')
        post_fields = self.W.edit_post.to_python(kw, None)
        file_info = post_fields.pop('file_info', None)
        # attach an uploaded file, if one was provided
        if hasattr(file_info, 'file'):
            self.post.attach(
                file_info.filename, file_info.file, content_type=file_info.type,
                post_id=self.post._id,
                thread_id=self.post.thread_id,
                discussion_id=self.post.discussion_id)
        for k, v in post_fields.iteritems():
            try:
                setattr(self.post, k, v)
            except AttributeError:
                # silently skip fields that cannot be set on the post
                continue
        self.post.edit_count = self.post.edit_count + 1
        self.post.last_edit_date = datetime.utcnow()
        self.post.last_edit_by_id = c.user._id
        g.director.create_activity(c.user, 'modified', self.post,
                                   target=self.post.thread.artifact or self.post.thread,
                                   related_nodes=[self.post.app_config.project])
        redirect(request.referer)
    elif request.method == 'GET':
        if version is not None:
            # look up the requested historical snapshot of this post
            HC = self.post.__mongometa__.history_class
            ss = HC.query.find({
                'artifact_id': self.post._id,
                'version': int(version)
            }).first()
            if not ss:
                raise exc.HTTPNotFound
            # present snapshot data, but borrow live metadata from the post
            post = Object(
                ss.data,
                acl=self.post.acl,
                author=self.post.author,
                url=self.post.url,
                thread=self.post.thread,
                reply_subject=self.post.reply_subject,
                attachments=self.post.attachments,
                related_artifacts=self.post.related_artifacts)
        else:
            post = self.post
        return dict(discussion=self.post.discussion,
                    post=post)
def test_safe(self):
    '''make_safe() coerces Decimal values to float; tuples are rejected.'''
    now = datetime.now()
    oid = ObjectId()
    c = ['foo', 1, 1.0, now, Decimal('0.3'), None, oid]
    safe_obj = Object(a=[1, 2, 3], b=dict(a=12), c=c)
    safe_obj.make_safe()
    # Build the expectation with the converted value (Decimal -> 0.3), as in
    # the sibling test that spells the expected list out literally.
    expected_c = [0.3 if isinstance(x, Decimal) else x for x in c]
    expected = Object(a=[1, 2, 3], b=dict(a=12), c=expected_c)
    # BUG FIX: assertTrue(expected, safe_obj) always passed -- the second
    # argument is just the failure message.  Actually compare the objects.
    self.assertEqual(expected, safe_obj)
    # a tuple is not a safe BSON value, so make_safe must raise
    unsafe_obj = Object(my_tuple=(1, 2, 3))
    self.assertRaises(AssertionError, unsafe_obj.make_safe)
def test_update_indexes_order(self):
    '''With clean=True, ensure_index must happen before drop_index so the
    collection is never left without the index.'''
    collection = Mock(name='collection')
    collection.index_information.return_value = {
        '_id_': {'key': '_id'},
        '_foo_bar': {'key': [('foo', 1), ('bar', 1)]},
    }
    indexes = [
        Mock(unique=False, index_spec=[('foo', 1)],
             index_options={'unique': False, 'sparse': False}),
    ]
    cmd = show_models.EnsureIndexCommand('ensure_index')
    cmd.options = Object(clean=True)
    cmd._update_indexes(collection, indexes)
    # record the position of the most recent call to each collection method
    collection_call_order = {}
    for i, call_ in enumerate(collection.mock_calls):
        method_name = call_[0]
        collection_call_order[method_name] = i
    assert collection_call_order['ensure_index'] < collection_call_order['drop_index'], collection.mock_calls
def setUp():
    '''Install a minimal mocked project/app context (tg ``c``) for unit tests.'''
    setup_unit_test()
    c.user._id = None
    c.project = mock.Mock()
    c.project.name = 'Test Project'
    c.project.shortname = 'tp'
    c.project._id = 'testproject/'
    c.project.url = lambda: '/testproject/'
    app_config = mock.Mock()
    app_config._id = None
    app_config.project_id = 'testproject/'
    app_config.tool_name = 'tool'
    app_config.options = Object(mount_point='foo')
    c.app = mock.Mock()
    c.app.config = app_config
    c.app.config.script_name = lambda: '/testproject/test_application/'
    c.app.config.url = lambda: 'http://testproject/test_application/'
    c.app.url = c.app.config.url()
    c.app.__version__ = '0.0'
def merge(self, mr):
    '''Merge merge-request ``mr``'s source branch into its target branch
    and push the result to this repo.

    A bare repo cannot perform a merge, so the work happens in a temporary
    non-bare clone, which is always removed -- even when a git step fails.
    '''
    # can't merge in bare repo, so need to clone
    tmp_path = tempfile.mkdtemp()
    try:
        git.Repo.clone_from(self.full_fs_path, to_path=tmp_path, bare=False)
        tmp_repo = GitImplementation(Object(full_fs_path=tmp_path))._git
        tmp_repo.git.fetch('origin', mr.target_branch)
        tmp_repo.git.checkout(mr.target_branch)
        tmp_repo.git.fetch(mr.downstream_repo.full_fs_path, mr.source_branch)
        # commit as the requesting user (no email recorded)
        author = h.really_unicode(c.user.display_name or c.user.username)
        tmp_repo.git.config('user.name', author)
        tmp_repo.git.config('user.email', '')
        msg = u'Merge {} branch {} into {}\n\n{}'.format(
            mr.downstream_repo.url(),
            mr.source_branch,
            mr.target_branch,
            h.absurl(mr.url()))
        tmp_repo.git.merge(mr.downstream.commit_id, '-m', msg)
        tmp_repo.git.push('origin', mr.target_branch)
    finally:
        # FIX: previously the temp clone leaked whenever any git call raised;
        # also dropped the unused local that shadowed the tg global ``g``.
        shutil.rmtree(tmp_path, ignore_errors=True)
def __init__(self, mapped_class, collection, session, **kwargs):
    '''Create and register a mapper binding ``mapped_class`` to a ming collection.

    Recognized keyword args: properties, include_properties,
    exclude_properties, extensions, options.  Any other keyword raises
    TypeError.
    '''
    self.mapped_class = mapped_class
    self.collection = collection
    self.session = session
    self.properties = []
    self.property_index = {}
    classname = f'{mapped_class.__module__}.{mapped_class.__name__}'
    # register this mapper in the class-level lookup tables
    self._mapper_by_collection[collection] = self
    self._mapper_by_class[mapped_class] = self
    self._mapper_by_classname[classname] = self
    self._all_mappers.append(self)
    properties = kwargs.pop('properties', {})
    include_properties = kwargs.pop('include_properties', None)
    exclude_properties = kwargs.pop('exclude_properties', [])
    extensions = kwargs.pop('extensions', [])
    # extensions are classes; instantiate each with this mapper
    self.extensions = [e(self) for e in extensions]
    self.options = Object(kwargs.pop('options', dict(refresh=False, instrument=True)))
    if kwargs:
        raise TypeError('Unknown kwd args: %r' % kwargs)
    self._instrument_class(properties, include_properties, exclude_properties)
def setUp():
    '''Push fresh tg globals and install a minimal mocked project/app context.'''
    g._push_object(Globals())
    c._push_object(mock.Mock())
    c.user._id = None
    c.project = mock.Mock()
    c.project.name = 'Test Project'
    c.project.shortname = 'tp'
    c.project._id = 'testproject/'
    # use the in-memory mongo (mim) for the project database
    c.project.database_uri = 'mim://nosetest:project'
    c.project.url = lambda: '/testproject/'
    app_config = mock.Mock()
    app_config._id = None
    app_config.project_id = 'testproject/'
    app_config.tool_name = 'tool'
    app_config.options = Object(mount_point='foo')
    c.app = mock.Mock()
    c.app.config = app_config
    c.app.config.script_name = lambda: '/testproject/test_application/'
    c.app.config.url = lambda: 'http://testproject/test_application/'
    c.app.url = c.app.config.url()
    c.app.__version__ = '0.0'
def _get_refs(self, field_name):
    """
    Returns a list of valid reference objects (branches or tags)
    from the git database

    :return: List of git ref objects.
    :rtype: list
    """
    cache_name = 'cached_' + field_name
    cache = getattr(self._repo, cache_name, None)
    if cache:
        return cache
    refs = []
    start_time = time()
    ref_list = getattr(self._git, field_name)
    for ref in ref_list:
        try:
            hex_sha = ref.commit.hexsha
        except ValueError:
            # broken ref (its sha cannot be resolved); skip it
            log.debug(u"Found invalid sha: {}".format(ref))
            continue
        refs.append(Object(name=ref.name, object_id=hex_sha))
    time_taken = time() - start_time
    # Cache the refs on the repo document only when enumeration was slower
    # than the configured threshold (repo_refs_cache_threshold, float seconds).
    threshold = tg.config.get('repo_refs_cache_threshold')
    try:
        threshold = float(threshold) if threshold else None
    except ValueError:
        threshold = None
        log.warn('Skipping reference caching - The value for config param '
                 '"repo_refs_cache_threshold" must be a float.')
    if threshold is not None and time_taken > threshold:
        setattr(self._repo, cache_name, refs)
        sess = session(self._repo)
        if sess:
            sess.flush(self._repo)
    return refs
def create(self, doc, options):
    # Shape/validate the raw doc through the ming collection, then let the
    # mapper registered for that document type build the ODM object.
    # ``options`` entries override this mapper's default options.
    doc = self.collection.make(doc)
    mapper = self.by_collection(type(doc))
    return mapper._from_doc(doc, Object(self.options, **options))
class ODMCursor(object):
    """Represents the results of query.

    The cursors can be iterated over to retrieve the results one by one.
    """

    def __bool__(self):
        # truth-testing a cursor is almost always a bug; force callers to
        # use .count() / .first() instead
        raise MingException('Cannot evaluate ODMCursor to a boolean')
    __nonzero__ = __bool__  # python 2

    def __init__(self, session, cls, ming_cursor, refresh=False, decorate=None,
                 fields=None):
        self.session = session
        self.cls = cls
        self.mapper = mapper(cls)
        self.ming_cursor = ming_cursor
        self._options = Object(
            refresh=refresh,
            decorate=decorate,
            fields=fields,
            instrument=True)

    def __iter__(self):
        return self

    @property
    def extensions(self):
        return self.session.extensions

    def count(self):
        """Get the number of objects retrieved by the query"""
        return self.ming_cursor.count()

    def _next_impl(self):
        # Fetch the next raw document and resolve it through the session's
        # identity map so repeated queries return the same object instance.
        doc = next(self.ming_cursor)
        obj = self.session.imap.get(self.cls, doc['_id'])
        if obj is None:
            obj = self.mapper.create(doc, self._options, remake=False)
            state(obj).status = ObjectState.clean
            self.session.save(obj)
        elif self._options.refresh:
            # Refresh object
            state(obj).update(doc)
            state(obj).status = ObjectState.clean
        else:
            # Never refresh objects from the DB unless explicitly requested
            pass
        # steal the object from any other session it may belong to
        other_session = session(obj)
        if other_session is not None and other_session != self:
            other_session.expunge(obj)
            self.session.save(obj)
        if self._options.decorate is not None:
            return self._options.decorate(obj)
        else:
            return obj

    def next(self):
        _call_hook(self, 'before_cursor_next', self)
        try:
            return self._next_impl()
        finally:
            _call_hook(self, 'after_cursor_next', self)
    __next__ = next

    def options(self, **kwargs):
        # clone this cursor with ``kwargs`` overriding its options
        odm_cursor = ODMCursor(self.session, self.cls, self.ming_cursor)
        odm_cursor._options = Object(self._options, **kwargs)
        _call_hook(self, 'cursor_created', odm_cursor, 'options', self, **kwargs)
        return odm_cursor

    def limit(self, limit):
        """Limit the number of entries retrieved by the query"""
        odm_cursor = ODMCursor(self.session, self.cls,
                               self.ming_cursor.limit(limit))
        _call_hook(self, 'cursor_created', odm_cursor, 'limit', self, limit)
        return odm_cursor

    def skip(self, skip):
        """Skip the first ``skip`` entries retrieved by the query"""
        odm_cursor = ODMCursor(self.session, self.cls,
                               self.ming_cursor.skip(skip))
        _call_hook(self, 'cursor_created', odm_cursor, 'skip', self, skip)
        return odm_cursor

    def hint(self, index_or_name):
        odm_cursor = ODMCursor(self.session, self.cls,
                               self.ming_cursor.hint(index_or_name))
        _call_hook(self, 'cursor_created', odm_cursor, 'hint', self, index_or_name)
        return odm_cursor

    def sort(self, *args, **kwargs):
        """Sort results of the query.

        See :meth:`pymongo.cursor.Cursor.sort` for details on the
        available arguments.
        """
        odm_cursor = ODMCursor(self.session, self.cls,
                               self.ming_cursor.sort(*args, **kwargs))
        _call_hook(self, 'cursor_created', odm_cursor, 'sort', self, *args, **kwargs)
        return odm_cursor

    def one(self):
        """Gets one result and exaclty one.

        Raises ``ValueError`` exception if less or more than one result is
        returned by the query.
        """
        try:
            result = self.next()
        except StopIteration:
            raise ValueError('Less than one result from .one()')
        try:
            self.next()
        except StopIteration:
            return result
        raise ValueError('More than one result from .one()')

    def first(self):
        """Gets the first result of the query"""
        try:
            return self.next()
        except StopIteration:
            return None

    def all(self):
        """Retrieve all the results of the query"""
        return list(self)
class ODMCursor(object):
    '''Iterator over the ODM objects matched by a query (Python 2 version).'''

    def __init__(self, session, cls, ming_cursor, refresh=False, decorate=None):
        self.session = session
        self.cls = cls
        self.mapper = mapper(cls)
        self.ming_cursor = ming_cursor
        self._options = Object(
            refresh=refresh,
            decorate=decorate,
            instrument=True)

    def __iter__(self):
        return self

    def __len__(self):
        return self.count()

    @property
    def extensions(self):
        return self.session.extensions

    def count(self):
        return self.ming_cursor.count()

    def _next_impl(self):
        # Fetch the next raw document and resolve it through the session's
        # identity map so repeated queries return the same object instance.
        doc = self.ming_cursor.next()
        obj = self.session.imap.get(self.cls, doc['_id'])
        if obj is None:
            obj = self.mapper.create(doc, self._options)
            state(obj).status = ObjectState.clean
            self.session.save(obj)
        elif self._options.refresh:
            # Refresh object
            state(obj).update(doc)
            state(obj).status = ObjectState.clean
        else:
            # Never refresh objects from the DB unless explicitly requested
            pass
        # steal the object from any other session it may belong to
        other_session = session(obj)
        if other_session is not None and other_session != self:
            other_session.expunge(obj)
            self.session.save(obj)
        if self._options.decorate is not None:
            return self._options.decorate(obj)
        else:
            return obj

    def next(self):
        call_hook(self, 'before_cursor_next', self)
        try:
            return self._next_impl()
        finally:
            call_hook(self, 'after_cursor_next', self)

    def options(self, **kwargs):
        # clone this cursor with ``kwargs`` overriding its options
        odm_cursor = ODMCursor(self.session, self.cls, self.ming_cursor)
        odm_cursor._options = Object(self._options, **kwargs)
        call_hook(self, 'cursor_created', odm_cursor, 'options', self, **kwargs)
        return odm_cursor

    def limit(self, limit):
        odm_cursor = ODMCursor(self.session, self.cls,
                               self.ming_cursor.limit(limit))
        call_hook(self, 'cursor_created', odm_cursor, 'limit', self, limit)
        return odm_cursor

    def skip(self, skip):
        odm_cursor = ODMCursor(self.session, self.cls,
                               self.ming_cursor.skip(skip))
        call_hook(self, 'cursor_created', odm_cursor, 'skip', self, skip)
        return odm_cursor

    def hint(self, index_or_name):
        odm_cursor = ODMCursor(self.session, self.cls,
                               self.ming_cursor.hint(index_or_name))
        call_hook(self, 'cursor_created', odm_cursor,
                  'hint', self, index_or_name)
        return odm_cursor

    def sort(self, *args, **kwargs):
        odm_cursor = ODMCursor(self.session, self.cls,
                               self.ming_cursor.sort(*args, **kwargs))
        call_hook(self, 'cursor_created', odm_cursor, 'sort', self, *args, **kwargs)
        return odm_cursor

    def one(self):
        # exactly one result, else ValueError (py2 raise syntax preserved)
        try:
            result = self.next()
        except StopIteration:
            raise ValueError, 'Less than one result from .one()'
        try:
            self.next()
        except StopIteration:
            return result
        raise ValueError, 'More than one result from .one()'

    def first(self):
        try:
            return self.next()
        except StopIteration:
            return None

    def all(self):
        return list(self)
def by_name(self):
    '''Index this tree's child entries by name.

    Trees and blobs are wrapped in an Object tagged with ``type``
    ('tree'/'blob'); other entries are stored unwrapped.  On name clashes,
    blobs win over trees, which win over "other" entries (same as the
    original update order).
    '''
    entries = Object()
    for other in self.other_ids:
        entries[other.name] = other
    for subtree in self.tree_ids:
        entries[subtree.name] = Object(subtree, type='tree')
    for blob in self.blob_ids:
        entries[blob.name] = Object(blob, type='blob')
    return entries
def __init__(*args, **kwargs):
    # NOTE(review): ``self`` and ``self_`` are free variables from the
    # enclosing closure -- presumably ``self`` is the decorator object
    # (holding mapper/func/save) and ``self_`` the instance under
    # construction; confirm against the enclosing definition.
    # Attach ODM bookkeeping before running the wrapped __init__.
    self_.__ming__ = _ORMDecoration(self.mapper, self_, Object(self.mapper.options))
    self.func(self_, *args, **kwargs)
    # auto-save newly constructed objects into the mapper's session, if any
    if self.mapper.session:
        self.save(self_)
class ODMCursor(object):
    """Represents the results of query.

    The cursors can be iterated over to retrieve the results one by one.
    """

    def __bool__(self):
        # truth-testing a cursor is almost always a bug; force callers to
        # use .count() / .first() instead
        raise MingException('Cannot evaluate ODMCursor to a boolean')
    __nonzero__ = __bool__  # python 2

    def __init__(self, session, cls, ming_cursor, refresh=False, decorate=None,
                 fields=None):
        self.session = session
        self.cls = cls
        self.mapper = mapper(cls)
        self.ming_cursor = ming_cursor
        self._options = Object(refresh=refresh, decorate=decorate,
                               fields=fields, instrument=True)

    def __iter__(self):
        return self

    @property
    def extensions(self):
        return self.session.extensions

    def count(self):
        """Get the number of objects retrieved by the query"""
        return self.ming_cursor.count()

    def distinct(self, *args, **kwargs):
        # pass straight through to the underlying ming/pymongo cursor
        return self.ming_cursor.distinct(*args, **kwargs)

    def _next_impl(self):
        # Fetch the next raw document and resolve it through the session's
        # identity map so repeated queries return the same object instance.
        doc = next(self.ming_cursor)
        obj = self.session.imap.get(self.cls, doc['_id'])
        if obj is None:
            obj = self.mapper.create(doc, self._options, remake=False)
            state(obj).status = ObjectState.clean
            self.session.save(obj)
        elif self._options.refresh:
            # Refresh object
            state(obj).update(doc)
            state(obj).status = ObjectState.clean
        else:
            # Never refresh objects from the DB unless explicitly requested
            pass
        # steal the object from any other session it may belong to
        other_session = session(obj)
        if other_session is not None and other_session != self:
            other_session.expunge(obj)
            self.session.save(obj)
        if self._options.decorate is not None:
            return self._options.decorate(obj)
        else:
            return obj

    def next(self):
        _call_hook(self, 'before_cursor_next', self)
        try:
            return self._next_impl()
        finally:
            _call_hook(self, 'after_cursor_next', self)
    __next__ = next

    def options(self, **kwargs):
        # clone this cursor with ``kwargs`` overriding its options
        odm_cursor = ODMCursor(self.session, self.cls, self.ming_cursor)
        odm_cursor._options = Object(self._options, **kwargs)
        _call_hook(self, 'cursor_created', odm_cursor, 'options', self, **kwargs)
        return odm_cursor

    def limit(self, limit):
        """Limit the number of entries retrieved by the query"""
        odm_cursor = ODMCursor(self.session, self.cls,
                               self.ming_cursor.limit(limit))
        _call_hook(self, 'cursor_created', odm_cursor, 'limit', self, limit)
        return odm_cursor

    def skip(self, skip):
        """Skip the first ``skip`` entries retrieved by the query"""
        odm_cursor = ODMCursor(self.session, self.cls,
                               self.ming_cursor.skip(skip))
        _call_hook(self, 'cursor_created', odm_cursor, 'skip', self, skip)
        return odm_cursor

    def hint(self, index_or_name):
        odm_cursor = ODMCursor(self.session, self.cls,
                               self.ming_cursor.hint(index_or_name))
        _call_hook(self, 'cursor_created', odm_cursor, 'hint', self, index_or_name)
        return odm_cursor

    def sort(self, *args, **kwargs):
        """Sort results of the query.

        See :meth:`pymongo.cursor.Cursor.sort` for details on the
        available arguments.
        """
        odm_cursor = ODMCursor(self.session, self.cls,
                               self.ming_cursor.sort(*args, **kwargs))
        _call_hook(self, 'cursor_created', odm_cursor, 'sort', self, *args, **kwargs)
        return odm_cursor

    def one(self):
        """Gets one result and exaclty one.

        Raises ``ValueError`` exception if less or more than one result is
        returned by the query.
        """
        try:
            result = self.next()
        except StopIteration:
            raise ValueError('Less than one result from .one()')
        try:
            self.next()
        except StopIteration:
            return result
        raise ValueError('More than one result from .one()')

    def first(self):
        """Gets the first result of the query"""
        try:
            return self.next()
        except StopIteration:
            return None

    def all(self):
        """Retrieve all the results of the query"""
        return list(self)

    def rewind(self):
        """Rewind this cursor to its unevaluated state.

        Reset this cursor if it has been partially or completely evaluated.
        Any options that are present on the cursor will remain in effect.
        Future iterating performed on this cursor will cause new queries to
        be sent to the server, even if the resultant data has already been
        retrieved by this cursor.
        """
        return self.ming_cursor.rewind()
def compute_diffs(repo_id, tree_cache, rhs_ci):
    '''compute simple differences between a commit and its first parent

    Also records last-commit info for changed objects.  ``tree_cache``
    (tree id -> TreeDoc) is mutated to hold only this commit's trees and is
    returned for reuse on the next call.
    '''
    if rhs_ci.tree_id is None:
        return tree_cache

    def _walk_tree(tree, tree_index):
        # yield the ids of every blob/other/tree under ``tree``, recursively
        for x in tree.blob_ids:
            yield x.id
        for x in tree.other_ids:
            yield x.id
        for x in tree.tree_ids:
            yield x.id
            for xx in _walk_tree(tree_index[x.id], tree_index):
                yield xx

    rhs_tree_ids = TreesDoc.m.get(_id=rhs_ci._id).tree_ids
    # diff against the first parent only
    if rhs_ci.parent_ids:
        lhs_ci = CommitDoc.m.get(_id=rhs_ci.parent_ids[0])
    else:
        lhs_ci = None
    if lhs_ci is not None:
        lhs_tree_ids = TreesDoc.m.get(_id=lhs_ci._id).tree_ids
    else:
        lhs_tree_ids = []
    # fetch only the trees not already cached
    new_tree_ids = [
        tid for tid in chain(lhs_tree_ids, rhs_tree_ids)
        if tid not in tree_cache]
    tree_index = dict(
        (t._id, t) for t in TreeDoc.m.find(dict(_id={'$in': new_tree_ids}), validate=False))
    tree_index.update(tree_cache)
    # keep only the RHS commit's trees cached for the next iteration
    rhs_tree_ids_set = set(rhs_tree_ids)
    tree_cache.clear()
    tree_cache.update(
        (id, t) for id, t in tree_index.iteritems() if id in rhs_tree_ids_set)
    rhs_tree = tree_index[rhs_ci.tree_id]
    if lhs_ci is None:
        # root commit: diff against an empty pseudo-tree
        lhs_tree = Object(_id=None, tree_ids=[], blob_ids=[], other_ids=[])
    else:
        lhs_tree = tree_index[lhs_ci.tree_id]
    differences = []
    commit_info = get_commit_info(rhs_ci)
    for name, lhs_id, rhs_id in _diff_trees(lhs_tree, rhs_tree, tree_index):
        differences.append(dict(name=name, lhs_id=lhs_id, rhs_id=rhs_id))
        # Set last commit info
        if rhs_id is not None:
            set_last_commit(repo_id, rhs_id, commit_info)
        rhs_tree = tree_index.get(rhs_id, None)
        if rhs_tree is not None:
            # a changed tree: everything beneath it also gets this commit
            for oid in _walk_tree(rhs_tree, tree_index):
                set_last_commit(repo_id, oid, commit_info)
    # Set last commit data for trees without it in the RHS
    if True:
        last_commit_collection = LastCommitDoc.m.session.db[
            LastCommitDoc.m.collection_name]
        # ids already holding last-commit data for this repo
        last_commits = set(
            d['object_id'] for d in last_commit_collection.find(
                dict(object_id={'$in': rhs_tree_ids},
                     _id=re.compile("^{0}:".format(repo_id))),
                {'object_id': 1, '_id': 0}))
        for tree_id in rhs_tree_ids:
            if tree_id not in last_commits:
                set_last_commit(repo_id, tree_id, commit_info)
    # Build the diffinfo
    di = DiffInfoDoc(dict(_id=rhs_ci._id, differences=differences))
    di.m.save()
    return tree_cache
def test_from_bson(self):
    '''from_bson converts a plain (nested) dict into an Object equal to it.'''
    bson = dict(
        a=[1,2,3],
        b=dict(c=5))
    obj = Object.from_bson(bson)
    self.assertEqual(obj, dict(a=[1,2,3], b=dict(c=5)))
def commit_browser_data(self, **kw):
    '''Build the commit-graph data for the repo commit browser.

    Returns a dict with the topologically-sorted commit ids, a per-commit
    info map (row/column layout, message, url), the number of layout
    columns used, and the last row index.
    '''
    head_ids = [head.object_id for head in c.app.repo.get_heads()]
    commit_ids = [c.app.repo.rev_to_commit_id(r)
                  for r in c.app.repo.log(head_ids, id_only=True)]
    log.info('Grab %d commit objects by ID', len(commit_ids))
    commits_by_id = {
        c_obj._id: c_obj
        for c_obj in M.repository.CommitDoc.m.find(dict(_id={'$in': commit_ids}))}
    log.info('... build graph')
    # parent/child adjacency plus commit dates, for the topological sort
    parents = {}
    children = defaultdict(list)
    dates = {}
    for row, (oid, ci) in enumerate(commits_by_id.iteritems()):
        parents[oid] = list(ci.parent_ids)
        dates[oid] = ci.committed.date
        for p_oid in ci.parent_ids:
            children[p_oid].append(oid)
    result = []
    for row, oid in enumerate(topo_sort(children, parents, dates, head_ids)):
        ci = commits_by_id[oid]
        url = c.app.repo.url_for_commit(Object(_id=oid))
        msg_split = ci.message.splitlines()
        if msg_split:
            msg = msg_split[0]
        else:
            msg = "No commit message."
        result.append(dict(
            oid=oid,
            short_id=c.app.repo.shorthand_for_commit(oid),
            row=row,
            parents=ci.parent_ids,
            message=msg,
            url=url))
    log.info('...done')
    # assign each commit a display column for the graph layout
    col_idx = {}
    columns = []

    def find_column(columns):
        # reuse the first free (None) column, else append a new one
        for i, col in enumerate(columns):
            if col is None:
                return i
        columns.append(None)
        return len(columns) - 1
    for row, ci_json in enumerate(result):
        oid = ci_json['oid']
        colno = col_idx.get(oid)
        if colno is None:
            colno = find_column(columns)
            col_idx[oid] = colno
        # free this commit's column, then reserve columns for its parents
        columns[colno] = None
        ci_json['column'] = colno
        for p in parents[oid]:
            p_col = col_idx.get(p, None)
            if p_col is not None:
                continue
            p_col = find_column(columns)
            col_idx[p] = p_col
            columns[p_col] = p
    built_tree = dict(
        (ci_json['oid'], ci_json) for ci_json in result)
    return dict(
        commits=[ci_json['oid'] for ci_json in result],
        built_tree=built_tree,
        next_column=len(columns),
        max_row=row)
def test_from_bson(self):
    """Converting nested BSON data with Object.from_bson preserves contents."""
    source = {'a': [1, 2, 3], 'b': {'c': 5}}
    converted = Object.from_bson(source)
    self.assertEqual(converted, {'a': [1, 2, 3], 'b': {'c': 5}})
def test_default_branch_non_standard_unset(self):
    """With no default branch set, fall back to the repo's only branch and persist it."""
    with mock.patch.object(self.repo, 'get_branches') as branches_mock, \
            mock.patch.object(self.repo, 'set_default_branch') as setter_mock:
        branches_mock.return_value = [Object(name='foo')]
        assert_equal(self.repo.get_default_branch('master'), 'foo')
        setter_mock.assert_called_once_with('foo')
def options(self, **kwargs):
    """Return a new cursor over the same query whose options are this
    cursor's options overlaid with *kwargs*; fires the cursor_created hook."""
    new_cursor = ODMCursor(self.session, self.cls, self.ming_cursor)
    new_cursor._options = Object(self._options, **kwargs)
    _call_hook(self, 'cursor_created', new_cursor, 'options', self, **kwargs)
    return new_cursor
def allow(cls, role_id, permission):
    """Build an access-control entry granting *permission* to *role_id*."""
    ace = Object(
        access=cls.ALLOW, role_id=role_id, permission=permission)
    return ace
def __init__(self, mapper, instance, options):
    """Wrap *instance* (mapped by *mapper*) with fresh, empty ODM state."""
    self.mapper = mapper
    self.instance = instance
    state = ObjectState(options, None)
    state.document = Object()
    state.original_document = Object()
    self.state = state
def deny(cls, role_id, permission):
    """Build an access-control entry denying *permission* to *role_id*."""
    ace = Object(
        access=cls.DENY, role_id=role_id, permission=permission)
    return ace
def heads(self):
    """Expose a single unnamed head pointing at the current HEAD revision."""
    head_oid = self._oid(self.head)
    return [Object(name=None, object_id=head_oid)]
# Script bootstrap for the ACL migration: work on raw pymongo collections
# (bypassing the ODM) for bulk neighborhood/project updates.
from allura.command.show_models import dfs, build_model_inheritance_graph

log = logging.getLogger('update-acls')

# Populated by main() from the parsed command line; module-level so helpers
# such as update_neighborhood_acl can check options.test for dry-run mode.
options = None
optparser = OptionParser(
    usage='allurapaste script <ini file> -- %prog [options] [neighborhood1...]')
# -t/--test: log what would change without writing anything.
optparser.add_option('-t', '--test', dest='test', action='store_true')

# Raw collection handles on the main database.
main_db = M.main_doc_session.db
c_neighborhood = main_db.neighborhood
c_project = main_db.project
c_user = main_db.user
c_project_role = main_db.project_role
# Point the thread-local context at a project database_uri so the project
# doc session can connect; presumably any project's URI suffices here --
# TODO confirm all projects share one database in this deployment.
c.project = Object(database_uri=c_project.find().next()['database_uri'])
project_db = M.project_doc_session.db
c_app_config = project_db.config


def main():
    """Entry point: select the neighborhoods/projects to migrate.

    When neighborhood names are given on the command line, restrict the
    migration to those neighborhoods and their projects.  (The no-argument
    branch and the rest of the migration continue beyond this chunk.)
    """
    global options
    options, neighborhoods = optparser.parse_args()
    if neighborhoods:
        log.info('Updating neighborhoods: %s', neighborhoods)
        q_neighborhoods = list(
            c_neighborhood.find(dict(name={'$in': neighborhoods})))
        neighborhood_ids = [n['_id'] for n in q_neighborhoods]
        q_projects = list(
            c_project.find(dict(neighborhood_id={'$in': neighborhood_ids})))
def refresh_commit_info(self, oid, seen_object_ids, lazy=True):
    """Create or update the CommitDoc for SVN revision *oid*.

    Returns True if the doc was (re)written, False if it already existed
    and *lazy* allowed skipping the refresh.  *seen_object_ids* is accepted
    for interface parity but not used by this SVN implementation.
    """
    from allura.model.repository import CommitDoc
    ci_doc = CommitDoc.m.get(_id=oid)
    # Lazy mode: an existing doc is assumed current, nothing to do.
    if ci_doc and lazy:
        return False
    revno = self._revno(oid)
    rev = self._revision(oid)
    try:
        log_entry = self._svn.log(
            self._url,
            revision_start=rev,
            limit=1,
            discover_changed_paths=True)[0]
    except pysvn.ClientError:
        # Treat unreadable revisions as empty rather than failing the
        # whole refresh.
        log.info('ClientError processing %r %r, treating as empty',
                 oid, self._repo, exc_info=True)
        log_entry = Object(date='', message='', changed_paths=[])
    log_date = None
    # The empty fallback Object above has date='' (no attribute access),
    # so only convert when pysvn supplied a real timestamp.
    if hasattr(log_entry, 'date'):
        log_date = datetime.utcfromtimestamp(log_entry.date)
    # SVN has no separate author/committer, so the same identity is used
    # for both below.
    user = Object(
        name=h.really_unicode(log_entry.get('author', '--none--')),
        email='',
        date=log_date)
    args = dict(
        tree_id=None,
        committed=user,
        authored=user,
        message=h.really_unicode(log_entry.get("message", "--none--")),
        parent_ids=[],
        child_ids=[])
    # SVN history is linear: the sole parent is the previous revision.
    if revno > 1:
        args['parent_ids'] = [self._oid(revno - 1)]
    if ci_doc:
        ci_doc.update(**args)
        ci_doc.m.save()
    else:
        ci_doc = CommitDoc(dict(args, _id=oid))
        try:
            ci_doc.m.insert(safe=True)
        except DuplicateKeyError:
            # Raced with another refresh; in lazy mode defer to the winner.
            if lazy:
                return False
    return True
def refresh_commit_info(self, oid, seen_object_ids, lazy=True): from allura.model.repo import CommitDoc, DiffInfoDoc ci_doc = CommitDoc.m.get(_id=oid) if ci_doc and lazy: return False revno = self._revno(oid) rev = self._revision(oid) try: log_entry = self._svn.log( self._url, revision_start=rev, limit=1, discover_changed_paths=True)[0] except pysvn.ClientError: log.info('ClientError processing %r %r, treating as empty', oid, self._repo, exc_info=True) log_entry = Object(date='', message='', changed_paths=[]) log_date = None if hasattr(log_entry, 'date'): log_date = datetime.utcfromtimestamp(log_entry.date) user = Object( name=h.really_unicode(log_entry.get('author', '--none--')), email='', date=log_date) args = dict( tree_id=None, committed=user, authored=user, message=h.really_unicode(log_entry.get("message", "--none--")), parent_ids=[], child_ids=[]) if revno > 1: args['parent_ids'] = [self._oid(revno - 1)] if ci_doc: ci_doc.update(**args) ci_doc.m.save() else: ci_doc = CommitDoc(dict(args, _id=oid)) try: ci_doc.m.insert(safe=True) except DuplicateKeyError: if lazy: return False # Save diff info di = DiffInfoDoc.make(dict(_id=ci_doc._id, differences=[])) for path in log_entry.changed_paths: if path.action in ('A', 'M', 'R'): try: rhs_info = self._svn.info2( self._url + h.really_unicode(path.path), revision=self._revision(ci_doc._id), recurse=False)[0][1] rhs_id = self._obj_oid(ci_doc._id, rhs_info) except pysvn.ClientError, e: # pysvn will sometimes misreport deleted files (D) as # something else (like A), causing info2() to raise a # ClientError since the file doesn't exist in this # revision. 
Set lrhs_id = None to treat like a deleted file log.info('This error was handled gracefully and logged ' 'for informational purposes only:\n' + str(e)) rhs_id = None else: rhs_id = None if ci_doc.parent_ids and path.action in ('D', 'M', 'R'): try: lhs_info = self._svn.info2( self._url + h.really_unicode(path.path), revision=self._revision(ci_doc.parent_ids[0]), recurse=False)[0][1] lhs_id = self._obj_oid(ci_doc._id, lhs_info) except pysvn.ClientError, e: # pysvn will sometimes report new files as 'M'odified, # causing info2() to raise ClientError since the file # doesn't exist in the parent revision. Set lhs_id = None # to treat like a newly added file. log.info('This error was handled gracefully and logged ' 'for informational purposes only:\n' + str(e)) lhs_id = None