Example #1
 def command(self):
     from allura import model as M
     main_session_classes = [M.main_orm_session, M.repository_orm_session,
             M.task_orm_session]
     if asbool(self.config.get('activitystream.recording.enabled', False)):
         from activitystream.storage.mingstorage import activity_orm_session
         main_session_classes.append(activity_orm_session)
     self.basic_setup()
     main_indexes = defaultdict(lambda: defaultdict(list))  # by db, then collection name
     project_indexes = defaultdict(list)  # by collection name
     base.log.info('Collecting indexes...')
     for m in Mapper.all_mappers():
         mgr = m.collection.m
         cname = mgr.collection_name
         cls = m.mapped_class
         if cname is None:
             base.log.info('... skipping abstract class %s', cls)
             continue
         base.log.info('... for class %s', cls)
         if session(cls) in main_session_classes:
             idx = main_indexes[session(cls)][cname]
         else:
             idx = project_indexes[cname]
         idx.extend(mgr.indexes)
     base.log.info('Updating indexes for main DB')
     for odm_session, db_indexes in main_indexes.iteritems():
         db = odm_session.impl.db
         for name, indexes in db_indexes.iteritems():
             self._update_indexes(db[name], indexes)
     base.log.info('Updating indexes for project DB')
     db = M.project_doc_session.db
     base.log.info('... DB: %s', db)
     for name, indexes in project_indexes.iteritems():
         self._update_indexes(db[name], indexes)
     base.log.info('Done updating indexes')
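The nested defaultdict above groups index specs first by ODM session and then by collection name before any database work happens. A minimal sketch of that grouping pattern with plain data (the session and collection names here are hypothetical stand-ins, not Allura's):

    from collections import defaultdict

    # by session, then collection name
    main_indexes = defaultdict(lambda: defaultdict(list))
    main_indexes['main_session']['projects'].extend(['shortname_idx'])
    main_indexes['task_session']['monq.task'].extend(['state_priority_idx'])

    for sess, by_collection in main_indexes.items():
        for cname, indexes in by_collection.items():
            print(sess, cname, indexes)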
Example #2
    def test_trove_hierarchy(self):
        root_parent = M.TroveCategory(fullname="Root", trove_cat_id=1, trove_parent_id=0)
        category_a = M.TroveCategory(fullname="CategoryA", trove_cat_id=2, trove_parent_id=1)
        category_b = M.TroveCategory(fullname="CategoryB", trove_cat_id=3, trove_parent_id=1)
        child_a = M.TroveCategory(fullname="ChildA", trove_cat_id=4, trove_parent_id=2)
        child_b = M.TroveCategory(fullname="ChildB", trove_cat_id=5, trove_parent_id=2)

        session(M.TroveCategory).flush()

        r = self.app.get("/categories/browse")
        rendered_tree = r.html.find("div", {"id": "content_base"}).find("div").find("div").find("ul")
        expected = BeautifulSoup(
            """
        <ul>
            <li>Root</li>
            <ul>
                <li>CategoryA</li>
                <ul>
                    <li>ChildA</li>
                    <li>ChildB</li>
                </ul>
                <li>CategoryB</li>
            </ul>
        </ul>
        """.strip()
        )
        assert str(expected) == str(rendered_tree)
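The assertion compares str() of two parsed trees, so both sides go through the same parser and serializer. A self-contained sketch of the find() chain used to dig out the rendered tree (only bs4 is assumed):

    from bs4 import BeautifulSoup

    html = '<div id="content_base"><div><div><ul><li>Root</li></ul></div></div></div>'
    soup = BeautifulSoup(html, 'html.parser')
    # descend through the two wrapper divs to the first <ul>, as the test does
    tree = soup.find('div', {'id': 'content_base'}).find('div').find('div').find('ul')
    assert str(tree) == '<ul><li>Root</li></ul>'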
Example #3
 def __call__(self, restore_context=True):
     '''Call the task function with its context.  If restore_context is True,
     c.project/app/user will be restored to the values they had before this
     function was called.
     '''
     from allura import model as M
     self.time_start = datetime.utcnow()
     session(self).flush(self)
     log.info('starting %r', self)
     old_cproject = getattr(c, 'project', None)
     old_capp = getattr(c, 'app', None)
     old_cuser = getattr(c, 'user', None)
     try:
         func = self.function
         c.project = M.Project.query.get(_id=self.context.project_id)
         c.app = None
         if c.project:
             c.project.notifications_disabled = self.context.get('notifications_disabled', False)
             app_config = M.AppConfig.query.get(_id=self.context.app_config_id)
             if app_config:
                 c.app = c.project.app_instance(app_config)
         c.user = M.User.query.get(_id=self.context.user_id)
         with log_output(log):
             self.result = func(*self.args, **self.kwargs)
         self.state = 'complete'
         return self.result
     except Exception, exc:
         log.info('Error on job %r, re-raising it', self)
         self.state = 'error'
         if hasattr(exc, 'format_error'):
             self.result = exc.format_error()
             log.error(self.result)
         else:
             self.result = traceback.format_exc()
         raise
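Note that the excerpt saves the old c.project/c.app/c.user up front; the restoring finally block implied by the docstring is cut off here. A minimal sketch of the save-and-restore idiom, using a hypothetical context object in place of Allura's thread-local c:

    class Ctx(object):
        pass

    c = Ctx()
    c.user = 'caller'

    def run_in_context(func, user):
        old_user = getattr(c, 'user', None)   # remember the caller's value
        try:
            c.user = user
            return func()
        finally:
            c.user = old_user                 # restore even if func() raised

    print(run_in_context(lambda: c.user, 'task-user'))  # task-user
    print(c.user)                                       # caller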
Example #4
 def commit(self):
     ss = VersionedArtifact.commit(self)
     session(self).flush()
     if self.version > 1:
         v1 = self.get_version(self.version - 1)
         v2 = self
         la = [line + '\n' for line in v1.text.splitlines()]
         lb = [line + '\n' for line in v2.text.splitlines()]
         diff = ''.join(difflib.unified_diff(
             la, lb,
             'v%d' % v1.version,
             'v%d' % v2.version))
         description = '<pre>' + diff + '</pre>'
         if v1.title != v2.title:
             subject = '%s renamed page %s to %s' % (
                 context.user.username, v1.title, v2.title)
         else:
             subject = '%s modified page %s' % (
                 context.user.username, self.title)
     else:
         description = self.text
         subject = '%s created page %s' % (
             context.user.username, self.title)
     Feed.post(self, title=None, description=description)
     Notification.post(
         artifact=self, topic='metadata', text=description, subject=subject)
     return ss
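difflib.unified_diff expects sequences of newline-terminated lines, which is why both versions are split and re-suffixed with '\n' before diffing. A self-contained sketch producing the same kind of diff text:

    import difflib

    v1_text = 'first line\nsecond line'
    v2_text = 'first line\nsecond line, edited'
    la = [line + '\n' for line in v1_text.splitlines()]
    lb = [line + '\n' for line in v2_text.splitlines()]
    print(''.join(difflib.unified_diff(la, lb, 'v1', 'v2')))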
Example #5
 def set_default_branch(self, name):
     if not name:
         return
     # HEAD should point to default branch
     self._git.git.symbolic_ref('HEAD', 'refs/heads/%s' % name)
     self._repo.default_branch_name = name
     session(self._repo).flush(self._repo)
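git symbolic-ref HEAD refs/heads/<name> is how a bare repository records its default branch, so clients know what to check out. The same call through GitPython's command proxy, assuming a repository at a hypothetical repo_path:

    import git

    repo_path = '/tmp/example.git'  # hypothetical existing bare repo
    repo = git.Repo(repo_path)
    # repo.git proxies arbitrary git subcommands (underscores become dashes),
    # so this runs: git symbolic-ref HEAD refs/heads/main
    repo.git.symbolic_ref('HEAD', 'refs/heads/main')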
Example #6
 def test_existing_lcd_unchained(self):
     commit1 = self._add_commit('Commit 1', ['file1', 'dir1/file1'])
     commit2 = self._add_commit(
         'Commit 2', ['file1', 'dir1/file1', 'dir1/file2'], ['dir1/file2'], [commit1])
     commit3 = self._add_commit(
         'Commit 3', ['file1', 'dir1/file1', 'dir1/file2'], ['file1'], [commit2])
     prev_lcd = M.repo.LastCommit(
         path='dir1',
         commit_id=commit2._id,
         entries=[
             dict(
                 name='file1',
                 commit_id=commit1._id),
             dict(
                 name='file2',
                 commit_id=commit2._id),
         ],
     )
     session(prev_lcd).flush()
     tree = self._build_tree(commit3, '/dir1', ['file1', 'file2'])
     lcd = M.repo.LastCommit.get(tree)
     self.assertEqual(lcd._id, prev_lcd._id)
     self.assertEqual(
         self.repo._commits[lcd.commit_id].message, commit2.message)
     self.assertEqual(lcd.path, 'dir1')
     self.assertEqual(lcd.entries, prev_lcd.entries)
Example #7
 def check_call(cmd):
     p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
     stdout, stderr = p.communicate(input="p\n")
     if p.returncode != 0:
         self._repo.status = "ready"
         session(self._repo).flush(self._repo)
         raise SVNCalledProcessError(cmd, p.returncode, stdout, stderr)
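The helper feeds "p\n" to stdin (likely answering svnsync's interactive certificate prompt, where "p" accepts permanently), resets the repo status, and raises on any nonzero exit. On Python 3 the exit check and output capture collapse into subprocess.run; a sketch without the repo-status bookkeeping:

    import subprocess

    def check_call(cmd):
        # raises CalledProcessError (with stdout/stderr attached) on nonzero exit
        return subprocess.run(cmd, input='p\n', capture_output=True,
                              text=True, check=True)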
Example #8
 def test_existing_lcd_partial(self):
     commit1 = self._add_commit('Commit 1', ['file1'])
     commit2 = self._add_commit('Commit 2', ['file1', 'file2'], ['file2'], [commit1])
     commit3 = self._add_commit('Commit 3', ['file1', 'file2', 'file3'], ['file3'], [commit2])
     commit4 = self._add_commit('Commit 4', ['file1', 'file2', 'file3', 'file4'], ['file2', 'file4'], [commit3])
     prev_lcd = M.repository.LastCommit(
         path='',
         commit_id=commit3._id,
         entries=[
             dict(
                 name='file1',
                 commit_id=commit1._id),
             dict(
                 name='file2',
                 commit_id=commit2._id),
             dict(
                 name='file3',
                 commit_id=commit3._id),
         ],
     )
     session(prev_lcd).flush()
     lcd = M.repository.LastCommit.get(commit4.tree)
     self.assertEqual(self.repo._commits[lcd.commit_id].message, commit4.message)
     self.assertEqual(lcd.path, '')
     self.assertEqual(len(lcd.entries), 4)
     self.assertEqual(lcd.by_name['file1'], commit1._id)
     self.assertEqual(lcd.by_name['file2'], commit4._id)
     self.assertEqual(lcd.by_name['file3'], commit3._id)
     self.assertEqual(lcd.by_name['file4'], commit4._id)
Example #9
    def prepare_context(self, context):
        full_timeline = g.director.get_timeline(
            self.user, page=0, limit=100,
            actor_only=True,
        )
        filtered_timeline = list(islice(ifilter(perm_check(c.user), full_timeline),
                                        0, 8))
        for activity in filtered_timeline:
            # Get the project for the activity.obj so we can use it in the
            # template. Expunge first so Ming doesn't try to flush the attr
            # we create to temporarily store the project.
            #
            # The get_activity_object() calls are cheap, pulling from
            # the session identity map instead of mongo since identical
            # calls are made by perm_check() above.
            session(activity).expunge(activity)
            activity_obj = get_activity_object(activity.obj)
            activity.obj.project = getattr(activity_obj, 'project', None)

        context.update({
            'follow_toggle': W.follow_toggle,
            'following': g.director.is_connected(c.user, self.user),
            'timeline': filtered_timeline,
            'activity_app': self.activity_app,
        })
        g.register_js('activity_js/follow.js')
        return context
Example #10
 def request_token(self, **kw):
     req = oauth.Request.from_request(
         request.method,
         request.url.split('?')[0],
         headers=request.headers,
         parameters=dict(request.params),
         query_string=request.query_string
     )
     consumer_token = M.OAuthConsumerToken.query.get(
         api_key=req['oauth_consumer_key'])
     if consumer_token is None:
         log.error('Invalid consumer token')
         raise exc.HTTPForbidden
     consumer = consumer_token.consumer
     try:
         self.server.verify_request(req, consumer, None)
     except:
         log.error('Invalid signature')
         raise exc.HTTPForbidden
     req_token = M.OAuthRequestToken(
         consumer_token_id=consumer_token._id,
         callback=req.get('oauth_callback', 'oob')
     )
     session(req_token).flush()
     log.info('Saving new request token with key: %s', req_token.api_key)
     return req_token.to_string()
Example #11
    def deliver(cls, nid, artifact_index_ids, topic):
        '''Called in the notification message handler to deliver notification IDs
        to the appropriate mailboxes.  Atomically appends the nids
        to the appropriate mailboxes.
        '''

        artifact_index_ids.append(None)  # get tool-wide ("None") and specific artifact subscriptions
        d = {
            'project_id': c.project._id,
            'app_config_id': c.app.config._id,
            'artifact_index_id': {'$in': artifact_index_ids},
            'topic': {'$in': [None, topic]}
        }
        mboxes = cls.query.find(d).all()
        log.debug('Delivering notification %s to mailboxes [%s]', nid, ', '.join([str(m._id) for m in mboxes]))
        for mbox in mboxes:
            try:
                mbox.query.update(
                    # _id is automatically specified by ming's "query", so this matches the current mbox
                    {'$push': dict(queue=nid),
                     '$set': dict(last_modified=datetime.utcnow(),
                                  queue_empty=False),
                     })
                # Make sure the mbox doesn't stick around to be flush()ed
                session(mbox).expunge(mbox)
            except:
                # log error but try to keep processing, lest all the other eligible
                # mboxes for this notification get skipped and lost forever
                log.exception(
                    'Error adding notification: %s for artifact %s on project %s to user %s',
                    nid, artifact_index_ids, c.project._id, mbox.user_id)
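The $push/$set update is the atomic step: Mongo appends the notification id and stamps the bookkeeping fields in a single operation, so concurrent deliveries cannot lose queue entries. Roughly the same update issued with raw pymongo (connection details and placeholder values assumed):

    from datetime import datetime
    from pymongo import MongoClient

    db = MongoClient().allura              # assumes a local MongoDB instance
    nid = 'some-notification-id'           # placeholder notification id
    mbox = db.mailbox.find_one()           # placeholder: a mailbox document
    db.mailbox.update_one(
        {'_id': mbox['_id']},
        {'$push': {'queue': nid},
         '$set': {'last_modified': datetime.utcnow(), 'queue_empty': False}})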
Example #12
    def clone_from(self, source_url):
        '''Initialize a repo as a clone of another using svnsync'''
        self.init(default_dirs=False, skip_special_files=True)
        # Need a pre-revprop-change hook for cloning
        fn = os.path.join(self._repo.fs_path, self._repo.name,
                          'hooks', 'pre-revprop-change')
        with open(fn, 'wb') as fp:
            fp.write('#!/bin/sh\n')
        os.chmod(fn, 0755)

        def check_call(cmd):
            p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
            stdout, stderr = p.communicate(input='p\n')
            if p.returncode != 0:
                self._repo.status = 'ready'
                session(self._repo).flush(self._repo)
                raise SVNCalledProcessError(cmd, p.returncode, stdout, stderr)

        self._repo.status = 'importing'
        session(self._repo).flush(self._repo)
        log.info('Initialize %r as a clone of %s',
                 self._repo, source_url)
        check_call(['svnsync', 'init', self._url, source_url])
        check_call(['svnsync', '--non-interactive', 'sync', self._url])
        log.info('... %r cloned', self._repo)
        if not svn_path_exists("file://%s%s/%s" %
                         (self._repo.fs_path,
                          self._repo.name,
                          c.app.config.options['checkout_url'])):
            c.app.config.options['checkout_url'] = ""
        self._repo.refresh(notify=False)
        self._setup_special_files()
Example #13
 def install_app(self, ep_name, mount_point=None, mount_label=None, ordinal=None, **override_options):
     App = g.entry_points['tool'][ep_name]
     with h.push_config(c, project=self):
         try:
             mount_point = v.MountPointValidator(App).to_python(mount_point)
         except fe.Invalid as e:
             raise exceptions.ToolError(str(e))
     if ordinal is None:
         ordinal = int(self.ordered_mounts(include_hidden=True)
                       [-1]['ordinal']) + 1
     options = App.default_options()
     options['mount_point'] = mount_point
      options['mount_label'] = mount_label or App.default_mount_label or mount_point
     options['ordinal'] = int(ordinal)
     options.update(override_options)
     cfg = AppConfig(
         project_id=self._id,
         tool_name=ep_name.lower(),
         options=options)
     app = App(self, cfg)
     with h.push_config(c, project=self, app=app):
         session(cfg).flush()
         app.install(self)
     return app
Example #14
 def set_api_ticket(self, expire=None):
     if not expire:
         expire = timedelta(days=1)
     api_ticket = M.ApiTicket(user_id=self.user._id, capabilities={'import': ['Projects','test']},
                              expires=datetime.utcnow() + expire)
     session(api_ticket).flush()
     self.set_api_token(api_ticket)
Example #15
def main():
    test = sys.argv[-1] == 'test'
    projects = M.Project.query.find().all()
    log.info('Fixing tracker fields')
    for p in projects:
        if p.parent_id:
            continue
        c.project = p
        q = TM.Globals.query.find()
        if not q.count():
            continue
        for g in q:
            if g.open_status_names:
                continue
            if g.status_names is None:
                old_names = ['open', 'closed']
            else:
                old_names = g.status_names.split() or ['open', 'closed']
            if g.open_status_names is None:
                g.open_status_names = ' '.join(
                    name for name in old_names if name != 'closed')
            if g.closed_status_names is None:
                g.closed_status_names = 'closed'
        if test:
            log.info('... would fix tracker(s) in %s', p.shortname)
        else:
            log.info('... fixing tracker(s) in %s', p.shortname)
            session(g).flush()
        session(g).clear()
Example #16
 def next_ticket_num(self):
     gbl = Globals.query.find_and_modify(
         query=dict(app_config_id=self.app_config_id),
         update={'$inc': { 'last_ticket_num': 1}},
         new=True)
     session(gbl).expunge(gbl)
     return gbl.last_ticket_num
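find_and_modify with $inc and new=True makes the counter increment atomic, and the expunge keeps Ming from later flushing the fetched document back over a counter other processes are advancing. The same counter with modern pymongo, where there is no ODM session to expunge (names illustrative):

    from pymongo import MongoClient, ReturnDocument

    db = MongoClient().allura  # assumes a local MongoDB instance

    def next_ticket_num(app_config_id):
        # assumes the globals document already exists
        gbl = db.globals.find_one_and_update(
            {'app_config_id': app_config_id},
            {'$inc': {'last_ticket_num': 1}},
            return_document=ReturnDocument.AFTER)  # like new=True above
        return gbl['last_ticket_num']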
Example #17
    def clone_from(self, source_url, copy_hooks=False):
        """Initialize a repo as a clone of another using svnsync"""
        self.init(default_dirs=False, skip_special_files=True)
        # Need a pre-revprop-change hook for cloning
        fn = os.path.join(self._repo.fs_path, self._repo.name, "hooks", "pre-revprop-change")
        with open(fn, "wb") as fp:
            fp.write("#!/bin/sh\n")
        os.chmod(fn, 0755)

        def check_call(cmd):
            p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
            stdout, stderr = p.communicate(input="p\n")
            if p.returncode != 0:
                self._repo.status = "ready"
                session(self._repo).flush(self._repo)
                raise SVNCalledProcessError(cmd, p.returncode, stdout, stderr)

        self._repo.status = "importing"
        session(self._repo).flush(self._repo)
        log.info("Initialize %r as a clone of %s", self._repo, source_url)
        check_call(["svnsync", "init", self._url, source_url])
        check_call(["svnsync", "--non-interactive", "sync", self._url])
        log.info("... %r cloned", self._repo)
        if not svn_path_exists(
            "file://%s%s/%s" % (self._repo.fs_path, self._repo.name, c.app.config.options["checkout_url"])
        ):
            c.app.config.options["checkout_url"] = ""
        self._repo.refresh(notify=False)
        self._setup_special_files(source_url, copy_hooks)
Example #18
    def ls(self):
        '''
        List the entries in this tree, with historical commit info for
        each node.  Eventually, ls_old can be removed and this can be
        replaced with the following:

            return self._lcd_map(LastCommit.get(self))
        '''
        # look for existing new format first
        last_commit = LastCommit.get(self, create=False)
        if last_commit:
            return self._lcd_map(last_commit)
        # otherwise, try old format
        old_style_results = self.ls_old()
        if old_style_results:
            return old_style_results
        # finally, use the new implementation that auto-vivifies
        last_commit = LastCommit.get(self, create=True)
        # ensure that the LCD is saved, even if
        # there is an error later in the request
        if last_commit:
            session(last_commit).flush(last_commit)
            return self._lcd_map(last_commit)
        else:
            return []
Example #19
 def do_request_merge(self, **kw):
     try:
         kw = self.mr_widget.to_python(kw)
     except formencode.Invalid:
         # trigger error_handler directly
         return self.request_merge(**kw)
     downstream = dict(
         project_id=c.project._id,
         mount_point=c.app.config.options.mount_point,
         commit_id=c.app.repo.commit(kw['source_branch'])._id)
     with c.app.repo.push_upstream_context():
         mr = M.MergeRequest.upsert(
             downstream=downstream,
             target_branch=kw['target_branch'],
             source_branch=kw['source_branch'],
             summary=kw['summary'],
             description=kw['description'])
         if kw.get('subscribe'):
             mr.subscribe(user=c.user)
         M.Notification.post(
             mr, 'merge_request',
             subject=mr.email_subject,
             message_id=mr.message_id(),
         )
         t = M.Thread.new(
             discussion_id=c.app.config.discussion_id,
             ref_id=mr.index_id(),
         )
         session(t).flush()
         g.director.create_activity(c.user, 'created', mr,
                                    related_nodes=[c.project], tags=['merge-request'])
         redirect(mr.url())
Example #20
 def commit(self, update_stats=True):
     '''Save off a snapshot of the artifact and increment the version #'''
     self.version += 1
     try:
         ip_address = request.headers.get('X_FORWARDED_FOR', request.remote_addr)
         ip_address = ip_address.split(',')[0].strip()
     except:
         ip_address = '0.0.0.0'
     data = dict(
         artifact_id=self._id,
         artifact_class='%s.%s' % (
             self.__class__.__module__,
             self.__class__.__name__),
         version=self.version,
         author=dict(
             id=c.user._id,
             username=c.user.username,
             display_name=c.user.get_pref('display_name'),
             logged_ip=ip_address),
         timestamp=datetime.utcnow(),
         data=state(self).clone())
     ss = self.__mongometa__.history_class(**data)
     session(ss).insert_now(ss, state(ss))
     log.info('Snapshot version %s of %s',
              self.version, self.__class__)
     if update_stats:
         if self.version > 1:
             g.statsUpdater.modifiedArtifact(
                 self.type_s, self.mod_date, self.project, c.user)
         else :
             g.statsUpdater.newArtifact(
                 self.type_s, self.mod_date, self.project, c.user)
     return ss
Example #21
def main():
    args = arguments()

    c.user = M.User.query.get(username='******')

    with h.push_context(args.shortname, args.mountpt, neighborhood='Projects'):

        tool = c.project.app_config_by_tool_type(args.mountpt)

        # create tons of topics
        discussion = Forum.query.get(
            app_config_id=tool._id,
            shortname=args.forumname)

        for i in range(5000):
            subject = 'fake topic {}'.format(str(i))
            thd = discussion.thread_class()(discussion_id=discussion._id, subject=subject)
            # subj = str(uuid.uuid4())[:8]
            p = thd.post(subject, 'a new topic 2')

            for j in range(randint(1, 5)):
                new_post = {'text':'comment text'}
                # post = thd.add_post(**new_post)
                post = thd.add_post(text='comment text for real', subject="test subject")

            if i % 1000 == 0:  # flush every 1000th topic
                session(p).flush()
Example #22
 def install_app(self, ep_name, mount_point=None, mount_label=None, ordinal=None, **override_options):
     App = g.entry_points['tool'][ep_name]
     if not mount_point:
         base_mount_point = mount_point = App.default_mount_point
         for x in range(10):
             if self.app_instance(mount_point) is None: break
             mount_point = base_mount_point + '-%d' % x
     if not App.relaxed_mount_points:
         mount_point = mount_point.lower()
     if not App.validate_mount_point(mount_point):
         raise exceptions.ToolError, 'Mount point "%s" is invalid' % mount_point
     # HACK: reserved url components
     if mount_point in ('feed', 'index', 'icon', '_nav.json'):
         raise exceptions.ToolError, (
             'Mount point "%s" is reserved' % mount_point)
     if self.app_instance(mount_point) is not None:
         raise exceptions.ToolError, (
             'Mount point "%s" is already in use' % mount_point)
     assert self.app_instance(mount_point) is None
     if ordinal is None:
         ordinal = int(self.ordered_mounts(include_hidden=True)[-1]['ordinal']) + 1
     options = App.default_options()
     options['mount_point'] = mount_point
     options['mount_label'] = mount_label or App.default_mount_label or mount_point
     options['ordinal'] = int(ordinal)
     options.update(override_options)
     cfg = AppConfig(
         project_id=self._id,
         tool_name=ep_name.lower(),
         options=options)
     app = App(self, cfg)
     with h.push_config(c, project=self, app=app):
         session(cfg).flush()
         app.install(self)
     return app
Example #23
 def track_login(self, req):
     user_ip = utils.ip_address(req)
     user_agent = req.headers.get('User-Agent')
     self.last_access['login_date'] = datetime.utcnow()
     self.last_access['login_ip'] = user_ip
     self.last_access['login_ua'] = user_agent
     session(self).flush(self)
Example #24
 def next_ticket_num(cls):
     gbl = cls.query.find_and_modify(
         query=dict(app_config_id=c.app.config._id),
         update={'$inc': { 'last_ticket_num': 1}},
         new=True)
     session(cls).expunge(gbl)
     return gbl.last_ticket_num
Example #25
 def install_app(self, ep_name, mount_point=None, mount_label=None, ordinal=None, **override_options):
     App = g.entry_points["tool"][ep_name]
     if not mount_point:
         base_mount_point = mount_point = App.default_mount_point
         for x in range(10):
             if self.app_instance(mount_point) is None:
                 break
             mount_point = base_mount_point + "-%d" % x
     if not h.re_path_portion.match(mount_point):
         raise exceptions.ToolError, 'Mount point "%s" is invalid' % mount_point
     # HACK: reserved url components
     if mount_point in ("feed", "index", "icon", "_nav.json"):
         raise exceptions.ToolError, ('Mount point "%s" is reserved' % mount_point)
     if self.app_instance(mount_point) is not None:
         raise exceptions.ToolError, ('Mount point "%s" is already in use' % mount_point)
     assert self.app_instance(mount_point) is None
     if ordinal is None:
         ordinal = int(self.ordered_mounts(include_hidden=True)[-1]["ordinal"]) + 1
     options = App.default_options()
     options["mount_point"] = mount_point
     options["mount_label"] = mount_label or App.default_mount_label or mount_point
     options["ordinal"] = int(ordinal)
     options.update(override_options)
     cfg = AppConfig(project_id=self._id, tool_name=ep_name, options=options)
     app = App(self, cfg)
     with h.push_config(c, project=self, app=app):
         session(cfg).flush()
         app.install(self)
     return app
Example #26
 def commit(self):
     """Save off a snapshot of the artifact and increment the version #"""
     self.version += 1
     try:
         ip_address = request.headers.get("X_FORWARDED_FOR", request.remote_addr)
         ip_address = ip_address.split(",")[0].strip()
     except:
         ip_address = "0.0.0.0"
     data = dict(
         artifact_id=self._id,
         artifact_class="%s.%s" % (self.__class__.__module__, self.__class__.__name__),
         version=self.version,
         author=dict(
             id=c.user._id,
             username=c.user.username,
             display_name=c.user.get_pref("display_name"),
             logged_ip=ip_address,
         ),
         timestamp=datetime.utcnow(),
         data=state(self).clone(),
     )
     ss = self.__mongometa__.history_class(**data)
     session(ss).insert_now(ss, state(ss))
     log.info("Snapshot version %s of %s", self.version, self.__class__)
     return ss
Example #27
 def deliver(cls, nid, artifact_index_id, topic):
     """Called in the notification message handler to deliver notification IDs
     to the appropriate mailboxes.  Atomically appends the nids
     to the appropriate mailboxes.
     """
     d = {
         "project_id": c.project._id,
         "app_config_id": c.app.config._id,
         "artifact_index_id": {"$in": [None, artifact_index_id]},
         "topic": {"$in": [None, topic]},
     }
     mboxes = cls.query.find(d).all()
     log.debug("Delivering notification %s to mailboxes [%s]", nid, ", ".join([str(m._id) for m in mboxes]))
     for mbox in mboxes:
         try:
             mbox.query.update(
                 {"$push": dict(queue=nid), "$set": dict(last_modified=datetime.utcnow(), queue_empty=False)}
             )
             # Make sure the mbox doesn't stick around to be flush()ed
             session(mbox).expunge(mbox)
         except:
             # log error but try to keep processing, lest all the other eligible
             # mboxes for this notification get skipped and lost forever
             log.exception(
                 "Error adding notification: %s for artifact %s on project %s to user %s",
                 nid,
                 artifact_index_id,
                 c.project._id,
                 mbox.user_id,
             )
Example #28
def migrate_project_database(project):
    c.project = project
    target_uri = M.Project.default_database_uri(project.shortname)
    target_db = target_uri.rsplit('/')[-1]
    if project.database_uri == target_uri:
        log.info('Project %s is already migrated to %s', project.shortname, project.database_uri)
        return 2
    conn = M.session.main_doc_session.db.connection
    host = '%s:%s' % (conn.host, conn.port)
    dirname = os.tempnam()
    try:
        log.info('Backing up %s to %s', project.shortname, dirname)
        db_uri = project.database_uri
        db = db_uri.rsplit('/')[-1]
        assert 0 == os.system('%s --host %s --db %s -o %s' % (
                MONGO_DUMP, host, db, dirname))
        assert 0 == os.system('%s --host %s --db %s %s/%s ' % (
                MONGO_RESTORE, host, target_db, dirname, db))
        for p in M.Project.query.find(dict(database_uri=db_uri)):
            p.database_uri = M.Project.default_database_uri(project.shortname)
        session(project).flush()
        conn.drop_database(db)
    finally:
        if os.path.exists(dirname):
            shutil.rmtree(dirname)
    return 0
Example #29
 def clone_from(self, source_url):
     '''Initialize a repo as a clone of another'''
     self._repo.status = 'cloning'
     session(self._repo).flush(self._repo)
     log.info('Initialize %r as a clone of %s',
              self._repo, source_url)
     try:
         fullname = self._setup_paths(create_repo_dir=False)
         if os.path.exists(fullname):
             shutil.rmtree(fullname)
         if self.can_hotcopy(source_url):
             shutil.copytree(source_url, fullname)
             post_receive = os.path.join(self._repo.full_fs_path, 'hooks', 'post-receive')
             if os.path.exists(post_receive):
                 os.rename(post_receive, post_receive + '-user')
             repo = git.Repo(fullname)
         else:
             repo = git.Repo.clone_from(
                 source_url,
                 to_path=fullname,
                 bare=True)
         self.__dict__['_git'] = repo
         self._setup_special_files(source_url)
     except:
         self._repo.status = 'ready'
         session(self._repo).flush(self._repo)
         raise
Example #30
    def test_events(self, post_event):
        setup_trove_categories()

        # Create event
        cfg = {"trovecategories.enableediting": "true"}
        with h.push_config(config, **cfg):
            r = self.app.post("/categories/create/", params=dict(categoryname="test"))

        category_id = post_event.call_args[0][1]
        assert_true(isinstance(category_id, int))
        assert_equals(post_event.call_args[0][0], "trove_category_created")
        category = M.TroveCategory.query.get(trove_cat_id=category_id)

        # Update event
        category.fullname = "test2"
        session(M.TroveCategory).flush()
        edited_category_id = post_event.call_args[0][1]
        assert_true(isinstance(edited_category_id, int))
        assert_equals(edited_category_id, category_id)
        assert_equals(post_event.call_args[0][0], "trove_category_updated")

        # Delete event
        M.TroveCategory.delete(category)
        session(M.TroveCategory).flush()
        deleted_category_id = post_event.call_args[0][1]
        assert_true(isinstance(deleted_category_id, int))
        assert_equals(deleted_category_id, category_id)
        assert_equals(post_event.call_args[0][0], "trove_category_deleted")
Example #31
    def api_tickets(self, **data):
        import json
        import dateutil.parser
        if request.method == 'POST':
            log.info('api_tickets: %s', data)
            ok = True
            for_user = M.User.by_username(data['for_user'])
            if not for_user:
                ok = False
                flash('User not found')
            caps = None
            try:
                caps = json.loads(data['caps'])
            except ValueError:
                ok = False
                flash('JSON format error')
            if type(caps) is not type({}):
                ok = False
                flash(
                    'Capabilities must be a JSON dictionary, mapping capability name to optional discriminator(s) (or "")'
                )
            try:
                expires = dateutil.parser.parse(data['expires'])
            except ValueError:
                ok = False
                flash('Date format error')
            if ok:
                tok = None
                try:
                    tok = M.ApiTicket(user_id=for_user._id,
                                      capabilities=caps,
                                      expires=expires)
                    session(tok).flush()
                    log.info('New token: %s', tok)
                    flash('API Ticket created')
                except:
                    log.exception('Could not create API ticket:')
                    flash('Error creating API ticket')
        elif request.method == 'GET':
            data = {'expires': datetime.utcnow() + timedelta(days=2)}

        data['token_list'] = M.ApiTicket.query.find().sort(
            'mod_date', pymongo.DESCENDING).all()
        log.info(data['token_list'])
        return data
Example #32
 def command(self):
     from allura import model as M
     self.basic_setup()
     main_indexes = defaultdict(
         lambda: defaultdict(list))  # by db, then collection name
     project_indexes = defaultdict(list)  # by collection name
     base.log.info('Collecting indexes...')
     for m in Mapper.all_mappers():
         mgr = m.collection.m
         cname = mgr.collection_name
         cls = m.mapped_class
         if cname is None:
             base.log.info('... skipping abstract class %s', cls)
             continue
         base.log.info('... for class %s', cls)
         if session(cls) in (M.main_orm_session, M.repository_orm_session,
                             M.task_orm_session):
             idx = main_indexes[session(cls)][cname]
         else:
             idx = project_indexes[cname]
         idx.extend(mgr.indexes)
     base.log.info('Updating indexes for main DB')
     for odm_session, db_indexes in main_indexes.iteritems():
         db = odm_session.impl.db
         for name, indexes in db_indexes.iteritems():
             self._update_indexes(db[name], indexes)
     base.log.info('Updating indexes for project DBs')
     configured_dbs = set()
     for projects in utils.chunked_find(M.Project):
         for p in projects:
             db = p.database_uri
             if db in configured_dbs: continue
             configured_dbs.add(db)
             c.project = p
             db = M.project_doc_session.db
             base.log.info('... DB: %s', db)
             for name, indexes in project_indexes.iteritems():
                 self._update_indexes(db[name], indexes)
     if not configured_dbs:
         # e.g. during bootstrap with no projects
         db = M.project_doc_session.db
         base.log.info('... default DB: %s', db)
         for name, indexes in project_indexes.iteritems():
             self._update_indexes(db[name], indexes)
     base.log.info('Done updating indexes')
Example #33
    def save_image(cls,
                   filename,
                   fp,
                   content_type=None,
                   thumbnail_size=None,
                   thumbnail_meta=None,
                   square=False,
                   save_original=False,
                   original_meta=None):
        if content_type is None:
            content_type = utils.guess_mime_type(filename)
        if not content_type.lower() in SUPPORTED_BY_PIL:
            return None, None

        try:
            image = Image.open(fp)
        except IOError as e:
            log.error('Error opening image %s %s', filename, e)
            return None, None

        format = image.format
        if save_original:
            original_meta = original_meta or {}
            original = cls(filename=filename,
                           content_type=content_type,
                           **original_meta)
            with original.wfile() as fp_w:
                try:
                    if 'transparency' in image.info:
                        image.save(fp_w,
                                   format,
                                   transparency=image.info['transparency'])
                    else:
                        image.save(fp_w, format)
                except Exception as e:
                    session(original).expunge(original)
                    log.error('Error saving image %s %s', filename, e)
                    return None, None
        else:
            original = None

        thumbnail = cls.save_thumbnail(filename, image, content_type,
                                       thumbnail_size, thumbnail_meta, square)

        return original, thumbnail
Example #34
 def execute(cls, options):
     for chunk in chunked_find(M.User, {}):
         for u in chunk:
             log.info('Trimming emails for user %s', u.username)
             new_addresses = [M.EmailAddress.canonical(addr) for addr in u.email_addresses]
             u.email_addresses = new_addresses
             if u.preferences.email_address is not None:
                 u.preferences.email_address = M.EmailAddress.canonical(
                     u.preferences.email_address)
             session(u).flush(u)
     for chunk in chunked_find(M.EmailAddress, {}):
         for a in chunk:
             log.info('Trimming email address entry %s', a.email)
             a.email = M.EmailAddress.canonical(a.email)
             session(a).flush(a)
     M.main_orm_session.flush()
     M.main_orm_session.clear()
     log.info('Finished trimming emails')
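chunked_find walks a large collection in batches so each batch can be flushed and the session cleared, keeping memory bounded. A rough stand-in over a raw pymongo collection (an assumption about the helper's shape, not Allura's actual implementation):

    def chunked_find(coll, query, chunk_size=1000):
        # yield lists of documents, chunk_size at a time
        skip = 0
        while True:
            chunk = list(coll.find(query).skip(skip).limit(chunk_size))
            if not chunk:
                break
            yield chunk
            skip += chunk_size

For very large or actively written collections, paging on an indexed _id range is more robust than skip/limit.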
Example #35
 def command(self):
     self.basic_setup()
     h.set_context(self.args[1], neighborhood=self.args[2])
     extra_status = []
     for s in self.args[3:]:
         s = s.lower()
         if s == 'production':
              print('All projects always have access to production tools,'
                   ' so removing from list.')
             continue
         if s not in ('alpha', 'beta'):
             print('Unknown tool status %s' % s)
             sys.exit(1)
         extra_status.append(s)
     print('Setting project "%s" tool access to production + %r' %
           (self.args[1], extra_status))
     c.project._extra_tool_status = extra_status
     session(c.project).flush()
Example #36
 def from_artifact(cls, a):
     result = cls.query.get(ref_id=a.index_id())
     if result is None:
         try:
             result = cls(
                 ref_id = a.index_id(),
                 project_id = a.app_config.project_id,
                 app_config_id = a.app_config._id)
             session(result).flush(result)
         except pymongo.errors.DuplicateKeyError: # pragma no cover
             session(result).expunge(result)
             result = cls.query.get(ref_id=a.index_id())
     result.link = a.shorthand_id()
     result.url = a.url()
     if result.link is None:
         result.delete()
         return None
     return result
Example #37
def main():
    conn = M.session.main_doc_session.bind.conn
    n = M.Neighborhood.query.get(url_prefix='/u/')
    for p in M.Project.query.find(dict(neighborhood_id=n._id)):
        if not p.database_configured: continue
        if not p.shortname.startswith('u/'): continue
        log.info('Checking to see if %s is configured...', p.database)
        db = conn[p.database]
        if is_unconfigured(db):
            if sys.argv[-1] == 'test':
                log.info('... it is not, so I would drop it.')
                continue
            log.info('... it is not, so dropping it.')
            conn.drop_database(p.database)
            p.database_configured = False
            session(p).flush()
        else:
            log.info('... it is.')
Example #38
 def new(cls):
      '''Create a new ticket, safely (ensuring a unique ticket_num)'''
     while True:
         ticket_num = c.app.globals.next_ticket_num()
         ticket = cls(app_config_id=c.app.config._id,
                      custom_fields=dict(),
                      ticket_num=ticket_num)
         try:
             session(ticket).flush(ticket)
             h.log_action(log, 'opened').info('')
             return ticket
         except OperationFailure, err:
             if 'duplicate' in err.args[0]:
                 log.warning('Try to create duplicate ticket %s',
                             ticket.url())
                 session(ticket).expunge(ticket)
                 continue
             raise
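The retry loop leans on a unique index on ticket_num: when two processes race, the loser's insert fails with a duplicate-key error, the half-created ticket is expunged so it is never flushed, and the loop asks for a fresh number. The same optimistic-insert shape with raw pymongo (a unique index on ticket_num is assumed):

    from pymongo.errors import DuplicateKeyError

    def new_ticket(tickets, next_num):
        # next_num() hands out candidate numbers, e.g. via $inc as shown earlier
        while True:
            doc = {'ticket_num': next_num()}
            try:
                tickets.insert_one(doc)
                return doc
            except DuplicateKeyError:
                continue  # lost the race; try the next number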
Example #39
 def _try_flush(self, instance, expunge=False):
     try:
         inst_session = session(instance)
     except AttributeError:
         inst_session = None
     if inst_session:
         inst_session.flush(instance)
         if expunge:
             inst_session.expunge(instance)
Example #40
    def setUp(self):
        setup_unit_test()
        self.session = session(File)
        self.conn = M.session.main_doc_session.db._connection
        self.db = M.session.main_doc_session.db

        self.db.fs.remove()
        self.db.fs.files.remove()
        self.db.fs.chunks.remove()
Example #41
 def clone_from(self, source_url, copy_hooks=False):
     '''Initialize a repo as a clone of another'''
     self._repo.status = 'cloning'
     session(self._repo).flush(self._repo)
     log.info('Initialize %r as a clone of %s', self._repo, source_url)
     try:
         fullname = self._setup_paths(create_repo_dir=False)
         if os.path.exists(fullname):
             shutil.rmtree(fullname)
         repo = git.Repo.clone_from(source_url, to_path=fullname, bare=True)
         self.__dict__['_git'] = repo
         self._setup_special_files(source_url, copy_hooks)
     except:
         self._repo.status = 'ready'
         session(self._repo).flush(self._repo)
         raise
     log.info('... %r cloned', self._repo)
     self._repo.refresh(notify=False)
Example #42
 def post(cls,
          function,
          args=None,
          kwargs=None,
          result_type='forget',
          priority=10,
          delay=0,
          flush_immediately=True,
          ):
     '''Create a new task object based on the current context.'''
     if args is None:
         args = ()
     if kwargs is None:
         kwargs = {}
     task_name = '%s.%s' % (
         function.__module__,
         function.__name__)
     context = dict(
         project_id=None,
         app_config_id=None,
         user_id=None,
         notifications_disabled=False)
     if getattr(c, 'project', None):
         context['project_id'] = c.project._id
          context['notifications_disabled'] = c.project.notifications_disabled
     if getattr(c, 'app', None):
         context['app_config_id'] = c.app.config._id
     if getattr(c, 'user', None):
         context['user_id'] = c.user._id
     obj = cls(
         state='ready',
         priority=priority,
         result_type=result_type,
         task_name=task_name,
         args=args,
         kwargs=kwargs,
         process=None,
         result=None,
         context=context,
         time_queue=datetime.utcnow() + timedelta(seconds=delay))
     if flush_immediately:
         session(obj).flush(obj)
     return obj
Example #43
    def command(self):
        self.basic_setup()
        shortname = self.args[1]
        nb = M.Neighborhood.query.get(name=shortname)
        if not nb:
            nb = M.Neighborhood.query.get(_id=ObjectId(shortname))
        if nb is None:
            raise exceptions.NoSuchNeighborhoodError(
                "The neighborhood %s "
                "could not be found in the database" % shortname)
        tool_value = self.args[2].lower()
        if tool_value[:1] == "t":
            home_tool_active = True
        else:
            home_tool_active = False

        if home_tool_active == nb.has_home_tool:
            return

        p = nb.neighborhood_project
        if home_tool_active:
            zero_position_exists = False
            for ac in p.app_configs:
                if ac.options['ordinal'] == 0:
                    zero_position_exists = True
                    break

            if zero_position_exists:
                for ac in p.app_configs:
                    ac.options['ordinal'] = ac.options['ordinal'] + 1
            p.install_app('home', 'home', 'Home', ordinal=0)
        else:
            app_config = p.app_config('home')
            zero_position_exists = False
            if app_config.options['ordinal'] == 0:
                zero_position_exists = True

            p.uninstall_app('home')
            if zero_position_exists:
                for ac in p.app_configs:
                    ac.options['ordinal'] = ac.options['ordinal'] - 1

        session(M.AppConfig).flush()
        session(M.Neighborhood).flush()
Example #44
 def do_request_merge(self, **kw):
     kw = self.mr_widget.to_python(kw)
     downstream = dict(project_id=c.project._id,
                       mount_point=c.app.config.options.mount_point,
                       commit_id=c.app.repo.commit(kw['source_branch'])._id)
     with c.app.repo.push_upstream_context():
         mr = M.MergeRequest.upsert(downstream=downstream,
                                    target_branch=kw['target_branch'],
                                    summary=kw['summary'],
                                    description=kw['description'])
         M.Notification.post(mr,
                             'merge_request',
                             subject='Merge request: ' + mr.summary)
         t = M.Thread.new(discussion_id=c.app.config.discussion_id,
                          artifact_reference=mr.index_id(),
                          subject='Discussion for Merge Request #:%s: %s' %
                          (mr.request_number, mr.summary))
         session(t).flush()
         redirect(mr.url())
Example #45
 def compute_tree_new(self, commit, tree_path='/'):
     from allura.model import repo as RM
     tree_path = tree_path[:-1]
     tree_id = self._tree_oid(commit._id, tree_path)
     tree, isnew = RM.Tree.upsert(tree_id)
     if not isnew: return tree_id
     log.debug('Computing tree for %s: %s', self._revno(commit._id),
               tree_path)
     rev = self._revision(commit._id)
     try:
         infos = self._svn.info2(self._url + tree_path,
                                 revision=rev,
                                 depth=pysvn.depth.immediates)
     except pysvn.ClientError:
         log.exception('Error computing tree for %s: %s(%s)', self._repo,
                       commit, tree_path)
         tree.delete()
         return None
     log.debug('Compute tree for %d paths', len(infos))
     for path, info in infos[1:]:
         last_commit_id = self._oid(info['last_changed_rev'].number)
         last_commit = M.repo.Commit.query.get(_id=last_commit_id)
         M.repo_refresh.set_last_commit(
             self._repo._id,
             re.sub(r'/?$', '/', tree_path),  # force it to end with /
             path,
             self._tree_oid(commit._id, path),
             M.repo_refresh.get_commit_info(last_commit))
         if info.kind == pysvn.node_kind.dir:
             tree.tree_ids.append(
                 Object(id=self._tree_oid(commit._id, path), name=path))
         elif info.kind == pysvn.node_kind.file:
             tree.blob_ids.append(
                 Object(id=self._tree_oid(commit._id, path), name=path))
         else:
             assert False
     session(tree).flush(tree)
     trees_doc = RM.TreesDoc.m.get(_id=commit._id)
     if not trees_doc:
         trees_doc = RM.TreesDoc(dict(_id=commit._id, tree_ids=[]))
     trees_doc.tree_ids.append(tree_id)
     trees_doc.m.save(safe=False)
     return tree_id
Example #46
 def deliver(cls, nid, artifact_index_id, topic):
     '''Called in the notification message handler to deliver notification IDs
     to the appropriate mailboxes.  Atomically appends the nids
     to the appropriate mailboxes.
     '''
     d = {
         'project_id':c.project._id,
         'app_config_id':c.app.config._id,
         'artifact_index_id':{'$in':[None, artifact_index_id]},
         'topic':{'$in':[None, topic]}
         }
     for mbox in cls.query.find(d):
         mbox.query.update(
             {'$push':dict(queue=nid),
              '$set':dict(last_modified=datetime.utcnow(),
                          queue_empty=False),
             })
         # Make sure the mbox doesn't stick around to be flush()ed
         session(mbox).expunge(mbox)
Example #47
def main():
    test = sys.argv[-1] == 'test'
    all_projects = M.Project.query.find().all()
    log.info('Fixing tracker thread subjects')
    for project in all_projects:
        if project.parent_id: continue
        c.project = project
        all_tickets = TM.Ticket.query.find()  # all tickets for all trackers in this project
        if not all_tickets.count(): continue
        for ticket in all_tickets:
            thread = ticket.get_discussion_thread()
            thread.subject = ''
        if test:
            log.info('... would fix ticket threads in %s', project.shortname)
        else:
            log.info('... fixing ticket threads in %s', project.shortname)
            session(project).flush()
        session(project).clear()
Example #48
 def subscribe(
     cls,
     user_id=None, project_id=None, app_config_id=None,
     artifact=None, topic=None,
     type='direct', n=1, unit='day'):
     if user_id is None: user_id = c.user._id
     if project_id is None: project_id = c.project._id
     if app_config_id is None: app_config_id = c.app.config._id
     tool_already_subscribed = cls.query.get(user_id=user_id,
         project_id=project_id,
         app_config_id=app_config_id,
         artifact_index_id=None)
     if tool_already_subscribed:
         log.debug('Tried to subscribe to artifact %s, while there is a tool subscription', artifact)
         return
     if artifact is None:
         artifact_title = 'All artifacts'
         artifact_url = None
         artifact_index_id = None
     else:
         i = artifact.index()
         artifact_title = i['title_s']
         artifact_url = artifact.url()
         artifact_index_id = i['id']
         artifact_already_subscribed = cls.query.get(user_id=user_id,
             project_id=project_id,
             app_config_id=app_config_id,
             artifact_index_id=artifact_index_id)
         if artifact_already_subscribed:
             return
     d = dict(user_id=user_id, project_id=project_id, app_config_id=app_config_id,
              artifact_index_id=artifact_index_id, topic=topic)
     sess = session(cls)
     try:
         mbox = cls(
             type=type, frequency=dict(n=n, unit=unit),
             artifact_title=artifact_title,
             artifact_url=artifact_url,
             **d)
         sess.flush(mbox)
     except pymongo.errors.DuplicateKeyError:
         sess.expunge(mbox)
         mbox = cls.query.get(**d)
         mbox.artifact_title = artifact_title
         mbox.artifact_url = artifact_url
         mbox.type = type
         mbox.frequency.n = n
         mbox.frequency.unit = unit
         sess.flush(mbox)
     if not artifact_index_id:
         # Unsubscribe from individual artifacts when subscribing to the tool
         for other_mbox in cls.query.find(dict(
             user_id=user_id, project_id=project_id, app_config_id=app_config_id)):
             if other_mbox is not mbox:
                 other_mbox.delete()
Example #49
 def clone_from(self, source_url):
     '''Initialize a repo as a clone of another'''
     fullname = self._setup_paths(create_repo_dir=False)
     if os.path.exists(fullname):
         shutil.rmtree(fullname)
     log.info('Initialize %r as a clone of %s', self._repo, source_url)
     # !$ hg doesn't like unicode as urls
     src, repo = hg.clone(ui.ui(),
                          source_url.encode('utf-8'),
                          self._repo.full_fs_path.encode('utf-8'),
                          update=False)
     self.__dict__['_hg'] = repo
     self._setup_special_files()
     self._repo.status = 'analyzing'
     session(self._repo).flush()
     log.info('... %r cloned, analyzing', self._repo)
     self._repo.refresh(notify=False)
     self._repo.status = 'ready'
     log.info('... %s ready', self._repo)
     session(self._repo).flush()
Example #50
    def test_trove_hierarchy(self):
        self.create_some_cats()
        session(M.TroveCategory).flush()

        r = self.app.get('/categories/browse')
        rendered_tree = r.html.find('div', {'id': 'content_base'}).find('div').find('div').find('ul')
        expected = BeautifulSoup("""
        <ul>
            <li>Root</li>
            <ul>
                <li>CategoryA</li>
                <ul>
                    <li>ChildA</li>
                    <li>ChildB</li>
                </ul>
                <li>CategoryB</li>
            </ul>
        </ul>
        """.strip(), 'html.parser')
        assert_equals(str(expected), str(rendered_tree))
Example #51
def main():
    test = sys.argv[-1] == 'test'
    projects = M.Project.query.find().all()
    log.info('Restoring labels on projects')
    for p in projects:
        restore_labels(p, test)
    if not test:
        session(p).flush()
    log.info('Restoring labels on artifacts')
    for p in projects:
        if p.parent_id: continue
        c.project = p
        for name, cls in MappedClass._registry.iteritems():
            if not issubclass(cls, M.Artifact): continue
            if session(cls) is None: continue
            for a in cls.query.find():
                restore_labels(a, test)
        if not test:
            M.artifact_orm_session.flush()
        M.artifact_orm_session.clear()
Example #52
 def _fs(cls):
     gridfs_args = (session(cls).impl.db, cls._root_collection())
     try:
         # for some pymongo 2.x versions the _connect option is available to avoid index creation on every usage
         # (it'll still create indexes on delete & write)
         gridfs = GridFS(*gridfs_args, _connect=False)
     except TypeError:  # (unexpected keyword argument)
         # pymongo 3.0 removes the _connect arg
         # pymongo 3.1 makes index creation only happen on the very first write
         gridfs = GridFS(*gridfs_args)
     return gridfs
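Once constructed, the GridFS object reads and writes through the <root_collection>.files and .chunks collections that the surrounding model code manages. A short usage sketch with plain pymongo and gridfs (local connection assumed):

    from pymongo import MongoClient
    from gridfs import GridFS

    db = MongoClient().allura      # assumes a local MongoDB instance
    fs = GridFS(db, 'fs')          # root collection, as _root_collection() supplies
    file_id = fs.put(b'hello', filename='greeting.txt')
    print(fs.get(file_id).read())  # b'hello'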
Example #53
    def import_tool(self, project, user, project_name=None, mount_point=None,
            mount_label=None, trac_url=None, user_map=None, **kw):
        """ Import Trac tickets into a new Allura Tracker tool.

        """
        trac_url = trac_url.rstrip('/') + '/'
        mount_point = mount_point or 'tickets'
        app = project.install_app(
                'Tickets',
                mount_point=mount_point,
                mount_label=mount_label or 'Tickets',
                open_status_names='new assigned accepted reopened',
                closed_status_names='closed',
                import_id={
                        'source': self.source,
                        'trac_url': trac_url,
                    },
            )
        session(app.config).flush(app.config)
        session(app.globals).flush(app.globals)
        try:
            with h.push_config(c, app=app):
                TracImportSupport().perform_import(
                        json.dumps(export(trac_url), cls=DateJSONEncoder),
                        json.dumps({
                            'user_map': json.loads(user_map) if user_map else {},
                            'usernames_match': self.usernames_match(trac_url),
                            }),
                        )
            AuditLog.log(
                'import tool %s from %s' % (
                        app.config.options.mount_point,
                        trac_url,
                    ),
                project=project, user=user, url=app.url,
            )
            g.post_event('project_updated')
            return app
        except Exception:
            h.make_app_admin_only(app)
            raise
Example #54
def main(opts):
    if opts.project and not opts.nbhd:
        error('Specify neighborhood')
    p_query = {}
    if opts.nbhd:
        nbhd = M.Neighborhood.query.get(url_prefix=opts.nbhd)
        if not nbhd:
            error("Can't find such neighborhood")
        p_query['neighborhood_id'] = nbhd._id
        if opts.project:
            p_query['shortname'] = opts.project

        projects = M.Project.query.find(p_query).all()
        if not projects:
            error('No project matches given parameters')

        app_config_ids = []
        for p in projects:
            for ac in p.app_configs:
                if ac.tool_name.lower() == 'wiki':
                    app_config_ids.append(ac._id)

        if not app_config_ids:
            error('No wikis in given projects')
        query = {'app_config_id': {'$in': app_config_ids}}
    else:
        query = {}

    M.artifact_orm_session._get().skip_last_updated = True  # don't bump mod times for this bulk rename
    try:
        for chunk in chunked_find(Page, query):
            for page in chunk:
                if '/' in page.title:
                    log.info('Found {} in {}'.format(page.title,
                                                     page.app_config.url()))
                    page.title = page.title.replace('/', '-')
                    with h.push_context(page.app_config.project._id,
                                        app_config_id=page.app_config_id):
                        session(page).flush(page)
    finally:
        M.artifact_orm_session._get().skip_last_updated = False
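
The script expects an opts object with nbhd and project attributes; a plausible argparse front end would look like this (flag names and help text are assumptions, not the original script's CLI):

import argparse

def parse_options():
    parser = argparse.ArgumentParser(
        description='Replace "/" with "-" in wiki page titles')
    parser.add_argument('--nbhd', default=None,
                        help="neighborhood url_prefix, e.g. '/p/'")
    parser.add_argument('--project', default=None,
                        help='project shortname (requires --nbhd)')
    return parser.parse_args()

if __name__ == '__main__':
    main(parse_options())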
Example #55
    def commit(self, update_stats=True):
        '''Save off a snapshot of the artifact and increment the version number'''
        try:
            ip_address = utils.ip_address(request)
        except Exception:
            ip_address = '0.0.0.0'
        data = dict(
            artifact_id=self._id,
            artifact_class='%s.%s' % (
                self.__class__.__module__,
                self.__class__.__name__),
            author=dict(
                id=c.user._id,
                username=c.user.username,
                display_name=c.user.get_pref('display_name'),
                logged_ip=ip_address),
            data=state(self).clone())
        while True:
            self.version += 1
            data['version'] = self.version
            data['timestamp'] = datetime.utcnow()
            ss = self.__mongometa__.history_class(**data)
            try:
                session(ss).insert_now(ss, state(ss))
            except pymongo.errors.DuplicateKeyError:
                log.warning('Trying to create duplicate version %s of %s',
                            self.version, self.__class__)
                session(ss).expunge(ss)
                continue
            else:
                break
        log.debug('Snapshot version %s of %s',
                  self.version, self.__class__)
        if update_stats:
            if self.version > 1:
                g.statsUpdater.modifiedArtifact(
                    self.type_s, self.mod_date, self.project, c.user)
            else:
                g.statsUpdater.newArtifact(
                    self.type_s, self.mod_date, self.project, c.user)
        return ss
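
The retry loop is optimistic versioning: each writer bumps the version and lets a unique index on (artifact_id, version) reject losers, who simply try the next number. A stripped-down pymongo sketch of the same pattern (collection and index names are assumptions):

from datetime import datetime
import pymongo

def claim_next_version(coll, artifact_id, version):
    # assumes a unique index on (artifact_id, version), as the history
    # collection above must have for DuplicateKeyError to fire
    while True:
        version += 1
        try:
            coll.insert_one({'artifact_id': artifact_id,
                             'version': version,
                             'timestamp': datetime.utcnow()})
            return version
        except pymongo.errors.DuplicateKeyError:
            continue  # another writer claimed this version; try the next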
Example #56
    def install_app(self,
                    ep_name,
                    mount_point=None,
                    mount_label=None,
                    ordinal=None,
                    **override_options):
        App = g.entry_points['tool'][ep_name]
        if not mount_point:
            base_mount_point = mount_point = App.default_mount_point
            for x in range(10):
                if self.app_instance(mount_point) is None:
                    break
                mount_point = base_mount_point + '-%d' % x
        if not App.relaxed_mount_points:
            mount_point = mount_point.lower()
        if not App.validate_mount_point(mount_point):
            raise exceptions.ToolError('Mount point "%s" is invalid' % mount_point)
        # HACK: reserved url components
        if mount_point in ('feed', 'index', 'icon', '_nav.json'):
            raise exceptions.ToolError('Mount point "%s" is reserved' % mount_point)
        if self.app_instance(mount_point) is not None:
            raise exceptions.ToolError('Mount point "%s" is already in use' % mount_point)
        assert self.app_instance(mount_point) is None
        if ordinal is None:
            ordinal = int(
                self.ordered_mounts(include_hidden=True)[-1]['ordinal']) + 1
        options = App.default_options()
        options['mount_point'] = mount_point
        options['mount_label'] = mount_label or App.default_mount_label or mount_point
        options['ordinal'] = int(ordinal)
        options.update(override_options)
        cfg = AppConfig(project_id=self._id,
                        tool_name=ep_name.lower(),
                        options=options)
        app = App(self, cfg)
        with h.push_config(c, project=self, app=app):
            session(cfg).flush()
            app.install(self)
        return app
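
A minimal invocation sketch; 'Wiki' must be a registered tool entry point, and the mount values are illustrative:

app = project.install_app('Wiki', mount_point='docs', mount_label='Documentation')
# omitting mount_point falls back to App.default_mount_point, with a -0..-9
# suffix tried on collision, per the loop at the top of install_app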
Example #57
def get_commit_info(commit):
    if not isinstance(commit, Commit):
        commit = mapper(Commit).create(commit, dict(instrument=False))
    sess = session(commit)
    if sess: sess.expunge(commit)
    return dict(id=commit._id,
                author=commit.authored.name,
                author_email=commit.authored.email,
                date=commit.authored.date,
                author_url=commit.author_url,
                shortlink=commit.shorthand_id(),
                summary=commit.summary)
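
A hypothetical caller, e.g. inside a repository refresh hook; commit may be either a Commit instance or a raw document, per the mapper() branch above:

info = get_commit_info(commit)
log.info('%s by %s <%s>: %s', info['shortlink'], info['author'],
         info['author_email'], info['summary'])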
Example #58
    def test_commit(self):
        entry = self.repo.commit('HEAD')
        assert str(entry.authored.name) == 'Rick Copeland', entry.authored
        assert entry.message
        # Test that sha1s for named refs are looked up in cache first, instead
        # of from disk.
        with mock.patch('forgegit.model.git_repo.M.repo.Commit.query') as q:
            self.repo.heads.append(Object(name='HEAD', object_id='deadbeef'))
            self.repo.commit('HEAD')
            q.get.assert_called_with(_id='deadbeef')
        # test the auto-gen tree fall-through
        orig_tree = M.repo.Tree.query.get(_id=entry.tree_id)
        assert orig_tree
        # force it to regenerate the tree
        M.repo.Tree.query.remove(dict(_id=entry.tree_id))
        session(orig_tree).flush()
        # ensure we don't just pull it from the session cache
        session(orig_tree).expunge(orig_tree)
        # ensure we don't just use the LazyProperty copy
        session(entry).expunge(entry)
        entry = self.repo.commit(entry._id)
        # regenerate the tree
        new_tree = entry.tree
        assert new_tree
        self.assertEqual(new_tree._id, orig_tree._id)
        self.assertEqual(new_tree.tree_ids, orig_tree.tree_ids)
        self.assertEqual(new_tree.blob_ids, orig_tree.blob_ids)
        self.assertEqual(new_tree.other_ids, orig_tree.other_ids)
Example #59
    def deliver(cls, nid, artifact_index_ids, topic):
        '''Called in the notification message handler to deliver notification IDs
        to the appropriate mailboxes.  Atomically appends the nids
        to the appropriate mailboxes.
        '''

        # get tool-wide ("None") and specific artifact subscriptions
        artifact_index_ids.append(None)
        d = {
            'project_id': c.project._id,
            'app_config_id': c.app.config._id,
            'artifact_index_id': {'$in': artifact_index_ids},
            'topic': {'$in': [None, topic]},
        }
        mboxes = cls.query.find(d).all()
        log.debug('Delivering notification %s to mailboxes [%s]', nid,
                  ', '.join([str(m._id) for m in mboxes]))
        for mbox in mboxes:
            try:
                # _id is automatically added by ming's "query", so this
                # matches the current mbox
                mbox.query.update(
                    {'$push': dict(queue=nid),
                     '$set': dict(last_modified=datetime.utcnow(),
                                  queue_empty=False)})
                # Make sure the mbox doesn't stick around to be flush()ed
                session(mbox).expunge(mbox)
            except Exception:
                # log the error but keep processing, lest all the other eligible
                # mboxes for this notification get skipped and lost forever
                log.exception(
                    'Error adding notification: %s for artifact %s on project %s to user %s',
                    nid, artifact_index_ids, c.project._id, mbox.user_id)
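
For clarity, the same atomic enqueue expressed as a raw pymongo update (the 'mailbox' collection name and mbox_id variable are assumptions):

from datetime import datetime

def enqueue_notification(db, mbox_id, nid):
    # one atomic update: append the notification id and mark the queue non-empty
    db.mailbox.update_one(
        {'_id': mbox_id},
        {'$push': {'queue': nid},
         '$set': {'last_modified': datetime.utcnow(),
                  'queue_empty': False}})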
Example #60
    def get_discussion_thread(self, data=None):
        """Return the discussion thread and parent_id for this artifact.

        :return: (:class:`allura.model.discuss.Thread`, parent_thread_id (int))

        """
        from .discuss import Thread
        threads = Thread.query.find(dict(ref_id=self.index_id())).all()
        if not threads:
            idx = self.index()
            t = Thread.new(
                app_config_id=self.app_config_id,
                discussion_id=self.app_config.discussion_id,
                ref_id=idx['id'],
                subject='%s discussion' % h.get_first(idx, 'title'))
        elif len(threads) == 1:
            t = threads[0]
        else:
            # there should not be multiple threads, we'll merge them
            destination = threads.pop()
            for thread in threads:
                for post in thread.posts:
                    post.thread_id = destination._id
                    destination.num_replies += 1
                    destination.last_post_date = max(destination.last_post_date, post.mod_date)
                    session(post).flush(post)
                    session(post).expunge(post)  # so thread.posts ref later in the code doesn't use stale posts
                Thread.query.remove({'_id': thread._id})  # NOT thread.delete() since that would remove its posts too
                thread.attachment_class().query.update({'thread_id': thread._id},
                                                       {'$set': {'thread_id': destination._id}},
                                                       multi=True)
            t = destination

        parent_id = None
        if data:
            in_reply_to = data.get('in_reply_to', [])
            if in_reply_to:
                parent_id = in_reply_to[0]

        return t, parent_id
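
A hedged usage sketch, e.g. when threading a reply parsed from an inbound email; artifact and msg_id are illustrative, and Thread.post accepting text and parent_id is assumed from Allura's discussion model:

thread, parent_id = artifact.get_discussion_thread(
    data={'in_reply_to': [msg_id]})
post = thread.post(text='Reply body', parent_id=parent_id)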