Example #1
File: blog.py Project: araymund/karl
def coarse_month_range(year, month):
    """Returns the range of coarse datetimes for a month."""
    last_day = calendar.monthrange(year, month)[1]
    first_moment = coarse_datetime_repr(datetime.datetime(year, month, 1))
    last_moment = coarse_datetime_repr(
        datetime.datetime(year, month, last_day, 23, 59, 59))
    return first_moment, last_moment
Example #2
def date_range(year, month):
    """return a tuple of two datetimes suitable for a range query"""
    begin = datetime.datetime(year, month, 1)
    endyear, endmonth = next_month(year, month)
    end = datetime.datetime(endyear, endmonth, 1)

    return coarse_datetime_repr(begin), coarse_datetime_repr(end)
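The next_month helper used by date_range above is not shown on this page. A minimal sketch of what it presumably returns, inferred only from how date_range calls it (this is a hypothetical implementation, not the karl source):

def next_month(year, month):
    # Hypothetical helper: return the (year, month) pair that follows the
    # given month, rolling December over into January of the next year.
    if month == 12:
        return year + 1, 1
    return year, month + 1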
Example #3
File: blog.py Project: disko/karl
def coarse_month_range(year, month):
    """Returns the range of coarse datetimes for a month."""
    last_day = calendar.monthrange(year, month)[1]
    first_moment = coarse_datetime_repr(
        datetime.datetime(year, month, 1))
    last_moment = coarse_datetime_repr(
        datetime.datetime(year, month, last_day, 23, 59, 59))
    return first_moment, last_moment
Example #4
File: search.py Project: cguardia/karl
def make_query(context, request):
    """Given a search request, return a catalog query and a list of terms.
    """
    params = request.params
    query = {}
    terms = []

    term = params.get('body')
    if term:
        terms.append(term)

    kind = params.get('kind')
    if kind:
        searcher = queryUtility(IGroupSearchFactory, kind)
        if searcher is None:
            # If the 'kind' we got is not known, return an error
            fmt = "The LiveSearch group %s is not known"
            raise HTTPBadRequest(fmt % kind)
        terms.append(kind)
    else:
        searcher = default_group_search

    searcher = searcher(context, request, term)
    query.update(searcher.criteria)

    creator = params.get('creator')
    if creator:
        userids = list(_iter_userids(context, request, creator))
        query['creator'] = {
            'query': userids,
            'operator': 'or',
            }
        terms.append(creator)

    tags = filter(None, params.getall('tags'))
    if tags:
        query['tags'] = {
            'query': tags,
            'operator': 'or',
            }
        terms.extend(tags)

    year = params.get('year')
    if year:
        year = int(year)
        begin = coarse_datetime_repr(datetime.datetime(year, 1, 1))
        end = coarse_datetime_repr(datetime.datetime(year, 12, 31, 12, 59, 59))
        query['creation_date'] = (begin, end)
        terms.append(year)

    since = params.get('since')
    if since:
        option = since_options[since]
        since = datetime.datetime.now() - option['delta']
        query['creation_date'] = (coarse_datetime_repr(since), None)
        terms.append(option['name'])

    return query, terms
Example #5
def get_catalog_events(context, request,
                       searchterm=None, year=None, month=None,
                       past_events=None):

    # Build up a query
    query = dict(
        path={'query': resource_path(context)},
        allowed={'query': effective_principals(request), 'operator': 'or'},
        interfaces=[ICalendarEvent],
        sort_index="start_date",
        reverse=True,
        use_cache=False,
        )

    if searchterm is not None:
        query['texts'] = searchterm

    if searchterm is not None and year is None and month is None:
        # all years, all months, don't add anything to the query
        pass
    elif year is not None or month is not None:
        if year is not None:
            year = int(year)
        else:
            # No year given, assume this year
            year = datetime.datetime.now().year

        if month is not None:
            month = int(month)
            last_day = calendar.monthrange(year, month)[1]
            first_moment = coarse_datetime_repr(
                datetime.datetime(year, month, 1))
            last_moment = coarse_datetime_repr(
                datetime.datetime(year, month, last_day, 23, 59, 59))

        else:
            # No month given, search entire year
            first_moment = coarse_datetime_repr(datetime.datetime(year, 1, 1))
            last_moment = coarse_datetime_repr(datetime.datetime(year+1, 1, 1))

        query['start_date'] = (None, last_moment)
        query['end_date'] = (first_moment, None)

    else:
        # Show either all future or all past events
        now = coarse_datetime_repr(datetime.datetime.now())
        if past_events:
            # Past; show the most recent first
            query['end_date'] = (None, now)
            query['reverse'] = True
        else:
            # Future; show the soonest first
            query['end_date'] = (now, None)
            query['reverse'] = False

    batch = get_catalog_batch_grid(context, request, **query)

    return batch
Example #6
File: test_utils.py Project: boothead/karl
    def test_coarse_datetime_repr(self):
        import datetime
        from karl.utils import coarse_datetime_repr
        self.assertEqual(coarse_datetime_repr(
            datetime.datetime(2009, 2, 13, 23, 31, 30)), 12345678)
        self.assertEqual(coarse_datetime_repr(
            datetime.datetime(2009, 2, 13, 23, 31, 31)), 12345678)
        self.assertEqual(coarse_datetime_repr(
            datetime.datetime(2009, 2, 13, 23, 31, 40)), 12345679)
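The assertions in this test pin down the behavior of coarse_datetime_repr: 2009-02-13 23:31:30 UTC is Unix time 1234567890, and each expected value is that timestamp divided by 100. A minimal sketch consistent with these assertions, assuming a UTC-based conversion (the actual karl.utils implementation may differ in detail):

import calendar

def coarse_datetime_repr(date):
    # Sketch: represent a datetime as an integer with 100-second
    # granularity, which is what the catalog range queries above rely on.
    # Assumes the datetime is interpreted as UTC.
    timetime = calendar.timegm(date.timetuple())
    return int(timetime) // 100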
Example #7
    def test_coarse_datetime_repr(self):
        import datetime
        from karl.utils import coarse_datetime_repr
        self.assertEqual(
            coarse_datetime_repr(datetime.datetime(2009, 2, 13, 23, 31, 30)),
            12345678)
        self.assertEqual(
            coarse_datetime_repr(datetime.datetime(2009, 2, 13, 23, 31, 31)),
            12345678)
        self.assertEqual(
            coarse_datetime_repr(datetime.datetime(2009, 2, 13, 23, 31, 40)),
            12345679)
Example #8
def get_catalog_news(context, request, searchterm=None, year=None, month=None):

    # Build up a query
    query = dict(
        path={'query': resource_path(context)},
        allowed={
            'query': effective_principals(request),
            'operator': 'or'
        },
        interfaces=[INewsItem],
        sort_index="publication_date",
        reverse=True,
    )

    if searchterm is not None:
        query['texts'] = searchterm

    now = coarse_datetime_repr(datetime.datetime.now())
    if year is not None or month is not None:
        if year is not None:
            year = int(year)
        else:
            # No year given, assume this year
            year = datetime.datetime.now().year

        if month is not None:
            month = int(month)
            last_day = calendar.monthrange(year, month)[1]
            first_moment = coarse_datetime_repr(
                datetime.datetime(year, month, 1))
            last_moment = coarse_datetime_repr(
                datetime.datetime(year, month, last_day, 23, 59, 59))

        else:
            # No month given, search entire year
            first_moment = coarse_datetime_repr(datetime.datetime(year, 1, 1))
            last_moment = coarse_datetime_repr(
                datetime.datetime(year + 1, 1, 1))

        # Never show news items that aren't published yet
        last_moment = min(last_moment, now)

        query['publication_date'] = (first_moment, last_moment)

    else:
        # Don't show news from future
        query['publication_date'] = (None, now)

    batch = get_catalog_batch_grid(context, request, **query)

    return batch
Example #9
def get_catalog_news(context, request,
                     searchterm=None, year=None, month=None):

    # Build up a query
    query = dict(
        path={'query': resource_path(context)},
        allowed={'query': effective_principals(request), 'operator': 'or'},
        interfaces=[INewsItem],
        sort_index="publication_date",
        reverse=True,
        )

    if searchterm is not None:
        query['texts'] = searchterm

    now = coarse_datetime_repr(datetime.datetime.now())
    if year is not None or month is not None:
        if year is not None:
            year = int(year)
        else:
            # No year given, assume this year
            year = datetime.datetime.now().year

        if month is not None:
            month = int(month)
            last_day = calendar.monthrange(year, month)[1]
            first_moment = coarse_datetime_repr(
                datetime.datetime(year, month, 1))
            last_moment = coarse_datetime_repr(
                datetime.datetime(year, month, last_day, 23, 59, 59))

        else:
            # No month given, search entire year
            first_moment = coarse_datetime_repr(datetime.datetime(year, 1, 1))
            last_moment = coarse_datetime_repr(datetime.datetime(year+1, 1, 1))

        # Never show news items that aren't published yet
        last_moment = min(last_moment, now)

        query['publication_date'] = (first_moment, last_moment)

    else:
        # Don't show news from future
        query['publication_date'] = (None, now)

    batch = get_catalog_batch_grid(context, request, **query)

    return batch
Example #10
    def test_excludes_inactive(self):
        from datetime import datetime
        from datetime import timedelta
        from karl.utils import coarse_datetime_repr
        now = datetime.now()
        today = now.today()
        six_months_ago = today - timedelta(days=180)
        self._set_TODAY(today)
        self._register()
        context = testing.DummyModel()
        profiles = context['profiles'] = testing.DummyModel()
        profiles[None] = testing.DummyModel()
        catalog = context.catalog = karltesting.DummyCatalog(
                                      {1: '/foo', 2: '/bar'})
        foo = testing.DummyModel(content_modified=now - timedelta(1))
        bar = testing.DummyModel(content_modified=now - timedelta(32))
        karltesting.registerModels({'/foo': foo,
                                    '/bar': bar,
                                    })
        request = testing.DummyRequest()

        info = self._callFUT(context, request)

        self.assertEqual(len(catalog.queries), 1)
        query = catalog.queries[0]
        self.assertEqual(query['content_modified'],
                         (coarse_datetime_repr(six_months_ago), None))

        communities = info['communities']
        self.assertEqual(len(communities), 2)
        self.assertEqual(communities[0].context, foo)
        self.assertEqual(communities[1].context, bar)
        self.failUnless(info['actions'])
        _checkCookie(request, 'active')
Example #11
    def test_excludes_inactive(self):
        from datetime import datetime
        from datetime import timedelta
        from karl.utils import coarse_datetime_repr
        now = datetime.now()
        today = now.today()
        six_months_ago = today - timedelta(days=180)
        self._set_TODAY(today)
        self._register()
        context = testing.DummyModel()
        profiles = context['profiles'] = testing.DummyModel()
        profiles[None] = testing.DummyModel()
        catalog = context.catalog = karltesting.DummyCatalog(
                                      {1: '/foo', 2: '/bar'})
        foo = testing.DummyModel(content_modified=now - timedelta(1))
        bar = testing.DummyModel(content_modified=now - timedelta(32))
        karltesting.registerModels({'/foo': foo,
                                    '/bar': bar,
                                    })
        request = testing.DummyRequest()

        info = self._callFUT(context, request)

        self.assertEqual(len(catalog.queries), 1)
        query = catalog.queries[0]
        self.assertEqual(query['content_modified'],
                         (coarse_datetime_repr(six_months_ago), None))

        communities = info['communities']
        self.assertEqual(len(communities), 2)
        self.assertEqual(communities[0].context, foo)
        self.assertEqual(communities[1].context, bar)
        self.failUnless(info['actions'])
        _checkCookie(request, 'active')
Example #12
File: admin.py Project: karlproject/karl
def archive_communities_view(context, request):
    """
    Archive inactive communities.
    """
    api = AdminTemplateAPI(context, request, 'Admin UI: Move Content')

    # Find inactive communities
    search = ICatalogSearch(context)
    now = datetime.datetime.now()
    timeago = now - datetime.timedelta(days=425)  # ~14 months
    timeago = now - datetime.timedelta(days=4)  # XXX Testing
    count, docids, resolver = search(
        interfaces=[ICommunity],
        content_modified=(None, coarse_datetime_repr(timeago)))
    communities = [
        {'title': community.title,
         'url': request.resource_url(community),
         'path': resource_path(community)}
        for community in (resolver(docid) for docid in docids)
    ]
    communities.sort(key=itemgetter('path'))
    return {
        'api': api,
        'menu':_menu_macro(),
        'communities': communities,
    }
Example #13
File: test_site.py Project: lslaz1/karl
    def test_w_date(self):
        import datetime
        from karl.utils import coarse_datetime_repr
        context = testing.DummyModel()
        today = datetime.date.today()
        self._decorate(context, today)
        result = self._callFUT(context, None)
        self.assertEqual(result, coarse_datetime_repr(today))
Example #14
File: test_site.py Project: cguardia/karl
    def test_w_date(self):
        import datetime
        from karl.utils import coarse_datetime_repr
        context = testing.DummyModel()
        today = datetime.date.today()
        self._decorate(context, today)
        result = self._callFUT(context, None)
        self.assertEqual(result, coarse_datetime_repr(today))
Example #15
File: communities.py Project: lslaz1/karl
def show_active_communities_view(context, request):
    _set_cookie_via_request(request, 'active')

    six_months_ago = _today() - datetime.timedelta(days=180)
    content_modified = (coarse_datetime_repr(six_months_ago), None)

    return _show_communities_view_helper(context,
                                         request, prefix='Active ',
                                         content_modified=content_modified)
Example #16
File: search.py Project: boothead/karl
def make_query(context, request):
    """Given a search request, return a catalog query and a list of terms.
    """
    params = request.params
    query = {}
    terms = []
    body = params.get("body")
    if body:
        query["texts"] = body
        query["sort_index"] = "texts"
        terms.append(body)

    creator = params.get("creator")
    if creator:
        userids = list(_iter_userids(context, request, creator))
        query["creator"] = {"query": userids, "operator": "or"}
        terms.append(creator)

    types = params.getall("types")
    if types:
        type_dict = {}
        for t in get_content_types():
            type_dict[interface_id(t)] = t
        ifaces = [type_dict[name] for name in types]
        query["interfaces"] = {"query": ifaces, "operator": "or"}
        terms.extend(iface.getTaggedValue("name") for iface in ifaces)
    else:
        query["interfaces"] = [IContent]

    tags = params.getall("tags")
    if tags:
        query["tags"] = {"query": tags, "operator": "or"}
        terms.extend(tags)

    year = params.get("year")
    if year:
        year = int(year)
        begin = coarse_datetime_repr(datetime.datetime(year, 1, 1))
        end = coarse_datetime_repr(datetime.datetime(year, 12, 31, 12, 59, 59))
        query["creation_date"] = (begin, end)
        terms.append(year)

    return query, terms
Example #17
File: communities.py Project: iotest3/new
def show_active_communities_view(context, request):
    _set_cookie_via_request(request, 'active')

    six_months_ago = _today() - datetime.timedelta(days=180)
    content_modified = (coarse_datetime_repr(six_months_ago), None)

    return _show_communities_view_helper(context,
                                         request, prefix='Active ',
                                         content_modified=content_modified,
                                        )
Example #18
def _get_catalog_events(calendar, request,
                        first_moment, last_moment, layer_name=None):

    searcher = ICatalogSearch(calendar)
    search_params = dict(
        allowed={'query': effective_principals(request), 'operator': 'or'},
        interfaces=[ICalendarEvent],
        sort_index='start_date',
        reverse=False,
        )

    if first_moment:
        end_date = (coarse_datetime_repr(first_moment), None)
        search_params['end_date'] = end_date

    if last_moment:
        start_date = (None, coarse_datetime_repr(last_moment))
        search_params['start_date'] = start_date

    docids_seen = set()

    events = []

    for layer in _get_calendar_layers(calendar):
        if layer_name and layer.__name__ != layer_name:
            continue

        total, docids, resolver = searcher(
            virtual={'query':layer.paths, 'operator':'or'},
            **search_params)

        for docid in docids:
            if docid not in docids_seen:
                docids_seen.add(docid)

                event = resolver(docid)
                event._v_layer_color = layer.color.strip()
                event._v_layer_title = layer.title

                events.append(event)

    return events
Example #19
def archive_to_box_view(context, request):
    """
    Archive inactive communities to the Box storage service.
    """
    api = AdminTemplateAPI(context, request, 'Admin UI: Archive to Box')
    communities = None
    box = find_box(context)
    client = BoxClient(box, request.registry.settings)
    logged_in = False
    state = request.params.get('state', None)

    if state:
        if state == box.state:
            client.authorize(request.params['code'])
        else:
            raise HTTPBadRequest("Box state does not match")
        state = box.state = None

        # return HTTPFound(request.path_url)

    if box.logged_in:
        logged_in = True
        # Find inactive communities
        search = ICatalogSearch(context)
        now = datetime.datetime.now()
        timeago = now - datetime.timedelta(days=425)  # ~14 months
        count, docids, resolver = search(
            interfaces=[ICommunity],
            content_modified=(None, coarse_datetime_repr(timeago)))
        communities = [{
            'title': community.title,
            'url': request.resource_url(community),
            'path': resource_path(community)
        } for community in (resolver(docid) for docid in docids)]
        communities.sort(key=itemgetter('path'))

    if not box.logged_in:
        state = box.state = str(uuid.uuid4())

    return {
        'api': api,
        'menu': _menu_macro(),
        'communities': communities,
        'logged_in': logged_in,
        'state': state,
        'client_id': client.client_id,
        'authorize_url': client.authorize_url,
        'redirect_uri': request.path_url,
    }
Example #20
    def _query(self):
        """The part implemented for each portlet, actually grab data"""

        searcher = getAdapter(self.context, ICatalogSearch)
        path = {
            'query': resource_path(self.context),
        }
        # show only upcoming events, the soonest first.
        now = coarse_datetime_repr(datetime.datetime.now())
        total, docids, resolver = searcher(path=path,
                                           sort_index='start_date',
                                           end_date=(now, None),
                                           interfaces=[ICalendarEvent],
                                           reverse=False,
                                           use_cache=False)

        return resolver, list(docids)
Example #21
File: adapters.py Project: Falmarri/karl
    def _query(self):
        """The part implemented for each portlet, actually grab data"""

        searcher = getAdapter(self.context, ICatalogSearch)
        path = {
            'query':resource_path(self.context),
            }
        # show only upcoming events, the soonest first.
        now = coarse_datetime_repr(datetime.datetime.now())
        total, docids, resolver = searcher(
            path=path,
            sort_index='start_date',
            end_date=(now, None),
            interfaces=[ICalendarEvent],
            reverse=False,
            use_cache=False
            )

        return resolver, list(docids)
Example #22
def _get_date_or_datetime(object, attr, default):
    d = getattr(object, attr, None)
    if (isinstance(d, datetime.datetime) or isinstance(d, datetime.date)):
        return coarse_datetime_repr(d)
    return default
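A quick illustration of how the helper above behaves; it assumes _get_date_or_datetime and coarse_datetime_repr are in scope, and the attribute names here are made up for the example:

import datetime

class Doc(object):
    created = datetime.datetime(2009, 2, 13, 23, 31, 30)

doc = Doc()
# A date/datetime attribute is converted to its coarse integer form.
print(_get_date_or_datetime(doc, 'created', None))
# A missing (or non-date) attribute falls back to the supplied default.
print(_get_date_or_datetime(doc, 'modified', None))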
Example #23
from datetime import datetime

from karl.utils import coarse_datetime_repr

from karl.models.site import get_name
from karl.models.site import get_path
from karl.models.site import get_mimetype
from karl.models.site import get_creator

from karl.models.profile import Profile

from karl.scripting import get_default_config
from karl.scripting import open_root

import logging
LOGGER = "export_usage_metadata"
log = logging.getLogger(LOGGER)
old_date = coarse_datetime_repr(datetime(1999, 1, 1))
logging.basicConfig()


def _get_docid(context):
    return context.docid


def _get_title(context):
    return getattr(context, 'title', None)


def _get_name(context):
    return get_name(context, None)

Example #24
File: search.py Project: iotest3/new
def make_query(context, request):
    """Given a search request, return a catalog query and a list of terms.
    """
    params = request.params
    query = {}
    terms = []

    term = params.get('body')
    if term:
        terms.append(term)

    kind = params.get('kind')
    if kind:
        searcher = queryUtility(IGroupSearchFactory, kind)
        if searcher is None:
            # If the 'kind' we got is not known, return an error
            fmt = "The LiveSearch group %s is not known"
            raise HTTPBadRequest(fmt % kind)
        terms.append(kind)
    else:
        searcher = default_group_search

    if term:
        weighted_term = WeightedQuery(term)
    else:
        weighted_term = None

    searcher = searcher(context, request, weighted_term)
    query.update(searcher.criteria)

    creator = params.get('creator')
    if creator:
        userids = list(_iter_userids(context, request, creator))
        query['creator'] = {
            'query': userids,
            'operator': 'or',
        }
        terms.append(creator)

    tags = filter(None, params.getall('tags'))
    if tags:
        query['tags'] = {
            'query': tags,
            'operator': 'or',
        }
        terms.extend(tags)

    year = params.get('year')
    if year:
        year = int(year)
        begin = coarse_datetime_repr(datetime.datetime(year, 1, 1))
        end = coarse_datetime_repr(datetime.datetime(year, 12, 31, 12, 59, 59))
        query['creation_date'] = (begin, end)
        terms.append(year)

    since = params.get('since')
    if since:
        option = since_options[since]
        since = datetime.datetime.now() - option['delta']
        query['creation_date'] = (coarse_datetime_repr(since), None)
        terms.append(option['name'])

    sort = params.get('sort')
    if sort:
        option = sort_options[sort]
        query['sort_index'] = option['sort_index']
        query['reverse'] = option['reverse']
        terms.append(option['name'])

    return query, terms
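The since_options and sort_options mappings used by this make_query are not shown on this page. A hypothetical sketch of their shape, inferred only from the keys the code reads ('delta', 'name', 'sort_index', 'reverse'); the real karl definitions may use different labels and entries:

import datetime

since_options = {
    'week': {'name': 'Past week', 'delta': datetime.timedelta(days=7)},
    'month': {'name': 'Past month', 'delta': datetime.timedelta(days=31)},
    'year': {'name': 'Past year', 'delta': datetime.timedelta(days=365)},
}

sort_options = {
    'newest': {'name': 'Newest first', 'sort_index': 'creation_date',
               'reverse': True},
    'oldest': {'name': 'Oldest first', 'sort_index': 'creation_date',
               'reverse': False},
}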
Example #25
File: stats.py Project: cguardia/karl
def collect_profile_stats(context):
    """
    Returns an iterator that yields, for each user profile, a dict with the
    following keys::

        + first_name
        + last_name
        + userid
        + date_created
        + is_staff
        + num_communities
        + num_communities_moderator
        + location
        + department
        + roles
        + num_documents
        + num_tags
        + documents_this_month
    """
    communities = find_communities(context)
    search = ICatalogSearch(context)
    profiles = find_profiles(context)
    users = find_users(context)

    # Collect community membership
    membership = {}
    moderatorship = {}
    for community in communities.values():
        for name in community.member_names:
            if name not in membership:
                membership[name] = 1
            else:
                membership[name] += 1
        for name in community.moderator_names:
            if name not in moderatorship:
                moderatorship[name] = 1
            else:
                moderatorship[name] += 1

    for profile in profiles.values():
        info = users.get_by_id(profile.__name__)
        if info is not None:
            groups = info['groups']
        else:
            groups = []
        name = profile.__name__
        stats = dict(
            first_name=profile.firstname,
            last_name=profile.lastname,
            userid=name,
            date_created=profile.created,
            location=profile.location,
            department=profile.department,
            is_staff='group.KarlStaff' in groups,
            roles=','.join(groups),
            num_communities=membership.get(name, 0),
            num_communities_moderator=moderatorship.get(name, 0),
        )

        count, docids, resolver = search(creator=name)
        stats['num_documents'] = count

        begin = coarse_datetime_repr(datetime.datetime.now() - THIRTY_DAYS)
        count, docids, resolver = search(
            creator=name, creation_date=(begin, None),
        )
        stats['documents_this_month'] = count

        tags = find_tags(context)
        stats['num_tags'] = len(tags.getTags(users=(name,)))

        yield stats
Example #26
File: site.py Project: karlproject/karl
def _get_date_or_datetime(object, attr, default):
    d = getattr(object, attr, None)
    if (isinstance(d, datetime.datetime) or
        isinstance(d, datetime.date)):
        return coarse_datetime_repr(d)
    return default
Example #27
def _get_catalog_events(calendar, request,
                        first_moment, last_moment, layer_name=None):

    searcher = ICatalogSearch(calendar)
    search_params = dict(
        allowed={'query': effective_principals(request), 'operator': 'or'},
        interfaces=[ICalendarEvent],
        sort_index='start_date',
        reverse=False,
        )

    if first_moment:
        end_date = (coarse_datetime_repr(first_moment), None)
        search_params['end_date'] = end_date

    if last_moment:
        start_date = (None, coarse_datetime_repr(last_moment))
        search_params['start_date'] = start_date

    docids_seen = set()

    events = []

    for layer in _get_calendar_layers(calendar):
        if layer_name and layer.__name__ != layer_name:
            continue

        total, docids, resolver = searcher(
            virtual={'query':layer.paths, 'operator':'or'},
            **search_params)

        for docid in docids:
            if docid not in docids_seen:

                # We need to clone the event, because if an event is shown
                # in multiple layers, we want to show it multiple times.
                # This also means that making the color and title volatile
                # no longer serves a purpose; it only mattered when the
                # same event could be displayed once.
                #
                # It is important to perform a shallow copy of the event.
                # A deep copy can leak out and try to copy the entire
                # database through the parent reference.

                event = copy.copy(resolver(docid))
                event._v_layer_color = layer.color.strip()
                event._v_layer_title = layer.title

                # However, showing an event once per layer it is in
                # currently only makes sense for all-day events; for
                # normal events this would make them undisplayable.
                # So we only add the event to the seen set if it is a
                # normal event, not an all-day one.
                # A special case is list views, where we want the normal
                # events duplicated too; the characteristic of a list
                # view is that the end date is open.
                all_day = event.startDate.hour == 0 and \
                    event.startDate.minute == 0 and \
                    event.endDate.hour == 0 and \
                    event.endDate.minute == 0
                in_list_view = last_moment is None
                if not in_list_view and not all_day:
                    docids_seen.add(docid)

                events.append(event)

    # The result set needs to be sorted by start_date.
    # XXX maybe we can do this directly from the catalog?
    events.sort(key=lambda event: event.startDate)

    return events
Example #28
def collect_profile_stats(context):
    """
    Returns an iterator that yields, for each user profile, a dict with the
    following keys::

        + first_name
        + last_name
        + userid
        + date_created
        + is_staff
        + num_communities
        + num_communities_moderator
        + location
        + department
        + roles
        + num_documents
        + num_tags
        + documents_this_month
    """
    communities = find_communities(context)
    search = ICatalogSearch(context)
    profiles = find_profiles(context)
    users = find_users(context)

    # Collect community membership
    membership = {}
    moderatorship = {}
    for community in communities.values():
        for name in community.member_names:
            if name not in membership:
                membership[name] = 1
            else:
                membership[name] += 1
        for name in community.moderator_names:
            if name not in moderatorship:
                moderatorship[name] = 1
            else:
                moderatorship[name] += 1

    for profile in profiles.values():
        info = users.get_by_id(profile.__name__)
        if info is not None:
            groups = info['groups']
        else:
            groups = []
        name = profile.__name__
        stats = dict(
            first_name=profile.firstname,
            last_name=profile.lastname,
            userid=name,
            date_created=profile.created,
            location=profile.location,
            department=profile.department,
            is_staff='group.KarlStaff' in groups,
            roles=','.join(groups),
            num_communities=membership.get(name, 0),
            num_communities_moderator=moderatorship.get(name, 0),
        )

        count, docids, resolver = search(creator=name)
        stats['num_documents'] = count

        begin = coarse_datetime_repr(datetime.datetime.now() - THIRTY_DAYS)
        count, docids, resolver = search(
            creator=name, creation_date=(begin, None),
        )
        stats['documents_this_month'] = count

        tags = find_tags(context)
        stats['num_tags'] = len(tags.getTags(users=(name,)))

        yield stats
Example #29
File: apiviews.py Project: araymund/karl
    def get_communities_to_archive(self):
        """
        GET: /arc2box/communities/

        Returns a list of communities eligible to be archived.  Accepts the
        following query parameters, none of which are required:

            + last_activity: Integer number of days. Will only include
              communities with no updates younger than given number of days.
              Default is 540 days (roughly 18 months).
            + filter: Text used as a filter on the community's title.  Only
              matching communities will be returned.  By default, no filtering
              is done.
            + limit: Integer number of results to return.  Default is to return
              all results.
            + offset: Integer index of first result to return. For use in
              conjunction with `limit` in order to batch results.

        Results are ordered by time of last activity, oldest first.

        Returns a list of objects, with each object containing the following
        keys:

            + id: docid of the community.
            + name: The name of the community (URL name).
            + title: The title (display name) of the community.
            + last_activity: Time of last activity on this community.
            + url: URL of the community.
            + items: integer count of documents in this community.
            + status: workflow state with regard to the archive process.

        The possible values of `status` are:

            + null: The community is in normal mode, not currently in any
                    archive state.
            + "copying": The archiver is in the process of copying community
                         content to Box.
            + "reviewing": The archiver has finished copying community content
                           to Box and is ready for an administrator to review
                           the content in Box before proceeding.
            + "removing": The archiver is in the process of removing content
                          from the community.  A transition to this state is
                          the point of no return.
            + "archived": The archiver has copied all community content to the
                          Box archive and removed the content from Karl. The
                          community is mothballed.
            + "exception": An exception has occurred while processing this
                           community.
        """
        params = self.request.params
        last_activity = int(params.get('last_activity', 540))
        filter_text = params.get('filter')
        limit = int(params.get('limit', 0))
        offset = int(params.get('offset', 0))

        search = ICatalogSearch(self.context)
        now = datetime.datetime.now()
        timeago = now - datetime.timedelta(days=last_activity)
        count, docids, resolver = search(
            interfaces=[ICommunity],
            content_modified=(None, coarse_datetime_repr(timeago)),
            sort_index='content_modified',
            reverse=True)

        def results(docids=docids, limit=limit, offset=offset):
            if offset and not filter_text:
                docids = docids[offset:]
                offset = 0
            for docid in docids:
                if offset:
                    offset -= 1
                    continue
                community = resolver(docid)
                if (not filter_text
                        or filter_text.lower() in community.title.lower()):
                    yield community
                    if limit:
                        limit -= 1
                        if not limit:
                            break

        route_url = self.request.route_url

        def record(community):
            path = resource_path(community)
            items, _, _ = search(path=path)
            return {
                'id': community.docid,
                'name': community.__name__,
                'title': community.title,
                'last_activity': str(community.content_modified),
                'url': route_url('archive_to_box', traverse=path.lstrip('/')),
                'items': items,
                'status': getattr(community, 'archive_status', None),
            }

        logger.info('arc2box: Got communities')
        return [record(community) for community in results()]
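The docstring above documents the query parameters accepted by GET /arc2box/communities/. A hypothetical client-side call illustrating those parameters, assuming the view is served as JSON (the host name and filter value are invented; only the path, parameter names, and result keys come from the docstring):

import json
import urllib.request

url = ('https://karl.example.com/arc2box/communities/'
       '?last_activity=540&filter=budget&limit=20&offset=0')

with urllib.request.urlopen(url) as response:
    communities = json.load(response)

for community in communities:
    # Each record carries id, name, title, last_activity, url, items, status.
    print(community['title'], community['last_activity'], community['status'])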
Example #30
File: apiviews.py Project: araymund/karl
    def get_communities_to_archive(self):
        """
        GET: /arc2box/communities/

        Returns a list of communities eligible to be archived.  Accepts the
        following query parameters, none of which are required:

            + last_activity: Integer number of days. Will only include
              communities with no updates younger than given number of days.
              Default is 540 days (roughly 18 months).
            + filter: Text used as a filter on the community's title.  Only
              matching communities will be returned.  By default, no filtering
              is done.
            + limit: Integer number of results to return.  Default is to return
              all results.
            + offset: Integer index of first result to return. For use in
              conjunction with `limit` in order to batch results.

        Results are ordered by time of last activity, oldest first.

        Returns a list of objects, with each object containing the following
        keys:

            + id: docid of the community.
            + name: The name of the community (URL name).
            + title: The title (display name) of the community.
            + last_activity: Time of last activity on this community.
            + url: URL of the community.
            + items: integer count of documents in this community.
            + status: workflow state with regard to the archive process.

        The possible values of `status` are:

            + null: The community is in normal mode, not currently in any
                    archive state.
            + "copying": The archiver is in the process of copying community
                         content to Box.
            + "reviewing": The archiver has finished copying community content
                           to Box and is ready for an administrator to review
                           the content in Box before proceeding.
            + "removing": The archiver is in the process of removing content
                          from the community.  A transition to this state is
                          the point of no return.
            + "archived": The archiver has copied all community content to the
                          Box archive and removed the content from Karl. The
                          community is mothballed.
            + "exception": An exception has occurred while processing this
                           community.
        """
        params = self.request.params
        last_activity = int(params.get('last_activity', 540))
        filter_text = params.get('filter')
        limit = int(params.get('limit', 0))
        offset = int(params.get('offset', 0))

        search = ICatalogSearch(self.context)
        now = datetime.datetime.now()
        timeago = now - datetime.timedelta(days=last_activity)
        count, docids, resolver = search(
            interfaces=[ICommunity],
            content_modified=(None, coarse_datetime_repr(timeago)),
            sort_index='content_modified',
            reverse=True)

        def results(docids=docids, limit=limit, offset=offset):
            if offset and not filter_text:
                docids = docids[offset:]
                offset = 0
            for docid in docids:
                if offset:
                    offset -= 1
                    continue
                community = resolver(docid)
                if (not filter_text or
                    filter_text.lower() in community.title.lower()):
                    yield community
                    if limit:
                        limit -= 1
                        if not limit:
                            break

        route_url = self.request.route_url

        def record(community):
            path = resource_path(community)
            items, _, _ = search(path=path)
            return {
                'id': community.docid,
                'name': community.__name__,
                'title': community.title,
                'last_activity': str(community.content_modified),
                'url': route_url('archive_to_box', traverse=path.lstrip('/')),
                'items': items,
                'status': getattr(community, 'archive_status', None),
            }

        logger.info('arc2box: Got communities')
        return [record(community) for community in results()]