Example #1
def _getDeviceBatch(self,
                    selectstatus='none',
                    goodevids=[],
                    badevids=[],
                    offset=0,
                    count=50,
                    filter='',
                    orderby='titleOrId',
                    orderdir='asc'):
    unused(count, offset, orderby, orderdir)
    if not isinstance(goodevids, (list, tuple)):
        goodevids = [goodevids]
    if not isinstance(badevids, (list, tuple)):
        badevids = [badevids]
    if selectstatus == 'all':
        idquery = ~In('id', badevids)
    else:
        idquery = In('id', goodevids)
    devfilter = '(?is).*%s.*' % filter
    filterquery = Or(MatchRegexp('id', devfilter),
                     MatchRegexp('name', devfilter),
                     MatchRegexp('text_ipAddress', devfilter),
                     MatchRegexp('deviceClassPath', devfilter))
    query = Eq('uid', self.context.absolute_url_path()) & idquery
    query = query & filterquery
    catalog = IModelCatalogTool(self.context)
    objects = catalog.search(query=query)
    return [x['id'] for x in objects]
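The query above composes AdvancedQuery terms with Python operators: & means And, | means Or, and ~ negates a term (as in ~In('id', badevids)). A minimal sketch of that composition, assuming only that Products.AdvancedQuery is installed; the catalog and the index values are hypothetical:

from Products.AdvancedQuery import In, MatchRegexp, Or

pattern = '(?is).*web.*'
query = In('id', ['dev1', 'dev2']) & Or(MatchRegexp('id', pattern),
                                        MatchRegexp('name', pattern))
query = query & ~In('id', ['excluded'])  # ~ builds the negation of a term
# results = some_catalog.evalAdvancedQuery(query)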
Example #2
def get_uids(index_client, root="", types=()):
    start = 0
    need_results = True
    query = [Eq("tx_state", 0)]
    if root:
        root = root.rstrip('/')
        query.append(
            Or(Eq("uid", "{}".format(root)),
               MatchGlob("uid", "{}/*".format(root))))

    if not isinstance(types, (tuple, list)):
        types = (types, )

    if types:
        query.append(In("objectImplements", [dottedname(t) for t in types]))

    while need_results:
        search_results = index_client.search(
            SearchParams(query=And(*query),
                         start=start,
                         limit=MODEL_INDEX_BATCH_SIZE,
                         order_by="uid",
                         fields=["uid"]))
        start += MODEL_INDEX_BATCH_SIZE
        for result in search_results.results:
            yield result.uid
        need_results = start < search_results.total_count
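get_uids pages through the model index in MODEL_INDEX_BATCH_SIZE chunks, ordered by uid, and yields matching uids lazily. A usage sketch, assuming a Zenoss model-index client is at hand (index_client is a placeholder):

from Products.ZenModel.Device import Device

# drain the generator: every device uid under /zport/dmd/Devices
for uid in get_uids(index_client, root="/zport/dmd/Devices", types=(Device,)):
    print(uid)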
Example #3
    def results(self, start, until=None):
        today = DateTime()
        today = DateTime(today.year(), today.month(), today.day())
        start = DateTime(start)
        start = DateTime(start.year(), start.month(), start.day())

        query = Indexed('chimpfeeds') & \
                In('review_state', ('published', )) & \
                Ge('feedSchedule', start)

        if until:
            try:
                until = DateTime(until)
            except DateTime.SyntaxError:
                pass
            else:
                query = query & Le('feedSchedule', until)

        site = getToolByName(self.context, "portal_url").getPortalObject()
        settings = IFeedSettings(site)
        if settings.use_moderation:
            query = query & Eq('feedModerate', True)

        catalog = getToolByName(self.context, "portal_catalog")

        extras = []
        utilities = getUtilitiesFor(IGroupExtras)
        groups = InterestGroupVocabulary()(self.context)
        for name, util in utilities:
            for group in groups:
                extras.extend(util.items(group.title, start, until))

        return list(catalog.evalAdvancedQuery(
            query, (('feedSchedule', 'desc'), ))) + extras
Example #4
    def _buildQuery(self, types, paths, depth, query, filterPermissions):
        qs = []
        if query is not None:
            qs.append(query)

        # Build the path query
        if not paths:
            paths = ('/'.join(self.context.getPhysicalPath()), )

        q = {'query': paths}
        if depth is not None:
            q['depth'] = depth
        pathq = Generic('path', q)
        qs.append(pathq)

        # Build the type query
        if not isinstance(types, (tuple, list)):
            types = (types, )
        subqs = [Eq('objectImplements', dottedname(t)) for t in types]
        if subqs:
            # Don't unnecessarily nest in an Or if there is only one type query
            typeq = subqs[0] if len(subqs) == 1 else Or(*subqs)
            qs.append(typeq)

        # filter based on permissions
        if filterPermissions:
            qs.append(
                In('allowedRolesAndUsers',
                   allowedRolesAndGroups(self.context)))

        # Consolidate into one query
        return And(*qs)
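The path portion uses Generic, which hands a raw query mapping straight to the index; with an ExtendedPathIndex the optional depth key limits how far below the path matches may lie. A standalone sketch (the index name and path are assumptions):

from Products.AdvancedQuery import Generic

# depth=1 restricts the match to direct children of the given path
pathq = Generic('path', {'query': '/zport/dmd/Devices', 'depth': 1})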
Example #5
    def searchTablePage(self, tp, **kwargs):
        if 'path' not in kwargs:
            kwargs['path'] = '/'.join(tp.getPhysicalPath())
        if 'is_label' not in kwargs:
            kwargs['is_label'] = False
        query = Eq('is_label', True)
        query &= Eq('path', kwargs['path'])

        sub_query = None
        for k, v in kwargs.items():
            if k in SKIP_KEYS:
                continue
            if isinstance(v, dict):
                # Handle complex subqueries (range?)
                term = self._buildRangeQuery(k, v)
            elif isinstance(v, list):
                term = In(k, v)
            else:
                term = Eq(k, v)

            if sub_query:
                sub_query &= term
            else:
                sub_query = term

        if sub_query is not None:  # every kwarg may have been skipped
            query = query | sub_query
        return self.evalAdvancedQuery(query,
                                      sortSpecs=(kwargs.get(
                                          'sort_on',
                                          'getObjPositionInParent'), ))
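_buildRangeQuery is not shown in this example. A plausible sketch of such a helper built on AdvancedQuery's range terms -- an assumption about its contract, not the original implementation:

from Products.AdvancedQuery import Between, Ge, Le

def _buildRangeQuery(key, spec):
    # hypothetical spec shape: {'min': lo} and/or {'max': hi}
    lo, hi = spec.get('min'), spec.get('max')
    if lo is not None and hi is not None:
        return Between(key, lo, hi)
    if lo is not None:
        return Ge(key, lo)
    return Le(key, hi)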
Example #6
def get_possible_faqs(self):
    log.info('get_possible_faqs')
    queries = []
    title = In('Title', ["*frequently*", "*faq*", "FAQ*", "Frequently*"])
    portal_type = In("portal_type", ["Document", "RichDocument", "Folder"])
    ids = ["faq", "faq.php", "faq.stm", "faqs"]
    for i in range(0, 10):
        ids.append('faq%d.stm' % i)
        ids.append('faq0%d.php' % i)

    id = In('getId', ids)
    body = Eq('SearchableText', "FAQ")
    fop = Eq('path', '/osha/portal/fop')
    advanced_query = And(Or(id, title, body), portal_type, Not(fop))
    ls = self.portal_catalog.evalAdvancedQuery(advanced_query,
                                               (('Date', 'desc'), ))

    # XXX: Didn't work :(
    # ls = self.portal_catalog(
    #             getId='faq.php',
    #             path='/osha/portal/en/good_practice/priority_groups/disability/')

    ls = self.portal_catalog(
        getId='faq2.stm',
        path='osha/en/good_practice/topics/dangerous_substances/faq2.stm')

    # ls = self.portal_catalog(
    #             getId='faq.php',
    #             path='osha/en/good_practice/topics/accident_prevention/')

    log.info("Processing FAQs: %s" % "\n".join([i.getURL() for i in ls]))

    odict = {}
    for l in ls:
        o = l.getObject()
        odict[o.absolute_url()] = o
        ts = o.getTranslations().values()
        for t in ts:
            odict[t[0].absolute_url()] = t[0]

    objects = odict.values()
    return objects

    # NOTE: the remainder is unreachable -- leftover debugging code after the
    # return above
    k = ['/'.join(o.getPhysicalPath()) for o in objects]
    k.sort()
    display_str = '\n'.join(k) or 'none'
    return display_str
Example #7
def getAppKpis(self):
    paths = []

    devices = self.REQUEST.get('deviceId')
    if not devices:
        self.REQUEST.RESPONSE.setHeader('status', 404)
        paths.append(['ERROR', 'Device ids not specified'])
        self.REQUEST.RESPONSE.setHeader('content-type', 'application/json')
        return json.dumps(paths)

    if isinstance(devices, basestring):
        devices = [devices]

    applicationIds = self.REQUEST.get('applicationId')
    if not applicationIds:
        applicationIds = []
    elif isinstance(applicationIds, basestring):
        applicationIds = [applicationIds]

    for devId in devices:
        dev = self.dmd.Devices.deviceSearch({'id': devId})
        if len(dev):
            dev = dev[0].getObject()
            devPathL = len(dev.getPrimaryId()) + 1
            for brain in dev.componentSearch({
                    'getParentDeviceName': devId,
                    'monitored': True,
                    'meta_type': 'BBCApplication'
            }):
                # skip if not requested
                if (len(applicationIds) and
                        brain.getPrimaryId.split('/')[-1] not in applicationIds):
                    continue
                # we got here so this is one of the wanted records
                app = brain.getObject()
                for kpi in app.ApplicationToKPI():
                    paths.append([kpi.getPrimaryId()[devPathL:], kpi.name()])

            # fetch non-automated datasources
            if 'manual' in applicationIds:
                for brain in dev.componentSearch.evalAdvancedQuery(
                        Eq('getParentDeviceName', devId)
                        & Eq('monitored', True) & ~In('meta_type', [
                            'BBCApplication', 'BBCApplicationKPI',
                            'BBCApplicationKPIDerive',
                            'BBCApplicationKPIAbsolute',
                            'BBCApplicationKPICounter', 'BBCApplicationKPIGauge'
                        ])):
                    dsId = (brain.getPrimaryId.split('/'))[-1]
                    if dsId == '-': continue
                    try:
                        ds = brain.getObject()
                        paths.append([ds.getPrimaryId()[devPathL:], ds.name()])
                    except Exception:
                        continue

    self.REQUEST.RESPONSE.setHeader('content-type', 'application/json')
    return json.dumps(paths)
Example #8
def remove_mapping(path, instances):
    try:
        org = dmd.Events.getOrganizer(path)
        results = ICatalogTool(org).search(EventClassInst,
                                           query=In('id', instances))
        if results.total:
            log.info('Removing deprecated Event Class Instances '
                     'from {}'.format(path))
            for instance in instances:
                if safe_hasattr(org, instance):
                    org.removeInstances([instance])
    except Exception:
        pass
Example #9
    def get_items(self):
        catalog = getToolByName(self.context, 'portal_catalog')
        today = DateTime()

        query = In('review_state', ('published', )) & \
            Eq('chimpfeeds', self.name) & \
            Le('feedSchedule', today)

        settings = IFeedSettings(self.context)
        if settings.use_moderation:
            query = query & Eq('feedModerate', True)

        brains = catalog.evalAdvancedQuery(query, (('Date', 'desc'), ))
        objects = tuple(brain.getObject() for brain in brains)
        return tuple(ItemProxy(obj).__of__(obj) for obj in objects)
Example #10
def findMatchingOrganizers(self, organizerClass, organizerPath, userFilter):
    filterRegex = '/zport/dmd/{0}*{1}*'.format(organizerPath, userFilter)
    if self.validRegex(filterRegex):
        orgquery = (Eq('objectImplements',
                       'Products.ZenModel.%s.%s' % (organizerClass,
                                                    organizerClass))
                    & MatchRegexp('uid', filterRegex))
        paths = [
            "{0}".format(b.getPath())
            for b in IModelCatalogTool(self._dmd).search(query=orgquery)
        ]
        if paths:
            return In('path', paths)
Example #11
def keyword_search(root, keywords):
    """Generate objects that match one or more of given keywords."""
    if isinstance(keywords, basestring):
        keywords = [keywords]
    elif isinstance(keywords, set):
        keywords = list(keywords)

    if keywords:
        catalog = ICatalogTool(root)
        query = In('searchKeywords', keywords)
        for result in catalog.search(query=query):
            try:
                yield result.getObject()
            except Exception:
                pass
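keyword_search is a generator over objects; brains whose objects can no longer be loaded are silently skipped. A hedged usage sketch (the root object and keywords are placeholders):

# iterate objects tagged with any of the given search keywords
for obj in keyword_search(dmd.Devices, ["linux", "ssh"]):
    print(obj.id)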
Example #12
    def getDevProdStateJSON(self, prodStates=['Maintenance']):
        """
        Return a map of device to production state in a format suitable for a
        YUI data table.

        @return: A JSON representation of a dictionary describing devices
        @rtype: "{
            'columns':['Device', 'Prod State'],
            'data':[
                {'Device':'<a href=/>', 'Prod State':'Production'},
                {'Device':'<a href=/>', 'Prod State':'Maintenance'},
            ]}"
        """

        if isinstance(prodStates, basestring):
            prodStates = [prodStates]

        def getProdStateInt(prodStateString):
            for t in self.context.getProdStateConversions():
                if t[0] == prodStateString:
                    return t[1]

        numericProdStates = [getProdStateInt(p) for p in prodStates]

        catalog = IModelCatalogTool(self.context.getPhysicalRoot().zport.dmd)
        query = In('productionState', numericProdStates)

        query = And(query,
                    Eq('objectImplements', 'Products.ZenModel.Device.Device'))
        objects = list(catalog.search(query=query, orderby='id',
                                      fields="uuid"))
        devs = (x.getObject() for x in objects)

        mydict = {'columns': ['Device', 'Prod State'], 'data': []}
        for dev in devs:
            if not self.context.checkRemotePerm(ZEN_VIEW, dev): continue
            mydict['data'].append({
                'Device': dev.getPrettyLink(),
                'Prod State': dev.getProdState()
            })
            if len(mydict['data']) >= 100:
                break
        return mydict
Example #13
    def getProducts(self):
        """
        """
        if self.request.get("form-sent") is None:
            return []

        query = Eq("path", "/".join(self.context.getPhysicalPath()))
        query &= Eq("object_provides",
                    "easyshop.core.interfaces.catalog.IProduct")

        search_text = self.request.get("search_text", "")
        search_category = self.request.get("search_category", [])

        if search_text != "":
            query &= Eq("Title", search_text)

        if len(search_category) > 0:
            query &= In("categories", search_category)

        catalog = getToolByName(self.context, "portal_catalog")
        brains = catalog.evalAdvancedQuery(query)

        return brains
Example #14
    def find_claimable_device(self, device_class=None):
        '''
        Find a possible Linux device for the host:

        Search by id, title, and management IP, against id, hostnames, and IPs
        '''

        if device_class is None:
            device_class = self.proxy_deviceclass()

        suggested_name = self.suggested_device_name()

        search_values = [
            x for x in (self.id, suggested_name, self.hostname, self.host_ip)
            if x is not None
        ]
        brains = device_class.deviceSearch.evalAdvancedQuery(
            And(
                MatchGlob('getDeviceClassPath',
                          device_class.getOrganizerName() + "*"),
                Or(In('id', search_values), In('titleOrId', search_values),
                   In('getDeviceIp', search_values))))

        possible_devices = []
        for brain in brains:
            try:
                device = brain.getObject()

                if device.openstack_hostComponent() is None:
                    if hasattr(device, 'getIpRealm'):
                        if self.getIpRealm() is device.getIpRealm():
                            possible_devices.append(device)
                    else:
                        possible_devices.append(device)
                else:
                    LOG.info(
                        "%s component %s unable to claim device %s, because it is already linked to %s",
                        self.meta_type, self.name(), device.id,
                        device.openstack_hostComponent().id)
            except Exception:
                pass

        # 1. First look by matching id against my id/suggested_name/hostname
        for device in possible_devices:
            if device.id == self.id:
                return device

        for device in possible_devices:
            if device.id == suggested_name or device.id == self.hostname:
                return device

        # 2. Next find by matching name against my id/suggested_name/hostname
        for device in possible_devices:
            if device.name() == self.id:
                return device

        for device in possible_devices:
            if device.name() == suggested_name or device.name(
            ) == self.hostname:
                return device

        # Otherwise, return the first device, if one was found
        if possible_devices:
            return possible_devices[0]

        if device_class == self.proxy_deviceclass():
            # check for other devices that we would have claimed, if they
            # had been in the right device class
            device = self.find_claimable_device(device_class=self.dmd.Devices)
            if device:
                LOG.info(
                    "No claimable device found for %s, but %s was found "
                    "in another device class.  Moving it to %s will make "
                    "it eligible.", self.id, device.id,
                    self.proxy_deviceclass().getOrganizerName())

        # No claimable device was found.
        return None
Example #15
## Script (Python) "getContentByAuthors"
##bind container=container
##bind context=context
##bind namespace=
##bind script=script
##bind subpath=traverse_subpath
##parameters=authors
##title=
##
from Products.AdvancedQuery import In, And, Not, Eq, Or

objects = {}
for author in authors:
    query = Or(In('authors', author), In('editors', author),
               In('translators', author))
    # From what I can tell this sorting isn't working correctly, but I don't think
    # it really matters... all this script is used for is to get the raw numbers.
    objects[author] = context.catalog.evalAdvancedQuery(query, ('sortTitle', ))

return objects
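Note the second argument: evalAdvancedQuery accepts sort specs either as bare index names (ascending) or as (index, direction) pairs. Both forms appear throughout these examples:

results = catalog.evalAdvancedQuery(query, ('sortTitle', ))       # ascending
results = catalog.evalAdvancedQuery(query, (('Date', 'desc'), ))  # descending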
Example #16
def handle_publishing(context, settings, dry_run=True, log=True):
    '''
    '''
    catalog = context.portal_catalog
    wf = context.portal_workflow
    now = context.ZopeTime()

    actions = settings.publish_actions
    action_taken = False
    audit = []
    for a in actions:
        audit_record = {}

        date_index = 'effective'
        date_method = 'getEffectiveDate'
        date_index_value = a.date_index
        if date_index_value:
            if '|' in date_index_value:
                items = date_index_value.split('|')
                _date_index = items[0]
                _date_method = items[1]
            else:
                _date_index = date_index_value
                _date_method = date_index_value
            if _date_index in catalog.indexes():
                date_index = _date_index
                date_method = _date_method
            else:
                logger.warn("date index does not exist: %s" %
                            (str(_date_index)))
                continue

        audit_record['header'] = 'Actions triggered by "%s"' % str(date_index)
        audit_record['content_types'] = str(a.portal_types)
        audit_record['initial_state'] = str(a.initial_state)
        audit_record['transition'] = str(a.transition)
        audit_record['date_index_method'] = (str(date_index) + '/' +
                                             str(date_method))
        audit_record['actions'] = []

        query = (Eq('review_state', a.initial_state)
                 & Le(date_index, now)
                 & Eq('enableAutopublishing', True)
                 & In('portal_type', a.portal_types))

        brains = catalog.evalAdvancedQuery(query)
        affected = 0
        total = 0
        for brain in brains:
            o = brain.getObject()
            try:
                eff_date = getattr(o, date_method)()
            except AttributeError:
                logger.warn("date field does not exist: %s" %
                            (str(date_method)))
                continue
            exp_date = o.getExpirationDate()
            # The dates in the indexes are always set!
            # So unless we test for actual dates on the
            # objects, objects with no EffectiveDate are also published.
            # ipdb> brain.effective
            # Out[0]: DateTime('1000/01/01')
            # ipdb> brain.expires
            # Out[0]: DateTime('2499/12/31')

            # we only publish if:
            # a) the effective date is set and is in the past, and if
            # b) the expiration date has not been set or is in the future:
            if (eff_date is not None and eff_date < now
                    and (exp_date is None or exp_date > now)):
                audit_action = {}
                audit_action['portal_type'] = brain.portal_type
                audit_action['url'] = brain.getURL()
                audit_action['title'] = brain.Title
                audit_action['transition'] = a.transition
                if log:
                    logger.info(str(audit_action))
                total += 1
                action_taken = True
                if not dry_run:
                    try:
                        wf.doActionFor(o, a.transition)
                        o.reindexObject()
                        affected += 1
                    except WorkflowException:
                        logger.info(
                            "The state '%s' of the workflow associated with "
                            "the object at '%s' does not provide the '%s' "
                            "action" % (brain.review_state, brain.getURL(),
                                        str(a.transition)))
                audit_record['actions'].append(audit_action)

        if log:
            logger.info(
                "Ran collective.autopublishing (publish): "
                "%d objects found, %d affected" % (total, affected))
        audit.append(audit_record)
    if action_taken:
        return audit
    else:
        return []
Example #17
##title=returns matching publications ordered by type
##
# This script makes a catalog query for publications on a given area of interest
# and structures the result by publication type
from Products.AdvancedQuery import In, Eq
if keywords == []:
    keywords = context.REQUEST.get('keywords', [])

pc = context.portal_catalog
if hasattr(pc, 'getZCatalog'):
    pc = pc.getZCatalog()

TQ = Eq('portal_type', 'PublicationFolder') & Eq('review_state', 'published')
TYPES = pc.evalAdvancedQuery(TQ)

PQ = Eq('portal_type', 'Publication') & In('Subject', keywords) & Eq(
    'review_state', 'published')
PUBS = pc.evalAdvancedQuery(PQ, (('effective', 'desc'), ))

PubByPath = {}
PrefixList = []
TypesByPath = {}

for TYPE in TYPES:
    P = TYPE.getPath()
    TypesByPath[P] = TYPE
    if P[-1] != "/":
        P = P + "/"
    PrefixList.append(P)

for PUB in PUBS:
Example #18
    def gather_events(self, start=None, stop=None, **kw):
        catalog = cmfutils.getToolByName(self.context, 'portal_catalog')

        # search in the navigation root of the currently selected language
        # and in the canonical root path with Language = preferredLanguage or
        # neutral
        pstate = getMultiAdapter((self.context, self.context.request),
                        name=u'plone_portal_state')

        nav_root_path = pstate.navigation_root_path()
        paths = [nav_root_path]

        nav_root = pstate.portal().restrictedTraverse(nav_root_path)
        try:
            canonical_path = '/'.join(
                nav_root.getCanonical().getPhysicalPath())
        except AttributeError:
            pass
        else:
            if canonical_path not in paths:
                paths.append(canonical_path)

        portal_languages = getToolByName(self.context, 'portal_languages')
        preflang = portal_languages.getPreferredLanguage()

        # If we are in the root (i. e. not inside a subsite), restrict
        # to the current folder. This restores the p4a.calendar's behaviour of
        # gather_events, since that also returns only events from the current
        # calendar.
        oshaview = getMultiAdapter((self.context, self.context.request),
                    name=u'oshaview')
        subsite = oshaview.getCurrentSubsite()

        if subsite is None:
            paths = ['/'.join(self.context.getPhysicalPath())]

        query = And(
            Eq('portal_type', 'Event'),
            In('path', paths),
            In('Language', ['', preflang]),
            )

        # Not sure where this key comes from, but it is not an index...
        for bad in ['-C', 'set_language']:
            if bad in kw:
                del kw[bad]

        kw = _make_zcatalog_query(start, stop, kw)
        # arbitrary keys can be passed in; AdvancedQuery complains about any
        # that are not catalog indexes
        indexes = catalog.indexes()
        for key in kw.keys():
            if key not in indexes:
                del kw[key]
        
        for key, value in kw.items():
            if key in ['start', 'end']:
                if value['range'] == 'max':
                    query = And(query, Le(key, value['query']))
                else:
                    query = And(query, Ge(key, value['query']))
            else:
                query = And(query, Eq(key, value))

        if hasattr(catalog, 'getZCatalog'):
            catalog = catalog.getZCatalog()
        event_brains = catalog.evalAdvancedQuery(query, (('Date', 'desc'),))
        return (OSHBrainEvent(x) for x in event_brains)
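The start/end handling above translates ZCatalog-style range queries -- mappings of the form {'query': value, 'range': 'min' or 'max'} -- into Ge/Le terms. A minimal illustration of that convention (the field name and date are assumptions):

from DateTime import DateTime
from Products.AdvancedQuery import Ge, Le

spec = {'query': DateTime('2014/01/01'), 'range': 'min'}
# 'min' means "field >= value", hence Ge; 'max' maps to Le
term = (Le('end', spec['query']) if spec['range'] == 'max'
        else Ge('end', spec['query']))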
Example #19
topiclist = context.GLOBALSETTINGS.TopicsOfInterest()
for TOP in topiclist:
    if TOP['id'] == id:
        break
MTSubjectPath = TOP.get('MTSubjectPath', '')
osha_keywords = TOP.get('osha_keywords', '')
path = TOP.get('path', '')

today = DateTime().earliestTime()
delta = (today - period).Date()

query = And(Ge('modified', delta), Eq('review_state', 'published'))
if MTSubjectPath != '' and osha_keywords != '':
    query = query & Or(Eq('osha_keywords', osha_keywords),
                       In('MTSubjectPath', MTSubjectPath))
if path:
    query = query & Eq('path', path)

sortSpec = (('modified', 'desc'), )

RESMAP = {}
res = CAT.evalAdvancedQuery(query, sortSpec)

for R in res:
    T = R.portal_type
    bucket = RESMAP.get(T, [])
    bucket.append(R)
    RESMAP[T] = bucket

numresults = len(res)
Example #20
    def _build_query(self,
                     types=(),
                     paths=(),
                     depth=None,
                     query=None,
                     filterPermissions=True,
                     globFilters=None):
        """
        Build and AdvancedQuery query

        @params types: list/tuple of values for objectImplements field
        @params globFilters: dict with user passed field: value filters
        @params query: AdvancedQuery passed by the user. Most of the time None
        @param filterPermissions: Boolean indicating whether to check for user perms or not

        @return: tuple (AdvancedQuery query, not indexed filters dict)
        """
        indexed, stored, _ = self.model_catalog_client.get_indexes()
        not_indexed_user_filters = {}  # Filters that use not indexed fields

        user_filters_query = None
        types_query = None
        paths_query = None
        permissions_query = None

        partial_queries = []
        if query:
            """
            # if query is a dict, we convert it to AdvancedQuery
            # @TODO We should make the default query something other than AdvancedQuery
            subqueries = []
            if isinstance(query, dict):
                for attr, value in query.iteritems():
                    if isinstance(value, str) and '*' in value:
                        subqueries.append(MatchGlob(attr, value))
                    else:
                        subqueries.append(Eq(attr, value))
                query = And(*subqueries)
            partial_queries.append(query)
            """
            partial_queries.append(self._parse_user_query(query))

        # Build query from filters passed by user
        if globFilters:
            for key, value in globFilters.iteritems():
                if key in indexed:
                    if user_filters_query:
                        user_filters_query = And(user_filters_query,
                                                 MatchRegexp(key, value))
                    else:
                        user_filters_query = MatchRegexp(key, value)
                else:
                    not_indexed_user_filters[key] = value

        if user_filters_query:
            partial_queries.append(user_filters_query)

        # Build the objectImplements query
        if not isinstance(types, (tuple, list)):
            types = (types, )
        types_query_list = [
            Eq('objectImplements', dottedname(t)) for t in types
        ]
        if types_query_list:
            if len(types_query_list) > 1:
                types_query = Or(*types_query_list)
            else:
                types_query = types_query_list[0]

            partial_queries.append(types_query)

        # Build query for paths
        if paths is not False:  # When paths is False we don't add any path condition
            # TODO: Account for depth or get rid of it
            # TODO: Consider indexing the device's uid as a path
            context_path = '/'.join(self.context.getPrimaryPath())
            # add the context uid as a filter (alternative: MatchGlob(UID, context_path))
            uid_path_query = In('path', (context_path, ))
            partial_queries.append(uid_path_query)
            if paths:
                if isinstance(paths, basestring):
                    paths = (paths, )
                partial_queries.append(In('path', paths))
            """  OLD CODE. Why this instead of In?  What do we need depth for?
            q = {'query':paths}
            if depth is not None:
                q['depth'] = depth
            paths_query = Generic('path', q)
            """

        # filter based on permissions
        if filterPermissions and allowedRolesAndGroups(self.context):
            permissions_query = In('allowedRolesAndUsers',
                                   allowedRolesAndGroups(self.context))
            partial_queries.append(permissions_query)

        # Put together all queries
        search_query = And(*partial_queries)
        return (search_query, not_indexed_user_filters)
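Per the docstring, callers get back both the composed AdvancedQuery and the filters that could not be expressed as index queries. A hedged sketch of a call (the tool instance and filter value are hypothetical):

# 'tool' is any object providing _build_query, e.g. a model catalog adapter
query, not_indexed = tool._build_query(
    types=(), globFilters={'name': 'web.*'}, filterPermissions=False)
# 'query' goes to the catalog; 'not_indexed' must be applied in Python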
Example #21
                o.fields.update(e.fields)
    if o.objectId in tids:
        for t in tobjects:
            if t.objectId == o.objectId:
                #tob = tobjects.pop(tobjects.index(t))
                #tids.remove(t.objectId)
                # ... the above bit is probably supposed to be an optimization, but:
                # doing a pop of the thing we're in is questionable, and
                # the tobjects.index(t) fails in a mixed collection/module list,
                # because mybrains don't like cmp against wrapped results
                o.fields.update(t.fields)

modules = [o for o in content if o.portal_type == 'Module']
forks = context.content.getContentByRole('parentAuthor', member)
moduleIds = [m.objectId for m in modules]
query = And(In('containedModuleIds', moduleIds), Not(Eq('authors', member)))
containers = context.content.catalog.evalAdvancedQuery(query)

mobjects = context.content.getContentByRole('maintainer', member)
maintainerObjects = [m for m in mobjects if m.objectId not in objectIds]

#contrib_objects = [o for o in eobjects+tobjects if o.objectId not in objectIds]
#content.extend(contrib_objects)

results['content'] = content
results['containers'] = containers
results['forks'] = forks
results['m_content'] = maintainerObjects

for k, l in results.items():
    results[k] = list(l)
Example #22
"""
modified for CalendarX 0.9.6(stable) for listOfSubjects, xcrt & xpaths bugs
Released under the GPL (see LICENSE.txt)
 List of variables used
 xmy = improved MY/PUBLIC event switcher: MY == any review_state + user == CREATOR
 xsub = category to view (from Subject -- works with existing CMFEvents and ATEvents)
 xpub = default 1 query for published, 0 = not queried for published status, 'visible' for visible status
 xcrt = (creator) default 1 for no test (view events from anyone), or 0 = query for user = CREATOR,
 xgroups = show shared private events to shared (listed) group members.
"""

from Products.AdvancedQuery import Between, Eq, Generic, In, Le, Ge

#RESTRICTS query to certain portal types, if option is checked in calendar_properties
q_xtypes = 0
if context.getCXAttribute('restrictToThisListOfTypes'):
    q_xtypes = In('portal_type', context.getCXAttribute('eventTypes'))

#RESTRICTS query to certain paths, if paths are listed in calendar_properties
#make sure paths listed are fully comparable to the paths as listed in the path index
q_xpaths = 0
if context.getCXAttribute('restrictToThisListOfPaths'):
    q_xpaths = In('path', context.getCXAttribute('listOfPaths'))

#RESTRICTS query to the same path as the CalendarX instance
if context.getCXAttribute('restrictToThisFolder'):
    q_xpaths = In('path', '/'.join(context.getPhysicalPath()[:-1]))

#XMY: build an xmy query for MY/PUBLIC requests
#  if 'xmy' == '0', then we don't need xmy in the query
#  if 'xmy' == '1' or anything else, then set xcrt = '11' to ONLY show user == CREATOR
#  and then set xpub = '11' to allow viewing ANY review_state events (including PRIVATE)
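The snippet ends before the individual terms are combined. A hedged sketch of how such flag-guarded terms (left as 0 when unused) are typically folded into a single query, assuming the remaining switches build terms the same way:

from Products.AdvancedQuery import And

terms = [t for t in (q_xtypes, q_xpaths) if t != 0]  # keep only built terms
query = And(*terms) if terms else None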
Example #23
    def testDataManager(self):
        # before any changes are made, tx_state is None
        self.assertIsNone(self._get_transaction_state())
        device_class_1 = "device_class_1"
        device_class_2 = "device_class_2"
        device_class_3 = "device_class_3"
        device_class_4 = "device_class_4"

        # create an organizer
        dc_1 = self.dmd.Devices.createOrganizer(device_class_1)
        tx_state = self._get_transaction_state()
        dc_1_uid = dc_1.idx_uid()

        # Some tx_state checks
        self.assertIsNotNone(tx_state)
        self.assertTrue(len(tx_state.pending_updates) > 0)
        self.assertTrue(len(tx_state.indexed_updates) == 0)
        self.assertTrue(len(tx_state.temp_indexed_uids) == 0)
        self.assertTrue(len(tx_state.temp_deleted_uids) == 0)

        # The new organizer index update should have been buffered in tx_state
        self._check_tx_state(pending=dc_1_uid)

        # A search with commit_dirty=False should not find the new device organizer
        search_results = self.model_catalog.search(query=Eq(UID, dc_1_uid),
                                                   commit_dirty=False)
        self.assertEquals(search_results.total, 0)

        # A search with commit_dirty=True must find the new device organizer
        search_results = self.model_catalog.search(query=Eq(UID, dc_1_uid),
                                                   commit_dirty=True)
        # model catalog should return the dirty doc
        self.assertEquals(search_results.total, 1)
        self._validate_temp_indexed_results(search_results,
                                            expected_object_uids=[dc_1_uid])

        # the tx_state object should have been updated appropriately
        self._check_tx_state(temp_indexed=dc_1_uid)
        self.assertTrue(len(tx_state.pending_updates) == 0)

        # create another organizer
        dc_2 = self.dmd.Devices.createOrganizer(device_class_2)
        dc_2_uid = dc_2.idx_uid()

        # check tx_state has been updated accordingly
        self._check_tx_state(pending=dc_2_uid, temp_indexed=dc_1_uid)

        # search for both device classes with commit_dirty=False, it should only return dc_1_uid
        query = MatchGlob(UID, "/zport/dmd/Devices/device_class*")
        search_results = self.model_catalog.search(query=query,
                                                   commit_dirty=False)
        self._validate_temp_indexed_results(search_results,
                                            expected_object_uids=[dc_1_uid])
        # tx_state should not have changed
        self._check_tx_state(pending=dc_2_uid, temp_indexed=dc_1_uid)

        # now with commit_dirty=True
        search_results = self.model_catalog.search(query=query,
                                                   commit_dirty=True)
        self._check_tx_state(temp_indexed=[dc_1_uid, dc_2_uid])
        # it should return 2 device classes
        self.assertEquals(search_results.total, 2)
        self._validate_temp_indexed_results(
            search_results, expected_object_uids=[dc_1_uid, dc_2_uid])

        # Lets delete device_class_1
        self.dmd.Devices._delObject(device_class_1)
        self._check_tx_state(pending=[dc_1_uid])
        #   a search with commit = True should not return device_class_1 anymore
        search_results = self.model_catalog.search(query=query,
                                                   commit_dirty=True)
        self._validate_temp_indexed_results(search_results,
                                            expected_object_uids=[dc_2_uid])
        self._check_tx_state(temp_deleted=[dc_1_uid])
        #   however, we should have two temp docs matching "/zport/dmd/Devices/device_class*"
        mi_results = self.model_index.search(SearchParams(query))
        self.assertTrue(mi_results.total_count == 2)
        #   make sure a count type of query works (search with limit=0)
        search_results = self.model_catalog.search(query=query,
                                                   limit=0,
                                                   commit_dirty=True)
        self.assertTrue(search_results.total == 1)

        #   some more tx_state checks before moving on to the next thing
        tx_state = self._get_transaction_state()
        self.assertTrue(len(tx_state.pending_updates) == 0)
        self.assertTrue(len(tx_state.indexed_updates) == 2)
        self.assertTrue(len(tx_state.temp_indexed_uids) == 1)
        self.assertTrue(len(tx_state.temp_deleted_uids) == 1)

        # Simulate transaction is committed and do checks
        updated_uids = set(
            tx_state.pending_updates.keys()) | tx_state.temp_indexed_uids
        try:
            tid = self.data_manager._get_tid()
            # before commit we should have 2 docs with tx_state = tid
            mi_results = self.model_index.search(
                SearchParams(Eq(TX_STATE_FIELD, tid)))
            self.assertTrue(mi_results.total_count == 2)
            # Lets do the commit
            self._simulate_tx_commit()
            self.assertIsNone(self._get_transaction_state())
            # Check we only have one doc matching "/zport/dmd/Devices/device_class*"
            search_results = self.model_catalog.search(query=query,
                                                       commit_dirty=False)
            self.assertEquals(search_results.total, 1)
            # Check the result's tx_state field has been set to zero
            brain = search_results.results.next()
            self.assertEquals(brain.tx_state, 0)
            # No documents should remain with tx_state == tid
            mi_results = self.model_index.search(
                SearchParams(Eq(TX_STATE_FIELD, tid)))
            self.assertEquals(mi_results.total_count, 0)
        finally:
            # clean up created docs in solr
            query = In(UID, updated_uids)
            self.model_index.unindex_search(SearchParams(query))

        # create another organizer in a new transaction
        dc_3 = self.dmd.Devices.createOrganizer(device_class_3)
        dc_3_uid = dc_3.idx_uid()
        self._check_tx_state(pending=dc_3_uid)
        tx_state = self._get_transaction_state()
        self.assertTrue(len(tx_state.pending_updates) == 1)
        self.assertTrue(len(tx_state.indexed_updates) == 0)
        self.assertTrue(len(tx_state.temp_indexed_uids) == 0)
        self.assertTrue(len(tx_state.temp_deleted_uids) == 0)
        # Manual mid-transaction commit
        self.data_manager.do_mid_transaction_commit()
        self._check_tx_state(temp_indexed=dc_3_uid)
        self.assertTrue(len(tx_state.pending_updates) == 0)
        self.assertTrue(len(tx_state.indexed_updates) == 1)
        self.assertTrue(len(tx_state.temp_indexed_uids) == 1)
        self.assertTrue(len(tx_state.temp_deleted_uids) == 0)
        query = MatchGlob(UID, "/zport/dmd/Devices/device_class*")
        search_results = self.model_catalog.search(query=query,
                                                   commit_dirty=False)
        self._validate_temp_indexed_results(search_results,
                                            expected_object_uids=[dc_3_uid])
        # Simulate transaction is aborted and check tx state has been reset
        self.data_manager.abort(transaction.get())
        # No docs should match the device class uid
        search_results = self.model_catalog.search(query=Eq(UID, dc_3_uid),
                                                   commit_dirty=False)
        self.assertTrue(search_results.total == 0)
        # No documents should remain with tx_state == tid
        tid = self.data_manager._get_tid()
        mi_results = self.model_index.search(
            SearchParams(Eq(TX_STATE_FIELD, tid)))
        self.assertEquals(mi_results.total_count, 0)
        self.assertIsNone(self._get_transaction_state())

        # delete a doc that exists before current tx, do a search with commit dirty and abort
        dc_4 = self.dmd.Devices.createOrganizer(device_class_4)
        dc_4_uid = dc_4.idx_uid()
        query = Eq(UID, dc_4_uid)
        try:
            # commit to get the device_class_4 doc in solr
            self._simulate_tx_commit()
            # check the doc exists in solr
            search_results = self.model_catalog.search(query=query)
            self.assertTrue(search_results.total == 1)
            # delete the object
            self.dmd.Devices._delObject(device_class_4)
            # a model catalog search with commit_dirty=True should not return the deleted doc
            search_results = self.model_catalog.search(query=query,
                                                       commit_dirty=True)
            self.assertTrue(search_results.total == 0)
            # however the doc is still in solr
            mi_results = self.model_index.search(SearchParams(query))
            self.assertTrue(mi_results.total_count == 1)
            # Abort tx
            self.data_manager.abort(transaction.get())
            # The doc should have been left intact in solr
            search_results = self.model_catalog.search(query=query)
            self.assertTrue(search_results.total == 1)
        finally:
            # clean up created docs in solr
            self.model_index.unindex_search(SearchParams(query))
Example #24
    def contentFilterAQ(self):
        '''
        Parse request and generate AdvancedQuery query
        '''
        portal_state = getMultiAdapter((self.context, self.request),
                                       name="plone_portal_state")
        member = portal_state.member()

        query_parts = []

        text = self.request.get('datasets.filter.text')
        if text:
            query_parts.append(Eq('SearchableText', text))

        genre = self.request.get('datasets.filter.genre')
        genre_vocab = self.dstools.genre_vocab
        if genre:
            # convert token from request to value
            query_parts.append(
                In('BCCDataGenre', [
                    genre_vocab.getTermByToken(token).value
                    for token in genre if token in genre_vocab.by_token
                ]))
        else:
            # if nothing selected use all values in vocab
            query_parts.append(
                In('BCCDataGenre',
                   ('DataGenreSpeciesOccurrence', 'DataGenreSpeciesAbsence',
                    'DataGenreSpeciesAbundance', 'DataGenreE', 'DataGenreCC',
                    'DataGenreFC', 'DataGenreTraits', 'DataGenreSDMModel')))

        resolution = self.request.get('datasets.filter.resolution')
        resolution_vocab = self.dstools.resolution_vocab
        if resolution:
            # convert token to value
            query_parts.append(
                In('BCCResolution', [
                    resolution_vocab.getTermByToken(token).value
                    for token in resolution
                    if token in resolution_vocab.by_token
                ]))

        layer = self.request.get('datasets.filter.layer')
        layer_vocab = self.dstools.layer_vocab
        if layer:
            query_parts.append(
                In('BCCEnviroLayer', [
                    layer_vocab.getTermByToken(token).value
                    for token in layer if token in layer_vocab.by_token
                ]))

        emsc = self.request.get('datasets.filter.emsc')
        emsc_vocab = self.dstools.emsc_vocab
        if emsc:
            query_parts.append(
                In('BCCEmissionScenario', [
                    emsc_vocab.getTermByToken(token).value
                    for token in emsc if token in emsc_vocab.by_token
                ]))

        gcm = self.request.get('datasets.filter.gcm')
        gcm_vocab = self.dstools.gcm_vocab
        if gcm:
            query_parts.append(
                In('BCCGlobalClimateModel', [
                    gcm_vocab.getTermByToken(token).value
                    for token in gcm if token in gcm_vocab.by_token
                ]))

        # TODO: year

        # FIXME: source filter is incomplete
        source = self.request.get('datasets.filter.source')
        if source:
            for token in source:
                if token == 'user':
                    query_parts.append(Eq('Creator', member.getId()))
                elif token == 'admin':
                    query_parts.append(Eq('Creator', 'BCCVL'))
                elif token == 'shared':
                    query_parts.append(
                        Not(In('Creator', (member.getId(), 'BCCVL'))))
                # FIXME: missing: shared, ala

        # add path filter
        if self.path:
            query_parts.append(
                Generic('path', {
                    'query': self.path,
                    'depth': -1
                }))
        # add additional query filters
        query_parts.append(Eq('object_provides', IDataset.__identifier__))
        return And(*query_parts)
Example #25
def handle_retracting(context, settings, dry_run=True, log=True):
    '''
    '''
    catalog = context.portal_catalog
    wf = context.portal_workflow
    now = context.ZopeTime()

    actions = settings.retract_actions
    action_taken = False
    audit = []
    for a in actions:
        audit_record = {}

        date_index = 'expires'
        date_method = 'getExpirationDate'
        date_index_value = a.date_index
        if date_index_value:
            if '|' in date_index_value:
                items = date_index_value.split('|')
                _date_index = items[0]
                _date_method = items[1]
            else:
                _date_index = date_index_value
                _date_method = date_index_value
            if _date_index in catalog.indexes():
                date_index = _date_index
                date_method = _date_method
            else:
                logger.warn("date index does not exist: %s" %
                            (str(_date_index)))
                continue

        audit_record['header'] = 'Actions triggered by "%s"' % str(date_index)
        audit_record['content_types'] = str(a.portal_types)
        audit_record['initial_state'] = str(a.initial_state)
        audit_record['transition'] = str(a.transition)
        audit_record['date_index_method'] = (str(date_index) + '/' +
                                             str(date_method))
        audit_record['actions'] = []

        query = (In('review_state', a.initial_state)
                 & Le(date_index, now)
                 & Eq('enableAutopublishing', True)
                 & In('portal_type', a.portal_types))

        brains = catalog.evalAdvancedQuery(query)

        affected = 0
        total = 0
        for brain in brains:
            o = brain.getObject()
            try:
                exp_date = getattr(o, date_method)()
            except AttributeError:
                logger.warn("date field does not exist: %s" %
                            (str(date_method)))
                continue
            # The dates in the indexes are always set.
            # So we need to test on the objects if the dates
            # are actually set.

            # we only retract if:
            # the expiration date is set and is in the past:
            if exp_date is not None and exp_date < now:
                audit_action = {}
                audit_action['portal_type'] = brain.portal_type
                audit_action['url'] = brain.getURL()
                audit_action['title'] = brain.Title
                audit_action['transition'] = a.transition
                if log:
                    logger.info(str(audit_action))
                total += 1
                action_taken = True
                if not dry_run:
                    try:
                        wf.doActionFor(o, a.transition)
                        o.reindexObject()
                        affected += 1
                    except WorkflowException:
                        logger.info(
                            "The state '%s' of the workflow associated with "
                            "the object at '%s' does not provide the '%s' "
                            "action" % (brain.review_state, brain.getURL(),
                                        str(a.transition)))
                audit_record['actions'].append(audit_action)

        if log:
            logger.info(
                "Ran collective.autopublishing (retract): "
                "%d objects found, %d affected" % (total, affected))
        audit.append(audit_record)
    if action_taken:
        return audit
    else:
        return []
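Unlike the publish handler above, which matched a single state with Eq('review_state', a.initial_state), the retract handler treats initial_state as a collection. The two forms side by side:

from Products.AdvancedQuery import Eq, In

Eq('review_state', 'published')                  # exactly one state
In('review_state', ('published', 'pending'))     # any of several states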
Example #26
    def search(self, request):
        """
        """
        # get search terms
        searchable = request.form.get("SearchableText", None)
        fulltext = request.form.get("SearchableText", "")
        authors = request.form.get("authors", "")
        #author_id  = request.form.get("author_id", "")
        abstract = request.form.get("abstract", "")
        title = request.form.get("title", "")
        jel = request.form.get("jel", "")
        papers = request.form.get("papers", [])
        #year       = request.form.get("year", "all")
        #period     = request.form.get("period", "all")

        query = ""

        #import pdb; pdb.set_trace()
        if "discussion" in papers:

            query = Or(
                Eq('portal_type', 'File'),
                And(Eq('portal_type', "DiscussionPaper"),
                    In('review_state', ("rejected", "discussible"))))

        if "journal" in papers:
            query_jp = Or(Eq("portal_type", "JournalPaper"),
                          Eq('portal_type', 'eJFile'))

            if query == "":
                query = query_jp
            else:
                query = Or(query, query_jp)

        #if "comment" in papers:
        #    query = And(Eq('portal_type', "Comment"))

        if query == "":
            return []

        if searchable is not None:
            if searchable == "":
                return []

            query = And(query, Eq("SearchableText", searchable))

    # if year != "all":
    #     year_start, year_end = _createYearRange(year)
    #     query = And(query,
    #                 Between("created", year_start, year_end))

    # elif period != "all":
    #     period_start, period_end = _createPeriod(period)
    #     query = And(query,
    #                 Between("created", period_start, period_end))

        if fulltext != "":
            #query_fulltext = In("eJFulltext", fulltext)

            query_fulltext = In("SearchableText", fulltext)
            query = And(query, query_fulltext)

#### XXX getAuthorsForTitle no longer exists. Refactor!
        if authors != "":
            query_author = In("getAuthorsForTitle", authors)
            query = And(query, query_author)

        if abstract != "":
            query_abstract = Eq("getAbstract", abstract)
            query = And(query, query_abstract)

        if title != "":
            query_title = In("Title", title)
            query = And(query, query_title)

        if jel != "":
            query_jel = Eq("getJelAsString", jel)
            query = And(query, query_jel)

        # search

        brains = self.context.portal_catalog.evalAdvancedQuery(query)

        return brains
Example #27
    def testPartialUpdates(self):
        # for this test we need to create a test device and commit the changes to
        device = manage_createDevice(self.dmd, 'my_device', '/')
        ip = "10.10.10.1"
        prod_state = 500
        device_uid = device.idx_uid()
        device.setManageIp(ip)
        device.setProdState(prod_state)

        # get the uids we are about to commit so we can revert them at the end
        tx_state = self._get_transaction_state()
        tid = tx_state.tid
        updated_uids = set(
            tx_state.pending_updates.keys()) | tx_state.temp_indexed_uids
        try:
            # simulate the transaction was committed and do a few partial updates
            self._simulate_tx_commit()
            # make sure the device was correctly indexed
            fields = ["productionState", "text_ipAddress"]
            search_results = self.model_catalog.search(query=Eq(
                UID, device_uid),
                                                       fields=fields,
                                                       commit_dirty=False)
            self.assertEquals(search_results.total, 1)
            brain = search_results.results.next()
            self.assertEquals(brain.uid, device_uid)
            self.assertEquals(brain.text_ipAddress, ip)
            self.assertEquals(brain.productionState, prod_state)

            # update prod state triggers an atomic update
            new_prod_state = 1000
            device.setProdState(new_prod_state)
            # tx_state.pending_updates.values()[0].spec.to_dict()
            # mi_results = self.model_index.search(SearchParams(Eq(UID, device_uid)))
            # repeat the search and make sure that the atomic update has all the fields it should
            search_results = self.model_catalog.search(query=Eq(
                UID, device_uid),
                                                       fields=fields,
                                                       commit_dirty=True)
            self.assertEquals(search_results.total, 1)
            brain = search_results.results.next()
            self.assertEquals(brain.uid, device_uid)
            self.assertEquals(brain.text_ipAddress, ip)
            self.assertEquals(brain.productionState, new_prod_state)
            # Make sure the index update is correct
            tx_state = self._get_transaction_state()
            index_update = tx_state.indexed_updates.get(device_uid)
            self.assertIsNotNone(index_update)
            expected_fields = MANDATORY_FIELDS | set(["productionState"])
            self.assertEquals(expected_fields, index_update.idxs)

            # Set manage ip also sends a partial update for fields
            # 'decimal_ipAddress', 'text_ipAddress'
            new_ip = "10.10.10.2"
            device.setManageIp(new_ip)
            search_results = self.model_catalog.search(query=Eq(
                UID, device_uid),
                                                       fields=fields,
                                                       commit_dirty=True)
            self.assertEquals(search_results.total, 1)
            brain = search_results.results.next()
            self.assertEquals(brain.uid, device_uid)
            self.assertEquals(brain.text_ipAddress, new_ip)
            self.assertEquals(brain.productionState, new_prod_state)
            # Make sure the partial updates have been correctly combined
            tx_state = self._get_transaction_state()
            index_update = tx_state.indexed_updates.get(device_uid)
            self.assertIsNotNone(index_update)
            expected_fields = MANDATORY_FIELDS | set(
                ['decimal_ipAddress', 'text_ipAddress', "productionState"])
            self.assertEquals(expected_fields, index_update.idxs)

            # simulate another transaction commit and check everything went well
            self._simulate_tx_commit()
            search_results = self.model_catalog.search(query=Eq(
                UID, device_uid),
                                                       fields=fields,
                                                       commit_dirty=False)
            self.assertEquals(search_results.total, 1)
            brain = search_results.results.next()
            self.assertEquals(brain.uid, device_uid)
            self.assertEquals(brain.text_ipAddress, new_ip)
            self.assertEquals(brain.productionState, new_prod_state)

            # make sure all temp documents have been deleted
            search_results = self.model_catalog.search(query=Eq(
                TX_STATE_FIELD, tid),
                                                       commit_dirty=False)
            self.assertEquals(search_results.total, 0)
        finally:
            query = In(UID, updated_uids)
            self.model_index.unindex_search(SearchParams(query))