Example #1
def ar_analysis_values(self):
    ret = []
    analyses = self.context.getAnalyses(is_active=True)
    for proxy in analyses:
        analysis = proxy.getObject()
        if proxy.review_state == 'retracted':
            # these are picked up when retested analyses are found below.
            continue
        # Things that are manually inserted into the analysis data.
        # These values are included even if they are not present in
        # include_fields in the request.
        method = analysis.getMethod()
        analysis_data = {
            "Uncertainty": analysis.getUncertainty(),
            "Method": method.Title() if method else '',
            "Unit": analysis.getUnit(),
        }
        # Place all proxy attributes into the result.
        analysis_data.update(load_brain_metadata(proxy, []))
        # Place all schema fields into the result.
        analysis_data.update(load_field_values(analysis, []))
        # call any adapters that care to modify the Analysis data.
        # adapters = getAdapters((analysis, ), IJSONReadExtender)
        # for name, adapter in adapters:
        #     adapter(request, analysis_data)
        if not self.include_fields or "transitions" in self.include_fields:
            analysis_data['transitions'] = get_workflow_actions(analysis)
        retest_of = analysis.getRetestOf()
        if retest_of:
            prevs = [{'created': str(retest_of.created()),
                      'Result': retest_of.getResult(),
                      'InterimFields': retest_of.getInterimFields()}]
            analysis_data['Previous Results'] = prevs
        ret.append(analysis_data)
    return ret
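A standalone illustration of the include_fields gate used above (plain Python, nothing from bika.lims needed): the 'transitions' entry is added either when no field filter was requested at all, or when it was requested explicitly.

def wants(include_fields, name):
    # Mirrors the condition in ar_analysis_values: an empty filter means
    # "include everything"; otherwise the field must be listed explicitly.
    return not include_fields or name in include_fields

print(wants([], "transitions"))               # True  - no filter, include all
print(wants(["Result"], "transitions"))       # False - filtered out
print(wants(["transitions"], "transitions"))  # True  - explicitly requested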
Example #2
def ar_analysis_values(self):
    ret = []
    analyses = self.context.getAnalyses(cancellation_state='active')
    for proxy in analyses:
        analysis = proxy.getObject()
        service = analysis.getService()
        if proxy.review_state == 'retracted':
            # these are picked up when retested analyses are found below.
            continue
        # Things that are manually inserted into the analysis data.
        # These values are included even if they are not present in
        # include_fields in the request.
        method = analysis.getMethod()
        if not method:
            method = service.getMethod()
        hs = hasattr(analysis, "specification")
        analysis_data = {
            "Uncertainty": service.getUncertainty(analysis.getResult()),
            "Method": method.Title() if method else '',
            "specification": analysis.specification if hs else {},
            "Unit": service.getUnit(),
        }
        # Place all proxy attributes into the result.
        analysis_data.update(load_brain_metadata(proxy, []))
        # Place all schema fields into the result.
        analysis_data.update(load_field_values(analysis, []))
        # call any adapters that care to modify the Analysis data.
        # adapters = getAdapters((analysis, ), IJSONReadExtender)
        # for name, adapter in adapters:
        #     adapter(request, analysis_data)
        if not self.include_fields or "transitions" in self.include_fields:
            analysis_data['transitions'] = get_workflow_actions(analysis)
        if analysis.getRetested():
            retracted = self.context.getAnalyses(review_state='retracted',
                                                 title=analysis.Title(),
                                                 full_objects=True)
            prevs = sorted(retracted, key=lambda item: item.created())
            prevs = [{'created': str(p.created()),
                      'Result': p.getResult(),
                      'InterimFields': p.getInterimFields()}
                     for p in prevs]
            analysis_data['Previous Results'] = prevs
        ret.append(analysis_data)
    return ret
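The two versions assemble 'Previous Results' differently: Example #1 follows a direct getRetestOf() reference, while Example #2 queries the catalog for retracted analyses with the same title and sorts them oldest-first. A minimal sketch of that sort-and-serialize step, with plain dicts standing in for analysis objects (the sample values below are made up):

from datetime import datetime

# Hypothetical stand-ins for retracted analysis objects.
retracted = [
    {"created": datetime(2021, 5, 2), "Result": "4.1", "InterimFields": []},
    {"created": datetime(2021, 4, 30), "Result": "3.9", "InterimFields": []},
]
# Oldest first, then keep only the fields the snippet serializes.
prevs = sorted(retracted, key=lambda item: item["created"])
prevs = [{"created": str(p["created"]),
          "Result": p["Result"],
          "InterimFields": p["InterimFields"]} for p in prevs]
print(prevs)  # the 2021-04-30 entry comes first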
Example #3
def read(context, request):
    tag = AuthenticatorView(context, request).authenticator()
    pattern = r'<input .*name="(\w+)".*value="(\w+)"'
    _authenticator = re.match(pattern, tag).groups()[1]

    ret = {
        "url": router.url_for("read", force_external=True),
        "success": True,
        "error": False,
        "objects": [],
        "_authenticator": _authenticator,
    }
    debug_mode = App.config.getConfiguration().debug_mode
    catalog_name = request.get("catalog_name", "portal_catalog")
    if not catalog_name:
        raise ValueError("bad or missing catalog_name: " + catalog_name)
    catalog = getToolByName(context, catalog_name)
    indexes = catalog.indexes()

    contentFilter = {}
    for index in indexes:
        if index in request:
            if index == 'review_state' and "{" in request[index]:
                continue
            contentFilter[index] = safe_unicode(request[index])
        if "%s[]"%index in request:
            value = request["%s[]"%index]
            if type(value) in (list, tuple):
                contentFilter[index] = [safe_unicode(v) for v in value]
            else:
                contentFilter[index] = value

    if 'limit' in request:
        try:
            contentFilter['sort_limit'] = int(request["limit"])
        except ValueError:
            pass
    sort_on = request.get('sort_on', 'id')
    contentFilter['sort_on'] = sort_on
    # sort order (defaults to ascending)
    sort_order = request.get('sort_order') or 'ascending'
    contentFilter['sort_order'] = sort_order

    include_fields = get_include_fields(request)
    if debug_mode:
        logger.info("contentFilter: " + str(contentFilter))

    # Get matching objects from catalog
    proxies = catalog(**contentFilter)

    # batching items
    page_nr = int(request.get("page_nr", 0))
    try:
        page_size = int(request.get("page_size", 10))
    except ValueError:
        page_size = 10
    # page_size == 0: show all
    if page_size == 0:
        page_size = len(proxies)
    first_item_nr = page_size * page_nr
    if first_item_nr > len(proxies):
        first_item_nr = 0
    page_proxies = proxies[first_item_nr:first_item_nr + page_size]
    for proxy in page_proxies:
        obj_data = {}

        # Place all proxy attributes into the result.
        obj_data.update(load_brain_metadata(proxy, include_fields))

        # Place all schema fields into the result.
        obj = proxy.getObject()
        obj_data.update(load_field_values(obj, include_fields))

        obj_data['path'] = "/".join(obj.getPhysicalPath())

        # call any adapters that care to modify this data.
        adapters = getAdapters((obj, ), IJSONReadExtender)
        for name, adapter in adapters:
            adapter(request, obj_data)

        ret['objects'].append(obj_data)

    ret['total_objects'] = len(proxies)
    ret['first_object_nr'] = first_item_nr
    last_object_nr = first_item_nr + len(page_proxies)
    if last_object_nr > ret['total_objects']:
        last_object_nr = ret['total_objects']
    ret['last_object_nr'] = last_object_nr

    if debug_mode:
        logger.info("{0} objects returned".format(len(ret['objects'])))
    return ret
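The _authenticator extraction at the top of read() parses the hidden-input tag rendered by plone.protect's AuthenticatorView. It can be run standalone against a sample tag (the token value here is made up; the attribute order assumed below matches what plone.protect emits):

import re

tag = '<input type="hidden" name="_authenticator" value="abc123def456"/>'
pattern = r'<input .*name="(\w+)".*value="(\w+)"'
# groups()[0] is the field name, groups()[1] the CSRF token itself.
print(re.match(pattern, tag).groups()[1])  # abc123def456

And the batching arithmetic, extracted as a pure function so its two edge cases are visible: page_size == 0 means "return everything", and a page number past the end falls back to the first item.

def batch(proxies, page_nr=0, page_size=10):
    # Same logic as in read(): 0 disables paging; out-of-range pages reset to 0.
    if page_size == 0:
        page_size = len(proxies)
    first_item_nr = page_size * page_nr
    if first_item_nr > len(proxies):
        first_item_nr = 0
    return first_item_nr, proxies[first_item_nr:first_item_nr + page_size]

print(batch(list(range(25)), page_nr=2))    # (20, [20, 21, 22, 23, 24])
print(batch(list(range(25)), page_nr=9))    # (0, [0, ..., 9]) - wrapped around
print(batch(list(range(25)), page_size=0))  # (0, all 25 items)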