Example #1
def guard_submit(obj):
    """Returns if 'submit' transition can be applied to the worksheet passed in.
    By default, the target state for the 'submit' transition for a worksheet is
    'to_be_verified', so this guard returns true if all the analyses assigned
    to the worksheet have already been submitted. Those analyses that are in a
    non-valid state (cancelled, inactive) are dismissed in the evaluation, but
    at least one analysis must be in an active state (and submitted) for this
    guard to return True. Otherwise, always returns False.
    Note this guard depends entirely on the current status of the children.
    """
    analyses = obj.getAnalyses()
    if not analyses:
        # An empty worksheet cannot be submitted
        return False

    can_submit = False
    for analysis in analyses:
        # Dismiss analyses that are not active
        if not api.is_active(analysis):
            continue
        # Dismiss analyses that have been rejected or retracted
        if api.get_workflow_status_of(analysis) in ["rejected", "retracted"]:
            continue
        # Worksheet cannot be submitted if there is one analysis not submitted
        can_submit = ISubmitted.providedBy(analysis)
        if not can_submit:
            # No need to look further
            return False

    # This prevents the submission of the worksheet if all its analyses are in
    # a detached status (rejected, retracted or cancelled)
    return can_submit
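
A minimal usage sketch, not part of the example above: the workflow machinery evaluates this guard on its own, so the snippet only illustrates its semantics; `worksheet`, `wf`, `api` and `logger` stand for the same kind of objects and modules used elsewhere on this page.

if guard_submit(worksheet):
    # All active analyses are submitted, so the transition can be triggered
    wf.doActionFor(worksheet, "submit")
else:
    logger.info("Worksheet {} cannot be submitted yet".format(
        api.get_id(worksheet)))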
Example #2
    def render_checkbox(self, item, microorganism, antibiotic):
        """Renders the checkbox properties for the item, microorganism and
        antibiotic passed-in
        """
        uid = api.get_uid(antibiotic)

        # Check whether there are analyses assigned
        analyses = self.get_analyses_for(microorganism, antibiotic)
        analysis = filter(lambda a: a.getKeyword() == REPORT_KEY, analyses)
        analysis = analysis and analysis[-1] or None

        # Set whether reporting is enabled/disabled
        item[uid] = self.is_reporting_enabled(analysis, antibiotic)

        # If sample is not in an editable status, no further actions required
        if not self.can_add_analyses():
            return

        item["allow_edit"].append(uid)
        if not analyses:
            # No analyses assigned for this microorganism
            item.setdefault("disabled", []).append(uid)

        elif analysis and ISubmitted.providedBy(analysis):
            # Analysis assigned, but report info submitted already
            item.setdefault("disable", []).append(uid)
Example #3
def is_submitted_or_submittable(analysis):
    """Returns whether the analysis is submittable or has already been submitted
    """
    if ISubmitted.providedBy(analysis):
        return True
    if wf.isTransitionAllowed(analysis, "submit"):
        return True
    return False
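
A short usage sketch, assuming `sample` exposes getAnalyses(full_objects=True) as in Example #5: keep only the analyses whose results are either already entered or still expected.

analyses = sample.getAnalyses(full_objects=True)
relevant = filter(is_submitted_or_submittable, analyses)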
Example #4
def update_ast_analysis(analysis, antibiotics, remove=False):
    # There is nothing to do if the analysis has been verified
    analysis = api.get_object(analysis)
    if IVerified.providedBy(analysis):
        return

    # Convert antibiotics to interim fields
    keyword = analysis.getKeyword()
    interim_fields = map(lambda ab: to_interim(keyword, ab), antibiotics)

    # Get the analysis interim fields
    an_interims = copy.deepcopy(analysis.getInterimFields()) or []
    an_keys = sorted(map(lambda i: i.get("keyword"), an_interims))

    # Remove non-specified antibiotics
    if remove:
        in_keys = map(lambda i: i.get("keyword"), interim_fields)
        an_interims = filter(lambda a: a["keyword"] in in_keys, an_interims)

    # Keep analysis' original antibiotics
    abx = filter(lambda a: a["keyword"] not in an_keys, interim_fields)
    an_interims.extend(abx)

    # Is there any difference?
    new_keys = sorted(map(lambda i: i.get("keyword"), an_interims))
    if new_keys == an_keys:
        # No changes
        return

    # If no antibiotics, remove the analysis
    if remove and not an_interims:
        sample = analysis.getRequest()
        sample._delObject(api.get_id(analysis))
        return

    if ISubmitted.providedBy(analysis):
        # Analysis has been submitted already, retract
        succeed, message = wf.doActionFor(analysis, "retract")
        if not succeed:
            path = api.get_path(analysis)
            logger.error("Cannot retract analysis '{}'".format(path))
            return

    # Assign the antibiotics
    analysis.setInterimFields(an_interims)

    # Compute all combinations of interim/antibiotic and possible result, and
    # generate the result options for this analysis (the "Result" field is
    # never displayed and is only used for reporting)
    result_options = get_result_options(analysis)
    analysis.setResultOptions(result_options)

    # Apply the IASTAnalysis marker interface (just in case)
    alsoProvides(analysis, IASTAnalysis)

    analysis.reindexObject()
Example #5
def has_analyses_in_panic(sample):
    """Returns whether the sample passed in have at least one analysis for which
    the result is in panic in accordance with the specifications. Only analyses
    in "to_be_verified" status and beyond are considered
    """
    analyses = sample.getAnalyses(full_objects=True)
    for analysis in analyses:
        if ISubmitted.providedBy(analysis) and is_in_panic(analysis):
            return True
    return False
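
A hedged usage sketch: because only submitted analyses are considered, the helper fits naturally after results have been entered; `notify_panic` is a hypothetical helper, not part of the examples on this page.

if has_analyses_in_panic(sample):
    # Alert the responsible contact (hypothetical helper)
    notify_panic(sample)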
Example #6
    def verify_and_retest(relative):
        if not ISubmitted.providedBy(relative):
            # Result not yet submitted, no need to create a retest
            return

        # Apply the transition manually, but only if analysis can be verified
        doActionFor(relative, "verify")

        # Create the retest
        create_retest(relative)
Example #7
    def update_analyses(self, microorganism, antibiotics):
        analyses = self.get_analyses_for(microorganism)

        # Keep only the analyses that are not yet submitted
        analyses = filter(lambda a: not ISubmitted.providedBy(a), analyses)

        if not analyses:
            if antibiotics:
                # Create new analyses
                keywords = [ZONE_SIZE_KEY, RESISTANCE_KEY, REPORT_KEY]
                utils.create_ast_analyses(self.context, keywords,
                                          microorganism, antibiotics)

        elif not antibiotics:
            # Remove analyses
            analyses_ids = map(api.get_id, analyses)
            map(self.context._delObject, analyses_ids)  # noqa

        else:
            # Update analyses
            map(
                lambda a: utils.update_ast_analysis(
                    a, antibiotics, remove=True), analyses)
Example #8
    def set(self, instance, items, prices=None, specs=None, hidden=None, **kw):
        """Set/Assign Analyses to this AR

        :param items: List of Analysis objects/brains, AnalysisService
                      objects/brains and/or Analysis Service uids
        :type items: list
        :param prices: Mapping of AnalysisService UID -> price
        :type prices: dict
        :param specs: List of AnalysisService UID -> Result Range mappings
        :type specs: list
        :param hidden: List of AnalysisService UID -> Hidden mappings
        :type hidden: list
        :returns: list of newly assigned Analyses
        """
        if items is None:
            items = []

        # Bail out if items is not a list type
        if not isinstance(items, (list, tuple)):
            raise TypeError(
                "Items parameter must be a tuple or list, got '{}'".format(
                    type(items)))

        # Bail out if the AR is inactive
        if not api.is_active(instance):
            raise Unauthorized("Inactive ARs cannot be modified")

        # Bail out if the user does not have the required permission
        if not check_permission(AddAnalysis, instance):
            raise Unauthorized(
                "You do not have the '{}' permission".format(AddAnalysis))

        # Convert the items to a valid list of AnalysisServices
        services = filter(None, map(self._to_service, items))

        # Calculate dependencies
        dependencies = map(lambda s: s.getServiceDependencies(), services)
        dependencies = list(itertools.chain.from_iterable(dependencies))

        # Merge dependencies and services
        services = set(services + dependencies)

        # Modify existing AR specs with new form values of selected analyses
        specs = self.resolve_specs(instance, specs)

        # Add analyses
        params = dict(prices=prices, hidden=hidden, specs=specs)
        map(lambda serv: self.add_analysis(instance, serv, **params), services)

        # Get all analyses (those from descendants included)
        analyses = instance.objectValues("Analysis")
        analyses.extend(self.get_analyses_from_descendants(instance))

        # Collect the analyses to remove: those whose service is not in the
        # services list and that have not been submitted yet
        uids = map(api.get_uid, services)
        to_remove = filter(lambda an: an.getServiceUID() not in uids, analyses)
        to_remove = filter(lambda an: not ISubmitted.providedBy(an), to_remove)

        # Remove analyses
        map(self.remove_analysis, to_remove)
Example #9
    def folderitem(self, obj, item, index):
        """Service triggered each time an item is iterated in folderitems.

        The use of this service prevents extra loops over child objects.

        :obj: the instance of the class to be foldered
        :item: dict containing the properties of the object to be used by
            the template
        :index: current index of the item
        """
        # ensure we have an object and not a brain
        obj = api.get_object(obj)
        uid = api.get_uid(obj)

        # settings for this analysis
        service_settings = self.context.getAnalysisServiceSettings(uid)
        hidden = service_settings.get("hidden", obj.getHidden())

        # get the category
        category = obj.getCategoryTitle()
        item["category"] = category
        if category not in self.categories:
            self.categories.append(category)

        price = obj.getPrice()
        keyword = obj.getKeyword()

        if uid in self.analyses:
            analysis = self.analyses[uid]
            # Might differ from the service keyword
            keyword = analysis.getKeyword()
            # Mark the row as disabled if the analysis has been submitted
            item["disabled"] = ISubmitted.providedBy(analysis)
            # get the hidden status of the analysis
            hidden = analysis.getHidden()
            # get the price of the analysis
            price = analysis.getPrice()

        # get the specification of this object
        rr = self.get_results_range()
        spec = rr.get(keyword, ResultsRangeDict())

        item["Title"] = obj.Title()
        item["Unit"] = obj.getUnit()
        item["Price"] = price
        item["before"]["Price"] = self.get_currency_symbol()
        item["allow_edit"] = self.get_editable_columns(obj)
        item["selected"] = uid in self.selected
        item["min"] = str(spec.get("min", ""))
        item["max"] = str(spec.get("max", ""))
        item["warn_min"] = str(spec.get("warn_min", ""))
        item["warn_max"] = str(spec.get("warn_max", ""))
        item["Hidden"] = hidden

        # Append info link before the service
        # see: bika.lims.site.coffee for the attached event handler
        item["before"]["Title"] = get_link(
            "analysisservice_info?service_uid={}".format(uid),
            value="<span class='glyphicon glyphicon-info-sign'></span>",
            css_class="service_info")

        # Icons
        after_icons = ""
        if obj.getAccredited():
            after_icons += get_image("accredited.png",
                                     title=t(_("Accredited")))
        if obj.getAttachmentOption() == "r":
            after_icons += get_image("attach_reqd.png",
                                     title=t(_("Attachment required")))
        if obj.getAttachmentOption() == "n":
            after_icons += get_image("attach_no.png",
                                     title=t(_('Attachment not permitted')))
        if after_icons:
            item["after"]["Title"] = after_icons

        return item
Example #10
def remove_cascaded_analyses_of_root_samples(portal):
    """Removes Analyses from Root Samples that belong to Partitions

    https://github.com/senaite/senaite.core/issues/1504
    """
    logger.info("Removing cascaded analyses from Root Samples...")

    # Query all root Samples
    query = {
        "isRootAncestor": True,
        "sort_on": "created",
        "sort_order": "ascending",
    }
    root_samples = api.search(query, "bika_catalog_analysisrequest_listing")
    total = len(root_samples)
    logger.info("{} Samples to check... ".format(total))

    to_clean = []

    for num, brain in enumerate(root_samples):
        logger.debug("Checking Root Sample {}/{}".format(num + 1, total))

        # No Partitions, continue...
        if not brain.getDescendantsUIDs:
            continue

        # get the root sample
        root_sample = api.get_object(brain)
        # get the contained analyses of the root sample
        root_analyses = root_sample.objectIds(spec=["Analysis"])

        # Mapping of cascaded Analysis -> Partition
        analysis_mapping = {}

        # check if a root analysis is located as well in one of the partitions
        for partition in root_sample.getDescendants():
            # get the contained analyses of the partition
            part_analyses = partition.objectIds(spec=["Analysis"])
            # filter analyses that cascade root analyses
            cascaded = filter(lambda an: an in root_analyses, part_analyses)
            # keep a mapping of analysis -> partition
            for analysis in cascaded:
                analysis_mapping[analysis] = partition

        if analysis_mapping:
            to_clean.append((root_sample, analysis_mapping))

    # count the cases for each condition
    case_counter = defaultdict(int)

    # cleanup cascaded analyses
    # mapping maps the analysis id -> partition
    for sample, mapping in to_clean:

        # go through the cascaded analyses and decide if the cascaded analysis
        # should be removed from (a) the root sample or (b) the partition.

        for analysis_id, partition in mapping.items():

            # analysis from the root sample
            root_an = sample[analysis_id]
            # WF state from the root sample analysis
            root_an_state = api.get_workflow_status_of(root_an)

            # analysis from the partition sample
            part_an = partition[analysis_id]
            # WF state from the partition sample analysis
            part_an_state = api.get_workflow_status_of(part_an)

            case_counter["{}_{}".format(root_an_state, part_an_state)] += 1

            # both analyses have the same WF state
            if root_an_state == part_an_state:
                # -> remove the analysis from the root sample
                sample._delObject(analysis_id)
                logger.info(
                    "Remove analysis '{}' in state '{}' from sample {}: {}".
                    format(analysis_id, root_an_state, api.get_id(sample),
                           api.get_url(sample)))

            # both are in verified/published state
            elif IVerified.providedBy(root_an) and IVerified.providedBy(
                    part_an):
                root_an_result = root_an.getResult()
                part_an_result = part_an.getResult()
                if root_an_result == part_an_result:
                    # remove the root analysis
                    sample._delObject(analysis_id)
                    logger.info(
                        "Remove analysis '{}' in state '{}' from sample {}: {}"
                        .format(analysis_id, root_an_state, api.get_id(sample),
                                api.get_url(sample)))
                else:
                    # -> unsolvable edge case
                    #    display an error message
                    logger.error("Analysis '{}' of root sample in state '{}' "
                                 "and Analysis of partition in state {}. "
                                 "Please fix manually: {}".format(
                                     analysis_id, root_an_state, part_an_state,
                                     api.get_url(sample)))

            # root analysis is in invalid state
            elif root_an_state in ["rejected", "retracted"]:
                # -> probably the retest was automatically created in the
                #    parent instead of the partition
                pass

            # partition analysis is in invalid state
            elif part_an_state in ["rejected", "retracted"]:
                # -> probably the retest was automatically created in the
                #    parent instead of the partition
                pass

            # root analysis was submitted, but not the partition analysis
            elif ISubmitted.providedBy(
                    root_an) and not ISubmitted.providedBy(part_an):
                # -> remove the analysis from the partition
                partition._delObject(analysis_id)
                logger.info(
                    "Remove analysis '{}' in state '{}' from partition {}: {}".
                    format(analysis_id, part_an_state, api.get_id(partition),
                           api.get_url(partition)))

            # partition analysis was submitted, but not the root analysis
            elif ISubmitted.providedBy(
                    part_an) and not ISubmitted.providedBy(root_an):
                # -> remove the analysis from the root sample
                sample._delObject(analysis_id)
                logger.info(
                    "Remove analysis '{}' in state '{}' from sample {}: {}".
                    format(analysis_id, root_an_state, api.get_id(sample),
                           api.get_url(sample)))

            # inconsistent state
            else:
                logger.warning(
                    "Can not handle analysis '{}' located in '{}' (state {}) and '{}' (state {})"
                    .format(analysis_id, repr(sample), root_an_state,
                            repr(partition), part_an_state))

    logger.info("Removing cascaded analyses from Root Samples... [DONE]")

    logger.info("State Combinations (root_an_state, part_an_state): {}".format(
        sorted(case_counter.items(), key=itemgetter(1), reverse=True)))
Example #11
    def set(self, instance, items, prices=None, specs=None, hidden=None, **kw):
        """Set/Assign Analyses to this AR

        :param items: List of Analysis objects/brains, AnalysisService
                      objects/brains and/or Analysis Service uids
        :type items: list
        :param prices: Mapping of AnalysisService UID -> price
        :type prices: dict
        :param specs: List of AnalysisService UID -> Result Range mappings
        :type specs: list
        :param hidden: List of AnalysisService UID -> Hidden mappings
        :type hidden: list
        :returns: list of newly assigned Analyses
        """
        # This setter returns the list of newly assigned Analyses
        new_analyses = []

        # Current assigned analyses
        analyses = instance.objectValues("Analysis")

        # Submitted analyses must be retained
        submitted = filter(lambda an: ISubmitted.providedBy(an), analyses)

        # Prevent removing all analyses
        #
        # N.B.: Submitted analyses are rendered disabled in the HTML form.
        #       Therefore, their UIDs are not included in the submitted UIDs.
        if not items and not submitted:
            logger.warn("Not allowed to remove all Analyses from AR.")
            return new_analyses

        # Bail out if items is not a list type
        if not isinstance(items, (list, tuple)):
            raise TypeError(
                "Items parameter must be a tuple or list, got '{}'".format(
                    type(items)))

        # Bail out if the AR is inactive
        if not api.is_active(instance):
            raise Unauthorized("Inactive ARs cannot be modified")

        # Bail out if the user does not have the required permission
        if not check_permission(AddAnalysis, instance):
            raise Unauthorized(
                "You do not have the '{}' permission".format(AddAnalysis))

        # Convert the items to a valid list of AnalysisServices
        services = filter(None, map(self._to_service, items))

        # Calculate dependencies
        # FIXME Infinite recursion error possible here, if the formula includes
        #       the Keyword of the Service that includes the Calculation
        dependencies = map(lambda s: s.getServiceDependencies(), services)
        dependencies = list(itertools.chain.from_iterable(dependencies))

        # Merge dependencies and services
        services = set(services + dependencies)

        # Modify existing AR specs with new form values of selected analyses.
        self._update_specs(instance, specs)

        # Create a mapping of Service UID -> Hidden status
        if hidden is None:
            hidden = []
        hidden = dict(map(lambda d: (d.get("uid"), d.get("hidden")), hidden))

        # Ensure we have a prices dictionary
        if prices is None:
            prices = dict()

        # CREATE/MODIFY ANALYSES

        for service in services:
            service_uid = api.get_uid(service)
            keyword = service.getKeyword()

            # Create the Analysis if it doesn't exist
            if shasattr(instance, keyword):
                analysis = instance._getOb(keyword)
            else:
                analysis = create_analysis(instance, service)
                new_analyses.append(analysis)

            # set the hidden status
            analysis.setHidden(hidden.get(service_uid, False))

            # Set the price of the Analysis
            analysis.setPrice(prices.get(service_uid, service.getPrice()))

        # DELETE ANALYSES

        # Service UIDs
        service_uids = map(api.get_uid, services)

        # Analyses IDs to delete
        delete_ids = []

        # Assigned Attachments
        assigned_attachments = []

        for analysis in analyses:
            service_uid = analysis.getServiceUID()

            # Skip if the Service is selected
            if service_uid in service_uids:
                continue

            # Skip non-open Analyses
            if analysis in submitted:
                continue

            # Remember assigned attachments
            # https://github.com/senaite/senaite.core/issues/1025
            assigned_attachments.extend(analysis.getAttachment())
            analysis.setAttachment([])

            # If it is assigned to a worksheet, unassign it before deletion.
            worksheet = analysis.getWorksheet()
            if worksheet:
                worksheet.removeAnalysis(analysis)

            delete_ids.append(analysis.getId())

        if delete_ids:
            # Note: subscriber might promote the AR
            instance.manage_delObjects(ids=delete_ids)

        # Remove orphaned attachments
        for attachment in assigned_attachments:
            # only delete attachments which are no further linked
            if not attachment.getLinkedAnalyses():
                logger.info("Deleting attachment: {}".format(
                    attachment.getId()))
                attachment_id = api.get_id(attachment)
                api.get_parent(attachment).manage_delObjects(attachment_id)

        return new_analyses
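
A minimal sketch of the parameter shapes this setter consumes, based on how `prices` and `hidden` are read above; `field`, `ar` and the UID strings are placeholders, and the exact shape of `specs` entries is not shown in this example, so it is omitted.

field.set(ar,
          items=["<service_uid_1>", "<service_uid_2>"],
          prices={"<service_uid_1>": "15.00"},
          hidden=[{"uid": "<service_uid_2>", "hidden": True}])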