Code Example #1
File: guards.py  Project: vinely/senaite.core
def guard_verify(analysis_request):
    """Returns whether the transition "verify" can be performed or not.
    Returns True if there is at least one analysis in a non-detached state
    and all analyses in a non-detached state are in "verified" status.
    """
    # Discard detached analyses
    analyses = analysis_request.getAnalyses(full_objects=True)
    # Materialize the list so it can be iterated more than once (filter()
    # returns a one-shot iterator on Python 3)
    analyses = list(filter(
        lambda an: api.get_workflow_status_of(an) not in
        ANALYSIS_DETACHED_STATES, analyses))

    # If not all analyses are for internal use, rely on "regular" analyses
    internals = map(IInternalUse.providedBy, analyses)
    omit_internals = not all(internals)

    analyses_ready = False
    for analysis in analyses:
        # Omit analyses for internal use
        if omit_internals and IInternalUse.providedBy(analysis):
            continue

        # All analyses must be in verified (or further) status
        if not IVerified.providedBy(analysis):
            return False

        analyses_ready = True
    return analyses_ready
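
For illustration only, here is a self-contained sketch of the rule this guard encodes, with plain tuples standing in for real analysis objects; the helper name, the tuple layout and the list of detached states are assumptions, not senaite.core API:

ANALYSIS_DETACHED_STATES = ["cancelled", "retracted", "rejected"]

def sketch_guard_verify(analyses):
    """analyses: list of (workflow_state, is_verified, is_internal) tuples"""
    # Discard detached analyses
    analyses = [a for a in analyses if a[0] not in ANALYSIS_DETACHED_STATES]
    # If not all remaining analyses are for internal use, skip the internal ones
    omit_internals = not all(internal for _, _, internal in analyses)
    ready = False
    for _state, verified, internal in analyses:
        if omit_internals and internal:
            continue
        if not verified:
            return False
        ready = True
    return ready

# One verified regular analysis plus one retracted one -> True
print(sketch_guard_verify([("verified", True, False), ("retracted", False, False)]))
# Any unverified regular analysis blocks the transition -> False
print(sketch_guard_verify([("verified", True, False), ("to_be_verified", False, False)]))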
Code Example #2
def guard_verify(obj):
    """Returns True if 'verify' transition can be applied to the Worksheet
    passed in. This is, returns true if all the analyses assigned
    have already been verified. Those analyses that are in an inactive state
    (cancelled, inactive) are dismissed, but at least one analysis must be in
    an active state (and verified), otherwise always return False.
    Note this guard depends entirely on the current status of the children
    :returns: true or false
    """

    analyses = obj.getAnalyses()
    if not analyses:
        # An empty worksheet cannot be verified
        return False

    can_verify = False
    for analysis in analyses:
        # Dismiss analyses that are not active
        if not api.is_active(analysis):
            continue
        # Dismiss analyses that have been rejected or retracted
        if api.get_workflow_status_of(analysis) in ["rejected", "retracted"]:
            continue
        # Worksheet cannot be verified if there is one analysis not verified
        can_verify = IVerified.providedBy(analysis)
        if not can_verify:
            # No need to look further
            return False

    # This prevents the verification of the worksheet if all its analyses are in
    # a detached status (rejected, retracted or cancelled)
    return can_verify
Code Example #3
def update_ast_analysis(analysis, antibiotics, remove=False):
    # There is nothing to do if the analysis has been verified
    analysis = api.get_object(analysis)
    if IVerified.providedBy(analysis):
        return

    # Convert antibiotics to interim fields
    keyword = analysis.getKeyword()
    # Materialize the mapped list (map() returns a one-shot iterator on
    # Python 3 and the result is consumed more than once below)
    interim_fields = list(map(lambda ab: to_interim(keyword, ab), antibiotics))

    # Get the analysis interim fields
    an_interims = copy.deepcopy(analysis.getInterimFields()) or []
    an_keys = sorted(map(lambda i: i.get("keyword"), an_interims))

    # Remove non-specified antibiotics
    if remove:
        in_keys = list(map(lambda i: i.get("keyword"), interim_fields))
        an_interims = list(filter(
            lambda a: a["keyword"] in in_keys, an_interims))

    # Keep analysis' original antibiotics
    abx = filter(lambda a: a["keyword"] not in an_keys, interim_fields)
    an_interims.extend(abx)

    # Is there any difference?
    new_keys = sorted(map(lambda i: i.get("keyword"), an_interims))
    if new_keys == an_keys:
        # No changes
        return

    # If no antibiotics, remove the analysis
    if remove and not an_interims:
        sample = analysis.getRequest()
        sample._delObject(api.get_id(analysis))
        return

    if ISubmitted.providedBy(analysis):
        # Analysis has been submitted already, retract
        succeed, message = wf.doActionFor(analysis, "retract")
        if not succeed:
            path = api.get_path(analysis)
            logger.error("Cannot retract analysis '{}'".format(path))
            return

    # Assign the antibiotics
    analysis.setInterimFields(an_interims)

    # Compute all combinations of interim/antibiotic and possible result, and
    # generate the result options for this analysis (the "Result" field is
    # never displayed and is only used for reporting)
    result_options = get_result_options(analysis)
    analysis.setResultOptions(result_options)

    # Apply the IASTAnalysis marker interface (just in case)
    alsoProvides(analysis, IASTAnalysis)

    analysis.reindexObject()
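
The heart of update_ast_analysis is a keyword-level diff between the analysis' current interim fields and the incoming antibiotics; the following standalone sketch reproduces that diff with plain dictionaries (the keywords are invented for the example):

current = [{"keyword": "AMP"}, {"keyword": "CIP"}]
incoming = [{"keyword": "CIP"}, {"keyword": "GEN"}]

an_keys = sorted(i.get("keyword") for i in current)
# Keep the original antibiotics and append only the ones not present yet
merged = list(current) + [i for i in incoming if i["keyword"] not in an_keys]

new_keys = sorted(i.get("keyword") for i in merged)
if new_keys != an_keys:
    print("interim fields changed:", new_keys)  # ['AMP', 'CIP', 'GEN']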
Code Example #4
def is_verified_or_verifiable(analysis):
    """Returns whether the analysis is verifiable or has already been verified
    """
    if IVerified.providedBy(analysis):
        return True
    if wf.isTransitionAllowed(analysis, "verify"):
        return True
    if wf.isTransitionAllowed(analysis, "multi_verify"):
        return True
    return False
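
Since the three checks are independent and any one of them suffices, the same helper can be written more compactly with any(); an equivalent sketch using only the calls already shown above:

def is_verified_or_verifiable(analysis):
    """Returns whether the analysis is verifiable or has already been verified
    """
    return IVerified.providedBy(analysis) or any(
        wf.isTransitionAllowed(analysis, action)
        for action in ("verify", "multi_verify"))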
Code Example #5
File: guards.py  Project: sbppy/senaite.core
def guard_verify(analysis_request):
    """Returns whether the transition "verify" can be performed or not.
    Returns True if there is at least one analysis in a non-detached state
    and all analyses in a non-detached state are in "verified" state.
    """
    analyses_ready = False
    for analysis in analysis_request.getAnalyses():
        analysis = api.get_object(analysis)
        analysis_status = api.get_workflow_status_of(analysis)
        if analysis_status in ANALYSIS_DETACHED_STATES:
            continue
        # All analyses must be in verified (or further) status
        if not IVerified.providedBy(analysis):
            return False
        analyses_ready = True
    return analyses_ready
Code Example #6
def guard_retract(analysis):
    """ Return whether the transition "retract" can be performed or not
    """
    # Cannot retract if there are dependents that cannot be retracted
    if not is_transition_allowed(analysis.getDependents(), "retract"):
        return False

    dependencies = analysis.getDependencies()
    if not dependencies:
        return True

    # Cannot retract if all dependencies have been verified
    if all(map(lambda an: IVerified.providedBy(an), dependencies)):
        return False

    return True
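
In other words, an analysis with dependencies can only be retracted while at least one of those dependencies is still unverified. A minimal sketch of that decision with booleans standing in for the real objects (the helper and its arguments are hypothetical):

def sketch_guard_retract(dependents_retractable, dependencies_verified):
    """dependents_retractable: bool; dependencies_verified: list of bools"""
    if not dependents_retractable:
        return False
    if not dependencies_verified:
        return True
    # Cannot retract once every dependency has been verified
    return not all(dependencies_verified)

print(sketch_guard_retract(True, []))             # True: no dependencies
print(sketch_guard_retract(True, [True, False]))  # True: one dependency still unverified
print(sketch_guard_retract(True, [True, True]))   # False: all dependencies verified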
Code Example #7
def after_rollback_to_receive(analysis_request):
    """Function triggered after "rollback to receive" transition is performed
    """
    if IVerified.providedBy(analysis_request):
        noLongerProvides(analysis_request, IVerified)
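
noLongerProvides is the inverse of zope.interface's alsoProvides, which the other examples use to apply the marker; a small standalone sketch of that pairing (the interface and class names are made up):

from zope.interface import Interface, alsoProvides, noLongerProvides

class IVerifiedSketch(Interface):
    """Stand-in for senaite's IVerified marker"""

class Sample(object):
    pass

sample = Sample()
alsoProvides(sample, IVerifiedSketch)
print(IVerifiedSketch.providedBy(sample))   # True
noLongerProvides(sample, IVerifiedSketch)
print(IVerifiedSketch.providedBy(sample))   # False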
Code Example #8
File: panel.py  Project: senaite/senaite.ast
def can_add_analyses(self):
    """Returns whether the status of the context allows adding analyses or not
    """
    if IVerified.providedBy(self.context):
        return False
    return api.is_active(self.context)
Code Example #9
def is_verified(self):
    """Checks if the AR is verified
    """
    return IVerified.providedBy(self.context)
Code Example #10
File: v01_03_003.py  Project: rollomix92/senaite.core
def remove_cascaded_analyses_of_root_samples(portal):
    """Removes Analyses from Root Samples that belong to Partitions

    https://github.com/senaite/senaite.core/issues/1504
    """
    logger.info("Removing cascaded analyses from Root Samples...")

    # Query all root Samples
    query = {
        "isRootAncestor": True,
        "sort_on": "created",
        "sort_order": "ascending",
    }
    root_samples = api.search(query, "bika_catalog_analysisrequest_listing")
    total = len(root_samples)
    logger.info("{} Samples to check... ".format(total))

    to_clean = []

    for num, brain in enumerate(root_samples):
        logger.debug("Checking Root Sample {}/{}".format(num + 1, total))

        # No Partitions, continue...
        if not brain.getDescendantsUIDs:
            continue

        # get the root sample
        root_sample = api.get_object(brain)
        # get the contained analyses of the root sample
        root_analyses = root_sample.objectIds(spec=["Analysis"])

        # Mapping of cascaded Analysis -> Partition
        analysis_mapping = {}

        # check if a root analysis is located as well in one of the partitions
        for partition in root_sample.getDescendants():
            # get the contained analyses of the partition
            part_analyses = partition.objectIds(spec=["Analysis"])
            # filter analyses that cascade root analyses
            cascaded = filter(lambda an: an in root_analyses, part_analyses)
            # keep a mapping of analysis -> partition
            for analysis in cascaded:
                analysis_mapping[analysis] = partition

        if analysis_mapping:
            to_clean.append((root_sample, analysis_mapping))

    # count the cases for each condition
    case_counter = defaultdict(int)

    # cleanup cascaded analyses
    # mapping maps the analysis id -> partition
    for sample, mapping in to_clean:

        # go through the cascaded analyses and decide if the cascaded analysis
        # should be removed from (a) the root sample or (b) the partition.

        for analysis_id, partition in mapping.items():

            # analysis from the root sample
            root_an = sample[analysis_id]
            # WF state from the root sample analysis
            root_an_state = api.get_workflow_status_of(root_an)

            # analysis from the partition sample
            part_an = partition[analysis_id]
            # WF state from the partition sample analysis
            part_an_state = api.get_workflow_status_of(part_an)

            case_counter["{}_{}".format(root_an_state, part_an_state)] += 1

            # both analyses have the same WF state
            if root_an_state == part_an_state:
                # -> remove the analysis from the root sample
                sample._delObject(analysis_id)
                logger.info(
                    "Remove analysis '{}' in state '{}' from sample {}: {}".
                    format(analysis_id, root_an_state, api.get_id(sample),
                           api.get_url(sample)))

            # both are in verified/published state
            elif IVerified.providedBy(root_an) and IVerified.providedBy(
                    part_an):
                root_an_result = root_an.getResult()
                part_an_result = part_an.getResult()
                if root_an_result == part_an_result:
                    # remove the root analysis
                    sample._delObject(analysis_id)
                    logger.info(
                        "Remove analysis '{}' in state '{}' from sample {}: {}"
                        .format(analysis_id, root_an_state, api.get_id(sample),
                                api.get_url(sample)))
                else:
                    # -> unsolvable edge case
                    #    display an error message
                    logger.error("Analysis '{}' of root sample in state '{}' "
                                 "and Analysis of partition in state {}. "
                                 "Please fix manually: {}".format(
                                     analysis_id, root_an_state, part_an_state,
                                     api.get_url(sample)))

            # root analysis is in invalid state
            elif root_an_state in ["rejected", "retracted"]:
                # -> probably the retest was automatically created in the
                #    parent instead of the partition
                pass

            # partition analysis is in invalid state
            elif part_an_state in ["rejected", "retracted"]:
                # -> probably the retest was automatically created in the
                #    parent instead of the partition
                pass

            # root analysis was submitted, but not the partition analysis
            elif ISubmitted.providedBy(
                    root_an) and not ISubmitted.providedBy(part_an):
                # -> remove the analysis from the partition
                partition._delObject(analysis_id)
                logger.info(
                    "Remove analysis '{}' in state '{}' from partition {}: {}".
                    format(analysis_id, part_an_state, api.get_id(partition),
                           api.get_url(partition)))

            # partition analysis was submitted, but not the root analysis
            elif ISubmitted.providedBy(
                    part_an) and not ISubmitted.providedBy(root_an):
                # -> remove the analysis from the root sample
                sample._delObject(analysis_id)
                logger.info(
                    "Remove analysis '{}' in state '{}' from sample {}: {}".
                    format(analysis_id, root_an_state, api.get_id(sample),
                           api.get_url(sample)))

            # inconsistent state
            else:
                logger.warning(
                    "Can not handle analysis '{}' located in '{}' (state {}) and '{}' (state {})"
                    .format(analysis_id, repr(sample), root_an_state,
                            repr(partition), part_an_state))

    logger.info("Removing cascaded analyses from Root Samples... [DONE]")

    logger.info("State Combinations (root_an_state, part_an_state): {}".format(
        sorted(case_counter.items(), key=itemgetter(1), reverse=True)))
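
The closing summary relies on collections.defaultdict and operator.itemgetter for the tally; a standalone sketch of that counting pattern with invented state names:

from collections import defaultdict
from operator import itemgetter

case_counter = defaultdict(int)
pairs = [("verified", "verified"), ("retracted", "verified"),
         ("verified", "verified")]
for root_state, part_state in pairs:
    case_counter["{}_{}".format(root_state, part_state)] += 1

# Most frequent combination first
print(sorted(case_counter.items(), key=itemgetter(1), reverse=True))
# [('verified_verified', 2), ('retracted_verified', 1)]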