Example #1
    def do_action(self, action, objects):
        """Performs the workflow transition passed in and returns the list of
        objects that have been successfully transitioned
        """
        transitioned = []
        ActionHandlerPool.get_instance().queue_pool()
        for obj in objects:
            obj = api.get_object(obj)
            success, message = do_action_for(obj, action)
            if success:
                transitioned.append(obj)
        ActionHandlerPool.get_instance().resume()
        return transitioned
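Note that if any transition raises, resume() is never reached and the pool stays queued. A minimal hardening sketch, assuming the same senaite-style API shown above (do_action_safely is a hypothetical name):

    def do_action_safely(self, action, objects):
        """Hypothetical variant of do_action: guarantees the actions pool
        is resumed even when a transition raises
        """
        transitioned = []
        pool = ActionHandlerPool.get_instance()
        pool.queue_pool()
        try:
            for obj in objects:
                obj = api.get_object(obj)
                success, message = do_action_for(obj, action)
                if success:
                    transitioned.append(obj)
        finally:
            # Always balance queue_pool() with resume()
            pool.resume()
        return transitioned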
Example #2
    def submitTransition(self, action, came_from, items):
        """Performs the action's transition for the specified items

        Returns (numtransitions, destination), where:
        - numtransitions: the number of objects successfully transitioned,
            or 0 if none could be transitioned
        - destination: the destination url to be loaded immediately
        """
        transitioned = []
        actions = ActionHandlerPool.get_instance()
        actions.queue_pool()
        for item in items:
            success, message = doActionFor(item, action)
            if success:
                transitioned.append(item.UID())
            else:
                self.addPortalMessage(message, 'error')
        actions.resume()

        # automatic label printing
        dest = None
        auto_stickers_action = self.portal.bika_setup.getAutoPrintStickers()
        if transitioned and action == auto_stickers_action:
            self.request.form['uids'] = transitioned
            self.workflow_action_print_stickers()
            dest = self.destination_url

        return len(transitioned), dest
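A sketch of how a caller inside the same view might consume the returned tuple; the "receive" action, came_from and items are assumptions for illustration:

# Hypothetical caller: redirect only when a destination was decided
count, dest = self.submitTransition("receive", came_from, items)
if dest is not None:
    self.request.response.redirect(dest)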
Example #3
def create_retest(ar):
    """Creates a retest (Analysis Request) from an invalidated Analysis Request
    :param ar: The invalidated Analysis Request
    :type ar: IAnalysisRequest
    :rtype: IAnalysisRequest
    """
    if not ar:
        raise ValueError("Source Analysis Request cannot be None")

    if not IAnalysisRequest.providedBy(ar):
        raise ValueError("Type not supported: {}".format(repr(type(ar))))

    if ar.getRetest():
        # Do not allow the creation of another retest!
        raise ValueError("Retest already set")

    if not ar.isInvalid():
        # Analysis Request must be in 'invalid' state
        raise ValueError("Cannot do a retest from an invalid Analysis Request"
                         .format(repr(ar)))

    # 0. Open the actions pool
    actions_pool = ActionHandlerPool.get_instance()
    actions_pool.queue_pool()

    # 1. Create the Retest (Analysis Request)
    ignore = ['Analyses', 'DatePublished', 'Invalidated', 'Sample']
    retest = _createObjectByType("AnalysisRequest", ar.aq_parent, tmpID())
    retest.setSample(ar.getSample())
    copy_field_values(ar, retest, ignore_fieldnames=ignore)
    renameAfterCreation(retest)

    # 2. Copy the analyses from the source
    intermediate_states = ['retracted', 'reflexed']
    for an in ar.getAnalyses(full_objects=True):
        if api.get_workflow_status_of(an) in intermediate_states:
            # Exclude intermediate analyses
            continue

        nan = _createObjectByType("Analysis", retest, an.getKeyword())

        # Make a copy
        ignore_fieldnames = ['DataAnalysisPublished']
        copy_field_values(an, nan, ignore_fieldnames=ignore_fieldnames)
        nan.unmarkCreationFlag()
        push_reindex_to_actions_pool(nan)

    # 3. Assign the source to retest
    retest.setInvalidated(ar)

    # 4. Transition the retest to "sample_received"!
    changeWorkflowState(retest, 'bika_ar_workflow', 'sample_received')

    # 5. Reindex and other stuff
    push_reindex_to_actions_pool(retest)
    push_reindex_to_actions_pool(retest.aq_parent)

    # 6. Resume the actions pool
    actions_pool.resume()
    return retest
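With an invalidated Analysis Request at hand, usage reduces to a single call. A sketch, assuming ar is already invalid and that a getInvalidated accessor mirrors the setInvalidated mutator used above:

# Hypothetical usage: ar is an invalidated Analysis Request
retest = create_retest(ar)
# The retest keeps a reference to the source it supersedes
assert retest.getInvalidated() == ar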
Example #4
    def do_action(self, action, objects):
        """Performs the workflow transition passed in and returns the list of
        objects that have been successfully transitioned
        """

        start = time.time()
        transitioned = []
        ActionHandlerPool.get_instance().queue_pool()
        for obj in objects:
            obj = api.get_object(obj)
            success, message = do_action_for(obj, action)
            if success:
                transitioned.append(obj)
        ActionHandlerPool.get_instance().resume()

        end = time.time()
        logger.info("Action '{}' for {} objects took {:.2f}s".format(
            action, len(transitioned), end - start))
        return transitioned
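The timing boilerplate can be factored out of the transition loop. A sketch with a hypothetical timed_action decorator, assuming the same logger as in the example:

import time
from functools import wraps

def timed_action(func):
    """Hypothetical decorator: factors out the timing/logging above"""
    @wraps(func)
    def wrapper(self, action, objects):
        start = time.time()
        result = func(self, action, objects)
        logger.info("Action '{}': {} objects transitioned in {:.2f}s"
                    .format(action, len(result), time.time() - start))
        return result
    return wrapper

do_action could then simply be decorated with @timed_action, keeping the transition loop free of timing code.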
Example #5
    def do_unassign(self, analysis_uids):
        actions = ActionHandlerPool.get_instance()
        actions.queue_pool()
        # Remove duplicates first
        query = dict(UID=analysis_uids, portal_type="DuplicateAnalysis")
        for brain in api.search(query, CATALOG_ANALYSIS_LISTING):
            doActionFor(api.get_object(brain), "unassign")
        # Now remove the rest
        query = dict(UID=analysis_uids)
        for brain in api.search(query, CATALOG_ANALYSIS_LISTING):
            doActionFor(api.get_object(brain), "unassign")
        actions.resume()

        message = _("Changes saved.")
        self.context.plone_utils.addPortalMessage(message, 'info')
        self.destination_url = self.context.absolute_url()
        self.request.response.redirect(self.destination_url)
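Note that the second query matches the duplicate analyses again. If re-visiting them is undesirable, the UIDs handled in the first pass could be excluded; a sketch under that assumption, using the api.get_uid helper seen in Example #7:

    def do_unassign(self, analysis_uids):
        # Hypothetical variant: avoid visiting duplicate analyses twice
        actions = ActionHandlerPool.get_instance()
        actions.queue_pool()
        done = set()
        query = dict(UID=analysis_uids, portal_type="DuplicateAnalysis")
        for brain in api.search(query, CATALOG_ANALYSIS_LISTING):
            doActionFor(api.get_object(brain), "unassign")
            done.add(api.get_uid(brain))
        # Only query the UIDs that were not handled in the first pass
        remaining = [uid for uid in analysis_uids if uid not in done]
        if remaining:
            query = dict(UID=remaining)
            for brain in api.search(query, CATALOG_ANALYSIS_LISTING):
                doActionFor(api.get_object(brain), "unassign")
        actions.resume()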
Example #6
    def do_assign(self, analysis_uids):
        actions = ActionHandlerPool.get_instance()
        actions.queue_pool()
        # We retrieve the analyses from the database sorted by AR ID
        # ascending, so the positions of the ARs inside the WS are
        # consistent with the order of the ARs
        catalog = api.get_tool(CATALOG_ANALYSIS_LISTING)
        brains = catalog({
            "UID": analysis_uids,
            "sort_on": "getRequestID"})

        # Now, we need the analyses within a request ID to be sorted by
        # sortkey (sortable_title index), so it will appear in the same
        # order as they appear in Analyses list from AR view
        curr_arid = None
        curr_brains = []
        sorted_brains = []
        for brain in brains:
            arid = brain.getRequestID
            if curr_arid != arid:
                # Sort the brains we've collected until now, that
                # belong to the same Analysis Request
                curr_brains.sort(key=attrgetter("getPrioritySortkey"))
                sorted_brains.extend(curr_brains)
                curr_arid = arid
                curr_brains = []

            # Still inside the same AR: collect the brain
            curr_brains.append(brain)

        # Sort the last set of brains we've collected
        curr_brains.sort(key=attrgetter("getPrioritySortkey"))
        sorted_brains.extend(curr_brains)

        # Add analyses into the worksheet
        self.context.addAnalyses(sorted_brains)
        actions.resume()

        self.destination_url = self.context.absolute_url()
        self.request.response.redirect(self.destination_url)
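Since the catalog already returns the brains ordered by getRequestID, the grouping loop above is equivalent to a single sort on a composite key. A minimal sketch, assuming both getRequestID and getPrioritySortkey are available as brain metadata (attrgetter is already imported in the example):

# Hypothetical one-line replacement for the grouping loop
sorted_brains = sorted(
    brains, key=attrgetter("getRequestID", "getPrioritySortkey"))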
Example #7
def create_partition(analysis_request,
                     request,
                     analyses,
                     sample_type=None,
                     container=None,
                     preservation=None,
                     skip_fields=None,
                     internal_use=True):
    """
    Creates a partition for the analysis_request (primary) passed in
    :param analysis_request: uid/brain/object of IAnalysisRequest type
    :param request: the current request object
    :param analyses: uids/brains/objects of IAnalysis type
    :param sample_type: uid/brain/object of SampleType
    :param container: uid/brain/object of Container
    :param preservation: uid/brain/object of Preservation
    :param skip_fields: names of fields to be skipped on copy from primary
    :param internal_use: whether the partition is flagged for internal use
    :return: the new partition
    """
    partition_skip_fields = [
        "Analyses",
        "Attachment",
        "Client",
        "DetachedFrom",
        "Profile",
        "Profiles",
        "RejectionReasons",
        "Remarks",
        "ResultsInterpretation",
        "ResultsInterpretationDepts",
        "Sample",
        "Template",
        "creation_date",
        "id",
        "modification_date",
        "ParentAnalysisRequest",
        "PrimaryAnalysisRequest",
    ]
    if skip_fields:
        partition_skip_fields.extend(skip_fields)
        partition_skip_fields = list(set(partition_skip_fields))

    # Copy field values from the primary analysis request
    ar = api.get_object(analysis_request)
    record = fields_to_dict(ar, partition_skip_fields)

    # Update with values that are partition-specific
    record.update({
        "InternalUse": internal_use,
        "ParentAnalysisRequest": api.get_uid(ar),
    })
    if sample_type is not None:
        record["SampleType"] = sample_type and api.get_uid(sample_type) or ""
    if container is not None:
        record["Container"] = container and api.get_uid(container) or ""
    if preservation is not None:
        record["Preservation"] = preservation and api.get_uid(
            preservation) or ""

    # Create the Partition
    client = ar.getClient()
    analyses = list(set(map(api.get_object, analyses)))
    services = map(lambda an: an.getAnalysisService(), analyses)

    # Populate the root's ResultsRanges to partitions
    results_ranges = ar.getResultsRange() or []
    partition = create_analysisrequest(client,
                                       request=request,
                                       values=record,
                                       analyses=services,
                                       results_ranges=results_ranges)

    # Reindex Parent Analysis Request
    ar.reindexObject(idxs=["isRootAncestor"])

    # Manually set the Date Received to match with its parent. This is
    # necessary because create_analysisrequest calls processForm, so
    # DateReceived is not set yet (the partition has not been received)
    partition.setDateReceived(ar.getDateReceived())
    partition.reindexObject(idxs=["getDateReceived"])

    # Force partition to same status as the primary
    status = api.get_workflow_status_of(ar)
    changeWorkflowState(partition, "bika_ar_workflow", status)
    if IReceived.providedBy(ar):
        alsoProvides(partition, IReceived)

    # And initialize the analyses the partition contains. This is required
    # here because the transition "initialize" of analyses rely on a guard,
    # so the initialization can only be performed when the sample has been
    # received (DateReceived is set)
    ActionHandlerPool.get_instance().queue_pool()
    for analysis in partition.getAnalyses(full_objects=True):
        doActionFor(analysis, "initialize")
    ActionHandlerPool.get_instance().resume()
    return partition
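A hypothetical call, assuming ar is a received primary Analysis Request, request is the current HTTP request, and an1/an2 are analyses to split off (the getParentAnalysisRequest accessor is inferred from the ParentAnalysisRequest field set above):

# Sketch: move two analyses into an internal-use partition
partition = create_partition(ar, request, [an1, an2], internal_use=True)
# The partition should point back to its primary
assert partition.getParentAnalysisRequest() == ar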