Example #1
def after_submit(analysis):
    """Method triggered after a 'submit' transition for the analysis passed in
    is performed. Promotes the submit transition to the Worksheet to which the
    analysis belongs. Note that for the worksheet there is already a guard
    that assures the transition to the worksheet will only be performed if all
    analyses within the worksheet have already been transitioned.
    This function is called automatically by
    bika.lims.workflow.AfterTransitionEventHandler
    """
    # Promote the transition to the analyses this analysis depends on
    promote_to_dependencies(analysis, "submit")

    # TODO: REFLEX TO REMOVE
    # Do all the reflex rules process
    if IRequestAnalysis.providedBy(analysis):
        analysis._reflex_rule_process('submit')

    # Promote transition to worksheet
    ws = analysis.getWorksheet()
    if ws:
        doActionFor(ws, 'submit')

    # Promote transition to Analysis Request
    if IRequestAnalysis.providedBy(analysis):
        doActionFor(analysis.getRequest(), 'submit')
        reindex_request(analysis)
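
The promotion calls above ignore the value returned by doActionFor. A minimal sketch of a stricter variant, assuming doActionFor is importable from bika.lims.workflow and returns a (success, message) tuple as Examples #5 and #29 unpack it, and that bika.lims exposes its usual package-level logger:

from bika.lims import logger
from bika.lims.workflow import doActionFor


def promote(obj, action_id):
    """Try to promote a transition to obj; log when a guard blocks it."""
    if obj is None:
        return False
    success, message = doActionFor(obj, action_id)
    if not success:
        logger.info("Transition '%s' not promoted to %s: %s"
                    % (action_id, obj.getId(), message))
    return success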
Example #2
    def __call__(self):

        if "viewlet_submitted" in self.request.form:
            data = {}
            try:
                data = self.validate_form_inputs()
            except ValidationError as e:
                self.form_error(e.message)

            # Validation is complete, now set local variables from form inputs.
            aliquots = []
            j = 0
            workflow_enabled = self.context.bika_setup.getSamplingWorkflowEnabled()
            for x in range(data['seq_start'], data['seq_start'] + data['count']):
                aliquot = create_sample(self.context, self.request, data, j, x)
                partition = create_samplepartition(aliquot, {'services': [], 'part_id': aliquot.getId() + "-P"})
                if not workflow_enabled:
                    doActionFor(aliquot, 'sample_due')
                    doActionFor(partition, 'sample_due')
                # aliquot.setLinkedSample(data['biospecimens'][j].UID())
                aliquot.reindexObject()
                if (x-data['seq_start']+1) % data['aliquot_count'] == 0 and (x-data['seq_start']+1) != 0:
                    j += 1
                aliquots.append(aliquot)

            # store the created biospecimens
            assign_items_to_storages(self.context, aliquots, data['storages'])

            msg = u'%s Aliquots created.' % len(aliquots)
            self.context.plone_utils.addPortalMessage(msg)
            self.request.response.redirect(self.context.absolute_url() + '/aliquots')
Example #3
def ObjectInitializedEventHandler(instance, event):
    """called an object is created
    """
    if instance.portal_type == 'Sample':

        if not instance.getField('Barcode').get(instance):
            instance.getField('Barcode').set(instance, instance.getId())

        create_samplepartition(instance, {
            'services': [],
            'part_id': instance.getId() + "-P"
        })

        location = instance.getStorageLocation()
        if hasattr(instance, 'api_source'):
            if instance.api_source == "odk":  #special case for field collecdted odk samples
                doActionFor(instance, 'sample_due')
                if location:
                    doActionFor(location, 'reserve')
                    instance.update_box_status(location)
            delattr(instance, 'api_source')
        else:
            if float(instance.getField('Volume').get(instance)) > 0:
                doActionFor(instance, 'sample_due')
                doActionFor(instance, 'receive')

            if location:
                doActionFor(location, 'occupy')
                instance.update_box_status(location)
Example #4
 def workflow_script_sample_due(self):
     if skip(self, "sample_due"):
         return
     # All associated AnalysisRequests are also transitioned
     for ar in self.getAnalysisRequests():
         doActionFor(ar, "sample_due")
         ar.reindexObject()
Example #5
 def workflow_script_verify(self):
     if skip(self, "verify"):
         return
     workflow = getToolByName(self, 'portal_workflow')
     self.reindexObject(idxs=[
         "review_state",
     ])
     if not "verify all analyses" in self.REQUEST['workflow_skiplist']:
         # verify all analyses in this self.
         analyses = self.getAnalyses()
         for analysis in analyses:
             state = workflow.getInfoFor(analysis, 'review_state', '')
             if state != 'to_be_verified':
                 continue
             if (hasattr(analysis, 'getNumberOfVerifications') and hasattr(
                     analysis, 'getNumberOfRequiredVerifications')):
                 # For the 'verify' transition to (effectively) take place,
                 # we need to check if the required number of verifications
                 # for the analysis is, at least, the number of verifications
                 # performed previously +1
                 success = True
                 revers = analysis.getNumberOfRequiredVerifications()
                 nmvers = analysis.getNumberOfVerifications()
                 username = getToolByName(
                     self, 'portal_membership').getAuthenticatedMember(
                     ).getUserName()
                 analysis.addVerificator(username)
                 if revers - nmvers <= 1:
                     success, message = doActionFor(analysis, 'verify')
                     if not success:
                         # If failed, delete last verificator.
                         analysis.deleteLastVerificator()
             else:
                 doActionFor(analysis, 'verify')
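
The arithmetic that gates the 'verify' call above is easy to misread: the transition is only attempted when this verification would be the last one still required. A self-contained restatement of that check:

def is_last_required_verification(required, performed):
    """True when one more verification reaches the required count."""
    return required - performed <= 1


assert is_last_required_verification(required=1, performed=0)
assert is_last_required_verification(required=2, performed=1)
assert not is_last_required_verification(required=3, performed=1)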
Example #6
def after_retest(analysis):
    """Function triggered before 'retest' transition takes place. Creates a
    copy of the current analysis
    """
    # When an analysis is retested, it automatically transitions to verified,
    # so we need to mark the analysis as such
    alsoProvides(analysis, IVerified)

    def verify_and_retest(relative):
        if not ISubmitted.providedBy(relative):
            # Result not yet submitted, no need to create a retest
            return

        # Apply the transition manually, but only if analysis can be verified
        doActionFor(relative, "verify")

        # Create the retest
        create_retest(relative)

    # Retest and auto-verify relatives, from bottom to top
    relatives = list(reversed(analysis.getDependents(recursive=True)))
    relatives.extend(analysis.getDependencies(recursive=True))
    map(verify_and_retest, relatives)

    # Create the retest
    create_retest(analysis)

    # Try to rollback the Analysis Request
    if IRequestAnalysis.providedBy(analysis):
        doActionFor(analysis.getRequest(), "rollback_to_receive")
        reindex_request(analysis)
Example #7
    def removeAnalysis(self, analysis):
        """ delete an analyses from the worksheet and un-assign it
        """
        workflow = getToolByName(self, 'portal_workflow')

        # overwrite saved context UID for event subscriber
        self.REQUEST['context_uid'] = self.UID()
        doActionFor(analysis, 'unassign')

        # remove analysis from context.Analyses *after* unassign,
        # (doActionFor requires worksheet in analysis.getBackReferences)
        Analyses = self.getAnalyses()
        if analysis in Analyses:
            Analyses.remove(analysis)
            self.setAnalyses(Analyses)
            analysis.reindexObject()
        layout = [
            slot for slot in self.getLayout()
            if slot['analysis_uid'] != analysis.UID()
        ]
        self.setLayout(layout)

        if analysis.portal_type == "DuplicateAnalysis":
            self.manage_delObjects(ids=[analysis.id])
        # Reindex the worksheet in order to update its columns
        self.reindexObject()
Example #8
    def __call__(self):
        form = self.request.form

        # Form submit toggle
        form_submitted = form.get("submitted", False)

        # Buttons
        form_preview = form.get("button_preview", False)
        form_create = form.get("button_create", False)
        form_cancel = form.get("button_cancel", False)

        objs = self.get_objects()

        # No ARs selected
        if not objs:
            return self.redirect(message=_("No items selected"),
                                 level="warning")

        # Handle preview
        if form_submitted and form_preview:
            logger.info("*** PREVIEW ***")

        # Handle create
        if form_submitted and form_create:
            logger.info("*** CREATE PARTITIONS ***")

            partitions = []

            # create the partitions
            for partition in form.get("partitions", []):
                primary_uid = partition.get("primary_uid")
                sampletype_uid = partition.get("sampletype_uid")
                analyses_uids = partition.get("analyses")
                if not analyses_uids or not primary_uid:
                    # Cannot create a partition w/o analyses!
                    continue

                partition = self.create_partition(primary_uid, sampletype_uid,
                                                  analyses_uids)
                partitions.append(partition)
                logger.info("Successfully created partition: {}".format(
                    api.get_path(partition)))

                # Force the reception of the partition
                doActionFor(partition, "receive")

            if not partitions:
                # If no partitions were created, show a warning message
                return self.redirect(message=_("No partitions were created"))

            message = _("Created {} partitions: {}".format(
                len(partitions), ", ".join(map(api.get_title, partitions))))
            return self.redirect(message=message)

        # Handle cancel
        if form_submitted and form_cancel:
            logger.info("*** CANCEL ***")
            return self.redirect(message=_("Partitioning canceled"))

        return self.template()
Example #9
    def workflow_action_aliquot_receive(self):
        form = self.request.form
        selected_aliquots = WorkflowAction._get_selected_items(self)
        aliquots = []
        for uid in selected_aliquots.keys():
            if not form["AliquotType"][0][uid] or not form["Volume"][0][uid] or not form["Unit"][0][uid]:
                continue
            try:
                aliquot = selected_aliquots.get(uid, None)
                aliquot.getField("SampleType").set(aliquot, form["AliquotType"][0][uid])
                aliquot.getField("Volume").set(aliquot, form["Volume"][0][uid])
                unit = "ml"
                for u in VOLUME_UNITS:
                    if u["ResultValue"] == form["Unit"][0][uid]:
                        unit = u["ResultText"]
                aliquot.getField("Unit").set(aliquot, unit)
                aliquot.reindexObject()
                aliquots.append(aliquot)
            except ReferenceException:
                continue

        message = PMF("Changes saved.")
        self.context.plone_utils.addPortalMessage(message, "info")

        for aliquot in aliquots:
            doActionFor(aliquot, "receive")
            for partition in aliquot.objectValues("SamplePartition"):
                doActionFor(partition, "receive")

        self.destination_url = self.context.absolute_url()
        if self.context.portal_type == "Project":
            self.destination_url += "/aliquots"
        self.request.response.redirect(self.destination_url)
Example #10
 def workflow_script_sample_due(self):
     if skip(self, "sample_due"):
         return
     # All associated AnalysisRequests are also transitioned
     for ar in self.getAnalysisRequests():
         doActionFor(ar, "sample_due")
         ar.reindexObject()
Example #11
def after_reject(obj):
    """Method triggered after a 'reject' transition for the Analysis Request
    passed in is performed. Transitions and sets the rejection reasons to the
    parent Sample. Also transitions the analyses assigned to the AR.
    This function is called automatically by
    bika.lims.workflow.AfterTransitionEventHandler
    :param obj: Analysis Request affected by the transition
    :type obj: AnalysisRequest
    """
    sample = obj.getSample()
    if not sample:
        return

    if getCurrentState(sample) != 'rejected':
        doActionFor(sample, 'reject')
        reasons = obj.getRejectionReasons()
        sample.setRejectionReasons(reasons)

    # Deactivate all analyses from this Analysis Request
    ans = obj.getAnalyses(full_objects=True)
    for analysis in ans:
        doActionFor(analysis, 'reject')

    if obj.bika_setup.getNotifyOnRejection():
        # Import here to break a circular import
        from bika.lims.utils.analysisrequest import notify_rejection
        # Notify the Client about the Rejection.
        notify_rejection(obj)
Example #12
def force_receive(analysis_request):
    actions = [
        "no_sampling_workflow", "send_to_lab", "deliver", "send_to_poc",
        "receive"
    ]
    for action in actions:
        doActionFor(analysis_request, action)
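
force_receive fires a fixed chain of transitions and relies on the workflow guards to skip those that do not apply to the current state. A sketch of a variant that reports which transitions actually fired, assuming doActionFor is importable from bika.lims.workflow and returns (success, message) as elsewhere on this page:

from bika.lims.workflow import doActionFor


def force_receive_verbose(analysis_request):
    actions = [
        "no_sampling_workflow", "send_to_lab", "deliver", "send_to_poc",
        "receive"
    ]
    performed = []
    for action in actions:
        success, _ = doActionFor(analysis_request, action)
        if success:
            performed.append(action)
    return performed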
Example #13
def after_attach(obj):
    # TODO Workflow Duplicate Attach - Attach transition is still available?
    # If all analyses on the worksheet have been attached,
    # then attach the worksheet.
    ws = obj.getWorksheet()
    if ws:
        doActionFor(ws, 'attach')
Example #14
File: events.py Project: nassimcha/sencua
def after_retract(reference_analysis):
    """Function triggered after a 'retract' transition for the reference
    analysis passed in is performed. The reference analysis transitions to
    "retracted" state and a new copy of the reference analysis is created
    """
    reference = reference_analysis.getSample()
    service = reference_analysis.getAnalysisService()
    worksheet = reference_analysis.getWorksheet()
    instrument = reference_analysis.getInstrument()
    if worksheet:
        # This is a reference analysis in a worksheet
        slot = worksheet.get_slot_position_for(reference_analysis)
        refgid = reference_analysis.getReferenceAnalysesGroupID()
        ref = worksheet.add_reference_analysis(reference, service, slot,
                                               refgid)
        if not ref:
            logger.warn(
                "Cannot add a retest for reference analysis {} into {}".format(
                    reference_analysis.getId(), worksheet.getId()))
            return

        ref.setRetestOf(reference_analysis)
        ref.setResult(reference_analysis.getResult())
        if instrument:
            ref.setInstrument(instrument)
            instrument.reindexObject()

        # Try to rollback the worksheet to prevent inconsistencies
        wf.doActionFor(worksheet, "rollback_to_open")

    elif instrument:
        # This is an internal calibration test
        instrument.addReferences(reference, [api.get_uid(service)])
        instrument.reindexObject()
Example #15
    def setUp(self):
        super(TestBarcodeEntry, self).setUp()
        login(self.portal, TEST_USER_NAME)
        clients = self.portal.clients
        bs = self.portal.bika_setup
        # @formatter:off
        self.client = self.addthing(clients, 'Client', title='Happy Hills', ClientID='HH')
        contact = self.addthing(self.client, 'Contact', Firstname='Rita', Lastname='Mohale')
        container = self.addthing(bs.bika_containers, 'Container', title='Bottle', capacity="10ml")
        sampletype = self.addthing(bs.bika_sampletypes, 'SampleType', title='Water', Prefix='H2O')
        samplepoint = self.addthing(bs.bika_samplepoints, 'SamplePoint', title='Toilet')
        service = self.addthing(bs.bika_analysisservices, 'AnalysisService', title='Ecoli', Keyword="ECO")
        batch = self.addthing(self.portal.batches, 'Batch', title='B1')
        # Create two Samples, each with a single partition
        self.sample1 = self.addthing(self.client, 'Sample', SampleType=sampletype)
        self.sample2 = self.addthing(self.client, 'Sample', SampleType=sampletype)
        self.addthing(self.sample1, 'SamplePartition', Container=container)
        self.addthing(self.sample2, 'SamplePartition', Container=container)
        # Create an AR
        self.ar1 = self.addthing(self.client, 'AnalysisRequest', Contact=contact,
                                Sample=self.sample1, Analyses=[service], SamplingDate=DateTime())
        # Create a secondary AR - linked to a Batch
        self.ar2 = self.addthing(self.client, 'AnalysisRequest', Contact=contact,
                                Sample=self.sample1, Analyses=[service], SamplingDate=DateTime(),
                                Batch=batch)
        # Create an AR - single AR on sample2
        self.ar3 = self.addthing(self.client, 'AnalysisRequest', Contact=contact,
                                Sample=self.sample2, Analyses=[service], SamplingDate=DateTime())
        # @formatter:on
        wf = getToolByName(self.portal, 'portal_workflow')
        for ar in self.ar1, self.ar2, self.ar3:
            # Set initial AR state
            doActionFor(ar, 'no_sampling_workflow')

        transaction.commit()
Example #16
File: events.py Project: xispa/bika.lims
def after_attach(obj):
    # TODO Workflow Duplicate Attach - Attach transition is still available?
    # If all analyses on the worksheet have been attached,
    # then attach the worksheet.
    ws = obj.getWorksheet()
    if ws:
        doActionFor(ws, 'attach')
Example #17
def after_verify(analysis):
    """
    Method triggered after a 'verify' transition for the analysis passed in
    is performed. Promotes the transition to the Analysis Request and to
    Worksheet (if the analysis is assigned to any)
    This function is called automatically by
    bika.lims.workflow.AfterTransitionEventHandler
    """
    # Promote the transition to the analyses this analysis depends on
    promote_to_dependencies(analysis, "verify")

    # TODO: REFLEX TO REMOVE
    # Do all the reflex rules process
    if IRequestAnalysis.providedBy(analysis):
        analysis._reflex_rule_process('verify')

    # Promote transition to worksheet
    ws = analysis.getWorksheet()
    if ws:
        doActionFor(ws, 'verify')

    # Promote transition to Analysis Request
    if IRequestAnalysis.providedBy(analysis):
        doActionFor(analysis.getRequest(), 'verify')
        reindex_request(analysis)
Example #18
    def add_object_at(self, object_brain_uid, row, column):
        """Adds an sample to the specified position. If the sample is a primary
        (contains partitions) or the sample is a partition, it creates a new
        partition with no analyses and store this partition instead.
        If an object already exists at the given position, return False.
        Otherwise, return True
        """
        if not self.can_add_object(object_brain_uid, row, column):
            return False

        sample = api.get_object(object_brain_uid)
        if sample.isPartition() or sample.getDescendants():
            # If the sample is a partition or contains partitions, we need to
            # create a specific partition for storage, without analyses
            sample = api.create_partition_for_storage(sample)

        stored = super(StorageSamplesContainer, self).add_object_at(sample,
                                                                    row, column)
        if not stored:
            return False

        # Transition the sample to "stored" state
        # TODO check if the sample has a container assigned in BeforeTransition
        # If it does not have a container assigned, change the workflow state
        # to the previous one automatically (integrity-check)
        self.reindexObject(idxs=["get_samples_uids", "is_full"])
        sample = api.get_object(sample)
        wf.doActionFor(sample, "store")
        return stored
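
A hypothetical usage sketch for the method above; 'container' stands for a StorageSamplesContainer instance and 'sample' for a Sample object, catalog brain or UID (api.get_object accepts any of these):

stored = container.add_object_at(sample, row=0, column=0)
if not stored:
    # Position already taken, or the object cannot be added at that position
    raise ValueError("Could not store the sample at position (0, 0)")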
Example #19
    def workflow_action_sample_due(self):
        form = self.request.form
        selected_biospecimens = WorkflowAction._get_selected_items(self)
        biospecimens = []
        for uid in selected_biospecimens.keys():
            if not form['Barcode'][0][uid] or \
                    not form['Type'][0][uid]:
                continue
            try:
                obj = selected_biospecimens.get(uid, None)
                obj.getField('Barcode').set(obj, form['Barcode'][0][uid])
                obj.getField('SampleType').set(obj, form['Type'][0][uid])
                obj.setId(form['Barcode'][0][uid])
                obj.edit(SampleID=obj.getId())
                obj.reindexObject()
                biospecimens.append(obj)
            except ReferenceException:
                continue
        message = PMF("Changes saved.")
        self.context.plone_utils.addPortalMessage(message, 'info')

        for biospecimen in biospecimens:
            doActionFor(biospecimen, 'sample_due')
            for partition in biospecimen.objectValues('SamplePartition'):
                doActionFor(partition, 'sample_due')

        self.destination_url = self.context.absolute_url()
        if form['portal_type'] == 'Kit' or \
                form['portal_type'] == 'SampleBatch':
            self.destination_url = form['view_url']

        self.destination_url += '/biospecimens'
        self.request.response.redirect(self.destination_url)
Example #20
 def workflow_script_verify(self):
     if skip(self, "verify"):
         return
     workflow = getToolByName(self, 'portal_workflow')
     self.reindexObject(idxs=["review_state", ])
     if not "verify all analyses" in self.REQUEST['workflow_skiplist']:
         # verify all analyses in this self.
         analyses = self.getAnalyses()
         for analysis in analyses:
             state = workflow.getInfoFor(analysis, 'review_state', '')
             if state != 'to_be_verified':
                 continue
             if (hasattr(analysis, 'getNumberOfVerifications') and
                 hasattr(analysis, 'getNumberOfRequiredVerifications')):
                 # For the 'verify' transition to (effectively) take place,
                 # we need to check if the required number of verifications
                 # for the analysis is, at least, the number of verifications
                 # performed previously +1
                 success = True
                 revers = analysis.getNumberOfRequiredVerifications()
                 nmvers = analysis.getNumberOfVerifications()
                 analysis.setNumberOfVerifications(nmvers+1)
                 if revers-nmvers <= 1:
                     success, message = doActionFor(analysis, 'verify')
                     if not success:
                         # If failed, restore to the previous number
                         analysis.setNumberOfVerifications(nmvers)
             else:
                 doActionFor(analysis, 'verify')
Example #21
File: events.py Project: xispa/bika.lims
def after_reject(obj):
    """Method triggered after a 'reject' transition for the Analysis Request
    passed in is performed. Transitions and sets the rejection reasons to the
    parent Sample. Also transitions the analyses assigned to the AR.
    This function is called automatically by
    bika.lims.workflow.AfterTransitionEventHandler
    :param obj: Analysis Request affected by the transition
    :type obj: AnalysisRequest
    """
    sample = obj.getSample()
    if not sample:
        return

    if getCurrentState(sample) != 'rejected':
        doActionFor(sample, 'reject')
        reasons = obj.getRejectionReasons()
        sample.setRejectionReasons(reasons)

    # Deactivate all analyses from this Analysis Request
    ans = obj.getAnalyses(full_objects=True)
    for analysis in ans:
        doActionFor(analysis, 'reject')

    if obj.bika_setup.getNotifyOnRejection():
        # Import here to break a circular import
        from bika.lims.utils.analysisrequest import notify_rejection
        # Notify the Client about the Rejection.
        notify_rejection(obj)
Example #22
def after_retract(duplicate_analysis):
    """Function triggered after a 'retract' transition for the duplicate passed
    in is performed. The duplicate transitions to "retracted" state and a new
    copy of the duplicate is created.
    """
    # Rename the analysis to make way for its successor.
    # Support multiple retractions by renaming to *-0, *-1, etc
    parent = duplicate_analysis.aq_parent
    keyword = duplicate_analysis.getKeyword()
    analyses = filter(lambda an: an.getKeyword() == keyword,
                      parent.objectValues("DuplicateAnalysis"))

    # Rename the retracted duplicate
    # https://docs.plone.org/develop/plone/content/rename.html
    # _verifyObjectPaste permission check must be cancelled
    parent._verifyObjectPaste = str
    retracted_id = '{}-{}'.format(keyword, len(analyses))
    # Make sure all persistent objects have _p_jar attribute
    transaction.savepoint(optimistic=True)
    parent.manage_renameObject(duplicate_analysis.getId(), retracted_id)
    delattr(parent, '_verifyObjectPaste')

    # Find out the slot position of the duplicate in the worksheet
    worksheet = duplicate_analysis.getWorksheet()
    if not worksheet:
        logger.warn(
            "Duplicate {} has been retracted, but without worksheet".format(
                duplicate_analysis.getId()))
        return

    dest_slot = worksheet.get_slot_position_for(duplicate_analysis)
    if not dest_slot:
        logger.warn("Duplicate {} has been retracted, but not found in any"
                    "slot of worksheet {}".format(duplicate_analysis.getId(),
                                                  worksheet.getId()))
        return

    # Create a copy (retest) of the duplicate and assign to worksheet
    ref_gid = duplicate_analysis.getReferenceAnalysesGroupID()
    retest = _createObjectByType("DuplicateAnalysis", worksheet, tmpID())
    copy_analysis_field_values(duplicate_analysis, retest)
    retest.setAnalysis(duplicate_analysis.getAnalysis())
    retest.setRetestOf(duplicate_analysis)
    retest.setReferenceAnalysesGroupID(ref_gid)
    retest.setResult(duplicate_analysis.getResult())
    worksheet.addToLayout(retest, dest_slot)
    worksheet.setAnalyses(worksheet.getAnalyses() + [
        retest,
    ])

    # Reindex
    retest.reindexObject(idxs=[
        "getAnalyst", "getWorksheetUID", "isRetest",
        "getReferenceAnalysesGroupID"
    ])
    worksheet.reindexObject(idxs=["getAnalysesUIDs"])

    # Try to rollback the worksheet to prevent inconsistencies
    doActionFor(worksheet, "rollback_to_open")
Example #23
 def update_box_status(self, location):
     box = location.aq_parent
     state = self.portal_workflow.getInfoFor(box, 'review_state')
     free_pos = box.get_free_positions()
     if not free_pos and state == 'available':
         doActionFor(box, 'occupy')
     elif free_pos and state == 'occupied':
         doActionFor(box, 'liberate')
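
A self-contained restatement of the box-status rules above: a full box that is still 'available' gets occupied, a box with free positions that is 'occupied' gets liberated, and anything else is left alone.

def next_box_action(free_positions, review_state):
    if not free_positions and review_state == "available":
        return "occupy"
    if free_positions and review_state == "occupied":
        return "liberate"
    return None


assert next_box_action([], "available") == "occupy"
assert next_box_action([(1, 1)], "occupied") == "liberate"
assert next_box_action([(1, 1)], "available") is None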
Example #24
def _promote_transition(obj, transition_id):
    sample = obj.getSample()
    if sample:
        doActionFor(sample, transition_id)

    parent_ar = obj.getPrimaryAnalysisRequest()
    if parent_ar:
        doActionFor(parent_ar, transition_id)
Example #25
File: events.py Project: xispa/bika.lims
def _promote_transition(obj, transition_id):
    """Promotes the transition passed in to the object's parent
    :param obj: Analysis Request for which the transition has to be promoted
    :param transition_id: Unique id of the transition
    """
    sample = obj.getSample()
    if sample:
        doActionFor(sample, transition_id)
Example #26
 def workflow_script_schedule_sampling(self):
     """
     Runs all the processes needed for this action.
     """
     # transition the related analysis requests
     ars = self.getAnalysisRequests()
     for ar in ars:
         doActionFor(ar, 'schedule_sampling')
Example #27
 def workflow_script_schedule_sampling(self):
     """
     Runs all the processes needed for this action.
     """
     # transition the related analysis requests
     ars = self.getAnalysisRequests()
     for ar in ars:
         doActionFor(ar, 'schedule_sampling')
Example #28
def _promote_transition(obj, transition_id):
    """Promotes the transition passed in to the object's parent
    :param obj: Analysis Request for which the transition has to be promoted
    :param transition_id: Unique id of the transition
    """
    sample = obj.getSample()
    if sample:
        doActionFor(sample, transition_id)
Example #29
def create_analysisrequest(client,
                           request,
                           values,
                           analyses=None,
                           partitions=None,
                           specifications=None,
                           prices=None):
    """This is meant for general use and should do everything necessary to
    create and initialise an AR and any other required auxiliary objects
    (Sample, SamplePartition, Analysis...)
    :param client:
        The container (Client) in which the ARs will be created.
    :param request:
        The current Request object.
    :param values:
        a dict, where keys are AR|Sample schema field names.
    :param analyses:
        Analysis services list.  If specified, augments the values in
        values['Analyses']. May consist of service objects, UIDs, or Keywords.
    :param partitions:
        A list of dictionaries, if specific partitions are required.  If not
        specified, AR's sample is created with a single partition.
    :param specifications:
        These values augment those found in values['Specifications']
    :param prices:
        Allow different prices to be set for analyses.  If not set, prices
        are read from the associated analysis service.
    """
    # Don't pollute the dict param passed in
    values = dict(values.items())

    # Create new sample or locate the existing for secondary AR
    secondary = False
    # TODO Sample Cleanup - Manage secondary ARs properly

    # Create the Analysis Request
    ar = _createObjectByType('AnalysisRequest', client, tmpID())
    ar.processForm(REQUEST=request, values=values)

    # Resolve the services uids and set the analyses for this Analysis Request
    service_uids = get_services_uids(context=client,
                                     values=values,
                                     analyses_serv=analyses)
    ar.setAnalyses(service_uids, prices=prices, specs=specifications)

    # TODO Sample Cleanup - Manage secondary ARs properly
    if secondary:
        # A secondary AR no longer comes from a Sample, but rather from an AR.
        # If the Primary AR has been received, then force the transition of the
        # secondary to received and set the description/comment in the
        # transition accordingly so it will be displayed later in the log tab
        logger.warn("Sync transition for secondary AR is still missing")

    # Try first with the no-sampling transition, because it is the most common config
    success, message = doActionFor(ar, "no_sampling_workflow")
    if not success:
        doActionFor(ar, "to_be_sampled")
    return ar
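
A hypothetical call to the factory above, for illustration only. The field names in 'values' are borrowed from the test in Example #39; 'client', 'request', 'contact', 'sampletype' and 'service' stand for pre-existing objects:

values = {
    "Client": client.UID(),
    "Contact": contact.UID(),
    "SampleType": sampletype.UID(),
    "SamplingDate": "2015-01-01",
}
ar = create_analysisrequest(client, request, values,
                            analyses=[service.UID()])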
Example #30
 def workflow_script_schedule_sampling(self):
     """
     Runs all the processes needed for this action.
     """
     workflow = getToolByName(self, "portal_workflow")
     # transition the related analysis requests
     ars = self.getAnalysisRequests()
     for ar in ars:
         doActionFor(ar, "schedule_sampling")
Example #31
def after_retract(obj):
    """Function triggered after a 'retract' transition for the analysis passed
    in is performed. Retracting an analysis causes its transition to 'retracted'
    state and the creation of a new copy of the same analysis as a retest.
    Note that retraction only affects the single Analysis and has no other
    effect on the status of the Worksheet to which the Analysis is assigned or
    on the Analysis Request to which it belongs (the transition is never promoted).
    This function is called automatically by
    bika.lims.workflow.AfterTransitionEventHandler
    """
    # TODO Workflow Analysis - review this function
    # Rename the analysis to make way for its successor.
    # Support multiple retractions by renaming to *-0, *-1, etc
    parent = obj.aq_parent
    kw = obj.getKeyword()
    analyses = [
        x for x in parent.objectValues("Analysis")
        if x.getId().startswith(obj.getId())
    ]

    # LIMS-1290 - Analyst must be able to retract, which creates a new
    # Analysis.  So, _verifyObjectPaste permission check must be cancelled:
    parent._verifyObjectPaste = str
    # This is needed for tests:
    # https://docs.plone.org/develop/plone/content/rename.html
    # Testing warning: the rename mechanism relies on a Persistent attribute
    # called _p_jar to be present on the content object. By default, this is
    # not the case on unit tests. You need to call transaction.savepoint() to
    # make _p_jar appear on persistent objects.
    # If you don't do this, you'll receive a "CopyError" when calling
    # manage_renameObjects that the operation is not supported.
    transaction.savepoint()
    parent.manage_renameObject(kw, "{0}-{1}".format(kw, len(analyses)))
    delattr(parent, '_verifyObjectPaste')

    # Create new analysis from the retracted obj
    analysis = create_analysis(parent, obj)
    changeWorkflowState(analysis, "bika_analysis_workflow", "sample_received")

    # Assign the new analysis to this same worksheet, if any.
    ws = obj.getWorksheet()
    if ws:
        ws.addAnalysis(analysis)
    analysis.reindexObject()

    # retract our dependencies
    dependencies = obj.getDependencies()
    for dependency in dependencies:
        doActionFor(dependency, 'retract')

    # Retract our dependents
    dependents = obj.getDependents()
    for dependent in dependents:
        doActionFor(dependent, 'retract')

    _reindex_request(obj)
Example #32
    def addReferences(self, position, reference, service_uids):
        """ Add reference analyses to reference, and add to worksheet layout
        """
        workflow = getToolByName(self, 'portal_workflow')
        rc = getToolByName(self, REFERENCE_CATALOG)
        layout = self.getLayout()
        wst = self.getWorksheetTemplate()
        wstlayout = wst and wst.getLayout() or []
        ref_type = reference.getBlank() and 'b' or 'c'
        ref_uid = reference.UID()

        if position == 'new':
            highest_existing_position = len(wstlayout)
            for pos in [int(slot['position']) for slot in layout]:
                if pos > highest_existing_position:
                    highest_existing_position = pos
            position = highest_existing_position + 1

        # LIMS-2132 Reference Analyses got the same ID
        refgid = self.nextReferenceAnalysesGroupID(reference)

        for service_uid in service_uids:
            # services with dependents don't belong in references
            service = rc.lookupObject(service_uid)
            calc = service.getCalculation()
            if calc and calc.getDependentServices():
                continue
            ref_uid = reference.addReferenceAnalysis(service_uid, ref_type)
            ref_analysis = rc.lookupObject(ref_uid)

            # Set the required number of verifications
            reqvers = service.getNumberOfRequiredVerifications()
            ref_analysis.setNumberOfRequiredVerifications(reqvers)

            # Set ReferenceAnalysesGroupID (same id for the analyses from
            # the same Reference Sample and same Worksheet)
            ref_analysis.setReferenceAnalysesGroupID(refgid)
            ref_analysis.reindexObject(idxs=["getReferenceAnalysesGroupID"])

            # copy the interimfields
            if calc:
                ref_analysis.setInterimFields(calc.getInterimFields())

            self.setLayout(self.getLayout() +
                           [{
                               'position': position,
                               'type': ref_type,
                               'container_uid': reference.UID(),
                               'analysis_uid': ref_analysis.UID()
                           }])
            self.setAnalyses(self.getAnalyses() + [
                ref_analysis,
            ])
            doActionFor(ref_analysis, 'assign')
            # Reindex the worksheet in order to update its columns
            self.reindexObject()
Example #33
def before_reject(analysis):
    """Function triggered before 'unassign' transition takes place
    """
    worksheet = analysis.getWorksheet()
    if not worksheet:
        return

    # Rejection of a routine analysis causes the removal of their duplicates
    for dup in worksheet.get_duplicates_for(analysis):
        doActionFor(dup, "unassign")
Example #34
 def __call__(self):
     plone.protect.CheckAuthenticator(self.request)
     action = self.request.get('workflow_action', '')
     if action == 'reject' and not self.context.bika_setup.isRejectionWorkflowEnabled():
         return json.dumps({"error": "true"})
     if action:
         doActionFor(self.context, action)
         return json.dumps({"success": "true"})
     else:
         return json.dumps({"error": "true"})
Example #35
    def verify_and_retest(relative):
        if not ISubmitted.providedBy(relative):
            # Result not yet submitted, no need to create a retest
            return

        # Apply the transition manually, but only if analysis can be verified
        doActionFor(relative, "verify")

        # Create the retest
        create_retest(relative)
Example #36
 def __call__(self):
     plone.protect.CheckAuthenticator(self.request)
     action = self.request.get('workflow_action', '')
     if action == 'reject' and not self.context.bika_setup.isRejectionWorkflowEnabled():
         return json.dumps({"error": "true"})
     if action:
         doActionFor(self.context, action)
         return json.dumps({"success": "true"})
     else:
         return json.dumps({"error": "true"})
Example #37
def after_retract(obj):
    """Method triggered after a 'retract' transition for the Analysis Request
    passed in is performed. Transitions the analyses of the Analysis
    Request to 'retracted'.
    :param obj: Analysis Request affected by the transition
    :type obj: AnalysisRequest
    """
    ans = obj.getAnalyses(full_objects=True)
    for analysis in ans:
        doActionFor(analysis, 'retract')
Example #38
File: events.py Project: xispa/bika.lims
def after_retract(obj):
    """Function triggered after a 'retract' transition for the analysis passed
    in is performed. Retracting an analysis causes its transition to 'retracted'
    state and the creation of a new copy of the same analysis as a retest.
    Note that retraction only affects the single Analysis and has no other
    effect on the status of the Worksheet to which the Analysis is assigned or
    on the Analysis Request to which it belongs (the transition is never promoted).
    This function is called automatically by
    bika.lims.workflow.AfterTransitionEventHandler
    """
    # TODO Workflow Analysis - review this function
    # Rename the analysis to make way for its successor.
    # Support multiple retractions by renaming to *-0, *-1, etc
    parent = obj.aq_parent
    kw = obj.getKeyword()
    analyses = [x for x in parent.objectValues("Analysis")
                if x.getId().startswith(obj.getId())]

    # LIMS-1290 - Analyst must be able to retract, which creates a new
    # Analysis.  So, _verifyObjectPaste permission check must be cancelled:
    parent._verifyObjectPaste = str
    # This is needed for tests:
    # https://docs.plone.org/develop/plone/content/rename.html
    # Testing warning: the rename mechanism relies on a Persistent attribute
    # called _p_jar to be present on the content object. By default, this is
    # not the case on unit tests. You need to call transaction.savepoint() to
    # make _p_jar appear on persistent objects.
    # If you don't do this, you'll receive a "CopyError" when calling
    # manage_renameObjects that the operation is not supported.
    transaction.savepoint()
    parent.manage_renameObject(kw, "{0}-{1}".format(kw, len(analyses)))
    delattr(parent, '_verifyObjectPaste')

    # Create new analysis from the retracted obj
    analysis = create_analysis(parent, obj)
    changeWorkflowState(
        analysis, "bika_analysis_workflow", "sample_received")

    # Assign the new analysis to this same worksheet, if any.
    ws = obj.getWorksheet()
    if ws:
        ws.addAnalysis(analysis)
    analysis.reindexObject()

    # retract our dependencies
    dependencies = obj.getDependencies()
    for dependency in dependencies:
        doActionFor(dependency, 'retract')

    # Retract our dependents
    dependents = obj.getDependents()
    for dependent in dependents:
        doActionFor(dependent, 'retract')

    _reindex_request(obj)
Example #39
    def test_default_stickers(self):
        """https://jira.bikalabs.com/browse/WINE-44: display SampleID or
        SamplePartition ID depending on bikasetup.ShowPartitions value
        """

        folder = self.portal.bika_setup.bika_analysisservices
        services = [_createObjectByType("AnalysisService", folder, tmpID()),
                    _createObjectByType("AnalysisService", folder, tmpID())]
        services[0].processForm()
        services[1].processForm()
        services[0].edit(title="Detect Dust")
        services[1].edit(title="Detect water")
        service_uids = [s.UID() for s in services]
        folder = self.portal.clients
        client = _createObjectByType("Client", folder, tmpID())
        client.processForm()
        folder = self.portal.clients.objectValues("Client")[0]
        contact = _createObjectByType("Contact", folder, tmpID())
        contact.processForm()
        contact.edit(Firstname="Bob", Surname="Dobbs", email="*****@*****.**")
        folder = self.portal.bika_setup.bika_sampletypes
        sampletype = _createObjectByType("SampleType", folder, tmpID())
        sampletype.processForm()
        sampletype.edit(title="Air", Prefix="AIR")

        values = {'Client': client.UID(),
                  'Contact': contact.UID(),
                  'SamplingDate': '2015-01-01',
                  'SampleType': sampletype.UID()}

        for size in ["large", "small"]:

            # create and receive AR
            ar = create_analysisrequest(client, {}, values, service_uids)
            ar.bika_setup.setShowPartitions(False)
            doActionFor(ar, 'receive')
            self.assertEquals(ar.portal_workflow.getInfoFor(ar, 'review_state'), 'sample_received')
            # check sticker text
            ar.REQUEST['items'] = ar.getId()
            ar.REQUEST['template'] = "bika.lims:sticker_%s.pt"%size
            sticker = Sticker(ar, ar.REQUEST)()
            pid = ar.getSample().objectValues("SamplePartition")[0].getId()
            self.assertNotIn(pid, sticker, "Sticker must not contain partition ID %s"%pid)

            # create and receive AR
            ar = create_analysisrequest(client, {}, values, service_uids)
            ar.bika_setup.setShowPartitions(True)
            doActionFor(ar, 'receive')
            self.assertEquals(ar.portal_workflow.getInfoFor(ar, 'review_state'), 'sample_received')
            # check sticker text
            ar.REQUEST['items'] = ar.getId()
            ar.REQUEST['template'] = "bika.lims:sticker_%s.pt"%size
            sticker = Sticker(ar, ar.REQUEST)()
            pid = ar.getSample().objectValues("SamplePartition")[0].getId()
            self.assertIn(pid, sticker, "Sticker must contain partition ID %s"%pid)
Example #40
File: events.py Project: xispa/bika.lims
def after_cancel(obj):
    """Method triggered after a 'cancel' transition for the Analysis Request
    passed in is performed. Deactivates all analyses contained in the object.
    This function is called automatically by
    bika.lims.workflow.AfterTransitionEventHandler
    :param obj: Analysis Request affected by the transition
    :type obj: AnalysisRequest
    """
    ans = obj.getAnalyses(full_objects=True, cancellation_state='active')
    for analysis in ans:
        doActionFor(analysis, 'cancel')
Example #41
    def workflow_script_submit(self):
        """
        Method triggered after a 'submit' transition for the current analysis
        is performed. Responsible of triggering cascade actions such as
        transitioning dependent analyses, transitioning worksheets, etc
        depending on the current analysis and other analyses that belong to the
        same Analysis Request or Worksheet.
        This function is called automatically by
        bika.lims.workflow.AfterTransitionEventHandler
        """
        # The analyses that depend on this analysis to calculate their results
        # must be transitioned too, otherwise the user will be forced to submit
        # them individually. Note that the automatic transition of dependents
        # must only take place if all their dependencies have been submitted
        # already.
        for dependent in self.getDependents():
            # If this submit transition has already been done for this
            # dependent analysis within the current request, continue.
            if skip(dependent, 'submit', peek=True):
                continue

            # TODO Workflow. All below and inside this loop should be moved to
            # a guard_submit_transition inside analysis

            # If this dependent has already been submitted, omit
            if dependent.getSubmittedBy():
                continue

            # The dependent cannot be transitioned if it doesn't have a result
            if not dependent.getResult():
                continue

            # If the calculation associated to the dependent analysis requires
            # the manual introduction of interim fields, do not transition the
            # dependent automatically, force the user to do it manually.
            calculation = dependent.getCalculation()
            if calculation and calculation.getInterimFields():
                continue

            # All dependencies from this dependent analysis are ok?
            deps = dependent.getDependencies()
            dsub = [
                dep for dep in deps if wasTransitionPerformed(dep, 'submit')
            ]
            if len(deps) == len(dsub):
                # The statuses of all dependencies of this dependent are ok
                # (at least, all of them have been submitted already)
                doActionFor(dependent, 'submit')

        # Do all the reflex rules process
        self._reflex_rule_process('submit')

        # Delegate the transition of Worksheet to base class AbstractAnalysis
        super(AbstractRoutineAnalysis, self).workflow_script_submit()
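
The loop above only auto-submits a dependent when every one of its dependencies has already been submitted. A sketch of that rule as a helper, assuming wasTransitionPerformed is importable from bika.lims.workflow as the surrounding code suggests:

from bika.lims.workflow import wasTransitionPerformed


def all_dependencies_submitted(dependent):
    """True when every dependency of this analysis has been submitted."""
    deps = dependent.getDependencies()
    return all(wasTransitionPerformed(dep, 'submit') for dep in deps)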
Example #42
def after_cancel(obj):
    """Method triggered after a 'cancel' transition for the Analysis Request
    passed in is performed. Deactivates all analyses contained in the object.
    This function is called automatically by
    bika.lims.workflow.AfterTransitionEventHandler
    :param obj: Analysis Request affected by the transition
    :type obj: AnalysisRequest
    """
    ans = obj.getAnalyses(full_objects=True, cancellation_state='active')
    for analysis in ans:
        doActionFor(analysis, 'cancel')
Example #43
 def workflow_script_verify(self):
     if skip(self, "verify"):
         return
     workflow = getToolByName(self, 'portal_workflow')
     self.reindexObject(idxs=["review_state", ])
     if not "verify all analyses" in self.REQUEST['workflow_skiplist']:
         # verify all analyses in this self.
         analyses = self.getAnalyses()
         for analysis in analyses:
             if workflow.getInfoFor(analysis, 'review_state', '') != 'to_be_verified':
                 continue
             doActionFor(analysis, "verify")
Example #44
File: events.py Project: xispa/bika.lims
def after_publish(obj):
    """Method triggered after an 'publish' transition for the Analysis Request
    passed in is performed. Performs the 'publish' transition to children.
    This function is called automatically by
    bika.lims.workflow.AfterTransitionEventHandler
    :param obj: Analysis Request affected by the transition
    :type obj: AnalysisRequest
    """
    # Transition the children
    ans = obj.getAnalyses(full_objects=True)
    for analysis in ans:
        doActionFor(analysis, 'publish')
Example #45
File: events.py Project: xispa/bika.lims
def after_submit(obj):
    """
    Method triggered after a 'submit' transition for the Worksheet passed in is
    performed.
    This function is called automatically by
    bika.lims.workflow.AfterTransitionEventHandler
    """
    # Submitting a Worksheet must never transition the analyses.
    # In fact, a worksheet can only be transitioned to "to_be_verified" if
    # all the analyses it contains have been submitted manually after
    # the results input
    doActionFor(obj, 'attach')
Example #46
 def workflow_script_retract(self):
     if skip(self, "retract"):
         return
     workflow = getToolByName(self, 'portal_workflow')
     self.reindexObject(idxs=["review_state", ])
     if not "retract all analyses" in self.REQUEST['workflow_skiplist']:
         # retract all analyses in this self.
         # (NB: don't retract if it's verified)
         analyses = self.getAnalyses()
         for analysis in analyses:
             if workflow.getInfoFor(analysis, 'review_state', '') not in ('attachment_due', 'to_be_verified',):
                 continue
             doActionFor(analysis, 'retract')
Example #47
 def workflow_script_verify(self):
     if skip(self, "verify"):
         return
     workflow = getToolByName(self, "portal_workflow")
     self.reindexObject(idxs=["review_state"])
     if not "verify all analyses" in self.REQUEST["workflow_skiplist"]:
         # verify all analyses in this self.
         analyses = self.getAnalyses()
         for analysis in analyses:
             state = workflow.getInfoFor(analysis, "review_state", "")
             if state != "to_be_verified":
                 continue
             doActionFor(analysis, "verify")
Example #48
 def workflow_script_preserve(self):
     """This action can happen in the Sample UI, so we transition all
     self partitions that are still 'to_be_preserved'
     """
     workflow = getToolByName(self, "portal_workflow")
     parts = self.objectValues("SamplePartition")
     tbs = [sp for sp in parts if workflow.getInfoFor(sp, "review_state") == "to_be_preserved"]
     for sp in tbs:
         doActionFor(sp, "preserve")
     # All associated AnalysisRequests are also transitioned
     for ar in self.getAnalysisRequests():
         doActionFor(ar, "preserve")
         ar.reindexObject()
Example #49
 def workflow_script_receive(self):
     workflow = getToolByName(self, "portal_workflow")
     self.setDateReceived(DateTime())
     self.reindexObject(idxs=["review_state", "getDateReceived"])
     # Receive all self partitions that are still 'sample_due'
     parts = self.objectValues("SamplePartition")
     sample_due = [sp for sp in parts if workflow.getInfoFor(sp, "review_state") == "sample_due"]
     for sp in sample_due:
         workflow.doActionFor(sp, "receive")
     # when a self is received, all associated
     # AnalysisRequests are also transitioned
     for ar in self.getAnalysisRequests():
         doActionFor(ar, "receive")
Example #50
 def workflow_script_to_be_preserved(self):
     if skip(self, "to_be_preserved"):
         return
     workflow = getToolByName(self, "portal_workflow")
     parts = self.objectValues("SamplePartition")
     # Transition our children
     tbs = [sp for sp in parts if workflow.getInfoFor(sp, "review_state") == "to_be_preserved"]
     for sp in tbs:
         doActionFor(sp, "to_be_preserved")
     # All associated AnalysisRequests are also transitioned
     for ar in self.getAnalysisRequests():
         doActionFor(ar, "to_be_preserved")
         ar.reindexObject()
Example #51
File: events.py Project: xispa/bika.lims
def after_submit(obj):
    """Method triggered after a 'submit' transition for the analysis passed in
    is performed. Promotes the submit transition to the Worksheet to which the
    analysis belongs to. Note that for the worksheet there is already a guard
    that assures the transition to the worksheet will only be performed if all
    analyses within the worksheet have already been transitioned.
    This function is called automatically by
    bika.lims.workfow.AfterTransitionEventHandler
    """
    ws = obj.getWorksheet()
    if ws:
        doActionFor(ws, 'submit')
    _reindex_request(obj)
Example #52
def ObjectModifiedEventHandler(instance, event):
    """update certain field values of the AR created
    """
    if isinstance(event, ContainerModifiedEvent):
        ar_analyses = instance.getAnalyses(full_objects=True)
        sample = instance.getSample()
        partition = sample.objectValues('SamplePartition')[0]
        workflow = getToolByName(sample, 'portal_workflow')
        sample_state = workflow.getInfoFor(sample, 'review_state')
        for analysis in ar_analyses:
            doActionFor(analysis, sample_state)

        for analysis in ar_analyses:
            analysis.setSamplePartition(partition)
Example #53
 def workflow_script_retract(self):
     if skip(self, "retract"):
         return
     workflow = getToolByName(self, "portal_workflow")
     self.reindexObject(idxs=["review_state"])
     if not "retract all analyses" in self.REQUEST["workflow_skiplist"]:
         # retract all analyses in this self.
         # (NB: don't retract if it's verified)
         analyses = self.getAnalyses()
         for analysis in analyses:
             state = workflow.getInfoFor(analysis, "review_state", "")
             if state not in ("attachment_due", "to_be_verified"):
                 continue
             doActionFor(analysis, "retract")
Example #54
File: events.py Project: xispa/bika.lims
def after_no_sampling_workflow(obj):
    """Method triggered after a 'no_sampling_workflow' transition for the
    Sample passed in is performed. Triggers the 'no_sampling_workflow'
    transition for dependent objects, such as Sample Partitions and
    Analysis Requests.
    This function is called automatically by
    bika.lims.workflow.AfterTransitionEventHandler
    :param obj: Sample affected by the transition
    :type obj: Sample
    """
    _cascade_transition(obj, 'no_sampling_workflow')

    if obj.getSamplingWorkflowEnabled():
        to_be_preserved = []
        sample_due = []
        lowest_state = 'sample_due'
        for p in obj.objectValues('SamplePartition'):
            if p.getPreservation():
                lowest_state = 'to_be_preserved'
                to_be_preserved.append(p)
            else:
                sample_due.append(p)
        for p in to_be_preserved:
            doActionFor(p, 'to_be_preserved')
        for p in sample_due:
            doActionFor(p, 'sample_due')
        doActionFor(obj, lowest_state)
    else:
        doActionFor(obj, 'sample_due')
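
A self-contained restatement of the 'lowest_state' selection above: if any partition requires preservation, the sample as a whole goes to 'to_be_preserved', otherwise straight to 'sample_due'.

def lowest_sample_state(partitions_need_preservation):
    if any(partitions_need_preservation):
        return 'to_be_preserved'
    return 'sample_due'


assert lowest_sample_state([True, False]) == 'to_be_preserved'
assert lowest_sample_state([False, False]) == 'sample_due'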
Example #55
def create_sample(context, request, values, j, x):
    """Create sample as biospecimen or aliquot
    """
    # Retrieve the required tools
    uc = getToolByName(context, 'uid_catalog')
    # Determine if the sampling workflow is enabled
    workflow_enabled = context.bika_setup.getSamplingWorkflowEnabled()
    # Create sample or refer to existing for secondary analysis request
    sample = _createObjectByType('Sample', context, tmpID())
    # Update the created sample with indicated values
    sample.processForm(REQUEST=request, values=values)
    if 'datesampled' in values:
        sample.setDateSampled(values['datesampled'])
    else:
        sample.setDateSampled(DateTime())
    if 'datereceived' in values:
        sample.setDateReceived(values['datereceived'])
    else:
        sample.setDateReceived(DateTime())
    if 'datesampling' in values:
        sample.setSamplingDate(values['datesampling'])
    else:
        sample.setSamplingDate(DateTime())
    if 'datecreated' in values:
        field = sample.getField('DateCreated')
        field.set(sample, values['datecreated'])
    else:
        field = sample.getField('DateCreated')
        field.set(sample, DateTime())
    # Specifically set the storage location
    if 'StorageLocation' in values:
        sample.setStorageLocation(values['StorageLocation'])
    if 'kits' in values:
        field = sample.getField('Kit')
        field.set(sample, values['kits'][j].UID())
        alsoProvides(sample, IBiospecimen)
    if 'biospecimens' in values:
        field = sample.getField('LinkedSample')
        field.set(sample, values['biospecimens'][j].UID())
        # sample.setLinkedSample(values['biospecimens'][j].UID())
        alsoProvides(sample, IAliquot)
    context.manage_renameObject(sample.id, values['id_template'].format(id=x), )
    # Perform the appropriate workflow action
    workflow_action = 'sampling_workflow' if workflow_enabled \
        else 'no_sampling_workflow'
    doActionFor(sample, workflow_action)
    # Set the SampleID
    sample.edit(SampleID=sample.getId())
    # Return the newly created sample
    return sample
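
A hypothetical call to create_sample, for illustration only; the keys are the ones the function reads above, and 'project', 'request' and 'biospecimens' stand for the calling context, the current HTTP request and a list of parent biospecimen objects:

from DateTime import DateTime

values = {
    'id_template': 'ALQ-{id:03d}',
    'datesampled': DateTime(),
    'biospecimens': biospecimens,  # indexed by j inside create_sample
}
aliquot = create_sample(project, request, values, j=0, x=1)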
Example #56
 def workflow_script_sample(self):
     if skip(self, "sample"):
         return
     workflow = getToolByName(self, 'portal_workflow')
     parts = self.objectValues('SamplePartition')
     # This action can happen in the Sample UI.  So we transition all
     # partitions that are still 'to_be_sampled'
     tbs = [sp for sp in parts
            if workflow.getInfoFor(sp, 'review_state') == 'to_be_sampled']
     for sp in tbs:
         doActionFor(sp, "sample")
     # All associated AnalysisRequests are also transitioned
     for ar in self.getAnalysisRequests():
         doActionFor(ar, "sample")
         ar.reindexObject()
Example #57
 def workflow_script_cancel(self):
     """
     When the round is cancelled, all its associated Samples and ARs are cancelled by the system.
     """
     if skip(self, "cancel"):
         return
     self.reindexObject(idxs=["cancellation_state", ])
     # deactivate all analysis requests in this sampling round.
     analysis_requests = self.getAnalysisRequests()
     for ar in analysis_requests:
         ar_obj = ar.getObject()
         workflow = getToolByName(self, 'portal_workflow')
         if workflow.getInfoFor(ar_obj, 'cancellation_state') != 'cancelled':
             doActionFor(ar.getObject(), 'cancel')
             doActionFor(ar.getObject().getSample(), 'cancel')
Example #58
    def test_LIMS_2080_correctly_interpret_false_and_blank_values(self):
        client = self.portal.clients.objectValues()[0]
        arimport = self.addthing(client, 'ARImport')
        arimport.unmarkCreationFlag()
        arimport.setFilename("test1.csv")
        arimport.setOriginalFile("""
Header,      File name,  Client name,  Client ID, Contact,     CC Names - Report, CC Emails - Report, CC Names - Invoice, CC Emails - Invoice, No of Samples, Client Order Number, Client Reference,,
Header Data, test1.csv,  Happy Hills,  HH,        Rita Mohale,                  ,                   ,                    ,                    , 10,            HHPO-001,                            ,,
Samples,    ClientSampleID,    SamplingDate,DateSampled,SamplePoint,SampleMatrix,SampleType,ContainerType,ReportDryMatter,Priority,Total number of Analyses or Profiles,Price excl Tax,ECO,SAL,COL,TAS,MicroBio,Properties
Analysis price,,,,,,,,,,,,,,
"Total Analyses or Profiles",,,,,,,,,,,,,9,,,
Total price excl Tax,,,,,,,,,,,,,,
"Sample 1", HHS14001,          3/9/2014,    3/9/2014,   ,     ,     Water,     Cup,          0,              Normal,  1,                                   0,             0,0,0,0,0,1
"Sample 2", HHS14002,          3/9/2014,    3/9/2014,   ,     ,     Water,     Cup,          0,              Normal,  2,                                   0,             0,0,0,0,1,1
"Sample 3", HHS14002,          3/9/2014,    3/9/2014,   Toilet,     Liquids,     Water,     Cup,          1,              Normal,  4,                                   0,             1,1,1,1,0,0
"Sample 4", HHS14002,          3/9/2014,    3/9/2014,   Toilet,     Liquids,     Water,     Cup,          1,              Normal,  2,                                   0,             1,0,0,0,1,0
        """)

        # check that values are saved without errors
        arimport.setErrors([])
        arimport.save_header_data()
        arimport.save_sample_data()
        errors = arimport.getErrors()
        if errors:
            self.fail("Unexpected errors while saving data: " + str(errors))
        transaction.commit()
        browser = self.getBrowser(
            username=TEST_USER_NAME,
            password=TEST_USER_PASSWORD,
            loggedIn=True)

        doActionFor(arimport, 'validate')
        c_state = getCurrentState(arimport)
        self.assertTrue(
            c_state == 'valid',
            "ARrimport in 'invalid' state after it has been transitioned to "
            "'valid'.")
        browser.open(arimport.absolute_url() + "/edit")
        content = browser.contents
        re.match(
            '<option selected=\"selected\" value=\"\d+\">Toilet</option>',
            content)
        if len(re.findall('<.*selected.*Toilet', content)) != 2:
            self.fail("Should be two empty SamplePoints, and two with values")
        if len(re.findall('<.*selected.*Liquids', content)) != 2:
            self.fail("Should be two empty Matrix fields, and two with values")
        if len(re.findall('<.*checked.*ReportDry', content)) != 2:
            self.fail("Should be two False DryMatters, and two True")
Example #59
File: events.py Project: xispa/bika.lims
def _cascade_transition(obj, transition_id):
    """ Performs the transition for the transition_id passed in to children
    :param obj: Sample for which the transition has to be cascaded
    :param transition_id: Unique id of the transition
    """
    # Cascade the transition to all the partitions of this sample.
    # Note the transition for SamplePartition already transitions all the
    # analyses associated to that Sample partition, so there is no need to
    # transition all the analyses from Sample here.
    for part in obj.objectValues('SamplePartition'):
        doActionFor(part, transition_id)

    # When the sample is transitioned, all its associated
    # AnalysisRequests are also transitioned
    for ar in obj.getAnalysisRequests():
        doActionFor(ar, transition_id)
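
The same parent-to-children pattern recurs throughout these examples. A generic sketch, assuming only that doActionFor is importable from bika.lims.workflow:

from bika.lims.workflow import doActionFor


def cascade(obj, transition_id, get_children):
    """Apply a transition to an object and then to each of its children."""
    doActionFor(obj, transition_id)
    for child in get_children(obj):
        doActionFor(child, transition_id)


# e.g. cascade(sample, 'receive', lambda s: s.getAnalysisRequests())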