Example #1
def create_requests_from_partitions(analysis_request):
    """If more than one SamplePartition is set for the given AnalysisRequest,
    creates a new internal AR for each SamplePartition, assigns the
    primary sample to the children and removes the analyses from the primary AR.
    """
    logger.info("*** Creating new requests from partitions ***")
    partitions = analysis_request.getPartitions()
    if len(partitions) < 2:
        # Only one partition, do not create new requests
        return list()

    created = list()
    client = analysis_request.getClient()
    primary_sample = analysis_request.getSample()
    primary_sample_uid = api.get_uid(primary_sample)

    ar_proxies = analysis_request.Schema().fields()
    ar_proxies = filter(lambda field: IProxyField.providedBy(field),
                        ar_proxies)
    ar_proxies = map(lambda field: field.getName(), ar_proxies)
    skip_fields = [
        "Client", "Sample", "PrimarySample", "Template", "Profile", "Profiles",
        "Analyses", "ParentAnalysisRequest", "PrimaryAnalysisRequest",
        "RejectionReasons", "Remarks"
    ]
    skip_fields.extend(ar_proxies)
    for part in partitions:
        analyses = part.getAnalyses()
        analyses = map(lambda an: api.get_object(an), analyses)

        # Create the new derivative sample (~partition)
        field_values = dict(PrimarySample=primary_sample_uid, InternalUse=True)
        sample_copy = copy(primary_sample,
                           container=client,
                           new_field_values=field_values)
        #sample_copy.id = part.id
        sample_uid = api.get_uid(sample_copy)

        # Create a new Analysis Request for this Sample and analyses
        field_values = dict(Sample=sample_uid,
                            Analyses=analyses,
                            PrimaryAnalysisRequest=analysis_request)
        ar_copy = copy(analysis_request,
                       container=client,
                       skip_fields=skip_fields,
                       new_field_values=field_values)

        # Create sample partition
        services = map(lambda an: an.getAnalysisService(), analyses)
        partition = dict(services=services,
                         part_id="{}-P1".format(sample_copy.getId()))
        create_samplepartition(sample_copy, partition, analyses)

        # Force all items to be in received state
        force_receive(ar_copy)

        created.append(ar_copy)
    return created
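A minimal usage sketch for the helper above. The UID placeholder and the
api.get_object_by_uid lookup are illustrative assumptions; only
create_requests_from_partitions itself comes from the example.

# Hypothetical caller: split a multi-partition AR into internal child requests
primary_ar = api.get_object_by_uid("<uid-of-a-multi-partition-AR>")  # assumed lookup
children = create_requests_from_partitions(primary_ar)
for child in children:
    logger.info("Created internal AR %s from %s"
                % (child.getId(), primary_ar.getId()))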
Example #2
def create_analysisrequest(
        context,
        request,
        values,  # {field: value, ...}
        analyses=[],
        # uid, service or analysis; list of uids, services or analyses
        partitions=None,
        # list of dictionaries with container, preservation etc)
        specifications=None,
        prices=None):
    """This is meant for general use and should do everything necessary to
    create and initialise the AR and its requirements.
    XXX The ar-add form's ajaxAnalysisRequestSubmit should be calling this.
    """
    # Gather necessary tools
    workflow = getToolByName(context, 'portal_workflow')
    bc = getToolByName(context, 'bika_catalog')

    # It's necessary to modify these and we don't want to pollute the
    # parent's data
    values = values.copy()

    # Create a new sample or locate the existing one for a secondary AR
    if not values.get('Sample', False):
        secondary = False
        workflow_enabled = context.bika_setup.getSamplingWorkflowEnabled()
        sample = create_sample(context, request, values)
    else:
        secondary = True
        sample = None
        if ISample.providedBy(values['Sample']):
            sample = values['Sample']
        else:
            brains = bc(UID=values['Sample'])
            if brains:
                sample = brains[0].getObject()
        if not sample:
            raise RuntimeError("create_analysisrequest No sample. values=%s" %
                               values)
        workflow_enabled = sample.getSamplingWorkflowEnabled()

    # Create the Analysis Request
    ar = _createObjectByType('AnalysisRequest', context, tmpID())
    # Set some required fields manually before processForm is called
    ar.setSample(sample)
    values['Sample'] = sample
    ar.processForm(REQUEST=request, values=values)
    # Object has been renamed
    ar.edit(RequestID=ar.getId())

    # Set initial AR state
    action = '{0}sampling_workflow'.format('' if workflow_enabled else 'no_')
    workflow.doActionFor(ar, action)

    # Set analysis request analyses
    service_uids = _resolve_items_to_service_uids(analyses)
    analyses = ar.setAnalyses(service_uids,
                              prices=prices,
                              specs=specifications)

    if secondary:
        # Only 'sample_due' and 'sample_received' samples can be selected
        # for secondary analyses
        api.content.transition(obj=ar, to_state='sampled')
        api.content.transition(obj=ar, to_state='sample_due')
        sample_state = workflow.getInfoFor(sample, 'review_state')
        if sample_state == 'sample_received':
            doActionFor(ar, 'receive')

        for analysis in ar.getAnalyses(full_objects=1):
            doActionFor(analysis, 'sample')
            doActionFor(analysis, 'sample_due')
            analysis_transition_ids = [
                t['id'] for t in workflow.getTransitionsFor(analysis)
            ]
            if 'receive' in analysis_transition_ids and sample_state == 'sample_received':
                doActionFor(analysis, 'receive')

    if not secondary:
        # Create sample partitions
        if not partitions:
            partitions = [{'services': analyses}]
        for n, partition in enumerate(partitions):
            # Calculate partition id
            partition_prefix = sample.getId() + "-P"
            partition_id = '%s%s' % (partition_prefix, n + 1)
            partition['part_id'] = partition_id
            # Point to or create sample partition
            if partition_id in sample.objectIds():
                partition['object'] = sample[partition_id]
            else:
                partition['object'] = create_samplepartition(
                    sample, partition, analyses)
        # If Preservation is required for some partitions,
        # and the SamplingWorkflow is disabled, we need
        # to transition to to_be_preserved manually.
        if not workflow_enabled:
            to_be_preserved = []
            sample_due = []
            lowest_state = 'sample_due'
            for p in sample.objectValues('SamplePartition'):
                if p.getPreservation():
                    lowest_state = 'to_be_preserved'
                    to_be_preserved.append(p)
                else:
                    sample_due.append(p)
            for p in to_be_preserved:
                doActionFor(p, 'to_be_preserved')
            for p in sample_due:
                doActionFor(p, 'sample_due')
            doActionFor(sample, lowest_state)
            doActionFor(ar, lowest_state)

        # Transition pre-preserved partitions
        for p in partitions:
            if 'prepreserved' in p and p['prepreserved']:
                part = p['object']
                state = workflow.getInfoFor(part, 'review_state')
                if state == 'to_be_preserved':
                    workflow.doActionFor(part, 'preserve')

    # Return the newly created Analysis Request
    return ar
Example #3
def create_analysisrequest(context, request, values, analyses=None,
                           partitions=None, specifications=None, prices=None):
    """This is meant for general use and should do everything necessary to
    create and initialise an AR and any other required auxiliary objects
    (Sample, SamplePartition, Analysis...)

    :param context:
        The container in which the ARs will be created.
    :param request:
        The current Request object.
    :param values:
        a dict, where keys are AR|Sample schema field names.
    :param analyses:
        Analysis services list.  If specified, augments the values in
        values['Analyses']. May consist of service objects, UIDs, or Keywords.
    :param partitions:
        A list of dictionaries, if specific partitions are required.  If not
        specified, AR's sample is created with a single partition.
    :param specifications:
        These values augment those found in values['Specifications']
    :param prices:
        Allow different prices to be set for analyses.  If not set, prices
        are read from the associated analysis service.
    """

    # Gather necessary tools
    workflow = getToolByName(context, 'portal_workflow')
    bc = getToolByName(context, 'bika_catalog')

    # It's necessary to modify these and we don't want to pollute the
    # parent's data
    values = values.copy()
    analyses = analyses if analyses else values.get('Analyses', [])
    if not analyses:
        raise RuntimeError(
                "create_analysisrequest: no analyses provided")

    # Create a new sample or locate the existing one for a secondary AR
    if not values.get('Sample', False):
        secondary = False
        workflow_enabled = context.bika_setup.getSamplingWorkflowEnabled()
        sample = create_sample(context, request, values)
    else:
        secondary = True
        sample = get_sample_from_values(context, values)
        workflow_enabled = sample.getSamplingWorkflowEnabled()

    # Create the Analysis Request
    ar = _createObjectByType('AnalysisRequest', context, tmpID())

    # Set some required fields manually before processForm is called
    ar.setSample(sample)
    values['Sample'] = sample
    ar.processForm(REQUEST=request, values=values)
    # Object has been renamed
    ar.edit(RequestID=ar.getId())

    # Set initial AR state
    action = '{0}sampling_workflow'.format('' if workflow_enabled else 'no_')
    workflow.doActionFor(ar, action)

    # Set analysis request analyses
    service_uids = _resolve_items_to_service_uids(analyses)
    analyses = ar.setAnalyses(service_uids, prices=prices, specs=specifications)

    # Continue to set the state of the AR
    skip_receive = ['to_be_sampled', 'sample_due', 'sampled', 'to_be_preserved']
    if secondary:
        # Only 'sample_due' and 'sample_received' samples can be selected
        # for secondary analyses
        doActionFor(ar, 'sampled')
        doActionFor(ar, 'sample_due')
        sample_state = workflow.getInfoFor(sample, 'review_state')
        if sample_state not in skip_receive:
            doActionFor(ar, 'receive')

    # Set the state of analyses we created.
    for analysis in analyses:
        doActionFor(analysis, 'sample_due')
        analysis_state = workflow.getInfoFor(analysis, 'review_state')
        if analysis_state not in skip_receive:
            doActionFor(analysis, 'receive')

    if not secondary:
        # Create sample partitions
        if not partitions:
            partitions = [{'services': analyses}]
        for n, partition in enumerate(partitions):
            # Calculate partition id
            partition_prefix = sample.getId() + "-P"
            partition_id = '%s%s' % (partition_prefix, n + 1)
            partition['part_id'] = partition_id
            # Point to or create sample partition
            if partition_id in sample.objectIds():
                partition['object'] = sample[partition_id]
            else:
                partition['object'] = create_samplepartition(
                    sample,
                    partition,
                    analyses
                )
        # If Preservation is required for some partitions,
        # and the SamplingWorkflow is disabled, we need
        # to transition to to_be_preserved manually.
        if not workflow_enabled:
            to_be_preserved = []
            sample_due = []
            lowest_state = 'sample_due'
            for p in sample.objectValues('SamplePartition'):
                if p.getPreservation():
                    lowest_state = 'to_be_preserved'
                    to_be_preserved.append(p)
                else:
                    sample_due.append(p)
            for p in to_be_preserved:
                doActionFor(p, 'to_be_preserved')
            for p in sample_due:
                doActionFor(p, 'sample_due')
            doActionFor(sample, lowest_state)
            doActionFor(ar, lowest_state)

        # Transition pre-preserved partitions
        for p in partitions:
            if 'prepreserved' in p and p['prepreserved']:
                part = p['object']
                state = workflow.getInfoFor(part, 'review_state')
                if state == 'to_be_preserved':
                    workflow.doActionFor(part, 'preserve')

    # Return the newly created Analysis Request
    return ar
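A hedged usage sketch based on the docstring above. The client, contact and
sample type objects and the service keywords are assumptions for illustration;
only the create_analysisrequest signature comes from the example.

# Hypothetical call; 'client' acts as the container (context)
values = {
    'Contact': contact.UID(),          # assumed Contact object
    'SampleType': sampletype.UID(),    # assumed SampleType object
    'DateSampled': DateTime(),         # Zope DateTime, assumed to be importable here
}
# analyses may be service objects, UIDs or keywords, per the docstring
ar = create_analysisrequest(client, request, values, analyses=['Ca', 'Mg'])
print(ar.getId())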
Example #4
def create_analysisrequest(context, request, values):
    """Create an AR.

    :param context: the container in which the AR will be created (Client)
    :param request: the request object
    :param values: a dictionary containing fieldname/value pairs, which
           will be applied.  Some fields will have specific code to handle them,
           and others will be directly written to the schema.
    :return: the new AR instance

    Special keys present (or required) in the values dict, which are not present
    in the schema:

        - Partitions: data about partitions to be created, and the
                      analyses that are to be assigned to each.

        - Prices: custom prices set in the HTML form.

        - ResultsRange: Specification values entered in the HTML form.

    """
    # Gather necessary tools
    workflow = getToolByName(context, 'portal_workflow')
    bc = getToolByName(context, 'bika_catalog')

    # Create a new sample or locate the existing one for a secondary AR
    if values['Sample']:
        secondary = True
        if ISample.providedBy(values['Sample']):
            sample = values['Sample']
        else:
            sample = bc(UID=values['Sample'])[0].getObject()
        samplingworkflow_enabled = sample.getSamplingWorkflowEnabled()
    else:
        secondary = False
        samplingworkflow_enabled = context.bika_setup.getSamplingWorkflowEnabled()
        sample = create_sample(context, request, values)

    # Create the Analysis Request
    ar = _createObjectByType('AnalysisRequest', context, tmpID())
    ar.setSample(sample)

    # processForm renames the sample; this requires values to contain the Sample.
    values['Sample'] = sample
    ar.processForm(REQUEST=request, values=values)

    # Object has been renamed
    ar.edit(RequestID=ar.getId())

    # Set initial AR state
    workflow_action = 'sampling_workflow' if samplingworkflow_enabled \
        else 'no_sampling_workflow'
    workflow.doActionFor(ar, workflow_action)


    # We need to send a list of service UIDS to setAnalyses function.
    # But we may have received a list of titles, list of UIDS,
    # list of keywords or list of service objects!
    service_uids = []
    for obj in values['Analyses']:
        uid = False
        # service objects
        if hasattr(obj, 'portal_type') and obj.portal_type == 'AnalysisService':
            uid = obj.UID()
        # Analysis objects (shortcut for e.g. copying analyses from another AR)
        elif hasattr(obj, 'portal_type') and obj.portal_type == 'Analysis':
            uid = obj.getService().UID()
        # Maybe already UIDs.
        if not uid:
            bsc = getToolByName(context, 'bika_setup_catalog')
            brains = bsc(portal_type='AnalysisService', UID=obj)
            if brains:
                uid = brains[0].UID
        # Maybe titles.
        if not uid:
            bsc = getToolByName(context, 'bika_setup_catalog')
            brains = bsc(portal_type='AnalysisService', title=obj)
            if brains:
                uid = brains[0].UID
        if uid:
            service_uids.append(uid)
        else:
            logger.info("In analysisrequest.add.create_analysisrequest: cannot "
                        "find uid of this service: %s" % obj)

    # Set analysis request analyses
    ar.setAnalyses(service_uids,
                   prices=values.get("Prices", []),
                   specs=values.get('ResultsRange', []))
    analyses = ar.getAnalyses(full_objects=True)

    skip_receive = ['to_be_sampled', 'sample_due', 'sampled', 'to_be_preserved']
    if secondary:
        # Only 'sample_due' and 'sample_received' samples can be selected
        # for secondary analyses
        doActionFor(ar, 'sampled')
        doActionFor(ar, 'sample_due')
        sample_state = workflow.getInfoFor(sample, 'review_state')
        if sample_state not in skip_receive:
            doActionFor(ar, 'receive')

    for analysis in analyses:
        doActionFor(analysis, 'sample_due')
        analysis_state = workflow.getInfoFor(analysis, 'review_state')
        if analysis_state not in skip_receive:
            doActionFor(analysis, 'receive')

    if not secondary:
        # Create sample partitions
        partitions = []
        for n, partition in enumerate(values['Partitions']):
            # Calculate partition id
            partition_prefix = sample.getId() + "-P"
            partition_id = '%s%s' % (partition_prefix, n + 1)
            partition['part_id'] = partition_id
            # Point to or create sample partition
            if partition_id in sample.objectIds():
                partition['object'] = sample[partition_id]
            else:
                partition['object'] = create_samplepartition(
                    sample,
                    partition
                )
            # now assign analyses to this partition.
            obj = partition['object']
            for analysis in analyses:
                if analysis.getService().UID() in partition['services']:
                    analysis.setSamplePartition(obj)

            partitions.append(partition)

        # If Preservation is required for some partitions,
        # and the SamplingWorkflow is disabled, we need
        # to transition to to_be_preserved manually.
        if not samplingworkflow_enabled:
            to_be_preserved = []
            sample_due = []
            lowest_state = 'sample_due'
            for p in sample.objectValues('SamplePartition'):
                if p.getPreservation():
                    lowest_state = 'to_be_preserved'
                    to_be_preserved.append(p)
                else:
                    sample_due.append(p)
            for p in to_be_preserved:
                doActionFor(p, 'to_be_preserved')
            for p in sample_due:
                doActionFor(p, 'sample_due')
            doActionFor(sample, lowest_state)
            doActionFor(ar, lowest_state)

        # Transition pre-preserved partitions
        for p in partitions:
            if 'prepreserved' in p and p['prepreserved']:
                part = p['object']
                state = workflow.getInfoFor(part, 'review_state')
                if state == 'to_be_preserved':
                    workflow.doActionFor(part, 'preserve')

    # Return the newly created Analysis Request
    return ar
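In this variant everything is driven by the values dict, including the special
keys described in the docstring. The payload below is only a hedged
illustration; the concrete objects, price mapping and range format are
assumptions.

values = {
    'Sample': '',                       # falsy -> a new primary sample is created
    'Contact': contact.UID(),           # assumed Contact object
    'SampleType': sampletype.UID(),     # assumed SampleType object
    'Analyses': [service_ca, service_mg],   # assumed AnalysisService objects
    'Partitions': [{'services': [service_ca.UID(), service_mg.UID()]}],
    'Prices': {service_ca.UID(): '15.00'},  # assumed {service_uid: price} mapping
    'ResultsRange': [{'uid': service_ca.UID(), 'min': 5, 'max': 15}],  # assumed format
}
ar = create_analysisrequest(client, request, values)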
Example #5
def create_analysisrequest(context,
                           request,
                           values,
                           analyses=None,
                           partitions=None,
                           specifications=None,
                           prices=None):
    """This is meant for general use and should do everything necessary to
    create and initialise an AR and any other required auxiliary objects
    (Sample, SamplePartition, Analysis...)

    :param context:
        The container in which the ARs will be created.
    :param request:
        The current Request object.
    :param values:
        a dict, where keys are AR|Sample schema field names.
    :param analyses:
        Analysis services list.  If specified, augments the values in
        values['Analyses']. May consist of service objects, UIDs, or Keywords.
    :param partitions:
        A list of dictionaries, if specific partitions are required.  If not
        specified, AR's sample is created with a single partition.
    :param specifications:
        These values augment those found in values['Specifications']
    :param prices:
        Allow different prices to be set for analyses.  If not set, prices
        are read from the associated analysis service.
    """

    # Gather necessary tools
    workflow = getToolByName(context, 'portal_workflow')
    bc = getToolByName(context, 'bika_catalog')
    # Analyses are analyses services
    analyses_services = analyses
    analyses = []
    # It's necessary to modify these and we don't want to pollute the
    # parent's data
    values = values.copy()
    analyses_services = analyses_services if analyses_services else []
    anv = values['Analyses'] if values.get('Analyses', None) else []
    analyses_services = anv + analyses_services

    if not analyses_services:
        raise RuntimeError(
            "create_analysisrequest: no analyses services provided")

    # Create a new sample or locate the existing one for a secondary AR
    if not values.get('Sample', False):
        secondary = False
        workflow_enabled = context.bika_setup.getSamplingWorkflowEnabled()
        sample = create_sample(context, request, values)
    else:
        secondary = True
        sample = get_sample_from_values(context, values)
        workflow_enabled = sample.getSamplingWorkflowEnabled()

    # Create the Analysis Request
    ar = _createObjectByType('AnalysisRequest', context, tmpID())

    # Set some required fields manually before processForm is called
    ar.setSample(sample)
    values['Sample'] = sample

    if values.get('DateSampled', False):
        # Inject the timezone into the value selected via the date widget,
        # which is timezone-naive, i.e. DateSampled is '2017-05-15 01:05'
        # but should be '2017/05/15 01:05:00 GMT+2'.  Otherwise processForm =>
        # reindexObject() stores it as GMT+0, which results in an incorrect
        # date record.

        tz = DateTime().timezone()
        datesampled = DateTime(values['DateSampled'] + ' ' + tz)
        values['DateSampled'] = datesampled

    ar.processForm(REQUEST=request, values=values)
    # Object has been renamed
    ar.edit(RequestID=ar.getId())

    # Set initial AR state
    action = '{0}sampling_workflow'.format('' if workflow_enabled else 'no_')
    workflow.doActionFor(ar, action)

    # Set analysis request analyses
    service_uids = _resolve_items_to_service_uids(analyses_services)
    # processForm has already created the analyses, but here we re-create them
    # with specs and prices. This function, even though it is called 'set',
    # deletes the old analyses, so we eventually obtain the desired analyses.
    ar.setAnalyses(service_uids, prices=prices, specs=specifications)
    # Getting the analysis objects
    analyses = ar.getAnalyses(full_objects=True)
    # Continue to set the state of the AR
    skip_receive = [
        'to_be_sampled', 'sample_due', 'sampled', 'to_be_preserved'
    ]
    if secondary:
        # Only 'sample_due' and 'sample_received' samples can be selected
        # for secondary analyses
        doActionFor(ar, 'sampled')
        doActionFor(ar, 'sample_due')
        sample_state = workflow.getInfoFor(sample, 'review_state')
        if sample_state not in skip_receive:
            doActionFor(ar, 'receive')

    # Set the state of analyses we created.
    for analysis in analyses:
        revers = analysis.getService().getNumberOfRequiredVerifications()
        analysis.setNumberOfRequiredVerifications(revers)
        doActionFor(analysis, 'sample_due')
        analysis_state = workflow.getInfoFor(analysis, 'review_state')
        if analysis_state not in skip_receive:
            doActionFor(analysis, 'receive')

    if not secondary:
        # Create sample partitions
        if not partitions:
            partitions = values.get('Partitions', [{'services': service_uids}])
        for n, partition in enumerate(partitions):
            # Create the sample partition
            partition['object'] = create_samplepartition(
                sample, partition, analyses)
        # If Preservation is required for some partitions,
        # and the SamplingWorkflow is disabled, we need
        # to transition to to_be_preserved manually.
        if not workflow_enabled:
            to_be_preserved = []
            sample_due = []
            lowest_state = 'sample_due'
            for p in sample.objectValues('SamplePartition'):
                if p.getPreservation():
                    lowest_state = 'to_be_preserved'
                    to_be_preserved.append(p)
                else:
                    sample_due.append(p)
            for p in to_be_preserved:
                doActionFor(p, 'to_be_preserved')
            for p in sample_due:
                doActionFor(p, 'sample_due')
            doActionFor(sample, lowest_state)
            doActionFor(ar, lowest_state)

        # Transition pre-preserved partitions
        for p in partitions:
            if 'prepreserved' in p and p['prepreserved']:
                part = p['object']
                state = workflow.getInfoFor(part, 'review_state')
                if state == 'to_be_preserved':
                    workflow.doActionFor(part, 'preserve')

    # Once the ar is fully created, check if there are rejection reasons
    reject_field = values.get('RejectionReasons', '')
    if reject_field and reject_field.get('checkbox', False):
        doActionFor(ar, 'reject')

    # If the Sampling Workflow field values are valid,
    # and the SamplingWorkflow is enabled, we will
    # automatically kick off the "sample" transition now
    tids = [t['id'] for t in get_transitions_for(ar)]
    if 'sample' in tids and ar.getSampler() and ar.getDateSampled():
        do_transition_for(ar, 'sample')

    # Return the newly created Analysis Request
    return ar
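For reference, a standalone sketch of the timezone injection performed above
with Zope's DateTime; the naive input string is just an illustrative value.

from DateTime import DateTime

naive = '2017-05-15 01:05'           # what the timezone-naive date widget returns
tz = DateTime().timezone()           # server timezone string, e.g. 'GMT+2'
aware = DateTime(naive + ' ' + tz)   # stored as '2017/05/15 01:05:00 GMT+2'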
Example #6
def create_analysisrequest(context,
                           request,
                           values,
                           analyses=[],
                           partitions=None,
                           specifications=None,
                           prices=None):
    # Gather necessary tools
    workflow = getToolByName(context, 'portal_workflow')
    bc = getToolByName(context, 'bika_catalog')

    # Create a new sample or locate the existing one for a secondary AR
    if values.get('Sample'):
        secondary = True
        if ISample.providedBy(values['Sample']):
            sample = values['Sample']
        else:
            sample = bc(UID=values['Sample'])[0].getObject()
        workflow_enabled = sample.getSamplingWorkflowEnabled()
    else:
        secondary = False
        workflow_enabled = context.bika_setup.getSamplingWorkflowEnabled()
        sample = create_sample(context, request, values)

    # Create the Analysis Request
    ar = _createObjectByType('AnalysisRequest', context, tmpID())
    ar.setSample(sample)

    # processForm renames the sample; this requires values to contain the Sample.
    values['Sample'] = sample
    ar.processForm(REQUEST=request, values=values)

    # Object has been renamed
    ar.edit(RequestID=ar.getId())

    # Set initial AR state
    workflow_action = 'sampling_workflow' if workflow_enabled \
        else 'no_sampling_workflow'
    workflow.doActionFor(ar, workflow_action)

    # Set analysis request analyses
    analyses = ar.setAnalyses(analyses, prices=prices, specs=specifications)

    if secondary:
        # Only 'sample_due' and 'sample_received' samples can be selected
        # for secondary analyses
        doActionFor(ar, 'sample')
        doActionFor(ar, 'sample_due')
        sample_state = workflow.getInfoFor(sample, 'review_state')
        if sample_state == 'sample_received':
            doActionFor(ar, 'receive')

        for analysis in ar.getAnalyses(full_objects=1):
            doActionFor(analysis, 'sample')
            doActionFor(analysis, 'sample_due')
            analysis_transition_ids = [
                t['id'] for t in workflow.getTransitionsFor(analysis)
            ]
            if 'receive' in analysis_transition_ids and sample_state == 'sample_received':
                doActionFor(analysis, 'receive')

    if not secondary:
        # Create sample partitions
        if not partitions:
            partitions = [{'services': analyses}]
        for n, partition in enumerate(partitions):
            # Calculate partition id
            partition_prefix = sample.getId() + "-P"
            partition_id = '%s%s' % (partition_prefix, n + 1)
            partition['part_id'] = partition_id
            # Point to or create sample partition
            if partition_id in sample.objectIds():
                partition['object'] = sample[partition_id]
            else:
                partition['object'] = create_samplepartition(
                    sample, partition, analyses)
        # If Preservation is required for some partitions,
        # and the SamplingWorkflow is disabled, we need
        # to transition to to_be_preserved manually.
        if not workflow_enabled:
            to_be_preserved = []
            sample_due = []
            lowest_state = 'sample_due'
            for p in sample.objectValues('SamplePartition'):
                if p.getPreservation():
                    lowest_state = 'to_be_preserved'
                    to_be_preserved.append(p)
                else:
                    sample_due.append(p)
            for p in to_be_preserved:
                doActionFor(p, 'to_be_preserved')
            for p in sample_due:
                doActionFor(p, 'sample_due')
            doActionFor(sample, lowest_state)
            doActionFor(ar, lowest_state)

        # Transition pre-preserved partitions
        for p in partitions:
            if 'prepreserved' in p and p['prepreserved']:
                part = p['object']
                state = workflow.getInfoFor(part, 'review_state')
                if state == 'to_be_preserved':
                    workflow.doActionFor(part, 'preserve')

    # Return the newly created Analysis Request
    return ar
Example #7
def create_analysisrequest(
    context,
    request,
    values,
    analyses=[],
    partitions=None,
    specifications=None,
    prices=None
):
    # Gather necessary tools
    workflow = getToolByName(context, 'portal_workflow')
    bc = getToolByName(context, 'bika_catalog')

    # Create a new sample or locate the existing one for a secondary AR
    if values['Sample']:
        secondary = True
        if ISample.providedBy(values['Sample']):
            sample = values['Sample']
        else:
            sample = bc(UID=values['Sample'])[0].getObject()
        workflow_enabled = sample.getSamplingWorkflowEnabled()
    else:
        secondary = False
        workflow_enabled = context.bika_setup.getSamplingWorkflowEnabled()
        sample = create_sample(context, request, values)

    # Create the Analysis Request
    ar = _createObjectByType('AnalysisRequest', context, tmpID())
    ar.setSample(sample)

    # processForm renames the sample; this requires values to contain the Sample.
    values['Sample'] = sample
    ar.processForm(REQUEST=request, values=values)

    # Object has been renamed
    ar.edit(RequestID=ar.getId())

    # Set initial AR state
    workflow_action = 'sampling_workflow' if workflow_enabled \
        else 'no_sampling_workflow'
    workflow.doActionFor(ar, workflow_action)

    # Set analysis request analyses
    analyses = ar.setAnalyses(analyses, prices=prices, specs=specifications)

    if secondary:
        # Only 'sample_due' and 'sample_received' samples can be selected
        # for secondary analyses
        doActionFor(ar, 'sample')
        doActionFor(ar, 'sample_due')
        sample_state = workflow.getInfoFor(sample, 'review_state')
        if sample_state == 'sample_received':
            doActionFor(ar, 'receive')

        for analysis in ar.getAnalyses(full_objects=1):
            doActionFor(analysis, 'sample')
            doActionFor(analysis, 'sample_due')
            analysis_transition_ids = [t['id'] for t in workflow.getTransitionsFor(analysis)]
            if 'receive' in analysis_transition_ids and sample_state == 'sample_received':
                doActionFor(analysis, 'receive')

    if not secondary:
        # Create sample partitions (fall back to a single partition with all
        # analyses if none were given, as the other variants do)
        if not partitions:
            partitions = [{'services': analyses}]
        for n, partition in enumerate(partitions):
            # Calculate partition id
            partition_prefix = sample.getId() + "-P"
            partition_id = '%s%s' % (partition_prefix, n + 1)
            partition['part_id'] = partition_id
            # Point to or create sample partition
            if partition_id in sample.objectIds():
                partition['object'] = sample[partition_id]
            else:
                partition['object'] = create_samplepartition(
                    sample,
                    partition,
                    analyses
                )
        # If Preservation is required for some partitions,
        # and the SamplingWorkflow is disabled, we need
        # to transition to to_be_preserved manually.
        if not workflow_enabled:
            to_be_preserved = []
            sample_due = []
            lowest_state = 'sample_due'
            for p in sample.objectValues('SamplePartition'):
                if p.getPreservation():
                    lowest_state = 'to_be_preserved'
                    to_be_preserved.append(p)
                else:
                    sample_due.append(p)
            for p in to_be_preserved:
                doActionFor(p, 'to_be_preserved')
            for p in sample_due:
                doActionFor(p, 'sample_due')
            doActionFor(sample, lowest_state)
            doActionFor(ar, lowest_state)

        # Transition pre-preserved partitions
        for p in partitions:
            if 'prepreserved' in p and p['prepreserved']:
                part = p['object']
                state = workflow.getInfoFor(part, 'review_state')
                if state == 'to_be_preserved':
                    workflow.doActionFor(part, 'preserve')

    # Return the newly created Analysis Request
    return ar
Example #8
def create_analysisrequest(client, request, values, analyses=None,
                           partitions=None, specifications=None, prices=None):
    """This is meant for general use and should do everything necessary to
    create and initialise an AR and any other required auxiliary objects
    (Sample, SamplePartition, Analysis...)

    :param client:
        The container (Client) in which the ARs will be created.
    :param request:
        The current Request object.
    :param values:
        a dict, where keys are AR|Sample schema field names.
    :param analyses:
        Analysis services list.  If specified, augments the values in
        values['Analyses']. May consist of service objects, UIDs, or Keywords.
    :param partitions:
        A list of dictionaries, if specific partitions are required.  If not
        specified, AR's sample is created with a single partition.
    :param specifications:
        These values augment those found in values['Specifications']
    :param prices:
        Allow different prices to be set for analyses.  If not set, prices
        are read from the associated analysis service.
    """
    # Don't pollute the dict param passed in
    values = deepcopy(values)

    # Create a new sample or locate the existing one for a secondary AR
    secondary = False
    sample = None
    if not values.get('Sample', False):
        sample = create_sample(client, request, values)
    else:
        sample = get_sample_from_values(client, values)
        secondary = True

    # Create the Analysis Request
    ar = _createObjectByType('AnalysisRequest', client, tmpID())

    # Set some required fields manually before processForm is called
    ar.setSample(sample)
    values['Sample'] = sample
    ar.processForm(REQUEST=request, values=values)
    ar.edit(RequestID=ar.getId())

    # Set analysis request analyses. 'Analyses' param are analyses services
    analyses = analyses if analyses else []
    service_uids = get_services_uids(
        context=client, analyses_serv=analyses, values=values)
    # processForm has already created the analyses, but here we re-create them
    # with specs and prices. This function, even though it is called 'set',
    # deletes the old analyses, so we eventually obtain the desired analyses.
    ar.setAnalyses(service_uids, prices=prices, specs=specifications)
    analyses = ar.getAnalyses(full_objects=True)

    # Create sample partitions
    if not partitions:
        partitions = values.get('Partitions',
                                [{'services': service_uids}])

    part_num = 0
    prefix = sample.getId() + "-P"
    if secondary:
        # Always create new partitions if this is a secondary AR, because it
        # does not make sense to reuse the partitions used in a previous AR!
        sparts = sample.getSamplePartitions()
        for spart in sparts:
            spartnum = int(spart.getId().split(prefix)[1])
            if spartnum > part_num:
                part_num = spartnum

    for n, partition in enumerate(partitions):
        # Calculate partition id
        partition_id = '%s%s' % (prefix, part_num + 1)
        partition['part_id'] = partition_id
        # Point to or create sample partition
        if partition_id in sample.objectIds():
            partition['object'] = sample[partition_id]
        else:
            partition['object'] = create_samplepartition(
                sample,
                partition,
                analyses
            )
        part_num += 1

    # At this point we have a fully created AR, with a Sample, Partitions and
    # Analyses, but all of them are still in the initial state
    # ("sample_registered"). We can now transition the whole thing at once
    # (instead of doing it manually for each object we created): the after-
    # and before-transition handlers take care of cascading and promoting the
    # transitions to all the objects associated with this Analysis Request.
    sampling_workflow_enabled = sample.getSamplingWorkflowEnabled()
    action = 'no_sampling_workflow'
    if sampling_workflow_enabled:
        action = 'sampling_workflow'
    # Transition the Analysis Request and related objects to "sampled" (if
    # sampling workflow not enabled) or to "to_be_sampled" statuses.
    doActionFor(ar, action)

    if secondary:
        # If secondary AR, then we need to manually transition the AR (and its
        # children) to fit with the Sample Partition's current state
        sampleactions = getReviewHistoryActionsList(sample)
        doActionsFor(ar, sampleactions)
        # We need a workaround here in order to transition partitions.
        # auto_no_preservation_required and auto_preservation_required are
        # auto transitions applied to analysis requests, but partitions don't
        # have them, so we need to replace them by the sample_workflow
        # equivalent.
        if 'auto_no_preservation_required' in sampleactions:
            index = sampleactions.index('auto_no_preservation_required')
            sampleactions[index] = 'sample_due'
        elif 'auto_preservation_required' in sampleactions:
            index = sampleactions.index('auto_preservation_required')
            sampleactions[index] = 'to_be_preserved'
        # We need to transition the partition manually
        # Transition pre-preserved partitions
        for partition in partitions:
            part = partition['object']
            doActionsFor(part, sampleactions)

    # Transition pre-preserved partitions
    for p in partitions:
        if 'prepreserved' in p and p['prepreserved']:
            part = p['object']
            doActionFor(part, 'preserve')

    # Once the ar is fully created, check if there are rejection reasons
    reject_field = values.get('RejectionReasons', '')
    if reject_field and reject_field.get('checkbox', False):
        doActionFor(ar, 'reject')

    return ar
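A hedged sketch of a secondary-AR call for the variant above: passing an
existing Sample under values['Sample'] makes the function reuse it instead of
creating a new one. The sample, client, contact and service UID below are
assumptions for illustration.

values = {
    'Sample': existing_sample,      # assumed existing Sample (resolved by get_sample_from_values)
    'Contact': contact.UID(),       # assumed Contact object
}
secondary_ar = create_analysisrequest(client, request, values,
                                      analyses=[service_ca.UID()])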
Example #9
def create_analysisrequest(
    context,
    request,
    values,
    analyses=[],
    partitions=None,
    specifications=None,
    prices=None
):
    # Gather necessary tools
    portal_workflow = context.portal_workflow
    # Determine if the sampling workflow is enabled
    workflow_enabled = context.bika_setup.getSamplingWorkflowEnabled()
    # Create the sample
    sample = create_sample(context, request, values)
    values['Sample'] = sample
    values['Sample_uid'] = sample.UID()
    # Create the Analysis Request
    ar = _createObjectByType('AnalysisRequest', context, tmpID())
    ar.setSample(sample)
    ar.processForm(REQUEST=request, values=values)
    # Object has been renamed
    ar.edit(RequestID=ar.getId())
    # Set analysis request analyses
    analyses = ar.setAnalyses(analyses, prices=prices, specs=specifications)
    # Create sample partitions (fall back to a single partition with all
    # analyses if none were given, as the other variants do)
    if not partitions:
        partitions = [{'services': analyses}]
    for n, partition in enumerate(partitions):
        # Calculate partition id
        partition_prefix = sample.getId() + "-P"
        partition_id = '%s%s' % (partition_prefix, n + 1)
        partition['part_id'] = partition_id
        # Point to or create sample partition
        if partition_id in sample.objectIds():
            partition['object'] = sample[partition_id]
        else:
            partition['object'] = create_samplepartition(
                sample,
                partition,
                analyses
            )
    # Perform the appropriate workflow action
    workflow_action = 'sampling_workflow' if workflow_enabled \
        else 'no_sampling_workflow'
    portal_workflow.doActionFor(ar, workflow_action)
    # If Preservation is required for some partitions,
    # and the SamplingWorkflow is disabled, we need
    # to transition to to_be_preserved manually.
    if not workflow_enabled:
        to_be_preserved = []
        sample_due = []
        lowest_state = 'sample_due'
        for p in sample.objectValues('SamplePartition'):
            if p.getPreservation():
                lowest_state = 'to_be_preserved'
                to_be_preserved.append(p)
            else:
                sample_due.append(p)
        for p in to_be_preserved:
            doActionFor(p, 'to_be_preserved')
        for p in sample_due:
            doActionFor(p, 'sample_due')
        doActionFor(sample, lowest_state)
        doActionFor(ar, lowest_state)
    # Receive secondary AR
    if values.get('Sample_uid', ''):
        doActionFor(ar, 'sampled')
        doActionFor(ar, 'sample_due')
        not_receive = [
            'to_be_sampled', 'sample_due', 'sampled', 'to_be_preserved'
        ]
        sample_state = portal_workflow.getInfoFor(sample, 'review_state')
        if sample_state not in not_receive:
            doActionFor(ar, 'receive')
        for analysis in ar.getAnalyses(full_objects=1):
            doActionFor(analysis, 'sampled')
            doActionFor(analysis, 'sample_due')
            if sample_state not in not_receive:
                doActionFor(analysis, 'receive')
    # Transition pre-preserved partitions
    for p in partitions:
        if 'prepreserved' in p and p['prepreserved']:
            part = p['object']
            state = portal_workflow.getInfoFor(part, 'review_state')
            if state == 'to_be_preserved':
                portal_workflow.doActionFor(part, 'preserve')
    # Return the newly created Analysis Request
    return ar
Example #10
def create_analysisrequest(context, request, values, analyses=None,
                           partitions=None, specifications=None, prices=None):
    """Override the one in bika.lims
    """

    # Gather necessary tools
    workflow = getToolByName(context, 'portal_workflow')
    bc = getToolByName(context, 'bika_catalog')
    # Analyses are analyses services
    analyses_services = analyses
    analyses = []
    # It's necessary to modify these and we don't want to pollute the
    # parent's data
    values = values.copy()
    analyses_services = analyses_services if analyses_services else []
    anv = values['Analyses'] if values.get('Analyses', None) else []
    analyses_services = anv + analyses_services

    if not analyses_services:
        raise RuntimeError(
                "create_analysisrequest: no analyses services provided")

    # Create a new sample or locate the existing one for a secondary AR
    if not values.get('Sample', False):
        secondary = False
        workflow_enabled = context.bika_setup.getSamplingWorkflowEnabled()
        sample = create_sample(context, request, values)
    else:
        secondary = True
        sample = get_sample_from_values(context, values)
        workflow_enabled = sample.getSamplingWorkflowEnabled()

    # Create the Analysis Request
    ar = _createObjectByType('AnalysisRequest', context, tmpID())

    # Set some required fields manually before processForm is called
    ar.setSample(sample)
    values['Sample'] = sample
    values['SampleType'] = sample.getSampleType()
    ar.processForm(REQUEST=request, values=values)
    # Object has been renamed
    ar.edit(RequestID=ar.getId())

    # Set initial AR state
    action = '{0}sampling_workflow'.format('' if workflow_enabled else 'no_')
    workflow.doActionFor(ar, action)

    # Set analysis request analyses
    service_uids = _resolve_items_to_service_uids(analyses_services)
    # processForm has already created the analyses, but here we re-create them
    # with specs and prices. This function, even though it is called 'set',
    # deletes the old analyses, so we eventually obtain the desired analyses.
    ar.setAnalyses(service_uids, prices=prices, specs=specifications)
    # Getting the analysis objects
    analyses = ar.getAnalyses(full_objects=True)
    # Continue to set the state of the AR
    skip_receive = ['to_be_sampled', 'sample_due', 'sampled', 'to_be_preserved']
    if secondary:
        # Only 'sample_due' and 'sample_received' samples can be selected
        # for secondary analyses
        doActionFor(ar, 'sampled')
        doActionFor(ar, 'sample_due')
        sample_state = workflow.getInfoFor(sample, 'review_state')
        if sample_state not in skip_receive:
            doActionFor(ar, 'receive')

    # Set the state of analyses we created.
    for analysis in analyses:
        revers = analysis.getService().getNumberOfRequiredVerifications()
        analysis.setNumberOfRequiredVerifications(revers)
        doActionFor(analysis, 'sample_due')
        analysis_state = workflow.getInfoFor(analysis, 'review_state')
        if analysis_state not in skip_receive:
            doActionFor(analysis, 'receive')

    if not secondary:
        # Create sample partitions
        if not partitions:
            partitions = [{'services': service_uids}]
        for n, partition in enumerate(partitions):
            # Calculate partition id
            partition_prefix = sample.getId() + "-P"
            partition_id = '%s%s' % (partition_prefix, n + 1)
            partition['part_id'] = partition_id
            # Point to or create sample partition
            if partition_id in sample.objectIds():
                partition['object'] = sample[partition_id]
            else:
                partition['object'] = create_samplepartition(
                    sample,
                    partition,
                    analyses
                )
        # If Preservation is required for some partitions,
        # and the SamplingWorkflow is disabled, we need
        # to transition to to_be_preserved manually.
        if not workflow_enabled:
            to_be_preserved = []
            sample_due = []
            lowest_state = 'sample_due'
            for p in sample.objectValues('SamplePartition'):
                if p.getPreservation():
                    lowest_state = 'to_be_preserved'
                    to_be_preserved.append(p)
                else:
                    sample_due.append(p)
            for p in to_be_preserved:
                doActionFor(p, 'to_be_preserved')
            for p in sample_due:
                doActionFor(p, 'sample_due')
            doActionFor(sample, lowest_state)
            doActionFor(ar, lowest_state)

        # Transition pre-preserved partitions
        for p in partitions:
            if 'prepreserved' in p and p['prepreserved']:
                part = p['object']
                state = workflow.getInfoFor(part, 'review_state')
                if state == 'to_be_preserved':
                    workflow.doActionFor(part, 'preserve')
    # Once the ar is fully created, check if there are rejection reasons
    reject_field = values.get('RejectionReasons', '')
    if reject_field and reject_field.get('checkbox', False):
        doActionFor(ar, 'reject')
    # Return the newly created Analysis Request
    return ar
Example #11
def create_analysisrequest(
        context,
        request,
        values,  # {field: value, ...}
        analyses=[],
        # uid, service or analysis; list of uids, services or analyses
        partitions=None,
        # list of dictionaries with container, preservation etc)
        specifications=None,
        prices=None):
    """This is meant for general use and should do everything necessary to
    create and initialise the AR and its requirements.
    XXX The ar-add form's ajaxAnalysisRequestSubmit should be calling this.
    """
    # Gather necessary tools
    workflow = getToolByName(context, 'portal_workflow')
    bc = getToolByName(context, 'bika_catalog')

    # It's necessary to modify these and we don't want to pollute the
    # parent's data
    values = values.copy()

    # Create a new sample or locate the existing one for a secondary AR
    if not values.get('Sample', False):
        secondary = False
        workflow_enabled = context.bika_setup.getSamplingWorkflowEnabled()
        sample = create_sample(context, request, values)
    else:
        secondary = True
        sample = None
        if ISample.providedBy(values['Sample']):
            sample = values['Sample']
        else:
            brains = bc(UID=values['Sample'])
            if brains:
                sample = brains[0].getObject()
        if not sample:
            raise RuntimeError(
                "create_analysisrequest No sample. values=%s" % values)
        workflow_enabled = sample.getSamplingWorkflowEnabled()

    # Create the Analysis Request
    ar = _createObjectByType('AnalysisRequest', context, tmpID())
    # Set some required fields manually before processForm is called
    ar.setSample(sample)
    values['Sample'] = sample
    ar.processForm(REQUEST=request, values=values)
    # Object has been renamed
    ar.edit(RequestID=ar.getId())

    # Set initial AR state
    action = '{0}sampling_workflow'.format('' if workflow_enabled else 'no_')
    workflow.doActionFor(ar, action)

    # Set analysis request analyses
    service_uids = _resolve_items_to_service_uids(analyses)
    analyses = ar.setAnalyses(service_uids, prices=prices, specs=specifications)

    if secondary:
        # Only 'sample_due' and 'sample_received' samples can be selected
        # for secondary analyses
        api.content.transition(obj=ar, to_state='sampled')
        api.content.transition(obj=ar, to_state='sample_due')
        sample_state = workflow.getInfoFor(sample, 'review_state')
        if sample_state == 'sample_received':
            doActionFor(ar, 'receive')

        for analysis in ar.getAnalyses(full_objects=1):
            doActionFor(analysis, 'sample')
            doActionFor(analysis, 'sample_due')
            analysis_transition_ids = [t['id'] for t in
                                       workflow.getTransitionsFor(analysis)]
            if 'receive' in analysis_transition_ids and sample_state == 'sample_received':
                doActionFor(analysis, 'receive')

    if not secondary:
        # Create sample partitions
        if not partitions:
            partitions = [{'services': analyses}]
        for n, partition in enumerate(partitions):
            # Calculate partition id
            partition_prefix = sample.getId() + "-P"
            partition_id = '%s%s' % (partition_prefix, n + 1)
            partition['part_id'] = partition_id
            # Point to or create sample partition
            if partition_id in sample.objectIds():
                partition['object'] = sample[partition_id]
            else:
                partition['object'] = create_samplepartition(
                    sample,
                    partition,
                    analyses
                )
        # If Preservation is required for some partitions,
        # and the SamplingWorkflow is disabled, we need
        # to transition to to_be_preserved manually.
        if not workflow_enabled:
            to_be_preserved = []
            sample_due = []
            lowest_state = 'sample_due'
            for p in sample.objectValues('SamplePartition'):
                if p.getPreservation():
                    lowest_state = 'to_be_preserved'
                    to_be_preserved.append(p)
                else:
                    sample_due.append(p)
            for p in to_be_preserved:
                doActionFor(p, 'to_be_preserved')
            for p in sample_due:
                doActionFor(p, 'sample_due')
            doActionFor(sample, lowest_state)
            doActionFor(ar, lowest_state)

        # Transition pre-preserved partitions
        for p in partitions:
            if 'prepreserved' in p and p['prepreserved']:
                part = p['object']
                state = workflow.getInfoFor(part, 'review_state')
                if state == 'to_be_preserved':
                    workflow.doActionFor(part, 'preserve')

    # Return the newly created Analysis Request
    return ar
Example #12
def create_analysisrequest(context, request, values):
    """Create an AR.

    :param context: the container in which the AR will be created (Client)
    :param request: the request object
    :param values: a dictionary containing fieldname/value pairs, which
           will be applied.  Some fields will have specific code to handle them,
           and others will be directly written to the schema.
    :return: the new AR instance

    Special keys present (or required) in the values dict, which are not present
    in the schema:

        - Partitions: data about partitions to be created, and the
                      analyses that are to be assigned to each.

        - Prices: custom prices set in the HTML form.

        - ResultsRange: Specification values entered in the HTML form.

    """
    # Gather necessary tools
    workflow = getToolByName(context, 'portal_workflow')
    bc = getToolByName(context, 'bika_catalog')

    # Create a new sample or locate the existing one for a secondary AR
    sample = False
    if values['Sample']:
        if ISample.providedBy(values['Sample']):
            secondary = True
            sample = values['Sample']
            samplingworkflow_enabled = sample.getSamplingWorkflowEnabled()
        else:
            brains = bc(UID=values['Sample'])
            if brains:
                secondary = True
                sample = brains[0].getObject()
                samplingworkflow_enabled = sample.getSamplingWorkflowEnabled()
    if not sample:
        secondary = False
        sample = create_sample(context, request, values)
        samplingworkflow_enabled = context.bika_setup.getSamplingWorkflowEnabled()

    # Create the Analysis Request
    ar = _createObjectByType('AnalysisRequest', context, tmpID())
    ar.setSample(sample)

    # processForm renames the sample; this requires values to contain the Sample.
    values['Sample'] = sample
    ar.processForm(REQUEST=request, values=values)

    # Object has been renamed
    ar.edit(RequestID=ar.getId())

    # Set initial AR state
    workflow_action = 'sampling_workflow' if samplingworkflow_enabled \
        else 'no_sampling_workflow'
    workflow.doActionFor(ar, workflow_action)


    # We need to send a list of service UIDS to setAnalyses function.
    # But we may have received a list of titles, list of UIDS,
    # list of keywords or list of service objects!
    service_uids = []
    for obj in values['Analyses']:
        uid = False
        # service objects
        if hasattr(obj, 'portal_type') and obj.portal_type == 'AnalysisService':
            uid = obj.UID()
        # Analysis objects (shortcut for e.g. copying analyses from another AR)
        elif hasattr(obj, 'portal_type') and obj.portal_type == 'Analysis':
            uid = obj.getService().UID()
        # Maybe already UIDs.
        if not uid:
            bsc = getToolByName(context, 'bika_setup_catalog')
            brains = bsc(portal_type='AnalysisService', UID=obj)
            if brains:
                uid = brains[0].UID
        # Maybe titles.
        if not uid:
            bsc = getToolByName(context, 'bika_setup_catalog')
            brains = bsc(portal_type='AnalysisService', title=obj)
            if brains:
                uid = brains[0].UID
        if uid:
            service_uids.append(uid)
        else:
            logger.info("In analysisrequest.add.create_analysisrequest: cannot "
                        "find uid of this service: %s" % obj)

    # Set analysis request analyses
    ar.setAnalyses(service_uids,
                   prices=values.get("Prices", []),
                   specs=values.get('ResultsRange', []))
    analyses = ar.getAnalyses(full_objects=True)

    skip_receive = ['to_be_sampled', 'sample_due', 'sampled', 'to_be_preserved']
    if secondary:
        # Only 'sample_due' and 'sample_received' samples can be selected
        # for secondary analyses
        doActionFor(ar, 'sampled')
        doActionFor(ar, 'sample_due')
        sample_state = workflow.getInfoFor(sample, 'review_state')
        if sample_state not in skip_receive:
            doActionFor(ar, 'receive')

    for analysis in analyses:
        doActionFor(analysis, 'sample_due')
        analysis_state = workflow.getInfoFor(analysis, 'review_state')
        if analysis_state not in skip_receive:
            doActionFor(analysis, 'receive')

    if not secondary:
        # Create sample partitions
        partitions = []
        for n, partition in enumerate(values['Partitions']):
            # Calculate partition id
            partition_prefix = sample.getId() + "-P"
            partition_id = '%s%s' % (partition_prefix, n + 1)
            partition['part_id'] = partition_id
            # Point to or create sample partition
            if partition_id in sample.objectIds():
                partition['object'] = sample[partition_id]
            else:
                partition['object'] = create_samplepartition(
                    sample,
                    partition
                )
            # now assign analyses to this partition.
            obj = partition['object']
            for analysis in analyses:
                if analysis.getService().UID() in partition['services']:
                    analysis.setSamplePartition(obj)

            partitions.append(partition)

        # If Preservation is required for some partitions,
        # and the SamplingWorkflow is disabled, we need
        # to transition to to_be_preserved manually.
        if not samplingworkflow_enabled:
            to_be_preserved = []
            sample_due = []
            lowest_state = 'sample_due'
            for p in sample.objectValues('SamplePartition'):
                if p.getPreservation():
                    lowest_state = 'to_be_preserved'
                    to_be_preserved.append(p)
                else:
                    sample_due.append(p)
            for p in to_be_preserved:
                doActionFor(p, 'to_be_preserved')
            for p in sample_due:
                doActionFor(p, 'sample_due')
            doActionFor(sample, lowest_state)
            doActionFor(ar, lowest_state)

        # Transition pre-preserved partitions
        for p in partitions:
            if 'prepreserved' in p and p['prepreserved']:
                part = p['object']
                state = workflow.getInfoFor(part, 'review_state')
                if state == 'to_be_preserved':
                    workflow.doActionFor(part, 'preserve')

    # Return the newly created Analysis Request
    return ar