Example #1
def decouple_analyses_from_sample_workflow(portal):
    logger.info("Decoupling analyses from sample workflow ...")
    add_index(portal,
              catalog_id=CATALOG_ANALYSIS_LISTING,
              index_name="isSampleReceived",
              index_attribute="isSampleReceived",
              index_metatype="BooleanIndex")

    wf_id = "bika_analysis_workflow"
    affected_rs = [
        "sample_registered", "to_be_sampled", "sampled", "sample_due",
        "sample_received", "to_be_preserved", "not_requested", "registered"
    ]
    wf_tool = api.get_tool("portal_workflow")
    workflow = wf_tool.getWorkflowById(wf_id)
    query = dict(portal_type=["Analysis", "DuplicateAnalysis"],
                 review_state=affected_rs)
    brains = api.search(query, CATALOG_ANALYSIS_LISTING)
    total = len(brains)
    for num, brain in enumerate(brains):
        # Restore state to "assigned" or "unassigned", depending on whether
        # the analysis is placed on a worksheet
        analysis = api.get_object(brain)
        target_state = "assigned" if analysis.getWorksheet() else "unassigned"

        if num % 100 == 0:
            logger.info("Restoring state to '{}': {}/{}".format(
                target_state, num, total))

        changeWorkflowState(analysis, wf_id, target_state)

        # Update role mappings
        workflow.updateRoleMappingsFor(analysis)

        # Reindex
        analysis.reindexObject()
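
These upgrade steps all share one pattern: force the workflow state directly, resync the role mappings, then reindex. As a rough sketch of what a helper like changeWorkflowState does on a standard Plone/CMF workflow tool (the real bika.lims implementation handles more bookkeeping, so treat this as illustrative only, and force_workflow_state as a hypothetical name):

from DateTime import DateTime
from Products.CMFCore.utils import getToolByName

def force_workflow_state(obj, wf_id, state_id):
    """Hypothetical helper: writes a workflow status entry directly so the
    tool reports `state_id` as the current review state, without firing
    transition guards or events.
    """
    wf_tool = getToolByName(obj, "portal_workflow")
    wf_tool.setStatusOf(wf_id, obj, {
        "action": None,
        "actor": None,
        "comments": "State forced programmatically",
        "review_state": state_id,
        "time": DateTime(),
    })
    # Keep object security and the catalog in sync with the new state
    wf_tool.getWorkflowById(wf_id).updateRoleMappingsFor(obj)
    obj.reindexObject()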
Example #2
def remove_orphan_reference_analyses(portal):
    logger.info("Removing orphan reference analyses ...")
    wf_id = "bika_referenceanalysis_workflow"
    wf_tool = api.get_tool("portal_workflow")
    workflow = wf_tool.getWorkflowById(wf_id)
    query = dict(portal_type="ReferenceAnalysis", review_state="unassigned")
    brains = api.search(query, CATALOG_ANALYSIS_LISTING)
    total = len(brains)
    for num, brain in enumerate(brains):
        orphan = api.get_object(brain)
        worksheet = orphan.getWorksheet()
        if worksheet:
            logger.info("Reassigning orphan reference: {}/{}".format(
                num, total))
            # This one is on a worksheet: reassign it instead of removing it
            changeWorkflowState(orphan, wf_id, "assigned")
            # Update role mappings
            workflow.updateRoleMappingsFor(orphan)
            # Reindex
            orphan.reindexObject()
            continue
        elif orphan.getInstrument():
            # This is a calibration test: keep it, but reindex if the
            # catalog metadata lacks the instrument UID
            if not brain.getInstrumentUID:
                orphan.reindexObject()
            # Not removed, so adjust the running total
            total -= 1
            continue

        if num % 100 == 0:
            logger.info("Removing orphan reference analysis: {}/{}".format(
                num, total))
        # Remove the orphan reference analysis
        orphan.aq_parent.manage_delObjects([orphan.getId()])
Example #3
def after_no_sampling_workflow(analysis_request):
    """ Event fired for no_sampling_workflow that makes the status of the
    Analysis request or Sample to become sample_ordered
    """
    if not analysis_request.isPartition():
        # Generate the requisition (delivery) pdf
        generate_requisition_pdf(analysis_request)

    # Set specifications by default
    sample_type = analysis_request.getSampleType()
    specs = api.get_field_value(sample_type, "DefaultAnalysisSpecifications",
                                None)
    if specs:
        analysis_request.setSpecification(api.get_object(specs))
    else:
        # Fall back to suitable specs, searched by sample type title
        sample_type_title = sample_type.Title()
        specs_title = "{} - calculated".format(sample_type_title)
        query = dict(portal_type="AnalysisSpec", title=specs_title)
        specs = api.search(query, 'bika_setup_catalog')
        if specs:
            analysis_request.setSpecification(api.get_object(specs[0]))

    if analysis_request.isPartition():
        # Change workflow state to "at_reception"
        wf.changeWorkflowState(analysis_request,
                               wf_id="bika_ar_workflow",
                               state_id="sample_at_reception")
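
For instance, under this fallback, a sample of type "Water" that has no DefaultAnalysisSpecifications set would pick up an AnalysisSpec titled "Water - calculated" from bika_setup_catalog, if one exists.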
Example #4
def create_partition_for_storage(sample_obj_brain_or_uid):
    """Creates an empty partition suitable for storage from the given sample
    If the sample passed in is a partition, generates a copy of the same
    partition without analyses set, but keeping the same parent.
    If the sample passed in is a primary sample, generates a new partition, but
    without analyses
    """
    sample = get_object(sample_obj_brain_or_uid)
    logger.info("Creating partition for storage: {}".format(get_id(sample)))

    PARTITION_SKIP_FIELDS = [
        "Analyses",
        "Attachment",
        "Client",
        "Profile",
        "Profiles",
        "RejectionReasons",
        "Remarks",
        "ResultsInterpretation",
        "ResultsInterpretationDepts",
        "Sample",
        "Template",
        "creation_date",
        "id",
        "modification_date",
        "ParentAnalysisRequest",
    ]
    primary = sample
    if sample.isPartition():
        primary = sample.getParentAnalysisRequest()

    # Set the basic fields for the Partition
    record = {
        "ParentAnalysisRequest": get_uid(primary),
    }

    # Copy all fields
    for fieldname, field in get_fields(sample).items():
        if field.type == "computed":
            logger.info("Skipping computed field {}".format(fieldname))
            continue
        if fieldname in PARTITION_SKIP_FIELDS:
            logger.info("Skipping field {}".format(fieldname))
            continue
        fieldvalue = field.get(sample)
        record[fieldname] = fieldvalue
        logger.info("Update record '{}': {}".format(fieldname,
                                                    repr(fieldvalue)))

    client = sample.getClient()
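    # crar is assumed to be create_analysisrequest from
    # bika.lims.utils.analysisrequest, imported here under an alias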
    partition = crar(client, request={}, values=record)

    # Force status to "stored"
    wf.changeWorkflowState(partition, "bika_ar_workflow", "stored")

    # Reindex the primary AR
    primary.reindexObject(idxs=["isRootAncestor"])
    return partition
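
A minimal usage sketch, assuming `sample` is an AnalysisRequest resolved elsewhere (e.g. via api.get_object_by_uid) and that api.get_workflow_status_of is available as in bika.lims.api:

# Hypothetical invocation: returns a new, analysis-free partition
# already forced into the "stored" state
partition = create_partition_for_storage(sample)
assert partition.isPartition()
assert api.get_workflow_status_of(partition) == "stored"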
Example #5
def fix_cancelled_analyses_inconsistencies(portal):
    logger.info("Resolving cancelled analyses inconsistencies ...")
    wf_id = "bika_analysis_workflow"
    wf_tool = api.get_tool("portal_workflow")
    workflow = wf_tool.getWorkflowById(wf_id)
    query = dict(portal_type="Analysis", cancellation_state="cancelled")
    brains = api.search(query, CATALOG_ANALYSIS_LISTING)
    total = len(brains)
    for num, brain in enumerate(brains):
        if brain.review_state == "cancelled":
            continue
        if num % 100 == 0:
            logger.info("Resolving state to 'cancelled': {}/{}".format(
                num, total))
        # Set state
        analysis = api.get_object(brain)
        changeWorkflowState(analysis, wf_id, "cancelled")
        # Update role mappings
        workflow.updateRoleMappingsFor(analysis)
        # Reindex. The review_state changed too, so keep both indexes
        # consistent in the catalog
        analysis.reindexObject(idxs=["cancellation_state", "review_state"])
Example #6
def remove_attachment_due_from_analysis_workflow(portal):
    logger.info("Removing attachment_due state from analysis workflow ...")
    wf_id = "bika_analysis_workflow"
    affected_rs = ["attachment_due"]
    wf_tool = api.get_tool("portal_workflow")
    workflow = wf_tool.getWorkflowById(wf_id)
    query = dict(review_state=affected_rs)
    brains = api.search(query, CATALOG_ANALYSIS_LISTING)
    total = len(brains)
    for num, brain in enumerate(brains):
        analysis = api.get_object(brain)
        target_state = "assigned" if analysis.getWorksheet() else "unassigned"

        if num % 100 == 0:
            logger.info("Restoring state to '{}': {}/{}".format(
                target_state, num, total))

        changeWorkflowState(analysis, wf_id, target_state)

        # Update role mappings
        workflow.updateRoleMappingsFor(analysis)

        # Reindex
        analysis.reindexObject()