Example #1
def fix_analyses_storage_instrument(portal):
    """Walks through all Analyses not yet verified and if they belong to the
    Storage requisition category, remove the instrument assignment
    """
    logger.info("Sanitizing 'Storage instrument' from analyses")
    query = dict(portal_type="AnalysisCategory", title="Storage requisition")
    cat = api.search(query, "bika_setup_catalog")
    if not cat:
        logger.warn("Category 'Storage requisition' not found [SKIP]")
        return

    cat_uid = api.get_uid(cat[0])

    # Cleanup analysis services first
    query = dict(portal_type="AnalysisService", getCategoryUID=cat_uid)
    brains = api.search(query, "bika_setup_catalog")
    for brain in brains:
        service = api.get_object(brain)
        if not service.getInstrument():
            continue
        service.setInstrument(None)
        service.reindexObject()

    # Cleanup analyses
    query = dict(getCategoryUID=cat_uid)
    brains = api.search(query, CATALOG_ANALYSIS_LISTING)
    for brain in brains:
        if brain.review_state in ['published', 'rejected', 'invalid']:
            continue
        if not brain.getInstrumentUID:
            continue
        analysis = api.get_object(brain)
        analysis.setInstrument(None)
        analysis.reindexObject()
    logger.info("Sanitizing 'Storage instrument' from analyses [DONE]")
Example #2
def sanitize_ranges_calculation_from_analyses(portal):
    """Walks through all Analyses not yet verified and remove the calculation
    if is Ranges Calculation set
    """
    logger.info("Sanitizing 'Ranges Calculation' from analyses")
    query = dict(portal_type="Calculation", title="Ranges calculation")
    calc = api.search(query, "bika_setup_catalog")
    if not calc:
        logger.warn("Calculation 'Ranges calculation' not found! [SKIP]")
        return
    calc = api.get_object(calc[0])
    calc_uid = api.get_uid(calc)

    # Cleanup analysis services first
    query = dict(portal_type="AnalysisService", getCalculationUID=calc_uid)
    brains = api.search(query, "bika_setup_catalog")
    for brain in brains:
        service = api.get_object(brain)
        service.setCalculation(None)
        service.reindexObject()

    # Cleanup analyses. Fetch them all and filter by calculation below
    query = dict()
    brains = api.search(query, CATALOG_ANALYSIS_LISTING)
    for brain in brains:
        if brain.getCalculationUID != calc_uid:
            continue
        analysis = api.get_object(brain)
        analysis.setCalculation(None)
        analysis.reindexObject()
    logger.info("Sanitizing 'Ranges Calculation' from analyses [DONE]")
Example #3
 def get_object_by_uid(self, uid):
     """Get the object by UID
     """
     logger.debug("get_object_by_uid::UID={}".format(uid))
     obj = api.get_object_by_uid(uid, None)
     if obj is None:
         logger.warn("!! No object found for UID #{} !!")
     return obj
Example #4
def update_workflow(workflow_id, settings):
    logger.info("Updating workflow '{}' ...".format(workflow_id))
    wf_tool = api.get_tool("portal_workflow")
    workflow = wf_tool.getWorkflowById(workflow_id)
    if not workflow:
        logger.warn("Workflow '{}' not found [SKIP]".format(workflow_id))
        return
    states = settings.get("states", {})
    for state_id, values in states.items():
        update_workflow_state(workflow, state_id, values)

    transitions = settings.get("transitions", {})
    for transition_id, values in transitions.items():
        update_workflow_transition(workflow, transition_id, values)
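
The settings mapping itself is not shown in this example. Below is a minimal sketch of the shape update_workflow expects, inferred from the settings.get("states") / settings.get("transitions") calls above; the inner value dicts are assumptions and ultimately depend on what update_workflow_state and update_workflow_transition consume.

# Hypothetical settings for update_workflow(). Only the outer "states" and
# "transitions" keys come from the function above; every inner key is an
# assumption about what the update helpers read.
SAMPLE_WORKFLOW_SETTINGS = {
    "states": {
        "sample_received": {
            "title": "Received",                 # assumed key
            "permissions": {},                   # assumed key
        },
    },
    "transitions": {
        "receive": {
            "title": "Receive",                  # assumed key
            "new_state": "sample_received",      # assumed key
        },
    },
}

# update_workflow("bika_ar_workflow", SAMPLE_WORKFLOW_SETTINGS)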
Example #5
def set_field_value(instance, field_name, value):
    """Sets the value to a Schema field
    """
    if field_name == "id":
        logger.warn("Assignment of id is not allowed")
        return
    logger.info("Field {} = {}".format(field_name, repr(value)))
    instance = get_object(instance)
    schema = instance.Schema()
    field = schema.getField(field_name) if schema else None
    if not field:
        fail("No field {} found for {}".format(field_name, repr(instance)))
    field.set(instance, value)
Example #6
 def get(self, obj, key):
     if not obj or not key:
         return ""
     parts = key.split(".")
     if len(parts) == 1:
         v = obj.get(key)
         if v is None:
             logger.warn("No reference found for key={} on object={}"
                         .format(key, obj.id))
             return "*** {} is not a valid key ***".format(key)
         if callable(v):
             v = v()
         return v
     nkey = '.'.join(parts[1:])
     nobj = obj.get(parts[0])
     return self.get(nobj, nkey)
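
A rough illustration of the dotted-key traversal above, using stand-in objects. The Stub class, the "Client.Name" key and the values are hypothetical; they only mimic the .get()/.id protocol the method relies on.

# Stand-ins that mimic the .get()/.id protocol used by the method above.
class Stub(object):
    def __init__(self, id, **fields):
        self.id = id
        self._fields = fields

    def get(self, name):
        return self._fields.get(name)


client = Stub("client-1", Name=lambda: "Happy Hills")
sample = Stub("sample-1", Client=client)

# Calling the method above, e.g. handler.get(sample, "Client.Name"), splits
# the key on ".", resolves "Client" on the sample, recurses with "Name" on the
# client, and calls the value because it is callable, returning "Happy Hills".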
Example #7
def calculate_result(self, mapping=None, default=_marker):
    """Calculate the result
    """
    if mapping is None:
        mapping = {}
    formula = self.getMinifiedFormula()
    formula = string.Template(formula).safe_substitute(mapping)
    formula = formula.replace("[", "%(").replace("]", ")")

    try:
        formula = formula.format(**mapping)
    except KeyError:
        pass

    try:
        result = eval(formula, self._getGlobals())
    except (TypeError, ZeroDivisionError, KeyError, ImportError) as e:
        if default is _marker:
            raise e
        logger.warn(traceback.format_exc())
        return default
    return result
Example #8
def generate_delivery_pdf(context, ars_or_samples):
    if not ars_or_samples:
        logger.warn("No Analysis Requests or Samples provided")
        return

    if ISample.providedBy(ars_or_samples) or \
        IAnalysisRequest.providedBy(ars_or_samples):
        return generate_delivery_pdf(context, [ars_or_samples])

    if not isinstance(ars_or_samples, list):
        logger.warn("Type not supported: {}".format(repr(ars_or_samples)))
        return

    html = DeliveryFormPdf(context,
                           context.REQUEST,
                           analysis_requests=ars_or_samples).template()
    html = safe_unicode(html).encode("utf-8")
    filename = "delivery"
    pdf_fn = tempfile.mktemp(suffix=".pdf")
    pdf = createPdf(htmlreport=html, outfile=pdf_fn)
    if not pdf:
        ar_ids = map(lambda ar: ar.id, ars_or_samples)
        logger.warn(
            "Unable to generate the PDF of delivery form for {}".format(
                ' '.join(ar_ids)))
        return None

    def _attach_to_ar(pdf, ar_brain_or_obj):
        ar = api.get_object(ar_brain_or_obj)
        attid = ar.aq_parent.generateUniqueId('Attachment')
        att = _createObjectByType("Attachment", ar.aq_parent, attid)
        att.setAttachmentFile(open(pdf_fn))
        # Awkward workaround to rename the file
        attf = att.getAttachmentFile()
        attf.filename = '%s.pdf' % filename
        att.setAttachmentFile(attf)
        att.unmarkCreationFlag()
        renameAfterCreation(att)
        atts = ar.getAttachment() + [att] if ar.getAttachment() else [att]
        atts = [a.UID() for a in atts]
        ar.setAttachment(atts)

    for ar_or_sample in ars_or_samples:
        # Attach the pdf to the Analysis Request
        if ISample.providedBy(ar_or_sample):
            for ar in ar_or_sample.getAnalysisRequests():
                _attach_to_ar(pdf, ar)
        elif IAnalysisRequest.providedBy(ar_or_sample):
            _attach_to_ar(pdf, ar_or_sample)

    return pdf_fn
Example #9
def generate_requisition_pdf(ar_or_sample):
    if not ar_or_sample:
        logger.warn("No Analysis Request or Sample provided")
        return
    if ISample.providedBy(ar_or_sample):
        for ar in ar_or_sample.getAnalysisRequests():
            generate_requisition_pdf(ar)
        return
    elif not IAnalysisRequest.providedBy(ar_or_sample):
        logger.warn("Type not supported: {}".format(repr(ar_or_sample)))
        return

    html = RequisitionFormPdf(ar_or_sample, ar_or_sample.REQUEST).template()
    html = safe_unicode(html).encode('utf-8')
    filename = '%s-requisition' % ar_or_sample.id
    pdf_fn = tempfile.mktemp(suffix=".pdf")
    pdf = createPdf(htmlreport=html, outfile=pdf_fn)
    if not pdf:
        logger.warn(
            "Unable to generate the PDF of requisition form for {}".format(
                ar_or_sample.id))
        return

    # Attach the pdf to the Analysis Request
    attid = ar_or_sample.aq_parent.generateUniqueId('Attachment')
    att = _createObjectByType("Attachment", ar_or_sample.aq_parent, attid)
    att.setAttachmentFile(open(pdf_fn))
    att.setReportOption('i')  # Ignore in report

    # Try to assign the Requisition Attachment Type
    query = dict(portal_type='AttachmentType', title='Requisition')
    brains = api.search(query, 'bika_setup_catalog')
    if brains:
        att_type = api.get_object(brains[0])
        att.setAttachmentType(att_type)

    # Awkward workaround to rename the file
    attf = att.getAttachmentFile()
    attf.filename = '%s.pdf' % filename
    att.setAttachmentFile(attf)
    att.unmarkCreationFlag()
    renameAfterCreation(att)
    atts = ar_or_sample.getAttachment() + [att] if \
        ar_or_sample.getAttachment() else [att]
    atts = [a.UID() for a in atts]
    ar_or_sample.setAttachment(atts)
    os.remove(pdf_fn)
Example #10
    def get_bs_object(xlsx_row, xlsx_keyword, portal_type, criteria):
        text_value = xlsx_row.get(xlsx_keyword, None)
        if not text_value:
            logger.warn("Value not set for keyword {}".format(xlsx_keyword))
            return None

        query = {"portal_type": portal_type, criteria: text_value}
        brain = api.search(query, 'bika_setup_catalog')
        if not brain:
            logger.warn("No objects found for type {} and {} '{}'".format(
                portal_type, criteria, text_value))
            return None
        if len(brain) > 1:
            logger.warn(
                "More than one object found for type {} and {} '{}'".format(
                    portal_type, criteria, text_value))
            return None

        return api.get_object(brain[0])
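
The same helper reappears inside Example #12 below; here is a short usage sketch based on how it is called there. The "Ca" keyword is a hypothetical value read from an xlsx row.

# Look up the AnalysisService whose getKeyword index matches the value found
# in the row; returns None (with a warning) on zero or multiple matches.
row = {"keyword": "Ca"}
service = get_bs_object(row, "keyword", "AnalysisService", "getKeyword")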
Example #11
def generate_delivery_pdf(context, ars_or_samples):
    if not ars_or_samples:
        logger.warn("No Analysis Requests or Samples provided")
        return

    if ISample.providedBy(ars_or_samples) or \
        IAnalysisRequest.providedBy(ars_or_samples):
        return generate_delivery_pdf(context, [ars_or_samples])

    if not isinstance(ars_or_samples, list):
        logger.warn("Type not supported: {}".format(repr(ars_or_samples)))
        return

    html = DeliveryFormPdf(context, context.REQUEST,
                           analysis_requests=ars_or_samples).template()
    html = safe_unicode(html).encode("utf-8")
    filename = "delivery"
    pdf_fn = tempfile.mktemp(suffix=".pdf")
    pdf = createPdf(htmlreport=html, outfile=pdf_fn)
    if not pdf:
        ar_ids = map(lambda ar: ar.id, ars_or_samples)
        logger.warn("Unable to generate the PDF of delivery form for {}".
                    format(' '.join(ar_ids)))
        return None

    def _attach_to_ar(pdf, ar_brain_or_obj):
        ar = api.get_object(ar_brain_or_obj)

        # Attach the pdf to the Analysis Request
        attid = ar.aq_parent.generateUniqueId('Attachment')
        att = _createObjectByType(
            "Attachment", ar.aq_parent, attid)
        att.setAttachmentFile(open(pdf_fn))
        att.setReportOption('i')  # Ignore in report

        # Try to assign the Requisition Attachment Type
        query = dict(portal_type='AttachmentType', title='Delivery')
        brains = api.search(query, 'bika_setup_catalog')
        if brains:
            att_type = api.get_object(brains[0])
            att.setAttachmentType(att_type)

        # Awkward workaround to rename the file
        attf = att.getAttachmentFile()
        attf.filename = '%s.pdf' % filename
        att.setAttachmentFile(attf)
        att.unmarkCreationFlag()
        renameAfterCreation(att)
        atts = ar.getAttachment() + [att] if ar.getAttachment() else [att]
        atts = [a.UID() for a in atts]
        ar.setAttachment(atts)

    # TODO Create only one Attachment per Client and assign it to all ARs
    # There is no need to create a separate Attachment object for each AR. The
    # same attachment can be assigned to different ARs and they will resolve
    # the attachment correctly later. This will be useful because it:
    # a) Reduces the database size (fewer pdfs to store)
    # b) Lets workflow_download_delivery easily return the attachments that
    #    differ when multiple ARs are selected.

    for ar_or_sample in ars_or_samples:
        # Attach the pdf to the Analysis Request
        if ISample.providedBy(ar_or_sample):
            for ar in ar_or_sample.getAnalysisRequests():
                _attach_to_ar(pdf, ar)
        elif IAnalysisRequest.providedBy(ar_or_sample):
            _attach_to_ar(pdf, ar_or_sample)

    return pdf_fn
Example #12
def import_specifications_for_sample_type(portal, sample_type):
    logger.info("Importing specs for {}".format(sample_type.Title()))

    def get_bs_object(xlsx_row, xlsx_keyword, portal_type, criteria):
        text_value = xlsx_row.get(xlsx_keyword, None)
        if not text_value:
            logger.warn("Value not set for keyword {}".format(xlsx_keyword))
            return None

        query = {"portal_type": portal_type, criteria: text_value}
        brain = api.search(query, 'bika_setup_catalog')
        if not brain:
            logger.warn("No objects found for type {} and {} '{}'".format(
                portal_type, criteria, text_value))
            return None
        if len(brain) > 1:
            logger.warn(
                "More than one object found for type {} and {} '{}'".format(
                    portal_type, criteria, text_value))
            return None

        return api.get_object(brain[0])

    raw_specifications = get_xls_specifications()
    for spec in raw_specifications:

        # Valid Analysis Service?
        service = get_bs_object(spec, "keyword", "AnalysisService",
                                "getKeyword")
        if not service:
            continue

        # Does the calculation exist? get_bs_object expects an xlsx_row-like
        # mapping, so wrap the calculation title in a dict under "calculation"
        calc_title = "Ranges calculation"
        calc_row = {"calculation": calc_title}
        calc = get_bs_object(calc_row, "calculation", "Calculation", "title")
        if not calc:
            # Create a new one
            folder = portal.bika_setup.bika_calculations
            _id = folder.invokeFactory("Calculation", id=tmpID())
            calc = folder[_id]
            calc.edit(title=calc_title,
                      PythonImports=[{
                          "module": "bhp.lims.specscalculations",
                          "function": "get_specification_for"
                      }],
                      Formula="get_specification_for($spec)")
            calc.unmarkCreationFlag()
            renameAfterCreation(calc)

        # Existing AnalysisSpec?
        specs_title = "{} - calculated".format(sample_type.Title())
        query = dict(portal_type='AnalysisSpec', title=specs_title)
        aspec = api.search(query, 'bika_setup_catalog')
        if not aspec:
            # Create the new AnalysisSpecs object!
            folder = portal.bika_setup.bika_analysisspecs
            _id = folder.invokeFactory('AnalysisSpec', id=tmpID())
            aspec = folder[_id]
            aspec.edit(title=specs_title)
            aspec.unmarkCreationFlag()
            renameAfterCreation(aspec)
        elif len(aspec) > 1:
            logger.warn(
                "More than one Analysis Specification found for {}".format(
                    specs_title))
            continue
        else:
            aspec = api.get_object(aspec[0])
        aspec.setSampleType(sample_type)

        # Set the analysis keyword and bind it to the calculation to use
        keyword = service.getKeyword()
        specs_dict = {
            'keyword': keyword,
            'min_operator': 'geq',
            'min': '0',
            'max_operator': 'lt',
            'max': '0',
            'minpanic': '',
            'maxpanic': '',
            'warn_min': '',
            'warn_max': '',
            'hidemin': '',
            'hidemax': '',
            'rangecomments': '',
            'calculation': api.get_uid(calc),
        }
        grades_dict = {grade: "" for grade in GRADES_KEYS}
        specs_dict.update(grades_dict)
        ranges = api.get_field_value(aspec, 'ResultsRange', [{}])
        ranges = filter(lambda val: val.get('keyword') != keyword, ranges)
        ranges.append(specs_dict)
        aspec.setResultsRange(ranges)
Example #13
    def Import(self):
        logger.info("*** Custom import of Analysis Specifications ***")
        for row in self.get_rows(3):
            keyword = row.get('utestid')
            if not keyword:
                logger.warn("No keyword found")
                continue

            query = dict(portal_type="AnalysisService", getKeyword=keyword)
            analysis = api.search(query, 'bika_setup_catalog')
            if not analysis:
                logger.warn("No analysis service found for {}".format(keyword))
                continue
            if len(analysis) > 1:
                logger.warn(
                    "More than one service found for {}".format(keyword))
                continue
            analysis = api.get_object(analysis[0])

            # TODO No Sample Type defined in the file, just use Whole Blood
            st_title = row.get('sample_type', 'Whole Blood')
            query = dict(portal_type="SampleType", title=st_title)
            sample_type = api.search(query, 'bika_setup_catalog')
            if not sample_type:
                logger.warn("No sample type found for {}".format(st_title))
                continue
            if len(sample_type) > 1:
                logger.warn(
                    "More than one sample type found for {}".format(st_title))
                continue
            sample_type = api.get_object(sample_type[0])

            unit = row.get('utestid_units')
            min_spec = row.get('lln', '')
            max_spec = row.get('uln', '')
            gender = row.get('gender', 'a')
            gender = gender == 'mf' and 'a' or gender
            age_low = row.get('age_low', '')
            if age_low:
                age_low = '{}{}'.format(age_low, row.get('age_low_unit', 'd'))
            age_high = row.get('age_high', '')
            if age_high:
                age_high = '{}{}'.format(age_high,
                                         row.get('age_high_unit', 'd'))
            if not age_low and not age_high:
                logger.warn(
                    "Cannot create Spec, Age low and high not defined.")
                continue
            max_panic = row.get('panic_high_value', '')
            min_panic = row.get('panic_low_value', '')

            # TODO No Specs title defined in the file, just use sample type's
            specs_title = row.get('title', st_title)
            specs_key = []
            specs_key.append(specs_title)
            if gender:
                str_gender = gender.upper()
                if gender == 'a':
                    str_gender = 'MF'
                specs_key.append(str_gender)
            if age_low and age_high:
                specs_key.append('{} - {}'.format(age_low, age_high))
            elif age_low:
                specs_key.append('({}+)'.format(age_low))
            elif age_high:
                specs_key.append('(-{})'.format(age_high))
            specs_title = ' '.join(specs_key)

            specs_dict = {
                'keyword': analysis.getKeyword(),
                'min': min_spec,
                'max': max_spec,
                'minpanic': min_panic,
                'maxpanic': max_panic,
                'warn_min': '',
                'warn_max': '',
                'hidemin': '',
                'hidemax': '',
                'rangecomments': '',
            }

            query = dict(portal_type='AnalysisSpec', title=specs_title)
            aspec = api.search(query, 'bika_setup_catalog')
            if not aspec:
                # Create a new one
                folder = self.context.bika_setup.bika_analysisspecs
                _id = folder.invokeFactory('AnalysisSpec', id=tmpID())
                aspec = folder[_id]
                aspec.edit(title=specs_title)
                aspec.Schema().getField("Gender").set(aspec, gender)
                aspec.Schema().getField("Agefrom").set(aspec, age_low)
                aspec.Schema().getField("Ageto").set(aspec, age_high)
                aspec.unmarkCreationFlag()
                renameAfterCreation(aspec)

            elif len(aspec) > 1:
                logger.warn(
                    "More than one Analysis Specification found for {}".format(
                        specs_title))
                continue
            else:
                aspec = api.get_object(aspec[0])

            result_range = aspec.Schema().getField('ResultsRange').get(aspec)
            result_range.append(specs_dict)
            aspec.Schema().getField('ResultsRange').set(aspec, result_range)
            aspec.setSampleType(sample_type.UID())
            aspec.reindexObject()