Example #1
def fix_analysis_requests_assay_date(portal):
    """Upgrade step that back-fills the AssayDate field of already processed
    Analysis Requests with the latest result capture date of their analyses
    """
    logger.info("Updating Assay Date for old Analysis Requests ...")
    query = dict(
        portal_type="AnalysisRequest",
        review_state=["published", "to_be_verified", "verified", "invalid"])
    brains = api.search(query, CATALOG_ANALYSIS_REQUEST_LISTING)
    total = len(brains)
    for num, brain in enumerate(brains):
        if num % 100 == 0:
            logger.info(
                "Updating Assay Date for old Analysis Requests: {}/{}".format(
                    num, total))
        if num % TRANSACTION_THERESHOLD == 0:
            commit_transaction(portal)

        request = api.get_object(brain)
        if not api.get_field_value(request, "AssayDate", None):
            review_states = ["to_be_verified", "published", "verified"]
            analyses = request.getAnalyses(review_state=review_states)
            captures = map(lambda an: an.getResultCaptureDate, analyses)
            captures = sorted(captures)
            if captures:
                api.set_field_value(request, "AssayDate", captures[-1])
                request.reindexObject()
    commit_transaction(portal)
    logger.info("Updating Assay Date for old Analysis Requests [DONE]")
Example #2
def after_submit(analysis):
    """Actions to be done after a submit transition for an analysis takes place
    """
    analysis = api.get_object(analysis)
    if IRequestAnalysis.providedBy(analysis):
        ar = analysis.getRequest()
        set_field_value(ar, "AssayDate", analysis.getDateSubmitted())
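Handlers like after_submit are usually invoked from a generic after-transition subscriber rather than called directly. A minimal sketch of that dispatch pattern, assuming a module-level lookup keyed on the transition id (the subscriber name and lookup mechanism are illustrative, not the actual bika.lims wiring):

def after_transition_handler(analysis, event):
    # Illustrative dispatcher: calls after_<transition_id>(analysis) when a
    # matching handler is defined in this module
    if event.transition is None:
        return
    handler = globals().get("after_{}".format(event.transition.id))
    if handler is not None:
        handler(analysis)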
Example #3
def update_internal_use(portal):
    """Walks through all Samples and assigns its value to False if no value set
    """
    logger.info("*** Updating InternalUse field on Samples/ARs ***")
    samples = api.search(dict(portal_type="Sample"), "bika_catalog")
    for sample in samples:
        sample = api.get_object(sample)
        if _api.get_field_value(sample, "InternalUse", None) is None:
            _api.set_field_value(sample, "InternalUse", False)
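Both get_field_value and set_field_value wrap the schema field accessors so callers do not deal with field objects directly. A rough sketch of what the getter could look like, assuming Archetypes-style content (illustrative only, not the actual bika.lims implementation):

def get_field_value(instance, field_name, default=None):
    # getField / field.get are the standard Archetypes accessors
    field = instance.getField(field_name)
    if field is None:
        return default
    value = field.get(instance)
    return default if value is None else value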
Example #4
def copy(source, container, skip_fields=None, new_field_values=None):
    """Creates and returns a copy of the source object inside the given
    container, omitting the fields listed in skip_fields and overriding the
    ones passed in new_field_values
    """
    if new_field_values is None:
        new_field_values = dict()
    source = api.get_object(source)
    logger.info("Creating copy of {} with id {}".format(
        source.portal_type, source.id))
    destination = _createObjectByType(source.portal_type, container, tmpID())
    field_values = to_dict(source, skip_fields=skip_fields)
    for field_name, field_value in field_values.items():
        _api.set_field_value(destination, field_name, field_value)
    for field_name, field_value in new_field_values.items():
        _api.set_field_value(destination, field_name, field_value)
    destination.unmarkCreationFlag()
    renameAfterCreation(destination)
    destination.reindexObject()
    return destination
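A hypothetical call, assuming client is a Client folder and template is an existing object to duplicate (object names and field names are illustrative):

duplicate = copy(template, client,
                 skip_fields=["id"],
                 new_field_values={"title": "Copy of %s" % template.Title()})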
Example #5
    mime_msg['To'] = to
    msg_txt = MIMEText(safe_unicode(body).encode('utf-8'), _subtype='html')
    mime_msg.preamble = 'This is a multi-part MIME message.'
    mime_msg.attach(msg_txt)
    try:
        host = getToolByName(view.context, 'MailHost')
        host.send(mime_msg.as_string(), immediate=True)
    except Exception as msg:
        ar = view.context.id
        logger.error("Panic level email %s: %s" % (ar, str(msg)))
        message = _('Unable to send an email to alert client '
                    'that some results exceeded the panic levels') \
                  + (": %s" % str(msg))
        addMessage(view, message, 'warning')
        return False
    api.set_field_value(view.context, "PanicEmailAlertSent", True)
    return True


def addMessage(view, message, msg_type="info"):
    """Adds a portal status message to be displayed on the next rendered page
    """
    view.context.plone_utils.addPortalMessage(message, msg_type)


class AnalysisRequestView(AnalysisRequestViewView):

    def __call__(self):
        template = super(AnalysisRequestView, self).__call__()
        if not handle_email_panic(self):
            return template
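The excerpt above starts after the MIME message has already been built (presumably inside handle_email_panic, given the call in __call__). A minimal sketch of the part that is cut off, assuming a standard multipart message from the email package; the subject, sender and recipient variables are illustrative, only mime_msg['To'] appears in the original code:

from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from Products.CMFPlone.utils import safe_unicode

# Assumed setup for the excerpt: subject and lab_email are placeholders
mime_msg = MIMEMultipart('related')
mime_msg['Subject'] = subject
mime_msg['From'] = lab_email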