def folderitem(self, obj, item, index):
    """Applies QC-specific values to the item (row) that represents the
    analysis passed in: Worksheet link, QC-type icon and Parent link

    :param obj: the analysis (or brain) to be rendered as a row
    :param item: dict representation of the analysis, suitable for the list
    :param index: current position of the item within the list
    :return: the dict representation of the item
    """
    item = super(QCAnalysesView, self).folderitem(obj, item, index)
    obj = self.get_object(obj)

    # Analyses without a worksheet are rendered without the Worksheet,
    # Service icon and Parent cells
    worksheet = obj.getWorksheet()
    if not worksheet:
        return item

    # Fill the Worksheet cell
    ws_id = api.get_id(worksheet)
    ws_url = api.get_url(worksheet)
    item["replace"]["Worksheet"] = get_link(ws_url, value=ws_id)

    if IDuplicateAnalysis.providedBy(obj):
        an_type = "d"
        img_name = "duplicate.png"
        parent = obj.getRequest()
    else:
        an_type = obj.getReferenceType()
        # Conditional expression instead of the error-prone `and/or` trick
        img_name = "control.png" if an_type == "c" else "blank.png"
        parent = obj.aq_parent

    # Render the image that denotes the type of QC analysis
    an_type = QCANALYSIS_TYPES.getValue(an_type)
    item["before"]["Service"] = get_image(img_name, title=an_type)

    # Fill the Parent cell
    parent_url = api.get_url(parent)
    parent_id = api.get_id(parent)
    item["replace"]["Parent"] = get_link(parent_url, value=parent_id)
    return item
def remove_analysis(self, analysis):
    """Removes a given analysis from the instance
    """
    # Keep a reference to the assigned attachments, so orphans can be
    # removed once the analysis is gone
    # https://github.com/senaite/senaite.core/issues/1025
    linked_attachments = analysis.getAttachment()
    analysis.setAttachment([])

    # If assigned to a worksheet, unassign it before deletion
    ws = analysis.getWorksheet()
    if ws:
        ws.removeAnalysis(analysis)

    # Remove the analysis. Note the analysis might belong to a partition
    analysis.aq_parent.manage_delObjects(ids=[api.get_id(analysis)])

    # Delete attachments that are no longer linked to any analysis
    for attachment in linked_attachments:
        if attachment.getLinkedAnalyses():
            continue
        logger.info("Deleting attachment: {}".format(attachment.getId()))
        attachment_id = api.get_id(attachment)
        api.get_parent(attachment).manage_delObjects(attachment_id)
def get_rejection_mail(sample, rejection_pdf=None):
    """Generates an email to sample contacts with rejection reasons
    """
    # Flatten the rejection reasons into a single html-friendly string
    rejection = sample.getRejectionReasons()
    rejection = rejection[0] if rejection else {}
    selected = rejection.get("selected", []) + [rejection.get("other")]
    selected = filter(None, selected)
    reasons = "<br/>- ".join(selected)

    # Render the email body
    setup = api.get_setup()
    lab_address = setup.laboratory.getPrintAddress()
    template = Template(setup.getEmailBodySampleRejection())
    email_body = template.safe_substitute({
        "lab_address": "<br/>".join(lab_address),
        "reasons": "<br/>-{}".format(reasons) if reasons else "",
        "sample_id": api.get_id(sample),
        "sample_link": get_link(api.get_url(sample), api.get_id(sample))
    })

    def to_valid_email_address(contact):
        # Returns the contact's email address, but only when valid
        if not contact:
            return None
        address = contact.getEmailAddress()
        if not is_valid_email_address(address):
            return None
        return address

    # Resolve the recipients that have a valid email address
    recipients = [sample.getContact()] + sample.getCCContact()
    recipients = map(to_valid_email_address, recipients)
    recipients = filter(None, recipients)
    if not recipients:
        # Cannot send an e-mail without recipient!
        logger.warn("No valid recipients for {}".format(api.get_id(sample)))
        return None

    lab = api.get_setup().laboratory
    attachments = [rejection_pdf] if rejection_pdf else []
    return compose_email(from_addr=lab.getEmailAddress(),
                         to_addr=recipients,
                         subj=_("%s has been rejected") % api.get_id(sample),
                         body=email_body,
                         attachments=attachments)
def get_email_body(self, sample):
    """Returns the email body text
    """
    retest = sample.getRetest()
    lab_address = api.get_bika_setup().laboratory.getPrintAddress()
    setup = api.get_setup()
    # Substitution values for the invalidation email template
    mapping = dict(sample_link=self.get_html_link(sample),
                   retest_link=self.get_html_link(retest),
                   sample_id=api.get_id(sample),
                   retest_id=api.get_id(retest),
                   lab_address="<br/>".join(lab_address))
    template = Template(setup.getEmailBodySampleInvalidation())
    return template.safe_substitute(mapping)
def __call__(self):
    """Form handler of the sample storage view: stores the selected sample
    into the chosen container position, or cancels the operation
    """
    form = self.request.form

    # Form submit toggle
    form_submitted = form.get("submitted", False)
    form_store = form.get("button_store", False)
    form_cancel = form.get("button_cancel", False)

    # Get the container
    container = self.get_container()
    if not container:
        return self.redirect(message=_s("No items selected"),
                             level="warning")

    if not IStorageSamplesContainer.providedBy(container):
        # FIX: `.format(...)` was called on logger.warn's return value
        # (None), raising AttributeError. Also return here so we do not
        # fall through and call container methods on a non-container
        logger.warn("Not a samples container: {}".format(repr(container)))
        return self.redirect(redirect_url=self.get_next_url())

    # If container is full, there is no way to add more samples there
    if container.is_full():
        message = _("Cannot store samples. Samples container {} is full")
        return self.redirect(message=message.format(api.get_id(container)),
                             level="warning")

    # Handle store
    if form_submitted and form_store:
        alpha_position = form.get("position")
        sample_uid = form.get("sample_uid")
        if not alpha_position or not api.is_uid(sample_uid):
            message = _("No position or not valid sample selected")
            return self.redirect(message=message)

        sample = api.get_object(sample_uid)
        logger.info("Storing sample {} in {} at {}".format(
            api.get_id(sample), api.get_id(container), alpha_position))

        # Store the sample at the given (row, column) position
        position = container.alpha_to_position(alpha_position)
        if container.add_object_at(sample, position[0], position[1]):
            message = _("Stored sample {} at position {}").format(
                api.get_id(sample), alpha_position)
            if container.is_full():
                return self.redirect(redirect_url=self.get_next_url())
            return self.redirect(redirect_url=self.get_fallback_url(),
                                 message=message)

    # Handle cancel
    if form_submitted and form_cancel:
        return self.redirect(message=_("Sample storing canceled"))

    return self.template()
def get_object_info(self):
    """Returns the dict representation of the context (Batch), with the
    default field values and filter queries for dependent reference fields
    """
    object_info = self.get_base_info()

    # Default values for other fields when the Batch is selected
    doctor = self.context.getField("Doctor").get(self.context)
    client = self.context.getClient()
    field_values = {
        "Doctor": self.to_field_value(doctor),
        "Client": self.to_field_value(client),
    }

    patient = self.context.getField("Patient").get(self.context)
    if patient:
        cpid = patient.getClientPatientID() or api.get_id(patient)
        field_values["Patient"] = self.to_field_value(patient)
        field_values["ClientPatientID"] = {
            "uid": api.get_uid(patient),
            "title": cpid,
        }

    # Allow to choose Patients from same Client only and apply generic
    # filters when a client is selected too
    filter_queries = {}
    if client:
        path_query = {"query": api.get_path(client), "depth": 1}
        filter_queries = {
            "Patient": {"path": path_query},
            "ClientPatientID": {"path": path_query},
        }

    object_info["field_values"] = field_values
    object_info["filter_queries"] = filter_queries
    return object_info
def search_by_prefix(portal_type, prefix):
    """Returns brains which share the same portal_type and ID prefix
    """
    # Get all brains for all catalogs for given portal type
    brains = search_catalogs(portal_type)
    # Keep only brains whose ID starts with the given prefix
    return [brain for brain in brains
            if api.get_id(brain).startswith(prefix)]
def get_sample_container_info(self):
    """Returns the storage container this Sample is stored in
    """
    # Search the container the sample is stored in
    query = {
        "portal_type": "StorageSamplesContainer",
        "get_samples_uids": api.get_uid(self.context)
    }
    brains = api.search(query, SENAITE_STORAGE_CATALOG)
    if not brains:
        # Sample is not stored in any container
        return None

    # Resolve the (row, column) position into its alphanumeric form
    container = api.get_object(brains[0])
    pos = container.get_object_position(self.context)
    alpha_position = container.position_to_alpha(pos[0], pos[1])

    return {
        "uid": api.get_uid(container),
        "id": api.get_id(container),
        "title": api.get_title(container),
        "url": api.get_url(container),
        "position": alpha_position,
        "full_title": container.get_full_title(),
        "when": wf.getTransitionDate(self.context, "store"),
    }
def resolve_client_for_doctor(doctor):
    """Resolves the client this doctor belongs to, inferred from its
    assigned batches and samples

    :param doctor: the doctor to resolve the client for
    :returns: the client object or None if it cannot be resolved
    """
    # If the doctor has a client assigned already, return it directly. We
    # consider that a doctor with a client assigned this way has been
    # processed already or does not require any further checks
    client = doctor.getClient()
    if client:
        return client

    # Try to infer the client from Samples or Batches
    batches = doctor.getBatches(full_objects=True)
    client_uids = map(lambda b: b.getClientUID(), batches)
    # FIX: getClientUID was not being called (the bound method object was
    # collected instead of the UID value), so sample clients were ignored
    client_uids.extend(map(lambda s: s.getClientUID(), doctor.getSamples()))
    client_uids = filter(None, list(set(client_uids)))
    if not client_uids:
        # This Doctor has no batch/sample assigned
        return None

    clients = map(api.get_object_by_uid, client_uids)
    internals = map(is_internal_client, clients)
    if all(internals):
        # All clients are internal, return the first one
        return clients[0]

    # Oops, we have a problem here. This Doctor is assigned to samples and
    # batches that belong to different types of client!
    # FIX: the original format string had a single placeholder, so the
    # second argument (the repr of the clients) was silently dropped
    logger.error(
        "Doctor {} is assigned to clients from different types: {}".format(
            api.get_id(doctor), repr(clients)))
    return None
def folderitem(self, obj, item, index):
    """Fills the row (item) with analysis-specific values and registers
    the analysis into the QC chart
    """
    item = super(InstrumentReferenceAnalysesView,
                 self).folderitem(obj, item, index)
    analysis = api.get_object(obj)

    # Partition is used to group/toggle QC Analyses
    sample = analysis.getSample()
    sample_link = get_link(api.get_url(sample), api.get_id(sample))
    item["replace"]["Partition"] = sample_link

    # Fill the retractions report download link, if any
    item["Retractions"] = ""
    report = analysis.getRetractedAnalysesPdfReport()
    if report:
        download_url = "{}/at_download/RetractedAnalysesPdfReport".format(
            api.get_url(analysis))
        attrs = {"class": "pdf", "target": "_blank"}
        title = _("Retractions")
        item["Retractions"] = title
        item["replace"]["Retractions"] = get_link(download_url, title,
                                                  **attrs)

    # Add the analysis to the QC Chart
    self.chart.add_analysis(analysis)
    return item
def send_panic_email(self):
    """Sends an alert email about panic-level results, built from the
    subject, recipient and body present in the request
    """
    setup = api.get_setup()
    laboratory = setup.laboratory
    subject = self.request.get('subject')
    to = self.request.get('to')
    body = self.request.get('email_body')
    body = "<br/>".join(body.split("\r\n"))
    mime_msg = MIMEMultipart('related')
    mime_msg['Subject'] = subject
    mime_msg['From'] = formataddr(
        (encode_header(laboratory.getName()),
         laboratory.getEmailAddress()))
    mime_msg['To'] = to
    msg_txt = MIMEText(safe_unicode(body).encode('utf-8'), _subtype='html')
    mime_msg.preamble = 'This is a multi-part MIME message.'
    mime_msg.attach(msg_txt)
    try:
        host = api.get_tool("MailHost")
        host.send(mime_msg.as_string(), immediate=True)
    # FIX: use the "except ... as ..." form (PEP 3110) instead of the
    # deprecated comma syntax; valid on Python 2.6+ and Python 3
    except Exception as msg:
        sample_id = api.get_id(self.sample)
        logger.error("Panic level email %s: %s" % (sample_id, str(msg)))
        message = _("Unable to send an email to alert client "
                    "that some results exceeded the panic levels")
        message = "{}: {}".format(message, str(msg))
        return self.redirect(self.back_url, message, "warning")
def getRequestID(self):
    """Return the ID of the linked AR, or an empty string if none
    """
    ar = self.getRequest()
    if ar:
        return api.get_id(ar)
    return ""
def guard(self, action):
    """Returns False if the sample is queued or contains queued analyses
    """
    # Check if this current request life-cycle is handled by a consumer
    request = capi.get_request()
    task_uid = request.get("queue_tuid", "")
    if capi.is_uid(task_uid):
        logger.info("Skip guard for {}: {}".format(
            capi.get_id(self.context), action))
        return True

    # Don't do anything if senaite.queue is not enabled
    if not api.is_queue_enabled():
        return True

    # Check if the sample is queued
    if api.is_queued(self.context, status=["queued"]):
        return False

    # Neither the sample nor any of its analyses can be queued.
    # any() short-circuits on the first queued analysis found
    return not any(api.is_queued(brain, status=["queued"])
                   for brain in self.context.getAnalyses())
def search_by_prefix(portal_type, prefix):
    """Returns brains which share the same portal_type and ID prefix
    """
    # Query the uid_catalog for all objects of the given portal type
    catalog = api.get_tool("uid_catalog")
    brains = catalog({"portal_type": portal_type})
    # Keep only brains whose ID starts with the given prefix
    return [brain for brain in brains
            if api.get_id(brain).startswith(prefix)]
def email_attachments(self):
    """Returns the email attachments: one per report PDF plus the
    additional attachment files
    """
    attachments = []

    # Convert report PDFs -> email attachments
    for report in self.reports:
        pdf = self.get_pdf(report)
        if pdf is None:
            logger.error("Skipping empty PDF for report {}".format(
                report.getId()))
            continue
        sample = report.getAnalysisRequest()
        filename = "{}.pdf".format(api.get_id(sample))
        attachments.append(mailapi.to_email_attachment(pdf.data, filename))

    # Convert additional attachments
    for attachment in self.attachments:
        att_file = attachment.getAttachmentFile()
        attachments.append(
            mailapi.to_email_attachment(att_file.data, att_file.filename))

    return attachments
def search_by_prefix(portal_type, prefix):
    """Returns brains which share the same portal_type and ID prefix
    """
    # Query the portal_catalog for all objects of the given portal type
    catalog = api.get_tool("portal_catalog")
    brains = catalog({"portal_type": portal_type})
    # Keep only brains whose ID starts with the given prefix
    return [brain for brain in brains
            if api.get_id(brain).startswith(prefix)]
def do_rejection(sample, notify=None):
    """Rejects the sample and if succeeds, generates the rejection pdf and
    sends a notification email. If notify is None, the notification email
    will only be sent if the setting in Setup is enabled
    """
    sample_id = api.get_id(sample)

    # Rejection reasons are mandatory for the "reject" transition
    if not sample.getRejectionReasons():
        logger.warn("Cannot reject {} w/o rejection reasons".format(sample_id))
        return

    success, msg = doActionFor(sample, "reject")
    if not success:
        logger.warn("Cannot reject the sample {}".format(sample_id))
        return

    # Generate a pdf with the rejection reasons and attach it to the sample
    pdf = get_rejection_pdf(sample)
    filename = "{}-rejected.pdf".format(sample_id)
    sample.createAttachment(pdf, filename=filename)

    # Fall back to the setup setting unless notify was given explicitly
    if notify is None:
        notify = api.get_setup().getNotifyOnSampleRejection()

    if notify:
        # Compose and send the notification email
        mime_msg = get_rejection_mail(sample, pdf)
        send_email(mime_msg)
def resolve_analyses(self, instance, service):
    """Resolves analyses for the service and instance

    It returns a list, cause for a given sample, multiple analyses for
    same service can exist due to the possibility of having multiple
    partitions

    :param instance: the sample (or partition) to resolve analyses for
    :param service: the analysis service to look analyses up by
    :returns: list of analysis objects living in `instance`, in ancestors
        (moved into `instance`) or in descendants
    """
    analyses = []
    # Does the analysis exists in this instance already?
    instance_analyses = self.get_from_instance(instance, service)
    if instance_analyses:
        analyses.extend(instance_analyses)
    # Does the analysis exists in an ancestor?
    from_ancestor = self.get_from_ancestor(instance, service)
    for ancestor_analysis in from_ancestor:
        # Move the analysis into this instance. The ancestor's
        # analysis will be masked otherwise
        analysis_id = api.get_id(ancestor_analysis)
        logger.info("Analysis {} is from an ancestor".format(analysis_id))
        # Zope cut&paste: physically relocates the object, so the pasted
        # copy must be re-fetched from the new parent (_getOb) afterwards
        cp = ancestor_analysis.aq_parent.manage_cutObjects(analysis_id)
        instance.manage_pasteObjects(cp)
        analyses.append(instance._getOb(analysis_id))
    # Does the analysis exists in descendants?
    # NOTE(review): descendant analyses are referenced in place, not moved
    from_descendant = self.get_from_descendant(instance, service)
    analyses.extend(from_descendant)
    return analyses
def create_report(self, parent, pdf, html, uids, metadata):
    """Create a new report object

    NOTE: We limit the creation of reports to 1 to avoid conflict errors on
    simultaneous publication.

    :param parent: parent object where to create the report inside
    :param pdf: rendered PDF contents of the report
    :param html: rendered HTML contents of the report
    :param uids: UIDs of the analysis requests contained in the report
    :param metadata: metadata mapping stored alongside the report
    :returns: ARReport
    """
    parent_id = api.get_id(parent)
    logger.info("Create Report for {} ...".format(parent_id))
    # Manually update the view on the database to avoid conflict errors
    parent._p_jar.sync()
    # Create the report object
    report = api.create(parent,
                        "ARReport",
                        AnalysisRequest=api.get_uid(parent),
                        Pdf=pdf,
                        Html=html,
                        ContainedAnalysisRequests=uids,
                        Metadata=metadata)
    # Commit the changes immediately so concurrent publishers see them
    transaction.commit()
    logger.info("Create Report for {} [DONE]".format(parent_id))
    return report
def get_title_or_id_from_uid(uid):
    """Returns the title or ID from the given UID, or an empty string if
    no object can be resolved for the UID
    """
    obj = api.get_object_by_uid(uid, default=None)
    if obj is None:
        return ""
    return api.get_title(obj) or api.get_id(obj)
def after_retract(analysis): """Function triggered after a 'retract' transition for the analysis passed in is performed. The analysis transitions to "retracted" state and a new copy of the analysis is created. The copy initial state is "unassigned", unless the the retracted analysis was assigned to a worksheet. In such case, the copy is transitioned to 'assigned' state too """ # Retract our dependents (analyses that depend on this analysis) cascade_to_dependents(analysis, "retract") # Retract our dependencies (analyses this analysis depends on) promote_to_dependencies(analysis, "retract") # Rename the analysis to make way for it's successor. # Support multiple retractions by renaming to *-0, *-1, etc parent = analysis.aq_parent keyword = analysis.getKeyword() # Get only those that are analyses and with same keyword as the original analyses = parent.getAnalyses(full_objects=True) analyses = filter(lambda an: an.getKeyword() == keyword, analyses) # TODO This needs to get managed by Id server in a nearly future! new_id = '{}-{}'.format(keyword, len(analyses)) # Create a copy of the retracted analysis an_uid = api.get_uid(analysis) new_analysis = create_analysis(parent, analysis, id=new_id, RetestOf=an_uid) new_analysis.setResult("") new_analysis.setResultCaptureDate(None) new_analysis.reindexObject() logger.info("Retest for {} ({}) created: {}".format( keyword, api.get_id(analysis), api.get_id(new_analysis))) # Assign the new analysis to this same worksheet, if any. worksheet = analysis.getWorksheet() if worksheet: worksheet.addAnalysis(new_analysis) # Try to rollback the Analysis Request if IRequestAnalysis.providedBy(analysis): doActionFor(analysis.getRequest(), "rollback_to_receive") reindex_request(analysis)
def folderitem(self, obj, item, index):
    """Applies new properties to the item (reference sample) that is
    currently being rendered as a row in the list

    :param obj: item to be rendered as a row in the list
    :param item: dict representation of the item, suitable for the list
    :param index: current position of the item within the list
    :type obj: ATContentType/DexterityContentType
    :type item: dict
    :type index: int
    :return: the dict representation of the item
    :rtype: dict
    """
    obj = api.get_object(obj)

    # XXX Refactor expiration to a proper place
    # ---------------------------- 8< -------------------------------------
    if item.get("review_state", "current") == "current":
        # Trigger the expiration transition when the expiry date is due
        exdate = obj.getExpiryDate()
        if exdate:
            expiry = DT2dt(exdate).replace(tzinfo=None)
            if datetime.today() > expiry:
                self.workflow.doActionFor(obj, "expire")
                item["review_state"] = "expired"
                item["obj"] = obj

    if self.contentFilter.get('review_state', '') \
            and item.get('review_state', '') == 'expired':
        # This item must be omitted from the list
        return None
    # ---------------------------- >8 -------------------------------------

    obj_url = api.get_url(obj)
    obj_id = api.get_id(obj)
    item["ID"] = obj_id
    item["replace"]["ID"] = get_link(obj_url, value=obj_id)
    item["DateSampled"] = self.ulocalized_time(
        obj.getDateSampled(), long_format=True)
    item["DateReceived"] = self.ulocalized_time(obj.getDateReceived())
    item["DateOpened"] = self.ulocalized_time(obj.getDateOpened())
    item["ExpiryDate"] = self.ulocalized_time(obj.getExpiryDate())

    # Icons displayed right after the ID
    icons = []
    if obj.getBlank():
        icons.append(get_image("blank.png", title=t(_("Blank"))))
    if obj.getHazardous():
        icons.append(get_image("hazardous.png", title=t(_("Hazardous"))))
    if icons:
        item["after"]["ID"] = "".join(icons)

    return item
def _get_title_or_id_from_uid(uid):
    """Returns the title or ID from the given UID. Returns a placeholder
    when the referenced object no longer exists
    """
    try:
        obj = api.get_object_by_uid(uid)
    except api.APIError:
        # The referenced object has been deleted
        return "<Deleted {}>".format(uid)
    return api.get_title(obj) or api.get_id(obj)
def update_ast_analysis(analysis, antibiotics, remove=False):
    """Updates the AST analysis with the antibiotics passed in, stored as
    interim fields. Retracts the analysis when already submitted, and
    removes it when no antibiotics remain and `remove` is set

    :param analysis: the AST analysis (or brain/UID) to update
    :param antibiotics: antibiotics to be assigned as interim fields
    :param remove: whether antibiotics not present in `antibiotics` must be
        purged from the analysis
    """
    # There is nothing to do if the analysis has been verified
    analysis = api.get_object(analysis)
    if IVerified.providedBy(analysis):
        return

    # Convert antibiotics to interim fields
    keyword = analysis.getKeyword()
    interim_fields = map(lambda ab: to_interim(keyword, ab), antibiotics)

    # Get the analysis interim fields
    # deepcopy so in-place edits don't mutate the stored field value
    an_interims = copy.deepcopy(analysis.getInterimFields()) or []
    an_keys = sorted(map(lambda i: i.get("keyword"), an_interims))

    # Remove non-specified antibiotics
    if remove:
        in_keys = map(lambda i: i.get("keyword"), interim_fields)
        an_interims = filter(lambda a: a["keyword"] in in_keys, an_interims)

    # Keep analysis' original antibiotics
    abx = filter(lambda a: a["keyword"] not in an_keys, interim_fields)
    an_interims.extend(abx)

    # Is there any difference?
    # NOTE(review): the comparison is by keyword only — changes to other
    # interim properties of an existing antibiotic are not detected here
    new_keys = sorted(map(lambda i: i.get("keyword"), an_interims))
    if new_keys == an_keys:
        # No changes
        return

    # If no antibiotics, remove the analysis
    if remove and not an_interims:
        sample = analysis.getRequest()
        sample._delObject(api.get_id(analysis))
        return

    if ISubmitted.providedBy(analysis):
        # Analysis has been submitted already, retract
        succeed, message = wf.doActionFor(analysis, "retract")
        if not succeed:
            path = api.get_path(analysis)
            logger.error("Cannot retract analysis '{}'".format(path))
            return

    # Assign the antibiotics
    analysis.setInterimFields(an_interims)

    # Compute all combinations of interim/antibiotic and possible result and
    # and generate the result options for this analysis (the "Result" field is
    # never displayed and is only used for reporting)
    result_options = get_result_options(analysis)
    analysis.setResultOptions(result_options)

    # Apply the IASTAnalysis marker interface (just in case)
    alsoProvides(analysis, IASTAnalysis)

    analysis.reindexObject()
def get_base_info(self):
    """Returns the basic dictionary structure for the current object
    """
    context = self.context
    return {
        "id": api.get_id(context),
        "uid": api.get_uid(context),
        "title": api.get_title(context),
        "field_values": {},
        "filter_queries": {},
    }
def folderitem(self, obj, item, index):
    """Fills the row (item) with the invoice-specific values
    """
    obj = api.get_object(obj)
    currency = currency_format(self.context, 'en')
    item['replace']['id'] = get_link(item["url"], api.get_id(obj))
    item['client'] = obj.getClient().Title()
    item['invoicedate'] = self.ulocalized_time(obj.getInvoiceDate())
    # Render the amounts with the currency formatter
    item['subtotal'] = currency(obj.getSubtotal())
    item['vatamount'] = currency(obj.getVATAmount())
    item['total'] = currency(obj.getTotal())
    return item
def __call__(self):
    """Creates the invoice for the current AR and redirects to its view
    """
    # Create the invoice object and link it to the current AR.
    invoice = self.context.createInvoice(self.create_pdf())
    message = _("Invoice {} created").format(api.get_id(invoice))
    self.add_status_message(message)
    # Reload the page to see the the new fields
    redirect_url = "%s/invoice" % self.aq_parent.absolute_url()
    self.request.response.redirect(redirect_url)
def fix_email_address(portal, portal_types=None, catalog_id="portal_catalog"):
    """Validates the email address of portal types that inherit from Person.
    The field did not have an email validator, causing some views to fail
    when rendering the value while expecting a valid email address format
    """
    logger.info("Fixing email addresses ...")
    if not portal_types:
        portal_types = ["Contact", "LabContact", "SupplierContact"]

    brains = api.search(dict(portal_type=portal_types), catalog_id)
    total = len(brains)
    for num, brain in enumerate(brains):
        if num and num % 1000 == 0:
            logger.info("{}/{} Fixing email addresses ...".format(num, total))

        obj = api.get_object(brain)
        address = obj.getEmailAddress()
        if not address:
            continue
        if is_valid_email_address(address):
            continue

        logger.info("No valid email address for {}: {}"
                    .format(api.get_id(obj), address))

        # Maybe is a list of email addresses
        candidates = map(lambda x: x.strip(), re.split("[;:, ]", address))
        # Bail out non-valid emails
        candidates = filter(lambda em: is_valid_email_address(em), candidates)
        if not candidates:
            logger.warn("Cannot resolve email address from '{}'"
                        .format(address))
            continue

        # Assign the first valid address found
        picked = candidates[0]
        logger.info("Email address assigned for {}: {}"
                    .format(api.get_id(obj), picked))
        obj.setEmailAddress(picked)
        obj.reindexObject()

    logger.info("Fixing email addresses [DONE]")
def __call__(self):
    """Creates a new Worksheet for the selected analyst, optionally from a
    worksheet template, and redirects to the appropriate next view
    """
    # N.B.
    form = self.request.form
    analyst = self.request.get('analyst', '')
    template = self.request.get('template', '')
    instrument = self.request.get('instrument', '')
    # An analyst is mandatory to create a worksheet
    if not analyst:
        message = _("Analyst must be specified.")
        self.context.plone_utils.addPortalMessage(message, 'info')
        self.request.RESPONSE.redirect(self.context.absolute_url())
        return
    # NOTE(review): `form`, `wf` and `pm` look unused in this handler
    rc = getToolByName(self.context, REFERENCE_CATALOG)
    wf = getToolByName(self.context, "portal_workflow")
    pm = getToolByName(self.context, "portal_membership")
    ws = _createObjectByType("Worksheet", self.context, tmpID())
    ws.processForm()
    # Set analyst and instrument
    ws.setAnalyst(analyst)
    if instrument:
        ws.setInstrument(instrument)
    # Set the default layout for results display
    ws.setResultsLayout(self.context.bika_setup.getWorksheetLayout())
    # overwrite saved context UID for event subscribers
    self.request['context_uid'] = ws.UID()
    # if no template was specified, redirect to blank worksheet
    if not template:
        # NOTE(review): processForm was already called above — confirm the
        # second call is intentional
        ws.processForm()
        self.request.RESPONSE.redirect(ws.absolute_url() + "/add_analyses")
        return
    # Apply the worksheet template and decide where to go next
    wst = rc.lookupObject(template)
    ws.applyWorksheetTemplate(wst)
    if ws.getLayout():
        self.request.RESPONSE.redirect(ws.absolute_url() + "/manage_results")
    elif api.is_queued(ws):
        # senaite.queue will process the assignment asynchronously
        msg = _("Analyses for {} have been queued".format(_api.get_id(ws)))
        self.context.plone_utils.addPortalMessage(msg)
        self.request.RESPONSE.redirect(_api.get_url(ws.aq_parent))
    else:
        msg = _("No analyses were added")
        self.context.plone_utils.addPortalMessage(msg)
        self.request.RESPONSE.redirect(ws.absolute_url() + "/add_analyses")
def get_item_info(self, brain_or_object):
    """Return the data of this brain or object
    """
    info = dict(obj=brain_or_object)
    info["uid"] = api.get_uid(brain_or_object)
    info["url"] = api.get_url(brain_or_object)
    info["id"] = api.get_id(brain_or_object)
    info["title"] = api.get_title(brain_or_object)
    info["portal_type"] = api.get_portal_type(brain_or_object)
    info["review_state"] = api.get_workflow_status_of(brain_or_object)
    return info
def subject(self):
    """Returns the subject of the email
    """
    subject_tpl = api.get_registry_record("senaite.panic.email_subject")
    client = self.sample.getClient()
    # Values to interpolate into the translated subject template
    mapping = {
        "sample_id": api.get_id(self.sample),
        "client_id": client.getClientID(),
        "client_sample_id": self.sample.getClientSampleID(),
    }
    return self.context.translate(subject_tpl, mapping=mapping)
def get_id(brain_or_object):
    """Proxy to bika.lims.api.get_id

    :param brain_or_object: a catalog brain or content object
    :returns: the ID of the brain or object
    """
    return api.get_id(brain_or_object)
def get_variables(context, **kw): """Prepares a dictionary of key->value pairs usable for ID formatting """ # allow portal_type override portal_type = kw.get("portal_type") or api.get_portal_type(context) # The variables map hold the values that might get into the constructed id variables = { 'context': context, 'id': api.get_id(context), 'portal_type': portal_type, 'year': get_current_year(), 'parent': api.get_parent(context), 'seq': 0, } # Augment the variables map depending on the portal type if portal_type == "AnalysisRequest": variables.update({ 'sampleId': context.getSample().getId(), 'sample': context.getSample(), }) elif portal_type == "SamplePartition": variables.update({ 'sampleId': context.aq_parent.getId(), 'sample': context.aq_parent, }) elif portal_type == "Sample": # get the prefix of the assigned sample type sample_id = context.getId() sample_type = context.getSampleType() sampletype_prefix = sample_type.getPrefix() date_now = DateTime() sampling_date = context.getSamplingDate() date_sampled = context.getDateSampled() # Try to get the date sampled and sampling date if sampling_date: samplingDate = DT2dt(sampling_date) else: # No Sample Date? logger.error("Sample {} has no sample date set".format(sample_id)) # fall back to current date samplingDate = DT2dt(date_now) if date_sampled: dateSampled = DT2dt(date_sampled) else: # No Sample Date? logger.error("Sample {} has no sample date set".format(sample_id)) dateSampled = DT2dt(date_now) variables.update({ 'clientId': context.aq_parent.getClientID(), 'dateSampled': dateSampled, 'samplingDate': samplingDate, 'sampleType': sampletype_prefix, }) return variables