def get_object_by_record(record):
    """Find an object by a given record

    Inspects the record to locate an object

    :param record: A dictionary representation of an object
    :type record: dict
    :returns: Found Object or None
    :rtype: object
    """
    # nothing to do here
    if not record:
        return None
    if record.get("uid"):
        return get_object_by_uid(record["uid"])
    if record.get("path"):
        return get_object_by_path(record["path"])
    if record.get("parent_path") and record.get("id"):
        path = "/".join([record["parent_path"], record["id"]])
        return get_object_by_path(path)
    logger.warn("get_object_by_record::No object found! record='%r'" % record)
    return None

def guard_sample_prep_complete_transition(self):
    """This relies on a user-created workflow, so this function must defend
    against user errors.

    AR and Analysis guards refer to this one.

    - If an error is encountered, do not permit the object to proceed.
      Break this rule carelessly and you may see recursive automatic
      workflows.

    - If the sampleprep workflow is badly configured, the primary
      review_state can get stuck in "sample_prep" forever.
    """
    wftool = getToolByName(self, "portal_workflow")
    try:
        # get sampleprep workflow object.
        sp_wf_name = self.getPreparationWorkflow()
        sp_wf = wftool.getWorkflowById(sp_wf_name)
        # get sampleprep_review state.
        sp_review_state = wftool.getInfoFor(self, "sampleprep_review_state")
        assert sp_review_state
    except WorkflowException as e:
        logger.warn("guard_sample_prep_complete_transition: "
                    "WorkflowException %s" % e)
        return False
    except AssertionError:
        logger.warn("'%s': cannot get 'sampleprep_review_state'"
                    % sp_wf_name)
        return False

    # get state from workflow - error = allow transition
    # get possible exit transitions for state: error = allow transition
    transitions = sp_wf
    if len(transitions) > 0:
        return False
    return True

def query_parent_objects(self, context, query=None):
    """Return the objects of the same type from the parent object

    :param query: Catalog query to narrow down the objects
    :type query: dict
    :returns: Content objects of the same portal type in the parent
    """
    # return the object values if we have no catalog query
    if query is None:
        return self.get_parent_objects(context)

    # avoid undefined reference of catalog in except...
    catalog = None

    # try to fetch the results via the catalog
    try:
        catalogs = api.get_catalogs_for(context)
        catalog = catalogs[0]
        return map(api.get_object, catalog(query))
    except (IndexError, UnicodeDecodeError, ParseError, api.BikaLIMSError) as e:
        # fall back to the object values of the parent
        logger.warn("UniqueFieldValidator: Catalog query {} failed "
                    "for catalog {} ({}) -> returning object values of {}"
                    .format(query, repr(catalog), str(e),
                            repr(api.get_parent(context))))
        return self.get_parent_objects(context)

def renameAfterCreation(obj):
    """Rename the content after it was created/added
    """
    # Check if the _bika_id was already set
    bika_id = getattr(obj, "_bika_id", None)
    if bika_id is not None:
        return bika_id

    # Can't rename without a subtransaction commit when using portal_factory
    transaction.savepoint(optimistic=True)

    # The id returned should be normalized already
    new_id = None

    # Checking if an adapter exists for this content type. If yes, we will
    # get new_id from adapter.
    for name, adapter in getAdapters((obj, ), IIdServer):
        if new_id:
            logger.warn("More than one ID Generator Adapter found for "
                        "content type -> %s" % obj.portal_type)
        new_id = adapter.generate_id(obj.portal_type)
    if not new_id:
        new_id = generateUniqueId(obj)

    # TODO: This is a naive check just in current folder
    # -> this should check globally for duplicate objects with same prefix
    # N.B. a check like `search_by_prefix` each time would probably slow
    #      things down too much!
    # -> A solution could be to store all IDs with a certain prefix in a
    #    storage
    parent = api.get_parent(obj)
    if new_id in parent.objectIds():
        # XXX We could do the check in a `while` loop and generate a new one.
        raise KeyError("The ID {} is already taken in the path {}".format(
            new_id, api.get_path(parent)))

    # rename the object to the new id
    parent.manage_renameObject(obj.id, new_id)

    return new_id

def _to_service(self, thing):
    """Convert to Analysis Service

    :param thing: UID/Catalog Brain/Object/Something
    :returns: Analysis Service object or None
    """
    # Convert UIDs to objects
    if api.is_uid(thing):
        thing = api.get_object_by_uid(thing, None)

    # Bail out if the thing is not a valid object
    if not api.is_object(thing):
        logger.warn("'{}' is not a valid object!".format(repr(thing)))
        return None

    # Ensure we have an object here and not a brain
    obj = api.get_object(thing)

    if IAnalysisService.providedBy(obj):
        return obj

    if IAnalysis.providedBy(obj):
        return obj.getAnalysisService()

    # An object, but neither an Analysis nor an AnalysisService?
    # This should never happen.
    msg = "ARAnalysesField doesn't accept objects of {} type. " \
          "The object will be dismissed.".format(api.get_portal_type(obj))
    logger.warn(msg)
    return None

def getRequest(self):
    """Return the AR to which this is linked

    There is a short time between creation and linking when it is not linked.
    """
    # Attachment field in AnalysisRequest is still a ReferenceField, not
    # an UIDReferenceField yet.
    tool = getToolByName(self, REFERENCE_CATALOG)
    uids = [uid for uid in
            tool.getBackReferences(self, 'AnalysisRequestAttachment')]
    if len(uids) > 1:
        logger.warn("Attachment assigned to more than one Analysis Request. "
                    "This should never happen! The first Analysis Request "
                    "will be returned.")
    if len(uids) > 0:
        reference = uids[0]
        ar = tool.lookupObject(reference.sourceUID)
        return ar

    # This Attachment is not linked directly to an Analysis Request, but
    # probably linked to an Analysis, so try to get the Analysis Request
    # from there.
    analysis = self.getAnalysis()
    if IRequestAnalysis.providedBy(analysis):
        return analysis.getRequest()

    return None

def get_tool(name, context=None, default=_marker):
    """Get a portal tool by name

    :param name: The name of the tool, e.g. `portal_catalog`
    :type name: string
    :param context: A portal object
    :type context: ATContentType/DexterityContentType/CatalogBrain
    :returns: Portal Tool
    """
    # Try first with the context
    if context is not None:
        try:
            context = get_object(context)
            return getToolByName(context, name)
        except (BikaLIMSError, AttributeError) as e:
            # https://github.com/senaite/bika.lims/issues/396
            logger.warn("get_tool::getToolByName({}, '{}') failed: {} "
                        "-> falling back to plone.api.portal.get_tool('{}')"
                        .format(repr(context), name, repr(e), name))
            return get_tool(name, default=default)

    # Try with the plone api
    try:
        return ploneapi.portal.get_tool(name)
    except InvalidParameterError:
        if default is not _marker:
            return default
        fail("No tool named '%s' found." % name)

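# A minimal usage sketch of the lookup order above; `some_brain` and the
# fallback value are illustrative assumptions, not part of the original code.
catalog = get_tool("portal_catalog", context=some_brain, default=None)
if catalog is None:
    # no such tool registered -> caller decides how to recover
    logger.warn("portal_catalog tool not available")
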
def __call__(self):
    out = {
        "title": None,
        "instrument": None,
        "methods": [],
    }

    try:
        plone.protect.CheckAuthenticator(self.request)
    except Forbidden:
        logger.warn("Forbidden. Request authenticator missing or invalid.")
        return json.dumps(out)

    uc = getToolByName(self, 'uid_catalog')
    brains = uc(UID=self.request.get("uid", '0'))
    if brains:
        instrument = brains[0].getObject()
        out["title"] = instrument.Title()
        out["instrument"] = instrument.UID()
        # Handle multiple Methods per instrument
        methods = instrument.getMethods()
        for method in methods:
            out["methods"].append({
                "uid": method.UID(),
                "title": method.Title(),
            })

    return json.dumps(out)

def wrapper(f):
    try:
        router.DefaultRouter.add_url_rule(route,
                                          endpoint=endpoint,
                                          view_func=f,
                                          options=kw)
    except AssertionError as e:
        logger.warn("Failed to register route {}: {}".format(route, e))

def get_sort_on(allowed_indexes=None):
    """Returns the 'sort_on' from the request
    """
    sort_on = get("sort_on")
    if allowed_indexes and sort_on not in allowed_indexes:
        logger.warn("Index '{}' is not in allowed_indexes".format(sort_on))
        return None
    return sort_on

def setImportDataInterface(self, values):
    """Set the list of import data interfaces, keeping only known ones
    """
    exims = self.getImportDataInterfacesList()
    new_values = [value for value in values if value in exims]
    if len(new_values) < len(values):
        logger.warn("Some Interfaces weren't added...")
    self.Schema().getField('ImportDataInterface').set(self, new_values)

def getRetest(self):
    """Returns the retest that comes from this analysis, if any
    """
    back_refs = get_backreferences(self, 'AnalysisRetestOf')
    if not back_refs:
        return None
    if len(back_refs) > 1:
        logger.warn("Analysis {} with multiple retests".format(self.id))
    return api.get_object_by_uid(back_refs[0])

def reindexIndex(self, catalog, index):
    cat = self._getCatalog(catalog)
    if index not in cat.indexes():
        logger.warn("Index {} not found in {}".format(index, catalog))
        return
    indexes = self.reindexcatalog.get(cat.id, [])
    if index not in indexes:
        indexes.append(index)
        self.reindexcatalog[cat.id] = indexes

def getRetest(self):
    """Returns the retest that comes from this analysis, if any
    """
    relationship = "{}RetestOf".format(self.portal_type)
    back_refs = get_backreferences(self, relationship)
    if not back_refs:
        return None
    if len(back_refs) > 1:
        logger.warn("Analysis {} with multiple retests".format(self.id))
    return api.get_object_by_uid(back_refs[0])

def isVisible(self, field, mode="view", default="visible"):
    """Returns whether the field is visible in a given mode
    """
    if mode != "edit":
        return default
    if not hasattr(self.context, "isOpen"):
        logger.warn("Object {} does not have 'isOpen' method defined".format(
            self.context.__class__.__name__))
        return default
    return self.context.isOpen() and "visible" or "invisible"

def get_metadata_for(instance, catalog):
    """Returns the metadata for the given instance from the specified catalog
    """
    path = api.get_path(instance)
    try:
        return catalog.getMetadataForUID(path)
    except KeyError:
        logger.warn("Cannot get metadata from {}. Path not found: {}".format(
            catalog.id, path))
        return {}

def doActionFor(instance, action_id):
    actionperformed = False
    message = ''
    if not skip(instance, action_id, peek=True):
        try:
            api.do_transition_for(instance, action_id)
            actionperformed = True
        except api.BikaLIMSError as e:
            message = str(e)
            logger.warn(message)
    return actionperformed, message

def t(i18n_msg):
    """Safely translate and convert to UTF8, any zope i18n msgid returned from
    a bikaMessageFactory _
    """
    text = to_unicode(i18n_msg)
    try:
        text = translate(text)
    except UnicodeDecodeError:
        # TODO: This is only a quick fix
        logger.warn("{} couldn't be translated".format(text))
    return to_utf8(text)

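# A short usage sketch, assuming `_` is the bikaMessageFactory mentioned in
# the docstring; the msgid is illustrative.
label = t(_("Analysis Request"))
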
def delete_attachment(self, attachment):
    """Delete attachment from the AR or Analysis

    The attachment will be only deleted if it is not further referenced by
    another AR/Analysis.
    """
    # Get the holding parent of this attachment
    parent = None
    if attachment.getLinkedRequests():
        # Holding parent is an AR
        parent = attachment.getRequest()
    elif attachment.getLinkedAnalyses():
        # Holding parent is an Analysis
        parent = attachment.getAnalysis()

    if parent is None:
        logger.warn(
            "Attachment {} is nowhere assigned. This should never happen!"
            .format(repr(attachment)))
        return False

    # Get the other attachments of the holding parent
    attachments = parent.getAttachment()

    # New attachments to set
    if attachment in attachments:
        attachments.remove(attachment)

    # Set the attachments w/o the current attachments
    parent.setAttachment(attachments)

    retain = False

    # Attachment is referenced by another Analysis
    if attachment.getLinkedAnalyses():
        holder = attachment.getAnalysis()
        logger.info("Attachment {} referenced by {} -> RETAIN".format(
            repr(attachment), repr(holder)))
        retain = True

    # Attachment is referenced by another AR
    if attachment.getLinkedRequests():
        holder = attachment.getRequest()
        logger.info("Attachment {} referenced by {} -> RETAIN".format(
            repr(attachment), repr(holder)))
        retain = True

    # Delete attachment finally
    if retain is False:
        client = api.get_parent(attachment)
        client.manage_delObjects([attachment.getId(), ])

def set(self, instance, value, **kw):
    """Converts the value into a DateTime object before setting.
    """
    try:
        value = DateTime(value)
    except SyntaxError:
        logger.warn("Value '{}' is not a valid DateTime string".format(value))
        return False
    self._set(instance, value, **kw)

def set(self, instance, value, **kw):
    """Converts the value into a DateTime object before setting.
    """
    try:
        value = DateTime(value)
    except SyntaxError:
        logger.warn("Value '{}' is not a valid DateTime string"
                    .format(value))
        return False
    self._set(instance, value, **kw)

def disable_csrf_protection():
    """Disables the CSRF protection
    https://pypi.python.org/pypi/plone.protect
    """
    if not HAS_PLONE_PROTECT:
        logger.warn(
            "Can not disable CSRF protection – please install plone.protect")
        return False
    request = get_request()
    interface.alsoProvides(request, IDisableCSRFProtection)
    return True

def doActionFor(instance, action_id):
    actionperformed = False
    message = ''
    if not skip(instance, action_id, peek=True):
        try:
            api.do_transition_for(instance, action_id)
            actionperformed = True
        except ploneapi.exc.InvalidParameterError as e:
            message = str(e)
            logger.warn("Failed to perform transition {} on {}: {}".format(
                action_id, instance, message))
    return actionperformed, message

def Import(self):
    s_t = ''
    c_t = 'lab'
    bucket = {}
    pc = getToolByName(self.context, 'portal_catalog')
    bsc = getToolByName(self.context, 'bika_setup_catalog')
    # collect up all values into the bucket
    for row in self.get_rows(3):
        c_t = row['Client_title'] if row['Client_title'] else 'lab'
        if c_t not in bucket:
            bucket[c_t] = {}
        s_t = row['SampleType_title'] if row['SampleType_title'] else s_t
        if s_t not in bucket[c_t]:
            bucket[c_t][s_t] = []
        service = bsc(portal_type='AnalysisService', title=row['service'])
        if not service:
            service = bsc(portal_type='AnalysisService',
                          getKeyword=row['service'])
        try:
            service = service[0].getObject()
            bucket[c_t][s_t].append({
                'keyword': service.getKeyword(),
                'min': row.get('min', '0'),
                'max': row.get('max', '0'),
                'minpanic': row.get('minpanic', '0'),
                'maxpanic': row.get('maxpanic', '0'),
                'error': row.get('error', '0'),
            })
        except IndexError:
            warning = "Error with service name %s on sheet %s. " \
                      "Service not uploaded."
            logger.warning(warning, row.get('service', ''), self.sheetname)

    # write objects.
    for c_t in bucket:
        if c_t == 'lab':
            folder = self.context.bika_setup.bika_analysisspecs
        else:
            folder = pc(portal_type='Client', title=c_t)
            if not folder or len(folder) != 1:
                logger.warn("Client %s not found. Omitting client "
                            "specifications." % c_t)
                continue
            folder = folder[0].getObject()
        for s_t in bucket[c_t]:
            resultsrange = bucket[c_t][s_t]
            sampletype = bsc(portal_type='SampleType', title=s_t)[0]
            _id = folder.invokeFactory('AnalysisSpec', id=tmpID())
            obj = folder[_id]
            obj.edit(title=sampletype.Title, ResultsRange=resultsrange)
            obj.setSampleType(sampletype.UID)
            obj.unmarkCreationFlag()
            renameAfterCreation(obj)

def Import(self):
    s_t = ''
    c_t = 'lab'
    bucket = {}
    pc = getToolByName(self.context, 'portal_catalog')
    bsc = getToolByName(self.context, 'bika_setup_catalog')
    # collect up all values into the bucket
    for row in self.get_rows(3):
        c_t = row['Client_title'] if row['Client_title'] else 'lab'
        if c_t not in bucket:
            bucket[c_t] = {}
        s_t = row['SampleType_title'] if row['SampleType_title'] else s_t
        if s_t not in bucket[c_t]:
            bucket[c_t][s_t] = []
        service = bsc(portal_type='AnalysisService', title=row['service'])
        if not service:
            service = bsc(portal_type='AnalysisService',
                          getKeyword=row['service'])
        try:
            service = service[0].getObject()
            bucket[c_t][s_t].append({
                'keyword': service.getKeyword(),
                'min': row.get('min', '0'),
                'max': row.get('max', '0'),
                'minpanic': row.get('minpanic', '0'),
                'maxpanic': row.get('maxpanic', '0'),
                'error': row.get('error', '0'),
            })
        except IndexError:
            warning = "Error with service name %s on sheet %s. " \
                      "Service not uploaded."
            logger.warning(warning, row.get('service', ''), self.sheetname)

    # write objects.
    for c_t in bucket:
        if c_t == 'lab':
            folder = self.context.bika_setup.bika_analysisspecs
        else:
            folder = pc(portal_type='Client', title=c_t)
            if not folder or len(folder) != 1:
                logger.warn("Client %s not found. Omitting client "
                            "specifications." % c_t)
                continue
            folder = folder[0].getObject()
        for s_t in bucket[c_t]:
            resultsrange = bucket[c_t][s_t]
            sampletype = bsc(portal_type='SampleType', title=s_t)[0]
            _id = folder.invokeFactory('AnalysisSpec', id=tmpID())
            obj = folder[_id]
            obj.edit(title=sampletype.Title, ResultsRange=resultsrange)
            obj.setSampleType(sampletype.UID)
            obj.unmarkCreationFlag()
            renameAfterCreation(obj)

def t(i18n_msg):
    """Safely translate and convert to UTF8, any zope i18n msgid returned from
    a bikaMessageFactory _
    """
    text = to_unicode(i18n_msg)
    try:
        request = api.get_request()
        domain = getattr(i18n_msg, "domain", "senaite.core")
        text = translate(text, domain=domain, context=request)
    except UnicodeDecodeError:
        # TODO: This is only a quick fix
        logger.warn("{} couldn't be translated".format(text))
    return to_utf8(text)

def doActionFor(instance, action_id):
    actionperformed = False
    message = ''
    workflow = api.portal.get_tool("portal_workflow")
    if not skip(instance, action_id, peek=True):
        try:
            workflow.doActionFor(instance, action_id)
            actionperformed = True
        except WorkflowException as e:
            message = str(e)
            logger.warn("Failed to perform transition {} on {}: {}".format(
                action_id, instance, message))
    return actionperformed, message

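# A small usage sketch of the (performed, message) contract shared by the
# doActionFor variants above; the `sample` object and the "receive"
# transition id are illustrative assumptions.
performed, message = doActionFor(sample, "receive")
if not performed:
    logger.warn("Transition skipped or failed: {}".format(message))
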
def json_data(self, instance, default=None):
    """Get a JSON compatible value
    """
    value = self.get(instance)
    out = []
    for rel in value:
        if rel.isBroken():
            logger.warn("Skipping broken relation {}".format(repr(rel)))
            continue
        obj = rel.to_object
        out.append(api.get_url_info(obj))
    return out

def has_permission(self, permission, obj=None):
    """Returns whether the current user has rights for the permission passed in

    :param permission: permission identifier
    :param obj: object to check the permission against
    :return: True if the user has rights for the permission passed in
    """
    if not permission:
        logger.warn("None permission is not allowed")
        return False
    if obj is None:
        return check_permission(permission, self.context)
    return check_permission(permission, obj)

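# A minimal usage sketch from a view/adapter exposing has_permission(); the
# permission ids and `some_child` object are illustrative assumptions.
can_edit = self.has_permission("Modify portal content")
can_view_child = self.has_permission("View", obj=some_child)
if not can_edit:
    logger.warn("Current user cannot edit {}".format(repr(self.context)))
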
def get_rejection_mail(sample, rejection_pdf=None):
    """Generates an email to sample contacts with rejection reasons
    """
    # Get the reasons
    reasons = sample.getRejectionReasons()
    reasons = reasons and reasons[0] or {}
    reasons = reasons.get("selected", []) + [reasons.get("other")]
    reasons = filter(None, reasons)
    reasons = "<br/>- ".join(reasons)

    # Render the email body
    setup = api.get_setup()
    lab_address = setup.laboratory.getPrintAddress()
    email_body = Template(setup.getEmailBodySampleRejection())
    email_body = email_body.safe_substitute({
        "lab_address": "<br/>".join(lab_address),
        "reasons": reasons and "<br/>-{}".format(reasons) or "",
        "sample_id": api.get_id(sample),
        "sample_link": get_link(api.get_url(sample), api.get_id(sample)),
    })

    def to_valid_email_address(contact):
        if not contact:
            return None
        address = contact.getEmailAddress()
        if not is_valid_email_address(address):
            return None
        return address

    # Get the recipients
    _to = [sample.getContact()] + sample.getCCContact()
    _to = map(to_valid_email_address, _to)
    _to = filter(None, _to)

    if not _to:
        # Cannot send an e-mail without recipient!
        logger.warn("No valid recipients for {}".format(api.get_id(sample)))
        return None

    lab = api.get_setup().laboratory
    attachments = rejection_pdf and [rejection_pdf] or []

    return compose_email(from_addr=lab.getEmailAddress(),
                         to_addr=_to,
                         subj=_("%s has been rejected") % api.get_id(sample),
                         body=email_body,
                         attachments=attachments)

def SetDepartmentCookies(event):
    """Login event handler.

    When user logs in for the first time, we are setting department filtering
    cookie values.
    """
    # Fix for https://jira.bikalabs.com/browse/LIMS-2597
    if not is_bika_installed():
        logger.warn("Package 'bika.lims' is not installed, skipping event "
                    "handler for IUserLoggedInEvent.")
        return

    # get the bika_setup object
    portal = api.portal.get()
    bika_setup = portal.get("bika_setup")

    # just to be sure...
    # This should go into the api.py module once it is in place
    if bika_setup is None:
        raise RuntimeError("bika_setup not found in this Bika LIMS "
                           "installation")

    # Getting request, response and username
    request = api.env.getRequest()
    response = request.RESPONSE
    user = api.user.get_current()
    username = user and user.getUserName() or None
    portal_catalog = api.portal.get_tool("portal_catalog")

    if bika_setup.getAllowDepartmentFiltering():
        dep_for_cookie = ''
        if username == 'admin':
            departments = portal_catalog(portal_type='Department',
                                         sort_on='sortable_title',
                                         sort_order='ascending',
                                         inactive_state='active')
            for department in departments:
                dep_for_cookie += department.UID + ','
            response.setCookie('dep_filter_disabled', 'true', path='/',
                               max_age=24 * 3600)
        else:
            labcontact = portal_catalog(portal_type='LabContact',
                                        getUsername=username)
            if labcontact:
                departments = labcontact[0].getObject().getSortedDepartments()
                dep_for_cookie = departments[0].UID() \
                    if len(departments) > 0 else ''
        response.setCookie('filter_by_department_info', dep_for_cookie,
                           path='/', max_age=24 * 3600)
    else:
        response.setCookie('filter_by_department_info', None, path='/',
                           max_age=0)
        response.setCookie('dep_filter_disabled', None, path='/', max_age=0)

def search(self, query, search_term, search_field, catalog):
    """Performs a search against the catalog and returns the brains
    """
    logger.info("Reference Widget Catalog: {}".format(catalog.id))
    if not search_term:
        return catalog(query)

    index = self.get_index(search_field, catalog)
    if not index:
        logger.warn("*** Index not found: '{}'".format(search_field))
        return []

    meta = index.meta_type
    if meta == "TextIndexNG3":
        query[index.id] = "{}*".format(search_term)
    elif meta == "ZCTextIndex":
        logger.warn("*** Field '{}' ({}). Better use TextIndexNG3".format(
            search_field, meta))
        query[index.id] = "{}*".format(search_term)
    elif meta in ["FieldIndex", "KeywordIndex"]:
        logger.warn("*** Field '{}' ({}). Better use TextIndexNG3".format(
            search_field, meta))
        query[index.id] = search_term
    else:
        logger.warn("*** Index '{}' ({}) not supported".format(
            search_field, meta))
        return []

    logger.info("Reference Widget Query: {}".format(repr(query)))
    return catalog(query)

def getAnalysis(self):
    """Return the analysis to which this is linked

    It may not be linked to an analysis.
    """
    analysis = get_backreferences(self, 'AnalysisAttachment', as_brains=True)
    if not analysis:
        return None
    if len(analysis) > 1:
        logger.warn("Single attachment assigned to more than one Analysis")
    analysis = api.get_object(analysis[0])
    return analysis

def unlink_version(self, source, target):
    """Unlink the current version of the target from the source
    """
    if not hasattr(source, REFERENCE_VERSIONS):
        return
    target_uid = api.get_uid(target)
    if target_uid in source.reference_versions:
        # delete the version
        del source.reference_versions[target_uid]
        # persist changes that occurred to referenced versions
        source._p_changed = 1
    else:
        logger.warn("No version link found on '{}' -> '{}'".format(
            repr(source), repr(target)))

def ClearDepartmentCookies(event):
    """Logout event handler.

    When user explicitly logs out from the Logout menu, clean department
    filtering related cookies.
    """
    if not is_bika_installed():
        logger.warn("Package 'bika.lims' is not installed, skipping event "
                    "handler for IUserLoggedOutEvent.")
        return

    request = api.env.getRequest()
    response = request.RESPONSE

    # Voiding our special cookie on logout
    response.setCookie('filter_by_department_info', None, path='/', max_age=0)
    response.setCookie('dep_filter_disabled', None, path='/', max_age=0)

def write_at_field_values(self, obj_or_path, **kwargs):
    """Write valid field values from kwargs into the object's AT fields.

    obj_or_path could be an object or a path to an object, relative to the
    portal root.  This makes the keyword much easier to use directly from
    within a robot test.
    """
    portal = api.portal.get()
    if isinstance(obj_or_path, basestring):
        obj = portal.restrictedTraverse(obj_or_path)
    else:
        obj = obj_or_path
    uc = getToolByName(obj, 'uid_catalog')
    schema = obj.Schema()
    # fields contains all schema-valid field values from the request.
    fields = {}
    for fieldname, value in kwargs.items():
        if fieldname not in schema:
            continue
        field = schema.getField(fieldname)
        fieldtype = field.getType()
        mutator = field.getMutator(obj)
        if schema[fieldname].type in ('reference',):
            # Assume that the value is a UID
            brains = uc(UID=value)
            if not brains:
                logger.warn("Can't resolve: %s:%s" % (fieldname, value))
                continue
            if schema[fieldname].multiValued:
                value = [b.UID for b in brains] if brains else []
            else:
                value = brains[0].UID if brains else None
        elif fieldtype == 'Products.Archetypes.Field.BooleanField':
            if value.lower() in ('0', 'false', 'no') or not value:
                value = False
            else:
                value = True
        elif fieldtype in [
                'Products.ATExtensions.field.records.RecordsField',
                'Products.ATExtensions.field.records.RecordField',
                'bika.lims.browser.fields.referenceresultsfield.ReferenceResultsField']:
            value = eval(value)
        if mutator:
            mutator(value)
        else:
            field.set(obj, value)
    obj.reindexObject()

def get_date(context, value):
    """Tries to return a DateTime.DateTime object
    """
    if not value:
        return None
    if isinstance(value, DateTime):
        return value
    if isinstance(value, datetime):
        return dt2DT(value)
    if not isinstance(value, basestring):
        return None

    def try_parse(date_string, format):
        if not format:
            return None
        try:
            struct_time = strptime(date_string, format)
            return datetime(*struct_time[:6])
        except ValueError:
            pass
        return None

    def get_locale_format(key, context):
        format = context.translate(key, domain="senaite.core", mapping={})
        # TODO: Is this replacement below strictly necessary?
        return format.replace(r"${", '%').replace('}', '')

    # Try with prioritized formats
    formats = [
        get_locale_format("date_format_long", context),
        get_locale_format("date_format_short", context),
        "%Y-%m-%d %H:%M",
        "%Y-%m-%d",
        "%Y-%m-%d %H:%M:%S",
    ]
    for pri_format in formats:
        val = try_parse(value, pri_format)
        if not val:
            continue
        val = dt2DT(val)
        if val.timezoneNaive():
            # Use local timezone for tz naive strings
            # see http://dev.plone.org/plone/ticket/10141
            zone = val.localZone(safelocaltime(val.timeTime()))
            parts = val.parts()[:-1] + (zone,)
            val = DateTime(*parts)
        return val

    logger.warn("Unable to convert {} to datetime".format(value))
    return None

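# A minimal usage sketch, assuming `portal` is any context object providing a
# translate() method for the locale date formats; the date string is
# illustrative.
date = get_date(portal, "2018-05-21 14:30")
if date is None:
    logger.warn("Could not parse the date string")
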
def get_dashboard_registry_record():
    """Return the 'bika.lims.dashboard_panels_visibility' values.

    :return: A dictionary or None
    """
    try:
        registry = api.portal.get_registry_record(
            'bika.lims.dashboard_panels_visibility')
        return registry
    except InvalidParameterError:
        # No entry in the registry for dashboard panels roles.
        # Maybe upgradestep 1.1.8 was not run?
        logger.warn("Cannot find a record with name "
                    "'bika.lims.dashboard_panels_visibility' in "
                    "registry_record. Missed upgrade 1.1.8?")
    return dict()

def send_mail(self, sender, receiver, subject="", body=""):
    """Send email from sender to receiver
    """
    mime_msg = MIMEMultipart('related')
    mime_msg['Subject'] = subject
    mime_msg['From'] = sender
    mime_msg['To'] = receiver
    msg_txt = MIMEText(body, 'plain')
    mime_msg.attach(msg_txt)
    try:
        host = getToolByName(self, 'MailHost')
        host.send(mime_msg.as_string(), immediate=True)
    except SMTPServerDisconnected as msg:
        logger.warn("SMTPServerDisconnected: %s." % msg)
    except SMTPRecipientsRefused as msg:
        raise WorkflowException(str(msg))

def url_for(endpoint, default=DEFAULT_ENDPOINT, **values):
    """Looks up the API URL for the given endpoint

    :param endpoint: The name of the registered route (aka endpoint)
    :type endpoint: string
    :returns: External URL for this endpoint
    :rtype: string/None
    """
    try:
        return router.url_for(endpoint, force_external=True, values=values)
    except Exception:
        logger.warn("Could not build API URL for endpoint '%s'. "
                    "No route provider registered?" % endpoint)
        # build generic API URL
        return router.url_for(default, force_external=True, values=values)

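# A usage sketch of the fallback behaviour above; the endpoint name and
# `sample_uid` are illustrative assumptions, and unknown endpoints resolve to
# DEFAULT_ENDPOINT instead of raising.
api_url = url_for("get", uid=sample_uid)
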
def getUpperDetectionLimit(self):
    """Returns the Upper Detection Limit (UDL) that applies to this analysis
    in particular. If no value set or the analysis service doesn't allow
    manual input of detection limits, returns the value set by default in
    the Analysis Service
    """
    if self.isUpperDetectionLimit():
        result = self.getResult()
        try:
            # in this case, the result itself is the UDL.
            return float(result)
        except (TypeError, ValueError):
            logger.warn("The result for the analysis %s is an upper "
                        "detection limit, but not floatable: '%s'. "
                        "Returning AS's default UDL." % (self.id, result))
    return AbstractBaseAnalysis.getUpperDetectionLimit(self)

def cloneAR(self, ar):
    newar = _createObjectByType("AnalysisRequest", ar.aq_parent, tmpID())
    newar.setSample(ar.getSample())
    ignore_fieldnames = [
        'Analyses', 'DatePublished', 'DatePublishedViewer',
        'ParentAnalysisRequest', 'ChildAnalysisRequest', 'Digest', 'Sample'
    ]
    copy_field_values(ar, newar, ignore_fieldnames=ignore_fieldnames)

    # Set the results for each AR analysis
    ans = ar.getAnalyses(full_objects=True)
    # If a whole AR is retracted and contains retracted Analyses, these
    # retracted analyses won't be created/shown in the new AR
    workflow = getToolByName(self, "portal_workflow")
    analyses = [x for x in ans
                if workflow.getInfoFor(x, "review_state") not in
                ("retracted",)]
    for an in analyses:
        if hasattr(an, 'IsReflexAnalysis') and an.IsReflexAnalysis:
            # We don't want reflex analyses to be copied
            continue
        try:
            nan = _createObjectByType("Analysis", newar, an.getKeyword())
        except Exception as e:
            from bika.lims import logger
            logger.warn('Cannot create analysis %s inside %s (%s)' %
                        (an.getAnalysisService().Title(), newar, e))
            continue
        # Make a copy
        ignore_fieldnames = ['Verificators', 'DataAnalysisPublished']
        copy_field_values(an, nan, ignore_fieldnames=ignore_fieldnames)
        nan.unmarkCreationFlag()
        zope.event.notify(ObjectInitializedEvent(nan))
        changeWorkflowState(nan, 'bika_analysis_workflow', 'to_be_verified')
        nan.reindexObject()

    newar.reindexObject()
    newar.aq_parent.reindexObject()
    renameAfterCreation(newar)

    if hasattr(ar, 'setChildAnalysisRequest'):
        ar.setChildAnalysisRequest(newar)
    newar.setParentAnalysisRequest(ar)
    return newar

def write_at_field_values(self, obj_or_path, **kwargs):
    """Write valid field values from kwargs into the object's AT fields.

    obj_or_path could be an object or a path to an object, relative to the
    portal root.  This makes the keyword much easier to use directly from
    within a robot test.
    """
    portal = api.portal.get()
    if isinstance(obj_or_path, basestring):
        obj = portal.restrictedTraverse(obj_or_path)
    else:
        obj = obj_or_path
    uc = getToolByName(obj, 'uid_catalog')
    schema = obj.Schema()
    # fields contains all schema-valid field values from the request.
    fields = {}
    for fieldname, value in kwargs.items():
        if fieldname not in schema:
            continue
        field = schema.getField(fieldname)
        fieldtype = field.getType()
        mutator = field.getMutator(obj)
        if schema[fieldname].type in ('reference',):
            # Assume that the value is a UID
            brains = uc(UID=value)
            if not brains:
                logger.warn("Can't resolve: %s:%s" % (fieldname, value))
                continue
            if schema[fieldname].multiValued:
                value = [b.UID for b in brains] if brains else []
            else:
                value = brains[0].UID if brains else None
        elif fieldtype == 'Products.Archetypes.Field.BooleanField':
            if value.lower() in ('0', 'false', 'no') or not value:
                value = False
            else:
                value = True
        elif fieldtype in [
                'Products.ATExtensions.field.records.RecordsField',
                'Products.ATExtensions.field.records.RecordField',
                'bika.lims.browser.fields.referenceresultsfield.ReferenceResultsField']:
            value = eval(value)
        if mutator:
            mutator(value)
        else:
            field.set(obj, value)
    obj.reindexObject()

def set_dashboard_registry_record(registry_info):
    """Sets the 'bika.lims.dashboard_panels_visibility' values.

    :param registry_info: A dictionary type object with all its values as
        *unicode* objects.
    """
    try:
        api.portal.set_registry_record(
            'bika.lims.dashboard_panels_visibility', registry_info)
    except InvalidParameterError:
        # No entry in the registry for dashboard panels roles.
        # Maybe upgradestep 1.1.8 was not run?
        logger.warn("Cannot find a record with name "
                    "'bika.lims.dashboard_panels_visibility' in "
                    "registry_record. Missed upgrade 1.1.8?")

def set_dashboard_registry_record(registry_info):
    """Sets the 'bika.lims.dashboard_panels_visibility' values.

    :param registry_info: A dictionary type object with all its values as
        *unicode* objects.
    """
    try:
        api.portal.set_registry_record(
            'bika.lims.dashboard_panels_visibility', registry_info)
    except InvalidParameterError:
        # No entry in the registry for dashboard panels roles.
        # Maybe upgradestep 1.1.8 was not run?
        logger.warn("Cannot find a record with name "
                    "'bika.lims.dashboard_panels_visibility' in "
                    "registry_record. Missed upgrade 1.1.8?")

def update_object_with_data(content, record):
    """Update the content with the record data

    :param content: A single folderish catalog brain or content object
    :type content: ATContentType/DexterityContentType/CatalogBrain
    :param record: The data to update
    :type record: dict
    :returns: The updated content object
    :rtype: object
    :raises: APIError, :class:`~plone.jsonapi.routes.exceptions.APIError`
    """
    # ensure we have a full content object
    content = get_object(content)

    # get the proper data manager
    dm = IDataManager(content)
    if dm is None:
        fail(400, "Update for this object is not allowed")

    # Iterate through record items
    for k, v in record.items():
        try:
            success = dm.set(k, v, **record)
        except Unauthorized:
            fail(401, "Not allowed to set the field '%s'" % k)
        except ValueError as exc:
            fail(400, str(exc))
        if not success:
            logger.warn("update_object_with_data::skipping key=%r", k)
            continue
        logger.debug("update_object_with_data::field %r updated", k)

def resource_to_portal_type(resource):
    """Converts a resource to a portal type

    :param resource: Resource name as it is used in the content route
    :type resource: string
    :returns: Portal type name
    :rtype: string
    """
    if resource is None:
        return None

    resource_mapping = get_resource_mapping()
    portal_type = resource_mapping.get(resource.lower())

    if portal_type is None:
        logger.warn("Could not map the resource '{}' "
                    "to any known portal type".format(resource))

    return portal_type

def create_object(container, portal_type, **data):
    """Creates an object slug

    :returns: The new created content object
    :rtype: object
    """
    if "id" in data:
        # always omit the id as Bika LIMS generates a proper one
        id = data.pop("id")
        logger.warn("Passed in ID '{}' omitted! Bika LIMS "
                    "generates a proper ID for you".format(id))

    try:
        # Special case for ARs
        # => return immediately w/o update
        if portal_type == "AnalysisRequest":
            obj = create_analysisrequest(container, **data)
            # Omit values which are already set through the helper
            data = u.omit(data, "SampleType", "Analyses")
            # Set the container as the client, as the AR lives in it
            data["Client"] = container
        # Standard content creation
        else:
            # we want just a minimum viable object and set the data later
            obj = api.create(container, portal_type)
            # obj = api.create(container, portal_type, **data)
    except Unauthorized:
        fail(401, "You are not allowed to create this content")

    # Update the object with the given data, but omit the id
    try:
        update_object_with_data(obj, data)
    except APIError:
        # Failure in creation process, delete the invalid object
        container.manage_delObjects(obj.id)
        # reraise the error
        raise

    return obj

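# A minimal usage sketch of create_object() together with
# update_object_with_data(); `client_folder` and the field values are
# illustrative assumptions, not taken from the original code.
sample_type = create_object(client_folder, "SampleType",
                            title="Water", Prefix="W")
logger.info("Created {} in {}".format(sample_type.getId(),
                                      repr(client_folder)))
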