def get(self, instance, **kwargs):
    """retrieves the value of the same named field on the proxy object
    """
    # fallback value when no proxy object can be resolved
    fallback = self.getDefault(instance)

    # resolve the object we proxy to
    target = self.get_proxy(instance)

    # The proxy expression (e.g. 'context.getSample()' on an AR) may not
    # resolve to an object -> answer with the default value then
    if target is None:
        logger.debug("Expression '{}' did not return a valid Proxy Object on {}"
                     .format(self.proxy, instance))
        return fallback

    # the proxied object must provide an identically named field
    fname = self.getName()
    proxied_field = target.getField(fname)
    if proxied_field is None:
        raise KeyError("Object '{}' with id '{}' has no field named '{}'".format(
            target.portal_type, target.getId(), fname))

    # delegate the read to the proxied field
    return proxied_field.get(target)
def take_snapshot(obj, store=True, **kw):
    """Takes a snapshot of the passed in object

    :param obj: Content object
    :returns: New snapshot
    """
    logger.debug("📷 Take new snapshot for {}".format(repr(obj)))

    # combine the object data and metadata into one snapshot mapping
    snapshot = get_object_data(obj)
    snapshot["__metadata__"] = get_object_metadata(obj, **kw)

    # serialize the snapshot before persisting it
    data = json.dumps(snapshot)

    # caller only wanted the snapshot, not the persistence
    if not store:
        return snapshot

    # append the serialized snapshot to the object's storage
    storage = get_storage(obj)
    storage.append(data)

    # Mark the content as auditable
    alsoProvides(obj, IAuditable)

    return snapshot
def get_keyword_query(self, **kw):
    """Generates a query from the given keywords. Only known indexes make it
    into the generated query.

    :returns: Catalog query
    :rtype: dict
    """
    query = dict()

    # Only keywords matching a known catalog index are kept
    indexes = self.catalog.get_indexes()

    for key, value in kw.iteritems():
        # normalize 'uid' to the canonical index name
        if key.lower() == "uid":
            key = "UID"
        # portal_type values are always passed as a list
        if key.lower() == "portal_type":
            if value:
                value = _.to_list(value)
        if key not in indexes:
            logger.warn("Skipping unknown keyword parameter '%s=%s'" % (key, value))
            continue
        if value is None:
            logger.warn("Skip None value in kw parameter '%s=%s'" % (key, value))
            continue
        logger.debug("Adding '%s=%s' to query" % (key, value))
        query[key] = value

    return query
def reindexMovedObject(obj, event):
    """Reindex a moved/renamed object in all registered bika catalogs

    :param obj: the moved content object
    :param event: the IObjectMovedEvent carrying old/new name and parent
    """
    bika_catalogs = getattr(obj, "_bika_catalogs", [])
    for name in bika_catalogs:
        # BUGFIX: log message previously misspelled "Reindexing" as "Reidexing"
        logger.debug("Reindexing moved object '{}' in catalog '{}'".format(
            obj.getId(), name))
        catalog = api.get_tool(name)
        # old and new name
        old_name = event.oldName
        new_name = event.newName
        if old_name and new_name:
            # reconstruct the old physical path from the old parent + old name
            old_parent = event.oldParent
            old_ppath = api.get_path(old_parent)
            old_path = "/".join([old_ppath, old_name])
            # uncatalog the old path
            catalog.uncatalog_object(old_path)
        # reindex object under its new path
        catalog.reindexObject(obj)
def extract_fields(self):
    """Extract the given fieldnames from the object

    :returns: Schema name/value mapping
    :rtype: dict
    """
    # get the proper data manager for the object
    dm = IDataManager(self.context)

    out = dict()
    for fieldname in self.keys:
        # skip fields that were explicitly ignored
        if fieldname in self.ignore:
            continue
        try:
            # get the field value with the data manager
            fieldvalue = dm.json_data(fieldname)
        except Unauthorized:
            # https://github.com/collective/plone.jsonapi.routes/issues/52
            # -> skip restricted fields
            logger.debug("Skipping restricted field '%s'" % fieldname)
            continue
        except ValueError:
            logger.debug("Skipping invalid field '%s'" % fieldname)
            continue
        out[fieldname] = api.to_json_value(self.context, fieldname, fieldvalue)

    return out
def _set(self, instance, value, **kw):
    """Set the value of the field
    """
    logger.debug("ATFieldManager::set: value=%r" % value)

    field = self.field

    # guard: write permission is required
    if not field.checkPermission("write", instance):
        raise Unauthorized("You are not allowed to write the field {}"
                           .format(self.name))

    # guard: the field must not be read-only
    if not field.writeable(instance):
        raise Unauthorized("Field {} is read only."
                           .format(self.name))

    # the id field only accepts string values
    if self.name == "id":
        value = str(value)

    # resolve the AT mutator and call it through mapply, which inspects
    # the function and applies *args and **kwargs if possible
    mapply(field.getMutator(instance), value, **kw)

    return True
def set(self, instance, value, **kwargs):
    """writes the value to the same named field on the proxy object
    """
    target = self.get_proxy(instance)

    # The proxy expression (e.g. 'context.getSample()' on an AR) may not
    # resolve to an object -> nothing to write then
    if not target:
        logger.debug("Expression '{}' did not return a valid Proxy Object on {}"
                     .format(self.proxy, instance))
        return None

    # the proxied object must provide an identically named field
    fname = self.getName()
    proxied_field = target.getField(fname)
    if proxied_field is None:
        raise KeyError("Object '{}' with id '{}' has no field named '{}'".format(
            target.portal_type, target.getId(), fname))

    # write through to the proxied field
    proxied_field.set(target, value, **kwargs)

    # touch the modification dates of the proxied and the holding object
    now = DateTime.DateTime()
    target.setModificationDate(now)
    instance.setModificationDate(now)
def set(self, instance, value, **kwargs):
    """writes the value to the same named field on the proxy object
    """
    proxied = self._get_proxy(instance)

    # Bail out (returning None) if the proxy expression, e.g.
    # 'context.getSample()' on an AR, did not resolve to an object
    if not proxied:
        logger.debug(
            "Expression '{}' did not return a valid Proxy Object on {}".
            format(self.proxy, instance))
        return None

    field_name = self.getName()
    field = proxied.getField(field_name)

    # a same-named field is mandatory on the proxied object
    if field is None:
        raise KeyError(
            "Object '{}' with id '{}' has no field named '{}'".format(
                proxied.portal_type, proxied.getId(), field_name))

    # delegate the write to the proxied field
    field.set(proxied, value, **kwargs)
def _set(self, instance, value, **kw):
    """Set the value of the field
    """
    logger.debug("ATFieldManager::set: value=%r" % value)

    # writing requires the field-level write permission
    allowed = self.field.checkPermission("write", instance)
    if not allowed:
        raise Unauthorized(
            "You are not allowed to write the field {}".format(self.name))

    # the field itself must be writable as well
    if not self.field.writeable(instance):
        raise Unauthorized("Field {} is read only.".format(self.name))

    # the id field takes string values only
    if self.name == "id":
        value = str(value)

    # fetch the AT mutator for this field
    mutator = self.field.getMutator(instance)

    # mapply inspects the function and applies *args and **kwargs if possible
    mapply(mutator, value, **kw)

    return True
def _get(self, instance, **kw):
    """Get the value of the field
    """
    # trace the lookup for debugging purposes
    tmpl = "DexterityFieldManager::get: instance={} field={}"
    logger.debug(tmpl.format(instance, self.field))
    # TODO: Check security on the field level
    return self.field.get(instance)
def _get(self, instance, **kw):
    """Get the value of the field
    """
    field = self.field
    # trace the lookup for debugging purposes
    logger.debug("DexterityFieldManager::get: instance={} field={}"
                 .format(instance, field))
    # TODO: Check security on the field level
    return field.get(instance)
def get_object_by_uid(self, uid):
    """Get the object by UID

    :param uid: the UID to look up
    :returns: the content object or None if nothing was found
    """
    logger.debug("get_object_by_uid::UID={}".format(uid))
    obj = api.get_object_by_uid(uid, None)
    if obj is None:
        # BUGFIX: the UID placeholder was never filled in the log message
        logger.warn("!! No object found for UID #{} !!".format(uid))
    return obj
def getClientList(self, contentFilter):
    """Return the list of client brains to display.

    Uses portal_catalog to limit the result set, and wakes up at most one
    page of objects for the (expensive) search-term and permission checks.

    :param contentFilter: listing filter dict (not used here; the catalog
        query is built locally)
    :returns: list of client catalog brains
    """
    searchTerm = self.request.get(self.form_id + '_filter', '').lower()
    mtool = api.get_tool('portal_membership')
    state = self.request.get('%s_review_state' % self.form_id,
                             self.default_review_state)

    # This is used to decide how much of the objects need to be waked up
    # for further permission checks, which might get expensive on sites
    # with many clients
    # NOTE(review): this value comes straight from the request and may
    # still be a string here — confirm the `>` comparison below is intended
    list_pagesize = self.request.get("list_pagesize", self.pagesize)

    # map the requested review_state to the workflow states to query
    states = {
        'default': [
            'active',
        ],
        'active': [
            'active',
        ],
        'inactive': [
            'inactive',
        ],
        'all': ['active', 'inactive']
    }

    # Use the catalog to speed things up and also limit the results
    catalog = api.get_tool("portal_catalog")
    catalog_query = {
        "portal_type": "Client",
        "inactive_state": states[state],
        "sort_on": "sortable_title",
        "sort_order": "ascending",
    }

    # Inject the searchTerm to narrow the results further
    if searchTerm:
        catalog_query["SearchableText"] = searchTerm

    logger.debug("getClientList::catalog_query=%s" % catalog_query)
    brains = catalog(catalog_query)

    clients = []
    for brain in brains:
        # only wake up objects if they are shown on one page
        if len(clients) > list_pagesize:
            # otherwise append only the brain
            clients.append(brain)
            continue
        # wake up the object
        client = brain.getObject()
        # skip clients where the search term does not match
        if searchTerm and not client_match(client, searchTerm):
            continue
        # Only show clients to which we have Manage AR rights.
        # (ritamo only sees Happy Hills).
        if not mtool.checkPermission(ManageAnalysisRequests, client):
            continue
        # NOTE: the brain (not the woken object) is appended either way
        clients.append(brain)

    return clients
def _recursive_reindex_object_security(self, obj):
    """Reindex object security after user linking
    """
    # depth-first: reindex all children before the object itself
    if hasattr(aq_base(obj), "objectValues"):
        for child in obj.objectValues():
            self._recursive_reindex_object_security(child)
    logger.debug("Reindexing object security for {}".format(repr(obj)))
    obj.reindexObjectSecurity()
def isActive(self):
    """Checks if the Contact is active
    """
    wftool = getToolByName(self, "portal_workflow")
    status = wftool.getStatusOf("bika_inactive_workflow", self)
    # the contact is active when its workflow state is one of ACTIVE_STATES
    active = bool(status) and status.get("inactive_state") in ACTIVE_STATES
    if active:
        logger.debug("Contact '{}' is active".format(self.Title()))
        return True
    logger.debug("Contact '{}' is deactivated".format(self.Title()))
    return False
def reindexObject(obj, event):
    """Reindex an object in all registered catalogs

    :param obj: the modified content object
    :param event: the triggering event (unused)
    """
    bika_catalogs = getattr(obj, "_bika_catalogs", [])
    for name in bika_catalogs:
        # BUGFIX: the log message previously said "Unindexing ... from
        # catalog" (copy/paste error) although the object is reindexed
        logger.debug("Reindexing object '{}' in catalog '{}'".format(
            obj.getId(), name))
        catalog = api.get_tool(name)
        catalog.reindexObject(obj)
def isActive(self):
    """Checks if the Contact is active
    """
    wftool = api.portal.get_tool("portal_workflow")
    status = wftool.getStatusOf("bika_inactive_workflow", self)
    # deactivated when there is no workflow status or the state is not
    # one of the active states
    if not status or status.get("inactive_state") not in ACTIVE_STATES:
        logger.debug("Contact '{}' is deactivated".format(self.Title()))
        return False
    logger.debug("Contact '{}' is active".format(self.Title()))
    return True
def indexObject(obj, event):
    """Additionally index the object into the bika catalogs
    """
    # the object itself announces the catalogs it belongs to
    for name in getattr(obj, "_bika_catalogs", []):
        logger.debug("Indexing object '{}' into catalog '{}'".format(
            obj.getId(), name))
        api.get_tool(name).indexObject(obj)
def get_index(self, name):
    """get an index by name

    TODO: Combine indexes of relevant catalogs depending on the
          portal_type which is searched for.
    """
    catalog = self.get_catalog()
    # reach into the low-level ZCatalog API to fetch the index object
    index = catalog._catalog.getIndex(name)
    logger.debug(
        "get_index={} of catalog '{}' --> {}".format(
            name, catalog.__name__, index))
    return index
def icon_cache_key(method, self, brain_or_object):
    """Generates a cache key for the icon lookup

    Includes the virtual URL to handle multiple HTTP/HTTPS domains

    Example: http://senaite.local/clients?modified=1512033263370
    """
    # the modification timestamp (ms) invalidates the key on change
    modified = api.get_modification_date(brain_or_object).millis()
    key = "{}?modified={}".format(api.get_url(brain_or_object), modified)
    logger.debug("Generated Cache Key: {}".format(key))
    return key
def isOlderVersion(self, product, version):
    """Return whether `version` is older than the installed product version.

    If the version to upgrade is lower than the actual version of the
    product, the step is skipped to prevent out-of-date upgrades.
    Since there are heterogeneous names of versioning before v3.2.0, the
    version strings are normalized to numbers before comparing.

    :param product: product id to look up the installed version for
    :param version: target version string of the upgrade step
    :returns: True if the target version is lower than the installed one
    """
    iver = self.getInstalledVersion(product)
    iver = self.normalizeVersion(iver)
    nver = self.normalizeVersion(version)
    # BUGFIX: installed/target values were swapped in the log message
    logger.debug('{0} versions: Installed {1} - Target {2}'.format(
        product, iver, nver))
    return nver < iver
def isOlderVersion(self, product, version):
    """Return whether `version` is older than the installed product version.

    If the version to upgrade is lower than the actual version of the
    product, the step is skipped to prevent out-of-date upgrades.
    Since there are heterogeneous names of versioning before v3.2.0, the
    version strings are normalized to numbers before comparing.

    :param product: product id to look up the installed version for
    :param version: target version string of the upgrade step
    :returns: True if the target version is lower than the installed one
    """
    iver = self.getInstalledVersion(product)
    iver = self.normalizeVersion(iver)
    nver = self.normalizeVersion(version)
    # BUGFIX: installed/target values were swapped in the log message
    logger.debug('{0} versions: Installed {1} - Target {2}'
                 .format(product, iver, nver))
    return nver < iver
def reindexObjectSecurity(obj, event):
    """Reindex only security information on catalogs
    """
    for name in getattr(obj, "_bika_catalogs", []):
        logger.debug("Reindex security for object '{}' from catalog '{}'".format(
            obj.getId(), name))
        catalog = api.get_tool(name)
        # restrict the reindex to the CMF security indexes, skip metadata
        catalog.reindexObject(
            obj, idxs=obj._cmf_security_indexes, update_metadata=0)
def set(self, instance, value, **kw):
    """Decodes base64 value and set the file object

    :param instance: content object holding the file field
    :param value: base64 encoded file data
    """
    value = str(value).decode("base64")

    # handle the filename
    if "filename" not in kw:
        # BUGFIX: log message previously misspelled "FileFieldManager"
        logger.debug("FileFieldManager::set: No Filename detected "
                     "-> using title or id")
        kw["filename"] = kw.get("id") or kw.get("title")

    self._set(instance, value, **kw)
def AfterTransitionEventHandler(instance, event):
    """Log started workflow transitions (debug mode only)
    """
    # object creation fires this event without a transition
    if not event.transition:
        return

    # only log when Zope runs in debug mode
    if not App.config.getConfiguration().debug_mode:
        return

    if not skip(instance, event.transition.id, peek=True):
        logger.debug("Started transition %s on %s" %
                     (event.transition.id, instance))
def cached_is_transition_allowed(analysis, transition_id):
    """Check if the transition is allowed for the given analysis and cache
    the value on the request.

    Note: The request is obtained by the given expression from the
    `locals()`, which includes the given arguments.
    """
    logger.debug("cached_is_transition_allowed: analyis=%r transition=%s"
                 % (analysis, transition_id))
    # normalize the workflow answer to a plain boolean
    return bool(wf.isTransitionAllowed(analysis, transition_id))
def _get(self, instance, **kw):
    """Get the value of the field
    """
    field = self.field
    logger.debug("ATFieldManager::get: instance={} field={}".format(
        instance, field))

    # reading requires the field-level read permission
    if not field.checkPermission("read", instance):
        raise Unauthorized(
            "You are not allowed to read the field {}".format(self.name))

    return field.get(instance)
def _get(self, instance, **kw):
    """Get the value of the field
    """
    msg = "ATFieldManager::get: instance={} field={}"
    logger.debug(msg.format(instance, self.field))

    # deny the read when the field-level permission is missing
    allowed = self.field.checkPermission("read", instance)
    if not allowed:
        raise Unauthorized("You are not allowed to read the field {}"
                           .format(self.name))

    return self.field.get(instance)
def reindexObjectSecurity(obj, event):
    """Reindex only security information on catalogs
    """
    for name in getattr(obj, "_bika_catalogs", []):
        logger.debug(
            "Reindex security for object '{}' from catalog '{}'".format(
                obj.getId(), name))
        # only the CMF security indexes are refreshed; metadata is skipped
        api.get_tool(name).reindexObject(obj,
                                         idxs=obj._cmf_security_indexes,
                                         update_metadata=0)
def getClientList(self, contentFilter):
    """Return the list of client brains to display.

    The portal_catalog limits the result set; only up to one page of
    objects gets woken up for the search-term and permission checks.

    :param contentFilter: listing filter dict (not used here; the catalog
        query is built locally)
    :returns: list of client catalog brains
    """
    searchTerm = self.request.get(self.form_id + '_filter', '').lower()
    mtool = api.get_tool('portal_membership')
    state = self.request.get('%s_review_state' % self.form_id,
                             self.default_review_state)

    # This is used to decide how much of the objects need to be waked up
    # for further permission checks, which might get expensive on sites
    # with many clients
    # NOTE(review): this value comes straight from the request and may
    # still be a string here — confirm the `>` comparison below is intended
    list_pagesize = self.request.get("list_pagesize", self.pagesize)

    # map the requested review_state to the workflow states to query
    states = {
        'default': ['active', ],
        'active': ['active', ],
        'inactive': ['inactive', ],
        'all': ['active', 'inactive']
    }

    # Use the catalog to speed things up and also limit the results
    catalog = api.get_tool("portal_catalog")
    catalog_query = {
        "portal_type": "Client",
        "inactive_state": states[state],
        "sort_on": "sortable_title",
        "sort_order": "ascending",
    }

    # Inject the searchTerm to narrow the results further
    if searchTerm:
        catalog_query["SearchableText"] = searchTerm

    logger.debug("getClientList::catalog_query=%s" % catalog_query)
    brains = catalog(catalog_query)

    clients = []
    for brain in brains:
        # only wake up objects if they are shown on one page
        if len(clients) > list_pagesize:
            # otherwise append only the brain
            clients.append(brain)
            continue
        # wake up the object
        client = brain.getObject()
        # skip clients where the search term does not match
        if searchTerm and not client_match(client, searchTerm):
            continue
        # Only show clients to which we have Manage AR rights.
        # (ritamo only sees Happy Hills).
        if not mtool.checkPermission(ManageAnalysisRequests, client):
            continue
        # NOTE: the brain (not the woken object) is appended either way
        clients.append(brain)

    return clients
def __call__(self):
    """Render the view and dispatch a submitted form action.

    Exactly one of the action buttons is handled per request; the
    branches are checked in a fixed order.
    """
    form = self.request.form
    CheckAuthenticator(form)

    self.newSearch = False
    self.searchstring = form.get("searchstring", "")

    if form.get("submitted"):
        logger.debug("Form Submitted: {}".format(form))
        if form.get("unlink_button", False):
            # detach the linked user from the contact
            self._unlink_user()
        elif form.get("delete_button", False):
            # detach and also remove the Plone user
            self._unlink_user(delete=True)
        elif form.get("search_button", False):
            logger.debug("Search User")
            self.newSearch = True
        elif form.get("link_button", False):
            logger.debug("Link User")
            self._link_user(form.get("userid"))
        elif form.get("save_button", False):
            logger.debug("Create User")
            self._create_user()

    return self.template()
def _set(self, instance, value, **kw):
    """Set the value of the field
    """
    logger.debug("DexterityFieldManager::set: value=%r" % value)

    # writing to a read-only field is not permitted
    if self.field.readonly:
        raise Unauthorized("Field is read only")

    # let the field validate the value before writing it
    self.field.validate(value)

    # TODO: Check security on the field level
    return self.field.set(instance, value)
def publish(self, ar):
    """Set status to prepublished/published/republished
    """
    wf = api.get_tool("portal_workflow")
    status = wf.getInfoFor(ar, "review_state")

    # verified ARs get published, published ones republished; any other
    # state leads to a pre-publication
    transition = {"verified": "publish",
                  "published": "republish"}.get(status, "prepublish")

    logger.info("AR Transition: {} -> {}".format(status, transition))
    try:
        wf.doActionFor(ar, transition)
        return True
    except WorkflowException as e:
        logger.debug(e)
        return False
def __call__(self):
    """AJAX endpoint returning active clients as a JSON datagrid payload.

    Reads jqGrid-style request parameters (searchTerm, page, rows,
    sidx/sord) and answers with a JSON dict containing paging
    information and the requested page of client rows.
    """
    protect.CheckAuthenticator(self.request)
    searchTerm = self.request.get('searchTerm', '').lower()
    page = self.request.get('page', 1)
    nr_rows = self.request.get('rows', 20)
    sort_order = self.request.get('sord') or 'ascending'
    sort_index = self.request.get('sidx') or 'sortable_title'
    # translate the jqGrid sort token to the catalog spelling
    if sort_order == "desc":
        sort_order = "descending"

    # Use the catalog to speed things up and also limit the results
    catalog = api.get_tool("portal_catalog")
    catalog_query = {
        "portal_type": "Client",
        "inactive_state": "active",
        "sort_on": sort_index,
        "sort_order": sort_order,
        "sort_limit": 500
    }

    # Inject the searchTerm to narrow the results further
    if searchTerm:
        catalog_query["SearchableText"] = searchTerm

    logger.debug("ajaxGetClients::catalog_query=%s" % catalog_query)
    brains = catalog(catalog_query)

    rows = []
    for brain in brains:
        client = brain.getObject()
        # skip clients where the search term does not match
        if searchTerm and not client_match(client, searchTerm):
            continue
        rows.append(
            {
                "ClientID": client.getClientID(),
                "Title": client.Title(),
                "ClientUID": client.UID(),
            }
        )

    # full pages plus one more page for any remainder
    # NOTE: Python 2 integer division is relied upon here
    pages = len(rows) / int(nr_rows)
    pages += divmod(len(rows), int(nr_rows))[1] and 1 or 0

    # slice out only the requested page of rows
    ret = {'page': page,
           'total': pages,
           'records': len(rows),
           'rows': rows[(int(page) - 1) * int(nr_rows): int(page) * int(nr_rows)]}
    return json.dumps(ret)
def __call__(self):
    """AJAX endpoint returning active clients as a JSON datagrid payload.

    Reads jqGrid-style request parameters (searchTerm, page, rows,
    sidx/sord) and answers with a JSON dict containing paging
    information and the requested page of client rows.
    """
    protect.CheckAuthenticator(self.request)
    searchTerm = self.request.get("searchTerm", "").lower()
    page = self.request.get("page", 1)
    nr_rows = self.request.get("rows", 20)
    sort_order = self.request.get("sord") or "ascending"
    sort_index = self.request.get("sidx") or "sortable_title"
    # translate the jqGrid sort token to the catalog spelling
    if sort_order == "desc":
        sort_order = "descending"

    # Use the catalog to speed things up and also limit the results
    catalog = api.get_tool("portal_catalog")
    catalog_query = {
        "portal_type": "Client",
        "review_state": "active",
        "sort_on": sort_index,
        "sort_order": sort_order,
        "sort_limit": 500
    }

    # Inject the searchTerm to narrow the results further
    if searchTerm:
        catalog_query["SearchableText"] = searchTerm

    logger.debug("ajaxGetClients::catalog_query=%s" % catalog_query)
    brains = catalog(catalog_query)

    rows = []
    for brain in brains:
        client = brain.getObject()
        # skip clients where the search term does not match
        if searchTerm and not client_match(client, searchTerm):
            continue
        rows.append({
            "ClientID": client.getClientID(),
            "Title": client.Title(),
            "ClientUID": client.UID(),
        })

    # full pages plus one more page for any remainder
    # NOTE: Python 2 integer division is relied upon here
    pages = len(rows) / int(nr_rows)
    pages += divmod(len(rows), int(nr_rows))[1] and 1 or 0

    # slice out only the requested page of rows
    ret = {
        "page": page,
        "total": pages,
        "records": len(rows),
        "rows": rows[(int(page) - 1) * int(nr_rows):int(page) * int(nr_rows)]
    }
    return json.dumps(ret)
def get(self, name, **kw):
    """Get the value of the field by name
    """
    logger.debug("ATDataManager::get: fieldname=%s", name)

    field = self.get_field(name)

    # unknown field names yield None instead of raising
    if not field:
        return None

    # delegate the value lookup to the field's manager adapter
    return IFieldManager(field).get(self.context, **kw)
def setValidTo(self, value):
    """Custom setter method to calculate a `ValidTo` date based on the
    `ValidFrom` and `ExpirationInterval` field values.
    """
    valid_from = self.getValidFrom()
    interval = self.getExpirationInterval()
    valid_to = DateTime(value)

    if valid_from and interval:
        # derive the expiration date from start date + interval
        valid_to = valid_from + int(interval)
        self.getField("ValidTo").set(self, valid_to)
        logger.debug("Set ValidTo Date to: %r" % valid_to)
    else:
        # no calculation possible -> store the passed-in value as-is
        self.getField("ValidTo").set(self, valid_to)
def inner(func):
    """Decorate `func` so each call is guarded by the closed-over semaphore
    """
    logger.debug("Semaphore for {} -> {}".format(func, semaphore))

    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            logger.info("==> {}::Acquire Semaphore ...".format(
                func.__name__))
            semaphore.acquire()
            return func(*args, **kwargs)
        finally:
            # always release, even if the wrapped call raised
            logger.info("<== {}::Release Semaphore ...".format(
                func.__name__))
            semaphore.release()

    return wrapper
def update_object_with_data(content, record):
    """Update the content with the record data

    :param content: A single folderish catalog brain or content object
    :type content: ATContentType/DexterityContentType/CatalogBrain
    :param record: The data to update
    :type record: dict
    :returns: The updated content object
    :rtype: object
    :raises: APIError,
    :class:`~plone.jsonapi.routes.exceptions.APIError`
    """
    # ensure we have a full content object
    content = get_object(content)

    # get the proper data manager
    dm = IDataManager(content)

    if dm is None:
        fail(400, "Update for this object is not allowed")

    # BUGFIX: removed leftover debug print statements

    # Iterate through record items
    for k, v in record.items():
        try:
            success = dm.set(k, v, **record)
        except Unauthorized:
            fail(401, "Not allowed to set the field '%s'" % k)
        # BUGFIX: modernized Py2-only `except ValueError, exc` syntax
        except ValueError as exc:
            fail(400, str(exc))

        if not success:
            logger.warn("update_object_with_data::skipping key=%r", k)
            continue

        logger.debug("update_object_with_data::field %r updated", k)

    # BUGFIX: return the updated object as promised by the docstring
    return content
def url_for(endpoint, default="bika.lims.jsonapi.get", **values):
    """Looks up the API URL for the given endpoint

    :param endpoint: The name of the registered route (aka endpoint)
    :type endpoint: string
    :returns: External URL for this endpoint
    :rtype: string/None
    """
    build_kw = dict(force_external=True, values=values)
    try:
        return router.url_for(endpoint, **build_kw)
    except Exception:
        # XXX plone.jsonapi.core should catch the BuildError of Werkzeug and
        #     throw another error which can be handled here.
        logger.debug("Could not build API URL for endpoint '%s'. "
                     "No route provider registered?" % endpoint)
        # fall back to the generic API URL
        return router.url_for(default, **build_kw)
def reindexMovedObject(obj, event):
    """Reindex moved/renamed object

    :param obj: the moved content object
    :param event: the IObjectMovedEvent carrying old/new name
    """
    bika_catalogs = getattr(obj, "_bika_catalogs", [])
    for name in bika_catalogs:
        # BUGFIX: log message previously misspelled "Reindexing" as "Reidexing"
        logger.debug("Reindexing moved object '{}' in catalog '{}'".format(
            obj.getId(), name))
        catalog = api.get_tool(name)
        # check if the object was renamed
        old_name = event.oldName
        if old_name:
            # derive the old path by swapping the new id for the old one
            new_path = api.get_path(obj)
            base_path = new_path.replace(event.newName, "")
            old_path = "".join([base_path, old_name])
            # uncatalog the old path
            catalog.uncatalog_object(old_path)
        catalog.reindexObject(obj)
def import_xml(context):
    """GenericSetup import handler
    """
    portal = context.getSite()

    # nothing to do unless bika.lims is installed
    qi = api.get_tool("portal_quickinstaller")
    if not qi.isProductInstalled("bika.lims"):
        logger.debug("Nothing to import.")
        return

    # nothing to do unless the profile ships the marker file
    if not context.readDataFile("senaite.xml"):
        logger.debug("Nothing to import.")
        return

    # create content slugs for UID references
    create_content_slugs(portal, "", context)

    # import objects
    importObjects(portal, "", context)
def set(self, instance, value, **kw):
    """Decode the base64 value and store it as a NamedFile on the field

    :param instance: content object holding the file field
    :param value: base64 encoded file data
    :returns: result of the field's `set` call
    """
    # BUGFIX: the implicitly concatenated log message was missing a space
    # between "field" and "detected"
    logger.debug("NamedFileFieldManager::set:File field "
                 "detected ('%r'), base64 decoding value", self.field)

    data = str(value).decode("base64")
    filename = kw.get("filename") or kw.get("id") or kw.get("title")
    contentType = kw.get("mimetype") or kw.get("content_type")

    if contentType:
        # create NamedFile with content type information
        value = self.field._type(data=data,
                                 contentType=contentType,
                                 filename=filename)
    else:
        # create NamedFile w/o content type information
        # -> will be guessed by the extension of the filename
        value = self.field._type(data=data, filename=filename)

    return self.field.set(instance, value)
def unlinkUser(self, delete=False):
    """Unlink the user to the Contact

    :param delete: also remove the Plone user after unlinking
    :returns: True if OK, False if no User was unlinked
    :rtype: bool
    """
    user = self.getUser()

    # nothing to unlink
    if not user:
        return False

    userid = self.getUsername()
    logger.debug("Unlinking User '{}' from Contact '{}'".format(
        userid, self.Title()))

    # Unlink the User
    if not self._unlinkUser():
        return False

    # Also remove the Plone User (caution)
    if delete:
        logger.debug("Removing Plone User '{}'".format(userid))
        api.user.delete(username=userid)

    return True
def __call__(self):
    """Render the report: gather client, reporter, laboratory and
    analysis-request data on `self`, render the template to HTML,
    convert it to PDF and stream the PDF to the browser.
    """
    pc = self.portal_catalog
    self.checkPermission = self.context.portal_membership.checkPermission
    self.now = DateTime()
    self.SamplingWorkflowEnabled = \
        self.context.bika_setup.getSamplingWorkflowEnabled()

    # Client details (if client is associated)
    self.client = None
    client_uid = hasattr(self.context, 'getClientUID') and self.context.getClientUID()
    if client_uid:
        proxies = pc(portal_type='Client', UID=client_uid)
        if proxies:
            self.client = proxies[0].getObject()
            client_address = self.client.getPostalAddress()
            # fall back to the contact's billing, then physical address
            if self.contact and not client_address:
                client_address = self.contact.getBillingAddress()
                if not client_address:
                    client_address = self.contact.getPhysicalAddress()
            if client_address:
                # join the non-empty address parts with HTML line breaks
                _keys = ['address', 'city', 'state', 'zip', 'country']
                _list = [client_address.get(v) for v in _keys
                         if client_address.get(v)]
                self.client_address = "<br/>".join(_list).replace("\n", "<br/>")
                # strip a trailing line break
                if self.client_address.endswith("<br/>"):
                    self.client_address = self.client_address[:-5]
            else:
                self.client_address = None

    # Reporter (the authenticated member)
    self.member = self.context.portal_membership.getAuthenticatedMember()
    self.username = self.member.getUserName()
    self.reporter = self.user_fullname(self.username)
    self.reporter_email = self.user_email(self.username)
    self.reporter_signature = ""
    # look up the lab contact matching the current user to get a signature
    c = [x for x in self.bika_setup_catalog(portal_type='LabContact')
         if x.getObject().getUsername() == self.username]
    if c:
        sf = c[0].getObject().getSignature()
        if sf:
            self.reporter_signature = sf.absolute_url() + "/Signature"

    # laboratory
    self.laboratory = self.context.bika_setup.laboratory
    self.accredited = self.laboratory.getLaboratoryAccredited()
    lab_address = self.laboratory.getPrintAddress()
    if lab_address:
        # same HTML address formatting as for the client above
        _keys = ['address', 'city', 'state', 'zip', 'country']
        _list = [lab_address.get(v) for v in _keys if lab_address.get(v)]
        self.lab_address = "<br/>".join(_list).replace("\n", "<br/>")
        if self.lab_address.endswith("<br/>"):
            self.lab_address = self.lab_address[:-5]
    else:
        self.lab_address = None

    # Analysis Request results
    self.ars = []
    self.ar_headers = [_("Request ID"),
                       _("Date Requested"),
                       _("Sample Type"),
                       _("Sample Point")]
    self.analysis_headers = [_("Analysis Service"),
                             _("Method"),
                             _("Result"),
                             _("Analyst")]
    for ar in self.context.getAnalysisRequests():
        datecreated = ar.created()
        # datereceived = ar.getDateReceived()
        # datepublished = ar.getDatePublished()
        datalines = []
        for analysis in ar.getAnalyses(full_objects=True):
            service = analysis.getService()
            method = service.getMethod()
            sample = ar.getSample()
            result = analysis.getResult()
            formatted_result = format_numeric_result(analysis, result)
            datalines.append({_("Analysis Service"): analysis.getService().Title(),
                              _("Method"): method and method.Title() or "",
                              _("Result"): formatted_result,
                              _("Analyst"): self.user_fullname(analysis.getAnalyst()),
                              _("Remarks"): analysis.getRemarks()})
        # NOTE(review): `sample` is assigned inside the analyses loop above;
        # an AR without analyses would leave it unbound/stale — confirm.
        self.ars.append({
            _("Request ID"): ar.getRequestID(),
            _("Date Requested"): self.ulocalized_time(datecreated),  # requested->created
            _("Sample Type"): sample.getSampleType() and sample.getSampleType().Title() or '',
            _("Sample Point"): sample.getSamplePoint() and sample.getSamplePoint().Title() or '',
            _("datalines"): datalines,
        })

    # Create Report
    fn = self.context.Title() + " " + self.ulocalized_time(self.now)
    report_html = self.template()

    debug_mode = App.config.getConfiguration().debug_mode
    if debug_mode:
        tmp_fd, tmp_fn = tempfile.mkstemp(suffix=".html")
        logger.debug("Writing HTML for %s to %s" % (self.context, tmp_fn))
        # NOTE(review): tempfile.mkstemp returns an OS-level int file
        # descriptor — `tmp_fd.write`/`tmp_fd.close` would raise
        # AttributeError; this likely needs os.write/os.close (or
        # os.fdopen). Confirm and fix.
        tmp_fd.write(report_html)
        tmp_fd.close()

    pdf_fd, pdf_fn = tempfile.mkstemp(suffix="pdf")
    # NOTE(review): same int-fd issue as above — `pdf_fd.close` would raise
    pdf_fd.close()
    pdf = createPdf(report_html, outfile=pdf_fn)
    if debug_mode:
        logger.debug("Wrote PDF for %s to %s" % (self.context, pdf_fn))
    else:
        os.remove(pdf_fn)

    # XXX Email published batches to who?

    # Send PDF to browser
    if not pdf.err:
        setheader = self.request.RESPONSE.setHeader
        setheader('Content-Type', 'application/pdf')
        setheader("Content-Disposition", "attachment;filename=\"%s\"" % fn)
        self.request.RESPONSE.write(pdf)
def publishFromHTML(self, aruid, results_html):
    """Publish the given Analysis Request from rendered HTML results.

    Creates a PDF from the HTML, attaches it to a new ARReport object,
    fires the publish/republish/prepublish workflow transition, and
    emails the report to the responsible managers and the configured
    recipients.

    :param aruid: UID of the Analysis Request to publish
    :param results_html: rendered HTML of the results report
    :returns: list containing the published AR, or an empty list if the
        AR was not found or publication is not allowed
    """
    # The AR can be published only and only if allowed
    uc = getToolByName(self.context, 'uid_catalog')
    ars = uc(UID=aruid)
    if not ars or len(ars) != 1:
        return []

    ar = ars[0].getObject();
    wf = getToolByName(ar, 'portal_workflow')
    allowed_states = ['verified', 'published']
    # Publish/Republish allowed?
    if wf.getInfoFor(ar, 'review_state') not in allowed_states:
        # Pre-publish allowed?
        if not ar.getAnalyses(review_state=allowed_states):
            return []

    # HTML written to debug file
    # NOTE(review): tempfile.mktemp is race-prone; mkstemp would be safer
    debug_mode = App.config.getConfiguration().debug_mode
    if debug_mode:
        tmp_fn = tempfile.mktemp(suffix=".html")
        logger.debug("Writing HTML for %s to %s" % (ar.Title(), tmp_fn))
        open(tmp_fn, "wb").write(results_html)

    # Create the pdf report (will always be attached to the AR)
    # we must supply the file ourself so that createPdf leaves it alone.
    pdf_fn = tempfile.mktemp(suffix=".pdf")
    pdf_report = createPdf(htmlreport=results_html, outfile=pdf_fn)

    # PDF written to debug file
    if debug_mode:
        logger.debug("Writing PDF for %s to %s" % (ar.Title(), pdf_fn))
    else:
        os.remove(pdf_fn)

    recipients = []
    contact = ar.getContact()
    lab = ar.bika_setup.laboratory
    if pdf_report:
        if contact:
            recipients = [{
                'UID': contact.UID(),
                'Username': to_utf8(contact.getUsername()),
                'Fullname': to_utf8(contact.getFullname()),
                'EmailAddress': to_utf8(contact.getEmailAddress()),
                'PublicationModes': contact.getPublicationPreference()
            }]
        # persist the report as an ARReport object on the AR
        reportid = ar.generateUniqueId('ARReport')
        report = _createObjectByType("ARReport", ar, reportid)
        report.edit(
            AnalysisRequest=ar.UID(),
            Pdf=pdf_report,
            Html=results_html,
            Recipients=recipients
        )
        report.unmarkCreationFlag()
        renameAfterCreation(report)

        # Set status to prepublished/published/republished
        status = wf.getInfoFor(ar, 'review_state')
        transitions = {'verified': 'publish', 'published' : 'republish'}
        transition = transitions.get(status, 'prepublish')
        try:
            wf.doActionFor(ar, transition)
        except WorkflowException:
            # NOTE(review): the failed transition is silently swallowed —
            # confirm this best-effort behavior is intended
            pass

        # compose and send email.
        # The managers of the departments for which the current AR has
        # at least one AS must receive always the pdf report by email.
        # https://github.com/bikalabs/Bika-LIMS/issues/1028
        mime_msg = MIMEMultipart('related')
        mime_msg['Subject'] = self.get_mail_subject(ar)[0]
        mime_msg['From'] = formataddr(
            (encode_header(lab.getName()), lab.getEmailAddress()))
        mime_msg.preamble = 'This is a multi-part MIME message.'
        msg_txt = MIMEText(results_html, _subtype='html')
        mime_msg.attach(msg_txt)

        to = []
        mngrs = ar.getResponsible()
        for mngrid in mngrs['ids']:
            name = mngrs['dict'][mngrid].get('name', '')
            email = mngrs['dict'][mngrid].get('email', '')
            if (email != ''):
                to.append(formataddr((encode_header(name), email)))

        if len(to) > 0:
            # Send the email to the managers
            mime_msg['To'] = ','.join(to)
            attachPdf(mime_msg, pdf_report, pdf_fn)
            try:
                host = getToolByName(ar, 'MailHost')
                host.send(mime_msg.as_string(), immediate=True)
            except SMTPServerDisconnected as msg:
                logger.warn("SMTPServerDisconnected: %s." % msg)
            except SMTPRecipientsRefused as msg:
                raise WorkflowException(str(msg))

    # Send report to recipients
    recips = self.get_recipients(ar)
    for recip in recips:
        # only recipients with an email publication preference and a
        # non-empty address are considered
        if 'email' not in recip.get('pubpref', []) \
                or not recip.get('email', ''):
            continue

        title = encode_header(recip.get('title', ''))
        email = recip.get('email')
        formatted = formataddr((title, email))

        # Create the new mime_msg object, cause the previous one
        # has the pdf already attached
        mime_msg = MIMEMultipart('related')
        mime_msg['Subject'] = self.get_mail_subject(ar)[0]
        mime_msg['From'] = formataddr(
            (encode_header(lab.getName()), lab.getEmailAddress()))
        mime_msg.preamble = 'This is a multi-part MIME message.'
        msg_txt = MIMEText(results_html, _subtype='html')
        mime_msg.attach(msg_txt)
        mime_msg['To'] = formatted

        # Attach the pdf to the email if requested
        if pdf_report and 'pdf' in recip.get('pubpref'):
            attachPdf(mime_msg, pdf_report, pdf_fn)

        # For now, I will simply ignore mail send under test.
        if hasattr(self.portal, 'robotframework'):
            continue

        msg_string = mime_msg.as_string()

        # content of outgoing email written to debug file
        if debug_mode:
            tmp_fn = tempfile.mktemp(suffix=".email")
            logger.debug("Writing MIME message for %s to %s" %
                         (ar.Title(), tmp_fn))
            open(tmp_fn, "wb").write(msg_string)

        try:
            host = getToolByName(ar, 'MailHost')
            host.send(msg_string, immediate=True)
        except SMTPServerDisconnected as msg:
            logger.warn("SMTPServerDisconnected: %s." % msg)
        except SMTPRecipientsRefused as msg:
            raise WorkflowException(str(msg))

    ar.setDatePublished(DateTime())

    return [ar]
def _process_request(self):
    """Configure this listing view from request parameters.

    Use this function from a template that is using bika_listing_table
    in such a way that the table_only request var will be used to
    in-place-update the table.

    Reads ``<form_id>_``-prefixed request variables (review_state,
    sort_on, sort_order, pagesize, pagenumber, per-index filters and
    the catch-all ``<form_id>_filter``) and updates
    ``self.contentFilter`` plus the AdvancedQuery terms collected in
    ``self.And`` and ``self.Or``.
    """
    form_id = self.form_id
    # NOTE(review): `form` and `workflow` are assigned but never used
    # in this method -- confirm before removing.
    form = self.request.form
    workflow = getToolByName(self.context, 'portal_workflow')
    # self.catalog is a tool *name*; resolve it to the actual tool
    catalog = getToolByName(self.context, self.catalog)

    # Some ajax calls duplicate form values? I have not figured out why!
    # Collapse any list value in the form to its first element.
    if self.request.form:
        for key, value in self.request.form.items():
            if isinstance(value, list):
                self.request.form[key] = self.request.form[key][0]

    # If table_only specifies another form_id, then we abort.
    # this way, a single table among many can request a redraw,
    # and only its content will be rendered.
    if form_id not in self.request.get('table_only', form_id):
        return ''

    # review_state_selector - value can be specified in request
    selected_state = self.request.get("%s_review_state" % form_id,
                                      'default')
    # get review_state id=selected_state; fall back to the first state
    states = [r for r in self.review_states if r['id'] == selected_state]
    self.review_state = states and states[0] or self.review_states[0]
    # set selected review_state ('default'?) to request
    self.request['review_state'] = self.review_state['id']

    # contentFilter is expected in every self.review_state.
    for k, v in self.review_state['contentFilter'].items():
        self.contentFilter[k] = v

    # sort on
    self.sort_on = self.request.get(form_id + '_sort_on', None)
    # manual_sort_on: only sort the current batch of items
    # this is a compromise for sorting without column indexes
    self.manual_sort_on = None
    if self.sort_on \
       and self.sort_on in self.columns.keys() \
       and self.columns[self.sort_on].get('index', None):
        # the column is backed by a catalog index: let the catalog sort
        idx = self.columns[self.sort_on].get('index', self.sort_on)
        self.contentFilter['sort_on'] = idx
    else:
        if self.sort_on:
            # no index for this column: sort the batch in python
            self.manual_sort_on = self.sort_on
            if 'sort_on' in self.contentFilter:
                del self.contentFilter['sort_on']

    # sort order
    self.sort_order = self.request.get(form_id + '_sort_order', '')
    if self.sort_order:
        self.contentFilter['sort_order'] = self.sort_order
    else:
        if 'sort_order' not in self.contentFilter:
            # default, and remember the choice in the request
            self.sort_order = 'ascending'
            self.contentFilter['sort_order'] = 'ascending'
            self.request.set(form_id + '_sort_order', 'ascending')
        else:
            self.sort_order = self.contentFilter['sort_order']
    if self.manual_sort_on:
        # sorting happens in python; the catalog must not sort
        del self.contentFilter['sort_order']

    # pagesize
    pagesize = self.request.get(form_id + '_pagesize', self.pagesize)
    if type(pagesize) in (list, tuple):
        pagesize = pagesize[0]
    try:
        pagesize = int(pagesize)
    except:
        # NOTE(review): bare except; any non-numeric value falls back to 10
        pagesize = self.pagesize = 10
    self.pagesize = pagesize
    # Plone's batching wants this variable:
    self.request.set('pagesize', self.pagesize)
    # and we want to make our choice remembered in bika_listing also
    self.request.set(self.form_id + '_pagesize', self.pagesize)

    # pagenumber
    self.pagenumber = int(self.request.get(form_id + '_pagenumber',
                                           self.pagenumber))
    # Plone's batching wants this variable:
    self.request.set('pagenumber', self.pagenumber)

    # index filters.
    self.And = []
    self.Or = []
    # collect the indexes of all filterable columns; review_state is
    # handled by the review-state selector above and is skipped here
    for k, v in self.columns.items():
        if not v.has_key('index') \
           or v['index'] == 'review_state' \
           or v['index'] in self.filter_indexes:
            continue
        self.filter_indexes.append(v['index'])

    # any request variable named ${form_id}_{index_name}
    # will pass its value to that index in self.contentFilter.
    # all conditions using ${form_id}_{index_name} are searched with AND
    for index in self.filter_indexes:
        idx = catalog.Indexes.get(index, None)
        if not idx:
            logger.debug("index named '%s' not found in %s. "
                         "(Perhaps the index is still empty)." %
                         (index, self.catalog))
            continue
        request_key = "%s_%s" % (form_id, index)
        value = self.request.get(request_key, '')
        # only filter on values longer than a single character
        if len(value) > 1:
            if idx.meta_type in ('ZCTextIndex', 'FieldIndex'):
                self.And.append(MatchRegexp(index, value))
            elif idx.meta_type == 'DateIndex':
                logger.info("Unhandled DateIndex search on '%s'" % index)
                continue
            else:
                self.Or.append(Generic(index, value))

    # if there's a ${form_id}_filter in request, then all indexes
    # are searched for its value.
    # ${form_id}_filter is searched with OR against all indexes
    request_key = "%s_filter" % form_id
    value = self.request.get(request_key, '')
    if type(value) in (list, tuple):
        value = value[0]
    if len(value) > 1:
        for index in self.filter_indexes:
            idx = catalog.Indexes.get(index, None)
            if not idx:
                logger.debug("index named '%s' not found in %s. "
                             "(Perhaps the index is still empty)." %
                             (index, self.catalog))
                continue
            if idx.meta_type in ('ZCTextIndex', 'FieldIndex'):
                self.Or.append(MatchRegexp(index, value))
                # https://github.com/bikalabs/Bika-LIMS/issues/1069
                # also match progressively longer dash-joined prefixes,
                # so a partial id like 'AB-1-2' matches 'AB-1-2-*' too
                vals = value.split('-')
                if len(vals) > 2:
                    valroot = vals[0]
                    for i in range(1, len(vals)):
                        valroot = '%s-%s' % (valroot, vals[i])
                        self.Or.append(MatchRegexp(index, valroot + '-*'))
            elif idx.meta_type == 'DateIndex':
                if type(value) in (list, tuple):
                    value = value[0]
                if value.find(":") > -1:
                    # a 'lo:hi' date range
                    try:
                        lohi = [DateTime(x) for x in value.split(":")]
                    except:
                        # NOTE(review): if this parse fails, `lohi` is
                        # unbound and the next line raises NameError
                        logger.info("Error (And, DateIndex='%s', term='%s')"
                                    % (index, value))
                    self.Or.append(Between(index, lohi[0], lohi[1]))
                else:
                    try:
                        self.Or.append(Eq(index, DateTime(value)))
                    except:
                        logger.info("Error (Or, DateIndex='%s', term='%s')"
                                    % (index, value))
            else:
                self.Or.append(Generic(index, value))
        self.Or.append(MatchRegexp('review_state', value))

    # get toggle_cols cookie value
    # and modify self.columns[]['toggle'] to match.
    toggle_cols = self.get_toggle_cols()
    for col in self.columns.keys():
        if col in toggle_cols:
            self.columns[col]['toggle'] = True
        else:
            self.columns[col]['toggle'] = False
def _process_request(self):
    """Scan request for parameters and configure class attributes
    accordingly.  Setup AdvancedQuery or catalog contentFilter.

    Request parameters:

    <form_id>_limit_from: index of the first item to display
    <form_id>_rows_only: returns only the rows
    <form_id>_sort_on: list items are sorted on this key
    <form_id>_manual_sort_on: no index - sort with python
    <form_id>_pagesize: number of items
    <form_id>_filter: A string, will be regex matched against indexes
        in <form_id>_filter_indexes
    <form_id>_filter_indexes: list of index names which will be
        searched for the value of <form_id>_filter
    <form_id>_<index_name>: Any index name can be used after
        <form_id>_.  Any request variable named ${form_id}_{index_name}
        will pass its value to that index in self.contentFilter.  All
        conditions using ${form_id}_{index_name} are searched with AND.
        The parameter value will be matched with regexp if a FieldIndex
        or TextIndex.  Else, AdvancedQuery.Generic is used.
    """
    form_id = self.form_id
    # NOTE(review): `form` and `workflow` are assigned but never used
    # in this method -- confirm before removing.
    form = self.request.form
    workflow = getToolByName(self.context, 'portal_workflow')
    catalog = getToolByName(self.context, self.catalog)

    # Some ajax calls duplicate form values? I have not figured out why!
    # Collapse any list value in the form to its first element.
    if self.request.form:
        for key, value in self.request.form.items():
            if isinstance(value, list):
                self.request.form[key] = self.request.form[key][0]

    # If table_only (or rows_only) specifies another form_id, then we
    # abort.  This way, a single table among many can request a redraw,
    # and only its content will be rendered.
    if form_id not in self.request.get('table_only', form_id) \
       or form_id not in self.request.get('rows_only', form_id):
        return ''

    self.rows_only = self.request.get('rows_only', '') == form_id
    self.limit_from = int(self.request.get(form_id + '_limit_from', 0))

    # contentFilter is allowed in every self.review_state.
    for k, v in self.review_state.get('contentFilter', {}).items():
        self.contentFilter[k] = v

    # sort on: the request value wins over any pre-set self.sort_on
    self.sort_on = self.sort_on \
        if hasattr(self, 'sort_on') and self.sort_on \
        else None
    self.sort_on = self.request.get(form_id + '_sort_on', self.sort_on)
    self.sort_order = self.request.get(form_id + '_sort_order',
                                       'ascending')
    self.manual_sort_on = self.request.get(form_id + '_manual_sort_on',
                                           None)

    if self.sort_on:
        if self.sort_on in self.columns.keys():
            if self.columns[self.sort_on].get('index', None):
                self.request.set(form_id + '_sort_on', self.sort_on)
                # The column can be sorted directly using an index
                idx = self.columns[self.sort_on]['index']
                self.sort_on = idx
                # Don't sort manually!
                self.manual_sort_on = None
            else:
                # The column must be manually sorted using python
                self.manual_sort_on = self.sort_on
        else:
            # We cannot sort for a column that doesn't exist!
            msg = "{}: sort_on is '{}', not a valid column".format(
                self, self.sort_on)
            logger.error(msg)
            self.sort_on = None

    if self.manual_sort_on:
        # request values may arrive as lists; use the first element
        self.manual_sort_on = self.manual_sort_on[0] \
            if type(self.manual_sort_on) in (list, tuple) \
            else self.manual_sort_on
        if self.manual_sort_on not in self.columns.keys():
            # We cannot sort for a column that doesn't exist!
            msg = "{}: manual_sort_on is '{}', not a valid column".format(
                self, self.manual_sort_on)
            logger.error(msg)
            self.manual_sort_on = None

    if self.sort_on or self.manual_sort_on:
        # By default, if sort_on is set, sort the items ASC
        # Trick to allow 'descending' keyword instead of 'reverse'
        self.sort_order = 'reverse' if self.sort_order \
            and self.sort_order[0] in ['d', 'r'] \
            else 'ascending'
    else:
        # By default, sort on created
        self.sort_order = 'reverse'
        self.sort_on = 'created'

    self.contentFilter['sort_order'] = self.sort_order
    if self.sort_on:
        self.contentFilter['sort_on'] = self.sort_on

    # pagesize
    pagesize = self.request.get(form_id + '_pagesize', self.pagesize)
    if type(pagesize) in (list, tuple):
        pagesize = pagesize[0]
    try:
        pagesize = int(pagesize)
    except:
        # NOTE(review): bare except; any non-numeric value falls back to 10
        pagesize = self.pagesize = 10
    self.pagesize = pagesize
    # Plone's batching wants this variable:
    self.request.set('pagesize', self.pagesize)
    # and we want to make our choice remembered in bika_listing also
    self.request.set(self.form_id + '_pagesize', self.pagesize)

    # index filters.
    self.And = []
    self.Or = []
    # collect the indexes of all filterable columns; review_state is
    # handled separately and skipped here
    for k, v in self.columns.items():
        if not v.has_key('index') \
           or v['index'] == 'review_state' \
           or v['index'] in self.filter_indexes:
            continue
        self.filter_indexes.append(v['index'])

    # any request variable named ${form_id}_{index_name}
    # will pass its value to that index in self.contentFilter.
    # all conditions using ${form_id}_{index_name} are searched with AND
    for index in self.filter_indexes:
        idx = catalog.Indexes.get(index, None)
        if not idx:
            logger.debug("index named '%s' not found in %s. "
                         "(Perhaps the index is still empty)." %
                         (index, self.catalog))
            continue
        request_key = "%s_%s" % (form_id, index)
        value = self.request.get(request_key, '')
        # only filter on values longer than a single character
        if len(value) > 1:
            if idx.meta_type in ('ZCTextIndex', 'FieldIndex'):
                self.And.append(MatchRegexp(index, value))
            elif idx.meta_type == 'DateIndex':
                logger.info("Unhandled DateIndex search on '%s'" % index)
                continue
            else:
                self.Or.append(Generic(index, value))

    # if there's a ${form_id}_filter in request, then all indexes
    # are searched for its value.
    # ${form_id}_filter is searched with OR against all indexes
    request_key = "%s_filter" % form_id
    value = self.request.get(request_key, '')
    if type(value) in (list, tuple):
        value = value[0]
    if len(value) > 1:
        for index in self.filter_indexes:
            idx = catalog.Indexes.get(index, None)
            if not idx:
                logger.debug("index named '%s' not found in %s. "
                             "(Perhaps the index is still empty)." %
                             (index, self.catalog))
                continue
            if idx.meta_type in ('ZCTextIndex', 'FieldIndex'):
                self.Or.append(MatchRegexp(index, value))
                self.expand_all_categories = True
                # https://github.com/bikalabs/Bika-LIMS/issues/1069
                # also match progressively longer dash-joined prefixes,
                # so a partial id like 'AB-1-2' matches 'AB-1-2-*' too
                vals = value.split('-')
                if len(vals) > 2:
                    valroot = vals[0]
                    for i in range(1, len(vals)):
                        valroot = '%s-%s' % (valroot, vals[i])
                        self.Or.append(MatchRegexp(index, valroot + '-*'))
                        self.expand_all_categories = True
            elif idx.meta_type == 'DateIndex':
                if type(value) in (list, tuple):
                    value = value[0]
                if value.find(":") > -1:
                    # a 'lo:hi' date range
                    try:
                        lohi = [DateTime(x) for x in value.split(":")]
                    except:
                        # NOTE(review): if this parse fails, `lohi` is
                        # unbound and the next line raises NameError
                        logger.info("Error (And, DateIndex='%s', term='%s')"
                                    % (index, value))
                    self.Or.append(Between(index, lohi[0], lohi[1]))
                    self.expand_all_categories = True
                else:
                    try:
                        self.Or.append(Eq(index, DateTime(value)))
                        self.expand_all_categories = True
                    except:
                        logger.info("Error (Or, DateIndex='%s', term='%s')"
                                    % (index, value))
            else:
                self.Or.append(Generic(index, value))
                self.expand_all_categories = True
        self.Or.append(MatchRegexp('review_state', value))

    # get toggle_cols cookie value
    # and modify self.columns[]['toggle'] to match.
    toggle_cols = self.get_toggle_cols()
    for col in self.columns.keys():
        if col in toggle_cols:
            self.columns[col]['toggle'] = True
        else:
            self.columns[col]['toggle'] = False
def _process_request(self):
    """Configure this listing view from request and cookie parameters.

    Use this function from a template that is using bika_listing_table
    in such a way that the table_only request var will be used to
    in-place-update the table.

    Unlike the sibling implementations, the selected review_state is
    persisted per "<portal_type><form_id>" key in the 'review_state'
    cookie, which holds a JSON mapping.
    """
    form_id = self.form_id
    # NOTE(review): `form` and `workflow` are assigned but never used
    # in this method -- confirm before removing.
    form = self.request.form
    workflow = getToolByName(self.context, 'portal_workflow')
    catalog = getToolByName(self.context, self.catalog)

    # If table_only specifies another form_id, then we abort.
    # this way, a single table among many can request a redraw,
    # and only its content will be rendered.
    if form_id not in self.request.get('table_only', form_id):
        return ''

    # review_state_selector: load the JSON cookie mapping
    cookie = json.loads(self.request.get("review_state", '{}'))
    cookie_key = "%s%s" % (self.context.portal_type, form_id)
    # first check POST
    selected_state = self.request.get("%s_review_state" % form_id, '')
    if not selected_state:
        # then check cookie
        selected_state = cookie.get(cookie_key, 'default')
    # get review_state id=selected_state; fall back to the first state
    states = [r for r in self.review_states if r['id'] == selected_state]
    review_state = states and states[0] or self.review_states[0]
    # set request and cookie to currently selected state id
    if not selected_state:
        selected_state = self.review_states[0]['id']
    # NOTE(review): self.review_state holds the state *id* string here,
    # while the sibling implementations store the whole state dict, and
    # self.request['review_state'] receives the serialized cookie JSON
    # rather than the id -- confirm both are intended.
    self.review_state = cookie[cookie_key] = selected_state
    cookie = json.dumps(cookie)
    self.request['review_state'] = cookie
    self.request.response.setCookie('review_state', cookie, path="/")

    # contentFilter is expected in every review_state.
    for k, v in review_state['contentFilter'].items():
        self.contentFilter[k] = v

    # sort on
    sort_on = self.request.get(form_id + '_sort_on', '')
    # manual_sort_on: only sort the current batch of items
    # this is a compromise for sorting without column indexes
    self.manual_sort_on = None
    if sort_on \
       and sort_on in self.columns.keys() \
       and self.columns[sort_on].get('index', None):
        # the column is backed by a catalog index: let the catalog sort
        idx = self.columns[sort_on].get('index', sort_on)
        self.contentFilter['sort_on'] = idx
    else:
        if sort_on:
            # no index for this column: sort the batch in python
            self.manual_sort_on = sort_on
            if 'sort_on' in self.contentFilter:
                del self.contentFilter['sort_on']

    # sort order
    self.sort_order = self.request.get(form_id + '_sort_order', '')
    if self.sort_order:
        self.contentFilter['sort_order'] = self.sort_order
    else:
        if 'sort_order' not in self.contentFilter:
            # default, and remember the choice in the request
            self.sort_order = 'ascending'
            self.contentFilter['sort_order'] = 'ascending'
            self.request.set(form_id + '_sort_order', 'ascending')
        else:
            self.sort_order = self.contentFilter['sort_order']
    if self.manual_sort_on:
        # sorting happens in python; the catalog must not sort
        del self.contentFilter['sort_order']

    # pagesize
    pagesize = self.request.get(form_id + '_pagesize', self.pagesize)
    if type(pagesize) in (list, tuple):
        pagesize = pagesize[0]
    try:
        pagesize = int(pagesize)
    except:
        # NOTE(review): bare except; a non-numeric value keeps the
        # previous pagesize (this assignment is effectively a no-op)
        pagesize = self.pagesize
    self.pagesize = pagesize
    # Plone's batching wants this variable:
    self.request.set('pagesize', self.pagesize)

    # pagenumber
    self.pagenumber = int(self.request.get(form_id + '_pagenumber',
                                           self.pagenumber))
    # Plone's batching wants this variable:
    self.request.set('pagenumber', self.pagenumber)

    # index filters.
    self.And = []
    self.Or = []
    # collect the indexes of all filterable columns; review_state is
    # handled by the review-state selector above and is skipped here
    for k, v in self.columns.items():
        if not v.has_key('index') \
           or v['index'] == 'review_state' \
           or v['index'] in self.filter_indexes:
            continue
        self.filter_indexes.append(v['index'])

    # any request variable named ${form_id}_{index_name}
    # will pass its value to that index in self.contentFilter.
    # all conditions using ${form_id}_{index_name} are searched with AND
    for index in self.filter_indexes:
        idx = catalog.Indexes.get(index, None)
        if not idx:
            logger.debug("index named '%s' not found in %s. "
                         "(Perhaps the index is still empty)." %
                         (index, self.catalog))
            continue
        request_key = "%s_%s" % (form_id, index)
        value = self.request.get(request_key, '')
        # only filter on values longer than a single character
        if len(value) > 1:
            if idx.meta_type in ('ZCTextIndex', 'FieldIndex'):
                self.And.append(MatchRegexp(index, value))
            elif idx.meta_type == 'DateIndex':
                logger.error("Unhandled DateIndex search on '%s'" % index)
                continue
            else:
                self.Or.append(Generic(index, value))

    # if there's a ${form_id}_filter in request, then all indexes
    # are searched for its value.
    # ${form_id}_filter is searched with OR against all indexes
    request_key = "%s_filter" % form_id
    value = self.request.get(request_key, '')
    if len(value) > 1:
        for index in self.filter_indexes:
            idx = catalog.Indexes.get(index, None)
            if not idx:
                logger.debug("index named '%s' not found in %s. "
                             "(Perhaps the index is still empty)." %
                             (index, self.catalog))
                continue
            if idx.meta_type in ('ZCTextIndex', 'FieldIndex'):
                self.Or.append(MatchRegexp(index, value))
            elif idx.meta_type == 'DateIndex':
                if value.find(":") > -1:
                    # a 'lo:hi' date range
                    try:
                        lohi = [DateTime(x) for x in value.split(":")]
                    except:
                        # NOTE(review): if this parse fails, `lohi` is
                        # unbound and the next line raises NameError
                        logger.error("Error (And, DateIndex='%s', term='%s')"
                                     % (index, value))
                    self.Or.append(Between(index, lohi[0], lohi[1]))
                else:
                    try:
                        self.Or.append(Eq(index, DateTime(value)))
                    except:
                        logger.error("Error (Or, DateIndex='%s', term='%s')"
                                     % (index, value))
            else:
                self.Or.append(Generic(index, value))
        self.Or.append(MatchRegexp('review_state', value))

    # get toggle_cols cookie value
    # and modify self.columns[]['toggle'] to match.
    toggle_cols = self.get_toggle_cols()
    for col in self.columns.keys():
        if col in toggle_cols:
            self.columns[col]['toggle'] = True
        else:
            self.columns[col]['toggle'] = False
def publishFromHTML(self, prouid, results_html):
    """Publish the report for the object with UID `prouid`.

    Renders `results_html` into a PDF, emails the HTML (with the PDF
    attached) to the supplier, stamps the dispatch date on the object
    and returns it in a list.  Returns [] if the UID does not resolve
    to exactly one catalog entry.

    :param prouid: UID of the target object in uid_catalog
    :param results_html: rendered report HTML (string)
    :raises WorkflowException: when the SMTP server refuses recipients
    """
    uc = getToolByName(self.context, 'uid_catalog')
    pros = uc(UID=prouid)
    if not pros or len(pros) != 1:
        return []
    pro = pros[0].getObject();

    # HTML written to debug file
    # NOTE(review): tempfile.mktemp is race-prone, and the file handles
    # opened below are never closed explicitly -- consider
    # tempfile.mkstemp plus a with-block.
    debug_mode = App.config.getConfiguration().debug_mode
    if debug_mode:
        tmp_fn = tempfile.mktemp(suffix=".html")
        logger.debug("Writing HTML for %s to %s" % (pro.Title(), tmp_fn))
        open(tmp_fn, "wb").write(results_html)

    # Create the pdf report (will always be attached to the Order)
    # we must supply the file ourself so that createPdf leaves it alone.
    # (An earlier variant first rewrote 'attachment' links via
    # localise_images before rendering; that step was dropped here.)
    pdf_fn = tempfile.mktemp(suffix=".pdf")
    pdf_report = createPdf(htmlreport=results_html, outfile=pdf_fn)

    # PDF written to debug file
    if debug_mode:
        logger.debug("Writing PDF for %s to %s" % (pro.Title(), pdf_fn))
    else:
        # NOTE(review): the file is deleted but pdf_report (its content)
        # is still attached below -- presumably attachPdf uses pdf_fn
        # only as the attachment's filename; verify.
        os.remove(pdf_fn)

    # NOTE(review): `recipients` is assigned but never used below.
    recipients = []

    # Send report to supplier
    supplier_data = self._supplier_data(pro)
    title = encode_header(supplier_data.get('title', ''))
    email = supplier_data.get('email')
    formatted = formataddr((title, email))

    # Create the new mime_msg object
    mime_msg = MIMEMultipart('related')
    mime_msg['Subject'] = self.get_mail_subject(pro)
    """ Edit this to change the From address
    mime_msg['From'] = formataddr(
        (encode_header(lab.getName()), lab.getEmailAddress()))
    """
    # NOTE(review): hard-coded From address; the no-op string above
    # shows how to derive it from the lab object instead.
    mime_msg['From'] = formataddr(("BIKA IMM", "*****@*****.**"))
    mime_msg.preamble = 'This is a multi-part MIME message.'
    msg_txt = MIMEText(results_html, _subtype='html')
    mime_msg.attach(msg_txt)
    mime_msg['To'] = formatted

    # Attach the pdf to the email if requested
    if pdf_report:
        attachPdf(mime_msg, pdf_report, pdf_fn)

    msg_string = mime_msg.as_string()
    # content of outgoing email written to debug file
    if debug_mode:
        tmp_fn = tempfile.mktemp(suffix=".email")
        logger.debug("Writing MIME message for %s to %s"
                     % (pro.Title(), tmp_fn))
        open(tmp_fn, "wb").write(msg_string)

    try:
        host = getToolByName(pro, 'MailHost')
        host.send(msg_string, immediate=True)
    except SMTPServerDisconnected as msg:
        # best-effort: log and continue so publication still completes
        logger.warn("SMTPServerDisconnected: %s." % msg)
    except SMTPRecipientsRefused as msg:
        # bad recipients abort the workflow transition
        raise WorkflowException(str(msg))

    pro.setDateDispatched(DateTime())
    return [pro]