def addZCTextIndex(catalog, index_name):
    """Add a ZCTextIndex named ``index_name`` to ``catalog``.

    First creates the 'Lexicon' lexicon (word splitter, case normalizer
    and stop-word remover pipeline) required by ZCTextIndex, then adds
    the index itself configured with Okapi BM25 ranking.

    :param catalog: the catalog tool, or None when lookup failed
    :param index_name: id of the index to create
    """
    if catalog is None:
        # BUGFIX: the original did "'...' + catalog" with catalog being
        # None here, which raised TypeError instead of logging.
        logger.warning('Could not find the catalog tool.')
        return
    # Create lexicon to be able to add ZCTextIndex
    wordSplitter = Empty()
    wordSplitter.group = 'Word Splitter'
    wordSplitter.name = 'Unicode Whitespace splitter'
    caseNormalizer = Empty()
    caseNormalizer.group = 'Case Normalizer'
    caseNormalizer.name = 'Unicode Case Normalizer'
    stopWords = Empty()
    stopWords.group = 'Stop Words'
    stopWords.name = 'Remove listed and single char words'
    elem = [wordSplitter, caseNormalizer, stopWords]
    zc_extras = Empty()
    zc_extras.index_type = 'Okapi BM25 Rank'
    zc_extras.lexicon_id = 'Lexicon'
    try:
        catalog.manage_addProduct['ZCTextIndex'].manage_addLexicon(
            'Lexicon', 'Lexicon', elem)
    except Exception:
        # Lexicon may already exist; log and still try to add the index.
        logger.warning('Could not add ZCTextIndex to ' + str(catalog))
    catalog.addIndex(index_name, 'ZCTextIndex', zc_extras)
def __call__(self): """ Entry point of PrintView. If context.portal_type is a Worksheet, then the PrintView is initialized to manage only that worksheet. If the context.portal_type is a WorksheetFolder and there are items selected in the request (items param), the PrintView will show the preview for all the selected Worksheets. By default, returns a HTML-encoded template, but if the request contains a param 'pdf' with value 1, will flush a pdf for the worksheet. """ if self.context.portal_type == 'Worksheet': self._worksheets = [self.context] elif self.context.portal_type == 'WorksheetFolder' and \ self.request.get('items', ''): uids = self.request.get('items').split(',') uc = getToolByName(self.context, 'uid_catalog') self._worksheets = [obj.getObject() for obj in uc(UID=uids)] else: # Warn and redirect to referer logger.warning('PrintView: type not allowed: %s' % self.context.portal_type) self.destination_url = self.request.get_header( "referer", self.context.absolute_url()) # Generate PDF? if self.request.form.get('pdf', '0') == '1': return self._flush_pdf() else: return self.template()
def setupCatalogs(self, portal):
    """Register the ERP_Keyword index and metadata column on the
    bika_setup_catalog.

    :param portal: the Plone site object
    """
    # an item should belong to only one catalog.
    # that way looking it up means first looking up *the* catalog
    # in which it is indexed, as well as making it cheaper to index.
    def addIndex(cat, *args):
        # Best-effort: the index may already exist; log and continue.
        try:
            cat.addIndex(*args)
        except Exception:
            # BUGFIX: narrowed bare 'except:' so SystemExit and
            # KeyboardInterrupt are no longer swallowed.
            logger.warning("Could not create index %s in catalog %s"
                           % (args, cat))

    def addColumn(cat, col):
        # Best-effort: the column may already exist; log and continue.
        try:
            cat.addColumn(col)
        except Exception:
            logger.warning("Could not create metadata %s in catalog %s"
                           % (col, cat))

    bsc = getToolByName(portal, 'bika_setup_catalog', None)
    if bsc is None:
        logger.warning('Could not find the setup catalog tool.')
        return
    addIndex(bsc, 'ERP_Keyword', 'FieldIndex')
    addColumn(bsc, 'ERP_Keyword')
def copy_to_new_specs(self):
    """Build a JSON mapping of results ranges copied from the analysis
    requests whose UIDs come in the 'copy_from' request parameter.

    Each copied range entry is enriched with the UID of the matching
    AnalysisService (looked up by keyword).  Returns '{}' when nothing
    is requested or found.
    """
    specs = {}
    copy_from = self.request.get('copy_from', "")
    if not copy_from:
        return json.dumps(specs)
    uids = copy_from.split(",")
    proxies = self.analysisrequest_catalog(UID=uids)
    if not proxies:
        logger.warning(
            'No object found for UIDs {0} while copying specs'
            .format(copy_from))
        return json.dumps(specs)
    for index, proxy in enumerate(proxies):
        ranges = []
        for res_range in proxy.getObject().getResultsRange():
            service_uid = self.bika_setup_catalog(
                portal_type='AnalysisService',
                getKeyword=res_range['keyword'])[0].UID
            res_range['uid'] = service_uid
            ranges.append(res_range)
        specs[index] = ranges
    return json.dumps(specs)
def __call__(self):
    """Render the small kit sticker template for the requested items.

    For each Kit item, stickers are generated for the StockItems whose
    product matches the Product titled like the kit's template.

    NOTE(review): non-Kit items are silently dropped (no else branch in
    the loop) — confirm that is intended.
    """
    bsc = getToolByName(self.context, 'bika_setup_catalog')
    items = self.request.get('items', '')
    if items:
        # 'items' is a comma-separated list of object ids.
        self.items = [o.getObject() for o in bsc(id=items.split(","))]
    else:
        self.items = [self.context, ]
    new_items = []
    for i in self.items:
        if i.portal_type == "Kit":
            # All stock items; filtered below by matching product id.
            catalog = bsc(portal_type="StockItem")
            brains = bsc.searchResults({'portal_type': 'Product',
                                        'title': i.getKitTemplate().Title()})
            if len(brains) == 1:
                new_items += [pi.getObject() for pi in catalog
                              if pi.getObject().getProduct().getId() ==
                              brains[0].getObject().getId()]
    self.items = new_items
    if not self.items:
        logger.warning(
            "Cannot print sticker: no items specified in request")
        self.request.response.redirect(self.context.absolute_url())
        return
    template = 'templates/stickers/sticker_kits_small.pt'
    stickertemplate = ViewPageTemplateFile(template)
    return stickertemplate(self)
def __call__(self):
    """Render the small stock-item sticker template for the requested
    items; Order items are expanded into their stock items first.
    """
    bsc = getToolByName(self.context, 'bika_setup_catalog')
    requested = self.request.get('items', '')
    if requested:
        self.items = [brain.getObject()
                      for brain in bsc(id=requested.split(","))]
    else:
        self.items = [self.context]
    # Orders get stickers for their stock items
    expanded = []
    for item in self.items:
        if item.portal_type == 'Order':
            stock_brains = bsc(portal_type='StockItem')
            expanded.extend(
                brain.getObject() for brain in stock_brains
                if brain.getObject().getOrderId() == item.getId())
        else:
            expanded.append(item)
    self.items = expanded
    if not self.items:
        logger.warning("Cannot print stickers: no items specified in request")
        self.request.response.redirect(self.context.absolute_url())
        return
    template = 'templates/stickers/sticker_stockitem_small.pt'
    stickertemplate = ViewPageTemplateFile(template)
    return stickertemplate(self)
def strptime(context, value):
    """Given a string, try to return a DateTime.DateTime object parsed
    with the date formats from the i18n translations.

    Tries 'date_format_long' then 'date_format_short'; when neither
    matches, falls back to DateTime's own (rfc822-capable) parser.
    Returns "" when everything fails.
    """
    val = ""
    for fmt in ['date_format_long', 'date_format_short']:
        # Translated format uses ${...} placeholders; map to strptime %-codes.
        fmtstr = context.translate(fmt, domain='bika', mapping={})
        fmtstr = fmtstr.replace(r"${", '%').replace('}', '')
        try:
            val = _strptime(value, fmtstr)
        except ValueError:
            continue
        try:
            # Keep only (year, month, day, hour, ...) fields of the tuple.
            val = DateTime(*list(val)[:-6])
        except DateTimeError:
            # BUGFIX: the original set val = "" and then fell through to
            # val.timezoneNaive(), raising AttributeError.  Try the next
            # format instead (same pattern as DateTimeField.set).
            val = ""
            continue
        if val.timezoneNaive():
            # Use local timezone for tz naive strings
            # see http://dev.plone.org/plone/ticket/10141
            zone = val.localZone(safelocaltime(val.timeTime()))
            parts = val.parts()[:-1] + (zone,)
            val = DateTime(*parts)
        break
    else:
        try:
            # The following will handle an rfc822 string.
            value = value.split(" +", 1)[0]
            val = DateTime(value)
        except Exception:
            logger.warning("DateTimeField failed to format date "
                           "string '%s' with '%s'" % (value, fmtstr))
    return val
def set(self, instance, value, **kwargs):
    """Check if value is an actual date/time value. If not, attempt
    to convert it to one; otherwise, set to None. Assign all
    properties passed as kwargs to object.

    Non-DateTime values are parsed with the i18n 'date_format_long' /
    'date_format_short' translation formats.
    """
    val = value
    if not value:
        val = None
    elif not isinstance(value, DateTime):
        for fmt in ['date_format_long', 'date_format_short']:
            fmtstr = instance.translate(fmt, domain='bika', mapping={})
            fmtstr = fmtstr.replace(r"${", '%').replace('}', '')
            try:
                val = strptime(value, fmtstr)
            except ValueError:
                continue
            try:
                val = DateTime(*list(val)[:-6])
            except DateTimeError:
                # BUGFIX: the original assigned val = None and fell
                # through to val.timezoneNaive(), raising
                # AttributeError.  Try the next format instead, as the
                # other DateTimeField.set variant does.
                val = None
                continue
            if val.timezoneNaive():
                # Use local timezone for tz naive strings
                # see http://dev.plone.org/plone/ticket/10141
                zone = val.localZone(safelocaltime(val.timeTime()))
                parts = val.parts()[:-1] + (zone,)
                val = DateTime(*parts)
            break
        else:
            # No translated format matched the input string.
            logger.warning("DateTimeField failed to format date "
                           "string '%s' with '%s'" % (value, fmtstr))
    super(DateTimeField, self).set(instance, val, **kwargs)
def get_workflow_actions(self): """ Compile a list of possible workflow transitions for items in this Table. """ # cbb return empty list if we are unable to select items if not self.bika_listing.show_select_column: return [] workflow = getToolByName(self.context, 'portal_workflow') # get all transitions for all items. transitions = {} actions = [] for obj in [i.get('obj', '') for i in self.items]: obj = get_object(obj) for it in workflow.getTransitionsFor(obj): transitions[it['id']] = it # the list is restricted to and ordered by these transitions. if 'transitions' in self.bika_listing.review_state: for tdict in self.bika_listing.review_state['transitions']: if tdict['id'] in transitions: actions.append(transitions[tdict['id']]) else: actions = transitions.values() new_actions = [] # remove any invalid items with a warning for a, action in enumerate(actions): if isinstance(action, dict) \ and 'id' in action: new_actions.append(action) else: logger.warning("bad action in review_state['transitions']: %s. " "(complete list: %s)." % (action, actions)) actions = new_actions # and these are removed if 'hide_transitions' in self.bika_listing.review_state: hidden_transitions = self.bika_listing.review_state['hide_transitions'] actions = [a for a in actions if a['id'] not in hidden_transitions] # cheat: until workflow_action is abolished, all URLs defined in # GS workflow setup will be ignored, and the default will apply. # (that means, WorkflowAction-bound URL is called). for i, action in enumerate(actions): actions[i]['url'] = '' # if there is a self.review_state['some_state']['custom_transitions'] # attribute on the BikaListingView, add these actions to the list. if 'custom_transitions' in self.bika_listing.review_state: for action in self.bika_listing.review_state['custom_transitions']: if isinstance(action, dict) and 'id' in action: actions.append(action) for a, action in enumerate(actions): actions[a]['title'] = t(PMF(actions[a]['title'])) return actions
def get_uid(self, catalog_name, **query):
    """Return the UID of the first brain matching ``query`` in the
    named catalog, or None (after logging a warning) when nothing
    matches.
    """
    portal = api.portal.get()
    catalog = getToolByName(portal, catalog_name)
    # catalog call
    results = catalog(**query)
    if not results:
        logger.warning("No brain in %s for %s" % (catalog_name, query))
        return None
    return results[0].UID
def csvDate2BikaDate(self, DateTime):
    """Convert a ctime-style CSV date string (e.g.
    'Tue Mar 11 14:46:46 2014') into the 'YYYYMMDD HH:MM:SS' format.

    Returns the input unchanged (after logging a warning) when it does
    not match the expected format.
    """
    try:
        parsed = datetime.strptime(DateTime, "%a %b %d %H:%M:%S %Y")
        return parsed.strftime("%Y%m%d %H:%M:%S")
    except ValueError:
        logger.warning("No date format known.")
        return DateTime
def _cleanAndRebuildIfNeeded(portal, cleanrebuild):
    """Rebuild the given catalogs.

    :portal: the Plone portal object
    :cleanrebuild: a list with catalog ids
    """
    for cat in cleanrebuild:
        catalog = getToolByName(portal, cat)
        if catalog:
            # Prefer the soft rebuild when available; fall back to a
            # full clear-and-rebuild (consistent with the newer variant
            # of this helper elsewhere in the codebase).
            if hasattr(catalog, "softClearFindAndRebuild"):
                catalog.softClearFindAndRebuild()
            else:
                catalog.clearFindAndRebuild()
        else:
            # BUGFIX: fixed the broken grammar of the original message
            # ('%s do not found').
            logger.warning('Could not find catalog %s' % cat)
def Import(self):
    """Import analysis specifications from the worksheet rows.

    Rows are first grouped into ``bucket[client_title][sampletype_title]``
    lists of results ranges (keyword/min/max/panic/error), then an
    AnalysisSpec object is created per (client, sample type) pair —
    under bika_analysisspecs for the 'lab', or under the Client folder
    otherwise.
    """
    s_t = ''
    c_t = 'lab'
    bucket = {}
    pc = getToolByName(self.context, 'portal_catalog')
    bsc = getToolByName(self.context, 'bika_setup_catalog')
    # collect up all values into the bucket
    for row in self.get_rows(3):
        c_t = row['Client_title'] if row['Client_title'] else 'lab'
        if c_t not in bucket:
            bucket[c_t] = {}
        # A blank SampleType_title carries the previous row's value.
        s_t = row['SampleType_title'] if row['SampleType_title'] else s_t
        if s_t not in bucket[c_t]:
            bucket[c_t][s_t] = []
        # Look the service up by title first, then by keyword.
        service = bsc(portal_type='AnalysisService', title=row['service'])
        if not service:
            service = bsc(portal_type='AnalysisService',
                          getKeyword=row['service'])
        try:
            service = service[0].getObject()
            bucket[c_t][s_t].append({
                'keyword': service.getKeyword(),
                'min': row.get('min', '0'),
                'max': row.get('max', '0'),
                'minpanic': row.get('minpanic', '0'),
                'maxpanic': row.get('maxpanic', '0'),
                'error': row.get('error', '0'),
            })
        except IndexError:
            # Neither lookup matched: skip the row with a warning.
            warning = "Error with service name %s on sheet %s. Service not uploaded."
            logger.warning(warning, row.get('service', ''), self.sheetname)
    # write objects.
    for c_t in bucket:
        if c_t == 'lab':
            folder = self.context.bika_setup.bika_analysisspecs
        else:
            folder = pc(portal_type='Client', title=c_t)
            if (not folder or len(folder) != 1):
                logger.warn("Client %s not found. Omiting client specifications." % c_t)
                continue
            folder = folder[0].getObject()
        for s_t in bucket[c_t]:
            resultsrange = bucket[c_t][s_t]
            # NOTE(review): assumes a SampleType with this exact title
            # exists — [0] raises IndexError otherwise; confirm.
            sampletype = bsc(portal_type='SampleType', title=s_t)[0]
            _id = folder.invokeFactory('AnalysisSpec', id=tmpID())
            obj = folder[_id]
            obj.edit(
                title=sampletype.Title,
                ResultsRange=resultsrange)
            obj.setSampleType(sampletype.UID)
            obj.unmarkCreationFlag()
            renameAfterCreation(obj)
def set_fields_from_request(obj, request):
    """Search request for keys that match field names in obj, and call
    field mutator with request value.

    The list of fields for which schema mutators were found is
    returned; an empty list is returned on any resolution/parse error.
    """
    schema = obj.Schema()
    # fields contains all schema-valid field values from the request.
    fields = {}
    for fieldname, value in request.items():
        if fieldname not in schema:
            continue
        # BUGFIX: the original tested `type in ('reference')`, which is
        # substring matching against a plain string (parentheses do not
        # make a tuple), not membership in a tuple of type names.
        if schema[fieldname].type == 'reference':
            brains = []
            if value:
                brains = resolve_request_lookup(obj, request, fieldname)
                if not brains:
                    logger.warning(
                        "JSONAPI: Can't resolve reference: {} {}"
                        .format(fieldname, value))
                    return []
            if schema[fieldname].multiValued:
                value = [b.UID for b in brains] if brains else []
            else:
                value = brains[0].UID if brains else None
        fields[fieldname] = value
    # Write fields.
    for fieldname, value in fields.items():
        field = schema[fieldname]
        fieldtype = field.getType()
        if fieldtype == 'Products.Archetypes.Field.BooleanField':
            # BUGFIX: test falsiness before calling .lower(), so a None
            # value no longer raises AttributeError.
            if not value or value.lower() in ('0', 'false', 'no'):
                value = False
            else:
                value = True
        elif fieldtype in ['Products.ATExtensions.field.records.RecordsField',
                           'Products.ATExtensions.field.records.RecordField']:
            # SECURITY: eval() on request-supplied data executes
            # arbitrary code; should be replaced with json.loads /
            # ast.literal_eval once payloads are confirmed safe.
            try:
                value = eval(value)
            except Exception:
                logger.warning(
                    "JSONAPI: " + fieldname + ": Invalid "
                    "JSON/Python variable")
                return []
        mutator = field.getMutator(obj)
        if mutator:
            mutator(value)
        else:
            field.set(obj, value)
    obj.reindexObject()
    return fields.keys()
def _setup_catalog(portal, catalog_id, catalog_definition):
    """ Given a catalog definition it updates the indexes, columns and
    content_type definitions of the catalog.
    :portal: the Plone site object
    :catalog_id: a string as the catalog id
    :catalog_definition: a dictionary like
        {
            'types':   ['ContentType', ...],
            'indexes': {
                'UID': 'FieldIndex',
                ...
            },
            'columns': [
                'Title',
                ...
            ]
        }
    :return: True when any change requires the catalog to be reindexed,
        False otherwise (including when the catalog tool is missing).
    """
    # Tracks whether any index/column change requires a reindex.
    reindex = False
    catalog = getToolByName(portal, catalog_id, None)
    if catalog is None:
        logger.warning('Could not find the %s tool.' % (catalog_id))
        return False
    # Indexes
    indexes_ids = catalog_definition.get('indexes', {}).keys()
    # Indexing
    for idx in indexes_ids:
        # The function returns if the index needs to be reindexed
        indexed = _addIndex(catalog, idx, catalog_definition['indexes'][idx])
        reindex = True if indexed else reindex
    # Removing indexes no longer present in the definition.
    in_catalog_idxs = catalog.indexes()
    to_remove = list(set(in_catalog_idxs)-set(indexes_ids))
    for idx in to_remove:
        # The function returns if the index has been deleted
        desindexed = _delIndex(catalog, idx)
        reindex = True if desindexed else reindex
    # Columns
    columns_ids = catalog_definition.get('columns', [])
    for col in columns_ids:
        created = _addColumn(catalog, col)
        reindex = True if created else reindex
    # Removing columns no longer present in the definition.
    in_catalog_cols = catalog.schema()
    to_remove = list(set(in_catalog_cols)-set(columns_ids))
    for col in to_remove:
        # The function returns if the index has been deleted
        desindexed = _delColumn(catalog, col)
        reindex = True if desindexed else reindex
    return reindex
def isAttributeHidden(classname, fieldname):
    """Return True when ``fieldname`` of ``classname`` is listed in the
    'bika.lims.hiddenattributes' registry record; False otherwise.

    Any problem while reading the registry is logged and treated as
    'not hidden'.
    """
    try:
        registry = queryUtility(IRegistry)
        hiddenattributes = registry.get('bika.lims.hiddenattributes', ())
        if hiddenattributes is not None:
            for alist in hiddenattributes:
                # Each entry is [classname, fieldname1, fieldname2, ...]
                if alist[0] == classname:
                    return fieldname in alist[1:]
    except Exception:
        # BUGFIX: fixed typo 'Probem' -> 'Problem' and narrowed the
        # bare 'except:'.
        logger.warning(
            'Problem accessing optionally hidden attributes in registry')
    return False
def getUID(self, catalog_name, **query):
    """Return the UID of the first catalog brain matching ``query``,
    performing the query as the 'test_labmanager' user.

    Returns None (after logging a warning) when nothing matches; on a
    hit, a transaction savepoint is created before returning.
    """
    portal = api.portal.get()
    catalog = getToolByName(portal, catalog_name)
    # login again
    saved = self.swapSecurityManager("test_labmanager")
    # catalog call
    brains = catalog(**query)
    # go back to original security manager
    setSecurityManager(saved)
    if not brains:
        logger.warning("No brain in %s for %s" % (catalog_name, query))
        return None
    transaction.savepoint()
    return brains[0].UID
def get_json_value(self):
    """Return the field value converted to a JSON string, or "" (after
    logging a warning) when the value is not JSON serializable.
    """
    value = self.get_field_value()
    try:
        # Always handle the value as unicode
        return json.dumps(safe_unicode(value))
    except TypeError:
        message = (
            "ParseError: '{}.{} ('{}') -> {}' is not JSON serializable!"
            .format(self.context.getId(), self.field.getName(),
                    self.field.type, repr(value)))
        logger.warning(message)
        return ""
def isItemAllowed(self, obj):
    """Returns true if the current analysis to be rendered has a slot
    assigned for the current layout.

    :param obj: analysis to be rendered as a row in the list
    :type obj: ATContentType/DexterityContentType
    :return: True if the obj has an slot assigned. Otherwise, False.
    :rtype: bool
    """
    uid = api.get_uid(obj)
    if self.get_item_slot(uid):
        # Slot assigned: defer to the base class checks.
        return BaseView.isItemAllowed(self, obj)
    logger.warning("Slot not assigned to item %s" % uid)
    return False
def getInterimValue(self, keyword):
    """Returns the value of an interim of this analysis.

    Returns None (with a warning/error logged) when the keyword is
    missing or ambiguous.
    """
    matches = [field for field in self.getInterimFields()
               if field["keyword"] == keyword]
    if not matches:
        logger.warning("Interim '{}' for analysis '{}' not found".format(
            keyword, self.getKeyword()))
        return None
    if len(matches) > 1:
        logger.error("More than one interim '{}' found for '{}'".format(
            keyword, self.getKeyword()))
        return None
    return matches[0].get('value', '')
def _cleanAndRebuildIfNeeded(portal, cleanrebuild):
    """Rebuild the given catalogs.

    Uses the soft rebuild when the catalog supports it, and the full
    clear-and-rebuild otherwise.

    :portal: the Plone portal object
    :cleanrebuild: a list with catalog ids
    """
    for cat in cleanrebuild:
        catalog = getToolByName(portal, cat)
        if catalog:
            if hasattr(catalog, "softClearFindAndRebuild"):
                catalog.softClearFindAndRebuild()
            else:
                catalog.clearFindAndRebuild()
        else:
            # BUGFIX: fixed the broken grammar of the original message
            # ('%s do not found').
            logger.warning('Could not find catalog %s' % cat)
def setInterimValue(self, keyword, value):
    """Sets a value to an interim of this analysis
    :param keyword: the keyword of the interim
    :param value: the value for the interim
    """
    # Ensure result integrity regards to None, empty and 0 values
    sanitized = str('' if not value and value != 0 else value).strip()
    fields = self.getInterimFields()
    for field in fields:
        if field['keyword'] == keyword:
            field['value'] = sanitized
            self.setInterimFields(fields)
            return
    logger.warning("Interim '{}' for analysis '{}' not found".format(
        keyword, self.getKeyword()))
def __call__(self):
    """Resolve the requested items into sample partitions and render
    the sticker template selected in the request.

    ARs are mapped to their samples, samples to their partitions; the
    'template' request param has the form 'prefix:filename' and is
    resolved inside the 'stickers' resource directory.

    NOTE(review): returns None implicitly when the first item is
    neither a SamplePartition nor a ReferenceSample — confirm intended.
    """
    bc = getToolByName(self.context, 'bika_catalog')
    items = self.request.get('items', '')
    if items:
        self.items = [o.getObject() for o in bc(id=items.split(","))]
    else:
        self.items = [
            self.context, ]
    # ARs get stickers for their respective samples.
    new_items = []
    for i in self.items:
        if i.portal_type == 'AnalysisRequest':
            new_items.append(i.getSample())
        else:
            new_items.append(i)
    self.items = new_items
    # Samples get stickers for their partitions.
    new_items = []
    for i in self.items:
        if i.portal_type == 'Sample':
            new_items += i.objectValues('SamplePartition')
        else:
            new_items.append(i)
    self.items = new_items
    if not self.items:
        logger.warning(
            "Cannot print stickers: no items specified in request")
        self.request.response.redirect(self.context.absolute_url())
        return
    if self.items[0].portal_type == 'SamplePartition':
        template = self.request.get('template', '')
        prefix, tmpl = template.split(':')
        templates_dir = queryResourceDirectory('stickers', prefix).directory
        stickertemplate = ViewPageTemplateFile(
            os.path.join(templates_dir, tmpl))
        return stickertemplate(self)
    elif self.items[0].portal_type == 'ReferenceSample':
        return self.referencesample_sticker()
def get_object(self, context, value):
    """Resolve a UID to an object.

    :param context: context is the object containing the field's schema.
    :type context: BaseContent
    :param value: A UID.
    :type value: string
    :return: Returns a Content object.
    :rtype: BaseContent
    """
    if not value:
        return None
    resolved = _get_object(context, value)
    if resolved is not None:
        return resolved
    # Resolution failed: warn and fall through to a None result.
    logger.warning(
        "{}.{}: Resolving UIDReference failed for {}. No object will "
        "be returned.".format(context, self.getName(), value))
    return None
def set(self, context, value, **kwargs):
    """Accepts a UID, brain, or an object (or a list of any of these),
    and stores a UID or list of UIDS.

    :param context: context is the object who's schema contains this
        field.
    :type context: BaseContent
    :param value: A UID, brain or object (or a sequence of these).
    :type value: Any
    :param kwargs: kwargs are passed directly to the underlying get.
    :type kwargs: dict
    :return: None
    """
    if self.multiValued:
        if not value:
            value = []
        if type(value) not in (list, tuple):
            value = [value, ]
        # Resolve every entry to an object; unresolvable ones are
        # dropped from both the backreferences and the stored UIDs.
        ret = [self.get_object(context, val) for val in value if val]
        self._set_backreferences(context, ret, **kwargs)
        uids = [self.get_uid(context, r) for r in ret if r]
        StringField.set(self, context, uids, **kwargs)
    else:
        # Sometimes we get given a list here with an empty string.
        # This is generated by html forms with empty values.
        # This is a single-valued field though, so:
        if isinstance(value, list) and value:
            if len(value) > 1:
                logger.warning(
                    "Found values '\'{}\'' for singleValued field <{}>.{} "
                    "- using only the first value in the list.".format(
                        '\',\''.join(value), context.UID(), self.getName()))
            value = value[0]
        ret = self.get_object(context, value)
        if ret:
            self._set_backreferences(context, [ret, ], **kwargs)
            uid = self.get_uid(context, ret)
            StringField.set(self, context, uid, **kwargs)
        else:
            # Unresolvable value: clear the stored UID.
            StringField.set(self, context, '', **kwargs)
def __call__(self):
    """Entry point of the samples print view.

    Renders a print preview (or a PDF when the request carries pdf=1)
    for the samples selected via the 'items' request parameter, or for
    all samples in to_be_sampled/scheduled_sampling states when none
    are given.

    NOTE(review): for a disallowed portal_type, destination_url is set
    but no redirect/return happens; execution falls through to the
    filter/pdf logic with self._items unset — confirm intended.
    """
    if self.context.portal_type == 'SamplesFolder':
        if self.request.get('items', ''):
            # 'items' is a comma-separated list of sample UIDs.
            uids = self.request.get('items').split(',')
            uc = getToolByName(self.context, 'uid_catalog')
            self._items = [obj.getObject() for obj in uc(UID=uids)]
        else:
            catalog = getToolByName(self.context, 'portal_catalog')
            contentFilter = {
                'portal_type': 'Sample',
                'sort_on': 'created',
                'sort_order': 'reverse',
                'review_state': ['to_be_sampled', 'scheduled_sampling'],
                'path': {
                    'query': "/",
                    'level': 0
                }
            }
            brains = catalog(contentFilter)
            self._items = [obj.getObject() for obj in brains]
    else:
        # Warn and redirect to referer
        logger.warning('PrintView: type not allowed: %s \n' %
                       self.context.portal_type)
        self.destination_url = self.request.get_header(
            "referer", self.context.absolute_url())
    # setting the filters
    self._filter_sampler = self.request.form.get('sampler', '')
    self._filter_client = self.request.form.get('client', '')
    self._filter_date_from = self.request.form.get('date_from', '')
    self._filter_date_to = self.request.form.get('date_to', '')
    self._avoid_filter_by_date = True if self.request.form.get(
        'avoid_filter_by_date', False) == 'true' else False
    # Do print?
    if self.request.form.get('pdf', '0') == '1':
        response = self.request.response
        response.setHeader("Content-type", "application/pdf")
        response.setHeader("Content-Disposition", "inline")
        response.setHeader("filename", "temp.pdf")
        return self.pdfFromPOST()
    else:
        return self.template()
def __call__(self):
    """Render the stickers preview template or, when the request
    carries pdf=1, stream a PDF generated from the POSTed content.

    Items come from the 'items' request parameter (comma-separated
    UIDs), defaulting to the context; each item is duplicated per the
    requested number of copies before being expanded by
    _populateItems.
    """
    # Need to generate a PDF with the stickers?
    if self.request.form.get('pdf', '0') == '1':
        response = self.request.response
        response.setHeader('Content-type', 'application/pdf')
        response.setHeader('Content-Disposition', 'inline')
        response.setHeader('filename', 'sticker.pdf')
        pdfstream = self.pdf_from_post()
        return pdfstream
    self.copies_count = self.get_copies_count()
    items = self.request.get('items', '')
    # If filter by type is given in the request, only the templates under
    # the path with the type name will be given as vocabulary.
    # Example: If filter_by_type=='worksheet', only *.tp files under a
    # folder with filter_by_type as name will be displayed.
    self.filter_by_type = self.request.get('filter_by_type', False)
    catalog = getToolByName(self.context, 'uid_catalog')
    self.items = [o.getObject() for o in catalog(UID=items.split(","))]
    if not self.items:
        # Default fallback, load from context
        self.items = [self.context, ]
    # before retrieving the required data for each type of object copy
    # each object as many times as the number of desired sticker copies
    self.items = self._resolve_number_of_copies(self.items)
    new_items = []
    for i in self.items:
        outitems = self._populateItems(i)
        new_items.extend(outitems)
    self.items = new_items
    if not self.items:
        logger.warning(
            "Cannot print stickers: no items specified in request")
        self.request.response.redirect(self.context.absolute_url())
        return
    return self.template()
def setWorksheetTemplate(self, worksheettemplate, **kw):
    """Set the worksheet template and, when the template restricts to a
    method, propagate that method to this worksheet.

    Accepts either a WorksheetTemplate object or its UID string.
    """
    self.getField('WorksheetTemplate').set(self, worksheettemplate)
    if worksheettemplate and isinstance(worksheettemplate, str):
        # worksheettemplate is a UID, so we need to get the object first
        uc = getToolByName(self, 'uid_catalog')
        wst = uc(UID=worksheettemplate)
        if wst and len(wst) == 1:
            self.setMethod(wst[0].getObject().getRestrictToMethod())
        else:
            # BUGFIX: the original concatenated two strings with '+'
            # while applying '%' only to the second one ('%' binds
            # tighter than '+'), so formatting three args against two
            # placeholders raised TypeError instead of logging.
            logger.warning(
                'The given Worksheet Template UID "%s" to be set '
                'in the Worksheet Object "%s" with uid "%s" is not valid'
                % (worksheettemplate, self.Title(), self.UID()))
    elif worksheettemplate and worksheettemplate.getRestrictToMethod():
        self.setMethod(worksheettemplate.getRestrictToMethod())
def __call__(self):
    """Collect the requested items (falling back to the context),
    expand each one through _populateItems, and render the sticker
    template.
    """
    bc = getToolByName(self.context, 'bika_catalog')
    requested = self.request.get('items', '')
    if requested:
        self.items = [brain.getObject()
                      for brain in bc(id=requested.split(","))]
    else:
        self.items = [self.context]
    expanded = []
    for item in self.items:
        expanded.extend(self._populateItems(item))
    self.items = expanded
    if not self.items:
        logger.warning("Cannot print stickers: no items specified in request")
        self.request.response.redirect(self.context.absolute_url())
        return
    return self.template()
def __call__(self):
    """Resolve the requested items into sample partitions and render
    the sticker template chosen via the 'template' request parameter
    ('prefix:filename' inside the 'stickers' resource directory).

    NOTE(review): returns None implicitly when the first item is
    neither a SamplePartition nor a ReferenceSample — confirm intended.
    """
    bc = getToolByName(self.context, 'bika_catalog')
    items = self.request.get('items', '')
    if items:
        self.items = [o.getObject() for o in bc(id=items.split(","))]
    else:
        self.items = [self.context, ]
    # ARs get stickers for their respective samples.
    new_items = []
    for i in self.items:
        if i.portal_type == 'AnalysisRequest':
            new_items.append(i.getSample())
        else:
            new_items.append(i)
    self.items = new_items
    # Samples get stickers for their partitions.
    new_items = []
    for i in self.items:
        if i.portal_type == 'Sample':
            new_items += i.objectValues('SamplePartition')
        else:
            new_items.append(i)
    self.items = new_items
    if not self.items:
        logger.warning("Cannot print stickers: no items specified in request")
        self.request.response.redirect(self.context.absolute_url())
        return
    if self.items[0].portal_type == 'SamplePartition':
        template = self.request.get('template', '')
        prefix, tmpl = template.split(':')
        templates_dir = queryResourceDirectory('stickers', prefix).directory
        stickertemplate = ViewPageTemplateFile(os.path.join(templates_dir, tmpl))
        return stickertemplate(self)
    elif self.items[0].portal_type == 'ReferenceSample':
        return self.referencesample_sticker()
def __call__(self):
    """Resolve the requested items into sample partitions and render
    the small or large label template (per the 'size' request param).

    NOTE(review): returns None implicitly when the first item is
    neither a SamplePartition nor a ReferenceSample — confirm intended.
    """
    bc = getToolByName(self.context, 'bika_catalog')
    items = self.request.get('items', '')
    if items:
        self.items = [o.getObject() for o in bc(id=items.split(","))]
    else:
        self.items = [
            self.context, ]
    # ARs get labels for their respective samples.
    new_items = []
    for i in self.items:
        if i.portal_type == 'AnalysisRequest':
            new_items.append(i.getSample())
        else:
            new_items.append(i)
    self.items = new_items
    # Samples get labels for their partitions.
    new_items = []
    for i in self.items:
        if i.portal_type == 'Sample':
            new_items += i.objectValues('SamplePartition')
        else:
            new_items.append(i)
    self.items = new_items
    if not self.items:
        logger.warning(
            "Cannot print labels: no items specified in request")
        self.request.response.redirect(self.context.absolute_url())
        return
    if self.items[0].portal_type == 'SamplePartition':
        if self.request.get('size', '') == 'small':
            return self.sample_small()
        else:
            return self.sample_large()
    elif self.items[0].portal_type == 'ReferenceSample':
        return self.referencesample_sticker()
def __call__(self, request):
    """Return info dicts for the sticker templates admitted by this
    object's sample type, marking the default one as selected.

    Returns [] (with a warning) when the context has no sample type
    accessor.
    """
    self.request = request
    # Stickers admittance are saved in sample type
    if not hasattr(self.context, 'getSampleType'):
        logger.warning(
            "{} has no attribute 'getSampleType', so no sticker will be "
            "returned.".format(self.context.getId()))
        return []
    self.sample_type = self.context.getSampleType()
    admitted_ids = self.sample_type.getAdmittedStickers()
    default_id = self.get_default_sticker_id()
    # Getting only existing templates and its info
    admitted = []
    for template in getStickerTemplates():
        if template.get('id') not in admitted_ids:
            continue
        info = template.copy()
        info['selected'] = template.get('id') == default_id
        admitted.append(info)
    return admitted
def setupCatalogs(self, portal):
    """Assign content types to their catalogs and verify the bika
    catalog tools exist.

    :param portal: the Plone site object
    """
    def addIndex(cat, *args):
        # Best-effort helper: log instead of aborting setup.
        try:
            cat.addIndex(*args)
        except Exception:
            # BUGFIX: narrowed bare 'except:'.
            logger.warning("Could not create index %s in catalog %s"
                           % (args, cat))

    def addColumn(cat, col):
        try:
            cat.addColumn(col)
        except Exception:
            logger.warning("Could not create metadata %s in catalog %s"
                           % (col, cat))

    # _______________________________#
    #          BIKA_CATALOG          #
    # _______________________________#
    bc = getToolByName(portal, 'bika_catalog', None)
    if bc is None:
        logger.warning('Could not find the bika_catalog tool.')
        return
    # Add indexes and metadata columns here
    at = getToolByName(portal, 'archetype_tool')
    at.setCatalogsByType('Kit', ['bika_catalog'])
    at.setCatalogsByType('Project', ['bika_catalog'])
    at.setCatalogsByType('Shipment', ['bika_catalog'])
    at.setCatalogsByType('Aliquot', ['bika_catalog'])
    at.setCatalogsByType('Biospecimen', ['bika_catalog'])
    # _______________________________#
    #       BIKA_SETUP_CATALOG       #
    # _______________________________#
    bsc = getToolByName(portal, 'bika_setup_catalog', None)
    if bsc is None:
        logger.warning('Could not find the bika_setup_catalog tool.')
        return
    # Add indexes and metadata columns here
    at = getToolByName(portal, 'archetype_tool')
    at.setCatalogsByType('KitTemplate', ['bika_setup_catalog', ])
    at.setCatalogsByType('StorageManagement', ['bika_setup_catalog', ])
    at.setCatalogsByType('BiospecType', ['bika_setup_catalog', ])
    at.setCatalogsByType('Multimage', ['bika_setup_catalog', ])
    at.setCatalogsByType('StorageType', ['bika_setup_catalog', ])
    bac = getToolByName(portal, 'bika_analysis_catalog', None)
    if bac is None:
        # BUGFIX: the original tested 'bsc' here, so a missing
        # bika_analysis_catalog tool was never detected.
        logger.warning('Could not find the bika_analysis_catalog tool.')
        return
def set(self, instance, value, **kwargs):
    """ Check if value is an actual date/time value. If not, attempt
    to convert it to one; otherwise, set to None. Assign all
    properties passed as kwargs to object.

    Strings are parsed with the i18n 'date_format_long' /
    'date_format_short' translation formats first, then with
    DateTime's own (rfc822-capable) parser as a fallback;
    datetime.datetime instances are converted via dt2DT.
    """
    val = value
    if not value:
        val = None
    elif isinstance(value, basestring):
        for fmt in ['date_format_long', 'date_format_short']:
            # Translated format uses ${...} placeholders; map them to
            # strptime %-codes.
            fmtstr = instance.translate(fmt, domain='bika', mapping={})
            fmtstr = fmtstr.replace(r"${", '%').replace('}', '')
            try:
                val = strptime(value, fmtstr)
            except ValueError:
                continue
            try:
                # Keep only the date/time fields of the time tuple.
                val = DateTime(*list(val)[:-6])
            except DateTimeError:
                continue
            if val.timezoneNaive():
                # Use local timezone for tz naive strings
                # see http://dev.plone.org/plone/ticket/10141
                zone = val.localZone(safelocaltime(val.timeTime()))
                parts = val.parts()[:-1] + (zone, )
                val = DateTime(*parts)
            break
        else:
            try:
                # The following will handle an rfc822 string.
                value = value.split(" +", 1)[0]
                val = DateTime(value)
            except:
                logger.warning("DateTimeField failed to format date "
                               "string '%s' with '%s'" % (value, fmtstr))
    elif isinstance(value, datetime.datetime):
        val = dt2DT(value)
    super(DateTimeField, self).set(instance, val, **kwargs)
def setupCatalogs(self, portal):
    """Verify the bika catalog tools exist; the index/metadata sections
    are placeholders for future registration.

    :param portal: the Plone site object
    """
    def addIndex(cat, *args):
        # Best-effort helper (currently unused here): log on failure.
        try:
            cat.addIndex(*args)
        except Exception:
            # BUGFIX: narrowed bare 'except:'.
            logger.warning("Could not create index %s in catalog %s"
                           % (args, cat))

    def addColumn(cat, col):
        try:
            cat.addColumn(col)
        except Exception:
            logger.warning("Could not create metadata %s in catalog %s"
                           % (col, cat))

    # BUGFIX: the original bound every catalog to the same name 'bsc',
    # shadowing the setup-catalog name; use distinct names instead.
    bc = getToolByName(portal, 'bika_catalog', None)
    if bc is None:
        logger.warning('Could not find the bika_catalog tool.')
        return
    # Add indexes and metadata colums here
    bsc = getToolByName(portal, 'bika_setup_catalog', None)
    if bsc is None:
        logger.warning('Could not find the bika_setup_catalog tool.')
        return
    # Add indexes and metadata colums here
    bac = getToolByName(portal, 'bika_analysis_catalog', None)
    if bac is None:
        logger.warning('Could not find the bika_analysis_catalog tool.')
        return
def set(self, context, value, **kwargs):
    """Accepts a UID, brain, or an object (or a list of any of these),
    and stores a UID or list of UIDS.

    :param context: context is the object who's schema contains this
        field.
    :type context: BaseContent
    :param value: A UID, brain or object (or a sequence of these).
    :type value: Any
    :param kwargs: kwargs are passed directly to the underlying get.
    :type kwargs: dict
    :return: None
    """
    if self.multiValued:
        if not value:
            value = []
        if type(value) not in (list, tuple):
            value = [value, ]
        # Resolve every entry to an object; unresolvable ones are
        # dropped from both the backreferences and the stored UIDs.
        ret = [self.get_object(context, val) for val in value if val]
        self._set_backreferences(context, ret)
        uids = [self.get_uid(context, r) for r in ret if r]
        StringField.set(self, context, uids, **kwargs)
    else:
        # Sometimes we get given a list here with an empty string.
        # This is generated by html forms with empty values.
        # This is a single-valued field though, so:
        if isinstance(value, list) and value:
            if len(value) > 1:
                logger.warning(
                    "Found values '\'{}\'' for singleValued field <{}>.{} "
                    "- using only the first value in the list.".format(
                        '\',\''.join(value), context.UID(), self.getName()))
            value = value[0]
        ret = self.get_object(context, value)
        if ret:
            self._set_backreferences(context, [ret, ])
            uid = self.get_uid(context, ret)
            StringField.set(self, context, uid, **kwargs)
        else:
            # Unresolvable value: clear the stored UID.
            StringField.set(self, context, '', **kwargs)
def __call__(self):
    # Entry point: collect the Sample objects to print, read the filter
    # values from the request form, then render either a PDF or the HTML
    # preview template.
    if self.context.portal_type == 'SamplesFolder':
        if self.request.get('items', ''):
            # Explicit selection: 'items' is a comma-separated list of UIDs.
            uids = self.request.get('items').split(',')
            uc = getToolByName(self.context, 'uid_catalog')
            self._items = [obj.getObject() for obj in uc(UID=uids)]
        else:
            # No explicit selection: fall back to every Sample still
            # pending sampling, newest first.
            catalog = getToolByName(self.context, 'portal_catalog')
            contentFilter = {
                'portal_type': 'Sample',
                'sort_on': 'created',
                'sort_order': 'reverse',
                'review_state': ['to_be_sampled', 'scheduled_sampling'],
                'path': {'query': "/", 'level': 0}
            }
            brains = catalog(contentFilter)
            self._items = [obj.getObject() for obj in brains]
    else:
        # Warn and redirect to referer
        logger.warning(
            'PrintView: type not allowed: %s \n' % self.context.portal_type)
        self.destination_url = self.request.get_header(
            "referer", self.context.absolute_url())
        # NOTE(review): execution falls through here with self._items unset
        # and destination_url never used; presumably a redirect to
        # self.destination_url was intended — verify against the template.
    # setting the filters
    self._filter_sampler = self.request.form.get('sampler', '')
    self._filter_client = self.request.form.get('client', '')
    self._filter_date_from = self.request.form.get('date_from', '')
    self._filter_date_to = self.request.form.get('date_to', '')
    # 'avoid_filter_by_date' arrives as the string 'true' from the form.
    self._avoid_filter_by_date = True if self.request.form.get(
        'avoid_filter_by_date', False) == 'true' else False
    # Do print?
    if self.request.form.get('pdf', '0') == '1':
        response = self.request.response
        response.setHeader("Content-type", "application/pdf")
        response.setHeader("Content-Disposition", "inline")
        response.setHeader("filename", "temp.pdf")
        return self.pdfFromPOST()
    else:
        return self.template()
def setupCatalogs(self, portal):
    """Register this add-on's content types with the Bika catalogs and
    create the indexes they rely on.

    Aborts early with a warning when a catalog tool is missing.

    :param portal: the Plone site root
    """
    def addIndex(cat, *args):
        # Best effort: the index may already exist on re-install.
        try:
            cat.addIndex(*args)
        except:
            logger.warning("Could not create index %s in catalog %s" % (args, cat))

    def addColumn(cat, col):
        # Best effort: the column may already exist on re-install.
        try:
            cat.addColumn(col)
        except:
            logger.warning("Could not create metadata %s in catalog %s" % (col, cat))

    # _______________________________#
    #          BIKA_CATALOG          #
    # _______________________________#
    bc = getToolByName(portal, 'bika_catalog', None)
    if bc is None:
        logger.warning('Could not find the bika_catalog tool.')
        return
    # Add indexes and metadata columns here
    at = getToolByName(portal, 'archetype_tool')
    at.setCatalogsByType('Kit', ['bika_catalog'])
    at.setCatalogsByType('Project', ['bika_catalog'])
    at.setCatalogsByType('Shipment', ['bika_catalog'])
    at.setCatalogsByType('Biospecimen', ['bika_catalog'])
    addIndex(bc, 'getParentUID', 'FieldIndex')
    addIndex(bc, 'getProjectUID', 'FieldIndex')

    # _______________________________#
    #       BIKA_SETUP_CATALOG       #
    # _______________________________#
    bsc = getToolByName(portal, 'bika_setup_catalog', None)
    if bsc is None:
        logger.warning('Could not find the bika_setup_catalog tool.')
        return
    # Add indexes and metadata columns here
    at = getToolByName(portal, 'archetype_tool')
    at.setCatalogsByType('KitTemplate', ['bika_setup_catalog'])
    at.setCatalogsByType('InventoryOrder', ['bika_setup_catalog'])
    at.setCatalogsByType('StorageType', ['bika_setup_catalog'])
    at.setCatalogsByType('Product', ['bika_setup_catalog'])
    at.setCatalogsByType('StockItem', ['bika_setup_catalog', ])
    at.setCatalogsByType('StorageLocation', ['bika_setup_catalog'])
    at.setCatalogsByType('StorageUnit', ['bika_setup_catalog'])
    at.setCatalogsByType('ManagedStorage', ['bika_setup_catalog'])
    at.setCatalogsByType('UnmanagedStorage', ['bika_setup_catalog'])
    at.setCatalogsByType('StoragePosition', ['bika_setup_catalog'])

    bac = getToolByName(portal, 'bika_analysis_catalog', None)
    # BUGFIX: this guard previously tested `bsc` (always non-None here)
    # instead of the freshly fetched `bac`, so a missing analysis catalog
    # was never detected.
    if bac is None:
        logger.warning('Could not find the bika_analysis_catalog tool.')
        return
def __call__(self):
    """Resolve the items selected in the request into printable objects
    and dispatch to the matching sticker template.

    AnalysisRequests are swapped for their Samples, and Samples are
    expanded into their SamplePartitions before rendering.
    """
    bika_catalog = getToolByName(self.context, 'bika_catalog')
    requested = self.request.get('items', '')
    if requested:
        brains = bika_catalog(id=requested.split(","))
        self.items = [brain.getObject() for brain in brains]
    else:
        self.items = [self.context, ]

    # ARs get labels for their respective samples.
    self.items = [item.getSample() if item.portal_type == 'AnalysisRequest'
                  else item for item in self.items]

    # Samples get labels for their partitions.
    expanded = []
    for item in self.items:
        if item.portal_type == 'Sample':
            expanded.extend(item.objectValues('SamplePartition'))
        else:
            expanded.append(item)
    self.items = expanded

    if not self.items:
        logger.warning("Cannot print labels: no items specified in request")
        self.request.response.redirect(self.context.absolute_url())
        return

    leading_type = self.items[0].portal_type
    if leading_type == 'SamplePartition':
        if self.request.get('size', '') == 'small':
            return self.sample_small()
        return self.sample_large()
    if leading_type == 'ReferenceSample':
        return self.referencesample_sticker()
def __call__(self):
    """Collect the objects to print stickers for and render the template.

    Objects come from the comma-separated 'items' ids in the request, or
    default to the current context; each is expanded via _populateItems.
    """
    catalog = getToolByName(self.context, 'bika_catalog')
    item_ids = self.request.get('items', '')
    if item_ids:
        self.items = [brain.getObject()
                      for brain in catalog(id=item_ids.split(","))]
    else:
        self.items = [self.context, ]

    expanded = []
    for obj in self.items:
        expanded.extend(self._populateItems(obj))
    self.items = expanded

    if not self.items:
        logger.warning(
            "Cannot print stickers: no items specified in request")
        self.request.response.redirect(self.context.absolute_url())
        return
    return self.template()
def set(self, instance, value, **kwargs):
    """Set (multi-)references

    Computes the delta between the currently stored back-references and
    the new value, then adds/removes references so the stored state
    matches exactly.
    """
    value = self.preprocess_value(value)
    existing_uids = self.get_backreferences_for(instance)
    # Nothing stored and nothing to store: no-op.
    if not value and not existing_uids:
        logger.warning("Field and value is empty!")
        return
    if not self.multiValued and len(value) > 1:
        raise ValueError(
            "Multiple values given for single valued field {}".format(
                repr(self)))
    # Normalize the incoming values to a list of UIDs.
    set_uids = []
    for val in value:
        if api.is_uid(val):
            set_uids.append(val)
        elif api.is_object(val):
            set_uids.append(api.get_uid(val))
        else:
            logger.error("Target has no UID: %s/%s" % (val, value))
    # sub: UIDs to unlink; add: UIDs to newly link.
    # NOTE(review): on Python 3 `filter` returns a one-shot iterator, so the
    # repeated `in add` / `in sub` membership tests below would misbehave —
    # this code assumes Python 2 list semantics; verify before porting.
    sub = filter(lambda uid: uid not in set_uids, existing_uids)
    add = filter(lambda uid: uid not in existing_uids, set_uids)
    for uid in set(existing_uids + set_uids):
        # The object to link
        target = api.get_object(uid)
        # Add reference to object
        if uid in add:
            __traceback_info__ = (instance, uid, value, existing_uids)
            self.add_reference(instance, target, **kwargs)
        # Delete reference to object
        elif uid in sub:
            self.del_reference(instance, target, **kwargs)
def SamplePrepWorkflowChain(ob, wftool):
    """Responsible for inserting the optional sampling preparation workflow
    into the workflow chain for objects with ISamplePrepWorkflow

    This is only done if the object is in 'sample_prep' state in the
    primary workflow (review_state).

    :param ob: the content object whose chain is being computed
    :param wftool: the portal_workflow tool
    :returns: the (possibly extended) workflow chain
    :rtype: tuple

    BUGFIX: the early-exit paths previously returned a list while the
    success path returned a tuple; all paths now return a tuple, matching
    what ToolWorkflowChain produces.
    """
    # use catalog to retrieve review_state: getInfoFor causes recursion loop
    chain = list(ToolWorkflowChain(ob, wftool))
    try:
        bc = getToolByName(ob, 'bika_catalog')
    except AttributeError:
        logger.warning(traceback.format_exc())
        logger.warning(
            "Error getting 'bika_catalog' using 'getToolByName' with '{0}'"
            " as context.".format(ob))
        return tuple(chain)
    proxies = bc(UID=ob.UID())
    if not proxies or proxies[0].review_state != 'sample_prep':
        return tuple(chain)
    sampleprep_workflow = ob.getPreparationWorkflow()
    if sampleprep_workflow:
        chain.append(sampleprep_workflow)
    return tuple(chain)
def __call__(self):
    """Return either a PDF of the stickers or the HTML sticker preview."""
    # A POSTed pdf=1 means the client wants the rendered PDF stream.
    if self.request.form.get("pdf", "0") == "1":
        response = self.request.response
        response.setHeader("Content-type", "application/pdf")
        response.setHeader("Content-Disposition", "inline")
        response.setHeader("filename", "sticker.pdf")
        return self.pdf_from_post()

    # When 'filter_by_type' is present, only *.pt templates stored under a
    # folder named after that type are offered as choices (e.g. a value of
    # 'worksheet' restricts the vocabulary to the 'worksheet' folder).
    self.filter_by_type = self.request.get("filter_by_type", False)
    self.items = self.get_items()
    if not self.items:
        logger.warning(
            "Cannot print stickers: no items specified in request")
        self.request.response.redirect(self.context.absolute_url())
        return
    return self.template()
def setupCatalogs(self, portal):
    """Register this add-on's content types with the Bika catalogs.

    Aborts early with a warning when a catalog tool is missing.

    :param portal: the Plone site root
    """
    def addIndex(cat, *args):
        # Best effort: the index may already exist on re-install.
        try:
            cat.addIndex(*args)
        except:
            logger.warning("Could not create index %s in catalog %s" % (args, cat))

    def addColumn(cat, col):
        # Best effort: the column may already exist on re-install.
        try:
            cat.addColumn(col)
        except:
            logger.warning("Could not create metadata %s in catalog %s" % (col, cat))

    # _______________________________#
    #          BIKA_CATALOG          #
    # _______________________________#
    bc = getToolByName(portal, 'bika_catalog', None)
    if bc is None:
        logger.warning('Could not find the bika_catalog tool.')
        return
    # Add indexes and metadata columns here
    at = getToolByName(portal, 'archetype_tool')
    at.setCatalogsByType('Kit', ['bika_catalog'])
    at.setCatalogsByType('Project', ['bika_catalog'])
    at.setCatalogsByType('Shipment', ['bika_catalog'])
    at.setCatalogsByType('Aliquot', ['bika_catalog'])
    at.setCatalogsByType('Biospecimen', ['bika_catalog'])

    # _______________________________#
    #       BIKA_SETUP_CATALOG       #
    # _______________________________#
    bsc = getToolByName(portal, 'bika_setup_catalog', None)
    if bsc is None:
        logger.warning('Could not find the bika_setup_catalog tool.')
        return
    # Add indexes and metadata columns here
    at = getToolByName(portal, 'archetype_tool')
    at.setCatalogsByType('KitTemplate', ['bika_setup_catalog', ])
    at.setCatalogsByType('StorageManagement', ['bika_setup_catalog', ])
    at.setCatalogsByType('BiospecType', ['bika_setup_catalog', ])
    at.setCatalogsByType('Multimage', ['bika_setup_catalog', ])
    at.setCatalogsByType('StorageType', ['bika_setup_catalog', ])

    bac = getToolByName(portal, 'bika_analysis_catalog', None)
    # BUGFIX: this guard previously tested `bsc` (always non-None here)
    # instead of the freshly fetched `bac`, so a missing analysis catalog
    # was never detected.
    if bac is None:
        logger.warning('Could not find the bika_analysis_catalog tool.')
        return
def _logTransitionFailure(obj, transition_id):
    """Walk the workflow chain of *obj*, find *transition_id* and log the
    specific reason the transition cannot currently be triggered."""
    wftool = getToolByName(obj, "portal_workflow")
    for wf_id in wftool.getChainFor(obj):
        workflow = wftool.getWorkflowById(wf_id)
        if workflow is None:
            continue
        state = workflow._getWorkflowStateOf(obj)
        if state is None:
            continue
        for candidate_id in state.transitions:
            if candidate_id != transition_id:
                continue
            tdef = workflow.transitions.get(candidate_id, None)
            if not tdef:
                continue
            # Found the transition: report every failing precondition.
            if tdef.trigger_type != TRIGGER_USER_ACTION:
                logger.warning(" Trigger type is not manual")
            if not tdef.actbox_name:
                logger.warning(" No actbox_name set")
            if not workflow._checkTransitionGuard(tdef, obj):
                guard = tdef.guard
                expr = guard.getExprText()
                logger.warning(" Guard failed: {0}".format(expr))
            return
    logger.warning("Transition not found. Check the workflow definition!")
def doActionFor(instance, action_id, idxs=None):
    """Tries to perform the transition to the instance.
    Object is reindexed after the transition takes place, but only if succeeds.
    If idxs is set, only these indexes will be reindexed. Otherwise, will try
    to use the indexes defined in ACTIONS_TO_INDEX mapping if any.
    :param instance: Object to be transitioned
    :param action_id: transition id
    :param idxs: indexes to be reindexed after the transition
    :returns: True if the transition has been performed, together with message
    :rtype: tuple (bool,str)
    """
    if not instance:
        return False, ""
    if isinstance(instance, list):
        # TODO Workflow . Check if this is strictly necessary
        # This check is here because sometimes Plone creates a list
        # from submitted form elements.
        logger.warn("Got a list of obj in doActionFor!")
        if len(instance) > 1:
            logger.warn(
                "doActionFor is getting an instance parameter which is a list "
                "with more than one item. Instance: '{}', action_id: '{}'".
                format(instance, action_id))
        # Recurse with the first element only; the rest are discarded.
        return doActionFor(instance=instance[0], action_id=action_id, idxs=idxs)

    # Since a given transition can cascade or promote to other objects, we want
    # to reindex all objects for which the transition succeed at once, at the
    # end of process. Otherwise, same object will be reindexed multiple times
    # unnecessarily. Also, ActionsHandlerPool ensures the same transition is not
    # applied twice to the same object due to cascade/promote recursions.
    pool = ActionHandlerPool.get_instance()
    if pool.succeed(instance, action_id):
        return False, "Transition {} for {} already done"\
            .format(action_id, instance.getId())

    # Return False if transition is not permitted
    if not isTransitionAllowed(instance, action_id):
        return False, "Transition {} for {} is not allowed"\
            .format(action_id, instance.getId())

    # Add this batch process to the queue
    pool.queue_pool()
    succeed = False
    message = ""
    workflow = getToolByName(instance, "portal_workflow")
    try:
        workflow.doActionFor(instance, action_id)
        succeed = True
    except WorkflowException as e:
        # Transition rejected by the workflow tool: keep succeed=False and
        # surface the reason in the returned message and the logs.
        message = str(e)
        curr_state = getCurrentState(instance)
        clazz_name = instance.__class__.__name__
        logger.warning(
            "Transition '{0}' not allowed: {1} '{2}' ({3})"
            .format(action_id, clazz_name, instance.getId(), curr_state))
        logger.error(message)

    # If no indexes to reindex have been defined, try to use those defined in
    # the ACTIONS_TO_INDEXES mapping. Reindexing only those indexes that might
    # be affected by the transition boosts the overall performance!.
    if idxs is None:
        portal_type = instance.portal_type
        idxs = ACTIONS_TO_INDEXES.get(portal_type, {}).get(action_id, [])

    # Add the current object to the pool and resume
    pool.push(instance, action_id, succeed, idxs=idxs)
    pool.resume()

    return succeed, message
def setupCatalogs(self, portal):
    """Create the Bika catalogs' lexicons, type registrations, indexes and
    metadata columns.

    Runs idempotently: index/column creation failures (e.g. duplicates on
    re-install) are swallowed so the rest of the setup can proceed.

    :param portal: the Plone site root
    """
    # an item should belong to only one catalog.
    # that way looking it up means first looking up *the* catalog
    # in which it is indexed, as well as making it cheaper to index.
    def addIndex(cat, *args):
        # Best effort: the index may already exist on re-install.
        try:
            cat.addIndex(*args)
        except Exception:
            pass

    def addColumn(cat, col):
        # Best effort: the column may already exist on re-install.
        try:
            cat.addColumn(col)
        except Exception:
            pass

    # create lexicon
    wordSplitter = Empty()
    wordSplitter.group = 'Word Splitter'
    wordSplitter.name = 'Unicode Whitespace splitter'
    caseNormalizer = Empty()
    caseNormalizer.group = 'Case Normalizer'
    caseNormalizer.name = 'Unicode Case Normalizer'
    stopWords = Empty()
    stopWords.group = 'Stop Words'
    stopWords.name = 'Remove listed and single char words'
    elem = [wordSplitter, caseNormalizer, stopWords]
    zc_extras = Empty()
    zc_extras.index_type = 'Okapi BM25 Rank'
    zc_extras.lexicon_id = 'Lexicon'

    # bika_catalog
    bc = getToolByName(portal, 'bika_catalog', None)
    if bc is None:
        logger.warning('Could not find the bika_catalog tool.')
        return
    try:
        bc.manage_addProduct['ZCTextIndex'].manage_addLexicon(
            'Lexicon', 'Lexicon', elem)
    except Exception:
        logger.warning('Could not add ZCTextIndex to bika_catalog')
    at = getToolByName(portal, 'archetype_tool')
    at.setCatalogsByType('Batch', ['bika_catalog', 'portal_catalog'])
    # TODO Remove in >v1.3.0
    at.setCatalogsByType('Sample', ['bika_catalog', 'portal_catalog'])
    # TODO Remove in >v1.3.0
    at.setCatalogsByType('SamplePartition', ['bika_catalog', 'portal_catalog'])
    at.setCatalogsByType('ReferenceSample', ['bika_catalog', 'portal_catalog'])
    addIndex(bc, 'path', 'ExtendedPathIndex', 'getPhysicalPath')
    addIndex(bc, 'allowedRolesAndUsers', 'KeywordIndex')
    addIndex(bc, 'UID', 'FieldIndex')
    addIndex(bc, 'SearchableText', 'ZCTextIndex', zc_extras)
    addIndex(bc, 'Title', 'ZCTextIndex', zc_extras)
    addIndex(bc, 'Description', 'ZCTextIndex', zc_extras)
    addIndex(bc, 'id', 'FieldIndex')
    addIndex(bc, 'getId', 'FieldIndex')
    addIndex(bc, 'Type', 'FieldIndex')
    addIndex(bc, 'portal_type', 'FieldIndex')
    addIndex(bc, 'created', 'DateIndex')
    addIndex(bc, 'Creator', 'FieldIndex')
    addIndex(bc, 'getObjPositionInParent', 'GopipIndex')
    addIndex(bc, 'title', 'FieldIndex', 'Title')
    addIndex(bc, 'sortable_title', 'FieldIndex')
    addIndex(bc, 'description', 'FieldIndex', 'Description')
    addIndex(bc, 'review_state', 'FieldIndex')
    addIndex(bc, 'Identifiers', 'KeywordIndex')
    addIndex(bc, 'is_active', 'BooleanIndex')
    addIndex(bc, 'BatchDate', 'DateIndex')
    addIndex(bc, 'getClientTitle', 'FieldIndex')
    addIndex(bc, 'getClientUID', 'FieldIndex')
    addIndex(bc, 'getClientID', 'FieldIndex')
    addIndex(bc, 'getClientBatchID', 'FieldIndex')
    addIndex(bc, 'getDateReceived', 'DateIndex')
    addIndex(bc, 'getDateSampled', 'DateIndex')
    addIndex(bc, 'getDueDate', 'DateIndex')
    addIndex(bc, 'getExpiryDate', 'DateIndex')
    addIndex(bc, 'getReferenceDefinitionUID', 'FieldIndex')
    addIndex(bc, 'getSampleTypeTitle', 'FieldIndex')
    addIndex(bc, 'getSampleTypeUID', 'FieldIndex')
    # https://github.com/senaite/senaite.core/pull/1091
    addIndex(bc, 'getSupportedServices', 'KeywordIndex')
    addIndex(bc, 'getBlank', 'BooleanIndex')
    addIndex(bc, 'isValid', 'BooleanIndex')
    addColumn(bc, 'path')
    addColumn(bc, 'UID')
    addColumn(bc, 'id')
    addColumn(bc, 'getId')
    addColumn(bc, 'Type')
    addColumn(bc, 'portal_type')
    # NOTE(review): 'creator'/'Created' casing differs from the 'Creator'/
    # 'created' indexes above — confirm these metadata names are intended.
    addColumn(bc, 'creator')
    addColumn(bc, 'Created')
    addColumn(bc, 'Title')
    addColumn(bc, 'Description')
    addColumn(bc, 'sortable_title')
    addColumn(bc, 'getClientTitle')
    addColumn(bc, 'getClientID')
    addColumn(bc, 'getClientBatchID')
    addColumn(bc, 'getSampleTypeTitle')
    addColumn(bc, 'getDateReceived')
    addColumn(bc, 'getDateSampled')
    addColumn(bc, 'review_state')

    # bika_setup_catalog
    bsc = getToolByName(portal, 'bika_setup_catalog', None)
    if bsc is None:
        logger.warning('Could not find the setup catalog tool.')
        return
    try:
        bsc.manage_addProduct['ZCTextIndex'].manage_addLexicon(
            'Lexicon', 'Lexicon', elem)
    except Exception:
        logger.warning('Could not add ZCTextIndex to bika_setup_catalog')
    at = getToolByName(portal, 'archetype_tool')
    at.setCatalogsByType('Department', ['bika_setup_catalog', "portal_catalog", ])
    at.setCatalogsByType('Container', ['bika_setup_catalog', ])
    at.setCatalogsByType('ContainerType', ['bika_setup_catalog', ])
    at.setCatalogsByType('AnalysisCategory', ['bika_setup_catalog', ])
    at.setCatalogsByType('AnalysisService', ['bika_setup_catalog', 'portal_catalog'])
    at.setCatalogsByType('AnalysisSpec', ['bika_setup_catalog', ])
    at.setCatalogsByType('SampleCondition', ['bika_setup_catalog'])
    at.setCatalogsByType('SampleMatrix', ['bika_setup_catalog', ])
    at.setCatalogsByType('SampleType', ['bika_setup_catalog', 'portal_catalog'])
    at.setCatalogsByType('SamplePoint', ['bika_setup_catalog', 'portal_catalog'])
    at.setCatalogsByType('StorageLocation', ['bika_setup_catalog', 'portal_catalog'])
    at.setCatalogsByType('SamplingDeviation', ['bika_setup_catalog', ])
    at.setCatalogsByType('IdentifierType', ['bika_setup_catalog', ])
    at.setCatalogsByType('Instrument', ['bika_setup_catalog', 'portal_catalog'])
    at.setCatalogsByType('InstrumentType', ['bika_setup_catalog', 'portal_catalog'])
    at.setCatalogsByType('InstrumentLocation', ['bika_setup_catalog', 'portal_catalog'])
    at.setCatalogsByType('Method', ['bika_setup_catalog', 'portal_catalog'])
    at.setCatalogsByType('Multifile', ['bika_setup_catalog'])
    at.setCatalogsByType('AttachmentType', ['bika_setup_catalog', ])
    at.setCatalogsByType('Attachment', ['portal_catalog'])
    at.setCatalogsByType('Calculation', ['bika_setup_catalog', 'portal_catalog'])
    at.setCatalogsByType('AnalysisProfile', ['bika_setup_catalog', 'portal_catalog'])
    at.setCatalogsByType('ARTemplate', ['bika_setup_catalog', 'portal_catalog'])
    at.setCatalogsByType('LabProduct', ['bika_setup_catalog', 'portal_catalog'])
    at.setCatalogsByType('LabContact', ['bika_setup_catalog', 'portal_catalog'])
    at.setCatalogsByType('Manufacturer', ['bika_setup_catalog', 'portal_catalog'])
    at.setCatalogsByType('Preservation', ['bika_setup_catalog', ])
    at.setCatalogsByType('ReferenceDefinition', ['bika_setup_catalog', 'portal_catalog'])
    at.setCatalogsByType('SRTemplate', ['bika_setup_catalog', 'portal_catalog'])
    at.setCatalogsByType('SubGroup', ['bika_setup_catalog', ])
    at.setCatalogsByType('Supplier', ['bika_setup_catalog', 'portal_catalog'])
    at.setCatalogsByType('Unit', ['bika_setup_catalog', ])
    at.setCatalogsByType('WorksheetTemplate', ['bika_setup_catalog', 'portal_catalog'])
    at.setCatalogsByType('BatchLabel', ['bika_setup_catalog', ])
    addIndex(bsc, 'path', 'ExtendedPathIndex', 'getPhysicalPath')
    addIndex(bsc, 'allowedRolesAndUsers', 'KeywordIndex')
    addIndex(bsc, 'UID', 'FieldIndex')
    addIndex(bsc, 'SearchableText', 'ZCTextIndex', zc_extras)
    addIndex(bsc, 'Title', 'ZCTextIndex', zc_extras)
    addIndex(bsc, 'Description', 'ZCTextIndex', zc_extras)
    addIndex(bsc, 'id', 'FieldIndex')
    addIndex(bsc, 'getId', 'FieldIndex')
    addIndex(bsc, 'Type', 'FieldIndex')
    addIndex(bsc, 'portal_type', 'FieldIndex')
    addIndex(bsc, 'created', 'DateIndex')
    addIndex(bsc, 'Creator', 'FieldIndex')
    addIndex(bsc, 'getObjPositionInParent', 'GopipIndex')
    # BUGFIX: this line previously read addIndex(bc, ...), which re-added
    # 'Identifiers' to bika_catalog and never created it on
    # bika_setup_catalog.
    addIndex(bsc, 'Identifiers', 'KeywordIndex')
    addIndex(bsc, 'title', 'FieldIndex', 'Title')
    addIndex(bsc, 'sortable_title', 'FieldIndex')
    addIndex(bsc, 'description', 'FieldIndex', 'Description')
    addIndex(bsc, 'review_state', 'FieldIndex')
    addIndex(bsc, 'getAccredited', 'FieldIndex')
    addIndex(bsc, 'getAnalyst', 'FieldIndex')
    addIndex(bsc, 'getBlank', 'FieldIndex')
    addIndex(bsc, 'getCalculationTitle', 'FieldIndex')
    addIndex(bsc, 'getCalculationUID', 'FieldIndex')
    addIndex(bsc, 'getCalibrationExpiryDate', 'FieldIndex')
    addIndex(bsc, 'getCategoryTitle', 'FieldIndex')
    addIndex(bsc, 'getCategoryUID', 'FieldIndex')
    addIndex(bsc, 'getClientUID', 'FieldIndex')
    addIndex(bsc, 'getDepartmentTitle', 'FieldIndex')
    addIndex(bsc, 'getDocumentID', 'FieldIndex')
    addIndex(bsc, 'getDuplicateVariation', 'FieldIndex')
    addIndex(bsc, 'getFormula', 'FieldIndex')
    addIndex(bsc, 'getFullname', 'FieldIndex')
    addIndex(bsc, 'getHazardous', 'FieldIndex')
    addIndex(bsc, 'getInstrumentLocationName', 'FieldIndex')
    addIndex(bsc, 'getInstrumentTitle', 'FieldIndex')
    addIndex(bsc, 'getInstrumentType', 'FieldIndex')
    addIndex(bsc, 'getInstrumentTypeName', 'FieldIndex')
    addIndex(bsc, 'getKeyword', 'FieldIndex')
    addIndex(bsc, 'getManagerEmail', 'FieldIndex')
    addIndex(bsc, 'getManagerName', 'FieldIndex')
    addIndex(bsc, 'getManagerPhone', 'FieldIndex')
    addIndex(bsc, 'getMaxTimeAllowed', 'FieldIndex')
    addIndex(bsc, 'getMethodID', 'FieldIndex')
    addIndex(bsc, 'getAvailableMethodUIDs', 'KeywordIndex')
    addIndex(bsc, 'getModel', 'FieldIndex')
    addIndex(bsc, 'getName', 'FieldIndex')
    addIndex(bsc, 'getPointOfCapture', 'FieldIndex')
    addIndex(bsc, 'getPrice', 'FieldIndex')
    addIndex(bsc, 'getSamplePointTitle', 'KeywordIndex')
    addIndex(bsc, 'getSamplePointUID', 'FieldIndex')
    addIndex(bsc, 'getSampleTypeTitle', 'FieldIndex')
    addIndex(bsc, 'getSampleTypeTitles', 'KeywordIndex')
    addIndex(bsc, 'getSampleTypeUID', 'FieldIndex')
    addIndex(bsc, 'getServiceUID', 'FieldIndex')
    addIndex(bsc, 'getServiceUIDs', 'KeywordIndex')
    addIndex(bsc, 'getTotalPrice', 'FieldIndex')
    addIndex(bsc, 'getUnit', 'FieldIndex')
    addIndex(bsc, 'getVATAmount', 'FieldIndex')
    addIndex(bsc, 'getVolume', 'FieldIndex')
    addIndex(bsc, 'is_active', 'BooleanIndex')
    addColumn(bsc, 'path')
    addColumn(bsc, 'UID')
    addColumn(bsc, 'id')
    addColumn(bsc, 'getId')
    addColumn(bsc, 'Type')
    addColumn(bsc, 'portal_type')
    addColumn(bsc, 'getObjPositionInParent')
    addColumn(bsc, 'Title')
    addColumn(bsc, 'Description')
    addColumn(bsc, 'title')
    addColumn(bsc, 'sortable_title')
    addColumn(bsc, 'description')
    addColumn(bsc, 'review_state')
    addColumn(bsc, 'getAccredited')
    addColumn(bsc, 'getInstrumentType')
    addColumn(bsc, 'getInstrumentTypeName')
    addColumn(bsc, 'getInstrumentLocationName')
    addColumn(bsc, 'getBlank')
    addColumn(bsc, 'getCalculationTitle')
    addColumn(bsc, 'getCalculationUID')
    addColumn(bsc, 'getCalibrationExpiryDate')
    addColumn(bsc, 'getCategoryTitle')
    addColumn(bsc, 'getCategoryUID')
    addColumn(bsc, 'getClientUID')
    addColumn(bsc, 'getDepartmentTitle')
    addColumn(bsc, 'getDuplicateVariation')
    addColumn(bsc, 'getFormula')
    addColumn(bsc, 'getFullname')
    addColumn(bsc, 'getHazardous')
    addColumn(bsc, 'getInstrumentTitle')
    addColumn(bsc, 'getKeyword')
    addColumn(bsc, 'getManagerName')
    addColumn(bsc, 'getManagerPhone')
    addColumn(bsc, 'getManagerEmail')
    addColumn(bsc, 'getMaxTimeAllowed')
    addColumn(bsc, 'getModel')
    addColumn(bsc, 'getName')
    addColumn(bsc, 'getPointOfCapture')
    addColumn(bsc, 'getPrice')
    addColumn(bsc, 'getSamplePointTitle')
    addColumn(bsc, 'getSamplePointUID')
    addColumn(bsc, 'getSampleTypeTitle')
    addColumn(bsc, 'getSampleTypeUID')
    addColumn(bsc, 'getServiceUID')
    addColumn(bsc, 'getTotalPrice')
    addColumn(bsc, 'getUnit')
    addColumn(bsc, 'getVATAmount')
    addColumn(bsc, 'getVolume')

    # portal_catalog
    pc = getToolByName(portal, 'portal_catalog', None)
    if pc is None:
        logger.warning('Could not find the portal_catalog tool.')
        return
    addIndex(pc, 'Analyst', 'FieldIndex')
    addColumn(pc, 'Analyst')
    # TODO: Nmrl
    addColumn(pc, 'getProvince')
    addColumn(pc, 'getDistrict')

    # Setting up all LIMS catalogs defined in catalog folder
    setup_catalogs(portal, getCatalogDefinitions())
def addColumn(cat, col):
    """Add metadata column *col* to catalog *cat*, logging (not raising)
    on failure so catalog setup can continue past duplicates.

    :param cat: a ZCatalog-like tool providing addColumn()
    :param col: the metadata column name to register
    """
    try:
        cat.addColumn(col)
    except Exception:
        # BUGFIX: narrowed the bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed; any catalog error is still only logged.
        logger.warning("Could not create metadata %s in catalog %s" % (col, cat))
def addIndex(cat, *args):
    """Add an index to catalog *cat*, logging (not raising) on failure so
    catalog setup can continue past duplicates.

    :param cat: a ZCatalog-like tool providing addIndex()
    :param args: positional arguments forwarded to cat.addIndex()
                 (index name, index type, optional extras)
    """
    try:
        cat.addIndex(*args)
    except Exception:
        # BUGFIX: narrowed the bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed; any catalog error is still only logged.
        logger.warning("Could not create index %s in catalog %s" % (args, cat))
def setupCatalogs(self, portal):
    """Wire content types to their catalogs and ensure all indexes and
    metadata columns exist in bika_analysis_catalog, bika_catalog and
    bika_setup_catalog.

    :param portal: the Plone portal object used to look up tools

    Fixes applied in this revision:
    - ``addIndex(bc, 'Identifiers', ...)`` in the bika_setup_catalog
      section targeted the wrong catalog (``bc`` instead of ``bsc``).
    - ``'KeywordsIndex'`` was a typo for ``'KeywordIndex'``; the error
      was silently swallowed by the bare except in the helper.
    - The nested helpers now log failures (consistent with the sibling
      implementation of these helpers) instead of passing silently.
    - Exact duplicate addIndex/addColumn calls removed.
    """
    # an item should belong to only one catalog.
    # that way looking it up means first looking up *the* catalog
    # in which it is indexed, as well as making it cheaper to index.

    def addIndex(cat, *args):
        # Best-effort: an already-existing index raises; setup continues.
        try:
            cat.addIndex(*args)
        except Exception:
            logger.warning("Could not create index %s in catalog %s"
                           % (args, cat))

    def addColumn(cat, col):
        # Best-effort: an already-existing column raises; setup continues.
        try:
            cat.addColumn(col)
        except Exception:
            logger.warning("Could not create metadata %s in catalog %s"
                           % (col, cat))

    # create lexicon (required before any ZCTextIndex can be added)
    wordSplitter = Empty()
    wordSplitter.group = 'Word Splitter'
    wordSplitter.name = 'Unicode Whitespace splitter'
    caseNormalizer = Empty()
    caseNormalizer.group = 'Case Normalizer'
    caseNormalizer.name = 'Unicode Case Normalizer'
    stopWords = Empty()
    stopWords.group = 'Stop Words'
    stopWords.name = 'Remove listed and single char words'
    elem = [wordSplitter, caseNormalizer, stopWords]
    zc_extras = Empty()
    zc_extras.index_type = 'Okapi BM25 Rank'
    zc_extras.lexicon_id = 'Lexicon'

    # bika_analysis_catalog
    bac = getToolByName(portal, 'bika_analysis_catalog', None)
    if bac is None:
        logger.warning('Could not find the bika_analysis_catalog tool.')
        return
    try:
        bac.manage_addProduct['ZCTextIndex'].manage_addLexicon(
            'Lexicon', 'Lexicon', elem)
    except Exception:
        logger.warning('Could not add ZCTextIndex to bika_analysis_catalog')
    at = getToolByName(portal, 'archetype_tool')
    at.setCatalogsByType('Analysis', ['bika_analysis_catalog'])
    at.setCatalogsByType('ReferenceAnalysis', ['bika_analysis_catalog'])
    at.setCatalogsByType('DuplicateAnalysis', ['bika_analysis_catalog'])
    # NOTE(review): ('getPhysicalPath') is just the string, not a tuple;
    # kept as-is to match historical usage — confirm before changing.
    addIndex(bac, 'path', 'ExtendedPathIndex', ('getPhysicalPath'))
    addIndex(bac, 'allowedRolesAndUsers', 'KeywordIndex')
    addIndex(bac, 'UID', 'FieldIndex')
    addIndex(bac, 'Title', 'FieldIndex')
    addIndex(bac, 'Description', 'ZCTextIndex', zc_extras)
    addIndex(bac, 'id', 'FieldIndex')
    addIndex(bac, 'Type', 'FieldIndex')
    addIndex(bac, 'portal_type', 'FieldIndex')
    addIndex(bac, 'created', 'DateIndex')
    addIndex(bac, 'Creator', 'FieldIndex')
    addIndex(bac, 'title', 'FieldIndex', 'Title')
    addIndex(bac, 'sortable_title', 'FieldIndex')
    addIndex(bac, 'description', 'FieldIndex', 'Description')
    addIndex(bac, 'review_state', 'FieldIndex')
    addIndex(bac, 'worksheetanalysis_review_state', 'FieldIndex')
    addIndex(bac, 'cancellation_state', 'FieldIndex')
    addIndex(bac, 'getDepartmentUID', 'KeywordIndex')
    addIndex(bac, 'getDueDate', 'DateIndex')
    addIndex(bac, 'getDateSampled', 'DateIndex')
    addIndex(bac, 'getDateReceived', 'DateIndex')
    addIndex(bac, 'getResultCaptureDate', 'DateIndex')
    addIndex(bac, 'getDateAnalysisPublished', 'DateIndex')
    addIndex(bac, 'getClientUID', 'FieldIndex')
    addIndex(bac, 'getAnalyst', 'FieldIndex')
    addIndex(bac, 'getClientTitle', 'FieldIndex')
    addIndex(bac, 'getRequestID', 'FieldIndex')
    addIndex(bac, 'getClientOrderNumber', 'FieldIndex')
    addIndex(bac, 'getKeyword', 'FieldIndex')
    addIndex(bac, 'getServiceTitle', 'FieldIndex')
    addIndex(bac, 'getServiceUID', 'FieldIndex')
    addIndex(bac, 'getCategoryUID', 'FieldIndex')
    addIndex(bac, 'getCategoryTitle', 'FieldIndex')
    addIndex(bac, 'getPointOfCapture', 'FieldIndex')
    addIndex(bac, 'getSampleTypeUID', 'FieldIndex')
    addIndex(bac, 'getSamplePointUID', 'FieldIndex')
    # was 'KeywordsIndex' (typo, invalid index type)
    addIndex(bac, 'getRawSamplePoints', 'KeywordIndex')
    addIndex(bac, 'getRawSampleTypes', 'KeywordIndex')
    addIndex(bac, 'getRetested', 'FieldIndex')
    addIndex(bac, 'getReferenceAnalysesGroupID', 'FieldIndex')
    addColumn(bac, 'path')
    addColumn(bac, 'UID')
    addColumn(bac, 'id')
    addColumn(bac, 'Type')
    addColumn(bac, 'portal_type')
    addColumn(bac, 'getObjPositionInParent')
    addColumn(bac, 'Title')
    addColumn(bac, 'Description')
    addColumn(bac, 'title')
    addColumn(bac, 'sortable_title')
    addColumn(bac, 'description')
    addColumn(bac, 'review_state')
    addColumn(bac, 'cancellation_state')
    addColumn(bac, 'getRequestID')
    addColumn(bac, 'getReferenceAnalysesGroupID')
    addColumn(bac, 'getResultCaptureDate')
    addColumn(bac, 'Priority')

    # bika_catalog
    bc = getToolByName(portal, 'bika_catalog', None)
    if bc is None:
        logger.warning('Could not find the bika_catalog tool.')
        return
    try:
        bc.manage_addProduct['ZCTextIndex'].manage_addLexicon(
            'Lexicon', 'Lexicon', elem)
    except Exception:
        logger.warning('Could not add ZCTextIndex to bika_catalog')
    at = getToolByName(portal, 'archetype_tool')
    at.setCatalogsByType('Batch', ['bika_catalog', 'portal_catalog'])
    at.setCatalogsByType('AnalysisRequest', ['bika_catalog', 'portal_catalog'])
    at.setCatalogsByType('Sample', ['bika_catalog', 'portal_catalog'])
    at.setCatalogsByType('SamplePartition', ['bika_catalog', 'portal_catalog'])
    at.setCatalogsByType('ReferenceSample', ['bika_catalog', 'portal_catalog'])
    at.setCatalogsByType('Report', ['bika_catalog', ])
    at.setCatalogsByType('Worksheet', ['bika_catalog', 'portal_catalog'])
    addIndex(bc, 'path', 'ExtendedPathIndex', ('getPhysicalPath'))
    addIndex(bc, 'allowedRolesAndUsers', 'KeywordIndex')
    addIndex(bc, 'UID', 'FieldIndex')
    addIndex(bc, 'SearchableText', 'ZCTextIndex', zc_extras)
    addIndex(bc, 'Title', 'ZCTextIndex', zc_extras)
    addIndex(bc, 'Description', 'ZCTextIndex', zc_extras)
    addIndex(bc, 'id', 'FieldIndex')
    addIndex(bc, 'getId', 'FieldIndex')
    addIndex(bc, 'Type', 'FieldIndex')
    addIndex(bc, 'portal_type', 'FieldIndex')
    addIndex(bc, 'created', 'DateIndex')
    addIndex(bc, 'Creator', 'FieldIndex')
    addIndex(bc, 'getObjPositionInParent', 'GopipIndex')
    addIndex(bc, 'title', 'FieldIndex', 'Title')
    addIndex(bc, 'sortable_title', 'FieldIndex')
    addIndex(bc, 'description', 'FieldIndex', 'Description')
    addIndex(bc, 'review_state', 'FieldIndex')
    addIndex(bc, 'inactive_state', 'FieldIndex')
    addIndex(bc, 'worksheetanalysis_review_state', 'FieldIndex')
    addIndex(bc, 'cancellation_state', 'FieldIndex')
    addIndex(bc, 'Identifiers', 'KeywordIndex')
    addIndex(bc, 'getDepartmentUIDs', 'KeywordIndex')
    addIndex(bc, 'getAnalysisCategory', 'KeywordIndex')
    addIndex(bc, 'getAnalysisService', 'KeywordIndex')
    addIndex(bc, 'getAnalyst', 'FieldIndex')
    addIndex(bc, 'getAnalysts', 'KeywordIndex')
    addIndex(bc, 'BatchDate', 'DateIndex')
    addIndex(bc, 'getClientOrderNumber', 'FieldIndex')
    addIndex(bc, 'getClientReference', 'FieldIndex')
    addIndex(bc, 'getClientSampleID', 'FieldIndex')
    addIndex(bc, 'getClientTitle', 'FieldIndex')
    addIndex(bc, 'getClientUID', 'FieldIndex')
    addIndex(bc, 'getContactTitle', 'FieldIndex')
    addIndex(bc, 'getDateDisposed', 'DateIndex')
    addIndex(bc, 'getDateExpired', 'DateIndex')
    addIndex(bc, 'getDateOpened', 'DateIndex')
    addIndex(bc, 'getDatePublished', 'DateIndex')
    addIndex(bc, 'getDateReceived', 'DateIndex')
    addIndex(bc, 'getDateSampled', 'DateIndex')
    addIndex(bc, 'getDisposalDate', 'DateIndex')
    addIndex(bc, 'getDueDate', 'DateIndex')
    addIndex(bc, 'getExpiryDate', 'DateIndex')
    addIndex(bc, 'getInvoiced', 'FieldIndex')
    addIndex(bc, 'getPreserver', 'FieldIndex')
    addIndex(bc, 'getProfilesTitle', 'FieldIndex')
    addIndex(bc, 'getReferenceDefinitionUID', 'FieldIndex')
    addIndex(bc, 'getRequestID', 'FieldIndex')
    addIndex(bc, 'getSampleID', 'FieldIndex')
    addIndex(bc, 'getSamplePointTitle', 'FieldIndex')
    addIndex(bc, 'getSamplePointUID', 'FieldIndex')
    addIndex(bc, 'getSampler', 'FieldIndex')
    addIndex(bc, 'getScheduledSamplingSampler', 'FieldIndex')
    addIndex(bc, 'getSampleTypeTitle', 'FieldIndex')
    addIndex(bc, 'getSampleTypeUID', 'FieldIndex')
    addIndex(bc, 'getSampleUID', 'FieldIndex')
    addIndex(bc, 'getSamplingDate', 'DateIndex')
    addIndex(bc, 'getServiceTitle', 'FieldIndex')
    addIndex(bc, 'getWorksheetTemplateTitle', 'FieldIndex')
    addIndex(bc, 'Priority', 'FieldIndex')
    addIndex(bc, 'BatchUID', 'FieldIndex')
    addColumn(bc, 'path')
    addColumn(bc, 'UID')
    addColumn(bc, 'id')
    addColumn(bc, 'Type')
    addColumn(bc, 'portal_type')
    addColumn(bc, 'creator')
    addColumn(bc, 'Created')
    addColumn(bc, 'Title')
    addColumn(bc, 'Description')
    addColumn(bc, 'sortable_title')
    addColumn(bc, 'review_state')
    addColumn(bc, 'inactive_state')
    addColumn(bc, 'cancellation_state')
    addColumn(bc, 'getAnalysts')
    addColumn(bc, 'getSampleID')
    addColumn(bc, 'getRequestID')
    addColumn(bc, 'getClientOrderNumber')
    addColumn(bc, 'getClientReference')
    addColumn(bc, 'getClientSampleID')
    addColumn(bc, 'getContactTitle')
    addColumn(bc, 'getClientTitle')
    addColumn(bc, 'getProfilesTitle')
    addColumn(bc, 'getSamplePointTitle')
    addColumn(bc, 'getSampleTypeTitle')
    addColumn(bc, 'getAnalysisCategory')
    addColumn(bc, 'getAnalysisService')
    addColumn(bc, 'getDatePublished')
    addColumn(bc, 'getDateReceived')
    addColumn(bc, 'getDateSampled')

    # bika_setup_catalog
    bsc = getToolByName(portal, 'bika_setup_catalog', None)
    if bsc is None:
        logger.warning('Could not find the setup catalog tool.')
        return
    try:
        bsc.manage_addProduct['ZCTextIndex'].manage_addLexicon(
            'Lexicon', 'Lexicon', elem)
    except Exception:
        logger.warning('Could not add ZCTextIndex to bika_setup_catalog')
    at = getToolByName(portal, 'archetype_tool')
    at.setCatalogsByType('Department', ['bika_setup_catalog', "portal_catalog", ])
    at.setCatalogsByType('Container', ['bika_setup_catalog', ])
    at.setCatalogsByType('ContainerType', ['bika_setup_catalog', ])
    at.setCatalogsByType('AnalysisCategory', ['bika_setup_catalog', ])
    at.setCatalogsByType('AnalysisService', ['bika_setup_catalog', 'portal_catalog'])
    at.setCatalogsByType('AnalysisSpec', ['bika_setup_catalog', ])
    at.setCatalogsByType('SampleCondition', ['bika_setup_catalog'])
    at.setCatalogsByType('SampleMatrix', ['bika_setup_catalog', ])
    at.setCatalogsByType('SampleType', ['bika_setup_catalog', 'portal_catalog'])
    at.setCatalogsByType('SamplePoint', ['bika_setup_catalog', 'portal_catalog'])
    at.setCatalogsByType('StorageLocation', ['bika_setup_catalog', 'portal_catalog'])
    at.setCatalogsByType('SamplingDeviation', ['bika_setup_catalog', ])
    at.setCatalogsByType('IdentifierType', ['bika_setup_catalog', ])
    at.setCatalogsByType('Instrument', ['bika_setup_catalog', 'portal_catalog'])
    at.setCatalogsByType('InstrumentType', ['bika_setup_catalog', 'portal_catalog'])
    at.setCatalogsByType('InstrumentLocation', ['bika_setup_catalog', 'portal_catalog'])
    at.setCatalogsByType('Method', ['bika_setup_catalog', 'portal_catalog'])
    at.setCatalogsByType('Multifile', ['bika_setup_catalog'])
    at.setCatalogsByType('AttachmentType', ['bika_setup_catalog', ])
    at.setCatalogsByType('Calculation', ['bika_setup_catalog', 'portal_catalog'])
    at.setCatalogsByType('AnalysisProfile', ['bika_setup_catalog', 'portal_catalog'])
    at.setCatalogsByType('ARTemplate', ['bika_setup_catalog', 'portal_catalog'])
    at.setCatalogsByType('LabProduct', ['bika_setup_catalog', 'portal_catalog'])
    at.setCatalogsByType('LabContact', ['bika_setup_catalog', 'portal_catalog'])
    at.setCatalogsByType('Manufacturer', ['bika_setup_catalog', 'portal_catalog'])
    at.setCatalogsByType('Preservation', ['bika_setup_catalog', ])
    at.setCatalogsByType('ReferenceDefinition', ['bika_setup_catalog', 'portal_catalog'])
    at.setCatalogsByType('SRTemplate', ['bika_setup_catalog', 'portal_catalog'])
    at.setCatalogsByType('SubGroup', ['bika_setup_catalog', ])
    at.setCatalogsByType('Supplier', ['bika_setup_catalog', 'portal_catalog'])
    at.setCatalogsByType('Unit', ['bika_setup_catalog', ])
    at.setCatalogsByType('WorksheetTemplate', ['bika_setup_catalog', 'portal_catalog'])
    at.setCatalogsByType('BatchLabel', ['bika_setup_catalog', ])
    at.setCatalogsByType('ARPriority', ['bika_setup_catalog', ])
    addIndex(bsc, 'path', 'ExtendedPathIndex', ('getPhysicalPath'))
    addIndex(bsc, 'allowedRolesAndUsers', 'KeywordIndex')
    addIndex(bsc, 'UID', 'FieldIndex')
    addIndex(bsc, 'SearchableText', 'ZCTextIndex', zc_extras)
    addIndex(bsc, 'Title', 'ZCTextIndex', zc_extras)
    addIndex(bsc, 'Description', 'ZCTextIndex', zc_extras)
    addIndex(bsc, 'id', 'FieldIndex')
    addIndex(bsc, 'getId', 'FieldIndex')
    addIndex(bsc, 'Type', 'FieldIndex')
    addIndex(bsc, 'portal_type', 'FieldIndex')
    addIndex(bsc, 'created', 'DateIndex')
    addIndex(bsc, 'Creator', 'FieldIndex')
    addIndex(bsc, 'getObjPositionInParent', 'GopipIndex')
    # was addIndex(bc, ...) — wrong catalog; bc already has this index
    addIndex(bsc, 'Identifiers', 'KeywordIndex')
    addIndex(bsc, 'title', 'FieldIndex', 'Title')
    addIndex(bsc, 'sortable_title', 'FieldIndex')
    addIndex(bsc, 'description', 'FieldIndex', 'Description')
    addIndex(bsc, 'review_state', 'FieldIndex')
    addIndex(bsc, 'inactive_state', 'FieldIndex')
    addIndex(bsc, 'cancellation_state', 'FieldIndex')
    addIndex(bsc, 'getAccredited', 'FieldIndex')
    addIndex(bsc, 'getAnalyst', 'FieldIndex')
    addIndex(bsc, 'getInstrumentType', 'FieldIndex')
    addIndex(bsc, 'getInstrumentTypeName', 'FieldIndex')
    addIndex(bsc, 'getInstrumentLocationName', 'FieldIndex')
    addIndex(bsc, 'getBlank', 'FieldIndex')
    addIndex(bsc, 'getCalculationTitle', 'FieldIndex')
    addIndex(bsc, 'getCalculationUID', 'FieldIndex')
    addIndex(bsc, 'getCalibrationExpiryDate', 'FieldIndex')
    addIndex(bsc, 'getCategoryTitle', 'FieldIndex')
    addIndex(bsc, 'getCategoryUID', 'FieldIndex')
    addIndex(bsc, 'getClientUID', 'FieldIndex')
    addIndex(bsc, 'getDepartmentTitle', 'FieldIndex')
    addIndex(bsc, 'getDuplicateVariation', 'FieldIndex')
    addIndex(bsc, 'getFormula', 'FieldIndex')
    addIndex(bsc, 'getFullname', 'FieldIndex')
    addIndex(bsc, 'getHazardous', 'FieldIndex')
    addIndex(bsc, 'getInstrumentTitle', 'FieldIndex')
    addIndex(bsc, 'getKeyword', 'FieldIndex')
    addIndex(bsc, 'getManagerName', 'FieldIndex')
    addIndex(bsc, 'getManagerPhone', 'FieldIndex')
    addIndex(bsc, 'getManagerEmail', 'FieldIndex')
    addIndex(bsc, 'getMaxTimeAllowed', 'FieldIndex')
    addIndex(bsc, 'getModel', 'FieldIndex')
    addIndex(bsc, 'getName', 'FieldIndex')
    addIndex(bsc, 'getPointOfCapture', 'FieldIndex')
    addIndex(bsc, 'getPrice', 'FieldIndex')
    addIndex(bsc, 'getSamplePointTitle', 'KeywordIndex')
    addIndex(bsc, 'getSamplePointUID', 'FieldIndex')
    addIndex(bsc, 'getSampleTypeTitle', 'KeywordIndex')
    addIndex(bsc, 'getSampleTypeUID', 'FieldIndex')
    addIndex(bsc, 'getServiceTitle', 'FieldIndex')
    addIndex(bsc, 'getServiceUID', 'FieldIndex')
    addIndex(bsc, 'getTotalPrice', 'FieldIndex')
    addIndex(bsc, 'getUnit', 'FieldIndex')
    addIndex(bsc, 'getVATAmount', 'FieldIndex')
    addIndex(bsc, 'getVolume', 'FieldIndex')
    addIndex(bsc, 'sortKey', 'FieldIndex')
    addIndex(bsc, 'getMethodID', 'FieldIndex')
    addIndex(bsc, 'getDocumentID', 'FieldIndex')
    addIndex(bsc, 'getMethodUIDs', 'KeywordIndex')
    addColumn(bsc, 'path')
    addColumn(bsc, 'UID')
    addColumn(bsc, 'id')
    addColumn(bsc, 'getId')
    addColumn(bsc, 'Type')
    addColumn(bsc, 'portal_type')
    addColumn(bsc, 'getObjPositionInParent')
    addColumn(bsc, 'Title')
    addColumn(bsc, 'Description')
    addColumn(bsc, 'title')
    addColumn(bsc, 'sortable_title')
    addColumn(bsc, 'description')
    addColumn(bsc, 'review_state')
    addColumn(bsc, 'inactive_state')
    addColumn(bsc, 'cancellation_state')
    addColumn(bsc, 'getAccredited')
    addColumn(bsc, 'getInstrumentType')
    addColumn(bsc, 'getInstrumentTypeName')
    addColumn(bsc, 'getInstrumentLocationName')
    addColumn(bsc, 'getBlank')
    addColumn(bsc, 'getCalculationTitle')
    addColumn(bsc, 'getCalculationUID')
    addColumn(bsc, 'getCalibrationExpiryDate')
    addColumn(bsc, 'getCategoryTitle')
    addColumn(bsc, 'getCategoryUID')
    addColumn(bsc, 'getClientUID')
    addColumn(bsc, 'getDepartmentTitle')
    addColumn(bsc, 'getDuplicateVariation')
    addColumn(bsc, 'getFormula')
    addColumn(bsc, 'getFullname')
    addColumn(bsc, 'getHazardous')
    addColumn(bsc, 'getInstrumentTitle')
    addColumn(bsc, 'getKeyword')
    addColumn(bsc, 'getManagerName')
    addColumn(bsc, 'getManagerPhone')
    addColumn(bsc, 'getManagerEmail')
    addColumn(bsc, 'getMaxTimeAllowed')
    addColumn(bsc, 'getModel')
    addColumn(bsc, 'getName')
    addColumn(bsc, 'getPointOfCapture')
    addColumn(bsc, 'getPrice')
    addColumn(bsc, 'getSamplePointTitle')
    addColumn(bsc, 'getSamplePointUID')
    addColumn(bsc, 'getSampleTypeTitle')
    addColumn(bsc, 'getSampleTypeUID')
    addColumn(bsc, 'getServiceTitle')
    addColumn(bsc, 'getServiceUID')
    addColumn(bsc, 'getTotalPrice')
    addColumn(bsc, 'getUnit')
    addColumn(bsc, 'getVATAmount')
    addColumn(bsc, 'getVolume')
def addDuplicateAnalyses(self, src_slot, dest_slot):
    """Add duplicate analyses to this worksheet.

    Duplicates every suitable analysis found in worksheet slot
    `src_slot` into slot `dest_slot`.  Reference analyses, retracted
    analyses, analyses already duplicated into the destination slot,
    and services with dependent services are skipped.

    :param src_slot: layout position holding the source analyses
    :param dest_slot: target position; '' or 'new' allocates a fresh
                      slot after the highest occupied position
    """
    rc = getToolByName(self, REFERENCE_CATALOG)
    workflow = getToolByName(self, 'portal_workflow')
    layout = self.getLayout()
    wst = self.getWorksheetTemplate()
    wstlayout = wst and wst.getLayout() or []
    # Allocate a brand-new slot after every occupied position when the
    # caller did not specify a destination.
    if not dest_slot or dest_slot == 'new':
        highest_existing_position = len(wstlayout)
        for pos in [int(slot['position']) for slot in layout]:
            if pos > highest_existing_position:
                highest_existing_position = pos
        dest_slot = highest_existing_position + 1
    src_analyses = [rc.lookupObject(slot['analysis_uid'])
                    for slot in layout
                    if int(slot['position']) == int(src_slot)]
    # UIDs of the source analyses already duplicated into dest_slot
    dest_analyses = [rc.lookupObject(slot['analysis_uid']).getAnalysis().UID()
                     for slot in layout
                     if int(slot['position']) == int(dest_slot)]
    refgid = None
    processed = []
    for analysis in src_analyses:
        if analysis.UID() in dest_analyses:
            continue
        if analysis.portal_type == 'ReferenceAnalysis':
            logger.warning('Cannot create duplicate analysis from '
                           'ReferenceAnalysis at {}'.format(analysis))
            continue
        # If retracted analyses, for some reason, the getLayout() returns
        # two times the regular analysis generated automatically after a
        # retraction.
        if analysis.UID() in processed:
            continue
        # Omit retracted analyses
        # https://jira.bikalabs.com/browse/LIMS-1745
        # https://jira.bikalabs.com/browse/LIMS-2001
        if workflow.getInfoFor(analysis, "review_state") == 'retracted':
            continue
        processed.append(analysis.UID())
        # services with dependents don't belong in duplicates
        service = analysis.getService()
        calc = service.getCalculation()
        if calc and calc.getDependentServices():
            continue
        _id = self._findUniqueId(service.getKeyword())
        duplicate = _createObjectByType("DuplicateAnalysis", self, _id)
        duplicate.setAnalysis(analysis)
        # Set the required number of verifications
        reqvers = analysis.getNumberOfRequiredVerifications()
        duplicate.setNumberOfRequiredVerifications(reqvers)
        # Set ReferenceAnalysesGroupID (same id for the analyses from
        # the same Reference Sample and same Worksheet)
        if not refgid:
            prefix = analysis.aq_parent.getSample().id
            dups = []
            for an in self.getAnalyses():
                if an.portal_type == 'DuplicateAnalysis' \
                        and hasattr(an.aq_parent, 'getSample') \
                        and an.aq_parent.getSample().id == prefix:
                    dups.append(an.getReferenceAnalysesGroupID())
            dups = list(set(dups))
            postfix = dups and len(dups) + 1 or 1
            postfix = str(postfix).zfill(2)
            refgid = '%s-D%s' % (prefix, postfix)
        duplicate.setReferenceAnalysesGroupID(refgid)
        duplicate.reindexObject(idxs=["getReferenceAnalysesGroupID"])
        duplicate.processForm()
        if calc:
            duplicate.setInterimFields(calc.getInterimFields())
        self.setLayout(
            self.getLayout() + [{'position': dest_slot,
                                 'type': 'd',
                                 'container_uid': analysis.aq_parent.UID(),
                                 'analysis_uid': duplicate.UID()}, ]
        )
        self.setAnalyses(self.getAnalyses() + [duplicate, ])
        workflow.doActionFor(duplicate, 'assign')
def get_workflow_actions(self):
    """ Compile a list of possible workflow transitions for items
        in this Table.

        Returns a list of transition dicts, restricted/ordered and
        filtered through the active review_state definition, with
        translated titles and blanked URLs.
    """
    # cbb return empty list if we are unable to select items
    if not self.show_select_column:
        return []
    workflow = getToolByName(self.context, 'portal_workflow')
    # check POST for a specified review_state selection
    selected_state = self.request.get("%s_review_state" % self.form_id,
                                      'default')
    # get review_state id=selected_state
    states = [r for r in self.review_states if r['id'] == selected_state]
    self.review_state = states and states[0] \
        or self.review_states[0]
    # get all transitions for all items.
    transitions = {}
    actions = []
    for obj in [i.get('obj', '') for i in self.items]:
        obj = hasattr(obj, 'getObject') and obj.getObject() or obj
        for it in workflow.getTransitionsFor(obj):
            transitions[it['id']] = it
    # the list is restricted to and ordered by these transitions.
    if 'transitions' in self.review_state:
        for transition_dict in self.review_state['transitions']:
            if transition_dict['id'] in transitions:
                actions.append(transitions[transition_dict['id']])
    else:
        actions = transitions.values()
    new_actions = []
    # remove any invalid items with a warning
    for a, action in enumerate(actions):
        if isinstance(action, dict) \
                and 'id' in action:
            new_actions.append(action)
        else:
            logger.warning("bad action in custom_actions: %s. (complete list: %s)." % (action, actions))
    # BUGFIX: the filtered list was built but never used, so invalid
    # actions were warned about yet still returned; the sibling
    # implementation confirms this assignment was intended.
    actions = new_actions
    # and these are removed
    if 'hide_transitions' in self.review_state:
        actions = [a for a in actions
                   if a['id'] not in self.review_state['hide_transitions']]
    # cheat: until workflow_action is abolished, all URLs defined in
    # GS workflow setup will be ignored, and the default will apply.
    # (that means, WorkflowAction-bound URL is called).
    for i, action in enumerate(actions):
        actions[i]['url'] = ''
    # if there is a self.review_state['some_state']['custom_actions']
    # attribute on the BikaListingView, add these actions to the list.
    if 'custom_actions' in self.review_state:
        for action in self.review_state['custom_actions']:
            if isinstance(action, dict) \
                    and 'id' in action:
                actions.append(action)
    for a, action in enumerate(actions):
        actions[a]['title'] = t(PMF(actions[a]['id'] + "_transition_title"))
    return actions
def get_workflow_actions(self):
    """Compile a list of possible workflow transitions for items
    in this Table.

    Collects the transitions available on every listed object, applies
    the restrictions from the active review_state definition, blanks
    out the transition URLs and translates the titles.
    """
    # cbb return empty list if we are unable to select items
    if not self.bika_listing.show_select_column:
        return []

    workflow = getToolByName(self.context, 'portal_workflow')
    review_state = self.bika_listing.review_state

    # gather every transition available on any listed object, keyed by id
    transitions = {}
    for item in self.items:
        obj = get_object(item.get('obj', ''))
        for transition in workflow.getTransitionsFor(obj):
            transitions[transition['id']] = transition

    # the list is restricted to and ordered by these transitions.
    if 'transitions' in review_state:
        actions = [transitions[tdict['id']]
                   for tdict in review_state['transitions']
                   if tdict['id'] in transitions]
    else:
        actions = transitions.values()

    # remove any invalid items with a warning
    valid = []
    for action in actions:
        if isinstance(action, dict) and 'id' in action:
            valid.append(action)
        else:
            logger.warning(
                "bad action in review_state['transitions']: %s. "
                "(complete list: %s)." % (action, actions))
    actions = valid

    # and these are removed
    if 'hide_transitions' in review_state:
        hidden = review_state['hide_transitions']
        actions = [action for action in actions
                   if action['id'] not in hidden]

    # cheat: until workflow_action is abolished, all URLs defined in
    # GS workflow setup will be ignored, and the default will apply.
    # (that means, WorkflowAction-bound URL is called).
    for action in actions:
        action['url'] = ''

    # if there is a self.review_state['some_state']['custom_transitions']
    # attribute on the BikaListingView, add these actions to the list.
    if 'custom_transitions' in review_state:
        for custom in review_state['custom_transitions']:
            if isinstance(custom, dict) and 'id' in custom:
                actions.append(custom)

    for action in actions:
        action['title'] = t(PMF(action['title']))
    return actions