def __init__(self, context, request):
    """Folder listing of Analysis Categories in site setup."""
    # The base listing is parameterised with the portal_type to list and
    # the icon shown in the view header.
    super(AnalysisCategoriesView, self).__init__(
        context, request, 'AnalysisCategory', 'category_big.png')
    self.title = self.context.translate(_("Analysis Categories"))
    self.columns = {
        'Title': {
            'title': _('Category'),
            'index': 'sortable_title',
            # render the title cell as a link to the object
            'replace_url': 'absolute_url'
        },
        'Description': {
            'title': _('Description'),
            'index': 'description',
            'attr': 'Description',
            'toggle': False
        },
        'Department': {
            'title': _('Department'),
            'index': 'getDepartmentTitle',
            'attr': 'getDepartmentTitle',
        },
        'SortKey': {
            'title': _('Sort Key'),
            'attr': 'getSortKey',
            'sortable': False
        },
    }
    # The review states are predefined by the base class; extend each of
    # them with the two extra columns defined above.
    for rs in self.review_states:
        rs['columns'] += ['Department', 'SortKey']
def __init__(self, context, request, **kwargs):
    """Listing of the QC analyses associated with this context."""
    AnalysesView.__init__(self, context, request, **kwargs)
    # Extra columns specific to QC analyses
    self.columns['getReferenceAnalysesGroupID'] = {'title': _('QC Sample ID'),
                                                   'sortable': False}
    self.columns['Worksheet'] = {'title': _('Worksheet'),
                                 'sortable': False}
    self.review_states[0]['columns'] = ['Service',
                                        'Worksheet',
                                        'getReferenceAnalysesGroupID',
                                        'Partition',
                                        'Method',
                                        'Instrument',
                                        'Result',
                                        'Uncertainty',
                                        'CaptureDate',
                                        'DueDate',
                                        'state_title']
    # Restrict the listing to the UIDs of the context's QC analyses
    qcanalyses = context.getQCAnalyses()
    asuids = [an.UID() for an in qcanalyses]
    self.catalog = 'bika_analysis_catalog'
    self.contentFilter = {'UID': asuids,
                          'sort_on': 'sortable_title'}
    self.icon = self.portal_url + \
        "/++resource++bika.lims.images/referencesample.png"
def __init__(self, context, request):
    """Listing view used on the add_duplicate form.

    The initial contentFilter uses an impossible review_state on purpose,
    so no rows are rendered until a real filter is applied.
    """
    BikaListingView.__init__(self, context, request)
    self.context_actions = {}
    self.catalog = 'bika_analysis_catalog'
    self.contentFilter = {'portal_type': 'Analysis',
                          'review_state': 'impossible_state'}
    self.base_url = self.context.absolute_url()
    self.view_url = self.context.absolute_url() + "/add_duplicate"
    self.show_sort_column = False
    self.show_select_row = False
    self.show_select_all_checkbox = False
    self.show_select_column = False
    self.columns = {
        'Position': {'title': _('Position')},
        'RequestID': {'title': _('Request ID')},
        'Client': {'title': _('Client')},
        'created': {'title': _('Date Requested')},
    }
    self.review_states = [
        {'id': 'default',
         'title': _('All'),
         'contentFilter': {},
         'transitions': [],
         'columns': ['Position', 'RequestID', 'Client', 'created'],
         },
    ]
def get_custom_fields(self):
    """ Returns a dictionary with custom fields to be rendered after
        header_table with this structure:
        {<fieldid>:{title:<title>, value:<html>}
    """
    fields = {}
    request_obj = self.context
    wf_tool = getToolByName(self.context, 'portal_workflow')
    # Retracted (invalid) AR: link forward to the retest (child) AR
    if wf_tool.getInfoFor(request_obj, 'review_state') == 'invalid':
        retest = None
        if hasattr(request_obj, 'getChildAnalysisRequest'):
            retest = request_obj.getChildAnalysisRequest()
        if retest:
            link = "<a href='%s'>%s</a>" % (retest.absolute_url(),
                                            retest.getRequestID())
            fields['ChildAR'] = {
                'title': t(_("AR for retested results")),
                'value': link
            }
    # AR auto-generated by a retraction: link back to the parent AR
    if hasattr(request_obj, 'getParentAnalysisRequest') \
            and request_obj.getParentAnalysisRequest():
        source = request_obj.getParentAnalysisRequest()
        link = "<a href='%s'>%s</a>" % (source.absolute_url(),
                                        source.getRequestID())
        fields['ParentAR'] = {
            'title': t(_("Invalid AR retested")),
            'value': link
        }
    return fields
def __init__(self, context, request):
    """Set up icon, title and description for the Add Blank Reference view."""
    BrowserView.__init__(self, context, request)
    translate = self.context.translate
    self.icon = "%s/++resource++bika.lims.images/worksheet_big.png" \
        % self.portal_url
    self.title = translate(_("Add Blank Reference"))
    self.description = translate(_(
        "Select services in the left column to locate "
        "reference samples. Select a reference by clicking it. "))
def folderitems(self):
    """Build the listing rows; managers additionally get Active/Dormant
    filter tabs with (de)activate transitions and a select column.

    NOTE(review): the extra review states are appended on every call and
    the first state's 'transitions' key is deleted unconditionally -- this
    assumes folderitems() runs at most once per view instance; a second
    call by a manager would raise KeyError and duplicate the tabs. Confirm
    before reusing the instance.
    """
    mtool = getToolByName(self.context, 'portal_membership')
    if mtool.checkPermission(ManageBika, self.context):
        del self.review_states[0]['transitions']
        self.show_select_column = True
        self.review_states.append(
            {'id': 'active',
             'title': _('Active'),
             'contentFilter': {'inactive_state': 'active'},
             'transitions': [{'id': 'deactivate'}, ],
             'columns': ['Title', 'Description']})
        self.review_states.append(
            {'id': 'inactive',
             'title': _('Dormant'),
             'contentFilter': {'inactive_state': 'inactive'},
             'transitions': [{'id': 'activate'}, ],
             'columns': ['Title', 'Description']})
    items = BikaListingView.folderitems(self)
    # Render each Title cell as a link to the object
    for x in range(len(items)):
        if 'obj' in items[x]:
            items[x]['replace']['Title'] = "<a href='%s'>%s</a>" % \
                (items[x]['url'], items[x]['Title'])
    return items
def __call__(self, result=None, **kwargs):
    """Evaluate ``result`` against every IResultOutOfRange adapter
    registered for the context.

    Returns a dict {context UID: [alert dict with 'icon', 'msg',
    'field']} for the first adapter that reports an out-of-range value,
    or an empty dict when the result is in range (or no adapter answers).
    """
    # (fix: removed the unused local ``translate`` binding)
    path = '++resource++bika.lims.images'
    alerts = {}
    # We look for IResultOutOfRange adapters for this object
    for name, adapter in getAdapters((self.context, ), IResultOutOfRange):
        ret = adapter(result)
        if not ret:
            continue
        spec = ret["spec_values"]
        rngstr = "{0} {1}, {2} {3}".format(
            t(_("min")), str(spec.get('min', '')),
            t(_("max")), str(spec.get('max', '')))
        if ret["out_of_range"]:
            if ret["acceptable"]:
                # out of range but within the tolerated shoulder
                message = "{0} ({1})".format(
                    t(_('Result in shoulder range')), rngstr)
                icon = path + '/warning.png'
            else:
                message = "{0} ({1})".format(
                    t(_('Result out of range')), rngstr)
                icon = path + '/exclamation.png'
            alerts[self.context.UID()] = [
                {
                    'icon': icon,
                    'msg': message,
                    'field': 'Result',
                },
            ]
            # only the first adapter that flags the result is reported
            break
    return alerts
def getMaintenanceTypes(self):
    """ Return the current list of maintenance types """
    keys = ('Preventive', 'Repair', 'Enhancement')
    entries = [(key, safe_unicode(_(key)).encode('utf-8')) for key in keys]
    return DisplayList(entries)
def __init__(self, context, request):
    """Folder listing of Sample Conditions in site setup."""
    super(SampleConditionsView, self).__init__(context, request)
    self.catalog = 'bika_setup_catalog'
    self.contentFilter = {'portal_type': 'SampleCondition',
                          'sort_on': 'sortable_title'}
    self.context_actions = {_('Add'): {
        'url': 'createObject?type_name=SampleCondition',
        'icon': '++resource++bika.lims.images/add.png'
    }}
    self.title = _("Sample Conditions")
    self.icon = self.portal_url + "/++resource++bika.lims.images/samplecondition_big.png"
    self.description = ""
    self.show_sort_column = False
    self.show_select_row = False
    self.show_select_column = False
    self.pagesize = 25
    self.columns = {
        'Title': {'title': _('Sample Condition'),
                  'index': 'sortable_title'},
        'Description': {'title': _('Description'),
                        'index': 'description',
                        'toggle': True},
    }
    self.review_states = [
        {'id': 'default',
         'title': _('All'),
         'contentFilter': {},
         # 'empty' is a placeholder: no workflow transitions are offered
         'transitions': [{'id': 'empty'}, ],
         'columns': ['Title', 'Description']},
    ]
def __init__(self, context, request, fieldvalue=None, allow_edit=False):
    """Listing of active Analysis Services for selection on an
    Analysis Profile.

    :param fieldvalue: currently selected service objects; their UIDs
        pre-tick the checkboxes. Defaults to no selection.
    :param allow_edit: whether the listing is rendered editable.
    """
    # Fix: the default used to be a mutable list literal ([]), which is
    # shared across calls; use None and create a fresh list instead.
    # Passing an explicit list behaves exactly as before.
    if fieldvalue is None:
        fieldvalue = []
    super(AnalysisProfileAnalysesView, self).__init__(context, request)
    self.catalog = "bika_setup_catalog"
    self.contentFilter = {'portal_type': 'AnalysisService',
                          'sort_on': 'sortable_title',
                          'inactive_state': 'active', }
    self.context_actions = {}
    self.base_url = self.context.absolute_url()
    self.view_url = self.base_url
    self.show_sort_column = False
    self.show_select_row = False
    self.show_select_all_checkbox = False
    self.show_column_toggles = False
    self.show_select_column = True
    self.allow_edit = allow_edit
    self.form_id = "analyses"
    self.profile = None
    self.categories = []
    self.do_cats = self.context.bika_setup.getCategoriseAnalysisServices()
    if self.do_cats:
        self.pagesize = 999999  # hide batching controls
        self.show_categories = True
        self.expand_all_categories = False
        self.ajax_categories = True
        self.ajax_categories_url = self.context.absolute_url() + \
            "/analysisprofile_analysesview"
        self.category_index = 'getCategoryTitle'
    self.columns = {
        'Title': {'title': _('Service'),
                  'index': 'sortable_title',
                  'sortable': False, },
        'Price': {'title': _('Price'),
                  'sortable': False, },
    }
    self.review_states = [
        {'id': 'default',
         'title': _('All'),
         'contentFilter': {},
         'columns': ['Title', 'Price', ],
         'transitions': [{'id': 'empty'}, ],  # none
         },
    ]
    self.fieldvalue = fieldvalue
    self.selected = [x.UID() for x in fieldvalue]
    if self.aq_parent.portal_type == 'AnalysisProfile':
        # Custom settings for the Analysis Services assigned to
        # the Analysis Profile
        # https://jira.bikalabs.com/browse/LIMS-1324
        self.profile = self.aq_parent
        self.columns['Hidden'] = {'title': _('Hidden'),
                                  'sortable': False,
                                  'type': 'boolean'}
        self.review_states[0]['columns'].insert(1, 'Hidden')
def __init__(self, context, request):
    """Read-only listing of the ReferenceAnalysis objects contained in
    this reference sample (path-scoped, non-recursive)."""
    AnalysesView.__init__(self, context, request)
    self.contentFilter = {'portal_type': 'ReferenceAnalysis',
                          'path': {'query': "/".join(self.context.getPhysicalPath()),
                                   'level': 0}}
    self.show_select_row = False
    self.show_sort_column = False
    self.show_select_column = False
    self.allow_edit = False
    self.columns = {
        'id': {'title': _('ID')},
        'Category': {'title': _('Category')},
        'Service': {'title': _('Service')},
        'Worksheet': {'title': _('Worksheet')},
        'Result': {'title': _('Result')},
        'Uncertainty': {'title': _('+-')},
        'DueDate': {'title': _('Due Date')},
        'retested': {'title': _('Retested'), 'type': 'boolean'},
        'state_title': {'title': _('State')},
    }
    self.review_states = [
        {'id': 'all',
         'title': _('All'),
         'transitions': [],
         # note: 'retested' is defined above but not shown by default
         'columns': ['id', 'Category', 'Service', 'Worksheet', 'Result',
                     'Uncertainty', 'DueDate', 'state_title'],
         },
    ]
def __call__(self):
    """Handle the AR import form: validate the uploaded CSV, create an
    ARImport object from it and attempt the initial 'validate'
    transition, redirecting to the edit form when validation fails.
    """
    request = self.request
    form = request.form
    CheckAuthenticator(form)
    if form.get('submitted'):
        # Validate form submission
        csvfile = form.get('csvfile')
        # Fix: guard BEFORE touching the upload -- the original called
        # csvfile.read() first, which raised AttributeError on None when
        # no file was selected, instead of showing the status message.
        if not csvfile:
            addStatusMessage(request, _("No file selected"))
            return self.template()
        data = csvfile.read()
        lines = data.splitlines()
        filename = csvfile.filename
        if len(lines) < 3:
            addStatusMessage(request, _("Too few lines in CSV file"))
            return self.template()
        # Create the arimport object
        arimport = _createObjectByType("ARImport", self.context, tmpID())
        arimport.processForm()
        arimport.setTitle(self.mkTitle(filename))
        arimport.schema['OriginalFile'].set(arimport, data)
        # Save all fields from the file into the arimport schema
        arimport.save_header_data()
        arimport.save_sample_data()
        # immediate batch creation if required
        arimport.create_or_reference_batch()
        # Attempt first validation
        try:
            workflow = getToolByName(self.context, 'portal_workflow')
            workflow.doActionFor(arimport, 'validate')
        except WorkflowException:
            self.request.response.redirect(arimport.absolute_url() +
                                           "/edit")
    else:
        return self.template()
def __init__(self, context, request, field):
    """Listing used by a services selector widget; ``field`` is the
    archetypes field whose accessor yields the currently selected
    service objects (pre-ticked in the listing)."""
    BikaListingView.__init__(self, context, request)
    self.selected = [o.UID() for o in getattr(field, field.accessor)()]
    self.context_actions = {}
    self.catalog = "bika_setup_catalog"
    # impossible state on purpose: rows are supplied by the widget
    self.contentFilter = {'review_state': 'impossible_state'}
    self.base_url = self.context.absolute_url()
    self.view_url = self.base_url
    self.show_categories = True
    self.show_sort_column = False
    self.show_select_row = False
    self.show_select_all_checkbox = False
    self.show_select_column = True
    self.pagesize = 1000
    self.form_id = 'serviceswidget'
    self.columns = {
        'Service': {'title': _('Service')},
        'Keyword': {'title': _('Keyword'),
                    'index': 'getKeyword'},
        'Method': {'title': _('Method')},
        'Calculation': {'title': _('Calculation')},
    }
    self.review_states = [
        {'id': 'default',
         'title': _('All'),
         'contentFilter': {},
         'transitions': [],
         'columns': ['Service',
                     'Keyword',
                     'Method',
                     'Calculation', ]
         },
    ]
def __init__(self, context, request):
    """AR listing scoped to a single client folder."""
    super(ClientAnalysisRequestsView, self).__init__(context, request)
    self.view_url = self.view_url + "/analysisrequests"
    # Only ARs physically located inside this client folder
    self.contentFilter['path'] = {"query": "/".join(context.getPhysicalPath()),
                                  "level": 0}
    self.context_actions = {}
    wf = getToolByName(self.context, 'portal_workflow')
    # client contact required
    active_contacts = [c for c in context.objectValues('Contact') if
                       wf.getInfoFor(c, 'inactive_state', '') == 'active']
    if context.portal_type == "Client" and not active_contacts:
        msg = _("Client contact required before request may be submitted")
        self.context.plone_utils.addPortalMessage(self.context.translate(msg))
    else:
        # add actions enabled only for active clients
        # XXX subtractive workflow for these kinds of perms.
        self.context_actions = {}
        if wf.getInfoFor(self.context, 'inactive_state', '') == 'active':
            self.context_actions[_('Add')] = {
                'url': 'analysisrequest_add',
                'icon': '++resource++bika.lims.images/add.png'}
    # The Client column is redundant in a client context; drop it from
    # every inherited review state.
    # NOTE(review): list.remove raises ValueError if 'Client' is absent --
    # assumes every inherited state defines that column; confirm.
    review_states = []
    for review_state in self.review_states:
        review_state['columns'].remove('Client')
        review_states.append(review_state)
    self.review_states = review_states
def __init__(self, context, request):
    """Folder listing of Attachment Types in site setup."""
    super(AttachmentTypesView, self).__init__(context, request)
    bsc = getToolByName(context, 'bika_setup_catalog')
    # query through the setup catalog rather than folder contents
    self.contentsMethod = bsc
    self.contentFilter = {'portal_type': 'AttachmentType',
                          'sort_on': 'sortable_title'}
    self.context_actions = {_('Add'):
                            {'url': 'createObject?type_name=AttachmentType',
                             'icon': '++resource++bika.lims.images/add.png'}}
    self.icon = "++resource++bika.lims.images/attachment_big.png"
    self.title = _("Attachment Types")
    self.show_sort_column = False
    self.show_select_row = False
    self.show_select_column = True
    self.pagesize = 25
    self.columns = {
        'Title': {'title': _('Attachment Type'),
                  'index': 'sortable_title'},
        'Description': {'title': _('Description'),
                        'index': 'description',
                        'toggle': True},
    }
    self.review_states = [
        {'id': 'all',
         'title': _('All'),
         'columns': ['Title', 'Description']},
    ]
def __call__(self):
    """Render the AR view, adding retraction-related portal messages and
    redirecting when the AR is cancelled or the user lacks the
    ResultsNotRequested permission."""
    ar = self.context
    workflow = getToolByName(ar, 'portal_workflow')
    # If is a retracted AR, show the link to child AR and show a warn msg
    if workflow.getInfoFor(ar, 'review_state') == 'invalid':
        childar = hasattr(ar, 'getChildAnalysisRequest') \
            and ar.getChildAnalysisRequest() or None
        childid = childar and childar.getRequestID() or None
        message = _('This Analysis Request has been withdrawn and is shown '
                    'for trace-ability purposes only. Retest: ${retest_child_id}.',
                    mapping={"retest_child_id": childid if childid else ''})
        self.context.plone_utils.addPortalMessage(message, 'warning')
    # If is an AR automatically generated due to a Retraction, show it's
    # parent AR information
    if hasattr(ar, 'getParentAnalysisRequest') \
            and ar.getParentAnalysisRequest():
        par = ar.getParentAnalysisRequest()
        message = _(
            'This Analysis Request has been generated automatically due to '
            'the retraction of the Analysis Request ${retracted_request_id}.',
            mapping={"retracted_request_id": par.getRequestID()})
        self.context.plone_utils.addPortalMessage(message, 'info')
    can_do = getSecurityManager().checkPermission(ResultsNotRequested, ar)
    if workflow.getInfoFor(ar, 'cancellation_state') == "cancelled":
        self.request.response.redirect(ar.absolute_url())
    elif not(can_do):
        # lacking permission: bounce back to the default AR view
        self.request.response.redirect(ar.absolute_url())
    else:
        return self.template()
def __call__(self):
    """Render the listing, prefixing it with retraction-related portal
    messages when the AR is invalid or was auto-generated by a
    retraction."""
    ar = self.context
    workflow = getToolByName(ar, 'portal_workflow')
    # If is a retracted AR, show the link to child AR and show a warn msg
    if workflow.getInfoFor(ar, 'review_state') == 'invalid':
        childar = hasattr(ar, 'getChildAnalysisRequest') \
            and ar.getChildAnalysisRequest() or None
        childid = childar and childar.getRequestID() or None
        message = _('This Analysis Request has been withdrawn and is '
                    'shown for trace-ability purposes only. Retest: '
                    '${retest_child_id}.',
                    mapping={'retest_child_id': safe_unicode(childid) or ''})
        self.context.plone_utils.addPortalMessage(
            self.context.translate(message), 'warning')
    # If is an AR automatically generated due to a Retraction, show it's
    # parent AR information
    if hasattr(ar, 'getParentAnalysisRequest') \
            and ar.getParentAnalysisRequest():
        par = ar.getParentAnalysisRequest()
        message = _('This Analysis Request has been '
                    'generated automatically due to '
                    'the retraction of the Analysis '
                    'Request ${retracted_request_id}.',
                    mapping={'retracted_request_id': par.getRequestID()})
        self.context.plone_utils.addPortalMessage(
            self.context.translate(message), 'info')
    template = BikaListingView.__call__(self)
    return template
def __call__(self):
    """Export this worksheet's analyses through the instrument's data
    interface, redirecting with a portal message on any config problem.

    NOTE(review): the exporter's result ('data') is never returned nor
    written to the response, and the method ends in 'pass' -- presumably
    Export() writes the response itself, or this path is unfinished;
    confirm.
    """
    translate = self.context.translate  # NOTE(review): unused here
    instrument = self.context.getInstrument()
    if not instrument:
        self.context.plone_utils.addPortalMessage(
            _("You must select an instrument"), 'info')
        self.request.RESPONSE.redirect(self.context.absolute_url())
        return
    exim = instrument.getDataInterface()
    if not exim:
        self.context.plone_utils.addPortalMessage(
            _("Instrument has no data interface selected"), 'info')
        self.request.RESPONSE.redirect(self.context.absolute_url())
        return
    # exim refers to filename in instruments/
    if type(exim) == list:
        exim = exim[0]
    exim = exim.lower()
    # search instruments module for 'exim' module
    if not instruments.getExim(exim):
        self.context.plone_utils.addPortalMessage(
            _("Instrument exporter not found"), 'error')
        self.request.RESPONSE.redirect(self.context.absolute_url())
        return
    exim = instruments.getExim(exim)
    exporter = exim.Export(self.context, self.request)
    data = exporter(self.context.getAnalyses())
    pass
def folderitem(self, obj, item, index):
    """Decorate one published-report row with a PDF download link, file
    size, creation date, publisher and recipients.

    Falls back to a 'Not available' label when the PDF blob is missing.
    """
    obj_url = obj.absolute_url()
    pdf = obj.getPdf()
    filesize = 0
    title = _('Download')
    anchor = "<a href='%s/at_download/Pdf'>%s</a>" % \
        (obj_url, _("Download"))
    try:
        filesize = pdf.get_size()
        filesize = filesize / 1024 if filesize > 0 else 0
    # Fix: was a bare ``except:`` which also swallows SystemExit /
    # KeyboardInterrupt; Exception still covers the expected failure.
    except Exception:
        # POSKeyError: 'No blob file'
        # Show the record, but not the link
        title = _('Not available')
        anchor = title
    item['Title'] = title
    item['FileSize'] = '%sKb' % filesize
    fmt_date = self.ulocalized_time(obj.created(), long_format=1)
    item['Date'] = fmt_date
    item['PublishedBy'] = self.user_fullname(obj.Creator())
    # Recipients rendered as mailto links where an address is known
    recip = []
    for recipient in obj.getRecipients():
        email = recipient['EmailAddress']
        val = recipient['Fullname']
        if email:
            val = "<a href='mailto:%s'>%s</a>" % (email, val)
        recip.append(val)
    item['replace']['Recipients'] = ', '.join(recip)
    item['replace']['Title'] = anchor
    return item
def parse_daterange(self, request, field_id, field_title):
    """Build a catalog date-range query from the request parameters
    <field_id>_fromdate and <field_id>_todate.

    Returns a dict with 'contentFilter' ((field_id, query) tuple),
    'parms' (title/value for display) and 'titles', or None when neither
    bound is present.
    """
    from_date = request.get('%s_fromdate' % field_id, None)
    from_date = from_date and from_date + ' 00:00' or None
    to_date = request.get('%s_todate' % field_id, None)
    to_date = to_date and to_date + ' 23:59' or None
    if from_date and to_date:
        query = {'query': [from_date, to_date], 'range': 'min:max'}
    elif from_date or to_date:
        query = {'query': from_date or to_date,
                 'range': from_date and 'min' or 'max'}
    else:
        return None
    if from_date and to_date:
        parms = translate(_("From ${start_date} to ${end_date}",
                            mapping={"start_date": from_date,
                                     "end_date": to_date}))
    elif from_date:
        # Fix: range is 'min', i.e. dates on/after the start date, so the
        # label must read 'After' (the labels were previously swapped).
        parms = translate(_("After ${start_date}",
                            mapping={"start_date": from_date}))
    elif to_date:
        # range is 'max', i.e. dates on/before the end date.
        parms = translate(_("Before ${end_date}",
                            mapping={"end_date": to_date}))
    res = {}
    res['contentFilter'] = (field_id, query)
    res['parms'] = {'title': field_title, 'value': parms}
    res['titles'] = parms
    return res
def __init__(self, context, request, fieldvalue, allow_edit):
    """Listing of per-service result specifications (min/max/error).

    ``fieldvalue`` is a sequence of dicts, each with at least a 'keyword'
    key; they are indexed by keyword into self.specsresults.
    """
    BikaListingView.__init__(self, context, request)
    self.context_actions = {}
    # impossible state on purpose: rows come from fieldvalue, not catalog
    self.contentFilter = {'review_state': 'impossible_state'}
    self.context_actions = {}
    self.base_url = self.context.absolute_url()
    self.view_url = self.base_url
    self.show_sort_column = False
    self.show_select_row = False
    self.show_select_all_checkbox = False
    self.show_select_column = False
    self.pagesize = 1000
    self.allow_edit = allow_edit
    self.specsresults = {}
    for specresults in fieldvalue:
        self.specsresults[specresults['keyword']] = specresults
    self.columns = {
        'service': {'title': _('Service'),
                    'index': 'sortable_title',
                    'sortable': False},
        'min': {'title': _('Min'),
                'sortable': False, },
        'max': {'title': _('Max'),
                'sortable': False, },
        'error': {'title': _('Permitted Error %'),
                  'sortable': False},
    }
    self.review_states = [
        {'id': 'default',
         'title': _('All'),
         'contentFilter': {},
         'transitions': [],
         'columns': ['service', 'min', 'max', 'error'],
         },
    ]
def __init__(self, context, request):
    """Minimal selectable service listing (single 'Service' column)."""
    BikaListingView.__init__(self, context, request)
    self.context_actions = {}
    self.catalog = 'bika_setup_catalog'
    # impossible state on purpose: caller sets the real filter later
    self.contentFilter = {'review_state': 'impossible_state'}
    self.base_url = self.context.absolute_url()
    self.view_url = self.context.absolute_url()
    self.show_sort_column = False
    self.show_select_row = False
    self.show_select_all_checkbox = False
    self.show_select_column = True
    self.pagesize = 1000
    self.show_workflow_action_buttons = False
    self.columns = {
        'Service': {'title': _('Service'),
                    'sortable': False},
    }
    self.review_states = [
        {'id': 'default',
         'title': _('All'),
         'contentFilter': {},
         'transitions': [],
         'columns': ['Service'],
         },
    ]
def __call__(self, value, *args, **kwargs):
    """Validate an IBAN.

    Returns True on success, or a translated (utf-8) error-message
    string on failure. Requires module-level ``country_dic`` (country ->
    (length, name)) and ``letter_dic`` (char -> digits) tables.
    """
    instance = kwargs['instance']
    translate = getToolByName(instance, 'translation_service').translate
    # remove spaces from formatted
    IBAN = ''.join(c for c in value if c.isalnum())
    # Standard IBAN checksum procedure: rotate the first four characters
    # (country code + check digits) to the end of the string.
    IBAN = IBAN[4:] + IBAN[:4]
    # After the rotation, the 2-letter country code sits at [-4:-2]
    country = IBAN[-4:-2]
    if country not in country_dic:
        # NOTE(review): interpolating into the msgid before _() defeats
        # i18n catalog lookup; left as-is since output is unchanged.
        msg = _('Unknown IBAN country %s' % country)
        return to_utf8(translate(msg))
    length_c, name_c = country_dic[country]
    if len(IBAN) != length_c:
        diff = len(IBAN) - length_c
        msg = _('Wrong IBAN length by %s: %s' %
                (('short by %i' % -diff) if diff < 0
                 else ('too long by %i' % diff), value))
        return to_utf8(translate(msg))
    # Validating procedure
    elif int("".join(str(letter_dic[x]) for x in IBAN)) % 97 != 1:
        # mod-97 checksum must leave remainder 1 (ISO 13616)
        msg = _('Incorrect IBAN number: %s' % value)
        return to_utf8(translate(msg))
    else:
        # Accepted:
        return True
def folderitem(self, obj, item, index):
    """Decorate one reference-sample row; triggers the 'expire'
    transition as a side effect when the sample's expiry date has
    passed. Returns None to omit freshly-expired rows from filtered
    listings."""
    if item.get('review_state', 'current') == 'current':
        # Check expiry date
        exdate = obj.getExpiryDate()
        if exdate:
            expirydate = DT2dt(exdate).replace(tzinfo=None)
            if (datetime.today() > expirydate):
                # Trigger expiration
                # NOTE(review): 'workflow' is not defined locally --
                # presumably a module/class-level tool alias; confirm.
                workflow.doActionFor(obj, 'expire')
                item['review_state'] = 'expired'
                item['obj'] = obj
    if self.contentFilter.get('review_state', '') \
            and item.get('review_state', '') == 'expired':
        # This item must be omitted from the list
        return None
    item['ID'] = obj.id
    item['DateSampled'] = self.ulocalized_time(obj.getDateSampled(),
                                               long_format=True)
    item['DateReceived'] = self.ulocalized_time(obj.getDateReceived())
    item['DateOpened'] = self.ulocalized_time(obj.getDateOpened())
    item['ExpiryDate'] = self.ulocalized_time(obj.getExpiryDate())
    after_icons = ''
    if obj.getBlank():
        after_icons += "<img\
        src='%s/++resource++bika.lims.images/blank.png' \
        title='%s'>" % (self.portal_url, t(_('Blank')))
    if obj.getHazardous():
        after_icons += "<img\
        src='%s/++resource++bika.lims.images/hazardous.png' \
        title='%s'>" % (self.portal_url, t(_('Hazardous')))
    item['replace']['ID'] = "<a href='%s/base_view'>%s</a> %s" % \
        (item['url'], item['ID'], after_icons)
    return item
def parse_daterange(self, request, field_id, field_title):
    """Build a catalog date-range query from the request parameters
    <field_id>_fromdate and <field_id>_todate.

    Returns a dict with 'contentFilter', 'parms' and 'titles', or None
    when neither bound is present in the request.
    """
    from_date = request.get('%s_fromdate' % field_id, None)
    from_date = from_date and from_date + ' 00:00' or None
    to_date = request.get('%s_todate' % field_id, None)
    to_date = to_date and to_date + ' 23:59' or None
    if from_date and to_date:
        query = {'query': [from_date, to_date], 'range': 'min:max'}
    elif from_date or to_date:
        query = {'query': from_date or to_date,
                 'range': from_date and 'min' or 'max'}
    else:
        return None
    # Human-readable description of the applied range.
    # NOTE(review): labels are built by concatenation ('From'/'to') and
    # are not translatable as full sentences, unlike the mapping-based
    # variant of this method elsewhere in the codebase.
    if from_date and to_date:
        parms = 'from %s to %s' % (from_date, to_date)
    elif from_date:
        parms = _("From") + " %s" % from_date
    elif to_date:
        parms = _("to") + " %s" % to_date
    res = {}
    res['contentFilter'] = (field_id, query)
    res['parms'] = {'title': field_title, 'value': parms}
    res['titles'] = parms
    return res
def __call__(self):
    """Render the client AR listing, wiring up the 'Add' context action
    and the 'Copy to new' custom action according to the user's
    permissions and the client's state.
    """
    # (fix: removed the unused local ``translate`` binding)
    self.context_actions = {}
    wf = getToolByName(self.context, 'portal_workflow')
    mtool = getToolByName(self.context, 'portal_membership')
    addPortalMessage = self.context.plone_utils.addPortalMessage
    # client contact required
    active_contacts = [c for c in self.context.objectValues('Contact')
                       if wf.getInfoFor(c, 'inactive_state', '') == 'active']
    if isActive(self.context):
        if self.context.portal_type == "Client" and not active_contacts:
            msg = _("Client contact required before request may be submitted")
            addPortalMessage(msg)
        else:
            if mtool.checkPermission(AddAnalysisRequest, self.context):
                self.context_actions[t(_('Add'))] = {
                    'url': self.context.absolute_url() + "/portal_factory/"
                    "AnalysisRequest/Request new analyses/ar_add",
                    'icon': '++resource++bika.lims.images/add.png'}
    # in client context we can use a permission check for this transition
    # in multi-client listings, we must rather check against user roles.
    if mtool.checkPermission(ModifyPortalContent, self.context):
        review_states = []
        for review_state in self.review_states:
            review_state['custom_actions'].extend(
                [{'id': 'copy_to_new',
                  'title': _('Copy to new'),
                  'url': 'workflow_action?action=copy_to_new'}, ])
            review_states.append(review_state)
        self.review_states = review_states
    return super(ClientAnalysisRequestsView, self).__call__()
def __init__(self, context, request):
    """Listing of the Contact objects inside this client folder."""
    super(ClientContactsView, self).__init__(context, request)
    self.catalog = "portal_catalog"
    # path-scoped, non-recursive: contacts directly inside this client
    self.contentFilter = {
        'portal_type': 'Contact',
        'sort_on': 'sortable_title',
        'path': {
            "query": "/".join(context.getPhysicalPath()),
            "level": 0
        }
    }
    self.context_actions = {_('Add'):
                            {'url': 'createObject?type_name=Contact',
                             'icon': '++resource++bika.lims.images/add.png'}}
    self.show_sort_column = False
    self.show_select_row = False
    self.show_select_column = True
    self.pagesize = 50
    self.form_id = "contacts"
    self.icon = self.portal_url + "/++resource++bika.lims.images/client_contact_big.png"
    self.title = _("Contacts")
    self.description = ""
    self.columns = {
        'getFullname': {'title': _('Full Name'),
                        'index': 'getFullname'},
        'Username': {'title': _('User Name')},
        'getEmailAddress': {'title': _('Email Address')},
        'getBusinessPhone': {'title': _('Business Phone')},
        'getMobilePhone': {'title': _('Mobile Phone')},
    }
    self.review_states = [
        {'id': 'default',
         'title': _('Active'),
         'contentFilter': {'inactive_state': 'active'},
         'transitions': [{'id': 'deactivate'}, ],
         'columns': ['getFullname',
                     'Username',
                     'getEmailAddress',
                     'getBusinessPhone',
                     'getMobilePhone']},
        {'id': 'inactive',
         'title': _('Dormant'),
         'contentFilter': {'inactive_state': 'inactive'},
         'transitions': [{'id': 'activate'}, ],
         'columns': ['getFullname',
                     'Username',
                     'getEmailAddress',
                     'getBusinessPhone',
                     'getMobilePhone']},
        {'id': 'all',
         'title': _('All'),
         'contentFilter': {},
         'columns': ['getFullname',
                     'Username',
                     'getEmailAddress',
                     'getBusinessPhone',
                     'getMobilePhone']},
    ]
def __call__(self, value, *args, **kwargs):
    """Validate a records field of result options.

    Each record in the submitted form value must have a numeric
    'ResultValue' and a non-empty 'ResultText'. Returns True on success
    or a translated (utf-8) error-message string.
    """
    instance = kwargs['instance']
    fieldname = kwargs['field'].getName()
    request = kwargs.get('REQUEST', {})
    form = request.form
    form_value = form.get(fieldname)
    translate = getToolByName(instance, 'translation_service').translate
    # bsc = getToolByName(instance, 'bika_setup_catalog')
    # ResultValue must always be a number
    for field in form_value:
        try:
            float(field['ResultValue'])
        # Fix: was a bare ``except:``, which also swallows SystemExit /
        # KeyboardInterrupt. The expected failures are a missing key, a
        # non-numeric string, or a non-castable type.
        except (KeyError, TypeError, ValueError):
            return to_utf8(
                translate(
                    _("Validation failed: "
                      "Result Values must be numbers")))
        if 'ResultText' not in field:
            return to_utf8(
                translate(
                    _("Validation failed: Result Text cannot be blank")))
    return True
def __init__(self, context, request):
    """Selectable service listing with optional AJAX category folding."""
    BikaListingView.__init__(self, context, request)
    self.context_actions = {}
    self.catalog = 'bika_setup_catalog'
    # impossible state on purpose: caller sets the real filter later
    self.contentFilter = {'review_state': 'impossible_state'}
    self.base_url = self.context.absolute_url()
    self.view_url = self.context.absolute_url()
    self.show_sort_column = False
    self.show_select_row = False
    self.show_select_all_checkbox = False
    self.show_select_column = True
    # batching disabled; categories provide the folding instead
    self.pagesize = 999999
    self.show_workflow_action_buttons = False
    self.show_categories = context.bika_setup.getCategoriseAnalysisServices()
    self.expand_all_categories = False
    self.ajax_categories = True
    self.ajax_categories_url = self.base_url + '/service_view'
    self.category_index = 'getCategoryTitle'
    self.columns = {
        'Service': {'title': _('Service'),
                    'sortable': False},
    }
    self.review_states = [
        {'id': 'default',
         'title': _('All'),
         'contentFilter': {},
         'transitions': [],
         'columns': ['Service'],
         },
    ]
def __init__(self, context, request):
    """AJAX listing of ReferenceSample objects for worksheet reference
    selection."""
    super(ajaxGetWorksheetReferences, self).__init__(context, request)
    self.catalog = 'bika_catalog'
    self.contentFilter = {'portal_type': 'ReferenceSample'}
    self.context_actions = {}
    self.show_sort_column = False
    self.show_select_row = False
    self.show_select_all_checkbox = False
    self.show_select_column = False
    self.show_workflow_action_buttons = False
    self.pagesize = 50
    # must set service_uids in __call__ before delegating to super
    self.service_uids = []
    # must set control_type='b' or 'c' in __call__ before delegating
    self.control_type = ""
    # extend the columns inherited from the base class
    self.columns['Services'] = {'title': _('Services')}
    self.columns['Definition'] = {'title': _('Reference Definition')}
    self.review_states = [
        {'id': 'default',
         'title': _('All'),
         'contentFilter': {},
         'columns': ['ID',
                     'Title',
                     'Definition',
                     'ExpiryDate',
                     'Services']
         },
    ]
def __init__(self, context, request):
    """Folder listing of Instruments in site setup, with
    Active/Dormant/All filter tabs."""
    super(InstrumentsView, self).__init__(context, request)
    self.catalog = 'bika_setup_catalog'
    self.contentFilter = {
        'portal_type': 'Instrument',
        'sort_on': 'sortable_title'
    }
    self.context_actions = {
        _('Add'): {
            'url': 'createObject?type_name=Instrument',
            'icon': '++resource++bika.lims.images/add.png'
        }
    }
    self.title = self.context.translate(_("Instruments"))
    self.icon = self.portal_url + "/++resource++bika.lims.images/instrument_big.png"
    self.description = ""
    self.show_sort_column = False
    self.show_select_row = False
    self.show_select_column = True
    self.pagesize = 50
    self.columns = {
        'Title': {
            'title': _('Instrument'),
            'index': 'sortable_title'
        },
        'Type': {
            'title': _('Type'),
            'index': 'getInstrumentTypeName',
            'toggle': True,
            'sortable': True
        },
        'Brand': {
            'title': _('Brand'),
            'toggle': True
        },
        'Model': {
            'title': _('Model'),
            'index': 'getModel',
            'toggle': True
        },
        'ExpiryDate': {
            'title': _('Expiry Date'),
            'toggle': True
        },
        # hidden by default; can be toggled on
        'WeeksToExpire': {
            'title': _('Weeks To Expire'),
            'toggle': False
        },
        'Methods': {
            'title': _('Methods'),
            'toggle': True
        },
    }
    self.review_states = [
        {
            'id': 'default',
            'title': _('Active'),
            'contentFilter': {
                'inactive_state': 'active'
            },
            'transitions': [
                {
                    'id': 'deactivate'
                },
            ],
            'columns': [
                'Title', 'Type', 'Brand', 'Model', 'ExpiryDate',
                'WeeksToExpire', 'Methods'
            ]
        },
        {
            'id': 'inactive',
            'title': _('Dormant'),
            'contentFilter': {
                'inactive_state': 'inactive'
            },
            'transitions': [
                {
                    'id': 'activate'
                },
            ],
            'columns': [
                'Title', 'Type', 'Brand', 'Model', 'ExpiryDate',
                'WeeksToExpire', 'Methods'
            ]
        },
        {
            'id': 'all',
            'title': _('All'),
            'contentFilter': {},
            'columns': [
                'Title', 'Type', 'Brand', 'Model', 'ExpiryDate',
                'WeeksToExpire', 'Methods'
            ]
        },
    ]
def get_mail_subject(self, ar):
    """ Returns the email subject in accordance with the client
        preferences

    The client's getEmailSubject() selects which identifier groups are
    included: 'ar' (request IDs), 'co' (client order numbers), 'cr'
    (client references), 'cs' (client sample IDs). Returns a
    (subject, detail_line) tuple.
    """
    client = ar.aq_parent
    subject_items = client.getEmailSubject()
    ai = co = cr = cs = False
    if 'ar' in subject_items:
        ai = True
    if 'co' in subject_items:
        co = True
    if 'cr' in subject_items:
        cr = True
    if 'cs' in subject_items:
        cs = True
    ais = []
    cos = []
    crs = []
    css = []
    # set when a requested identifier is empty on the AR/sample, so the
    # subject can be suffixed with 'and others'
    blanks_found = False
    if ai:
        ais.append(ar.getRequestID())
    if co:
        if ar.getClientOrderNumber():
            if not ar.getClientOrderNumber() in cos:
                cos.append(ar.getClientOrderNumber())
        else:
            blanks_found = True
    if cr or cs:
        sample = ar.getSample()
        if cr:
            if sample.getClientReference():
                if not sample.getClientReference() in crs:
                    crs.append(sample.getClientReference())
            else:
                blanks_found = True
        if cs:
            if sample.getClientSampleID():
                if not sample.getClientSampleID() in css:
                    css.append(sample.getClientSampleID())
            else:
                blanks_found = True
    line_items = []
    if ais:
        ais.sort()
        li = t(_('ARs: ${ars}', mapping={'ars': ', '.join(ais)}))
        line_items.append(li)
    if cos:
        cos.sort()
        li = t(_('Orders: ${orders}', mapping={'orders': ', '.join(cos)}))
        line_items.append(li)
    if crs:
        crs.sort()
        li = t(
            _('Refs: ${references}', mapping={'references': ', '.join(crs)}))
        line_items.append(li)
    if css:
        css.sort()
        li = t(
            _('Samples: ${samples}', mapping={'samples': ', '.join(css)}))
        line_items.append(li)
    tot_line = ' '.join(line_items)
    if tot_line:
        subject = t(
            _('Analysis results for ${subject_parts}',
              mapping={'subject_parts': tot_line}))
        if blanks_found:
            subject += (' ' + t(_('and others')))
    else:
        subject = t(_('Analysis results'))
    return subject, tot_line
def fiddle(self, schema):
    """Relabel the TaxNumber field for this deployment and return the
    modified schema."""
    widget = schema['TaxNumber'].widget
    widget.label = _("Grower #")
    return schema
def __init__(self, context, request):
    """Folder listing of Suppliers in site setup, with
    Active/Dormant/All filter tabs."""
    super(SuppliersView, self).__init__(context, request)
    self.catalog = 'bika_setup_catalog'
    self.contentFilter = {
        'portal_type': 'Supplier',
        'sort_on': 'sortable_title'
    }
    self.context_actions = {
        _('Add'): {
            'url': 'createObject?type_name=Supplier',
            'icon': '++resource++bika.lims.images/add.png'
        }
    }
    self.title = self.context.translate(_("Suppliers"))
    self.icon = "++resource++bika.lims.images/supplier_big.png"
    self.description = ""
    self.show_sort_column = False
    self.show_select_row = False
    self.show_select_column = True
    self.pagesize = 25
    self.columns = {
        'Name': {
            'title': _('Name'),
            'index': 'getName'
        },
        'Email': {
            'title': _('Email'),
            'toggle': True
        },
        'Phone': {
            'title': _('Phone'),
            'toggle': True
        },
        'Fax': {
            'title': _('Fax'),
            'toggle': True
        },
    }
    self.review_states = [
        {
            'id': 'default',
            'title': _('Active'),
            'contentFilter': {
                'inactive_state': 'active'
            },
            'transitions': [
                {
                    'id': 'deactivate'
                },
            ],
            'columns': ['Name', 'Email', 'Phone', 'Fax']
        },
        {
            'id': 'inactive',
            'title': _('Dormant'),
            'contentFilter': {
                'inactive_state': 'inactive'
            },
            'transitions': [
                {
                    'id': 'activate'
                },
            ],
            'columns': ['Name', 'Email', 'Phone', 'Fax']
        },
        {
            'id': 'all',
            'title': _('All'),
            'contentFilter': {},
            'columns': ['Name', 'Email', 'Phone', 'Fax']
        },
    ]
from bika.lims.exportimport import instruments from bika.lims.interfaces import IInstrument, IDeactivable from bika.lims.config import QCANALYSIS_TYPES from bika.lims.content.bikaschema import BikaSchema from bika.lims.content.bikaschema import BikaFolderSchema from bika.lims import bikaMessageFactory as _ schema = BikaFolderSchema.copy() + BikaSchema.copy() + Schema(( ReferenceField( 'InstrumentType', vocabulary='getInstrumentTypes', allowed_types=('InstrumentType', ), relationship='InstrumentInstrumentType', required=1, widget=SelectionWidget(format='select', label=_("Instrument type"), visible={ 'view': 'invisible', 'edit': 'visible' }), ), ReferenceField( 'Manufacturer', vocabulary='getManufacturers', allowed_types=('Manufacturer', ), relationship='InstrumentManufacturer', required=1, widget=SelectionWidget(format='select', label=_("Manufacturer"), visible={ 'view': 'invisible',
def __init__(self, context, request):
    """Configure the listing of SRTemplates stored in this client."""
    super(ClientSRTemplatesView, self).__init__(context, request)
    self.catalog = "bika_setup_catalog"
    # Path-restrict the query so only templates inside this client show.
    self.contentFilter = {
        'portal_type': 'SRTemplate',
        'sort_on': 'sortable_title',
        'path': {"query": "/".join(self.context.getPhysicalPath()),
                 "level": 0},
    }
    self.show_sort_column = False
    self.show_select_row = False
    self.show_select_column = True
    self.pagesize = 50
    self.form_id = "srtemplates"
    self.icon = self.portal_url + \
        "/++resource++bika.lims.images/srtemplate_big.png"
    self.title = self.context.translate(_("SR Templates"))
    self.description = ""
    self.columns = {
        'title': {'title': _('Title'),
                  'index': 'sortable_title'},
        'Description': {'title': _('Description'),
                        'index': 'description'},
    }
    # Independent copies, in case a state's column list is mutated later.
    shown = ['title', 'Description']
    self.review_states = [
        {'id': 'default',
         'title': _('Active'),
         'contentFilter': {'inactive_state': 'active'},
         'transitions': [{'id': 'deactivate'}],
         'columns': list(shown)},
        {'id': 'inactive',
         'title': _('Dormant'),
         'contentFilter': {'inactive_state': 'inactive'},
         'transitions': [{'id': 'activate'}],
         'columns': list(shown)},
        {'id': 'all',
         'title': _('All'),
         'contentFilter': {},
         'columns': list(shown)},
    ]
def __call__(self):
    """Render the 'analysis requests and analyses per client' report.

    Builds headings, query parameters and per-client data lines from the
    request form and the logged-in client (if any), stores them on
    ``self.report_content`` and returns the dict consumed by the report
    template.

    Fixes over the previous revision: the ``bika_worksheetanalysis_workflow``
    parameter block was duplicated verbatim, so the "Assigned to worksheet"
    parameter appeared twice in the report; deprecated ``dict.has_key`` is
    replaced with ``in``; unused ``parm_lines`` local removed.
    """
    # get all the data into datalines
    pc = getToolByName(self.context, 'portal_catalog')
    bac = getToolByName(self.context, 'bika_analysis_catalog')
    bc = getToolByName(self.context, 'bika_catalog')
    rc = getToolByName(self.context, 'reference_catalog')
    self.report_content = {}
    parms = []
    headings = {}
    count_all_ars = 0
    count_all_analyses = 0
    query = {}
    form = self.request.form

    # When not logged in as a client contact, the client may be passed
    # explicitly in the form.
    this_client = logged_in_client(self.context)
    if not this_client and 'ClientUID' in form:
        client_uid = form['ClientUID']
        this_client = rc.lookupObject(client_uid)
        parms.append({'title': _('Client'),
                      'value': this_client.Title(),
                      'type': 'text'})
    if this_client:
        headings['header'] = _("Analysis requests and analyses")
        headings['subheader'] = _(
            "Number of Analysis requests and analyses")
    else:
        headings['header'] = _("Analysis requests and analyses per client")
        headings['subheader'] = _(
            "Number of Analysis requests and analyses per client")

    # Date-range filter on AR creation.
    date_query = formatDateQuery(self.context, 'Requested')
    if date_query:
        query['created'] = date_query
        requested = formatDateParms(self.context, 'Requested')
    else:
        requested = 'Undefined'
    parms.append({'title': _('Requested'),
                  'value': requested,
                  'type': 'text'})

    # Optional workflow-state filters; each adds a report parameter line.
    workflow = getToolByName(self.context, 'portal_workflow')
    if 'bika_analysis_workflow' in form:
        query['review_state'] = form['bika_analysis_workflow']
        review_state = workflow.getTitleForStateOnType(
            form['bika_analysis_workflow'], 'Analysis')
        parms.append({'title': _('Status'),
                      'value': review_state,
                      'type': 'text'})
    if 'bika_cancellation_workflow' in form:
        query['cancellation_state'] = form['bika_cancellation_workflow']
        cancellation_state = workflow.getTitleForStateOnType(
            form['bika_cancellation_workflow'], 'Analysis')
        parms.append({'title': _('Active'),
                      'value': cancellation_state,
                      'type': 'text'})
    if 'bika_worksheetanalysis_workflow' in form:
        query['worksheetanalysis_review_state'] = form[
            'bika_worksheetanalysis_workflow']
        ws_review_state = workflow.getTitleForStateOnType(
            form['bika_worksheetanalysis_workflow'], 'Analysis')
        parms.append({'title': _('Assigned to worksheet'),
                      'value': ws_review_state,
                      'type': 'text'})

    # and now lets do the actual report lines
    formats = {'columns': 3,
               'col_heads': [_('Client'),
                             _('Number of requests'),
                             _('Number of analyses')],
               'class': ''}
    datalines = []
    if this_client:
        c_proxies = pc(portal_type="Client", UID=this_client.UID())
    else:
        c_proxies = pc(portal_type="Client", sort_on='sortable_title')
    for client in c_proxies:
        query['getClientUID'] = client.UID
        dataline = [{'value': client.Title}, ]
        # Count ARs, then analyses, reusing the same base query.
        query['portal_type'] = 'AnalysisRequest'
        count_ars = len(bc(query))
        dataline.append({'value': count_ars})
        query['portal_type'] = 'Analysis'
        count_analyses = len(bac(query))
        dataline.append({'value': count_analyses})
        datalines.append(dataline)
        count_all_analyses += count_analyses
        count_all_ars += count_ars

    # footer data: totals only make sense across multiple clients
    footlines = []
    if not this_client:
        footline = [{'value': _('Total'), 'class': 'total_label'},
                    {'value': count_all_ars},
                    {'value': count_all_analyses}]
        footlines.append(footline)

    self.report_content = {'headings': headings,
                           'parms': parms,
                           'formats': formats,
                           'datalines': datalines,
                           'footings': footlines}
    return {'report_title': self.context.translate(headings['header']),
            'report_data': self.template()}
from bika.lims import PMF, bikaMessageFactory as _ from bika.lims.browser.widgets import AnalysisProfileAnalysesWidget from bika.lims.browser.widgets import ServicesWidget from bika.lims.config import PROJECTNAME from bika.lims.content.bikaschema import BikaSchema from Products.Archetypes.public import * from Products.Archetypes.references import HoldingReference from Products.CMFCore.permissions import View, ModifyPortalContent from Products.CMFCore.utils import getToolByName from zope.interface import Interface, implements import sys schema = BikaSchema.copy() + Schema(( StringField('ProfileKey', widget = StringWidget( label = _("Profile Keyword"), description = _("The profile's keyword is used to uniquely identify " + \ "it in import files. It has to be unique, and it may " + \ "not be the same as any Calculation Interim field ID."), ), ), ReferenceField('Service', schemata = 'Analyses', required = 1, multiValued = 1, allowed_types = ('AnalysisService',), relationship = 'AnalysisProfileAnalysisService', widget = AnalysisProfileAnalysesWidget( label = _("Profile Analyses"), description = _("The analyses included in this profile, grouped per category"), )
def __init__(self, context, request):
    """Worksheet 'Add Analyses' search listing.

    Lists received, unassigned, active analyses that can be assigned to
    the current worksheet, ordered by priority.
    """
    BikaListingView.__init__(self, context, request)
    self.icon = self.portal_url + \
        "/++resource++bika.lims.images/worksheet_big.png"
    self.title = self.context.translate(_("Add Analyses"))
    self.description = ""
    self.catalog = CATALOG_ANALYSIS_LISTING
    self.context_actions = {}
    # initial review state for first form display of the worksheet
    # add_analyses search view - first batch of analyses, latest first.
    self.sort_on = 'Priority'
    self.contentFilter = {
        'portal_type': 'Analysis',
        'review_state': 'sample_received',
        'worksheetanalysis_review_state': 'unassigned',
        'sort_on': 'getPrioritySortkey',
        'cancellation_state': 'active',
    }
    self.base_url = self.context.absolute_url()
    self.view_url = self.base_url + "/add_analyses"
    self.show_sort_column = False
    self.show_select_row = False
    self.show_select_column = True
    self.pagesize = 50
    self.columns = {
        'Priority': {'title': '',
                     'sortable': True,
                     'index': 'getPrioritySortkey'},
        'Client': {'title': _('Client'),
                   'attr': 'getClientTitle',
                   'replace_url': 'getClientURL',
                   'index': 'getClientTitle'},
        'getClientOrderNumber': {'title': _('Order'),
                                 'index': 'getClientOrderNumber'},
        'getRequestID': {'title': _('Request ID'),
                         'attr': 'getRequestID',
                         'replace_url': 'getRequestURL',
                         'index': 'getRequestID'},
        'CategoryTitle': {'title': _('Category'),
                          'attr': 'getCategoryTitle',
                          'sortable': False},
        'Title': {'title': _('Analysis'),
                  'index': 'getId'},
        'getDateReceived': {'title': _('Date Received'),
                            'index': 'getDateReceived'},
        'getDueDate': {'title': _('Due Date'),
                       'index': 'getDueDate'},
    }
    self.filter_indexes = ['Title', ]
    self.review_states = [{
        'id': 'default',
        'title': _('All'),
        'contentFilter': {},
        'transitions': [{'id': 'assign'}, ],
        'columns': ['Priority',
                    'Client',
                    'getClientOrderNumber',
                    'getRequestID',
                    'CategoryTitle',
                    'Title',
                    'getDateReceived',
                    'getDueDate'],
    }]
from bika.lims.interfaces import IAnalysisCategory from bika.lims.config import PROJECTNAME from bika.lims import bikaMessageFactory as _ from zope.interface import implements schema = BikaSchema.copy() + Schema(( ReferenceField('Department', required = 1, vocabulary = 'getDepartments', vocabulary_display_path_bound = sys.maxint, allowed_types = ('Department',), relationship = 'AnalysisCategoryDepartment', referenceClass = HoldingReference, widget = ReferenceWidget( checkbox_bound = 1, label = _('Department'), description = _("The laboratory department"), ), ), ComputedField('DepartmentTitle', expression = "context.getDepartment() and context.getDepartment().Title() or ''", widget = ComputedWidget( visible = False, ), ), )) schema['description'].widget.visible = True schema['description'].schemata = 'default' class AnalysisCategory(BaseContent): implements(IAnalysisCategory)
def Import(context, request):
    """Facs Calibur e9790 analysis results import.

    Reads the uploaded results file from the request form, parses it with
    the format-appropriate parser and feeds it to the importer.  Returns a
    JSON string with 'errors', 'log' and 'warns' lists for the import UI.
    """
    infile = request.form['facs_calibur_file']
    fileformat = request.form['facs_calibur_format']
    artoapply = request.form['facs_calibur_artoapply']
    override = request.form['facs_calibur_override']
    sample = request.form.get('facs_calibur_sample', 'requestid')
    instrument = request.form.get('instrument', None)
    errors = []
    logs = []
    warns = []

    # Load the most suitable parser according to file extension/options/etc...
    parser = None
    # BUGFIX: an uploaded file exposes a 'filename' attribute; the previous
    # check for 'facs_calibur_file' (the form field name) on the file object
    # could never be true, so "No file selected" was never reported.
    if not hasattr(infile, 'filename'):
        errors.append(_("No file selected"))
    if fileformat == 'exp':
        parser = FacsCalibur2CSVParser(infile)
    else:
        errors.append(
            t(_("Unrecognized file format ${fileformat}",
                mapping={"fileformat": fileformat})))

    if parser:
        # AR review states the results may be applied to.
        status = ['sample_received', 'attachment_due', 'to_be_verified']
        if artoapply == 'received':
            status = ['sample_received']
        elif artoapply == 'received_tobeverified':
            status = ['sample_received', 'attachment_due', 'to_be_verified']
        # [override non-empty results?, override with empty results?]
        over = [False, False]
        if override == 'nooverride':
            over = [False, False]
        elif override == 'override':
            over = [True, False]
        elif override == 'overrideempty':
            over = [True, True]
        # Search criteria used to match each result row to an AR/Sample.
        # NOTE(review): 'sam' is computed but never passed to the importer;
        # sibling importers pass it as 'idsearchcriteria' — confirm whether
        # FacsCalibur2Importer accepts that keyword before wiring it up.
        sam = ['getId', 'getSampleID', 'getClientSampleID']
        if sample == 'requestid':
            sam = ['getId']
        if sample == 'sampleid':
            sam = ['getSampleID']
        elif sample == 'clientsid':
            sam = ['getClientSampleID']
        elif sample == 'sample_clientsid':
            sam = ['getSampleID', 'getClientSampleID']
        importer = FacsCalibur2Importer(parser=parser,
                                        context=context,
                                        allowed_ar_states=status,
                                        allowed_analysis_states=None,
                                        override=over,
                                        instrument_uid=instrument)
        tbex = ''
        try:
            importer.process()
        except Exception:
            # Keep the traceback so it can be surfaced in the UI.
            tbex = traceback.format_exc()
        errors = importer.errors
        logs = importer.logs
        warns = importer.warns
        if tbex:
            errors.append(tbex)

    results = {'errors': errors, 'log': logs, 'warns': warns}
    return json.dumps(results)
from bika.lims.browser.widgets.durationwidget import DurationWidget from bika.lims.browser.widgets.recordswidget import RecordsWidget from bika.lims.browser.widgets.referencewidget import ReferenceWidget from bika.lims.browser.widgets.uidselectionwidget import UIDSelectionWidget from bika.lims.config import ATTACHMENT_OPTIONS, SERVICE_POINT_OF_CAPTURE from bika.lims.content.bikaschema import BikaSchema from bika.lims.utils import to_utf8 as _c # Anywhere that there just isn't space for unpredictably long names, # this value will be used instead. It's set on the AnalysisService, # but accessed on all analysis objects. ShortTitle = StringField( 'ShortTitle', schemata="Description", widget=StringWidget( label=_("Short title"), description=_( "If text is entered here, it is used instead of the title when " "the service is listed in column headings. HTML formatting is " "allowed."))) # A simple integer to sort items. SortKey = FloatField( 'SortKey', schemata="Description", validators=('SortKeyValidator', ), widget=DecimalWidget( label=_("Sort Key"), description=_( "Float value from 0.0 - 1000.0 indicating the sort order. " "Duplicate values are ordered alphabetically."),
def __call__(self):
    """Dispatch a workflow action submitted from a bika_listing form.

    Handles the 'sample', 'preserve' and '(pre/re)publish' actions on
    Samples / AnalysisRequests; everything else is delegated to
    AnalysisRequestWorkflowAction.__call__.
    """
    form = self.request.form
    plone.protect.CheckAuthenticator(form)
    self.context = aq_inner(self.context)
    workflow = getToolByName(self.context, 'portal_workflow')
    bc = getToolByName(self.context, 'bika_catalog')
    rc = getToolByName(self.context, REFERENCE_CATALOG)
    translate = self.context.translate
    checkPermission = self.context.portal_membership.checkPermission
    # use came_from to decide which UI action was clicked.
    # "workflow_action" is the action name specified in the
    # portal_workflow transition url.
    came_from = "workflow_action"
    action = form.get(came_from, '')
    if not action and not form.get('bika_listing_filter_bar_submit', ''):
        # workflow_action_button is the action name specified in
        # the bika_listing_view table buttons.
        came_from = "workflow_action_button"
        action = form.get('workflow_action_id', '')
        if not action:
            # Nothing to do: bounce back to where the user came from.
            if self.destination_url == "":
                self.destination_url = self.request.get_header(
                    "referer", self.context.absolute_url())
            self.request.response.redirect(self.destination_url)
            return
    if action == "sample":
        objects = AnalysisRequestWorkflowAction._get_selected_items(self)
        # NOTE(review): only these two target states are expected here;
        # any other resulting state would raise KeyError below — confirm.
        transitioned = {'to_be_preserved': [], 'sample_due': []}
        # Form field name for the sampled date differs between ARs and
        # Samples.
        dsfn = 'getDateSampled'
        for obj_uid, obj in objects.items():
            if obj.portal_type == "AnalysisRequest":
                ar = obj
                sample = obj.getSample()
            else:
                # If it is a Sample, then fieldname is DateSampled
                dsfn = 'DateSampled'
                sample = obj
                ar = sample.aq_parent
            # can't transition inactive items
            if workflow.getInfoFor(sample, 'inactive_state', '') == 'inactive':
                continue
            # grab this object's Sampler and DateSampled from the form
            # (if the columns are available and edit controls exist)
            if 'getSampler' in form and dsfn in form:
                try:
                    Sampler = form['getSampler'][0][obj_uid].strip()
                    DateSampled = form[dsfn][0][obj_uid].strip()
                except KeyError:
                    continue
                Sampler = Sampler and Sampler or ''
                DateSampled = DateSampled and DateTime(DateSampled) or ''
            else:
                continue
            # write them to the sample
            sample.setSampler(Sampler)
            sample.setDateSampled(DateSampled)
            sample.reindexObject()
            ars = sample.getAnalysisRequests()
            # Analyses and AnalysisRequets have calculated fields
            # that are indexed; re-index all these objects.
            for ar in ars:
                ar.reindexObject()
                analyses = sample.getAnalyses({'review_state': 'to_be_sampled'})
                for a in analyses:
                    a.getObject().reindexObject()
            # transition the object if both values are present
            if Sampler and DateSampled:
                workflow.doActionFor(sample, action)
                new_state = workflow.getInfoFor(sample, 'review_state')
                doActionFor(ar, action)
                transitioned[new_state].append(sample.Title())
        # Build one status message per resulting state.
        message = None
        for state in transitioned:
            tlist = transitioned[state]
            if len(tlist) > 1:
                if state == 'to_be_preserved':
                    message = _('${items} are waiting for preservation.',
                                mapping={'items': ', '.join(tlist)})
                else:
                    message = _('${items} are waiting to be received.',
                                mapping={'items': ', '.join(tlist)})
                self.context.plone_utils.addPortalMessage(message, 'info')
            elif len(tlist) == 1:
                if state == 'to_be_preserved':
                    message = _('${item} is waiting for preservation.',
                                mapping={'item': ', '.join(tlist)})
                else:
                    message = _('${item} is waiting to be received.',
                                mapping={'item': ', '.join(tlist)})
                self.context.plone_utils.addPortalMessage(message, 'info')
        if not message:
            message = _('No changes made.')
            self.context.plone_utils.addPortalMessage(message, 'info')
        self.destination_url = self.request.get_header(
            "referer", self.context.absolute_url())
        self.request.response.redirect(self.destination_url)
    elif action == "preserve":
        objects = AnalysisRequestWorkflowAction._get_selected_items(self)
        transitioned = {}
        not_transitioned = []
        Preserver = str()
        DatePreserved = str()
        for obj_uid, obj in objects.items():
            if obj.portal_type == "AnalysisRequest":
                ar = obj
                sample = obj.getSample()
            else:
                sample = obj
                ar = sample.aq_parent
            # can't transition inactive items
            if workflow.getInfoFor(sample, 'inactive_state', '') == 'inactive':
                continue
            if not checkPermission(PreserveSample, sample):
                continue
            # grab this object's Preserver and DatePreserved from the form
            # (if the columns are available and edit controls exist)
            if 'getPreserver' in form and 'getDatePreserved' in form:
                try:
                    Preserver = form['getPreserver'][0][obj_uid].strip()
                    DatePreserved = form['getDatePreserved'][0][obj_uid].strip()
                except KeyError:
                    continue
                Preserver = Preserver and Preserver or ''
                DatePreserved = DatePreserved and DateTime(DatePreserved) or ''
            else:
                continue
            # First pass: stamp preserver/date onto the waiting partitions.
            for sp in sample.objectValues("SamplePartition"):
                if workflow.getInfoFor(sp, 'review_state') == 'to_be_preserved':
                    sp.setDatePreserved(DatePreserved)
                    sp.setPreserver(Preserver)
            # Second pass: transition partitions once both values are set.
            for sp in sample.objectValues("SamplePartition"):
                if workflow.getInfoFor(sp, 'review_state') == 'to_be_preserved':
                    if Preserver and DatePreserved:
                        doActionFor(sp, action)
                        transitioned[sp.aq_parent.Title()] = sp.Title()
                    else:
                        not_transitioned.append(sp)
        # NOTE(review): when nothing was transitioned this still emits the
        # singular message with empty item/part names — confirm intended.
        if len(transitioned.keys()) > 1:
            message = _('${items}: partitions are waiting to be received.',
                        mapping={'items': ', '.join(transitioned.keys())})
        else:
            message = _('${item}: ${part} is waiting to be received.',
                        mapping={'item': ', '.join(transitioned.keys()),
                                 'part': ', '.join(transitioned.values()), })
        self.context.plone_utils.addPortalMessage(message, 'info')
        # And then the sample itself
        # NOTE(review): 'sample' here is whatever the loop last assigned —
        # with multiple selected objects only the last one is transitioned;
        # confirm this is the intended behaviour.
        if Preserver and DatePreserved and not not_transitioned:
            doActionFor(sample, action)
            #message = _('${item} is waiting to be received.',
            #            mapping = {'item': sample.Title()})
            #message = t(message)
            #self.context.plone_utils.addPortalMessage(message, 'info')
        self.destination_url = self.request.get_header(
            "referer", self.context.absolute_url())
        self.request.response.redirect(self.destination_url)
    elif action in ('prepublish', 'publish', 'republish'):
        # We pass a list of AR objects to Publish.
        # it returns a list of AR IDs which were actually published.
        objects = AnalysisRequestWorkflowAction._get_selected_items(self)
        its = []
        for uid, obj in objects.items():
            if isActive(obj):
                its.append(uid);
        its = ",".join(its)
        q = "/publish?items=" + its
        dest = self.portal_url + "/analysisrequests" + q
        self.request.response.redirect(dest)
    else:
        # Fall through to the generic workflow-action handler.
        AnalysisRequestWorkflowAction.__call__(self)
def __call__(self):
    """Render the Analysis Request view.

    Optionally performs a workflow transition requested via the form,
    then assembles the header table, sample partitions table, per
    point-of-capture analyses tables, the QC analyses table and the
    results-interpretation view, adding status messages for retracted /
    invalid / auto-generated ARs along the way.
    """
    ar = self.context
    workflow = getToolByName(self.context, 'portal_workflow')
    if 'transition' in self.request.form:
        doActionFor(self.context, self.request.form['transition'])
    # Contacts get expanded for view; the primary contact is removed from
    # the CC list so it is not shown twice.
    contact = self.context.getContact()
    contacts = []
    for cc in self.context.getCCContact():
        contacts.append(cc)
    if contact in contacts:
        contacts.remove(contact)
    ccemails = []
    for cc in contacts:
        ccemails.append("%s <<a href='mailto:%s'>%s</a>>"
                        % (cc.Title(), cc.getEmailAddress(),
                           cc.getEmailAddress()))
    # CC Emails become mailto links
    emails = self.context.getCCEmails()
    if isinstance(emails, str):
        # Normalise a single address (or empty string) to a list.
        emails = emails and [emails, ] or []
    cc_emails = []
    cc_hrefs = []
    for cc in emails:
        cc_emails.append(cc)
        cc_hrefs.append("<a href='mailto:%s'>%s</a>" % (cc, cc))
    # render header table
    self.header_table = HeaderTableView(self.context, self.request)()
    # Create Partitions View for this ARs sample
    p = SamplePartitionsView(self.context.getSample(), self.request)
    p.show_column_toggles = False
    self.parts = p.contents_table()
    # Create Field and Lab Analyses tables, one per point of capture that
    # actually has analyses.
    self.tables = {}
    for poc in POINTS_OF_CAPTURE:
        if self.context.getAnalyses(getPointOfCapture=poc):
            t = self.createAnalysesView(
                ar,
                self.request,
                getPointOfCapture=poc,
                show_categories=self.context.bika_setup.
                getCategoriseAnalysisServices())
            t.allow_edit = True
            t.form_id = "%s_analyses" % poc
            t.review_states[0]['transitions'] = [{'id': 'submit'},
                                                 {'id': 'retract'},
                                                 {'id': 'verify'}]
            t.show_workflow_action_buttons = True
            t.show_select_column = True
            # Field-result editors don't need the DueDate column.
            if getSecurityManager().checkPermission(EditFieldResults,
                                                    self.context) \
               and poc == 'field':
                t.review_states[0]['columns'].remove('DueDate')
            self.tables[POINTS_OF_CAPTURE.getValue(poc)] = \
                t.contents_table()
    # Un-captured field analyses may cause confusion
    if ar.getAnalyses(getPointOfCapture='field',
                      review_state=['sampled', 'sample_due']):
        message = _("There are field analyses without submitted results.")
        self.addMessage(message, 'info')
    # Create QC Analyses View for this AR (read-only)
    show_cats = self.context.bika_setup.getCategoriseAnalysisServices()
    qcview = self.createQCAnalyesView(ar,
                                      self.request,
                                      show_categories=show_cats)
    qcview.allow_edit = False
    qcview.show_select_column = False
    qcview.show_workflow_action_buttons = False
    # NOTE(review): form_id keeps a literal '%s' here (no % poc applied) —
    # looks like a leftover; confirm before changing.
    qcview.form_id = "%s_qcanalyses"
    qcview.review_states[0]['transitions'] = [{'id': 'submit'},
                                              {'id': 'retract'},
                                              {'id': 'verify'}]
    self.qctable = qcview.contents_table()
    # Create the ResultsInterpretation by department view
    from resultsinterpretation import ARResultsInterpretationView
    self.riview = ARResultsInterpretationView(ar, self.request)
    # If a general retract was done, raise a warning: every analysis is in
    # retracted/to_be_verified/verified while the AR is still received.
    if workflow.getInfoFor(ar, 'review_state') == 'sample_received':
        allstatus = list()
        for analysis in ar.getAnalyses():
            status = workflow.getInfoFor(analysis.getObject(),
                                         'review_state')
            if status not in ['retracted', 'to_be_verified', 'verified']:
                allstatus = []
                break
            else:
                allstatus.append(status)
        if len(allstatus) > 0:
            self.addMessage("General Retract Done", 'warning')
    # If is a retracted AR, show the link to child AR and show a warn msg
    if workflow.getInfoFor(ar, 'review_state') == 'invalid':
        childar = hasattr(ar, 'getChildAnalysisRequest') \
            and ar.getChildAnalysisRequest() or None
        message = _('These results have been withdrawn and are '
                    'listed here for trace-ability purposes. Please follow '
                    'the link to the retest')
        if childar:
            message = (message + " %s.") % childar.getRequestID()
        else:
            message = message + "."
        self.addMessage(message, 'warning')
    # If is an AR automatically generated due to a Retraction, show its
    # parent AR information
    if hasattr(ar, 'getParentAnalysisRequest') \
       and ar.getParentAnalysisRequest():
        par = ar.getParentAnalysisRequest()
        message = _('This Analysis Request has been '
                    'generated automatically due to '
                    'the retraction of the Analysis '
                    'Request ${retracted_request_id}.',
                    mapping={'retracted_request_id': par.getRequestID()})
        self.addMessage(message, 'info')
    self.renderMessages()
    return self.template()
def __init__(self, context, request):
    """Configure the listing of the laboratory's contact persons."""
    super(LabContactsView, self).__init__(context, request)
    self.catalog = "bika_setup_catalog"
    self.contentFilter = {
        "portal_type": "LabContact",
        "sort_on": "sortable_title",
        "sort_order": "ascending",
    }
    self.context_actions = {
        _("Add"): {
            "url": "createObject?type_name=LabContact",
            "permission": AddLabContact,
            "icon": "++resource++bika.lims.images/add.png",
        }
    }
    self.title = self.context.translate(_("Lab Contacts"))
    # NOTE(review): the second argument starts with '/' so the format
    # yields a double slash after portal_url; kept byte-identical since
    # browsers tolerate it.
    self.icon = "{}/{}".format(
        self.portal_url,
        "/++resource++bika.lims.images/labcontact_big.png")
    self.show_select_row = False
    self.show_select_column = True
    self.pagesize = 25
    # Ordered so the columns render in a stable, meaningful sequence.
    self.columns = collections.OrderedDict((
        ("Fullname", {"title": _("Name"),
                      "index": "sortable_title"}),
        ("DefaultDepartment", {"title": _("Default Department"),
                               "toggle": False}),
        ("Departments", {"title": _("Departments"),
                         "toggle": True}),
        ("BusinessPhone", {"title": _("Phone"),
                           "toggle": True}),
        ("Fax", {"title": _("Fax"),
                 "toggle": False}),
        ("MobilePhone", {"title": _("Mobile Phone"),
                         "toggle": True}),
        ("EmailAddress", {"title": _("Email Address"),
                          "toggle": True}),
    ))
    self.review_states = [
        {"id": "default",
         "title": _("Active"),
         "contentFilter": {"is_active": True},
         "transitions": [{"id": "deactivate"}, ],
         "columns": self.columns.keys()},
        {"id": "inactive",
         "title": _("Inactive"),
         "contentFilter": {"is_active": False},
         "transitions": [{"id": "activate"}, ],
         "columns": self.columns.keys()},
        {"id": "all",
         "title": _("All"),
         "contentFilter": {},
         "columns": self.columns.keys()},
    ]
def __init__(self, context, request):
    """Configure the folder listing of Sub-group objects."""
    super(SubGroupsView, self).__init__(context, request)
    self.catalog = 'bika_setup_catalog'
    self.contentFilter = {'portal_type': 'SubGroup',
                          'sort_on': 'sortable_title'}
    self.context_actions = {
        _('Add'): {'url': 'createObject?type_name=SubGroup',
                   'icon': '++resource++bika.lims.images/add.png'}}
    self.icon = self.portal_url + \
        "/++resource++bika.lims.images/batch_big.png"
    self.title = self.context.translate(_("Sub-groups"))
    self.description = ""
    self.show_sort_column = False
    self.show_select_row = False
    self.show_select_column = True
    self.pagesize = 25
    self.columns = {
        'Title': {'title': _('Sub-group'),
                  'index': 'sortable_title'},
        'Description': {'title': _('Description'),
                        'index': 'description',
                        'toggle': True},
        'SortKey': {'title': _('Sort Key')},
    }
    # Independent copies per state, in case a state's columns are
    # mutated later.
    shown = ['Title', 'Description', 'SortKey']
    self.review_states = [
        {'id': 'default',
         'title': _('Active'),
         'contentFilter': {'inactive_state': 'active'},
         'transitions': [{'id': 'deactivate'}],
         'columns': list(shown)},
        {'id': 'inactive',
         'title': _('Inactive'),
         'contentFilter': {'inactive_state': 'inactive'},
         'transitions': [{'id': 'activate'}],
         'columns': list(shown)},
        {'id': 'all',
         'title': _('All'),
         'contentFilter': {},
         'columns': list(shown)},
    ]
from AccessControl import ClassSecurityInfo
from DateTime import DateTime
from Products.Archetypes.public import *
from plone.app.blob.field import FileField as BlobFileField
from Products.CMFCore.utils import getToolByName
from bika.lims.content.bikaschema import BikaSchema
from bika.lims.config import PROJECTNAME
from bika.lims import bikaMessageFactory as _
from bika.lims.utils import t
from bika.lims.browser import ulocalized_time
from bika.lims.utils import user_fullname

# Archetypes schema for a stored report content type: the generated
# report file, a free-text report type, and the client it belongs to.
schema = BikaSchema.copy() + Schema((
    # The rendered report document, stored as a blob.
    BlobFileField(
        'ReportFile',
        widget=FileWidget(label=_("Report"), ),
    ),
    # Free-text identifier of the kind of report stored.
    StringField(
        'ReportType',
        widget=StringWidget(
            label=_("Report Type"),
            description=_("Report type"),
        ),
    ),
    # The Client this report was generated for.
    ReferenceField(
        'Client',
        allowed_types=('Client', ),
        relationship='ReportClient',
        widget=ReferenceWidget(label=_("Client"), ),
    ),
),
)
from Products.ATContentTypes.content import schemata from Products.ATExtensions.ateapi import RecordsField from Products.CMFCore.utils import getToolByName from zope.interface import implements import sys schema = BikaFolderSchema.copy() + BikaSchema.copy() + Schema(( BooleanField( 'HasLevels', default=False, widget=BooleanWidget(visible=False), ), StringField( 'Temperature', widget=StringWidget( label=_('Temperature'), description=_( "Units can be specified in bika setup under Inventory."), input_class='numeric', ), ), ReferenceField( 'Department', vocabulary_display_path_bound=sys.maxint, allowed_types=('Department', ), relationship='StorageUnitDepartment', vocabulary='getDepartments', referenceClass=HoldingReference, widget=ReferenceWidget( checkbox_bound=0, label=_('Department'),
from bika.lims.interfaces import IReferenceSample from bika.lims.utils import sortable_title from zope.interface import implements import sys, time schema = BikaSchema.copy() + Schema(( ReferenceField( 'ReferenceDefinition', schemata=PMF('Description'), allowed_types=('ReferenceDefinition', ), relationship='ReferenceSampleReferenceDefinition', referenceClass=HoldingReference, vocabulary="getReferenceDefinitions", widget=ReferenceWidget( checkbox_bound=1, label=_("Reference Definition"), ), ), BooleanField( 'Blank', schemata=PMF('Description'), default=False, widget=BooleanWidget( label=_("Blank"), description=_("Reference sample values are zero or 'blank'"), ), ), BooleanField( 'Hazardous', schemata=PMF('Description'), default=False,
def Import(context, request):
    """Read Agilent Masshunter analysis results.

    Reads the uploaded results file from the request form, parses it and
    feeds it to the importer.  Returns a JSON string with 'errors',
    'log' and 'warns' lists for the import UI.

    Fixes over the previous revision: 'warns' is initialised up front
    (it was referenced in the result dict even when never assigned); the
    parser is only constructed when a file was actually uploaded
    (previously the import proceeded despite the "No file selected"
    error); the sample-criteria conditional chain uses elif throughout.
    """
    form = request.form
    # TODO form['file'] sometimes returns a list
    infile = form['instrument_results_file'][0] if isinstance(
        form['instrument_results_file'], list) else \
        form['instrument_results_file']
    artoapply = form['artoapply']
    override = form['results_override']
    sample = form.get('sample', 'requestid')
    instrument = form.get('instrument', None)
    errors = []
    logs = []
    warns = []

    # Load the most suitable parser according to file extension/options/etc...
    parser = None
    if not hasattr(infile, 'filename'):
        errors.append(_("No file selected"))
    else:
        parser = AgilentMasshunterParser(infile)

    if parser:
        # AR review states the results may be applied to.
        status = ['sample_received', 'attachment_due', 'to_be_verified']
        if artoapply == 'received':
            status = ['sample_received']
        elif artoapply == 'received_tobeverified':
            status = ['sample_received', 'attachment_due', 'to_be_verified']
        # [override non-empty results?, override with empty results?]
        over = [False, False]
        if override == 'nooverride':
            over = [False, False]
        elif override == 'override':
            over = [True, False]
        elif override == 'overrideempty':
            over = [True, True]
        # Search criteria used to match each result row to an AR/Sample.
        sam = ['getRequestID', 'getSampleID', 'getClientSampleID']
        if sample == 'requestid':
            sam = ['getRequestID']
        elif sample == 'sampleid':
            sam = ['getSampleID']
        elif sample == 'clientsid':
            sam = ['getClientSampleID']
        elif sample == 'sample_clientsid':
            sam = ['getSampleID', 'getClientSampleID']
        importer = AgilentMasshunterImporter(parser=parser,
                                             context=context,
                                             idsearchcriteria=sam,
                                             allowed_ar_states=status,
                                             allowed_analysis_states=None,
                                             override=over,
                                             instrument_uid=instrument)
        tbex = ''
        try:
            importer.process()
        except Exception:
            # Keep the traceback so it can be surfaced in the UI.
            tbex = traceback.format_exc()
        errors = importer.errors
        logs = importer.logs
        warns = importer.warns
        if tbex:
            errors.append(tbex)

    results = {'errors': errors, 'log': logs, 'warns': warns}
    return json.dumps(results)
def __init__(self, context, request):
    """Folder listing of Worksheets.

    Sets up the catalog query, context actions, listing toggles, the
    pick-lists used by the add-worksheet form (analysts, templates,
    instruments) and the column / review-state configuration.
    """
    super(FolderView, self).__init__(context, request)
    self.catalog = 'bika_catalog'
    # Base filter: newest worksheets first, in any actionable state
    self.contentFilter = {
        'portal_type': 'Worksheet',
        'review_state': ['open', 'to_be_verified', 'verified', 'rejected'],
        'sort_on': 'id',
        'sort_order': 'reverse'
    }
    self.context_actions = {
        _('Add'): {
            'url': 'worksheet_add',
            'icon': '++resource++bika.lims.images/add.png',
            'class': 'worksheet_add'
        }
    }
    self.show_table_only = False
    self.show_sort_column = False
    self.show_select_row = False
    self.show_select_all_checkbox = True
    self.show_select_column = True
    self.pagesize = 25
    self.restrict_results = False
    request.set('disable_border', 1)
    self.icon = self.portal_url + "/++resource++bika.lims.images/worksheet_big.png"
    self.title = self.context.translate(_("Worksheets"))
    self.description = ""
    # NOTE(review): `pm` is not used anywhere below -- looks like dead
    # code; confirm before removing.
    pm = getToolByName(context, "portal_membership")
    # this is a property of self, because self.getAnalysts returns it
    self.analysts = getUsers(self, ['Manager', 'LabManager', 'Analyst'])
    self.analysts = self.analysts.sortedByKey()
    bsc = getToolByName(context, 'bika_setup_catalog')
    # Active worksheet templates (brains) for the add-worksheet form
    templates = [
        t for t in bsc(portal_type='WorksheetTemplate',
                       inactive_state='active')
    ]
    # (UID, Title) choices sorted by title.
    # NOTE: cmp-based in-place sorts below are Python 2 only.
    self.templates = [(t.UID, t.Title) for t in templates]
    self.templates.sort(lambda x, y: cmp(x[1], y[1]))
    self.instruments = [
        (i.UID, i.Title)
        for i in bsc(portal_type='Instrument', inactive_state='active')
    ]
    self.instruments.sort(lambda x, y: cmp(x[1], y[1]))
    # Template UID -> default instrument UID ('' when none assigned),
    # used to preselect the instrument when a template is chosen
    self.templateinstruments = {}
    for t in templates:
        i = t.getObject().getInstrument()
        if i:
            self.templateinstruments[t.UID] = i.UID()
        else:
            self.templateinstruments[t.UID] = ''
    self.columns = {
        'Title': {
            'title': _('Worksheet'),
            'index': 'sortable_title'
        },
        'Priority': {
            'title': _('Priority'),
            'index': 'Priority',
            'toggle': True
        },
        'Analyst': {
            'title': _('Analyst'),
            'index': 'getAnalyst',
            'toggle': True
        },
        'Template': {
            'title': _('Template'),
            'toggle': True
        },
        'Services': {
            'title': _('Services'),
            'sortable': False,
            'toggle': False
        },
        'SampleTypes': {
            'title': _('Sample Types'),
            'sortable': False,
            'toggle': False
        },
        'Instrument': {
            'title': _('Instrument'),
            'sortable': False,
            'toggle': False
        },
        'QC': {
            'title': _('QC'),
            'sortable': False,
            'toggle': False
        },
        'QCTotals': {
            'title': _('QC Samples (Analyses)'),
            'sortable': False,
            'toggle': False
        },
        'RoutineTotals': {
            'title': _('Routine Samples (Analyses)'),
            'sortable': False,
            'toggle': False
        },
        'CreationDate': {
            'title': PMF('Date Created'),
            'toggle': True,
            'index': 'created'
        },
        'state_title': {
            'title': _('State'),
            'index': 'review_state'
        },
    }
    # One tab per workflow slice; each repeats the full column list
    self.review_states = [
        {
            'id': 'default',
            'title': _('All'),
            'contentFilter': {
                'portal_type': 'Worksheet',
                'review_state': ['open', 'to_be_verified', 'verified'],
                'sort_on': 'id',
                'sort_order': 'reverse'
            },
            'transitions': [{
                'id': 'retract'
            }, {
                'id': 'verify'
            }, {
                'id': 'reject'
            }],
            'columns': [
                'Title', 'Priority', 'Analyst', 'Template', 'Services',
                'SampleTypes', 'Instrument', 'QC', 'QCTotals',
                'RoutineTotals', 'CreationDate', 'state_title'
            ]
        },
        # getAuthenticatedMember does not work in __init__
        # so 'mine' is configured further in 'folderitems' below.
        {
            'id': 'mine',
            'title': _('Mine'),
            'contentFilter': {
                'portal_type': 'Worksheet',
                'review_state': ['open', 'to_be_verified', 'verified',
                                 'rejected'],
                'sort_on': 'id',
                'sort_order': 'reverse'
            },
            'transitions': [{
                'id': 'retract'
            }, {
                'id': 'verify'
            }, {
                'id': 'reject'
            }],
            'columns': [
                'Title', 'Priority', 'Analyst', 'Template', 'Services',
                'SampleTypes', 'Instrument', 'QC', 'QCTotals',
                'RoutineTotals', 'CreationDate', 'state_title'
            ]
        },
        {
            'id': 'open',
            'title': _('Open'),
            'contentFilter': {
                'portal_type': 'Worksheet',
                'review_state': 'open',
                'sort_on': 'id',
                'sort_order': 'reverse'
            },
            'transitions': [],
            'columns': [
                'Title', 'Priority', 'Analyst', 'Template', 'Services',
                'SampleTypes', 'Instrument', 'QC', 'QCTotals',
                'RoutineTotals', 'CreationDate', 'state_title'
            ]
        },
        {
            'id': 'to_be_verified',
            'title': _('To be verified'),
            'contentFilter': {
                'portal_type': 'Worksheet',
                'review_state': 'to_be_verified',
                'sort_on': 'id',
                'sort_order': 'reverse'
            },
            'transitions': [{
                'id': 'retract'
            }, {
                'id': 'verify'
            }, {
                'id': 'reject'
            }],
            'columns': [
                'Title', 'Priority', 'Analyst', 'Template', 'Services',
                'SampleTypes', 'Instrument', 'QC', 'QCTotals',
                'RoutineTotals', 'CreationDate', 'state_title'
            ]
        },
        {
            'id': 'verified',
            'title': _('Verified'),
            'contentFilter': {
                'portal_type': 'Worksheet',
                'review_state': 'verified',
                'sort_on': 'id',
                'sort_order': 'reverse'
            },
            'transitions': [],
            'columns': [
                'Title', 'Priority', 'Analyst', 'Template', 'Services',
                'SampleTypes', 'Instrument', 'QC', 'QCTotals',
                'RoutineTotals', 'CreationDate', 'state_title'
            ]
        },
    ]
def __init__(self, context, request):
    """Listing used to pick a control reference to add: valid, active
    reference samples whose supported services match the services
    assigned to the current context.
    """
    super(ReferenceSamplesView, self).__init__(context, request)
    self.catalog = "bika_catalog"
    self.contentFilter = {
        "portal_type": "ReferenceSample",
        # Only references supporting the services assigned here
        "getSupportedServices": self.get_assigned_services_uids(),
        "isValid": True,
        "review_state": "current",
        "inactive_state": "active",
        "sort_on": "sortable_title",
        "sort_order": "ascending",
    }
    self.context_actions = {}
    self.title = _("Add Control Reference")
    self.show_select_row = False
    self.show_select_all_checkbox = False
    self.show_column_toggles = False
    self.show_select_column = True
    self.show_categories = False
    # Effectively disables pagination: render all matches at once
    self.pagesize = 999999
    self.allow_edit = True
    self.show_search = False
    # NOTE(review): the second format argument starts with "/", so this
    # yields a double slash after portal_url -- harmless in browsers, but
    # worth confirming/cleaning.
    self.icon = "{}/{}".format(
        self.portal_url,
        "/++resource++bika.lims.images/worksheet_big.png")
    self.columns = collections.OrderedDict((
        ("Title", {
            "title": _("Reference Sample"),
            "sortable": False
        }),
        ("SupportedServices", {
            "title": _("Supported Services"),
            "type": "multiselect",
            "sortable": False
        }),
        ("Position", {
            "title": _("Position"),
            "sortable": False
        }),
    ))
    self.review_states = [
        {
            "id": "default",
            "title": _("All"),
            "contentFilter": {},
            "transitions": [{
                "id": "add"
            }],
            # Custom 'add' button posting back to this same view
            "custom_transitions": [{
                "id": "add",
                "title": _("Add"),
                "url": self.__name__,
            }],
            "columns": self.columns.keys()
        },
    ]
from Products.Archetypes.atapi import TextField from Products.Archetypes.atapi import registerType from Products.ATContentTypes.lib.historyaware import HistoryAwareMixin from Products.ATExtensions.field import RecordsField from Products.CMFCore.utils import getToolByName from Products.CMFCore.WorkflowCore import WorkflowException from Products.CMFPlone.utils import safe_unicode from zope.interface import implements schema = BikaSchema.copy() + Schema(( InterimFieldsField( 'InterimFields', widget=BikaRecordsWidget( label=_("Calculation Interim Fields"), description=_( "Define interim fields such as vessel mass, dilution factors, " "should your calculation require them. The field title " "specified here will be used as column headers and field " "descriptors where the interim fields are displayed. If " "'Apply wide' is enabled the field will be shown in a " "selection box on the top of the worksheet, allowing to apply " "a specific value to all the corresponding fields on the " "sheet."), ) ), UIDReferenceField( 'DependentServices', required=1,
def __init__(self, context, request):
    """Listing view for the audit log: one row per object snapshot,
    showing who changed what and when, newest first.
    """
    super(AuditLogView, self).__init__(context, request)
    self.catalog = "auditlog_catalog"
    self.contentFilter = {
        "sort_on": "snapshot_created",
        # BUGFIX: was misspelled "desscending", which the catalog does
        # not recognize; newest snapshots are expected first.
        "sort_order": "descending",
    }
    self.context_actions = {}
    self.title = self.context.translate(_("Audit Log"))
    # BUGFIX: the second format argument previously started with "/",
    # which produced a double slash after portal_url.
    self.icon = "{}/{}".format(
        self.portal_url,
        "++resource++bika.lims.images/auditlog_big.png")
    self.show_select_column = False
    self.pagesize = 25
    self.columns = collections.OrderedDict((
        ("title", {
            "title": _("Title"),
            "index": "title"
        }),
        ("version", {
            "title": _("Version"),
            "index": "snapshot_version",
            "sortable": True
        }),
        ("modified", {
            "title": _("Date Modified"),
            "index": "modified",
            "sortable": True
        }),
        ("actor", {
            "title": _("Actor"),
            "index": "actor",
            "sortable": True
        }),
        ("fullname", {
            "title": _("Fullname"),
            "index": "fullname",
            "sortable": True
        }),
        ("roles", {
            "title": _("Roles"),
            "sortable": False,
            "toggle": False
        }),
        ("remote_address", {
            "title": _("Remote IP"),
            "sortable": True
        }),
        ("action", {
            "title": _("Action"),
            "index": "action",
            "sortable": True
        }),
        ("review_state", {
            "title": _("Workflow State"),
            "index": "review_state",
            "sortable": True
        }),
        ("diff", {
            "title": _("Changes"),
            "sortable": False
        }),
    ))
    self.review_states = [{
        "id": "default",
        "title": _("Active"),
        "contentFilter": {},
        "columns": self.columns.keys(),
    }]
def get_colors_palette(self):
    """Return the color palette used by the evolution charts.

    Keys are both raw workflow state ids and their translated display
    titles, so the chart can resolve a color either way; values are hex
    color codes.
    """
    color_groups = (
        ('#917A4C', ('to_be_sampled', _('To be sampled'))),
        ('#C2803E', ('to_be_preserved', _('To be preserved'))),
        ('#F38630', ('scheduled_sampling', _('Sampling scheduled'))),
        ('#FA6900', ('sample_due', _('Reception pending'))),
        ('#E0E4CC', ('sample_received', _('Assignment pending'),
                     _('Sample received'))),
        ('#dcdcdc', ('assigned', 'attachment_due', 'open',
                     _('Results pending'))),
        ('#FF6B6B', ('rejected', 'retracted', _('Rejected'),
                     _('Retracted'))),
        ('#C44D58', ('invalid', _('Invalid'))),
        ('#A7DBD8', ('to_be_verified', _('To be verified'))),
        ('#69D2E7', ('verified', _('Verified'))),
        ('#83AF9B', ('published', _('Published'))),
    )
    palette = {}
    for color, keys in color_groups:
        for key in keys:
            palette[key] = color
    return palette
def Import(context, request):
    """Abbott m2000 Real Time results import.

    This function handles requests when the user uploads a file and
    submits. It gets request parameters and creates a Parser object based
    on the parameters' values. After that, and based on that parser
    object, it creates an Importer object that will process the selected
    file and try to import the results. Returns a JSON string with
    'errors', 'log' and 'warns' lists for the import view.
    """
    # Read the values the user has specified for the parameters
    # that appear in the import view of the current instrument
    # and that are defined in the instrument interface template
    infile = request.form['filename']
    fileformat = request.form['format']
    artoapply = request.form['artoapply']
    override = request.form['override']
    instrument = request.form.get('instrument', None)
    errors = []
    logs = []
    warns = []

    # Load the most suitable parser according to file extension/options/etc...
    parser = None
    if not hasattr(infile, 'filename'):
        errors.append(_("No file selected"))
    if fileformat == 'tsv':
        parser = Abbottm2000rtTSVParser(infile)
    else:
        errors.append(
            t(
                _("Unrecognized file format ${fileformat}",
                  mapping={"fileformat": fileformat})))

    if parser:
        # Select parameters for the importer from the values
        # just read from the import view
        status = ['sample_received', 'attachment_due', 'to_be_verified']
        if artoapply == 'received':
            status = ['sample_received']
        elif artoapply == 'received_tobeverified':
            status = ['sample_received', 'attachment_due', 'to_be_verified']

        # over = [override existing results?, override with empty values?]
        over = [False, False]
        if override == 'nooverride':
            over = [False, False]
        elif override == 'override':
            over = [True, False]
        elif override == 'overrideempty':
            over = [True, True]

        # Create importer with the defined parser and the rest of defined
        # parameters, then try to import the results from the file
        importer = Abbottm2000rtImporter(parser=parser,
                                         context=context,
                                         allowed_ar_states=status,
                                         allowed_analysis_states=None,
                                         override=over,
                                         instrument_uid=instrument)
        tbex = ''
        try:
            # run the parser and save results
            importer.process()
        except Exception:
            # was a bare `except:`; narrowed so SystemExit and
            # KeyboardInterrupt are no longer swallowed. The traceback is
            # surfaced to the user via the errors list below.
            tbex = traceback.format_exc()
        errors = importer.errors
        logs = importer.logs
        warns = importer.warns
        if tbex:
            errors.append(tbex)

    results = {'errors': errors, 'log': logs, 'warns': warns}
    return json.dumps(results)
def get_analyses_section(self):
    """Returns the section dictionary related with Analyses, that
    contains some informative panels (analyses pending, analyses
    assigned, etc.)
    """
    panels = []
    catalog = getToolByName(self.context, CATALOG_ANALYSIS_LISTING)

    base_query = {'portal_type': "Analysis", 'is_active': True}
    # Check if dashboard_cookie contains any values to query elements by
    base_query = self._update_criteria_with_filters(base_query, 'analyses')

    # Active Analyses (All): denominator shared by every panel
    total = self.search_count(base_query, catalog.id)

    # (panel title, workflow states) for each informative panel
    panel_specs = (
        (_('Assignment pending'), ['unassigned']),
        (_('Results pending'), ['unassigned', 'assigned']),
        (_('To be verified'), ['to_be_verified']),
        (_('Verified'), ['verified']),
    )
    for title, states in panel_specs:
        base_query['review_state'] = states
        panels.append(
            self._getStatistics(title, title, '#', catalog, base_query,
                                total))

    # Chart with the evolution of Analyses over a period, grouped by
    # periodicity
    evolution = self.fill_dates_evo(catalog, base_query)
    panels.append({
        'type': 'bar-chart-panel',
        'name': _('Evolution of Analyses'),
        'class': 'informative',
        'description': _('Evolution of Analyses'),
        'data': json.dumps(evolution),
        'datacolors': json.dumps(self.get_colors_palette())
    })

    return {'id': 'analyses', 'title': _('Analyses'), 'panels': panels}
def _fill_dates_evo(self, query_json, catalog_name, periodicity):
    """Returns an array of dictionaries, where each dictionary contains
    the amount of items created at a given date and grouped by
    review_state, based on the passed in periodicity.

    This is an expensive function that will not be called more than once
    every 2 hours (note cache decorator with `time() // (60 * 60 * 2)`).
    """
    outevoidx = {}  # date label -> row index in outevo
    outevo = []     # chart rows, one dict per period
    # Step size (in days) used to walk the date range period by period
    days = 1
    if periodicity == PERIODICITY_YEARLY:
        days = 336
    elif periodicity == PERIODICITY_BIANNUAL:
        days = 168
    elif periodicity == PERIODICITY_QUARTERLY:
        days = 84
    elif periodicity == PERIODICITY_MONTHLY:
        days = 28
    elif periodicity == PERIODICITY_WEEKLY:
        days = 7
    elif periodicity == PERIODICITY_ALL:
        days = 336

    # Get the date range
    date_from, date_to = self.get_date_range(periodicity)

    query = json.loads(query_json)
    if 'review_state' in query:
        # grouping is done by state below, so drop any state filter
        del query['review_state']
    query['sort_on'] = 'created'
    query['created'] = {'query': (date_from, date_to),
                        'range': 'min:max'}

    # Bucket label for states not present in the states map
    otherstate = _('Other status')
    statesmap = self.get_states_map(query['portal_type'])
    # NOTE: relies on Python 2 semantics -- dict.values() returns a list
    # (sortable in place) and map() below returns a list (reversible)
    stats = statesmap.values()
    stats.sort()
    stats.append(otherstate)
    statscount = {s: 0 for s in stats}

    # Add first all periods, cause we want all segments to be displayed
    curr = date_from.asdatetime()
    end = date_to.asdatetime()
    while curr < end:
        currstr = self._getDateStr(periodicity, DateTime(curr))
        if currstr not in outevoidx:
            outdict = {'date': currstr}
            for k in stats:
                outdict[k] = 0
            outevo.append(outdict)
            outevoidx[currstr] = len(outevo) - 1
        curr = curr + datetime.timedelta(days=days)

    # Tally each catalog brain into its period row and state column
    brains = search(query, catalog_name)
    for brain in brains:
        created = brain.created
        state = brain.review_state
        if state not in statesmap:
            logger.warn("'%s' State for '%s' not available" %
                        (state, query['portal_type']))
        # Fold unmapped states into the generic 'Other status' bucket
        state = statesmap[state] if state in statesmap else otherstate
        created = self._getDateStr(periodicity, created)
        statscount[state] += 1
        if created in outevoidx:
            oidx = outevoidx[created]
            if state in outevo[oidx]:
                outevo[oidx][state] += 1
            else:
                outevo[oidx][state] = 1
        else:
            # Create new row
            # NOTE(review): this fallback row is not registered in
            # outevoidx, so further brains sharing the same date label
            # would append duplicate rows -- confirm this cannot happen
            # (the while loop above should pre-create all periods).
            currow = {'date': created, state: 1}
            outevo.append(currow)

    # Remove all those states for which there is no data
    rstates = [k for k, v in statscount.items() if v == 0]
    for o in outevo:
        for r in rstates:
            if r in o:
                del o[r]

    # Sort available status by number of occurences descending
    sorted_states = sorted(statscount.items(), key=itemgetter(1))
    sorted_states = map(lambda item: item[0], sorted_states)
    sorted_states.reverse()

    return {'data': outevo, 'states': sorted_states}
def get_analysisrequests_section(self):
    """Returns the section dictionary related with Analysis Requests,
    that contains some informative panels (like ARs to be verified,
    ARs to be published, etc.)
    """
    out = []
    catalog = getToolByName(self.context, CATALOG_ANALYSIS_REQUEST_LISTING)
    # Base query: all active samples; refined per panel below by
    # mutating 'review_state' in place
    query = {'portal_type': "AnalysisRequest", 'is_active': True}
    # Check if dashboard_cookie contains any values to query
    # elements by
    query = self._update_criteria_with_filters(query, 'analysisrequests')
    # Active Samples (All): denominator shared by every panel
    total = self.search_count(query, catalog.id)
    # Sampling workflow enabled?
    if self.context.bika_setup.getSamplingWorkflowEnabled():
        # Samples awaiting to be sampled or scheduled
        name = _('Samples to be sampled')
        desc = _("To be sampled")
        purl = 'samples?samples_review_state=to_be_sampled'
        query['review_state'] = ['to_be_sampled', ]
        out.append(
            self._getStatistics(name, desc, purl, catalog, query, total))
        # Samples awaiting to be preserved
        name = _('Samples to be preserved')
        desc = _("To be preserved")
        purl = 'samples?samples_review_state=to_be_preserved'
        query['review_state'] = ['to_be_preserved', ]
        out.append(
            self._getStatistics(name, desc, purl, catalog, query, total))
        # Samples scheduled for Sampling
        name = _('Samples scheduled for sampling')
        desc = _("Sampling scheduled")
        purl = 'samples?samples_review_state=scheduled_sampling'
        query['review_state'] = ['scheduled_sampling', ]
        out.append(
            self._getStatistics(name, desc, purl, catalog, query, total))
    # Samples awaiting for reception
    name = _('Samples to be received')
    desc = _("Reception pending")
    purl = 'analysisrequests?analysisrequests_review_state=sample_due'
    query['review_state'] = ['sample_due', ]
    out.append(self._getStatistics(name, desc, purl, catalog, query, total))
    # Samples under way
    name = _('Samples with results pending')
    desc = _("Results pending")
    purl = 'analysisrequests?analysisrequests_review_state=sample_received'
    query['review_state'] = ['attachment_due', 'sample_received', ]
    out.append(self._getStatistics(name, desc, purl, catalog, query, total))
    # Samples to be verified
    name = _('Samples to be verified')
    desc = _("To be verified")
    purl = 'analysisrequests?analysisrequests_review_state=to_be_verified'
    query['review_state'] = ['to_be_verified', ]
    out.append(self._getStatistics(name, desc, purl, catalog, query, total))
    # Samples verified (to be published)
    name = _('Samples verified')
    desc = _("Verified")
    purl = 'analysisrequests?analysisrequests_review_state=verified'
    query['review_state'] = ['verified', ]
    out.append(self._getStatistics(name, desc, purl, catalog, query, total))
    # Samples published
    name = _('Samples published')
    desc = _("Published")
    purl = 'analysisrequests?analysisrequests_review_state=published'
    query['review_state'] = ['published', ]
    out.append(self._getStatistics(name, desc, purl, catalog, query, total))
    # Samples to be printed
    if self.context.bika_setup.getPrintingWorkflowEnabled():
        name = _('Samples to be printed')
        desc = _("To be printed")
        purl = 'analysisrequests?analysisrequests_getPrinted=0'
        query['getPrinted'] = '0'
        query['review_state'] = ['published', ]
        out.append(
            self._getStatistics(name, desc, purl, catalog, query, total))
    # Chart with the evolution of ARs over a period, grouped by
    # periodicity
    # NOTE(review): if the printing branch ran, 'getPrinted' is still in
    # `query` here (the evolution helper only strips 'review_state'), so
    # the chart would then exclude printed samples -- confirm intended.
    outevo = self.fill_dates_evo(catalog, query)
    out.append({
        'type': 'bar-chart-panel',
        'name': _('Evolution of Samples'),
        'class': 'informative',
        'description': _('Evolution of Samples'),
        'data': json.dumps(outevo),
        'datacolors': json.dumps(self.get_colors_palette())
    })
    return {'id': 'analysisrequests', 'title': _('Samples'), 'panels': out}
def get_states_map(self, portal_type):
    """Return the mapping of raw review-state ids to the (translated)
    titles displayed on the dashboard for the given portal type.

    Returns None for portal types other than Analysis, AnalysisRequest
    and Worksheet, exactly as the original if/elif chain did.
    """
    analysis_states = {
        'unassigned': _('Assignment pending'),
        'assigned': _('Results pending'),
        'to_be_verified': _('To be verified'),
        'rejected': _('Rejected'),
        'retracted': _('Retracted'),
        'verified': _('Verified'),
        'published': _('Published')
    }
    ar_states = {
        'to_be_sampled': _('To be sampled'),
        'to_be_preserved': _('To be preserved'),
        'scheduled_sampling': _('Sampling scheduled'),
        'sample_due': _('Reception pending'),
        'rejected': _('Rejected'),
        'invalid': _('Invalid'),
        'sample_received': _('Results pending'),
        'assigned': _('Results pending'),
        'attachment_due': _('Results pending'),
        'to_be_verified': _('To be verified'),
        'verified': _('Verified'),
        'published': _('Published')
    }
    worksheet_states = {
        'open': _('Results pending'),
        'attachment_due': _('Results pending'),
        'to_be_verified': _('To be verified'),
        'verified': _('Verified')
    }
    dispatch = {
        'Analysis': analysis_states,
        'AnalysisRequest': ar_states,
        'Worksheet': worksheet_states,
    }
    return dispatch.get(portal_type)