                SampleType = sample[4],
                NoContainers = sample[5],
                AnalysisProfile = profiles,
                Analyses = analyses,
            )
            aritem.processForm()

        arimport.edit(
            ImportOption = 's',
            ClientTitle = clientname,
            ClientID = client_id,
            ClientPhone = clientphone,
            ClientFax = clientfax,
            ClientAddress = clientaddress,
            ClientCity = clientcity,
            ClientEmail = clientemail,
            ContactName = contact,
            CCEmails = ccemail,
            Remarks = batch_remarks,
            OrderID = client_orderid,
            Temperature = temperature,
            DateImported = DateTime(),
        )
        arimport.processForm()

        valid = self.validate_arimport_s(arimport)

        REQUEST.RESPONSE.write(
            '<script>document.location.href="%s/client_arimports'
            '?portal_status_message=%s%%20imported"</script>'
            % (client.absolute_url(), arimport_id))

InitializeClass(bika_ar_import)
        if spec in ['lab', 'all', 'clientandlab']:
            for p in self.portal_catalog(portal_type='AnalysisProfile',
                                         sort_on='sortable_title'):
                profile = p.getObject()
                # create detail line
                detail = [profile.Title(), profile.getProfileKey(), 'Lab']
                rows.append(detail)

        # convert lists to csv string
        ramdisk = StringIO()
        writer = csv.writer(ramdisk, delimiter = delimiter,
                            quoting = csv.QUOTE_NONNUMERIC)
        assert (writer)
        writer.writerows(rows)
        result = ramdisk.getvalue()
        ramdisk.close()

        # stream file to browser
        setheader = self.REQUEST.RESPONSE.setHeader
        setheader('Content-Length', len(result))
        setheader('Content-Type', 'text/comma-separated-values')
        setheader('Content-Disposition', 'inline; filename=%s' % filename)
        self.REQUEST.RESPONSE.write(result)
        return

InitializeClass(bika_profiles_export)
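# Illustrative sketch, not part of the tool above: the same rows-to-CSV
# pattern in isolation, using only the stdlib. The function name and the
# default delimiter are assumptions made for the example.
def _example_rows_to_csv(rows, delimiter=','):
    from StringIO import StringIO
    import csv
    ramdisk = StringIO()
    writer = csv.writer(ramdisk, delimiter=delimiter,
                        quoting=csv.QUOTE_NONNUMERIC)
    writer.writerows(rows)           # each row is a list of cell values
    result = ramdisk.getvalue()      # the whole CSV document as one string
    ramdisk.close()
    return result
# e.g. _example_rows_to_csv([['Profile title', 'KEY', 'Lab']])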
                    del self.REQUEST.other['suppress_escalation']
                results[sample_id]['added'].append('%s' % (service_title))
                if analysis:
                    analysis.setUncertainty(
                        self.get_uncertainty(result, service))
                    analysis.setResult(result)
                    # set dummy titration values if required
                    if analysis.getCalcType() == 't':
                        analysis.setTitrationVolume(result)
                        analysis.setTitrationFactor('1')
                    analysis.processForm()

        results_ids = {}
        results_ids['results'] = results
        results_ids['ids'] = ids
        return results_ids

    def getInstrumentKeywordToServiceIdMap(self):
        d = {}
        for p in self.portal_catalog(portal_type='AnalysisService'):
            obj = p.getObject()
            keyword = obj.getInstrumentKeyword()
            if keyword:
                d[keyword] = obj.getId()
        return d

InitializeClass(bika_instrument_import)
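# Illustrative sketch, not part of the tool above: one way the keyword map
# is typically consumed, translating (instrument keyword, result) pairs from
# a parsed instrument file into (service id, result) pairs. The function
# name and the shape of parsed_rows are assumptions made for the example.
def _example_match_instrument_rows(tool, parsed_rows):
    keyword_map = tool.getInstrumentKeywordToServiceIdMap()
    matched, unmatched = [], []
    for keyword, result in parsed_rows:
        service_id = keyword_map.get(keyword)
        if service_id:
            matched.append((service_id, result))
        else:
            unmatched.append(keyword)   # no AnalysisService carries this keyword
    return matched, unmatched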
                    service.getCategoryTitle(),
                    service.Title(),
                    service.getKeyword(),
                    service.getInstrumentKeyword(),
                    service.getPrice(),
                    service.getBulkPrice()
                ]
                rows.append(detail)

        # convert lists to csv string
        ramdisk = StringIO()
        writer = csv.writer(ramdisk, delimiter = delimiter,
                            quoting = csv.QUOTE_NONNUMERIC)
        assert (writer)
        writer.writerows(rows)
        result = ramdisk.getvalue()
        ramdisk.close()

        # stream file to browser
        setheader = self.REQUEST.RESPONSE.setHeader
        setheader('Content-Length', len(result))
        setheader('Content-Type', 'text/comma-separated-values')
        setheader('Content-Disposition', 'inline; filename=%s' % filename)
        self.REQUEST.RESPONSE.write(result)
        return

InitializeClass(bika_services_export)
            detail.append(attachments)
            rows.append(detail)
            count += 1

        # append a totals row, padded with blanks so the totals
        # land in the right columns
        detail = []
        for i in range(count_cell - 1):
            detail.append('')
        detail.append('Total')
        detail.append(total_count)
        detail.append(total_price)
        rows.append(detail)

        # convert lists to csv string
        ramdisk = StringIO()
        writer = csv.writer(ramdisk, delimiter = delimiter,
                            quoting = csv.QUOTE_NONNUMERIC)
        assert (writer)
        writer.writerows(rows)
        result = ramdisk.getvalue()
        ramdisk.close()

        file_data = {}
        file_data['file'] = result
        file_data['file_name'] = filename
        return file_data

InitializeClass(bika_ar_export)
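# Illustrative sketch, not part of the tool above: unlike the exports that
# stream straight to the browser, this tool hands back a
# {'file': ..., 'file_name': ...} dict and leaves delivery to the caller.
# Saving it to disk is one option; the function name and directory handling
# are assumptions made for the example.
def _example_save_file_data(file_data, directory):
    import os
    path = os.path.join(directory, file_data['file_name'])
    f = open(path, 'wb')
    try:
        f.write(file_data['file'])
    finally:
        f.close()
    return path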
                    price_change = True
                    service_obj.edit(Price = new_price)
                    msgs.append('%s %s %s %s price updated from %s to %s'
                                % (counter, cat, service, keyword,
                                   old_price, new_price))
            if new_cprice:
                if old_cprice != new_cprice:
                    cprice_change = True
                    service_obj.edit(BulkPrice = new_cprice)
                    msgs.append('%s %s %s %s bulk discount updated from %s to %s'
                                % (counter, cat, service, keyword,
                                   old_cprice, new_cprice))

            if price_change and cprice_change:
                updated_both_counter += 1
            elif price_change:
                updated_price_counter += 1
            elif cprice_change:
                updated_cprice_counter += 1

        msgs.append('____________________________________________________')
        msgs.append('%s services in input file' % (counter - 1))
        msgs.append('%s services without keyword - not updated' % (no_kw_counter))
        msgs.append('%s duplicate services - not updated' % (dup_counter))
        msgs.append('%s services not found - not updated' % (not_found_counter))
        msgs.append('%s service price and bulk discounts updated' % (updated_both_counter))
        msgs.append('%s service prices updated' % (updated_price_counter))
        msgs.append('%s service bulk discounts updated' % (updated_cprice_counter))
        return msgs

InitializeClass(bika_analysis_reset)
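# Illustrative sketch, not part of the tool above: the per-service tallying
# in miniature. Each row carries two independent change flags, and the three
# summary counters are exclusive, so a service whose price and bulk discount
# both changed is counted once under 'both'. The function name and input
# shape are assumptions made for the example.
def _example_tally_changes(change_flags):
    both = price_only = cprice_only = 0
    for price_change, cprice_change in change_flags:
        if price_change and cprice_change:
            both += 1
        elif price_change:
            price_only += 1
        elif cprice_change:
            cprice_only += 1
    return both, price_only, cprice_only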
""" def indexObject(obj, path): self.reindexObject(obj) at = getToolByName(self, 'archetype_tool') types = [k for k, v in at.catalog_map.items() if self.id in v] self.manage_catalogClear() portal = getToolByName(self, 'portal_url').getPortalObject() portal.ZopeFindAndApply(portal, obj_metatypes=types, search_sub=True, apply_func=indexObject) InitializeClass(BikaCatalog) class BikaAnalysisCatalog(CatalogTool): """Catalog for analysis types""" implements(IBikaAnalysisCatalog) security = ClassSecurityInfo() _properties = ({'id': 'title', 'type': 'string', 'mode': 'w'}, ) title = 'Bika Analysis Catalog' id = 'bika_analysis_catalog' portal_type = meta_type = 'BikaAnalysisCatalog' plone_tool = 1