def sort_ar_fields(portal):
    """Persist the desired field ordering for the AR Add form.
    """
    logger.info("*** Sorting fields from AR Add ***")
    # NOTE: renamed from `sorted` to avoid shadowing the builtin
    field_order = [
        'Client',
        'Contact',
        'ParticipantID',
        'OtherParticipantReference',
        'ParticipantInitials',
        'Gender',
        'Visit',
        'DateOfBirth',
        'Fasting',
        'ClientSampleID',
        'DateSampled',
        'SampleType',
        'Volume',
        'DefaultContainerType',
        'Template',
        'OtherInformation',
        '_ARAttachment',
        'Priority',
        'Remarks',
    ]
    storage = get_manage_add_storage(portal)
    storage.update({"order": field_order})
    update_manage_add_storage(portal, storage)
def commit_transaction(portal):
    """Commit the current Zope transaction and log how long it took.

    The `portal` argument is accepted for call-site symmetry; it is not
    read by this function.
    """
    started = time.time()
    logger.info("Commit transaction ...")
    transaction.commit()
    elapsed = time.time() - started
    logger.info("Commit transaction ... Took {:.2f}s [DONE]".format(elapsed))
def get_analyses_data(self):
    """Build one export row per analysis of every AR returned by search().

    Each row is produced by applying the (title, key, converter) entries
    of ANALYSES_ROWS; keys prefixed with "Analysis" are resolved against
    the analysis model instead of the AR model.
    """
    rows = []
    requests = self.search()
    total = len(requests)
    logger.info("Exporting data of {} ARs".format(total))
    for num, request in enumerate(requests):
        request_model = SuperModel(api.get_uid(request))
        for analysis in self.get_analyses(request_model):
            analysis_model = SuperModel(api.get_uid(analysis))
            data = []
            for title, key, converter in ANALYSES_ROWS:
                model = request_model
                if key.startswith("Analysis"):
                    # Strip the "Analysis." prefix and read from the analysis
                    key = ".".join(key.split(".")[1:])
                    model = analysis_model
                value = self.get(model, key)
                data.append(converter(model, key, value))
            rows.append(data)
        if num % 100 == 0:
            logger.info("Exported {}/{}".format(num, total))
    return rows
def allow_client_courier_types(portal):
    """Make ClientCourier an addable content type inside Client objects.
    """
    types_tool = getToolByName(portal, 'portal_types')
    client_fti = getattr(types_tool, 'Client')
    allowed = client_fti.allowed_content_types
    if 'ClientCourier' not in allowed:
        client_fti.allowed_content_types = allowed + ('ClientCourier', )
    logger.info("Adding Client Courier to Client allowed types [DONE]")
def setup_workflows(portal):
    """Apply the configured state/transition updates to each workflow.
    """
    logger.info("Setting up workflows ...")
    for wf_id in WORKFLOWS_TO_UPDATE:
        update_workflow(wf_id, WORKFLOWS_TO_UPDATE[wf_id])
    logger.info("Setting up workflows [DONE]")
def setup_printer_stickers(portal):
    """Setup printers and stickers templates
    """
    logger.info("Setting up printers and stickers ...")

    def ensure_printer(folder, name, values):
        # Update the existing printer in place when one with this title
        # is already catalogued
        brains = api.search(
            dict(portal_type="BarcodePrinter", Title=name),
            "bika_setup_catalog")
        if brains:
            printer = api.get_object(brains[0])
            printer.FileName = values["FileName"]
            printer.PrinterPath = values["PrinterPath"]
            printer.Template = values["Template"]
            return printer

        # Otherwise create a fresh Barcode Printer
        printer = _createObjectByType("BarcodePrinter", folder, tmpID())
        printer.edit(title=name,
                     FileName=values["FileName"],
                     PrinterPath=values["PrinterPath"],
                     Template=values["Template"])
        printer.unmarkCreationFlag()
        renameAfterCreation(printer)

    folder = portal.bika_setup.barcodeprinters
    for printer_name, printer_values in PRINTERS.items():
        ensure_printer(folder, printer_name, printer_values)
    logger.info("Setting up printers and stickers [DONE]")
def sanitize_ranges_calculation_from_analyses(portal):
    """Detach the 'Ranges calculation' from services and analyses.

    Looks up the calculation titled "Ranges calculation"; when found,
    removes it from every Analysis Service bound to it and from every
    analysis in the analysis catalog that still references it.
    """
    logger.info("Sanitizing 'Ranges Calculation' from analyses")
    found = api.search(
        dict(portal_type="Calculation", title="Ranges calculation"),
        "bika_setup_catalog")
    if not found:
        logger.warn("Calculation 'Ranges calculation' not found! [SKIP]")
        return
    calc = api.get_object(found[0])
    calc_uid = api.get_uid(calc)

    # Cleanup analysis services first
    service_brains = api.search(
        dict(portal_type="AnalysisService", getCalculationUID=calc_uid),
        "bika_setup_catalog")
    for brain in service_brains:
        service = api.get_object(brain)
        service.setCalculation(None)
        service.reindexObject()

    # Cleanup analyses: iterate the whole analysis catalog and filter by
    # the calculation UID on the brain
    for brain in api.search(dict(), CATALOG_ANALYSIS_LISTING):
        if brain.getCalculationUID != calc_uid:
            continue
        analysis = api.get_object(brain)
        analysis.setCalculation(None)
        analysis.reindexObject()
    logger.info("Sanitizing 'Ranges Calculation' from analyses [DONE]")
def fix_analyses_storage_instrument(portal):
    """Remove instrument assignments from 'Storage requisition' analyses.

    Clears the instrument on every Analysis Service of that category and
    on every analysis of that category that is not yet published,
    rejected or invalid.
    """
    logger.info("Sanitizing 'Storage instrument' from analyses")
    found = api.search(
        dict(portal_type="AnalysisCategory", title="Storage requisition"),
        "bika_setup_catalog")
    if not found:
        logger.warn("Category 'Storage requisition' not found [SKIP]")
        return
    cat_uid = api.get_uid(found[0])

    # Cleanup analysis services first
    service_brains = api.search(
        dict(portal_type="AnalysisService", getCategoryUID=cat_uid),
        "bika_setup_catalog")
    for brain in service_brains:
        service = api.get_object(brain)
        if not service.getInstrument():
            continue
        service.setInstrument(None)
        service.reindexObject()

    # Cleanup analyses, skipping those in a final state
    final_states = ['published', 'rejected', 'invalid']
    for brain in api.search(dict(getCategoryUID=cat_uid, ),
                            CATALOG_ANALYSIS_LISTING):
        if brain.review_state in final_states:
            continue
        if not brain.getInstrumentUID:
            continue
        analysis = api.get_object(brain)
        analysis.setInstrument(None)
        analysis.reindexObject()
    logger.info("Sanitizing 'Storage instrument' from analyses [DONE]")
def sort_ar_add_fields(portal):
    """Persist the default field ordering (ADD_AR_FIELDS_SORTED) for the
    AR Add form.
    """
    logger.info("Sorting fields from AR Add ...")
    # Read the current manage-add storage, overwrite the order key, and
    # write it back
    storage = get_manage_add_storage(portal)
    storage.update({"order": ADD_AR_FIELDS_SORTED})
    update_manage_add_storage(portal, storage)
    logger.info("Sorting fields from AR Add [DONE]")
def disable_autopartitioning(portal):
    """Turn off auto-partitioning on every AR Template.
    """
    logger.info("Disabling auto-partitioning for Templates ...")
    brains = api.search(dict(portal_type="ARTemplate"), "portal_catalog")
    for brain in brains:
        template = api.get_object(brain)
        template.setAutoPartition(False)
        template.reindexObject()
    logger.info("Disabling auto-partitioning for Templates [DONE]")
def create_requests_from_partitions(analysis_request):
    """If more than one SamplePartition is set for the given AnalysisRequest,
    creates a new internal AR for every single SamplePartition, assign the
    primary sample to children and removes the analyses from the primary AR.

    :param analysis_request: the primary AnalysisRequest to split up
    :returns: list of newly created (derived) AnalysisRequests; empty list
        when there are fewer than two partitions
    """
    logger.info("*** Creating new requests from partitions ***")
    partitions = analysis_request.getPartitions()
    if len(partitions) < 2:
        # Only one partition, do not create new requests
        return list()

    created = list()
    client = analysis_request.getClient()
    primary_sample = analysis_request.getSample()
    primary_sample_uid = api.get_uid(primary_sample)

    # Collect the names of all proxy fields of the AR schema; they mirror
    # values from elsewhere and must not be copied into the derived ARs
    ar_proxies = analysis_request.Schema().fields()
    ar_proxies = filter(lambda field: IProxyField.providedBy(field), ar_proxies)
    ar_proxies = map(lambda field: field.getName(), ar_proxies)

    # Fields that are set explicitly below (or must stay empty) on the copies
    skip_fields = [
        "Client",
        "Sample",
        "PrimarySample",
        "Template",
        "Profile",
        "Profiles",
        "Analyses",
        "ParentAnalysisRequest",
        "PrimaryAnalysisRequest",
        "RejectionReasons",
        "Remarks"
    ]
    skip_fields.extend(ar_proxies)

    for part in partitions:
        analyses = part.getAnalyses()
        analyses = map(lambda an: api.get_object(an), analyses)

        # Create the new derivative sample (~partition), flagged as
        # internal-use and linked back to the primary sample
        field_values = dict(PrimarySample=primary_sample_uid,
                            InternalUse=True)
        sample_copy = copy(primary_sample, container=client,
                           new_field_values=field_values)
        #sample_copy.id = part.id
        sample_uid = api.get_uid(sample_copy)

        # Create a new Analysis Request for this Sample and analyses,
        # linked back to the primary AR
        field_values = dict(Sample=sample_uid, Analyses=analyses,
                            PrimaryAnalysisRequest=analysis_request)
        ar_copy = copy(analysis_request, container=client,
                       skip_fields=skip_fields,
                       new_field_values=field_values)

        # Create sample partition holding the services of these analyses
        services = map(lambda an: an.getAnalysisService(), analyses)
        partition = dict(services=services,
                         part_id="{}-P1".format(sample_copy.getId()))
        create_samplepartition(sample_copy, partition, analyses)

        # Force all items to be in received state
        force_receive(ar_copy)
        created.append(ar_copy)

    return created
def update_internal_use(portal):
    """Default the InternalUse field to False on Samples without a value.
    """
    logger.info("*** Updating InternalUse field on Samples/ARs ***")
    for brain in api.search(dict(portal_type="Sample"), "bika_catalog"):
        sample = api.get_object(brain)
        # Only touch samples where the field was never set
        if _api.get_field_value(sample, "InternalUse", None) is None:
            _api.set_field_value(sample, "InternalUse", False)
def after_verify(obj):
    """Event fired after verify transition is triggered
    """
    logger.info("*** Custom after_verify transition ***")
    # Delegate analyses and duplicates to the default analysis handler
    if IAnalysis.providedBy(obj) or IDuplicateAnalysis.providedBy(obj):
        analysis_events.after_verify(obj)
    # Promote the 'verify' transition via the shared promotion helper
    if IAnalysisRequest.providedBy(obj):
        _promote_transition(obj, "verify")
def after_submit(obj):
    """Event fired after submit transition is triggered
    """
    logger.info("*** Custom after_submit transition ***")
    # Analyses and duplicates go through the default analysis handler
    is_analysis_like = (IAnalysis.providedBy(obj)
                        or IDuplicateAnalysis.providedBy(obj))
    if is_analysis_like:
        analysis_events.after_submit(obj)
    # Analysis Requests promote the 'submit' transition
    if IAnalysisRequest.providedBy(obj):
        _promote_transition(obj, "submit")
def setupHandler(context):
    """BHP setup handler

    GenericSetup import handler: runs only for the bhp.lims profile
    (guarded by the flag file) and executes the installers in order.
    NOTE: the ordering below is intentional (e.g. content types exist
    before ID formatting is applied) — do not reorder casually.
    """
    # Only run for our own profile (marker file shipped with it)
    if context.readDataFile('bhp.lims.txt') is None:
        return

    logger.info("BHP setup handler [BEGIN]")
    portal = context.getSite()

    # Setup Catalogs
    setup_catalogs(portal)

    # Run installers
    setup_laboratory(portal)

    # Add new content types
    setup_new_content_types(portal)

    # Apply ID format to content types
    setup_id_formatting(portal)

    # Sort AR fields (AR Add)
    sort_ar_fields(portal)

    # Hide unused AR Fields
    hide_unused_ar_fields(portal)

    # Setup specimen shipment (from clinic) workflow
    setup_bhp_workflow(portal)

    # Setup Attachment Types (requisition + delivery)
    setup_attachment_types(portal)

    # Update priorities to Urgent, Routine, STAT
    update_priorities(portal)

    # update analysis services (Replace % by PCT in Analysis Keywords)
    update_services(portal)

    # Update InternalUse for Samples and Analysis Requests
    update_internal_use(portal)

    # Import specifications from bhp/lims/resources/results_ranges.xlsx
    import_specifications(portal)

    # Setup Controlpanels
    setup_controlpanels(portal)

    # Setup printer stickers
    setup_printer_stickers(portal)

    # Reimport additional steps from profile
    import_profile_steps(portal)

    logger.info("BHP setup handler [DONE]")
def update_services(portal):
    """Replace '%' with '_PCT' in every Analysis Service keyword.
    """
    logger.info("*** Updating services ***")
    for service in portal.bika_setup.bika_analysisservices.values():
        keyword = service.Schema().getField('Keyword').get(service)
        if '%' not in keyword:
            continue
        keyword = keyword.replace('%', '_PCT')
        logger.info("Replaced Analysis Keyword: {}".format(keyword))
        service.setKeyword(keyword)
        service.reindexObject()
def setup_new_content_types(portal):
    """Setup new content types"""
    logger.info("*** Setup new content types ***")
    # Index objects - Importing through GenericSetup doesn't
    for obj_id in ('couriers',):
        folder = portal.bika_setup[obj_id]
        folder.unmarkCreationFlag()
        folder.reindexObject()
def setup_laboratory(portal):
    """Apply BHP defaults to the Laboratory setup object.
    """
    logger.info("*** Setup Laboratory ***")
    laboratory = portal.bika_setup.laboratory
    laboratory.edit(title=_('BHP'))
    laboratory.reindexObject()
    # Set autoprinting of stickers on register
    portal.bika_setup.setAutoPrintStickers('register')
def apply_specifications_to_all_sampletypes(portal):
    """Merge the xlsx-driven result ranges into every existing
    AnalysisSpec whose title ends with "calculated", binding each range
    entry to the 'Ranges calculation'.
    """
    logger.info("Applying specs to all sample types ...")

    def set_xlsx_specs(senaite_spec):
        # Push every xlsx spec row into this AnalysisSpec's ResultsRange
        logger.info("Applying specs to {}".format(senaite_spec.Title()))
        query = dict(portal_type="Calculation", title="Ranges calculation")
        calc = api.search(query, "bika_setup_catalog")
        # Require exactly one matching calculation
        if len(calc) == 0 or len(calc) > 1:
            logger.info("No calculation found [SKIP]")
            return
        calc_uid = api.get_uid(calc[0])
        keywords = list()
        raw_specifications = get_xls_specifications()
        for spec in raw_specifications:
            keyword = spec.get("keyword")
            if keyword not in keywords:
                # First time we see this keyword: require exactly one
                # matching Analysis Service, otherwise skip the row
                query = dict(portal_type="AnalysisService",
                             getKeyword=keyword)
                brains = api.search(query, "bika_setup_catalog")
                if len(brains) == 0 or len(brains) > 1:
                    logger.info(
                        "No service found for {} [SKIP]".format(keyword))
                    continue
                keywords.append(keyword)
            # Skeleton range entry; min/max operators fixed, grade columns
            # blank, bound to the Ranges calculation
            specs_dict = {
                'keyword': keyword,
                'min_operator': 'geq',
                'min': '0',
                'max_operator': 'lt',
                'max': '0',
                'minpanic': '',
                'maxpanic': '',
                'warn_min': '',
                'warn_max': '',
                'hidemin': '',
                'hidemax': '',
                'rangecomments': '',
                'calculation': calc_uid,
            }
            grades_dict = {grade: "" for grade in GRADES_KEYS}
            specs_dict.update(grades_dict)
            # Replace any previous entry for this keyword with the new one
            ranges = api.get_field_value(senaite_spec, 'ResultsRange', [{}])
            ranges = filter(lambda val: val.get('keyword') != keyword, ranges)
            ranges.append(specs_dict)
            senaite_spec.setResultsRange(ranges)

    # Existing AnalysisSpec?
    query = dict(portal_type='AnalysisSpec')
    senaite_specs = api.search(query, 'bika_setup_catalog')
    for senaite_spec in senaite_specs:
        senaite_spec = api.get_object(senaite_spec)
        # Only specs flagged as "calculated" (by title suffix) are updated
        if not senaite_spec.Title().endswith("calculated"):
            continue
        set_xlsx_specs(senaite_spec)
    logger.info("Applying specs to all sample types [DONE]")
def setup_controlpanels(portal):
    """Setup Plone control and Senaite management panels

    For each CONTROLPANELS entry: create the setup folder inside
    bika_setup when missing (temporarily lifting the FTI content-type
    restrictions to do so), then position its configlet action right
    after the configured 'insert-after' action.
    """
    logger.info("*** Setup Controlpanels ***")
    # get the bika_setup object
    bika_setup = api.get_bika_setup()
    cp = api.get_tool("portal_controlpanel")

    def get_action_index(action_id):
        # "*" means: use the last registered action as the reference
        if action_id == "*":
            action = cp.listActions()[-1]
            action_id = action.getId()
        for n, action in enumerate(cp.listActions()):
            if action.getId() == action_id:
                return n
        # -1 signals "not found" to the caller
        return -1

    for item in CONTROLPANELS:
        id = item.get("id")
        type = item.get("type")
        title = item.get("title")
        description = item.get("description")

        panel = bika_setup.get(id, None)
        if panel is None:
            logger.info("Creating Setup Folder '{}' in Setup.".format(id))
            # allow content creation in setup temporary
            portal_types = api.get_tool("portal_types")
            fti = portal_types.getTypeInfo(bika_setup)
            fti.filter_content_types = False
            myfti = portal_types.getTypeInfo(type)
            global_allow = myfti.global_allow
            myfti.global_allow = True
            _ = bika_setup.invokeFactory(type, id, title=title)
            panel = bika_setup[_]
            # restore the previous FTI restrictions
            myfti.global_allow = global_allow
            fti.filter_content_types = True
        else:
            # set some meta data
            panel.setTitle(title)
            panel.setDescription(description)

        # Move configlet action to the right index
        action_index = get_action_index(id)
        ref_index = get_action_index(item["insert-after"])
        if (action_index != -1) and (ref_index != -1):
            actions = cp._cloneActions()
            action = actions.pop(action_index)
            actions.insert(ref_index + 1, action)
            cp._actions = tuple(actions)
            # mark the persistent tool as changed so ZODB saves it
            cp._p_changed = 1

        # reindex the object to render it properly in the navigation portlet
        panel.reindexObject()
def reindex(query, catalog_name, job_num, portal=None):
    """Reindex every object matching `query` in the given catalog.

    Logs progress every 100 objects and commits the transaction every
    TRANSACTION_THERESHOLD objects to keep its size bounded, plus a
    final commit at the end.

    :param query: catalog query dict
    :param catalog_name: name of the catalog to search
    :param job_num: job identifier, used only for log messages
    :param portal: optional portal object forwarded to commit_transaction
        (which does not actually use it)

    NOTE(review): the original body referenced a free variable ``portal``
    that is not a parameter and not visibly defined at module level,
    which would raise NameError at the first commit. It is now an
    explicit, optional parameter — backward compatible for all existing
    3-argument callers.
    """
    brains = api.search(query, catalog_name)
    total = len(brains)
    for num, brain in enumerate(brains):
        if num % 100 == 0:
            logger.info("Reindexing objects (job {}): {}/{}".format(
                job_num, num, total))
        # Periodic commit to avoid one huge transaction
        if num % TRANSACTION_THERESHOLD == 0:
            commit_transaction(portal)
        obj = api.get_object(brain)
        obj.reindexObject()
    commit_transaction(portal)
def flush_ids(portal):
    """Delete number-generator counters whose key matches IDS_TO_FLUSH.
    """
    number_generator = getUtility(INumberGenerator)

    def matches(key):
        # A key is flushed when it starts with any configured prefix
        return any(key.startswith(prefix) for prefix in IDS_TO_FLUSH)

    # Materialize the matching keys first, then delete from the storage
    doomed = [key for key in number_generator.keys() if matches(key)]
    for key in doomed:
        logger.info("Flush ID {}".format(key))
        del number_generator.storage[key]
def set_field_value(instance, field_name, value):
    """Sets the value to a Schema field

    Refuses to set the 'id' field; fails loudly when the field does not
    exist on the instance's schema.
    """
    if field_name == "id":
        logger.warn("Assignment of id is not allowed")
        return
    logger.info("Field {} = {}".format(field_name, repr(value)))
    instance = get_object(instance)
    schema = instance.Schema()
    # Equivalent to the classic `A and B or None` idiom, spelled out
    field = schema.getField(field_name) if schema else None
    if not field:
        fail("No field {} found for {}".format(field_name, repr(instance)))
    field.set(instance, value)
def update_workflow(workflow_id, settings):
    """Update the states and transitions of one portal workflow.

    :param workflow_id: id of the workflow in portal_workflow
    :param settings: dict with optional "states" and "transitions" maps

    BUGFIX: the original logged "[SKIP]" when the workflow was missing
    but then fell through and called update_workflow_state/transition
    with workflow=None; a `return` now makes the skip effective.
    """
    logger.info("Updating workflow '{}' ...".format(workflow_id))
    wf_tool = api.get_tool("portal_workflow")
    workflow = wf_tool.getWorkflowById(workflow_id)
    if not workflow:
        logger.warn("Workflow '{}' not found [SKIP]".format(workflow_id))
        return
    states = settings.get("states", {})
    for state_id, values in states.items():
        update_workflow_state(workflow, state_id, values)
    transitions = settings.get("transitions", {})
    for transition_id, values in transitions.items():
        update_workflow_transition(workflow, transition_id, values)
def after_process(obj):
    """Event fired after process (Process) transition is triggered
    """
    logger.info("*** Custom after_process transition ***")
    if IAnalysisRequest.providedBy(obj):
        # Generate a derived AR (and Sample) for every single partition
        create_requests_from_partitions(obj)
        return
    if ISample.providedBy(obj):
        # We do not permit partitioning directly from Sample!
        # sample_events._cascade_transition(obj, 'process')
        pass
def hide_ar_add_fields(portal):
    """Hides unused fields from AR Add Form

    Every field named in ADD_AR_FIELDS_TO_HIDE gets visibility False;
    any other field already known to the storage stays/gets True.
    """
    logger.info("Hiding default fields from AR Add ...")
    storage = get_manage_add_storage(portal)
    visibility = storage.get('visibility', {}).copy()
    ordered = storage.get('order', [])
    hidden = set(ADD_AR_FIELDS_TO_HIDE)
    # Union of already-known fields, fields to hide and ordered fields
    candidates = set(list(visibility.keys()) + ADD_AR_FIELDS_TO_HIDE + ordered)
    for field_name in candidates:
        visibility[field_name] = field_name not in hidden
    storage.update({"visibility": visibility})
    update_manage_add_storage(portal, storage)
    logger.info("Hiding default fields from AR Add [DONE]")
def after_publish(obj):
    """Event fired after publish transition is triggered
    """
    logger.info("*** Custom after_publish transition ***")
    if IAnalysisRequest.providedBy(obj):
        # Publish all analyses contained in this AR
        # (original comment said "Transition Analyses to sample_due";
        #  the action triggered here is 'publish' — confirm intent)
        ans = obj.getAnalyses(full_objects=True)
        for analysis in ans:
            doActionFor(analysis, 'publish')
        # Promote to parent AR
        parent_ar = obj.getPrimaryAnalysisRequest()
        if parent_ar:
            doActionFor(parent_ar, "publish")
def setup_id_formatting(portal, format_definition=None):
    """Setup default ID formatting

    Called without `format_definition`, applies every entry of
    ID_FORMATTING by calling itself once per entry. With a definition,
    replaces any existing formatting record for that portal type.

    BUGFIX: the "form not set" log message was broken — the string was
    unterminated ("[SKIP"), the `{}` placeholder was never interpolated,
    and despite the SKIP wording the code fell through and applied an
    empty format; now it interpolates the portal type and returns.
    """
    if not format_definition:
        logger.info("Setting up ID formatting ...")
        for formatting in ID_FORMATTING:
            setup_id_formatting(portal, format_definition=formatting)
        logger.info("Setting up ID formatting [DONE]")
        return

    bs = portal.bika_setup
    p_type = format_definition.get("portal_type", None)
    if not p_type:
        return
    form = format_definition.get("form", "")
    if not form:
        logger.info(
            "Param 'form' for portal type {} not set [SKIP]".format(p_type))
        return
    logger.info("Applying format '{}' for {}".format(form, p_type))
    # Keep every record except the one for this portal type, then append
    # the new definition
    ids = list()
    for record in bs.getIDFormatting():
        if record.get('portal_type', '') == p_type:
            continue
        ids.append(record)
    ids.append(format_definition)
    bs.setIDFormatting(ids)
def after_send_to_lab(obj):
    """ Event fired after send_to_lab transition is triggered.
    """
    logger.info("*** Custom after_send_to_lab transition ***")
    if IAnalysisRequest.providedBy(obj):
        # Promote to sample
        sample = obj.getSample()
        if sample:
            doActionFor(sample, 'send_to_lab')
        return
    if ISample.providedBy(obj):
        # Cascade the transition down to the sample's contents
        sample_events._cascade_transition(obj, 'send_to_lab')
def workflow_action_download_requisition(self):
    """Workflow action: redirect the browser to the requisition PDF.

    For an AR, points `destination_url` at the download URL of the last
    requisition attachment; for a Sample it currently only logs.
    """
    if ISample.providedBy(self.context):
        # TODO, Concatenate the PDFs of all contained ARs
        logger.info("This is a sample!")
    elif IAnalysisRequest.providedBy(self.context):
        # Redirect to the requisition PDF
        req_att = self.get_last_requisition_attachment(self.context)
        if not req_att:
            # No requisition attached: nothing to download
            return
        self.destination_url = '{}/at_download/AttachmentFile'.format(
            req_att.absolute_url())
    # NOTE(review): assumes self.destination_url is defined for the
    # Sample branch (e.g. by the base action class) — confirm
    self.request.response.redirect(self.destination_url)