def guard_verify(obj):
    """Returns True if 'verify' transition can be applied to the Worksheet
    passed in.

    This is, returns True if all the analyses assigned have already been
    verified. Those analyses that are in an inactive state (cancelled,
    inactive) or detached (rejected, retracted) are dismissed, but at least
    one analysis must be in an active state (and verified), otherwise always
    return False. Note this guard depends entirely on the current status of
    the children.

    :param obj: the Worksheet the transition is being evaluated against
    :returns: true or false
    """
    analyses = obj.getAnalyses()
    if not analyses:
        # An empty worksheet cannot be verified
        return False

    can_verify = False
    # FIX: iterate the list fetched above instead of calling getAnalyses()
    # a second time (the original re-queried the relationship per call)
    for analysis in analyses:
        # Dismiss analyses that are not active
        if not api.is_active(analysis):
            continue
        # Dismiss analyses that have been rejected or retracted
        if api.get_workflow_status_of(analysis) in ["rejected", "retracted"]:
            continue
        # Worksheet cannot be verified if there is one analysis not verified
        can_verify = IVerified.providedBy(analysis)
        if not can_verify:
            # No need to look further
            return False

    # This prevents the verification of the worksheet if all its analyses are
    # in a detached status (rejected, retracted or cancelled)
    return can_verify
def workflow_action_retract_ar(self):
    """Invalidate (retract) the Analysis Request held in self.context.

    Clones the AR (both linked to each other), transitions the original to
    'invalid' and opens the copy in 'to_be_verified'. When the setup option
    is enabled, the client contacts are notified. Finally redirects to the
    new AR copy.
    """
    # AR should be retracted
    # Can't transition inactive ARs
    if not api.is_active(self.context):
        message = _('Item is inactive.')
        self.context.plone_utils.addPortalMessage(message, 'info')
        self.request.response.redirect(self.context.absolute_url())
        return

    # 1. Copies the AR linking the original one and viceversa
    ar = self.context
    newar = self.cloneAR(ar)

    # 2. The old AR gets a status of 'invalid'
    api.do_transition_for(ar, 'retract_ar')

    # 3. The new AR copy opens in status 'to be verified'
    changeWorkflowState(newar, 'bika_ar_workflow', 'to_be_verified')

    # 4. The system immediately alerts the client contacts who ordered
    # the results, per email and SMS, that a possible mistake has been
    # picked up and is under investigation.
    # A much possible information is provided in the email, linking
    # to the AR online.
    bika_setup = api.get_bika_setup()
    if bika_setup.getNotifyOnARRetract():
        self.notify_ar_retract(ar, newar)

    message = _('${items} invalidated.',
                mapping={'items': ar.getRequestID()})
    self.context.plone_utils.addPortalMessage(message, 'warning')
    self.request.response.redirect(newar.absolute_url())
def get_object_metadata(obj, **kw):
    """Get object metadata

    :param obj: Content object
    :param kw: Explicit metadata overrides (always take precedence)
    :returns: Dictionary of extracted object metadata
    """
    # Volatile data computed at snapshot time
    metadata = {
        "actor": get_user_id(),
        "roles": get_roles(),
        "action": "",
        "review_state": api.get_review_status(obj),
        "active": api.is_active(obj),
        "snapshot_created": DateTime().ISO(),
        "modified": api.get_modification_date(obj).ISO(),
        "remote_address": "",
        "user_agent": "",
        "referer": "",
        "comments": "",
    }
    # Merge in data extracted from the current request
    metadata.update(get_request_data())
    # Keyword arguments always win over extracted values
    metadata.update(kw)
    return metadata
def guard_submit(obj):
    """Returns if 'submit' transition can be applied to the worksheet passed
    in.

    By default, the target state for the 'submit' transition for a worksheet
    is 'to_be_verified', so this guard returns true if all the analyses
    assigned to the worksheet have already been submitted. Those analyses
    that are in a non-valid state (cancelled, inactive) are dismissed in the
    evaluation, but at least one analysis must be in an active state (and
    submitted) for this guard to return True. Otherwise, always returns
    False. Note this guard depends entirely on the current status of the
    children.

    :param obj: the Worksheet the transition is being evaluated against
    :returns: true or false
    """
    analyses = obj.getAnalyses()
    if not analyses:
        # An empty worksheet cannot be submitted
        return False

    can_submit = False
    # FIX: iterate the list fetched above instead of calling getAnalyses()
    # a second time (the original re-queried the relationship per call)
    for analysis in analyses:
        # Dismiss analyses that are not active
        if not api.is_active(analysis):
            continue
        # Dismiss analyses that have been rejected or retracted
        if api.get_workflow_status_of(analysis) in ["rejected", "retracted"]:
            continue
        # Worksheet cannot be submitted if there is one analysis not submitted
        can_submit = ISubmitted.providedBy(analysis)
        if not can_submit:
            # No need to look further
            return False

    # This prevents the submission of the worksheet if all its analyses are
    # in a detached status (rejected, retracted or cancelled)
    return can_submit
def _reflex_rule_process(self, wf_action):
    """Run the reflex rule machinery for this analysis.

    :param wf_action: string with the workflow action that was triggered
    """
    # Reflex Rule objects are related to a method, so without a method
    # bound to this analysis there is nothing to process
    method = self.getMethod()
    if not method:
        return

    # Fetch every Reflex Rule attached to the analysis' method
    rules = method.getBackReferences('ReflexRuleMethod')
    if not rules:
        return

    # Walk through the active rules and execute the instructions that
    # match this analysis service, its result and the state change
    for rule in rules:
        if not api.is_active(rule):
            continue
        action_row = rule.getActionReflexRules(self, wf_action)
        doReflexRuleAction(self, action_row)
def workflow_action_retract_ar(self):
    """Invalidate (retract) the Analysis Request held in self.context.

    Clones the AR (both linked to each other), transitions the original to
    'invalid' and opens the copy in 'to_be_verified'. When the setup option
    is enabled, the client contacts are notified. Finally redirects to the
    new AR copy.
    """
    # AR should be retracted
    # Can't transition inactive ARs
    if not api.is_active(self.context):
        message = _('Item is inactive.')
        self.context.plone_utils.addPortalMessage(message, 'info')
        self.request.response.redirect(self.context.absolute_url())
        return

    # 1. Copies the AR linking the original one and viceversa
    ar = self.context
    newar = self.cloneAR(ar)

    # 2. The old AR gets a status of 'invalid'
    api.do_transition_for(ar, 'retract_ar')

    # 3. The new AR copy opens in status 'to be verified'
    changeWorkflowState(newar, 'bika_ar_workflow', 'to_be_verified')

    # 4. The system immediately alerts the client contacts who ordered
    # the results, per email and SMS, that a possible mistake has been
    # picked up and is under investigation.
    # A much possible information is provided in the email, linking
    # to the AR online.
    bika_setup = api.get_bika_setup()
    if bika_setup.getNotifyOnARRetract():
        self.notify_ar_retract(ar, newar)

    message = _('${items} invalidated.',
                mapping={'items': ar.getId()})
    self.context.plone_utils.addPortalMessage(message, 'warning')
    self.request.response.redirect(newar.absolute_url())
def guard_deliver(context):
    """Guard for deliver transition. Returns true if a Courier has been
    assigned to the Sample and the Sample (context) is active.

    Note we do not check for roles or client here because permissions for
    clients when the sample is in state `sample_shipped` are already defined
    in the workflow definition.
    """
    sample = get_sample(context)
    if not sample:
        return False

    # An inactive sample cannot be delivered to the lab
    if not api.is_active(sample):
        return False

    # A Courier must have been assigned before delivery is possible
    if not sample.Schema()['Courier'].get(sample):
        return False

    # Client contacts are not allowed to deliver the sample themselves
    current_user = api.get_current_user()
    return "Client" not in current_user.getRoles()
def _children_are_ready(obj, transition_id, dettached_states=None):
    """Returns true if the children of the object passed in (worksheet) have
    been all transitioned in accordance with the 'transition_id' passed in.

    If detached_states is provided, children with those states are dismissed,
    so they will not be taken into account in the evaluation. Nevertheless,
    at least one child for which the transition_id was performed is required
    for this function to return true (if all children are in detached states,
    it always returns False).
    """
    query = dict(getWorksheetUID=api.get_uid(obj))
    brains = api.search(query, CATALOG_ANALYSIS_LISTING)
    if not brains:
        return False

    num_detached = 0
    for brain in brains:
        # Dismiss children that are in one of the detached states
        if dettached_states and brain.review_state in dettached_states:
            num_detached += 1
            continue
        # A single inactive child is enough to bail out
        if not api.is_active(brain):
            return False
        # Likewise for a child missing the requested transition
        child = api.get_object(brain)
        if not wasTransitionPerformed(child, transition_id):
            return False

    # If all brains are in a detached state, the condition of at least
    # having one child with the transition performed is not satisfied
    return num_detached != len(brains)
def workflow_action_invalidate(self):
    """Invalidate the Analysis Request held in self.context and redirect
    to the generated retest.

    Inactive ARs are skipped with an info portal message. When the setup
    option is enabled, the client contacts are notified.
    """
    # AR should be retracted
    # Can't transition inactive ARs
    if not api.is_active(self.context):
        message = _('Item is inactive.')
        self.context.plone_utils.addPortalMessage(message, 'info')
        self.request.response.redirect(self.context.absolute_url())
        return

    # Retract the AR and get the retest
    api.do_transition_for(self.context, 'invalidate')
    retest = self.context.getRetest()

    # 4. The system immediately alerts the client contacts who ordered
    # the results, per email and SMS, that a possible mistake has been
    # picked up and is under investigation.
    # A much possible information is provided in the email, linking
    # to the AR online.
    bika_setup = api.get_bika_setup()
    if bika_setup.getNotifyOnARRetract():
        self.notify_ar_retract(self.context, retest)

    message = _('${items} invalidated.',
                mapping={'items': self.context.getId()})
    self.context.plone_utils.addPortalMessage(message, 'warning')
    self.request.response.redirect(retest.absolute_url())
def _children_are_ready(obj, transition_id, dettached_states=None):
    """Returns true if the children of the object passed in (worksheet) have
    been all transitioned in accordance with the 'transition_id' passed in.

    If detached_states is provided, children with those states are dismissed,
    so they will not be taken into account in the evaluation. Nevertheless,
    at least one child for which the transition_id was performed is required
    for this function to return true (if all children are in detached states,
    it always returns False).
    """
    num_detached = 0
    analyses = obj.getAnalyses()
    for analysis in analyses:
        # Dismiss children that are in one of the detached states
        if dettached_states:
            if api.get_review_status(analysis) in dettached_states:
                num_detached += 1
                continue
        # A single inactive or non-transitioned child is enough to bail out
        if not api.is_active(analysis):
            return False
        if not wasTransitionPerformed(analysis, transition_id):
            return False

    # If all analyses are in a detached state, the condition of at least
    # having one child with the transition performed is not satisfied
    return num_detached != len(analyses)
def getInvoiceBatches(self, contentFilter=None):
    """Return the invoice batches contained in this folder, filtered by
    their active state.

    :param contentFilter: optional mapping; its 'is_active' key (defaults
        to True) selects active vs. inactive batches
    :returns: list of invoice batch objects
    """
    # FIX: avoid the mutable default argument `contentFilter={}` and drop
    # the unused `wf` (portal_workflow) local
    if contentFilter is None:
        contentFilter = {}
    active_state = contentFilter.get('is_active', True)
    values = self.context.objectValues()
    if active_state:
        return filter(api.is_active, values)
    return filter(lambda o: not api.is_active(o), values)
def _is_frozen(self, brain_or_object):
    """Check if the passed in object is frozen

    :param brain_or_object: Analysis or AR Brain/Object
    :returns: True if the object is frozen
    """
    obj = api.get_object(brain_or_object)
    # Frozen means either no longer active, or verified at some point
    inactive = not api.is_active(obj)
    verified = wasTransitionPerformed(obj, 'verify')
    return inactive or verified
def fix_service_status_inconsistences():
    """Reactivate the services an active service depends on through its
    calculation, so no active service relies on inactive dependencies.
    """
    catalog = api.get_tool('bika_setup_catalog')
    for brain in catalog(portal_type='AnalysisService'):
        service = api.get_object(brain)
        # Only active services impose constraints on their dependencies
        if not api.is_active(service):
            continue
        # If this service is active, then all the services this service
        # depends on must be active too, as well as the calculation
        calculation = service.getCalculation()
        if not calculation:
            continue
        for dependency in calculation.getDependentServices():
            dependency = api.get_object(dependency)
            if not api.is_active(dependency):
                _change_inactive_state(dependency, 'active')
def getContacts(self, dl=True):
    """Return the sibling contacts of this contact, sorted by title.

    :param dl: when True (default) return a DisplayList of (UID, Title)
        pairs, otherwise return the contact objects
    """
    pairs = []
    objects = []
    for sibling in self.aq_parent.objectValues('Contact'):
        # Skip inactive contacts and the contact itself
        if not is_active(sibling):
            continue
        if sibling.UID() == self.UID():
            continue
        pairs.append((sibling.UID(), sibling.Title()))
        if not dl:
            objects.append(sibling)
    pairs.sort(lambda x, y: cmp(x[1].lower(), y[1].lower()))
    return dl and DisplayList(pairs) or objects
def guard_activate(analysis_service):
    """Returns whether the transition activate can be performed for the
    analysis service passed in
    """
    calculation = analysis_service.getCalculation()
    # Without a calculation there is nothing that can block the activation
    if not calculation:
        return True

    # An inactive calculation prevents activating the service
    if not api.is_active(calculation):
        return False

    # Likewise, every service the calculation depends on must be active
    dependencies = calculation.getDependentServices()
    return all(api.is_active(dep) for dep in dependencies)
def fix_service_profile_template_inconsistences():
    """Make sure inactive analysis services are not referenced by Profiles
    nor by AR Templates.
    """
    catalog = api.get_tool('bika_setup_catalog')
    for brain in catalog(portal_type='AnalysisService'):
        service = api.get_object(brain)
        # Active services are allowed everywhere, skip them
        if api.is_active(service):
            continue
        # Re-run the deactivation handler so the inactive service gets
        # purged from Profiles and AR Templates
        service.after_deactivate_transition_event()
def guard_send_to_pot(context):
    """Guard for sending the sample to the point of testing
    """
    # Inactive samples cannot be sent to the point of testing
    if not api.is_active(context):
        return False

    # Client contacts are not allowed to perform this transition
    current_user = api.get_current_user()
    return "Client" not in current_user.getRoles()
def _is_frozen(self, brain_or_object):
    """Check if the passed in object is frozen: the object is cancelled,
    inactive or has been verified at some point

    :param brain_or_object: Analysis or AR Brain/Object
    :returns: True if the object is frozen
    """
    # Cancelled/inactive objects are always frozen
    if not api.is_active(brain_or_object):
        return True

    # Objects in one of the frozen states are frozen
    if api.get_workflow_status_of(brain_or_object) in FROZEN_STATES:
        return True

    # Frozen when one of the freezing transitions appears in the review
    # history of the object
    obj = api.get_object(brain_or_object)
    performed = set(getReviewHistoryActionsList(obj))
    return bool(set(FROZEN_TRANSITIONS) & performed)
def _is_frozen(self, brain_or_object, *frozen_transitions):
    """Check if the passed in object is frozen: the object is cancelled,
    inactive or has been verified at some point

    :param brain_or_object: Analysis or AR Brain/Object
    :param frozen_transitions: additional transitions that freeze the object
    :returns: True if the object is frozen
    """
    # Cancelled/inactive objects are always frozen
    if not api.is_active(brain_or_object):
        return True

    obj = api.get_object(brain_or_object)
    # 'verify' always freezes, plus whatever extra transitions were given
    freezing = set(frozen_transitions)
    freezing.add('verify')
    performed = set(getReviewHistoryActionsList(obj))
    return bool(freezing & performed)
def getCurrentState(self):
    """Return the current status of this maintenance task.

    Closed and cancelled tasks report their terminal status; otherwise the
    status is derived from the down-from/down-to time window relative to
    now (OVERDUE past the window, PENDING inside it, INQUEUE before it).
    """
    # FIX: dropped the unused `workflow` local (portal_workflow was fetched
    # but never used) and the duplicate self.getDownTo() call
    if self.getClosed():
        return InstrumentMaintenanceTaskStatuses.CLOSED
    if not api.is_active(self):
        return InstrumentMaintenanceTaskStatuses.CANCELLED

    now = DateTime()
    dfrom = self.getDownFrom()
    # An unset 'down to' date means the task window stays open indefinitely
    dto = self.getDownTo() or DateTime(9999, 12, 31)
    if now > dto:
        return InstrumentMaintenanceTaskStatuses.OVERDUE
    if now >= dfrom:
        return InstrumentMaintenanceTaskStatuses.PENDING
    return InstrumentMaintenanceTaskStatuses.INQUEUE
def isTransitionAllowed(instance, transition_id):
    """Checks if the object can perform the transition passed in.

    :returns: True if transition can be performed
    :rtype: bool
    """
    # Except for 'reinstate' and 'activate', transitions may only be
    # performed on active objects
    if transition_id not in ['reinstate', 'activate']:
        if not api.is_active(instance):
            return False

    # The transition is allowed as soon as one of the workflows bound to
    # the object supports the action
    wf_tool = getToolByName(instance, "portal_workflow")
    return any(
        workflow and workflow.isActionSupported(instance, transition_id)
        for workflow in (wf_tool.getWorkflowById(wf_id)
                         for wf_id in wf_tool.getChainFor(instance)))
def workflow_script_activate(self): pu = getToolByName(self, 'plone_utils') # A calculation cannot be re-activated if services it depends on # are deactivated. services = self.getDependentServices() inactive_services = [] for service in services: if not api.is_active(service): inactive_services.append(service.Title()) if inactive_services: msg = _("Cannot activate calculation, because the following " "service dependencies are inactive: ${inactive_services}", mapping={'inactive_services': safe_unicode( ", ".join(inactive_services))}) pu.addPortalMessage(msg, 'error') transaction.get().abort() raise WorkflowException
def getCCs(self):
    """Return a JSON value, containing all Contacts and their default CCs.
    This function is used to set form values for javascript.
    """
    items = []
    for contact in self.getContacts(dl=False):
        item = {'uid': contact.UID(), 'title': contact.Title()}
        ccs = []
        # Not every contact type provides default CC contacts
        if hasattr(contact, 'getCCContact'):
            for cc in contact.getCCContact():
                # Dismiss inactive CC contacts
                if not api.is_active(cc):
                    continue
                ccs.append({'title': cc.Title(), 'uid': cc.UID()})
        item['ccs_json'] = json.dumps(ccs)
        item['ccs'] = ccs
        items.append(item)
    items.sort(lambda x, y: cmp(x['title'].lower(), y['title'].lower()))
    return items
def __call__(self):
    """Return a JSON payload with the basic client data and the UIDs of
    its active contacts.

    :returns: JSON string with ClientTitle, ClientID, ClientSysID,
        ClientUID and ContactUIDs keys
    """
    plone.protect.CheckAuthenticator(self.request)
    # FIX: dropped the unused `wf` local (portal_workflow was fetched but
    # never used)
    client = self.context
    ret = {
        'ClientTitle': client.Title(),
        'ClientID': client.getClientID(),
        'ClientSysID': client.id,
        'ClientUID': client.UID(),
        'ContactUIDs': [c.UID() for c in client.objectValues('Contact')
                        if api.is_active(c)],
    }
    return json.dumps(ret)
def getCCs(self):
    """Return a JSON value, containing all Contacts and their default CCs.
    This function is used to set form values for javascript.
    """
    items = []
    for contact in self.getContacts(dl=False):
        item = {'uid': contact.UID(), 'title': contact.Title()}
        ccs = []
        # Default CC contacts are optional on the contact type
        if hasattr(contact, 'getCCContact'):
            # Keep only the active CC contacts
            for cc in contact.getCCContact():
                if not api.is_active(cc):
                    continue
                ccs.append({'title': cc.Title(), 'uid': cc.UID()})
        item['ccs_json'] = json.dumps(ccs)
        item['ccs'] = ccs
        items.append(item)
    items.sort(lambda x, y: cmp(x['title'].lower(), y['title'].lower()))
    return items
def guard_process(context):
    """Guard for process (partitioning) process
    Only Primary Analysis Requests can be partitioned
    """
    sample = get_sample(context)
    if not sample:
        return False

    # Inactive samples cannot be processed
    if not api.is_active(sample):
        return False

    # Only primary samples can be partitioned; a sample referencing a
    # PrimarySample is itself a partition
    if sample.Schema()['PrimarySample'].get(sample):
        return False

    # Client contacts are not allowed to process the sample
    current_user = api.get_current_user()
    return "Client" not in current_user.getRoles()
def getContacts(self, dl=True):
    """Return the contacts of the primary referrer client, falling back
    to all active Lab Contacts when no client is linked.

    :param dl: when True (default) return a DisplayList of (UID, Title)
        pairs, otherwise return the contact objects
    """
    bsc = getToolByName(self, 'bika_setup_catalog')
    pairs = []
    objects = []
    client = hasattr(self, 'getPrimaryReferrer') \
        and self.getPrimaryReferrer() or None
    if client:
        # Collect the active contacts of the client, sorted by title
        for contact in client.objectValues('Contact'):
            if not api.is_active(contact):
                continue
            pairs.append((contact.UID(), contact.Title()))
            if not dl:
                objects.append(contact)
        pairs.sort(lambda x, y: cmp(x[1].lower(), y[1].lower()))
        return dl and DisplayList(pairs) or objects

    # fallback - all Lab Contacts
    for brain in bsc(portal_type='LabContact',
                     is_active=True,
                     sort_on='sortable_title'):
        pairs.append((brain.UID, brain.Title))
        if not dl:
            objects.append(brain.getObject())
    return dl and DisplayList(pairs) or objects
def getContacts(self, dl=True):
    """Return the contacts of the primary referrer client, or all active
    Lab Contacts when no client is linked.

    :param dl: when True (default) return a DisplayList of (UID, Title)
        pairs, otherwise return the contact objects
    """
    bsc = getToolByName(self, 'bika_setup_catalog')
    pairs = []
    objects = []
    client = hasattr(self, 'getPrimaryReferrer') \
        and self.getPrimaryReferrer() or None
    if client:
        # Active contacts of the client, sorted by their title
        for contact in client.objectValues('Contact'):
            if not api.is_active(contact):
                continue
            pairs.append((contact.UID(), contact.Title()))
            if not dl:
                objects.append(contact)
        pairs.sort(lambda x, y: cmp(x[1].lower(), y[1].lower()))
        return dl and DisplayList(pairs) or objects

    # fallback - all Lab Contacts
    for brain in bsc(portal_type='LabContact',
                     is_active=True,
                     sort_on='sortable_title'):
        pairs.append((brain.UID, brain.Title))
        if not dl:
            objects.append(brain.getObject())
    return dl and DisplayList(pairs) or objects
def guard_send_to_lab(context):
    """Guard for send_to_lab transition. Returns true if the current user is
    a client contact, the Sample (context) is active and it belongs to the
    same client.
    """
    sample = get_sample(context)
    if not sample:
        return False

    # Inactive samples cannot be sent to the lab
    if not api.is_active(sample):
        return False

    user = api.get_current_user()
    # Client contacts may only ship samples belonging to their own client
    if "Client" in user.getRoles():
        client = sample.aq_parent
        return bool(client.getContactFromUsername(user.id))
    return True
def isActive(instance):
    """Returns True if the object is neither in a cancelled nor inactive state

    :param instance: the content object to check
    :returns: True when the object is active
    """
    # Thin wrapper that simply delegates to the api helper
    return api.is_active(instance)
def guard_reinstate(analysis):
    """Return whether the transition "reinstate" can be performed or not.
    Returns True only when the Analysis Request the analysis belongs to is in
    a non-cancelled state. Otherwise, returns False.

    :param analysis: the analysis the transition is evaluated against
    :returns: True when the parent Analysis Request is active
    """
    return api.is_active(analysis.getRequest())
def set(self, instance, items, prices=None, specs=None, hidden=None, **kw):
    """Set/Assign Analyses to this AR

    :param items: List of Analysis objects/brains, AnalysisService
                  objects/brains and/or Analysis Service uids
    :type items: list
    :param prices: Mapping of AnalysisService UID -> price
    :type prices: dict
    :param specs: List of AnalysisService UID -> Result Range mappings
    :type specs: list
    :param hidden: List of AnalysisService UID -> Hidden mappings
    :type hidden: list
    :returns: list of new assigned Analyses
    """
    # This setter returns a list of new set Analyses
    new_analyses = []

    # Current assigned analyses
    analyses = instance.objectValues("Analysis")

    # Analyses which are in a non-open state must be retained, except those
    # that are in a registered state (the sample has not been received)
    non_open_analyses = filter(lambda an: not an.isOpen(), analyses)
    non_open_analyses = filter(
        lambda an: api.get_workflow_status_of(an) != "registered",
        non_open_analyses)

    # Prevent removing all analyses
    #
    # N.B.: Non-open analyses are rendered disabled in the HTML form.
    #       Therefore, their UIDs are not included in the submitted UIDs.
    if not items and not non_open_analyses:
        logger.warn("Not allowed to remove all Analyses from AR.")
        return new_analyses

    # Bail out if the items is not a list type
    if not isinstance(items, (list, tuple)):
        raise TypeError(
            "Items parameter must be a tuple or list, got '{}'".format(
                type(items)))

    # Bail out if the AR is inactive
    # NOTE(review): the .format(AddAnalysis) call below has no placeholder
    # in the string — harmless no-op, likely a copy/paste leftover
    if not api.is_active(instance):
        raise Unauthorized(
            "Inactive ARs can not be modified".format(AddAnalysis))

    # Bail out if the user has not the right permission
    if not check_permission(AddAnalysis, instance):
        raise Unauthorized(
            "You do not have the '{}' permission".format(AddAnalysis))

    # Convert the items to a valid list of AnalysisServices
    services = filter(None, map(self._to_service, items))

    # Calculate dependencies
    # FIXME Infinite recursion error possible here, if the formula includes
    #       the Keyword of the Service that includes the Calculation
    dependencies = map(lambda s: s.getServiceDependencies(), services)
    dependencies = list(itertools.chain.from_iterable(dependencies))

    # Merge dependencies and services
    services = set(services + dependencies)

    # Modify existing AR specs with new form values of selected analyses.
    self._update_specs(instance, specs)

    # Create a mapping of Service UID -> Hidden status
    if hidden is None:
        hidden = []
    hidden = dict(map(lambda d: (d.get("uid"), d.get("hidden")), hidden))

    # Ensure we have a prices dictionary
    if prices is None:
        prices = dict()

    # CREATE/MODIFY ANALYSES
    for service in services:
        service_uid = api.get_uid(service)
        keyword = service.getKeyword()

        # Create the Analysis if it doesn't exist
        if shasattr(instance, keyword):
            analysis = instance._getOb(keyword)
        else:
            analysis = create_analysis(instance, service)
            new_analyses.append(analysis)

        # set the hidden status
        analysis.setHidden(hidden.get(service_uid, False))

        # Set the price of the Analysis
        analysis.setPrice(prices.get(service_uid, service.getPrice()))

    # DELETE ANALYSES

    # Service UIDs
    service_uids = map(api.get_uid, services)

    # Analyses IDs to delete
    delete_ids = []

    # Assigned Attachments
    assigned_attachments = []

    for analysis in analyses:
        service_uid = analysis.getServiceUID()

        # Skip if the Service is selected
        if service_uid in service_uids:
            continue

        # Skip non-open Analyses
        if analysis in non_open_analyses:
            continue

        # Remember assigned attachments
        # https://github.com/senaite/senaite.core/issues/1025
        assigned_attachments.extend(analysis.getAttachment())
        analysis.setAttachment([])

        # If it is assigned to a worksheet, unassign it before deletion.
        worksheet = analysis.getWorksheet()
        if worksheet:
            worksheet.removeAnalysis(analysis)

        # Unset the partition reference
        # TODO Remove in >v1.3.0 - This is kept for backwards-compatibility
        part = analysis.getSamplePartition()
        if part:
            # From this partition, remove the reference to the current
            # analysis that is going to be removed to prevent inconsistent
            # states (Sample Partitions referencing to Analyses that do not
            # exist anymore
            an_uid = api.get_uid(analysis)
            part_ans = part.getAnalyses() or []
            part_ans = filter(lambda an: api.get_uid(an) != an_uid, part_ans)
            part.setAnalyses(part_ans)

        # Unset the Analysis-to-Partition reference
        analysis.setSamplePartition(None)
        delete_ids.append(analysis.getId())

    if delete_ids:
        # Note: subscriber might promote the AR
        instance.manage_delObjects(ids=delete_ids)

    # Remove orphaned attachments
    for attachment in assigned_attachments:
        # only delete attachments which are no further linked
        if not attachment.getLinkedAnalyses():
            logger.info("Deleting attachment: {}".format(
                attachment.getId()))
            attachment_id = api.get_id(attachment)
            api.get_parent(attachment).manage_delObjects(attachment_id)

    return new_analyses
def set(self, instance, items, prices=None, specs=None, hidden=None, **kw):
    """Set/Assign Analyses to this AR

    :param items: List of Analysis objects/brains, AnalysisService
                  objects/brains and/or Analysis Service uids
    :type items: list
    :param prices: Mapping of AnalysisService UID -> price
    :type prices: dict
    :param specs: List of AnalysisService UID -> Result Range mappings
    :type specs: list
    :param hidden: List of AnalysisService UID -> Hidden mappings
    :type hidden: list
    :returns: list of new assigned Analyses
    """
    # NOTE(review): despite the docstring, this variant has no explicit
    # return statement (returns None) — confirm against callers
    if items is None:
        items = []

    # Bail out if the items is not a list type
    if not isinstance(items, (list, tuple)):
        raise TypeError(
            "Items parameter must be a tuple or list, got '{}'".format(
                type(items)))

    # Bail out if the AR is inactive
    # NOTE(review): the .format(AddAnalysis) call below has no placeholder
    # in the string — harmless no-op, likely a copy/paste leftover
    if not api.is_active(instance):
        raise Unauthorized(
            "Inactive ARs can not be modified".format(AddAnalysis))

    # Bail out if the user has not the right permission
    if not check_permission(AddAnalysis, instance):
        raise Unauthorized(
            "You do not have the '{}' permission".format(AddAnalysis))

    # Convert the items to a valid list of AnalysisServices
    services = filter(None, map(self._to_service, items))

    # Calculate dependencies
    dependencies = map(lambda s: s.getServiceDependencies(), services)
    dependencies = list(itertools.chain.from_iterable(dependencies))

    # Merge dependencies and services
    services = set(services + dependencies)

    # Modify existing AR specs with new form values of selected analyses
    specs = self.resolve_specs(instance, specs)

    # Add analyses
    params = dict(prices=prices, hidden=hidden, specs=specs)
    map(lambda serv: self.add_analysis(instance, serv, **params), services)

    # Get all analyses (those from descendants included)
    analyses = instance.objectValues("Analysis")
    analyses.extend(self.get_analyses_from_descendants(instance))

    # Bail out those not in services list or submitted
    uids = map(api.get_uid, services)
    to_remove = filter(lambda an: an.getServiceUID() not in uids, analyses)
    to_remove = filter(lambda an: not ISubmitted.providedBy(an), to_remove)

    # Remove analyses
    map(self.remove_analysis, to_remove)
def isActive(object_or_brain):
    """Returns True if the object is neither in a cancelled nor inactive
    state.

    :param object_or_brain: content object or catalog brain to check
    :returns: True when the object/brain is in an active state
    """
    # Thin wrapper that simply delegates to the api helper
    return api.is_active(object_or_brain)