def upgrade_indexes():
    """Ensure every index in INDEXES exists with the expected meta type.

    Wrong-typed indexes are dropped and re-added; newly created indexes
    are (re)built at the end.
    """
    logger.info("Fixing broken calculations (re-assignment of dependents)...")
    pending = []
    for catalog, name, attribute, meta_type in INDEXES:
        tool = api.get_tool(catalog)
        # look the index up directly in the low-level catalog
        existing = tool._catalog.indexes.get(name, None)
        if existing and existing.meta_type == meta_type:
            # nothing to do: index present with the right meta type
            logger.info("*** Index '{}' of type '{}' is already in catalog '{}'"
                        .format(name, meta_type, catalog))
            continue
        if existing is not None:
            # drop the wrong-typed index before re-adding it
            logger.info("*** Removing index '{}' from catalog '{}'"
                        .format(name, catalog))
            tool._catalog.delIndex(name)
        logger.info("*** Adding index '{}' of type '{}' to catalog '{}'"
                    .format(name, meta_type, catalog))
        tool.addIndex(name, meta_type)
        pending.append((catalog, name))
    # build everything that was (re-)created above
    for catalog, name in pending:
        tool = api.get_tool(catalog)
        logger.info("*** Indexing new index '{}' of catalog {} ..."
                    .format(name, catalog))
        tool.manage_reindexIndex(name)
        logger.info("*** Indexing new index '{}' of catalog {} [DONE]"
                    .format(name, catalog))
def getClientList(self, contentFilter):
    """Return client brains for the folder listing.

    Brains beyond the first page are appended unwoken; within the first
    page, clients are woken up and filtered by search term and by the
    "Manage Analysis Requests" permission.
    """
    search_term = self.request.get(self.form_id + '_filter', '').lower()
    mtool = api.get_tool('portal_membership')
    state = self.request.get('%s_review_state' % self.form_id,
                             self.default_review_state)
    # Upper bound on how many objects get woken up for further permission
    # checks, which might get expensive on sites with many clients
    list_pagesize = self.request.get("list_pagesize", self.pagesize)
    states = {
        'default': ['active', ],
        'active': ['active', ],
        'inactive': ['inactive', ],
        'all': ['active', 'inactive'],
    }
    # Use the catalog to speed things up and also limit the results
    catalog = api.get_tool("portal_catalog")
    catalog_query = {
        "portal_type": "Client",
        "inactive_state": states[state],
        "sort_on": "sortable_title",
        "sort_order": "ascending",
    }
    # Narrow the results further by the search term, if any
    if search_term:
        catalog_query["SearchableText"] = search_term
    logger.debug("getClientList::catalog_query=%s" % catalog_query)

    clients = []
    for brain in catalog(catalog_query):
        if len(clients) > list_pagesize:
            # past the first page: keep the raw brain, skip the checks
            clients.append(brain)
            continue
        # wake up the object for the per-client checks below
        client = brain.getObject()
        # skip clients where the search term does not match
        if search_term and not client_match(client, search_term):
            continue
        # Only show clients to which we have Manage AR rights.
        # (ritamo only sees Happy Hills).
        if not mtool.checkPermission(ManageAnalysisRequests, client):
            continue
        clients.append(brain)
    return clients
def _get_catalog_for_uid(uid):
    """Return the preferred catalog for the object referenced by `uid`.

    Resolves the portal_type through the uid_catalog, then picks the first
    catalog registered for that type, falling back to portal_catalog only
    when nothing more specific is registered.
    """
    archetype_tool = api.get_tool('archetype_tool')
    uid_catalog = api.get_tool('uid_catalog')
    portal_catalog = api.get_tool('portal_catalog')
    # uid_catalog brain gives us the portal_type
    brain = uid_catalog(UID=uid)[0]
    # registered catalogs for that type, preferring anything over
    # 'portal_catalog'; XXX multiple catalogs in setuphandlers.py?
    registered = [cat for cat
                  in archetype_tool.getCatalogsByType(brain.portal_type)
                  if cat != portal_catalog]
    return registered[0] if registered else portal_catalog
def copy_to_new_allowed(self):
    """Whether the current user may duplicate ARs ("Copy to new").

    Any one of the three permissions below is sufficient.
    """
    mtool = api.get_tool('portal_membership')
    allowed = (
        mtool.checkPermission(ManageAnalysisRequests, self.context) or
        mtool.checkPermission(ModifyPortalContent, self.context) or
        mtool.checkPermission(AddAnalysisRequest, self.portal))
    return bool(allowed)
def search_by_prefix(portal_type, prefix):
    """Returns brains which share the same portal_type and ID prefix
    """
    catalog = api.get_tool("uid_catalog")
    brains = catalog({"portal_type": portal_type})
    # keep only brains whose ID starts with the given prefix
    return [brain for brain in brains
            if api.get_id(brain).startswith(prefix)]
def reindexMovedObject(obj, event):
    """Reindex a moved/renamed object in all bika catalogs tracking it.

    For renames, the stale entry under the old path is uncataloged before
    the object is reindexed under its new location.
    """
    bika_catalogs = getattr(obj, "_bika_catalogs", [])
    for name in bika_catalogs:
        # BUGFIX: log message typo ("Reidexing" -> "Reindexing")
        logger.debug("Reindexing moved object '{}' in catalog '{}'".format(
            obj.getId(), name))
        catalog = api.get_tool(name)
        # old and new name (both set only for rename events)
        old_name = event.oldName
        new_name = event.newName
        if old_name and new_name:
            old_parent = event.oldParent
            old_ppath = api.get_path(old_parent)
            old_path = "/".join([old_ppath, old_name])
            # uncatalog the old path
            catalog.uncatalog_object(old_path)
        # reindex object
        catalog.reindexObject(obj)
def setup_dashboard_panels_visibility_registry(section_name):
    """ Initializes the values for panels visibility in registry_records.
    By default, only users with LabManager or Manager roles can see the
    panels.
    :param section_name:
    :return: An string like: "role1,yes,role2,no,rol3,no"
    """
    registry_info = get_dashboard_registry_record()
    # All roles defined in the system
    acl_users = get_tool("acl_users")
    roles = list(acl_users.portal_role_manager.listRoleIds())
    # Build "role1,yes,role2,no,..." pairs; only LabManager/Manager get
    # the panels by default
    tokens = []
    for role in roles:
        tokens.append(role)
        tokens.append('yes' if role in ['LabManager', 'Manager'] else 'no')
    role_permissions = ','.join(tokens)
    # Store the permissions string for this section and persist the record
    registry_info[get_unicode(section_name)] = get_unicode(role_permissions)
    set_dashboard_registry_record(registry_info)
    return registry_info
def _get_services(self, full_objects=False):
    """Fetch and return analysis service brains.

    :param full_objects: when True, wake up and return the objects instead
    """
    setup_catalog = api.get_tool('bika_setup_catalog')
    brains = setup_catalog(portal_type='AnalysisService')
    if not full_objects:
        return brains
    return map(api.get_object, brains)
def upgrade_attachments_to_blobs(portal):
    """get/set the attachment file fields to migrate existing fields to blob
    """
    logger.info("Upgrading Attachments to Blobs")
    portal_catalog = api.get_tool("portal_catalog")
    for brain in portal_catalog({"portal_type": "Attachment"}):
        attachment = api.get_object(brain)
        # re-setting the field value triggers the blob migration
        attachment.setAttachmentFile(attachment.getAttachmentFile())
def _getLabContacts(self):
    """Return a DisplayList of active Lab Contacts (UID -> Title)."""
    bsc = api.get_tool('bika_setup_catalog')
    # fallback entry - all Lab Contacts / nothing selected
    pairs = [['', '']]
    brains = bsc(portal_type='LabContact',
                 inactive_state='active',
                 sort_on='sortable_title')
    pairs.extend([(brain.UID, brain.Title) for brain in brains])
    return DisplayList(pairs)
def del_column(catalog_id, name):
    """Removes the given metadata column from the catalog

    :returns: True if the column was removed, False if it was not present
    """
    catalog = api.get_tool(catalog_id)
    if name in catalog.schema():
        catalog.delColumn(name)
        logger.info("Column '{}' removed from '{}'".format(name, catalog_id))
        return True
    logger.info("Column '{}' not in catalog '{}'".format(name, catalog_id))
    return False
def indexObject(obj, event):
    """Additionally index the object into the bika catalogs
    """
    for name in getattr(obj, "_bika_catalogs", []):
        logger.debug("Indexing object '{}' into catalog '{}'".format(
            obj.getId(), name))
        api.get_tool(name).indexObject(obj)
def reindexObject(obj, event):
    """Reindex an object in all registered catalogs
    """
    bika_catalogs = getattr(obj, "_bika_catalogs", [])
    for name in bika_catalogs:
        # BUGFIX: message said "Unindexing ... from catalog" although this
        # handler reindexes the object (copy-paste from the unindex handler)
        logger.debug("Reindexing object '{}' in catalog '{}'".format(
            obj.getId(), name))
        catalog = api.get_tool(name)
        catalog.reindexObject(obj)
def del_index(catalog_id, name):
    """Removes the given index from the catalog

    :returns: True if the index was removed, False if it was not present
    """
    catalog = api.get_tool(catalog_id)
    if name in catalog.indexes():
        catalog.delIndex(name)
        logger.info("Index '{}' removed from '{}'".format(name, catalog_id))
        return True
    logger.info("Index '{}' not in catalog '{}'".format(name, catalog_id))
    return False
def del_index(portal, catalog_id, index_name):
    """Remove `index_name` from the given catalog, if present.

    :param portal: portal object (unused; kept for the upgrade-step
        signature convention)
    """
    logger.info("Removing '{}' index from '{}' ..."
                .format(index_name, catalog_id))
    catalog = api.get_tool(catalog_id)
    if index_name not in catalog.indexes():
        logger.info("Index '{}' not in catalog '{}' [SKIP]"
                    .format(index_name, catalog_id))
        return
    catalog.delIndex(index_name)
    # BUGFIX: the final message claimed the removal was still in progress
    # ("Removing old index ...") even though delIndex already completed
    logger.info("Removing '{}' index from '{}' [DONE]"
                .format(index_name, catalog_id))
def __call__(self):
    """Render the clients folder listing, adding actions per permission."""
    self.context_actions = {}
    mtool = api.get_tool('portal_membership')
    # users who may add clients get the "Add" action
    if mtool.checkPermission(AddClient, self.context):
        self.context_actions[_('Add')] = {
            'url': 'createObject?type_name=Client',
            'icon': '++resource++bika.lims.images/add.png',
        }
    # managers additionally get the select column for batch actions
    if mtool.checkPermission(ManageClients, self.context):
        self.show_select_column = True
    return super(ClientFolderContentsView, self).__call__()
def getAnalysisCategories(self):
    """Return all available analysis categories
    """
    bsc = api.get_tool("bika_setup_catalog")
    brains = bsc(portal_type="AnalysisCategory",
                 is_active=True,
                 sort_on="sortable_title")
    return DisplayList([(brain.UID, brain.Title) for brain in brains])
def apply_doctor_permissions_for_clients(portal, ut):
    """Re-apply workflow role mappings on every Doctor object."""
    workflow_tool = api.get_tool("portal_workflow")
    workflow = workflow_tool.getWorkflowById('senaite_health_doctor_workflow')
    catalog = api.get_tool('portal_catalog')
    brains = catalog(portal_type='Doctor')
    total = len(brains)
    logger.info("Changing permissions for doctor objects: {0}".format(total))
    counter = 0
    for counter, brain in enumerate(brains, start=1):
        obj = api.get_object(brain)
        workflow.updateRoleMappingsFor(obj)
        obj.reindexObject()
        # progress note every 100 objects
        if counter % 100 == 0:
            logger.info("Changing permissions for doctor objects: "
                        "{0}/{1}".format(counter, total))
    logger.info("Changed permissions for doctor objects: "
                "{0}/{1}".format(counter, total))
def get_workflows():
    """Returns a mapping of id->workflow
    """
    wftool = api.get_tool("portal_workflow")
    candidates = [(wfid, wftool.getWorkflowById(wfid))
                  for wfid in wftool.objectIds()]
    # keep only DCWorkflow-style workflows that support role mappings
    return dict([(wfid, wf) for wfid, wf in candidates
                 if hasattr(aq_base(wf), "updateRoleMappingsFor")])
def update_workflow_mappings_for(portal, wf_id, brains):
    """Update role mappings of workflow `wf_id` for all given brains."""
    wf_tool = api.get_tool("portal_workflow")
    workflow = wf_tool.getWorkflowById(wf_id)
    total = len(brains)
    for num, brain in enumerate(brains):
        # progress note every 100 objects
        if num and num % 100 == 0:
            logger.info("Updating role mappings: {0}/{1}".format(num, total))
        obj = api.get_object(brain)
        workflow.updateRoleMappingsFor(obj)
        # only the security index needs reindexing here
        obj.reindexObject(idxs=["allowedRolesAndUsers"])
def _get_worksheet_templates_brains(self):
    """ Returns available Worksheet Templates as brains. Only active
    Worksheet Templates are considered
    :return: list of brains
    """
    setup_catalog = api.get_tool('bika_setup_catalog')
    return setup_catalog(portal_type='WorksheetTemplate',
                         inactive_state='active')
def load_analysis_categories(self):
    """Cache a zero-padded sort key per analysis category title."""
    bsc = api.get_tool('bika_setup_catalog')
    categories = bsc(portal_type="AnalysisCategory",
                     sort_on="sortable_title")
    # map Title -> "0000".."nnnn" position string used for later sorting
    self.analysis_categories_order = {
        brain.Title: "{:04}".format(position)
        for position, brain in enumerate(categories)
    }
def add_index(portal, catalog_id, index_name, index_attribute, index_metatype):
    """Add an index to the catalog (unless present) and build it."""
    logger.info("Adding '{}' index to '{}' ...".format(index_name, catalog_id))
    catalog = api.get_tool(catalog_id)
    if index_name not in catalog.indexes():
        catalog.addIndex(index_name, index_metatype)
        # populate the freshly added index
        logger.info("Indexing new index '{}' ...".format(index_name))
        catalog.manage_reindexIndex(index_name)
    else:
        logger.info("Index '{}' already in catalog '{}' [SKIP]".format(
            index_name, catalog_id))
def update_patients_role_mappings(portal):
    """Updates the role mappings for patients folder cause we've changed the
    workflow bound to this type and we've added permission to Delete Objects
    """
    logger.info("Updating role mappings of patients folder ...")
    workflow = api.get_tool("portal_workflow").getWorkflowById(
        "senaite_health_patients_workflow")
    patients = portal.patients
    workflow.updateRoleMappingsFor(patients)
    patients.reindexObject()
    logger.info("Updating role mappings of patients folder [DONE]")
def _get_instruments_brains(self):
    """ Returns available Instruments as brains. Only active Instruments
    are considered
    :return: list of brains
    """
    setup_catalog = api.get_tool('bika_setup_catalog')
    return setup_catalog(portal_type='Instrument',
                         inactive_state='active')
def setup_auditlog_catalog(portal):
    """Setup auditlog catalog

    Creates the indexes declared in ``auditlog_catalog._indexes`` (skipping
    existing ones) and registers the audit log catalog for every known
    portal type in the archetype tool.

    :param portal: portal object (unused beyond signature convention)
    """
    logger.info("*** Setup Audit Log Catalog ***")
    catalog_id = auditlog_catalog.CATALOG_AUDITLOG
    catalog = api.get_tool(catalog_id)
    for name, meta_type in auditlog_catalog._indexes.iteritems():
        indexes = catalog.indexes()
        if name in indexes:
            logger.info("*** Index '%s' already in Catalog [SKIP]" % name)
            continue
        logger.info("*** Adding Index '%s' for field '%s' to catalog ..."
                    % (meta_type, name))
        catalog.addIndex(name, meta_type)
        # Setup TextIndexNG3 for listings
        # XXX is there another way to do this?
        # NOTE(review): pokes directly at the TextIndexNG3 internals to
        # configure encoding, parser and autoexpansion — confirm these
        # attribute names against the installed TextIndexNG3 version
        if meta_type == "TextIndexNG3":
            index = catalog._catalog.getIndex(name)
            index.index.default_encoding = "utf-8"
            index.index.query_parser = "txng.parsers.en"
            index.index.autoexpand = "always"
            index.index.autoexpand_limit = 3
        logger.info("*** Added Index '%s' for field '%s' to catalog [DONE]"
                    % (meta_type, name))
    # Attach the catalog to all known portal types
    at = api.get_tool("archetype_tool")
    pt = api.get_tool("portal_types")
    for portal_type in pt.listContentTypes():
        catalogs = at.getCatalogsByType(portal_type)
        # register by id, preserving the already-registered catalogs
        if catalog not in catalogs:
            new_catalogs = map(lambda c: c.getId(), catalogs) + [catalog_id]
            at.setCatalogsByType(portal_type, new_catalogs)
            logger.info("*** Adding catalog '{}' for '{}'".format(
                catalog_id, portal_type))
def update_rolemappings_for(brains, workflow_id):
    """Apply `workflow_id` role mappings on every object behind `brains`."""
    logger.info("Updating role mappings for '{}'".format(workflow_id))
    workflow = api.get_tool("portal_workflow").getWorkflowById(workflow_id)
    total = len(brains)
    num = 0  # keeps the final log correct when `brains` is empty
    for num, brain in enumerate(brains, start=1):
        obj = api.get_object(brain)
        workflow.updateRoleMappingsFor(obj)
        # progress note every 100 objects
        if num % 100 == 0:
            logger.info("Updating role mappings: {0}/{1}".format(num, total))
    logger.info("{} objects updated".format(num))
def fix_permission_on_analysisrequests():
    """Restrict "Delete objects" on current ARs to managers and owners."""
    catalog = api.get_tool(CATALOG_ANALYSIS_REQUEST_LISTING)
    valid_states = ['sample_due', 'sample_received', 'sampled',
                    'to_be_sampled', 'to_be_preserved']
    roles = ['Manager', 'LabManager', 'Owner']
    for brain in catalog(cancellation_state='active',
                         review_state=valid_states):
        obj = api.get_object(brain)
        # third arg 0 -> do not acquire the permission from parents
        obj.manage_permission(permissions.DeleteObjects, roles, 0)
        logger.info("Fixed '{}' permission on '{}'".format(
            permissions.DeleteObjects, obj.Title()))
def get_groups(user=None):
    """Return the groups of the user

    :param user: A user id, memberdata object or None for the current user
    :returns: List of groups
    """
    portal_groups = get_tool("portal_groups")
    member = get_user(user)
    if member is None:
        # unknown/anonymous user: no groups
        return []
    return portal_groups.getGroupsForPrincipal(member)
def check_permission(permission, obj):
    """ Returns if the current user has rights for the permission passed in
    against the obj passed in
    :param permission: name of the permission
    :param obj: the object to check the permission against for the current user
    :return: 1 if the user has rights for this permission for the passed in obj
    """
    mtool = api.get_tool('portal_membership')
    # idiom fix: the local was named `object`, shadowing the builtin
    target = api.get_object(obj)
    return mtool.checkPermission(permission, target)
def get_brain(self, uid, catalog):
    """Return the brain for `uid`, preferring the pre-fetched uids_map.

    Falls back to a direct catalog query on cache miss; returns None when
    the UID is unknown to the catalog as well.
    """
    cached = self.uids_map.get(uid, None)
    if cached:
        return cached
    # cache miss: query the catalog directly
    logger.warning("UID not found in brains map: {}".format(uid))
    results = api.get_tool(catalog)(UID=uid)
    if results:
        return results[0]
    return None
def fix_ar_sample_workflow(brain_or_object):
    """Re-set the state of an AR, Sample and SamplePartition to match the
    least-early state of all contained valid/current analyses. Ignores
    retracted/rejected/cancelled analyses.

    :param brain_or_object: catalog brain or AnalysisRequest object
    """
    def log_change_state(ar_id, obj_id, src, dst):
        # BUGFIX: the message was built but never emitted; log it now
        msg = "While fixing {ar_id}: " \
              "state changed for {obj_id}: " \
              "{src} -> {dst}".format(**locals())
        logger.info(msg)

    ar = get_object(brain_or_object)
    if not IAnalysisRequest.providedBy(ar):
        return

    wf = api.get_tool('portal_workflow')
    arwf = wf['bika_ar_workflow']
    anwf = wf['bika_analysis_workflow']
    swf = wf['bika_sample_workflow']
    ignored = ['retracted', 'rejected']

    # states in definition order, minus the ignored terminal states
    tmp = filter(lambda x: x[0] not in ignored, arwf.states.items())
    arstates = OrderedDict(tmp)
    tmp = filter(lambda x: x[0] not in ignored, swf.states.items())
    samplestates = OrderedDict(tmp)
    tmp = filter(lambda x: x[0] in arstates, anwf.states.items())
    anstates = OrderedDict(tmp)

    # find least-early analysis state
    # !!! Assumes states in definitions are roughly ordered earliest to latest
    ar_dest_state = arstates.items()[0][0]
    for anstate in anstates:
        if ar.getAnalyses(review_state=anstate):
            ar_dest_state = anstate

    # Force state of AR
    ar_state = get_review_status(ar)
    if ar_state != ar_dest_state:
        changeWorkflowState(ar, arwf.id, ar_dest_state)
        log_change_state(ar.id, ar.id, ar_state, ar_dest_state)

    # Force state of Sample
    sample = ar.getSample()
    sample_state = get_review_status(sample)
    if ar_dest_state in samplestates:
        changeWorkflowState(sample, swf.id, ar_dest_state)
        log_change_state(ar.id, sample.id, sample_state, ar_dest_state)

    # Force states of Partitions
    for part in sample.objectValues():
        part_state = get_review_status(part)
        if part_state != ar_dest_state:
            # BUGFIX: was changing the *sample's* state again instead of
            # the partition's
            changeWorkflowState(part, swf.id, ar_dest_state)
            log_change_state(ar.id, part.id, part_state, ar_dest_state)
def fix_service_profile_template_inconsistences():
    """Purge inactive services from Profiles and AR Templates."""
    catalog = api.get_tool('bika_setup_catalog')
    for brain in catalog(portal_type='AnalysisService'):
        obj = api.get_object(brain)
        # If this service is inactive, be sure it is not used neither in
        # Profiles nor in AR Templates
        if not api.is_active(obj):
            obj.after_deactivate_transition_event()
def workflow_script_deactivate(self):
    """Veto deactivation while this category still contains services."""
    query = dict(portal_type="AnalysisService", category_uid=self.UID())
    if api.search(query, SETUP_CATALOG):
        message = _("Category cannot be deactivated because it contains "
                    "Analysis Services")
        api.get_tool("plone_utils").addPortalMessage(message, 'error')
        # roll back anything the transition already did
        transaction.abort()
        raise WorkflowException
def getSamples(self, **kwargs):
    """Return samples this Doctor is associated to
    """
    catalog = api.get_tool(CATALOG_ANALYSIS_REQUEST_LISTING, context=self)
    # keep only kwargs that are actual catalog indexes
    indexes = catalog.indexes()
    query = dict([(key, value) for key, value in kwargs.items()
                  if key in indexes])
    query["getDoctorUID"] = api.get_uid(self)
    brains = api.search(query, CATALOG_ANALYSIS_REQUEST_LISTING)
    if kwargs.get("full_objects", False):
        return map(api.get_object, brains)
    return brains
def getSamples(self, **kwargs):
    """Return samples taken from this Patient
    """
    catalog = api.get_tool(CATALOG_ANALYSIS_REQUEST_LISTING, context=self)
    # keep only kwargs that are actual catalog indexes
    indexes = catalog.indexes()
    query = dict([(key, value) for key, value in kwargs.items()
                  if key in indexes])
    query["getPatientUID"] = api.get_uid(self)
    brains = api.search(query, CATALOG_ANALYSIS_REQUEST_LISTING)
    if kwargs.get("full_objects", False):
        return map(api.get_object, brains)
    return brains
def fix_service_profile_template_inconsistences():
    """Purge inactive services from Profiles and AR Templates."""
    catalog = api.get_tool('bika_setup_catalog')
    for brain in catalog(portal_type='AnalysisService'):
        obj = api.get_object(brain)
        if isActive(obj):
            continue
        # If this service is inactive, be sure it is not used neither in
        # Profiles nor in AR Templates
        obj.after_deactivate_transition_event()
def fix_items_stuck_in_sample_prep_states(portal, ut):
    """Removing sample preparation workflows from the system may have left
    some samples ARs and Analyses in the state 'sample_prep'. These should
    be transitioned to 'sample_due' so that they can be received normally.
    :param portal: portal object
    :return: None
    """
    wftool = api.get_tool('portal_workflow')
    catalog_ids = [
        'bika_catalog',
        'bika_analysis_catalog',
        'bika_catalog_analysisrequest_listing'
    ]
    for catalog_id in catalog_ids:
        catalog = api.get_tool(catalog_id)
        brains = catalog(review_state='sample_prep')
        for brain in brains:
            instance = brain.getObject()
            # the first registered workflow is assumed to be the main one
            wfid = get_workflows_for(instance)[0]
            wf = wftool[wfid]
            # get event properties for last event that is not sample_prep
            # NOTE(review): events with comments are skipped too — assumes
            # automatic transitions carry no comments; IndexError if no
            # such event exists
            rh = wftool.getInfoFor(instance, 'review_history')
            event = [
                x for x in rh
                if 'prep' not in x['review_state'] and not x['comments']
            ][-1]
            state_id, action_id = event['review_state'], event['action']
            # set state
            changeWorkflowState(instance, wfid, state_id)
            # fire transition handler for the action that originally was fired.
            old_sdef = new_sdef = wf.states[state_id]
            if action_id is not None:
                tdef = wf.transitions[action_id]
                notify(
                    AfterTransitionEvent(instance, wf, old_sdef, new_sdef,
                                         tdef, event, {}))
            # check AR state matches the analyses
            if IAnalysisRequest.providedBy(instance):
                fix_ar_sample_workflow(instance)
        logger.info("Removed sample_prep state from {} items in {}.".format(
            len(brains), catalog_id))
def reindexObjectSecurity(obj, event):
    """Reindex only security information on catalogs
    """
    for name in getattr(obj, "_bika_catalogs", []):
        logger.debug(
            "Reindex security for object '{}' from catalog '{}'".format(
                obj.getId(), name))
        catalog = api.get_tool(name)
        # restrict to the CMF security indexes, leave metadata untouched
        catalog.reindexObject(obj,
                              idxs=obj._cmf_security_indexes,
                              update_metadata=0)
def get_catalog_date_indexes(self):
    """Returns available catalog date indexes for the selected query type
    """
    catalog = api.get_tool(self.get_query_catalog())
    # keep only indexes whose meta type is one of the known date types
    names = [index.getId() for index in catalog.getIndexObjects()
             if index.meta_type in DATE_INDEX_TYPES]
    return sorted(names)
def get_group(group):
    """Return the group

    :param group: The group name/id (or an already-resolved GroupData)
    :returns: Group
    """
    portal_groups = get_tool("portal_groups")
    if isinstance(group, basestring):
        # resolve names/ids through the groups tool
        return portal_groups.getGroupById(group)
    # GroupData (or anything else) is passed through unchanged
    return group
def getBatches(self, **kwargs):
    """ Returns the Batches this Doctor is assigned to """
    catalog = api.get_tool("bika_catalog")
    # keep only kwargs that are actual catalog indexes
    indexes = catalog.indexes()
    query = dict([(key, value) for key, value in kwargs.items()
                  if key in indexes])
    query["getDoctorUID"] = api.get_uid(self)
    brains = api.search(query, "bika_catalog")
    if kwargs.get("full_objects", False):
        return map(api.get_object, brains)
    return brains
def init_auditlog(portal):
    """Initialize the contents for the audit log

    Takes one snapshot per review-history entry for every object known to
    the uid_catalog, skipping types excluded from auditing and objects
    that already have snapshots.
    """
    # reindex the auditlog folder to display the icon right in the setup
    portal.bika_setup.auditlog.reindexObject()

    # Initialize contents for audit logging
    start = time.time()
    uid_catalog = api.get_tool("uid_catalog")
    brains = uid_catalog()
    total = len(brains)

    logger.info("Initializing {} objects for the audit trail...".format(total))
    for num, brain in enumerate(brains):
        # Progress notification; commit keeps the transaction bounded
        if num and num % 1000 == 0:
            transaction.commit()
            # BUGFIX: typo "ojects" -> "objects"
            logger.info("{}/{} objects initialized for audit logging".format(
                num, total))
        # End progress notification
        if num + 1 == total:
            end = time.time()
            duration = float(end - start)
            # BUGFIX: typo "ojects" -> "objects"
            logger.info(
                "{} objects initialized for audit logging in {:.2f}s".format(
                    total, duration))
        if api.get_portal_type(brain) in SKIP_TYPES_FOR_AUDIT_LOG:
            continue
        obj = api.get_object(brain)
        if not supports_snapshots(obj):
            continue
        if has_snapshots(obj):
            continue
        # Take one snapshot per review history item
        rh = api.get_review_history(obj, rev=False)
        for item in rh:
            actor = item.get("actor")
            user = get_user(actor)
            if user:
                # remember the roles of the actor
                item["roles"] = get_roles(user)
            # The review history contains the variable "time" which we will
            # set as the "modification" time
            timestamp = item.pop("time", DateTime())
            item["time"] = timestamp.ISO()
            item["modified"] = timestamp.ISO()
            item["remote_address"] = None
            take_snapshot(obj, **item)
def getContactUIDForUser(self):
    """Get the UID of the user associated with the authenticated user
    """
    membership_tool = api.get_tool("portal_membership")
    username = membership_tool.getAuthenticatedMember().getUserName()
    contacts = self.portal_catalog(
        portal_type="Contact",
        getUsername=username
    )
    # only an unambiguous match counts; otherwise None (implicit)
    if len(contacts) == 1:
        return contacts[0].UID
def set_guards_to_inactive_workflow():
    """Wire guard expressions onto the (de)activate transitions of the
    bika_inactive_workflow."""
    wtool = api.get_tool('portal_workflow')
    workflow = wtool.getWorkflowById('bika_inactive_workflow')
    guarded = (
        ('deactivate', 'python:here.guard_deactivate_transition()'),
        ('activate', 'python:here.guard_activate_transition()'),
    )
    for transition_id, expression in guarded:
        transition = workflow.transitions[transition_id]
        guard = transition.getGuard()
        guard.expr = Expression(expression)
        transition.guard = guard
def is_uid(context, value):
    """Checks that the string passed is a valid UID of an existing object
    :param context: Context is only used for acquiring uid_catalog tool.
    :type context: BaseContent
    :param value: A UID.
    :type value: string
    :return: True if the value is a UID and exists as an entry in uid_catalog.
    :rtype: bool
    """
    uid_catalog = api.get_tool('uid_catalog', context=context)
    return len(uid_catalog(UID=value)) > 0
def search_catalogs(portal_type):
    """Returns brains which share the same portal_type

    Queries portal_catalog, bika_setup_catalog and bika_catalog and
    deduplicates the results by UID (first occurrence wins).
    """
    catalog_names = ['portal_catalog', 'bika_setup_catalog', 'bika_catalog']
    # set membership is O(1); the previous list lookup was O(n) per brain
    seen_uids = set()
    all_brains = []
    for catalog_name in catalog_names:
        # fix: dropped an unused `api.get_tool(catalog_name)` local —
        # api.search is handed the catalog name directly
        brains = api.search({"portal_type": portal_type},
                            catalog=catalog_name)
        for brain in brains:
            if brain.UID not in seen_uids:
                all_brains.append(brain)
                seen_uids.add(brain.UID)
    return all_brains
def __call__(self):
    """AJAX endpoint: return one JSON page of clients matching searchTerm."""
    protect.CheckAuthenticator(self.request)
    search_term = self.request.get('searchTerm', '').lower()
    page = self.request.get('page', 1)
    nr_rows = self.request.get('rows', 20)
    sort_order = self.request.get('sord') or 'ascending'
    sort_index = self.request.get('sidx') or 'sortable_title'
    # normalize jqGrid's "desc" to the catalog vocabulary
    if sort_order == "desc":
        sort_order = "descending"

    # Use the catalog to speed things up and also limit the results
    catalog = api.get_tool("portal_catalog")
    catalog_query = {
        "portal_type": "Client",
        "inactive_state": "active",
        "sort_on": sort_index,
        "sort_order": sort_order,
        "sort_limit": 500,
    }
    # Inject the searchTerm to narrow the results further
    if search_term:
        catalog_query["SearchableText"] = search_term
    logger.debug("ajaxGetClients::catalog_query=%s" % catalog_query)

    rows = []
    for brain in catalog(catalog_query):
        client = brain.getObject()
        # skip clients where the search term does not match
        if search_term and not client_match(client, search_term):
            continue
        rows.append({
            "ClientID": client.getClientID(),
            "Title": client.Title(),
            "ClientUID": client.UID(),
        })

    # paginate: integer division plus one page for any remainder
    rows_per_page = int(nr_rows)
    total_rows = len(rows)
    pages = total_rows / rows_per_page
    if total_rows % rows_per_page:
        pages += 1
    first = (int(page) - 1) * rows_per_page
    ret = {'page': page,
           'total': pages,
           'records': total_rows,
           'rows': rows[first:first + rows_per_page]}
    return json.dumps(ret)
def apply_doctor_permissions_for_clients(portal, ut):
    """Open Doctor objects to client contacts (listing, adding, editing)."""
    # Add doctor action for client portal_type
    add_doctor_action_for_client(portal)

    # Allow client contacts to list/add/edit Doctors
    workflow_tool = api.get_tool("portal_workflow")
    workflow = workflow_tool.getWorkflowById('bika_doctor_workflow')
    catalog = api.get_tool('portal_catalog')

    # Adding new index and columns in portal_catalog for doctors
    ut.addIndexAndColumn('portal_catalog', 'allowedRolesAndUsers',
                         'FieldIndex')
    ut.addIndex('portal_catalog', 'getPrimaryReferrerUID', 'FieldIndex')

    brains = catalog(portal_type='Doctor')
    total = len(brains)
    counter = 0
    logger.info(
        "Changing permissions for doctor objects: {0}".format(total))
    for brain in brains:
        # only touch doctors not yet visible to the Client role
        if 'Client' not in (brain.allowedRolesAndUsers or []):
            obj = api.get_object(brain)
            workflow.updateRoleMappingsFor(obj)
            obj.reindexObject()
        counter += 1
        if counter % 100 == 0:
            logger.info(
                "Changing permissions for doctor objects: "
                "{0}/{1}".format(counter, total))
    logger.info(
        "Changed permissions for doctor objects: "
        "{0}/{1}".format(counter, total))

    # Allowing client to view clients folder
    add_permission_for_role(portal.doctors, permissions.View, 'Client')
    add_permission_for_role(portal.doctors, AddDoctor, 'Client')
def _change_inactive_state(service, new_state):
    """Force the bika_inactive_workflow state of `service` to `new_state`.

    Writes the workflow status directly (no transition is fired), then
    refreshes role mappings and the affected indexes.
    """
    msg = "Upgrade v1.2.1: Updating status of {} to '{}'".\
        format(service.getKeyword(), new_state)
    logger.info(msg)
    workflow_tool = api.get_tool('portal_workflow')
    workflow = workflow_tool.getWorkflowById('bika_inactive_workflow')
    workflow_tool.setStatusOf('bika_inactive_workflow', service, {
        'action': None,
        'actor': None,
        'comments': msg,
        'inactive_state': new_state,
        'time': DateTime(),
    })
    workflow.updateRoleMappingsFor(service)
    service.reindexObject(idxs=['allowedRolesAndUsers', 'inactive_state'])
def get_service_by_keyword(self, keyword, default=None):
    """Get a service by keyword

    :param keyword: analysis service keyword to look up
    :param default: returned when zero or more than one service matches
    """
    logger.info("Get service by keyword={}".format(keyword))
    bsc = api.get_tool("bika_setup_catalog")
    results = bsc(portal_type='AnalysisService', getKeyword=keyword)
    if not results:
        # BUGFIX: logger.exception is only meaningful inside an `except`
        # block (it logs the active traceback); use logger.error here
        logger.error("No Analysis Service found for Keyword '{}'. "
                     "Related: LIMS-1614".format(keyword))
        return default
    if len(results) > 1:
        logger.error("More than one Analysis Service found for Keyword '{}'. "
                     .format(keyword))
        return default
    return api.get_object(results[0])
def update_reflexrules_workflow_state(portal):
    """ Updates Reflex Rules' inactive_state, otherwise they don't have it
    by default.
    :param portal: Portal object
    :return: None
    """
    wf_tool = getToolByName(portal, 'portal_workflow')
    logger.info("Updating Reflex Rules' 'inactive_state's...")
    workflow = wf_tool.getWorkflowById("bika_inactive_workflow")
    catalog = api.get_tool('portal_catalog')
    for brain in catalog(portal_type='ReflexRule'):
        obj = brain.getObject()
        workflow.updateRoleMappingsFor(obj)
        obj.reindexObject()
    logger.info("Reflex Rules' 'inactive_state's were updated.")
def _get_object(context, value):
    """Resolve a UID to an object.
    :param context: context is the object containing the field's schema.
    :type context: BaseContent
    :param value: A UID.
    :type value: string
    :return: Returns a Content object.
    :rtype: BaseContent
    """
    # already a content object: nothing to resolve
    if api.is_at_content(value) or api.is_dexterity_content(value):
        return value
    if value and is_uid(context, value):
        uid_catalog = api.get_tool('uid_catalog', context=context)
        brains = uid_catalog(UID=value)
        # UIDs are unique; anything else indicates catalog corruption
        assert len(brains) == 1
        return brains[0].getObject()
    # implicit None when value is falsy or not a known UID
def __call__(self):
    """Render the batch book; enable edit widgets and 'Copy to new'."""
    # Allow "Modify portal content" to see edit widgets
    mtool = api.get_tool('portal_membership')
    self.allow_edit = mtool.checkPermission("Modify portal content",
                                            self.context)
    # Allow certain users to duplicate ARs (Copy to new).
    if self.copy_to_new_allowed:
        updated_states = []
        for review_state in self.review_states:
            transitions = review_state.get('custom_transitions', [])
            transitions.append({'id': 'copy_to_new',
                                'title': _('Copy to new'),
                                'url': 'workflow_action?action=copy_to_new'})
            review_state['custom_transitions'] = transitions
            updated_states.append(review_state)
        self.review_states = updated_states
    return super(BatchBookView, self).__call__()