def get_archive_path(self):
    """Return the physical path of the context, with the portal site
    prefix replaced by the unified SITE_ID
    """
    portal_root = api.get_path(self.environ.getSite())
    context_path = api.get_path(self.context)
    # replace only the first occurrence, i.e. the leading site segment
    return context_path.replace(portal_root, SITE_ID, 1)
def resolve_query_for_shareable(portal_type, context=None):
    """Resolves a query filter for the portal_type passed in and the context
    for which the query has to be filtered by

    :param portal_type: the portal type the query is built for
    :param context: object used to resolve the client the query has to be
        restricted to (optional)
    :returns: dict suitable for a catalog query
    """
    # Resolve the client from the object, if possible
    # NOTE: conditional expression instead of the error-prone
    # `cond and value or default` idiom
    client = get_client_from_chain(context) if context else None
    if client and is_internal_client(client):
        # Client is internal and the portal type is "shareable", the query
        # must display all items of this portal_type that are located
        # inside any of the clients from "internal_clients" folder
        folder = api.get_portal().internal_clients
        return {
            "path": {
                "query": api.get_path(folder),
                "depth": 2
            },
            "portal_type": portal_type,
        }
    elif client:
        # Client is external. Only the items that belong to this client
        return {
            "path": {
                "query": api.get_path(client),
                "depth": 1
            },
            "portal_type": portal_type,
        }

    # We don't know neither the client nor the type of client
    return {"portal_type": portal_type}
def update_items(portal_type=None, uid=None, endpoint=None, **kw):
    """ update items

    1. If the uid is given, the user wants to update the object with the
       data given in request body
    2. If no uid is given, the user wants to update a bunch of objects.
       -> each record contains either an UID, path or parent_path + id

    :param portal_type: portal type (kept for route-signature compatibility)
    :param uid: UID of a single object to update (optional)
    :param endpoint: endpoint name used to build the item URLs
    :returns: list of mapped items for the updated objects
    """
    # disable CSRF
    req.disable_csrf_protection()

    # the data to update
    records = req.get_request_data()

    # guard against an empty request body -- `records[0]` below would
    # otherwise raise an unhandled IndexError
    if not records:
        fail(400, "No update data found in the request body")

    # we have an uid -> try to get an object for it
    obj = get_object_by_uid(uid)
    if obj:
        record = records[0]  # ignore other records if we got an uid
        # Can this object be updated?
        if not is_update_allowed(obj):
            fail(401, "Update of {} is not allowed".format(api.get_path(obj)))
        obj = update_object_with_data(obj, record)
        return make_items_for([obj], endpoint=endpoint)

    # no uid -> go through the record items
    results = []
    for record in records:
        obj = get_object_by_record(record)

        # no object found for this record
        if obj is None:
            continue

        # Can this object be updated?
        if not is_update_allowed(obj):
            fail(401, "Update of {} is not allowed".format(api.get_path(obj)))

        # update the object with the given record data
        obj = update_object_with_data(obj, record)
        results.append(obj)

    if not results:
        fail(400, "No Objects could be updated")

    return make_items_for(results, endpoint=endpoint)
def __init__(self, context, request, **kwargs):
    """Listing of AutoImportLog objects located directly inside the
    current context (the "Instrument" column is dropped, as the
    instrument is implicit here)
    """
    AutoImportLogsView.__init__(self, context, request, **kwargs)
    # the instrument is the context itself -> hide its column everywhere
    del self.columns["Instrument"]
    self.review_states[0]["columns"].remove("Instrument")
    self.contentFilter = {
        "portal_type": "AutoImportLog",
        "path": {
            "query": api.get_path(context),
            "depth": 1  # searching just inside the specified folder
        },
        "sort_on": "created",
        "sort_order": "descending",
    }
    # NOTE(review): interpolation happens *before* translation, so the
    # msgid is dynamic and will never match a catalog entry -- confirm
    # whether this title needs to be translatable
    self.title = self.context.translate(
        _("Auto Import Logs of %s" % self.context.Title()))
    self.icon = "{}/{}".format(
        self.portal_url,
        "++resource++bika.lims.images/instrumentcertification_big.png")
    self.context_actions = {}
    self.allow_edit = False
    self.show_select_column = False
    self.show_workflow_action_buttons = True
    self.pagesize = 30
def __call__(self):
    """Render the partitioning view and handle its form buttons
    (preview / create / cancel)

    :returns: redirect on create/cancel, the rendered template otherwise
    """
    form = self.request.form

    # Form submit toggle
    form_submitted = form.get("submitted", False)

    # Buttons
    form_preview = form.get("button_preview", False)
    form_create = form.get("button_create", False)
    form_cancel = form.get("button_cancel", False)

    objs = self.get_objects()

    # No ARs selected
    if not objs:
        return self.redirect(message=_("No items selected"),
                             level="warning")

    # Handle preview
    if form_submitted and form_preview:
        logger.info("*** PREVIEW ***")

    # Handle create
    if form_submitted and form_create:
        logger.info("*** CREATE PARTITIONS ***")

        partitions = []

        # create the partitions
        # NOTE: loop variable renamed so the form record no longer shadows
        # the created partition object
        for record in form.get("partitions", []):
            primary_uid = record.get("primary_uid")
            sampletype_uid = record.get("sampletype_uid")
            analyses_uids = record.get("analyses")
            if not analyses_uids or not primary_uid:
                # Cannot create a partition w/o analyses!
                continue

            partition = self.create_partition(
                primary_uid, sampletype_uid, analyses_uids)
            partitions.append(partition)
            logger.info("Successfully created partition: {}".format(
                api.get_path(partition)))

            # Force the reception of the partition
            doActionFor(partition, "receive")

        if not partitions:
            # If no partitions were created, show a warning message
            return self.redirect(message=_("No partitions were created"))

        # NOTE: translate first, then interpolate -- the previous code
        # passed the already-formatted string to `_()`, producing a
        # dynamic msgid that can never be found in the catalogs
        message = _("Created {} partitions: {}").format(
            len(partitions), ", ".join(map(api.get_title, partitions)))
        return self.redirect(message=message)

    # Handle cancel
    if form_submitted and form_cancel:
        logger.info("*** CANCEL ***")
        return self.redirect(message=_("Partitioning canceled"))

    return self.template()
def _process_value(value):
    """Convert the value into a human readable diff string

    Falsy values render as "Not set"; UIDs are resolved to titles; portal
    paths are shortened; dicts/lists are flattened to stable text.
    NOTE: relies on Python 2 (`basestring`, `unicode`).

    :param value: raw field value taken from a revision diff
    :returns: str representation suitable for display
    """
    if not value:
        value = _("Not set")
    # handle strings
    elif isinstance(value, basestring):
        # XXX: bad data, e.g. in AS Method field
        if value == "None":
            value = _("Not set")
        # 0 is detected as the portal UID
        # (deliberate no-op: prevents the is_uid branch below from
        # resolving "0" as the portal object)
        elif value == "0":
            value = "0"
        # handle physical paths
        elif value.startswith("/"):
            # remove the portal path to reduce noise in virtual hostings
            portal_path = api.get_path(api.get_portal())
            value = value.replace(portal_path, "", 1)
        elif api.is_uid(value):
            value = _get_title_or_id_from_uid(value)
    # handle dictionaries
    elif isinstance(value, (dict)):
        # sorted items -> deterministic output for equal dicts
        value = json.dumps(sorted(value.items()), indent=1)
    # handle lists and tuples
    elif isinstance(value, (list, tuple)):
        # recurse into the elements, then sort for stable ordering
        value = sorted(map(_process_value, value))
        value = "; ".join(value)
    # handle unicodes
    if isinstance(value, unicode):
        value = api.safe_unicode(value).encode("utf8")
    return str(value)
def __init__(self, context, request):
    """Folder listing of AnalysisSpec objects located in the current
    context, with Active/Inactive/All filter states
    """
    super(AnalysisSpecsView, self).__init__(context, request)
    self.catalog = "bika_setup_catalog"
    self.contentFilter = {
        "portal_type": "AnalysisSpec",
        "sort_on": "sortable_title",
        "sort_order": "ascending",
        "path": {
            "query": api.get_path(context),
            "level": 0}
    }
    self.context_actions = {
        _("Add"): {
            "url": "createObject?type_name=AnalysisSpec",
            "permission": AddAnalysisSpec,
            "icon": "++resource++bika.lims.images/add.png"}
    }
    self.title = self.context.translate(_("Analysis Specifications"))
    # NOTE(review): leading "/" on the resource gives a double slash in
    # the final URL -- probably unintended, but harmless
    self.icon = "{}/{}".format(
        self.portal_url,
        "/++resource++bika.lims.images/analysisspec_big.png"
    )
    self.show_select_row = False
    self.show_select_column = True
    self.pagesize = 25
    self.columns = collections.OrderedDict((
        ("Title", {
            "title": _("Analysis Specification"),
            "index": "sortable_title"}),
        ("SampleType", {
            "title": _("Sample Type"),
            "index": "getSampleTypeTitle"}),
    ))
    self.review_states = [
        {
            "id": "default",
            "title": _("Active"),
            "contentFilter": {"is_active": True},
            "transitions": [{"id": "deactivate"}, ],
            "columns": self.columns.keys(),
        },
        {
            "id": "inactive",
            "title": _("Inactive"),
            "contentFilter": {'is_active': False},
            "transitions": [{"id": "activate"}, ],
            "columns": self.columns.keys(),
        },
        {
            "id": "all",
            "title": _("All"),
            "contentFilter": {},
            "columns": self.columns.keys(),
        },
    ]
def reindexMovedObject(obj, event):
    """Reindex a moved/renamed object in every bika catalog it is
    registered in

    :param obj: the content object that was moved or renamed
    :param event: object-moved event carrying oldName/newName/oldParent
    """
    bika_catalogs = getattr(obj, "_bika_catalogs", [])
    for name in bika_catalogs:
        # NOTE: fixed "Reidexing" typo in the log message
        logger.debug("Reindexing moved object '{}' in catalog '{}'".format(
            obj.getId(), name))
        catalog = api.get_tool(name)
        # old and new name
        old_name = event.oldName
        new_name = event.newName
        if old_name and new_name:
            # rebuild the previous physical path from old parent + old id
            old_parent = event.oldParent
            old_ppath = api.get_path(old_parent)
            old_path = "/".join([old_ppath, old_name])
            # uncatalog the old path
            catalog.uncatalog_object(old_path)
        # reindex object under its new location
        catalog.reindexObject(obj)
def search_analysis_from(container, keywords):
    """Searches an analysis with the specified keyword within the container

    Returns the analysis object when exactly one match is found, None
    otherwise. Raises ValueError for unsupported container types.
    """
    container = api.get_object(container)
    container_uid = api.get_uid(container)

    # query restricted to analyses still in an open state
    query = {
        "getKeyword": keywords,
        "portal_type": "Analysis",
        "review_state": ["unassigned", "assigned"],
    }

    # scope the query depending on the container type
    if IAnalysisRequest.providedBy(container):
        query["getAncestorsUIDs"] = container_uid
    elif IWorksheet.providedBy(container):
        query["getWorksheetUID"] = container_uid
    else:
        raise ValueError("Could not get analyses from {}".format(
            api.get_path(container)))

    # only a unique hit is acceptable
    hits = api.search(query, CATALOG_ANALYSIS_LISTING)
    if len(hits) != 1:
        return None
    return api.get_object(hits[0])
def renameAfterCreation(obj):
    """Rename the content after it was created/added

    :param obj: the freshly created content object
    :returns: the new id of the object
    :raises KeyError: when the generated id is already taken in the parent
    """
    # Check if the _bika_id was already set
    bika_id = getattr(obj, "_bika_id", None)
    if bika_id is not None:
        return bika_id

    # Can't rename without a subtransaction commit when using portal_factory
    transaction.savepoint(optimistic=True)

    # The id returned should be normalized already
    new_id = None

    # Checking if an adapter exists for this content type. If yes, we will
    # get new_id from adapter.
    for name, adapter in getAdapters((obj, ), IIdServer):
        if new_id:
            # NOTE: added the missing space so the message no longer
            # renders as "...forcontent type"
            logger.warn(('More than one ID Generator Adapter found for '
                         'content type -> %s') % obj.portal_type)
        new_id = adapter.generate_id(obj.portal_type)
    if not new_id:
        new_id = generateUniqueId(obj)

    # TODO: This is a naive check just in current folder
    # -> this should check globally for duplicate objects with same prefix
    # N.B. a check like `search_by_prefix` each time would probably slow things
    # down too much!
    # -> A solution could be to store all IDs with a certain prefix in a storage
    parent = api.get_parent(obj)
    if new_id in parent.objectIds():
        # XXX We could do the check in a `while` loop and generate a new one.
        raise KeyError("The ID {} is already taken in the path {}".format(
            new_id, api.get_path(parent)))

    # rename the object to the new id
    parent.manage_renameObject(obj.id, new_id)

    return new_id
def reindexMovedObject(obj, event):
    """Reindex a moved/renamed object in every bika catalog it is
    registered in

    :param obj: the content object that was moved or renamed
    :param event: object-moved event carrying oldName/newName/oldParent
    """
    bika_catalogs = getattr(obj, "_bika_catalogs", [])
    for name in bika_catalogs:
        # NOTE: fixed "Reidexing" typo in the log message
        logger.debug("Reindexing moved object '{}' in catalog '{}'".format(
            obj.getId(), name))
        catalog = api.get_tool(name)
        # old and new name
        old_name = event.oldName
        new_name = event.newName
        if old_name and new_name:
            # rebuild the previous physical path from old parent + old id
            old_parent = event.oldParent
            old_ppath = api.get_path(old_parent)
            old_path = "/".join([old_ppath, old_name])
            # uncatalog the old path
            catalog.uncatalog_object(old_path)
        # reindex object under its new location
        catalog.reindexObject(obj)
def renameAfterCreation(obj):
    """Rename the content after it was created/added

    :param obj: the freshly created content object
    :returns: the new id of the object
    :raises KeyError: when the generated id is already taken in the parent
    """
    # Check if the _bika_id was already set
    bika_id = getattr(obj, "_bika_id", None)
    if bika_id is not None:
        return bika_id

    # Can't rename without a subtransaction commit when using portal_factory
    transaction.savepoint(optimistic=True)

    # The id returned should be normalized already
    new_id = None

    # Checking if an adapter exists for this content type. If yes, we will
    # get new_id from adapter.
    for name, adapter in getAdapters((obj, ), IIdServer):
        if new_id:
            # NOTE: added the missing space so the message no longer
            # renders as "...forcontent type"
            logger.warn(('More than one ID Generator Adapter found for '
                         'content type -> %s') % obj.portal_type)
        new_id = adapter.generate_id(obj.portal_type)
    if not new_id:
        new_id = generateUniqueId(obj)

    # TODO: This is a naive check just in current folder
    # -> this should check globally for duplicate objects with same prefix
    # N.B. a check like `search_by_prefix` each time would probably slow things
    # down too much!
    # -> A solution could be to store all IDs with a certain prefix in a storage.
    parent = api.get_parent(obj)
    if new_id in parent.objectIds():
        # XXX We could do the check in a `while` loop and generate a new one.
        raise KeyError("The ID {} is already taken in the path {}".format(
            new_id, api.get_path(parent)))

    # rename the object to the new id
    parent.manage_renameObject(obj.id, new_id)

    return new_id
def get_object_info(self):
    """Return the info dict for this Batch, including the default field
    values and the client-scoped filter queries for the add form
    """
    info = self.get_base_info()

    doctor = self.context.getField("Doctor").get(self.context)
    client = self.context.getClient()

    # Default values for other fields when the Batch is selected
    values = {
        "Doctor": self.to_field_value(doctor),
        "Client": self.to_field_value(client),
    }

    patient = self.context.getField("Patient").get(self.context)
    if patient:
        values["Patient"] = self.to_field_value(patient)
        values["ClientPatientID"] = {
            "uid": api.get_uid(patient),
            "title": patient.getClientPatientID() or api.get_id(patient),
        }

    # Allow to choose Patients from same Client only and apply
    # generic filters when a client is selected too
    queries = {}
    if client:
        path_query = {"query": api.get_path(client), "depth": 1}
        queries = {
            "Patient": {"path": path_query},
            "ClientPatientID": {"path": path_query},
        }

    info["field_values"] = values
    info["filter_queries"] = queries
    return info
def __init__(self, context, request):
    """Listing of Multifile documents attached directly to the current
    patient folder
    """
    super(PatientMultifileView, self).__init__(context, request)
    self.catalog = "bika_setup_catalog"
    self.contentFilter = {
        "portal_type": "Multifile",
        "path": {
            "query": api.get_path(context),
            "depth": 1  # searching just inside the specified folder
        },
        "sort_on": "created",
        "sort_order": "descending",
    }
    self.form_id = "patientfiles"
    self.title = self.context.translate(_("Patient Files"))
    # NOTE(review): icon borrowed from instrument certifications --
    # confirm this is intentional
    self.icon = "{}/{}".format(
        self.portal_url,
        "++resource++bika.lims.images/instrumentcertification_big.png"
    )
    self.context_actions = {
        _("Add"): {
            "url": "createObject?type_name=Multifile",
            "icon": "++resource++bika.lims.images/add.png"
        }
    }
    self.allow_edit = False
    self.show_select_column = False
    self.show_workflow_action_buttons = True
    self.pagesize = 30
    self.columns = {
        "DocumentID": {"title": _("Document ID"),
                       "index": "sortable_title"},
        "DocumentVersion": {"title": _("Document Version"),
                            "index": "sortable_title"},
        "DocumentLocation": {"title": _("Document Location"),
                             "index": "sortable_title"},
        "DocumentType": {"title": _("Document Type"),
                         "index": "sortable_title"},
        "FileDownload": {"title": _("File")}
    }
    self.review_states = [
        {
            "id": "default",
            "title": _("All"),
            "contentFilter": {},
            "columns": [
                "DocumentID",
                "DocumentVersion",
                "DocumentLocation",
                "DocumentType",
                "FileDownload"
            ]
        },
    ]
def __init__(self, context, request):
    """Listing of ARReport objects published directly inside the current
    sample, with PDF/CSV download columns
    """
    BikaListingView.__init__(self, context, request)
    self.context = context
    self.request = request
    self.catalog = "portal_catalog"
    self.contentFilter = {
        'portal_type': 'ARReport',
        'path': {
            'query': api.get_path(self.context),
            'depth': 1,  # only reports directly inside the sample
        },
        'sort_order': 'reverse'
    }
    self.context_actions = {}
    self.show_select_column = True
    self.show_workflow_action_buttons = False
    self.form_id = 'published_results'
    # NOTE(review): "//" after portal_url yields a double slash in the
    # icon URL -- works, but probably unintended
    self.icon = "{}//++resource++bika.lims.images/report_big.png".format(
        self.portal_url)
    self.title = self.context.translate(_("Published results"))
    self.columns = {
        'COANumber': {
            'title': _('COA')
        },
        'Date': {
            'title': _('Published Date')
        },
        'PublishedBy': {
            'title': _('Published By')
        },
        'DownloadPDF': {
            'title': _('Download PDF')
        },
        'DownloadCSV': {
            'title': _('Download CSV')
        },
        'Recipients': {
            'title': _('Recipients')
        },
    }
    self.review_states = [
        {
            'id': 'default',
            'title': 'All',
            'contentFilter': {},
            'columns': [
                'COANumber',
                'Date',
                'PublishedBy',
                'Recipients',
                'DownloadPDF',
                'DownloadCSV',
            ]
        },
    ]
def __init__(self, context, request):
    """Listing of Multifile documents attached directly to the current
    patient folder
    """
    super(PatientMultifileView, self).__init__(context, request)
    self.catalog = "bika_setup_catalog"
    self.contentFilter = {
        "portal_type": "Multifile",
        "path": {
            "query": api.get_path(context),
            "depth": 1  # searching just inside the specified folder
        },
        "sort_on": "created",
        "sort_order": "descending",
    }
    self.form_id = "patientfiles"
    self.title = self.context.translate(_("Patient Files"))
    # NOTE(review): icon borrowed from instrument certifications --
    # confirm this is intentional
    self.icon = "{}/{}".format(
        self.portal_url,
        "++resource++bika.lims.images/instrumentcertification_big.png"
    )
    self.context_actions = {
        _("Add"): {
            "url": "createObject?type_name=Multifile",
            "icon": "++resource++bika.lims.images/add.png"
        }
    }
    self.allow_edit = False
    self.show_select_column = False
    self.show_workflow_action_buttons = True
    self.pagesize = 30
    self.columns = {
        "DocumentID": {"title": _("Document ID"),
                       "index": "sortable_title"},
        "DocumentVersion": {"title": _("Document Version"),
                            "index": "sortable_title"},
        "DocumentLocation": {"title": _("Document Location"),
                             "index": "sortable_title"},
        "DocumentType": {"title": _("Document Type"),
                         "index": "sortable_title"},
        "FileDownload": {"title": _("File")}
    }
    self.review_states = [
        {
            "id": "default",
            "title": _("All"),
            "contentFilter": {},
            "columns": [
                "DocumentID",
                "DocumentVersion",
                "DocumentLocation",
                "DocumentType",
                "FileDownload"
            ]
        },
    ]
def __init__(self, context, request):
    """Folder listing of ARTemplate objects in the current context, with
    Active/Dormant/All filter states
    """
    super(TemplatesView, self).__init__(context, request)
    self.catalog = "bika_setup_catalog"
    self.contentFilter = {
        "portal_type": "ARTemplate",
        "sort_on": "sortable_title",
        "sort_order": "ascending",
        "path": {
            "query": api.get_path(context),
            "level": 0},
    }
    self.context_actions = {
        _("Add"): {
            "url": "createObject?type_name=ARTemplate",
            "permission": "Add portal content",
            "icon": "++resource++bika.lims.images/add.png"}
    }
    self.title = self.context.translate(_("AR Templates"))
    # NOTE(review): leading "/" on the resource gives a double slash in
    # the final URL -- probably unintended, but harmless
    self.icon = "{}/{}".format(
        self.portal_url,
        "/++resource++bika.lims.images/artemplate_big.png"
    )
    self.show_select_row = False
    self.show_select_column = True
    self.columns = collections.OrderedDict((
        ("Title", {
            "title": _("Profile"),
            "index": "sortable_title"}),
        ("Description", {
            "title": _("Description"),
            "index": "Description",
            "toggle": True,
        }),
    ))
    self.review_states = [
        {
            "id": "default",
            "title": _("Active"),
            "contentFilter": {"inactive_state": "active"},
            "columns": self.columns.keys(),
        },
        {
            "id": "inactive",
            "title": _("Dormant"),
            "contentFilter": {"inactive_state": "inactive"},
            "columns": self.columns.keys(),
        },
        {
            "id": "all",
            "title": _("All"),
            "contentFilter": {},
            "columns": self.columns.keys(),
        },
    ]
def __init__(self, context, request):
    """Samples listing restricted to the current client folder
    """
    super(ClientSamplesView, self).__init__(context, request)
    # limit the listing to samples located below this client
    path_filter = {
        "query": api.get_path(context),
        "level": 0,
    }
    self.contentFilter["path"] = path_filter
    # the client is implicit in this view -> drop its column
    self.remove_column("Client")
def get(self, instance, **kwargs):
    """Returns a list of Analyses assigned to this AR

    Return a list of catalog brains unless `full_objects=True` is passed.
    Overrides "ViewRetractedAnalyses" when `retracted=True` is passed.
    Other keyword arguments are passed to bika_analysis_catalog

    :param instance: Analysis Request object
    :param kwargs: Keyword arguments to be passed to control the output
    :returns: A list of Analysis Objects/Catalog Brains
    """
    full_objects = False
    # If get_reflexed is false don't return the analyses that have been
    # reflexed, only the final ones
    get_reflexed = True

    # pop control kwargs so they don't leak into the catalog query
    if 'full_objects' in kwargs:
        full_objects = kwargs['full_objects']
        del kwargs['full_objects']
    if 'get_reflexed' in kwargs:
        get_reflexed = kwargs['get_reflexed']
        del kwargs['get_reflexed']

    if 'retracted' in kwargs:
        retracted = kwargs['retracted']
        del kwargs['retracted']
    else:
        # not explicitly requested -> fall back to the current user's
        # permission to view retracted analyses
        mtool = getToolByName(instance, 'portal_membership')
        retracted = mtool.checkPermission(
            ViewRetractedAnalyses, instance)

    bac = getToolByName(instance, CATALOG_ANALYSIS_LISTING)
    # only keep the kwargs that are actual catalog indexes
    contentFilter = dict([(k, v) for k, v in kwargs.items()
                          if k in bac.indexes()])
    contentFilter['portal_type'] = "Analysis"
    contentFilter['sort_on'] = "getKeyword"
    # restrict the search to analyses contained inside the AR
    contentFilter['path'] = {'query': api.get_path(instance), 'level': 0}
    analyses = bac(contentFilter)

    # post-filter only when needed: hiding retracted, waking up full
    # objects, or dropping reflexed analyses (waking objects is costly)
    if not retracted or full_objects or not get_reflexed:
        analyses_filtered = []
        for a in analyses:
            if not retracted and a.review_state == 'retracted':
                continue
            if full_objects or not get_reflexed:
                a_obj = a.getObject()
                # Check if analysis has been reflexed
                if not get_reflexed and \
                        a_obj.getReflexRuleActionsTriggered() != '':
                    continue
                if full_objects:
                    a = a_obj
            analyses_filtered.append(a)
        analyses = analyses_filtered
    return analyses
def get(self, instance, **kwargs):
    """Returns a list of Analyses assigned to this AR

    Return a list of catalog brains unless `full_objects=True` is passed.
    Overrides "ViewRetractedAnalyses" when `retracted=True` is passed.
    Other keyword arguments are passed to bika_analysis_catalog

    :param instance: Analysis Request object
    :param kwargs: Keyword arguments to be passed to control the output
    :returns: A list of Analysis Objects/Catalog Brains
    """
    full_objects = False
    # If get_reflexed is false don't return the analyses that have been
    # reflexed, only the final ones
    get_reflexed = True

    # pop control kwargs so they don't leak into the catalog query
    if 'full_objects' in kwargs:
        full_objects = kwargs['full_objects']
        del kwargs['full_objects']
    if 'get_reflexed' in kwargs:
        get_reflexed = kwargs['get_reflexed']
        del kwargs['get_reflexed']

    if 'retracted' in kwargs:
        retracted = kwargs['retracted']
        del kwargs['retracted']
    else:
        # not explicitly requested -> fall back to the current user's
        # permission to view retracted analyses
        mtool = getToolByName(instance, 'portal_membership')
        retracted = mtool.checkPermission(
            ViewRetractedAnalyses, instance)

    bac = getToolByName(instance, CATALOG_ANALYSIS_LISTING)
    # only keep the kwargs that are actual catalog indexes
    contentFilter = dict([(k, v) for k, v in kwargs.items()
                          if k in bac.indexes()])
    contentFilter['portal_type'] = "Analysis"
    contentFilter['sort_on'] = "getKeyword"
    # restrict the search to analyses contained inside the AR
    contentFilter['path'] = {'query': api.get_path(instance), 'level': 0}
    analyses = bac(contentFilter)

    # post-filter only when needed: hiding retracted, waking up full
    # objects, or dropping reflexed analyses (waking objects is costly)
    if not retracted or full_objects or not get_reflexed:
        analyses_filtered = []
        for a in analyses:
            if not retracted and a.review_state == 'retracted':
                continue
            if full_objects or not get_reflexed:
                a_obj = a.getObject()
                # Check if analysis has been reflexed
                if not get_reflexed and \
                        a_obj.getReflexRuleActionsTriggered() != '':
                    continue
                if full_objects:
                    a = a_obj
            analyses_filtered.append(a)
        analyses = analyses_filtered
    return analyses
def update_ast_analysis(analysis, antibiotics, remove=False):
    """Update the interim fields of an AST analysis from the given
    antibiotics

    NOTE: relies on Python 2 semantics where `map` and `filter` return
    plain lists.

    :param analysis: AST analysis (object or brain) to update
    :param antibiotics: antibiotics to be mapped to interim fields
    :param remove: when True, antibiotics not present in `antibiotics`
        are removed; an analysis left without antibiotics is deleted
    """
    # There is nothing to do if the analysis has been verified
    analysis = api.get_object(analysis)
    if IVerified.providedBy(analysis):
        return

    # Convert antibiotics to interim fields
    keyword = analysis.getKeyword()
    interim_fields = map(lambda ab: to_interim(keyword, ab), antibiotics)

    # Get the analysis interim fields
    an_interims = copy.deepcopy(analysis.getInterimFields()) or []
    an_keys = sorted(map(lambda i: i.get("keyword"), an_interims))

    # Remove non-specified antibiotics
    if remove:
        in_keys = map(lambda i: i.get("keyword"), interim_fields)
        an_interims = filter(lambda a: a["keyword"] in in_keys, an_interims)

    # Keep analysis' original antibiotics
    abx = filter(lambda a: a["keyword"] not in an_keys, interim_fields)
    an_interims.extend(abx)

    # Is there any difference?
    new_keys = sorted(map(lambda i: i.get("keyword"), an_interims))
    if new_keys == an_keys:
        # No changes
        return

    # If no antibiotics, remove the analysis
    if remove and not an_interims:
        sample = analysis.getRequest()
        sample._delObject(api.get_id(analysis))
        return

    if ISubmitted.providedBy(analysis):
        # Analysis has been submitted already, retract
        succeed, message = wf.doActionFor(analysis, "retract")
        if not succeed:
            path = api.get_path(analysis)
            logger.error("Cannot retract analysis '{}'".format(path))
            return

    # Assign the antibiotics
    analysis.setInterimFields(an_interims)

    # Compute all combinations of interim/antibiotic and possible result and
    # and generate the result options for this analysis (the "Result" field is
    # never displayed and is only used for reporting)
    result_options = get_result_options(analysis)
    analysis.setResultOptions(result_options)

    # Apply the IASTAnalysis marker interface (just in case)
    alsoProvides(analysis, IASTAnalysis)

    analysis.reindexObject()
def get_metadata_for(instance, catalog):
    """Returns the metadata for the given instance from the specified catalog

    :param instance: content object (or brain) to look up
    :param catalog: catalog tool to read the metadata from
    :returns: metadata dict, or an empty dict if the path is not cataloged
    """
    path = api.get_path(instance)
    try:
        return catalog.getMetadataForUID(path)
    except KeyError:
        # path not indexed in this catalog -> log and return empty
        msg = "Cannot get metadata from {}. Path not found: {}"
        logger.warn(msg.format(catalog.id, path))
        return {}
def folderitem(self, obj, item, index):
    """Service triggered each time an item is iterated in folderitems.

    The use of this service prevents the extra-loops in child objects.

    :obj: the instance of the class to be foldered
    :item: dict containing the properties of the object to be used by
        the template
    :index: current index of the item
    """
    item["replace"]["Title"] = get_link_for(obj)
    item["Description"] = obj.Description()

    retention = obj.getRetentionPeriod()
    if retention:
        item["RetentionPeriod"] = _(
            "hours: {} minutes: {} days: {}".format(
                retention.get("hours", "0"),
                retention.get("minutes", "0"),
                retention.get("days", "0")))

    item["replace"]["SampleMatrix"] = get_link_for(obj.getSampleMatrix())
    item["replace"]["ContainerType"] = get_link_for(obj.getContainerType())

    # Hide sample points assigned to this sample type that do not belong
    # to the same container (Client or Setup)
    setup = api.get_setup()
    if api.get_parent(self.context) == setup:
        container_path = api.get_path(setup.bika_samplepoints)
    else:
        container_path = api.get_path(self.context)
    same_container = [sp for sp in obj.getSamplePoints()
                      if api.get_parent_path(sp) == container_path]

    # Display the links to the sample points
    item["replace"]["SamplePoints"] = ", ".join(
        [get_link_for(sp) for sp in same_container])

    return item
def get_coa_number(self, model):
    """Return the next COA number for the sample, derived from the count
    of ARReport objects already stored directly inside it
    """
    sample = model.instance
    query = {
        'portal_type': 'ARReport',
        'path': {
            'query': api.get_path(sample),
            'depth': 1
        }
    }
    existing_reports = api.search(query, 'portal_catalog')
    # next sequence number = number of existing reports + 1
    return '{}-COA-{}'.format(api.get_id(sample), len(existing_reports) + 1)
def update_object_with_data(content, record): """Update the content with the record data :param content: A single folderish catalog brain or content object :type content: ATContentType/DexterityContentType/CatalogBrain :param record: The data to update :type record: dict :returns: The updated content object :rtype: object :raises: APIError, :class:`~plone.jsonapi.routes.exceptions.APIError` """ # ensure we have a full content object content = get_object(content) # Look for an update-specific adapter for this object adapter = queryAdapter(content, IUpdate) if adapter: # Use the adapter to update the object logger.info("Delegating 'update' operation of '{}'".format( api.get_path(content))) adapter.update_object(**record) else: # Fall-back to default update machinery # get the proper data manager dm = IDataManager(content) if dm is None: fail(400, "Update for this object is not allowed") # Bail-out non-update-able fields purged_records = copy.deepcopy(record) map(lambda key: purged_records.pop(key, None), SKIP_UPDATE_FIELDS) # Iterate through record items for k, v in purged_records.items(): try: success = dm.set(k, v, **record) except Unauthorized: fail(401, "Not allowed to set the field '%s'" % k) except ValueError, exc: fail(400, str(exc)) if not success: logger.warn("update_object_with_data::skipping key=%r", k) continue logger.debug("update_object_with_data::field %r updated", k)
def __init__(self, context, request):
    """Listing of ARReport objects published directly inside the current
    Analysis Request
    """
    super(AnalysisRequestPublishedResults, self).__init__(context, request)
    self.catalog = "portal_catalog"
    self.contentFilter = {
        'portal_type': 'ARReport',
        'path': {
            'query': api.get_path(self.context),
            'depth': 1,  # only reports directly inside the AR
        },
        'sort_order': 'reverse'
    }
    self.context_actions = {}
    self.show_select_column = True
    self.show_workflow_action_buttons = False
    self.form_id = 'published_results'
    self.icon = self.portal_url + "/++resource++bika.lims.images/report_big.png"
    self.title = self.context.translate(_("Published results"))
    self.columns = {
        'Title': {
            'title': _('File')
        },
        'FileSize': {
            'title': _('Size')
        },
        'Date': {
            'title': _('Published Date')
        },
        'PublishedBy': {
            'title': _('Published By')
        },
        'DatePrinted': {
            'title': _('Printed Date')
        },
        'Recipients': {
            'title': _('Recipients')
        },
    }
    self.review_states = [
        {
            'id': 'default',
            'title': 'All',
            'contentFilter': {},
            'columns': [
                'Title',
                'FileSize',
                'Date',
                'PublishedBy',
                'DatePrinted',
                'Recipients'
            ]
        },
    ]
def get_samples_data(self):
    """Generator yielding one base-data dict per AR selected in the
    request
    """
    for sample in self.get_objects_from_request():
        sample = api.get_object(sample)
        yield {
            "obj": sample,
            "id": api.get_id(sample),
            "uid": api.get_uid(sample),
            "title": api.get_title(sample),
            "path": api.get_path(sample),
            "url": api.get_url(sample),
            "sample_type": api.get_title(sample.getSampleType()),
        }
def create_items(portal_type=None, uid=None, endpoint=None, **kw): """ create items 1. If the uid is given, get the object and create the content in there (assumed that it is folderish) 2. If the uid is 0, the target folder is assumed the portal. 3. If there is no uid given, the payload is checked for either a key - `parent_uid` specifies the *uid* of the target folder - `parent_path` specifies the *physical path* of the target folder """ # disable CSRF req.disable_csrf_protection() # destination where to create the content container = uid and get_object_by_uid(uid) or None # extract the data from the request records = req.get_request_data() results = [] for record in records: # get the portal_type if portal_type is None: # try to fetch the portal type out of the request data portal_type = record.pop("portal_type", None) if container is None: # find the container for content creation container = find_target_container(record) # Check if we have a container and a portal_type if not all([container, portal_type]): fail(400, "Please provide a container path/uid and portal_type") # check if it is allowed to create the portal_type if not is_creation_allowed(portal_type, container): fail( 401, "Creation of '{}' in '{}' is not allowed".format( portal_type, api.get_path(container))) # create the object and pass in the record data obj = create_object(container, portal_type, **record) results.append(obj) if not results: fail(400, "No Objects could be created") return make_items_for(results, endpoint=endpoint)
def create_object(container, portal_type, **data):
    """Creates an object slug

    :param container: folderish object the new content is created in
    :param portal_type: portal type of the content to create
    :param data: field values the new object gets updated with
    :returns: The new created content object
    :rtype: object
    """
    if "id" in data:
        # always omit the id as senaite LIMS generates a proper one
        id = data.pop("id")
        logger.warn("Passed in ID '{}' omitted! Senaite LIMS "
                    "generates a proper ID for you".format(id))

    try:
        # Is there any adapter registered to handle the creation of this type?
        adapter = queryAdapter(container, ICreate, name=portal_type)
        if adapter and adapter.is_creation_delegated():
            logger.info("Delegating 'create' operation of '{}' in '{}'".format(
                portal_type, api.get_path(container)))
            return adapter.create_object(**data)

        # Special case for ARs
        # => return immediately w/o update
        if portal_type == "AnalysisRequest":
            obj = create_analysisrequest(container, **data)
            # Omit values which are already set through the helper
            data = u.omit(data, "SampleType", "Analyses")
            # Set the container as the client, as the AR lives in it
            data["Client"] = container
            # NOTE(review): the two `data` mutations above are dead code,
            # since we return right away without using `data` -- confirm
            return obj
        # Standard content creation
        else:
            # we want just a minimum viable object and set the data later
            obj = api.create(container, portal_type)
            # obj = api.create(container, portal_type, **data)
    except Unauthorized:
        fail(401, "You are not allowed to create this content")

    # Update the object with the given data, but omit the id
    try:
        update_object_with_data(obj, data)
    except APIError:
        # Failure in creation process, delete the invalid object
        # NOTE: We bypass the permission checks
        container._delObject(obj.id)
        # reraise the error
        raise

    return obj
def get_base_info(self, obj):
    """Extract the base info from the given object

    :param obj: content object or catalog brain
    :returns: dict with id/uid/title/path/url and workflow state info
    """
    status = api.get_workflow_status_of(obj)
    # "sample_received" -> "Sample received"
    status_title = status.capitalize().replace("_", " ")
    return {
        "obj": obj,
        "id": api.get_id(obj),
        "uid": api.get_uid(obj),
        "title": api.get_title(obj),
        "path": api.get_path(obj),
        "url": api.get_url(obj),
        "review_state": status,
        "state_title": status_title,
    }
def get_samples_data(self):
    """Generator yielding one data dict per Sample selected in the
    request
    """
    for sample in self.get_samples_from_request():
        created = ulocalized_time(sample.created(), long_format=True)
        yield {
            "obj": sample,
            "id": api.get_id(sample),
            "uid": api.get_uid(sample),
            "title": api.get_title(sample),
            "path": api.get_path(sample),
            "url": api.get_url(sample),
            "sample_type": sample.getSampleTypeTitle(),
            "client_title": sample.getClientTitle(),
            "date": created,
        }
def before_render(self):
    """Configure the embedded listing before it is rendered
    """
    # Use the title of the containing folder
    folder_title = api.get_title(self.context)
    self.listing.title = self.context.translate(folder_title)

    # Use the big variant of the containing folder's icon
    big_icon = self.context.icon().replace(".png", "_big.png")
    self.listing.icon = "{}/{}".format(self.listing.portal_url, big_icon)

    # Display clients that belong to current folder only
    path_query = {
        "query": api.get_path(self.context),
        "depth": 1,
    }
    self.listing.contentFilter.update({"path": path_query})
def __init__(self, context, request):
    """Reference samples listing scoped to the current supplier
    """
    super(SupplierReferenceSamplesView, self).__init__(context, request)

    # restrict the listing to samples below this supplier
    self.contentFilter["path"]["query"] = api.get_path(context)

    self.context_actions = {
        _("Add"): {
            "url": "createObject?type_name=ReferenceSample",
            "permission": "Add portal content",
            "icon": "++resource++bika.lims.images/add.png"}
    }

    # Remove the Supplier column from the list: it is implicit here
    del self.columns["Supplier"]
    for state in self.review_states:
        state["columns"] = [name for name in state["columns"]
                            if name != "Supplier"]
def get_path_query(self, context=None, level=0):
    """Return a path query

    :param context: The context to get the physical path from
        (defaults to self.context)
    :param level: The depth level of the search
    :returns: Catalog path query
    """
    if context is None:
        context = self.context
    query_path = api.get_path(context)
    return {
        "path": {
            "query": query_path,
            "level": level,
        }
    }
def reindexMovedObject(obj, event):
    """Reindex a renamed object in every bika catalog it is registered in

    :param obj: the content object that was renamed
    :param event: object-moved event carrying oldName/newName
    """
    bika_catalogs = getattr(obj, "_bika_catalogs", [])
    for name in bika_catalogs:
        # NOTE: fixed "Reidexing" typo in the log message
        logger.debug("Reindexing moved object '{}' in catalog '{}'".format(
            obj.getId(), name))
        catalog = api.get_tool(name)
        # check if the object was renamed
        old_name = event.oldName
        if old_name:
            # reconstruct the previous path by swapping the new id for
            # the old one at the end of the current path
            new_path = api.get_path(obj)
            base_path = new_path.replace(event.newName, "")
            old_path = "".join([base_path, old_name])
            # uncatalog the old path
            catalog.uncatalog_object(old_path)
        catalog.reindexObject(obj)
def get_path(brain_or_object):
    """Proxy to bika.lims.api.get_path

    :param brain_or_object: catalog brain or content object
    :returns: physical path of the given object
    """
    return api.get_path(brain_or_object)