def init_with_instance(self, instance):
    """Initialize with an instance object
    """
    self._uid = api.get_uid(instance)
    self._brain = None
    self._catalog = self.get_catalog_for(instance)
    self._instance = instance

def init_with_brain(self, brain):
    """Initialize with a catalog brain
    """
    self._uid = api.get_uid(brain)
    self._brain = brain
    self._catalog = self.get_catalog_for(brain)
    self._instance = None

def _do_obj_creation(self, row):
    """With the given dictionary:

    1. Finds the object's parents, creates them and updates their local UIDs
    2. Creates the plain object and saves its local UID

    :param row: A row dictionary from the souper
    :type row: dict
    """
    remote_path = row.get(REMOTE_PATH)
    remote_parent_path = utils.get_parent_path(remote_path)

    # If parent creation failed previously, do not try to create the object
    if remote_parent_path in self.skipped:
        logger.warning(
            "Parent creation failed previously, skipping: {}".format(
                remote_path))
        return None

    local_path = self.translate_path(remote_path)
    existing = self.portal.unrestrictedTraverse(local_path, None)

    if existing:
        rec = self.sh.find_unique(REMOTE_PATH, remote_path)
        if not rec.get(LOCAL_UID, None) or not rec.get(LOCAL_PATH, None):
            local_uid = api.get_uid(existing)
            self.sh.update_by_remote_path(remote_path,
                                          local_uid=local_uid,
                                          local_path=local_path)
        return existing

    if not self._parents_created(remote_path):
        logger.warning(
            "Parent creation failed, skipping: {}".format(remote_path))
        return None

    parent_path = utils.get_parent_path(local_path)
    container = self.portal.unrestrictedTraverse(str(parent_path), None)
    obj_data = {
        "id": utils.get_id_from_path(local_path),
        "portal_type": row.get(PORTAL_TYPE)
    }
    obj = self._create_object_slug(container, obj_data)
    if obj is not None:
        local_uid = api.get_uid(obj)
        self.sh.update_by_remote_path(remote_path, local_uid=local_uid)
    return obj

def _parents_created(self, remote_path):
    """Check if the parents have already been created; create all
    non-existing parents and update the local UIDs of the existing ones.

    :param remote_path: object path in the remote
    :return: True if ALL the parents were created successfully
    """
    p_path = utils.get_parent_path(remote_path)
    if p_path == "/":
        return True

    # Skip if it is the portal object.
    if self.is_portal_path(p_path):
        return True

    # Incoming path was a remote path, translate it into a local one
    local_p_path = self.translate_path(p_path)

    # Check if the parent already exists. If yes, make sure it has a
    # 'local_uid' value set in the soup table.
    existing = self.portal.unrestrictedTraverse(local_p_path, None)
    if existing:
        p_row = self.sh.find_unique(REMOTE_PATH, p_path)
        if p_row is None:
            # This should never happen
            return False
        p_local_uid = p_row.get(LOCAL_UID, None)
        if not p_local_uid:
            # Update the parent's local UID if it is not set already
            if hasattr(existing, "UID") and existing.UID():
                p_local_uid = existing.UID()
                self.sh.update_by_remote_path(p_path, local_uid=p_local_uid)
        return True

    # Before creating an object's parent, make sure the grand parents are
    # already in place.
    if not self._parents_created(p_path):
        return False

    parent = self.sh.find_unique(REMOTE_PATH, p_path)
    grand_parent = utils.get_parent_path(local_p_path)
    container = self.portal.unrestrictedTraverse(grand_parent, None)
    parent_data = {
        "id": utils.get_id_from_path(local_p_path),
        "remote_path": p_path,
        "portal_type": parent.get(PORTAL_TYPE)
    }
    parent_obj = self._create_object_slug(container, parent_data)
    if parent_obj is None:
        logger.warning("Couldn't create parent of {}".format(remote_path))
        return False

    # Parent is created, update it in the soup table.
    p_local_uid = api.get_uid(parent_obj)
    self.sh.update_by_remote_path(p_path, local_uid=p_local_uid)
    return True

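# Illustrative sketch (not part of the import step above): _do_obj_creation()
# and _parents_created() walk a remote path upwards and create every missing
# ancestor before the object itself. The self-contained example below mimics
# that recursion with a plain dict standing in for the portal; "ensure_parents"
# and the "site" dict are hypothetical stand-ins, not senaite.sync API.

def ensure_parents(site, path):
    """Create all missing ancestors of ``path`` in ``site`` (a dict of
    path -> object), top-down, and return True when they all exist."""
    parent = path.rsplit("/", 1)[0] or "/"
    if parent == "/":
        return True
    # make sure the grand parents exist before creating this parent
    if not ensure_parents(site, parent):
        return False
    if parent not in site:
        site[parent] = {"id": parent.rsplit("/", 1)[-1]}
    return True

if __name__ == "__main__":
    site = {}
    ensure_parents(site, "/clients/client-1/AR-1")
    print(sorted(site))  # ['/clients', '/clients/client-1']
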
def spotlight_search_route(context, request):
    """The spotlight search route
    """
    catalogs = [
        "portal_catalog",
        "bika_setup_catalog",
        "bika_catalog",
        # "bika_analysis_catalog"
    ]

    search_results = []
    for catalog in catalogs:
        search_results.extend(search(catalog=catalog))

    def get_state(brain):
        state = getattr(brain, "review_state", "")
        if not isinstance(state, basestring):
            return ""
        return state

    items = []
    for brain in search_results:
        icon = api.get_icon(brain)
        # avoid 404 errors with these guys
        if "document_icon.gif" in icon:
            icon = ""

        id = api.get_id(brain)
        title = api.get_title(brain)

        items.append({
            "id": id,
            "title": title,
            "title_or_id": title or id,
            "description": api.get_description(brain),
            "uid": api.get_uid(brain),
            "path": api.get_path(brain),
            "url": api.get_url(brain),
            "state": get_state(brain),
            "icon": icon,
        })

    return {
        "count": len(items),
        "items": items,
    }

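# For reference, the route above returns a plain dict that is later serialized
# to JSON for the spotlight UI. A hypothetical payload could look like the
# following; all field values here are made up for illustration only:
#
#   {
#       "count": 1,
#       "items": [{
#           "id": "client-1",
#           "title": "Happy Hills",
#           "title_or_id": "Happy Hills",
#           "description": "",
#           "uid": "a07a2f4f2eaa4e1b8b29d4f79c8a19b5",
#           "path": "/senaite/clients/client-1",
#           "url": "http://localhost:8080/senaite/clients/client-1",
#           "state": "active",
#           "icon": "",
#       }],
#   }
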
def to_super_model(obj):
    # avoid circular imports
    from senaite.core.supermodel import SuperModel

    # Object is already a SuperModel, return immediately
    if isinstance(obj, SuperModel):
        return obj

    # Only portal objects are supported
    if not api.is_object(obj):
        raise TypeError("Expected a portal object, got '{}'".format(
            type(obj)))

    # Wrap the object into a specific Publication Object Adapter
    uid = api.get_uid(obj)
    portal_type = api.get_portal_type(obj)

    adapter = queryAdapter(uid, ISuperModel, name=portal_type)
    if adapter is None:
        return SuperModel(uid)
    return adapter

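# Illustrative sketch (assumption, not senaite.core code): to_super_model()
# uses a "named adapter with fallback" lookup -- try a type-specific wrapper
# registered under the portal_type, otherwise fall back to the generic
# SuperModel. The dict registry below is a simplified stand-in for the
# zope.component adapter registry; all names here are hypothetical.

def generic_model(uid):
    return ("SuperModel", uid)

def analysis_request_model(uid):
    return ("AnalysisRequestModel", uid)

MODEL_REGISTRY = {
    "AnalysisRequest": analysis_request_model,
}

def wrap(uid, portal_type):
    factory = MODEL_REGISTRY.get(portal_type)
    if factory is None:
        # no specific adapter registered -> use the generic model
        return generic_model(uid)
    return factory(uid)

print(wrap("uid-1", "AnalysisRequest"))  # ('AnalysisRequestModel', 'uid-1')
print(wrap("uid-2", "Client"))           # ('SuperModel', 'uid-2')
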
def get_uid(brain_or_object):
    """Proxy to senaite.api.get_uid
    """
    return api.get_uid(brain_or_object)

def update_object_with_data(self, obj, data, domain):
    """Update an existing object with data
    """
    # get the storage and UID map
    storage = self.get_storage(domain=domain)
    uidmap = storage["uidmap"]

    # Proxy Fields must be set after their dependency objects are already set.
    # Thus, we store all the ProxyFields and set them at the end
    proxy_fields = []

    for fieldname, field in api.get_fields(obj).items():

        fm = IFieldManager(field)
        value = data.get(fieldname)

        # handle JSON data reference fields
        if isinstance(value, dict) and value.get("uid"):
            # dereference the referenced object
            value = self.dereference_object(value.get("uid"), uidmap)

        elif isinstance(value, (list, tuple)):
            for item in value:
                # If it is a list of JSON data dicts of objects, add the
                # local UID to that dictionary. This local_uid can be used
                # in Field Managers.
                if isinstance(item, dict):
                    for k, v in item.iteritems():
                        if 'uid' in k:
                            local_uid = uidmap.get(v)
                            item[k] = local_uid

        # handle file fields
        if field.type in ("file", "image", "blob"):
            if data.get(fieldname) is not None:
                fileinfo = data.get(fieldname)
                url = fileinfo.get("download")
                filename = fileinfo.get("filename")
                data["filename"] = filename
                response = requests.get(url)
                value = response.content

        # Leave the Proxy Fields for later
        if isinstance(fm, ProxyFieldManager):
            proxy_fields.append({
                'field_name': fieldname,
                'fm': fm,
                'value': value
            })
            continue

        logger.info("Setting value={} on field={} of object={}".format(
            repr(value), fieldname, api.get_id(obj)))
        try:
            fm.set(obj, value)
        except:
            logger.error("Could not set field '{}' with value '{}'".format(
                fieldname, value))

    # All reference fields are set. We can set the proxy fields now.
    for pf in proxy_fields:
        field_name = pf.get("field_name")
        fm = pf.get("fm")
        value = pf.get("value")
        logger.info("Setting value={} on field={} of object={}".format(
            repr(value), field_name, api.get_id(obj)))
        try:
            fm.set(obj, value)
        except:
            logger.error("Could not set field '{}' with value '{}'".format(
                field_name, value))

    # Set the workflow states
    wf_info = data.get("workflow_info", [])
    for wf_dict in wf_info:
        wf_id = wf_dict.get("workflow")
        review_history = wf_dict.get("review_history")
        self.import_review_history(obj, wf_id, review_history)

    # finally reindex the object
    self.uids_to_reindex.append([api.get_uid(obj), repr(obj)])

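# Illustrative sketch (assumption, not senaite.sync API): the list/tuple branch
# above rewrites remote UIDs inside JSON reference values to their local
# counterparts via "uidmap". The stand-alone helper below mimics that rewriting
# over plain dicts; the helper name and the sample data are hypothetical.

def localize_uids(value, uidmap):
    """Replace any '...uid...' keys in a list of reference dicts with the
    corresponding local UID, mirroring the branch above."""
    if not isinstance(value, (list, tuple)):
        return value
    for item in value:
        if isinstance(item, dict):
            for k, v in item.items():
                if "uid" in k:
                    item[k] = uidmap.get(v)
    return value

uidmap = {"remote-uid-1": "local-uid-9"}
refs = [{"uid": "remote-uid-1", "title": "Sample Point"}]
print(localize_uids(refs, uidmap))  # [{'uid': 'local-uid-9', ...}]
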
def import_data(self, domain):
    """Import the data from the storage identified by domain
    """
    logger.info("*** IMPORT DATA {} ***".format(domain))

    storage = self.get_storage(domain=domain)
    datastore = storage["data"]
    indexstore = storage["index"]
    uidmap = storage["uidmap"]
    credentials = storage["credentials"]

    # At some points the api cannot retrieve objects by UID at the end of the
    # creation process. Thus we keep them in a dictionary for easy access.
    objmap = {}

    # We will create objects from top to bottom, but update them from bottom
    # to top.
    ordered_uids = []

    # initialize a new session with the stored credentials for later requests
    username = credentials.get("username")
    password = credentials.get("password")
    self.session = self.get_session(username, password)
    logger.info("Initialized a new session for user {}".format(username))

    # Get UIDs grouped by their parent path
    ppaths = indexstore.get("by_parent_path")
    if ppaths is None:
        message = _(
            "No parent path info found in the import data. "
            "Please install senaite.jsonapi>=1.1.1 on the source instance "
            "and clear&refetch this storage")
        self.add_status_message(message, "warning")
        return

    # Import by paths from top to bottom
    for ppath in sorted(ppaths):
        # nothing to do
        if not ppath:
            continue
        logger.info("Importing items for parent path {}".format(ppath))
        uids = ppaths[ppath]
        for uid in uids:
            ordered_uids.append(uid)
            # get the data for this uid
            data = datastore[uid]
            # check if the object exists in this instance
            remote_path = data.get("path")
            local_path = self.translate_path(remote_path)
            existing = self.portal.unrestrictedTraverse(
                str(local_path), None)

            if existing:
                # remember the UID -> object UID mapping for the update step
                uidmap[uid] = api.get_uid(existing)
                objmap[uid] = existing
            else:
                # get the container object by path
                container_path = self.translate_path(ppath)
                container = self.portal.unrestrictedTraverse(
                    str(container_path), None)
                # create an object slug in this container
                obj = self.create_object_slug(container, data)
                # remember the UID -> object UID mapping for the update step
                uidmap[uid] = api.get_uid(obj)
                objmap[uid] = obj

    # When the creation process is done, commit the transaction to avoid
    # ReferenceField relation problems.
    transaction.commit()

    # UIDs were added from top to bottom. Reverse the list to update objects
    # from bottom to top.
    ordered_uids.reverse()

    # Update all objects with the given data
    for uid in ordered_uids:
        obj = objmap.get(uid, None)
        if obj is None:
            logger.warn("Object not found: {} ".format(uid))
            continue
        logger.info("Update object {} with import data".format(
            api.get_path(obj)))
        self.update_object_with_data(obj, datastore[uid], domain)

    self.reindex_updated_objects()

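# Illustrative sketch (not part of import_data): the method above creates
# objects top-down (sorted parent paths) and updates them bottom-up (the
# reversed UID order), so containers exist before their children are created
# and children are fully populated before their parents are updated. The paths
# and UIDs below are made up.

by_parent_path = {
    "/clients": ["uid-client"],
    "/clients/client-1": ["uid-ar-1", "uid-ar-2"],
}

ordered_uids = []
for ppath in sorted(by_parent_path):       # top-down creation order
    ordered_uids.extend(by_parent_path[ppath])

print(ordered_uids)                  # ['uid-client', 'uid-ar-1', 'uid-ar-2']
print(list(reversed(ordered_uids)))  # bottom-up update order
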
def ajax_save_reports(self):
    """Render all reports as PDFs and store them as AR Reports
    """
    # Data sent via async ajax call as JSON data from the frontend
    data = self.get_json()

    # This is the html after it was rendered by the client browser and
    # possibly extended by JavaScript, e.g. Barcodes or Graphs added etc.
    # N.B. It might also contain multiple reports!
    html = data.get("html")

    # Metadata
    paperformat = data.get("format")
    template = data.get("template")
    orientation = data.get("orientation", "portrait")
    timestamp = DateTime().ISO8601()

    is_multi_template = self.is_multi_template(template)
    store_individually = self.store_multireports_individually()

    # Generate the print CSS with the set format/orientation
    css = self.get_print_css(
        paperformat=paperformat, orientation=orientation)
    logger.info(u"Print CSS: {}".format(css))

    # get a publisher instance
    publisher = self.publisher
    # add the generated CSS to the publisher
    publisher.add_inline_css(css)

    # TODO: Refactor code below to be not AR specific

    # remember the values of the last iteration for the exit url
    client_url = None
    report_uids = None

    for report_node in publisher.parse_reports(html):
        # generate the PDF
        pdf = publisher.write_pdf(report_node)

        # get contained AR UIDs in this report
        uids = filter(None, report_node.get("uids", "").split(","))

        # get the AR objects
        objs = map(api.get_object_by_uid, uids)

        # sort the objects by created to have the most recent object first
        # -> supersedes https://github.com/senaite/senaite.impress/pull/48
        objs = sorted(objs, key=methodcaller("created"), reverse=True)

        # remember generated report objects
        reports = []

        for obj in objs:
            # TODO: refactor to adapter
            # Create a report object which holds the generated PDF
            title = "Report-{}".format(obj.getId())
            report = api.create(obj, "ARReport", title=title)
            report.edit(AnalysisRequest=api.get_uid(obj),
                        Pdf=pdf,
                        Html=publisher.to_html(report_node),
                        ContainedAnalysisRequests=uids,
                        Metadata={
                            "template": template,
                            "paperformat": paperformat,
                            "orientation": orientation,
                            "timestamp": timestamp,
                            "contained_requests": uids,
                        })
            reports.append(report)

            client_url = api.get_url(obj.getClient())

            # generate report only for the primary object
            if is_multi_template and not store_individually:
                break

        # remember the generated report UIDs for this iteration
        report_uids = map(api.get_uid, reports)

    # This is the clicked button name from the ReactJS component
    action = data.get("action", "save")

    exit_url = self.context.absolute_url()
    if all([client_url, report_uids]):
        endpoint = "reports_listing"
        if action == "email":
            endpoint = "email?uids={}".format(",".join(report_uids))
        exit_url = "{}/{}".format(client_url, endpoint)

    return exit_url

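# Illustrative sketch (assumption): the exit URL logic above falls back to the
# current context URL and only redirects to the client's report listing (or the
# email view) when both a client URL and report UIDs were collected. The helper
# name and the URLs below are hypothetical.

def get_exit_url(context_url, client_url, report_uids, action="save"):
    if not (client_url and report_uids):
        return context_url
    endpoint = "reports_listing"
    if action == "email":
        endpoint = "email?uids={}".format(",".join(report_uids))
    return "{}/{}".format(client_url, endpoint)

print(get_exit_url("http://localhost/senaite/ar-1",
                   "http://localhost/senaite/clients/client-1",
                   ["uid-1"], action="email"))
# http://localhost/senaite/clients/client-1/email?uids=uid-1
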
def _update_object_with_data(self, obj, data):
    """Update an existing object with data
    """
    # Proxy Fields must be set after their dependency objects are already set.
    # Thus, we store all the ProxyFields and set them at the end
    proxy_fields = []

    for fieldname, field in api.get_fields(obj).items():

        if fieldname in self.fields_to_skip:
            continue

        fm = IFieldManager(field)
        value = data.get(fieldname)
        kwargs = {}

        # Computed Fields don't have set methods.
        if isinstance(fm, ComputedFieldManager):
            continue

        # handle JSON data reference fields
        if isinstance(value, dict) and value.get("uid"):
            # dereference the referenced object
            local_uid = self.sh.get_local_uid(value.get("uid"))
            if local_uid:
                value = api.get_object_by_uid(local_uid)
            else:
                value = None

        elif isinstance(value, (list, tuple)):
            for item in value:
                # If it is a list of JSON data dicts of objects, add the
                # local UID to that dictionary. This local_uid can be used
                # in Field Managers.
                if isinstance(item, dict):
                    for k, v in item.iteritems():
                        if 'uid' in k:
                            local_uid = self.sh.get_local_uid(v)
                            item[k] = local_uid

        # handle file fields
        if field.type in ("file", "image", "blob"):
            if data.get(fieldname) is not None:
                fileinfo = data.get(fieldname)
                url = fileinfo.get("download")
                filename = fileinfo.get("filename")
                kwargs["filename"] = filename
                response = self.session.get(url)
                value = response.content

        # Leave the Proxy Fields for later
        if isinstance(fm, ProxyFieldManager):
            proxy_fields.append({
                'field_name': fieldname,
                'fm': fm,
                'value': value
            })
            continue

        try:
            fm.set(obj, value, **kwargs)
        except:
            logger.debug("Could not set field '{}' with value '{}'".format(
                fieldname, value))

    # All reference fields are set. We can set the proxy fields now.
    for pf in proxy_fields:
        field_name = pf.get("field_name")
        fm = pf.get("fm")
        value = pf.get("value")
        try:
            fm.set(obj, value)
        except:
            logger.debug("Could not set field '{}' with value '{}'".format(
                field_name, value))

    # Set the workflow states
    wf_info = data.get("workflow_info", [])
    for wf_dict in wf_info:
        wf_id = wf_dict.get("workflow")
        review_history = wf_dict.get("review_history")
        self._import_review_history(obj, wf_id, review_history)

    # finally reindex the object
    self.uids_to_reindex.append(api.get_uid(obj))