def multiple(self, values, rows=None, list_type=True, show_link=True):
    """
    Represent multiple values as a comma-separated list.

    @param values: list of values
    @param rows: the referenced rows (if values are foreign keys)
    @param list_type: values is a list of lists (list:-type field)
    @param show_link: render each representation as link

    @raise ValueError: if a list of lists was expected but not given
    """
    # Single import instead of the two branch-local duplicates
    from itertools import chain

    self._setup()
    show_link = show_link and self.show_link

    # Get the values
    if rows and self.table:
        key = self.key
        values = [row[key] for row in rows]
    elif self.list_type and list_type:
        # list:-type => merge the sub-lists and deduplicate,
        # preserving a single None if any was present
        try:
            hasnone = None in values
            if hasnone:
                # PEP 8: compare against None by identity
                values = [i for i in values if i is not None]
            values = list(set(chain.from_iterable(values)))
            if hasnone:
                values.append(None)
        except TypeError:
            raise ValueError("List of lists expected, got %s" % values)
    else:
        values = [values] if type(values) is not list else values

    # Lookup the representations
    if values:
        default = self.default
        items = self._lookup(values, rows=rows)
        if show_link:
            link = self.link
            labels = [[link(v, s3_unicode(items[v])), ", "]
                      if v in items else [default, ", "]
                      for v in values]
            if labels:
                # Flatten (label, separator) pairs and drop the
                # trailing separator
                return TAG[""](list(chain.from_iterable(labels))[:-1])
            else:
                return ""
        else:
            labels = [s3_unicode(items[v]) if v in items else default
                      for v in values]
            if labels:
                return ", ".join(labels)
    return self.none
def render_node(self, element, attributes, name):
    """
    Render as text or attribute of an XML element

    @param element: the element
    @param attributes: the attributes dict of the element
    @param name: the attribute name
    """
    # Render value
    text = self.represent()
    text = s3_unicode(text)

    # Strip markup + XML-escape
    if text and "<" in text:
        try:
            stripper = S3MarkupStripper()
            stripper.feed(text)
            text = stripper.stripped()
        except Exception:
            # Best-effort only: fall back to the unstripped text.
            # (Was a bare except, which would also swallow
            # SystemExit/KeyboardInterrupt.)
            pass

    # Add to node
    if text is not None:
        if element is not None:
            element.text = text
        else:
            attributes[name] = text
    return
def format_datetime(self, dt, dtfmt=None, local=False):
    """
    Format a datetime according to this calendar

    @param dt: the datetime (datetime.datetime)
    @param dtfmt: the date/time format (overrides the default)
    @param local: use the local (L10n) default format rather than ISO
    @return: the datetime as string
    """
    if dt is None:
        return current.messages["NONE"]

    # Fall back to the default format
    if dtfmt is None:
        dtfmt = current.deployment_settings.get_L10n_datetime_format() \
                if local else ISOFORMAT # ISO Date/Time Format

    # Deal with T's (force the format into a str)
    try:
        dtfmt = str(dtfmt)
    except (UnicodeDecodeError, UnicodeEncodeError):
        dtfmt = s3_unicode(dtfmt).encode("utf-8")

    # Remove microseconds
    # - for the case that the calendar falls back to .isoformat
    if isinstance(dt, datetime.datetime):
        dt = dt.replace(microsecond=0)

    return self.calendar._format(dt, dtfmt)
def ajax_options(self, resource):
    """
    Method to Ajax-retrieve the current options of this widget

    @param resource: the S3Resource
    @return: dict {element_id: options}, where options is either a
             string (no-options message), a list of tuples, or the
             grouped-options structure produced by the widget
    """
    opts = self.opts
    attr = self._attr(resource)
    ftype, options, noopt = self._options(resource)
    if noopt:
        # No options available => return the message as string
        options = {attr["_id"]: str(noopt)}
    else:
        widget_type = opts["widget"]
        if widget_type in ("multiselect-bootstrap", "multiselect"):
            # Produce a simple list of tuples
            options = {attr["_id"]: [(k, s3_unicode(v))
                                     for k, v in options]}
        else:
            # Use the widget method to group and sort the options
            widget = S3GroupedOptionsWidget(options=options,
                                            multiple=True,
                                            cols=opts["cols"],
                                            size=opts["size"] or 12,
                                            help_field=opts["help_field"])
            options = {attr["_id"]: widget._options({"type": ftype}, [])}
    return options
def represent_row(self, row):
    """
    Represent the referenced row.
    (in foreign key representations)

    @param row: the row
    @return: the representation of the Row, or None if
             there is an error in the Row
    """
    labels = self.labels

    if self.slabels:
        # Labels is a string template
        reprstr = labels % row
    elif self.clabels:
        # Labels is an external renderer
        reprstr = labels(row)
    else:
        # Default: join the non-empty field values with spaces
        parts = [row[f] for f in self.fields if row[f] not in (None, "")]
        if parts:
            reprstr = " ".join([s3_unicode(p) for p in parts])
        else:
            reprstr = self.none

    # Translate the result unless it is already a lazyT
    if self.translate and not type(reprstr) is lazyT:
        return current.T(reprstr)
    return reprstr
def json_message(success=True, statuscode=None, message=None, **kwargs):
    """
    Provide a nicely-formatted JSON Message

    @param success: action succeeded or failed
    @param statuscode: the HTTP status code
    @param message: the message text
    @param kwargs: other elements for the message

    @keyword tree: error tree to include as JSON object (rather
                   than as string) for easy decoding

    @return: the message as JSON string
    """
    # Default the HTTP status code from the success flag
    if statuscode is None:
        statuscode = 200 if success else 404

    status = "success" if success else "failed"
    output = {"status": status,
              "statuscode": str(statuscode),
              }

    tree = kwargs.get("tree", None)
    if message:
        output["message"] = s3_unicode(message)
    for k, v in kwargs.items():
        if k != "tree":
            output[k] = v
    output = json.dumps(output)
    # Splice in the tree as raw JSON so clients can decode it as an
    # object rather than as an embedded string
    if message and tree:
        output = output[:-1] + ', "tree": %s}' % tree
    return output
def widget(self, field, attr):
    """
    Widget renderer (parameter description see base class)

    @param field: the Field
    @param attr: the HTML attributes for the widget
    @return: a read-only input element pre-set with the field default
    """
    # Read-only input, pre-populated with the field default
    attr["_readonly"] = "true"
    attr["_default"] = s3_unicode(field.default)
    return TAG["input"](self.label(), **attr)
def render_list(self, value, labels, show_link=True):
    """
    Helper method to render list-type representations from
    bulk()-results.

    @param value: the list
    @param labels: the labels as returned from bulk()
    @param show_link: render references as links, should
                      be the same as used with bulk()
    """
    if show_link and self.show_link:
        # Interleave each label with a separator, then flatten and
        # drop the trailing separator
        pairs = [(labels[k], ", ") if k in labels else (self.default, ", ")
                 for k in value]
        if not pairs:
            return ""
        from itertools import chain
        return TAG[""](list(chain.from_iterable(pairs))[:-1])

    # Plain text output
    return ", ".join([s3_unicode(labels[k]) if k in labels else self.default
                      for k in value])
def _html(self, node_id, widget_id, represent=None):
    """
    Recursively render a node as list item (with subnodes
    as unsorted list inside the item)

    @param node_id: the node ID
    @param widget_id: the unique ID for the outermost list
    @param represent: the node ID representation method
    @return: the list item (LI), or None if the node was not found
    """
    node = self.nodes.get(node_id)
    if not node:
        return None

    label = self.label(node_id, represent=represent)
    if label is None:
        # Fall back to the raw node ID as label
        label = s3_unicode(node_id)

    # "s" holds the IDs of the subnodes
    subnodes = node["s"]
    item = LI(label,
              _id="%s-%s" % (widget_id, node_id),
              _rel="parent" if subnodes else "leaf",
              _class="s3-hierarchy-node",
              )
    html = self._html
    if subnodes:
        # Recurse into the subnodes
        sublist = UL([html(n, widget_id, represent=represent)
                      for n in subnodes])
        item.append(sublist)
    return item
def format_date(self, dt, dtfmt=None, local=False):
    """
    Format a date according to this calendar

    @param dt: the date (datetime.date or datetime.datetime)
    @param dtfmt: the date format (overrides the default)
    @param local: use the local (L10n) default format rather than ISO
    @return: the date as string
    """
    if dt is None:
        return current.messages["NONE"]

    # Fall back to the default format
    if dtfmt is None:
        dtfmt = current.deployment_settings.get_L10n_date_format() \
                if local else "%Y-%m-%d" # ISO Date Format

    # Deal with T's (force the format into a str)
    try:
        dtfmt = str(dtfmt)
    except (UnicodeDecodeError, UnicodeEncodeError):
        dtfmt = s3_unicode(dtfmt).encode("utf-8")

    return self.calendar._format(dt, dtfmt)
def _html(self, node_id, widget_id, represent=None):
    """
    Recursively render a node as list item (with subnodes
    as unsorted list inside the item)

    @param node_id: the node ID
    @param widget_id: the unique ID for the outermost list
    @param represent: the node ID representation method
    @return: the list item (LI), or None if the node was not found
    """
    node = self.nodes.get(node_id)
    if not node:
        return None

    label = self.label(node_id, represent=represent)
    if label is None:
        # Fall back to the raw node ID as label
        label = s3_unicode(node_id)

    children = node["s"]
    item = LI(A(label, _href="#", _class="s3-hierarchy-node"),
              _id = "%s-%s" % (widget_id, node_id),
              _rel = "parent" if children else "leaf",
              )
    if children:
        # Recurse into the subnodes
        item.append(UL([self._html(c, widget_id, represent=represent)
                        for c in children]))
    return item
def ajax_options(self, resource):
    """
    Method to Ajax-retrieve the current options of this widget

    @param resource: the S3Resource
    @return: dict {element_id: options}
    """
    opts = self.opts
    attr = self._attr(resource)
    ftype, options, noopt = self._options(resource)

    element_id = attr["_id"]
    if noopt:
        # No options available
        return {element_id: noopt}

    if opts["widget"] in ("multiselect-bootstrap", "multiselect"):
        # Produce a simple list of tuples
        return {element_id: [(k, s3_unicode(v)) for k, v in options]}

    # Use the widget method to group and sort the options
    widget = S3GroupedOptionsWidget(options = options,
                                    multiple = True,
                                    cols = opts["cols"],
                                    size = opts["size"] or 12,
                                    help_field = opts["help_field"],
                                    )
    return {element_id: widget._options({"type": ftype}, [])}
def aadata(self, totalrows, displayrows, id, sEcho, flist, stringify=True, action_col=None, **attr):
    """
    Method to render the data into a json object

    @param totalrows: The total rows in the unfiltered query.
    @param displayrows: The total rows in the filtered query.
    @param id: The id of the table for which this ajax call will
               respond to.
    @param sEcho: An unaltered copy of sEcho sent from the client
                  used by dataTables as a draw count.
    @param flist: The list of fields
    @param stringify: serialize the structure as JSON (default),
                      otherwise return it as a dict
    @param action_col: the column for the action buttons
                       (overrides dt_action_col)
    @param attr: dictionary of attributes which can be passed in
           dt_action_col: The column where the action buttons will
                          be placed
           dt_bulk_actions: list of labels for the bulk actions.
           dt_bulk_col: The column in which the checkboxes will
                        appear, by default it will be the column
                        immediately before the first data item
           dt_group_totals: The number of record in each group.
                            This will be displayed in parenthesis
                            after the group title.
    """
    data = self.data
    if not flist:
        flist = self.lfields
    start = self.start
    end = self.end
    if action_col is None:
        # Default the action column from the dt_action_col attribute
        action_col = attr.get("dt_action_col", 0)
    structure = {}
    aadata = []
    # One list of stringified cell values per displayed row
    for i in xrange(start, end):
        row = data[i]
        details = []
        for field in flist:
            if field == "BULK":
                # Bulk-action checkbox, keyed by the value of the
                # row's action column
                details.append("<INPUT id='select%s' type='checkbox' class='bulkcheckbox'>" %
                               row[flist[action_col]])
            else:
                details.append(s3_unicode(row[field]))
        aadata.append(details)
    # NOTE: the "id" parameter shadows the id() builtin (kept for
    #       interface compatibility)
    structure["dataTable_id"] = id
    structure["dataTable_filter"] = self.filterString
    structure["dataTable_groupTotals"] = attr.get("dt_group_totals", [])
    structure["dataTable_sort"] = self.orderby
    structure["aaData"] = aadata
    structure["iTotalRecords"] = totalrows
    structure["iTotalDisplayRecords"] = displayrows
    structure["sEcho"] = sEcho
    if stringify:
        # Serialize with the web2py JSON serializer
        from gluon.serializers import json
        return json(structure)
    else:
        return structure
def _setup(self):
    """
    Lazy initialization of defaults

    Resolves the default/none representations, the option set,
    the lookup table parameters and the link URL template, and
    determines which renderer type to use. Runs only once per
    instance (guarded by self.setup).
    """
    if self.setup:
        return

    # Reset the query counter
    self.queries = 0

    # Default representations
    messages = current.messages
    if self.default is None:
        self.default = s3_unicode(messages.UNKNOWN_OPT)
    if self.none is None:
        self.none = messages["NONE"]

    # Initialize theset
    if self.options is not None:
        self.theset = self.options
    else:
        self.theset = {}

    # Lookup table parameters and linkto
    if self.table is None:
        tablename = self.tablename
        if tablename:
            table = current.s3db.table(tablename)
            if table is not None:
                if self.key is None:
                    # Default key = the table's primary key
                    self.key = table._id.name
                if not self.fields:
                    # Default fields: "name" if present, else the key
                    if "name" in table:
                        self.fields = ["name"]
                    else:
                        self.fields = [self.key]
                self.table = table
        if self.linkto is None and self.show_link:
            # Default link URL: <controller>/<function>/[id]
            # (tablename is split as "<controller>_<function>")
            c, f = tablename.split("_", 1)
            self.linkto = URL(c=c, f=f, args=["[id]"], extension="")

    # What type of renderer do we use?
    labels = self.labels
    # String template?
    self.slabels = isinstance(labels, basestring)
    # External renderer?
    self.clabels = callable(labels)

    # Hierarchy template
    if isinstance(self.hierarchy, basestring):
        self.htemplate = self.hierarchy
    else:
        self.htemplate = "%s > %s"

    self.setup = True
    return
def ajax_options(self, resource):
    """
    Method to Ajax-retrieve the current options of this widget

    @param resource: the S3Resource
    @return: dict {element_id: options}
    """
    element_id = self._attr(resource)["_id"]
    ftype, options, noopt = self._options(resource)
    if noopt:
        # No options available
        return {element_id: noopt}
    return {element_id: [(k, s3_unicode(v)) for k, v in options]}
def _represent(self, node_ids=None, renderer=None): """ Represent nodes as labels, the labels are stored in the nodes as attribute "l". @param node_ids: the node IDs (None for all nodes) @param renderer: the representation method (falls back to the "name" field in the target table if present) """ theset = self.theset LABEL = "l" if node_ids is None: node_ids = self.nodes.keys() pending = set() for node_id in node_ids: node = theset.get(node_id) if not node: continue if LABEL not in node: pending.add(node_id) if renderer is None: renderer = self.represent if renderer is None: tablename = self.tablename table = current.s3db.table(tablename) if tablename else None if table and "name" in table.fields: from s3fields import S3Represent self.represent = renderer = S3Represent(lookup = tablename, key = self.pkey.name) else: renderer = s3_unicode if hasattr(renderer, "bulk"): labels = renderer.bulk(list(pending), list_type = False) for node_id, label in labels.items(): if node_id in theset: theset[node_id][LABEL] = label else: for node_id in pending: try: label = renderer(node_id) except: label = s3_unicode(node_id) theset[node_id][LABEL] = label return
def _represent(self, node_ids=None, renderer=None): """ Represent nodes as labels, the labels are stored in the nodes as attribute "l". @param node_ids: the node IDs (None for all nodes) @param renderer: the representation method (falls back to the "name" field in the target table if present) """ theset = self.theset LABEL = "l" if node_ids is None: node_ids = self.nodes.keys() pending = set() for node_id in node_ids: node = theset.get(node_id) if not node: continue if LABEL not in node: pending.add(node_id) if renderer is None: renderer = self.represent if renderer is None: tablename = self.tablename table = current.s3db.table(tablename) if tablename else None if table and "name" in table.fields: from s3fields import S3Represent self.represent = renderer = S3Represent(lookup=tablename, key=self.pkey.name) else: renderer = s3_unicode if hasattr(renderer, "bulk"): labels = renderer.bulk(list(pending), list_type=False) for node_id, label in labels.items(): if node_id in theset: theset[node_id][LABEL] = label else: for node_id in pending: try: label = renderer(node_id) except: label = s3_unicode(node_id) theset[node_id][LABEL] = label return
def html_render_group_footer(self, tbody, group, level=0):
    """
    Render the group footer (=group totals)

    @param tbody: the TBODY or TABLE to append to
    @param group: the group dict
    @param level: the grouping level

    @todo: add group label to footer if no group headers
    @todo: add totals label
    """
    data = self.data
    columns = data.get("c")
    totals = group.get("t")
    value = group.get("v")

    footer_row = TR(_class="gi-group-footer gi-level-%s" % level)
    if not totals:
        # No totals: render just the group value as a full-width
        # footer, but only when there are no group headers (else
        # the value is shown in the header already)
        if not self.group_headers:
            footer_row.append(TD(value, _colspan = len(columns)))
            tbody.append(footer_row)
        return

    if columns:
        label = None
        span = 0
        for column in columns:
            has_value = column in totals
            if label is None:
                if not has_value:
                    # Extend the label cell across leading columns
                    # that have no totals
                    span += 1
                    continue
                else:
                    # First column with a total: emit the label cell
                    # spanning all the label-only columns before it
                    label = TD("%s %s" % (s3_unicode(s3_strip_markup(value)),
                                          self.totals_label,
                                          ),
                               _class = "gi-group-footer-label",
                               _colspan = span,
                               )
                    footer_row.append(label)
            # NOTE: "value" is re-used here for the cell values
            value = totals[column] if has_value else ""
            footer_row.append(TD(value))
        tbody.append(footer_row)
def link(self, k, v):
    """
    Represent a (key, value) as hypertext link.

        - Typically, k is a foreign key value, and v the
          representation of the referenced record, and the link
          shall open a read view of the referenced record.

        - In the base class, the linkto-parameter expects a URL
          (as string) with "[id]" as placeholder for the key.

    @param k: the key
    @param v: the representation of the key
    """
    linkto = self.linkto
    if not linkto:
        # No link template configured => plain representation
        return v
    key = s3_unicode(k)
    # Substitute both the raw and the URL-encoded placeholder
    url = linkto.replace("[id]", key).replace("%5Bid%5D", key)
    return A(v, _href=url)
def represent_row(self, row, prefix=None):
    """
    Represent the referenced row.
    (in foreign key representations)

    @param row: the row
    @param prefix: the hierarchy prefix for the representation
    @return: the representation of the Row, or None if
             there is an error in the Row
    """
    labels = self.labels

    if self.slabels:
        # Labels is a string template
        reprstr = labels % row
    elif self.clabels:
        # Labels is an external renderer
        reprstr = labels(row)
    else:
        # Default: join the non-empty field values with field_sep
        parts = [row[f] for f in self.fields if row[f] not in (None, "")]
        if parts:
            reprstr = self.field_sep.join([s3_unicode(p) for p in parts])
        else:
            reprstr = self.none

    # Translate the result unless it is already a lazyT
    if self.translate and not type(reprstr) is lazyT:
        output = current.T(reprstr)
    else:
        output = reprstr

    # Prepend the hierarchy prefix if requested
    if prefix and self.hierarchy:
        return self.htemplate % (prefix, output)
    return output
def render(self, timetuple, dtfmt):
    """
    Render a timetuple as string according to the given format

    @param timetuple: the timetuple (y, m, d, hh, mm, ss)
    @param dtfmt: the date/time format (string) with
                  strftime-like %-directives

    @todo: support day-of-week options
    """
    y, m, d, hh, mm, ss = timetuple
    T = current.T
    calendar = self.calendar
    from s3utils import s3_unicode

    # Substitution values for the supported %-directives
    rules = {"d": "%02d" % d,
             "b": T(calendar.MONTH_ABBR[m - 1]),
             "B": T(calendar.MONTH_NAME[m - 1]),
             "m": "%02d" % m,
             "y": "%02d" % (y % 100),
             "Y": "%04d" % y,
             "H": "%02d" % hh,
             "I": "%02d" % ((hh % 12) or 12),
             "p": T("AM") if hh < 12 else T("PM"),
             "M": "%02d" % mm,
             "S": "%02d" % ss,
             }

    # Interpret the format
    result = []
    sequence = []

    def close(s):
        # Flush a literal character sequence into the result
        s = "".join(s)
        if s:
            result.append(s)

    rule = False
    for c in s3_unicode(dtfmt):
        if rule and c in rules:
            # Close previous sequence
            # (pop the "%" that opened this rule first)
            sequence.pop()
            close(sequence)
            # Append control rule
            result.append(s3_unicode(rules[c]))
            # Start new sequence
            sequence = []
            # Close rule
            rule = False
            continue
        if c == "%" and not rule:
            rule = True
        else:
            rule = False
        sequence.append(c)
    if sequence:
        close(sequence)
    return "".join(result)
def json(self, fields=None, labels=None, represent=None, as_dict=False, master=True):
    """
    Serialize this group as JSON

    @param fields: the columns to include for each item
    @param labels: columns labels as dict {key: label},
                   including the labels for grouping axes
    @param represent: dict of representation methods for grouping
                      axis values {colname: function}
    @param as_dict: return output as dict rather than JSON string
    @param master: this is the top-level group (internal)

    @raise SyntaxError: if no fields are specified
    """
    T = current.T
    output = {}
    if not fields:
        raise SyntaxError

    if master:
        # Add columns and grouping information to top level group
        if labels is None:
            labels = {}

        def check_label(colname):
            # Ensure the column has a label, generating a
            # title-cased fallback from the field name if not
            if colname in labels:
                label = labels[colname] or ""
            else:
                fname = colname.split(".", 1)[-1]
                label = " ".join([s.strip().capitalize()
                                  for s in fname.split("_") if s])
                label = labels[colname] = T(label)
            return str(label)

        grouping = []
        groupby = self.groupby
        if groupby:
            for axis in groupby:
                check_label(axis)
                grouping.append(axis)
        output["g"] = grouping

        columns = []
        for colname in fields:
            check_label(colname)
            columns.append(colname)
        output["c"] = columns

        output["l"] = dict((c, str(l)) for c, l in labels.items())

    key = self.key
    if key:
        # This group has subgroups (grouped by "key")
        output["k"] = key
        data = []
        add_group = data.append
        for group in self.groups:
            # Render subgroup
            gdict = group.json(fields, labels,
                               represent = represent,
                               as_dict = True,
                               master = False,
                               )
            # Add subgroup attribute value
            value = group[key]
            renderer = represent.get(key) if represent else None
            if renderer is None:
                value = s3_unicode(value).encode("utf-8")
            else:
                # @todo: call bulk-represent if available
                value = s3_unicode(renderer(value)).encode("utf-8")
            gdict["v"] = value
            add_group(gdict)
        output["d"] = data
        output["i"] = None
    else:
        # Leaf group => render the items directly
        oitems = []
        add_item = oitems.append
        for item in self.items:
            # Render item
            oitem = {}
            for colname in fields:
                if colname in item:
                    value = item[colname] or ""
                else:
                    # Fall back to raw value
                    raw = item.get("_row")
                    try:
                        value = raw.get(colname)
                    except (AttributeError, TypeError):
                        # _row is not a dict
                        value = None
                if value is None:
                    value = ""
                else:
                    value = s3_unicode(value).encode("utf-8")
                oitem[colname] = value
            add_item(oitem)
        output["d"] = None
        output["i"] = oitems

    # Convert to JSON unless requested otherwise
    if master and not as_dict:
        output = json.dumps(output, separators=SEPARATORS)
    return output
def json(self, resource, start=None, limit=None, fields=None, orderby=None, represent=False, tooltip=None):
    """
    Export a resource as JSON

    @param resource: the resource to export from
    @param start: index of the first record to export
    @param limit: maximum number of records to export
    @param fields: list of field selectors for fields to include in
                   the export (None for all fields)
    @param orderby: ORDERBY expression
    @param represent: whether values should be represented
    @param tooltip: additional tooltip field, either a field selector
                    or an expression "f(k,v)" where f is a function
                    name that can be looked up from s3db, and k,v are
                    field selectors for the row, f will be called with
                    a list of tuples (k,v) for each row and is expected
                    to return a dict {k:tooltip}
                    => used by filterOptionsS3 to extract onhover-tooltips
                       for Ajax-update of options
    """
    if fields is None:
        # json_fields falls back to list_fields if not-defined.
        # If that's not defined either, it falls back to all readable
        # fields in the table
        fields = resource.list_fields("json_fields", id_column=True)

    if orderby is None:
        orderby = resource.get_config("orderby", None)

    tooltip_function = None
    if tooltip:
        if type(tooltip) is list:
            tooltip = tooltip[-1]
        import re
        # Match the "f(k,v)" expression form
        match = re.match("(\w+)\((\w+),(\w+)\)", tooltip)
        if match:
            function_name, kname, vname = match.groups()
            # Try to resolve the function name
            tooltip_function = current.s3db.get(function_name)
            if tooltip_function:
                # Make sure the key/value selectors get extracted
                if kname not in fields:
                    fields.append(kname)
                if vname not in fields:
                    fields.append(vname)
        else:
            # Plain field selector
            if tooltip not in fields:
                fields.append(tooltip)

    # Get the data
    _rows = resource.select(fields,
                            start=start,
                            limit=limit,
                            orderby=orderby,
                            represent=represent).rows

    # Simplify to plain fieldnames for fields in this table
    tn = "%s." % resource.tablename
    rows = []
    rappend = rows.append
    for _row in _rows:
        row = {}
        for f in _row:
            v = _row[f]
            if tn in f:
                f = f.split(tn, 1)[1]
            row[f] = v
        rappend(row)

    if tooltip:
        if tooltip_function:
            # Resolve key and value names against the resource
            try:
                krfield = resource.resolve_selector(kname)
                vrfield = resource.resolve_selector(vname)
            except (AttributeError, SyntaxError):
                import sys
                current.log.error(sys.exc_info()[1])
            else:
                # Extract key and value fields from each row and
                # build options dict for function call
                options = []
                items = {}
                for row in rows:
                    try:
                        k = krfield.extract(row)
                    except KeyError:
                        break
                    try:
                        v = vrfield.extract(row)
                    except KeyError:
                        break
                    items[k] = row
                    options.append((k, v))
                # Call tooltip rendering function
                # NOTE(review): bare except here deliberately keeps the
                # export working even if the custom function fails
                try:
                    tooltips = tooltip_function(options)
                except:
                    import sys
                    current.log.error(sys.exc_info()[1])
                else:
                    # Add tooltips as "_tooltip" to the corresponding rows
                    if isinstance(tooltips, dict):
                        from s3utils import s3_unicode
                        for k, v in tooltips.items():
                            if k in items:
                                items[k]["_tooltip"] = s3_unicode(v)
        else:
            # Resolve the tooltip field name against the resource
            try:
                tooltip_rfield = resource.resolve_selector(tooltip)
            except (AttributeError, SyntaxError):
                import sys
                current.log.error(sys.exc_info()[1])
            else:
                # Extract the tooltip field from each row
                # and add it as _tooltip
                from s3utils import s3_unicode
                for row in rows:
                    try:
                        value = tooltip_rfield.extract(row)
                    except KeyError:
                        break
                    if value:
                        row["_tooltip"] = s3_unicode(value)

    # Return as JSON
    response = current.response
    if response:
        response.headers["Content-Type"] = "application/json"
    from gluon.serializers import json as jsons
    return jsons(rows)
def __init__(self, report, show_totals=True, **attributes):
    """
    Constructor

    Builds the pivot table HTML (header, body, footer) and the
    JSON chart data from the given report.

    @param report: the S3Pivottable instance
    @param show_totals: include row/column totals and the footer
    @param attributes: the HTML attributes for the table
    """
    T = current.T
    TOTAL = T("Total")

    TABLE.__init__(self, **attributes)
    components = self.components = []
    self.json_data = None

    # Extract the report parameters
    layers = report.layers
    resource = report.resource
    tablename = resource.tablename
    cols = report.cols
    rows = report.rows
    numcols = report.numcols
    numrows = report.numrows
    rfields = report.rfields

    get_label = self._get_label
    get_total = self._totals
    represent = lambda f, v, d="": \
                self._represent(rfields, f, v, default=d)

    layer_label = None
    col_titles = []
    add_col_title = col_titles.append
    col_totals = []
    add_col_total = col_totals.append
    row_titles = []
    add_row_title = row_titles.append
    row_totals = []
    add_row_total = row_totals.append

    # Table header --------------------------------------------------------
    #
    # Layer titles
    labels = []
    get_mname = S3Report.mname
    for field_name, method in layers:
        label = get_label(rfields, field_name, tablename, "fact")
        mname = get_mname(method)
        if not labels:
            # First layer determines the overall layer label
            # ("list" layers are titled as "count")
            m = method == "list" and get_mname("count") or mname
            layer_label = "%s (%s)" % (label, m)
        labels.append("%s (%s)" % (label, mname))
    layers_title = TH(" / ".join(labels))

    # Columns field title
    if cols:
        col_label = get_label(rfields, cols, tablename, "cols")
        _colspan = numcols + 1
    else:
        col_label = ""
        _colspan = numcols
    cols_title = TH(col_label, _colspan=_colspan, _scope="col")
    titles = TR(layers_title, cols_title)

    # Rows field title
    row_label = get_label(rfields, rows, tablename, "rows")
    rows_title = TH(row_label, _scope="col")
    headers = TR(rows_title)
    add_header = headers.append

    # Column headers
    values = report.col
    for i in xrange(numcols):
        value = values[i].value
        v = represent(cols, value)
        add_col_title(s3_truncate(unicode(v)))
        colhdr = TH(v, _scope="col")
        add_header(colhdr)

    # Row totals header
    if show_totals and cols is not None:
        add_header(TH(TOTAL,
                      _class="totals_header rtotal",
                      _scope="col"))
    thead = THEAD(titles, headers)

    # Table body ----------------------------------------------------------
    #
    tbody = TBODY()
    add_row = tbody.append

    # lookup table for cell list values
    cell_lookup_table = {} # {{}, {}}

    cells = report.cell
    rvals = report.row
    for i in xrange(numrows):
        # Initialize row (alternating odd/even classes)
        _class = i % 2 and "odd" or "even"
        tr = TR(_class=_class)
        add_cell = tr.append

        # Row header
        row = rvals[i]
        v = represent(rows, row.value)
        add_row_title(s3_truncate(unicode(v)))
        rowhdr = TD(v)
        add_cell(rowhdr)

        # Result cells
        for j in xrange(numcols):
            cell = cells[i][j]
            vals = []
            cell_ids = []
            add_value = vals.append
            for layer_idx, layer in enumerate(layers):
                f, m = layer
                value = cell[layer]
                if m == "list":
                    if isinstance(value, list):
                        l = [represent(f, v, d="-") for v in value]
                    elif value is None:
                        l = "-"
                    else:
                        if type(value) in (int, float):
                            l = IS_NUMBER.represent(value)
                        else:
                            l = unicode(value)
                    add_value(", ".join(l))
                else:
                    if type(value) in (int, float):
                        add_value(IS_NUMBER.represent(value))
                    else:
                        add_value(unicode(value))

                # hold the references
                layer_ids = []
                # get previous lookup values for this layer
                layer_values = cell_lookup_table.get(layer_idx, {})

                if m == "count":
                    rfield = rfields[f]
                    field = rfield.field
                    colname = rfield.colname
                    has_fk = field is not None and s3_has_foreign_key(field)
                    for id in cell.records:
                        # cell.records == [#, #, #]
                        record = report.records[id]
                        try:
                            fvalue = record[colname]
                        except AttributeError:
                            fvalue = None
                        if fvalue is not None:
                            if has_fk:
                                if not isinstance(fvalue, list):
                                    fvalue = [fvalue]
                                # list of foreign keys
                                for fk in fvalue:
                                    if fk not in layer_ids:
                                        layer_ids.append(fk)
                                        layer_values[fk] = str(field.represent(fk))
                            else:
                                if id not in layer_ids:
                                    layer_ids.append(id)
                                    layer_values[id] = s3_unicode(represent(f, fvalue))
                cell_ids.append(layer_ids)
                cell_lookup_table[layer_idx] = layer_values

            vals = " / ".join(vals)
            if any(cell_ids):
                # Cell has record references => make it zoomable
                cell_attr = {"_data-records": cell_ids}
                vals = (A(_class="report-cell-zoom"), vals)
            else:
                cell_attr = {}
            add_cell(TD(vals, **cell_attr))

        # Row total
        totals = get_total(row, layers, append=add_row_total)
        if show_totals and cols is not None:
            add_cell(TD(totals))
        add_row(tr)

    # Table footer --------------------------------------------------------
    #
    i = numrows
    _class = i % 2 and "odd" or "even"
    _class = "%s %s" % (_class, "totals_row")
    col_total = TR(_class=_class)
    add_total = col_total.append
    add_total(TH(TOTAL, _class="totals_header", _scope="row"))

    # Column totals
    for j in xrange(numcols):
        col = report.col[j]
        totals = get_total(col, layers, append=add_col_total)
        add_total(TD(IS_NUMBER.represent(totals)))

    # Grand total
    if cols is not None:
        grand_totals = get_total(report.totals, layers)
        add_total(TD(grand_totals))
    tfoot = TFOOT(col_total)

    # Wrap up -------------------------------------------------------------
    #
    append = components.append
    append(thead)
    append(tbody)
    if show_totals:
        append(tfoot)

    # Chart data ----------------------------------------------------------
    #
    drows = dcols = None
    BY = T("by")
    top = self._top
    if rows and row_titles and row_totals:
        drows = top(zip(row_titles, row_totals))
    if cols and col_titles and col_totals:
        dcols = top(zip(col_titles, col_totals))
    row_label = "%s %s" % (BY, str(row_label))
    if col_label:
        col_label = "%s %s" % (BY, str(col_label))
    layer_label = str(layer_label)
    json_data = json.dumps(dict(rows=drows,
                                cols=dcols,
                                data=report.compact(10, represent=True),
                                row_label=row_label,
                                col_label=col_label,
                                layer_label=layer_label,
                                cell_lookup_table=cell_lookup_table
                                ))
    self.report_data = Storage(row_label=row_label,
                               col_label=col_label,
                               layer_label=layer_label,
                               json_data=json_data)
def set_format(self, dtfmt):
    """
    Update the date/time format for this parser, and generate
    the corresponding pyparsing grammar

    @param dtfmt: the date/time format
    @return: the pyparsing grammar
    @raise TypeError: if dtfmt is not a string
    """
    if not isinstance(dtfmt, basestring):
        raise TypeError("Invalid date/time format: %s (%s)" % (dtfmt, type(dtfmt)))

    import pyparsing as pp
    self.ParseException = pp.ParseException

    from s3utils import s3_unicode

    # Get the rules
    rules = self.rules
    if rules is None:
        # Lazily generate and cache the parser rules
        rules = self.rules = self._get_rules()

    # Interpret the format
    result = []
    sequence = []

    def close(s):
        # Convert a literal character sequence into a
        # suppressed (non-capturing) grammar token
        s = "".join(s).strip()
        if s:
            result.append(pp.Suppress(pp.Literal(s)))

    rule = False
    for c in s3_unicode(dtfmt):
        if rule and c in rules:
            # Close previous sequence
            # (pop the "%" that opened this rule first)
            sequence.pop()
            close(sequence)
            # Append control rule
            result.append(rules[c])
            # Start new sequence
            sequence = []
            # Close rule
            rule = False
            continue
        if c == "%" and not rule:
            rule = True
        else:
            rule = False
        sequence.append(c)
    if sequence:
        close(sequence)

    # Chain all tokens into a single grammar
    if result:
        grammar = result[0]
        for item in result[1:]:
            grammar += item
    else:
        # Default = ignore everything
        grammar = pp.Suppress(pp.Regex(".*"))

    self.grammar = grammar
    return grammar
def mdata_import(self, r, **attr):
    """
    Process data submission from mobile app

    @param r: the S3Request instance
    @param attr: controller attributes

    @returns: JSON message
    """
    output = {}

    # Extract the data
    files = {}
    content_type = r.env.get("content_type")
    if content_type and content_type.startswith("multipart/"):
        # Multipart: record data in the "data" post var,
        # attachments as separate file parts

        # Record data
        s = r.post_vars.get("data")
        try:
            data = json.loads(s)
        except JSONERRORS:
            msg = sys.exc_info()[1]
            r.error(400, msg)

        # Attached files
        import cgi
        for key in r.post_vars:
            value = r.post_vars[key]
            if isinstance(value, cgi.FieldStorage) and value.filename:
                files[value.filename] = value.file
    else:
        # Plain request body is the JSON data
        s = r.body
        s.seek(0)
        try:
            data = json.load(s)
        except JSONERRORS:
            msg = sys.exc_info()[1]
            r.error(400, msg)

    xml = current.xml

    resource = r.resource
    tablename = resource.tablename

    records = data.get(tablename)
    if records:
        # Create import tree
        TAG = xml.TAG
        ATTRIBUTE = xml.ATTRIBUTE
        IGNORE_FIELDS = xml.IGNORE_FIELDS
        FIELDS_TO_ATTRIBUTES = xml.FIELDS_TO_ATTRIBUTES

        RESOURCE = TAG.resource
        DATA = TAG.data
        NAME = ATTRIBUTE.name
        FIELD = ATTRIBUTE.field

        rfields = resource.fields
        table = resource.table

        root = etree.Element(TAG.root)
        SubElement = etree.SubElement
        for record in records:
            row = SubElement(root, RESOURCE)
            row.set(NAME, tablename)
            for fieldname, value in record.items():
                # Skip empty, unknown and ignorable fields
                if value is None:
                    continue
                elif fieldname not in rfields:
                    continue
                elif fieldname in IGNORE_FIELDS:
                    continue
                elif fieldname in FIELDS_TO_ATTRIBUTES:
                    row.set(fieldname, value)
                else:
                    col = SubElement(row, DATA)
                    col.set(FIELD, fieldname)
                    ftype = table[fieldname].type
                    if ftype == "upload":
                        # Field value is name of attached file
                        filename = s3_unicode(value)
                        if filename in files:
                            col.set("filename", filename)
                    else:
                        col.text = s3_unicode(value)

        tree = etree.ElementTree(root)

        # Try importing the tree
        # @todo: error handling
        try:
            resource.import_xml(tree, files=files)
        except IOError:
            r.unauthorised()
        else:
            import_result = self.import_result(resource)
            output = xml.json_message(**import_result)
    else:
        output = xml.json_message(True, 200, "No records to import")

    current.response.headers = {"Content-Type": "application/json"}
    return output
def _options(self, resource):
    """
    Helper function to retrieve the current options for this
    filter widget.

    @param resource: the S3Resource

    @returns: tuple (ftype, options, no_opts_message) where options
              is a list of (value, label) tuples, or None together
              with a "no options" message when no options are
              available
    """

    T = current.T

    NOOPT = T("No options available")
    EMPTY = T("None")

    attr = self.attr  # NOTE(review): appears unused in this method
    opts = self.opts

    # Resolve the field selector
    selector = self.field
    if isinstance(selector, (tuple, list)):
        # Multi-selector widget: use the first selector for options
        selector = selector[0]
    rfield = S3ResourceField(resource, selector)
    field = rfield.field
    colname = rfield.colname
    ftype = rfield.ftype

    # Find the options
    if opts.options is not None:
        # Custom dict of options {value: label} or a callable
        # returning such a dict:
        options = opts.options
        if callable(options):
            options = options()
        opt_keys = options.keys()
    else:
        # Determine the options from the field type
        options = None
        if ftype == "boolean":
            opt_keys = (True, False)
        elif field or rfield.virtual:
            # NOTE(review): "multiple" is only bound in this branch;
            # the simple-represent branch below reads it and would
            # raise NameError if opts.options was supplied — TODO confirm
            multiple = ftype[:5] == "list:"
            groupby = field if field and not multiple else None
            virtual = field is None
            # Select distinct values actually present in the data
            rows = resource.select(fields=[selector],
                                   start=None,
                                   limit=None,
                                   orderby=field,
                                   groupby=groupby,
                                   virtual=virtual)
            opt_keys = []
            if rows:
                if multiple:
                    # list:-type => flatten lists, deduplicate
                    kextend = opt_keys.extend
                    for row in rows:
                        vals = row[colname]
                        if vals:
                            kextend([v for v in vals
                                       if v not in opt_keys])
                else:
                    kappend = opt_keys.append
                    for row in rows:
                        v = row[colname]
                        if v not in opt_keys:
                            kappend(v)
        else:
            opt_keys = []

    # No options?
    if len(opt_keys) < 1 or len(opt_keys) == 1 and not opt_keys[0]:
        return (ftype, None, opts.get("no_opts", NOOPT))

    # Represent the options
    opt_list = [] # list of tuples (key, value)

    # Custom represent? (otherwise fall back to field represent)
    represent = opts.represent
    if not represent or ftype[:9] != "reference":
        # NOTE(review): field may be None for virtual fields here;
        # field.represent would then raise — TODO confirm
        represent = field.represent

    if options is not None:
        # Custom dict of {value:label} => use this label
        opt_list = options.items()

    elif callable(represent):
        # Callable representation function:
        if hasattr(represent, "bulk"):
            # S3Represent => use bulk option for a single DB lookup
            opt_dict = represent.bulk(opt_keys,
                                      list_type=False,
                                      show_link=False)
            if None in opt_keys:
                opt_dict[None] = EMPTY
            elif None in opt_dict:
                del opt_dict[None]
            if "" in opt_keys:
                opt_dict[""] = EMPTY
            opt_list = opt_dict.items()
        else:
            # Simple represent function; only pass show_link if the
            # function accepts it (Python 2 introspection)
            args = {"show_link": False} \
                   if "show_link" in represent.func_code.co_varnames else {}
            if multiple:
                repr_opt = lambda opt: opt in (None, "") and (opt, EMPTY) or \
                                       (opt, represent([opt], **args))
            else:
                repr_opt = lambda opt: opt in (None, "") and (opt, EMPTY) or \
                                       (opt, represent(opt, **args))
            opt_list = map(repr_opt, opt_keys)

    elif isinstance(represent, str) and ftype[:9] == "reference":
        # Represent is a string template to be fed from the
        # referenced record

        # Get the referenced table
        db = current.db
        ktable = db[ftype[10:]]
        k_id = ktable._id.name

        # Get the fields referenced by the string template
        fieldnames = [k_id]
        fieldnames += re.findall("%\(([a-zA-Z0-9_]*)\)s", represent)
        represent_fields = [ktable[fieldname] for fieldname in fieldnames]

        # Get the referenced records
        query = (ktable.id.belongs([k for k in opt_keys
                                      if str(k).isdigit()])) & \
                (ktable.deleted == False)
        rows = db(query).select(*represent_fields).as_dict(key=k_id)

        # Run all referenced records against the format string
        opt_list = []
        ol_append = opt_list.append
        for opt_value in opt_keys:
            if opt_value in rows:
                opt_represent = represent % rows[opt_value]
                if opt_represent:
                    ol_append((opt_value, opt_represent))

    else:
        # Straight string representations of the values (fallback)
        opt_list = [(opt_value, s3_unicode(opt_value))
                    for opt_value in opt_keys if opt_value]

    none = opts["none"]

    # Sort by label, keep the "empty" option (None key) for last
    opt_list.sort(key=lambda item: item[1])
    options = []
    empty = None
    for k, v in opt_list:
        if k is None:
            empty = ("NONE", v)
        else:
            options.append((k, v))
    if empty and none:
        options.append(empty)

    # Sort the options
    return (ftype, options, None)
def __init__(self, report, show_totals=True, **attributes):
    """
    Constructor: render an S3Pivottable as an HTML contingency
    table (header, body, totals footer) and prepare the chart
    data (JSON) for the client side.

    @param report: the S3Pivottable instance
    @param show_totals: show totals for rows and columns
    @param attributes: the HTML attributes for the table
    """

    T = current.T
    TOTAL = T("Total")

    TABLE.__init__(self, **attributes)
    components = self.components = []
    self.json_data = None

    layers = report.layers
    resource = report.resource
    tablename = resource.tablename

    cols = report.cols
    rows = report.rows
    numcols = report.numcols
    numrows = report.numrows
    rfields = report.rfields

    get_label = self._get_label
    get_total = self._totals
    represent = lambda f, v, d="": \
                self._represent(rfields, f, v, default=d)

    layer_label = None
    col_titles = []
    add_col_title = col_titles.append
    col_totals = []
    add_col_total = col_totals.append
    row_titles = []
    add_row_title = row_titles.append
    row_totals = []
    add_row_total = row_totals.append

    # Table header --------------------------------------------------------
    #
    # Layer titles
    labels = []
    get_mname = S3Report.mname
    for field_name, method in layers:
        label = get_label(rfields, field_name, tablename, "fact")
        mname = get_mname(method)
        if not labels:
            # First layer determines the chart title; "list" layers
            # chart as "count"
            m = method == "list" and get_mname("count") or mname
            layer_label = "%s (%s)" % (label, m)
        labels.append("%s (%s)" % (label, mname))
    layers_title = TH(" / ".join(labels))

    # Columns field title
    if cols:
        col_label = get_label(rfields, cols, tablename, "cols")
        _colspan = numcols + 1
    else:
        col_label = ""
        _colspan = numcols
    cols_title = TH(col_label, _colspan=_colspan, _scope="col")

    titles = TR(layers_title, cols_title)

    # Rows field title
    row_label = get_label(rfields, rows, tablename, "rows")
    rows_title = TH(row_label, _scope="col")

    headers = TR(rows_title)
    add_header = headers.append

    # Column headers
    values = report.col
    for i in xrange(numcols):
        value = values[i].value
        v = represent(cols, value)
        add_col_title(s3_truncate(unicode(v)))
        colhdr = TH(v, _scope="col")
        add_header(colhdr)

    # Row totals header
    if show_totals and cols is not None:
        add_header(TH(TOTAL, _class="totals_header rtotal",
                      _scope="col"))

    thead = THEAD(titles, headers)

    # Table body ----------------------------------------------------------
    #
    tbody = TBODY()
    add_row = tbody.append

    # lookup table for cell list values
    cell_lookup_table = {} # {{}, {}}

    cells = report.cell
    rvals = report.row

    for i in xrange(numrows):

        # Initialize row (alternating row classes for styling)
        _class = i % 2 and "odd" or "even"
        tr = TR(_class=_class)
        add_cell = tr.append

        # Row header
        row = rvals[i]
        v = represent(rows, row.value)
        add_row_title(s3_truncate(unicode(v)))
        rowhdr = TD(v)
        add_cell(rowhdr)

        # Result cells
        for j in xrange(numcols):
            cell = cells[i][j]
            vals = []
            cell_ids = []
            add_value = vals.append
            for layer_idx, layer in enumerate(layers):
                f, m = layer
                value = cell[layer]
                if m == "list":
                    if isinstance(value, list):
                        l = [represent(f, v, d="-") for v in value]
                    elif value is None:
                        l = "-"
                    else:
                        if type(value) in (int, float):
                            l = IS_NUMBER.represent(value)
                        else:
                            l = unicode(value)
                    # NOTE(review): for scalar values l is a plain
                    # string here, so join() interleaves ", " between
                    # its characters; later variants of this renderer
                    # use l = ["-"] / list forms instead — TODO confirm
                    add_value(", ".join(l))
                else:
                    if type(value) in (int, float):
                        add_value(IS_NUMBER.represent(value))
                    else:
                        add_value(unicode(value))

                # hold the references
                layer_ids = []
                # get previous lookup values for this layer
                layer_values = cell_lookup_table.get(layer_idx, {})

                if m == "count":
                    for id in cell.records:
                        # cell.records == [#, #, #]
                        field = rfields[f].field
                        record = report.records[id]

                        if field.tablename in record:
                            fvalue = record[field.tablename][field.name]
                        else:
                            fvalue = record[field.name]

                        if fvalue is not None:
                            if s3_has_foreign_key(field):
                                if not isinstance(fvalue, list):
                                    fvalue = [fvalue]
                                # list of foreign keys
                                for fk in fvalue:
                                    if fk not in layer_ids:
                                        layer_ids.append(fk)
                                        layer_values[fk] = str(field.represent(fk))
                            else:
                                if id not in layer_ids:
                                    layer_ids.append(id)
                                    layer_values[id] = s3_unicode(represent(f, fvalue))

                cell_ids.append(layer_ids)
                cell_lookup_table[layer_idx] = layer_values

            vals = " / ".join(vals)

            if any(cell_ids):
                # Cell has record references => render zoom link
                cell_attr = {
                    "_data-records": cell_ids
                }
                vals = (A(_class="report-cell-zoom"), vals)
            else:
                cell_attr = {}
            add_cell(TD(vals, **cell_attr))

        # Row total
        totals = get_total(row, layers, append=add_row_total)
        if show_totals and cols is not None:
            add_cell(TD(totals))

        add_row(tr)

    # Table footer --------------------------------------------------------
    #
    i = numrows
    _class = i % 2 and "odd" or "even"
    _class = "%s %s" % (_class, "totals_row")

    col_total = TR(_class=_class)
    add_total = col_total.append
    add_total(TH(TOTAL, _class="totals_header", _scope="row"))

    # Column totals
    for j in xrange(numcols):
        col = report.col[j]
        totals = get_total(col, layers, append=add_col_total)
        add_total(TD(IS_NUMBER.represent(totals)))

    # Grand total
    if cols is not None:
        grand_totals = get_total(report.totals, layers)
        add_total(TD(grand_totals))
    tfoot = TFOOT(col_total)

    # Wrap up -------------------------------------------------------------
    #
    append = components.append
    append(thead)
    append(tbody)
    if show_totals:
        append(tfoot)

    # Chart data ----------------------------------------------------------
    #
    drows = dcols = None
    BY = T("by")
    top = self._top
    if rows and row_titles and row_totals:
        drows = top(zip(row_titles, row_totals))
    if cols and col_titles and col_totals:
        dcols = top(zip(col_titles, col_totals))
    row_label = "%s %s" % (BY, str(row_label))
    if col_label:
        col_label = "%s %s" % (BY, str(col_label))
    layer_label=str(layer_label)
    json_data = json.dumps(dict(rows=drows,
                                cols=dcols,
                                row_label=row_label,
                                col_label=col_label,
                                layer_label=layer_label,
                                cell_lookup_table=cell_lookup_table
                                ))
    self.report_data = Storage(row_label=row_label,
                               col_label=col_label,
                               layer_label=layer_label,
                               json_data=json_data)
def json(self,
         fields=None,
         labels=None,
         represent=None,
         as_dict=False,
         master=True):
    """
    Serialize this group as JSON

    @param fields: the columns to include for each item
    @param labels: columns labels as dict {key: label},
                   including the labels for grouping axes
    @param represent: dict of representation methods for grouping
                      axis values {colname: function}
    @param as_dict: return output as dict rather than JSON string
    @param master: this is the top-level group (internal)

    @raises SyntaxError: if no fields are specified

    JSON Format:

    {"c": [key, ...],          ....... list of keys for visible columns
     "g": [key, ...],          ....... list of keys for grouping axes
     "l": [(key, label), ...], ....... list of key-label pairs
     "k": key,                 ....... grouping key for subgroups
     "d": [                    ....... list of sub-groups
         {"v": string,         ....... the grouping value for this
                                       subgroup (represented)
          "k": key             ....... the grouping key for subgroups
          "d": [...]           ....... list of subgroups (nested)
          "i": [               ....... list of items in this group
              {key: value,     ....... key-value pairs for visible
                                       columns
              }, ...
          ],
          "t": {               ....... list of group totals
              key: value,      ....... key-value pairs for totals
          }
         }, ...
     ],
     "i": [...],               ....... list of items (if no grouping)
     "t": [...],               ....... list of grand totals
     "e": boolean              ....... empty-flag
    }
    """

    T = current.T

    output = {}

    if not fields:
        raise SyntaxError

    if master:
        # Add columns and grouping information to top level group
        if labels is None:
            labels = {}

        def check_label(colname):
            # Resolve/derive the label for a column, caching it in
            # the labels dict (derived from field name if missing)
            if colname in labels:
                label = labels[colname] or ""
            else:
                fname = colname.split(".", 1)[-1]
                label = " ".join([s.strip().capitalize()
                                  for s in fname.split("_") if s])
                label = labels[colname] = T(label)
            return str(label)

        grouping = []
        groupby = self.groupby
        if groupby:
            for axis in groupby:
                check_label(axis)
                grouping.append(axis)
        output["g"] = grouping

        columns = []
        for colname in fields:
            check_label(colname)
            columns.append(colname)
        output["c"] = columns

        output["l"] = dict((c, str(l)) for c, l in labels.items())

    key = self.key
    if key:
        # This group has subgroups
        output["k"] = key

        data = []
        add_group = data.append
        for group in self.groups:
            # Render subgroup
            gdict = group.json(fields, labels,
                               represent = represent,
                               as_dict = True,
                               master = False,
                               )
            # Add subgroup attribute value
            value = group[key]
            renderer = represent.get(key) if represent else None
            if renderer is None:
                value = s3_unicode(value).encode("utf-8")
            else:
                # @todo: call bulk-represent if available
                value = s3_unicode(renderer(value)).encode("utf-8")
            gdict["v"] = value
            add_group(gdict)

        if master:
            output["e"] = len(data) == 0
        output["d"] = data
        output["i"] = None
    else:
        # Leaf group: render the items
        oitems = []
        add_item = oitems.append
        for item in self.items:
            # Render item
            oitem = {}
            for colname in fields:
                if colname in item:
                    value = item[colname] or ""
                else:
                    # Fall back to raw value
                    raw = item.get("_row")
                    try:
                        value = raw.get(colname)
                    except (AttributeError, TypeError):
                        # _row is not a dict
                        value = None
                if value is None:
                    value = ""
                else:
                    value = s3_unicode(value).encode("utf-8")
                oitem[colname] = value
            add_item(oitem)
        if master:
            output["e"] = len(oitems) == 0
        output["d"] = None
        output["i"] = oitems

    # Render group totals
    aggregates = self._aggregates
    totals = {}
    for k, a in aggregates.items():
        method, colname = k
        # @todo: call represent for totals
        totals[colname] = s3_unicode(a.result).encode("utf-8")
    output["t"] = totals

    # Convert to JSON unless requested otherwise
    if master and not as_dict:
        output = json.dumps(output, separators=SEPARATORS)

    return output
def __init__(self, report,
             show_totals=True,
             url=None,
             filter_query=None,
             **attributes):
    """
    Constructor: render an S3Pivottable as an HTML table with
    sorted row/column dimensions, custom layer labels from the
    report options, and chart data (JSON) for the client side.

    @param report: the S3Pivottable instance
    @param show_totals: show totals for rows and columns
    @param url: link cells to this base-URL
    @param filter_query: use this S3ResourceQuery with the base-URL
    @param attributes: the HTML attributes for the table
    """

    T = current.T
    TOTAL = T("Total")

    TABLE.__init__(self, **attributes)
    components = self.components = []
    self.json_data = None

    layers = report.layers
    resource = report.resource
    tablename = resource.tablename

    cols = report.cols
    rows = report.rows
    numcols = report.numcols
    numrows = report.numrows
    rfields = report.rfields

    get_label = self._get_label
    get_total = self._totals
    represent = lambda f, v, d="": \
                self._represent(rfields, f, v, default=d)

    layer_label = None
    col_titles = []
    add_col_title = col_titles.append
    col_totals = []
    add_col_total = col_totals.append
    row_titles = []
    add_row_title = row_titles.append
    row_totals = []
    add_row_total = row_totals.append

    # Layer titles:

    # Get custom labels from report options
    layer_labels = Storage()
    report_options = resource.get_config("report_options", None)
    if report_options and "fact" in report_options:
        layer_opts = report_options["fact"]
        for item in layer_opts:
            if isinstance(item, (tuple, list)) and len(item) == 3:
                # Normalize un-prefixed selectors to resource alias
                if not "." in item[0].split("$")[0]:
                    item = ("%s.%s" % (resource.alias, item[0]),
                            item[1],
                            item[2])
                layer_labels[(item[0], item[1])] = item[2]

    labels = []
    get_mname = S3Report.mname

    for layer in layers:
        if layer in layer_labels:
            # Custom label
            label = layer_labels[layer]
            if not labels:
                layer_label = label
            labels.append(s3_unicode(label))
        else:
            # Construct label from field-label and method
            label = get_label(rfields, layer[0], tablename, "fact")
            mname = get_mname(layer[1])
            if not labels:
                # "list" layers chart as "count"
                m = layer[1] == "list" and get_mname("count") or mname
                layer_label = "%s (%s)" % (label, m)
            labels.append("%s (%s)" % (label, mname))

    layers_title = TH(" / ".join(labels))

    # Columns field title
    if cols:
        col_label = get_label(rfields, cols, tablename, "cols")
        _colspan = numcols + 1
    else:
        col_label = ""
        _colspan = numcols
    cols_title = TH(col_label, _colspan=_colspan, _scope="col")

    titles = TR(layers_title, cols_title)

    # Sort dimensions:

    cells = report.cell

    def sortdim(dim, items):
        """ Sort a dimension (in-place), by value or by
            represented text depending on the field type """
        rfield = rfields[dim]
        if not rfield:
            return
        ftype = rfield.ftype
        sortby = "value"
        if ftype == "integer":
            # Integers backed by a fixed option set sort by label
            requires = rfield.requires
            if isinstance(requires, (tuple, list)):
                requires = requires[0]
            if isinstance(requires, IS_EMPTY_OR):
                requires = requires.other
            if isinstance(requires, IS_IN_SET):
                sortby = "text"
        elif ftype[:9] == "reference":
            sortby = "text"
        items.sort(key=lambda item: item[0][sortby])

    # Sort rows
    rvals = report.row
    rows_list = []
    for i in xrange(numrows):
        row = rvals[i]
        # Add representation value of the row header
        row["text"] = represent(rows, row.value)
        rows_list.append((row, cells[i]))
    sortdim(rows, rows_list)

    # Sort columns
    cvals = report.col
    cols_list = []
    for j in xrange(numcols):
        column = cvals[j]
        column["text"] = represent(cols, column.value)
        cols_list.append((column, j))
    sortdim(cols, cols_list)

    # Build the column headers:

    # Header for the row-titles column
    row_label = get_label(rfields, rows, tablename, "rows")
    rows_title = TH(row_label, _scope="col")

    headers = TR(rows_title)
    add_header = headers.append

    # Headers for the cell columns
    for j in xrange(numcols):
        v = cols_list[j][0].text
        add_col_title(s3_truncate(unicode(v)))
        colhdr = TH(v, _scope="col")
        add_header(colhdr)

    # Header for the row-totals column
    if show_totals and cols is not None:
        add_header(TH(TOTAL, _class="totals_header rtotal",
                      _scope="col"))

    thead = THEAD(titles, headers)

    # Render the table body:

    tbody = TBODY()
    add_row = tbody.append

    # Lookup table for cell list values
    cell_lookup_table = {} # {{}, {}}
    # Map of raw cell value => synthetic id (for non-fk values)
    cell_vals = Storage()

    for i in xrange(numrows):

        # Initialize row (alternating row classes for styling)
        _class = i % 2 and "odd" or "even"
        tr = TR(_class=_class)
        add_cell = tr.append

        # Row header
        row = rows_list[i][0]
        v = row["text"]
        add_row_title(s3_truncate(unicode(v)))
        rowhdr = TD(v)
        add_cell(rowhdr)

        row_cells = rows_list[i][1]

        # Result cells (accessed via the sorted column index)
        for j in xrange(numcols):
            cell_idx = cols_list[j][1]
            cell = row_cells[cell_idx]

            vals = []
            cell_ids = []
            add_value = vals.append
            for layer_idx, layer in enumerate(layers):
                f, m = layer
                value = cell[layer]
                if m == "list":
                    if isinstance(value, list):
                        l = [represent(f, v, d="-") for v in value]
                    elif value is None:
                        l = ["-"]
                    else:
                        # NOTE(review): scalar fall-through makes l a
                        # plain string, so the UL below iterates its
                        # characters — TODO confirm intended
                        if type(value) in (int, float):
                            l = IS_NUMBER.represent(value)
                        else:
                            l = unicode(value)
                    #add_value(", ".join(l))
                    add_value(UL([LI(v) for v in l]))
                else:
                    if type(value) in (int, float):
                        add_value(IS_NUMBER.represent(value))
                    else:
                        add_value(unicode(value))

                # hold the record references for cell-zoom
                layer_ids = []
                # get previous lookup values for this layer
                layer_values = cell_lookup_table.get(layer_idx, {})

                if m == "count":
                    rfield = rfields[f]
                    field = rfield.field
                    colname = rfield.colname
                    has_fk = field is not None and s3_has_foreign_key(field)
                    for id in cell.records:

                        record = report.records[id]
                        try:
                            fvalue = record[colname]
                        except AttributeError:
                            fvalue = None

                        if fvalue is not None:
                            if has_fk:
                                if type(fvalue) is not list:
                                    fvalue = [fvalue]
                                # list of foreign keys
                                for fk in fvalue:
                                    if fk is not None and fk not in layer_ids:
                                        layer_ids.append(int(fk))
                                        layer_values[fk] = s3_unicode(field.represent(fk))
                            else:
                                if type(fvalue) is not list:
                                    fvalue = [fvalue]
                                for val in fvalue:
                                    if val is not None:
                                        # Assign synthetic ids per
                                        # distinct raw value
                                        if val not in cell_vals:
                                            next_id = len(cell_vals)
                                            cell_vals[val] = next_id
                                            layer_ids.append(next_id)
                                            layer_values[next_id] = s3_unicode(represent(f, val))
                                        else:
                                            prev_id = cell_vals[val]
                                            if prev_id not in layer_ids:
                                                layer_ids.append(prev_id)

                cell_ids.append(layer_ids)
                cell_lookup_table[layer_idx] = layer_values

            vals = [DIV(v, _class="report-cell-value") for v in vals]
            if any(cell_ids):
                # Cell has record references => render zoom control
                cell_attr = {"_data-records": cell_ids}
                vals.append(DIV(_class="report-cell-zoom"))
            else:
                cell_attr = {}
            add_cell(TD(vals, **cell_attr))

        # Row total
        totals = get_total(row, layers, append=add_row_total)
        if show_totals and cols is not None:
            add_cell(TD(totals))

        add_row(tr)

    # Table footer:

    i = numrows
    _class = i % 2 and "odd" or "even"
    _class = "%s %s" % (_class, "totals_row")

    col_total = TR(_class=_class)
    add_total = col_total.append
    add_total(TH(TOTAL, _class="totals_header", _scope="row"))

    # Column totals (via the sorted column index)
    for j in xrange(numcols):
        cell_idx = cols_list[j][1]
        col = report.col[cell_idx]
        totals = get_total(col, layers, append=add_col_total)
        add_total(TD(IS_NUMBER.represent(totals)))

    # Grand total
    if cols is not None:
        grand_totals = get_total(report.totals, layers)
        add_total(TD(grand_totals))
    tfoot = TFOOT(col_total)

    # Wrap up:

    append = components.append
    append(thead)
    append(tbody)
    if show_totals:
        append(tfoot)

    # Chart data:

    layer_label = s3_unicode(layer_label)
    BY = T("by")
    row_label = "%s %s" % (BY, s3_unicode(row_label))
    if col_label:
        col_label = "%s %s" % (BY, s3_unicode(col_label))

    if filter_query and hasattr(filter_query, "serialize_url"):
        filter_vars = filter_query.serialize_url(resource=report.resource)
    else:
        filter_vars = {}
    hide_opts = current.deployment_settings.get_ui_hide_report_options()
    json_data = json.dumps(dict(t=layer_label,
                                x=col_label,
                                y=row_label,
                                r=report.rows,
                                c=report.cols,
                                d=report.compact(n=50, represent=True),
                                u=url,
                                f=filter_vars,
                                h=hide_opts,
                                cell_lookup_table=cell_lookup_table))
    self.report_data = Storage(row_label=row_label,
                               col_label=col_label,
                               layer_label=layer_label,
                               json_data=json_data)
def json(self,
         fields=None,
         labels=None,
         represent=None,
         as_dict=False,
         master=True):
    """
    Serialize this group as JSON (variant without totals and
    without the empty-flag).

    @param fields: the columns to include for each item
    @param labels: columns labels as dict {key: label},
                   including the labels for grouping axes
    @param represent: dict of representation methods for grouping
                      axis values {colname: function}
    @param as_dict: return output as dict rather than JSON string
    @param master: this is the top-level group (internal)

    @raises SyntaxError: if no fields are specified
    """

    T = current.T

    output = {}

    if not fields:
        raise SyntaxError

    if master:
        # Add columns and grouping information to top level group
        if labels is None:
            labels = {}

        def check_label(colname):
            # Resolve/derive the label for a column, caching it in
            # the labels dict (derived from field name if missing)
            if colname in labels:
                label = labels[colname] or ""
            else:
                fname = colname.split(".", 1)[-1]
                label = " ".join([s.strip().capitalize()
                                  for s in fname.split("_") if s])
                label = labels[colname] = T(label)
            return str(label)

        grouping = []
        groupby = self.groupby
        if groupby:
            for axis in groupby:
                check_label(axis)
                grouping.append(axis)
        output["g"] = grouping

        columns = []
        for colname in fields:
            check_label(colname)
            columns.append(colname)
        output["c"] = columns

        output["l"] = dict((c, str(l)) for c, l in labels.items())

    key = self.key
    if key:
        # This group has subgroups
        output["k"] = key

        data = []
        add_group = data.append
        for group in self.groups:
            # Render subgroup
            gdict = group.json(fields, labels,
                               represent=represent,
                               as_dict=True,
                               master=False,
                               )
            # Add subgroup attribute value
            value = group[key]
            renderer = represent.get(key) if represent else None
            if renderer is None:
                value = s3_unicode(value).encode("utf-8")
            else:
                # @todo: call bulk-represent if available
                value = s3_unicode(renderer(value)).encode("utf-8")
            gdict["v"] = value
            add_group(gdict)
        output["d"] = data
        output["i"] = None
    else:
        # Leaf group: render the items
        oitems = []
        add_item = oitems.append
        for item in self.items:
            # Render item
            oitem = {}
            for colname in fields:
                if colname in item:
                    value = item[colname] or ""
                else:
                    # Fall back to raw value
                    raw = item.get("_row")
                    try:
                        value = raw.get(colname)
                    except (AttributeError, TypeError):
                        # _row is not a dict
                        value = None
                if value is None:
                    value = ""
                else:
                    value = s3_unicode(value).encode("utf-8")
                oitem[colname] = value
            add_item(oitem)
        output["d"] = None
        output["i"] = oitems

    # Convert to JSON unless requested otherwise
    if master and not as_dict:
        output = json.dumps(output, separators=SEPARATORS)

    return output
def __init__(self, report,
             show_totals=True,
             url=None,
             filter_query=None,
             **attributes):
    """
    Constructor: render an S3Pivottable as an HTML table
    (unsorted dimensions variant) and prepare chart data (JSON)
    for the client side.

    @param report: the S3Pivottable instance
    @param show_totals: show totals for rows and columns
    @param url: link cells to this base-URL
    @param filter_query: use this S3ResourceQuery with the base-URL
    @param attributes: the HTML attributes for the table
    """

    T = current.T
    TOTAL = T("Total")

    TABLE.__init__(self, **attributes)
    components = self.components = []
    self.json_data = None

    layers = report.layers
    resource = report.resource
    tablename = resource.tablename

    cols = report.cols
    rows = report.rows
    numcols = report.numcols
    numrows = report.numrows
    rfields = report.rfields

    get_label = self._get_label
    get_total = self._totals
    represent = lambda f, v, d="": \
                self._represent(rfields, f, v, default=d)

    layer_label = None
    col_titles = []
    add_col_title = col_titles.append
    col_totals = []
    add_col_total = col_totals.append
    row_titles = []
    add_row_title = row_titles.append
    row_totals = []
    add_row_total = row_totals.append

    # Layer titles --------------------------------------------------------
    labels = []
    get_mname = S3Report.mname
    for field_name, method in layers:
        # @todo: get the layer label from the report options
        label = get_label(rfields, field_name, tablename, "fact")
        mname = get_mname(method)
        if not labels:
            # "list" layers chart as "count"
            m = method == "list" and get_mname("count") or mname
            layer_label = "%s (%s)" % (label, m)
        labels.append("%s (%s)" % (label, mname))
    layers_title = TH(" / ".join(labels))

    # Columns field title -------------------------------------------------
    if cols:
        col_label = get_label(rfields, cols, tablename, "cols")
        _colspan = numcols + 1
    else:
        col_label = ""
        _colspan = numcols
    cols_title = TH(col_label, _colspan=_colspan, _scope="col")

    titles = TR(layers_title, cols_title)

    # Rows field title ----------------------------------------------------
    row_label = get_label(rfields, rows, tablename, "rows")
    rows_title = TH(row_label, _scope="col")

    headers = TR(rows_title)

    # Column headers ------------------------------------------------------
    add_header = headers.append
    values = report.col
    for i in xrange(numcols):
        value = values[i].value
        v = represent(cols, value)
        add_col_title(s3_truncate(unicode(v)))
        colhdr = TH(v, _scope="col")
        add_header(colhdr)

    # Row totals header ---------------------------------------------------
    if show_totals and cols is not None:
        add_header(TH(TOTAL, _class="totals_header rtotal",
                      _scope="col"))

    thead = THEAD(titles, headers)

    # Table body ----------------------------------------------------------
    tbody = TBODY()
    add_row = tbody.append

    # lookup table for cell list values
    cell_lookup_table = {} # {{}, {}}

    cells = report.cell
    rvals = report.row

    for i in xrange(numrows):

        # Initialize row (alternating row classes for styling)
        _class = i % 2 and "odd" or "even"
        tr = TR(_class=_class)
        add_cell = tr.append

        # Row header
        row = rvals[i]
        v = represent(rows, row.value)
        add_row_title(s3_truncate(unicode(v)))
        rowhdr = TD(v)
        add_cell(rowhdr)

        # Result cells
        for j in xrange(numcols):
            cell = cells[i][j]
            vals = []
            cell_ids = []
            add_value = vals.append
            for layer_idx, layer in enumerate(layers):
                f, m = layer
                value = cell[layer]
                if m == "list":
                    if isinstance(value, list):
                        l = [represent(f, v, d="-") for v in value]
                    elif value is None:
                        l = ["-"]
                    else:
                        # NOTE(review): scalar fall-through makes l a
                        # plain string, so the UL below iterates its
                        # characters — TODO confirm intended
                        if type(value) in (int, float):
                            l = IS_NUMBER.represent(value)
                        else:
                            l = unicode(value)
                    #add_value(", ".join(l))
                    add_value(UL([LI(v) for v in l]))
                else:
                    if type(value) in (int, float):
                        add_value(IS_NUMBER.represent(value))
                    else:
                        add_value(unicode(value))

                # hold the references
                layer_ids = []
                # get previous lookup values for this layer
                layer_values = cell_lookup_table.get(layer_idx, {})

                if m == "count":
                    rfield = rfields[f]
                    field = rfield.field
                    colname = rfield.colname
                    has_fk = field is not None and s3_has_foreign_key(
                        field)
                    for id in cell.records:
                        # cell.records == [#, #, #]
                        record = report.records[id]
                        try:
                            fvalue = record[colname]
                        except AttributeError:
                            fvalue = None

                        if fvalue is not None:
                            if has_fk:
                                if not isinstance(fvalue, list):
                                    fvalue = [fvalue]
                                # list of foreign keys
                                for fk in fvalue:
                                    if fk not in layer_ids:
                                        layer_ids.append(fk)
                                        layer_values[fk] = str(
                                            field.represent(fk))
                            else:
                                if id not in layer_ids:
                                    layer_ids.append(id)
                                    layer_values[id] = s3_unicode(
                                        represent(f, fvalue))

                cell_ids.append(layer_ids)
                cell_lookup_table[layer_idx] = layer_values

            # @todo: with multiple layers - show the first, hide the rest
            #        + render layer selector in the layer title corner to
            #        + switch between layers
            #        OR: give every layer a title row (probably better method)
            vals = DIV([DIV(v) for v in vals])

            if any(cell_ids):
                # Cell has record references => render zoom link
                cell_attr = {"_data-records": cell_ids}
                vals = (A(_class="report-cell-zoom"), vals)
            else:
                cell_attr = {}
            add_cell(TD(vals, **cell_attr))

        # Row total
        totals = get_total(row, layers, append=add_row_total)
        if show_totals and cols is not None:
            add_cell(TD(totals))

        add_row(tr)

    # Table footer --------------------------------------------------------
    i = numrows
    _class = i % 2 and "odd" or "even"
    _class = "%s %s" % (_class, "totals_row")

    col_total = TR(_class=_class)
    add_total = col_total.append
    add_total(TH(TOTAL, _class="totals_header", _scope="row"))

    # Column totals -------------------------------------------------------
    for j in xrange(numcols):
        col = report.col[j]
        totals = get_total(col, layers, append=add_col_total)
        add_total(TD(IS_NUMBER.represent(totals)))

    # Grand total ---------------------------------------------------------
    if cols is not None:
        grand_totals = get_total(report.totals, layers)
        add_total(TD(grand_totals))
    tfoot = TFOOT(col_total)

    # Wrap up -------------------------------------------------------------
    append = components.append
    append(thead)
    append(tbody)
    if show_totals:
        append(tfoot)

    # Chart data ----------------------------------------------------------
    layer_label = s3_unicode(layer_label)
    BY = T("by")
    row_label = "%s %s" % (BY, s3_unicode(row_label))
    if col_label:
        col_label = "%s %s" % (BY, s3_unicode(col_label))

    if filter_query and hasattr(filter_query, "serialize_url"):
        filter_vars = filter_query.serialize_url(resource=report.resource)
    else:
        filter_vars = {}
    json_data = json.dumps(
        dict(t=layer_label,
             x=col_label,
             y=row_label,
             r=report.rows,
             c=report.cols,
             d=report.compact(n=50, represent=True),
             u=url,
             f=filter_vars,
             cell_lookup_table=cell_lookup_table))
    self.report_data = Storage(row_label=row_label,
                               col_label=col_label,
                               layer_label=layer_label,
                               json_data=json_data)
def __init__(self, report, show_totals=True, url=None, filter_query=None, **attributes):
    """ Constructor - builds the pivot table as HTML TABLE
        (THEAD/TBODY/TFOOT components) and prepares the chart
        data (JSON) in self.report_data.

        This variant sorts both dimensions (see sortdim below)
        before rendering.

        @param report: the S3Pivottable instance
        @param show_totals: show totals for rows and columns
        @param url: link cells to this base-URL
        @param filter_query: use this S3ResourceQuery with the base-URL
        @param attributes: the HTML attributes for the table
    """

    T = current.T
    TOTAL = T("Total")

    TABLE.__init__(self, **attributes)
    components = self.components = []
    self.json_data = None

    layers = report.layers
    resource = report.resource
    tablename = resource.tablename

    cols = report.cols
    rows = report.rows
    numcols = report.numcols
    numrows = report.numrows
    rfields = report.rfields

    get_label = self._get_label
    get_total = self._totals
    # Shortcut for cell value representation (d = default if no value)
    represent = lambda f, v, d="": \
                self._represent(rfields, f, v, default=d)

    layer_label = None
    col_titles = []
    add_col_title = col_titles.append
    col_totals = []
    add_col_total = col_totals.append
    row_titles = []
    add_row_title = row_titles.append
    row_totals = []
    add_row_total = row_totals.append

    # Layer titles:
    # Get custom labels from report options ("fact" items can carry a
    # custom label as third tuple element)
    layer_labels = Storage()
    report_options = resource.get_config("report_options", None)
    if report_options and "fact" in report_options:
        layer_opts = report_options["fact"]
        for item in layer_opts:
            if isinstance(item, (tuple, list)) and len(item) == 3:
                if not "." in item[0].split("$")[0]:
                    # Prefix selector with the resource alias
                    item = ("%s.%s" % (resource.alias, item[0]),
                            item[1], item[2])
                layer_labels[(item[0], item[1])] = item[2]

    labels = []
    get_mname = S3Report.mname

    for layer in layers:
        if layer in layer_labels:
            # Custom label
            label = layer_labels[layer]
            if not labels:
                layer_label = label
            labels.append(s3_unicode(label))
        else:
            # Construct label from field-label and method
            label = get_label(rfields, layer[0], tablename, "fact")
            mname = get_mname(layer[1])
            if not labels:
                # For the chart title, "list" layers are labelled as "count"
                m = layer[1] == "list" and get_mname("count") or mname
                layer_label = "%s (%s)" % (label, m)
            labels.append("%s (%s)" % (label, mname))

    layers_title = TH(" / ".join(labels))

    # Columns field title
    if cols:
        col_label = get_label(rfields, cols, tablename, "cols")
        _colspan = numcols + 1
    else:
        col_label = ""
        _colspan = numcols
    cols_title = TH(col_label, _colspan=_colspan, _scope="col")

    titles = TR(layers_title, cols_title)

    # Sort dimensions:
    cells = report.cell

    def sortdim(dim, items):
        """ Sort a dimension - by the raw value, except for
            fixed-option integer fields and references, which
            sort by their text representation
        """
        rfield = rfields[dim]
        if not rfield:
            return
        ftype = rfield.ftype
        sortby = "value"
        if ftype == "integer":
            requires = rfield.requires
            if isinstance(requires, (tuple, list)):
                requires = requires[0]
            if isinstance(requires, IS_EMPTY_OR):
                requires = requires.other
            if isinstance(requires, IS_IN_SET):
                # Fixed-option field => sort by option label
                sortby = "text"
        elif ftype[:9] == "reference":
            sortby = "text"
        items.sort(key=lambda item: item[0][sortby])

    # Sort rows
    rvals = report.row
    rows_list = []
    for i in xrange(numrows):
        row = rvals[i]
        # Add representation value of the row header
        row["text"] = represent(rows, row.value)
        # Keep the cell row together with its header so sorting
        # does not detach headers from their data
        rows_list.append((row, cells[i]))
    sortdim(rows, rows_list)

    # Sort columns (keep the original column index for cell lookups)
    cvals = report.col
    cols_list = []
    for j in xrange(numcols):
        column = cvals[j]
        column["text"] = represent(cols, column.value)
        cols_list.append((column, j))
    sortdim(cols, cols_list)

    # Build the column headers:
    # Header for the row-titles column
    row_label = get_label(rfields, rows, tablename, "rows")
    rows_title = TH(row_label, _scope="col")
    headers = TR(rows_title)
    add_header = headers.append

    # Headers for the cell columns
    for j in xrange(numcols):
        v = cols_list[j][0].text
        add_col_title(s3_truncate(unicode(v)))
        colhdr = TH(v, _scope="col")
        add_header(colhdr)

    # Header for the row-totals column
    if show_totals and cols is not None:
        add_header(TH(TOTAL, _class="totals_header rtotal", _scope="col"))

    thead = THEAD(titles, headers)

    # Render the table body:
    tbody = TBODY()
    add_row = tbody.append

    # Lookup table for cell list values
    # {layer_idx: {id: text-representation}} - used client-side by the
    # cell-zoom feature to resolve the ids in "_data-records"
    cell_lookup_table = {} # {{}, {}}

    # Maps raw cell values to synthetic ids for non-FK count layers
    cell_vals = Storage()

    for i in xrange(numrows):

        # Initialize row (alternate odd/even row classes)
        _class = i % 2 and "odd" or "even"
        tr = TR(_class=_class)
        add_cell = tr.append

        # Row header
        row = rows_list[i][0]
        v = row["text"]
        add_row_title(s3_truncate(unicode(v)))
        rowhdr = TD(v)
        add_cell(rowhdr)

        row_cells = rows_list[i][1]

        # Result cells
        for j in xrange(numcols):
            # Use the pre-sort column index to address the cell
            cell_idx = cols_list[j][1]
            cell = row_cells[cell_idx]
            vals = []
            cell_ids = []
            add_value = vals.append
            for layer_idx, layer in enumerate(layers):
                f, m = layer
                value = cell[layer]
                if m == "list":
                    if isinstance(value, list):
                        l = [represent(f, v, d="-") for v in value]
                    elif value is None:
                        l = ["-"]
                    else:
                        if type(value) in (int, float):
                            l = IS_NUMBER.represent(value)
                        else:
                            l = unicode(value)
                    #add_value(", ".join(l))
                    add_value(UL([LI(v) for v in l]))
                else:
                    if type(value) in (int, float):
                        add_value(IS_NUMBER.represent(value))
                    else:
                        add_value(unicode(value))

                # Hold the record references for this cell/layer
                layer_ids = []
                # Get previous lookup values for this layer
                layer_values = cell_lookup_table.get(layer_idx, {})

                if m == "count":
                    rfield = rfields[f]
                    field = rfield.field
                    colname = rfield.colname
                    has_fk = field is not None and s3_has_foreign_key(field)
                    for id in cell.records:
                        record = report.records[id]
                        try:
                            fvalue = record[colname]
                        except AttributeError:
                            fvalue = None
                        if fvalue is not None:
                            if has_fk:
                                if type(fvalue) is not list:
                                    fvalue = [fvalue]
                                # list of foreign keys
                                for fk in fvalue:
                                    if fk is not None and fk not in layer_ids:
                                        layer_ids.append(int(fk))
                                        layer_values[fk] = s3_unicode(field.represent(fk))
                            else:
                                if type(fvalue) is not list:
                                    fvalue = [fvalue]
                                for val in fvalue:
                                    if val is not None:
                                        # Non-FK values get synthetic ids,
                                        # de-duplicated via cell_vals
                                        if val not in cell_vals:
                                            next_id = len(cell_vals)
                                            cell_vals[val] = next_id
                                            layer_ids.append(next_id)
                                            layer_values[next_id] = s3_unicode(represent(f, val))
                                        else:
                                            prev_id = cell_vals[val]
                                            if prev_id not in layer_ids:
                                                layer_ids.append(prev_id)

                cell_ids.append(layer_ids)
                cell_lookup_table[layer_idx] = layer_values

            vals = [DIV(v, _class="report-cell-value") for v in vals]
            if any(cell_ids):
                # Attach record references for the client-side cell zoom
                cell_attr = {"_data-records": cell_ids}
                vals.append(DIV(_class="report-cell-zoom"))
            else:
                cell_attr = {}
            add_cell(TD(vals, **cell_attr))

        # Row total
        totals = get_total(row, layers, append=add_row_total)
        if show_totals and cols is not None:
            add_cell(TD(totals))

        add_row(tr)

    # Table footer:
    i = numrows
    _class = i % 2 and "odd" or "even"
    _class = "%s %s" % (_class, "totals_row")

    col_total = TR(_class=_class)
    add_total = col_total.append
    add_total(TH(TOTAL, _class="totals_header", _scope="row"))

    # Column totals (addressed by pre-sort index, same as the cells)
    for j in xrange(numcols):
        cell_idx = cols_list[j][1]
        col = report.col[cell_idx]
        totals = get_total(col, layers, append=add_col_total)
        add_total(TD(IS_NUMBER.represent(totals)))

    # Grand total
    if cols is not None:
        grand_totals = get_total(report.totals, layers)
        add_total(TD(grand_totals))

    tfoot = TFOOT(col_total)

    # Wrap up:
    append = components.append
    append(thead)
    append(tbody)
    if show_totals:
        append(tfoot)

    # Chart data:
    layer_label = s3_unicode(layer_label)
    BY = T("by")
    row_label = "%s %s" % (BY, s3_unicode(row_label))
    if col_label:
        col_label = "%s %s" % (BY, s3_unicode(col_label))
    if filter_query and hasattr(filter_query, "serialize_url"):
        filter_vars = filter_query.serialize_url(resource=report.resource)
    else:
        filter_vars = {}
    hide_opts = current.deployment_settings.get_ui_hide_report_options()
    # NOTE(review): single-letter keys (t/x/y/r/c/d/u/f/h) are presumably
    # consumed by the client-side report script - confirm before renaming
    json_data = json.dumps(dict(t=layer_label,
                                x=col_label,
                                y=row_label,
                                r=report.rows,
                                c=report.cols,
                                d=report.compact(n=50, represent=True),
                                u=url,
                                f=filter_vars,
                                h=hide_opts,
                                cell_lookup_table=cell_lookup_table))
    self.report_data = Storage(row_label=row_label,
                               col_label=col_label,
                               layer_label=layer_label,
                               json_data=json_data)
def json(self, resource, start=None, limit=None, fields=None, orderby=None, tooltip=None):
    """
        Export a resource as JSON

        @param resource: the resource to export from
        @param start: index of the first record to export
        @param limit: maximum number of records to export
        @param fields: list of field selectors for fields to include in
                      the export (None for all readable fields)
        @param orderby: ORDERBY expression
        @param tooltip: additional tooltip field, either a field selector
                        or an expression "f(k,v)" where f is a function
                        name that can be looked up from s3db, and k,v are
                        field selectors for the row, f will be called with
                        a list of tuples (k,v) for each row and is expected
                        to return a dict {k:tooltip}
                        => used by filterOptionsS3 to extract
                        onhover-tooltips for Ajax-update of options

        @return: the rows as JSON string, with "_tooltip" added to each
                 row for which a tooltip could be determined
    """

    if fields is None:
        # Default: all readable fields of the master table
        fields = [f.name for f in resource.table if f.readable]

    tooltip_function = None
    if tooltip:
        if type(tooltip) is list:
            tooltip = tooltip[-1]
        import re
        # Tooltip expression "f(k,v)"?
        # (raw string, so the \w escapes reach the regex engine intact)
        match = re.match(r"(\w+)\((\w+),(\w+)\)", tooltip)
        if match:
            function_name, kname, vname = match.groups()
            # Try resolve the function name
            tooltip_function = current.s3db.get(function_name)
            if tooltip_function:
                # Make sure the key/value fields are in the export
                if kname not in fields:
                    fields.append(kname)
                if vname not in fields:
                    fields.append(vname)
        else:
            # Plain field selector => include it in the export
            if tooltip not in fields:
                fields.append(tooltip)

    # Get the rows
    rows = resource.select(fields,
                           start=start,
                           limit=limit,
                           orderby=orderby,
                           as_rows=True)

    if tooltip:
        import sys
        from s3utils import s3_unicode
        if tooltip_function:
            # Resolve key and value names against the resource
            try:
                krfield = resource.resolve_selector(kname)
                vrfield = resource.resolve_selector(vname)
            except (AttributeError, SyntaxError):
                current.log.error(sys.exc_info()[1])
            else:
                # Extract key and value fields from each row and
                # build options list for the function call
                options = []
                items = {}
                for row in rows:
                    try:
                        k = krfield.extract(row)
                    except KeyError:
                        break
                    try:
                        v = vrfield.extract(row)
                    except KeyError:
                        break
                    items[k] = row
                    options.append((k, v))
                # Call tooltip rendering function
                # (best-effort: log errors, but never fail the export)
                try:
                    tooltips = tooltip_function(options)
                except Exception:
                    current.log.error(sys.exc_info()[1])
                else:
                    # Add tooltips as "_tooltip" to the corresponding rows
                    if isinstance(tooltips, dict):
                        for k, v in tooltips.items():
                            if k in items:
                                items[k]["_tooltip"] = s3_unicode(v)
        else:
            # Resolve the tooltip field name against the resource
            try:
                tooltip_rfield = resource.resolve_selector(tooltip)
            except (AttributeError, SyntaxError):
                current.log.error(sys.exc_info()[1])
            else:
                # Extract the tooltip field from each row
                # and add it as _tooltip
                for row in rows:
                    try:
                        value = tooltip_rfield.extract(row)
                    except KeyError:
                        break
                    if value:
                        row["_tooltip"] = s3_unicode(value)

    # Get the rows and return as json
    response = current.response
    if response:
        response.headers["Content-Type"] = "application/json"
    return rows.json()
def __init__(self, report, show_totals=True, url=None, filter_query=None, **attributes):
    """ Constructor - builds the pivot table as HTML TABLE
        (THEAD/TBODY/TFOOT components) and prepares the chart
        data (JSON) in self.report_data.

        This variant renders rows/columns in the order given by
        the S3Pivottable instance (no dimension sorting).

        @param report: the S3Pivottable instance
        @param show_totals: show totals for rows and columns
        @param url: link cells to this base-URL
        @param filter_query: use this S3ResourceQuery with the base-URL
        @param attributes: the HTML attributes for the table
    """

    T = current.T
    TOTAL = T("Total")

    TABLE.__init__(self, **attributes)
    components = self.components = []
    self.json_data = None

    layers = report.layers
    resource = report.resource
    tablename = resource.tablename

    cols = report.cols
    rows = report.rows
    numcols = report.numcols
    numrows = report.numrows
    rfields = report.rfields

    get_label = self._get_label
    get_total = self._totals
    # Shortcut for cell value representation (d = default if no value)
    represent = lambda f, v, d="": \
                self._represent(rfields, f, v, default=d)

    layer_label = None
    col_titles = []
    add_col_title = col_titles.append
    col_totals = []
    add_col_total = col_totals.append
    row_titles = []
    add_row_title = row_titles.append
    row_totals = []
    add_row_total = row_totals.append

    # Layer titles --------------------------------------------------------

    # Get custom labels from report options ("fact" items can carry a
    # custom label as third tuple element)
    layer_labels = Storage()
    report_options = resource.get_config("report_options", None)
    if report_options and "fact" in report_options:
        layer_opts = report_options["fact"]
        for item in layer_opts:
            if isinstance(item, (tuple, list)) and len(item) == 3:
                if not "." in item[0].split("$")[0]:
                    # Prefix selector with the resource alias
                    item = ("%s.%s" % (resource.alias, item[0]),
                            item[1], item[2])
                layer_labels[(item[0], item[1])] = item[2]

    labels = []
    get_mname = S3Report.mname

    for layer in layers:
        if layer in layer_labels:
            # Custom label
            label = layer_labels[layer]
            if not labels:
                layer_label = label
            labels.append(s3_unicode(label))
        else:
            # Construct label from field-label and method
            label = get_label(rfields, layer[0], tablename, "fact")
            mname = get_mname(layer[1])
            if not labels:
                # For the chart title, "list" layers are labelled as "count"
                m = layer[1] == "list" and get_mname("count") or mname
                layer_label = "%s (%s)" % (label, m)
            labels.append("%s (%s)" % (label, mname))

    layers_title = TH(" / ".join(labels))

    # Columns field title -------------------------------------------------

    if cols:
        col_label = get_label(rfields, cols, tablename, "cols")
        _colspan = numcols + 1
    else:
        col_label = ""
        _colspan = numcols
    cols_title = TH(col_label, _colspan=_colspan, _scope="col")

    titles = TR(layers_title, cols_title)

    # Rows field title ----------------------------------------------------

    row_label = get_label(rfields, rows, tablename, "rows")
    rows_title = TH(row_label, _scope="col")

    headers = TR(rows_title)

    # Column headers ------------------------------------------------------

    add_header = headers.append
    values = report.col
    for i in xrange(numcols):
        value = values[i].value
        v = represent(cols, value)
        add_col_title(s3_truncate(unicode(v)))
        colhdr = TH(v, _scope="col")
        add_header(colhdr)

    # Row totals header ---------------------------------------------------

    if show_totals and cols is not None:
        add_header(TH(TOTAL, _class="totals_header rtotal", _scope="col"))

    thead = THEAD(titles, headers)

    # Table body ----------------------------------------------------------

    tbody = TBODY()
    add_row = tbody.append

    # lookup table for cell list values
    # {layer_idx: {id: text-representation}} - used client-side by the
    # cell-zoom feature to resolve the ids in "_data-records"
    cell_lookup_table = {} # {{}, {}}

    cells = report.cell
    rvals = report.row

    # Maps raw cell values to synthetic ids for non-FK count layers
    cell_vals = Storage()

    for i in xrange(numrows):

        # Initialize row (alternate odd/even row classes)
        _class = i % 2 and "odd" or "even"
        tr = TR(_class=_class)
        add_cell = tr.append

        # Row header
        row = rvals[i]
        v = represent(rows, row.value)
        add_row_title(s3_truncate(unicode(v)))
        rowhdr = TD(v)
        add_cell(rowhdr)

        # Result cells
        for j in xrange(numcols):
            cell = cells[i][j]
            vals = []
            cell_ids = []
            add_value = vals.append
            for layer_idx, layer in enumerate(layers):
                f, m = layer
                value = cell[layer]
                if m == "list":
                    if isinstance(value, list):
                        l = [represent(f, v, d="-") for v in value]
                    elif value is None:
                        l = ["-"]
                    else:
                        if type(value) in (int, float):
                            l = IS_NUMBER.represent(value)
                        else:
                            l = unicode(value)
                    #add_value(", ".join(l))
                    add_value(UL([LI(v) for v in l]))
                else:
                    if type(value) in (int, float):
                        add_value(IS_NUMBER.represent(value))
                    else:
                        add_value(unicode(value))

                # hold the references
                layer_ids = []
                # get previous lookup values for this layer
                layer_values = cell_lookup_table.get(layer_idx, {})

                if m == "count":
                    rfield = rfields[f]
                    field = rfield.field
                    colname = rfield.colname
                    has_fk = field is not None and s3_has_foreign_key(field)
                    for id in cell.records:
                        # cell.records == [#, #, #]
                        record = report.records[id]
                        try:
                            fvalue = record[colname]
                        except AttributeError:
                            fvalue = None
                        if fvalue is not None:
                            if has_fk:
                                if type(fvalue) is not list:
                                    fvalue = [fvalue]
                                # list of foreign keys
                                for fk in fvalue:
                                    if fk is not None and fk not in layer_ids:
                                        layer_ids.append(int(fk))
                                        layer_values[fk] = s3_unicode(field.represent(fk))
                            else:
                                if type(fvalue) is not list:
                                    fvalue = [fvalue]
                                for val in fvalue:
                                    if val is not None:
                                        # Non-FK values get synthetic ids,
                                        # de-duplicated via cell_vals
                                        if val not in cell_vals:
                                            next_id = len(cell_vals)
                                            cell_vals[val] = next_id
                                            layer_ids.append(next_id)
                                            layer_values[next_id] = s3_unicode(represent(f, val))
                                        else:
                                            prev_id = cell_vals[val]
                                            if prev_id not in layer_ids:
                                                layer_ids.append(prev_id)
                        #if id is not None and id not in layer_ids:
                            #layer_ids.append(int(id))
                            #layer_values[id] = s3_unicode(represent(f, fvalue))

                cell_ids.append(layer_ids)
                cell_lookup_table[layer_idx] = layer_values

            # @todo: with multiple layers - show the first, hide the rest
            #        + render layer selector in the layer title corner to
            #        + switch between layers
            #        OR: give every layer a title row (probably better method)
            vals = [DIV(v, _class="report-cell-value") for v in vals]
            if any(cell_ids):
                # Attach record references for the client-side cell zoom
                cell_attr = {"_data-records": cell_ids}
                vals.append(DIV(_class="report-cell-zoom"))
            else:
                cell_attr = {}
            add_cell(TD(vals, **cell_attr))

        # Row total
        totals = get_total(row, layers, append=add_row_total)
        if show_totals and cols is not None:
            add_cell(TD(totals))

        add_row(tr)

    # Table footer --------------------------------------------------------

    i = numrows
    _class = i % 2 and "odd" or "even"
    _class = "%s %s" % (_class, "totals_row")

    col_total = TR(_class=_class)
    add_total = col_total.append
    add_total(TH(TOTAL, _class="totals_header", _scope="row"))

    # Column totals -------------------------------------------------------

    for j in xrange(numcols):
        col = report.col[j]
        totals = get_total(col, layers, append=add_col_total)
        add_total(TD(IS_NUMBER.represent(totals)))

    # Grand total ---------------------------------------------------------

    if cols is not None:
        grand_totals = get_total(report.totals, layers)
        add_total(TD(grand_totals))

    tfoot = TFOOT(col_total)

    # Wrap up -------------------------------------------------------------

    append = components.append
    append(thead)
    append(tbody)
    if show_totals:
        append(tfoot)

    # Chart data ----------------------------------------------------------

    layer_label = s3_unicode(layer_label)
    BY = T("by")
    row_label = "%s %s" % (BY, s3_unicode(row_label))
    if col_label:
        col_label = "%s %s" % (BY, s3_unicode(col_label))
    if filter_query and hasattr(filter_query, "serialize_url"):
        filter_vars = filter_query.serialize_url(resource=report.resource)
    else:
        filter_vars = {}
    hide_opts = current.deployment_settings.get_ui_hide_report_options()
    # NOTE(review): single-letter keys (t/x/y/r/c/d/u/f/h) are presumably
    # consumed by the client-side report script - confirm before renaming
    json_data = json.dumps(dict(t=layer_label,
                                x=col_label,
                                y=row_label,
                                r=report.rows,
                                c=report.cols,
                                d=report.compact(n=50, represent=True),
                                u=url,
                                f=filter_vars,
                                h=hide_opts,
                                cell_lookup_table=cell_lookup_table))
    self.report_data = Storage(row_label=row_label,
                               col_label=col_label,
                               layer_label=layer_label,
                               json_data=json_data)
def __repr__(self):
    """ Unicode-safe string representation of this instance,
        delegating to represent()
    """

    value = self.represent()
    return s3_unicode(value)
def notify(cls, resource_id):
    """
        Asynchronous task to notify a subscriber about updates,
        runs a POST?format=msg request against the subscribed
        controller which extracts the data and renders and sends
        the notification message (see send()).

        @param resource_id: the pr_subscription_resource record ID

        @return: True if the subscription resource no longer exists
                 (nothing to notify)
    """

    _debug("S3Notifications.notify(resource_id=%s)", resource_id)

    db = current.db
    s3db = current.s3db

    stable = s3db.pr_subscription
    rtable = db.pr_subscription_resource
    ftable = s3db.pr_filter

    # Extract the subscription data
    join = stable.on(rtable.subscription_id == stable.id)
    left = ftable.on(ftable.id == stable.filter_id)
    # @todo: should not need rtable.resource here
    row = db(rtable.id == resource_id).select(stable.id,
                                              stable.pe_id,
                                              stable.frequency,
                                              stable.notify_on,
                                              stable.method,
                                              stable.email_format,
                                              rtable.id,
                                              rtable.resource,
                                              rtable.url,
                                              rtable.last_check_time,
                                              ftable.query,
                                              join=join,
                                              left=left).first()
    if not row:
        # Subscription was deleted: task done
        return True

    s = getattr(row, "pr_subscription")
    r = getattr(row, "pr_subscription_resource")
    f = getattr(row, "pr_filter")

    # Create a temporary token to authorize the lookup request
    auth_token = str(uuid4())

    # Store the auth_token in the subscription record
    # (commit immediately so the lookup request can see it)
    r.update_record(auth_token=auth_token)
    db.commit()

    # Construct the send-URL
    settings = current.deployment_settings
    public_url = settings.get_base_public_url()
    lookup_url = "%s/%s/%s" % (public_url,
                               current.request.application,
                               r.url.lstrip("/"))

    # Break up the URL into its components
    purl = list(urlparse.urlparse(lookup_url))

    # Subscription parameters
    # - only look up records modified/created since the last check
    last_check_time = s3_encode_iso_datetime(r.last_check_time)
    query = {"subscription": auth_token, "format": "msg"}
    if "upd" in s.notify_on:
        query["~.modified_on__ge"] = last_check_time
    else:
        query["~.created_on__ge"] = last_check_time

    # Filters
    if f.query:
        from s3filter import S3FilterString
        resource = s3db.resource(r.resource)
        fstring = S3FilterString(resource, f.query)
        for k, v in fstring.get_vars.iteritems():
            if v is not None:
                if k in query:
                    # Key already present => collect values in a list
                    value = query[k]
                    if type(value) is list:
                        value.append(v)
                    else:
                        query[k] = [value, v]
                else:
                    query[k] = v
        # Human-readable filter description for the message
        query_nice = s3_unicode(fstring.represent())
    else:
        query_nice = None

    # Add subscription parameters and filters to the URL query, and
    # put the URL back together
    query = urlencode(query)
    if purl[4]:
        query = "&".join((purl[4], query))
    page_url = urlparse.urlunparse([purl[0], # scheme
                                    purl[1], # netloc
                                    purl[2], # path
                                    purl[3], # params
                                    query,   # query
                                    purl[5], # fragment
                                    ])

    # Serialize data for send (avoid second lookup in send)
    data = json.dumps({"pe_id": s.pe_id,
                       "notify_on": s.notify_on,
                       "method": s.method,
                       "email_format": s.email_format,
                       "resource": r.resource,
                       "last_check_time": last_check_time,
                       "filter_query": query_nice,
                       "page_url": lookup_url,
                       "item_url": None,
                       })

    # Send the request
    _debug("Requesting %s", page_url)
    req = urllib2.Request(page_url, data=data)
    req.add_header("Content-Type", "application/json")
    success = False
    try:
        response = json.loads(urllib2.urlopen(req).read())
        message = response["message"]
        if response["status"] == "success":
            success = True
    except urllib2.HTTPError, e:
        message = ("HTTP %s: %s" % (e.code, e.read()))
def notify(cls, resource_id):
    """
        Asynchronous task to notify a subscriber about updates,
        runs a POST?format=msg request against the subscribed
        controller which extracts the data and renders and sends
        the notification message (see send()).

        This variant additionally passes the subscription's
        "attachment" setting through to send().

        @param resource_id: the pr_subscription_resource record ID

        @return: True if the subscription resource no longer exists
                 (nothing to notify)
    """

    _debug = current.log.debug
    _debug("S3Notifications.notify(resource_id=%s)" % resource_id)

    db = current.db
    s3db = current.s3db

    stable = s3db.pr_subscription
    rtable = db.pr_subscription_resource
    ftable = s3db.pr_filter

    # Extract the subscription data
    join = stable.on(rtable.subscription_id == stable.id)
    left = ftable.on(ftable.id == stable.filter_id)
    # @todo: should not need rtable.resource here
    row = db(rtable.id == resource_id).select(stable.id,
                                              stable.pe_id,
                                              stable.frequency,
                                              stable.notify_on,
                                              stable.method,
                                              stable.email_format,
                                              stable.attachment,
                                              rtable.id,
                                              rtable.resource,
                                              rtable.url,
                                              rtable.last_check_time,
                                              ftable.query,
                                              join=join,
                                              left=left).first()
    if not row:
        # Subscription was deleted: task done
        return True

    s = getattr(row, "pr_subscription")
    r = getattr(row, "pr_subscription_resource")
    f = getattr(row, "pr_filter")

    # Create a temporary token to authorize the lookup request
    auth_token = str(uuid4())

    # Store the auth_token in the subscription record
    # (commit immediately so the lookup request can see it)
    r.update_record(auth_token=auth_token)
    db.commit()

    # Construct the send-URL
    public_url = current.deployment_settings.get_base_public_url()
    lookup_url = "%s/%s/%s" % (public_url,
                               current.request.application,
                               r.url.lstrip("/"))

    # Break up the URL into its components
    purl = list(urlparse.urlparse(lookup_url))

    # Subscription parameters
    # - only look up records modified/created since the last check
    last_check_time = s3_encode_iso_datetime(r.last_check_time)
    query = {"subscription": auth_token, "format": "msg"}
    if "upd" in s.notify_on:
        query["~.modified_on__ge"] = last_check_time
    else:
        query["~.created_on__ge"] = last_check_time

    # Filters
    if f.query:
        from s3filter import S3FilterString
        resource = s3db.resource(r.resource)
        fstring = S3FilterString(resource, f.query)
        for k, v in fstring.get_vars.iteritems():
            if v is not None:
                if k in query:
                    # Key already present => collect values in a list
                    value = query[k]
                    if type(value) is list:
                        value.append(v)
                    else:
                        query[k] = [value, v]
                else:
                    query[k] = v
        # Human-readable filter description for the message
        query_nice = s3_unicode(fstring.represent())
    else:
        query_nice = None

    # Add subscription parameters and filters to the URL query, and
    # put the URL back together
    query = urlencode(query)
    if purl[4]:
        query = "&".join((purl[4], query))
    page_url = urlparse.urlunparse([
        purl[0], # scheme
        purl[1], # netloc
        purl[2], # path
        purl[3], # params
        query,   # query
        purl[5], # fragment
        ])

    # Serialize data for send (avoid second lookup in send)
    data = json.dumps({
        "pe_id": s.pe_id,
        "notify_on": s.notify_on,
        "method": s.method,
        "email_format": s.email_format,
        "attachment": s.attachment,
        "resource": r.resource,
        "last_check_time": last_check_time,
        "filter_query": query_nice,
        "page_url": lookup_url,
        "item_url": None,
        })

    # Send the request
    _debug("Requesting %s" % page_url)
    req = urllib2.Request(page_url, data=data)
    req.add_header("Content-Type", "application/json")
    success = False
    try:
        response = json.loads(urllib2.urlopen(req).read())
        message = response["message"]
        if response["status"] == "success":
            success = True
    except urllib2.HTTPError, e:
        message = ("HTTP %s: %s" % (e.code, e.read()))
def merge(self, r, **attr):
    """ Merge form for two records

        @param r: the S3Request
        @param **attr: the controller attributes for the request

        @return: output dict for the view (title, form, reset)

        @note: this method can always only be POSTed, and requires
               both "selected" and "mode" in post_vars, as well as
               the duplicate bookmarks list in session.s3
    """

    T = current.T
    session = current.session
    response = current.response

    output = dict()
    tablename = self.tablename

    # Get the duplicate bookmarks
    s3 = session.s3
    DEDUPLICATE = self.DEDUPLICATE
    if DEDUPLICATE in s3:
        bookmarks = s3[DEDUPLICATE]
        if tablename in bookmarks:
            record_ids = bookmarks[tablename]

    # Process the post variables
    post_vars = r.post_vars
    if "mode" in post_vars:
        mode = post_vars["mode"]
    if "selected" in post_vars:
        selected = post_vars["selected"]
    else:
        selected = ""
    selected = selected.split(",")
    if mode == "Inclusive":
        # "selected" holds the two record ids to merge
        ids = selected
    elif mode == "Exclusive":
        # "selected" holds the ids to exclude from the bookmark list
        ids = [i for i in record_ids if i not in selected]
    if len(ids) != 2:
        r.error(501, T("Please select exactly two records"),
                next = r.url(id=0, vars={}))

    # Get the selected records
    table = self.table
    query = (table._id == ids[0]) | (table._id == ids[1])
    # Order by creation date so the older record becomes the "original"
    orderby = table.created_on if "created_on" in table else None
    rows = current.db(query).select(orderby=orderby,
                                    limitby=(0, 2))
    if len(rows) != 2:
        r.error(404, r.ERROR.BAD_RECORD, next = r.url(id=0, vars={}))
    original = rows[0]
    duplicate = rows[1]

    # Prepare form construction
    formfields = [f for f in table if f.readable or f.writable]

    ORIGINAL, DUPLICATE, KEEP = self.ORIGINAL, self.DUPLICATE, self.KEEP
    # Has the user already chosen which record to keep?
    keep_o = KEEP.o in post_vars and post_vars[KEEP.o]
    keep_d = KEEP.d in post_vars and post_vars[KEEP.d]

    trs = []
    represent = current.manager.represent
    init_requires = self.init_requires
    for f in formfields:

        # Render the widgets
        # - editable widget for the record to keep, read-only
        #   representation for the other one
        oid = "%s_%s" % (ORIGINAL, f.name)
        did = "%s_%s" % (DUPLICATE, f.name)
        sid = "swap_%s" % f.name
        init_requires(f, original[f], duplicate[f])
        if keep_o or not any((keep_o, keep_d)):
            owidget = self.widget(f, original[f], _name=oid, _id=oid)
        else:
            try:
                owidget = represent(f, value=original[f])
            except:
                # Fall back to raw value if representation fails
                owidget = s3_unicode(original[f])
        if keep_d or not any((keep_o, keep_d)):
            dwidget = self.widget(f, duplicate[f], _name=did, _id=did)
        else:
            try:
                dwidget = represent(f, value=duplicate[f])
            except:
                dwidget = s3_unicode(duplicate[f])

        # Swap button (only while no keep-decision has been made)
        if not any((keep_o, keep_d)):
            swap = INPUT(_value="<-->",
                         _class="swap-button",
                         _id=sid,
                         _type="button")
        else:
            swap = DIV(_class="swap-button")

        if owidget is None or dwidget is None:
            continue

        # Render label row
        label = f.label
        trs.append(TR(TD(label, _class="w2p_fl"),
                      TD(),
                      TD(label, _class="w2p_fl")))

        # Append widget row
        trs.append(TR(TD(owidget, _class="mwidget"),
                      TD(swap),
                      TD(dwidget, _class="mwidget")))

    # Show created_on/created_by for each record
    if "created_on" in table:
        original_date = original.created_on
        duplicate_date = duplicate.created_on
        if "created_by" in table:
            represent = table.created_by.represent
            original_author = represent(original.created_by)
            duplicate_author = represent(duplicate.created_by)
            created = T("Created on %s by %s")
            original_created = created % (original_date, original_author)
            duplicate_created = created % (duplicate_date, duplicate_author)
        else:
            created = T("Created on %s")
            original_created = created % original_date
            duplicate_created = created % duplicate_date
    else:
        original_created = ""
        duplicate_created = ""

    # Page title and subtitle
    output["title"] = T("Merge records")
    #output["subtitle"] = self.crud_string(tablename, "title_list")

    # Submit buttons
    if keep_o or not any((keep_o, keep_d)):
        submit_original = INPUT(_value=T("Keep Original"),
                                _type="submit",
                                _name=KEEP.o,
                                _id=KEEP.o)
    else:
        submit_original = ""
    if keep_d or not any((keep_o, keep_d)):
        submit_duplicate = INPUT(_value=T("Keep Duplicate"),
                                 _type="submit",
                                 _name=KEEP.d,
                                 _id=KEEP.d)
    else:
        submit_duplicate = ""

    # Build the form
    form = FORM(TABLE(
                    THEAD(
                        TR(TH(H3(T("Original"))),
                           TH(),
                           TH(H3(T("Duplicate"))),
                           ),
                        TR(TD(original_created),
                           TD(),
                           TD(duplicate_created),
                           _class="authorinfo",
                           ),
                        ),
                    TBODY(trs),
                    TFOOT(
                        TR(TD(submit_original),
                           TD(),
                           TD(submit_duplicate),
                           ),
                        ),
                    ),
                # Append mode and selected - required to get back here!
                hidden = {
                    "mode": mode,
                    "selected": ",".join(ids),
                }
                )
    output["form"] = form

    # Add RESET and CANCEL options
    output["reset"] = FORM(INPUT(_value=T("Reset"),
                                 _type="submit",
                                 _name="reset",
                                 _id="form-reset"),
                           A(T("Cancel"),
                             _href=r.url(id=0, vars={}),
                             _class="action-lnk"),
                           hidden = {"mode": mode,
                                     "selected": ",".join(ids)})

    # Process the merge form
    formname = "merge_%s_%s_%s" % (tablename,
                                   original[table._id],
                                   duplicate[table._id])
    if form.accepts(post_vars, session,
                    formname=formname,
                    onvalidation=lambda form: self.onvalidation(tablename, form),
                    keepvalues=False,
                    hideerror=False):

        s3db = current.s3db

        if form.vars[KEEP.d]:
            # "Keep Duplicate": swap roles, keep the duplicate's values
            prefix = "%s_" % DUPLICATE
            original, duplicate = duplicate, original
        else:
            prefix = "%s_" % ORIGINAL

        # Collect the form values of the record to keep
        data = Storage()
        for key in form.vars:
            if key.startswith(prefix):
                fname = key.split("_", 1)[1]
                data[fname] = form.vars[key]

        search = False
        resource = s3db.resource(tablename)
        try:
            resource.merge(original[table._id],
                           duplicate[table._id],
                           update=data)
        except current.auth.permission.error:
            r.unauthorized()
        except KeyError:
            r.error(404, r.ERROR.BAD_RECORD)
        except:
            r.error(424,
                    T("Could not merge records. (Internal Error: %s)") %
                        sys.exc_info()[1],
                    next=r.url())
        else:
            # Cleanup bookmark list
            if mode == "Inclusive":
                bookmarks[tablename] = [i for i in record_ids if i not in ids]
                if not bookmarks[tablename]:
                    del bookmarks[tablename]
                    search = True
            elif mode == "Exclusive":
                bookmarks[tablename].extend(ids)
                if not selected:
                    search = True

            # Confirmation message
            # @todo: Having the link to the merged record in the confirmation
            #        message would be nice, but it's currently not clickable there :/
            #result = A(T("Open the merged record"),
                       #_href=r.url(method="read",
                                   #id=original[table._id],
                                   #vars={}))
            response.confirmation = T("Records merged successfully.")

            # Go back to bookmark list
            if search:
                self.next = r.url(method="search", id=0, vars={})
            else:
                self.next = r.url(id=0, vars={})

    # View
    response.view = self._view(r, "merge.html")

    return output
def aadata(self, totalrows, displayrows, id, draw, flist, stringify=True, action_col=None, **attr):
    """ Render the current page of the data set as a JSON object
        (dataTables server-side processing protocol)

        @param totalrows: total number of rows in the unfiltered query
        @param displayrows: total number of rows in the filtered query
        @param id: the id of the table this Ajax call responds to
        @param draw: unaltered copy of draw sent from the client,
                     used by dataTables as a draw count
        @param flist: the list of fields
        @param stringify: serialize the result as JSON string
        @param action_col: index of the column for the action buttons
        @param attr: dictionary of attributes which can be passed in
                     dt_action_col: fallback for action_col
                     dt_group_totals: the number of records in each group,
                                      displayed in parenthesis after the
                                      group title
    """

    if not flist:
        flist = self.colnames
    if action_col is None:
        action_col = attr.get("dt_action_col", 0)

    records = self.data
    key_field = flist[action_col]
    checkbox = "<INPUT id='select%s' type='checkbox' class='bulkcheckbox'>"

    # Render the rows of the current page
    page = []
    add_page_row = page.append
    for index in xrange(self.start, self.end):
        record = records[index]
        cells = [checkbox % record[key_field]
                 if fname == "BULK" else s3_unicode(record[fname])
                 for fname in flist]
        add_page_row(cells)

    structure = {"dataTable_id": id,
                 "dataTable_filter": self.filterString,
                 "dataTable_groupTotals": attr.get("dt_group_totals", []),
                 "dataTable_sort": self.orderby,
                 "data": page,
                 "recordsTotal": totalrows,
                 "recordsFiltered": displayrows,
                 "draw": draw,
                 }

    if not stringify:
        return structure
    from gluon.serializers import json as jsons
    return jsons(structure)
def _options(self, resource):
    """
    Helper function to retrieve the current options for this
    filter widget

    @param resource: the S3Resource

    @return: a tuple (ftype, options, no_opts) where options is a
             list of (value, label) tuples, or None if there are no
             options (in which case no_opts holds the message to show)
    """

    T = current.T

    EMPTY = T("None")
    NOOPT = T("No options available")

    # NOTE(review): attr is not used anywhere in this method
    attr = self.attr
    opts = self.opts

    # Resolve the field selector
    selector = self.field
    if isinstance(selector, (tuple, list)):
        # Multiple selectors => use the first one for option lookup
        selector = selector[0]
    rfield = S3ResourceField(resource, selector)
    field = rfield.field
    colname = rfield.colname
    ftype = rfield.ftype

    # Find the options
    if opts.options is not None:
        # Custom dict of options {value: label} or a callable
        # returning such a dict:
        options = opts.options
        if callable(options):
            options = options()
        opt_keys = options.keys()
    else:
        # Determine the options from the field type
        options = None
        if ftype == "boolean":
            # NOTE(review): the "multiple" local is never assigned on
            # this path; if a plain callable represent is used below,
            # the "if multiple:" check would raise a NameError
            opt_keys = (True, False)
        elif field or rfield.virtual:
            multiple = ftype[:5] == "list:"
            # groupby de-duplicates at DB level for non-list fields;
            # virtual fields cannot be grouped or ordered in the DB
            groupby = field if field and not multiple else None
            virtual = field is None
            rows = resource.select(fields=[selector],
                                   start=None,
                                   limit=None,
                                   orderby=field,
                                   groupby=groupby,
                                   virtual=virtual)
            opt_keys = []
            if rows:
                if multiple:
                    # list:-type => flatten the value lists, de-duplicate
                    kextend = opt_keys.extend
                    for row in rows:
                        vals = row[colname]
                        if vals:
                            kextend([v for v in vals
                                       if v not in opt_keys])
                else:
                    kappend = opt_keys.append
                    for row in rows:
                        v = row[colname]
                        if v not in opt_keys:
                            kappend(v)
        else:
            opt_keys = []

    # No options? (none at all, or just a single empty/None key)
    if len(opt_keys) < 1 or len(opt_keys) == 1 and not opt_keys[0]:
        return (ftype, None, opts.get("no_opts", NOOPT))

    # Represent the options
    opt_list = [] # list of tuples (key, value)

    # Custom represent? (otherwise fall back to field represent)
    represent = opts.represent
    if not represent or ftype[:9] != "reference":
        # NOTE(review): custom represents are only honoured for
        # reference fields; also, field can be None here (virtual
        # fields), in which case this raises an AttributeError
        represent = field.represent

    if options is not None:
        # Custom dict of {value:label} => use this label
        opt_list = options.items()

    elif callable(represent):
        # Callable representation function:
        if hasattr(represent, "bulk"):
            # S3Represent => use bulk option
            opt_dict = represent.bulk(opt_keys,
                                      list_type=False,
                                      show_link=False)
            if None in opt_keys:
                opt_dict[None] = EMPTY
            elif None in opt_dict:
                del opt_dict[None]
            if "" in opt_keys:
                opt_dict[""] = EMPTY
            opt_list = opt_dict.items()
        else:
            # Simple represent function
            # Only pass show_link if the function accepts it
            # (func_code is Python-2; __code__ in Python 3)
            args = {"show_link": False} \
                   if "show_link" in represent.func_code.co_varnames else {}
            if multiple:
                repr_opt = lambda opt: opt in (None, "") and (opt, EMPTY) or \
                                       (opt, represent([opt], **args))
            else:
                repr_opt = lambda opt: opt in (None, "") and (opt, EMPTY) or \
                                       (opt, represent(opt, **args))
            opt_list = map(repr_opt, opt_keys)

    elif isinstance(represent, str) and ftype[:9] == "reference":
        # Represent is a string template to be fed from the
        # referenced record

        # Get the referenced table
        db = current.db
        ktable = db[ftype[10:]]

        k_id = ktable._id.name

        # Get the fields referenced by the string template
        # (e.g. "%(name)s" => ["name"])
        fieldnames = [k_id]
        fieldnames += re.findall("%\(([a-zA-Z0-9_]*)\)s", represent)
        represent_fields = [ktable[fieldname] for fieldname in fieldnames]

        # Get the referenced records (only numeric keys are looked up)
        query = (ktable.id.belongs([k for k in opt_keys
                                          if str(k).isdigit()])) & \
                (ktable.deleted == False)
        rows = db(query).select(*represent_fields).as_dict(key=k_id)

        # Run all referenced records against the format string
        opt_list = []
        ol_append = opt_list.append
        for opt_value in opt_keys:
            if opt_value in rows:
                opt_represent = represent % rows[opt_value]
                if opt_represent:
                    ol_append((opt_value, opt_represent))

    else:
        # Straight string representations of the values (fallback),
        # dropping falsy keys
        opt_list = [(opt_value, s3_unicode(opt_value))
                    for opt_value in opt_keys
                    if opt_value]

    # Sort the options alphabetically by label
    opt_list.sort(key = lambda item: item[1])

    # Move the None option ("NONE") to the end of the list
    options = []
    empty = None
    for k, v in opt_list:
        if k is None:
            empty = ("NONE", v)
        else:
            options.append((k, v))
    if empty:
        options.append(empty)

    return (ftype, options, None)
def merge(self, r, **attr):
    """
    Merge form for two records

    @param r: the S3Request
    @param **attr: the controller attributes for the request

    @return: the output dict for the view (title, form, reset form)

    @note: this method can always only be POSTed, and requires
           both "selected" and "mode" in post_vars, as well as
           the duplicate bookmarks list in session.s3
    """

    T = current.T
    session = current.session
    response = current.response

    output = dict()
    tablename = self.tablename

    # Get the duplicate bookmarks
    # NOTE(review): bookmarks/record_ids stay unbound if the session
    # holds no bookmark list for this table; they are used further
    # down, so a request without bookmarks raises a NameError
    s3 = session.s3
    DEDUPLICATE = self.DEDUPLICATE
    if DEDUPLICATE in s3:
        bookmarks = s3[DEDUPLICATE]
        if tablename in bookmarks:
            record_ids = bookmarks[tablename]

    # Process the post variables
    # NOTE(review): mode is unbound if "mode" is not in post_vars
    # (see the @note above - this method requires it)
    post_vars = r.post_vars
    if "mode" in post_vars:
        mode = post_vars["mode"]
    if "selected" in post_vars:
        selected = post_vars["selected"]
    else:
        selected = ""
    selected = selected.split(",")
    # Inclusive: the selected ids are the ones to merge
    # Exclusive: all bookmarked ids except the selected ones
    if mode == "Inclusive":
        ids = selected
    elif mode == "Exclusive":
        ids = [i for i in record_ids if i not in selected]
    if len(ids) != 2:
        r.error(501, T("Please select exactly two records"),
                next=r.url(id=0, vars={}))

    # Get the selected records
    table = self.table
    query = (table._id == ids[0]) | (table._id == ids[1])
    orderby = table.created_on if "created_on" in table else None
    rows = current.db(query).select(orderby=orderby,
                                    limitby=(0, 2))
    if len(rows) != 2:
        r.error(404, r.ERROR.BAD_RECORD, next=r.url(id=0, vars={}))
    # With orderby created_on, the older record is the original
    original = rows[0]
    duplicate = rows[1]

    # Prepare form construction
    formfields = [f for f in table if f.readable or f.writable]

    ORIGINAL, DUPLICATE, KEEP = self.ORIGINAL, self.DUPLICATE, self.KEEP
    # Which "Keep ..." submit button was pressed (if any)?
    keep_o = KEEP.o in post_vars and post_vars[KEEP.o]
    keep_d = KEEP.d in post_vars and post_vars[KEEP.d]

    trs = []
    represent = current.manager.represent
    init_requires = self.init_requires
    for f in formfields:

        # Render the widgets: editable widget for the side(s) being
        # kept, read-only representation for the discarded side
        oid = "%s_%s" % (ORIGINAL, f.name)
        did = "%s_%s" % (DUPLICATE, f.name)
        sid = "swap_%s" % f.name
        init_requires(f, original[f], duplicate[f])
        if keep_o or not any((keep_o, keep_d)):
            owidget = self.widget(f, original[f], _name=oid, _id=oid)
        else:
            try:
                owidget = represent(f, value=original[f])
            except:
                # best-effort fallback if representation fails
                owidget = s3_unicode(original[f])
        if keep_d or not any((keep_o, keep_d)):
            dwidget = self.widget(f, duplicate[f], _name=did, _id=did)
        else:
            try:
                dwidget = represent(f, value=duplicate[f])
            except:
                dwidget = s3_unicode(duplicate[f])

        # Swap button (only while both sides are still editable)
        if not any((keep_o, keep_d)):
            swap = INPUT(_value="<-->",
                         _class="swap-button",
                         _id=sid,
                         _type="button")
        else:
            swap = DIV(_class="swap-button")

        if owidget is None or dwidget is None:
            continue

        # Render label row
        label = f.label
        trs.append(TR(TD(label, _class="w2p_fl"),
                      TD(),
                      TD(label, _class="w2p_fl")))

        # Append widget row
        trs.append(TR(TD(owidget, _class="mwidget"),
                      TD(swap),
                      TD(dwidget, _class="mwidget")))

    # Show created_on/created_by for each record
    if "created_on" in table:
        original_date = original.created_on
        duplicate_date = duplicate.created_on
        if "created_by" in table:
            represent = table.created_by.represent
            original_author = represent(original.created_by)
            duplicate_author = represent(duplicate.created_by)
            created = T("Created on %s by %s")
            original_created = created % (original_date, original_author)
            duplicate_created = created % (duplicate_date, duplicate_author)
        else:
            created = T("Created on %s")
            original_created = created % original_date
            duplicate_created = created % duplicate_date
    else:
        original_created = ""
        duplicate_created = ""

    # Page title and subtitle
    output["title"] = T("Merge records")
    #output["subtitle"] = self.crud_string(tablename, "title_list")

    # Submit buttons
    if keep_o or not any((keep_o, keep_d)):
        submit_original = INPUT(_value=T("Keep Original"),
                                _type="submit",
                                _name=KEEP.o, _id=KEEP.o)
    else:
        submit_original = ""

    if keep_d or not any((keep_o, keep_d)):
        submit_duplicate = INPUT(_value=T("Keep Duplicate"),
                                 _type="submit",
                                 _name=KEEP.d, _id=KEEP.d)
    else:
        submit_duplicate = ""

    # Build the form
    form = FORM(TABLE(
                    THEAD(
                        TR(TH(H3(T("Original"))),
                           TH(),
                           TH(H3(T("Duplicate"))),
                        ),
                        TR(TD(original_created),
                           TD(),
                           TD(duplicate_created),
                           _class="authorinfo",
                        ),
                    ),
                    TBODY(trs),
                    TFOOT(TR(TD(submit_original),
                             TD(),
                             TD(submit_duplicate),
                          ),
                    ),
                ),
                # Append mode and selected - required to get back here!
                # NOTE(review): mode is hard-coded to "Inclusive" here,
                # unlike the reset form below which re-uses the current
                # mode - confirm whether this is intentional
                hidden={
                    "mode": "Inclusive",
                    "selected": ",".join(ids),
                })

    output["form"] = form

    # Add RESET and CANCEL options
    output["reset"] = FORM(INPUT(_value=T("Reset"),
                                 _type="submit",
                                 _name="reset",
                                 _id="form-reset"),
                           A(T("Cancel"),
                             _href=r.url(id=0, vars={}),
                             _class="action-lnk"),
                           hidden={
                               "mode": mode,
                               "selected": ",".join(ids)
                           })

    # Process the merge form
    formname = "merge_%s_%s_%s" % (tablename,
                                   original[table._id],
                                   duplicate[table._id])
    if form.accepts(post_vars, session,
                    formname=formname,
                    onvalidation=lambda form: self.onvalidation(tablename, form),
                    keepvalues=False,
                    hideerror=False):

        s3db = current.s3db

        # If "Keep Duplicate" was chosen, the duplicate becomes the
        # surviving record - swap the roles
        if form.vars[KEEP.d]:
            prefix = "%s_" % DUPLICATE
            original, duplicate = duplicate, original
        else:
            prefix = "%s_" % ORIGINAL

        # Collect the field values of the surviving side
        data = Storage()
        for key in form.vars:
            if key.startswith(prefix):
                fname = key.split("_", 1)[1]
                data[fname] = form.vars[key]

        search = False
        resource = s3db.resource(tablename)
        try:
            resource.merge(original[table._id],
                           duplicate[table._id],
                           update=data)
        except current.auth.permission.error:
            r.unauthorized()
        except KeyError:
            r.error(404, r.ERROR.BAD_RECORD)
        except:
            # NOTE(review): deliberately broad - reports the error to
            # the client rather than crashing the request
            r.error(424,
                    T("Could not merge records. (Internal Error: %s)") %
                        sys.exc_info()[1],
                    next=r.url())
        else:
            # Cleanup bookmark list
            if mode == "Inclusive":
                bookmarks[tablename] = [i for i in record_ids
                                          if i not in ids]
                if not bookmarks[tablename]:
                    del bookmarks[tablename]
                    search = True
            elif mode == "Exclusive":
                bookmarks[tablename].extend(ids)
                if not selected:
                    search = True

            # Confirmation message
            # @todo: Having the link to the merged record in the confirmation
            #        message would be nice, but it's currently not clickable there :/
            #result = A(T("Open the merged record"),
                       #_href=r.url(method="read",
                                   #id=original[table._id],
                                   #vars={}))
            response.confirmation = T("Records merged successfully.")

            # Go back to bookmark list
            if search:
                self.next = r.url(method="search", id=0, vars={})
            else:
                self.next = r.url(id=0, vars={})

    # View
    response.view = self._view(r, "merge.html")

    return output
def render(self, timetuple, dtfmt):
    """
    Render a timetuple as string according to the given
    strftime-like format

    @param timetuple: the timetuple (y, m, d, hh, mm, ss)
    @param dtfmt: the date/time format (string); supported
                  directives: %d %b %B %m %y %Y %H %I %p %M %S -
                  any other %X stays in the output verbatim

    @todo: support day-of-week options
    """

    y, m, d, hh, mm, ss = timetuple

    T = current.T
    cal = self.calendar

    from s3utils import s3_unicode

    # Substitutions for the supported format directives
    substitutions = {"d": "%02d" % d,
                     "b": T(cal.MONTH_ABBR[m - 1]),
                     "B": T(cal.MONTH_NAME[m - 1]),
                     "m": "%02d" % m,
                     "y": "%02d" % (y % 100),
                     "Y": "%04d" % y,
                     "H": "%02d" % hh,
                     "I": "%02d" % ((hh % 12) or 12),
                     "p": T("AM") if hh < 12 else T("PM"),
                     "M": "%02d" % mm,
                     "S": "%02d" % ss,
                     }

    # Interpret the format string as a sequence of literal runs
    # interleaved with %-directives
    output = []
    pending = []

    def flush(chars):
        # Emit the accumulated literal characters (if any)
        literal = "".join(chars)
        if literal:
            output.append(literal)

    escaped = False
    for ch in s3_unicode(dtfmt):
        if escaped and ch in substitutions:
            # Drop the pending "%" marker, close the literal run,
            # then emit the substitution
            pending.pop()
            flush(pending)
            output.append(s3_unicode(substitutions[ch]))
            pending = []
            escaped = False
            continue
        # A "%" starts an escape unless it is itself escaped ("%%")
        escaped = ch == "%" and not escaped
        pending.append(ch)
    flush(pending)

    return "".join(output)