def table_data(self):
    # Because there are many locals needed :)
    # pylint: disable=too-many-locals
    """Return elements data in json format as of the Datatables SSP protocol.

    More info: https://datatables.net/manual/server-side

    Request parameters (some are json.stringify'ed on the client side):
    - object_type: object type managed by the datatable
    - links: url prefix to be used by the links in the table
    - embedded: true / false whether the table is embedded by an external application
      **Note**: those three first parameters are not datatable specific parameters :)
    - draw: request index, echoed back in the response
    - start / length: pagination (first requested row / number of rows)
    - search[value] / search[regex]: global search applied to every searchable column
    - order[i][column] / order[i][dir]: index of the columns to order and sort
      direction (asc/desc)
    - columns[i][data|name|searchable|orderable|search]: per-column data source,
      name, flags and column-specific search value / regex

    Response data (json):
    - draw: echoed request index
    - recordsTotal: total records, before filtering
    - recordsFiltered: total records after filtering (not just the returned page)
    - data: an array of data source objects, one for each row
    - error (optional): error message, only included on error
    """
    # Manage request parameters ...
    logger.info("request data for table: %s", request.params.get('object_type'))

    # Because of specific datatables parameters name (eg. columns[0] ...)
    # ... some parameters have been json.stringify on client side !
    params = {}
    for key in request.params.keys():
        if key == 'columns' or key == 'order' or key == 'search':
            params[key] = json.loads(request.params.get(key))
        else:
            params[key] = request.params.get(key)
    # params now contains 'valid' query parameters as we should have found them ...
    logger.debug("table request parameters: %s", params)

    parameters = {}

    # Manage page number ...
    # start is the first requested row and we must transform to a page count ...
    first_row = int(params.get('start', '0'))
    # length is the number of requested rows
    rows_count = int(params.get('length', '25'))
    parameters['page'] = (first_row // rows_count) + 1
    parameters['max_results'] = rows_count
    logger.info("get %d rows from row #%d -> page: %d",
                rows_count, first_row, parameters['page'])

    # Columns ordering
    # order:[{"column":2,"dir":"desc"}]
    if 'order' in params and 'columns' in params and params['order']:
        sorted_columns = []
        for order in params['order']:
            idx = int(order['column'])
            if params['columns'][idx] and params['columns'][idx]['data']:
                logger.debug("sort by column %d (%s), order: %s ",
                             idx, params['columns'][idx]['data'], order['dir'])
                if order['dir'] == 'desc':
                    sorted_columns.append('-' + params['columns'][idx]['data'])
                else:
                    sorted_columns.append(params['columns'][idx]['data'])
        if sorted_columns:
            parameters['sort'] = ','.join(sorted_columns)

        logger.info("backend order request parameters: %s", parameters)

    # Individual column search parameter
    searched_columns = []
    if 'columns' in params and params['columns']:
        for column in params['columns']:
            if 'searchable' not in column or 'search' not in column:  # pragma: no cover
                continue
            if 'value' not in column['search'] or not column['search']['value']:
                continue
            logger.debug("search column '%s' for '%s'",
                         column['data'], column['search']['value'])
            for field in self.table_columns:
                if field['data'] != column['data']:
                    continue
                # Some specific types...
                if field['type'] == 'boolean':
                    searched_columns.append(
                        {column['data']: column['search']['value'] == 'true'})
                elif field['type'] == 'integer':
                    searched_columns.append(
                        {column['data']: int(column['search']['value'])})
                elif field['format'] == 'select':
                    # A multi-valued select search becomes a $in clause
                    values = column['search']['value'].split(',')
                    if len(values) > 1:
                        searched_columns.append({column['data']: {"$in": values}})
                    else:
                        searched_columns.append({column['data']: values[0]})
                # ... the other fields :)
                else:
                    # Do not care about 'smart' and 'caseInsensitive' boolean parameters ...
                    if column['search']['regex']:
                        searched_columns.append(
                            {column['data']:
                                {"$regex": ".*" + column['search']['value'] + ".*"}})
                    else:
                        searched_columns.append(
                            {column['data']: column['search']['value']})
                break

    logger.info("backend search columns parameters: %s", searched_columns)

    # Global search parameter
    # search:{"value":"test","regex":false}
    searched_global = []
    # pylint: disable=too-many-nested-blocks
    # Will be too complex else ...
    if 'search' in params and 'columns' in params and params['search']:
        if 'value' in params['search'] and params['search']['value']:
            logger.debug("search requested, value: %s ", params['search']['value'])
            for column in params['columns']:
                if 'searchable' in column and column['searchable']:
                    logger.debug("search global '%s' for '%s'",
                                 column['data'], params['search']['value'])
                    if 'regex' in params['search']:
                        if params['search']['regex']:
                            searched_global.append(
                                {column['data']:
                                    {"$regex": ".*" + params['search']['value'] + ".*"}})
                        else:
                            searched_global.append(
                                {column['data']: params['search']['value']})

    logger.info("backend search global parameters: %s", searched_global)

    # BUG FIX: these branches must be exclusive. The previous code used three
    # independent 'if' statements, so when both individual column filters and a
    # global search were present, the combined "$and" clause was immediately
    # overwritten and the column filters were silently lost.
    if searched_columns and searched_global:
        parameters['where'] = {"$and": [{"$and": searched_columns},
                                        {"$or": searched_global}]}
    elif searched_columns:
        parameters['where'] = {"$and": searched_columns}
    elif searched_global:
        parameters['where'] = {"$or": searched_global}

    # Embed linked resources
    parameters['embedded'] = {}
    for field in self.table_columns:
        if field['type'] == 'objectid' and field['format'] != 'objectid':
            parameters['embedded'].update({field['data']: 1})
    if parameters['embedded']:
        logger.info("backend embedded parameters: %s", parameters['embedded'])

    # Update global table records count, require total count from backend
    self.records_total = self.backend.count(self.object_type)

    # Request objects from the backend ...
    logger.debug("table data get parameters: %s", parameters)
    items = self.backend.get(self.object_type, params=parameters)
    if not items:
        # Empty response
        return json.dumps({
            # draw is the request number ...
            "draw": int(params.get('draw', '0')),
            "recordsTotal": 0,
            "recordsFiltered": 0,
            "data": []
        })

    # Create an object of the class matching the managed object type
    object_class = [kc for kc in self.datamgr.known_classes
                    if kc.get_type() == self.object_type][0]
    bo_object = object_class()

    # Update inner properties from the class-declared property names (if any)
    self.id_property = '_id'
    if hasattr(bo_object.__class__, 'id_property'):
        self.id_property = bo_object.__class__.id_property
    self.name_property = 'name'
    if hasattr(bo_object.__class__, 'name_property'):
        self.name_property = bo_object.__class__.name_property
    self.status_property = 'status'
    if hasattr(bo_object.__class__, 'status_property'):
        self.status_property = bo_object.__class__.status_property

    # Change item content: replace raw backend values with HTML representations
    for item in items:
        bo_object = object_class(item)
        logger.debug("livestate object item: %s", bo_object)
        for key in item.keys():
            for field in self.table_columns:
                if field['data'] != key:
                    continue
                # Specific fields
                if field['data'] == self.name_property:
                    item[key] = bo_object.get_html_link(
                        prefix=request.params.get('links'))
                if field['data'] == self.status_property:
                    item[key] = bo_object.get_html_state()
                if field['data'] == "business_impact":
                    item[key] = Helper.get_html_business_impact(
                        bo_object.business_impact)
                # Specific fields type
                if field['type'] == 'datetime' or field['format'] == 'date':
                    item[key] = bo_object.get_date(item[key])
                if field['type'] == 'boolean':
                    item[key] = Helper.get_on_off(item[key])
                if field['type'] == 'list':
                    item[key] = Helper.get_html_item_list(
                        bo_object.id, key, getattr(bo_object, key),
                        title=field['title'])
                if field['type'] == 'objectid' and \
                        key in parameters['embedded'] and item[key]:
                    # Replace the embedded object with a link to its own page
                    related_object_class = [kc for kc in self.datamgr.known_classes
                                            if kc.get_type() == field['format']][0]
                    linked_object = related_object_class(item[key])
                    item[key] = linked_object.get_html_link(
                        prefix=request.params.get('links'))
                break
        # Very specific fields... responsive tables get an extra empty column
        if self.responsive:
            item['#'] = ''

    # Total number of filtered records
    self.records_filtered = self.records_total
    if 'where' in parameters and parameters['where'] != {}:
        logger.debug("update filtered records: %s", parameters['where'])
        self.records_filtered = len(items)
    logger.info("filtered records: %d out of total: %d",
                self.records_filtered, self.records_total)

    # Prepare response
    rsp = {
        # draw is the request number ...
        "draw": int(params.get('draw', '0')),
        "recordsTotal": self.records_total,
        "recordsFiltered": self.records_filtered,
        "data": items
    }
    return json.dumps(rsp)
def table_data(self, plugin_table):
    # Because there are many locals needed :)
    # pylint: disable=too-many-locals
    """Return elements data in json format as of the Datatables SSP protocol.

    More info: https://datatables.net/manual/server-side

    :param plugin_table: table description used by ``Helper.decode_search`` to
        translate the global search pattern into a backend filter
        (presumably the owning plugin's table configuration — TODO confirm)

    Request parameters (some are json.stringify'ed on the client side):
    - object_type: object type managed by the datatable
    - links: url prefix to be used by the links in the table
    - embedded: true / false whether the table is embedded by an external application
      **Note**: those three first parameters are not datatable specific parameters :)
    - draw: request index, echoed back in the response
    - start / length: pagination (first requested row / number of rows)
    - search[value] / search[regex]: global search
    - order[i][column] / order[i][dir]: columns ordering (asc/desc)
    - columns[i][data|name|searchable|orderable|search]: per-column description

    Response data (json):
    - draw: echoed request index
    - recordsTotal: total records, before filtering
    - recordsFiltered: total records after filtering (not just the returned page)
    - data: an array of data source objects, one for each row
    - error (optional): error message, only included on error
    """
    # Manage request parameters ...
    logger.info("request data for table: %s, templates: %s",
                request.forms.get('object_type'), self.templates)

    # Because of specific datatables parameters name (eg. columns[0] ...)
    # ... some parameters have been json.stringify on client side !
    params = {}
    for key in list(request.params.keys()):
        if key in ['columns', 'order', 'search']:
            params[key] = json.loads(request.params.get(key))
        else:
            params[key] = request.params.get(key)
    # params now contains 'valid' query parameters as we should have found them ...
    logger.debug("table request parameters: %s", params)

    parameters = {}

    # Manage page number ...
    # start is the first requested row and we must transform to a page count ...
    first_row = int(params.get('start', '0'))
    # length is the number of requested rows
    rows_count = int(params.get('length', '25'))
    parameters['page'] = (first_row // rows_count) + 1
    parameters['max_results'] = rows_count
    logger.debug("get %d rows from row #%d -> page: %d",
                 rows_count, first_row, parameters['page'])

    # Columns ordering
    # order:[{"column":2,"dir":"desc"}]
    if 'order' in params and 'columns' in params and params['order']:
        sorted_columns = []
        for order in params['order']:
            idx = int(order['column'])
            if params['columns'][idx] and params['columns'][idx]['data']:
                logger.debug(
                    "sort by column %d (%s), order: %s ",
                    idx, params['columns'][idx]['data'], order['dir']
                )
                # A leading '-' requests a descending sort from the backend
                if order['dir'] == 'desc':
                    sorted_columns.append('-' + params['columns'][idx]['data'])
                else:
                    sorted_columns.append(params['columns'][idx]['data'])
        if sorted_columns:
            parameters['sort'] = ','.join(sorted_columns)

        logger.info("backend order request parameters: %s", parameters)

    # Individual column search parameter
    s_columns = []
    if 'columns' in params and params['columns']:
        for column in params['columns']:
            if 'searchable' not in column or 'search' not in column:  # pragma: no cover
                continue
            if 'value' not in column['search'] or not column['search']['value']:
                continue
            logger.debug("search column '%s' for '%s'",
                         column['data'], column['search']['value'])
            for field in self.table_columns:
                if field['data'] != column['data']:
                    continue
                # Some specific types...
                if field['type'] == 'boolean':
                    s_columns.append(
                        {column['data']: column['search']['value'] == 'true'}
                    )
                elif field['type'] == 'integer':
                    s_columns.append(
                        {column['data']: int(column['search']['value'])}
                    )
                elif field['format'] == 'select':
                    # A multi-valued select search becomes a $in clause
                    values = column['search']['value'].split(',')
                    if len(values) > 1:
                        s_columns.append(
                            {
                                column['data']: {
                                    "$in": values
                                }
                            }
                        )
                    else:
                        s_columns.append(
                            {column['data']: values[0]}
                        )
                # ... the other fields :)
                else:
                    # Do not care about 'smart' and 'caseInsensitive' boolean parameters ...
                    if column['search']['regex']:
                        s_columns.append(
                            {
                                column['data']: {
                                    "$regex": ".*" + column['search']['value'] + ".*"
                                }
                            }
                        )
                    else:
                        s_columns.append(
                            {column['data']: column['search']['value']}
                        )
                break

    logger.info("backend search individual columns parameters: %s", s_columns)

    # Global search parameter
    # search:{"value":"test","regex":false}
    s_global = {}
    # pylint: disable=too-many-nested-blocks
    # Will be too complex else ...
    if 'search' in params and 'columns' in params and params['search']:
        # params['search'] contains something like: {u'regex': False, u'value': u'name:pi1'}
        # global regex is always ignored ... in favor of the column declared regex
        logger.info("global search requested: %s ", params['search'])
        if 'value' in params['search'] and params['search']['value']:
            # There is something to search for...
            logger.debug("search requested, value: %s ", params['search']['value'])

            # New strategy: decode search patterns...
            search = Helper.decode_search(params['search']['value'], plugin_table)
            logger.info("decoded search pattern: %s", search)

            # Old strategy (kept for reference): search for the value in all
            # the searchable columns...
            # if not search:
            #     logger.info("applying datatable search pattern...")
            #     for column in params['columns']:
            #         if not column['searchable']:
            #             continue
            #         logger.debug("search global '%s' for '%s'",
            #                      column['data'], params['search']['value'])
            #         if 'regex' in params['search']:
            #             if params['search']['regex']:
            #                 s_global.update(
            #                     {column['data']: {
            #                         "$regex": ".*" + params['search']['value'] + ".*"}})
            #             else:
            #                 s_global.update({column['data']: params['search']['value']})

            s_global = search

    logger.info("backend search global parameters: %s", s_global)

    # Specific hack to filter the log check results that are not dated!
    if self.object_type == 'logcheckresult':
        s_columns.append({"last_check": {"$ne": 0}})

    # Branches are exclusive: combined filter, column filters only, or global only
    if s_columns and s_global:
        parameters['where'] = {"$and": [{"$and": s_columns}, s_global]}
    elif s_columns:
        parameters['where'] = {"$and": s_columns}
    elif s_global:
        parameters['where'] = s_global

    # Embed linked resources / manage templated resources
    parameters['embedded'] = {}
    for field in self.embedded:
        parameters['embedded'].update({field: 1})
    logger.debug("backend embedded parameters: %s", parameters['embedded'])

    # Count total elements excluding templates if necessary.
    # Four count variants depending on the template/visibility combination;
    # the same constraints are then injected into the 'where' filter.
    if self.is_templated:
        if self.ui_visibility:
            self.records_total = self.datamgr.my_backend.count(
                self.object_type,
                params={'where': {'_is_template': self.templates, 'webui_visible': True}}
            )
        else:
            self.records_total = self.datamgr.my_backend.count(
                self.object_type, params={'where': {'_is_template': self.templates}}
            )
        if 'where' in parameters:
            parameters['where'].update({'_is_template': self.templates})
        else:
            parameters['where'] = {'_is_template': self.templates}
    else:
        if self.ui_visibility:
            self.records_total = self.datamgr.my_backend.count(
                self.object_type, params={'where': {'webui_visible': True}}
            )
        else:
            self.records_total = self.datamgr.my_backend.count(self.object_type)
        if self.ui_visibility:
            if 'where' in parameters:
                parameters['where'].update({'webui_visible': True})
            else:
                parameters['where'] = {'webui_visible': True}

    # Request objects from the backend ...
    logger.info("table data get parameters: %s", parameters)

    items = self.datamgr.my_backend.get(self.object_type, params=parameters)
    logger.info("table data got %d items", len(items))
    if not items:
        logger.info("No backend elements match search criteria: %s", parameters)
        # Empty response
        return json.dumps({
            # draw is the request number ...
            "draw": int(params.get('draw', '0')),
            "recordsTotal": 0,
            "recordsFiltered": 0,
            "data": []
        })

    # Create an object of the class matching the managed object type
    object_class = [kc for kc in self.datamgr.known_classes
                    if kc.get_type() == self.object_type]
    if not object_class:  # pragma: no cover, should never happen!
        logger.warning("datatable, unknown object type: %s", self.object_type)
        # Empty response
        return json.dumps({
            # draw is the request number ...
            "draw": int(params.get('draw', '0')),
            "recordsTotal": 0,
            "recordsFiltered": 0,
            "data": []
        })

    # Update table inner properties with the object class defined properties
    object_class = object_class[0]
    self.id_property = '_id'
    if hasattr(object_class, 'id_property'):
        self.id_property = object_class.id_property
    self.name_property = 'name'
    if hasattr(object_class, 'name_property'):
        self.name_property = object_class.name_property
    self.status_property = 'status'
    if hasattr(object_class, 'status_property'):
        self.status_property = object_class.status_property
    logger.debug("datatable, object type: '%s' and properties: %s / %s / %s",
                 object_class, self.id_property, self.name_property,
                 self.status_property)

    # Change item content...
    rows = []
    # Total number of filtered records
    self.records_filtered = self.records_total
    for item in items:
        bo_object = object_class(item)
        if not bo_object.ui_visible:
            logger.debug("Not UI visible object: %s", bo_object)
            continue
        # NOTE(review): duplicated log line (info + debug) — looks like
        # leftover debugging; confirm before removing.
        logger.info("table data object: %s", bo_object)
        logger.debug("table data object: %s", bo_object)

        # This is an awful hack that allows to update the objects filtered
        # for a table.
        # Two main interests:
        # - update the backend because some massive modifications are necessary
        #   for testing
        # - prepare a massive update feature in the Web UI :)
        # -----
        # if self.is_templated and self.templates and self.object_type == 'service':
        #     logger.warning("service template: %s for host: %s",
        #                    bo_object.name, bo_object['host'])
        #     if bo_object['host'].name.startswith('fdj'):
        #         logger.info("To be updated...")
        #         data = {
        #             "check_freshness": True,
        #             "freshness_threshold": 86400,
        #             "passive_checks_enabled": True,
        #             "active_checks_enabled": False
        #         }
        #         result = self.datamgr.update_object(element=bo_object, data=data)
        #         if result is True:
        #             logger.info("updated.")
        #         else:
        #             logger.error("update failed!")
        # -----

        # Each item contains the total number of records matching the search filter
        self.records_filtered = item['_total']

        row = {}
        row['DT_RowData'] = {}
        row['_id'] = bo_object.id
        for field in self.table_columns:
            logger.debug(" - field: %s", field)
            # Specific fields
            if field['data'] == self.name_property:
                # Create a link to navigate to the item page
                row[self.name_property] = bo_object.html_link
                # Store the item name in a specific field of the row.
                # The value will be retrieved by the table actions (ack, recheck, ...)
                row['DT_RowData'].update(
                    {"object_%s" % self.object_type: bo_object.name})
                continue

            if field['data'] == self.status_property:
                # Replace the text status with the specific item HTML state
                row[self.status_property] = bo_object.get_html_state(
                    text=None, title=bo_object.status)
                # Use the item status to specify the table row class
                # row['DT_RowClass'] = "table-row-%s" % (bo_object.status.lower())
                continue

            if field['data'] in ['_overall_state_id', 'overall_state', 'overall_status']:
                # Get the item overall state from the data manager
                f_get_overall_state = getattr(
                    self.datamgr, 'get_%s_overall_state' % self.object_type)
                if f_get_overall_state:
                    (dummy, overall_status) = f_get_overall_state(bo_object)

                    # Get element state configuration
                    row[field['data']] = ElementState().get_html_state(
                        self.object_type, bo_object,
                        text=None,
                        title=bo_object.overall_state_to_title[bo_object.overall_state],
                        use_status=overall_status
                    )
                    # Use the item overall state to specify the table row class
                    row['DT_RowClass'] = "table-row-%s" % (overall_status)
                else:  # pragma: no cover, should never happen!
                    logger.warning("Missing get_overall_state method for: %s",
                                   self.object_type)
                    row[field['data']] = 'XxX'
                continue

            if "business_impact" in field['data']:
                # Replace the BI count with the specific item HTML formatting
                row[field['data']] = Helper.get_html_business_impact(
                    bo_object.business_impact)
                continue

            # Specific fields type
            if field['type'] == 'datetime' or field['format'] == 'datetime':
                # Replace the timestamp with the formatted date
                row[field['data']] = bo_object.get_date(bo_object[field['data']])
                continue

            if field['type'] == 'boolean':
                # Replace the boolean value with the specific item HTML formatting
                row[field['data']] = Helper.get_on_off(bo_object[field['data']])
                continue

            if field['type'] == 'list':
                # Replace the list with the specific list HTML formatting
                if hasattr(bo_object, field['data']):
                    row[field['data']] = Helper.get_html_item_list(
                        bo_object.id, field['data'],
                        getattr(bo_object, field['data']), title=field['title']
                    )
                else:
                    row[field['data']] = 'Unknown'
                continue

            if field['type'] == 'dict':
                # Replace the dictionary with the specific dict HTML formatting
                row[field['data']] = Helper.get_html_item_list(
                    bo_object.id, field['data'],
                    getattr(bo_object, field['data']), title=field['title']
                )
                continue

            if field['type'] == 'objectid':
                if isinstance(bo_object[field['data']], BackendElement):
                    row[field['data']] = bo_object[field['data']].get_html_link(
                        prefix=request.params.get('links')
                    )
                    # row['DT_RowData'].update({
                    #     "object_%s" % field['data']: bo_object[field['data']].name
                    # })
                else:
                    logger.warning("Table field is supposed to be an object: %s, %s = %s",
                                   bo_object.name, field['data'],
                                   getattr(bo_object, field['data']))
                    row[field['data']] = getattr(bo_object, field['data'])
                    # An unresolved reference to the resource itself is elided
                    if row[field['data']] == field['resource']:
                        row[field['data']] = '...'
                continue

            # For any non-specific fields, send the field value to the table
            row[field['data']] = getattr(bo_object, field['data'], 'unset')
            logger.debug(" -> field: %s", field)

        logger.debug("table data row: %s", row)
        # logger.debug("Table row: %s", row)
        rows.append(row)

    logger.debug("filtered records: %d out of total: %d",
                 self.records_filtered, self.records_total)

    # Send response
    return json.dumps({
        "draw": int(float(params.get('draw', '0'))),
        "recordsTotal": self.records_total,
        "recordsFiltered": self.records_filtered,
        "data": rows
    })
def table_data(self):
    # pylint: disable=too-many-locals
    """Return elements data in json format as of the Datatables SSP protocol.

    More info: https://datatables.net/manual/server-side

    Request parameters (posted as form data, some json.stringify'ed):
    - draw: request index, echoed back in the response
    - start / length: pagination (first requested row / number of rows)
    - search[value] / search[regex]: global search applied to every searchable column
    - order[i][column] / order[i][dir]: columns ordering (asc/desc)
    - columns[i][data|name|searchable|orderable|search]: per-column data source,
      name, flags and column-specific search value / regex

    Response data (json):
    - draw: echoed request index
    - recordsTotal: total records, before filtering
    - recordsFiltered: total records after filtering (not just the returned page)
    - data: an array of data source objects, one for each row
    - error (optional): error message, only included on error
    """
    # Manage request parameters ...
    # Because of specific datatables parameters name (eg. columns[0] ...)
    # ... some parameters have been json.stringify on client side !
    params = {}
    logger.debug("table request parameters: %s", request.forms)
    for key in request.forms.keys():
        if key in ('columns', 'order', 'search'):
            params[key] = json.loads(request.forms.get(key))
        else:
            params[key] = request.forms.get(key)
    # params now contains 'valid' query parameters as we should have found them ...
    logger.debug("table request parameters: %s", params)

    parameters = {}

    # Manage page number ...
    # start is the first requested row and we must transform to a page count ...
    first_row = int(params.get('start', '0'))
    # length is the number of requested rows
    rows_count = int(params.get('length', '25'))
    parameters['page'] = (first_row // rows_count) + 1
    parameters['max_results'] = rows_count
    logger.info(
        "get %d rows from row #%d -> page: %d",
        rows_count, first_row, parameters['page']
    )

    # Columns ordering
    # order:[{"column":2,"dir":"desc"}]
    if 'order' in params and 'columns' in params and params['order']:
        sorted_columns = []
        for order in params['order']:
            idx = int(order['column'])
            if params['columns'][idx] and params['columns'][idx]['data']:
                logger.debug(
                    "sort by column %d (%s), order: %s ",
                    idx, params['columns'][idx]['data'], order['dir']
                )
                if order['dir'] == 'desc':
                    sorted_columns.append('-' + params['columns'][idx]['data'])
                else:
                    sorted_columns.append(params['columns'][idx]['data'])
        if sorted_columns:
            parameters['sort'] = ','.join(sorted_columns)

        logger.info("backend order request parameters: %s", parameters)

    # Columns searching
    # Individual search parameter: each clause is built as a json fragment string
    searched_columns = []
    if 'columns' in params and params['columns']:
        for column in params['columns']:
            if 'searchable' not in column or 'search' not in column:  # pragma: no cover
                continue
            if 'value' not in column['search'] or not column['search']['value']:
                continue
            logger.debug(
                "search column '%s' for '%s'",
                column['data'], column['search']['value']
            )
            # Find the declared type of the searched column
            column_type = 'string'
            for field in self.table_columns:
                if field['name'] == column['data']:
                    column_type = field['type']
                    break

            if 'regex' in column['search']:
                if column['search']['regex']:
                    # BUG FIX: the integer branch built invalid json with an
                    # unquoted regex ({ "$regex": .*v.* }); the regex value must
                    # always be a quoted json string, whatever the column type.
                    searched_columns.append(
                        '{ "%s": { "$regex": ".*%s.*" } }' % (
                            column['data'], column['search']['value']
                        )
                    )
                else:
                    if column_type == 'integer':
                        # Integers are emitted unquoted in the json fragment
                        searched_columns.append(
                            '{ "%s": %s }' % (
                                column['data'], column['search']['value']
                            )
                        )
                    else:
                        searched_columns.append(
                            '{ "%s": "%s" }' % (
                                column['data'], column['search']['value']
                            )
                        )

    logger.info("backend search columns parameters: %s", searched_columns)

    # Columns searching
    # Global search parameter
    # search:{"value":"test","regex":false}
    searched_global = []
    if 'search' in params and 'columns' in params and params['search']:
        if 'value' in params['search'] and params['search']['value']:
            logger.debug(
                "search requested, value: %s ", params['search']['value']
            )
            for column in params['columns']:
                if 'searchable' in column and column['searchable']:
                    logger.debug(
                        "search global '%s' for '%s'",
                        column['data'], params['search']['value']
                    )
                    if 'regex' in params['search']:
                        if params['search']['regex']:
                            searched_global.append(
                                '{ "%s": { "$regex": ".*%s.*" } }' % (
                                    column['data'], params['search']['value']
                                )
                            )
                        else:
                            searched_global.append(
                                '{ "%s": "%s" }' % (
                                    column['data'], params['search']['value']
                                )
                            )

    logger.info("backend search global parameters: %s", searched_global)

    # BUG FIX: these branches must be exclusive. The previous code used three
    # independent 'if' statements, so when both individual column filters and a
    # global search were present, the combined "$and" clause was immediately
    # overwritten and the column filters were silently lost.
    if searched_columns and searched_global:
        parameters['where'] = '{"$and": [ %s, %s ] }' % (
            '{"$and": [' + ','.join(searched_columns) + '] }',
            '{"$or": [' + ','.join(searched_global) + '] }'
        )
    elif searched_columns:
        parameters['where'] = '{"$and": [' + ','.join(searched_columns) + '] }'
    elif searched_global:
        parameters['where'] = '{"$or": [' + ','.join(searched_global) + '] }'

    # Embed linked resources
    embedded = {}
    for field in self.table_columns:
        if field['type'] == 'objectid' and field['format'] != 'objectid':
            embedded.update({field['name']: 1})
    if embedded:
        parameters['embedded'] = json.dumps(embedded)
        logger.info("backend embedded parameters: %s", parameters['embedded'])

    # Request ALL objects count from the backend
    records_total = self.get_total_records()

    # Request objects from the backend ...
    logger.info("backend get parameters: %s", parameters)
    resp = self.backend.get(self.object_type, params=parameters)
    logger.debug("response _meta: %s", resp['_meta'])
    logger.debug("response _links: %s", resp['_links'])
    # logger.debug("response _items: %s", resp['_items'])

    # Total number of filtered records.
    # BUG FIX: the original code had a duplicated unconditional
    # 'recordsFiltered = int(resp['_meta']['total'])' that made its own
    # guarding condition dead code and raised KeyError when '_meta' was
    # missing. The meta total (query total, not just the returned page) is
    # used whenever available; the page length is only a fallback.
    records_filtered = len(resp['_items'])
    if '_meta' in resp:
        records_filtered = int(resp['_meta']['total'])

    # Create an object of the class matching the managed object type ...
    if resp['_items']:
        bo_object = None
        # NOTE(review): the managed classes are looked up in this module's
        # globals by their '_type' class attribute — confirm this registry
        # is really module-level.
        for k in globals().keys():
            if isinstance(globals()[k], type) and \
                    '_type' in globals()[k].__dict__ and \
                    globals()[k].getType() == self.object_type:
                bo_object = globals()[k]()
                logger.debug("created: %s", bo_object)
                break

        # Change item content: replace raw backend values with HTML representations
        for item in resp['_items']:
            logger.debug("Object: %s", bo_object)
            bo_object._update(item)

            for key in item.keys():
                for field in self.table_columns:
                    if field['name'] != key:
                        continue
                    # logger.debug("Setting field: %s", field)
                    if field['name'] == 'name':
                        item[key] = "%s %s" % (bo_object.get_html_state(), item[key])
                        break
                    if field['name'] == 'status':
                        item[key] = bo_object.get_html_state()
                        break
                    if field['type'] == 'datetime':
                        item[key] = bo_object.get_date()
                        break
                    if field['type'] == 'boolean':
                        item[key] = Helper.get_on_off(item[key])
                        break
                    if field['type'] == 'objectid' and key in embedded:
                        # Replace the embedded object with its HTML state
                        for k in globals().keys():
                            if isinstance(globals()[k], type) and \
                                    '_type' in globals()[k].__dict__ and \
                                    globals()[k]._type == field['format']:
                                linked_object = globals()[k](item[key])
                                logger.debug("created: %s", linked_object)
                                item[key] = linked_object.get_html_state(
                                    label=linked_object.get_name()
                                )
                                break

    # Prepare response
    rsp = {
        # draw is the request number ...
        "draw": int(params.get('draw', '0')),
        "recordsTotal": records_total,
        "recordsFiltered": records_filtered,
        "data": resp['_items']
    }
    return json.dumps(rsp)
def table_data(self, plugin_table): # Because there are many locals needed :) # pylint: disable=too-many-locals """Return elements data in json format as of Datatables SSP protocol More info: https://datatables.net/manual/server-side Example URL:: POST /? draw=1& columns[0][data]=alias& columns[0][name]=& columns[0][searchable]=true& columns[0][orderable]=true& columns[0][search][value]=& columns[0][search][regex]=false& ... order[0][column]=0& order[0][dir]=asc& start=0& length=10& search[value]=& search[regex]=false& Request parameters are Json formatted Request Parameters: - object_type: object type managed by the datatable - links: url prefix to be used by the links in the table - embedded: true / false whether the table is embedded by an external application **Note**: those three first parameters are not datatable specific parameters :) - draw, index parameter to be returned in the response Pagination: - start / length, for pagination Searching: - search (value or regexp) search[value]: Global search value. To be applied to all columns which are searchable search[regex]: true if search[value] is a regex Sorting: - order[i][column] / order[i][dir] index of the columns to order and sort direction (asc/desc) Columns: - columns[i][data]: Column's data source, as defined by columns.data. - columns[i][name]: Column's name, as defined by columns.name. - columns[i][searchable]: Flag to indicate if this column is searchable (true). - columns[i][orderable]: Flag to indicate if this column is orderable (true). - columns[i][search][value]: Search value to apply to this specific column. - columns[i][search][regex]: Flag to indicate if the search term for this column is a regex. Response data: - draw - recordsTotal: total records, before filtering (i.e. total number of records in the database) - recordsFiltered: Total records, after filtering (i.e. 
total number of records after filtering has been applied - not just the number of records being returned for this page of data). - data: The data to be displayed in the table. an array of data source objects, one for each row, which will be used by DataTables. - error (optional): Error message if an error occurs Not included if there is no error. """ # Manage request parameters ... logger.info("request data for table: %s, templates: %s", request.forms.get('object_type'), self.templates) # Because of specific datatables parameters name (eg. columns[0] ...) # ... some parameters have been json.stringify on client side ! params = {} for key in list(request.params.keys()): if key in ['columns', 'order', 'search']: params[key] = json.loads(request.params.get(key)) else: params[key] = request.params.get(key) # params now contains 'valid' query parameters as we should have found them ... logger.debug("table request parameters: %s", params) parameters = {} # Manage page number ... # start is the first requested row and we must transform to a page count ... 
first_row = int(params.get('start', '0')) # length is the number of requested rows rows_count = int(params.get('length', '25')) parameters['page'] = (first_row // rows_count) + 1 parameters['max_results'] = rows_count logger.debug("get %d rows from row #%d -> page: %d", rows_count, first_row, parameters['page']) # Columns ordering # order:[{"column":2,"dir":"desc"}] if 'order' in params and 'columns' in params and params['order']: sorted_columns = [] for order in params['order']: idx = int(order['column']) if params['columns'][idx] and params['columns'][idx]['data']: logger.debug("sort by column %d (%s), order: %s ", idx, params['columns'][idx]['data'], order['dir']) if order['dir'] == 'desc': sorted_columns.append('-' + params['columns'][idx]['data']) else: sorted_columns.append(params['columns'][idx]['data']) if sorted_columns: parameters['sort'] = ','.join(sorted_columns) logger.info("backend order request parameters: %s", parameters) # Individual column search parameter s_columns = [] if 'columns' in params and params['columns']: for column in params['columns']: if 'searchable' not in column or 'search' not in column: # pragma: no cover continue if 'value' not in column[ 'search'] or not column['search']['value']: continue logger.debug("search column '%s' for '%s'", column['data'], column['search']['value']) for field in self.table_columns: if field['data'] != column['data']: continue # Some specific types... if field['type'] == 'boolean': s_columns.append({ column['data']: column['search']['value'] == 'true' }) elif field['type'] == 'integer': s_columns.append( {column['data']: int(column['search']['value'])}) elif field['format'] == 'select': values = column['search']['value'].split(',') if len(values) > 1: s_columns.append({column['data']: {"$in": values}}) else: s_columns.append({column['data']: values[0]}) # ... the other fields :) else: # Do not care about 'smart' and 'caseInsensitive' boolean parameters ... 
if column['search']['regex']: s_columns.append({ column['data']: { "$regex": ".*" + column['search']['value'] + ".*" } }) else: s_columns.append( {column['data']: column['search']['value']}) break logger.info("backend search individual columns parameters: %s", s_columns) # Global search parameter # search:{"value":"test","regex":false} s_global = {} # pylint: disable=too-many-nested-blocks # Will be too complex else ... if 'search' in params and 'columns' in params and params['search']: # params['search'] contains something like: {u'regex': False, u'value': u'name:pi1'} # global regex is always ignored ... in favor of the column declared regex logger.info("global search requested: %s ", params['search']) if 'value' in params['search'] and params['search']['value']: # There is something to search for... logger.debug("search requested, value: %s ", params['search']['value']) # New strategy: decode search patterns... search = Helper.decode_search(params['search']['value'], plugin_table) logger.info("decoded search pattern: %s", search) # Old strategy: search for the value in all the searchable columns... # if not search: # logger.info("applying datatable search pattern...") # for column in params['columns']: # if not column['searchable']: # continue # logger.debug("search global '%s' for '%s'", # column['data'], params['search']['value']) # if 'regex' in params['search']: # if params['search']['regex']: # s_global.update( # {column['data']: { # "$regex": ".*" + params['search']['value'] + ".*"}}) # else: # s_global.update({column['data']: params['search']['value']}) s_global = search logger.info("backend search global parameters: %s", s_global) # Specific hack to filter the log check results that are not dated! 
if self.object_type == 'logcheckresult': s_columns.append({"last_check": {"$ne": 0}}) if s_columns and s_global: parameters['where'] = {"$and": [{"$and": s_columns}, s_global]} elif s_columns: parameters['where'] = {"$and": s_columns} elif s_global: parameters['where'] = s_global # Embed linked resources / manage templated resources parameters['embedded'] = {} for field in self.embedded: parameters['embedded'].update({field: 1}) logger.debug("backend embedded parameters: %s", parameters['embedded']) # Count total elements excluding templates if necessary if self.is_templated: if self.ui_visibility: self.records_total = self.datamgr.my_backend.count( self.object_type, params={ 'where': { '_is_template': self.templates, 'webui_visible': True } }) else: self.records_total = self.datamgr.my_backend.count( self.object_type, params={'where': { '_is_template': self.templates }}) if 'where' in parameters: parameters['where'].update({'_is_template': self.templates}) else: parameters['where'] = {'_is_template': self.templates} else: if self.ui_visibility: self.records_total = self.datamgr.my_backend.count( self.object_type, params={'where': { 'webui_visible': True }}) else: self.records_total = self.datamgr.my_backend.count( self.object_type) if self.ui_visibility: if 'where' in parameters: parameters['where'].update({'webui_visible': True}) else: parameters['where'] = {'webui_visible': True} # Request objects from the backend ... logger.info("table data get parameters: %s", parameters) items = self.datamgr.my_backend.get(self.object_type, params=parameters) logger.info("table data got %d items", len(items)) if not items: logger.info("No backend elements match search criteria: %s", parameters) # Empty response return json.dumps({ # draw is the request number ... "draw": int(params.get('draw', '0')), "recordsTotal": 0, "recordsFiltered": 0, "data": [] }) # Create an object ... 
object_class = [ kc for kc in self.datamgr.known_classes if kc.get_type() == self.object_type ] if not object_class: # pragma: no cover, should never happen! logger.warning("datatable, unknown object type: %s", self.object_type) # Empty response return json.dumps({ # draw is the request number ... "draw": int(params.get('draw', '0')), "recordsTotal": 0, "recordsFiltered": 0, "data": [] }) # Update table inner properties with the object class defined properties object_class = object_class[0] self.id_property = '_id' if hasattr(object_class, 'id_property'): self.id_property = object_class.id_property self.name_property = 'name' if hasattr(object_class, 'name_property'): self.name_property = object_class.name_property self.status_property = 'status' if hasattr(object_class, 'status_property'): self.status_property = object_class.status_property logger.debug( "datatable, object type: '%s' and properties: %s / %s / %s", object_class, self.id_property, self.name_property, self.status_property) # Change item content... rows = [] # Total number of filtered records self.records_filtered = self.records_total for item in items: bo_object = object_class(item) if not bo_object.ui_visible: logger.debug("Not UI visible object: %s", bo_object) continue logger.info("table data object: %s", bo_object) logger.debug("table data object: %s", bo_object) # This is an awful hack that allows to update the objects filtered for a table. 
# Two main interests: # - update the backend because some massive modifications are necessary for testing # - prepare a massive update feature in the Web UI :) # ----- # if self.is_templated and self.templates and self.object_type == 'service': # logger.warning("service template: %s for host: %s", # bo_object.name, bo_object['host']) # if bo_object['host'].name.startswith('fdj'): # logger.info("To be updated...") # data = { # "check_freshness": True, # "freshness_threshold": 86400, # "passive_checks_enabled": True, # "active_checks_enabled": False # } # result = self.datamgr.update_object(element=bo_object, data=data) # if result is True: # logger.info("updated.") # else: # logger.error("update failed!") # ----- # Each item contains the total number of records matching the search filter self.records_filtered = item['_total'] row = {} row['DT_RowData'] = {} row['_id'] = bo_object.id for field in self.table_columns: logger.debug(" - field: %s", field) # Specific fields if field['data'] == self.name_property: # Create a link to navigate to the item page row[self.name_property] = bo_object.html_link # Store the item name in a specific field of the row. # The value will be retrieved by the table actions (ack, recheck, ...) 
row['DT_RowData'].update( {"object_%s" % self.object_type: bo_object.name}) continue if field['data'] == self.status_property: # Replace the text status with the specific item HTML state row[self.status_property] = bo_object.get_html_state( text=None, title=bo_object.status) # Use the item status to specify the table row class # row['DT_RowClass'] = "table-row-%s" % (bo_object.status.lower()) continue if field['data'] in [ '_overall_state_id', 'overall_state', 'overall_status' ]: # Get the item overall state from the data manager f_get_overall_state = getattr( self.datamgr, 'get_%s_overall_state' % self.object_type) if f_get_overall_state: (dummy, overall_status) = f_get_overall_state(bo_object) # Get element state configuration row[field['data']] = ElementState().get_html_state( self.object_type, bo_object, text=None, title=bo_object.overall_state_to_title[ bo_object.overall_state], use_status=overall_status) # Use the item overall state to specify the table row class row['DT_RowClass'] = "table-row-%s" % (overall_status) else: # pragma: no cover, should never happen! 
logger.warning( "Missing get_overall_state method for: %s", self.object_type) row[field['data']] = 'XxX' continue if "business_impact" in field['data']: # Replace the BI count with the specific item HTML formatting row[field['data']] = Helper.get_html_business_impact( bo_object.business_impact) continue # Specific fields type if field['type'] == 'datetime' or field['format'] == 'datetime': # Replace the timestamp with the formatted date row[field['data']] = bo_object.get_date( bo_object[field['data']]) continue if field['type'] == 'boolean': # Replace the boolean vaue with the specific item HTML formatting row[field['data']] = Helper.get_on_off( bo_object[field['data']]) continue if field['type'] == 'list': # Replace the list with the specific list HTML formatting if hasattr(bo_object, field['data']): row[field['data']] = Helper.get_html_item_list( bo_object.id, field['data'], getattr(bo_object, field['data']), title=field['title']) else: row[field['data']] = 'Unknown' continue if field['type'] == 'dict': # Replace the dictionary with the specific dict HTML formatting row[field['data']] = Helper.get_html_item_list( bo_object.id, field['data'], getattr(bo_object, field['data']), title=field['title']) continue if field['type'] == 'objectid': if isinstance(bo_object[field['data']], BackendElement): row[field['data']] = bo_object[ field['data']].get_html_link( prefix=request.params.get('links')) # row['DT_RowData'].update({ # "object_%s" % field['data']: bo_object[field['data']].name # }) else: logger.warning( "Table field is supposed to be an object: %s, %s = %s", bo_object.name, field['data'], getattr(bo_object, field['data'])) row[field['data']] = getattr(bo_object, field['data']) if row[field['data']] == field['resource']: row[field['data']] = '...' 
continue # For any non-specific fields, send the field value to the table row[field['data']] = getattr(bo_object, field['data'], 'unset') logger.debug(" -> field: %s", field) logger.debug("table data row: %s", row) # logger.debug("Table row: %s", row) rows.append(row) logger.debug("filtered records: %d out of total: %d", self.records_filtered, self.records_total) # Send response return json.dumps({ "draw": int(float(params.get('draw', '0'))), "recordsTotal": self.records_total, "recordsFiltered": self.records_filtered, "data": rows })