def create_job(self, *args, **kwargs):
    """Forward a search-job creation request to splunkd and wrap the
    response in an ODataEntity.

    On success `output.data` carries either the dispatched search id
    (``sid``) or, for oneshot searches, the inline result rows.
    NOTE(review): no return statement is visible in this block — confirm
    whether the caller renders `output`/`responseCode` elsewhere or a
    trailing return was truncated.
    """
    output = ODataEntity()
    responseCode = 500

    try:
        serverStatus, serverResponse = self.forward_request()
        responseCode = serverStatus.status

        root = et.fromstring(serverResponse)
        if root.findtext('sid'):
            # async dispatch: splunkd answered with a search id
            output.data = {'sid': root.findtext('sid')}
        elif self.request["form"].get("exec_mode", "").lower() == "oneshot":
            # oneshot searches return their results inline instead of a sid
            output.data = self._parseResultData(root, format=ResultFormat.ROW)

        # service may return messages in the body; try to parse them
        try:
            msg = splunk.rest.extractMessages(root)
            if msg:
                output.messages.extend(msg)
        except Exception:
            # best-effort: a malformed message block is non-fatal
            # (narrowed from a bare except so ^C/SystemExit still propagate)
            pass

    except splunk.RESTException as e:
        responseCode = e.statusCode
        output.messages.append({
            'type': 'HTTP',
            'text': '%s %s' % (e.statusCode, e.msg)
        })
        if e.extendedMessages:
            output.messages.extend(e.extendedMessages)
def create_job(self, *args, **kwargs):
    """Forward a search-job creation request to splunkd and wrap the
    response in an ODataEntity.

    ``output.data`` ends up holding the new job's ``sid``, or the inline
    result rows when ``exec_mode=oneshot`` was requested.
    NOTE(review): this block has no visible return — verify the caller's
    expectation or whether a trailing return was truncated.
    """
    output = ODataEntity()
    responseCode = 500

    try:
        serverStatus, serverResponse = self.forward_request()
        responseCode = serverStatus.status

        root = et.fromstring(serverResponse)
        if root.findtext('sid'):
            # normal (async) dispatch: response body carries the sid
            output.data = {'sid': root.findtext('sid')}
        elif self.request["form"].get("exec_mode", "").lower() == "oneshot":
            # oneshot mode returns the results directly in the response
            output.data = self._parseResultData(root, format=ResultFormat.ROW)

        # service may return messages in the body; try to parse them
        try:
            msg = splunk.rest.extractMessages(root)
            if msg:
                output.messages.extend(msg)
        except Exception:
            # message extraction is best-effort; narrowed from bare except
            pass

    except splunk.RESTException as e:
        responseCode = e.statusCode
        output.messages.append({
            'type': 'HTTP',
            'text': '%s %s' % (e.statusCode, e.msg)
        })
        if e.extendedMessages:
            output.messages.extend(e.extendedMessages)
def entry2odata(self, entry_node, entity_class):
    """Parse an lxml Atom ``<entry>`` node into an ODataEntity.

    The entry's ``<content>`` payload becomes ``data`` (converted to a
    primitive when typed ``text/xml``, else wrapped as ``{'__text': …}``);
    ``eai:*`` keys are hoisted onto the entity metadata; Atom links, id
    and title are copied across.
    """
    node = entry_node

    tmpEntity = ODataEntity()
    tmpEntity.entity_class = entity_class

    content_nodes = node.xpath('a:content', namespaces={'a': ATOM_NS})
    if len(content_nodes):
        content_node = content_nodes[0]
        if content_node.get('type') == 'text/xml':
            # structured payload: convert the first child node
            tmpEntity.data = splunk.rest.format.nodeToPrimitive(
                content_node[0])
        else:
            tmpEntity.data = {'__text': content_node.text}

    # move the metadata around: eai:* keys belong on entity metadata,
    # not in the data payload
    if isinstance(tmpEntity.data, dict):
        to_delete = []
        for k in tmpEntity.data:
            if k.startswith('eai:') and k != 'eai:data':
                to_delete.append(k)
                if hasattr(tmpEntity.metadata, k[4:]):
                    setattr(tmpEntity.metadata, k[4:], tmpEntity.data[k])
                else:
                    # logging.warn is a deprecated alias of warning
                    logger.warning('encountered unknown EAI attribute: %s' % k)

        # the one exception: eai:data is surfaced to callers as rawdata
        if 'eai:data' in tmpEntity.data:
            tmpEntity.data['rawdata'] = tmpEntity.data['eai:data']
            to_delete.append('eai:data')

        for k in to_delete:
            del tmpEntity.data[k]

    # pull in all the links
    for link in node.xpath('a:link', namespaces={'a': ATOM_NS}):
        tmpEntity.metadata.links.append({
            'href': link.get('href'),
            'rel': link.get('rel')
        })

    # set other randoms
    tmpEntity.id = node.xpath('a:id', namespaces={'a': ATOM_NS})[0].text
    tmpEntity.name = node.xpath('a:title', namespaces={'a': ATOM_NS})[0].text

    return tmpEntity
def dispatch_job(self, owner, namespace, kwargs):
    """Dispatch a new search job via the splunkd search/jobs endpoint.

    :param owner: entity owner used to build the endpoint URI
    :param namespace: app namespace used to build the endpoint URI
    :param kwargs: POST arguments forwarded to the jobs endpoint
    NOTE(review): no return statement is visible here — confirm whether
    `output`/`responseCode` are consumed elsewhere or a return was truncated.
    """
    output = ODataEntity()
    responseCode = 500
    uri = splunk.entity.buildEndpoint('search', 'jobs',
                                      owner=owner,
                                      namespace=namespace)

    try:
        serverStatus, serverResponse = self.simpleRequest(
            uri, postargs=kwargs, method='POST', raiseAllErrors=True)
        responseCode = serverStatus.status

        root = et.fromstring(serverResponse)
        if root.findtext('sid'):
            output.data = {'sid': root.findtext('sid')}

        # service may return messages in the body; try to parse them
        try:
            msg = splunk.rest.extractMessages(root)
            if msg:
                output.messages.extend(msg)
        except Exception:
            # best-effort; narrowed from bare except so ^C still propagates
            pass

    except splunk.RESTException as e:
        responseCode = e.statusCode
        output.messages.append({
            'type': 'HTTP',
            'text': '%s %s' % (e.statusCode, e.msg)
        })
        if e.extendedMessages:
            output.messages.extend(e.extendedMessages)
def dispatch_job(self, owner, namespace, kwargs):
    """Dispatch a new search job through the splunkd search/jobs endpoint.

    :param owner: entity owner for the endpoint URI
    :param namespace: app namespace for the endpoint URI
    :param kwargs: POST arguments forwarded verbatim
    NOTE(review): no visible return — confirm the caller's expectation
    or whether a trailing return was truncated.
    """
    output = ODataEntity()
    responseCode = 500
    uri = splunk.entity.buildEndpoint("search", "jobs",
                                      owner=owner, namespace=namespace)

    try:
        serverStatus, serverResponse = self.simpleRequest(
            uri, postargs=kwargs, method="POST", raiseAllErrors=True)
        responseCode = serverStatus.status

        root = et.fromstring(serverResponse)
        if root.findtext("sid"):
            output.data = {"sid": root.findtext("sid")}

        # service may return messages in the body; try to parse them
        try:
            msg = splunk.rest.extractMessages(root)
            if msg:
                output.messages.extend(msg)
        except Exception:
            # best-effort parse; narrowed from a bare except
            pass

    except splunk.RESTException as e:
        responseCode = e.statusCode
        output.messages.append({"type": "HTTP",
                                "text": "%s %s" % (e.statusCode, e.msg)})
        if e.extendedMessages:
            output.messages.extend(e.extendedMessages)
def entry2odata(self, entry_node, entity_class):
    """Parse an lxml Atom ``<entry>`` node into an ODataEntity.

    Converts the ``<content>`` payload into ``data``, hoists ``eai:*``
    keys onto the entity metadata, and copies Atom links, id and title.
    """
    node = entry_node

    tmpEntity = ODataEntity()
    tmpEntity.entity_class = entity_class

    content_nodes = node.xpath("a:content", namespaces={"a": ATOM_NS})
    if len(content_nodes):
        content_node = content_nodes[0]
        if content_node.get("type") == "text/xml":
            tmpEntity.data = splunk.rest.format.nodeToPrimitive(content_node[0])
        else:
            tmpEntity.data = {"__text": content_node.text}

    # move the metadata around: eai:* keys belong on metadata, not data
    if isinstance(tmpEntity.data, dict):
        to_delete = []
        for k in tmpEntity.data:
            if k.startswith("eai:") and k != "eai:data":
                to_delete.append(k)
                if hasattr(tmpEntity.metadata, k[4:]):
                    setattr(tmpEntity.metadata, k[4:], tmpEntity.data[k])
                else:
                    # warning() replaces the deprecated warn() alias
                    logger.warning("encountered unknown EAI attribute: %s" % k)

        # the one exception: eai:data is exposed to callers as rawdata
        if "eai:data" in tmpEntity.data:
            tmpEntity.data["rawdata"] = tmpEntity.data["eai:data"]
            to_delete.append("eai:data")

        for k in to_delete:
            del tmpEntity.data[k]

    # pull in all the links
    for link in node.xpath("a:link", namespaces={"a": ATOM_NS}):
        tmpEntity.metadata.links.append({"href": link.get("href"),
                                         "rel": link.get("rel")})

    # set other randoms
    tmpEntity.id = node.xpath("a:id", namespaces={"a": ATOM_NS})[0].text
    tmpEntity.name = node.xpath("a:title", namespaces={"a": ATOM_NS})[0].text

    return tmpEntity
def http_simple_input(self, *args, **kwargs):
    """Proxy a simple-receiver HTTP input request straight to splunkd.

    Builds the target URI from the incoming request (stripping the
    ``/services/json/v1/`` prefix), replays the request with httplib2,
    converts the XML result fields into a flat dict, and returns
    ``(responseCode, rendered_odata)``.
    """
    # init
    output = ODataEntity()
    responseCode = 500
    serverResponse = None
    messages = []

    # Unfortunately, we can't use the Splunk API for this, so we just do it
    # ourselves
    base_url = "https://" + self.request['headers']['host'] + "/"
    path = self.request['path'].replace("/services/json/v1/", "")
    query = self.request["query"]
    query_string = ""
    if len(query):
        query_string = "?" + urllib.urlencode(query)
    uri = base_url + path + query_string

    # fetch data
    # Construct httplib2 such that it works with the version of httplib2
    # that requires SSL disabling
    try:
        h = httplib2.Http(timeout=splunk.rest.SPLUNKD_CONNECTION_TIMEOUT,
                          disable_ssl_certificate_validation=True)
    except TypeError:
        # older httplib2 lacks the SSL-validation kwarg
        h = httplib2.Http(timeout=splunk.rest.SPLUNKD_CONNECTION_TIMEOUT)

    serverStatus, serverResponse = h.request(
        uri, self.method,
        headers=self.request["headers"],
        body=self.request["payload"])
    responseCode = serverStatus.status

    # convert XML to struct
    if serverResponse:
        root = et.fromstring(serverResponse)
        result = {}
        for field in root.findall("results/result/field"):
            result[field.get("k")] = field.findtext("value/text")
        output.data = result

        # service may return messages in the body
        # BUGFIX: extractMessages returns a list; the original append()
        # nested that list inside `messages` (siblings all use extend)
        msg = splunk.rest.extractMessages(root)
        if msg:
            messages.extend(msg)

    # package and return
    output.messages = messages
    return responseCode, self.render_odata(output)
def http_simple_input(self, *args, **kwargs):
    """Proxy a simple-receiver HTTP input request straight to splunkd.

    Rebuilds the target URI from the incoming request, replays it with
    httplib2, flattens the XML result fields into a dict, and returns
    ``(responseCode, rendered_odata)``.
    """
    # init
    output = ODataEntity()
    responseCode = 500
    serverResponse = None
    messages = []

    # Unfortunately, we can't use the Splunk API for this, so we just do it
    # ourselves
    base_url = "https://" + self.request['headers']['host'] + "/"
    path = self.request['path'].replace("/services/json/v1/", "")
    query = self.request["query"]
    query_string = ""
    if len(query):
        query_string = "?" + urllib.urlencode(query)
    uri = base_url + path + query_string

    # fetch data
    # Construct httplib2 such that it works with the version of httplib2
    # that requires SSL disabling
    try:
        h = httplib2.Http(timeout=splunk.rest.SPLUNKD_CONNECTION_TIMEOUT,
                          disable_ssl_certificate_validation=True)
    except TypeError:
        # older httplib2 without the SSL-validation kwarg
        h = httplib2.Http(timeout=splunk.rest.SPLUNKD_CONNECTION_TIMEOUT)

    serverStatus, serverResponse = h.request(
        uri, self.method,
        headers=self.request["headers"],
        body=self.request["payload"])
    responseCode = serverStatus.status

    # convert XML to struct
    if serverResponse:
        root = et.fromstring(serverResponse)
        result = {}
        for field in root.findall("results/result/field"):
            result[field.get("k")] = field.findtext("value/text")
        output.data = result

        # service may return messages in the body
        # BUGFIX: extend, not append -- extractMessages returns a list and
        # append() nested it inside `messages` (sibling methods use extend)
        msg = splunk.rest.extractMessages(root)
        if msg:
            messages.extend(msg)

    # package and return
    output.messages = messages
    return responseCode, self.render_odata(output)
def entry2odata(self, entry_node, entity_class):
    """Parse an lxml Atom ``<entry>`` node into an ODataEntity.

    Extended variant: besides content/eai-metadata/links/id handling,
    it also copies the Atom ``published``, ``updated`` and author name
    into ``data``, and prefers ``data['name']`` over the Atom title.
    """
    node = entry_node

    tmpEntity = ODataEntity()
    tmpEntity.entity_class = entity_class
    tmpEntity.data = {}

    content_xpath = node.xpath('a:content', namespaces={'a': ATOM_NS})
    if (len(content_xpath) > 0):
        if (len(content_xpath[0]) > 0):
            # structured content: convert the first child element
            content_node = content_xpath[0][0]
            tmpEntity.data = splunk.rest.format.nodeToPrimitive(content_node)
        else:
            tmpEntity.data = {"data": content_xpath[0].text}

    # move the metadata around: eai:* keys belong on metadata, not data
    if isinstance(tmpEntity.data, dict):
        to_delete = []
        for k in tmpEntity.data:
            if k.startswith('eai:') and k != 'eai:data':
                to_delete.append(k)
                if hasattr(tmpEntity.metadata, k[4:]):
                    setattr(tmpEntity.metadata, k[4:], tmpEntity.data[k])
                else:
                    # warning() replaces the deprecated warn() alias
                    logger.warning('encountered unknown EAI attribute: %s' % k)

        # the one exception: eai:data is surfaced as rawdata
        if 'eai:data' in tmpEntity.data:
            tmpEntity.data['rawdata'] = tmpEntity.data['eai:data']
            to_delete.append('eai:data')

        for k in to_delete:
            del tmpEntity.data[k]

    # pull in all the links
    for link in node.xpath('a:link', namespaces={'a': ATOM_NS}):
        tmpEntity.metadata.links.append({
            'href': link.get('href'),
            'rel': link.get('rel')
        })

    # set other randoms; entity name prefers the payload's own "name"
    tmpEntity.id = node.xpath('a:id', namespaces={'a': ATOM_NS})[0].text
    tmpEntity.name = tmpEntity.data.get(
        "name",
        node.xpath('a:title', namespaces={'a': ATOM_NS})[0].text)

    published_info = node.xpath('a:published', namespaces={'a': ATOM_NS})
    if published_info:
        tmpEntity.data["published"] = published_info[0].text

    updated_info = node.xpath('a:updated', namespaces={'a': ATOM_NS})
    if updated_info:
        tmpEntity.data["updated"] = updated_info[0].text

    author_info = node.xpath('a:author/a:name', namespaces={'a': ATOM_NS})
    if author_info:
        tmpEntity.data["author"] = author_info[0].text

    return tmpEntity
def entry2odata(self, entry_node, entity_class):
    """Parse an lxml Atom ``<entry>`` node into an ODataEntity.

    Extended variant: in addition to content/eai-metadata/links/id, it
    copies Atom ``published``/``updated``/author-name into ``data`` and
    prefers ``data['name']`` over the Atom title for the entity name.
    """
    node = entry_node

    tmpEntity = ODataEntity()
    tmpEntity.entity_class = entity_class
    tmpEntity.data = {}

    content_xpath = node.xpath('a:content', namespaces={'a': ATOM_NS})
    if (len(content_xpath) > 0):
        if (len(content_xpath[0]) > 0):
            content_node = content_xpath[0][0]
            tmpEntity.data = splunk.rest.format.nodeToPrimitive(
                content_node)
        else:
            tmpEntity.data = {"data": content_xpath[0].text}

    # move the metadata around: eai:* keys are hoisted onto metadata
    if isinstance(tmpEntity.data, dict):
        to_delete = []
        for k in tmpEntity.data:
            if k.startswith('eai:') and k != 'eai:data':
                to_delete.append(k)
                if hasattr(tmpEntity.metadata, k[4:]):
                    setattr(tmpEntity.metadata, k[4:], tmpEntity.data[k])
                else:
                    # logging.warn is deprecated; warning() is canonical
                    logger.warning('encountered unknown EAI attribute: %s' % k)

        # the one exception: eai:data is exposed as rawdata
        if 'eai:data' in tmpEntity.data:
            tmpEntity.data['rawdata'] = tmpEntity.data['eai:data']
            to_delete.append('eai:data')

        for k in to_delete:
            del tmpEntity.data[k]

    # pull in all the links
    for link in node.xpath('a:link', namespaces={'a': ATOM_NS}):
        tmpEntity.metadata.links.append({
            'href': link.get('href'),
            'rel': link.get('rel')
        })

    # set other randoms; payload "name" wins over the Atom title
    tmpEntity.id = node.xpath('a:id', namespaces={'a': ATOM_NS})[0].text
    tmpEntity.name = tmpEntity.data.get(
        "name", node.xpath('a:title', namespaces={'a': ATOM_NS})[0].text)

    published_info = node.xpath('a:published', namespaces={'a': ATOM_NS})
    if published_info:
        tmpEntity.data["published"] = published_info[0].text

    updated_info = node.xpath('a:updated', namespaces={'a': ATOM_NS})
    if updated_info:
        tmpEntity.data["updated"] = updated_info[0].text

    author_info = node.xpath('a:author/a:name', namespaces={'a': ATOM_NS})
    if author_info:
        tmpEntity.data["author"] = author_info[0].text

    return tmpEntity