def check_response(handler):
    """Assert the request posted both form fields as application/x-www-form-urlencoded."""
    content_length = int(handler.headers.get('content-length', 0))
    payload = six.ensure_str(handler.rfile.read(content_length))
    content_type = six.ensure_str(handler.headers['content-type'])
    assert content_type == 'application/x-www-form-urlencoded'
    # The two form fields may be serialized in either order.
    assert payload in ('baz=baf&hep=cat', 'hep=cat&baz=baf')
def handler(url, message, **kwargs):
    """Fake HTTP handler: verify the request target and body, then answer 200."""
    expected_url = "https://localhost:8089/servicesNS/testowner/testapp/foo/bar"
    assert six.ensure_str(url) == expected_url
    assert six.ensure_str(message["body"]) == "testkey=testvalue"
    response = {
        "status": 200,
        "headers": [],
    }
    return splunklib.data.Record(response)
def _read_chunk(istream):
    """Read one chunked-protocol message from *istream*.

    :param istream: A binary stream positioned at a transport header.
    :return: A ``(metadata, body_text)`` pair, or ``None`` at end of stream.
    :raises RuntimeError: If the transport header, metadata, or body cannot
        be read or parsed.
    """
    # noinspection PyBroadException
    # The protocol counts bytes, so a text-mode stream would mis-read lengths.
    assert isinstance(istream.read(0), six.binary_type), 'Stream must be binary'
    try:
        header = istream.readline()
    except Exception as error:
        raise RuntimeError(
            'Failed to read transport header: {}'.format(error))
    if not header:
        # EOF: no further chunks.
        return None
    # SearchCommand._header captures the metadata and body lengths as groups.
    match = SearchCommand._header.match(six.ensure_str(header))
    if match is None:
        raise RuntimeError(
            'Failed to parse transport header: {}'.format(header))
    metadata_length, body_length = match.groups()
    metadata_length = int(metadata_length)
    body_length = int(body_length)
    try:
        metadata = istream.read(metadata_length)
    except Exception as error:
        raise RuntimeError(
            'Failed to read metadata of length {}: {}'.format(
                metadata_length, error))
    decoder = MetadataDecoder()
    try:
        metadata = decoder.decode(six.ensure_str(metadata))
    except Exception as error:
        raise RuntimeError(
            'Failed to parse metadata of length {}: {}'.format(
                metadata_length, error))
    # if body_length <= 0:
    #     return metadata, ''
    body = ""
    try:
        # A zero-length body is valid; skip the read entirely in that case.
        if body_length > 0:
            body = istream.read(body_length)
    except Exception as error:
        raise RuntimeError('Failed to read body of length {}: {}'.format(
            body_length, error))
    return metadata, six.ensure_str(body)
def thefilter(self, record, pattern):
    """Collect every substring of the configured fields that matches *pattern*.

    :param record: A result record (mapping of field name to a value, or to a
        list of values for multivalued fields).
    :param pattern: A compiled regular expression; its ``findall`` results
        are collected.
    :return: A single string containing every match, each prefixed with a
        single space (empty string when nothing matches).
    """
    matched = []
    for fieldname in self.fieldnames:
        # Skip fields absent from this record.
        if fieldname not in record:
            continue
        value = record[fieldname]
        # Multivalue fields come through as a list; normalize single values
        # to a one-element list and run the pattern against each entry.
        if not isinstance(value, list):
            value = [value]
        for entry in value:
            matched.extend(pattern.findall(six.ensure_str(entry)))
    # Join with a leading space per match — preserves the original output
    # shape while avoiding quadratic string concatenation.
    return ''.join(' ' + match for match in matched)
def write_xml_document(self, document):
    """Writes a string representation of an ``ElementTree`` object to the
    output stream.

    :param document: An ``ElementTree`` object.
    """
    serialized = ET.tostring(document)
    self._out.write(ensure_str(serialized))
    self._out.flush()
def _run_command(self, name, action=None, phase=None, protocol=2): command = self._get_search_command_path(name) # P2 [ ] TODO: Test against the version of Python that ships with the version of Splunk used to produce each # recording # At present we use whatever version of splunk, if any, happens to be on PATH # P2 [ ] TODO: Examine the contents of the app and splunklib log files (?) expected, output, errors, process = None, None, None, None for recording in Recordings(name, action, phase, protocol): compressed_file = recording.input_file uncompressed_file = os.path.splitext(recording.input_file)[0] try: with gzip.open(compressed_file, 'rb') as ifile: with io.open(uncompressed_file, 'wb') as ofile: b = bytearray(io.DEFAULT_BUFFER_SIZE) n = len(b) while True: count = ifile.readinto(b) if count == 0: break if count < n: ofile.write(b[:count]) break ofile.write(b) with io.open(uncompressed_file, 'rb') as ifile: env = os.environ.copy() env['PYTHONPATH'] = os.pathsep.join(sys.path) process = Popen(recording.get_args(command), stdin=ifile, stderr=PIPE, stdout=PIPE, env=env) output, errors = process.communicate() with io.open(recording.output_file, 'rb') as ifile: expected = ifile.read() finally: os.remove(uncompressed_file) return six.ensure_str(expected), six.ensure_str( output), six.ensure_str(errors), process.returncode
def runOSProcess(command, logger, timeout=60, shell=True):
    """Run *command* as an OS process, killing it after *timeout* seconds.

    :param command: The command line to execute.
    :param logger: Logger used for progress and error reporting.
    :param timeout: Seconds to wait before killing the process (default 60).
    :param shell: Whether to run the command through the shell (default True).
    :return: Tuple of (stdout, stderr, success flag).
    """
    logger.debug("Begin OS process run of %s" % (command))

    # if this is Linux use the shell
    #if platform.system() != "Windows":
    #    shell = True

    try:
        proc = Popen(command, stdout=PIPE, stderr=PIPE, shell=shell)
    except Exception:
        # Popen itself failed (e.g. command not found): report and bail out.
        stderr = traceback.format_exc()
        stdout = ""
        res = False
        logger.error(stderr)
        return six.ensure_str(stdout), six.ensure_str(stderr), res

    # Arm a watchdog that kills the process if it exceeds the timeout.
    timer = Timer(timeout, proc.kill)
    try:
        timer.start()
        stdout, stderr = proc.communicate()
    finally:
        timer.cancel()

    # If the timer is no longer alive it already fired, meaning the process
    # was killed for exceeding the timeout.  (is_alive() replaces the
    # isAlive() alias removed in Python 3.9.)
    if not timer.is_alive():
        res = False
        logger.warning("OS process timed out after %s seconds, for command %s" % (timeout, command))
        proc.terminate()
        return "", "timeout after %s seconds" % (timeout), False
    else:
        if proc.returncode != 0:
            logger.debug(
                "OS process exited with non-zero code of %s, for command %s"
                % (proc.returncode, command))
            res = False
        else:
            logger.debug("OS process exited with zero code, for command %s" % (command))
            res = True

    return six.ensure_str(stdout), six.ensure_str(stderr), res
def stream(self, records):
    """Base64-encode or -decode ``self.field`` in every streamed record.

    In 'append' mode results land in a new 'base64' field; otherwise the
    source field is overwritten.  Multivalued fields are processed entry by
    entry, producing a list of results per record.

    :param records: Iterable of result records (dicts).
    :yield: Each record, with the destination field populated.
    """
    # Set the output field
    if self.mode == 'append':
        dest_field = 'base64'
    else:
        dest_field = self.field
    for record in records:
        # Return unchanged record if the field is not present
        if self.field not in record:
            yield record
            continue

        # Process field
        field_data_list = record[self.field]
        output_data_list = []

        # Ensure all values are in a list
        if not isinstance(field_data_list, list):
            field_data_list = [field_data_list]

        for field_data in field_data_list:
            try:
                # Base64 Encoding
                if self.action == 'encode':
                    # Expected input is UTF-8 read as Unicode.
                    # To pass other formats, it must be unescaped from backslash_escape
                    if self.backslash_escape:
                        field_data = field_data.encode('utf-8', errors='ignore').decode('unicode_escape')
                    field_data = field_data.encode(self.encoding, errors='ignore')
                    # Add encoded ASCII data to output
                    output_data_list.append(ensure_str(
                        to_b64(field_data, custom_alphabet=self.alphabet)
                    ))
                # Base64 Decoding
                else:
                    output_data = from_b64(field_data, custom_alphabet=self.alphabet, recurse=self.recurse)
                    # Try specified encoding
                    if self.encoding:
                        try:
                            decode_attempt = output_data.decode(self.encoding, errors='strict')
                            # Embedded NULs cannot pass cleanly through stdout,
                            # so only accept a NUL-free decode here.
                            if '\x00' not in decode_attempt:
                                output_data_list.append(decode_attempt)
                                continue
                        except UnicodeDecodeError:
                            pass
                    # Backlash escape output
                    # Null values will break the data passed back through stdout
                    if self.backslash_escape or b'\x00' in output_data:
                        output_data_list.append(
                            backslash_escape(output_data)
                        )
                    # If encoding was not set, backslash_escape was not set, and no null found
                    else:
                        output_data_list.append(
                            output_data.decode('utf8', errors='replace')
                        )
            except Exception as e:
                # Surface conversion errors unless the user chose to suppress them.
                if not self.suppress_error:
                    raise e
        record[dest_field] = output_data_list
        yield record
def check_response(handler):
    """Assert the request posted the expected JSON payload."""
    content_length = int(handler.headers.get('content-length', 0))
    payload = handler.rfile.read(content_length)
    content_type = six.ensure_str(handler.headers['content-type'])
    assert content_type == 'application/json'
    assert json.loads(payload)["baz"] == "baf"
def check_response(handler):
    """Assert the request body is exactly 'foo=bar'."""
    content_length = int(handler.headers.get('content-length', 0))
    payload = handler.rfile.read(content_length)
    assert six.ensure_str(payload) == "foo=bar"
def __init__(self, version, meta, data):
    """Capture a recorded chunk: protocol version, JSON metadata, CSV payload."""
    self.version = six.ensure_str(version)
    self.meta = json.loads(meta)
    csv_dialect = splunklib.searchcommands.internals.CsvDialect
    payload = io.StringIO(data.decode("utf-8"))
    self.data = csv.DictReader(payload, dialect=csv_dialect)