def __init__(self, input_bytes):
  """FuzzingHelper initializer.

  Wraps the raw fuzzer input in an atheris FuzzedDataProvider so that
  typed values (ints, strings, lists) can be consumed from it later.

  Args:
    input_bytes: Input randomized bytes used to create a FuzzedDataProvider.
  """
  # self.fdp is the provider consumed by the helper's Consume* methods.
  self.fdp = atheris.FuzzedDataProvider(input_bytes)
def TestOneInput(input_bytes):
  """Differential fuzz entry point: parse with ujson and json, compare dumps.

  Decodes the same text with both libraries, then re-encodes each parse
  result with the standard json encoder; a mismatch between the two dumps
  signals a decoding/encoding disagreement and is raised as an error.

  Args:
    input_bytes: Raw fuzzer-provided bytes.

  Raises:
    RuntimeError: If json and ujson disagree on the decoded structure.
  """
  provider = atheris.FuzzedDataProvider(input_bytes)
  text = provider.ConsumeUnicode(sys.maxsize)

  try:
    via_ujson = ujson.loads(text)
    via_json = json.loads(text)
  except Exception:
    # It would be interesting to enforce that if one of the libraries throws
    # an exception, the other does too. However, uJSON accepts many invalid
    # inputs that are uninteresting, such as "00". So, that is not done.
    return

  # Uncomment these lines to ignore the errors described in the docstring of
  # this file.
  # via_json = ClearAllIntegers(via_json)
  # via_ujson = ClearAllIntegers(via_ujson)

  # Both parse results are serialized with the *same* encoder so that only
  # decoding differences (not encoder formatting) can cause a mismatch.
  dumped_json = json.dumps(via_json)
  dumped_ujson = json.dumps(via_ujson)

  if dumped_json != dumped_ujson:
    raise RuntimeError(
        "Decoding/encoding disagreement!\nInput: %s\nJSON data: %s\nuJSON data: %s\nJSON-dumped: %s\nuJSON-dumped: %s\n"
        % (text, via_json, via_ujson, dumped_json, dumped_ujson))
def TestOneInput(input_bytes):
  """Round-trip fuzz entry point for ujson: decode, then re-encode.

  Inputs that ujson rejects with ValueError are simply skipped; for inputs
  it accepts, we only verify that re-encoding does not raise.

  Args:
    input_bytes: Raw fuzzer-provided bytes.
  """
  provider = atheris.FuzzedDataProvider(input_bytes)
  text = provider.ConsumeUnicode(sys.maxsize)

  try:
    parsed = ujson.loads(text)
  except ValueError:
    return

  # We make sure there's no error in encoding, but we don't actually compare
  # (encoded == original) because it's not entirely preserving. For example,
  # it does not preserve whitespace.
  reencoded = ujson.dumps(parsed)
  del reencoded
def TestOneInput(input_bytes):
  """Test randomized integer fuzzing input for tf.raw_ops.RaggedCountSparseOutput."""
  provider = atheris.FuzzedDataProvider(input_bytes)

  # Sizes are drawn from disjoint ranges so the splits list and the
  # values/weights lists get independent lengths.
  n_splits = provider.ConsumeIntInRange(0, 500)
  n_values = provider.ConsumeIntInRange(501, 1000)

  splits = provider.ConsumeIntListInRange(n_splits, 1, 100000)
  # First value of splits has to be 0.
  splits.insert(0, 0)
  # Last value of splits has to be length of the values/weights.
  splits.append(n_values)

  values = provider.ConsumeIntListInRange(n_values, 0, 100000)
  weights = provider.ConsumeIntListInRange(n_values, 0, 100000)

  # Only the op's side effects (potential crashes) matter; outputs ignored.
  _, _, _ = tf.raw_ops.RaggedCountSparseOutput(
      splits=splits,
      values=values,
      weights=weights,
      binary_output=False)