def read_items():
    fn = os.path.join(BASE, FN)
#    images_dir = os.path.join(BASE, HARTAKARUN, 'HK Category Images')
    lines = csv.reader(codecs.open(fn, encoding=ENCODING), delimiter=DELIMITER)
    lines = list(lines)
    records = []
    # the CSV is transposed: each row holds a field name in column 0 and its value in column 1
    headers = [l[0] for l in lines]
    values  = [l[1] for l in lines]
    print headers
    headers = [MAP_FIELDS_TO_MODEL[k] for k in headers]
    resolutions = read_resolutions()
    lines = [headers, values]

    for i, l in enumerate(lines[1:]):
        l = [unicode(x) for x in l]
        r = dict(zip(headers, l))
        for k, v in r.items():
            if 'date' in k or 'time' in k:
                r[k] = to_date(v)

            if len(v) > 255:
                print k
                print v
                print '-' * 20
        
        r['pk'] = str(i + 1)
        records.append(r)
        
    for x in records:
        print x
    print headers
    return records
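Every example on this page calls a to_date() helper that the snippets do not include. A minimal sketch of what such a helper could look like, assuming it accepts either a "YYYY-MM-DD[ HH:MM:SS]" string or an epoch timestamp (both assumptions; each project ships its own version):

# Hypothetical sketch of a to_date() helper; the accepted input formats are assumptions.
from datetime import datetime

def to_date(value):
    """Parse a date string or an epoch timestamp into a datetime."""
    if isinstance(value, (int, float)):          # epoch seconds (assumption)
        return datetime.fromtimestamp(value)
    for fmt in ('%Y-%m-%d %H:%M:%S', '%Y-%m-%d'):
        try:
            return datetime.strptime(value, fmt)
        except ValueError:
            continue
    raise ValueError('unrecognized date value: %r' % (value,))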
Example #2
def consumer(args):
    # connect to kafka
    brokers = args.kafka_brokers.split(",")
    kafka_consumer = kafka.KafkaConsumer(args.topic, bootstrap_servers=brokers)

    # if debug not set, write libpcap global header
    if args.debug == 0:
        sys.stdout.write(global_header())

    # start packet capture
    packet_count = 0
    for msg in kafka_consumer:

        # if debug not set, write the packet header and packet
        if args.debug == 0:
            sys.stdout.write(packet_header(msg.value, msg.key))
            sys.stdout.write(msg.value)

        elif packet_count % args.debug == 0:
            print "Packet: count=%s dt=%s topic=%s" % (packet_count, to_date(unpack_ts(msg.key)), args.topic)
            print to_hex(msg.value)

        packet_count += 1
        if args.packet_count > 0 and packet_count >= args.packet_count:
            break
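This consumer (and the matching producer in the next example) read args.kafka_brokers, args.topic, args.debug, args.packet_count and args.interface from an argparse namespace that is not shown. A minimal wiring that would satisfy both functions might look like the following; the flag names and defaults are assumptions, the originals may differ:

# Hypothetical argument parsing for the producer/consumer pair above;
# flag names and defaults are inferred from the attributes the code reads.
import argparse

def parse_args():
    parser = argparse.ArgumentParser(description='pipe packets between a NIC and Kafka')
    parser.add_argument('--kafka-brokers', default='localhost:9092',
                        help='comma-separated list of Kafka brokers')
    parser.add_argument('--topic', default='pcap', help='Kafka topic to read or write')
    parser.add_argument('--debug', type=int, default=0,
                        help='print every Nth packet instead of writing libpcap output')
    parser.add_argument('--packet-count', type=int, default=0,
                        help='stop after this many packets; 0 means no limit')
    parser.add_argument('--interface', default='eth0',
                        help='interface to capture from (producer only)')
    return parser.parse_args()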
Example #3
def producer(args):
    # connect to kafka
    producer = kafka.KafkaProducer(
        bootstrap_servers=args.kafka_brokers.split(","),
        partitioner=partitioner)

    # initialize packet capture
    capture = pcapy.open_live(args.interface, 65535, True, 3000)
    packet_count = 0

    # start packet capture
    while True:
        (pkt_hdr, pkt_raw) = capture.next()
        if pkt_hdr is not None:

            # send packet to kafka
            pkt_ts = timestamp(pkt_hdr)
            producer.send(args.topic, key=pack_ts(pkt_ts), value=pkt_raw)

            # debug messages, if needed
            packet_count += 1
            if args.debug > 0 and packet_count % args.debug == 0:
                print 'Sent Packet: count=%s dt=%s topic=%s' % (
                    packet_count, to_date(pkt_ts), args.topic)
                print to_hex(pkt_raw)
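The producer packs the capture timestamp into the Kafka message key with pack_ts, and the consumer recovers it with unpack_ts; neither helper is shown here. A plausible sketch, assuming the timestamp travels as a big-endian unsigned 64-bit count of epoch microseconds (an assumption about the wire format, not necessarily what the original uses):

# Hypothetical pack_ts/unpack_ts helpers for the Kafka key used above.
import struct

def pack_ts(ts):
    """Pack a float epoch timestamp into 8 bytes for use as a Kafka key."""
    return struct.pack('>Q', int(ts * 1e6))

def unpack_ts(key):
    """Recover the float epoch timestamp from an 8-byte Kafka key."""
    (micros,) = struct.unpack('>Q', key)
    return micros / 1e6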
Example #4
    def postproduction(self, records):
        IMAGES_DIR = self.IMAGES_DIR
        for r in records:
            for k, v in r.items():
                if 'date' in k or 'time' in k:
                    if r[k]:
                        r[k] = to_date(v)

            img_fn = r['image']
            img_src = os.path.join(BASE, IMAGES_DIR, img_fn)
            assert os.path.exists(img_src), img_src

            img_dst_folder = os.path.join(MEDIA_DIR, IMAGES_DIR)
            img_dst = os.path.join(img_dst_folder, img_fn)
            if not os.path.exists(img_dst_folder):
                os.makedirs(img_dst_folder)
            if not os.path.exists(img_dst):
                cmd = 'cp "%s" "%s"' % (img_src, img_dst)
                print cmd
                sh(cmd)
            r['image'] = os.path.join(IMAGES_DIR, img_fn)
            print r['image']

            # NOTE: this import/json.load runs once per record; hoisting it above the
            # loop would avoid re-reading the journal entries file on every iteration
            from import_journalentry import OUT_FN as fn_journalentries_json
            journalentries = json.load(open(fn_journalentries_json))
            for je in journalentries:
                fr = je['fields']['folio_number_from']
                fr = int(fr)
                to = je['fields']['folio_number_to']
                to = int(to)
                if fr <= int(r['folio_number']) <= to:
                    r['journalentry'] = je['pk']
        return records
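The image copy in postproduction shells out to cp; a pure-Python equivalent using the standard library would avoid quoting issues with unusual file names. A sketch of that step, reusing the same variable names as above:

# Equivalent copy step without shelling out; shutil.copy2 also preserves timestamps.
import os
import shutil

def copy_image(img_src, img_dst_folder, img_dst):
    if not os.path.exists(img_dst_folder):
        os.makedirs(img_dst_folder)
    if not os.path.exists(img_dst):
        shutil.copy2(img_src, img_dst)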
Example #5
def consumer(args):
    # connect to kafka
    brokers = args.kafka_brokers.split(",")
    kafka_consumer = kafka.KafkaConsumer(args.topic, bootstrap_servers=brokers)

    # if debug not set, write libpcap global header
    if args.debug == 0:
        sys.stdout.write(global_header())

    # start packet capture
    packet_count = 0
    for msg in kafka_consumer:

        # if debug not set, write the packet header and packet
        if args.debug == 0:
            sys.stdout.write(packet_header(msg.value, msg.key))
            sys.stdout.write(msg.value)

        elif packet_count % args.debug == 0:
            print 'Packet: count=%s dt=%s topic=%s' % (
                packet_count, to_date(unpack_ts(msg.key)), args.topic)
            print to_hex(msg.value)

        packet_count += 1
        if args.packet_count > 0 and packet_count >= args.packet_count:
            break
Example #6
def main(argv):
    filename_1 = "cve_mitre.dat"
    filename_2 = "cve_mitre_criteria.dat"

    file_1 = DataFileWriter(filename_1)
    file_2 = DataFileWriter(filename_2)

    handler = MyContentHandler()
    parse('allitems.xml', handler)

    for k in sorted(statistics.keys()):
        v = statistics[k]
        v2 = statistics_compare[k]
        line_1 = []
        line_2 = []

        date = to_date(k)

        use_after = v2[CRITERIA['criteria_1']]

        stackc = v[VULNERABILITY['stackc']]
        heapc = v[VULNERABILITY['heapc']]
        intc = v[VULNERABILITY['intc']]
        pointc = v[VULNERABILITY['pointc']]
        fmtc = v[VULNERABILITY['fmtc']]
        otherc = v[VULNERABILITY['otherc']]
        total = stackc + heapc + intc + pointc + fmtc + otherc

        if total == 0:
            percentage = 0
            print "ERROR (total=0) - {} - {} <-> {} <-> {}".format(
                k, v, v2, percentage)
        else:
            percentage = (float(use_after) / float(total)) * float(100)

        line_1.append(str(date))
        line_1.append(str(stackc))
        line_1.append(str(heapc))
        line_1.append(str(intc))
        line_1.append(str(pointc))
        line_1.append(str(fmtc))
        line_1.append(str(otherc))
        line_1.append(str(total))

        line_2.append(str(date))
        line_2.append(str(use_after))
        line_2.append(str(total))
        line_2.append(str(percentage))

        file_1.append("\t".join(line_1))
        file_2.append("\t".join(line_2))

        print "{} - {} <-> {} <-> {}".format(k, v, v2, percentage)

    file_1.close()
    file_2.close()

    return 0
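DataFileWriter is not defined in this snippet; from how it is used (append a tab-joined line, then close), it is most likely a thin wrapper around a text file. A minimal sketch consistent with that usage, which is an assumption about the original class:

# Hypothetical DataFileWriter; inferred from the append()/close() calls above.
class DataFileWriter(object):
    def __init__(self, filename):
        self._fh = open(filename, 'w')

    def append(self, line):
        """Write one already-formatted line followed by a newline."""
        self._fh.write(line + '\n')

    def close(self):
        self._fh.close()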
Example #7
def main(argv):
  filename_1 = "cve_mitre.dat"
  filename_2 = "cve_mitre_criteria.dat"

  file_1 = DataFileWriter(filename_1)
  file_2 = DataFileWriter(filename_2)

  handler = MyContentHandler()
  parse('allitems.xml', handler)

  for k in sorted(statistics.keys()):
    v = statistics[k]
    v2 = statistics_compare[k]
    line_1 = []
    line_2 = []

    date = to_date(k)
    
    use_after = v2[CRITERIA['criteria_1']]

    stackc = v[VULNERABILITY['stackc']]
    heapc = v[VULNERABILITY['heapc']] 
    intc = v[VULNERABILITY['intc']] 
    pointc = v[VULNERABILITY['pointc']] 
    fmtc = v[VULNERABILITY['fmtc']] 
    otherc = v[VULNERABILITY['otherc']]
    total = stackc + heapc + intc + pointc + fmtc + otherc

    if total == 0:
        percentage = 0
        print "ERROR (total=0) - {} - {} <-> {} <-> {}".format(k, v, v2, percentage)
    else:
        percentage = (float(use_after) / float(total)) * float(100)

    line_1.append(str(date))
    line_1.append(str(stackc))
    line_1.append(str(heapc))
    line_1.append(str(intc))
    line_1.append(str(pointc))
    line_1.append(str(fmtc))
    line_1.append(str(otherc))
    line_1.append(str(total))

    line_2.append(str(date))
    line_2.append(str(use_after))
    line_2.append(str(total))
    line_2.append(str(percentage))

    file_1.append("\t".join(line_1))
    file_2.append("\t".join(line_2))

    print "{} - {} <-> {} <-> {}".format(k,v, v2, percentage)

  file_1.close()
  file_2.close()

  return 0
Example #8
    def postproduction(self, records):
        errors = []
        for i, record in enumerate(records):
            record['pk'] = i + 1
            record['order'] = i + 1
            if i < len(records) - 1:
                record['next'] = unicode(record['pk'] + 1)

            # we have two dates:
            # 1. date issued
            # 2. date published

            # for both of them, we check if they represent a valid date
            y = m = d = None
            for fld in ['issued_date', 'published_date']:
                for s in 'ymd':
                    if record[fld + '_' + s].endswith('.0'):
                        record[fld + '_' + s] = record[fld + '_' + s][:-2]
                vals = [record[fld + '_' + s] for s in 'ymd']
                vals = [v for v in vals if v]
                if len(vals) == 3:
                    try:
                        vals = [str(int(v)) for v in vals]
                    except Exception as error:
                        errors.append(
                            (error,
                             'In record {pk}: invalid date for {fld}: {vals}'.
                             format(fld=fld, vals=vals, **record)))
                        for s in 'ymd':
                            if not record[fld + '_' + s].isdigit():
                                record[fld + '_' + s] = None
                        continue
                    try:
                        to_date('-'.join(
                            [record[fld + '_' + s] for s in 'ymd']))
                    except Exception as error:
                        raise Exception(
                            unicode(error) +
                            ' in record {record}'.format(**locals()))
                elif len(vals) != 0 and fld == 'published_date':
                    errors.append(
                        (None,
                         'In record {pk}: incomplete date for {fld}: {vals}'.
                         format(fld=fld, vals=vals, **record)))
Example #9
def consumer(args, poll_timeout=3.0):
    """ Consumes packets from a Kafka topic. """

    # setup the signal handler
    signal.signal(signal.SIGINT, signal_handler)

    # connect to kafka
    logging.debug("Connecting to Kafka; %s", args.kafka_configs)
    kafka_consumer = Consumer(args.kafka_configs)
    kafka_consumer.subscribe([args.kafka_topic])

    # if 'pretty-print' not set, write libpcap global header
    if args.pretty_print == 0:
        sys.stdout.write(global_header(args))
        sys.stdout.flush()

    try:
        pkts_in = 0
        while not finished.is_set() and (args.max_packets <= 0 or pkts_in < args.max_packets):

            # consume a message from kafka
            msg = kafka_consumer.poll(timeout=poll_timeout)
            if msg is None:
                # no message received
                continue

            elif msg.error():

                if msg.error().code() == KafkaError._PARTITION_EOF:
                    logging.debug("reached end of topar: topic=%s, partition=%d, offset=%s", msg.topic(), msg.partition(), msg.offset())
                else:
                    raise KafkaException(msg.error())

            else:
                pkts_in += 1
                logging.debug("Packet received: pkts_in=%d", pkts_in)

                if args.pretty_print == 0:

                    # write the packet header and packet
                    sys.stdout.write(packet_header(msg))
                    sys.stdout.write(msg.value())
                    sys.stdout.flush()

                elif pkts_in % args.pretty_print == 0:

                    # pretty print
                    print 'Packet: count=%s date=%s topic=%s' % (
                        pkts_in, to_date(unpack_ts(msg.key())), args.kafka_topic)
                    print to_hex(msg.value())

    finally:
        sys.stdout.close()
        kafka_consumer.close()
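This consumer and the later confluent-kafka examples rely on a module-level finished event and a signal_handler that are not included in the snippets. A sketch consistent with how they are used (the names match the code above; the bodies are assumptions):

# Hypothetical signal handling for the confluent-kafka consumers on this page;
# SIGINT simply flips an event that the consume loop polls.
import threading

finished = threading.Event()

def signal_handler(signum, frame):
    """Ask the consume loop to stop after the current poll() returns."""
    finished.set()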
Example #10
def read_items():
    fn = os.path.join(BASE, FN)
    #    images_dir = os.path.join(BASE, HARTAKARUN, 'HK Category Images')
    lines = csv.reader(codecs.open(fn, encoding=ENCODING), delimiter=DELIMITER)
    lines = list(lines)
    records = []
    headers = lines[0]
    print headers
    headers = [MAP_FIELDS_TO_MODEL[k] for k in headers]
    resolutions = read_resolutions()

    for l in lines[1:]:
        l = [unicode(x) for x in l]
        r = dict(zip(headers, l))
        for k, v in r.items():
            if 'date' in k or 'time' in k:
                r[k] = to_date(v)

        img_fn = r['image']
        img_src = os.path.join(BASE, IMAGES_DIR, img_fn)
        assert os.path.exists(img_src), img_src

        img_dst_folder = os.path.join(MEDIA_DIR, IMAGES_DIR)
        img_dst = os.path.join(img_dst_folder, img_fn)
        if not os.path.exists(img_dst_folder):
            os.makedirs(img_dst_folder)
        if not os.path.exists(img_dst):
            cmd = 'cp "%s" "%s"' % (img_src, img_dst)
            print cmd
            sh(cmd)
        r['image'] = os.path.join(IMAGES_DIR, img_fn)

        try:
            resolution = resolutions[r['file_id']]
            r['resolution'] = resolution['pk']
        except KeyError:
            r['resolution'] = None

        records.append(r)

    for x in records:
        print x
    print headers
    return records
Example #11
def delivery_callback(err, msg):
    """ Callback executed when message delivery either succeeds or fails. """

    # initialize counter, if needed
    if not hasattr(delivery_callback, "pkts_out"):
        delivery_callback.pkts_out = 0

    if err:
        logging.error("message delivery failed: error=%s", err)

    elif msg is not None:
        delivery_callback.pkts_out += 1

        pretty_print = producer_args.pretty_print

        if pretty_print > 0 and delivery_callback.pkts_out % pretty_print == 0:
            print 'Packet delivered[%s]: date=%s topic=%s partition=%s offset=%s len=%s' % (
                delivery_callback.pkts_out, to_date(unpack_ts(msg.key())), msg.topic(),
                msg.partition(), msg.offset(), len(msg.value()))
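delivery_callback follows the confluent-kafka delivery-report convention: it is registered per message on Producer.produce() and fires when the producer is polled or flushed. A hedged usage sketch, assuming the delivery_callback above is defined and its producer_args global is set; the broker address, topic, key and value are placeholders (the key would normally come from a pack_ts-style helper as elsewhere on this page):

# Hypothetical producer loop showing where delivery_callback would be registered.
from confluent_kafka import Producer

producer = Producer({'bootstrap.servers': 'localhost:9092'})
producer.produce('pcap', key=b'\x00' * 8, value=b'\x00' * 64,
                 on_delivery=delivery_callback)
producer.poll(0)     # serve delivery reports
producer.flush()     # wait for outstanding messages before exiting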
Example #12
def read_items():
    fn = os.path.join(BASE, FN)
    #    images_dir = os.path.join(BASE, HARTAKARUN, 'HK Category Images')
    lines = csv.reader(codecs.open(fn, encoding=ENCODING), delimiter=DELIMITER)
    lines = list(lines)
    records = []
    headers = lines[0]
    print headers
    headers = [MAP_FIELDS_TO_MODEL[k] for k in headers]

    for l in lines[1:]:
        l = [unicode(x) for x in l]
        r = dict(zip(headers, l))
        for k, v in r.items():
            if 'date' in k or 'time' in k:
                r[k] = to_date(v)

        r['journalentry_class'] = '1'  #
        records.append(r)

    for x in records:
        print x
    print headers
    return records
Example #13
def main(argv):
    filename_1 = "cve_nist.dat"
    filename_2 = "cve_nist_criteria.dat"

    file_1 = DataFileWriter(filename_1)
    file_2 = DataFileWriter(filename_2)

    s_year = 2003
    e_year = datetime.now().year + 1
    r_year = range(s_year, e_year)

    input_file = []
    for year in r_year:
        input_file.append("nvdcve-2.0-" + str(year) + ".xml")

    for f in input_file:
        print "[*] Analyzing file: {}".format(f)
        handler = MyContentHandler()
        parse(f, handler)

    for k in sorted(statistics.keys()):
        v = statistics[k]
        v2 = statistics_compare[k]
        line_1 = []
        line_2 = []

        date = to_date(k)

        use_after = v2[CRITERIA['criteria_1']]

        stackc = v[VULNERABILITY['stackc']]
        heapc = v[VULNERABILITY['heapc']]
        intc = v[VULNERABILITY['intc']]
        pointc = v[VULNERABILITY['pointc']]
        fmtc = v[VULNERABILITY['fmtc']]
        otherc = v[VULNERABILITY['otherc']]
        total = stackc + heapc + intc + pointc + fmtc + otherc

        if total == 0:
            percentage = 0
        else:
            percentage = (float(use_after) / float(total)) * float(100)

        line_1.append(str(date))
        line_1.append(str(stackc))
        line_1.append(str(heapc))
        line_1.append(str(intc))
        line_1.append(str(pointc))
        line_1.append(str(fmtc))
        line_1.append(str(otherc))
        line_1.append(str(total))

        line_2.append(str(date))
        line_2.append(str(use_after))
        line_2.append(str(total))
        line_2.append(str(percentage))

        file_1.append("\t".join(line_1))
        file_2.append("\t".join(line_2))

        print "{} - {} <-> {} <-> {}".format(k, v, v2, percentage)

    file_1.close()
    file_2.close()

    return 0
Example #14
def consumer(args, poll_timeout=3.0):
    """ Consumes packets from a Kafka topic. """

    # setup the signal handler
    signal.signal(signal.SIGINT, signal_handler)

    # where to start consuming messages from
    kafka_offset_options = {
        "begin": seek_to_begin,
        "end": seek_to_end,
        "stored": seek_to_stored
    }
    on_assign_cb = kafka_offset_options[args.kafka_offset]

    # connect to kafka
    logging.debug("Connecting to Kafka; %s", args.kafka_configs)
    kafka_consumer = Consumer(args.kafka_configs)
    kafka_consumer.subscribe([args.kafka_topic], on_assign=on_assign_cb)

    # if 'pretty-print' not set, write libpcap global header
    if args.pretty_print == 0:
        sys.stdout.write(global_header(args))
        sys.stdout.flush()

    try:
        pkts_in = 0
        while not finished.is_set() and (args.max_packets <= 0 or pkts_in < args.max_packets):

            # consume a message from kafka
            msg = kafka_consumer.poll(timeout=poll_timeout)
            if msg is None:
                # no message received
                continue

            elif msg.error():

                if msg.error().code() == KafkaError._PARTITION_EOF:
                    if args.pretty_print > 0:
                        print "Reached end of topar: topic=%s, partition=%d, offset=%s" % (
                            msg.topic(), msg.partition(), msg.offset())
                else:
                    raise KafkaException(msg.error())

            else:
                pkts_in += 1
                logging.debug("Packet received: pkts_in=%d", pkts_in)

                if args.pretty_print == 0:

                    # write the packet header and packet
                    sys.stdout.write(packet_header(msg))
                    sys.stdout.write(msg.value())
                    sys.stdout.flush()

                elif pkts_in % args.pretty_print == 0:

                    # pretty print
                    print 'Packet[%s]: date=%s topic=%s partition=%s offset=%s len=%s' % (
                        pkts_in, to_date(unpack_ts(msg.key())), args.kafka_topic,
                        msg.partition(), msg.offset(), len(msg.value()))

    finally:
        sys.stdout.close()
        kafka_consumer.close()
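The seek_to_begin, seek_to_end, and seek_to_stored callbacks referenced in kafka_offset_options are on_assign hooks that rewrite partition offsets before consumption starts. A sketch of what they might look like with confluent-kafka; the OFFSET_* constants are real confluent_kafka symbols, while the function bodies are assumptions:

# Hypothetical on_assign callbacks selecting where consumption starts.
from confluent_kafka import OFFSET_BEGINNING, OFFSET_END, OFFSET_STORED

def seek_to_begin(consumer, partitions):
    for p in partitions:
        p.offset = OFFSET_BEGINNING
    consumer.assign(partitions)

def seek_to_end(consumer, partitions):
    for p in partitions:
        p.offset = OFFSET_END
    consumer.assign(partitions)

def seek_to_stored(consumer, partitions):
    for p in partitions:
        p.offset = OFFSET_STORED
    consumer.assign(partitions)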
Example #15
File: bca.py  Project: mfaris16/ibanking
    if option.mutasi_file:
        content = open_file(option.mutasi_file)
        parser = MutasiParser()
        parser.feed(content)
        pprint(parser.get_clean_data())
        sys.exit()

    if option.saldo_file:
        content = open_file(option.saldo_file)
        parser = SaldoParser()
        parser.feed(content)
        pprint(parser.get_clean_data())
        sys.exit()

    if not option.username or not option.password:
        print('--username dan --password harus diisi')  # Indonesian: "--username and --password must be provided"
        sys.exit()

    if option.date:
        crawler = MutasiBrowser(option.username, option.password,
                                option.output_file)
        tgl = to_date(option.date)
        data = crawler.run(tgl)
        pprint(data)
    else:
        crawler = SaldoBrowser(option.username, option.password,
                               option.output_file)
        data = crawler.run()
        pprint(data)
Example #16
def main(argv):
  filename_1 = "cve_nist.dat"
  filename_2 = "cve_nist_criteria.dat"

  file_1 = DataFileWriter(filename_1)
  file_2 = DataFileWriter(filename_2)

  s_year = 2003
  e_year = datetime.now().year + 1
  r_year = range(s_year, e_year)

  input_file = []
  for year in r_year:
    input_file.append("nvdcve-2.0-" + str(year) + ".xml")

  for f in input_file:
    print "[*] Analyzing file: {}".format(f)
    handler = MyContentHandler()
    parse(f, handler)

  for k in sorted(statistics.keys()):
    v = statistics[k]
    v2 = statistics_compare[k]
    line_1 = []
    line_2 = []

    date = to_date(k)
    
    use_after = v2[CRITERIA['criteria_1']]

    stackc = v[VULNERABILITY['stackc']]
    heapc = v[VULNERABILITY['heapc']] 
    intc = v[VULNERABILITY['intc']] 
    pointc = v[VULNERABILITY['pointc']] 
    fmtc = v[VULNERABILITY['fmtc']] 
    otherc = v[VULNERABILITY['otherc']]
    total = stackc + heapc + intc + pointc + fmtc + otherc

    if total == 0:
      percentage = 0
    else:
      percentage = (float(use_after) / float(total)) * float(100)

    line_1.append(str(date))
    line_1.append(str(stackc))
    line_1.append(str(heapc))
    line_1.append(str(intc))
    line_1.append(str(pointc))
    line_1.append(str(fmtc))
    line_1.append(str(otherc))
    line_1.append(str(total))

    line_2.append(str(date))
    line_2.append(str(use_after))
    line_2.append(str(total))
    line_2.append(str(percentage))

    file_1.append("\t".join(line_1))
    file_2.append("\t".join(line_2))

    print "{} - {} <-> {} <-> {}".format(k,v, v2, percentage)

  file_1.close()
  file_2.close()

  return 0
Example #17
def consumer(args, poll_timeout=3.0):
    """ Consumes packets from a Kafka topic. """

    # setup the signal handler
    signal.signal(signal.SIGINT, signal_handler)

    # where to start consuming messages from
    kafka_offset_options = {
        "begin": seek_to_begin,
        "end": seek_to_end,
        "stored": seek_to_stored
    }
    on_assign_cb = kafka_offset_options[args.kafka_offset]

    # connect to kafka
    logging.debug("Connecting to Kafka; %s", args.kafka_configs)
    kafka_consumer = Consumer(args.kafka_configs)
    kafka_consumer.subscribe([args.kafka_topic], on_assign=on_assign_cb)

    # if 'pretty-print' not set, write libpcap global header
    if args.pretty_print == 0:
        sys.stdout.write(global_header(args))
        sys.stdout.flush()

    try:
        pkts_in = 0
        while not finished.is_set() and (args.max_packets <= 0 or pkts_in < args.max_packets):

            # consume a message from kafka
            msg = kafka_consumer.poll(timeout=poll_timeout)
            if msg is None:
                # no message received
                continue

            elif msg.error():

                if msg.error().code() == KafkaError._PARTITION_EOF:
                    if args.pretty_print > 0:
                        print "Reached end of topar: topic=%s, partition=%d, offset=%s" % (
                            msg.topic(), msg.partition(), msg.offset())
                else:
                    raise KafkaException(msg.error())

            else:
                pkts_in += 1
                logging.debug("Packet received: pkts_in=%d", pkts_in)

                if args.pretty_print == 0:

                    # write the packet header and packet

                    # AT: we are just sending over the results of the scan -- a list of
                    # MACs/RSSIs -- whereas this code originally dealt with network packet sniffers
                    # NOTE: indent=2 only pretty-prints if the value is decoded first,
                    # i.e. json.dumps(json.loads(msg.value()), indent=2)
                    sys.stdout.write(json.dumps(msg.value(), indent=2))
                    # sys.stdout.write(packet_header(msg))
                    # sys.stdout.write(msg.value())
                    sys.stdout.flush()

                elif pkts_in % args.pretty_print == 0:

                    # pretty print
                    print 'Packet[%s]: date=%s topic=%s partition=%s offset=%s len=%s' % (
                        pkts_in, to_date(unpack_ts(msg.key())), args.kafka_topic,
                        msg.partition(), msg.offset(), len(msg.value()))

    finally:
        sys.stdout.close()
        kafka_consumer.close()