def test_publisher(self):
    for i in range(0, 21):
        if i % 2 == 0:
            publisher.info('test' + str(i))
        elif i % 3 == 0:
            publisher.warning('test' + str(i))
        elif i % 5 == 0:
            publisher.error('test' + str(i))
        elif i % 7 == 0:
            publisher.critical('test' + str(i))
        else:
            publisher.debug('test' + str(i))
        time.sleep(1)
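The loop above assumes an already configured publisher (the same object the later examples set up over TCP or a Unix socket). Note that because i % 2 == 0 is tested first, within 0..20 the error branch only fires for i = 5 and the critical branch only for i = 7. A minimal setup sketch, assuming the pubsublogger package; the channel name is illustrative:

import time

from pubsublogger import publisher

publisher.channel = 'test'          # channel the messages above are published into
publisher.info('publisher ready')   # levels exercised above: debug/info/warning/error/critical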
Example #2
                return True
    open(c.path_bviewtimesamp, 'w').write(date + ' ' + hour)
    return False


if __name__ == '__main__':

    publisher.redis_instance = get_redis_connector()
    publisher.channel = 'bviewfetch'

    while 1:
        try:
            current_date = datetime.date.today()
            # Initialization of the URL to fetch
            year_month = current_date.strftime("%Y.%m")
            file_day = current_date.strftime("%Y%m%d")

            for hour in reversed(hours):
                url = base_url.format(year_month=year_month,
                                      file_day=file_day, hour=hour)
                if checkURL(url):
                    if not already_downloaded(file_day, hour):
                        publisher.info("New bview file found: " + url)
                        downloadURL(url)
                        publisher.info("Downloaded.")
                        last_hour = hour
                        break
        except:
            publisher.critical('Unable to download bview file. Server does not respond.')
        time.sleep(sleep_timer)
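The fetch loop above calls checkURL, downloadURL and already_downloaded, none of which are fully shown in this extract. A minimal sketch of what the first two could look like, assuming the requests library; the destination directory and timeouts are illustrative:

import os
import requests

raw_output_dir = '/tmp/bview'   # illustrative download directory

def checkURL(url):
    # A HEAD request is enough to learn whether the bview dump is published yet.
    try:
        return requests.head(url, timeout=10).status_code == 200
    except requests.RequestException:
        return False

def downloadURL(url):
    # Stream the dump to disk so the whole file is never held in memory.
    local_path = os.path.join(raw_output_dir, url.rsplit('/', 1)[-1])
    with requests.get(url, stream=True, timeout=60) as r:
        r.raise_for_status()
        with open(local_path, 'wb') as f:
            for chunk in r.iter_content(chunk_size=1 << 20):
                f.write(chunk)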
Example #4
def insert():
    """
        Re-insert in the database the data provided by the module and
        extracted by :meth:`get_all_information` in a sorted form.
    """
    while True:
        i = 0
        try:
            while temp_db.scard(uid_list) > 0:
                infos = get_all_information()
                if infos is None:
                    continue
                uid, ip, src, timestamp = infos
                if ip is None:
                    publisher.error('Entry without IP, invalid')
                    continue
                if src is None:
                    publisher.error(ip + ' without source, invalid')
                    continue
                if timestamp.date() < datetime.date.today() - \
                        datetime.timedelta(1) and not accept_old_entries:
                    publisher.warning('The timestamp ({ts}) of {ip} from {source} is too old.'.\
                            format(ts = timestamp.isoformat(), ip = ip, source = src))
                    continue
                try:
                    # Check and normalize the IP
                    ip_bin = IPy.IP(ip)
                    if ip_bin.iptype() != 'PUBLIC':
                        publisher.warning(
                            str(ip_bin) + ' is not a PUBLIC IP Address')
                        continue
                    ip = ip_bin.strCompressed()
                except:
                    publisher.error('This IP: ' + ip + ' is invalid.')
                    continue

                iso_timestamp = timestamp.isoformat()
                date = timestamp.date().isoformat()
                index_day_src = '{date}{sep}{key}'.format(sep=separator,
                                                          date=date,
                                                          key=list_sources)
                index_day_ips = 'temp{sep}{date}{sep}{source}{sep}{key}'.format(
                    sep=separator, date=date, source=src, key=list_ips)
                ip_details = '{ip}{sep}{timestamp}'.format(
                    sep=separator, ip=ip, timestamp=iso_timestamp)

                global_db.sadd(index_day_src, src)
                pipeline_temp_db = temp_db.pipeline()
                pipeline_temp_db.sadd(index_day_ips, ip_details)
                pipeline_temp_db.sadd(temp_ris, ip)
                pipeline_temp_db.sadd(temp_no_asn, index_day_ips)
                pipeline_temp_db.delete(uid)
                pipeline_temp_db.execute()
                i += 1
                if i % 100 == 0 and config_db.exists(stop_db_input):
                    break
                if i % 10000 == 0:
                    publisher.info('{nb} new entries to insert'\
                            .format(nb = temp_db.scard(uid_list)))
        except:
            publisher.critical('Unable to insert, redis does not respond')
            break
        time.sleep(sleep_timer)
        if config_db.exists(stop_db_input):
            publisher.info('DatabaseInput stopped.')
            break
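insert() above fans every validated entry out to a handful of Redis sets keyed by date, source and IP. A short sketch of how those keys can be read back, assuming the same redis-py connections (global_db, temp_db) and configuration values (separator, list_sources, list_ips) the function uses; the date is illustrative:

# Sketch: reading back the sets written by insert() above.
date = '2014-01-01'  # illustrative day

# Every source that submitted at least one valid entry that day.
index_day_src = '{date}{sep}{key}'.format(sep=separator, date=date, key=list_sources)
for src in global_db.smembers(index_day_src):
    # Per-source temporary set of 'ip<sep>timestamp' strings queued for further processing.
    index_day_ips = 'temp{sep}{date}{sep}{source}{sep}{key}'.format(
        sep=separator, date=date, source=src, key=list_ips)
    for ip_details in temp_db.smembers(index_day_ips):
        ip, iso_timestamp = ip_details.split(separator, 1)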
            if prec_filename is None or filename != prec_filename:
                creditcard_set = set([])
                PST = Paste.Paste(filename)

                for x in PST.get_regex(creditcard_regex):
                    if lib_refine.is_luhn_valid(x):
                        creditcard_set.add(x)

                PST.__setattr__(channel, creditcard_set)
                PST.save_attribute_redis(channel, creditcard_set)

                pprint.pprint(creditcard_set)
                to_print = 'CreditCard;{};{};{};'.format(
                    PST.p_source, PST.p_date, PST.p_name)
                if (len(creditcard_set) > 0):
                    publisher.critical('{}Checked {} valid number(s)'.format(
                        to_print, len(creditcard_set)))
                else:
                    publisher.info('{}CreditCard related'.format(to_print))

            prec_filename = filename

        else:
            if h.redis_queue_shutdown():
                print "Shutdown Flag Up: Terminating"
                publisher.warning("Shutdown Flag Up: Terminating.")
                break
            publisher.debug("Script creditcard is idling 1m")
            time.sleep(60)

        message = h.redis_rpop()
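The credit card fragment above keeps only the regex matches that pass lib_refine.is_luhn_valid. The project's own implementation is not included in this extract; a standard Luhn checksum validator looks roughly like this (hypothetical stand-in, not the lib_refine code):

def is_luhn_valid(card_number):
    # Hypothetical stand-in for lib_refine.is_luhn_valid: plain Luhn checksum.
    digits = [int(ch) for ch in str(card_number) if ch.isdigit()]
    if not digits:
        return False
    checksum = 0
    for position, digit in enumerate(reversed(digits)):
        # Double every second digit from the right, folding results above 9 back down.
        if position % 2 == 1:
            digit *= 2
            if digit > 9:
                digit -= 9
        checksum += digit
    return checksum % 10 == 0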
Example #8
def insert():
    """
        Re-insert in the database the data provided by the module and
        extracted by :meth:`get_all_information` in a sorted form.
    """
    while True:
        i = 0
        try:
            while temp_db.scard(uid_list) > 0:
                infos = get_all_information()
                if infos is None:
                    continue
                uid, ip, src, timestamp = infos
                if ip is None:
                    publisher.error('Entry without IP, invalid')
                    continue
                if src is None:
                    publisher.error(ip + ' without source, invalid')
                    continue
                if timestamp.date() < datetime.date.today() - \
                        datetime.timedelta(1) and not accept_old_entries:
                    publisher.warning('The timestamp ({ts}) of {ip} from {source} is too old.'.\
                            format(ts = timestamp.isoformat(), ip = ip, source = src))
                    continue
                try:
                    # Check and normalize the IP
                    ip_bin = IPy.IP(ip)
                    if ip_bin.iptype() != 'PUBLIC':
                        publisher.warning(str(ip_bin) + ' is not a PUBLIC IP Address')
                        continue
                    ip = ip_bin.strCompressed()
                except:
                    publisher.error('This IP: ' + ip + ' is invalid.')
                    continue

                iso_timestamp = timestamp.isoformat()
                date = timestamp.date().isoformat()
                index_day_src = '{date}{sep}{key}'.format(sep = separator,
                        date=date, key=list_sources)
                index_day_ips = 'temp{sep}{date}{sep}{source}{sep}{key}'.format(
                        sep = separator, date=date, source=src, key=list_ips)
                ip_details = '{ip}{sep}{timestamp}'.format(sep = separator,
                        ip = ip, timestamp = iso_timestamp)

                global_db.sadd(index_day_src, src)
                pipeline_temp_db = temp_db.pipeline()
                pipeline_temp_db.sadd(index_day_ips, ip_details)
                pipeline_temp_db.sadd(temp_ris, ip)
                pipeline_temp_db.sadd(temp_no_asn, index_day_ips)
                pipeline_temp_db.delete(uid)
                pipeline_temp_db.execute()
                i += 1
                if i%100 == 0 and config_db.exists(stop_db_input):
                    break
                if i%10000 == 0:
                    publisher.info('{nb} new entries to insert'\
                            .format(nb = temp_db.scard(uid_list)))
        except:
            publisher.critical('Unable to insert, redis does not respond')
            break
        time.sleep(sleep_timer)
        if config_db.exists(stop_db_input):
            publisher.info('DatabaseInput stopped.')
            break
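Both copies of insert() on this page poll config_db for a stop_db_input key, once every 100 processed entries and again after each sleep. Stopping the loop from outside therefore only requires creating that key (sketch, assuming config_db is the same redis connection the loop checks):

# Sketch: signalling the insert() loop above to stop.
config_db.set(stop_db_input, 1)     # insert() breaks out once exists() sees the key
# ... and later, to let the database input run again:
config_db.delete(stop_db_input)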
Example #9
    parser.add_argument("-H", "--hostname", default='localhost',
                        type=str, help='Set the hostname of the server.')
    parser.add_argument("-p", "--port", default=6379,
                        type=int, help='Set the server port.')
    parser.add_argument("-c", "--channel",
                        type=str, required=True, help='Channel to publish into.')

    args = parser.parse_args()

    if args.use_unix_socket:
        publisher.use_tcp_socket = False
        publisher.unix_socket = args.unix_socket_path
    else:
        publisher.hostname = args.hostname
        publisher.port = args.port

    publisher.channel = args.channel

    for i in range(0, 21):
        if i % 2 == 0:
            publisher.info('test' + str(i))
        elif i % 3 == 0:
            publisher.warning('test' + str(i))
        elif i % 5 == 0:
            publisher.error('test' + str(i))
        elif i % 7 == 0:
            publisher.critical('test' + str(i))
        else:
            publisher.debug('test' + str(i))
        time.sleep(1)
def main():
    """Main Function"""

    # CONFIG #
    cfg = ConfigParser.ConfigParser()
    cfg.read(configfile)

    # REDIS #
    r_serv = redis.StrictRedis(
        host = cfg.get("Redis_Queues", "host"),
        port = cfg.getint("Redis_Queues", "port"),
        db = cfg.getint("Redis_Queues", "db"))

    r_serv1 = redis.StrictRedis(
        host = cfg.get("Redis_Data_Merging", "host"),
        port = cfg.getint("Redis_Data_Merging", "port"),
        db = cfg.getint("Redis_Data_Merging", "db"))

    p_serv = r_serv.pipeline(False)

    # LOGGING #
    publisher.channel = "Script"

    # ZMQ #
    Sub = ZMQ_PubSub.ZMQSub(configfile, "PubSub_Categ", "creditcard_categ", "cards")

    # FUNCTIONS #
    publisher.info("Creditcard script subscribed to channel creditcard_categ")

    message = Sub.get_msg_from_queue(r_serv)
    prec_filename = None

    # Raw strings keep the \1 backreferences as regex backreferences;
    # in plain string literals "\1" is an octal escape and those patterns could never match.
    creditcard_regex = r"4[0-9]{12}(?:[0-9]{3})?"

    mastercard_regex = r"5[1-5]\d{2}([\ \-]?)\d{4}\1\d{4}\1\d{4}"
    visa_regex = r"4\d{3}([\ \-]?)\d{4}\1\d{4}\1\d{4}"
    discover_regex = r"6(?:011\d\d|5\d{4}|4[4-9]\d{3}|22(?:1(?:2[6-9]|[3-9]\d)|[2-8]\d\d|9(?:[01]\d|2[0-5])))\d{10}"
    jcb_regex = r"35(?:2[89]|[3-8]\d)([\ \-]?)\d{4}\1\d{4}\1\d{4}"
    amex_regex = r"3[47]\d\d([\ \-]?)\d{6}\1\d{5}"
    chinaUP_regex = r"62[0-5]\d{13,16}"
    maestro_regex = r"(?:5[0678]\d\d|6304|6390|67\d\d)\d{8,15}"

    while True:
        if message is not None:
            channel, filename, word, score = message.split()

            if prec_filename is None or filename != prec_filename:
                Creditcard_set = set([])
                PST = P.Paste(filename)

                for x in PST.get_regex(creditcard_regex):
                    if lib_refine.is_luhn_valid(x):
                        Creditcard_set.add(x)

                PST.__setattr__(channel, Creditcard_set)
                PST.save_attribute_redis(r_serv1, channel, Creditcard_set)

                pprint.pprint(Creditcard_set)
                if len(Creditcard_set) > 0:
                    publisher.critical('{0};{1};{2};{3};{4}'.format(
                        "CreditCard", PST.p_source, PST.p_date, PST.p_name,
                        "Checked " + str(len(Creditcard_set)) + " valid number(s)"))
                else:
                    publisher.info('{0};{1};{2};{3};{4}'.format(
                        "CreditCard", PST.p_source, PST.p_date, PST.p_name,
                        "CreditCard related"))

            prec_filename = filename

        else:
            if r_serv.sismember("SHUTDOWN_FLAGS", "Creditcards"):
                r_serv.srem("SHUTDOWN_FLAGS", "Creditcards")
                print "Shutdown Flag Up: Terminating"
                publisher.warning("Shutdown Flag Up: Terminating.")
                break
            publisher.debug("Script creditcard is idling 1m")
            time.sleep(60)

        message = Sub.get_msg_from_queue(r_serv)
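The extract stops after main(); a script like this would normally end with the usual entry point guard (assumed here, not shown in the snippet above):

if __name__ == "__main__":
    main()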