Example No. 1
import re

# `publisher` and `get_redis_connector` are provided by the surrounding
# project module (not shown in this excerpt).
def db_import(filename, day):
    routing_db = get_redis_connector()
    with open(filename, 'r') as f:
        entry = ''
        pipeline = routing_db.pipeline()
        i = 0
        for line in f:
            # End of block, extracting the information
            if line == '\n':
                i += 1
                parsed = re.findall(r'(?:ASPATH|PREFIX): ([^\n{]*)', entry)
                try:
                    block = parsed[0].strip()
                    # RIPE-NCC-RIS BGP IPv6 Anchor Prefix @RRC00
                    # RIPE-NCC-RIS BGP Anchor Prefix @ rrc00 - RIPE NCC
                    if block in ['2001:7fb:ff00::/48', '84.205.80.0/24',
                                 '2001:7fb:fe00::/48', '84.205.64.0/24']:
                        asn = 12654
                    else:
                        asn = int(parsed[1].split()[-1].strip())
                    pipeline.hset(block, day, asn)
                except (IndexError, ValueError):
                    # Unparseable block: log it and keep going.
                    publisher.warning(entry)
                entry = ''
                if i % 10000 == 0:
                    pipeline.execute()
                    pipeline = routing_db.pipeline()
            else:
                # append the line to the current block.
                entry += line
        pipeline.execute()
        publisher.info('{f} finished, {nb} entries imported.'.format(
            f=filename, nb=i))
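
The importer above batches its Redis writes through a pipeline and flushes
every 10,000 blocks, so a large bview dump never queues more than one batch of
commands in memory. A minimal self-contained sketch of that batching pattern,
assuming a local Redis server and the `redis` Python client (the function,
data, and field names here are made up):

import redis

def batched_hset(entries, field, batch_size=10000):
    # entries: iterable of (hash_key, value) pairs, e.g. (prefix, asn)
    r = redis.Redis()              # assumes redis://localhost:6379
    pipe = r.pipeline()
    for i, (key, value) in enumerate(entries, start=1):
        pipe.hset(key, field, value)
        if i % batch_size == 0:    # flush a full batch
            pipe.execute()
            pipe = r.pipeline()
    pipe.execute()                 # flush the remainder

# e.g.: batched_hset([('84.205.80.0/24', 12654)], '20240101')
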
Example No. 2
        fname = filename.format(day=cur_day.strftime("%Y%m%d"))
        if cur_day > format_change:
            cur_day += one_day
            url = base_url.format(year_month=cur_day.strftime("%Y.%m"),
                                  file_day=cur_day.strftime("%Y%m%d"))
        else:
            url = base_url_old.format(year_month=cur_day.strftime("%Y.%m"),
                                      file_day=cur_day.strftime("%Y%m%d"))
            cur_day += one_day
        url_list.append((fname, url))
    return sorted(url_list, key=lambda tup: tup[0], reverse=True)


if __name__ == '__main__':
    check_dirs()
    publisher.redis_instance = get_redis_connector()
    publisher.channel = 'bviewfetch'
    publisher.use_tcp_socket = False
    socket.setdefaulttimeout(30)

    parser = argparse.ArgumentParser(
        description='Fetch all the bview files of an interval.')
    parser.add_argument("-f",
                        "--firstdate",
                        required=True,
                        type=str,
                        help='First date of the interval [YYYY-MM-DD].')
    parser.add_argument("-l",
                        "--lastdate",
                        type=str,
                        default=None,
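
The indented fragment at the top of Example No. 2 (from `fname = ...` to the
final `return`) is the body of a per-day URL builder whose header was cut off
in this excerpt. A hedged reconstruction of the enclosing function, with a
hypothetical name and parameters (`build_url_list`, `first_day`, `last_day`;
`format_change` marks the day the archive switched naming schemes):

import datetime

def build_url_list(first_day, last_day, filename, base_url, base_url_old,
                   format_change):
    # Walk the interval day by day, pairing each local filename with
    # the URL it should be fetched from.
    one_day = datetime.timedelta(days=1)
    url_list = []
    cur_day = first_day
    while cur_day <= last_day:
        fname = filename.format(day=cur_day.strftime("%Y%m%d"))
        if cur_day > format_change:
            # The original fragment increments the day before
            # formatting the URL in this branch.
            cur_day += one_day
            url = base_url.format(year_month=cur_day.strftime("%Y.%m"),
                                  file_day=cur_day.strftime("%Y%m%d"))
        else:
            url = base_url_old.format(year_month=cur_day.strftime("%Y.%m"),
                                      file_day=cur_day.strftime("%Y%m%d"))
            cur_day += one_day
        url_list.append((fname, url))
    return sorted(url_list, key=lambda tup: tup[0], reverse=True)
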
Example No. 4
    """
        Verify that the date and the hour of the file we try to
        download is newer than the latest downloaded file.
    """
    if os.path.exists(c.path_bviewtimesamp):
        with open(c.path_bviewtimesamp, 'r') as f:
            ts = f.read().split()
        if ts[0] == date:
            if int(ts[1]) >= int(hour):
                return True
    with open(c.path_bviewtimesamp, 'w') as f:
        f.write(date + ' ' + hour)
    return False


if __name__ == '__main__':

    publisher.redis_instance = get_redis_connector()
    publisher.channel = 'bviewfetch'

    while 1:
        try:
            current_date = datetime.date.today()
            # Initialization of the URL to fetch
            year_month = current_date.strftime("%Y.%m")
            file_day = current_date.strftime("%Y%m%d")

            for hour in reversed(hours):
                url = base_url.format(year_month=year_month,
                                      file_day=file_day, hour=hour)
                if checkURL(url):
                    if not already_downloaded(file_day, hour):
                        publisher.info("New bview file found: " + url)
Example No. 5
            output.write(line)
        nul_f.close()
    publisher.info('Conversion finished, start splitting...')

    # Split the plain text file
    return file_splitter.fsplit(path_output_bviewfile)


def import_assignations(files):
    publisher.info('Start pushing all routes...')
    run_splitted_processing(simultaneous_db_import, path_to_importer, files)
    publisher.info('All routes pushed.')


if __name__ == '__main__':
    publisher.redis_instance = get_redis_connector()
    publisher.channel = 'bview'

    bgpdump = os.path.join(c.raw_data, path_to_bgpdump_bin)

    routing_db = get_redis_connector()

    # Wait a bit until the bview file is downloaded
    time.sleep(60)

    while 1:
        got_new_files = False
        files = glob.glob(os.path.join(c.bview_dir, 'bview.*.gz'))
        routing_db.set('is_importing', 1)
        while len(files) > 0:
            files = sorted(files)
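
Example No. 5 converts the binary bview dump to plain text with bgpdump and
then hands it to `file_splitter.fsplit` so several importer processes can work
in parallel. The splitter itself is not shown; a sketch of what it plausibly
does, cutting only on the blank lines that delimit BGP entries so no block is
split in half (the name, signature, and chunk size are assumptions):

def fsplit(path, blocks_per_chunk=100000):
    # Split a blank-line-delimited dump into numbered chunk files,
    # returning the list of chunk paths.
    chunks = []
    chunk, blocks, n = None, 0, 0
    with open(path) as src:
        for line in src:
            if chunk is None:
                n += 1
                chunk_path = '{0}.{1:04d}'.format(path, n)
                chunk = open(chunk_path, 'w')
                chunks.append(chunk_path)
            chunk.write(line)
            if line == '\n':                    # end of one BGP block
                blocks += 1
                if blocks == blocks_per_chunk:  # cut on a block boundary
                    chunk.close()
                    chunk, blocks = None, 0
    if chunk is not None:
        chunk.close()
    return chunks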