Ejemplo n.º 1
0
 def setUp(self):
     """Fetch a local copy of the delegated-lacnic-latest stats file and
     parse it into a Delegated object for the tests to use.

     Side effects: downloads/copies the file into the configured tmp
     directory and sets self.loc_file and self.dlg.
     """
     self.loc_file = "%s/delegated-lacnic-latest" % (etc.properties.paths['tmp'])
     # The live FTP fetch is kept for reference; tests use the local
     # ".org" fixture copy instead.
     # getfile("ftp://ftp.lacnic.net/pub/stats/lacnic/delegated-lacnic-latest", self.loc_file)
     # Fix: the original wrapped this call in `except Exception as e: raise e`,
     # a no-op that truncates the traceback under Python 2. Let any download
     # error propagate naturally with its full traceback.
     getfile("file:///" + self.loc_file + ".org", self.loc_file)
     self.dlg = Delegated(self.loc_file)
     self.dlg.read_delegated()
Ejemplo n.º 2
0
 def setUp(self):
     """Download the delegated-lacnic-latest fixture and build the
     Delegated parser used by the test cases.

     Sets self.loc_file (path under the configured tmp dir) and
     self.dlg (parsed Delegated instance).
     """
     self.loc_file = "%s/delegated-lacnic-latest" % (
         etc.properties.paths['tmp'])
     # Original FTP source, kept for reference; the local ".org" copy
     # serves as the fixture.
     # getfile("ftp://ftp.lacnic.net/pub/stats/lacnic/delegated-lacnic-latest", self.loc_file)
     # Fix: dropped the `except Exception as e: raise e` wrapper — it added
     # nothing and destroys the original traceback on Python 2.
     getfile("file:///" + self.loc_file + ".org", self.loc_file)
     self.dlg = Delegated(self.loc_file)
     self.dlg.read_delegated()
Ejemplo n.º 3
0
# Model parameters for the IPv4 free-pool projection.
dias_pred = 600                              # days to project into the future
dash8 = pow(2, 24)                           # addresses in a /8 (2^24)
pool_reservado = 0                           # reserved pool size to subtract (none here)
ipv4libres_tmp = "tmp/reports_freespace.txt" # local cache of the downloaded JSON

# Fetch IPv4 availability data from the LACNIC opendata service.
print "Fetching IPv4 allocation data...",
sys.stdout.flush()
# Observed series (time / free IPv4) and the projected series built later.
# NOTE(review): array/append appear to come from a numpy star-import outside
# this chunk — confirm.
serie_temporal = array([])
serie_temporal_corrida = array([])
serie_ipv4libres = array([])
serie_temporal_pred = []
serie_ipv4libres_pred = []

# `lastdays` is defined earlier in the full script; 30 is the fetch timeout.
getfile.getfile(
    "http://opendata.labs.lacnic.net/ipv4stats/ipv4avail/lacnic?lastdays=%s" %
    (lastdays - 1), ipv4libres_tmp, 30)
print "done!"

# Parse the downloaded JSON into the time / free-address series.
print "Parsing JSON data...",
jsd_file = open(ipv4libres_tmp, "r")
datos = json.load(jsd_file)
jsd_file.close()
i = 0
# Rows arrive newest-first; reverse to build the series in chronological order.
# Each row is Google-DataTable-like: c[0] = date, c[1] = free IPv4 count.
for row in reversed(datos['rows']):
    ts = parseMongoDate(row['c'][0]['v'])
    serie_temporal = append(serie_temporal, ts)
    fip4 = row['c'][1]['v']
    serie_ipv4libres = append(serie_ipv4libres, fip4)
    i = i + 1
print "done!"
# Debug output of the requested window size.
print lastdays
# Model parameters for the IPv4 free-pool projection (longer horizon than v1).
dias_pred = 1170                             # days to project into the future
dash8 = pow(2,24)                            # addresses in a /8 (2^24)
pool_reservado = 0                           # reserved pool size to subtract (none here)
ipv4libres_tmp = "tmp/reports_freespace.txt" # local cache of the downloaded JSON

# Fetch IPv4 availability data from the LACNIC opendata service.
print "Fetching IPv4 allocation data...",
sys.stdout.flush()
# Observed series (time / free IPv4) and the projected series built later.
# NOTE(review): array/append appear to come from a numpy star-import outside
# this chunk — confirm.
serie_temporal = array([])
serie_temporal_corrida = array([])
serie_ipv4libres = array([])
serie_temporal_pred = []
serie_ipv4libres_pred = []

# `lastdays` is defined earlier in the full script; 30 is the fetch timeout.
getfile.getfile("http://opendata.labs.lacnic.net/ipv4stats/ipv4avail/lacnic?lastdays=%s" % (lastdays-1), ipv4libres_tmp, 30)
print "done!"

# Parse the downloaded JSON into the time / free-address series.
print "Parsing JSON data...",
jsd_file = open(ipv4libres_tmp, "r")
datos = json.load(jsd_file)
jsd_file.close()
i = 0
# Rows arrive newest-first; reverse to build the series in chronological order.
# Each row is Google-DataTable-like: c[0] = date, c[1] = free IPv4 count.
for row in reversed(datos['rows']):
    ts = parseMongoDate(row['c'][0]['v'])
    serie_temporal = append(serie_temporal, ts)
    fip4 = row['c'][1]['v']
    serie_ipv4libres = append(serie_ipv4libres, fip4)
    i = i + 1
    #print fip4, date.fromtimestamp(ts)
print "done!"
Ejemplo n.º 5
0
# Load the RIPE RIS whois IPv4 dump into an in-memory sqlite3 table and
# print basic stats plus a sample of rows.
import commons.dumpimport.sql3load as sq3l
import commons.getfile as gf

print "Fetching riswhois dump for ipv4"
gf.getfile("http://www.ris.ripe.net/dumps/riswhoisdump.IPv4.gz",
           "tmp/riswhoisdump.IPv4.gz")

print "Importing dump into memory"

# Schema: origin AS, announced prefix, number of RIS peers seeing it.
memdb = sq3l.sql3load([('originas', 'text'), ('prefix', 'text'),
                       ('seenby', 'integer')])
# The dump is tab-separated.
memdb.importFile("tmp/riswhoisdump.IPv4.gz", "\t")

print "Getting import stats"
print memdb.getStats()

# NOTE(review): message says "first 10 rows" but the query limits to 25,
# and ORDER BY id DESC returns the *last* imported rows — confirm intent.
print "Get first 10 rows"
q = memdb.query("1=1 ORDER BY id DESC LIMIT 25")
for r in q:
    print dict(r)
# Local cache of the downloaded free-space JSON (extended-stats variant).
freeipv4_tmpfile = "tmp/reports_freespace_fromextended.txt"

# Banner
print "IPv4 RUNDOWN MODELS (c) Carlos M. Martinez, [email protected]"
print "--"
print " "

# Fetch IPv4 availability data from the LACNIC opendata service.
print "Fetching IPv4 allocation data... ",
sys.stdout.flush()
# Observed series (time / free IPv4) and the projected series built later.
# NOTE(review): array/append appear to come from a numpy star-import outside
# this chunk — confirm.
time_series = array([])
freeipv4_series = array([])
time_series_pred = []
freeipv4_series_pred = []

# `lastdays` is defined earlier in the full script; 30 is the fetch timeout.
getfile.getfile("http://opendata.labs.lacnic.net/ipv4stats/ipv4avail/lacnic?lastdays=%s" % (lastdays), freeipv4_tmpfile, 30)
print "done!"

# Parse the downloaded JSON into the time / free-address series.
print "Parsing JSON data...",
jsd_file = open(freeipv4_tmpfile, "r")
jsd_data = json.load(jsd_file)
jsd_file.close()
cnt = 0
for row in jsd_data['rows']:
    time_series     = append(time_series, float(lastdays-cnt))
    ref_date = base_date+timedelta(lastdays-cnt)
    fip4 = float(row['c'][1]['v']) 
    if  ref_date <= date_dash9_reached :
        print "0: %s, %s" % (lastdays-cnt, fip4  )
        freeipv4_series = append(freeipv4_series, fip4 )
    elif ref_date < date_debogon_start:
# Load the RIPE RIS whois IPv4 dump into an in-memory sqlite3 table and
# print basic stats plus a sample of rows.
import commons.dumpimport.sql3load as sq3l
import commons.getfile as gf

print "Fetching riswhois dump for ipv4"
gf.getfile("http://www.ris.ripe.net/dumps/riswhoisdump.IPv4.gz", "tmp/riswhoisdump.IPv4.gz")

print "Importing dump into memory"

# Schema: origin AS, announced prefix, number of RIS peers seeing it.
# The dump file is tab-separated.
memdb = sq3l.sql3load([('originas','text'),('prefix','text'),('seenby','integer')])
memdb.importFile("tmp/riswhoisdump.IPv4.gz", "\t")

print "Getting import stats"
print memdb.getStats()

# NOTE(review): message says "first 10 rows" but the query limits to 25,
# and ORDER BY id DESC returns the *last* imported rows — confirm intent.
print "Get first 10 rows"
q = memdb.query("1=1 ORDER BY id DESC LIMIT 25")
for r in q:
	print dict(r)
Ejemplo n.º 8
0
from datetime import date
from datetime import timedelta
from time import sleep
from commons import getfile
import sys
import csv

# Model parameters for the IPv4 rundown projection.
time_horizon = 500                   # days to project into the future
model_degrees = [2, 3, 4]            # polynomial degrees to fit
reserve_pool_size = pow(2, 32 - 10)  # reserved pool: a /10 worth of addresses
base_date = date(2011, 7, 22)        # day 0 of the time series

# Fetch the historical free-space report (pipe-separated text).
print "Fetching IPv4 allocation data... ",
# NOTE(review): array/append appear to come from a numpy star-import outside
# this chunk — confirm.
time_series = array([])
freeipv4_series = array([])
getfile.getfile("http://www.labs.lacnic.net/reports_freespace.txt",
                "tmp/reports_freespace.txt")
print "done!"

# Parse the report: column 0 = day offset, column 1 = free IPv4 addresses.
with open('tmp/reports_freespace.txt') as csvfile:
    r = csv.reader(csvfile, delimiter='|')
    c = 0
    for row in r:
        #p_time_series = row[3]
        time_series = append(time_series, float(row[0]))
        freeipv4_series = append(freeipv4_series, float(row[1]))
        c = c + 1

print "%s entries loaded." % (c)
# print time_series.shape
# print freeipv4_series.shape