def setUp(self):
    """Create two temp db file names, load the import fixture and add a meta column."""
    # NOTE: 'weigth' is the column name the fixture data uses; keep the spelling.
    self.s3_template = [('name', 'text'), ('age', 'integer'), ('weigth', 'float')]
    self.db_fn = get_tmp_fn(".db")
    self.db_fn2 = get_tmp_fn(".db")
    self.s3l = sql3load(self.s3_template, self.db_fn)
    r = self.s3l.importFile("test/test-import.txt")
    self.s3l.addMetaColumn("cDoubleAge VARCHAR(80)")
def __init__(self, **kwargs):
    """Class constructor.

    Keyword args:
        rir: RIR name (default 'lacnic')
        date: date as YYYYMMDD or 'latest' (default 'latest')
        local_file: path to an already-downloaded stats file; skips download
        db_filename: sqlite db file name (default: fresh temp file)
        as_cache: passed through to sql3load
    """
    self.drir = kwargs.get('rir', 'lacnic')
    self.ddate = kwargs.get('date', 'latest')
    self.local_file = kwargs.get('local_file', None)
    self.db_filename = kwargs.get('db_filename', get_tmp_fn(".db"))
    self.as_cache = kwargs.get('as_cache', False)
    # Fetch the delegated file unless the caller supplied a local copy.
    # ('is None' instead of '== None' per PEP 8 singleton comparison.)
    if self.local_file is None:
        self.dlg_fn_name = self._download_stats_file(rir=self.drir, date=self.ddate)
    else:
        self.dlg_fn_name = self.local_file
    # Load the delegated file into a sqlite-backed table; fields are
    # pipe-separated, summary lines are keyed by "numres", '#' starts comments.
    self.s3_template = [('rir', 'text'), ('cc', 'text'), ('type', 'text'),
                        ('block', 'text'), ('length', 'integer'),
                        ('date', 'integer'), ('status', 'text'),
                        ('orgid', 'integer')]
    self.s3l = sql3load(self.s3_template, self.db_filename, "|", "numres",
                        as_cache=self.as_cache, comments_mark="#")
    self.s3l.importFile(self.dlg_fn_name)
    self.dbh = self.s3l
    # Add meta (computed) columns on top of the raw import.
    self._add_columns()
def _download_stats_file(self, **kwargs): """ Downloads stats file for a given rir and date. Args 'rir' and 'date' (date can be 'latest') Load delegated-stats file and current RIR for given date (YYYYMMDD|latest) :param rir=: RIR name :param date=: Date as in YYYYMMDD or 'latest' """ dp = dprint() # get delegated ddate = kwargs.get('date', 'latest') drir = kwargs.get('rir', 'lacnic') dp.log("Downloading stat file for RIR %s, date %s...\n" % (self.drir, self.ddate)) dlg_tmpfile = get_tmp_fn(filename="tmp_delegated-extended-%s-%s" % (drir, ddate)) try: dlg_f_url = rirconfig.rir_config_data[self.drir]['dlge'][0] % ( self.ddate) dlg_tmpfile_name = getfile(dlg_f_url, dlg_tmpfile, 3600) except: print "Failed downloading stats file! url=%s" % (dlg_f_url) raise if not dlg_tmpfile_name: dp.log(" FAILED! url: %s\n" % (dlg_f_url)) sys.exit(1) dp.log(" OK\n") return dlg_tmpfile_name
def __init__(self, **kwargs):
    """Class constructor.

    Keyword args:
        rir: RIR name (default 'lacnic')
        date: date as YYYYMMDD or 'latest' (default 'latest')
        local_file: path to an already-downloaded stats file; skips download
        db_filename: sqlite db file name (default: fresh temp file)
        as_cache: passed through to sql3load
    """
    self.drir = kwargs.get('rir', 'lacnic')
    self.ddate = kwargs.get('date', 'latest')
    self.local_file = kwargs.get('local_file', None)
    self.db_filename = kwargs.get('db_filename', get_tmp_fn(".db"))
    self.as_cache = kwargs.get('as_cache', False)
    # Fetch the delegated file unless the caller supplied a local copy.
    # ('is None' instead of '== None' per PEP 8 singleton comparison.)
    if self.local_file is None:
        self.dlg_fn_name = self._download_stats_file(rir=self.drir, date=self.ddate)
    else:
        self.dlg_fn_name = self.local_file
    # Load the delegated file into a sqlite-backed table; fields are
    # pipe-separated, summary lines are keyed by "numres", '#' starts comments.
    self.s3_template = [('rir', 'text'), ('cc', 'text'), ('type', 'text'),
                        ('block', 'text'), ('length', 'integer'),
                        ('date', 'integer'), ('status', 'text'),
                        ('orgid', 'integer')]
    self.s3l = sql3load(self.s3_template, self.db_filename, "|", "numres",
                        as_cache=self.as_cache, comments_mark="#")
    self.s3l.importFile(self.dlg_fn_name)
    self.dbh = self.s3l
    # Add meta (computed) columns on top of the raw import.
    self._add_columns()
def _download_stats_file(self, **kwargs): """ Downloads stats file for a given rir and date. Args 'rir' and 'date' (date can be 'latest') Load delegated-stats file and current RIR for given date (YYYYMMDD|latest) :param rir=: RIR name :param date=: Date as in YYYYMMDD or 'latest' """ dp = dprint() # get delegated ddate = kwargs.get('date', 'latest') drir = kwargs.get('rir', 'lacnic') dp.log("Downloading stat file for RIR %s, date %s...\n" % (self.drir, self.ddate)) dlg_tmpfile = get_tmp_fn(filename="tmp_delegated-extended-%s-%s" % (drir, ddate) ) try: dlg_f_url = rirconfig.rir_config_data[self.drir]['dlge'][0] % (self.ddate) dlg_tmpfile_name = getfile( dlg_f_url, dlg_tmpfile, 3600) except: print "Failed downloading stats file! url=%s" % (dlg_f_url) raise if not dlg_tmpfile_name: dp.log(" FAILED! url: %s\n" % (dlg_f_url)) sys.exit(1) dp.log(" OK\n") return dlg_tmpfile_name
def _download_dump_files(self):
    """Download RIPE NCC's RIS whois dump files, both for ipv4 and ipv6.

    :return: list of local file names [ipv4_dump, ipv6_dump]
    """
    dp = dprint()
    dump_files = []
    # The two downloads are identical except for the address family;
    # cache each dump for 24 hours (86400 s).
    for proto, cfg_key in (("ipv4", 'ris_whois_v4'), ("ipv6", 'ris_whois_v6')):
        dp.log("Downloading dump file for %s..." % (proto))
        dlg_tmpfile = get_tmp_fn(filename="ris-whois-dump-%s.gz" % (proto))
        dump_files.append(
            getfile(rirconfig.rir_config_data['ripencc'][cfg_key], dlg_tmpfile, 86400))
        dp.log(" OK\n")
    # Bug fix: the return was commented out, leaving callers with None;
    # restore it so the downloaded file names are actually usable.
    return dump_files
def _getRoaCSVExport(self):
    """Download the RPKI ROA validator CSV export.

    :return: local file name of the CSV copy (returned even if the
             download failed and a cached/empty file is all we have)
    """
    dp = dprint()
    dp.log("Downloading dump file for roadata...")
    dlg_tmpfile = get_tmp_fn(filename="rpkivalidator-roadata.csv")
    # getfile returns False on failure; previously that result was silently
    # discarded. Keep the best-effort contract but at least log the failure.
    if not getfile("http://ripeval.labs.lacnic.net:8080/export.csv", dlg_tmpfile, 3600):
        dp.log(" download failed; returning existing local copy (may be stale or empty)\n")
    return dlg_tmpfile
def _getRoaCSVExport(self):
    """Download the RPKI ROA validator CSV export, retrying on failure.

    Makes up to self._retries attempts against self.validator_url.

    :return: local file name of the CSV copy, or None if every attempt
             failed before a temp file name could be created
    """
    dp = dprint()
    # Bug fix: previously unbound if every attempt failed, which made the
    # final return raise NameError instead of reporting the failure.
    dlg_tmpfile = None
    for attempt in range(0, self._retries):
        try:
            dp.log("Downloading dump file for roadata...")
            dlg_tmpfile = get_tmp_fn(filename="rpkivalidator-roadata.csv")
            getfile(self.validator_url, dlg_tmpfile, 3600)
        except:
            dp.log("Failure. Retrying.")
            continue
        # Bug fix: the loop had no break, so it kept re-downloading the file
        # self._retries times even after a successful attempt.
        break
    return dlg_tmpfile
def getfile(w_url, w_file_name=None, w_update=3600, ch_size=0):
    """
    Downloads a file object pointed by w_url and stores it on local file
    w_file_name. The w_update parameter marks how old the file can be.
    Files are only downloaded if they are older than w_update seconds.

    :param w_url: URL of the file to get. All urllib2 URLs are supported,
        including file:///
    :param w_file_name: Full file name and path of the locally-saved copy of
        the file. This parameter can be empty. In this case getfile will
        choose a random temp file name and, on success, will return this name
    :param w_update: Freshness timer in seconds. If the file was downloaded
        less than this time ago the current copy is used, thus avoiding
        unnecessarily re-downloading files.
    :param ch_size: Progress logging switch: 0 silences the progress output.
        (The actual read chunk is fixed at 1024 bytes.)
    :return: file name of the locally-saved copy, or False on HTTP/URL error.
    """
    if w_file_name is None:
        w_file_name = get_tmp_fn()
    # ch_size only selects logging verbosity.
    if ch_size == 0:
        log_level = 0
    else:
        log_level = 3
    dp = dprint(log_level)
    dp.log("entering try block\n")
    try:
        dp.log("Getting " + w_url + ": ")
        mtime = 0
        if os.path.exists(w_file_name):
            mtime = os.stat(w_file_name).st_mtime
        now = time.time()
        if now - mtime >= w_update:
            uh = urllib2.urlopen(w_url)
            try:
                # Bug fix: both the url handle and the local file handle were
                # leaked; the buffered file could even be left unflushed.
                lfh = open(w_file_name, "wb+")
                try:
                    while True:
                        data = uh.read(1024)
                        if not data:
                            dp.log(": done!")
                            break
                        lfh.write(data)
                        dp.log(".")
                finally:
                    lfh.close()
            finally:
                uh.close()
        else:
            dp.log("File exists and still fresh (%s secs old) \n" % (now - mtime))
        return w_file_name
    except urllib2.HTTPError as e:
        dp.log("HTTP Error %s, %s\n" % (e.code, w_url))
        return False
    except urllib2.URLError as f:
        dp.log("URL Error reason: %s, url: %s\n" % (f.reason, w_url))
        return False
def testImportTabSeparatedFile(self):
    """A tab-separated fixture file should yield more than three imported rows."""
    self.s3l2 = sql3load(self.s3_template, get_tmp_fn("islas.db"), "\t")
    imported = self.s3l2.importFile("test/test-import2.txt")
    self.assertTrue(imported > 3,
                    "Number of lines read should be larger than 3 but is %s" % (imported))
def setUp(self):
    """Build the shared column template and a sql3load instance backed by a temp db."""
    self.s3_template = [('name', 'text'), ('age', 'integer'), ('weigth', 'float')]
    self.s3l = sql3load(self.s3_template, get_tmp_fn(".db"))