def fetch(opath=None, verbose=False):
    """Retrieve oui.txt from the IEEE registry and write it to the data file.

    The output file's first line is a UTC generation timestamp; each
    following line is "<oui>\t<manufacturer>".

    :param opath: full path of oui.txt (defaults to OUIPATH)
    :param verbose: write progress updates to stdout
    """
    # determine if data path is legit
    if opath is None:
        opath = OUIPATH
    if not os.path.isdir(os.path.dirname(opath)):
        print("Path to data is incorrect {0}".format(opath))
        sys.exit(1)

    # fetch oui file from ieee
    fout = None

    # set up url request
    req = url_request(OUIURL)
    req.add_header('User-Agent', "PyRIC +https://github.com/wraith-wireless/PyRIC/")
    try:
        # retrieve the oui file and parse out generated date
        if verbose:
            print('Fetching ', OUIURL)
        res = url_open(req)
        if verbose:
            print("Parsing OUI file")
        if verbose:
            print("Opening data file {0} for writing".format(opath))
        fout = open(opath, 'w')
        gen = datetime.datetime.utcnow().isoformat()  # use current time as the first line
        fout.write(gen + '\n')

        # pull out ouis
        t = time.time()
        cnt = 0
        for l in res:
            # BUG FIX: under Python 3 urlopen yields bytes lines; without
            # decoding, "'(hex)' in l" raises TypeError which the broad
            # except below silently swallowed (no OUIs ever written)
            if isinstance(l, bytes):
                l = l.decode('utf-8', 'ignore')
            if '(hex)' in l:
                # extract oui and manufacturer
                oui, manuf = l.split('(hex)')
                oui = oui.strip().replace('-', ':')
                manuf = manuf.strip()
                # collapse the registry's various IEEE RA suffixes to one name
                if manuf.startswith("IEEE REGISTRATION AUTHORITY"):
                    manuf = "IEEE REGISTRATION AUTHORITY"

                # write to file & update count
                fout.write('{0}\t{1}\n'.format(oui, manuf))
                cnt += 1
                if verbose:
                    print("{0}:\t{1}\t{2}".format(cnt, oui, manuf))
        print("Wrote {0} OUIs in {1:.3} secs".format(cnt, time.time() - t))
    except url_error as e:
        print("Error fetching oui file: {0}".format(e))
    except IOError as e:
        print("Error opening output file {0}".format(e))
    except Exception as e:
        print("Error parsing oui file: {0}".format(e))
    finally:
        # ensure the data file is closed even on a partial write
        if fout:
            fout.close()
def fetch(opath=None, verbose=False):
    """Retrieve oui.txt from the IEEE and write it to the data file.

    :param opath: full path of oui.txt
    :param verbose: write updates to stdout
    """
    # fall back to the default data location, then validate it
    if opath is None:
        opath = OUIPATH
    if not os.path.isdir(os.path.dirname(opath)):
        print("Path to data is incorrect {0}".format(opath))
        sys.exit(1)

    outfile = None

    # build the request for the IEEE registry
    request = url_request(OUIURL)
    request.add_header('User-Agent', "PyRIC +https://github.com/wraith-wireless/PyRIC/")
    try:
        if verbose:
            print('Fetching ', OUIURL)
        response = url_open(request)
        if verbose:
            print("Parsing OUI file")
            print("Opening data file {0} for writing".format(opath))
        outfile = open(opath, 'w')

        # the first line of the data file is the generation timestamp
        stamp = datetime.datetime.utcnow().isoformat()
        outfile.write(stamp + '\n')

        start = time.time()
        count = 0
        for line in response:
            # only '(hex)' records carry an oui assignment
            if '(hex)' not in line:
                continue
            prefix, maker = line.split('(hex)')
            prefix = prefix.strip().replace('-', ':')
            maker = maker.strip()
            if maker.startswith("IEEE REGISTRATION AUTHORITY"):
                maker = "IEEE REGISTRATION AUTHORITY"
            outfile.write('{0}\t{1}\n'.format(prefix, maker))
            count += 1
            if verbose:
                print("{0}:\t{1}\t{2}".format(count, prefix, maker))
        print("Wrote {0} OUIs in {1:.3} secs".format(count, time.time() - start))
    except url_error as err:
        print("Error fetching oui file: {0}".format(err))
    except IOError as err:
        print("Error opening output file {0}".format(err))
    except Exception as err:
        print("Error parsing oui file: {0}".format(err))
    finally:
        if outfile:
            outfile.close()
def fetch(url, hash):
    """Download *url* into the current directory (if not already present)
    and verify the file's contents against the expected *hash*.

    :param url: url of the file to fetch; the basename becomes the filename
    :param hash: expected hex digest of the file contents
    :raises AssertionError: if the digest does not match *hash*
    """
    filename = os.path.basename(url)
    if not os.path.exists(filename):
        req = url_request(url)
        res = url_open(req)
        dat = res.read()
        # 'wb' not 'wb+': this handle is write-only; verification reopens below
        with open(filename, 'wb') as f:
            f.write(dat)
    # BUG FIX: read back in binary mode -- hash algorithms operate on bytes,
    # and opening a binary payload with 'r' fails to decode under Python 3
    with open(filename, 'rb') as f:
        dat = f.read()
    h = hash_algorithm(dat)
    # NOTE(review): assert is stripped under `python -O`; kept for
    # interface compatibility (callers may expect AssertionError)
    assert h.hexdigest() == hash
def download_url(url):
    '''download a URL and return the content as bytes, or None on error'''
    # defer the urllib import so the same code runs on Python 2 and 3
    if sys.version_info.major < 3:
        from urllib2 import urlopen as url_open
        from urllib2 import URLError as url_error
    else:
        from urllib.request import urlopen as url_open
        from urllib.error import URLError as url_error
    try:
        resp = url_open(url)
    except url_error as e:
        print('Error downloading %s' % url)
        return None
    try:
        # read the full body before releasing the connection
        return resp.read()
    finally:
        # BUG FIX: the response was never closed, leaking the socket
        resp.close()
def downloader(self):
    '''the download thread: drain self._download_pending, caching failures
    and writing successful tiles to disk, then clear self._download_thread'''
    while self.tiles_pending() > 0:
        # throttle requests to the tile server
        time.sleep(self.tile_delay)
        keys = sorted(self._download_pending.keys())
        # work out which one to download next, choosing by request_time
        # (the tile with the greatest request_time, i.e. most recent, wins)
        tile_info = self._download_pending[keys[0]]
        for key in keys:
            if self._download_pending[key].request_time > tile_info.request_time:
                tile_info = self._download_pending[key]
        url = tile_info.url(self.service)
        path = self.tile_to_path(tile_info)
        key = tile_info.key()
        try:
            if self.debug:
                print("Downloading %s [%u left]" % (url, len(keys)))
            req = url_request(url)
            # google's tile servers expect a maps.google.com referer
            if url.find('google') != -1:
                req.add_header('Referer', 'https://maps.google.com/')
            resp = url_open(req)
            headers = resp.info()
        except url_error as e:
            #print('Error loading %s' % url)
            # cache the tile as unavailable so it is not re-requested,
            # and drop it from the pending queue
            if not key in self._tile_cache:
                self._tile_cache[key] = self._unavailable
            self._download_pending.pop(key)
            if self.debug:
                print("Failed %s: %s" % (url, str(e)))
            continue
        if 'content-type' not in headers or headers['content-type'].find('image') == -1:
            # server answered with something other than an image
            # (e.g. an HTML error page) -- treat the tile as unavailable
            if not key in self._tile_cache:
                self._tile_cache[key] = self._unavailable
            self._download_pending.pop(key)
            if self.debug:
                print("non-image response %s" % url)
            continue
        else:
            img = resp.read()

        # see if its a blank/unavailable tile (known placeholder checksums)
        md5 = hashlib.md5(img).hexdigest()
        if md5 in BLANK_TILES:
            if self.debug:
                print("blank tile %s" % url)
            if not key in self._tile_cache:
                self._tile_cache[key] = self._unavailable
            self._download_pending.pop(key)
            continue

        # write atomically: temp file first, then rename into place
        mp_util.mkdir_p(os.path.dirname(path))
        h = open(path + '.tmp', 'wb')
        h.write(img)
        h.close()
        try:
            os.unlink(path)
        except Exception:
            pass
        os.rename(path + '.tmp', path)
        self._download_pending.pop(key)
    # signal that the download thread has exited
    self._download_thread = None
def downloader(self):
    '''the download thread: fetch pending tiles until the queue drains,
    then clear self._download_thread'''

    def discard(key):
        # mark a tile as unavailable in the cache and drop it from the
        # pending queue (shared by all three failure paths below)
        if key not in self._tile_cache:
            self._tile_cache[key] = self._unavailable
        self._download_pending.pop(key)

    while self.tiles_pending() > 0:
        # throttle requests to the tile server
        time.sleep(self.tile_delay)
        keys = sorted(self._download_pending.keys())
        # download the most recently requested tile next (greatest request_time)
        tile_info = self._download_pending[keys[0]]
        for key in keys:
            if self._download_pending[key].request_time > tile_info.request_time:
                tile_info = self._download_pending[key]
        url = tile_info.url(self.service)
        path = self.tile_to_path(tile_info)
        key = tile_info.key()
        try:
            if self.debug:
                print("Downloading %s [%u left]" % (url, len(keys)))
            req = url_request(url)
            # google's tile servers expect a maps.google.com referer
            if 'google' in url:
                req.add_header('Referer', 'https://maps.google.com/')
            resp = url_open(req)
            headers = resp.info()
        except url_error as e:
            discard(key)
            if self.debug:
                print("Failed %s: %s" % (url, str(e)))
            continue
        if 'content-type' not in headers or 'image' not in headers['content-type']:
            # non-image response (e.g. an HTML error page)
            discard(key)
            if self.debug:
                print("non-image response %s" % url)
            continue
        img = resp.read()

        # see if its a blank/unavailable tile (known placeholder checksums)
        if hashlib.md5(img).hexdigest() in BLANK_TILES:
            if self.debug:
                print("blank tile %s" % url)
            discard(key)
            continue

        # write atomically: temp file first, then rename into place.
        # BUG FIX: use `with` so the handle is closed even if write() fails
        mp_util.mkdir_p(os.path.dirname(path))
        with open(path + '.tmp', 'wb') as h:
            h.write(img)
        try:
            os.unlink(path)
        except Exception:
            pass
        os.rename(path + '.tmp', path)
        self._download_pending.pop(key)
    # signal that the download thread has exited
    self._download_thread = None