def run(self, lat, lon, rate):
    """Worker tick: pick up a finished WAFS grib download, schedule new ones.

    Args:
        lat, lon: current position (unused here; kept for the worker signature).
        rate (float): seconds elapsed since the last tick, used to count
            down the retry back-off timer.
    """
    datecycle, cycle, forecast = self.getCycleDate()

    # Use the new grib if the async download has finished
    if self.downloading:
        if not self.download.q.empty():
            lastgrib = self.download.q.get()
            self.downloading = False
            if lastgrib:
                # Drop the previously cached grib unless configured to keep it
                if not self.conf.keepOldFiles and self.conf.lastwafsgrib:
                    util.remove(os.sep.join([self.conf.cachepath, self.conf.lastwafsgrib]))
                self.lastgrib = lastgrib
                self.conf.lastwafsgrib = lastgrib
                parts = self.conf.lastwafsgrib.split(os.sep)
                # BUGFIX: guard was "> 0", which is always true for split(),
                # so paths without a separator raised IndexError on parts[1].
                if len(parts) > 1:
                    # Cache file names start with the YYYYMMDDCC datecycle
                    self.current_datecycle = parts[1][:10]
                else:
                    self.lastgrib = False
            else:
                # Download failed: back off for a minute
                self.downloadWait = 60

    if self.downloadWait > 0:
        self.downloadWait -= rate

    # Download a new grib if a newer cycle is available
    if self.current_datecycle != datecycle and self.conf.download \
            and not self.downloading and self.downloadWait < 1:
        self.downloadCycle(datecycle, cycle, forecast)
def run(self, lat, lon, rate):
    """Worker tick: pick up a finished WAFS grib download, schedule new ones.

    Args:
        lat, lon: current position (unused here; kept for the worker signature).
        rate (float): seconds elapsed since the last tick, used to count
            down the retry back-off timer.
    """
    datecycle, cycle, forecast = self.getCycleDate()

    # Use the new grib if the async download has finished
    if self.downloading:
        if not self.download.q.empty():
            lastgrib = self.download.q.get()
            self.downloading = False
            if lastgrib:
                # Drop the previously cached grib unless configured to keep it
                if not self.conf.keepOldFiles and self.conf.lastwafsgrib:
                    util.remove(os.sep.join([self.conf.cachepath, self.conf.lastwafsgrib]))
                self.lastgrib = lastgrib
                self.conf.lastwafsgrib = lastgrib
                parts = self.conf.lastwafsgrib.split(os.sep)
                # BUGFIX: guard was "> 0", which is always true for split(),
                # so paths without a separator raised IndexError on parts[1].
                if len(parts) > 1:
                    # Cache file names start with the YYYYMMDDCC datecycle
                    self.current_datecycle = parts[1][:10]
                else:
                    self.lastgrib = False
            else:
                # Download failed: back off for a minute
                self.downloadWait = 60

    if self.downloadWait > 0:
        self.downloadWait -= rate

    # Download a new grib if a newer cycle is available
    if self.current_datecycle != datecycle and self.conf.download \
            and not self.downloading and self.downloadWait < 1:
        self.downloadCycle(datecycle, cycle, forecast)
def updateMetar(self, db, path):
    """Update the airports table from a METAR cycle file.

    Args:
        db: open sqlite3 connection with an `airports` table
            (columns include icao, metar, timestamp).
        path (str): path to the downloaded METAR file.

    Returns:
        (nupdated, nparsed): rows updated in the DB / report lines parsed.
    """
    nupdated = 0
    nparsed = 0
    cursor = db.cursor()
    i = 0
    inserts = []
    INSBUF = cursor.arraysize

    today_prefix = datetime.utcnow().strftime('%Y%m')
    yesterday_prefix = (datetime.utcnow() + timedelta(days=-1)).strftime('%Y%m')
    today = datetime.utcnow().strftime('%d')

    # BUGFIX: use a context manager so the handle is closed on exceptions,
    # and stream the file instead of materializing it with readlines().
    with open(path, 'r') as f:
        for line in f:
            # Report lines look like "ICAO DDHHMMZ ..." (Z at column 11)
            if line[0].isalpha() and len(line) > 11 and line[11] == 'Z':
                i += 1
                icao, mtime = line[0:4], line[5:11]
                # Strip non-ASCII bytes from the report text
                metar = re.sub(r'[^\x00-\x7F]+', ' ', line[5:-1])
                metar = metar.split(',')[0]

                if mtime[-1] == 'Z':
                    mtime = '0' + mtime[:-1]
                if not mtime.isdigit():
                    mtime = '000000'

                # Prepend year and month; a report dated with a different
                # day number is assumed to be from yesterday.
                if mtime[:2] == today:
                    timestamp = today_prefix + mtime
                else:
                    timestamp = yesterday_prefix + mtime

                inserts.append((timestamp, metar, icao, timestamp))
                nparsed += 1

                # Flush in batches sized to the cursor's arraysize
                if (i % INSBUF) == 0:
                    cursor.executemany(
                        'UPDATE airports SET timestamp = ?, metar = ? WHERE icao = ? AND timestamp < ?',
                        inserts)
                    inserts = []
                    nupdated += cursor.rowcount

    if len(inserts):
        cursor.executemany(
            'UPDATE airports SET timestamp = ?, metar = ? WHERE icao = ? AND timestamp < ?',
            inserts)
        nupdated += cursor.rowcount
    db.commit()

    if not self.conf.keepOldFiles:
        util.remove(path)

    return nupdated, nparsed
def run(self, elapsed):
    """Worker function called by a worker thread to update the data.

    Args:
        elapsed (float): seconds since the previous tick, used to count
            down the retry back-off timer.
    """
    if not self.conf.download:
        return

    if self.download_wait:
        self.download_wait -= elapsed
        return

    datecycle, cycle, forecast = self.get_cycle_date()
    cache_file = self.get_cache_filename(datecycle, cycle, forecast)
    cache_file_path = os.sep.join([self.cache_path, cache_file])

    if not self.download:
        if self.last_grib == cache_file and os.path.isfile(cache_file_path):
            # Nothing to do
            return
        # Trigger new download
        url = self.get_download_url(datecycle, cycle, forecast)
        print('Downloading: %s' % cache_file)
        self.download = AsyncTask(GribDownloader.download,
                                  url,
                                  cache_file_path,
                                  binary=True,
                                  variable_list=self.variable_list,
                                  cancel_event=self.die,
                                  decompress=self.conf.wgrib2bin,
                                  spinfo=self.conf.spinfo)
        self.download.start()
    else:
        if self.download.pending():
            # Still waiting for the download
            return
        self.download.join()
        if isinstance(self.download.result, Exception):
            print('Error Downloading Grib file: %s.' % str(self.download.result))
            # BUGFIX: the original tested os.path.isfile(cache_file) — the
            # bare name, relative to the cwd — but removed the full cache
            # path. Test the same path that gets removed.
            if os.path.isfile(cache_file_path):
                util.remove(cache_file_path)
            # wait and try again
            self.download_wait = 60
        else:
            # New file available
            if not self.conf.keepOldFiles and self.last_grib:
                util.remove(os.path.sep.join([self.cache_path, self.last_grib]))
            self.last_grib = str(self.download.result.split(os.path.sep)[-1])
            print('%s successfully downloaded.' % self.last_grib)
        # Reset the download in both outcomes so a failed attempt can be
        # retried once download_wait expires.
        self.download = False
def downloadCycle(self, datecycle, cycle, forecast):
    """Download the requested GFS grib file asynchronously.

    Picks up a finished download first; otherwise, if allowed, builds the
    NOMADS filter URL and starts an AsyncDownload.
    """
    filename = 'gfs.t%02dz.pgrb2full.0p50.f0%02d' % (cycle, forecast)
    path = os.sep.join([self.conf.cachepath, 'gfs'])
    cachefile = os.sep.join(['gfs', '%s_%s.grib2' % (datecycle, filename)])

    if cachefile == self.lastgrib:
        # No need to download
        return

    if not os.path.exists(path):
        os.makedirs(path)

    if self.downloading:
        if not self.download.q.empty():
            # Finished downloading
            lastgrib = self.download.q.get()
            if lastgrib:
                # Download success
                if not self.conf.keepOldFiles and self.conf.lastgrib:
                    util.remove(os.sep.join([self.conf.cachepath, self.conf.lastgrib]))
                self.lastgrib = lastgrib
                self.conf.lastgrib = self.lastgrib
                self.newGrib = True
            else:
                # Wait a minute before retrying
                self.downloadWait = 60
            self.downloading = False
    elif self.conf.download and self.downloadWait < 1:
        # Build the download url.
        # BUGFIX: the original did `params = self.params`, aliasing the
        # shared list and appending to it on every call — duplicating
        # query parameters across downloads. Work on a copy instead.
        params = list(self.params)
        params.append('dir=%%2Fgfs.%s' % datecycle)
        params.append('file=' + filename)
        # Add requested levels and variables to the filter
        for level in self.levels:
            params.append('lev_' + level + '=1')
        for var in self.variables:
            params.append('var_' + var + '=1')
        url = self.baseurl + '&'.join(params)

        self.downloading = True
        self.download = AsyncDownload(self.conf, url, cachefile)

    return False
def updateMetar(self, db, path):
    """Update the airports table from a METAR cycle file.

    Args:
        db: open sqlite3 connection with an `airports` table
            (columns include icao, metar, timestamp).
        path (str): path to the downloaded METAR file.

    Returns:
        (nupdated, nparsed): rows updated in the DB / report lines parsed.
    """
    nupdated = 0
    nparsed = 0
    cursor = db.cursor()
    i = 0
    inserts = []
    INSBUF = cursor.arraysize

    today_prefix = datetime.utcnow().strftime('%Y%m')
    yesterday_prefix = (datetime.utcnow() + timedelta(days=-1)).strftime('%Y%m')
    today = datetime.utcnow().strftime('%d')

    # BUGFIX: use a context manager so the handle is closed on exceptions,
    # and stream the file instead of materializing it with readlines().
    with open(path, 'r') as f:
        for line in f:
            # Report lines look like "ICAO DDHHMMZ ..." (Z at column 11)
            if line[0].isalpha() and len(line) > 11 and line[11] == 'Z':
                i += 1
                icao, mtime = line[0:4], line[5:11]
                # Strip non-ASCII bytes from the report text
                metar = re.sub(r'[^\x00-\x7F]+', ' ', line[5:-1])
                metar = metar.split(',')[0]

                if mtime[-1] == 'Z':
                    mtime = '0' + mtime[:-1]
                if not mtime.isdigit():
                    mtime = '000000'

                # Prepend year and month; a report dated with a different
                # day number is assumed to be from yesterday.
                if mtime[:2] == today:
                    timestamp = today_prefix + mtime
                else:
                    timestamp = yesterday_prefix + mtime

                inserts.append((timestamp, metar, icao, timestamp))
                nparsed += 1

                # Flush in batches sized to the cursor's arraysize
                if (i % INSBUF) == 0:
                    cursor.executemany(
                        'UPDATE airports SET timestamp = ?, metar = ? WHERE icao = ? AND timestamp < ?',
                        inserts)
                    inserts = []
                    nupdated += cursor.rowcount

    if len(inserts):
        cursor.executemany(
            'UPDATE airports SET timestamp = ?, metar = ? WHERE icao = ? AND timestamp < ?',
            inserts)
        nupdated += cursor.rowcount
    db.commit()

    if not self.conf.keepOldFiles:
        util.remove(path)

    return nupdated, nparsed
def updateMetar(self, db, path):
    """Update the airports table from a (VATSIM-style) METAR file.

    Date header lines ("YYYY/MM/DD HH:MM") set the timestamp used by the
    report line that follows; reports without a header get a generated
    year+month prefix. Also publishes the file as X-Plane's METAR.rwx.

    Args:
        db: open sqlite3 connection with an `airports` table.
        path (str): path to the downloaded METAR file.

    Returns:
        int: number of report lines queued for update.
    """
    updated = 0
    timestamp = 0
    cursor = db.cursor()
    i = 0
    inserts = []
    INSBUF = 128

    # BUGFIX: context manager closes the handle even on exceptions
    with open(path, 'r') as f:
        for line in f:
            if line[0].isalpha():
                # METAR report line
                i += 1
                icao, mtime = line[0:4], line[5:11]
                # Strip non-ASCII bytes from the report text
                metar = re.sub(r'[^\x00-\x7F]+', ' ', line[5:-1])
                if mtime[-1] == 'Z':
                    mtime = '0' + mtime[:-1]
                if not mtime.isdigit():
                    mtime = '000000'
                # Fall back to a generated timestamp when no date header
                # line preceded this report
                if timestamp == 0:
                    timestamp = int(datetime.utcnow().strftime('%Y%m') + mtime)
                inserts.append((timestamp, metar, icao, timestamp))
                updated += 1
                timestamp = 0
                if (i % INSBUF) == 0:
                    sys.stdout.flush()
                    cursor.executemany(
                        'UPDATE airports SET timestamp = ?, metar = ? WHERE icao = ? AND timestamp < ?',
                        inserts)
                    inserts = []
            elif len(line) > 15:
                # Date header line: YYYY/MM/DD HH:MM
                strtime = line[0:4] + line[5:7] + line[8:10] + line[11:13] + line[14:16]
                if strtime.isdigit():
                    timestamp = int(strtime)

    if len(inserts):
        # BUGFIX: the original also did `updated += len(inserts)` here,
        # double-counting reports already counted in the loop above.
        cursor.executemany(
            'UPDATE airports SET timestamp = ?, metar = ? WHERE icao = ? AND timestamp < ?',
            inserts)
    db.commit()

    # Publish the file for X-Plane and clean up
    xpmetar = os.sep.join([self.conf.syspath, 'METAR.rwx'])
    util.copy(path, xpmetar)
    if not self.conf.keepOldFiles:
        util.remove(path)

    return updated
def run(self, conf, url, cachepath, cachefile):
    """Download `url` into cachepath/cachefile, transparently gunzipping the
    transfer and post-processing grib2 files with wgrib2.

    Puts the cache file name on self.q on success, False on failure; calls
    self.callback instead of the queue when one is set.
    """
    filepath = os.sep.join([cachepath, cachefile])
    tempfile = filepath + '.tmp'

    # Clean up leftovers from a previous attempt
    if os.path.exists(tempfile):
        util.remove(tempfile)
    if os.path.exists(filepath):
        util.remove(filepath)

    # (typo "Dowloading" fixed to match the newer variant of this class)
    print('Downloading: %s' % cachefile)

    # Request gzipped transfer
    request = urllib2.Request(url)
    request.add_header('Accept-encoding', 'gzip,deflate')
    request.add_header('User-Agent', 'XPNOAAWeather/%s' % conf.__VERSION__)

    try:
        response = urllib2.urlopen(request)
    except Exception:
        print('Download error: %s %s' % (sys.exc_info()[0], sys.exc_info()[1]))
        self.q.put(False)
        # BUGFIX: the original fell through here and went on to use the
        # undefined `response`, raising a secondary error.
        return

    # Detect a gzipped response from the header
    isGzip = response.headers.get('content-encoding', '').find('gzip') >= 0
    gz = zlib.decompressobj(16 + zlib.MAX_WBITS)

    # grib2 files must be written in binary mode or they get corrupted
    # on Windows
    binary = 'b' if filepath.split('.')[-1] == 'grib2' else ''

    of = open(tempfile, 'w' + binary)
    try:
        while True:
            if self.cancel.isSet():
                raise Exception()
            data = response.read(1024 * 128)
            if not data:
                print('Downloaded: %s' % cachefile)
                break
            if isGzip:
                data = gz.decompress(data)
            of.write(data)
    except Exception:
        # BUGFIX: close the handle BEFORE removing the tempfile (remove of
        # an open file fails on Windows), and return so we don't fall
        # through and push a second False onto the queue.
        of.close()
        if os.path.exists(tempfile):
            util.remove(tempfile)
        self.q.put(False)
        return
    of.close()

    if os.path.exists(tempfile) and os.path.getsize(tempfile) > self.min_size:
        # Downloaded
        if filepath.split('.')[-1] == 'grib2':
            # Uncompress grib2 file with wgrib2
            print('Uncompressing grib: %s %s' % (self.wgrib2bin, tempfile))
            args = [self.wgrib2bin, tempfile, '-set_grib_type', 'simple', '-grib_out', filepath]
            if conf.spinfo:
                p = subprocess.Popen(args, startupinfo=conf.spinfo,
                                     stdout=sys.stdout, stderr=sys.stderr, shell=True)
            else:
                p = subprocess.Popen(args, stdout=sys.stdout, stderr=sys.stderr)
            p.wait()
            util.remove(tempfile)
        else:
            util.rename(tempfile, filepath)
        # Call callback if defined, otherwise put the file on the queue
        if self.callback:
            self.callback(cachefile)
        else:
            self.q.put(cachefile)
    else:
        # File too small: remove it and report failure
        if os.path.exists(tempfile):
            util.remove(tempfile)
        self.q.put(False)
def download(cls, url, file_path, binary=False, **kwargs):
    """Download grib for the specified variable_lists

    Args:
        url (str): URL to the grib file excluding the extension
        file_path (str): Path to the output file
        binary (bool): Set to True for binary files or files will get
            corrupted on Windows.

    Kwargs:
        cancel_event (threading.Event): Set the flat to cancel the download
            at any time
        variable_list (list): List of variables dicts ex:
            [{'level': ['500mb', ], 'vars': 'TMP'}, ]
        decompress (str): Path to the wgrib2 to decompress the file.

    Returns:
        str: the path to the final file on success

    Raises:
        GribDownloaderError: on fail.
        GribDownloaderCancel: on cancel.
    """
    variable_list = kwargs.pop('variable_list', [])
    if variable_list:
        # Download the index and create a chunk list
        # NOTE(review): kwargs still contains decompress/spinfo here and is
        # forwarded to download_part — presumably ignored there; confirm.
        with TemporaryFile('a+') as idx_file:
            idx_file.seek(0)
            try:
                cls.download_part('%s.idx' % url, idx_file, **kwargs)
            except urllib2.URLError:
                raise GribDownloaderError(
                    'Unable to download index file for: %s' % url)
            idx_file.seek(0)
            # Map the .idx contents to byte ranges for the wanted variables
            index = cls.parse_grib_index(idx_file)
            chunk_list = cls.gen_chunk_list(index, variable_list)
    # Binary mode keeps Windows from mangling grib payloads
    flags = 'wb' if binary else 'w'
    with open(file_path, flags) as grib_file:
        if not variable_list:
            # Fake chunk list for non filtered files
            chunk_list = [[False, False]]
        for chunk in chunk_list:
            # start/end of False means "whole file" for the fake chunk
            try:
                cls.download_part('%s' % url, grib_file,
                                  start=chunk[0], end=chunk[1], **kwargs)
            except urllib2.URLError as err:
                raise GribDownloaderError('Unable to open url: %s\n\t%s'
                                          % (url, str(err)))
    wgrib2 = kwargs.pop('decompress', False)
    spinfo = kwargs.pop('spinfo', False)
    if wgrib2:
        # Decompress in place: move aside, decompress back to file_path
        tmp_file = "%s.tmp" % file_path
        try:
            os.rename(file_path, tmp_file)
            cls.decompress_grib(tmp_file, file_path, wgrib2, spinfo)
            util.remove(tmp_file)
        except OSError as err:
            raise GribDownloaderError('Unable to decompress: %s \n\t%s'
                                      % (file_path, str(err)))
    return file_path
def downloadCycle(self, datecycle, cycle, forecast):
    """Download the requested GFS grib file asynchronously.

    Picks up a finished download first; otherwise, if allowed, builds the
    NOMADS filter URL and starts an AsyncDownload.
    """
    filename = 'gfs.t%02dz.pgrb2full.0p50.f0%02d' % (cycle, forecast)
    path = os.sep.join([self.conf.cachepath, 'gfs'])
    cachefile = os.sep.join(['gfs', '%s_%s.grib2' % (datecycle, filename)])

    if cachefile == self.lastgrib:
        # No need to download
        return

    if not os.path.exists(path):
        os.makedirs(path)

    if self.downloading:
        if not self.download.q.empty():
            # Finished downloading
            lastgrib = self.download.q.get()
            if lastgrib:
                # Download success
                if not self.conf.keepOldFiles and self.conf.lastgrib:
                    util.remove(os.sep.join([self.conf.cachepath, self.conf.lastgrib]))
                self.lastgrib = lastgrib
                self.conf.lastgrib = self.lastgrib
                self.newGrib = True
            else:
                # Wait a minute before retrying
                self.downloadWait = 60
            self.downloading = False
    elif self.conf.download and self.downloadWait < 1:
        # Build the download url.
        # BUGFIX: the original did `params = self.params`, aliasing the
        # shared list and appending to it on every call — duplicating
        # query parameters across downloads. Work on a copy instead.
        params = list(self.params)
        params.append('dir=%%2Fgfs.%s' % datecycle)
        params.append('file=' + filename)
        # Add requested levels and variables to the filter
        for level in self.levels:
            params.append('lev_' + level + '=1')
        for var in self.variables:
            params.append('var_' + var + '=1')
        url = self.baseurl + '&'.join(params)

        self.downloading = True
        self.download = AsyncDownload(self.conf, url, cachefile)

    return False
def run(self, conf, url, cachepath, cachefile):
    """Download `url` into cachepath/cachefile, transparently gunzipping the
    transfer and post-processing grib2 files with wgrib2.

    Puts the cache file name on self.q on success, False on failure; calls
    self.callback instead of the queue when one is set.
    """
    filepath = os.sep.join([cachepath, cachefile])
    tempfile = filepath + '.tmp'

    # Clean up leftovers from a previous attempt
    if os.path.exists(tempfile):
        util.remove(tempfile)
    if os.path.exists(filepath):
        util.remove(filepath)

    print('Downloading: %s' % cachefile)

    # Request gzipped transfer
    request = urllib2.Request(url)
    request.add_header('Accept-encoding', 'gzip, deflate')
    request.add_header('User-Agent', 'XPNOAAWeather/%s' % conf.__VERSION__)

    # Skip certificate verification where supported (old bundled pythons
    # often lack a usable certificate store)
    if hasattr(ssl, '_create_unverified_context'):
        params = {'context': ssl._create_unverified_context()}
    else:
        params = {}

    try:
        response = urllib2.urlopen(request, **params)
    except Exception:
        print('Download error: %s %s' % (sys.exc_info()[0], sys.exc_info()[1]))
        self.q.put(False)
        # BUGFIX: the original fell through here and went on to use the
        # undefined `response`, raising a secondary error.
        return

    # Detect gzip from the response header or from the url extension
    isGzip = response.headers.get('content-encoding', '').find('gzip') >= 0
    isGziped = url[-3:] == '.gz'
    gz = zlib.decompressobj(16 + zlib.MAX_WBITS)

    # grib2 files must be written in binary mode or they get corrupted
    # on Windows
    binary = 'b' if filepath.split('.')[-1] == 'grib2' else ''

    of = open(tempfile, 'w' + binary)
    try:
        while True:
            if self.cancel.isSet():
                raise Exception()
            data = response.read(1024 * 128)
            if not data:
                print('Downloaded: %s' % cachefile)
                break
            # BUGFIX: the original ran gz.decompress twice (once per flag)
            # when BOTH the header and the .gz extension matched, corrupting
            # the data. Decompress exactly once.
            if isGzip or isGziped:
                data = gz.decompress(data)
            of.write(data)
    except Exception:
        # BUGFIX: close the handle BEFORE removing the tempfile (remove of
        # an open file fails on Windows), and return so we don't fall
        # through and push a second False onto the queue.
        of.close()
        if os.path.exists(tempfile):
            util.remove(tempfile)
        self.q.put(False)
        return
    of.close()

    if os.path.exists(tempfile) and os.path.getsize(tempfile) > self.min_size:
        # Downloaded
        if filepath.split('.')[-1] == 'grib2':
            # Uncompress grib2 file with wgrib2
            print('Uncompressing grib: %s %s' % (self.wgrib2bin, tempfile))
            args = [self.wgrib2bin, tempfile, '-set_grib_type', 'simple', '-grib_out', filepath]
            if conf.spinfo:
                p = subprocess.Popen(args, startupinfo=conf.spinfo,
                                     stdout=sys.stdout, stderr=sys.stderr, shell=True)
            else:
                p = subprocess.Popen(args, stdout=sys.stdout, stderr=sys.stderr)
            p.wait()
            util.remove(tempfile)
        else:
            util.rename(tempfile, filepath)
        # Call callback if defined, otherwise put the file on the queue
        if self.callback:
            self.callback(cachefile)
        else:
            self.q.put(cachefile)
    else:
        # File too small: remove it and report failure
        if os.path.exists(tempfile):
            util.remove(tempfile)
        self.q.put(False)