def GetSunInfo(month, day):
    """Fetch sunrise and sunset times for the configured location.

    Args:
      month: month number (1-12) to look up.
      day: day of month to look up.

    Returns:
      A tuple (sunrise, sunset, duration_seconds) where sunrise and sunset
      are time strings as scraped from earthtools.org and duration_seconds
      is the seconds of daylight between them.

    Raises:
      ValueError: if the remote response did not contain both a sunrise
        and a sunset time.
    """
    sunrise = None
    sunset = None
    # Latitude/longitude come from the command line. Sun times barely
    # drift year to year, so cache the response for roughly six months.
    remote = cachedfetch.Fetch(
        'http://www.earthtools.org/sun/%s/%s/%d/%d/99/0'
        % (sys.argv[1], sys.argv[2], day, month),
        maxage=6 * 30 * 24 * 60 * 60)
    for l in remote.split('\n'):
        m = SUNRISE_RE.match(l)
        if m:
            sunrise = m.group(1)
        m = SUNSET_RE.match(l)
        if m:
            sunset = m.group(1)

    # Fail loudly with a clear message instead of crashing later with an
    # AttributeError on None.split().
    if sunrise is None or sunset is None:
        raise ValueError('Could not parse sunrise/sunset for %d/%d'
                         % (day, month))

    # Work out a duration. The dummy date only exists so we can subtract
    # two datetimes; "set" renamed to avoid shadowing the set() builtin.
    (risehour, risemin, risesec) = sunrise.split(':')
    (sethour, setmin, setsec) = sunset.split(':')
    rise_dt = datetime.datetime(2010, 1, 1, int(risehour), int(risemin),
                                int(risesec))
    set_dt = datetime.datetime(2010, 1, 1, int(sethour), int(setmin),
                               int(setsec))
    duration = set_dt - rise_dt
    return (sunrise, sunset, duration.seconds)
def Collect(cursor):
    """Scrape BOM observation pages and insert readings into the DB.

    Args:
      cursor: a MySQL DB-API cursor. Readings are inserted into the
        "sensors" table and committed at the end.
    """
    reading_time = None
    now = datetime.datetime.now()
    for url in ['http://reg.bom.gov.au/products/IDN60903/IDN60903.94926.shtml',
                'http://reg.bom.gov.au/products/IDN60903/IDN60903.94925.shtml']:
        # The station id is embedded in the URL filename; renamed from
        # "id" to avoid shadowing the builtin.
        station_id = url.split('.')[-2]
        print('%s: Fetching (url = %s, id = %s)'
              % (datetime.datetime.now(), url, station_id))
        remote = cachedfetch.Fetch(url, maxage=1800)
        print('%s: Fetch done' % datetime.datetime.now())
        for line in remote.split('\n'):
            line = line.rstrip()
            m = DATA_RE.match(line)
            if not m:
                continue
            field = m.group(1).split(' ')[-1]
            value = m.group(2)
            if field == 'datetime':
                # A datetime field starts a new reading. The value looks
                # like "<day-of-month>/<h:mmam|pm>".
                print('%s: timestr --> %s' % (datetime.datetime.now(), line))
                (monthday, timestr) = value.split('/')
                # Find the right day by walking backwards from today until
                # the day-of-month matches.
                monthday = int(monthday)
                reading_time = now
                while monthday != reading_time.day:
                    reading_time -= ONE_DAY
                # And the right time.
                tstruct = time.strptime(timestr, '%I:%M%p')
                reading_time = datetime.datetime(reading_time.year,
                                                 reading_time.month,
                                                 reading_time.day,
                                                 tstruct.tm_hour,
                                                 tstruct.tm_min)
            else:
                # NOTE(review): monthday/timestr are only bound once a
                # datetime field has been seen; a data row arriving first
                # would raise NameError here (pre-existing behavior).
                print(' %s: %s = %s (%s, %s)'
                      % (reading_time, DESCRIPTIONS.get(field, field), value,
                         monthday, timestr))
                # Parameterized query: the previous string interpolation
                # built SQL directly from scraped page content, which is
                # an injection risk and breaks on embedded quotes.
                cursor.execute('insert ignore into sensors'
                               '(epoch_seconds, sensor, value, hostname) '
                               'values(%s, %s, %s, %s)',
                               (time.mktime(reading_time.timetuple()),
                                DESCRIPTIONS.get(field, field),
                                value, station_id))
    cursor.execute('commit;')
def req(url=None, params=None, node=None, maxage=1800):
    """Make an internode api request.

    Args:
      url: list of URL path components appended to BASEURL.
      params: list of 'key=value' query string components.
      node: list of element names walked down from the XML root;
        defaults to ['api', 'services'].
      maxage: maximum cache age in seconds for the fetch.

    Returns:
      The xml.etree element reached by walking "node" from the root.
    """
    # The previous signature used mutable default arguments and then
    # mutated them (url.insert(0, BASEURL)), so the shared default list
    # accumulated BASEURL on every call. Use None sentinels and copy the
    # caller's lists instead of mutating them.
    url = [BASEURL] + list(url or [])
    params = list(params or [])
    if node is None:
        node = ['api', 'services']
    svcs = cachedfetch.Fetch('/'.join(url) + '?' + '&'.join(params),
                             maxage=maxage,
                             username=AUTH['username'],
                             password=AUTH['password'],
                             useragent='stillhq.com home automation/v%s' % REV)
    x = xml.etree.ElementTree.XML(svcs)
    for n in node:
        x = x.find(n)
    return x
def fetch(date, lat=False, lon=False, raw=False, cache=False, path='.',
          days=1, silent=False, csv=True, datatype=0, empty=-1):
    """Fetch BOM gridded solar/temperature data and render it as text.

    Args:
      date: datetime for the first day to fetch.
      lat: latitude of interest, or False to emit every row.
      lon: longitude of interest, or False to emit every column.
      raw: if True, print the decompressed grid and skip parsing.
      cache: if True, write the fetched content to a local file.
        NOTE(review): this branch references "found" and "filename",
        neither of which is defined in this function -- it would raise
        NameError if exercised. Confirm intended cache-file logic.
      path: not used in the visible code (presumably the cache directory).
      days: number of consecutive days to fetch.
      silent: not used in the visible code.
      csv: if True, emit CSV-style output; otherwise str() the tables.
      datatype: selects temperature vs solar data via test().
      empty: substitute value for cells equal to the grid's nodata value.

    Returns:
      The accumulated output string covering all requested days.

    Raises:
      Error: on fetch/parse failures or out-of-range lat/lon.
    """
    datastr = ["solar", "temperature"]  # NOTE(review): unused.
    s = ""
    for i in range(days):
        # One bzip2-compressed ASCII grid is published per day; the date
        # appears twice in the URL.
        datetxt = '%04d%02d%02d' % (date.year, date.month, date.day)
        url = '%s%s%s%s%s%s' % (LZWURL, test(
            datatype, BOMTEMPURL, BOMSOLARURL), datetxt, datetxt, SUFFIX,
            SUFFIXC)
        content = cachedfetch.Fetch(url, maxage=24 * 60 * 60)
        stream = bz2.decompress(content)
        if len(stream) == 0:
            raise Error("Invalid URL (probably incorrect date)", 1)

        #cache data if requested
        if cache and not found:
            handle = open(filename, "wb")
            handle.write(content)
            handle.close()

        if raw:
            # NOTE(review): "continue" skips the date increment at the
            # bottom of this loop, so raw mode with days > 1 refetches
            # the same day repeatedly -- confirm whether intended.
            print stream
            continue
        else:
            # Parse the ESRI ASCII grid header fields.
            m = re.compile("ncols ([0-9]+)").search(stream)
            if not m:
                raise Error("Can't read number of columns", 2)
            ncols = int(m.group(1))
            m = re.compile("nrows ([0-9]+)").search(stream)
            if not m:
                raise Error("Can't read number of rows", 3)
                sys.exit(1)  # NOTE(review): unreachable after raise.
            nrows = int(m.group(1))
            m = re.compile("xllcenter (-?([0-9]*\.)?[0-9]+)").search(stream)
            if not m:
                raise Error("Can't read longitude", 4)
            xcenter = float(m.group(1))
            m = re.compile("yllcenter (-?([0-9]*\.)?[0-9]+)").search(stream)
            if not m:
                raise Error("Can't read latitude", 5)
            ycenter = float(m.group(1))
            m = re.compile("cellsize (-?([0-9]*\.)?[0-9]+)").search(stream)
            if not m:
                raise Error("Can't read cell size\n", 6)
            cellsize = float(m.group(1))
            m = re.compile("nodata_value (-?([0-9]*\.)?[0-9]+)").search(stream)
            if not m:
                raise Error("Can't read nodata value", 7)
            nodata = float(m.group(1))

            # Header coordinates are cell centers; shift by half a cell
            # to get the grid's lower-left corner.
            nlon = xcenter - cellsize / 2
            nlat = ycenter - cellsize / 2

            # Grid corner layout:
            #nlat + nrows * cellsize , nlon   nlat + nrows * cellsize , nlon + ncols * cellsize
            #
            #
            #nlat,nlon                        nlat , nlon + ncols * cellsize

            # Check that latitude or longitude provided are within the area
            if lat is not False:
                if lat < nlat or lat > (nlat + nrows * cellsize):
                    raise Error("Invalid latitude (outside the covered zone)", 8)
            if lon is not False:
                if lon < nlon or lon > (nlon + ncols * cellsize):
                    raise Error("Invalid longitude (outside the covered zone)", 9)

            # Read the data rows. Splitting on spaces leaves an empty
            # leading/trailing field, hence the [1:-1] slice; only lines
            # with exactly ncols values count as data rows.
            table = []
            lines = stream.split('\n')
            index = 0
            for line in lines:
                numbers = line.split(' ')[1:-1]
                if len(numbers) == ncols:
                    table.append([])
                    for number in numbers:
                        number = round(float(number), 2)
                        if number == round(nodata, 2):
                            table[-1].append(empty)
                        else:
                            table[-1].append(number)
                    index += 1
            if index != nrows:
                raise Error("Number of rows retrieved incorrect", 10)

            # Convert the requested lat/lon to table indices; row 0 is the
            # northernmost row. NOTE(review): these are computed even when
            # lat/lon are False, relying on False behaving as 0.
            indexy = int((nlat + nrows * cellsize - lat) / cellsize)
            indexx = int((lon - nlon) / cellsize)
            indexlat = nlat + nrows * cellsize
            indexlon = nlon

            if lon is False and lat is not False:
                #Just latitude provided
                if csv:
                    # Header row of longitudes, then the values along the
                    # requested latitude.
                    s += datetxt + "\n,"
                    for x in range(ncols):
                        s += str(indexlon) + ","
                        indexlon += cellsize
                    s += "\n" + str((nlat + (nrows - indexy) * cellsize)) + ","
                    for x in range(ncols):
                        s += str(table[indexy][x]) + ","
                    s += "\n"
                else:
                    s += str(table[indexy]) + "\n"
            elif lon is not False and lat is False:
                #Just longitude provided
                if csv:
                    # One latitude per row with the value at the requested
                    # longitude.
                    s += datetxt + "\n," + str(nlon + indexx * cellsize) + "\n"
                    for y in range(nrows):
                        s += str(indexlat) + ","
                        s += str(table[y][indexx]) + "\n"
                        indexlat -= cellsize
                    s += "\n"
                else:
                    table2 = []
                    for row in table:
                        table2.append([])
                        table2[-1].append(row[indexx])
                    s += str(table2) + "\n"
            elif lat and lon:
                # Both provided: emit just the single matching cell.
                if csv:
                    s += datetxt + "," + str(table[indexy][indexx]) + "\n"
                else:
                    s += str(table[indexy][indexx]) + "\n"
            else:
                # Neither provided: dump the whole grid, with longitude
                # headers across the top and latitudes down the left.
                if csv:
                    s2 = datetxt + "\n,"
                    for y in range(ncols):
                        s2 += str(indexlon) + ","
                        indexlon += cellsize
                    s += s2[:-1] + "\n"
                    for y in range(nrows):
                        s2 = str(indexlat) + ","
                        indexlat -= cellsize
                        for x in range(ncols):
                            s2 += str(table[y][x]) + ","
                        s += s2[:-1] + "\n"
                else:
                    s += str(table) + "\n"
        date += datetime.timedelta(days=1)
    return s
import substitute # Templates f = open(sys.argv[0].replace('py', 'json')) json_template = f.read() f.close() f = open(sys.argv[0].replace('py', 'json.element')) json_element_template = f.read() f.close() # Load inflation data -- this needs to be renormalized (fd, inflation_fname) = tempfile.mkstemp() os.write(fd, cachedfetch.Fetch('http://www.rba.gov.au/statistics/tables/xls/' 'g02hist.xls')) os.close(fd) inflation = {} current_inflation = 100.0 xls = readexcel.readexcel(inflation_fname) for row in xls.iter_list('Data'): # Rows: # - 0: Month-Year # - 1: All groups # - 2: Tradables # - 3: Tradables excluding food # - 4: Non-tradables # - 5: Food # - 6: Alcohol and tobacco # - 7: Clothing and footwear