Example #1
	def __querySchool(estate):
		f = HttpReader.retrieveUrl(estate.url)
		idx = f.find('<li class="nearby-schools-header">')
		if idx != -1:
			idx = idx + len('<li class="nearby-schools-header">')
		else:
			idx = 0
		while idx != -1:
			# <li class="nearby-school">
			idx = f.find('<li class="nearby-school', idx)
			if idx == -1:
				break
			idx2 = f.find('</li>', idx)
			if idx2 == -1:
				print 'cannot find school end tag'
				break  # continue here would re-find the same tag and loop forever
			idx2 = idx2 + len('</li>')
			s = f[idx:idx2]
			idx = idx2
			school = School(s)
			estate.schools.append(school)
			if school.type == SchoolType.ELEMENTARY:
				if estate.elementary is None:
					estate.elementary = school
				else:
					# keep the assigned school if only one is assigned,
					# otherwise keep the better-rated one
					if not estate.elementary.isAssigned and school.isAssigned:
						estate.elementary = school
					elif estate.elementary.isAssigned and not school.isAssigned:
						pass
					else:
						if estate.elementary.rating < school.rating:
							estate.elementary = school
						print 'multiple elementary schools'
						print 'estate.elementary : ' + str(estate.elementary)
						print 'school : ' + str(school)
			elif school.type == SchoolType.MIDDLE:
				if estate.middle is None:
					estate.middle = school
				else:
					print 'multiple middle schools'
					print 'estate.middle : ' + str(estate.middle)
					print 'school : ' + str(school)
			elif school.type == SchoolType.HIGH:
				if estate.high is None:
					estate.high = school
				else:
					print 'multiple high schools'
					print 'estate.high : ' + str(estate.high)
					print 'school : ' + str(school)
			else:
				print 'unknown school type : ' + str(school)
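For reference, the manual find() loop above can be collapsed into one regular expression, as long as the pattern carries the closing quote so the nearby-schools-header item is skipped. This is only a sketch; HttpReader, School and SchoolType belong to the surrounding project and are not defined here.

import re

def extract_school_blocks(html):
    # sketch: grab every <li class="nearby-school">...</li> block in one pass;
    # the closing quote in the pattern keeps the nearby-schools-header item out
    return re.findall(r'<li class="nearby-school">.*?</li>', html, re.DOTALL)

# each block could then be fed to School() exactly as in the loop above:
# for s in extract_school_blocks(f):
#     estate.schools.append(School(s))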
Example #2
	def queryByZip(self, zip):
		url = self.host + '/homes/' + zip + '_rb/'
		data = HttpReader.retrieveUrl(url)
		self.__collectEstates(data)
		# walk the changePage(n) links so every result page is collected once
		idx = data.find('changePage(')
		pages = []
		while idx != -1:
			idx2 = data.find(')', idx)
			page = data[idx + len('changePage('):idx2]
			if page not in pages:
				pages.append(page)
				self.__queryByZipMore(zip, page)
			idx = data.find('changePage(', idx2)
		return self.estates
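The pagination handling above just scans the markup for changePage(n) calls and visits each page number once. That extraction is easy to check in isolation; the HTML fragment below is made up purely for the demonstration.

def extract_pages(data):
    # collect each distinct n appearing as changePage(n), preserving order
    pages = []
    idx = data.find('changePage(')
    while idx != -1:
        idx2 = data.find(')', idx)
        page = data[idx + len('changePage('):idx2]
        if page not in pages:
            pages.append(page)
        idx = data.find('changePage(', idx2)
    return pages

sample = '<a onclick="changePage(2)">2</a> <a onclick="changePage(3)">3</a>'
print(extract_pages(sample))  # ['2', '3']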
Example #3
	def __queryByZipMore(self, zip, page):
		# fetch one additional result page (the .../<page>_p/ URL) and collect it
		url = self.host + '/homes/' + zip + '_rb/' + page + '_p/'
		data = HttpReader.retrieveUrl(url)
		self.__collectEstates(data)
Example #4
            wrtm = SqLiteWriter(db=cr.json['met_wr'][7:],
                                table='monitoring_met',
                                filt=['id', 'pressure', 'temp', 'humidity',
                                      'wettemp', 'datetime'])
        else:
            wrtm = CsvWriter(name='met', fname=cr.json['met_wr'],
                             filt=['id', 'temp', 'pressure', 'humidity',
                                   'wettemp', 'datetime'], mode='a')
        data = {'id': cr.json['station_id'], 'temp': temp,
                'pressure': pres, 'humidity': humi, 'wettemp': wet}
        if wrtm.WriteData(data) == -1:
            logging.error('Met data write failed')
# get station coordinates
print("Loading station coords...")
if re.search('^http[s]?://', cr.json['coo_rd']):
    rd_st = HttpReader(url=cr.json['coo_rd'], ptys=['STA'], \
                       filt=['id', 'east', 'north', 'elev'])
else:
    rd_st = GeoReader(fname=cr.json['coo_rd'], \
                      filt=['id', 'east', 'north', 'elev'])
w = rd_st.Load()
st_coord = [x for x in w if x['id'] == cr.json['station_id']]
if not st_coord:
    logging.fatal("Station not found: %s", cr.json['station_id'])
    sys.exit(-1)
# coordinate writer
fmt = '.%df' % cr.json['decimals']
if re.search('^http[s]?://', cr.json['coo_wr']):
    wrt = HttpWriter(url=cr.json['coo_wr'], mode='POST', dist=fmt)
elif re.search('^sqlite:', cr.json['coo_wr']):
    wrt = SqLiteWriter(db=cr.json['coo_wr'][7:], dist=fmt,
                       table='monitoring_coo',
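The output targets in these scripts are all dispatched the same way: an http(s):// URL selects HttpWriter, a sqlite: prefix selects SqLiteWriter with the 7-character prefix stripped off, and anything else is treated as a CSV file. A condensed sketch of that dispatch follows; the helper name is made up, the writer classes come from the surrounding project, and any constructor keyword not visible above is an assumption.

import re

def coord_writer(target, fmt):
    # hypothetical helper mirroring the branches above
    if re.search('^http[s]?://', target):
        return HttpWriter(url=target, mode='POST', dist=fmt)
    if re.search('^sqlite:', target):
        # strip the 7-character 'sqlite:' prefix to get the database path
        return SqLiteWriter(db=target[7:], dist=fmt, table='monitoring_coo',
                            filt=['id', 'east', 'north', 'elev'])
    # plain path: fall back to a CSV file, as the met branch above does
    return CsvWriter(name='coo', fname=target, mode='a',
                     filt=['id', 'east', 'north', 'elev'])

# wrt = coord_writer(cr.json['coo_wr'], '.%df' % cr.json['decimals'])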
Example #5
        else:
            print("Config file not found " + sys.argv[1])
            logging.fatal("Config file not found " + sys.argv[1])
            sys.exit(-1)
    else:
        print("Usage: coord_plot.py config_file")
        sys.exit(-1)

    # logging
    # TODO: an error message is produced if the log file does not exist yet
    logging.basicConfig(format=cr.json['log_format'], filename=cr.json['log_file'], \
         filemode='w', level=cr.json['log_level'])

    # load reference coordinates of points
    if re.search('^http[s]?://', cr.json['coo_ref']):
        rd_st = HttpReader(url=cr.json['coo_ref'], ptys=['STA'], \
                           filt=['id', 'east', 'north', 'elev'])
    elif re.search('^sqlite:', cr.json['coo_ref']):
        rd_st = SqLiteReader(db=cr.json['coo_ref'][7:], \
                             filt=['id', 'east', 'north', 'elev'])
    else:
        rd_st = GeoReader(fname=cr.json['coo_ref'], \
                          filt=['id', 'east', 'north', 'elev'])
    w = rd_st.Load()
    logging.info('%d rows read from %s' % (len(w), cr.json['coo_ref']))

    # transform into the coordinate system of the reference line
    if len(cr.json['ref_line_points']) == 2:
        logging.info(
            'ref line points: %s, %s' %
            (cr.json['ref_line_points'][0], cr.json['ref_line_points'][1]))
        # coordinates of point A on the reference line
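The code following that comment is cut off, but the step named above expresses the monitored coordinates in the system of the reference line defined by the two ref_line_points. A self-contained sketch of that kind of translate-and-rotate transformation (the actual script may compute it differently):

import math

def to_ref_line(east, north, pa, pb):
    # shift the origin to point A, then rotate by the direction angle of the
    # A->B line; the first result runs along the line, the second across it
    ang = math.atan2(pb[1] - pa[1], pb[0] - pa[0])
    ex = east - pa[0]
    ny = north - pa[1]
    along = ex * math.cos(ang) + ny * math.sin(ang)
    across = -ex * math.sin(ang) + ny * math.cos(ang)
    return along, across

# a point on the line between A=(0, 0) and B=(10, 10) has across ~ 0
print(to_ref_line(5.0, 5.0, (0.0, 0.0), (10.0, 10.0)))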
Example #6
        temp = float(bmp.GetTemp()['temp'])
        wet = None  # wet temperature unknown
    elif cr.json['met'].upper() == 'SENSEHAT':
        from sense_hat import SenseHat
        from webmet import WebMet
        sense = SenseHat()
        pres = sense.get_pressure()
        temp = sense.get_temperature()
        humi = sense.get_humidity()
        wet = WebMet.GetWetTemp(temp, humi)
    ts.SetAtmCorr(float(atm['lambda']), pres, temp, wet)
    # TODO send met data to server/file
# get station coordinates
print "Loading station coords..."
if re.search('^http[s]?://', cr.json['coo_rd']):
    rd_st = HttpReader(url=cr.json['coo_rd'], ptys='STA', \
        filt=['id', 'east', 'north', 'elev'])
    # TODO read from local file if HttpReader failed
    # other file reader from config coo_rd_loc (optional)
else:
    rd_st = GeoReader(fname=cr.json['coo_rd'], \
        filt=['id', 'east', 'north', 'elev'])
w = rd_st.Load()
print w
st_coord = [x for x in w if x['id'] == cr.json['station_id']]
if len(st_coord) == 0:
    logging.error("Station not found: " + cr.json['station_id'])
    sys.exit(-1)
# coordinate writer & observation writer
fmt = '.%df' % cr.json['decimals']
if re.search('^http[s]?://', cr.json['coo_wr']):
    wrt = HttpWriter(url=cr.json['coo_wr'], mode='POST', dist=fmt)
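The TODO in this last example asks for a local fallback when the HTTP coordinate source fails. One possible shape for that, reusing the readers shown above; HttpReader and GeoReader come from the surrounding project, coo_rd_loc is the optional config key the TODO mentions, and whether HttpReader signals failure with an exception or an empty result is an assumption here.

import re
import logging

def load_station_coords(cr):
    # sketch only: try the HTTP source first, then fall back to a local file
    flt = ['id', 'east', 'north', 'elev']
    if re.search('^http[s]?://', cr.json['coo_rd']):
        try:
            w = HttpReader(url=cr.json['coo_rd'], ptys='STA', filt=flt).Load()
        except Exception:
            w = []
        if not w and 'coo_rd_loc' in cr.json:
            logging.warning('HTTP coordinate source failed, using local file')
            w = GeoReader(fname=cr.json['coo_rd_loc'], filt=flt).Load()
        return w
    return GeoReader(fname=cr.json['coo_rd'], filt=flt).Load()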