Beispiel #1
0
def makeListener(ip, connected, client):
  """Build an info dict describing one connected client.

  Args:
    ip: client IP address string; passed to Geocode.geocode for a location.
    connected: seconds the client has been connected, as a string
      (possibly padded with whitespace).
    client: opaque client identifier, returned unchanged.

  Returns:
    dict with keys:
      ip      -- the given IP,
      geocode -- geocode result, or '' if lookup returned a falsy value,
      time    -- connect moment as integer seconds since EPOCH
                 (module-level constant -- defined elsewhere in this file),
      client  -- the given client identifier,
      since   -- human-readable connect time: 'HH:MM:SS' if the client
                 connected today, otherwise weekday name plus 'HH:MM'.
  """
  conn = int(connected.strip())
  now = datetime.datetime.now()
  start = now - datetime.timedelta(seconds=conn)
  # Compare day-of-month with == (value equality). The original used `is`,
  # which only worked because CPython interns small ints -- an
  # implementation detail, not guaranteed behavior.
  since = start.strftime('%H:%M:%S' if start.day == now.day else '%A, %H:%M')
  time = int((start - EPOCH).total_seconds())

  geocode = Geocode.geocode(ip)

  return dict(ip=ip, geocode=geocode or '', time=time, client=client, since=since)
Beispiel #2
0
def locations(request):
	"""Django view: GET renders the index page; POST runs k-means over
	geocoded postal codes and renders the resulting map.

	POST parameters read from the form:
		num_clusters -- number of k-means clusters,
		country, state, city -- address parts used for geocoding,
		ceps -- ';'-separated list of postal codes (CEPs).

	Returns:
		HttpResponse with the rendered 'maps.html' (POST) or
		'index.html' (GET) template.
	"""
	regions, table = viewLists.listTableMoreInputs()

	if request.method == 'POST':
		# Map center: geocode the combined country/state/city address.
		num_clusters = int(request.POST.get('num_clusters'))

		colors = ColorsRandom.generate_colors(num_clusters)

		country = request.POST.get('country')
		state = request.POST.get('state')
		city = request.POST.get('city')
		center = Geocode.geocode([country + ' ' + state + ' ' + city, city], isCenter=True)
		center = str(center['lat']) + "," + str(center['lng'])

		# One [full address, cep] pair per ';'-separated postal code.
		ceps_list = [
			[country + ' ' +
			state + ' ' +
			city + ' ' +
			cep, cep] for cep in request.POST.get('ceps').split(';')]

		# Sequential geocoding; a parallel variant existed as
		# Geocode.parallelGeocode(ceps_list) but was disabled.
		items = [Geocode.geocode(pair) for pair in ceps_list]

		points, centroids = k_means.k_means_lists(num_clusters, 20, items)

		# Sum of squared errors per cluster, shown alongside the map.
		dic = k_means.sse(points, centroids)

		t = get_template('maps.html')
		html = t.render(
			Context(
				{
				'regions' : regions,
				'table': table,
				'center' : center,
				'localizations' : points,
				'colors' : colors,
				'centroids' : centroids,
				'sse' : dic,
				'hospitals' : MongoDB.list_all_healths(),
				'csrf_token' : csrf(request)['csrf_token']
				}
			)
		)
		# NOTE(review): replacing "'" with "'" is a no-op -- presumably this
		# was meant to unescape "&#39;" emitted by template autoescaping.
		# Kept as-is to preserve behavior; confirm against template output.
		html = html.replace("'", "'")

		return HttpResponse(html)

	t = get_template('index.html')
	html = t.render(
		Context(
			{
			'regions' : regions,
			'table': table,
			'csrf_token' : csrf(request)['csrf_token']
			}
		)
	)
	# NOTE(review): no-op replace, same as the POST branch above.
	html = html.replace("'", "'")
	return HttpResponse(html)
Beispiel #3
0
def run_component(api_key="AIzaSyCXXvQOEt31dz8Nw070bye9pwEDBEl0g1o",
                  data_dir='data_subsets'):
    """Geocode scraped doctor addresses and checkpoint progress to pickles.

    Walks every non-hidden pickle file in *data_dir*; for each doctor record
    not yet marked 'googled', geocodes every parsed address via G.Geocode
    and stores the result under doctor['GOOGLE']['geo_data']. Progress is
    periodically printed and the pickle periodically re-saved so the run
    can be resumed after interruption.

    Args:
        api_key: Google Geocoding API key. Defaults to the key that was
            hard-coded in the original script for backward compatibility.
            NOTE(review): secrets belong in config/env vars, not source.
        data_dir: directory holding the pickled data-subset files.
    """
    # Raised because pickling the (deeply nested) doctor dicts can exceed
    # the default recursion limit.
    sys.setrecursionlimit(50000)

    file_list = [
        f for f in os.listdir(data_dir) if not f.startswith('.')
    ]

    # Total geocode requests made with this api_key (quota tracking).
    search_counter = 0

    for file_name in file_list:
        pickle_file = os.path.join(data_dir, file_name)

        # Context manager closes the handle; the original leaked it.
        with open(pickle_file, 'rb') as fh:
            data = pickle.load(fh)

        num_docs = len(data)
        update_freq = 75  # print progress every N doctors
        upload_freq = 50  # checkpoint the pickle every N doctors

        counter = 0

        # For each doctor record in this subset.
        for doc_key in data:

            counter += 1

            doctor = data[doc_key]

            # Skip doctors already geocoded on a previous (resumed) run.
            if doctor['GOOGLE'].get('googled'):
                continue

            # Initialize the geocoding bookkeeping for this doctor.
            doctor['GOOGLE'].update({'googled': False})
            doctor['GOOGLE'].update({'geo_data': {}})

            # For each parsed search result for this doctor.
            for result_number in doctor['HTML_PARSING']['parsed_infos']:

                address = doctor['HTML_PARSING']['parsed_infos'][
                    result_number]['address']

                # Don't waste an API request on an empty/blank address.
                if address != "" and not address.isspace():
                    geocode = G.Geocode(api_key, address)
                    geocode.search()
                    search_counter += 1
                    doctor['GOOGLE']['geo_data'].update(
                        {result_number: geocode._dict})

                # NOTE(review): kept inside the loop as in the original --
                # a doctor with zero parsed results is never marked googled
                # and will be retried on the next run. Confirm intended.
                doctor['GOOGLE'].update({'googled': True})

            if counter % update_freq == 0:
                print("percentage completed to google for %s is: %s" %
                      (file_name, counter * 100 / num_docs))
                print('number of geocode queries made is: %s' %
                      (search_counter))
                print("==")
            if counter % upload_freq == 0:
                with open(pickle_file, 'wb') as fh:
                    pickle.dump(data, fh)
                print('..saved..')

        # Final save for this subset file.
        with open(pickle_file, 'wb') as fh:
            pickle.dump(data, fh)