def test():
    api_url = "http://fullcontact.t.proxylocal.com/api/"
    api_get_list_url = "http://fullcontact.t.proxylocal.com/api/get-list/"

    # get emails from file
    filestream = open("emails_test.csv")
    data = emails_from_file(filestream)

    # post request for lookup
    print "REQUEST DATA LOOKUP"
    response = requests.post(
        api_url,
        headers={"content-type": "application/json"},
        data=simplejson.dumps({"data": data})
    ).json
    print "LOGS\n", response

    # wait for sufficient time for the lookups to finish
    print "\nWAIT FOR 30 SECONDS...\n"
    time.sleep(30)

    # get the aggregated data for one contact
    print "Get the aggregated data as if first 3 emails belonged to one person"
    data_for_get = {"email": ",".join([data_tuple[1] for data_tuple in data[:3]])}
    response = requests.get(
        api_url,
        headers={"content-type": "application/json"},
        params=data_for_get
    ).json
    print "RESPONSE\n", response

    # get the list of aggregated data for all emails, grouped as if each
    # contact owned 3 consecutive email addresses
    print "Get the aggregated data as if each contact owned 3 consecutive emails"
    data_for_list_get = []
    counter = 0
    while counter < len(data):
        data_for_list_get.append(data[counter:counter + 3])
        counter += 3
    response = requests.post(
        api_get_list_url,
        headers={"content-type": "application/json"},
        data=simplejson.dumps({"data": data_for_list_get})
    ).json
    print "RESPONSE\n", response
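
# The helper emails_from_file() used above is not shown in this section.
# Below is a minimal sketch of what it is assumed to do: read a CSV
# file-like object and return ('email', address) lookup tuples, one per
# address found. This is an illustrative assumption, not the original
# implementation.
import csv

def emails_from_file(filestream):
    """Return ('email', address) tuples for every address found in the CSV."""
    data = []
    for row in csv.reader(filestream):
        for cell in row:
            cell = cell.strip()
            if '@' in cell:
                data.append(('email', cell))
    return data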
def result():
    if request.method == 'GET':
        form = ContactForm(request.args)
        if form.validate():
            # only include the fields that were actually filled in
            query = []
            if form.email.data:
                query.append(('email', form.email.data))
            if form.phone.data:
                query.append(('phone', form.phone.data.replace('+', '')))
            if form.twitter.data:
                query.append(('twitter', form.twitter.data))
            if form.facebook.data:
                query.append(('facebookUsername', form.facebook.data))
            userdata = aggregate_data(query)
            return render_template('results/get_results.html', ud=userdata)
    elif request.method == 'POST':
        if request.files.get('file'):
            # get emails from file, if one was uploaded
            batch_data = emails_from_file(request.files.get('file'))
        else:
            # otherwise parse the submitted text, e.g. "email:foo@bar.com,twitter:john"
            batch_data = request.form.get('batch_data')
            if batch_data:
                batch_data = batch_data.replace('facebook', 'facebookUsername').split(',')
                for i in range(len(batch_data)):
                    batch_data[i] = tuple(batch_data[i].split(':'))
        if batch_data:
            response = batch_lookup(batch_data, request.url_root + url_for('webhook')[1:])
        else:
            response = ["Nothing to process"]
        return render_template('results/post_results.html', response=response)
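
# ContactForm is instantiated above but defined elsewhere in the project.
# A minimal sketch, assuming plain WTForms with all four fields optional --
# an assumption for illustration, not the original form definition.
from wtforms import Form, StringField
from wtforms.validators import Optional

class ContactForm(Form):
    email = StringField('Email', validators=[Optional()])
    phone = StringField('Phone', validators=[Optional()])
    twitter = StringField('Twitter', validators=[Optional()])
    facebook = StringField('Facebook', validators=[Optional()])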
def main():
    # have the webhook working by:
    # 1. python flask_fullcontact.py
    # 2. proxylocal 5000 --host fullcontact
    # in Flask, the address of the webhook can be determined with
    # request.url_root and the url_for() function
    webhook = 'http://fullcontact.t.proxylocal.com/webhook/'

    # request lookup for emails, phone, twitter and facebook
    data_list = [
        ('email', '*****@*****.**'),
        ('email', '*****@*****.**'),
        ('phone', '+48601941311'),
        ('twitter', 'stefanwojcik'),
        ('facebookUsername', 'wojcikstefan')
    ]
    batch_lookup(data_list, webhook, debug=True)

    # request lookup for emails from a CSV file
    batch_lookup(emails_from_file(open('emails_test.csv')), webhook)

    # aggregate data from 2 emails, phone, twitter and facebook
    u = aggregate_data(data_list)
    print u.to_dict()
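
# The '/webhook/' endpoint that the lookups post results back to is not part
# of this section. A minimal sketch of such a Flask view, assuming a
# module-level `app` object and that the callback body is JSON which is
# simply printed -- an assumption, not the original route.
@app.route('/webhook/', methods=['POST'])
def webhook():
    payload = request.get_json(force=True, silent=True) or dict(request.form)
    print "WEBHOOK PAYLOAD\n", payload
    return 'OK'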