global_data[district]["taluka"][taluka]["panchayat"] = data def fetch_taluka(url, district): encoded_url = _cleanUrl(url) data = talukaExtract(encoded_url) if "taluka" in global_data[district].keys(): global_data[district]["taluka"] =\ dict(data.items() + global_data[district]["taluka"].items()) else: global_data[district]["taluka"] = data jobs = [gevent.spawn(fetch_panchayat, value['url'], district, key) for key, value in data.iteritems()] gevent.joinall(jobs) global_data = local.local() data = districtExtract("http://164.100.112.66/netnrega/writereaddata/citizen"\ "_out/phy_fin_reptemp_Out_18_local_1112.html") global_data = data jobs = [gevent.spawn(fetch_taluka, value["url"], key) for key, value in data.iteritems()] gevent.joinall(jobs) f = open('database/data.json', 'w') output = json.dumps(global_data) print time.time() - start f.write(output)
def fetch_taluka(url, district, year):
    """Scrape one district's taluka table for `year` and fan out
    panchayat fetches.

    Merges the scraped taluka dict into global_data[district]["taluka"]
    and spawns one gevent greenlet per taluka, blocking until all finish.

    :param url: raw taluka-listing URL for the district (cleaned here).
    :param district: key of the district inside ``global_data``.
    :param year: report year string (e.g. "2011") forwarded to the
        extractors and to fetch_panchayat.
    """
    encoded_url = _cleanUrl(url)
    data = talukaExtract(encoded_url, year)
    if "taluka" in global_data[district]:
        # Python 2 dict merge: existing taluka entries win on collision.
        global_data[district]["taluka"] = \
            dict(data.items() + global_data[district]["taluka"].items())
    else:
        global_data[district]["taluka"] = data
    # BUG FIX: the original wrote `... for key, value in data.iteritems(),
    # year]`, placing `year` after the comprehension's iterable — a syntax
    # error. `year` belongs in the gevent.spawn() argument list.
    jobs = [gevent.spawn(fetch_panchayat, value['url'], district, key, year)
            for key, value in data.iteritems()]
    gevent.joinall(jobs)


# NOTE(review): this threadlocal is immediately rebound to `data` below,
# so the local.local() object is discarded — confirm intent.
global_data = local.local()

# year 11-12
data = districtExtract(districtyear1112, "2011")
global_data = data
# BUG FIX: `"2011"` was appended after the comprehension's iterable
# (`... in data.iteritems(), "2011"]`), a syntax error; it is the `year`
# argument of fetch_taluka.
jobs = [gevent.spawn(fetch_taluka, value["url"], key, "2011")
        for key, value in data.iteritems()]
gevent.joinall(jobs)
# BUG FIX: close the output file (the original never closed the handle).
with open('database/data11.json', 'w') as f:
    f.write(json.dumps(global_data))

# year 10-11
data = districtExtract(districtyear1011, "2010")
global_data = data
# NOTE(review): the source chunk is truncated mid-statement here; the
# comprehension is completed to mirror the 11-12 block above — confirm
# against the full file.
jobs = [gevent.spawn(fetch_taluka, value["url"], key, "2010")
        for key, value in data.iteritems()]