def get_args(form_dict):
    """Build template args for a single dataset's chart or map page.

    form_dict must carry "fname", the dataset file name; its properties
    decide whether a map or a chart payload is attached.
    """
    properties.load_properties()
    fname = form_dict["fname"]
    file_properties = properties.properties[fname]
    file_data = utils.get_file_data(fname)
    # First group wins; datasets without groups land in "Other".
    group = file_properties.get("groups", ["Other"])[0]
    chart_type = file_properties.get("chart_type") or "Line"
    args = {
        "file_data": file_data,
        "properties": file_properties,
        "chart_type": chart_type,
        "group": group,
        "group_info": properties.groups[group],
        "consolelog": consolelog,
        "json": json,
        "len": len,
        "title": file_properties.get("title", fname),
        "description": file_properties.get("description", fname),
    }
    if chart_type == "Map":
        # Maps get their own data structure plus an optional legend string.
        args["map_data"] = get_map_data(file_data, file_properties)
        args["legend"] = file_properties.get("legend") or ""
    else:
        args["chart_data"] = get_chart_data(file_data, file_properties, chart_type)
    return args
def get_args(form_dict):
    """Build template args for the home page: all groups with dataset counts.

    Side effect: annotates each entry of properties.groups with a float
    "sets" count so templates computing sets/maxsets get true division.
    """
    properties.load_properties()
    # sorted() replaces keys()+list.sort(): same result in Python 2,
    # and it keeps working on Python 3 where keys() is a view.
    group_list = sorted(properties.groups)
    group_datasets = properties.get_group_datasets()
    # Generator instead of an intermediate list inside max().
    maxsets = max(len(datasets) for datasets in group_datasets.values())
    for g in properties.groups:
        properties.groups[g]["sets"] = float(len(group_datasets[g]))
    return {
        "group_list": group_list,
        "groups": properties.groups,
        "group_datasets": group_datasets,
        "get_hex_shade": utils.get_hex_shade,
        "maxsets": maxsets,
        "int": int,
        "title": "data.gov.in",
        "description": "A better way to visualize data.gov.in. Featuring all the datasets from data.gov.in in CSV format and with graphs"
    }
def download(): properties.load_properties() for i in range(61)[39:]: print "for page %s" % i response = requests.get(sources[0] + "?page=%s" % i, verify=False) page_properties = get_url_title_and_description_from_html(response.text) for filename in page_properties: filepath = os.path.join("data", "data.gov.in", filename) if not os.path.exists(filepath): try: url = urllib2.urlopen(page_properties[filename]["url"]) size = url.headers["Content-Length"] if int(size) < int(max_file_size): with open(filepath, "wb") as datafile: r = requests.get(page_properties[filename]["url"]) for chunk in r.iter_content(1024): datafile.write(chunk) else: print "[ignored] [too big] %s (%s)" % (filename, size) except urllib2.HTTPError, e: print e if os.path.exists(filepath): files = convert_to_csv(filepath, os.path.join("data", "csv")) for fpath in files: prepend_property_headers(fpath, page_properties[filename])
def get_args(form_data):
    """Build template args for one group's dataset listing (no SEO fields)."""
    properties.load_properties()
    group = form_data["group"]
    datasets = properties.get_group_datasets()[group]
    # Largest row count across the group's datasets, for scaling in templates.
    row_counts = [entry.get("rows", 0) for entry in datasets]
    return {
        "group_dataset": datasets,
        "group_info": properties.groups[group],
        "maxrows": max(row_counts),
        "flt": utils.flt,
    }
def get_args(form_data):
    """Build template args: sorted group names plus the groups mapping."""
    properties.load_properties()
    # sorted() replaces keys()+list.sort(): identical order in Python 2,
    # and still valid on Python 3 where keys() has no sort().
    return {
        "group_list": sorted(properties.groups),
        "groups": properties.groups,
    }
def get_args(form_data):
    """Build template args for a group's dataset page, including the
    SEO title and description derived from the group's label.
    """
    properties.load_properties()
    group = form_data["group"]
    group_dataset = properties.get_group_datasets()[group]
    group_info = properties.groups[group]
    label = group_info["label"]
    maxrows = max(p.get("rows", 0) for p in group_dataset)
    return {
        "group_dataset": group_dataset,
        "group_info": group_info,
        "maxrows": maxrows,
        "flt": utils.flt,
        "title": label + " Datasets",
        "description": "Government of India list of datasets for " + label,
    }
def get_args(form_dict):
    """Build template args for a single dataset page (chart only, no map)."""
    properties.load_properties()
    fname = form_dict["fname"]
    props = properties.properties[fname]
    data = utils.get_file_data(fname)
    # Datasets without an explicit group fall back to "Other".
    group = props.get("groups", ["Other"])[0]
    return {
        "file_data": data,
        "chart_data": get_chart_data(data, props),
        "properties": props,
        "group": group,
        "group_info": properties.groups[group],
        "consolelog": consolelog,
    }
def get_args(form_dict):
    """Build template args for the groups overview (no title/description).

    Side effect: annotates each entry of properties.groups with a float
    "sets" count for true division in templates.
    """
    properties.load_properties()
    # sorted() replaces keys()+list.sort(): same order, Python 3 safe.
    group_list = sorted(properties.groups)
    group_datasets = properties.get_group_datasets()
    # Generator avoids the intermediate list the original built for max().
    maxsets = max(len(datasets) for datasets in group_datasets.values())
    for g in properties.groups:
        properties.groups[g]["sets"] = float(len(group_datasets[g]))
    return {
        "group_list": group_list,
        "groups": properties.groups,
        "group_datasets": group_datasets,
        "get_hex_shade": utils.get_hex_shade,
        "maxsets": maxsets,
        "int": int,
    }
def download(): properties.load_properties() for i in xrange(35): print "page %s" % i response = requests.get(sources[1] + "?page=%s" % i, verify=False) page_properties = get_url_title_and_description_from_html(response.text) for filename in page_properties: print filename filepath = os.path.join("data", "raw", filename) if not os.path.exists(filepath): with open(filepath, "wb") as datafile: r = requests.get(page_properties[filename]["url"]) for chunk in r.iter_content(1024): datafile.write(chunk) properties.update_for_file(filename, page_properties[filename]) properties.save_properties()
def get_args(form_data):
    """Build template args: sorted group names and the full groups mapping."""
    properties.load_properties()
    # sorted() replaces keys()+list.sort(): identical result in Python 2
    # and forward-compatible with Python 3's dict views.
    return {"group_list": sorted(properties.groups), "groups": properties.groups}