#!/usr/bin/env python2.7 # -*- coding: utf-8 -*- import requests from datetime import datetime from datetime import timedelta from datetime import date from geopy.distance import vincenty from lib import json_io from lib import csv_io url = 'http://denguefever.csie.ncku.edu.tw/file/dengue_all.csv' data = csv_io.req_csv(url, 'utf-8') weather_data = json_io.read_json('../data/weather.json') drug_data = json_io.read_json('../data/drug_days.json') def format_data(current_date, value): d = datetime.strptime(current_date, '%Y/%m/%d').date().strftime('%Y/%m/%d') return {\ 'date': current_date, \ 'value': value, \ '氣溫': weather_data[d]['氣溫'], \ '相對溼度': weather_data[d]['相對溼度'], \ '降水量': weather_data[d]['降水量'] \ } def insert_village_data(village_data, village_values, current_date): rain, rain_day = get_wather_data(current_date) for v in village_values:
"""Mirror the Tainan minimum-statistical-area population table locally."""
from lib import csv_io

# Dec-2014 (ROC year 103) Tainan population counts per smallest statistical
# area; the upstream file is Big5-encoded.
SOURCE_URL = 'https://dl.dropboxusercontent.com/u/13580830/Tainan/Tainan_shp/103年12月臺南市統計區人口統計_最小統計區.csv'

rows = csv_io.req_csv(SOURCE_URL, 'big5')
csv_io.write_csv('population.csv', rows)
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""Download the Taiwan CDC daily dengue case file and extract 2015 Tainan rows.

Writes ../data/dengue_all_v4.csv with columns:
city, date, district, village, latitude, longitude (plus a Chinese header row).
"""
import json
import os
from bs4 import BeautifulSoup
import requests
import csv
import sys
from datetime import datetime
from datetime import timedelta
from datetime import date
from lib import csv_io
from lib import json_io

# data.gov.tw proxy for the NIDSS Dengue_Daily.csv download.
data = csv_io.req_csv(
    'http://data.gov.tw/iisi/logaccess/16702?dataUrl=http://nidss.cdc.gov.tw/download/Dengue_Daily.csv&ndctype=CSV&ndcnid=21025',
    'utf-8')
data = data[1:]  # drop the upstream header row

# Keep only rows with a 2015 onset date whose residence city is Tainan.
# Column layout (from upstream file): [1]=onset date, [5]=city,
# [6]=district, [7]=village, [9]/[10]=coordinates.
# (Original code carried a dead `start = False` flag — removed.)
new_data = [
    [row[5], row[1], row[6], row[7], row[9], row[10]]
    for row in data
    if row[1] and '2015' in row[1] and row[5] == '台南市'
]

# Chronological order by onset date.
new_data.sort(key=lambda x: datetime.strptime(x[1], '%Y/%m/%d').date())
new_data.insert(0, ['居住縣市', '日期', '區別', '里別', 'Latitude', 'Longitude'])
csv_io.write_csv('../data/dengue_all_v4.csv', new_data)
# Fetches four monthly Tainan open-data CSVs (Aug-Nov 2015) and reshuffles
# trailing columns so the later months match the August column order.
# NOTE(review): this chunk appears truncated — the m11 loop drops only one
# trailing column where the m10 loop drops two, and no output is written here;
# the remainder of the script is outside the visible chunk.
import csv
from datetime import datetime
from datetime import timedelta
from datetime import date
from geopy.distance import vincenty
import json
from lib import csv_io
from lib import json_io

if __name__ == '__main__':
    # One resource URL per month (104 = ROC year 2015): 10408..10411.
    m8 = 'http://data.tainan.gov.tw/dataset/4c260d97-e268-4b4a-8b15-c0fc92a25120/resource/316034ad-f2ae-4a8e-bafd-d6d98e388aaa/download/10408.csv'
    m9 = 'http://data.tainan.gov.tw/dataset/4c260d97-e268-4b4a-8b15-c0fc92a25120/resource/2cdd3bbe-6a8c-438e-b85a-1bde14382944/download/10409.csv'
    m10 = 'http://data.tainan.gov.tw/dataset/4c260d97-e268-4b4a-8b15-c0fc92a25120/resource/fde0f38c-ba91-40e1-a69b-406f061c1a3b/download/10410.csv'
    m11 = 'http://data.tainan.gov.tw/dataset/4c260d97-e268-4b4a-8b15-c0fc92a25120/resource/ede84d86-ffdf-4233-aaa1-b31b329fcaec/download/z10410410411.csv'
    m8 = csv_io.req_csv(m8, 'utf-8')
    m9 = csv_io.req_csv(m9, 'utf-8')
    m10 = csv_io.req_csv(m10, 'utf-8')
    m11 = csv_io.req_csv(m11, 'utf-8')
    # Print header rows so the column layouts can be compared by eye.
    print(m9[0])
    print(m10[0])
    for row in m10:
        # Move the two trailing columns into positions 8 and 9.
        # (After the first insert, negative indices shift — the exact source
        # cells depend on the row length; presumably this realigns the
        # October schema with August's. TODO confirm against printed headers.)
        row.insert(8, row[-2])
        row.insert(9, row[-3])
        del row[-2]
        del row[-2]
    for row in m11:
        row.insert(8, row[-2])
        row.insert(9, row[-3])
        del row[-2]
#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
"""Fetch the NCKU dengue case list, normalize dates/columns, write v3 CSV.

Output ../data/dengue_all_v3.csv: rows sorted by onset date, header renamed
to the canonical Chinese/English column names used downstream.
"""
import requests
import sys
from datetime import datetime
from datetime import timedelta
from datetime import date
from lib import csv_io
from lib import json_io

url = 'http://denguefever.csie.ncku.edu.tw/file/dengue_all.csv'
tmp = csv_io.req_csv(url, 'utf-8')

data = []
for item in tmp:
    # Keep only rows that have a date (col 4) and a coordinate (last col).
    if item[4] and item[-1]:
        # e.g. '9月21日' -> '2015/9/21' (the feed covers 2015 only).
        item[4] = '2015/' + item[4].replace('月', '/').replace('日', '')
        # Reorder: id, date, district, village, then the last two columns
        # swapped (item[-1] before item[-2]).
        data.append([item[0], item[4], item[1], item[2], item[-1], item[-2]])

header = data[0]
data = data[1:]
# Sort case rows chronologically by the normalized date string.
# (Original code also computed an unused `now` from the latest date — removed.)
data.sort(key=lambda x: datetime.strptime(x[1], '%Y/%m/%d').date())

header[1], header[2], header[-2], header[-1] = '日期', '區別', 'Latitude', 'Longitude'
header[3] = '里別'
data.insert(0, header)
csv_io.write_csv('../data/dengue_all_v3.csv', data)
"""write csv""" with open(file_name, 'w') as output_file: writer = csv.writer(output_file) writer.writerows(content) if __name__ == '__main__': data = csv_io.read_csv('../data/dengue_all.csv') urls = [ 'http://data.tainan.gov.tw/dataset/3ad9da64-0c29-4299-b769-320b57a09be8/resource/7bf16e0a-2445-4ccf-a0a0-ae06a8fda4ac/download/z104104121207.csv', 'http://data.tainan.gov.tw/dataset/3ad9da64-0c29-4299-b769-320b57a09be8/resource/d4af5055-3d2c-420f-ad12-373cfae430d3/download/z104104121208.csv' ] for u in urls: print(urls) data += csv_io.req_csv(u, 'utf-8')[1:] #data = csv_io.read_csv('../data/dengue_all.csv') for item in data: if not item[0]: del item continue if len(item) < 7: item.insert(0, '') if '105' in item[1]: item[1] = item[1].replace('105', '2015') if '104' in item[1]: item[1] = item[1].replace('104', '2015') try: if float(item[-1]) < 50: tmp = item[-1]