    # Walk the rows (assumed to be in date order), accumulating a daily total
    # and per-village counts; whenever the date changes, flush the totals.
    for item in data:
        key = item[1].replace(' ', '') + item[2]
        # Skip July 2015 rows, rows whose village field contains '區', and empty village names.
        if ('2015/07' in item[0]) or '區' in item[2] or not item[2]:
            continue

        if key not in village_values:
            village_values[key] = 0

        event_date = item[0]
        if current_date == event_date:
            value += 1
            village_values[key] += 1
        else:
            output_data.append(format_data(current_date, value))
            insert_village_data(village_data, village_values, current_date)
            insert_to_village_table(village_values)
            current_date = event_date
            value = 1
            village_values = {}
   
    output_data.append(format_data(current_date, value))
    insert_village_data(village_data, village_values, current_date)
    insert_to_village_table(village_values)

    json_io.write_json('../data/dynamic/village_bar_data.json', village_data)
    json_io.write_json('../data/dynamic/bar_data.json', output_data)
    print (output_data[-1], 'done.')

    csv_io.write_csv('../public/data/village_dengue_data.csv', csv_output)
    csv_io.write_csv('../public/data/village_dengue_table.csv', village_table)
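
Every example on this page imports csv_io and json_io from a local lib package that is not itself shown here. Below is a minimal sketch of what those helpers might look like, inferred only from how req_csv, read_csv, write_csv and write_json are called in these scripts; the file encodings and error handling are assumptions, not the project's actual code.

# lib/csv_io.py -- a sketch, inferred from the call sites in these examples.
import csv

import requests


def req_csv(url, encoding):
    """Download a remote CSV and return it as a list of rows."""
    res = requests.get(url)
    res.encoding = encoding
    return list(csv.reader(res.text.splitlines()))


def read_csv(path):
    """Read a local CSV file into a list of rows."""
    with open(path, newline='', encoding='utf-8') as f:
        return list(csv.reader(f))


def write_csv(path, rows):
    """Write a list of rows out as CSV."""
    with open(path, 'w', newline='', encoding='utf-8') as f:
        csv.writer(f).writerows(rows)


# lib/json_io.py -- same caveat.
import json


def write_json(path, obj):
    """Dump a Python object to a JSON file."""
    with open(path, 'w', encoding='utf-8') as f:
        json.dump(obj, f, ensure_ascii=False)
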
Example #2
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import json
import os
from bs4 import BeautifulSoup
import requests
import csv
import sys
from datetime import datetime
from datetime import timedelta
from datetime import date
from lib import csv_io
from lib import json_io

data = csv_io.req_csv(
    'http://data.gov.tw/iisi/logaccess/16702?dataUrl=http://nidss.cdc.gov.tw/download/Dengue_Daily.csv&ndctype=CSV&ndcnid=21025',
    'utf-8')
data = data[1:]
new_data = []
start = False
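# Keep only 2015 cases whose residence county (row[5]) is 台南市 (Tainan City),
# pulling out county, date, district, village and the coordinate columns.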
for row in data:
    if (row[1] and '2015' in row[1] and row[5] == '台南市'):
        new_data.append([row[5], row[1], row[6], row[7], row[9], row[10]])

new_data = sorted(new_data,
                  key=lambda x: datetime.strptime(x[1], '%Y/%m/%d').date())
new_data.insert(0, ['居住縣市', '日期', '區別', '里別', 'Latitude', 'Longitude'])
csv_io.write_csv('../data/dengue_all_v4.csv', new_data)
from lib import csv_io

data = csv_io.req_csv(
    'https://dl.dropboxusercontent.com/u/13580830/Tainan/Tainan_shp/103年12月臺南市統計區人口統計_最小統計區.csv',
    'big5')
csv_io.write_csv('population.csv', data)
            if '40-60' in data[row[-5]]:
                data[row[-5]]['40-60'] += 1
            else:
                data[row[-5]]['40-60'] = 1
        else:
            if '>60' in data[row[-5]]:
                data[row[-5]]['>60'] += 1
            else:
                data[row[-5]]['>60'] = 1

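# For each district, divide the case count in every age bracket by the matching
# entry in p_data (presumably the district's population in that bracket).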
new_data = [['區別', '0-20', '20-40', '40-60', '>60']]
for region in data:
    d = data[region]
    if '0-20' not in d:
        d['0-20'] = 0
    if '20-40' not in d:
        d['20-40'] = 0
    if '40-60' not in d:
        d['40-60'] = 0
    if '>60' not in d:
        d['>60'] = 0

    p = p_data['臺南市'+region]
    print (d['40-60'])
    new_data.append([region, round(d['0-20']/p['0-20'], 4),
                     round(d['20-40']/p['20-40'], 4),
                     round(d['40-60']/p['40-60'], 4),
                     round(d['>60']/p['>60'], 4)])

csv_io.write_csv('../data/tainan_dengue_age_ratio.csv', new_data)
Example #5
from datetime import datetime
from lib import csv_io


def filter_data(input_):
    # Keep only the columns used downstream: 日期 (date), Latitude, Longitude.
    data = []
    for item in input_:
        data.append([item[4], item[8], item[9]])
    return data


if __name__ == '__main__':
    url = 'http://denguefever.csie.ncku.edu.tw/file/drug_all.csv'
    data = csv_io.req_csv(url, 'utf-8')
    data = filter_data(data)

    data = data[1:]
    dengue_data = csv_io.read_csv('../data/data.csv')
    tmp = dengue_data[-1][1]
    now = datetime.strptime(tmp, '%Y/%m/%d').date()

    header = ['日期', 'Latitude', 'Longitude']
    output_data = []
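    # Keep only records from the seven days leading up to the latest dengue
    # case date ('now'); anything dated after 'now' ends the scan.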
    for row in data:
        d = '2015年' + row[0]
        event_date = datetime.strptime(d, '%Y年%m月%d日').date()
        if event_date > now:
            break
        delta = now - event_date
        row[-1], row[-2] = float(row[-1]), float(row[-2])
        if delta.days < 7:
            output_data.append(row)
    output_data.insert(0, header)

    csv_io.write_csv('../data/drug_data.csv', output_data)
    print(output_data, 'done')
Example #6
    print('......')
    three_003_data.insert(0, header)
    three_0025_data.insert(0, header)
    three_002_data.insert(0, header)

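    # Hot points over the five-day window at three thresholds: a point needs at
    # least 3% / 2.5% / 2% of the window's cases within 500 m of it (assuming
    # geo.get_hot_points(points, min_count, radius) works that way).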
    five_003_data = geo.get_hot_points(days5, len(days5) * 0.03, 500)
    print('......')
    five_0025_data = geo.get_hot_points(days5, len(days5) * 0.025, 500)
    print('......')
    five_002_data = geo.get_hot_points(days5, len(days5) * 0.02, 500)
    print('......')
    five_003_data.insert(0, header)
    five_0025_data.insert(0, header)
    five_002_data.insert(0, header)

    csv_io.write_csv('../data/one_002_data.csv', one_002_data)
    csv_io.write_csv('../data/three_002_data.csv', three_002_data)
    csv_io.write_csv('../data/five_002_data.csv', five_002_data)

    csv_io.write_csv('../data/one_0025_data.csv', one_0025_data)
    csv_io.write_csv('../data/three_0025_data.csv', three_0025_data)
    csv_io.write_csv('../data/five_0025_data.csv', five_0025_data)

    csv_io.write_csv('../data/one_003_data.csv', one_003_data)
    csv_io.write_csv('../data/three_003_data.csv', three_003_data)
    csv_io.write_csv('../data/five_003_data.csv', five_003_data)

    header.append('color')
    days7.insert(0, header)
    csv_io.write_csv('../data/seven_data.csv', days7)
    village_weekly = []
    start = datetime.strptime('2015/08/03', '%Y/%m/%d').date()
    insert_to_village_table(village_values)
    header = ['區別', '里別']
    isHeader = True
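    # Collapse the per-day counts in village_table into 7-day sums per village;
    # the 'MM/DD-MM/DD' column headers are built only while isHeader is True.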
    for row in village_table:
        if (sum(row[1:])) == 0:
            continue
        r1, r2 = row[0].split('區')
        village_weekly.append([r1 + '區', r2])
        for i in range(1, len(row), 7):
            if ((i + 6) > len(row)):
                break

            w_sum = sum(row[i:i + 7])
            village_weekly[len(village_weekly) - 1].append(w_sum)
            if isHeader:
                end = start + timedelta(days=6)
                d = "'" + str(start.strftime('%m/%d')) + '-' + str(
                    end.strftime('%m/%d')) + "'"
                header.append(d)
                start = end + timedelta(days=1)

        isHeader = False

    village_weekly.insert(0, header)
    csv_io.write_csv('../data/village_dengue_weekly.csv', village_weekly)
    csv_io.write_csv('../public/data/village_dengue_table_v2.csv',
                     village_table)
Example #8
from lib import csv_io
from datetime import datetime

data = csv_io.read_csv('./sheet.csv')
new = [['日期', '里別', 'Latitude', 'Longitude']]
data = data[1:]
for item in data:
    item[6] = datetime.strptime('2015/' + item[6], '%Y/%m/%d').date()

data = sorted(data, key=lambda x: x[6])

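# Keep only rows whose result column reads 陽性 (positive) and emit
# date, village, latitude and longitude.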
for item in data:
    if item[-3] == '陽性':
        item[6] = item[6].strftime('%Y/%m/%d')
        new.append([item[6], item[3], item[-2], item[-1]])
csv_io.write_csv('dengue.csv', new)
Example #9
            insert_to_village_table({})


    village_weekly = []
    start = datetime.strptime('2015/10/01', '%Y/%m/%d').date()
    insert_to_village_table(village_values)
    header = ['區別', '里別']
    isHeader = True
    for row in village_table:
        if (sum(row[1:])) == 0:
            continue
        r1, r2 = row[0].split('區')
        village_weekly.append([r1+'區', r2])
        for i in range(1, len(row), 7):
            if ((i+6) > len(row)):
                break

            w_sum = sum(row[i:i+7])
            village_weekly[len(village_weekly)-1].append(w_sum)
            if isHeader:
                end = start + timedelta(days=6)
                d = "'" + str(start.strftime('%m/%d')) +'-'+str(end.strftime('%m/%d')) + "'"
                header.append(d)
                start = end + timedelta(days=1)

        isHeader = False
    
    village_weekly.insert(0, header)
    csv_io.write_csv('../public/data/kao_village_dengue_table.csv', village_weekly)
    print ('done')
Example #10
from lib import csv_io
from lib import json_io

if __name__ == '__main__':
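    # Monthly CSV exports from the Tainan open-data portal; the file names
    # (10408-10411) suggest ROC year 104 (2015), months 8 through 11.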
    m8 = 'http://data.tainan.gov.tw/dataset/4c260d97-e268-4b4a-8b15-c0fc92a25120/resource/316034ad-f2ae-4a8e-bafd-d6d98e388aaa/download/10408.csv'
    m9 = 'http://data.tainan.gov.tw/dataset/4c260d97-e268-4b4a-8b15-c0fc92a25120/resource/2cdd3bbe-6a8c-438e-b85a-1bde14382944/download/10409.csv'
    m10 = 'http://data.tainan.gov.tw/dataset/4c260d97-e268-4b4a-8b15-c0fc92a25120/resource/fde0f38c-ba91-40e1-a69b-406f061c1a3b/download/10410.csv'
    m11 = 'http://data.tainan.gov.tw/dataset/4c260d97-e268-4b4a-8b15-c0fc92a25120/resource/ede84d86-ffdf-4233-aaa1-b31b329fcaec/download/z10410410411.csv'

    m8 = csv_io.req_csv(m8, 'utf-8')
    m9 = csv_io.req_csv(m9, 'utf-8')
    m10 = csv_io.req_csv(m10, 'utf-8')
    m11 = csv_io.req_csv(m11, 'utf-8')
    print(m9[0])
    print(m10[0])
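    # m10 and m11 use a different column layout; shift two of the trailing
    # columns into positions 8 and 9 so all four months line up before they
    # are concatenated.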
    for row in m10:
        row.insert(8, row[-2])
        row.insert(9, row[-3])
        del row[-2]
        del row[-2]

    for row in m11:
        row.insert(8, row[-2])
        row.insert(9, row[-3])
        del row[-2]
        del row[-2]

    data = m8 + m9[1:] + m10[1:] + m11[1:]
    print(m10[0])
    print(data[-1])
    csv_io.write_csv('../data/drug_all.csv', data)
Example #11
import json
import os
from bs4 import BeautifulSoup
import requests
import csv
import sys
from datetime import datetime
from datetime import timedelta
from datetime import date
from lib import csv_io
from lib import json_io

data = csv_io.req_csv('http://data.gov.tw/iisi/logaccess/16702?dataUrl=http://nidss.cdc.gov.tw/download/Dengue_Daily.csv&ndctype=CSV&ndcnid=21025', 'utf-8')
header = data[0]
data = data[1:]
new_data = []
kao_data = []
start = False
print (header)
for row in data:
    if (row[1] and ('2015' in row[1] or '2016' in row[1]) and row[5] == '高雄市'):
        new_data.append([row[1], row[9], row[10]])
        kao_data.append(row)

new_data = sorted(new_data,
                  key=lambda x: datetime.strptime(x[0], '%Y/%m/%d').date())
new_data.insert(0, ['日期', 'Longitude', 'Latitude'])

kao_data.insert(0, header)
csv_io.write_csv('../../data/2015_kao_dengue.csv', kao_data)
csv_io.write_csv('./2015_dengue.csv', new_data)
Example #12
#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
import requests
import sys
from datetime import datetime
from datetime import timedelta
from datetime import date
from lib import csv_io
from lib import json_io

url = 'http://denguefever.csie.ncku.edu.tw/file/dengue_all.csv'
tmp = csv_io.req_csv(url, 'utf-8')
data = []
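# Normalise dates such as '8月13日' to '2015/8/13' and keep only rows that have
# both a date and a value in the last column.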
for item in tmp:
    if item[4] and item[-1]:
        item[4] = '2015/' + item[4].replace('月', '/').replace('日', '')
        data.append([item[0], item[4], item[1], item[2], item[-1], item[-2]])

header = data[0]
data = data[1:]
data = sorted(data, key=lambda x: datetime.strptime(x[1], '%Y/%m/%d').date())
now = datetime.strptime(data[-1][1], '%Y/%m/%d').date()

header[1], header[2], header[3] = '日期', '區別', '里別'
header[-2], header[-1] = 'Latitude', 'Longitude'
data.insert(0, header)
csv_io.write_csv('../data/dengue_all_v3.csv', data)
# -*- coding: utf-8 -*-
import json
import os
from bs4 import BeautifulSoup
import requests
import csv
import sys
from datetime import datetime
from datetime import timedelta
from datetime import date
from lib import csv_io
from lib import json_io

data = csv_io.req_csv(
    'http://data.gov.tw/iisi/logaccess/16702?dataUrl=http://nidss.cdc.gov.tw/download/Dengue_Daily.csv&ndctype=CSV&ndcnid=21025',
    'utf-8')
print(data[0])
data = data[1:]
new_data = []
start = False
for row in data:
    if (row[0] and '2015' in row[0] and row[5] == '台南市'):
        new_data.append(
            [row[5], row[0], row[4], row[6], row[7], row[8], row[9], row[10]])

new_data = sorted(new_data,
                  key=lambda x: datetime.strptime(x[1], '%Y/%m/%d').date())
new_data.insert(
    0, ['居住縣市', '日期', '年齡', '區別', '里別', '最小統計區', 'Latitude', 'Longitude'])
csv_io.write_csv('../data/dengue_small_region.csv', new_data)
Example #14
    if (temp_count != 0):
        temp = round(temp / temp_count, 2)
    else:
        temp = 'NA'
    if (rh_count != 0):
        rh = round(rh / rh_count, 2)
    else:
        rh = 'NA'
    if (precp_count != 0):
        precp = round(precp)
    else:
        precp = 'NA'

    return temp, rh, precp


if __name__ == '__main__':
    url_t = "http://e-service.cwb.gov.tw/HistoryDataQuery/DayDataController.do?command=viewMain&station=467410&datepicker="
    now = datetime.strptime('2016/01/01', '%Y/%m/%d').date()
    end = datetime.strptime('2016/02/28', '%Y/%m/%d').date()
    data = [['date', 'temp', 'rh', 'precp']]  # header row; one data row per day follows
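    # Scrape the CWB daily-observation page for station 467410 one day at a
    # time, collecting mean temperature, mean humidity and total precipitation.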
    while now < end:
        print(now)
        html_doc_t = requests.get(url_t + now.strftime('%Y-%m-%d')).text
        temp_t, rh_t, precp_t = get_values(html_doc_t)

        data.append([now.strftime('%Y/%m/%d'), temp_t, rh_t, precp_t])
        now += timedelta(days=1)
    csv_io.write_csv('../data/dynamic/kao_weather_2016.csv', data)
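
Only the averaging tail of get_values appears in this example; the part that parses the CWB daily-observation page is missing. A rough sketch of that missing parsing follows, assuming the values sit in fixed <td> columns of the page's table; the column indices used below are placeholders, not taken from the original.

from bs4 import BeautifulSoup


def get_values(html_doc):
    # Walk every table row on the page and accumulate temperature, relative
    # humidity and precipitation, skipping cells that are not numeric.
    soup = BeautifulSoup(html_doc, 'html.parser')
    temp = rh = precp = 0.0
    temp_count = rh_count = precp_count = 0
    for tr in soup.find_all('tr'):
        cells = [td.get_text(strip=True) for td in tr.find_all('td')]
        if len(cells) < 11:
            continue
        try:
            temp += float(cells[3])    # column index is a placeholder
            temp_count += 1
        except ValueError:
            pass
        try:
            rh += float(cells[5])      # column index is a placeholder
            rh_count += 1
        except ValueError:
            pass
        try:
            precp += float(cells[10])  # column index is a placeholder
            precp_count += 1
        except ValueError:
            pass

    # Same averaging as the tail shown above.
    temp = round(temp / temp_count, 2) if temp_count else 'NA'
    rh = round(rh / rh_count, 2) if rh_count else 'NA'
    precp = round(precp) if precp_count else 'NA'
    return temp, rh, precp
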
import os
from bs4 import BeautifulSoup
import requests
import csv
import sys
from datetime import datetime
from datetime import timedelta
from datetime import date
from lib import csv_io
from lib import json_io

data = csv_io.req_csv(
    'http://data.gov.tw/iisi/logaccess/16702?dataUrl=http://nidss.cdc.gov.tw/download/Dengue_Daily.csv&ndctype=CSV&ndcnid=21025',
    'utf-8')
header = data[0]
data = data[1:]
new_data = []
kao_data = []
start = False
for row in data:
    if (row[0] and ('2015' not in row[0]) and row[5] == '台南市'):
        new_data.append([row[0], row[9], row[10]])
        kao_data.append(row)

new_data = sorted(new_data,
                  key=lambda x: datetime.strptime(x[0], '%Y/%m/%d').date())
new_data.insert(0, ['日期', 'Longitude', 'Latitude'])

kao_data.insert(0, header)
csv_io.write_csv('./dengue_onset.csv', new_data)
def filter_data(input_):
    # Keep only the columns used downstream: 日期 (date), Latitude, Longitude.
    data = []
    for item in input_:
        data.append([item[4], item[8], item[9]])
    header = ['日期', 'Latitude', 'Longitude']
    return data, header


if __name__ == '__main__':
    url = 'http://denguefever.csie.ncku.edu.tw/file/drug_all.csv'
    data = csv_io.req_csv(url, 'utf-8')
    data, header = filter_data(data)
    data = data[1:]
    dengue_data = csv_io.read_csv('../data/seven_data.csv')

    tmp = dengue_data[-1][1]
    now = datetime.strptime(tmp, '%Y/%m/%d').date()
    data_2015 = [header]
    print(now, data[0])
    new_data = [header]
    for row in data:
        event_date = datetime.strptime('2015年' + row[0], '%Y年%m月%d日').date()
        row[-1], row[-2] = float(row[-1]), float(row[-2])
        data_2015.append(row)
        if now < event_date:
            continue
        delta = (now - event_date)
        if delta.days < 7:
            new_data.append(row)

    csv_io.write_csv('../data/drug_seven.csv', new_data)
    csv_io.write_csv('../data/drug_2015.csv', data_2015)