/
generator_dfw_csv.py
146 lines (115 loc) · 3.98 KB
/
generator_dfw_csv.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
import generator_wdtf
import logging
import datetime
import sys
import cStringIO
from ftplib import FTP, error_reply
def get_minutes_data(conn, aws_id):
    """
    Get yesterday's 15min data for a station for CSV maker.

    Output columns per row (station id, date, time, then data columns):
        DfW ID, Date, Time,
        Ave AirTemp (AWS) (degC), Ave AppTemp (degC), Ave DewPoint (degC),
        Ave Humidity (AWS) (%), Ave DeltaT (degC), Ave Soil Temperature (degC),
        Ave GSR (W/m^2), Min WndSpd (m/s), Ave WndSpd (m/s), Max WndSpd (m/s),
        Ave WndDir (deg), Total Rain (mm), Ave LeafWet (% Wet),
        Ave AirTemp (Canopy) (degC), Ave Humidity (Canopy) (%)

    :param conn: a live DB connection, or None to open one via generator_wdtf
    :param aws_id: string station id
    :return: CSV data as a string; one CRLF-terminated line per DB row
    """
    # Parameterized query: the original interpolated aws_id straight into the
    # SQL text, which is an injection risk (PEP 249 %s placeholder instead).
    sql = """
    SELECT COALESCE(tbl_stations.dfw_id, tbl_stations.aws_id), tbl_data_minutes.*
    FROM tbl_data_minutes
    INNER JOIN tbl_stations
    ON tbl_data_minutes.aws_id = tbl_stations.aws_id
    WHERE
    tbl_data_minutes.aws_id = %s
    AND DATE(stamp) = CURDATE() - INTERVAL 1 DAY GROUP BY stamp
    ORDER BY stamp;"""
    # Remember whether we opened the connection: we must only close what we
    # opened.  The original closed the caller's connection too, which broke
    # make_csv_file()'s loop after the first station.
    opened_here = conn is None
    if opened_here:
        conn = generator_wdtf.db_connect()
    cursor = conn.cursor()
    cursor.connection.autocommit(True)
    cursor.execute(sql, (aws_id,))
    rows = cursor.fetchall()
    lines = []
    for row in rows:
        date_time = str(row[3])  # stamp column, 'YYYY-MM-DD HH:MM:SS'
        # None -> empty CSV field, everything else -> its string form.
        fields = ["" if col is None else str(col) for col in row]
        # Station id, date part, time part, then data columns 4..18.
        cells = [fields[0], date_time[0:10], date_time[11:]] + fields[4:19]
        # strip(',') matches the original output: trailing (and leading)
        # empty fields are dropped from each line.
        lines.append(",".join(cells).strip(',') + "\r\n")
    cursor.close()
    conn.commit()
    if opened_here:
        conn.close()
    return "".join(lines)
def make_csv_file(conn, owner):
    """
    Writes the 15min data for each station with status 'on' for DfW to a
    single CSV file (SAMDBNRM_YYYYMMDD.CSV).

    Calls get_minutes_data() for each matching station.

    :param conn: a live DB connection, or None to open one via generator_wdtf
    :param owner: string owner code used to select stations
    :return: a CSV file string (header line + one line per data row)
    """
    # Parameterized query: the original interpolated owner into the SQL
    # text, which is an injection risk (PEP 249 %s placeholder instead).
    sql = "SELECT aws_id FROM tbl_stations WHERE owner = %s AND status = 'on';"
    if conn is None:
        conn = generator_wdtf.db_connect()
    cursor = conn.cursor()
    cursor.connection.autocommit(True)
    cursor.execute(sql, (owner,))
    rows = cursor.fetchall()
    # Collect parts and join once instead of quadratic += accumulation.
    parts = ["DfW ID,Date,Time,Ave AirTemp (AWS) (degC),Ave AppTemp (degC),Ave DewPoint (degC),Ave Humidity (AWS) (%),Ave DeltaT (degC),Ave Soil Temperature (degC),Ave GSR (W/m^2),Min WndSpd (m/s),Ave WndSpd (m/s),Max WndSpd (m/s),Ave WndDir (deg),Total Rain (mm),Ave LeafWet (% Wet),Ave AirTemp (Canopy) (degC),Ave Humidity (Canopy) (%)\r\n"]
    for row in rows:
        parts.append(get_minutes_data(conn, row[0]))
    cursor.close()
    conn.close()
    return "".join(parts)
def send_csv_to_dfw(conn, owner):
    """
    Sends the CSV file via FTP to the DfW server.

    :param conn: live db connection (or None; passed through to make_csv_file)
    :param owner: string; also used as the file-name prefix
    :return: True if ok, else [False, error message]
    """
    # SECURITY: credentials are hardcoded in source; move them to a config
    # file or environment variables.
    svr = 'e-nrims.dwlbc.sa.gov.au'
    usr = 'MEATelem'
    pwd = 'meatelem01'
    t = datetime.datetime.now()
    try:
        ftp = FTP(svr)
        ftp.set_debuglevel(0)
        ftp.login(usr, pwd)
        # Upload as OWNER_YYYYMMDD.csv from an in-memory buffer.
        single_csv_file = cStringIO.StringIO(make_csv_file(conn, owner))
        ftp.storbinary("STOR " + owner + "_" + t.strftime("%Y%m%d") + ".csv", single_csv_file)
        single_csv_file.close()
        ftp.quit()
        return True
    except Exception as e:
        logging.error("failed in send_csv_to_dfw()\n" + str(e))
        # Original returned error_reply.message -- that is the exception
        # *class* from ftplib, not the caught exception; report the real one.
        return [False, str(e)]
#expected call:
#params: live DB connection (or None), stations' owner
#send_csv_to_dfw(conn, owner)
#--> make_csv_file(conn, owner)
#----> get_minutes_data(conn, aws_id)