def _read_latest_time_mark(record, feed_name):
    """Return the most recent recorded timestamp line for *feed_name*.

    *record* is the feed's open history file.  The last line normally
    holds the newest timestamp; a line starting with '>' or 'P' is a
    program-startup marker, in which case the newest data line (if any)
    is three lines from the end.  Returns '0' when no timestamp has
    been recorded yet (so any received timestamp compares as newer).
    """
    record.seek(0)
    lines = record.readlines()
    if not lines:
        # Fresh history file — the original crashed here with IndexError.
        print("[INFO] No existing %s data time stamp recorded in history" % feed_name)
        return '0'
    latest = lines[-1]
    if latest[:1] == '>' or latest[:1] == 'P':
        # The program has just (re)started; look past the startup marker.
        # Assumption (from original): at least one update per previous run.
        if len(lines) >= 3:
            latest = lines[-3]
            print("[INFO] Currently recorded %s data time stamp: (from last run) %s" % (feed_name, latest))
        else:
            print("[INFO] No existing %s data time stamp recorded in history" % feed_name)
            return '0'
    else:
        print("[INFO] Currently recorded %s data time stamp: (from last iteration) %s" % (feed_name, latest))
    return latest


def _check_feed(feed_name, url, converter):
    """Fetch one livetraffic hazard feed and process its timestamp.

    Downloads the JSON at *url*, compares its 'lastPublished' mark with
    the newest mark in fileDir/<feed_name>_update_history.txt, appends a
    record line, and — when the feed is newer — converts it to CSV via
    *converter*('features', data, csv_name, csv_name + '.change').

    Returns the generated CSV file name, or None when nothing new.
    """
    response = urllib.urlopen(url)
    received = json.loads(response.read())  # load JSON file
    print("")
    print("[INFO] %s data URL checked" % feed_name)
    received_mark = received['lastPublished']
    print("[INFO] %s Received time stamp: %s" % (feed_name, received_mark))

    file_name = None
    record_file = fileDir + feed_name + '_update_history.txt'
    with open(record_file, 'a+') as record:
        latest = _read_latest_time_mark(record, feed_name)
        now = strftime("%Y-%m-%d %H:%M:%S", localtime())
        # History lines are "<13-digit ms timestamp> @<wall clock> ...",
        # so the first 13 characters are the comparable mark.
        if int(received_mark) > int(latest[:13]):
            print("[INFO] New %s Data Arrived" % feed_name)
            record.write(str(received_mark) + ' @' + now + ' NEW' + '\n')
            mkdir_p(fileDir)
            file_name = (fileDir + feed_name + "_%d_%d.csv") % (starttime_int, total_count)
            converter('features', received, file_name, file_name + ".change")
            print("[INFO] Updating %s data finished" % feed_name)
        elif int(received_mark) == int(latest[:13]):
            record.write(str(received_mark) + ' @' + now + '\n')
            print("[INFO] No new %s data" % feed_name)
        else:
            # Feed went backwards: keep the recorded mark, log the stale one.
            # (The original had a fourth "unexpected" branch calling the
            # non-existent os.pause(); it was unreachable — >, ==, < on
            # ints are exhaustive — and has been removed.)
            record.write(str(int(latest[:13])) + ' @' + now +
                         ' DISCARD: ' + str(received_mark) + '\n')
            print("[INFO] Receive and discard old %s data" % feed_name)
    return file_name


def check():
    """Poll the Incident, MajorEvent and Roadwork hazard feeds once.

    Each feed is fetched, its publish timestamp recorded in a per-feed
    history file, and new data converted to CSV.  Returns the list of
    CSV file names generated on this call (feeds with no new data
    contribute nothing), in MajorEvent, Incident, Roadwork order as the
    original did.
    """
    results = {}
    # Processing order (and its side effects) matches the original.
    for name, url, converter in (
        ("Incident", "http://data.livetraffic.com/traffic/hazards/incident.json", JSONconvert_Incident),
        ("MajorEvent", "http://data.livetraffic.com/traffic/hazards/majorevent.json", JSONconvert_MajorEvent),
        ("Roadwork", "http://data.livetraffic.com/traffic/hazards/roadwork.json", JSONconvert_Roadwork),
    ):
        results[name] = _check_feed(name, url, converter)
    return [results[name]
            for name in ("MajorEvent", "Incident", "Roadwork")
            if results[name] is not None]
import os

# BUG in original: os.pause() does not exist and raises AttributeError.
# The intent is the Windows console "Press any key to continue" pause.
os.system("pause")
# -*- coding: utf-8 -*-
import sys
sys.path.append("C:\\Program Files (x86)\\IronPython 2.7\\Lib")
sys.path.append("C:\\Python27\\Lib")
import os
import csv
import requests
import time
import datetime
import sqlalchemy
import tempfile

# Work out of the system temp directory so generated files do not
# clutter the script's install location.
os.chdir(tempfile.gettempdir())
print(tempfile.gettempdir())
# BUG in original: os.pause() does not exist (AttributeError); use the
# Windows console pause instead.
os.system("pause")

def writeCSV( content ):
    '''Save CSV *content* to Incident\\Incidents.csv under the current
    working directory (the temp dir selected above), creating the
    "Incident" folder on first use.

    The content is passed through ResolveDoubleNewLine first to strip
    CRLF artifacts before writing.
    '''
    if not os.path.exists("Incident"):
        os.makedirs("Incident")
    path = os.path.join(os.getcwd(), "Incident", "Incidents.csv")
    # `with` guarantees the handle is closed even if the write raises
    # (the original leaked the handle on failure).
    with open(path, "w") as fh:
        fh.write(ResolveDoubleNewLine(content))

def ResolveDoubleNewLine( content ):
    '''Resolves the double new line in CSV files'''
    resolver = content.replace("\r\n","\n")