def test():
    '''Test function.'''
    print('---Module coordExport test---')
    hurdatExport = fileIO.openFile('HURDAT_Export.txt')
    hurdatExport.readline() # Reads headers
    line = hurdatExport.readline()

    print('***__getStage test***')
    print('Stage :', __getStage(line))
    print('***__getCat test***')
    print('Cat :', __getCat(line))
    print('***__getLandfall test***')
    print('Landfall:', __getLandfall(line))

    print('***__filter test***')
    filterTerms = {}
    #filterTerms = {'cat':['TS','H1','H2','H3','H4','H5']}
    filterTerms = {'stage':'Tropical Cyclone'}
    searchType = 'and'
    filteredData = __filterData(hurdatExport, filterTerms, searchType)
    verbose = False
    if verbose:
        for line in filteredData:
            print(line)
    print('Number of entries found:', len(filteredData))
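
# A minimal sketch of the filtering semantics exercised above, assuming each
# export line carries whitespace-delimited fields and filterTerms maps a field
# name to one accepted value or a list of accepted values. The field layout and
# the 'and'/'or' handling here are illustrative guesses, not the module's
# private __filterData implementation.
def _filterDataSketch(lines, filterTerms, searchType='and'):
    '''Return the lines whose fields satisfy filterTerms.'''
    def matches(value, accepted):
        # A term may list several accepted values (e.g. 'cat': ['TS', 'H1']).
        return value in accepted if isinstance(accepted, list) else value == accepted
    combine = all if searchType == 'and' else any
    results = []
    for entry in lines:
        # Hypothetical layout: the last three columns hold stage, cat, landfall.
        fields = dict(zip(('stage', 'cat', 'landfall'), entry.split()[-3:]))
        if combine(matches(fields.get(term, ''), accepted)
                   for term, accepted in filterTerms.items()):
            results.append(entry)
    return results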
def test():
    '''Test function.'''
    print('---Module hurdatReader test---')
    import os, fileIO
    ## Atlantic
    #hurdatData = fileIO.openFile('hurdat2-1851-2017-050118.txt', '..\\data')
    # East Pacific
    hurdatData = fileIO.openFile('hurdat2-nepac-1949-2017-050418.txt', '..\\data')
    hourList = [0, 6, 12, 18]
    for line in hurdatData:
        if isHeader(line):
            ID = getStormID(line)
            name = getName(line).strip()
            year = getYear(line)
        elif isFooter(line):
            pass
        else:
            x = 0
            y = 0
            #for i in range(4):
            lon = getLon(line, x, y)
            if (getLat(line, x, y) and lon != 0.0 and year == 2014):
                #if lon >= 180.0:
                #    lon = (lon - 360)*-1
                #else:
                #    lon = lon * -1
                print('\n')
                print('ID   :', str(ID))
                print('Name :', name)
                print('Year :', str(year))
                print('Month:', str(getMonth(line)))
                print('Day  :', str(getDay(line)))
                print('Hour :', str(getHour(line))) # write hour
                print('Lat  :', str(getLat(line, x, y)))
                print('Lon  :', str(lon).strip())
                print('Wind :', str(getWind(line, x, y)))
                print('Press:', str(getPressure(line, x, y)))
                print('Stage:', getStage(line, x, y))
                print('Cat  :', getCategory(line, x, y))
                print('Land :', str(madeLandfall(line)))
            #x = y
            #y = x + 17
            if getMonth(line) == 12 and getDay(line) == 31 and getHour(line) == 18:
                year = year + 1
    hurdatData.close()
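
# A sketch of the record-type checks the loop above relies on, assuming the
# comma-delimited HURDAT2 layout in which a storm header opens with a basin
# code ('AL', 'EP', 'CP') followed by storm number and year (e.g.
# 'EP011949,            UNNAMED,     14,') while an observation row opens with
# an 8-digit YYYYMMDD date. The module's actual isHeader/isFooter logic is not
# shown here and may differ.
def _isHeaderSketch(line):
    '''True for a HURDAT2 storm header row.'''
    return line[:2].isalpha() and line[2:8].isdigit()

def _isObservationSketch(line):
    '''True for a HURDAT2 six-hourly observation row.'''
    return line[:8].isdigit()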
def test():
    '''Test function.'''
    print('---Module hurdatReader test---')
    import os, fileIO
    hurdatData = fileIO.openFile('HURDAT_tracks1851to2010_atl_2011rev.txt', '..\\data')
    hourList = [0, 6, 12, 18]
    for line in hurdatData:
        if isHeader(line):
            ID = getStormID(line)
            name = getName(line).strip()
            year = getYear(line)
            landfall = madeLandfall(line)
        elif isFooter(line):
            pass
        else:
            x = 11
            y = 28
            for i in range(4):
                lon = getLon(line, x, y)
                if (getLat(line, x, y) and lon != 0.0 and year == 1886):
                    if lon >= 180.0:
                        lon = (lon - 360)*-1
                    else:
                        lon = lon * -1
                    print('\n')
                    print('ID   :', str(ID))
                    print('Name :', name)
                    print('Year :', str(year))
                    print('Month:', str(getMonth(line)))
                    print('Day  :', str(getDay(line)))
                    print('Hour :', str(hourList[i])) # write hour
                    print('Lat  :', str(getLat(line, x, y)))
                    print('Lon  :', str(lon).strip())
                    print('Wind :', str(getWind(line, x, y)))
                    print('Press:', str(getPressure(line, x, y)))
                    print('Stage:', getStage(line, x, y))
                    print('Cat  :', getCategory(line, x, y))
                    print('Land :', str(landfall))
                x = y
                y = x + 17
                if getMonth(line) == 12 and getDay(line) == 31 and hourList[i] == 18:
                    year = year + 1
    hurdatData.close()
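
# The pre-HURDAT2 track rows pack four six-hourly observations per daily line
# in fixed-width columns, which is what the x/y window arithmetic above walks:
# the first window spans characters 11-28, and each later one starts where the
# previous ended, 17 characters along. A sketch of the same walk (slice bounds
# taken from the test; the field layout inside each window is not shown here):
def _observationWindowsSketch(line, start=11, width=17, count=4):
    '''Yield the four 17-character observation slices of a daily track row.'''
    for i in range(count):
        x = start + i * width
        yield line[x:x + width]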
@author: David Stack
'''
import hurdatExport, fileIO, coordExport

# Read HURDAT Data
print('-----')
print('Reading HURDAT data...')
print('-----')
## Atlantic
#hurdatData = fileIO.openFile('hurdat2-1851-2017-050118.txt', '..\\data')
# East Pacific
hurdatData = fileIO.openFile('hurdat2-nepac-1949-2017-050418.txt', '..\\data')
hurdatExport.exportToCSV(hurdatData, '..\\output\\HURDAT_Export_Pacific.csv')
hurdatData.seek(0, 0)
hurdatExport.exportToTXT(hurdatData, '..\\output\\HURDAT_Export_Pacific.txt')
hurdatData.close()

## Average coordinates with all observations
#print('-----')
#print('Saving average coordinates data...')
#print('-----')
#hurdatExportFile = fileIO.openFile('..\\output\\HURDAT_Export.txt')
#coordExport.exportToTXT(hurdatExportFile, '..\\output\\Coord_Export.txt')
#
## Average coordinates using filters
## Only category TS-H5 Storms
preprocessed = fileIO.openJSONFile(config["base"] + project["preprocessed"] + preprocessedFileName) filesToProcess = fileIO.getProjectFiles(config["base"] + project["location"]) # Need to at some point remove the processing of the JSON file # that stores preproccessed files. print("Files indexed in " + project['name'] + ", attempting to match them against licenses.") for document in filesToProcess: if preprocessedFileName not in document: hashOfFile = hashlib.md5( fileIO.openFile(config['base'] + project["location"] + document).read().encode('utf-8')).hexdigest() hashOfFile = str(hashOfFile) # A flag that signals a chance to the # preprocessed file of an individual project. editToPreprocessed = 0 for license in licenseAttributes["licenses"]: if (hashOfFile in preprocessed['preprocessed']): # In most cases I don't imagine a user is going to add their own # license file. However, if they want to, we still want to utilise # the preprocessed speedup. if (license in preprocessed['preprocessed'][hashOfFile]): if (preprocessed['preprocessed'][hashOfFile][license] == "true"): foundLicenses.append(license)
licenseAttributes = fileIO.openJSONFile(config['base'] + config['attributes_files'])

print("Beginning to scan through project files defined in config.")
for project in config["projects"]:
    foundLicenses = []
    preprocessed = fileIO.openJSONFile(config["base"] + project["preprocessed"] + preprocessedFileName)
    filesToProcess = fileIO.getProjectFiles(config["base"] + project["location"])
    # Need to at some point remove the processing of the JSON file
    # that stores preprocessed files.
    print("Files indexed in " + project['name'] + ", attempting to match them against licenses.")
    for document in filesToProcess:
        if preprocessedFileName not in document:
            hashOfFile = hashlib.md5(
                fileIO.openFile(config['base'] + project["location"] + document)
                .read().encode('utf-8')).hexdigest()
            hashOfFile = str(hashOfFile)
            # A flag that signals a change to the
            # preprocessed file of an individual project.
            editToPreprocessed = 0
            for license in licenseAttributes["licenses"]:
                if hashOfFile in preprocessed['preprocessed']:
                    # In most cases I don't imagine a user is going to add their own
                    # license file. However, if they want to, we still want to utilise
                    # the preprocessed speedup.
                    if license in preprocessed['preprocessed'][hashOfFile]:
                        if preprocessed['preprocessed'][hashOfFile][license] == "true":
                            foundLicenses.append(license)
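
# The lookups above imply a cache shape like the following, keyed by file hash
# with per-license results stored as the strings "true"/"false" (the code
# compares against the string "true", not a boolean). This structure is
# inferred from the reads, not taken from a documented schema:
#
#   {"preprocessed": {"<md5-of-file>": {"MIT": "true", "GPL-3.0": "false"}}}
#
# A hypothetical helper showing the matching write path, which would set the
# editToPreprocessed flag so the cache gets rewritten once per project:
def _recordMatchSketch(preprocessed, hashOfFile, license, matched):
    '''Record a scan result for one (file, license) pair in the cache.'''
    entry = preprocessed['preprocessed'].setdefault(hashOfFile, {})
    entry[license] = "true" if matched else "false"
    return 1  # caller assigns this to editToPreprocessed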
#!/usr/bin/env python 3.2
'''
Main file to run HURDAT analysis.

@author: David Stack
'''
import hurdatExport, fileIO, coordExport

# Read HURDAT Data
print('-----')
print('Reading HURDAT data...')
print('-----')
hurdatData = fileIO.openFile('HURDAT_tracks1851to2010_atl_2011rev.txt', '..\\data')
hurdatExport.exportToCSV(hurdatData, '..\\output\\HURDAT_Export.csv')
hurdatData.seek(0, 0)
hurdatExport.exportToTXT(hurdatData, '..\\output\\HURDAT_Export.txt')
hurdatData.close()

# Average coordinates with all observations
print('-----')
print('Saving average coordinates data...')
print('-----')
hurdatExportFile = fileIO.openFile('..\\output\\HURDAT_Export.txt')
coordExport.exportToTXT(hurdatExportFile, '..\\output\\Coord_Export.txt')

# Average coordinates using filters
# Only category TS-H5 Storms
hurdatExportFile.seek(0, 0)
filterTerms = {'cat':['TS','H1','H2','H3','H4','H5']}
coordExport.exportToTXT(hurdatExportFile, '..\\output\\Coord_Export_TS-H5.txt', filterTerms)
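
# Other filter combinations follow the same pattern; for example, restricting
# the averaged coordinates to storms that reached the 'Tropical Cyclone' stage
# (output path illustrative; the filter key is taken from coordExport's test()):
#hurdatExportFile.seek(0, 0)
#filterTerms = {'stage':'Tropical Cyclone'}
#coordExport.exportToTXT(hurdatExportFile, '..\\output\\Coord_Export_TC.txt', filterTerms)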