def testTileIntegrity(self):
    # The sum of altitudes of all 1201*1201 points in two tiles
    # Melbourne (West):
    west = loadTile('Australia', "S37E144")
    self.assertEqual(west.sum(), 223277664)
    # Melbourne (East):
    east = loadTile('Australia', "S37E145")
    self.assertEqual(east.sum(), 271136969)
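# The tests above assume loadTile() returns a 1201x1201 array of altitudes that
# supports .sum(). A minimal sketch of such a loader is given below, assuming the
# continent directory holds raw SRTM3 .hgt files (big-endian signed 16-bit samples);
# the path layout and function name are illustrative only, the project's real
# loadTile() may read zipped files or use a different directory structure.
import numpy

def loadTileSketch(continent, filename):
    # SRTM3 .hgt files hold 1201 x 1201 big-endian signed 16-bit samples,
    # stored row by row starting at the north-west corner of the tile.
    path = continent + "/" + filename + ".hgt"   # hypothetical layout
    data = numpy.fromfile(path, dtype='>i2')
    return data.reshape((1201, 1201))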
def testInsertTileIntoDatabase(self):
    # Load an example tile and make it smaller, so this will be faster:
    # an 11x11 tile. Because the top row and right column are dropped,
    # only the bottom-left 10x10 tile will be stored in the database.
    fulltile = loadTile('Australia', 'S37E145')
    tile = []
    for row in fulltile[0:11]:
        tile.append(row[0:11])
    # Get lat and lon from filename
    [lat, lon] = getLatLonFromFileName("S37E145")
    # Insert tile into our Cassandra ColumnFamily
    self.db_cas.insertTile(tile, lat, lon)
    # Check if the tile is indeed in the ColumnFamily
    tile_back = self.db_cas.readTile(lat, lon)
    for i in range(len(tile) - 1):
        for j in range(len(tile) - 1):
            self.assertEqual(tile_back[i][j], tile[i+1][j])
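# getLatLonFromFileName() is used throughout these tests. A sketch of what it
# presumably does follows, assuming standard SRTM naming where the file name
# encodes the south-west corner of the tile ("S37E145" -> lat -37, lon 145);
# the project's own util implementation may differ in details.
def getLatLonFromFileNameSketch(name):
    # "S37E145" -> [-37, 145]; "N49E008" -> [49, 8]
    lat = int(name[1:3])
    if name[0] == 'S':
        lat = -lat
    lon = int(name[4:7])
    if name[3] == 'W':
        lon = -lon
    return [lat, lon]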
def testInsertTileWithNull(self):
    # Create table
    self.assertTrue(self.db_psycopg2.createTableAltitude())
    # Some tiles contain the value -32768, which means NULL (not implemented yet).
    # Tile S27E123 has several -32768 values, for example tile[1086][462].
    fulltile = loadTile('Australia', 'S27E123')
    self.assertEqual(fulltile[1086][462], -32768)
    # Take part of the tile around that area
    tile = []
    for row in fulltile[1080:1091]:
        tile.append(row[460:471])
    # Get lat and lon from filename
    [lat, lon] = getLatLonFromFileName("S27E123")
    # Insert tile into database
    self.db_psycopg2.insertTile(tile, lat, lon)
    # Check if the tile is indeed in the database
    tile_back = self.db_psycopg2.readTile(lat, lon)
    for i in range(len(tile) - 1):
        for j in range(len(tile) - 1):
            self.assertEqual(tile_back[i][j], tile[i+1][j])
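# -32768 is the SRTM void marker. The test above notes that mapping it to NULL
# is not implemented yet; the helper below is only a sketch of how a database
# driver could translate voids before insertion. rowToSqlValues() is a
# hypothetical helper for illustration, not part of this project.
SRTM_VOID = -32768

def rowToSqlValues(row):
    # Replace SRTM void samples with None so the database driver
    # stores them as NULL instead of the sentinel value.
    return [None if altitude == SRTM_VOID else altitude for altitude in row]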
def testInsertTileIntoDatabase(self):
    # Create table
    self.assertTrue(self.db_psycopg2.createTableAltitude())
    # Load an example tile and make it smaller, so this will be faster:
    # an 11x11 tile. Because the top row and right column are dropped,
    # only the bottom-left 10x10 tile will be stored in the database.
    fulltile = loadTile('Australia', 'S37E145')
    tile = []
    for row in fulltile[0:11]:
        tile.append(row[0:11])
    # Get lat and lon from filename
    [lat, lon] = getLatLonFromFileName("S37E145")
    # Insert tile into database.
    # We use psycopg2 for the connection in this case.
    self.db_psycopg2.insertTile(tile, lat, lon)
    # Check if the tile is indeed in the database
    tile_back = self.db_psycopg2.readTile(lat, lon)
    for i in range(len(tile) - 1):
        for j in range(len(tile) - 1):
            self.assertEqual(tile_back[i][j], tile[i+1][j])
def testTileOverlap(self):
    # Uses two adjacent tiles around Melbourne to test that the
    # eastern edge of the West Melbourne tile is the same as the
    # western edge of the East Melbourne tile.
    # Melbourne (West):
    west = loadTile('Australia', "S37E144")
    # Melbourne (East):
    east = loadTile('Australia', "S37E145")
    for i in range(1201):
        self.assertEqual(west[i][1200] - east[i][0], 0)
    # Also try a north - south boundary:
    north = loadTile('Australia', "S36E144")
    south = loadTile('Australia', "S37E144")
    for i in range(1201):
        self.assertEqual(north[1200][i] - south[0][i], 0)
    # Some other area, north - south boundary:
    south = loadTile('Australia', "S33E147")
    north = loadTile('Australia', "S32E147")
    for i in range(1201):
        self.assertEqual(north[1200][i] - south[0][i], 0)
    # Some other area, west - east boundary:
    west = loadTile('Australia', "S33E147")
    east = loadTile('Australia', "S33E148")
    for i in range(1201):
        self.assertEqual(west[i][1200] - east[i][0], 0)
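# The four edge checks above all follow the same pattern. A small helper along
# these lines could remove the repetition; it is only a sketch and is not used
# by the test as written. It assumes the usual tile orientation: row 0 is the
# northern edge and column 0 the western edge of a 1201x1201 tile.
def edgesMatch(tile_a, tile_b, axis):
    # 'ew': the eastern edge (last column) of tile_a must equal the
    #       western edge (first column) of tile_b.
    # 'ns': the southern edge (last row) of tile_a must equal the
    #       northern edge (first row) of tile_b.
    for i in range(1201):
        if axis == 'ew' and tile_a[i][1200] != tile_b[i][0]:
            return False
        if axis == 'ns' and tile_a[1200][i] != tile_b[0][i]:
            return False
    return True

# e.g. self.assertTrue(edgesMatch(west, east, 'ew'))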
def testFirstNumber(self):
    data = loadTile('Australia', 'S11E119')
    self.assertEqual(data[0][0], 0)
def testPointsPerTile(self):
    data = loadTile('Australia', 'S11E119')
    self.assertEqual(len(data) * len(data[0]), 1201 * 1201)
def main():
    db_cas = ColumnFamilyCass(database_cas.keyspace, database_cas.cf_name, database_cas.nodelist)

    # Does the user want to empty the column family?
    if 'empty' in sys.argv:
        print "Purging data."
        db_cas.purge()
        print "Done..."
        exit()

    try:
        continent = '_'.join(map(lambda s: s.capitalize(), re.split('[ _]', sys.argv[1])))
    except IndexError:
        print "Please specify the continent: Africa, Australia, Eurasia, Islands, North_America or South_America."
        sys.exit(1)

    [north, south, west, east] = util.getBoundingBox(sys.argv, 3)
    files_hashes = util.getFilesHashes(continent)
    number_of_tiles = util.numberOfFiles(files_hashes, north, south, west, east)

    # Verify result?
    if 'verify' in sys.argv:
        verify(db_cas, number_of_tiles, files_hashes, continent, north, south, west, east)  # @todo how does this work?

    # If a tile name is given as the second argument, resume from there.
    p = re.compile(r'[NSEW]\d*')
    resume_from = ""
    try:
        if p.match(sys.argv[2]):
            resume_from = sys.argv[2]
    except IndexError:
        pass

    i = 0
    for file in files_hashes:
        # Strip .hgt.zip extension:
        file = file[1][0:-8]
        # Get latitude and longitude from file name
        [lat, lon] = util.getLatLonFromFileName(file)
        if util.inBoundingBox(lat, lon, north, south, west, east):
            i += 1
            # Are we resuming?
            if resume_from == file:
                resume_from = ""
            if resume_from == "":
                # First check if the tile is not already in the database:
                try:
                    db_cas.fetchTopLeftAltitude(lat, lon)
                    print("Skipping tile " + file + " (" + str(i) + " / " + str(number_of_tiles) + ") ...")
                except IndexError:
                    print("Insert data for tile " + file + " (" + str(i) + " / " + str(number_of_tiles) + ") ...")
                    # Load tile from file
                    tile = loadTile(continent, file)
                    db_cas.insertTile(tile, lat, lon)

    print("All tiles inserted. You will want to run a nodetool repair.")
    print("Import is done. Please verify the result with python read_data_cas.py verify")
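# The import loop above relies on util.inBoundingBox() to filter tiles. A sketch
# of the check it presumably performs, assuming north/south/west/east are
# inclusive integer tile coordinates; the project's util module may handle open
# bounds or wrap-around differently.
def inBoundingBoxSketch(lat, lon, north, south, west, east):
    # A tile named after its south-west corner falls inside the box
    # when both coordinates lie within the requested bounds.
    return south <= lat <= north and west <= lon <= east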
def main():
    db_pg = DatabasePg(database_pg.db, database_pg.db_user, database_pg.db_pass)
    db_psycopg2 = DatabasePsycopg2(database_pg.db, database_pg.db_user, database_pg.db_pass)

    try:
        continent = sys.argv[1]
    except IndexError:
        print "Please specify the continent: Africa, Australia, Eurasia, Islands, North_America or South_America."
        sys.exit(1)

    # Does the user want to empty the database?
    if 'empty' in sys.argv:
        print "Deleting tables from database..."
        db_pg.dropAllTables()
        print "Done..."
        exit()

    [north, south, west, east] = util.getBoundingBox(sys.argv, 3)
    files_hashes = util.getFilesHashes(continent)
    number_of_tiles = util.numberOfFiles(files_hashes, north, south, west, east)

    # Verify result?
    if 'verify' in sys.argv:
        verify(db_pg, number_of_tiles, files_hashes, continent, north, south, west, east)

    # If a tile name is given as the second argument, resume from there.
    p = re.compile(r'[NSEW]\d*')
    resume_from = ""
    try:
        if p.match(sys.argv[2]):
            resume_from = sys.argv[2]
    except IndexError:
        pass

    db_pg.createTableAltitude()

    i = 0
    for file in files_hashes:
        # Strip .hgt.zip extension:
        file = file[1][0:-8]
        # Get latitude and longitude from file name
        [lat, lon] = util.getLatLonFromFileName(file)
        if util.inBoundingBox(lat, lon, north, south, west, east):
            i += 1
            # Are we resuming?
            if resume_from == file:
                resume_from = ""
            if resume_from == "":
                # Load tile from file
                tile = loadTile(continent, file)
                # First check if the tile is not already in the database:
                try:
                    db_pg.fetchTopLeftAltitude(lat, lon)
                    print("Skipping tile " + file + " (" + str(i) + " / " + str(number_of_tiles) + ") ...")
                except:
                    print("Insert data for tile " + file + " (" + str(i) + " / " + str(number_of_tiles) + ") ...")
                    db_psycopg2.insertTile(tile, lat, lon)

    print("All tiles inserted. Please verify the result with python read_data.py verify")
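# DatabasePsycopg2.insertTile() does the actual inserts above. The project's
# schema is not shown here, so the following is only a rough sketch of how a
# tile could be batch-inserted with psycopg2; the table name "altitude" and its
# columns are assumptions for illustration, not this project's real layout.
import psycopg2

def insertTileSketch(conn, tile, lat, lon):
    # One database row per sample; 'pos' is a hypothetical per-tile index.
    width = len(tile[0])
    values = []
    for row_index, row in enumerate(tile):
        for col_index, altitude in enumerate(row):
            values.append((lat, lon, row_index * width + col_index, altitude))
    cur = conn.cursor()
    cur.executemany(
        "INSERT INTO altitude (lat, lon, pos, alt) VALUES (%s, %s, %s, %s)",
        values)
    conn.commit()

# Usage (hypothetical):
# conn = psycopg2.connect(database=database_pg.db, user=database_pg.db_user,
#                         password=database_pg.db_pass)
# insertTileSketch(conn, tile, lat, lon)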
for row in range(top_row, bottom_row + 1):
    for col in range(left_col, right_col + 1):
        f.write(str(begin + (row - 1) * 1200 + col) + ", " + str(tile[row][col]) + "\n")
f.close()


if __name__ == '__main__':
    # We will only upload 1 tile to the Google App Engine. This will take quite
    # a bit of time. For the offline data store, we will only "upload" the city
    # of Heidelberg; the offline data store is very slow.
    # For this we need tile N49E008.
    name = "N49E008"
    tile = loadTile("Eurasia", name)
    [lat, lon] = util.getLatLonFromFileName(name)

    if not ("online" in sys.argv or "offline" in sys.argv):
        print "Online or offline?"
        exit()

    if sys.argv[1] == "offline":
        # If we are offline, we'll only look at the center of Heidelberg:
        #   latitude  49.39 --- 49.42
        #   longitude  8.67 ---  8.71
        # That corresponds to:
        row_top = int((1.0 - 0.42) * 1200.)
        row_bottom = int((1.0 - 0.39) * 1200.)
        col_left = int(0.67 * 1200.)
        col_right = int(0.71 * 1200.)
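# The hard-coded 0.39/0.42/0.67/0.71 fractions above are the decimal parts of
# the Heidelberg bounding box (49.39..49.42 N, 8.67..8.71 E). The helper below
# is a sketch of the general conversion; it assumes row 0 is the northern edge
# of the tile and, like the code above, ignores the 1201st overlap row/column.
def fractionToRowCol(lat_fraction, lon_fraction):
    # Latitude grows northwards while row indices grow southwards,
    # hence the (1.0 - fraction) flip for the row.
    row = int((1.0 - lat_fraction) * 1200.)
    col = int(lon_fraction * 1200.)
    return row, col

# fractionToRowCol(0.42, 0.67) gives the top-left corner used above.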