def main():
    """Run the configured SQL query against Postgres and write the result set
    to a compressed (ZIP) flat file on disk.

    Returns:
        True on success; False when the database connection fails or an
        unsupported export format is requested.
    """
    full_start_time = datetime.now()  # kept for parity with the other loaders; not logged here

    # set command line arguments
    args = arguments.set_arguments()
    # get settings from arguments
    settings = arguments.get_settings(args)

    # connect to Postgres
    try:
        pg_conn = psycopg2.connect(settings['pg_connect_string'])
    except psycopg2.Error:
        logger.fatal("Unable to connect to database\nACTION: Check your Postgres parameters and/or database security")
        return False

    pg_conn.autocommit = True
    pg_cur = pg_conn.cursor()

    # get in-memory stream of data from query
    data_stream = pg_export.run_query(pg_cur, settings['sql'])

    # export data stream to flat file
    if settings['format'] in ["csv", "tsv", "psv"]:
        file_stream = pg_export.export_to_delimited_file(data_stream, settings['delimiter'])
    elif settings['format'] == "xlsx":
        file_stream = pg_export.export_to_xlsx(data_stream, settings['temp_dir'].name)
    else:
        logger.fatal("Invalid export file format - only csv, tsv, psv and xlsx are supported! - "
                     "check your settings")
        return False

    # set file name in ZIP file
    file_name = "{0}.{1}".format(settings['filename'], settings['format'])

    # add result to in-memory ZIP file.
    # BUG FIX: the ZipFile must be closed BEFORE reading the buffer — close()
    # writes the archive's central directory; without it the ZIP is corrupt.
    zip_stream = io.BytesIO()
    with zipfile.ZipFile(zip_stream, mode='w', compression=zipfile.ZIP_DEFLATED) as zip_file:
        zip_file.writestr(file_name, file_stream.getvalue())

    # write ZIP file to disk (`with` + binary mode guarantees the handle is flushed and closed)
    zip_file_path = "{0}{1}{2}.zip".format(settings['filepath'], os.sep, settings['locpid'])
    with open(zip_file_path, "wb") as f:
        f.write(zip_stream.getvalue())

    pg_cur.close()
    pg_conn.close()

    return True
def _zip_single_member(member_name, payload):
    """Return the bytes of a ZIP archive holding one member named *member_name*
    with contents *payload* (bytes). The ZipFile is closed inside so the
    central directory is always written — reading the buffer before close()
    produces a corrupt archive."""
    buffer = io.BytesIO()
    with zipfile.ZipFile(buffer, mode='w', compression=zipfile.ZIP_DEFLATED) as archive:
        archive.writestr(member_name, payload)
    return buffer.getvalue()


def main():
    """Run the configured SQL query against Postgres and write the result set
    to a ZIP'd flat file on disk.

    Returns:
        True on success; False when the database connection fails or an
        unsupported export format is requested.
    """
    full_start_time = datetime.now()  # retained from original; not logged in this variant

    # set command line arguments
    args = arguments.set_arguments()
    # get settings from arguments
    settings = arguments.get_settings(args)

    # connect to Postgres
    try:
        pg_conn = psycopg2.connect(settings['pg_connect_string'])
    except psycopg2.Error:
        logger.fatal("Unable to connect to database\nACTION: Check your Postgres parameters and/or database security")
        return False

    pg_conn.autocommit = True
    pg_cur = pg_conn.cursor()

    # get in-memory stream of data from query
    data_stream = pg_export.run_query(pg_cur, settings['sql'])

    # export data stream to flat file
    if settings['format'] in ["csv", "tsv", "psv"]:
        file_stream = pg_export.export_to_delimited_file(data_stream, settings['delimiter'])
    elif settings['format'] == "xlsx":
        file_stream = pg_export.export_to_xlsx(data_stream, settings['temp_dir'].name)
    else:
        logger.fatal("Invalid export file format - only csv, tsv, psv and xlsx are supported! - "
                     "check your settings")
        return False

    # set file name in ZIP file
    file_name = "{0}.{1}".format(settings['filename'], settings['format'])

    # add result to an in-memory ZIP (helper closes the archive — BUG FIX:
    # the original read the buffer without closing, yielding a corrupt ZIP)
    zip_bytes = _zip_single_member(file_name, file_stream.getvalue())

    # write ZIP file to disk in binary mode
    zip_file_path = "{0}{1}{2}.zip".format(settings['filepath'], os.sep, settings['locpid'])
    with open(zip_file_path, "wb") as f:
        f.write(zip_bytes)

    pg_cur.close()
    pg_conn.close()

    return True
def main():
    """Load ABS Census data (CSV) and boundaries (Shapefile) into Postgres.

    Part 1 loads the census stat tables, Part 2 loads and web-optimises the
    boundary tables. Progress and timings go to *logger*.

    Returns:
        True on success; False on an invalid census year, a failed database
        connection, or a missing/unprivileged PostGIS install.
    """
    full_start_time = datetime.now()

    # set command line arguments
    args = arguments.set_arguments()
    # get settings from arguments
    settings = arguments.get_settings(args)

    if settings is None:
        logger.fatal("Invalid Census Year\nACTION: Set value to 2011 or 2016")
        return False

    # connect to Postgres
    try:
        pg_conn = psycopg2.connect(settings['pg_connect_string'])
    except psycopg2.Error:
        logger.fatal("Unable to connect to database\nACTION: Check your Postgres parameters and/or database security")
        return False

    pg_conn.autocommit = True
    pg_cur = pg_conn.cursor()

    # add postgis to database (in the public schema) - run this in a try to confirm db user has privileges
    try:
        pg_cur.execute("SET search_path = public, pg_catalog; CREATE EXTENSION IF NOT EXISTS postgis")
    except psycopg2.Error:
        logger.fatal("Unable to add PostGIS extension\nACTION: Check your Postgres user privileges or PostGIS install")
        return False

    # log PostGIS version
    utils.check_postgis_version(pg_cur, settings, logger)

    # # test if ST_ClusterKMeans exists (only in PostGIS 2.3+).
    # # It's used to create classes to display the data in the map
    # if not settings.get('st_clusterkmeans_supported'):
    #     logger.warning("YOU NEED TO INSTALL POSTGIS 2.3 OR HIGHER FOR THE MAP SERVER TO WORK\n"
    #                    "it utilises the ST_ClusterKMeans() function in v2.3+")

    # START LOADING DATA

    # test runtime parameters - 2011
    # --census-year=2011
    # --data-schema=census_2011_data
    # --boundary-schema=census_2011_bdys
    # --web-schema=census_2011_web
    # --census-data-path=/Users/hugh/tmp/abs_census_2011_data
    # --census-bdys-path=/Users/hugh/tmp/abs_census_2011_bdys

    # test runtime parameters - 2016
    # --census-data-path=/Users/hugh/tmp/abs_census_2016_data
    # --census-bdys-path=/Users/hugh/tmp/abs_census_2016_bdys

    # PART 1 - load census data from CSV files
    logger.info("")
    start_time = datetime.now()
    logger.info("Part 1 of 2 : Start census data load : {0}".format(start_time))
    create_metadata_tables(pg_cur, settings['metadata_file_prefix'],
                           settings['metadata_file_type'], settings)
    populate_data_tables(settings['data_file_prefix'], settings['data_file_type'],
                         settings['table_name_part'], settings['bdy_name_part'], settings)
    logger.info("Part 1 of 2 : Census data loaded! : {0}".format(datetime.now() - start_time))

    # PART 2 - load census boundaries from Shapefiles and optimise them for web visualisation
    logger.info("")
    start_time = datetime.now()
    logger.info("Part 2 of 2 : Start census boundary load : {0}".format(start_time))
    load_boundaries(pg_cur, settings)

    # add bdy type prefix to bdy id to enabled joins with stat data (Census 2016 data issue only)
    if settings["census_year"] == "2016":
        fix_boundary_ids(settings)
    else:
        logger.info("\t- Step 2 of 3 : boundary id prefixes not required : {0}"
                    .format(datetime.now() - start_time))

    create_display_boundaries(pg_cur, settings)
    # NOTE(review): repaired a line break that split this literal in the source;
    # reconstructed to match the other "Part N of 2 ..." log lines.
    logger.info("Part 2 of 2 : Census boundaries loaded! : {0}".format(datetime.now() - start_time))

    # close Postgres connection
    pg_cur.close()
    pg_conn.close()

    logger.info("")
    logger.info("Total time : : {0}".format(datetime.now() - full_start_time))

    return True
# Flask map-server bootstrap: create the app, enable response compression and
# open a Postgres connection pool sized 10-30 using the command line settings.
from contextlib import contextmanager

from flask import Flask
from flask import render_template
from flask import request
from flask import Response
from flask_compress import Compress
from psycopg2.extensions import AsIs
from psycopg2.pool import ThreadedConnectionPool

app = Flask(__name__, static_url_path='')
Compress(app)

# set command line arguments
args = arguments.set_arguments()
# get settings from arguments
settings = arguments.get_settings(args)

# create database connection pool (min 10 / max 30 connections).
# NOTE(review): `arguments` is not imported in this chunk — presumably imported
# earlier in the file; confirm.
pool = ThreadedConnectionPool(10, 30,
                              database=settings["pg_db"],
                              user=settings["pg_user"],
                              password=settings["pg_password"],
                              host=settings["pg_host"],
                              port=settings["pg_port"])

# get the boundary name that suits each (tiled map) zoom level and its minimum value to colour in