"""Prune stale Twitter data past the retention window.

Deletes twitter_user rows whose following ids have not been reloaded
within ~4 months, then drops the per-week twitter_user_following_* tables
older than the window.  Full DB dumps serve as the archive, so hard
deletes are acceptable here.
"""
import smarttypes
from smarttypes.utils import time_utils
from datetime import datetime, timedelta
import psycopg2
from smarttypes.utils.postgres_handle import PostgresHandle

postgres_handle = PostgresHandle(smarttypes.connection_string)

################################################
##get rid of old connections
##we have db dumps, so we do have an archive
##if ever needed
################################################
retention_days = 30 * 4  # about 4 months
delete_before_this_date = datetime.now() - timedelta(days=retention_days)

# delete users whose following ids were last loaded before the cutoff
sql = """
delete from twitter_user
where last_loaded_following_ids < %(delete_before_this_date)s;"""
postgres_handle.execute_query(
    sql,
    {'delete_before_this_date': delete_before_this_date},
    return_results=False)
postgres_handle.connection.commit()

# drop the weekly twitter_user_following_<year_week> tables.
# table names cannot be bound as query parameters, hence %-interpolation;
# year_week_st is produced by our own time_utils, not user input.
sql = """drop table twitter_user_following_%(postfix)s;"""
for year_week_st in time_utils.year_weeknum_strs(
        delete_before_this_date - timedelta(days=7), 20, forward=False):
    postgres_handle.execute_query(sql % {'postfix': year_week_st},
                                  return_results=False)
    postgres_handle.connection.commit()
"""One-time schema setup.

Ensures the plpgsql procedural language is installed, then installs the
ts_modifieddate() trigger function that stamps NEW.modifieddate with
now() whenever a row is written.
"""
import smarttypes
from smarttypes.config import *
from smarttypes.utils import time_utils
from datetime import datetime
import psycopg2
from smarttypes.utils.postgres_handle import PostgresHandle

postgres_handle = PostgresHandle(smarttypes.connection_string)

# CREATE LANGUAGE has no "IF NOT EXISTS" here; a ProgrammingError simply
# means plpgsql is already installed, so roll the transaction back and
# carry on.
try:
    postgres_handle.execute_query("CREATE LANGUAGE plpgsql;", return_results=False)
    postgres_handle.connection.commit()
except psycopg2.ProgrammingError:
    postgres_handle.connection.rollback()

# Trigger function: sets modifieddate to now() on the row being written.
ts_modifieddate = """
CREATE OR REPLACE FUNCTION ts_modifieddate()
RETURNS trigger AS $$
BEGIN
    NEW.modifieddate = now();
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;
"""
postgres_handle.execute_query(ts_modifieddate, return_results=False)
postgres_handle.connection.commit()
queue.join() for i in range(num_threads): renderers[i].join() if __name__ == "__main__": postgres_handle = PostgresHandle(smarttypes.connection_string) #get reduction_id qry = """ select tr.id from twitter_reduction tr where tr.tiles_are_written_to_disk = False order by tr.id desc limit 1; """ reduction_id = postgres_handle.execute_query(qry, {})[0]['id'] tile_dir = '../static/tiles/%s/' % reduction_id if not os.path.isdir(tile_dir): os.mkdir(tile_dir) style_file = 'mapnik.xml' min_zoom = 0 max_zoom = 5 bbox = (-180, -85.0511, 180, 85.0511) render_tiles(bbox, style_file, tile_dir, min_zoom, max_zoom) reduction = TwitterReduction.get_by_id(reduction_id, postgres_handle) reduction.tiles_are_written_to_disk = True reduction.save() postgres_handle.connection.commit()
""" think about circular references """ import smarttypes, psycopg2 from smarttypes.model.ppygis import Geometry from smarttypes.utils.postgres_handle import PostgresHandle postgres_handle = PostgresHandle(smarttypes.connection_string) sql = """ select pg_type.oid from pg_type where typname = 'geometry'; """ geometry_oid = postgres_handle.execute_query(sql)[0]['oid'] GEOMETRY = psycopg2.extensions.new_type((geometry_oid, ), "GEOMETRY", Geometry.read_ewkb) psycopg2.extensions.register_type(GEOMETRY)
queue.put(None) # wait for pending rendering jobs to complete queue.join() for i in range(num_threads): renderers[i].join() if __name__ == "__main__": postgres_handle = PostgresHandle(smarttypes.connection_string) #get reduction_id qry = """ select tr.id from twitter_reduction tr where tr.tiles_are_written_to_disk = False order by tr.id desc limit 1; """ reduction_id = postgres_handle.execute_query(qry, {})[0]['id'] tile_dir = '../static/tiles/%s/' % reduction_id if not os.path.isdir(tile_dir): os.mkdir(tile_dir) style_file = 'mapnik.xml' min_zoom = 0 max_zoom = 5 bbox = (-180, -85.0511, 180, 85.0511) render_tiles(bbox, style_file, tile_dir, min_zoom, max_zoom) reduction = TwitterReduction.get_by_id(reduction_id, postgres_handle) reduction.tiles_are_written_to_disk = True reduction.save() postgres_handle.connection.commit()
################################################
##get rid of old connections
##we have db dumps, so we do have an archive
##if ever needed
################################################
# NOTE(review): datetime/timedelta, time_utils and postgres_handle are
# presumably imported/created earlier in this file — verify at the top.
retention_days = 30 * 4  # about 4 months
delete_before_this_date = datetime.now() - timedelta(days=retention_days)

# remove users whose following ids predate the retention cutoff
sql = """
delete from twitter_user
where last_loaded_following_ids < %(delete_before_this_date)s;"""
postgres_handle.execute_query(
    sql,
    {'delete_before_this_date': delete_before_this_date},
    return_results=False)
postgres_handle.connection.commit()

# drop the weekly following tables outside the window; table names cannot
# be bound as parameters, hence the %-interpolation of our own week strings
sql = """drop table twitter_user_following_%(postfix)s;"""
for year_week_st in time_utils.year_weeknum_strs(
        delete_before_this_date - timedelta(days=7), 20, forward=False):
    postgres_handle.execute_query(
        sql % {'postfix': year_week_st}, return_results=False)
    postgres_handle.connection.commit()