public | user_password | table | users public | users | table | users """
from psycopg2.extras import LoggingConnection

if __name__ == '__main__':
    import sys

    # DSN for the *old* users database, assembled from environment variables.
    # NOTE(review): the user/password placeholders appear redacted to '******';
    # with only two '{}' slots left, .format() maps OLD_USERS_DB_HOST -> host
    # and OLD_USERS_DB_USER -> the dbname slot, while the last two arguments
    # are silently ignored (str.format drops extra positional args). Restore
    # '{}' for user and password — confirm against version control.
    # (os and psycopg2 are presumably imported above this chunk — not visible.)
    dsn = "host='{}' user='******' password='******' dbname={}".format(
        os.environ['OLD_USERS_DB_HOST'],
        os.environ['OLD_USERS_DB_USER'],
        os.environ['OLD_USERS_DB_PASSWORD'],
        os.environ['OLD_USERS_DB_NAME'])
    conn = psycopg2.connect(dsn)
    # NOTE(review): the plain connection above is immediately rebound to the
    # LoggingConnection below and never closed — a leaked connection.
    logfile = open('/tmp/db.log', 'a')
    conn = LoggingConnection(dsn)
    conn.initialize(logfile)  # echo every executed query to /tmp/db.log
    cur = conn.cursor()
    try:
        # DSN for the *new* users database; same redaction caveat as above
        # (only two '{}' slots remain for four .format() arguments).
        dsn2 = "host='{}' user='******' password='******' dbname={}".format(
            os.environ['USERS_DB_HOST'],
            os.environ['USERS_DB_USER'],
            os.environ['USERS_DB_PASSWORD'],
            os.environ['USERS_DB_NAME'])
        conn2 = psycopg2.connect(dsn2)
        # NOTE(review): same leak pattern — conn2 is rebound without closing
        # the first connection.
        logfile2 = open('/tmp/db_new.log', 'a')
        conn2 = LoggingConnection(dsn2)
        conn2.initialize(logfile2)  # echo every executed query to /tmp/db_new.log
        # DictCursor: rows are addressable by column name. DictCursor is
        # presumably imported above this chunk — not visible here.
        cur2 = conn2.cursor(cursor_factory=DictCursor)
        try:
            fecha = None
            # Section marker below means "sync users"; the try-block bodies
            # continue beyond this chunk.
            ''' sinc usuarios '''
"""Analytics consumer.

Pull events from the analytics events queue and fan out to worker
lambdas.
"""
import logging
import json
import os

from psycopg2.extras import execute_values, LoggingConnection

# Target Postgres DSN and destination table, injected via the environment.
# Both raise KeyError at import time if the variables are missing.
DB_URL = os.environ['DB_URL']
TABLE_NAME = os.environ['TABLE_NAME']

logger = logging.getLogger()
logger.setLevel(logging.INFO)  # set to DEBUG to log SQL queries

# Module-level connection — Lambda-style init so it can be reused across
# warm invocations (presumably; confirm deployment model).
# NOTE(review): LoggingConnection is instantiated directly; the documented
# psycopg2 pattern is psycopg2.connect(DB_URL,
# connection_factory=LoggingConnection). initialize(logger) routes each
# executed query to the logger (emitted at DEBUG level).
conn = LoggingConnection(DB_URL)
conn.initialize(logger)

# Column order for INSERT_QUERY below; incoming event records are expected
# to carry these keys — TODO confirm against the producer's event schema.
EVENT_KEYS = [
    'event_id', 'event_timestamp', 'event_type', 'event_version',
    'app_title', 'app_version', 'user_id', 'user_name', 'meta',
    'token_payload'
]
# Fields holding JSON payloads (not referenced in this chunk; presumably
# serialized by code further down the file).
JSON_FIELDS = ['meta', 'token_payload']

# Bulk insert via psycopg2.extras.execute_values (the "VALUES %s"
# placeholder); duplicate event_ids are silently skipped, so re-delivered
# events do not create duplicate rows.
INSERT_QUERY = f""" INSERT INTO {TABLE_NAME} ({', '.join(EVENT_KEYS)}) VALUES %s ON CONFLICT (event_id) DO NOTHING """


def main(event, context):
    # Lambda entry point. Body continues beyond this chunk; only the
    # initial logging statement is visible here.
    logger.info('Received %d event items', len(event))