def main():
    """Run the ETL: pull from each configured MySQL and SQL Server source
    database and load it into the target data warehouse (SQL Server).

    Per-source failures are printed and skipped so one bad source does not
    abort the remaining loads.
    """
    print('starting etl')
    # establish connection for target database (sql-server)
    target_cnx = pyodbc.connect(**datawarehouse_db_config)
    try:
        # loop through credentials
        # mysql
        for config in mysql_db_config:
            try:
                print("loading db: " + config['database'])
                etl_process(mysql_queries, target_cnx, config, 'mysql')
            except Exception as error:
                # report and continue with the next source database
                print("etl for {} has error".format(config['database']))
                print('error message: {}'.format(error))
                continue
        # sql-server
        for config in sqlserver_db_config:
            try:
                print("loading db: " + config['database'])
                etl_process(sqlserver_queries, target_cnx, config, 'sqlserver')
            except Exception as error:
                print("etl for {} has error".format(config['database']))
                print('error message: {}'.format(error))
                continue
    finally:
        # guarantee the warehouse connection is released even if an
        # unexpected error escapes the loops (original leaked it)
        target_cnx.close()
def main():
    """Run the MySQL ETL into the target warehouse (MySQL), recording any
    per-database failure into log_table on the same connection."""
    print('starting etl')
    # establish connection with target database (mysql)
    target_cnx = pymysql.connect(**datawarehouse_db_config)
    try:
        # mysql
        for config in mysql_db_config:
            try:
                print("loading db: " + config['database'])
                etl_process(mysql_queries, target_cnx, config, 'mysql')
            except Exception as error:
                print("etl for {} has error".format(config['database']))
                print('error message: {}'.format(error))
                # parameterized insert keeps the error text from being
                # interpreted as SQL
                error_insert = (
                    '''INSERT INTO log_table (`job_name`, `failure_reason`) VALUES (%s, %s)'''
                )
                # TODO(review): 'roxy' looks like a placeholder job name --
                # should this be config['database'] so the log row identifies
                # which source failed? Confirm before changing.
                error_params = ('roxy', str(error))
                # 'with' closes the cursor even if the insert raises
                # (the original never closed it)
                with target_cnx.cursor() as tcur:
                    tcur.execute(error_insert, error_params)
                # NOTE(review): failure logs are written to the target DB
                # itself; a separate telemetry DB would keep logging alive
                # when the target is the thing that is failing
                target_cnx.commit()
                continue
    finally:
        # always release the warehouse connection
        target_cnx.close()
def main():
    """Prompt the user for a source database type and dispatch the
    matching scrape/ETL routine."""
    # NOTE(review): raw_input implies Python 2; switch to input() if this
    # file is ever migrated to Python 3
    database = raw_input("Which database would you like to scrape?")
    # normalize so stray whitespace or capitalization does not reject a
    # valid choice (the original compared the raw string exactly)
    choice = database.strip().lower()
    if choice == "mongo":
        xl_write(mongoDB(), database)
    elif choice in ("mysql", "sql"):
        # mysql
        for config in mysql_db_config:
            etl_process(mysql_queries, config, 'mysql')
    else:
        print("Sorry, that functionality hasn't been built yet!")
def main(source_details=None, target_details=None):
    """Run a single ETL from source_details to target_details.

    Args:
        source_details: mapping describing the source; must contain 'name'.
        target_details: mapping describing the target; must contain 'name'.
            When the target name is 'csv' the data is exported via to_csv,
            otherwise it is loaded through etl_process.

    Raises:
        Re-raises any exception from the underlying ETL after printing it.
    """
    # None-sentinel instead of mutable default arguments: `={}` dicts are
    # shared across calls and any mutation would leak between runs
    if source_details is None:
        source_details = {}
    if target_details is None:
        target_details = {}
    print('************************************Starting ETL************************************')
    try:
        print("Loading data from {0} to {1}".format(source_details["name"], target_details["name"]))
        if target_details["name"] == 'csv':
            to_csv(source_details, target_details)
        else:
            etl_process(source_details, target_details)
    except Exception as error:
        print("Error: ", error)
        # bare raise preserves the original traceback
        # (`raise error` would restart it from here)
        raise
def main():
    """Run a single-source ETL into the data warehouse, guaranteeing the
    target connection is closed on any exit path."""
    print('start etl')
    target_cnx = pyodbc.connect(**datawarehouse_db_config)
    try:
        etl_process(mysql_query, target_cnx, mysql_db_config, platform)
    except Exception as error:
        # best-effort: report the failure and fall through to cleanup
        print(error)
    finally:
        # original closed only after the except path; finally also covers
        # non-Exception exits (e.g. KeyboardInterrupt)
        target_cnx.close()
def main():
    """Run the ETL: load from the configured MySQL, SQL Server and Firebird
    source databases into the target data warehouse (SQL Server).

    Per-source failures are printed and skipped so one bad source does not
    abort the remaining loads.
    """
    print('iniciando etl-python')
    # establish connection to the target database (sql-server)
    target_cnx = pyodbc.connect(**datawarehouse_db_config)
    try:
        # loop through the credentials
        # mysql
        for config in mysql_db_config:
            try:
                print("carregando bd: " + config['database'])
                etl_process(mysql_queries, target_cnx, config, 'mysql')
            except Exception as error:
                print("etl para {} apresenta erro".format(config['database']))
                print('mensagem de erro: {}'.format(error))
                continue
        # sql-server
        for config in sqlserver_db_config:
            try:
                print("carregando bd: " + config['database'])
                etl_process(sqlserver_queries, target_cnx, config, 'sqlserver')
            except Exception as error:
                print("etl para {} apresenta erro".format(config['database']))
                print('mensagem de erro: {}'.format(error))
                continue
        # firebird
        for config in fbd_db_config:
            try:
                print("carregando bd: " + config['database'])
                etl_process(fbd_queries, target_cnx, config, 'firebird')
            except Exception as error:
                print("etl para {} apresenta erro".format(config['database']))
                print('mensagem de erro: {}'.format(error))
                continue
    finally:
        # guarantee the warehouse connection is released even if an
        # unexpected error escapes the loops (original leaked it)
        target_cnx.close()
def main():
    """ETL each configured source database (locations, patrons,
    librarymanager) into the target Postgres data warehouse.

    Per-source failures are printed and skipped so one bad source does not
    abort the remaining loads.
    """
    print('Starting etl..')
    # establish connection to target db
    target_conn = pgdb.connect(**datawarehouse_db_config)
    try:
        # (credential list, display name, queries) per source; all sources
        # are postgres, so one loop replaces the original's three copies
        sources = [
            (source1_db_config, source1_name, source1_db_queries),  # locations db
            (source2_db_config, source2_name, source2_db_queries),  # patrons db
            (source3_db_config, source3_name, source3_db_queries),  # librarymanager db
        ]
        for db_configs, name, queries in sources:
            # loop through credentials for this source db
            for config in db_configs:
                try:
                    print('Loading db: {}'.format(name))
                    etl_process(queries, target_conn, config, 'postgres')
                except Exception as error:
                    # report and continue with the next credential/source
                    print('etl for {} has error'.format(name))
                    print('Error message: {}'.format(error))
                    continue
    finally:
        # always release the warehouse connection
        target_conn.close()