def executeSQL(DB_CONF_FILE, sql_script, cod_rule, return_value=False):
    """Execute *sql_script* on the database described by DB_CONF_FILE.

    On failure the transaction is rolled back, the failure is recorded in
    SCAI (process and integration end with status 3) and the program exits.
    On success the transaction is committed.

    :param DB_CONF_FILE: path to the database connection configuration file.
    :param sql_script: SQL text to execute.
    :param cod_rule: rule code forwarded to scai.integrationEnd on error.
    :param return_value: when True, return the first column of the first
        fetched row (the script is expected to produce at least one row).
    :return: first column of the first row when return_value is True,
        otherwise None.
    """
    conn = getDatabaseConnection(DB_CONF_FILE)
    cur = conn.cursor()

    try:
        cur.execute(sql_script)
    except Exception as e:
        conn.rollback()
        scai.processEnd(DB_CONF_FILE, DSC_PROCESS, COD_INTEGRATION,
                        COD_COUNTRY, '', '', 3)  # SCAI
        scai.integrationEnd(DB_CONF_FILE, COD_INTEGRATION, COD_COUNTRY, 3,
                            cod_rule)  # SCAI
        print(e)
        print(e.pgerror)
        # BUG FIX: release the cursor and connection before aborting; the
        # original leaked the open connection on this path.
        cur.close()
        conn.close()
        sys.exit(
            "The process aborted with error when attempting to execute task calc."
        )
    else:
        conn.commit()

    if return_value:
        result = cur.fetchone()
        cur.close()
        conn.close()
        return result[0]
    cur.close()
    conn.close()
示例#2
0
def copyFromDatabaseToS3(source_conf, target_conf, resources, schema, last_updates_dict, aux_path, scai_last_execution_status=1):
	"""UNLOAD each resource's incremental data from the source Redshift
	cluster ("Chandra") into S3, one manifest-backed prefix per resource.

	:param source_conf: config file for the source cluster and its S3 keys.
	:param target_conf: config file used for SCAI bookkeeping.
	:param resources: source staging table names (e.g. 'stg_d_base_contacts').
	:param schema: source schema holding the resources.
	:param last_updates_dict: resource -> minimum meta_event_time to extract.
	:param aux_path: S3 bucket/prefix used as the staging area.
	:param scai_last_execution_status: 1 for a normal run, 3 to resume a
		previously failed run (only steps still in error are re-executed).
	:return: None.  Exits the process on SQL failure.
	"""
	print('Connecting to Chandra...')
	conn = getDatabaseConnection(source_conf)
	cur = conn.cursor()
	credentials = getS3Keys(source_conf)
	# NOTE: the original loaded json.load(open(source_conf)) into an unused
	# local (leaking the file handle); that dead code has been removed.

	#UNLOAD resources data
	print('Unloading from Chandra...')
	for resource in resources:
		print('\t' + resource + ": " + last_updates_dict[resource])
		tg_table = 'stg_' + COUNTRY + '_' + resource[4:]	# Target table name has the country in the middle of the source table name (for example, stg_d_base_contacts -> stg_pt_d_base_contacts)
		scai_process_name = scai.getProcessShortDescription(target_conf, tg_table)				# SCAI

		if(scai_last_execution_status==3):
			scai_process_status = scai.processCheck(target_conf, scai_process_name, COD_INTEGRATION, COD_COUNTRY,scai_last_execution_status)	# SCAI

		# Is normal execution or re-execution starting from the step that was in error
		if (scai_last_execution_status == 1 or (scai_last_execution_status == 3 and scai_process_status == 3)):
			scai.processStart(target_conf, scai_process_name, COD_INTEGRATION, COD_COUNTRY)			# SCAI
			try:
				cur.execute(
					"UNLOAD ('SELECT * from %(schema)s.%(resource)s "\
					"		WHERE meta_event_time >= \\\'%(last_update_date)s\\\' "\
					"		AND base_account_country = \\\'%(BASE_ACCOUNT_COUNTRY)s\\\'') "\
					"TO 's3://%(aux_path)s/%(schema)s_%(resource)s/data_' "\
					"CREDENTIALS '%(credentials)s' "\
					"ESCAPE "\
					"manifest;"
				% {
				'schema':schema,
				'resource':resource,
				'last_update_date':last_updates_dict[resource],
				'credentials':credentials,
				'aux_path':aux_path,
				'BASE_ACCOUNT_COUNTRY':BASE_ACCOUNT_COUNTRY
				}
				)
			except Exception as e:
				conn.rollback()
				scai.processEnd(target_conf, scai_process_name, COD_INTEGRATION, COD_COUNTRY, tg_table, 'meta_event_time',3)	# SCAI
				scai.integrationEnd(target_conf, COD_INTEGRATION, COD_COUNTRY, 3)		# SCAI
				print (e)
				print (e.pgerror)
				# BUG FIX: close resources before aborting (they were leaked).
				cur.close()
				conn.close()
				sys.exit("The process aborted with error.")
			else:
				conn.commit()
				# BUG FIX: mark the step as successfully finished in SCAI, as
				# every sibling copy routine does; the original never closed
				# the process record, leaving it "running" after a clean run.
				scai.processEnd(target_conf, scai_process_name, COD_INTEGRATION, COD_COUNTRY, tg_table, 'meta_event_time',1)	# SCAI

				#Enable execution of following processes
				scai_last_execution_status = 1


	#Close connection
	cur.close()
	conn.close()
示例#3
0
def copyFromS3ToDatabase(target_conf, resources, sc_schema, tg_schema, aux_path, scai_last_execution_status=1):
	"""COPY each resource's unloaded S3 data into the target Redshift
	cluster ("Yamato"), truncating and analyzing each staging table.

	:param target_conf: config file for the target cluster and its S3 keys.
	:param resources: source staging table names (e.g. 'stg_d_base_contacts').
	:param sc_schema: source schema name used in the S3 prefix.
	:param tg_schema: target schema to load into.
	:param aux_path: S3 bucket/prefix where the UNLOAD manifests live.
	:param scai_last_execution_status: 1 for a normal run, 3 to resume a
		previously failed run (only steps still in error are re-executed).
	:return: None.  Exits the process on SQL failure.
	"""
	#LOAD to target redshift
	print('Connecting to Yamato...')
	conn_target = getDatabaseConnection(target_conf)
	cur_target = conn_target.cursor()
	credentials = getS3Keys(target_conf)

	print('Loading to Yamato...')
	for resource in resources:
		tg_table = 'stg_' + COUNTRY + '_' + resource[4:]	# Target table name has the country in the middle of the source table name (for example, stg_d_base_contacts -> stg_pt_d_base_contacts)
		print('Loading %(tg_schema)s.%(tg_table)s...' % {'tg_schema':tg_schema, 'tg_table':tg_table })
		scai_process_name = scai.getProcessShortDescription(target_conf, tg_table)				# SCAI

		if(scai_last_execution_status==3):
			scai_process_status = scai.processCheck(target_conf, scai_process_name, COD_INTEGRATION, COD_COUNTRY,scai_last_execution_status)	# SCAI

		# Is normal execution or re-execution starting from the step that was in error
		if (scai_last_execution_status == 1 or (scai_last_execution_status == 3 and scai_process_status == 3)):
			# BUG FIX: register the step start in SCAI (every sibling copy
			# routine does this before executing); without it the later
			# processEnd has no matching open process record.
			scai.processStart(target_conf, scai_process_name, COD_INTEGRATION, COD_COUNTRY)			# SCAI
			try:
				cur_target.execute(
					"TRUNCATE TABLE %(tg_schema)s.%(tg_table)s; "\
					"COPY %(tg_schema)s.%(tg_table)s "\
					"FROM 's3://%(aux_path)s/%(sc_schema)s_%(resource)s/data_manifest' "\
					"CREDENTIALS '%(credentials)s' "\
					"REGION 'us-west-2' "\
					"ESCAPE "\
					"manifest; "\
					"ANALYZE %(tg_schema)s.%(tg_table)s;"
				% {
				'tg_schema':tg_schema,
				'tg_table':tg_table,
				'resource':resource,
				'credentials':credentials,
				'aux_path':aux_path,
				'sc_schema':sc_schema
				}
				)
			except Exception as e:
				conn_target.rollback()
				scai.processEnd(target_conf, scai_process_name, COD_INTEGRATION, COD_COUNTRY, tg_table, 'meta_event_time',3)	# SCAI
				scai.integrationEnd(target_conf, COD_INTEGRATION, COD_COUNTRY, 3)		# SCAI
				print (e)
				print (e.pgerror)
				# BUG FIX: close resources before aborting (they were leaked).
				cur_target.close()
				conn_target.close()
				sys.exit("The process aborted with error.")
			else:
				conn_target.commit()
				scai.processEnd(target_conf, scai_process_name, COD_INTEGRATION, COD_COUNTRY, tg_table, 'meta_event_time',1)	# SCAI

				#Enable execution of following processes
				scai_last_execution_status = 1

	cur_target.close()
	conn_target.close()
示例#4
0
def updateFacAutoTask(cod_auto_task, cod_status, message, opr_task=0):
    """Update the status (and optionally the Base task id) of one row of
    crm_integration_anlt.t_fac_auto_task.

    Relies on the module-level ``conn``/``cur`` database handles and the
    SCAI bookkeeping globals.

    :param cod_auto_task: key of the row to update.
    :param cod_status: new status value for cod_status.
    :param message: message passed to sys.exit if the update fails.
    :param opr_task: Base task id; 0 (the default) keeps the existing value
        (the SQL CASE preserves opr_task when the bound value equals 0).
    """
    try:
        # SECURITY FIX: the original built the statement by string
        # concatenation; use driver-side parameter binding instead.
        cur.execute(
        " update "\
        "	crm_integration_anlt.t_fac_auto_task "\
        "	set cod_status = %s, "\
        "   dat_task_sync = cast(to_char(sysdate,'YYYYMMDD') as integer), "\
        "   opr_task = case when %s = 0 then opr_task else %s end "\
        "	where cod_auto_task = %s ",
        (cod_status, str(opr_task), str(opr_task), str(cod_auto_task)))
    except Exception as e:
        scai.processEnd(DB_CONF_FILE, DSC_PROCESS, COD_INTEGRATION,
                        COD_COUNTRY, '', '', 3)  # SCAI
        scai.integrationEnd(DB_CONF_FILE, COD_INTEGRATION, COD_COUNTRY,
                            3)  # SCAI
        print(e)
        print(e.pgerror)
        conn.rollback()
        sys.exit(message)

    conn.commit()
示例#5
0
# Open a cursor on the module-level connection, register the start of this
# integration run in SCAI, then fetch the Base CRM API token for this
# application from the lookup table.
cur = conn.cursor()

#Begin scai execution
scai.integrationStart(DB_CONF_FILE, COD_INTEGRATION, COD_COUNTRY)  # SCAI

scai.processStart(DB_CONF_FILE, DSC_PROCESS, COD_INTEGRATION,
                  COD_COUNTRY)  # SCAI

try:
    # NOTE(review): BASE_APPLICATON is concatenated straight into the SQL.
    # It appears to be a module-level constant, but parameter binding would
    # be safer if it ever comes from outside — confirm its origin.
    cur.execute(
     " select token "\
     " from crm_integration_anlt.t_lkp_token "\
     " where 1 = 1 "\
     " and application = '" + BASE_APPLICATON + "' ")
except Exception as e:
    # Record the failure in SCAI (status 3 = error) and abort the run.
    scai.processEnd(DB_CONF_FILE, DSC_PROCESS, COD_INTEGRATION, COD_COUNTRY,
                    '', '', 3)  # SCAI
    scai.integrationEnd(DB_CONF_FILE, COD_INTEGRATION, COD_COUNTRY, 3)  # SCAI
    print(e)
    print(e.pgerror)
    sys.exit("The process aborted with error when attempting to get token.")

# NOTE(review): fetchone() returns None when no token row exists, which
# would raise TypeError here instead of a clear error message.
BASE_TOKEN = cur.fetchone()[0]
#print (BASE_TOKEN)
try:
    cur.execute(
     " select "\
     "	task.cod_auto_task, "\
     "	task.content, "\
     "	task.resource_type, "\
     "	task.due_date, "\
示例#6
0
print('End Query: ' + time.strftime("%H:%M:%S"))

print('Starting Updating: ' + time.strftime("%H:%M:%S"))

# Threading implementation
# Split result_list into MAX_ACTIVE_THREADS contiguous chunks and push each
# chunk to Base in its own thread via updateDealsInBase.
# NOTE(review): number_active_threads is assigned but never used.
number_active_threads = 0
number_deals = len(result_list)
deals_per_thread = - (-number_deals // MAX_ACTIVE_THREADS) # Ceiling of integer division
thread_list = []

i = 0
j = deals_per_thread
for n in range(0, MAX_ACTIVE_THREADS):
	t = threading.Thread(target=updateDealsInBase, args=(client, result_list[i:j]))
	thread_list.append(t)
	t.start()
	print('Spawned thread #' + str(n+1))
	i = i + deals_per_thread
	j = j + deals_per_thread
	# Clamp the upper bound to the list length; slicing past the end would
	# be harmless in Python, so this only keeps the indices tidy.
	if j > number_deals:
		j = number_deals

# Wait for every worker to finish before closing the SCAI run.
for t in thread_list:
	t.join()

print('End of Updating: ' + time.strftime("%H:%M:%S"))

# Record a clean finish (status 1) and release database resources.
scai.processEnd(conf_file, scai_process_name, COD_INTEGRATION, COD_COUNTRY, '', '',1)	# SCAI
scai.integrationEnd(conf_file, COD_INTEGRATION, COD_COUNTRY, 1)		# SCAI
cur.close()
conn.close()
示例#7
0
def createDealsInBase(client, result_list, conf_file):
    """Create one Base CRM deal per row of *result_list* (worker body for
    the threaded fan-out in main).

    Each row is retried until it succeeds or a non-retryable error occurs
    (ResourceError skips the row).  Every error is logged to SCAI; once
    more than MAX_ERRORS_SKIPPED errors have accumulated across the whole
    list, the run is marked failed in SCAI and the process exits.

    :param client: Base CRM client (client.deals.create is used).
    :param result_list: rows shaped (name, contact_id, owner_id, source_id,
        stage_id, value, l2_cat, l1_cat, payment_date, transaction_id,
        created_by) — inferred from the create() call below.
    :param conf_file: config file handed to the SCAI helpers.
    """
    number_of_errors = 0

    for result in result_list:
        keep_trying = True
        while keep_trying:
            try:
                if (number_of_errors > MAX_ERRORS_SKIPPED):
                    scai.processEnd(conf_file, scai_process_name,
                                    COD_INTEGRATION, COD_COUNTRY, '', '',
                                    3)  # SCAI
                    scai.integrationEnd(conf_file, COD_INTEGRATION,
                                        COD_COUNTRY, 3)  # SCAI
                    sys.exit("The process aborted for exceeding " +
                             str(MAX_ERRORS_SKIPPED) + " errors.")
                deal = client.deals.create(name=result[0],
                                           contact_id=result[1],
                                           owner_id=result[2],
                                           source_id=result[3],
                                           stage_id=result[4],
                                           value=result[5],
                                           custom_fields={
                                               'L2 Categoria': result[6],
                                               'L1 Categoria': result[7],
                                               'Payment Date': result[8],
                                               'Transaction ID': result[9],
                                               'Deal created by': result[10]
                                           })
                break
            except basecrm.errors.ResourceError as err:
                # Non-retryable: skip this deal and move on.
                scai.logError(conf_file, scai_process_name, COD_INTEGRATION,
                              COD_COUNTRY, "basecrm.errors.ResourceError",
                              str(err))
                print("Error: basecrm.errors.ResourceError: " + str(err) +
                      "\nSkipping creation of deal for contact with ID " +
                      str(result[1]))
                number_of_errors = number_of_errors + 1
                keep_trying = False
            except basecrm.errors.ServerError as err:
                scai.logError(conf_file, scai_process_name, COD_INTEGRATION,
                              COD_COUNTRY, "basecrm.errors.ServerError",
                              str(err))
                print("Error: basecrm.errors.ServerError. Trying again...")
                number_of_errors = number_of_errors + 1
            except basecrm.errors.RateLimitError as err:
                scai.logError(conf_file, scai_process_name, COD_INTEGRATION,
                              COD_COUNTRY, "basecrm.errors.RateLimitError",
                              str(err))
                print(
                    "Error: basecrm.errors.RateLimitError. Trying again in 1 second..."
                )
                number_of_errors = number_of_errors + 1
                time.sleep(1)
            except requests.exceptions.ConnectionError as err:
                # NOTE(review): the message says "Reconnecting" but no
                # reconnect is performed — the retry simply reuses the same
                # client; confirm whether the session recovers on its own.
                scai.logError(conf_file, scai_process_name, COD_INTEGRATION,
                              COD_COUNTRY,
                              "requests.exceptions.ConnectionError", str(err))
                print(
                    "Error: requests.exceptions.ConnectionError. Reconnecting and trying again..."
                )
                number_of_errors = number_of_errors + 1
            except Exception as err:
                # BUG FIX: log under scai_process_name like every other
                # handler in this function; the original used DSC_PROCESS
                # here, mis-attributing generic errors to another process.
                scai.logError(conf_file, scai_process_name, COD_INTEGRATION,
                              COD_COUNTRY, "Exception with general handling",
                              str(err))
                print("Error\nDescription: " + str(err) +
                      "\nTrying again in 1 second...")
                number_of_errors = number_of_errors + 1
                time.sleep(1)
示例#8
0
def main(conf_file):
    """Query paid-ads payments from Redshift and create the matching deals
    in Base CRM, fanning the creation out over MAX_ACTIVE_THREADS threads.

    Steps: read the Base API token from *conf_file*, open Base and Redshift
    connections, register the run in SCAI, select the payments newer than
    the stored watermark, refresh the watermark table, create the deals in
    parallel via createDealsInBase, then close the SCAI run.

    :param conf_file: JSON config file (must contain 'base_api_token_olxpt').
    """
    print('Starting Process... ' + time.strftime("%H:%M:%S"))

    global BASE_API_TOKEN
    BASE_API_TOKEN = json.load(open(conf_file))['base_api_token_olxpt']

    client = getBaseConnection()

    # Create Redshift Connection
    conn = getDatabaseConnection(conf_file)
    cur = conn.cursor()

    scai_process_status = scai.processCheck(conf_file, scai_process_name,
                                            COD_INTEGRATION, COD_COUNTRY,
                                            1)  # SCAI

    #First time ever execution
    if (not scai_process_status):
        scai_process_status = 1

    #if (scai_process_status != 1):
    #	sys.exit("The integration is already running or there was an error with the last execution that has to be fixed manually.")

    scai.integrationStart(conf_file, COD_INTEGRATION, COD_COUNTRY)  # SCAI
    scai.processStart(conf_file, scai_process_name, COD_INTEGRATION,
                      COD_COUNTRY)  # SCAI

    # Select one row per new paid-ads payment, shaped for client.deals.create
    # (name, contact_id, owner_id, source_id, stage_id, value, categories,
    # payment date, transaction id, created-by).  Only payments after the
    # stored watermark ('pthorizontal') are returned.
    print('Starting Data Query... ' + time.strftime("%H:%M:%S"))
    cur.execute(
       "select "\
        "fac.name, "\
        "cast(lkp_contact.opr_contact as integer), "\
        "lkp_base_user.opr_base_user, "\
        "4616871 as opr_base_source, "\
        "case "\
         "when "\
           "v_lkp_pi.cod_index_type = 1 /* VAS */ "\
           "and to_date(fac.paidads_valid_to,'yyyy-mm-dd') /* active package */ < to_date(sysdate,'yyyy-mm-dd') "\
           "and last_call < to_date(sysdate,'yyyy-mm-dd') -15 "\
          "then 7344246 /* organico */ "\
        "else 2950782 /* sales pipeline */ "\
        "end as opr_stage, "\
        "(fac.price * (-1))/1.23 as val_price, "\
        "lkp_category.name_pt sub_category, "\
        "lkp_category_parent.name_pt main_category, "\
        "to_char(to_date(fac.date,'yyyy/mm/dd'),'dd/mm/yyyy') as dat_payment, "\
           "fac.id_transaction, "\
           "'Automation' as deal_created_by "\
       "from "\
        "db_atlas.olxpt_paidads_user_payments fac, "\
        "db_atlas.olxpt_users lkp_atlas_user, "\
        "db_atlas.olxpt_ads lkp_ad, "\
        "db_atlas.olxpt_categories lkp_category, "\
        "( "\
         "select id opr_category, name_pt, parent_level1, parent_level2, parent_id from db_atlas.olxpt_categories lkp_category "\
         "where "\
         "parent_level2 is null "\
         "and parent_level1 is null "\
        ") lkp_category_parent, "\
        "crm_integration_anlt.t_lkp_contact lkp_contact, "\
        "crm_integration_anlt.t_lkp_base_user lkp_base_user, "\
        "crm_integration_anlt.t_lkp_paidad_index lkp_pi, "\
        "crm_integration_anlt.t_lkp_paidad_index_type lkp_pit, "\
        "crm_integration_anlt.v_lkp_paidad_index v_lkp_pi, "\
        "(select cod_contact, to_date(max(updated_at),'yyyy-mm-dd') last_call from crm_integration_anlt.t_fac_call where cod_source_system = 16 group by cod_contact) fac_call "\
       "where "\
        "fac.id_user = lkp_atlas_user.id "\
        "and lkp_contact.cod_source_system = 16 "\
        "and lower(lkp_atlas_user.email) = lower(lkp_contact.email) "\
        "and fac.id_index = lkp_pi.opr_paidad_index "\
        "and lkp_pi.cod_paidad_index_type = lkp_pit.cod_paidad_index_type "\
        "and lkp_pit.valid_to = 20991231 "\
        "and lkp_pit.cod_source_system = 8 "\
        "and lkp_pit.opr_paidad_index_type in ('ad_homepage','highlight','bundle','nnl','pushup','logo','topads','topupaccount','paid_subscription','paid_limits_single','paid_for_post') "\
        "and lkp_pi.cod_source_system = 8 "\
        "and fac.id_ad = lkp_ad.id "\
        "and lkp_ad.category_id = lkp_category.id "\
        "and lkp_contact.cod_base_user_owner = lkp_base_user.cod_base_user "\
        "and lkp_base_user.cod_source_system = 16 "\
        "and lkp_base_user.valid_to = 20991231 "\
        "and v_lkp_pi.cod_paidad_index = lkp_pi.cod_paidad_index "\
        "and isnull(lkp_category.parent_level1,-2) = lkp_category_parent.opr_category "\
        "and fac.price < 0 "\
        "and trunc(fac.date) > (select last_processing_datetime from crm_integration_anlt.aut_deals_insert_to_base_date where source_system = 'pthorizontal') "\
        "and lkp_contact.valid_to = 20991231 "\
        "and lkp_pi.valid_to = 20991231 "\
        "and lkp_contact.cod_contact = fac_call.cod_contact (+); ")
    result_list = cur.fetchall()
    print('Ending Data Query... ' + time.strftime("%H:%M:%S"))

    # Refresh the watermark: delete the old 'pthorizontal' row, then insert
    # the max payment date from the same join.  Both statements share one
    # transaction — the commit below covers the delete as well.
    print('Starting Delete Dates Query... ' + time.strftime("%H:%M:%S"))
    cur.execute(
        "delete from crm_integration_anlt.aut_deals_insert_to_base_date where source_system = 'pthorizontal'; "
    )
    print('Ending Delete Dates Query... ' + time.strftime("%H:%M:%S"))

    print('Starting Dates Query... ' + time.strftime("%H:%M:%S"))
    cur.execute(
       "insert into crm_integration_anlt.aut_deals_insert_to_base_date "\
        "select "\
         "'pthorizontal' as source_system, "\
         "max(fac.date) "\
       "from "\
        "db_atlas.olxpt_paidads_user_payments fac, "\
        "db_atlas.olxpt_users lkp_atlas_user, "\
        "db_atlas.olxpt_ads lkp_ad, "\
        "db_atlas.olxpt_categories lkp_category, "\
        "( "\
         "select id opr_category, name_pt, parent_level1, parent_level2, parent_id from db_atlas.olxpt_categories lkp_category "\
         "where "\
         "parent_level2 is null "\
         "and parent_level1 is null "\
        ") lkp_category_parent, "\
        "crm_integration_anlt.t_lkp_contact lkp_contact, "\
        "crm_integration_anlt.t_lkp_base_user lkp_base_user, "\
        "crm_integration_anlt.t_lkp_paidad_index lkp_pi, "\
        "crm_integration_anlt.t_lkp_paidad_index_type lkp_pit, "\
        "crm_integration_anlt.v_lkp_paidad_index v_lkp_pi "\
       "where "\
        "fac.id_user = lkp_atlas_user.id "\
        "and lkp_contact.cod_source_system = 16 "\
        "and lower(lkp_atlas_user.email) = lower(lkp_contact.email) "\
        "and fac.id_index = lkp_pi.opr_paidad_index "\
        "and lkp_pi.cod_paidad_index_type = lkp_pit.cod_paidad_index_type "\
        "and lkp_pit.valid_to = 20991231 "\
        "and lkp_pit.cod_source_system = 8 "\
        "and lkp_pit.opr_paidad_index_type in ('ad_homepage','highlight','bundle','nnl','pushup','logo','topads','topupaccount','paid_subscription','paid_limits_single','paid_for_post') "\
        "and lkp_pi.cod_source_system = 8 "\
        "and fac.id_ad = lkp_ad.id "\
        "and lkp_ad.category_id = lkp_category.id "\
        "and lkp_contact.cod_base_user_owner = lkp_base_user.cod_base_user "\
        "and lkp_base_user.cod_source_system = 16 "\
        "and lkp_base_user.valid_to = 20991231 "\
        "and v_lkp_pi.cod_paidad_index = lkp_pi.cod_paidad_index "\
        "and isnull(lkp_category.parent_level1,-2) = lkp_category_parent.opr_category "\
        "and fac.price < 0 "\
        "and lkp_contact.valid_to = 20991231 "\
        "and lkp_pi.valid_to = 20991231; ")
    conn.commit()
    print('Ending Dates Query... ' + time.strftime("%H:%M:%S"))

    print('Starting Deals Creations in Base... ' + time.strftime("%H:%M:%S"))

    # Threading implementation
    # Slice result_list into MAX_ACTIVE_THREADS contiguous chunks, one per
    # worker thread.  NOTE(review): number_active_threads is never used.
    number_active_threads = 0
    number_deals = len(result_list)
    deals_per_thread = -(-number_deals // MAX_ACTIVE_THREADS
                         )  # Ceiling of integer division
    thread_list = []

    i = 0
    j = deals_per_thread
    for n in range(0, MAX_ACTIVE_THREADS):
        t = threading.Thread(target=createDealsInBase,
                             args=(client, result_list[i:j], conf_file))
        thread_list.append(t)
        t.start()
        print('Spawned thread #' + str(n + 1))
        i = i + deals_per_thread
        j = j + deals_per_thread
        # Clamp the upper bound; slicing past the end is harmless in Python,
        # so this only keeps the indices tidy.
        if j > number_deals:
            j = number_deals

    # Wait for every worker before closing the SCAI run.
    for t in thread_list:
        t.join()

    print('Ending Deals Creations in Base... ' + time.strftime("%H:%M:%S"))

    scai.processEnd(conf_file, scai_process_name, COD_INTEGRATION, COD_COUNTRY,
                    '', '', 1)  # SCAI
    scai.integrationEnd(conf_file, COD_INTEGRATION, COD_COUNTRY, 1)  # SCAI
    cur.close()
    conn.close()

    print('Ending Process... ' + time.strftime("%H:%M:%S"))
示例#9
0
def copyHydraTable(db_conf_file,
                   sc_schema,
                   tg_schema,
                   resource,
                   last_update_date,
                   horizontal_name,
                   scai_last_execution_status=1):
    """Rebuild one hydra web-events staging table from hydra.web, aggregated
    by day / ad / action type, counting events and distinct sessions.

    :param db_conf_file: database connection configuration file.
    :param sc_schema: source schema name, embedded in the target table name.
    :param tg_schema: target schema holding the staging table.
    :param resource: source resource name, embedded in the target table name.
    :param last_update_date: minimum server_date_day to load.
    :param horizontal_name: SQL expression/literal emitted as the 'source'
        column (interpolated as-is into the SELECT).
    :param scai_last_execution_status: 1 for a normal run, 3 to resume a
        previously failed run (only steps still in error are re-executed).
    :return: the (possibly reset) scai_last_execution_status so subsequent
        processes can run after a recovered error.
    """
    print('Connecting to Yamato...')
    conn = getDatabaseConnection(db_conf_file)
    cur = conn.cursor()

    tg_table = 'stg_%(COUNTRY)s_%(sc_schema)s_%(resource)s' % {
        'resource': resource,
        'sc_schema': sc_schema,
        'COUNTRY': COUNTRY
    }
    scai_process_name = scai.getProcessShortDescription(
        db_conf_file, tg_table)  # SCAI
    if (scai_last_execution_status == 3):
        scai_process_status = scai.processCheck(
            db_conf_file, scai_process_name, COD_INTEGRATION, COD_COUNTRY,
            scai_last_execution_status)  # SCAI

    # Is normal execution or re-execution starting from the step that was in error
    if (scai_last_execution_status == 1
            or (scai_last_execution_status == 3 and scai_process_status == 3)):
        scai.processStart(db_conf_file, scai_process_name, COD_INTEGRATION,
                          COD_COUNTRY)  # SCAI
        print(
            'Loading %(tg_schema)s.%(tg_table)s from %(last_update)s...' % {
                'tg_schema': tg_schema,
                'tg_table': tg_table,
                'last_update': last_update_date
            })
        try:
            cur.execute(
             "TRUNCATE TABLE %(tg_schema)s.%(tg_table)s; "\
             "INSERT INTO %(tg_schema)s.%(tg_table)s "\
             "SELECT "\
             "	server_date_day, "\
             "	ad_id, "\
             "	action_type, "\
             "	%(horizontal_name)s source, "\
             "	count(*) occurrences, "\
             "	count(distinct session_long) distinct_occurrences "\
             "FROM hydra.web "\
             "WHERE upper(country_code) = '%(HYDRA_COUNTRY_CODE)s' "\
             "AND ad_id is not null "\
             "AND server_date_day >= '%(last_update_date)s' "\
             "GROUP BY server_date_day, ad_id, action_type; "\
             "ANALYZE %(tg_schema)s.%(tg_table)s;"
            % {
            'tg_table':tg_table,
            'tg_schema':tg_schema,
            'horizontal_name':horizontal_name,
            'HYDRA_COUNTRY_CODE':HYDRA_COUNTRY_CODE,
            'last_update_date':last_update_date
            }
            )
        except Exception as e:
            conn.rollback()
            scai.processEnd(db_conf_file, scai_process_name, COD_INTEGRATION,
                            COD_COUNTRY, tg_table, 'server_date_day',
                            3)  # SCAI
            scai.integrationEnd(db_conf_file, COD_INTEGRATION, COD_COUNTRY,
                                3)  # SCAI
            print(e)
            print(e.pgerror)
            # BUG FIX: close resources before aborting (they were leaked).
            cur.close()
            conn.close()
            sys.exit("The process aborted with error.")
        else:
            conn.commit()
            scai.processEnd(db_conf_file, scai_process_name, COD_INTEGRATION,
                            COD_COUNTRY, tg_table, 'server_date_day', 1)

            #Enable execution of following processes
            scai_last_execution_status = 1  # SCAI

    cur.close()
    # BUG FIX: the original called cur.close() twice and never closed the
    # connection, leaking it on every call.
    conn.close()

    # If error was solved here, return new status to use in subsequent processes
    return scai_last_execution_status
示例#10
0
def copyHydraVerticalsTable(db_conf_file,
                            sc_schema,
                            tg_schema,
                            resource,
                            last_update_date,
                            hydra_verticals_names,
                            anlt_verticals_names,
                            scai_last_execution_status=1):
    """Rebuild one hydra_verticals web-events staging table, mapping each
    event's host to an analytics vertical name via a generated CASE.

    :param db_conf_file: database connection configuration file.
    :param sc_schema: source schema name, embedded in the target table name.
    :param tg_schema: target schema holding the staging table.
    :param resource: source resource name, embedded in the target table name.
    :param last_update_date: minimum server_date_day to load.
    :param hydra_verticals_names: host substrings to match (positionally
        paired with anlt_verticals_names).
    :param anlt_verticals_names: SQL expressions emitted as the CASE results.
        NOTE(review): these are interpolated unquoted while the ELSE branch
        emits the quoted literal 'other' — presumably each entry already
        carries its own quoting; confirm against the callers.
    :param scai_last_execution_status: 1 for a normal run, 3 to resume a
        previously failed run (only steps still in error are re-executed).
    :return: the (possibly reset) scai_last_execution_status so subsequent
        processes can run after a recovered error.
    """
    print('Connecting to Yamato...')
    conn = getDatabaseConnection(db_conf_file)
    cur = conn.cursor()

    tg_table = 'stg_%(COUNTRY)s_%(sc_schema)s_%(resource)s' % {
        'resource': resource,
        'sc_schema': sc_schema,
        'COUNTRY': COUNTRY
    }
    scai_process_name = scai.getProcessShortDescription(
        db_conf_file, tg_table)  # SCAI

    if (scai_last_execution_status == 3):
        scai_process_status = scai.processCheck(
            db_conf_file, scai_process_name, COD_INTEGRATION, COD_COUNTRY,
            scai_last_execution_status)  # SCAI

    # Is normal execution or re-execution starting from the step that was in error
    if (scai_last_execution_status == 1
            or (scai_last_execution_status == 3 and scai_process_status == 3)):
        scai.processStart(db_conf_file, scai_process_name, COD_INTEGRATION,
                          COD_COUNTRY)  # SCAI
        print(
            'Loading %(tg_schema)s.%(tg_table)s from %(last_update)s...' % {
                'tg_schema': tg_schema,
                'tg_table': tg_table,
                'last_update': last_update_date
            })

        # Dynamically build CASE statement according to number of verticals
        # ('%%' stays as a literal '%' through the later %-formatting pass).
        case_statement = "CASE"
        for i in range(len(anlt_verticals_names)):
            case_statement += " WHEN lower(host) LIKE '%%" + hydra_verticals_names[
                i] + "%%' THEN " + anlt_verticals_names[i]
        case_statement += " ELSE 'other' END"

        try:
            cur.execute(
             "TRUNCATE TABLE %(tg_schema)s.%(tg_table)s; "\
             "INSERT INTO %(tg_schema)s.%(tg_table)s "\
             "SELECT "\
             "	server_date_day, "\
             "	ad_id, "\
             "	trackname, "\
             "	%(case_statement)s source, "\
             "	count(*) occurrences, "\
             "	count(distinct session_long) distinct_occurrences "\
             "FROM hydra_verticals.web "\
             "WHERE upper(country_code) = '%(HYDRA_COUNTRY_CODE)s' "\
             "AND ad_id is not null "\
             "AND server_date_day >= '%(last_update_date)s' "\
             "GROUP BY server_date_day, ad_id, trackname, "\
             "	%(case_statement)s; "\
             "ANALYZE %(tg_schema)s.%(tg_table)s;"
            % {
            'tg_table':tg_table,
            'tg_schema':tg_schema,
            'HYDRA_COUNTRY_CODE':HYDRA_COUNTRY_CODE,
            'last_update_date':last_update_date,
            'case_statement':case_statement
            }
            )
        except Exception as e:
            conn.rollback()
            scai.processEnd(db_conf_file, scai_process_name, COD_INTEGRATION,
                            COD_COUNTRY, tg_table, 'server_date_day',
                            3)  # SCAI
            scai.integrationEnd(db_conf_file, COD_INTEGRATION, COD_COUNTRY,
                                3)  # SCAI
            print(e)
            print(e.pgerror)
            # BUG FIX: close resources before aborting (they were leaked).
            cur.close()
            conn.close()
            sys.exit("The process aborted with error.")
        else:
            conn.commit()
            scai.processEnd(db_conf_file, scai_process_name, COD_INTEGRATION,
                            COD_COUNTRY, tg_table, 'server_date_day',
                            1)  # SCAI

            #Enable execution of following processes
            scai_last_execution_status = 1

    cur.close()
    # BUG FIX: the original called cur.close() twice and never closed the
    # connection, leaking it on every call.
    conn.close()

    # If error was solved here, return new status to use in subsequent processes
    return scai_last_execution_status
def updateContactsInBase(client, contact_list, conf_file, return_queue):
    """Push contact updates to Base CRM (worker body for a threaded fan-out).

    Each contact is retried until it succeeds or a non-retryable error
    occurs (ResourceError / RequestError skip the contact).  Every error is
    logged to SCAI; once more than MAX_ERRORS_SKIPPED errors accumulate the
    run is marked failed in SCAI and the process exits.  The number of
    processed contacts is reported through *return_queue*.

    :param client: Base CRM client (client.contacts.update is used).
    :param contact_list: contacts carrying at least an ``id`` attribute.
    :param conf_file: config file handed to the SCAI helpers.
    :param return_queue: queue receiving the per-thread update count.
    """
    number_of_updates = 0
    number_of_errors = 0

    for contact in contact_list:
        keep_trying = True
        while keep_trying:
            try:
                if (number_of_errors > MAX_ERRORS_SKIPPED):
                    scai.processEnd(conf_file, DSC_PROCESS, COD_INTEGRATION,
                                    COD_COUNTRY, '', '', 3)  # SCAI
                    scai.integrationEnd(conf_file, COD_INTEGRATION,
                                        COD_COUNTRY, 3)  # SCAI
                    sys.exit("The process aborted for exceeding " +
                             str(MAX_ERRORS_SKIPPED) + " errors.")
                client.contacts.update(contact.id, contact)
                break
            # Non-retryable: skip this contact and move on.
            except basecrm.errors.ResourceError as err:
                scai.logError(conf_file, DSC_PROCESS, COD_INTEGRATION,
                              COD_COUNTRY, "basecrm.errors.ResourceError",
                              str(err))
                print("Error: basecrm.errors.ResourceError\nDescription: " +
                      str(err) + "\nSkipping update of contact with ID " +
                      str(contact.id))
                number_of_errors = number_of_errors + 1
                keep_trying = False
            # Non-retryable: skip this contact and move on.
            except basecrm.errors.RequestError as err:
                scai.logError(conf_file, DSC_PROCESS, COD_INTEGRATION,
                              COD_COUNTRY, "basecrm.errors.RequestError",
                              str(err))
                print("Error: basecrm.errors.RequestError\nDescription: " +
                      str(err) + "\nSkipping update of contact with ID " +
                      str(contact.id))
                number_of_errors = number_of_errors + 1
                keep_trying = False
            # Retryable: server fault, retried immediately (no back-off).
            except basecrm.errors.ServerError as err:
                scai.logError(conf_file, DSC_PROCESS, COD_INTEGRATION,
                              COD_COUNTRY, "basecrm.errors.ServerError",
                              str(err))
                print("Error: basecrm.errors.ServerError\nDescription: " +
                      str(err) + "\nTrying again...")
                number_of_errors = number_of_errors + 1
            # Retryable: throttled, retried after a 1-second pause.
            except basecrm.errors.RateLimitError as err:
                scai.logError(conf_file, DSC_PROCESS, COD_INTEGRATION,
                              COD_COUNTRY, "basecrm.errors.RateLimitError",
                              str(err))
                print("Error: basecrm.errors.RateLimitError\nDescription: " +
                      str(err) + "\nTrying again in 1 second...")
                number_of_errors = number_of_errors + 1
                time.sleep(1)
            # Retryable: network drop, retried after a 1-second pause.
            except requests.exceptions.ConnectionError as err:
                scai.logError(conf_file, DSC_PROCESS, COD_INTEGRATION,
                              COD_COUNTRY,
                              "requests.exceptions.ConnectionError", str(err))
                print(
                    "Error: requests.exceptions.ConnectionError\nDescription: "
                    + str(err) + "\nTrying again in 1 second...")
                number_of_errors = number_of_errors + 1
                time.sleep(1)
            # Catch-all: logged with the exception class name, then retried.
            except Exception as err:
                scai.logError(conf_file, DSC_PROCESS, COD_INTEGRATION,
                              COD_COUNTRY, "Exception with general handling",
                              str(err.__class__.__name__) + ": " + str(err))
                print("Error\nDescription: " + str(err.__class__.__name__) +
                      ": " + str(err) + "\nTrying again in 1 second...")
                number_of_errors = number_of_errors + 1
                time.sleep(1)

        # NOTE(review): the counter also increments for skipped contacts, so
        # it reports contacts processed, not contacts successfully updated.
        number_of_updates = number_of_updates + 1

    print('Thread done sending contacts to Base!')

    return_queue.put(number_of_updates)
def main(db_conf_file, conf_file):
    """Synchronise contact custom fields from the warehouse snapshot to Base CRM.

    Workflow:
      1. Load process settings from *conf_file* (sets the DSC_PROCESS and
         COD_COUNTRY module globals).
      2. Query the snapshot table for (opr_contact, dsc_custom_field,
         custom_field_value) tuples for this source system.
      3. Page through every contact in Base, patching the matching custom
         fields in memory.
      4. Fan the patched contacts out to MAX_ACTIVE_THREADS worker threads
         that push the updates back to Base.

    Progress and failures are reported through the SCAI helpers; any fatal
    error terminates the process via sys.exit().

    Parameters:
        db_conf_file: path to the database connection configuration file.
        conf_file: path to the JSON configuration containing
            'cod_source_system', 'base_api_token', 'dsc_process' and
            'cod_country'.
    """
    print(datetime.now().time())

    data = json.load(open(conf_file))
    cod_source_system = data['cod_source_system']
    base_api_token = data['base_api_token']

    global DSC_PROCESS
    DSC_PROCESS = data['dsc_process']
    global COD_COUNTRY
    COD_COUNTRY = int(data['cod_country'])

    # Create Redshift Connection
    print('Connecting to Database...')
    conn = getDatabaseConnection(db_conf_file)
    cur = conn.cursor()

    scai.processStart(db_conf_file, DSC_PROCESS, COD_INTEGRATION,
                      COD_COUNTRY)  # SCAI

    # Obtain the list of custom fields and contacts to update in Base; This is a list of tuples (opr_contact, dsc_custom_field, custom_field_value)
    print(
        'Querying for contacts with custom fields to update to Base with cod_source_system '
        + cod_source_system + '...')
    # TODO: Confirm dsc_process_short name
    try:
        # Bind parameters (instead of string concatenation) avoid SQL
        # injection and quoting problems. cod_source_system was previously
        # embedded unquoted, i.e. as a numeric literal, so cast to int here
        # to preserve the comparison type.
        cur.execute(
            "SELECT cast(contact.opr_contact as integer), "
            "  custom_field.dsc_custom_field, "
            "  fac.custom_field_value "
            "FROM crm_integration_anlt.t_fac_base_integration_snap fac, "
            "crm_integration_anlt.t_lkp_contact contact, "
            "crm_integration_anlt.t_lkp_custom_field custom_field, "
            "crm_integration_anlt.t_rel_scai_integration_process rel, "
            "crm_integration_anlt.t_lkp_scai_process process "
            "WHERE fac.cod_custom_field = custom_field.cod_custom_field "
            "AND fac.cod_contact = contact.cod_contact "
            "AND fac.dat_snap = rel.dat_processing "
            "AND rel.cod_process = process.cod_process "
            "AND process.dsc_process_short = %s "
            "AND fac.cod_source_system = %s "
            "AND contact.valid_to = 20991231;",
            (DSC_PROCESS, int(cod_source_system)))
    except Exception as e:
        scai.processEnd(db_conf_file, DSC_PROCESS, COD_INTEGRATION,
                        COD_COUNTRY, '', '', 3)  # SCAI
        scai.integrationEnd(db_conf_file, COD_INTEGRATION, COD_COUNTRY,
                            3)  # SCAI
        print(e)
        # pgerror only exists on psycopg2 errors; getattr avoids masking the
        # original exception with an AttributeError.
        print(getattr(e, 'pgerror', None))
        sys.exit("The process aborted with error.")

    print('Extracting query results...')
    result_list = cur.fetchall()

    print('Closing Database connection...')

    cur.close()
    conn.close()

    print(datetime.now().time())

    # Create Base Connection
    print('Connecting to ' + DSC_PROCESS + '...')
    client = basecrm.Client(access_token=base_api_token)

    # Put all query results in a dictionary with key as opr_contact, and value as a list of the tuples the query returned (one for each custom field of that contact)
    result_dictionary = dict()
    for result in result_list:
        if result[CONTACT_ID_IDX] in result_dictionary:
            result_dictionary[result[CONTACT_ID_IDX]].append(result)
        else:
            result_dictionary[result[CONTACT_ID_IDX]] = [result]

    # Get contacts and iterate through them
    contact_dictionary = dict()
    page_nbr = 1
    contacts_data = '1'  # non-empty sentinel so the pagination loop starts
    number_of_updates = 0
    number_of_errors = 0

    while len(contacts_data) > 0:
        # Retry fetching the current page until it succeeds or the error
        # budget (MAX_ERRORS_SKIPPED) is exhausted.
        while True:
            try:
                if (number_of_errors > MAX_ERRORS_SKIPPED):
                    scai.processEnd(db_conf_file, DSC_PROCESS, COD_INTEGRATION,
                                    COD_COUNTRY, '', '', 3)  # SCAI
                    scai.integrationEnd(db_conf_file, COD_INTEGRATION,
                                        COD_COUNTRY, 3)  # SCAI
                    sys.exit("The process aborted for exceeding " +
                             str(MAX_ERRORS_SKIPPED) + " errors.")
                print('Page #' + str(page_nbr))
                contacts_data = client.contacts.list(page=page_nbr,
                                                     per_page=100)
                break
            except basecrm.errors.ServerError as err:
                scai.logError(db_conf_file, DSC_PROCESS, COD_INTEGRATION,
                              COD_COUNTRY, "basecrm.errors.ServerError",
                              str(err))
                print("Error: basecrm.errors.ServerError\nDescription: " +
                      str(err) + "\nTrying again...")
                number_of_errors = number_of_errors + 1
            except basecrm.errors.RateLimitError as err:
                scai.logError(db_conf_file, DSC_PROCESS, COD_INTEGRATION,
                              COD_COUNTRY, "basecrm.errors.RateLimitError",
                              str(err))
                print("Error: basecrm.errors.RateLimitError\nDescription: " +
                      str(err) + "\nTrying again in 1 second...")
                number_of_errors = number_of_errors + 1
                time.sleep(1)
            except requests.exceptions.ConnectionError as err:
                scai.logError(db_conf_file, DSC_PROCESS, COD_INTEGRATION,
                              COD_COUNTRY,
                              "requests.exceptions.ConnectionError", str(err))
                print(
                    "Error: requests.exceptions.ConnectionError\nDescription: "
                    + str(err) + "\nTrying again in 1 second...")
                number_of_errors = number_of_errors + 1
                time.sleep(1)
            except Exception as err:
                # Log the exception class name too, consistently with the
                # thread-worker's generic handler.
                scai.logError(db_conf_file, DSC_PROCESS, COD_INTEGRATION,
                              COD_COUNTRY, "Exception with general handling",
                              str(err.__class__.__name__) + ": " + str(err))
                print("Error\nDescription: " + str(err.__class__.__name__) +
                      ": " + str(err) + "\nTrying again in 1 second...")
                number_of_errors = number_of_errors + 1
                time.sleep(1)

        # Code could be further improved if all contacts are acquired from Base first, put in a dictionary, and then do updates later (will use much more memory, however)
        # Alternatively, a dictionary with 100 contacts could be created for every iteration here, instead of putting all contacts in a single dictionary; there could still be some time gains compared to current implementation
        for contact in contacts_data:
            if contact.id in result_dictionary:
                for result in result_dictionary[contact.id]:
                    contact.custom_fields[
                        result[CUSTOM_FIELD_NAME_IDX]] = result[
                            CUSTOM_FIELD_VALUE_IDX]
                    number_of_updates = number_of_updates + 1
                contact_dictionary[contact.id] = contact

        page_nbr = page_nbr + 1

    print(datetime.now().time())
    print('Number of updates done in code: ' + str(number_of_updates))

    # Update contacts in Base
    print('Updating #' + str(len(contact_dictionary)) + ' contacts in Base')

    #input('Ready to send contacts to Base. Proceed?')

    # Threading implementation: split the contact list into contiguous
    # slices, one per thread; each worker reports its update count through
    # the shared queue.
    number_active_threads = 0
    contact_list = list(contact_dictionary.values())
    number_contacts = len(contact_list)
    contacts_per_thread = -(-number_contacts // MAX_ACTIVE_THREADS
                            )  # Ceiling of integer division
    thread_list = []
    thread_return_values_queue = queue.Queue()

    i = 0
    j = contacts_per_thread
    for n in range(0, MAX_ACTIVE_THREADS):
        t = threading.Thread(target=updateContactsInBase,
                             args=(client, contact_list[i:j], db_conf_file,
                                   thread_return_values_queue))
        thread_list.append(t)
        t.start()
        print('Spawned thread #' + str(n + 1))
        i = i + contacts_per_thread
        j = j + contacts_per_thread
        if j > number_contacts:
            j = number_contacts

    for t in thread_list:
        t.join()

    # Aggregate the per-thread update counts.
    number_of_updates = 0
    while not thread_return_values_queue.empty():
        number_of_updates = number_of_updates + thread_return_values_queue.get(
        )

    print('Number of updates done in ' + DSC_PROCESS + ': ' +
          str(number_of_updates))
    scai.processEnd(db_conf_file, DSC_PROCESS, COD_INTEGRATION, COD_COUNTRY,
                    '', '', 1)  # SCAI
    print(datetime.now().time())
    print('Done\n')
def copyBaseTables(db_conf_file,
                   sc_schema,
                   tg_schema,
                   resources,
                   last_updates_dict,
                   verticals_names='',
                   scai_last_execution_status=1):
    """Reload the country staging tables from the source Base tables.

    For each source table in *resources*, the matching target table
    ('stg_<COUNTRY>_<suffix>') is truncated and repopulated with the rows
    whose meta_event_time is at or after the table's last update timestamp,
    restricted to BASE_ACCOUNT_COUNTRY, then ANALYZEd. Every table load is
    wrapped in SCAI processStart/processEnd bookkeeping; when the previous
    run ended in error (scai_last_execution_status == 3), tables are skipped
    until the step that failed is reached again.

    Parameters:
        db_conf_file: path to the target database configuration file.
        sc_schema: source schema name.
        tg_schema: target schema name.
        resources: iterable of source table names (e.g. 'stg_d_base_contacts').
        last_updates_dict: mapping of resource name -> last update timestamp.
        verticals_names: unused here; kept for signature compatibility.
        scai_last_execution_status: 1 for a normal run, 3 to resume a failed
            run from the step that was in error.

    Returns:
        int: the (possibly updated) SCAI execution status; becomes 1 once
        the previously failed step has been re-executed successfully.
    """
    print('Connecting to Yamato...')
    conn_target = getDatabaseConnection(db_conf_file)
    cur_target = conn_target.cursor()

    for resource in resources:
        tg_table = 'stg_' + COUNTRY + '_' + resource[
            4:]  # Target table name has the country in the middle of the source table name (for example, stg_d_base_contacts -> stg_pt_d_base_contacts)
        scai_process_name = scai.getProcessShortDescription(
            db_conf_file, tg_table)  # SCAI
        if (scai_last_execution_status == 3):
            scai_process_status = scai.processCheck(
                db_conf_file, scai_process_name, COD_INTEGRATION, COD_COUNTRY,
                scai_last_execution_status)  # SCAI

        # Is normal execution or re-execution starting from the step that was in error
        if (scai_last_execution_status == 1 or
            (scai_last_execution_status == 3 and scai_process_status == 3)):
            scai.processStart(db_conf_file, scai_process_name, COD_INTEGRATION,
                              COD_COUNTRY)  # SCAI
            print(
                'Loading %(tg_schema)s.%(tg_table)s from %(last_update)s...' %
                {
                    'tg_schema': tg_schema,
                    'tg_table': tg_table,
                    'last_update': last_updates_dict[resource]
                })
            # NOTE(review): schema/table identifiers cannot be bind
            # parameters, so they stay %-formatted; they come from trusted
            # configuration, not user input.
            try:
                cur_target.execute(
                 "TRUNCATE TABLE %(tg_schema)s.%(tg_table)s; "\
                 "INSERT INTO %(tg_schema)s.%(tg_table)s "\
                 "SELECT * FROM %(sc_schema)s.%(resource)s "\
                 "WHERE meta_event_time >= '%(last_update_date)s' "\
                 "AND base_account_country = '%(BASE_ACCOUNT_COUNTRY)s'; "\
                 "ANALYZE %(tg_schema)s.%(tg_table)s;"
                % {
                'tg_table':tg_table,
                'tg_schema':tg_schema,
                'sc_schema':sc_schema,
                'resource':resource,
                'last_update_date':last_updates_dict[resource],
                'BASE_ACCOUNT_COUNTRY':BASE_ACCOUNT_COUNTRY
                }
                )
            except Exception as e:
                conn_target.rollback()
                scai.processEnd(db_conf_file, scai_process_name,
                                COD_INTEGRATION, COD_COUNTRY, tg_table,
                                'meta_event_time', 3)  # SCAI
                scai.integrationEnd(db_conf_file, COD_INTEGRATION, COD_COUNTRY,
                                    3)  # SCAI
                print(e)
                # pgerror only exists on psycopg2 errors; getattr avoids
                # masking the original exception with an AttributeError.
                print(getattr(e, 'pgerror', None))
                sys.exit("The process aborted with error.")
            else:
                conn_target.commit()
                scai.processEnd(db_conf_file, scai_process_name,
                                COD_INTEGRATION, COD_COUNTRY, tg_table,
                                'meta_event_time', 1)  # SCAI

                #Enable execution of following processes
                scai_last_execution_status = 1

    cur_target.close()
    conn_target.close()

    # If error was solved here, return new status to use in subsequent processes
    return scai_last_execution_status