Example #1
def main():
    # Picklist Error handling: {'statusCode': 'INVALID_OR_NULL_FOR_RESTRICTED_PICKLIST', 'message': 'Language: bad value for restricted picklist field: en_AU', 'fields': ['LanguageLocaleKey']}
    usernames = [
        # "[email protected]",
        # "[email protected]",
        # "[email protected]",
        # "*****@*****.**",
        # "*****@*****.**",
        "*****@*****.**",
        # "*****@*****.**",
    ]
    sessions = [Salesforce_API(item) for item in usernames if item is not None]
    # users = pd.read_excel('/Users/daniel.hicks_1/Documents/Rome/Rome Downloads/Import Sandbox Test Users.xlsx').query("Filter == True")
    # users = pd.read_excel('/Users/daniel.hicks_1/Documents/Rome/Rome Downloads/Australia Master Data/PROD Release Users.xlsx').query("Filter == True")
    users = pd.read_excel(
        '/Users/daniel.hicks_1/Documents/Rome/Rome Downloads/UK Master Data/Rome UK Users.xlsx'
    ).query("Filter == True")
    if prompt("Look up EOS Ids for these users?", boolean=True):
        sql = SQL_Server_API(sql_creds)
        emails_str = "', '".join(
            users['Email'].str.strip().str.lower().tolist())
        eos_users = (pd.DataFrame(
            sql.query(
                f"SELECT Id, Name, Email FROM IEmsUser WHERE Email IS NULL OR LOWER(Email) IN ('{emails_str}')"
            )).assign(Email=lambda df: df['Email'].str.strip().str.lower()))
        eos_email_map = eos_users.dropna(
            subset=['Email']).set_index('Email').to_dict('index')
        eos_name_map = eos_users.fillna('').query(
            "Email == ''").drop_duplicates(
                subset=['Name'], keep=False).set_index('Name').to_dict('index')

        def get_eos_id(row):
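            # Prefer an exact (case-insensitive) email match; fall back to a unique Name
            # match for EOS records that have no email on file.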
            email, name = str(row['Email']).strip().lower(), str(
                row['Name']).strip()
            matching_record = eos_email_map.get(email,
                                                eos_name_map.get(name, None))
            if matching_record:
                return matching_record['Id']
            return None

        if len(eos_users) > 0:
            users['EOSId__c'] = users.apply(get_eos_id, axis=1)
    for session in sessions:
        session.create_users(
            users,
            defaults={
                # 'Country': 'United Kingdom'
                # , 'DEFAULTCURRENCYISOCODE': 'GBP'
                # , 'TIMEZONESIDKEY': 'GMT'
            })
Example #2
def main_uk():
    uat = Salesforce_API("*****@*****.**")
    lne = Salesforce_API("*****@*****.**")
    sql = SQL_Server_API(eos_prod_creds)

    # lneaccounts = lne.select("SELECT {} FROM Account WHERE EOSId__c <> NULL".format(get_fields(lne, 'Account')))
    # lnecontacts = lne.select("SELECT {} FROM Contact WHERE EOSId__c <> NULL".format(get_fields(lne, 'Contact')))

    # uat.add_bypass_settings()
    # uat.upsert('Account', lneaccounts, 'EOSId__c')
    # uat.upsert('Contact', lnecontacts, 'EOSId__c')
    # uat.remove_bypass_settings()

    # uk_venue_contacts(sql, uat)
    uk_users(sql, uat)

    # uat.clone_records(
    #     "SELECT Id FROM Account WHERE EOSId__c <> NULL"
    #     ,source_session=lne
    #     ,target_session=uat
    # )
    # uat.clone_records(
    #     "SELECT Id FROM Contact WHERE EOSId__c <> NULL"
    #     ,source_session=lne
    #     ,target_session=uat
    # )
    return
Example #3
def get_sessions(usernames, sf_sessions, sql_sessions):
    sf_sessions = sf_sessions or {}
    sql_sessions = sql_sessions or {}
    output = []
    for sf_username, sql_name in usernames:
        sf = sf_sessions.get(
            sf_username, Salesforce_API(sf_username)) if sf_username else None
        sql = sql_sessions.get(sql_name,
                               SQL_Server_API(sql_name)) if sql_name else None
        output.append((sf, sql))
    return output
def main():
    # sql = SQL_Server_API('EOS-pre-prod')
    sql = SQL_Server_API('EOS-prod')
    result, bycolumn = uk.diff_eos_master_data_from_backup(
        sql, 'Prod 2021-08-05', True)
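    # Keep only the objects and columns that actually changed, appending the row count to each key.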
    result = {
        f'{key} ({len(val)})': val
        for key, val in result.items() if len(val) > 0
    }
    bycolumn = {
        key: {f'{f} ({len(df)})': df
              for f, df in dfs.items() if len(df) > 0}
        for key, dfs in bycolumn.items()
    }
    pdh.to_excel(result, 'UK Master Data Changes.xlsx')
    for key, val in bycolumn.items():
        pdh.to_excel(val, f'UK Master Data Changes By Column - {key}.xlsx')
    return
    to_update = bycolumn['Venue']['RegionId (1340)'][[
        'Id', 'RegionId_BACKUP'
    ]].rename(columns={'RegionId_BACKUP': 'RegionId'})
    sql.update('Venue', 'Id', to_update.to_dict('records'))
Example #5
def main():
    sf = Salesforce_API('*****@*****.**')
    sql = SQL_Server_API('EOS-prod')
    # sql = SQL_Server_API('EOS-pre-prod')

    offer_ids = sf.select('SELECT EOSId__c FROM Tour__c WHERE EOSId__c <> NULL', return_type='dataframe').EOSId__c.astype('int').tolist()
    # offer_ids = [54441, 57933, 57965, 58400, 58737, 58050, 57930, 57263, 53728, 55808]

    result, bycolumn = uk.diff_eos_offers_from_backup(sql, 'Prod 2021-08-16', offer_ids, True)
    result = {f'{key} ({len(val)})': val for key,val in result.items() if len(val) > 0}
    bycolumn = {key: {f'{f} ({len(df)})': df for f, df in dfs.items() if len(df) > 0} for key,dfs in bycolumn.items()}
    pdh.to_excel(result, 'UK Tour Data Changes.xlsx')
    for key, val in bycolumn.items():
        if len(val) > 0:
            pdh.to_excel(val, f'UK Tour Data Changes By Column - {key}.xlsx')
    return
def main():
    sf = Salesforce_API("*****@*****.**")
    sql = SQL_Server_API('EOS-stage')

    fix_identical_names_with_different_ids(sf, sql)

    # romevenues = sf.select("SELECT Id, VenueName__c, BillingCity, EOSId__c FROM Account WHERE RecordType.Name = 'Venue'", return_type='dataframe')
    # eosvenues = pd.DataFrame(sql.query("SELECT Id, Name FROM Venue"))

    # romevenues['Name'] = romevenues['VenueName__c'] + ' (' + romevenues['BillingCity'] + ')'
    # eosvenues['EOSId__c'] = 'Venue-' + eosvenues['Id'].astype(str)

    # allvenues = romevenues.merge(eosvenues, on='Name', how='outer', suffixes=['_R','_E'], indicator=True)
    # allvenues['IsDuplicate'] = allvenues.duplicated('Name')

    # if len(allvenues) > 0:
    #     print(allvenues[allvenues['IsDuplicate']==True])
    #     print('^ Duplicates')

    return
Example #7
def main():
    sql = SQL_Server_API(eos_prod_creds)
    sf = Salesforce_API('*****@*****.**')
    file_data = dfconcerts_get_file_data(top=5)
    # offer_ids = [37008,39524,40799,38880,40115]
    offer_ids = pdh.int_to_str(file_data['All']['Tour__c.EOSId__c'].drop_duplicates().dropna()).tolist()
    eos_data = uk.query_tours(sql, offer_ids)
    dfconcerts_data_quality_issues(file_data, eos_data)
    eos_data_with_missing_ids_removed, eos_ids_missing_in_rome, removed_eos_ids_by_tour = uk.remove_eos_ids_missing_in_rome(sf, eos_data)
    all_data = dfconcerts_add_file_data_to_eos_data(eos_data_with_missing_ids_removed, file_data)


    all_data_computed = uk.add_computed_fields(sf, all_data, 'EOSId__c', 'SourceSystemId__c', 'SourceSystemId__c', 'EOSId__c', 'SourceSystemId__c', 'SourceSystemId__c')

    threading.new(pdh.to_excel, all_data, 'DF Data - Raw.xlsx')
    threading.new(pdh.to_excel, all_data_computed.data2, 'DF Data - With Computed Fields.xlsx')
    uk.upsert_eos_data_to_rome(sf, all_data_computed.data2, delete_tours_first=True)


    print(f'Missing EOS Ids in Rome: {eos_ids_missing_in_rome}')
    # print(f'Missing Event Ids in File: {eos_ids_missing_in_file}')  # eos_ids_missing_in_file is never assigned in this function
    return
def store_backup(inst, label):
    uk.store_eos_backup(SQL_Server_API(inst), label)
from classes.salesforce_api import Salesforce_API
from classes.data_change_monitor import DataChangeMonitor
import classes.sf_livenation_model_compute as sfc
import pandas as pd
import functions.pandas_helpers as pdh
from classes.sql_server_api import SQL_Server_API
from functions.prompt import prompt

import functions.uk_eos as uk

lne = Salesforce_API('*****@*****.**')
uat = Salesforce_API('*****@*****.**')
sit = Salesforce_API('*****@*****.**')

sf = uat
sql = SQL_Server_API('EOS-pre-prod')
# sql = SQL_Server_API('EOS-prod')


def main():
    # store_backup('EOS-prod', 'Prod 2021-08-16')
    # set_romeid_on_eos_offers()
    # find_artist_dupes()
    # find_venue_dupes()
    # remove_old_tour_ids()
    return


def find_missing_eos_ids():

    # # FIX for accidental id deletion
Example #10
def main():
    session = Salesforce_API(username)
    sql = SQL_Server_API("euw1wsqldbu01")

    xlsx_file = xlrd.open_workbook(historicalDataFile)
    sheets = xlsx_file.sheet_names()

    LE_rows = get_sheet_table(xlsx_file, "LedgerEntry__c", [
        "Tour__c.EOSId__c", "Event__c.EOSId__c",
        "LedgerEntry__c.Event__r.EOSId__c", "Tour__c.OracleProjectCode__c"
    ])
    LEB_rows = get_sheet_table(xlsx_file, "LedgerEntryBreakout__c",
                               ["Event__r.EOSId__c"])

    LE_rows = [
        r for r in LE_rows if r["LedgerEntry__c.Event__r.EOSId__c"] != ""
    ]
    LEB_rows = [r for r in LEB_rows if r["Event__r.EOSId__c"] != ""]
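
    # Track each distinct Tour/Event EOS Id from the spreadsheet; the value is filled in
    # with the built row the first time the Id is seen, so each record is created once.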

    tourIdMap = {
        r["Tour__c.EOSId__c"]: None
        for r in LE_rows if r["Tour__c.EOSId__c"] != ""
    }
    eventIdMap = {
        r["Event__c.EOSId__c"]: None
        for r in LE_rows if r["Event__c.EOSId__c"] != ""
    }
    glCodeSet = {r["LedgerEntry__c.GLAccount__r.GLCode__c"] for r in LE_rows}
    coPromoterNames = set()

    tours = []
    tourLegs = []
    tourArtistPayments = []
    tourCoPromoterDeals = []
    events = []
    # eventDateTimes = []
    deals = []
    ticketScales = []
    # deductions = []
    ledgerEntries = []
    ledgerEntryBreakouts = []

    # for row in LE_rows:
    #     tourId = row["Tour__c.EOSId__c"]
    #     if tourId not in tourIdMap:
    #         tours.append({key.replace("Tour__c.",""): value for (key, value) in row.items() if key.startswith("Tour__c.")})
    #         # tourLegs.append({key.replace("TourLeg__c.",""): value for (key, value) in row.items() if key.startswith("TourLeg__c.")})
    #         if tourId != "":
    #             tourIdMap.add(tourId)

    recordTypes = session.select_records(
        "SELECT Id, Name FROM RecordType WHERE SobjectType = 'LedgerEntry__c'")
    glCodes = session.select_records(
        "SELECT GLCode__c, Type__c FROM GLAccount__c WHERE GLCode__c IN ('{}')"
        .format("','".join(glCodeSet)))
    glCodeRecordTypeMap = {
        r["GLCode__c"]:
        [t["Id"] for t in recordTypes if t["Name"] == r["Type__c"]][0]
        for r in glCodes
    }

    customMetadata = session.select_records(
        """SELECT Label, GLCode__c, Category__c, PicklistName__c 
    FROM PicklistOption__mdt 
    WHERE PicklistName__c = 'LEB Expense Type' 
    AND AppScope__c LIKE '%UK%'""")

    tourLegs = session.select_records("""
    SELECT Id, Tour__r.EOSId__c, Tour__r.TourHeadliner__r.EOSId__c, SourceSystemId__c, LegName__c, Crossed__c 
    FROM TourLeg__c 
    WHERE Tour__r.EOSId__c IN ('{}')
    """.format(",".join(tourIdMap.keys())))

    deals = session.select_records("""
    SELECT Id, Event__r.EOSId__c, Type__c, GuaranteeAmount__c, CurrencyIsoCode, DealExchangeRate__c
    FROM Deal__c
    WHERE Event__r.TourLeg__r.Tour__r.EOSId__c IN ('{}')
    AND Type__c = 'Primary Headliner'
    """.format(",".join(tourIdMap.keys())))

    def getTourLegSourceSystemIdForEvent(tourLegs, row):
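        # Uncrossed events reuse (or create) a synthetic "Uncrossed" leg for their tour;
        # crossed events use the tour's crossed leg, which is relabelled "Crossed".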
        crossedTourLegs = [
            r for r in tourLegs
            if r["Tour__r.EOSId__c"] == row["Tour__c.EOSId__c"]
            and r["Crossed__c"] == "true"
        ]
        uncrossedTourLegs = [
            r for r in tourLegs
            if r["Tour__r.EOSId__c"] == row["Tour__c.EOSId__c"]
            and r["Crossed__c"] == "false"
        ]
        if row["TourLeg__c.Crossed__c"] == 0:
            if len(uncrossedTourLegs) > 0:
                return uncrossedTourLegs[0]["SourceSystemId__c"]
            else:
                uncrossedTourLeg = {
                    "Tour__r.EOSId__c": row["Tour__c.EOSId__c"],
                    "SourceSystemId__c":
                    "Uncrossed-" + row["Tour__c.EOSId__c"],
                    "LegName__c": "Uncrossed",
                    "Crossed__c": "false"
                }
                tourLegs.append(uncrossedTourLeg)
                return uncrossedTourLeg["SourceSystemId__c"]
        else:
            crossedTourLegs[0]["LegName__c"] = "Crossed"
            return crossedTourLegs[0]["SourceSystemId__c"]

    for row in LE_rows:
        tourId = row["Tour__c.EOSId__c"]
        eventId = row["Event__c.EOSId__c"]

        row["LedgerEntry__c.RecordTypeId"] = glCodeRecordTypeMap[
            row["LedgerEntry__c.GLAccount__r.GLCode__c"]]

        if tourId != "" and tourIdMap[tourId] is None:
            tour_row = {
                key.replace("Tour__c.", ""): value
                for (key, value) in row.items() if key.startswith("Tour__c.")
            }
            tours.append(tour_row)
            if row["TourCoPromoter1Name"] != "":
                coPromoterNames.add(row["TourCoPromoter1Name"])
                tourCoPromoterDeals.append({
                    "Tour__r.EOSId__c":
                    row["Tour__c.EOSId__c"],
                    "CoPromoterName":
                    row["TourCoPromoter1Name"],
                    "EventPL__c":
                    row["TourCoPromoter1Percent"],
                    "AncillaryRevenue__c":
                    row["TourCoPromoter1Percent"],
                    "SourceSystemId__c":
                    row["Tour__c.EOSId__c"] + "-TourCoPromoter1"
                })
            if row["TourCoPromoter2Name"] != "":
                coPromoterNames.add(row["TourCoPromoter2Name"])
                tourCoPromoterDeals.append({
                    "Tour__r.EOSId__c":
                    row["Tour__c.EOSId__c"],
                    "CoPromoterName":
                    row["TourCoPromoter2Name"],
                    "EventPL__c":
                    row["TourCoPromoter2Percent"],
                    "AncillaryRevenue__c":
                    row["TourCoPromoter2Percent"],
                    "SourceSystemId__c":
                    row["Tour__c.EOSId__c"] + "-TourCoPromoter2"
                })
            tourArtistPayments.append({
                "Tour__r.EOSId__c":
                row["Tour__c.EOSId__c"],
                "Amount__c":
                row["Tour__c.ProjectedGuaranteeInArtistCurrency__c"],
                "Artist__r.EOSId__c": [
                    r["Tour__r.TourHeadliner__r.EOSId__c"] for r in tourLegs
                    if r["Tour__r.EOSId__c"] == row["Tour__c.EOSId__c"]
                ][0],
                "Type__c":
                "Offer",
                "Description__c":
                "Offer",
                "SourceSystemId__c":
                row["Tour__c.EOSId__c"] + "|Offer",
                "TourExchangeRate__c":
                row["Deal__c.DealExchangeRate__c"],
                "CurrencyIsoCode":
                row["Tour__c.ArtistCurrency__c"]
            })
            tourIdMap[tourId] = tour_row
        if eventId != "" and eventIdMap[eventId] is None:
            event_row = {
                key.replace("Event__c.", ""): value
                for (key, value) in row.items() if key.startswith("Event__c.")
            }
            event_row[
                "TourLeg__r.SourceSystemId__c"] = getTourLegSourceSystemIdForEvent(
                    tourLegs, row)
            artist_deal_row = [
                r for r in deals
                if r["Event__r.EOSId__c"] == row["Event__c.EOSId__c"]
            ][0]
            artist_deal_row["CurrencyIsoCode"] = row["Deal__c.CurrencyIsoCode"]
            artist_deal_row["GuaranteeAmount__c"] = row[
                "Deal__c.GuaranteeAmount__c"]
            artist_deal_row["DealExchangeRate__c"] = row[
                "Deal__c.DealExchangeRate__c"]
            events.append(event_row)
            eventIdMap[eventId] = event_row

        ledgerEntries.append({
            key.replace("LedgerEntry__c.", ""): value
            for (key, value) in row.items()
            if key.startswith("LedgerEntry__c.")
        })

    coPromoters = session.select_records(
        "SELECT Id, Name FROM Account WHERE RecordType.DeveloperName = 'CoPromoter' AND Name IN ('{}')"
        .format("','".join(coPromoterNames)))

    for row in tourCoPromoterDeals:
        matches = [
            r["Id"] for r in coPromoters if r["Name"] == row["CoPromoterName"]
        ]
        if len(matches) == 0:
            print("Warning: found no Co-Promoter named '{}'".format(
                row["CoPromoterName"]))
        else:
            row["CoPromoter__c"] = matches[0]
        row.pop("CoPromoterName")

    for row in LEB_rows:
        matchingStandardExpenses = [
            r for r in customMetadata if r["Label"] == row["Label__c"] and
            r["GLCode__c"] == row["GLCodePicklist__c"] and r["Category__c"] ==
            row["UKCategory__c"] and r["PicklistName__c"] == "LEB Expense Type"
        ]
        if len(matchingStandardExpenses) > 0:
            row["Type__c"] = matchingStandardExpenses[0]["Label"]

    for row in tourLegs:
        if "Tour__r.TourHeadliner__r.EOSId__c" in row:
            row.pop("Tour__r.TourHeadliner__r.EOSId__c")

    ledgerEntryBreakouts = LEB_rows

    # session.delete_records(session.select_records("""SELECT Id FROM LedgerEntry__c
    # WHERE Event__r.TourLeg__r.Tour__r.EOSId__c IN ({})""".format(",".join(["'" + item["EOSId__c"] + "'" for item in tours]))))
    # session.delete_records(session.select_records("""SELECT Id FROM LedgerEntryBreakout__c
    # WHERE Event__r.TourLeg__r.Tour__r.EOSId__c IN ({})""".format(",".join(["'" + item["EOSId__c"] + "'" for item in tours]))))

    session.default_batch_size = 50
    session.add_bypass_settings()
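
    # Upsert parents before children: Tours and TourLegs first, then payments, deals,
    # events and ledger entries, and finally the ledger entry breakouts.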

    threading.new(session.upsert_records, "Tour__c", tours, "EOSId__c")
    threading.new(session.upsert_records, "TourLeg__c", tourLegs,
                  "SourceSystemId__c")
    threading.wait()

    threading.new(session.upsert_records, "TourArtistPayment__c",
                  tourArtistPayments, "SourceSystemId__c")
    threading.new(session.upsert_records, "Deal__c", tourCoPromoterDeals,
                  "SourceSystemId__c")
    threading.new(session.upsert_records, "Event__c", events, "EOSId__c")
    threading.new(session.upsert_records, "Deal__c", deals, "Id")
    threading.new(session.upsert_records, "LedgerEntry__c", ledgerEntries,
                  "SourceSystemId__c")
    threading.wait()

    threading.new(session.upsert_records, "LedgerEntryBreakout__c",
                  ledgerEntryBreakouts, "SourceSystemId__c")
    threading.wait()

    session.remove_bypass_settings()

    return
def main():
    # sql = SQL_Server_API(eos_prod_creds)
    # sf = Salesforce_API('*****@*****.**')
    sql = SQL_Server_API(eos_stage_creds)
    sf = Salesforce_API('*****@*****.**')

    sf.bypass_prod_operation_approval()
    delete_tours_first = False
    skip_already_created_tours = False

    # all_21_sept = [37225,31270,31270,22916,22695,53206,52018,52018,54352,52872,52090,51322,51322,50940,42648,20125,22958,29920,23463,22398,19370,19370,22884,28579,26342,35517,38949,38949,38586,35627,33668,28753,31358,35591,39302,35581,22495,52850,44233,44265,44265,51030,51028,43995,41284,41284,43884,42729,53383,49144,33642,40596,38839,41018,41018,44235,44235,37310,48932,50809,49168,39253,39253,39874,39874,46689,42649,42649,40842,51665,42653,49209,50770,40975,51555,51555,51560,35693,31353,31220,31220,37306,30794,23619,23619,38877,38877,30697,31576,37381,38894,32096,32096,33484,33484,30044,34283,23121,19405,22356,22356,23668,24095,25145,28657,19175,23122,23152,20541,20367,23745,23744,22532,23024,23505,25144,20225,20225,22689,20448,20448,20374,20374,30189,54035,54544,40974,41007,31962,33940,33940,34009,39123,22324,22835,22835,22825,22825,22870,23123,23123,23155,32282,52020,37205,37205,43740,53714,48756,50791,50791,39719,52059,52428,51243,51243,51589,50762,50762,47739,38843,51102,45676,45676,49117,48857,48857,51261,48847,41321,40872,50865,43765,41338,38841,44251,41404,41404,45686,44488,44488,35803,40531,41303,40578,42612,40965,43873,43873,40866,38790,38790,40986,40986,40867,40867,40748,40031,40397,39421,37335,37335,40453,37140,37140,22832,22832,39128,37509,34220,39715,38839,37174,37006,39178,31041,34156,39736,37426,37426,35557,35721,35721,31854,37251,37251,32278,38549,38540,39186,38959,32052,37453,40102,38976,37029,37029,39020,31054,31558,37485,31844,35620,33628,33966,30910,30910,30697,31664,31664,30811,30811,31542,31542,31029,33684,31974,31309,31345,31001,30696,30583,28593,30315,23619,23619,24113,24113,28769,30172,30172,28622,23622,23622,23795,23795,23597,23849,23849,30209,25157,30684,23869,23869,23882,30731,23845,23845,23845,25178,25178,24021,24021,22914,25117,22945,23690,25163,29923,25179,25179,28701,28701,30036,28684,28571,24100,23657,23656,23791,26332,22829,30244,28582,24098,24098,23713,23319,23319,28699,23366,24108,23500,22045,22045,22452,22831,22831,22836,22836,20881,22029,23498,22446,20440,20440,22425,19446,19446,20882,20773,20773,22436,19396,20637,20373,20373,20484,20484,22596,20451,19343,19343,18763,18633,20830,22256,54015,51782,53686,51104,51352,40287,51534,48753,51404,51404,48899,49137,49137,40952,45535,49190,49190,51624,51624,40860,42670,44103,52470,45678,40258,43800,43800,41446,39966,39638,40229,40235,37514,39713,37435,38633,37128,40997,39935,39935,37027,37047,34385,38921,37342,39112,38922,35684,35684,30310,30501,30501,30246,30947,30947,31113,31055,22918,30268,28575,24101,24101,25126,23938,23938,23919,22535,22524,22524,22524,20237,20565,28635,28635,29885,18623,31737,39346,39346,39263,52027,52028,41009,44119,44373,49243,50779,51153,51153,37515,31116,22312,24037,24037,29836,29836,22261,52324,40252,40252,39707,39707,31374,45657,49321,49321,53613,54619,40132,41317,49210,35543,51594,53300,24022,35585,39184,20954,49573,48970,48970,39304,39304,23871,45685,43742,43742,20844,20844,20844,34376,23523,23523,22553,22553,23323,28576,22438,39113,30360,30360,34232,34232,50646,51002,39728,22993,22448,22448,26195,20713,22192,20478,52718,53284,54553,39945,40425,40518,48880,39566,40218,51159,50625,38981,43950,51469,52362,52362,41037,41104,44505,49143,51780,52383,50918,40457,40457,39629,39629,38983,38811,43818,51511,49244,49244,43903,40984,41008,41008,35839,35839,49520,49520,49441,49441,32090,31636,35889,38829,33860,31949,33996,28616,28616,31368,31935,30848,33680,35780,37309,37370,30507,30507,31075,34289,37167,33505,34028,34028,40579,34309,38694,38701,39648,31959,33484,33484,35659,34065,40127,32376,
39745,23488,23873,23873,29829,26200,22449,24106,24106,19990,19990,22040,22550,22662,23522,23522,30175,30375,25124,28637,29956,29956,20304,20294,22454,20685,20685,24040,23653,26354,30177,24093,24093,19752,22264,38575,38575,40385,40385,40894,51911,51911,41194,44374,44374,44402,50807,50998,52686,53459,40699,40987,41001,49335,52160,52422,52348,31551,31544,31545,31546,31547,31548,37463,34399,33616,33616,39571,39125,23642,22271,38840,37431,22660,22660,24022,33642,28635,28635,38973,49322,49322,46730,23622,23622,52851,31711,20480,22406,38702,31947,33723,33723,32150,51045,51045,51359,51359,53542,38853,35592,22612,20210,52242,52242,28770,30537,30537,52721,53925,51266,43845,43845,53076,40290,41340,49533,40540,40958,44177,44177,33679,36979,35939,37130,37130,13218,13218,23297,23911,22147,23145,23145,51011,51011,39678,39678,28821,28821,31531,30487,51651,51651,30476,24067,24067,20194,52018,52018,30190,32059,32061,49450,51208,51208,30851,30851,34043,51504,41002,51384,51384,18201,20155,52210,52445,52637,52322,52322,52322,53888,40261,40261,44248,44526,51448,49534,40135,43763,43763,40549,40833,40833,39727,30854,30854,39173,36996,32398,32398,30835,30835,34423,34423,30950,30950,39391,33965,39514,33702,31945,34244,19855,19855,22816,20709,20350,20350,23788,20599,52832,50870,53129,53129,46730,53516,51653,51750,52357,52470,50792,50792,48980,50827,50827,39043,39043,45569,41467,51509,51405,51591,53245,51756,50637,50951,41206,48894,40239,40239,44382,38833,41493,41493,40538,32283,39515,39515,39730,38545,39615,39615,37432,38673,38673,39288,39489,35943,33674,34405,35948,34007,32395,32395,33757,35683,33955,31061,33781,30961,31110,31552,31312,31831,31831,30493,30293,30293,30276,30547,24008,24008,24009,23291,24015,23157,24011,19446,19446,23189,22830,21986,22834,22834,22038,22273,18889,19659,20566,20573,20573,19413,30530]
    # all_22 = [37225,31270,22916,22695,53206,52018,54352,52872,52090,51322,50940,42648,20125,22958,29920,23463,22398,19370,22884,28579,26342,35517,38949,38586,35627,33668,28753,31358,35591,39302,35581,22495,52850,44233,44265,51030,51028,43995,41284,43884,42729,53383,49144,33642,40596,38839,41018,44235,37310,48932,34107,50809,49168,39253,39874,46689,42649,40842,51665,41576,42653,49209,50770,40975,51555,51560,35693,31353,31220,37306,30794,23619,38877,30697,31576,40326,40323,40179,37381,38894,32096,33484,30044,34283,23121,19405,22356,23668,24095,25145,28657,19175,23122,23152,20541,20367,23745,23744,23480,22532,23024,23505,25144,20225,22689,20448,20374,30189,19750,54035,54544,40974,41007,31962,33940,34009,30882,39123,22324,23670,41563,22835,22825,22870,23123,23155,32282,38815,52020,39743,41085,37205,43740,53714,48756,50791,39719,52059,52428,51243,51589,50762,47739,38843,51102,45676,49117,48857,51261,48847,41321,40872,50865,43765,41338,38841,44251,41404,45686,44488,35803,40531,41303,40578,42612,40965,43873,40866,38790,40986,40867,40748,40031,40397,39421,37335,40453,37140,22832,39128,37509,34220,39715,38839,37174,37006,39178,31041,34156,39736,37426,35557,35721,31854,37251,32278,38549,38540,39186,38959,32052,37453,30106,40102,38976,37029,39943,39020,31054,31558,37485,31844,35620,33628,33966,30910,30697,31664,30811,31542,31029,33684,31974,31309,31345,31001,30696,30583,28593,30315,23619,24113,28769,30172,28683,28622,23622,23795,23597,23849,30209,25157,30684,23869,23882,23129,30731,23845,25178,24021,22118,22914,25117,22945,23690,25163,23337,29923,25179,19750,28701,30036,28684,28571,24100,23657,23656,23791,26332,22829,30244,28582,24112,24098,23713,23319,28699,23366,24108,23500,22045,22452,22831,22836,20881,22029,23498,22260,22446,20440,22425,19446,20882,20773,22436,20447,19560,19396,20637,20373,19392,20484,22596,20451,19343,18763,18633,20830,22256,54015,51782,53686,51104,51352,40287,51534,48753,51404,48899,49137,40952,45535,49190,51624,40860,42670,44103,52470,45678,40258,43800,41446,39966,39638,40229,40235,37514,39713,37435,38633,37128,40997,39935,37027,37047,34385,38921,37342,39112,38922,35684,30310,30501,30246,30947,31113,31055,22918,30268,28575,24101,25126,23938,23919,22535,22524,20237,20565,28635,29885,20777,18623,31737,39346,39263,30882,52027,52028,41009,44119,44373,49243,50779,51153,37515,31116,22312,24037,29836,22261,52324,40252,39707,31374,45657,49321,53613,54619,40132,41317,34107,49210,35543,51594,53300,24022,35585,39184,20954,49573,48970,39304,23871,45685,43742,20844,20764,34376,23523,22553,23323,28576,22438,39113,30360,34232,50646,51002,39728,22993,22448,26195,20713,22192,28564,20478,52718,53284,54553,39945,40425,39700,40518,48880,39566,40218,51159,50625,38981,43950,51469,52362,41037,41104,44505,49143,51780,34107,52383,50918,40457,39629,38983,38811,43818,51511,49244,43903,40984,41008,35839,49520,49441,32090,31636,35889,38829,33860,31949,33996,28616,31368,31935,30848,32065,33680,35780,37309,37370,30507,31075,34289,37167,33505,34028,40579,34309,38694,38701,39648,31959,33484,35659,34065,40127,32376,39745,23488,23873,29829,26200,22449,24106,19990,22040,20925,22550,22662,23522,30175,30375,25124,28637,29956,20304,20294,22454,20685,24040,23653,26354,30177,24093,19752,22264,38575,40385,40894,51911,41194,44374,44402,50807,50998,52686,53459,40699,40987,41001,49335,52160,52422,52348,31551,31544,31545,31546,31547,31548,37463,34399,33616,39571,39125,23642,23595,22271,38840,37431,22660,24022,33642,28635,38973,49322,39961,46730,23622,52851,31711,20480,22406,38702,51354,39424,31947,33723,32150,51045,51359
,53542,38853,35592,22612,20210,52242,28770,30537,41248,52721,53925,51266,43845,53076,40290,41340,49533,40540,40958,44177,33679,36979,35939,37130,13218,23297,23911,22147,23145,51011,39678,28821,31531,30487,51651,30476,24067,20194,52018,30190,32059,32061,49450,51208,30851,34043,41468,51504,41002,51384,18201,20155,52210,52445,52637,52322,53888,40261,44248,44526,51448,49534,40135,43763,40549,40833,39727,30854,39173,36996,32398,30835,34423,30950,39391,33965,39514,33702,31945,34244,19855,22816,20709,20350,23788,20599,52832,50870,53129,46730,53516,51653,51131,51750,52357,52470,50792,48980,50827,39043,45569,48942,41467,51509,51405,51591,53245,51756,50637,50951,41206,48894,39216,40239,44382,38833,41493,40538,32283,39515,39730,40125,38545,39615,37432,38673,39288,39489,35943,33674,31566,34405,35948,34007,32395,33757,35683,33955,31061,33781,30961,31110,31552,31312,31831,30493,30293,30276,30547,22088,24008,24009,23291,24015,23157,24011,19446,23189,22830,19750,21986,22834,19555,22038,22273,18889,19659,20566,20573,19413,39193,30530,20474,30573,49546]
    # lewis_1102 = [22829,22831,22660,22662,22830,22695,22816,22495,22524,22596,22535,30177,30189,30268,30293,30310,30315,30360,30476,30487,30501,30530,30696,30583,30276,30684,30493,30209,30507,30244,30375,30547,30246,28657,28701,28753,28821,29829,29920,30172,30175,29923,29836,28684,29885,28699,22832,19659,23463,23505,23522,20155,19370,19396,19413,23323,23498,20194,23488,19446,19405,19752,20125,22449,22452,22454,23157,23189,23795,23882,23919,23938,24015,24021,23668,23690,24022,30794,30811,23713,23791,23291,23319,24067,30854,30848,22045,22264,22356,22398,22406,22446,24008,30731,23744,23745,23788,24037,24040,30910,22448,23845,23656,23657,23297,22261,22436,23849,24009,22273,23911,22438,24011,22192,22312,22324,24093,20350,20367,13218,18633,18763,18201,18623,20374,20882,22029,33781,33860,34007,40952,34043,41009,41018,34309,20713,20830,20881,33684,33940,41007,35581,34220,34232,20954,41194,34244,34283,33668,34009,41037,41284,34385,34399,21986,34289,34405,33674,34423,22038,40958,40965,40975,33955,34028,35543,28616,28622,33679,35557,33680,33757,33965,40974,40986,41206,40987,40997,33966,41001,33996,18889,20448,20451,20478,20484,20541,20480,20565,20440,32283,23873,32395,33616,33628,31962,31974,32096,32278,32376,23869,32398,41303,31959,32090,32052,39874,52362,52445,52637,52686,52718,52721,52832,52357,52850,52383,52422,39745,52851,39935,52428,38877,38894,51624,51665,51780,51782,51911,38540,38545,38586,38673,38701,38790,38811,38829,38833,38840,38843,38853,52242,52028,38694,51651,52027,52059,52090,52160,52322,52210,38633,38839,38549,38841,38921,38959,38575,38922,51589,51750,52348,51756,51594,51653,52324,39253,36996,39186,50791,50792,50865,50870,50951,50998,51002,51045,51102,51159,39288,37027,37029,37128,39302,37130,37140,39304,37205,51011,51153,51243,51266,39263,39346,51104,51208,37167,37174,39391,39489,51384,50625,50807,51352,50918,51322,50646,50809,50940,50779,40833,40842,40866,40867,40872,40860,40894,39966,40031,40132,40218,40235,40239,39945,40102,40135,40229,53284,53459,53542,53686,53888,54015,54035,54553,54619,40252,40261,40287,53300,53516,41321,41446,53925,54352,43742,52872,43763,43903,43884,43873,44233,54544,31636,44265,53129,53383,31711,44374,53714,45535,45685,48880,53613,38983,37431,48980,49143,48894,53206,49209,53245,39184,28575,28576,25126,25145,25163,25179,26195,26200,26342,28579,26354,28582,24113,25144,25178,26332,28593,25117,28571,23622,23642,23653,23024,22993,23121,46689,37306,37310,43765,43818,43845,37370,37426,44505,37251,43740,37335,44526,45686,37309,45657,37342,37381,45676,43995,44177,44235,44382,44488,30950,42729,48847,48932,48970,44103,31029,42649,43950,41317,41404,41493,30961,48857,44119,44248,42653,48753,30947,44251,42612,48756,44373,48899,41338,42670,41340,23597,22914,22918,22884,20210,20237,35693,35721,38981,35780,35803,35889,51504,35683,51405,51509,51511,51534,20294,35684,39020,49190,49244,49322,20304,35627,51448,51555,51560,35839,51469,51404,38976,39112,39113,39125,39128,39173,39123,35939,35943,35948,36979,31220,31309,24100,24106,31353,31358,40290,40385,40425,31552,31558,31664,24108,40453,40457,31374,31075,31110,31116,24101,31737,31368,24098,40397,40578,35591,35592,31854,24095,40258,40518,31531,40531,40538,40549,40596,35620,40748,31054,35585,31844,31542,31547,31551,31055,31544,31545,31548,40699,49117,49137,49168,49321,49450,49520,49534,49335,49533,49144,49441,20685,20709,39515,39566,39571,39615,39629,20573,20599,39678,39736,39707,39713,39715,39719,39727,39728,39730,39638,39648,37435,37485,37509,37515,37432,37453,37463,37514]
    all_1104 = [
        37225, 31270, 22916, 52018, 42648, 22958, 35517, 38949, 51030, 51028,
        33642, 34107, 41576, 50770, 23619, 30697, 31576, 40326, 40323, 40179,
        33484, 30044, 19175, 23122, 23152, 23480, 22532, 20225, 22689, 19750,
        30882, 23670, 41563, 22835, 22825, 22870, 23123, 23155, 32282, 38815,
        52020, 39743, 41085, 50762, 47739, 51261, 39421, 37006, 39178, 31041,
        34156, 30106, 39943, 30697, 31345, 31001, 23619, 28769, 28683, 25157,
        23129, 22118, 22945, 23337, 19750, 30036, 24112, 23366, 23500, 22836,
        22260, 22425, 20773, 20447, 19560, 20637, 20373, 19392, 19343, 22256,
        52470, 45678, 43800, 37047, 31113, 28635, 20777, 30882, 49243, 34107,
        49210, 49573, 23871, 20844, 20764, 34376, 23523, 22553, 28564, 39700,
        41104, 34107, 40984, 41008, 31949, 31935, 32065, 33505, 40579, 35659,
        34065, 40127, 19990, 22040, 20925, 22550, 25124, 28637, 29956, 44402,
        31546, 23595, 22271, 33642, 28635, 38973, 39961, 46730, 38702, 51354,
        39424, 31947, 33723, 32150, 51359, 22612, 28770, 30537, 41248, 53076,
        40540, 22147, 23145, 30190, 32059, 32061, 30851, 41468, 41002, 30835,
        39514, 33702, 31945, 19855, 46730, 51131, 50827, 39043, 45569, 48942,
        41467, 51591, 50637, 39216, 40125, 31566, 31061, 31312, 31831, 22088,
        19750, 22834, 19555, 20566, 39193, 20474, 30573, 49546
    ]

    offer_ids = all_1104
    # offer_ids = [37225]
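
    # In production (or when skip_already_created_tours is set), drop offers whose Tours
    # already exist in Rome as non-historical so they are not re-imported.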

    if sf.instance == 'lne' or skip_already_created_tours:
        current_tours = set([
            item.EOSId__c for item in sf.select(
                """SELECT EOSId__c FROM Tour__c WHERE EOSId__c <> NULL 
        AND IsHistoricalTour__c = False""")
        ])
        tours_to_not_import = [
            item for item in offer_ids if str(item) in current_tours
        ]
        if len(tours_to_not_import) > 0:
            print(
                f'Skipping the following tours because they are already in Production: {tours_to_not_import}'
            )
            offer_ids = [
                item for item in offer_ids if str(item) not in current_tours
            ]

    assert len(offer_ids) > 0

    eos_data = uk.query_tours(sql, offer_ids, is_onsale=False)
    if len(eos_data.Tour__c) == 0: raise Exception('No Offers to migrate')
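    # Clean the EOS extract before loading: remap duplicate EOS Ids, split headliners from
    # co-headliners, and strip references to master data that is missing in Rome.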
    eos_data_with_remapped_eos_ids, remapped_eos_ids = uk.replace_duplicate_eos_ids(
        eos_data)
    eos_data_with_split_headliners, artist_ids_missing_in_rome_by_tour = uk.split_headliner_and_coheadliner(
        sf, eos_data_with_remapped_eos_ids)
    eos_data_with_missing_ids_removed, eos_ids_missing_in_rome, removed_eos_ids_by_tour = uk.remove_eos_ids_missing_in_rome(
        sf, eos_data_with_split_headliners)

    all_missing_eos_ids_by_tour = combine_missing_ids_dicts(
        removed_eos_ids_by_tour, artist_ids_missing_in_rome_by_tour)
    eos_ids_missing_in_rome.update(
        itertools.chain.from_iterable(
            artist_ids_missing_in_rome_by_tour.values()))
    assert len(
        eos_ids_missing_in_rome
    ) == 0 or sf.instance != 'lne', f'Some EOS Ids are missing: {eos_ids_missing_in_rome}\nThe following tours have missing data: {[int(s) for s in all_missing_eos_ids_by_tour]}'

    eos_data_dfs = ObjDict({
        obj: pd.DataFrame(data)
        for obj, data in eos_data_with_missing_ids_removed.items()
    })
    eos_data_with_file_data = uk.merge_eos_data_with_file_data(eos_data_dfs,
                                                               is_onsale=False)

    eos_data_computed = uk.add_computed_fields(sf, eos_data_with_file_data)

    validations(eos_data_computed, eos_ids_missing_in_rome,
                sf.credentials['sandbox'] == 'False')

    threading.new(pdh.to_excel, eos_data_computed.data2,
                  'Migrate EOS Historical Tours.xlsx')
    sf.bypass_prod_operation_approval()
    rome_results = uk.upsert_eos_data_to_rome(
        sf,
        eos_data_computed.data2,
        is_onsale=False,
        delete_tours_first=delete_tours_first)
    tour_results = itertools.chain.from_iterable(
        [job.results for job in rome_results if job.object_name == 'Tour__c'])
    event_results = itertools.chain.from_iterable(
        [job.results for job in rome_results if job.object_name == 'Event__c'])
    # Do NOT Update RomeIds for Offers in EOS, for historical Tours: uk.update_romeids_in_eos
    # Do NOT set Tour Personnel for Historical Tours

    if eos_ids_missing_in_rome:
        missing_eos_id_info = uk.query_by_eos_ids(
            sql, eos_ids_missing_in_rome,
            ['Name', 'FirstName', 'LastName', 'Email', 'EmailAddress'])
        pdh.to_excel(missing_eos_id_info, 'Missing EOS Data.xlsx')
        print(f'Missing EOS Ids in Rome: {eos_ids_missing_in_rome}')

    # tourlegs = sf.select("""
    # SELECT Id
    # FROM TourLeg__c
    # WHERE Tour__r.AppScope__c = 'UK'
    # AND CreatedBy.Name = 'DataMigration User'
    # AND Id NOT IN (SELECT TourLeg__c FROM Event__c)
    # """)
    # sf.delete(tourlegs)

    return
def main():
    session = Salesforce_API(username)
    # sql = SQL_Server_API('EOS-prod')
    sql = SQL_Server_API('EOS-stage')
    

    # whereClause = """
    # WHERE o.OracleCode = '1573007431'
    # -- AND o.RomeId IS NULL
    # """

    # whereClause = """
    # WHERE o.Id IN(43748
    #     ,22660
    #     ,49573)
    # """

    artistsSQL = open(sqlFileArtists, 'r', encoding='utf-8-sig').read()
    coPromotersSQL = open(sqlFileCoPromoters, 'r', encoding='utf-8-sig').read()
    venuesSQL = open(sqlFileVenues, 'r', encoding='utf-8-sig').read()
    artistAgentsSQL = open(sqlFileArtistAgents, 'r', encoding='utf-8-sig').read()
    artistAgenciesSQL = open(sqlFileArtistAgencies, 'r', encoding='utf-8-sig').read()
    ticketAgenciesSQL = open(sqlFileTicketAgencies, 'r', encoding='utf-8-sig').read()

    # stageSQL = open(stageSQLFile, 'r', encoding='utf-8-sig').read().replace("QUERY_WHERE_CLAUSE_HERE", whereClause)
    # querySQL = open(querySQLFile, 'r', encoding='utf-8-sig').read()
    # tourOnSalesQuerySQL = open(tourOnSalesQuerySQLFile, 'r', encoding='utf-8-sig').read().replace("QUERY_WHERE_CLAUSE_HERE", whereClause)
    # eventOnSalesQuerySQL = open(eventOnSalesQuerySQLFile, 'r', encoding='utf-8-sig').read().replace("QUERY_WHERE_CLAUSE_HERE", whereClause)
    
    print("EOS queries in progress")
    eosArtists =        sql.query(artistsSQL)
    eosCoPromoters =    sql.query(coPromotersSQL)
    eosVenues =         sql.query(venuesSQL)
    eosArtistAgents =   sql.query(artistAgentsSQL)
    eosArtistAgencies = sql.query(artistAgenciesSQL)
    eosTicketAgencies = sql.query(ticketAgenciesSQL)
    print("EOS queries complete")

    eos_accounts = eosArtists + eosCoPromoters + eosVenues + eosArtistAgencies + eosTicketAgencies
    eos_contacts = eosArtistAgents

    eos_dfs = {
        'Artists': pd.DataFrame(eosArtists)
        ,'Co-Promoters': pd.DataFrame(eosCoPromoters)
        ,'Venues': pd.DataFrame(eosVenues)
        ,'ArtistAgents': pd.DataFrame(eosArtistAgents)
        ,'ArtistAgencies': pd.DataFrame(eosArtistAgencies)
        ,'TicketAgencies': pd.DataFrame(eosTicketAgencies)
    }
    eos_dfs = {'{} ({})'.format(key, len(val)): val for key,val in eos_dfs.items()}
    pdh.multiple_df_to_excel(eos_dfs, '/Users/daniel.hicks_1/Documents/Rome/Rome Downloads/EOS Master Data.xlsx')

    # df_eosArtists =        eosArtists
    # df_eosCoPromoters =    eosCoPromoters
    # df_eosVenues =         eosVenues
    # df_eosArtistAgents =   eosArtistAgents
    # df_eosArtistAgencies = eosArtistAgencies
    # df_eosTicketAgencies = eosTicketAgencies
    # df_eosArtists['RecordType.Name'] = 'Artist'
    # df_eosCoPromoters['RecordType.Name'] = 'Co-Promoter'
    # df_eosVenues['RecordType.Name'] = 'Venue'
    # df_eosArtistAgents['RecordType.Name'] = 'Artist Agent'
    # df_eosArtistAgencies['RecordType.Name'] = 'Agency'
    # df_eosTicketAgencies['RecordType.Name'] = 'Ticket Agency'

    # df_eosaccounts = pd.concat(df_eosArtists, df_eosCoPromoters, df_eosVenues, df_eosArtistAgencies, df_eosTicketAgencies)


    accounts = session.select_records("""SELECT Id, Name, SourceSystemId__c, EOSId__c, RecordType.Name
    FROM Account
    WHERE RecordType.Name IN ('Artist','Co-Promoter','Venue','Agency','Ticket Agency')
    """)
    contacts = session.select_records("""SELECT Id, Name, SourceSystemId__c, EOSId__c
    FROM Contact
    WHERE Account.RecordType.Name = 'Agency'
    OR EOSId__c <> NULL
    """)

    # df_romeaccounts = pd.DataFrame(accounts)

    account_eos_ids = {item['RecordType.Name'] + item["EOSId__c"]: item["Id"] for item in accounts if item["EOSId__c"] != ""}
    contact_eos_ids = {item["EOSId__c"]: item["Id"] for item in contacts if item["EOSId__c"] != ""}

    for item in eos_accounts:
        key = item["RecordType"] + item["EOSId__c"]
        item["RomeId"] = account_eos_ids[key] if key in account_eos_ids else ""
    for item in eos_contacts:
        item["RomeId"] = contact_eos_ids[item["EOSId__c"]] if item["EOSId__c"] in contact_eos_ids else ""

    account_ids_map = {
        item.Id: item
        for item in accounts
    }
    account_names_map = {}
    for item in accounts:
        key = (item['RecordType.Name'] + item["Name"]).lower()
        if key not in account_names_map:
            account_names_map[key] = []
        account_names_map[key].append(item)

    for item in eos_accounts:
        key = (item['RecordType'] + item["Name"]).lower()
        if item['RomeId'] != '':
            item['Note'] = 'Mapped'
            item['Case Mismatch'] = True if account_ids_map[item['RomeId']]['Name'] != item['Name'] else ''
        elif item["RomeId"] == '' and key in account_names_map and len(account_names_map[key]) == 1:
            item['Note'] = '1-1 mapping found'
            item['Case Mismatch'] = True if account_names_map[key][0]['Name'] != item['Name'] else ''
            item['Matched record'] = account_names_map[key][0]['Id']
        elif item["RomeId"] == '' and key in account_names_map and len(account_names_map[key]) > 1:
            item['Note'] = '1-many mapping found'
            item['Matched record'] = ', '.join([match['Name'] for match in account_names_map[key]])
        else:
            item['Note'] = 'No match found'
    
    record_types = set({item['RecordType'] for item in eos_accounts})
    account_dfs = {
        key: pd.DataFrame([
            item for item in eos_accounts 
            if item['RecordType'] == key 
            # and item['Note'] != 'Mapped'
        ])
        for key in record_types
    }
    pdh.multiple_df_to_excel(account_dfs, '/Users/daniel.hicks_1/Documents/Rome/Rome Downloads/EOS Master Data Diff to Rome.xlsx')
    

    print("Push data to {}?".format(session.instance))
    if str(input()).upper() == 'Y':
        session.add_bypass_settings()
        for key, df in account_dfs.items():
            matched = df[df['Note'] == '1-1 mapping found'].copy()
            matched['Id'] = matched['Matched record']
            matched = matched[['Id','EOSId__c']].to_dict('records')
            new = df[df['Note'] == 'No match found'].to_dict('records')
            # print("Push {} {} to {}?".format(len(matched + new), key, session.instance))
            # if str(input()).upper() == 'Y':
            session.insert('Account', new)
            session.update(matched)
        session.remove_bypass_settings()




    # session.write_file(sqlFileArtists.replace("EOS Data/EOS", "EOS Data/Data | EOS").replace(".sql",".csv"), eosArtists)
    # session.write_file(sqlFileCoPromoters.replace("EOS Data/EOS", "EOS Data/Data | EOS").replace(".sql",".csv"), eosCoPromoters)
    # session.write_file(sqlFileVenues.replace("EOS Data/EOS", "EOS Data/Data | EOS").replace(".sql",".csv"), eosVenues)
    # session.write_file(sqlFileArtistAgents.replace("EOS Data/EOS", "EOS Data/Data | EOS").replace(".sql",".csv"), eosArtistAgents)
    # session.write_file(sqlFileArtistAgencies.replace("EOS Data/EOS", "EOS Data/Data | EOS").replace(".sql",".csv"), eosArtistAgencies)
    # session.write_file(sqlFileTicketAgencies.replace("EOS Data/EOS", "EOS Data/Data | EOS").replace(".sql",".csv"), eosTicketAgencies)



    for item in contacts:
        name = item["Name"]
        item["RecordType.Name"] = "Contact"
        if name not in account_names_map:
            account_names_map[name] = []
        account_names_map[name].append(item)

    # session.write_file(sqlFileArtists.replace("EOS Data/EOS", "EOS Data/Data Unmapped | EOS").replace(".sql",".csv"), [item for item in eosArtists if item["RomeId"] == ""])
    # session.write_file(sqlFileCoPromoters.replace("EOS Data/EOS", "EOS Data/Data Unmapped | EOS").replace(".sql",".csv"), [item for item in eosCoPromoters if item["RomeId"] == ""])
    # session.write_file(sqlFileVenues.replace("EOS Data/EOS", "EOS Data/Data Unmapped | EOS").replace(".sql",".csv"), [item for item in eosVenues if item["RomeId"] == ""])
    # session.write_file(sqlFileArtistAgencies.replace("EOS Data/EOS", "EOS Data/Data Unmapped | EOS").replace(".sql",".csv"), [item for item in eosArtistAgencies if item["RomeId"] == ""])
    # session.write_file(sqlFileArtistAgents.replace("EOS Data/EOS", "EOS Data/Data Unmapped | EOS").replace(".sql",".csv"), [item for item in eosArtistAgents if item["RomeId"] == ""])
    # session.write_file(sqlFileTicketAgencies.replace("EOS Data/EOS", "EOS Data/Data Unmapped | EOS").replace(".sql",".csv"), [item for item in eosTicketAgencies if item["RomeId"] == ""])


    # ################################################################################################
    # exactMatchArtists = [
    #     {
    #         "Id": account_names_map[item["Name"].lower()][0]["Id"]
    #         , "EOSId__c": item["EOSId__c"]
    #         , "Rome Name": account_names_map[item["Name"].lower()][0]["Name"]
    #         , "EOS Name": item["Name"]
    #         , "Already Mapped": account_names_map[item["Name"].lower()][0]["EOSId__c"] != ""
    #         , "Case Mismatch": item["Name"] != account_names_map[item["Name"].lower()][0]["Name"]
    #     } 
    #     for item in eosArtists
    #     if item["Name"].lower() in account_names_map and len(account_names_map[item["Name"].lower()]) == 1 and account_names_map[item["Name"].lower()][0]["RecordType.Name"] == "Artist"
    # ]

    # session.write_file(sqlFileArtists.replace("EOS Data/EOS", "EOS Data/Data Matches | EOS").replace(".sql",".csv"), exactMatchArtists)

    # # session.add_bypass_settings()
    # # session.update_records([item for item in exactMatchArtists if item["RomeId"] == ""])
    # # session.remove_bypass_settings()

    # # # For first insert attempt, try to insert Active. Then use In Review for the remaining rows which failed due to duplicate checker
    # # for item in eosArtists:
    # #     item["Status__c"] = 'Active'
    # # session.add_bypass_settings()
    # # session.insert_records("Account", [item for item in eosArtists if item["RomeId"] == ""])
    # # session.remove_bypass_settings()

    # # For remaining failure rows, attempt to find other partial matches before doing forced insert

    # ################################################################################################
    # exactMatchCoPromoters = [
    #     {
    #         "Id": account_names_map[item["Name"]][0]["Id"]
    #         , "EOSId__c": item["EOSId__c"]
    #         , "Rome Name": account_names_map[item["Name"]][0]["Name"]
    #         , "EOS Name": item["Name"]
    #         # , "RomeId": item["RomeId"]
    #         , "Already Mapped": account_names_map[item["Name"].lower()][0]["EOSId__c"] != ""
    #         , "Case Mismatch": item["Name"] != account_names_map[item["Name"].lower()][0]["Name"]
    #     } 
    #     for item in eosCoPromoters
    #     if item["Name"] in account_names_map and len(account_names_map[item["Name"]]) == 1 and account_names_map[item["Name"]][0]["RecordType.Name"] == "Co-Promoter"
    # ]

    # session.write_file(sqlFileCoPromoters.replace("EOS Data/EOS", "EOS Data/Data Matches | EOS").replace(".sql",".csv"), exactMatchCoPromoters)

    # # session.add_bypass_settings()
    # # session.update_records([item for item in exactMatchCoPromoters if item["RomeId"] == ""])
    # # session.remove_bypass_settings()

    # # # For first insert attempt, try to insert Active. Then use In Review for the remaining rows which failed due to duplicate checker
    # # # for item in eosCoPromoters:
    # # #     item["Status__c"] = 'Active'
    # # session.add_bypass_settings()
    # # session.insert_records("Account", [item for item in eosCoPromoters if item["RomeId"] == ""])
    # # session.remove_bypass_settings()

    # # For remaining failure rows, attempt to find other partial matches before doing forced insert

    # ################################################################################################
    # exactMatchArtistAgencies = [
    #     {
    #         "Id": account_names_map[item["Name"]][0]["Id"]
    #         , "EOSId__c": item["EOSId__c"]
    #         , "Rome Name": account_names_map[item["Name"]][0]["Name"]
    #         , "EOS Name": item["Name"]
    #         # , "RomeId": item["RomeId"]
    #         , "Already Mapped": account_names_map[item["Name"].lower()][0]["EOSId__c"] != ""
    #         , "Case Mismatch": item["Name"] != account_names_map[item["Name"].lower()][0]["Name"]
    #     } 
    #     for item in eosArtistAgencies
    #     if item["Name"] in account_names_map and len(account_names_map[item["Name"]]) == 1 and account_names_map[item["Name"]][0]["RecordType.Name"] == "Agency"
    # ]

    # session.write_file(sqlFileArtistAgencies.replace("EOS Data/EOS", "EOS Data/Data Matches | EOS").replace(".sql",".csv"), exactMatchArtistAgencies)

    # # session.add_bypass_settings()
    # # session.update_records([item for item in exactMatchArtistAgencies if item["RomeId"] == ""])
    # # session.remove_bypass_settings()

    # # # For first insert attempt, try to insert Active. Then use In Review for the remaining rows which failed due to duplicate checker
    # # for item in eosArtistAgencies:
    # #     item["Status__c"] = 'Active'
    # # session.add_bypass_settings()
    # # session.insert_records("Account", [item for item in eosArtistAgencies if item["RomeId"] == ""])
    # # session.remove_bypass_settings()

    # # For remaining failure rows, attempt to find other partial matches before doing forced insert

    # ################################################################################################
    # exactMatchTicketAgencies = [
    #     {
    #         "Id": account_names_map[item["Name"]][0]["Id"]
    #         , "EOSId__c": item["EOSId__c"]
    #         , "Rome Name": account_names_map[item["Name"]][0]["Name"]
    #         , "EOS Name": item["Name"]
    #         # , "RomeId": item["RomeId"]
    #         , "Already Mapped": account_names_map[item["Name"].lower()][0]["EOSId__c"] != ""
    #         , "Case Mismatch": item["Name"] != account_names_map[item["Name"].lower()][0]["Name"]
    #     } 
    #     for item in eosTicketAgencies
    #     if item["Name"] in account_names_map and len(account_names_map[item["Name"]]) == 1 and account_names_map[item["Name"]][0]["RecordType.Name"] == "Ticket Agency"
    # ]

    # session.write_file(sqlFileTicketAgencies.replace("EOS Data/EOS", "EOS Data/Data Matches | EOS").replace(".sql",".csv"), exactMatchTicketAgencies)

    # # session.add_bypass_settings()
    # # session.update_records([item for item in exactMatchTicketAgencies if item["RomeId"] == ""])
    # # session.remove_bypass_settings()

    # # # For first insert attempt, try to insert Active. Then use In Review for the remaining rows which failed due to duplicate checker
    # # for item in eosTicketAgencies:
    # #     item["Status__c"] = 'Active'
    # # session.add_bypass_settings()
    # # session.insert_records("Account", [item for item in eosTicketAgencies if item["RomeId"] == ""])
    # # session.remove_bypass_settings()

    # # For remaining failure rows, attempt to find other partial matches before doing forced insert

    # ################################################################################################
    # exactMatchArtistAgents = [
    #     {
    #         "Id": account_names_map[item["FirstName"] + " " + item["LastName"]][0]["Id"]
    #         , "EOSId__c": item["EOSId__c"]
    #         , "Rome Name": account_names_map[item["FirstName"] + " " + item["LastName"]][0]["Name"]
    #         , "EOS Name": item["FirstName"] + " " + item["LastName"]
    #         , "RomeId": item["RomeId"]
    #     } 
    #     for item in eosArtistAgents
    #     if item["FirstName"] + " " + item["LastName"] in account_names_map and len(account_names_map[item["FirstName"] + " " + item["LastName"]]) == 1 and account_names_map[item["FirstName"] + " " + item["LastName"]][0]["RecordType.Name"] == "Contact"
    # ]

    # session.write_file(sqlFileArtistAgents.replace("EOS Data/EOS", "EOS Data/Data Matches | EOS").replace(".sql",".csv"), exactMatchArtistAgents)

    # # session.add_bypass_settings()
    # # session.update_records([item for item in exactMatchArtistAgents if item["RomeId"] == ""])
    # # session.remove_bypass_settings()

    # # session.add_bypass_settings()
    # # session.upsert_records("Contact", [item for item in eosArtistAgents if item["RomeId"] == ""], "EOSId__c")
    # # session.upsert_records("Contact", eosArtistAgents, "EOSId__c")
    # # session.remove_bypass_settings()

    # return

    pass
def get_eos_data(offer_ids_list):
    data = ObjDict()
    sql = SQL_Server_API('EOS-prod')

    whereClause = """
    WHERE o.Id IN ({})
    """.format(",".join(offer_ids_list))
    # -- AND o.RomeId IS NULL

    rome_data = ObjDict({
        'User': threading.new(session.select, "SELECT Id, EOSId__c FROM User WHERE EOSId__c <> NULL", return_type='dataframe', mute=True)
        , 'Account': threading.new(session.select, "SELECT Id, EOSId__c FROM Account WHERE EOSId__c <> NULL AND RecordType.Name IN ('Venue','Artist','Agency','Office')", return_type='dataframe', mute=True)
        , 'Contact': threading.new(session.select, "SELECT Id, EOSId__c FROM Contact WHERE EOSId__c <> NULL", return_type='dataframe', mute=True)
    })

    stageSQL = open(stageSQLFile, 'r', encoding='utf-8-sig').read().replace("QUERY_WHERE_CLAUSE_HERE", whereClause)
    querySQL = open(querySQLFile, 'r', encoding='utf-8-sig').read()
    tourOnSalesQuerySQL = open(tourOnSalesQuerySQLFile, 'r', encoding='utf-8-sig').read().replace("QUERY_WHERE_CLAUSE_HERE", whereClause)
    eventOnSalesQuerySQL = open(eventOnSalesQuerySQLFile, 'r', encoding='utf-8-sig').read().replace("QUERY_WHERE_CLAUSE_HERE", whereClause)
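
    # Run the staging script first (its result set is not read back), then pull the
    # flattened offer rows plus the tour-level and event-level on-sales.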
    
    print("EOS queries in progress")
    sql.query(stageSQL, cast_to_string=True)
    rows = sql.query(querySQL, cast_to_string=True)
    tourOnSales = sql.query(tourOnSalesQuerySQL, cast_to_string=True)
    eventOnSales = sql.query(eventOnSalesQuerySQL, cast_to_string=True)
    print("EOS queries complete")

    df1 = pd.DataFrame(rows)
    df2 = pd.DataFrame(tourOnSales)
    df3 = pd.DataFrame(eventOnSales)

    df1["Tour__c.OldTourName"] = df1["Tour__c.TourName__c"]
    # df1["Tour__c.TourName__c"] = df1["Tour__c.TourName__c"].apply(html.unescape)
    df1 = df1.applymap(html.unescape)

    def set_blank_ts_type(row):
        # desc = row['TicketScale__c.Notes__c']
        if row['TicketScale__c.Type__c'] == '':
            return f'PL{row["TicketScale__c.PriceLevelRank"]}'
        return row['TicketScale__c.Type__c']
    df1['TicketScale__c.Type__c'] = df1.apply(set_blank_ts_type, axis=1)

    cols_to_string = [] + [c for c in list(df1.columns)+list(df2.columns) if 'EOSId' in c or 'SourceSystemId' in c]
    for col in cols_to_string:
        if col in df1:
            df1[col] = df1[col].astype(str).replace(r'\.0', '', regex=True)
        if col in df2:
            df2[col] = df2[col].astype(str).replace(r'\.0', '', regex=True)
        if col in df3:
            df3[col] = df3[col].astype(str).replace(r'\.0', '', regex=True)

    data.Issues = []
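    # Collect every master-data EOS Id referenced by the rows (excluding the Tour, Event
    # and TicketBand keys themselves) and flag any Id with no matching record in Rome.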
    eosidcolumns = [
        c for c in df1.columns.values 
        if '__r.EOSId__c' in c 
        and 'Tour__r.EOSId__c' not in c 
        and 'Event__r.EOSId__c' not in c 
        and 'TicketBand__r.EOSId__c' not in c]
    eosids = set(df1[eosidcolumns].to_numpy().flatten())
    romeeosids = set()
    for d in rome_data.values():
        if len(d.result()) > 0:
            romeeosids.update(d.result()['EOSId__c'].tolist())
    missingeosids = {item for item in eosids if item not in romeeosids and item != ''}
    if missingeosids:  # sort_values would fail on an empty frame with no columns
        data.Issues.append(pd.DataFrame([
            {'Issue': 'Rome is missing Master Data', 'EOSId__c': item} for item in missingeosids
        ]).sort_values(by='EOSId__c'))

    for item in missingeosids:
        df1[eosidcolumns] = df1[eosidcolumns].replace(item, '')
    # pdh.to_excel({
    #     'FULL': df1
    #     , 'Tour Onsales': df2
    #     , 'Event Onsales': df3
    # }, 'UK EOS Data Raw.xlsx')

    data.Tour__c = (
        df1[[c for c in df1 if 'Tour__c.' in c]]
        .drop_duplicates(subset='Tour__c.EOSId__c')
        .copy()
    )
    data.TourDeal__c = (
        df1[[c for c in df1 if 'TourDeal__c.' in c]]
        .drop_duplicates(subset='TourDeal__c.Tour__r.EOSId__c')
        .copy()
    )
    data.TourLeg__c = (
        df1[[c for c in df1 if 'TourLeg__c.' in c]]
        .drop_duplicates(subset='TourLeg__c.Tour__r.EOSId__c')
        .copy()
    )
    data.Event__c = (
        df1[[c for c in df1 if 'Event__c.' in c]]
        .drop_duplicates(subset='Event__c.EOSId__c')
        .copy()
    )
    data.EventDateTime__c = (
        df1[[c for c in df1 if 'EventDateTime__c.' in c]]
        .drop_duplicates(subset='EventDateTime__c.SourceSystemId__c')
        .copy()
    )
    data.Deal__c = (
        df1[[c for c in df1 if c.startswith('Deal__c.')]]
        .drop_duplicates(subset='Deal__c.SourceSystemId__c')
        .copy()
    )
    data.TicketScale__c = (
        df1[[c for c in df1 if 'TicketScale__c.' in c]]
        .drop_duplicates(subset='TicketScale__c.EOSId__c')
        .copy()
    )

    data.TourOnSale__c = df2
    data.EventOnSale__c = df3

    rename(data.Tour__c, 'Tour__c')
    rename(data.TourDeal__c, 'TourDeal__c')
    rename(data.TourLeg__c, 'TourLeg__c')
    rename(data.Event__c, 'Event__c')
    rename(data.EventDateTime__c, 'EventDateTime__c')
    rename(data.Deal__c, 'Deal__c')
    rename(data.TicketScale__c, 'TicketScale__c')

    return data
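

# Minimal usage sketch (an assumption, not part of the original script); the SQL
# file paths and the module-level `session` connection must be configured first:
#   data = get_eos_data(['54441', '57933'])
#   pdh.to_excel(data, 'EOS Offer Data.xlsx')
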
def main():
    session = Salesforce_API("*****@*****.**")
    sql = SQL_Server_API('EOS-stage')

    accounts = session.select("SELECT Id, EOSId__c FROM Account",
                              return_type='dataframe')
    account_ids = set(accounts['Id'].tolist())
    account_eos_ids = set(
        accounts.fillna('').query("EOSId__c != ''")['EOSId__c'].tolist())

    venue = sql.query(
        "SELECT *, 'Venue-'+CAST(Id AS VARCHAR(10)) AS EOSId__c FROM Venue")
    artist = sql.query(
        "SELECT *, 'Artist-'+CAST(Id AS VARCHAR(10)) AS EOSId__c FROM Artist")
    copromoter = sql.query(
        "SELECT *, 'Copromoter-'+CAST(Id AS VARCHAR(10)) AS EOSId__c FROM Copromoter")
    artistagency = sql.query(
        "SELECT *, 'ArtistAgency-'+CAST(Id AS VARCHAR(10)) AS EOSId__c FROM ArtistAgency")
    ticketagency = sql.query(
        "SELECT *, 'TicketAgent-'+CAST(Id AS VARCHAR(10)) AS EOSId__c FROM TicketAgent")

    output = {
        'Venue': venue,
        'Artist': artist,
        'Copromoter': copromoter,
        'ArtistAgency': artistagency,
        'TicketAgent': ticketagency,
    }
    to_wipe_RomeId = {obj: [] for obj in output}
    to_delete = {obj: [] for obj in output}
    for obj, data in output.items():
        for record in data:
            if record['EOSId__c'] in account_eos_ids or record['EOSId__c'] in (
                    'ArtistAgency-1', 'ArtistAgency-149', 'ArtistAgency-158'):
                to_wipe_RomeId[obj].append(record)
            elif record['RomeId'] not in (
                    None, '',
                    'None') and record['RomeId'] not in account_ids and record[
                        'CreatedBy'] == 'Rome_EOS_Integration':
                to_delete[obj].append(record)
    pdh.to_excel(to_wipe_RomeId, 'EOSDataToWipeRomeId.xlsx')
    pdh.to_excel(to_delete, 'EOSDataToDelete.xlsx')
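
    # The helpers below build a comma-separated Id list from the flagged records
    # and issue a single DELETE / UPDATE statement against the EOS-stage database.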

    def delete(table, data):
        if len(data) > 0:
            ids = pd.DataFrame(data).Id.tolist()
            ids_text = ','.join([str(i) for i in ids])
            s = f'DELETE FROM {table} WHERE Id IN({ids_text})'
            print(s)
            sql.execute(s, commit=True)

    def blank_romeid(table, data):
        if len(data) > 0:
            ids = pd.DataFrame(data).Id.tolist()
            ids_text = ','.join([str(i) for i in ids])
            s = f'UPDATE {table} SET RomeId = NULL WHERE Id IN({ids_text})'
            print(s)
            sql.execute(s, commit=True)

    if prompt('Ready to update EOS?', boolean=True):
        for obj, data in to_delete.items():
            delete(obj, data)
        for obj, data in to_wipe_RomeId.items():
            blank_romeid(obj, data)
    return


def main():
    # CURRENT KNOWN ISSUES
    # If an event is not marked as a roll date in EOS, but it has different onsales than other events, we are going to mark it a roll date
    # We have no handling for the "ITB" OfferType. What to do?
    # What to do with Offers which do not have a tour currency of GBP? These are broken. Out of scope?
    # Changed fields: ArtistGuaranteeCurrencyId, LastUpdatedBy, LastUpdatedDateTime, OfferTypeId, OfficeId, OwnerId, PromoterProfitCurrencyId, UpdateId
    # TODO: Map the Artist Guarantee amounts (and artist currency?)
    # POTENTIAL ISSUE: Offer Artist Payments @ 1.00 Exch Rate, for artist currency other than GBP?
    # TODO: Identify problematic tours which are on the list to migrate:
    #   Tours with EOS Ids missing in prod
    #   Tours with tour currency that is not GBP
    #   Tours with all past dates? Finance - need to identify if we will be treating past dates in an onsale tour as historical

    # username = "******"
    # username = '******'

    # sql = SQL_Server_API(eos_prod_creds)
    # sf = Salesforce_API('*****@*****.**')
    sql = SQL_Server_API(eos_stage_creds)
    sf = Salesforce_API('*****@*****.**')

    keep_connection_alive(sf)
    # recs = sf.select("SELECT Id, EOSLastModifiedDate__c, EOSIntegrationSyncDate__c FROM Tour__c WHERE EOSId__c <> NULL", return_type='dataframe')
    # recs['EOSIntegrationSyncDate__c'] = recs['EOSLastModifiedDate__c']
    # sf.update(recs)

    # {'ArtistAgent-974', '2469', 'Artist-22297', 'Artist-22296', 'ArtistAgent-748', 'nan', 'ArtistAgency-2774', 'Artist-19279', '1299'}

    delete_tours_first = True
    skip_already_created_tours = False
    # offer_ids = [54766]
    # offer_ids = [44189]
    # offer_ids = [52474]
    # offer_ids = [54441, 57933, 57965, 58400, 58737, 58050, 57930, 57263, 53728, 55808]

    # all_offer_ids_with_costings = [53981,57933,56534,56602,57906,58892,53556,57278,52720,58451,58490,55051,59093,58432,58803,55163,57965,58400,58737,54711,54297,52356,54441,57424,53846,57707,57276,55166,54006,58070,58406,57406,56517,58029,58073,58114,57273,57256,57337,53176,58802,54555,54980,57350,59121,53943,57996,56116,59140,58492,53909,52453,56561,56469,52474,58741,57923,58404,56813,53911,52477,58824,56527,58115,58050,52693,56321,55513,54326,58875,58121,57013,43748,57404,56537,58032,53890,57242,57843,54573,57788,58839,57894,57165,57992,53449,57006,57957,54134,53617,55138,44189,58560,57930,57469,57263,55224,56487,58842,51264,54069,55469,56957,58820,53816,53870,39597,56518,58821,58905,58739,58430,57279,52382,54888,57713,57030,58757,57403,55486,57847,58039,58505,56449,57773,56117,53683,58762,57086,58813,57943,57000,54932,56525,56998,53612,55222,54639,53722,53847,57037,57878,58691,53568,52697,57417,53944,54554,58770,57338,51663,54039,53728,52605,54206,58405,54195,53739,57972,55808,57291,54020,58606,58953,45648,57206,55741,57255,53602,53986,52427,54847,57709,56381,53623,57223,58086,58493,53812,55823,57011,55139,52419,53500,54443,52021,58804,53163,59151,56567,57269,51776,56173,55745,57024,53825,56440,54255,57167,56169,55405,52109,55466,58504,54208,56412,57959,58777,58423,58055,57186,54256,53182,54181,54633,53824,57418,57792,58794,58812,54634]
    # offer_ids = all_offer_ids_with_costings

    # safe_offer_ids_with_costings = [53981,57933,56534,56602,57906,58892,53556,57278,52720,58451,58490,55051,59093,58803,57965,58400,58737,54711,52356,54441,57424,53846,57276,58070,58406,57406,56517,58029,58073,58114,57273,57337,53176,58802,52453,56561,56469,52474,58741,57923,58404,56813,53911,52477,58824,58050,52693,56321,58121,53617,58560,57930,57469,57263,55224,56487,58842,54069,55469,56957,58820,53816,53870,56518,58905,58430,57279,52382,54888,57713,57030,58757,57403,55486,57847,58039,56449,56117,53683,58762,57086,57943,57000,54932,56525,53612,55222,54639,53722,53847,57037,57878,53568,52697,57417,53944,54554,58770,57338,51663,53728,52605,54206,57972,55808,57291,54020,58953,57206,55741,53986,54847,53825,56440,54255,56169,52109,55466,58504,54208,56412,57959,58777,58423,58055,57186,54256,54181,54633,53824,57418,58794,58812,54634]
    # offer_ids = safe_offer_ids_with_costings
    # offer_ids_with_no_costings = [52015,55146,55487,54925,54837,54766,54561,55451,54838,55458,54382,55134,55033,54743,55624,53591,55538,52725,54501,41498,53204,54815,57025,55588,57253,53328,53864,54926,57145,56889,56399,54243,55131,54662,56909,55164,55796,58088,58041,55161,58508,57354,57292,57863,56871,58455,58487,58417,58522,58523,59133,58998,57970,57852,56885,56869,58445,58447,59085,58868,55589,58774,59114,57886,53410,57887,58890,58823,58756,57939,56886,56874,57940,58510,58518,58519,59079,58450,57873,54249,53680,54313,57941,57911,58898,58771,58107,58858,54965,58833,58834,58017,52629,55168,58056,58798,56867,56959,57014,57034,53631,55313,54530,54307,57384,58427,54581,54593,54216,58092,57197,58089,59003,58091,55816,53530,56454,58809,58151,57527,57919,57907,57849,58801,58743,55599,53904,57739,58825,57869,54874,57741,58165,59127,57711,57039,52852,54975,56588,56436,54226,58429,58955,58395,58081,58064,58805,57925,53923,54386,57736,58537,54897,54200,58876,56106,54833,55440,53858,58191,58639,58909,57720,58118,58399,57968,58065,58456,58796,54203,55590,58453,58420,58443,56523,56425,55148,55123,54209,55477,57366,58002,58066,58896,56560,54350,57860,58507,56450,54199,56470,58817,59112,58431,54883,54611,56815,58144,58521,56509,55557,58440,58036,56862,56411,55402,59002,59158,59104,59142,56533,56566,57920,58410,55586,54198,59030,54207,57910,57271,58067,58108,57867,56400,58845,58763,58828,54080,57875,57876,53201,56520,57227,57184,54800,54221,57423,57201,57408,54522,55912,57230,57264,57457,57786,58690,58085,53815,54337,53203,58018,57331,59144,59083,57493,58793,56589,58035,54817,54161,54401,54622,58844,58530,57035,59152,56505,53202,55159,55584,57433,54769,54879,59162,58598,57884,57311,56519,56260,57730,56834,58123,58038,57169,57723,57877,58996,55119,53632,54759,57218,59080,59089,57989,55943,55806,56579,54540,58075,59134,58448,57297,57732,56906,58811,56424,57444,57127,58444,58795,53494,54556,57915,55735,58991,57848,54409,57528,59090,59081,58994,57936,53457,59136,59109,58877,58879,54830,53492,59153,57842,58037,58641,58826,58854,58259,59115,57855,58841,45568,59150,58740,54954,59157,55467,58850,57846,56456,55544,55457,57974,58764,52804,54803,58077,58019,57232,57219,52676,58843,58829,58558,53134,53538,53855,57021,56577,52962,57850,55760,58766,53687,55819,58792,57355,53881,52957,56865,53158,58765,58761,57987,54310,56866,55231,59145,57262,55117,54786,55797,55798,55800,57053,56868,55799,52902,56863,57328,57329,53826,51729,57185,55661,57892,53998,54818,58001,58779,57284,53892,58871,58884,58889,58886,58872,58900]
    # offer_ids_no_costings_18 = [54895,54638,55461,54840,55160,52733,54375,54240,54964,56595,53616,54779,58479,58480,58495,58496,58473,58895,54499,58497,58435,59177,59233,58498,58499,58753,58117,59163,58754,56383,57675,58109,55515,57529,57893,57369,58442,58768,52584,56582,54642,54315,58744,57198,59201,59286,54887,59278,59269,59231,58989,59254,59274,55313,54664,54307,57018,57859,58776,57293,58993,53358,57772,57414,56310,56219,59222,58475,54826,57714,53021,55587,56864,57916,57162,54377,54359,59197,56256,57845,57865,57415,57243,56455,55944,59168,54517,57719,56526,56596,54604,59271,59261,59221,57928,56903,56557,57674,58008,58897,58723,55046,56538,55764,56604,54493,58952,58903,56468,57270,58667,58424,58456,57922,57826,56591,58005,54589,56430,55583,57089,57771,57856,57888,58083,56446,54636,57728,56524,59284,54230,57166,57984,55053,54967,58848,58506,58282,56114,56441,56431,58816,59132,58507,56416,54785,54620,53867,54282,55807,57800,57912,57742,57839,56442,54641,54277,56594,58469,57981,59225,59226,59253,57333,56218,54426,53482,54909,56531,56968,57150,57726,56162,54541,57426,57917,57287,58501,57737,56504,56405,59165,52462,58663,56216,58470,58840,57298,56513,57471,59227,59161,59148,58525,58126,59199,59203,54800,54212,57056,57216,57201,58031,58869,59189,56414,57782,57182,59264,59171,55598,58742,58491,57207,57882,57990,54388,54911,56444,56472,59234,56574,54254,57718,59220,59232,56828,58822,53692,56593,56420,56206,59244,59257,59267,59194,59159,58095,58142,58425,54373,56543,59172,57299,55776,56522,57971,58967,58902,59178,57991,57724,56217,54876,55047,54612,58116,58411,58167,58520,59170,59241,59169,59218,57456,57260,54371,56254,56255,59242,59146,57978,57356,58815,57787,55349,55911,59182,57163,57073,58051,58472,58030,57412,57191,59116,58082,58090,57840,54423,57738,59166,57999,56541,56178,59117,59249,58494,53496,52982,53312,59175,57187,57889,56423,54719,58068,55961,58441,57442,56506,58152,57500,59237,56996,57202,59111,59243,57960,54765,58867,53177,57007,58020,59282,53519,59108,54717,55082,58760,58797,54204,57962,59262,57909,54801,54867,55453,57862,59107,59235,54592,54777,59193,59245,55198,58028,59135,56486,54609,54809,59216,58166,59185,58052,58800,59105,49139,56395,57927,59200,58788,58345,57857,53735,59276,55008,54570,54799,56170,56528,57967,58883,56438,58906,59275,53466,55763,56516,58736,57979,58143,59122,59154,57286,55552,54534,58990,58827,58449,58113,58488,57793,59123,49521,58418,58511,58799,56153,56422,59103,57866,59095,53715,55347,57272,57161,57986,56421,56094,58080,53733,59266,56572,59155,52083,59259,58759,54335,52986,57851,53286,54211,57790,53433,53737,59260,59240,53167,53743,55414,56510,52459,56558,59209,52896,56445,58076,58806,56121,52884,53823,58901,54961,59191,59187,57020,54848,54413,59213,59212,59224,59211,59214]
    # offer_ids = offer_ids_no_costings_18
    # for_prod = [58504,58794,56469,57959,58842,57713,54441,54206,57186,55741,58050,53728,56449,53617,54069,55466,56169,56517,57418,54639,58802,53816,53870,58905,57206,58820,58406,58404,57000,53568,57037,53176,57273,53846,56602,57291,57847,57923,55486,58073,58812,52453,59093,53722,57906,58741,53847,54847,57030,56534,58953,58560,55469,53986,52605,56957,53911,57424,58029,57263,52356,57933,52474,54208,53825,53612,57086,58423,52382,58451,55222,58892,54554]
    # offer_ids = for_prod[0:5]
    # offer_ids = for_prod
    # dawn_list = [52382,53176,53556,53612,53825,53846,56412,57206,57273,57276,57278,58406,58423,58777]
    # dawn_list_17 = [55138,54039,58821,57269,58430,57037,57279,57792,58839,58820,58406,57206,45648,58050,58953,53825,53612,52382,58423,52021,57086,56567,53846,56412,57403,53500,59140,57273,53176,57242,57276,58777,57923,56813,53556,57278,52477,52697,53182,55486,57957,57878,58073,57256,57337,57338,53683,56487]
    # dawn_list_18 = [53568,53722,53722,53824,53824,52419,54633,54181,53449,55139,54573,57000,54932,57424,54441,56537,56998,56525,56537,56998,54441,57424,57424,56169,53890,54639,57709,54555,56169,52109,56169,57843,53943,52109,54208,54980,57709,54888,54888,55222,57959,55222,56381,57713,57713,57223,57350,57350,57223,57165,57350,57011,57992,59121,58070,59121,57186,58606,57186,56116,58493,54256,57011,57418,54443,56517,56517,57417,58794,58794,53944,53944,56518,58812,54554,54554,58770,57847,57847,58812,58039,58842,58842,54006,54195,58804,58804,53163,53163,59151,56117,54634,58762,58762,58405,53909,56440,56440,51663,53847,57404,57404,54255,56173,57788,58504,56173,54255,57788,58504,57707,57707,57469,57030,55166,53617,57030,58055,57030,57894,57406,57006,57006,58691,58691,58029,58114,54206,54206,53870,54020,58802,58802,53816,54134,52427]
    # dawn_list_19_uat = [58032,58560,57996,54069,58824,58492,58505,57263,55405,44189,53623]
    # dawn_list_19 = [52720,58905,57291,57291,58739,57965,53739,58400,57930,58737,56561,56561,55469,56957,56957,56602,56534,56602,57906,57906,52109,52109,56469,52474,54354,58757,58741,58741,58757,55224,58404,58892,58892,53911,57894,53911,56527,54297,54711,58451,58490,59093,39597,57013,52693,56321,55051,57013,58432,58432,53986,53986,58115,58813,58813,58813,58803,54326,58875,58121,58121,55163]
    # dawn_list_20 = [58032,58560,57996,54069,58824,58492,58505,57263,55405,44189,53623]
    # lewis_list_01 = [59196,59198,59094,59291,59251,59238,59268,59246,59270,58449]
    # dawn_list_02 = [55454,55014,59184,59286,52629,59379,59205,58093,59368,59328,59358,58032,57167,59326,59332,59256,59333,59287,59384,59281,59366,59315,57127,59292,53457,59295,59329,58492,59228,59371,53158,55117,59272,52034,54510,54508]
    # dawn_red_list_02 = [54847,51776,58086,59268,59246,53602,55513]
    # all_remaining_02 = [54625,52936,52019,53423,54341,54605,54845,54760,55157,55137,55357,54631,55353,54590,55616,55124,54889,54107,54381,53874,58087,52592,52922,59286,59294,56539,59278,59231,59274,56814,53875,52874,57368,59205,59379,57585,55352,59222,56568,57028,58810,57254,53757,56453,52825,57002,57295,59358,59328,58428,57958,54846,58421,51541,57870,56221,55147,57400,53446,54000,59321,53619,58610,57727,57108,56564,57982,57938,56447,59293,53866,59219,57275,56583,55165,57881,56172,59223,57179,57708,59323,58419,54376,59196,54623,59326,59336,52819,57734,53357,59332,57282,55464,59318,59291,59322,59251,59385,55455,59283,59256,58885,59333,59287,57334,58561,58855,57783,56545,57041,59303,59384,57985,55350,59281,59364,55783,53227,58069,55528,59366,58584,53900,55189,59380,57175,57858,54576,58458,58426,57983,59315,59252,58159,54802,54385,58457,56140,57087,59327,57178,55759,52478,59279,59280,57924,53729,55404,57729,59306,57854,59390,57785,56275,59305,59143,57386,59292,59388,58838,59324,54797,59361,59268,57966,59190,59250,58836,56448,57419,57716,56508,59317,59393,59295,59330,58481,59314,59300,53910,57770,59110,54822,59329,58927,59391,55140,58071,53605,59082,59285,59215,59087,59141,57721,57733,58778,55474,54721,59316,59228,59360,59290,59248,59246,57861,54197,58856,58156,52376,53332,53614,57413,59311,59230,58482,54038,59091,58559,53157,59371,57769,55480,54446,54003,54910,59386,59265,52695,57045,59301,59374,58808,59369,58010,52042,58141,57791,58446,59398,59270,59359,58775,55532,58531,56515,45559,58738,59195,59247,59363,52985,58503,53882,54149,58852,58468,57170,59375,52726,59272,54868,56443,53744,54311,54899,54900,54898,54903,54904,55143,59149,59387,57280,59373,54338,57407,59210]
    # club_shows_03 = [55160,59286,59294,59231,59278,59274,55352,59222,59358,59328,58897,59321,59293,59326,59336,59291,59318,59283,59256,59333,59094,59303,59384,59281,59366,57787,59315,59327,55513,59279,59305,54717,59324,59317,57857,59316,59360,59311,59374,59398,59359,59270,59363,58806,59387,59373,52356,58032,54847,59322,59287,59292,59295,59332,59385,59306]
    # lewis_list_03 = [51541,55466,55823,53812,59190,59250,59215,59141,59290,59369,55143,52453]
    # all_remaining_03 = [59231,59278,59274,59379,59205,59222,59219,59223,59323,59196,59251,59364,59380,59252,59280,59390,59388,59361,59268,59190,59250,59393,59330,59300,59329,59391,59285,59228,59290,59248,59246,59230,59371,59265,59369,59195,59247,59375,59272,59210]
    # lewis_list_07 = [51541,55466,55823,53812,59190,59215,59290,55143,52453]
    # dawn_list_09 = [55454,55014,59198,52629,58093,53812,51541,57400,55466,55823,52034,51776,43748,53728,55745,57127,55741,53457,58492,59141,53602,57773,54510,53158,56449,55117,51264,52605,55143,54508,59272,59371,59228,59379,59205,59190,59329,59290]
    df_list_09 = [
        55353, 55160, 54107, 54381, 53874, 52922, 59231, 59278, 59274, 53875,
        52874, 57368, 57585, 59222, 56568, 57028, 58810, 57254, 53757, 52825,
        57002, 57295, 58897, 57958, 54846, 58421, 56221, 53446, 54000, 53619,
        57727, 57108, 56564, 57938, 56447, 53866, 59219, 57275, 55165, 57881,
        56172, 59223, 57179, 57708, 59323, 58419, 54376, 54623, 57734, 53357,
        57282, 55464, 55455, 58885, 57334, 58561, 58855, 57783, 56545, 57041,
        55350, 59364, 55783, 53227, 58069, 55528, 58584, 57787, 53900, 59380,
        57218, 57175, 57858, 54576, 58458, 58426, 57983, 59252, 58159, 54802,
        58457, 56140, 53729, 57087, 57178, 55759, 52478, 59280, 57924, 55404,
        53729, 57729, 59188, 57854, 59390, 57785, 56275, 59388, 54717, 58838,
        54797, 59361, 57966, 58836, 56448, 57419, 59393, 56508, 59330, 58481,
        57857, 59300, 53910, 57770, 59110, 54822, 58927, 59391, 55140, 58071,
        53605, 59082, 59285, 59087, 57721, 57733, 58778, 55474, 54721, 59248,
        57861, 54197, 58856, 58156, 52376, 53614, 53332, 57413, 59230, 58482,
        54038, 59091, 58559, 53157, 57769, 55480, 54446, 54003, 54910, 59265,
        57045, 52695, 59301, 58808, 58010, 52042, 58141, 57791, 58446, 58775,
        55532, 58531, 56515, 45559, 58738, 59247, 52985, 58503, 53882, 58806,
        54149, 58852, 58468, 57170, 59375, 52726, 54868, 56443, 53744, 54311,
        54899, 54900, 54898, 54903, 54904, 59149, 57280, 54338, 57407
    ]
    # all_remaining_09 = [55160,59231,59278,59274,59222,53812,58897,57400,55466,55823,52034,59196,59251,43748,57787,53728,55745,55741,43748,55741,53812,55745,54717,59268,59250,57857,59141,59246,53602,53602,57773,57773,59369,59195,56449,58806,51264,52605,59479,55143,51264,52453,59210,57255,57255,57024,57024]
    # all_remaining_10 = [55160,59274,59592,59222,53812,58897,57400,55466,55823,52034,59196,59251,43748,57787,53728,55745,55741,43748,55741,55745,53812,54717,59268,59250,57857,59141,59246,53602,53602,57773,57773,59369,59195,56449,58806,59523,59549,59589,59590,51264,52605,59479,55143,51264,52453,59210,57255,57255,57024,57024]
    # all_remaining_13 = [55160,59222,53812,58897,57400,55466,55823,52034,59196,59251,43748,57787,53728,55745,55741,43748,55741,55745,53812,54717,59268,59250,57857,59141,59246,53602,53602,57773,57773,59369,59195,56449,58806,59523,59549,59589,59590,59592,51264,52605,59479,55143,51264,52453,59210,57255,57255,57024,57024]
    # dawn_list_13 = [53812,55466,55823,52034,55745,55741,51264,52605,55143,52453,57255,57024] # TO DO
    dawn_list_14 = [59196, 59251, 59268, 59250, 59246, 59369]
    all_nov_19 = [
        60219, 60203, 60183, 60088, 59772, 59604, 59592, 59590, 59589, 59549,
        59523, 59386, 59314, 59210, 59195, 59143, 59141, 58806, 57857, 57787,
        57773, 57773, 57716, 56449, 55189, 55160, 54717, 53602, 53602
    ]

    offer_ids = [56449, 53602]

    # sf.delete_tours("""SELECT Id FROM Tour__c WHERE EOSId__c IN @offer_ids""")

    # new_tours = pd.DataFrame(sf.select("SELECT EOSId__c, LastModifiedDate FROM Tour__c WHERE EOSId__c IN @offer_ids", mode='simple'))
    # tours_to_update = new_tours.assign(
    #     EOSIntegrationSyncDate__c = lambda df: df['LastModifiedDate'],
    #     EOSLastModifiedDate__c    = lambda df: df['LastModifiedDate'],
    #     IsHistoricalTour__c       = False,
    # ).drop(columns=['LastModifiedDate'])
    # sf.upsert('Tour__c', tours_to_update, 'EOSId__c', mode='simple')

    # tours_with_leb_0_issue = [54847]
    tours_with_null_venue_event = []  # [57894]
    tours_with_missing_booker_eos_id = []  # [54382, 53158, 52629, 53457, 52109]
    offer_ids = [
        item for item in offer_ids
        if item not in tours_with_missing_booker_eos_id
        and item not in tours_with_null_venue_event
    ]

    # 2469 Josh Casey DF ignore
    # 1299 Chris Loomes
    missing_1299_booker = [
        54717, 57787, 57857, 58473, 58480, 58496, 58499, 58753, 58754, 58806,
        58897, 59222, 59231, 59274, 59278
    ]
    missing_2469_booker = [55160]
    missing_974_artist_agent = []  # [59286]
    offer_ids = [
        item for item in offer_ids
        if item not in missing_1299_booker and item not in missing_2469_booker
        and item not in missing_974_artist_agent
    ]
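
    # The exclusion lists above drop offers whose referenced booker / artist-agent
    # records are not yet in Rome (see the "missing ... booker" notes); leaving
    # them in would trip the missing-EOS-Id assert further down.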

    # temporary_tours_with_missing_eos_ids = [54307, 55313, 58456, 54382, 58507, 54243, 55588, 54800, 57127, 53158, 52629, 53457, 55117, 57218, 57201] + [59286, 58499, 58897, 58496, 58480, 58473, 57787, 57857, 58753, 58754, 59278, 59222, 59231, 59274, 58806, 55160, 54717, 59282, 59235]
    # offer_ids = [item for item in offer_ids if item not in temporary_tours_with_missing_eos_ids]

    # offer_ids = [59111,54840]

    # # To re-migrate all tours
    # offer_ids = [int(item.EOSId__c) for item in sf.select("""
    # SELECT Id, EOSId__c
    # FROM Tour__c
    # WHERE IsHistoricalTour__c = False
    # AND EOSId__c != NULL
    # AND SourceSystemId__c LIKE '%Offer%'
    # """)]

    # sample_eos_offers = sql.query("""
    # SELECT TOP 1 o.Id
    # FROM Offer o
    # LEFT JOIN Currency ppc
    #         ON ppc.Id = o.PromoterProfitCurrencyId
    # WHERE o.Id IN (
    #     SELECT DISTINCT OfferId
    # 	FROM vwEOSShow
    # 	WHERE (ShowDate>=GetDate() OR PostponedDateTBC=1)
    # 	AND CountryId = 1
    # 	AND OfferStatusName IN ('Confirmed','On Sale','Settled','Draft')
    # )
    # AND o.RomeId IS NULL
    # AND o.ArtistGuaranteeAmount > 0
    # AND o.CopromoterId IS NOT NULL
    # AND ppc.IsoCode = 'GBP'
    # """)
    # offer_ids = [item['Id'] for item in sample_eos_offers]

    if sf.instance == 'lne' or skip_already_created_tours:
        current_tours = set([
            item.EOSId__c for item in sf.select(
                """SELECT EOSId__c FROM Tour__c WHERE EOSId__c <> NULL 
        AND IsHistoricalTour__c = False""")
        ])
        tours_to_not_import = [
            item for item in offer_ids if str(item) in current_tours
        ]
        if len(tours_to_not_import) > 0:
            print(
                f'Skipping the following tours because they already exist in the target org: {tours_to_not_import}'
            )
            offer_ids = [
                item for item in offer_ids if str(item) not in current_tours
            ]

    # offer_ids = offer_ids[:50]
    # offer_ids = [53943]
    assert len(offer_ids) > 0, 'No offer Ids remain to migrate after filtering'

    eos_data = uk.query_tours(sql, offer_ids, is_onsale=True)
    pdh.to_excel(eos_data,
                 'Migrate EOS On-Sale Tours - Raw EOS Query Data.xlsx')
    if len(eos_data.Tour__c) == 0:
        raise Exception('No Offers to migrate')
    eos_data_with_remapped_eos_ids, remapped_eos_ids = uk.replace_duplicate_eos_ids(
        eos_data)
    eos_data_with_split_headliners, artist_ids_missing_in_rome_by_tour = uk.split_headliner_and_coheadliner(
        sf, eos_data_with_remapped_eos_ids)
    eos_data_with_missing_ids_removed, eos_ids_missing_in_rome, removed_eos_ids_by_tour = uk.remove_eos_ids_missing_in_rome(
        sf, eos_data_with_split_headliners)

    all_missing_eos_ids_by_tour = combine_missing_ids_dicts(
        removed_eos_ids_by_tour, artist_ids_missing_in_rome_by_tour)
    eos_ids_missing_in_rome.update(
        itertools.chain.from_iterable(
            artist_ids_missing_in_rome_by_tour.values()))
    # print([int(k) for k,v in all_missing_eos_ids_by_tour.items() if '1299' in v])
    assert len(eos_ids_missing_in_rome) == 0 or sf.instance != 'lne', (
        f'Some EOS Ids are missing: {eos_ids_missing_in_rome}\n'
        f'The following tours have missing data: {[int(s) for s in all_missing_eos_ids_by_tour]}')

    eos_data_dfs = ObjDict({
        obj: pd.DataFrame(data)
        for obj, data in eos_data_with_missing_ids_removed.items()
    })
    eos_data_with_file_data = uk.merge_eos_data_with_file_data(eos_data_dfs,
                                                               is_onsale=True)

    eos_data_computed = uk.add_computed_fields(sf, eos_data_with_file_data)

    validations(eos_data_computed, eos_ids_missing_in_rome,
                sf.credentials['sandbox'] == 'False')

    threading.new(pdh.to_excel, eos_data_computed.data2,
                  'Migrate EOS On-Sale Tours.xlsx')
    # monitor_eos = uk.monitor_eos_tours(sql, [item['EOSId__c'] for item in eos_data.Tour__c])
    # monitor_rome = uk.monitor_rome_tours(sf, [item['EOSId__c'] for item in eos_data.Tour__c])
    # sf.dynamic_upsert(eos_data_computed.data2, mode='dynamic')
    sf.bypass_prod_operation_approval()
    rome_results = uk.upsert_eos_data_to_rome(
        sf,
        eos_data_computed.data2,
        is_onsale=True,
        delete_tours_first=delete_tours_first)
    tour_results = itertools.chain.from_iterable(
        [job.results for job in rome_results if job.object_name == 'Tour__c'])
    event_results = itertools.chain.from_iterable(
        [job.results for job in rome_results if job.object_name == 'Event__c'])
    uk.update_romeids_in_eos(sql, tour_results, event_results)
    if sf.instance == 'lne':
        uk.add_default_tour_personnel(
            sf, [item['EOSId__c'] for item in tour_results])

    failed_deal_jobs = [
        job.errors for job in rome_results
        if job.object_name == 'Deal__c' and len(job.errors) > 0
    ]

    if eos_ids_missing_in_rome:
        missing_eos_id_info = uk.query_by_eos_ids(
            sql, eos_ids_missing_in_rome,
            ['Name', 'FirstName', 'LastName', 'Email', 'EmailAddress'])
        pdh.to_excel(missing_eos_id_info, 'Missing EOS Data.xlsx')
        print(f'Missing EOS Ids in Rome: {eos_ids_missing_in_rome}')
        # print(pd.concat(missing_eos_id_info.values()).to_string())

    # tourlegs = sf.select("""
    # SELECT Id
    # FROM TourLeg__c
    # WHERE Tour__r.AppScope__c = 'UK'
    # AND CreatedBy.Name = 'DataMigration User'
    # AND Id NOT IN (SELECT TourLeg__c FROM Event__c)
    # """)
    # sf.delete(tourlegs)

    # OfferID	Currency	Exchange Rate
    # 54297	USD	0.72314944
    # 55166	USD	0.71428572
    # 54326	USD	0.72825256
    # 58875	USD	0.74074073
    # 54134	USD	0.82644628
    # 39597	USD	0.77363454
    # 58821	USD	0.70611495
    # 56998	USD	1
    # 45648	USD	1
    # 55139	USD	1

    # print(f'EOS Monitor in progress')
    return
Example #16
def main():
    devfilter = {'Artist'}
    session = Salesforce_API(username)
    sql = SQL_Server_API('EOS-prod')
    output = {}
    rome_accounts = session.select("""
        SELECT Id
        , LocalTerms__c, CurrencyIsoCode, Type, GuestsOnTop__c, Email__c, OwnershipType__c, BillingCountry, Genre__c
        , BillingState, BillingCity, RecordTypeId, RecordType.Name, AlternativeNames__c, EOSId__c, BillingStreet
        , Phone, BillingPostalCode, Status__c, VenueName__c, Fax, Website, StandardCurfewTime__c, AgeRestrictionsSeated__c
        , Description, PrimaryOffice__r.SourceSystemId__c, StandardDoorTime__c, Name, AgeRestrictionsStanding__c
        , CreatedDate, Capacity__c, Market__c
        FROM Account
        WHERE RecordType.Name != 'Office'
        """,
                                   return_type='dataframe',
                                   mode='bulk')
    rome_accounts.fillna('', inplace=True)

    rome_contacts = session.select("""
        SELECT Id, EOSId__c, Salutation, FirstName, LastName, Title, Email, Phone, Fax, AccountId, Account.Name, Account.EOSId__c, MailingStreet, Account.RecordType.Name
        FROM Contact
        WHERE Account.RecordType.Name IN('Agency','Venue')
    """,
                                   return_type='dataframe',
                                   mode='simple')

    # UK

    # reference_xlsx = pdh.get_online_excel(loc.uk_master_data_reference)
    reference_xlsx = pd.ExcelFile(loc.uk_master_data_reference)
    reference = ukutil.get_mappings()
    # reference_xlsx = pd.ExcelFile('/Users/daniel.hicks_1/Documents/Rome/Rome Downloads/UK Master Data/REFERENCE_CoHeadliner_Dupe_Map CURRENT.xlsx')
    reference_xlsx2 = pd.ExcelFile(
        '/Users/daniel.hicks_1/Documents/Rome/Rome Downloads/UK Master Data/REFERENCE_Non-UK master data.xlsx'
    )
    coheadliner_maps = reference.coheadliner_maps
    duplicate_map = reference.duplicate_map
    field_ignore_map = reference.ignore_map
    reviewcomplete = reference.review_complete_set
    ignorelist = set(
        pd.read_excel(
            reference_xlsx2,
            'IGNORELIST').query('AlreadyInRome == False')['EOSId__c'].tolist())
    uk = threading.new(ukutil.get_uk_master_data, sql)

    potential_venues_to_delete = threading.new(session.select,
                                               """
    SELECT Id, Name, EOSId__c, CreatedBy.Name, BillingCity, BillingCountry, CurrencyIsoCode, (SELECT Id FROM Events__r)
    FROM Account
    WHERE RecordType.Name = 'Venue'
    AND EOSId__c IN @ignorelist
    AND Id NOT IN (SELECT Venue__c FROM Event__c)
    """,
                                               return_type='dataframe',
                                               mute=True)
    potential_artists_to_delete = threading.new(session.select,
                                                """
    SELECT Id, Name, EOSId__c, CreatedBy.Name, CurrencyIsoCode, (SELECT Id FROM Deals__r)
    FROM Account
    WHERE RecordType.Name = 'Artist'
    AND EOSId__c IN @ignorelist
    AND Id NOT IN (SELECT Artist__c FROM Deal__c)
    AND Id NOT IN (SELECT TourHeadliner__c FROM Tour__c)
    """,
                                                return_type='dataframe',
                                                mute=True)
    potential_copromoters_to_delete = threading.new(session.select,
                                                    """
    SELECT Id, Name, EOSId__c, CreatedBy.Name, CurrencyIsoCode, (SELECT Id FROM Deals__r)
    FROM Account
    WHERE RecordType.Name = 'Co-Promoter'
    AND EOSId__c IN @ignorelist
    AND Id NOT IN (SELECT CoPromoter__c FROM Deal__c)
    """,
                                                    return_type='dataframe',
                                                    mute=True)
    potential_ticketagencies_to_delete = threading.new(session.select,
                                                       """
    SELECT Id, Name, EOSId__c, CreatedBy.Name, CurrencyIsoCode, (SELECT Id FROM TicketAgencyEvents__r)
    FROM Account
    WHERE RecordType.Name = 'Ticket Agency'
    AND EOSId__c IN @ignorelist
    AND Id NOT IN (SELECT TicketAgency__c FROM EventTicketAgency__c)
    """,
                                                       return_type='dataframe',
                                                       mute=True)
    potential_artistagencies_to_delete = threading.new(session.select,
                                                       """
    SELECT Id, Name, EOSId__c, CreatedBy.Name, CurrencyIsoCode, (SELECT Id FROM Deals__r)
    FROM Account
    WHERE RecordType.Name = 'Agency'
    AND EOSId__c IN @ignorelist
    AND Id NOT IN (SELECT Agency__c FROM Deal__c)
    """,
                                                       return_type='dataframe',
                                                       mute=True)
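
    # The five selects above run concurrently; they surface records carrying an
    # ignored EOSId__c that have no dependent Rome records, so they are safe
    # deletion candidates (written to the diff workbook, not deleted here).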

    # names_df = pd.concat([df[[c for c in ['EOSId__c','Name','FirstName','LastName'] if c in df.columns]] for df in uk.values()])
    # pdh.to_excel(names_df, f'{folder_path}UK Names.xlsx')

    # duplicate_map = .set_index('EOS Id').to_dict('index')
    def remap_duplicate(row):
        src = row['Account.EOSId__c']
        tar = duplicate_map.get(src, None)
        return src if tar is None else tar

    uk = uk.result()
    threading.new(pdh.to_excel, uk, loc.uk_master_data_full_pull)

    uk.Artist = uk.Artist[~uk.Artist.EOSId__c.isin(
        coheadliner_maps
    )]  # Filter out artists that will be mapped to multiple Rome Artists
    uk.VenueContact[['FirstName', 'MiddleName',
                     'LastName']] = uk.VenueContact.apply(pdh.parse_name,
                                                          axis=1,
                                                          result_type='expand')
    del uk.VenueContact['Name']

    all_eosids = set().union(
        *[set(df['EOSId__c'].tolist()) for df in uk.values()])

    for key, df in uk.items():
        # Filter out any master data that is a duplicate and will be mapped to another record
        df = df[~df.EOSId__c.isin(duplicate_map)].copy()
        # Filter out any master data that is not for the UK group and can be ignored
        df = df[~df.EOSId__c.isin(ignorelist)].copy()
        if 'Account.EOSId__c' in df:
            df['Account.EOSId__c'] = df.apply(remap_duplicate, axis=1)
            # Apply the same duplicate / ignore filtering by the remapped parent Account
            df = df[~df['Account.EOSId__c'].isin(duplicate_map)].copy()
            df = df[~df['Account.EOSId__c'].isin(ignorelist)].copy()
        uk[key] = df

    # eosids_in_rome = set(rome_accounts.EOSId__c.fillna('NULL').tolist()) | set(rome_contacts.EOSId__c.fillna('NULL').tolist())
    rome_accounts_with_invalid_eosids = rome_accounts.query(
        "EOSId__c != '' and EOSId__c not in @all_eosids and EOSId__c not in @ignorelist"
    )[['Id', 'EOSId__c', 'RecordType.Name', 'Name']]

    if len(rome_accounts_with_invalid_eosids) > 0:
        output['Rome Accounts w Ignored EOS Ids'] = (
            rome_accounts_with_invalid_eosids.sort_values('EOSId__c'))
        print(
            f"{len(rome_accounts_with_invalid_eosids)} Rome Accounts have an EOSId__c which does not appear in EOS"
        )
        # s = rome_accounts_with_invalid_eosids.to_string()
        # if prompt(f"The following Rome Accounts have an EOSId__c which does not appear in EOS. Would you like to set their EOSId__c to blank?\n{s}", boolean=True):
        #     to_update = rome_accounts_with_invalid_eosids.copy()
        #     to_update['EOSId__c'] = None
        #     session.add_bypass_settings()
        #     session.update(to_update, mode='simple')
        #     session.remove_bypass_settings()

    allukaccountcols = set().union(*[
        set(df.columns.values) for obj, df in uk.items()
        if 'RecordType.Name' in df
    ])
    allukcontactcols = set().union(*[
        set(df.columns.values) for obj, df in uk.items()
        if 'RecordType.Name' not in df
    ])

    rome_master_data_by_recordtype = rome_accounts.groupby('RecordType.Name')
    rome_master_data = {
        g: rome_master_data_by_recordtype.get_group(g)
        for g in rome_master_data_by_recordtype.groups
    }
    rome_master_data['ArtistAgent'] = rome_contacts.query(
        "`Account.RecordType.Name` == 'Agency'")
    rome_master_data['VenueContact'] = rome_contacts.query(
        "`Account.RecordType.Name` == 'Venue'")

    artist_genre_map = {
        'Pop': 'Top 40 / Pop',
        'Urban, Rap, Hip Hop': 'Rap / Hip Hop',
        'Arts & Entertainment': 'Theatre',
    }

    def ignore_lambda(x, y, field_name, record):
        if record['EOSId__c'] in reviewcomplete:
            return True
        if str(x).strip() == str(y).strip():
            return True
        if field_name in {
                'BillingStreet', 'BillingCountry', 'BillingCity',
                'BillingState', 'BillingPostalCode', 'Phone', 'Fax', 'Type',
                'Description'
        } and not (pdh.isnull(x)) and pdh.isnull(y):
            return True
        if field_name in {
                'Email', 'Email__c'
        } and str(x).strip().lower() == str(y).strip().lower():
            return True
        if field_name == 'Genre__c' and artist_genre_map.get(y, '_') == x:
            return True
        if (record['EOSId__c'], field_name) in field_ignore_map:
            return True

    def artist_ignore_lambda(x, y, field_name, record):
        if field_name in {'CurrencyIsoCode', 'Name'}:
            return True
        if field_name == 'Description' and y == 'Alias: ':
            return True
        return ignore_lambda(x, y, field_name, record)

    def ignore_lambda_venues(x, y, field_name, record):
        if record['EOSId__c'] in reviewcomplete:
            return True
        if str(x).strip() == str(y).strip():
            return True
        if field_name == 'Market__c' and y == 'Unassigned':
            return True
        if (record['EOSId__c'], field_name) in field_ignore_map:
            return True
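
    # The ignore-lambdas above return True when a field difference should be
    # suppressed; falling through (implicitly returning None) keeps the
    # difference in the diff output.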

    venue_diff = pdh.diff_by_column(rome_master_data['Venue'],
                                    uk['Venue'],
                                    'EOSId__c', ['_Rom', ''],
                                    ignore_lambda_venues,
                                    include_combination_tab=True)
    # venue_diff = {f'Diff {key} ({len(val)})':val for key, val in venue_diff.items() if len(val) > 0}
    # pdh.to_excel(venue_diff, f'{folder_path}UK Master Data Venue Diff.xlsx')
    pdh.to_excel(output, loc.uk_master_data_venue_diff_output)

    for obj, ukdf in uk.items():
        # if obj not in devfilter: continue # For dev
        print(f'Processing {obj}')
        romedf = rome_master_data[obj]
        cols_to_compare = [
            c for c in romedf.columns.values
            if c in ukdf.columns.values or c in {'Id'}
        ]
        fuzzy_match_fields = [
            c for c in ['Name', 'FirstName', 'LastName', 'BillingCity']
            if c in romedf.columns.values and c in ukdf.columns.values
        ]
        partition_fields = ['Account.EOSId__c'] if obj in {
            'ArtistAgent', 'VenueContact'
        } else []
        diff = pdh.diff_and_fuzzy_match(
            romedf,
            ukdf,
            'EOSId__c',
            fuzzy_match_fields,
            partition_fields, ['Id', 'EOSId__c'], ['_Rom', '_EOS'],
            multi=False,
            ignore_lambda=(artist_ignore_lambda
                           if obj == 'Artist' else ignore_lambda))
        diff['Diff'].query("Diff != 'Rom_only'", inplace=True)
        formatted = {
            f'{key} {obj} ({len(val)})': val
            for key, val in diff.items()
        }
        output.update(formatted)

    output['Potential DEL Venues'] = potential_venues_to_delete.result()
    output['Potential DEL Artists'] = potential_artists_to_delete.result()
    output['Potential DEL Co-Promoters'] = potential_copromoters_to_delete.result()
    output['Potential DEL Artist Agencies'] = potential_artistagencies_to_delete.result()
    output['Potential DEL Ticket Agencies'] = potential_ticketagencies_to_delete.result()

    potential_dupe_venues_to_merge = uk['Venue'].query(
        'Status__c == "Inactive"')
    potential_dupe_venues_to_merge_ids = set(
        potential_dupe_venues_to_merge['EOSId__c'].tolist())
    potential_dupe_venues_to_merge = pdh.compare_datasets_for_merge(
        rome_master_data['Venue'].query(
            "EOSId__c not in @potential_dupe_venues_to_merge_ids"),
        potential_dupe_venues_to_merge, ['VenueName__c', 'BillingCity'], [],
        ['Id', 'EOSId__c'])
    output['Potential Dupe Venues to Merge'] = potential_dupe_venues_to_merge
    output['Rome Accounts Invalid EOS Id'] = rome_accounts_with_invalid_eosids

    venue_diff_key = [s for s in output if s.startswith('Diff Venue')][0]
    venue_fields_to_compare = (
        output[venue_diff_key]['Diff']
        .replace('Changed fields:', '', regex=True)
        .str.strip()
        .tolist())
    # venue_fields_to_compare = list({f:f for f in (itertools.chain.from_iterable([[s2.strip() for s2 in s.split(',')] for s in venue_fields_to_compare]))})
    # venue_diff = {
    #     f: pdh.diff(rome_master_data['Venue'][{'Id', 'EOSId__c', 'VenueName__c', f}]
    #         , uk['Venue'][{'EOSId__c', 'VenueName__c', f}]
    #         , 'EOSId__c'
    #         , ['_Rom', '_EOS']
    #         , (lambda x, y, field_name: True if (field_name == 'VenueName__c' or (str(x).strip() == str(y).strip())) else False)
    #     ).query("Diff not in ('Rom_only','EOS_only')")
    #     for f in venue_fields_to_compare
    # }
    output = {key: val for key, val in output.items() if len(val) > 0}
    pdh.to_excel(output, loc.uk_master_data_diff_output)
    # pdh.to_excel(output, f'{folder_path}UK Master Data Diff.xlsx')
    print('Diff Files Written')
    return
Example #17
from classes.obj_dict import ObjDict
import itertools

from classes.ant_migration_tool import AntMetadataFolder
from classes.salesforce_api import Salesforce_API
from classes.sql_server_api import SQL_Server_API
import classes.salesforce_metadata_file as sme
import re
import functions.pandas_helpers as pdh

from classes.py_threading import ThreadPool
threading = ThreadPool()

sf_prod = Salesforce_API('*****@*****.**')
sf_uat = Salesforce_API('*****@*****.**')
eos_prod = SQL_Server_API('EOS-prod')
eos_stage = SQL_Server_API('EOS-stage')

import pandas as pd

def main():
    fix_stage_uat_tour_data()
    # fix_stage_uat_master_data(
    #     'Artist'
    #     , f"""
    #     SELECT Id, EOSId__c, Name
    #     FROM Account
    #     WHERE RecordType.Name = 'Artist'
    #     AND EOSId__c != NULL
    #     """
    #     ,f"""