def main(sessions, do_fix=False):
    """Report (and optionally delete) Touring-App child records whose parent
    Event__c lookup is blank ("orphans"), across several Salesforce instances.

    sessions: mapping of username -> Salesforce session object.
    do_fix:   when True, prompts for confirmation and then deletes the orphans.
    Returns a newline-separated summary string, one line per instance/object.
    """
    import functions.pandas_helpers as pdh
    from functions.prompt import prompt
    from classes.py_threading import ThreadPool
    threading = ThreadPool()
    # NOTE(review): instances_to_run is not defined in this function —
    # presumably a module-level constant elsewhere in the file; confirm.
    sessions = {
        username: sf
        for username, sf in sessions.items() if username in instances_to_run
    }
    return_string = ""
    # Child objects of Event__c that are checked for orphaned rows.
    objects = [
        'EventDateTime__c', 'Deal__c', 'TicketScale__c', 'Deduction__c',
        'LedgerEntry__c'
    ]
    # Interactive safety gate: even when do_fix was requested, require an
    # explicit confirmation before deleting anything.
    if do_fix is True and prompt("Delete orphan records?", boolean=True) is False:
        do_fix = False

    @threading.run_async
    def inner(sf):
        # Runs once per instance, asynchronously; returns a dict of
        # object name -> async query handle.
        if do_fix:
            sf.bypass_prod_operation_approval()
        results = {}
        for obj in objects:
            # Only include Name / EventName__c when the object actually has
            # those fields (schemas differ per object).
            obj_fields = {
                f.name: f
                for f in sf.get_object_description(obj).fields
            }
            fields = ['Name', 'EventName__c']
            fields = [f for f in fields if f in obj_fields]
            fields_str = ', '.join(fields)
            query = f"""
                SELECT Id, {fields_str}, Event__c, CreatedBy.Name, CreatedDate,
                    LastModifiedBy.Name, LastModifiedDate
                FROM {obj}
                WHERE IsTouringApp__c = True
                AND Event__c = NULL
                AND CreatedDate >= THIS_YEAR
                """
            # Kick off all queries in parallel; resolved below.
            results[obj] = threading.new(sf.select, query, mode='bulk')
        for obj in objects:
            records = results[obj].result()
            if len(records) > 0:
                if do_fix:
                    sf.add_bypass_settings()
                    sf.delete(records, mode='bulk')
                    sf.remove_bypass_settings()
        return results

    # Run for each Salesforce instance
    results = {sf.instance: inner(sf) for sf in sessions.values()}
    for sf in sessions.values():
        result = results[sf.instance].result()
        for obj in objects:
            # NOTE(review): inner() already called .result() on these query
            # handles; calling it again here assumes the thread wrapper caches
            # its result — confirm against classes.py_threading.
            records = result[obj].result()
            return_string += f"\n{sf.instance.upper()} has {len(records)} {obj} orphaned records with IsTouringApp__c == True"
        # NOTE(review): this passes the async handle for the instance, not the
        # resolved dict of dataframes — confirm pdh.to_excel resolves handles,
        # otherwise this looks like a bug.
        pdh.to_excel(results[sf.instance], f'({sf.instance}) Orphaned Event Child Records.xlsx')
    return return_string
def main(sessions, do_fix=False):
    """Compare field values between local Event__c records and the Business
    Plan events they were built from, and report per-field mismatches.

    sessions: mapping of username -> Salesforce session object.
    do_fix:   when True, prints the summary and exports the mismatches.
    Returns a newline-separated summary string.
    """
    import pandas as pd
    import functions.pandas_helpers as pdh
    from classes.py_threading import ThreadPool, Thread
    threading = ThreadPool()
    # NOTE(review): instances_to_run comes from enclosing module scope — confirm.
    sessions = {
        username: sf
        for username, sf in sessions.items() if username in instances_to_run
    }
    return_string = ""
    # Fields compared between the local Event and its Business Plan source.
    fields_to_match = [
        'Venue__r.Name',
        'Office__r.Name',
        'PrimaryHeadlinerArtist__c',
        # 'EventFirstDate__c',
    ]

    @threading.run_async
    def inner(sf):
        # Select each field alongside its BusinessPlanEvent__r counterpart so
        # both sides of the comparison arrive in one query.
        fields_to_match_str = ', '.join(f'{f}, BusinessPlanEvent__r.{f}'
                                        for f in fields_to_match)
        query = f"""
            SELECT Id, Tour__r.TourTitle__c, BusinessPlanEvent__c, EventTitle__c, {fields_to_match_str}
            FROM Event__c
            WHERE BusinessPlanEvent__c <> NULL
            """
        # fillna('') so NULLs compare equal to blanks rather than NaN != NaN.
        records = sf.select(query, mode='simple', return_type='dataframe').fillna('')
        # pdquery = ' or '.join(f'(`{f}` != `BusinessPlanEvent__r.{f}`)' for f in fields_to_match)

        def diff(row):
            # Comma-separated list of the fields that differ on this row.
            differences = [
                f for f in fields_to_match
                if row[f] != row['BusinessPlanEvent__r.' + f]
            ]
            return ', '.join(differences)

        # mismatched = records.query(pdquery)
        records['Diff'] = records.apply(diff, axis=1)
        return records.query('Diff != ""')

    # Run for each Salesforce instance
    results = {sf.instance: inner(sf) for sf in sessions.values()}
    for sf in sessions.values():
        result = results[sf.instance].result()
        for f in fields_to_match:
            mismatches_for_field = result.query(
                f"`{f}` != `BusinessPlanEvent__r.{f}`")
            # NOTE(review): this reuses the per-instance `results` dict for
            # per-field sheets, so with multiple instances each field key is
            # overwritten by the last instance — confirm this is intended.
            results[f'{f} Mismatches'] = mismatches_for_field
            return_string += f'\n{sf.instance.upper()} has {len(mismatches_for_field)} mismatches on the {f} field between BP and Local Events'
    if do_fix:
        print(return_string)
        # NOTE(review): original indentation was ambiguous; the export is
        # assumed to be part of the do_fix branch (as in sibling scripts) —
        # confirm.
        pdh.to_excel(results, 'Local Events with Field Mismatches.xlsx')
    return return_string
def main(sessions, do_fix=False):
    """Report (and optionally zero-fill) Plan/Projection Ticket Scales that
    have a blank Capacity, ProjectedPaidTickets, or Price field.

    sessions: mapping of username -> Salesforce session object.
    do_fix:   when True, blanks are replaced with 0 and the records updated
              (an Original/Fixed Excel export is written per instance).
    Returns a newline-separated summary string, one line per instance.
    """
    import pandas as pd
    import functions.pandas_helpers as pdh
    from classes.py_threading import ThreadPool
    threading = ThreadPool()
    # NOTE(review): instances_to_run comes from enclosing module scope — confirm.
    sessions = {
        username: sf
        for username, sf in sessions.items() if username in instances_to_run
    }
    return_string = ""

    @threading.run_async
    def inner(sf):
        query = """
            SELECT Id, Event__r.EventTitle__c, Type__c, Label__c, Capacity__c,
                ProjectedPaidTickets__c, Price__c
            FROM TicketScale__c
            WHERE CreatedDate >= THIS_YEAR
            AND IsTouringApp__c = True
            AND StageType__c IN ('Plan','Projection')
            AND (Capacity__c = NULL OR ProjectedPaidTickets__c = NULL OR Price__c = NULL)
            """
        return sf.select(query, mode='simple', return_type='dataframe')

    # Run for each Salesforce instance (queries fan out asynchronously).
    results = {sf.instance: inner(sf) for sf in sessions.values()}
    for sf in sessions.values():
        result = results[sf.instance].result()
        if len(result) > 0:
            return_string += f"\n{sf.instance.upper()} has {len(result)} Ticket Scales with blank Capacity, ProjectedPaidTickets, or Price__c"
    if do_fix:
        fix = lambda x: 0 if pd.isnull(x) else x
        for sf in sessions.values():
            result = results[sf.instance].result()
            # BUG FIX: the fix pass was previously gated on `len(result) > 0`
            # using whichever `result` the report loop last bound — so a clean
            # last instance suppressed fixes everywhere (and an empty sessions
            # dict raised NameError). Check each instance individually instead.
            if len(result) == 0:
                continue
            fixed = result.copy()
            for col in ('Capacity__c', 'ProjectedPaidTickets__c', 'Price__c'):
                fixed[col] = fixed[col].apply(fix)
            pdh.to_excel(
                {
                    'Original': result,
                    'Fixed': fixed
                },
                f'({sf.instance}) Ticket Scales with blank Capacity ProjectedPaidTickets or Price__c.xlsx'
            )
            sf.add_bypass_settings()
            sf.update(fixed[[
                'Id', 'Capacity__c', 'ProjectedPaidTickets__c', 'Price__c'
            ]])
            sf.remove_bypass_settings()
    return return_string
import os import re from classes.salesforce_api import Salesforce_API, Cache from classes.sql_server_api import SQL_Server_API from classes.py_threading import ThreadPool threading = ThreadPool() from classes.obj_dict import ObjDict import pandas as pd import functions.pandas_helpers as pdh from functions.prompt import prompt import functions.uk_eos as ukutil from functions.itertools import itertools from datetime import datetime, timedelta import locations as loc import time ########################### PARAMETERS ########################### # username = "******" # username = "******" # username = "******" # username = "******" # username = "******" username = "******" folder_path = '/Users/daniel.hicks_1/Documents/Rome/Rome Downloads/UK Master Data/'
def problem_data(sf=None, sql=None):
    """Find Event__c records whose denormalized rollup fields disagree with
    their source (Venue ownership, Office name, Venue's primary office,
    Division, Geography).

    sf:  Salesforce session object (required in practice despite the default).
    sql: unused here — presumably kept for a common checker signature; confirm.
    Returns (data, locals(), error_message) where `data` is the de-duplicated
    Ids of all affected Events and `error_message` is the human summary.
    """
    from classes.py_threading import ThreadPool
    import pandas as pd
    import functions.pandas_helpers as pdh
    threading = ThreadPool()
    return_string = ""
    # Reference lists used to build the OR-chained comparison predicates below.
    geographies = sf.select(
        "SELECT Name FROM Account WHERE RecordType.Name = 'Office' AND Type = 'Geography'"
    )
    divisions = sf.select(
        "SELECT Name FROM Account WHERE RecordType.Name = 'Office' AND Type = 'Division'"
    )
    officenames = sf.select(
        "SELECT Name FROM Account WHERE RecordType.Name = 'Office'")
    query1 = """
        SELECT Id, EventTitle__c, VenueOwnership__c, Venue__r.Name, Venue__r.OwnershipType__c
        FROM Event__c
        WHERE (
            (VenueOwnership__c = 'Third Party' and Venue__r.OwnershipType__c != 'Third Party')
            OR (VenueOwnership__c = 'Exclusively Booked' and Venue__r.OwnershipType__c != 'Exclusively Booked')
            OR (VenueOwnership__c = 'Owned/Operated' and Venue__r.OwnershipType__c != 'Owned/Operated')
        )
        AND Venue__c != NULL
        """
    officename_dfs = []
    primaryvenueoffice_dfs = []
    # SOQL cannot compare two fields directly, so each office name becomes its
    # own (field = 'X' AND related != 'X') clause. Chunks of 40 keep each
    # query under the length limit; results are concatenated afterwards.
    for chunk in pdh.chunks(officenames, 40):
        query2 = """
            SELECT Id, EventTitle__c, OfficeName__c, Office__r.Name
            FROM Event__c
            WHERE (
            """ + " OR ".join([
            f"(OfficeName__c = '{item['Name']}' AND Office__r.Name != '{item['Name']}')"
            for item in chunk
        ]) + """
            )
            AND Office__c != NULL
            """
        query3 = """
            SELECT Id, EventTitle__c, PrimaryVenueOffice__c, Venue__r.Name, Venue__r.PrimaryOffice__r.Name
            FROM Event__c
            WHERE (
            """ + " OR ".join([
            f"(PrimaryVenueOffice__c = '{item['Name']}' AND Venue__r.PrimaryOffice__r.Name != '{item['Name']}')"
            for item in chunk
        ]) + """
            )
            AND Venue__c != NULL
            """
        officename_dfs.append(
            threading.new(sf.select, query2, return_type='dataframe'))
        primaryvenueoffice_dfs.append(
            threading.new(sf.select, query3, return_type='dataframe'))
    query4 = """
        SELECT Id, EventTitle__c, Division__c, Office__r.Division__c
        FROM Event__c
        WHERE (
        """ + " OR ".join([
        f"(Division__c = '{item['Name']}' AND Office__r.Division__c != '{item['Name']}')"
        for item in divisions
    ]) + """
        )
        AND Office__c != NULL
        """
    query5 = """
        SELECT Id, EventTitle__c, Geography__c, Office__r.Geography__c
        FROM Event__c
        WHERE (
        """ + " OR ".join([
        f"(Geography__c = '{item['Name']}' AND Office__r.Geography__c != '{item['Name']}')"
        for item in geographies
    ]) + """
        )
        AND Office__c != NULL
        """
    # Fan the remaining queries out in parallel, then resolve everything.
    venueownership = threading.new(sf.select, query1, return_type='dataframe')
    officename = pd.concat([th.result() for th in officename_dfs])
    primaryvenueoffice = pd.concat(
        [th.result() for th in primaryvenueoffice_dfs])
    division = threading.new(sf.select, query4, return_type='dataframe')
    geography = threading.new(sf.select, query5, return_type='dataframe')
    venueownership = venueownership.result()
    division = division.result()
    geography = geography.result()
    if len(venueownership) > 0:
        return_string += f"{sf.instance} has {len(venueownership)} Events with incorrect VenueOwnership__c\n"
    if len(officename) > 0:
        return_string += f"{sf.instance} has {len(officename)} Events with incorrect OfficeName__c\n"
    if len(primaryvenueoffice) > 0:
        return_string += f"{sf.instance} has {len(primaryvenueoffice)} Events with incorrect VenuePrimaryOffice__c\n"
    if len(division) > 0:
        return_string += f"{sf.instance} has {len(division)} Events with incorrect Division__c\n"
    if len(geography) > 0:
        return_string += f"{sf.instance} has {len(geography)} Events with incorrect Geography__c\n"
    # Union of all affected Event Ids (an Event may fail several checks).
    data = pd.concat([
        venueownership[['Id']],
        officename[['Id']],
        primaryvenueoffice[['Id']],
        division[['Id']],
        geography[['Id']],
    ]).drop_duplicates()
    error_message = return_string
    # locals() is returned so callers can inspect the individual dataframes.
    return data, locals(), error_message
def main(sessions, do_fix=False):
    """Report (and optionally insert) Tour__Share rows missing for Booking
    tours, based on the comparison produced by check_tours.

    sessions: mapping of username -> Salesforce session object; only the
              hard-coded username below is used.
    do_fix:   when True, exports the comparison and inserts the missing shares.
    Returns a newline-separated summary string.
    """
    from classes.py_threading import ThreadPool, Thread
    import functions.pandas_helpers as pdh
    return_string = ''
    threading = ThreadPool()
    sf = sessions['*****@*****.**']
    sf.default_mode = 'bulk'
    # NOTE(review): check_tours is defined elsewhere in this module — it is
    # expected to return a dict of dataframes keyed by share object, each with
    # a pandas merge `_merge` indicator column; confirm.
    result = check_tours(
        sf, threading, """
        SELECT Id, Status__c, BuyingGroup__c, TourSubmittedForApproval__c, OwnerId, Owner.Name, Owner.IsActive
        FROM Tour__c
        WHERE RecordType.Name = 'Booking'
        AND IsTouringApp__c = True
        """)
    # Earlier inline implementation, superseded by check_tours — kept for reference:
    # select = threading.run_async(sf.select)
    # tours = select("""
    #     SELECT Id
    #     FROM Tour__c
    #     WHERE RecordType.Name = 'Booking'
    #     AND IsTouringApp__c = True
    # """, return_type='dataframe')
    # tour_ids_str = "','".join(tours.Id.tolist())
    # # AND Id = 'a1s1Q000006GyfrQAC'
    # existing_shares = select(f"""
    #     SELECT ParentId, UserOrGroupId, AccessLevel
    #     FROM Tour__Share
    #     WHERE ParentId IN ('{tour_ids_str}')
    # """, return_type='dataframe')
    # tour_team_members = select(f"""
    #     SELECT Id, Tour__c, Tour__r.OwnerId, Role__c, AccessLevel__c, User__c, User__r.Name, User__r.IsActive
    #     FROM TourTeamMember__c
    #     WHERE Role__c <> 'Owner'
    #     AND User__c <> NULL
    #     AND Tour__c IN ('{tour_ids_str}')
    # """, return_type='dataframe')
    # tour_team_members['AccessLevel__c'] = tour_team_members['AccessLevel__c'].apply(lambda x: access_map[x] if x in access_map else x)
    # # tour_team_members_map = tour_team_members.set_index(['Tour__c', 'User__c']).to_dict('index')
    # # new_tour_team_member_records = tour_team_members[(tour_team_members['Role__c'] == 'Owner') & (tour_team_members['Tour__r.OwnerId'] != tour_team_members['User__c'])]
    # # existing_shares = existing_shares.result()
    # new_shares = (
    #     tour_team_members
    #     .copy()
    #     .query("User__c != `Tour__r.OwnerId` and `User__r.IsActive` == True")
    #     .rename(columns={
    #         'User__c': 'UserOrGroupId',
    #         'Tour__c': 'ParentId',
    #         'AccessLevel__c': 'AccessLevel'
    #     })
    #     .drop(columns=['Id'])
    # )
    # shares_to_insert = new_shares.merge(existing_shares.result(), on=['ParentId','UserOrGroupId','AccessLevel'], how='outer', indicator=True).query("_merge == 'left_only'")
    # Rows flagged by check_tours as present in the team-member data but
    # missing from Salesforce sharing.
    tour_shares_to_insert = result['Tour__Share'].query(
        "_merge == 'missing from SF'")[[
            'UserOrGroupId', 'AccessLevel', 'ParentId'
        ]]
    # event_shares_to_insert = result['Event__Share'].query("_merge == 'missing from SF'")[['UserOrGroupId','AccessLevel','ParentId']]
    if do_fix:
        pdh.to_excel(result, 'Missing Tour Shares.xlsx')
    if len(tour_shares_to_insert) > 0:
        return_string += f'\n{len(tour_shares_to_insert)} missing Tour__Share records'
        if do_fix:
            print(f'Inserting {len(tour_shares_to_insert)} shares.')
            sf.insert('Tour__Share', tour_shares_to_insert)
    # if len(event_shares_to_insert) > 0:
    #     return_string += f'\n{len(event_shares_to_insert)} missing Event__Share records'
    #     if do_fix:
    #         print(f'Inserting {len(event_shares_to_insert)} shares.')
    #         sf.insert('Event__Share', event_shares_to_insert)
    return return_string
def main(sessions, do_fix=False):
    """Find EventDateTime__c rows whose State__c/Country__c combination is
    invalid per the EventDateTime__c metadata's State dependent picklist, and
    optionally overwrite them from the Event's Venue billing address.

    sessions: mapping of username -> Salesforce session object.
    do_fix:   when True, updates the offending records under bypass settings.
    Returns a newline-separated summary string.
    """
    import pandas as pd
    import functions.pandas_helpers as pdh
    from classes.py_threading import ThreadPool, Thread
    import classes.salesforce_metadata_file as sme
    threading = ThreadPool()
    # NOTE(review): instances_to_run comes from enclosing module scope — confirm.
    sessions = {
        username: sf
        for username, sf in sessions.items() if username in instances_to_run
    }
    return_string = ""
    for u, sf in sessions.items():
        # Local copy of the object metadata; the dependent-picklist mapping is
        # read from this file rather than the org.
        path = '/Users/daniel.hicks_1/Documents/Tower/liveNationSFDC PSDEV/src/objects/EventDateTime__c.object'
        edt = sme.SalesforceMetadataFile.new(sf, 'EventDateTime__c',
                                             'CustomObject', path)
        edt.from_xml(path)
        state_field = [
            item for item in edt.tree.CustomObject.fields
            if item.fullName == 'State__c'
        ][0]
        # Build state -> list of valid controlling countries from the
        # dependent-picklist valueSettings.
        state_countries_map = {}
        for item in state_field.valueSet.valueSettings:
            countries, state = item.controllingFieldValue, item.valueName
            # A single controlling value may parse as a bare string.
            if type(countries) is str:
                countries = [countries]
            if state not in state_countries_map:
                state_countries_map[state] = []
            for country in countries:
                state_countries_map[state].append(country)
        # `edt` is rebound here from the metadata object to a row generator.
        edt = sf.select("""
            SELECT Id, State__c, Country__c, Event__r.Venue__r.BillingCountryCode,
                Event__r.Venue__r.BillingCountry, Event__r.Venue__r.BillingStateCode,
                Event__r.Venue__r.BillingState
            FROM EventDateTime__c
            WHERE Event__c != NULL
            AND (State__c != NULL OR Country__c != NULL)
            AND LastModifiedDate >= LAST_N_DAYS:30
            """,
            return_type='generator',
            mode='bulk')

        def get_new_state_and_country(item):
            # Compose "CODE - Label" values from the Venue billing address.
            # NOTE(review): assumes bulk select returns '' (not None) for null
            # fields — otherwise these concatenations raise TypeError; confirm.
            state = item['Event__r.Venue__r.BillingStateCode'] + ' - ' + item[
                'Event__r.Venue__r.BillingState']
            country = item[
                'Event__r.Venue__r.BillingCountryCode'] + ' - ' + item[
                    'Event__r.Venue__r.BillingCountry']
            # Both halves blank -> treat as no value at all.
            state = '' if state == ' - ' else state
            country = '' if country == ' - ' else country
            return state, country

        def records_to_fix():
            # Yield rows whose state is unknown, or whose country is not valid
            # for that state, with Old State/Country retained for the report.
            for item in edt:
                state, country = item['State__c'], item['Country__c']
                new_state, new_country = get_new_state_and_country(item)
                valid_countries = state_countries_map.get(state, [])
                if state != '' and (state not in state_countries_map
                                    or country not in valid_countries):
                    # if state != new_state or country != new_country:
                    item['Old State'] = state
                    item['Old Country'] = country
                    item['State__c'], item[
                        'Country__c'] = get_new_state_and_country(item)
                    yield item

        to_update = records_to_fix()
        to_update = list(to_update)
        if len(to_update) > 0:
            return_string += f"\n{sf.instance.upper()} has {len(to_update)} EDT records with incorrect State or Country"
            if do_fix:
                with sf.bypass_settings():
                    print(return_string)
                    sf.update(to_update)
    return return_string
from sys import argv import json from classes.salesforce_api import Salesforce_API from classes.py_threading import ThreadPool threading = ThreadPool() # script, action, instance = argv script, action, instance = ["", "remove", "all"] if len(argv) != 3 else argv def main(username): session = Salesforce_API(username) session.print_messages = False if action == "add": session.add_bypass_settings() print("Added Bypass Settings for {}".format(username)) else: session.remove_bypass_settings() print("Removed Bypass Settings for {}".format(username)) if instance == "all": threading.mute_errors = True for item in allCreds: threading.new(main, item["username"]) threading.wait() else: username = "******" + (
# import json import time import math import re import csv import datetime import os from classes.salesforce_api import Salesforce_API from classes.sql_server_api import SQL_Server_API # from simple_salesforce import SalesforceLogin # from simple_salesforce import Salesforce from classes.py_threading import ThreadPool, ThreadPool threading = ThreadPool() # from classes.obj_dict import ObjDict # import pandas as pd # import functions.pandas_helpers as pdh ########################### PARAMETERS ########################### ################################################################## # import queue # q = queue.Queue() # a = q.put(1) # b = q.put(2)
def main(sessions, do_fix=False):
    """Run a battery of touring-data defect checks per Salesforce instance,
    export any offenders to Excel, and (when do_fix) apply each check's
    attached fix function.

    Each check builds a dataframe dfN with two ad-hoc attributes:
      dfN.desc — human-readable description used in the report/export, and
      dfN.fix  — optional callable(df) that repairs the rows (checks without
                 a safe automatic fix have no .fix attribute).
    The `dfs` list at the bottom is gathered by scanning locals() for names
    starting with 'df' — so do not rename these variables casually.

    sessions: mapping of username -> Salesforce session object.
    do_fix:   when True, fixes are applied under bypass settings.
    Returns a newline-separated summary string.
    """
    import pandas as pd
    import functions.pandas_helpers as pdh
    from classes.py_threading import ThreadPool
    threading = ThreadPool()
    # NOTE(review): instances_to_run comes from enclosing module scope — confirm.
    sessions = {
        username: session
        for username, session in sessions.items()
        if username in instances_to_run
    }
    return_string = ""
    # Run for each Salesforce instance
    for username, session in sessions.items():
        if do_fix:
            session.bypass_prod_operation_approval()
        # --- source queries, fanned out in parallel -----------------------
        src1 = threading.new(session.select, """
            SELECT Id, EventTitle__c, RecordType.Name, Tour__c, TourLeg__c,
                BuyingGroup__c, Tour__r.BuyingGroup__c, TourLeg__r.Tour__r.BuyingGroup__c,
                Promoter__c, Tour__r.TourBooker__c, TourLeg__r.Tour__r.TourBooker__c,
                Promoter__r.Name, Tour__r.TourBooker__r.Name, TourLeg__r.Tour__r.TourBooker__r.Name,
                BusinessPlanEvent__c, SourceSystemId__c, CreatedDate, CreatedBy.Name
            FROM Event__c
            WHERE (Tour__c <> NULL OR TourLeg__r.Tour__c <> NULL)
            AND CreatedDate > 2021-01-01T00:00:00Z
            ORDER BY Tour__c, TourLeg__r.Tour__c
            """, return_type='dataframe')
        src2 = threading.new(session.select, """
            SELECT Id, EventTitle__c, NumberofTrucks__c, TourLeg__r.NumberofTrucks__c,
                CreatedBy.Name, CreatedDate,
                (SELECT Id FROM LedgerEntryBreakouts__r WHERE OfferRateType__c = 'Per Paid Truck')
            FROM Event__c
            WHERE TourLeg__c <> NULL
            AND NumberofTrucks__c <> NULL
            AND TourLeg__r.NumberofTrucks__c <> NULL
            AND IsTouringApp__c = True
            ORDER BY TourLeg__c
            """, return_type='dataframe', mode='simple')
        src3 = threading.new(session.select, """
            SELECT Id, Deal__r.Event__c, Deal__r.Event__r.EventTitle__c, Type__c,
                BonusBase__c, BonusAmount__c, StartPoint__c, ApplyTo__c,
                CreatedBy.Name, CreatedDate
            FROM BonusDetail__c
            WHERE Deal__r.Event__r.BusinessPlanEvent__c <> NULL
            AND Type__c = 'Flat'
            AND BonusBase__c = 'Paid Tickets'
            AND BonusAmount__c = 0
            AND StartPoint__c = 0
            AND BonusPotentialatSellout__c = 0
            """, return_type='dataframe', mode='simple')
        src4 = threading.new(session.select, """
            SELECT Id, Event__c, Event__r.EventTitle__c, Type__c, DealType__c,
                BackendPercent__c, SplitPercentage__c, SplitBackendPercent__c,
                CreatedDate, CreatedBy.Name
            FROM Deal__c
            WHERE Event__r.BusinessPlanEvent__c <> NULL
            AND RecordType.Name = 'Artist'
            AND Type__c = 'Primary Headliner'
            AND ((SplitBackendPercent__c = NULL AND BackendPercent__c <> NULL)
                OR SplitPercentage__c = NULL
                OR SplitPercentage__c = 1
                OR (SplitBackendPercent__c <= 1 AND BackendPercent__c > 1))
            ORDER BY Event__r.EventTitle__c, Type__c
            """, return_type='dataframe', mode='simple')
        src5 = threading.new(session.select, """
            SELECT Id, Event__r.EventTitle__c, Event__c, DealExchangeRate__c,
                CurrencyIsoCode, Event__r.TourExchangeRate__c, Event__r.CurrencyIsoCode,
                Event__r.TourLeg__r.Tour__r.CurrencyIsoCode, CreatedDate, CreatedBy.Name
            FROM Deal__c
            WHERE RecordType.Name = 'Artist'
            AND Event__r.IsTouringApp__c = True
            AND Event__r.TourLeg__c <> NULL
            AND Event__r.TourLeg__r.Tour__r.WorkingCopy__c = NULL
            """, return_type='dataframe', mode='simple')
        src6 = threading.new(session.select, """
            SELECT Id, Event__r.EventTitle__c, TouringCategory__c, Type__c,
                OfferRateType__c, OfferRate__c, InHouseRate__c, OfferMin__c,
                OfferMax__c, InHouseMin__c, InHouseMax__c
            FROM LedgerEntryBreakout__c
            WHERE OfferRateType__c = 'Flat'
            AND (OfferMin__c <> NULL OR InHouseMin__c <> NULL OR OfferMax__c <> NULL OR InHouseMax__c <> NULL)
            AND CreatedDate >= THIS_YEAR
            AND IsTouringApp__c = True
            AND Event__r.TourLeg__c <> NULL
            """, return_type='dataframe', mode='simple')
        src7 = threading.new(session.select, """
            SELECT Id, Tour__c, Tour__r.OwnerId, Tour__r.Owner.Name, AccessLevel__c,
                Email__c, Role__c, SourceSystemId__c, User__c, User__r.Name
            FROM TourTeamMember__c
            WHERE Role__c = 'Owner'
            """, return_type='dataframe', mode='simple')
        src8 = threading.new(session.select, """
            SELECT Id, EventTitle__c, LastModifiedDate, BusinessPlanEvent__r.TouringBuildDate__c,
                (SELECT Id FROM TicketScales__r),
                (SELECT Id FROM Deals__r WHERE RecordType.Name = 'Artist')
            FROM Event__c
            WHERE BusinessPlanEvent__c <> NULL
            AND Id NOT IN (SELECT Event__c FROM TicketScale__c)
            """, return_type='dataframe', mode='simple')
        src9 = threading.new(session.select, """
            SELECT Id, Event__r.EventTitle__c, EventPL__c, EventProfit__c, EventLoss__c,
                ExpenseAdjustment__c, ExpenseAdjustmentProfit__c, ExpenseAdjustmentLoss__c,
                SourceSystemId__c
            FROM Deal__c
            WHERE RecordType.Name = 'Co-Promoter'
            AND EventPL__c != NULL
            AND (EventProfit__c = NULL OR EventLoss__c = NULL)
            AND CreatedDate >= THIS_YEAR
            """, return_type='dataframe', mode='simple')
        src10 = threading.new(session.select, """
            SELECT Id, Event__r.Status__c, EventStatus__c
            FROM EventDateTime__c
            WHERE IsTouringApp__c = False
            AND Event__c <> NULL
            AND Event__r.BusinessPlanEvent__c <> NULL
            AND CreatedDate >= THIS_YEAR
            """, return_type='dataframe', mode='simple')
        # --- per-check dataframes (dfN) and their fixes (fN) --------------
        src1 = src1.result()
        df2 = src1[(pd.notnull(src1['Tour__c'])) & (
            src1['BuyingGroup__c'] != src1['Tour__r.BuyingGroup__c'])].copy()
        df2.desc = "Local Events where Buying Group does not match Tour"

        def f2(df):
            df['BuyingGroup__c'] = df['Tour__r.BuyingGroup__c']
            session.update(df[['Id', 'BuyingGroup__c']], mode='simple')

        df2.fix = f2
        df3 = src1[(pd.notnull(src1['TourLeg__c']))
                   & (src1['BuyingGroup__c'] !=
                      src1['TourLeg__r.Tour__r.BuyingGroup__c'])].copy()
        df3.desc = "Touring App Events where Buying Group does not match Tour"

        def f3(df):
            df['BuyingGroup__c'] = df['TourLeg__r.Tour__r.BuyingGroup__c']
            session.update(df[['Id', 'BuyingGroup__c']], mode='simple')

        df3.fix = f3
        # df4 = src1[(pd.notnull(src1['Tour__c'])) & (src1['Promoter__c'] != src1['Tour__r.TourBooker__c'])].copy()
        df4 = src1.fillna('').query(
            "Promoter__c != `Tour__r.TourBooker__c` and Tour__c != '' and BusinessPlanEvent__c != ''"
        )
        # Restrict to Events built from the BP (SourceSystemId carries the BP Id).
        df4 = df4[df4['SourceSystemId__c'].str[:18] == df4['BusinessPlanEvent__c']]
        df4.desc = "Local Events where Promoter does not match Tour and Event was built from Business Plan"
        # NOTE: df4 intentionally has no .fix — report only.
        df5 = src1[(pd.notnull(src1['TourLeg__c']))
                   & (src1['Promoter__c'] !=
                      src1['TourLeg__r.Tour__r.TourBooker__c'])].copy()
        df5.desc = "Touring App Events where Promoter does not match Tour"

        def f5(df):
            df['Promoter__c'] = df['TourLeg__r.Tour__r.TourBooker__c']
            session.update(df[['Id', 'Promoter__c']])

        df5.fix = f5
        df6 = src2.result().fillna('').query(
            "NumberofTrucks__c != `TourLeg__r.NumberofTrucks__c`").copy()

        def f6(df):
            df['NumberofTrucks__c'] = df['TourLeg__r.NumberofTrucks__c']
            session.update(df[['Id', 'NumberofTrucks__c']], mode='simple')

        df6.fix = f6
        df6.desc = "Touring App Events where # of Trucks does not match Tour Leg"
        df7 = src3.result()
        df7.desc = "Touring Local-Built Events with a blank Signing Bonus record"

        def f7(df):
            # Deleting the blank bonus rows requires recalculating financials
            # for the affected events afterwards.
            session.delete(df, mode='simple')
            event_ids = set(df['Deal__r.Event__c'].tolist())
            threading.new(session.run_financial_calculator, event_ids, 5)

        df7.fix = f7
        df8 = src4.result()
        df8.desc = "Touring Local-Built Deals with a blank SplitBackendPercent field"

        def f8(df):
            df['SplitBackendPercent__c'] = df['BackendPercent__c']
            df['SplitPercentage__c'] = 100
            session.update(
                df[['Id', 'SplitPercentage__c', 'SplitBackendPercent__c']],
                mode='simple')

        df8.fix = f8
        src5 = src5.result()
        # Deal rate should be the inverse of the event's tour exchange rate.
        src5['Inverted_EventExchangeRate'] = (
            1 / src5['Event__r.TourExchangeRate__c'])
        df9 = src5[(src5['CurrencyIsoCode'] ==
                    src5['Event__r.TourLeg__r.Tour__r.CurrencyIsoCode'])
                   & (src5['DealExchangeRate__c'].round(2) !=
                      src5['Inverted_EventExchangeRate'].round(2))]
        df9.desc = "Touring Artist Deals w Currency Mismatch in Exch Rate"

        def f9(df):
            # Only fixes the blank-rate subset: same-currency events with no
            # TourExchangeRate get rate 1 on the Event itself.
            blanks = df.fillna('').query(
                "`Event__r.TourExchangeRate__c` == '' and `Event__r.CurrencyIsoCode` == `Event__r.TourLeg__r.Tour__r.CurrencyIsoCode`"
            )[['Event__c']]
            blanks.rename(columns={'Event__c': 'Id'}, inplace=True)
            blanks.drop_duplicates('Id', inplace=True)
            blanks['TourExchangeRate__c'] = 1
            session.update(blanks)

        df9.fix = f9
        df10 = src6.result()
        df10.desc = "Touring Flat LEBs with Min or Max"

        def f10(df):
            # Flat-rate breakouts must not carry min/max bounds; null them out.
            df[[
                'OfferMin__c', 'OfferMax__c', 'InHouseMin__c', 'InHouseMax__c'
            ]] = [None, None, None, None]
            session.update(df[[
                'Id', 'OfferMin__c', 'OfferMax__c', 'InHouseMin__c',
                'InHouseMax__c'
            ]], mode='simple')

        df10.fix = f10
        df11 = src7.result().query("`Tour__r.OwnerId` != User__c")
        df11.desc = "Tour Personnel Owner Issue"
        # def f11(df):
        #     df[['OfferMin__c', 'OfferMax__c', 'InHouseMin__c', 'InHouseMax__c']] = [None,None,None,None]
        #     session.update(df[['Id', 'OfferMin__c', 'OfferMax__c', 'InHouseMin__c', 'InHouseMax__c']], mode='simple')
        # df11.fix = f11
        df12 = src8.result()
        df12.desc = "Touring Local Build Events with empty Ticket Scales"
        df13 = src9.result()
        df13.desc = "Co-Promoter Deals with blank Event Profit or Loss fields"

        def f13(df):
            def inner(row):
                # Backfill the split Profit/Loss fields from their combined
                # counterparts wherever they are blank.
                row['EventProfit__c'] = row['EventPL__c'] if pd.isnull(
                    row['EventProfit__c']) else row['EventProfit__c']
                row['EventLoss__c'] = row['EventPL__c'] if pd.isnull(
                    row['EventLoss__c']) else row['EventLoss__c']
                row['ExpenseAdjustmentProfit__c'] = row[
                    'ExpenseAdjustment__c'] if pd.isnull(
                        row['ExpenseAdjustmentProfit__c']
                    ) else row['ExpenseAdjustmentProfit__c']
                row['ExpenseAdjustmentLoss__c'] = row[
                    'ExpenseAdjustment__c'] if pd.isnull(
                        row['ExpenseAdjustmentLoss__c']
                    ) else row['ExpenseAdjustmentLoss__c']
                return row

            df = df.apply(inner, axis=1)
            session.update(df, mode='simple')

        df13.fix = f13
        df14 = src10.result().fillna('').query(
            "EventStatus__c != `Event__r.Status__c`")
        df14.desc = "EDT records with mismatched EventStatus__c"

        def f14(df):
            if len(df) > 0:
                df['EventStatus__c'] = df['Event__r.Status__c']
                session.add_bypass_settings()
                session.update(df, mode='simple')
                session.remove_bypass_settings()

        df14.fix = f14
        # dfs = [df2, df3, df4, df5, df6, df7, df8, df9, df10, df11]
        # Gather every dfN defined above by scanning locals() — fragile but
        # avoids keeping the explicit list in sync.
        dfs = [val for var, val in locals().items() if var.startswith('df')]
        pdh.to_excel({df.desc: df
                      for df in dfs if len(df) > 0},
                     "Touring Data Defects.xlsx")
        for df in dfs:
            if len(df) > 0:
                return_string += "{} has {} {}\n".format(
                    session.instance, len(df), df.desc)
        if do_fix and len(
            [df for df in dfs if len(df) > 0 and hasattr(df, 'fix')]) > 0:
            session.bypass_prod_operation_approval()
            session.add_bypass_settings()
            for df in dfs:
                if len(df) > 0 and hasattr(df, 'fix'):
                    print(f"FIX: {df.desc}")
                    df.fix(df)
            session.remove_bypass_settings()
    return return_string
from git import Repo import os import time import re import json from sys import argv from classes.obj_dict import ObjDict from classes.salesforce_api import Salesforce_API from classes.py_threading import ThreadPool threading = ThreadPool() # A "Fields" suffix will be added check_in_story = 'RMANZ-28' check_in_objects = [ 'Tour__c', 'TourLeg__c', 'TourDeal__c', 'TourArtistPayment__c', 'TourBonusDetail__c', 'TourDealRetroStepUpDetail__c', 'TourOnSale__c', 'Event__c', 'EventDateTime__c', 'EventOnSale__c', 'Deal__c', 'TicketScale__c', 'Deduction__c', 'LedgerEntry__c', 'LedgerEntryBreakout__c', 'AdPlan__c' ] dev_session = Salesforce_API('[email protected]') target_session = Salesforce_API('[email protected]') dev_descs = { obj: threading.new(dev_session.get_object_description, obj) for obj in check_in_objects } target_descs = {
# Ad-hoc script: run the tour-share check (check_tours) for a single
# hard-coded tour on one Salesforce instance.
from classes.salesforce_api import Salesforce_API
from classes.py_threading import ThreadPool, Thread
threading = ThreadPool()
import pandas as pd
import functions.pandas_helpers as pdh
########################### PARAMETERS ###########################
##################################################################
# sf = Salesforce_API('*****@*****.**')
sf = Salesforce_API('*****@*****.**')
# sf = Salesforce_API('*****@*****.**')
sf.default_mode = 'bulk'
# Async wrapper so queries issued through `select` run on the pool.
select = threading.run_async(sf.select)
# Maps TourTeamMember access levels to Share-object access levels.
access_map = {'Manage': 'Edit'}


def main():
    """Run check_tours against one specific Booking tour (debug helper).

    NOTE(review): check_tours is defined elsewhere in this module; here it is
    called with only the query string, unlike the (sf, threading, query)
    signature used by the sibling script — confirm which signature is current.
    """
    check_tours("""
        SELECT Id, BuyingGroup__c, TourSubmittedForApproval__c, OwnerId, Owner.Name, Owner.IsActive
        FROM Tour__c
        WHERE IsTouringApp__c = True
        AND RecordType.Name = 'Booking'
        AND Id = 'a1sM0000002Oc4YIAS'
        """)
    return
import time import random import json import html import csv import re import os import multiprocessing from pandas.core.indexes import multi from classes.salesforce_api import Salesforce_API from classes.sql_server_api import SQL_Server_API from functions.prompt import prompt from classes.py_threading import ThreadPool threading = ThreadPool() import pandas as pd import functions.pandas_helpers as pdh from classes.obj_dict import ObjDict # username = "******" # username = "******" # username = "******" # username = "******" username = "******" # username = "******" stageSQLFile = "./resources/EOS_FullPull_Stage.sql" querySQLFile = "./resources/EOS_FullPull_Query.sql"