def main():
    """Update the first 100 Venue accounts in SIT twice — once with the
    simple API and once with the bulk API — to compare the two update modes.
    """
    # Scratch org list removed (it was an unused local); retained for reference:
    # "[email protected]" / "[email protected]" / "[email protected]"
    # lne = Salesforce_API('*****@*****.**')
    # uat = Salesforce_API('*****@*****.**')
    sit = Salesforce_API('*****@*****.**')
    venues = sit.select("""
        SELECT Id, Name, VenueName__c, RecordTypeId, BillingCity
        FROM Account
        WHERE RecordType.Name = 'Venue'
        LIMIT 100
    """)
    # Same payload through both modes so the resulting jobs can be compared.
    job1 = sit.update('Account', venues, 'Id', mode='simple')
    job2 = sit.update('Account', venues, 'Id', mode='bulk')
    return
def main_uk():
    """Run the UK user sync from the EOS prod database into the UAT org.

    The historical account/contact migration and record-cloning steps are
    retained below as comments; only uk_users() is currently active.
    """
    uat = Salesforce_API("*****@*****.**")
    lne = Salesforce_API("*****@*****.**")
    sql = SQL_Server_API(eos_prod_creds)
    # lneaccounts = lne.select("SELECT {} FROM Account WHERE EOSId__c <> NULL".format(get_fields(lne, 'Account')))
    # lnecontacts = lne.select("SELECT {} FROM Contact WHERE EOSId__c <> NULL".format(get_fields(lne, 'Contact')))
    # uat.add_bypass_settings()
    # uat.upsert('Account', lneaccounts, 'EOSId__c')
    # uat.upsert('Contact', lnecontacts, 'EOSId__c')
    # uat.remove_bypass_settings()
    # uk_venue_contacts(sql, uat)
    uk_users(sql, uat)
    # uat.clone_records(
    #     "SELECT Id FROM Account WHERE EOSId__c <> NULL",
    #     source_session=lne, target_session=uat)
    # uat.clone_records(
    #     "SELECT Id FROM Contact WHERE EOSId__c <> NULL",
    #     source_session=lne, target_session=uat)
    return
def main():
    """Export likely-orphaned touring child records (rows with no Event__c)
    to an Excel workbook, one sheet per object.

    Each child object is queried on its own thread; rows whose
    SourceSystemId__c starts with that object's own key prefix are flagged
    as likely clones (a Salesforce Id where a source-system id belongs).
    """
    session = Salesforce_API("*****@*****.**")
    query = """
        SELECT Id, CreatedDate, CreatedBy.Name, SourceSystemId__c
        FROM {}
        WHERE IsTouringApp__c = True AND Event__c = NULL
        AND CreatedDate >= 2020-09-01T00:00:00Z
        ORDER BY CreatedDate DESC
    """
    objects = [
        'EventDateTime__c',
        'Deal__c',
        'TicketScale__c',
        'Deduction__c',
        'LedgerEntry__c',
        'LedgerEntryBreakout__c',
    ]
    data = {}
    # NOTE(review): bare attribute access — presumably forces the lazy bulk
    # client to initialize before the worker threads start; confirm whether
    # it is still required.
    session.bulk
    for object_name in objects:
        data[object_name] = threading.new(session.select, query.format(object_name))
    for object_name, future in data.items():
        rows = future.result()
        prefix = session.get_object_description(object_name).keyPrefix
        for row in rows:
            if str(row.SourceSystemId__c)[:3] == prefix:
                row.IsLikelyClone = True
        data[object_name] = rows
    pdh.to_excel(data, 'Orphaned child records {}.xlsx'.format(session.instance))
    return
def example5():
    """Clone one tour (by record Id) from the LNE org into SIT."""
    source = Salesforce_API('*****@*****.**')
    target = Salesforce_API('*****@*****.**')
    source.clone_tours(
        "a1s1Q000006GzYWQA0",
        source_session=source,
        target_session=target,
    )
def main():
    """Audit TicketScale__c.Label__c values against each Tour Leg's configured
    Type->Label map (the TicketScalePriceLevels__c JSON), patch mismatches in
    memory, report all issues to Excel, then push the corrected ticket scales.
    """
    # Sessions for each environment; only `lne` (aliased as `session`) is used.
    psdev = Salesforce_API('*****@*****.**')
    sit = Salesforce_API('*****@*****.**')
    lne = Salesforce_API('*****@*****.**')
    session = lne
    session.save_record_snapshot_on_select = True
    tours = session.select("SELECT Id, TourName__c FROM Tour__c WHERE LastModifiedDate >= LAST_WEEK", contentType='JSON')
    tourlegs = session.select("""
        SELECT Id, Tour__c, LegName__c, Order__c, TicketScalePriceLevels__c
        FROM TourLeg__c
        WHERE Tour__c IN ('{}')
        ORDER BY Tour__c, Order__c ASC
    """.format("','".join([item.Id for item in tours])), contentType='JSON')
    ticketscales = session.select("""
        SELECT Id, Event__r.TourLeg__r.Tour__r.TourTitle__c, Event__r.TourLeg__r.LegName__c,
        Event__r.EventName__c, Event__r.TourLeg__c, Event__c, Type__c, Label__c
        FROM TicketScale__c
        WHERE Event__r.TourLeg__r.Tour__c IN ('{}')
    """.format("','".join([item.Id for item in tours])), contentType='JSON')
    issues = []
    for tourleg in tourlegs:
        # Ticket scales belonging to this leg's events.
        tourleg.ticketscales = [item for item in ticketscales if item.Event__r.TourLeg__c == tourleg.Id]
        # Parse the leg's Type->Label JSON config, if present.
        labels = None if tourleg.TicketScalePriceLevels__c is None else ObjDict.deepclone(json.loads(tourleg.TicketScalePriceLevels__c))
        label_map = {item['type']: item for item in labels} if labels is not None else None
        for ts in tourleg.ticketscales:
            # Config entry for this scale's Type, or None when unmapped.
            config = label_map[ts.Type__c] if labels is not None and ts.Type__c in label_map else None
            # Keep the original label for the audit report before any patching.
            ts.tsOriginalLabel = ts.Label__c
            ts.tourLegLabel = config.label if config is not None else None
            if config is not None and config.type != config.label and ts.Label__c is None:
                # Custom label is configured but the scale has none: backfill it.
                issues.append({'ts': ts, 'issue': "Ticket Scale missing label when custom label is set"})
                ts.Label__c = config.label
            elif config is not None and ts.Label__c != config.label and ts.Label__c is not None:
                # Scale's label disagrees with the configured one: overwrite it.
                issues.append({'ts': ts, 'issue': "Ticket Scale label doesn't match Tour Leg Type-Label"})
                ts.Label__c = config.label
            elif config is None and ts.Label__c is not None:
                # No config at all, yet the scale carries a label: report only.
                if ts.Label__c == ts.Type__c:
                    issues.append({'ts': ts, 'issue': "Tour Leg has no Type-Label maps, but Ticket Scale has a value in Label__c that matches Type__c"})
                else:
                    issues.append({'ts': ts, 'issue': "Tour Leg has no Type-Label maps, but Ticket Scale has a custom value in Label__c that is different than Type__c"})
                    # ts.Label__c = None # Need to use simple api
        tourleg.labels = labels
        pass
    print('\n'.join([item['issue'] for item in issues]))
    if len(issues) > 0:
        pdh.multiple_df_to_excel({'Sheet1': pd.DataFrame(pdh.flatten(issues))}, 'Data Issues - TS Label__c.xlsx')
    # Push the in-memory label fixes back, bypassing automation.
    session.add_bypass_settings()
    session.update(ticketscales)
    session.remove_bypass_settings()
    return
def main():
    """Undelete one tour in production (the matching delete call is kept
    commented out for reference).
    """
    sf = Salesforce_API('*****@*****.**')
    sf.bypass_prod_operation_approval()
    # sf.helpers.delete_tours("SELECT Id FROM Tour__c WHERE Id = 'a1s1Q000005MunrQAC'")
    sf.helpers.undelete_tours("SELECT Id FROM Tour__c WHERE Id = 'a1s1Q000006H09CQAS'")
    return
def main():
    """Re-upsert one Expenses LedgerEntryBreakout__c row for a specific event,
    after stripping system/audit fields the simple API will not accept.
    """
    session = Salesforce_API(username)
    records = session.select("""
        SELECT *
        FROM LedgerEntryBreakout__c
        WHERE Event__c = 'a1QS000000Ag6LbMAJ' AND ParentRecordTypeName__c = 'Expenses'
        LIMIT 1
    """, return_type=None, mode='simple')
    # records.OfferRate__c = records.OfferRate__c + 1
    # System / read-only fields cannot be written back; drop them before upsert
    # (replaces ten repetitive `del` statements with one loop).
    system_fields = (
        'SourceSystemId__c', 'LastModifiedDate', 'IsDeleted', 'MayEdit',
        'IsLocked', 'SystemModstamp', 'Name', 'CreatedById', 'CreatedDate',
        'LastModifiedById',
    )
    for field in system_fields:
        del records[0][field]
    session.add_bypass_settings()
    session.upsert('LedgerEntryBreakout__c', records, 'Id', batch_size=5, mode='simple')
    session.remove_bypass_settings()
def main():
    """Deploy destructive changes — removing the TBD-count Apex class, its
    test class, and the TBD_Events FlexiPage — to every org in the
    module-level `usernames` list.
    """
    for username in usernames:
        session = Salesforce_API(username)
        session.save_record_snapshot_on_select = True
        destructive_xml = session.ant.build_destructive_changes_xml({
            'ApexClass': ['LNE_TBDCountUpdateClass', 'LNE_TBDCountUpdateClass_Test'],
            'FlexiPage': ['TBD_Events'],
        })
        print('\nDeploying destructive changes to {}'.format(session.instance))
        session.ant.deploy_destructive_changes(destructive_xml)
    return
def main():
    """Find TicketScale__c rows modified this year whose projected paid
    tickets exceed sellable capacity, and export them to Excel.
    """
    session = Salesforce_API("*****@*****.**")
    scales = session.select("""
        SELECT Id, Event__r.EventTitle__c, Capacity__c, SellableCapacity__c,
        ProjectedPaidTickets__c, LastModifiedDate
        FROM TicketScale__c
        WHERE LastModifiedDate >= THIS_YEAR
        ORDER BY Event__r.EventYear__c, Event__c
    """, return_type='dataframe')
    bad_rows = scales.query("SellableCapacity__c < ProjectedPaidTickets__c")
    pdh.to_excel(bad_rows, 'Ticket Scales with Projected greater than Sellable Capacity.xlsx')
    return
def main():
    """Diff current EOS offer data against the 'Prod 2021-08-16' backup for
    every tour known to Salesforce, exporting only non-empty diffs.
    """
    sf = Salesforce_API('*****@*****.**')
    sql = SQL_Server_API('EOS-prod')
    # sql = SQL_Server_API('EOS-pre-prod')
    offer_ids = sf.select(
        'SELECT EOSId__c FROM Tour__c WHERE EOSId__c <> NULL',
        return_type='dataframe',
    ).EOSId__c.astype('int').tolist()
    # offer_ids = [54441, 57933, 57965, 58400, 58737, 58050, 57930, 57263, 53728, 55808]
    result, bycolumn = uk.diff_eos_offers_from_backup(sql, 'Prod 2021-08-16', offer_ids, True)
    # Keep only datasets with differences, labelling each with its row count.
    result = {f'{key} ({len(val)})': val for key, val in result.items() if len(val) > 0}
    bycolumn = {
        key: {f'{f} ({len(df)})': df for f, df in dfs.items() if len(df) > 0}
        for key, dfs in bycolumn.items()
    }
    pdh.to_excel(result, 'UK Tour Data Changes.xlsx')
    for key, dfs in bycolumn.items():
        if len(dfs) > 0:
            pdh.to_excel(dfs, f'UK Tour Data Changes By Column - {key}.xlsx')
    return
def get_new_sessions():
    """Load credentials.json and (re)build a Salesforce session per username
    into the module-level `sessions` dict, enabling record snapshots on each.
    """
    # Context manager closes the credentials file promptly; the original
    # open() call was never closed (resource leak).
    with open("./credentials.json", "r") as f:
        credentials = json.load(f)
    sessions.update({
        item['username']: Salesforce_API(item)
        for item in credentials
    })
    for username, session in sessions.items():
        session.save_record_snapshot_on_select = True
def main():
    """Create APAC/UK touring public-group membership records in each active
    org from the scratch username list.
    """
    usernames = [
        None
        # , "[email protected]"
        # , "[email protected]"
        # , "[email protected]"
        # , "*****@*****.**"
        # , "*****@*****.**"
        # , "*****@*****.**"
        , "*****@*****.**"
    ]
    sessions = [Salesforce_API(item) for item in usernames if item is not None]
    # Each entry: (group developer name, (member type, member developer name)).
    members = [
        ('AustraliaNZTouringPublic', ('RoleAndSubordinatesInternal', 'Australia')),
        ('AustraliaNZTouringPublic', ('RoleAndSubordinatesInternal', 'NewZealand')),
        ('SecretSoundsTouringPublic', ('RoleAndSubordinatesInternal', 'SecretSounds')),
        ('NACTouringPublic', ('Regular', 'TouringOffice')),
        ('UKIrelandTouringPublic', ('RoleAndSubordinatesInternal', 'UKIreland')),
        ('AustraliaNZDivision', ('RoleAndSubordinatesInternal', 'AustraliaNZ')),
        ('AustraliaOffice', ('RoleAndSubordinatesInternal', 'Australia')),
        ('NewZealandOffice', ('RoleAndSubordinatesInternal', 'NewZealand')),
        ('SecretSoundsOffice', ('RoleAndSubordinatesInternal', 'SecretSounds')),
    ]
    for session in sessions:
        session.helpers.create_group_members(members)
def main():
    """Rename legacy TouringCategory__c values on recent non-UK ledger entry
    breakouts ('Production' -> 'Production Related', 'Venue Cost' -> 'Venue
    Related') in each active org.
    """
    usernames = [
        None
        # , "[email protected]"
        # , "[email protected]"
        # , "[email protected]"
        # , "*****@*****.**"
        # , "*****@*****.**"
        # , "*****@*****.**"
        , "*****@*****.**"
    ]
    sessions = [Salesforce_API(item) for item in usernames if item is not None]
    category_rename = {
        'Production': 'Production Related',
        'Venue Cost': 'Venue Related',
    }
    for session in sessions:
        records = session.select("""
            SELECT Id, TouringCategory__c
            FROM LedgerEntryBreakout__c
            WHERE TouringCategory__c IN ('Production', 'Venue Cost')
            AND Event__r.TourLeg__r.Tour__r.AppScope__c <> 'UK'
            AND CreatedDate >= LAST_YEAR
        """)
        for record in records:
            record['TouringCategory__c'] = category_rename[record['TouringCategory__c']]
        session.add_bypass_settings()
        session.update(records, mode='bulk')
        session.remove_bypass_settings()
def main():
    """Pull the TOU_EventCloningLogic Apex class from PSDEV and deploy it to SIT."""
    # Example of deploying local files directly:
    # f = [
    #     '/Users/daniel.hicks_1/Documents/Tower/liveNationSFDC PSDEV/src/classes/LNE_EventMetrics_T.cls'
    #     ,'/Users/daniel.hicks_1/Documents/Tower/liveNationSFDC PSDEV/src/classes/LNE_EventMetrics_T.cls-meta.xml'
    # ]
    # session.ant.deploy(f)
    psdev = Salesforce_API("*****@*****.**")
    sit = Salesforce_API("*****@*****.**")
    package_xml = psdev.ant.build_package_xml({'ApexClass': ['TOU_EventCloningLogic']})
    pull = psdev.ant.retrieve(package_xml)
    results = pull.get_results('binaryfilelistfullpath')
    # Deploy everything that was retrieved except the package manifest itself.
    files_to_deploy = [path for path in results.keys() if 'package.xml' not in path]
    sit.ant.deploy(files_to_deploy)
    return
def main2():
    """Reactivate every inactive EOS-linked venue account (bulk update in
    batches of 500).
    """
    session = Salesforce_API("*****@*****.**")
    venues = session.select("""
        SELECT Id, Name
        FROM Account
        WHERE Status__c = 'Inactive' AND EOSId__c <> NULL AND RecordType.Name = 'Venue'""",
        return_type='dataframe')
    venues['Status__c'] = 'Active'
    session.add_bypass_settings()
    session.update(venues, mode='bulk', batch_size=500)
    session.remove_bypass_settings()
def main(username):
    """Template script: run a SOQL query against one org and export the
    result to 'Query Result.xlsx'.
    """
    session = Salesforce_API(username)
    query = """
        SELECT Id
        FROM Account
    """
    result = session.select(query)
    # For a bulk query, use:
    # result = session.select(query, mode='bulk')
    sheets_data = {
        'Sheet1': result,
        'Sheet2': pd.DataFrame(),  # can export multiple sheets in the same excel file
    }
    pdh.to_excel(sheets_data, 'Query Result.xlsx')
def main():
    """Backfill sellout/projected gross sales on touring events and default
    the artist-deal split fields, exporting before/after snapshots to Excel.
    """
    usernames = [
        None
        # , "[email protected]"
        # , "[email protected]"
        # , "[email protected]"
        # , "*****@*****.**"
        # , "*****@*****.**"
        # , "*****@*****.**"
        , "*****@*****.**"
    ]
    sessions = [Salesforce_API(item) for item in usernames if item is not None]
    for session in sessions:
        session.save_record_snapshot_on_select = True
        events = session.select("""
            SELECT Id, ShowCount__c,
                (SELECT GrossSales__c, ProjectedGrossSales__c
                 FROM TicketScales__r WHERE StageType__c = 'Plan')
            FROM Event__c
            WHERE IsTouringApp__c = True AND TourLeg__c <> NULL
        """, mode='simple')
        for event in events:
            plan_scales = event.TicketScales__r
            event.SelloutGrossSales__c = event.ShowCount__c * sum(item.GrossSales__c for item in plan_scales)
            # BUG FIX: previously this summed GrossSales__c a second time; the
            # subquery selects ProjectedGrossSales__c specifically for this field.
            event.ProjectedGrossSales__c = event.ShowCount__c * sum(item.ProjectedGrossSales__c for item in plan_scales)
        deals = session.select("""
            SELECT Id, Event__r.TourLeg__r.Tour__r.ArtistAgency__c, Event__r.TourLeg__r.Tour__r.ArtistAgent__c,
                Type__c, DealType__c, BackendPercent__c, SplitPercentage__c, SplitBackendPercent__c
            FROM Deal__c
            WHERE Event__c <> NULL AND Event__r.IsTouringApp__c = True
            AND Event__r.TourLeg__c <> NULL AND RecordType.Name = 'Artist'
        """, return_type='dataframe', mode='simple')
        pdh.to_excel({'Events': events, 'Deals': deals}, 'Inspect Deal data backfill BEFORE.xlsx')
        # Deal defaults: supports are flat deals; primary headliners take 100%
        # of the split and inherit their backend percent.
        deals['IncludeToBeSharedAmount__c'] = True
        deals.loc[deals['Type__c'] == 'Support', 'DealType__c'] = 'Flat'
        headliner = deals['Type__c'] == 'Primary Headliner'
        deals.loc[headliner, 'SplitPercentage__c'] = 100
        deals.loc[headliner, 'SplitBackendPercent__c'] = deals[headliner]['BackendPercent__c']
        deals.fillna('', inplace=True)
        pdh.to_excel({'Events': events, 'Deals': deals}, 'Inspect Deal data backfill.xlsx')
        session.add_bypass_settings()
        # job1 = session.update(events, mode='bulk')
        job2 = session.update(deals, mode='bulk', batch_size=1000)
        threading.wait()
        session.remove_bypass_settings()
    return
def main():
    """Run fuzzy duplicate analysis on Account records, one record type at a
    time, and export match candidates to 'Rome Duplicate Analysis.xlsx'.
    """
    usernames = [
        # "[email protected]",
        # "[email protected]",
        # "[email protected]",
        # "*****@*****.**",
        # "*****@*****.**",
        # "*****@*****.**",
        "*****@*****.**",
    ]
    sessions = [Salesforce_API(item) for item in usernames if item is not None]
    for session in sessions:
        accounts = session.select("""
            SELECT Id, RecordType.Name, Name, VenueName__c, BillingCity, BillingCountry,
                SourceSystemId__c, EOSId__c, CreatedBy.Name
            FROM Account
        """, return_type='dataframe')
        output = {}
        recordtypes = [
            'Venue',
            # 'Co-Promoter',
            # 'Artist',
            # 'Agency',
            # 'Ticket Agency',
        ]

        def wipe_same_record(row):
            # Blank any match columns where the record matched itself.
            for i in range(1, 4):
                if row[f'm{i}_Id'] == row['Id']:
                    for col in row.keys():
                        if col.startswith(f'm{i}_'):
                            row[col] = ''
            return row

        for r in recordtypes:
            filtered_accounts = accounts.query("`RecordType.Name` == @r")
            print(f'Processing {len(filtered_accounts)} {r} records')
            # Venues match on name + city; all other types match on Name alone.
            matching_fields = ['VenueName__c', 'BillingCity'] if r == 'Venue' else ['Name']
            df = pdh.compare_datasets_for_merge(
                filtered_accounts,
                filtered_accounts,
                matching_fields,
                fields_to_return=['Id', 'BillingCity', 'EOSId__c', 'SourceSystemId__c'] + matching_fields,
                num_matches=4,
            )
            df = df.apply(wipe_same_record, axis=1)
            df.sort_values(by=['m1_%', 'm2_%', 'm3_%', 'm4_%'], ascending=False, inplace=True)
            # df.drop(columns=[c for c in df.columns.values if c.startswith('m1_')], inplace=True)
            output[r] = df
        pdh.to_excel(output, 'Rome Duplicate Analysis.xlsx')
    return
def get_sessions(usernames, sf_sessions, sql_sessions):
    """Resolve a (Salesforce, SQL) session pair for each (sf_username, sql_name).

    Cached sessions from sf_sessions / sql_sessions are reused when present;
    a new API session is constructed only on a cache miss. Falsy names map
    to None in the corresponding slot.

    BUG FIX: the previous version passed `Salesforce_API(...)` /
    `SQL_Server_API(...)` as the default argument to dict.get(), which Python
    evaluates eagerly — so a brand-new session (with its login round-trip)
    was created on every call even when a cached session existed.
    """
    sf_sessions = sf_sessions or {}
    sql_sessions = sql_sessions or {}
    output = []
    for sf_username, sql_name in usernames:
        if sf_username:
            sf = sf_sessions[sf_username] if sf_username in sf_sessions else Salesforce_API(sf_username)
        else:
            sf = None
        if sql_name:
            sql = sql_sessions[sql_name] if sql_name in sql_sessions else SQL_Server_API(sql_name)
        else:
            sql = None
        output.append((sf, sql))
    return output
def main():
    """Print, for every configured org, whether its organization Id matches
    the target production org Id.
    """
    target_org_id = '00D0q0000001u1T'
    sessions = {
        item['username']: Salesforce_API(item)
        for item in Salesforce_API.instance_credentials.values()
    }
    for sf in sessions.values():
        try:
            print(sf.instance, sf.organization_id, sf.organization_id == target_org_id)
        except Exception:
            # Best-effort scan: skip orgs we cannot reach. (Previously a bare
            # `except:` which also swallowed KeyboardInterrupt/SystemExit.)
            pass
def main(username):
    """Add or remove bypass settings for one org, driven by the module-level
    `action` flag ('add' adds; anything else removes).
    """
    session = Salesforce_API(username)
    # Suppress the API's own chatter; this script prints its own status line.
    session.print_messages = False
    if action == "add":
        session.add_bypass_settings()
        print("Added Bypass Settings for {}".format(username))
    else:
        session.remove_bypass_settings()
        print("Removed Bypass Settings for {}".format(username))
def main():
    """Download the 'Rome Touring Object Model' workbook and convert each
    sheet that maps to a known SObject into a JSON file in the repo,
    normalizing multi-line cell values for readable diffs.
    """
    sf = Salesforce_API('*****@*****.**')
    objects = sf.get_org_description().sobjects
    # Map "sheet-friendly" names (suffixes stripped) back to API names.
    object_names = {
        o.name.replace('__c', '').replace('__mdt', ''): o.name for o in objects
    }
    # url = 'https://lneallaccess-my.sharepoint.com/personal/mike_wishner_lyv_livenation_com/_layouts/15/download.aspx?UniqueId=e52013f7%2D6be8%2D4228%2Dbc31%2D6adedfa4184c'
    r = requests.get(metadata_reference_url, allow_redirects=True)
    file_name = f"{repo_path}{target_save_path}Rome Touring Object Model.xlsx"
    # Context manager replaces the manual open/write/close triple.
    with open(file_name, 'wb') as write_file:
        write_file.write(bytearray(r.content))
    xlsx = pd.ExcelFile(file_name)
    sheets = xlsx.sheet_names
    # Only keep sheets whose (space-stripped) name matches a known object.
    sheet_objects = {
        sheet: object_names[sheet.replace(' ', '')]
        for sheet in sheets if sheet.replace(' ', '') in object_names
    }
    data = {
        object_name: pd.read_excel(file_name, sheet)
        for sheet, object_name in sheet_objects.items()
    }
    for object_name, df in data.items():
        records = df.where(pd.notnull(df), None).to_dict('records')
        for record in records:
            for key, val in record.items():
                # Pad multi-line values with surrounding newlines so the
                # \n-unescaping below yields readable JSON blocks.
                if type(val) is str and '\n' in val:
                    record[key] = '\n' + val + '\n'
        file_path = f'{repo_path}{target_save_path}{object_name}.json'
        json_text = json.dumps(records, indent=2).replace('\\n', '\n')
        # BUG FIX: the file was previously written twice back-to-back.
        with open(file_path, 'w+') as f:
            f.write(json_text)
    return None
def main():
    """Create UK public-group membership records in each active org."""
    usernames = [
        None
        # , "[email protected]"
        # , "[email protected]"
        # , "[email protected]"
        # , "*****@*****.**"
        # , "*****@*****.**"
        # , "*****@*****.**"
        , "*****@*****.**"
    ]
    sessions = [Salesforce_API(item) for item in usernames if item is not None]
    # Each entry: (group developer name, (member type, member developer name)).
    # Developer names correspond to group labels such as 'Live Nation UK Music
    # Team', 'DF Concerts Team', 'Cuffe & Taylor Team', 'Metropolis Music Team',
    # 'Soundcrash Team', 'Festival Republic Team', 'Cream Team'.
    members = [
        ('LiveNationUKMusicTeam', ('RoleAndSubordinatesInternal', 'LiveNationUK')),
        ('LiveNationUKComedyTeam', ('RoleAndSubordinatesInternal', 'LiveNationUK')),
        ('LiveNationUKBowderyTeam', ('RoleAndSubordinatesInternal', 'LiveNationUK')),
        ('DFConcertsTeam', ('RoleAndSubordinatesInternal', 'DFConcerts')),
        ('CuffeTaylorTeam', ('RoleAndSubordinatesInternal', 'CuffeTaylor')),
        ('MetropolisMusicTeam', ('RoleAndSubordinatesInternal', 'MetropolisMusic')),
        ('SoundcrashTeam', ('RoleAndSubordinatesInternal', 'Soundcrash')),
        ('FestivalRepublicTeam', ('RoleAndSubordinatesInternal', 'FestivalRepublic')),
        ('CreamTeam', ('RoleAndSubordinatesInternal', 'Cream')),
    ]
    for session in sessions:
        session.create_group_members(members)
def main():
    """Poll one event's ticket scales on a fixed frequency and log any data
    changes via a DataChangeMonitor.
    """
    sf = Salesforce_API('*****@*****.**')

    def fetch_ticket_scales():
        df = sf.select(
            "SELECT * FROM TicketScale__c WHERE Event__c = 'a1Q7X00000GXoQOUA1'",
            return_type='dataframe', mute=True)
        # Audit columns change on every save and would register as noise.
        df.drop(columns=['LastModifiedDate', 'SystemModstamp'], inplace=True)
        return df

    monitor = DataChangeMonitor()
    monitor.set_frequency(5)
    monitor.add_job("Test Ticket Scales Changes", "Id", fetch_ticket_scales)
    monitor.set_log_destination(None, 'Test Change Monitor')
    monitor.start()
    return
def main():
    """Export ROME master data — active venues, artists, co-promoters, and
    selected record types — to one Excel workbook, one sheet per dataset.
    """
    session = Salesforce_API(username)
    queries = {
        'Venues': """
            SELECT Id, VenueName__c, BillingCity, BillingState, BillingCountry
            FROM Account
            WHERE RecordType.Name = 'Venue' AND Status__c = 'Active'
            AND BillingCountry IN ('United States','Canada')
            ORDER BY BillingCountry DESC, BillingState, BillingCity, VenueName__c
        """,
        'Artists': """
            SELECT Id, Name FROM Account
            WHERE RecordType.Name = 'Artist' AND Status__c = 'Active'
            ORDER BY Name
        """,
        'Co-Promoters': """
            SELECT Id, Name FROM Account
            WHERE RecordType.DeveloperName = 'CoPromoter' AND Status__c = 'Active'
            ORDER BY Name
        """,
        'Record Types': """
            SELECT Id, SobjectType, Name FROM RecordType
            WHERE SobjectType IN('Account','Event__c','LedgerEntry__c')
            ORDER BY SobjectType, Name
        """,
    }
    datasets = {sheet_name: session.select(soql) for sheet_name, soql in queries.items()}
    pdh.multiple_df_to_excel(datasets, 'ROME Master Data.xlsx')
def main():
    """Create Salesforce users in the selected orgs from the 'Rome UK Users'
    spreadsheet, optionally resolving each user's EOS Id from the EOS SQL
    database — primarily by email, falling back to a unique-name match.
    """
    # Picklist Error handling: {'statusCode': 'INVALID_OR_NULL_FOR_RESTRICTED_PICKLIST', 'message': 'Language: bad value for restricted picklist field: en_AU', 'fields': ['LanguageLocaleKey']}
    usernames = [
        # "[email protected]",
        # "[email protected]",
        # "[email protected]",
        # "*****@*****.**",
        # "*****@*****.**",
        "*****@*****.**",
        # "*****@*****.**",
    ]
    sessions = [Salesforce_API(item) for item in usernames if item is not None]
    # users = pd.read_excel('/Users/daniel.hicks_1/Documents/Rome/Rome Downloads/Import Sandbox Test Users.xlsx').query("Filter == True")
    # users = pd.read_excel('/Users/daniel.hicks_1/Documents/Rome/Rome Downloads/Australia Master Data/PROD Release Users.xlsx').query("Filter == True")
    users = pd.read_excel(
        '/Users/daniel.hicks_1/Documents/Rome/Rome Downloads/UK Master Data/Rome UK Users.xlsx'
    ).query("Filter == True")
    if prompt("Look up EOS Ids for these users?", boolean=True):
        sql = SQL_Server_API(sql_creds)
        emails_str = "', '".join(
            users['Email'].str.strip().str.lower().tolist())
        # Pull EOS users matching any spreadsheet email; rows with NULL email
        # are included so they can be matched by name below.
        eos_users = (pd.DataFrame(
            sql.query(
                f"SELECT Id, Name, Email FROM IEmsUser WHERE Email IS NULL OR LOWER(Email) IN ('{emails_str}')"
            )).assign(Email=lambda df: df['Email'].str.strip().str.lower()))
        # Primary lookup: normalized email -> EOS row.
        eos_email_map = eos_users.dropna(
            subset=['Email']).set_index('Email').to_dict('index')
        # Fallback lookup: name -> EOS row, but only for names unique among
        # the email-less rows (drop_duplicates keep=False drops all dupes).
        eos_name_map = eos_users.fillna('').query(
            "Email == ''").drop_duplicates(
                subset=['Name'], keep=False).set_index('Name').to_dict('index')

        def get_eos_id(row):
            # Email match takes precedence; fall back to unique-name match.
            email, name = str(row['Email']).strip().lower(), str(
                row['Name']).strip()
            matching_record = eos_email_map.get(email,
                                                eos_name_map.get(name, None))
            if matching_record:
                return matching_record['Id']
            return None

        if len(eos_users) > 0:
            users['EOSId__c'] = users.apply(get_eos_id, axis=1)
    for session in sessions:
        session.create_users(
            users,
            defaults={
                # 'Country': 'United Kingdom'
                # , 'DEFAULTCURRENCYISOCODE': 'GBP'
                # , 'TIMEZONESIDKEY': 'GMT'
            })
def main():
    """Copy UKCategory__c into TouringCategory__c on UK-tour ledger entry
    breakouts wherever the two values differ, then push the updates.
    """
    session = Salesforce_API('*****@*****.**')
    rows = session.select("""SELECT Id, TouringCategory__c, UKCategory__c
        FROM LedgerEntryBreakout__c
        WHERE IsTouringApp__c = True AND UKCategory__c <> NULL
        AND Event__r.TourLeg__r.Tour__c IN (SELECT Id FROM Tour__c WHERE RecordTypeId = '0121Q000000oInEQAU')
    """, mode='bulk')
    changed = 0
    for row in rows:
        if row.TouringCategory__c != row.UKCategory__c:
            changed += 1
            row.TouringCategory__c = row.UKCategory__c
    print(f'Updated {changed} records')
    session.add_bypass_settings()
    session.update(rows)
    session.remove_bypass_settings()
def main():
    """Re-run the financial calculator for every non-business-plan event on
    one tour, bypassing the prod-operation approval prompt for the select.
    """
    session = Salesforce_API("*****@*****.**")
    session._prod_operation_approval_is_bypassed = True
    records = session.select("""
        SELECT Id, CreatedBy.Name
        FROM Event__c
        WHERE TourLeg__r.Tour__c = 'a1s7X000000UvBfQAK' AND BusinessPlanOption__c = False
        ORDER BY TourLegOrder__c desc
    """, mode='simple')
    # Previously-used filters, kept for reference:
    # AND CreatedBy.Name IN ('Joe Giuliano')
    # AND CreatedDate >= THIS_YEAR
    # # , 'Alex Eldridge', 'Steve Ackles', 'Alex Maxwell', 'Jake Balbes', 'Erik Kammerer'
    session.remove_bypass_settings()
    session.run_financial_calculator(records, 1, retry_errors=True)
    return
def main():
    """Reconcile Rome venue accounts against EOS venues on the staging SQL
    database, fixing identical names that carry different EOS ids.
    """
    sf = Salesforce_API("*****@*****.**")
    sql = SQL_Server_API('EOS-stage')
    fix_identical_names_with_different_ids(sf, sql)
    # Earlier name-merge duplicate check, kept for reference:
    # romevenues = sf.select("SELECT Id, VenueName__c, BillingCity, EOSId__c FROM Account WHERE RecordType.Name = 'Venue'", return_type='dataframe')
    # eosvenues = pd.DataFrame(sql.query("SELECT Id, Name FROM Venue"))
    # romevenues['Name'] = romevenues['VenueName__c'] + ' (' + romevenues['BillingCity'] + ')'
    # eosvenues['EOSId__c'] = 'Venue-' + eosvenues['Id'].astype(str)
    # allvenues = romevenues.merge(eosvenues, on='Name', how='outer', suffixes=['_R','_E'], indicator=True)
    # allvenues['IsDuplicate'] = allvenues.duplicated('Name')
    # if len(allvenues) > 0:
    #     print(allvenues[allvenues['IsDuplicate']==True])
    #     print('^ Duplicates')
    return
def main():
    """Pull the five most recently modified copies of the 'Tour Events with
    Artist, Venue, and EDTs' report (plus all report types), then deploy the
    reports together with only the report types they actually use to UAT.
    """
    reports = session.select("""
        SELECT Id, OwnerId, Owner.Name, FolderName, CreatedDate, CreatedById, CreatedBy.Name,
            LastModifiedDate, LastModifiedById, LastModifiedBy.Name, IsDeleted, Name, Description,
            DeveloperName, NamespacePrefix, LastRunDate, SystemModstamp, Format, LastViewedDate, LastReferencedDate
        FROM Report
        WHERE Name = 'Tour Events with Artist, Venue, and EDTs'
        ORDER BY SystemModstamp DESC
    """, mode='simple')
    reportstopull = reports[0:5]
    meta = session.ant.retrieve(session.ant.build_package_xml({
        'Report': [item.FolderName + '/' + item.DeveloperName for item in reportstopull],
        'ReportType': ['*'],
        # 'ReportType': ['Deal_with_Event_Info__c'],
    }))
    parsed = meta.get_results('parsed')
    binary = meta.get_results('binaryfilelist')
    # Collect the report types referenced by the pulled reports so unused
    # types can be excluded from the deploy payload.
    used_report_types = set()
    for folder in parsed.reports:
        for reportname, report in parsed.reports[folder].items():
            used_report_types.add(report.reportType)
    itemstopush = {
        key: val
        for key, val in binary.items()
        if 'package.xml' not in key and (
            any('reportTypes/' + name.replace('__c', '') in key for name in used_report_types)
            or '/reports/' in key
        )
    }
    uat = Salesforce_API('*****@*****.**')
    uat.ant.deploy(itemstopush)