def sobjects(self):
    """Write the API name of every sobject that passes ``check_sobject``
    to /work/sobjects.txt, one name per line.

    Credentials are read from the ``env`` mapping.
    """
    client = Salesforce(
        username=env['SALESFORCE_USER'],
        password=env['SALESFORCE_PASSWORD'],
        security_token=env['SALESFORCE_TOKEN'],
    )
    with open('/work/sobjects.txt', 'w') as outfile:
        # tqdm gives a progress bar over the full describe listing.
        for descriptor in tqdm(client.describe()["sobjects"]):
            name = descriptor['name']
            if check_sobject(client, name):
                outfile.write(name + '\n')
def refresh(self, force=False):
    """
    Refreshes this token if the session has expired.

    :param force: Refresh this token even if the session hasn't expired
    """
    def _reconnect():
        # Refresh the stored token, then hand back a brand-new client.
        auth.refresh_token(self)
        return Salesforce(instance_url=self.instance_url,
                          session_id=self.access_token,
                          sandbox=self.is_sandbox)

    if force:
        return _reconnect()
    try:
        # Probe the existing session with a describe() call.
        connection = Salesforce(instance_url=self.instance_url,
                                session_id=self.access_token,
                                sandbox=self.is_sandbox)
        connection.describe()
    except (SalesforceExpiredSession, SalesforceGeneralError):
        return _reconnect()
    else:
        return connection
def returnsObject(self):
    """Return a list of {name, custom, label} dicts, one per sobject
    in the org this session is attached to."""
    sf = Salesforce(instance_url=self.instance_url,
                    session_id=self.session_id)
    return [
        {'name': entry['name'],
         'custom': entry['custom'],
         'label': entry['label']}
        for entry in sf.describe()['sobjects']
    ]
def handler(event, context):
    """AWS Lambda entry point: describe every sobject in the org and
    persist the per-object metadata to the DynamoDB table
    ``sobs-metadata-dev``.

    :param event: dict with ``instance_url`` and ``session_id`` keys
    :param context: Lambda context (unused)
    :return: empty dict
    """
    log.debug("Received event {}".format(json.dumps(event)))
    sf = Salesforce(instance_url=event["instance_url"],
                    session_id=event["session_id"])
    objs = sf.describe()["sobjects"]
    # Bug fix: in Python 3, map() returns a lazy iterator, and boto3
    # validates RequestItems values as *lists* — the original lazy map
    # objects would fail parameter validation. Materialize them.
    objects = list(map(describe(sf), objs))
    client = boto3.client("dynamodb")
    # NOTE(review): batch_write_item accepts at most 25 put requests per
    # call; orgs with more sobjects need chunking — confirm expected count.
    client.batch_write_item(
        RequestItems={
            'sobs-metadata-dev': [
                {'PutRequest': {'Item': obj}} for obj in objects
            ]
        })
    return {}
def main():
    """Demo driver: connect to Salesforce, list queryable object names,
    and create a dated output directory for a backup run."""
    print("Start")
    sf = Salesforce(username='******', password="******",
                    security_token="token")
    description = sf.describe()
    names = [
        obj['name'] for obj in description['sobjects'] if obj['queryable']
    ]
    print(sf.session)
    print(sf.sf_instance)
    print(names)
    # One directory per day, e.g. ./2024-01-31
    datapath = Path() / date.today().isoformat()
    print(datapath)
    # exist_ok=True replaces the original try/except FileExistsError dance.
    datapath.mkdir(parents=True, exist_ok=True)
    print(datapath)
    for name in names:
        print(name)
    print("")
class Codex:
    """
    Acts as a conduit to a Salesforce instance, as well as a central
    repository for metadata on the tables in the instance.
    """

    def __init__(self):
        self.client = Salesforce(username=SFCONFIG.username,
                                 password=SFCONFIG.password,
                                 security_token=SFCONFIG.security_token)
        self._tables = dict()
        wanted = SFCONFIG.tables
        # Register every createable table — all of them when the config
        # list is empty, otherwise only the configured subset.
        for sobj in self.client.describe()['sobjects']:
            name = sobj['name']
            if sobj['createable'] and (name in wanted or len(wanted) == 0):
                self.get_table(name)

    def get_table(self, table_api_name: str):
        """
        Adds a table from the attached Salesforce instance as a new Table
        object attribute on the Codex.

        Args:
            table_api_name: A string, the API name of the table in the
                attached Salesforce instance.

        Returns:
            The generated Table object.
        """
        table = Table(getattr(self.client, table_api_name), self)
        self._tables[table_api_name] = table
        setattr(self, table_api_name, table)
        return table

    def query(self, q: str):
        """
        Queries the Salesforce REST API using the passed SOQL query string.

        Args:
            q: A valid SOQL query string.

        Returns:
            A list of OrderedDicts, the records resulting from the query.
        """
        response = self.client.query(q)
        records = response['records']
        # Page through until Salesforce reports the result set is done.
        while not response['done']:
            response = self.client.query_more(response['nextRecordsUrl'],
                                              True)
            records += response['records']
        return records

    def queryb(self, q: str, table_api_name: str) -> list:
        """
        Queries the Salesforce Bulk API using the passed SOQL query string
        and table_api_name.

        Args:
            q: A valid SOQL query string.
            table_api_name: A table's api_name.

        Returns:
            A list of OrderedDicts, the records resulting from the bulk
            query.
        """
        bulk_handle = getattr(self.client.bulk, table_api_name)
        return bulk_handle.query(q)
                password=password, security_token=security_token,
                domain=domain)
# (The call opened above this fragment is truncated in this view.)
#sf = Salesforce(instance_url=instance,session_id=session_id)
sf = Salesforce(instance=instance, session_id=session_id)
print(sf)
# Dump every public string-valued attribute of the connection object.
for element in dir(sf):
    if not element.startswith('_'):
        if isinstance(getattr(sf, element), str):
            print('Property Name:{0} ;Value:{1}'.format(
                element, getattr(sf, element)))
#to know instance instance
# for getting Salesforce Metadata
metadata_org = sf.describe()
type(metadata_org)
metadata_org.keys()
print(metadata_org['encoding'])
print(metadata_org['maxBatchSize'])
#print(metadata_org['sobjects'])
print(type(metadata_org['sobjects']))
# to store the huge list in pandas
df_sobjects = pd.DataFrame(metadata_org['sobjects'])
print(df_sobjects)
#pd.set_option('display.max_columns',100)
#pd.set_option('display.max_rows',500)
#pd.set_option('display.min_rows',500)
#pd.set_option('display.max_colwidth',150)
class SFInteraction(object):
    """Simple class that interacts with Salesforce"""

    def __init__(self, username, password, token, uat,
                 project_id='datacoco3.db', session_id=None, instance=None,
                 version=None):
        """Instantiate a Salesforce interaction manager.

        UAT mode is explicitly set to a boolean value in case a string is
        provided. If Salesforce session credentials do not exist, attempt
        to retrieve.

        :param username: Username
        :param password: Password
        :param token: API token
        :param uat: Whether or not in UAT mode
        :param project_id: to identify project source api calls
        :param session_id: Access token for existing session
        :param instance: Domain of Salesforce instance
        :param version: Salesforce API version to pin the client to
        """
        if not username or not password or not token or uat is None:
            raise RuntimeError("%s request all __init__ arguments" % __name__)
        self.username = username
        self.password = password
        self.token = token
        self.session_id = session_id
        self.instance = instance
        self.project_id = project_id
        self.version = version
        self.conn = None          # simple_salesforce client, set by connect()
        self.bulk = None          # SalesforceBulk client, set per bulk query
        self.job_id = None
        self.batch_max_attempts = None
        self.batch_timeout = None
        self.batch_sleep_interval = None
        self.temp_file = "sf_temp_results.txt"
        self.redis_conn = None
        self.session_credentials_key = "sf_session_credentials"
        # Handle string uat which should be boolean
        if isinstance(uat, str) and uat.lower() in ("true", "t"):
            self.uat = True
        else:
            self.uat = False
        # Retrieve session_id and/or instance if they do not exist
        if not self.session_id or not self.instance:
            self._get_session_credentials()

    def connect(self):
        """Connect to the Salesforce API client.

        Only executes if there is not an existing open Salesforce
        connection. If there are a session_id and an instance, attempt to
        connect to the existing session. The existing session connection
        is verified with a Salesforce API describe call. If that fails,
        create a new connection.

        There are 3 retry attempts
        """
        if self.session_id and self.instance:
            retry_count = 1
            while True:
                if retry_count > 3:
                    LOG.l(
                        "Could not connect to Salesforce in the specified number of retries."
                    )
                    LOG.l("Starting a new connection...")
                    break
                else:
                    LOG.l(
                        f"Connecting to Salesforce: attempt {retry_count} of 3..."
                    )
                try:
                    self.conn = Salesforce(session_id=self.session_id,
                                           instance=self.instance,
                                           client_id=self.project_id,
                                           version=self.version)
                    self.conn.describe()  # Connection health check
                    return  # Success, leave this function
                except SalesforceError as sfe:
                    LOG.l(
                        f"Encountered error connecting to Salesforce:\n{sfe}")
                    retry_count += 1
                    sleep(5)
                    continue
        # If reconnecting didn't work or session_id is not set, then start
        # a new connection.
        try:
            self._create_new_connection()
        except:
            # NOTE(review): bare except swallows the root cause; consider
            # `raise ... from exc` — left unchanged here.
            raise Exception("Could not initiate connection to Salesforce!")

    def fetch_soql(
        self,
        db_table,
        soql,
        batch=True,
        batch_timeout=600,
        batch_sleep_int=10,
        batch_max_attempts=1,
    ):
        """Fetch results from Salesforce soql queries.

        Batch Salesforce queries results saved to a file and retrieved
        because they are in CSV format and to avoid bulk queries timeouts.

        :param db_table: Database table name
        :param soql: Soql queries
        :param batch: Whether to use Salesforce Batch or Simple API
        :param batch_sleep_int: Salesforce Bulk query sleep interval
        :param batch_timeout: Batch job timeout in seconds
        :param batch_max_attempts: Maximum number of batch query creation
            attempts
        :return: If success, List of result dictionaries; Else empty list
        """
        try:
            if batch:
                # Set batch operation attributes
                self.batch_timeout = batch_timeout
                self.batch_sleep_interval = batch_sleep_int
                self.batch_max_attempts = batch_max_attempts
                results = self.get_query_records_dict(db_table, soql)
                # save to and read from file to avoid connection timeout
                self._save_results_to_file(results)
                records = self._get_results_from_file()
            else:
                result = self.conn.query(soql)
                # if there isn't a result return an empty list
                if result["records"]:
                    # NOTE(review): only the FIRST record is parsed and
                    # returned on the non-batch path — confirm intended.
                    salesforce_records = json.loads(
                        json.dumps(result["records"][0]))
                    parsed_records = parse_sf_records(salesforce_records)
                    records = [parsed_records
                               ]  # put result in a list object for consistency
                else:
                    records = []
        except BulkApiError as e:
            self.bulk.abort_job(self.job_id)
            # TODO Handle failed bulk API transaction better
            raise e
        return records

    def get(self, object_name: str, object_id: str):
        """To get a dictionary with all the information regarding that
        record."""
        return self.conn.__getattr__(object_name).get(object_id)

    def get_by_custom_id(self, object_name: str, field: str, id: str):
        """To get a dictionary with all the information regarding that
        record using a **custom** field that was defined as External ID."""
        return self.conn.__getattr__(object_name).get_by_custom_id(field, id)

    def upsert(self, object_name: str, field: str, id: str, data: dict):
        """To insert or update (upsert) a record using an external ID."""
        return self.conn.__getattr__(object_name).upsert(f'{field}/{id}',
                                                         data)

    def get_query_records_dict(self, db_table, soql_query):
        """Execute bulk Salesforce soql queries and return results as
        generator of dictionaries.

        :param db_table: Database table name
        :param soql_query: Soql queries
        :return: If success, List of result record dictionaries; Else
            empty list
        """
        self.bulk = SalesforceBulk(sessionId=self.session_id,
                                   host=self.instance)
        job = self.bulk.create_query_job(db_table, contentType="JSON")
        batch = self.bulk.query(job, soql_query)
        self.bulk.close_job(job)
        while not self.bulk.is_batch_done(batch):
            print("Waiting for batch query to complete")
            sleep(10)
        dict_records = []
        rec_count = 0
        print("Iterating through batch result set")
        for result in self.bulk.get_all_results_for_query_batch(batch):
            result = json.load(IteratorBytesIO(result))
            for row in result:
                rec_count += 1
                dict_records.append(row)
            # Progress report after each result chunk.
            print("Current fetched record count: ", rec_count)
        return dict_records

    def batch_query_records_dict(self, db_table, soql_query,
                                 concurrency='Serial'):
        """Execute bulk Salesforce soql queries and return results as
        generator of dictionaries. works only for PK CHUNKING enabled SF
        tables. Allows millions of record read.

        :param db_table: Database table name
        :param soql_query: Soql queries
        :param concurrency: Bulk job concurrency mode ('Serial' default)
        :return: If success, List of result record dictionaries; Else
            empty list
        """
        self.bulk = SalesforceBulk(sessionId=self.session_id,
                                   host=self.instance)
        job = self.bulk.create_query_job(db_table, contentType="JSON",
                                         pk_chunking=True,
                                         concurrency=concurrency)
        try:
            batch = self.bulk.query(job, soql_query)
            batch_list = self.bulk.get_batch_list(job)
            print('first batch', batch_list[0])
            batch_id = batch_list[0]['id']
            job_id = batch_list[0]['jobId']
            state = batch_list[0]['state']
            # Wait for the tracking batch to leave its initial states.
            while state == 'Queued' or state == 'InProgress':
                print(
                    "Waiting for batch state Queued or InProgress to change "
                    + state)
                sleep(10)
                state = self.bulk.batch_state(batch_id, job_id)
            batch_list = self.bulk.get_batch_list(job)
            print(f'number of batches: {len(batch_list)}')
            for item in batch_list:
                print('item', item)
                batch_id = item['id']
                job_id = item['jobId']
                state = item['state']
                if state == 'NotProcessed':
                    # PK-chunking marks the original tracking batch
                    # NotProcessed; skip it.
                    continue
                while not self.bulk.is_batch_done(batch_id, job_id):
                    print(
                        f"Waiting for batch query to complete batch_id:{batch_id}, job_id: {job_id}, state: {state}"
                    )
                    sleep(10)
                    state = self.bulk.batch_state(batch_id, job_id)
                total_retry_count = len(batch_list)
                retry = len(batch_list)
                # lastIndex lets a retry resume after already-yielded chunks.
                lastIndex = 0
                while retry > 0:
                    print(f'retry {retry} times left')
                    try:
                        for result in list(
                                self.bulk.get_all_results_for_query_batch(
                                    batch_id, job_id))[lastIndex:]:
                            result = json.load(IteratorBytesIO(result))
                            lastIndex += 1
                            yield result
                        break
                    except requests.exceptions.ChunkedEncodingError as e:
                        # Stream broke mid-download: reconnect and retry.
                        print('Chunking failed')
                        retry -= 1
                        self.connect()
                        self.bulk = SalesforceBulk(sessionId=self.session_id,
                                                   host=self.instance)
                        pass
                    except Exception as e:
                        print('There was an error')
                        traceback.print_exc()
                        retry -= 1
                        self.connect()
                        self.bulk = SalesforceBulk(sessionId=self.session_id,
                                                   host=self.instance)
                        pass
                if retry <= 0:
                    raise Exception(
                        f'Retried {total_retry_count} times and it still failed'
                    )
        except BulkApiError as e:
            self.bulk.abort_job(self.job_id)
            raise e

    def upload_records_to_s3(self, records, s3_bucket, s3_key,
                             aws_access_key, aws_secret_key):
        """Upload records to s3.

        :param records: Records filename
        :return: The destination S3 key (s3_key + timestamp suffix)
        """
        self._save_results_to_file(records)
        datetime_today = datetime.today().strftime("%Y-%m-%d-%X")
        s3_dest_key = s3_key + datetime_today
        s3_interaction = S3Interaction(aws_access_key, aws_secret_key)
        s3_interaction.put_file_to_s3(s3_bucket, s3_dest_key, self.temp_file)
        return s3_dest_key

    def get_description(self, object_name):
        """Retrieves object description

        :param object_name: Salesforce object/table name
        """
        retry = True
        while retry:
            try:
                return self.conn.__getattr__(object_name).describe()
            except SalesforceError as sfe:
                # Retry only while a reconnect succeeds.
                retry = self._sf_except_reconnect(sfe)

    def _sf_except_reconnect(self, e):
        """Used in try/catch blocks to reinit the connection returns true
        if the code should be retried, false if no connection could be
        made."""
        LOG.l(f"Encountered error:\n{e}")
        try:
            self.connect()
            return True
        except Exception:
            return False

    def _create_new_connection(self):
        """Create a new Salesforce API client connection.

        After the connection is created, the Salesforce session
        credentials are stored externally.
        """
        self.conn = Salesforce(username=self.username,
                               password=self.password,
                               security_token=self.token,
                               sandbox=self.uat,
                               client_id=self.project_id)
        self.session_id = str(self.conn.session_id)
        self.instance = str(self.conn.sf_instance)
        self._set_session_credentials()

    def _save_results_to_file(self, records):
        """Save Salesforce Bulk API results to a temp file.

        Each record is written on its own line as the str() of its UTF-8
        bytes representation; _get_results_from_file reverses this with
        ast.literal_eval.

        :param records: Records to save
        """
        with open(self.temp_file, "w") as f:
            for r in records:
                f.write("\n")
                f.write(str(str(r).encode("utf-8")))

    def _get_results_from_file(self):
        """Get Salesforce Bulk API results from a temp file.

        The records must be parsed. After the results are retrieved. The
        file is deleted.

        :return: Iterator with records.
        """
        results = []
        with open(self.temp_file, "r") as f:
            # [1:] drops the leading newline written by the saver.
            records = f.read()[1:].splitlines()
            for r in records:
                r = ast.literal_eval(r)
                results.append(r)
        os.remove(self.temp_file)
        return results

    def _get_session_credentials(self):
        """Get Salesforce session credentials stored in Redis.

        If the credentials variables do not exist, set the credentials as
        None.
        """
        # Establish connection to Redis
        self._connect_to_redis()
        # Get salesforce credentials if exists
        if self.redis_conn.conn.exists(self.session_credentials_key):
            self.session_id = self.redis_conn.fetch_by_key_name(
                self.session_credentials_key, "session_id")
            self.instance = self.redis_conn.fetch_by_key_name(
                self.session_credentials_key, "instance")
        else:
            self.session_id = None
            self.instance = None

    def _set_session_credentials(self):
        """Set Salesforce session credentials in Redis."""
        sf_session_credentials = {
            "session_id": self.session_id,
            "instance": self.instance,
        }
        self.redis_conn.set_key(self.session_credentials_key,
                                sf_session_credentials)

    def _connect_to_redis(self):
        """Connect to Redis."""
        CONF = config()
        host = CONF["redis"]["server"]
        port = CONF["redis"]["port"]
        db = CONF["redis"]["db"]
        self.redis_conn = RedisInteraction(host, port, db)
        self.redis_conn.connect()
sf = Salesforce(username=salesforceusername, password=salesforcepassword, security_token=salesforcesecuritytoken) # establish connection to local db print ("Testing local MySQL DB connection") try: cnx = mysql.connector.connect(**config) cursor = cnx.cursor(buffered=True) print ("Connection successful") except mysql.connector.Error as err: print("MySQL db settings error: {}".format(err)) # silent test to verify connection to salesforce complete print ("Testing Salesforce Instance connection") try: sf.describe() print ("Connection successful") except SalesforceError as err: print("Salesforce connection error: {}".format(err)) # check if first run of script print ("Checking to See if Default SF Object Table is available") sfobjectcheck = "SHOW TABLES LIKE 'sfobject_sfobject'" cursor.execute(sfobjectcheck) result1 = cursor.fetchone() if result1: sfobjcheck = 1; else: sobjectablecreate = ("CREATE TABLE sfobject_sfobject (data_id int(11) NOT NULL AUTO_INCREMENT Primary Key, obj_name TEXT,obj_keyPrefix TEXT,obj_label TEXT,obj_createable TEXT,obj_custom TEXT,obj_customSetting TEXT,created_at DATETIME DEFAULT NULL,modified_at DATETIME DEFAULT NULL)") cursor.execute(sobjectablecreate) sfobjcheck = 0;
def returnsObject(self):
    """Return a [name, custom, label] triple for every sobject in the
    org reachable with this instance's stored credentials."""
    connection = Salesforce(username=self.username,
                            password=self.password,
                            security_token=self.security_token)
    triples = []
    for entry in connection.describe()['sobjects']:
        triples.append([entry['name'], entry['custom'], entry['label']])
    return triples
#create soql query for extraction... soql has major differences to sql... #read sf documentation on soql to better understand how it works soql = 'SELECT {} FROM {} limit 10000'.format(columns, object_name) #get results and convert to pandas df results = sf.query_all(soql)['records'] df = pd.DataFrame(results).drop(['attributes'], axis=1) #your sfdc query results now live in the df object... this can be loaded to excel, sql database, etc... ################################# #Ways to find salesforce object names and column names/labels in sfdc ################################ #Get all object names in sf object_names = [name['name'] for name in sf.describe()['sobjects']] #change Account to any other object name from sfdc to get the column names and their labels newdf = pd.DataFrame([['Account', x['name'], x['label']] for x in sf.Account.describe()['fields']], columns=['ObjectName', 'ColumnName', 'LabelName']) ''' metadata_df = pd.DataFrame() for i in object_names: exec("test = sf." + i.lower()+ ".describe()['fields']") print([[i, x['name'], x['label']] for x in test]) '''
username = '******' password = '******' security_token = 'HxK2ciSHbsjN5PvAE8psL9w9F' sf = Salesforce(username=username,password=password, security_token=security_token) #get a list of queryable object names we will need to backup if args.objectNames: names = args.objectNames else: #get a description of our global salesforce instance, see layout: # https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/dome_describeGlobal.htm description = sf.describe() names = [obj['name'] for obj in description['sobjects'] if obj['queryable']] #for every object we'll need all the fields it has that are exportable. for name in names: salesforceObject = sf.__getattr__(name) # so get a list of the object fields for this object. fieldNames = [field['name'] for field in salesforceObject.describe()['fields']] # then build a SOQL query against that object and do a query_all try: results = sf.query_all( "SELECT " + ", ".join(fieldNames) + " FROM " + name )['records'] except SalesforceMalformedRequest as e: # ignore objects with rules about getting queried. continue outputfile = datapath / (name+".csv") with outputfile.open(mode='w', encoding='utf_8_sig) as csvfile:
class Login(Frame):
    """Tkinter login pane for the Salesforce Schema Builder.

    Collects username/password/security token (optionally remembered to a
    local pickle file), logs in via simple_salesforce, then builds the
    sObject/field data structure and hands it to the main application.
    """

    sf = None
    interior = None
    loginForm = {}

    def __init__(self, master=None):
        Frame.__init__(self, master)
        self.loginData = self.loadLoginData()
        self.buildLoginPane()

    def buildLoginPane(self):
        """Lay out the login form, pre-filling any saved credentials."""
        usernameVar = StringVar()
        usernameRememberVar = IntVar()
        passwordVar = StringVar()
        passwordRememberVar = IntVar()
        tokenVar = StringVar()
        tokenRememberVar = IntVar()
        sandboxVar = IntVar()
        savedLoginData = self.loadLoginData()
        if savedLoginData['username'] != '':
            usernameRememberVar.set(1)
            usernameVar.set(savedLoginData['username'])
        if savedLoginData['password'] != '':
            passwordRememberVar.set(1)
            passwordVar.set(savedLoginData['password'])
        if savedLoginData['token'] != '':
            tokenRememberVar.set(1)
            tokenVar.set(savedLoginData['token'])
        if savedLoginData['sandbox']:
            sandboxVar.set(1)
        outer = Frame(background=dark_1)
        outer.grid()
        f = Frame(outer, background=dark_1)
        f.grid(padx=24, pady=48)
        Label(f, text='Salesforce Schema Builder Login',
              font=Font(family='Roboto', size=18), background=dark_1,
              foreground=white).grid(column=0, row=0, columnspan=3, pady=24)
        # Bug fix: the next two Label(...) calls were garbled in the
        # original (a scrubbing artifact replaced ", font=Font(family="
        # with asterisks); reconstructed from the intact sibling labels.
        Label(f, text='Salesforce Username:', font=Font(family='Roboto'),
              background=dark_1,
              foreground=white).grid(column=0, row=1, sticky=E,
                                     pady=8, padx=8)
        Entry(f, textvariable=usernameVar,
              width=40).grid(column=1, row=1, pady=8, padx=8)
        Checkbutton(f, variable=usernameRememberVar, text=' Save',
                    font=Font(family='Roboto'), background=dark_1,
                    activebackground=dark_1,
                    foreground=white).grid(column=2, row=1, pady=8, padx=8)
        Label(f, text='Password:', font=Font(family='Roboto'),
              background=dark_1,
              foreground=white).grid(column=0, row=2, sticky=E,
                                     pady=8, padx=8)
        Entry(f, textvariable=passwordVar, show='*',
              width=40).grid(column=1, row=2, pady=8, padx=8)
        Checkbutton(f, variable=passwordRememberVar, text=' Save',
                    font=Font(family='Roboto'), background=dark_1,
                    activebackground=dark_1,
                    foreground=white).grid(column=2, row=2, pady=8, padx=8)
        Label(f, text='Security Token:', font=Font(family='Roboto'),
              background=dark_1,
              foreground=white).grid(column=0, row=3, sticky=E,
                                     pady=8, padx=8)
        Entry(f, textvariable=tokenVar,
              width=40).grid(column=1, row=3, pady=8, padx=8)
        Checkbutton(f, variable=tokenRememberVar, text=' Save',
                    font=Font(family='Roboto'), background=dark_1,
                    activebackground=dark_1,
                    foreground=white).grid(column=2, row=3, pady=8, padx=8)
        Checkbutton(f, variable=sandboxVar, text=' Sandbox',
                    font=Font(family='Roboto'), background=dark_1,
                    activebackground=dark_1,
                    foreground=white).grid(column=1, row=4, pady=8,
                                           sticky=W, padx=8)
        Button(f, command=self.doLogin, text='LOGIN', background=primary,
               foreground='#FFF',
               font=Font(family='Roboto', size=14, weight='bold'),
               padx=32, pady=4,
               relief=FLAT).grid(column=0, row=5, sticky=W + E, pady=8,
                                 padx=8, columnspan=3)
        self.loginForm = {
            'username': usernameVar,
            'unRemember': usernameRememberVar,
            'password': passwordVar,
            'pwRemember': passwordRememberVar,
            'token': tokenVar,
            'tkRemember': tokenRememberVar,
            'sandbox': sandboxVar
        }
        style = Style()
        if 'aqua' in style.theme_names():
            self.theme = 'aqua'
        elif 'vista' in style.theme_names():
            self.theme = 'vista'
        else:
            self.theme = 'classic'
        style.theme_use(self.theme)

    def doLogin(self):
        """Attempt Salesforce login with the form values; on success hand
        the built sObject structure to the main app."""
        try:
            f = self.loginForm
            self.sf = Salesforce(username=f['username'].get(),
                                 password=f['password'].get(),
                                 security_token=f['token'].get(),
                                 sandbox=f['sandbox'].get() == 1)
            self.saveLoginInfo()
        except Exception:
            # Narrowed from a bare except so KeyboardInterrupt/SystemExit
            # still propagate.
            messagebox.showerror(
                title='Failed to login',
                message='Please check your username and password')
            return None
        try:
            sObjects = self.buildDataStructure()
            loginRoot.destroy()
            openApp(sObjects)
        except Exception as e:
            print(e)

    def loadLoginData(self):
        """Load remembered credentials from DONTcommitMe.pkl, falling back
        to blank defaults when the file is missing or unreadable."""
        if isfile('DONTcommitMe.pkl'):
            try:
                with open('DONTcommitMe.pkl', 'rb') as pkl:
                    return pickle.load(pkl)
            except Exception:
                # Corrupt/unreadable save file: fall through to defaults.
                pass
        return {'username': '', 'password': '', 'token': '', 'sandbox': 0}

    def saveLoginInfo(self):
        """Persist only the fields the user asked to remember."""
        f = self.loginForm
        loginData = {
            'username': f['username'].get() if f['unRemember'].get() else '',
            'password': f['password'].get() if f['pwRemember'].get() else '',
            'token': f['token'].get() if f['tkRemember'].get() else '',
            'sandbox': f['sandbox'].get()
        }
        try:
            with open('DONTcommitMe.pkl', 'wb+') as creds:
                pickle.dump(loginData, creds)
        except Exception as e:
            messagebox.showerror(title='Filesystem Error',
                                 message='Failed to save login data:'
                                 + str(e))

    def buildDataStructure(self):
        """Describe every layoutable sObject and collect per-field
        metadata (type, size, picklist values, flags) into a dict keyed by
        sObject name."""
        describe = self.sf.describe()["sobjects"]
        existingTypes = [
            'id', 'boolean', 'reference', 'string', 'picklist', 'textarea',
            'double', 'address', 'phone', 'url', 'currency', 'int', 'date',
            'time', 'datetime', 'percent', 'email', 'base64', 'combobox',
            'multipicklist'
        ]
        conversion = {
            'boolean': 'boolean',
            'multipicklist': 'array(string)',
            'datetime': 'datetime',
            'int': 'integer'
        }
        sObjects = {}
        for obj in describe:
            if obj['layoutable']:
                # We don't want to deal with sobjects like oauth tokens or
                # apex classes
                objDesc = getattr(self.sf, obj['name']).describe()
                fields = []
                for field in objDesc['fields']:
                    name = field['name']
                    label = field['label']
                    objType = field['type']
                    reference = None
                    valuePairs = None
                    if objType not in existingTypes:
                        existingTypes.append(objType)
                        print('WARNING: type not found: ' + objType)
                        print(field)
                        print('--------------------')
                    if objType == 'relationship':
                        reference = field['referenceTo'][0]
                    if objType == 'picklist' or objType == 'combobox' \
                            or objType == 'multipicklist':
                        valuePairs = []
                        for val in field['picklistValues']:
                            if val['active']:
                                valuePairs.append((val['label'],
                                                   val['value']))
                    defaultValue = field['defaultValue']
                    size = field['byteLength']
                    unique = field['unique']
                    readOnly = not field['updateable']
                    required = not field['nillable']
                    fields.append({
                        'name': name,
                        'label': label,
                        'type': objType,
                        'reference': reference,
                        'values': valuePairs,
                        # 'defualt' typo kept: downstream consumers read
                        # this key.
                        'defualt': defaultValue,
                        'size': size,
                        'unique': unique,
                        'required': required,
                        'readOnly': readOnly
                    })
                sObjects[obj['name']] = fields
        return sObjects
NVL(TO_CHAR(trg.{1}), "NULL") <> NVL(TO_CHAR(src.{1}), "NULL")
'''
union = '\n\nUNION ALL\n\n'
queries = []
#Map <String, Schema.SObjectType> schemaMap =
#    Schema.getGlobalDescribe();
#Map <String, Schema.SObjectField> fieldMap =
#    schemaMap.get('Campaign').getDescribe().fields.getMap();
# Do login (credentials scrubbed; uncomment the prompts for interactive use)
user = '******'
#user = input('User: ')
password = '******'
#password = getpass()
token = 'IWsUC5iuU2EX54yg8Yhgdm8AE'
#token = getpass('Token: ')
sf = Salesforce(username=user, password=password, security_token=token,
                domain='test')
# NOTE(review): `object` shadows the builtin and is unbound when no label
# matches — the final print would raise NameError in that case.
for o in sf.describe()['sobjects']:
    if (o['label'].lower() == objectName):
        object = o
print(object['metadata'])
from simple_salesforce import Salesforce
from pandas import DataFrame
from collections import defaultdict

## create database engine and open a connection
engine = databaseconfig.DB_ENGINE
conn = engine.connect()

## create a salesforce session
sf = Salesforce(username=SF_UID, password=SF_PWD, security_token=SF_TOKEN,
                organizationId='00Di0000000hSDR')

ktc_tables = sf.describe()
ktc_tables = ktc_tables[u'sobjects']
table_names = []
for i in ktc_tables:
    table_names.append(i[u'name'])

# the ones we want
# NOTE(review): 'Contact' appears twice, so it is processed twice below —
# kept as-is in case the duplication is intentional.
desired_tables = ['Account', 'Contact', 'College_Persistence__c', 'Contact',
                  'Contact_Note__c', 'Enrollment__c']

all_tables = defaultdict()
for i in desired_tables:
    # Bug fix: was a Python 2 print statement, a SyntaxError under the
    # Python 3 used everywhere else in this file.
    print('Retrieving fields for table %s from Salesforce.' % i)
    # this gets us the attributes of the table
    this_table = sf.__getattr__(i).describe()
                password=salesforcepassword,
                security_token=salesforcesecuritytoken)
# (The Salesforce(...) call opened above this fragment is truncated in
# this view.)

# establish connection to local db
print("Testing local MySQL DB connection")
try:
    cnx = mysql.connector.connect(**config)
    cursor = cnx.cursor(buffered=True)
    print("Connection successful")
except mysql.connector.Error as err:
    # NOTE(review): on failure `cursor` stays undefined and the script
    # continues — later cursor.execute calls would raise NameError.
    print("MySQL db settings error: {}".format(err))

# silent test to verify connection to salesforce complete
print("Testing Salesforce Instance connection")
try:
    sf.describe()
    print("Connection successful")
except SalesforceError as err:
    print("Salesforce connection error: {}".format(err))

# check if first run of script
print("Checking to See if Default SF Object Table is available")
sfobjectcheck = "SHOW TABLES LIKE 'sfobject_sfobject'"
cursor.execute(sfobjectcheck)
result1 = cursor.fetchone()
if result1:
    # Table already exists; flag so later code can skip creation.
    sfobjcheck = 1
else:
    # First run: build the CREATE TABLE statement for the metadata table.
    sobjectablecreate = (
        "CREATE TABLE sfobject_sfobject (data_id int(11) NOT NULL AUTO_INCREMENT Primary Key, obj_name TEXT,obj_keyPrefix TEXT,obj_label TEXT,obj_createable TEXT,obj_custom TEXT,obj_customSetting TEXT,created_at DATETIME DEFAULT NULL,modified_at DATETIME DEFAULT NULL)"
    )