def login(self):
    # Prompt for the password if it was not supplied up front.
    if self.password is None:
        self.password = getpass.getpass(prompt='Salesforce Password: ')
    # (code between the password prompt and the return was redacted in the
    # source; a 30-character random hex string was generated somewhere here:
    # '%030x' % random.randrange(16**30))
    return sf
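# A minimal sketch of what a login() method like the one above usually looks
# like when built on simple_salesforce; the attribute names (self.username,
# self.security_token) and the class shell are assumptions, not recovered
# from the redacted source.
import getpass

from simple_salesforce import Salesforce, SalesforceLogin


class SalesforceClient:
    def __init__(self, username, security_token, password=None):
        self.username = username
        self.password = password
        self.security_token = security_token

    def login(self):
        if self.password is None:
            self.password = getpass.getpass(prompt='Salesforce Password: ')
        session_id, instance = SalesforceLogin(
            username=self.username,
            password=self.password,
            security_token=self.security_token)
        return Salesforce(instance=instance, session_id=session_id)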
def login_to_salesforce(username, password, security_token=None,
                        organizationId=None, client_id=None,
                        API_version=DEFAULT_API_VERSION):
    if client_id:
        client_id = "{prefix}/{app_name}".format(
            prefix=DEFAULT_CLIENT_ID_PREFIX, app_name=client_id)
    else:
        client_id = DEFAULT_CLIENT_ID_PREFIX
    if all(arg is not None for arg in (username, password, security_token)):
        # Pass along the username/password to our login helper
        return SalesforceLogin(username=username,
                               password=password,
                               security_token=security_token,
                               sf_version=API_version,
                               client_id=client_id)
    elif all(arg is not None for arg in (username, password, organizationId)):
        # Pass along the username/password to our login helper
        return SalesforceLogin(username=username,
                               password=password,
                               organizationId=organizationId,
                               sf_version=API_version,
                               client_id=client_id)
    else:
        raise TypeError(
            'You must provide login information or an instance and token')
def __init__(self, username=None, password=None, security_token=None,
             organization_id=None, domain='login', API_version="46.0",
             batch_size=10000, logger=None, verbose=True):
    # Use SalesforceLogin from simple_salesforce for authentication.
    self.session_id, host = SalesforceLogin(username=username,
                                            password=password,
                                            security_token=security_token,
                                            organizationId=organization_id,
                                            domain=domain)
    endpoint = "https://" + host if host[0:4] != 'http' else host
    endpoint += "/services/async/%s" % API_version
    self.endpoint = endpoint
    self.batch_size = batch_size
    if not logger:
        logger = logging.getLogger(__name__)
        logger.setLevel(logging.INFO)
        if verbose:
            h = logging.StreamHandler()
            h.setFormatter(logging.Formatter("%(levelname)s - %(message)s"))
            logger.addHandler(h)
        else:
            logger.addHandler(logging.NullHandler())
    self._logger = logger
def login_attempt(username, password, token):
    """
    Login instance to a Salesforce Sandbox
    :param username, password, token: required fields
    :return: status (200=OK, 401=Error), simple_salesforce object,
             current session id, sandbox instance
    """
    try:
        sf = None
        session_id = None
        instance = None
        # sandbox=True is the pre-1.0 simple_salesforce spelling of
        # domain='test'
        sf = Salesforce(username=username, password=password,
                        security_token=token, sandbox=True)
        session_id, instance = SalesforceLogin(username=username,
                                               password=password,
                                               security_token=token,
                                               sandbox=True)
        print(instance)
    except SalesforceAuthenticationFailed as e:
        print(e)
        return 401, sf, session_id, instance
    print(sf, session_id, instance)
    return 200, sf, session_id, instance
def getSFconnection():
    # TODO: add a guard for when this is executed against a PRODUCTION
    # environment; the user would need to type the username manually to
    # avoid corrupting production data.
    return SalesforceLogin(username='******',
                           password='******',
                           security_token='<SECURITYTOKEN>',
                           domain='test')
def simple_salesorce_login(app_properties):
    """
    :param app_properties: dict with username, password, security_token and
        sfdc_api_version keys
    :return: tuple of session ID and instance URL (with https:// attached)
    """
    from simple_salesforce import SalesforceLogin
    from simple_salesforce import exceptions
    usr_name = app_properties["username"]
    passwd = app_properties["password"]
    security_token = app_properties["security_token"]
    api_version = app_properties["sfdc_api_version"]
    try:
        _sfdc_creds = SalesforceLogin(username=usr_name,
                                      password=passwd,
                                      security_token=security_token,
                                      sf_version=api_version,
                                      client_id='INFA-SFDC Validator'
                                      # domain='test'
                                      )
        # Simple Salesforce returns the instance value without 'https://';
        # attach it for compatibility with the rest of the code.
        sfdc_creds = (_sfdc_creds[0], "https://" + _sfdc_creds[1])
        return sfdc_creds
    except exceptions.SalesforceAuthenticationFailed as e:
        print("Cannot connect to Salesforce: Reason: {}".format(e))
        exit(1)
def __init__(self):
    # self.postgres_client = self._get_psycopg_conn()
    self.session_id, self.instance = SalesforceLogin(
        username=config.SALESFORCE['username'],
        password=config.SALESFORCE['password'],
        domain=config.SALESFORCE['domain'])
    logger.debug('session_id={}, instance={}'.format(self.session_id,
                                                     self.instance))
def connect(self):
    session_id, instance = SalesforceLogin(
        username=self.username,
        password=self.password,
        security_token=self.security_token)
    sf = Salesforce(session_id=session_id, instance=instance)
    return sf
def login_to_salesforce(
        username, password, sandbox=False, security_token=None,
        organizationId=None, client_id=None, API_version=DEFAULT_API_VERSION,
        # domain is passed directly to SalesforceLogin and should be 'test'
        # or 'login' or 'something.my'
        domain=None):
    if client_id:
        client_id = "{prefix}/{app_name}".format(
            prefix=DEFAULT_CLIENT_ID_PREFIX, app_name=client_id)
    else:
        client_id = DEFAULT_CLIENT_ID_PREFIX
    if domain is None and sandbox:
        domain = 'test'
    if all(arg is not None for arg in (username, password, security_token)):
        # Pass along the username/password to our login helper
        return SalesforceLogin(username=username,
                               password=password,
                               security_token=security_token,
                               domain=domain,
                               sf_version=API_version,
                               client_id=client_id)
    elif all(arg is not None for arg in (username, password, organizationId)):
        # Pass along the username/password to our login helper
        return SalesforceLogin(username=username,
                               password=password,
                               organizationId=organizationId,
                               domain=domain,
                               sf_version=API_version,
                               client_id=client_id)
    else:
        raise TypeError(
            'You must provide login information or an instance and token')
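# The two login_to_salesforce helpers above assume module-level constants
# and imports along these lines; the concrete values here are assumptions
# for illustration, not taken from the source.
from simple_salesforce import SalesforceLogin

DEFAULT_API_VERSION = '46.0'                    # hypothetical default
DEFAULT_CLIENT_ID_PREFIX = 'simple-salesforce'  # hypothetical prefix

# Usage: returns a (session_id, instance) tuple for a sandbox org.
session_id, instance = login_to_salesforce(
    'user@example.com.sandbox', 'password', sandbox=True,
    security_token='TOKEN')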
def get_sf():
    login_info = SalesforceLogin(
        username=username,
        password=password,
        security_token=security_token,
        domain="test" if is_sandbox else "login")
    session_id, instance = login_info
    version = '51.0'
    return Salesforce(instance=instance, session_id=session_id,
                      version=version)
def initSFconnection(username, password, security_token):
    global session_id, instance, sfurl
    session_id, instance = SalesforceLogin(
        username=username,
        password=password,
        security_token=security_token
    )
    sfurl = "https://" + instance
    print(session_id)
    print(instance)
    print(sfurl)  # was print(sf), which is undefined here
def connect_to_salesforce(self):
    try:
        session_id, instance = SalesforceLogin(
            username='******',
            password='******',
            security_token='spAsycjVt9iBA56mXwFxRuRoD')
        sales_force = Salesforce(instance=instance, session_id=session_id)
        self._logger.info(
            'successfully connected to sales_force. sales_force= %s'
            % sales_force)  # was self.sales_force, which is never set
        return sales_force
    except Exception as e:
        # note: this constructs a Warning but does not raise or log it
        Warning(_(str(e)))
def extractDataFromQuery(query, fileName):
    session_id, instance = SalesforceLogin(username='******',
                                           password='******',
                                           security_token='<SECURITYTOKEN>',
                                           domain='test')
    queryResult = createQueryJob(instance, session_id, query)
    jobId = queryResult[1] if queryResult[0] else -1
    print(jobId)
    print(queryResult)
    awaitQueryJobCompletion(instance, session_id, jobId)
    downloadQueryResults(instance, session_id, jobId, fileName=fileName,
                         maxRecords=100000)
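# createQueryJob, awaitQueryJobCompletion and downloadQueryResults are not
# shown in the source. A minimal sketch of createQueryJob against the
# Salesforce Bulk API 2.0, assuming API version 52.0; the (ok, job_id)
# return shape is inferred from how queryResult is unpacked above.
import requests

def createQueryJob(instance, session_id, query, api_version='52.0'):
    url = 'https://%s/services/data/v%s/jobs/query' % (instance, api_version)
    headers = {'Authorization': 'Bearer ' + session_id,
               'Content-Type': 'application/json'}
    response = requests.post(url, headers=headers,
                             json={'operation': 'query', 'query': query})
    if response.status_code == 200:
        # a successful create returns the job description, including its id
        return True, response.json()['id']
    return False, response.text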
class FulcrumRecordToSalesforceRecord:
    _sfdcSession_id, _sfdcInstance = SalesforceLogin(
        username=_sfdcUsername,
        password=_sfdcPassword,
        security_token=_sfdcToken,
        domain=_sfdcDomain)
    sfdc = Salesforce(instance=_sfdcInstance, session_id=_sfdcSession_id)
    fulcrum = Fulcrum(key=_fulcrumXApiToken)
    fulcrumHeaders = {'X-ApiToken': _fulcrumXApiToken}

    def sf_api_call(self, action, parameters={}, method='get', data={},
                    multipart=False, boundary=None):
        """
        Helper function to make calls to Salesforce REST API.
        Parameters: action (the URL), URL params, method (get, post or
        patch), data for POST/PATCH.
        """
        if not multipart:
            headers = {
                'Content-type': 'application/json',
                'Accept-Encoding': 'gzip',
                'Authorization': 'OAuth ' + self._sfdcSession_id,
            }
        else:
            headers = {
                'Content-type': 'multipart/form-data; boundary=' + boundary,
                'Accept-Encoding': 'gzip',
                'Authorization': 'OAuth ' + self._sfdcSession_id,
            }
        if method == 'get':
            r = requests.request(method,
                                 'https://' + self._sfdcInstance + action,
                                 headers=headers, params=parameters,
                                 timeout=30)
        elif method in ['post', 'patch']:
            r = requests.request(method,
                                 'https://' + self._sfdcInstance + action,
                                 headers=headers, json=data,
                                 params=parameters, timeout=10)
        else:
            # other methods not implemented in this example
            raise ValueError('Method should be get or post or patch.')
        # print('Debug: API %s call: %s' % (method, r.url))
        if r.status_code < 300:
            if method == 'patch':
                return None
            return r.json()
        else:
            raise Exception('API error when calling %s : %s'
                            % (r.url, r.content))

    # Generates a random string
    def id_generator(self, size=32,
                     chars=string.ascii_uppercase + string.digits):
        return ''.join(random.choice(chars) for _ in range(size))

    # Checks to see if a key exists in a dictionary
    def checkKey(self, dictionary, key):
        try:
            return key in dictionary.keys()
        except KeyError:
            return False

    # Pass JSON directly
    def composite_salesforce_create(self, objectId, records):
        response = self.sfdc.restful(method='POST',
                                     path='composite/tree/' + objectId,
                                     json=records)
        return response

    # Must have Salesforce record IDs
    def composite_salesforce_update(self, objectId, extCustomField,
                                    extIdValue, records):
        response = self.sfdc.restful(method='PATCH',
                                     path='composite/sobjects',
                                     json=records)
        return response

    def composite_salesforce_request(self, objectId, extCustomField,
                                     extIdValue, records):
        response = self.sfdc.restful(method='POST',
                                     path='composite/sobjects/' + objectId,
                                     json=records)
        return response  # was 'reponse' (typo)

    # Data should either be a single JSON encapsulating a base64 encoded
    # blob up to 34MB, or a multipart message encapsulating a base64 encoded
    # blob up to 2GB.
    # https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/dome_sobject_insert_update_blob.htm
    def contentVersion_salesforce_create(self, data):
        return self.sf_api_call(
            '/services/data/v40.0/sobjects/ContentVersion',
            method="post", data=data)

    def contentVersion_2GB_salesforce_create(self, data, boundary):
        return self.sf_api_call(
            '/services/data/v40.0/sobjects/ContentVersion',
            method="post", data=data, multipart=True, boundary=boundary)

    # Data should be an ID
    def contentVersion_salesforce_get(self, data):
        return self.sf_api_call(
            '/services/data/v40.0/sobjects/ContentVersion/%s' % data)

    def contentDocumentLink_salesforce_create(self, data):
        return self.sf_api_call(
            '/services/data/v40.0/sobjects/ContentDocumentLink',
            method='post', data=data)

    def create_output_json(self, recordJson):
        recordJson = json.dumps(recordJson)
        recordJson = recordJson[1:-1]
        recordJson = recordJson.replace('null', '')
        return recordJson

    def process_generate_field(self, fieldId, fieldValue, fieldType='Data'):
        print('    ' + str(fieldType) + ': ' + str(_sfdcPrefix)
              + str(fieldId) + '__c:' + str(fieldValue))
        if fieldType == 'Latitude' or fieldType == 'Longitude':
            return {
                _sfdcPrefix + fieldId + '__' + fieldType + '__s': fieldValue
            }
        else:
            return {_sfdcPrefix + fieldId + '__c': fieldValue}

    def upload_2GB_file_to_salesforce_and_attach_to_record(
            self, recordId, fileTitle, fileDescription, fileName,
            fileContents):
        boundary = self.id_generator()
        # decode() so the base64 payload can be embedded in the str body
        fileContents = base64.b64encode(fileContents).decode()
        # Multipart request can handle 2GB max
        ContentVersionMetadata = {
            'Title': fileTitle,
            'Description': fileDescription,
            'PathOnClient': fileName,
        }
        ContentVersionData = """--""" + boundary + """
Content-Disposition: form-data; name="entity_content";
Content-Type: application/json

{
    "Title" : """ + '"' + fileTitle + '"' + """,
    "Description" : """ + '"' + fileDescription + '"' + """,
    "PathOnClient" : """ + '"' + fileName + '"' + """
}

--""" + boundary + """
Content-Disposition: form-data; name="VersionData"; filename=""" + '"' + fileName + '"' + """
Content-Type: application/octet-stream

""" + fileContents + """

--""" + boundary + """--"""
        # 1: Insert the Content Document
        ContentVersion = self.contentVersion_2GB_salesforce_create(
            data=ContentVersionData, boundary=boundary)
        ContentVersionId = ContentVersion.get('id')
        # 2: Get the ContentDocumentId from the just inserted ContentVersion
        ContentVersion = self.contentVersion_salesforce_get(ContentVersionId)
        ContentDocumentId = ContentVersion.get('ContentDocumentId')
        # 3: Create a ContentDocumentLink between the ContentDocumentId and
        # the record
        contentDocumentLinkMetadata = {
            'ContentDocumentId': ContentDocumentId,
            'LinkedEntityId': recordId,
            'ShareType': 'V'
        }
        ContentDocumentLink = self.contentDocumentLink_salesforce_create(
            contentDocumentLinkMetadata)
        return {
            'ContentVersionId': ContentVersionId,
            'ContentDocumentId': ContentDocumentId,
            'ContentDocumentLink': ContentDocumentLink
        }

    # fulcrumId defaults to None: several callers (the track uploads below)
    # do not pass one
    def upload_file_to_salesforce_and_attach_to_record(self, recordId,
                                                       fileTitle,
                                                       fileDescription,
                                                       fileName, fileContent,
                                                       fulcrumId=None):
        # decode() so the payload is JSON-serializable
        fileContent = base64.b64encode(fileContent).decode()
        # Single part request can handle ~34MB max
        ContentVersionData = {
            'Title': fileTitle,
            'Description': fileDescription,
            'PathOnClient': fileName,
            'VersionData': fileContent,
            _sfdcPrefix + 'Fulcrum_Id__c': fulcrumId,
            # _sfdcPrefix + 'Location__c': fulcrumLocation
        }
        # 1: Insert the Content Document
        ContentVersion = self.contentVersion_salesforce_create(
            data=ContentVersionData)
        ContentVersionId = ContentVersion.get('id')
        # 2: Get the ContentDocumentId from the just inserted ContentVersion
        ContentVersion = self.contentVersion_salesforce_get(ContentVersionId)
        ContentDocumentId = ContentVersion.get('ContentDocumentId')
        # 3: Create a ContentDocumentLink between the ContentDocumentId and
        # the record
        contentDocumentLinkMetadata = {
            'ContentDocumentId': ContentDocumentId,
            'LinkedEntityId': recordId,
            'ShareType': 'V'
        }
        ContentDocumentLink = self.contentDocumentLink_salesforce_create(
            contentDocumentLinkMetadata)
        return {
            'ContentVersionId': ContentVersionId,
            'ContentDocumentId': ContentDocumentId,
            'ContentDocumentLink': ContentDocumentLink
        }

    def process_file_fields(self, record, recordId):
        # print(record)
        newFiles = []
        for fieldId in record['form_values']:
            files = self.detect_file_field_type_and_process_field(
                fieldId, record, recordId=recordId)
            # print(files)
            if isinstance(files, dict):
                newFiles.append(files)
        return newFiles

    def process_video_field(self, fieldValue, recordId):
        print('Downloading Video File From Fulcrum ... '
              + fieldValue['video_id'])
        baseurl = _fulcrumBaseURL + 'videos/' + fieldValue['video_id']
        blob = requests.request('GET', baseurl + '.mp4',
                                headers=self.fulcrumHeaders)
        if blob.status_code == 200:
            videoMetadata = self.fulcrum.videos.find(fieldValue['video_id'])
            print('Uploading Video File To Salesforce... ' + recordId)
            self.upload_file_to_salesforce_and_attach_to_record(
                recordId=recordId,
                fileTitle=fieldValue['video_id'] + ' Video',
                fileDescription=fieldValue['caption'],
                fileName=fieldValue['video_id'] + '.mp4',
                fileContent=blob.content,
                fulcrumId=fieldValue['video_id'])
        blob = requests.request('GET', baseurl + '/track.json',
                                headers=self.fulcrumHeaders)
        if blob.status_code == 200:
            print('Uploading Video Track To Salesforce... ' + recordId)
            self.upload_file_to_salesforce_and_attach_to_record(
                recordId=recordId,
                fileTitle=fieldValue['video_id'] + ' JSON Track',
                fileDescription='JSON Track Of\n' + fieldValue['caption'],
                fileName=fieldValue['video_id'] + '-track.json',
                fileContent=blob.content)
        blob = requests.request('GET', baseurl + '/track.geojson',
                                headers=self.fulcrumHeaders)
        if blob.status_code == 200:
            print('Uploading Video GeoJSON Track To Salesforce... '
                  + recordId)
            self.upload_file_to_salesforce_and_attach_to_record(
                recordId=recordId,
                fileTitle=fieldValue['video_id'] + ' GEO JSON Track',
                fileDescription='GeoJSON Track Of\n' + fieldValue['caption'],
                fileName=fieldValue['video_id'] + '-track.geojson',
                fileContent=blob.content)
        blob = requests.request('GET', baseurl + '/track.gpx',
                                headers=self.fulcrumHeaders)
        if blob.status_code == 200:
            print('Uploading Video GPX Track To Salesforce... ' + recordId)
            self.upload_file_to_salesforce_and_attach_to_record(
                recordId=recordId,
                fileTitle=fieldValue['video_id'] + ' GPX Track',
                fileDescription='GPX Track Of\n' + fieldValue['caption'],
                fileName=fieldValue['video_id'] + '-track.gpx',
                fileContent=blob.content)
        blob = requests.request('GET', baseurl + '/track.kml',
                                headers=self.fulcrumHeaders)
        if blob.status_code == 200:
            print('Uploading Video KML Track To Salesforce... ' + recordId)
            self.upload_file_to_salesforce_and_attach_to_record(
                recordId=recordId,
                fileTitle=fieldValue['video_id'] + ' KML Track',
                fileDescription='KML Track Of\n' + fieldValue['caption'],
                fileName=fieldValue['video_id'] + '-track.kml',
                fileContent=blob.content)
        return

    def process_photo_field(self, fieldValue, recordId):
        print('Downloading Photo File From Fulcrum ... '
              + fieldValue['photo_id'])
        blob = requests.request('GET',
                                _fulcrumBaseURL + 'photos/'
                                + fieldValue['photo_id'] + '.jpg',
                                headers=self.fulcrumHeaders)
        if blob.status_code == 200:
            print('Uploading Photo File To Salesforce... ' + recordId)
            self.upload_file_to_salesforce_and_attach_to_record(
                recordId=recordId,
                fileTitle=fieldValue['photo_id'] + ' Photo',
                fileDescription=fieldValue['caption'],
                fileName=fieldValue['photo_id'] + '.jpg',
                fileContent=blob.content,
                fulcrumId=fieldValue['photo_id'])
        return

    def process_signature_field(self, fieldValue, recordId):
        print('Downloading Signature File From Fulcrum ... '
              + fieldValue['signature_id'])
        blob = requests.request('GET',
                                _fulcrumBaseURL + 'signature/'
                                + fieldValue['signature_id'] + '.png',
                                headers=self.fulcrumHeaders)
        if blob.status_code == 200:
            print('Uploading Signature File To Salesforce... ' + recordId)
            self.upload_file_to_salesforce_and_attach_to_record(
                recordId=recordId,
                # was fieldValue['photo_id'], which a signature field lacks
                fileTitle=fieldValue['signature_id'] + ' Signature',
                fileDescription='Signed At: ' + fieldValue['timestamp'],
                fileName=fieldValue['signature_id'] + '.png',
                fileContent=blob.content,
                fulcrumId=fieldValue['signature_id'])
        return

    def process_audio_field(self, fieldValue, recordId):
        print('Downloading Audio File From Fulcrum ... '
              + fieldValue['audio_id'])
        blob = requests.request('GET',
                                _fulcrumBaseURL + 'audio/'
                                + fieldValue['audio_id'] + '.mp4',
                                headers=self.fulcrumHeaders)
        if blob.status_code == 200:
            print('Uploading Audio File To Salesforce... ' + recordId)
            self.upload_file_to_salesforce_and_attach_to_record(
                recordId=recordId,
                fileTitle=fieldValue['audio_id'] + ' Audio',  # was ' Video'
                fileDescription=fieldValue['caption'],
                fileName=fieldValue['audio_id'] + '.mp4',
                fileContent=blob.content,
                fulcrumId=fieldValue['audio_id'])
        blob = requests.request('GET',
                                _fulcrumBaseURL + 'audio/'
                                + fieldValue['audio_id'] + '/track.json',
                                headers=self.fulcrumHeaders)
        if blob.status_code == 200:
            print('Uploading Audio Track To Salesforce... ' + recordId)
            self.upload_file_to_salesforce_and_attach_to_record(
                recordId=recordId,
                fileTitle=fieldValue['audio_id'] + ' JSON Track',
                fileDescription='JSON Track Of\n' + fieldValue['caption'],
                fileName=fieldValue['audio_id'] + '-track.json',
                fileContent=blob.content)
        blob = requests.request('GET',
                                _fulcrumBaseURL + 'audio/'
                                + fieldValue['audio_id'] + '/track.geojson',
                                headers=self.fulcrumHeaders)
        if blob.status_code == 200:
            print('Uploading Audio GeoJSON Track To Salesforce... '
                  + recordId)
            self.upload_file_to_salesforce_and_attach_to_record(
                recordId=recordId,
                fileTitle=fieldValue['audio_id'] + ' GEO JSON Track',
                fileDescription='GeoJSON Track Of\n' + fieldValue['caption'],
                fileName=fieldValue['audio_id'] + '-track.geojson',
                fileContent=blob.content)
        blob = requests.request('GET',
                                _fulcrumBaseURL + 'audio/'
                                + fieldValue['audio_id'] + '/track.gpx',
                                headers=self.fulcrumHeaders)
        if blob.status_code == 200:
            print('Uploading Audio GPX Track To Salesforce... ' + recordId)
            self.upload_file_to_salesforce_and_attach_to_record(
                recordId=recordId,
                fileTitle=fieldValue['audio_id'] + ' GPX Track',
                fileDescription='GPX Track Of\n' + fieldValue['caption'],
                fileName=fieldValue['audio_id'] + '-track.gpx',
                fileContent=blob.content)
        blob = requests.request('GET',
                                _fulcrumBaseURL + 'audio/'
                                + fieldValue['audio_id'] + '/track.kml',
                                headers=self.fulcrumHeaders)
        if blob.status_code == 200:
            print('Uploading Audio KML Track To Salesforce... ' + recordId)
            self.upload_file_to_salesforce_and_attach_to_record(
                recordId=recordId,
                fileTitle=fieldValue['audio_id'] + ' KML Track',
                fileDescription='KML Track Of\n' + fieldValue['caption'],
                fileName=fieldValue['audio_id'] + '-track.kml',
                fileContent=blob.content)
        return

    def process_date_field(self, fieldId, fieldValue):
        # Generate Date
        return self.process_generate_field(fieldId, fieldValue, 'Date')

    def process_datetime_field(self, record, isDateField, fieldId,
                               fieldValue):
        # Generate Date Time.
        # Check to see if the last field processed was a Date field.
        if isDateField != _isDateFieldDefault:
            dateValue = record['form_values'][isDateField]
            dateTimeValue = dateValue + ' ' + fieldValue
            return self.process_generate_field(isDateField + '_' + fieldId,
                                               dateTimeValue, 'DateTime')
        # Not paired with a Date field
        else:
            return self.process_generate_field(fieldId, fieldValue, 'Time')

    def process_address_and_choice_field(self, fieldId, subFieldKey,
                                         subFieldValue):
        if subFieldKey == 'sub_thoroughfare':  # was subFieldValue (bug)
            return self.process_generate_field(fieldId + '_1', subFieldValue,
                                               'Street Number')
        elif subFieldKey == 'thoroughfare':
            return self.process_generate_field(fieldId + '_2', subFieldValue,
                                               'Street Name')
        elif subFieldKey == 'suite':
            return self.process_generate_field(fieldId + '_3', subFieldValue,
                                               'Suite')
        elif subFieldKey == 'locality':
            return self.process_generate_field(fieldId + '_4', subFieldValue,
                                               'City')
        elif subFieldKey == 'sub_admin_area':
            return self.process_generate_field(fieldId + '_5', subFieldValue,
                                               'County')
        elif subFieldKey == 'admin_area':
            return self.process_generate_field(fieldId + '_6', subFieldValue,
                                               'State/Province')
        elif subFieldKey == 'postal_code':
            return self.process_generate_field(fieldId + '_7', subFieldValue,
                                               'Postal Code')
        elif subFieldKey == 'country':
            return self.process_generate_field(fieldId + '_8', subFieldValue,
                                               'Country')
        elif subFieldKey == 'choice_values':
            choices = []
            multiSelectChoices = subFieldValue[0]
            for choice in subFieldValue:
                choices.append(choice)
                if multiSelectChoices != choice:
                    multiSelectChoices += ';' + choice
            if len(choices) == 1:
                # note: the source does not return here
                self.process_generate_field(fieldId, choices, 'Choices')
            else:
                return self.process_generate_field(fieldId,
                                                   multiSelectChoices,
                                                   'Multiselect Choices')
        elif subFieldKey == 'other_values':
            for choice in subFieldValue:
                return self.process_generate_field(fieldId, choice,
                                                   'Other Choice')

    # Determine the type of field and process it. This handles files.
    def detect_file_field_type_and_process_field(self, fieldId, record,
                                                 recordId, detail=False):
        fieldValue = ''
        if not detail:
            fieldValue = record['form_values'][fieldId]
        else:
            fieldValue = record[fieldId]
        isDictField = isinstance(fieldValue, dict)
        isListField = isinstance(fieldValue, list)
        # print(fieldValue)
        if isListField:
            for complexFieldValue in fieldValue:
                # print(complexFieldValue)
                isComplexDictField = isinstance(complexFieldValue, dict)
                if isComplexDictField:
                    isRepeatingSections = self.checkKey(complexFieldValue,
                                                        'form_values')
                    isPhotoField = self.checkKey(complexFieldValue,
                                                 'photo_id')
                    isVideoField = self.checkKey(complexFieldValue,
                                                 'video_id')
                    isAudioField = self.checkKey(complexFieldValue,
                                                 'audio_id')
                    if isPhotoField:
                        print("Photo Field Detected...")
                        return self.process_photo_field(complexFieldValue,
                                                        recordId)
                    elif isVideoField:
                        print("Video Field Detected...")
                        return self.process_video_field(complexFieldValue,
                                                        recordId)
                    elif isAudioField:
                        print("Audio Field Detected...")
                        return self.process_audio_field(complexFieldValue,
                                                        recordId)
                    elif isRepeatingSections:
                        print("Child Record Detected...")
                        return self.process_file_fields(complexFieldValue,
                                                        recordId)
        elif isDictField:
            isSignatureField = self.checkKey(fieldValue, 'signature_id')
            if isSignatureField:
                print("Signature Field Detected...")
                return self.process_signature_field(fieldValue, recordId)

    # Determine the type of field and process it. This handles data.
    def detect_field_type_and_process_field(self, fieldId, record,
                                            isDateField=_isDateFieldDefault,
                                            detail=False):
        fieldValue = ''
        if not detail:
            fieldValue = record['form_values'][fieldId]
        else:
            fieldValue = record[fieldId]
        isListField = isinstance(fieldValue, list)
        isDictField = isinstance(fieldValue, dict)
        if isListField:
            for complexFieldValue in fieldValue:
                isRepeatingSections = self.checkKey(complexFieldValue,
                                                    'form_values')
                isDictComplexField = isinstance(complexFieldValue, dict)
                isJunctionObject = self.checkKey(complexFieldValue,
                                                 'record_id')
        elif isDictField:
            for subFieldKey in fieldValue:
                subFieldValue = fieldValue[subFieldKey]
                return self.process_address_and_choice_field(
                    fieldId, subFieldKey, subFieldValue)
        # Date Time field
        elif re.match(r"([0-2][0-9]:[0-5][0-9])", fieldValue):
            return self.process_datetime_field(record, isDateField, fieldId,
                                               fieldValue)
        # Date field
        elif re.match(r"([1-2][0-9][0-9][0-9]-[0-1][0-9]-[0-3][0-9])",
                      fieldValue):
            # Mark that this loop was a Date, in prep for a Time field
            isDateField = fieldId
            return self.process_date_field(fieldId, fieldValue)
        # Easy field
        else:
            return self.process_generate_field(fieldId, fieldValue)

    def generate_junction_records(self, complexFormValues):
        return

    def generate_detail_fields(self, complexFormValues):
        dict(complexFormValues)
        sfdcFields = []
        for detailRecord in complexFormValues:
            isDateField = _isDateFieldDefault
            fieldAppend = self.detect_field_type_and_process_field(
                detailRecord, complexFormValues, isDateField, True)
            # print(fieldAppend)
            if isinstance(fieldAppend, dict):
                sfdcFields.append(fieldAppend)
            if isDateField != detailRecord:
                isDateField = _isDateFieldDefault
        sfdcFields = json.dumps(sfdcFields).replace('[', '').replace(
            ']', '').replace('{', '').replace('}', '')
        return sfdcFields

    def generate_fields(self, record):
        sfdcFields = []
        isDateField = _isDateFieldDefault
        # print(record)
        for fieldId in record['form_values']:
            fieldAppend = self.detect_field_type_and_process_field(
                fieldId, record, isDateField)
            # print(fieldAppend)
            if isinstance(fieldAppend, dict):
                sfdcFields.append(fieldAppend)
            # If this loop was not a Date field, reset back to the default
            if isDateField != fieldId:
                isDateField = _isDateFieldDefault
        sfdcFields = json.dumps(sfdcFields).replace('[', '').replace(
            ']', '').replace('{', '').replace('}', '')
        return sfdcFields

    def create_sfdc_fulcrum_record(self, record):
        objectId = (_sfdcPrefix + record['form_id'] + '__c').replace('-',
                                                                     '_')
        sfdcCreateRecords = self.generate_sfdc_fulcrum_record(record)
        sfdcCreateRecords = json.loads(sfdcCreateRecords)
        # was fulcrumToSalesforce.composite_salesforce_create (module-level
        # instance); the method is available on self
        return self.composite_salesforce_create(objectId, sfdcCreateRecords)

    def update_sfdc_fulcrum_record(self, record):
        objectId = (_sfdcPrefix + record['form_id'] + '__c').replace('-',
                                                                     '_')
        sfdcObject = SFType(objectId, self.sfdc.session_id,
                            self.sfdc.sf_instance)
        recordExists = sfdcObject.get_by_custom_id(
            _sfdcPrefix + 'fulcrum_id__c', record['id'])
        if recordExists:
            # Get child records
            for fieldId in record['form_values']:
                fieldValue = record['form_values'][fieldId]
                isListField = isinstance(fieldValue, list)
                if isListField:
                    complexFieldType = fieldValue[0]
                    isRepeatingSections = self.checkKey(complexFieldType,
                                                        'form_values')
                    isJunctionObject = self.checkKey(complexFieldType,
                                                     'record_id')
                    if isRepeatingSections:
                        objectId = (_sfdcPrefix
                                    + record['form_id'][0:13].replace('-', '_')
                                    + '_' + fieldId + '_d__c')
                        objectReferenceId = (_sfdcPrefix
                                             + record['form_id'][0:13].replace(
                                                 '-', '_')
                                             + '_' + fieldId + '_d__r')
                        sfdcInsertRecord = ''
                        for complexFieldValue in fieldValue:
                            detailRecordExists = sfdcObject.get_by_custom_id(
                                _sfdcPrefix + 'fulcrum_id__c',
                                complexFieldValue['id'])
                            if detailRecordExists:
                                sfdcRecordUpdate = \
                                    self.generate_sfdc_fulcrum_detail_record(
                                        complexFieldValue)
                                print(sfdcRecordUpdate)
                                exit()
        else:
            self.create_sfdc_fulcrum_record(record)

    def generate_sfdc_fulcrum_record(self, record):
        print('---------------------------------------')
        print('Processing Fulcrum Record...')
        objectId = (_sfdcPrefix + record['form_id'] + '__c').replace('-',
                                                                     '_')
        sfdcRecord = self.standard_fields_master_record(record)
        sfdcFields = self.generate_fields(record)
        objectIdString = '"' + objectId + '"'
        recordIdString = '"' + record['id'] + '"'
        sfdcRecord = json.dumps(sfdcRecord).replace('[', '').replace(
            ']', '').replace('{', '').replace('}', '')
        sfdcDetailRecords = self.generate_sfdc_fulcrum_detail_records(record)
        if sfdcDetailRecords is None:
            sfdcRecord = ("""{"records": [{"attributes": {"type" : """
                          + objectIdString + """, "referenceId": """
                          + recordIdString + """ }, """ + sfdcRecord + ','
                          + sfdcFields + """ }]}""")
        else:
            detailRecordJson = sfdcDetailRecords[0]
            for detailRecord in sfdcDetailRecords:
                if detailRecord != detailRecordJson:
                    detailRecordJson += "," + detailRecordJson
            sfdcRecord = ("""{"records": [{"attributes": {"type" : """
                          + objectIdString + """, "referenceId": """
                          + recordIdString + """ }, """ + sfdcRecord + ','
                          + sfdcFields + ', ' + detailRecordJson + """ }]}""")
        return sfdcRecord

    def generate_sfdc_fulcrum_detail_record(self, complexFieldValue):
        # NOTE: objectId, sfdcRecord and sfdcInsertRecord are undefined in
        # this method in the source; it appears to be an unfinished
        # extraction of the inline logic in
        # generate_sfdc_fulcrum_detail_records below.
        complexFormValues = complexFieldValue['form_values']
        sfdcFields = self.generate_detail_fields(complexFormValues)
        objectIdString = '"' + objectId + '"'
        recordIdString = '"' + complexFieldValue['id'] + '"'
        sfdcRecord = json.dumps(sfdcRecord).replace('[', '').replace(
            ']', '').replace('{', '').replace('}', '')
        sfdcRecord = (""", { "attributes": {"type" : """ + objectIdString
                      + """ , "referenceId": """ + recordIdString
                      + """ }, """ + sfdcRecord + ',' + sfdcFields
                      + """ }""")
        sfdcInsertRecord += sfdcRecord

    def standard_fields_master_record(self, record):
        sfdcRecord = []
        # (fulcrum key, sfdc field id, label) triples, appended only when the
        # record has a non-None value for the key; behavior is identical to
        # the original per-field if-chain
        field_map = [
            ('status', 'status', 'Status'),
            ('version', 'version', 'Version'),
            ('id', 'fulcrum_id', 'Id'),
            ('created_at', 'created_at', 'Created At'),
            ('updated_at', 'updated_at', 'Updated At'),
            ('client_created_at', 'client_created_at', 'Client Created At'),
            ('client_updated_at', 'client_updated_at', 'Client Updated At'),
            ('created_by', 'created_by', 'Created By'),
            ('created_by_id', 'created_by_id', 'Created By Id'),
            ('updated_by', 'updated_by', 'Updated By'),
            ('updated_by_id', 'updated_by_id', 'Updated By Id'),
            ('created_location', 'created_location', 'Created Location'),
            ('updated_location', 'updated_location', 'Updated Location'),
            ('created_duration', 'created_duration', 'Created Duration'),
            ('updated_duration', 'updated_duration', 'Updated Duration'),
            ('edited_duration', 'edited_duration', 'Edited Duration'),
            ('project_id', 'project_id', 'Project Id'),
            ('changeset_id', 'changeset_id', 'Change Set ID'),
            ('assigned_to', 'assigned_to', 'Assigned To'),
            ('assigned_to_id', 'assigned_to_id', 'Assigned To Id'),
            ('form_id', 'form_id', 'Form Id'),
            ('latitude', 'location', 'Latitude'),
            ('longitude', 'location', 'Longitude'),
            ('speed', 'speed', 'Speed'),
            ('course', 'course', 'Course'),
            ('horizontal_accuracy', 'horizontal_accuracy',
             'Horizontal Accuracy'),
            ('vertical_accuracy', 'vertical_accuracy', 'Vertical Accuracy'),
        ]
        for key, fieldId, label in field_map:
            if record[key] is not None:
                sfdcRecord.append(
                    self.process_generate_field(fieldId, record[key], label))
        return sfdcRecord

    def standard_fields_detail_record(self, complexFieldValue):
        sfdcRecord = []
        # same (key, field id, label) pattern as the master record above
        field_map = [
            ('version', 'version', 'Version'),
            ('id', 'fulcrum_id', 'Id'),
            ('created_at', 'created_at', 'Created At'),
            ('updated_at', 'updated_at', 'Updated At'),
            ('created_by_id', 'created_by_id', 'Created By Id'),
            ('updated_by_id', 'updated_by_id', 'Updated By Id'),
            ('created_duration', 'created_duration', 'Created Duration'),
            ('updated_duration', 'updated_duration', 'Updated Duration'),
            ('edited_duration', 'edited_duration', 'Edited Duration'),
            ('changeset_id', 'changeset_id', 'Change Set ID'),
        ]
        for key, fieldId, label in field_map:
            if complexFieldValue[key] is not None:
                sfdcRecord.append(
                    self.process_generate_field(fieldId,
                                                complexFieldValue[key],
                                                label))
        if complexFieldValue['geometry'] is not None:
            sfdcRecord.append(
                self.process_generate_field(
                    'location',
                    complexFieldValue['geometry']['coordinates'][1],
                    'Latitude'))
            sfdcRecord.append(
                self.process_generate_field(
                    'location',
                    complexFieldValue['geometry']['coordinates'][0],
                    'Longitude'))
        return sfdcRecord

    # Fulcrum record and SFDC parent record ID (prefix and postfix added)
    def generate_sfdc_fulcrum_detail_records(self, record):
        print('.......................................')
        print('Processing Fulcrum Detail Records...')
        sfdcRecords = []
        for fieldId in record['form_values']:
            fieldValue = record['form_values'][fieldId]
            isListField = isinstance(fieldValue, list)
            if isListField:
                complexFieldType = fieldValue[0]
                isRepeatingSections = self.checkKey(complexFieldType,
                                                    'form_values')
                if isRepeatingSections:
                    sfdcInsertRecord = ''
                    objectId = (_sfdcPrefix
                                + record['form_id'][0:13].replace('-', '_')
                                + '_' + fieldId + '_d__c')
                    objectReferenceId = (_sfdcPrefix
                                         + record['form_id'][0:13].replace(
                                             '-', '_')
                                         + '_' + fieldId + '_d__r')
                    for complexFieldValue in fieldValue:
                        print('.......................................')
                        print('Processing Detail Record...')
                        print('  Object: ' + objectId)
                        print('  ReferenceName: ' + objectReferenceId)
                        sfdcRecord = self.standard_fields_detail_record(
                            complexFieldValue)
                        complexFormValues = complexFieldValue['form_values']
                        sfdcFields = self.generate_detail_fields(
                            complexFormValues)
                        objectIdString = '"' + objectId + '"'
                        recordIdString = '"' + complexFieldValue['id'] + '"'
                        sfdcRecord = json.dumps(sfdcRecord).replace(
                            '[', '').replace(']', '').replace(
                            '{', '').replace('}', '')
                        sfdcRecord = (""", { "attributes": {"type" : """
                                      + objectIdString
                                      + """ , "referenceId": """
                                      + recordIdString + """ }, """
                                      + sfdcRecord + ',' + sfdcFields
                                      + """ }""")
                        sfdcInsertRecord += sfdcRecord
                    objectReferenceIdString = ('"' + str(objectReferenceId)
                                               + '"')
                    sfdcInsertRecord = sfdcInsertRecord.replace(',', "", 1)
                    recordJson = (objectReferenceIdString
                                  + """:{"records":[""" + sfdcInsertRecord
                                  + """]}""")
                    sfdcRecords.append(recordJson)
        return sfdcRecords
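# The class above relies on module-level settings (_sfdcUsername,
# _sfdcPassword, _sfdcToken, _sfdcDomain, _fulcrumXApiToken, _sfdcPrefix,
# _fulcrumBaseURL, _isDateFieldDefault) that are not shown in the source.
# A hypothetical setup, which would need to appear before the class
# definition since the login runs at class-creation time; every value here
# is an illustrative assumption:
_sfdcUsername = 'user@example.com'
_sfdcPassword = 'password'
_sfdcToken = 'SECURITY_TOKEN'
_sfdcDomain = 'test'
_sfdcPrefix = 'flcm_'                              # custom-field prefix
_fulcrumXApiToken = 'FULCRUM_API_KEY'
_fulcrumBaseURL = 'https://api.fulcrumapp.com/api/v2/'
_isDateFieldDefault = 'none'                       # date-field sentinel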
def login(self):
    login = SalesforceLogin(sandbox=self.is_sandbox,
                            **self._credentials._asdict())
    self._access_token, host = login
    self._instance_url = "https://" + host
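# _credentials._asdict() implies a namedtuple whose fields match
# SalesforceLogin keyword arguments; a plausible definition (the field names
# are assumptions):
from collections import namedtuple

Credentials = namedtuple('Credentials',
                         ['username', 'password', 'security_token'])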
from simple_salesforce import SalesforceLogin

decrypt = "gpg --output secrets.json --decrypt secrets.gpg"
if os.path.exists("secrets.gpg"):
    returned_value = subprocess.call(decrypt, shell=True)
else:
    print("The file does not exist; encryption on secrets.json not in use")

import json

with open('secrets.json', 'r') as f:
    config = json.load(f)

session_id, instance = SalesforceLogin(
    username=config['user']['username'],
    password=config['user']['password'],
    security_token=config['user']['salesforce_token'])
print(session_id)

# Options
options = Options()
options.add_argument("--headless")
pd.set_option('display.max_rows', None)

# System Variables
today = date.today()
date = today.strftime("%m/%d/%Y")  # note: rebinding 'date' shadows the class
node = platform.node()
system = platform.system()
username = getpass.getuser()
def sf_session():
    session_id, instance = SalesforceLogin(username=SF_USER,
                                           password=SF_PASS,
                                           security_token=SF_TOKEN)
    sf = Salesforce(session_id=session_id, instance=instance)
    return sf
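# Usage sketch: SF_USER, SF_PASS and SF_TOKEN are assumed module-level
# settings (e.g. read from the environment); sf.query is the standard
# simple_salesforce SOQL entry point.
sf = sf_session()
result = sf.query("SELECT Id, Name FROM Account LIMIT 5")
for rec in result['records']:
    print(rec['Id'], rec['Name'])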
import config
import ids
from simple_salesforce import Salesforce, SFType, SalesforceLogin
import sys

'''This is a Salesforce sandbox clearing file that deletes whatever fields
I need it to delete'''

session_id, instance = SalesforceLogin(
    username=config.username,
    password=config.password,
    security_token=config.token,
    sandbox=True)
sf = Salesforce(instance=instance, session_id=session_id)

contact_query = "Select Id, Name from Contact"
non_test_query = ("SELECT Name, Id FROM Opportunity "
                  "WHERE not (Name like 'Test%')")
opp_owner = "SELECT OwnerId FROM Opportunity"

contacts = sf.query(query=contact_query)
opportunities = sf.query(query=non_test_query)

print('''Do you want to delete:
1. Opportunities
2. Contacts
3. Exit''')
select = int(input('Selection: '))

if select == 1:
    print('Deleting Opportunities...')
    for record in opportunities['records']:
        id = record['Id']
        sf.Opportunity.delete(id)
load_dotenv(join(dirname(__file__), '.env'))

NPQ_COURSES = {
    '324': 'NPQML',
    '305': 'NPQH',
    '325': 'NPQSL',
    '326': 'NPQEL',
}

logging.getLogger().setLevel(os.environ.get('LOG_LEVEL', 'DEBUG'))

domain = os.environ.get('SALESFORCE_DOMAIN', 'test')
session_id, instance = SalesforceLogin(
    username=os.environ['SALESFORCE_USER'],
    password=os.environ['SALESFORCE_PASSWORD'],
    security_token=os.environ['SALESFORCE_SECURITY_TOKEN'],
    domain=domain)
sf = Salesforce(instance=instance, session_id=session_id, domain=domain)


def complete_details(detail, participant, date):
    return detail.replace(
        '<participant>', '' if participant is None else participant).replace(
        '<date>', date[:10])


def lambda_handler(event, context):
    for record in event['Records']:
        payload = str(record["body"])
        payloads = json.loads(payload)['data']
        logging.debug(payload)
        for payload in payloads:
            try:
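# complete_details fills placeholder tokens in a template string; for
# example (illustrative values, not taken from the source):
#   complete_details('<participant> booked on <date>', 'Jo',
#                    '2021-06-01T09:00:00Z')
#   -> 'Jo booked on 2021-06-01'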
# Serial Data Streaming from Arduino to Salesforce
# Created by: Jack Galletta, Summer 2019
# [email protected]

# import packages
from simple_salesforce import Salesforce, SalesforceLogin
from Credentials import username, password
import serial
import threading
import datetime
import re

# logging into Salesforce
session_id, instance = SalesforceLogin(username=username, password=password)
sf = Salesforce(instance=instance, session_id=session_id)
print('Successfully logged in!')
print('Waiting to detect cars...')

# Logged in! Now perform API actions, SOQL queries, etc.
# Object to read in serial data from port '/dev/cu.usbmodem14201'
arduinoData = serial.Serial('/dev/cu.usbmodem14201', 9600)

myid = str(sf.query("SELECT Id FROM Traffic_Tracker__c"))
pattern = "'Id', '(.{18})'"
r1 = re.findall(pattern, myid)
ttid = r1[0]

# query the BestHour object and strip the ID value
constquery = str(sf.query("SELECT BHID__c FROM BestHourConst__c"))
id = str(sf.query("SELECT Id FROM Best_Hour__c"))
pattern = "'Id', '(.{18})'"
import pandas as pd
from simple_salesforce import Salesforce, SalesforceLogin

# login credentials (the password prompt and token assignment were fused by
# redaction in the source; reconstructed from how they are used below)
username = '******'
password = getpass.getpass(prompt='Password: ')
security_token = 'q3hiTl2zYMZ28EPc62JZljDXT'
sandbox = 'backup'  # any truthy value routes SalesforceLogin to the sandbox

# time checks
start_time = datetime.now()
time_check = datetime.now()

# get session to start job
session_id, instance = SalesforceLogin(username, password,
                                       security_token=security_token,
                                       sandbox=sandbox)
sf = Salesforce(instance=instance, session_id=session_id)

# csv to DataFrame, then to iterable tuple
csv_df = pd.read_csv('acc_insert_test.csv')
csv_tuple = csv_df.itertuples(index=False)

# initialize for Bulk API limits
bulk_data = []
count_records = 0
count_chars = 0
count_rows = 0

for each in csv_tuple:
    if (datetime.now() - time_check).seconds > 15:
def make_das(use_scheduled_units=False, export=False, for_bg=False):
    """Return DAS report as a dataframe.

    If use_scheduled_units is True, then it uses Scheduled Units in
    Salesforce. If False, then it uses Actual Units where available, and
    Scheduled Units elsewhere. If export is True, then it saves the
    dataframe as a csv.
    """
    username, password, security_token = get_salesforce_login_info()
    # 1. Export Ad Ops DAS Reporting from Salesforce
    # (Converted Report ID: 00O61000003rUoxEAE,
    #  Non-Converted Report ID: 00O61000003KY4AEAW)
    (session_id, instance) = SalesforceLogin(username=username,
                                             password=password,
                                             security_token=security_token)
    query_url = ('https://' + instance
                 + '/00O61000003rUox?export=1&enc=UTF-8&xf=csv')
    headers = {
        'Content-Type': 'application/json',
        'Authorization': 'Bearer ' + session_id,
        'X-PrettyPrint': '1'
    }
    s = requests.Session()
    response = s.get(query_url, headers=headers,
                     cookies={'sid': session_id})
    with open('sf_das.csv', 'wb') as f:
        f.write(response.content)
    # 2. Clean up
    sf_das = pd.read_csv('sf_das.csv', encoding='utf-8')
    sf_das = sf_das.fillna('N/A')
    for col in sf_das.columns.tolist():
        if ' (converted)' in col:
            sf_das = sf_das.rename(
                columns={col: col.replace(' (converted)', '')})
    sf_das.loc[sf_das['Actual Units'] == 'N/A', 'Actual Units'] = \
        sf_das.loc[sf_das['Actual Units'] == 'N/A', 'Scheduled Units']
    sf_das.loc[sf_das['Actual Amount'] == 'N/A', 'Actual Amount'] = \
        sf_das.loc[sf_das['Actual Amount'] == 'N/A', 'Contracted Amount']
    for col in [
            'Sales Price', 'Base Rate', 'Baked-In Production Rate',
            'Total Price', 'Total Units', 'Scheduled Units', 'Actual Units',
            'Contracted Amount', 'Actual Amount'
    ]:
        if for_bg and (col in ['Actual Units', 'Actual Amount']):
            pass
        else:
            sf_das.loc[sf_das[col] == 'N/A', col] = 0
    # 3. Pivot to create monthly Actual Units columns
    index_list = [
        'BBR', 'Campaign Name', 'Flight Type', 'Brand: Brand Name',
        'Account Name: Account Name', 'Agency: Account Name', 'IO Number',
        'Start Date', 'End Date', 'Approval Date', 'Stage',
        'Billing Details', 'Customer Billing ID', 'Billing Profile Name',
        'Opportunity Owner: Full Name', '2nd Opportunity Owner: Full Name',
        'Client Services User: Full Name', 'Campaign Manager: Full Name',
        'Advertiser Vertical', 'Product: Product Name', 'Budget Category',
        'Media Product', 'Media Product Family',
        'Advertiser Vertical Family', 'Contracted Sites',
        'Contracted Devices', 'Line Item Number', 'OLI',
        'Billable Reporting Source', 'Viewability Source', 'Viewability',
        'Blocking System', 'Line Description', 'Contracted Sizes',
        'Price Calculation Type', 'Sales Price', 'Base Rate',
        'Baked-In Production Rate', 'Total Price', 'Total Units'
    ]
    # Fill value
    if for_bg:
        fill_value = 'N/A'
    else:
        fill_value = 0
    # the two branches differed only in the values column
    values_col = 'Scheduled Units' if use_scheduled_units else 'Actual Units'
    try:
        das = pd.pivot_table(sf_das,
                             index=index_list,
                             columns=['Active Month'],
                             values=values_col,
                             fill_value=fill_value,
                             aggfunc=np.sum)
    except KeyError as e:
        print('data error: {}'.format(e))
    das = das.reset_index()
    # 4. Convert dates to date type
    for col in ['Start Date', 'End Date', 'Approval Date']:
        das.loc[das[col] != 'N/A', col] = das[das[col] != 'N/A'].apply(
            lambda row: datetime.strptime(row[col], '%m/%d/%Y').date(),
            axis=1)
    # 5. Rename index portion of header
    rename_dict = {
        'Brand: Brand Name': 'Brand',
        'Account Name: Account Name': 'Account Name',
        'Agency: Account Name': 'Agency',
        'Opportunity Owner: Full Name': 'Opportunity Owner',
        '2nd Opportunity Owner: Full Name': '2nd Opportunity Owner',
        'Client Services User: Full Name': 'Account Manager',
        'Campaign Manager: Full Name': 'Campaign Manager',
        'Product: Product Name': 'Product',
        'Billing Profile Name': 'Customer Billing Name'
    }
    das = das.rename(columns=rename_dict)
    renamed_index_list = []
    for index in index_list:
        if index in rename_dict:
            renamed_index_list.append(rename_dict[index])
        else:
            renamed_index_list.append(index)
    # 6. Reorder months
    months_list = sf_das['Active Month'].drop_duplicates().values.tolist()
    months_flipped_list = []
    for month in months_list:
        if re.search('([0-9]+)/', month):
            mo = re.search('([0-9]+)/', month).group(1)
        else:
            continue
        if len(mo) == 1:
            mo = '0' + mo
        yr = re.search('/([0-9]+)', month).group(1)
        months_flipped_list.append(yr + '/' + mo)
    months_flipped_list.sort()
    months_ordered_list = []
    for month_flipped in months_flipped_list:
        mo = re.search('/([0-9]+)', month_flipped).group(1)
        if mo[0] == '0':
            mo = mo[1]
        yr = re.search('([0-9]+)/', month_flipped).group(1)
        months_ordered_list.append(mo + '/' + yr)
    # 7. Output
    das = das[renamed_index_list + months_ordered_list]
    das = das.sort_values(
        ['BBR', 'Campaign Name', 'Line Item Number', 'Line Description'])
    das = das[das['Price Calculation Type'] != 'N/A']
    if export:
        das.to_csv('das.csv', index=False, encoding='utf-8')
    os.remove('sf_das.csv')
    return das
import json
import pandas as pd
from simple_salesforce import Salesforce, SalesforceLogin, SFType

loginInfo = json.load(open('login.json'))
username = loginInfo['username']
password = loginInfo['password']
security_token = loginInfo['security_token']
domain = 'login'

session_id, instance = SalesforceLogin(username=username,
                                       password=password,
                                       security_token=security_token,
                                       domain=domain)
# SalesforceLogin returns a bare hostname, so pass it as instance=
# (instance_url= expects a full https:// URL)
sf = Salesforce(instance=instance, session_id=session_id)
# print(sf)
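# SFType is imported above but unused in the snippet; a minimal sketch of
# what it is typically used for (the object name and record Id below are
# illustrative placeholders):
contact = SFType('Contact', session_id, instance)
metadata = contact.metadata()                # object-level metadata
record = contact.get('003XXXXXXXXXXXXXXX')   # fetch a record by Id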
# outlook credentials (several assignments here were fused by redaction in
# the source; reconstructed from how the variables are used below)
outlook_user = '******'
outlook_pass = getpass.getpass(prompt='Outlook Password: ')
imap_url = 'imap-mail.outlook.com'
attach_dir = ''

# sfdc credentials
sfdc_user = '******'
sfdc_pass = getpass.getpass(prompt='Salesforce Password: ')
security_token = 'H225tAUMxPYmYEf7LSZr7158Q'

# log into outlook
con = imaplib.IMAP4_SSL(imap_url)
con.login(outlook_user, outlook_pass)

# log into sfdc
session_id, instance = SalesforceLogin(sfdc_user, sfdc_pass,
                                       security_token=security_token)
sf = Salesforce(instance=instance, session_id=session_id)

# remove passwords
outlook_pass = '******'
sfdc_pass = '******'

# get and save file
inbox = con.select('INBOX')
email_list = search('SUBJECT', 'Salesforce Data to Import', con)
email_list_ordered = email_list[0].split()
email_list_ordered.reverse()
new_file = get_attachment(email_list_ordered)

# convert file to use for data load
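# The search() and get_attachment() helpers are not shown in the source.
# A minimal sketch of search() over imaplib, matching the
# (key, value, connection) call above; IMAP4.search returns the matching
# message numbers as a space-separated byte string, which is why the caller
# does email_list[0].split().
def search(key, value, con):
    result, data = con.search(None, key, '"{}"'.format(value))
    return data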