예제 #1
0
class JobRunner:
    """Subscribes to a Salesforce PushTopic over CometD and routes each pushed
    Loan_Application record to an internal or external loan-status predictor,
    writing the prediction back to the originating record."""

    def __init__(self):
        # NOTE(review): credentials must come from configuration / the
        # environment, never from source control.  The security token that was
        # committed here has been redacted to match the already-masked
        # username/password placeholders.
        self.sf = Salesforce(username="******", password="******",
                             security_token="******", version="36.0")
        print("LoggedIn Successfully")

    def record_analysis(self, data):
        """Streaming-API callback: dispatch `data` to the internal or external
        predictor depending on the org-wide custom setting."""
        response = self.sf.query("SELECT Enable_Custom_Loan_Status_Predictor__c FROM MySetting__c")

        if response['records'][0]['Enable_Custom_Loan_Status_Predictor__c']:
            self.internal_api(data)
        else:
            self.external_api(data)

    def internal_api(self, data):
        # Placeholder: the internal predictor is not implemented yet.
        pass

    def external_api(self, data):
        """Collect the model features present on the pushed sobject, run the
        external prediction API, and write the result back to the record."""
        fields = [
                "Loan_Amount__c",
                "Term__c", "Interest_Rate__c",
                "Grade__c",
                "Sub_Grade__c",
                "Employee_Length__c",
                "Home_Ownership__c",
                "Annual_Income__c",
                "Verification_Status__c",
                "Purpose__c",
                "Zip_Code__c",
                "Address_State__c",
                "Dti__c",
                "Delinquent_2yrs__c",
                "Earliest_Credit_Line__c",
                "Inquiry_Last_6mths__c",
                "Months_Since_Last_Delinquent__c",
                "Open_Account__c",
                "Total_Account__c",
                "Out_Principle__c",
                "Account_Now_Delinquent__c"
                ]

        sobject = data["data"]["sobject"]
        valuesToSend = []
        listOfFields = []
        # Record id saved up front so the prediction can be written back later.
        updateId = sobject["Id"]

        for field in fields:
            # Only fields actually present (and non-null) on the push payload
            # are forwarded to the model.
            if field in sobject and sobject[field] is not None:
                if field == 'Earliest_Credit_Line__c' and sobject[field]:
                    # Normalize the ISO datetime to a 'MonthName-YY' label
                    # (e.g. 'January-16'), the format the model was trained on.
                    dt64 = str(np.datetime64(sobject[field]))
                    dt64 = dt64[:dt64.index('T')]
                    unix = time.mktime(datetime.strptime(dt64, "%Y-%m-%d").timetuple())
                    formattedTime = datetime.fromtimestamp(int(unix)).strftime('%B-%y')
                    valuesToSend.append(formattedTime)
                else:
                    valuesToSend.append(sobject[field])

                listOfFields.append(field)

        # Pick the model matching the available field subset, then predict.
        predictionApi.model_id, predictionApi.file_name, predictionApi.listToSend = SelectModel.model(listOfFields,
                                                                                                      valuesToSend)
        loan_status_response_prediction = predictionApi.main()

        # Write the prediction back to the originating Salesforce record.
        self.sf.Loan_Application_Prediction__c.update(updateId, {'Predicted_Loan_Status__c': loan_status_response_prediction})

    def runJob(self, query):
        """Open a CometD subscription for the LoanStatusPrediction PushTopic
        and dispatch events to record_analysis.  Returns False when no query
        is supplied; otherwise blocks until the client raises."""
        if query is None:
            return False

        version = str(36.0)
        name = 'LoanStatusPrediction'   # PushTopic name; topic creation is handled elsewhere
        url = 'https://' + self.sf.sf_instance.encode()
        client = BayeuxClient(url + '/cometd/' + version, 'OAuth ' + self.sf.session_id)
        client.register('/topic/' + name, self.record_analysis)
        try:
            client.start()

            # Keep the process alive while the client delivers events.
            # Sleeping replaces the original `while True: pass`, which
            # busy-waited and pinned a CPU core.
            while True:
                time.sleep(1)

        except Exception as e:
            print(str(e))

    def create_record(self, sobj, sdata, sf):
        """POST a new record of type `sobj` with payload `sdata`; returns the
        parsed JSON response (ordered).  The `sf` parameter is kept for
        interface compatibility but self.sf is what is actually used."""
        new_url = self.sf.base_url + 'sobjects/{object_name}/'.format(object_name=sobj)
        response = self.sf._call_salesforce('POST', new_url, data=json.dumps(sdata))

        return response.json(object_pairs_hook=OrderedDict)

    def delete_record(self, sobj, sid):
        """DELETE record `sid` of type `sobj`; returns the raw response."""
        new_url = self.sf.base_url + 'sobjects/{object_name}/{object_id}/'.format(object_name=sobj, object_id=sid)
        response = self.sf._call_salesforce('DELETE', new_url)

        return response
예제 #2
0
class SalesforcePushApi(object):
    """API wrapper for the Salesforce Push (package upgrade) API.

    lazy:          list of object kinds ('subscribers', 'jobs') resolved
                   per-record on demand instead of bulk-loaded up front.
    default_where: dict mapping sobject names to WHERE fragments merged into
                   every query against that sobject.
    """

    def __init__(self, username, password, serverurl, lazy=None, default_where=None):
        # Orgs reached via test.salesforce.com are sandboxes.
        sandbox = serverurl.find('test.salesforce.com') != -1

        self.sf = Salesforce(username=username, password=password,
                             security_token='', sandbox=sandbox, version='36.0')

        self.lazy = lazy or []
        self.default_where = default_where or {}

    def return_query_records(self, query):
        """Run `query` with query_all; return its records, or [] when empty."""
        res = self.sf.query_all(query)
        if res['totalSize'] > 0:
            return res['records']
        return []

    def format_where_clause(self, where, obj=None):
        """Merge `where` with any default clause registered for `obj`; return
        a ' WHERE ...' suffix, or '' when there is nothing to filter on."""
        if obj and obj in self.default_where:
            default_where = self.default_where[obj]
            if where:
                where = '(%s) AND (%s)' % (default_where, where)
            else:
                where = '(%s)' % default_where
        if where:
            return ' WHERE %s' % where
        return ''

    def add_query_limit(self, query, limit):
        """Append a LIMIT clause to `query` when `limit` is truthy."""
        if not limit:
            return query
        return '%s LIMIT %s' % (query, limit)

    @memoize
    def get_packages(self, where=None, limit=None):
        """Raw MetadataPackage records."""
        where = self.format_where_clause(where)
        query = "SELECT id, name, namespaceprefix FROM MetadataPackage%s" % where
        query = self.add_query_limit(query, limit)
        return self.return_query_records(query)

    @memoize
    def get_package_objs(self, where=None, limit=None):
        """MetadataPackage wrapper objects."""
        return [
            MetadataPackage(
                push_api=self,
                sf_id=package['Id'],
                name=package['Name'],
                namespace=package['NamespacePrefix'],
            )
            for package in self.get_packages(where, limit)
        ]

    @memoize
    def get_packages_by_id(self, where=None, limit=None):
        """MetadataPackage objects keyed by Salesforce id."""
        return {p.sf_id: p for p in self.get_package_objs(where, limit)}

    @memoize
    def get_package_versions(self, where=None, limit=None):
        """Raw MetadataPackageVersion records, newest first."""
        where = self.format_where_clause(where)
        query = "SELECT Id, Name, MetadataPackageId, ReleaseState, MajorVersion, MinorVersion, PatchVersion, BuildNumber FROM MetadataPackageVersion%s ORDER BY MajorVersion DESC, MinorVersion DESC, PatchVersion, BuildNumber DESC" % where
        query = self.add_query_limit(query, limit)
        return self.return_query_records(query)

    @memoize
    def get_where_last_version(self, major=None, minor=None, beta=None):
        """WHERE fragment selecting the latest released (or beta) version."""
        where = "ReleaseState = 'Beta'" if beta else "ReleaseState = 'Released'"
        if major:
            where += " AND MajorVersion=%s" % int(major)
        if minor:
            where += " AND MinorVersion=%s" % int(minor)
        return where

    @memoize
    def get_package_version_objs(self, where=None, limit=None):
        """MetadataPackageVersion wrapper objects (parent packages resolved)."""
        packages = self.get_packages_by_id()
        return [
            MetadataPackageVersion(
                push_api=self,
                name=package_version['Name'],
                package=packages[package_version['MetadataPackageId']],
                state=package_version['ReleaseState'],
                major=package_version['MajorVersion'],
                minor=package_version['MinorVersion'],
                patch=package_version['PatchVersion'],
                build=package_version['BuildNumber'],
                sf_id=package_version['Id'],
            )
            for package_version in self.get_package_versions(where, limit)
        ]

    @memoize
    def get_package_versions_by_id(self, where=None, limit=None):
        """MetadataPackageVersion objects keyed by Salesforce id."""
        return {pv.sf_id: pv for pv in self.get_package_version_objs(where, limit)}

    @memoize
    def get_subscribers(self, where=None, limit=None):
        """Raw PackageSubscriber records."""
        where = self.format_where_clause(where, obj='PackageSubscriber')
        query = "SELECT Id, MetadataPackageVersionId, InstalledStatus, OrgName, OrgKey, OrgStatus, OrgType from PackageSubscriber%s" % where
        query = self.add_query_limit(query, limit)
        return self.return_query_records(query)

    @memoize
    def get_subscriber_objs(self, where=None, limit=None):
        """PackageSubscriber wrapper objects (installed versions resolved)."""
        package_versions = self.get_package_versions_by_id()
        return [
            PackageSubscriber(
                push_api=self,
                version=package_versions[subscriber['MetadataPackageVersionId']],
                status=subscriber['InstalledStatus'],
                org_name=subscriber['OrgName'],
                org_key=subscriber['OrgKey'],
                org_status=subscriber['OrgStatus'],
                org_type=subscriber['OrgType'],
                sf_id=subscriber['Id'],
            )
            for subscriber in self.get_subscribers(where, limit)
        ]

    @memoize
    def get_subscribers_by_org_key(self, where=None, limit=None):
        """PackageSubscriber objects keyed by subscriber org key."""
        return {s.org_key: s for s in self.get_subscriber_objs(where, limit)}

    @memoize
    def get_push_requests(self, where=None, limit=None):
        """Raw PackagePushRequest records, most recently scheduled first."""
        where = self.format_where_clause(where, obj='PackagePushRequest')
        query = "SELECT Id, PackageVersionId, ScheduledStartTime, Status FROM PackagePushRequest%s ORDER BY ScheduledStartTime DESC" % where
        query = self.add_query_limit(query, limit)
        return self.return_query_records(query)

    @memoize
    def get_push_request_objs(self, where=None, limit=None):
        """PackagePushRequest wrapper objects (target versions resolved)."""
        package_versions = self.get_package_versions_by_id()
        return [
            PackagePushRequest(
                push_api=self,
                version=package_versions[push_request['PackageVersionId']],
                start_time=push_request['ScheduledStartTime'],
                status=push_request['Status'],
                sf_id=push_request['Id'],
            )
            for push_request in self.get_push_requests(where, limit)
        ]

    @memoize
    def get_push_requests_by_id(self, where=None, limit=None):
        """PackagePushRequest objects keyed by Salesforce id."""
        return {r.sf_id: r for r in self.get_push_request_objs(where, limit)}

    @memoize
    def get_push_jobs(self, where=None, limit=None):
        """Raw PackagePushJob records."""
        where = self.format_where_clause(where)
        query = "SELECT Id, PackagePushRequestId, SubscriberOrganizationKey, Status FROM PackagePushJob%s" % where
        query = self.add_query_limit(query, limit)
        return self.return_query_records(query)

    @memoize
    def get_push_job_objs(self, where=None, limit=None):
        """PackagePushJob wrapper objects; jobs whose subscriber org cannot be
        matched are skipped."""
        push_job_objs = []
        lazy = 'subscribers' in self.lazy
        if not lazy:
            subscriberorgs = self.get_subscribers_by_org_key()
        push_requests = self.get_push_requests_by_id()
        for push_job in self.get_push_jobs(where, limit):
            if lazy:
                # Fetch only this job's subscriber org.
                orgs = self.get_subscriber_objs("OrgKey = '%s'" % push_job['SubscriberOrganizationKey'])
                org = orgs[0] if orgs else None
            else:
                if push_job['SubscriberOrganizationKey'] not in subscriberorgs:
                    # Unmatched subscriber org: skip the job entirely.
                    continue
                org = subscriberorgs[push_job['SubscriberOrganizationKey']]
            push_job_objs.append(
                PackagePushJob(
                    push_api=self,
                    request=push_requests[push_job['PackagePushRequestId']],
                    org=org,
                    status=push_job['Status'],
                    sf_id=push_job['Id'],
                )
            )
        return push_job_objs

    @memoize
    def get_push_jobs_by_id(self, where=None, limit=None):
        """PackagePushJob objects keyed by Salesforce id."""
        return {j.sf_id: j for j in self.get_push_job_objs(where, limit)}

    @memoize
    def get_push_errors(self, where=None, limit=None):
        """Raw PackagePushError records."""
        where = self.format_where_clause(where)
        query = "SELECT Id, PackagePushJobId, ErrorSeverity, ErrorType, ErrorTitle, ErrorMessage, ErrorDetails FROM PackagePushError%s" % where
        query = self.add_query_limit(query, limit)
        return self.return_query_records(query)

    @memoize
    def get_push_error_objs(self, where=None, limit=None):
        """PackagePushError wrapper objects (parent jobs resolved)."""
        push_error_objs = []
        lazy = 'jobs' in self.lazy
        if not lazy:
            push_jobs = self.get_push_jobs_by_id()
        for push_error in self.get_push_errors(where, limit):
            if lazy:
                jobs = self.get_push_job_objs(where="Id = '%s'" % push_error['PackagePushJobId'])
                job = jobs[0] if jobs else None
            else:
                # Bug fix: the original never assigned `job` on this path,
                # raising NameError whenever 'jobs' was not in self.lazy.
                # .get() yields None for jobs skipped due to unmatched orgs,
                # matching the lazy path's behavior.
                job = push_jobs.get(push_error['PackagePushJobId'])
            push_error_objs.append(
                PackagePushError(
                    push_api=self,
                    job=job,
                    severity=push_error['ErrorSeverity'],
                    error_type=push_error['ErrorType'],
                    title=push_error['ErrorTitle'],
                    message=push_error['ErrorMessage'],
                    details=push_error['ErrorDetails'],
                    sf_id=push_error['Id'],
                )
            )
        return push_error_objs

    @memoize
    def get_push_errors_by_id(self, where=None, limit=None):
        """PackagePushError objects keyed by Salesforce id."""
        return {err.sf_id: err for err in self.get_push_error_objs(where, limit)}

    def create_push_request(self, version, orgs, start=None):
        """Create a PackagePushRequest for `version` and schedule one
        PackagePushJob per org in `orgs`; returns the new request's id."""
        if not start:
            # By default, delay the push start by 15 minutes to allow manual
            # review and intervention.
            start = datetime.datetime.now() + datetime.timedelta(0, 15 * 60)

        # Create the request.
        res = self.sf.PackagePushRequest.create({
            'PackageVersionId': version.sf_id,
            'ScheduledStartTime': start.isoformat(),
        })
        request_id = res['id']

        # Schedule the target orgs in batches via the composite tree API.
        batch_size = 200

        for batch in batch_list(orgs, batch_size):

            batch_data = {'records': []}

            for i, org in enumerate(batch):
                batch_data['records'].append({
                    'attributes': {'type': 'PackagePushJob', 'referenceId': 'org%s' % i},
                    'PackagePushRequestId': request_id,
                    'SubscriberOrganizationKey': org,
                })

            try:
                res = self.sf._call_salesforce(
                    'POST',
                    self.sf.base_url + 'composite/tree/PackagePushJob',
                    data=json.dumps(batch_data),
                )
            except SalesforceMalformedRequest as e:
                for result in e.content['results']:
                    for error in result['errors']:
                        if error['statusCode'] == 'INVALID_OPERATION':
                            # Orgs we cannot push to are skipped, not fatal.
                            print(u'Skipping org, error message = {}'.format(error['message']))
                        else:
                            raise e

        return request_id

    def run_push_request(self, request_id):
        """Move the request to 'Pending' so Salesforce starts the push."""
        return self.sf.PackagePushRequest.update(request_id, {'Status': 'Pending'})
예제 #3
0
class SalesforcePushApi(object):
    """API wrapper for the Salesforce Push (package upgrade) API.

    lazy:          list of object kinds ('subscribers', 'jobs') resolved
                   per-record on demand instead of bulk-loaded up front.
    default_where: dict mapping sobject names to WHERE fragments merged into
                   every query against that sobject.
    """

    def __init__(self,
                 username,
                 password,
                 serverurl,
                 lazy=None,
                 default_where=None):
        # Orgs reached via test.salesforce.com are sandboxes.
        sandbox = serverurl.find('test.salesforce.com') != -1

        self.sf = Salesforce(username=username,
                             password=password,
                             security_token='',
                             sandbox=sandbox,
                             version='36.0')

        self.lazy = lazy or []
        self.default_where = default_where or {}

    def return_query_records(self, query):
        """Run `query` with query_all; return its records, or [] when empty."""
        res = self.sf.query_all(query)
        if res['totalSize'] > 0:
            return res['records']
        return []

    def format_where_clause(self, where, obj=None):
        """Merge `where` with any default clause registered for `obj`; return
        a ' WHERE ...' suffix, or '' when there is nothing to filter on."""
        if obj and obj in self.default_where:
            default_where = self.default_where[obj]
            if where:
                where = '(%s) AND (%s)' % (default_where, where)
            else:
                where = '(%s)' % default_where
        if where:
            return ' WHERE %s' % where
        return ''

    def add_query_limit(self, query, limit):
        """Append a LIMIT clause to `query` when `limit` is truthy."""
        if not limit:
            return query
        return '%s LIMIT %s' % (query, limit)

    @memoize
    def get_packages(self, where=None, limit=None):
        """Raw MetadataPackage records."""
        where = self.format_where_clause(where)
        query = "SELECT id, name, namespaceprefix FROM MetadataPackage%s" % where
        query = self.add_query_limit(query, limit)
        return self.return_query_records(query)

    @memoize
    def get_package_objs(self, where=None, limit=None):
        """MetadataPackage wrapper objects."""
        return [
            MetadataPackage(push_api=self,
                            sf_id=package['Id'],
                            name=package['Name'],
                            namespace=package['NamespacePrefix'])
            for package in self.get_packages(where, limit)
        ]

    @memoize
    def get_packages_by_id(self, where=None, limit=None):
        """MetadataPackage objects keyed by Salesforce id."""
        return {p.sf_id: p for p in self.get_package_objs(where, limit)}

    @memoize
    def get_package_versions(self, where=None, limit=None):
        """Raw MetadataPackageVersion records, newest first."""
        where = self.format_where_clause(where)
        query = "SELECT Id, Name, MetadataPackageId, ReleaseState, MajorVersion, MinorVersion, PatchVersion, BuildNumber FROM MetadataPackageVersion%s ORDER BY MajorVersion DESC, MinorVersion DESC, PatchVersion, BuildNumber DESC" % where
        query = self.add_query_limit(query, limit)
        return self.return_query_records(query)

    @memoize
    def get_where_last_version(self, major=None, minor=None, beta=None):
        """WHERE fragment selecting the latest released (or beta) version."""
        where = "ReleaseState = 'Beta'" if beta else "ReleaseState = 'Released'"
        if major:
            where += " AND MajorVersion=%s" % int(major)
        if minor:
            where += " AND MinorVersion=%s" % int(minor)
        return where

    @memoize
    def get_package_version_objs(self, where=None, limit=None):
        """MetadataPackageVersion wrapper objects (parent packages resolved)."""
        packages = self.get_packages_by_id()
        return [
            MetadataPackageVersion(
                push_api=self,
                name=package_version['Name'],
                package=packages[package_version['MetadataPackageId']],
                state=package_version['ReleaseState'],
                major=package_version['MajorVersion'],
                minor=package_version['MinorVersion'],
                patch=package_version['PatchVersion'],
                build=package_version['BuildNumber'],
                sf_id=package_version['Id'],
            ) for package_version in self.get_package_versions(where, limit)
        ]

    @memoize
    def get_package_versions_by_id(self, where=None, limit=None):
        """MetadataPackageVersion objects keyed by Salesforce id."""
        return {
            pv.sf_id: pv
            for pv in self.get_package_version_objs(where, limit)
        }

    @memoize
    def get_subscribers(self, where=None, limit=None):
        """Raw PackageSubscriber records."""
        where = self.format_where_clause(where, obj='PackageSubscriber')
        query = "SELECT Id, MetadataPackageVersionId, InstalledStatus, OrgName, OrgKey, OrgStatus, OrgType from PackageSubscriber%s" % where
        query = self.add_query_limit(query, limit)
        return self.return_query_records(query)

    @memoize
    def get_subscriber_objs(self, where=None, limit=None):
        """PackageSubscriber wrapper objects (installed versions resolved)."""
        package_versions = self.get_package_versions_by_id()
        return [
            PackageSubscriber(
                push_api=self,
                version=package_versions[subscriber['MetadataPackageVersionId']],
                status=subscriber['InstalledStatus'],
                org_name=subscriber['OrgName'],
                org_key=subscriber['OrgKey'],
                org_status=subscriber['OrgStatus'],
                org_type=subscriber['OrgType'],
                sf_id=subscriber['Id'],
            ) for subscriber in self.get_subscribers(where, limit)
        ]

    @memoize
    def get_subscribers_by_org_key(self, where=None, limit=None):
        """PackageSubscriber objects keyed by subscriber org key."""
        return {s.org_key: s for s in self.get_subscriber_objs(where, limit)}

    @memoize
    def get_push_requests(self, where=None, limit=None):
        """Raw PackagePushRequest records, most recently scheduled first."""
        where = self.format_where_clause(where, obj='PackagePushRequest')
        query = "SELECT Id, PackageVersionId, ScheduledStartTime, Status FROM PackagePushRequest%s ORDER BY ScheduledStartTime DESC" % where
        query = self.add_query_limit(query, limit)
        return self.return_query_records(query)

    @memoize
    def get_push_request_objs(self, where=None, limit=None):
        """PackagePushRequest wrapper objects (target versions resolved)."""
        package_versions = self.get_package_versions_by_id()
        return [
            PackagePushRequest(
                push_api=self,
                version=package_versions[push_request['PackageVersionId']],
                start_time=push_request['ScheduledStartTime'],
                status=push_request['Status'],
                sf_id=push_request['Id'],
            ) for push_request in self.get_push_requests(where, limit)
        ]

    @memoize
    def get_push_requests_by_id(self, where=None, limit=None):
        """PackagePushRequest objects keyed by Salesforce id."""
        return {
            r.sf_id: r
            for r in self.get_push_request_objs(where, limit)
        }

    @memoize
    def get_push_jobs(self, where=None, limit=None):
        """Raw PackagePushJob records."""
        where = self.format_where_clause(where)
        query = "SELECT Id, PackagePushRequestId, SubscriberOrganizationKey, Status FROM PackagePushJob%s" % where
        query = self.add_query_limit(query, limit)
        return self.return_query_records(query)

    @memoize
    def get_push_job_objs(self, where=None, limit=None):
        """PackagePushJob wrapper objects; jobs whose subscriber org cannot be
        matched are skipped."""
        push_job_objs = []
        lazy = 'subscribers' in self.lazy
        if not lazy:
            subscriberorgs = self.get_subscribers_by_org_key()
        push_requests = self.get_push_requests_by_id()
        for push_job in self.get_push_jobs(where, limit):
            if lazy:
                # Fetch only this job's subscriber org.
                orgs = self.get_subscriber_objs(
                    "OrgKey = '%s'" % push_job['SubscriberOrganizationKey'])
                org = orgs[0] if orgs else None
            else:
                if push_job['SubscriberOrganizationKey'] not in subscriberorgs:
                    # Unmatched subscriber org: skip the job entirely.
                    continue
                org = subscriberorgs[push_job['SubscriberOrganizationKey']]
            push_job_objs.append(
                PackagePushJob(
                    push_api=self,
                    request=push_requests[push_job['PackagePushRequestId']],
                    org=org,
                    status=push_job['Status'],
                    sf_id=push_job['Id'],
                ))
        return push_job_objs

    @memoize
    def get_push_jobs_by_id(self, where=None, limit=None):
        """PackagePushJob objects keyed by Salesforce id."""
        return {j.sf_id: j for j in self.get_push_job_objs(where, limit)}

    @memoize
    def get_push_errors(self, where=None, limit=None):
        """Raw PackagePushError records."""
        where = self.format_where_clause(where)
        query = "SELECT Id, PackagePushJobId, ErrorSeverity, ErrorType, ErrorTitle, ErrorMessage, ErrorDetails FROM PackagePushError%s" % where
        query = self.add_query_limit(query, limit)
        return self.return_query_records(query)

    @memoize
    def get_push_error_objs(self, where=None, limit=None):
        """PackagePushError wrapper objects (parent jobs resolved)."""
        push_error_objs = []
        lazy = 'jobs' in self.lazy
        if not lazy:
            push_jobs = self.get_push_jobs_by_id()
        for push_error in self.get_push_errors(where, limit):
            if lazy:
                jobs = self.get_push_job_objs(
                    where="Id = '%s'" % push_error['PackagePushJobId'])
                job = jobs[0] if jobs else None
            else:
                # Bug fix: the original never assigned `job` on this path,
                # raising NameError whenever 'jobs' was not in self.lazy.
                # .get() yields None for jobs skipped due to unmatched orgs,
                # matching the lazy path's behavior.
                job = push_jobs.get(push_error['PackagePushJobId'])
            push_error_objs.append(
                PackagePushError(
                    push_api=self,
                    job=job,
                    severity=push_error['ErrorSeverity'],
                    error_type=push_error['ErrorType'],
                    title=push_error['ErrorTitle'],
                    message=push_error['ErrorMessage'],
                    details=push_error['ErrorDetails'],
                    sf_id=push_error['Id'],
                ))
        return push_error_objs

    @memoize
    def get_push_errors_by_id(self, where=None, limit=None):
        """PackagePushError objects keyed by Salesforce id."""
        return {
            err.sf_id: err
            for err in self.get_push_error_objs(where, limit)
        }

    def create_push_request(self, version, orgs, start=None):
        """Create a PackagePushRequest for `version` and schedule one
        PackagePushJob per org in `orgs`; returns the new request's id."""
        if not start:
            # By default, delay the push start by 15 minutes to allow manual
            # review and intervention.
            start = datetime.datetime.now() + datetime.timedelta(0, 15 * 60)

        # Create the request.
        res = self.sf.PackagePushRequest.create({
            'PackageVersionId': version.sf_id,
            'ScheduledStartTime': start.isoformat(),
        })
        request_id = res['id']

        # Schedule the target orgs in batches via the composite tree API.
        batch_size = 200

        for batch in batch_list(orgs, batch_size):

            batch_data = {'records': []}

            for i, org in enumerate(batch):
                batch_data['records'].append({
                    'attributes': {
                        'type': 'PackagePushJob',
                        'referenceId': 'org%s' % i
                    },
                    'PackagePushRequestId': request_id,
                    'SubscriberOrganizationKey': org,
                })

            try:
                res = self.sf._call_salesforce(
                    'POST',
                    self.sf.base_url + 'composite/tree/PackagePushJob',
                    data=json.dumps(batch_data),
                )
            except SalesforceMalformedRequest as e:
                for result in e.content['results']:
                    for error in result['errors']:
                        if error['statusCode'] == 'INVALID_OPERATION':
                            # Orgs we cannot push to are skipped, not fatal.
                            print(u'Skipping org, error message = {}'.format(
                                error['message']))
                        else:
                            raise e

        return request_id

    def run_push_request(self, request_id):
        """Move the request to 'Pending' so Salesforce starts the push."""
        return self.sf.PackagePushRequest.update(request_id,
                                                 {'Status': 'Pending'})
예제 #4
0
class SalesForceModel:
    """Model wrapping a simple_salesforce session for a support-case app.

    Holds credentials and the active ``Salesforce`` connection, reads and
    writes a local ``config.json``, and exposes query helpers for Cases,
    Attachments and ``cg__CaseFile__c`` records, plus file-download and
    folder utilities.
    """

    def __init__(self):
        # Configuration file lives next to this module.
        self.config_file = os.path.join(os.path.dirname(__file__),
                                        "config.json")
        # BUG FIX: the original line was ``self.sf: Salesforce()`` -- a
        # runtime-evaluated annotation, NOT an assignment.  It attempted to
        # construct a credential-less Salesforce client and never bound
        # ``self.sf``.  Start with no session; sf_log_in()/re_login() create
        # one.
        self.sf = None
        self.user_id = ""
        self.username = ""
        self.password = ""
        self.api_key = ""

    def get_sf_username(self):
        """Return the cached Salesforce username."""
        return self.username

    def set_sf_username(self, username):
        """Cache the Salesforce username used for (re)login."""
        self.username = username

    def get_sf_password(self):
        """Return the cached Salesforce password."""
        return self.password

    def set_sf_password(self, password):
        """Cache the Salesforce password used for (re)login."""
        self.password = password

    def get_sf_api_key(self):
        """Return the cached security token (API key)."""
        return self.api_key

    def set_sf_api_key(self, api_key):
        """Cache the security token (API key) used for (re)login."""
        self.api_key = api_key

    def get_api_secret(self):
        """Return ``secret_key`` from config.json, or '' if unavailable."""
        api_secret = ''
        config_file = os.path.join(os.path.dirname(__file__), "config.json")
        try:
            with open(config_file, "r") as f:
                data = json.load(f)
                api_secret = data['secret_key']
        except FileNotFoundError as e:
            # BUG FIX: the original printed str(FileNotFoundError) -- the
            # exception CLASS, not the caught instance.
            print("no config file found! " + str(e))
            api_secret = ''
        except KeyError as e:
            print("no such key - secret_key! " + str(e))
            api_secret = ''
        return api_secret

    def update_conf(self, key, value):
        """Read config.json, set ``key`` to ``value``, and write it back."""
        config_file = self.config_file
        with open(config_file, "r") as cf:
            config_data = json.load(cf)
        config_data.update({key: value})
        with open(config_file, 'w') as cf:
            json.dump(config_data, cf)

    def set_conf_api_secret(self, new_api_key):
        """Persist *new_api_key* under ``secret_key`` in config.json."""
        # Identical read-modify-write as update_conf; delegate instead of
        # duplicating the file handling.
        self.update_conf('secret_key', new_api_key)

    def sf_log_in(self, username=None, password=None, api_key=None):
        """Authenticate against Salesforce and cache the credentials."""
        self.username = username
        self.password = password
        self.api_key = api_key
        self.sf = Salesforce(username=username,
                             password=password,
                             security_token=api_key,
                             version='50.0')
        self.user_id = self.get_user_id()

    def re_login(self):
        """Re-authenticate using the previously cached credentials."""
        # Same login sequence as sf_log_in; delegate to keep one code path.
        self.sf_log_in(self.get_sf_username(),
                       self.get_sf_password(),
                       self.get_sf_api_key())

    def logout(self):
        """Close the underlying HTTP session."""
        return self.sf.session.close()

    def get_user(self, info=None):
        """Return the logged-in User record, or a single field from it.

        ``info`` may be a field name such as 'Id', 'FirstName', 'LastName'
        or 'Title'; when omitted the whole record is returned.
        """
        # The identity URL ends with the 18-character user Id.
        identity_url = self.sf.restful('')['identity']
        user = self.sf.User.get(identity_url[-18:])
        if info is not None:
            return user[info]
        else:
            return user

    def set_user_id(self, user_id):
        """Override the cached user id."""
        self.user_id = user_id

    def get_user_id(self):
        """Return the logged-in user's Salesforce Id."""
        return self.get_user('Id')

    def get_user_fname(self):
        """Return the logged-in user's first name."""
        return self.get_user('FirstName')

    def get_user_lname(self):
        """Return the logged-in user's last name."""
        return self.get_user('LastName')

    def get_user_title(self):
        """Return the logged-in user's title."""
        return self.get_user('Title')

    def get_current_case_list(self, sf=None):
        """Return open Cases owned by the current user (list of records)."""
        userid = self.get_user_id()
        # NOTE(review): queries here are built by string concatenation.
        # userid comes from the API so risk is low, but SOQL injection is
        # possible if these inputs ever become user-controlled.
        query = "SELECT Id, CaseNumber, OwnerId, Status, Subject FROM Case WHERE OwnerId = '" + \
                userid + "' AND (Status = 'Active' OR Status='New' OR Status='Re-opened' OR Status='On Hold')"
        client = sf if sf is not None else self.sf
        result = client.query(query)  # renamed: 'list' shadowed the builtin
        return result['records']

    def get_attachment_list(self, case_id, sf=None):
        """Return Attachment records attached to the given Case id."""
        query = """SELECT 
                        Id, Name, Body, BodyLength, CreatedDate, 
                        LastModifiedDate, Description 
                    FROM Attachment 
                        WHERE ParentId = '""" + case_id + "'"
        client = sf if sf is not None else self.sf
        result = client.query(query)
        return result['records']

    def get_sfile_list(self, case_id, sf=None):
        """Return cg__CaseFile__c records for the given Case id."""
        query = """SELECT 
                        Id, Name, 
                        cg__Case__c, 
                        cg__File_Name__c, 
                        cg__File_Size_in_Bytes__c, 
                        cg__File_Size__c,
                        CreatedDate,
                        LastModifiedDate,
                        cg__Description__c,
                        cg__Parent_Folder_Id__c,
                        cg__Key__c
                    FROM cg__CaseFile__c 
                        WHERE cg__Case__c = '""" + case_id + "'"
        client = sf if sf is not None else self.sf
        result = client.query(query)
        return result['records']

    def get_case_by_id(self, case_id, sf=None):
        """Fetch a Case record by its Salesforce Id."""
        client = sf if sf is not None else self.sf
        return client.Case.get(case_id)

    def get_case_by_number(self, case_number, sf=None):
        """Return the first open Case owned by this user with *case_number*."""
        query = "SELECT Id, CaseNumber, OwnerId, Status, Subject FROM Case WHERE OwnerId = '" + \
                self.user_id + "' AND (Status = 'Active' OR Status='New' OR Status='Re-opened') AND (CaseNumber = '" + case_number + "')"
        client = sf if sf is not None else self.sf
        case_dic = client.query(query)
        return case_dic['records'][0]

    def get_case_id(self, case_number, sf=None):
        """Return the Id of the Case with *case_number*."""
        # Same query as get_case_by_number; delegate and pick the Id field.
        return self.get_case_by_number(case_number, sf)['Id']

    def check_session(self, sf=None):
        """Return the identity URL; raises if the session is no longer valid.

        NOTE(review): the ``sf`` parameter is unused (kept for signature
        compatibility with the other helpers).
        """
        return self.sf.restful('')['identity']

    def download_file(self, file_path, attach_name, attach_id, attach_body,
                      attach_length, current_row_number):
        """Download an attachment body to *file_path* in chunks.

        Prints rough progress, then opens Windows Explorer with the file
        selected (Windows-specific).  NOTE(review): the base URL is
        hard-coded to a specific org -- confirm this is intended.
        """
        completed = 0
        total = attach_length

        base = 'https://tableau.my.salesforce.com/'
        url = base + attach_body
        result = self.sf._call_salesforce(method='GET', url=url)

        try:
            with open(file_path, 'wb+') as file:
                total = int(total)
                # Retrieve the bytes from the resource incrementally; chunk
                # size is ~0.1% of the file, but at least 1 MiB.
                for chunk in result.iter_content(
                        chunk_size=max(int(total / 1000), 1024 * 1024)):
                    completed += len(chunk)
                    file.write(chunk)
                    done = int(50 * completed / total)
                    print(str(completed))
                    print(str(done))
            print("Download Completed!")
            file_path = file_path.replace("/", "\\")
            # Windows specific function
            open_ex_file = r'explorer /select, "' + file_path + '"'
            subprocess.Popen(open_ex_file)
        except Exception as e:
            print(str(e))

    def create_folder(self, path):
        """Create *path* (and parents); return a short status string."""
        import errno  # local import: keeps this fix self-contained

        print(path)
        try:
            os.makedirs(path)
        except OSError as e:
            # BUG FIX: ``e.winerror`` only exists on Windows; on POSIX the
            # attribute access itself raised AttributeError inside this
            # handler.  Check it defensively and also accept the portable
            # errno for "already exists".
            if getattr(e, 'winerror', None) == 183 or e.errno == errno.EEXIST:
                # (typo preserved: callers may compare against this string)
                return "folder_alreay_exist"
            else:
                return str(e)
        else:
            return "folder_created"
# Example #5 (예제 #5)
# 0
def ingest(table_name, options):
    """Load new Salesforce EventLogFiles into the Snowflake landing table.

    Queries hourly EventLogFile records newer than the latest
    TIMESTAMP_DERIVED already landed, downloads each CSV log, converts it to
    a JSON array file, stages the files into the table stage and loads them.

    Args:
        table_name: bare landing-table name (stage + table share this name).
        options: dict with 'username', 'password', 'security_token' and
            'environment' ('test' selects the sandbox login domain).

    Returns:
        The number of event log files found (and, if > 0, loaded).
    """
    landing_table = f'data.{table_name}'
    username = options['username']
    password = options['password']
    security_token = options['security_token']
    environment_raw = options['environment']
    environment = 'test' if environment_raw == 'test' else None

    # We will fetch EventLogFiles where the LogDate is greater than the
    # maximum timestamp seen in all previous EventLogFiles.
    start_time = db.fetch_latest(landing_table, col='raw:TIMESTAMP_DERIVED')
    if start_time is None:
        start_time = '1900-01-01T00:00:00.000Z'

    # TODO: Support more auth methods, including client certificates.
    sf = Salesforce(
        username=username,
        password=password,
        security_token=security_token,
        client_id='SnowAlert',
        domain=environment,
    )
    # SOQL datetime literals are unquoted, hence the bare interpolation.
    event_log_soql_query = (f'SELECT id, eventtype, logdate '
                            f'FROM eventlogfile '
                            f'WHERE interval=\'Hourly\' '
                            f'  AND logdate > {start_time}')
    log.info(f'Querying event logs: {event_log_soql_query}')
    log_files = sf.query_all(event_log_soql_query)

    # Salesforce provides an hourly CSV extract per event type (~50 types,
    # all with different fields).  Rather than a table per event type we
    # convert to JSON and do schema-on-read; the table stage is configured
    # with STRIP_OUTER_ARRAY so each event becomes one row.
    total_files = log_files['totalSize']
    log.info(f'Found {total_files} event files to load.')
    if total_files > 0:
        # BUG FIX: the temp directory used to be created unconditionally and
        # removed only on the success path, leaking it when there were no
        # files or when a download/parse raised.  Create it only when there
        # is work and guarantee cleanup with try/finally.
        temp_dir = tempfile.mkdtemp('_sfevents')
        try:
            for record in log_files['records']:
                url = record['attributes']['url']
                file_id = record['Id']  # renamed: 'id' shadowed the builtin
                log.info(f'Downloading event log file {file_id} from {url}.')

                # The URL provided is relative but includes part of the base
                # URL, which we trim out before combining.  E.g.
                # /services/data/v38.0/sobjects/EventLogFile/0AT0o00000NSIv5GAB
                # against base https://ap8.salesforce.com/services/data/v38.0/
                url_relative = ('sobjects/' + url.split('sobjects/')[1]
                                + '/LogFile')
                result = sf._call_salesforce('GET',
                                             sf.base_url + url_relative,
                                             name=url_relative)

                # TODO: Investigate streaming the result and converting to
                # JSON in chunks.  Current method holds the whole file in
                # memory, but hourly extracts are unlikely to be multi-GB
                # unless it's a really busy Salesforce org.
                reader = csv.DictReader(io.StringIO(result.text))
                file_path = os.path.join(temp_dir, file_id + '.json')
                with open(file_path, 'w') as f:
                    # Single-line JSON file containing an array of objects.
                    json.dump(list(reader), f)

            # Copy all the staged .json files into the landing table.
            log.info(f'Uploading all files to Snowflake stage: {table_name}.')
            db.copy_file_to_table_stage(table_name,
                                        os.path.join(temp_dir, '*.json'))
            log.info('Upload successful, deleting all local files.')
        finally:
            shutil.rmtree(temp_dir)

        # The table is configured to purge upon load from its stage, so no
        # further cleanup is needed.
        log.info('Copying events into Snowflake table from staged files.')
        db.load_from_table_stage(table_name)
        log.info(f'Loaded {total_files} event files.')
    else:
        log.info('Skipping load as there are no new event files.')

    return total_files