def uploadFile(site, sessionid, srcdir, filename, accountid, workdir, description):
    """Upload one file as a Salesforce Attachment via the Bulk API.

    Packages the source file (createAttachment), creates an "insert" job on
    the Attachment object, submits the zip batch, polls the batch until it
    reaches "Completed" or "Failed", then closes the job.  Failures are
    logged and reported via mailError().

    :param site:       Salesforce instance endpoint for the Bulk API calls
    :param sessionid:  authenticated Salesforce session id
    :param srcdir:     directory containing the file to upload
    :param filename:   name of the file to attach
    :param accountid:  Salesforce account the attachment is linked to
    :param workdir:    scratch directory used to build the zip payload
    :param description: attachment description text
    :return: 0 on success, -1 on any failure.
        (Bug fix: the original computed retcode but never returned it.)
    """
    import time  # hoisted: was re-imported on every poll iteration

    logger.debug("in main")
    retcode = -1
    try:
        dir, zip, uue = createAttachment(workdir, srcdir, filename, accountid, description)
        jobid = sf.createJob("insert", "Attachment", "ZIP_CSV", site, sessionid)
        logger.debug("JOBID: "+jobid)
        state = sf.checkJobStatus(jobid, site, sessionid)
        logger.debug("JOB state: "+state)
        batchid = sf.submitAttachment(jobid, site, sessionid, zip)
        logger.debug("BATCHID "+str(batchid))
        state = sf.checkJobStatus(jobid, site, sessionid)
        logger.debug("JOB state: "+state)
        while True:  # loop till we complete or error out
            bat = sf.checkBatch(jobid, batchid, site, sessionid)
            logger.debug("BATCH state "+str(bat))
            batchState = str(bat["state"])
            if batchState == "Completed":
                logger.debug("COMPLETE")
                # Exactly one record (the attachment) should have been processed.
                pnum = int(bat["numberRecordsProcessed"])
                if pnum == 1:
                    fnum = int(bat["numberRecordsFailed"])
                    if fnum == 1:
                        logger.error("Upload failed")
                        mailError("Upload of %s to account %s FAILED"%(filename, accountid))
                    else:
                        retcode = 0
                else:
                    logger.error("Something wrong - numberof records processed is %d"%pnum)
                    mailError("Something wrong - numberof records processed is %d"%pnum)
                break
            if batchState == "Failed":
                logger.error("ATTACH FAILED")
                mailError("ATTACH FAILED")
                break
            time.sleep(sleepTime)
        logger.debug("JOB state AFTER: "+state)
        sf.closeJob(jobid, site, sessionid)
        logger.debug("JOB state AFTER CLOSE: "+state)
    except sf.SFException as e:  # 'as' syntax is valid on both Python 2.6+ and 3
        logger.error(str(e))
        mailError(str(e))
    return retcode
def runSFQuery(site, sessionid, sfObject, sfSQL):
    # FIXME: push this function up to the trunk
    """Run a Bulk API SELECT and return its result rows.

    Creates a "query" job on *sfObject*, submits *sfSQL* as a batch, polls
    until the batch completes or fails, closes the job, and parses the CSV
    result.  A query failure is reported via mailError().

    :param site:      Salesforce instance endpoint
    :param sessionid: authenticated Salesforce session id
    :param sfObject:  Salesforce object the query runs against
    :param sfSQL:     SOQL query text
    :return: tuple (rcount, rset) where rcount is numberRecordsProcessed and
        rset is a list of CSV rows (header excluded) when rcount is non-zero,
        otherwise the empty string "".
    """
    import time  # hoisted: was re-imported on every poll iteration
    import csv

    logger.debug(" runSFQuery "+sfSQL)
    logger.debug(" sfObject "+sfObject)
    logger.debug(" site "+site)
    logger.debug(" sessionid "+sessionid)
    jobid = sf.createJob("query", sfObject, "CSV", site, sessionid)
    logger.debug("Running query JOBID: "+jobid)
    batchid = sf.submitSelect(jobid, site, sessionid, sfSQL)
    logger.debug("BATCH "+batchid)
    state = sf.checkJobStatus(jobid, site, sessionid)
    logger.debug("JOB state: "+state)
    rcount = 0
    rset = ""
    while True:  # loop till we complete or error out
        bat = sf.checkBatch(jobid, batchid, site, sessionid)
        logger.debug("BATCH state "+str(bat))
        batchState = str(bat["state"])
        if batchState == "Completed":
            rcount = int(str(bat["numberRecordsProcessed"]))
            logger.info("NUMBER OF RECORDS %d"%rcount)
            rset = sf.getResults(jobid, batchid, site, sessionid)
            break
        if batchState == "Failed":
            logger.error("QUERY FAILED")
            mailError("QUERY %s FAILED"%sfSQL)
            break
        time.sleep(sleepTime)
    logger.debug("JOB state AFTER: "+state)
    sf.closeJob(jobid, site, sessionid)
    logger.debug("JOB state AFTER CLOSE: "+state)
    if rcount:
        import cStringIO  # Python 2 stdlib; file targets Python 2
        ios = cStringIO.StringIO(rset)
        rdr = csv.reader(ios, delimiter=',', quotechar='"')
        next(rdr, None)  # skip the CSV header row (replaces first-row flag)
        rset = [row for row in rdr]
    return (rcount, rset)
def main(oper, fn, fo, loginURL, username, password): print oper, fn, fo rcount = 0 fcount = 0 res_msg = "" logger = utils.createLogger("upsert") sf.setLogger(logger) op = open(fo, "w") lx = sf.login(username, password, loginURL) if not lx: return site = lx[0] sessionid = lx[1] jobid = sf.createJob(oper, "Contact", "CSV", site, sessionid) logger.debug( "JOBID: "+jobid) state = sf.checkJobStatus(jobid, site, sessionid) logger.debug("JOB state: "+state) batchid = sf.submitUpdate(jobid, site, sessionid, fn) logger.debug("BATCH "+batchid) while True: # loop till we complete or error out bat = sf.checkBatch(jobid, batchid, site, sessionid) logger.debug("BATCH state "+ str(bat)) batchState = str(bat["state"]) if batchState == "Completed": rcount = int(str(bat["numberRecordsProcessed"])) fcount = int(str(bat["numberRecordsFailed"])) logger.info("NUMBER OF RECORDS PROCESSED %d"%rcount) logger.info("NUMBER OF RECORDS FAILED %d"%fcount) r = sf.getUpdateResults(jobid, batchid, site, sessionid) op.write(r) break if batchState == "Failed": logger.error("QUERY FAILED") res_msg = str(bat["stateMessage"]) logger.error(res_msg) break import time time.sleep(sleepTime) logger.debug("JOB state AFTER: "+state) sf.closeJob(jobid, site, sessionid) logger.debug("JOB state AFTER CLOSE: "+state) return res_msg, rcount, fcount
def main(fn, loginURL, username, password, whereClause = None):
    """Export Contact subscription fields to a CSV file via the Bulk API.

    Logs in, runs a Bulk API query selecting id/Email/name/subscription
    fields from Contact (optionally restricted by *whereClause*), polls the
    batch until it completes or fails, writes the raw CSV result to *fn*,
    and closes the job.

    :param fn:          path the result CSV is written to
    :param loginURL:    Salesforce login endpoint
    :param username:    Salesforce username
    :param password:    Salesforce password
    :param whereClause: optional text appended to the SOQL (e.g. a WHERE
        clause); None appends nothing
    :return: None (returns early when login fails)
    """
    import time  # hoisted: was re-imported on every poll iteration

    logger = utils.createLogger("p1.py")
    sf.setLogger(logger)
    lx = sf.login(username, password, loginURL)
    if not lx:
        return
    site = lx[0]
    sessionid = lx[1]
    jobid = sf.createJob("query", "Contact", "CSV", site, sessionid)
    logger.debug("JOBID: "+jobid)
    sfSQL = "select id, Email, firstname, lastname, FPN_Subscriptions__c,Stormpost_Reason_Date__c,Stormpost_Reason__c from contact"
    if whereClause is not None:
        sfSQL = sfSQL + " " + whereClause
    logger.debug("SQL : "+sfSQL)
    batchid = sf.submitSelect(jobid, site, sessionid, sfSQL)
    logger.debug("BATCH "+batchid)
    state = sf.checkJobStatus(jobid, site, sessionid)
    logger.debug("JOB state: "+state)
    while True:  # loop till we complete or error out
        bat = sf.checkBatch(jobid, batchid, site, sessionid)
        logger.debug("BATCH state "+str(bat))
        batchState = str(bat["state"])
        if batchState == "Completed":
            rcount = int(str(bat["numberRecordsProcessed"]))
            logger.info("NUMBER OF RECORDS %d"%rcount)
            r = sf.getResults(jobid, batchid, site, sessionid)
            # Bug fix: open(...).write(...) leaked the handle and bound the
            # write() return value; 'with' closes and flushes deterministically.
            with open(fn, "w") as op:
                op.write(r)
            break
        if batchState == "Failed":
            logger.error("QUERY FAILED")
            break
        time.sleep(sleepTime)
    logger.debug("JOB state AFTER: "+state)
    sf.closeJob(jobid, site, sessionid)
    logger.debug("JOB state AFTER CLOSE: "+state)