Example No. 1
def main(fout):
    import csv
    import os
    import p1
    import utils  # project helper module providing getSFSiteCred(); prepareDestDir() is defined elsewhere in this script

    # Salesforce site credentials: (url, username, password)
    bx = utils.getSFSiteCred()

    prepareDestDir(fout)
    tmpfile = "/tmp/getFPUsers.csv"
    
    p1.main(tmpfile, bx[0], bx[1], bx[2], " where FPN_Subscriptions__c != '' and FPN_Subscriptions__c != 'Does Not Want' and FPN_Subscriptions__c != 'Not Authorized - see JMC'")


    map = {}
#    fo = open(fout, "w")
    first = True
    for r in csv.reader(open(tmpfile, 'r')):
        if first: first = False
        else:
            if r[4] != "":
                key = r[1][:lflen].lower()  # lflen (prefix length) is defined elsewhere in the original module
                if key == "": key = "NOEMAILADDRESS"
                try:
                    m = map[key]
                except KeyError:
                    m = []
                    map[key] = m
                rec = "%s|%s|%s|%s\n"%(r[1], r[2], r[3], r[4])
                m.append(rec)
    for k in map.keys():
        p = os.path.join(fout, k)
        print fout+" MAKING "+k+" -- "+p
        try:
            os.makedirs(p)
            # dat.dat holds the full sorted records for this key
            map[k].sort()
            f = os.path.join(p, "dat.dat")
            of = open(f, "w")
            of.write("".join(map[k]))
            of.close()
            # map.dat holds just the e-mail addresses, one per line
            f = os.path.join(p, "map.dat")
            of = open(f, "w")
            for z in map[k]:
                of.write(z.split("|")[0]+"\n")
            of.close()
        except OSError:
            # makedirs raises OSError when the key is not usable as a directory name
            print "BAD EMAIL ADDRESS "+k
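
For reference, the grouping step above (bucket rows under a normalized e-mail key, then write one directory per bucket) can be sketched on its own with dict.setdefault; this is only an illustration, and prefix_len is a hypothetical stand-in for the script's lflen:

import csv

def group_rows_by_email(path, key_col=1, prefix_len=64):
    # Bucket CSV rows by a lower-cased e-mail prefix; rows with no
    # address fall into the "NOEMAILADDRESS" bucket, as in the script above.
    groups = {}
    with open(path) as fh:
        reader = csv.reader(fh)
        next(reader, None)                      # skip the header row
        for row in reader:
            key = row[key_col][:prefix_len].lower() or "NOEMAILADDRESS"
            groups.setdefault(key, []).append("|".join(row))
    return groups
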
Example No. 2
        # fragment of the row-processing loop; row, fpns (the valid subscription names),
        # fout (a csv.writer) and the counter k come from the enclosing function
        rt = [x.strip() for x in row]
        if len(rt) == 5:
            if k > 0:
                # keep only subscription names present in the whitelist for this run type
                rt[4] = ";".join([x.strip() for x in rt[4].strip().split(";") if x.strip() in fpns])

    #        logger.debug(rt);
            if rt[4] != "":
                # VERY DIRTY HACK for FPN Digest called as EU Free
                rt[4] = rt[4].replace("EU Digest", "EU Free")
                fout.writerow(rt)
        k = k + 1

def usage():
    print "usage: splitter.py runtype infile outfile"

if __name__ == "__main__":
    if len(sys.argv) != 4:
        usage()
        sys.exit(1)
    b = utils.getSFSiteCred()
    fpn_type_1 = ["EU Comp", "EU Contract", "EU Paid", "EU Digest"]
    fpn_type_2 = ["Pharma Global", "Constr Global", "Insur Global", "Bank Global"]
    runtype = int(sys.argv[1])
    if runtype == 1: rt = fpn_type_1
    elif runtype == 2: rt = fpn_type_2
    else:    
        r = utils.getValidFPNs()
        rt = [x for x in r if x not in fpn_type_1 and x not in fpn_type_2]
    main(rt,
        sys.argv[2], sys.argv[3])
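
The core of this splitter is the whitelist filter applied to the ';'-separated FPN_Subscriptions__c column; a stand-alone sketch of that step (the sample values in the comment are purely illustrative):

def filter_subscriptions(raw_value, allowed):
    # Keep only the ';'-separated entries that appear in the allowed list.
    kept = [x.strip() for x in raw_value.split(";") if x.strip() in allowed]
    return ";".join(kept)

# filter_subscriptions("EU Digest; EU Trial; EU Paid", ["EU Digest", "EU Paid"])
# -> "EU Digest;EU Paid"
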
Example No. 3
def main(workDir):
    import ConfigParser
    config = ConfigParser.RawConfigParser()
    # configPath (like configFile, logger and mailError used below) is defined at module level in the original script
    config.read(configPath)
    advisenRestURL = config.get('main', 'advisenRestURL') 
    advisenUser = config.get('main', 'advisenUser') 
    advisenPassword = config.get('main', 'advisenPassword') 
    ftpURL = config.get('main', 'ftpURL') 
    ftpUser = config.get('main', 'ftpUser') 
    ftpPassword = config.get('main', 'ftpPassword') 
    sfurl = config.get('main', 'sfurl') 


    # note: this debug line writes the REST password to the log; consider masking it
    logger.debug("Starting %s - %s - %s"%(advisenRestURL, advisenUser, advisenPassword))
    lr = submitAdvRequest(advisenRestURL, "login", 
        {"username":  advisenUser, "password": advisenPassword})
    logger.debug("After advisen login rc = "+str(lr["error"]))
    if lr["error"] != 0:
        logger.error("Cannot login to advisen REST server "+str(lr["error"]))
        mailError("Cannot login to advisen REST server "+str(lr["error"]))
        return lr["error"]
    
    lr = submitAdvRequest(advisenRestURL, "getsfpushreports")
    if lr["error"] != 0:
        logger.error("Cannot get list of records to work on "+str(lr["error"]))
        mailError("Cannot get list of records to work on "+str(lr["error"]))
        return lr["error"]
    aList = lr["result"]
    
    lr = submitAdvRequest(advisenRestURL, "logout")
    # drop all files from the FTP server
    if len(aList) == 0:
        logger.debug("Nothing to do")
        return 0
    mapFTPOK = {}
    mapBad = {}
    mapBadSFUpload = {}
    
    listEmail = []
    
    def genReportName(row):
        return row["reportname"]+".pdf"
        
    # two loops are used (download here, upload below) so each report is only downloaded once
    for r in aList:
        report = genReportName(r)
        if report not in mapFTPOK:
            logger.debug("Need to retrieve "+r["reportname"]+".pdf")
            cmd = "ncftpget -u%s -p%s %s %s /sfreports/%s"%(ftpUser, ftpPassword, ftpURL, workDir, report)
            rc,txt = commands.getstatusoutput(cmd)
            logger.debug("After retrieve %d"%rc)
            if rc:
                mapBad[report] = r
            else:
                mapFTPOK[report] = r

    ## FIXME - should be the SF account of the target
    loginURL, username, password = getSFSiteCred(configFile)
    sf.setLogger(logger)
    lx = sf.login(username, password, loginURL)
    
    if not lx:
        logger.error("Unable to login to SF")
        mailError("Unable to login to SF")
    else:
        site = lx[0]
        sessionid = lx[1]
        logger.debug("FIRST SF site: %s sess: %s"%(site, sessionid))
    
        # inGood is the collector for the DB update IN clause
        inGood = []
        for r in aList:
            report = genReportName(r)
            # this is correct
            # we only upload what we got from FTP server
            if report in mapFTPOK:
                accountid = r["accountid"]
                description = r["description"]
                companyname = r["companyname"]
                logger.debug("UPLOADING TO SF account %s - %s"%(accountid, report))
                rc = uploadFile(site, sessionid, workDir, report, accountid, workDir, description)
                if not rc:
                    inGood.append(r["reportid"])
                    sql = "select email, lastname, firstname from user where id = '%s'"%(str(r["userid"]))
                    uset = runSFQuery(site, sessionid, "User", sql)
                    logger.debug(str(uset))
                    if uset[0] == 1:
                        sql = "SELECT Id FROM Attachment where parentid = '%s' ORDER BY CreatedDate DESC NULLS FIRST"%(str(accountid))
                        aset = runSFQuery(site, sessionid, "Attachment", sql)
                        logger.debug(str(aset))
                        if aset[0]:
                            # take the top
                            link = sfurl + str(aset[1][0][0])
                            logger.debug(link)
                            addy = uset[1][0][0]
                            lastname = uset[1][0][1]
                            firstname = uset[1][0][2]
                            listEmail.append((addy, description, link, companyname, lastname, firstname))
                        else:
                            logger.error("Unable to get the ID of the attachment")
                            mailError("Unable to get the ID of the attachment")
                    else:
                        logger.error("USER: %s not found in SF"%(r["userid"]))
                        mailError("USER: %s not found in SF"%(r["userid"]))
                    
                else:
                    mapBadSFUpload[report] = r
        if len(inGood):
            lr = submitAdvRequest(advisenRestURL, "login", 
                {"username":  advisenUser, "password": advisenPassword})
            logger.debug("After advisen login rc = "+str(lr["error"]))
            if lr["error"] != 0:
                logger.error("Cannot login to advisen REST server "+str(lr["error"]))
                mailError("Cannot login to advisen REST server "+str(lr["error"]))
                return lr["error"]
            lr = submitAdvRequest(advisenRestURL, "updsfreportstat", inGood)
            if lr["error"] != 0:
                logger.error("FAILED to update status "+lr["message"])
                mailError("FAILED to update status "+lr["message"])
            
            
    if len(listEmail):
        mm = Mailer(mailhost, 25, logger, administrators, mailsender)
        for r in listEmail:
            body = "%s is now ready for review.  <a href=\"%s\">Click here</a> or sign on to your Salesforce account"%(r[1], r[2])
            msg = mm.mailAttach("SFPUSH REPORTS TEST CONAME: "+r[3], body, "html")
            mm.doMail(r[0], mailsender, msg)
            
            msg = mm.mailAttach("*** DEBUG *** SFPUSH REPORTS TEST CONAME: %s was sent to %s"%(r[3], r[0]), body, "html")
            mm.doMail("*****@*****.**", mailsender, msg)
            mm.doMail("*****@*****.**", mailsender, msg)
        
    if len(mapBad):
        logger.error("FAILED TO RETRIEVE FILES from FTP")
        msg = "Failed to retrieve the following files from FTP: "
        for k in mapBad.keys():
            logger.error(k)
            msg = msg + k + " "
        mailError(msg)
    if len(mapBadSFUpload):
        logger.error("FAILED TO UPLOAD FILES TO SF")
        msg = "Failed to upload the following files to SF: "
        for k in mapBadSFUpload.keys():
            logger.error(k)
            msg = msg + k + " "
        mailError(msg)
    
    
    
    return 0
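
The FTP retrieval above shells out through the deprecated commands module; a minimal sketch of the same ncftpget call issued via subprocess, assuming the ftpUser, ftpPassword and ftpURL values read from the config file:

import subprocess

def fetch_report(ftp_user, ftp_password, ftp_url, work_dir, report):
    # Mirror of: ncftpget -u<user> -p<pass> <host> <localdir> /sfreports/<report>
    cmd = ["ncftpget", "-u" + ftp_user, "-p" + ftp_password,
           ftp_url, work_dir, "/sfreports/" + report]
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    out, _ = proc.communicate()
    return proc.returncode, out
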
Example No. 4
def main(sf_csv_file, sp_email_map, maxupds, sendto):
    logger = utils.createLogger("spsfsync.py")

    # b holds the Salesforce credentials (url, username, password); tempdir is a module-level global
    b = utils.getSFSiteCred()
    workdir = os.path.join(tempdir, utils.temp_name("sfupdworkdir"))
    os.mkdir(workdir)
    
    rc,upds, splits, manifest_map  = \
        __create_sf_updates(sf_csv_file, workdir, sp_email_map, 
        maxupds)
    if rc != 0 or upds == 0:
        if rc != 0:
            logger.error("sp_get_status returned with error %d"%rc)
        else:
            logger.info("There were no updates")
        return rc
    
    logger.debug("THERE ARE %d updates and %d splits"%(upds, len(splits)))
    
    # loop splits here
    # then cat the logfile
    cgood = 0
    cbad = 0

    goodfile = os.path.join(tempdir, utils.temp_name("goodups.txt"))
    badfile = os.path.join(tempdir, utils.temp_name("badupds.txt"))
    logger.debug("GOODFILE "+goodfile)
    
    good = open(goodfile, "w")
    bad = open(badfile, "w")

    updlogfile = os.path.join(tempdir, utils.temp_name("spsynclogfile.csv")) # yes reuse

    ktest = 0
    for spl in splits:
        rc = sfupdspstatus.main(b[0], b[1], b[2], spl, updlogfile)

        csv_spl = csv.reader(open(spl))
        up_file = open(updlogfile)
        csv_log = csv.reader(up_file)

        first = True
        for row_upd in csv_spl:
            row_log = csv_log.next()
            if first:
                first = False
                continue
            lid = row_upd[0]
            try:
                (email, fromv) = manifest_map[lid]
                cto = row_upd[1]

                success = row_log[1].lower()
                error = row_log[3]
                if success == "true":
                    cgood = cgood + 1
                    good.write(email+" changed from "+fromv+" to "+cto+"\n")
                    good.flush()
                else:
                    cbad = cbad + 1
                    bad.write(email+" failed to update from "+fromv+" to "+cto+" "+error+"\n")
                    bad.flush()

            except KeyError:
                logger.error("ID not found "+lid)
        up_file.close()
        ktest = ktest + 1
#        if ktest > 4: break ## remove this
    
    good.close()
    bad.close()
    att = []
    att.append((goodfile, "goodups.txt"))
    if cbad > 0: att.append((badfile, "badupds.txt"))
    logger.debug(str(att))
    sendMail(sendto, "%d SF Stormpost_Reason__c updated, %d failed"%(cgood, cbad), "spsfsync", att)
    return rc
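
Reading the split file and the bulk-update log in lockstep, as the loop above does with csv_log.next(), can also be expressed with zip; a sketch, assuming both files keep the same row order and share a one-line header:

import csv

def pair_update_rows(split_path, log_path):
    # Yield (update_row, log_row) pairs, dropping the shared header pair first.
    with open(split_path) as upd, open(log_path) as log:
        rows = iter(zip(csv.reader(upd), csv.reader(log)))
        next(rows, None)
        for row_upd, row_log in rows:
            yield row_upd, row_log
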
Example No. 5
def main(save_dir, work_dir, src_fname, mail_file):
    global logger
    doesnotwant = "Does Not Want"
    logger = utils.createLogger("fpn_unsubs")
    logger.debug("main started")
    if not os.path.isdir(save_dir):
        r,c = commands.getstatusoutput("mkdir -p "+save_dir)
        if r:
            logger.error(c)
            mail_file_o = open(mail_file, "w")
            mail_file_o.write("ERROR\n")
            mail_file_o.write(c)
            sys.exit(1)
    
    # save_dir has timestamp:email_add
    donefile = os.path.join(save_dir, "donefile.dat")
    grep1 = "grep -n \"%%s\" %s"%donefile
    open(donefile, "a").close() # make sure the done-file exists before grepping it
    consider = {}
    for p in [l for l in os.listdir(work_dir) if l.find(src_fname) != -1]:
        for line in open(os.path.join(work_dir, p)).read().split("\n"):
            if len(line):
                parts = line.split(delim)
                if len(parts) == 3:
                    addy = parts[1].strip().lower()
                    eds = parts[2]
                    token = parts[0]
                    blk = "%s:%s"%(token, addy)
                    cmd = grep1%blk
#                    logger.debug(cmd)
                    r,c = commands.getstatusoutput(cmd)
                    if r == 0:
                        logger.debug("Already done "+line)
                    else:
                        logger.debug("Will consider "+line)
                        try:
                            tup = consider[addy]
                        except KeyError:
                            tup = ([], [])
                            consider[addy] = tup
                        tup[0].append(token)
                        for ed in eds.split(";"):
                            edx = ed.split(",")[0]
                            if edx not in tup[1]:
                                tup[1].append(edx)
            
    
    if len(consider) == 0: 
        logger.info("Nothing to process")
        return 0


    (sfurl, username, password) = utils.getSFSiteCred(os.path.join(sf_home, "sfrunner.cfg"))


    def escapeQ(x):
        return x.replace("'", "\\'")

    # build the SOQL "Email in (...)" clause over every address we still need to check
    where = None
    for key in consider.keys():
        if not where:
            where = "where Email in ('%s'"%escapeQ(key)
        else:
            where = where + ",'%s'"%escapeQ(key)

    where = where + ") and FPN_Subscriptions__c != '' and FPN_Subscriptions__c != '%s' and FPN_Subscriptions__c != 'Not Authorized - see JMC'"%doesnotwant
    
    queryRes = os.path.join(work_dir, "fpnunsubsw.csv")
    if os.path.isfile(queryRes):
        try:
            os.unlink(queryRes)
        except OSError:
            mail_file_o = open(mail_file, "w")
            mail_file_o.write("ERROR")
            msg = "*** cannot delete file "+queryRes
            mail_file_o.write(msg)
            logger.error(msg)
            return 1
            
    p1.main(queryRes, sfurl, username, password, where)
    
    if not os.path.isfile(queryRes):
        msg = "ERROR\n**** query for unsubs did not generate output file\n Query: "+where
        logger.error(msg)
        mail_file_o = open(mail_file, "w")
        mail_file_o.write(msg)
        return 1
    
    # diff what we got from the server and what is currently in sf

    sfmap = {}    
    first = True
    for r in csv.reader(open(queryRes, 'r')):
        if first: first = False
        else:
            key = r[1].strip().lower()
            sfmap[key] = (r[0], r[4].split(";")) 
    

    # create the input file
    csv_file = os.path.join(work_dir, "update.csv")
    logger.debug("LOADER CSV "+csv_file)
    csv_fileo = open(csv_file, 'w')
    csvw = csv.writer(csv_fileo, delimiter=',',
        quotechar='"', quoting=csv.QUOTE_NONNUMERIC)
    csvw.writerow(["ID", "FPN_Subscriptions__c"])
    
    id_to_unsub_map = {}
    
    has_one = False
    for key in consider.keys():
        try:
            insf_tup = sfmap[key]
        except KeyError:
            # if this is the case then it means the contact was deleted from SF
            continue
        to_remove = consider[key][1]

        logger.debug("CONTACT: "+key)
        logger.debug("SF val: "+str(insf_tup))
        logger.debug("toRemove: "+str(to_remove))

        has_one = True
        new_val = ""
        for i in insf_tup[1]:
            i1 = i.split(" ")[0]
            if i1 not in to_remove:
                if new_val == "":
                    new_val = i
                else:
                    new_val = new_val + ";" + i
        if new_val == "": new_val = doesnotwant
        csvw.writerow([insf_tup[0], new_val])
        id_to_unsub_map[insf_tup[0]] = (key, to_remove)
    csv_fileo.close()
    
    if has_one:
        logger.debug("id_to_unsub_map "+str(id_to_unsub_map))
    
        stat_file = os.path.join(work_dir, "fpnunsubs.out")
        logger.debug("STAT FILE "+stat_file)
    
        try:
            rmsg, rcount, fcount = upsert.main("update", csv_file, stat_file, sfurl,
                username, password)
            if rmsg != "":
                emsg = "Error at update: %s"%rmsg
                mail_file_o = open(mail_file, "w")
                mail_file_o.write(emsg)
                return 1
        except sf.SFException, e:
            mail_file_o = open(mail_file, "w")
            mail_file_o.write("ERROR")
            mail_file_o.write(str(e))
            logger.error(str(e))
            return 1
        
        mail_file_o = open(mail_file, "w")
        mail_file_o.write("SF Updated: Records Processed: %d\nFailed: %d\n"%(rcount, fcount))
        
    
        mail_file_o.write("Successful updates\nEmail Address\t\tFPN Unsubscribes\n")    
        stat_file_o = open(stat_file)
    
        first = True
        fail_rec = []
        for r in csv.reader(stat_file_o):
            if first: first = False
            else:
                rec_id = r[0]
                success = r[1]
                create = r[2]  # unused, kept to document the column layout
                reason = r[3]
                try:
                    email, unsub = id_to_unsub_map[rec_id]
                    if success.lower() == "true":
                        mail_file_o.write("%s\t\t%s\n"%(email, ";".join(unsub)))
                    else:
                        fail_rec.append((email, ";".join(unsub), reason))
                except KeyError:
                    fail_rec.append(("", "", reason))
        if len(fail_rec):
            mail_file_o.write("Failed updates\nEmail Address\t\tFPN Unsubscribes\tReason\n")    
            for z in fail_rec:
                mail_file_o.write("%s\t\t%s\t%s\n"%(z[0], z[1], z[2]))
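
The WHERE clause that the code above assembles incrementally can also be built in a single pass with join; a sketch using the same escaping rule and field names as the query above:

def build_unsub_where(emails, doesnotwant="Does Not Want"):
    # Assemble the SOQL filter used for the unsubscribe query (sketch).
    def escapeQ(x):
        return x.replace("'", "\\'")
    quoted = ",".join("'%s'" % escapeQ(e) for e in emails)
    return ("where Email in (%s) and FPN_Subscriptions__c != '' "
            "and FPN_Subscriptions__c != '%s' "
            "and FPN_Subscriptions__c != 'Not Authorized - see JMC'" % (quoted, doesnotwant))
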
Example No. 6
def main(source, dest, runtype):
    # runtype 1 - the one run at midnight
    # runtype 2 - the final day's run
    global logger
    logger = utils.createLogger("runner")
    if not os.path.isdir(source):
        print "invalid directory "+source
        sys.exit(1)
        
    if not prepareDestDir(dest):
        sys.exit(1)
    import datetime
    today = datetime.datetime.now()  # the original datetime.datetime(2012, 12, 21).now() ignored the hard-coded date anyway
    pfile = os.path.join(source, "prev_%s.csv"%runtype)    
    nfile = os.path.join(source, "now_%s.csv"%runtype)        
    backup(source, today, runtype)
    # abort if there is no prev file
    if not os.path.isfile(pfile):
        logger.error("There is no previous file "+pfile)
        sendMail(None, "**** Stormpost SF Error", "There is no previous file "+pfile)
        
        sys.exit(1)
    
    import p2
    bx = utils.getSFSiteCred()
    p2.main(int(runtype), nfile, bx[0], bx[1], bx[2])
    if not os.path.isfile(nfile):
        restorePrev(source, runtype)
        return
        
        
    import diff
    adds, deletes = diff.diff(nfile, pfile)
    print adds, deletes
    tosend = []

    jlt_to_old = {
        "JLT Pharma":"Pharma Global",
        "JLT Constr":"Constr Global",
        "JLT Insur":"Insur Global",
        "JLT Bank":"Bank Global",
        "JLT Telecom":"Telecom Global",
        "JLT Asset & Wealth":"Asset & Wealth Global",
        "JLT Lawyers":"Lawyers Global",
        "JLT Media":"Media Global"}
    global_to_new = {
        "Pharma Global":"Pharma Global Advisen",
        "Constr Global":"Constr Global Advisen",
        "Insur Global":"Insur Global Advisen",
        "Bank Global":"Bank Global Advisen",
        "Telecom Global":"Telecom Global Advisen",
        "Asset & Wealth Global":"Asset & Wealth Global Advisen",
        "Lawyers Global":"Lawyers Global Advisen",
        "Media Global":"Media Global Advisen"}

    
    def createSPFiles(which, decor):
        for w in which.keys():
            # translate JLT list names to the old "Global" names, and plain
            # "Global" names to their "Advisen" variants
            if w in jlt_to_old:
                k = jlt_to_old[w]
            elif w in global_to_new:
                k = global_to_new[w]
            else:
                k = w
            if decor == "":
                fn = k.replace(" ", "_").replace("/", "_") + "_%04d_%02d_%02d.txt"%(today.year, today.month, today.day)
            else:
                fn = k.replace(" ", "_").replace("/", "_") + "_%s_%04d_%02d_%02d.txt"%(decor, today.year, today.month, today.day)
            fn = os.path.join(dest, fn)
            tosend.append(fn)
            op = open(fn, "w")
            op.write("\n".join(which[w]))
            op.close()
    
    createSPFiles(adds, "")
    createSPFiles(deletes, "remove")
    
    logger.debug("Files to send "+str(tosend))
    sys.exit(0)
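
The date-stamped Stormpost file names produced by createSPFiles can equally be built with datetime.strftime; a small sketch, assuming the same naming scheme:

import datetime

def make_sp_filename(list_name, day, decor=""):
    # Build e.g. "Pharma_Global_2012_12_21.txt" or, with decor, "Pharma_Global_remove_2012_12_21.txt".
    base = list_name.replace(" ", "_").replace("/", "_")
    stamp = day.strftime("%Y_%m_%d")
    if decor:
        return "%s_%s_%s.txt" % (base, decor, stamp)
    return "%s_%s.txt" % (base, stamp)

# make_sp_filename("Pharma Global", datetime.date(2012, 12, 21), "remove")
# -> "Pharma_Global_remove_2012_12_21.txt"
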
Example No. 7
def __dumpSF(nowfile):
    b = utils.getSFSiteCred()

    where = "where FPN_Subscriptions__c != '' and FPN_Subscriptions__c != 'Does Not Want'"
    
    p1_a.main(nowfile, b[0], b[1], b[2], where)