Example #1
    def __init__(self, dir, dumpDir, logName):
        self.buggy = False
        self.fields = None
        self.dir = dir
        self.dumpDir = dumpDir
        self.logger = createLogger(logName)
        self.logger.debug("FSObject ctor")
        self.newfile = None
        self.updFile = None
        self.notNulls = []
        self.primaryKey = None
        self.primaryKeyOnCSV = None
        self.newCSVLoad = None
        self.updSQLLoad = None
        self.updSQLDiscardFields = []
        self.modTable = None

        self.managingFSField = None
        self.parentFSField = None
        self.dumpFile = None
        self.updErrs = []
        self.addErrs = []
        self.mloaded = {}
        self.csvBase = None
        self.changedIDs = None
        self.postBaseLoadErrs = []
Example #2
File: p2.py Project: juanurquijo/SF
def main(runtype, csvfile, url, userid, password):

    logger = utils.createLogger("p2.py")
    
    if runtype == 1: rt = fpn_type_1
    elif runtype == 2: rt = fpn_type_2
    else:    
        r = utils.getValidFPNs()
        rt = [x for x in r if x not in fpn_type_1 and x not in fpn_type_2]
        logger.debug(str(rt))
    
    w = " and FPN_Subscriptions__c INCLUDES ("+",".join(["'%s'"%x for x in rt])+")"

    where = " where FPN_Subscriptions__c != '' and FPN_Subscriptions__c != 'Does Not Want' and FPN_Subscriptions__c != 'Not Authorized - see JMC'"+w


    logger.debug("WHERE: "+where)

    fout, tfile = utils.openTempFile()
    fout.close()
    
    logger.debug("TEMP FILE "+tfile)
    p1.main(tfile, url, userid, password, where)
    splitter.main(rt, tfile, csvfile)
    return 0
Example #3
def convert(xlsfile, csvfile, fields, validate = True, badRecs = None):
    logger = createLogger("xls2csv")
    logger.debug("Entered convert")
    
    xls = load(xlsfile, fields)
    
    if validate: validateAllRFieldTypes(xls)
    
    writer = csv.writer(open(csvfile, 'w'), delimiter=',',
        quotechar='"', quoting=csv.QUOTE_MINIMAL)
    names = xls.fields.names()
    writer.writerow(names)
    ndx = 0
    for record in xls.records():
        lr = []
        for m in names: lr.append(csvFix(record[m]))
        ndx = ndx + 1
        if badRecs is None or (ndx+1 not in badRecs):
            try:
                writer.writerow(lr)
            except UnicodeEncodeError:
                print lr
                print ndx
                raise
        else:
            logger.debug("Discarding %d"%ndx)
Example #4
def load(xlsname, fields):
    global logger
    logger = createLogger("xlsloader")
    logger.debug("Entered xlsloader.load")
    xls = XLSDataSource(xlsname)
    xls.initialize()
    xls.fields = FieldList(fields)
    return xls
Example #5
    def __init__(self):
        b = utils.get_sp_cred()

        self.logger = utils.createLogger("spsoapclient.py")
        self.soap_client = suds.client.Client(b[0], location=b[1])
        auth = self.soap_client.factory.create("tns:authentication")
        auth.username = b[2]
        auth.password = b[3]
        self.export_job_id = b[4]
        self.soap_client.set_options(soapheaders=auth)
Example #6
def main(err_file, loginURL, username, password):
    logger = utils.createLogger("probe.py")
    sf.setLogger(logger)

    df = open(err_file, "a")
    try:
        sf.login(username, password, loginURL, True)
    except sf.SFException, e:
        df.write(str(e)+"\n")
        sys.exit(1)
Example #7
def csvloader(fin):
    logger = utils.createLogger("csvloader")
    logger.debug("Entered csvloader")
    
    m = {}
    rdr = csv.reader(open(fin), delimiter=',', quotechar='"')
    kdebug = 0
    for row in rdr:
        kdebug = kdebug + 1
        key = row[1].strip().lower()
        if key != "":
            m[key] = [row[4].strip(), 0]
            
    return m
Example #8
def main(oper, fn, fo, loginURL, username, password):
    
    print oper, fn, fo
    rcount = 0
    fcount = 0
    res_msg = ""
    logger = utils.createLogger("upsert")
    sf.setLogger(logger)
    op = open(fo, "w")

    lx = sf.login(username, password, loginURL)
    if not lx: return
    site = lx[0]
    sessionid = lx[1]    
    jobid = sf.createJob(oper, "Contact", "CSV", site, sessionid)
    logger.debug( "JOBID: "+jobid)

    
    state = sf.checkJobStatus(jobid, site, sessionid)
    logger.debug("JOB state: "+state)
    
    batchid = sf.submitUpdate(jobid, site, sessionid, fn)
    logger.debug("BATCH "+batchid)
    import time
    while True:
        # loop till we complete or error out
        bat = sf.checkBatch(jobid, batchid, site, sessionid)
        logger.debug("BATCH state "+ str(bat))
        batchState = str(bat["state"])
        
        if batchState == "Completed":
            rcount = int(str(bat["numberRecordsProcessed"]))
            fcount = int(str(bat["numberRecordsFailed"]))
            logger.info("NUMBER OF RECORDS PROCESSED %d"%rcount)
            logger.info("NUMBER OF RECORDS FAILED %d"%fcount)
            r = sf.getUpdateResults(jobid, batchid, site, sessionid)
            op.write(r)
            break
        if batchState == "Failed":
            logger.error("QUERY FAILED")
            res_msg = str(bat["stateMessage"])
            logger.error(res_msg)
            break
            
        time.sleep(sleepTime)
    
    logger.debug("JOB state AFTER: "+state)
    sf.closeJob(jobid, site, sessionid)
    logger.debug("JOB state AFTER CLOSE: "+state)
    return res_msg, rcount, fcount
Example #9
    def __init__(self, name, port=""):
        super().__init__()
        self.name = name
        self.port = port

        self.connected = False

        self.className = type(self).__name__
        loggerName = self.className + ' (' + self.name + ')'
        self.logger = utils.createLogger(loggerName)

        self.createConnectBox()
        self.createDataReceiveBoxes()

        mainLayout = QHBoxLayout()
        mainLayout.addWidget(self.connectBox)
        mainLayout.addWidget(self.dataReceiveBox)
        self.setLayout(mainLayout)
        self.setTitle(loggerName)
Example #10
def main(fpns, inname, outname):
    logger = utils.createLogger("splitter.py")
    logger.debug("inname: "+inname+" outname "+outname)
    fout = csv.writer(open(outname, "w"), delimiter=',',
        quotechar='"', quoting=csv.QUOTE_ALL)
    rdr = csv.reader(open(inname), delimiter=',', quotechar='"')
    k = 0
    for row in rdr:
        rt = [x.strip() for x in row]
        if len(rt) == 5:
            if k > 0: 
                rt[4] = ";".join([x.strip() for x in rt[4].strip().split(";") if x in fpns])
                
    #        logger.debug(rt);
            if (rt[4] != ""):  
                # VERY DIRTY HACK for FPN Digest called as EU Free 
                rt[4] = rt[4].replace("EU Digest", "EU Free") 
                fout.writerow(rt)            
        k = k + 1        
Example #11
def main(fn, loginURL, username, password, whereClause = None):
    
    logger = utils.createLogger("p1.py")
    sf.setLogger(logger)
    
    lx = sf.login(username, password, loginURL)
    if not lx: return
    site = lx[0]
    sessionid = lx[1]
    jobid = sf.createJob("query", "Contact", "CSV", site, sessionid)
    logger.debug( "JOBID: "+jobid)
    sfSQL = "select id, Email,  firstname, lastname, FPN_Subscriptions__c,Stormpost_Reason_Date__c,Stormpost_Reason__c from contact"
    if whereClause is not None:
        sfSQL = sfSQL + " " + whereClause
    logger.debug("SQL : "+sfSQL)
    batchid = sf.submitSelect(jobid, site, sessionid, sfSQL)
    logger.debug("BATCH "+batchid)
    state = sf.checkJobStatus(jobid, site, sessionid)
    logger.debug("JOB state: "+state)
    
    import time
    while True:
        # loop till we complete or error out
        bat = sf.checkBatch(jobid, batchid, site, sessionid)
        logger.debug("BATCH state "+ str(bat))
        batchState = str(bat["state"])
        
        if batchState == "Completed":
            rcount = int(str(bat["numberRecordsProcessed"]))
            logger.info("NUMBER OF RECORDS %d"%rcount)
            r = sf.getResults(jobid, batchid, site, sessionid)
            open(fn, "w").write(r)
            break
        if batchState == "Failed":
            logger.error("QUERY FAILED")
            break
            
        time.sleep(sleepTime)
    logger.debug("JOB state AFTER: "+state)
    sf.closeJob(jobid, site, sessionid)
    logger.debug("JOB state AFTER CLOSE: "+state)
Example #12
def diff(now, prev):
    global logger
    logger = utils.createLogger("diff")
    logger.debug("Entered diff")
    validFPNs = getValidFPNs()
    
    ### VERY DIRTY HACK FOR FPN Free
    validFPNs.append("EU Free")
    
    
    mnow = csvloader.csvloader(now)
    logger.debug("LEN of now: %d"%len(mnow))
    mprev = csvloader.csvloader(prev)
    logger.debug("LEN of prev: %d"%len(mprev))

    adds = {}
    deletes = {} 
    for k in mnow.keys():
        try:
            p = mprev[k]
            p[1] = 1
            n = mnow[k]
            n[1] = 1
            
            prevFPNS = p[0]
            nowFPNS = n[0]
            if prevFPNS != nowFPNS:
                logger.debug("DIFFERENT "+k)
                logger.debug("PREV "+prevFPNS)
                logger.debug("NOW "+nowFPNS)
                newFPNS = getInANotInB(nowFPNS, prevFPNS)
                xFPNS = getInANotInB(prevFPNS, nowFPNS)
                if len(newFPNS) > 0: 
                    putInMap(validFPNs, adds, k, newFPNS)
                if len(xFPNS) > 0: 
                    putInMap(validFPNs, deletes, k, xFPNS)
        except KeyError, e:
            fpns = mnow[k][0].strip()
            if fpns != "":
                logger.debug("contact %s with %s is new"%(k, fpns))
                putInMap(validFPNs, adds, k, fpns.split(";"))
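    # The example is truncated here. Since runner.py (Example #31) unpacks
    # `adds, deletes = diff.diff(nfile, pfile)`, the function presumably
    # finishes by sweeping mprev for contacts that disappeared and returning
    # both maps. A hedged sketch; the unmatched-flag convention (p[1] == 0)
    # is inferred from the loop above, not shown in the original:
    for k in mprev.keys():
        p = mprev[k]
        if p[1] == 0 and p[0].strip() != "":
            putInMap(validFPNs, deletes, k, p[0].split(";"))
    return adds, deletes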
Example #13
    def __init__(self, channel_objs):
        super().__init__()
        self.channels = channel_objs
        self.numChannels = len(self.channels)

        className = type(self).__name__
        self.logger = utils.createLogger(className)

        self.createMainSettingsBox()
        self.createChannelSettingsBox()
        self.createDataFileBox()
        self.createChannelGroupBox()

        w = max(self.mainSettingsBox.sizeHint().width(),
                self.channelSettingsBox.sizeHint().width(),
                self.dataFileBox.sizeHint().width()) + 5
        self.mainSettingsBox.setFixedWidth(w)
        self.channelSettingsBox.setFixedWidth(w)
        self.dataFileBox.setFixedWidth(w)

        col1 = QVBoxLayout()
        col1.addWidget(self.mainSettingsBox)
        col1.addWidget(self.channelSettingsBox)
        col1.addWidget(self.dataFileBox)

        self.mainLayout = QHBoxLayout()
        self.mainLayout.addLayout(col1)
        self.mainLayout.addWidget(self.channelGroupBox)
        self.setWindowTitle('Olfactometer Prototype & Accessory Control')

        self.central_widget = QWidget()
        self.central_widget.setLayout(self.mainLayout)
        self.setCentralWidget(self.central_widget)

        # Menu Bar
        self.createMenuBar()

        self.resize(self.sizeHint())
Example #14
    def __init__(self, name, port=""):
        super().__init__()
        self.name = name
        self.port = port

        className = type(self).__name__
        loggerName = className + ' (' + self.name + ')'
        self.logger = utils.createLogger(loggerName)
        self.logger.debug('Creating module for %s', loggerName)

        self.createConnectBox()
        self.createSettingsBox()
        col1 = QVBoxLayout()
        col1.addWidget(self.connectBox)
        col1.addWidget(self.settingsBox)

        self.createDataReceiveBoxes()

        mainLayout = QHBoxLayout()
        mainLayout.addLayout(col1)
        #mainLayout.addWidget(self.connectBox)
        mainLayout.addWidget(self.dataReceiveBox)
        self.setLayout(mainLayout)
        self.setTitle(self.name)
Example #15
import re
import warnings
from pathlib import Path

import numpy as np
import scipy.sparse as sp
import spacy
from sklearn.feature_extraction.text import CountVectorizer, ENGLISH_STOP_WORDS

from utils import createDirs, createLogger

INPUT_DIR = Path(r'../data/aclImdb/')
OUTPUT_DIR = Path(r'../data/clean/')
LOG_DIR = Path(r'../logs/clean/')

logger = createLogger(LOG_DIR, "clean")
logger.info("Logger created, logging to %s" % LOG_DIR.absolute())


def getData(path):
    """
    Get IMDb movie reviews and corresponding labels.

    Inputs:
        - path (str): IMDb movie review directory

    Outputs:
        ([str], [int]): list of movie reviews, list of sentiment labels
    """
    texts, labels = [], []
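    # The example is truncated here. A minimal sketch of how such a loader
    # typically continues for the aclImdb layout; the pos/ and neg/
    # subdirectory names are an assumption, not part of the original snippet:
    for label, sub in enumerate(["neg", "pos"]):
        for f in (Path(path) / sub).glob("*.txt"):
            texts.append(f.read_text(encoding="utf-8"))
            labels.append(label)
    return texts, labels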
Example #16
import utils
import pytz
import sys
import random
from datetime import datetime

logger = utils.createLogger("scheduling")
config = utils.loadConfig()
scheduling_config = config["scheduling"]
timezone_name = scheduling_config["local_timezone"]
try:
    timezone = pytz.timezone(timezone_name)
except pytz.UnknownTimeZoneError:
    logger.error("timezone {} not found in tz data. make sure it is in the list - https://en.wikipedia.org/wiki/List_of_tz_database_time_zones")
    sys.exit(1)
working_days = scheduling_config["working_days"]
min_days_per_week = scheduling_config.get("min_days_per_week", 1)
if min_days_per_week == -1:
    min_days_per_week = len(working_days)
if min_days_per_week < 0 or min_days_per_week > len(working_days):
    logger.error("min_days_per_work should be in [0, len(working_days)] or -1")
    sys.exit(1)
working_hours = scheduling_config["working_hours"]
if len(working_hours) == 0:
    logger.error("working hours must contain atleast one start-end period")
    sys.exit(1)

def getRandomHour(seed):
    total_duration = sum([(period["end"] - period["start"]) for period in working_hours])
    rng = random.Random(seed)
    random_offset = rng.random() * total_duration
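    # The example is cut off here. A plausible continuation maps the random
    # offset back into one of the configured working_hours periods; this
    # sketch assumes "start" and "end" are numeric hours:
    for period in working_hours:
        duration = period["end"] - period["start"]
        if random_offset < duration:
            return period["start"] + random_offset
        random_offset -= duration
    return working_hours[-1]["end"]  # guard against float rounding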
Example #17
import utils
import csv
import os
import spsoapclient
from mailer import sendMail


send_emails_to = ["*****@*****.**"]#,
#    "*****@*****.**",
#    "*****@*****.**"]

def usage():
    print "usage spclearlist_and_add listname export_list_job_id csvtoadd tmpdir"
    # csvtoremove is Recips.Address,Recips.SourceDesc,
    # csvtoadd is email
    
logger = utils.createLogger("spclearlist_and_add")
 
    
 
def main(listname, job_id, csvtoadd, tmpdir):
    spf = spsoapclient.spsoapclient()
    m = spf.get_lists_as_map()
    try:
        list_id = m[listname]
    except KeyError:
        logger.error("Cannot find list "+listname)
        return -1
    logger.debug("LIST id "+str(list_id))

    # dump all users - we need this info so we know if we have to create a recip for the adds
    
Example #18
from pathlib import Path

import numpy as np
from joblib import dump, load
from sklearn.decomposition import LatentDirichletAllocation
from sklearn.model_selection import train_test_split
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler

from evaluate import evaluate
from logistic_regression import LogisticRegressionVal
from utils import createLogger, createDirs, loadClean, writeResults, \
    preprocessClfParser

INPUT_DIR = Path(r'../data/clean')
OUTPUT_DIR = Path(r'../logs/models')
LOG_DIR = Path("../logs/pipeline")

logger = createLogger(LOG_DIR, "lda_clf")
logger.info("Logger created, logging to %s" % LOG_DIR.absolute())

K = np.arange(1, 31) * 5


def LDA(train_size, random_state):
    """
    Classification pipeline with LDA preprocessing.

    Inputs:
        - train_size (int): number of training samples.
        - random_state (int): seed for random number generators

    Output:
        (None)
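    """
    # The example stops mid-docstring. A hedged sketch of the body the
    # imports suggest; loadClean, LogisticRegressionVal, evaluate and
    # writeResults are project helpers whose signatures are assumptions:
    X_train, y_train, X_test, y_test = loadClean(INPUT_DIR)
    X_train, _, y_train, _ = train_test_split(
        X_train, y_train, train_size=train_size, random_state=random_state)
    for k in K:
        lda = LatentDirichletAllocation(n_components=k, random_state=random_state)
        clf = make_pipeline(lda, StandardScaler(with_mean=False), LogisticRegressionVal())
        clf.fit(X_train, y_train)
        writeResults(OUTPUT_DIR, evaluate(clf, X_test, y_test))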
Example #19
from base64 import b64encode
import re
import json
import hmac
import hashlib
import requests
import time
import utils
import sys

logger = utils.createLogger("zoom")

ZOOM_USERS_API_URL = "https://api.zoom.us/v2/users/"
ZOOM_CREATE_MEETING_API_URL = "https://api.zoom.us/v2/users/{userId}/meetings"

config = utils.loadConfig()
if "zoom" not in config:
    logger.error("zoom key not found in the config json")
    sys.exit(1)
zoom_conf = config["zoom"]
if "api_key" not in zoom_conf:
    logger.error("api_key not found in zoom config")
    sys.exit(1)
ZOOM_API_KEY = zoom_conf["api_key"]
if "api_secret" not in zoom_conf:
    logger.error("api_secret not found in zoom config")
    sys.exit(1)
ZOOM_API_SECRET = zoom_conf["api_secret"]


def encode(text):
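    # The body is missing in the original. Given the b64encode, hmac and
    # hashlib imports, a JWT-style base64url step is the likely intent;
    # a sketch, not Zoom's published code:
    if isinstance(text, str):
        text = text.encode("utf-8")
    return b64encode(text).decode("utf-8").rstrip("=").replace("+", "-").replace("/", "_")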
Example #20
import tornado.locks
import json
import jwt
import aiomysql
import bcrypt
import datetime

from wx.sign import Sign
from wechatpy.utils import check_signature
from wechatpy.exceptions import InvalidSignatureException
from utils import createLogger, config
from auth import authenticated
from qiniu import Auth

# Configure logging
logger = createLogger(__name__)


class NoResultError(Exception):
    pass


async def maybe_create_tables(db):
    try:
        async with db.acquire() as conn:
            async with conn.cursor() as cur:
                await cur.execute("SELECT COUNT(*) FROM kt_posts LIMIT 1")
                await cur.fetchone()
    except aiomysql.ProgrammingError:
        with open("schema.sql") as f:
            schema = f.read()
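        # Truncated here in the original; the natural continuation is to run
        # the schema, e.g. statement by statement (a sketch, assuming
        # ";"-separated DDL in schema.sql):
        async with db.acquire() as conn:
            async with conn.cursor() as cur:
                for stmt in schema.split(";"):
                    if stmt.strip():
                        await cur.execute(stmt)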
Example #21
import sys
import os.path
import utils
import spsfsync_runner
import datetime
import spsoapclient
import diff

logger =utils.createLogger("spsfsync_starter")

def main():


    fpns_to_sync = [x for x in [x.strip() for x in open(os.path.join(utils.getScriptPath(), "fpns_to_sync.txt")).read().\
        split("\n")] if x and x[0] != "#"] # skip blank lines and comment entries
#    logger.debug("WILL SYNC "+",".join(fpns_to_sync))
    if len(fpns_to_sync) == 0: 
        logger.error("Nothing to sync")
        return -1

# validate legacy list -- used by diff
    validFPNs = diff.getValidFPNs()
    #logger.debug("Valid FPNS "+str(validFPNs))

    logger.debug("connecting to sp")
    spsoap = spsoapclient.spsoapclient()
    sp_list_map = spsoap.get_lists_as_map()

    def _check_lists():
        bad = False
        for ed in fpns_to_sync:
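            # The example is truncated here. The check presumably verifies
            # that every edition to sync has a matching Stormpost list;
            # a hedged sketch:
            if ed not in sp_list_map:
                logger.error("no SP list named "+ed)
                bad = True
        return bad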
Example #22
def main(fin, fout, manifest_file, rmap, debug_max = None):

    logger = utils.createLogger("sp_get_status.py")
    if debug_max is not None:
        debug_max = int(debug_max)
    logger.debug("debug_max "+str(debug_max))

    logger.debug("OUTFILE "+fout)
    csv_fileo = open(fout, 'w')
    csvw = csv.writer(csv_fileo, delimiter=',',
        quotechar='"', quoting=csv.QUOTE_NONNUMERIC)

    manifest_csv = csv.writer(open(manifest_file, "w"), delimiter=',',
        quotechar='"', quoting=csv.QUOTE_NONNUMERIC)

    manifest_csv.writerow(["ID", "Email", "From", "To"])

    
    csvw.writerow(["ID", "Stormpost_Reason__c", "Stormpost_Reason_Date__c"])
    date = datetime.datetime.now()
    sdate = "%02d-%02d-%04d"%(date.month, date.day, date.year)
    



    if recips_exp_csv_file is None:
        logger.debug("exporting recips")
        recips_exp_csv_file = os.path.join(tempdir, "recips_file.csv")
        rc = spsoap.runExportRecipList(recips_exp_csv_file)
        if rc: return rc

    fle = csv.reader(open(fin), delimiter=",", quotechar='"')
    first = True
    logger.debug("START")
    
    
    k = 0
    upds = 0
    for row in fle:
        if first: 
            first = False
            continue
        k = k + 1
        if debug_max is not None:
#            logger.debug("count: %d - %d"%(k, debug_max))
            if k > debug_max:
                break
        if k%100 == 0: logger.debug("RECNO %d"%k)
        id = row[0]
        email = row[1]
        oldstat = row[6]
        try:
            sp_id, sval = rmap[email]
            if sval not in valid_status:
                str = "bad status for %s -- %s"%(email, sval)
                logger.error(str)
                raise Exception("bad status for %s -- %s"%(email, sval))
            else:
                if oldstat != sval:
#                    logger.debug("email: %s changed status from: %s to %s"%\
#                        (email, oldstat, sval))
                    csvw.writerow([id, sval, sdate])
                    manifest_csv.writerow([id, email, oldstat, sval])
                    upds = upds + 1
                    csv_fileo.flush()
        except KeyError:
            logger.info("Email %s is not in SP"%email)
        

    logger.debug("DONE")
    return (0, upds)
Example #23
from smtplib import SMTP
import os
from email.MIMEMultipart import MIMEMultipart
from email.MIMEBase import MIMEBase
from email.MIMEText import MIMEText
from email.Utils import COMMASPACE, formatdate
from email import Encoders
import utils

smtphost = "ad-vm-data1"
smtpport = 25

#admin_email = ["*****@*****.**", "*****@*****.**", "*****@*****.**"]
admin_email = ["*****@*****.**", "*****@*****.**"]

logger = utils.createLogger("mailer.py")

def sendMail(rcpt, subject, body, files=None):
    if files is None: files = []

    send_from = "*****@*****.**"
    msg = MIMEMultipart()
    msg['From'] = send_from

    if rcpt is None: rcpt = admin_email


    msg['To'] = COMMASPACE.join(rcpt)
    msg['Date'] = formatdate(localtime=True)
    msg['Subject'] = subject
    
    msg.attach(MIMEText(body))
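
    # The example is truncated here. A sketch of the usual ending for this
    # kind of helper (attachment loop plus SMTP handoff); the (path, name)
    # tuple format matches the att list built in Example #25:
    for f, name in files:
        part = MIMEBase('application', 'octet-stream')
        part.set_payload(open(f, 'rb').read())
        Encoders.encode_base64(part)
        part.add_header('Content-Disposition', 'attachment; filename="%s"' % name)
        msg.attach(part)

    smtp = SMTP(smtphost, smtpport)
    smtp.sendmail(send_from, rcpt, msg.as_string())
    smtp.close()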
Example #24
#!/usr/bin/env python3

# should be added to a cronjob firing every day
# e.g. at 10 AM IST every day
# 30 4 * * * python3 path/main.py >> path/cronjob.log 2>&1

import time
import random

import utils
import bot
import scheduling

logger = utils.createLogger("main")

if __name__ == "__main__":
    logger.info("starting water cooler bot...")
    shouldScheduleToday, delay_seconds = scheduling.getTodaySchedule()
    if shouldScheduleToday:
        logger.info("sleeping for {:.1f} seconds".format(delay_seconds))
        time.sleep(delay_seconds)
        bot.ping()
        logger.info("sent message to slack channel successfully!")
Example #25
def main(sf_csv_file, sp_email_map, maxupds, sendto):
    logger = utils.createLogger("spsfsync.py")



    b = utils.getSFSiteCred()
    workdir = os.path.join(tempdir, utils.temp_name("sfupdworkdir"))
    os.mkdir(workdir)
    
    rc,upds, splits, manifest_map  = \
        __create_sf_updates(sf_csv_file, workdir, sp_email_map, 
        maxupds)
    if rc != 0 or upds == 0:
        if rc != 0:
            logger.error("sp_get_status returned with error %d"%rc)
        else:
            logger.info("There were no updates")
        return rc
    
    logger.debug("THERE ARE %d updates and %d splits"%(upds, len(splits)))
    
    # loop splits here
    # then cat the logfile
    cgood = 0
    cbad = 0

    goodfile = os.path.join(tempdir, utils.temp_name("goodups.txt"))
    badfile = os.path.join(tempdir, utils.temp_name("badupds.txt"))
    logger.debug("GOODFILE "+goodfile)
    
    good = open(goodfile, "w")
    bad = open(badfile, "w")

    updlogfile = os.path.join(tempdir, utils.temp_name("spsynclogfile.csv")) # yes reuse

    ktest = 0
    for spl in splits:
        rc = sfupdspstatus.main(b[0], b[1], b[2], spl, updlogfile)

        csv_spl = csv.reader(open(spl))
        up_file = open(updlogfile)
        csv_log = csv.reader(up_file)

        first = True
        for row_upd in csv_spl:
            row_log = csv_log.next()
            if first:
                first = False
                continue
            lid = row_upd[0]
            try:
                (email, fromv) = manifest_map[lid]
                cto = row_upd[1]

                success = row_log[1].lower()
                error = row_log[3]
                if success == "true":
                    cgood = cgood + 1
                    good.write(email+" changed from "+fromv+" to "+cto+"\n")
                    good.flush()
                else:
                    cbad = cbad + 1
                    bad.write(email+" failed to update from "+fromv+" to "+cto+" "+error+"\n")
                    bad.flush()

            except KeyError:
                logger.error("ID not found "+lid)
        up_file.close()
        ktest = ktest + 1
#        if ktest > 4: break ## remove this
    
    good.close()
    bad.close()
    att = []
    att.append((goodfile, "goodups.txt"))
    if cbad > 0: att.append((badfile, "badupds.txt"))
    logger.debug(str(att))
    sendMail(sendto, "%d SF Stormpost_Reason__c updated, %d failed"%(cgood, cbad), "spsfsync", att)
    return rc
Example #26
def main(save_dir, work_dir, src_fname, mail_file):
    global logger
    doesnotwant = "Does Not Want"
    logger = utils.createLogger("fpn_unsubs")
    logger.debug("main started")
    if not os.path.isdir(save_dir):
        r,c = commands.getstatusoutput("mkdir -p "+save_dir)
        if r:
            logger.error(c)
            mail_file_o = open(mail_file, "w")
            mail_file_o.write("ERROR\n")
            mail_file_o.write(c)
            sys.exit(1)
    
    # save_dir has timestamp:email_add
    donefile = os.path.join(save_dir, "donefile.dat")
    grep1 = "grep -n \"%%s\" %s"%donefile
    open(donefile, "a").close() # wtf
    consider = {}
    for p in [l for l in os.listdir(work_dir) if l.find(src_fname) != -1]:
        for line in open(os.path.join(work_dir, p)).read().split("\n"):
            if len(line):
                parts = line.split(delim)
                if len(parts) == 3:
                    addy = parts[1].strip().lower()
                    eds = parts[2]
                    token = parts[0]
                    blk = "%s:%s"%(token, addy)
                    cmd = grep1%blk
#                    logger.debug(cmd)
                    r,c = commands.getstatusoutput(cmd)
                    if r == 0:
                        logger.debug("Already done "+line)
                    else:
                        logger.debug("Will consider "+line)
                        try:
                            tup = consider[addy]
                        except KeyError:
                            tup = ([], [])
                            consider[addy] = tup
                        tup[0].append(token)
                        for ed in eds.split(";"):
                            edx = ed.split(",")[0]
                            if edx not in tup[1]:
                                tup[1].append(edx)
            
    
    if len(consider) == 0: 
        logger.info("Nothing to process")
        return 0


    (sfurl, username, password) = utils.getSFSiteCred(os.path.join(sf_home, "sfrunner.cfg"))


    where = None
    
        
    def escapeQ(x):
        return x.replace("'", "\\'")

    for key in consider.keys():
        if not where:
            where = "where Email in ('%s'"%escapeQ(key)
        else:
            where = where + ",'%s'"%escapeQ(key)

    where = where + ") and FPN_Subscriptions__c != '' and FPN_Subscriptions__c != '%s' and FPN_Subscriptions__c != 'Not Authorized - see JMC'"%doesnotwant
    
    queryRes = os.path.join(work_dir, "fpnunsubsw.csv")
    if os.path.isfile(queryRes):
        try:
            os.unlink(queryRes)
        except OSError:
            mail_file_o = open(mail_file, "w")
            mail_file_o.write("ERROR")
            msg = "*** cannot delete file "+queryRes
            mail_file_o.write(msg)
            logger.error(msg)
            return 1
            
    p1.main(queryRes, sfurl, username, password, where)
    
    if not os.path.isfile(queryRes):
        msg = "ERROR\n**** query for unsubs did not generate output file\n Query: "+where
        logger.error(msg)
        mail_file_o = open(mail_file, "w")
        mail_file_o.write(msg)
        return 1
    
    # diff what we got from the server and what is currently in sf

    sfmap = {}    
    first = True
    for r in csv.reader(open(queryRes, 'r')):
        if first: first = False
        else:
            key = r[1].strip().lower()
            sfmap[key] = (r[0], r[4].split(";")) 
    

    # create the input file
    csv_file = os.path.join(work_dir, "update.csv")
    logger.debug("LOADER CSV "+csv_file)
    csv_fileo = open(csv_file, 'w')
    csvw = csv.writer(csv_fileo, delimiter=',',
        quotechar='"', quoting=csv.QUOTE_NONNUMERIC)
    csvw.writerow(["ID", "FPN_Subscriptions__c"])
    
    id_to_unsub_map = {}
    
    has_one = False
    for key in consider.keys():
        try:
            insf_tup = sfmap[key]
        except KeyError:
            # if this is the case then it means the contact was deleted from SF
            continue
        to_remove = consider[key][1]

        logger.debug("CONTACT: "+key)
        logger.debug("SF val: "+str(insf_tup))
        logger.debug("toRemove: "+str(to_remove))

        has_one = True
        new_val = ""
        for i in insf_tup[1]:
            i1 = i.split(" ")[0]
            if i1 not in to_remove:
                if new_val == "":
                    new_val = i
                else:
                    new_val = new_val + ";" + i
        if new_val == "": new_val = doesnotwant
        csvw.writerow([insf_tup[0], new_val])
        id_to_unsub_map[insf_tup[0]] = (key, to_remove)
    csv_fileo.close()
    
    if has_one:
        logger.debug("id_to_unsub_map "+str(id_to_unsub_map))
    
        stat_file = os.path.join(work_dir, "fpnunsubs.out")
        logger.debug("STAT FILE "+stat_file)
    
        try:
            rmsg, rcount, fcount = upsert.main("update", csv_file, stat_file, sfurl,
                username, password)
            if rmsg != "":
                emsg = "Error at update: %s"%rmsg
                mail_file_o = open(mail_file, "w")
                mail_file_o.write(emsg)
                return 1
        except sf.SFException, e:
            mail_file_o = open(mail_file, "w")
            mail_file_o.write("ERROR")
            mail_file_o.write(str(e))
            logger.error(str(e))
            return 1
        
        mail_file_o = open(mail_file, "w")
        mail_file_o.write("SF Updated: Records Processed: %d\nFailed: %d\n"%(rcount, fcount))
        
    
        mail_file_o.write("Successful updates\nEmail Address\t\tFPN Unsubscribes\n")    
        stat_file_o = open(stat_file)
    
        first = True
        fail_rec = []
        for r in csv.reader(stat_file_o):
            if first: first = False
            else:
                id = r[0]
                success = r[1]
                create = r[2]
                reason = r[3]
                try:
                    email, unsub = id_to_unsub_map[id]
                    if success.lower() == "true":
                         mail_file_o.write("%s\t\t%s\n"%(email, ";".join(unsub)))
                    else:
                        fail_rec.append((email, ";".join(unsub), reason))
                except KeyError:
                    fail_rec.append(("", "", reason))
        if len(fail_rec):
            mail_file_o.write("Failed updates\nEmail Address\t\tFPN Unsubscribes\tReason\n")    
            for z in fail_rec:
                mail_file_o.write("%s\t\t%s\t%s\n"%(z[0], z[1], z[2]))
Example #27
from utils import getSFSiteCred, createLogger, getScriptPath
import sys
import sf
import os.path
import datetime
import commands
import uu
import curl
import fcntl


mailhost = "mail.advisen.com"
administrators = "[email protected],[email protected]"
mailsender = "*****@*****.**"
logger = createLogger("pushreports")
sleepTime = 5

configFile = "pushreports.cfg"
configPath = os.path.join(getScriptPath(), configFile)

pid_file = os.path.join(getScriptPath(), 'program.pid')
fp = open(pid_file, 'w')
try:
    fcntl.lockf(fp, fcntl.LOCK_EX | fcntl.LOCK_NB)
except IOError:
    logger.error("Another instance is already running")
    sys.exit(0)
class PRException(sf.SFException):
    def __init__(self, msg):
        self.msg = msg
        
Example #28
            # save current olfa state - based on the last values sent to the Arduinos

            self.inst_drivers[0].instrument_widget.save_arduino_variables()
            self.inst_drivers[0].instrument_widget.btn_overwrite_olfaConfig_file()

            # TODO: add popup that shows where it saved it to

            # THIS ONLY WORKS NOW when you have just the single instrument
            self.logger.info('~~~~~~closing program~~~~~~~~')
            event.accept()


if __name__ == "__main__":
    # Create logger
    mainLogger = utils.createLogger(__name__)
    mainLogger.info('~~~~~~~~~~~~~~~~~~~~~~~~~~~~~')

    app1 = QApplication(sys.argv)

    # Default Channel objects
    channelObjs = []
    channelObjs.append(
        channelObj(name='olfa prototype', instrument='olfactometer'))
    channelObjs.append(channelObj(name='PID reading', instrument='NI-DAQ'))

    # Open main window
    mainWindow = mainGUI(channelObjs)
    size = mainWindow.sizeHint()
    mainWindow.resize(size)
    #mainLogger.debug('Done creating interface')
Example #29
import requests
import sys
import json

import utils
import zoom
import topics as topicGenerator

logger = utils.createLogger("bot")

config = utils.loadConfig()
if "slack" not in config:
    logger.error("slack key not present in config json")
    sys.exit(1)
slack_conf = config["slack"]
if "webhook_url" not in slack_conf or "channel" not in slack_conf:
    logger.error("webhook_url or channel not present in slack config")
    sys.exit(1)

WEBHOOK_URL = slack_conf["webhook_url"]
CHANNEL = slack_conf["channel"]
BOT_USERNAME = slack_conf.get("username", "water-cooler-bot")
BOT_ICON_EMOJI = slack_conf.get("icon_emoji", ":water-cooler:")

def ping(num_topics=3):
    topics = [topicGenerator.generateTopic() for _ in range(num_topics)]
    meetingUrl = zoom.getMeetingUrl()
    msg = "You must be really thirsty, its time to get a glass of water! :glass_of_milk:\n" +\
            "Gather around the water cooler ya folks - {}\n\n".format(meetingUrl) +\
            "While you're at it, here are some topics that you may find interesting to discuss -\n" +\
            "\n".join(["{}. {}".format(i+1, topic) for i, topic in enumerate(topics)])
Example #30
import signal
import asyncio
from os import environ
from wsapi import readbcv, websockets
from utils import createLogger


def stop_handler(sig, loop, fatherlog):
    log = fatherlog.getChild('stop_handler')
    log.info(f'Received signal: {sig}, shutting down...')
    loop.stop()
    loop.remove_signal_handler(signal.SIGTERM)
    loop.add_signal_handler(signal.SIGINT, lambda: None)


loop = asyncio.get_event_loop()
fatherlog = createLogger('ReadBCV')

signal.signal(signal.SIGINT, lambda signum, frame: None)
loop.add_signal_handler(signal.SIGTERM, stop_handler, signal.SIGTERM, loop, fatherlog)
loop.add_signal_handler(signal.SIGINT, stop_handler, signal.SIGINT, loop, fatherlog)

addr = environ.get('APP_LISTEN', '0.0.0.0')
port = int(environ.get('APP_PORT', '8765'))
path = environ.get('APP_PATH', '/')
fatherlog.info('Listen %s:%s At Path = %s' % (addr, port, path))
start_server = websockets.serve(readbcv, addr, port)
loop.run_until_complete(start_server)
loop.run_forever()
Example #31
def main(source, dest, runtype):
    # runtype 1 - the one run at midnight
    # runtype 2 - the final day's run
    global logger
    logger = utils.createLogger("runner")
    if not os.path.isdir(source):
        print "invalid directory "+source
        sys.exit(1)
        
    if not prepareDestDir(dest):
        sys.exit(1)
    import datetime
    today = datetime.datetime.now()
    pfile = os.path.join(source, "prev_%s.csv"%runtype)    
    nfile = os.path.join(source, "now_%s.csv"%runtype)        
    backup(source, today, runtype)
    # abort if there is no prev file
    if not os.path.isfile(pfile):
        logger.error("There is no previous file "+pfile)
        sendMail(None, "**** Stormpost SF Error", "There is no previous file "+pfile)
        
        sys.exit(1)
    
    import p2
    bx = utils.getSFSiteCred()
    p2.main(int(runtype), nfile, bx[0], bx[1], bx[2])
    if not os.path.isfile(nfile):
        restorePrev(source, runtype)
        return
        
        
    import diff
    adds, deletes = diff.diff(nfile, pfile)
    print adds, deletes
    tosend = []

    jlt_to_old = {
        "JLT Pharma":"Pharma Global",
        "JLT Constr":"Constr Global",
        "JLT Insur":"Insur Global",
        "JLT Bank":"Bank Global",
        "JLT Telecom":"Telecom Global",
        "JLT Asset & Wealth":"Asset & Wealth Global",
        "JLT Lawyers":"Lawyers Global",
        "JLT Media":"Media Global"}
    global_to_new = {
        "Pharma Global":"Pharma Global Advisen",
        "Constr Global":"Constr Global Advisen",
        "Insur Global":"Insur Global Advisen",
        "Bank Global":"Bank Global Advisen",
        "Telecom Global":"Telecom Global Advisen",
        "Asset & Wealth Global":"Asset & Wealth Global Advisen",
        "Lawyers Global":"Lawyers Global Advisen",
        "Media Global":"Media Global Advisen"}

    
    def createSPFiles(which, decor):
        import string
        for w in which.keys():
            if w in jlt_to_old: k = jlt_to_old[w]
            elif w in global_to_new: k = global_to_new[w]
            else: k = w
            if decor == "":
                fn = k.replace(" ", "_").replace("/", "_") + "_%04d_%02d_%02d.txt"%(today.year, today.month, today.day)
            else:
                fn = k.replace(" ", "_").replace("/", "_") + "_%s_%04d_%02d_%02d.txt"%(decor,today.year, today.month, today.day)            
            fn = os.path.join(dest, fn)
            tosend.append(fn)
            op = open(fn, "w").write(string.join(which[w], "\n"))
    
    createSPFiles(adds, "")
    createSPFiles(deletes, "remove")
    
    logger.debug("Files to send "+str(tosend))
    sys.exit(0)
Example #32
def __create_sf_updates(fin, workdir, rmap, debug_max = None):

    splits = []
    split_size = 500
    logger = utils.createLogger("sp_get_status.py")

    date = datetime.datetime.now()
    sdate = "%02d-%02d-%04d"%(date.month, date.day, date.year)

    if debug_max is not None:
        debug_max = int(debug_max)
    logger.debug("debug_max "+str(debug_max))

    logger.debug("*** WORKDIR "+workdir)


    logger.debug("START")

    
    
    k = 0
    upds = 0
    ksplit = 0
    upd_fileo = None
    upd_csvw = None
    manifest_map = {}
    
    def __create_split_file():
        fname = os.path.join(workdir, "spupd%03d"%(len(splits)+1))
        logger.debug("creating SPLIT file "+fname)
        fileo = open(fname, "w")
        csvw = csv.writer(fileo, delimiter=',',
            quotechar='"', quoting=csv.QUOTE_NONNUMERIC)
        csvw.writerow(["ID", "Stormpost_Reason__c", "Stormpost_Reason_Date__c"])
        splits.append(fname)
        return fileo, csvw
    
    first = True

    for row in csv.reader(open(fin), delimiter=",", quotechar='"'):
        if first: 
            first = False
            continue
        k = k + 1
        if k%100 == 0: logger.debug("RECNO %d"%k)
        id = row[0]
        email = row[1]
        oldstat = row[6]
        try:
            sp_id, sval, m_id = rmap[email]
            if sval not in valid_status:
                err = "bad status for %s -- %s"%(email, sval)
                logger.error(err)
                raise Exception("bad status for %s -- %s"%(email, sval))
            else:
                if oldstat != sval:
                    if upd_fileo is None:
                        logger.debug("creating init")
                        upd_fileo, upd_csvw = __create_split_file()
                        ksplit = 0
                        logger.debug("SIZE %d"%(len(splits)))
                    if ksplit > split_size:
                        logger.debug("SPLITTING ANEW")
                        ksplit = 0
                        upd_fileo.close()
                        upd_fileo, upd_csvw = __create_split_file()
                        logger.debug("SIZE %d"%(len(splits)))
                        
                    upd_csvw.writerow([id, sval, sdate])
                    upd_fileo.flush()

                    manifest_map[id] = (email, oldstat) # memory max?
                    upds = upds + 1
                    ksplit = ksplit + 1

                    if debug_max is not None:
                        if upds > debug_max:
                            break
        except KeyError:
            logger.info("Email %s is not in SP"%email)
        

    logger.debug("DONE")
    return (0, upds, splits, manifest_map)
Example #33
# spsfsync_runner.py

import sys
import os.path
import utils
import p1_a
from mailer import sendMail
import datetime
import spsfsync
import spsoapclient

logger =utils.createLogger("spsfsync_runner")

tempdir = utils.get_temp_dir()


send_emails_to = ["*****@*****.**",
    "*****@*****.**",
    "*****@*****.**"]
def __dumpSF(nowfile):
    b = utils.getSFSiteCred()

    where =  "where FPN_Subscriptions__c != '' and FPN_Subscriptions__c != 'Does Not Want'"
    
    p1_a.main(nowfile, b[0], b[1], b[2], where)

def main(archive_dir, debug = 0):
    if not os.path.isdir(archive_dir):
        print "archive_dir "+archive_dir+" not found"
        return -1