def load_config(app_name):
    global config

    search_filenames = [
        os.path.expanduser("~/.fundraising/%s.yaml" % app_name),
        os.path.expanduser("~/.%s.yaml" % app_name),
        # FIXME: relative path fail
        os.path.dirname(__file__) + "/../%s/config.yaml" % app_name,
        "/etc/fundraising/%s.yaml" % app_name,
        "/etc/%s.yaml" % app_name,
        # FIXME: relative path fail
        os.path.dirname(__file__) + "/../%s/%s.yaml" % (
            app_name,
            app_name,
        )
    ]
    # TODO: if getopt.get(--config/-f): search_filenames.append

    for filename in search_filenames:
        if not os.path.exists(filename):
            continue

        config = DictAsAttrDict(load_yaml(open(filename, 'r')))
        log.info("Loaded config from {path}.".format(path=filename))

        config.app_name = app_name

        return

    raise Exception("No config found, searched " + ", ".join(search_filenames))
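A minimal usage sketch of load_config (the app name "silverpop" and the working_path key are hypothetical; load_config binds the module-level config used throughout these examples):

load_config("silverpop")     # hypothetical app name
print(config.app_name)       # "silverpop"
print(config.working_path)   # whatever the matched YAML file defines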
Example #2
    def mail(errorcode, data=None, print_exception=False):
        body = ""
        if print_exception:
            exception_info = "".join(traceback.format_exception(*sys.exc_info()))
            body = body + exception_info
        if data:
            if not isinstance(data, basestring):
                data = yaml.safe_dump([data], default_flow_style=False, allow_unicode=True)
            body = body + "\n\nWhile processing:\n{data}".format(data=data)

        log.error("sending failmail: " + body)

        msg = MIMEText(body)

        from_address = config.failmail_sender
        to_address = config.failmail_recipients
        if hasattr(to_address, 'split'):
            to_address = to_address.split(",")

        msg['Subject'] = "Fail Mail: {code} ({process})".format(code=errorcode, process=config.app_name)
        msg['From'] = from_address
        msg['To'] = to_address[0]

        mailer = smtplib.SMTP('localhost')
        mailer.sendmail(from_address, to_address, msg.as_string())
        mailer.quit()
Example #3
    def buildQuery(self):
        query = super(PagedGroup, self).buildQuery()
        log.info("Limiting batch contact retrieval to {num} records.".format(
            num=self.pagesize))
        query.limit = self.pagesize
        query.offset = self.offset
        return query
Example #4
def run_export_query(db=None, query=None, output=None, sort_by_index=None):
    """Export query results as a CSV file"""

    # Get a file-like object
    if not hasattr(output, 'write'):
        output = open(output, 'wb')

    w = unicode_csv_writer.UnicodeCsvWriter(output)

    gen = db.execute_paged(query=query, pageIndex=sort_by_index, pageSize=10000)

    num_rows = 0

    # Make sure we've got the table headers
    try:
        first = gen.next()
        num_rows = 1

        # Get the order of keys and sort them alphabetically so it doesn't come
        # out as complete soup
        keys = sorted(first.keys())
        w.writerow(keys)
        w.writerow(order_keyed_row(keys, first))

        for row in gen:
            w.writerow(order_keyed_row(keys, row))
            num_rows += 1

    except StopIteration:
        pass

    output.flush()
    output.close()
    log.info("Wrote %d rows" % num_rows)
Example #5
    def execute(self, sql, params=None, timeout=0):
        cursor = self.db_conn.cursor(cursorclass=Dbi.cursors.DictCursor)
        deathClock = None

        if self.debug:
            if params:
                log.debug(str(sql) + " % " + repr(params))
            else:
                log.debug(str(sql))

        if timeout > 0:
            deathClock = threading.Timer(timeout, self.kill_connection)
            deathClock.start()

        try:
            if params:
                cursor.execute(sql, params)
            elif hasattr(sql, 'uninterpolated_sql') and sql.params:
                cursor.execute(sql.uninterpolated_sql(), sql.params)
            else:
                cursor.execute(str(sql))
            #for row in cursor.fetchall():
            #	yield row
            out = cursor.fetchall()
            cursor.close()
            return out
        finally:
            if deathClock is not None:
                deathClock.cancel()
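The timeout handling above is a watchdog-timer pattern: arm a threading.Timer before the blocking call, then disarm it in a finally block so it never fires on the success path. A self-contained sketch of the same idea, using only the standard library (the sleep stands in for a long-running query):

import threading
import time

def run_with_watchdog(work, timeout, on_timeout):
    # Arm the watchdog; on_timeout() fires only if work() overruns.
    death_clock = threading.Timer(timeout, on_timeout)
    death_clock.start()
    try:
        return work()
    finally:
        death_clock.cancel()  # disarm on every exit path, as execute() does

run_with_watchdog(lambda: time.sleep(0.1), timeout=5.0,
                  on_timeout=lambda: None)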
Example #6
def read_encoded(path, version, callback, column_headers, encoding):
    # Coerce to a list
    if not hasattr(version, 'extend'):
        version = [version]

    with io.open(path, 'r', encoding=encoding) as csvfile:
        plainreader = unicode_csv_reader(csvfile, **dialect)

        for row in plainreader:
            column_type = row[0]
            if column_type == 'RH':
                if int(row[4]) not in version:
                    raise RuntimeError("This file uses an unexpected format revision: {version}".format(version=row[4]))
            elif column_type == 'FH':
                pass
            elif column_type == 'SH':
                start_date, end_date = row[1:3]
                log.info("Report file covers date range {start} to {end}".format(start=start_date, end=end_date))
            elif column_type == 'CH':
                column_headers = ['Column Type'] + row[1:]
            elif column_type == 'SB':
                record = dict(zip(column_headers, row))
                try:
                    callback(record)
                except:
                    FailMailer.mail('BAD_AUDIT_LINE', data=record, print_exception=True)
            elif column_type in ('SF', 'SC', 'RF', 'RC', 'FF'):
                pass
            else:
                raise RuntimeError("Unknown column type: {type}".format(type=column_type))
Example #7
    def mail(errorcode, data=None, print_exception=False):
        body = ""
        if print_exception:
            exception_info = "".join(
                traceback.format_exception(*sys.exc_info()))
            body = body + exception_info
        if data:
            if not isinstance(data, basestring):
                data = yaml.safe_dump([data],
                                      default_flow_style=False,
                                      allow_unicode=True)
            body = body + "\n\nWhile processing:\n{data}".format(data=data)

        log.error("sending failmail: " + body)

        msg = MIMEText(body)

        from_address = config.failmail_sender
        to_address = config.failmail_recipients
        if hasattr(to_address, 'split'):
            to_address = to_address.split(",")

        msg['Subject'] = "Fail Mail: {code} ({process})".format(
            code=errorcode, process=config.app_name)
        msg['From'] = from_address
        msg['To'] = to_address[0]

        mailer = smtplib.SMTP('localhost')
        mailer.sendmail(from_address, to_address, msg.as_string())
        mailer.quit()
Example #8
def load_config(app_name):
    global config

    search_filenames = [
        os.path.expanduser("~/.fundraising/%s.yaml" % app_name),
        os.path.expanduser("~/.%s.yaml" % app_name),
        # FIXME: relative path fail
        os.path.dirname(__file__) + "/../%s/config.yaml" % app_name,
        "/etc/fundraising/%s.yaml" % app_name,
        "/etc/%s.yaml" % app_name,
        # FIXME: relative path fail
        os.path.dirname(__file__) + "/../%s/%s.yaml" % (app_name, app_name,)
    ]
    # TODO: if getopt.get(--config/-f): search_filenames.append

    for filename in search_filenames:
        if not os.path.exists(filename):
            continue

        config = DictAsAttrDict(load_yaml(open(filename, 'r')))
        log.info("Loaded config from {path}.".format(path=filename))

        config.app_name = app_name

        return

    raise Exception("No config found, searched " + ", ".join(search_filenames))
Example #9
def write_gdoc_results(doc=None, results=[]):
    log.info("Writing test results to {url}".format(url=doc))
    doc = Spreadsheet(doc=doc)
    for result in results:
        props = {}
        props.update(result['criteria'])
        props.update(result['results'])
        doc.append_row(props)
Example #10
    def get(self, filename, dest_path):
        try:
            self.client.get(filename, dest_path)
        except:
            if os.path.exists(dest_path):
                log.info("Removing corrupted download: {path}".format(path=dest_path))
                os.unlink(dest_path)
            raise
Example #11
def write_gdoc_results(doc=None, results=[]):
    log.info("Writing test results to {url}".format(url=doc))
    doc = Spreadsheet(doc=doc)
    for result in results:
        props = {}
        props.update(result['criteria'])
        props.update(result['results'])
        doc.append_row(props)
Example #12
    def __init__(self, name):
        self.name = name

        sql = "INSERT INTO donor_autoreview_job SET name = %s"
        dbc = db.get_db(config.drupal_schema)
        dbc.execute(sql, (name, ))
        self.id = dbc.last_insert_id()
        log.info("This job has ID %d" % self.id)
Example #13
def rotate_files():
    # Clean up after ourselves
    if config.days_to_keep_files:
        now = time.time()
        for f in os.listdir(config.working_path):
            path = os.path.join(config.working_path, f)
            if os.stat(path).st_mtime < (now - config.days_to_keep_files * 86400):
                if os.path.isfile(path):
                    log.info("Removing old file %s" % path)
                    os.remove(path)
Example #14
def export_data(output_path=None):
    db = DbConnection(**config.silverpop_db)

    log.info("Starting full data export")
    exportq = DbQuery()
    exportq.tables.append('silverpop_export_view')
    exportq.columns.append('*')
    run_export_query(db=db,
                     query=exportq,
                     output=output_path,
                     sort_by_index="ContactID")
Example #15
def run_queries(db, queries):
    """
    Build silverpop_export database from CiviCRM.
    """
    i = 1
    for query in queries:
        no_prefix = query[query.index("\n") + 1:]
        info = (i, no_prefix[:80])
        log.info("Running query #%s: %s" % info)
        db.execute(query)
        i += 1
Example #16
def rotate_files():
    # Clean up after ourselves
    if config.days_to_keep_files:
        now = time.time()
        for f in os.listdir(config.working_path):
            path = os.path.join(config.working_path, f)
            if os.stat(path).st_mtime < (now -
                                         config.days_to_keep_files * 86400):
                if os.path.isfile(path):
                    log.info("Removing old file %s" % path)
                    os.remove(path)
Example #17
def export_data(output_path=None):
    db = DbConnection(**config.silverpop_db)

    log.info("Starting full data export")
    exportq = DbQuery()
    exportq.tables.append('silverpop_export_view')
    exportq.columns.append('*')
    run_export_query(
        db=db,
        query=exportq,
        output=output_path,
        sort_by_index="ContactID"
    )
Example #18
def export_unsubscribes(output_path=None):
    db = DbConnection(**config.silverpop_db)

    log.info("Starting unsubscribe data export")
    exportq = DbQuery()
    exportq.tables.append('silverpop_export')
    exportq.columns.append('contact_id')
    exportq.columns.append('email')
    exportq.where.append('opted_out=1')
    run_export_query(db=db,
                     query=exportq,
                     output=output_path,
                     sort_by_index="contact_id")
Example #19
    def connect(self):
        log.info("Connecting to {host}".format(host=config.sftp.host))
        transport = paramiko.Transport((config.sftp.host, 22))
        params = {
            'username': config.sftp.username,
        }
        if hasattr(config.sftp, 'host_key'):
            params['hostkey'] = make_key(config.sftp.host_key)
        if hasattr(config.sftp, 'password'):
            params['password'] = config.sftp.password
        if hasattr(config.sftp, 'private_key'):
            params['pkey'] = make_key(config.sftp.private_key)
        transport.connect(**params)
        self.client = paramiko.SFTPClient.from_transport(transport)
Example #20
    def __init__(self,
                 label=None,
                 type="",
                 campaign=None,
                 banners=None,
                 start=None,
                 end=None,
                 disabled=False,
                 **ignore):
        for key in config.ignored_columns:
            if key in ignore:
                ignore.pop(key)
        if ignore:
            log.warn("ignoring columns: {columns}".format(
                columns=", ".join(ignore.keys())))

        self.campaign = mediawiki.centralnotice.api.get_campaign(campaign)
        if not self.campaign:
            log.warn("no such campaign '{campaign}'".format(campaign=campaign))

        self.type = type.lower()

        self.banners = []
        self.is_banner_test = (self.type.count('banner') > 0)
        if self.is_banner_test:
            if banners:
                if hasattr(banners, 'strip'):
                    banners = [s.strip() for s in banners.split(",")]
                self.banners = banners
            else:
                if self.campaign['banners']:
                    self.banners = self.campaign['banners'].keys()

            #self.variations = [ FrTestVariation(banner=name) for name in self.banners ]

        self.is_country_test = (self.type.count('country') > 0)
        self.is_lp_test = (self.type.count('lp') > 0)

        self.start_time = start
        self.end_time = end

        self.label = label
        if not self.label:
            # FIXME
            self.label = campaign

        self.enabled = not disabled

        self.results = []
Example #21
    def send(self, queue, msg):

        msg.update(Redis.source_meta())

        encoded = json.dumps(msg)

        if self.config.no_effect:
            log.info("not queueing message. " + encoded)
            return

        if queue in self.config.redis.queues:
            # Map queue name if desired.
            self.conn.rpush(self.config.redis.queues[queue], encoded)
        else:
            self.conn.rpush(queue, encoded)
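The mapping consulted above is presumably a plain dict from logical queue names to actual Redis list keys; a hypothetical shape:

# Hypothetical config fragment: send('donations', msg) would then
# rpush to the 'fundraising-donations' list instead of 'donations'.
queues = {
    'donations': 'fundraising-donations',
    'refund': 'fundraising-refunds',
}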
Example #22
def export_unsubscribes(output_path=None):
    db = DbConnection(**config.silverpop_db)

    log.info("Starting unsubscribe data export")
    exportq = DbQuery()
    exportq.tables.append('silverpop_export')
    exportq.columns.append('contact_id')
    exportq.columns.append('email')
    exportq.where.append('opted_out=1')
    run_export_query(
        db=db,
        query=exportq,
        output=output_path,
        sort_by_index="contact_id"
    )
Example #23
def export_unsubscribes(output_path=None):
    config = process.globals.get_config()

    db = DbConnection(**config.silverpop_db)

    log.info("Starting unsubscribe data export")
    exportq = DbQuery()
    exportq.tables.append('silverpop_excluded')
    exportq.columns.append('*')
    run_export_query(
        db=db,
        query=exportq,
        output=output_path,
        sort_by_index="id"
    )
Example #24
    def addMatch(job_id, oldId, newId, action, match):
        log.info("Found a match: {old} -> {new} : {match}".format(old=oldId, new=newId, match=match))
        db.get_db(config.drupal_schema).execute("""
            INSERT INTO donor_review_queue
                SET
                    job_id = %(job_id)s,
                    old_id = %(old_id)s,
                    new_id = %(new_id)s,
                    action_id = %(action_id)s,
                    match_description = %(match)s
            """, {
                'job_id': job_id,
                'old_id': oldId,
                'new_id': newId,
                'action_id': action.id,
                'match': match,
            })
Example #25
def export_and_upload():
    log.info("Begin Silverpop Export")

    make_sure_path_exists(config.working_path)

    updatefile = os.path.join(
        config.working_path,
        'DatabaseUpdate-' + time.strftime("%Y%m%d%H%M%S") + '.csv')
    unsubfile = os.path.join(
        config.working_path,
        'Unsubscribes-' + time.strftime("%Y%m%d%H%M%S") + '.csv')

    export_data(output_path=updatefile)
    export_unsubscribes(output_path=unsubfile)
    upload([updatefile, unsubfile])
    rotate_files()

    log.info("End Silverpop Export")
Example #26
def export_and_upload():
    log.info("Begin Silverpop Export")

    make_sure_path_exists(config.working_path)

    updatefile = os.path.join(
        config.working_path,
        'DatabaseUpdate-' + time.strftime("%Y%m%d%H%M%S") + '.csv'
    )
    unsubfile = os.path.join(
        config.working_path,
        'Unsubscribes-' + time.strftime("%Y%m%d%H%M%S") + '.csv'
    )

    export_data(output_path=updatefile)
    export_unsubscribes(output_path=unsubfile)
    upload([updatefile, unsubfile])
    rotate_files()

    log.info("End Silverpop Export")
Example #27
    def execute(self, sql, params=None):
        cursor = self.db_conn.cursor(cursorclass=Dbi.cursors.DictCursor)

        if self.debug:
            if params:
                log.debug(str(sql) + " % " + repr(params))
            else:
                log.debug(str(sql))

        if params:
            cursor.execute(sql, params)
        elif hasattr(sql, 'uninterpolated_sql') and sql.params:
            cursor.execute(sql.uninterpolated_sql(), sql.params)
        else:
            cursor.execute(str(sql))
        #for row in cursor.fetchall():
        #	yield row
        out = cursor.fetchall()
        cursor.close()
        return out
Example #28
    def execute(self, sql, params=None):
        cursor = self.db_conn.cursor(cursorclass=Dbi.cursors.DictCursor)

        if self.debug:
            if params:
                log.debug(str(sql) + " % " + repr(params))
            else:
                log.debug(str(sql))

        if params:
            cursor.execute(sql, params)
        elif hasattr(sql, 'uninterpolated_sql') and sql.params:
            cursor.execute(sql.uninterpolated_sql(), sql.params)
        else:
            cursor.execute(str(sql))
        #for row in cursor.fetchall():
        #	yield row
        out = cursor.fetchall()
        cursor.close()
        return out
Example #29
    def load_results(self):
        if self.is_banner_test and self.banners:
            cases = []
            for name in self.banners:
                test_case = self.get_case(
                    campaign=self.campaign['name'],
                    banner=name,
                    languages=self.campaign['languages'],
                    countries=self.campaign['countries'],
                )
                cases.append(test_case)

            self.results.extend(get_banner_results(cases))

        if self.is_country_test:
            #results = [ calculate_result(country=code) for code in campaign['countries'] ]
            #self.results.extend(results)
            log.warn("country test type not implemented")

        if self.is_lp_test:
            log.warn("LP test type not implemented")
Example #30
    def load_results(self):
        if self.is_banner_test and self.banners:
            cases = []
            for name in self.banners:
                test_case = self.get_case(
                    campaign=self.campaign['name'],
                    banner=name,
                    languages=self.campaign['languages'],
                    countries=self.campaign['countries'],
                )
                cases.append(test_case)

            self.results.extend(get_banner_results(cases))

        if self.is_country_test:
            # results = [ calculate_result(country=code) for code in campaign['countries'] ]
            # self.results.extend(results)
            log.warn("country test type not implemented")

        if self.is_lp_test:
            log.warn("LP test type not implemented")
Example #31
def update_gdoc_spec(doc=None, spec=None):
    log.info("Updating test specs with latest CentralNotice changes... {url}".format(url=doc))

    # FIXME: currently, the spec must have been read with read_gdoc_spec in order to get row numbers
    if not spec:
        spec = read_gdoc_spec(doc=doc)

    spec.update_from_logs()

    doc = Spreadsheet(doc=doc)
    for index, test in enumerate(spec.spec, 0):
        api_rownum = index + 1
        rownum = index + 2
        if api_rownum < doc.num_rows():
            if not hasattr(test, 'modified') or not test.modified:
                continue
            log.debug("updating spec end time in row {rownum}: {spec}".format(rownum=rownum, spec=test))
            if test.end_time:
                doc.update_row({'end': test.end_time}, index=api_rownum)
        else:
            log.debug("appending spec row {rownum}: {spec}".format(rownum=rownum, spec=test))
            doc.append_row({
                'label': test.label,
                'type': "banner",
                'start': test.start_time,
                'end': test.end_time,
                'campaign': test.campaign['name'],
                'banners': ", ".join(test.banners),
            })
Example #32
    def __init__(self, label=None, type="", campaign=None, banners=None, start=None, end=None, disabled=False, **ignore):
        for key in config.ignored_columns:
            if key in ignore:
                ignore.pop(key)
        if ignore:
            log.warn("ignoring columns: {columns}".format(columns=", ".join(ignore.keys())))

        self.campaign = mediawiki.centralnotice.api.get_campaign(campaign)
        if not self.campaign:
            log.warn("no such campaign '{campaign}'".format(campaign=campaign))

        self.type = type.lower()

        self.banners = []
        self.is_banner_test = (self.type.count('banner') > 0)
        if self.is_banner_test:
            if banners:
                if hasattr(banners, 'strip'):
                    banners = [s.strip() for s in banners.split(",")]
                self.banners = banners
            else:
                if self.campaign['banners']:
                    self.banners = self.campaign['banners'].keys()

            # self.variations = [ FrTestVariation(banner=name) for name in self.banners ]

        self.is_country_test = (self.type.count('country') > 0)
        self.is_lp_test = (self.type.count('lp') > 0)

        self.start_time = start
        self.end_time = end

        self.label = label
        if not self.label:
            # FIXME
            self.label = campaign

        self.enabled = not disabled

        self.results = []
Example #33
def update_gdoc_spec(doc=None, spec=None):
    log.info("Updating test specs with latest CentralNotice changes... {url}".
             format(url=doc))

    # FIXME: currently, the spec must have been read with read_gdoc_spec in order to get row numbers
    if not spec:
        spec = read_gdoc_spec(doc=doc)

    spec.update_from_logs()

    doc = Spreadsheet(doc=doc)
    for index, test in enumerate(spec.spec, 0):
        api_rownum = index + 1
        rownum = index + 2
        if api_rownum < doc.num_rows():
            if not hasattr(test, 'modified') or not test.modified:
                continue
            log.debug("updating spec end time in row {rownum}: {spec}".format(
                rownum=rownum, spec=test))
            if test.end_time:
                doc.update_row({'end': test.end_time}, index=api_rownum)
        else:
            log.debug("appending spec row {rownum}: {spec}".format(
                rownum=rownum, spec=test))
            doc.append_row({
                'label': test.label,
                'type': "banner",
                'start': test.start_time,
                'end': test.end_time,
                'campaign': test.campaign['name'],
                'banners': ", ".join(test.banners),
            })
Example #34
    def reviewBatch(self):
        '''For each new contact, find the oldest contact with the same email address.'''

        matchDescription = EmailMatch("Exact match").json()

        self.contactCache.fetch()
        for contact in self.contactCache.contacts:
            if contact['email']:
                query = db.Query()
                query.columns = [
                    'MIN(contact_id) AS contact_id',
                ]
                query.tables = [
                    'civicrm_email',
                ]
                query.where.extend([
                    'email = %(email)s',
                    'contact_id < %(new_id)s',
                ])
                query.group_by.extend([
                    'email',
                ])
                query.params = {
                    'new_id': contact['id'],
                    'email': contact['email'],
                }
                result = db.get_db().execute(query)

                if result:
                    for row in result:
                        ReviewQueue.addMatch(self.job_id, row['contact_id'], contact['id'], Autoreview.REC_DUP, matchDescription)

            ReviewQueue.tag(contact['id'], QuickAutoreview.QUICK_REVIEWED)

        if not self.contactCache.contacts:
            log.warn("Searched an empty batch of contacts!")
        else:
            last_seen = self.contactCache.contacts[-1]['id']
            log.info("End of batch.  Last contact scanned was ID {id}".format(id=last_seen))
Example #35
    def parse_line(self, row):
        required_fields = [
            "Period 3 Amount",
            "Subscription Currency",
            "Subscription ID",
            "Subscription Payer Name",
            "Subscription Period 3",
        ]

        missing_fields = []
        for field in required_fields:
            if field not in row or row[field] == '':
                missing_fields.append(field)
        if missing_fields:
            raise RuntimeError("Message is missing some important fields: [{fields}]".format(fields=", ".join(missing_fields)))

        names = row['Subscription Payer Name'].split(' ')

        out = {
            'subscr_id': row['Subscription ID'],
            'mc_currency': row['Subscription Currency'],
            'mc_amount3': float(row['Period 3 Amount']) / 100,
            'period3': row['Subscription Period 3'],
            'subscr_date': row['Subscription Creation Date'],
            'payer_email': row['Subscription Payer email address'],
            'first_name': names[0],
            'last_name': " ".join(names[1:]),
            'address_street': row['Shipping Address Line1'],
            'address_city': row['Shipping Address City'],
            'address_zip': row['Shipping Address Zip'],
            'address_state': row['Shipping Address State'],
            'address_country_code': row['Shipping Address Country'],
            'gateway': 'paypal',
        }

        # FIXME what historical evil caused...
        if row['Subscription Period 3'] != "1 M":
            raise RuntimeError("Unknown subscription period {period}".format(period=row['Subscription Period 3']))

        if row['Subscription Action Type'] == 'S0000':
            out['txn_type'] = 'subscr_signup'
            if self.crm.subscription_exists(out['subscr_id']):
                log.info("-Duplicate\t{id}\t{date}\tsubscr_signup".format(id=out['subscr_id'], date=out['subscr_date']))
                return
        elif row['Subscription Action Type'] == 'S0100':
            log.info("-Ignored\t{id}\t{date}\tsubscr_modify".format(id=out['subscr_id'], date=out['subscr_date']))
            return
        elif row['Subscription Action Type'] == 'S0200':
            out['txn_type'] = 'subscr_cancel'
            out['cancel_date'] = out['subscr_date']
        elif row['Subscription Action Type'] == 'S0300':
            out['txn_type'] = 'subscr_eot'

        if config.no_thankyou:
            out['thankyou_date'] = 0

        log.info("+Sending\t{id}\t{date}\t{type}".format(id=out['subscr_id'], date=out['subscr_date'], type=out['txn_type']))
        self.send(out)
Example #36
def export_all():
    """
    Dump database contents to CSVs.
    """
    log.info("Begin Silverpop Export")
    config = process.globals.get_config()

    make_sure_path_exists(config.working_path)

    updatefile = os.path.join(
        config.working_path,
        'DatabaseUpdate-' + time.strftime("%Y%m%d%H%M%S") + '.csv'
    )
    unsubfile = os.path.join(
        config.working_path,
        'Unsubscribes-' + time.strftime("%Y%m%d%H%M%S") + '.csv'
    )

    export_data(output_path=updatefile)
    export_unsubscribes(output_path=unsubfile)
    rotate_files()

    log.info("End Silverpop Export")
Example #37
def read_encoded(path, version, callback, column_headers, encoding):
    # Coerce to a list
    if not hasattr(version, 'extend'):
        version = [version]

    with io.open(path, 'r', encoding=encoding) as csvfile:
        plainreader = unicode_csv_reader(csvfile, **dialect)

        for row in plainreader:
            column_type = row[0]
            if column_type == 'RH':
                if int(row[4]) not in version:
                    raise RuntimeError(
                        "This file uses an unexpected format revision: {version}"
                        .format(version=row[4]))
            elif column_type == 'FH':
                pass
            elif column_type == 'SH':
                start_date, end_date = row[1:3]
                log.info(
                    "Report file covers date range {start} to {end}".format(
                        start=start_date, end=end_date))
            elif column_type == 'CH':
                column_headers = ['Column Type'] + row[1:]
            elif column_type == 'SB':
                record = dict(zip(column_headers, row))
                try:
                    callback(record)
                except:
                    FailMailer.mail('BAD_AUDIT_LINE',
                                    data=record,
                                    print_exception=True)
            elif column_type in ('SF', 'SC', 'RF', 'RC', 'FF'):
                pass
            else:
                raise RuntimeError(
                    "Unknown column type: {type}".format(type=column_type))
Example #38
def is_fr_test(test):
    if test.label and test.banners and test.campaign:
        is_chapter = re.search(config.fr_chapter_test, test.banners[0])
        if is_chapter:
            log.debug("Determined test {title} belongs to a chapter".format(title=test.label))
        else:
            log.debug("Determined test {title} belongs to Fundraising".format(title=test.label))
        return not is_chapter

    log.warn("missing data for test {title}".format(title=test.label))
Example #39
def is_fr_test(test):
    if test.label and test.banners and test.campaign:
        is_chapter = re.search(config.fr_chapter_test, test.banners[0])
        if is_chapter:
            log.debug("Determined test {title} belongs to a chapter".format(
                title=test.label))
        else:
            log.debug("Determined test {title} belongs to Fundraising".format(
                title=test.label))
        return not is_chapter

    log.warn("missing data for test {title}".format(title=test.label))
Example #40
    def pull():
        '''Pull down new remote files'''

        config = process.globals.get_config()

        # Check against both unprocessed and processed files to find new remote files
        local_paths = [
            config.incoming_path,
            config.archive_path,
        ]
        if hasattr(config, 'extra_paths'):
            local_paths.extend(config.extra_paths)
        local_files = walk_files(local_paths)

        remote = Client()
        remote_files = remote.ls(config.sftp.remote_root)
        empty_failures = []

        for filename in remote_files:
            if filename in local_files:
                log.info("Skipping already downloaded file {filename}".format(filename=filename))
                continue

            log.info("Downloading file {filename}".format(filename=filename))
            dest_path = os.path.join(config.incoming_path, filename)
            remote.get(os.path.join(config.sftp.remote_root, filename), dest_path)

            # Assert that the file is not empty
            if os.path.getsize(dest_path) == 0:
                os.unlink(dest_path)
                empty_failures.append(filename)
                log.warn("Stupid file was empty, removing locally: {path}".format(path=dest_path))

        if empty_failures:
            log.error("The following files were empty, please contact your provider: {failures}".format(failures=", ".join(empty_failures)))

            if hasattr(config, 'panic_on_empty') and config.panic_on_empty:
                raise RuntimeError("Stupid files did not download correctly.")
Example #41
def update_gdoc_results(doc=None, results=[]):
    log.info("Updating results in {url}".format(url=doc))
    doc = Spreadsheet(doc=doc)
    existing = list(doc.get_all_rows())

    def find_matching_cases(criteria):
        matching = []

        def fuzzy_compare_row(row, criteria):
            if not row:
                return False
            if (criteria['banner'] == row['banner']
                    and criteria['campaign'] == row['campaign']
                    and criteria['start'] == row['start']):
                return True

        for n, row in enumerate(existing, 1):
            if fuzzy_compare_row(row, criteria):
                matching.append(n)

        return matching

    for result in results:
        if not result:
            continue

        matching = find_matching_cases(result['criteria'])

        props = {}
        props.update(result['results'])
        props.update(result['criteria'])

        if len(matching) == 0:
            doc.append_row(props)
        else:
            if len(matching) > 1:
                log.warn(
                    "more than one result row {match} matches criteria: {criteria}"
                    .format(match=matching, criteria=result['criteria']))
            index = matching[-1]
            log.debug("updating row {rownum} with {banner}".format(
                rownum=index, banner=result['criteria']['banner']))
            doc.update_row(props, index=index)
Example #42
def update_gdoc_results(doc=None, results=[]):
    log.info("Updating results in {url}".format(url=doc))
    doc = Spreadsheet(doc=doc)
    existing = list(doc.get_all_rows())

    def find_matching_cases(criteria):
        matching = []

        def fuzzy_compare_row(row, criteria):
            if not row:
                return False
            if criteria['banner'] == row['banner'] and criteria['campaign'] == row['campaign'] and criteria['start'] == row['start']:
                return True

        for n, row in enumerate(existing, 1):
            if fuzzy_compare_row(row, criteria):
                matching.append(n)

        return matching

    for result in results:
        if not result:
            continue

        matching = find_matching_cases(result['criteria'])

        props = {}
        props.update(result['results'])
        props.update(result['criteria'])

        if len(matching) == 0:
            doc.append_row(props)
        else:
            if len(matching) > 1:
                log.warn("more than one result row {match} matches criteria: {criteria}".format(match=matching, criteria=result['criteria']))
            index = matching[-1]
            log.debug("updating row {rownum} with {banner}".format(rownum=index, banner=result['criteria']['banner']))
            doc.update_row(props, index=index)
Example #43
    def parse_line(self, row):
        if row['Billing Address Line1']:
            addr_prefix = 'Billing Address '
        else:
            addr_prefix = 'Shipping Address '

        out = {
            'gateway_txn_id': row['Transaction ID'],
            'date': row['Transaction Initiation Date'],
            'settled_date': row['Transaction Completion Date'],
            'gross': float(row['Gross Transaction Amount']) / 100.0,
            'currency': row['Gross Transaction Currency'],
            'gateway_status': row['Transactional Status'],
            'gateway': 'paypal',
            'note': row['Transaction Note'],
            'email': row['Payer\'s Account ID'],
            'street_address': row[addr_prefix + 'Line1'],
            'supplemental_address_1': row[addr_prefix + 'Line2'],
            'city': row[addr_prefix + 'City'],
            'state_province': row[addr_prefix + 'State'],
            'postal_code': row[addr_prefix + 'Zip'],
            'country': row[addr_prefix + 'Country'],
        }

        if row['Fee Amount']:
            out['fee'] = float(row['Fee Amount']) / 100.0

            if (row['Fee Currency']
                    and row['Gross Transaction Currency'] != row['Fee Currency']):
                raise RuntimeError(
                    "Failed to import because multiple currencies for one transaction is not handled."
                )

        if 'First Name' in row:
            out['first_name'] = row['First Name']

        if 'Last Name' in row:
            out['last_name'] = row['Last Name']

        if 'Payment Source' in row:
            out['payment_method'] = row['Payment Source']

        if 'Card Type' in row:
            out['payment_submethod'] = row['Card Type']

        if row['PayPal Reference ID Type'] == 'SUB':
            out['subscr_id'] = row['PayPal Reference ID']

        event_type = row['Transaction Event Code'][0:3]

        queue = None
        if event_type in ('T00', 'T03', 'T05', 'T07', 'T22'):
            if row['Transaction Event Code'] == 'T0002':
                queue = 'recurring'
                out = self.normalize_recurring(out)
            elif row['Transaction  Debit or Credit'] == 'DR':
                # sic: double-space is coming from the upstream
                log.info("-Debit\t{id}\t{date}\tPayment to".format(
                    id=out['gateway_txn_id'], date=out['date']))
                # This payment is from us!  Do not send to the CRM.
                return
            else:
                queue = 'donations'
        elif event_type in ('T11', 'T12'):
            out['gateway_refund_id'] = out['gateway_txn_id']
            out['gross_currency'] = out['currency']

            if row['PayPal Reference ID Type'] == 'TXN':
                out['gateway_parent_id'] = row['PayPal Reference ID']

            if row['Transaction Event Code'] == 'T1106':
                out['type'] = 'reversal'
            elif row['Transaction Event Code'] == 'T1107':
                out['type'] = 'refund'
            elif row['Transaction Event Code'] == 'T1201':
                out['type'] = 'chargeback'
            else:
                log.info(
                    "-Unknown\t{id}\t{date}\t(Refundish type {type})".format(
                        id=out['gateway_txn_id'],
                        date=out['date'],
                        type=row['Transaction Event Code']))
                return

            queue = 'refund'

        if not queue:
            log.info("-Unknown\t{id}\t{date}\t(Type {type})".format(
                id=out['gateway_txn_id'], date=out['date'], type=event_type))
            return

        if self.crm.transaction_exists(gateway_txn_id=out['gateway_txn_id'],
                                       gateway='paypal'):
            log.info("-Duplicate\t{id}\t{date}\t{type}".format(
                id=out['gateway_txn_id'],
                date=row['Transaction Initiation Date'],
                type=queue))
            return

        if 'last_name' not in out and queue != 'refund':
            out['first_name'], out['last_name'] = self.fetch_donor_name(
                out['gateway_txn_id'])

        if config.no_thankyou:
            out['thankyou_date'] = 0

        log.info("+Sending\t{id}\t{date}\t{type}".format(
            id=out['gateway_txn_id'],
            date=row['Transaction Initiation Date'],
            type=queue))
        self.send(queue, out)
Example #44
    def normalize_and_send(self, record):
        """Transform the record into a WMF queue message

        See https://wikitech.wikimedia.org/wiki/Fundraising/Queue_messages"""

        msg = {}

        if record["transaction_type"] == "0":
            queue = "donations"
        elif record["transaction_type"] == "5":
            queue = "refund"
        else:
            raise RuntimeError(
                "Don't know how to handle transaction type {type}.".format(
                    type=record["transaction_type"]))

        msg["date"] = to_timestamp(
            datetime.strptime(
                record["transaction_date"] + record["transaction_time"],
                "%d%m%y%H%M%S").utctimetuple())
        iso_date = datetime.fromtimestamp(msg["date"]).isoformat()

        msg["gateway"] = "worldpay"

        # FIXME: is this the CustomerId or what?
        if "originators_transaction_reference" in record:
            msg["gateway_txn_id"] = record[
                "originators_transaction_reference"].strip()
        else:
            raise RuntimeError("We're gonna die: no gateway_txn_id available.")

        # The default currency is GBP, don't make me explain why the amount
        # comes from a different field when currency != GBP :(
        if record["local_currency_code"].strip():
            msg["currency"] = record["local_currency_code"]
            msg["gross"] = int(record["local_value"]) * exponent_to_multiplier(
                record["local_exponent"])
        else:
            msg["currency"] = "GBP"
            msg["gross"] = int(
                record["transaction_value"]) * exponent_to_multiplier(2)

        if queue == "refund":
            msg["gross_currency"] = msg["currency"]
            msg["gateway_parent_id"] = msg["gateway_txn_id"]
            # Note that we do not have a new txn id for the refund
            msg["gateway_refund_id"] = msg["gateway_txn_id"]
            # FIXME: chargeback vs refund info is not available in this file.
            msg["type"] = "refund"
            log.info("+Sending\t{id}\t{date}\t{type}".format(
                id=msg["gateway_parent_id"], date=iso_date, type=msg["type"]))
            self.send(queue, msg)
            return

        if self.crm.transaction_exists(gateway_txn_id=msg["gateway_txn_id"],
                                       gateway="worldpay"):
            log.info("-Duplicate\t{id}\t{date}\t{type}".format(
                id=msg["gateway_txn_id"], date=iso_date, type=queue))
            return

        # Switch behavior depending on the status.  We only like "accepted" transactions.
        status = record["status"].strip()
        if status == "P":
            log.info("-Pending\t{id}\t{date}\t{type}".format(
                id=msg["gateway_txn_id"], date=iso_date, type=queue))
            return
        elif status == "R":
            log.info("-Rejection\t{id}\t{date}\t{type}".format(
                id=msg["gateway_txn_id"], date=iso_date, type=queue))
            return
        elif status != "A":
            raise RuntimeError(
                "Unknown gateway status: {code}".format(code=status))

        # Include settlement details if they are available.
        if record["settlement_value"].strip():
            if record["settlement_currency_code"].strip():
                msg["settlement_currency"] = record["settlement_currency_code"]
            else:
                msg["settlement_currency"] = "GBP"
            msg["settlement_amount"] = int(
                record["settlement_value"]) * exponent_to_multiplier(
                    record["settlement_exponent"])

        msg["email"] = "*****@*****.**"
        msg["payment_method"] = "cc"
        msg["payment_submethod"] = reference_data.decode_card_type(
            record["card_type"].strip())

        # custom values
        msg["raw_card_type"] = record["card_type"].strip()

        log.info("+Sending\t{id}\t{date}\t{type}".format(
            id=msg["gateway_txn_id"], date=iso_date, type=queue))
        self.send(queue, msg)
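exponent_to_multiplier is not shown in these examples, but from its use above it presumably turns a currency exponent into a scale factor; a plausible sketch:

def exponent_to_multiplier(exponent):
    # Exponent 2 -> 0.01, so 1234 minor units become 12.34 major units.
    return 10 ** -int(exponent)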
Example #45
    def buildQuery(self):
        query = super(PagedGroup, self).buildQuery()
        log.info("Limiting batch contact retrieval to {num} records.".format(num=self.pagesize))
        query.limit = self.pagesize
        query.offset = self.offset
        return query
Example #46
    def commit():
        log.info("Committing tags...")
        for tag, contacts in ReviewQueue.cached_tags.items():
            log.info("Bulk tagging {num} contacts with tag <{tag}>".format(num=len(contacts), tag=tag.name))
            ReviewQueue.tag_many(contacts, tag)
Example #47
    def send(self, queue_key, body):
        if config.no_effect:
            log.info("not queueing message. " + json.dumps(body))
            return

        self.conn.send(self.create_message(queue_key, body))
Example #48
    def parse_line(self, row):
        required_fields = [
            "Period 3 Amount",
            "Subscription Currency",
            "Subscription ID",
            "Subscription Payer Name",
            "Subscription Period 3",
        ]

        missing_fields = []
        for field in required_fields:
            if field not in row or row[field] == '':
                missing_fields.append(field)
        if missing_fields:
            raise RuntimeError(
                "Message is missing some important fields: [{fields}]".format(
                    fields=", ".join(missing_fields)))

        names = row['Subscription Payer Name'].split(' ')

        out = {
            'subscr_id': row['Subscription ID'],
            'mc_currency': row['Subscription Currency'],
            'mc_amount3': float(row['Period 3 Amount']) / 100,
            'period3': row['Subscription Period 3'],
            'subscr_date': row['Subscription Creation Date'],
            'payer_email': row['Subscription Payer email address'],
            'first_name': names[0],
            'last_name': " ".join(names[1:]),
            'address_street': row['Shipping Address Line1'],
            'address_city': row['Shipping Address City'],
            'address_zip': row['Shipping Address Zip'],
            'address_state': row['Shipping Address State'],
            'address_country_code': row['Shipping Address Country'],
            'gateway': 'paypal',
        }

        # FIXME what historical evil caused...
        if row['Subscription Period 3'] != "1 M":
            raise RuntimeError("Unknown subscription period {period}".format(
                period=row['Subscription Period 3']))

        if row['Subscription Action Type'] == 'S0000':
            out['txn_type'] = 'subscr_signup'
            if self.crm.subscription_exists(out['subscr_id']):
                log.info("-Duplicate\t{id}\t{date}\tsubscr_signup".format(
                    id=out['subscr_id'], date=out['subscr_date']))
                return
        elif row['Subscription Action Type'] == 'S0100':
            log.info("-Ignored\t{id}\t{date}\tsubscr_modify".format(
                id=out['subscr_id'], date=out['subscr_date']))
            return
        elif row['Subscription Action Type'] == 'S0200':
            out['txn_type'] = 'subscr_cancel'
            out['cancel_date'] = out['subscr_date']
        elif row['Subscription Action Type'] == 'S0300':
            out['txn_type'] = 'subscr_eot'

        if config.no_thankyou:
            out['thankyou_date'] = 0

        log.info(
            "+Sending\t{id}\t{date}\t{type}".format(id=out['subscr_id'],
                                                    date=out['subscr_date'],
                                                    type=out['txn_type']))
        self.send(out)
Example #49
def run_queries(db, queries):
    i = 1
    for query in queries:
        log.info("Running query #%s" % i)
        db.execute(query)
        i += 1
Example #50
    f.close()
    return queries


def run_queries(db, queries):
    i = 1
    for query in queries:
        log.info("Running query #%s" % i)
        db.execute(query)
        i += 1


if __name__ == '__main__':
    global config
    log.info("Begin Silverpop Update")
    lock.begin()

    log.info("Loading update query set")
    update_queries = load_queries('update_table.sql')

    db = DbConnection(**config.silverpop_db)

    log.info("Starting update query run")
    run_queries(db, update_queries)

    export.export_and_upload()

    lock.end()
    log.info("End Silverpop Export")
Example #51
def upload(files=None):
    log.info("Uploading to silverpop")
    sftpc = SftpClient()
    for path in files:
        sftpc.put(path, os.path.basename(path))
Example #52
                ])
                query.params = {
                    'new_id': contact['id'],
                    'email': contact['email'],
                }
                result = db.get_db().execute(query)

                if result:
                    for row in result:
                        ReviewQueue.addMatch(self.job_id, row['contact_id'], contact['id'], Autoreview.REC_DUP, matchDescription)

            ReviewQueue.tag(contact['id'], QuickAutoreview.QUICK_REVIEWED)

        if not self.contactCache.contacts:
            log.warn("Searched an empty batch of contacts!")
        else:
            last_seen = self.contactCache.contacts[-1]['id']
            log.info("End of batch.  Last contact scanned was ID {id}".format(id=last_seen))


if __name__ == '__main__':
    log.info("Begin quick_autoreview deduper")
    lock.begin()

    job = QuickAutoreview()
    job.reviewBatch()
    ReviewQueue.commit()

    lock.end()
    log.info("End quick_autoreview deduper")
Example #53
    def commit():
        log.info("Committing tags...")
        for tag, contacts in ReviewQueue.cached_tags.items():
            log.info("Bulk tagging {num} contacts with tag <{tag}>".format(
                num=len(contacts), tag=tag.name))
            ReviewQueue.tag_many(contacts, tag)
Example #54
    def kill_connection(self):
        log.warn('Query taking too long - killing connection {}'.format(self.connection_id))
        killerConnection = Dbi.connect(**self.connectionArgs)
        cursor = killerConnection.cursor()
        cursor.execute('KILL CONNECTION {}'.format(self.connection_id))
        killerConnection.close()
Example #55
def upload(files=None):
    log.info("Uploading to silverpop")
    sftpc = SftpClient()
    for path in files:
        sftpc.put(path, os.path.basename(path))