'EMAILS': 'Comma-separated list of email addresses'
}

REQUIRED_ARGS = [
    'DB_HOST', 'DB_PORT', 'DB_USER', 'DB_PASS', 'DB_NAME', 'EMAILS'
]


def filter_emails_by_membership(args) -> list:
    database = psycopg2.connect(host=args.DB_HOST,
                                port=args.DB_PORT,
                                user=args.DB_USER,
                                password=args.DB_PASS,
                                database=args.DB_NAME)
    database_cursor = database.cursor(
        cursor_factory=psycopg2.extras.RealDictCursor)
    membership_check_query = """
    SELECT email
    FROM ak_moveon.core_user
    WHERE email = ANY(%s)
    AND subscription_status = 'subscribed'
    """
    emails = args.EMAILS.split(',')
    database_cursor.execute(membership_check_query, (emails, ))
    return [item.get('email') for item in database_cursor.fetchall()]


if __name__ == '__main__':
    run_from_cli(filter_emails_by_membership, DESCRIPTION, ARG_DEFINITIONS,
                 REQUIRED_ARGS)
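
Since run_from_cli hands the parsed arguments to the function as an attribute-style object, the same helper can also be exercised directly from Python. A minimal sketch, assuming hypothetical connection details and a comma-separated EMAILS string:

from argparse import Namespace

# Hypothetical values for illustration only.
args = Namespace(DB_HOST='localhost', DB_PORT=5432, DB_USER='readonly',
                 DB_PASS='secret', DB_NAME='actionkit',
                 EMAILS='one@example.com,two@example.com')
subscribed = filter_emails_by_membership(args)  # e.g. ['one@example.com']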
Example #2
        rows = [row for row in csvreader]

    with open(args.OUT, 'w', newline='') as csvfile:
        fieldnames = [
            'County Code', 'State', 'County', 'Name', 'Temp County', 'Address',
            'City', 'Zip', 'lat', 'lng', 'Hours', 'Type'
        ]
        writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
        writer.writeheader()
        for row in rows:
            # Geocode the street address via the MapQuest Geocoding API.
            result = requests.get(
                'http://www.mapquestapi.com/geocoding/v1/address',
                params={
                    'key': args.MAPQUEST_API_KEY,
                    'location':
                    f"{row['Address']} {row['City']}, {row['State']}"
                }).json()
            location = result.get('results')[0].get('locations')[0]
            # Only overwrite the Zip column when MapQuest returns a postal code.
            postal_code = location.get('postalCode')
            if postal_code:
                row['Zip'] = postal_code
            row['lat'] = location.get('latLng').get('lat')
            row['lng'] = location.get('latLng').get('lng')
            writer.writerow(row)

    return args.OUT


if __name__ == '__main__':
    run_from_cli(add_geo, DESCRIPTION, ARG_DEFINITIONS, REQUIRED_ARGS)
Example #3
                'City': city_parts[0][:-1],
                'Zip': city_parts[2],
                'lat': '',
                'lng': '',
                'Hours': f"{row['Date']} {row['Hours']}",
                'Type': 'Drop Box'
            })

    # Merge rows that share a name and county, concatenating their hours.
    rows_by_location = {}
    for row in rows:
        location = f"{row['Name']}-{row['County']}"
        if location not in rows_by_location:
            rows_by_location[location] = row
        else:
            rows_by_location[location]['Hours'] += ', ' + row['Hours']
    rows = rows_by_location.values()

    with open(args.OUT, 'w', newline='') as csvfile:
        fieldnames = [
            'County Code', 'State', 'County', 'Name', 'Temp County', 'Address',
            'City', 'Zip', 'lat', 'lng', 'Hours', 'Type'
        ]
        writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
        writer.writeheader()
        for row in rows:
            writer.writerow(row)


    return args.OUT


if __name__ == '__main__':
    run_from_cli(format, DESCRIPTION, ARG_DEFINITIONS, REQUIRED_ARGS)
Example #4
REQUIRED_ARGS = [
    'DB_HOST', 'DB_PORT', 'DB_USER', 'DB_PASS', 'DB_NAME', 'DB_QUERY'
]


def get_psql_results(args):
    database = psycopg2.connect(host=args.DB_HOST,
                                port=args.DB_PORT,
                                user=args.DB_USER,
                                password=args.DB_PASS,
                                database=args.DB_NAME)
    database_cursor = database.cursor(
        cursor_factory=psycopg2.extras.RealDictCursor)
    # DB_QUERY may be either a literal SQL statement or a path to a .sql file.
    if args.DB_QUERY.endswith('.sql'):
        with open(args.DB_QUERY, 'r') as file:
            query = file.read()
    else:
        query = args.DB_QUERY
    if getattr(args, 'DB_VALUES', None):
        database_cursor.execute(query, args.DB_VALUES)
    else:
        database_cursor.execute(query)
    # NO_RESULTS signals a write-only statement: commit and return no rows.
    if getattr(args, 'NO_RESULTS', None):
        database.commit()
        return []
    return [dict(row) for row in database_cursor.fetchall()]


if __name__ == '__main__':
    run_from_cli(get_psql_results, DESCRIPTION, ARG_DEFINITIONS, REQUIRED_ARGS)
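
DB_QUERY may be a literal SQL string or a path ending in .sql, and DB_VALUES / NO_RESULTS are optional. A minimal sketch of a parameterized read, again with hypothetical connection details:

from argparse import Namespace

# Hypothetical values; DB_VALUES fills the %s placeholder, and NO_RESULTS is
# omitted because we expect rows back.
args = Namespace(DB_HOST='localhost', DB_PORT=5432, DB_USER='readonly',
                 DB_PASS='secret', DB_NAME='actionkit',
                 DB_QUERY='SELECT email, subscription_status '
                          'FROM ak_moveon.core_user WHERE email = %s',
                 DB_VALUES=('one@example.com',))
rows = get_psql_results(args)  # list of dicts, one per matching row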
Example #5
            account_description(account) for account in declined_accounts
        ]
        args.SLACK_MESSAGE_TEXT = "The following pending Mobilize accounts "\
                                  "should be *declined* (not subscribed to "\
                                  "MoveOn's email list):\n"\
                                  + "\n".join(account_descriptions)
        notify_slack(args)
    elif len(approved_accounts) == 0:
        args.SLACK_MESSAGE_TEXT = "There are currently no pending Mobilize "\
                                  "accounts."
        notify_slack(args)
    if hasattr(args, 'VERBOSE') and args.VERBOSE:
        import pprint
        pprint.PrettyPrinter(indent=2).pprint({'approved': approved_accounts})
        pprint.PrettyPrinter(indent=2).pprint({'declined': declined_accounts})

    return "%s approved, %s declined" % (
        len(approved_accounts),
        len(declined_accounts)
    )


def aws_lambda(event, context) -> str:
    return run_from_lamba(
        post_report, DESCRIPTION, ARG_DEFINITIONS, REQUIRED_ARGS, event
    )


if __name__ == '__main__':
    run_from_cli(post_report, DESCRIPTION, ARG_DEFINITIONS, REQUIRED_ARGS)
Example #6
def add_county_codes(args):

    county_name_to_code = {}
    with open(args.COUNTIES, 'rt') as csvfile:
        csvreader = csv.DictReader(csvfile, delimiter="\t")
        for row in csvreader:
            county_name_to_code[row['name'].upper()] = row['code']

    with open(args.IN, 'rt') as csvfile:
        csvreader = csv.DictReader(csvfile)
        rows = [row for row in csvreader]

    with open(args.OUT, 'w', newline='') as csvfile:
        fieldnames = [
            'County Code', 'State', 'County', 'Name', 'Temp County', 'Address',
            'City', 'Zip', 'lat', 'lng', 'Hours', 'Type'
        ]
        writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
        writer.writeheader()
        for row in rows:
            # Look up the county code, falling back to "<NAME> COUNTY" when
            # the bare county name is not in the lookup table.
            row['County Code'] = county_name_to_code.get(
                row['County'].upper(),
                county_name_to_code.get(f"{row['County'].upper()} COUNTY", ''))
            writer.writerow(row)

    return args.OUT


if __name__ == '__main__':
    run_from_cli(add_county_codes, DESCRIPTION, ARG_DEFINITIONS, REQUIRED_ARGS)
Example #7
import slackweb

from pywell.entry_points import run_from_cli

DESCRIPTION = 'Send notification to Slack.'

ARG_DEFINITIONS = {
    'SLACK_WEBHOOK': 'Web hook URL for Slack.',
    'SLACK_CHANNEL': 'Slack channel to send to.',
    'SLACK_MESSAGE_TEXT': 'Text to send.'
}

REQUIRED_ARGS = ['SLACK_WEBHOOK', 'SLACK_CHANNEL', 'SLACK_MESSAGE_TEXT']


def notify_slack(args):
    slack = slackweb.Slack(url=args.SLACK_WEBHOOK)
    return slack.notify(text=args.SLACK_MESSAGE_TEXT,
                        channel=args.SLACK_CHANNEL)


if __name__ == '__main__':
    run_from_cli(notify_slack, DESCRIPTION, ARG_DEFINITIONS, REQUIRED_ARGS)
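
This is the same helper that post_report in the example above calls after composing SLACK_MESSAGE_TEXT. A minimal sketch of calling it directly, with placeholder webhook and channel values:

from argparse import Namespace

# Placeholder webhook URL and channel; slackweb posts the text to the
# configured incoming webhook.
args = Namespace(SLACK_WEBHOOK='https://hooks.slack.com/services/XXX/YYY/ZZZ',
                 SLACK_CHANNEL='#ops',
                 SLACK_MESSAGE_TEXT='There are currently no pending '
                                    'Mobilize accounts.')
notify_slack(args)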
Example #8
    pending_accounts = []
    # Page through the Mobilize users endpoint, 20 accounts at a time,
    # keeping only users still pending for the configured group.
    while not done:
        if hasattr(args, 'VERBOSE') and args.VERBOSE:
            print('getting users at offset %s' % offset)
        response = requests.get(
            '%susers?limit=20&offset=%s' % (args.MOBILIZE_API_ROOT, offset),
            auth=requests.auth.HTTPBasicAuth(args.MOBILIZE_API_KEY,
                                             args.MOBILIZE_API_SECRET))
        if hasattr(args, 'VERBOSE') and args.VERBOSE:
            print('status: %s' % response.status_code)
        if response.status_code == 200:
            users = response.json()
            count += len(users)
            pending_accounts += [
                user for user in users if user_is_pending_for_group(
                    user, args.MOBILIZE_DEFAULT_GROUP_ID)
            ]
            if len(users) < 20:
                # A short page means we've reached the last page of results.
                done = True
            else:
                offset += 20
        else:
            # Stop paging on any non-200 response.
            done = True
    return pending_accounts


if __name__ == '__main__':
    run_from_cli(get_pending_accounts, DESCRIPTION, ARG_DEFINITIONS,
                 REQUIRED_ARGS)