Exemplo n.º 1
0
def main():
    """Pull recent tweets for each 2016 presidential candidate and store
    them in per-candidate MongoDB collections.

    Relies on module-level helpers `get_tweets` / `into_mongo` and on
    `cnfg`, `OAuth1`, `MongoClient` imported elsewhere in the file.
    """
    # define twitter authentication keys
    config = cnfg.load(".twitter_config")
    oauth = OAuth1(config["consumer_key"], config["consumer_secret"],
                   config["access_token"], config["access_token_secret"])

    # define screen names for candidates
    candidates = {
        'clinton': 'hillaryclinton',
        'trump': 'realdonaldtrump',
        'rubio': 'marcorubio',
        'sanders': 'berniesanders',
        'cruz': 'tedcruz'
    }

    # define mongodb parameters
    # BUG FIX: `db` must exist before `coll_dict` is built; the original
    # referenced db.trump etc. before `db = client.tweets` was assigned,
    # which raises UnboundLocalError at runtime.
    client = MongoClient()
    db = client.tweets
    coll_dict = {
        'trump': db.trump,
        'clinton': db.clinton,
        'sanders': db.sanders,
        'cruz': db.cruz,
        'rubio': db.rubio
    }

    # get tweets and insert into mongodb
    max_calls = 180    # Twitter REST rate limit per 15-minute window
    max_tweets = 3500  # NOTE(review): unused below — confirm intent
    for cand in candidates:
        results = get_tweets(candidates[cand], since_id=None)
        into_mongo(coll_dict[cand], results)
Exemplo n.º 2
0
def authentify(config_file=".twitter_config"):
    """Build an authenticated tweepy OAuth handler.

    BUG FIX: the original ignored its `config_file` argument and always
    loaded ".twitter_config"; the parameter is now honoured, with the old
    path kept as the default for backward compatibility.

    :param config_file: path to a cnfg-readable file with the four
        Twitter OAuth keys.
    :returns: tweepy.OAuthHandler with the access token set.
    """
    config = cnfg.load(config_file)

    auth = tweepy.OAuthHandler(config["consumer_key"],
                               config["consumer_secret"])
    auth.set_access_token(config["access_token"],
                          config["access_token_secret"])
    return auth
Exemplo n.º 3
0
def authentify(config_file=".twitter_config"):
    """Build an authenticated tweepy OAuth handler.

    BUG FIX: the original ignored its `config_file` argument and always
    loaded ".twitter_config"; the parameter is now honoured, with the old
    path kept as the default for backward compatibility.

    :param config_file: path to a cnfg-readable file with the four
        Twitter OAuth keys.
    :returns: tweepy.OAuthHandler with the access token set.
    """
    config = cnfg.load(config_file)

    auth = tweepy.OAuthHandler(config["consumer_key"],
                               config["consumer_secret"])
    auth.set_access_token(config["access_token"],
                          config["access_token_secret"])
    return auth
Exemplo n.º 4
0
def main():
    """
    Store kiva loan, lender, and borrower data into
    `loan`, `loan_lender`, `loan_borrower` tables in database
    """
    # Build the Postgres engine from .metis_config credentials; the
    # database name is hard-coded to 'ubuntu'.
    config = cnfg.load(".metis_config")
    engine = create_engine('postgresql://{}:{}@{}:5432/{}'.format(
        config['db_user'], config['db_pwd'], config['db_host'], 'ubuntu'))

    conn = engine.connect()
    metadata = MetaData()

    # Flattened loan records, one row per Kiva loan.
    # NOTE(review): 'funded_date' is declared Integer while the other
    # *_date columns are Date — confirm this matches the loader.
    loan_table = Table(
        'loan', metadata, Column('id', Integer, primary_key=True),
        Column('name', String), Column('funded_amount', Integer),
        Column('funded_date', Integer), Column('status', String),
        Column('planned_expiration_date', Date), Column('posted_date', Date),
        Column('sector', String), Column('activity', String),
        Column('loan_amount', Integer), Column('lender_count', Integer),
        Column('location_country_code', String),
        Column('location_country', String), Column('location_geo_level',
                                                   String),
        Column('location_geo_type', String), Column('location_geo_lat', Float),
        Column('location_geo_long', Float), Column('location_town', String),
        Column('partner_id', Integer),
        Column('bonus_credit_eligibility', Boolean),
        Column('description_en', String), Column('use_text', String),
        Column('tag_text', String), Column('terms_disbursal_amount', Integer),
        Column('terms_disbursal_currency', String),
        Column('terms_disbursal_date', Date),
        Column('terms_loan_amount', Integer),
        Column('terms_loss_liability_currency_exchange', String),
        Column('terms_loss_liability_nonpayment', String),
        Column('terms_repayment_term', Integer),
        Column('journal_totals_entries', Integer),
        Column('file_index', Integer), Column('payments', String))

    # Join table: which lenders funded which loan.
    loan_lender_table = Table('loan_lender', metadata,
                              Column('loan_id', Integer),
                              Column('lender_id', String),
                              Column('file_index', Integer))

    # Join table: borrowers attached to each loan.
    loan_borrower_table = Table('loan_borrower', metadata,
                                Column('loan_id', Integer),
                                Column('first_name', String),
                                Column('last_name', String),
                                Column('pictured', Boolean),
                                Column('gender', String),
                                Column('file_index', Integer))

    # Process files in `kiva_ds_json` directory
    for file_index in range(10, 150):
        print('-- File ', file_index)
        process_file_loans(file_index, conn, loan_table, loan_lender_table,
                           loan_borrower_table)
Exemplo n.º 5
0
def kiva_api(api_url):
    """Perform an OAuth1-signed GET against the Kiva API.

    :param api_url: full URL of the Kiva endpoint to hit.
    :returns: the response body as text.
    """
    keys = cnfg.load(".metis_config")['kiva_api']
    signer = OAuth1(keys['consumer_key'],
                    client_secret=keys['consumer_secret'],
                    resource_owner_key=keys['resource_owner_key'],
                    resource_owner_secret=keys['resource_owner_secret'])
    return requests.get(url=api_url, auth=signer).text
Exemplo n.º 6
0
def connect_twitter(config=cnfg.load(home + twitterConfigFile)):
    """
    load twitter app configs and return api object
    """
    try:
        auth = tweepy.OAuthHandler(config['consumer_key'],
                                   config['consumer_secret'])
        auth.set_access_token(config['access_token'],
                              config['access_token_secret'])
        api = tweepy.API(auth)
        return api
    except tweepy.TweepError as e:
        print e.message[0]['code']
        print e.args[0][0]['code']
Exemplo n.º 7
0
def connect_twitter(config=cnfg.load(home + twitterConfigFile)):
    """
    load twitter app configs and return api object
    """
    try:
        auth = tweepy.OAuthHandler(config['consumer_key'], 
                                   config['consumer_secret'])
        auth.set_access_token(config['access_token'],
                              config['access_token_secret'])
        api = tweepy.API(auth)
        return api
    except tweepy.TweepError as e:
        print e.message[0]['code']
        print e.args[0][0]['code']
Exemplo n.º 8
0
    def setUp(self):
        """Patch the filesystem/config helpers that cnfg.load touches so
        the test never reads a real file."""
        # os.path.join is stubbed so the config filename is deterministic.
        patch_os_path_join = mock.patch('os.path.join')
        self.addCleanup(patch_os_path_join.stop)
        self.mock_os_path_join = patch_os_path_join.start()
        self.mock_os_path_join.return_value = '~/a_file'

        # expanduser resolves the stubbed '~/a_file' to an absolute path.
        patch_os_path_expanduser = mock.patch('os.path.expanduser')
        self.addCleanup(patch_os_path_expanduser.stop)
        self.mock_os_path_expanduser = patch_os_path_expanduser.start()
        self.mock_os_path_expanduser.return_value = '/home/a_user/a_file'

        # cnfg.contents_of yields a literal dict repr, like a real config.
        patch_contents_of = mock.patch('cnfg.contents_of')
        self.addCleanup(patch_contents_of.stop)
        self.mock_contents_of = patch_contents_of.start()
        self.mock_contents_of.return_value = "{'a': 1,}"

        # Exercise the unit under test once; assertions live in the tests.
        self.result = cnfg.load('a_file')
Exemplo n.º 9
0
def stream_tweet(query, collection):
    """Stream live tweets matching `query` and store selected fields in
    the given MongoDB `collection`. Blocks indefinitely.
    """
    config = cnfg.load("/home/ubuntu/Documents/.twitter_config")

    auth = tweepy.OAuthHandler(config["consumer_key"],
                               config["consumer_secret"])
    auth.set_access_token(config["access_token"],
                          config["access_token_secret"])
    api = tweepy.API(auth)

    class StreamListener(tweepy.StreamListener):
        def on_status(self, status):

            # store tweet and creation date
            data = {}
            data['id'] = status.id
            data['datetime'] = status.created_at
            # BUG FIX: bare `except:` also swallowed KeyboardInterrupt /
            # SystemExit; only the missing-attribute case is expected here
            # (short tweets carry no `extended_tweet` payload).
            try:
                data['text'] = status.extended_tweet['full_text']
            except AttributeError:
                data['text'] = status.text
            data['entities'] = status.entities
            data['reply_to_tweet'] = status.in_reply_to_status_id
            data['user'] = status.user.id
            data['retweet_count'] = status.retweet_count
            data['favorite_count'] = status.favorite_count
            #data['possibly_sensitive'] = status.possibly_sensitive
            data['lang'] = status.lang

            # insert into db — deliberately best-effort, but narrowed from
            # a bare except so interrupts still propagate
            try:
                collection.insert_one(data)
            except Exception:
                pass

    stream_listener = StreamListener()
    stream = tweepy.Stream(auth=api.auth, listener=stream_listener)
    stream.filter(track=[query])
Exemplo n.º 10
0
import cnfg

# Minimal cnfg demo: read the example config file (resolved relative to
# the user's home directory by cnfg) and echo its "message" entry.
settings = cnfg.load('.examplerc')

print(settings['message'])
Exemplo n.º 11
0
from flask import Flask, request, jsonify, render_template
from bson import json_util
from bson.objectid import ObjectId
import json
import cnfg
import pymongo

app = Flask(__name__)

#-------Connect to MongoDB hosted on AWS-------#

# NOTE(review): 'SECRET_KEY' here holds the credentials/host portion of a
# Mongo URI, not a Flask secret key — confirm the config naming.
config = cnfg.load('.fletcher_app_config')
connect_string = "mongodb://"+ config['SECRET_KEY']
connection = pymongo.MongoClient(connect_string)
db = connection.debates
#----------------------------------------------#

@app.route('/debates/')
def index():
    """Render the application homepage."""
    return render_template('index.html')

def toJson(data):
    """Serialize Mongo document(s) to a JSON string (ASCII-safe, with
    BSON types handled via json_util)."""
    return json.dumps(data,
                      default=json_util.default,
                      ensure_ascii=True)

@app.route('/debates/<debate_name>',methods=["GET"])
def get_debates(debate_name):
    """Return a list of all words for a given debate
    #ex) GET /debates/?limit=10&offset=20
Exemplo n.º 12
0
import unirest
import time
import webhose
from elasticsearch import Elasticsearch
import json
import cnfg
import pandas as pd
from nltk.tokenize import sent_tokenize
from vaderSentiment.vaderSentiment import sentiment as vaderSentiment
import datetime

# Webhose API credentials: read the token and register it with the client.
config = cnfg.load("/home/ubuntu/dfsharp/.webhoser_config")
tok = config["token"]
webhose.config(token=tok)

# In[3]:


def get_archive():
    """Query the webhose.io archive for "MLB DFS" articles.

    :returns: the raw unirest response object.
    """
    # BUG FIX: `yesterday`/`unix_time` were computed but never used — they
    # belonged to the commented-out timestamp-filtered query, so the dead
    # computation is dropped. Timestamp-filtered alternative:
    #   unix_time = (datetime.date.today()
    #                - datetime.timedelta(hours=24)).strftime("%s")
    #   unirest.get("https://webhose.io/search?token=" + tok
    #               + "&format=json&q=MLB+DFS+ts=" + unix_time, ...)
    response = unirest.get(
        "https://webhose.io/search?token=" + tok + "&format=json&q=MLB%20DFS",
        headers={"Accept": "application/json"})

    return (response)

Exemplo n.º 13
0
import requests
import cnfg
from fuzzywuzzy import fuzz
from fuzzywuzzy import process
from collections import defaultdict
from collections import OrderedDict
from fuzzywuzzy import fuzz
from fuzzywuzzy import process
from proj_elastic import InsertLogs
from sklearn import ensemble
from sklearn.preprocessing import scale
from sklearn.metrics import mean_squared_error


# In[2]:
config = cnfg.load("/home/ubuntu/dfsharp/.rotoguru_config")
url = config["url"]


def daily_download():
    """Download the season-long RotoGuru box-score log and tidy it.

    NOTE(review): this function appears truncated in this chunk (no
    return is visible) and relies on `pd` (pandas) being imported
    elsewhere — confirm against the full file.
    """
    # read in the user and key from config file
    # read in daily update of season long box scores
    df = pd.read_csv(url, sep=':')

    # format date as index, reset and sort old to new
    df.index = [pd.to_datetime(str(x), format='%Y%m%d') for x in df.Date]
    df.reset_index(inplace=True)
    # legacy API: DataFrame.sort() was removed in pandas 0.20 — presumably
    # this ran on an old pandas; the modern equivalent is sort_values().
    df = df.sort(['index', 'Team'], ascending=[1, 1])

    # cut off note row
    df = df[1:]
Exemplo n.º 14
0
'''Initialize tweets database for each user'''

# Set up twitter access
# pip install tweepy
import cnfg
import tweepy
config = cnfg.load(".twitter_config_old")
auth = tweepy.OAuthHandler(config["consumer_key"], config["consumer_secret"])
auth.set_access_token(config["access_token"], config["access_token_secret"])
# Long-running harvest client: sleep through rate limits and retry
# failed calls (6 attempts, 3 minutes apart).
api = tweepy.API(auth,
                 wait_on_rate_limit=True,
                 wait_on_rate_limit_notify=True,
                 retry_delay=3 * 60,
                 retry_count=6)

# Set up database
from pymongo import MongoClient
client = MongoClient()
db = client.retweets

# Set max tweets for initialization and update
# NOTE(review): `max_tweetsat_init` looks like a typo of
# `max_tweets_at_init`/`max_tweets_update` — confirm against usage below.
max_tweets_init = 5000
max_tweetsat_init = 200

# List of twitter accounts to track
accounts = ['@andreatantaros']

from datetime import datetime
# Python 2 print statement (kept as-is).
print "### Initialising users %s \nat %s" % (str(accounts), datetime.now())

# Clear db if it exists
Exemplo n.º 15
0
def make_db_conn():
    """Open a connection to the `sephora` Postgres database using the
    credentials stored in .metis_config."""
    cfg = cnfg.load(".metis_config")
    uri = 'postgresql://{}:{}@{}:5432/{}'.format(
        cfg['db_user'], cfg['db_pwd'], cfg['db_host'], 'sephora')
    return create_engine(uri).connect()
Exemplo n.º 16
0
# Load cached artifacts produced by earlier pipeline steps.
# NOTE(review): text-mode 'r' pickle reads imply Python 2; under
# Python 3 these must be 'rb'.
with open('dict_list.pkl', 'r') as picklefile:
    dict_list = pickle.load(picklefile)
with open('master_ids.pkl', 'r') as picklefile:
    master_ids = pickle.load(picklefile)
with open('daily_projections.pkl', 'r') as picklefile:
    daily_projections = pickle.load(picklefile)
with open('player_dict.pkl', 'r') as picklefile:
    player_dict = pickle.load(picklefile)

# Projections roll over around noon: before 12 (hour <= 11), yesterday's
# slate is still the latest available one.
date_string = date.today().strftime("%m-%d-%Y")
yesterday_string = (date.today() - timedelta(1)).strftime("%m-%d-%Y")
now = datetime.now()
latest = date_string if now.hour > 11 else yesterday_string
today = daily_projections[latest]

# Postgres connection string comes from the local .psql_config file.
config_db = cnfg.load(".psql_config")
connection = config_db['connection']
Base = declarative_base()


class Player(Base):

    # __tablename__ = 'Player_Proj_' + '12-11-2015'
    __tablename__ = 'Player_Projections_' + date_string

    Index = Column(Integer, primary_key=True)
    Player = Column(String)
    Team = Column(String)
    Date = Column(Date)
    Time = Column(String)
    GameTime = Column(String)
Exemplo n.º 17
0
# Scrape the zip-code listing page and flatten the comma-separated
# zip strings into a list of ints.
response = requests.get(url0)
page = response.text
soup = BeautifulSoup(page)
zip_code_ls = []
for zip_code in soup.findAll(headers="header3"):
    zip_code_ls = zip_code_ls + str(zip_code.getText()).split(",")
# NOTE(review): map() returning a list (and list.remove on it) implies
# Python 2; under Python 3 this needs list(map(...)).
zip_code = map(int, zip_code_ls)
# Exclude zip 11695 — presumably out of scope for this scrape; confirm.
zip_code.remove(11695)

#######################
##
##  Get Data from MongoDB
##
#######################

# Yelp OAuth1 credentials (note the capitalised key names in this config).
config = cnfg.load(".yelp_config")

oauth = OAuth1(config["Consumer_Key"], config["Consumer_Secret"], config["Token"], config["Token_Secret"])

client = MongoClient()
bus_NY_db = client.bus_NY_db
bus_NY_col = bus_NY_db.bus_NY2

#######################
##
##  Get API for each zip code
##
#######################

zip_code_pass2 = []
for zip_cd in zip_code:
Exemplo n.º 18
0
import cnfg
import twitter
config = cnfg.load(".twitter_config_whosyodata")

# python-twitter argument order: access-token pair first, consumer pair
# second (differs from tweepy).
auth = twitter.oauth.OAuth(config["access_token"],
                           config["access_token_secret"],
                           config["consumer_key"], config["consumer_secret"])

# Set up database access
from pymongo import MongoClient
client = MongoClient()
db = client.retweets

from datetime import datetime
# Python 2 print statement (kept as-is).
print "### Saving US and World trends at %s" % datetime.now()

# Yahoo! Where-On-Earth ids used by the trends endpoint.
WORLD_WOE_ID = 1
US_WOE_ID = 23424977
twitter_api = twitter.Twitter(auth=auth)
world_trends = twitter_api.trends.place(_id=WORLD_WOE_ID)[0]
us_trends = twitter_api.trends.place(_id=US_WOE_ID)[0]

# Persist one snapshot of each trend list per run.
db.us_trends.insert_one(us_trends)
db.world_trends.insert_one(world_trends)

print 'Done updating trends'
Exemplo n.º 19
0
import requests
from os.path import expanduser
import cnfg

# Resolve the user's home directory and read the Lyft API credentials.
home = expanduser("~")
config = cnfg.load(home + "/.lyft_config")

client_id = config['client_id']
client_secret = config['client_secret']

def lyft_token_request(client_id, client_secret):
	headers = {
	    'Content-Type': 'application/json'
	}

	data = '{"grant_type": "client_credentials", "scope": "public"}'

	r = requests.post('https://api.lyft.com/oauth/token',
	                 headers=headers,
	                 auth = (client_id,client_secret),
	                 data=data)
	
	token_dict = {}
	token_dict['access_token'] = str(r.json()['access_token'])
	
	print token_dict


# Script entry point: request a Lyft OAuth token once.
if __name__ == '__main__':
	lyft_token = lyft_token_request(client_id,client_secret)
Exemplo n.º 20
0
import re
from sklearn.preprocessing import scale


# In[2]:


'''
it loads latest model and todays_players CSV, dynamically determines
starters, generates today's projections, and pushes them up to elasticsearch

---
also generates today's optimal lineup and pushes THAT to elasticsearch
'''

# Twitter credentials: the same key set builds both a requests-oauthlib
# signer (raw REST calls) and a tweepy API client.
config = cnfg.load("/home/ubuntu/dfsharp/.twitter_config")
oauth = OAuth1(config["consumer_key"],
               config["consumer_secret"],
               config["access_token"],
               config["access_token_secret"])

auth = tweepy.OAuthHandler(config["consumer_key"],
                           config["consumer_secret"])
auth.set_access_token(config["access_token"],
                      config["access_token_secret"])

api = tweepy.API(auth)

emoticons_str = r"""
    (?:
        [:=;] # Eyes
Exemplo n.º 21
0
# Load the lookup tables built by the roster-scraping step.
# NOTE(review): text-mode 'r' pickle reads imply Python 2.
with open('depth_ids.pkl', 'r') as picklefile:
     depth_ids = pickle.load(picklefile)

with open('player_to_team.pkl', 'r') as picklefile:
     player_to_team = pickle.load(picklefile)

with open('depth_to_teams.pkl', 'r') as picklefile:
     depth_to_teams = pickle.load(picklefile)


date_string = date.today().strftime("%m-%d-%Y")

players = player_dict.values()
player_updates = defaultdict(list)

# Twitter credentials; the same keys feed both the requests-oauthlib
# signer and the tweepy client.
config = cnfg.load("ds/metis/ds5_Greg/projects/05-kojak/.twitter_develop")
oauth = OAuth1(config["consumer_key"],
               config["consumer_secret"],
               config["access_token"],
               config["access_token_secret"])


auth = tweepy.OAuthHandler(config["consumer_key"],
                           config["consumer_secret"])
auth.set_access_token(config["access_token"],
                      config["access_token_secret"])

api = tweepy.API(auth)

emoticons_str = r"""
    (?:
   u'tag_type': u'LocationTag'}],
 u'logo_url': u'https://d1qb2nb5cznatu.cloudfront.net/startups/i/221-5be033086a9e3a91dd279cb38435b165-medium_jpg.jpg?buster=1315770973',
 u'markets': [],
 u'name': u'Mintbox',
 u'product_desc': u'Mintbox has a unique approach to tracking in-store sales from online media and offers name-brand retailers a scalable pay-for-performance marketing solution to acquire customers and increase customer value through discreet, targeted private sales. \n\nAs a result, users of Mintbox gain access to personalized private sales and earned discounts, both in-store and online, from top retail brands.\n\nCompared to the glut of deal sites, Mintbox works with bigger, more attractive retail brands.',
 u'quality': 2,
 u'screenshots': [],
 u'status': None,
 u'thumb_url': u'https://d1qb2nb5cznatu.cloudfront.net/startups/i/221-5be033086a9e3a91dd279cb38435b165-thumb_jpg.jpg?buster=1315770973',
 u'twitter_url': u'',
 u'updated_at': u'2013-08-03T19:59:54Z',
 u'video_url': u''}
'''

# angellist config
# OAuth client credentials + token for the AngelList API, plus the two
# API roots (plain REST vs OAuth endpoints).
config = cnfg.load('.angelco_config')
client_id = config['client_id']
client_secret = config['client_secret']
access_token = config['access_token']

_C_API_BEGINNING = 'https://api.angel.co'
_OAUTH_API_BEGINNING = 'https://angel.co/api'
_API_VERSION = 1


def main():
    companies = {}
    errors = {}
    t0 = time()
    limit = 1900  #reduced limit if running after testing
    progress = ProgressBar()
Exemplo n.º 23
0
# Github Doc: https://github.com/Yelp/yelp-python
from yelp.client import Client
from yelp.oauth1_authenticator import Oauth1Authenticator
import cnfg

settings = cnfg.load('data-scrubber/python/.gms_config')

# Yelp Authentication
auth = Oauth1Authenticator(consumer_key=settings['consumer_key'],
                           consumer_secret=settings['consumer_secret'],
                           token=settings['token'],
                           token_secret=settings['token_secret'])

client = Client(auth)

# search parameter
params = {'term': 'food', 'lang': 'en'}

# Sample query; pullHood below performs the per-neighborhood searches.
response = client.search('Fort Greene, Brooklyn', **params)


# Parse Search Results
def pullHood(hood):
    response = client.search(hood, **params)

    for i in range(len(response.businesses)):
        #business_id = response.businesses[i].id
        business_list = {
            'id': response.businesses[i].id,
            'is_claimed': response.businesses[i].is_claimed,
            'is_closed': response.businesses[i].is_closed,
Exemplo n.º 24
0
from sqlalchemy.orm import sessionmaker
from sqlalchemy.sql import exists
from sqlalchemy.exc import IntegrityError
from instagram.client import InstagramAPI
from instagram.bind import InstagramAPIError

from db_setup import Base, InstagramUser, Media, Follower

# Configuring the database connection.
# Configuring the database connection.
engine = create_engine('postgresql://localhost/metis_adsthetic')
Base.metadata.bind = engine
DBSession = sessionmaker(bind = engine)
session = DBSession()

# Configuring the instagram api client.
# Client-only (no user token) access: public endpoints only.
config = cnfg.load(".instagram_config")
api = InstagramAPI(client_id=config['CLIENT_ID'], 
				   client_secret=config['CLIENT_SECRET'])

################################################################
#################      Helper functions        #################
################################################################

def get_user_id(username):
	"""Search Instagram for `username` and return its numeric id
	as a string."""
	matches = api.user_search(q=username)
	lookup = {}
	for user in matches:
		lookup[user.__dict__['username']] = user.id
	return str(lookup[username])

def rate_limit_check():
Exemplo n.º 25
0
# Load the lookup tables built by the roster-scraping step.
# NOTE(review): text-mode 'r' pickle reads imply Python 2.
with open('depth_ids.pkl', 'r') as picklefile:
    depth_ids = pickle.load(picklefile)

with open('player_to_team.pkl', 'r') as picklefile:
    player_to_team = pickle.load(picklefile)

with open('depth_to_teams.pkl', 'r') as picklefile:
    depth_to_teams = pickle.load(picklefile)

date_string = date.today().strftime("%m-%d-%Y")

players = player_dict.values()
player_updates = defaultdict(list)

# Twitter credentials; the same keys feed both the requests-oauthlib
# signer and the tweepy client.
config = cnfg.load("ds/metis/ds5_Greg/projects/05-kojak/.twitter_develop")
oauth = OAuth1(config["consumer_key"], config["consumer_secret"],
               config["access_token"], config["access_token_secret"])

auth = tweepy.OAuthHandler(config["consumer_key"], config["consumer_secret"])
auth.set_access_token(config["access_token"], config["access_token_secret"])

api = tweepy.API(auth)

# Verbose regex fragment matching common text emoticons.
emoticons_str = r"""
    (?:
        [:=;] # Eyes
        [oO\-]? # Nose (optional)
        [D\)\]\(\]/\\OpP] # Mouth
    )"""
Exemplo n.º 26
0
# Scrape the zip-code listing page and flatten the comma-separated
# zip strings into a list of ints.
response = requests.get(url0)
page = response.text
soup = BeautifulSoup(page)
zip_code_ls = []
for zip_code in soup.findAll(headers="header3"):
    zip_code_ls = zip_code_ls + str(zip_code.getText()).split(',')
# NOTE(review): map() returning a list (and list.remove on it) implies
# Python 2; under Python 3 this needs list(map(...)).
zip_code = map(int, zip_code_ls)
# Exclude zip 11695 — presumably out of scope for this scrape; confirm.
zip_code.remove(11695)

#######################
##
##  Get Data from MongoDB
##
#######################

# Yelp OAuth1 credentials (note the capitalised key names in this config).
config = cnfg.load(".yelp_config")

oauth = OAuth1(config["Consumer_Key"], config["Consumer_Secret"],
               config["Token"], config["Token_Secret"])

client = MongoClient()
bus_NY_db = client.bus_NY_db
bus_NY_col = bus_NY_db.bus_NY2

#######################
##
##  Get API for each zip code
##
#######################

zip_code_pass2 = []
Exemplo n.º 27
0
import unirest
import time
import webhose
from elasticsearch import Elasticsearch
import json
import cnfg
import pandas as pd
from nltk.tokenize import sent_tokenize
from vaderSentiment.vaderSentiment import sentiment as vaderSentiment


# Webhose API credentials: read the token and register it with the client.
config = cnfg.load("/home/ubuntu/dfsharp/.webhoser_config")
tok = config["token"]
webhose.config(token=tok)

# In[3]:


def get_archive():
    """Fetch webhose.io search results for the "NBA DFS" query.

    :returns: the raw unirest response object.
    """
    query_url = ("https://webhose.io/search?token=" + tok +
                 "&format=json&q=NBA+DFS")
    response = unirest.get(query_url,
                           headers={"Accept": "application/json"})
    return response


# In[4]:
Exemplo n.º 28
0
    def on_data(self, data):
        """Handle one raw stream payload: decode the JSON and store
        English-language tweets in the tweetdb collection."""
        try:
            tweet = json.loads(data)
            # We only want to store tweets in English.
            if "lang" in tweet and tweet["lang"] == "en":
                self.db.tweetdb.insert(tweet)
        except Exception:
            # Malformed payloads (and insert failures) are reported but
            # must not kill the stream.
            print("Error in parsing data {}".format(data))


#For macbook
# config = cnfg.load("/Users/bholligan/.ssh/.twitter_config")
#For ubuntu
config = cnfg.load("/home/bholligan/.twitter_config")

auth = tweepy.OAuthHandler(config["consumer_key"], config["consumer_secret"])
auth.set_access_token(config["access_token"], config["access_token_secret"])

# NOTE(review): TweetListener is expected to be defined earlier in the
# full file (the on_data handler above belongs to it) — not visible here.
tweet_listener = TweetListener()
tweet_stream = tweepy.Stream(auth=auth, listener=tweet_listener)

# Track Rio 2016 Olympics keywords; this call blocks indefinitely.
tweet_stream.filter(track=[
    '#rio2016', "#olympics", '#RioOlympics2016', 'olympics', '#nbcolympics',
    '#bbcolympics', 'rio olympics'
])
Exemplo n.º 29
0
import time
import json
import oauth2 as oauth
import cnfg
"""
Run this script one time to get
`resource_owner_key` and `resource_owner_secret` to use
Kiva API
"""

config_kiva = cnfg.load(".metis_config")['kiva_api']

# Get consumer_key and consumer_secret from Kiva Developers Site
consumer_key = config_kiva['consumer_key']
consumer_secret = config_kiva['consumer_secret']

# If you have set up a callback URL at build.kiva.org, you should enter it
# below. Otherwise leave it as 'oob'
callback_url = 'oob'

# This is the URL of the protected resource you want to access
resource_url = 'https://api.kivaws.org/v1/my/account.json'

# These should stay the same, probably
request_token_url = 'https://api.kivaws.org/oauth/request_token.json?oauth_callback=oob'
authorize_url = 'https://www.kiva.org/oauth/authorize?response_type=code&oauth_callback=oob' + \
    '&client_id=' + consumer_key + '&scope=access'
access_token_url = 'https://api.kivaws.org/oauth/access_token'

# Leave everything below this line alone
consumer = oauth.Consumer(consumer_key, consumer_secret)
Exemplo n.º 30
0
from pymongo import MongoClient
import requests
from requests_oauthlib import OAuth1
import cnfg
import time
from random import randint
import os
import inspect, os

# Derive the directory containing this script by stripping the filename
# from the inspected module path.
cwd = inspect.getfile(inspect.currentframe())

file_name = 'pull_tweets.py'

cwd = cwd[0:len(cwd)-len(file_name)]

config = cnfg.load(".twitter_develop")
#cwd = os.getcwd()

oauth = OAuth1(config["consumer_key"],
               config["consumer_secret"],
               config["access_token"],
               config["access_token_secret"])

# Map account name -> [col1, col2] from the accounts CSV.
# NOTE(review): `csv` is used here but is not among the visible imports —
# confirm it is imported elsewhere in the full file.
twitter_dict = {}
with open(cwd + 'twitter_accounts.csv', 'rb') as csvfile:
    twitter_accounts = csv.reader(csvfile, delimiter=',')
    for row in twitter_accounts:
        twitter_dict[row[0]] = [row[1], row[2]]
        
client = MongoClient()
db = client.basketball
   u'tag_type': u'LocationTag'}],
 u'logo_url': u'https://d1qb2nb5cznatu.cloudfront.net/startups/i/221-5be033086a9e3a91dd279cb38435b165-medium_jpg.jpg?buster=1315770973',
 u'markets': [],
 u'name': u'Mintbox',
 u'product_desc': u'Mintbox has a unique approach to tracking in-store sales from online media and offers name-brand retailers a scalable pay-for-performance marketing solution to acquire customers and increase customer value through discreet, targeted private sales. \n\nAs a result, users of Mintbox gain access to personalized private sales and earned discounts, both in-store and online, from top retail brands.\n\nCompared to the glut of deal sites, Mintbox works with bigger, more attractive retail brands.',
 u'quality': 2,
 u'screenshots': [],
 u'status': None,
 u'thumb_url': u'https://d1qb2nb5cznatu.cloudfront.net/startups/i/221-5be033086a9e3a91dd279cb38435b165-thumb_jpg.jpg?buster=1315770973',
 u'twitter_url': u'',
 u'updated_at': u'2013-08-03T19:59:54Z',
 u'video_url': u''}
'''

# angellist config
# OAuth client credentials + token for the AngelList API, plus the two
# API roots (plain REST vs OAuth endpoints).
config = cnfg.load('.angelco_config')
client_id = config['client_id']
client_secret = config['client_secret']
access_token = config['access_token']

_C_API_BEGINNING = 'https://api.angel.co'
_OAUTH_API_BEGINNING = 'https://angel.co/api'
_API_VERSION = 1


def main():
    companies = {}
    errors = {}
    t0 = time()
    limit = 1900 #reduced limit if running after testing
    progress = ProgressBar()
Exemplo n.º 32
0
import cnfg
import tweepy
from requests_oauthlib import OAuth1
import re
from sklearn.preprocessing import scale

# In[2]:
'''
it loads latest model and todays_players CSV, dynamically determines
starters, generates today's projections, and pushes them up to elasticsearch

---
also generates today's optimal lineup and pushes THAT to elasticsearch
'''

# Twitter credentials: the same key set builds both a requests-oauthlib
# signer (raw REST calls) and a tweepy API client.
config = cnfg.load("/home/ubuntu/dfsharp/.twitter_config")
oauth = OAuth1(config["consumer_key"], config["consumer_secret"],
               config["access_token"], config["access_token_secret"])

auth = tweepy.OAuthHandler(config["consumer_key"], config["consumer_secret"])
auth.set_access_token(config["access_token"], config["access_token_secret"])

api = tweepy.API(auth)

# Verbose regex fragment matching common text emoticons.
emoticons_str = r"""
    (?:
        [:=;] # Eyes
        [oO\-]? # Nose (optional)
        [D\)\]\(\]/\\OpP] # Mouth
    )"""
            try:
                d = {el: users[u][el] for el in columns}
                d['latinx'] = 1 if latinx else 0
                d['todes'] = 0 if latinx else 1
                df = df.append(d, ignore_index=True)
            except Exception as ex:
                print(ex)
                print("exception at batch " + str(i))
                print("exception at u " + str(u))
                raise  # re-raise exception - interrupt


# ---------------------------------------------------------------------------- #
# Authorize twitter API
# ---------------------------------------------------------------------------- #
# Build an OAuth1 signer from the local Twitter credentials file.
config = cnfg.load("/Users/katie/.twitter_config")

oauth = OAuth1(config["consumer_key"], config["consumer_secret"],
               config["access_token"], config["access_token_secret"])

# ---------------------------------------------------------------------------- #
# Create a big ol' dataframe to start
# ---------------------------------------------------------------------------- #
columns = [
    'id_str', 'name', 'screen_name', 'location', 'description', 'url',
    'entities', 'protected', 'followers_count', 'friends_count',
    'listed_count', 'created_at', 'verified', 'statuses_count', 'lang'
]

global df
# BUG FIX: list.extend() mutates in place and returns None, so the original
# `columns.copy().extend([...])` passed columns=None to the DataFrame and
# every column (including the two label columns) was silently dropped.
df = pd.DataFrame(columns=columns + ['latinx', 'todes'])
Exemplo n.º 34
0
     dict_list = pickle.load(picklefile)
# Load cached artifacts produced by earlier pipeline steps.
# NOTE(review): text-mode 'r' pickle reads imply Python 2.
with open('master_ids.pkl', 'r') as picklefile:
     master_ids = pickle.load(picklefile)
with open('daily_projections.pkl', 'r') as picklefile:
     daily_projections = pickle.load(picklefile)
with open('player_dict.pkl', 'r') as picklefile:
     player_dict = pickle.load(picklefile)


# Projections roll over around noon: before 12 (hour <= 11), yesterday's
# slate is still the latest available one.
date_string = date.today().strftime("%m-%d-%Y")
yesterday_string = (date.today() - timedelta(1)).strftime("%m-%d-%Y")
now = datetime.now()
latest = date_string if now.hour > 11 else yesterday_string
today = daily_projections[latest]

# Postgres connection string comes from the local .psql_config file.
config_db = cnfg.load(".psql_config")
connection = config_db['connection']
Base = declarative_base()


class Player(Base):

    # __tablename__ = 'Player_Proj_' + '12-11-2015'
    __tablename__ = 'Player_Projections_' + date_string

    
    Index = Column(Integer, primary_key = True)
    Player = Column(String)
    Team = Column(String)
    Date = Column(Date)
    Time = Column(String)
Exemplo n.º 35
0
import requests
from os.path import expanduser
import cnfg
import requests
import time
import pickle
import pandas as pd

# Read Uber and Lyft API credentials from dotfiles in the home directory.
home = expanduser("~")
uber_config = cnfg.load(home + "/.uber_config")
lyft_config = cnfg.load(home + "/.lyft_token")

def uber_request_price(start_location, start_latitude, start_longitude, end_location, end_latitude, end_longitude):
    """Query the Uber price-estimate endpoint for a start/end pair.

    The *_location name arguments are accepted for interface parity with
    the other request helpers but are not sent to the API.

    :returns: decoded JSON response (dict).
    """
    params = {
        'server_token': uber_config['server_token'],
        'start_latitude': start_latitude,
        'start_longitude': start_longitude,
        'end_latitude': end_latitude,
        'end_longitude': end_longitude,
    }
    resp = requests.get('https://api.uber.com/v1/estimates/price',
                        params=params)
    return resp.json()

def uber_request_time(start_location, start_latitude, start_longitude, end_location, end_latitude, end_longitude):
	url_time = 'https://api.uber.com/v1/estimates/time'
	parameters = {
	    'server_token': uber_config['server_token'],
	    'start_latitude': start_latitude,
	    'start_longitude': start_longitude,
	    'end_latitude': end_latitude, 
Exemplo n.º 36
0
import pandas as pd
from scipy.stats.mstats import mode
from datetime import datetime

'''
<------------------------------------------------------------------->
MONGODB & TWEEPY CREDENTIALS
<------------------------------------------------------------------->
'''

# mongo setup
# Remote MongoDB host (AWS EC2); collection of harvested user documents.
client = MongoClient('mongodb://52.10.8.92:27017/')
users = client.twitter_data.users2

# tweepy setup
config = cnfg.load(".twitter_config")
auth = tweepy.OAuthHandler(config["consumer_key"],
                           config["consumer_secret"])
auth.set_access_token(config["access_token"],
                      config["access_token_secret"])
api = tweepy.API(auth)

'''
<------------------------------------------------------------------->
SPYRE
<------------------------------------------------------------------->
'''

class SpyreApp(server.App):
    title = "Twitter Recommendation Engine"
from flask import request
import pickle
import numpy as np
import math
import spotipy
import csv
import pandas as pd
import requests
from spotipy import util
from flask_bootstrap import Bootstrap
import json
import cnfg
import os
# Resolve the Spotify config file relative to this script's directory.
fileDir = os.path.dirname(os.path.realpath('__file__'))
filename = os.path.join(fileDir, '.spotify_config')
config = cnfg.load(filename)

application = flask.Flask(__name__)
Bootstrap(application)

# Trained classifier + matching scaler pickled by the modeling step.
# NOTE(review): "scale_func2.pkl.pkl" has a doubled extension — confirm
# the filename is intentional.
with open("logit_thresh2.pkl", "rb") as f:
    logit = pickle.load(f)
with open("scale_func2.pkl.pkl", "rb") as f:
    scaler = pickle.load(f)

# Spotify user URN and the OAuth scope requested for playback.
unm = "spotify:user:1255765740"
scope = "streaming"

token = util.prompt_for_user_token(unm,
                                   scope,
                                   client_id=config["client_id"],
Exemplo n.º 38
0
import cnfg
import tweepy
import json
from pymongo import MongoClient
import pymongo


# BUG FIX: `sys` was imported *after* sys.exit() was used below, which
# would raise NameError exactly on the authentication-failure path.
import sys

# MongoDB connection (credentials redacted in this copy of the source).
client = pymongo.MongoClient("mongodb://*****:*****@54.173.47.58:27017/climatechange")
db = client.climatechange
col = db.climate_tweets

config = cnfg.load(".twitter_config")

# Replace the API_KEY and API_SECRET with your application's key and secret.
# App-only auth: no user context, higher read rate limits.
auth = tweepy.AppAuthHandler(config["consumer_key"], config["consumer_secret"])

api = tweepy.API(auth, wait_on_rate_limit=True,
                   wait_on_rate_limit_notify=True)

if (not api):
    print ("Can't Authenticate")
    sys.exit(-1)

# Continue with rest of code
Exemplo n.º 39
0
import cnfg
import tweepy
from sk_markov import generate_tweet

# Generate one Markov-chain tweet and post it to the configured account.
tweet_text = generate_tweet()
# Python 2 print statement (kept as-is).
print tweet_text

config = cnfg.load(".twitter_config_robama")
auth = tweepy.OAuthHandler(config["consumer_key"],
                           config["consumer_secret"])
auth.set_access_token(config["access_token"],
                      config["access_token_secret"])
api=tweepy.API(auth)

# Post the generated text.
api.update_status(tweet_text)