def funding(startup):
    # Fill in funding details from the AngelList API when no round is known yet.
    if startup.get("last_round") is None:
        al_data = startup['al_data']
        # 'property' is the project's accessor helper, not the Python builtin.
        al_id = property(al_data, 'id')
        al_funding = bot_utils.load_json(
            "https://api.angel.co/1/startups/%s/funding?access_token=%s"
            % (al_id, access_token))
        if al_funding is not None:
            funding_info = al_funding.get("funding") or []
            total_raised = 0
            last_round = None

            # Sum every round and remember the most recently closed one.
            for funding_round in funding_info:
                amount = property(funding_round, "amount")
                if amount is not None:
                    total_raised += amount
                    closed = property(funding_round, "closed_at")
                    # ISO date strings compare chronologically; skip rounds
                    # with no close date rather than comparing against None.
                    if closed is not None and (
                            last_round is None
                            or closed > last_round.get("closed_at")):
                        last_round = funding_round

            if last_round is not None:
                bot_utils.set_if_empty(startup, "last_round", last_round.get("amount"))
                bot_utils.set_if_empty(startup, "last_round_type", last_round.get("round_type"))
                bot_utils.set_if_empty(startup, "last_round_url", last_round.get("source_url"))
                bot_utils.set_if_empty(startup, "total_funding", total_raised)
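All of these examples call into a small bot_utils module that is not shown on this page. A minimal sketch of the helpers they rely on, with signatures inferred from the call sites (an assumption, not the project's actual code):

import json
import urllib.request

def load_json(url):
    # Fetch a URL and decode its JSON body; return None on any failure,
    # which is the behavior the callers above test for.
    try:
        with urllib.request.urlopen(url) as response:
            return json.loads(response.read().decode("utf-8"))
    except Exception:
        return None

def set_if_empty(record, key, value):
    # Write the value only when the key is missing or empty and the value is set.
    if value is not None and not record.get(key):
        record[key] = value

def property(data, name):
    # None-safe field lookup; the snippets shadow the builtin 'property' with
    # this helper. (The Crunchbase example further down also passes a third
    # argument to read from a nested record; that variant is omitted here.)
    return data.get(name) if data else None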
Example #3
def recent_hunts(startup_map, url, max=1000):

    print("Product Hunt.recent_hunts => %s" % url)

    ph_data = bot_utils.load_json(authenticate(url))
    posts = []
    if ph_data is not None:
        posts = ph_data.get("posts") or []
    count = 0
    for post in posts:
        name = post.get("name")
        if startup_map.get(name) is None:
            # Resolve the Product Hunt post to an AngelList startup record.
            startup = angel_list.find_startup(
                urllib.request.pathname2url(adjust_path(name)))
            if startup is not None:
                startup_map[name] = startup
                startup["product_hunt_url"] = post.get("discussion_url")
                startup["product_hunt_votes"] = post.get("votes_count")
                startup["product_hunt_comments"] = post.get("comments_count")
                startup["updated"] = datetime.datetime.strptime(
                    post.get("day"), '%Y-%m-%d')
                print("Found via PH: " + name)
                count += 1
                # Stop once the cap is reached ('max' shadows the builtin,
                # kept for compatibility with existing callers).
                if count >= max:
                    break
    return startup_map
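authenticate and adjust_path are also defined elsewhere in the bot. Judging from how they are used here, authenticate attaches the Product Hunt OAuth token to the request and adjust_path turns a product name into a URL-friendly slug. The sketch below is a guess along those lines, with ph_access_token standing in for however the project actually stores its token:

ph_access_token = "YOUR_PRODUCT_HUNT_TOKEN"  # hypothetical placeholder

def authenticate(url):
    # Append the OAuth token as a query parameter (assumed scheme).
    separator = "&" if "?" in url else "?"
    return url + separator + "access_token=" + ph_access_token

def adjust_path(name):
    # Normalize a product name before handing it to the AngelList search.
    return name.lower().strip()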
def find_startup(name):
    url = ("https://api.angel.co/1/search?access_token=%s&type=Startup&query=%s"
           % (access_token, name))

    results = bot_utils.load_json(url)
    if results is None or len(results) == 0:
        return None

    # Take the first search result as the best match.
    al_id = results[0].get("id")

    if al_id is not None:
        al_data = bot_utils.load_json("https://api.angel.co/1/startups/%s" % al_id)
        if al_data is not None and bot_utils.match_all(al_data, al_checks):
            startup = create(al_data)
            # Enrich the record with Crunchbase data when a match exists.
            crunchbase.find_startup(startup, name)
            return startup

    return None
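bot_utils.match_all and the al_checks filter are external to the snippet as well. One plausible shape, treating al_checks as a mapping from field name to predicate (purely an assumption):

al_checks = {
    # Hypothetical filters: skip hidden or placeholder AngelList profiles.
    "hidden": lambda value: value is not True,
    "community_profile": lambda value: value is not True,
}

def match_all(data, checks):
    # True only if every predicate accepts its field in the record.
    return all(check(data.get(field)) for field, check in checks.items())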
Example #6
def last_round(startup):
    if startup.get("last_round") is None:
        rounds = relationship(startup, "funding_rounds")
        # Guard against an empty list as well as None before indexing.
        if rounds:
            # The first listed round is treated as the most recent one.
            round_info = rounds[0]
            url = authenticate("http://api.crunchbase.com/v/2/" + round_info.get("path"))
            last_round_data = bot_utils.load_json(url)
            if last_round_data is not None:
                startup["cb_last_round"] = last_round_data
                # The three-argument 'property' form reads a field out of the
                # nested "cb_last_round" record stored on the startup.
                bot_utils.set_if_empty(startup, "last_round",
                                       property(startup, "money_raised_usd", "cb_last_round"))
                bot_utils.set_if_empty(startup, "last_round_type",
                                       property(startup, "funding_type", "cb_last_round"))
                bot_utils.set_if_empty(startup, "last_round_url",
                                       funding_web + property(startup, "permalink", "cb_last_round"))
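relationship is another unshown helper. The Crunchbase v2 API nests related collections under data.relationships.<name>.items, so a plausible reading of the call above is:

def relationship(startup, name):
    # Hypothetical: pull a named relationship list out of the cached
    # Crunchbase payload, tolerating missing levels.
    cb_data = startup.get("cb_data") or {}
    items = cb_data.get("data", {}).get("relationships", {}).get(name, {})
    return items.get("items")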
Example #7
def find_startup(startup, name):
    if startup.get("cb_data") is not None:
        return True

    # Crunchbase permalinks are lowercase, with '-' replacing spaces and dots.
    path = name.lower().replace(" ", "-").replace(".", "-")
    url = authenticate(base_api + "organization/" + path)
    cb_data = bot_utils.load_json(url)
    if cb_data is not None:
        data = cb_data.get("data", {})
        # Only accept real companies, not investors or other profile types.
        if (data.get("response") is not False
                and data.get("properties", {}).get("primary_role") == 'company'):
            fill(startup, cb_data, name)
            return True

    return False
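The permalink normalization is simple enough to check by hand:

# "Get.App Inc" -> "get-app-inc", which is the slug form the
# organization endpoint expects.
path = "Get.App Inc".lower().replace(" ", "-").replace(".", "-")
assert path == "get-app-inc"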
def recent_startups(startup_map, url, max=1000):

    print("AngelList.recent_startups => %s" % url)
    results = bot_utils.load_json(url)

    count = 0
    if results is not None:
        for al_data in results.get("startups") or []:
            if bot_utils.match_all(al_data, al_checks):
                name = al_data.get("name")
                if startup_map.get(name) is None:
                    startup = create(al_data)
                    startup_map[name] = startup
                    crunchbase.find_startup(startup, name)
                    print("Found via AL: " + name)
                    count += 1
                    if count >= max:
                        break
    return startup_map
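Taken together, the collectors feed one shared startup_map. A sketch of how a driver might wire them up; the module names follow the cross-references in the snippets (angel_list, crunchbase, product_hunt), but the URLs, the call order, and which module owns funding and last_round are all assumptions:

startup_map = {}
startup_map = angel_list.recent_startups(
    startup_map, "https://api.angel.co/1/startups?filter=launched", max=200)
startup_map = product_hunt.recent_hunts(
    startup_map, "https://api.producthunt.com/v1/posts", max=200)

# Enrich each discovered startup with funding details from both sources.
for startup in startup_map.values():
    angel_list.funding(startup)
    crunchbase.last_round(startup)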
Example #10
def initialize():
    # Initialization Loop
    while True:
        # Load settings
        try:
            settings = utils.load_json('settings.json')
        except Exception:
            print('Error loading settings!')
            time.sleep(15)
            continue

        # Create logger
        try:
            logger = utils.Logger(**settings['logging'])
        except Exception:
            print('Error initializing logger!')
            time.sleep(15)
            continue

        # Load reddit
        try:
            logger.log_debug('Connecting to reddit.', 'reddit')
            reddit = praw.Reddit(user_agent=settings['reddit']['useragent'])
            reddit.login(settings['reddit']['username'],
                         settings['reddit']['password'])
        except Exception:
            logger.log_error('Unable to connect to reddit.', 'reddit')
            time.sleep(15)
            continue

        # Load subreddit
        try:
            logger.log_debug('Loading subreddit.', 'reddit')
            subreddit = reddit.get_subreddit(settings['reddit']['subreddit'])
        except Exception:
            logger.log_error('Unable to connect to subreddit.', 'reddit')
            time.sleep(15)
            continue
        break
    return settings, logger, reddit, subreddit
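initialize retries every dependency until the whole stack comes up. The parsed settings dict it expects would plausibly look like the following; the key names come straight from the lookups above, the values are illustrative, and the logging keys depend on whatever utils.Logger actually accepts:

settings = {
    "logging": {"level": "debug", "file": "bot.log"},  # kwargs for utils.Logger (assumed)
    "reddit": {
        "useragent": "youtube_bot/1.0 by example_user",
        "username": "example_bot",
        "password": "example-password",
        "subreddit": "examplesubreddit",
    },
    "youtube": {"api_key": "example-api-key"},
}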
Example #12
def loop(settings, logger, reddit, subreddit):
    # Load state if stored in file
    if os.path.exists('state.json'):
        state = utils.load_json('state.json')
    else:
        state = {}

    # Start main loop
    while True:
        try:
            # Make copy of state for write determination
            old_state = copy.deepcopy(state)

            # Load config page
            try:
                logger.log_debug('Loading config page.', 'reddit')
                page = subreddit.get_wiki_page('youtube_bot')
            except Exception:
                logger.log_error('Unable to load config page.', 'reddit')
                time.sleep(15)
                continue

            # Parse config page
            try:
                logger.log_debug('Parsing config page.', 'reddit')
                config = json.loads(page.content_md)
            except Exception:
                logger.log_error('Failed to parse config page.', 'reddit')
                time.sleep(15)
                continue

            # Check if within window
            logger.log_debug('Checking window.', 'status')
            current_time = datetime.datetime.now()
            current_date = current_time.strftime('%d-%m-%Y')
            within_window = check_window(config.get('start_time', '02:00'),
                                         config.get('stop_time', '22:00'),
                                         current_time.strftime('%H:%M'))
            if not within_window:
                logger.log_debug('Not within window, waiting 5 minutes.',
                                 'status')
                time.sleep(300)
                continue
            else:
                logger.log_debug('Within window, continuing.', 'status')

            # Check if thread already started
            logger.log_debug('Checking for current post.', 'status')
            if current_date == state.get('post_date', ''):
                post_id = state.get('post_id', None)
                logger.log_debug('Found post: %s' % post_id, 'status')
            else:
                post_id = None
                logger.log_debug('No post found.', 'status')

            # Create post if no post id
            if post_id:
                try:
                    logger.log_debug('Fetching post.', 'reddit')
                    post = reddit.get_submission(submission_id=post_id)
                except Exception:
                    logger.log_error('Could not fetch post.', 'reddit')
                    time.sleep(15)
                    continue
            else:
                try:
                    logger.log_info('Creating post.', 'reddit')
                    title = current_time.strftime(config['post_title'])
                    body = config['post_text']
                    post = subreddit.submit(title, body)
                    state['removed_comments'] = []
                except Exception:
                    logger.log_error('Could not create post.', 'reddit')
                    time.sleep(15)
                    continue

            # Dump state for crash safety
            state['post_id'] = post.id
            state['post_date'] = current_date
            if state != old_state:
                try:
                    logger.log_debug('Writing state file.', 'io')
                    utils.write_json(state, 'state.json')
                    old_state = copy.deepcopy(state)
                except Exception:
                    logger.log_error('Unable to write state file.', 'io')
                    time.sleep(15)
                    continue

            # Clean top level comments
            try:
                logger.log_debug('Loading approved commentors.', 'status')
                removed_comments = state.get('removed_comments', [])
                approved_commentors = config.get('approved_commentors', [])
                approved_commentors.append(settings['reddit']['username'])
                logger.log_debug('Cleaning comments.', 'reddit')
                for comment in post.comments:
                    logger.log_debug(
                        'Parsing comment tree: %s' % comment.permalink,
                        'reddit')
                    parse_comment_tree(comment, approved_commentors,
                                       removed_comments)
            except Exception:
                logger.log_error('Unable to clean comments.', 'reddit')
                time.sleep(15)
                continue

            # Dump state for crash safety
            state['removed_comments'] = removed_comments
            if state != old_state:
                try:
                    logger.log_debug('Writing state file.', 'io')
                    utils.write_json(state, 'state.json')
                    old_state = copy.deepcopy(state)
                except Exception:
                    logger.log_error('Unable to write state file.', 'io')
                    time.sleep(15)
                    continue

            # Build list of new videos to post
            new_videos = []
            channel_states = state.get('channels', {})
            try:
                for channel in config.get('channels', []):
                    logger.log_debug('Fetching videos: %s' % channel,
                                     'youtube')
                    channel_state = channel_states.get(channel,
                                                       {'name': channel})
                    videos, channel_states[channel] = get_new_videos(
                        channel_state, settings['youtube']['api_key'])
                    if len(videos) == 0:
                        logger.log_debug('No new videos.', 'youtube')
                    elif len(videos) >= 5:
                        logger.log_error('Too many videos: %s' % channel,
                                         'youtube')
                    else:
                        logger.log_debug('Added videos: %s' % len(videos),
                                         'youtube')
                        new_videos.extend(videos)
                        channel_states[channel]['after'] = videos[0]['date']
            except Exception:
                logger.log_error('Error fetching new videos.', 'youtube')
                time.sleep(15)
                continue

            # Submit video comments
            for video in new_videos:
                try_counter = 0
                while True:
                    try_counter += 1
                    logger.log_debug('Processing Comment: %s' % video['id'],
                                     'reddit')
                    text = config.get('comment_text',
                                      u'[{channel}] [{title}]({url})')
                    try:
                        logger.log_debug(
                            'Formatting Comment: %s' % video['id'], 'reddit')
                        text = text.format(channel=video['channel'],
                                           title=video['title'],
                                           id=video['id'],
                                           url=video['url'])
                        logger.log_info('Submitting Comment: %s' % video['id'],
                                        'reddit')
                        post.add_comment(text)
                        break
                    except Exception:
                        if try_counter >= 5:
                            logger.log_error(
                                'Skipping Comment: %s' % video['id'], 'reddit')
                            break
                        logger.log_error('Submit Failed: %s' % video['id'],
                                         'reddit')
                        time.sleep(5)
                        continue

            # Dump state for crash safety
            state['channels'] = channel_states
            if state != old_state:
                try:
                    logger.log_debug('Writing state file.', 'io')
                    utils.write_json(state, 'state.json')
                    old_state = copy.deepcopy(state)
                except Exception:
                    logger.log_error('Unable to write state file.', 'io')
                    time.sleep(15)
                    continue

            # Wait before cycling again to be nice to the APIs
            time.sleep(5)

        except KeyboardInterrupt:
            logger.log_debug('Interrupt received.', 'status')
            break
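check_window, get_new_videos, and parse_comment_tree are defined elsewhere in the bot. check_window is the easiest to reconstruct from its call site: all three arguments arrive as 'HH:MM' strings, so zero-padded lexicographic comparison matches clock order. A sketch under that assumption:

def check_window(start_time, stop_time, current):
    # 'HH:MM' strings compare chronologically because they are zero-padded.
    if start_time <= stop_time:
        return start_time <= current <= stop_time
    # Window that wraps past midnight, e.g. 22:00 -> 02:00.
    return current >= start_time or current <= stop_time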