import TwitterAPI
import yaml

# settings, store and utils are project-local helper modules used below.
import settings
import store
import utils


def main():
    app_settings = settings.get_app_settings()
    api = TwitterAPI.TwitterAPI(**app_settings)

    store_data = store.get_data()

    search_term = 'vnnlp'
    query = {'screen_name': search_term}

    filename = 'user_timeline/{}.yaml'.format(search_term)

    utils.ensure_dir(filename)

    if 'user_timeline' in store_data and 'max_id' in store_data['user_timeline']:
        query['max_id'] = store_data['user_timeline']['max_id'] - 1

    max_id = None
    try:
        with open(filename, 'a') as output_file:
            r = TwitterAPI.TwitterPager(api, 'statuses/user_timeline', query)

            for tweet in r.get_iterator():
                yaml.dump([tweet], output_file, default_flow_style=False)
                if 'id' in tweet:
                    max_id = tweet['id']

    except KeyboardInterrupt:
        pass

    if 'user_timeline' not in store_data:
        store_data['user_timeline'] = {}

    # Keep the previously stored max_id if the run was interrupted before any
    # tweet arrived; storing None would break the max_id - 1 computed above.
    if max_id is not None:
        store_data['user_timeline']['max_id'] = max_id
    store.store_data(store_data)
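The main() function above (and the fetch helpers further down) relies on a project-local utils.ensure_dir whose implementation is not shown here. A minimal sketch, assuming the helper only needs to create the parent directory of the output file:

import os


def ensure_dir(file_path):
    # Create the parent directory of file_path if it does not exist yet.
    directory = os.path.dirname(file_path)
    if directory:
        os.makedirs(directory, exist_ok=True)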
Example #2
def get_results() -> Tuple[List[Dict[str, Any]], List[Dict[str, Any]]]:
    """
    Collect the competition entries from Twitter mentions and log each submission
    :return: Valid plain-query entries and valid regex entries, each holding the
        entrant's name, the query length and the query itself
    """
    twitter_api = TwitterAPI.TwitterAPI(
        config.get('twitter', 'CONSUMER_KEY'),
        config.get('twitter', 'CONSUMER_SECRET'),
        config.get('twitter', 'ACCESS_TOKEN_KEY'),
        config.get('twitter', 'ACCESS_TOKEN_SECRET'),
    )

    valid_normal_entries: List[Dict[str, Any]] = []
    valid_regex_entries: List[Dict[str, Any]] = []

    logging.info('GET RESULTS')

    json_db: Dict[str, Any] = load_json_db(
        config.get('scryfallCardGolf', 'TWEET_DATABASE'))
    max_key: str = max(json_db.keys())

    r = TwitterAPI.TwitterPager(twitter_api, 'statuses/mentions_timeline', {
        'count': 200,
        'since_id': json_db[max_key]['tweet_id']
    })

    for item in r.get_iterator():
        if 'text' not in item:
            logging.warning('SUSPEND, RATE LIMIT EXCEEDED: ' + item['message'])
            break

        logging.info('[TWEET] ' + item['user']['screen_name'] + ': ' +
                     item['text'])
        for url in item['entities']['urls']:
            test_url = url['expanded_url']
            if 'scryfall.com' not in test_url:
                continue

            logging.info('{} submitted solution: {}'.format(
                item['user']['screen_name'], test_url))
            test_query_results = test_query(item['user']['screen_name'],
                                            test_url)
            if test_query_results:
                user_json_entry: Dict[str, Any] = {
                    'name': item['user']['screen_name'],
                    'length': len(test_query_results),
                    'query': test_query_results
                }

                if re.search(r'/.+/', test_query_results):
                    valid_regex_entries.append(user_json_entry)
                else:
                    valid_normal_entries.append(user_json_entry)

    return valid_normal_entries, valid_regex_entries
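Example #2 reads its Twitter credentials from a module-level config object and loads the tweet database with load_json_db; neither is part of the snippet. A minimal sketch of that setup, with the configuration file path as a placeholder:

import configparser
import json
from typing import Any, Dict

config = configparser.ConfigParser()
config.read('card-golf.cfg')  # placeholder path for the project's settings file


def load_json_db(db_file: str) -> Dict[str, Any]:
    # The database is a single JSON object whose highest key is the latest
    # challenge, each entry holding at least a 'tweet_id'.
    with open(db_file, 'r') as f:
        return json.load(f)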
Example #3
def fetch(api, resource, search_term, query):
    filename = '{0}/{1}.yaml'.format(resource.replace('/', '_'), search_term)

    utils.ensure_dir(filename)

    tweets = []
    try:
        r = TwitterAPI.TwitterPager(api, resource, query)
        for tweet in r.get_iterator():
            tweets.append(tweet)

    except KeyboardInterrupt:
        pass

    with open(filename, 'w') as output_file:
        yaml.dump(tweets, output_file, default_flow_style=False)
def get_user_tweet_replies(api, screen_name):
    params = {'q': screen_name}

    r = TwitterAPI.TwitterPager(api, 'search/tweets', params)

    items = []
    for item in r.get_iterator():
        if item.get('in_reply_to_screen_name') == screen_name:
            items.append(item)

    filename = f'reply_tweets/{screen_name}.yaml'
    utils.ensure_dir(filename)

    with open(filename, 'w') as output_file:
        yaml.dump(items, output_file, default_flow_style=False)
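The two helpers in Example #3 expect an already authenticated client plus, for fetch, a query dict that matches the chosen endpoint. A hedged usage sketch, reusing the screen name from the first snippet and with placeholder credentials:

import TwitterAPI

api = TwitterAPI.TwitterAPI(consumer_key='...',
                            consumer_secret='...',
                            access_token_key='...',
                            access_token_secret='...')

# Dump the account's timeline, then collect the replies addressed to it.
fetch(api, 'statuses/user_timeline', 'vnnlp', {'screen_name': 'vnnlp'})
get_user_tweet_replies(api, 'vnnlp')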
Example #5
def get_results() -> List[Dict[str, Any]]:
    """
    Collect the competition entries from Twitter mentions and log each submission
    :return: Valid entries, each holding the entrant's name, the query length
        and the query itself
    """
    valid_entries: List[Dict[str, Any]] = []

    logging.info('GET RESULTS')

    json_db: Dict[str, Any] = load_json_db(TWEET_DATABASE)
    max_key: str = max(json_db.keys())

    r = TwitterAPI.TwitterPager(twitter_api, 'statuses/mentions_timeline', {
        'count': 200,
        'since_id': json_db[max_key]['tweet_id']
    })

    for item in r.get_iterator():
        if 'text' not in item:
            logging.warning('SUSPEND, RATE LIMIT EXCEEDED: %s\n' %
                            item['message'])
            break

        logging.info('[TWEET] ' + item['user']['screen_name'] + ': ' +
                     item['text'])
        for url in item['entities']['urls']:
            test_url = url['expanded_url']
            if 'scryfall.com' not in test_url:
                continue

            logging.info('{} submitted solution: {}'.format(
                item['user']['screen_name'], test_url))
            test_query_results = test_query(item['user']['screen_name'],
                                            test_url)
            if test_query_results:
                valid_entries.append({
                    'name': item['user']['screen_name'],
                    'length': len(test_query_results),
                    'query': test_query_results
                })

    return valid_entries
def fetch(api, resource, search_term, query):
    filename = '{0}/{1}.yaml'.format(resource.replace('/', '_'), search_term)

    utils.ensure_dir(filename)
    count = 0
    try:
        with open(filename, 'a') as output_file:
            r = TwitterAPI.TwitterPager(api, resource, query)

            for tweet in r.get_iterator():
                # Skip retweets and the Vietnamese YouTube auto-tweets
                # ("Tôi đã thích video" / "Tôi đã thêm video", i.e.
                # "I liked a video" / "I added a video").
                if ('retweeted_status' not in tweet
                        and "Tôi đã thích video" not in tweet['text']
                        and "Tôi đã thêm video" not in tweet['text']):
                    yaml.dump([tweet['text']],
                              output_file,
                              default_flow_style=False)
                    count += 1
                    print(count, tweet['text'])
    except KeyboardInterrupt:
        pass
Example #7
    def test_paging_iterator(self):
        pager = TwitterAPI.TwitterPager(self.api, 'search/tweets',
                                        {'q': 'pizza'})
        self.assertIsInstance(pager, TwitterAPI.TwitterPager)
        it = pager.get_iterator()
        self.use_iterator(it)
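Example #7 is a method lifted from a larger unittest.TestCase; the class name, credentials and use_iterator body below are assumptions sketching the scaffolding the method relies on:

import unittest

import TwitterAPI


class TwitterPagerTest(unittest.TestCase):

    def setUp(self):
        # Placeholder credentials; a real suite would read them from its environment.
        self.api = TwitterAPI.TwitterAPI('CONSUMER_KEY', 'CONSUMER_SECRET',
                                         'ACCESS_TOKEN_KEY', 'ACCESS_TOKEN_SECRET')

    def use_iterator(self, it):
        # Consume one item to confirm the pager yields parsed tweets.
        first = next(it)
        self.assertIn('text', first)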