Example #1
File: news.py Project: sanjayss34/Newstock
def get_google_news_articles(query, dates):
    """Collect up to 10 Google News story links per date for the given query."""
    results = []
    for d in dates:
        # Append the date string to the query so each request is date-scoped.
        response = gnp.get_google_news_query(query + ' ' + d)
        for story in response['stories'][:10]:
            results.append({'date': d, 'url': story['link']})
    return results
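A brief usage sketch for the helper above (the query and date strings are hypothetical; they are simply concatenated onto the query text):

if __name__ == '__main__':
    # Hypothetical inputs; any human-readable date string works because it
    # is appended verbatim to the query.
    articles = get_google_news_articles('Tesla earnings', ['January 2016', 'February 2016'])
    for article in articles:
        print(article['date'], article['url'])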
Example #2
def post(self):
    # Read the search term from the request and pass it straight to gnp.
    query = self.get_query_argument("query")
    result = gnp.get_google_news_query(query)
    # json_encode comes from tornado.escape.
    self.set_header('Content-Type', 'application/json')
    self.write(json_encode(result))
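The method above is a fragment; for context, a minimal sketch of how such a handler might be wired into a Tornado application (the class name, route, and port are assumptions, not from the original):

import tornado.ioloop
import tornado.web
from tornado.escape import json_encode
import gnp

class NewsQueryHandler(tornado.web.RequestHandler):  # hypothetical class name
    def post(self):
        query = self.get_query_argument("query")
        self.set_header('Content-Type', 'application/json')
        self.write(json_encode(gnp.get_google_news_query(query)))

if __name__ == '__main__':
    app = tornado.web.Application([(r"/news", NewsQueryHandler)])  # assumed route
    app.listen(8888)  # port is an assumption
    tornado.ioloop.IOLoop.current().start()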
Example #3
def urlsandtitles(trend):
    jsonobject = gnp.get_google_news_query(trend)
    stories = jsonobject['stories']
    urls = []
    titles = []
    for each in stories:
        url = each['link']
        title = each['title']
        # Keep only stories whose page passes the language check.
        if check_lang(url):
            urls.append(url)
            titles.append(title)
    return urls, titles
Example #4
def get_news(query):
    # Return at most the first 10 stories for the query.
    content = gnp.get_google_news_query(query)
    return content['stories'][:10]
Example #5
def save_file(items, filename):
    # Write each entry on its own line; 'with' guarantees the file is closed.
    with open(filename, "w") as f:
        for item in items:
            f.write(item)
            f.write("\n")

if __name__ == '__main__':
    list_content = []
    list_queries = ["\"Latin American Public Opinion Project\"", "\"Latin America Public Opinion Project\"", "\"LAPOP\"", "\"Americas Barometer\"", "\"AmericasBarometer\"",
                    "\"El Proyecto de Opinion Publica de America Latina\"", "\"Barometro de las Americas\"", "Elizabeth Zechmeister", "Elizabeth J Zechmeister", "\"Mitchell Seligson\""]
    for query in list_queries:
        a = gnp.get_google_news_query(query)
        list_content.append(query)
        list_content.append("\n")
        for key, value in a.items():
            for item in value:
                try:
                    snip = item.get('content_snippet', '')
                    title = item.get('title', '')
                    link = item.get('link', '')
                    list_content.append(snip)
                    list_content.append(title)
                    list_content.append(link)
                    list_content.append("\n")
                except AttributeError:
                    # Not a story dict; skip it.
                    continue
    save_file(list_content, "lapop_results.txt")  # filename is an assumption; the original snippet was truncated before this call
Example #6
from __future__ import print_function

import json

import gnp

# b = gnp.get_google_news(gnp.EDITION_ENGLISH_US, geo='London,UK')
c = gnp.get_google_news_query("Honda+Factory")

print(json.dumps(c, indent=4, sort_keys=True))
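Judging from the other examples on this page, the returned dict includes a 'stories' list whose entries carry 'title', 'link', 'content_snippet', 'category', and 'source' keys; a quick inspection sketch:

for story in c['stories'][:3]:
    print(story['title'], '->', story['link'])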
Example #7
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import gnp
import MySQLdb
import sys

if len(sys.argv) != 2:
    print("Usage: " + sys.argv[0] + " CODE")
    sys.exit(-1)

stockcode = sys.argv[1]
print(stockcode)

result = gnp.get_google_news_query(stockcode)

try:
    conn = MySQLdb.connect(host='127.0.0.1', user='******', passwd='admin',
                           db='db_stock', port=3307, charset='utf8')
    cur = conn.cursor()
    # One row per story; parameterized %s values let the driver escape input.
    for element in result['stories']:
        cur.execute(
            "insert into tb_news_search(stat_date,search_key,category,source,content_snippet,link,title) "
            "values(current_date(),%s,%s,%s,%s,%s,%s)",
            (stockcode, element['category'], element['source'],
             element['content_snippet'], element['link'], element['title']))
    cur.close()
    conn.commit()
except MySQLdb.Error as e:
    print("MySQL error:", e)
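The insert statement implies the column layout of tb_news_search; a hedged sketch of creating it from Python (the column types and lengths are assumptions, not from the original):

ddl = """
CREATE TABLE IF NOT EXISTS tb_news_search (
    stat_date       DATE,
    search_key      VARCHAR(64),
    category        VARCHAR(64),
    source          VARCHAR(128),
    content_snippet TEXT,
    link            VARCHAR(512),
    title           VARCHAR(512)
)
"""
cur.execute(ddl)  # assumes the same MySQLdb cursor as above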
Example #8
import datetime
import json

import gnp
import twitter

# twitter_api (an authenticated twitter.Twitter client) and q (the search/
# track term) are assumed to be defined earlier in the original script;
# see the preamble sketch below.
twitter_stream = twitter.TwitterStream(auth=twitter_api.auth)
print("got stream api")

stream = twitter_stream.statuses.filter(track=q)
print("got stream")

# Collect tweet texts keyed tweet0, tweet1, ... and dump the first 10 to disk.
result_data = {}

i = 0
for tweet in stream:
    key = 'tweet' + str(i)
    result_data[key] = tweet['text']
    i += 1
    if i % 10 == 0:
        now = datetime.datetime.now()
        with open('./AS2data/captured_tweets_stream_' + now.strftime("%Y%m%d_%H%M%S") + '.json', 'w') as outfile:
            json.dump(result_data, outfile, indent=4)
        break

# Get Google News data for the same term.
# gnews = gnp.get_google_news(gnp.EDITION_ENGLISH_US)
gnews = gnp.get_google_news_query(q)

now = datetime.datetime.now()
with open('./AS2data/google_news_' + now.strftime("%Y%m%d_%H%M%S") + '.json', 'w') as outfile:
    json.dump(gnews, outfile, indent=4)
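The fragment above assumes twitter_api and q were set up earlier; a minimal sketch of that preamble using the same twitter package (all credentials and the track term are placeholders):

import twitter

# Placeholder OAuth credentials; substitute real keys.
auth = twitter.OAuth('TOKEN', 'TOKEN_SECRET', 'CONSUMER_KEY', 'CONSUMER_SECRET')
twitter_api = twitter.Twitter(auth=auth)

q = 'Honda'  # hypothetical track/search term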