Example #1
		#print("hi")
		
		if date in item[tag[1]].encode('ascii','ignore'):
			s=item[tag[1]].encode('ascii','ignore')
			print(s)
			out.write(s+'\n')
			#write title to the file
			print item[tag[2]]
			out.write(item[tag[2]].encode('ascii','ignore')+'\n')
			#get actual url link from feed and scarpe it
			print(item[tag[3]])
			htmlfile=urllib.urlopen(item[tag[3]])
			htmltext=htmlfile.read()
			
			soup=BeautifulSoup(htmltext)
			word=getatt.find(url[i])
			if word!=None and len(word)==2:
				g_data=soup.find_all(word[1])
			else:
				g_data=soup.find_all(word[1],{word[2]:word[3]})
		
			for item in g_data:
				out.write(item.text.encode('ascii','ignore')+"\n");
			
			#write the matter from url link to file
			
			#add delimiter at end of record
			out.write("\n-|\n")
			print
			print
	
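This fragment (and Example #2 below) relies on a project-local getatt module whose find() helper is not shown. Judging from how its return value is indexed (word[1] as a tag name, word[2]/word[3] as an attribute name and value), it appears to map a site URL to a small scrape rule. A minimal sketch of such a helper, with made-up site rules purely for illustration, might look like this:

# Hypothetical stand-in for the project's getatt module (not the real implementation).
# find(url) returns [site, tag] or [site, tag, attr_name, attr_value] for a known site,
# or None when the site is unrecognised.
_RULES = [
    ["techcrunch.com", "div", "class", "article-entry"],  # assumed rule, for illustration only
    ["example.com", "article"],
]

def find(url):
    for rule in _RULES:
        if rule[0] in url:
            return rule
    return None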
Example #2
from __future__ import print_function
from alchemyapi import AlchemyAPI
import json
import requests
from bs4 import BeautifulSoup
import getatt

alchemyapi = AlchemyAPI()
keys = ["elon musk", "tesla"]
url = "http://techcrunch.com/2016/01/29/tesla-says-ceo-elon-musk-just-bought-more-of-the-company/"
r = requests.get(url)
soup = BeautifulSoup(r.content, 'html.parser')
# per-site scrape rule from the project-local getatt module: [site, tag, attr, value]
word = getatt.find(url)
g_data = soup.find_all(word[1], {word[2]: word[3]})
s = ""
for item in g_data:
    s += item.text

print(s)
"""
flag=0;
for i in range(0,len(keys)):
	if( keys[i] in s.lower()):
		flag=1;


if flag==1:

	response = alchemyapi.keywords('text',s, {'sentiment':1})

	if response['status'] == 'OK':
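The commented-out block above is cut off at the status check. A hedged completion of that step, assuming the AlchemyAPI Python SDK's usual response layout (a 'keywords' list whose entries carry 'text' and, when sentiment is requested, a 'sentiment' dict), could look like this:

# Hypothetical continuation of the commented-out block (assumed response layout).
if any(key in s.lower() for key in keys):
    response = alchemyapi.keywords('text', s, {'sentiment': 1})
    if response['status'] == 'OK':
        for kw in response['keywords']:
            sentiment = kw.get('sentiment', {}).get('type', 'n/a')
            print(kw['text'], sentiment)
    else:
        print('Error in keyword extraction call:', response.get('statusInfo'))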