# NOTE(review): fragment of a larger if/elif mode dispatcher — the opening
# `if` (and the definitions of `args`, `grap`, `keywords`, `writeTweet`)
# sit above this chunk and are not visible here.
#
# STREAM mode: live keyword stream; location/lang filters are applied only
# when BOTH --location and --lang were given (presumably grap.stream treats
# them as optional trailing arguments — TODO confirm against TweetGrapper).
elif "stream"  == args.mode.lower():
  print "Activating stream mode"
  if args.location is not None and args.lang is not None : 
    grap.stream(keywords,writeTweet,args.location,args.lang)
  else : 
    grap.stream(keywords,writeTweet)


#STREAMLocation mode
#------------------
# Streams by location only — note this branch passes no `keywords`,
# unlike the other stream modes.
elif "streamlocation"  == args.mode.lower():
  print "Activating stream mode by location "  

  if args.location is not None and args.lang is not None : 
    grap.streamlocation(writeTweet,args.location,args.lang)
  else : 
    grap.streamlocation(writeTweet)

  


#Streamloop Mode
#------------------
# Same contract as the plain stream mode but via grap.streamloop —
# presumably a reconnecting/looping variant; verify in TweetGrapper.
elif "streamloop" == args.mode.lower():
  print "Activating streamloop mode"
  if args.location is not None and args.lang is not None : 
    grap.streamloop(keywords,writeTweet,args.location,args.lang)
  else : 
    grap.streamloop(keywords,writeTweet)

# NOTE(review): this is a second, reformatted copy of the dispatcher above
# (4-space indent instead of 2) — likely a duplicate pasted from another
# scrape of the same example. The dangling `else:` below belongs to an
# out-of-view branch (appears to be a `search` mode fallback).
    else:
        grap.search(keywords, writeTweet)

#STREAM Mode
#------------------
# Keyword stream; location/lang filters only when both flags were supplied.
elif "stream" == args.mode.lower():
    print "Activating stream mode"
    if args.location is not None and args.lang is not None:
        grap.stream(keywords, writeTweet, args.location, args.lang)
    else:
        grap.stream(keywords, writeTweet)

#STREAMLocation mode
#------------------
# Location-based stream — takes no `keywords`, unlike the other modes.
elif "streamlocation" == args.mode.lower():
    print "Activating stream mode by location "

    if args.location is not None and args.lang is not None:
        grap.streamlocation(writeTweet, args.location, args.lang)
    else:
        grap.streamlocation(writeTweet)

#Streamloop Mode
#------------------
# Looping variant of the keyword stream — semantics defined in TweetGrapper.
elif "streamloop" == args.mode.lower():
    print "Activating streamloop mode"
    if args.location is not None and args.lang is not None:
        grap.streamloop(keywords, writeTweet, args.location, args.lang)
    else:
        grap.streamloop(keywords, writeTweet)
# Exemple #3 (scraped example separator; original vote count: 0)
# parser = argparse.ArgumentParser(description='tool to extract set of Subjecitve Words and idioms depending on set of Patterns written in Config File')
# parser.add_argument('-c','--config', help='Input Config file name',required=True)
# parser.add_argument('-i','--input', help='Input Tweets files to Extract subjective words from',required=True)
# parser.add_argument('-o','--output',help='Output file name - print in console if not specified', required= True)
# parser.add_argument('-uf','--uniqandfilter',help='filter extracted lexicon words and save them to clean_uniq_output file with counts', required= False , action="store_true")
# parser.add_argument('-sl','--seedlexicon', help='Input classified lexicon file name',required=False)
# args = parser.parse_args()
	


grap = TweetGrapper()
def do(tweet):
	print str(tweet.id ) +"\t" + tweet.simpleText()
#grap.streamloop(["السيسي","مصر"],do)
# grap.streamloop(["مصر","مرسي","السيسي","مبارك","الأخوان","\"30 يونيو\"","\"25 يناي\"","#انتخبوا_العرص","عسكر"],do)
# Start the looping stream with Arabic keywords (appear to be the telecom
# brands "Vodafone" and "Mobinil" — confirm), printing each tweet via `do`.
grap.streamloop(["فودافون","موبينيل"],do)





# config = Config(args.config)
# matcher = PatternMatcher(args.input,config)

# print config.Patterns

# candidateWords = matcher.applyPatterns("patterns_closed")

# for i in candidateWords:
# 	print i.text + "\t" + i.pattern