op_type_span.append(ele_id)
    opinion_type.append(op_type_span)
    opinion.append(opinion_type)
    return opinion
    
    
if __name__ == '__main__':

    # Parse command-line options. Only --no-time is recognised; it disables
    # the timestamp in the linguistic-processor header (useful for
    # reproducible, diffable output).
    my_time_stamp = True
    try:
        opts, args = getopt.getopt(sys.argv[1:], "", ["no-time"])
        for opt, arg in opts:
            if opt == "--no-time":
                my_time_stamp = False
    except getopt.GetoptError:
        # Unknown options are ignored; the script still reads KAF from stdin.
        pass

    # Pipeline: read KAF from stdin, score sentiment (polarity floats ->
    # negators -> intensifiers -> total), attach an 'opinions' layer, and
    # write the augmented KAF to stdout.
    kafObj = KafParser(sys.stdin)
    data_str = kafObj.getSentimentTriples()
    data = convertPolarityToFloats(data_str)
    my_data_neg = applyNegators(data)
    my_data_neg_int = applyIntensifiers(my_data_neg)
    total = computeTotal(my_data_neg_int)

    opinionObj = createOpinion(kafObj, total)
    kafObj.addLayer('opinions', opinionObj)
    kafObj.addLinguisticProcessor('RuleBasedSentimentAnalyzer', '1.0',
                                  'opinion', time_stamp=my_time_stamp)
    kafObj.saveToFile(sys.stdout)
    # BUG FIX: the original unconditionally printed a usage message and
    # called sys.exit(-1) here, *after* successfully writing the output --
    # a stray paste from another script's error path. Removed.

# Command-line options: --no-time suppresses the timestamp on the
# linguistic-processor header (reproducible output).
my_time_stamp = True
try:
  opts, args = getopt.getopt(sys.argv[1:],"",["no-time"])
  for opt, arg in opts:
    if opt == "--no-time":
      my_time_stamp = False
except getopt.GetoptError:
  pass

logging.debug('Starting stanford parser for German text')
logging.debug('Loading and parsing KAF file ...')
my_kaf = KafParser(sys.stdin)

# This module only supports German input; abort on any other language.
lang = my_kaf.getLanguage()
if lang != 'de':
  # BUG FIX: this diagnostic went to sys.stdout, which is reserved for the
  # KAF output stream; error messages belong on stderr, consistent with the
  # other error paths in this file.
  print>>sys.stderr,'ERROR! Language is',lang,'and must be "de" (German)'
  sys.exit(-1)

logging.debug('Extracting sentences from the KAF')

# Build lookup tables: term id -> lemma, and token id -> owning term id.
termid_for_token = {}
lemma_for_termid = {}
for term in my_kaf.getTerms():
    lemma_for_termid[term.getId()] = term.getLemma()
    tokens_id = term.get_list_span()
    for token_id in tokens_id:
        termid_for_token[token_id] = term.getId()
# BUG FIX: the original had a usage/error block (referring to the *Dutch*
# module) pasted inside this loop body, so the script printed usage text and
# exited with -1 after processing the first term. That stray paste has been
# removed.

# Command-line options: --no-time suppresses the timestamp on the
# linguistic-processor header (reproducible output).
my_time_stamp = True
try:
    opts, args = getopt.getopt(sys.argv[1:], "", ["no-time"])
    for opt, arg in opts:
        if opt == "--no-time":
            my_time_stamp = False
except getopt.GetoptError:
    pass

logging.debug('Loading and parsing KAF file ...')
my_kaf = KafParser(sys.stdin)

# This module only supports Dutch input; abort on any other language.
lang = my_kaf.getLanguage()
if lang != 'nl':
    # BUG FIX: this diagnostic went to sys.stdout (the KAF output channel);
    # error messages belong on stderr, consistent with the other error paths
    # in this file.
    print >> sys.stderr, 'ERROR! Language is ', lang, ' and must be nl (Dutch)'
    sys.exit(-1)

logging.debug('Extracting sentences from the KAF')
# Accumulators for sentence extraction plus the term/token lookup tables
# (filled later; presumably by the loop that follows in the full script --
# not visible in this chunk).
sentences = []
current_sent = []
term_ids = []
current_sent_tid = []

lemma_for_termid = {}
termid_for_token = {}
        # NOTE(review): everything below is a detached fragment -- it is
        # indented as the interior of a function/main whose header is not
        # visible here, and the file does not parse as-is (this line is at a
        # different indent level than the rest). Left byte-identical; only
        # comments added.
        logging.getLogger().setLevel(logging.ERROR)

    numNegators = 0
    ## READ the data and create structure for terms
    if not sys.stdin.isatty():
        ## READING FROM A PIPE
        logging.debug('Reading from standard input')
        fic = sys.stdin
    else:
        # stdin is a terminal, not a pipe: print usage to stderr and abort.
        print >> sys.stderr, 'Input stream required.'
        print >> sys.stderr, 'Example usage: cat myUTF8file.kaf.xml |', sys.argv[
            0]
        print >> sys.stderr, sys.argv[0] + ' -h  for help'
        sys.exit(-1)

    kafParserObj = KafParser(fic)

    # Collect every term of the KAF document into 'terms' (defined outside
    # this fragment -- TODO confirm).
    for term in kafParserObj.getTerms():
        terms.append(term)

    logging.debug('Number of terms loaded ' + str(len(terms)))

    ## Load lexicons

    lang = kafParserObj.getLanguage()
    ##lexSent = LexiconSent(lang,'general')
    # 'arguments' is presumably an argparse namespace created elsewhere in
    # the full script -- not visible in this fragment; verify against caller.
    lexSent = LexiconSent(lang, arguments.lexicon,
                          arguments.lexicon_path)  ##Default lexicons
    ################

    ## For each term, establish its sentiment polarity
    # NOTE(review): the three lines below are the tail of a *different*
    # function (one that builds and returns an opinion structure) -- another
    # stray paste; 'opinion_type', 'op_type_span' and 'opinion' are undefined
    # at this point.
    opinion_type.append(op_type_span)
    opinion.append(opinion_type)
    return opinion


if __name__ == '__main__':

    # Parse command-line options. Only --no-time is recognised; it disables
    # the timestamp in the linguistic-processor header (useful for
    # reproducible, diffable output).
    my_time_stamp = True
    try:
        opts, args = getopt.getopt(sys.argv[1:], "", ["no-time"])
        for opt, arg in opts:
            if opt == "--no-time":
                my_time_stamp = False
    except getopt.GetoptError:
        # Unknown options are ignored; processing continues with defaults.
        pass

    # Pipeline: read KAF from stdin, score sentiment (polarity floats ->
    # negators -> intensifiers -> total), attach an 'opinions' layer, and
    # write the augmented KAF to stdout.
    kafObj = KafParser(sys.stdin)
    data_str = kafObj.getSentimentTriples()
    data = convertPolarityToFloats(data_str)
    my_data_neg = applyNegators(data)
    my_data_neg_int = applyIntensifiers(my_data_neg)
    total = computeTotal(my_data_neg_int)

    opinionObj = createOpinion(kafObj, total)
    kafObj.addLayer('opinions', opinionObj)
    kafObj.addLinguisticProcessor('RuleBasedSentimentAnalyzer',
                                  '1.0',
                                  'opinion',
                                  time_stamp=my_time_stamp)
    kafObj.saveToFile(sys.stdout)
    # BUG FIX: after saving the output, the original continued with a
    # duplicated input-reading / lexicon-loading sequence (a stray paste of
    # another script's main) that referenced names never defined in this
    # block ('terms', 'arguments') and would have raised NameError. Removed.