def actual_process(cls,mygroup):
      """Collect one entry of the group for every required Spanish verb-form
      tag pattern; if all patterns are matched, print a GF entry for every
      token listed for the lemma in the bilingual verb dictionary, built from
      the matched surface forms (one entry per valency when valencies are
      known). Otherwise the group is discarded."""
      # Apertium-style tags: inf=infinitive, ger=gerund, pp=participle,
      # pri=present indicative, prs=present subjunctive, pii=imperfect
      # indicative, pis=imperfect subjunctive, ifi=preterite, fti=future
      # indicative, cni=conditional, imp=imperative.
      tagsOfForms = [u"inf", u"ger", u"pp.m.sg",
                     u"pri.p1.sg", u"pri.p2.sg", u"pri.p3.sg", u"pri.p1.pl", u"pri.p2.pl", u"pri.p3.pl",
                     u"prs.p1.sg", u"prs.p2.sg", u"prs.p3.sg", u"prs.p1.pl", u"prs.p2.pl", u"prs.p3.pl",
                     u"pii.p1.sg", u"pii.p2.sg", u"pii.p3.sg", u"pii.p1.pl", u"pii.p2.pl", u"pii.p3.pl",
                     u"pis.p1.sg", u"pis.p2.sg", u"pis.p3.sg", u"pis.p1.pl", u"pis.p2.pl", u"pis.p3.pl",
                     u"pis.p1.sg", u"pis.p2.sg", u"pis.p3.sg", u"pis.p1.pl", u"pis.p2.pl", u"pis.p3.pl",
                     u"ifi.p1.sg", u"ifi.p2.sg", u"ifi.p3.sg", u"ifi.p1.pl", u"ifi.p2.pl", u"ifi.p3.pl",
                     u"fti.p1.sg", u"fti.p2.sg", u"fti.p3.sg", u"fti.p1.pl", u"fti.p2.pl", u"fti.p3.pl",
                     u"pis.p1.sg", u"pis.p2.sg", u"pis.p3.sg", u"pis.p1.pl", u"pis.p2.pl", u"pis.p3.pl",
                     u"cni.p1.sg", u"cni.p2.sg", u"cni.p3.sg", u"cni.p1.pl", u"cni.p2.pl", u"cni.p3.pl",
                     u"imp.p1.pl", u"imp.p2.sg", u"imp.p3.sg", u"imp.p1.pl", u"imp.p2.pl", u"imp.p3.pl",
                     u"pp.m.sg", u"pp.f.sg", u"pp.m.pl", u"pp.f.pl"]
      foundEntries = [[] for _ in tagsOfForms]
      
      # search for each tag pattern among the analyses of the group
      for i, pattern in enumerate(tagsOfForms):
          for entry in mygroup:
              tagsOfLexicalForm = u".".join(entry.lexicalForm.get_tags())
              if pattern in tagsOfLexicalForm:
                  foundEntries[i].append(entry)
 
      if all( len(pfoundEntries) > 0 for pfoundEntries in foundEntries ):
          if mygroup[0].lexicalForm.get_lemma() not in SpaVblexProcessor.engVblexDict:
              print >> sys.stderr, "Discarded group: lemma not found in the bilingual dictionary: "+str(mygroup)
          else:
              for token in SpaVblexProcessor.engVblexDict[mygroup[0].lexicalForm.get_lemma()]:
                  valencies=ValenciesProcessor.valencyDict.get(createGFTokenNoValency(token))
                  if not valencies:
                      # no valency information: emit a single GF entry
                      SpaVblexProcessor.print_GF(token, [pfoundEntries[0].surfaceForm for pfoundEntries in foundEntries])
                  else:
                      # one GF entry per known valency frame
                      for valency in valencies:
                          SpaVblexProcessor.print_GF(token, [pfoundEntries[0].surfaceForm for pfoundEntries in foundEntries], valency)
      else:
          print >> sys.stderr, "Discarded group (missing forms): "+str(mygroup)
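
# Illustrative, self-contained sketch (not part of the original module): it
# only demonstrates how the tag-pattern matching above selects one surface
# form per required slot. The _Entry/_LexicalForm stand-ins and the demo data
# are hypothetical; the real entry objects are provided elsewhere in this
# project.
if __name__ == "__main__":
    class _LexicalForm(object):
        def __init__(self, lemma, tags):
            self._lemma, self._tags = lemma, tags
        def get_lemma(self):
            return self._lemma
        def get_tags(self):
            return self._tags

    class _Entry(object):
        def __init__(self, surfaceForm, lemma, tags):
            self.surfaceForm = surfaceForm
            self.lexicalForm = _LexicalForm(lemma, tags)

    demo_group = [
        _Entry(u"cantar", u"cantar", [u"vblex", u"inf"]),
        _Entry(u"canta", u"cantar", [u"vblex", u"pri", u"p3", u"sg"]),
    ]
    demo_patterns = [u"inf", u"pri.p3.sg"]
    demo_found = [[] for _ in demo_patterns]
    for i, pattern in enumerate(demo_patterns):
        for entry in demo_group:
            if pattern in u".".join(entry.lexicalForm.get_tags()):
                demo_found[i].append(entry)
    # prints [u'cantar', u'canta']: one surface form per requested pattern
    print [slot[0].surfaceForm for slot in demo_found]
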
def actual_process(cls,mygroup):
     """Counterpart of the method above for English verb groups: collect one
     entry for each of the five required forms (infinitive, 3rd person
     singular present, past, past participle, gerund) and print a GF entry
     for the lemma, one per valency when valencies are known. Otherwise the
     group is discarded."""
     tagsOfForms = [[u"inf"], [u"pri", u"p3", u"sg"], [u"past"], [u"pp"], [u"ger"]]
     foundEntries = [[] for _ in tagsOfForms]
     # keep only entries whose analysis does not carry one of the marker
     # tags hasprpers / hasthat / hasthis
     groupNoSep = [entry for entry in mygroup
                   if not (set([u"hasprpers", u"hasthat", u"hasthis"]) & set(entry.lexicalForm.get_tags()))]
     
     # search for each tag pattern among the analyses of the group
     for i, tags in enumerate(tagsOfForms):
         pattern = u".".join(tags)
         for entry in groupNoSep:
             tagsOfLexicalForm = u".".join(entry.lexicalForm.get_tags())
             if pattern in tagsOfLexicalForm:
                 foundEntries[i].append(entry)
                 
     if all( len(pfoundEntries) > 0 for pfoundEntries in foundEntries ):
         valencies=ValenciesProcessor.valencyDict.get(createGFTokenNoValency(groupNoSep[0].lexicalForm.get_lemma()))
         if not valencies:
             # no valency information: emit a single GF entry
             EngVblexProcessor.print_GF(groupNoSep[0].lexicalForm.get_lemma(), [pfoundEntries[0].surfaceForm for pfoundEntries in foundEntries])
         else:
             # one GF entry per known valency frame
             for valency in valencies:
                 EngVblexProcessor.print_GF(groupNoSep[0].lexicalForm.get_lemma(), [pfoundEntries[0].surfaceForm for pfoundEntries in foundEntries], valency)
             
     else:
         print >> sys.stderr, "Discarded group (missing forms): "+str(mygroup)
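
# Illustrative, self-contained sketch (not part of the original module): it
# only demonstrates the groupNoSep filter above, which drops analyses that
# carry one of the marker tags hasprpers/hasthat/hasthis before the English
# forms are collected. The demo tag lists below are hypothetical.
if __name__ == "__main__":
    demo_tag_lists = [
        [u"vblex", u"inf"],                # kept
        [u"vblex", u"pri", u"p3", u"sg"],  # kept
        [u"vblex", u"inf", u"hasthat"],    # dropped: carries a marker tag
    ]
    marker_tags = set([u"hasprpers", u"hasthat", u"hasthis"])
    kept = [tags for tags in demo_tag_lists if not (marker_tags & set(tags))]
    # prints the first two tag lists; the third one is filtered out
    print kept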