Example #1
 def run(self, line):

   # Assumes Python 2: base64, urllib2 and httplib's BadStatusLine, along with
   # the module-level logger and twitterinfo objects, are available here.

   # Build the Basic auth value from the current user:pass line
   try:
     auth = base64.encodestring(unicode(line, errors='ignore')).strip()
   except UnicodeEncodeError:
     logger.debug('error while encoding characters in the current line')
     return False

   # HTTP request
   r = urllib2.Request(url='http://api.twitter.com/1/friendships/create/%s.xml' % self.user,
                       data='&follow=true',
                       headers={'Authorization': 'Basic %s' % auth})

   try:
     h = urllib2.urlopen(r).read()
     logger.debug(h)
   except urllib2.HTTPError as e:
     if e.code == 401:
       #logger.warning('Code 401: user %s cannot be logged in' % line.split(':')[0])
       pass
     elif e.code == 104:
       #logger.warning('Code 104: connection reset by peer')
       raise
   except urllib2.URLError:
     #logger.warning('URLError: connection timed out')
     pass
   except BadStatusLine:
     #logger.warning('Server cannot understand one request')
     pass
   else:
     twitterinfo.follow += 1
     return True
   finally:
     twitterinfo.count += 1
Example #2
 def specialize_optimal_subclass(rule):
     # Nested helper from the specialize() method (see Example #6): 'self',
     # 'is_unary' and 'logger' are taken from the enclosing scope.
     rules = []
     eligible_preds = rule.shared_var[rule.latest_var]
     for pred in filter(is_unary, eligible_preds):
         for sub_class in self.get_subclasses(pred):
             logger.debug('Swapping with %s' % sub_class)
             new_rule = rule.clone_swap_with_subclass(pred, sub_class)
             if self.can_specialize(new_rule):
                 rules.append(new_rule)
                 rules.extend(specialize_optimal_subclass(new_rule))
     return rules
Example #3
 def run(self):
   global pwn

   # Helper to fetch and strip the current line from the wordlist file
   getline = lambda: linecache.getline(pwn.file, self.active).strip()

   while self.active <= self.count:
     line = getline()
     if not line and self.active >= self.count:
       logger.warning('line %s is empty or something went wrong', self.active)
       logger.debug('line = %s' % line)
       close()
     if self.plugin.run(line):
       pwn.follow += 1

       # Append successfully processed lines to the save file, if one was given
       if pwn.save:
         s = open(pwn.save, 'a')
         s.write('%s\n' % line)
         s.close()

     self.active += pwn.threads
     pwn.current += 1

     # Display current statistics
     active_threads = threading.active_count() - 1
     dataTostdout('\r[%s] [INFO] %s/%s lines completed. %s thread%s running.'
                  % (time.strftime('%X'), pwn.current, self.count, active_threads,
                     's' if active_threads > 1 else ''), forceOutput=True)

   # Show statistics only once
   if pwn.info and int(self.name[-1]):
     self.plugin.statistics()

   # Remove the wordlist file once the run is finished, if requested
   if pwn.purge:
     logger.info('Purging file...')
     os.remove(pwn.file)
     logger.info('Purge file completed.')
   close()
Example #4
    def __induce_level(self, rules):
        '''
        Specializes the rules for the last level with unary predicates.
        '''
        while True:
            old_score = self.group_score(rules)
            new_rules = rules[:]
            for rule in rules:
                specializations = self.specialize(rule)
                self.extend(new_rules, specializations)

            # Take the first N rules
            rules = sorted(new_rules,
                           key=lambda rule: rule.score,
                           reverse=True)[:self.n]

            new_score = self.group_score(rules)

            logger.debug("Old score: %.3f, New score: %.3f" % (old_score, new_score))

            if 1 - abs(old_score/(new_score+0.0001)) < 0.01:
                break

        return rules
Example #5
 def post(self, postdata):
   logger.debug('user = %s' % self.user)
   # Build the Basic auth value from the user:pass credentials
   auth = base64.encodestring(self.user.decode('ascii', 'replace')).strip()
   logger.debug('auth = %s' % auth)
   r = urllib2.Request(url='http://twitter.com/statuses/update.xml',
                       data='status=%s' % postdata,
                       headers={'Authorization': 'Basic %s' % auth})
   try:
     h = urllib2.urlopen(r).read()
   except urllib2.HTTPError as e:
     if e.code == 401:
       logger.error('user %s cannot be logged in' % self.user.split(':')[0])
       logger.debug(e.info())
   else:
     logger.info('tweet is posted')
Example #6
    def specialize(self, rule):
        '''
        Returns a list of all specializations of 'rule'.
        '''
        is_unary = lambda p: isinstance(p, UnaryPredicate)

        def specialize_optimal_subclass(rule):
            rules = []
            eligible_preds = rule.shared_var[rule.latest_var]
            for pred in filter(is_unary, eligible_preds):
                for sub_class in self.get_subclasses(pred):
                    logger.debug('Swapping with %s' % sub_class)
                    new_rule = rule.clone_swap_with_subclass(pred, sub_class)
                    if self.can_specialize(new_rule):
                        rules.append(new_rule)
                        rules.extend(specialize_optimal_subclass(new_rule))
            return rules

        logger.debug('Specializing rule: %s' % rule)
        specializations = []
        eligible_preds = rule.shared_var[rule.latest_var]

        # Swap unary predicates with their subclasses; only swap
        # the predicates that use the latest variable
        if not self.optimal_subclass:
            for pred in filter(is_unary, eligible_preds):
                logger.debug('Predicate to swap: %s' % pred.label)
                for sub_class in self.get_subclasses(pred):
                    logger.debug('Swapping with %s' % sub_class)
                    new_rule = rule.clone_swap_with_subclass(pred, sub_class)
                    if self.can_specialize(new_rule):
                        specializations.append(new_rule)
        else:
            specializations.extend(specialize_optimal_subclass(rule))

        if self.use_negations:
            # Negate the last predicate
            for pred in filter(is_unary, eligible_preds):
                logger.debug('Predicate to negate: %s' % pred.label)
                new_rule = rule.clone_negate(pred)
                if self.can_specialize(new_rule):
                    specializations.append(new_rule)

        # Make sure we are not specializing a default rule by appending;
        # such a rule should instead be reached by the specialization step above.
        if not (len(eligible_preds) == 1 and
           (eligible_preds[0].label == self.kb.get_root().label or
           self.is_implicit_root(eligible_preds[0].label))):

            # Calculate the union of superclasses of each predicate
            supers = set()
            for pred in eligible_preds:
                supers.update(self.get_superclasses(pred.label))
                supers.add(pred)

            # Calculate the top-most left-most non-ancestor
            for lvl in sorted(self.kb.levels.keys()):

                level = self.kb.levels[lvl]
                diff = level.difference(supers)
                if diff:

                    # The next predicate to specialize with is the left-most
                    for pred in sorted(list(diff)):

                        # Appending a new predicate, the last predicate
                        # is always the producer
                        last_pred = rule.predicates[-1]
                        new_rule = rule.clone_append(pred,
                                                     producer_pred=last_pred)
                        if self.can_specialize(new_rule) and \
                           self.non_redundant(rule, new_rule):
                            specializations.append(new_rule)
                            break

        # Introduce new binary relation
        if isinstance(rule.predicates[-1], UnaryPredicate):
            specializations.extend(self.specialize_add_relation(rule))

        logger.debug('All specializations %s'
                     % [str(r) for r in specializations])

        return specializations