def assign_pops(args):
  """Assign PoPs to links, or manage the link queues in Redis.

  Three modes selected by args:
    --reset: move every processed link back onto the unassigned queue
             and clear the failure list.
    --process_failed: retry links that previously failed assignment,
             holding the popjoin mutex for the duration.
    default: assign PoPs for all currently unassigned links.
  """
  r = connection.Redis()
  if args.reset:
    log.info("Resetting processed_links")
    if r.llen("delayed_job:unassigned_links") == 0:
      # Destination list is empty, so a single atomic RENAME suffices.
      r.rename("delayed_job:processed_links", "delayed_job:unassigned_links")
    else:
      # Destination is non-empty: RENAME would clobber it, so move
      # elements one at a time until the source list is drained
      # (RPOPLPUSH returns a falsy value once the source is empty).
      while r.rpoplpush("delayed_job:processed_links",
                        "delayed_job:unassigned_links"):
        pass
    r.delete("delayed_job:unassigned_link_fails")
    return

  if args.process_failed:
    log.info("Processing failed links")
    dbkeys.mutex_popjoin().acquire()
    # BUG FIX: release the mutex even if _assign_pops or the rename
    # raises; previously an exception here left the popjoin lock held
    # forever, blocking every other consumer of the mutex.
    try:
      _assign_pops("delayed_job:unassigned_link_fails",
                   "delayed_job:unassigned_link_fails2",
                   no_add_processed=True)

      # Links that failed again were parked in *_fails2; rename them
      # back so a subsequent run can retry them.
      if r.exists("delayed_job:unassigned_link_fails2"):
        r.rename("delayed_job:unassigned_link_fails2",
                 "delayed_job:unassigned_link_fails")
    finally:
      dbkeys.mutex_popjoin().release()
    log.info("Complete")
    return

  _assign_pops("delayed_job:unassigned_links",
               "delayed_job:unassigned_link_fails")
def parse(args):
  """Parse a traceroute output file and load (or dump) its link pairs.

  Reads args.trace line by line; each line whose first token is
  "traceroute" starts a new trace, and the lines accumulated since the
  previous header form one complete trace.  Each complete trace is
  parsed into link pairs which are either printed (args.dump) or loaded
  into the database via load_link_pairs.

  Raises SilentExit after logging on file (IOError) or data (DataError)
  errors.
  """
  # We don't use this, but it configures the singleton
  connection.Redis(structures.ConnectionInfo(**args.redis))

  try:
    aslookup = preprocess.MaxMindGeoIPReader.Instance()

    with open(args.trace) as trace_in:
      tracehops = []
      seenset = set()
      numtraces = 0
      log.info("Processed trace: %s" % str(numtraces))
      for line in trace_in:
        # [:1] instead of [0] so a blank line (no tokens) is treated as
        # part of the current trace rather than raising IndexError.
        if line.split()[:1] == ["traceroute"]:
          numtraces += 1
          if numtraces % 1000 == 0:
            log.info("\r\x1b[K" + "Processed trace: %s" % str(numtraces))
          _parse_one_trace(tracehops, args, seenset, aslookup)
          tracehops = [line]
        else:
          tracehops.append(line)

      # BUG FIX: the last trace in the file has no following header line
      # to trigger its processing, so it was silently dropped; flush it
      # explicitly once the file is exhausted.
      _parse_one_trace(tracehops, args, seenset, aslookup)

  except IOError as e:
    log.error("Error: {0}".format(e))
    raise SilentExit()
  except DataError as e:
    log.error("Error: {0}".format(e))
    raise SilentExit()


def _parse_one_trace(tracehops, args, seenset, aslookup):
  """Parse one accumulated trace and dump or load its link pairs.

  Empty traces (e.g. the initial empty accumulator before the first
  header line) raise EmptyTraceError inside TraceParser and are
  silently skipped.
  """
  try:
    newpairs, removed = TraceParser.parse(tracehops)
    if removed is not None:
      log.debug("Removed %s" % removed)
  except EmptyTraceError:
    return

  # Loading would race with an in-progress pop join, so wait for the
  # popjoin mutex to clear first (dumping to stdout touches no shared
  # state, hence the dump path skips the wait).
  if not args.dump and dbkeys.mutex_popjoin().is_locked():
    log.debug("Waiting for popjoin lock")
    dbkeys.mutex_popjoin().wait()

  if args.dump:
    for pair in newpairs:
      print_unless_seen(pair[0], seenset)
      print_unless_seen(pair[1], seenset)
  else:
    load_link_pairs(newpairs, geoipdb=aslookup)
def process_delayed_joins(args):
  """Execute the queued pop-join operations under the popjoin mutex.

  Refuses to run while unassigned link failures remain (those must be
  retried first via assign_pops --process_failed).  Transient redis
  errors re-queue the failed pair for an immediate retry; progress and
  an ETA are written to stderr.  Join logging is optionally mirrored to
  the file named by args.log_joins.
  """
  log.info("Processing delayed joins")
  r = connection.Redis()
  if r.llen("delayed_job:unassigned_link_fails") > 0:
    sys.stderr.write("Have unassigned links. "
                     "Run assign_pops --process_failed\n")
    raise SilentExit()
  # Now we process any joins that need to happen. First we lock.
  error_ctr = 0
  fh = None
  dbkeys.mutex_popjoin().acquire()
  try:
    joinlist = preprocess_joins()
    inprocess = list()
    x = len(joinlist)
    # BUG FIX: the original mixed %-style and str.format
    # ("Joining pop pairs: %d".format(x)), which logged the literal
    # text "%d" instead of the count.
    log.info("Joining pop pairs: %d" % x)

    if args.log_joins:
      fh = logging.FileHandler(args.log_joins, mode='w')
      fh.setLevel(logging.DEBUG)
      fh.setFormatter(logging.Formatter('%(message)s'))
      log.addHandler(fh)

    timer = ProgressTimer(x)
    for i, to_join in enumerate(joinlist):
      inprocess.append(to_join)
      log.info("Joining %s to %s\n" % (to_join[1], to_join[0]))

      try:
        joined = join_pops(r, to_join[0], to_join[1])
      except redis_errors as e:
        log.error("Encountered error while processing: {0}. [{1}]\n"
                  .format(to_join, e))
        # NOTE(review): inserting at the front of the list we are
        # enumerating shifts every element right, so the next iteration
        # re-sees this same pair -- an immediate retry.  A persistently
        # failing join therefore loops until interrupted.  Preserved
        # as-is; confirm this is the intended retry policy.
        joinlist.insert(0, inprocess.pop())
        error_ctr += 1
        continue

      else:
        if joined is not None:
          log.info("Joined %s to %s\n" % (joined[1], joined[0]))

        # If the source pop still exists after the join, the only valid
        # explanation is that it was already joined the other way; any
        # other state is a hard failure.
        if (r.sismember(dbkeys.POP.list(), to_join[1])
           or r.exists(dbkeys.POP.members(to_join[1]))):

          if descend_target_chain(r, to_join[0]) != to_join[1]:
            raise Exception("Join Failed in ways it should not have...")
          else:
            log.info("Did not join {0} to {1} because {2} had "
                     "previously been joined to {3}\n"
                     .format(to_join[1], to_join[0], to_join[0], to_join[1]))
        timer.tick(1)

      x = len(joinlist) - i

      sys.stderr.write("{newl} {0} joins left {1}\n".format(
                       x,
                       Color.wrapformat("[{0} seconds to finish]",
                                        Color.OKBLUE, timer.eta()),
                       newl=Color.NEWL))

    r.delete('delayed_job:popjoins')
    r.delete('delayed_job:popjoins:inprocess')
    # BUG FIX: redis-py's delete() takes key names as *args; passing the
    # list returned by keys() as a single argument is wrong (and calling
    # delete() with no keys raises), so unpack it and guard for empty.
    joined_keys = r.keys(dbkeys.POP.joined("*"))
    if joined_keys:
      r.delete(*joined_keys)
    log.info("Joined pops with %d errors while processing" % error_ctr)

  except KeyboardInterrupt:
    pass
  finally:
    # BUG FIX: remove the file handler on every exit path; previously it
    # leaked onto the logger after KeyboardInterrupt or an error.
    if fh is not None:
      log.removeHandler(fh)
    dbkeys.mutex_popjoin().release()