def handle_same_pop_link(link):
  """ Handle links which should belong to the same PoP

  a. Neither has a PoP assigned
    - Assign both the same pop and set links:intra
  b. One side has a PoP assigned
    - Assign the other one the existing PoP and add links:intra
  c. Both sides have a PoP assigned
    - add to delayed_job:popjoins
  """
  r = connection.Redis()

  ip1, ip2 = link.split(":")[2:]

  with r.pipeline() as pipe:
    try:
      pipe.watch(dbkeys.ip_key(ip1))
      pipe.watch(dbkeys.ip_key(ip2))
      pop1 = dbkeys.get_pop(ip1, pipe=pipe)
      pop2 = dbkeys.get_pop(ip2, pipe=pipe)
      pipe.multi()

      if pop1 is None and pop2 is None:
        # Case a: neither endpoint has a PoP yet; allocate one and assign
        # it to both IPs.
        pop1 = dbkeys.setpopnumber(dbkeys.mutex_popnum(), ip1, pipe=pipe)
        pipe.hset(dbkeys.ip_key(ip2), 'pop', pop1)
        pipe.sadd(dbkeys.POP.members(pop1), ip2)

        store_link(r, (ip1, ip2), pop1, pipe=pipe)
      elif pop1 is not None and pop2 is not None:
        # Case c: both endpoints already have PoPs; queue the pair on
        # delayed_job:popjoins unless it has been queued before.
        if not r.sismember('delayed_job:popjoins:known', (pop1, pop2)):
          pipe.lpush("delayed_job:popjoins", (pop1, pop2))
          pipe.sadd('delayed_job:popjoins:known', (pop1, pop2))
      else:
        # Case b: exactly one endpoint has a PoP; copy it to the other
        # endpoint and record the link.
        if pop1 is None:
          knownpop = pop2
          pipe.hset(dbkeys.ip_key(ip1), 'pop', knownpop)
          pipe.sadd(dbkeys.POP.members(knownpop), ip1)
        else:
          knownpop = pop1
          pipe.hset(dbkeys.ip_key(ip2), 'pop', knownpop)
          pipe.sadd(dbkeys.POP.members(knownpop), ip2)
        store_link(r, (ip1, ip2), knownpop, pipe=pipe)

      pipe.execute()
      return True
    except redis.WatchError:
      return False
    finally:
      pipe.reset()
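

# Illustrative sketch, not part of the original module: the handler above
# returns False when the WATCHed keys change under it (redis.WatchError),
# so a caller is expected to retry the optimistic transaction. The retry
# budget and this helper's name are assumptions for illustration only.
def _example_retry_same_pop(link, attempts=5):
  for _ in xrange(attempts):
    if handle_same_pop_link(link):
      return True
  return False

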
def handle_cross_pop_link(link):
  """ Handle a situation where the two IPs on either end of a
  link should be in different PoPs.

  - Neither has a PoP assigned
    - Assign two new PoPs, and create links:inter
  - One side has a PoP assigned
    - Assign 1 new PoP and create links:inter
  -  Both sides have a PoP assigned
    - Add it to the links:inter
  """
  r = connection.Redis()

  ip1, ip2 = link.split(":")[2:]

  with r.pipeline() as pipe:
    try:
      pipe.watch(dbkeys.ip_key(ip1))
      pipe.watch(dbkeys.ip_key(ip2))
      pop1 = dbkeys.get_pop(ip1, pipe=pipe)
      pop2 = dbkeys.get_pop(ip2, pipe=pipe)
      pipe.multi()

      if pop1 is None and pop2 is None:
        # Neither endpoint has a PoP yet; allocate a new PoP for each side.
        pop1 = dbkeys.setpopnumber(dbkeys.mutex_popnum(), ip1, pipe=pipe)
        pop2 = dbkeys.setpopnumber(dbkeys.mutex_popnum(), ip2, pipe=pipe)

      elif pop1 is not None and pop2 is not None:
        # Both endpoints already have PoPs; nothing to assign, the
        # inter-PoP link is simply recorded below.
        pass
      else:
        # Exactly one endpoint has a PoP; allocate a new PoP for the
        # unassigned side.
        if pop1 is None:
          pop1 = dbkeys.setpopnumber(dbkeys.mutex_popnum(), ip1, pipe=pipe)
        else:
          pop2 = dbkeys.setpopnumber(dbkeys.mutex_popnum(), ip2, pipe=pipe)
      store_link(r, (ip1, ip2), pop1, pop2, pipe=pipe)

      pipe.execute()
      return True
    except redis.WatchError:
      return False
    finally:
      pipe.reset()
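

# Illustrative sketch, not part of the original module: both handlers above
# read the endpoint IPs from link.split(":")[2:], i.e. they expect a colon
# delimited key whose last two fields are the IP addresses. This hypothetical
# helper only shows that parsing contract; the "links:intra"/"links:inter"
# prefixes are borrowed from the docstrings and are an assumption, not a
# documented key schema.
def _example_link_key(kind, ip1, ip2):
  assert kind in ("intra", "inter")
  return "links:{0}:{1}:{2}".format(kind, ip1, ip2)

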
def add_alexa_destinations(vertex_list, linklist, count):
    """
    Add potential destination endpoints based on the top 10000 destinations
    """
    r = connection.Redis()
    aslookup = preprocess.MaxMindGeoIPReader.Instance()
    attached = 0
    failed = 0
    pops = set()
    with pkg_resources.resource_stream(
            'inettopology_popmap.resources',
            'alexa_top_dests.txt') as destlist:

      for line in destlist:
        if line[0] == '#':
          continue
        ip, url, matched_ip, matched_bits = line.split()

        db_ip_pop = dbkeys.get_pop(matched_ip)

        if db_ip_pop is None:
          log.debug("Couldn't attach {0} with ip {1}. No matching IP found"
                    .format(url, matched_ip))
          failed += 1
          continue

        nodeid = "dest_{0}".format(ip.replace('.', '_'))
        if nodeid in vertex_list:
          continue  # Don't add the same url twice

        countries = r.smembers(dbkeys.POP.countries(db_ip_pop))
        if len(countries) == 1:
          country = countries.pop()
        else:
          country = aslookup.lookup_country_codes(matched_ip)[0]

        pops.add(db_ip_pop)
        vertex_list.add_vertex(nodeid,
                               nodeid=nodeid,
                               nodetype="dest",
                               url=url,
                               ip=ip,
                               asn=r.get(dbkeys.POP.asn(db_ip_pop)),
                               country=country)

        linkkey = dbkeys.Link.intralink(db_ip_pop)

        # Each member of the intra-PoP link set is a stringified
        # (ip1, ip2) tuple; collect every recorded delay sample for it.
        linkdelays = [
            delay
            for edge in r.smembers(linkkey)
            for delay in r.smembers(dbkeys.delay_key(*eval(edge)))]

        try:
          latency = util.decile_transform(linkdelays)
        except util.EmptyListError:
          # No recorded delays for this PoP: fall back to a flat decile
          # profile.
          latency = [5 for x in xrange(10)]

        linklist.append(
            EdgeLink(nodeid,
                     db_ip_pop,
                     {'latency': latency,
                      'med_latency': latency[len(latency)/2]}))

        attached += 1

        if attached % 10 == 0:
          log.info("Attached {0} destinations. Couldn't attach {1}"
                   .format(attached, failed))

        if attached >= count:
          break
    return (attached, len(pops))
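

# Illustrative sketch, not part of the original module:
# add_alexa_destinations() only relies on the vertex container supporting
# "nodeid in vertex_list" and .add_vertex(name, **attributes), and on
# linklist supporting .append(). The real graph object used by the pipeline
# is not shown in this section, so this minimal stand-in and the call below
# are assumptions for illustration only.
class _ExampleVertexList(object):
  def __init__(self):
    self._vertices = {}

  def __contains__(self, nodeid):
    return nodeid in self._vertices

  def add_vertex(self, name, **attrs):
    self._vertices[name] = attrs


def _example_attach_destinations(count=100):
  vertices = _ExampleVertexList()
  links = []
  attached, pop_count = add_alexa_destinations(vertices, links, count)
  log.info("Attached {0} destinations across {1} PoPs"
           .format(attached, pop_count))
  return vertices, links

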
def assert_pops_ok(r, *ips):
  """ Check that every PoP referenced by the given IPs exists in 'poplist'.

  Raises NoPopExistsError if an IP is assigned a PoP that is not in the
  'poplist' set; IPs with no PoP assigned are ignored. (Usage sketch below.)
  """
  for ip in ips:
    pop = dbkeys.get_pop(ip)
    if pop is not None and not r.sismember('poplist', pop):
      raise NoPopExistsError(
          "ip %s has pop of %s, which doesn't exist" % (ip, pop))