Example 1
    def get_sites_info(self, sliver_tools, address_family):
        """Returns info about the sites.

        This data is used to build the markers on the map. In particular, there
        is a marker for each city and an info window that pops up when clicking
        on a marker showing information about the sites.

        Args:
            sliver_tools: A list of sliver_tools.
            address_family: A string specifying the address family (ipv4, ipv6).

        Returns:
            A dict (key=city, value=[site_info, site_info, ...])
            mapping each city to the list of sites deployed in that
            city. Each 'site_info' element is a dict containing all
            relevant information about the site (e.g., site_id, city,
            country, latitude, longitude, ...) plus a list of
            sliver_tool_info elements with information and status of
            the slivers. Each sliver_tool_info contains: slice_id,
            tool_id, server_id, status (status_ipv4 or status_ipv6,
            depending on the 'address_family' argument) and the
            timestamp of the last update.
        """
        sites = model.Site.gql('ORDER BY site_id DESC')
        site_dict = {}
        sites_per_city = {}
        for site in sites:
            site_info = {}
            site_info['site_id'] = site.site_id
            site_info['city'] = site.city
            site_info['country'] = site.country
            site_info['latitude'] = site.latitude
            site_info['longitude'] = site.longitude
            site_info['sliver_tools'] = []
            site_dict[site.site_id] = site_info
            sites_per_city[site.city] = []

        # Add sliver tools info to the sites.
        for sliver_tool in sliver_tools:
            if not production_check.is_production_slice(sliver_tool.fqdn):
                continue
            sliver_tool_info = {}
            sliver_tool_info['slice_id'] = sliver_tool.slice_id
            sliver_tool_info['tool_id'] = sliver_tool.tool_id
            sliver_tool_info['server_id'] = sliver_tool.server_id
            if address_family == 'ipv4':
                sliver_tool_info['status'] = sliver_tool.status_ipv4
            else:
                sliver_tool_info['status'] = sliver_tool.status_ipv6

            sliver_tool_info['timestamp'] = sliver_tool.when.strftime(
                '%Y-%m-%d %H:%M:%S')
            site_dict[sliver_tool.site_id]['sliver_tools'].append(
                sliver_tool_info)

        for item in site_dict:
            city = site_dict[item]['city']
            sites_per_city[city].append(site_dict[item])

        return sites_per_city
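
The returned value is an ordinary dict of lists, so callers can walk it directly. A minimal usage sketch (the handler instance and the sliver_tools list are assumed to be available; the marker structure is hypothetical):

sites_per_city = handler.get_sites_info(sliver_tools, 'ipv4')
markers = []
for city, site_infos in sites_per_city.items():
    for site_info in site_infos:
        markers.append({
            'city': city,
            'position': (site_info['latitude'], site_info['longitude']),
            'statuses': [st['status'] for st in site_info['sliver_tools']],
        })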
Example 2
    def testIsProductionSlice(self):
        self.assertTrue(pc.is_production_slice(
            'ndt.iupui.mlab3.mad01.measurement-lab.org'))
        self.assertTrue(pc.is_production_slice(
            '1.michigan.mlab1.hnd01.measurement-lab.org'))
        self.assertTrue(pc.is_production_slice(
            'npad.iupui.mlab1.dfw05.measurement-lab.org'))
        self.assertTrue(pc.is_production_slice(
            'ooni.mlab.mlab1.ams02.measurement-lab.org'))

        self.assertFalse(pc.is_production_slice(
            'ndt.iupui.mlab4.prg01.measurement-lab.org'),
            'mlab4 servers are not production slices')
        self.assertFalse(pc.is_production_slice(
            'ooni.mlab.mlab1.ams02t.measurement-lab.org'),
            'sites with t suffix do not have production slices')
        self.assertFalse(pc.is_production_slice('www.measurementlab.net'))
        self.assertFalse(pc.is_production_slice('www.measurement-lab.org'))
        self.assertFalse(pc.is_production_slice(''))
        self.assertFalse(pc.is_production_slice('.'))
Example 3
    def get(self):
        """Triggers the update handler.

        Updates sliver tool IP addresses from Nagios.
        """
        lines = []
        try:
            lines = urllib2.urlopen(self.IP_LIST_URL).read().strip('\n').split(
                '\n')
        except urllib2.HTTPError:
            # TODO(claudiu) Notify(email) when this happens.
            logging.error('Cannot open %s.', self.IP_LIST_URL)
            return util.send_not_found(self)

        sliver_tool_list = {}
        for line in lines:
            # Expected format: "FQDN,IPv4,IPv6" (IPv6 can be an empty string).
            line_fields = line.split(',')
            if len(line_fields) != 3:
                logging.error('Line does not have 3 fields: %s.', line)
                continue
            fqdn = line_fields[0]
            ipv4 = line_fields[1]
            ipv6 = line_fields[2]

            if not production_check.is_production_slice(fqdn):
                continue

            sliver_tool_gql = model.SliverTool.gql('WHERE fqdn=:fqdn',
                                                   fqdn=fqdn)
            # FQDN is not necessarily unique across tools.
            for sliver_tool in sliver_tool_gql.run(
                    batch_size=constants.GQL_BATCH_SIZE):
                # case 1) Sliver tool has not changed. Nothing to do.
                if (sliver_tool != None and sliver_tool.sliver_ipv4 == ipv4 and
                        sliver_tool.sliver_ipv6 == ipv6):
                    pass
                # case 2) Sliver tool has changed.
                else:
                    # case 2.1) Sliver tool does not exist in datastore. Initialize
                    #     sliver if the corresponding tool exists in the Tool table
                    #     and the corresponding site exists in the Site table. This
                    #     case occurs when a new tool has been added after the last
                    #     IPUpdateHandler ran. The sliver tool will actually be
                    #     written to datastore at the next step.
                    if sliver_tool == None:
                        logging.warning('sliver_tool %s is not in datastore.',
                                        fqdn)
                        slice_id, site_id, server_id = \
                            model.get_slice_site_server_ids(fqdn)
                        if slice_id is None or site_id is None or server_id is None:
                            logging.info('Invalid sliver fqdn %s.', fqdn)
                            continue
                        tool = model.Tool.gql('WHERE slice_id=:slice_id',
                                              slice_id=slice_id).get()
                        if tool == None:
                            logging.info('mlab-ns does not support slice %s.',
                                         slice_id)
                            continue
                        site = model.Site.gql('WHERE site_id=:site_id',
                                              site_id=site_id).get()
                        if site == None:
                            logging.info('mlab-ns does not support site %s.',
                                         site_id)
                            continue
                        sliver_tool = self.initialize_sliver_tool(
                            tool, site, server_id, fqdn)

                    # case 2.2) Sliver tool exists in datastore.
                    if ipv4 != None:
                        sliver_tool.sliver_ipv4 = ipv4
                    else:
                        sliver_tool.sliver_ipv4 = message.NO_IP_ADDRESS
                    if ipv6 != None:
                        sliver_tool.sliver_ipv6 = ipv6
                    else:
                        sliver_tool.sliver_ipv6 = message.NO_IP_ADDRESS

                    try:
                        sliver_tool.put()
                        logging.info(
                            'Succeeded to write IPs of %s (%s, %s) in datastore.',
                            fqdn, ipv4, ipv6)
                    except db.TransactionFailedError:
                        logging.error(
                            'Failed to write IPs of %s (%s, %s) in datastore.',
                            fqdn, ipv4, ipv6)
                    continue

                if sliver_tool.tool_id not in sliver_tool_list:
                    sliver_tool_list[sliver_tool.tool_id] = []
                sliver_tool_list[sliver_tool.tool_id].append(sliver_tool)
                logging.info('sliver %s to be added to memcache',
                             sliver_tool.fqdn)

        # Update memcache
        # Never set the memcache to an empty list since it's more likely that
        # this is a Nagios failure.
        if sliver_tool_list:
            for tool_id in sliver_tool_list.keys():
                if not memcache.set(
                        tool_id,
                        sliver_tool_list[tool_id],
                        namespace=constants.MEMCACHE_NAMESPACE_TOOLS):
                    logging.error(
                        'Failed to update sliver IP addresses in memcache.')

        return util.send_success(self)
Example 4
    def update(self):
        """Triggers the update handler.

        Updates sliver tool IP addresses.
        """
        try:
            project = app_identity.get_application_id()
            if project == 'mlab-ns':
                # TODO: eliminate project translation.
                host_ips_url = self.DEFAULT_IP_LIST_URL
            else:
                host_ips_url = self.TEMPLATE_IP_LIST_URL.format(project=project)
        except AttributeError:
            logging.error('Cannot get project name.')
            return util.send_not_found(self)

        try:
            raw_json = urllib2.urlopen(host_ips_url).read()
            logging.info('Fetched hostnames.json from: %s', host_ips_url)
        except urllib2.HTTPError:
            # TODO(claudiu) Notify(email) when this happens.
            logging.error('Cannot open %s.', host_ips_url)
            return util.send_not_found(self)

        try:
            rows = json.loads(raw_json)
        except (TypeError, ValueError) as e:
            logging.error('Failed to parse raw json from %s: %s', host_ips_url,
                          e)
            return util.send_not_found(self)

        # Fetch all data that we are going to need from the datastore up front.
        sites = list(model.Site.all().fetch(limit=None))
        tools = list(model.Tool.all().fetch(limit=None))
        slivertools = list(model.SliverTool.all().fetch(limit=None))

        for row in rows:
            # Expected keys: "hostname,ipv4,ipv6" (ipv6 can be an empty string).
            fqdn = row['hostname']
            ipv4 = row['ipv4']
            ipv6 = row['ipv6']

            if not production_check.is_production_slice(fqdn):
                continue

            # Gather some information about this site which will be used to
            # determine if we need to do anything with this site/sliver.
            slice_id, site_id, server_id = \
                model.get_slice_site_server_ids(fqdn)

            # Make sure this is a valid slice FQDN, and not a mistake or just a
            # node name.
            if slice_id is None or site_id is None or server_id is None:
                continue

            # If mlab-ns does not support this site, then skip it.
            site = list(filter(lambda s: s.site_id == site_id, sites))
            if len(site) == 0:
                logging.info('mlab-ns does not support site %s.', site_id)
                continue
            else:
                site = site[0]

            # If mlab-ns does not serve/support this slice, then skip it. Note:
            # a given slice_id might have multiple tools (e.g., iupui_ndt has
            # both 'ndt' and 'ndt_ssl' tools).
            slice_tools = list(filter(lambda t: t.slice_id == slice_id, tools))

            if len(slice_tools) == 0:
                continue

            for slice_tool in slice_tools:
                # See if this sliver_tool already exists in the datastore.
                slivertool = list(filter(
                    lambda st: st.fqdn == fqdn and st.tool_id == slice_tool.tool_id,
                    slivertools))

                # Check to see if the sliver_tool already exists in the
                # datastore. If not, add it to the datastore.
                if len(slivertool) == 1:
                    sliver_tool = slivertool[0]
                elif len(slivertool) == 0:
                    logging.info(
                        'For tool %s, fqdn %s is not in datastore.  Adding it.',
                        slice_tool.tool_id, fqdn)
                    sliver_tool = self.initialize_sliver_tool(slice_tool, site,
                                                              server_id, fqdn)
                else:
                    logging.error(
                        'Error, or too many sliver_tools returned for {}:{}.'.format(
                            slice_tool.tool_id, fqdn))
                    continue

                updated_sliver_tool = self.set_sliver_tool(
                    sliver_tool, ipv4, ipv6, site.roundrobin)

                # Update datastore if the SliverTool got updated.
                if updated_sliver_tool:
                    logging.info('Updating IP info for fqdn: %s', fqdn)
                    self.put_sliver_tool(updated_sliver_tool)

        return
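
For reference, the handler above only needs hostnames.json to decode to a list of objects with 'hostname', 'ipv4' and 'ipv6' keys, where 'ipv6' may be an empty string. An illustrative payload (the hostnames, the site 'abc01' and all addresses are made up):

sample_rows = [
    {
        'hostname': 'ndt-iupui-mlab1-abc01.mlab-oti.measurement-lab.org',
        'ipv4': '192.0.2.10',
        'ipv6': '2001:db8::10',
    },
    {
        'hostname': 'ndt-iupui-mlab2-abc01.mlab-oti.measurement-lab.org',
        'ipv4': '192.0.2.20',
        'ipv6': '',  # ipv6 may be an empty string.
    },
]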
Example 5
    def get(self):
        """Triggers the update handler.

        Updates sliver tool IP addresses from Nagios.
        """
        lines = []
        try:
            lines = urllib2.urlopen(self.IP_LIST_URL).read().strip('\n').split(
                '\n')
        except urllib2.HTTPError:
            # TODO(claudiu) Notify(email) when this happens.
            logging.error('Cannot open %s.', self.IP_LIST_URL)
            return util.send_not_found(self)

        sliver_tool_list = {}
        for line in lines:
            # Expected format: "FQDN,IPv4,IPv6" (IPv6 can be an empty string).
            line_fields = line.split(',')
            if len(line_fields) != 3:
                logging.error('Line does not have 3 fields: %s.', line)
                continue
            fqdn = line_fields[0]
            ipv4 = line_fields[1]
            ipv6 = line_fields[2]

            if not production_check.is_production_slice(fqdn):
                continue

            # Gather some information about this site which will be used to
            # determine if we need to do anything with this site/sliver.
            slice_id, site_id, server_id = \
                model.get_slice_site_server_ids(fqdn)

            # Make sure this is a valid slice FQDN, and not a mistake or just a
            # node name.
            if slice_id is None or site_id is None or server_id is None:
                continue

            # If mlab-ns does not support this site, then skip it.
            site = model.Site.gql('WHERE site_id=:site_id',
                                  site_id=site_id).get()
            if site == None:
                logging.info('mlab-ns does not support site %s.', site_id)
                continue

            # If mlab-ns does not serve/support this slice, then skip it. Note:
            # a given slice_id might have multiple tools (e.g., iupui_ndt has
            # both 'ndt' and 'ndt_ssl' tools).
            tools = model.Tool.gql('WHERE slice_id=:slice_id',
                                   slice_id=slice_id)
            if tools.count() == 0:
                continue

            for tool in tools.run():
                # Query the datastore to see if this sliver_tool exists there.
                sliver_tool_gql = model.SliverTool.gql(
                    'WHERE fqdn=:fqdn AND tool_id=:tool_id',
                    fqdn=fqdn,
                    tool_id=tool.tool_id)

                # Check to see if the sliver_tool already exists in the
                # datastore. If not, add it to the datastore.
                if sliver_tool_gql.count() == 1:
                    sliver_tool = sliver_tool_gql.get(
                        batch_size=constants.GQL_BATCH_SIZE)
                elif sliver_tool_gql.count() == 0:
                    logging.info(
                        'For tool %s, fqdn %s is not in datastore.  Adding it.',
                        tool.tool_id, fqdn)
                    sliver_tool = self.initialize_sliver_tool(tool, site,
                                                              server_id, fqdn)
                else:
                    logging.error(
                        'Error, or too many sliver_tools returned for {}:{}.'.format(
                            tool.tool_id, fqdn))
                    continue

                updated_sliver_tool = self.set_sliver_tool_ips(sliver_tool,
                                                               ipv4, ipv6)
                # If the sliver_tool got updated IPs then write the change to
                # the datastore, else save the performance hit of writing a
                # record with identical data.
                if updated_sliver_tool:
                    self.put_sliver_tool(updated_sliver_tool)

                if tool.tool_id not in sliver_tool_list:
                    sliver_tool_list[tool.tool_id] = []
                sliver_tool_list[tool.tool_id].append(sliver_tool)

        # Update memcache.  Never set the memcache to an empty list since it's
        # more likely that this is a Nagios failure.
        if sliver_tool_list:
            for tool_id in sliver_tool_list.keys():
                if not memcache.set(
                        tool_id,
                        sliver_tool_list[tool_id],
                        namespace=constants.MEMCACHE_NAMESPACE_TOOLS):
                    logging.error(
                        'Failed to update sliver IP addresses in memcache.')

        return util.send_success(self)
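
The comment above implies that set_sliver_tool_ips returns the entity only when an address actually changed, so unchanged records are never rewritten. A minimal sketch of that comparison, written as an assumption about the helper's contract rather than the repository's actual implementation (message.NO_IP_ADDRESS is the sentinel used by the other handlers in this listing):

def set_sliver_tool_ips_sketch(sliver_tool, ipv4, ipv6):
    # Normalize empty or missing addresses to the sentinel value.
    new_ipv4 = ipv4 or message.NO_IP_ADDRESS
    new_ipv6 = ipv6 or message.NO_IP_ADDRESS
    if (sliver_tool.sliver_ipv4 == new_ipv4 and
            sliver_tool.sliver_ipv6 == new_ipv6):
        return None  # Nothing changed; skip the datastore write.
    sliver_tool.sliver_ipv4 = new_ipv4
    sliver_tool.sliver_ipv6 = new_ipv6
    return sliver_tool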
Example 6
    def update(self):
        """Triggers the update handler.

        Updates sliver tool IP addresses.
        """
        lines = []
        try:
            project = app_identity.get_application_id()
            if project == 'mlab-nstesting':
                host_ips_url = self.TESTING_IP_LIST_URL
            else:
                host_ips_url = self.IP_LIST_URL
        except AttributeError:
            logging.error('Cannot get project name.')
            return util.send_not_found(self)

        try:
            lines = urllib2.urlopen(host_ips_url).read().strip('\n').split(
                '\n')
            logging.info('Fetched mlab-host-ips.txt from: %s', host_ips_url)
        except urllib2.HTTPError:
            # TODO(claudiu) Notify(email) when this happens.
            logging.error('Cannot open %s.', host_ips_url)
            return util.send_not_found(self)

        # Fetch all data that we are going to need from the datastore up front.
        sites = list(model.Site.all().fetch(limit=None))
        tools = list(model.Tool.all().fetch(limit=None))
        slivertools = list(model.SliverTool.all().fetch(limit=None))

        for line in lines:
            # Expected format: "FQDN,IPv4,IPv6" (IPv6 can be an empty string).
            line_fields = line.split(',')
            if len(line_fields) != 3:
                logging.error('Line does not have 3 fields: %s.', line)
                continue
            fqdn = line_fields[0]
            ipv4 = line_fields[1]
            ipv6 = line_fields[2]

            if not production_check.is_production_slice(fqdn):
                continue

            # Gather some information about this site which will be used to
            # determine if we need to do anything with this site/sliver.
            slice_id, site_id, server_id = \
                model.get_slice_site_server_ids(fqdn)

            # Make sure this is a valid slice FQDN, and not a mistake or just a
            # node name.
            if slice_id is None or site_id is None or server_id is None:
                continue

            # If mlab-ns does not support this site, then skip it.
            site = list(filter(lambda s: s.site_id == site_id, sites))
            if len(site) == 0:
                logging.info('mlab-ns does not support site %s.', site_id)
                continue
            else:
                site = site[0]

            # If mlab-ns does not serve/support this slice, then skip it. Note:
            # a given slice_id might have multiple tools (e.g., iupui_ndt has
            # both 'ndt' and 'ndt_ssl' tools).
            slice_tools = list(filter(lambda t: t.slice_id == slice_id, tools))

            if len(slice_tools) == 0:
                continue

            for slice_tool in slice_tools:
                # See if this sliver_tool already exists in the datastore.
                slivertool = list(filter(
                    lambda st: st.fqdn == fqdn and st.tool_id == slice_tool.tool_id,
                    slivertools))

                # Check to see if the sliver_tool already exists in the
                # datastore. If not, add it to the datastore.
                if len(slivertool) == 1:
                    sliver_tool = slivertool[0]
                elif len(slivertool) == 0:
                    logging.info(
                        'For tool %s, fqdn %s is not in datastore.  Adding it.',
                        slice_tool.tool_id, fqdn)
                    sliver_tool = self.initialize_sliver_tool(
                        slice_tool, site, server_id, fqdn)
                else:
                    logging.error(
                        'Error, or too many sliver_tools returned for {}:{}.'.
                        format(slice_tool.tool_id, fqdn))
                    continue

                updated_sliver_tool = self.set_sliver_tool(
                    sliver_tool, ipv4, ipv6, site.roundrobin)

                # Update datastore if the SliverTool got updated.
                if updated_sliver_tool:
                    logging.info('Updating IP info for fqdn: %s', fqdn)
                    self.put_sliver_tool(updated_sliver_tool)

        return
Example 7
    def testIsProductionSlice(self):
        # Production checks
        os.environ['MACHINE_REGEX'] = '^mlab[1-3]$'
        os.environ['SITE_REGEX'] = '^[a-z]{3}[0-9c]{2}$'

        self.assertTrue(
            pc.is_production_slice(
                'ndt.iupui.mlab3.mad01.measurement-lab.org'))
        self.assertTrue(
            pc.is_production_slice(
                'neubot.mlab.mlab1.dfw0c.measurement-lab.org'))
        self.assertTrue(
            pc.is_production_slice(
                'wehe.mlab.mlab1.dfw02.measurement-lab.org'))
        self.assertTrue(
            pc.is_production_slice(
                'ndt-iupui-mlab1-dfw03.mlab-oti.measurement-lab.org'))
        self.assertTrue(
            pc.is_production_slice('mlab1-dfw03.mlab-oti.measurement-lab.org'))
        self.assertTrue(
            pc.is_production_slice('mlab1.dfw03.mlab-oti.measurement-lab.org'))
        self.assertTrue(
            pc.is_production_slice('mlab3.dfw02.measurement-lab.org'))
        self.assertTrue(
            pc.is_production_slice('mlab3-dfw02.measurement-lab.org'))

        self.assertFalse(
            pc.is_production_slice(
                'ndt.iupui.mlab4.prg01.measurement-lab.org'),
            'mlab4 servers are not production slices')
        self.assertFalse(
            pc.is_production_slice('ndt.mlab.mlab1.ams0t.measurement-lab.org'),
            'sites with t suffix do not have production slices')
        self.assertFalse(
            pc.is_production_slice(
                'ndt-iupui-mlab4-prg01.mlab-staging.measurement-lab.org'),
            'mlab4 servers are not production slices')
        self.assertFalse(
            pc.is_production_slice(
                'mlab4-abc01.mlab-staging.measurement-lab.org'),
            'mlab4 servers are not production slices')

        # Staging checks
        os.environ['MACHINE_REGEX'] = '^mlab4$'
        os.environ['SITE_REGEX'] = '^[a-z]{3}[0-9c]{2}$'

        self.assertTrue(
            pc.is_production_slice(
                'ndt.iupui.mlab4.mad01.measurement-lab.org'))
        self.assertTrue(
            pc.is_production_slice(
                'neubot.mlab.mlab4.dfw0c.measurement-lab.org'))
        self.assertTrue(
            pc.is_production_slice(
                'wehe-mlab4-dfw02.mlab-staging.measurement-lab.org'))
        self.assertTrue(
            pc.is_production_slice('mlab.mlab4.dfw0c.measurement-lab.org'))
        self.assertTrue(
            pc.is_production_slice(
                'mlab4-dfw02.mlab-staging.measurement-lab.org'))

        self.assertFalse(
            pc.is_production_slice('mlab3prg01.measurement-lab.org'),
            'Missing separator between machine and site.')
        self.assertFalse(
            pc.is_production_slice(
                'ndt-iupui-abc-mlab3.mlab-oti.prg01.measurement-lab.org'),
            'Too many experiment-org parts.')
        self.assertFalse(
            pc.is_production_slice(
                'ndt.iupui.mlab3.prg01.measurement-lab.org'),
            'mlab3 servers are not staging slices')
        self.assertFalse(
            pc.is_production_slice('ndt.mlab.mlab4.ams0t.measurement-lab.org'),
            'sites with t suffix do not have staging slices')

        # Sandbox checks
        os.environ['MACHINE_REGEX'] = '^mlab[1-4]$'
        os.environ['SITE_REGEX'] = '^[a-z]{3}[0-9]t$'

        self.assertTrue(
            pc.is_production_slice(
                'ndt.iupui.mlab4.mad0t.measurement-lab.org'))
        self.assertTrue(
            pc.is_production_slice(
                'neubot.mlab.mlab1.dfw0t.measurement-lab.org'))
        self.assertTrue(
            pc.is_production_slice(
                'wehe-mlab4-dfw0t.mlab-staging.measurement-lab.org'))
        self.assertTrue(
            pc.is_production_slice('mlab1.dfw0t.measurement-lab.org'))
        self.assertTrue(
            pc.is_production_slice(
                'mlab4-dfw0t.mlab-staging.measurement-lab.org'))

        self.assertFalse(
            pc.is_production_slice(
                'ndt.iupui.mlab3.prg01.measurement-lab.org'),
            'Sites not ending in t are not "production" sandbox sites')

        # Malformed host names. Project doesn't matter.
        self.assertFalse(
            pc.is_production_slice(
                'wehe-mlab4-prg01.mlab-otimeasurement-lab.org'),
            'Missing dot between project and domain')
        self.assertFalse(pc.is_production_slice('www.measurementlab.net'))
        self.assertFalse(pc.is_production_slice('www.measurement-lab.org'))
        self.assertFalse(pc.is_production_slice(''))
        self.assertFalse(pc.is_production_slice('.'))
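
The environment variables are what drive the check: the test swaps MACHINE_REGEX and SITE_REGEX to move the same FQDNs between production, staging and sandbox. A minimal sketch of that gating logic applied to already-extracted machine and site labels (the real production_check module parses full FQDNs and handles the naming variants exercised above, which this sketch does not):

import os
import re


def _looks_like_production(machine, site):
    # Illustrative only: shows how MACHINE_REGEX and SITE_REGEX gate the
    # decision once the machine and site labels have been extracted.
    machine_regex = os.environ.get('MACHINE_REGEX', '^mlab[1-3]$')
    site_regex = os.environ.get('SITE_REGEX', '^[a-z]{3}[0-9c]{2}$')
    return (re.match(machine_regex, machine) is not None and
            re.match(site_regex, site) is not None)


# With the "production" settings from the test above:
os.environ['MACHINE_REGEX'] = '^mlab[1-3]$'
os.environ['SITE_REGEX'] = '^[a-z]{3}[0-9c]{2}$'
assert _looks_like_production('mlab3', 'mad01')
assert not _looks_like_production('mlab4', 'prg01')  # staging machine
assert not _looks_like_production('mlab1', 'ams0t')  # sandbox-style site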
Example 8
    def update(self):
        """Triggers the update handler.

        Updates sliver tool IP addresses.
        """
        lines = []
        try:
            project = app_identity.get_application_id()
            if project == 'mlab-ns':
                # TODO: eliminate project translation.
                host_ips_url = self.DEFAULT_IP_LIST_URL
            else:
                host_ips_url = self.TEMPLATE_IP_LIST_URL.format(project=project)
        except AttributeError:
            logging.error('Cannot get project name.')
            return util.send_not_found(self)

        try:
            lines = urllib2.urlopen(host_ips_url).read().strip('\n').split('\n')
            logging.info('Fetched mlab-host-ips.txt from: %s', host_ips_url)
        except urllib2.HTTPError:
            # TODO(claudiu) Notify(email) when this happens.
            logging.error('Cannot open %s.', host_ips_url)
            return util.send_not_found(self)

        # Fetch all data that we are going to need from the datastore up front.
        sites = list(model.Site.all().fetch(limit=None))
        tools = list(model.Tool.all().fetch(limit=None))
        slivertools = list(model.SliverTool.all().fetch(limit=None))

        for line in lines:
            # Expected format: "FQDN,IPv4,IPv6" (IPv6 can be an empty string).
            line_fields = line.split(',')
            if len(line_fields) != 3:
                logging.error('Line does not have 3 fields: %s.', line)
                continue
            fqdn = line_fields[0]
            ipv4 = line_fields[1]
            ipv6 = line_fields[2]

            if not production_check.is_production_slice(fqdn):
                continue

            # Gather some information about this site which will be used to
            # determine if we need to do anything with this site/sliver.
            slice_id, site_id, server_id = \
                model.get_slice_site_server_ids(fqdn)

            # Make sure this is a valid slice FQDN, and not a mistake or just a
            # node name.
            if slice_id is None or site_id is None or server_id is None:
                continue

            # If mlab-ns does not support this site, then skip it.
            site = list(filter(lambda s: s.site_id == site_id, sites))
            if len(site) == 0:
                logging.info('mlab-ns does not support site %s.', site_id)
                continue
            else:
                site = site[0]

            # If mlab-ns does not serve/support this slice, then skip it. Note:
            # a given slice_id might have multiple tools (e.g., iupui_ndt has
            # both 'ndt' and 'ndt_ssl' tools).
            slice_tools = list(filter(lambda t: t.slice_id == slice_id, tools))

            if len(slice_tools) == 0:
                continue

            for slice_tool in slice_tools:
                # See if this sliver_tool already exists in the datastore.
                slivertool = list(filter(
                    lambda st: st.fqdn == fqdn and st.tool_id == slice_tool.tool_id,
                    slivertools))

                # Check to see if the sliver_tool already exists in the
                # datastore. If not, add it to the datastore.
                if len(slivertool) == 1:
                    sliver_tool = slivertool[0]
                elif len(slivertool) == 0:
                    logging.info(
                        'For tool %s, fqdn %s is not in datastore.  Adding it.',
                        slice_tool.tool_id, fqdn)
                    sliver_tool = self.initialize_sliver_tool(slice_tool, site,
                                                              server_id, fqdn)
                else:
                    logging.error(
                        'Error, or too many sliver_tools returned for {}:{}.'.format(
                            slice_tool.tool_id, fqdn))
                    continue

                updated_sliver_tool = self.set_sliver_tool(
                    sliver_tool, ipv4, ipv6, site.roundrobin)

                # Update datastore if the SliverTool got updated.
                if updated_sliver_tool:
                    logging.info('Updating IP info for fqdn: %s', fqdn)
                    self.put_sliver_tool(updated_sliver_tool)

        return
Example 9
    def get(self):
        """Triggers the update handler.

        Updates sliver tool IP addresses from Nagios.
        """
        ip = {}
        lines = []
        try:
            lines = urllib2.urlopen(
                self.IP_LIST_URL).read().strip('\n').split('\n')
        except urllib2.HTTPError:
            # TODO(claudiu) Notify(email) when this happens.
            logging.error('Cannot open %s.', self.IP_LIST_URL)
            return util.send_not_found(self)

        sliver_tool_list = {}
        for line in lines:
            # Expected format: "FQDN,IPv4,IPv6" (IPv6 can be an empty string).
            line_fields = line.split(',')
            if len(line_fields) != 3:
                logging.error('Line does not have 3 fields: %s.', line)
                continue
            fqdn = line_fields[0]
            ipv4 = line_fields[1]
            ipv6 = line_fields[2]

            if not production_check.is_production_slice(fqdn):
                continue

            sliver_tool_gql = model.SliverTool.gql('WHERE fqdn=:fqdn',
                                                   fqdn=fqdn)
            # FQDN is not necessarily unique across tools.
            for sliver_tool in sliver_tool_gql.run(
                batch_size=constants.GQL_BATCH_SIZE):
                # case 1) Sliver tool has not changed. Nothing to do.
                if (sliver_tool != None and sliver_tool.sliver_ipv4 == ipv4 and
                    sliver_tool.sliver_ipv6 == ipv6):
                    pass
                # case 2) Sliver tool has changed.
                else:
                    # case 2.1) Sliver tool does not exist in datastore. Initialize
                    #     sliver if the corresponding tool exists in the Tool table
                    #     and the corresponding site exists in the Site table. This
                    #     case occurs when a new tool has been added after the last
                    #     IPUpdateHandler ran. The sliver tool will actually be
                    #     written to datastore at the next step.
                    if sliver_tool == None:
                        logging.warning('sliver_tool %s is not in datastore.', fqdn)
                        slice_id, site_id, server_id = \
                            model.get_slice_site_server_ids(fqdn)
                        if slice_id is None or site_id is None or server_id is None:
                            logging.info('Invalid sliver fqdn %s.', fqdn)
                            continue
                        tool = model.Tool.gql('WHERE slice_id=:slice_id',
                                              slice_id=slice_id).get()
                        if tool == None:
                            logging.info('mlab-ns does not support slice %s.',
                                         slice_id)
                            continue
                        site = model.Site.gql('WHERE site_id=:site_id',
                                              site_id=site_id).get()
                        if site == None:
                            logging.info('mlab-ns does not support site %s.',
                                         site_id)
                            continue
                        sliver_tool = self.initialize_sliver_tool(
                            tool, site, server_id, fqdn)

                    # case 2.2) Sliver tool exists in datastore.
                    if ipv4 != None:
                        sliver_tool.sliver_ipv4 = ipv4
                    else:
                        sliver_tool.sliver_ipv4 = message.NO_IP_ADDRESS
                    if ipv6 != None:
                        sliver_tool.sliver_ipv6 = ipv6
                    else:
                        sliver_tool.sliver_ipv6 = message.NO_IP_ADDRESS

                    try:
                        sliver_tool.put()
                        logging.info(
                            'Succeeded to write IPs of %s (%s, %s) in datastore.',
                            fqdn, ipv4, ipv6)
                    except db.TransactionFailedError:
                        logging.error(
                            'Failed to write IPs of %s (%s, %s) in datastore.',
                            fqdn, ipv4, ipv6)
                    continue

                if sliver_tool.tool_id not in sliver_tool_list:
                    sliver_tool_list[sliver_tool.tool_id] = []
                sliver_tool_list[sliver_tool.tool_id].append(sliver_tool)
                logging.info('sliver %s to be added to memcache', sliver_tool.fqdn)

        # Update memcache
        # Never set the memcache to an empty list since it's more likely that
        # this is a Nagios failure.
        if sliver_tool_list:
            for tool_id in sliver_tool_list.keys():
                if not memcache.set(
                    tool_id, sliver_tool_list[tool_id],
                    namespace=constants.MEMCACHE_NAMESPACE_TOOLS):
                    logging.error(
                        'Failed to update sliver IP addresses in memcache.')

        return util.send_success(self)