Example #1
0
 def testSendSuccessJson(self):
     """Verifies that send_success emits a JSON-formatted 200 response."""
     mock_request = UtilTestCase.RequestMockup()
     util.send_success(mock_request, output_type=message.FORMAT_JSON)
     self.assertEqual(mock_request.error_code, None)
     self.assertEqual(mock_request.response.headers['Content-Type'],
                      'application/json')
     self.assertEqual(mock_request.response.out.msg,
                      '{"status_code": "200 OK"}')
Example #2
0
 def testSendSuccessNoJson(self):
     """Verifies that send_success falls back to HTML output.

     Both an explicit HTML format and an unsupported format string must
     produce the HTML success page.
     """
     mock_request = UtilTestCase.RequestMockup()
     for output_format in (message.FORMAT_HTML, 'not_suppored_format'):
         util.send_success(mock_request, output_type=output_format)
         self.assertEqual(mock_request.error_code, None)
         self.assertEqual(mock_request.response.out.msg,
                          '<html> Success! </html>')
Example #3
0
    def get(self):
        """Triggers the update handler.

        Updates sliver status with information from Nagios. The Nagios URL
        containing the information is stored in the Nagios db along with
        the credentials necessary to access the data.
        """
        nagios = model.Nagios.get_by_key_name(constants.DEFAULT_NAGIOS_ENTRY)
        if nagios is None:
            logging.error('Datastore does not have the Nagios credentials.')
            return util.send_not_found(self)

        # Install a global urllib2 opener that performs HTTP digest
        # authentication against the Nagios server.
        password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
        password_manager.add_password(None, nagios.url, nagios.username,
                                      nagios.password)
        digest_handler = urllib2.HTTPDigestAuthHandler(password_manager)
        urllib2.install_opener(urllib2.build_opener(digest_handler))

        # Pull per-address-family status for every registered tool.
        tools_gql = model.Tool.gql('ORDER by tool_id DESC')
        for tool in tools_gql.run(batch_size=constants.GQL_BATCH_SIZE):
            logging.info('Pulling status of %s from Nagios.', tool.tool_id)
            for family in StatusUpdateHandler.NAGIOS_AF_SUFFIXES:
                slice_url = (nagios.url + '?show_state=1&service_name=' +
                             tool.tool_id + family + '&plugin_output=1')
                slice_status = self.get_slice_status(slice_url)
                self.update_sliver_tools_status(slice_status, tool.tool_id,
                                                family)
        return util.send_success(self)
Example #4
0
    def get(self):
        """Triggers the update handler.

        Updates sliver status with information from Nagios. The Nagios URL
        containing the information is stored in the Nagios db along with
        the credentials necessary to access the data.
        """
        nagios = model.Nagios.get_by_key_name(constants.DEFAULT_NAGIOS_ENTRY)
        if nagios is None:
            logging.error('Datastore does not have the Nagios credentials.')
            return util.send_not_found(self)

        # Install a global urllib2 opener that performs HTTP digest
        # authentication against the Nagios server.
        password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
        password_manager.add_password(None, nagios.url, nagios.username,
                                      nagios.password)
        digest_handler = urllib2.HTTPDigestAuthHandler(password_manager)
        urllib2.install_opener(urllib2.build_opener(digest_handler))

        # Pull per-address-family status for every registered tool.
        tools_gql = model.Tool.gql('ORDER by tool_id DESC')
        for tool in tools_gql.run(batch_size=constants.GQL_BATCH_SIZE):
            logging.info('Pulling status of %s from Nagios.', tool.tool_id)
            for family in StatusUpdateHandler.NAGIOS_AF_SUFFIXES:
                slice_url = (nagios.url + '?show_state=1&service_name=' +
                             tool.tool_id + family + '&plugin_output=1')
                slice_status = self.get_slice_status(slice_url)
                self.update_sliver_tools_status(slice_status, tool.tool_id,
                                                family)
        return util.send_success(self)
Example #5
0
    def get(self):
        """Triggers the registration handler.

        Checks if new sites were added to Nagios and registers them.

        Returns:
            A successful HTTP response, or a not-found response if the site
            list cannot be fetched or parsed.
        """
        try:
            nagios_sites_json = json.loads(urllib2.urlopen(
                self.SITE_LIST_URL).read())
        except urllib2.HTTPError:
            # TODO(claudiu) Notify(email) when this happens.
            logging.error('Cannot open %s.', self.SITE_LIST_URL)
            return util.send_not_found(self)
        except (TypeError, ValueError) as e:
            logging.error('The json format of %s is not valid: %s',
                          self.SITE_LIST_URL, e)
            return util.send_not_found(self)

        nagios_site_ids = set()

        # Validate the data from Nagios.
        valid_nagios_sites_json = []
        for nagios_site in nagios_sites_json:
            if not self._is_valid_site(nagios_site):
                continue
            valid_nagios_sites_json.append(nagios_site)
            nagios_site_ids.add(nagios_site[self.SITE_FIELD])

        # Site ids currently known to mlab-ns.
        mlab_site_ids = set(site.site_id for site in model.Site.all())

        unchanged_site_ids = nagios_site_ids.intersection(mlab_site_ids)
        new_site_ids = nagios_site_ids.difference(mlab_site_ids)
        removed_site_ids = mlab_site_ids.difference(nagios_site_ids)

        # Do not remove sites here for now.
        # TODO(claudiu) Implement the site removal as a separate handler.
        for site_id in removed_site_ids:
            logging.warning('Site %s removed from %s.', site_id,
                            self.SITE_LIST_URL)

        for site_id in unchanged_site_ids:
            logging.info('Site %s unchanged in %s.', site_id,
                         self.SITE_LIST_URL)

        # Register only the sites that are new to mlab-ns.
        for nagios_site in valid_nagios_sites_json:
            if nagios_site[self.SITE_FIELD] in new_site_ids:
                logging.info('Registering site %s.',
                             nagios_site[self.SITE_FIELD])
                # TODO(claudiu) Notify(email) when this happens.
                if not self.register_site(nagios_site):
                    logging.error('Error registering site %s.',
                                  nagios_site[self.SITE_FIELD])

        return util.send_success(self)
Example #6
0
    def get(self):
        """Triggers the registration handler.

        Checks if new sites were added to Nagios and registers them.

        Returns:
            A successful HTTP response, or a not-found response if the site
            list cannot be fetched or parsed.
        """
        try:
            nagios_sites_json = json.loads(urllib2.urlopen(
                self.SITE_LIST_URL).read())
        except urllib2.HTTPError:
            # TODO(claudiu) Notify(email) when this happens.
            logging.error('Cannot open %s.', self.SITE_LIST_URL)
            return util.send_not_found(self)
        except (TypeError, ValueError) as e:
            logging.error('The json format of %s is not valid: %s',
                          self.SITE_LIST_URL, e)
            return util.send_not_found(self)

        nagios_site_ids = set()

        # Validate the data from Nagios.
        valid_nagios_sites_json = []
        for nagios_site in nagios_sites_json:
            if not self._is_valid_site(nagios_site):
                continue
            valid_nagios_sites_json.append(nagios_site)
            nagios_site_ids.add(nagios_site[self.SITE_FIELD])

        # Site ids currently known to mlab-ns.
        mlab_site_ids = set(site.site_id for site in model.Site.all())

        unchanged_site_ids = nagios_site_ids.intersection(mlab_site_ids)
        new_site_ids = nagios_site_ids.difference(mlab_site_ids)
        removed_site_ids = mlab_site_ids.difference(nagios_site_ids)

        # Do not remove sites here for now.
        # TODO(claudiu) Implement the site removal as a separate handler.
        for site_id in removed_site_ids:
            logging.warning('Site %s removed from %s.', site_id,
                            self.SITE_LIST_URL)

        for site_id in unchanged_site_ids:
            logging.info('Site %s unchanged in %s.', site_id,
                         self.SITE_LIST_URL)

        # Register only the sites that are new to mlab-ns.
        for nagios_site in valid_nagios_sites_json:
            if nagios_site[self.SITE_FIELD] in new_site_ids:
                logging.info('Registering site %s.',
                             nagios_site[self.SITE_FIELD])
                # TODO(claudiu) Notify(email) when this happens.
                if not self.register_site(nagios_site):
                    logging.error('Error registering site %s.',
                                  nagios_site[self.SITE_FIELD])

        return util.send_success(self)
Example #7
0
    def get(self):
        """Triggers the registration handler.

        Checks if new sites were added to siteinfo and registers them.

        Returns:
            A successful HTTP response, or a not-found response if the site
            list cannot be fetched or parsed.
        """
        # Resolve the URL outside the try block so the except handlers can
        # always reference it safely.
        locations_url = os.environ.get('LOCATIONS_URL')
        try:
            sites_json = json.loads(urllib2.urlopen(locations_url).read())
        except urllib2.HTTPError:
            # TODO(claudiu) Notify(email) when this happens.
            logging.error('Cannot open %s.', locations_url)
            return util.send_not_found(self)
        except (TypeError, ValueError) as e:
            logging.error('The json format of %s is not valid: %s',
                          locations_url, e)
            return util.send_not_found(self)

        site_ids = set()

        # Validate the site data.
        valid_sites_json = []
        for site in sites_json:
            if not self._is_valid_site(site):
                continue
            valid_sites_json.append(site)
            site_ids.add(site[self.SITE_FIELD])

        mlab_site_ids = set()
        mlab_sites = list(model.Site.all().fetch(limit=None))
        for site in mlab_sites:
            mlab_site_ids.add(site.site_id)

        unchanged_site_ids = site_ids.intersection(mlab_site_ids)
        new_site_ids = site_ids.difference(mlab_site_ids)

        # Do not remove sites here for now.

        for site in valid_sites_json:
            # Register a new site AND update an existing site the same way.
            if (site[self.SITE_FIELD] in new_site_ids or
                    site[self.SITE_FIELD] in unchanged_site_ids):
                if site[self.SITE_FIELD] in new_site_ids:
                    logging.info('Add new site %s.', site[self.SITE_FIELD])
                # TODO(claudiu) Notify(email) when this happens.
                if not self.update_site(site):
                    logging.error('Error updating site %s.',
                                  site[self.SITE_FIELD])

        # Call the check_ip job at the end of the check_site job.
        IPUpdateHandler().update()

        return util.send_success(self)
Example #8
0
    def get(self):
        """Triggers the update handler.

        Updates sliver status with information from Nagios. The Nagios URL
        containing the information is stored in the Nagios db along with
        the credentials necessary to access the data.
        """
        nagios = nagios_config_wrapper.get_nagios_config()
        if nagios is None:
            logging.error('Datastore does not have the Nagios credentials.')
            return util.send_not_found(self)

        nagios_status.authenticate_nagios(nagios)

        # Fetch and apply the status of every slice Nagios reports on.
        for info in nagios_status.get_slice_info(nagios.url):
            status = nagios_status.get_slice_status(info.slice_url)
            if status:
                self.update_sliver_tools_status(status, info.tool_id,
                                                info.address_family)
        return util.send_success(self)
Example #9
0
    def get(self):
        """Triggers the update handler.

        Updates sliver status with information from Nagios. The Nagios URL
        containing the information is stored in the Nagios db along with
        the credentials necessary to access the data.
        """
        nagios = nagios_config_wrapper.get_nagios_config()
        if nagios is None:
            logging.error('Datastore does not have the Nagios credentials.')
            return util.send_not_found(self)

        nagios_status.authenticate_nagios(nagios)

        # Fetch and apply the status of every slice Nagios reports on.
        for info in nagios_status.get_slice_info(nagios.url):
            status = nagios_status.get_slice_status(info.slice_url)
            if status:
                self.update_sliver_tools_status(status, info.tool_id,
                                                info.address_family)
        return util.send_success(self)
Example #10
0
    def get(self):
        """Triggers the update handler.

        Updates sliver tool IP addresses from Nagios.

        Returns:
            A successful HTTP response, or a not-found response if the IP
            list cannot be fetched.
        """
        lines = []
        try:
            lines = urllib2.urlopen(self.IP_LIST_URL).read().strip('\n').split(
                '\n')
        except urllib2.HTTPError:
            # TODO(claudiu) Notify(email) when this happens.
            logging.error('Cannot open %s.', self.IP_LIST_URL)
            return util.send_not_found(self)

        # Maps tool_id -> list of sliver tools to push into memcache.
        sliver_tool_list = {}
        for line in lines:
            # Expected format: "FQDN,IPv4,IPv6" (IPv6 can be an empty string).
            line_fields = line.split(',')
            if len(line_fields) != 3:
                logging.error('Line does not have 3 fields: %s.', line)
                continue
            fqdn = line_fields[0]
            ipv4 = line_fields[1]
            ipv6 = line_fields[2]

            if not production_check.is_production_slice(fqdn):
                continue

            sliver_tool_gql = model.SliverTool.gql('WHERE fqdn=:fqdn',
                                                   fqdn=fqdn)
            # FQDN is not necessarily unique across tools.
            for sliver_tool in sliver_tool_gql.run(
                    batch_size=constants.GQL_BATCH_SIZE):
                # case 1) Sliver tool has not changed. Nothing to do.
                if (sliver_tool is not None and
                        sliver_tool.sliver_ipv4 == ipv4 and
                        sliver_tool.sliver_ipv6 == ipv6):
                    pass
                # case 2) Sliver tool has changed.
                else:
                    # case 2.1) Sliver tool does not exist in datastore. Initialize
                    #     sliver if the corresponding tool exists in the Tool table
                    #     and the corresponding site exists in the Site table. This
                    #     case occurs when a new tool has been added after the last
                    #     IPUpdateHandler ran. The sliver tool will actually be
                    #     written to datastore at the next step.
                    if sliver_tool is None:
                        logging.warning('sliver_tool %s is not in datastore.',
                                        fqdn)
                        slice_id, site_id, server_id = \
                            model.get_slice_site_server_ids(fqdn)
                        if slice_id is None or site_id is None or server_id is None:
                            logging.info('Non valid sliver fqdn %s.', fqdn)
                            continue
                        tool = model.Tool.gql('WHERE slice_id=:slice_id',
                                              slice_id=slice_id).get()
                        if tool is None:
                            logging.info('mlab-ns does not support slice %s.',
                                         slice_id)
                            continue
                        site = model.Site.gql('WHERE site_id=:site_id',
                                              site_id=site_id).get()
                        if site is None:
                            logging.info('mlab-ns does not support site %s.',
                                         site_id)
                            continue
                        sliver_tool = self.initialize_sliver_tool(
                            tool, site, server_id, fqdn)

                    # case 2.2) Sliver tool exists in datastore.
                    # NOTE(review): ipv4/ipv6 come from str.split and are never
                    # None, so the NO_IP_ADDRESS branches below are unreachable
                    # as written; empty strings are stored as-is. Confirm
                    # whether empty addresses should map to NO_IP_ADDRESS.
                    if ipv4 is not None:
                        sliver_tool.sliver_ipv4 = ipv4
                    else:
                        sliver_tool.sliver_ipv4 = message.NO_IP_ADDRESS
                    if ipv6 is not None:
                        sliver_tool.sliver_ipv6 = ipv6
                    else:
                        sliver_tool.sliver_ipv6 = message.NO_IP_ADDRESS

                    try:
                        sliver_tool.put()
                        logging.info(
                            'Succeeded to write IPs of %s (%s, %s) in datastore.',
                            fqdn, ipv4, ipv6)
                    except db.TransactionFailedError:
                        logging.error(
                            'Failed to write IPs of %s (%s, %s) in datastore.',
                            fqdn, ipv4, ipv6)
                    continue

                if sliver_tool.tool_id not in sliver_tool_list:
                    sliver_tool_list[sliver_tool.tool_id] = []
                sliver_tool_list[sliver_tool.tool_id].append(sliver_tool)
                logging.info('sliver %s to be added to memcache',
                             sliver_tool.fqdn)

        # Update memcache
        # Never set the memcache to an empty list since it's more likely that
        # this is a Nagios failure.
        if sliver_tool_list:
            for tool_id in sliver_tool_list.keys():
                if not memcache.set(
                        tool_id,
                        sliver_tool_list[tool_id],
                        namespace=constants.MEMCACHE_NAMESPACE_TOOLS):
                    logging.error(
                        'Failed to update sliver IP addresses in memcache.')

        return util.send_success(self)
Example #11
0
    def get(self):
        """Triggers the registration handler.

        Checks if new sites were added to Nagios and registers them.

        Returns:
            A successful HTTP response, or a not-found response if the site
            list cannot be fetched or parsed.
        """
        try:
            project = app_identity.get_application_id()
            if project == 'mlab-ns':
                # TODO: eliminate project translation.
                json_file = self.DEFAULT_SITE_LIST_URL
            else:
                json_file = self.TEMPLATE_SITE_LIST_URL.format(project=project)
        except AttributeError:
            logging.error('Cannot get project name.')
            return util.send_not_found(self)

        try:
            sites_json = json.loads(urllib2.urlopen(json_file).read())
        except urllib2.HTTPError:
            # TODO(claudiu) Notify(email) when this happens.
            logging.error('Cannot open %s.', json_file)
            return util.send_not_found(self)
        except (TypeError, ValueError) as e:
            logging.error('The json format of %s is not valid: %s', json_file,
                          e)
            return util.send_not_found(self)

        site_ids = set()

        # Validate the site data.
        valid_sites_json = []
        for site in sites_json:
            if not self._is_valid_site(site):
                continue
            valid_sites_json.append(site)
            site_ids.add(site[self.SITE_FIELD])

        mlab_site_ids = set()
        mlab_sites = list(model.Site.all().fetch(limit=None))
        for site in mlab_sites:
            mlab_site_ids.add(site.site_id)

        unchanged_site_ids = site_ids.intersection(mlab_site_ids)
        new_site_ids = site_ids.difference(mlab_site_ids)

        # Do not remove sites here for now.

        for site in valid_sites_json:
            # Register a new site AND update an existing site the same way.
            if (site[self.SITE_FIELD] in new_site_ids or
                    site[self.SITE_FIELD] in unchanged_site_ids):
                if site[self.SITE_FIELD] in new_site_ids:
                    logging.info('Add new site %s.', site[self.SITE_FIELD])
                # TODO(claudiu) Notify(email) when this happens.
                if not self.update_site(site):
                    logging.error('Error updating site %s.',
                                  site[self.SITE_FIELD])

        # Call the check_ip job at the end of the check_site job.
        IPUpdateHandler().update()

        return util.send_success(self)
Example #12
0
    def get(self):
        """Triggers the update handler.

        Updates sliver status with information from either Nagios or Prometheus.
        The base URLs for accessing status information are stored in the
        datastore along with the credentials necessary to access the data.

        Returns:
            A successful HTTP response, or a not-found response when neither
            status source's configs are available but both are depended on.
        """
        # Determine if there are any dependencies on Prometheus.
        prometheus_deps = model.get_status_source_deps('prometheus')
        # Get Prometheus configs, and authenticate.
        # NOTE: prometheus_opener is only bound when the config exists; every
        # use below is guarded by a check on prometheus_config.
        prometheus_config = prometheus_config_wrapper.get_prometheus_config()
        if prometheus_config is None:
            logging.error('Datastore does not have the Prometheus configs.')
        else:
            prometheus_opener = prometheus_status.authenticate_prometheus(
                prometheus_config)

        # Determine if there are any dependencies on Nagios.
        nagios_deps = model.get_status_source_deps('nagios')
        # Get Nagios configs, and authenticate.
        # NOTE: nagios_opener, like prometheus_opener, is only bound when the
        # corresponding config exists.
        nagios_config = nagios_config_wrapper.get_nagios_config()
        if nagios_config is None:
            logging.error('Datastore does not have the Nagios configs.')
        else:
            nagios_opener = nagios_status.authenticate_nagios(nagios_config)

        # If we have dependencies on both Prometheus and Nagios, and neither one
        # of the configs is available, then abort, because we can't fetch status
        # from either. However, if we have one or the other, then continue,
        # because it may be preferable to update _some_ statuses than none.
        if (prometheus_deps and not prometheus_config) and (nagios_deps and
                                                            not nagios_config):
            logging.error(
                'Neither Nagios nor Prometheus configs are available.')
            return util.send_not_found(self)

        # For each (tool, address family) pair, fetch status from the tool's
        # configured source. Every branch below either assigns slice_status or
        # skips the pair with `continue`.
        for tool_id in model.get_all_tool_ids():
            tool = model.get_tool_from_tool_id(tool_id)
            for address_family in ['', '_ipv6']:
                if tool.status_source == 'prometheus':
                    logging.info('Status source for %s%s is: prometheus',
                                 tool_id, address_family)
                    # Only proceed if prometheus_config exists, and hence
                    # prometheus_opener should also exist.
                    if prometheus_config:
                        slice_info = prometheus_status.get_slice_info(
                            prometheus_config.url, tool_id, address_family)
                        if not slice_info:
                            continue
                        slice_status = prometheus_status.get_slice_status(
                            slice_info.slice_url, prometheus_opener)
                    else:
                        logging.error(
                            'Prometheus config unavailable. Skipping %s%s',
                            tool_id, address_family)
                        continue
                elif tool.status_source == 'nagios':
                    logging.info('Status source for %s%s is: nagios', tool_id,
                                 address_family)
                    # Only proceed if nagios_config exists, and hence
                    # nagios_opener should also exist.
                    if nagios_config:
                        # NOTE(review): unlike the Prometheus branch, slice_info
                        # is not checked for emptiness here — confirm that
                        # nagios_status.get_slice_info always returns a usable
                        # object.
                        slice_info = nagios_status.get_slice_info(
                            nagios_config.url, tool_id, address_family)
                        slice_status = nagios_status.get_slice_status(
                            slice_info.slice_url, nagios_opener)
                    else:
                        logging.error(
                            'Nagios config unavailable. Skipping %s%s', tool_id,
                            address_family)
                        continue
                else:
                    logging.error('Unknown tool status_source: %s.',
                                  tool.status_source)
                    continue

                if slice_status:
                    self.update_sliver_tools_status(slice_status,
                                                    slice_info.tool_id,
                                                    slice_info.address_family)

        return util.send_success(self)
Example #13
0
    def get(self):
        """Triggers the update handler.

        Updates sliver tool IP addresses from Nagios.

        Returns:
            A successful HTTP response, or a not-found response if the IP
            list cannot be fetched.
        """
        lines = []
        try:
            lines = urllib2.urlopen(self.IP_LIST_URL).read().strip('\n').split(
                '\n')
        except urllib2.HTTPError:
            # TODO(claudiu) Notify(email) when this happens.
            logging.error('Cannot open %s.', self.IP_LIST_URL)
            return util.send_not_found(self)

        # Maps tool_id -> list of sliver tools to push into memcache.
        sliver_tool_list = {}
        for line in lines:
            # Expected format: "FQDN,IPv4,IPv6" (IPv6 can be an empty string).
            line_fields = line.split(',')
            if len(line_fields) != 3:
                logging.error('Line does not have 3 fields: %s.', line)
                continue
            fqdn = line_fields[0]
            ipv4 = line_fields[1]
            ipv6 = line_fields[2]

            if not production_check.is_production_slice(fqdn):
                continue

            # Gather some information about this site which will be used to
            # determine if we need to do anything with this site/sliver.
            slice_id, site_id, server_id = \
                model.get_slice_site_server_ids(fqdn)

            # Make sure this is a valid slice FQDN, and not a mistake or just a
            # node name.
            if slice_id is None or site_id is None or server_id is None:
                continue

            # If mlab-ns does not support this site, then skip it.
            site = model.Site.gql('WHERE site_id=:site_id',
                                  site_id=site_id).get()
            if site == None:
                logging.info('mlab-ns does not support site %s.', site_id)
                continue

            # If mlab-ns does not serve/support this slice, then skip it. Note:
            # a given slice_id might have multiple tools (e.g., iupui_ndt has
            # both 'ndt' and 'ndt_ssl' tools.
            tools = model.Tool.gql('WHERE slice_id=:slice_id',
                                   slice_id=slice_id)
            if tools.count() == 0:
                continue

            for tool in tools.run():
                # Query the datastore to see if this sliver_tool exists there.
                sliver_tool_gql = model.SliverTool.gql(
                    'WHERE fqdn=:fqdn AND tool_id=:tool_id',
                    fqdn=fqdn,
                    tool_id=tool.tool_id)

                # Check to see if the sliver_tool already exists in the
                # datastore. If not, add it to the datastore.
                # NOTE(review): count() is invoked up to twice per tool here;
                # each call may issue a separate datastore query — consider
                # caching the count if this becomes a hot spot.
                if sliver_tool_gql.count() == 1:
                    sliver_tool = sliver_tool_gql.get(
                        batch_size=constants.GQL_BATCH_SIZE)
                elif sliver_tool_gql.count() == 0:
                    logging.info(
                        'For tool %s, fqdn %s is not in datastore.  Adding it.',
                        tool.tool_id, fqdn)
                    sliver_tool = self.initialize_sliver_tool(tool, site,
                                                              server_id, fqdn)
                else:
                    logging.error(
                        'Error, or too many sliver_tools returned for {}:{}.'.format(
                            tool.tool_id, fqdn))
                    continue

                updated_sliver_tool = self.set_sliver_tool_ips(sliver_tool,
                                                               ipv4, ipv6)
                # If the sliver_tool got updated IPs then write the change to
                # the datastore, else save the performance hit of writing a
                # record with identical data.
                if updated_sliver_tool:
                    self.put_sliver_tool(updated_sliver_tool)

                # Queue the (possibly pre-update) entity for the memcache push
                # below, keyed by tool id.
                if tool.tool_id not in sliver_tool_list:
                    sliver_tool_list[tool.tool_id] = []
                sliver_tool_list[tool.tool_id].append(sliver_tool)

        # Update memcache.  Never set the memcache to an empty list since it's
        # more likely that this is a Nagios failure.
        if sliver_tool_list:
            for tool_id in sliver_tool_list.keys():
                if not memcache.set(
                        tool_id,
                        sliver_tool_list[tool_id],
                        namespace=constants.MEMCACHE_NAMESPACE_TOOLS):
                    logging.error(
                        'Failed to update sliver IP addresses in memcache.')

        return util.send_success(self)
Example #14
0
    def get(self):
        """Triggers the registration handler.

        Checks if new sites were added to Nagios and registers them.

        Returns:
            A successful HTTP response, or a not-found response if the site
            list cannot be fetched or parsed.
        """
        try:
            project = app_identity.get_application_id()
            if project == 'mlab-ns':
                # TODO: eliminate project translation.
                json_file = self.DEFAULT_SITE_LIST_URL
            else:
                json_file = self.TEMPLATE_SITE_LIST_URL.format(project=project)
        except AttributeError:
            logging.error('Cannot get project name.')
            return util.send_not_found(self)

        try:
            sites_json = json.loads(urllib2.urlopen(json_file).read())
        except urllib2.HTTPError:
            # TODO(claudiu) Notify(email) when this happens.
            logging.error('Cannot open %s.', json_file)
            return util.send_not_found(self)
        except (TypeError, ValueError) as e:
            logging.error('The json format of %s is not valid: %s', json_file,
                          e)
            return util.send_not_found(self)

        site_ids = set()

        # Validate the site data.
        valid_sites_json = []
        for site in sites_json:
            if not self._is_valid_site(site):
                continue
            valid_sites_json.append(site)
            site_ids.add(site[self.SITE_FIELD])

        mlab_site_ids = set()
        mlab_sites = list(model.Site.all().fetch(limit=None))
        for site in mlab_sites:
            mlab_site_ids.add(site.site_id)

        unchanged_site_ids = site_ids.intersection(mlab_site_ids)
        new_site_ids = site_ids.difference(mlab_site_ids)

        # Do not remove sites here for now.

        for site in valid_sites_json:
            # Register a new site AND update an existing site the same way.
            if (site[self.SITE_FIELD] in new_site_ids or
                    site[self.SITE_FIELD] in unchanged_site_ids):
                if site[self.SITE_FIELD] in new_site_ids:
                    logging.info('Add new site %s.', site[self.SITE_FIELD])
                # TODO(claudiu) Notify(email) when this happens.
                if not self.update_site(site):
                    logging.error('Error updating site %s.',
                                  site[self.SITE_FIELD])

        # Call the check_ip job at the end of the check_site job.
        IPUpdateHandler().update()

        return util.send_success(self)
Example #15
0
    def get(self):
        """Triggers the update handler.

        Updates sliver status with information from either Nagios or Prometheus.
        The base URLs for accessing status information are stored in the
        datastore along with the credentials necessary to access the data.
        """
        # Load and authenticate each supported status source. A missing
        # config is tolerated here; the abort decision is made below once
        # we know which sources any tool actually depends on.
        prometheus_opener = None
        prometheus_config = prometheus_config_wrapper.get_prometheus_config()
        if prometheus_config is None:
            logging.error('Datastore does not have the Prometheus configs.')
        else:
            prometheus_opener = prometheus_status.authenticate_prometheus(
                prometheus_config)

        nagios_opener = None
        nagios_config = nagios_config_wrapper.get_nagios_config()
        if nagios_config is None:
            logging.error('Datastore does not have the Nagios configs.')
        else:
            nagios_opener = nagios_status.authenticate_nagios(nagios_config)

        # Abort only when every source that some tool depends on is
        # unusable. If at least one source is available we keep going,
        # because updating _some_ statuses is preferable to updating none.
        prometheus_unusable = (model.get_status_source_deps('prometheus') and
                               not prometheus_config)
        nagios_unusable = (model.get_status_source_deps('nagios') and
                           not nagios_config)
        if prometheus_unusable and nagios_unusable:
            logging.error(
                'Neither Nagios nor Prometheus configs are available.')
            return util.send_not_found(self)

        # Fetch and apply status per tool, once per address family.
        for tool_id in model.get_all_tool_ids():
            tool = model.get_tool_from_tool_id(tool_id)
            for address_family in ['', '_ipv6']:
                source = tool.status_source
                if source == 'prometheus':
                    logging.info('Status source for %s%s is: prometheus',
                                 tool_id, address_family)
                    # Without a config there is no opener either; skip.
                    if not prometheus_config:
                        logging.error(
                            'Prometheus config unavailable. Skipping %s%s',
                            tool_id, address_family)
                        continue
                    slice_info = prometheus_status.get_slice_info(
                        prometheus_config.url, tool_id, address_family)
                    if not slice_info:
                        continue
                    slice_status = prometheus_status.get_slice_status(
                        slice_info.slice_url, prometheus_opener)
                elif source == 'nagios':
                    logging.info('Status source for %s%s is: nagios', tool_id,
                                 address_family)
                    # Without a config there is no opener either; skip.
                    if not nagios_config:
                        logging.error(
                            'Nagios config unavailable. Skipping %s%s', tool_id,
                            address_family)
                        continue
                    slice_info = nagios_status.get_slice_info(
                        nagios_config.url, tool_id, address_family)
                    slice_status = nagios_status.get_slice_status(
                        slice_info.slice_url, nagios_opener)
                else:
                    logging.error('Unknown tool status_source: %s.', source)
                    continue

                if slice_status:
                    self.update_sliver_tools_status(slice_status,
                                                    slice_info.tool_id,
                                                    slice_info.address_family)

        return util.send_success(self)
Exemple #16
0
    def get(self):
        """Triggers the update handler.

        Updates sliver tool IP addresses from Nagios.

        Fetches the "FQDN,IPv4,IPv6" list from IP_LIST_URL, writes changed
        IPs to datastore (initializing sliver tools that are missing from
        datastore), and refreshes the per-tool memcache entries.
        """
        lines = []
        try:
            lines = urllib2.urlopen(
                self.IP_LIST_URL).read().strip('\n').split('\n')
        except urllib2.HTTPError:
            # TODO(claudiu) Notify(email) when this happens.
            logging.error('Cannot open %s.', self.IP_LIST_URL)
            return util.send_not_found(self)

        # Maps tool_id -> list of up-to-date SliverTool entities to cache.
        sliver_tool_list = {}
        for line in lines:
            # Expected format: "FQDN,IPv4,IPv6" (IPv6 can be an empty string).
            line_fields = line.split(',')
            if len(line_fields) != 3:
                logging.error('Line does not have 3 fields: %s.', line)
                continue
            fqdn = line_fields[0]
            ipv4 = line_fields[1]
            ipv6 = line_fields[2]

            if not production_check.is_production_slice(fqdn):
                continue

            sliver_tool_gql = model.SliverTool.gql('WHERE fqdn=:fqdn',
                                                   fqdn=fqdn)
            # FQDN is not necessarily unique across tools.
            sliver_tools = list(sliver_tool_gql.run(
                batch_size=constants.GQL_BATCH_SIZE))

            # Sliver tool does not exist in datastore. Initialize it if the
            # corresponding tool exists in the Tool table and the
            # corresponding site exists in the Site table. This case occurs
            # when a new tool has been added after the last IPUpdateHandler
            # ran. The sliver tool is written to datastore in the update
            # step below.
            # BUG FIX: the original checked `sliver_tool == None` inside the
            # query loop, but a GQL iterator never yields None -- with zero
            # results the loop body never ran, so new slivers were never
            # initialized.
            if not sliver_tools:
                logging.warning('sliver_tool %s is not in datastore.', fqdn)
                slice_id, site_id, server_id = \
                    model.get_slice_site_server_ids(fqdn)
                if slice_id is None or site_id is None or server_id is None:
                    logging.info('Non valid sliver fqdn %s.', fqdn)
                    continue
                tool = model.Tool.gql('WHERE slice_id=:slice_id',
                                      slice_id=slice_id).get()
                if tool is None:
                    logging.info('mlab-ns does not support slice %s.',
                                 slice_id)
                    continue
                site = model.Site.gql('WHERE site_id=:site_id',
                                      site_id=site_id).get()
                if site is None:
                    logging.info('mlab-ns does not support site %s.', site_id)
                    continue
                sliver_tools = [self.initialize_sliver_tool(tool, site,
                                                            server_id, fqdn)]

            for sliver_tool in sliver_tools:
                # Sliver tool has not changed: only queue it for memcache.
                if (sliver_tool.sliver_ipv4 == ipv4 and
                        sliver_tool.sliver_ipv6 == ipv6):
                    sliver_tool_list.setdefault(sliver_tool.tool_id,
                                                []).append(sliver_tool)
                    logging.info('sliver %s to be added to memcache',
                                 sliver_tool.fqdn)
                    continue

                # Sliver tool is new or has changed: store the new IPs.
                # BUG FIX: ipv4/ipv6 come from str.split so they are never
                # None; the original `!= None` checks were always true, and
                # a (documented-legal) empty IPv6 field was stored as ''
                # instead of message.NO_IP_ADDRESS. Use truthiness instead.
                sliver_tool.sliver_ipv4 = ipv4 if ipv4 else \
                    message.NO_IP_ADDRESS
                sliver_tool.sliver_ipv6 = ipv6 if ipv6 else \
                    message.NO_IP_ADDRESS

                try:
                    sliver_tool.put()
                    logging.info(
                        'Succeeded to write IPs of %s (%s, %s) in datastore.',
                        fqdn, ipv4, ipv6)
                except db.TransactionFailedError:
                    logging.error(
                        'Failed to write IPs of %s (%s, %s) in datastore.',
                        fqdn, ipv4, ipv6)
                # NOTE(review): as in the original, changed slivers are NOT
                # queued for memcache here -- confirm this is intended, since
                # the per-tool memcache lists below will omit them.

        # Update memcache.
        # Never set the memcache to an empty list since it's more likely that
        # this is a Nagios failure.
        if sliver_tool_list:
            for tool_id in sliver_tool_list.keys():
                if not memcache.set(
                        tool_id, sliver_tool_list[tool_id],
                        namespace=constants.MEMCACHE_NAMESPACE_TOOLS):
                    logging.error(
                        'Failed to update sliver IP addresses in memcache.')

        return util.send_success(self)