Example #1
0
 def testSendNotFoundJson(self):
     """send_not_found with JSON output must produce a 404 JSON payload."""
     mock_request = UtilTestCase.RequestMockup()
     util.send_not_found(mock_request, output_type=message.FORMAT_JSON)
     self.assertEqual(mock_request.error_code, 404)
     content_type = mock_request.response.headers['Content-Type']
     self.assertEqual(content_type, 'application/json')
     self.assertEqual(mock_request.response.out.msg,
                      '{"status_code": "404 Not found"}')
Example #2
0
 def testSendNotFoundJson(self):
     """Verifies the JSON flavor of send_not_found: 404 plus a JSON body."""
     req = UtilTestCase.RequestMockup()
     util.send_not_found(req, output_type=message.FORMAT_JSON)
     # The mockup records the error code and response the util call produced.
     self.assertEqual(req.error_code, 404)
     self.assertEqual(req.response.headers['Content-Type'],
                      'application/json')
     self.assertEqual(req.response.out.msg,
                      '{"status_code": "404 Not found"}')
Example #3
0
    def get(self):
        """Triggers the registration handler.

        Checks if new sites were added to Nagios and registers them.
        Responds with 404 if the site list cannot be fetched or parsed,
        200 otherwise.
        """
        # Fetch the authoritative site list from Nagios; on any fetch or
        # parse failure, give up and report 404 to the caller.
        try:
            nagios_sites_json = json.loads(urllib2.urlopen(
                self.SITE_LIST_URL).read())
        except urllib2.HTTPError:
            # TODO(claudiu) Notify(email) when this happens.
            logging.error('Cannot open %s.', self.SITE_LIST_URL)
            return util.send_not_found(self)
        except (TypeError, ValueError) as e:
            logging.error('The json format of %s is not valid: %s',
                          self.SITE_LIST_URL, e)
            return util.send_not_found(self)

        nagios_site_ids = set()

        # Validate the data from Nagios and collect the valid site ids.
        valid_nagios_sites_json = []
        for nagios_site in nagios_sites_json:
            if not self._is_valid_site(nagios_site):
                continue
            valid_nagios_sites_json.append(nagios_site)
            nagios_site_ids.add(nagios_site[self.SITE_FIELD])

        # Site ids currently known to mlab-ns.
        mlab_site_ids = set()
        mlab_sites = model.Site.all()
        for site in mlab_sites:
            mlab_site_ids.add(site.site_id)

        unchanged_site_ids = nagios_site_ids.intersection(mlab_site_ids)
        new_site_ids = nagios_site_ids.difference(mlab_site_ids)
        removed_site_ids = mlab_site_ids.difference(nagios_site_ids)

        # Do not remove sites here for now.
        # TODO(claudiu) Implement the site removal as a separate handler.
        for site_id in removed_site_ids:
            logging.warning('Site %s removed from %s.', site_id,
                            self.SITE_LIST_URL)

        for site_id in unchanged_site_ids:
            logging.info('Site %s unchanged in %s.', site_id,
                         self.SITE_LIST_URL)

        # Register only the sites that are both valid and new.
        for nagios_site in valid_nagios_sites_json:
            if nagios_site[self.SITE_FIELD] in new_site_ids:
                logging.info('Registering site %s.',
                             nagios_site[self.SITE_FIELD])
                # TODO(claudiu) Notify(email) when this happens.
                if not self.register_site(nagios_site):
                    logging.error('Error registering site %s.',
                                  nagios_site[self.SITE_FIELD])

        return util.send_success(self)
Example #4
0
    def get(self):
        """Triggers the registration handler.

        Checks if new sites were added to Nagios and registers them.
        Responds with 404 if the site list cannot be fetched or parsed,
        200 otherwise.
        """
        # Fetch the authoritative site list from Nagios; on any fetch or
        # parse failure, give up and report 404 to the caller.
        try:
            nagios_sites_json = json.loads(urllib2.urlopen(
                self.SITE_LIST_URL).read())
        except urllib2.HTTPError:
            # TODO(claudiu) Notify(email) when this happens.
            logging.error('Cannot open %s.', self.SITE_LIST_URL)
            return util.send_not_found(self)
        except (TypeError, ValueError) as e:
            logging.error('The json format of %s is not valid: %s',
                          self.SITE_LIST_URL, e)
            return util.send_not_found(self)

        nagios_site_ids = set()

        # Validate the data from Nagios and collect the valid site ids.
        valid_nagios_sites_json = []
        for nagios_site in nagios_sites_json:
            if not self._is_valid_site(nagios_site):
                continue
            valid_nagios_sites_json.append(nagios_site)
            nagios_site_ids.add(nagios_site[self.SITE_FIELD])

        # Site ids currently known to mlab-ns.
        mlab_site_ids = set()
        mlab_sites = model.Site.all()
        for site in mlab_sites:
            mlab_site_ids.add(site.site_id)

        unchanged_site_ids = nagios_site_ids.intersection(mlab_site_ids)
        new_site_ids = nagios_site_ids.difference(mlab_site_ids)
        removed_site_ids = mlab_site_ids.difference(nagios_site_ids)

        # Do not remove sites here for now.
        # TODO(claudiu) Implement the site removal as a separate handler.
        for site_id in removed_site_ids:
            logging.warning('Site %s removed from %s.', site_id,
                            self.SITE_LIST_URL)

        for site_id in unchanged_site_ids:
            logging.info('Site %s unchanged in %s.', site_id,
                         self.SITE_LIST_URL)

        # Register only the sites that are both valid and new.
        for nagios_site in valid_nagios_sites_json:
            if nagios_site[self.SITE_FIELD] in new_site_ids:
                logging.info('Registering site %s.',
                             nagios_site[self.SITE_FIELD])
                # TODO(claudiu) Notify(email) when this happens.
                if not self.register_site(nagios_site):
                    logging.error('Error registering site %s.',
                                  nagios_site[self.SITE_FIELD])

        return util.send_success(self)
Example #5
0
    def get(self):
        """Triggers the registration handler.

        Checks if new sites were added to siteinfo and registers them.
        Responds with 404 if the site list cannot be fetched or parsed,
        200 otherwise.
        """
        try:
            locations_url = os.environ.get('LOCATIONS_URL')
            sites_json = json.loads(urllib2.urlopen(locations_url).read())
        except urllib2.HTTPError:
            # TODO(claudiu) Notify(email) when this happens.
            logging.error('Cannot open %s.', locations_url)
            return util.send_not_found(self)
        except (TypeError, ValueError) as e:
            logging.error('The json format of %s is not valid: %s',
                          locations_url, e)
            return util.send_not_found(self)

        site_ids = set()

        # Validate the site data and collect the valid site ids.
        valid_sites_json = []
        for site in sites_json:
            if not self._is_valid_site(site):
                continue
            valid_sites_json.append(site)
            site_ids.add(site[self.SITE_FIELD])

        # Site ids currently stored in the datastore.
        mlab_site_ids = set()
        mlab_sites = list(model.Site.all().fetch(limit=None))
        for site in mlab_sites:
            mlab_site_ids.add(site.site_id)

        new_site_ids = site_ids.difference(mlab_site_ids)

        # Do not remove sites here for now.

        # Every valid site id is by construction either new or unchanged, so
        # update all of them; new sites are additionally logged.
        for site in valid_sites_json:
            if site[self.SITE_FIELD] in new_site_ids:
                logging.info('Add new site %s.', site[self.SITE_FIELD])
            # TODO(claudiu) Notify(email) when this happens.
            if not self.update_site(site):
                logging.error('Error updating site %s.',
                              site[self.SITE_FIELD])
        # call check_ip job at the end of check_site job
        IPUpdateHandler().update()

        return util.send_success(self)
Example #6
0
    def get(self):
        """Dispatches an admin URL path to its view or redirect handler."""
        # TODO(claudiu) This doesn't scale. Generate HTML pages for maps
        # automatically for each tool.
        valid_paths = {
            '': lambda: self.redirect('/admin/map/ipv4/all'),
            '/admin': lambda: self.redirect('/admin/map/ipv4/all'),
            '/admin/sites': lambda: self.site_view(),
            '/admin/sliver_tools': lambda: self.sliver_tool_view(),
            '/admin/map': lambda: self.redirect('/admin/map/ipv4/all'),
            '/admin/map/ipv4': lambda: self.redirect('/admin/map/ipv4/all'),
            '/admin/map/ipv4/all': lambda: self.map_view('all', 'ipv4'),
            '/admin/map/ipv4/mobiperf':
            lambda: self.map_view('mobiperf', 'ipv4'),
            '/admin/map/ipv4/neubot': lambda: self.map_view('neubot', 'ipv4'),
            '/admin/map/ipv4/ndt': lambda: self.map_view('ndt', 'ipv4'),
            '/admin/map/ipv6': lambda: self.map_view('all', 'ipv6'),
            '/admin/map/ipv6/all': lambda: self.map_view('all', 'ipv6'),
            '/admin/map/ipv6/mobiperf':
            lambda: self.map_view('mobiperf', 'ipv6'),
            '/admin/map/ipv6/neubot': lambda: self.map_view('neubot', 'ipv6'),
            '/admin/map/ipv6/ndt': lambda: self.map_view('ndt', 'ipv6'),
        }

        # Trailing slashes are ignored so '/admin/' matches '/admin'.
        path = self.request.path.rstrip('/')
        # Look up the handler directly; no need for .keys() membership.
        handler = valid_paths.get(path)
        if handler is None:
            return util.send_not_found(self)

        return handler()
Example #7
0
    def send_redirect_response(self, sliver_tools, query):
        """Sends an HTTP redirect (for web-based tools only).

        Args:
            sliver_tools: A list of SliverTool instances, representing the best
                sliver tool selected for this lookup request.
            query: A LookupQuery instance representing the user lookup request.
        """
        # isinstance is the idiomatic type check (PEP 8); behavior is the
        # same for actual lists.
        if not isinstance(sliver_tools, list):
            logging.error("Problem: sliver_tools is not a list.")
            return

        sliver_tool = sliver_tools[0]

        # npad uses a constant port of 8000
        if sliver_tool.tool_id == 'npad' and not sliver_tool.http_port:
            sliver_tool.http_port = '8000'

        if sliver_tool.http_port:
            fqdn = fqdn_rewrite.rewrite(sliver_tool.fqdn,
                                        query.tool_address_family,
                                        sliver_tool.tool_id)
            url = _create_tool_url(fqdn, sliver_tool.http_port)
            logging.debug('Redirecting to this url: %s', url)
            return self.redirect(url)

        # No HTTP port: the tool is not web-based, nothing to redirect to.
        return util.send_not_found(self, 'html')
Example #8
0
    def map_view(self, tool_id, address_family):
        """Displays a per tool map with the status of the slivers.

        Args:
            tool_id: A string representing the tool id (e.g., npad, ndt).
            address_family: A string specifying the address family (ipv4,ipv6).
        """
        sliver_tools = None

        if tool_id == 'all':
            sliver_tools = model.SliverTool.gql('ORDER BY tool_id DESC')
        else:
            # Prefer the memcache copy; fall back to the datastore on a miss.
            cached_sliver_tools = memcache.get(tool_id)
            if cached_sliver_tools:
                sliver_tools = cached_sliver_tools
            else:
                sliver_tools = model.SliverTool.gql(
                    'WHERE tool_id=:tool_id '
                    'ORDER BY tool_id DESC',
                    tool_id=tool_id)

        if not sliver_tools:
            return util.send_not_found(self)

        data = self.get_sites_info(sliver_tools, address_family)
        json_data = simplejson.dumps(data)
        # The template path is a plain constant; the former
        # ''.join([...]) wrapper added nothing.
        file_name = 'mlabns/templates/map_view.html'
        values = {'cities': json_data,
                  'tool_id': tool_id,
                  'address_family': address_family,
                  'privacy_doc_url': constants.PRIVACY_DOC_URL,
                  'design_doc_url': constants.DESIGN_DOC_URL}
        self.response.out.write(template.render(file_name, values))
Example #9
0
    def get(self):
        """Triggers the update handler.

        Updates sliver status with information from Nagios. The Nagios URL
        containing the information is stored in the Nagios db along with
        the credentials necessary to access the data.
        """
        nagios = model.Nagios.get_by_key_name(constants.DEFAULT_NAGIOS_ENTRY)
        if nagios is None:
            logging.error('Datastore does not have the Nagios credentials.')
            return util.send_not_found(self)

        # Install a global opener that authenticates against Nagios with
        # the credentials stored in the datastore.
        password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
        password_manager.add_password(None, nagios.url, nagios.username,
                                      nagios.password)
        authhandler = urllib2.HTTPDigestAuthHandler(password_manager)
        opener = urllib2.build_opener(authhandler)
        urllib2.install_opener(opener)

        tools_gql = model.Tool.gql('ORDER by tool_id DESC')
        for item in tools_gql.run(batch_size=constants.GQL_BATCH_SIZE):
            logging.info('Pulling status of %s from Nagios.', item.tool_id)
            # One status query per tool per address family suffix.
            for family in StatusUpdateHandler.NAGIOS_AF_SUFFIXES:
                slice_url = (nagios.url + '?show_state=1&service_name=' +
                             item.tool_id + family + '&plugin_output=1')
                slice_status = self.get_slice_status(slice_url)
                self.update_sliver_tools_status(slice_status, item.tool_id,
                                                family)
        return util.send_success(self)
Example #10
0
    def get(self):
        """Dispatches an admin URL path to its view or redirect handler."""
        # TODO(claudiu) This doesn't scale. Generate HTML pages for maps
        # automatically for each tool.
        valid_paths = {
            '': lambda: self.redirect('/admin/map/ipv4/all'),
            '/admin': lambda: self.redirect('/admin/map/ipv4/all'),
            '/admin/sites': lambda: self.site_view(),
            '/admin/sliver_tools': lambda: self.sliver_tool_view(),
            '/admin/map': lambda: self.redirect('/admin/map/ipv4/all'),
            '/admin/map/ipv4': lambda: self.redirect('/admin/map/ipv4/all'),
            '/admin/map/ipv4/all': lambda: self.map_view('all', 'ipv4'),
            '/admin/map/ipv4/mobiperf':
            lambda: self.map_view('mobiperf', 'ipv4'),
            '/admin/map/ipv4/neubot': lambda: self.map_view('neubot', 'ipv4'),
            '/admin/map/ipv4/ndt': lambda: self.map_view('ndt', 'ipv4'),
            '/admin/map/ipv6': lambda: self.map_view('all', 'ipv6'),
            '/admin/map/ipv6/all': lambda: self.map_view('all', 'ipv6'),
            '/admin/map/ipv6/mobiperf':
            lambda: self.map_view('mobiperf', 'ipv6'),
            '/admin/map/ipv6/neubot': lambda: self.map_view('neubot', 'ipv6'),
            '/admin/map/ipv6/ndt': lambda: self.map_view('ndt', 'ipv6'),
        }

        # Trailing slashes are ignored so '/admin/' matches '/admin'.
        path = self.request.path.rstrip('/')
        # Look up the handler directly; no need for .keys() membership.
        handler = valid_paths.get(path)
        if handler is None:
            return util.send_not_found(self)

        return handler()
Example #11
0
    def get(self):
        """Triggers the update handler.

        Updates sliver status with information from Nagios. The Nagios URL
        containing the information is stored in the Nagios db along with
        the credentials necessary to access the data.
        """
        nagios = model.Nagios.get_by_key_name(constants.DEFAULT_NAGIOS_ENTRY)
        if nagios is None:
            logging.error('Datastore does not have the Nagios credentials.')
            return util.send_not_found(self)

        # Install a global opener that authenticates against Nagios with
        # the credentials stored in the datastore.
        password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
        password_manager.add_password(None, nagios.url, nagios.username,
                                      nagios.password)
        authhandler = urllib2.HTTPDigestAuthHandler(password_manager)
        opener = urllib2.build_opener(authhandler)
        urllib2.install_opener(opener)

        tools_gql = model.Tool.gql('ORDER by tool_id DESC')
        for item in tools_gql.run(batch_size=constants.GQL_BATCH_SIZE):
            logging.info('Pulling status of %s from Nagios.', item.tool_id)
            # One status query per tool per address family suffix.
            for family in StatusUpdateHandler.NAGIOS_AF_SUFFIXES:
                slice_url = (nagios.url + '?show_state=1&service_name=' +
                             item.tool_id + family + '&plugin_output=1')
                slice_status = self.get_slice_status(slice_url)
                self.update_sliver_tools_status(slice_status, item.tool_id,
                                                family)
        return util.send_success(self)
Example #12
0
    def send_redirect_response(self, sliver_tools, query):
        """Sends an HTTP redirect (for web-based tools only).

        Args:
            sliver_tools: A list of SliverTool instances, representing the
                best sliver tool selected for this lookup request.
            query: A LookupQuery instance representing the user lookup request.
        """
        # isinstance is the idiomatic type check (PEP 8); behavior is the
        # same for actual lists.
        if not isinstance(sliver_tools, list):
            logging.error("Problem: sliver_tools is not a list.")
            return

        sliver_tool = sliver_tools[0]

        if sliver_tool.http_port:
            # Annotate the FQDN as required by the query before building
            # the redirect target.
            url = ''.join([
                'http://',
                self._add_fqdn_annotation(query, sliver_tool.fqdn), ':',
                sliver_tool.http_port
            ])
            return self.redirect(str(url))

        # No HTTP port: the tool is not web-based, nothing to redirect to.
        return util.send_not_found(self, 'html')
Example #13
0
    def get(self):
        """Handles an HTTP GET request.

        The URL must be in the following format:
        'http://mlab-ns.appspot.com/tool-name?query_string',
        where tool-name is one of the tools running on M-Lab.
        For more information about the URL and the supported arguments
        in the query string, see the design doc at http://goo.gl/48S22.
        """
        # Check right away whether we should proxy this request.
        proxy_url = reverse_proxy.try_reverse_proxy_url(
            self.request, datetime.datetime.now())
        if proxy_url:
            # NB: if sending the proxy url is unsuccessful, then fall through
            # to regular request handling.
            if self.send_proxy_response(proxy_url):
                logging.info('[reverse_proxy],true,%s', proxy_url)
                return

        query = lookup_query.LookupQuery()
        query.initialize_from_http_request(self.request)
        logging.info('Policy is %s', query.policy)

        signature = query.calculate_client_signature()
        active_resolver = resolver.new_resolver(query.policy, signature)
        tools = active_resolver.answer_query(query)

        if tools is None:
            return util.send_not_found(self, query.response_format)

        # Dispatch on the requested response format.
        fmt = query.response_format
        if fmt == message.FORMAT_HTML:
            self.send_html_response(tools, query)
        elif fmt == message.FORMAT_REDIRECT:
            self.send_redirect_response(tools, query)
        elif fmt == message.FORMAT_BT:
            self.send_bt_response(tools, query)
        elif fmt == message.FORMAT_MAP:
            self.send_map_response(tools, query,
                                   active_resolver.get_candidates(query))
        else:
            # FORMAT_JSON and any unrecognized format fall back to JSON,
            # which is valid for every tool.
            self.send_json_response(tools, query)

        # At this point, the client has received a response but the server has
        # not closed the connection.
        self.log_location(query, tools)

        # TODO (claudiu) Add a FORMAT_TYPE column in the BigQuery schema.
        self.log_request(query, tools)
Example #14
0
    def get(self):
        """Handles an HTTP GET request.

        The URL must be in the following format:
        'http://mlab-ns.appspot.com/tool-name?query_string',
        where tool-name is one of the tools running on M-Lab.
        For more information about the URL and the supported arguments
        in the query string, see the design doc at http://goo.gl/48S22.
        """
        query = lookup_query.LookupQuery()
        query.initialize_from_http_request(self.request)
        logging.info('Policy is %s', query.policy)

        signature = query.calculate_client_signature()
        active_resolver = resolver.new_resolver(query.policy, signature)
        tools = active_resolver.answer_query(query)

        if tools is None:
            return util.send_not_found(self, query.response_format)

        # Dispatch on the requested response format.
        fmt = query.response_format
        if fmt == message.FORMAT_HTML:
            self.send_html_response(tools, query)
        elif fmt == message.FORMAT_REDIRECT:
            self.send_redirect_response(tools, query)
        elif fmt == message.FORMAT_BT:
            self.send_bt_response(tools, query)
        elif fmt == message.FORMAT_MAP:
            self.send_map_response(tools, query,
                                   active_resolver.get_candidates(query))
        else:
            # FORMAT_JSON and any unrecognized format fall back to JSON,
            # which is valid for every tool.
            self.send_json_response(tools, query)

        # TODO (claudiu) Add a FORMAT_TYPE column in the BigQuery schema.
        self.log_request(query, tools)
Example #15
0
    def get(self):
        """Triggers the update handler.

        Updates sliver status with information from Nagios. The Nagios URL
        containing the information is stored in the Nagios db along with
        the credentials necessary to access the data.
        """
        nagios_config = nagios_config_wrapper.get_nagios_config()
        if nagios_config is None:
            logging.error('Datastore does not have the Nagios credentials.')
            return util.send_not_found(self)

        # Authenticate once; each per-slice fetch below reuses the session.
        nagios_status.authenticate_nagios(nagios_config)

        for slice_info in nagios_status.get_slice_info(nagios_config.url):
            status = nagios_status.get_slice_status(slice_info.slice_url)
            if status:
                self.update_sliver_tools_status(status, slice_info.tool_id,
                                                slice_info.address_family)
        return util.send_success(self)
Example #16
0
    def get(self):
        """Triggers the update handler.

        Updates sliver status with information from Nagios. The Nagios URL
        containing the information is stored in the Nagios db along with
        the credentials necessary to access the data.
        """
        config = nagios_config_wrapper.get_nagios_config()
        if config is None:
            logging.error('Datastore does not have the Nagios credentials.')
            return util.send_not_found(self)

        # Authenticate up front so the per-slice status fetches succeed.
        nagios_status.authenticate_nagios(config)

        for info in nagios_status.get_slice_info(config.url):
            current_status = nagios_status.get_slice_status(info.slice_url)
            if current_status:
                self.update_sliver_tools_status(current_status, info.tool_id,
                                                info.address_family)
        return util.send_success(self)
Example #17
0
    def send_redirect_response(self, sliver_tools, query):
        """Sends an HTTP redirect (for web-based tools only).

        Args:
            sliver_tools: A list of SliverTool instances, representing the
                best sliver tool selected for this lookup request.
            query: A LookupQuery instance representing the user lookup request.
        """
        # isinstance is the idiomatic type check (PEP 8); behavior is the
        # same for actual lists.
        if not isinstance(sliver_tools, list):
            logging.error("Problem: sliver_tools is not a list.")
            return

        sliver_tool = sliver_tools[0]

        if sliver_tool.http_port:
            url = _create_tool_url(sliver_tool.fqdn, query.tool_address_family,
                                   sliver_tool.http_port)
            return self.redirect(url)

        # No HTTP port: the tool is not web-based, nothing to redirect to.
        return util.send_not_found(self, 'html')
Example #18
0
    def get(self):
        """Handles an HTTP GET request.

        The URL must be in the following format:
        'http://mlab-ns.appspot.com/tool-name?query_string',
        where tool-name is one of the tools running on M-Lab.
        For more information about the URL and the supported arguments
        in the query string, see the design doc at http://goo.gl/48S22.
        """
        query = lookup_query.LookupQuery()
        query.initialize_from_http_request(self.request)

        logging.info('Policy is %s', query.policy)
        lookup_resolver = resolver.new_resolver(query.policy)
        sliver_tools = lookup_resolver.answer_query(query)

        if sliver_tools is None:
            return util.send_not_found(self, query.response_format)

        if query.response_format == message.FORMAT_JSON:
            self.send_json_response(sliver_tools, query)
        elif query.response_format == message.FORMAT_HTML:
            self.send_html_response(sliver_tools, query)
        elif query.response_format == message.FORMAT_REDIRECT:
            self.send_redirect_response(sliver_tools, query)
        elif query.response_format == message.FORMAT_BT:
            self.send_bt_response(sliver_tools, query)
        elif query.response_format == message.FORMAT_MAP:
            candidates = lookup_resolver.get_candidates(query)
            # 'sliver_tool' (singular) was undefined here and raised a
            # NameError on the map path; pass the sliver tool list instead.
            self.send_map_response(sliver_tools, query, candidates)
        else:
            # TODO (claudiu) Discuss what should be the default behaviour.
            # I think json it's OK since is valid for all tools, while
            # redirect only applies to web-based tools (e.g., npad)
            self.send_json_response(sliver_tools, query)

        # TODO (claudiu) Add a FORMAT_TYPE column in the BigQuery schema.
        self.log_request(query, sliver_tools)
Example #19
0
    def send_redirect_response(self, sliver_tools, query):
        """Sends an HTTP redirect (for web-based tools only).

        Args:
            sliver_tools: A list of SliverTool instances, representing the
                best sliver tool selected for this lookup request.
            query: A LookupQuery instance representing the user lookup request.
        """
        # isinstance is the idiomatic type check (PEP 8); behavior is the
        # same for actual lists.
        if not isinstance(sliver_tools, list):
            logging.error("Problem: sliver_tools is not a list.")
            return

        sliver_tool = sliver_tools[0]

        if sliver_tool.http_port:
            # Annotate the FQDN as required by the query before building
            # the redirect target.
            url = ''.join([
                'http://', self._add_fqdn_annotation(query, sliver_tool.fqdn),
                ':', sliver_tool.http_port])
            return self.redirect(str(url))

        # No HTTP port: the tool is not web-based, nothing to redirect to.
        return util.send_not_found(self, 'html')
Example #20
0
    def get(self):
        """Handles an HTTP GET request.

        The URL must be in the following format:
        'http://mlab-ns.appspot.com/tool-name?query_string',
        where tool-name is one of the tools running on M-Lab.
        For more information about the URL and the supported arguments
        in the query string, see the design doc at http://goo.gl/48S22.
        """
        query = lookup_query.LookupQuery()
        query.initialize_from_http_request(self.request)

        # Check right away whether we should proxy this request.
        proxy_url = reverse_proxy.try_reverse_proxy_url(
            query, datetime.datetime.now())
        if proxy_url:
            # NB: if sending the proxy url is unsuccessful, then fall through
            # to regular request handling.
            if self.send_proxy_response(proxy_url):
                logging.info('[reverse_proxy],true,%s', proxy_url)
                return

        logging.info('Policy is %s', query.policy)

        signature = query.calculate_client_signature()
        active_resolver = resolver.new_resolver(query.policy, signature)
        tools = active_resolver.answer_query(query)

        if tools is None:
            # NOTE: at this point, we know that either the query is invalid
            # (e.g. bad tool_id) or that a valid query has no capacity.
            if model.is_valid_tool(query.tool_id):
                # A.K.A. "no capacity".
                return util.send_no_content(self)
            # Invalid tool, so report "404 Not Found".
            return util.send_not_found(self, query.response_format)

        # Dispatch on the requested response format.
        fmt = query.response_format
        if fmt == message.FORMAT_HTML:
            self.send_html_response(tools, query)
        elif fmt == message.FORMAT_REDIRECT:
            self.send_redirect_response(tools, query)
        elif fmt == message.FORMAT_BT:
            self.send_bt_response(tools, query)
        elif fmt == message.FORMAT_MAP:
            self.send_map_response(tools, query,
                                   active_resolver.get_candidates(query))
        else:
            # FORMAT_JSON and any unrecognized format fall back to JSON,
            # which is valid for every tool.
            self.send_json_response(tools, query)

        # At this point, the client has received a response but the server has
        # not closed the connection.
        self.log_location(query, tools)

        # TODO (claudiu) Add a FORMAT_TYPE column in the BigQuery schema.
        self.log_request(query, tools)
Example #21
0
    def update(self):
        """Triggers the update handler.

        Updates sliver tool IP addresses.
        """
        try:
            project = app_identity.get_application_id()
            if project == 'mlab-ns':
                # TODO: eliminate project translation.
                host_ips_url = self.DEFAULT_IP_LIST_URL
            else:
                host_ips_url = self.TEMPLATE_IP_LIST_URL.format(
                    project=project)
        except AttributeError:
            logging.error('Cannot get project name.')
            return util.send_not_found(self)

        try:
            raw_json = urllib2.urlopen(host_ips_url).read()
            logging.info('Fetched hostnames.json from: %s', host_ips_url)
        except urllib2.HTTPError:
            # TODO(claudiu) Notify(email) when this happens.
            logging.error('Cannot open %s.', host_ips_url)
            return util.send_not_found(self)

        try:
            rows = json.loads(raw_json)
        except (TypeError, ValueError) as e:
            logging.error('Failed to parse raw json from %s: %s', host_ips_url,
                          e)
            return util.send_not_found(self)

        # Fetch all data that we are going to need from the datastore up front.
        sites = list(model.Site.all().fetch(limit=None))
        tools = list(model.Tool.all().fetch(limit=None))
        slivertools = list(model.SliverTool.all().fetch(limit=None))

        for row in rows:
            # Expected keys: "hostname,ipv4,ipv6" (ipv6 can be an empty string).
            fqdn = row['hostname']
            ipv4 = row['ipv4']
            ipv6 = row['ipv6']

            if not production_check.is_production_slice(fqdn):
                continue

            # Gather some information about this site which will be used to
            # determine if we need to do anything with this site/sliver.
            slice_id, site_id, server_id = model.get_slice_site_server_ids(
                fqdn)

            # Make sure this is a valid slice FQDN, and not a mistake or just
            # a node name.
            if slice_id is None or site_id is None or server_id is None:
                continue

            # If mlab-ns does not support this site, then skip it.
            matching_sites = [s for s in sites if s.site_id == site_id]
            if not matching_sites:
                logging.info('mlab-ns does not support site %s.', site_id)
                continue
            site = matching_sites[0]

            # If mlab-ns does not serve/support this slice, then skip it.
            # Note: a given slice_id might have multiple tools (e.g.,
            # iupui_ndt has both 'ndt' and 'ndt_ssl' tools.
            slice_tools = [t for t in tools if t.slice_id == slice_id]
            if not slice_tools:
                continue

            for slice_tool in slice_tools:
                # See if this sliver_tool already exists in the datastore.
                candidates = [
                    st for st in slivertools
                    if st.fqdn == fqdn and st.tool_id == slice_tool.tool_id
                ]

                # Check to see if the sliver_tool already exists in the
                # datastore. If not, add it to the datastore.
                if len(candidates) == 1:
                    sliver_tool = candidates[0]
                elif not candidates:
                    logging.info(
                        'For tool %s, fqdn %s is not in datastore.  Adding it.',
                        slice_tool.tool_id, fqdn)
                    sliver_tool = self.initialize_sliver_tool(slice_tool, site,
                                                              server_id, fqdn)
                else:
                    logging.error(
                        'Error, or too many sliver_tools returned for {}:{}.'.format(
                            slice_tool.tool_id, fqdn))
                    continue

                updated_sliver_tool = self.set_sliver_tool(
                    sliver_tool, ipv4, ipv6, site.roundrobin)

                # Update datastore if the SliverTool got updated.
                if updated_sliver_tool:
                    logging.info('Updating IP info for fqdn: %s', fqdn)
                    self.put_sliver_tool(updated_sliver_tool)
Example #22
0
    def update(self):
        """Updates SliverTool IP addresses from the per-project host IP list.

        Fetches the "FQDN,IPv4,IPv6" list published for this AppEngine
        project, then creates or updates the matching SliverTool entities in
        the datastore via set_sliver_tool/put_sliver_tool. Responds with a
        404 when the project name or the IP list cannot be fetched.
        """
        try:
            project = app_identity.get_application_id()
            if project == 'mlab-ns':
                # TODO: eliminate project translation.
                host_ips_url = self.DEFAULT_IP_LIST_URL
            else:
                host_ips_url = self.TEMPLATE_IP_LIST_URL.format(project=project)
        except AttributeError:
            logging.error('Cannot get project name.')
            return util.send_not_found(self)

        try:
            lines = urllib2.urlopen(host_ips_url).read().strip('\n').split('\n')
            logging.info('Fetched mlab-host-ips.txt from: %s', host_ips_url)
        except urllib2.HTTPError:
            # TODO(claudiu) Notify(email) when this happens.
            logging.error('Cannot open %s.', host_ips_url)
            return util.send_not_found(self)

        # Fetch all data that we are going to need from the datastore up front.
        sites = list(model.Site.all().fetch(limit=None))
        tools = list(model.Tool.all().fetch(limit=None))
        slivertools = list(model.SliverTool.all().fetch(limit=None))

        for line in lines:
            # Expected format: "FQDN,IPv4,IPv6" (IPv6 can be an empty string).
            line_fields = line.split(',')
            if len(line_fields) != 3:
                logging.error('Line does not have 3 fields: %s.', line)
                continue
            fqdn = line_fields[0]
            ipv4 = line_fields[1]
            ipv6 = line_fields[2]

            # Only production slices are served by mlab-ns.
            if not production_check.is_production_slice(fqdn):
                continue

            # Gather some information about this site which will be used to
            # determine if we need to do anything with this site/sliver.
            slice_id, site_id, server_id = \
                model.get_slice_site_server_ids(fqdn)

            # Make sure this is a valid slice FQDN, and not a mistake or just a
            # node name.
            if slice_id is None or site_id is None or server_id is None:
                continue

            # If mlab-ns does not support this site, then skip it.
            site = list(filter(lambda s: s.site_id == site_id, sites))
            if len(site) == 0:
                logging.info('mlab-ns does not support site %s.', site_id)
                continue
            else:
                site = site[0]

            # If mlab-ns does not serve/support this slice, then skip it. Note:
            # a given slice_id might have multiple tools (e.g., iupui_ndt has
            # both 'ndt' and 'ndt_ssl' tools).
            slice_tools = list(filter(lambda t: t.slice_id == slice_id, tools))

            if len(slice_tools) == 0:
                continue

            for slice_tool in slice_tools:
                # See if this sliver_tool already exists in the datastore.
                slivertool = list(filter(
                    lambda st: st.fqdn == fqdn and st.tool_id == slice_tool.tool_id,
                    slivertools))

                # Check to see if the sliver_tool already exists in the
                # datastore. If not, add it to the datastore.
                if len(slivertool) == 1:
                    sliver_tool = slivertool[0]
                elif len(slivertool) == 0:
                    logging.info(
                        'For tool %s, fqdn %s is not in datastore.  Adding it.',
                        slice_tool.tool_id, fqdn)
                    sliver_tool = self.initialize_sliver_tool(slice_tool, site,
                                                              server_id, fqdn)
                else:
                    logging.error(
                        'Error, or too many sliver_tools returned for {}:{}.'.format(
                            slice_tool.tool_id, fqdn))
                    continue

                updated_sliver_tool = self.set_sliver_tool(
                    sliver_tool, ipv4, ipv6, site.roundrobin)

                # Update datastore if the SliverTool got updated.
                if updated_sliver_tool:
                    logging.info('Updating IP info for fqdn: %s', fqdn)
                    self.put_sliver_tool(updated_sliver_tool)

        return
Example #23
0
 def post(self):
     """POST is not supported by this handler; always respond with 404."""
     response = util.send_not_found(self)
     return response
Example #24
0
    def get(self):
        """Triggers the update handler.

        Fetches the "FQDN,IPv4,IPv6" list from Nagios, updates the IP
        addresses of the matching SliverTool entities in the datastore, and
        refreshes the per-tool sliver lists in memcache.
        """
        lines = []
        try:
            # Each line is expected to be "FQDN,IPv4,IPv6".
            lines = urllib2.urlopen(self.IP_LIST_URL).read().strip('\n').split(
                '\n')
        except urllib2.HTTPError:
            # TODO(claudiu) Notify(email) when this happens.
            logging.error('Cannot open %s.', self.IP_LIST_URL)
            return util.send_not_found(self)

        # Maps tool_id -> list of SliverTool entities whose IPs are unchanged;
        # used at the bottom of this method to refresh memcache.
        sliver_tool_list = {}
        for line in lines:
            # Expected format: "FQDN,IPv4,IPv6" (IPv6 can be an empty string).
            line_fields = line.split(',')
            if len(line_fields) != 3:
                logging.error('Line does not have 3 fields: %s.', line)
                continue
            fqdn = line_fields[0]
            ipv4 = line_fields[1]
            ipv6 = line_fields[2]

            # Only production slivers are served by mlab-ns.
            if not production_check.is_production_slice(fqdn):
                continue

            sliver_tool_gql = model.SliverTool.gql('WHERE fqdn=:fqdn',
                                                   fqdn=fqdn)
            # FQDN is not necessarily unique across tools.
            for sliver_tool in sliver_tool_gql.run(
                    batch_size=constants.GQL_BATCH_SIZE):
                # case 1) Sliver tool has not changed. Nothing to do.
                if (sliver_tool != None and sliver_tool.sliver_ipv4 == ipv4 and
                        sliver_tool.sliver_ipv6 == ipv6):
                    pass
                # case 2) Sliver tool has changed.
                else:
                    # case 2.1) Sliver tool does not exist in datastore. Initialize
                    #     sliver if the corresponding tool exists in the Tool table
                    #     and the corresponding site exists in the Site table. This
                    #     case occurs when a new tool has been added after the last
                    #     IPUpdateHandler ran. The sliver tool will actually be
                    #     written to datastore at the next step.
                    if sliver_tool == None:
                        logging.warning('sliver_tool %s is not in datastore.',
                                        fqdn)
                        slice_id, site_id, server_id = \
                            model.get_slice_site_server_ids(fqdn)
                        if slice_id is None or site_id is None or server_id is None:
                            logging.info('Non valid sliver fqdn %s.', fqdn)
                            continue
                        tool = model.Tool.gql('WHERE slice_id=:slice_id',
                                              slice_id=slice_id).get()
                        if tool == None:
                            logging.info('mlab-ns does not support slice %s.',
                                         slice_id)
                            continue
                        site = model.Site.gql('WHERE site_id=:site_id',
                                              site_id=site_id).get()
                        if site == None:
                            logging.info('mlab-ns does not support site %s.',
                                         site_id)
                            continue
                        sliver_tool = self.initialize_sliver_tool(
                            tool, site, server_id, fqdn)

                    # case 2.2) Sliver tool exists in datastore.
                    # A missing IP from Nagios is stored as NO_IP_ADDRESS.
                    if ipv4 != None:
                        sliver_tool.sliver_ipv4 = ipv4
                    else:
                        sliver_tool.sliver_ipv4 = message.NO_IP_ADDRESS
                    if ipv6 != None:
                        sliver_tool.sliver_ipv6 = ipv6
                    else:
                        sliver_tool.sliver_ipv6 = message.NO_IP_ADDRESS

                    try:
                        sliver_tool.put()
                        logging.info(
                            'Succeeded to write IPs of %s (%s, %s) in datastore.',
                            fqdn, ipv4, ipv6)
                    except db.TransactionFailedError:
                        logging.error(
                            'Failed to write IPs of %s (%s, %s) in datastore.',
                            fqdn, ipv4, ipv6)
                    # NOTE(review): changed slivers are written to datastore but
                    # skipped for the memcache refresh below — presumably they
                    # are picked up on the next run; confirm this is intended.
                    continue

                # Only unchanged slivers reach here and are queued for memcache.
                if sliver_tool.tool_id not in sliver_tool_list:
                    sliver_tool_list[sliver_tool.tool_id] = []
                sliver_tool_list[sliver_tool.tool_id].append(sliver_tool)
                logging.info('sliver %s to be added to memcache',
                             sliver_tool.fqdn)

        # Update memcache
        # Never set the memcache to an empty list since it's more likely that
        # this is a Nagios failure.
        if sliver_tool_list:
            for tool_id in sliver_tool_list.keys():
                if not memcache.set(
                        tool_id,
                        sliver_tool_list[tool_id],
                        namespace=constants.MEMCACHE_NAMESPACE_TOOLS):
                    logging.error(
                        'Failed to update sliver IP addresses in memcache.')

        return util.send_success(self)
Example #25
0
    def update(self):
        """Updates SliverTool IP addresses from the mlab-host-ips.txt file.

        Fetches the "FQDN,IPv4,IPv6" list for this project (the testing
        project uses a dedicated URL) and creates or updates the matching
        SliverTool entities in the datastore. Responds with a 404 when the
        project name or the IP list cannot be fetched.
        """
        try:
            project = app_identity.get_application_id()
            if project == 'mlab-nstesting':
                host_ips_url = self.TESTING_IP_LIST_URL
            else:
                host_ips_url = self.IP_LIST_URL
        except AttributeError:
            logging.error('Cannot get project name.')
            return util.send_not_found(self)

        try:
            lines = urllib2.urlopen(host_ips_url).read().strip('\n').split(
                '\n')
            logging.info('Fetched mlab-host-ips.txt from: %s', host_ips_url)
        except urllib2.HTTPError:
            # TODO(claudiu) Notify(email) when this happens.
            logging.error('Cannot open %s.', host_ips_url)
            return util.send_not_found(self)

        # Fetch all data that we are going to need from the datastore up front.
        sites = list(model.Site.all().fetch(limit=None))
        tools = list(model.Tool.all().fetch(limit=None))
        slivertools = list(model.SliverTool.all().fetch(limit=None))

        for line in lines:
            # Expected format: "FQDN,IPv4,IPv6" (IPv6 can be an empty string).
            line_fields = line.split(',')
            if len(line_fields) != 3:
                logging.error('Line does not have 3 fields: %s.', line)
                continue
            fqdn = line_fields[0]
            ipv4 = line_fields[1]
            ipv6 = line_fields[2]

            # Only production slivers are served by mlab-ns.
            if not production_check.is_production_slice(fqdn):
                continue

            # Gather some information about this site which will be used to
            # determine if we need to do anything with this site/sliver.
            slice_id, site_id, server_id = \
                model.get_slice_site_server_ids(fqdn)

            # Make sure this is a valid slice FQDN, and not a mistake or just a
            # node name.
            if slice_id is None or site_id is None or server_id is None:
                continue

            # If mlab-ns does not support this site, then skip it.
            site = list(filter(lambda s: s.site_id == site_id, sites))
            if len(site) == 0:
                logging.info('mlab-ns does not support site %s.', site_id)
                continue
            else:
                site = site[0]

            # If mlab-ns does not serve/support this slice, then skip it. Note:
            # a given slice_id might have multiple tools (e.g., iupui_ndt has
            # both 'ndt' and 'ndt_ssl' tools).
            slice_tools = list(filter(lambda t: t.slice_id == slice_id, tools))

            if len(slice_tools) == 0:
                continue

            for slice_tool in slice_tools:
                # See if this sliver_tool already exists in the datastore.
                slivertool = list(
                    filter(
                        lambda st: st.fqdn == fqdn and st.tool_id == slice_tool
                        .tool_id, slivertools))

                # Check to see if the sliver_tool already exists in the
                # datastore. If not, add it to the datastore.
                if len(slivertool) == 1:
                    sliver_tool = slivertool[0]
                elif len(slivertool) == 0:
                    logging.info(
                        'For tool %s, fqdn %s is not in datastore.  Adding it.',
                        slice_tool.tool_id, fqdn)
                    sliver_tool = self.initialize_sliver_tool(
                        slice_tool, site, server_id, fqdn)
                else:
                    logging.error(
                        'Error, or too many sliver_tools returned for {}:{}.'.
                        format(slice_tool.tool_id, fqdn))
                    continue

                updated_sliver_tool = self.set_sliver_tool(
                    sliver_tool, ipv4, ipv6, site.roundrobin)

                # Update datastore if the SliverTool got updated.
                if updated_sliver_tool:
                    logging.info('Updating IP info for fqdn: %s', fqdn)
                    self.put_sliver_tool(updated_sliver_tool)

        return
Example #26
0
    def get(self):
        """Triggers the registration handler.

        Fetches the site list for this project, registers any sites not yet
        known to mlab-ns, and refreshes data for the ones that already exist.
        Finishes by running the IP update job so new sites get slivers.
        Responds with 404 when the project name or the site list cannot be
        fetched or parsed.
        """
        try:
            project = app_identity.get_application_id()
            if project == 'mlab-ns':
                # TODO: eliminate project translation.
                json_file = self.DEFAULT_SITE_LIST_URL
            else:
                json_file = self.TEMPLATE_SITE_LIST_URL.format(project=project)
        except AttributeError:
            logging.error('Cannot get project name.')
            return util.send_not_found(self)

        try:
            sites_json = json.loads(urllib2.urlopen(json_file).read())
        except urllib2.HTTPError:
            # TODO(claudiu) Notify(email) when this happens.
            logging.error('Cannot open %s.', json_file)
            return util.send_not_found(self)
        except (TypeError, ValueError) as e:
            logging.error('The json format of %s is not valid: %s', json_file,
                          e)
            return util.send_not_found(self)

        site_ids = set()

        # Validate the site data.
        valid_sites_json = []
        for site in sites_json:
            if not self._is_valid_site(site):
                continue
            valid_sites_json.append(site)
            site_ids.add(site[self.SITE_FIELD])

        # Collect the ids of the sites mlab-ns already knows about.
        mlab_site_ids = set()
        mlab_sites = list(model.Site.all().fetch(limit=None))
        for site in mlab_sites:
            mlab_site_ids.add(site.site_id)

        unchanged_site_ids = site_ids.intersection(mlab_site_ids)
        new_site_ids = site_ids.difference(mlab_site_ids)

        # Do not remove sites here for now.

        for site in valid_sites_json:
            # Register new site AND update an existing site anyway.
            if (site[self.SITE_FIELD] in new_site_ids) or (
                    site[self.SITE_FIELD] in unchanged_site_ids):
                if site[self.SITE_FIELD] in new_site_ids:
                    logging.info('Add new site %s.', site[self.SITE_FIELD])
                # TODO(claudiu) Notify(email) when this happens.
                if not self.update_site(site):
                    logging.error('Error updating site %s.',
                                  site[self.SITE_FIELD])
        # call check_ip job at the end of check_site job
        IPUpdateHandler().update()

        return util.send_success(self)
Example #27
0
    def get(self):
        """Triggers the update handler.

        Updates sliver status with information from either Nagios or Prometheus.
        The base URLs for accessing status information are stored in the
        datastore along with the credentials necessary to access the data.
        Responds with 404 only when both required status sources are
        unavailable.
        """
        # Determine if there are any dependencies on Prometheus.
        prometheus_deps = model.get_status_source_deps('prometheus')
        # Get Prometheus configs, and authenticate.
        prometheus_config = prometheus_config_wrapper.get_prometheus_config()
        if prometheus_config is None:
            logging.error('Datastore does not have the Prometheus configs.')
        else:
            prometheus_opener = prometheus_status.authenticate_prometheus(
                prometheus_config)

        # Determine if there are any dependencies on Nagios.
        nagios_deps = model.get_status_source_deps('nagios')
        # Get Nagios configs, and authenticate.
        nagios_config = nagios_config_wrapper.get_nagios_config()
        if nagios_config is None:
            logging.error('Datastore does not have the Nagios configs.')
        else:
            nagios_opener = nagios_status.authenticate_nagios(nagios_config)

        # If we have dependencies on both Prometheus and Nagios, and neither one
        # of the configs is available, then abort, because we can't fetch status
        # from either. However, if we have one or the other, then continue,
        # because it may be preferable to update _some_ statuses than none.
        if (prometheus_deps and not prometheus_config) and (nagios_deps and
                                                            not nagios_config):
            logging.error(
                'Neither Nagios nor Prometheus configs are available.')
            return util.send_not_found(self)

        for tool_id in model.get_all_tool_ids():
            tool = model.get_tool_from_tool_id(tool_id)
            for address_family in ['', '_ipv6']:
                if tool.status_source == 'prometheus':
                    logging.info('Status source for %s%s is: prometheus',
                                 tool_id, address_family)
                    # Only proceed if prometheus_config exists, and hence
                    # prometheus_opener should also exist.
                    if prometheus_config:
                        slice_info = prometheus_status.get_slice_info(
                            prometheus_config.url, tool_id, address_family)
                        if not slice_info:
                            continue
                        slice_status = prometheus_status.get_slice_status(
                            slice_info.slice_url, prometheus_opener)
                    else:
                        logging.error(
                            'Prometheus config unavailable. Skipping %s%s',
                            tool_id, address_family)
                        continue
                elif tool.status_source == 'nagios':
                    logging.info('Status source for %s%s is: nagios', tool_id,
                                 address_family)
                    # Only proceed if nagios_config exists, and hence
                    # nagios_opener should also exist.
                    if nagios_config:
                        slice_info = nagios_status.get_slice_info(
                            nagios_config.url, tool_id, address_family)
                        # Guard against a missing slice_info, mirroring the
                        # Prometheus branch above.
                        if not slice_info:
                            continue
                        slice_status = nagios_status.get_slice_status(
                            slice_info.slice_url, nagios_opener)
                    else:
                        logging.error(
                            'Nagios config unavailable. Skipping %s%s', tool_id,
                            address_family)
                        continue
                else:
                    logging.error('Unknown tool status_source: %s.',
                                  tool.status_source)
                    continue

                if slice_status:
                    self.update_sliver_tools_status(slice_status,
                                                    slice_info.tool_id,
                                                    slice_info.address_family)

        return util.send_success(self)
Example #28
0
    def get(self):
        """Triggers the update handler.

        Fetches the "FQDN,IPv4,IPv6" list from Nagios, updates the IP
        addresses of the matching SliverTool entities in the datastore, and
        refreshes the per-tool sliver lists in memcache.
        """
        lines = []
        try:
            lines = urllib2.urlopen(
                self.IP_LIST_URL).read().strip('\n').split('\n')
        except urllib2.HTTPError:
            # TODO(claudiu) Notify(email) when this happens.
            logging.error('Cannot open %s.', self.IP_LIST_URL)
            return util.send_not_found(self)

        # Maps tool_id -> list of SliverTool entities whose IPs are unchanged;
        # used below to refresh memcache.
        sliver_tool_list = {}
        for line in lines:
            # Expected format: "FQDN,IPv4,IPv6" (IPv6 can be an empty string).
            line_fields = line.split(',')
            if len(line_fields) != 3:
                logging.error('Line does not have 3 fields: %s.', line)
                continue
            fqdn = line_fields[0]
            ipv4 = line_fields[1]
            ipv6 = line_fields[2]

            if not production_check.is_production_slice(fqdn):
                continue

            sliver_tool_gql = model.SliverTool.gql('WHERE fqdn=:fqdn',
                                                   fqdn=fqdn)
            # FQDN is not necessarily unique across tools.
            for sliver_tool in sliver_tool_gql.run(
                    batch_size=constants.GQL_BATCH_SIZE):
                # case 1) Sliver tool has not changed. Nothing to do.
                if (sliver_tool != None and sliver_tool.sliver_ipv4 == ipv4 and
                        sliver_tool.sliver_ipv6 == ipv6):
                    pass
                # case 2) Sliver tool has changed.
                else:
                    # case 2.1) Sliver tool does not exist in datastore.
                    #     Initialize sliver if the corresponding tool exists in
                    #     the Tool table and the corresponding site exists in
                    #     the Site table. This case occurs when a new tool has
                    #     been added after the last IPUpdateHandler ran. The
                    #     sliver tool will actually be written to datastore at
                    #     the next step.
                    if sliver_tool == None:
                        logging.warning('sliver_tool %s is not in datastore.',
                                        fqdn)
                        slice_id, site_id, server_id = \
                            model.get_slice_site_server_ids(fqdn)
                        if slice_id is None or site_id is None or server_id is None:
                            logging.info('Non valid sliver fqdn %s.', fqdn)
                            continue
                        tool = model.Tool.gql('WHERE slice_id=:slice_id',
                                              slice_id=slice_id).get()
                        if tool == None:
                            logging.info('mlab-ns does not support slice %s.',
                                         slice_id)
                            continue
                        site = model.Site.gql('WHERE site_id=:site_id',
                                              site_id=site_id).get()
                        if site == None:
                            logging.info('mlab-ns does not support site %s.',
                                         site_id)
                            continue
                        sliver_tool = self.initialize_sliver_tool(
                            tool, site, server_id, fqdn)

                    # case 2.2) Sliver tool exists in datastore. A missing IP
                    # is stored as NO_IP_ADDRESS.
                    if ipv4 != None:
                        sliver_tool.sliver_ipv4 = ipv4
                    else:
                        sliver_tool.sliver_ipv4 = message.NO_IP_ADDRESS
                    if ipv6 != None:
                        sliver_tool.sliver_ipv6 = ipv6
                    else:
                        sliver_tool.sliver_ipv6 = message.NO_IP_ADDRESS

                    try:
                        sliver_tool.put()
                        logging.info(
                            'Succeeded to write IPs of %s (%s, %s) in datastore.',
                            fqdn, ipv4, ipv6)
                    except db.TransactionFailedError:
                        logging.error(
                            'Failed to write IPs of %s (%s, %s) in datastore.',
                            fqdn, ipv4, ipv6)
                    continue

                if sliver_tool.tool_id not in sliver_tool_list:
                    sliver_tool_list[sliver_tool.tool_id] = []
                sliver_tool_list[sliver_tool.tool_id].append(sliver_tool)
                logging.info('sliver %s to be added to memcache',
                             sliver_tool.fqdn)

        # Update memcache
        # Never set the memcache to an empty list since it's more likely that
        # this is a Nagios failure.
        if sliver_tool_list:
            for tool_id in sliver_tool_list.keys():
                if not memcache.set(
                        tool_id,
                        sliver_tool_list[tool_id],
                        namespace=constants.MEMCACHE_NAMESPACE_TOOLS):
                    logging.error(
                        'Failed to update sliver IP addresses in memcache.')

        return util.send_success(self)
Example #29
0
 def post(self):
     """POST is not supported by this handler; always respond with 404."""
     response = util.send_not_found(self)
     return response
Example #30
0
    def get(self):
        """Triggers the registration handler.

        Checks if new sites were added to Nagios and registers them.
        """
        try:
            project = app_identity.get_application_id()
            if project == 'mlab-ns':
                # TODO: eliminate project translation.
                json_file = self.DEFAULT_SITE_LIST_URL
            else:
                json_file = self.TEMPLATE_SITE_LIST_URL.format(project=project)
        except AttributeError:
            logging.error('Cannot get project name.')
            return util.send_not_found(self)

        try:
            sites_json = json.loads(urllib2.urlopen(json_file).read())
        except urllib2.HTTPError:
            # TODO(claudiu) Notify(email) when this happens.
            logging.error('Cannot open %s.', json_file)
            return util.send_not_found(self)
        except (TypeError, ValueError) as e:
            logging.error('The json format of %s in not valid: %s', json_file,
                          e)
            return util.send_not_found(self)

        site_ids = set()

        # Validate the site data.
        valid_sites_json = []
        for site in sites_json:
            if not self._is_valid_site(site):
                continue
            valid_sites_json.append(site)
            site_ids.add(site[self.SITE_FIELD])

        mlab_site_ids = set()
        mlab_sites = list(model.Site.all().fetch(limit=None))
        for site in mlab_sites:
            mlab_site_ids.add(site.site_id)

        unchanged_site_ids = site_ids.intersection(mlab_site_ids)
        new_site_ids = site_ids.difference(mlab_site_ids)

        # Do not remove sites here for now.

        for site in valid_sites_json:
            # Register new site AND update an existing site anyway.
            if (site[self.SITE_FIELD] in new_site_ids) or (
                    site[self.SITE_FIELD] in unchanged_site_ids):
                if site[self.SITE_FIELD] in new_site_ids:
                    logging.info('Add new site %s.', site[self.SITE_FIELD])
                # TODO(claudiu) Notify(email) when this happens.
                if not self.update_site(site):
                    logging.error('Error updating site %s.',
                                  site[self.SITE_FIELD])
                    continue
        # call check_ip job at the end of check_site job
        IPUpdateHandler().update()

        return util.send_success(self)
Example #31
0
 def testSendNotFoundNoJson(self):
     """send_not_found sets a 404 error for non-JSON output types."""
     request = UtilTestCase.RequestMockup()
     for output_type in (message.FORMAT_HTML, 'not_suppored_format'):
         util.send_not_found(request, output_type=output_type)
         self.assertEqual(request.error_code, 404)
Example #32
0
 def testSendNotFoundNoJson(self):
     """send_not_found sets a 404 error for non-JSON output types."""
     request = UtilTestCase.RequestMockup()
     for output_type in (message.FORMAT_HTML, 'not_suppored_format'):
         util.send_not_found(request, output_type=output_type)
         self.assertEqual(request.error_code, 404)
Example #33
0
    def get(self):
        """Triggers the update handler.

        Updates sliver status with information from either Nagios or Prometheus.
        The base URLs for accessing status information are stored in the
        datastore along with the credentials necessary to access the data.

        Sends a 404 via util.send_not_found when neither status source's
        config is available but both have dependent tools; otherwise sends
        success after attempting to update every tool/address-family pair.
        """
        # Determine if there are any dependencies on Prometheus.
        prometheus_deps = model.get_status_source_deps('prometheus')
        # Get Prometheus configs, and authenticate.
        prometheus_config = prometheus_config_wrapper.get_prometheus_config()
        if prometheus_config is None:
            logging.error('Datastore does not have the Prometheus configs.')
        else:
            prometheus_opener = prometheus_status.authenticate_prometheus(
                prometheus_config)

        # Determine if there are any dependencies on Nagios.
        nagios_deps = model.get_status_source_deps('nagios')
        # Get Nagios configs, and authenticate.
        nagios_config = nagios_config_wrapper.get_nagios_config()
        if nagios_config is None:
            logging.error('Datastore does not have the Nagios configs.')
        else:
            nagios_opener = nagios_status.authenticate_nagios(nagios_config)

        # If we have dependencies on both Prometheus and Nagios, and neither one
        # of the configs is available, then abort, because we can't fetch status
        # from either. However, if we have one or the other, then continue,
        # because it may be preferable to update _some_ statuses than none.
        if (prometheus_deps and not prometheus_config) and (nagios_deps and
                                                            not nagios_config):
            logging.error(
                'Neither Nagios nor Prometheus configs are available.')
            return util.send_not_found(self)

        for tool_id in model.get_all_tool_ids():
            tool = model.get_tool_from_tool_id(tool_id)
            for address_family in ['', '_ipv6']:
                if tool.status_source == 'prometheus':
                    logging.info('Status source for %s%s is: prometheus',
                                 tool_id, address_family)
                    # Only proceed if prometheus_config exists, and hence
                    # prometheus_opener should also exist.
                    if prometheus_config:
                        slice_info = prometheus_status.get_slice_info(
                            prometheus_config.url, tool_id, address_family)
                        if not slice_info:
                            continue
                        slice_status = prometheus_status.get_slice_status(
                            slice_info.slice_url, prometheus_opener)
                    else:
                        logging.error(
                            'Prometheus config unavailable. Skipping %s%s',
                            tool_id, address_family)
                        continue
                elif tool.status_source == 'nagios':
                    logging.info('Status source for %s%s is: nagios', tool_id,
                                 address_family)
                    # Only proceed if nagios_config exists, and hence
                    # nagios_opener should also exist.
                    if nagios_config:
                        slice_info = nagios_status.get_slice_info(
                            nagios_config.url, tool_id, address_family)
                        # Guard against a missing slice_info, mirroring the
                        # Prometheus branch above; previously a falsy return
                        # would raise AttributeError on slice_info.slice_url.
                        if not slice_info:
                            continue
                        slice_status = nagios_status.get_slice_status(
                            slice_info.slice_url, nagios_opener)
                    else:
                        logging.error(
                            'Nagios config unavailable. Skipping %s%s', tool_id,
                            address_family)
                        continue
                else:
                    logging.error('Unknown tool status_source: %s.',
                                  tool.status_source)
                    continue

                if slice_status:
                    self.update_sliver_tools_status(slice_status,
                                                    slice_info.tool_id,
                                                    slice_info.address_family)

        return util.send_success(self)
Example #34
0
    def get(self):
        """Triggers the update handler.

        Updates sliver tool IP addresses from Nagios. Fetches the FQDN/IP
        list from self.IP_LIST_URL, updates or creates the corresponding
        SliverTool entities in the datastore, and refreshes the per-tool
        memcache entries.

        Sends a 404 via util.send_not_found if the IP list cannot be
        fetched; otherwise sends success.
        """
        lines = []
        try:
            lines = urllib2.urlopen(self.IP_LIST_URL).read().strip('\n').split(
                '\n')
        except urllib2.HTTPError:
            # TODO(claudiu) Notify(email) when this happens.
            logging.error('Cannot open %s.', self.IP_LIST_URL)
            return util.send_not_found(self)

        # Maps tool_id -> list of SliverTool entities, used to refresh
        # memcache once all lines have been processed.
        sliver_tool_list = {}
        for line in lines:
            # Expected format: "FQDN,IPv4,IPv6" (IPv6 can be an empty string).
            line_fields = line.split(',')
            if len(line_fields) != 3:
                logging.error('Line does not have 3 fields: %s.', line)
                continue
            fqdn = line_fields[0]
            ipv4 = line_fields[1]
            ipv6 = line_fields[2]

            if not production_check.is_production_slice(fqdn):
                continue

            # Gather some information about this site which will be used to
            # determine if we need to do anything with this site/sliver.
            slice_id, site_id, server_id = (
                model.get_slice_site_server_ids(fqdn))

            # Make sure this is a valid slice FQDN, and not a mistake or just a
            # node name.
            if slice_id is None or site_id is None or server_id is None:
                continue

            # If mlab-ns does not support this site, then skip it.
            site = model.Site.gql('WHERE site_id=:site_id',
                                  site_id=site_id).get()
            # Idiom fix: compare to None with 'is', not '=='.
            if site is None:
                logging.info('mlab-ns does not support site %s.', site_id)
                continue

            # If mlab-ns does not serve/support this slice, then skip it. Note:
            # a given slice_id might have multiple tools (e.g., iupui_ndt has
            # both 'ndt' and 'ndt_ssl' tools.
            tools = model.Tool.gql('WHERE slice_id=:slice_id',
                                   slice_id=slice_id)
            if tools.count() == 0:
                continue

            for tool in tools.run():
                # Query the datastore to see if this sliver_tool exists there.
                sliver_tool_gql = model.SliverTool.gql(
                    'WHERE fqdn=:fqdn AND tool_id=:tool_id',
                    fqdn=fqdn,
                    tool_id=tool.tool_id)

                # Check to see if the sliver_tool already exists in the
                # datastore. If not, add it to the datastore.
                if sliver_tool_gql.count() == 1:
                    sliver_tool = sliver_tool_gql.get(
                        batch_size=constants.GQL_BATCH_SIZE)
                elif sliver_tool_gql.count() == 0:
                    logging.info(
                        'For tool %s, fqdn %s is not in datastore.  Adding it.',
                        tool.tool_id, fqdn)
                    sliver_tool = self.initialize_sliver_tool(tool, site,
                                                              server_id, fqdn)
                else:
                    logging.error(
                        'Error, or too many sliver_tools returned for {}:{}.'.format(
                            tool.tool_id, fqdn))
                    continue

                updated_sliver_tool = self.set_sliver_tool_ips(sliver_tool,
                                                               ipv4, ipv6)
                # If the sliver_tool got updated IPs then write the change to
                # the datastore, else save the performance hit of writing a
                # record with identical data.
                if updated_sliver_tool:
                    self.put_sliver_tool(updated_sliver_tool)

                # setdefault replaces the manual "if key not in dict" dance.
                sliver_tool_list.setdefault(tool.tool_id, []).append(
                    sliver_tool)

        # Update memcache.  Never set the memcache to an empty list since it's
        # more likely that this is a Nagios failure.
        if sliver_tool_list:
            # Iterate items() directly instead of keys() + per-key lookup.
            for tool_id, sliver_tools in sliver_tool_list.items():
                if not memcache.set(
                        tool_id,
                        sliver_tools,
                        namespace=constants.MEMCACHE_NAMESPACE_TOOLS):
                    logging.error(
                        'Failed to update sliver IP addresses in memcache.')

        return util.send_success(self)