Example #1
def poll():
    """ Callback function that polls for new tasks based on a schedule. """
    deployment_id = helper.get_deployment_id()
    # If the deployment is not registered, skip.
    if not deployment_id:
        return

    # If we can't reach the backup and recovery services, skip.
    nodes = helper.get_node_info()
    http_client = tornado.httpclient.HTTPClient()
    for node in nodes:
        br_host = node[helper.NodeInfoTags.HOST]
        request = tornado.httpclient.HTTPRequest(br_host)
        try:
            response = http_client.fetch(request)
            if json.loads(response.body)['status'] != 'up':
                logging.warning(
                    'Backup and Recovery service at {} is not up.'.format(
                        br_host))
                return
        except (socket.error, ValueError):
            logging.exception(
                'Backup and Recovery service at {} is not up.'.format(br_host))
            return

    logging.info("Polling for new task.")

    # Send request to AppScale Portal.
    url = "{0}{1}".format(constants.PORTAL_URL, constants.PORTAL_POLL_PATH)
    data = urllib.urlencode({JSONTags.DEPLOYMENT_ID: deployment_id})
    request = helper.create_request(url=url, method='POST', body=data)
    response = helper.urlfetch(request)

    if not response[JSONTags.SUCCESS]:
        logging.error("Inaccessible resource: {}".format(url))
        return

    try:
        data = json.loads(response[JSONTags.BODY])
    except (TypeError, ValueError) as error:
        logging.error(
            "Cannot parse response from url '{0}'. Error: {1}".format(
                url, str(error)))
        return

    if data == {}:  # If there's no task to perform.
        return

    # Verify all necessary fields are present in the request.
    if not set(data.keys()).issuperset(set(constants.REQUIRED_KEYS)):
        logging.error("Missing args in response: {0}".format(response))
        return

    logging.debug("Task to run: {0}".format(data))
    logging.info("Redirecting task request to TaskHandler.")
    url = "http://localhost:{}{}".format(constants.HERMES_PORT, '/do_task')
    request = helper.create_request(url, method='POST', body=json.dumps(data))

    # The poller can move forward without waiting for a response here.
    helper.urlfetch_async(request)
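
Note: poll() above is a callback meant to be invoked on a schedule rather than called directly. A minimal sketch of how it could be scheduled on the Tornado I/O loop is shown below; the one-minute interval, the POLLING_INTERVAL_MS constant, and the start_polling() wrapper are assumptions for illustration, not part of the original example.

import tornado.ioloop

# Sketch only: schedule poll() as a periodic Tornado callback.
# POLLING_INTERVAL_MS is a hypothetical constant, not taken from the example.
POLLING_INTERVAL_MS = 60 * 1000  # PeriodicCallback expects milliseconds.

def start_polling():
    periodic = tornado.ioloop.PeriodicCallback(poll, POLLING_INTERVAL_MS)
    periodic.start()
    tornado.ioloop.IOLoop.instance().start()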
Example #2
def poll():
  """ Callback function that polls for new tasks based on a schedule. """
  deployment_id = helper.get_deployment_id()
  # If the deployment is not registered, skip.
  if not deployment_id:
    return

  # If we can't reach the backup and recovery services, skip.
  nodes = helper.get_node_info()
  http_client = tornado.httpclient.HTTPClient()
  for node in nodes:
    br_host = node[helper.NodeInfoTags.HOST]
    request = tornado.httpclient.HTTPRequest(br_host)
    try:
      response = http_client.fetch(request)
      if json.loads(response.body)['status'] != 'up':
        logging.warning('Backup and Recovery service at {} is not up.'
          .format(br_host))
        return
    except (socket.error, ValueError):
      logging.exception('Backup and Recovery service at {} is not up.'
        .format(br_host))
      return

  logging.info("Polling for new task.")

  # Send request to AppScale Portal.
  url = "{0}{1}".format(constants.PORTAL_URL,
                        constants.PORTAL_POLL_PATH)
  data = urllib.urlencode({JSONTags.DEPLOYMENT_ID: deployment_id})
  request = helper.create_request(url=url, method='POST', body=data)
  response = helper.urlfetch(request)

  if not response[JSONTags.SUCCESS]:
    logging.error("Inaccessible resource: {}".format(url))
    return

  try:
    data = json.loads(response[JSONTags.BODY])
  except (TypeError, ValueError) as error:
    logging.error("Cannot parse response from url '{0}'. Error: {1}".
      format(url, str(error)))
    return

  if data == {}:  # If there's no task to perform.
    return

  # Verify all necessary fields are present in the request.
  if not set(data.keys()).issuperset(set(constants.REQUIRED_KEYS)):
    logging.error("Missing args in response: {0}".format(response))
    return

  logging.debug("Task to run: {0}".format(data))
  logging.info("Redirecting task request to TaskHandler.")
  url = "http://localhost:{}{}".format(constants.HERMES_PORT, '/do_task')
  request = helper.create_request(url, method='POST', body=json.dumps(data))

  # The poller can move forward without waiting for a response here.
  helper.urlfetch_async(request)
Example #3
    def send(self, nodes_stats):
        deployment_id = helper.get_deployment_id()
        # If the deployment is not registered, skip.
        if not deployment_id:
            return

        # Send request to AppScale Portal.
        portal_path = self._portal_method.format(deployment_id=deployment_id)
        url = "{0}{1}".format(constants.PORTAL_URL, portal_path)
        data = {
            'deployment_id': deployment_id,
            'nodes_stats': json.dumps({
                node_ip: [stats_to_dict(snapshot) for snapshot in snapshots]
                for node_ip, snapshots in nodes_stats.iteritems()
            })
        }
        snapshots_num = sum(
            len(snapshots) for snapshots in nodes_stats.values())
        logger.debug(
            "Sending {snapshots} node stats snapshots about {nodes} nodes to the "
            "AppScale Portal".format(snapshots=snapshots_num,
                                     nodes=len(nodes_stats)))

        request = helper.create_request(url=url,
                                        method='POST',
                                        body=urllib.urlencode(data))
        response = helper.urlfetch(request)

        if not response[JSONTags.SUCCESS]:
            logger.error("Inaccessible resource: {}".format(url))
            return
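
Note: send() expects nodes_stats to map each node IP to a list of stats snapshots, and stats_to_dict() to turn each snapshot into a plain dict before JSON encoding. The sketch below shows roughly what the urlencoded request body contains; the IP addresses and the fields inside each snapshot dict are illustrative placeholders, not the real Hermes schema.

import json
import urllib

# Sketch only: the payload shape after the dict comprehension above has run.
# The deployment id, node IPs, and snapshot fields are assumed placeholders.
example_body = urllib.urlencode({
    'deployment_id': 'some-deployment-id',
    'nodes_stats': json.dumps({
        '10.0.0.1': [{'utc_timestamp': 1500000000, 'cpu': {'percent': 12.5}}],
        '10.0.0.2': [],
    }),
})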
Example #4
  def send(self, nodes_stats):
    deployment_id = helper.get_deployment_id()
    # If the deployment is not registered, skip.
    if not deployment_id:
      return

    # Send request to AppScale Portal.
    portal_path = self._portal_method.format(deployment_id=deployment_id)
    url = "{0}{1}".format(constants.PORTAL_URL, portal_path)
    data = {
      'deployment_id': deployment_id,
      'nodes_stats': json.dumps({
        node_ip: [stats_to_dict(snapshot) for snapshot in snapshots]
        for node_ip, snapshots in nodes_stats.iteritems()
      })
    }
    snapshots_num = sum(len(snapshots) for snapshots in nodes_stats.values())
    logging.debug(
      "Sending {snapshots} node stats snapshots about {nodes} nodes to the "
      "AppScale Portal".format(snapshots=snapshots_num, nodes=len(nodes_stats))
    )

    request = helper.create_request(url=url, method='POST',
                                    body=urllib.urlencode(data))
    response = helper.urlfetch(request)

    if not response[JSONTags.SUCCESS]:
      logging.error("Inaccessible resource: {}".format(url))
      return
Example #5
    def post(self):
        """ POST method that sends a request for action to the
        corresponding deployment components. """
        logging.debug("Task request received: {0}, {1}".format(
            str(self.request), str(self.request.body)))

        if not self.request.body:
            logging.info(
                "Response from the AppScale Portal empty. No tasks to run.")
            self.set_status(constants.HTTP_Codes.HTTP_OK)
            return

        try:
            data = json.loads(self.request.body)
        except (TypeError, ValueError) as error:
            logging.exception(error)
            logging.error("Unable to parse: {0}".format(self.request.body))
            self.set_status(constants.HTTP_Codes.HTTP_BAD_REQUEST)
            return

        # Verify all necessary fields are present in request.body.
        logging.debug("Verifying all necessary parameters are present.")
        if not set(data.keys()).issuperset(set(constants.REQUIRED_KEYS)):
            logging.error("Missing args in request: " + self.request.body)
            self.set_status(constants.HTTP_Codes.HTTP_BAD_REQUEST)
            return

        # Gather information for sending the requests to start off the current
        # task at hand.
        nodes = helper.get_node_info()

        if data[JSONTags.TYPE] not in constants.SUPPORTED_TASKS:
            logging.error("Unsupported task type: '{0}'".format(
                data[JSONTags.TYPE]))
            self.set_status(constants.HTTP_Codes.HTTP_BAD_REQUEST)
            return

        tasks = [data[JSONTags.TYPE]]
        logging.info("Tasks to execute: {0}".format(tasks))
        for task in tasks:
            # Initiate the task as pending.
            TASK_STATUS_LOCK.acquire(True)
            TASK_STATUS[data[JSONTags.TASK_ID]] = {
                JSONTags.TYPE: task,
                NodeInfoTags.NUM_NODES: len(nodes),
                JSONTags.STATUS: TaskStatus.PENDING
            }
            TASK_STATUS_LOCK.release()

            result_queue = Queue.Queue()
            threads = []
            for node in nodes:
                # Create a br_service compatible JSON object.
                json_data = helper.create_br_json_data(
                    node[NodeInfoTags.ROLE], task, data[JSONTags.BUCKET_NAME],
                    node[NodeInfoTags.INDEX], data[JSONTags.STORAGE])
                request = helper.create_request(url=node[NodeInfoTags.HOST],
                                                method='POST',
                                                body=json_data)

                # Start a thread for the request.
                thread = threading.Thread(target=helper.send_remote_request,
                                          name='{0}{1}'.format(
                                              data[JSONTags.TYPE],
                                              node[NodeInfoTags.HOST]),
                                          args=(
                                              request,
                                              result_queue,
                                          ))
                threads.append(thread)
                thread.start()

            # Wait for threads to finish.
            for thread in threads:
                thread.join()
            # Harvest results.
            results = [result_queue.get() for _ in xrange(len(nodes))]
            logging.debug("Task: {0}. Results: {1}.".format(task, results))

            # Back up or restore the application source code.
            app_success = False
            if task == 'backup':
                app_success = helper.backup_apps(
                    data[JSONTags.STORAGE], data[JSONTags.BUCKET_NAME])
            elif task == 'restore':
                app_success = helper.restore_apps(
                    data[JSONTags.STORAGE], data[JSONTags.BUCKET_NAME])

            # Update TASK_STATUS.
            successful_nodes = 0
            for result in results:
                if result[JSONTags.SUCCESS]:
                    successful_nodes += 1

            TASK_STATUS_LOCK.acquire(True)
            all_nodes = TASK_STATUS[data[JSONTags.TASK_ID]][
                NodeInfoTags.NUM_NODES]
            if successful_nodes < all_nodes or not app_success:
                TASK_STATUS[data[JSONTags.TASK_ID]][JSONTags.STATUS] = (
                    TaskStatus.FAILED)
            else:
                TASK_STATUS[data[JSONTags.TASK_ID]][JSONTags.STATUS] = (
                    TaskStatus.COMPLETE)

            logging.info("Task: {0}. Status: {1}.".format(
                task, TASK_STATUS[data[JSONTags.TASK_ID]][JSONTags.STATUS]))
            IOLoop.instance().add_callback(
                callback=lambda: helper.report_status(
                    task, data[JSONTags.TASK_ID], TASK_STATUS[data[
                        JSONTags.TASK_ID]][JSONTags.STATUS]))
            TASK_STATUS_LOCK.release()

        self.set_status(constants.HTTP_Codes.HTTP_OK)
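
Note: the post() handler above is the receiving end of the '/do_task' request issued in Example #1. A minimal sketch of how such a handler could be mounted in a Tornado application is shown below; the class name TaskHandler is taken from the log message in Example #1, and using constants.HERMES_PORT as the listening port is an assumption.

import tornado.ioloop
import tornado.web

# Sketch only: expose the handler on the port that poll() posts to.
# TaskHandler and constants.HERMES_PORT are assumed names from the examples.
application = tornado.web.Application([
    ('/do_task', TaskHandler),
])
application.listen(constants.HERMES_PORT)
tornado.ioloop.IOLoop.instance().start()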