Example 1
  def bulkCreateTasks(self, request, *args, **kwargs):
    """Task that creates GCI Tasks from bulk data specified in the POST dict.

    The POST dict should have the following information present:
        bulk_create_key: the key of the bulk_create entity
    """
    import settings

    # keep track of our own time limit: 20 seconds, expressed in milliseconds
    timelimit = 20000
    timekeeper = Timekeeper(timelimit)

    post_dict = request.POST

    bulk_create_key = post_dict.get('bulk_create_key')
    if not bulk_create_key:
      return error_handler.logErrorAndReturnOK(
                 'Not all POST data specified in: %s' % post_dict)

    bulk_data = GCIBulkCreateData.get(bulk_create_key)
    if not bulk_data:
      return error_handler.logErrorAndReturnOK(
                 'No valid data found for key: %s' % bulk_create_key)

    # note that we only query for the quota once
    org_admin = ndb.Key.from_old_key(
        GCIBulkCreateData.created_by.get_value_for_datastore(bulk_data)).get()
    org = bulk_data.org
    task_quota = getRemainingTaskQuota(org)

    # TODO(ljvderijk): Add transactions

    tasks = bulk_data.tasks
    while tasks:
      try:
        # check if we have time
        timekeeper.ping()

        if settings.GCI_TASK_QUOTA_LIMIT_ENABLED and task_quota <= 0:
          return error_handler.logErrorAndReturnOK(
              'Task quota reached for %s' % org.name)

        # remove the first task
        task_as_string = tasks.pop(0)

        loaded_task = json.loads(task_as_string)
        task = {}
        for key, value in loaded_task.iteritems():
          # If we don't do this, Python will complain about kwargs not being
          # strings when we try to save the new task.
          task[key.encode('UTF-8')] = value

        logging.info('Uncleaned task: %s', task)
        # clean the data
        errors = self._cleanTask(task, org)

        if errors:
          logging.warning(
              'Invalid task data uploaded, the following errors occurred: %s',
              errors)
          bulk_data.errors.append(db.Text(
              'The task in row %i contains the following errors.\n %s'
              % (bulk_data.tasksRemoved(), '\n'.join(errors))))

        # at-most-once semantics for creating tasks
        bulk_data.put()

        if errors:
          # do the next task
          continue

        # set other properties
        task['org'] = org

        # TODO(daniel): access program in more efficient way
        task['program'] = org_admin.program.to_old_key()
        task['status'] = task_model.UNPUBLISHED
        task['created_by'] = org_admin.to_old_key()
        task['modified_by'] = org_admin.to_old_key()
        # TODO(ljv): Remove difficulty level completely if needed.
        # Difficulty is hardcoded to easy since GCI2012 has no difficulty.
        task['difficulty_level'] = DifficultyLevel.EASY

        subscribers_entities = task['mentor_entities'] + [org_admin]
        task['subscribers'] = list(set([ent.key() for ent in
            subscribers_entities if ent.automatic_task_subscription]))

        # create the new task
        logging.info('Creating new task with fields: %s', task)
        task_entity = GCITask(**task)
        task_entity.put()
        task_quota = task_quota - 1
      except DeadlineExceededError:
        # time to bail out
        break

    if not tasks:
      # send out a message
      notifications.sendBulkCreationCompleted(bulk_data)
      bulk_data.delete()
    else:
      # there is still work to be done; return a non-500 response and requeue
      task_params = {
          'bulk_create_key': bulk_data.key()
          }
      new_task = taskqueue.Task(params=task_params, url=BULK_CREATE_URL)
      # add to the gci queue
      new_task.add(queue_name='gci-update')

    # we're done here
    return http.HttpResponse('OK')
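
The handler above relies on a Timekeeper helper constructed with a budget in milliseconds, whose ping() raises DeadlineExceededError once the budget is spent. Below is a minimal sketch of such a helper, assuming the exception is the App Engine runtime's DeadlineExceededError that the handler already catches; the class body is an illustration, not the project's actual implementation:

import time

from google.appengine.runtime import DeadlineExceededError


class Timekeeper(object):
  """Tracks a soft time budget for a request (illustrative sketch)."""

  def __init__(self, timelimit):
    # timelimit is the budget in milliseconds, e.g. 20000 for 20 seconds
    self._deadline = time.time() + timelimit / 1000.0

  def ping(self):
    # Raise the same exception the handler's except clause bails out on.
    if time.time() > self._deadline:
      raise DeadlineExceededError()
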
Example 2
    def bulkCreateTasks(self, request, *args, **kwargs):
        """Task that creates GCI Tasks from bulk data specified in the POST dict.

        The POST dict should have the following information present:
            bulk_create_key: the key of the bulk_create entity
        """
        import settings

        # keep track of our own time limit: 20 seconds, expressed in milliseconds
        timelimit = 20000
        timekeeper = Timekeeper(timelimit)

        post_dict = request.POST

        bulk_create_key = post_dict.get('bulk_create_key')
        if not bulk_create_key:
            return error_handler.logErrorAndReturnOK(
                'Not all POST data specified in: %s' % post_dict)

        bulk_data = GCIBulkCreateData.get(bulk_create_key)
        if not bulk_data:
            return error_handler.logErrorAndReturnOK(
                'No valid data found for key: %s' % bulk_create_key)

        # note that we only query for the quota once
        org_admin = ndb.Key.from_old_key(
            GCIBulkCreateData.created_by.get_value_for_datastore(
                bulk_data)).get()
        org = bulk_data.org
        task_quota = getRemainingTaskQuota(org)

        # TODO(ljvderijk): Add transactions

        tasks = bulk_data.tasks
        while tasks:
            try:
                # check if we have time
                timekeeper.ping()

                if settings.GCI_TASK_QUOTA_LIMIT_ENABLED and task_quota <= 0:
                    return error_handler.logErrorAndReturnOK(
                        'Task quota reached for %s' % org.name)

                # remove the first task
                task_as_string = tasks.pop(0)

                loaded_task = json.loads(task_as_string)
                task = {}
                for key, value in loaded_task.iteritems():
                    # If we don't do this, Python will complain about kwargs
                    # not being strings when we try to save the new task.
                    task[key.encode('UTF-8')] = value

                logging.info('Uncleaned task: %s', task)
                # clean the data
                errors = self._cleanTask(task, org)

                if errors:
                    logging.warning(
                        'Invalid task data uploaded, the following errors occurred: %s',
                        errors)
                    bulk_data.errors.append(db.Text(
                        'The task in row %i contains the following errors.\n %s'
                        % (bulk_data.tasksRemoved(), '\n'.join(errors))))

                # at-most-once semantics for creating tasks
                bulk_data.put()

                if errors:
                    # do the next task
                    continue

                # set other properties
                task['org'] = org

                # TODO(daniel): access program in more efficient way
                task['program'] = org_admin.program.to_old_key()
                task['status'] = task_model.UNPUBLISHED
                task['created_by'] = org_admin.to_old_key()
                task['modified_by'] = org_admin.to_old_key()
                # TODO(ljv): Remove difficulty level completely if needed.
                # Difficulty is hardcoded to easy since GCI2012 has no difficulty.
                task['difficulty_level'] = DifficultyLevel.EASY

                subscribers_entities = task['mentor_entities'] + [org_admin]
                task['subscribers'] = list(
                    set([
                        ent.key() for ent in subscribers_entities
                        if ent.automatic_task_subscription
                    ]))

                # create the new task
                logging.info('Creating new task with fields: %s', task)
                task_entity = GCITask(**task)
                task_entity.put()
                task_quota = task_quota - 1
            except DeadlineExceededError:
                # time to bail out
                break

        if not tasks:
            # send out a message
            notifications.sendBulkCreationCompleted(bulk_data)
            bulk_data.delete()
        else:
            # there is still work to be done; return a non-500 response and requeue
            task_params = {'bulk_create_key': bulk_data.key()}
            new_task = taskqueue.Task(params=task_params, url=BULK_CREATE_URL)
            # add to the gci queue
            new_task.add(queue_name='gci-update')

        # we're done here
        return http.HttpResponse('OK')
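
The requeue branch shows the worker's contract with the task queue: it re-enqueues itself with the same bulk_create_key parameter until the tasks list is drained. Here is a minimal sketch of how the first worker could be enqueued once the GCIBulkCreateData entity is stored; BULK_CREATE_URL and the 'gci-update' queue name come from the handler above, while spawnBulkCreateTasks itself is a hypothetical name:

from google.appengine.api import taskqueue


def spawnBulkCreateTasks(bulk_data):
    """Enqueues the first bulk-create worker for a stored bulk_data entity."""
    task_params = {'bulk_create_key': bulk_data.key()}
    new_task = taskqueue.Task(params=task_params, url=BULK_CREATE_URL)
    new_task.add(queue_name='gci-update')
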
Example 3
def bulkCreateTasks(request, *args, **kwargs):
  """Task that creates GCI Tasks from bulk data specified in the POST dict.

  The POST dict should have the following information present:
      bulk_create_key: the key of the bulk_create entity
  """
  import settings

  # keep track of our own time limit: 20 seconds, expressed in milliseconds
  timelimit = 20000
  timekeeper = Timekeeper(timelimit)

  post_dict = request.POST

  bulk_create_key = post_dict.get('bulk_create_key')
  if not bulk_create_key:
    return error_handler.logErrorAndReturnOK(
               'Not all POST data specified in: %s' % post_dict)

  bulk_data = bulk_create_model.GCIBulkCreateData.get(bulk_create_key)
  if not bulk_data:
    return error_handler.logErrorAndReturnOK(
               'No valid data found for key: %s' % bulk_create_key)

  # note that we only query for the quota once
  org_admin = bulk_data.created_by
  task_quota = org_logic.getRemainingTaskQuota(org_admin.scope)

  tasks = bulk_data.tasks
  while tasks:
    try:
      # check if we have time
      timekeeper.ping()

      if settings.GCI_TASK_QUOTA_LIMIT_ENABLED and task_quota <= 0:
        return error_handler.logErrorAndReturnOK(
            'Task quota reached for %s' % org_admin.scope.name)

      # remove the first task
      task_as_string = tasks.pop(0)

      loaded_task = simplejson.loads(task_as_string)
      task = {}
      for key, value in loaded_task.iteritems():
        # If we don't do this, Python will complain about kwargs not being
        # strings when we try to save the new task.
        task[key.encode('UTF-8')] = value

      logging.info('Uncleaned task: %s', task)
      # clean the data
      errors = _cleanTask(task, org_admin)

      if errors:
        logging.warning(
            'Invalid task data uploaded, the following errors occurred: %s',
            errors)
        bulk_data.errors.append(db.Text(
            'The task in row %i contains the following errors.\n %s'
            % (bulk_data.tasksRemoved(), '\n'.join(errors))))

      # at-most-once semantics for creating tasks
      bulk_data.put()

      if errors:
        # do the next task
        continue

      # set other properties
      task['link_id'] = 't%i' % (int(time.time()*100))
      task['scope'] = org_admin.scope
      task['scope_path'] = org_admin.scope_path
      task['program'] = org_admin.program
      task['status'] = 'Unpublished'
      task['created_by'] = org_admin
      task['modified_by'] = org_admin

      # create the new task
      logging.info('Creating new task with fields: %s', task)
      task_logic.updateOrCreateFromFields(task)
      task_quota = task_quota - 1
    except DeadlineExceededError:
      # time to bail out
      break

  if not tasks:
    # send out a message
    notifications.sendBulkCreationCompleted(bulk_data)
    bulk_data.delete()
  else:
    # there is still work to be done; return a non-500 response and requeue
    task_params = {
        'bulk_create_key': bulk_data.key().id_or_name()
        }
    new_task = taskqueue.Task(params=task_params,
                              url=BULK_CREATE_URL)
    # add to the gci queue
    new_task.add(queue_name='gci-update')

  # we're done here
  return http.HttpResponse('OK')
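
All three versions implement the same deadline-bounded drain pattern: pop one item of work, persist the shrunken list before acting on it (hence the "at-most-once semantics" comment), and requeue whatever is left when the time budget runs out. Here is a generic sketch of that pattern, with hypothetical process, checkpoint, and requeue callables standing in for task creation, bulk_data.put(), and the taskqueue re-enqueue:

import time


def drain(work_items, process, checkpoint, requeue, budget_seconds=20):
  deadline = time.time() + budget_seconds
  while work_items:
    if time.time() > deadline:
      # out of time: hand the remainder to a fresh worker
      requeue(work_items)
      return
    item = work_items.pop(0)
    # persist progress before processing so a retried worker cannot
    # create the same item twice (at-most-once: a crash may drop the item)
    checkpoint(work_items)
    process(item)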