Code example #1
File: edit.py Project: hellojwilde/quizzical
    def post(self, id):
        for model in ["Score", "Attempt"]:
            start_map(
                "Archive %s(s)" % model,
                "jobs.cleanup.archive",
                "google.appengine.ext.mapreduce.input_readers.DatastoreInputReader",
                {"entity_kind": "models.%s" % model, "quiz_id": int(id)},
            )

        self.redirect(links.Quiz.roster(int(id)))
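(Examples #1 and #2 show the same handler, formatted differently.) The spec string 'jobs.cleanup.archive' names a map function that the framework calls once per mapped entity; that function is not shown on this page. A minimal sketch of the shape it would take, assuming the handler simply flags entities as archived (the is_archived property is hypothetical):

# Sketch only: a map handler compatible with the "jobs.cleanup.archive" spec.
# The is_archived property is an assumption; the real handler is not shown here.
from google.appengine.ext.mapreduce import operation as op

def archive(entity):
    # Mapper params such as quiz_id would be read via mapreduce's context
    # module in the real handler.
    entity.is_archived = True
    yield op.db.Put(entity)  # the mapreduce runtime batches these writes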
Code example #2
File: edit.py Project: hellojwilde/quizzical
    def post(self, id):
        for model in ['Score', 'Attempt']:
            start_map(
                'Archive %s(s)' % model, 'jobs.cleanup.archive',
                'google.appengine.ext.mapreduce.input_readers.DatastoreInputReader',
                {
                    'entity_kind': 'models.%s' % model,
                    'quiz_id': int(id)
                })

        self.redirect(links.Quiz.roster(int(id)))
Code example #3
File: edit.py Project: hellojwilde/quizzical
    def post(self, id):
        self.fetch(id)
        self.quiz_entity.is_deleting = True
        self.quiz_entity.put()

        for model in ["Question", "Attempt", "Score", "Snapshot"]:
            start_map(
                "Delete %s(s)" % model,
                "jobs.cleanup.delete",
                "google.appengine.ext.mapreduce.input_readers.DatastoreInputReader",
                {"entity_kind": "models.%s" % model, "quiz_id": int(id)},
            )

        self.quiz_entity.delete()
        self.redirect("/")
Code example #4
File: utils.py Project: 404minds/quiz-forest
  def tx(is_xg_transaction):
    """Start MapReduce job and update datastore admin state.

    Args:
      is_xg_transaction: True if we are running inside an xg-enabled
        transaction, or False if we are running inside a non-xg-enabled
        transaction (which means the datastore admin state is updated in one
        transaction and the MapReduce job starts in an independent transaction).
    Returns:
      The started MapReduce job id as a string.
    """
    job_id = control.start_map(
        job_name, handler_spec, reader_spec,
        mapper_params,
        output_writer_spec=writer_spec,
        mapreduce_parameters=mapreduce_params,
        base_path=config.MAPREDUCE_PATH,
        shard_count=shard_count,
        in_xg_transaction=is_xg_transaction,
        queue_name=queue_name)
    operation = DatastoreAdminOperation.get(operation_key)
    operation.status = DatastoreAdminOperation.STATUS_ACTIVE
    operation.active_jobs += 1
    operation.active_job_ids = list(set(operation.active_job_ids + [job_id]))
    operation.put(config=_CreateDatastoreConfig())
    return job_id
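This helper is meant to run inside a datastore transaction; the is_xg_transaction argument tells it whether cross-group (XG) transactions are available. A minimal sketch of a caller, which is assumed and not shown in the source:

from google.appengine.ext import db

# With XG transactions, the admin-state update and the job kickoff commit
# atomically in one cross-group transaction.
xg_on = db.create_transaction_options(xg=True)
job_id = db.run_in_transaction_options(xg_on, tx, True)

# Without XG support, the two updates land in independent transactions:
# job_id = db.run_in_transaction(tx, False)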
Code example #5
File: edit.py Project: hellojwilde/quizzical
    def post(self, id):
        self.fetch(id)
        self.quiz_entity.is_deleting = True
        self.quiz_entity.put()

        for model in ['Question', 'Attempt', 'Score', 'Snapshot']:
            start_map(
                'Delete %s(s)' % model, 'jobs.cleanup.delete',
                'google.appengine.ext.mapreduce.input_readers.DatastoreInputReader',
                {
                    'entity_kind': 'models.%s' % model,
                    'quiz_id': int(id)
                })

        self.quiz_entity.delete()
        self.redirect('/')
Code example #6
File: utils.py Project: SRabbelier/Melange
  def tx():
    operation.active_jobs += 1
    operation.put(config=_CreateDatastoreConfig())

    return control.start_map(
        job_name, handler_spec, reader_spec,
        mapper_params,
        mapreduce_parameters=mapreduce_params,
        base_path=config.MAPREDUCE_PATH,
        shard_count=32,
        transactional=True)
Code example #7
File: seeder.py Project: SRabbelier/Melange
  def startMapReduce(self, json):
    configuration_sheet = DataSeederConfigurationSheet(json=json)
    configuration_sheet.put()

    reader_parameters = {'configuration_sheet_key': str(configuration_sheet.key())}
    return start_map(self.MAPPER_NAME,
                     self.HANDLER_SPEC,
                     self.READER_SPEC,
                     reader_parameters,
                     self.SHARD_COUNT,
                     queue_name=self.QUEUE_NAME)
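Here SHARD_COUNT is passed positionally; in the library version these projects bundle, the fifth positional parameter of start_map is shard_count, so the value lands there. The class constants are defined elsewhere in the project; a purely illustrative sketch of what they might look like (every value below is an assumption):

class DataSeeder(object):
    # Illustrative values only; the project's real constants are not shown here.
    MAPPER_NAME = 'Seed data'
    HANDLER_SPEC = 'soc.modules.seeder.logic.mapper.seed'  # hypothetical path
    READER_SPEC = ('google.appengine.ext.mapreduce.'
                   'input_readers.DatastoreInputReader')
    SHARD_COUNT = 32        # consumed positionally as start_map's shard_count
    QUEUE_NAME = 'seeder'   # hypothetical task queue name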
Code example #8
File: utils.py Project: cdonati/appscale
 def tx():
   operation = DatastoreAdminOperation.get(operation_key)
   job_id = control.start_map(
       job_name, handler_spec, reader_spec,
       mapper_params,
       output_writer_spec=writer_spec,
       mapreduce_parameters=mapreduce_params,
       base_path=config.MAPREDUCE_PATH,
       shard_count=shard_count,
       transactional=True,
       queue_name=queue_name,
       transactional_parent=operation)
   operation.status = DatastoreAdminOperation.STATUS_ACTIVE
   operation.active_jobs += 1
   operation.active_job_ids = list(set(operation.active_job_ids + [job_id]))
   operation.put(config=_CreateDatastoreConfig())
   return job_id
Code example #9
    def tx():
        operation = DatastoreAdminOperation.get(operation_key)

        job_id = control.start_map(job_name,
                                   handler_spec,
                                   reader_spec,
                                   mapper_params,
                                   output_writer_spec=writer_spec,
                                   mapreduce_parameters=mapreduce_params,
                                   base_path=config.MAPREDUCE_PATH,
                                   shard_count=DEFAULT_SHARD_SIZE,
                                   transactional=True,
                                   queue_name=queue_name,
                                   transactional_parent=operation)
        operation.status = DatastoreAdminOperation.STATUS_ACTIVE
        operation.active_jobs += 1
        operation.active_job_ids = list(
            set(operation.active_job_ids + [job_id]))
        operation.put(config=_CreateDatastoreConfig())
        return job_id
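Examples #8 and #9 pass transactional=True together with transactional_parent, an older form of the in_xg_transaction flag seen in examples #4 and #11; the caller is expected to invoke tx inside a transaction on the operation's entity group. A minimal sketch of such a caller (assumed, not shown in the source):

from google.appengine.ext import db

# Assumed caller: the job kickoff and the operation update commit atomically.
job_id = db.run_in_transaction(tx)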
Code example #10
    def post(self):
        """Handler for post requests to datastore_admin/delete.do.

        Jobs are executed and the user is redirected to the get handler.
        """
        namespace = self.request.get('namespace')
        kinds = self.request.get('kind', allow_multiple=True)
        (namespace_str, _) = _GetPrintableStrs(namespace, kinds)
        app_id = self.request.get('app_id')
        token = self.request.get('xsrf_token')

        jobs = []
        if utils.ValidateXsrfToken(token, XSRF_ACTION):
            try:
                for kind in kinds:
                    name = 'Delete all %s objects%s' % (kind, namespace_str)
                    mapreduce_params = {
                        'entity_kind': kind,
                    }

                    if utils.config.CLEANUP_MAPREDUCE_STATE:
                        mapreduce_params['done_callback'] = '%s/%s' % (
                            utils.config.BASE_PATH, DeleteDoneHandler.SUFFIX)

                    jobs.append(
                        control.start_map(
                            name,
                            self.DELETE_HANDLER,
                            self.INPUT_READER,
                            mapreduce_params,
                            mapreduce_parameters=mapreduce_params,
                            base_path=utils.config.MAPREDUCE_PATH,
                            _app=app_id))

                error = ''
            except Exception as e:
                error = self._HandleException(e)

            parameters = [('job', job) for job in jobs]
            if error:
                parameters.append(('error', error))
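The DELETE_HANDLER spec resolves to a map function that emits a delete for every mapped entity. A minimal sketch of that shape (the function name is assumed; the real handler ships with the datastore_admin package):

from google.appengine.ext.mapreduce import operation as op

def delete_entity(entity):
    yield op.db.Delete(entity)  # deletes are batched by the mapreduce runtime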
Code example #11
File: utils.py Project: shnaizerk/gts
  def tx():
    """Start MapReduce job and update datastore admin state.

    Returns:
      The started MapReduce job id as a string.
    """
    job_id = control.start_map(
        job_name, handler_spec, reader_spec,
        mapper_params,
        output_writer_spec=writer_spec,
        mapreduce_parameters=mapreduce_params,
        base_path=config.MAPREDUCE_PATH,
        shard_count=shard_count,
        in_xg_transaction=True,
        queue_name=queue_name)
    operation = DatastoreAdminOperation.get(operation_key)
    operation.status = DatastoreAdminOperation.STATUS_ACTIVE
    operation.active_jobs += 1
    operation.active_job_ids = list(set(operation.active_job_ids + [job_id]))
    operation.put(config=_CreateDatastoreConfig())
    return job_id
Code example #12
  def run(self,
          job_name,
          handler_spec,
          input_reader_spec,
          output_writer_spec=None,
          params=None,
          shards=None):
    """Start a mapreduce job.

    Args:
      job_name: mapreduce name. Only for display purposes.
      handler_spec: fully qualified name of your map function/class.
      input_reader_spec: fully qualified name of the input reader class.
      output_writer_spec: fully qualified name of the output writer class.
      params: a dictionary of parameters for input reader and output writer
        initialization.
      shards: number of shards. This is a hint to mapreduce; the actual
        number of shards is determined by how the input is split.
    """
    if shards is None:
      shards = parameters.config.SHARD_COUNT

    mapreduce_id = control.start_map(
        job_name,
        handler_spec,
        input_reader_spec,
        params or {},
        mapreduce_parameters={
            "done_callback": self.get_callback_url(),
            "done_callback_method": "GET",
            "pipeline_id": self.pipeline_id,
        },
        shard_count=shards,
        output_writer_spec=output_writer_spec,
        )
    self.fill(self.outputs.job_id, mapreduce_id)
    self.set_status(console_url="%s/detail?mapreduce_id=%s" % (
        (parameters.config.BASE_PATH, mapreduce_id)))
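This run method belongs to the open-source mapreduce library's MapperPipeline (example #13 is the same method with a different console_url query parameter). Starting the pipeline from application code might look like the following sketch; the handler path and entity kind are hypothetical, and the import paths assume the standalone mapreduce package:

from mapreduce import mapreduce_pipeline

pipeline = mapreduce_pipeline.MapperPipeline(
    'Touch all MyKind entities',                     # job_name, display only
    'myapp.handlers.touch',                          # hypothetical handler_spec
    'mapreduce.input_readers.DatastoreInputReader',
    params={'entity_kind': 'myapp.models.MyKind'},   # hypothetical kind
    shards=16)
pipeline.start()
# pipeline.pipeline_id identifies the run; run() above then fills outputs.job_id
# with the underlying mapreduce id and points console_url at its detail page.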
Code example #13
  def run(self,
          job_name,
          handler_spec,
          input_reader_spec,
          output_writer_spec=None,
          params=None,
          shards=None):
    """Start a mapreduce job.

    Args:
      job_name: mapreduce name. Only for display purposes.
      handler_spec: fully qualified name of your map function/class.
      input_reader_spec: fully qualified name of the input reader class.
      output_writer_spec: fully qualified name of the output writer class.
      params: a dictionary of parameters for input reader and output writer
        initialization.
      shards: number of shards. This is a hint to mapreduce; the actual
        number of shards is determined by how the input is split.
    """
    if shards is None:
      shards = parameters.config.SHARD_COUNT

    mapreduce_id = control.start_map(
        job_name,
        handler_spec,
        input_reader_spec,
        params or {},
        mapreduce_parameters={
            "done_callback": self.get_callback_url(),
            "done_callback_method": "GET",
            "pipeline_id": self.pipeline_id,
        },
        shard_count=shards,
        output_writer_spec=output_writer_spec,
        )
    self.fill(self.outputs.job_id, mapreduce_id)
    self.set_status(console_url="%s/detail?job_id=%s" % (
        (parameters.config.BASE_PATH, mapreduce_id)))
Code example #14
  def post(self):
    """Handler for post requests to datastore_admin/delete.do.

    Jobs are executed and the user is redirected to the get handler.
    """
    namespace = self.request.get('namespace')
    kinds = self.request.get('kind', allow_multiple=True)
    (namespace_str, _) = _GetPrintableStrs(namespace, kinds)
    app_id = self.request.get('app_id')
    token = self.request.get('xsrf_token')

    jobs = []
    if utils.ValidateXsrfToken(token, XSRF_ACTION):
      try:
        for kind in kinds:
          name = 'Delete all %s objects%s' % (kind, namespace_str)
          mapreduce_params = {
              'entity_kind': kind,
          }

          if utils.config.CLEANUP_MAPREDUCE_STATE:
            mapreduce_params['done_callback'] = '%s/%s' % (
                utils.config.BASE_PATH, DeleteDoneHandler.SUFFIX)

          jobs.append(control.start_map(
              name, self.DELETE_HANDLER,
              self.INPUT_READER, mapreduce_params,
              mapreduce_parameters=mapreduce_params,
              base_path=utils.config.MAPREDUCE_PATH,
              _app=app_id))

        error = ''
      except Exception as e:
        error = self._HandleException(e)

      parameters = [('job', job) for job in jobs]
      if error:
        parameters.append(('error', error))
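When CLEANUP_MAPREDUCE_STATE is enabled, the framework requests done_callback once the job finishes, passing the job id in the Mapreduce-Id request header (a POST by default). A minimal sketch of a receiver; the shape is assumed, and the real DeleteDoneHandler with its actual SUFFIX value ships with the datastore_admin package:

import webapp2

class DeleteDoneHandler(webapp2.RequestHandler):
    """Sketch of a done-callback receiver; the real one cleans up job state."""

    SUFFIX = 'delete.done'  # hypothetical; joined onto BASE_PATH above

    def post(self):
        job_id = self.request.headers['Mapreduce-Id']  # set by the framework
        # ... look up and delete the finished job's mapreduce state here ...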
Code example #15
 def run(self,
         job_name,
         handler_spec,
         input_reader_spec,
         output_writer_spec=None,
         params=None,
         shards=None):
   mapreduce_id = control.start_map(
       job_name,
       handler_spec,
       input_reader_spec,
       params or {},
       mapreduce_parameters={
           "done_callback": self.get_callback_url(),
           "done_callback_method": "GET",
           "pipeline_id": self.pipeline_id,
       },
       shard_count=shards,
       output_writer_spec=output_writer_spec,
       )
   self.fill(self.outputs.job_id, mapreduce_id)
   self.set_status(console_url="%s/detail?job_id=%s" % (
       parameters._DEFAULT_BASE_PATH, mapreduce_id))
Code example #16
 def run(self,
         job_name,
         handler_spec,
         input_reader_spec,
         output_writer_spec=None,
         params=None,
         shards=None):
     mapreduce_id = control.start_map(
         job_name,
         handler_spec,
         input_reader_spec,
         params or {},
         mapreduce_parameters={
             "done_callback": self.get_callback_url(),
             "done_callback_method": "GET",
             "pipeline_id": self.pipeline_id,
         },
         shard_count=shards,
         output_writer_spec=output_writer_spec,
     )
     self.fill(self.outputs.job_id, mapreduce_id)
     self.set_status(console_url="%s/detail?job_id=%s" %
                     ((base_handler._DEFAULT_BASE_PATH, mapreduce_id)))