Code example #1
  def _Dynamic_TransactionQuery(self, request, response):
    """Runs an ancestor query inside a freshly created transaction.

    Rejects queries without an ancestor, and apps that are not on the
    high-replication datastore. Otherwise it begins a new transaction,
    runs the query within it, reads the query's entity-group metadata
    entity in the same transaction, copies the entity-group key (and the
    entity, when one exists) onto the response, and commits.

    Args:
      request: datastore_pb.Query to execute; must have an ancestor set.
        Its transaction field is overwritten with the new transaction.
      response: response message whose result, entity_group_key and
        entity_group fields are filled in.

    Raises:
      apiproxy_errors.ApplicationError: with BAD_REQUEST when the query
        has no ancestor or the datastore is not high-replication.
    """
    if not request.has_ancestor():
      raise apiproxy_errors.ApplicationError(
          datastore_pb.Error.BAD_REQUEST,
          'No ancestor in transactional query.')

    app_id = datastore_types.ResolveAppId(None)
    if (datastore_rpc._GetDatastoreType(app_id) !=
        datastore_rpc.BaseConnection.HIGH_REPLICATION_DATASTORE):
      raise apiproxy_errors.ApplicationError(
          datastore_pb.Error.BAD_REQUEST,
          'remote_api supports transactional queries only in the '
          'high-replication datastore.')


    # Build the key of the entity group's metadata entity: keep only the
    # root element of the ancestor's path, then append the EntityGroup
    # metadata child (metadata.EntityGroup.KIND_NAME / ID).
    entity_group_key = entity_pb.Reference()
    entity_group_key.CopyFrom(request.ancestor())
    group_path = entity_group_key.mutable_path()
    root = entity_pb.Path_Element()
    root.MergeFrom(group_path.element(0))
    group_path.clear_element()
    group_path.add_element().CopyFrom(root)
    eg_element = group_path.add_element()
    eg_element.set_type(metadata.EntityGroup.KIND_NAME)
    eg_element.set_id(metadata.EntityGroup.ID)


    # Begin a new transaction; the query and the entity-group read below
    # both run inside it.
    begin_request = datastore_pb.BeginTransactionRequest()
    begin_request.set_app(app_id)
    tx = datastore_pb.Transaction()
    self.__call('datastore_v3', 'BeginTransaction', begin_request, tx)


    # Run the caller's query inside the transaction.
    request.mutable_transaction().CopyFrom(tx)
    self.__call('datastore_v3', 'RunQuery', request, response.mutable_result())


    # Fetch the entity-group metadata entity in the same transaction.
    get_request = datastore_pb.GetRequest()
    get_request.mutable_transaction().CopyFrom(tx)
    get_request.add_key().CopyFrom(entity_group_key)
    get_response = datastore_pb.GetResponse()
    self.__call('datastore_v3', 'Get', get_request, get_response)
    entity_group = get_response.entity(0)


    # Hand the entity group back to the caller; presumably the client uses
    # it to detect concurrent writes to the group — confirm against the
    # remote_api client code.
    response.mutable_entity_group_key().CopyFrom(entity_group_key)
    if entity_group.has_entity():
      response.mutable_entity_group().CopyFrom(entity_group.entity())


    self.__call('datastore_v3', 'Commit', tx, datastore_pb.CommitResponse())
Code example #2
  def _Dynamic_TransactionQuery(self, request, response):
    """Runs an ancestor query inside a freshly created transaction.

    Rejects queries without an ancestor, and apps that are not on the
    high-replication datastore. Otherwise it begins a new transaction,
    runs the query within it, reads the query's entity-group metadata
    entity in the same transaction, copies the entity-group key (and the
    entity, when one exists) onto the response, and commits.

    Args:
      request: datastore_pb.Query to execute; must have an ancestor set.
        Its transaction field is overwritten with the new transaction.
      response: response message whose result, entity_group_key and
        entity_group fields are filled in.

    Raises:
      apiproxy_errors.ApplicationError: with BAD_REQUEST when the query
        has no ancestor or the datastore is not high-replication.
    """
    if not request.has_ancestor():
      raise apiproxy_errors.ApplicationError(
          datastore_pb.Error.BAD_REQUEST,
          'No ancestor in transactional query.')

    app_id = datastore_types.ResolveAppId(None)
    if (datastore_rpc._GetDatastoreType(app_id) !=
        datastore_rpc.BaseConnection.HIGH_REPLICATION_DATASTORE):
      raise apiproxy_errors.ApplicationError(
          datastore_pb.Error.BAD_REQUEST,
          'remote_api supports transactional queries only in the '
          'high-replication datastore.')


    # Build the key of the entity group's metadata entity: keep only the
    # root element of the ancestor's path, then append the EntityGroup
    # metadata child (metadata.EntityGroup.KIND_NAME / ID).
    entity_group_key = entity_pb.Reference()
    entity_group_key.CopyFrom(request.ancestor())
    group_path = entity_group_key.mutable_path()
    root = entity_pb.Path_Element()
    root.MergeFrom(group_path.element(0))
    group_path.clear_element()
    group_path.add_element().CopyFrom(root)
    eg_element = group_path.add_element()
    eg_element.set_type(metadata.EntityGroup.KIND_NAME)
    eg_element.set_id(metadata.EntityGroup.ID)


    # Begin a new transaction; the query and the entity-group read below
    # both run inside it.
    begin_request = datastore_pb.BeginTransactionRequest()
    begin_request.set_app(app_id)
    tx = datastore_pb.Transaction()
    self.__call('datastore_v3', 'BeginTransaction', begin_request, tx)


    # Run the caller's query inside the transaction.
    request.mutable_transaction().CopyFrom(tx)
    self.__call('datastore_v3', 'RunQuery', request, response.mutable_result())


    # Fetch the entity-group metadata entity in the same transaction.
    get_request = datastore_pb.GetRequest()
    get_request.mutable_transaction().CopyFrom(tx)
    get_request.add_key().CopyFrom(entity_group_key)
    get_response = datastore_pb.GetResponse()
    self.__call('datastore_v3', 'Get', get_request, get_response)
    entity_group = get_response.entity(0)


    # Hand the entity group back to the caller; presumably the client uses
    # it to detect concurrent writes to the group — confirm against the
    # remote_api client code.
    response.mutable_entity_group_key().CopyFrom(entity_group_key)
    if entity_group.has_entity():
      response.mutable_entity_group().CopyFrom(entity_group.entity())


    self.__call('datastore_v3', 'Commit', tx, datastore_pb.CommitResponse())
Code example #3
File: utils.py — Project: 404minds/quiz-forest
def StartMap(operation_key,
             job_name,
             handler_spec,
             reader_spec,
             writer_spec,
             mapper_params,
             mapreduce_params=None,
             queue_name=None,
             shard_count=MAPREDUCE_DEFAULT_SHARDS):
  """Start a map job as part of a datastore admin operation.

  Increments the number of active jobs recorded on the operation and
  kicks off a new map job, both inside a transaction.

  Args:
    operation_key: Key of the DatastoreAdminOperation for current operation.
    job_name: Map job name.
    handler_spec: Map handler specification.
    reader_spec: Input reader specification.
    writer_spec: Output writer specification.
    mapper_params: Custom mapper parameters.
    mapreduce_params: Custom mapreduce parameters.
    queue_name: the name of the queue that will be used by the M/R.
    shard_count: the number of shards the M/R will try to use.

  Returns:
    The id of the started map job, as a string.
  """
  if not mapreduce_params:
    mapreduce_params = {}
  # Tag the M/R with the owning admin operation and a completion callback.
  mapreduce_params[DatastoreAdminOperation.PARAM_DATASTORE_ADMIN_OPERATION] = (
      str(operation_key))
  mapreduce_params['done_callback'] = '%s/%s' % (config.BASE_PATH,
                                                 MapreduceDoneHandler.SUFFIX)
  if queue_name is not None:
    mapreduce_params['done_callback_queue'] = queue_name
  mapreduce_params['force_writes'] = 'True'

  def start_job_in_tx(use_xg):
    """Start the MapReduce job and record it on the admin operation.

    Args:
      use_xg: True when running inside an xg-enabled transaction; False
        when the admin-state update and the MapReduce kickoff run in
        independent transactions.

    Returns:
      The started MapReduce job id as a string.
    """
    job_id = control.start_map(
        job_name, handler_spec, reader_spec,
        mapper_params,
        output_writer_spec=writer_spec,
        mapreduce_parameters=mapreduce_params,
        base_path=config.MAPREDUCE_PATH,
        shard_count=shard_count,
        in_xg_transaction=use_xg,
        queue_name=queue_name)
    operation = DatastoreAdminOperation.get(operation_key)
    operation.status = DatastoreAdminOperation.STATUS_ACTIVE
    operation.active_jobs += 1
    # De-duplicate in case this job id was already recorded.
    operation.active_job_ids = list(set(operation.active_job_ids + [job_id]))
    operation.put(config=_CreateDatastoreConfig())
    return job_id

  # Master/slave has no cross-group transactions, so there the two updates
  # run as separate, non-xg transactions; everywhere else use a single
  # xg transaction.
  if (datastore_rpc._GetDatastoreType() ==
      datastore_rpc.BaseConnection.MASTER_SLAVE_DATASTORE):
    return db.run_in_transaction(start_job_in_tx, False)
  return db.run_in_transaction_options(
      db.create_transaction_options(xg=True), start_job_in_tx, True)
Code example #4
File: utils.py — Project: venky6363/appscale
def StartMap(operation_key,
             job_name,
             handler_spec,
             reader_spec,
             writer_spec,
             mapper_params,
             mapreduce_params=None,
             queue_name=None,
             shard_count=MAPREDUCE_DEFAULT_SHARDS):
    """Start map as part of a datastore admin operation.

    Will increase the number of active jobs inside the operation and start
    a new map job, both inside a transaction.

    Args:
      operation_key: Key of the DatastoreAdminOperation for current operation.
      job_name: Map job name.
      handler_spec: Map handler specification.
      reader_spec: Input reader specification.
      writer_spec: Output writer specification.
      mapper_params: Custom mapper parameters.
      mapreduce_params: Custom mapreduce parameters.
      queue_name: the name of the queue that will be used by the M/R.
      shard_count: the number of shards the M/R will try to use.

    Returns:
      Resulting map job id as a string.
    """
    if not mapreduce_params:
        mapreduce_params = {}
    # Link the M/R back to the owning admin operation and register the
    # completion callback handler.
    mapreduce_params.update({
        DatastoreAdminOperation.PARAM_DATASTORE_ADMIN_OPERATION:
            str(operation_key),
        'done_callback': '%s/%s' % (config.BASE_PATH,
                                    MapreduceDoneHandler.SUFFIX),
        'force_writes': 'True',
    })
    if queue_name is not None:
        mapreduce_params['done_callback_queue'] = queue_name

    def tx(is_xg_transaction):
        """Start the MapReduce job and update the datastore admin state.

        Args:
          is_xg_transaction: True if running inside an xg-enabled
            transaction; False if the admin state and the MapReduce job are
            updated in independent transactions.

        Returns:
          Resulting MapReduce job id as a string.
        """
        new_job_id = control.start_map(job_name,
                                       handler_spec,
                                       reader_spec,
                                       mapper_params,
                                       output_writer_spec=writer_spec,
                                       mapreduce_parameters=mapreduce_params,
                                       base_path=config.MAPREDUCE_PATH,
                                       shard_count=shard_count,
                                       in_xg_transaction=is_xg_transaction,
                                       queue_name=queue_name)
        op = DatastoreAdminOperation.get(operation_key)
        op.status = DatastoreAdminOperation.STATUS_ACTIVE
        op.active_jobs += 1
        # Record the job id without introducing duplicates.
        op.active_job_ids = list(set(op.active_job_ids + [new_job_id]))
        op.put(config=_CreateDatastoreConfig())
        return new_job_id

    # Only the master/slave datastore lacks cross-group transactions; for
    # every other datastore type run everything in one xg transaction.
    if (datastore_rpc._GetDatastoreType() !=
            datastore_rpc.BaseConnection.MASTER_SLAVE_DATASTORE):
        return db.run_in_transaction_options(
            db.create_transaction_options(xg=True), tx, True)
    return db.run_in_transaction(tx, False)