def build_user_entity(row):
    entity = entity_pb2.Entity()
    datastore_helper.add_key_path(entity.key, 'User', row['email'])
    props = dict(row)
    del props['email']
    datastore_helper.add_properties(entity, props)
    return entity
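The examples in this listing all build entity_pb2.Entity protobufs with the Cloud Datastore v1 helper functions. A typical import block for this Python 2-era stack is sketched below; the module paths are an assumption and should be checked against the SDK version in use.

import uuid

# Assumed module paths for the googledatastore / Beam Datastore v1 stack;
# verify them against the installed SDK before relying on this sketch.
from google.cloud.proto.datastore.v1 import entity_pb2
from googledatastore import helper as datastore_helper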
    def make_entity(self, content):
        """Create entity and set properties."""
        entity = entity_pb2.Entity()
        # We use uuid for keys.
        datastore_helper.add_key_path(entity.key, self._kind, str(uuid.uuid4()))
        datastore_helper.add_properties(entity, content)
        return entity
Example #3
def create_ds_entity(element):
    entity = entity_pb2.Entity()
    kind = 'FileMetaData'

    datastore_helper.add_key_path(entity.key, kind, str(uuid.uuid4()))
    datastore_helper.add_properties(entity, element)
    return entity
Example #4
    def make_entity(self, content):
        entity = entity_pb2.Entity()
        if self._namespace is not None:
            entity.key.partition_id.namespace_id = self._namespace

        helper.add_key_path(entity.key, self._kind, str(uuid.uuid4()))
        helper.add_properties(entity, content)
        return entity
def build_company_entity(data):
    entity = entity_pb2.Entity()
    datastore_helper.add_key_path(entity.key, 'Company', data[0])
    props = {}
    props['reg_date'] = data[1]
    props['boxes_bought'] = 0
    props['boxes_prov'] = 0
    datastore_helper.add_properties(entity, props)
    return entity
  def make_entity(self, content):
    entity = entity_pb2.Entity()
    if self._namespace is not None:
      entity.key.partition_id.namespace_id = self._namespace

    # All entities created will have the same ancestor
    datastore_helper.add_key_path(entity.key, self._kind, self._ancestor,
                                  self._kind, str(uuid.uuid4()))

    datastore_helper.add_properties(entity, {"content": unicode(content)})
    return entity
  def make_entity(self, content):
    """Create entity from given string."""
    entity = entity_pb2.Entity()
    if self._namespace is not None:
      entity.key.partition_id.namespace_id = self._namespace

    # All entities created will have the same ancestor
    datastore_helper.add_key_path(entity.key, self._kind, self._ancestor,
                                  self._kind, hashlib.sha1(content).hexdigest())

    datastore_helper.add_properties(entity, {'content': str(content)})
    return entity
  def test_DatastoreWriteLargeEntities(self):
    """100*100kB entities gets split over two Commit RPCs."""
    with patch.object(helper, 'get_datastore',
                      return_value=self._mock_datastore):
      entities = [e.entity for e in fake_datastore.create_entities(100)]

      datastore_write_fn = _Mutate.DatastoreWriteFn(self._PROJECT)
      datastore_write_fn.start_bundle()
      for entity in entities:
        datastore_helper.add_properties(
            entity, {'large': u'A' * 100000}, exclude_from_indexes=True)
        datastore_write_fn.process(WriteToDatastore.to_upsert_mutation(entity))
      datastore_write_fn.finish_bundle()

      self.assertEqual(2, self._mock_datastore.commit.call_count)
Example #11
    def process(self, element):
        entity = entity_pb2.Entity()
        sku = int(element.pop('sku'))
        element['regularPrice'] = float(element['regularPrice'])
        element['salePrice'] = float(element['salePrice'])
        element['name'] = unicode(element['name'].decode('utf-8'))
        element['type'] = unicode(element['type'].decode('utf-8'))
        element['url'] = unicode(element['url'].decode('utf-8'))
        element['image'] = unicode(element['image'].decode('utf-8'))
        element['inStoreAvailability'] = unicode(
            element['inStoreAvailability'])

        datastore_helper.add_key_path(entity.key, 'Productx', sku)
        datastore_helper.add_properties(entity, element)
        return [entity]
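A DoFn such as the one above is typically wired into a pipeline whose output is written with WriteToDatastore. The sketch below shows one way to do that, assuming the old v1 datastoreio module path; the DoFn, kind name, sample data and project id are all hypothetical.

import uuid

import apache_beam as beam
from apache_beam.io.gcp.datastore.v1.datastoreio import WriteToDatastore
from google.cloud.proto.datastore.v1 import entity_pb2
from googledatastore import helper as datastore_helper


class MakeEntityFn(beam.DoFn):
    """Hypothetical DoFn wrapping the make-entity pattern shown above."""

    def process(self, element):
        entity = entity_pb2.Entity()
        # Random uuid keys, as in several of the examples in this listing.
        datastore_helper.add_key_path(entity.key, 'Example', str(uuid.uuid4()))
        datastore_helper.add_properties(entity, element)
        yield entity


with beam.Pipeline() as p:
    _ = (p
         | 'Create' >> beam.Create([{'name': u'widget'}, {'name': u'gadget'}])
         | 'MakeEntities' >> beam.ParDo(MakeEntityFn())
         | 'Write' >> WriteToDatastore('my-gcp-project'))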
Example #12
  def test_DatastoreWriteLargeEntities(self):
    """100*100kB entities gets split over two Commit RPCs."""
    with patch.object(helper, 'get_datastore',
                      return_value=self._mock_datastore):
      entities = [e.entity for e in fake_datastore.create_entities(100)]

      datastore_write_fn = _Mutate.DatastoreWriteFn(
          self._PROJECT, fixed_batch_size=_Mutate._WRITE_BATCH_INITIAL_SIZE)
      datastore_write_fn.start_bundle()
      for entity in entities:
        datastore_helper.add_properties(
            entity, {'large': u'A' * 100000}, exclude_from_indexes=True)
        datastore_write_fn.process(WriteToDatastore.to_upsert_mutation(entity))
      datastore_write_fn.finish_bundle()

      self.assertEqual(2, self._mock_datastore.commit.call_count)
Example #13
    def process(self, element, entityName, user, dataset):
        entity = entity_pb2.Entity()
        datastore_helper.add_key_path(entity.key, entityName.get(),
                                      str(uuid.uuid4()))

        datastore_helper.add_properties(
            entity, {
                "label": unicode(element['label']),
                "user": unicode(user.get()),
                "dataset": unicode(dataset.get())
            })

        datastore_helper.add_properties(entity,
                                        {"text": unicode(element['text'])},
                                        exclude_from_indexes=True)

        return [entity]
def build_trending_entities(company):
    metric = metric_list[randint(0, 4)]
    if randint(1, 3) > 2:
        delta = randint(15, 30)
    else:
        delta = -randint(15, 30)

    entity = entity_pb2.Entity()
    datastore_helper.add_key_path(entity.key, 'Trend',
                                  '{}-{}'.format(company[0], metric))
    props = {}
    props['company'] = company[0]
    props['metric'] = metric
    props['delta'] = delta
    props['date'] = datetime.now()
    datastore_helper.add_properties(entity, props)
    return entity
    def testPropertyValues(self):
        blob_key = datastore.Value()
        blob_key.blob_key_value = 'blob-key'
        property_dict = collections.OrderedDict(
            a_string=u'a',
            a_blob='b',
            a_boolean=True,
            a_integer=1,
            a_long=2L,
            a_double=1.0,
            a_timestamp_microseconds=datetime.datetime.now(),
            a_key=datastore.Key(),
            a_entity=datastore.Entity(),
            a_blob_key=blob_key,
            many_integer=[1, 2, 3])
        entity = datastore.Entity()
        helper.add_properties(entity, property_dict)
        d = dict((prop.name, helper.get_value(prop.value))
                 for prop in entity.property)
        self.assertDictEqual(d, property_dict)
def build_project_entities(company):
    project = project_list[randint(0, 4)]
    health_index = randint(0, 5)
    if health_index < 1:
        health = randint(30, 50)
    elif health_index < 2:
        health = randint(50, 70)
    else:
        health = randint(70, 100)
    progress = randint(50, 95)
    entity = entity_pb2.Entity()
    datastore_helper.add_key_path(entity.key, 'Project',
                                  '{}-{}'.format(company[0], project))
    props = {}
    props['due'] = datetime.now() + timedelta(randint(30, 60))
    props['name'] = project
    props['health'] = health
    props['progress'] = progress
    props['company'] = company[0]
    datastore_helper.add_properties(entity, props)
    return entity
Example #18
    def make_entity(self, content):
        """Transform each row from input file to DS Entity.

        :type content: str
        :param content: one line of input file.

        :rtype: `entity_pb2.Entity`
        :returns: Entity created for each line of input file.
        """
        sku_base, similarities = itemgetter('item', 'similarity_items')(
            json.loads(content))
        entity = entity_pb2.Entity()
        s_sim = sorted(similarities, key=lambda x: x['similarity'],
                       reverse=True)[:self.sim_cap]
        add_key_path(entity.key, self._kind, sku_base)
        add_properties(entity, {'items': [e['item'] for e in s_sim]},
                       exclude_from_indexes=True)
        add_properties(entity, {'scores': [e['similarity'] for e in s_sim]},
                       exclude_from_indexes=True)
        return entity
Example #19
    def process(self, idx):
        entity = entity_pb2.Entity()
        embed = entity_pb2.Entity()  # embedded document
        ss = {
            'ss': unicode('X'),  # string
            'bb': False,  # boolean
            'll': [1, 2, 3],  # list of integers
            'ii': 123  # integer
        }
        datastore_helper.add_properties(
            embed, ss)  # setting properties for embedded document
        element = dict()
        element['s'] = unicode('String')  # string
        element['b'] = True  # boolean
        element['l'] = [unicode('s'), unicode('a'),
                        unicode('b')]  # list of strings
        element['i'] = 1  # integer
        element['e'] = embed  # embedded document
        datastore_helper.add_key_path(entity.key, 'ds',
                                      idx)  # setting id for document
        datastore_helper.add_properties(
            entity, element)  # setting properties for document
        return [entity]
def build_renewal_entities(company_updates):

    # print company_updates
    amount = company_updates[1]['purchased'][0] * 2499

    health_index = randint(0, 5)
    if health_index < 1:
        health = randint(10, 30)
    elif health_index < 3:
        health = randint(30, 60)
    else:
        health = randint(60, 100)

    entity = entity_pb2.Entity()
    datastore_helper.add_key_path(entity.key, 'Renewal',
                                  '{}-{}'.format(company_updates[0], amount))
    props = {}
    props['due'] = datetime.now() + timedelta(randint(30, 120))
    props['amount'] = amount
    props['health'] = health
    props['company'] = company_updates[0]
    datastore_helper.add_properties(entity, props)
    return entity
  def make_stats_response(self, property_map):
    resp = datastore_pb2.RunQueryResponse()
    entity_result = resp.batch.entity_results.add()
    datastore_helper.add_properties(entity_result.entity, property_map)
    return resp
    def testAddPropertyValuesBlindlyAdd(self):
        entity = datastore.Entity()
        helper.add_properties(entity, {'a': 1})
        helper.add_properties(entity, {'a': 2})
        self.assertEqual(2, len(entity.property))
Example #24
def create_entity(input_features, kind):
    entity = entity_pb2.Entity()
    datastore_helper.add_key_path(entity.key, kind, input_features['id'])
    datastore_helper.add_properties(entity,
                                    {'text': unicode(input_features['text'])})
    return entity
Example #26
def run_data_migration():
    request_data = json.loads(request.get_data())

    # Required fields
    fields = [
        'name',
        'function_kwargs',
        'user'
    ]

    # Some basic validation
    for f in fields:
        if f not in request_data:
            resp_data = json.dumps(
                {
                    'error': 'The ' + f + ' field is required.'
                }
            )
            resp = Response(resp_data, status=400, mimetype='application/json')
            return resp

    if request_data['name'] not in migration.choices:
        resp_data = json.dumps(
            {
                'error': 'The migration name is not valid.'
            }
        )
        resp = Response(resp_data, status=400, mimetype='application/json')
        return resp

    migration_name = request_data['name']
    function_kwargs = request_data['function_kwargs'] or {}
    user = request_data['user']

    function_kwargs.update({'name': migration_name})

    # Create a MigrationHistory entity to keep track of the migration status
    # set the project
    project = PROJECT or 'meridianedit-staging'

    # Create entity key
    partition_id = entity_pb2.PartitionId(project_id=project, namespace_id="")
    migration_history_obj_id = datetime.now().strftime("%Y%m%d%H%M%S")
    path_element = entity_pb2.Key.PathElement(kind="MigrationHistory", name=migration_history_obj_id)
    key = entity_pb2.Key(partition_id=partition_id, path=[path_element])

    # Create entity and give it properties
    entity = entity_pb2.Entity(key=key)
    property_dict = {
        'name': migration_name,
        'function_kwargs': json.dumps(function_kwargs),
        'started_by': user,
        'status': 'running',
        'created': datetime.now()
    }
    datastore_helper.add_properties(entity, property_dict)

    # Add entity to datastore
    mutations = [Mutation(insert=entity)]
    client = apache_helper.get_datastore(project)
    throttler = AdaptiveThrottler(window_ms=120000, bucket_ms=1000, overload_ratio=1.25)
    apache_helper.write_mutations(client, project, mutations, throttler, rpc_stats_callback)

    # Call the migration with any given function kwargs
    migration_kwargs = {
        'migration_history_obj': migration_history_obj_id,
    }
    migration_kwargs.update(function_kwargs)

    # Run the migration in a Celery task worker so it does not time out
    # this connection. Also monitor the task so we can update the
    # migration status.
    run_dataflow_migration.delay(pickle.dumps(entity), **migration_kwargs)

    resp_data = {
        'migration_history_obj_id': migration_history_obj_id
    }

    # A default 500 error message is returned if any of this breaks
    return Response(json.dumps(resp_data), status=200, mimetype='application/json')