Code example #1
File: main.py Project: yassine-naamane/zorya
def add_schedule():
    """
    Add a schedule.
    Returns:

    """
    with client.context():
        schedules_model = SchedulesModel()
        schedules_model.Schedule = {
            "dtype": request.json["dtype"],
            "Corder": request.json["Corder"],
            "Shape": request.json["Shape"],
            "__ndarray__": request.json["__ndarray__"],
        }

        schedules_model.Name = request.json["name"]
        schedules_model.Timezone = request.json["timezone"]
        schedules_model.key = ndb.Key("SchedulesModel", request.json["name"])
        schedules_model.put()
    return "ok", 200
Code example #2
def query_database(user_id):
    #     """Fetches all notes associated with user_id.

    #     Notes are ordered them by date created, with most recent note added
    #     first.
    #     """
    ancestor_key = ndb.Key(Note, user_id)
    query = Note.query(ancestor=ancestor_key).order(-Note.created)
    notes = query.fetch()

    note_messages = []

    for note in notes:
        note_messages.append({
            'friendly_id': note.friendly_id,
            'message': note.message,
            'created': note.created
        })

    return note_messages
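A minimal Note model consistent with the ancestor query above might look like the sketch below; the property types are assumptions, and only the created ordering and the parent-key layout are implied by the query.

from google.cloud import ndb


class Note(ndb.Model):
    friendly_id = ndb.StringProperty()
    message = ndb.TextProperty()
    created = ndb.DateTimeProperty(auto_now_add=True)


def add_note(user_id, friendly_id, message):
    # Must run inside an ndb client context, like query_database() above.
    # Storing under the same ancestor key keeps the note visible to the query.
    Note(parent=ndb.Key(Note, user_id),
         friendly_id=friendly_id,
         message=message).put()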
Code example #3
def test_delete_entity_with_redis_cache(ds_entity, redis_context):
    entity_id = test_utils.system.unique_resource_id()
    ds_entity(KIND, entity_id, foo=42)

    class SomeKind(ndb.Model):
        foo = ndb.IntegerProperty()

    key = ndb.Key(KIND, entity_id)
    cache_key = _cache.global_cache_key(key._key)

    assert key.get().foo == 42
    assert redis_context.global_cache.redis.get(cache_key) is not None

    assert key.delete() is None
    assert redis_context.global_cache.redis.get(cache_key) is None

    # This is py27 behavior. Not entirely sold on leaving _LOCKED value for
    # Datastore misses.
    assert key.get() is None
    assert redis_context.global_cache.redis.get(cache_key) == b"0"
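Outside the test fixture, a Redis-backed global cache can be wired up roughly as in this sketch; the connection details and kind name are assumptions.

import redis
from google.cloud import ndb

client = ndb.Client()
global_cache = ndb.RedisCache(redis.StrictRedis(host="localhost", port=6379))

with client.context(global_cache=global_cache):
    key = ndb.Key("SomeKind", 1234)
    entity = key.get()  # read-through: populates the Redis-backed cache
    key.delete()        # also clears (or locks) the cached value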
Code example #4
    def test_user_url(self):
        """Run through an authorization back and forth with a custom user url
    provided to the auth mechanism
    """
        state = self.state(user_url='https://kylewm.com')

        self.expect_requests_get(
            'https://kylewm.com/',
            response='<html><link rel="webmention" href="/webmention"></html>')
        self.mox.ReplayAll()

        resp = self.client.post('/fake/start',
                                data={
                                    'feature': 'listen',
                                    'callback':
                                    'http://withknown.com/bridgy_callback',
                                    'user_url': 'https://kylewm.com',
                                })

        self.assert_equals(302, resp.status_code)
        self.assert_auth_url_state(resp, state)

        resp = self.client.get(
            f'/fake/add?state={state}&oauth_token=fake-token&oauth_token_secret=fake-secret'
        )
        self.assert_equals(302, resp.status_code)
        self.assert_equals(
            'http://withknown.com/bridgy_callback?' + urllib.parse.urlencode([
                ('result', 'success'),
                ('user', 'http://localhost/fake/0123456789'),
                ('key', ndb.Key('FakeSource',
                                '0123456789').urlsafe().decode()),
            ]), resp.headers['Location'])

        source = FakeSource.get_by_id('0123456789')
        self.assertTrue(source)
        self.assert_equals('Fake User', source.name)
        self.assert_equals(['listen'], source.features)
        self.assert_equals(['https://kylewm.com/', 'http://fakeuser.com/'],
                           source.domain_urls)
        self.assert_equals(['kylewm.com', 'fakeuser.com'], source.domains)
Code example #5
File: frontend_handlers.py Project: sbs2001/osv
def package_handler():
    """Handle a package request."""
    package_path = request.args.get('package')
    if not package_path:
        abort(400)
        return None

    ecosystem, package = package_path.split('/', 1)

    package_info = ndb.Key(osv.PackageInfo, package_path).get()
    if package_info and package_info.latest_tag:
        latest_tag = package_info.latest_tag
    else:
        # Fall back to last lexicographically ordered tag.
        latest_tag_info = osv.PackageTagInfo.query(
            osv.PackageTagInfo.package == package,
            osv.PackageTagInfo.ecosystem == ecosystem)
        latest_tag_info = latest_tag_info.order(-osv.PackageTagInfo.tag).get()
        if not latest_tag_info:
            abort(404)
            return None

        latest_tag = latest_tag_info.tag

    query = osv.PackageTagInfo.query(osv.PackageTagInfo.package == package,
                                     osv.PackageTagInfo.ecosystem == ecosystem,
                                     osv.PackageTagInfo.bugs > '')
    tags_with_bugs = []
    for tag_info in query:
        tag_with_bugs = {
            'tag': tag_info.tag,
            'bugs': tag_info.bugs,
        }

        tags_with_bugs.append(tag_with_bugs)

    tags_with_bugs.sort(key=lambda b: b['tag'], reverse=True)
    return jsonify({
        'latestTag': latest_tag,
        'bugs': tags_with_bugs,
    })
Code example #6
File: test_crud.py Project: REWSOFT/python-ndb
def test_delete_entity_in_transaction_with_global_cache(
    client_context, ds_entity
):
    """Regression test for #426

    https://github.com/googleapis/python-ndb/issues/426
    """

    class SomeKind(ndb.Model):
        foo = ndb.IntegerProperty()

    entity_id = test_utils.system.unique_resource_id()
    ds_entity(KIND, entity_id, foo=42)

    global_cache = global_cache_module._InProcessGlobalCache()
    with client_context.new(global_cache=global_cache).use():
        key = ndb.Key(KIND, entity_id)
        assert key.get().foo == 42

        ndb.transaction(key.delete)
        assert key.get() is None
Code example #7
File: test_crud.py Project: REWSOFT/python-ndb
def test_retrieve_entity_with_legacy_structured_property(ds_entity):
    class OtherKind(ndb.Model):
        one = ndb.StringProperty()
        two = ndb.StringProperty()

    class SomeKind(ndb.Model):
        foo = ndb.IntegerProperty()
        bar = ndb.StructuredProperty(OtherKind)

    entity_id = test_utils.system.unique_resource_id()
    ds_entity(
        KIND, entity_id, **{"foo": 42, "bar.one": "hi", "bar.two": "mom"}
    )

    key = ndb.Key(KIND, entity_id)
    retrieved = key.get()
    assert retrieved.foo == 42
    assert retrieved.bar.one == "hi"
    assert retrieved.bar.two == "mom"

    assert isinstance(retrieved.bar, OtherKind)
Code example #8
File: frontend_handlers.py Project: safe2011/osv
def vulnerability_handler():
    """Handle a vulnerability request."""
    vuln_id = request.args.get('id')
    if not vuln_id:
        abort(400)
        return None

    bug = ndb.Key(osv.Bug, vuln_id).get()
    if not bug:
        abort(404)
        return None

    if bug.status == osv.BugStatus.UNPROCESSED:
        abort(404)
        return None

    if not bug.public:
        abort(403)
        return None

    return jsonify(bug_to_response(bug, detailed=True))
Code example #9
    def test_mark_bug_invalid(self):
        """Test mark_bug_invalid."""
        osv.SourceRepository(id='oss-fuzz', name='oss-fuzz',
                             db_prefix='OSV-').put()
        osv.Bug(db_id='OSV-2021-1', source_id='oss-fuzz:1337').put()
        osv.AffectedCommit(bug_id='OSV-2021-1').put()
        osv.AffectedCommit(bug_id='OSV-2021-1').put()

        message = mock.Mock()
        message.attributes = {
            'type': 'invalid',
            'testcase_id': '1337',
            'source_id': '',
        }

        worker.mark_bug_invalid(message)
        bug = ndb.Key(osv.Bug, 'OSV-2021-1').get()
        self.assertEqual(osv.BugStatus.INVALID, bug.status)

        commits = list(osv.AffectedCommit.query())
        self.assertEqual(0, len(commits))
Code example #10
    def generate(cls):
        """
        Generate a bunch of instances that could be confused with each
        other.

        e.g.
        Key('AncestorModel', 1)
        Key('AncestorModel', 1, 'AncestorModel', 1)
        Key('AncestorModel', 1, 'AncestorModel', '1')
        Key('AncestorModel', '1')
        Key('AncestorModel', '1', 'AncestorModel', 1)
        Key('AncestorModel', '1', 'AncestorModel', '1')
        """
        ids = [1, '1', 2, '2']  # int(1) and str(1) are different IDs

        for i, parent in enumerate(ids):
            cls(id=parent, sort=-i).put()

        for i, (parent, child) in enumerate(product(ids, repeat=2)):
            parent_key = ndb.Key(cls._get_kind(), parent)
            cls(parent=parent_key, id=child, sort=i).put()
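The docstring's point is that integer and string IDs produce distinct keys; a quick illustration, assuming a kind named AncestorModel and a configured client:

from google.cloud import ndb

client = ndb.Client()

with client.context():
    k_int = ndb.Key("AncestorModel", 1)
    k_str = ndb.Key("AncestorModel", "1")
    assert k_int != k_str
    assert k_int.id() == 1
    assert k_str.id() == "1"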
Code example #11
    def test_run_command_progression(self):
        """Test run_command with a progression task."""
        commands.run_command('progression', '123', 'job')

        self.assertEqual(1, self.mock.progression_execute_task.call_count)
        self.mock.progression_execute_task.assert_called_with('123', 'job')

        # TaskStatus should indicate success.
        task_status_entities = list(data_types.TaskStatus.query())
        self.assertEqual(1, len(task_status_entities))

        task_status = task_status_entities[0]
        self.assertEqual(ndb.Key(data_types.TaskStatus, 'progression 123 job'),
                         task_status.key)

        self.assertDictEqual(
            {
                'bot_name': 'bot_name',
                'status': 'finished',
                'time': test_utils.CURRENT_TIME,
            }, task_status.to_dict())
Code example #12
    def test_not_fixed(self):
        """Test not fixed bug."""
        message = mock.Mock()
        message.attributes = {
            'source_id': 'oss-fuzz:123',
            'allocated_id': 'OSV-2020-1337',
        }

        regress_result = osv.RegressResult(
            id='oss-fuzz:123',
            commit='eefe8ec3f1f90d0e684890e810f3f21e8500a4cd',
            repo_url='https://repo.com/repo',
            issue_id='9001',
            project='project',
            ecosystem='ecosystem',
            summary='Heap-buffer-overflow in Foo',
            details='DETAILS',
            severity='MEDIUM',
            reference_urls=['https://url/'])
        regress_result.put()

        oss_fuzz.process_impact_task('oss-fuzz:123', message)
        self.expect_dict_equal(
            'not_fixed',
            ndb.Key(osv.Bug, 'OSV-2020-1337').get()._to_dict())

        affected_commits = list(osv.AffectedCommit.query())
        self.assertCountEqual([
            '4c155795426727ea05575bd5904321def23c03f4',
            'b1c95a196f22d06fcf80df8c6691cd113d8fefff',
            'eefe8ec3f1f90d0e684890e810f3f21e8500a4cd',
            '36f0bd9549298b44f9ff2496c9dd1326b3a9d0e2',
            '8d8242f545e9cec3e6d0d2e3f5bde8be1c659735',
            'b9b3fd4732695b83c3068b7b6a14bb372ec31f98',
            'b587c21c36a84e16cfc6b39eb68578d43b5281ad',
            '88e5ae3c40c85b702ba89a34c29f233048abb12b',
            '3ea6feea9bb853596c727abab309476cc07d1505',
            'febfac1940086bc1f6d3dc33fda0a1d1ba336209',
            'ff8cc32ba60ad9cbb3b23f0a82aad96ebe9ff76b',
        ], [commit.commit for commit in affected_commits])
Code example #13
def fetch_multiple_entities_by_ids_and_models(
    ids_and_models: List[Tuple[str, List[str]]]
) -> List[List[Optional[TYPE_MODEL_SUBCLASS]]]:
    """Fetches the entities from the datastore corresponding to the given ids
    and models.

    Args:
        ids_and_models: list(tuple(str, list(str))). The ids and their
            corresponding model names for which we have to fetch entities.

    Raises:
        Exception. Model names should not be duplicated in input list.

    Returns:
        list(list(datastore_services.Model)). The model instances corresponding
        to the ids and models. The models corresponding to the same tuple in the
        input are grouped together.
    """
    entity_keys: List[Key] = []
    model_names = [model_name for (model_name, _) in ids_and_models]
    if len(model_names) != len(list(set(model_names))):
        raise Exception('Model names should not be duplicated in input list.')
    for (model_name, entity_ids) in ids_and_models:
        # Add the keys to the list of keys whose entities we have to fetch.
        entity_keys = (
            entity_keys +
            [ndb.Key(model_name, entity_id) for entity_id in entity_ids])

    all_models: List[Optional[TYPE_MODEL_SUBCLASS]] = ndb.get_multi(
        entity_keys)
    all_models_grouped_by_model_type: List[List[
        Optional[TYPE_MODEL_SUBCLASS]]] = []

    start_index = 0
    for (_, entity_ids) in ids_and_models:
        all_models_grouped_by_model_type.append(
            all_models[start_index:start_index + len(entity_ids)])
        start_index = start_index + len(entity_ids)

    return all_models_grouped_by_model_type
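A hedged usage sketch; the model names and IDs below are placeholders, and the return shape follows the docstring: one inner list per (model_name, ids) tuple, in order.

results = fetch_multiple_entities_by_ids_and_models([
    ('ExplorationModel', ['exp_1', 'exp_2']),
    ('CollectionModel', ['col_1']),
])
# results[0] -> [entity or None for 'exp_1', entity or None for 'exp_2']
# results[1] -> [entity or None for 'col_1']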
Code example #14
File: models.py Project: boltreactor/bambiha
    def update_profile(cls, request):

        user = ndb.Key(urlsafe=request.session['user']).get()
        if request.FILES.get('profile_image'):
            file = request.FILES['profile_image']
            s3 = boto3.resource(
                service_name='s3',
                region_name='us-east-2',
                aws_access_key_id='AKIAS7EVJ2DJLKQNBTZC',
                aws_secret_access_key='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'
            )
            bucket = s3.Bucket('kompass')
            details = bucket.put_object(Key=file.name, Body=file)
            url = "https://kompass.s3.us-east-2.amazonaws.com/" + details.key
            user.profile_image = url
            user.put()
            return user
        elif request.FILES.get('cover_image'):
            file = request.FILES['cover_image']
            s3 = boto3.resource(
                service_name='s3',
                region_name='us-east-2',
                aws_access_key_id='AKIAS7EVJ2DJLKQNBTZC',
                aws_secret_access_key='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'
            )
            bucket = s3.Bucket('kompass')
            details = bucket.put_object(Key=file.name, Body=file)
            url = "https://kompass.s3.us-east-2.amazonaws.com/" + details.key
            user.cover_image = url
            user.put()
            return user
        else:
            user.date_of_birth = request.data['date_of_birth']
            user.gender = request.data['gender']
            user.language = request.data['language']
            user.about = request.data['about']
            user.phone = request.data['phone']
            user.location = request.data['location']
            user.put()
            return user
Code example #15
File: models.py Project: boltreactor/bambiha
    def update(cls, request):
        obj = ndb.Key(urlsafe=request.data['id']).get()
        obj.unsubscribe = request.data['unsubscribe']
        obj.unsub_all_marketing_emails = request.data['messages_email']
        obj.push_notifications = request.data['push_notifications']
        obj.messages_sms = request.data['messages_sms']
        obj.messages_email = request.data['messages_email']
        obj.reminders_sms = request.data['reminders_sms']
        obj.reminders_email = request.data['reminders_email']
        obj.promotion_sms = request.data['promotion_sms']
        obj.promotion_email = request.data['promotion_email']
        obj.policy_sms = request.data['policy_sms']
        obj.policy_email = request.data['policy_email']
        obj.acc_support_sms = request.data['acc_support_sms']
        obj.acc_support_email = request.data['acc_support_email']
        if (obj.messages_sms is True or obj.messages_email is True
                or obj.reminders_sms is True or obj.reminders_email is True
                or obj.promotion_sms is True or obj.promotion_email is True
                or obj.policy_sms is True or obj.policy_email is True
                or obj.acc_support_sms is True or obj.acc_support_email is True):
            obj.unsubscribe = False
        obj.put()
        return obj
Code example #16
def get_parts(head):
    # We could alternatively achieve this via an ancestor query (retrieving the
    # head and its parts simultaneously) to give us strong consistency. But the
    # downside of that is that it bypasses the automatic memcache layer built
    # into ndb, which we want to take advantage of.
    if head.numparts == 1:
        return []
    logging.info("retrieving %d extra part(s)", head.numparts - 1)
    filename = head.key.string_id()
    keys = [ndb.Key('ProcessedFilePart', filename + ':' + str(i))
            for i in range(1, head.numparts)]
    num_tries = 0
    while True:
        num_tries += 1
        if num_tries >= 10:
            logging.error("tried too many times, giving up")
            raise werkzeug.exceptions.InternalServerError()
        parts = ndb.get_multi(keys)
        if any(p.etag != head.etag for p in parts):
            logging.warning("got differing etags, retrying")
        else:
            return sorted(parts, key=lambda p: p.key.string_id())
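The head/part layout that get_parts() relies on might look roughly like the models below; these definitions are guesses based on the key names and attributes used above, not the project's actual code.

from google.cloud import ndb


class ProcessedFile(ndb.Model):
    # The "head" entity, keyed by filename, carrying metadata about its parts.
    etag = ndb.StringProperty()
    numparts = ndb.IntegerProperty()


class ProcessedFilePart(ndb.Model):
    # Extra chunks, keyed by "<filename>:<i>" for i in 1..numparts-1.
    etag = ndb.StringProperty()
    data = ndb.BlobProperty()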
Code example #17
    def template_vars(self):
        vars = dict(self.request.params)

        entity = None
        key = vars.get('auth_entity')
        if key:
            entity = vars['entity'] = ndb.Key(urlsafe=key).get()

        if entity:
            vars.setdefault('site', vars['entity'].site_name().lower())

        vars.update({
            silo + '_html': module.StartHandler.button_html(
                '/%s/start_auth' % silo,
                image_prefix='/oauth_dropins/static/',
                outer_classes='col-lg-2 col-sm-4 col-xs-6',
                scopes=SCOPE_OVERRIDES.get(silo, ''),
            )
            for silo, module in OAUTHS.items()
        })

        return vars
Code example #18
File: util.py Project: swamim/bridgy
def load_source(handler, param='source_key'):
  """Extracts a URL-safe key from a query parameter and loads a source object.

  Returns HTTP 400 if the parameter is not provided or the source doesn't exist.

  Args:
    handler: RequestHandler
    param: string

  Returns: Source object
  """
  try:
    source = ndb.Key(urlsafe=util.get_required_param(handler, param)).get()
  except (binascii.Error, google.protobuf.message.DecodeError):
    msg = 'Bad value for %s' % param
    logging.warning(msg, stack_info=True)
    handler.abort(400, msg)

  if not source:
    handler.abort(400, 'Source key not found')

  return source
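A usage sketch from a calling handler, in the same style; the handler class and its webapp2 base are placeholders, not bridgy code.

class RetryHandler(webapp2.RequestHandler):
  def post(self):
    source = load_source(self, param='source_key')
    # If we get here, the key parsed and the entity exists;
    # otherwise load_source() already aborted with HTTP 400.
    logging.info('Loaded source %s', source.key)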
Code example #19
    def test_simplify_range(self):
        """Test simplifying commit range."""
        message = mock.Mock()
        message.attributes = {
            'source_id': 'oss-fuzz:123',
            'allocated_id': 'OSV-2020-1337',
        }

        regress_result = osv.RegressResult(
            id='oss-fuzz:123',
            commit=('a2ba949290915d445d34d0e8e9de2e7ce38198fc:'
                    'eefe8ec3f1f90d0e684890e810f3f21e8500a4cd'),
            repo_url='https://repo.com/repo',
            issue_id='9001',
            project='project',
            ecosystem='ecosystem',
            summary='Heap-buffer-overflow in Foo',
            severity='MEDIUM',
            reference_urls=['https://url/'])
        regress_result.put()

        fix_result = osv.FixResult(
            id='oss-fuzz:123',
            commit=('b1c95a196f22d06fcf80df8c6691cd113d8fefff:'
                    '8d8242f545e9cec3e6d0d2e3f5bde8be1c659735'),
            repo_url='https://repo.com/repo',
            project='project',
            ecosystem='ecosystem',
            summary='Heap-buffer-overflow in Foo',
            details='DETAILS',
            severity='MEDIUM',
            reference_urls=['https://url/'])
        fix_result.put()

        oss_fuzz.process_impact_task('oss-fuzz:123', message)
        self.expect_dict_equal(
            'simplify_range',
            ndb.Key(osv.Bug, 'OSV-2020-1337').get()._to_dict())
Code example #20
    def test_update_last_successful_build(self):
        """When last successful build is available at both places."""
        with ndb.Client().context():
            project = {
                'name': 'test-project',
                'last_successful_build': {
                    'build_id': '2',
                    'finish_time': 'test_time'
                }
            }
            LastSuccessfulBuild(id='test-project-fuzzing',
                                build_tag='fuzzing',
                                project='test-project',
                                build_id='1',
                                finish_time='test_time').put()

            update_build_status.update_last_successful_build(
                project, 'fuzzing')
            expected_build_id = '2'
            self.assertEqual(
                expected_build_id,
                ndb.Key(LastSuccessfulBuild,
                        'test-project-fuzzing').get().build_id)
Code example #21
  def finish(self, auth_entity, state=None):
    self.state = util.decode_oauth_state(state)
    if not state:
      self.error('If you want to publish or preview, please approve the prompt.')
      return self.redirect('/')

    source = ndb.Key(urlsafe=self.state['source_key']).get()
    if auth_entity is None:
      self.error('If you want to publish or preview, please approve the prompt.')
    elif not auth_entity.is_authority_for(source.auth_entity):
      self.error('Please log into %s as %s to publish that page.' %
                 (source.GR_CLASS.NAME, source.name))
    else:
      result = self._run()
      if result and result.content:
        self.messages.add('Done! <a href="%s">Click here to view.</a>' %
                          self.entity.published.get('url'))
        granary_message = self.entity.published.get('granary_message')
        if granary_message:
          self.messages.add(granary_message)
      # otherwise error() added an error message

    return self.redirect(source.bridgy_url(self))
Code example #22
def test_delete_entity_with_global_cache(ds_entity, client_context):
    entity_id = test_utils.system.unique_resource_id()
    ds_entity(KIND, entity_id, foo=42)

    class SomeKind(ndb.Model):
        foo = ndb.IntegerProperty()

    key = ndb.Key(KIND, entity_id)
    cache_key = _cache.global_cache_key(key._key)
    global_cache = global_cache_module._InProcessGlobalCache()
    cache_dict = global_cache_module._InProcessGlobalCache.cache

    with client_context.new(global_cache=global_cache).use():
        assert key.get().foo == 42
        assert cache_key in cache_dict

        assert key.delete() is None
        assert cache_key not in cache_dict

        # This is py27 behavior. Not entirely sold on leaving _LOCKED value for
        # Datastore misses.
        assert key.get() is None
        assert cache_dict[cache_key][0] == b"0"
Code example #23
File: tasks.py Project: Tiamat-Tech/bridgy
    def post(self):
        self.request.headers[
            'Content-Type'] = 'application/x-www-form-urlencoded'
        logging.debug('Params: %s', list(self.request.params.items()))
        if not self.lease(
                ndb.Key(urlsafe=self.request.params['response_key'])):
            return

        source = self.source
        poll_estimate = self.entity.created - datetime.timedelta(seconds=61)
        logging.info('Created by this poll: %s/%s', util.host_url(self),
                     logs.url(poll_estimate, source.key))

        self.activities = [json_loads(a) for a in self.entity.activities_json]
        response_obj = json_loads(self.entity.response_json)
        if (not source.is_activity_public(response_obj) or
                not all(source.is_activity_public(a)
                        for a in self.activities)):
            logging.info('Response or activity is non-public. Dropping.')
            self.complete()
            return

        self.send_webmentions()
Code example #24
    def dispatch_request(self):
        logger.debug(f'Params: {list(request.values.items())}')
        if not self.lease(ndb.Key(urlsafe=request.values['response_key'])):
            return ('', ERROR_HTTP_RETURN_CODE) if getattr(g, 'failed',
                                                           None) else 'OK'

        source = g.source
        poll_estimate = self.entity.created - datetime.timedelta(seconds=61)
        poll_url = util.host_url(logs.url(poll_estimate, source.key))
        logger.info(f'Created by this poll: {poll_url}')

        self.activities = [json_loads(a) for a in self.entity.activities_json]
        response_obj = json_loads(self.entity.response_json)
        if (not source.is_activity_public(response_obj) or
                not all(source.is_activity_public(a)
                        for a in self.activities)):
            logger.info('Response or activity is non-public. Dropping.')
            self.complete()
            return ''

        self.send_webmentions()
        return ('',
                ERROR_HTTP_RETURN_CODE) if getattr(g, 'failed', None) else 'OK'
Code example #25
def getProducts(request):
    ancestor_key = ndb.Key("Product", "product")
    all_products = []
    if request.query_params.get('category_key', None):
        products = Products.query(
            Products.category_key == request.query_params['category_key'],
            ancestor=ancestor_key).fetch()
    else:
        products = Products.query(ancestor=ancestor_key).fetch()
    for p in products:
        all_products.append({
            "category": {
                "key": p.category_key,
                "name": Products.get_with_key(p.category_key).name
            },
            "date": p.date,
            "description": p.description,
            "images": p.images,
            "price": p.price,
            "quantity": p.quantity,
            "title": p.title,
            "id": p.key.urlsafe(),
            "status": p.product_status
        })
    if all_products:
        return Response(
            {
                'status': status.HTTP_200_OK,
                'products': all_products
            }, status.HTTP_200_OK)
    else:
        return Response(
            {
                'status': status.HTTP_200_OK,
                'message': "No products found",
                'products': all_products
            }, status.HTTP_200_OK)
Code example #26
File: load_db.py Project: cubingusa/org
def process_export(old_export_path, new_export_path):
  for table, cls in get_tables():
    logging.info('Processing ' + table)
    table_suffix = '/WCA_export_' + table + '.tsv'
    old_rows = read_table(old_export_path + table_suffix, cls, False)
    new_rows = read_table(new_export_path + table_suffix, cls, True)
    logging.info('Old: %d' % len(old_rows))
    logging.info('New: %d' % len(new_rows))
    write_table(new_export_path + table_suffix, new_rows, cls)
    modifier = get_modifier(table)

    objects_to_put = []
    keys_to_delete = []
    for key in new_rows:
      row = new_rows[key]
      if key in old_rows and old_rows[key] == row:
        continue
      else:
        obj = cls(id=key)
        obj.ParseFromDict(row)
        if modifier:
          modifier(obj)
        objects_to_put += [obj]
    for key, row in old_rows.items():
      if key in new_rows:
        continue
      else:
        keys_to_delete += [ndb.Key(cls, key)]
    logging.info('Putting %d objects' % len(objects_to_put))
    while objects_to_put:
      batch_size = 500
      subslice = objects_to_put[:batch_size]
      objects_to_put = objects_to_put[batch_size:]
      ndb.put_multi(subslice)
    logging.info('Deleting %d objects' % len(keys_to_delete))
    ndb.delete_multi(keys_to_delete)
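The put loop above writes in chunks of 500 entities; the same batching pattern as a small, generic helper (a sketch, not from the project):

def put_in_batches(entities, batch_size=500):
  # Write entities in fixed-size batches so no single ndb.put_multi() call
  # carries an unbounded payload.
  while entities:
    ndb.put_multi(entities[:batch_size])
    entities = entities[batch_size:]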
Code example #27
File: jobs.py Project: phwd/clusterfuzz
    def post(self):
        """Handle a post request."""
        key = helpers.get_integer_key(self.request)
        job = ndb.Key(data_types.Job, key).get()
        if not job:
            raise helpers.EarlyExitException('Job not found.', 400)

        # Delete from fuzzers' jobs' list.
        for fuzzer in ndb_utils.get_all_from_model(data_types.Fuzzer):
            if job.name in fuzzer.jobs:
                fuzzer.jobs.remove(job.name)
                fuzzer.put()

        # Delete associated fuzzer-job mapping(s).
        query = data_types.FuzzerJob.query()
        query = query.filter(data_types.FuzzerJob.job == job.name)
        for mapping in ndb_utils.get_all_from_query(query):
            mapping.key.delete()

        # Delete job.
        job.key.delete()

        helpers.log('Deleted job %s' % job.name, helpers.MODIFY_OPERATION)
        self.redirect('/jobs')
Code example #28
def create_project_settings(project, info, service_account):
  """Setup settings for ClusterFuzz (such as CPU distribution)."""
  key = ndb.Key(data_types.OssFuzzProject, project)
  oss_fuzz_project = key.get()

  # Expecting to run a blackbox fuzzer, so use high end hosts.
  is_high_end = info.get('blackbox', False)

  ccs = ccs_from_info(info)
  language = info.get('language')

  if oss_fuzz_project:
    if oss_fuzz_project.service_account != service_account['email']:
      oss_fuzz_project.service_account = service_account['email']
      oss_fuzz_project.put()

    if oss_fuzz_project.high_end != is_high_end:
      oss_fuzz_project.high_end = is_high_end
      oss_fuzz_project.put()

    if oss_fuzz_project.ccs != ccs:
      oss_fuzz_project.ccs = ccs
      oss_fuzz_project.put()
  else:
    if language in MEMORY_SAFE_LANGUAGES:
      cpu_weight = OSS_FUZZ_MEMORY_SAFE_LANGUAGE_PROJECT_WEIGHT
    else:
      cpu_weight = OSS_FUZZ_DEFAULT_PROJECT_CPU_WEIGHT

    data_types.OssFuzzProject(
        id=project,
        name=project,
        high_end=is_high_end,
        cpu_weight=cpu_weight,
        service_account=service_account['email'],
        ccs=ccs).put()
Code example #29
def test_crud_without_datastore(ds_entity, client_context):
    entity_id = test_utils.system.unique_resource_id()

    class SomeKind(ndb.Model):
        foo = ndb.IntegerProperty()
        bar = ndb.StringProperty()
        baz = ndb.StringProperty()

    global_cache = global_cache_module._InProcessGlobalCache()
    with client_context.new(global_cache=global_cache).use() as context:
        context.set_global_cache_policy(None)  # Use default
        context.set_datastore_policy(False)  # Don't use Datastore

        key = ndb.Key(KIND, entity_id)
        SomeKind(foo=42, bar="none", baz="night", _key=key).put()

        entity = key.get()
        assert isinstance(entity, SomeKind)
        assert entity.foo == 42
        assert entity.bar == "none"
        assert entity.baz == "night"

        key.delete()
        assert key.get() is None
Code example #30
def get_entity_key_by_keystr(expected_kind, keystr):
    """
    Helper to get a key for an ndb entity by its urlsafe keystr
    Args:
        expected_kind: The expected kind of ndb.Key as case-sensative string
        keystr: ndb.Key string representation
    Returns:
        An instance of Entity with key of keystr
    Raises:
        ValueError: The keystr is None or of wrong type
        ValueError: The expected_kind does not match the kind of keystr
    """

    if not keystr or not isinstance(keystr, str):
        raise ValueError(_keystr_type_err % keystr)

    # Resolve the ndb key
    ndb_key = ndb.Key(urlsafe=keystr)

    # Validate the kind
    if ndb_key.kind() != expected_kind:
        raise ValueError(_kind_err % (expected_kind, ndb_key.kind()))

    return ndb_key
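A small usage sketch for the helper above; the kind name and the urlsafe string are placeholders.

urlsafe_str = '...'  # placeholder: a urlsafe key string, e.g. from a request parameter
try:
    key = get_entity_key_by_keystr('Account', urlsafe_str)
    entity = key.get()
except ValueError:
    # keystr was empty, not a string, or decoded to a different kind.
    entity = None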