示例#1
0
    def _createSessionObject(self, request):
        """
        Create a Session under the conference identified by
        request.websafeConferenceKey, returning a SessionForm copy of the
        stored entity. Only the conference owner may add sessions.

        Raises:
            endpoints.UnauthorizedException: no user is signed in.
            endpoints.NotFoundException: the websafe key resolves to no
                conference.
            endpoints.ForbiddenException: caller is not the conference owner.
            endpoints.BadRequestException: the 'name' field is missing.
        """
        # check if user is already logged in
        user = endpoints.get_current_user()
        if not user:
            raise endpoints.UnauthorizedException('Authorization required')
        user_id = getUserId(user)
        # convert websafeKey to a conference key
        conf = ndb.Key(urlsafe=request.websafeConferenceKey).get()
        # check that conference exists
        if not conf:
            raise endpoints.NotFoundException(
                'No conference found with key: %s' %
                request.websafeConferenceKey)
        # check that user is the owner
        if user_id != conf.organizerUserId:
            raise endpoints.ForbiddenException(
                'The conference can only be changed by the owner.')

        # NOTE(review): the backslash continuation embeds the following
        # line's leading spaces into the message text — confirm intended.
        if not request.name:
            raise endpoints.BadRequestException("Session 'name' field \
                required")
        # copy SessionForm/ProtoRPC Message into dict
        data = {
            field.name: getattr(request, field.name)
            for field in request.all_fields()
        }
        # drop form-only fields that have no counterpart in the Session model
        del data['websafeKey']
        del data['websafeConfKey']
        del data['websafeConferenceKey']
        # add default values for those missing (both data model & outbound
        # Message)
        for df in DEFAULT_SESS:
            if data[df] in (None, []):
                data[df] = DEFAULT_SESS[df]
                setattr(request, df, DEFAULT_SESS[df])
        # convert type of session object to string
        if data['typeOfSession']:
            data['typeOfSession'] = str(data['typeOfSession'])
        # convert date from a string (ISO "YYYY-MM-DD" prefix) to Date object
        if data['date']:
            data['date'] = datetime.strptime(data['date'][:10],
                                             "%Y-%m-%d").date()
        # convert startTime from a string ("HH:MM" prefix) to Time object
        if data['startTime']:
            data['startTime'] = datetime.strptime(data['startTime'][:5],
                                                  "%H:%M").time()
        # convert duration from a string to Time objects
        if data['duration']:
            data['duration'] = datetime.strptime(data['duration'][:5],
                                                 "%H:%M").time()
        # convert speakers from strings as list to Speaker entity keys as list
        if data['speakers']:
            # Check that each provided speaker for a session exists in the
            # database. If the speaker exists in the database, then retrieve
            # the key value. Otherwise, make a new speaker having the provided
            # name as the key.
            speakersForSession = []
            for speaker in data['speakers']:
                # The function get_or_insert(key_name, args) gets as a
                # transaction an existing entity or it makes a new entity,
                # which eliminates the problem of duplicate speakers when
                # multiple sessions that have the same speaker are formed
                # during the same time. The speaker string value is put in
                # lowercase with no whitespaces for the key name.
                spkr_for_sess_key = Speaker.get_or_insert(
                    speaker.lower().strip().replace(" ",
                                                    "_"), name=speaker).key
                # add speaker for session key to speakersForSession list
                speakersForSession.append(spkr_for_sess_key)
            # update data['speakers'] with newly formed list of keys
            data['speakers'] = speakersForSession

        # get Conference key
        c_key = conf.key
        # designate new Session ID with the Conference key as a parent
        s_id = Session.allocate_ids(size=1, parent=c_key)[0]
        # create key for new Session having Conference key as a parent
        s_key = ndb.Key(Session, s_id, parent=c_key)
        # put key into dict
        data['key'] = s_key
        # create a Session
        Session(**data).put()
        # allows for the session to be copied back to form
        sess = s_key.get()
        # reviews speakers for conference when task is added to queue
        taskqueue.add(params={'c_key_str': c_key.urlsafe()},
                      url='/tasks/review_speakers_for_sessions')
        return self._copySessionToForm(sess)
示例#2
0
File: tasks.py  Project: v1cker/bridgy
    def post(self, *path_args):
        """Poll one source for new activity (task queue handler).

        Reads 'source_key' and 'last_polled' from the task params, drops
        the task if the source is missing/disabled or the poll is a
        duplicate, marks the source as polling, then runs self.poll().
        Failures are classified into disable / rate-limit / retryable
        error buckets via the source's updates dict.
        """
        logging.debug('Params: %s', self.request.params)

        key = self.request.params['source_key']
        source = ndb.Key(urlsafe=key).get()
        if not source or source.status == 'disabled' or 'listen' not in source.features:
            logging.error('Source not found or disabled. Dropping task.')
            return
        logging.info('Source: %s %s, %s', source.label(),
                     source.key.string_id(), source.bridgy_url(self))

        # dedupe: only the task whose last_polled matches the stored
        # timestamp is allowed to proceed.
        last_polled = self.request.params['last_polled']
        if last_polled != source.last_polled.strftime(
                util.POLL_TASK_DATETIME_FORMAT):
            logging.warning(
                'duplicate poll task! deferring to the other task.')
            return

        logging.info('Last poll: %s', self._last_poll_url(source))

        # mark this source as polling
        source.updates = {
            'poll_status': 'polling',
            'last_poll_attempt': util.now_fn(),
            'rate_limited': False,
        }
        source = models.Source.put_updates(source)

        source.updates = {}
        try:
            self.poll(source)
        except Exception, e:
            source.updates['poll_status'] = 'error'
            code, body = util.interpret_http_exception(e)
            if code in source.DISABLE_HTTP_CODES or isinstance(
                    e, models.DisableSource):
                # the user deauthorized the bridgy app, so disable this source.
                # let the task complete successfully so that it's not retried.
                logging.warning('Disabling source due to: %s' % e,
                                exc_info=True)
                source.updates.update({
                    'status': 'disabled',
                    'poll_status': 'ok',
                })
                body = '%s\nLast poll: %s' % (source.bridgy_url(self),
                                              self._last_poll_url(source))
                if source.is_beta_user():
                    util.email_me(subject='Bridgy: disabled %s' %
                                  source.label(),
                                  body=body)

            elif code in source.RATE_LIMIT_HTTP_CODES:
                logging.info(
                    'Rate limited. Marking as error and finishing. %s', e)
                source.updates['rate_limited'] = True
            elif ((code and int(code) / 100 == 5)
                  or (code == '400' and isinstance(source, flickr.Flickr))
                  or util.is_connection_failure(e)):
                # 5xx / connection failures: abort so the task queue retries.
                logging.error(
                    'API call failed. Marking as error and finishing. %s: %s\n%s',
                    code, body, e)
                self.abort(util.ERROR_HTTP_RETURN_CODE)
            else:
                raise
示例#3
0
 def make_key(user_key, sport_category_id):
     """Build a SportProfile key nested under *user_key*.

     Appends a (SportProfile, sport_category_id) pair to the user key's
     path so the resulting key is a child of the user's key chain.
     """
     key_path = list(user_key.pairs())
     key_path.append((SportProfile, sport_category_id))
     return ndb.Key(pairs=key_path)
示例#4
0
 def test_request_key_to_datetime(self):
     """A request key round-trips back to its embedded timestamp."""
     request_key = ndb.Key(task_request.TaskRequest, 0x7f14acec2fcfffff)
     # Resolution is only kept at millisecond level compared to
     # datetime_to_request_base_id() by design.
     expected = datetime.datetime(2012, 1, 2, 3, 4, 5, 123000)
     self.assertEqual(expected,
                      task_request.request_key_to_datetime(request_key))
示例#5
0
    def testGetGraphJson_WithAnomalies_ReturnsCorrectAnomalyAnnotations(self):
        """GetGraphJson attaches g_anomaly, tracing-URI and series
        annotations for anomalies stored on the requested test path."""
        self._AddTestColumns()

        # Anomaly 1: no bug id, asserted below as an improvement.
        anomaly1 = anomaly.Anomaly(
            start_revision=14999,
            end_revision=15000,
            test=utils.TestKey('ChromiumGPU/win7/dromaeo/dom'),
            median_before_anomaly=100,
            median_after_anomaly=200)
        anomaly1.SetIsImprovement()
        key1 = anomaly1.put()

        # Anomaly 2: has bug 12345, asserted below as not an improvement.
        anomaly2 = anomaly.Anomaly(
            start_revision=15004,
            end_revision=15006,
            test=utils.TestKey('ChromiumGPU/win7/dromaeo/dom'),
            median_before_anomaly=200,
            median_after_anomaly=100,
            bug_id=12345)
        anomaly2.SetIsImprovement()
        key2 = anomaly2.put()

        # Anomaly 3: attached via an old-style Master/Bot/Test key.
        old_style_test_key = ndb.Key('Master', 'ChromiumGPU', 'Bot', 'win7',
                                     'Test', 'dromaeo', 'Test', 'dom')
        anomaly3 = anomaly.Anomaly(start_revision=15008,
                                   end_revision=15009,
                                   test=old_style_test_key,
                                   median_before_anomaly=100,
                                   median_after_anomaly=200)
        key3 = anomaly3.put()

        # Give the test entity metadata that should surface in the
        # series annotations below.
        test = utils.TestKey('ChromiumGPU/win7/dromaeo/dom').get()
        test.description = 'About this test'
        test.units = 'ms'
        test.buildername = 'Windows 7 (1)'
        test.UpdateSheriff()
        test.put()

        flot_json_str = graph_json.GetGraphJson(
            {
                'ChromiumGPU/win7/dromaeo/dom': [],
            }, rev=15000, num_points=8)

        flot = json.loads(flot_json_str)
        annotations = flot['annotations']
        self.assertEqual(5, len(annotations['0']))

        # Verify key fields of the annotation dictionary for the first anomaly.
        anomaly_one_annotation = annotations['0']['0']['g_anomaly']
        self.assertEqual(14999, anomaly_one_annotation['start_revision'])
        self.assertEqual(15000, anomaly_one_annotation['end_revision'])
        self.assertEqual('100.0%', anomaly_one_annotation['percent_changed'])
        self.assertIsNone(anomaly_one_annotation['bug_id'])
        self.assertEqual(key1.urlsafe(), anomaly_one_annotation['key'])
        self.assertTrue(anomaly_one_annotation['improvement'])

        # Verify key fields of the annotation dictionary for the second anomaly.
        anomaly_two_annotation = annotations['0']['2']['g_anomaly']
        self.assertEqual(15004, anomaly_two_annotation['start_revision'])
        self.assertEqual(15006, anomaly_two_annotation['end_revision'])
        self.assertEqual('50.0%', anomaly_two_annotation['percent_changed'])
        self.assertEqual(12345, anomaly_two_annotation['bug_id'])
        self.assertEqual(key2.urlsafe(), anomaly_two_annotation['key'])
        self.assertFalse(anomaly_two_annotation['improvement'])

        # Verify the key for the third anomaly.
        anomaly_three_annotation = annotations['0']['3']['g_anomaly']
        self.assertEqual(key3.urlsafe(), anomaly_three_annotation['key'])

        # Verify the tracing link annotations
        self.assertEqual('http://trace/15000',
                         annotations['0']['0']['a_tracing_uri'])
        self.assertEqual('http://trace/15012',
                         annotations['0']['4']['a_tracing_uri'])

        # Verify the series annotations.
        self.assertEqual(
            {
                '0': {
                    'name': 'dom',
                    'path': 'ChromiumGPU/win7/dromaeo/dom',
                    'units': 'ms',
                    'better': 'Higher',
                    'description': 'About this test',
                    'can_bisect': True,
                }
            }, annotations['series'])
示例#6
0
def cron_update_buckets():
    """Synchronizes Bucket entities with configs fetched from luci-config.

    For each project's buckets: normalizes short ACL identities, updates
    changed/unclaimed Bucket entities transactionally, and finally
    deletes Bucket entities no longer declared by any project config.
    """
    config_map = config.get_project_configs(cfg_path(),
                                            project_config_pb2.BuildbucketCfg)

    # project id -> set of bucket names that project's config declares.
    buckets_of_project = {
        pid: set(b.name for b in pcfg.buckets)
        for pid, (_, pcfg) in config_map.iteritems()
    }

    for project_id, (revision, project_cfg) in config_map.iteritems():
        # revision is None in file-system mode. Use SHA1 of the config as revision.
        revision = revision or 'sha1:%s' % hashlib.sha1(
            project_cfg.SerializeToString()).hexdigest()
        for bucket_cfg in project_cfg.buckets:
            # Cheap non-transactional pre-check to skip up-to-date buckets.
            bucket = Bucket.get_by_id(bucket_cfg.name)
            if (bucket and bucket.project_id == project_id
                    and bucket.revision == revision):
                continue

            # Normalize bare identities ("name") to the "user:name" form.
            for acl in bucket_cfg.acls:
                if acl.identity and ':' not in acl.identity:
                    acl.identity = 'user:%s' % acl.identity

            @ndb.transactional
            def update_bucket():
                # Re-read inside the transaction; the pre-check above is racy.
                bucket = Bucket.get_by_id(bucket_cfg.name)
                if bucket and bucket.project_id != project_id:
                    # Does bucket.project_id still claim this bucket?
                    if bucket_cfg.name in buckets_of_project.get(
                            bucket.project_id, []):
                        logging.error(
                            'Failed to reserve bucket %s for project %s: '
                            'already reserved by %s', bucket_cfg.name,
                            project_id, bucket.project_id)
                        return
                if (bucket and bucket.project_id == project_id and
                        bucket.revision == revision):  # pragma: no coverage
                    return

                report_reservation = bucket is None or bucket.project_id != project_id
                Bucket(
                    id=bucket_cfg.name,
                    project_id=project_id,
                    revision=revision,
                    config_content=protobuf.text_format.MessageToString(
                        bucket_cfg),
                ).put()
                if report_reservation:
                    logging.warning('Reserved bucket %s for project %s',
                                    bucket_cfg.name, project_id)
                logging.info('Updated bucket %s to revision %s',
                             bucket_cfg.name, revision)

            # Called immediately, so the closure over the loop variables
            # (bucket_cfg, project_id, revision) is safe here.
            update_bucket()

    # Delete/unreserve non-existing buckets.
    all_bucket_keys = Bucket.query().fetch(keys_only=True)
    existing_bucket_keys = [
        ndb.Key(Bucket, b) for buckets in buckets_of_project.itervalues()
        for b in buckets
    ]
    to_delete = set(all_bucket_keys).difference(existing_bucket_keys)
    if to_delete:
        logging.warning('Deleting buckets: %s',
                        ', '.join(k.id() for k in to_delete))
        ndb.delete_multi(to_delete)
示例#7
0
    def put(self, *args, **kwargs):
        """Update a User entity identified by a websafe key in the URL.

        args[0] is the urlsafe ndb key of the user; the request body is a
        JSON object with the fields to change. Password/events updates are
        rejected here, and username/email must stay unique across Users.
        Writes the updated user (plus a 'self' link) back as JSON.
        """
        try:
            # resolve the user from the websafe key in the URL path
            user_key = ndb.Key(urlsafe=args[0])
            user = user_key.get()

            # get new information from request body
            new_data = json.loads(self.request.body)

            # check if new_data is valid
            if not checkRequestBody(self, new_data):
                return

            # not allowed to update password or events using this method
            if ('password' in new_data) or ('events' in new_data):
                badRequest(
                    self,
                    "Not allowed to update password or events using this method"
                )
                return

            # if updating username check if username is unique
            if 'username' in new_data:
                # Bind the value as a GQL parameter (:1) instead of string
                # concatenation — the previous concatenated query was both
                # syntactically broken and open to GQL injection.
                user_query_object = ndb.gql(
                    "SELECT * FROM User WHERE username = :1",
                    str(new_data['username']))

                # if query returned with 1 or more objects, username is in use
                if user_query_object.count() > 0:
                    badRequest(self,
                               "Username is associated with another account")
                    return

                # if query returned no result, update username
                user.username = new_data['username']

            # if updating email check if email is associated with other account
            if 'email' in new_data:
                # same parameter binding as the username query above
                user_query_object = ndb.gql(
                    "SELECT * FROM User WHERE email = :1",
                    str(new_data['email']))

                # if query returned a result, report error and return
                if user_query_object.count() > 0:
                    badRequest(self,
                               "Email is associated with another account")
                    return

                # if query returned no result, update email
                user.email = new_data['email']

            # update all other information
            if 'first_name' in new_data:
                user.first_name = new_data['first_name']

            if 'last_name' in new_data:
                user.last_name = new_data['last_name']

            # save to datastore
            user.put()

            # create self link and return
            user_dict = user.to_dict()
            # NOTE(review): relies on User exposing an `id` attribute; a
            # stock ndb model would need user.key.id() — confirm.
            user_dict['self'] = "/users/" + user.id
            self.response.write(json.dumps(user_dict))

        except Exception:
            # Broad catch kept for backward compatibility: any failure
            # (bad key, malformed JSON, datastore error) maps to 400.
            badRequest(self, "Invalid userID")
            return
示例#8
0
    def maybe_add_or_delete_source(self, source_cls, auth_entity, state,
                                   **kwargs):
        """Adds or deletes a source if auth_entity is not None.

    Used in each source's oauth-dropins :meth:`CallbackHandler.finish()` and
    :meth:`CallbackHandler.get()` methods, respectively.

    Args:
      source_cls: source class, e.g. :class:`instagram.Instagram`
      auth_entity: ouath-dropins auth entity
      state: string, OAuth callback state parameter. a JSON serialized dict
        with operation, feature, and an optional callback URL. For deletes,
        it will also include the source key
      kwargs: passed through to the source_cls constructor

    Returns:
      source entity if it was created or updated, otherwise None
    """
        state_obj = util.decode_oauth_state(state)
        operation = state_obj.get('operation', 'add')
        feature = state_obj.get('feature')
        callback = state_obj.get('callback')
        user_url = state_obj.get('user_url')

        logging.debug(
            'maybe_add_or_delete_source with operation=%s, feature=%s, callback=%s',
            operation, feature, callback)

        if operation == 'add':  # this is an add/update
            if not auth_entity:
                # user declined OAuth; only queue the notice if no other
                # message is already pending
                if not self.messages:
                    self.messages.add(
                        "OK, you're not signed up. Hope you reconsider!")
                if callback:
                    callback = util.add_query_params(callback,
                                                     {'result': 'declined'})
                    logging.debug(
                        'user declined adding source, redirect to external callback %s',
                        callback)
                    # call super.redirect so the callback url is unmodified
                    super(Handler, self).redirect(callback.encode('utf-8'))
                else:
                    self.redirect('/')
                return

            # a new/updated source invalidates the cached /users page
            CachedPage.invalidate('/users')
            logging.info('%s.create_new with %s',
                         source_cls.__class__.__name__,
                         (auth_entity.key, state, kwargs))
            source = source_cls.create_new(
                self,
                auth_entity=auth_entity,
                features=feature.split(',') if feature else [],
                user_url=user_url,
                **kwargs)

            if source:
                # add to login cookie
                logins = self.get_logins()
                logins.append(
                    Login(path=source.bridgy_path(),
                          site=source.SHORT_NAME,
                          name=source.label_name()))
                self.set_logins(logins)

            # redirect: external callback if given, else the websites
            # editor when no domains are set yet, else the source page
            if callback:
                callback = util.add_query_params(
                    callback, {
                        'result': 'success',
                        'user': source.bridgy_url(self),
                        'key': source.key.urlsafe(),
                    } if source else {'result': 'failure'})
                logging.debug(
                    'finished adding source, redirect to external callback %s',
                    callback)
                # call super.redirect so the callback url is unmodified
                super(Handler, self).redirect(callback.encode('utf-8'))

            elif source and not source.domains:
                self.redirect(
                    '/edit-websites?' +
                    urllib.parse.urlencode({
                        'source_key': source.key.urlsafe(),
                    }))

            else:
                self.redirect(source.bridgy_url(self) if source else '/')

            return source

        else:  # this is a delete
            if auth_entity:
                self.redirect('/delete/finish?auth_entity=%s&state=%s' %
                              (auth_entity.key.urlsafe(), state))
            else:
                # delete needs a fresh auth approval; send the user back
                # to the source page (or home) with an explanation
                self.messages.add(
                    'If you want to disable, please approve the %s prompt.' %
                    source_cls.GR_CLASS.NAME)
                source_key = state_obj.get('source')
                if source_key:
                    source = ndb.Key(urlsafe=source_key).get()
                    if source:
                        return self.redirect(source.bridgy_url(self))

                self.redirect('/')
示例#9
0
def delete_user(id_user):
    """Delete the User entity with the given id and confirm as JSON."""
    ndb.Key(User, id_user).delete()
    confirmation = jsonify({'deleted': id_user})
    return make_response(confirmation, http.OK)
示例#10
0
def streamGroup_key(group_name=DEFAULT_GROUP_NAME):
    """Return the datastore ancestor key for the given stream group."""
    group_key = ndb.Key('streamGroup', group_name)
    return group_key
示例#11
0
def store_current_config(config_json):
  """Persist config_json under the singleton Configuration key."""
  singleton_key = ndb.Key(Configuration, CONFIG_SINGLETON_KEY)
  entity = Configuration(key=singleton_key, configuration=config_json)
  store(entity)
示例#12
0
def mailingUser_group(frequency=5):
    """Return the group key for mailing users at the given frequency."""
    frequency_name = str(frequency)
    return ndb.Key('frequency', frequency_name)
示例#13
0
 def create_game_history(cls, game, guess, found, index, message):
     """Record one guess for *game* as a GameHistory child entity."""
     parent_key = ndb.Key(Game, game)
     entry = GameHistory(parent=parent_key, guess=guess, found=found,
                         index=index, message=message)
     entry.put()
示例#14
0
    def _createConferenceObject(self, request):
        """
        Create or update Conference object, returning
        ConferenceForm/request.

        The signed-in user becomes the organizer; a confirmation email
        task is enqueued after the entity is stored.

        Raises:
            endpoints.UnauthorizedException: no user is signed in.
            endpoints.BadRequestException: the 'name' field is missing.
        """
        # preload necessary data items
        user = endpoints.get_current_user()
        if not user:
            raise endpoints.UnauthorizedException('Authorization required')
        user_id = getUserId(user)

        # NOTE(review): the backslash continuation embeds the following
        # line's leading spaces into the message text — confirm intended.
        if not request.name:
            raise endpoints.BadRequestException("Conference 'name' field \
                required")

        # copy ConferenceForm/ProtoRPC Message into dict
        data = {
            field.name: getattr(request, field.name)
            for field in request.all_fields()
        }
        # drop form-only fields not present on the Conference model
        del data['websafeKey']
        del data['organizerDisplayName']
        """
        Add default values for those missing
        (both data model & outbound Message).
        """

        for df in DEFAULTS:
            if data[df] in (None, []):
                data[df] = DEFAULTS[df]
                setattr(request, df, DEFAULTS[df])
        """
        Convert dates from strings to Date objects; set month based on
        start_date.
        """

        if data['startDate']:
            data['startDate'] = datetime.strptime(data['startDate'][:10],
                                                  "%Y-%m-%d").date()
            data['month'] = data['startDate'].month
        else:
            data['month'] = 0
        if data['endDate']:
            data['endDate'] = datetime.strptime(data['endDate'][:10],
                                                "%Y-%m-%d").date()

        # set seatsAvailable to be same as maxAttendees on creation
        if data["maxAttendees"] > 0:
            data["seatsAvailable"] = data["maxAttendees"]
            setattr(request, "seatsAvailable", data["maxAttendees"])

        # generate Profile Key based on user ID and Conference
        # ID based on Profile key get Conference key from ID
        p_key = ndb.Key(Profile, user_id)
        c_id = Conference.allocate_ids(size=1, parent=p_key)[0]
        c_key = ndb.Key(Conference, c_id, parent=p_key)
        data['key'] = c_key
        data['organizerUserId'] = request.organizerUserId = user_id

        # create Conference, send email to organizer confirming
        # creation of Conference & return (modified) ConferenceForm
        Conference(**data).put()
        taskqueue.add(params={
            'email': user.email(),
            'conferenceInfo': repr(request)
        },
                      url='/tasks/send_confirmation_email')
        return request
    def test_import_revision(self):
        """_import_revision fetches the archive once and persists the
        ConfigSet, Revision, File and Blob entities; a second run for an
        already-imported revision fetches nothing from gitiles."""
        self.mock_get_archive()

        gitiles_import._import_revision(
            'config_set',
            gitiles.Location(
                hostname='localhost',
                project='project',
                treeish='luci/config',
                path='/',
            ), self.test_commit, False, self.test_project_id)

        expected_latest_revision_url = (
            'https://localhost/project/+/a1841f40264376d170269ee9473ce924b7c2c4e9'
        )
        # the archive must be fetched exactly once, pinned to the commit sha
        gitiles.get_archive.assert_called_once_with(
            'localhost',
            'project',
            'a1841f40264376d170269ee9473ce924b7c2c4e9',
            '/',
            project_id=self.test_project_id,
            deadline=15)
        saved_config_set = storage.ConfigSet.get_by_id('config_set')
        self.assertIsNotNone(saved_config_set)
        self.assertEqual(saved_config_set.latest_revision,
                         self.test_commit.sha)
        self.assertEqual(saved_config_set.location,
                         'https://localhost/project/+/luci/config')
        self.assertEqual(saved_config_set.latest_revision_url,
                         expected_latest_revision_url)

        # Revision is stored as a child of the ConfigSet entity.
        saved_revision = storage.Revision.get_by_id(
            self.test_commit.sha, parent=saved_config_set.key)
        self.assertIsNotNone(saved_revision)

        # File is stored as a child of the Revision entity.
        saved_file = storage.File.get_by_id('test_archive/x',
                                            parent=saved_revision.key)
        self.assertIsNotNone(saved_file)
        self.assertEqual(saved_file.content_hash,
                         'v1:587be6b4c3f93f93c489c0111bba5596147a26cb')
        self.assertEqual(
            saved_file.url,
            os.path.join(expected_latest_revision_url, 'test_archive/x'))

        # Blob content is keyed by the file's content hash.
        saved_blob = storage.Blob.get_by_id(saved_file.content_hash)
        self.assertIsNotNone(saved_blob)
        self.assertEqual(saved_blob.content, 'x\n')
        self.assert_attempt(True, 'Imported')

        # Run second time, assert nothing is fetched from gitiles.
        ndb.Key(storage.ConfigSet, 'config_set').delete()
        gitiles.get_archive.reset_mock()
        gitiles_import._import_revision(
            'config_set',
            gitiles.Location(hostname='localhost',
                             project='project',
                             treeish='master',
                             path='/'), self.test_commit, False,
            self.test_project_id)
        self.assertFalse(gitiles.get_archive.called)
        self.assert_attempt(True, 'Up-to-date')
示例#16
0
def key_2():
    """Return the fixed key for the second default test model."""
    model_name = "DEFAULT_MODEL_NAME_2"
    return ndb.Key('TestModel', model_name)
    def parse(self, response):
        """
        Parse team info from FMSAPI.

        Args:
            response: decoded FMSAPI JSON dict with 'pageCurrent',
                'pageTotal' and 'teams' entries.

        Returns:
            A tuple of: list of (Team, DistrictTeam, Robot) model tuples,
            and a Boolean indicating if there are more pages to be fetched.
        """

        # Get team json
        # don't need to null check, if error, HTTP code != 200, so we wont' get here
        current_page = response['pageCurrent']
        total_pages = response['pageTotal']
        teams = response['teams']
        ret_models = []

        for teamData in teams:
            # Read defensively: the 'website' key may be missing entirely
            # (the previous direct index could raise KeyError).
            raw_website = teamData.get('website')
            # Fix issue where FIRST's API returns dummy website for all teams
            if raw_website is not None and 'www.firstinspires.org' in raw_website:
                website = None
            else:
                website = urlparse.urlparse(raw_website, 'http').geturl() if raw_website else None

                # Fix oddity with urlparse having three slashes after the scheme (#1635)
                website = website.replace('///', '//') if website else None

            # read homeCMP once instead of twice
            home_cmp = teamData.get('homeCMP')
            team = Team(
                id="frc{}".format(teamData['teamNumber']),
                team_number=teamData['teamNumber'],
                name=teamData['nameFull'],
                nickname=teamData['nameShort'],
                school_name=teamData.get('schoolName'),
                home_cmp=home_cmp.lower() if home_cmp else None,
                city=teamData['city'],
                state_prov=teamData['stateProv'],
                country=teamData['country'],
                website=website,
                rookie_year=teamData['rookieYear']
            )

            # District membership, when the team has a district code.
            districtTeam = None
            if teamData['districtCode']:
                districtAbbrev = DistrictType.abbrevs[teamData['districtCode'].lower()]
                districtTeam = DistrictTeam(
                    id=DistrictTeam.renderKeyName(self.year, districtAbbrev, team.key_name),
                    team=ndb.Key(Team, team.key_name),
                    year=self.year,
                    district=districtAbbrev,
                    district_key=ndb.Key(District, District.renderKeyName(self.year, teamData['districtCode'].lower())),
                )

            # Robot name entry for this season, if any.
            robot = None
            if teamData['robotName']:
                robot = Robot(
                    id=Robot.renderKeyName(team.key_name, self.year),
                    team=ndb.Key(Team, team.key_name),
                    year=self.year,
                    robot_name=teamData['robotName'].strip()
                )

            ret_models.append((team, districtTeam, robot))

        return (ret_models, (current_page < total_pages))
示例#18
0
def import_revision(config_set, location, create_config_set=False):
  """Imports a referenced Gitiles revision into a config set.

  Fetches the revision archive from Gitiles, validates every regular file
  in it, imports file contents as Blobs, then records Revision/File
  entities in a single transaction. A no-op if the Revision entity already
  exists (except possibly refreshing the ConfigSet, see below).

  If |create_config_set| is True, also creates/updates the ConfigSet entity
  with latest_revision set to |location.treeish|.

  Args:
    config_set: name of the config set being imported.
    location: Gitiles location; |location.treeish| must be a full 40-char
      lowercase hex sha.
    create_config_set: whether to also put a ConfigSet entity.
  """
  # re.match anchors only the beginning of the string; \Z anchors the end so
  # a value that merely *starts* with 40 hex chars is rejected too.
  assert re.match(r'[0-9a-f]{40}\Z', location.treeish), (
      '"%s" is not a valid sha' % location.treeish
  )
  logging.debug('Importing revision %s:%s', config_set, location.treeish)
  rev_key = ndb.Key(
      storage.ConfigSet, config_set,
      storage.Revision, location.treeish)
  if rev_key.get():
    # Revision already imported; at most refresh the latest_revision pointer.
    if create_config_set:
      storage.ConfigSet(id=config_set, latest_revision=location.treeish).put()
    return

  # Fetch archive, extract files and save them to Blobs outside ConfigSet
  # transaction.
  archive = location.get_archive(
      deadline=get_gitiles_config().fetch_archive_deadline)
  if not archive:
    logging.error(
        'Could not import %s: configuration does not exist', config_set)
    return

  logging.info('%s archive size: %d bytes' % (config_set, len(archive)))

  # Spelling fix: "entites" -> "entities" (local name only).
  entities_to_put = [storage.Revision(key=rev_key)]
  if create_config_set:
    entities_to_put.append(
        storage.ConfigSet(id=config_set, latest_revision=location.treeish))

  stream = StringIO.StringIO(archive)
  blob_futures = []
  with tarfile.open(mode='r|gz', fileobj=stream) as tar:
    for item in tar:
      if not item.isreg():  # pragma: no cover
        continue
      with contextlib.closing(tar.extractfile(item)) as extracted:
        content = extracted.read()
        # Abort the whole import on the first invalid config file.
        if not validation.validate_config(
            config_set, item.name, content, log_errors=True):
          logging.error('Invalid revision: %s/%s', config_set, location.treeish)
          return
        content_hash = storage.compute_hash(content)
        blob_futures.append(storage.import_blob_async(
            content=content, content_hash=content_hash))
        entities_to_put.append(
            storage.File(
                id=item.name,
                parent=rev_key,
                content_hash=content_hash)
        )

  # Wait for Blobs to be imported before proceeding.
  ndb.Future.wait_all(blob_futures)

  @ndb.transactional
  def do_import():
    # Re-check inside the transaction: another request may have imported
    # this revision between the earlier get() and now.
    if not rev_key.get():
      ndb.put_multi(entities_to_put)

  do_import()
  logging.info('Imported revision %s/%s', config_set, location.treeish)
示例#19
0
    def post(self):
        """Creates an Event from the JSON request body.

        Expects a JSON body with required fields 'name', 'date' and
        'account_id'; optional 'time', 'description' and 'all_day'.
        Stores the new Event, appends it to the owning User's events list
        when the account exists, and writes the created event as JSON.
        """
        # Get event details from the request body.
        event_data = json.loads(self.request.body)
        print(event_data)

        # Check request body (delegated validation).
        if not checkRequestBody(self, event_data):
            return

        # Make sure all required properties are present in request body.
        if ('name' not in event_data) or ('date' not in event_data) or (
                'account_id' not in event_data):
            badRequest(self, "Name, date and account_id are required")
            return

        # If all_day is true, ignore any time value in the request body.
        if event_data.get('all_day'):
            event_data['time'] = None

        # Default the optional fields that were omitted.
        event_data.setdefault('description', None)
        event_data.setdefault('all_day', False)
        event_data.setdefault('time', None)

        # Create the new event; the first put() assigns the datastore key.
        new_event = Event(name=event_data['name'],
                          date=str(event_data['date']),
                          time=str(event_data['time']),
                          description=event_data['description'],
                          all_day=event_data['all_day'],
                          account_id=str(event_data['account_id']))
        new_event.put()

        # Store the urlsafe key on the entity itself; needs a second put().
        new_event.id = str(new_event.key.urlsafe())
        new_event.put()

        # Check that account_id refers to a valid User in the datastore.
        # Bound parameter (:1) instead of string concatenation closes a
        # GQL-injection hole present in the original query.
        user_query_object = ndb.gql("SELECT * FROM User WHERE id = :1",
                                    str(event_data['account_id']))
        if user_query_object.count() > 0:
            # Update the user's events list.
            user_key = ndb.Key(urlsafe=str(new_event.account_id))
            user = user_key.get()
            event_json = {
                "id": new_event.id,
                "self": "/events/" + new_event.id
            }
            user.events.append(event_json)
            user.put()

        # Add self links to the event and the owning user account.
        new_event_dict = new_event.to_dict()
        new_event_dict['self'] = "/events/" + new_event.id
        # Bug fix: the original emitted "/users<key>" with no '/' separator,
        # unlike the "/events/" link above.
        new_event_dict['user_self'] = "/users/" + new_event.account_id

        # Return the created event.
        self.response.write(json.dumps(new_event_dict))
示例#20
0
 def get(self, reg_no):
     """Return the first Car entity stored under the ancestor key ('Car', reg_no)."""
     parent_key = ndb.Key('Car', reg_no)
     return Car.query(ancestor=parent_key).get()
示例#21
0
    def get(self):
        """Searches users by name/email and renders the results page.

        Redirects anonymous visitors to "/". Rejects search strings that
        contain special characters; otherwise queries the "search_user"
        full-text index on both the name and email fields and collects the
        matching User entities for the template.
        """
        user = users.get_current_user()
        searchRes = []
        userDetails = []
        currentUserProf = None
        searchedString = self.request.get("username").strip()
        # Keep the raw input for redisplay in the template (the original
        # re-read the request parameter to get the same value).
        searchedStringOrig = searchedString
        if '@' in searchedString:
            # For email input, search on the local part only.
            searchedString = searchedString.split("@")[0]
        # NOTE(review): SPECIAL_CHAR.split() iterates whitespace-separated
        # tokens of SPECIAL_CHAR -- presumably single characters; confirm.
        is_special_char_present = False
        for char in SPECIAL_CHAR.split():
            if char in searchedString:
                is_special_char_present = True
                break

        if user:
            currentUserProf = ndb.Key(User, user.email()).get()
            url = users.create_logout_url(self.request.uri)
            url_string = "Logout"

            if is_special_char_present:
                self.add_message('Invalid user name passed in search string',
                                 'danger')
            elif searchedString != "":
                results_name = search.Index(name="search_user").search(
                    "name:%s" % (searchedString))
                results_email = search.Index(name="search_user").search(
                    "email:%s" % (searchedString))

                # Merge doc ids from both result sets, de-duplicated.
                for res in results_name.results:
                    if res.doc_id not in searchRes:
                        searchRes.append(res.doc_id)

                for resemail in results_email.results:
                    if resemail.doc_id not in searchRes:
                        searchRes.append(resemail.doc_id)

                # Fetch each entity once -- the original called .get() twice
                # per key, doubling the datastore round-trips.
                for urlkey in searchRes:
                    entity = ndb.Key(urlsafe=urlkey).get()
                    if entity:
                        userDetails.append(entity)
            else:
                self.add_message('Please enter a valid user name',
                                 'danger')

        else:
            url = users.create_login_url(self.request.uri)
            url_string = "Login"
            self.redirect("/", abort=False)
            return

        template_values = {
            "url": url,
            "url_string": url_string,
            "user": user,
            "userDetails": userDetails,
            "currentUserProf": currentUserProf,
            "searchedString": searchedStringOrig,
            "messages": self.messages,
        }
        template = JINJA_ENVIRONMENT.get_template("/Instagram/userSearch.html")
        self.response.write(template.render(template_values))
示例#22
0
    def testStudentProfileCreated(self):
        """Tests that profile entity is created correctly."""
        form_data = {
            'user_id': TEST_USER_ID,
            'public_name': TEST_PUBLIC_NAME,
            'web_page': TEST_WEB_PAGE,
            'blog': TEST_BLOG,
            'photo_url': TEST_PHOTO_URL,
            'first_name': TEST_FIRST_NAME,
            'last_name': TEST_LAST_NAME,
            'email': TEST_EMAIL,
            'phone': TEST_PHONE,
            'residential_street': TEST_RESIDENTIAL_STREET,
            'residential_street_extra': TEST_RESIDENTIAL_STREET_EXTRA,
            'residential_city': TEST_RESIDENTIAL_CITY,
            'residential_province': TEST_RESIDENTIAL_PROVINCE,
            'residential_country': TEST_RESIDENTIAL_COUNTRY,
            'residential_postal_code': TEST_RESIDENTIAL_POSTAL_CODE,
            'birth_date': TEST_BIRTH_DATE.strftime('%Y-%m-%d'),
            'tee_style': TEST_TEE_STYLE,
            'tee_size': TEST_TEE_SIZE,
            'gender': TEST_GENDER,
            'program_knowledge': TEST_PROGRAM_KNOWLEDGE,
            'school_country': TEST_SCHOOL_COUNTRY,
            'school_name': TEST_SCHOOL_NAME,
            'major': TEST_MAJOR,
            'degree': TEST_DEGREE,
        }
        resp = self.post(
            _getProfileRegisterAsStudentUrl(self.program.key()),
            postdata=form_data)
        self.assertResponseRedirect(
            resp, url=_getEditProfileUrl(self.program.key()))

        # A user entity must have been created for the submitted user id.
        user = ndb.Key(user_model.User._get_kind(), TEST_USER_ID).get()
        self.assertIsNotNone(user)

        # A profile entity must have been created under that user.
        profile_id = '%s/%s' % (self.program.key().name(), TEST_USER_ID)
        profile = ndb.Key(
            user_model.User._get_kind(), TEST_USER_ID,
            profile_model.Profile._get_kind(), profile_id).get()
        self.assertIsNotNone(profile)

        # The created profile is a student.
        self.assertIsNotNone(profile.student_data)
        self.assertTrue(profile.is_student)

        # Student data properties reflect the submitted form.
        student = profile.student_data
        self.assertEqual(student.education.school_country,
                         TEST_SCHOOL_COUNTRY)
        self.assertEqual(student.education.school_id, TEST_SCHOOL_NAME)
        self.assertEqual(student.education.major, TEST_MAJOR)
        self.assertEqual(student.education.degree,
                         education_model.Degree.MASTERS)
        self.assertEqual(student.number_of_proposals, 0)
        self.assertEqual(student.number_of_projects, 0)
        self.assertEqual(student.number_of_passed_evaluations, 0)
        self.assertEqual(student.number_of_failed_evaluations, 0)
        self.assertListEqual(student.project_for_orgs, [])
        self.assertIsNone(student.tax_form)
        self.assertIsNone(student.enrollment_form)
示例#23
0
 def test_request_id_to_key(self):
     """A request id maps to its TaskRequest key (a simple XOR)."""
     expected_key = ndb.Key(task_request.TaskRequest, 0x7f14acec2fcfffff)
     actual_key = task_request.request_id_to_key(0xeb5313d0300000)
     self.assertEqual(expected_key, actual_key)
示例#24
0
def load_user(key):
    """Return a FlaskUser wrapping the entity for the urlsafe *key*, or None."""
    entity = ndb.Key(urlsafe=key).get()
    return FlaskUser(entity) if entity else None
示例#25
0
 def by_string(cls, url_safe_str):
     """Return the comment entity referenced by the urlsafe key string."""
     return ndb.Key(urlsafe=url_safe_str).get()
def getObjective(project_id):
    """Fetch the Objective entity keyed by *project_id*."""
    return ndb.Key("Objective", project_id).get()
示例#27
0
def create_employee_key(starter_name='default_employee'):
    """Build the EmployeeBook ancestor key for *starter_name*."""
    key = ndb.Key('EmployeeBook', starter_name)
    return key
def replay_key(replay_folder=DEFAULT_REPLAY_FOLDER):
    """Constructs a Datastore key for a Replay entity with replay_folder."""
    return ndb.Key('Replay', replay_folder)
示例#29
0
 def get_by_key(game_key, number):
     """Returns the thread for the given parent and number."""
     full_pairs = list(game_key.pairs())
     full_pairs.append((GameCommentThread, number))
     return ndb.Key(pairs=full_pairs).get()
示例#30
0
def delete_tag(request, tagId):
    """Deletes the Tag entity identified by the urlsafe key *tagId*.

    Args:
        request: the incoming Django request (unused).
        tagId: urlsafe datastore key string of the tag to delete.

    Returns:
        HttpResponse confirming deletion.
    """
    # Key.delete() returns None; the original bound that None to an unused
    # local named `tag`, which misleadingly suggested the entity was fetched.
    ndb.Key(urlsafe=tagId).delete()
    return HttpResponse("tag deleted")