Example 1
    def test_put_for_index_update(self):
        """Forcing an overwite into a unique index changes timestamps and
        coerces as expected."""
        # Initial save. We'll overwite this user's email and expect the uid to
        # remain the same. Don't do it the normal way so that we can
        # intentionally put a modified time in the past (the other option is to
        # make the tests slow enough for the second-resolution timestamps to
        # differ).
        user = User.create(email='foo')
        user.created = str_to_dt('1970-01-01T00:00:00Z')
        user.modified = user.created
        row_dict = user.coerce_row_dict(user.to_dict())
        with mysql_connection.connect() as sql:
            sql.insert_or_update(User.table, row_dict)

        # UPDATE should advance modified timestamps, but not created.
        duplicate_user = User.create(email='foo')  # new uid, should be ignored on put
        duplicate_user.created = str_to_dt('1982-01-01T00:00:00Z')
        # Make sure boolean and json attributes coerced correctly.
        duplicate_user.receive_sms = False
        duplicate_user.owned_teams = ['Team_X']
        fetched_user = User.put_for_index(duplicate_user, 'email')

        self.assertEqual(user.created, fetched_user.created)
        self.assertGreater(fetched_user.modified, user.modified)
        self.assertEqual(fetched_user.receive_sms, False)
        self.assertEqual(fetched_user.owned_teams, ['Team_X'])
        # uid of duplicate user is gone
        self.assertEqual(fetched_user.uid, user.uid)
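For reference, here is a minimal sketch of the contract this test exercises. It is not the SqlModel implementation, only the observable behavior the assertions above rely on, restated with the same calls the test already uses.

# Illustrative only: put_for_index() resolves the write against the unique
# 'email' index instead of the primary key.
existing = User.create(email='foo')
existing.put()

dupe = User.create(email='foo')            # same unique email, different uid
saved = User.put_for_index(dupe, 'email')

assert saved.uid == existing.uid           # the duplicate's uid is discarded
assert saved.created == existing.created   # created survives the UPDATE
# `modified` advances as well, but only observably when the first save is
# backdated, which is why the test writes 1970 timestamps directly.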
Example 2
    def set_up(self):
        # Let ConsistencyTestCase set up the datastore testing stub.
        super(TestApiNetworks, self).set_up()

        application = webapp2.WSGIApplication(api_routes,
                                              config={
                                                  'webapp2_extras.sessions': {
                                                      'secret_key':
                                                      self.cookie_key
                                                  }
                                              },
                                              debug=True)
        self.testapp = webtest.TestApp(application)

        with mysql_connection.connect() as sql:
            sql.reset({
                'network': Network.get_table_definition(),
                'organization': Organization.get_table_definition(),
                'program': Program.get_table_definition(),
                'user': User.get_table_definition(),
            })

        self.program = Program.create(
            name="Engagement Project",
            label='ep18',
            min_cycles=3,
            active=True,
            preview_url='foo.com',
        )
        self.program.put()
Example 3
    def set_up(self):
        # Let ConsistencyTestCase set up the datastore testing stub.
        super(TestApiParticipants, self).set_up()

        application = self.patch_webapp(webapp2.WSGIApplication)(
            api_routes,
            config={
                'webapp2_extras.sessions': {
                    'secret_key': self.cookie_key
                }
            },
            debug=True)
        self.testapp = webtest.TestApp(application)

        with mysql_connection.connect() as sql:
            sql.reset({
                'classroom': Classroom.get_table_definition(),
                'cycle': Cycle.get_table_definition(),
                'participant': Participant.get_table_definition(),
                'program': Program.get_table_definition(),
                'team': Team.get_table_definition(),
                'user': User.get_table_definition(),
            })

        self.program = Program.create(
            name="Engagement Project",
            label='ep18',
            min_cycles=3,
            active=True,
            preview_url='foo.com',
        )
        self.program.put()
Example 4
    def test_task_creates_backup_for_user(self):
        user_response_params = dict(
            self.response_params,
            type=Response.USER_LEVEL_SYMBOL,
            private=True,
            user_id='User_foo',
        )
        r = Response.create(**user_response_params)
        r.put()
        payload = json.dumps(
            r.to_dict(),
            default=util.json_dumps_default,
        )

        self.testapp.post(
            '/task/backup_response',
            payload,
            headers={'Content-Type': 'application/json'},
        )

        with mysql_connection.connect() as sql:
            rows = sql.select_star_where(ResponseBackup.table)

        backup = rows[0]
        backup['body'] = json.loads(backup['body'])
        expected = dict(r.to_dict(), backup_id=1)
        self.assertEqual(len(rows), 1)
        self.assertEqual(backup, expected)
Example 5
    def post(self):
        content_types = self.request.headers.get('Content-Type', [])
        if 'application/json' not in content_types:
            raise Exception("/task/backup_response requires JSON content type")

        # This results from a known sane internal call, so we can skip most of
        # the layers we normally use (get_params(), creating objects). First, a
        # dictionary of python-friendly values:
        response_params = json.loads(self.request.body)

        # Then convert values to their MySQL-friendly versions.
        row_dict = ResponseBackup.coerce_row_dict(response_params)

        # The data provided should come from Response.after_put(), so the
        # created and modified times should be correct; they just need
        # converting to SQL format.
        for key in ('created', 'modified'):
            row_dict[key] = util.iso_datetime_to_sql(row_dict[key])

        # Then just insert, with no choice to update anything that might look
        # similar.
        with mysql_connection.connect() as sql:
            affected_rows = sql.insert_row_dicts(ResponseBackup.table,
                                                 [row_dict])

        # There's no unique index or primary key on this table, so we should
        # _always_ get a new row.
        if affected_rows != 1:
            raise Exception("BackupResponses failed to insert a new row.")
Example 6
    def get_by_participant(klass, participant_id, project_cohort_id=None):
        """Get whitelisted pd for a participant, optionally scoped by pc."""

        pc_clause = 'AND `project_cohort_id` = %s' if project_cohort_id else ''

        # Only certain types of participant data are readable by anyone without
        # direct access to the database. This keeps participant responses
        # secure.
        query = """
            SELECT    *
            FROM      `participant_data`
            WHERE     `key` IN ('progress', 'link', 'condition', 'ep_assent',
                                'last_login', 'saw_baseline',
                                'saw_demographics', 'saw_validation')
              AND     `participant_id` = %s
              {pc_clause}
        """.format(pc_clause=pc_clause)

        params = [participant_id]
        if project_cohort_id:
            params.append(project_cohort_id)

        with mysql_connection.connect() as sql:
            row_dicts = sql.select_query(query, tuple(params))

        return [klass.row_dict_to_obj(d) for d in row_dicts]
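A hedged usage sketch; the ids are invented, the class is assumed to be ParticipantData, and attribute access assumes row_dict_to_obj() exposes columns as attributes.

# Illustrative only.
pds = ParticipantData.get_by_participant(
    'Participant_ABC123',
    project_cohort_id='ProjectCohort_XYZ789',
)
progress = [pd for pd in pds if pd.key == 'progress']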
Example 7
    def test_update_one(self):
        """Updating an object with .put() changes timestamps and coerces as
        expected."""
        # Initial save. Don't do it the normal way so that we can intentionally
        # put a modified time in the past (the other option is to make the
        # tests slow enough for the second-resolution timestamps to differ).
        user = User.create(email='foo')
        user.created = str_to_dt('1970-01-01T00:00:00Z')
        user.modified = user.created
        row_dict = user.coerce_row_dict(user.to_dict())
        with mysql_connection.connect() as sql:
            sql.insert_or_update(User.table, row_dict)

        # Check that timestamps saved correctly.
        fetched_user = User.get_by_id(user.uid)
        self.assertEqual(user.created, fetched_user.created)
        self.assertEqual(user.modified, fetched_user.modified)

        # UPDATE should advance modified timestamps, but not created.
        user.name = 'newly named'
        user.created = str_to_dt('1982-01-01T00:00:00Z')
        # Make sure boolean and json attributes coerced correctly.
        user.receive_sms = False
        user.owned_teams = ['Team_X']
        user.put()
        updated_user = user
        self.assertEqual(updated_user.created, fetched_user.created)
        self.assertGreater(updated_user.modified, fetched_user.modified)
        self.assertEqual(updated_user.receive_sms, False)
        self.assertEqual(updated_user.owned_teams, ['Team_X'])
Example 8
    def set_up(self):
        super(TestParticipants, self).set_up()

        with mysql_connection.connect() as sql:
            sql.reset({
                'participant': Participant.get_table_definition(),
            })
Example 9
    def set_up(self):
        # Let ConsistencyTestCase set up the datastore testing stub.
        super(TestApiParticipation, self).set_up()

        application = webapp2.WSGIApplication(api_routes,
                                              config={
                                                  'webapp2_extras.sessions': {
                                                      'secret_key':
                                                      self.cookie_key
                                                  }
                                              },
                                              debug=True)
        self.testapp = webtest.TestApp(application)

        # Successful download of completion ids triggers a notification, which
        # requires a cohort name.
        Program.mock_program_config(
            self.program_label,
            {'cohorts': {
                self.cohort_label: {
                    'name': self.cohort_label
                }
            }},
        )

        with mysql_connection.connect() as sql:
            sql.reset({
                'participant':
                Participant.get_table_definition(),
                'participant_data':
                ParticipantData.get_table_definition(),
            })
Example 10
    def get_cached_properties_from_db(self, org_id):
        """Count how many related users and teams there are."""
        query = '''
            SELECT COUNT(DISTINCT(t.`uid`)) as num_teams
            ,      COUNT(DISTINCT(u.`uid`)) as num_users
            FROM `organization` o
            LEFT OUTER JOIN `user` u
              ON  u.`owned_organizations` LIKE BINARY %s
            LEFT OUTER JOIN `team` t
              ON  t.`organization_ids` LIKE BINARY %s
            WHERE o.`uid` = %s
        '''
        params = (
            '%{}%'.format(org_id),
            '%{}%'.format(org_id),
            org_id,
        )

        with mysql_connection.connect() as sql:
            row_dicts = sql.select_query(query, params)

        if len(row_dicts) == 0:
            return {}
        elif len(row_dicts) == 1:
            # {'num_users': int, 'num_teams': int}
            return {k: int(v) for k, v in row_dicts[0].items()}
        else:
            raise Exception(
                "Multiple results for organization cached properties.")
Example 11
    def completion_ids_anonymous(klass, project_cohort_id, start, end):
        """Get list of anonymous participant ids having entered the survey,
        whether or not they finished it.

        Args:
            project_cohort_id: str applicable pc
            start: datetime filters for pd modified after this date
            end: datetime filters for pd modified before this date

        Returns: list of rows with 'participant_id', 'survey_ordinal', and
            'value' (which is progress).
        """
        # Optionally add a time range. Note that SQL stores and compares
        # datetime strings in a different format.
        query = """
            SELECT  `participant_id`
            ,       `survey_ordinal`
            ,       `value`
            FROM `participant_data` pd
            WHERE `key` = 'progress'
              AND `testing` = 0
              AND `project_cohort_id` = %s
              AND `modified` >= %s
              AND `modified` < %s
            ORDER BY `survey_ordinal`, `value`, `participant_id`
        """
        query_params = (
            project_cohort_id,
            start.strftime(config.sql_datetime_format),
            end.strftime(config.sql_datetime_format),
        )

        with mysql_connection.connect() as sql:
            result = sql.select_query(query, tuple(query_params))
        return result
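A hedged usage sketch; the id and dates are invented, the class is assumed to be ParticipantData, and config.sql_datetime_format is assumed to be a strftime pattern along the lines of '%Y-%m-%d %H:%M:%S'.

import datetime

# Illustrative only.
rows = ParticipantData.completion_ids_anonymous(
    'ProjectCohort_XYZ789',
    datetime.datetime(2018, 1, 1),
    datetime.datetime(2018, 6, 1),
)
# Each row carries 'participant_id', 'survey_ordinal', and 'value' (progress).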
Example 12
    def test_get_day(self):
        """Get all user's notifications for the last 24 hours."""
        user = User.create(name='superdude', email='*****@*****.**')

        old = self.test_create(user=user)
        old.created = datetime.datetime(2017, 1, 1)
        # Simulate a put, but without stripping timestamps at this step, which
        # SqlModel.put() would normally do. This lets us post-date the
        # notification.
        row_dict = Notification.coerce_row_dict(old.to_dict())
        with mysql_connection.connect() as sql:
            sql.insert_or_update(Notification.table, row_dict)

        # This one gets CURRENT_TIMESTAMP, as normal.
        new = self.test_create(user=user)
        new.put()

        # Only the new one should be returned.
        start = datetime.datetime.now() - datetime.timedelta(hours=12)
        end = datetime.datetime.now() + datetime.timedelta(hours=12)
        notes = Notification.get_period_for_user(
            user,
            util.datelike_to_iso_string(start),
            util.datelike_to_iso_string(end),
        )
        self.assertEqual(len(notes), 1)
Example 13
    def set_up(self):
        # Let ConsistencyTestCase set up the datastore testing stub.
        super(TestApiSurveys, self).set_up()

        application = webapp2.WSGIApplication(api_routes,
                                              config={
                                                  'webapp2_extras.sessions': {
                                                      'secret_key':
                                                      self.cookie_key
                                                  }
                                              },
                                              debug=True)
        self.testapp = webtest.TestApp(application)

        with mysql_connection.connect() as sql:
            sql.reset({
                'classroom': Classroom.get_table_definition(),
                'metric': Metric.get_table_definition(),
                'program': Program.get_table_definition(),
                'survey': Survey.get_table_definition(),
                'team': Team.get_table_definition(),
                'user': User.get_table_definition(),
            })

        self.ep_program = Program.create(
            name="Engagement Project",
            label='ep18',
            active=True,
            preview_url='foo.com',
        )
        self.ep_program.put()
Example 14
    def resolve_id_mismatch(klass, user, new_id):
        """Change all references to user's id to a new id.

        N.B. this is obviously brittle; when the relationship schema changes,
        this will also have to change.
        """
        # The auth server has a different id for this user; defer to it.

        teams = Team.get(captain_id=user.uid)
        for t in teams:
            t.captain_id = new_id
        Team.put_multi(teams)

        classrooms = Classroom.get(contact_id=user.uid)
        for c in classrooms:
            c.contact_id = new_id
        Classroom.put_multi(classrooms)

        params = {'uid': new_id, 'short_uid': SqlModel.convert_uid(new_id)}
        with mysql_connection.connect() as sql:
            sql.update_row(klass.table, 'uid', user.uid, **params)

        for k, v in params.items():
            setattr(user, k, v)

        return user
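A hedged usage sketch; the uids are invented and the method is assumed to live on User.

# Illustrative only.
user = User.get_by_id('User_local123')
user = User.resolve_id_mismatch(user, 'User_auth456')
# Teams (captain_id) and classrooms (contact_id) that referenced the old uid
# now point at 'User_auth456', as does the user row itself.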
Example 15
    def get_for_classroom(klass, team_id, classroom_id, student_ids=None):
        if student_ids is None:
            student_ids = []

        stripped_ids = [klass.strip_token(id) for id in student_ids]

        student_id_clause = 'AND `stripped_student_id` IN({})'.format(','.join(
            ['%s'] * len(stripped_ids)))
        query = '''
            SELECT *
            FROM   `{table}`
            WHERE
              # While the query would run without the team id (b/c classroom
              # would constrain it), including it is useful because when a
              # student id is included, the engine can then use the table's
              # team-student_id index. Try this query with EXPLAIN to see more.
              `team_id` = %s AND
              `classroom_ids` LIKE BINARY %s
              {student_id_clause}
        '''.format(
            table=klass.table,
            student_id_clause=student_id_clause if stripped_ids else '',
        )
        params = [team_id, '%{}%'.format(classroom_id)]

        if stripped_ids:
            # One param per %s placeholder in the IN() clause.
            params.extend(stripped_ids)

        with mysql_connection.connect() as sql:
            row_dicts = sql.select_query(query, tuple(params))
        return [klass.row_dict_to_obj(d) for d in row_dicts]
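A hedged usage sketch; the ids and names are invented, the class is assumed to be Participant, and strip_token() is assumed to normalize whatever students typed as their id.

# Illustrative only.
participants = Participant.get_for_classroom(
    'Team_ABC123',
    'Classroom_XYZ789',
    student_ids=['Dana Smith', 'Pat Jones'],
)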
Example 16
def registrar_evento(self, rfid_user):
    # Event types: 1 = entry (entrada), 2 = exit (saida).
    self.id_user = rfid_user
    self.date = datetime.date.today()
    self.time = datetime.datetime.now().strftime("%H:%M:%S")

    self.msqlConn = msqlConn.connect()

    try:
        # Look up the most recent event for this user today.
        self.msqlConn.execute("""SELECT * FROM events WHERE iduser='******' AND date='{}'
            ORDER BY idevents DESC LIMIT 1"""
                    .format(self.id_user, self.date))

        row = self.msqlConn.fetchall()

        if not row or row[0][4] == 2:
            # No event yet today, or the last one was an exit: record an entry.
            self.msqlConn.execute(""" INSERT INTO events VALUES ('', '{}', '{}', '{}', '{}') """
                        .format(self.id_user, self.date, self.time, 1))

            self.msqlConn.commit()

        else:
            # The last event was an entry: record an exit.
            self.msqlConn.execute(""" INSERT INTO events VALUES ('', '{}', '{}', '{}', '{}') """
                        .format(self.id_user, self.date, self.time, 2))

            self.msqlConn.commit()

        self.msqlConn.close()

    except mysql.connector.Error as err:
        print(err)
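The function above interpolates values into SQL with str.format, which is open to injection and easy to get out of sync with the placeholders. Below is a minimal sketch of the same entry/exit toggle using parameterized queries; the five-column layout with the entry/exit flag last is inferred from the code above, and the open mysql.connector connection is assumed to be supplied by the caller.

import datetime

def registrar_evento_param(conn, rfid_user):
    """Record an entry (1) or exit (2) event, alternating with the last one.

    `conn` is assumed to be an open mysql.connector connection.
    """
    date = datetime.date.today()
    time_str = datetime.datetime.now().strftime("%H:%M:%S")

    cursor = conn.cursor()
    cursor.execute(
        "SELECT * FROM events WHERE iduser = %s AND date = %s "
        "ORDER BY idevents DESC LIMIT 1",
        (rfid_user, date),
    )
    row = cursor.fetchall()

    # 1 = entry (entrada), 2 = exit (saida); the flag is read from column 5.
    tipo = 1 if (not row or row[0][4] == 2) else 2
    cursor.execute(
        "INSERT INTO events VALUES ('', %s, %s, %s, %s)",
        (rfid_user, date, time_str, tipo),
    )
    conn.commit()
    cursor.close()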
Example 17
    def set_up(self):
        """Clear relevant tables from testing SQL database."""
        # Let ConsistencyTestCase set up the datastore testing stub.
        super(TestGraphQLProjectCohort, self).set_up()

        with mysql_connection.connect() as sql:
            sql.reset({
                'checkpoint': Checkpoint.get_table_definition(),
            })

        Program.mock_program_config(
            self.program_label, {
                'name': self.program_name,
                'default_portal_type': 'name_or_id',
                'description': self.program_description,
                'cohorts': {
                    self.cohort_label: self.cohort
                }
            })

        application = webapp2.WSGIApplication(api_routes,
                                              config={
                                                  'webapp2_extras.sessions': {
                                                      'secret_key':
                                                      self.cookie_key
                                                  }
                                              },
                                              debug=True)
        self.testapp = webtest.TestApp(application)
Example 18
    def set_up(self):
        # Let ConsistencyTestCase set up the datastore testing stub.
        super(TestModel, self).set_up()

        with mysql_connection.connect() as sql:
            sql.reset({
                'user': User.get_table_definition(),
            })
Example 19
    def set_up(self):
        """Clear relevant tables from testing SQL database."""
        # Let ConsistencyTestCase set up the datastore testing stub.
        super(TestParticipantData, self).set_up()

        with mysql_connection.connect() as sql:
            sql.reset({
                'participant_data':
                ParticipantData.get_table_definition(),
            })
Example 20
    def set_up(self):
        # Let ConsistencyTestCase set up the datastore testing stub.
        super(TestNotifications, self).set_up()

        with mysql_connection.connect() as sql:
            sql.reset({
                'digest': Digest.get_table_definition(),
                'notification': Notification.get_table_definition(),
                'user': User.get_table_definition(),
            })
Example 21
    def set_up(self):
        # Let ConsistencyTestCase set up the datastore testing stub.
        super(TestSurveys, self).set_up()

        with mysql_connection.connect() as sql:
            sql.reset({
                'cycle': Cycle.get_table_definition(),
                'metric': Metric.get_table_definition(),
                'program': Program.get_table_definition(),
                'survey': Survey.get_table_definition(),
                'team': Team.get_table_definition(),
            })
Example 22
    def count_for_classroom(klass, classroom_id):
        query = '''
            SELECT COUNT(`uid`)
            FROM   `{table}`
            WHERE  `classroom_ids` LIKE BINARY %s
        '''.format(table=klass.table)
        params = ['%{}%'.format(classroom_id)]

        with mysql_connection.connect() as sql:
            num_students = sql.select_single_value(query, tuple(params))

        return num_students
Example 23
    def query_by_network(klass, network_id):
        query = '''
            SELECT *
            FROM `{table}`
            WHERE `owned_networks` LIKE BINARY %s
        '''.format(table=klass.table)
        params = ('%{}%'.format(network_id),)

        with mysql_connection.connect() as sql:
            row_dicts = sql.select_query(query, params)

        return [klass.row_dict_to_obj(d) for d in row_dicts]
Example 24
    def set_up(self):
        # Let ConsistencyTestCase set up the datastore testing stub.
        super(TestCycles, self).set_up()

        with mysql_connection.connect() as sql:
            sql.reset({
                'classroom': Classroom.get_table_definition(),
                'cycle': Cycle.get_table_definition(),
                'program': Program.get_table_definition(),
                'team': Team.get_table_definition(),
                'user': User.get_table_definition(),
            })
Example 25
    def completion_ids(klass, **kwargs):
        """Get list of identifiable names/ids having entered the survey, whether
        or not they finished it.

        Args:
            project_cohort_id: str applicable pc
            survey_id: str applicable survey
            start: datetime filters for pd modified after this date
            end: datetime filters for pd modified before this date

        Returns: List of dictionaries, each with keys 'token',
        'percent_progress', and 'module' (the aliases in the SELECT below).
        """
        # All kwargs default to None.
        kwargs = collections.defaultdict(lambda: None, kwargs)

        # Determine and check for a single scope.
        scope_keys = ('project_cohort_id', 'survey_id')
        scope_filters = {k: kwargs[k] for k in scope_keys if kwargs[k]}
        if len(scope_filters) != 1:
            raise Exception("Invalid scope filters: {}".format(scope_filters))
        query_params = scope_filters.values()

        # Optionally add a time range. Note that SQL stores and compares
        # datetime strings in a different format.
        query_params += [
            kwargs[k].strftime(config.sql_datetime_format)
            for k in ('start', 'end') if kwargs[k]
        ]

        query = """
            SELECT  p.`name` as token
            ,       pd.`value` as percent_progress
            ,       pd.`survey_ordinal` as module
            FROM `participant_data` pd
            JOIN `participant` p
              ON pd.`participant_id` = p.`uid`
            WHERE pd.`key` = 'progress'
              AND pd.`testing` = 0
              AND pd.`{scope_key}` = %s
              {start}
              {end}
            ORDER BY module, percent_progress * 1, token
        """.format(
            scope_key=scope_filters.keys()[0],
            start='AND pd.`modified` > %s' if kwargs['start'] else '',
            end='AND pd.`modified` < %s' if kwargs['end'] else '',
        )

        with mysql_connection.connect() as sql:
            result = sql.select_query(query, tuple(query_params))
        return result
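A hedged usage sketch; the id and dates are invented and the class is assumed to be ParticipantData. The returned keys follow the SELECT aliases.

import datetime

# Illustrative only.
rows = ParticipantData.completion_ids(
    project_cohort_id='ProjectCohort_XYZ789',
    start=datetime.datetime(2018, 1, 1),
    end=datetime.datetime(2018, 6, 1),
)
# Each row carries 'token', 'percent_progress', and 'module'.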
Example 26
    def query_by_teams(klass, team_ids):
        query = '''
            SELECT *
            FROM `{table}`
            WHERE `team_id` IN ({interps})
        '''.format(table=klass.table,
                   interps=', '.join(['%s'] * len(team_ids)))
        params = tuple(team_ids)

        with mysql_connection.connect() as sql:
            row_dicts = sql.select_query(query, params)

        return [klass.row_dict_to_obj(r) for r in row_dicts]
Example 27
    def query_by_organization(klass, org_id):
        query = '''
            SELECT *
            FROM `{table}`
            WHERE `organization_ids` LIKE BINARY %s
            ORDER BY `name`
        '''.format(table=klass.table)
        params = ('%{}%'.format(org_id), )

        with mysql_connection.connect() as sql:
            row_dicts = sql.select_query(query, params)

        return [klass.row_dict_to_obj(d) for d in row_dicts]
Example 28
    def participation_by_project_cohort_from_sql(klass,
                                                 ids_or_codes,
                                                 using_codes=False,
                                                 start=None,
                                                 end=None):
        """Like participation() but for many project cohorts at once.

        Args:
            ids_or_codes: list applicable pc ids or pc codes
            using_codes: bool default False, True if first argument is codes
            start: datetime optional, filters for pd modified after this date
            end: datetime optional, filters for pd modified before this date

        Returns: List of dictionaries, each representing a distinct value of
        progress (e.g. 33, 100) and associated counts.
        """
        if len(ids_or_codes) == 0:
            return []

        query = """
            SELECT  `project_cohort_id`
            ,       MAX(`code`) as code
            ,       `value`
            ,       `survey_ordinal`
            ,       COUNT(`uid`) as n
            FROM `participant_data`
            WHERE `key` = 'progress'
              AND `testing` = 0
              AND `{match_field}` IN({interps})
              {start}
              {end}
            GROUP BY `project_cohort_id`, `survey_ordinal`, `value`
            # Cast value to number before ordering, otherwise string sorting
            # gives you things like: 1, 100, 33.
            # http://stackoverflow.com/questions/5417381/mysql-sort-string-number
            ORDER BY `project_cohort_id`, `survey_ordinal`, `value` * 1
        """.format(
            match_field='code' if using_codes else 'project_cohort_id',
            interps=','.join(['%s'] * len(ids_or_codes)),
            start='AND `modified` >= %s' if start else '',
            end='AND `modified` < %s' if end else '',
        )
        query_params = list(ids_or_codes)
        if start:
            query_params.append(start.strftime(config.sql_datetime_format))
        if end:
            query_params.append(end.strftime(config.sql_datetime_format))

        with mysql_connection.connect() as sql:
            result = sql.select_query(query, tuple(query_params))
        return result
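A hedged usage sketch; the codes are invented and the class is assumed to be ParticipantData.

# Illustrative only.
rows = ParticipantData.participation_by_project_cohort_from_sql(
    ['trout viper', 'gray fox'],
    using_codes=True,
)
# Each row groups a count ('n') by project cohort, survey ordinal, and
# progress value.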
Example 29
def registrar_usuario(self, nome, rg, cpf, hora_entrada, hora_saida, rfid):
    self.msqlConn = msqlConn.connect()

    try:
        # Register a new user; values are passed in the same order as the columns.
        self.msqlConn.execute(""" INSERT INTO users (nome,cpf,rg,entrada,saida,rfid) VALUES ('{}','{}','{}','{}','{}','{}') """
                        .format(nome, cpf, rg, hora_entrada, hora_saida, rfid))

        self.msqlConn.commit()

        self.msqlConn.close()

    except mysql.connector.Error as err:
        print(err)
Example 30
    def query_by_team(klass, team_id):
        query = '''
            SELECT o.*
            FROM `organization` o
            JOIN `team` t
              ON t.`organization_ids` LIKE BINARY CONCAT('%%', o.`uid`, '%%')
            WHERE t.`uid` = %s
            ORDER BY o.`name`
        '''
        params = (team_id, )

        with mysql_connection.connect() as sql:
            row_dicts = sql.select_query(query, params)

        return [klass.row_dict_to_obj(d) for d in row_dicts]