Example #1
0
    def test_update_covidcast_meta_cache_query(self):
        """Query to update the metadata cache looks sensible.

        NOTE: Actual behavior is tested by integration test.
        """

        # Wire the database to a mocked connector so the SQL it issues
        # can be captured and inspected without a real server.
        fake_connector = MagicMock()
        db = Database()
        db.connect(connector_impl=fake_connector)

        db.update_covidcast_meta_cache('epidata_json_str')

        fake_cursor = fake_connector.connect().cursor()
        self.assertTrue(fake_cursor.execute.called)

        query, query_args = fake_cursor.execute.call_args[0]
        self.assertEqual(query_args, ('"epidata_json_str"', ))

        # Spot-check the statement for the expected clauses and table.
        lowered = query.lower()
        for fragment in ('update', '`covidcast_meta_cache`', 'timestamp', 'epidata'):
            self.assertIn(fragment, lowered)
    def test_get_data_stdev_across_locations_query(self):
        """Query to get signal-level standard deviation looks sensible.

        NOTE: Actual behavior is tested by integration test.
        """

        # Mocked connector lets us capture the SQL without a real database.
        fake_connector = MagicMock()
        db = Database()
        db.connect(connector_impl=fake_connector)

        db.get_data_stdev_across_locations('max_day')

        fake_cursor = fake_connector.connect().cursor()
        self.assertTrue(fake_cursor.execute.called)

        query, query_args = fake_cursor.execute.call_args[0]
        self.assertEqual(query_args, ('max_day', ))

        # The statement should be a SELECT computing STD() over `covidcast`.
        lowered = query.lower()
        for fragment in ('select', '`covidcast`', 'std('):
            self.assertIn(fragment, lowered)
Example #3
0
    def test_connect_opens_connection(self):
        """Connect to the database."""

        # Connecting must delegate to the injected connector implementation.
        fake_connector = MagicMock()
        db = Database()

        db.connect(connector_impl=fake_connector)

        self.assertTrue(fake_connector.connect.called)
Example #4
0
    def test_insert_or_update_batch_exception_reraised(self):
        """Test that an exception is reraised"""
        fake_connector = MagicMock()
        db = Database()
        db.connect(connector_impl=fake_connector)
        # Force the bulk insert to fail inside the driver.
        fake_cursor = fake_connector.connect().cursor()
        fake_cursor.executemany.side_effect = Exception('Test')

        rows = {MagicMock(geo_id='CA', val=1, se=0, sample_size=0)}
        self.assertRaises(Exception, db.insert_or_update_batch, rows)
Example #5
0
    def test_insert_or_update_batch_none_returned(self):
        """Test that None is returned when row count cannot be returned"""
        fake_connector = MagicMock()
        db = Database()
        db.connect(connector_impl=fake_connector)
        # A rowcount of -1 is the DB-API signal that the driver cannot
        # report how many rows were affected.
        fake_connector.connect().cursor().rowcount = -1

        rows = [MagicMock(geo_id='CA', val=1, se=0, sample_size=0)]
        self.assertIsNone(db.insert_or_update_batch(rows))
Example #6
0
    def test_insert_or_update_batch_row_count_returned(self):
        """Test that the row count is returned"""
        fake_connector = MagicMock()
        db = Database()
        db.connect(connector_impl=fake_connector)
        # A non-negative rowcount should be passed straight through.
        fake_connector.connect().cursor().rowcount = 3

        rows = [MagicMock(geo_id='CA', val=1, se=0, sample_size=0)]
        self.assertEqual(db.insert_or_update_batch(rows), 3)
Example #7
0
    def test_disconnect_with_commit(self):
        """Disconnect from the database and commit."""

        fake_connector = MagicMock()
        db = Database()
        db.connect(connector_impl=fake_connector)

        # Passing True should commit the transaction before closing.
        db.disconnect(True)

        conn = fake_connector.connect()
        self.assertTrue(conn.commit.called)
        self.assertTrue(conn.close.called)
Example #8
0
    def test_disconnect_with_rollback(self):
        """Disconnect from the database and rollback."""

        fake_connector = MagicMock()
        db = Database()
        db.connect(connector_impl=fake_connector)

        # Passing False should close without committing (implicit rollback).
        db.disconnect(False)

        conn = fake_connector.connect()
        self.assertFalse(conn.commit.called)
        self.assertTrue(conn.close.called)
Example #9
0
def main():
    """Fetch covidcast metadata from the API and store it in the cache table.

    Commits only if the cache update succeeds; otherwise the connection is
    closed without committing.
    """
    response = Epidata.covidcast_meta()
    print(response['result'])

    # A result code other than 1 means the API has no metadata to cache.
    if response['result'] != 1:
        print('metadata is not available')
        return

    database = Database()
    database.connect()
    commit = False
    try:
        database.update_covidcast_meta_cache(json.dumps(response['epidata']))
        commit = True
        print('successfully cached epidata')
    finally:
        # Always disconnect; commit flag decides commit vs. rollback.
        database.disconnect(commit)
Example #10
0
    def test_count_all_rows_query(self):
        """Query to count all rows looks sensible.

        NOTE: Actual behavior is tested by integration test.
        """

        fake_connector = MagicMock()
        db = Database()
        db.connect(connector_impl=fake_connector)
        fake_cursor = fake_connector.connect().cursor()
        # Iterating the cursor yields a single one-column count row.
        fake_cursor.__iter__.return_value = [(123, )]

        num = db.count_all_rows()

        self.assertEqual(num, 123)
        self.assertTrue(fake_cursor.execute.called)

        # The statement should be a COUNT(1) over the `covidcast` table.
        lowered = fake_cursor.execute.call_args[0][0].lower()
        self.assertIn('select count(1)', lowered)
        self.assertIn('from `covidcast`', lowered)
Example #11
0
def main(args):
    """Delete rows from covidcast.

    Processes every CSV file in ``args.deletion_dir`` (in sorted order),
    deleting the rows each file lists, and logs the total number of rows
    deleted — or "rowcount unsupported" when the driver could not report
    a count for at least one file.
    """

    logger = get_structured_logger("csv_deletion", filename=args.log_file)
    start_time = time.time()
    database = Database()
    database.connect()
    all_n = 0
    # BUG FIX: track "unknown count" with a separate flag. The previous
    # code assigned the string "rowcount unsupported" to all_n, so a later
    # file with a real integer count would crash on `all_n += n`
    # (TypeError: str + int).
    rowcount_unknown = False

    try:
        for deletion_file in sorted(
                glob.glob(os.path.join(args.deletion_dir, '*.csv'))):
            n = handle_file(deletion_file, database, logger)
            if n is None:
                rowcount_unknown = True
            else:
                all_n += n
    finally:
        # Commit whatever was deleted, even if a file failed mid-run.
        database.disconnect(True)

    logger.info("Deleted CSVs from database",
                total_runtime_in_seconds=round(time.time() - start_time, 2),
                row_count="rowcount unsupported" if rowcount_unknown else all_n)
Example #12
0
class DeleteBatch(unittest.TestCase):
    """Tests batch deletions"""
    def setUp(self):
        """Perform per-test setup."""

        # connect to the `epidata` database and clear the `covidcast` table
        cnx = mysql.connector.connect(user='******',
                                      password='******',
                                      host='delphi_database_epidata',
                                      database='epidata')
        cur = cnx.cursor()
        cur.execute('truncate table covidcast')
        cnx.commit()
        cur.close()

        # make connection and cursor available to test cases
        self.cnx = cnx
        self.cur = cnx.cursor()

        # use the local instance of the epidata database
        secrets.db.host = 'delphi_database_epidata'
        secrets.db.epi = ('user', 'pass')

        # use the local instance of the Epidata API
        Epidata.BASE_URL = 'http://delphi_web_epidata/epidata/api.php'

        # will use secrets as set above
        # NOTE: imported here (not at module level) so that the secrets
        # overrides above take effect before Database reads them.
        from delphi.epidata.acquisition.covidcast.database import Database
        self.database = Database()
        self.database.connect()

    def tearDown(self):
        """Perform per-test teardown.

        Closes the raw cursor/connection opened in setUp.
        """
        self.cur.close()
        self.cnx.close()

    @unittest.skip("Database user would require FILE privileges")
    def test_delete_from_file(self):
        # Same scenario as test_delete_from_tuples, but the deletions are
        # given to delete_batch as a CSV file path (requires LOAD DATA /
        # FILE privileges on the DB user, hence skipped).
        self._test_delete_batch(
            path.join(path.dirname(__file__), "delete_batch.csv"))

    def test_delete_from_tuples(self):
        # Parse the sample CSV into tuples, skip the header row, and append
        # a "day" time_type column to each row before running the deletion.
        with open(path.join(path.dirname(__file__), "delete_batch.csv")) as f:
            rows = []
            for line in f:
                rows.append(line.strip().split(","))
        rows = [r + ["day"] for r in rows[1:]]
        self._test_delete_batch(rows)

    def _test_delete_batch(self, cc_deletions):
        """Shared scenario: seed rows, run delete_batch, verify results.

        `cc_deletions` may be either a CSV file path or a list of row
        tuples, whichever form delete_batch is being exercised with.
        """
        # load sample data
        rows = [
            # geo_value issue is_latest
            ["d_nonlatest", 1, 0],
            ["d_nonlatest", 2, 1],
            ["d_latest", 1, 0],
            ["d_latest", 2, 0],
            ["d_latest", 3, 1]
        ]
        # Insert the same five rows under two time_values (0 and 1) so the
        # test can verify deletions are scoped to a single time_value.
        for time_value in [0, 1]:
            self.cur.executemany(
                f'''
            INSERT INTO covidcast
            (`geo_value`, `issue`, `is_latest_issue`, `time_value`,
            `source`, `signal`, `time_type`, `geo_type`,
            value_updated_timestamp, direction_updated_timestamp, value, stderr, sample_size, lag, direction)
            VALUES
            (%s, %s, %s, {time_value},
            "src", "sig", "day", "geo",
            0, 0, 0, 0, 0, 0, 0)
            ''', rows)
        self.cnx.commit()

        # delete entries
        self.database.delete_batch(cc_deletions)

        # verify remaining data is still there
        # (two rows were deleted out of the 2 * len(rows) inserted)
        self.cur.execute("select * from covidcast")
        result = list(self.cur)
        self.assertEqual(len(result), 2 * len(rows) - 2)

        examples = [
            # verify deletions are gone
            Example(
                'select * from covidcast where time_value=0 and geo_value="d_nonlatest" and issue=1',
                []),
            Example(
                'select * from covidcast where time_value=0 and geo_value="d_latest" and issue=3',
                []),
            # verify is_latest_issue flag was corrected
            Example(
                'select geo_value, issue from covidcast where time_value=0 and is_latest_issue=1',
                [('d_nonlatest', 2), ('d_latest', 2)])
        ]

        for ex in examples:
            self.cur.execute(ex.given)
            result = list(self.cur)
            self.assertEqual(result, ex.expected, ex.given)