Code example #1
File: test_api_data.py Project: eads/panda
    def test_update_bulk(self):
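        # Bulk update via PUT to the dataset's data list endpoint; both external_ids
        # already exist, so the rows are updated in place and row_count stays at 4.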
        self.dataset.import_data(self.user, self.upload, 0)

        utils.wait()

        new_data = {
            "objects": [
                {"data": ["1", "A", "B", "C"], "external_id": "1"},
                {"data": ["2", "D", "E", "F"], "external_id": "2"},
            ]
        }

        response = self.client.put(
            "/api/1.0/dataset/%s/data/" % self.dataset.slug,
            content_type="application/json",
            data=json.dumps(new_data),
            **self.auth_headers
        )

        self.assertEqual(response.status_code, 202)
        body = json.loads(response.content)
        self.assertEqual(len(body["objects"]), 2)

        # Refresh
        self.dataset = Dataset.objects.get(id=self.dataset.id)

        self.assertEqual(self.dataset.row_count, 4)
Code example #2
File: test_api_data.py Project: eads/panda
    def test_created_search(self):
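        # A row created via POST should be immediately findable through the
        # cross-dataset /data/ search endpoint.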
        self.dataset.import_data(self.user, self.upload, 0)

        utils.wait()

        new_data = {"data": ["5", "Flibbity!", "B", "C"]}

        response = self.client.post(
            "/api/1.0/dataset/%s/data/" % self.dataset.slug,
            content_type="application/json",
            data=json.dumps(new_data),
            **self.auth_headers
        )

        self.assertEqual(response.status_code, 201)

        response = self.client.get("/api/1.0/data/?q=flibbity", **self.auth_headers)

        self.assertEqual(response.status_code, 200)

        body = json.loads(response.content)

        # Verify that the group count is correct
        self.assertEqual(body["meta"]["total_count"], 1)
        self.assertEqual(len(body["objects"]), 1)
Code example #3
File: test_api_data.py Project: eads/panda
    def test_update(self):
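        # PUT on a single row replaces its data while preserving the row's
        # dataset, resource_uri and external_id.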
        self.dataset.import_data(self.user, self.upload, 0)

        utils.wait()

        update_data = {"dataset": "/api/1.0/dataset/%s/" % self.dataset.slug, "data": ["5", "A", "B", "C"]}

        response = self.client.get("/api/1.0/dataset/%s/data/" % self.dataset.slug, **self.auth_headers)

        self.assertEqual(response.status_code, 200)
        body = json.loads(response.content)

        data = body["objects"][0]

        response = self.client.put(
            "/api/1.0/dataset/%s/data/%s/" % (self.dataset.slug, data["external_id"]),
            content_type="application/json",
            data=json.dumps(update_data),
            **self.auth_headers
        )

        self.assertEqual(response.status_code, 202)
        body = json.loads(response.content)
        self.assertEqual(body["data"], update_data["data"])
        self.assertEqual(body["dataset"], data["dataset"])
        self.assertEqual(body["resource_uri"], data["resource_uri"])
        self.assertEqual(body["external_id"], data["external_id"])
Code example #4
File: test_api_data.py Project: eads/panda
    def test_create(self):
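        # POST creates a new row: the response echoes the submitted data and the
        # dataset's row_count grows from 4 to 5.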
        self.dataset.import_data(self.user, self.upload, 0)

        utils.wait()

        new_data = {"data": ["5", "A", "B", "C"]}

        response = self.client.post(
            "/api/1.0/dataset/%s/data/" % self.dataset.slug,
            content_type="application/json",
            data=json.dumps(new_data),
            **self.auth_headers
        )

        self.assertEqual(response.status_code, 201)
        body = json.loads(response.content)
        self.assertEqual(body["data"], new_data["data"])
        self.assertIn("dataset", body)
        self.assertIn("resource_uri", body)
        self.assertIn("external_id", body)

        # Refresh
        self.dataset = Dataset.objects.get(id=self.dataset.id)

        self.assertEqual(self.dataset.row_count, 5)
Code example #5
File: test_api_data.py Project: eob/panda
    def test_deleted_search(self):
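        # A deleted row should no longer appear in cross-dataset search results.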
        self.dataset.import_data(self.user, self.upload, 0)

        utils.wait()

        response = self.client.get('/api/1.0/dataset/%s/data/' % self.dataset.slug, **self.auth_headers)

        self.assertEqual(response.status_code, 200)
        body = json.loads(response.content)

        # Dataset objects were returned
        data = body['objects'][0]

        response = self.client.delete('/api/1.0/dataset/%s/data/%s/' % (self.dataset.slug, data['external_id']), content_type='application/json', **self.auth_headers)

        self.assertEqual(response.status_code, 204)

        response = self.client.get('/api/1.0/data/?q=%s' % data['data'][0], **self.auth_headers)

        self.assertEqual(response.status_code, 200)

        body = json.loads(response.content)

        # Verify that the group count is correct
        self.assertEqual(body['meta']['total_count'], 0)
        self.assertEqual(len(body['objects']), 0)
Code example #6
File: test_api_data.py Project: eob/panda
    def test_updated_search(self):
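        # Results from the /data/ endpoint come back grouped per dataset, hence the
        # nested body['objects'][0]['objects'][0] lookup below.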
        self.dataset.import_data(self.user, self.upload, 0)

        utils.wait()

        update_data = {
            'dataset': '/api/1.0/dataset/%s/' % self.dataset.slug,
            'data': ['5', 'Flibbity!', 'B', 'C']
        }

        response = self.client.get('/api/1.0/data/', **self.auth_headers)

        self.assertEqual(response.status_code, 200)
        body = json.loads(response.content)

        # Dataset objects were returned
        data = body['objects'][0]['objects'][0]

        response = self.client.put('/api/1.0/dataset/%s/data/%s/' % (self.dataset.slug, data['external_id']), content_type='application/json', data=json.dumps(update_data), **self.auth_headers)

        self.assertEqual(response.status_code, 202)

        response = self.client.get('/api/1.0/data/?q=flibbity', **self.auth_headers)

        self.assertEqual(response.status_code, 200)

        body = json.loads(response.content)

        # Verify that the group count is correct
        self.assertEqual(body['meta']['total_count'], 1)
        self.assertEqual(len(body['objects']), 1)
Code example #7
File: test_api_data.py Project: eads/panda
    def test_updated_search(self):
        self.dataset.import_data(self.user, self.upload, 0)

        utils.wait()

        update_data = {"dataset": "/api/1.0/dataset/%s/" % self.dataset.slug, "data": ["5", "Flibbity!", "B", "C"]}

        response = self.client.get("/api/1.0/data/", **self.auth_headers)

        self.assertEqual(response.status_code, 200)
        body = json.loads(response.content)

        # Dataset objects were returned
        data = body["objects"][0]["objects"][0]

        response = self.client.put(
            "/api/1.0/dataset/%s/data/%s/" % (self.dataset.slug, data["external_id"]),
            content_type="application/json",
            data=json.dumps(update_data),
            **self.auth_headers
        )

        self.assertEqual(response.status_code, 202)

        response = self.client.get("/api/1.0/data/?q=flibbity", **self.auth_headers)

        self.assertEqual(response.status_code, 200)

        body = json.loads(response.content)

        # Verify that the group count is correct
        self.assertEqual(body["meta"]["total_count"], 1)
        self.assertEqual(len(body["objects"]), 1)
Code example #8
File: test_api_dataset.py Project: mivanov/panda
    def test_search_data(self):
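        # Per-dataset search: the response carries the dataset's own attributes and
        # only rows from this dataset, even though a second dataset holds the same data.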
        self.dataset.import_data(self.user, self.upload, 0)

        utils.wait()

        # Refetch dataset so that attributes will be updated
        self.dataset = Dataset.objects.get(id=self.dataset.id)

        # Import second dataset so we can make sure only one is matched
        second_dataset = Dataset.objects.create(
            name='Second dataset',
            creator=self.dataset.creator)

        # Bending the rules again...
        second_dataset.import_data(self.user, self.upload, 0)

        utils.wait()

        response = self.client.get('/api/1.0/dataset/%s/data/?q=Christopher' % self.dataset.slug, **self.auth_headers)

        self.assertEqual(response.status_code, 200)

        body = json.loads(response.content)
        
        # Verify that correct attributes of the dataset are attached
        self.assertEqual(int(body['id']), self.dataset.id)
        self.assertEqual(body['name'], self.dataset.name)
        self.assertEqual(body['row_count'], self.dataset.row_count)
        self.assertEqual(body['column_schema'], self.dataset.column_schema)

        # Test that only one dataset was matched
        self.assertEqual(body['meta']['total_count'], 1)
        self.assertEqual(len(body['objects']), 1)
        self.assertEqual(body['objects'][0]['data'][1], 'Christopher')
Code example #9
File: test_api_data.py Project: eob/panda
    def test_update(self):
        self.dataset.import_data(self.user, self.upload, 0)

        utils.wait()

        update_data = {
            'dataset': '/api/1.0/dataset/%s/' % self.dataset.slug,
            'data': ['5', 'A', 'B', 'C']
        }

        response = self.client.get('/api/1.0/dataset/%s/data/' % self.dataset.slug, **self.auth_headers)

        self.assertEqual(response.status_code, 200)
        body = json.loads(response.content)

        data = body['objects'][0]

        response = self.client.put('/api/1.0/dataset/%s/data/%s/' % (self.dataset.slug, data['external_id']), content_type='application/json', data=json.dumps(update_data), **self.auth_headers)

        self.assertEqual(response.status_code, 202)
        body = json.loads(response.content)
        self.assertEqual(body['data'], update_data['data'])
        self.assertEqual(body['dataset'], data['dataset'])
        self.assertEqual(body['resource_uri'], data['resource_uri'])
        self.assertEqual(body['external_id'], data['external_id'])
Code example #10
File: test_api_dataset.py Project: mivanov/panda
    def test_import_data(self):
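        # Importing is triggered through the import endpoint and runs asynchronously
        # as the 'panda.tasks.import.csv' task; utils.wait() lets it finish.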
        response = self.client.get('/api/1.0/dataset/%s/import/%i/' % (self.dataset.slug, self.upload.id), **self.auth_headers)

        utils.wait() 

        self.assertEqual(response.status_code, 200)

        body = json.loads(response.content)

        self.assertNotEqual(body['current_task'], None)
        self.assertEqual(body['current_task']['task_name'], 'panda.tasks.import.csv')
        
        # Refetch dataset so that attributes will be updated
        self.dataset = Dataset.objects.get(id=self.dataset.id)

        self.assertEqual(self.dataset.row_count, 4)
        self.assertEqual([c['name'] for c in self.dataset.column_schema], self.upload.columns)
        self.assertEqual(self.dataset.initial_upload, self.upload)
        self.assertEqual(self.dataset.sample_data, self.upload.sample_data)

        task = self.dataset.current_task

        self.assertNotEqual(task, None)
        self.assertEqual(task.status, 'SUCCESS')
        self.assertEqual(task.task_name, 'panda.tasks.import.csv')
        self.assertNotEqual(task.start, None)
        self.assertNotEqual(task.end, None)
        self.assertEqual(task.traceback, None)

        self.assertEqual(solr.query(settings.SOLR_DATA_CORE, 'Christopher')['response']['numFound'], 1)
Code example #11
File: test_dataset.py Project: eob/panda
    def test_export_csv(self):
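        # Export runs as the 'panda.tasks.export.csv' task; the exported file should
        # match the originally imported CSV byte for byte.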
        self.dataset.import_data(self.user, self.upload)

        utils.wait()

        self.dataset.export_data(self.user, 'test_export.csv')

        task = self.dataset.current_task

        self.assertNotEqual(task, None)
        self.assertNotEqual(task.id, None)
        self.assertEqual(task.task_name, 'panda.tasks.export.csv')

        utils.wait()

        # Refresh from database
        task = TaskStatus.objects.get(id=task.id)

        self.assertEqual(task.status, 'SUCCESS')
        self.assertNotEqual(task.start, None)
        self.assertNotEqual(task.end, None)
        self.assertEqual(task.traceback, None)

        with open(os.path.join(utils.TEST_DATA_PATH, utils.TEST_DATA_FILENAME), 'r') as f:
            imported_csv = f.read()

        with open(os.path.join(settings.EXPORT_ROOT, 'test_export.csv')) as f:
            exported_csv = f.read()

        self.assertEqual(imported_csv, exported_csv)
Code example #12
File: test_dataset.py Project: eob/panda
    def test_import_excel_xlsx(self):
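        # Importing an .xlsx upload runs asynchronously as 'panda.tasks.import.xlsx';
        # once the task succeeds the rows are queryable in Solr.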
        xlsx_upload = utils.get_test_data_upload(self.user, self.dataset, utils.TEST_EXCEL_XLSX_FILENAME)

        self.dataset.import_data(self.user, xlsx_upload)

        task = self.dataset.current_task

        self.assertNotEqual(task, None)
        self.assertNotEqual(task.id, None)
        self.assertEqual(task.task_name, 'panda.tasks.import.xlsx')

        utils.wait()

        # Refresh from database
        dataset = Dataset.objects.get(id=self.dataset.id)
        xlsx_upload = DataUpload.objects.get(id=xlsx_upload.id)
        task = TaskStatus.objects.get(id=task.id)

        self.assertEqual(dataset.columns, ['id', 'first_name', 'last_name', 'employer'])
        self.assertEqual(dataset.row_count, 4)
        self.assertEqual(xlsx_upload.imported, True)
        self.assertEqual(task.status, 'SUCCESS')
        self.assertNotEqual(task.start, None)
        self.assertNotEqual(task.end, None)
        self.assertEqual(task.traceback, None)

        self.assertEqual(solr.query(settings.SOLR_DATA_CORE, 'Christopher')['response']['numFound'], 1)
Code example #13
File: test_dataset.py Project: eob/panda
    def test_import_additional_data_different_columns(self):
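        # An additional upload whose columns differ from the dataset's schema is
        # rejected with DataImportError and leaves the dataset unchanged.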
        self.dataset.import_data(self.user, self.upload)

        utils.wait()

        xls_upload = utils.get_test_data_upload(self.user, self.dataset, utils.TEST_XLS_FILENAME)
        xls_upload.columns = ['id', 'first_name', 'last_name', 'employer', 'MORE COLUMNS!']
        xls_upload.save()
        
        # Refresh from database
        self.dataset = Dataset.objects.get(id=self.dataset.id)

        self.assertRaises(DataImportError, self.dataset.import_data, self.user, xls_upload)

        # Refresh from database
        dataset = Dataset.objects.get(id=self.dataset.id)
        upload = DataUpload.objects.get(id=self.upload.id)
        xls_upload = DataUpload.objects.get(id=xls_upload.id)
        
        self.assertEqual(dataset.columns, ['id', 'first_name', 'last_name', 'employer'])
        self.assertEqual(dataset.row_count, 4)
        self.assertEqual(upload.imported, True)
        self.assertEqual(xls_upload.imported, False)

        self.assertEqual(solr.query(settings.SOLR_DATA_CORE, 'Christopher')['response']['numFound'], 1)
Code example #14
File: test_api_dataset.py Project: mivanov/panda
    def test_export_data(self):
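        # Hitting the export endpoint queues a 'panda.tasks.export.csv' task; after
        # waiting, the dataset's current_task should have finished successfully.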
        self.dataset.import_data(self.user, self.upload, 0)

        utils.wait()

        response = self.client.get('/api/1.0/dataset/%s/export/' % self.dataset.slug, **self.auth_headers)

        utils.wait() 

        self.assertEqual(response.status_code, 200)

        body = json.loads(response.content)

        self.assertNotEqual(body['current_task'], None)
        self.assertEqual(body['current_task']['task_name'], 'panda.tasks.export.csv')
        
        # Refetch dataset so that attributes will be updated
        self.dataset = Dataset.objects.get(id=self.dataset.id)

        task = self.dataset.current_task

        self.assertNotEqual(task, None)
        self.assertEqual(task.status, 'SUCCESS')
        self.assertEqual(task.task_name, 'panda.tasks.export.csv')
        self.assertNotEqual(task.start, None)
        self.assertNotEqual(task.end, None)
        self.assertEqual(task.traceback, None)
Code example #15
File: test_api_data.py Project: eads/panda
    def test_deleted_search(self):
        self.dataset.import_data(self.user, self.upload, 0)

        utils.wait()

        response = self.client.get("/api/1.0/dataset/%s/data/" % self.dataset.slug, **self.auth_headers)

        self.assertEqual(response.status_code, 200)
        body = json.loads(response.content)

        # Dataset objects were returned
        data = body["objects"][0]

        response = self.client.delete(
            "/api/1.0/dataset/%s/data/%s/" % (self.dataset.slug, data["external_id"]),
            content_type="application/json",
            **self.auth_headers
        )

        self.assertEqual(response.status_code, 204)

        response = self.client.get("/api/1.0/data/?q=%s" % data["data"][0], **self.auth_headers)

        self.assertEqual(response.status_code, 200)

        body = json.loads(response.content)

        # Verify that the group count is correct
        self.assertEqual(body["meta"]["total_count"], 0)
        self.assertEqual(len(body["objects"]), 0)
Code example #16
File: test_dataset.py Project: mivanov/panda
    def test_import_additional_data_same_columns(self):
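        # A second upload with identical columns appends its rows: row_count doubles
        # to 8 and both uploads end up marked as imported.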
        self.dataset.import_data(self.user, self.upload)

        utils.wait()

        xls_upload = utils.get_test_data_upload(self.user, self.dataset, utils.TEST_XLS_FILENAME)
        
        # Refresh from database
        self.dataset = Dataset.objects.get(id=self.dataset.id)

        self.dataset.import_data(self.user, xls_upload)

        # Refresh from database
        dataset = Dataset.objects.get(id=self.dataset.id)
        upload = DataUpload.objects.get(id=self.upload.id)
        xls_upload = DataUpload.objects.get(id=xls_upload.id)
        
        self.assertEqual([c['name'] for c in dataset.column_schema], ['id', 'first_name', 'last_name', 'employer'])
        self.assertEqual([c['type'] for c in dataset.column_schema], ['int', 'unicode', 'unicode', 'unicode'])
        self.assertEqual([c['indexed_name'] for c in dataset.column_schema], [None, None, None, None])
        self.assertEqual(dataset.row_count, 8)
        self.assertEqual(upload.imported, True)
        self.assertEqual(xls_upload.imported, True)
        self.assertEqual(dataset.locked, False)

        self.assertEqual(solr.query(settings.SOLR_DATA_CORE, 'Christopher')['response']['numFound'], 2)
Code example #17
File: test_dataset.py Project: mivanov/panda
    def test_reindex(self):
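        # Reindexing with typed_columns creates typed Solr fields (e.g. column_int_id)
        # only for the columns flagged True; the rest keep indexed_name = None.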
        self.dataset.import_data(self.user, self.upload)

        utils.wait()

        # Refresh from database
        dataset = Dataset.objects.get(id=self.dataset.id)

        dataset.reindex_data(self.user, typed_columns=[True, False, True, True])

        utils.wait()

        # Refresh from database
        dataset = Dataset.objects.get(id=self.dataset.id)
        task = dataset.current_task

        self.assertEqual(task.status, 'SUCCESS')
        self.assertNotEqual(task.start, None)
        self.assertNotEqual(task.end, None)
        self.assertEqual(task.traceback, None)

        self.assertEqual([c['name'] for c in dataset.column_schema], ['id', 'first_name', 'last_name', 'employer'])
        self.assertEqual([c['type'] for c in dataset.column_schema], ['int', 'unicode', 'unicode', 'unicode'])
        self.assertEqual([c['indexed'] for c in dataset.column_schema], [True, False, True, True])
        self.assertEqual([c['indexed_name'] for c in dataset.column_schema], ['column_int_id', None, 'column_unicode_last_name', 'column_unicode_employer'])
        self.assertEqual([c['min'] for c in dataset.column_schema], [1, None, None, None])
        self.assertEqual([c['max'] for c in dataset.column_schema], [4, None, None, None])
        self.assertEqual(dataset.row_count, 4)
        self.assertEqual(dataset.locked, False)

        self.assertEqual(solr.query(settings.SOLR_DATA_CORE, 'column_int_id:2')['response']['numFound'], 1)
        self.assertEqual(solr.query(settings.SOLR_DATA_CORE, 'column_unicode_last_name:Germuska')['response']['numFound'], 1)
        self.assertEqual(solr.query(settings.SOLR_DATA_CORE, 'column_unicode_first_name:Joseph')['response']['numFound'], 0)
Code example #18
File: test_dataset.py Project: igrowbeards/panda
    def test_reindex_complex(self):
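        # Reindexing a CSV that exercises many column types: every column is typed,
        # producing type-prefixed Solr fields that support boolean, phrase and
        # date-range queries.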
        upload = utils.get_test_data_upload(self.user, self.dataset, filename=utils.TEST_CSV_TYPES_FILENAME)
        self.dataset.import_data(self.user, upload)

        utils.wait()

        # Refresh from database
        dataset = Dataset.objects.get(id=self.dataset.id)

        dataset.reindex_data(self.user, typed_columns=[True for c in upload.columns])

        utils.wait()

        # Refresh from database
        dataset = Dataset.objects.get(id=self.dataset.id)

        self.assertEqual([c['name'] for c in dataset.column_schema], ['text', 'date', 'integer', 'boolean', 'float', 'time', 'datetime', 'empty_column', ''])
        self.assertEqual([c['type'] for c in dataset.column_schema], ['unicode', 'datetime', 'int', 'bool', 'float', 'datetime', 'datetime', 'NoneType', 'unicode'])
        self.assertEqual([c['indexed'] for c in dataset.column_schema], [True for c in upload.columns])
        self.assertEqual([c['indexed_name'] for c in dataset.column_schema], ['column_unicode_text', 'column_datetime_date', 'column_int_integer', 'column_bool_boolean', 'column_float_float', 'column_datetime_time', 'column_datetime_datetime', 'column_NoneType_empty_column', 'column_unicode_'])
        self.assertEqual(dataset.row_count, 5)
        self.assertEqual(dataset.locked, False)

        self.assertEqual(solr.query(settings.SOLR_DATA_CORE, 'column_bool_boolean:true')['response']['numFound'], 2)
        self.assertEqual(solr.query(settings.SOLR_DATA_CORE, 'column_unicode_text:"Chicago Tribune"')['response']['numFound'], 1)
        self.assertEqual(solr.query(settings.SOLR_DATA_CORE, 'column_datetime_datetime:[1971-01-01T01:01:01Z TO NOW]')['response']['numFound'], 1)
        self.assertEqual(solr.query(settings.SOLR_DATA_CORE, 'column_datetime_time:[9999-12-31T04:13:01Z TO *]')['response']['numFound'], 2)
        self.assertEqual(solr.query(settings.SOLR_DATA_CORE, 'column_datetime_date:[1971-01-01T00:00:00Z TO NOW]')['response']['numFound'], 1)
Code example #19
File: test_api_data.py Project: eob/panda
    def test_update_bulk(self):
        self.dataset.import_data(self.user, self.upload, 0)

        utils.wait()

        new_data = { 'objects': [
            {
                'data': ['1', 'A', 'B', 'C'],
                'external_id': '1'
            },
            {
                'data': ['2', 'D', 'E', 'F'],
                'external_id': '2'
            }
        ]}

        response = self.client.put('/api/1.0/dataset/%s/data/' % self.dataset.slug, content_type='application/json', data=json.dumps(new_data), **self.auth_headers)

        self.assertEqual(response.status_code, 202)
        body = json.loads(response.content)
        self.assertEqual(len(body['objects']), 2)

        # Refresh
        self.dataset = Dataset.objects.get(id=self.dataset.id)

        self.assertEqual(self.dataset.row_count, 4)
Code example #20
File: test_api_data.py Project: eads/panda
    def test_get_404(self):
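        # Requesting a data row that does not exist should return 404.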
        self.dataset.import_data(self.user, self.upload, 0)

        utils.wait()

        response = self.client.get("/api/1.0/dataset/%s/data/not-a-valid-id/" % self.dataset.id, **self.auth_headers)
        self.assertEqual(response.status_code, 404)
Code example #21
File: test_api_dataset.py Project: mivanov/panda
    def test_get(self):
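        # GET on a dataset returns its metadata, the serialized current_task and
        # links to its uploads.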
        # Import so that there will be a task object
        self.dataset.import_data(self.user, self.upload, 0)

        utils.wait()

        # Refetch dataset so that attributes will be updated
        self.dataset = Dataset.objects.get(id=self.dataset.id)

        response = self.client.get('/api/1.0/dataset/%s/' % self.dataset.slug, **self.auth_headers)

        self.assertEqual(response.status_code, 200)

        body = json.loads(response.content)

        self.assertEqual(body['name'], self.dataset.name)
        self.assertEqual(body['description'], self.dataset.description)
        self.assertEqual(body['row_count'], self.dataset.row_count)
        self.assertEqual(body['sample_data'], self.dataset.sample_data)
        self.assertEqual(body['column_schema'], self.dataset.column_schema)
        self.assertEqual(body['creator']['email'], self.dataset.creator.email)

        task_response = self.client.get('/api/1.0/task/%i/' % self.dataset.current_task.id, **self.auth_headers)

        self.assertEqual(task_response.status_code, 200)

        self.assertEqual(body['current_task'], json.loads(task_response.content))

        self.assertEqual(len(body['related_uploads']), 0)
        self.assertEqual(len(body['data_uploads']), 1)
        self.assertEqual(body['initial_upload'], '/api/1.0/data_upload/%i/' % self.dataset.initial_upload.id)
Code example #22
File: test_api_data.py Project: eads/panda
    def test_search_meta(self):
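        # Checks pagination metadata on cross-dataset search: limit, offset,
        # total_count and the next/previous links.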
        self.dataset.import_data(self.user, self.upload, 0)

        utils.wait()

        # Import second dataset so we can make sure both match
        second_dataset = Dataset.objects.create(name="Second dataset", creator=self.dataset.creator)

        second_dataset.import_data(self.user, self.upload, 0)

        utils.wait()

        response = self.client.get("/api/1.0/data/?q=Ryan&limit=1", **self.auth_headers)

        self.assertEqual(response.status_code, 200)

        body = json.loads(response.content)

        # Verify that the group count is correct
        self.assertEqual(body["meta"]["limit"], 1)
        self.assertEqual(body["meta"]["offset"], 0)
        self.assertEqual(body["meta"]["total_count"], 2)
        self.assertIs(body["meta"]["previous"], None)
        self.assertIsNot(body["meta"]["next"], None)
        self.assertEqual(len(body["objects"]), 1)
Code example #23
File: test_api_dataset.py Project: mivanov/panda
    def test_reindex_data(self):
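        # Reindexing via the API with a comma-separated typed_columns parameter;
        # afterwards typed queries such as column_int_id:3 resolve in Solr.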
        response = self.client.get('/api/1.0/dataset/%s/import/%i/' % (self.dataset.slug, self.upload.id), **self.auth_headers)

        utils.wait() 

        response = self.client.get('/api/1.0/dataset/%s/reindex/?typed_columns=True,False,False,False' % (self.dataset.slug), **self.auth_headers)

        utils.wait() 

        self.assertEqual(response.status_code, 200)
        
        # Refetch dataset so that attributes will be updated
        self.dataset = Dataset.objects.get(id=self.dataset.id)

        self.assertEqual(self.dataset.row_count, 4)
        self.assertEqual([c['name'] for c in self.dataset.column_schema], self.upload.columns)
        self.assertEqual(self.dataset.initial_upload, self.upload)
        self.assertEqual(self.dataset.sample_data, self.upload.sample_data)

        task = self.dataset.current_task

        self.assertNotEqual(task, None)
        self.assertEqual(task.status, 'SUCCESS')
        self.assertEqual(task.task_name, 'panda.tasks.reindex')
        self.assertNotEqual(task.start, None)
        self.assertNotEqual(task.end, None)
        self.assertEqual(task.traceback, None)

        self.assertEqual(solr.query(settings.SOLR_DATA_CORE, 'column_int_id:3')['response']['numFound'], 1)
Code example #24
File: test_dataset.py Project: mivanov/panda
    def test_import_xls(self):
        xls_upload = utils.get_test_data_upload(self.user, self.dataset, utils.TEST_XLS_FILENAME)

        self.dataset.import_data(self.user, xls_upload)

        task = self.dataset.current_task

        self.assertNotEqual(task, None)
        self.assertNotEqual(task.id, None)
        self.assertEqual(task.task_name, 'panda.tasks.import.xls')

        utils.wait()

        # Refresh from database
        dataset = Dataset.objects.get(id=self.dataset.id)
        xls_upload = DataUpload.objects.get(id=xls_upload.id)
        task = TaskStatus.objects.get(id=task.id)

        self.assertEqual([c['name'] for c in dataset.column_schema], ['id', 'first_name', 'last_name', 'employer'])
        self.assertEqual([c['type'] for c in dataset.column_schema], ['int', 'unicode', 'unicode', 'unicode'])
        self.assertEqual([c['indexed_name'] for c in dataset.column_schema], [None, None, None, None])
        self.assertEqual(dataset.row_count, 4)
        self.assertEqual(xls_upload.imported, True)
        self.assertEqual(task.status, 'SUCCESS')
        self.assertNotEqual(task.start, None)
        self.assertNotEqual(task.end, None)
        self.assertEqual(task.traceback, None)
        self.assertEqual(dataset.locked, False)

        self.assertEqual(solr.query(settings.SOLR_DATA_CORE, 'Christopher')['response']['numFound'], 1)
Code example #25
File: test_dataset.py Project: eob/panda
    def test_get_row(self):
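        # get_row looks a row up by its external_id; the stored data is a
        # JSON-encoded list.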
        self.dataset.import_data(self.user, self.upload, 0)

        utils.wait()

        row = self.dataset.get_row('1')

        self.assertEqual(row['external_id'], '1')
        self.assertEqual(json.loads(row['data']), ['1', 'Brian', 'Boyer', 'Chicago Tribune'])
Code example #26
File: test_api_dataset.py Project: mivanov/panda
    def test_reindex_data_invalid_columns(self):
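        # A typed_columns list that does not match the dataset's column count
        # (three values for four columns) is rejected with 400.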
        response = self.client.get('/api/1.0/dataset/%s/import/%i/' % (self.dataset.slug, self.upload.id), **self.auth_headers)

        utils.wait() 

        response = self.client.get('/api/1.0/dataset/%s/reindex/?typed_columns=True,False,False' % (self.dataset.slug), **self.auth_headers)

        utils.wait() 

        self.assertEqual(response.status_code, 400)
Code example #27
File: test_dataset.py Project: mivanov/panda
    def test_generate_typed_column_names_some(self):
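        # update_indexed_names assigns a typed Solr field name only to columns
        # marked as indexed; unindexed columns stay None.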
        self.dataset.import_data(self.user, self.upload)

        utils.wait()

        typed_columns = [True, False, True, True]

        for i, c in enumerate(self.dataset.column_schema):
            self.dataset.column_schema[i]['indexed'] = typed_columns.pop(0)

        self.dataset.column_schema = update_indexed_names(self.dataset.column_schema)

        self.assertEqual([c['indexed_name'] for c in self.dataset.column_schema], ['column_int_id', None, 'column_unicode_last_name', 'column_unicode_employer'])
Code example #28
File: test_api_data.py Project: eads/panda
    def test_search_boolean_query(self):
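        # A boolean query ('Brian and Tribune') narrows the search to a single
        # matching result.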
        self.dataset.import_data(self.user, self.upload, 0)

        utils.wait()

        response = self.client.get("/api/1.0/data/?q=Brian+and+Tribune", **self.auth_headers)

        self.assertEqual(response.status_code, 200)

        body = json.loads(response.content)

        self.assertEqual(body["meta"]["total_count"], 1)
        self.assertEqual(len(body["objects"]), 1)
Code example #29
File: test_api_data.py Project: eads/panda
    def test_delete_list(self):
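        # DELETE on the dataset's data list removes every row; row_count drops to 0.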
        self.dataset.import_data(self.user, self.upload, 0)

        utils.wait()

        response = self.client.delete("/api/1.0/dataset/%s/data/" % self.dataset.slug, **self.auth_headers)

        self.assertEqual(response.status_code, 204)

        # Refresh
        self.dataset = Dataset.objects.get(id=self.dataset.id)

        self.assertEqual(self.dataset.row_count, 0)
Code example #30
File: test_dataset.py Project: igrowbeards/panda
    def test_generate_typed_column_names_conflict(self):
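        # When several indexed columns share the same name, the generated
        # indexed_names are de-duplicated with a numeric suffix (column_unicode_test2).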
        self.dataset.import_data(self.user, self.upload)

        utils.wait()

        typed_columns = [True, False, True, True]

        for i, c in enumerate(self.dataset.column_schema):
            self.dataset.column_schema[i]['name'] = 'test'
            self.dataset.column_schema[i]['indexed'] = typed_columns.pop(0)

        self.dataset._generate_typed_column_names()

        self.assertEqual([c['indexed_name'] for c in self.dataset.column_schema], ['column_int_test', None, 'column_unicode_test', 'column_unicode_test2'])