def test_export_to_json_filename_save_data_in_correct_format(self):
    """Exported JSON must use native JSON types where possible."""
    temp = tempfile.NamedTemporaryFile(delete=False, mode="wb")
    self.files_to_delete.append(temp.name)
    rows.export_to_json(utils.table, temp.name)

    with open(temp.name) as fobj:
        imported_json = json.load(fobj)

    expected_types = {
        "float_column": float,
        "decimal_column": float,
        "bool_column": bool,
        "integer_column": int,
        "date_column": six.text_type,
        "datetime_column": six.text_type,
        "percent_column": six.text_type,
        "unicode_column": six.text_type,
    }
    observed = defaultdict(list)
    for record in imported_json:
        for name, value in record.items():
            observed[name].append(type(value))

    # We test if the JSON was created serializing all the fields correctly
    # (some as native JSON values, like int and float) and others needed to
    # be serialized, like date, datetime etc.
    for name, seen_types in observed.items():
        if name == "unicode_column":
            expected = Counter({expected_types[name]: 7})
        else:
            expected = Counter({type(None): 1, expected_types[name]: 6})
        self.assertEqual(Counter(seen_types), expected)
def test_export_to_json_filename(self):
    # TODO: may test file contents
    temp = tempfile.NamedTemporaryFile(delete=False, mode='wb')
    self.files_to_delete.append(temp.name)

    # Export then re-import: the round trip must preserve the table.
    rows.export_to_json(utils.table, temp.name)
    round_tripped = rows.import_from_json(temp.name)
    self.assert_table_equal(round_tripped, utils.table)
def test_export_to_json_filename(self):
    # TODO: may test file contents
    temp = tempfile.NamedTemporaryFile(delete=False, mode="wb")
    self.files_to_delete.append(temp.name)
    rows.export_to_json(utils.table, temp.name)
    # Importing the freshly exported file must reproduce the original.
    reimported = rows.import_from_json(temp.name)
    self.assert_table_equal(reimported, utils.table)
def test_export_to_json_fobj(self):
    # TODO: may test with codecs.open passing an encoding
    # TODO: may test file contents
    temp = tempfile.NamedTemporaryFile(delete=False)
    self.files_to_delete.append(temp.name)

    # Export through the file object, import back through the path.
    rows.export_to_json(utils.table, temp.file)
    result = rows.import_from_json(temp.name)
    self.assert_table_equal(result, utils.table)
def test_issue_168(self):
    """Round-trip a table containing a JSONField column (issue #168)."""
    temp = tempfile.NamedTemporaryFile(delete=False, mode="wb")
    filename = "{}.{}".format(temp.name, self.file_extension)
    self.files_to_delete.append(filename)

    fields = OrderedDict([("jsoncolumn", rows.fields.JSONField)])
    table = rows.Table(fields=fields)
    table.append({"jsoncolumn": '{"python": 42}'})
    rows.export_to_json(table, filename)

    reloaded = rows.import_from_json(filename)
    self.assert_table_equal(table, reloaded)
def test_issue_168(self):
    """A JSONField column must survive an export/import round trip."""
    temp = tempfile.NamedTemporaryFile(delete=False, mode='wb')
    filename = '{}.{}'.format(temp.name, self.file_extension)
    self.files_to_delete.append(filename)

    table = rows.Table(
        fields=OrderedDict([('jsoncolumn', rows.fields.JSONField)])
    )
    table.append({'jsoncolumn': '{"python": 42}'})

    rows.export_to_json(table, filename)
    self.assert_table_equal(table, rows.import_from_json(filename))
def test_export_to_json_uses_prepare_to_export(self, mocked_prepare_to_export):
    """export_to_json must delegate to prepare_to_export exactly once."""
    temp = tempfile.NamedTemporaryFile(delete=False, mode="wb")
    self.files_to_delete.append(temp.name)
    kwargs = {"test": 123, "parameter": 3.14}
    mocked_prepare_to_export.return_value = iter([utils.table.fields.keys()])

    rows.export_to_json(utils.table, temp.name, **kwargs)

    self.assertTrue(mocked_prepare_to_export.called)
    self.assertEqual(mocked_prepare_to_export.call_count, 1)
    positional, keyword = mocked_prepare_to_export.call_args
    self.assertEqual(positional, (utils.table,))
    self.assertEqual(keyword, kwargs)
def test_export_to_json_uses_prepare_to_export(self, mocked_prepare_to_export):
    """The exporter must forward table and kwargs to prepare_to_export."""
    temp = tempfile.NamedTemporaryFile(delete=False, mode="wb")
    self.files_to_delete.append(temp.name)
    extra = {"test": 123, "parameter": 3.14}
    mocked_prepare_to_export.return_value = iter([utils.table.fields.keys()])

    rows.export_to_json(utils.table, temp.name, **extra)

    self.assertTrue(mocked_prepare_to_export.called)
    self.assertEqual(1, mocked_prepare_to_export.call_count)
    call = mocked_prepare_to_export.call_args
    self.assertEqual((utils.table,), call[0])
    self.assertEqual(extra, call[1])
def test_export_to_json_indent(self):
    """indent=2 must produce pretty-printed, two-space-indented JSON."""
    temp = tempfile.NamedTemporaryFile(delete=False, mode='rb+')
    self.files_to_delete.append(temp.name)

    table = rows.Table(fields=utils.table.fields)
    table.append(utils.table[0]._asdict())
    rows.export_to_json(table, temp.name, indent=2)

    temp.file.seek(0)
    contents = temp.file.read().strip().replace(b'\r\n', b'\n')
    lines = contents.splitlines()
    self.assertEqual(b'[', lines[0])
    self.assertEqual(b'  {', lines[1])
    # Every field line sits one level deeper than the object brace.
    for middle_line in lines[2:-2]:
        self.assertTrue(middle_line.startswith(b'    '))
    self.assertEqual(b'  }', lines[-2])
    self.assertEqual(b']', lines[-1])
def test_export_to_json_indent(self):
    """Exporting with indent=2 yields a pretty-printed JSON document."""
    temp = tempfile.NamedTemporaryFile(delete=False, mode="rb+")
    self.files_to_delete.append(temp.name)
    one_row_table = rows.Table(fields=utils.table.fields)
    one_row_table.append(utils.table[0]._asdict())
    rows.export_to_json(one_row_table, temp.name, indent=2)

    temp.file.seek(0)
    normalized = temp.file.read().strip().replace(b"\r\n", b"\n")
    output_lines = normalized.splitlines()

    self.assertEqual(output_lines[0], b"[")
    self.assertEqual(output_lines[1], b"  {")
    for field_line in output_lines[2:-2]:
        # Each field is indented two levels (four spaces).
        self.assertTrue(field_line.startswith(b"    "))
    self.assertEqual(output_lines[-2], b"  }")
    self.assertEqual(output_lines[-1], b"]")
def test_export_to_json_indent(self):
    """Exporting with indent=2 yields a pretty-printed JSON document.

    NamedTemporaryFile defaults to binary mode, so ``read()`` returns
    ``bytes`` on Python 3; the comparisons therefore use byte literals.
    (The previous ``str`` literals only worked on Python 2, where str
    is bytes — the assertions would always fail on Python 3.)
    """
    temp = tempfile.NamedTemporaryFile(delete=False)
    self.files_to_delete.append(temp.name)
    table = rows.Table(fields=utils.table.fields)
    table.append(utils.table[0]._asdict())
    rows.export_to_json(table, temp.name, indent=2)

    temp.file.seek(0)
    result = temp.file.read().strip().replace(b'\r\n', b'\n').splitlines()
    self.assertEqual(result[0], b'[')
    self.assertEqual(result[1], b'  {')
    for line in result[2:-2]:
        # Each field line is indented one level deeper than the brace.
        self.assertTrue(line.startswith(b'    '))
    self.assertEqual(result[-2], b'  }')
    self.assertEqual(result[-1], b']')
def __call__(self):
    """Render the current view's table as a downloadable JSON attachment."""
    view = getMultiAdapter((self.context, self.request), name='view')
    exported = rows.export_to_json(view.table())
    filename = "%s.json" % view.filename_prefix()

    response = self.request.response
    response.setHeader('Content-Type',
                       '"%s"' % EXTENSIONS_TYPES.get('json'))
    response.setHeader('Content-Disposition',
                       'attachment; filename="%s"' % filename)
    return exported
def test_export_to_json_filename_save_data_in_correct_format(self):
    """Each exported column must serialize to the proper JSON type.

    Uses ``six.text_type`` instead of the Python 2-only ``unicode``
    builtin so the test runs on both Python 2 and 3, consistent with the
    other tests in this file that already use ``six.text_type``.
    """
    temp = tempfile.NamedTemporaryFile(delete=False)
    self.files_to_delete.append(temp.name)
    rows.export_to_json(utils.table, temp.name)

    with open(temp.name, 'rb') as fobj:
        imported_json = json.load(fobj)

    for row in imported_json:
        # Native JSON types survive as-is...
        self.assertEqual(type(row['float_column']), float)
        self.assertEqual(type(row['decimal_column']), float)
        self.assertEqual(type(row['bool_column']), bool)
        self.assertEqual(type(row['integer_column']), int)
        # ...while date, datetime, percent etc. are serialized to text.
        self.assertEqual(type(row['date_column']), six.text_type)
        self.assertEqual(type(row['datetime_column']), six.text_type)
        self.assertEqual(type(row['percent_column']), six.text_type)
        self.assertEqual(type(row['unicode_column']), six.text_type)
        self.assertEqual(type(row['null_column']), six.text_type)
def test_export_to_json_filename_save_data_in_correct_format(self):
    """Check that every exported column carries the expected JSON type."""
    temp = tempfile.NamedTemporaryFile(delete=False, mode="wb")
    self.files_to_delete.append(temp.name)
    rows.export_to_json(utils.table, temp.name)
    with open(temp.name) as fobj:
        data = json.load(fobj)

    expected_type_for = {
        "float_column": float,
        "decimal_column": float,
        "bool_column": bool,
        "integer_column": int,
        "date_column": six.text_type,
        "datetime_column": six.text_type,
        "percent_column": six.text_type,
        "unicode_column": six.text_type,
    }
    types_seen = defaultdict(list)
    for row in data:
        for column, cell in row.items():
            types_seen[column].append(type(cell))

    # We test if the JSON was created serializing all the fields correctly
    # (some as native JSON values, like int and float) and others needed to
    # be serialized, like date, datetime etc.
    for column, found in types_seen.items():
        if column == "unicode_column":
            self.assertEqual(
                Counter(found), Counter({expected_type_for[column]: 7})
            )
        else:
            self.assertEqual(
                Counter(found),
                Counter({type(None): 1, expected_type_for[column]: 6}),
            )
def test_export_to_json_uses_export_data(self, mocked_export_data):
    """export_to_json must delegate writing to export_data and return its value."""
    temp = tempfile.NamedTemporaryFile(delete=False, mode="wb")
    self.files_to_delete.append(temp.name)
    kwargs = {"test": 123, "parameter": 3.14}
    mocked_export_data.return_value = 42

    result = rows.export_to_json(utils.table, temp.name, **kwargs)

    self.assertTrue(mocked_export_data.called)
    self.assertEqual(mocked_export_data.call_count, 1)
    self.assertEqual(result, 42)
    positional, keyword = mocked_export_data.call_args
    self.assertEqual(positional[0], temp.name)
    self.assertEqual(keyword, {"mode": "wb"})
def parse(self, stream, media_type=None, parser_context=None) -> List[dict]:
    """Parse a CSV stream into a list of dicts, one per row.

    Charset and CSV dialect may be supplied as media-type parameters,
    e.g. ``text/csv; charset=utf-8; dialect=excel``; they default to
    ``utf-8`` and ``excel`` respectively.
    """
    media_type_params = dict(
        param.strip().split('=') for param in media_type.split(';')[1:]
    )
    charset = media_type_params.get('charset', 'utf-8')
    dialect = media_type_params.get('dialect', 'excel')
    raw = stream.read()
    # NOTE: the previous try/except only re-raised the caught exception,
    # which added nothing; let errors propagate with their traceback.
    table = rows.import_from_csv(
        BytesIO(raw), encoding=charset, dialect=dialect, skip_header=False
    )
    # Round-trip through JSON to get plain dicts instead of rows objects.
    return json.loads(rows.export_to_json(table))
def json_claims_by_kind():
    """Serialize per-kind claim counts as JSON, ordered by label."""
    table = import_from_dicts(count_claims_by_kind())
    table.order_by('label')
    return rows.export_to_json(table)
def json_claims_by_state():
    """Serialize per-state claim counts as JSON, ordered by label."""
    table = import_from_dicts(count_claims_by_state())
    table.order_by('label')
    return rows.export_to_json(table)
def json_claims_by_tag():
    """Serialize per-tag claim counts as JSON, ordered by count."""
    table = import_from_dicts(count_claims_by_tag())
    table.order_by('count')
    return rows.export_to_json(table)