def convert(input_encoding, output_encoding, input_locale, output_locale,
            verify_ssl, order_by, source, destination):
    """Convert a table from `source` to `destination`, optionally sorting it.

    Locale contexts (when given) wrap the import/export so locale-aware
    fields parse and render correctly.
    """
    # TODO: may use sys.stdout.encoding if output_file = '-'
    output_encoding = output_encoding or DEFAULT_OUTPUT_ENCODING

    if input_locale is not None:
        with rows.locale_context(input_locale):
            table = _import_table(source, encoding=input_encoding,
                                  verify_ssl=verify_ssl)
    else:
        table = _import_table(source, encoding=input_encoding,
                              verify_ssl=verify_ssl)

    if order_by is not None:
        order_by = _get_field_names(order_by, table.field_names,
                                    permit_not=True)
        # TODO: use complete list of `order_by` fields
        table.order_by(order_by[0].replace('^', '-'))

    if output_locale is not None:
        with rows.locale_context(output_locale):
            export_to_uri(table, destination, encoding=output_encoding)
    else:
        export_to_uri(table, destination, encoding=output_encoding)
def join(input_encoding, output_encoding, input_locale, output_locale,
         verify_ssl, order_by, keys, sources, destination):
    """Join several tables on comma-separated `keys` and export the result."""
    # TODO: may use sys.stdout.encoding if output_file = '-'
    output_encoding = output_encoding or DEFAULT_OUTPUT_ENCODING
    keys = [key.strip() for key in keys.split(',')]

    if input_locale is not None:
        with rows.locale_context(input_locale):
            tables = [_import_table(source, encoding=input_encoding,
                                    verify_ssl=verify_ssl)
                      for source in sources]
    else:
        tables = [_import_table(source, encoding=input_encoding,
                                verify_ssl=verify_ssl)
                  for source in sources]

    result = rows.join(keys, tables)

    if order_by is not None:
        order_by = _get_field_names(order_by, result.field_names,
                                    permit_not=True)
        # TODO: use complete list of `order_by` fields
        result.order_by(order_by[0].replace('^', '-'))

    if output_locale is not None:
        with rows.locale_context(output_locale):
            export_to_uri(result, destination, encoding=output_encoding)
    else:
        export_to_uri(result, destination, encoding=output_encoding)
def join(input_encoding, output_encoding, input_locale, output_locale,
         verify_ssl, order_by, keys, sources, destination):
    """Join several tables on comma-separated `keys` and export the result.

    NOTE(review): this variant calls `export_to_uri(destination, result, ...)`
    (destination first) and uses `result.fields.keys()` — presumably an older
    `export_to_uri` signature; confirm against the file's `export_to_uri`.
    """
    # TODO: may use sys.stdout.encoding if output_file = '-'
    output_encoding = output_encoding or DEFAULT_OUTPUT_ENCODING
    keys = [key.strip() for key in keys.split(',')]

    if input_locale is not None:
        with rows.locale_context(input_locale):
            tables = [
                _import_table(source, encoding=input_encoding,
                              verify_ssl=verify_ssl)
                for source in sources
            ]
    else:
        tables = [
            _import_table(source, encoding=input_encoding,
                          verify_ssl=verify_ssl)
            for source in sources
        ]

    result = rows.join(keys, tables)

    if order_by is not None:
        order_by = _get_field_names(order_by, result.fields.keys(),
                                    permit_not=True)
        # TODO: use complete list of `order_by` fields
        result.order_by(order_by[0].replace('^', '-'))

    if output_locale is not None:
        with rows.locale_context(output_locale):
            export_to_uri(destination, result, encoding=output_encoding)
    else:
        export_to_uri(destination, result, encoding=output_encoding)
def convert(input_encoding, output_encoding, input_locale, output_locale,
            verify_ssl, order_by, source, destination):
    """Convert `source` to `destination`, optionally sorted by `order_by`.

    NOTE(review): uses the destination-first `export_to_uri` call order and
    `table.fields.keys()` — an older API variant; verify against the helpers
    defined elsewhere in this file.
    """
    # TODO: may use sys.stdout.encoding if output_file = '-'
    output_encoding = output_encoding or DEFAULT_OUTPUT_ENCODING

    if input_locale is not None:
        with rows.locale_context(input_locale):
            table = _import_table(source, encoding=input_encoding,
                                  verify_ssl=verify_ssl)
    else:
        table = _import_table(source, encoding=input_encoding,
                              verify_ssl=verify_ssl)

    if order_by is not None:
        order_by = _get_field_names(order_by, table.fields.keys(),
                                    permit_not=True)
        # TODO: use complete list of `order_by` fields
        table.order_by(order_by[0].replace('^', '-'))

    if output_locale is not None:
        with rows.locale_context(output_locale):
            export_to_uri(destination, table, encoding=output_encoding)
    else:
        export_to_uri(destination, table, encoding=output_encoding)
def convert(input_encoding, output_encoding, input_locale, output_locale,
            source, destination):
    """Convert `source` into `destination` under the given locale contexts.

    NOTE(review): `input_encoding`/`output_encoding` are accepted but unused
    in this variant (encoding is presumably handled by the URI helpers).
    """
    with rows.locale_context(input_locale):
        table = import_from_uri(source)
    with rows.locale_context(output_locale):
        export_to_uri(destination, table)
def sum_(
    input_encoding,
    output_encoding,
    input_locale,
    output_locale,
    verify_ssl,
    order_by,
    fields,
    fields_exclude,
    sources,
    destination,
):
    """Concatenate (sum) several tables into one and export the result."""
    import_fields = _get_import_fields(fields, fields_exclude)

    if input_locale is not None:
        with rows.locale_context(input_locale):
            tables = [
                _import_table(
                    source,
                    encoding=input_encoding,
                    verify_ssl=verify_ssl,
                    import_fields=import_fields,
                )
                for source in sources
            ]
    else:
        tables = [
            _import_table(
                source,
                encoding=input_encoding,
                verify_ssl=verify_ssl,
                import_fields=import_fields,
            )
            for source in sources
        ]

    # Table objects support `+`, so the builtin sum concatenates them.
    result = sum(tables)

    if order_by is not None:
        order_by = _get_field_names(order_by, result.field_names,
                                    permit_not=True)
        # TODO: use complete list of `order_by` fields
        result.order_by(order_by[0].replace("^", "-"))

    export_fields = _get_export_fields(result.field_names, fields_exclude)
    # TODO: may use sys.stdout.encoding if output_file = '-'
    output_encoding = output_encoding or DEFAULT_OUTPUT_ENCODING

    if output_locale is not None:
        with rows.locale_context(output_locale):
            export_to_uri(
                result,
                destination,
                encoding=output_encoding,
                export_fields=export_fields,
            )
    else:
        export_to_uri(
            result,
            destination,
            encoding=output_encoding,
            export_fields=export_fields,
        )
def sort(input_encoding, output_encoding, input_locale, output_locale, key,
         source, destination):
    """Sort `source` by `key` and export it to `destination`.

    A leading '^' in `key` marks descending order; rows' `order_by` uses
    '-' internally for the same purpose.
    """
    key = key.replace('^', '-')
    with rows.locale_context(input_locale):
        table = import_from_uri(source)
        table.order_by(key)
    with rows.locale_context(output_locale):
        export_to_uri(destination, table)
def join(input_encoding, output_encoding, input_locale, output_locale, keys,
         sources, destination):
    """Join `sources` on the comma-separated `keys`, export to `destination`."""
    keys = [key.strip() for key in keys.split(',')]
    with rows.locale_context(input_locale):
        tables = [import_from_uri(source) for source in sources]
    result = rows.join(keys, tables)
    with rows.locale_context(output_locale):
        export_to_uri(destination, result)
def query(input_encoding, output_encoding, input_locale, output_locale,
          verify_ssl, fields, output, query, sources):
    """Run a SQL query over one or more sources via an in-memory SQLite db.

    If `query` is not a full SELECT, it is treated as a WHERE clause against
    table1..tableN. Result goes to `output` (URI) or stdout.
    """
    # TODO: may move all 'destination' to '--output'
    # TODO: may use sys.stdout.encoding if output_file = '-'
    output_encoding = (output_encoding or sys.stdout.encoding or
                       DEFAULT_OUTPUT_ENCODING)

    if not query.lower().startswith('select'):
        field_names = '*' if fields is None else fields
        table_names = ', '.join(
            ['table{}'.format(index) for index in range(1, len(sources) + 1)])
        query = 'SELECT {} FROM {} WHERE {}'.format(field_names, table_names,
                                                    query)

    if input_locale is not None:
        with rows.locale_context(input_locale):
            tables = [
                _import_table(source, encoding=input_encoding,
                              verify_ssl=verify_ssl)
                for source in sources
            ]
    else:
        tables = [
            _import_table(source, encoding=input_encoding,
                          verify_ssl=verify_ssl)
            for source in sources
        ]

    # First export creates the in-memory database; the rest reuse it.
    sqlite_connection = rows.export_to_sqlite(tables[0], ':memory:',
                                              table_name='table1')
    for index, table in enumerate(tables[1:], start=2):
        rows.export_to_sqlite(table, sqlite_connection,
                              table_name='table{}'.format(index))

    result = rows.import_from_sqlite(sqlite_connection, query=query)

    if output is None:
        fobj = BytesIO()
        if output_locale is not None:
            with rows.locale_context(output_locale):
                rows.export_to_txt(result, fobj, encoding=output_encoding)
        else:
            rows.export_to_txt(result, fobj, encoding=output_encoding)
        fobj.seek(0)
        click.echo(fobj.read())
    else:
        if output_locale is not None:
            with rows.locale_context(output_locale):
                export_to_uri(output, result, encoding=output_encoding)
        else:
            export_to_uri(output, result, encoding=output_encoding)
def sum(input_encoding, output_encoding, input_locale, output_locale,
        sources, destination):
    """Concatenate all `sources` into one table and export to `destination`.

    NOTE: this CLI command intentionally shadows the builtin `sum` — the name
    is the command's public interface and must not change.
    """
    with rows.locale_context(input_locale):
        tables = [import_from_uri(source) for source in sources]
    result = tables[0]
    for table in tables[1:]:
        result = result + table
    with rows.locale_context(output_locale):
        export_to_uri(destination, result)
def test_IntegerField(self):
    """IntegerField (de)serializes ints, honoring locale grouping."""
    self.assertEqual(fields.IntegerField.TYPE, (int, ))
    self.assertEqual(fields.IntegerField.serialize(None), '')
    self.assertIs(type(fields.IntegerField.serialize(None)), six.text_type)
    self.assertIn(type(fields.IntegerField.deserialize('42')),
                  fields.IntegerField.TYPE)
    self.assertEqual(fields.IntegerField.deserialize('42'), 42)
    self.assertEqual(fields.IntegerField.deserialize(42), 42)
    self.assertEqual(fields.IntegerField.serialize(42), '42')
    self.assertIs(type(fields.IntegerField.serialize(42)), six.text_type)
    self.assertEqual(fields.IntegerField.deserialize(None), None)
    # Large value: must not lose precision
    self.assertEqual(fields.IntegerField.deserialize('10152709355006317'),
                     10152709355006317)

    with rows.locale_context(locale_name):
        self.assertEqual(fields.IntegerField.serialize(42000), '42000')
        self.assertIs(type(fields.IntegerField.serialize(42000)),
                      six.text_type)
        self.assertEqual(fields.IntegerField.serialize(42000, grouping=True),
                         '42.000')
        self.assertEqual(fields.IntegerField.deserialize('42.000'), 42000)
        self.assertEqual(fields.IntegerField.deserialize(42), 42)
        self.assertEqual(fields.IntegerField.deserialize(42.0), 42)

    # Non-integral float must be rejected
    with self.assertRaises(ValueError):
        fields.IntegerField.deserialize(1.23)
def test_FloatField(self):
    """FloatField (de)serializes floats, honoring pt_BR locale formatting."""
    self.assertIs(fields.FloatField.TYPE, float)
    self.assertEqual(fields.FloatField.serialize(None), '')
    self.assertIs(type(fields.FloatField.serialize(None)), types.UnicodeType)
    self.assertIs(type(fields.FloatField.deserialize('42.0')),
                  fields.FloatField.TYPE)
    self.assertEqual(fields.FloatField.deserialize('42.0'), 42.0)
    self.assertEqual(fields.FloatField.deserialize(42.0), 42.0)
    self.assertEqual(fields.FloatField.deserialize(42), 42.0)
    self.assertEqual(fields.FloatField.deserialize(None), None)
    self.assertEqual(fields.FloatField.serialize(42.0), '42.0')
    self.assertIs(type(fields.FloatField.serialize(42.0)), types.UnicodeType)

    with rows.locale_context('pt_BR.UTF-8'):
        self.assertEqual(fields.FloatField.serialize(42000.0), '42000,000000')
        self.assertIs(type(fields.FloatField.serialize(42000.0)),
                      types.UnicodeType)
        self.assertEqual(fields.FloatField.serialize(42000, grouping=True),
                         '42.000,000000')
        self.assertEqual(fields.FloatField.deserialize('42.000,00'), 42000.0)
        self.assertEqual(fields.FloatField.deserialize(42), 42.0)
def test_IntegerField(self):
    """IntegerField (de)serializes ints, honoring locale grouping."""
    self.assertIs(fields.IntegerField.TYPE, int)
    self.assertEqual(fields.IntegerField.serialize(None), '')
    self.assertIs(type(fields.IntegerField.serialize(None)),
                  types.UnicodeType)
    self.assertIs(type(fields.IntegerField.deserialize('42')),
                  fields.IntegerField.TYPE)
    self.assertEqual(fields.IntegerField.deserialize('42'), 42)
    self.assertEqual(fields.IntegerField.deserialize(42), 42)
    self.assertEqual(fields.IntegerField.serialize(42), '42')
    self.assertIs(type(fields.IntegerField.serialize(42)), types.UnicodeType)
    self.assertEqual(fields.IntegerField.deserialize(None), None)

    with rows.locale_context(locale_name):
        self.assertEqual(fields.IntegerField.serialize(42000), '42000')
        self.assertIs(type(fields.IntegerField.serialize(42000)),
                      types.UnicodeType)
        self.assertEqual(fields.IntegerField.serialize(42000, grouping=True),
                         '42.000')
        self.assertEqual(fields.IntegerField.deserialize('42.000'), 42000)

    # Non-integral float must be rejected
    with self.assertRaises(ValueError):
        fields.IntegerField.deserialize(1.23)
def test_PercentField(self):
    """PercentField converts between 'N%' strings and Decimal fractions."""
    deserialized = Decimal('0.42010')
    self.assertIs(fields.PercentField.TYPE, Decimal)
    self.assertIs(type(fields.PercentField.deserialize('42.0%')),
                  fields.PercentField.TYPE)
    self.assertEqual(fields.PercentField.deserialize('42.0%'),
                     Decimal('0.420'))
    self.assertEqual(fields.PercentField.deserialize(Decimal('0.420')),
                     Decimal('0.420'))
    self.assertEqual(fields.PercentField.deserialize(deserialized),
                     deserialized)
    self.assertEqual(fields.PercentField.deserialize(None), None)
    self.assertEqual(fields.PercentField.serialize(deserialized), '42.010%')
    self.assertEqual(type(fields.PercentField.serialize(deserialized)),
                     types.UnicodeType)
    self.assertEqual(fields.PercentField.serialize(Decimal('42.010')),
                     '4201.0%')
    self.assertEqual(fields.PercentField.serialize(Decimal('0.01')), '1%')

    with rows.locale_context(locale_name):
        self.assertEqual(
            type(fields.PercentField.serialize(deserialized)),
            types.UnicodeType
        )
        self.assertEqual(fields.PercentField.serialize(Decimal('42.0')),
                         '4200%')
        self.assertEqual(fields.PercentField.serialize(Decimal('42000.0')),
                         '4200000%')
        self.assertEqual(fields.PercentField.deserialize('42.000,00%'),
                         Decimal('420.0000'))
        self.assertEqual(
            fields.PercentField.serialize(Decimal('42000.00'), grouping=True),
            '4.200.000%')

    # Bare ints are ambiguous for percentages and must be rejected
    with self.assertRaises(ValueError):
        fields.PercentField.deserialize(42)
def test_FloatField(self):
    """FloatField (de)serializes floats, honoring locale formatting."""
    self.assertIs(fields.FloatField.TYPE, float)
    self.assertEqual(fields.FloatField.serialize(None), '')
    self.assertIs(type(fields.FloatField.serialize(None)), types.UnicodeType)
    self.assertIs(type(fields.FloatField.deserialize('42.0')),
                  fields.FloatField.TYPE)
    self.assertEqual(fields.FloatField.deserialize('42.0'), 42.0)
    self.assertEqual(fields.FloatField.deserialize(42.0), 42.0)
    self.assertEqual(fields.FloatField.deserialize(42), 42.0)
    self.assertEqual(fields.FloatField.deserialize(None), None)
    self.assertEqual(fields.FloatField.serialize(42.0), '42.0')
    self.assertIs(type(fields.FloatField.serialize(42.0)), types.UnicodeType)

    with rows.locale_context(locale_name):
        self.assertEqual(fields.FloatField.serialize(42000.0), '42000,000000')
        self.assertIs(type(fields.FloatField.serialize(42000.0)),
                      types.UnicodeType)
        self.assertEqual(fields.FloatField.serialize(42000, grouping=True),
                         '42.000,000000')
        self.assertEqual(fields.FloatField.deserialize('42.000,00'), 42000.0)
        self.assertEqual(fields.FloatField.deserialize(42), 42.0)
def test_PercentField(self):
    """PercentField converts between 'N%' strings and Decimal fractions."""
    deserialized = Decimal('0.42010')
    self.assertEqual(fields.PercentField.TYPE, (Decimal, ))
    self.assertIn(type(fields.PercentField.deserialize('42.0%')),
                  fields.PercentField.TYPE)
    self.assertEqual(fields.PercentField.deserialize('42.0%'),
                     Decimal('0.420'))
    self.assertEqual(fields.PercentField.deserialize(Decimal('0.420')),
                     Decimal('0.420'))
    self.assertEqual(fields.PercentField.deserialize(deserialized),
                     deserialized)
    self.assertEqual(fields.PercentField.deserialize(None), None)
    self.assertEqual(fields.PercentField.serialize(deserialized), '42.010%')
    self.assertEqual(type(fields.PercentField.serialize(deserialized)),
                     six.text_type)
    self.assertEqual(fields.PercentField.serialize(Decimal('42.010')),
                     '4201.0%')
    self.assertEqual(fields.PercentField.serialize(Decimal('0')), '0.00%')
    self.assertEqual(fields.PercentField.serialize(None), '')
    self.assertEqual(fields.PercentField.serialize(Decimal('0.01')), '1%')

    with rows.locale_context(locale_name):
        self.assertEqual(type(fields.PercentField.serialize(deserialized)),
                         six.text_type)
        self.assertEqual(fields.PercentField.serialize(Decimal('42.0')),
                         '4200%')
        self.assertEqual(fields.PercentField.serialize(Decimal('42000.0')),
                         '4200000%')
        self.assertEqual(fields.PercentField.deserialize('42.000,00%'),
                         Decimal('420.0000'))
        self.assertEqual(
            fields.PercentField.serialize(Decimal('42000.00'), grouping=True),
            '4.200.000%')

    # Bare ints are ambiguous for percentages and must be rejected
    with self.assertRaises(ValueError):
        fields.PercentField.deserialize(42)
def schema(input_encoding, input_locale, verify_ssl, output_format, fields,
           fields_exclude, samples, source, output):
    """Detect the schema of `source` and write it to `output`.

    `output` may be '-'/None (stdout) or a file path. `samples` limits how
    many rows are inspected for type detection (<= 0 means all rows).

    Fix: the original opened the output file with a bare `open()` and never
    closed it — the handle (and possibly buffered data) leaked. The file is
    now managed with a `with` block; stdout is left untouched.
    """
    samples = samples if samples > 0 else None
    import_fields = _get_import_fields(fields, fields_exclude)
    source = detect_source(source, verify_ssl=verify_ssl)

    # TODO: make it lazy
    if input_locale is not None:
        with rows.locale_context(input_locale):
            table = import_from_source(source, DEFAULT_INPUT_ENCODING,
                                       samples=samples,
                                       import_fields=import_fields)
    else:
        table = import_from_source(source, DEFAULT_INPUT_ENCODING,
                                   samples=samples,
                                   import_fields=import_fields)

    export_fields = _get_export_fields(table.field_names, fields_exclude)
    if export_fields is None:
        export_fields = table.field_names

    if output in ('-', None):
        # stdout must not be closed, so it is passed through directly.
        rows.fields.generate_schema(table, export_fields, output_format,
                                    sys.stdout)
    else:
        with open(output, mode='w', encoding='utf-8') as fobj:
            rows.fields.generate_schema(table, export_fields, output_format,
                                        fobj)
def test_PercentField(self):
    """PercentField converts between 'N%' strings and Decimal fractions."""
    deserialized = Decimal('0.42010')
    self.assertIs(fields.PercentField.TYPE, Decimal)
    self.assertIs(type(fields.PercentField.deserialize('42.0%')),
                  fields.PercentField.TYPE)
    self.assertEqual(fields.PercentField.deserialize('42.0%'),
                     Decimal('0.420'))
    self.assertEqual(fields.PercentField.deserialize(Decimal('0.420')),
                     Decimal('0.420'))
    self.assertEqual(fields.PercentField.deserialize(deserialized),
                     deserialized)
    self.assertEqual(fields.PercentField.deserialize(None), None)
    self.assertEqual(fields.PercentField.serialize(deserialized), '42.010%')
    self.assertEqual(type(fields.PercentField.serialize(deserialized)),
                     types.UnicodeType)
    self.assertEqual(fields.PercentField.serialize(Decimal('42.010')),
                     '4201.0%')
    self.assertEqual(fields.PercentField.serialize(Decimal('0.01')), '1%')

    with rows.locale_context('pt_BR.UTF-8'):
        self.assertEqual(type(fields.PercentField.serialize(deserialized)),
                         types.UnicodeType)
        self.assertEqual(fields.PercentField.serialize(Decimal('42.0')),
                         '4200%')
        self.assertEqual(fields.PercentField.serialize(Decimal('42000.0')),
                         '4200000%')
        self.assertEqual(fields.PercentField.deserialize('42.000,00%'),
                         Decimal('420.0000'))
        self.assertEqual(
            fields.PercentField.serialize(Decimal('42000.00'), grouping=True),
            '4.200.000%')

    # Bare ints are ambiguous for percentages and must be rejected
    with self.assertRaises(ValueError):
        fields.PercentField.deserialize(42)
def extract(filename):
    """Extract rows from an XLS/XLSX payroll spreadsheet as dicts.

    Returns a list of dicts (file metadata merged with each filled row),
    sorted by organ and descending net income. Decimal values are rounded
    to 2 places. Raises ValueError for unsupported extensions.
    """
    # TODO: check header position
    if filename.name.endswith('.xls'):
        import_function = rows.import_from_xls
    elif filename.name.endswith('.xlsx'):
        import_function = rows.import_from_xlsx
    else:
        raise ValueError('Cannot parse this spreadsheet')

    metadata = extract_metadata(filename)
    result = []
    # pt_BR locale so Brazilian number formats parse correctly.
    with rows.locale_context('pt_BR.UTF-8'):
        table = import_function(
            str(filename),
            start_row=21,
            fields=FIELDS,
            skip_header=False,
        )
        for row in table:
            row_data = row._asdict()
            if is_filled(row_data):
                # Created this way so first columns will be metadata
                data = metadata.copy()
                data.update(row_data)
                for key, value in data.items():
                    if isinstance(value, Decimal):
                        data[key] = round(value, 2)
                result.append(data)

    # TODO: check rows with rendimento_liquido = 0
    result.sort(
        key=lambda row: (row['orgao'], -(row['rendimento_liquido'] or 0)))
    return result
def test_DecimalField(self):
    """DecimalField (de)serializes Decimals, honoring pt_BR formatting."""
    deserialized = Decimal('42.010')
    self.assertIs(fields.DecimalField.TYPE, Decimal)
    self.assertEqual(fields.DecimalField.serialize(None), '')
    self.assertIs(type(fields.DecimalField.serialize(None)),
                  types.UnicodeType)
    self.assertIs(type(fields.DecimalField.deserialize('42.0')),
                  fields.DecimalField.TYPE)
    self.assertEqual(fields.DecimalField.deserialize('42.0'), Decimal('42.0'))
    self.assertEqual(fields.DecimalField.deserialize(deserialized),
                     deserialized)
    self.assertEqual(fields.DecimalField.serialize(deserialized), '42.010')
    self.assertEqual(type(fields.DecimalField.serialize(deserialized)),
                     types.UnicodeType)
    # High-precision value must round-trip without loss
    self.assertEqual(fields.DecimalField.deserialize('21.21657469231'),
                     Decimal('21.21657469231'))
    self.assertEqual(fields.DecimalField.deserialize(None), None)

    with rows.locale_context('pt_BR.UTF-8'):
        self.assertEqual(types.UnicodeType,
                         type(fields.DecimalField.serialize(deserialized)))
        self.assertEqual(fields.DecimalField.serialize(Decimal('4200')),
                         '4200')
        self.assertEqual(fields.DecimalField.serialize(Decimal('42.0')),
                         '42,0')
        self.assertEqual(fields.DecimalField.serialize(Decimal('42000.0')),
                         '42000,0')
        self.assertEqual(fields.DecimalField.deserialize('42.000,00'),
                         Decimal('42000.00'))
        self.assertEqual(
            fields.DecimalField.serialize(Decimal('42000.0'), grouping=True),
            '42.000,0')
def test_DecimalField(self):
    """DecimalField (de)serializes Decimals, honoring pt_BR formatting."""
    deserialized = Decimal('42.010')
    self.assertIs(fields.DecimalField.TYPE, Decimal)
    self.assertEqual(fields.DecimalField.serialize(None), '')
    self.assertIs(type(fields.DecimalField.serialize(None)),
                  types.UnicodeType)
    self.assertIs(type(fields.DecimalField.deserialize('42.0')),
                  fields.DecimalField.TYPE)
    self.assertEqual(fields.DecimalField.deserialize('42.0'), Decimal('42.0'))
    self.assertEqual(fields.DecimalField.deserialize(deserialized),
                     deserialized)
    self.assertEqual(fields.DecimalField.serialize(deserialized), '42.010')
    self.assertEqual(type(fields.DecimalField.serialize(deserialized)),
                     types.UnicodeType)
    # High-precision value must round-trip without loss
    self.assertEqual(fields.DecimalField.deserialize('21.21657469231'),
                     Decimal('21.21657469231'))
    self.assertEqual(fields.DecimalField.deserialize(None), None)

    with rows.locale_context('pt_BR.UTF-8'):
        self.assertEqual(types.UnicodeType,
                         type(fields.DecimalField.serialize(deserialized)))
        self.assertEqual(fields.DecimalField.serialize(Decimal('4200')),
                         '4200')
        self.assertEqual(fields.DecimalField.serialize(Decimal('42.0')),
                         '42,0')
        self.assertEqual(fields.DecimalField.serialize(Decimal('42000.0')),
                         '42000,0')
        self.assertEqual(fields.DecimalField.deserialize('42.000,00'),
                         Decimal('42000.00'))
        self.assertEqual(
            fields.DecimalField.serialize(Decimal('42000.0'), grouping=True),
            '42.000,0')
def print_(input_encoding, output_encoding, input_locale, output_locale,
           table_index, verify_ssl, fields, fields_exclude, order_by, source):
    """Render `source` as a text table on stdout (the `print` CLI command)."""
    import_fields = _get_import_fields(fields, fields_exclude)

    # TODO: if create_table implements `fields_exclude` this _import_table
    # call will import only the desired data
    if input_locale is not None:
        with rows.locale_context(input_locale):
            table = _import_table(source, encoding=input_encoding,
                                  verify_ssl=verify_ssl, index=table_index,
                                  import_fields=import_fields)
    else:
        table = _import_table(source, encoding=input_encoding,
                              verify_ssl=verify_ssl, index=table_index,
                              import_fields=import_fields)

    if order_by is not None:
        order_by = _get_field_names(order_by, table.field_names,
                                    permit_not=True)
        # TODO: use complete list of `order_by` fields
        table.order_by(order_by[0].replace('^', '-'))

    export_fields = _get_export_fields(table.field_names, fields_exclude)
    output_encoding = (output_encoding or sys.stdout.encoding or
                       DEFAULT_OUTPUT_ENCODING)

    # Render into a bytes buffer first, then echo it in one shot.
    fobj = BytesIO()
    if output_locale is not None:
        with rows.locale_context(output_locale):
            rows.export_to_txt(table, fobj, encoding=output_encoding,
                               export_fields=export_fields)
    else:
        rows.export_to_txt(table, fobj, encoding=output_encoding,
                           export_fields=export_fields)
    fobj.seek(0)
    # TODO: may pass unicode to click.echo if output_encoding is not provided
    click.echo(fobj.read())
def join(input_encoding, output_encoding, input_locale, output_locale,
         verify_ssl, order_by, fields, fields_exclude, keys, sources,
         destination):
    """Join `sources` on `keys` (comma-separated), with field selection."""
    export_fields = _get_import_fields(fields, fields_exclude)
    keys = make_header(keys.split(','), permit_not=False)

    if input_locale is not None:
        with rows.locale_context(input_locale):
            tables = [
                _import_table(source, encoding=input_encoding,
                              verify_ssl=verify_ssl)
                for source in sources
            ]
    else:
        tables = [
            _import_table(source, encoding=input_encoding,
                          verify_ssl=verify_ssl)
            for source in sources
        ]

    result = rows.join(keys, tables)

    if order_by is not None:
        order_by = _get_field_names(order_by, result.field_names,
                                    permit_not=True)
        # TODO: use complete list of `order_by` fields
        result.order_by(order_by[0].replace('^', '-'))

    # Explicit `fields` wins; otherwise derive from the result minus excludes.
    if export_fields is None:
        export_fields = _get_export_fields(result.field_names, fields_exclude)

    # TODO: may use sys.stdout.encoding if output_file = '-'
    output_encoding = output_encoding or DEFAULT_OUTPUT_ENCODING

    if output_locale is not None:
        with rows.locale_context(output_locale):
            export_to_uri(result, destination, encoding=output_encoding,
                          export_fields=export_fields)
    else:
        export_to_uri(result, destination, encoding=output_encoding,
                      export_fields=export_fields)
def test_DecimalField(self):
    """DecimalField (de)serializes Decimals, incl. negatives and locale."""
    deserialized = Decimal('42.010')
    self.assertEqual(fields.DecimalField.TYPE, (Decimal, ))
    self.assertEqual(fields.DecimalField.serialize(None), '')
    self.assertIs(type(fields.DecimalField.serialize(None)), six.text_type)
    self.assertEqual(fields.DecimalField.deserialize(''), None)
    self.assertIn(type(fields.DecimalField.deserialize('42.0')),
                  fields.DecimalField.TYPE)
    self.assertEqual(fields.DecimalField.deserialize('42.0'), Decimal('42.0'))
    self.assertEqual(fields.DecimalField.deserialize(deserialized),
                     deserialized)
    self.assertEqual(fields.DecimalField.serialize(deserialized), '42.010')
    self.assertEqual(type(fields.DecimalField.serialize(deserialized)),
                     six.text_type)
    # High-precision value must round-trip without loss
    self.assertEqual(fields.DecimalField.deserialize('21.21657469231'),
                     Decimal('21.21657469231'))
    self.assertEqual(fields.DecimalField.deserialize('-21.34'),
                     Decimal('-21.34'))
    self.assertEqual(fields.DecimalField.serialize(Decimal('-21.34')),
                     '-21.34')
    self.assertEqual(fields.DecimalField.deserialize(None), None)

    with rows.locale_context(locale_name):
        self.assertEqual(
            six.text_type, type(fields.DecimalField.serialize(deserialized))
        )
        self.assertEqual(fields.DecimalField.serialize(Decimal('4200')),
                         '4200')
        self.assertEqual(fields.DecimalField.serialize(Decimal('42.0')),
                         '42,0')
        self.assertEqual(fields.DecimalField.serialize(Decimal('42000.0')),
                         '42000,0')
        self.assertEqual(fields.DecimalField.serialize(Decimal('-42.0')),
                         '-42,0')
        self.assertEqual(fields.DecimalField.deserialize('42.000,00'),
                         Decimal('42000.00'))
        self.assertEqual(fields.DecimalField.deserialize('-42.000,00'),
                         Decimal('-42000.00'))
        self.assertEqual(
            fields.DecimalField.serialize(Decimal('42000.0'), grouping=True),
            '42.000,0'
        )

    self.assertEqual(fields.DecimalField.deserialize(42000), Decimal('42000'))
    self.assertEqual(fields.DecimalField.deserialize(42000.0),
                     Decimal('42000'))
def test_DecimalField(self):
    """DecimalField (de)serializes Decimals, incl. negatives and locale."""
    deserialized = Decimal('42.010')
    self.assertEqual(fields.DecimalField.TYPE, (Decimal, ))
    self.assertEqual(fields.DecimalField.serialize(None), '')
    self.assertIs(type(fields.DecimalField.serialize(None)), six.text_type)
    self.assertEqual(fields.DecimalField.deserialize(''), None)
    self.assertIn(type(fields.DecimalField.deserialize('42.0')),
                  fields.DecimalField.TYPE)
    self.assertEqual(fields.DecimalField.deserialize('42.0'), Decimal('42.0'))
    self.assertEqual(fields.DecimalField.deserialize(deserialized),
                     deserialized)
    self.assertEqual(fields.DecimalField.serialize(deserialized), '42.010')
    self.assertEqual(type(fields.DecimalField.serialize(deserialized)),
                     six.text_type)
    # High-precision value must round-trip without loss
    self.assertEqual(fields.DecimalField.deserialize('21.21657469231'),
                     Decimal('21.21657469231'))
    self.assertEqual(fields.DecimalField.deserialize('-21.34'),
                     Decimal('-21.34'))
    self.assertEqual(fields.DecimalField.serialize(Decimal('-21.34')),
                     '-21.34')
    self.assertEqual(fields.DecimalField.deserialize(None), None)

    with rows.locale_context(locale_name):
        self.assertEqual(six.text_type,
                         type(fields.DecimalField.serialize(deserialized)))
        self.assertEqual(fields.DecimalField.serialize(Decimal('4200')),
                         '4200')
        self.assertEqual(fields.DecimalField.serialize(Decimal('42.0')),
                         '42,0')
        self.assertEqual(fields.DecimalField.serialize(Decimal('42000.0')),
                         '42000,0')
        self.assertEqual(fields.DecimalField.serialize(Decimal('-42.0')),
                         '-42,0')
        self.assertEqual(fields.DecimalField.deserialize('42.000,00'),
                         Decimal('42000.00'))
        self.assertEqual(fields.DecimalField.deserialize('-42.000,00'),
                         Decimal('-42000.00'))
        self.assertEqual(
            fields.DecimalField.serialize(Decimal('42000.0'), grouping=True),
            '42.000,0')

    self.assertEqual(fields.DecimalField.deserialize(42000), Decimal('42000'))
    self.assertEqual(fields.DecimalField.deserialize(42000.0),
                     Decimal('42000'))
def test_DecimalField(self):
    """DecimalField (de)serializes Decimals, incl. negatives and locale."""
    deserialized = Decimal("42.010")
    self.assertEqual(fields.DecimalField.TYPE, (Decimal,))
    self.assertEqual(fields.DecimalField.serialize(None), "")
    self.assertIs(type(fields.DecimalField.serialize(None)), six.text_type)
    self.assertEqual(fields.DecimalField.deserialize(""), None)
    self.assertIn(
        type(fields.DecimalField.deserialize("42.0")), fields.DecimalField.TYPE
    )
    self.assertEqual(fields.DecimalField.deserialize("42.0"), Decimal("42.0"))
    self.assertEqual(fields.DecimalField.deserialize(deserialized),
                     deserialized)
    self.assertEqual(fields.DecimalField.serialize(deserialized), "42.010")
    self.assertEqual(
        type(fields.DecimalField.serialize(deserialized)), six.text_type
    )
    # High-precision value must round-trip without loss
    self.assertEqual(
        fields.DecimalField.deserialize("21.21657469231"),
        Decimal("21.21657469231")
    )
    self.assertEqual(fields.DecimalField.deserialize("-21.34"),
                     Decimal("-21.34"))
    self.assertEqual(fields.DecimalField.serialize(Decimal("-21.34")),
                     "-21.34")
    self.assertEqual(fields.DecimalField.deserialize(None), None)

    with rows.locale_context(locale_name):
        self.assertEqual(
            six.text_type, type(fields.DecimalField.serialize(deserialized))
        )
        self.assertEqual(fields.DecimalField.serialize(Decimal("4200")),
                         "4200")
        self.assertEqual(fields.DecimalField.serialize(Decimal("42.0")),
                         "42,0")
        self.assertEqual(
            fields.DecimalField.serialize(Decimal("42000.0")), "42000,0"
        )
        self.assertEqual(fields.DecimalField.serialize(Decimal("-42.0")),
                         "-42,0")
        self.assertEqual(
            fields.DecimalField.deserialize("42.000,00"), Decimal("42000.00")
        )
        self.assertEqual(
            fields.DecimalField.deserialize("-42.000,00"), Decimal("-42000.00")
        )
        self.assertEqual(
            fields.DecimalField.serialize(Decimal("42000.0"), grouping=True),
            "42.000,0",
        )

    self.assertEqual(fields.DecimalField.deserialize(42000), Decimal("42000"))
    self.assertEqual(fields.DecimalField.deserialize(42000.0),
                     Decimal("42000"))
def test_FloatField(self):
    """FloatField (de)serializes floats, honoring locale formatting."""
    self.assertEqual(fields.FloatField.TYPE, (float,))
    self.assertEqual(fields.FloatField.serialize(None), "")
    self.assertIs(type(fields.FloatField.serialize(None)), six.text_type)
    self.assertIn(
        type(fields.FloatField.deserialize("42.0")), fields.FloatField.TYPE
    )
    self.assertEqual(fields.FloatField.deserialize("42.0"), 42.0)
    self.assertEqual(fields.FloatField.deserialize(42.0), 42.0)
    self.assertEqual(fields.FloatField.deserialize(42), 42.0)
    self.assertEqual(fields.FloatField.deserialize(None), None)
    self.assertEqual(fields.FloatField.serialize(42.0), "42.0")
    self.assertIs(type(fields.FloatField.serialize(42.0)), six.text_type)

    with rows.locale_context(locale_name):
        self.assertEqual(fields.FloatField.serialize(42000.0), "42000,000000")
        self.assertIs(type(fields.FloatField.serialize(42000.0)),
                      six.text_type)
        self.assertEqual(
            fields.FloatField.serialize(42000, grouping=True), "42.000,000000"
        )
        self.assertEqual(fields.FloatField.deserialize("42.000,00"), 42000.0)
        self.assertEqual(fields.FloatField.deserialize(42), 42.0)
        self.assertEqual(fields.FloatField.deserialize(42.0), 42.0)
def test_PercentField(self):
    """PercentField converts between 'N%' strings and Decimal fractions."""
    deserialized = Decimal("0.42010")
    self.assertEqual(fields.PercentField.TYPE, (Decimal,))
    self.assertIn(
        type(fields.PercentField.deserialize("42.0%")), fields.PercentField.TYPE
    )
    self.assertEqual(fields.PercentField.deserialize("42.0%"),
                     Decimal("0.420"))
    self.assertEqual(
        fields.PercentField.deserialize(Decimal("0.420")), Decimal("0.420")
    )
    self.assertEqual(fields.PercentField.deserialize(deserialized),
                     deserialized)
    self.assertEqual(fields.PercentField.deserialize(None), None)
    self.assertEqual(fields.PercentField.serialize(deserialized), "42.010%")
    self.assertEqual(
        type(fields.PercentField.serialize(deserialized)), six.text_type
    )
    self.assertEqual(fields.PercentField.serialize(Decimal("42.010")),
                     "4201.0%")
    self.assertEqual(fields.PercentField.serialize(Decimal("0")), "0.00%")
    self.assertEqual(fields.PercentField.serialize(None), "")
    self.assertEqual(fields.PercentField.serialize(Decimal("0.01")), "1%")

    with rows.locale_context(locale_name):
        self.assertEqual(
            type(fields.PercentField.serialize(deserialized)), six.text_type
        )
        self.assertEqual(fields.PercentField.serialize(Decimal("42.0")),
                         "4200%")
        self.assertEqual(
            fields.PercentField.serialize(Decimal("42000.0")), "4200000%"
        )
        self.assertEqual(
            fields.PercentField.deserialize("42.000,00%"), Decimal("420.0000")
        )
        self.assertEqual(
            fields.PercentField.serialize(Decimal("42000.00"), grouping=True),
            "4.200.000%",
        )

    # Bare ints are ambiguous for percentages and must be rejected
    with self.assertRaises(ValueError):
        fields.PercentField.deserialize(42)
def query(
    input_encoding,
    output_encoding,
    input_locale,
    output_locale,
    verify_ssl,
    samples,
    output,
    frame_style,
    query,
    sources,
):
    """Run a SQL query over one or more tabular sources and print/export the result.

    A query that does not start with SELECT is treated as a WHERE clause
    over all sources (exposed as table1..tableN). A single sqlite/postgresql
    source is queried directly; anything else is first loaded into an
    in-memory SQLite database.
    """
    # A non-positive sample count means "use all rows" for type detection.
    samples = samples if samples > 0 else None

    if not query.lower().startswith("select"):
        # Shorthand form: the user passed only a WHERE clause.
        table_names = ", ".join(
            ["table{}".format(index) for index in range(1, len(sources) + 1)]
        )
        query = "SELECT * FROM {} WHERE {}".format(table_names, query)

    if len(sources) == 1:
        source = detect_source(sources[0], verify_ssl=verify_ssl, progress=True)

        if source.plugin_name in ("sqlite", "postgresql"):
            # Optimization: query the db directly
            result = import_from_source(
                source, DEFAULT_INPUT_ENCODING, query=query, samples=samples
            )
        else:
            if input_locale is not None:
                with rows.locale_context(input_locale):
                    table = import_from_source(
                        source, DEFAULT_INPUT_ENCODING, samples=samples
                    )
            else:
                table = import_from_source(
                    source, DEFAULT_INPUT_ENCODING, samples=samples
                )

            # Stage the imported table in an in-memory SQLite db to run SQL on it.
            sqlite_connection = sqlite3.Connection(":memory:")
            rows.export_to_sqlite(table, sqlite_connection, table_name="table1")
            result = rows.import_from_sqlite(sqlite_connection, query=query)
    else:
        # TODO: if all sources are SQLite we can also optimize the import
        if input_locale is not None:
            with rows.locale_context(input_locale):
                tables = [
                    _import_table(
                        source,
                        encoding=input_encoding,
                        verify_ssl=verify_ssl,
                        samples=samples,
                    )
                    for source in sources
                ]
        else:
            tables = [
                _import_table(
                    source,
                    encoding=input_encoding,
                    verify_ssl=verify_ssl,
                    samples=samples,
                )
                for source in sources
            ]

        # Each source becomes table1..tableN inside one in-memory SQLite db.
        sqlite_connection = sqlite3.Connection(":memory:")
        for index, table in enumerate(tables, start=1):
            rows.export_to_sqlite(
                table, sqlite_connection, table_name="table{}".format(index)
            )

        result = rows.import_from_sqlite(sqlite_connection, query=query)

    # TODO: may use sys.stdout.encoding if output_file = '-'
    output_encoding = output_encoding or sys.stdout.encoding or DEFAULT_OUTPUT_ENCODING
    if output is None:
        # No output file: render the result as text and echo it to stdout.
        fobj = BytesIO()
        if output_locale is not None:
            with rows.locale_context(output_locale):
                rows.export_to_txt(
                    result, fobj, encoding=output_encoding, frame_style=frame_style
                )
        else:
            rows.export_to_txt(
                result, fobj, encoding=output_encoding, frame_style=frame_style
            )
        fobj.seek(0)
        click.echo(fobj.read())
    else:
        if output_locale is not None:
            with rows.locale_context(output_locale):
                export_to_uri(result, output, encoding=output_encoding)
        else:
            export_to_uri(result, output, encoding=output_encoding)
def query(input_encoding, output_encoding, input_locale, output_locale,
          verify_ssl, fields, output, query, sources):
    """Execute a SQL query against the given sources and write the result.

    A query not starting with SELECT is read as a WHERE clause over every
    source (exposed as table1..tableN). Non-SQLite sources are staged in
    an in-memory SQLite database first.
    """
    # TODO: may use sys.stdout.encoding if output_file = '-'
    output_encoding = (output_encoding or sys.stdout.encoding or
                       DEFAULT_OUTPUT_ENCODING)

    if not query.lower().startswith('select'):
        # Build a complete SELECT from the WHERE-only shorthand.
        selected = '*' if fields is None else fields
        names = ', '.join('table{}'.format(number)
                          for number in range(1, len(sources) + 1))
        query = 'SELECT {} FROM {} WHERE {}'.format(selected, names, query)

    if len(sources) == 1:
        source = detect_source(sources[0], verify_ssl=verify_ssl)
        if source.plugin_name == 'sqlite':
            # Optimization: query the SQLite database directly
            result = import_from_source(source, DEFAULT_INPUT_ENCODING,
                                        query=query)
        else:
            if input_locale is not None:
                with rows.locale_context(input_locale):
                    table = import_from_source(source, DEFAULT_INPUT_ENCODING)
            else:
                table = import_from_source(source, DEFAULT_INPUT_ENCODING)
            connection = sqlite3.Connection(':memory:')
            rows.export_to_sqlite(table, connection, table_name='table1')
            result = rows.import_from_sqlite(connection, query=query)
    else:
        if input_locale is not None:
            with rows.locale_context(input_locale):
                tables = [_import_table(uri, encoding=input_encoding,
                                        verify_ssl=verify_ssl)
                          for uri in sources]
        else:
            tables = [_import_table(uri, encoding=input_encoding,
                                    verify_ssl=verify_ssl)
                      for uri in sources]
        connection = sqlite3.Connection(':memory:')
        for number, table in enumerate(tables, start=1):
            rows.export_to_sqlite(table, connection,
                                  table_name='table{}'.format(number))
        result = rows.import_from_sqlite(connection, query=query)

    if output is None:
        # Render to an in-memory buffer and echo to stdout.
        buffer = BytesIO()
        if output_locale is not None:
            with rows.locale_context(output_locale):
                rows.export_to_txt(result, buffer, encoding=output_encoding)
        else:
            rows.export_to_txt(result, buffer, encoding=output_encoding)
        buffer.seek(0)
        click.echo(buffer.read())
    else:
        if output_locale is not None:
            with rows.locale_context(output_locale):
                export_to_uri(result, output, encoding=output_encoding)
        else:
            export_to_uri(result, output, encoding=output_encoding)
def print_(input_encoding, output_encoding, input_locale, output_locale,
           table_index, verify_ssl, fields, fields_except, order_by, source):
    """Import a table from `source` and pretty-print it to stdout.

    `fields` whitelists the columns to show while `fields_except` removes
    columns instead; the two options are mutually exclusive (exit code 20).
    """
    if fields is not None and fields_except is not None:
        click.echo('ERROR: `--fields` cannot be used with `--fields-except`',
                   err=True)
        sys.exit(20)

    output_encoding = (output_encoding or sys.stdout.encoding or
                       DEFAULT_OUTPUT_ENCODING)

    # TODO: may use `import_fields` for better performance
    if input_locale is not None:
        with rows.locale_context(input_locale):
            table = _import_table(source, encoding=input_encoding,
                                  verify_ssl=verify_ssl, index=table_index)
    else:
        table = _import_table(source, encoding=input_encoding,
                              verify_ssl=verify_ssl, index=table_index)

    all_fields = table.field_names
    if fields is not None:
        fields = _get_field_names(fields, all_fields)
    if fields_except is not None:
        fields_except = _get_field_names(fields_except, all_fields)

    # TODO: should set `export_fields = None` if `--fields` and
    # `--fields-except` are `None`
    if fields is None:
        if fields_except is None:
            export_fields = all_fields
        else:
            export_fields = list(all_fields)
            for removed in fields_except:
                export_fields.remove(removed)
    else:
        if fields_except is None:
            export_fields = fields
        else:
            export_fields = list(fields)
            for removed in fields_except:
                export_fields.remove(removed)

    if order_by is not None:
        order_by = _get_field_names(order_by, all_fields, permit_not=True)
        # TODO: use complete list of `order_by` fields
        table.order_by(order_by[0].replace('^', '-'))

    buffer = BytesIO()
    if output_locale is not None:
        with rows.locale_context(output_locale):
            rows.export_to_txt(table, buffer, encoding=output_encoding,
                               export_fields=export_fields)
    else:
        rows.export_to_txt(table, buffer, encoding=output_encoding,
                           export_fields=export_fields)
    buffer.seek(0)
    # TODO: may pass unicode to click.echo if output_encoding is not provided
    click.echo(buffer.read())
def query(input_encoding, output_encoding, input_locale, output_locale,
          verify_ssl, fields, output, query, sources):
    """Run a SQL query over `sources` and print or export the result.

    NOTE(review): this is a near-verbatim duplicate of the other `query`
    implementation in this file — likely an older revision kept around;
    confirm which one the CLI actually registers.
    """
    # TODO: may use sys.stdout.encoding if output_file = '-'
    output_encoding = output_encoding or sys.stdout.encoding or \
        DEFAULT_OUTPUT_ENCODING

    if not query.lower().startswith('select'):
        # The user passed only a WHERE clause: synthesize the full SELECT.
        field_names = '*' if fields is None else fields
        table_names = ', '.join(
            ['table{}'.format(index) for index in range(1, len(sources) + 1)])
        query = 'SELECT {} FROM {} WHERE {}'.format(field_names, table_names,
                                                    query)

    if len(sources) == 1:
        source = detect_source(sources[0], verify_ssl=verify_ssl)

        if source.plugin_name != 'sqlite':
            if input_locale is not None:
                with rows.locale_context(input_locale):
                    table = import_from_source(source, DEFAULT_INPUT_ENCODING)
            else:
                table = import_from_source(source, DEFAULT_INPUT_ENCODING)

            # Stage the table in an in-memory SQLite db so SQL can run on it.
            sqlite_connection = sqlite3.Connection(':memory:')
            rows.export_to_sqlite(table, sqlite_connection,
                                  table_name='table1')
            result = rows.import_from_sqlite(sqlite_connection, query=query)
        else:
            # Optimization: query the SQLite database directly
            result = import_from_source(source, DEFAULT_INPUT_ENCODING,
                                        query=query)
    else:
        if input_locale is not None:
            with rows.locale_context(input_locale):
                tables = [_import_table(source, encoding=input_encoding,
                                        verify_ssl=verify_ssl)
                          for source in sources]
        else:
            tables = [_import_table(source, encoding=input_encoding,
                                    verify_ssl=verify_ssl)
                      for source in sources]

        # Each source becomes table1..tableN inside one in-memory SQLite db.
        sqlite_connection = sqlite3.Connection(':memory:')
        for index, table in enumerate(tables, start=1):
            rows.export_to_sqlite(table, sqlite_connection,
                                  table_name='table{}'.format(index))

        result = rows.import_from_sqlite(sqlite_connection, query=query)

    if output is None:
        # No destination: render the result as text and echo it to stdout.
        fobj = BytesIO()
        if output_locale is not None:
            with rows.locale_context(output_locale):
                rows.export_to_txt(result, fobj, encoding=output_encoding)
        else:
            rows.export_to_txt(result, fobj, encoding=output_encoding)
        fobj.seek(0)
        click.echo(fobj.read())
    else:
        if output_locale is not None:
            with rows.locale_context(output_locale):
                export_to_uri(result, output, encoding=output_encoding)
        else:
            export_to_uri(result, output, encoding=output_encoding)
def print_(input_encoding, output_encoding, input_locale, output_locale,
           table_index, verify_ssl, fields, fields_except, order_by, source):
    """Import a table from `source` and pretty-print it to stdout.

    `fields` whitelists the columns to show while `fields_except` removes
    columns instead; the two options are mutually exclusive (exit code 20).
    """
    if fields is not None and fields_except is not None:
        click.echo('ERROR: `--fields` cannot be used with `--fields-except`',
                   err=True)
        sys.exit(20)

    # TODO: may use sys.stdout.encoding if output_file = '-'
    output_encoding = output_encoding or sys.stdout.encoding or \
        DEFAULT_OUTPUT_ENCODING

    # TODO: may use `import_fields` for better performance
    if input_locale is not None:
        with rows.locale_context(input_locale):
            table = _import_table(source, encoding=input_encoding,
                                  verify_ssl=verify_ssl, index=table_index)
    else:
        table = _import_table(source, encoding=input_encoding,
                              verify_ssl=verify_ssl, index=table_index)

    table_field_names = table.fields.keys()
    if fields is not None:
        fields = _get_field_names(fields, table_field_names)
    if fields_except is not None:
        fields_except = _get_field_names(fields_except, table_field_names)

    if fields is not None and fields_except is None:
        export_fields = fields
    elif fields is not None and fields_except is not None:
        export_fields = list(fields)
        # BUG FIX: `map(export_fields.remove, fields_except)` is a lazy
        # iterator on Python 3 that was never consumed, so the removals
        # never happened; remove the fields explicitly instead (this also
        # matches the sibling `print_` implementation in this file).
        for field_to_remove in fields_except:
            export_fields.remove(field_to_remove)
    elif fields is None and fields_except is not None:
        export_fields = list(table_field_names)
        for field_to_remove in fields_except:
            export_fields.remove(field_to_remove)
    else:
        export_fields = table_field_names

    if order_by is not None:
        order_by = _get_field_names(order_by, table_field_names,
                                    permit_not=True)
        # TODO: use complete list of `order_by` fields
        table.order_by(order_by[0].replace('^', '-'))

    fobj = BytesIO()
    if output_locale is not None:
        with rows.locale_context(output_locale):
            rows.export_to_txt(table, fobj, encoding=output_encoding,
                               export_fields=export_fields)
    else:
        rows.export_to_txt(table, fobj, encoding=output_encoding,
                           export_fields=export_fields)
    fobj.seek(0)
    # TODO: may pass unicode to click.echo if output_encoding is not provided
    click.echo(fobj.read())