def export_set(dataset):
    """Returns DBF representation of a Dataset.

    A temporary file is used because the dbfpy writer works on file
    paths; the finished file is read back into memory and returned.
    """
    new_dbf = dbfnew.dbf_new()
    temp_file, temp_uri = tempfile.mkstemp()
    try:
        # Field types are inferred from the first row only: ints/floats
        # become numeric 'N' (width 10, 8 decimals), everything else a
        # 'C' 80-character text field.
        # NOTE: ``type(v) in [int, float]`` (not isinstance) deliberately
        # excludes bool and numeric subclasses — preserved as-is.
        first_row = dataset[0]
        for fieldname, field_value in zip(dataset.headers, first_row):
            if type(field_value) in [int, float]:
                new_dbf.add_field(fieldname, 'N', 10, 8)
            else:
                new_dbf.add_field(fieldname, 'C', 80)
        new_dbf.write(temp_uri)

        # Append every row of the dataset as a DBF record.
        dbf_file = dbf.Dbf(temp_uri, readOnly=0)
        for row in dataset:
            record = dbfrecord.DbfRecord(dbf_file)
            for fieldname, field_value in zip(dataset.headers, row):
                record[fieldname] = field_value
            record.store()
        dbf_file.close()

        # Read the finished DBF back into an in-memory stream.
        with open(temp_uri, 'rb') as dbf_stream:
            if is_py3:
                stream = io.BytesIO(dbf_stream.read())
            else:
                stream = StringIO(dbf_stream.read())
        return stream.getvalue()
    finally:
        # Always release the mkstemp() descriptor and remove the temp
        # file, even if the DBF writer raised part-way through.
        os.close(temp_file)
        os.remove(temp_uri)
def export_set(dataset):
    """Returns DBF representation of a Dataset.

    A temporary file is used because the dbfpy writer works on file
    paths; the finished file is read back into memory and returned.
    """
    new_dbf = dbfnew.dbf_new()
    temp_file, temp_uri = tempfile.mkstemp()
    try:
        # Field types are inferred from the first row only: ints/floats
        # become numeric 'N' (width 10, 8 decimals), everything else a
        # 'C' 80-character text field.
        # NOTE: ``type(v) in [int, float]`` (not isinstance) deliberately
        # excludes bool and numeric subclasses — preserved as-is.
        first_row = dataset[0]
        for fieldname, field_value in zip(dataset.headers, first_row):
            if type(field_value) in [int, float]:
                new_dbf.add_field(fieldname, 'N', 10, 8)
            else:
                new_dbf.add_field(fieldname, 'C', 80)
        new_dbf.write(temp_uri)

        # Append every row of the dataset as a DBF record.
        dbf_file = dbf.Dbf(temp_uri, readOnly=0)
        for row in dataset:
            record = dbfrecord.DbfRecord(dbf_file)
            for fieldname, field_value in zip(dataset.headers, row):
                record[fieldname] = field_value
            record.store()
        dbf_file.close()

        # Read the finished DBF back into an in-memory stream.
        with open(temp_uri, 'rb') as dbf_stream:
            if is_py3:
                stream = io.BytesIO(dbf_stream.read())
            else:
                stream = StringIO(dbf_stream.read())
        return stream.getvalue()
    finally:
        # BUGFIX: the original never called os.close(temp_file), leaking
        # the mkstemp() file descriptor on every export.  The finally
        # clause also guarantees cleanup if the DBF writer raised.
        os.close(temp_file)
        os.remove(temp_uri)
def export_set(dataset, **kwargs):
    """Returns CSV representation of Dataset."""
    # Callers may override any csv.writer option; the delimiter falls
    # back to the module default when not supplied.
    kwargs.setdefault('delimiter', DEFAULT_DELIMITER)
    out = StringIO()
    writer = csv.writer(out, **kwargs)
    # _package(dicts=False) yields plain row sequences (headers first).
    for line in dataset._package(dicts=False):
        writer.writerow(line)
    return out.getvalue()
def export_set(dataset):
    """Returns CSV representation of Dataset."""
    out = StringIO()
    # Python 2's csv writer (unicodecsv) needs an explicit encoding;
    # Python 3's stdlib writer works on text directly.
    if is_py3:
        writer = csv.writer(out)
    else:
        writer = csv.writer(out, encoding=DEFAULT_ENCODING)
    # _package(dicts=False) yields plain row sequences (headers first).
    for line in dataset._package(dicts=False):
        writer.writerow(line)
    return out.getvalue()
def import_set(dset, in_stream, headers=True):
    """Returns dataset from CSV stream."""
    dset.wipe()
    # Python 2's csv reader (unicodecsv) needs an explicit encoding.
    if is_py3:
        reader = csv.reader(StringIO(in_stream))
    else:
        reader = csv.reader(StringIO(in_stream), encoding=DEFAULT_ENCODING)
    for index, line in enumerate(reader):
        # The first line becomes the header row when requested.
        if headers and index == 0:
            dset.headers = line
        else:
            dset.append(line)
def import_set(dset, in_stream, headers=True):
    """Returns a dataset from a DBF stream."""
    dset.wipe()
    # dbfpy wants a binary file-like object on Python 3, a StringIO on 2.
    source = io.BytesIO(in_stream) if is_py3 else StringIO(in_stream)
    _dbf = dbf.Dbf(source)
    names = _dbf.fieldNames
    dset.headers = names
    # Copy every record across, column order matching the field names.
    for index in range(_dbf.recordCount):
        dset.append([_dbf[index][name] for name in names])
def export_set(dataset, **kwargs):
    """Returns CSV representation of Dataset."""
    out = StringIO()
    # date_format is our own option, not csv.writer's — pop it first.
    date_format = kwargs.pop('date_format', None)
    kwargs.setdefault('delimiter', DEFAULT_DELIMITER)
    if not is_py3:
        # Python 2's csv writer (unicodecsv) needs an explicit encoding.
        kwargs.setdefault('encoding', DEFAULT_ENCODING)
    writer = csv.writer(out, **kwargs)
    for line in dataset._package(dicts=False):
        if date_format:
            # Render date/datetime cells with the requested format.
            # (datetime is a subclass of date, so one isinstance covers
            # the original two-part check exactly.)
            for position, cell in enumerate(line):
                if isinstance(cell, (date, datetime)):
                    line[position] = cell.strftime(date_format)
        writer.writerow(line)
    return out.getvalue()
def detect(stream):
    """Returns True if the given stream is valid DBF."""
    # Local import: struct is only needed for its error class below.
    import struct
    try:
        if is_py3:
            if type(stream) is not bytes:
                stream = bytes(stream, 'utf-8')
            _dbf = dbf.Dbf(io.BytesIO(stream), readOnly=True)
        else:
            _dbf = dbf.Dbf(StringIO(stream), readOnly=True)
        return True
    except (ValueError, struct.error):
        # BUGFIX: the original caught bare ``Exception``, hiding genuine
        # errors (typos, MemoryError-adjacent failures, etc.) behind a
        # False result.  dbfpy raises ValueError for non-DBF content and
        # struct.error when the header is shorter than the 8 bytes it
        # tries to unpack — only those mean "not a DBF".  This matches
        # the sibling detect() implementation elsewhere in the project.
        return False
def import_set(dset, in_stream, headers=True, **kwargs):
    """Returns dataset from CSV stream."""
    dset.wipe()
    kwargs.setdefault('delimiter', DEFAULT_DELIMITER)
    reader = csv.reader(StringIO(in_stream), **kwargs)
    for index, line in enumerate(reader):
        if headers and index == 0:
            # First line becomes the header row when requested.
            dset.headers = line
        elif line:
            # Completely empty lines are skipped.
            dset.append(line)
def detect(stream):
    """Returns True if the given stream is valid DBF."""
    try:
        if is_py3:
            # dbfpy needs raw bytes on Python 3; encode text input first.
            raw = stream if type(stream) is bytes else bytes(stream, 'utf-8')
            dbf.Dbf(io.BytesIO(raw), readOnly=True)
        else:
            dbf.Dbf(StringIO(stream), readOnly=True)
    except (ValueError, struct.error):
        # dbfpy signals a non-DBF payload with ValueError; struct.error
        # surfaces when the header is shorter than the 8 bytes it tries
        # to unpack.
        return False
    return True
def write(self, dataset):
    """Return a BytesIO containing a plain-text report of *dataset*.

    Each row is rendered as underlined ``key`` headings followed by the
    value, with a 50-asterisk separator between rows.
    """
    # BUGFIX: the original mixed text and bytes — it wrote
    # ``key.encode('utf-8') + '\n'`` (bytes + str -> TypeError on
    # Python 3) into a StringIO, then wrote that str into a BytesIO.
    # Build the whole report as text, then encode exactly once.
    stream = StringIO()
    for row in dataset._package():
        for key, val in row.items():
            underline = '-' * len(key)
            stream.write(underline + '\n')
            stream.write(key + '\n')
            stream.write(underline + '\n')
            stream.write(val + '\n\n')
        stream.write('\n' + '*' * 50 + '\n\n\n')
    f = BytesIO()
    f.write(stream.getvalue().encode('utf-8'))
    return f