Example no. 1
0
def export_set(dataset):
    """Returns DBF representation of a Dataset.

    Builds a dBase file on disk via a temporary path, populates one DBF
    record per dataset row, then reads the finished file back and returns
    its raw contents (bytes on Python 3, str on Python 2).
    """
    new_dbf = dbfnew.dbf_new()
    temp_file, temp_uri = tempfile.mkstemp()

    # Infer DBF column types from the first row's values: numbers become
    # numeric fields (width 10, 8 decimals), everything else 80-char text.
    first_row = dataset[0]
    for fieldname, field_value in zip(dataset.headers, first_row):
        if type(field_value) in [int, float]:
            new_dbf.add_field(fieldname, 'N', 10, 8)
        else:
            new_dbf.add_field(fieldname, 'C', 80)

    new_dbf.write(temp_uri)

    # Re-open the skeleton file read-write and append every row.
    dbf_file = dbf.Dbf(temp_uri, readOnly=0)
    for row in dataset:
        record = dbfrecord.DbfRecord(dbf_file)
        for fieldname, field_value in zip(dataset.headers, row):
            record[fieldname] = field_value
        record.store()
    dbf_file.close()

    # Slurp the finished file into an in-memory stream; the with-block
    # guarantees the read handle is closed even if read() fails.
    with open(temp_uri, 'rb') as dbf_stream:
        if is_py3:
            stream = io.BytesIO(dbf_stream.read())
        else:
            stream = StringIO(dbf_stream.read())
    # BUG FIX: mkstemp() returns an OPEN file descriptor that the caller
    # must close; the original leaked it on every call.
    os.close(temp_file)
    os.remove(temp_uri)
    return stream.getvalue()
Example no. 2
0
def export_set(dataset):
    """Returns DBF representation of a Dataset"""
    builder = dbfnew.dbf_new()
    handle, path = tempfile.mkstemp()

    # Choose a field type per column by inspecting the first row:
    # ints/floats map to a numeric field, anything else to fixed text.
    for header, sample in zip(dataset.headers, dataset[0]):
        if type(sample) in [int, float]:
            builder.add_field(header, 'N', 10, 8)
        else:
            builder.add_field(header, 'C', 80)

    builder.write(path)

    # Re-open the freshly written skeleton and append one record per row.
    table = dbf.Dbf(path, readOnly=0)
    for row in dataset:
        rec = dbfrecord.DbfRecord(table)
        for header, value in zip(dataset.headers, row):
            rec[header] = value
        rec.store()
    table.close()

    # Read the result back into an in-memory stream, then clean up both
    # the mkstemp descriptor and the on-disk temp file.
    with open(path, 'rb') as fh:
        contents = fh.read()
    stream = io.BytesIO(contents) if is_py3 else StringIO(contents)
    os.close(handle)
    os.remove(path)
    return stream.getvalue()
Example no. 3
0
def export_set(dataset, **kwargs):
    """Returns CSV representation of Dataset."""
    output = StringIO()

    # Honour any caller-supplied csv.writer options; only fill in the
    # module-wide delimiter when the caller didn't pick one.
    if 'delimiter' not in kwargs:
        kwargs['delimiter'] = DEFAULT_DELIMITER

    writer = csv.writer(output, **kwargs)
    writer.writerows(dataset._package(dicts=False))

    return output.getvalue()
Example no. 4
0
File: _csv.py  Project: dtbinh/test
def export_set(dataset):
    """Returns CSV representation of Dataset."""
    output = StringIO()

    # NOTE(review): stdlib csv.writer takes no `encoding` argument, so on
    # Python 2 `csv` here is presumably a drop-in replacement writer.
    if not is_py3:
        writer = csv.writer(output, encoding=DEFAULT_ENCODING)
    else:
        writer = csv.writer(output)

    writer.writerows(dataset._package(dicts=False))

    return output.getvalue()
Example no. 5
0
def export_set(dataset):
    """Returns CSV representation of Dataset."""
    buff = StringIO()

    # On Python 2 the writer needs an explicit encoding; on Python 3 the
    # writer works on text directly and takes no extra options.
    extra = {} if is_py3 else {'encoding': DEFAULT_ENCODING}
    writer = csv.writer(buff, **extra)

    for line in dataset._package(dicts=False):
        writer.writerow(line)

    return buff.getvalue()
Example no. 6
0
    def write(self, dataset):
        """Render *dataset* as a plain-text "underlined key / value" report.

        Returns a BytesIO of UTF-8 encoded text: each cell is printed as
        a dashed rule, the key, another rule, then the value, with a
        50-asterisk separator after each row.
        """
        f = BytesIO()
        stream = StringIO()

        for row in dataset._package():
            for key, val in row.items():
                rule = '-' * len(key)
                stream.write(rule + '\n')
                # BUG FIX: the original wrote `key.encode('utf-8')` /
                # `val.encode('utf-8')` (bytes) into a text StringIO and
                # then str into a BytesIO — both TypeErrors on Python 3.
                # Build the report as text and encode once at the end,
                # which produces the identical byte sequence.
                stream.write(key + '\n')
                stream.write(rule + '\n')
                stream.write(val + '\n\n')
            stream.write('\n' + '*' * 50 + '\n\n\n')

        f.write(stream.getvalue().encode('utf-8'))
        return f
Example no. 7
0
def export_set(dataset, **kwargs):
    """Returns CSV representation of Dataset.

    Keyword Args:
        date_format: optional strftime pattern; when given, every
            date/datetime cell is formatted through it before writing.
        Remaining kwargs are forwarded verbatim to ``csv.writer``.
    """
    stream = StringIO()

    date_format = kwargs.pop('date_format', None)

    kwargs.setdefault('delimiter', DEFAULT_DELIMITER)
    if not is_py3:
        # Python 2 writer variant needs an explicit output encoding.
        kwargs.setdefault('encoding', DEFAULT_ENCODING)

    _csv = csv.writer(stream, **kwargs)

    for row in dataset._package(dicts=False):
        if date_format:
            for idx, value in enumerate(row):
                # datetime is a subclass of date, so a single tuple
                # isinstance covers both (was two chained checks).
                if isinstance(value, (date, datetime)):
                    row[idx] = value.strftime(date_format)

        _csv.writerow(row)

    return stream.getvalue()