Example #1
"""PETL Rename Command."""
import unsync


@unsync.command()
@unsync.option('--source',
               '-s',
               required=True,
               type=str,
               help='Name of the source data table(s).')
@unsync.option('--destination',
               '-d',
               type=str,
               help='Name of the destination data table.')
@unsync.option('--transform',
               '-t',
               multiple=True,
               type=unsync.Tuple([str, str]),
               help='Header transforms given as FROM TO pairs.')
def rename(data, source, destination, transform):
    """Rename columns based on Transform parameters."""
    if not destination:
        destination = source
    s = data.get(source)
    s = s.rename(dict(transform))
    data.set(destination, s)
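
The --transform pairs are collected into a dict and handed to petl's rename transform. A minimal standalone sketch of that mechanism (the table contents and new column name are invented for illustration):

import petl

example = petl.wrap([
    ['first', 'last'],
    ['Ada', 'Lovelace'],
])
# dict(transform) maps FROM -> TO; here 'first' is renamed to 'given_name'.
renamed = example.rename({'first': 'given_name'})
print(petl.header(renamed))  # ('given_name', 'last')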
Example #2
"""CSV commands for the Unsync Tool."""
import unsync
import petl
import os


@unsync.command()
@unsync.option('--output-file', '-o', type=unsync.Path(dir_okay=False, readable=True, resolve_path=True), help='CSV file that data will be written to.')
@unsync.option('--source', '-s', required=True, help='Source table to export as CSV.')
@unsync.option('--csv-arg', multiple=True, type=unsync.Tuple([str, str]), help='Arguments that will be passed to petl\'s CSV writing engine.')
@unsync.option('--errors', default='strict', help='PETL option for CSV errors.')
@unsync.option('--write-header/--no-write-header', default=True, help='When set, the CSV file will have a header row.')
@unsync.option('--append/--no-append', default=False, help='When set, rows will be appended to the existing output file; --write-header is then always False.')
def export(data, output_file, source, csv_arg, errors, write_header, append):
    """Export the specified table of data to a csv file."""
    existing_data = data.get(source)
    if append:
        petl.appendcsv(existing_data, output_file, errors=errors, **dict(csv_arg))
    else:
        petl.tocsv(existing_data, output_file, errors=errors, write_header=write_header, **dict(csv_arg))


@unsync.command()
@unsync.option('--output-dir', '-o', type=unsync.Path(dir_okay=True, file_okay=False, readable=True, resolve_path=True), help='Directory that CSV files will be written to.')
@unsync.option('--csv-arg', multiple=True, type=unsync.Tuple([str, str]), help='Arguments that will be passed to petl\'s CSV writing engine.')
@unsync.option('--errors', default='strict', help='PETL option for CSV errors.')
@unsync.option('--write-header/--no-write-header', default=True, help='When set, each CSV file will have a header row.')
@unsync.option('--exclude-empty/--include-empty', default=True, help='When set, data tables with no rows will not produce CSV files.')
def full_export(data, output_dir, csv_arg, errors, write_header, exclude_empty):
    """Export all data tables as CSV files."""
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
    # Assumed completion: `data.registry` is a hypothetical name -> table
    # mapping; each table is written to <name>.csv, honouring --exclude-empty.
    for name, table in data.registry.items():
        if exclude_empty and petl.nrows(table) == 0:
            continue
        output_file = os.path.join(output_dir, '{}.csv'.format(name))
        petl.tocsv(table, output_file, errors=errors, write_header=write_header, **dict(csv_arg))
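
Both commands forward the --csv-arg tuples to petl's CSV writers as keyword arguments for Python's csv module. A minimal sketch of that pass-through, with an invented table and file name:

import petl

example = petl.wrap([['id', 'name'], [1, 'Ada'], [2, 'Bob']])
csv_arg = (('delimiter', '\t'),)
# The (key, value) tuples become csv keyword arguments via **dict(csv_arg).
petl.tocsv(example, 'example.csv', errors='strict', write_header=True, **dict(csv_arg))
# appendcsv writes rows only, never a header, which is why --append
# forces --write-header to False.
petl.appendcsv(example, 'example.csv', errors='strict', **dict(csv_arg))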
Example #3
"""Utility command to filter table rows based on the value of a column and a provided regex."""
import unsync


@unsync.command()
@unsync.option('--source', '-s', required=True, help='The source data table.')
@unsync.option('--filter', '-f', type=unsync.Tuple([str, str]), multiple=True, required=True, help='A tuple of values, first is the column to filter on and second is the regex to use.')
@unsync.option('--destination', '-d', help='The destination data table for matched rows. If blank will overwrite the source table.')
def search(data, source, filter, destination):
    """Include rows where the specified column matches the given regex."""
    if not destination:
        destination = source
    s = data.get(source)
    for column, pattern in filter:
        s = s.search(column, pattern)
    data.set(destination, s)
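
Each --filter pair is applied in turn with petl's search transform, which keeps only the rows whose field matches the given regex. A minimal standalone sketch (the table and pattern are invented for illustration):

import petl

example = petl.wrap([
    ['email', 'name'],
    ['ada@example.com', 'Ada'],
    ['bob@test.org', 'Bob'],
])
# Equivalent to a single --filter email '@example\.com$' pair: only rows
# whose 'email' field matches the regex are kept.
matched = example.search('email', r'@example\.com$')
print(petl.nrows(matched))  # 1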