"""CSV commands for the Unsync Tool.""" import unsync import petl import os @unsync.command() @unsync.option('--output-file', '-o', type=unsync.Path(dir_okay=False, readable=True, resolve_path=True), help='CSV file that data will be read from.') @unsync.option('--source', '-s', required=True, help='Source table to export as CSV.') @unsync.option('--csv-arg', multiple=True, type=unsync.Tuple([str, str]), help='Arguments that will be passed to petl\'s CSV parsing engine.') @unsync.option('--errors', default='strict', help='PETL option for CSV errors.') @unsync.option('--write-header/--no-write-header', default=True, help='When set the CSV file will have a header row.') @unsync.option('--append/--no-append', default=False, help='When set the output file will be opened and rows will be appended to the existing data. When set --write-header is always False.') def export(data, output_file, source, csv_arg, errors, write_header, append): """Export the specified table of data to a csv file.""" existing_data = data.get(source) if append is True: petl.appendcsv(existing_data, output_file, errors=errors, **dict(csv_arg)) else: petl.tocsv(existing_data, output_file, errors=errors, write_header=write_header, **dict(csv_arg)) @unsync.command() @unsync.option('--output-dir', '-o', type=unsync.Path(dir_okay=True, file_okay=False, readable=True, resolve_path=True), help='CSV file that data will be read from.') @unsync.option('--csv-arg', multiple=True, type=unsync.Tuple([str, str]), help='Arguments that will be passed to petl\'s CSV parsing engine.') @unsync.option('--errors', default='strict', help='PETL option for CSV errors.') @unsync.option('--write-header/--no-write-header', default=True, help='When set the CSV file will have a header row.') @unsync.option('--exclude-empty/--include-empty', default=True, help='When set data tables with no data in them will not create CSV files.') def full_export(data, output_dir, csv_arg, errors, write_header, exclude_empty): """Export all 
data tables as CSV files.""" if not os.path.exists(output_dir):
"""Timetabler PTF9 import functions.""" import unsync import petl @unsync.command() @unsync.option('--input-file', '-i', type=unsync.Path(exists=True, dir_okay=False, readable=True, resolve_path=True), help='Timetabler PTF9 file to extract data from.') @unsync.option('--destination', '-d', default='users', help='The destination table that these users will be stored in.' ) def ptf9_staff_import(data, input_file, destination): """Import staff users from a Timetabler PTF9 export file.""" staff_users = petl.fromxml( input_file, '{http://www.timetabling.com.au/TDV9}Teachers/{http://www.timetabling.com.au/TDV9}Teacher', { 'TeacherID': '{http://www.timetabling.com.au/TDV9}TeacherID', 'Code': '{http://www.timetabling.com.au/TDV9}Code', 'FirstName': '{http://www.timetabling.com.au/TDV9}FirstName', 'MiddleName': '{http://www.timetabling.com.au/TDV9}MiddleName', 'LastName': '{http://www.timetabling.com.au/TDV9}LastName', 'Salutation': '{http://www.timetabling.com.au/TDV9}Salutation', 'DaysUnavailable':
"""Timetabler PTF9 import functions.""" import unsync import petl @unsync.command() @unsync.option('--input-file', '-i', type=unsync.Path(exists=True, dir_okay=False, readable=True, resolve_path=True), help='Timetabler PTF9 file to extract data from.') @unsync.option('--destination', '-d', default='enrollments', help='The destination table that these enrollments will be stored in.') def ptf9_student_enrollment_import(data, input_file, destination): """Import student enrollments from a Timetabler PTF9 export file.""" student_enrollments = petl.fromxml(input_file, '{http://www.timetabling.com.au/TDV9}StudentLessons/{http://www.timetabling.com.au/TDV9}StudentLesson', { 'StudentID': '{http://www.timetabling.com.au/TDV9}StudentID', 'CourseID': '{http://www.timetabling.com.au/TDV9}CourseID', 'LessonType': '{http://www.timetabling.com.au/TDV9}LessonType', 'ClassCode': '{http://www.timetabling.com.au/TDV9}ClassCode', 'RollClassCode': '{http://www.timetabling.com.au/TDV9}RollClassCode', }) data.cat(destination, student_enrollments) command = ptf9_student_enrollment_import # default=[('StudentID', 'ptf9_student_id'), ('ClassCode', 'course_id')]
import unsync

# OAuth scopes this tool requests from the Google Admin SDK Directory API.
REQUIRED_SCOPES = [
    'https://www.googleapis.com/auth/admin.directory.group.member',
    'https://www.googleapis.com/auth/admin.directory.group',
    'https://www.googleapis.com/auth/admin.directory.orgunit',
    'https://www.googleapis.com/auth/admin.directory.user',
    'https://www.googleapis.com/auth/admin.directory.user.alias'
]


@unsync.command()
@unsync.option('--client-secrets', required=True, default='./client-secrets.json', type=unsync.Path(dir_okay=False, file_okay=True, readable=True, resolve_path=True), help='Location of the client-secrets.json file.')
@unsync.option('--credentials-path', type=unsync.Path(dir_okay=True, file_okay=False, resolve_path=True), help='Location of stored credentials. Defaults to a credentials directory in the app data folder.')
@unsync.option('--apps-domain', required=True, help='The Google Apps domain name.')
# BUG FIX: the original source contained the malformed decorator
# `@unsync.option('--')`, which declares no option name. Restored as the
# --output-file option that the function signature expects.
@unsync.option('--output-file', '-o', help='Destination for the retrieved group information.')
def list_groups(data, client_secrets, credentials_path, apps_domain, output_file):
    """Query the Google Apps domain and retrieve group information.

    NOTE(review): unimplemented stub — no API call is made yet. The original
    signature was ``(data, output_file)`` even though the decorators declare
    client_secrets/credentials_path/apps_domain; click-style frameworks pass
    every declared option as a keyword argument, so the signature was widened
    to accept them (assumes unsync follows click semantics — TODO confirm).
    """
    pass
"""PETL Distinct Command.""" import unsync @unsync.command() @unsync.option('--source', '-s', required=True, help='Name of the source data table.') @unsync.option('--destination', '-d', help='Name of the destination data table. Will default to the source table if not specified.') @unsync.option('--key', '-k', default=None, help='If the key keyword argument is passed, the comparison is done on the given key instead of the full row.') @unsync.option('--count', '-c', default=None, help='If the count argument is not None, it will be used as the name for an additional field, and the values of the field will be the number of duplicate rows.') @unsync.option('--presorted/--no-presorted', default=False, help='Are the tables presorted?') @unsync.option('--buffersize', default=None, type=int, help="Controls how presorting is performed. See http://petl.readthedocs.io/en/latest/transform.html#petl.transform.sorts.sort") @unsync.option('--tempdir', type=unsync.Path(file_okay=False, dir_okay=True, exists=True, writable=True, resolve_path=True), help='Location to store chunks when sorting.') @unsync.option('--cache/--no-cache', default=True, help='Controls wether presort results are chaced for use in subsequent operations.') def distinct(data, source, destination, key, count, presorted, buffersize, tempdir, cache): """Return only rows that are distinct within the table.""" if not destination: destination = source s = data.get(source) s = s.distinct(key=key, count=count, presorted=presorted, buffersize=buffersize, tempdir=tempdir, cache=cache) data.set(destination, s)
# NOTE(review): decorator-only fragment cut at both edges — it begins in the
# middle of an option's keyword arguments ('The right prefix.' suggests a
# petl join command with lprefix/rprefix options) and the decorated function
# itself lies outside this chunk. Preserved byte-for-byte; cannot be
# reconstructed without the surrounding file. Help-text typos here ("wether",
# "chaced") should be fixed when the full file is recovered.
type=str, help='The right prefix.') @unsync.option('--presorted/--no-presorted', default=False, help='Are the tables presorted?') @unsync.option( '--buffersize', default=None, type=int, help= "Controls how presorting is performed. See http://petl.readthedocs.io/en/latest/transform.html#petl.transform.sorts.sort" ) @unsync.option('--tempdir', type=unsync.Path(file_okay=False, dir_okay=True, exists=True, writable=True, resolve_path=True), help='Location to store chunks when sorting.') @unsync.option( '--cache/--no-cache', default=True, help= 'Controls wether presort results are chaced for use in subsequent operations.' ) @unsync.option( '--missing', default=None, help= 'Value to use for fields missing in a join. Only used for certain types of join.' )