def get_user_logins(data, url, api_key, user_data, user_id_field, destination):
    """Fetch Canvas login records for every user in a source table.

    For each row in the *user_data* table, calls the Canvas Logins API with
    the value of *user_id_field* and collects all returned login dicts into a
    new petl table stored under *destination*.

    :param data: shared pipeline state object (provides get/set/config).
    :param url: Canvas instance URL; a scheme is added if missing.
    :param api_key: Canvas API token.
    :param user_data: name of the source table registered in *data*.
    :param user_id_field: column holding the Canvas user ID.
    :param destination: table name under which results are stored.
    """
    # BUG FIX: the old check `not url.startswith('http') or not
    # url.startswith('https')` was True for plain 'http://' URLs and
    # prepended a second scheme. Test both schemes with a tuple instead.
    if not url.startswith(('http://', 'https://')):
        url = 'https://' + url
    user_data = data.get(user_data)
    user_data = user_data.dicts()
    login_data = []
    debug = data.config.debug
    client = LoginsAPI(url, api_key)
    for user in user_data:
        try:
            r = client.list_user_logins_users(user[user_id_field])
        except CanvasAPIError:
            unsync.secho(
                'Unable to retrieve Canvas Login information for Canvas User ID: {}'
                .format(user[user_id_field]), fg='red')
            # BUG FIX: previously the code fell through and iterated `r`
            # anyway — NameError on the first iteration, or re-appending the
            # previous user's logins on later ones. Skip this user instead.
            continue
        if debug:
            unsync.secho(
                'Retrieved {} Canvas Logins for Canvas User ID: {}'.format(
                    len(r), user[user_id_field]), fg='green')
        for login in r:
            login_data.append(login)
    login_data = petl.fromdicts(login_data)
    data.set(destination, login_data)
def shcopy_files(data, source, source_field, destination_field, results, quiet):
    """Copy files from the source to the destination.

    Iterates the rows of the *source* table, copying the file named in
    *source_field* to the path in *destination_field* via ``shutil.copy``.
    Per-row success/failure records are optionally stored as a petl table
    under the *results* name.

    :param data: shared pipeline state object (provides get/set/config).
    :param source: name of the source table registered in *data*.
    :param source_field: column holding the path to copy from.
    :param destination_field: column holding the path to copy to.
    :param results: table name for per-row outcomes, or falsy to skip.
    :param quiet: suppress per-file console output when true.
    """
    source = data.get(source)
    results_data = []
    for i in source.dicts():
        r = {
            source_field: i[source_field],
            destination_field: i[destination_field]
        }
        try:
            shutil.copy(i[source_field], i[destination_field])
            r['success'] = True
            r['message'] = "File copied successfully."
            if not quiet:
                unsync.secho('Successfully copied {} to {}'.format(
                    i[source_field], i[destination_field]), fg='green')
        except (shutil.Error, IOError) as e:
            r['success'] = False
            r['message'] = str(e)
            if not quiet:
                # BUG FIX: the format string has three placeholders but only
                # two arguments were supplied (IndexError at runtime), with
                # str(e) passed as a stray positional argument to secho.
                unsync.secho('Failed copying {} to {}. Reason was: {}'.format(
                    i[source_field], i[destination_field], str(e)), fg='red')
        results_data.append(r)
    if results:
        results_data = petl.fromdicts(results_data)
        data.set(results, results_data)
def from_args(data, pair):
    """Load valstore key value pairs from the provided pair arguments given to this command."""
    # Warn about every pair whose key would clobber an existing entry.
    for key, _value in pair:
        if key in data.values:
            unsync.secho(
                f'Valstore data already exists for the {key} key. This will be overwritten.',
                fg='red')
    # Apply all pairs in one shot; later duplicates win, as with dict().
    data.values.update(dict(pair))
def print_all_to_screen(data, offset, lines, style):
    """Print a representation for each table currently stored."""
    for table_name in data.registry:
        table = data.get(table_name)
        # Only slice when an offset was actually requested.
        if offset > 0:
            table = table.rowslice(offset, offset + lines)
        rendered = petl.look(table, limit=lines, style=style)
        unsync.secho('== {} =='.format(table_name), fg='green')
        unsync.echo(rendered)
def print_to_screen(data, offset, lines, source, column, style):
    """Print out a text version of the data contained in the source table.

    :param data: shared pipeline state object (provides get).
    :param offset: number of leading rows to skip (0 = from the top).
    :param lines: maximum number of rows to display.
    :param source: name of the table registered in *data*.
    :param column: tuple of column names to cut to; (None, ...) keeps all.
    :param style: petl look style name.
    """
    d = data.get(source)
    if offset > 0:
        d = d.rowslice(offset, offset + lines)
    # IDIOM FIX (PEP 8 / E714): `x is not None`, not `not x is None`.
    if column[0] is not None:
        d = d.cut(*column)
    a = petl.look(d, limit=lines, style=style)
    unsync.secho('== {} =='.format(source), fg='green')
    unsync.echo(a)
def update_user_logins(data, url, api_key, source, account_id_field,
                       login_id_field, unique_id_field, password_field,
                       sis_user_id_field, integration_id_field, results_table):
    """Update Canvas login objects from the rows of a source table.

    For each row, builds the set of login attributes that are present
    (unique id, password, SIS user id, integration id) and calls the Canvas
    ``edit_user_login`` endpoint. When *results_table* is given, per-row
    outcomes (request data and response) are concatenated into that table.

    :param data: shared pipeline state object (provides get/cat/config).
    :param url: Canvas instance URL; a scheme is added if missing.
    :param api_key: Canvas API token.
    :param source: name of the source table registered in *data*.
    :param results_table: optional table name for per-row outcomes.
    """
    # BUG FIX: the old check `not url.startswith('http') or not
    # url.startswith('https')` was True for plain 'http://' URLs and
    # prepended a second scheme. Test both schemes with a tuple instead.
    if not url.startswith(('http://', 'https://')):
        url = 'https://' + url
    client = LoginsAPI(url, api_key)
    source = data.get(source)
    debug = data.config.debug
    results = []
    for row in petl.dicts(source):
        account_id = row[account_id_field]
        login_id = row[login_id_field]
        kwargs = {}
        if unique_id_field is not None and row[unique_id_field] is not None:
            kwargs['login_unique_id'] = row[unique_id_field]
        if password_field is not None and row[password_field] is not None:
            kwargs['login_password'] = row[password_field]
        if sis_user_id_field is not None and row[sis_user_id_field] is not None:
            kwargs['login_sis_user_id'] = row[sis_user_id_field]
        if integration_id_field is not None and row[integration_id_field] is not None:
            kwargs['login_integration_id'] = row[integration_id_field]
        try:
            r = client.edit_user_login(login_id, account_id, **kwargs)
            unsync.secho('Successfully updated login: {} with data: {}'.format(
                login_id, str(kwargs)), fg='green')
            if results_table:
                row['_data'] = str(kwargs)
                # NOTE(review): the whole response object is stored in both
                # fields here, while the failure branch stores status code and
                # content separately — looks unintentional, but the response
                # type is opaque from here; confirm against the API client.
                row['_response_status'] = r
                row['_response_content'] = r
                results.append(row)
            if debug:
                unsync.secho(str(r), fg='yellow')
        except CanvasAPIError as e:
            unsync.secho('Failed updating login: {} with data: {}'.format(
                login_id, str(kwargs)), fg='red')
            unsync.secho('Response Status: {} Response Reason: {}'.format(
                e.response.status_code, e.response.content), fg='red')
            if results_table:
                row['_data'] = str(kwargs)
                row['_response_status'] = e.response.status_code
                row['_response_content'] = e.response.content
                results.append(row)
    # BUG FIX: previously data.cat() ran even when no results table was
    # requested; guard it like the `if results:` pattern in shcopy_files.
    if results_table:
        results = petl.fromdicts(results)
        data.cat(results_table, results)
def get_user_profiles(data, url, api_key, user_data, user_id_field, destination):
    """Fetch the Canvas profile for every user in a source table.

    For each row in the *user_data* table, calls the Canvas Users API with
    the value of *user_id_field* and collects the returned profile dicts
    into a new petl table stored under *destination*.

    :param data: shared pipeline state object (provides get/set/config).
    :param url: Canvas instance URL; a scheme is added if missing.
    :param api_key: Canvas API token.
    :param user_data: name of the source table registered in *data*.
    :param user_id_field: column holding the Canvas user ID.
    :param destination: table name under which results are stored.
    """
    # BUG FIX: the old check `not url.startswith('http') or not
    # url.startswith('https')` was True for plain 'http://' URLs and
    # prepended a second scheme. Test both schemes with a tuple instead.
    if not url.startswith(('http://', 'https://')):
        url = 'https://' + url
    user_data = data.get(user_data)
    user_data = user_data.dicts()
    profile_data = []
    debug = data.config.debug
    client = UsersAPI(url, api_key)
    for user in user_data:
        try:
            r = client.get_user_profile(user[user_id_field])
            profile_data.append(r)
            if debug:
                # BUG FIX: the message has one placeholder but was given
                # len(r) AND the user id, so it printed the profile length
                # where the user ID belonged.
                unsync.secho('Retrieved Profile for Canvas User ID: {}'.format(
                    user[user_id_field]), fg='green')
        except CanvasAPIError:
            unsync.secho('Unable to retrieve Profile for Canvas User ID: {}'.format(
                user[user_id_field]), fg='red')
    profile_data = petl.fromdicts(profile_data)
    data.set(destination, profile_data)
def from_yaml(data, values_data):
    """Load key value data from the provided YAML file and load it into the valstore. Existing keys with the same name will be overwritten."""
    if values_data is not None and len(values_data) >= 1:
        for values_data_file in values_data:
            with open(values_data_file, 'r') as f:
                try:
                    # SECURITY/BUG FIX: yaml.load() without a Loader is
                    # deprecated and can execute arbitrary Python via tags;
                    # safe_load only constructs plain data types.
                    yaml_data = yaml.safe_load(f)
                except yaml.scanner.ScannerError as e:
                    unsync.secho(
                        f'An error occurred processing the values data in: {values_data_file}',
                        fg='red')
                    unsync.secho(str(e), fg='red')
                else:
                    for k in yaml_data.keys():
                        if k in data.values:
                            unsync.secho(
                                f'Values data already exists for the {k} key. This will be overwritten in the current context.',
                                fg='red')
                    data.values.update(yaml_data)
def validate(data, name, source, header, test, assertion):
    """Validate that a table meets the required constraints.

    Builds a petl constraint list from the (name, field, expression) triples
    in *test* and *assertion*, optionally checks the table header, and raises
    ``PETLValidationError`` when any row fails.

    :param data: shared pipeline state object (provides get/config).
    :param name: optional label used to prefix the pass/fail messages.
    :param source: name of the table registered in *data*.
    :param header: expected header sequence, or None/empty to skip.
    :param test: iterable of (name, field, expression) test constraints;
        a field of '_row_' applies the constraint to the whole row.
    :param assertion: iterable of (name, field, expression) assertions.
    :raises PETLValidationError: when any constraint fails.
    """
    s = data.get(source)
    name = name + ' ' if name else ''
    constraints = []
    # SECURITY NOTE: eval() executes arbitrary Python from the supplied
    # expressions. Acceptable only because these come from the operator's
    # own command line — never feed untrusted input through here.
    for c_name, c_field, c_expr in test:
        constraint = {'name': c_name}
        if c_field != '_row_':
            constraint['field'] = c_field
        constraint['test'] = eval(c_expr)
        constraints.append(constraint)
    for c_name, c_field, c_expr in assertion:
        constraint = {'name': c_name}
        if c_field != '_row_':
            constraint['field'] = c_field
        constraint['assertion'] = eval(c_expr)
        constraints.append(constraint)
    params = {}
    if header is not None and len(header) != 0:
        params['header'] = header
    if constraints:
        params['constraints'] = constraints
    problems = petl.validate(s, **params)
    if problems.nrows() > 0:
        unsync.secho('{}Validation Failed!'.format(name), fg='red')
        unsync.secho(str(problems.lookall()), fg='red')
        raise PETLValidationError(problems)
    else:
        # IDIOM FIX: truthiness check, consistent with `if debug:` used by
        # the other commands in this file (was `is True`).
        if data.config.debug:
            unsync.secho('{}Validation Passed!'.format(name), fg='green')
def stats(data, source):
    """Print a text representation of the data table for a given KIND."""
    table = data.get(source)
    row_count = table.nrows()
    # The first row of a petl table is its header, so its length is the
    # column count.
    column_count = len(table[0])
    unsync.secho('Row Count: {}'.format(row_count), fg='green')
    unsync.secho('Column Count: {}'.format(column_count), fg='green')