def download_all(cls, registry: Registry, args):
    """Download every remote resource of this type and report local status.

    Fetches the full listing from the remote service, stores each resource's
    full JSON in the registry, and prints a table showing whether a local
    copy exists for each remote title.
    """
    canvas = registry.get_service(args.service, "canvas")
    # NOTE(review): single-argument get_service here, unlike the
    # get_service(args.local_service, 'local') form used elsewhere — confirm
    # this overload resolves to the default local service.
    local = registry.get_service('local')
    resources = canvas.api.get(cls.endpoint, retrieve_all=True)
    rows = []
    # Each row is (remote-exists, local-status, title, local-path).
    for resource in tqdm(natsorted(resources, key=cls.sort_resource)):
        try:
            path = local.find_existing(registry, resource[cls.title_attribute])
            rows.append(("Yes", "Yes", resource[cls.title_attribute],
                         os.path.relpath(path)))
        except WaltzAmbiguousResource as war:
            # Multiple local candidates: list them all, newline-separated.
            paths = "\n".join(os.path.relpath(path) for path in war.args[0])
            rows.append(("Yes", "Multiple", resource[cls.title_attribute],
                         paths))
        except FileNotFoundError:
            rows.append(("Yes", "No", resource[cls.title_attribute], ""))
        # Fetch the complete resource body (the listing is abbreviated) and
        # persist it regardless of local status.
        full_resource = canvas.api.get(cls.endpoint + str(resource[cls.id]))
        registry.store_resource(canvas.name, cls.name,
                                resource[cls.title_attribute], "",
                                json.dumps(full_resource))
    print(tabulate(rows, ('Remote', 'Local', 'Title', 'Path')))
    print("Downloaded", len(resources), cls.name_plural)
def decode(cls, registry: Registry, args):
    """Decode stored resource JSON back into local markdown files.

    Handles either a single titled resource or, with ``args.all``, every
    stored resource of this category; companion files produced by
    ``decode_json`` are written alongside the main markdown file.
    """
    local = registry.get_service(args.local_service, 'local')
    # TODO: use disambiguate
    if args.all:
        stored = registry.find_all_resources(service=args.service,
                                             category=cls.name)
    else:
        stored = [registry.find_resource(title=args.title,
                                         service=args.service,
                                         category=cls.name)]
    for resource in stored:
        # Prefer an existing local file; otherwise derive a fresh filename.
        try:
            target = local.find_existing(registry, resource.title)
        except FileNotFoundError:
            target = local.make_markdown_filename(resource.title)
        if args.destination:
            target = os.path.join(args.destination, target)
        markdown, companions = cls.decode_json(registry, resource.data, args)
        local.write(target, markdown)
        for companion_path, companion_data in companions:
            local.write(companion_path, companion_data)
def upload_new(cls, registry: Registry, local_quiz, args):
    """Create a brand-new quiz on Canvas, including its groups and questions.

    Posts the quiz shell first, then each question group (building a map from
    local group names/ids to the Canvas-assigned ids), then each question
    with its group reference rewritten to the Canvas id.

    Raises:
        WaltzException: if Canvas reports errors creating the quiz.
    """
    canvas = registry.get_service(args.service, "canvas")
    quiz_data = cls._make_canvas_upload(registry, local_quiz, args)
    created_quiz = canvas.api.post('quizzes/', data=quiz_data)
    if 'errors' in created_quiz:
        pprint(created_quiz['errors'])
        raise WaltzException("Error loading data, see above.")
    print("Created quiz", local_quiz['title'], "on canvas")
    # Create the groups
    group_name_to_id = {}
    for group in local_quiz['groups'].values():
        group_data = QuizGroup._make_canvas_upload(registry, group, args)
        created_group = canvas.api.post(
            'quizzes/{quiz_id}/groups'.format(quiz_id=created_quiz['id']),
            data=group_data)
        created_group = created_group['quiz_groups'][0]  # acbart: Weird response type
        # acbart: Okay because names are strings and IDs are ints
        group_name_to_id[created_group['name']] = created_group['id']
        group_name_to_id[created_group['id']] = created_group['id']
    if local_quiz['groups']:
        print("Created quiz", local_quiz['title'], "groups on canvas")
    # Create the questions
    for question in local_quiz['questions']:
        # Rewrite the local group reference (name or id) to the Canvas id.
        # Fixed: use .get() instead of the two-step membership + None check.
        if question.get('quiz_group_id') is not None:
            question['quiz_group_id'] = group_name_to_id[
                question['quiz_group_id']]
        question_data = QuizQuestion._make_canvas_upload(
            registry, question, args)
        # Fixed: dropped the unused `created_question` binding.
        canvas.api.post(
            'quizzes/{quiz_id}/questions'.format(quiz_id=created_quiz['id']),
            data=question_data)
    print("Created quiz", local_quiz['title'], "questions on canvas")
def decode_question(cls, registry: Registry, question, quiz, args):
    """Decode one quiz question into (title, destination path, markdown).

    With ``args.combine`` the question is instead decoded in-place and
    returned as ``(raw_dict, None, None)`` with its text converted from HTML
    to markdown.
    """
    question_type = cls.TYPES[question['question_type']]
    if args.combine:
        raw = question_type.decode_json_raw(registry, question, args)
        raw['text'] = h2m(raw['text'])
        return raw, None, None
    local = registry.get_service(args.local_service, 'local')
    title = question['question_name']
    # Reuse an existing local file when one matches; otherwise invent a name.
    try:
        destination_path = local.find_existing(registry, args.title,
                                               folder_file=title)
    except FileNotFoundError:
        destination_path = local.make_markdown_filename(title)
    if args.banks:
        # The first bank pattern is a template filled with question/quiz info.
        first_bank_path = args.banks[0].format(
            title=make_safe_filename(title),
            id=question['id'],
            quiz_title=make_safe_filename(quiz['title']),
            quiz_id=quiz['id'])
        destination_path = os.path.join(first_bank_path, destination_path)
    else:
        # NOTE(review): args.destination is only consulted in the no-banks
        # branch — confirm banks are meant to ignore the destination prefix.
        first_bank_path = make_safe_filename(quiz['title'])
        if args.destination:
            destination_path = os.path.join(args.destination, first_bank_path,
                                            destination_path)
        else:
            destination_path = os.path.join(first_bank_path, destination_path)
    decoded_markdown = question_type.decode_json(registry, question, args)
    return title, destination_path, decoded_markdown
def download(cls, registry: Registry, args):
    """Download a whole BlockPy course bundle and store its pieces.

    Looks up the course by URL, exports its bundle, then stores each
    assignment as a 'problem', each group as a 'blockpy_group' (with its
    member problem urls attached), and a 'blockpy_course' record mapping
    groups to problems.

    Raises:
        WaltzAmbiguousResource: more than one course matches the URL.
        WaltzResourceNotFound: no course matches the URL.
    """
    blockpy = registry.get_service(args.service, "blockpy")
    courses = blockpy.api.get('list/courses/')['courses']
    potentials = [course for course in courses
                  if course['url'] == args.title]
    if len(potentials) > 1:
        raise WaltzAmbiguousResource(
            "Too many courses with URL '{}'".format(args.title))
    elif not potentials:
        raise WaltzResourceNotFound(
            "No course with URL '{}' found.".format(args.title))
    course_id = potentials[0]['id']
    bundle = blockpy.api.get('export/', json={'course_id': course_id})
    records = {
        'problems': {},
        'groups': []
    }
    # Memberships: build group-url -> [assignment identifiers] maps.
    groups_assignments = {}
    for membership in bundle['memberships']:
        if membership['assignment_group_url'] not in groups_assignments:
            groups_assignments[membership['assignment_group_url']] = []
            records['problems'][membership['assignment_group_url']] = []
        # Prefer the assignment url; fall back to its numeric id.
        better_url = membership['assignment_url']
        if better_url is None:
            better_url = membership['assignment_id']
        groups_assignments[membership['assignment_group_url']].append(better_url)
        records['problems'][membership['assignment_group_url']].append(better_url)
    # Assignments
    for assignment in bundle['assignments']:
        registry.store_resource(blockpy.name, 'problem', assignment['url'],
                                "", json.dumps(assignment))
    # Groups
    for group in bundle['groups']:
        group['problems'] = groups_assignments.get(group['url'], [])
        records['groups'].append(group['url'])
        registry.store_resource(blockpy.name, 'blockpy_group', group['url'],
                                "", json.dumps(group))
    registry.store_resource(blockpy.name, 'blockpy_course', args.title,
                            course_id, json.dumps(records))
def encode(cls, registry: Registry, args):
    """Encode the local markdown for ``args.title`` to JSON and store it."""
    local_service = registry.get_service(args.local_service, 'local')
    markdown_path = local_service.find_existing(registry, args.title,
                                                folder_file=cls.folder_file)
    raw_markdown = local_service.read(markdown_path)
    encoded = cls.encode_json(registry, raw_markdown, args)
    registry.store_resource(args.service, cls.name, args.title, "", encoded)
def encode_question_by_title(cls, registry: Registry, title: str, args):
    """Locate a question's markdown by title and encode it for upload.

    Splits off the front matter, converts the body to HTML with the regular
    metadata hidden inside it, and delegates to ``encode_question``.
    """
    local = registry.get_service(args.local_service, 'local')
    # TODO: By default limit search to "<Quiz> Questions/" folder?
    question_path = local.find_existing(registry, title,
                                        check_front_matter=True,
                                        top_directories=args.banks)
    raw_markdown = local.read(question_path)
    regular, waltz, body = extract_front_matter(raw_markdown)
    rendered_body = hide_data_in_html(regular, m2h(body))
    waltz['question_text'] = rendered_body
    return cls.encode_question(registry, waltz, args)
def upload(cls, registry: Registry, args):
    """Push the stored local copy of a wiki page up to Canvas."""
    canvas = registry.get_service(args.service, "canvas")
    stored = registry.find_resource(title=args.title,
                                    service=args.service,
                                    category=args.category,
                                    disambiguate=args.url)
    page = json.loads(stored.data)
    # NOTE(review): the page title is used as the URL slug here — confirm
    # Canvas accepts the title where its API expects the page url.
    endpoint = "pages/{url}".format(url=page['title'])
    payload = {
        'wiki_page[title]': page['title'],
        'wiki_page[body]': page['body'],
        'wiki_page[published]': page['published']
    }
    canvas.api.put(endpoint, data=payload)
def upload(cls, registry: Registry, args):
    """Create or update the Canvas assignment matching the stored copy."""
    canvas = registry.get_service(args.service, "canvas")
    stored = registry.find_resource(title=args.title, service=canvas.name,
                                    category=cls.name, disambiguate="")
    assignment = json.loads(stored.data)
    payload = cls._make_canvas_upload(registry, assignment, args)
    # POST a new assignment when none exists remotely; otherwise PUT over it.
    existing = cls.find(canvas, args.title)
    if existing is None:
        canvas.api.post('assignments/', data=payload)
    else:
        canvas.api.put("assignments/{aid}".format(aid=existing['id']),
                       data=payload)
def upload(cls, registry: Registry, args):
    """Import the stored local resource into its BlockPy course."""
    blockpy = registry.get_service(args.service, cls.default_service)
    stored = registry.find_resource(title=args.title,
                                    service=args.service,
                                    category=args.category,
                                    disambiguate=args.url)
    payload = json.loads(stored.data)
    body = {
        'course_id': payload['course_id'],
        'assignments': [payload]
    }
    blockpy.api.post("import", json=body)
def diff_extra_files(cls, registry: Registry, data, args):
    """Yield (path, contents) pairs for the problem's companion ``.py`` files.

    For each of the standard instructor files, yields the local file's path
    and contents; when a file is absent locally, yields the bare filename
    with empty contents so the diff can report it as missing.
    """
    local = registry.get_service(args.local_service, 'local')
    for py_filename in ['on_run', 'starting_code', 'on_change', 'on_eval']:
        try:
            source_path = local.find_existing(registry, args.title,
                                              folder_file=py_filename,
                                              extension='.py')
        except FileNotFoundError:
            yield py_filename, ""
            continue
        # Fixed: removed stray debug print(source_path) that polluted
        # console diff output.
        yield source_path, local.read(source_path)
def upload(cls, registry: Registry, args):
    """Upload the stored quiz: create it if absent remotely, else edit it."""
    canvas = registry.get_service(args.service, "canvas")
    # Get the local version
    stored = registry.find_resource(title=args.title,
                                    service=args.service,
                                    category=args.category,
                                    disambiguate=args.id)
    local_quiz = json.loads(stored.data)
    # Get the remote version
    remote_quiz = cls.find(canvas, args.title)
    # Either put or post the quiz
    if remote_quiz is None:
        cls.upload_new(registry, local_quiz, args)
    else:
        cls.upload_edit(registry, remote_quiz, local_quiz, args)
def download(cls, registry: Registry, args):
    """Download a single BlockPy problem by its URL and store it.

    Raises:
        WaltzAmbiguousResource: more than one assignment matches the URL.
        WaltzResourceNotFound: no assignment matches the URL.
    """
    blockpy = registry.get_service(args.service, "blockpy")
    bundle = blockpy.api.get('export/', json={'assignment_url': args.title})
    matches = bundle['assignments']
    if len(matches) > 1:
        raise WaltzAmbiguousResource(
            f"Too many problems with URL '{args.title}'")
    if not matches:
        raise WaltzResourceNotFound(
            f"No problem with URL '{args.title}' found.")
    assignment = matches[0]
    registry.store_resource(blockpy.name, 'problem', assignment['url'], "",
                            json.dumps(assignment))
def diff_extra_files(cls, registry: Registry, data, args):
    """Yield (path, contents) for each question file referenced by the quiz.

    Parses the quiz markdown's front matter and resolves every question
    listed by name (directly or inside a group) to its local file.
    """
    local = registry.get_service(args.local_service, 'local')
    regular, waltz, body = extract_front_matter(data)
    for question in waltz['questions']:
        # A bare string is a reference to a question file by title.
        if isinstance(question, str):
            destination_path = local.find_existing(
                registry, args.title, folder_file=question,
                check_front_matter=True, top_directories=args.banks)
            yield destination_path, local.read(destination_path)
        elif 'group' in question:
            for inner_question in question['questions']:
                if isinstance(inner_question, str):
                    # NOTE(review): this lookup omits check_front_matter and
                    # top_directories, unlike the top-level lookup above —
                    # confirm whether grouped questions should search the
                    # same way.
                    destination_path = local.find_existing(
                        registry, args.title, folder_file=inner_question)
                    yield destination_path, local.read(destination_path)
def download(cls, registry: Registry, args):
    """Download one Canvas resource (or all, with ``args.all``) and store it.

    Returns the stored JSON string when the resource is found; when it
    isn't, delegates to ``find_similar`` to suggest close matches.
    """
    if args.all:
        cls.download_all(registry, args)
        return
    canvas = registry.get_service(args.service, "canvas")
    resource_json = cls.find(canvas, args.title)
    if resource_json is not None:
        # find_resource is used only as an existence probe here: success
        # means we already had a copy ("new version"), failure means this
        # is the first download.
        try:
            registry.find_resource(canvas.name, cls.name, args.title, "")
            print("Downloaded new version of {}: ".format(cls.name),
                  args.title)
        except WaltzException:
            print("Downloaded new {}:".format(cls.name), args.title)
        resource_json = json.dumps(resource_json)
        registry.store_resource(canvas.name, cls.name, args.title, "",
                                resource_json)
        return resource_json
    # Nothing matched exactly; report near-misses instead.
    cls.find_similar(registry, canvas, args)
def decode_json(cls, registry: Registry, data: str, args):
    """Convert stored BlockPy problem JSON into markdown + companion files.

    Builds a CommentedMap of front-matter metadata (title, visibility,
    identity, files), then collects the instructor code files (on_run,
    starting_code, optional on_change/on_eval, and any special
    extra_instructor_files) as (path, contents) pairs.

    Returns:
        A tuple of (markdown with front matter prepended, list of
        (path, contents) extra files).
    """
    raw_data = json.loads(data)
    result = CommentedMap()
    result['title'] = raw_data['url']
    result['display title'] = raw_data['name']
    result['resource'] = cls.name
    result['type'] = raw_data['type']
    if raw_data['reviewed']:
        result['human reviewed'] = raw_data['reviewed']
    result['visibility'] = CommentedMap()
    result['visibility']['hide status'] = raw_data['hidden']
    result['visibility']['publicly indexed'] = raw_data['public']
    if raw_data['ip_ranges']:
        result['visibility']['ip ranges'] = raw_data['ip_ranges']
    # 'settings' is itself a JSON string (possibly null) — decode it.
    result['additional settings'] = json.loads(raw_data['settings'] or "{}")
    if raw_data['forked_id']:
        result['forked'] = CommentedMap()
        # TODO: Look up forked's url for more info; or perhaps automatically have it downloaded along?
        result['forked']['id'] = raw_data['forked_id']
        result['forked']['version'] = raw_data['forked_version']
    result['identity'] = CommentedMap()
    result['identity']['owner id'] = raw_data['owner_id']
    result['identity']['owner email'] = raw_data['owner_id__email']
    result['identity']['course id'] = raw_data['course_id']
    result['identity']['version downloaded'] = raw_data['version']
    result['identity']['created'] = to_friendly_date_from_datetime(
        blockpy_string_to_datetime(raw_data['date_created']))
    result['identity']['modified'] = to_friendly_date_from_datetime(
        blockpy_string_to_datetime(raw_data['date_modified']))
    # TODO: Tags
    # TODO: Sample Submissions. Have a "samples/" folder?
    # TODO: If args.combine, then put it all into one file
    files_path = raw_data['url']
    result['files'] = CommentedMap()
    result['files']['path'] = files_path
    result['files']['hidden but accessible files'] = []
    result['files']['instructor only files'] = []
    result['files']['extra starting files'] = []
    result['files']['read-only files'] = []
    # Check if index file exists; if so, that's our directory target
    local = registry.get_service(args.local_service, 'local')
    try:
        index_path = local.find_existing(registry, files_path,
                                         folder_file=cls.folder_file)
        files_path = os.path.dirname(index_path)
    except FileNotFoundError:
        pass
    if hasattr(args, 'destination') and args.destination:
        files_path = os.path.join(args.destination, files_path)
    # Then build up the extra instructor files
    extra_files = [(os.path.join(files_path, "on_run.py"),
                    raw_data['on_run']),
                   (os.path.join(files_path, "starting_code.py"),
                    raw_data['starting_code'])]
    if raw_data['on_change']:
        extra_files.append(
            (os.path.join(files_path, "on_change.py"),
             raw_data['on_change']))
    if raw_data['on_eval']:
        extra_files.append(
            (os.path.join(files_path, "on_eval.py"), raw_data['on_eval']))
    if raw_data['extra_instructor_files']:
        # TODO: Create special manifest file for listing special file types (e.g., "&" and "?")
        extra_instructor_files = json.loads(
            raw_data['extra_instructor_files'])
        for eif_filename, eif_contents in extra_instructor_files.items():
            # A leading sigil marks the file's special category; strip it
            # from the written filename and record it under that category.
            if eif_filename[0] in "?!^&*":
                new_path = os.path.join(files_path, eif_filename[1:])
                extra_files.append((new_path, eif_contents))
                special_file_type = cls.SPECIAL_INSTRUCTOR_FILES[
                    eif_filename[0]]
                result['files'][special_file_type].append(new_path)
    # Put instructions up front and return the result
    return add_to_front_matter(raw_data['instructions'], result), extra_files
def upload_edit(cls, registry: Registry, old_quiz, new_quiz, args):
    """Synchronize an existing Canvas quiz to match the local version.

    Updates the quiz settings, then reconciles groups and questions by name:
    same-named items are PUT (updated), new ones are POSTed, and remote
    items no longer referenced locally are deleted.
    """
    canvas = registry.get_service(args.service, "canvas")
    quiz_id = old_quiz['id']
    # Edit the quiz on canvas
    quiz_data = cls._make_canvas_upload(registry, new_quiz, args)
    canvas.api.put('quizzes/{quiz_id}'.format(quiz_id=quiz_id),
                   data=quiz_data)
    print("Updated quiz", old_quiz['title'], "on canvas")
    # Make a map of the old groups' names/ids to the groups
    old_group_map = {}
    for group in old_quiz['groups'].values():
        old_group_map[group['name']] = group
        old_group_map[group['id']] = group
    # Update groups with the same name and create new ones
    used_groups = {}
    for group in new_quiz['groups'].values():
        group_data = QuizGroup._make_canvas_upload(registry, group, args)
        if group['name'] in old_group_map:
            canvas_group = old_group_map[group['name']]
            canvas_group = canvas.api.put(
                'quizzes/{quiz_id}/groups/{group_id}'.format(
                    quiz_id=quiz_id, group_id=canvas_group['id']),
                data=group_data)
        else:
            canvas_group = canvas.api.post(
                'quizzes/{quiz_id}/groups'.format(quiz_id=quiz_id),
                data=group_data)
        # NOTE(review): the unwrap below is applied to the result of either
        # branch — confirm Canvas wraps both PUT and POST group responses in
        # a 'quiz_groups' list.
        canvas_group = canvas_group['quiz_groups'][
            0]  # acbart: Weird response type
        # Keyed by both name and id so questions can reference either.
        used_groups[canvas_group['name']] = canvas_group
        used_groups[canvas_group['id']] = canvas_group
    if new_quiz['groups']:
        print("Updated quiz", old_quiz['title'], "groups on canvas")
    # Delete any groups that no longer have a reference
    for old_group in old_quiz['groups'].values():
        if old_group['id'] not in used_groups:
            canvas.api.delete('quizzes/{quiz_id}/groups/{group_id}'.format(
                quiz_id=quiz_id, group_id=old_group['id']))
            print("Deleted question group", old_group['name'],
                  " (ID: {})".format(old_group['id']))
    # Push all the questions
    name_map = {q['question_name']: q for q in old_quiz['questions']}
    used_questions = {}
    for new_question in new_quiz['questions']:
        # Rewrite the local group reference to the Canvas group id.
        if new_question.get('quiz_group_id') is not None:
            new_question['quiz_group_id'] = used_groups[
                new_question['quiz_group_id']]['id']
        question_data = QuizQuestion._make_canvas_upload(
            registry,
            new_question, args)
        if new_question['question_name'] in name_map:
            canvas_question = name_map[new_question['question_name']]
            canvas_question = canvas.api.put(
                'quizzes/{quiz_id}/questions/{question_id}'.format(
                    quiz_id=quiz_id, question_id=canvas_question['id']),
                data=question_data)
        else:
            canvas_question = canvas.api.post(
                'quizzes/{quiz_id}/questions'.format(quiz_id=quiz_id),
                data=question_data)
        used_questions[canvas_question['id']] = canvas_question
    print("Updated quiz", old_quiz['title'], "questions on canvas")
    # Delete any old questions
    for question in old_quiz['questions']:
        if question['id'] not in used_questions:
            canvas.api.delete(
                'quizzes/{quiz_id}/questions/{question_id}'.format(
                    quiz_id=quiz_id, question_id=question['id']))
            print("Deleted question", question.get('name', "NO NAME"),
                  " (ID: {})".format(question['id']))
def encode_json(cls, registry: Registry, data: str, args):
    """Encode local problem markdown (plus companion files) into BlockPy JSON.

    Splits the front matter out of ``data``, reads the standard instructor
    ``.py`` files and any special instructor files from disk, and assembles
    the full upload payload as a JSON string.
    """
    regular, waltz, body = extract_front_matter(data)
    # Grab out convenient groups
    visibility = waltz.get('visibility', {})
    forked = waltz.get('forked', {})
    identity = waltz.get('identity', {})
    files = waltz.get('files', {})
    # Grab any extra files
    extra_files = {}
    local = registry.get_service(args.local_service, 'local')
    for py_filename in ['on_run', 'starting_code', 'on_change', 'on_eval']:
        try:
            source_path = local.find_existing(registry, args.title,
                                              folder_file=py_filename,
                                              extension='.py')
        except FileNotFoundError:
            # Missing companion files are uploaded as empty strings.
            extra_files[py_filename] = ""
            continue
        extra_files[py_filename] = local.read(source_path)
    # Special instructor files get their category sigil prepended back on.
    collected = {}
    for special, prepend in cls.SPECIAL_INSTRUCTOR_FILES_R.items():
        for file in files.get(special, []):
            source_path = local.find_existing(registry, args.title,
                                              folder_file=file,
                                              extension="")
            collected[prepend + file] = local.read(source_path)
    if collected:
        extra_files['extra_instructor_files'] = json.dumps(collected)
    else:
        extra_files['extra_instructor_files'] = ""
    # And generate the rest of the JSON
    return json.dumps({
        "_schema_version": 2,
        'url': waltz['title'],
        'name': waltz['display title'],
        'type': waltz['type'],
        'reviewed': waltz.get('human reviewed', False),
        'hidden': visibility.get('hide status'),
        'public': visibility.get('publicly indexed'),
        'ip_ranges': visibility.get('ip ranges', ""),
        # 'settings' is stored as a nested JSON string (or null when empty).
        'settings': json.dumps(waltz['additional settings'])
                    if waltz['additional settings'] else None,
        'forked_id': forked.get('id', None),
        'forked_version': forked.get('version', None),
        'owner_id': identity['owner id'],
        'owner_id__email': identity['owner email'],
        'course_id': identity['course id'],
        'version': identity['version downloaded'],
        'date_created': from_friendly_date(identity.get('created')),
        'date_modified': from_friendly_date(identity.get('modified')),
        'instructions': body,
        'extra_starting_files': "",
        # TODO: Store sample submissions in BlockPy
        'sample_submissions': [],
        # TODO: Store tags in BlockPy
        'tags': [],
        **extra_files
        # TODO: Other fields
    })
def diff(cls, registry: Registry, args):
    """Diff the local markdown (and extra files) against the Canvas version.

    With ``args.console`` the diff is printed as ndiff output; otherwise an
    HTML side-by-side diff is written next to the local file and opened
    (unless ``args.prevent_open``).

    Returns:
        False when either the local or remote version is missing;
        otherwise None after producing the diff.
    """
    # Get local version
    local = registry.get_service(args.local_service, 'local')
    source_path = None
    try:
        source_path = local.find_existing(registry, args.title)
    except FileNotFoundError:
        print("No local version of {}".format(args.title))
    # Get remote version
    canvas = registry.get_service(args.service, "canvas")
    resource_json = cls.find(canvas, args.title)
    if resource_json is None:
        print("No canvas version of {}".format(args.title))
    # Do the diff if we can
    if not source_path or not resource_json:
        return False
    local_markdown = local.read(source_path)
    extra_local_files = cls.diff_extra_files(registry, local_markdown, args)
    remote_markdown, extra_remote_files = cls.decode_json(
        registry, json.dumps(resource_json), args)
    # Materialize both generators as path -> contents maps.
    extra_local_files, extra_remote_files = dict(extra_local_files), dict(
        extra_remote_files)
    if args.console:
        # Handle main file
        for difference in difflib.ndiff(local_markdown.splitlines(True),
                                        remote_markdown.splitlines(True)):
            print(difference, end="")
        # Handle extra files
        for local_path, local_data in extra_local_files.items():
            if local_path in extra_remote_files:
                print(local_path)
                remote_data = extra_remote_files[local_path]
                for difference in difflib.ndiff(
                        local_data.splitlines(True),
                        remote_data.splitlines(True)):
                    print(difference, end="")
            else:
                print("No canvas version of", local_path)
        for remote_path, remote_data in extra_remote_files.items():
            if remote_path not in extra_local_files:
                print("No local version of", remote_path)
    else:
        html_differ = difflib.HtmlDiff(wrapcolumn=60)
        combined_diffs = [
            html_differ.make_table(
                local_markdown.splitlines(),
                remote_markdown.splitlines(),
                fromdesc="Local: {}".format(source_path),
                todesc="Canvas: {}".format(args.title))
        ]
        # Handle extra files
        for local_path, local_data in extra_local_files.items():
            if local_path in extra_remote_files:
                remote_data = extra_remote_files[local_path].splitlines()
            else:
                # Local-only file: diff against nothing.
                remote_data = []
            combined_diffs.append("<strong>{}</strong>".format(local_path))
            combined_diffs.append(
                html_differ.make_table(local_data.splitlines(),
                                       remote_data,
                                       fromdesc="Local",
                                       todesc="Canvas"))
        for remote_path, remote_data in extra_remote_files.items():
            # Remote-only files: diff against nothing.
            if remote_path not in extra_local_files:
                combined_diffs.append(
                    "<strong>{}</strong>".format(remote_path))
                combined_diffs.append(
                    html_differ.make_table([], remote_data.splitlines(),
                                           fromdesc="Local",
                                           todesc="Canvas"))
        local_diff_path = local.make_diff_filename(args.title)
        local_diff_path = os.path.join(os.path.dirname(source_path),
                                       local_diff_path)
        local.write(
            local_diff_path,
            cls.DIFF_TEMPLATE.format(diffs="\n\n".join(combined_diffs)))
        if not args.prevent_open:
            start_file(local_diff_path)
def diff(cls, registry: Registry, args):
    """Diff the local resource against the remote service's version.

    Service-agnostic variant: the remote label comes from the service type,
    and extra-file paths are normalized with ``os.path.normpath`` before
    comparison so equivalent paths match across platforms. Console mode
    prints ndiff output; otherwise an HTML side-by-side diff is written
    next to the local file and opened (unless ``args.prevent_open``).

    Returns:
        False when either the local or remote version is missing;
        otherwise None after producing the diff.
    """
    # Get local version
    local = registry.get_service(args.local_service, 'local')
    source_path = None
    try:
        source_path = local.find_existing(registry, args.title,
                                          folder_file=cls.folder_file,
                                          args=args)
    except FileNotFoundError:
        print("No local version of {}".format(args.title))
    # Get remote version
    service = registry.get_service(args.service, cls.default_service)
    service_name = service.type.title()
    resource_json = cls.find(service, args.title)
    if resource_json is None:
        print(f"No {service_name} version of {args.title}")
    # Do the diff if we can
    if not source_path or not resource_json:
        return False
    local_base = get_parent_directory(source_path)
    local_markdown = local.read(source_path)
    extra_local_files = cls.diff_extra_files(registry, local_markdown, args)
    remote_markdown, extra_remote_files = cls.decode_json(
        registry, json.dumps(resource_json), args)
    #extra_local_files, extra_remote_files = dict(extra_local_files), dict(extra_remote_files)
    # Normalize paths so local/remote keys compare equal across separators.
    extra_local_files = {os.path.normpath(local_path): local_data
                         for local_path, local_data
                         in dict(extra_local_files).items()}
    extra_remote_files = {os.path.normpath(remote_path): remote_data
                          for remote_path, remote_data
                          in dict(extra_remote_files).items()}
    if args.console:
        # Handle main file
        for difference in difflib.ndiff(local_markdown.splitlines(True),
                                        remote_markdown.splitlines(True)):
            print(difference, end="")
        # Handle extra files
        for local_path, local_data in extra_local_files.items():
            if local_path in extra_remote_files:
                print(local_path)
                remote_data = extra_remote_files[local_path]
                for difference in difflib.ndiff(
                        local_data.splitlines(True),
                        remote_data.splitlines(True)):
                    print(difference, end="")
            else:
                print(f"No {service_name} version of {local_path}")
        for remote_path, remote_data in extra_remote_files.items():
            if remote_path not in extra_local_files:
                print("No local version of", remote_path)
    else:
        html_differ = difflib.HtmlDiff(wrapcolumn=60)
        combined_diffs = [html_differ.make_table(
            local_markdown.splitlines(),
            remote_markdown.splitlines(),
            fromdesc="Local: {}".format(source_path),
            todesc=f"{service_name}: {args.title}")]
        # Handle extra files
        for local_path, local_data in extra_local_files.items():
            if local_path in extra_remote_files:
                # File exists in remote and local
                remote_data = extra_remote_files[local_path].splitlines()
            else:
                # Local files that are not in the remote
                remote_data = []
            combined_diffs.append("<strong>{}</strong>".format(local_path))
            combined_diffs.append(
                html_differ.make_table(local_data.splitlines(),
                                       remote_data,
                                       fromdesc="Local",
                                       todesc=service_name))
        for remote_path, remote_data in extra_remote_files.items():
            # Remote files that are not in the local
            if remote_path not in extra_local_files:
                combined_diffs.append(
                    "<strong>{}</strong>".format(remote_path))
                combined_diffs.append(
                    html_differ.make_table([], remote_data.splitlines(),
                                           fromdesc="Local",
                                           todesc=service_name))
        local_diff_path = local.make_diff_filename(args.title)
        local_diff_path = os.path.join(os.path.dirname(source_path),
                                       local_diff_path)
        local.write(local_diff_path,
                    cls.DIFF_TEMPLATE.format(
                        diffs="\n\n".join(combined_diffs)))
        if not args.prevent_open:
            start_file(local_diff_path)