def check_auth(args):
    """
    Checks courseraprogramming's connectivity to the coursera.org API servers

    Exits with status 1 if the profile API returns a non-200 response or if
    the profile body cannot be parsed.
    """
    oauth2_instance = oauth2.build_oauth2(args)
    auth = oauth2_instance.build_authorizer()
    my_profile_url = ('https://api.coursera.org/api/externalBasicProfiles.v1?'
                      'q=me&fields=name')
    r = requests.get(my_profile_url, auth=auth)
    if r.status_code != 200:
        logging.error('Received response code %s from the basic profile API.',
                      r.status_code)
        logging.debug('Response body:\n%s', r.text)
        sys.exit(1)
    # Narrowed from a bare `except:` which would also swallow SystemExit /
    # KeyboardInterrupt. ValueError covers malformed JSON; KeyError /
    # IndexError cover an unexpected response shape.
    try:
        external_id = r.json()['elements'][0]['id']
    except (ValueError, KeyError, IndexError):
        logging.error(
            'Could not parse the external id out of the response body %s',
            r.text)
        external_id = None
    try:
        name = r.json()['elements'][0]['name']
    except (ValueError, KeyError, IndexError):
        logging.error('Could not parse the name out of the response body %s',
                      r.text)
        name = None
    if not args.quiet or args.quiet == 0:
        print('Name: %s' % name)
        print('External ID: %s' % external_id)
    if name is None or external_id is None:
        sys.exit(1)
def command_reregister(args):
    """
    Implements the reregister command.

    Looks up the existing grader by args.currentGraderId, re-registers its
    S3 artifact, and updates the assignments to point at the new grader.
    Returns 1 on failure, otherwise the result of update_assignments.
    """
    oauth2_instance = oauth2.build_oauth2(args)
    auth = oauth2_instance.build_authorizer()
    # Retrieve the grader identified by currentGraderId. (The URL was
    # previously built twice; build it once and reuse it.)
    url = args.register_endpoint + '/' + args.currentGraderId
    result = requests.get(url, auth=auth)
    if result.status_code != 200:
        logging.error('Unable to retrieve grader details with id! Code: %s',
                      result.status_code)
        return 1
    try:
        s3bucket = result.json()['elements'][0]['bucket']
        s3key = result.json()['elements'][0]['key']
    except (ValueError, KeyError, IndexError):
        # Was a bare `except:`; also the implicitly-concatenated message was
        # missing a space ("...detailsendpoint").
        logging.error(
            'Cannot parse the response from the grader details '
            'endpoint: %s', result.text)
        return 1
    grader_id = register_grader(auth, args, s3bucket, s3key)
    return update_assignments(auth, grader_id, args)
def test_compute_cache_filename():
    """A token_cache path configured under [oauth2] must be used verbatim."""
    ns = argparse.Namespace()
    ns.client_id = 'client_id'
    ns.client_secret = 'fake-secret'
    ns.scopes = 'fake scopes'
    config = ConfigParser.ConfigParser()
    config.add_section('oauth2')
    config.set('oauth2', 'token_cache', '/tmp/configured_cache')
    computed = oauth2.build_oauth2(ns, config).token_cache_file
    assert computed == '/tmp/configured_cache'
def test_compute_cache_filename():
    """A token_cache path configured under [oauth2] must be used verbatim."""
    ns = argparse.Namespace()
    ns.client_id = 'client_id'
    ns.client_secret = 'fake-secret'
    ns.scopes = 'fake scopes'
    config = configparser.ConfigParser()
    config.add_section('oauth2')
    config.set('oauth2', 'token_cache', '/tmp/configured_cache')
    computed = oauth2.build_oauth2(ns, config).token_cache_file
    assert computed == '/tmp/configured_cache'
def test_compute_cache_filname_expanded_path():
    """A '~' in the configured token_cache path must be expanded."""
    ns = argparse.Namespace()
    ns.client_id = 'client_id'
    ns.client_secret = 'fake-secret'
    ns.scopes = 'fake scopes'
    config = ConfigParser.ConfigParser()
    config.add_section('oauth2')
    config.set('oauth2', 'token_cache', '~/.coursera/oauth2_cache.pickle')
    computed = oauth2.build_oauth2(ns, config).token_cache_file
    assert '~' not in computed, 'Computed contained "~": %s' % computed
def test_compute_cache_filname_expanded_path():
    """A '~' in the configured token_cache path must be expanded."""
    ns = argparse.Namespace()
    ns.client_id = 'client_id'
    ns.client_secret = 'fake-secret'
    ns.scopes = 'fake scopes'
    config = configparser.ConfigParser()
    config.add_section('oauth2')
    config.set('oauth2', 'token_cache', '~/.coursera/oauth2_cache.pickle')
    computed = oauth2.build_oauth2(ns, config).token_cache_file
    assert '~' not in computed, 'Computed contained "~": %s' % computed
def command_publish(args):
    """
    Publishes one or more assignment items in a course.

    Exits with ErrorCodes.FATAL_ERROR on the first non-recoverable failure,
    or ErrorCodes.RETRYABLE_ERROR when the grader is still being processed.
    """
    oauth2_instance = oauth2.build_oauth2(args)
    course_id = args.course
    item_ids = [args.item] + (getattr(args, 'additional_items') or [])
    for item_id in item_ids:
        logging.info("Starting publish for item {} in course {}".format(
            item_id, course_id))
        try:
            # Fixed log typo: was "requied".
            logging.info("Fetching required metadata...")
            metadata = get_metadata(oauth2_instance, args.get_endpoint,
                                    course_id, item_id)
            logging.info("Publishing...")
            publish_item(oauth2_instance, args.publish_endpoint,
                         args.publish_action, course_id, item_id, metadata)
            logging.info("Publish complete for item {} in course {}".format(
                item_id, course_id))
        except ItemNotFoundError:
            # Dropped a spurious extra course_id argument to .format(); the
            # message has only one placeholder.
            logging.error(
                "Unable to find a publishable assignment with item "
                "id {}. Maybe there are no changes to publish?".format(
                    item_id))
            sys.exit(ErrorCodes.FATAL_ERROR)
        except ValidationError:
            logging.error(
                "We found some validation errors in your assignment with item "
                "id {}. Please verify that your assignment is formatted "
                "correctly and try again.".format(item_id))
            sys.exit(ErrorCodes.FATAL_ERROR)
        except GraderExecutorError as e:
            if e.status == GraderExecutorStatus.PENDING:
                # logging.warning: warn() is a deprecated alias.
                logging.warning(
                    "We are still processing your grader for your assignment "
                    "with item id {}. Please try again soon.".format(item_id))
                sys.exit(ErrorCodes.RETRYABLE_ERROR)
            elif e.status == GraderExecutorStatus.FAILED:
                logging.error(
                    "We were unable to process your grader for your "
                    "assignment with item id {}. Please try to upload your "
                    "grader again. If the problem persists, please let us "
                    "know.".format(item_id))
                sys.exit(ErrorCodes.FATAL_ERROR)
            elif e.status == GraderExecutorStatus.MISSING:
                logging.error(
                    "We were unable to find your grader for your assignment "
                    "with item id {}. Please try to upload your grader "
                    "again. If the problem persists, please let us "
                    "know.".format(item_id))
                sys.exit(ErrorCodes.FATAL_ERROR)
        except InternalError:
            logging.error(
                "Something unexpected happened while trying to publish your "
                "assignment with item id {}. Please verify your course and "
                "item ids are correct. If the problem persists, please let "
                "us know.".format(item_id))
            sys.exit(ErrorCodes.FATAL_ERROR)
def check_auth(args):
    """
    Checks courseraprogramming's connectivity to the coursera.org API servers

    Exits with status 1 if the profile API returns a non-200 response or if
    the profile body cannot be parsed.
    """
    oauth2_instance = oauth2.build_oauth2(args)
    auth = oauth2_instance.build_authorizer()
    my_profile_url = (
        'https://api.coursera.org/api/externalBasicProfiles.v1?'
        'q=me&fields=name'
    )
    r = requests.get(my_profile_url, auth=auth)
    if r.status_code != 200:
        logging.error('Received response code %s from the basic profile API.',
                      r.status_code)
        logging.debug('Response body:\n%s', r.text)
        sys.exit(1)
    # Narrowed from a bare `except:` which would also swallow SystemExit /
    # KeyboardInterrupt.
    try:
        external_id = r.json()['elements'][0]['id']
    except (ValueError, KeyError, IndexError):
        logging.error(
            'Could not parse the external id out of the response body %s',
            r.text)
        external_id = None
    try:
        name = r.json()['elements'][0]['name']
    except (ValueError, KeyError, IndexError):
        logging.error(
            'Could not parse the name out of the response body %s',
            r.text)
        name = None
    if not args.quiet > 0:
        # print() calls (identical single-argument output under Python 2,
        # and valid under Python 3, matching the sibling check_auth variant).
        print('Name: %s' % name)
        print('External ID: %s' % external_id)
    if name is None or external_id is None:
        sys.exit(1)
def command_reregister(args):
    """
    Implements the reregister command.

    Looks up the existing grader by args.currentGraderId, re-registers its
    S3 artifact, and updates the assignments to point at the new grader.
    Returns 1 on failure, otherwise the result of update_assignments.
    """
    oauth2_instance = oauth2.build_oauth2(args)
    auth = oauth2_instance.build_authorizer()
    # Retrieve the grader identified by currentGraderId. (The URL was
    # previously built twice; build it once and reuse it.)
    url = args.register_endpoint + "/" + args.currentGraderId
    result = requests.get(url, auth=auth)
    if result.status_code != 200:
        logging.error("Unable to retrieve grader details with id! Code: %s",
                      result.status_code)
        return 1
    try:
        s3bucket = result.json()["elements"][0]["bucket"]
        s3key = result.json()["elements"][0]["key"]
    except (ValueError, KeyError, IndexError):
        # Was a bare `except:`; also the implicitly-concatenated message was
        # missing a space ("...detailsendpoint").
        logging.error("Cannot parse the response from the grader details "
                      "endpoint: %s", result.text)
        return 1
    grader_id = register_grader(auth, args, s3bucket, s3key)
    return update_assignments(auth, grader_id, args)
def display_auth_cache(args):
    '''
    Writes to the screen the state of the authentication cache. (For
    debugging authentication issues.) BEWARE: DO NOT email the output of
    this command!!! You must keep the tokens secure. Treat them as
    passwords.
    '''
    oauth2_instance = oauth2.build_oauth2(args)
    if not args.quiet or args.quiet == 0:
        cache = oauth2_instance.token_cache
        # Auth token, truncated to 10 characters unless --no-truncate.
        token = cache['token']
        if not args.no_truncate and token is not None:
            token = token[:10] + '...'
        print("Auth token: %s" % token)
        # Remaining lifetime, rounded down to one decimal place.
        remaining = int((cache['expires'] - time.time()) * 10) / 10.0
        print("Auth token expires in: %s seconds." % remaining)
        if 'refresh' in cache:
            refresh = cache['refresh']
            if not args.no_truncate and refresh is not None:
                refresh = refresh[:10] + '...'
            print("Refresh token: %s" % refresh)
        else:
            print("No refresh token found.")
def display_auth_cache(args):
    '''
    Writes to the screen the state of the authentication cache. (For
    debugging authentication issues.) BEWARE: DO NOT email the output of
    this command!!! You must keep the tokens secure. Treat them as
    passwords.
    '''
    oauth2_instance = oauth2.build_oauth2(args)
    if not args.quiet > 0:
        # Auth token, truncated to 10 characters unless --no-truncate.
        token = oauth2_instance.token_cache['token']
        if not args.no_truncate and token is not None:
            token = token[:10] + '...'
        # print() calls (identical single-argument output under Python 2,
        # valid under Python 3, consistent with the sibling variant).
        print("Auth token: %s" % token)
        expires_time = oauth2_instance.token_cache['expires']
        # Remaining lifetime, rounded down to one decimal place.
        expires_in = int((expires_time - time.time()) * 10) / 10.0
        print("Auth token expires in: %s seconds." % expires_in)
        if 'refresh' in oauth2_instance.token_cache:
            refresh = oauth2_instance.token_cache['refresh']
            if not args.no_truncate and refresh is not None:
                refresh = refresh[:10] + '...'
            print("Refresh token: %s" % refresh)
        else:
            print("No refresh token found.")
def command_publish(args):
    """
    Publishes one or more assignment items in a course.

    Unlike the exit-on-first-failure variant, this accumulates the worst
    error across all items and exits once at the end (FATAL_ERROR takes
    precedence over RETRYABLE_ERROR).
    """
    oauth2_instance = oauth2.build_oauth2(args)
    course_id = args.course
    item_ids = [args.item] + (getattr(args, 'additional_items') or [])
    error = None
    for item_id in item_ids:
        logging.info("Starting publish for item {} in course {}".format(
            item_id, course_id))
        try:
            logging.info("Fetching required metadata...")
            metadata = get_metadata(
                oauth2_instance,
                args.get_endpoint,
                course_id,
                item_id)
            logging.info("Publishing...")
            publish_item(
                oauth2_instance,
                args.publish_endpoint,
                args.publish_action,
                course_id,
                item_id,
                metadata)
            logging.info("Publish complete for item {} in course {}".format(
                item_id, course_id))
        except ItemNotFoundError:
            # Dropped a spurious extra course_id argument to .format(); the
            # message has only one placeholder.
            logging.error(
                "Unable to find a publishable assignment with item "
                "id {}. Maybe there are no changes to publish?".format(
                    item_id))
            error = ErrorCodes.FATAL_ERROR
        except ValidationError:
            logging.error(
                "We found some validation errors in your assignment with item "
                "id {}. Please verify that your assignment is formatted "
                "correctly and try again.".format(
                    item_id))
            error = ErrorCodes.FATAL_ERROR
        except GraderExecutorError as e:
            if e.status == GraderExecutorStatus.PENDING:
                # logging.warning: warn() is a deprecated alias.
                logging.warning(
                    "We are still processing your grader for your assignment "
                    "with item id {}. Please try again soon.".format(
                        item_id))
                # Never downgrade a fatal error to retryable.
                if error != ErrorCodes.FATAL_ERROR:
                    error = ErrorCodes.RETRYABLE_ERROR
            elif e.status == GraderExecutorStatus.FAILED:
                logging.error(
                    "We were unable to process your grader for your "
                    "assignment with item id {}. Please try to upload your "
                    "grader again. If the problem persists, please let us "
                    "know.".format(
                        item_id))
                error = ErrorCodes.FATAL_ERROR
            elif e.status == GraderExecutorStatus.MISSING:
                logging.error(
                    "We were unable to find your grader for your assignment "
                    "with item id {}. Please try to upload your grader "
                    "again. If the problem persists, please let us "
                    "know.".format(
                        item_id))
                error = ErrorCodes.FATAL_ERROR
        except InternalError:
            logging.error(
                "Something unexpected happened while trying to publish your "
                "assignment with item id {}. Please verify your course and "
                "item ids are correct. If the problem persists, please let "
                "us know.".format(
                    item_id))
            error = ErrorCodes.FATAL_ERROR
    if error is not None:
        sys.exit(error)
def command_upload(args):
    """
    Implements the upload subcommand.

    Uploads the grader container image to transloadit, waits for processing,
    then registers the grader (with optional CPU/memory/timeout reservations)
    and updates the assignments. Returns 1 on failure.
    """
    d = utils.docker_client(args)
    image = get_container_image(args, d)
    oauth2_instance = oauth2.build_oauth2(args)
    auth = oauth2_instance.build_authorizer()
    # TODO: use transloadit's signatures for upload signing.
    # authorization = authorize_upload(args, auth)
    # Generate a random uuid for upload.
    upload_id = uuid.uuid4().hex
    transloadit_host = idle_transloadit_server(args)
    upload_url = 'https://%(host)s/assemblies/%(id)s' % {
        'host': transloadit_host,
        'id': upload_id,
    }
    if args.upload_to_requestbin is not None:
        upload_url = 'http://requestb.in/%s' % args.upload_to_requestbin
    if not args.quiet > 0:
        sys.stdout.write(
            'About to upload to server:\n\t%(transloadit_host)s\n'
            'with upload id:\n\t%(upload_id)s\nStatus API:\n'
            '\t%(upload_url)s\nUploading...' % {
                'transloadit_host': transloadit_host,
                'upload_id': upload_id,
                'upload_url': upload_url,
            })
        sys.stdout.flush()
    p = multiprocessing.Process(target=upload, args=(args, upload_url, image))
    p.daemon = True  # Auto-kill when the main process exits.
    p.start()
    time.sleep(20)  # Yield control to the child process to kick off upload.
    upload_information = None
    while p.is_alive():
        upload_information = poll_transloadit(args, upload_url)
        if upload_information is not None:
            # logging.warning: warn() is a deprecated alias.
            logging.warning(
                'Upload information retrieved before upload completed??! %s',
                upload_information)
            break
        time.sleep(10)  # 10 seconds
    p.join(1)  # Join to clean up zombie.
    # TODO: make time waiting for transloadit to finish processing configurable
    # range() instead of the Python-2-only xrange(); 300 ints is negligible.
    for i in range(300):
        upload_information = poll_transloadit(args, upload_url)
        if upload_information is not None:
            break
        time.sleep(5)
    if upload_information is None:
        logging.error(
            'Upload did not complete within expected time limits. Upload '
            'URL: %s', upload_url)
        return 1
    # Register the grader with Coursera to initiate the image cleaning process
    logging.debug('Grader upload info is: %s', upload_information)
    # Rebuild an authorizer to ensure it's fresh and not expired
    auth = oauth2_instance.build_authorizer()
    grader_cpu = None
    if hasattr(args, 'grader_cpu') and args.grader_cpu is not None:
        grader_cpu = args.grader_cpu * 1024  # API takes CPU in 1/1024 units.
    register_request = {
        'courseId': args.course,
        'bucket': upload_information[0],
        'key': upload_information[1],
        'reservedCpu': grader_cpu,
        'reservedMemory': getattr(args, 'grader_memory_limit', None),
        'wallClockTimeout': getattr(args, 'grading_timeout', None),
    }
    logging.debug('About to POST data to register endpoint: %s',
                  json.dumps(register_request))
    register_result = requests.post(args.register_endpoint,
                                    data=json.dumps(register_request),
                                    auth=auth)
    if register_result.status_code != 201:  # Created
        logging.error('Failed to register grader (%s) with Coursera: %s',
                      upload_information[1], register_result.text)
        return 1
    # Narrowed from a bare `except:` which would also swallow SystemExit /
    # KeyboardInterrupt.
    try:
        grader_id = register_result.json()['elements'][0]['executorId']
        location = register_result.headers['location']
    except (ValueError, KeyError, IndexError):
        logging.exception(
            'Could not parse the response from the Coursera register grader '
            'endpoint: %s', register_result.text)
        return 1
    logging.info('The grader status API is at: %s', location)
    return update_assignments(auth, grader_id, args)
def command_publish(args):
    """
    Publishes one or more assignment items in a course via the authoring
    write-access-token flow.

    Accumulates the worst error across all items and exits once at the end
    (FATAL_ERROR takes precedence over RETRYABLE_ERROR).
    """
    oauth2_instance = oauth2.build_oauth2(args)
    course_id = args.course
    item_ids = [args.item] + (getattr(args, 'additional_items') or [])
    error = None
    for item_id in item_ids:
        logging.info("Starting publish for item {} in course {}".format(
            item_id, course_id))
        try:
            logging.info("Fetching required write access token...")
            authoring_pa_id = get_authoring_pa_id(
                oauth2_instance,
                course_id,
                item_id)
            write_access_token = get_write_access_token(
                oauth2_instance,
                args.get_endpoint,
                authoring_pa_id)
            logging.info("Publishing...")
            publish_item(
                oauth2_instance,
                args.publish_endpoint,
                args.publish_action,
                authoring_pa_id,
                write_access_token)
            logging.info("Publish complete for item {} in course {}".format(
                item_id, course_id))
        except ItemNotFoundError:
            # BUG FIX: was .format(id), which interpolated the *builtin* id
            # function (printing "<built-in function id>") instead of the
            # item id being published.
            logging.error(
                "Unable to find a publishable assignment with item "
                "id {}. Maybe there are no changes to publish?".format(
                    item_id))
            error = ErrorCodes.FATAL_ERROR
        except ValidationError:
            logging.error(
                "We found some validation errors in your assignment with item "
                "id {}. Please verify that your assignment is formatted "
                "correctly and try again.".format(
                    item_id))
            error = ErrorCodes.FATAL_ERROR
        except GraderExecutorError as e:
            if e.status == GraderExecutorStatus.PENDING:
                # logging.warning: warn() is a deprecated alias.
                logging.warning(
                    "We are still processing your grader for your assignment "
                    "with item id {}. Please try again soon.".format(
                        item_id))
                # Never downgrade a fatal error to retryable.
                if error != ErrorCodes.FATAL_ERROR:
                    error = ErrorCodes.RETRYABLE_ERROR
            elif e.status == GraderExecutorStatus.FAILED:
                logging.error(
                    "We were unable to process your grader for your "
                    "assignment with item id {}. Please try to upload your "
                    "grader again. If the problem persists, please let us "
                    "know.".format(
                        item_id))
                error = ErrorCodes.FATAL_ERROR
            elif e.status == GraderExecutorStatus.MISSING:
                logging.error(
                    "We were unable to find your grader for your assignment "
                    "with item id {}. Please try to upload your grader "
                    "again. If the problem persists, please let us "
                    "know.".format(
                        item_id))
                error = ErrorCodes.FATAL_ERROR
        except InternalError:
            logging.error(
                "Something unexpected happened while trying to publish your "
                "assignment with item id {}. Please verify your course and "
                "item ids are correct. If the problem persists, please let "
                "us know.".format(
                    item_id))
            error = ErrorCodes.FATAL_ERROR
        except ProgrammingAssignmentDraftNotReadyError:
            logging.error(
                "Your assignment with item id {} is not ready for publish. "
                "Please verify your assignment draft is ready and try "
                "again.".format(item_id))
            error = ErrorCodes.FATAL_ERROR
    if error is not None:
        sys.exit(error)
def command_upload(args):
    """
    Implements the upload subcommand.

    Uploads the grader container image to transloadit, waits for processing,
    registers the grader, then updates a single assignment part in place.
    Returns 0 on success, 1 on failure.
    """
    d = utils.docker_client(args)
    image = get_container_image(args, d)
    oauth2_instance = oauth2.build_oauth2(args)
    auth = oauth2_instance.build_authorizer()
    # TODO: use transloadit's signatures for upload signing.
    # authorization = authorize_upload(args, auth)
    # Generate a random uuid for upload.
    upload_id = uuid.uuid4().hex
    transloadit_host = idle_transloadit_server(args)
    upload_url = 'https://%(host)s/assemblies/%(id)s' % {
        'host': transloadit_host,
        'id': upload_id,
    }
    if args.upload_to_requestbin is not None:
        upload_url = 'http://requestb.in/%s' % args.upload_to_requestbin
    if not args.quiet > 0:
        sys.stdout.write(
            'About to upload to server:\n\t%(transloadit_host)s\n'
            'with upload id:\n\t%(upload_id)s\nStatus API:\n'
            '\t%(upload_url)s\nUploading...' % {
                'transloadit_host': transloadit_host,
                'upload_id': upload_id,
                'upload_url': upload_url,
            })
        sys.stdout.flush()
    p = multiprocessing.Process(target=upload, args=(args, upload_url, image))
    p.daemon = True  # Auto-kill when the main process exits.
    p.start()
    time.sleep(10)  # Yield control to the child process to kick off upload.
    upload_information = None
    while p.is_alive():
        upload_information = poll_transloadit(args, upload_url)
        if upload_information is not None:
            # logging.warning: warn() is a deprecated alias.
            logging.warning(
                'Upload information retrieved before upload completed??! %s',
                upload_information)
            break
        time.sleep(10)  # 10 seconds
    p.join(1)  # Join to clean up zombie.
    # TODO: make time waiting for transloadit to finish processing configurable
    # range() instead of the Python-2-only xrange(); 300 ints is negligible.
    for i in range(300):
        upload_information = poll_transloadit(args, upload_url)
        if upload_information is not None:
            break
        time.sleep(5)
    if upload_information is None:
        logging.error(
            'Upload did not complete within expected time limits. Upload '
            'URL: %s', upload_url)
        return 1
    # Register the grader with Coursera to initiate the image cleaning process
    logging.debug('Grader upload info is: %s', upload_information)
    # Rebuild an authorizer to ensure it's fresh and not expired
    auth = oauth2_instance.build_authorizer()
    register_request = {
        'courseId': args.course,
        'bucket': upload_information[0],
        'key': upload_information[1],
    }
    logging.debug('About to POST data to register endpoint: %s',
                  json.dumps(register_request))
    register_result = requests.post(
        args.register_endpoint,
        data=json.dumps(register_request),
        auth=auth)
    if register_result.status_code != 201:  # Created
        logging.error(
            'Failed to register grader (%s) with Coursera: %s',
            upload_information[1], register_result.text)
        return 1
    # Narrowed from a bare `except:` which would also swallow SystemExit /
    # KeyboardInterrupt.
    try:
        grader_id = register_result.json()['elements'][0]['executorId']
        location = register_result.headers['location']
    except (ValueError, KeyError, IndexError):
        logging.exception(
            'Could not parse the response from the Coursera register grader '
            'endpoint: %s', register_result.text)
        return 1
    logging.info('The grader status API is at: %s', location)
    update_assignment_params = {
        'action': args.update_part_action,
        'id': '%s~%s' % (args.course, args.item),
        'partId': args.part,
        'executorId': grader_id,
    }
    update_result = requests.post(
        args.update_part_endpoint,
        params=update_assignment_params,
        auth=auth)
    if update_result.status_code != 200:
        logging.error(
            'Unable to update the assignment to use the new grader. Param: %s '
            'URL: %s Response: %s', update_assignment_params,
            update_result.url, update_result.text)
        return 1
    logging.info('Successfully updated assignment part %s to new executor %s',
                 args.part, grader_id)
    return 0
def command_upload(args):
    """
    Implements the upload subcommand.

    Uploads the grader container image to transloadit, waits for processing,
    then delegates registration and assignment updates to the
    register_grader / update_assignments helpers. Returns 1 on failure.
    """
    d = utils.docker_client(args)
    image = get_container_image(args, d)
    oauth2_instance = oauth2.build_oauth2(args)
    auth = oauth2_instance.build_authorizer()
    # TODO: use transloadit's signatures for upload signing.
    # authorization = authorize_upload(args, auth)
    # Generate a random uuid for upload.
    upload_id = uuid.uuid4().hex
    transloadit_host = idle_transloadit_server(args)
    upload_url = 'https://%(host)s/assemblies/%(id)s' % {
        'host': transloadit_host,
        'id': upload_id,
    }
    if args.upload_to_requestbin is not None:
        upload_url = 'http://requestb.in/%s' % args.upload_to_requestbin
    if not args.quiet > 0:
        sys.stdout.write(
            'About to upload to server:\n\t%(transloadit_host)s\n'
            'with upload id:\n\t%(upload_id)s\nStatus API:\n'
            '\t%(upload_url)s\nUploading...' % {
                'transloadit_host': transloadit_host,
                'upload_id': upload_id,
                'upload_url': upload_url,
            })
        sys.stdout.flush()
    p = multiprocessing.Process(target=upload, args=(args, upload_url, image))
    p.daemon = True  # Auto-kill when the main process exits.
    p.start()
    time.sleep(20)  # Yield control to the child process to kick off upload.
    upload_information = None
    while p.is_alive():
        upload_information = poll_transloadit(args, upload_url)
        if upload_information is not None:
            # logging.warning: warn() is a deprecated alias.
            logging.warning(
                'Upload information retrieved before upload completed??! %s',
                upload_information)
            break
        time.sleep(10)  # 10 seconds
    p.join(1)  # Join to clean up zombie.
    # TODO: make time waiting for transloadit to finish processing configurable
    # range() instead of the Python-2-only xrange(); 300 ints is negligible.
    for i in range(300):
        upload_information = poll_transloadit(args, upload_url)
        if upload_information is not None:
            break
        time.sleep(5)
    if upload_information is None:
        logging.error(
            'Upload did not complete within expected time limits. Upload '
            'URL: %s', upload_url)
        return 1
    # Register the grader with Coursera to initiate the image cleaning process
    logging.debug('Grader upload info is: %s', upload_information)
    # Rebuild an authorizer to ensure it's fresh and not expired
    auth = oauth2_instance.build_authorizer()
    grader_id = register_grader(auth, args, bucket=upload_information[0],
                                key=upload_information[1])
    return update_assignments(auth, grader_id, args)
def command_upload(args):
    """
    Implements the upload subcommand.

    Uploads the grader container image to transloadit, waits for processing,
    then registers the grader (with optional CPU/memory/timeout reservations)
    and updates the assignments. Returns 1 on failure.
    """
    d = utils.docker_client(args)
    image = get_container_image(args, d)
    oauth2_instance = oauth2.build_oauth2(args)
    auth = oauth2_instance.build_authorizer()
    # TODO: use transloadit's signatures for upload signing.
    # authorization = authorize_upload(args, auth)
    # Generate a random uuid for upload.
    upload_id = uuid.uuid4().hex
    transloadit_host = idle_transloadit_server(args)
    upload_url = "https://%(host)s/assemblies/%(id)s" % {
        "host": transloadit_host,
        "id": upload_id,
    }
    if args.upload_to_requestbin is not None:
        upload_url = "http://requestb.in/%s" % args.upload_to_requestbin
    if not args.quiet > 0:
        sys.stdout.write(
            "About to upload to server:\n\t%(transloadit_host)s\n"
            "with upload id:\n\t%(upload_id)s\nStatus API:\n"
            "\t%(upload_url)s\nUploading..." % {
                "transloadit_host": transloadit_host,
                "upload_id": upload_id,
                "upload_url": upload_url,
            })
        sys.stdout.flush()
    p = multiprocessing.Process(target=upload, args=(args, upload_url, image))
    p.daemon = True  # Auto-kill when the main process exits.
    p.start()
    time.sleep(20)  # Yield control to the child process to kick off upload.
    upload_information = None
    while p.is_alive():
        upload_information = poll_transloadit(args, upload_url)
        if upload_information is not None:
            # logging.warning: warn() is a deprecated alias.
            logging.warning(
                "Upload information retrieved before upload completed??! %s",
                upload_information)
            break
        time.sleep(10)  # 10 seconds
    p.join(1)  # Join to clean up zombie.
    # TODO: make time waiting for transloadit to finish processing configurable
    # range() instead of the Python-2-only xrange(); 300 ints is negligible.
    for i in range(300):
        upload_information = poll_transloadit(args, upload_url)
        if upload_information is not None:
            break
        time.sleep(5)
    if upload_information is None:
        logging.error(
            "Upload did not complete within expected time limits. Upload "
            "URL: %s", upload_url)
        return 1
    # Register the grader with Coursera to initiate the image cleaning process
    logging.debug("Grader upload info is: %s", upload_information)
    # Rebuild an authorizer to ensure it's fresh and not expired
    auth = oauth2_instance.build_authorizer()
    grader_cpu = None
    if hasattr(args, "grader_cpu") and args.grader_cpu is not None:
        grader_cpu = args.grader_cpu * 1024  # API takes CPU in 1/1024 units.
    register_request = {
        "courseId": args.course,
        "bucket": upload_information[0],
        "key": upload_information[1],
        "reservedCpu": grader_cpu,
        "reservedMemory": getattr(args, "grader_memory_limit", None),
        "wallClockTimeout": getattr(args, "grading_timeout", None),
    }
    logging.debug("About to POST data to register endpoint: %s",
                  json.dumps(register_request))
    register_result = requests.post(args.register_endpoint,
                                    data=json.dumps(register_request),
                                    auth=auth)
    if register_result.status_code != 201:  # Created
        logging.error("Failed to register grader (%s) with Coursera: %s",
                      upload_information[1], register_result.text)
        return 1
    # Narrowed from a bare `except:` which would also swallow SystemExit /
    # KeyboardInterrupt.
    try:
        grader_id = register_result.json()["elements"][0]["executorId"]
        location = register_result.headers["location"]
    except (ValueError, KeyError, IndexError):
        logging.exception(
            "Could not parse the response from the Coursera register grader "
            "endpoint: %s", register_result.text)
        return 1
    logging.info("The grader status API is at: %s", location)
    return update_assignments(auth, grader_id, args)