def import_canvas_api_data():
    """Kick off an async Canvas API import job.

    Expects a JSON request body with required ``course_id``, ``path`` and
    ``s3_key`` keys, plus optional ``mock`` and ``job_id``. Records a
    'received' status for the job before launching it asynchronously.

    Raises BadRequestError when a required parameter is absent.
    """
    data = json.loads(request.data)

    def _param(name, required=False):
        # Tolerates a falsy body, mirroring the `data and data.get(...)` idiom.
        value = data.get(name) if data else None
        if required and not value:
            raise BadRequestError(f'Required "{name}" parameter missing.')
        return value

    course_id = _param('course_id', required=True)
    path = _param('path', required=True)
    mock = _param('mock')
    s3_key = _param('s3_key', required=True)
    job_id = _param('job_id')
    update_canvas_api_import_status(
        job_id=job_id,
        course_id=course_id,
        status='received',
    )
    job = ImportCanvasApiData(
        course_id=course_id,
        path=path,
        mock=mock,
        s3_key=s3_key,
        job_id=job_id,
    )
    return respond_with_status(job.run_async())
def update_job_schedule(job_id):
    """Reschedule or delete the scheduled job identified by ``job_id``.

    DELETE removes the schedule definition and returns the remaining jobs.
    Any other method reads JSON properties from the request body and hands
    them to APScheduler's cron trigger; an empty JSON object pauses the job.

    Raises BadRequestError when the job id is unknown or the JSON is invalid.
    """
    sched = get_scheduler()
    job_id = job_id.upper()
    job = sched.get_job(job_id)
    if not job:
        raise BadRequestError(f'No job found for job id: {job_id}')
    if request.method == 'DELETE':
        # logger.warn is a deprecated alias of logger.warning; use warning.
        app.logger.warning(
            f'About to delete schedule definition for job id: {job_id}')
        sched.remove_job(job_id)
        return tolerant_jsonify([job_to_dict(job) for job in sched.get_jobs()])
    else:
        # If JSON properties are present, they will be evaluated by APScheduler's cron trigger API.
        # https://apscheduler.readthedocs.io/en/latest/modules/triggers/cron.html#module-apscheduler.triggers.cron
        try:
            args = request.get_json(force=True)
        except Exception as e:
            raise BadRequestError(str(e))
        if args:
            try:
                job.reschedule(trigger='cron', **args)
            except Exception as e:
                raise BadRequestError(f'Error rescheduling job: {e}')
        # Passing an empty JSON object will pause this job.
        else:
            job.pause()
        # Re-fetch so the response reflects the updated schedule.
        job = sched.get_job(job_id)
        return tolerant_jsonify(job_to_dict(job))
def sync_file_to_s3():
    """Launch an async job that copies the file at ``url`` into S3 at ``key``.

    Expects a JSON request body with required ``url`` and ``key`` keys and an
    optional ``canvas_sync_job_id``; when the latter is present, the sync
    status is marked 'received' before the job starts.

    Raises BadRequestError when a required parameter is absent.
    """
    payload = json.loads(request.data)
    url = payload.get('url') if payload else None
    if not url:
        raise BadRequestError('Required "url" parameter missing.')
    key = payload.get('key') if payload else None
    if not key:
        raise BadRequestError('Required "key" parameter missing.')
    canvas_sync_job_id = payload.get('canvas_sync_job_id') if payload else None
    if canvas_sync_job_id:
        update_canvas_sync_status(canvas_sync_job_id, key, 'received')
    job = SyncFileToS3(url=url, key=key, canvas_sync_job_id=canvas_sync_job_id)
    return respond_with_status(job.run_async())
def reload_job_schedules():
    """Discard any manual changes to job schedules and bring back the configured version.

    Raises BadRequestError when job scheduling is disabled by configuration.
    Returns the refreshed job schedule.
    """
    if not app.config['JOB_SCHEDULING_ENABLED']:
        raise BadRequestError('Job scheduling is not enabled')
    schedule_all_jobs(force=True)
    # Plain string: the original f-string had no placeholders (ruff F541).
    app.logger.info('Overwrote current jobs schedule with configured values')
    return get_job_schedule()
def transform_piazza_api_data(archive='latest'):
    """Start an async job transforming fetched Piazza archive data.

    :param archive: 'latest' or an archive name like 'daily_2020-09-12'
        (a daily|monthly|full prefix plus an ISO date).

    Raises BadRequestError when the archive name is malformed.
    """
    # re.match anchors at the start only; together the two checks require a
    # daily|monthly|full prefix and a <word>_<YYYY-MM-DD> shape.
    # Dropped the redundant '\_' escape so the pattern matches the one used
    # by the sibling Piazza import functions.
    if archive != 'latest' and not (
        re.match('(daily|monthly|full)', archive)
        and re.match(r'(\w+)_(\d{4}\-\d{2}\-\d{2})', archive)
    ):
        raise BadRequestError(
            f"Incorrect archive parameter '{archive}', should be 'latest' or like 'daily_2020-09-12'."
        )
    job_started = TransformPiazzaApiData(archive=archive).run_async()
    return respond_with_status(job_started)
def update_scheduled_job_args(job_id):
    """Merge JSON args from the request into a scheduled job's keyword args.

    Job args are stored as a three-element list whose third element is a dict
    of keyword args (per the visible merge logic); the posted JSON is merged
    into that dict, or used as-is when the job has no third element.

    Raises BadRequestError on bad JSON, empty args, unknown job id, or any
    failure while modifying the job.
    """
    try:
        args = request.get_json(force=True)
    except Exception as e:
        raise BadRequestError(str(e))
    if not args:
        # Plain string: the original f-string had no placeholders (ruff F541).
        raise BadRequestError('Could not parse args from request')
    sched = get_scheduler()
    job_id = job_id.upper()
    job = sched.get_job(job_id)
    if not job:
        raise BadRequestError(f'No job found for job id: {job_id}')
    try:
        existing_args = job.args
        if len(existing_args) > 2:
            # Copy before updating so the job's stored args aren't mutated
            # in place if modify() fails.
            new_args = dict(existing_args[2])
            new_args.update(args)
        else:
            new_args = args
        job.modify(args=[existing_args[0], existing_args[1], new_args])
    except Exception as e:
        raise BadRequestError(f'Error updating job args: {e}')
    # Re-fetch so the response reflects the updated job.
    job = sched.get_job(job_id)
    return tolerant_jsonify(job_to_dict(job))
def import_piazza_api_data(archive='latest'):
    """Start an async Piazza API import for the given archive.

    NOTE(review): a second definition of `import_piazza_api_data` appears
    later in this file and shadows this one at import time, so this version's
    end-of-month logic is presumably dead code — confirm and remove if so.

    :param archive: 'latest', 'monthly', or an archive name like
        'daily_2020-09-12'.
    """
    # Number of days in the current month, per the local clock.
    days_in_month = calendar.monthrange(time.localtime()[0], time.localtime()[1])[1]
    todays_day = time.localtime()[2]
    # if today is the last day of the month, fetch the monthly as latest instead of daily
    if (archive == 'monthly') or (archive == 'latest' and todays_day == days_in_month):
        archive = time.strftime('monthly_%Y-%m-01', time.localtime())
    # Validate: daily|monthly|full prefix plus a <word>_<YYYY-MM-DD> shape.
    if (archive != 'latest' ) and not (re.match('(daily|monthly|full)', archive) and re.match(r'(\w+)_(\d{4}\-\d{2}\-\d{2})', archive)):
        raise BadRequestError(
            f"Incorrect archive parameter '{archive}', should be 'latest' or like 'daily_2020-09-12'."
        )
    job_started = ImportPiazzaApiData(archive=archive).run_async()
    return respond_with_status(job_started)
def import_piazza_api_data(archive='latest'):
    """Start an async Piazza API import for the given archive.

    On the first day of the month, 'latest' (and any 'monthly' request) is
    rewritten to the previous month's monthly archive name.

    :param archive: 'latest', 'monthly', or an archive name like
        'daily_2020-09-12' (a daily|monthly|full prefix plus an ISO date).

    Raises BadRequestError when the archive name is malformed.
    """
    today = datetime.date.today()
    # if today is the first day of the month, fetch the last month's monthly as latest instead of daily
    if archive == 'monthly' or (archive == 'latest' and today.day == 1):
        last_day_of_prev_month = today.replace(day=1) - datetime.timedelta(days=1)
        archive = last_day_of_prev_month.strftime('monthly_%Y-%m-01')
    is_valid = archive == 'latest' or (
        re.match('(daily|monthly|full)', archive)
        and re.match(r'(\w+)_(\d{4}\-\d{2}\-\d{2})', archive)
    )
    if not is_valid:
        raise BadRequestError(
            f"Incorrect archive parameter '{archive}', should be 'latest' or like 'daily_2020-09-12'."
        )
    return respond_with_status(ImportPiazzaApiData(archive=archive).run_async())
def import_registrations_hist_enr(load_mode):
    """Start the non-advisee registrations import in the given load mode.

    :param load_mode: either 'batch' or 'new'.

    Raises BadRequestError for any other mode.
    """
    if load_mode not in ('batch', 'new'):
        raise BadRequestError(
            'Unrecognized mode for non-advisee registrations import.')
    job = ImportRegistrationsHistEnr(load_mode=load_mode)
    return respond_with_status(job.run_async())
def _safety_check():
    """Refuse to run the EDL data comparison outside a local dev environment.

    Raises BadRequestError when an Elastic Beanstalk environment is
    configured (i.e. the app is deployed rather than running locally).
    """
    if 'EB_ENVIRONMENT' in app.config:
        # Fixed: the original message was wrapped in stray nested quotes.
        raise BadRequestError(
            "EDL data comparison can ONLY be run in developer's local environment"
        )