Example #1
def put_thumbs(notify_buf, jpeg_dir, prefix, suffix, video_id, store_loc):
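    """Copy every JPEG in jpeg_dir into backing storage for one video.

    For local storage the destination directory is wiped and recreated
    first; for remote storage the stale key is deleted. In both cases the
    files are then written one at a time through Django's default_storage.
    """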
    # I wish the Filesystem API worked the same for local and remote, but it doesn't
    if store_loc == 'local':
        root = local_storage_root_dir()
        store_path = root + "/" + prefix + "/" + suffix + "/videos/" + str(video_id) + "/jpegs"
        if default_storage.exists(store_path):
            infoLog(notify_buf, "Found prior directory, removing: %s" % store_path)
            dirRemove(store_path) 
        os.mkdir(store_path)
    else:
        store_path = prefix + "/" + suffix + "/videos/" + str(video_id) + "/jpegs"
        default_storage.delete(store_path)

    # Not writing to a temp file and then renaming, because of file storage API limitations
    image_list = os.listdir(jpeg_dir)
    image_list.sort()
    for fname in image_list:
        infoLog(notify_buf, "Uploading: %s" % fname)
        local_file = open(jpeg_dir + "/" + fname, 'rb')
        store_file = default_storage.open(store_path + "/" + fname, 'wb')
        file_data = local_file.read()
        store_file.write(file_data)
        local_file.close()
        store_file.close()
    infoLog(notify_buf, "Uploaded: %s files" % str(len(image_list)))
Example #2
def upload(notify_buf, target_dir, target_part, prefix, suffix, video_id,
           video_file, store_loc):
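    """Copy one finished video file into backing storage.

    Same local/remote split as put_thumbs: wipe and recreate the local
    destination directory, or delete the stale remote key, then write
    the file through Django's default_storage.
    """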
    # I wish the Filesystem API worked the same for local and remote, but it doesn't
    if store_loc == 'local':
        root = local_storage_root_dir()
        store_path = root + "/" + prefix + "/" + suffix + "/videos/" + str(
            video_id) + "/" + target_part
        if default_storage.exists(store_path):
            infoLog(notify_buf,
                    "Found prior directory, removing: %s" % store_path)
            dirRemove(store_path)
        os.mkdir(store_path)
    else:
        store_path = prefix + "/" + suffix + "/videos/" + str(
            video_id) + "/" + target_part
        default_storage.delete(store_path)

    statinfo = os.stat(target_dir + "/" + video_file)
    infoLog(notify_buf, "Final file size: %s" % str(statinfo.st_size))

    local_file = open(target_dir + "/" + video_file, 'rb')
    store_file = default_storage.open(store_path + "/" + video_file, 'wb')
    store_file.write(local_file.read())
    local_file.close()
    store_file.close()
Example #3
def put_thumbs(notify_buf, jpeg_dir, prefix, suffix, video_id, store_loc):
    # I wish the Filesystem API worked the same for local and remote, but it doesn't
    if store_loc == 'local':
        root = local_storage_root_dir()
        store_path = root + "/" + prefix + "/" + suffix + "/videos/" + str(
            video_id) + "/jpegs"
        if default_storage.exists(store_path):
            infoLog(notify_buf,
                    "Found prior directory, removing: %s" % store_path)
            dirRemove(store_path)
        os.mkdir(store_path)
    else:
        store_path = prefix + "/" + suffix + "/videos/" + str(
            video_id) + "/jpegs"
        default_storage.delete(store_path)

    # Not writing to a temp file and then renaming, because of file storage API limitations
    image_list = os.listdir(jpeg_dir)
    image_list.sort()
    for fname in image_list:
        infoLog(notify_buf, "Uploading: %s" % fname)
        local_file = open(jpeg_dir + "/" + fname, 'rb')
        store_file = default_storage.open(store_path + "/" + fname, 'wb')
        file_data = local_file.read()
        store_file.write(file_data)
        local_file.close()
        store_file.close()
    infoLog(notify_buf, "Uploaded: %s files" % str(len(image_list)))
Example #4
def view_submissions_to_grade(request, course_prefix, course_suffix, exam_slug):
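    """Dump each student's latest complete, in-grace-period submission for
    the exam to a CSV file, then redirect to it (local file server or S3,
    depending on the storage backend)."""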
    course = request.common_page_data['course']
    
    try:
        exam = Exam.objects.get(course=course, is_deleted=0, slug=exam_slug)
    except Exam.DoesNotExist:
        raise Http404

    if exam.mode == "draft":
        exam = exam.image

    submitters = ExamRecord.objects.filter(exam=exam, complete=True, time_created__lt=exam.grace_period).values('student').distinct()
    fname = (course_prefix + "-" + course_suffix + "-" + exam_slug + "-" +
             datetime.datetime.now().strftime("%Y-%m-%d-%H:%M:%S") + ".csv")
    temp_file_path = FILE_DIR + "/" + fname
    outfile = open(temp_file_path, "w+")

    could_not_parse = ""

    for s in submitters:  # Yes, there is SQL in a loop here; we'll optimize later
        latest_sub = ExamRecord.objects.values('student__username', 'time_created', 'json_data').filter(exam=exam, time_created__lt=exam.grace_period, student=s['student']).latest('time_created')
        try:
            sub_obj = json.loads(latest_sub['json_data']).iteritems()
            for k,v in sub_obj:
                outstring = '"%s","%s","%s"\n' % (latest_sub['student__username'], k, parse_val(v))
                outfile.write(outstring)
        except ValueError:
            could_not_parse += latest_sub['student__username'] + " "  # Don't output if the latest submission was erroneous

    outfile.write("\n")

    #if there were items we could not parse
    if could_not_parse:
        #write the usernames at the beginning of the file
        outfile.seek(0)
        data=outfile.read()
        outfile.seek(0)
        outfile.truncate()
        outfile.write("Could not parse data from the following users: " + could_not_parse + "\n")
        outfile.write(data)

    if is_storage_local():
        outfile.close()
        reports_dir = local_storage_root_dir() + "/" + course_prefix + "/" + course_suffix + "/reports/"
        if not default_storage.exists(reports_dir):
            os.mkdir(reports_dir)
        copyfile(temp_file_path, reports_dir + fname)
        file_url = local_file_server_root() + "/" + course_prefix + "/" + course_suffix + "/reports/" + fname
        return HttpResponseRedirect(file_url)
    else:
        #write to S3
        secure_file_storage = S3BotoStorage(bucket=AWS_SECURE_STORAGE_BUCKET_NAME, access_key=AWS_ACCESS_KEY_ID, secret_key=AWS_SECRET_ACCESS_KEY)
        s3file = secure_file_storage.open("/%s/%s/reports/exams/%s" % (course_prefix, course_suffix, fname),'w')
        outfile.seek(0)
        s3file.write(outfile.read())
        s3file.close()
        outfile.close()
        return HttpResponseRedirect(secure_file_storage.url_monkeypatched("/%s/%s/reports/exams/%s" % (course_prefix, course_suffix, fname), response_headers={'response-content-disposition': 'attachment'}))
Example #5
def view_csv_grades(request, course_prefix, course_suffix, exam_slug):
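    """Dump each graded student's total score and per-field subscores for
    the exam to a CSV file, then redirect to it (local file server or S3,
    depending on the storage backend)."""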
    course = request.common_page_data['course']
    
    try:
        exam = Exam.objects.get(course=course, is_deleted=0, slug=exam_slug)
    except Exam.DoesNotExist:
        raise Http404

    if course.mode == "draft":
        course = course.image

    if exam.mode == "draft":
        exam = exam.image
    
    graded_students = ExamScore.objects.filter(course=course, exam=exam).values('student','student__username').distinct()
    fname = (course_prefix + "-" + course_suffix + "-" + exam_slug + "-grades-" +
             datetime.datetime.now().strftime("%Y-%m-%d-%H:%M:%S") + ".csv")
    temp_file_path = FILE_DIR + "/" + fname
    outfile = open(temp_file_path, "w+")

    for s in graded_students:  # Yes, there is SQL in a loop here; we'll optimize later
        score_obj = ExamScore.objects.get(course=course, exam=exam, student=s['student'])
        outstring = '"%s","%s"\n' % (s['student__username'], score_obj.score)
        outfile.write(outstring)
        subscores = ExamScoreField.objects.filter(parent=score_obj)
        for field in subscores:
            outstring = '"%s","%s","%s"\n' % (s['student__username'], field.field_name, str(field.subscore))
            outfile.write(outstring)

    outfile.write("\n")
    
    if is_storage_local():
        outfile.close()
        reports_dir = local_storage_root_dir() + "/" + course_prefix + "/" + course_suffix + "/reports/"
        if not default_storage.exists(reports_dir):
            os.mkdir(reports_dir)
        copyfile(temp_file_path, reports_dir + fname)
        file_url = local_file_server_root() + "/" + course_prefix + "/" + course_suffix + "/reports/" + fname
        return HttpResponseRedirect(file_url)
    else:
        #write to S3
        secure_file_storage = S3BotoStorage(bucket=AWS_SECURE_STORAGE_BUCKET_NAME, access_key=AWS_ACCESS_KEY_ID, secret_key=AWS_SECRET_ACCESS_KEY)
        s3file = secure_file_storage.open("/%s/%s/reports/exams/%s" % (course_prefix, course_suffix, fname),'w')
        outfile.seek(0)
        s3file.write(outfile.read())
        s3file.close()
        outfile.close()
        return HttpResponseRedirect(secure_file_storage.url("/%s/%s/reports/exams/%s" % (course_prefix, course_suffix, fname), response_headers={'response-content-disposition': 'attachment'}))
Example #6
def duration(video):
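    """Ask ffprobe for the video's stream info, parse out the duration,
    and save it (in whole seconds) on the Video model."""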
    notify_buf = []
    (store_path, course_prefix, course_suffix, video_id, video_filename) = splitpath(video.file.name)
    print store_path
    print video_filename
    cmdline = ['ffprobe',
               '-loglevel', 'error',
               '-show_streams',
               local_storage_root_dir() + "/" + store_path]
    infoLog(notify_buf, "GET DURATION: " + " ".join(cmdline))
    result = subprocess.Popen(cmdline, stdout = subprocess.PIPE, stderr = subprocess.STDOUT)
    duration_str = [x for x in result.stdout.readlines() if "duration=" in x][0]
    infoLog(notify_buf, duration_str)
    video.duration = int(float(duration_str.split("=")[1]))
    video.save()
Example #7
    def handle(self, *args, **options):
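        """Kelvinate the draft video named by the <prefix> <suffix> <slug>
        positional arguments, either synchronously (local storage) or by
        queueing the remote task via .delay()."""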
        if len(args) != 3:
            raise CommandError("Wrong number of arguments, %d instead of 3" %
                               len(args))
        if options['force_local'] and options['force_remote']:
            raise CommandError("Can't run both local and remote")
        arg_prefix = args[0]
        arg_suffix = args[1]
        handle = arg_prefix + "--" + arg_suffix
        slug = args[2]

        try:
            video = Video.objects.get(course__handle=handle,
                                      slug=slug,
                                      mode='draft')
        except MultipleObjectsReturned:
            print "Found multiple videos named \"%s\"" % slug
            return
        except Video.DoesNotExist:
            print "Video \"%s\" not found for handle \"%s\"" % (slug, handle)
            return

        if video.file.name == "default":
            print "Video slug \"%s\" doesn't have a file listed in S3 (name=\"default\")" % slug
            return

        # FIXME: after confirming this works, clean these lines up.
        #where = getattr(settings, 'AWS_ACCESS_KEY_ID', 'local')
        #if options['force_local']:
        #    where='local'
        #if options['force_remote']:
        #    where='remote'
        #if where == 'local':
        if (is_storage_local()
                or options['force_local']) and not options['force_remote']:
            media_root = local_storage_root_dir()
            local_path = media_root + "/" + video.file.name
            kelvinator.tasks.kelvinate(local_path, options['target_frames'],
                                       options['notify_addr'])
            print "Kelvination complete: %s" % video.file.name
        else:
            kelvinator.tasks.kelvinate.delay(video.file.name,
                                             options['target_frames'],
                                             options['notify_addr'])
            print "Kelvination queued (%s): %s" % (instance, video.file.name)
Example #8
def duration(video):
    notify_buf = []
    (store_path, course_prefix, course_suffix, video_id,
     video_filename) = splitpath(video.file.name)
    print store_path
    print video_filename
    cmdline = ['ffprobe',
               '-loglevel', 'error',
               '-show_streams',
               local_storage_root_dir() + "/" + store_path]
    infoLog(notify_buf, "GET DURATION: " + " ".join(cmdline))
    result = subprocess.Popen(cmdline,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.STDOUT)
    duration_str = [x for x in result.stdout.readlines()
                    if "duration=" in x][0]
    infoLog(notify_buf, duration_str)
    video.duration = int(float(duration_str.split("=")[1]))
    video.save()
Example #9
def upload(notify_buf, target_dir, target_part, prefix, suffix, video_id, video_file, store_loc):
    # I wish the Filesystem API worked the same for local and remote, but it doesn't
    if store_loc == 'local':
        root = local_storage_root_dir()
        store_path = root + "/" + prefix + "/" + suffix + "/videos/" + str(video_id) + "/" + target_part
        if default_storage.exists(store_path):
            infoLog(notify_buf, "Found prior directory, removing: %s" % store_path)
            dirRemove(store_path) 
        os.mkdir(store_path)
    else:
        store_path = prefix + "/" + suffix + "/videos/" + str(video_id) + "/" + target_part
        default_storage.delete(store_path)

    statinfo = os.stat(target_dir + "/" + video_file)
    infoLog(notify_buf, "Final file size: %s" % str(statinfo.st_size))

    local_file = open(target_dir + "/" + video_file, 'rb')
    store_file = default_storage.open(store_path + "/" + video_file, 'wb')
    store_file.write(local_file.read())
    local_file.close()
    store_file.close()
Example #10
    def handle(self, *args, **options):
        if len(args) != 3:
            raise CommandError("Wrong number of arguments, %d instead of 3" % len(args))
        if options['force_local'] and options['force_remote']:
            raise CommandError("Can't run both local and remote")
        arg_prefix = args[0]
        arg_suffix = args[1]
        handle = arg_prefix + "--" + arg_suffix
        slug = args[2]

        try:
            video = Video.objects.get(course__handle=handle, slug=slug, mode='draft')
        except MultipleObjectsReturned:
            print "Found multiple videos named \"%s\"" % slug
            return
        except Video.DoesNotExist:
            print "Video \"%s\" not found for handle \"%s\"" % (slug, handle)
            return

        if video.file.name == "default":
            print "Video slug \"%s\" doesn't have a file listed in S3 (name=\"default\")" % slug
            return
            
        # FIXME: after confirming this works, clean these lines up.
        #where = getattr(settings, 'AWS_ACCESS_KEY_ID', 'local')
        #if options['force_local']: 
        #    where='local'
        #if options['force_remote']:
        #    where='remote'
        #if where == 'local':
        if (is_storage_local() or options['force_local']) and not options['force_remote']:
            media_root = local_storage_root_dir()
            local_path = media_root + "/" + video.file.name
            kelvinator.tasks.kelvinate(local_path, options['target_frames'], options['notify_addr'])
            print "Kelvination complete: %s" % video.file.name
        else:
            kelvinator.tasks.kelvinate.delay(video.file.name, options['target_frames'], options['notify_addr'])
            print "Kelvination queued (%s): %s" % (instance, video.file.name)
Example #11
def view_csv_grades(request, course_prefix, course_suffix, exam_slug):
    course = request.common_page_data['course']

    try:
        exam = Exam.objects.get(course=course, is_deleted=0, slug=exam_slug)
    except Exam.DoesNotExist:
        raise Http404

    if course.mode == "draft":
        course = course.image

    if exam.mode == "draft":
        exam = exam.image

    graded_students = ExamScore.objects.filter(
        course=course, exam=exam).values('student',
                                         'student__username').distinct()
    fname = course_prefix + "-" + course_suffix + "-" + exam_slug + "-grades-" + datetime.datetime.now(
    ).strftime("%Y-%m-%d-%H:%M:%S") + ".csv"
    temp_file_path = FILE_DIR + "/" + fname
    outfile = open(temp_file_path, "w+")

    for s in graded_students:  # Yes, there is SQL in a loop here; we'll optimize later
        score_obj = ExamScore.objects.get(course=course,
                                          exam=exam,
                                          student=s['student'])
        outstring = '"%s","%s"\n' % (s['student__username'], score_obj.score)
        outfile.write(outstring)
        subscores = ExamScoreField.objects.filter(parent=score_obj)
        for field in subscores:
            outstring = '"%s","%s","%s"\n' % (
                s['student__username'], field.field_name, str(field.subscore))
            outfile.write(outstring)

    outfile.write("\n")

    if is_storage_local():
        outfile.close()
        reports_dir = (local_storage_root_dir() + "/" + course_prefix + "/" +
                       course_suffix + "/reports/")
        if not default_storage.exists(reports_dir):
            os.mkdir(reports_dir)
        copyfile(temp_file_path, reports_dir + fname)
        file_url = (local_file_server_root() + "/" + course_prefix + "/" +
                    course_suffix + "/reports/" + fname)
        return HttpResponseRedirect(file_url)
    else:
        #write to S3
        secure_file_storage = S3BotoStorage(
            bucket=AWS_SECURE_STORAGE_BUCKET_NAME,
            access_key=AWS_ACCESS_KEY_ID,
            secret_key=AWS_SECRET_ACCESS_KEY)
        s3file = secure_file_storage.open(
            "/%s/%s/reports/exams/%s" % (course_prefix, course_suffix, fname),
            'w')
        outfile.seek(0)
        s3file.write(outfile.read())
        s3file.close()
        outfile.close()
        return HttpResponseRedirect(
            secure_file_storage.url("/%s/%s/reports/exams/%s" %
                                    (course_prefix, course_suffix, fname),
                                    response_headers={
                                        'response-content-disposition':
                                        'attachment'
                                    }))
Example #12
def view_submissions_to_grade(request, course_prefix, course_suffix,
                              exam_slug):
    course = request.common_page_data['course']

    try:
        exam = Exam.objects.get(course=course, is_deleted=0, slug=exam_slug)
    except Exam.DoesNotExist:
        raise Http404

    if exam.mode == "draft":
        exam = exam.image

    submitters = ExamRecord.objects.filter(
        exam=exam, complete=True,
        time_created__lt=exam.grace_period).values('student').distinct()
    fname = course_prefix + "-" + course_suffix + "-" + exam_slug + "-" + datetime.datetime.now(
    ).strftime("%Y-%m-%d-%H:%M:%S") + ".csv"
    temp_file_path = FILE_DIR + "/" + fname
    outfile = open(temp_file_path, "w+")

    could_not_parse = ""

    for s in submitters:  # Yes, there is SQL in a loop here; we'll optimize later
        latest_sub = ExamRecord.objects.values(
            'student__username', 'time_created',
            'json_data').filter(exam=exam,
                                time_created__lt=exam.grace_period,
                                student=s['student']).latest('time_created')
        try:
            sub_obj = json.loads(latest_sub['json_data']).iteritems()
            for k, v in sub_obj:
                outstring = '"%s","%s","%s"\n' % (
                    latest_sub['student__username'], k, parse_val(v))
                outfile.write(outstring)
        except ValueError:
            # Don't output if the latest submission was erroneous
            could_not_parse += latest_sub['student__username'] + " "

    outfile.write("\n")

    #if there were items we could not parse
    if could_not_parse:
        #write the usernames at the beginning of the file
        outfile.seek(0)
        data = outfile.read()
        outfile.seek(0)
        outfile.truncate()
        outfile.write("Could not parse data from the following users: " +
                      could_not_parse + "\n")
        outfile.write(data)

    if is_storage_local():
        outfile.close()
        reports_dir = (local_storage_root_dir() + "/" + course_prefix + "/" +
                       course_suffix + "/reports/")
        if not default_storage.exists(reports_dir):
            os.mkdir(reports_dir)
        copyfile(temp_file_path, reports_dir + fname)
        file_url = (local_file_server_root() + "/" + course_prefix + "/" +
                    course_suffix + "/reports/" + fname)
        return HttpResponseRedirect(file_url)
    else:
        #write to S3
        secure_file_storage = S3BotoStorage(
            bucket=AWS_SECURE_STORAGE_BUCKET_NAME,
            access_key=AWS_ACCESS_KEY_ID,
            secret_key=AWS_SECRET_ACCESS_KEY)
        s3file = secure_file_storage.open(
            "/%s/%s/reports/exams/%s" % (course_prefix, course_suffix, fname),
            'w')
        outfile.seek(0)
        s3file.write(outfile.read())
        s3file.close()
        outfile.close()
        return HttpResponseRedirect(
            secure_file_storage.url_monkeypatched(
                "/%s/%s/reports/exams/%s" %
                (course_prefix, course_suffix, fname),
                response_headers={
                    'response-content-disposition': 'attachment'
                }))