Exemplo n.º 1
0
 def form_valid(self, form):
     """Attach author and parent problem set to the form before saving.

     Only users allowed to edit the problem set may create problems in it.
     """
     pset = get_object_or_404(ProblemSet, id=self.kwargs['problem_set_id'])
     verify(self.request.user.can_edit_problem_set(pset))
     form.instance.problem_set = pset
     form.instance.author = self.request.user
     return super(ProblemCreate, self).form_valid(form)
Exemplo n.º 2
0
def toggle_observed(request, course_pk, student_pk):
    """Toggle the observed status of a student in a given course.

    Bug fix: the original docstring was copy-pasted from promote_to_teacher
    and wrongly described a promotion.  Only users who can edit the course
    may do this (``verify`` raises otherwise); redirects to the course page.
    """
    course = get_object_or_404(Course, pk=course_pk)
    verify(request.user.can_edit_course(course))
    student = get_object_or_404(User, pk=student_pk)
    course.toggle_observed(student)
    return redirect(course)
Exemplo n.º 3
0
 def _create_table(self, dir_path, table, dialect, headers_present):
     """Read one table from ``<dir_path>/<table>.csv``.

     Returns a dict keyed by primary key for keyed tables, a list of data
     rows for unkeyed ones, or a generator function for generator tables.
     Returns None when the csv file does not exist.
     """
     file_path = os.path.join(dir_path, table + ".csv")
     # A missing file means the table is simply absent.
     if not os.path.isfile(file_path) :
         return
     tdf = self.tic_dat_factory
     fieldnames=tdf.primary_key_fields.get(table, ()) + tdf.data_fields.get(table, ())
     # Without a header row, DictReader must be told the field names.
     dict_rdr_args = dict({"fieldnames":fieldnames} if not headers_present else{},
                          **{"dialect": dialect})
     if table in tdf.generator_tables:
         # Generator tables re-read the file lazily each time rtn() runs.
         def rtn() :
             if not headers_present:
                 self._verify_fields_by_cnt(dir_path, table, dialect)
             with open(file_path) as csvfile:
                 for r in csv.DictReader(csvfile, **dict_rdr_args) :
                     verify(set(r.keys()).issuperset(fieldnames),
                            "Failed to find the required field names for %s"%table)
                     yield tuple(_try_float(r[_]) for _ in tdf.data_fields[table])
     else:
         if not headers_present:
             self._verify_fields_by_cnt(dir_path, table, dialect)
         # Keyed rows go into a dict, unkeyed rows into a list.
         rtn = {} if tdf.primary_key_fields.get(table) else []
         with open(file_path) as csvfile:
             for r in csv.DictReader(csvfile, **dict_rdr_args) :
                 verify(set(r.keys()).issuperset(fieldnames),
                        "Failed to find the required field names for %s"%table)
                 if tdf.primary_key_fields.get(table) :
                     # Single-field keys are stored bare; multi-field keys
                     # as tuples.
                     p_key = _try_float(r[tdf.primary_key_fields[table][0]]) \
                         if len(tdf.primary_key_fields[table])==1 else \
                         tuple(_try_float(r[_]) for _ in tdf.primary_key_fields[table])
                     rtn[p_key] = tuple(_try_float(r[_]) for _ in tdf.data_fields.get(table,()))
                 else:
                     rtn.append(tuple(_try_float(r[_]) for _ in tdf.data_fields[table]))
     return rtn
Exemplo n.º 4
0
 def problem_copy(user, original_problem, new_problem_set):
     """
     Copies the given problem (with all its parts) to the given problem set.
     Verifies that:
     a) user can view the original problem.
     b) user can edit the target problem set.
     Returns the copy of the given problem.
     """
     verify(user.can_view_problem(original_problem))
     # Bug fix: this check referenced the undefined name ``request``; the
     # permission must be verified against the ``user`` argument.
     verify(user.can_edit_problem_set(new_problem_set))
     new_problem = Problem()
     new_problem.title = original_problem.title
     new_problem.description = original_problem.description
     new_problem.problem_set = new_problem_set
     new_problem.tags = original_problem.tags
     new_problem.save()
     # Duplicate every part belonging to the original problem.
     original_parts = original_problem.parts.all()
     for original_part in original_parts:
         new_part = Part()
         new_part.problem = new_problem
         new_part.description = original_part.description
         new_part.solution = original_part.solution
         new_part.validation = original_part.validation
         new_part.secret = original_part.secret
         new_part.save()
     return new_problem
Exemplo n.º 5
0
def demote_to_student(request, course_pk, teacher_pk):
    """Turn a teacher of the given course back into an ordinary student."""
    course = get_object_or_404(Course, pk=course_pk)
    verify(request.user.can_edit_course(course))
    demoted = get_object_or_404(User, pk=teacher_pk)
    course.demote_to_student(demoted)
    return redirect(course)
Exemplo n.º 6
0
 def _create_tic_dat(self, xls_file_path, row_offsets, headers_present):
     """Build the table-name -> table-object dict from an xls workbook.

     :param xls_file_path: path of the workbook to read
     :param row_offsets: dict mapping table names to non-negative row offsets
     :param headers_present: True when each sheet starts with a header row
     :return: dict mapping table name to a dict (keyed tables) or list
     """
     verify(utls.dictish(row_offsets) and
            set(row_offsets).issubset(self.tic_dat_factory.all_tables) and
            all(utls.numericish(x) and (x>=0) for x in row_offsets.values()),
            "row_offsets needs to map from table names to non negative row offset")
     # Default every table to offset 0, then overlay the caller's offsets.
     row_offsets = dict({t:0 for t in self.tic_dat_factory.all_tables}, **row_offsets)
     tdf = self.tic_dat_factory
     rtn = {}
     sheets, field_indicies = self._get_sheets_and_fields(xls_file_path,
                                 set(tdf.all_tables).difference(tdf.generator_tables),
                                 row_offsets, headers_present)
     # Skip one extra row per sheet when a header row is present.
     ho = 1 if headers_present else 0
     for table, sheet in sheets.items() :
         fields = tdf.primary_key_fields.get(table, ()) + tdf.data_fields.get(table, ())
         indicies = field_indicies[table]
         # The shortest populated column bounds the usable number of rows.
         table_len = min(len(sheet.col_values(indicies[field])) for field in fields)
         if tdf.primary_key_fields.get(table, ()) :
             tableObj = {self._sub_tuple(tdf.primary_key_fields[table], indicies)(x) :
                         self._sub_tuple(tdf.data_fields.get(table, ()), indicies)(x)
                         for x in (sheet.row_values(i) for i in
                                     range(table_len)[row_offsets[table]+ho:])}
         else :
             tableObj = [self._sub_tuple(tdf.data_fields.get(table, ()), indicies)(x)
                         for x in (sheet.row_values(i) for i in
                                     range(table_len)[row_offsets[table]+ho:])]
         rtn[table] = tableObj
     for table in tdf.generator_tables :
         # Generator tables are produced lazily by a dedicated helper.
         rtn[table] = self._create_generator_obj(xls_file_path, table, row_offsets[table],
                                                 headers_present)
     return rtn
Exemplo n.º 7
0
def course_detail(request, course_pk):
    """Show the course page; teachers also get per-student statistics."""
    course = get_object_or_404(Course, pk=course_pk)
    verify(request.user.can_view_course(course))
    if request.user.can_edit_course(course):
        # Teachers see every enrolled student (excluding co-teachers) with
        # valid / invalid / empty attempt counts over the visible sets.
        students = list(course.students.exclude(taught_courses=course))
        problem_sets = course.problem_sets.filter(visible=True)
        part_count = Part.objects.filter(problem__problem_set__in=problem_sets).count()
        attempts = Attempt.objects.filter(part__problem__problem_set__in=problem_sets)
        from django.db.models import Count
        # Per-user attempt counts: valid only, and all attempts.
        valid_attempts = attempts.filter(valid=True).values('user').annotate(Count('user'))
        all_attempts = attempts.values('user').annotate(Count('user'))
        def to_dict(attempts):
            # Flatten the annotated queryset into {user_pk: count}.
            attempts_dict = {}
            for val in attempts:
                attempts_dict[val['user']] = val['user__count']
            return attempts_dict
        valid_attempts_dict = to_dict(valid_attempts)
        all_attempts_dict = to_dict(all_attempts)
        for student in students:
            # valid + invalid + empty partitions the course's parts.
            student.valid = valid_attempts_dict.get(student.pk, 0)
            student.invalid = all_attempts_dict.get(student.pk, 0) - student.valid
            student.empty = part_count - student.valid - student.invalid
    else:
        students = []
    course.annotate_for_user(request.user)
    return render(request, 'courses/course_detail.html', {
        'course': course,
        'students': students,
        'show_teacher_forms': request.user.can_edit_course(course),
    })
Exemplo n.º 8
0
 def get_context_data(self, **kwargs):
     """Add the target problem set to the template context.

     Raises via ``verify`` when the user may not edit the problem set.
     """
     context = super(ProblemCreate, self).get_context_data(**kwargs)
     pset = get_object_or_404(ProblemSet, id=self.kwargs['problem_set_id'])
     verify(self.request.user.can_edit_problem_set(pset))
     context['problem_set'] = pset
     return context
Exemplo n.º 9
0
def copy_form(request, problem_pk):
    """
    Show and react to CopyForm.

    GET renders the copy form; POST copies the problem into the selected
    problem set and redirects there.
    """
    # Consistency/robustness fix: the sibling views use get_object_or_404,
    # while this one used ``.objects.get`` and would raise an unhandled
    # DoesNotExist (HTTP 500) for a missing object instead of a 404.
    problem = get_object_or_404(Problem, pk=problem_pk)
    verify(request.user.can_view_problem(problem))
    if request.method == 'POST':
        form = CopyProblemForm(request.POST)
        if form.is_valid():
            problem_set_pk = form.cleaned_data['problem_set_id']
            problem_set = get_object_or_404(ProblemSet, pk=problem_set_pk)
            verify(request.user.can_edit_problem_set(problem_set))
            problem.copy_to(problem_set)
            return redirect(problem_set)
        else:
            # TODO: handle errors
            response = HttpResponse("Please select a problem set.")
            return response
    else:
        form = CopyProblemForm()
        courses = request.user.taught_courses.all()
        return render(request, 'courses/problem_copy_form.html', {
            'form': form,
            'courses': courses,
            'problem': problem,
        })
Exemplo n.º 10
0
 def rtn() :
     """Lazily yield the data rows of one csv table.

     NOTE(review): this function was extracted from a closure -- it relies
     on enclosing-scope names (``self``, ``headers_present``, ``dir_path``,
     ``table``, ``dialect``, ``file_path``, ``dict_rdr_args``,
     ``fieldnames``, ``tdf``) that are not defined here.
     """
     if not headers_present:
         self._verify_fields_by_cnt(dir_path, table, dialect)
     with open(file_path) as csvfile:
         for r in csv.DictReader(csvfile, **dict_rdr_args) :
             verify(set(r.keys()).issuperset(fieldnames),
                    "Failed to find the required field names for %s"%table)
             yield tuple(_try_float(r[_]) for _ in tdf.data_fields[table])
Exemplo n.º 11
0
def demote_to_student(request, course_pk, teacher_pk):
    """Demote a teacher of the given course to an ordinary student."""
    course = get_object_or_404(Course, pk=course_pk)
    verify(request.user.can_edit_course(course))
    demoted = get_object_or_404(User, pk=teacher_pk)
    # Membership swap: join the students first, then leave the teachers.
    course.students.add(demoted)
    course.teachers.remove(demoted)
    return redirect(course)
Exemplo n.º 12
0
def course_submission_history_problemset(request, course_pk, problemset_pk):
    """Render the submission-history page for one problem set of a course."""
    course = get_object_or_404(Course, pk=course_pk)
    problemset = get_object_or_404(ProblemSet, pk=problemset_pk)
    verify(request.user.can_view_course_statistics(course))
    context = {'course': course, 'problemset': problemset}
    return render(request, 'statistics/submission_history_problemset.html',
                  context)
Exemplo n.º 13
0
def promote_to_teacher(request, course_pk, student_pk):
    """Promote student to teacher in a given course"""
    course = get_object_or_404(Course, pk=course_pk)
    verify(request.user.can_edit_course(course))
    promoted = get_object_or_404(User, pk=student_pk)
    # Membership swap: join the teachers first, then leave the students.
    course.teachers.add(promoted)
    course.students.remove(promoted)
    return redirect(course)
Exemplo n.º 14
0
def problem_set_progress_groups(request, problem_set_pk, group_pk):
    """Show per-group progress for the given problem set."""
    problem_set = get_object_or_404(ProblemSet, pk=problem_set_pk)
    group = get_object_or_404(CourseGroup, pk=group_pk)
    verify(request.user.can_view_problem_set_attempts(problem_set))
    context = {'problem_set': problem_set, 'group': group}
    return render(request, "courses/problem_set_progress_groups.html", context)
Exemplo n.º 15
0
def problem_set_move(request, problem_set_pk):
    """Move a problem set one position up or down within its course."""
    problem_set = get_object_or_404(ProblemSet, pk=problem_set_pk)
    verify(request.user.can_edit_course(problem_set.course))
    # Translate the pressed button into a signed offset; 0 means no move.
    if 'move_up' in request.POST:
        shift = -1
    elif 'move_down' in request.POST:
        shift = 1
    else:
        shift = 0
    if shift:
        problem_set.move(shift)
    return redirect(problem_set.course)
Exemplo n.º 16
0
def problem_set_move(request, problem_set_pk):
    """Move a problem set one position up or down within its course.

    Expects a POST containing either 'move_up' or 'move_down'.
    """
    problem_set = get_object_or_404(ProblemSet, pk=problem_set_pk)
    verify(request.user.can_edit_course(problem_set.course))
    # Bug fix: removed a leftover Python-2 debug statement
    # (``print request.POST``), which is a SyntaxError under Python 3.
    if 'move_up' in request.POST:
        problem_set.move(-1)
    elif 'move_down' in request.POST:
        problem_set.move(1)
    return redirect(problem_set.course)
Exemplo n.º 17
0
def course_progress(request, course_pk, user_pk):
    """Show the observed user's attempt progress across the whole course."""
    course = get_object_or_404(Course, id=course_pk)
    observed = get_object_or_404(User, id=user_pk)
    verify(request.user.can_view_course_attempts(course))
    context = {
        'course': course,
        'observed_user': observed,
        'course_attempts': course.user_attempts(observed),
    }
    return render(request, "courses/course_progress.html", context)
Exemplo n.º 18
0
def course_groups_delete(request, group_pk):
    """Delete a course group and return to the course's group listing."""
    group = get_object_or_404(CourseGroup, id=group_pk)
    course_pk = group.course.pk
    course = get_object_or_404(Course, id=course_pk)
    verify(request.user.can_delete_course_groups(course))
    group.delete()
    return redirect('course_groups', course_pk=course_pk)
Exemplo n.º 19
0
def course_progress(request, course_pk, user_pk):
    """Display course-wide attempt progress for the observed user."""
    course = get_object_or_404(Course, id=course_pk)
    watched = get_object_or_404(User, id=user_pk)
    verify(request.user.can_view_course_attempts(course))
    return render(request, "courses/course_progress.html", {
        'course': course,
        'observed_user': watched,
        'course_attempts': course.user_attempts(watched),
    })
Exemplo n.º 20
0
def course_groups_confirm_delete(request, group_pk):
    """
    Serve a modal window asking the user to confirm that they really want
    to delete this course group.
    """

    group = get_object_or_404(CourseGroup, id=group_pk)
    course_pk = group.course.pk
    course = get_object_or_404(Course, id=course_pk)
    verify(request.user.can_delete_course_groups(course))
    return render(request, 'courses/coursegroup_confirm_delete.html',
                  {'group_pk': group_pk})
Exemplo n.º 21
0
 def _create_tic_dat_from_sql(self, sql_file_path, includes_schema):
     """Build a TicDat object from a .sql dump file.

     :param sql_file_path: path to the sql file to execute
     :param includes_schema: True when the file contains its own DDL
     :return: the object built by _create_tic_dat_from_con
     """
     verify(os.path.exists(sql_file_path), "%s isn't a valid file path"%sql_file_path)
     verify(not self.tic_dat_factory.generator_tables,
            "recovery of generator tables from sql files not yet implemented")
     with sql.connect(":memory:") as con:
         if not includes_schema:
             # Idiom fix: the loop variable was named ``str`` and shadowed
             # the builtin; renamed to ``statement``.
             for statement in self._get_schema_sql():
                 con.execute(statement)
         with open(sql_file_path, "r") as f:
             for statement in f.read().split(";"):
                 con.execute(statement)
         return self._create_tic_dat_from_con(con)
Exemplo n.º 22
0
 def _verify_fields_by_cnt(self, dir_path, table, dialect) :
     """Check that the first csv row has as many columns as the table has fields."""
     file_path = os.path.join(dir_path, table + ".csv")
     verify(os.path.isfile(file_path),
            "Could not find file path %s for table %s"%(file_path, table))
     tdf = self.tic_dat_factory
     fieldnames = tdf.primary_key_fields.get(table, ()) + tdf.data_fields.get(table, ())
     with open(file_path) as csvfile :
         # Only the first row is inspected; an empty file is left alone,
         # exactly as the original loop-with-immediate-return did.
         first_row = next(csv.reader(csvfile, dialect=dialect), None)
         if first_row is not None:
             verify(len(first_row) == len(fieldnames),
                    "Need %s columns for table %s"%(len(fieldnames), table))
Exemplo n.º 23
0
def problem_set_tex(request, problem_set_pk):
    """Download the problem set rendered as a LaTeX exam file."""
    problem_set = get_object_or_404(ProblemSet, pk=problem_set_pk)
    verify(request.user.can_edit_problem_set(problem_set))
    tex = render_to_string("courses/izpit-latex-template.tex",
                           {'problem_set': problem_set})
    file_name = 'izpit{}.tex'.format(problem_set.pk)
    response = HttpResponse()
    response.write(tex)
    response['Content-Type'] = 'application/x-tex; charset=utf-8'
    response['Content-Disposition'] = 'attachment; filename={0}'.format(
        file_name)
    return response
Exemplo n.º 24
0
def send_control():
    """Emit the control sequence.

    Translated from the original Chinese docstring: unencoded bits,
    separator marker, unencoded bits, reference marker + time-quality
    marker, parity bits, unencoded bits, reference marker.
    """
    utils.vacancy()
    utils.divide()
    utils.vacancy()
    utils.p_unit()
    utils.time_quality()
    utils.verify()
    utils.vacancy()
    utils.time_quality()
Exemplo n.º 25
0
def course_detail(request, course_pk):
    """Show a list of all problems in a problem set."""
    course = get_object_or_404(Course, pk=course_pk)
    verify(request.user.can_view_course(course))
    students = (course.student_success()
                if request.user.can_edit_course(course) else [])
    course.annotate_for_user(request.user)
    context = {
        'course': course,
        'students': students,
        'show_teacher_forms': request.user.can_edit_course(course),
    }
    return render(request, 'courses/course_detail.html', context)
Exemplo n.º 26
0
def course_user_submission_history_problemset(request, course_pk,
                                              problemset_pk, student_pk):
    """Render one student's submission history for a problem set."""
    course = get_object_or_404(Course, pk=course_pk)
    student = get_object_or_404(User, pk=student_pk)
    problemset = get_object_or_404(ProblemSet, pk=problemset_pk)
    verify(request.user.can_view_course_statistics(course))
    context = {
        'course': course,
        'problemset': problemset,
        'student': student,
        'history': get_submission_history(problemset, student),
    }
    return render(request, "statistics/user_submission_history.html", context)
Exemplo n.º 27
0
def problem_set_detail(request, problem_set_pk):
    """Show a list of all problems in a problem set."""
    problem_set = get_object_or_404(ProblemSet, pk=problem_set_pk)
    verify(request.user.can_view_problem_set(problem_set))
    # Split the user's attempts on this set into solved / failed part ids.
    attempts = request.user.attempts.filter(
        part__problem__problem_set__id=problem_set_pk)
    valid_ids = attempts.filter(valid=True).values_list('part_id', flat=True)
    invalid_ids = attempts.filter(valid=False).values_list('part_id',
                                                           flat=True)
    return render(request, 'courses/problem_set_detail.html', {
        'problem_set': problem_set,
        'valid_parts_ids': valid_ids,
        'invalid_parts_ids': invalid_ids,
        'show_teacher_forms': request.user.can_edit_problem_set(problem_set),
    })
Exemplo n.º 28
0
def download_rules(prefix_db):
	global cached_rules
	print "++++ Updating from prefix-DB... ++++"
	conn = httplib.HTTPConnection(prefix_db, 8080)
	# post the request to initiate the prefix-ownership procedure.
	conn.request("GET", "/all")
	
	# wait for the response...
	response = conn.getresponse()
	# prefix-ownership authentication procedure completed
	if (response.status == 200):
		db = pickle.loads(zlib.decompress(response.read()))
		for asn in db:
                        record = db[asn].get()
                        authorized_pubkeys = gRPKITree.get_pub_key_set(asn)
                        verified = False
                        for pubkey in authorized_pubkeys:
                                if (utils.verify(signed_record.record, signed_record.signature, pubkey)):
                                        verified = True
                                        break
                        if not verified:
				del db[prefix]
		cached_rules = db
		store_db()
	else:
		print response.status
Exemplo n.º 29
0
def download_rules(prefix_db):
    global cached_rules
    print "++++ Updating from prefix-DB... ++++"
    conn = httplib.HTTPConnection(prefix_db, 8080)
    # post the request to initiate the prefix-ownership procedure.
    conn.request("GET", "/all")

    # wait for the response...
    response = conn.getresponse()
    # prefix-ownership authentication procedure completed
    if (response.status == 200):
        db = pickle.loads(zlib.decompress(response.read()))
        for asn in db:
            record = db[asn].get()
            authorized_pubkeys = gRPKITree.get_pub_key_set(asn)
            verified = False
            for pubkey in authorized_pubkeys:
                if (utils.verify(signed_record.record, signed_record.signature,
                                 pubkey)):
                    verified = True
                    break
            if not verified:
                del db[prefix]
        cached_rules = db
        store_db()
    else:
        print response.status
Exemplo n.º 30
0
def extract(corpus, graph=True, debug=False):
    """Run POS-tagged extraction over *corpus*, optionally as graph(s).

    NOTE(review): the boolean parameter ``graph`` is later reassigned to
    hold the graph result -- deliberate shadowing, but easy to misread.
    """
    # Suppress stdout unless debugging.
    wb = EC.NoStdout() if not debug else EC.YesStdout()
    with wb:
        tokenized_corpus = NLP.Basic.tokenize(corpus)
        tagged_corpus = nltk.pos_tag(tokenized_corpus)
        function = lths
        processor = ltpc

        extracted = function(tagged_corpus, tokenized_corpus)

        if not utils.verify(extracted):
            # Verification failed: retry the extraction with force=True.
            alt_extraction = function(tagged_corpus, tokenized_corpus, force=True)
            if type(alt_extraction) == dict:
                pass
            else:
                # NOTE(review): ``alt_extraction()`` invokes the value just
                # returned above -- confirm it is callable and that this is
                # not a typo for a plain truthiness test.
                if alt_extraction():
                    if not graph:
                        return alt_extraction
                    return to_graph.to_graph(alt_extraction, 3.7)
            all_extracted = list(processor(extracted, tagged_corpus))
            if not graph:
                print("Okay")
                return all_extracted
            graph = []
            for i in all_extracted:
                graph.append(to_graph.to_graph(i, 3.7))
        else:
            if not graph:
                print("Me too")
                return extracted
            graph = to_graph.to_graph(extracted, 3.7)
        return graph
Exemplo n.º 31
0
def verifyScriptSig(scriptSig: str, msg: str) -> bool:
    """Evaluate a tab-separated scriptSig on a small stack machine.

    Non-opcode tokens are pushed; OP_DUP, OP_HASH160, OP_EQUALVERIFY and
    OP_CHECKSIG are interpreted.  Returns False on a failed check.
    """
    stack: deque = deque()
    tokens: List[str] = scriptSig.split("\t")
    stack.append(tokens[0])
    for token in tokens[1:]:
        if token not in opcodeList:
            stack.append(token)
        elif token == "OP_DUP":
            top = stack.pop()
            stack.append(top)
            stack.append(top)
        elif token == "OP_HASH160":
            stack.append(utils.getHashValue(stack.pop(), hashSize))
        elif token == "OP_EQUALVERIFY":
            # Left pop happens first, matching the original's order.
            if stack.pop() != stack.pop():
                return False
        elif token == "OP_CHECKSIG":
            pubKey = RSA.importKey(stack.pop())
            signature: str = stack.pop()
            if not utils.verify(pubKey, msg, signature):
                return False

    return True
Exemplo n.º 32
0
def verifyMultiSig(scriptSig: str, msg: str):
    """Check an m-of-n multisig scriptSig against *msg*.

    Layout (tab-separated, right to left): OP_CHECKMULTISIG, n, the n
    public keys, m, then the signatures.  Returns True when at least m
    signatures verify against the keys, scanning both lists in order.
    """
    tokens: List[str] = scriptSig.split("\t")
    if tokens[-1] != "OP_CHECKMULTISIG":
        return False
    n = int(tokens[-2])
    m = int(tokens[len(tokens) - 3 - n])
    if m > n:
        return False
    publicKeyList: List[str] = [tokens[i]
                                for i in range(len(tokens) - 3,
                                               len(tokens) - 3 - n, -1)]
    signatureList: List[str] = [tokens[i]
                                for i in range(len(tokens) - 3 - n - 1, -1, -1)
                                if tokens[i] != ""]

    print(signatureList)
    if not signatureList:
        return False
    signPtr = pubPtr = count = 0
    while signPtr < len(signatureList) and pubPtr < len(publicKeyList):
        # A failed key only advances the key pointer, mirroring the
        # original's dual-pointer scan.
        if utils.verify(RSA.importKey(publicKeyList[pubPtr]), msg,
                        signatureList[signPtr]) == True:
            count += 1
            signPtr += 1
        pubPtr += 1

    return count >= m
Exemplo n.º 33
0
def access():
    """OAuth-style token endpoint.

    Handles two grant types (the request key is spelled 'grand_type' --
    presumably a typo for 'grant_type', but it is part of the wire format
    and must not be silently renamed):
      * 'password'      -- verify credentials, issue access+refresh tokens.
      * 'refresh_token' -- exchange a refresh token for a new access token.
    Raises exception.LoginFailed on bad credentials or tokens.
    """
    validate('type_validate', request.json)

    if request.json['grand_type'] == 'password':
        validate('password_login', request.json)

        user = verify(request.json)
        if not user:
            raise exception.LoginFailed()

        access_token = TokenStore(user.id, request.json['client_id'], generate_token())
        refresh_token = TokenStore(user.id, request.json['client_id'], generate_token(), token_type='Refresh')

        access_token.save()
        refresh_token.save()

        return jsonify({
            "access_token": access_token.token,
            "refresh_token": refresh_token.token
            })

    elif request.json['grand_type'] == 'refresh_token':
        validate('refresh_login', request.json)
        refresh_token = verify_refresh_token(request.json['refresh_token'])
        if not refresh_token:
            raise exception.LoginFailed()

        # refresh_token here is a (user_id, client_id) pair from the store.
        access_token = TokenStore(refresh_token[0], refresh_token[1], generate_token())

        access_token.save()

        return jsonify({
            "access_token": access_token.token
            })
Exemplo n.º 34
0
def user_problem_solution_through_time(request, student_pk, part_pk):
    """Show how a student's solution to one part evolved over time."""
    student = get_object_or_404(User, pk=student_pk)
    part = get_object_or_404(Part, pk=part_pk)
    course = part.problem.problem_set.course
    attempts = list(
        HistoricalAttempt.objects.filter(part=part, user=student).reverse())
    annotated = append_time_differences_between_attempts(attempts)

    verify(request.user.can_view_course_statistics(course))
    context = {
        "student": student,
        "part": part,
        "course": course,
        "user_part_attempts": annotated,
    }
    return render(
        request, "statistics/user_problem_part_solution_history.html",
        context)
Exemplo n.º 35
0
def course_groups(request, course_pk):
    """
    Main course groups view: lists all current groups and their students;
    users with the right permission can update, delete and create groups.
    """
    course = get_object_or_404(Course, pk=course_pk)
    verify(request.user.can_view_course_groups(course))
    context = {
        "course": course,
        'show_teacher_forms': request.user.can_create_course_groups(course),
    }
    return render(request, "courses/course_groups.html", context)
Exemplo n.º 36
0
def user_problem_solution_at_time(request, historical_attempt_pk):
    """Render the whole-problem solution state at one historical attempt."""
    historical_attempt = get_object_or_404(HistoricalAttempt,
                                           pk=historical_attempt_pk)
    problem = historical_attempt.part.problem
    course = problem.problem_set.course
    student = historical_attempt.user
    parts = get_problem_solve_state_at_time(historical_attempt)
    verify(request.user.can_view_course_statistics(course))
    context = {
        "historical_attempt": historical_attempt,
        "problem": problem,
        "student": student,
        "parts": parts,
        "course": course,
        "show_teacher_forms":
        request.user.can_view_course_statistics(course),
    }
    return render(request, "statistics/solution_at_time.html", context)
Exemplo n.º 37
0
def post_crates():
    """Create a crate (POST) or list the caller's crates (GET, paginated).

    POST requires a verified user and a valid crate body; GET tolerates an
    unverified user (the owner filter then simply matches nothing useful).
    """
    if request.method == "POST":
        # Verify user
        verified = verify(request.headers)
        if verified == -1:
            return create_return(status_401(), 401)

        if not request.data:
            return create_return(status_400(), 400)

        crate_info = crate_information(request.get_json())
        if not crate_info:
            return create_return(status_400(), 400)

        # New crates start with no vinyl and belong to the verified user.
        crate_info["owner"] = verified
        crate_info["vinyl"] = []
        return create_return(
            json.dumps(
                new_object(crate_info, crates, datastore_client,
                           request.url_root)), 201)

    if request.method == "GET":
        #         Verify user but don't return error
        verified = verify(request.headers)
        #         Get crates belonging to this user
        query = datastore_client.query(kind=crates)
        query.add_filter("owner", "=", str(verified))
        collection_size = len(list(query.fetch()))
        # Offset-based pagination driven by the ?offset= query parameter.
        q_offset = int(request.args.get('offset', 0))
        crate_iterator = query.fetch(limit=paginate_limit, offset=q_offset)
        pages = crate_iterator.pages
        results = list(next(pages))
        for result in results:
            # Format Vinyl for Crate
            vinyl_in_crate(result, datastore_client, request.url_root)

        return_info = {"crates": results, "collection_size": collection_size}

        # Emit a "next" link only when more pages remain.
        if crate_iterator.next_page_token:
            next_offset = q_offset + paginate_limit
            next_url = request.base_url + "?offset=" + str(next_offset)
            return_info["next"] = next_url

        return create_return(json.dumps(return_info), 200)
Exemplo n.º 38
0
 def write_file(self, tic_dat, mdb_file_path, allow_overwrite = False):
     """
     write the ticDat data to an Access (mdb) database file
     (doc fix: the original docstring said "SQLite", but the code verifies
     an Access file path and connects via _connection_str)
     :param tic_dat: the data object to write
     :param mdb_file_path: the file path of the Access database to populate
     :param allow_overwrite: boolean - are we allowed to overwrite pre-existing data
     :return:
     caveats : Numbers with absolute values larger than 1e+100 will
               be written as 1e+100 or -1e+100
     """
     msg = []
     if not self.tic_dat_factory.good_tic_dat_object(tic_dat, lambda m : msg.append(m)) :
         raise TicDatError("Not a valid TicDat object for this schema : " + " : ".join(msg))
     verify(not os.path.isdir(mdb_file_path), "A directory is not a valid Access file path")
     if not os.path.exists(mdb_file_path) :
         self.write_schema(mdb_file_path)
     self._check_tables_fields(mdb_file_path, self.tic_dat_factory.all_tables)
     with py.connect(_connection_str(mdb_file_path)) as con:
         for t in self.tic_dat_factory.all_tables:
             if not allow_overwrite :
                 with con.cursor() as cur :
                     cur.execute("Select * from %s"%t)
                     verify(not any(True for _ in cur.fetchall()),
                         "allow_overwrite is False, but there are already data records in %s"%t)
             # Idiom fix: was a conditional expression used purely for its
             # side effect; a plain if-statement is clearer.
             if allow_overwrite:
                 con.cursor().execute("Delete from %s"%t).commit()
             _t = getattr(tic_dat, t)
             if dictish(_t) :
                 primary_keys = tuple(self.tic_dat_factory.primary_key_fields[t])
                 for pk_row, sql_data_row in _t.items() :
                     _items = sql_data_row.items()
                     fields = primary_keys + tuple(x[0] for x in _items)
                     data_row = ((pk_row,) if len(primary_keys)==1 else pk_row) + \
                               tuple(_write_data(x[1]) for x in _items)
                     assert len(data_row) == len(fields)
                     # Idiom fix: renamed from ``str`` (shadowed the builtin).
                     insert_sql = "INSERT INTO %s (%s) VALUES (%s)"%\
                           (t, ",".join(fields), ",".join("?" for _ in fields))
                     con.cursor().execute(insert_sql, data_row).commit()
             else :
                 for sql_data_row in (_t if containerish(_t) else _t()) :
                     insert_sql = "INSERT INTO %s (%s) VALUES (%s)"%(t, ",".join(sql_data_row.keys()),
                       ",".join(["?"]*len(sql_data_row)))
                     con.cursor().execute(insert_sql,tuple(map(_write_data, sql_data_row.values())))
Exemplo n.º 39
0
 def _check_tables_fields(self, mdb_file_path, tables):
     """Verify that every table and each of its fields exists in the file.

     Raises TicDatError when the file cannot be opened or a table is
     missing; field checks go through ``verify``.
     """
     tdf = self.tic_dat_factory
     TDE = TicDatError
     verify(os.path.exists(mdb_file_path), "%s isn't a valid file path"%mdb_file_path)
     try :
         # NOTE(review): this probe connection is never closed -- confirm
         # whether the driver needs an explicit close here.
         py.connect(_connection_str(mdb_file_path))
     except Exception as e:
         raise TDE("Unable to open %s as SQLite file : %s"%(mdb_file_path, e.message))
     with py.connect(_connection_str(mdb_file_path)) as con:
         for table in tables:
           with con.cursor() as cur:
             try :
                 cur.execute("Select * from %s"%table)
             except Exception :
                 # Bug fix: was a bare ``except`` that also swallowed
                 # KeyboardInterrupt / SystemExit.
                 raise TDE("Unable to recognize table %s in SQLite file %s"
                           %(table, mdb_file_path))
             fields = set(_[0].lower() for _ in cur.description)
             for field in tdf.primary_key_fields.get(table, ()) + tdf.data_fields.get(table, ()):
                 verify(field.lower() in fields,
                     "Unable to recognize field %s in table %s for file %s"%
                     (field, table, mdb_file_path))
Exemplo n.º 40
0
 def _check_tables_fields(self, db_file_path, tables):
     """Verify that every table and each of its fields exists in the
     SQLite file; raise TicDatError otherwise.
     """
     tdf = self.tic_dat_factory
     TDE = TicDatError
     verify(os.path.exists(db_file_path), "%s isn't a valid file path"%db_file_path)
     try :
         sql.connect(db_file_path)
     except Exception as e:
         raise TDE("Unable to open %s as SQLite file : %s"%(db_file_path, e.message))
     with sql.connect(db_file_path) as con:
         for table in tables :
             try :
                 con.execute("Select * from [%s]"%table)
             except Exception :
                 # Bug fix: was a bare ``except`` that also swallowed
                 # KeyboardInterrupt / SystemExit.
                 raise TDE("Unable to recognize table %s in SQLite file %s"%
                           (table, db_file_path))
             for field in tdf.primary_key_fields.get(table, ()) + tdf.data_fields.get(table, ()):
                 try :
                     con.execute("Select [%s] from [%s]"%(field,table))
                 except Exception :
                     raise TDE("Unable to recognize field %s in table %s for file %s"%
                               (field, table, db_file_path))
Exemplo n.º 41
0
def about(first=None, last=None):
    """Flask view for the about page.

    GET renders the blank form. POST echoes the submitted first/last name
    back into the template, unless the Clear button was pressed or the name
    fails validation, in which case the blank form is rendered again.
    """
    if request.method == "GET":
        return render_template("about.html")
    button = request.form['b']
    first = request.form['first']
    last = request.form['last']
    valid_name = utils.verify(first, last)
    # Clear button, or an invalid name, resets the form.
    if button == "Clear" or not valid_name:
        return render_template("about.html")
    # Fixed: the original return line mixed a tab with spaces, which raises
    # TabError under Python 3.
    return render_template("about.html", first=first, last=last)
Exemplo n.º 42
0
 def _get_sheets_and_fields(self, xls_file_path, all_tables, row_offsets, headers_present):
     """Open an xls workbook and map table names to their sheets and field indices.

     :param xls_file_path: path of the Excel file to read
     :param all_tables: table names to look for (matched against sheet names
                        case insensitively)
     :param row_offsets: mapping from table name to number of leading rows to skip
     :param headers_present: Boolean - does the first (post-offset) row hold
                             the column headers?
     :return: (sheets, field_indicies) where sheets maps table name -> xlrd
              sheet and field_indicies maps table name -> field index lookup
     :raises TicDatError: on an unopenable file, duplicated sheet names, or
                          missing fields
     """
     try :
         book = xlrd.open_workbook(xls_file_path)
     except Exception as e:
         # use the exception itself, not e.message - the .message attribute
         # was removed in Python 3
         raise TicDatError("Unable to open %s as xls file : %s"%(xls_file_path, e))
     sheets = defaultdict(list)
     # sheet names are matched to table names case-insensitively
     for table, sheet in product(all_tables, book.sheets()) :
         if table.lower() == sheet.name.lower() :
             sheets[table].append(sheet)
     duplicated_sheets = tuple(_t for _t,_s in sheets.items() if len(_s) > 1)
     verify(not duplicated_sheets, "The following sheet names were duplicated : " +
            ",".join(duplicated_sheets))
     sheets = FrozenDict({k:v[0] for k,v in sheets.items() })
     field_indicies, bad_fields = {}, {}
     for table, sheet in sheets.items() :
         field_indicies[table], bad_fields[table] = self._get_field_indicies(
                                             table, sheet, row_offsets[table], headers_present)
     verify(not any(_ for _ in bad_fields.values()),
            "The following field names could not be found : \n" +
            "\n".join("%s : "%t + ",".join(bf) for t,bf in bad_fields.items() if bf))
     return sheets, field_indicies
Exemplo n.º 43
0
def go(
    api: twitter.api.Api, start_user: str
) -> Tuple[List[twitter.models.User], List[twitter.models.User],
           List[twitter.models.User], List[twitter.models.User]]:
    """Collect four descending-sorted lists of low-activity accounts around
    *start_user*: friends with < 500 tweets, friends inactive since before
    2018, followers with < 500 tweets, and followers with < 100 followers.
    """
    # sanity check
    utils.verify(api)
    LOGGER.info(utils.get_rate_limit(api))

    friends = utils.get_friends(api, start_user)
    followers = utils.get_followers(api, start_user)

    def _pick_descending(users, keep, sort_key):
        # Filter with *keep* (progress shown via tqdm), then sort
        # descending by *sort_key*.
        return sorted((u for u in tqdm(users) if keep(u)),
                      key=sort_key, reverse=True)

    LOGGER.info('[friends] finding < 500 tweets accounts')
    following_less_than_500_tweets = _pick_descending(
        friends,
        lambda u: u.statuses_count < 500,
        lambda u: u.statuses_count)

    LOGGER.info('[friends] finding accounts havent tweeted since 2018')
    following_inactive_accounts = _pick_descending(
        friends,
        lambda u: u.status and int(u.status.created_at.split()[-1]) < 2018,
        lambda u: u.status.created_at_in_seconds)

    LOGGER.info('[followers] finding < 500 tweets accounts')
    follower_less_than_500_tweets = _pick_descending(
        followers,
        lambda u: u.statuses_count < 500,
        lambda u: u.statuses_count)

    LOGGER.info('[followers] finding < 100 followers accounts')
    follower_less_than_100_followers = _pick_descending(
        followers,
        lambda u: u.followers_count < 100,
        lambda u: u.followers_count)

    LOGGER.info(utils.get_rate_limit(api))

    return (following_less_than_500_tweets, following_inactive_accounts,
            follower_less_than_500_tweets, follower_less_than_100_followers)
Exemplo n.º 44
0
def fit_classif(X_train, y_train):
    ''' Train the 3 differents RF classifiers. We use a CraterGenerator
    in order to get more images of craters.

    :param X_train: training images (numpy array, indexable by integer list)
    :param y_train: matching crater labels
    :return: (rdf_28, rdf_38, rdf_58) - one classifier per crater size bucket
    '''

    dic = get_dictionnary_craters(X_train, y_train)
    for key in dic:
        if (key - 8) % 10 == 0:
            print('Shape ', key, ':', len(dic[key]), 'craters')

    # Compute the kept indices once: the original evaluated the same list
    # comprehension twice (once per argument) and compared verify()'s
    # result to True instead of using its truthiness.
    kept = [i for i in range(X_train.shape[0]) if verify(y_train[i])]
    j = 0
    for X_, Y_ in CraterGenerator(X_train[kept], y_train[kept]):
        dic2 = get_dictionnary_craters(
            np.array(X_)[:, :, :, 0].reshape((len(X_), 224, 224)), Y_)
        dic = merge_dic(dic, dic2)
        j += 1
        if j == 100:  # cap augmentation at 100 generated batches
            break

    for key in dic:
        if (key - 8) % 10 == 0:
            print('Shape ', key, ':', len(dic[key]), 'craters')

    dic_size_non_crater = get_non_craters(X_train, y_train, None)
    for key in dic_size_non_crater:
        if (key - 8) % 10 == 0:
            print('Shape ', key, ':', len(dic_size_non_crater[key]), 'craters')

    # Pool the 48/58/68 size buckets together under the 58 key.
    dic[58] = dic[48] + dic[58] + dic[68]

    rdf_58 = get_model_by_size(58, dic, dic_size_non_crater, 3)
    rdf_38 = get_model_by_size(38, dic, dic_size_non_crater, 3)
    rdf_28 = get_model_by_size(28, dic, dic_size_non_crater, 3)

    # release the large intermediate dictionaries before returning
    del dic, dic_size_non_crater

    return rdf_28, rdf_38, rdf_58
Exemplo n.º 45
0
 def get_duplicates(self, xls_file_path, row_offsets={}, headers_present = True):
     """
     Find the row counts indexed by primary key for an Xls file for duplicated primary keys
     :param xls_file_path: An Excel file containing sheets whose names match
                           the table names in the schema (non primary key tables ignored).
     :param row_offsets: (optional) A mapping from table names to initial
                         number of rows to skip (non primary key tables ignored)
     :param headers_present: Boolean. Does the first row of data contain the
                             column headers?
     caveats: Missing sheets resolve to an empty table, but missing primary fields
              on matching sheets throw an Exception.
              Sheet names are considered case insensitive.
     :return: A dictionary whose keys are the table names for the primary key tables. Each value
              of the return dictionary is itself a dictionary. The inner dictionary is keyed by the
              primary key values encountered in the table, and the value is the count of records in the
              Excel sheet with this primary key. Row counts smaller than 2 are pruned off,
              as they aren't duplicates
     """
     # note: the {} default is safe here because it's never mutated; a fresh
     # dict is built below instead
     verify(utls.dictish(row_offsets) and
            set(row_offsets).issubset(self.tic_dat_factory.all_tables) and
            all(utls.numericish(x) and (x>=0) for x in row_offsets.values()),
            "row_offsets needs to map from table names to non negative row offset")
     row_offsets = dict({t:0 for t in self.tic_dat_factory.all_tables}, **row_offsets)
     tdf = self.tic_dat_factory
     pk_tables = tuple(t for t,_ in tdf.primary_key_fields.items() if _)
     rtn = {t:defaultdict(int) for t in pk_tables}
     sheets, fieldIndicies = self._get_sheets_and_fields(xls_file_path, pk_tables,
                                     row_offsets, headers_present)
     ho = 1 if headers_present else 0
     for table, sheet in sheets.items() :
         fields = tdf.primary_key_fields[table] + tdf.data_fields.get(table, ())
         indicies = fieldIndicies[table]
         # shortest populated column bounds the usable row count
         table_len = min(len(sheet.col_values(indicies[field])) for field in fields)
         for x in (sheet.row_values(i) for i in range(table_len)[row_offsets[table]+ho:]) :
             rtn[table][self._sub_tuple(tdf.primary_key_fields[table], indicies)(x)] += 1
     # Iterate over a snapshot of the keys: deleting from a dict while
     # iterating its live keys() view raises RuntimeError on Python 3.
     for t in list(rtn.keys()):
         rtn[t] = {k:v for k,v in rtn[t].items() if v > 1}
         if not rtn[t]:
             del(rtn[t])
     return rtn
Exemplo n.º 46
0
def register():
    """Handle a registration post: validate the credentials, encrypt the
    password, then insert a new user or update the existing one."""
    email = request.form['email']
    password = request.form['password']

    if not verify(email, password):
        return render_template('fail.html')

    password = encrypt(password)
    existing_user = mongo.db.users.find_one({'email': email})
    if existing_user:
        # account already present - refresh the stored password
        mongo.db.users.update_one({'email': email}, {'$set': {'password': password}})
    else:
        insert_db(email, password)
    return render_template('inform.html')
Exemplo n.º 47
0
def problem_solution(request, problem_pk, user_pk):
    """Show problem solution.

    Renders the given student's attempts for every part of the problem; the
    requesting user must be allowed to view this student's solution.
    """
    problem = Problem.objects.get(pk=problem_pk)
    student = get_object_or_404(User, pk=user_pk)
    verify(request.user.can_view_problem_solution(problem, student))
    problem_set = problem.problem_set
    attempts = student.attempts.filter(part__problem__id=problem_pk)
    parts = problem.parts.all()

    for part in parts:
        # A missing attempt is expected (the student may not have tried this
        # part). Narrowed from a bare except so system-exiting exceptions
        # still propagate.
        try:
            part.attempt = attempts.get(part=part)
        except Exception:
            part.attempt = None
    return render(
        request, 'problems/solutions.html', {
            'problem': problem,
            'problem_set': problem_set,
            'parts': parts,
            'student': student,
            'is_teacher': request.user.can_edit_problem_set(problem_set),
        })
Exemplo n.º 48
0
def problem_set_detail(request, problem_set_pk):
    """Show a list of all problems in a problem set.

    Teachers see aggregate student success per problem; students see their
    own valid/invalid/empty counts per problem.
    """
    problem_set = get_object_or_404(ProblemSet, pk=problem_set_pk)
    course = problem_set.course
    user = request.user if request.user.is_authenticated() else None
    verify(request.user.can_view_problem_set(problem_set))

    user_attempts = request.user.attempts.filter(part__problem__problem_set__id=problem_set_pk)
    # Materialize the id lists into sets once: "pk in <queryset>" re-runs
    # the query on every membership test, so the loop below was issuing one
    # query per part.
    valid_parts_ids = set(user_attempts.filter(valid=True).values_list('part_id', flat=True))
    invalid_parts_ids = set(user_attempts.filter(valid=False).values_list('part_id', flat=True))

    problem_success = []
    for problem in problem_set.problems.all():
        if request.user.can_edit_course(course):
            success = problem.student_success()
        else:
            success = {
                'valid': 0,
                'invalid': 0,
                'empty': 0
            }
            for part in problem.parts.all():
                if part.pk in valid_parts_ids:
                    success['valid'] += 1
                elif part.pk in invalid_parts_ids:
                    success['invalid'] += 1
                else:
                    success['empty'] += 1
        problem_success.append((problem, success))

    return render(request, 'courses/problem_set_detail.html', {
        'problem_set': problem_set,
        'problems': problem_set.problems.all(),
        'valid_parts_ids': valid_parts_ids,
        'invalid_parts_ids': invalid_parts_ids,
        'problem_success': problem_success,
        'show_teacher_forms': request.user.can_edit_course(course),
        'user': user,
    })
Exemplo n.º 49
0
def problem_solution(request, problem_pk, user_pk):
    """Show problem solution.

    Renders the given student's attempts for every part of the problem; the
    requesting user must be allowed to view this student's solution.
    """
    problem = Problem.objects.get(pk=problem_pk)
    student = get_object_or_404(User, pk=user_pk)
    verify(request.user.can_view_problem_solution(problem, student))
    problem_set = problem.problem_set
    attempts = student.attempts.filter(part__problem__id=problem_pk)
    parts = problem.parts.all()

    for part in parts:
        # A missing attempt is expected (the student may not have tried this
        # part). Narrowed from a bare except so system-exiting exceptions
        # still propagate.
        try:
            part.attempt = attempts.get(part=part)
        except Exception:
            part.attempt = None
    return render(request, 'problems/solutions.html',
                  {
                      'problem': problem,
                      'problem_set': problem_set,
                      'parts': parts,
                      'student': student,
                      'is_teacher': request.user.can_edit_problem_set(problem_set),
                  }
                  )
Exemplo n.º 50
0
 def write_db_data(self, tic_dat, db_file_path, allow_overwrite = False):
     """
     write the ticDat data to an SQLite database file
     :param tic_dat: the data object to write
     :param db_file_path: the file path of the SQLite database to populate
     :param allow_overwrite: boolean - are we allowed to overwrite pre-existing data
     :return:
     caveats : float("inf"), float("-inf") are written as "inf", "-inf"
     """
     msg = []
     if not self.tic_dat_factory.good_tic_dat_object(tic_dat, lambda m : msg.append(m)) :
         raise TicDatError("Not a valid TicDat object for this schema : " + " : ".join(msg))
     verify(not os.path.isdir(db_file_path), "A directory is not a valid SQLite file path")
     if not os.path.exists(db_file_path) :
         self.write_db_schema(db_file_path)
     self._check_tables_fields(db_file_path, self.tic_dat_factory.all_tables)
     # foreign keys disabled so tables can be populated in any order
     with _sql_con(db_file_path, foreign_keys=False) as con:
         for t in self.tic_dat_factory.all_tables:
             verify(allow_overwrite or not any(True for _ in con.execute("Select * from %s"%t)),
                     "allow_overwrite is False, but there are already data records in %s"%t)
             # replaced the "expr if cond else None" statement with a plain if
             if allow_overwrite:
                 con.execute("Delete from %s"%t)
         # renamed the loop variable from "str", which shadowed the builtin
         for sql_stmt in self._get_data_sql(tic_dat):
             con.execute(sql_stmt)
Exemplo n.º 51
0
def verify_email(request, verification_key,
                 template_name='accounts/activate.html',
                 extra_context=None):
    """Activate an account from an emailed verification key and render the result.

    :param verification_key: key from the activation link; normalized to
                             lower case before lookup
    :param template_name: template rendered with the looked-up account
    :param extra_context: optional mapping of extra template variables;
                          callable values are invoked before being added
    """
    verification_key = verification_key.lower() # Normalize before trying anything with it.
    account = verify(verification_key)

    if extra_context is None:
        extra_context = {}
    context = RequestContext(request)
    for key, value in extra_context.items():
        # Fixed: "callable(value) and value() or value" put the original
        # callable into the context whenever value() returned a falsy result.
        context[key] = value() if callable(value) else value
    return render_to_response(template_name,
                              { 'account': account},
                              context_instance=context)
Exemplo n.º 52
0
 def write_file(self, tic_dat, file_path, allow_overwrite = False):
     """
     write the ticDat data to an excel file
     :param tic_dat: the data object to write (typically a TicDat)
     :param file_path: the file path of the excel file to create
     :param allow_overwrite: boolean - are we allowed to overwrite an
                             existing file?
     :return:
     caveats: None may be written out as an empty string. This reflects the behavior of xlwt.
     """
     tdf = self.tic_dat_factory
     msg = []
     if not self.tic_dat_factory.good_tic_dat_object(tic_dat, lambda m : msg.append(m)) :
         raise TicDatError("Not a valid ticDat object for this schema : " + " : ".join(msg))
     verify(not os.path.isdir(file_path), "A directory is not a valid xls file path")
     verify(allow_overwrite or not os.path.exists(file_path),
            "The %s path exists and overwrite is not allowed"%file_path)
     book = xlwt.Workbook()
     def _write_row(sheet, row_num, cells):
         # write one row of cell values starting at column 0
         for col_num, cell in enumerate(cells):
             sheet.write(row_num, col_num, cell)
     # alphabetical order first, then a stable sort so tables with fewer
     # primary key fields come earlier
     for t in sorted(sorted(tdf.all_tables),
                     key=lambda x: len(tdf.primary_key_fields.get(x, ()))):
         sheet = book.add_sheet(t)
         _write_row(sheet, 0, tdf.primary_key_fields.get(t,()) + tdf.data_fields.get(t, ()))
         _t = getattr(tic_dat, t)
         if utls.dictish(_t):
             for row_num, (p_key, data) in enumerate(_t.items()):
                 key_cells = p_key if containerish(p_key) else (p_key,)
                 data_cells = tuple(data[_f] for _f in tdf.data_fields.get(t, ()))
                 _write_row(sheet, row_num + 1, key_cells + data_cells)
         else:
             for row_num, data in enumerate(_t if containerish(_t) else _t()):
                 _write_row(sheet, row_num + 1, tuple(data[_f] for _f in tdf.data_fields[t]))
     if os.path.exists(file_path):
         os.remove(file_path)
     book.save(file_path)
Exemplo n.º 53
0
def course_users(request, course_pk):
    """Show a list of all course students and teachers.

    Annotates each student with the percentage of course parts attempted
    correctly/incorrectly; requires course-edit permission.
    """
    from django.db.models import Count
    course = get_object_or_404(Course, pk=course_pk)
    verify(request.user.can_edit_course(course))
    students = list(course.students.all())
    part_count = Part.objects.filter(problem__problem_set__course=course).count()
    attempts = Attempt.objects.filter(part__problem__problem_set__course=course)
    valid_attempts = attempts.filter(valid=True).values('user').annotate(Count('user'))
    all_attempts = attempts.values('user').annotate(Count('user'))
    def to_dict(attempts):
        # {user pk : attempt count} from a values/annotate queryset
        return {val['user']: val['user__count'] for val in attempts}
    valid_attempts_dict = to_dict(valid_attempts)
    all_attempts_dict = to_dict(all_attempts)
    # Guard against ZeroDivisionError for a course that has no parts yet
    # (the counts are all zero in that case, so any non-zero denominator
    # yields the correct 0%).
    denominator = part_count or 1
    for student in students:
        valid = valid_attempts_dict.get(student.pk, 0)
        total = all_attempts_dict.get(student.pk, 0)
        student.correct_percentage = "{}%".format(100.0 * valid / denominator)
        student.incorrect_percentage = "{}%".format(100.0 * (total - valid) / denominator)
    return render(request, 'courses/course_users.html', {
        'course': course,
        'students': students
    })
Exemplo n.º 54
0
	def do_POST(s):
		"""Respond to a POST request."""
		try:
                        print "handling post!"
			length = int(s.headers.getheader('content-length'))
			data = s.rfile.read(length)
			signed_path_end_record = pickle.loads(data)
			record = signed_path_end_record.get()
			authorized_pubkeys = gRPKITree.get_pub_key_set(record.asn)
			print "validating"
			for pubkey in authorized_pubkeys:
                                if (utils.verify(signed_path_end_record.record, signed_path_end_record.signature, pubkey)):
                                        store(signed_path_end_record)
                                        s.send_response(200)
                                        s.end_headers()
                                        return
                        print "no valid"
		except:
			pass
		s.send_response(500)
		s.end_headers()
Exemplo n.º 55
0
 def write_directory(self, tic_dat, dir_path, allow_overwrite = False, dialect='excel',
                     write_header = True):
     """
     write the ticDat data to a collection of csv files
     :param tic_dat: the data object
     :param dir_path: the directory in which to write the csv files
     :param allow_overwrite: boolean - are we allowed to overwrite existing
                             files?
     :param dialect: the csv dialect. Consult csv documentation for details.
     :param write_header: Boolean. Should the header information be written
                          as the first row?
     :return:
     """
     verify(dialect in csv.list_dialects(), "Invalid dialect %s"%dialect)
     verify(not os.path.isfile(dir_path), "A file is not a valid directory path")
     tdf = self.tic_dat_factory
     msg = []
     if not self.tic_dat_factory.good_tic_dat_object(tic_dat, lambda m : msg.append(m)) :
         raise TicDatError("Not a valid TicDat object for this schema : " + " : ".join(msg))
     if not allow_overwrite:
         # fail before writing anything if any target file already exists
         for t in tdf.all_tables :
             f = os.path.join(dir_path, t + ".csv")
             verify(not os.path.exists(f), "The %s path exists and overwrite is not allowed"%f)
     if not os.path.isdir(dir_path) :
         os.mkdir(dir_path)
     for t in tdf.all_tables :
         f = os.path.join(dir_path, t + ".csv")
         with open(f, 'w') as csvfile:
             writer = csv.DictWriter(csvfile, dialect=dialect, fieldnames=
                     tdf.primary_key_fields.get(t, ()) + tdf.data_fields.get(t, ()))
             # replaced the "expr if cond else None" statement with a plain if
             if write_header:
                 writer.writeheader()
             _t = getattr(tic_dat, t)
             if dictish(_t) :
                 for p_key, data_row in _t.items() :
                     # map the primary key value(s) onto their field names;
                     # loop variable renamed so it no longer shadows the
                     # file path local "f"
                     p_key_dict = {fld:v for fld,v in zip(tdf.primary_key_fields[t],
                                        p_key if containerish(p_key) else (p_key,))}
                     writer.writerow(dict(data_row, **p_key_dict))
             else :
                 for data_row in (_t if containerish(_t) else _t()) :
                     writer.writerow(dict(data_row))
Exemplo n.º 56
0
 def get_duplicates(self, dir_path, dialect='excel', headers_present = True):
     """
     Find the row counts indexed by primary key for duplicated primary key records.
     :param dir_path: the directory containing .csv files.
     :param dialect: the csv dialect. Consult csv documentation for details.
     :param headers_present: Boolean. Does the first row of data contain
                             the column headers?
     :return: A dictionary whose keys are the table names for the primary key tables. Each value
              of the return dictionary is itself a dictionary. The inner dictionary is keyed by the
              primary key values encountered in the table, and the value is the count of records in the
              Excel sheet with this primary key. Row counts smaller than 2 are pruned off, as they
              aren't duplicates
     caveats: Missing files resolve to an empty table, but missing fields (data or primary key) on
              matching files throw an Exception.
     """
     verify(dialect in csv.list_dialects(), "Invalid dialect %s"%dialect)
     verify(os.path.isdir(dir_path), "Invalid directory path %s"%dir_path)
     tdf = self.tic_dat_factory
     # only primary-key tables whose csv file actually exists are scanned
     rtn = {t:defaultdict(int) for t,_ in tdf.primary_key_fields.items()
            if _ and os.path.isfile(os.path.join(dir_path, t + ".csv"))}
     for t in rtn:
         if not headers_present:
             self._verify_fields_by_cnt(dir_path, t, dialect)
         fieldnames=tdf.primary_key_fields.get(t, ()) + tdf.data_fields.get(t, ())
         dict_rdr_args = dict({"fieldnames":fieldnames} if not headers_present else{},
                          **{"dialect": dialect})
         with open(os.path.join(dir_path, t + ".csv")) as csvfile:
             for r in csv.DictReader(csvfile, **dict_rdr_args) :
                 verify(set(r.keys()).issuperset(fieldnames),
                        "Failed to find the required field names for %s"%t)
                 # single-field keys are scalars, multi-field keys are tuples
                 p_key = _try_float(r[tdf.primary_key_fields[t][0]]) \
                         if len(tdf.primary_key_fields[t])==1 else \
                         tuple(_try_float(r[_]) for _ in tdf.primary_key_fields[t])
                 rtn[t][p_key] += 1
     # Iterate over a snapshot of the keys: deleting from a dict while
     # iterating its live keys() view raises RuntimeError on Python 3.
     for t in list(rtn.keys()):
         rtn[t] = {k:v for k,v in rtn[t].items() if v > 1}
         if not rtn[t]:
             del(rtn[t])
     return rtn
Exemplo n.º 57
0
 def _create_tic_dat(self, dir_path, dialect, headers_present):
     """Build a {table name : table data} mapping from the csv files found
     in dir_path, omitting tables whose data resolved to nothing."""
     verify(dialect in csv.list_dialects(), "Invalid dialect %s"%dialect)
     verify(os.path.isdir(dir_path), "Invalid directory path %s"%dir_path)
     tables = {}
     for table in self.tic_dat_factory.all_tables:
         created = self._create_table(dir_path, table, dialect, headers_present)
         if created:
             tables[table] = created
     return tables
Exemplo n.º 58
0
def problem_set_edit(request, problem_set_pk):
    """Download an archive of edit files for a given problem set."""
    problem_set = get_object_or_404(ProblemSet, pk=problem_set_pk)
    # only users allowed to edit the problem set may download its edit files
    verify(request.user.can_edit_problem_set(problem_set))
    name, contents = problem_set.edit_archive(request.user)
    return zip_archive(name, contents)
Exemplo n.º 59
0
def problem_set_toggle_solution_visibility(request, problem_set_pk):
    """Flip whether solutions of the given problem set are visible, then
    redirect back to its course page."""
    target_set = get_object_or_404(ProblemSet, pk=problem_set_pk)
    verify(request.user.can_edit_problem_set(target_set))
    target_set.toggle_solution_visibility()
    return redirect(target_set.course)
Exemplo n.º 60
0
 def get_object(self, *args, **kwargs):
     """Fetch the problem set to delete, verifying that the requesting user
     may edit its course before returning it."""
     problem_set = super(ProblemSetDelete, self).get_object(*args, **kwargs)
     verify(self.request.user.can_edit_course(problem_set.course))
     return problem_set