Example #1
0
def run(students, assignments, args, helpers):
    """Delete a student's copy of a file when it duplicates the source.

    For every (assignment, filename) pair, read the reference copy from
    ``args["sourceStudent"]``; for each student whose copy, after
    ``mangle_text`` normalization, is identical to the reference (or is
    empty), remove the student's file on disk.

    Parameters:
        students: iterable of student identifiers.
        assignments: iterable of assignment objects exposing ``name`` and
            ``args["files"]`` (list of filenames to check).
        args: dict; ``args["sourceStudent"]`` names the reference student.
        helpers: project helper providing ``readFromAssignment`` and
            ``getAssignmentPath``.

    Returns:
        True (always), per the plugin convention in this file.
    """
    source_student = args["sourceStudent"]

    for assignment in assignments:
        files = assignment.args["files"]
        for filename in files:
            # The reference copy does not depend on the student, so read
            # and normalize it once per (assignment, filename) instead of
            # once per student.
            source = helpers.readFromAssignment(source_student,
                                                assignment.name, filename)
            if source is None:
                continue
            source = mangle_text(source)

            for student in students:
                other = helpers.readFromAssignment(student,
                                                   assignment.name,
                                                   filename)
                if other is None:
                    continue
                other = mangle_text(other)

                # Same as the reference after normalization, or empty:
                # this copy carries no student work, so remove it.
                if source == other or other == "":
                    path = helpers.getAssignmentPath(
                        student, assignment.name, filename)
                    # os.remove instead of os.system("rm {}"): no shell,
                    # so paths containing spaces or shell metacharacters
                    # are handled safely.
                    try:
                        os.remove(path)
                    except OSError:
                        # Preserve the original best-effort semantics
                        # (os.system ignored failures too).
                        pass

    return True
Example #2
0
def run(students, assignments, args, helpers):
	"""Delete a student's copy of a file when it duplicates the source.

	For every (assignment, filename) pair, read the reference copy from
	``args["sourceStudent"]``; for each student whose copy, after
	``mangle_text`` normalization, is identical to the reference (or is
	empty), remove the student's file on disk.

	Parameters:
		students: iterable of student identifiers.
		assignments: iterable of assignment objects exposing ``name`` and
			``args["files"]`` (list of filenames to check).
		args: dict; ``args["sourceStudent"]`` names the reference student.
		helpers: project helper providing ``readFromAssignment`` and
			``getAssignmentPath``.

	Returns:
		True (always), per the plugin convention in this file.
	"""
	source_student = args["sourceStudent"]

	for assignment in assignments:
		files = assignment.args["files"]
		for filename in files:
			# The reference copy does not depend on the student, so read
			# and normalize it once per (assignment, filename) instead of
			# once per student.
			source = helpers.readFromAssignment(source_student, assignment.name, filename)
			if source is None:
				continue
			source = mangle_text(source)

			for student in students:
				other = helpers.readFromAssignment(student, assignment.name, filename)
				if other is None:
					continue
				other = mangle_text(other)

				# Same as the reference after normalization, or empty:
				# this copy carries no student work, so remove it.
				if source == other or other == "":
					path = helpers.getAssignmentPath(student, assignment.name, filename)
					# os.remove instead of os.system("rm {}"): no shell,
					# so paths containing spaces or shell metacharacters
					# are handled safely.
					try:
						os.remove(path)
					except OSError:
						# Preserve the original best-effort semantics
						# (os.system ignored failures too).
						pass

	return True
Example #3
0
def tokenize(path):
    """Return a newline-joined token stream for the file at *path*.

    Literal tokens are emitted by spelling, except string literals and
    comments, which are reduced to the first 10 hex characters of the
    SHA-256 of their normalized (``mangle_text``) content.
    """
    def _digest(text):
        # Short, stable fingerprint of the normalized text.
        return sha256(mangle_text(text)).hexdigest()[:10]

    pieces = []
    for tok in Tokenizer(path).raw_tokenize():
        kind_name = tok.kind.name

        if kind_name == "LITERAL":
            spelling = tok.spelling
            if tok.cursor.kind == clang.cindex.CursorKind.STRING_LITERAL:
                # Strings get hashed rather than emitted verbatim.
                spelling = _digest(tok.spelling)
            pieces.append(spelling)

        if kind_name == "COMMENT":
            # Skip the two-character comment delimiter before hashing.
            pieces.append(_digest(tok.spelling[2:]))

    return "\n".join(pieces)
Example #4
0
def tokenize(path):
    """Return a newline-joined token stream for the file at *path*.

    Literal tokens are emitted by spelling, except string literals and
    comments, which are reduced to the first 10 hex characters of the
    SHA-256 of their normalized (``mangle_text``) content.
    """
    def _digest(text):
        # Short, stable fingerprint of the normalized text.
        return sha256(mangle_text(text)).hexdigest()[:10]

    pieces = []
    for tok in Tokenizer(path).raw_tokenize():
        kind_name = tok.kind.name

        if kind_name == "LITERAL":
            spelling = tok.spelling
            if tok.cursor.kind == clang.cindex.CursorKind.STRING_LITERAL:
                # Strings get hashed rather than emitted verbatim.
                spelling = _digest(tok.spelling)
            pieces.append(spelling)

        if kind_name == "COMMENT":
            # Skip the two-character comment delimiter before hashing.
            pieces.append(_digest(tok.spelling[2:]))

    return "\n".join(pieces)