def run(self):
    dirName = os.path.join(FileSystem.getDataDir(), 'incorrects')
    if not os.path.exists(dirName):
        os.makedirs(dirName)
    logDirName = os.path.join(FileSystem.getLogDir(), 'incorrects')
    if not os.path.exists(logDirName):
        os.makedirs(logDirName)
    logFileName = os.path.join(logDirName, 'log')
    logging.basicConfig(filename=logFileName,
                        format='%(asctime)s %(message)s',
                        datefmt='%m/%d/%Y %I:%M:%S %p',
                        level=logging.INFO)
    logging.info('ListIncorrects()')
    for part in MLClass.allProblems():
        print(part)
        logging.info('Problem ' + str(part))
        incorrectASTs, incorrectSubmissions = self.getASTids(part, 'incorrects')
        correctASTs, correctSubmissions = self.getASTids(part, 'corrects')
        self.writeASTs(incorrectASTs, 'incorrects', part, dirName)
        self.writeASTs(correctASTs, 'corrects', part, dirName)
        self.writeNumSubmissions(incorrectSubmissions, 'incorrects', part, dirName)
        self.writeNumSubmissions(correctSubmissions, 'corrects', part, dirName)
        submissionMap = self.getSubmissionMap(part)
        self.writeUsers(incorrectSubmissions, 'incorrects', part, submissionMap, dirName)
        self.writeUsers(correctSubmissions, 'corrects', part, submissionMap, dirName)
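# The "create the directory if it is missing" pattern above repeats in
# nearly every class in this pipeline. A small hypothetical helper (not
# part of the original code) would remove the duplication:
def ensureDir(path):
    # Create path and any missing parents; no-op if it already exists.
    if not os.path.exists(path):
        os.makedirs(path)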
def testProblem(hwId, partId):
    print('Unit testing homework ' + str(hwId) + ', part ' + str(partId))
    logDir = os.path.join(FileSystem.getLogDir(), 'octave_unittesting')
    if not os.path.exists(logDir):  # ensure the log directory exists
        os.makedirs(logDir)
    logFile = os.path.join(logDir, 'log_' + str(hwId) + '_' + str(partId))
    logging.basicConfig(filename=logFile,
                        format='%(asctime)s %(message)s',
                        datefmt='%m/%d/%Y %I:%M:%S %p',
                        level=logging.DEBUG)
    print('Loading unit testing code')
    tester = UnitTester(hwId, partId)
    print('Loading submissions')
    submissions = Octave.objects.filter(homework_id=hwId, part_id=partId)
    numSubmissions = len(submissions)
    print('Unit testing started.')
    for i, submission in enumerate(submissions):
        # run unit tests for submission i
        print('Running submission ' + str(i) + ' of ' + str(numSubmissions))
        tester.refreshWorkingDir()
        tester.loadCode(submission.code)
        with Timer() as t:
            output, correct = tester.run()
        print('\tRequest took %.03f sec.' % t.interval)
        # commit output to db (currently disabled)
        #submission.output = output
        #submission.correct = correct
        #submission.save()
        logging.debug(report(hwId, partId, i, numSubmissions,
                             correct, submission.id, t.interval))
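# Timer is imported from elsewhere; this file does not define it. Below is
# a minimal sketch matching the interface used above (a context manager
# exposing t.interval in seconds); the project's real Timer may differ.
import time

class Timer(object):
    def __enter__(self):
        # record wall-clock start time on entry
        self.start = time.time()
        return self

    def __exit__(self, excType, excValue, traceback):
        # elapsed wall-clock seconds, read by callers as t.interval
        self.interval = time.time() - self.start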
def run(self):
    logDir = os.path.join(FileSystem.getLogDir(), 'cluster')
    if not os.path.exists(logDir):
        os.makedirs(logDir)
    logFileName = os.path.join(logDir, 'log')
    logging.basicConfig(filename=logFileName,
                        format='%(asctime)s %(message)s',
                        datefmt='%m/%d/%Y %I:%M:%S %p',
                        level=logging.INFO)
    labels = ['corrects', 'incorrects']
    for assn in self.getAllParts():
        for label in labels:
            logging.info('Cluster.run(): (hw,part): ' + str(assn) + ', ' + label)
            G = self.loadGraph(assn, THRESHOLD, label)
            Gfilt = self.filterEdges(G, FILTERTHRESHOLD)
            clusters = self.commonClusters(Gfilt, NUMCLUSTERS)
            for Cidx, C in enumerate(clusters):
                asts = self.sortByConnectivity(C)
                numSubmissions = self.numSubmissionsInCluster(C)
                logging.info('--------------------')
                logging.info('Cluster size: ' + str(len(C.vs)) + ' vertices, '
                             + str(len(C.es)) + ' edges, '
                             + str(numSubmissions) + ' submissions')
                logging.info('Finding nearest corrects.')
                nn = self.findNearestCorrects(assn, asts, NUMASTS, label)
                self.writeResults(assn, Cidx, asts, NUMASTS,
                                  numSubmissions, nn, label)
    logging.info('Done.')
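# filterEdges() is defined elsewhere in this class. Since the clusters are
# igraph graphs (the code above reads C.vs and C.es), one plausible sketch
# follows; the 'weight' attribute name and the keep-if-below-threshold
# semantics are assumptions, not confirmed by this file.
def filterEdgesSketch(G, threshold):
    # keep only edges whose distance/weight is within the threshold
    keep = G.es.select(lambda e: e['weight'] <= threshold)
    return G.subgraph_edges(keep, delete_vertices=False)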
def initializeLog(self):
    logDir = os.path.join(FileSystem.getLogDir(), 'PrecomputeNN')
    if not os.path.exists(logDir):
        os.makedirs(logDir)
    logFileName = os.path.join(logDir, 'log')
    logging.basicConfig(filename=logFileName,
                        format='%(asctime)s %(message)s',
                        datefmt='%m/%d/%Y %I:%M:%S %p',
                        level=logging.INFO)
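# Note (not in the original): logging.basicConfig() only configures the
# root logger the first time it is called in a process; the later calls
# scattered across these classes are silent no-ops, so only the first
# component's log file receives output. A hypothetical per-component
# helper that sidesteps this limitation:
def getComponentLogger(name, logFileName):
    logger = logging.getLogger(name)
    handler = logging.FileHandler(logFileName)
    handler.setFormatter(logging.Formatter('%(asctime)s %(message)s',
                                           '%m/%d/%Y %I:%M:%S %p'))
    logger.addHandler(handler)
    logger.setLevel(logging.INFO)
    return logger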
def __init__(self, part, matrixFile, idMap):
    self.matrixFile = matrixFile
    self.subIdMap = idMap
    self.part = part
    self.stats = {}
    logDir = os.path.join(FileSystem.getLogDir(), 'astnetwork')
    if not os.path.exists(logDir):
        os.makedirs(logDir)
    logFileName = os.path.join(logDir, 'log')
    logging.basicConfig(filename=logFileName,
                        format='%(asctime)s %(message)s',
                        datefmt='%m/%d/%Y %I:%M:%S %p',
                        level=logging.INFO)
    logging.info('AstNetwork Initialization: (hw,part): ' + str(self.part))
def run(self, assn, threshold):
    logDir = os.path.join(FileSystem.getLogDir(), 'MakeGraph')
    if not os.path.exists(logDir):
        os.makedirs(logDir)
    logFileName = os.path.join(logDir, 'log')
    logging.basicConfig(filename=logFileName,
                        format='%(asctime)s %(message)s',
                        datefmt='%m/%d/%Y %I:%M:%S %p',
                        level=logging.INFO)
    labels = ['incorrects', 'corrects']
    for label in labels:
        asts = self.getAsts(assn, label)
        graph = self.getGraph(asts, assn, threshold, label)
        outPath = self.getOutputFilePath(assn, threshold, label)
        logging.info('write graph: ' + outPath)
        graph.save(outPath)
        logging.info('done.')
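# Hypothetical driver for run() above; the real entry point is not shown
# in this file. The class name, the (hw, part) tuple format, and the
# threshold value are all assumptions based on the rest of the pipeline.
# if __name__ == '__main__':
#     MakeGraph().run((1, 1), 10)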
#! /usr/bin/env python
import os
import sys
sys.path.append(os.path.abspath('../../'))

from src.util.RunExternal import RunExternal
from src.util.FileSystem import FileSystem

import MySQLdb as mdb
import json
import logging

ASTCMD = os.path.join(FileSystem.getBinDir(), 'astgen')
TARCMD = 'tar -czf'

tmpdir = os.path.join(FileSystem.getWorkingDir(), 'octtojson')
logdir = os.path.join(FileSystem.getLogDir(), 'octtojson')
datadir = os.path.join(FileSystem.getDataDir(), 'ast')

astfilePrefix = 'ast'
octaveSuffix = '.m'
JSONSuffix = '.json'
codeSuffix = '.code'
mapSuffix = '.map'
tarSuffix = '.tar.gz'

MAXTIME = 10  # in seconds

# Needs to be configured to run properly
REMOTEDB = False
if REMOTEDB:
    dbServer = 'galois.stanford.edu'
    dbUser = '******'
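# RunExternal (imported above) is the project's wrapper for running an
# external command such as astgen under the MAXTIME limit; its exact API
# is not shown in this file. As a rough, hypothetical illustration of that
# behavior using only the standard library:
import subprocess
import time

def runAstgenSketch(octaveFile, jsonFile):
    # Assumed astgen argument order; kill the process past MAXTIME seconds.
    proc = subprocess.Popen([ASTCMD, octaveFile, jsonFile])
    deadline = time.time() + MAXTIME
    while proc.poll() is None:
        if time.time() > deadline:
            proc.kill()
            return None
        time.sleep(0.1)
    return proc.returncode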
import os
import sys
sys.path.append(os.path.abspath('../../'))

from src.util.FileSystem import FileSystem
from src.util.MLClass import MLClass

import MySQLdb as mdb
import logging
from operator import itemgetter
import warnings
import cPickle as pk

# The assumption is that the table that is being written to is cleared!
tardir = os.path.join(FileSystem.getDataDir(), 'ast')
logdir = os.path.join(FileSystem.getLogDir(), 'populatedb')

USESKIPLIST = True
#problemList = [(1,1),(1,2),(1,3),(2,6),(4,4)]
maxEntries = 300
MAXQUEUESIZE = 100

dbread = {}
dbread['Server'] = 'evariste'
dbread['User'] = '******'
dbread['Pwd'] = 'n3gr0n1'
dbread['Name'] = 'codewebdb'
dbread['TableName'] = 'original_submissions'

dbwrite = {}
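# A sketch of how the dbread settings above are presumably consumed (the
# actual connection code lives later in this module). connectRead is a
# hypothetical name, but the MySQLdb keyword arguments are standard.
def connectRead():
    return mdb.connect(host=dbread['Server'], user=dbread['User'],
                       passwd=dbread['Pwd'], db=dbread['Name'])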