def saveTrajectoryUserTypes(inFilename, outFilename, objects):
    '''Saves the objects to outFilename by copying the corresponding trajectory
    and velocity data from inFilename, and writing the characteristics stored
    in objects (first line of each trajectory block)'''
    infile = storage.openCheck(inFilename)
    outfile = storage.openCheck(outFilename, 'w')

    if (inFilename.find('features') >= 0) or (not infile) or (not outfile):
        return

    lines = storage.getLines(infile)
    objNum = 0  # in inFilename
    while lines != []:
        # find object in objects (index i)
        i = 0
        while (i < len(objects)) and (objects[i].num != objNum):
            i += 1

        if i < len(objects):
            l = lines[0].split(' ')
            l[3] = str(objects[i].userType)
            outfile.write(' '.join(l) + '\n')
            for l in lines[1:]:
                outfile.write(l + '\n')
            outfile.write(utils.delimiterChar + '\n')
        # next object
        objNum += 1
        lines = storage.getLines(infile)

    print('read {0} objects'.format(objNum))
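A minimal usage sketch (the filenames and the user type code are hypothetical): load the objects with loadTrajectories defined below, change their road user type, then rewrite the trajectory file with the updated types.

objects = loadTrajectories('objects.txt')
for obj in objects:
    obj.userType = 2  # hypothetical user type code
saveTrajectoryUserTypes('objects.txt', 'objects-retyped.txt', objects)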
Example #3
def displayModelResults(
    results,
    model=None,
    plotFigures=True,
    filenamePrefix=None,
    figureFileType='pdf',
    text={
        'title-shapiro':
        'Shapiro-Wilk normality test for residuals: {:.2f} (p={:.3f})',
        'true-predicted.xlabel': 'Predicted values',
        'true-predicted.ylabel': 'True values',
        'residuals-predicted.xlabel': 'Predicted values',
        'residuals-predicted.ylabel': 'Residuals'
    }):
    '''Displays the results of a statistical model

    3 graphics: true vs predicted values, residuals vs predicted values,
    and a QQ-plot of the residuals'''
    import statsmodels.api as sm
    print(results.summary())
    shapiroResult = shapiro(results.resid)
    print(shapiroResult)
    if plotFigures:
        fig = plt.figure(figsize=(7, 6.3 * (2 + int(model is not None))))
        if model is not None:
            ax = fig.add_subplot(3, 1, 1)
            plt.plot(results.predict(), model.endog, 'x')
            x = plt.xlim()
            y = plt.ylim()
            plt.plot([max(x[0], y[0]), min(x[1], y[1])],
                     [max(x[0], y[0]), min(x[1], y[1])], 'r')
            #plt.axis('equal')
            if text is not None:
                plt.title(text['title-shapiro'].format(*shapiroResult))
                #plt.title(text['true-predicted.title'])
                plt.xlabel(text['true-predicted.xlabel'])
                plt.ylabel(text['true-predicted.ylabel'])
            fig.add_subplot(3, 1, 2, sharex=ax)
            plt.plot(results.predict(), results.resid, 'x')
            nextSubplotNum = 3
        else:
            fig.add_subplot(2, 1, 1)
            plt.plot(results.predict(), results.resid, 'x')
            nextSubplotNum = 2
        if text is not None:
            if model is None:
                plt.title(text['title-shapiro'].format(*shapiroResult))
            plt.xlabel(text['residuals-predicted.xlabel'])
            plt.ylabel(text['residuals-predicted.ylabel'])
        qqAx = fig.add_subplot(nextSubplotNum, 1, nextSubplotNum)
        sm.qqplot(results.resid, fit=True, line='45', ax=qqAx)
        plt.axis('equal')
        if text is not None and 'qqplot.xlabel' in text:
            plt.xlabel(text['qqplot.xlabel'])
            plt.ylabel(text['qqplot.ylabel'])
        plt.tight_layout()
        if filenamePrefix is not None:
            from storage import openCheck
            out = openCheck(filenamePrefix + '-coefficients.html', 'w')
            out.write(results.summary().as_html())
            plt.savefig(filenamePrefix + '-model-results.' + figureFileType)
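A hedged usage sketch on synthetic data (the variable names are made up, and scipy.stats.shapiro and matplotlib.pyplot are assumed to be imported at module level as in the original file): fit an ordinary least squares model with statsmodels and pass both the fitted results and the model so that all three diagnostic plots are produced.

import numpy as np
import statsmodels.api as sm

x = np.random.rand(100)
X = sm.add_constant(x)                   # intercept + one predictor
y = 2. * x + 0.1 * np.random.randn(100)  # noisy linear response
model = sm.OLS(y, X)
results = model.fit()
displayModelResults(results, model, filenamePrefix='example-model')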
def loadInteractions(filename, nInteractions = -1):
    'Loads interactions from the old UBC traffic event format'
    from events import Interaction 
    from indicators import SeverityIndicator
    file = storage.openCheck(filename)
    if (not file):
        return []

    interactions = []
    interactionNum = 0
    lines = storage.getLines(file)
    while (lines != []) and ((nInteractions<0) or (interactionNum<nInteractions)):
        parsedLine = [int(n) for n in lines[0].split(' ')]
        inter = Interaction(interactionNum, TimeInterval(parsedLine[1],parsedLine[2]), parsedLine[3], parsedLine[4], categoryNum = parsedLine[5])
        
        indicatorFrameNums = [int(n) for n in lines[1].split(' ')]
        for indicatorNum,line in enumerate(lines[2:]):
            values = {}
            for i,v in enumerate([float(n) for n in line.split(' ')]):
                if not ignoredValue[indicatorNum] or v != ignoredValue[indicatorNum]:
                    values[indicatorFrameNums[i]] = v
            inter.addIndicator(SeverityIndicator(severityIndicatorNames[indicatorNum], values, None, mostSevereIsMax[indicatorNum]))

        interactions.append(inter)
        interactionNum+=1
        lines = storage.getLines(file)

    file.close()
    return interactions
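A minimal usage sketch (the filename is hypothetical): load at most ten interactions and report how many were read.

interactions = loadInteractions('interactions.txt', nInteractions=10)
print('loaded {} interactions'.format(len(interactions)))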
def copyTrajectoryFile(keepTrajectory, filenameIn, filenameOut):
    '''Reads filenameIn, keeps the trajectories for which the function keepTrajectory(trajNum, lines) is True
    and writes the result in filenameOut'''
    fileIn = storage.openCheck(filenameIn, 'r', True)
    fileOut = storage.openCheck(filenameOut, "w", True)

    lines = storage.getLines(fileIn)
    trajNum = 0
    while (lines != []):
        if keepTrajectory(trajNum, lines):
            for l in lines:
                fileOut.write(l + "\n")
            fileOut.write(utils.delimiterChar + "\n")
        lines = storage.getLines(fileIn)
        trajNum += 1

    fileIn.close()
    fileOut.close()
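A minimal sketch of a keepTrajectory predicate (the filenames are hypothetical): keep only the trajectories whose data block contains at least five lines, i.e. those with both position and velocity data in the format read by loadTrajectories below.

def keepComplete(trajNum, lines):
    # lines is the block of text lines for one trajectory
    return len(lines) >= 5

copyTrajectoryFile(keepComplete, 'objects.txt', 'objects-filtered.txt')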
def modifyTrajectoryFile(modifyLines, filenameIn, filenameOut):
    '''Reads filenameIn, replaces the lines with the result of modifyLines and writes the result in filenameOut'''
    fileIn = storage.openCheck(filenameIn, 'r', True)
    fileOut = storage.openCheck(filenameOut, "w", True)

    lines = storage.getLines(fileIn)
    trajNum = 0
    while (lines != []):
        modifiedLines = modifyLines(trajNum, lines)
        if modifiedLines:
            for l in modifiedLines:
                fileOut.write(l + "\n")
            fileOut.write(utils.delimiterChar + "\n")
        lines = storage.getLines(fileIn)
        trajNum += 1

    fileIn.close()
    fileOut.close()
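A minimal sketch of a modifyLines callback (the filenames are hypothetical), using the line layout assumed by loadTrajectories below: keep only the characteristics line and the two position coordinate lines of each trajectory; returning an empty list would drop the trajectory entirely.

def keepPositionsOnly(trajNum, lines):
    # first line: characteristics; next two lines: x and y coordinates
    return lines[:3]

modifyTrajectoryFile(keepPositionsOnly, 'objects.txt', 'objects-positions.txt')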
def loadTrajectories(filename, nObjects=-1):
    '''Loads trajectories'''

    file = storage.openCheck(filename)
    if (not file):
        return []

    objects = []
    objNum = 0
    objectType = getFileType(filename)
    lines = storage.getLines(file)
    while (lines != []) and ((nObjects < 0) or (objNum < nObjects)):
        l = lines[0].split(' ')
        parsedLine = [int(n) for n in l[:4]]
        obj = MovingObject(num=objNum,
                           timeInterval=TimeInterval(parsedLine[1],
                                                     parsedLine[2]))
        #add = True
        if len(lines) >= 3:
            obj.positions = Trajectory.load(lines[1], lines[2])
            if len(lines) >= 5:
                obj.velocities = Trajectory.load(lines[3], lines[4])
                if objectType == 'object':
                    obj.userType = parsedLine[3]
                    obj.nObjects = float(l[4])
                    obj.featureNumbers = [int(n) for n in l[5:]]

                    # load contour data if available
                    if len(lines) >= 11:  # contour characteristics, origins and sizes
                        obj.contourType = utils.line2Floats(lines[6])
                        obj.contourOrigins = Trajectory.load(
                            lines[7], lines[8])
                        obj.contourSizes = Trajectory.load(lines[9], lines[10])
                elif objectType == 'prototype':
                    obj.userType = parsedLine[3]
                    obj.nMatchings = int(l[4])

        if len(lines) != 2:
            objects.append(obj)
            objNum += 1
        else:
            print("Error two lines of data for feature %d" % (f.num))

        lines = storage.getLines(file)

    file.close()
    return objects
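A hedged usage sketch (the filename is hypothetical), assuming an object trajectory file: load up to 100 objects and print their numbers and user types.

objects = loadTrajectories('objects.txt', nObjects=100)
print('loaded {} objects'.format(len(objects)))
for obj in objects:
    print(obj.num, obj.userType)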
def loadCollisionPoints(filename, nPoints=-1):
    '''Loads collision points and returns a dict
    with keys as a pair of the numbers of the two interacting objects'''
    file = storage.openCheck(filename)
    if (not file):
        return []

    points = {}
    num = 0
    lines = storage.getLines(file)
    while (lines != []) and ((nPoints < 0) or (num < nPoints)):
        parsedLine = [int(n) for n in lines[0].split(' ')]
        protagonistNums = (parsedLine[0], parsedLine[1])
        points[protagonistNums] = [[float(n) for n in lines[1].split(' ')],
                                   [float(n) for n in lines[2].split(' ')]]

        num += 1
        lines = storage.getLines(file)

    file.close()
    return points
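A minimal usage sketch (the filename is hypothetical): the returned dict maps a pair of object numbers to the lists of x and y coordinates of their collision points.

points = loadCollisionPoints('collision-points.txt')
for (num1, num2), (xCoords, yCoords) in points.items():
    print('objects {} and {}: {} collision points'.format(num1, num2, len(xCoords)))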
Example #13
def prepareRegression(data,
                      dependentVariable,
                      independentVariables,
                      maxCorrelationThreshold,
                      correlations,
                      maxCorrelationP,
                      correlationFunc,
                      stdoutText=[
                          'Removing {} (constant: {})',
                          'Removing {} (correlation {} with {})',
                          'Removing {} (no correlation: {}, p={})'
                      ],
                      saveFiles=False,
                      filenamePrefix=None,
                      latexHeader='',
                      latexTable=None,
                      latexFooter=''):
    '''Removes variables from the candidate independent variables:
    - if a variable is constant, it is removed
    - if two independent variables are correlated (> maxCorrelationThreshold), one of them is removed
    - if an independent variable is not correlated with the dependent variable (p > maxCorrelationP), it is removed
    Returns the remaining variables, mutually uncorrelated and correlated with the dependent variable

    correlationFunc is spearmanr or pearsonr from scipy.stats
    stdoutText lists the three message templates printed to stdout (see default); latexTable must provide matching templates when saveFiles is True

    TODO: pass the dummies for nominal variables and remove if all dummies are correlated, or none is correlated with the dependent variable'''
    from copy import copy
    from pandas import DataFrame
    result = copy(independentVariables)
    table1 = ''
    table2 = {}
    # constant variables
    for var in independentVariables:
        uniqueValues = data[var].unique()
        if (len(uniqueValues) == 1) or (
                len(uniqueValues) == 2 and uniqueValues.dtype != dtype('O')
                and len(data.loc[~isnan(data[var]), var].unique()) == 1):
            print(stdoutText[0].format(var, uniqueValues))
            if saveFiles:
                table1 += latexTable[0].format(var, *uniqueValues)
            result.remove(var)
    # correlated variables
    for v1 in copy(result):
        if v1 in correlations.index:
            for v2 in copy(result):
                if v2 != v1 and v2 in correlations.index:
                    if abs(correlations.loc[v1, v2]) > maxCorrelationThreshold:
                        if v1 in result and v2 in result:
                            if saveFiles:
                                table1 += latexTable[1].format(
                                    v2, v1, correlations.loc[v1, v2])
                            print(stdoutText[1].format(
                                v2, v1, correlations.loc[v1, v2]))
                            result.remove(v2)
    # not correlated with dependent variable
    table2['Correlations'] = []
    table2['Valeurs p'] = []
    for var in copy(result):
        if data.dtypes[var] != dtype('O'):
            cor, p = correlationFunc(data[dependentVariable], data[var])
            if p > maxCorrelationP:
                if saveFiles:
                    table1 += latexTable[2].format(var, cor, p)
                print(stdoutText[2].format(var, cor, p))
                result.remove(var)
            else:
                table2['Correlations'].append(cor)
                table2['Valeurs p'].append(p)

    if saveFiles:
        from storage import openCheck
        out = openCheck(filenamePrefix + '-removed-variables.tex', 'w')
        out.write(latexHeader)
        out.write(table1)
        out.write(latexFooter)
        out.close()
        out = openCheck(filenamePrefix + '-correlations.html', 'w')
        table2['Variables'] = [
            var for var in result if data.dtypes[var] != dtype('O')
        ]
        out.write(
            DataFrame(table2)[['Variables', 'Correlations',
                               'Valeurs p']].to_html(formatters={
                                   'Correlations':
                                   lambda x: '{:.2f}'.format(x),
                                   'Valeurs p':
                                   lambda x: '{:.3f}'.format(x)
                               },
                                                     index=False))
        out.close()
    return result
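A hedged usage sketch with hypothetical column names, assuming data is a pandas DataFrame that is already loaded: compute the pairwise Spearman correlations of the candidate variables, then let prepareRegression filter them against the dependent variable.

from scipy.stats import spearmanr

candidates = ['speed', 'density', 'flow']  # hypothetical independent variables
correlations = data[candidates].corr(method='spearman')
keptVariables = prepareRegression(data, 'nCollisions', candidates,
                                  maxCorrelationThreshold=0.8,
                                  correlations=correlations,
                                  maxCorrelationP=0.05,
                                  correlationFunc=spearmanr)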
Example #14
def kruskalWallis(data,
                  dependentVariable,
                  independentVariable,
                  plotFigure=False,
                  filenamePrefix=None,
                  figureFileType='pdf',
                  saveLatex=False,
                  renameVariables=lambda s: s,
                  kwCaption=u''):
    '''Studies the influence of the (nominal) independent variable on the dependent variable

    Tests whether the conditional distributions are normal
    using the Shapiro-Wilk test (in which case ANOVA could be used),
    then applies the non-parametric Kruskal-Wallis test'''
    tmp = data[data[independentVariable].notnull()]
    independentVariableValues = sorted(
        tmp[independentVariable].unique().tolist())
    if len(independentVariableValues) >= 2:
        if saveLatex:
            from storage import openCheck
            out = openCheck(
                filenamePrefix +
                '-{}-{}.tex'.format(dependentVariable, independentVariable),
                'w')
        for x in independentVariableValues:
            print(
                'Shapiro-Wilk normality test for {} when {}={}: {} obs'.format(
                    dependentVariable, independentVariable, x,
                    len(tmp.loc[tmp[independentVariable] == x,
                                dependentVariable])))
            if len(tmp.loc[tmp[independentVariable] == x,
                           dependentVariable]) >= 3:
                print(shapiro(tmp.loc[tmp[independentVariable] == x,
                                      dependentVariable]))
        if plotFigure:
            plt.figure()
            plt.boxplot([
                tmp.loc[tmp[independentVariable] == x, dependentVariable]
                for x in independentVariableValues
            ])
            #q25, q75 = tmp[dependentVariable].quantile([.25, .75])
            #plt.ylim(ymax = q75+1.5*(q75-q25))
            plt.xticks(range(1,
                             len(independentVariableValues) + 1),
                       independentVariableValues)
            plt.title('{} vs {}'.format(dependentVariable,
                                        independentVariable))
            if filenamePrefix is not None:
                plt.savefig(filenamePrefix + '-{}-{}.{}'.format(
                    dependentVariable, independentVariable, figureFileType))
        table = tmp.groupby([
            independentVariable
        ])[dependentVariable].describe().unstack().sort(['50%'],
                                                        ascending=False)
        table['count'] = table['count'].astype(int)
        testResult = kruskal(*[
            tmp.loc[tmp[independentVariable] == x, dependentVariable]
            for x in independentVariableValues
        ])
        if saveLatex:
            out.write('\\begin{minipage}{\\linewidth}\n' + '\\centering\n' +
                      '\\captionof{table}{' +
                      (kwCaption.format(dependentVariable, independentVariable,
                                        *testResult)) + '}\n' +
                      table.to_latex(float_format=lambda x: '{:.3f}'.format(
                          x)).encode('ascii') + '\n' + '\\end{minipage}\n' +
                      '\\ \\vspace{0.5cm}\n')
        else:
            print(table)
        return testResult
    else:
        return None
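A hedged usage sketch with hypothetical column names: test whether the distribution of a continuous variable differs across the values of a nominal variable, and unpack the scipy.stats.kruskal result.

result = kruskalWallis(data, 'speed', 'userType', plotFigure=True,
                       filenamePrefix='speed-by-usertype')
if result is not None:
    statistic, pValue = result
    print('Kruskal-Wallis H = {:.2f}, p = {:.3f}'.format(statistic, pValue))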
args = parser.parse_args()

# assumes tracking.cfg is in the parent directory to the directory of the traffic intelligence python modules
matchingPaths = [s for s in sys.path if 'traffic-intelligence' in s]
#if len(matchingPaths) > 1:
#    print('Too many matching paths for Traffic Intelligence modules: {}'.format(matchingPaths))
if len(matchingPaths) == 0:
    print('No environment path to Traffic Intelligence modules.\nExiting')
    sys.exit()
else:
    directoryName = matchingPaths[0]
    if directoryName.endswith('/'):
        directoryName = directoryName[:-1]
    if os.path.exists(directoryName + '/../tracking.cfg'
                      ) and not os.path.exists('./tracking.cfg'):
        f = storage.openCheck(directoryName + '/../tracking.cfg')
        out = storage.openCheck('./tracking.cfg', 'w')
        for l in f:
            if 'video-filename' in l:
                tmp = l.split('=')
                out.write(tmp[0] + '= ' + args.videoFilename + '\n')
            elif 'database-filename' in l:
                tmp = l.split('=')
                out.write(tmp[0] + '= ' +
                          utils.removeExtension(args.videoFilename) +
                          '.sqlite\n')
            else:
                out.write(l)
        f.close()
        out.close()
        print(
Example #16
# for i in range(nPoints):
#     imagePoints[i,:] = [iPoints[:,i].tolist()];

# H = cvCreateMat(3, 3, CV_64FC1);

# cvFindHomography(imagePoints, worldPoints, H);

homography = np.array([])
if args.pointCorrespondencesFilename is not None:
    worldPts, videoPts = cvutils.loadPointCorrespondences(
        args.pointCorrespondencesFilename)
    homography, mask = cv2.findHomography(
        videoPts, worldPts)  # method=0, ransacReprojThreshold=3
elif args.tsaiCameraFilename is not None:  # hack using PDTV
    from pdtv import TsaiCamera
    f = storage.openCheck(args.tsaiCameraFilename, quitting=True)
    content = storage.getLines(f)
    cameraData = {}
    for l in content:
        tmp = l.split(':')
        cameraData[tmp[0]] = float(tmp[1].strip().replace(',', '.'))
    camera = TsaiCamera(Cx=cameraData['Cx'],
                        Cy=cameraData['Cy'],
                        Sx=cameraData['Sx'],
                        Tx=cameraData['Tx'],
                        Ty=cameraData['Ty'],
                        Tz=cameraData['Tz'],
                        dx=cameraData['dx'],
                        dy=cameraData['dy'],
                        f=cameraData['f'],
                        k=cameraData['k'],