Example #1
def main():
    reqArgs = [
        ['o', 'outputFile', 'filename for output CSV of fire x camera matches with available archives'],
    ]
    optionalArgs = [
        ['g', 'longitude', 'longitude of fire', float],
        ['t', 'latitude', 'latitude of fire', float],
        ['s', 'startTime', 'start time of fire'],
    ]
    args = collect_args.collectArgs(reqArgs, optionalArgs=optionalArgs, parentParsers=[goog_helper.getParentParser()])
    googleServices = goog_helper.getGoogleServices(settings, args)
    dbManager = db_manager.DbManager(sqliteFile=settings.db_file)
    outputFile = open(args.outputFile, 'w', newline='')
    outputCsv = csv.writer(outputFile, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)

    camArchives = img_archive.getHpwrenCameraArchives(googleServices['sheet'], settings)

    locMatches = getLocationMatches(dbManager, args.longitude, args.latitude, args.startTime)
    totalMatches = len(locMatches)
    numOutput = 0
    for rowNum,locMatch in enumerate(locMatches):
        timeDT = datetime.datetime.fromtimestamp(locMatch['timestamp'])
        cams = locMatch['cameraids'].split(',')
        availCams = []
        for cameraID in cams:
            if isCamArchiveAvailable(camArchives, cameraID, timeDT):
                availCams.append(cameraID)
        # logging.warning('availCams %d: %s', len(availCams), availCams)
        if availCams:
            outputRow(outputCsv, locMatch, timeDT, availCams)
            numOutput += 1
        if (rowNum % 10) == 0:
            logging.warning('Processing %d of %d, output %d', rowNum, totalMatches, numOutput)

    logging.warning('Processed %d, output %d', totalMatches, numOutput)
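
The helper isCamArchiveAvailable is not shown in this snippet. A minimal sketch of what it might look like, assuming each camArchives entry is a dict with an 'id' field plus hypothetical 'startTime'/'endTime' datetime bounds (the real entries may instead only carry directory lists, as Example #3 suggests):

def isCamArchiveAvailable(camArchives, cameraID, timeDT):
    # Sketch only: the 'startTime'/'endTime' fields are assumptions,
    # not confirmed fields of the real archive entries.
    for arch in camArchives:
        if arch['id'] != cameraID:
            continue
        if arch['startTime'] <= timeDT <= arch['endTime']:
            return True
    return False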
Example #2
def main():
    reqArgs = [
        ["c", "cameraID", "ID (code name) of camera"],
        [
            "s", "startTime",
            "starting date and time in ISO format (e.g., 2019-02-22T14:34:56 in Pacific time zone)"
        ],
    ]
    optArgs = [
        [
            "e", "endTime",
            "ending date and time in ISO format (e.g., 2019-02-22T14:34:56 in Pacific time zone)"
        ],
        [
            "g", "gapMinutes",
            "override default of 1 minute gap between images to download"
        ],
        ["o", "outputDir", "directory to save the output image"],
    ]

    args = collect_args.collectArgs(
        reqArgs,
        optionalArgs=optArgs,
        parentParsers=[goog_helper.getParentParser()])
    googleServices = goog_helper.getGoogleServices(settings, args)
    gapMinutes = int(args.gapMinutes) if args.gapMinutes else 1
    outputDir = args.outputDir if args.outputDir else settings.downloadDir
    startTimeDT = dateutil.parser.parse(args.startTime)
    if args.endTime:
        endTimeDT = dateutil.parser.parse(args.endTime)
    else:
        endTimeDT = startTimeDT
    # the requested range must stay within a single calendar day
    assert startTimeDT.year == endTimeDT.year
    assert startTimeDT.month == endTimeDT.month
    assert startTimeDT.day == endTimeDT.day
    assert endTimeDT >= startTimeDT

    camArchives = img_archive.getHpwrenCameraArchives(googleServices['sheet'],
                                                      settings)
    files = img_archive.getHpwrenImages(googleServices, settings, outputDir,
                                        camArchives, args.cameraID,
                                        startTimeDT, endTimeDT, gapMinutes)
    if files:
        logging.warning('Found %d files.', len(files))
    else:
        logging.error('No matches for camera ID %s', args.cameraID)
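
img_archive.getHpwrenImages is defined elsewhere in the project; the gapMinutes value presumably controls how the requested time range is stepped through. A sketch of that iteration under that assumption (iterateCaptureTimes is a hypothetical name, not the library's actual code):

import datetime

def iterateCaptureTimes(startTimeDT, endTimeDT, gapMinutes):
    # Yield one capture time per gapMinutes interval, inclusive of the start.
    # Assumption: the real getHpwrenImages steps through time similarly.
    current = startTimeDT
    step = datetime.timedelta(minutes=gapMinutes)
    while current <= endTimeDT:
        yield current
        current += step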
Example #3
def main():
    reqArgs = [
        ["o", "outputDir", "local directory to save diff image segments"],
        ["i", "inputDir", "input local directory containing nonSmoke image segments"],
        ["m", "minusMinutes", "subtract images from given number of minutes ago"],
    ]
    optArgs = [
        ["s", "startRow", "starting row"],
        ["e", "endRow", "ending row"],
    ]
    args = collect_args.collectArgs(reqArgs, optionalArgs=optArgs, parentParsers=[goog_helper.getParentParser()])
    minusMinutes = int(args.minusMinutes)
    startRow = int(args.startRow) if args.startRow else 0
    endRow = int(args.endRow) if args.endRow else 1e9

    googleServices = goog_helper.getGoogleServices(settings, args)
    cookieJar = img_archive.loginAjax()
    camArchives = img_archive.getHpwrenCameraArchives(googleServices['sheet'], settings)
    timeGapDelta = datetime.timedelta(seconds=60 * minusMinutes)
    skippedBadParse = []
    skippedArchive = []
    imageFileNames = sorted(os.listdir(args.inputDir))
    for rowIndex, fileName in enumerate(imageFileNames):

        if rowIndex < startRow:
            continue
        if rowIndex > endRow:
            print('Reached end row', rowIndex, endRow)
            break

        if fileName[:3] in ('v2_', 'v3_'):
            continue # skip replicated files
        logging.warning('Processing row %d, file: %s', rowIndex, fileName)
        parsedName = img_archive.parseFilename(fileName)

        if (not parsedName) or parsedName['diffMinutes'] or ('minX' not in parsedName):
            logging.warning('Skipping file with unexpected parsed data: %s, %s', fileName, str(parsedName))
            skippedBadParse.append((rowIndex, fileName, parsedName))
            continue # skip files without crop info or with diff
        matchingCams = list(filter(lambda x: parsedName['cameraID'] == x['id'], camArchives))
        if len(matchingCams) != 1:
            logging.warning('Skipping camera without archive: %d, %s', len(matchingCams), str(matchingCams))
            skippedArchive.append((rowIndex, fileName, matchingCams))
            continue
        archiveDirs = matchingCams[0]['dirs']
        logging.warning('Found directories: %s', archiveDirs)
        earlierImgPath = None
        dt = datetime.datetime.fromtimestamp(parsedName['unixTime'])
        dt -= timeGapDelta
        for dirName in archiveDirs:
            logging.warning('Searching for files in dir %s', dirName)
            imgPaths = img_archive.getFilesAjax(cookieJar, settings.downloadDir, parsedName['cameraID'], dirName, dt, dt, 1)
            if imgPaths:
                earlierImgPath = imgPaths[0]
                break # done
        if not earlierImgPath:
            logging.warning('Skipping image without prior image: %s, %s', str(dt), fileName)
            skippedArchive.append((rowIndex, fileName, dt))
            continue
        logging.warning('Subtracting old image %s', earlierImgPath)
        earlierImg = Image.open(earlierImgPath)
        print('CR', (parsedName['minX'], parsedName['minY'], parsedName['maxX'], parsedName['maxY']))
        croppedEarlyImg = earlierImg.crop((parsedName['minX'], parsedName['minY'], parsedName['maxX'], parsedName['maxY']))

        imgOrig = Image.open(os.path.join(args.inputDir, fileName))
        diffImg = img_archive.diffImages(imgOrig, croppedEarlyImg)
        parsedName['diffMinutes'] = minusMinutes
        diffImgPath = os.path.join(args.outputDir, img_archive.repackFileName(parsedName))
        logging.warning('Saving new image %s', diffImgPath)
        diffImg.save(diffImgPath, format='JPEG')
    logging.warning('Skipped bad parse %d, %s', len(skippedBadParse), str(skippedBadParse))
    logging.warning('Skipped images without archives %d, %s', len(skippedArchive), str(skippedArchive))
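
img_archive.diffImages belongs to the project's library; conceptually it subtracts the earlier frame from the current one so that only recent changes (such as new smoke) remain, which is why the earlier image is first cropped to exactly the same box. A standalone sketch of the idea using PIL (diffImagesSketch is hypothetical; the real function may scale or offset values differently):

from PIL import ImageChops

def diffImagesSketch(imgCurrent, imgEarlier):
    # Per-pixel absolute difference between two same-sized grayscale frames.
    return ImageChops.difference(imgCurrent.convert('L'), imgEarlier.convert('L'))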
Example #4
def main():
    optArgs = [
        ["b", "heartbeat", "filename used for heartbeating check"],
        [
            "c", "collectPositves",
            "collect positive segments for training data"
        ],
        ["d", "imgDirectory", "Name of the directory containing the images"],
        ["t", "time", "Time breakdown for processing images"],
        [
            "m", "minusMinutes",
            "(optional) subtract images from given number of minutes ago"
        ],
        [
            "r", "restrictType",
            "Only process images from cameras of given type"
        ],
        [
            "s", "startTime",
            "(optional) performs search with modifiedTime > startTime"
        ],
        [
            "e", "endTime",
            "(optional) performs search with modifiedTime < endTime"
        ],
    ]
    args = collect_args.collectArgs(
        [],
        optionalArgs=optArgs,
        parentParsers=[goog_helper.getParentParser()])
    minusMinutes = int(args.minusMinutes) if args.minusMinutes else 0
    googleServices = goog_helper.getGoogleServices(settings, args)
    dbManager = db_manager.DbManager(sqliteFile=settings.db_file,
                                     psqlHost=settings.psqlHost,
                                     psqlDb=settings.psqlDb,
                                     psqlUser=settings.psqlUser,
                                     psqlPasswd=settings.psqlPasswd)
    cameras = dbManager.get_sources(activeOnly=True,
                                    restrictType=args.restrictType)
    startTimeDT = dateutil.parser.parse(
        args.startTime) if args.startTime else None
    endTimeDT = dateutil.parser.parse(args.endTime) if args.endTime else None
    timeRangeSeconds = None
    useArchivedImages = False
    camArchives = img_archive.getHpwrenCameraArchives(googleServices['sheet'],
                                                      settings)
    constants = { # dictionary of constants to reduce parameters in various functions
        'args': args,
        'googleServices': googleServices,
        'camArchives': camArchives,
        'dbManager': dbManager,
    }
    if startTimeDT or endTimeDT:
        assert startTimeDT and endTimeDT
        timeRangeSeconds = (endTimeDT - startTimeDT).total_seconds()
        assert timeRangeSeconds > 0
        assert args.collectPositives
        useArchivedImages = True

    deferredImages = []
    processingTimeTracker = initializeTimeTracker()
    graph = tf_helper.load_graph(settings.model_file)
    labels = tf_helper.load_labels(settings.labels_file)
    config = tf.ConfigProto()
    config.gpu_options.per_process_gpu_memory_fraction = 0.1  #hopefully reduces segfaults
    with tf.Session(graph=graph, config=config) as tfSession:
        while True:
            classifyImgPath = None
            timeStart = time.time()
            if useArchivedImages:
                (cameraID, timestamp, imgPath, classifyImgPath) = \
                    getArchivedImages(constants, cameras, startTimeDT, timeRangeSeconds, minusMinutes)
            elif minusMinutes:
                (queueFull, deferredImageInfo) = getDeferrredImgageInfo(
                    deferredImages, processingTimeTracker, minusMinutes,
                    timeStart)
                if not queueFull:  # queue is not full, so add more to queue
                    addToDeferredImages(dbManager, cameras, deferredImages)
                if deferredImageInfo:  # we have a deferred image ready to process, now get latest image and subtract
                    (cameraID, timestamp, imgPath, classifyImgPath) = \
                        genDiffImageFromDeferred(dbManager, cameras, deferredImageInfo, deferredImages, minusMinutes)
                    if not cameraID:
                        continue  # skip to next camera without deleting deferred image which may be reused later
                    os.remove(deferredImageInfo['imgPath'])  # no longer needed
                else:
                    continue  # in diff mode without deferredImage, nothing more to do
            # elif args.imgDirectory:  unused functionality -- to delete?
            #     (cameraID, timestamp, imgPath, md5) = getNextImageFromDir(args.imgDirectory)
            else:  # regular (non diff mode), grab image and process
                (cameraID, timestamp, imgPath,
                 md5) = getNextImage(dbManager, cameras)
                classifyImgPath = imgPath
            if not cameraID:
                continue  # skip to next camera
            timeFetch = time.time()

            segments = segmentAndClassify(classifyImgPath, tfSession, graph,
                                          labels)
            timeClassify = time.time()
            recordFilterReport(constants, cameraID, timestamp, classifyImgPath,
                               imgPath, segments, minusMinutes,
                               googleServices['drive'], useArchivedImages)
            timePost = time.time()
            updateTimeTracker(processingTimeTracker, timePost - timeStart)
            if args.time:
                logging.warning(
                    'Timings: fetch=%.2f, classify=%.2f, post=%.2f',
                    timeFetch - timeStart, timeClassify - timeFetch,
                    timePost - timeClassify)
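
initializeTimeTracker and updateTimeTracker are defined elsewhere in the module. A minimal sketch consistent with how they are called here, assuming the tracker simply accumulates per-iteration processing times (the author's version may track more state):

def initializeTimeTracker():
    # Running total and sample count for per-image processing time (sketch).
    return {'totalTime': 0.0, 'numSamples': 0}

def updateTimeTracker(tracker, seconds):
    # Accumulate one sample; average = totalTime / numSamples.
    tracker['totalTime'] += seconds
    tracker['numSamples'] += 1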
Example #5
def main():
    reqArgs = [
        ["o", "outputDir", "local directory to save images segments"],
        ["i", "inputCsv", "csvfile with contents of Fuego Cropped Images"],
    ]
    optArgs = [
        ["s", "startRow", "starting row"],
        ["e", "endRow", "ending row"],
        ["d", "display", "(optional) specify any value to display image and boxes"],
        ["x", "minDiffX", "(optional) override default minDiffX of 299"],
        ["y", "minDiffY", "(optional) override default minDiffY of 299"],
        ["a", "minArea", "(optional) override default throw away areas < 1/100 of 299x299"],
        ["t", "throwSize", "(optional) override default throw away size of 1000x1000"],
        ["g", "growRatio", "(optional) override default grow ratio of 1.2"],
        ["m", "minusMinutes", "(optional) subtract images from given number of minutes ago"],
    ]
    args = collect_args.collectArgs(reqArgs, optionalArgs=optArgs, parentParsers=[goog_helper.getParentParser()])
    startRow = int(args.startRow) if args.startRow else 0
    endRow = int(args.endRow) if args.endRow else 1e9
    minDiffX = int(args.minDiffX) if args.minDiffX else 299
    minDiffY = int(args.minDiffY) if args.minDiffY else 299
    throwSize = int(args.throwSize) if args.throwSize else 1000
    growRatio = float(args.growRatio) if args.growRatio else 1.2
    minArea = int(args.minArea) if args.minArea else int(299*2.99) # == 299*299/100, i.e., 1% of a 299x299 segment
    minusMinutes = int(args.minusMinutes) if args.minusMinutes else 0

    googleServices = goog_helper.getGoogleServices(settings, args)
    camArchives = img_archive.getHpwrenCameraArchives(googleServices['sheet'], settings)
    if minusMinutes:
        timeGapDelta = datetime.timedelta(seconds = 60*minusMinutes)
    cameraCache = {}
    skippedTiny = []
    skippedHuge = []
    skippedArchive = []
    with open(args.inputCsv) as csvFile:
        csvreader = csv.reader(csvFile)
        for (rowIndex, csvRow) in enumerate(csvreader):
            if rowIndex < startRow:
                continue
            if rowIndex > endRow:
                print('Reached end row', rowIndex, endRow)
                break
            [cropName, minX, minY, maxX, maxY, fileName] = csvRow[:6]
            minX = int(minX)
            minY = int(minY)
            maxX = int(maxX)
            maxY = int(maxY)
            oldCoords = (minX, minY, maxX, maxY)
            if ((maxX - minX) > throwSize) and ((maxY - minY) > throwSize):
                logging.warning('Skip large image: dx=%d, dy=%d, name=%s', maxX - minX, maxY - minY, fileName)
                skippedHuge.append((rowIndex, fileName, maxX - minX, maxY - minY))
                continue
            if ((maxX - minX) * (maxY - minY)) < minArea:
                logging.warning('Skipping tiny image with area: %d, name=%s', (maxX - minX) * (maxY - minY), fileName)
                skippedTiny.append((rowIndex, fileName, (maxX - minX) * (maxY - minY)))
                continue
            # get base image from google drive that was uploaded by sort_images.py

            dirID = getCameraDir(googleServices['drive'], cameraCache, fileName)
            localFilePath = os.path.join(settings.downloadDir, fileName)  # path for the image (may not yet be downloaded)
            print('local', localFilePath)
            if not os.path.isfile(localFilePath):  # skip download if a previous iteration already fetched it
                print('download', fileName)
                # Alternative path (replace Google Drive dependency with HPWREN archives):
                # nameParsed = img_archive.parseFilename(fileName)  # parse file name into dict of parts (name, unixTime, etc.)
                # matchingCams = list(filter(lambda x: nameParsed['cameraID'] == x['id'], camArchives))  # find the camera's archive
                # if len(matchingCams) != 1:  # cannot determine where the image comes from, so cannot use it
                #     logging.warning('Skipping camera without archive: %d, %s', len(matchingCams), str(matchingCams))
                #     skippedArchive.append((rowIndex, fileName, matchingCams))
                #     continue
                # archiveDirs = matchingCams[0]['dirs']
                # logging.warning('Found directories: %s', archiveDirs)
                # time = datetime.datetime.fromtimestamp(nameParsed['unixTime'])
                # for dirName in archiveDirs:  # search the camera's directories for a nearby time
                #     logging.warning('Searching for files in dir %s', dirName)
                #     imgPaths = img_archive.downloadFilesHpwren(settings.downloadDir, nameParsed['cameraID'], dirName, time, time, 1, 0)
                #     if imgPaths:
                #         localFilePath = imgPaths[0]
                #         break
                # if not imgPaths:
                #     logging.warning('Skipping image not found: %s', fileName)
                #     skippedArchive.append((rowIndex, fileName, time))  # record that this image was skipped
                #     continue
                goog_helper.downloadFile(googleServices['drive'], dirID, fileName, localFilePath)
            imgOrig = Image.open(localFilePath)

            # if in subtracted-images mode, download an earlier image and subtract
            if minusMinutes:
                nameParsed = img_archive.parseFilename(fileName)#parses file name into dictionary of parts name,unixtime,etc.
                dt = datetime.datetime.fromtimestamp(nameParsed['unixTime'])
                dt -= timeGapDelta
                earlierImgPath = None
                files = img_archive.getHpwrenImages(googleServices, settings, settings.downloadDir, camArchives, nameParsed['cameraID'], dt, dt, 1)
                if files:
                    earlierImgPath = files[0]
                else:
                    logging.warning('Skipping image without prior image: %s, %s', str(dt), fileName)
                    skippedArchive.append((rowIndex, fileName, dt))
                    continue
                logging.warning('Subtracting old image %s', earlierImgPath)
                earlierImg = Image.open(earlierImgPath)
                diffImg = img_archive.diffImages(imgOrig, earlierImg)
                # realImgOrig = imgOrig # is this useful?
                imgOrig = diffImg
                fileNameParts = os.path.splitext(fileName)
                fileName = str(fileNameParts[0]) + ('_Diff%d' % minusMinutes) + fileNameParts[1]

            # crop the full sized image to show just the smoke, but shifted and flipped
            # shifts and flips increase number of segments for training and also prevent overfitting by perturbing data
            cropCoords = getCropCoords((minX, minY, maxX, maxY), minDiffX, minDiffY, growRatio, (imgOrig.size[0], imgOrig.size[1]))
            for newCoords in cropCoords:
                # XXXX - save work if old=new?
                print('coords old,new', oldCoords, newCoords)
                imgNameNoExt = str(os.path.splitext(fileName)[0])
                cropImgName = imgNameNoExt + '_Crop_' + 'x'.join(map(str, newCoords)) + '.jpg'
                cropImgPath = os.path.join(args.outputDir, cropImgName)
                cropped_img = imgOrig.crop(newCoords)
                cropped_img.save(cropImgPath, format='JPEG')
                flipped_img = cropped_img.transpose(Image.FLIP_LEFT_RIGHT)
                flipImgName = imgNameNoExt + '_Crop_' + 'x'.join(map(str, newCoords)) + '_Flip.jpg'
                flipImgPath = os.path.join(args.outputDir, flipImgName)
                flipped_img.save(flipImgPath, format='JPEG')
            print('Processed row: %s, file: %s' % (rowIndex, fileName))
            if args.display:
                displayCoords = [oldCoords] + cropCoords
                displayImageWithScores(imgOrig, displayCoords)
                imageDisplay(imgOrig)
    logging.warning('Skipped tiny images %d, %s', len(skippedTiny), str(skippedTiny))
    logging.warning('Skipped huge images %d, %s', len(skippedHuge), str(skippedHuge))
    logging.warning('Skipped images without archives %d, %s', len(skippedArchive), str(skippedArchive))
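
getCropCoords is defined elsewhere; per the comment in the loop above, it grows the labeled smoke box and returns several shifted variants to diversify the training crops. A simplified sketch of just the grow-and-clamp step (single centered crop, no shifts; getCropCoordsSketch is hypothetical, not the author's implementation):

def getCropCoordsSketch(coords, minDiffX, minDiffY, growRatio, imgSize):
    # Grow the box by growRatio, enforce minimum dimensions, clamp to the image.
    (minX, minY, maxX, maxY) = coords
    (imgWidth, imgHeight) = imgSize
    sizeX = max(int((maxX - minX) * growRatio), minDiffX)
    sizeY = max(int((maxY - minY) * growRatio), minDiffY)
    centerX = (minX + maxX) // 2
    centerY = (minY + maxY) // 2
    newMinX = max(0, min(centerX - sizeX // 2, imgWidth - sizeX))
    newMinY = max(0, min(centerY - sizeY // 2, imgHeight - sizeY))
    return [(newMinX, newMinY, newMinX + sizeX, newMinY + sizeY)]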
Example #6
def main():
    optArgs = [
        ["b", "heartbeat", "filename used for heartbeating check"],
        ["c", "collectPositves", "collect positive segments for training data"],
        ["d", "imgDirectory", "Name of the directory containing the images"],
        ["t", "time", "Time breakdown for processing images"],
        ["m", "minusMinutes", "(optional) subtract images from given number of minutes ago"],
        ["r", "restrictType", "Only process images from cameras of given type"],
        ["s", "startTime", "(optional) performs search with modifiedTime > startTime"],
        ["e", "endTime", "(optional) performs search with modifiedTime < endTime"],
    ]
    args = collect_args.collectArgs([], optionalArgs=optArgs, parentParsers=[goog_helper.getParentParser()])
    minusMinutes = int(args.minusMinutes) if args.minusMinutes else 0
    googleServices = goog_helper.getGoogleServices(settings, args)
    dbManager = db_manager.DbManager(sqliteFile=settings.db_file,
                                     psqlHost=settings.psqlHost, psqlDb=settings.psqlDb,
                                     psqlUser=settings.psqlUser, psqlPasswd=settings.psqlPasswd)
    tfConfig = tf.ConfigProto()
    tfConfig.gpu_options.per_process_gpu_memory_fraction = 0.1 #hopefully reduces segfaults
    cameras = dbManager.get_sources(activeOnly=True, restrictType=args.restrictType)
    startTimeDT = dateutil.parser.parse(args.startTime) if args.startTime else None
    endTimeDT = dateutil.parser.parse(args.endTime) if args.endTime else None
    timeRangeSeconds = None
    useArchivedImages = False
    camArchives = img_archive.getHpwrenCameraArchives(googleServices['sheet'], settings)
    DetectionPolicyClass = policies.get_policies()[settings.detectionPolicy]
    detectionPolicy = DetectionPolicyClass(settings, args, googleServices, dbManager, tfConfig, camArchives, minusMinutes, useArchivedImages)
    constants = { # dictionary of constants to reduce parameters in various functions
        'args': args,
        'googleServices': googleServices,
        'camArchives': camArchives,
        'dbManager': dbManager,
    }

    if startTimeDT or endTimeDT:
        assert startTimeDT and endTimeDT
        timeRangeSeconds = (endTimeDT-startTimeDT).total_seconds()
        assert timeRangeSeconds > 0
        assert args.collectPositives
        useArchivedImages = True
        random.seed(0) # fixed seed guarantees the same randomized ordering; should make this an optional argument in the future

    processingTimeTracker = initializeTimeTracker()
    while True:
        classifyImgPath = None
        timeStart = time.time()
        if useArchivedImages:
            (cameraID, timestamp, imgPath, classifyImgPath) = \
                getArchivedImages(constants, cameras, startTimeDT, timeRangeSeconds, minusMinutes)
        # elif minusMinutes: to be resurrected using archive functionality
        # elif args.imgDirectory:  unused functionality -- to delete?
        #     (cameraID, timestamp, imgPath, md5) = getNextImageFromDir(args.imgDirectory)
        else: # regular (non diff mode), grab image and process
            (cameraID, timestamp, imgPath, md5) = getNextImage(dbManager, cameras)
            classifyImgPath = imgPath
        if not cameraID:
            continue # skip to next camera
        timeFetch = time.time()

        image_spec = [{
            'path': classifyImgPath,
            'timestamp': timestamp,
            'cameraID': cameraID,
        }]

        detectionResult = detectionPolicy.detect(image_spec)
        timeDetect = time.time()
        if detectionResult['fireSegment']:
            if checkAndUpdateAlerts(dbManager, cameraID, timestamp, detectionResult['driveFileIDs']):
                alertFire(constants, cameraID, imgPath, detectionResult['annotatedFile'], detectionResult['driveFileIDs'], detectionResult['fireSegment'], timestamp)
        deleteImageFiles(imgPath, imgPath, detectionResult['annotatedFile'])
        if args.heartbeat:
            heartBeat(args.heartbeat)

        timePost = time.time()
        updateTimeTracker(processingTimeTracker, timePost - timeStart)
        if args.time:
            if not detectionResult['timeMid']:
                detectionResult['timeMid'] = timeDetect
            logging.warning('Timings: fetch=%.2f, detect0=%.2f, detect1=%.2f, post=%.2f',
                timeFetch-timeStart, detectionResult['timeMid']-timeFetch, timeDetect-detectionResult['timeMid'], timePost-timeDetect)
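
checkAndUpdateAlerts is defined elsewhere; its role is to gate alertFire so that a single fire does not trigger repeated alerts. A minimal in-memory sketch of that deduplication logic, assuming timestamp is a unix time in seconds (the real version presumably persists alert history through dbManager rather than process memory):

recentAlerts = {}  # cameraID -> unix time of last alert (in-memory sketch)

def checkAndUpdateAlertsSketch(cameraID, timestamp, minGapSeconds=3600):
    # Allow an alert only if this camera has not alerted recently.
    lastAlert = recentAlerts.get(cameraID)
    if lastAlert is not None and (timestamp - lastAlert) < minGapSeconds:
        return False
    recentAlerts[cameraID] = timestamp
    return True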