Example #1
def fetchAllCameras(camera_names_to_watch):
    """manages the continual observation of a given set of cameras to watch.
    Args:
        camera_names_to_watch (List): list of camera names that are to be watched by this process
    Returns:
        None
    """
    #please do not remove googleServices definition from this function
    # it is needed for the parallel processing authentication
    googleServices = goog_helper.getGoogleServices(settings, [])
    num_of_watched_cameras = len(camera_names_to_watch)
    dbManager = db_manager.DbManager(sqliteFile=settings.db_file,
                                    psqlHost=settings.psqlHost, psqlDb=settings.psqlDb,
                                    psqlUser=settings.psqlUser, psqlPasswd=settings.psqlPasswd)
    while True:
        tic = time.time()
        temporaryDir = tempfile.TemporaryDirectory()
        for camera_name in camera_names_to_watch:
            try:
                capture_and_record(googleServices, dbManager, temporaryDir.name, camera_name)
                logging.warning('successfully fetched camera %s.', camera_name)
            except Exception as e:
                logging.error('Failed to fetch camera %s. %s', camera_name, str(e))
        try:
            shutil.rmtree(temporaryDir.name)
        except Exception as e:
            logging.error('Failed to delete temporaryDir %s. %s', temporaryDir.name, str(e))
        logging.warning('retrieval of %s cameras took %s seconds.', num_of_watched_cameras, time.time() - tic)
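
A note on the temporary-directory handling above: the explicit shutil.rmtree competes with TemporaryDirectory's own finalizer, which is why the cleanup needs its own try/except. A minimal sketch of the same loop using the context-manager form, which removes the directory automatically even when capture_and_record raises (all names assumed from the example above):

while True:
    tic = time.time()
    # the with-block deletes the directory on exit, so no manual rmtree is needed
    with tempfile.TemporaryDirectory() as tmpDirName:
        for camera_name in camera_names_to_watch:
            try:
                capture_and_record(googleServices, dbManager, tmpDirName, camera_name)
                logging.warning('successfully fetched camera %s.', camera_name)
            except Exception as e:
                logging.error('Failed to fetch camera %s. %s', camera_name, str(e))
    logging.warning('retrieval of %s cameras took %s seconds.', num_of_watched_cameras, time.time() - tic)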
Example #2
def main():
    reqArgs = [
        ['o', 'outputFile', 'filename for output CSV of fire x camera matches with available archives'],
    ]
    optionalArgs = [
        ['g', 'longitude', 'longitude of fire', float],
        ['t', 'latitude', 'latitude of fire', float],
        ['s', 'startTime', 'start time of fire'],
    ]
    args = collect_args.collectArgs(reqArgs, optionalArgs=optionalArgs, parentParsers=[goog_helper.getParentParser()])
    googleServices = goog_helper.getGoogleServices(settings, args)
    dbManager = db_manager.DbManager(sqliteFile=settings.db_file)
    outputFile = open(args.outputFile, 'w', newline='')
    outputCsv = csv.writer(outputFile, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)

    camArchives = img_archive.getHpwrenCameraArchives(googleServices['sheet'], settings)

    locMatches = getLocationMatches(dbManager, args.longitude, args.latitude, args.startTime)
    totalMatches = len(locMatches)
    numOutput = 0
    for rowNum,locMatch in enumerate(locMatches):
        timeDT = datetime.datetime.fromtimestamp(locMatch['timestamp'])
        cams = locMatch['cameraids'].split(',')
        availCams = []
        for cameraID in cams:
            if isCamArchiveAvailable(camArchives, cameraID, timeDT):
                availCams.append(cameraID)
        # logging.warning('availCams %d: %s', len(availCams), availCams)
        if len(availCams) > 0:
            outputRow(outputCsv, locMatch, timeDT, availCams)
            numOutput += 1
        if (rowNum % 10) == 0:
            logging.warning('Processing %d of %d, output %d', rowNum, totalMatches, numOutput)

    logging.warning('Processed %d, output %d', totalMatches, numOutput)
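
isCamArchiveAvailable is not shown in this example; a minimal sketch of what such a check could look like, assuming (hypothetically) that each camArchives entry carries an 'id' plus 'startTime'/'endTime' bounds. The real structure returned by img_archive.getHpwrenCameraArchives may differ:

def isCamArchiveAvailable(camArchives, cameraID, timeDT):
    # Hypothetical sketch inferred from the call site above.
    for archive in camArchives:
        if archive['id'] != cameraID:
            continue
        if archive['startTime'] <= timeDT <= archive['endTime']:
            return True
    return False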
Example #3
def main():
    reqArgs = []
    args = collect_args.collectArgs(
        reqArgs,
        optionalArgs=[],
        parentParsers=[goog_helper.getParentParser()])
    dbManager = db_manager.DbManager(sqliteFile=settings.db_file)
    fires = getUnparsedFires(dbManager)
    parseDates(dbManager, fires)
Example #4
def main():
    reqArgs = [
        ["f", "fileName", "name of file containing fire_coords.py output"],
    ]
    args = collect_args.collectArgs(
        reqArgs,
        optionalArgs=[],
        parentParsers=[goog_helper.getParentParser()])
    dbManager = db_manager.DbManager(sqliteFile=settings.db_file)
    insertFires(dbManager, args.fileName)
Example #5
def main():
    reqArgs = [
        ["o", "operation", "add (includes update), delete, list"],
    ]
    optArgs = [
        ["n", "name", "name (ID) of user"],
        ["m", "email", "email address of user"],
        ["p", "phone", "phone number of user"],
        [
            "s", "startTime",
            "starting date and time in ISO format (e.g., 2019-02-22T14:34:56 in Pacific time zone)"
        ],
        [
            "e", "endTime",
            "ending date and time in ISO format (e.g., 2019-02-22T14:34:56 in Pacific time zone)"
        ],
    ]
    args = collect_args.collectArgs(reqArgs, optionalArgs=optArgs)
    startTime = parseTimeStr(args.startTime) if args.startTime else None
    endTime = parseTimeStr(args.endTime) if args.endTime else None
    dbManager = db_manager.DbManager(sqliteFile=settings.db_file,
                                     psqlHost=settings.psqlHost,
                                     psqlDb=settings.psqlDb,
                                     psqlUser=settings.psqlUser,
                                     psqlPasswd=settings.psqlPasswd)
    notifications = dbManager.getNotifications()
    activeEmails = dbManager.getNotifications(filterActiveEmail=True)
    activePhones = dbManager.getNotifications(filterActivePhone=True)
    logging.warning(
        'Num all notifications: %d.  Active emails: %d.  Active phones: %d',
        len(notifications), len(activeEmails), len(activePhones))
    if args.operation == 'list':
        for n in notifications:
            printNoficiation(n)
        return
    assert args.name
    matching = list(filter(lambda x: x['name'] == args.name, notifications))
    logging.warning('Found %d matching for name %s', len(matching), args.name)
    if matching:
        printNoficiation(matching[0])
    if args.operation == 'add':
        assert startTime and endTime
        assert endTime >= startTime
        assert args.email or args.phone
        if not matching:
            # insert new entry
            dbRow = {
                'name': args.name,
            }
            if args.email:
                dbRow['email'] = args.email
                dbRow['EmailStartTime'] = startTime
                dbRow['EmailEndTime'] = endTime
            if args.phone:
                dbRow['phone'] = args.phone
                dbRow['PhoneStartTime'] = startTime
                dbRow['PhoneEndTime'] = endTime
            dbManager.add_data('notifications', dbRow)
            logging.warning('Successfully added notification for %s',
                            args.name)
        else:
            # update existing entry
            if args.email:
                sqlTemplate = """UPDATE notifications SET email='%s',EmailStartTime=%s,EmailEndTime=%s WHERE name = '%s' """
                sqlStr = sqlTemplate % (args.email, startTime, endTime,
                                        args.name)
                dbManager.execute(sqlStr)
            if args.phone:
                sqlTemplate = """UPDATE notifications SET phone='%s',PhoneStartTime=%s,PhoneEndTime=%s WHERE name = '%s' """
                sqlStr = sqlTemplate % (args.phone, startTime, endTime,
                                        args.name)
                dbManager.execute(sqlStr)
            logging.warning('Successfully updated notification for %s',
                            args.name)
        notifications = dbManager.getNotifications()
        matching = list(filter(lambda x: x['name'] == args.name,
                               notifications))
        printNoficiation(matching[0])
    elif args.operation == 'delete':
        sqlTemplate = """DELETE FROM notifications WHERE name = '%s' """
        sqlStr = sqlTemplate % (args.name)
        dbManager.execute(sqlStr)
    else:
        logging.error('Unexpected operation: %s', args.operation)
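
Note that the UPDATE and DELETE statements above build SQL by %-formatting user-supplied values, which is injection-prone and leaves startTime/endTime unquoted. A hedged sketch of the email update with bound parameters instead, shown against a raw sqlite3 connection because parameter-binding support in this DbManager wrapper is an assumption:

import sqlite3

conn = sqlite3.connect(settings.db_file)
conn.execute(
    "UPDATE notifications SET email=?, EmailStartTime=?, EmailEndTime=? WHERE name = ?",
    (args.email, startTime, endTime, args.name))  # values bound, not string-formatted
conn.commit()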
Example #6
def main():
    """directs the funtionality of the process ie start a cleanup, record all cameras on 2min refresh, record a subset of cameras, manage multiprocessed recording of cameras
    Args:
        -c  cleaning_threshold" (flt): time in hours to store data
        -o  cameras_overide    (str): list of specific cameras to watch
        -a  agents            (int): number of agents to assign for parallelization
    Returns:
        None
    """
    reqArgs = []
    optArgs = [
        ["c", "cleaning_threshold", "time in hours to store data"],
        ["o", "cameras_overide", "specific cameras to watch"],
        ["a", "agents", "number of agents to assign for parallelization"]
    ]
    args = collect_args.collectArgs(reqArgs, optionalArgs=optArgs, parentParsers=[goog_helper.getParentParser()])
    googleServices = goog_helper.getGoogleServices(settings, args)
    dbManager = db_manager.DbManager(sqliteFile=settings.db_file,
                                     psqlHost=settings.psqlHost, psqlDb=settings.psqlDb,
                                     psqlUser=settings.psqlUser, psqlPasswd=settings.psqlPasswd)

    if args.cleaning_threshold:
        cleaning_threshold = float(args.cleaning_threshold)
        cleanup_archive(googleServices, dbManager, cleaning_threshold)
    if args.cameras_overide:
        listofRotatingCameras = list(args.cameras_overide.replace(" ", "").strip('[]').split(','))
    else:
        listofCameras = alertwildfire_API.get_all_camera_info()
        listofRotatingCameras = [camera["name"] for camera in listofCameras if camera["name"][-1] == '2']
    if args.agents:
        agents = int(args.agents)
        # number of cameras per process
        test = "Axis-Briar2"

        temporaryDir = tempfile.TemporaryDirectory()
        trial = list(range(0, 10))
        tic = time.time()
        for x in trial:
            capture_and_record(googleServices, dbManager, temporaryDir.name, test)
        toc = time.time() - tic
        toc_avg = toc / len(trial)
        # target estimate of camera refresh time
        target_refresh_time_per_camera = 12  # secs
        num_cameras_per_process = math.floor(target_refresh_time_per_camera / toc_avg)
        #future ability to re-adjust as needed

        # divvy up the cameras
        camera_bunchs = []
        num_of_processes_needed = math.ceil(len(listofRotatingCameras) / num_cameras_per_process)
        if num_of_processes_needed > agents:
            logging.warning('unable to process all given cameras on this machine with %s agents while maintaining a target refresh rate of %s seconds; please reduce the number of cameras to fewer than %s', agents, target_refresh_time_per_camera, num_cameras_per_process * agents)
            return

        for num in range(0, num_of_processes_needed):
            split_start = num_cameras_per_process*num
            split_stop = num_cameras_per_process*num+num_cameras_per_process
            camera_bunchs.append(listofRotatingCameras[split_start:split_stop])

        with Pool(processes=agents) as pool:
            result = pool.map(fetchAllCameras, camera_bunchs)
            pool.close()
    else:
        fetchAllCameras(listofRotatingCameras)
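
To make the batching arithmetic concrete, a small worked example with an assumed measurement (2.4 seconds per capture is hypothetical):

import math

toc_avg = 2.4                        # hypothetical average seconds per capture from the 10 trials
target_refresh_time_per_camera = 12  # secs, as above
num_cameras_per_process = math.floor(target_refresh_time_per_camera / toc_avg)  # floor(5.0) = 5
num_of_processes_needed = math.ceil(60 / num_cameras_per_process)  # 12 processes for 60 cameras

So with fewer than 12 agents available for those 60 cameras, the function logs the warning above and returns.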
Example #7
# users
import db_manager

DATABASE_NAME = "usuarios.db"

dbmngr = db_manager.DbManager(DATABASE_NAME)


class users:
    def __init__(self):

        self.Id = ""
        self.Pass = ""
        self.Nombre = ""
        self.Apellido = ""
        self.Perfil = ""
        self.Area = ""

    def createUser(self):
        self.Id = int(input("Ingrese el ID: "))
        self.Pass = input("Ingrese el Pass: ")
        self.Nombre = input("Ingrese el Nombre: ")
        self.Apellido = input("Ingrese el Apellido: ")
        self.Perfil = input("Ingrese el Perfil: ")
        self.Area = input("Ingrese el Area: ")

        dbmngr.connectDB()
        conexion = dbmngr.conn
        cursor = conexion.cursor()
        cursor.execute(
            "Insert into UsersTable (id, password, nombre, apellidos, perfil, area) values (?,?,?,?,?,?)",
            (self.Id, self.Pass, self.Nombre, self.Apellido, self.Perfil, self.Area))
        conexion.commit()  # snippet was truncated here; the bound values and commit are assumed
Example #8
def main():
    reqArgs = [
        ["m", "mode", "add, delete, enable, disable, stats, or list"],
    ]
    optArgs = [
        ["c", "cameraID", "ID of the camera (e.g., mg-n-mobo-c)"],
        ["u", "url", "url to get images from camera"],
    ]
    args = collect_args.collectArgs(reqArgs, optionalArgs=optArgs)
    if settings.db_file:
        logging.warning('using sqlite %s', settings.db_file)
        dbManager = db_manager.DbManager(sqliteFile=settings.db_file)
    else:
        logging.warning('using postgres %s', settings.psqlHost)
        dbManager = db_manager.DbManager(psqlHost=settings.psqlHost,
                                         psqlDb=settings.psqlDb,
                                         psqlUser=settings.psqlUser,
                                         psqlPasswd=settings.psqlPasswd)

    cameraInfos = dbManager.get_sources(activeOnly=False)
    logging.warning('Num all cameras: %d', len(cameraInfos))
    logging.warning(
        'Num active cameras: %d',
        len(list(filter(lambda x: x['dormant'] == 0, cameraInfos))))
    if args.mode == 'list':
        logging.warning('All cameras: %s',
                        list(map(lambda x: x['name'], cameraInfos)))
        return
    matchingCams = list(
        filter(lambda x: x['name'] == args.cameraID, cameraInfos))
    logging.warning('Found %d matching cams for ID %s', len(matchingCams),
                    args.cameraID)

    if args.mode == 'add':
        if len(matchingCams) != 0:
            logging.error('Camera with ID %s already exists: %s',
                          args.cameraID, matchingCams)
            exit(1)
        dbRow = {
            'name': args.cameraID,
            'url': args.url,
            'dormant': 0,
            'randomID': random.random(),
            'last_date': datetime.datetime.now().isoformat()
        }
        dbManager.add_data('sources', dbRow)
        logging.warning('Successfully added camera %s', args.cameraID)
        return

    if len(matchingCams) != 1:
        logging.error('Cannot find camera with ID %s: %s', args.cameraID,
                      matchingCams)
        exit(1)
    camInfo = matchingCams[0]
    logging.warning('Cam details: %s', camInfo)

    if args.mode == 'del':
        sqlTemplate = """DELETE FROM sources WHERE name = '%s' """
        execCameraSql(dbManager, sqlTemplate, args.cameraID, isQuery=False)
        return

    if args.mode == 'enable':
        if camInfo['dormant'] == 0:
            logging.error('Camera already enabled: dormant=%d',
                          camInfo['dormant'])
            exit(1)
        sqlTemplate = """UPDATE sources SET dormant=0 WHERE name = '%s' """
        execCameraSql(dbManager, sqlTemplate, args.cameraID, isQuery=False)
        return

    if args.mode == 'disable':
        if camInfo['dormant'] == 1:
            logging.error('Camera already disabled: dormant=%d',
                          camInfo['dormant'])
            exit(1)
        sqlTemplate = """UPDATE sources SET dormant=1 WHERE name = '%s' """
        execCameraSql(dbManager, sqlTemplate, args.cameraID, isQuery=False)
        return

    if args.mode == 'stats':
        sqlTemplate = """SELECT max(timestamp) as maxtime FROM scores WHERE CameraName = '%s' """
        dbResult = execCameraSql(dbManager,
                                 sqlTemplate,
                                 args.cameraID,
                                 isQuery=True)
        logging.warning('Most recent image scanned: %s', getTime(dbResult))
        sqlTemplate = """SELECT max(timestamp) as maxtime FROM detections WHERE CameraName = '%s' """
        dbResult = execCameraSql(dbManager,
                                 sqlTemplate,
                                 args.cameraID,
                                 isQuery=True)
        logging.warning('Most recent smoke detection: %s', getTime(dbResult))
        sqlTemplate = """SELECT max(timestamp) as maxtime FROM alerts WHERE CameraName = '%s' """
        dbResult = execCameraSql(dbManager,
                                 sqlTemplate,
                                 args.cameraID,
                                 isQuery=True)
        logging.warning('Most recent smoke alert: %s', getTime(dbResult))
        return

    logging.error('Unexpected mode: %s', args.mode)
    exit(1)
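
execCameraSql is not shown in this example; a minimal sketch inferred from the call sites above, where dbManager.query is an assumed method name for the read path (dbManager.execute does appear in the other examples):

def execCameraSql(dbManager, sqlTemplate, cameraID, isQuery):
    # Hypothetical sketch: substitute the camera ID into the template and
    # either run a query (returning rows) or a plain statement.
    sqlStr = sqlTemplate % (cameraID)
    if isQuery:
        return dbManager.query(sqlStr)
    dbManager.execute(sqlStr)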
Example #9
"""

import os
import sys
fuegoRoot = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, os.path.join(fuegoRoot, 'lib'))
sys.path.insert(0, fuegoRoot)
import settings
settings.fuegoRoot = fuegoRoot
import db_manager

import datetime
import ast

fileName = '../cameras-hpwren.txt'
manager = db_manager.DbManager(
    os.path.join(settings.fuegoRoot, 'resources/local.db'))

lineNumber = 1
skipped = []
with open(fileName, 'r') as myfile:
    for line in myfile:
        # print("raw", line)
        parsed = ast.literal_eval(line)
        # print("parsed", parsed)
        parsed.pop('urls', None)
        # print("parsed2", lineNumber, parsed)
        lineNumber += 1
        manager.add_data('cameras', parsed)

print(skipped)
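
The skipped list above is printed but never populated. A hedged sketch of the loop that actually records lines that fail to parse or insert (same names as above):

with open(fileName, 'r') as myfile:
    for line in myfile:
        try:
            parsed = ast.literal_eval(line)
            parsed.pop('urls', None)
            manager.add_data('cameras', parsed)
        except Exception as e:
            # record the offending line number instead of aborting the import
            skipped.append((lineNumber, str(e)))
        lineNumber += 1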
Example #10
def main():
    """Directs the functionality of the process, i.e., start a cleanup, record all cameras on a 2-minute refresh, record a subset of cameras, or manage multiprocessed recording of cameras.
    Args:
        -c  cleaning_threshold (float): time in hours to store data
        -o  cameras_overide    (str): list of specific cameras to watch
        -a  agents             (int): number of agents to assign for parallelization
        -f  full_system        (bool): monitor full system with as many agents as needed
    Returns:
        None
    """
    reqArgs = []
    optArgs = [
        ["c", "cleaning_threshold", "time in hours to store data"],
        ["o", "cameras_overide", "specific cameras to watch"],
        ["a", "agents", "number of agents to assign for parallelization"],
        [
            "f", "full_system",
            "toggle to cover all of alert wildfire with unrestrained parallelization"
        ]
    ]
    args = collect_args.collectArgs(
        reqArgs,
        optionalArgs=optArgs,
        parentParsers=[goog_helper.getParentParser()])
    googleServices = goog_helper.getGoogleServices(settings, args)
    dbManager = db_manager.DbManager(sqliteFile=settings.db_file,
                                     psqlHost=settings.psqlHost,
                                     psqlDb=settings.psqlDb,
                                     psqlUser=settings.psqlUser,
                                     psqlPasswd=settings.psqlPasswd)

    if args.cleaning_threshold:
        cleaning_threshold = float(args.cleaning_threshold)
        cleanup_archive(googleServices, dbManager, cleaning_threshold)
    if args.cameras_overide:
        listofRotatingCameras = list(
            args.cameras_overide.replace(" ", "").strip('[]').split(','))
    else:
        listofCameras = alertwildfire_API.get_all_camera_info()
        listofRotatingCameras = [
            camera["name"] for camera in listofCameras
            if (camera["name"][-1] == '2')
        ]
    if args.agents:
        agents = int(args.agents)
        # number of cameras per process

        toc_avg = test_System_response_time(googleServices,
                                            dbManager,
                                            trial_length=10)
        # target estimate of camera refresh time
        target_refresh_time_per_camera = 12  #secs
        num_cameras_per_process = math.floor(target_refresh_time_per_camera /
                                             toc_avg)
        #future ability to re-adjust as needed

        # divvy up the cameras
        camera_bunchs = []
        num_of_processes_needed = math.ceil(
            len(listofRotatingCameras) / num_cameras_per_process)
        if num_of_processes_needed > agents:
            logging.warning(
                'unable to process all given cameras on this machine with %s agents and maintain a target refresh rate of %s seconds, please reduce number of cameras to less than %s or increase number of agents to %s',
                agents, target_refresh_time_per_camera,
                num_cameras_per_process * agents, num_of_processes_needed)
            return
        num_cameras_per_process = math.floor(
            len(listofRotatingCameras) / agents)
        for num in range(0, num_of_processes_needed):
            split_start = num_cameras_per_process * num
            split_stop = num_cameras_per_process * num + num_cameras_per_process
            camera_bunchs.append(listofRotatingCameras[split_start:split_stop])

        with Pool(processes=agents) as pool:
            result = pool.map(fetchAllCameras, camera_bunchs)
            pool.close()
    else:
        if args.full_system:
            response_time_per_camera = test_System_response_time(
                googleServices, dbManager, trial_length=10)
            listofCameras = alertwildfire_API.get_all_camera_info()
            target_refresh_time_per_camera, listofTargetCameras, num_of_processes_needed, num_cameras_per_process, num_of_agents_needed = {}, {}, {}, {}, 0
            # target estimate of camera refresh time
            target_refresh_time_per_camera["rotating"] = 12  #secs
            target_refresh_time_per_camera["stationary"] = 60  #secs
            #separation of data by type
            listofTargetCameras["rotating"] = [
                camera["name"] for camera in listofCameras
                if (camera["name"][-1] == '2')
            ]
            listofTargetCameras["stationary"] = [
                camera["name"] for camera in listofCameras
                if (camera["name"][-1] != '2')
            ]
            camera_bunchs = []
            for type in ["rotating", "stationary"]:
                num_cameras_per_process[type] = math.floor(
                    target_refresh_time_per_camera[type] /
                    response_time_per_camera)
                # divvy up rotating and stationary cameras to maximize efficiency
                num_of_processes_needed[type] = math.ceil(
                    len(listofTargetCameras[type]) /
                    num_cameras_per_process[type])
                num_cameras_per_process[type] = math.floor(
                    len(listofTargetCameras[type]) /
                    num_of_processes_needed[type])
                num_of_agents_needed += num_of_processes_needed[type]
                for num in range(0, num_of_processes_needed[type]):
                    split_start = num_cameras_per_process[type] * num
                    split_stop = num_cameras_per_process[
                        type] * num + num_cameras_per_process[type]
                    camera_bunchs.append(
                        listofTargetCameras[type][split_start:split_stop])

            with Pool(processes=num_of_agents_needed) as pool:
                result = pool.map(fetchAllCameras, camera_bunchs)
                pool.close()

        else:
            fetchAllCameras(listofRotatingCameras)
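
The split_start/split_stop loops above (here and in Example #6) are the usual list-chunking pattern; a generic helper equivalent to them, offered as a sketch rather than the module's own code:

def chunk_cameras(camera_names, per_process):
    # Split camera_names into consecutive slices of at most per_process names.
    return [camera_names[i:i + per_process]
            for i in range(0, len(camera_names), per_process)]

# e.g. camera_bunchs = chunk_cameras(listofRotatingCameras, num_cameras_per_process)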
Example #11
def main():
    optArgs = [
        ["b", "heartbeat", "filename used for heartbeating check"],
        [
            "c", "collectPositves",
            "collect positive segments for training data"
        ],
        ["d", "imgDirectory", "Name of the directory containing the images"],
        ["t", "time", "Time breakdown for processing images"],
        [
            "m", "minusMinutes",
            "(optional) subtract images from given number of minutes ago"
        ],
        [
            "r", "restrictType",
            "Only process images from cameras of given type"
        ],
        [
            "s", "startTime",
            "(optional) performs search with modifiedTime > startTime"
        ],
        [
            "e", "endTime",
            "(optional) performs search with modifiedTime < endTime"
        ],
    ]
    args = collect_args.collectArgs(
        [],
        optionalArgs=optArgs,
        parentParsers=[goog_helper.getParentParser()])
    minusMinutes = int(args.minusMinutes) if args.minusMinutes else 0
    googleServices = goog_helper.getGoogleServices(settings, args)
    dbManager = db_manager.DbManager(sqliteFile=settings.db_file,
                                     psqlHost=settings.psqlHost,
                                     psqlDb=settings.psqlDb,
                                     psqlUser=settings.psqlUser,
                                     psqlPasswd=settings.psqlPasswd)
    cameras = dbManager.get_sources(activeOnly=True,
                                    restrictType=args.restrictType)
    startTimeDT = dateutil.parser.parse(
        args.startTime) if args.startTime else None
    endTimeDT = dateutil.parser.parse(args.endTime) if args.endTime else None
    timeRangeSeconds = None
    useArchivedImages = False
    camArchives = img_archive.getHpwrenCameraArchives(googleServices['sheet'],
                                                      settings)
    constants = { # dictionary of constants to reduce parameters in various functions
        'args': args,
        'googleServices': googleServices,
        'camArchives': camArchives,
        'dbManager': dbManager,
    }
    if startTimeDT or endTimeDT:
        assert startTimeDT and endTimeDT
        timeRangeSeconds = (endTimeDT - startTimeDT).total_seconds()
        assert timeRangeSeconds > 0
        assert args.collectPositves
        useArchivedImages = True

    deferredImages = []
    processingTimeTracker = initializeTimeTracker()
    graph = tf_helper.load_graph(settings.model_file)
    labels = tf_helper.load_labels(settings.labels_file)
    config = tf.ConfigProto()
    config.gpu_options.per_process_gpu_memory_fraction = 0.1  #hopefully reduces segfaults
    with tf.Session(graph=graph, config=config) as tfSession:
        while True:
            classifyImgPath = None
            timeStart = time.time()
            if useArchivedImages:
                (cameraID, timestamp, imgPath, classifyImgPath) = \
                    getArchivedImages(constants, cameras, startTimeDT, timeRangeSeconds, minusMinutes)
            elif minusMinutes:
                (queueFull, deferredImageInfo) = getDeferrredImgageInfo(
                    deferredImages, processingTimeTracker, minusMinutes,
                    timeStart)
                if not queueFull:  # queue is not full, so add more to queue
                    addToDeferredImages(dbManager, cameras, deferredImages)
                if deferredImageInfo:  # we have a deferred image ready to process, now get latest image and subtract
                    (cameraID, timestamp, imgPath, classifyImgPath) = \
                        genDiffImageFromDeferred(dbManager, cameras, deferredImageInfo, deferredImages, minusMinutes)
                    if not cameraID:
                        continue  # skip to next camera without deleting deferred image which may be reused later
                    os.remove(deferredImageInfo['imgPath'])  # no longer needed
                else:
                    continue  # in diff mode without deferredImage, nothing more to do
            # elif args.imgDirectory:  unused functionality -- to delete?
            #     (cameraID, timestamp, imgPath, md5) = getNextImageFromDir(args.imgDirectory)
            else:  # regular (non diff mode), grab image and process
                (cameraID, timestamp, imgPath,
                 md5) = getNextImage(dbManager, cameras)
                classifyImgPath = imgPath
            if not cameraID:
                continue  # skip to next camera
            timeFetch = time.time()

            segments = segmentAndClassify(classifyImgPath, tfSession, graph,
                                          labels)
            timeClassify = time.time()
            recordFilterReport(constants, cameraID, timestamp, classifyImgPath,
                               imgPath, segments, minusMinutes,
                               googleServices['drive'], useArchivedImages)
            timePost = time.time()
            updateTimeTracker(processingTimeTracker, timePost - timeStart)
            if args.time:
                logging.warning(
                    'Timings: fetch=%.2f, classify=%.2f, post=%.2f',
                    timeFetch - timeStart, timeClassify - timeFetch,
                    timePost - timeClassify)
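
tf.ConfigProto and tf.Session are TensorFlow 1.x APIs; under TensorFlow 2 the same session setup would need the compat layer, roughly as follows (graph as loaded by tf_helper.load_graph above):

import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()

config = tf.ConfigProto()
config.gpu_options.per_process_gpu_memory_fraction = 0.1  # hopefully reduces segfaults
with tf.Session(graph=graph, config=config) as tfSession:
    pass  # classification loop as in the example above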
Example #12
def main():
    optArgs = [
        ["b", "heartbeat", "filename used for heartbeating check"],
        ["c", "collectPositves", "collect positive segments for training data"],
        ["d", "imgDirectory", "Name of the directory containing the images"],
        ["t", "time", "Time breakdown for processing images"],
        ["m", "minusMinutes", "(optional) subtract images from given number of minutes ago"],
        ["r", "restrictType", "Only process images from cameras of given type"],
        ["s", "startTime", "(optional) performs search with modifiedTime > startTime"],
        ["e", "endTime", "(optional) performs search with modifiedTime < endTime"],
    ]
    args = collect_args.collectArgs([], optionalArgs=optArgs, parentParsers=[goog_helper.getParentParser()])
    minusMinutes = int(args.minusMinutes) if args.minusMinutes else 0
    googleServices = goog_helper.getGoogleServices(settings, args)
    dbManager = db_manager.DbManager(sqliteFile=settings.db_file,
                                    psqlHost=settings.psqlHost, psqlDb=settings.psqlDb,
                                    psqlUser=settings.psqlUser, psqlPasswd=settings.psqlPasswd)
    tfConfig = tf.ConfigProto()
    tfConfig.gpu_options.per_process_gpu_memory_fraction = 0.1 #hopefully reduces segfaults
    cameras = dbManager.get_sources(activeOnly=True, restrictType=args.restrictType)
    startTimeDT = dateutil.parser.parse(args.startTime) if args.startTime else None
    endTimeDT = dateutil.parser.parse(args.endTime) if args.endTime else None
    timeRangeSeconds = None
    useArchivedImages = False
    camArchives = img_archive.getHpwrenCameraArchives(googleServices['sheet'], settings)
    DetectionPolicyClass = policies.get_policies()[settings.detectionPolicy]
    detectionPolicy = DetectionPolicyClass(settings, args, googleServices, dbManager, tfConfig, camArchives, minusMinutes, useArchivedImages)
    constants = { # dictionary of constants to reduce parameters in various functions
        'args': args,
        'googleServices': googleServices,
        'camArchives': camArchives,
        'dbManager': dbManager,
    }

    if startTimeDT or endTimeDT:
        assert startTimeDT and endTimeDT
        timeRangeSeconds = (endTimeDT-startTimeDT).total_seconds()
        assert timeRangeSeconds > 0
        assert args.collectPositves
        useArchivedImages = True
        random.seed(0) # fixed seed guarantees same randomized ordering.  Should make this optional argument in future

    processingTimeTracker = initializeTimeTracker()
    while True:
        classifyImgPath = None
        timeStart = time.time()
        if useArchivedImages:
            (cameraID, timestamp, imgPath, classifyImgPath) = \
                getArchivedImages(constants, cameras, startTimeDT, timeRangeSeconds, minusMinutes)
        # elif minusMinutes: to be resurrected using archive functionality
        # elif args.imgDirectory:  unused functionality -- to delete?
        #     (cameraID, timestamp, imgPath, md5) = getNextImageFromDir(args.imgDirectory)
        else: # regular (non diff mode), grab image and process
            (cameraID, timestamp, imgPath, md5) = getNextImage(dbManager, cameras)
            classifyImgPath = imgPath
        if not cameraID:
            continue # skip to next camera
        timeFetch = time.time()

        image_spec = [{}]
        image_spec[-1]['path'] = classifyImgPath
        image_spec[-1]['timestamp'] = timestamp
        image_spec[-1]['cameraID'] = cameraID

        detectionResult = detectionPolicy.detect(image_spec)
        timeDetect = time.time()
        if detectionResult['fireSegment']:
            if checkAndUpdateAlerts(dbManager, cameraID, timestamp, detectionResult['driveFileIDs']):
                alertFire(constants, cameraID, imgPath, detectionResult['annotatedFile'], detectionResult['driveFileIDs'], detectionResult['fireSegment'], timestamp)
        deleteImageFiles(imgPath, imgPath, detectionResult['annotatedFile'])
        if args.heartbeat:
            heartBeat(args.heartbeat)

        timePost = time.time()
        updateTimeTracker(processingTimeTracker, timePost - timeStart)
        if args.time:
            if not detectionResult['timeMid']:
                detectionResult['timeMid'] = timeDetect
            logging.warning('Timings: fetch=%.2f, detect0=%.2f, detect1=%.2f post=%.2f',
                timeFetch-timeStart, detectionResult['timeMid']-timeFetch, timeDetect-detectionResult['timeMid'], timePost-timeDetect)
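
The detect() contract can be read off the keys this loop consumes; a minimal stub policy for exercising the loop without a model, using only those keys (a hypothetical test double, not one of the real policies from policies.get_policies()):

class StubDetectionPolicy:
    def __init__(self, *args, **kwargs):
        pass  # the real policies take (settings, args, googleServices, ...) as above

    def detect(self, image_spec):
        # Always report "no fire"; the main loop then skips alerting.
        return {
            'fireSegment': None,
            'timeMid': None,
            'annotatedFile': None,
            'driveFileIDs': [],
        }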