Example #1
from my_logger import setup_logger, get_logger
from squeezenet.arg_parsing import parse_args
from squeezenet.develop_squeezenet import DevelopSqueezenet


def main():
    # log_option:
    #    0: No logging
    #    1: On screen logs
    #    2: 1 + File logging to logs/latest.log
    #    3: 2 + File logging to logs/<timestamp>.log
    setup_logger(log_option=2)
    logger = get_logger()

    args = parse_args()
    dev = DevelopSqueezenet(args)

    try:
        dev.run()
    except Exception as e:
        logger.exception(str(e))

    return
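setup_logger and get_logger come from the project's my_logger module, which is not shown on this page. Below is a minimal sketch consistent with the log_option values documented in the comment above; the logger name and anything beyond the documented logs/ layout are assumptions.

import logging
import os
import time

_LOGGER_NAME = 'squeezenet'  # hypothetical logger name


def setup_logger(log_option=0):
    # log_option semantics per the comment in main() above.
    logger = logging.getLogger(_LOGGER_NAME)
    logger.setLevel(logging.INFO)
    if log_option >= 1:  # on-screen logs
        logger.addHandler(logging.StreamHandler())
    if log_option >= 2:  # file logging to logs/latest.log
        os.makedirs('logs', exist_ok=True)
        logger.addHandler(logging.FileHandler('logs/latest.log', mode='w'))
    if log_option >= 3:  # file logging to logs/<timestamp>.log
        stamp = time.strftime('%Y%m%d-%H%M%S')
        logger.addHandler(logging.FileHandler('logs/{:s}.log'.format(stamp)))
    return logger


def get_logger():
    return logging.getLogger(_LOGGER_NAME)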
Example #2
import sys
import os
import tensorflow as tf

repo_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
sys.path.append(repo_path)

from my_logger import setup_logger, get_logger

setup_logger(log_option=1)

from squeezenet.arg_parsing import parse_args
from squeezenet.develop_squeezenet import DevelopSqueezenet


def main():
    logger = get_logger()
    args = parse_args()
    sqz = DevelopSqueezenet(args)
    sqz.load_checkpointables(sqz.cfg.model_saver.checkpoint_id)  # Load checkpoint

    # Trace once so a concrete function exists for this polymorphic tf.function
    concrete_fn = sqz.net.call.get_concrete_function(
        batch_x=tf.TensorSpec([None, 224, 224, 3], tf.float32),
        training=False)

    logger.info('Saving the model in directory: {:s}'.format(sqz.cfg.directories.dir_model))
    tf.saved_model.save(sqz.net, sqz.cfg.directories.dir_model)  # Save model
    return
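After tf.saved_model.save, the exported model can be reloaded and the traced signature invoked. A short usage sketch; the directory string is a placeholder for whatever sqz.cfg.directories.dir_model held.

import tensorflow as tf

# Reload the exported SavedModel and call the traced function.
loaded = tf.saved_model.load('/path/to/dir_model')  # hypothetical path
batch = tf.zeros([1, 224, 224, 3], tf.float32)
predictions = loaded.call(batch_x=batch, training=False)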
Example #3
'''
Handle pushbuttons and other interrupt devices connected to the Pi.
If the relay is manually turned on by a pushbutton, a timer starts (30 minutes, per AUTO_ON_TIME).
When the timer expires, the relay turns off.
If the sensors require the relay on, they will turn it back on within 5 minutes.

When the program starts, the red LED on the Pi flashes and the relay is turned on for 2 seconds.
'''

import os
import os.path
import time
import RPi.GPIO as GPIO

import logging
import my_logger
logger = my_logger.setup_logger(__name__, 'pi_ints.log')
#dataFormat = logging.Formatter('%(levelname)s : %(message)s')
#data_logger = my_logger.setup_logger('data','pi_data.log', formatter=dataFormat)

# I cannot run this program as a cron job if it imports pi_hw.
# The problem is that pi_hw imports Adafruit_DHT, and for some reason the cron job can't fetch it.
# So I have to replicate the hardware pin assignments here.
#from pi_hw import AC_RELAY,AC_PB_ON,AC_PB_OFF,RED_LED,YELLOW_LED,GREEN_LED
RELAY_EN = True
AC_RELAY = 17
AC_PB_ON = 19
AC_PB_OFF = 26
RED_LED = 25
YELLOW_LED = 24
GREEN_LED = 23
AUTO_ON_TIME = 30   # minutes: turn off after this time
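The interrupt wiring itself is further down in the original file. A minimal sketch of how the pushbutton could drive the relay and the auto-off timer with RPi.GPIO; the callback name, the debounce value, and the relay being active-high are assumptions.

GPIO.setmode(GPIO.BCM)
GPIO.setup(AC_RELAY, GPIO.OUT)
GPIO.setup(AC_PB_ON, GPIO.IN, pull_up_down=GPIO.PUD_UP)

relay_off_time = None  # epoch seconds at which to auto-off


def pb_on_pressed(channel):
    # Manual ON: energize the relay and arm the AUTO_ON_TIME timer.
    global relay_off_time
    if RELAY_EN:
        GPIO.output(AC_RELAY, GPIO.HIGH)
    relay_off_time = time.time() + AUTO_ON_TIME * 60
    logger.info('Relay on via pushbutton; auto-off in %d minutes', AUTO_ON_TIME)

# The button input is pulled up, so a press is a falling edge; debounce 300 ms.
GPIO.add_event_detect(AC_PB_ON, GPIO.FALLING, callback=pb_on_pressed, bouncetime=300)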
Example #4
from flask import Flask, make_response, request, current_app
from functools import update_wrapper
from datetime import timedelta
import OpenWeatherProvider as ow
import radar_disp as radar
import logging
import my_logger
logger = my_logger.setup_logger(__name__, 'ow.log', level=logging.DEBUG)

app = Flask(__name__, static_folder='static')


# got this CORS solution from https://stackoverflow.com/questions/26980713/solve-cross-origin-resource-sharing-with-flask
def crossdomain(origin=None,
                methods=None,
                headers=None,
                max_age=21600,
                attach_to_all=True,
                automatic_options=True):
    if methods is not None:
        methods = ', '.join(sorted(x.upper() for x in methods))
    if headers is not None and not isinstance(headers, str):
        headers = ', '.join(x.upper() for x in headers)
    if origin is not None and not isinstance(origin, str):
        origin = ', '.join(origin)
    if isinstance(max_age, timedelta):
        max_age = max_age.total_seconds()

    def get_methods():
        if methods is not None:
            return methods

        options_resp = current_app.make_default_options_response()
        return options_resp.headers['allow']
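The excerpt cuts off before the decorator wrapper that the linked Stack Overflow answer builds around get_methods. Typical usage once the full decorator is in place looks like this; the route and view name are illustrative, not from the original file.

@app.route('/weather')
@crossdomain(origin='*')
def get_weather():
    # Hypothetical endpoint; the decorator adds the Access-Control-* headers.
    return 'ok'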
Example #5
def main():
    global args, best_prec1
    args = parser.parse_args()
    log_handler = my_logger.setup_logger(args.log_path)
    for key, value in sorted(vars(args).items()):
        log_handler.info(str(key) + ': ' + str(value))

    # best_prec1 = 0
    best_loss = 999999.
    iter_count = 0

    # pooling size
    gap_size = [x // 32 for x in args.crop_size]
    # load resnet
    if args.pretrained:
        log_handler.info("=> using pre-trained model '{}'".format(args.arch))
    else:
        log_handler.info("=> create model '{}'".format(args.arch))

    model = getattr(dare_models, args.arch)(pretrained=args.pretrained, gap_size=gap_size, gen_stage_features=args.gen_stage_features)
    # model.gen_stage_features = args.gen_stage_features

    model = nn.DataParallel(model).cuda()

    # define loss function (criterion) and optimizer
    log_handler.info('Criterion type: Optimized Batch Hard Mining')
    criterion = OptiHardTripletLoss(mean_loss=args.mean_loss, margin=args.margin, eps=args.eps).cuda()

    log_handler.info('Optimizer type: Adam')
    optimizer = torch.optim.Adam(model.parameters(), lr=args.lr, weight_decay=args.weight_decay)

    # optionally resume from a checkpoint
    # Tracks best loss, not best prec@1
    if args.resume:
        if os.path.isfile(args.resume):
            log_handler.info("=> loading checkpoint '{}'".format(args.resume))
            checkpoint = torch.load(args.resume)
            try:
                args.start_iteration = checkpoint['iterations']
            except KeyError:
                args.start_iteration = 0

            best_loss = checkpoint['best_loss']
            model.load_state_dict(update_state_dict(checkpoint['state_dict']))
            optimizer.load_state_dict(checkpoint['optimizer'])
            log_handler.info("=> loaded checkpoint '{}' "
                             .format(args.resume))
        else:
            log_handler.error("=> no checkpoint found at '{}'".format(args.resume))

    iter_count = args.start_iteration
    cudnn.benchmark = True

    if args.extract_features:
        if args.dataset == 'MARS':
            extract_features.extract_features_MARS(model, args.crop_size, args.info_folder, args.data,
                                                   args.extract_features_folder, log_handler,
                                                   batch_size=args.batch_size,
                                                   workers=args.workers, is_tencrop=args.ten_crop)
        elif args.dataset == 'Market1501' or args.dataset == 'Duke':
            extract_features.extract_features_Market1501(model, args.crop_size, args.data,
                                                         args.extract_features_folder, log_handler,
                                                         batch_size=args.batch_size,
                                                         workers=args.workers,
                                                         is_tencrop=args.ten_crop,
                                                         gen_stage_features=args.gen_stage_features)
        else:
            extract_features.extract_features_CUHK03(model, args.crop_size, args.data,
                                                     args.extract_features_folder, log_handler,
                                                     batch_size=args.batch_size,
                                                     workers=args.workers, is_tencrop=args.ten_crop)
        log_handler.info('Finish Extracting Features')
        return

    # split dataset for validation and training
    assert os.path.isdir(args.data)
    train_person_ids = os.listdir(args.data)
    log_handler.info('Number of people in the training set: ' + str(len(train_person_ids)))

    # Data loading code
    normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                     std=[0.229, 0.224, 0.225])
    scale_image_size = [int(x * 1.125) for x in args.crop_size]

    train_dataset = Datasets.TrainingDataset(
        data_folder=args.data,
        person_ids=train_person_ids,
        num_sample_persons=args.num_sample_persons,
        num_sample_imgs=args.num_sample_imgs,
        transform=transforms.Compose([
            transforms.Resize(scale_image_size),
            transforms.RandomCrop(args.crop_size),
            transforms.RandomHorizontalFlip(),
            transforms.ToTensor(),
            normalize,
        ]), random_mask=args.random_mask)

    train_loader = torch.utils.data.DataLoader(
        train_dataset, batch_size=args.batch_size, shuffle=True,
        num_workers=args.workers, pin_memory=True)

    if not os.path.isdir(args.checkpoint_folder):
        os.makedirs(args.checkpoint_folder)
    log_handler.info('Checkpoint folder: ' + str(args.checkpoint_folder))

    while iter_count < args.max_iter:
        # train for one epoch
        loss_train, iter_count = train(train_loader, model, criterion, optimizer, iter_count, log_handler)

        # remember best loss and save checkpoint
        is_best = loss_train < best_loss
        best_loss = min(loss_train, best_loss)
        save_checkpoint({
            'iterations': iter_count,
            'arch': args.arch,
            'state_dict': model.state_dict(),
            'best_loss': best_loss,
            'optimizer': optimizer.state_dict(),
        }, is_best, folder=args.checkpoint_folder)
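save_checkpoint is referenced above but not defined in this excerpt. A minimal sketch following the common PyTorch ImageNet-example pattern; the file names are assumptions.

import os
import shutil
import torch


def save_checkpoint(state, is_best, folder='.', filename='checkpoint.pth.tar'):
    # Persist the latest state; keep a copy aside when it is the best so far.
    path = os.path.join(folder, filename)
    torch.save(state, path)
    if is_best:
        shutil.copyfile(path, os.path.join(folder, 'model_best.pth.tar'))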
Example #6
import os
import os.path
import glob
import time
import math
import datetime as dt
import Adafruit_DHT  # DHT device interface
import RPi.GPIO as GPIO

import logging
import my_logger
dataFormat = logging.Formatter('%(levelname)s : %(message)s')
data_logger = my_logger.setup_logger('data',
                                     'pi_data.log',
                                     formatter=dataFormat)
data_logger.setLevel(logging.DEBUG)

#from pi_hw import AC_RELAY,AC_PB_ON,AC_PB_OFF,RED_LED,YELLOW_LED,GREEN_LED,DHT_TYPE,DHT_PIN,READ_DS18B20
import pi_hw as hw

hw.set_logger(data_logger)

STATUS_COLOR = {0: 'grey', 1: 'green', 2: 'yellow', 3: 'red'}

# Values for status calculations
RED_DEWPOINT = 3  # diff between measured temp and dewpoint to trigger RED
RED_HUMIDITY = 80  # humidity level to trigger RED
YELLOW_DEWPOINT = 5
YELLOW_HUMIDITY = 65
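The status computation itself is not shown in this excerpt. A minimal sketch of how these thresholds could map readings to a STATUS_COLOR code; the function name and signature are assumptions.

def calc_status(temp, dewpoint, humidity):
    # 3 (red) when the temperature nears the dewpoint or humidity is high,
    # 2 (yellow) at the looser thresholds, 1 (green) otherwise.
    if (temp - dewpoint) <= RED_DEWPOINT or humidity >= RED_HUMIDITY:
        return 3
    if (temp - dewpoint) <= YELLOW_DEWPOINT or humidity >= YELLOW_HUMIDITY:
        return 2
    return 1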
Example #7
import numpy as np
import datetime as dt
#import requests
import imageio
# If you use the 'GIF-FI' plugin, run this download once (it does not
# generate a correct animated GIF under Python 2.7).
# NOTE: better to run it from a command shell.
#imageio.plugins.freeimage.download()
import time
from array2gif import write_gif

from urllib.request import urlopen, Request
from html.parser import HTMLParser

import logging
import my_logger
logger = my_logger.setup_logger(__name__, 'pyradar.log', level=logging.DEBUG)
logger.debug('start')


# HTMLParser must be subclassed to handle the tags we care about.
class MyHTMLParser(HTMLParser):
    '''
    Parses NWS radar lists, extracting image file names for specific station.
    '''
    def __init__(self, station):
        '''
        :param station: radar station name, e.g., "mux" for Mt. Umunhum
        '''
        #super(MyHTMLParser, self).__init__()   # new-style super
        HTMLParser.__init__(self)  # old-style call; HTMLParser does not inherit from object on Python 2
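    # The tag handlers are truncated in this excerpt. A minimal sketch of how
    # such a subclass might collect image links for the station; it assumes
    # __init__ also stores the station id (self.station) and initializes
    # self.files, which the visible code does not show.
    def handle_starttag(self, tag, attrs):
        # Collect hrefs of radar image files that start with the station id.
        if tag == 'a':
            for name, value in attrs:
                if name == 'href' and value.lower().startswith(self.station):
                    self.files.append(value)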
Example #8
    return value


def display_status(stat):
    # Placeholder: drive the status indication for the given status color.
    if stat == 'red':
        pass
    elif stat == 'yellow':
        pass
    else:
        pass


# ============================================================
# ===== Run this script from command line to test it =====
# ===== If imported to other programs, this code is not run

if __name__ == '__main__':
    import logging
    import my_logger
    dataFormat = logging.Formatter('%(levelname)s : %(message)s')
    data_logger = my_logger.setup_logger('data',
                                         'pi_hw_test.log',
                                         formatter=dataFormat)
    data_logger.setLevel(logging.DEBUG)

    for v in (AMBIENT_T, AMBIENT_DEW, AMBIENT_HUM, AMBIENT_T_1, AMBIENT_DEW_1,
              AMBIENT_HUM_1, MIRROR_T, MIRROR_CELL_T, MIRROR_CELL_HUM):
        val = get_value(v)
        data_logger.debug('name={}: instr={}, measure={}, value={}'.format(
            v['name'], v['instrument'], v['value'], val))
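get_value is defined earlier in the file and not shown here. A minimal stub of what the test loop above assumes; the real function presumably takes a fresh reading from the descriptor's instrument, so this cached-value version is only an assumption.

def get_value(sensor):
    # Return a reading for the sensor descriptor; the loop above uses the
    # 'name', 'instrument', and 'value' keys of each descriptor dict.
    return sensor.get('value')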
Example #9
    geist_port = gset.geist_port
    email_acct = None
else:
    dewpoint_alarm = 4.0    # diff between ambient dewpoint and mirror temperature in F
    dewpoint_temp = ('Geist WD100','dewpoint')
    mirror_temp = ('GTHD','temperature')
    geist_addr = 'http://192.168.99.110'   # address of geist Watchdog
    geist_port = 80     # use None for default port value
    email_acct = '*****@*****.**'
    email_acct_pass = '******'
    email_from = '*****@*****.**'
    email_to = '*****@*****.**'

import logging
import my_logger
logger = my_logger.setup_logger(__name__,'geist_prog.log')
dataFormat = logging.Formatter('%(levelname)s : %(message)s')
data_logger = my_logger.setup_logger('data','geist_data.log', formatter=dataFormat)

def get_path_entity(dataDict,path):
    """
    Search the dataDict (JSON returned from Geist) for nodes that match the path.
    :param dataDict: JSON as a nested dictionary obtained from Geist query
    :param path: tuple or list of node names in dataDict to find
    :return: list of nodes that match path
    The path entries can designate nodes by a single name, by a tuple of names,
    or by '*'. For example, path = ('data', '*', 'entity', '0') matches the
    top-level node named 'data', then all nodes within 'data', then all nodes
    in those matches named 'entity', and finally nodes named '0'.
    """
    nodes = []
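The function body is truncated here after nodes = []. A self-contained sketch of the traversal the docstring describes; the implementation details are assumptions, so it is named separately.

def get_path_entity_sketch(dataDict, path):
    # Walk the nested dict one path entry at a time, keeping every child whose
    # key matches the entry: '*' matches anything, a tuple matches any of its
    # names, and a plain string matches that name exactly.
    nodes = [dataDict]
    for entry in path:
        matched = []
        for node in nodes:
            if not isinstance(node, dict):
                continue
            for key, child in node.items():
                if entry == '*' or key == entry or (
                        isinstance(entry, tuple) and key in entry):
                    matched.append(child)
        nodes = matched
    return nodes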