Example #1
0
File: ann.py  Project: pedrovbj/SVHN-ML
# Output artifact locations for this run, namespaced by the model prefix
# and a per-run timestamp so repeated runs never overwrite each other.
prefix = 'ANN'
timestamp = datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
results_path = Path(f'{prefix}_results')
log_path = results_path / f'{prefix}_out_{timestamp}.txt'
model_path = results_path / f'{prefix}_model_{timestamp}.ckpt'
img_path = results_path / f'{prefix}_cross_entropy_{timestamp}.png'

# Init logger (echo the log location to stdout first)
print(log_path, end='\r\n')
logger = MyLogger(log_path)

## Silence TensorFlow's C++ logging (3 = errors only)
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'

## Load data and flatten images to feature vectors, timing the step
logger.debug('Loading data... ')
t0 = datetime.now()
Xtrain, Ytrain, Xtest, Ytest = load_data()
Xtrain, Xtest = flatten(Xtrain), flatten(Xtest)
# Standardize each split with its own global mean/std.
# NOTE(review): the test split is normalized with its OWN statistics,
# not the training split's -- confirm this is intentional.
Xtrain = (Xtrain - Xtrain.mean()) / Xtrain.std()
Xtest = (Xtest - Xtest.mean()) / Xtest.std()
dt = datetime.now() - t0
logger.debug(f'Done. [Elapsed {dt}]\r\n')

## Define and fit model
logger.debug('Model fitting...\r\n')
t0 = datetime.now()

# printing period for cost of test set and accuracy
print_period = 1
Example #2
0
        # Fragment: tail of the strStatus() helper (its def line is not in
        # view).  Falls back to the module-level currentStatus when no
        # explicit status was supplied -- TODO confirm against full source.
        status = currentStatus

    # Human-readable fan state for log messages.
    return "On" if status else "Off"


# Log the effective configuration once at startup.
# Fix: pass the arguments lazily to logger.info instead of eagerly
# %-formatting the string -- formatting is then skipped entirely when
# the INFO level is disabled (standard logging idiom).
logger.info(
    "STARTING: Fan=%3s, Delay=%.1f, Threshold=%.1f/%.1f, MaxCount=%d/%d, MaxLogCount=%d",
    strStatus(), delay, thresholdUp, thresholdDown, maxCountUp,
    maxCountDown, maxLogCount)

# Hysteresis fan-control loop: keep sampling the temperature and toggle
# the fan only after the reading has stayed past the relevant threshold
# for several consecutive samples (debounce).
# NOTE(review): the loop exits when temp.read() returns a falsy value
# (0 or None) -- confirm that is the intended stop condition.
while t:
    # Desired state for this sample: once on, the fan stays on until the
    # temperature drops below thresholdDown (hysteresis band).
    newStatus = t > (thresholdDown if currentStatus else thresholdUp)

    # Count consecutive samples disagreeing with the current state;
    # any agreeing sample resets the debounce counter.
    if newStatus != currentStatus:
        count += 1
    else:
        count = 0

    # Periodic heartbeat log, plus a line whenever a switch is pending.
    if (logCount % maxLogCount == 0) or (count > 0):
        logCount = 0
        logger.debug("Fan=%3s, Count=%d, Temp=%2.3f", strStatus(), count, t)

    # Switch only after enough consecutive disagreeing samples; separate
    # debounce lengths for turning off (maxCountDown) vs. on (maxCountUp).
    if count >= (maxCountDown if currentStatus else maxCountUp):
        currentStatus = newStatus
        logger.info("SWITCH FAN: " + strStatus(newStatus))
        fan.write(newStatus)

    logCount += 1
    time.sleep(delay)
    t = temp.read()
            # Fragment: tail of an HTML-fetch helper (its def/try lines are
            # not in view).  Parse the fetched page into a BeautifulSoup tree.
            # NOTE(review): no parser argument is given, so bs4 picks the
            # "best available" parser and warns -- consider
            # BeautifulSoup(html.read(), "html.parser") for determinism.
            return BeautifulSoup(html.read())
        # Best-effort: any read/parse failure is logged, caller gets None.
        except Exception as e:
            logger.error("Parsing HTML failed. Error:\n" + repr(e))
            return None


#################
# Scraping body #
#################

# Read configuration (DB settings and the scrape start URL).
# Fix: the except branch previously logged a "fatal" error and then fell
# through, so the script continued and later crashed with a NameError on
# the undefined URL.  Re-raise so a missing config actually stops the run;
# also use logger.critical (logger.fatal is an undocumented alias).
try:
    config = Configurator()
    db = config.getDb()
    URL = config.getURL()["startURL"]
    logger.debug("URL is set to " + URL)
except KeyError:
    logger.critical("Unable to read configuration from '../config/config.ini'. Make sure you are running the script from 'scrapeforum' folder.")
    raise

# TODO decide if it is needed or it's too much spam
# Send notification email about start
started_at = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
Mailer(constants.EMAIL_TYPE_NOTIFICATION).send(
    "Scraping started",
    "Scraping started for " + URL + " at " + started_at)

# Connect to database (fragment: the matching except clause is not in view)
try:
    # Set up DB connection; the connection string comes from the same
    # Configurator as above.
    connectionString = config.getDbConnectionString()
    # NOTE(review): logging the full connection string may leak credentials.
    logger.debug("Trying to connect to DB with connectionString: " + connectionString)
    # SQL echo is enabled only when the config's 'echo' value is the
    # literal string 'True'.
    engine = create_engine(connectionString, echo=(db['echo'] == 'True'))

    # Create missing tables
Example #4
0
        # Fragment: tail of strStatus() (def line not in view); defaults to
        # the module-level currentStatus -- TODO confirm against full source.
        status = currentStatus

    # Human-readable fan state for log messages.
    return "On" if status else "Off"

# Log the effective configuration once at startup.
# Fix: pass the arguments lazily to logger.info instead of eagerly
# %-formatting the string -- formatting is then skipped entirely when
# the INFO level is disabled (standard logging idiom).
logger.info(
    "STARTING: Fan=%3s, Delay=%.1f, Threshold=%.1f/%.1f, MaxCount=%d/%d, MaxLogCount=%d",
    strStatus(), delay,
    thresholdUp, thresholdDown,
    maxCountUp, maxCountDown,
    maxLogCount)

# Hysteresis fan-control loop: keep sampling the temperature and toggle
# the fan only after the reading has stayed past the relevant threshold
# for several consecutive samples (debounce).
# NOTE(review): the loop exits when temp.read() returns a falsy value
# (0 or None) -- confirm that is the intended stop condition.
while t:
    # Desired state for this sample: once on, the fan stays on until the
    # temperature drops below thresholdDown (hysteresis band).
    newStatus = t > (thresholdDown if currentStatus else thresholdUp)

    # Count consecutive samples disagreeing with the current state;
    # any agreeing sample resets the debounce counter.
    if newStatus != currentStatus:
        count += 1
    else:
        count = 0

    # Periodic heartbeat log, plus a line whenever a switch is pending.
    if (logCount % maxLogCount == 0) or (count > 0):
        logCount = 0
        logger.debug("Fan=%3s, Count=%d, Temp=%2.3f", strStatus(), count, t)

    # Switch only after enough consecutive disagreeing samples; separate
    # debounce lengths for turning off (maxCountDown) vs. on (maxCountUp).
    if count >= (maxCountDown if currentStatus else maxCountUp):
        currentStatus = newStatus
        logger.info("SWITCH FAN: " + strStatus(newStatus))
        fan.write(newStatus)

    logCount += 1
    time.sleep(delay)
    t = temp.read()