예제 #1
0
    def test_bind(self, event, context, name, awsRequestId):
        """Binding a handler should log EVENT/RETURN with the request id,
        while records emitted outside the handler carry no request id."""
        lg = logger.getLogger(name, stream=self.stream)
        lg.setLevel('DEBUG')

        # Decorate a trivial handler; the bind wrapper is expected to log
        # the incoming event and the returned value around the call.
        @lg.bind
        def wrapped(event=None, context=None):
            lg.warning('TEST')
            return {'ok': True}

        lg.debug('BEFORE CONTEXT')
        wrapped(event, context)
        lg.debug('AFTER CONTEXT')

        expected = dedent(f"""\
            DEBUG - BEFORE CONTEXT
            INFO {awsRequestId} EVENT {{
              "fizz": "buzz"
            }}
            WARNING {awsRequestId} TEST
            INFO {awsRequestId} RETURN {{
              "ok": true
            }}
            DEBUG - AFTER CONTEXT
        """)
        # Rewind the capture stream and compare the full transcript.
        self.stream.seek(0)
        actual = self.stream.read()
        assert actual == expected
예제 #2
0
파일: stock.py 프로젝트: gitvipin/Pali
'''
A simple python module to scan Stock Ticker symbols.
'''

import datetime
import json
import logging
import requests

from src import logger
from src import task
from src import worker

# Module-level logger, named after this module.
log = logger.getLogger(__name__)
# Quiet the chatty HTTP libraries: only WARNING and above from them.
logger.getLogger("requests").setLevel(logging.WARNING)
logger.getLogger("urllib").setLevel(logging.WARNING)


class StockTask(task.Task):
    """Task that queries the Alpha Vantage API for one stock ticker."""

    # Base endpoint of the Alpha Vantage market-data API.
    FIN_DATA_URL = 'https://www.alphavantage.co'
    # Placeholder — replace with a real Alpha Vantage API key before use.
    API_KEY = 'ADD_YOUR_KEY'
    # API "function" parameter: daily adjusted time-series data.
    FIN_DATA_TYPE = 'TIME_SERIES_DAILY_ADJUSTED'

    def __init__(self, tckr='TSLA'):
        """Create a scan task for a single ticker symbol (default: TSLA)."""
        super(StockTask, self).__init__()
        # Snapshot today's date once, so every request this task makes
        # refers to the same day.
        today = datetime.datetime.now()
        self.date = today.strftime("%Y-%m-%d")
        self.tckr = tckr
        # Fetched payload; stays None until the task has run.
        self.data = None

    def build_url(self):
        self.url = '%s/query?apikey=%s&function=%s&symbol=%s' % (
예제 #3
0
파일: ovftool.py 프로젝트: gitvipin/Pali
Thread Pool are waiting for the tasks to be pushed in the Queue and take the
task from queue as soon as they become available which means they don't wait
for all the tasks to be pushed and "execute" or "run" to be called for tasks
to be picked up.
'''

import subprocess
import pipes


from src import logger
from src import task
from src import worker


log = logger.getLogger(__name__)

class OvfTask(task.Task):
    """Task that runs ovftool to deploy an OVF template to an ESX host."""

    # Deployment parameters; presumably filled in before tasks execute —
    # TODO confirm where these are populated.
    TEMPLATE=''
    INFRAHOST_IP=''
    DATASTORE=''
    PASSWORD=''
    # ovftool command template, filled with: datastore, VM name, OVF path,
    # root password, host IP. NOTE(review): the password lands on the command
    # line, which is visible in the process list — confirm this is acceptable.
    OVF_CMND='ovftool --acceptAllEulas --datastore=%s --noSSLVerify --name=%s %s vi://root:%s@%s'

    def __init__(self, name):
        """Initialize the task with a target name.

        name: presumably the VM name substituted into OVF_CMND's --name
        flag — confirm against the (off-screen) run logic.
        """
        super(OvfTask, self).__init__()
        self.name = name

    def run_command(self, cmd):
        """
        Helper routine for running commands on the command line.
예제 #4
0
}
# Instantiate the configured architecture, attach an L1 loss, and restore
# weights from the checkpoint file.
net = net_arch_dict[config.model_type](config).type(dtype)
net.loss = losses.l1_loss
iterations, epoch = util.load_model(net, config.model_file)

# Testing parameters: outputs are grouped under the checkpoint's directory name.
exp_name = os.path.basename(os.path.dirname(config.model_file))
log_dir = os.path.join(config.output_dir, exp_name)
h_mat_dir = os.path.join(log_dir, 'h_mats')  # "h" matrices — presumably homographies; TODO confirm
util.checkDirs([log_dir, h_mat_dir])
config.log_dir = log_dir

# Configure logger: one timestamped .log file per test run.
timestamp = time.strftime("%Y%m%d_%H-%M-%S", time.localtime())
logFile = os.path.join(log_dir, 'test_' + exp_name + '_' + timestamp + '.log')
txt_logger = loggerFactory.getLogger(logFile)
config.txt_logger = txt_logger

# Each immediate subdirectory of the data path is treated as one data group.
data_base_path = config.data_path
subdirs = util.globx(data_base_path, ['*'])
sub_dirnames = [os.path.basename(subdir) for subdir in subdirs]
patch_select = config.patch_select
scale = config.scale
batch_size = 1  # evaluate one sample at a time

loss = 'l1_loss'  # NOTE(review): appears unused in this chunk — verify downstream use
## Parameters

for sub_dirname in sub_dirnames:
    data_group = sub_dirname
    data_path = os.path.join(data_base_path, data_group)
예제 #5
0
                          gloable_spend=spendGloable,
                          lr=lr,
                          eta=eta)

                logger.info(msg)
                writer.add_scalar('loss', avgLoss, iter)
                writer.add_scalar('lr', lr, iter)

        scheduler.step()

    outName = osp.join(args.subModelDir, 'final.pth')
    torch.save(net.cpu().state_dict(), outName)


if __name__ == '__main__':
    # Parse CLI options and stamp this run with a unique timestamp name.
    args = parseArgs()
    run_name = time.strftime('%y%m%d-%H%M%S')
    args.subModelDir = osp.join(args.modelDir, run_name)
    args.subTensorboardDir = osp.join(args.tensorboardDir, run_name)

    # Make sure every output directory exists before anything is written.
    for directory in (args.logDir, args.subModelDir, args.subTensorboardDir):
        if not osp.exists(directory):
            os.makedirs(directory)

    # One log file per run; echo every parsed argument for reproducibility.
    logger = getLogger(osp.join(args.logDir, run_name + '.log'))
    for key, value in vars(args).items():
        logger.info(key)
        logger.info(value)

    main(args, logger)