Example #1
#!/usr/bin/env python

import os, sys, urllib, urllib2, time
from logbook import Logger, FileHandler

user = "******"
token = "password"

message = user.encode('utf-8')
secret = token.encode('utf-8')
logger = Logger("Cache Purge")
logfile = "cache-purge.log"

fh = FileHandler(logfile, "a")
fh.applicationbound()
fh.push_application()

api_root = "https://api.ccu.akamai.com"
get_call = "/ccu/v2/queues/default"
#data = {}

try:
    req = None
    url = api_root + get_call
    mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
    mgr.add_password(None, api_root, user, token)
    handler = urllib2.HTTPBasicAuthHandler(mgr)
    opener = urllib2.build_opener(handler)
    urllib2.install_opener(opener)
    req = urllib2.Request(url)
    #req = urllib2.Request(api_root,urllib.urlencode(data))
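    # A hedged continuation sketch (the excerpt cuts off here): issue the
    # authenticated GET through the opener installed above and log the
    # queue status. The matching except clause also falls outside the excerpt.
    response = urllib2.urlopen(req, timeout=30)
    logger.info("Purge queue response: {0}".format(response.read()))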
Example #2
# -*- coding: utf-8 -*-

import logbook
import sys
from logbook import Logger, FileHandler, StreamHandler

# Log file
logFile = "rk-bd.log"

# Use local time instead of UTC in log timestamps
logbook.set_datetime_format("local")

# Log to stdout, application-wide
StreamHandler(
    sys.stdout,
    level=logbook.DEBUG,
    format_string=
    "[{record.time:%Y-%m-%d %H:%M:%S}] {record.level_name}: {record.filename}:{record.lineno} {record.message}",
    encoding="utf-8").push_application()

# Log to the file, application-wide
FileHandler(
    logFile,
    level=logbook.INFO,
    format_string=
    "[{record.time:%Y-%m-%d %H:%M:%S}] {record.level_name}: {record.filename}:{record.lineno} {record.message}",
    encoding="utf-8",
    bubble=True).push_application()

log = Logger(__name__)
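With both handlers pushed application-wide, a single call fans out according to levels and bubbling; a short usage sketch:

# DEBUG records fall below the INFO file handler's level and reach stdout
# only; INFO and above land in rk-bd.log and, because bubble=True, continue
# on to stdout as well.
log.debug("console only")
log.info("console and rk-bd.log")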
Example #3
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
from spider163.utils import config
from logbook import FileHandler, Logger
from terminaltables import AsciiTable
from colorama import Fore
from colorama import init

path = config.get_path()
log_handler = FileHandler(filename=path + '/spider163.log')
log_handler.push_application()
log = Logger("")

init(autoreset=True)


def Log(msg):
    log.warn(msg)


def Table(tb):
    print(AsciiTable(tb).table)


def Blue(msg):
    return Fore.BLUE + msg
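A brief usage sketch of these helpers, with hypothetical values:

Log("crawl finished")                         # writes a WARNING to spider163.log
Table([["song", "plays"], ["Demo", "42"]])    # prints an ASCII table
print(Blue("all done"))                       # blue text, auto-reset by colorama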
Example #4
# limitations under the License.

import json
import os
import platform
import shutil
import sqlite3
from glob import glob

from logbook import FileHandler, Logger, CRITICAL

from browser import Browser

log = Logger("Chromium")
if os.path.exists("application.log"):
    log_handler = FileHandler('application.log')
    log_handler.push_application()
else:
    log.level = CRITICAL


class Chromium(Browser):
    def __init__(self):
        log.debug("Starting initialization")
        self.name = "chromium"
        self.process_names = ["chromium-browser", "chrome.exe"]
        self.os = platform.system().lower()

        self.path = None
        self.profile = None
        self.available_profiles = self._get_profiles()
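Logging here is opt-in: the file handler is attached only when application.log already exists; otherwise the logger is silenced at CRITICAL. A hypothetical way to turn logging on before importing the module:

open("application.log", "a").close()  # touch the file so the handler gets attached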
Example #5
    servers = [Server(i) for i in range(1, NUM_SERVERS + 1)]

    start_servers(servers)

    time.sleep(10)
    try:
        return test(servers)
    except Exception as e:
        logger.exception('Test failed: %s' % e)
        return 1
    finally:
        logger.info('Stopping')
        stop(servers)


if __name__ == '__main__':
    format = '[{record.time}] {record.level_name:>5} [{record.extra[worker_id]}] {record.message}'

    logging_setup = NestedSetup([
        NullHandler(),
        FileHandler(
            filename=os.path.join(os.path.dirname(__file__), 'log/client.log'),
            format_string=format,
            bubble=True,
        ),
        StderrHandler(level=logbook.INFO, format_string=format, bubble=True),
    ])

    with logging_setup.applicationbound():
        sys.exit(main())
Example #6
        if not token:
            return None
        elif not tk_alive.isalive(token):
            req_count.delete(token)
            tk_alive.drop_tk(token)
            continue

        return token, used


if __name__ == '__main__':
    from logbook import FileHandler
    from logbook import Logger
    from argparse import ArgumentParser
    import sys

    parser = ArgumentParser()
    parser.add_argument('--log', nargs=1, help='log path')
    args = parser.parse_args(sys.argv[1:])
    log_handler = FileHandler(args.log[0])
    logbk = Logger('Token Maintain')

    with log_handler.applicationbound():
        logbk.info('maintain prepare')

        at_least = AT_LEAST_TOKEN_COUNT

        logbk.info('maintain begin')
        maintain(at_least=at_least, hourly=True, logbk=logbk)
        logbk.info('maintain end')
Example #7
#    and/or other materials provided with the distribution.

# 3. Neither the name of the copyright holder nor the names of its
#    contributors may be used to endorse or promote products derived from
#    this software without specific prior written permission.

# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
# OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
# OF THE POSSIBILITY OF SUCH DAMAGE.
"""Log configuration for contexts."""

from logbook import FileHandler, Logger

logger = Logger()
file_handler = FileHandler("logs/contexts.log",
                           encoding="utf-8",
                           level="DEBUG",
                           delay=True)
file_handler.format_string = (
    "{record.time:%Y-%m-%d %H:%M:%S.%f%z} [{record.level_name}] "
    "{record.message}")
logger.handlers.append(file_handler)
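The handler is appended to this logger's own handlers rather than pushed on the application stack, so only this logger writes to logs/contexts.log, and delay=True postpones opening the file until the first record. A tiny sketch of the consequence (it assumes the logs/ directory exists, since FileHandler will not create it):

logger.debug("this first write opens logs/contexts.log")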
Example #8
File: tracer.py Project: zw1226/qdb
    def _init(self, config=None, merge=False, **kwargs):
        """
        See qdb.config for more information about the configuration of
        qdb.
        merge denotes how config and kwargs should be merged.
        QdbConfig.kwargs_first says config will trample kwargs,
        QdbConfig.config_first says kwargs will trample config.
        Otherwise, kwargs and config cannot both be passed.
        """
        self.super_ = super(Qdb, self)
        self.super_.__init__()
        self.reset()
        if config and kwargs:
            if merge == QdbConfig.kwargs_first:
                first = kwargs
                second = config
            elif merge == QdbConfig.config_first:
                first = config
                second = kwargs
            else:
                raise TypeError('Cannot pass config and kwargs')
            config = first.merge(second)
        else:
            config = QdbConfig.get_config(config or kwargs)

        self.address = config.host, config.port
        self.set_default_file(config.default_file)
        self.default_namespace = config.default_namespace or {}
        self.exception_serializer = config.exception_serializer or \
            default_exception_serializer
        self.eval_fn = config.eval_fn or default_eval_fn
        self._file_cache = {}
        self.retry_attepts = config.retry_attepts
        self.repr_fn = config.repr_fn
        self._skip_fn = config.skip_fn or (lambda _: False)
        self.pause_signal = config.pause_signal \
            if config.pause_signal else signal.SIGUSR2
        self.uuid = str(config.uuid or uuid4())
        self.watchlist = {}
        self.execution_timeout = config.execution_timeout
        self.reset()
        self.log_handler = None
        if config.log_file:
            self.log_handler = FileHandler(config.log_file)
            self.log_handler.push_application()

        self.bound_cmd_manager = config.cmd_manager or TerminalCommandManager()
        self.bound_cmd_manager.start(config.auth_msg)

        # We need to be able to send stdout back to the user debugging the
        # program. We hold a handle to this in case the program resets stdout.
        self._old_stdout = sys.stdout
        self._old_stderr = sys.stderr
        self.redirect_output = (
            config.redirect_output and
            not isinstance(self.cmd_manager, TerminalCommandManager)
        )
        if self.redirect_output:
            sys.stdout = OutputTee(
                sys.stdout,
                RemoteOutput(self.cmd_manager, '<stdout>'),
            )
            sys.stderr = OutputTee(
                sys.stderr,
                RemoteOutput(self.cmd_manager, '<stderr>'),
            )
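The merge naming is easy to invert, so a hypothetical illustration may help. It assumes, as the branch above implies, that QdbConfig.get_config accepts a dict and that merge() lets its argument's values override the receiver's:

from qdb.config import QdbConfig

defaults = QdbConfig.get_config({'port': 8001})
override = QdbConfig.get_config({'port': 9001})

# merge=QdbConfig.kwargs_first computes kwargs.merge(config), so values from
# config win ("config will trample kwargs"); config_first is the mirror image.
assert defaults.merge(override).port == 9001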
Example #9
import asyncio
import configparser
import inspect
import sys
import time
import os

import discord
from discord.ext import commands
from logbook import Logger, StreamHandler, FileHandler

logger = Logger("Discord Music")
logger.handlers.append(StreamHandler(sys.stdout, bubble=True))
logger.handlers.append(FileHandler("last-run.log", bubble=True, mode="w"))

logger.debug("Loading config files")

default_config = "[Config]\ntoken = \nsnip = "

config = configparser.ConfigParser()

token = ""
snip = ""

if os.path.exists("config.ini"):
    config.read("config.ini")

    try:
        token = config['Config']['token']
    except KeyError:
        logger.critical(
Example #10
import asyncio
from pathlib import Path

from logbook import FileHandler, Logger
from pony.orm import commit, core, db_session, set_sql_debug

from data.base import db
from data.search import search_permission
from service.base import BaseService
import settings
from tools.delay import Delay

# Logger
logger = Logger()
file_handler = FileHandler("logs/database.log",
        encoding="utf-8", level="DEBUG", delay=True)
file_handler.format_string = (
        "{record.time:%Y-%m-%d %H:%M:%S.%f%z} [{record.level_name}] "
        "{record.message}"
)
logger.handlers.append(file_handler)

class Service(BaseService):

    """Data service."""

    name = "data"

    @property
    def has_admin(self):
        """
Example #11
    h = Actie(xmlfile, "2006-0001")
    logger.info(h.meld)
    if h.exists:
        h.list()
        h.setArch(True)
        h.write()
        h.read()
        h.list()

if __name__ == "__main__":
    ## log_handler = FileHandler('get_acties_xml_1.log', mode='w')
    ## with log_handler.applicationbound():
        ## test_acties(arch='')
        ## test_acties(select={"idgt": "2006-0010"})
        ## test_acties(select={"idlt":  "2005-0019"})
        ## test_acties(select={"idgt": "2005-0019" , "idlt": "2006-0010"})
        ## test_acties(select={"idgt": "2005-0019" , "idlt": "2006-0010",  "id": "and" })
        ## test_acties(select={"idgt": "2006-0010" , "idlt": "2005-0019",  "id": "or" })
        ## test_acties(select={"status": ("0", "1", "3")})
        ## test_acties(select={"soort": ("W", "P")})
        ## test_acties(select={"titel": ("tekst")})
    ## log_handler = FileHandler('settings_xml_1.log', mode='w')
    ## with log_handler.applicationbound():
        ## test_settings()
    log_handler = FileHandler('actie_xml.log', mode='w')
    with log_handler.applicationbound():
        test_laatste()
        test_actie("2007-0001")
        test_actie("1")
        ## test_archiveren()
Example #12
File: main.py Project: dxcv/STAMC
import numpy as np
from logbook import FileHandler, Logger
from sqlalchemy import create_engine
from statsmodels.regression.linear_model import OLS
from imp import reload

import mysql_table
import get_alphas
import utils

reload(mysql_table)
reload(get_alphas)
reload(utils)

log_file_name = r'price_vol.log'
log_handler = FileHandler(log_file_name)
log_handler.push_application()


def check_log(date, tdays):
    '''
    If the next trade day after `date` was already written to the database
    successfully, skip `date`, because four .py files must run for each date.
    
    Params:
        date:
            str, like '%Y%m%d'
        tdays:
            trade day series
    '''
    with open(log_file_name) as f:
        log_text = f.read()
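    # A hedged sketch of the conclusion (the original is cut off here):
    # treat the presence of the next trade day in the log as evidence that
    # all four scripts already ran for `date`; `tdays` is assumed to be a
    # pandas Series of '%Y%m%d' strings in ascending order.
    next_days = tdays[tdays > date]
    return (not next_days.empty) and (next_days.iloc[0] in log_text)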
Example #13
import sys
from logbook import Logger, NestedSetup, StreamHandler, FileHandler, StringFormatterHandlerMixin, NullHandler

format_string = '[{record.time:%y%m%d %H:%M}] {record.level_name}: snakepot {record.channel}:  {record.message}'

NestedSetup([
    FileHandler('logfile.log', format_string=format_string, level='DEBUG'),
    StreamHandler(sys.stderr, format_string=format_string, bubble=True)
]).push_application()
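Pushed once at import time, this setup applies to the whole process; a small usage sketch:

# Records reach the stderr handler first; bubble=True lets them continue
# down to the DEBUG-level file handler, so both sinks see every record.
log = Logger('snakepot')
log.debug('echoed to stderr and written to logfile.log')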
Example #14
import os

from logbook import Logger, FileHandler

import six

from . import helpers

if six.PY2:
    import sys

    reload(sys)
    sys.setdefaultencoding('utf8')

log = Logger(__file__)
log_file = ".".join(__file__.split(os.sep)[-1].split(".")[:-1])
log_file = os.getcwd() + os.sep + log_file + ".log"
file_handler = FileHandler(log_file, level="DEBUG")
log.handlers.append(file_handler)


class NotLoginError(Exception):
    def __init__(self, result=None):
        super(NotLoginError, self).__init__()
        self.result = result


class WebTrader(object):
    global_config_path = os.path.dirname(__file__) + '/config/global.json'
    config_path = ''

    def __init__(self):
        self.__read_config()
Example #15
    def __init__(self, filament, **kwargs):

        self._start = datetime.now()
        try:
            log_path = os.path.join(os.path.expanduser('~'), '.fibratus',
                                    'fibratus.log')
            FileHandler(log_path, mode='w+').push_application()
            StreamHandler(sys.stdout).push_application()
        except PermissionError:
            panic(
                "ERROR - Unable to open log file for writing due to permission error"
            )

        self.logger = Logger(Fibratus.__name__)

        self._config = YamlConfig()

        self.logger.info('Starting...')

        enable_cswitch = kwargs.pop('cswitch', False)

        self.kcontroller = KTraceController()
        self.ktrace_props = KTraceProps()
        self.ktrace_props.enable_kflags(cswitch=enable_cswitch)
        self.ktrace_props.logger_name = etw.KERNEL_LOGGER_NAME

        enum_handles = kwargs.pop('enum_handles', True)

        self.handle_repository = HandleRepository()
        self._handles = []
        # query for handles on the
        # start of the kernel trace
        if enum_handles:
            self.logger.info('Enumerating system handles...')
            self._handles = self.handle_repository.query_handles()
            self.logger.info('%s handles found' % len(self._handles))
            self.handle_repository.free_buffers()
        self.thread_registry = ThreadRegistry(self.handle_repository,
                                              self._handles)

        self.kevt_streamc = KEventStreamCollector(
            etw.KERNEL_LOGGER_NAME.encode())
        image_skips = self._config.image_skips
        if len(image_skips) > 0:
            self.logger.info("Adding skips for images %s" % image_skips)
            for skip in image_skips:
                self.kevt_streamc.add_skip(skip)

        self.kevent = KEvent(self.thread_registry)
        self.keventq = Queue()

        self._output_classes = dict(console=ConsoleOutput,
                                    amqp=AmqpOutput,
                                    smtp=SmtpOutput,
                                    elasticsearch=ElasticsearchOutput)
        self._outputs = self._construct_outputs()

        if filament:
            filament.keventq = self.keventq
            filament.logger = log_path
            filament.setup_adapters(self._outputs)
        self._filament = filament

        self.fsio = FsIO(self.kevent, self._handles)
        self.hive_parser = HiveParser(self.kevent, self.thread_registry)
        self.tcpip_parser = TcpIpParser(self.kevent)
        self.dll_repository = DllRepository(self.kevent)
        self.context_switch_registry = ContextSwitchRegistry(
            self.thread_registry, self.kevent)

        self.output_kevents = {}
        self.filters_count = 0
Example #16
		line = line.strip().split('=', 1)
		config_dict[line[0]] = line[1]

###################################################
# Adding parse arguments

parser = argparse.ArgumentParser(description='An NGS pipeline')
parser.add_argument('--input_dir', nargs='?')
parser.add_argument('--config', nargs='?')
args = parser.parse_args()


####################################################
# Create a logger

FileHandler('{input_dir}ngs_pipeline.log'.format(input_dir=args.input_dir)).push_application()
log = Logger('pipeline_logger')
log.info('Starting the pipeline as user ' + getpass.getuser())
git_hash = subprocess.check_output(['git', 'rev-parse', 'HEAD']).strip()
log.info('pipeline git hash: '+ git_hash)
log.info('input_dir: '+ args.input_dir)
log.info('config: '+ args.config)
log.info('using python version: '+ sys.version.strip())
log.info('fastqc version: '+ subprocess.check_output([config_dict['fastqc'], '--version']).strip())

####################################################

# Set current time

i = datetime.datetime.now()
date_time_iso = '%s' %i.isoformat()
Example #17
#
#     * Commercial use (commercial use means an individual using this software for any commercial purpose, or a legal entity or other organization using this software for any purpose):
#         Without authorization from Ricequant, no individual may use this software for any commercial purpose (including but not limited to providing, selling, renting, lending, or transferring to third parties this software, derivative works of it, or products or services that reference or borrow from its functionality or source code), and no legal entity or other organization may use it for any purpose; otherwise Ricequant reserves the right to pursue liability for the corresponding intellectual-property infringement.
#         Subject to the foregoing, use of this software must also comply with the Apache 2.0 license; where the Apache 2.0 license conflicts with this license, this license prevails.
#         For the detailed authorization process, please contact [email protected].

import os
import sys
from datetime import date, timedelta

from logbook import StreamHandler, Logger, FileHandler, set_datetime_format

set_datetime_format("local")

LOG_DIR = os.path.join(os.path.expanduser("~"), ".rqams-helper")
if not os.path.exists(LOG_DIR):
    os.mkdir(LOG_DIR)

LOG_PATH = os.path.join(LOG_DIR,
                        "{}.log".format(date.today().strftime("%Y%m%d")))

# StreamHandler(sys.stdout, bubble=True).push_application()
FileHandler(LOG_PATH, bubble=True).push_application()
logger = Logger("RQAMS_HELPER")

LOG_THRESHOLD = (date.today() - timedelta(30)).strftime("%Y%m%d")
for file in os.listdir(LOG_DIR):
    if file.endswith(".log") and file.split(".")[0] < LOG_THRESHOLD:
        logger.info(f"old log file {file} has been removed")
        os.remove(os.path.join(LOG_DIR, file))
Example #18
    parser.add_argument(
        "--cul-path",
        default="/dev/ttyACM0",
        help="Path to usbmodem path of CUL, defaults to /dev/ttyACM0")
    parser.add_argument("--cul-baud",
                        default="38400",
                        help="Baudrate of the cul serial connection.")
    args = parser.parse_args()

    db.create_all()

    if args.detach:

        # init logger
        from logbook import FileHandler
        log_handler = FileHandler('server.log')
        log_handler.push_application()

        import detach
        with detach.Detach(daemonize=True) as d:
            if d.pid:
                print(
                    "started process {} in background with log to server.log".
                    format(d.pid))
            else:
                main(args)
    else:
        # init logger
        from logbook.more import ColorizedStderrHandler
        log_handler = ColorizedStderrHandler()
        log_handler.push_application()
Example #19
def main():
    """
    Copy a folder from Source to Target

    """

    log_filename = os.path.join(
        args.log_dir, 'copy-google-drive-folder-{}.log'.format(
            os.path.basename(time.strftime('%Y%m%d-%H%M%S'))))

    # register some logging handlers
    log_handler = FileHandler(log_filename,
                              mode='w',
                              level=args.log_level,
                              bubble=True)
    stdout_handler = StreamHandler(sys.stdout,
                                   level=args.log_level,
                                   bubble=True)

    with stdout_handler.applicationbound():
        with log_handler.applicationbound():
            log.info("Arguments: {}".format(args))
            start = time.time()
            log.info("starting at {}".format(
                time.strftime('%l:%M%p %Z on %b %d, %Y')))

            credentials = get_credentials()
            http = credentials.authorize(httplib2.Http())
            drive_service = discovery.build('drive', 'v3', http=http)

            # get the files in the specified folder.
            files = drive_service.files()
            request = files.list(
                pageSize=args.page_size,
                q="'{}' in parents".format(args.source_folder_id),
                fields="nextPageToken, files(id, name, mimeType)")

            page_counter = 0
            file_counter = 0
            while request is not None:
                file_page = request.execute(http=http)
                page_counter += 1
                page_file_counter = 0  # reset the paging file counter

                # determine the page at which to start processing.
                if page_counter >= args.start_page:
                    log.info(u"######## Page {} ########".format(page_counter))

                    for this_file in file_page['files']:
                        file_counter += 1
                        page_file_counter += 1
                        log.info(
                            u"#== Processing {} {} file number {} on page {}. {} files processed."
                            .format(this_file['mimeType'], this_file['name'],
                                    page_file_counter, page_counter,
                                    file_counter))

                        # if not a folder
                        if this_file[
                                'mimeType'] != 'application/vnd.google-apps.folder':
                            # Copy the file
                            new_file = {'title': this_file['name']}
                            copied_file = drive_service.files().copy(
                                fileId=this_file['id'],
                                body=new_file).execute()
                            # move it to its new location
                            drive_service.files().update(
                                fileId=copied_file['id'],
                                addParents=args.target_folder_id,
                                removeParents=args.source_folder_id).execute()
                        else:
                            log.info(u"Skipped Folder")

                else:
                    log.info(u"Skipping Page {}".format(page_counter))

                # stop if we have come to the last user specified page
                if args.end_page and page_counter == args.end_page:
                    log.info(
                        u"Finished paging at page {}".format(page_counter))
                    break

                # request the next page of files
                request = files.list_next(request, file_page)

            log.info("Running time: {}".format(
                str(datetime.timedelta(seconds=(round(time.time() -
                                                      start, 3))))))
            log.info("Log written to {}:".format(log_filename))
Example #20
level = env.LOGGING_LEVEL
log = Logger(env.LOGGING_FILE_PREFIX, level=level)

if env.LOGGING_ENABLED:
    if env.LOGGING_STDOUT:
        sh = StreamHandler(sys.stdout, bubble=True)
        sh.formatter = formatter
        # sh.format_string += ' (rank={})'.format(MPI_RANK)
        log.handlers.append(sh)
        # sh.push_application()

    if env.LOGGING_TOFILE:
        fh_directory = env.LOGGING_DIR
        fh_file_prefix = env.LOGGING_FILE_PREFIX
        fh = FileHandler(os.path.join(
            fh_directory, '{}-rank-{}.log'.format(fh_file_prefix, MPI_RANK)),
                         bubble=True,
                         mode=env.LOGGING_FILEMODE.lower())
        fh.formatter = formatter
        # fh.format_string += ' (rank={})'.format(MPI_RANK)
        log.handlers.append(fh)
        log.notice('Initialized (Version {})'.format(utools.__version__))
        # fh.push_application()


class log_entry_exit(object):
    def __init__(self, f):
        self.f = f

    def __call__(self, *args, **kwargs):
        log.debug("entering callable {0}".format(self.f.__name__))
        try:
Example #21
#! /usr/bin/env python
# -*- coding: utf-8 -*-

# __author__ = 'kute'
# __mtime__ = '2016/11/19 12:05'
"""

"""

from logbook import Logger, FileHandler, INFO, Processor
import pandas as pd

formatstr = "{record.time}:{record.message}"
handler = FileHandler(filename="test.log", mode="a", encoding="utf-8", level=INFO, format_string=formatstr)
handler.push_application()
mylog = Logger("processor")

usercommentfile = "/Users/kute/work/docs/netease/data/UserComment"
usercommentary = ["aa", "ab", "ac", "ad"]

userinfofile = "/Users/kute/work/docs/netease/data/UserInfoa{}"
userinfoary = [chr(i) for i in range(ord('a'), ord('v') + 1)]

delfile = "/Users/kute/work/docs/netease/data/del20161008/{}_del20161008.txt"
delary = ["yeah", "126", "163"]
# ['aa', 'ab', ..., 'be']
del163ary = [('a' if i <= ord('z') else 'b') +
             chr(i if i <= ord('z') else i - ord('z') + ord('a') - 1)
             for i in range(ord('a'),
                            ord('z') + 6)]
del163file = "/Users/kute/work/docs/netease/data/del20161008/163_del20161008{}"
Example #22
import os, sys
from os import environ as env
from logbook import Logger, FileHandler, DEBUG, INFO, NOTSET, StreamHandler, set_datetime_format
from zipline.api import get_datetime
import datetime
import linecache
import tracemalloc

# log in local time instead of UTC
set_datetime_format("local")
LOG_ENTRY_FMT = '[{record.time:%Y-%m-%d %H:%M:%S}] {record.level_name}: {record.message}'

logfilename = os.path.join(env["HOME"], "log", "sharadar-zipline.log")
log = Logger('sharadar_db_bundle')
log_file_handler = FileHandler(logfilename, level=DEBUG, bubble=True)
log_file_handler.format_string = LOG_ENTRY_FMT
log.handlers.append(log_file_handler)
log_std_handler = StreamHandler(sys.stdout, level=INFO)
log_std_handler.format_string = LOG_ENTRY_FMT
log.handlers.append(log_std_handler)


def log_top_mem_usage(logger, snapshot, key_type='lineno', limit=10):
    snapshot = snapshot.filter_traces((
        tracemalloc.Filter(False, "<frozen importlib._bootstrap>"),
        tracemalloc.Filter(False, "<unknown>"),
    ))
    top_stats = snapshot.statistics(key_type)

    logger.info("Top %s lines" % limit)
Example #23
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from logbook import Logger, FileHandler
import datetime

from catalyst import run_algorithm
from catalyst.api import (record, symbol, order, order_target_percent)
from catalyst.exchange.utils.stats_utils import extract_transactions
from catalyst.exchange.utils.exchange_utils import get_exchange_symbols

NAMESPACE = 'OLMAR'
log = Logger(NAMESPACE)
file_handler = FileHandler("olmar.log")
# file_handler.push_application()


def initialize(context):
    context.ASSET_NAMES = ['btc_usdt']  #, 'eth_usdt', 'xrp_usdt']

    # import pdb; pdb.set_trace()
    context.assets = [symbol(asset_name) for asset_name in context.ASSET_NAMES]
    context.m = len(context.assets) + 1  # +1 for cash
    context.b_t = np.ones(context.m) / context.m
    context.eps = 1.4  #change epsilon here
    context.init = True
    context.counter = 0
    context.window = 4  # in minutes

    #set_slippage(slippage.VolumeShareSlippage(volume_limit=0.25, price_impact=0, delay=datetime.timedelta(minutes=0)))
    #set_commission(commission.PerShare(cost=0))
Example #24
from tempfile import NamedTemporaryFile
from logbook import FileHandler, Logger

log = Logger('example')  # assumed: defined outside the original excerpt

def run():
    f = NamedTemporaryFile()
    with FileHandler(f.name) as handler:
        for x in range(500):
            log.warning(u'this is handled \x6f')
Example #25
        file_handle = kwargs.get("file_handle")
        logger.debug(
            f"Unlocking the file {file_name} on host {host}, owner={owner}, client={client_name},"
            f" kwargs = {kwargs}")
        file_handle = self._get_file_handle(
            host, export, file_name) if not file_handle else file_handle
        nlm_client = NLMClient(host)
        unlock_arguments = get_packer_arguments("UNLOCK",
                                                caller_name=client_name,
                                                owner=owner,
                                                fh=file_handle,
                                                l_offset=offset,
                                                l_len=length)
        status = nlm_client.unlock(unlock_arguments)
        return NLM4_Stats(status).name

    def _get_file_handle(self, host, export, file_name):
        file_handle = self.exposed_lookup_file(host, export, file_name)
        if not file_handle:
            raise FileNotFound(
                f"{file_name} cannot be found and file_handle was not specified"
            )
        return file_handle


if __name__ == "__main__":
    FileHandler("nfs_client.log").push_application()
    t = ThreadedServer(NFSClientWrapper, port=9999)
    t.daemon = True
    logger.notice("Starting server on port 9999")
    t.start()
Example #26
"""

from logbook import FileHandler, Logger, StreamHandler
import sys

# Two handlers are created.  Notice that other parts of the application
# will create handlers as well; these two are general-purpose handlers
# (one stream handler configured on `sys.stdout`, one file handler set
# on "bui.log", although you can change the file name).
stream = StreamHandler(sys.stdout, encoding="utf-8", level="INFO", bubble=True)
stream.format_string = (
    "[{record.level_name}] {record.channel}: {record.message}")
file = FileHandler("bui.log",
                   encoding="utf-8",
                   level="INFO",
                   delay=True,
                   bubble=True)
file.format_string = (
    "{record.time:%Y-%m-%d %H:%M:%S.%f%z} [{record.level_name}] "
    "{record.channel}: {record.message}")

# At this point, neither handler is used; if we create the logger and
# write to it, nothing will be logged until `push_application`
# is called on the handlers.
logger = Logger("bui")


def start_logging():
    """Start logging, push the logger."""
    stream.push_application()
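The excerpt stops inside start_logging (pushing the file handler presumably follows); once it has run, writes through the logger reach the pushed handlers. A hedged sketch:

start_logging()
logger.info("now routed to sys.stdout through the pushed stream handler")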
Example #27
def main():
    """Shows basic usage of the Google Drive API.

    Creates a Google Drive API service object and outputs the names and IDs
    for up to 10 files.
    """

    log_filename = os.path.join(
        args.log_dir,
        'google-drive-to-s3-{}.log'.format(os.path.basename(time.strftime('%Y%m%d-%H%M%S')))
    )

    # register some logging handlers
    log_handler = FileHandler(
        log_filename,
        mode='w',
        level=args.log_level,
        bubble=True
    )
    stdout_handler = StreamHandler(sys.stdout, level=args.log_level, bubble=True)

    with stdout_handler.applicationbound():
        with log_handler.applicationbound():
            log.info("Arguments: {}".format(args))
            start = time.time()
            log.info("starting at {}".format(time.strftime('%l:%M%p %Z on %b %d, %Y')))

            credentials = get_credentials()
            http = credentials.authorize(httplib2.Http())
            drive_service = discovery.build('drive', 'v3', http=http)

            s3 = boto3.resource('s3')

            # load up a match file if we have one.
            if args.match_file:
                with open(args.match_file, 'r') as f:
                    match_filenames = f.read().splitlines()
            else:
                match_filenames = None

            # get the files in the specified folder.
            files = drive_service.files()
            request = files.list(
                pageSize=args.page_size,
                q="'{}' in parents".format(args.folder_id),
                fields="nextPageToken, files(id, name)"
            )

            # make sure our S3 Key prefix has a trailing slash
            key_prefix = ensure_trailing_slash(args.key_prefix)

            page_counter = 0
            file_counter = 0
            while request is not None:
                file_page = request.execute(http=http)
                page_counter += 1
                page_file_counter = 0  # reset the paging file counter

                # determine the page at which to start processing.
                if page_counter >= args.start_page:
                    log.info(u"######## Page {} ########".format(page_counter))

                    for this_file in file_page['files']:
                        file_counter += 1
                        page_file_counter += 1
                        if we_should_process_this_file(this_file['name'], match_filenames):
                            log.info(u"#== Processing {} file number {} on page {}. {} files processed.".format(
                                this_file['name'],
                                page_file_counter,
                                page_counter,
                                file_counter
                            ))

                            # download the file
                            download_request = drive_service.files().get_media(fileId=this_file['id'])
                            fh = io.BytesIO()  # Using an in memory stream location
                            downloader = MediaIoBaseDownload(fh, download_request)
                            done = False
                            pbar = InitBar(this_file['name'])
                            while done is False:
                                status, done = downloader.next_chunk()
                                pbar(int(status.progress()*100))
                                # print("\rDownload {}%".format(int(status.progress() * 100)))
                            del pbar

                            # upload to bucket
                            log.info(u"Uploading to S3")
                            s3.Bucket(args.bucket).put_object(
                                Key="{}{}".format(key_prefix, this_file['name']),
                                Body=fh.getvalue(),
                                ACL='public-read'
                            )
                            log.info(u"Uploaded to S3")
                            fh.close()  # close the file handle to release memory
                        else:
                            log.info(u"Do not need to process {}".format(this_file['name']))

                # stop if we have come to the last user specified page
                if args.end_page and page_counter == args.end_page:
                    log.info(u"Finished paging at page {}".format(page_counter))
                    break
                # request the next page of files
                request = files.list_next(request, file_page)

            log.info("Running time: {}".format(str(datetime.timedelta(seconds=(round(time.time() - start, 3))))))
            log.info("Log written to {}:".format(log_filename))
Example #28
def logger(self, log_path):
    self._log_path = log_path
    FileHandler(self._log_path).push_application()
    self._logger = Logger(Filament.__name__)
Example #29
def setup_logger(test, path='test.log'):
    test.log_handler = FileHandler(path)
    test.log_handler.push_application()
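A matching teardown, sketched under the assumption that nothing else was pushed after this handler (pops must mirror pushes):

def teardown_logger(test):
    test.log_handler.pop_application()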
Example #30
def push_file_handler(file: Path, level: int = NOTICE, encoding: str = 'utf-8') -> FileHandler:
    handler = FileHandler(str(file.expanduser().absolute()), level=level, encoding=encoding)
    handler.push_application()
    return handler