Code example #1
from logbook import Logger

from catalyst.api import (
    record,
    order,
    symbol
)
from catalyst.exchange.utils.stats_utils import get_pretty_stats
from catalyst.utils.run_algo import run_algorithm

algo_namespace = 'arbitrage_eth_btc'
log = Logger(algo_namespace)


def initialize(context):
    log.info('initializing arbitrage algorithm')

    # The context contains a new "exchanges" attribute which is a dictionary
    # of exchange objects by exchange name. This allows easy access to the
    # exchanges.
    context.buying_exchange = context.exchanges['poloniex']
    context.selling_exchange = context.exchanges['binance']

    context.trading_pair_symbol = 'eth_btc'
    context.trading_pairs = dict()

    # Note the second parameter of the symbol() method
    # Passing the exchange name here returns a TradingPair object including
    # the exchange information. This allows all other operations using
    # the TradingPair to target the correct exchange.
    context.trading_pairs[context.buying_exchange] = \
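The assignment above is truncated in this excerpt. A hypothetical continuation, following the symbol() usage the comments describe (the .name attribute on the exchange objects is an assumption, not from the original):

    # context.trading_pairs[context.buying_exchange] = \
    #     symbol(context.trading_pair_symbol, context.buying_exchange.name)
    # context.trading_pairs[context.selling_exchange] = \
    #     symbol(context.trading_pair_symbol, context.selling_exchange.name)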
Code example #2
import json
import sys
import pickle
from plyvel import DB
from logbook import StreamHandler, Logger
import logging

handler = StreamHandler(sys.stdout, level='WARNING')
handler.push_application()
logger = Logger('data.snapshot')


class Snapshot(object):
    """
    use persistent method (like file, db and so on)
    to store (cache) Output of the Input,
    so we can bypass the known pair to save time/cpu/...
    """

    def __init__(self, dbpath, *args, debug=False, refresh=None, **kwargs):
        """
        :param refresh: ignore data in db and refresh using new value
        """
        super().__init__(*args, **kwargs)
        try:
            self.db = DB(dbpath, create_if_missing=True)
        except Exception as e:
            self.db = None
            raise e
        self.old_key = None
        self.upgrade = False
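The docstring above describes persistent memoization: cache each input's output so known pairs are skipped. The same pattern in a minimal self-contained sketch using plyvel directly (the `cached` helper is illustrative, not part of Snapshot):

    import pickle
    from plyvel import DB

    db = DB('/tmp/snapshot_demo_db', create_if_missing=True)

    def cached(key_obj, compute):
        key = pickle.dumps(key_obj)
        hit = db.get(key)
        if hit is not None:
            return pickle.loads(hit)      # known pair: skip recomputation
        value = compute(key_obj)          # unknown pair: compute once...
        db.put(key, pickle.dumps(value))  # ...and persist it for next time
        return value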
Code example #3
File: web.py Project: sylvainrocheleau/torabot
from urllib.parse import quote
from flask import render_template
from logbook import Logger
from copy import deepcopy
from ..query import get_bangumi, parse
from .. import name, bp

log = Logger(__name__)


@bp.route('/bilibili_<hash>.html')
def bilibili_site_verification(hash):
    return bp.send_static_file('bilibili_%s.html' % hash)


def format_query_result(query):
    query = deepcopy(query)
    query.result.query = parse(query.text)
    return render_template('bilibili/result/%s.html' %
                           query.result.query.method,
                           query=query)


def format_notice_body(notice):
    return {
        'update': format_sp_notice_body,
        'sp_update': format_sp_notice_body,
        'sp_new': format_sp_notice_body,
        'user_new_post': format_post_notice_body,
        'query_new_post': format_post_notice_body,
    }[notice.change.kind](notice)
Code example #4
File: Watchdog.py Project: Mabo-IoT/ziyan
# -*- coding:utf-8 -*-

import ctypes
import inspect
import threading
import time

from logbook import Logger

log = Logger('watchdog')


def watchdog(*args):
    """
    守护线程
    :param args: 
    :return: None
    """
    threads_name = {thread.name for thread in args[0].values()}
    while True:

        threads = set()

        for item in threading.enumerate():
            threads.add(item.name)

        log.debug('\n' + str(threads) + '\n')

        if threads - {'watchdog', 'MainThread'} != threads_name:
            dead_threads = threads_name - (threads -
                                           {'watchdog', 'MainThread'})
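The loop is truncated above. The liveness test it performs reduces to simple set arithmetic; a self-contained sketch (the restart handling that presumably follows is not shown in the excerpt):

    import threading

    def find_dead_threads(expected_names):
        """Expected thread names with no live thread behind them."""
        alive = {t.name for t in threading.enumerate()}
        return expected_names - (alive - {'watchdog', 'MainThread'})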
Code example #5
from contextlib2 import ExitStack

from logbook import Logger, Processor
from pandas.tslib import normalize_date

from zipline.utils.api_support import ZiplineAPI

from zipline.finance import trading
from zipline.protocol import (
    BarData,
    SIDData,
    DATASOURCE_TYPE
)

log = Logger('Trade Simulation')


class AlgorithmSimulator(object):

    EMISSION_TO_PERF_KEY_MAP = {
        'minute': 'minute_perf',
        'daily': 'daily_perf'
    }

    def __init__(self, algo, sim_params):

        # ==============
        # Simulation
        # Param Setup
        # ==============
Code example #6
import errno
from collections import namedtuple

from logbook import Logger

from qdb.comm import fmt_msg, fmt_err_msg


# errno's that are safe to ignore when killing a session.
safe_errnos = (
    errno.EBADF,
    errno.ECONNRESET,
    errno.EPIPE,
)

# Symbolic constant for the attach_timeout case.
ALLOW_ORPHANS = 0


log = Logger('QdbSessionStore')


class DebuggingSession(namedtuple('DebuggingSessionBase', ['tracer',
                                                           'local_pid',
                                                           'pause_signal',
                                                           'clients',
                                                           'both_sides_event',
                                                           'timestamp'])):
    """
    A DebuggingSession stores all the information about a task that is being
    debugged, including the socket to the client, the websockets to the
    client, and the timers that manage new connections.
    """
    def __new__(cls,
                tracer=None,
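The signature above is truncated, but the pattern is clear: subclass a namedtuple and override __new__ to give every field a default. The idiom in miniature (a self-contained sketch):

    from collections import namedtuple

    class Point(namedtuple('PointBase', ['x', 'y'])):
        def __new__(cls, x=0, y=0):
            # immutable record with default field values
            return super(Point, cls).__new__(cls, x, y)

    print(Point())   # Point(x=0, y=0)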
Code example #7
    def __init__(self, filament, **kwargs):

        self._start = datetime.now()
        try:
            log_path = os.path.join(os.path.expanduser('~'), '.fibratus',
                                    'fibratus.log')
            FileHandler(log_path, mode='w+').push_application()
            StreamHandler(sys.stdout, bubble=True).push_application()
        except PermissionError:
            panic(
                "ERROR - Unable to open log file for writing due to permission error"
            )

        self.logger = Logger(Fibratus.__name__)

        self._config = YamlConfig()

        self.logger.info('Starting Fibratus...')

        enable_cswitch = kwargs.pop('cswitch', False)

        self.kcontroller = KTraceController()
        self.ktrace_props = KTraceProps()
        self.ktrace_props.enable_kflags(cswitch=enable_cswitch)
        self.ktrace_props.logger_name = etw.KERNEL_LOGGER_NAME

        enum_handles = kwargs.pop('enum_handles', True)

        self.handle_repository = HandleRepository()
        self._handles = []
        # query for handles on the
        # start of the kernel trace
        if enum_handles:
            self.logger.info('Enumerating system handles...')
            self._handles = self.handle_repository.query_handles()
            self.logger.info('%s handles found' % len(self._handles))
            self.handle_repository.free_buffers()

        image_meta_config = self._config.image_meta
        self.image_meta_registry = ImageMetaRegistry(
            image_meta_config.enabled, image_meta_config.imports,
            image_meta_config.file_info)

        self.thread_registry = ThreadRegistry(self.handle_repository,
                                              self._handles,
                                              self.image_meta_registry)

        self.kevt_streamc = KEventStreamCollector(
            etw.KERNEL_LOGGER_NAME.encode())
        skips = self._config.skips
        image_skips = skips.images if 'images' in skips else []
        if len(image_skips) > 0:
            self.logger.info("Adding skips for images %s" % image_skips)
            for skip in image_skips:
                self.kevt_streamc.add_skip(skip)

        self.kevent = KEvent(self.thread_registry)

        self._output_classes = dict(console=ConsoleOutput,
                                    amqp=AmqpOutput,
                                    smtp=SmtpOutput,
                                    elasticsearch=ElasticsearchOutput)
        self._outputs = self._construct_outputs()
        self.output_aggregator = OutputAggregator(self._outputs)

        if filament:
            filament.logger = self.logger
            filament.do_output_accessors(self._outputs)
        self._filament = filament

        self.fsio = FsIO(self.kevent, self._handles)
        self.hive_parser = HiveParser(self.kevent, self.thread_registry)
        self.tcpip_parser = TcpIpParser(self.kevent)
        self.dll_repository = DllRepository(self.kevent)
        self.context_switch_registry = ContextSwitchRegistry(
            self.thread_registry, self.kevent)

        self.output_kevents = {}
        self.filters_count = 0
Code example #8
File: text_polyglot.py Project: guiscaranse/beaver
import os
import sys

import nltk
from logbook import Logger, StreamHandler
from nltk.corpus import stopwords
from polyglot.downloader import downloader
from polyglot.text import Text

from beaver.config import settings
from beaver.util import normalize

if "BEAVER_DEBUG" in os.environ:
    StreamHandler(sys.stdout).push_application()
log = Logger('Lumberjack')


def check_and_download(package: str) -> bool:
    """
    Verifica se determinado pacote do Polyglot está instalado, caso não, será instalado automaticamente
    :param package: pacote a ser procurado
    :return: Verdadeiro sempre. Se houverem erros uma excessão será levantada
    """
    if downloader.is_installed(package) == downloader.NOT_INSTALLED:
        log.info(package.split(".")[0] + " não instalado, instalando.")
        downloader.download(package)
        log.info(package.split(".")[0] + " instalado.")
    return True
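
# Usage sketch (the package name is illustrative, not from the original):
#   check_and_download('embeddings2.en')  # downloads once; no-op afterwards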


def verify_polyglot():
Code example #9
import scipy.io as sio
import numpy as np
import emg_features
from itertools import product
from collections import namedtuple
#from joblib import Parallel, delayed
import argparse
import warnings
from tqdm import tqdm
from logbook import Logger, StderrHandler, NullHandler, FileHandler
#import threading
#import multiprocessing


Combo = namedtuple('Combo', ['subject', 'gesture', 'trial'], verbose=False)
log = Logger('Extract features')

config = {}
config['ninapro-db1'] = dict(
    subjects=list(range(0, 27)),
    gestures=list(range(1, 53)),
    trials=list(range(10)),
    framerate=100
)
config['ninapro-db2'] = dict(
    subjects=list(range(40)),
    gestures=list(range(50)),
    trials=list(range(6)),
    framerate=2000
)
config['ninapro-db3'] = dict(
Code example #10
import os

import logbook
from logbook import (Logger, ColorizedStderrHandler,
                     TimedRotatingFileHandler)

# NOTE: LOG_NAME and log_type (a formatter) are defined elsewhere in the
# original module; this excerpt keeps them as-is.

LOG_LEVEL = logbook.base.INFO
# Directory for log files
LOG_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), r'logs')
if not os.path.exists(LOG_DIR):
    os.makedirs(LOG_DIR)
# Log to the console
log_std = ColorizedStderrHandler(bubble=True, level=LOG_LEVEL)
log_std.formatter = log_type
# Log to a file (rotated daily)
log_file = TimedRotatingFileHandler(os.path.join(LOG_DIR, '%s.log' % LOG_NAME),
                                    date_format='%Y-%m-%d',
                                    bubble=True,
                                    level=LOG_LEVEL,
                                    encoding='utf-8')
log_file.formatter = log_type

# Script-level logger
run_log = Logger(LOG_NAME)


def init_logger():
    logbook.set_datetime_format("local")
    run_log.handlers = []
    run_log.handlers.append(log_file)
    run_log.handlers.append(log_std)
    # Return the configured logger; the original returned None here, which
    # left the module-level `logger` below bound to None.
    return run_log


# Instantiated once at import time.
logger = init_logger()
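Usage sketch: with both handlers attached directly to run_log, one call emits to the colorized console handler and the dated file (assuming LOG_NAME is defined upstream):

    run_log.info('service started')   # -> stderr + logs/<LOG_NAME>.log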
Code example #11
File: pylog.py Project: Jizhongpeng/spider163
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
from spider163.utils import config
from logbook import FileHandler, Logger

path = config.get_path()
log_handler = FileHandler(filename=path + '/spider163.log')
log_handler.push_application()
log = Logger("")


def Log(msg):
    log.warn(msg)
Code example #12
File: cutwhite.py Project: valour01/pdf-cut-white
import argparse
import miner
import os
import sys
import logging
import traceback

from logbook import Logger, StreamHandler
import PyPDF2 as pdflib
from PyPDF2 import PdfFileWriter, PdfFileReader

handler = StreamHandler(sys.stdout, level='INFO')
handler.push_application()
logger = Logger('cutwhite')

parser = argparse.ArgumentParser()
parser.add_argument("-i", help="input file", action="store",
                    default='', type=str, dest="input")
parser.add_argument("-o", help="output file", action="store",
                    default='', type=str, dest="output")
parser.add_argument("-id", help="input directory", action="store",
                    default='', type=str, dest="indir")
parser.add_argument("-od", help="output directory", action="store",
                    default='', type=str, dest="outdir")
parser.add_argument("-t", "--test", help="run test",
                    action="store_true", dest="test")
parser.add_argument("--ignore", help="ignore global",
                    action="store", type=int, default=0, dest="ignore")
parser.add_argument("--verbose", help="choose verbose (DEBUG)",
                    action="store_true", default=False, dest="verbose")
# parser.add_argument(nargs=argparse.REMAINDER, dest="value")
Code example #13
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from logbook import Logger

from catalyst import run_algorithm
from catalyst.api import (record, symbol, order_target_percent,
                          get_open_orders)
from catalyst.exchange.utils.stats_utils import extract_transactions

NAMESPACE = 'dual_moving_average'
log = Logger(NAMESPACE)


def initialize(context):
    context.i = 0
    context.asset = symbol('ltc_usd')
    context.base_price = None


def handle_data(context, data):
    # define the windows for the moving averages
    short_window = 2
    long_window = 2

    # Skip as many bars as long_window to properly compute the average
    context.i += 1
    if context.i < long_window:
        return

    # Compute moving averages calling data.history() for each
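The excerpt stops at the comment above. A hypothetical continuation using the data.history() call it names (the field and frequency strings are assumptions):

    # short_mavg = data.history(context.asset, 'price',
    #                           bar_count=short_window, frequency='1T').mean()
    # long_mavg = data.history(context.asset, 'price',
    #                          bar_count=long_window, frequency='1T').mean()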
Code example #14
File: ak_lib.py Project: HeathKang/ziyan_dam
# -*- coding: utf-8 -*-

from __future__ import absolute_import

import logging
import time
import struct
import socket

import threading

import binascii

from logbook import Logger

log = Logger('ak_lib')

STX = 0x02    # ASCII start-of-text
ETX = 0x03    # ASCII end-of-text
BLANK = 0x20  # ASCII space
K = ord('K')  # the byte value of 'K'

AK_CONNECTED = 1
AK_DISCONNECTED = 0


class AKClient(object):
    """ AK Client """
    def __init__(self, conf):
        """ init """
Code example #15
File: fixtures.py Project: cub-/zipline
    def init_class_fixtures(cls):
        super(WithLogger, cls).init_class_fixtures()
        cls.log = Logger()
        cls.log_handler = cls.enter_class_context(
            cls.make_log_handler().applicationbound(),
        )
Code example #16
File: channel-bot.py Project: yokensei/channel-bot
# coding:utf-8
import os
import sys
import time
import json
from ConfigParser import SafeConfigParser, MissingSectionHeaderError

from slackclient import SlackClient
from logbook import Logger
from logbook import RotatingFileHandler
from logbook import StreamHandler

SLACK_SECTION_NAME = "slack"
logger = Logger("channel-bot")


def is_channels_message(res, channel_list):
    if not "channel" in res:
        return False
    exist_list = [
        channel for channel in channel_list if res["channel"] == channel["id"]
    ]
    return (res["type"] == "message") and exist_list and ("text" in res) and (
        command == res["text"])


def is_channel_created_event(res):
    return res["type"] == "channel_created"


def is_direct_message(res, im_list):
Code example #17
File: server.py Project: Fuyukai/repo-notifier
import sys
import time
from wsgiref.simple_server import make_server, WSGIRequestHandler

import arrow
from logbook import Logger, StreamHandler
from logbook.compat import redirect_logging
from pyramid.config import Configurator
from pyramid.request import Request
from pyramid.response import Response
from pyramid.view import view_config
from slacker import Slacker

# Enable logging
redirect_logging()
StreamHandler(sys.stderr).push_application()
logger = Logger("slack-repo-notif")

# Setup the slack handler
try:
    slack = Slacker(sys.argv[1])
except IndexError:
    print("Must pass your slack bot token on the command line.",
          file=sys.stdout)

try:
    report_channel = sys.argv[2]
except IndexError:
    report_channel = "repo-report"
    logger.warning("Using default report channel repo-report")

Code example #18
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
from asyncio import get_event_loop

from logbook import Logger
from monkq.config import Setting
from monkq.context import Context
from monkq.ticker import FrequencyTicker

from .log import core_log_group

logger = Logger('runner')
core_log_group.add_logger(logger)


class Runner():
    def __init__(self, settings: Setting) -> None:
        self.setting = settings

        self.context = Context(settings)
        self.context.setup_context()

        self.start_datetime = settings.START_TIME  # type: ignore
        self.end_datetime = settings.END_TIME  # type: ignore

        self.ticker = FrequencyTicker(self.start_datetime, self.end_datetime, '1m')
Code example #19
    def __init__(self, user):
        self.user = user
        self.log = Logger(os.path.basename(__file__))
        StreamHandler(sys.stdout).push_application()
Code example #20
File: engine.py Project: yssource/tdx
from pytdx.reader import CustomerBlockReader, GbbqReader
from tdx.utils.util import fillna
import pandas as pd
from functools import wraps
import gevent
from tdx.utils.memoize import lazyval
from six import PY2

if not PY2:
    from concurrent.futures import ThreadPoolExecutor

from tdx.config import *

from logbook import Logger

logger = Logger('engine')


def stock_filter(code):
    # `code` is a (market, symbol) pair; market 1 appears to be Shanghai,
    # where A-share symbols start with '6'; otherwise keep Shenzhen ChiNext
    # ('300...') and main-board ('00...') symbols.
    if code[0] == 1:
        if code[1][0] == '6':
            return True
    else:
        if code[1].startswith("300") or code[1][:2] == '00':
            return True
    return False


class SecurityNotExists(Exception):
    pass
Code example #21
File: loader.py Project: yaelmi3/slash
from logbook import Logger
# NOTHING sentinel used below; slash imports it from the `sentinels`
# package (an assumption here, since this excerpt omits its imports).
from sentinels import NOTHING

from .ctx import context
from .core.local_config import LocalConfig
from .core.markers import repeat_marker
from . import hooks
from .core.runnable_test import RunnableTest
from .core.test import Test, TestTestFactory, is_valid_test_name
from .core.function_test import FunctionTestFactory
from .exception_handling import handling_exceptions, mark_exception_handled, get_exception_frame_correction
from .exceptions import CannotLoadTests, SlashInternalError
from .core.runnable_test_factory import RunnableTestFactory
from .utils.pattern_matching import Matcher
from .utils.python import check_duplicate_functions
from .resuming import ResumedTestData
from .utils.interactive import generate_interactive_test

_logger = Logger(__name__)


class Loader(object):

    """
    Provides iteration interfaces to load runnable tests from various places
    """

    def __init__(self):
        super(Loader, self).__init__()
        self._local_config = LocalConfig()
        self._duplicate_funcs = set()

    _cached_matchers = NOTHING
Code example #22
File: bep_handler.py Project: HeathKang/ziyan_dam
# -*- coding: utf-8 -*-
"""
plugin for BEP AK Client

"""
from __future__ import absolute_import

import os
import sys

import time

from logbook import Logger

log = Logger('ak_chk')

from maboio.lib.utils import fn_timer

# from lib.sharedq import SharedQ
from ziyan.lib.exceptions import NoDataException

from ziyan.lib.ak_lib import AKClient
from ziyan.lib.check_base import CheckBase


class BEPCheck(CheckBase):
    def __init__(self, plugin):

        # log.debug(__file__)
        #
Code example #23
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from time import sleep

from logbook import Logger
import pandas as pd

BAR = 0
SESSION_START = 1
SESSION_END = 2
MINUTE_END = 3
BEFORE_TRADING_START_BAR = 4

log = Logger('Realtime Clock')


class RealtimeClock(object):
    """Realtime clock for live trading.

    This class is a drop-in replacement for
    :class:`zipline.gens.sim_engine.MinuteSimulationClock`.
    The key difference between the two is that the RealtimeClock's event
    emission is synchronized to the (broker's) wall time clock, while
    MinuteSimulationClock yields a new event on every iteration (regardless of
    wall clock).

    The :param:`time_skew` parameter represents the time difference between
    the Broker and the live trading machine's clock.
    """
Code example #24
from logbook import Logger

from zipline.errors import (
    MultipleSymbolsFound,
    RootSymbolNotFound,
    SidNotFound,
    SymbolNotFound,
    MapAssetIdentifierIndexError,
)
from zipline.assets import (
    Asset,
    Equity,
    Future,
)
from zipline.assets.asset_writer import split_delimited_symbol

log = Logger('assets.py')

# A set of fields that need to be converted to strings before building an
# Asset to avoid unicode fields
_asset_str_fields = frozenset({
    'symbol',
    'asset_name',
    'exchange',
})

# A set of fields that need to be converted to timestamps in UTC
_asset_timestamp_fields = frozenset({
    'start_date',
    'end_date',
    'first_traded',
    'notice_date',
Code example #25
File: executor.py Project: Treestanx/pylivetrader-1
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import datetime
from contextlib import ExitStack
from logbook import Logger

from pylivetrader.executor.realtimeclock import (RealtimeClock, BAR,
                                                 SESSION_START,
                                                 BEFORE_TRADING_START_BAR)
from pylivetrader.data.bardata import BarData
from pylivetrader.misc.api_context import LiveTraderAPI

log = Logger('Executor')


class AlgorithmExecutor:
    def __init__(self, algo, data_portal):

        self.data_portal = data_portal
        self.algo = algo

        # This object is the way that user algorithms interact with OHLCV data,
        # fetcher data, and some API methods like `data.can_trade`.
        self.current_data = BarData(
            data_portal,
            self.algo.data_frequency,
        )
Code example #26
    def __init__(self,
                 episode_duration=None,
                 timeframe=1,
                 generator_fn=base_random_generator_fn,
                 generator_parameters_fn=base_generator_parameters_fn,
                 generator_parameters_config=None,
                 spread_generator_fn=None,
                 spread_generator_parameters=None,
                 name='BaseSyntheticDataGenerator',
                 data_names=('default_asset', ),
                 parsing_params=None,
                 target_period=-1,
                 global_time=None,
                 task=0,
                 log_level=WARNING,
                 _nested_class_ref=None,
                 _nested_params=None,
                 **kwargs):
        """

        Args:
            episode_duration:               dict, duration of episode in days/hours/mins
            generator_fn:                   callable, should return generated data as 1D np.array
            generator_parameters_fn:        callable, should return dictionary of generator_fn kwargs
            generator_parameters_config:    dict, generator_parameters_fn args
            spread_generator_fn:            callable, should return values of spread to form {High, Low}
            spread_generator_parameters:    dict, spread_generator_fn args
            timeframe:                      int, data periodicity in minutes
            name:                           str
            data_names:                     iterable of str
            target_period:                  int or dict; if set to -1, disables `test` sampling
            global_time:                    dict {y, m, d} to set custom global time (only for plotting)
            task:                           int
            log_level:                      logbook.Logger level
            **kwargs:

        """
        # Logging:
        self.log_level = log_level
        self.task = task
        self.name = name
        self.filename = self.name + '_sample'
        self.target_period = target_period

        self.data_names = data_names
        self.data_name = self.data_names[0]
        self.sample_instance = None
        self.metadata = {
            'sample_num': 0,
            'type': None,
            'parent_sample_type': None
        }

        self.data = None
        self.data_stat = None

        self.sample_num = 0
        self.is_ready = False

        if _nested_class_ref is None:
            self.nested_class_ref = BaseDataGenerator
        else:
            self.nested_class_ref = _nested_class_ref

        if _nested_params is None:
            self.nested_params = dict(
                episode_duration=episode_duration,
                timeframe=timeframe,
                generator_fn=generator_fn,
                generator_parameters_fn=generator_parameters_fn,
                generator_parameters_config=generator_parameters_config,
                name=name,
                data_names=data_names,
                task=task,
                log_level=log_level,
                _nested_class_ref=_nested_class_ref,
                _nested_params=_nested_params,
            )
        else:
            self.nested_params = _nested_params

        StreamHandler(sys.stdout).push_application()
        self.log = Logger('{}_{}'.format(self.name, self.task),
                          level=self.log_level)

        # Default sample time duration:
        if episode_duration is None:
            self.episode_duration = dict(
                days=0,
                hours=23,
                minutes=55,
            )
        else:
            self.episode_duration = episode_duration

        # Btfeed parsing setup:
        if parsing_params is None:
            self.parsing_params = dict(names=['ask', 'bid', 'mid'],
                                       datetime=0,
                                       timeframe=1,
                                       open='mid',
                                       high='ask',
                                       low='bid',
                                       close='mid',
                                       volume=-1,
                                       openinterest=-1)
        else:
            self.parsing_params = parsing_params

        self.columns_map = {
            'open': 'mean',
            'high': 'maximum',
            'low': 'minimum',
            'close': 'mean',
            'bid': 'minimum',
            'ask': 'maximum',
            'mid': 'mean',
        }
        self.nested_params['parsing_params'] = self.parsing_params

        for key, value in self.parsing_params.items():
            setattr(self, key, value)

        # base data feed related:
        self.params = {}
        if global_time is None:
            self.global_time = datetime.datetime(year=2018, month=1, day=1)
        else:
            self.global_time = datetime.datetime(**global_time)

        self.global_timestamp = self.global_time.timestamp()

        # Infer time indexes and sample number of records:
        self.train_index = pd.timedelta_range(
            start=datetime.timedelta(days=0, hours=0, minutes=0),
            end=datetime.timedelta(**self.episode_duration),
            freq='{}min'.format(self.timeframe))
        self.test_index = pd.timedelta_range(
            start=self.train_index[-1] +
            datetime.timedelta(minutes=self.timeframe),
            periods=len(self.train_index),
            freq='{}min'.format(self.timeframe))
        self.train_index += self.global_time
        self.test_index += self.global_time
        self.episode_num_records = len(self.train_index)

        self.generator_fn = generator_fn
        self.generator_parameters_fn = generator_parameters_fn

        if generator_parameters_config is not None:
            self.generator_parameters_config = generator_parameters_config

        else:
            self.generator_parameters_config = {}

        self.spread_generator_fn = spread_generator_fn

        if spread_generator_parameters is not None:
            self.spread_generator_parameters = spread_generator_parameters

        else:
            self.spread_generator_parameters = {}
Code example #27
    QdbAuthenticationError,
    QdbPrognEndsInStatement,
)
import sys
from contextlib import contextmanager

from logbook import Logger

from qdb.utils import Timeout, progn

try:
    from cStringIO import StringIO
except ImportError:
    from StringIO import StringIO

try:
    import cPickle as pickle
except ImportError:
    import pickle

log = Logger('Qdb')


@contextmanager
def capture_output():
    """
    Captures stdout and stderr for the duration of the body.
    example
    with capture_output() as (out, err):
        print 'hello'
    """
    old_stdout = sys.stdout
    old_stderr = sys.stderr
    sys.stdout = StringIO()
    sys.stderr = StringIO()
    try:
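The function is truncated at the try above. A hypothetical completion, implied by the docstring and the setup already shown: yield the buffers, then restore the real streams.

    #     yield sys.stdout, sys.stderr
    # finally:
    #     sys.stdout = old_stdout
    #     sys.stderr = old_stderr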
Code example #28
    disallowed_in_before_trading_start,
)
from pylivetrader.misc.pd_utils import normalize_date
from pylivetrader.misc.preprocess import preprocess
from pylivetrader.misc.input_validation import (
    coerce_string,
    ensure_upper_case,
    expect_types,
    expect_dtypes,
    optional,
)
from pylivetrader.statestore import StateStore

from logbook import Logger, lookup_level

log = Logger('Algorithm')


class Algorithm(object):
    """Provides algorithm compatible with zipline.
    """
    def __setattr__(self, name, value):
        # Reject names that overlap with API method names
        if hasattr(self, 'api_methods') and name in self.api_methods:
            raise AttributeError(
                'Cannot set {} on context object as it is the name of '
                'an API method.'.format(name))
        else:
            object.__setattr__(self, name, value)
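
    # Behaviour sketch of the guard above (illustrative; assumes 'order' is
    # one of the registered api_methods):
    #   algo.order = 0   # AttributeError: name of an API method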

    def __init__(self, *args, **kwargs):
Code example #29
    ORDER_STATUS as ZP_ORDER_STATUS,
)
from pylivetrader.finance.execution import (
    MarketOrder,
    LimitOrder,
    StopOrder,
    StopLimitOrder,
)
from pylivetrader.misc.pd_utils import normalize_date
from pylivetrader.errors import SymbolNotFound
from pylivetrader.assets import Equity

import pandas as pd
from logbook import Logger


log = Logger('Alpaca')

NY = 'America/New_York'

end_offset = pd.Timedelta('1000 days')
one_day_offset = pd.Timedelta('1 day')


def skip_http_error(statuses):
    '''
    A decorator to wrap with try..except to swallow
    specific HTTP errors.

    @skip_http_error((404, 503))
    def fetch():
        ...
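A self-contained sketch of the decorator the docstring describes (not pylivetrader's code; HTTPError below is a stand-in exception type):

    from functools import wraps

    class HTTPError(Exception):
        def __init__(self, status):
            self.status = status

    def skip_http_error(statuses):
        def decorator(fn):
            @wraps(fn)
            def wrapped(*args, **kwargs):
                try:
                    return fn(*args, **kwargs)
                except HTTPError as e:
                    if e.status not in statuses:
                        raise            # swallow only the listed statuses
            return wrapped
        return decorator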
Code example #30
File: train_semimyo.py Project: wind666/semimyo
from __future__ import division
import click
import mxnet as mx
from logbook import Logger
from pprint import pformat
import os
from .utils import packargs, Bunch
from .module_semimyo import Module
from .data import Preprocess, Dataset
from . import Context, constant

logger = Logger('semimyo')


@click.group()
def cli():
    pass


@cli.command()
@click.option('--batch-norm-momentum',
              type=float,
              default=constant.BATCH_NORM_MOMENTUM)
@click.option('--batch-norm-use-global-stats/--no-batch-norm-use-global-stats',
              default=constant.BATCH_NORM_USE_GLOBAL_STATS)
@click.option('--cudnn-tune',
              type=click.Choice(['off', 'limited_workspace', 'fastest']),
              default='fastest')
@click.option('--symbol', default='semimyo')
@click.option('--shared-net')
@click.option('--gesture-net')