Code example #1
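# (Context not shown in this excerpt: this __init__ belongs to the
# BitMEXWebsocket class instantiated in code example #9; `u` is assumed to be
# the project's utils module and `settings` its configuration module.)
class BitMEXWebsocket: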
    def __init__(self, endpoint=settings.BASE_URL, symbol=settings.SYMBOL,
                 sub_topic=settings.SUB_TOPICS, api_key=settings.API_KEY,
                 api_secret=settings.API_SECRET):
        self.logger = u.setup_custom_logger(__name__)
        self.logger.debug("Initializing WebSocket.")

        self.endpoint = endpoint
        self.symbol = symbol
        self.sub_topic = [sub_topic] if not isinstance(sub_topic, list) else sub_topic

        if api_key is not None and api_secret is None:
            raise ValueError('api_secret is required if api_key is provided')
        if api_key is None and api_secret is not None:
            raise ValueError('api_key is required if api_secret is provided')

        self.api_key = api_key
        self.api_secret = api_secret

        self.data = {}
        self.keys = {}
        self.exited = False

        # We can subscribe right in the connection querystring, so let's build that.
        # Subscribe to all pertinent endpoints
        wsURL = self.__get_url()
        self.logger.info("Connecting to %s" % wsURL)
        self.__connect(wsURL, symbol)
        self.logger.info('Connected to WS.')
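Every example on this page calls a setup_custom_logger(name) helper from a project-local utils module that the page never shows. A minimal sketch of what such a helper conventionally looks like, assuming console output with a timestamped format (not any one project's actual code):

import logging


def setup_custom_logger(name):
    # Timestamped, level-tagged output to the console.
    formatter = logging.Formatter(fmt='%(asctime)s %(levelname)-8s %(message)s',
                                  datefmt='%Y-%m-%d %H:%M:%S')
    handler = logging.StreamHandler()
    handler.setFormatter(formatter)

    logger = logging.getLogger(name)
    logger.setLevel(logging.DEBUG)
    logger.addHandler(handler)
    return logger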
Code example #2
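# (Excerpt: assumes `import argparse`, `import os`, `import tensorflow as tf`,
# the project's config helpers -- cfg, cfg_from_file, cfg_from_list,
# assert_and_infer_cfg, print_cfg, per the `see config.py` hint below -- and
# its utils module. ConfigProto/GPUOptions are TF1-style APIs.)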
def main(_):
    parser = argparse.ArgumentParser(
        description='Classification model training')
    parser.add_argument('--config_file', type=str, default=None,
                        help='Optional config file for params')
    parser.add_argument('opts', help='see config.py for all options',
                        default=None, nargs=argparse.REMAINDER)

    args = parser.parse_args()
    if args.config_file is not None:
        cfg_from_file(args.config_file)
    if args.opts:  # REMAINDER yields [] when empty, so test truthiness, not None
        cfg_from_list(args.opts)

    assert_and_infer_cfg()
    print_cfg()

    os.environ["CUDA_VISIBLE_DEVICES"] = str(cfg.GPU_ID)
    logger = utils.setup_custom_logger('root')
    tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR)

    tf_config = tf.ConfigProto(device_count=dict(
        GPU=1), gpu_options=tf.GPUOptions(allow_growth=True))
    tf.enable_resource_variables()

    train(tf_config, logger)
    test(tf_config, logger)
Code example #3
def build_static_matrix(start_index, stop_index, name, layer):
    '''
    --------------------------------------------------------------------------------------------------
    Extracts the words of the PMC-w2v vocabulary through the BioBERT contextual model.

    It is an accessory method of the context2static method implemented in contextual: it handles
    the model processing, the loading, and the logger prints.
    --------------------------------------------------------------------------------------------------
    '''
    # Logger instantiation for the timing prints that follow
    logger = utils.setup_custom_logger('myapp')

    # Load the PMC-w2v embedding vocabulary: to avoid loading the full model
    # via gensim, a list of all the vocabulary words was precomputed and stored
    # vocabs = utils.extract_w2v_vocab(w2v)
    a = datetime.datetime.now().replace(microsecond=0)
    vocabs = utils.inputs_load('Utilities/PMC_w2v_vocabs')
    logger.info('PMC-w2v vocabulary (previously stored) is loaded in ' +
                str(datetime.datetime.now().replace(microsecond=0) - a) + '\n')

    # Load the contextual BioBERT embedding
    a = datetime.datetime.now().replace(microsecond=0)
    tokenizer = AutoTokenizer.from_pretrained(
        'Embeddings/contextual/biobert-base-cased-v1.1',
        output_hidden_states=True,
        cache_dir=None)
    model = AutoModel.from_pretrained(
        'Embeddings/contextual/biobert-base-cased-v1.1',
        output_hidden_states=True,
        cache_dir=None)
    logger.info('The loading time for BioBERT is: ' +
                str(datetime.datetime.now().replace(microsecond=0) - a) + '\n')

    # Load tokenized input
    a = datetime.datetime.now().replace(microsecond=0)
    inputs = contextual.tokenize_words(tokenizer, vocabs, start_index,
                                       stop_index)
    logger.info('The tokenization time is: ' +
                str(datetime.datetime.now().replace(microsecond=0) - a) + '\n')

    # Build the context2static matrix
    a = datetime.datetime.now().replace(microsecond=0)
    tmp = contextual.context2static(model,
                                    inputs['input_ids'],
                                    vocabs,
                                    start_index,
                                    stop_index,
                                    name=name,
                                    n_layer=layer,
                                    log=logger)
    logger.info('The context2static conversion of matrix "' +
                str(start_index) + '--' + str(stop_index) + name +
                '" took: ' +
                str(datetime.datetime.now().replace(microsecond=0) - a) + '\n')
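The elapsed-time pattern above (store datetime.datetime.now().replace(microsecond=0), then subtract it inside the log call) repeats four times. A small context manager, hypothetical rather than part of the source project, would collapse each pair into one line:

import datetime
from contextlib import contextmanager


@contextmanager
def log_elapsed(logger, label):
    # Log wall-clock time, at seconds resolution, for the enclosed block.
    start = datetime.datetime.now().replace(microsecond=0)
    yield
    logger.info('%s: %s\n', label,
                datetime.datetime.now().replace(microsecond=0) - start)

# Usage:
#     with log_elapsed(logger, 'BioBERT loading time'):
#         model = AutoModel.from_pretrained(...)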
Code example #4
def set_control_params(ctrl, args, graph):
    ctrl.refine_model.double_base = args.double_base
    ctrl.refine_model.learning_rate = args.learning_rate
    ctrl.refine_model.self_weight = args.self_weight

    ctrl.coarsen_level = args.coarsen_level
    ctrl.coarsen_to = max(1, graph.node_num //
                          (2**ctrl.coarsen_level))  # rough estimation.
    ctrl.embed_dim = args.embed_dim
    ctrl.basic_embed = args.basic_embed
    ctrl.refine_type = args.refine_type
    ctrl.data = args.data
    ctrl.workers = args.workers
    ctrl.max_node_wgt = int((5.0 * graph.node_num) / ctrl.coarsen_to)  # cap on a single coarse node's weight
    ctrl.logger = setup_custom_logger('MILE')

    if ctrl.debug_mode:
        ctrl.logger.setLevel(logging.DEBUG)
    else:
        ctrl.logger.setLevel(logging.INFO)
    ctrl.logger.info(args)
Code example #5
import argparse
import os
import tensorflow as tf
from model import LicensePlatesCNN
from utils import setup_custom_logger

log = setup_custom_logger(os.path.basename(__file__))


# Note: `eval` shadows the Python builtin of the same name; it works, but a
# name such as `evaluate` would avoid the collision.
def eval(test_data,
         store_results_path,
         input_channels=3,
         checkpoint_dir="checkpoint",
         summary_dir="summary"):
    with tf.Session() as sess:
        cnn = LicensePlatesCNN(sess=sess,
                               checkpoint_dir=checkpoint_dir,
                               summary_dir=summary_dir,
                               input_channels=input_channels)

        # Load the trained weights
        if not cnn.load():
            log.error("Unable to restore model from checkpoint")
            return

        # Feed test data through network
        cnn.evaluate(test_data, store_results_path=store_results_path)


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
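    # (The source listing is cut off here. A plausible completion wiring the
    # CLI to eval(); every flag name below is hypothetical.)
    parser.add_argument("--test_data", type=str, required=True)
    parser.add_argument("--store_results_path", type=str, required=True)
    parser.add_argument("--input_channels", type=int, default=3)
    parser.add_argument("--checkpoint_dir", type=str, default="checkpoint")
    parser.add_argument("--summary_dir", type=str, default="summary")
    args = parser.parse_args()

    eval(args.test_data,
         args.store_results_path,
         input_channels=args.input_channels,
         checkpoint_dir=args.checkpoint_dir,
         summary_dir=args.summary_dir)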
Code example #6
import datetime
import gzip
import os
import time
from cbpro import PublicClient
import json

import schedule

from utils import setup_custom_logger
from utils import create_snapshot_string

# Do we want to get fixed currency-pairs or dynamic?
CONFIG_MODE = 'fixed'

logger = setup_custom_logger('snapshot-crawler')

product_ids = ['BTC-USD', 'ETH-USD', 'BTC-EUR', 'XRP-USD', 'EOS-USD']
client = PublicClient()

targets = {
    pr_id: gzip.open(create_snapshot_string(pr_id), 'a+')
    for pr_id in product_ids
}


def job(target_csv_files):
    logger.info('Opening new csv files for the next day')
    # Close yesterday's files first; clear the mapping only after the loop,
    # never while iterating over it.
    for csv_file in target_csv_files.values():
        csv_file.close()
    target_csv_files.clear()
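    # (The snippet ends mid-job in the source. The rest of this function and
    # the scheduling below are assumptions based on the imports and the log
    # message above: reopen fresh gzip targets, then run the job daily.)
    for pr_id in product_ids:
        target_csv_files[pr_id] = gzip.open(create_snapshot_string(pr_id), 'a+')


schedule.every().day.at('00:00').do(job, targets)

while True:
    schedule.run_pending()
    time.sleep(1)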
Code example #7
import datetime
import logging
import os
import sys
from logging.handlers import TimedRotatingFileHandler
import time
import cbpro
import orjson

from utils import setup_custom_logger

product_id_arg = sys.argv[1]

logger = setup_custom_logger('update-crawler_' + product_id_arg)

logger.info('Starting process: ' + product_id_arg)


def create_product_logger(product_id):
    log_path = './data-2020/updates/' + product_id
    if not os.path.exists(log_path):
        os.makedirs(log_path)

    product_handler = TimedRotatingFileHandler(os.path.join(
        log_path,
        str(product_id) + '__L3Update.log'),
                                               when='midnight')
    new_logger = logging.getLogger(product_id)
    new_logger.addHandler(product_handler)
    product_handler.setLevel(1)  # level 1: pass everything above NOTSET
    new_logger.setLevel(1)
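    # (Cut off here in the source; presumably the logger is returned.)
    return new_logger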
Code example #8
    print(args)

    # Check on quality of inserted data
    # Embedding type
    assert args.embedding_type in [
        'both', 'cuis', 'words'
    ], "embedding_type must be one of 'both', 'cuis', or 'words'"

    # Measures check
    assert ('all' in args.measure and len(args.measure) == 1) or (
        len(set(args.measure).intersection(set(['add', 'mul', 'pair'
                                                ]))) == len(args.measure)
    ), "Choose if take 'all' or only certain measures among 'add', 'mul', 'pair'"

    # Logger instantiation
    logger = utils.setup_custom_logger('myapp')
    logger.info('Start\n')

    # K_umls only for copd related concepts or for all.
    if args.copd_K_switch:
        K_umls = umls_tables_processing.count_pairs(
            umls_tables_processing.USEFUL_RELA,
            cuis_list=[umls_tables_processing.COPD])
        label_K = '_umls_copd'

    else:
        # CUIs
        concepts = umls_tables_processing.concepts_related_to_concept(
            concept=umls_tables_processing.COPD,
            two_way=True,
            polishing_rels=False,
Code example #9
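# (The top of this helper is cut off in the source; the enclosing lines are
# reconstructed from the indentation. The name findItemByKeys is an assumption.)
def findItemByKeys(keys, table, matchData):
    for item in table:
        matched = True
        for key in keys: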
            if item[key] != matchData[key]:
                matched = False
        if matched:
            return item


def order_leaves_quantity(o):
    if o['leavesQty'] is None:
        return True
    return o['leavesQty'] > 0


if __name__ == '__main__':
    # Basic use of the websocket.

    logger = u.setup_custom_logger('console')

    symbolSubs = ["execution", "instrument", "order", "orderBookL2", "position", "quote", "trade", "margin"]

    # Instantiating the WS will make it connect. Be sure to add your api_key/api_secret.
    ws = BitMEXWebsocket(endpoint="https://testnet.bitmex.com/api/v1", symbol="XBTUSD", sub_topic=symbolSubs)

    logger.info("Instrument data: %s" % ws.get_instrument())

    # Run forever
    while ws.ws.sock.connected:
        logger.info("Ticker: %s" % ws.get_ticker())
        if ws.api_key:
            logger.info("Funds: %s" % ws.funds())
        logger.info("Market Depth: %s" % ws.market_depth())
        logger.info("Recent Trades: %s\n\n" % ws.recent_trades())
Code example #10
import utils
from corpus import Corpus
from moral_matrix import MoralMatrix

if __name__ == '__main__':
    args = utils.parse_args()
    logger = utils.setup_custom_logger("preprocessing")

    # Prepare corpus and moral value matrix.
    moral_matrix = MoralMatrix(args.moral_dictionary_path, logger)
    corpus = Corpus(args.users_path, args.tweets_path, moral_matrix, logger)
Code example #11
    db = {
        "host": "db131.prodtest2.vindicia.com",
        "port": "5432",
        "database": "pgprodtestdb1",
        "user": "******",
        "password": "******",
        "schema": "pg_schema",
    }
except Exception:
    # No logger has been defined yet so print the traceback and return an error
    import traceback
    traceback.print_exc()
    api_return("400", "ERROR: Missing required environment >" + "<")

# Setup Logging - logging to stdout
log = setup_custom_logger('root')

# create a parser object
parser = argparse.ArgumentParser(description="Python wrapper for Subscribe")

# Application control
parser.add_argument("--debug",
                    metavar="debug",
                    type=int,
                    help="Integer debug level for this script",
                    default=0)
parser.add_argument("--nocreate",
                    action='store_true',
                    help="If set, new Subscription creation will be skipped.")
parser.add_argument(
    "--getdata",
Code example #12
File: model.py Project: saswat01/license-plates
from ops import conv2d, weights_variable_xavier, bias_variable, weights_variable_truncated_normal
from writer import BufferedWriter, DATA_IMAGES, DATA_CHAR_LABELS, DATA_CHAR_PROBABILITIES
from utils import setup_custom_logger
import tensorflow as tf
import numpy as np
import h5py
import time
import os

log = setup_custom_logger("LicensePlatesCNN")

# Some string constants
CONV0_WEIGHTS = "conv0_weights"
CONV0_BIAS = "conv0_bias"
CONV1_WEIGHTS = "conv1_weights"
CONV1_BIAS = "conv1_bias"
CONV2_WEIGHTS = "conv2_weights"
CONV2_BIAS = "conv2_bias"
CONV3_WEIGHTS = "conv3_weights"
CONV3_BIAS = "conv3_bias"
CONV4_WEIGHTS = "conv4_weights"
CONV4_BIAS = "conv4_bias"
CONV5_WEIGHTS = "conv5_weights"
CONV5_BIAS = "conv5_bias"
CONV6_WEIGHTS = "conv6_weights"
CONV6_BIAS = "conv6_bias"
CONV7_WEIGHTS = "conv7_weights"
CONV7_BIAS = "conv7_bias"
FC0_WEIGHTS = "fc0_weights"
FC0_BIAS = "fc0_bias"
FC1_WEIGHTS = "fc1_weights"