Code Example #1
    def __init__(self, configuration, measure_model, time_to_shutdown_event):
        self.measure_model = measure_model
        self.time_to_shutdown_event = time_to_shutdown_event

        self.logger = logging_utils.get_logger("CommsInterfaceServer")

        try:
            self.vty_host = configuration.get('osmo_nitb_vty', 'host')
            self.vty_port = configuration.getint('osmo_nitb_vty', 'port')
            self.vty_readtimeout_secs = configuration.getint('osmo_nitb_vty', 'timeout')

            self.xmlrpc_server_host = configuration.get('app:main', 'xmlrpc.host')
            self.xmlrpc_server_port = configuration.getint('app:main', 'xmlrpc.port')

            self.pf_db_conn_str = configuration.get('app:main', 'sqlalchemy.pf.url')
            self.hlr_db_conn_str = configuration.get('app:main', 'sqlalchemy.hlr.url')

        except ConfigParser.Error as err:
            raise ValueError('Configuration error: {0}'.format(err.message))

        self.vty_client_connection = None
        self.xmlrpc_thread = None
        self.proc_measure_thread = None
        self.proc_unknow_adress_sms_thread = None

        self.pf_phone_number = pf_subscriber_extension
        self.pf_subscriber_imsi = pf_subscriber_imsi
        self.measure_update_period = 3

        bind_session(self.pf_db_conn_str)
        bind_hlr_session(self.hlr_db_conn_str)

        self.vty_use_send_sms_rlock = threading.RLock()
Code Example #2
def start_comms_interface_server_process(configuration, comms_model):
    logger = logging_utils.get_logger(multiprocessing.current_process().name)
    try:
        srv = CommsInterfaceServer(configuration, comms_model, multiprocessing.Event(), logger)
        srv.serve_forever()
    except ValueError as err:
        logger.error("Can't init comms interface server: {0}".format(err.message))
        sys.exit(1)
Code Example #3
    def __init__(self):
        self.queue = []
        self.unknown_adresses_sms = []

        self.__cc = GPSCoordinatesCollection()
        self.current_gps = (None, None)

        self.logger = logging_utils.get_logger("CommsModel")
Code Example #4
    def __init__(self, comms_model):
        try:
            SocketServer.TCPServer.__init__(self, ("", 8085), PostHandler)
        except Exception as err:
            raise ValueError(err)

        self.comms_model = comms_model
        self.logger = logging_utils.get_logger("SMSServer")
Code Example #5
def start_sms_server(configuration, comms_model):
    logger = logging_utils.get_logger(multiprocessing.current_process().name)
    try:
        srv = SMSServer(configuration, comms_model, logger)
        srv.serve_forever()
    except ValueError as err:
        print "Can't init sms server: {0}".format(err.message)
        logger.error("Can't init sms server: {0}".format(err.message))
        sys.exit(1)
Code Example #6
    def __init__(self):
        self.__gps_times = []
        self.__gps_coordinates = []
        self.__count = 0
        self.__max_for_save = 100
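        # Assumption: __max_for_save caps how many fixes are kept in memory; older entries
        # are presumably discarded once the count exceeds this limit.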

        self.logger = logging_utils.get_logger("GPSCoordinatesCollection")

        self.add(time.time(), None, None)
Code Example #7
def create_graphml(input_pairs,
                   input_clusterinfosummary,
                   input_librarysearch,
                   input_analoglibrarysearch,
                   input_pairsfolder,
                   output_graphml,
                   collapse_ion_edges=False):
    logger = logging_utils.get_logger(__name__)
    # Doing other filtering
    logger.debug("Creating network")
    G = molecular_network_filtering_library.loading_network(input_pairs,
                                                            hasHeaders=True)
    molecular_network_filtering_library.add_clusterinfo_summary_to_graph(
        G, input_clusterinfosummary)
    molecular_network_filtering_library.add_library_search_results_to_graph(
        G, input_librarysearch)
    # mark all nodes as feature or ion identity nodes (constants.NODE.TYPE_ATTRIBUTE)
    logger.debug("Mark all node types")
    ion_network_utils.mark_all_node_types(G)

    # add analogs
    if input_analoglibrarysearch is not None:
        logger.debug("Add analog library search results")
        molecular_network_filtering_library.add_library_search_results_to_graph(
            G, input_analoglibrarysearch, annotation_prefix="Analog:")

    # add additional edges - e.g. ion identity edges between different ion species of the same molecule
    if input_pairsfolder is not None:
        all_pairs_files = glob.glob(os.path.join(input_pairsfolder, "*"))
        logger.debug("Adding additional edges from files: " +
                     str(len(all_pairs_files)))
        for additional_pairs_file in all_pairs_files:
            logger.debug("Adding Additional Edges from " +
                         str(additional_pairs_file))
            molecular_network_filtering_library.add_additional_edges(
                G, additional_pairs_file)

        # collapse all ion identity networks, each into a single node
        if collapse_ion_edges:
            logger.debug("Collapsing additional edges of type: " +
                         CONST.EDGE.ION_TYPE)
            try:
                G = ion_network_utils.collapse_ion_networks(G)
            except Exception:
                logger.debug("Failed collapsing ion identity networks")

    # export graphml
    logger.info("Writing graphml: " + output_graphml)
    nx.write_graphml(G, output_graphml, infer_numeric_types=True)
Code Example #8
    def run(self):
        self.logger = logging_utils.get_logger("GPSDListenerProcess")

        while not self.__time_to_shutdown.is_set():
            try:
                self.logger.info("Trying to establish connection with gpsd")
                self.__session = gps.gps()
                self.__session.stream(gps.WATCH_ENABLE)
                self.logger.info("Connection with gpsd is established!")
            except:
                self.logger.error("Failed to establish connection with gpsd!")
                time.sleep(0.1)
                continue

            for report in self.__session:
                if self.__time_to_shutdown.is_set():
                    break

                if report.get(u'class') == u'TPV':
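                    # TPV (time-position-velocity) reports carry the position fix and its timestamp.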
                    lat = report.get(u'lat')
                    lon = report.get(u'lon')

                    time_str = report.get('time')
                    if time_str is None:
                        self.logger.error(
                            "GPS coordinate does not have timestamps: {0}".
                            format(report))
                        continue

                    time_timestamp = None
                    try:
                        time_timestamp = time.mktime(
                            datetime.datetime.strptime(
                                time_str, "%Y-%m-%dT%H:%M:%S.%fZ").timetuple())
                    except ValueError as err:
                        self.logger.error(
                            "Can't parse time in gpsd message: {0}".format(
                                err.message))
                        continue

                    self.__comms_model.add_gps_meas(time_timestamp, lat, lon)

            if not self.__time_to_shutdown.is_set():
                self.logger.error("Connection with gpsd lost!")
Code Example #9
    def __init__(self, configuration, measure_model, time_to_shutdown_event):
        self.measure_model = measure_model
        self.time_to_shutdown_event = time_to_shutdown_event

        self.logger = logging_utils.get_logger("CommsInterfaceServer")

        try:
            self.vty_host = configuration.get('osmo_nitb_vty', 'host')
            self.vty_port = configuration.getint('osmo_nitb_vty', 'port')
            self.vty_readtimeout_secs = configuration.getint(
                'osmo_nitb_vty', 'timeout')

            self.xmlrpc_server_host = configuration.get(
                'app:main', 'xmlrpc.host')
            self.xmlrpc_server_port = configuration.getint(
                'app:main', 'xmlrpc.port')

            self.pf_db_conn_str = configuration.get('app:main',
                                                    'sqlalchemy.pf.url')
            self.hlr_db_conn_str = configuration.get('app:main',
                                                     'sqlalchemy.hlr.url')

            self.kannel_url = configuration.get('app:main', 'kannel.url')
            self.kannel_smssend_port = configuration.get(
                'app:main', 'kannel.smssend.port')

        except ConfigParser.Error as err:
            raise ValueError('Configuration error: {0}'.format(err.message))

        self.vty_client_connection = None
        self.xmlrpc_thread = None
        self.proc_measure_thread = None
        self.proc_unknow_adress_sms_thread = None

        self.pf_phone_number = pf_subscriber_extension
        self.pf_subscriber_imsi = pf_subscriber_imsi
        self.measure_update_period = 3

        bind_session(self.pf_db_conn_str)
        bind_hlr_session(self.hlr_db_conn_str)

        self.vty_use_send_sms_rlock = threading.RLock()
Code Example #10
File: gpsd_client.py  Project: nextgis/peoplefinder
    def run(self):
        self.logger = logging_utils.get_logger("GPSDListenerProcess")

        while not self.__time_to_shutdown.is_set():
            try:
                self.logger.info("Trying to establish connection with gpsd")
                self.__session = gps.gps()
                self.__session.stream(gps.WATCH_ENABLE)
                self.logger.info("Connection with gpsd is established!")
            except:
                self.logger.error("Failed to establish connection with gpsd!")
                time.sleep(0.1)
                continue

            for report in self.__session:
                if self.__time_to_shutdown.is_set():
                    break

                if report.get(u'class') == u'TPV':
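                    # TPV (time-position-velocity) reports carry the position fix and its timestamp.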
                    lat = report.get(u'lat')
                    lon = report.get(u'lon')

                    time_str = report.get('time')
                    if time_str is None:
                        self.logger.error("GPS coordinate does not have timestamps: {0}".format(report))
                        continue

                    time_timestamp = None
                    try:
                        time_timestamp = time.mktime(datetime.datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%S.%fZ").timetuple())
                    except ValueError as err:
                        self.logger.error("Can't parse time in gpsd message: {0}".format(err.message))
                        continue

                    self.__comms_model.add_gps_meas(time_timestamp, lat, lon)

            if not self.__time_to_shutdown.is_set():
                self.logger.error("Connection with gpsd lost!")
Code Example #11
    def run(self):
        self.logger = logging_utils.get_logger("MeasJsonListenerProcess")
        self.try_to_create_meas_json_process()
        self.start_loop()
Code Example #12
from logging_utils import get_logger

logger = get_logger()
logger.info("Let's start!")
Code Example #13
import argparse
import os
import sys
from datetime import datetime, timedelta

sys.path.append('/Users/luoyonggui/PycharmProjects/mayiutils_n1/mayiutils/db')
from pymongo_wrapper import PyMongoWrapper

sys.path.append(
    '/Users/luoyonggui/PycharmProjects/mayiutils_n1/mayiutils/finance')
from stock_wrapper import get_tushare_pro

sys.path.append(
    '/Users/luoyonggui/PycharmProjects/mayiutils_n1/mayiutils/config')
from logging_utils import get_logger

log_path = os.path.join(LOG_PATH, 'output.log')
logger = get_logger(__file__, file_handler=True, log_path=log_path)

mongo = PyMongoWrapper()
dbname = 'finance'
table_name = 'stock_daily_basic1'
table = mongo.getCollection(dbname, table_name)
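# Assumption: setUniqueIndex is a PyMongoWrapper helper; the unique (ts_code, trade_date)
# index is only created when the collection does not exist yet.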
if not mongo.isExists(dbname, table_name):
    mongo.setUniqueIndex(dbname, table_name, ['ts_code', 'trade_date'])
pro = get_tushare_pro()
f = 'ts_code,trade_date,close,turnover_rate,turnover_rate_f,volume_ratio,pe,pe_ttm,pb,ps,ps_ttm,total_share,float_share,free_share,total_mv,circ_mv'
# df = pro.daily_basic(ts_code='', trade_date=datetime.now().strftime('%Y%m%d'), fields=f)
df = pro.daily_basic(ts_code='', trade_date='20190926', fields=f)
# df = pro.daily_basic(ts_code='', trade_date='20190827', fields=f)
if not df.empty:
    logger.info(f'Fetched {len(df)} rows of data!')
    df.columns = [
Code Example #14
#!/usr/bin/env python3
"""
Main cryptochat-client module
"""

import os
import sys

import app
from logging_utils import init_logging, get_logger

LOGGER = get_logger(__name__)
CLIENT_VERSION = os.environ.get('VERSION')


def show_error_and_exit(error_text):
    raise NotImplementedError()


def check_requirements():
    raise NotImplementedError()


def main():
    init_logging()
    LOGGER.info("Starting (version %s).", CLIENT_VERSION)
    status = app.run()
    sys.exit(status)


if __name__ == '__main__':
    main()
Code Example #15
File: utils.py  Project: mandarup/courses
nmt.py: NMT Model
Pencheng Yin <*****@*****.**>
Sahil Chopra <*****@*****.**>
"""

import math
from typing import List

import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F

import logging_utils

logger = logging_utils.get_logger()


def pad_sents(sents, pad_token):
    """ Pad list of sentences according to the longest sentence in the batch.
    @param sents (list[list[str]]): list of sentences, where each sentence
                                    is represented as a list of words
    @param pad_token (str): padding token
    @returns sents_padded (list[list[str]]): list of sentences where sentences shorter
        than the max length sentence are padded out with the pad_token, such that
        each sentences in the batch now has equal length.
    """
    sents_padded = []

    ### YOUR CODE HERE (~6 Lines)
    max_len = max(len(s) for s in sents)
Code Example #16
import sys
import numpy

sys.path.insert(0, "../tools/feature-based-molecular-networking/scripts/")

import logging_utils
logger = logging_utils.get_logger(__name__)


def test_written_description():
    import write_description
    input_filename = "reference_data/params.xml"
    write_description.write_description(input_filename, "/dev/null")


def test_network_stats():
    import calculate_stats_graphml
    input_filename = "reference_data/network.graphml"
    calculate_stats_graphml.calculate_stats(input_filename, "/dev/null")


def test_metadata_test():
    import metadata_permanova_prioritizer

    input_filename = "reference_data/test_metadata_permanova_parse.tsv"
    selected_columns = metadata_permanova_prioritizer.permanova_validation(input_filename)

    print(selected_columns)


def test_additional_edges():
Code Example #17
from __future__ import division, unicode_literals, print_function
from glob import glob
from io import open
from utils import normalize_string
import os

from language import Language
from logging_utils import get_logger

FULL_TEXT_EXTENSION = ".text"
SUMMARY_EXTENSION = ".summary"
LOGGER = get_logger('seq2seq.dataloader')


class DataLoader(object):
    def __init__(self, full_text_directory, summary_directory=None):
        self.full_text_directory = full_text_directory
        self.summary_directory = summary_directory
        if self.summary_directory is None:
            self.summary_directory = self.full_text_directory

    def load(self, trim=None):
        LOGGER.info('Loading data from %s and %s' %
                    (self.full_text_directory, self.summary_directory))
        full_text_lang = Language(type='full_text')
        summary_text_lang = Language(type='summary_text')
        pairs = []
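        # Iterating over the DataLoader itself presumably yields (document, summary) pairs
        # read from the two directories; both vocabularies are built while collecting pairs.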
        for doc, summary in self:
            full_text_lang.add_text(doc)
            summary_text_lang.add_text(summary)
            pairs.append((doc, summary))
Code Example #18
File: nmt_model.py  Project: mandarup/courses
"""
from collections import namedtuple
import sys
from typing import List, Tuple, Dict, Set, Union
import torch
import torch.nn as nn
import torch.nn.utils
import torch.nn.functional as F
from torch.nn.utils.rnn import pad_packed_sequence, pack_padded_sequence

from model_embeddings import ModelEmbeddings
Hypothesis = namedtuple('Hypothesis', ['value', 'score'])

import logging
import logging_utils
logger = logging_utils.get_logger(module=__name__, loglevel=logging.DEBUG)

class NMT(nn.Module):
    """ Simple Neural Machine Translation Model:
        - Bidrectional LSTM Encoder
        - Unidirection LSTM Decoder
        - Global Attention Model (Luong, et al. 2015)
    """
    def __init__(self, embed_size, hidden_size, vocab, dropout_rate=0.2):
        """ Init NMT Model.

        @param embed_size (int): Embedding size (dimensionality)
        @param hidden_size (int): Hidden Size (dimensionality)
        @param vocab (Vocab): Vocabulary object containing src and tgt languages
                              See vocab.py for documentation.
        @param dropout_rate (float): Dropout probability, for attention
Code Example #19
import random
import torch.nn as nn
from torch import optim
from argparse import ArgumentParser
from pathlib import Path

from data_loader import DataLoader
from language import SOD_TOKEN, EOD_TOKEN
from seq2seq import device, MAX_LENGTH, EncoderRNN, AttentionDecoderRNN
from utils import time_since, time_string
from tensor_utils import tensors_from_pair
from logging_utils import get_logger

teacher_forcing_ratio = 0.5

LOGGER = get_logger('seq2seq.train')


def train_tensor(input_tensor,
                 target_tensor,
                 encoder,
                 decoder,
                 encoder_optimizer,
                 decoder_optimizer,
                 criterion,
                 max_length=MAX_LENGTH):
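    # One training step over a single (input, target) tensor pair: both optimizers are
    # reset before the forward pass (the rest of the step is not shown in this snippet).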
    encoder_hidden = encoder.init_hidden()

    encoder_optimizer.zero_grad()
    decoder_optimizer.zero_grad()
Code Example #20
    except ValueError as err:
        print "Can't init sms server: {0}".format(err.message)
        logger.error("Can't init sms server: {0}".format(err.message))
        sys.exit(1)

if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description='People finder. Comm interface.')
    parser.add_argument('-c', '--configuration', type=file, required=True)
    parser.add_argument('-t', '--test_mode', action='store_true')
    args = parser.parse_args()

    configuration = ConfigParser.ConfigParser()
    configuration.readfp(args.configuration)

    logger = logging_utils.get_logger("main")
    logger.info("Comm interface started! pid: {0}".format(os.getpid()))

    # Init DB ================================================================
    pf_db_conn_str = None
    try:
        pf_db_conn_str = configuration.get('app:main', 'sqlalchemy.pf.url')
    except ConfigParser.Error as err:
        logger.error("People Finder DB identification failed: {0}".format(err.message))
        sys.exit(1)

    logger.info("PF db sqlite path: {0}".format(pf_db_conn_str))
    try:
        bind_session(pf_db_conn_str)
        DBSession.query(Measure).count()
        DBSession.query(Settings).count()
Code Example #21
# -*- coding: utf-8 -*-
"""
CS224N 2018-19: Homework 4
model_embeddings.py: Embeddings for the NMT model
Pencheng Yin <*****@*****.**>
Sahil Chopra <*****@*****.**>
Anand Dhoot <*****@*****.**>
"""

import torch.nn as nn

import logging_utils
import logging

LOGLEVEL = logging.DEBUG
logger = logging_utils.get_logger(loglevel=LOGLEVEL)


class ModelEmbeddings(nn.Module):
    """
    Class that converts input words to their embeddings.
    """
    def __init__(self, embed_size, vocab):
        """
        Init the Embedding layers.

        @param embed_size (int): Embedding size (dimensionality)
        @param vocab (Vocab): Vocabulary object containing src and tgt languages
                              See vocab.py for documentation.
        """
        super(ModelEmbeddings, self).__init__()