Example #1
    def POST(self):
        rtn = None
        try:
            if session.has_token() is False:
                anonymous_user = user.login(user.ANONYMOUS_ACCOUNT_NAME, None)
                session.set_token(anonymous_user)

            operation = user.get_operation(handler_class=self.get_qualified_name())
            if operation is not None:
                paramnames = operation.get_resource_oql_paramnames()
                oqlparams = self._get_resource_oql_params(paramnames)
                if user.has_permission(self._get_user_id(), operation.operation_key, oqlparams=oqlparams):
                    rtn = self.execute()
                else:
                    if session.get_token().is_anonymous():
                        raise SessionExpiredError(session.get_token())
                    else:
                        raise UnauthorizedError()
            else:
                raise CoreError("%s is not related to operation.", self.get_qualified_name())

        except BaseException, e:
            msg = str(e)
            msg += traceback.format_exc()
            log.get_logger().error(msg)
            rtn = self._new_rtn(e=e).to_json()
Example #2
File: AO.py  Project: idobarkan/my-code
 def __init__(self, wrapped, timeout = None):
     DynamicProxy.__init__(self)
     self._timeout = timeout
     self._wrapped = wrapped
     if hasattr(wrapped,'logger'):
         name = wrapped.logger.name + '.Sync'
         self.logger = log.get_logger(name)
     else:
         self.logger = log.get_logger('AO.Sync') # will propagate to root logger
         assertions.warn('%s - No logger found for %s' % (self,wrapped))
Example #3
 def POST(self):
     try:
         h = self._get_str_parameter(self._get_handler_name())
         handler = self._get_handler(h)
         if handler is None:
             raise CoreError("The responding handler to %s is not defined", h)
         return handler.POST()
     except BaseException, e:
         msg = str(e)
         msg += traceback.format_exc()
         log.get_logger().error(msg)
         rtn = self._new_rtn(e=e).to_json()
         return rtn
Example #4
File: irc.py  Project: rainya/pyhole
    def __init__(self, network):
        irclib.SimpleIRCClient.__init__(self)
        network_config = utils.get_config(network)

        self.log = log.get_logger(str(network))
        self.version = version.version_string()
        self.source = None
        self.target = None
        self.addressed = False

        self.admins = CONFIG.get("admins", type="list")
        self.command_prefix = CONFIG.get("command_prefix")
        self.reconnect_delay = CONFIG.get("reconnect_delay", type="int")
        self.rejoin_delay = CONFIG.get("rejoin_delay", type="int")

        self.server = network_config.get("server")
        self.password = network_config.get("password", default=None)
        self.port = network_config.get("port", type="int", default=6667)
        self.ssl = network_config.get("ssl", type="bool", default=False)
        self.ipv6 = network_config.get("ipv6", type="bool", default=False)
        self.bind_to = network_config.get("bind_to", default=None)
        self.nick = network_config.get("nick")
        self.username = network_config.get("username", default=None)
        self.identify_password = network_config.get("identify_password",
                default=None)
        self.channels = network_config.get("channels", type="list")

        self.load_plugins()

        self.log.info("Connecting to %s:%d as %s" % (self.server, self.port,
                self.nick))
        self.connect(self.server, self.port, self.nick, self.password,
                ssl=self.ssl, ipv6=self.ipv6, localaddress=self.bind_to,
                username=self.username)
Example #5
def extract(video_file_path, image_basedir, log_dir=default_log_dir, ffmpeg_path=default_ffmpeg_path):
    start_time = time.time()
    
    # Compute the paths and the logger before the try block so the finally
    # clause can always log, even if directory creation or ffmpeg fails.
    video_file_name = os.path.basename(video_file_path)
    video_name = os.path.splitext(video_file_name)[0]
    image_dir = os.path.join(image_basedir, video_name)
    image_file_pattern = os.path.join(image_dir, video_name + '%5d.bmp')
    logger = log.get_logger('extract', video_name)

    try:
        
        if not os.path.exists(image_dir):
            # create the directory only if it does not exist
            logger.debug('make dir %s' % (image_dir, ))
            os.makedirs(image_dir)
        
        cmd = [os.path.join(ffmpeg_path, 'ffmpeg'), '-i', video_file_path, '-r', '60', '-f', 'image2', image_file_pattern]
        logger.debug('run ffmpeg cmd %s' % (' '.join(cmd), ))
        child = subprocess.Popen(cmd, cwd=ffmpeg_path)
        child.wait()
        
        return {
            'video_name': video_name,
            'image_dir': image_dir,
            'image_files': [ os.path.join(image_dir, _) for _ in os.listdir(image_dir) if os.path.isfile(os.path.join(image_dir, _)) ]
        }
    finally:
        end_time = time.time()
        logger.debug('extract %s spent %ds' % (video_file_path, (end_time-start_time) ))
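
A minimal usage sketch for the extract() helper above; the video path and frame directory are hypothetical, and it assumes the ffmpeg binary is reachable at the configured ffmpeg_path.

# Hypothetical inputs: a source video and a directory to receive the extracted frames.
result = extract('/data/videos/demo.mp4', '/data/frames')
print(result['video_name'], len(result['image_files']))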
Example #6
 def __init__(self, filename):
     '''Initialize the database.'''
     # filename [string]: database file name
     
     super(DataBase, self).__init__()
     # create the logger object
     self.logger = log.get_logger("DataBase")
     
     # create the database directory if it does not exist
     db_path = 'db'
     if not os.path.exists(db_path):
         self.logger.info("create directory: db")
         os.mkdir(db_path)
     
     db_name = os.path.join(db_path, filename)
     if not os.path.exists(db_name):
         self.logger.info("create database file:" + filename)
         self.__create_table(db_name)
     
     # debug switch
     sqlite3.enable_callback_tracebacks(True)
     
     # connect to the database
     self.con = sqlite3.connect(db_name)
     self.con.row_factory = sqlite3.Row
     self.logger.info("init DataBase successfully")
Example #7
def quality(video_name, image_file_paths, csv_basedir):
    start_time = time.time()
    # Create the logger before the try block so the finally clause can always log.
    logger = log.get_logger('quality', video_name)
    try:
        
        csv_file_path = os.path.join(csv_basedir, '%s.csv' % (video_name, ))
        
        queue = Queue.Queue()
        map(queue.put, image_file_paths)
        
        threads = []
        result = []
        for i in range(15):
            t = WorkerThread(queue, result, logger=logger)
            t.start()
            threads.append(t)
        
        for t in threads:
            t.join()
        
        with open(csv_file_path, 'wb') as f:
            writer = csv.writer(f)
            for _ in result:
                image_file_name = os.path.basename(_[0])
                image_name = os.path.splitext(image_file_name)[0]
                image_pattern = re.compile(video_name + '(\\d+)')
                match = image_pattern.match(image_file_name)
                if match:
                    writer.writerow([match.group(1) , image_file_name, _[1]])
    finally:
        end_time = time.time()
        logger.debug('quality %s spent %ds' % (video_name, (end_time-start_time) ))
Example #8
    def __init__(self):
        self.logger = log.get_logger(__name__)
        self.logger.debug('Initialising grabber')

        self.api = LowLevelApi(TITLE)
        self.assets = self.api.load_assets()
        self.field_size = Point(int(self.api.size.x / CELL_SIZE), int((self.api.size.y - HEADER_HEIGHT - FOOTER_HEIGHT) / CELL_SIZE))
Example #9
def execute(token=None):
    logger = log.get_logger(__name__)

    # Get the raw loans and loan information.
    response_json = api.get_listed_loans(token=token)

    if not response_json:
        logger.warn("Aborting. No API response.")
        return

    asOfDate = response_json['asOfDate']
    loans = [Loan(asOfDate, loan) for loan in response_json['loans']]

    # Port over to database.
    with database.SqliteDatabase() as db_conn:

        if not database.has_been_recorded(asOfDate, db_conn):
            # Populate tables.
            database.add_raw_loan_dates(asOfDate, db_conn)
            database.add_raw_loans(loans, db_conn)
            database.add_loans_funded_as_of_date(loans, db_conn)

            logger.info("%s added %s loans." % (asOfDate, len(loans)))
        else:
            logger.info("%s already exists." % asOfDate)
Example #10
    def __init__(self, config_path='config.json'):
        self._logger = log.get_logger('manager')
        self._config_path = config_path

        self._loop = None
        self._server = None
        self._config = None
Example #11
File: update.py  Project: 0X1A/servo
def update_from_cli(**kwargs):
    tests_root = kwargs["tests_root"]
    path = kwargs["path"]
    assert tests_root is not None

    m = None
    logger = get_logger()

    if not kwargs.get("rebuild", False):
        try:
            m = manifest.load(tests_root, path)
        except manifest.ManifestVersionMismatch:
            logger.info("Manifest version changed, rebuilding")
            m = None
        else:
            logger.info("Updating manifest")

    if m is None:
        m = manifest.Manifest(None)


    update(tests_root,
           kwargs["url_base"],
           m,
           ignore_local=kwargs.get("ignore_local", False))
    manifest.write(m, path)
Example #12
File: AO.py  Project: idobarkan/my-code
    def __init__(self, passive_obj, factory=None):
        DynamicProxy.__init__(self)
        
        # configure hooks from factory if provided
        hook_config = getattr(factory, 'config', None) 
        self.method_call_hook = getattr(hook_config, 'AO_method_call_hook', NullObject())
        self.dct_run_hooks = getattr(hook_config, 'AO_dct_run_hooks', {}) # hook_name -> hook (so different ones don't override each other, but same ones do) 
        self.thread_start_hook = getattr(hook_config, 'AO_thread_start_hook', NullObject())
        
        assertions.assert_that(passive_obj is not None, "AO obj cannot be None", ao=self)
        self.obj = passive_obj
        self.obj._ao = self # allow passive object to make async calls to itself (and be aware of AO in general)
        self.id = random.getrandbits(63)
        self.is_active = False

        if hasattr(self.obj, 'logger'):
            logger_name = self.obj.logger.name
            self.logger = log.get_logger(logger_name+'.AO', b_raw_name=True)
        else:
            assertions.fail('Object wrapped as AO must have a logger. No logger found for passive object %s' % (self,))

        if hasattr(self.obj, 'name'):
            self.name = self.obj.name
        else:
            self.name = type(self.obj).__name__
Example #13
 def __init__(self, sock, server):
     asyncore.dispatcher.__init__(self, sock=sock)
     self.server = server
     server.append_client(self)
     self.name = ''
     self.recvbuf = ''
     self.sentbuf = ''
     self.logger = log.get_logger('client')
     self.add_sentbuf("connect")
Example #14
 def __init__(self, test_params: TestParams, results_exporter: AbstractBaseResultExporter, *, loop):
     self.loop = loop
     self._test_params = test_params
     self._results_exporter = results_exporter
     self._user_futs = []
     self._is_started = False
     self._is_stopping = False
     self._logger = get_logger('loadtester')
     self._session = self._init_client_session()
Example #15
    def __init__(self, host, port, router, loop):

        self.logger = log.get_logger('gopher_server')

        self.router = router
        self._loop = loop
        self._server = asyncio.start_server(self.handle_gopher,
                                            host=host,
                                            port=port)
        self.logger.info({'action': 'gopher server init'})
Example #16
def get_logger_for_options(options):
    """
    Return a Logger instance with a command-line specified severity threshold.

    Return a Logger instance with severity threshold specified by the command
    line option log.LOG_LEVEL. Log messages will be written to standard out.
    options: Dictionary mapping from command-line option strings to option
    values.
    """
    return log.get_logger(sys.stderr, options[_LOG_LEVEL_OPTION])
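
A minimal usage sketch for get_logger_for_options() above, assuming _LOG_LEVEL_OPTION is the module's option-key constant (not shown in this excerpt) and that the parsed options dictionary contains it.

# _LOG_LEVEL_OPTION and the 'DEBUG' value are illustrative assumptions only.
options = {_LOG_LEVEL_OPTION: 'DEBUG'}
logger = get_logger_for_options(options)
logger.debug('messages at or above the chosen threshold go to standard error')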
Example #17
    def as_dict(self):
        """returns the augmented diff as a python dictionary"""

        logger = log.get_logger()
        diff_dict = xmltodict.parse(self.xml)
        if 'osm' in diff_dict:
            return diff_dict['osm']
        else:
            logger.error('no osm root element in diff file')
            return None
Example #18
 def __init__(self):
     self.logger = log.get_logger("Server")
     asyncore.dispatcher.__init__(self)
     self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
     host, port = '', 13579
     addr = (host, port)
     self.bind(addr)
     self.listen(100)
     self.client_set = set()
     self.logic = logic.Logic()
Example #19
 def __init__(self, **kwargs):
     """Set up the logger, and run a setup method if it's been defined."""
     self.logger = log.get_logger(self._slug, self._version)
     self.settings = Settings()
     self.add_settings(**kwargs)
     self.setup()
     self.redis = None
     if "redis_host" in self.settings and 'redis' in globals():
         self.redis = redis.StrictRedis(host=self.settings["redis_host"])
     # This cache is used for a single process when redis is not installed
     self.cache = {}
Example #20
    def as_dict(self):
        """returns changesets metadata as a python dictionary"""

        logger = log.get_logger()

        changesets_dict = xmltodict.parse(self.XML)
        if 'changeset' in changesets_dict['osm']:
            return changesets_dict['osm']['changeset']
        else:
            logger.error('no changesets in result')
            return {}
Example #21
 def to_dict(self):
     code = "core_status_success"
     message = ""
     arguments = None
     data = self.__data
     if self.__e != None:
         try:
             e = self.__e
             if not isinstance(e, Error):
                 e = CoreError(traceback.format_exc())
             code = e.get_code()
             arguments = e.get_arguments()
             message = self._get_i18n_message(code, arguments)
             message = str(e) if message is None else message
             data = e.get_data()
         except BaseException, e:
             log.get_logger().error(traceback.format_exc())
             message = str(e)
             code = CoreError(message).get_code()
             arguments = {}
             data = ""
Example #22
	def __init__(self, *args, **kwargs):
		"""
		================ =========================================================
		Attribute        Description
		================ =========================================================
		switches         The dictionary storing the switches
		gateways         The dictionary storing the switches enabled with
						 gateway functionalities
		================ =========================================================
		"""	
		super(Ijoin, self).__init__(*args, **kwargs)
		self.logger = log.get_logger(self.name)
Example #23
	def __init__(self, *args, **kwargs):
		"""
		================ =========================================================
		Attribute        Description
		================ =========================================================
		access_points    The dictionary storing the switches enabled with access
						 point functionalities
		================ =========================================================
		"""	
		super(AccessPoint, self).__init__(*args, **kwargs)
		self.logger = log.get_logger(self.name)
		self.access_points = {}
		self.connected_ues = {}
Example #24
    def latest_from_overpass(self):
        """returns the latest augmented diff."""

        logger = log.get_logger()
        try:
            new_diff = self.from_overpass()
            if new_diff is not None:
                self.XML = new_diff
                return True
        except Exception as e:
            logger.error('could not retrieve augmented diff')
            logger.debug(e.message)
            return False
Example #25
 def __init__(self):
     self.logger = log.get_logger("Logic")
     self.logger.info("init Logic successfully")
     self.db = data.DataBase("sheng.db")
     self.user_mgr = mgr.UserManager(self.db)
     self.product_mgr = mgr.ProductManager(self.db)
     self.factory_mgr = mgr.FactoryManager(self.db)
     self.purchaseC_mgr = mgr.PurchaseCManager(self.db)
     self.purchaseS_mgr = mgr.PurchaseSManager(self.db)
     self.saleC_mgr = mgr.SaleCManager(self.db)
     self.saleS_mgr = mgr.SaleSManager(self.db)
     self.returnC_mgr = mgr.ReturnCManager(self.db)
     self.returnS_mgr = mgr.ReturnSManager(self.db)
Example #26
    def __init__(self,controller,dispatch,error):
        Thread.__init__(self)
        self.policy = policy.get_policy()
        self.controller = controller
        self.do_dispatch = dispatch
        self.error = error
        self.queue = Queue()
        self.queue.load()
        self.lock = Lock()
        self._quit = Event()

        self.add_queue = SyncQueue(0)
        self.num_run = 0
        self.log = get_logger()
Example #27
def load(tests_root, manifest_path):
    logger = get_logger()

    if os.path.exists(manifest_path):
        logger.debug("Opening manifest at %s" % manifest_path)
    else:
        logger.debug("Creating new manifest at %s" % manifest_path)
    try:
        with open(manifest_path) as f:
            manifest = Manifest.from_json(tests_root, json.load(f))
    except IOError:
        manifest = Manifest(None)

    return manifest
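
A minimal usage sketch for load() above; the paths are hypothetical. If the manifest file cannot be opened, the function falls back to an empty Manifest(None).

# Hypothetical paths for illustration.
m = load('/path/to/web-platform-tests', '/path/to/MANIFEST.json')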
Example #28
	def __init__(self, *args, **kwargs):
		"""
		================ =========================================================
		Attribute        Description
		================ =========================================================
		switches         The dictionary storing the switches.
		last_ue_id		 The last id assigned to an UE.
		ues              The dictionary storing the UEs.
		================ =========================================================
		"""	
		super(Mme, self).__init__(*args, **kwargs)
		self.logger = log.get_logger(self.name)
		self.switches = {}
		self.last_ue_id = 0
		self.ues = {}
Example #29
    def latest_from_osm(self):
        """returns the latest OSM changesets metadata."""

        logger = log.get_logger()

        try:
            new_changesets_meta = self.from_osm()
            if new_changesets_meta is not None:
                self.XML = new_changesets_meta
                logger.debug(self.XML)
                return True
        except Exception as e:
            logger.error('could not retrieve changesets meta file from OSM')
            logger.debug(e.message)
            return False
Example #30
    def set_logging(self, log_level=logging.ERROR, file_path_name=None):
        """
        This function allows to change the logging backend, either output or file as backend
        It also allows to set the logging level (whether to display only critical/error/info/debug.
        e.g.
        yag = yagmail.SMTP()
        yag.set_logging(yagmail.logging.DEBUG)  # to see everything

        and

        yagmail.set_logging(yagmail.logging.DEBUG, 'somelocalfile.log')

        lastly, a log_level of None will make sure there is no I/O.
        """
        self.log = get_logger(log_level, file_path_name)
Example #31
import os
from model.MCFT import MCFT
from model.SiameseStyle import SiameseStyle
from model.VGGishEmbedding import VGGishEmbedding
from data.TestDataset import TestDataset
from data.OtoMobile import OtoMobile
from log import get_logger

logger = get_logger('factory')


def model_factory(model_name, model_filepath):
    '''
    Given a model name and weight file location, construct the model for
    query-by-voice search.

    Arguments:
        model_name: A string. The name of the model.
        model_filepath: A string. The location of the weight file on disk.

    Returns:
        A QueryByVoiceModel.
    '''
    logger.debug('Attempting to load the {} model from {}'.format(
        model_name, model_filepath))

    if model_name == 'mcft':
        model = MCFT(model_filepath)
    elif model_name == 'siamese-style':
        model = SiameseStyle(model_filepath)
    elif model_name == 'VGGish-embedding':
Example #32
#
# You should have received a copy of the GNU General Public License
# along with exitmap.  If not, see <http://www.gnu.org/licenses/>.
"""
Provides an API to execute system commands over torsocks.
"""

import os
import socket
import threading
import subprocess

import log
import util

logger = log.get_logger()


class Command(object):
    """
    Provide an abstraction for a shell command which is to be run.
    """
    def __init__(self, torsocks_conf, queue, circ_id, origsock):

        os.environ["TORSOCKS_CONF_FILE"] = torsocks_conf
        os.environ["TORSOCKS_LOG_LEVEL"] = "5"

        self.process = None
        self.stdout = None
        self.stderr = None
        self.output_callback = None
Example #33
 def __init__(self):
     self.content_file = ''
     self.default_file = ''
     self.log = log_.get_logger()
Example #34
import falcon
import json

import log
from util.encoder import encoder
from collections import OrderedDict
from util.error.errors import NotSupportedError
LOG = log.get_logger()


class BaseResource(object):
    def to_json(self, body_dict):
        return json.dumps(body_dict)

    def from_db_to_json(self, db):
        return json.dumps(db, cls=encoder())

    def on_error(self, res, error=None):
        res.status = error['status']
        meta = OrderedDict()
        meta['code'] = error['code']
        meta['message'] = error['message']

        object = OrderedDict()
        object['meta'] = meta
        res.body = self.to_json(object)

    def on_success(self, res, data=None):
        res.status = falcon.HTTP_200
        meta = OrderedDict()
        meta['code'] = 200
Example #35
path = os.path.join(os.path.dirname(__file__), "../")
sys.path.append(path)

from app.config import (WEB3_HTTP_PROVIDER, DATABASE_URL, WORKER_COUNT,
                        SLEEP_INTERVAL, IBET_SHARE_EXCHANGE_CONTRACT_ADDRESS,
                        TOKEN_LIST_CONTRACT_ADDRESS, COMPANY_LIST_URL)
from app.model import (Notification, NotificationType)
from app.contracts import Contract
from batch.lib.token import TokenFactory
from batch.lib.company_list import CompanyListFactory
from batch.lib.token_list import TokenList
from batch.lib.misc import wait_all_futures
import log

JST = timezone(timedelta(hours=+9), "JST")
LOG = log.get_logger(process_name="PROCESSOR-NOTIFICATIONS-SHARE-EXCHANGE")

WORKER_COUNT = int(WORKER_COUNT)
SLEEP_INTERVAL = int(SLEEP_INTERVAL)

web3 = Web3(Web3.HTTPProvider(WEB3_HTTP_PROVIDER))
web3.middleware_onion.inject(geth_poa_middleware, layer=0)

engine = create_engine(DATABASE_URL, echo=False)
db_session = scoped_session(sessionmaker())
db_session.configure(bind=engine)

token_factory = TokenFactory(web3)
company_list_factory = CompanyListFactory(COMPANY_LIST_URL)

# Get the blockNumber at startup
Example #36
def needs_more_dank(frame, filter_list, noise_type, gauss_amount, sp_ratio,
                    sp_amount, motion, sharpening, saturation, brightness,
                    contrast, jpeg_iterations, jpeg_quality, filters_dir,
                    overrides, effects):
    logger = get_logger()
    filters = []

    for d in os.listdir(filters_dir):
        if os.path.isdir(os.path.join(filters_dir, d)) and d in filter_list:
            f = Filter(os.path.join(filters_dir, d, 'filter.yml'),
                       overrides[d] if d in overrides else {})
            filters.append(f)
            f.describe()

    l = []
    try:
        l = face_recognition.face_landmarks(frame)
    except Exception as exce:
        logger.error("Could not detect faces: {}".format(exce))
    rendered_filters = []
    for face in l:
        face = needs_more_face_points(face)
        for f in [f for f in filters if f.mask]:
            try:
                rendered = f.apply(face, frame)
                if len(rendered) != 0:
                    rendered_filters += rendered
            except Exception as exce:
                logger.error("Could not apply mask {}: {}".format(
                    f.name, exce))

    # Non face filters
    width, height = frame.shape[:2]
    img = {
        "center": (int(height / 2), int(width / 2)),
    }
    for f in [f for f in filters if not f.mask]:
        try:
            rendered = f.apply(img, frame)
            if len(rendered) != 0:
                rendered_filters += rendered
        except Exception as exce:
            logger.error("Could not apply filter {}: {}".format(f.name, exce))

    rendered_filters = sorted(rendered_filters, key=lambda x: x.priority)
    for f in rendered_filters:
        try:
            frame = f.apply(frame)
        except:
            pass

    for effect in effects:
        if effect == "saturation":
            frame = needs_more_saturation(frame, saturation)
        elif effect == "noise":
            if noise_type == "gauss":
                frame = needs_more_gaussian_noise(frame, gauss_amount)
            elif noise_type == "poisson":
                frame = needs_more_poisson_noise(frame)
            elif noise_type == "speckle":
                frame = needs_more_speckle_noise(frame)
            elif noise_type == "sp":
                frame = needs_more_salt_and_pepper_noise(
                    frame, sp_ratio, sp_amount)
            else:
                logger.error("Unknown noise type {}".format(noise_type))
        elif effect == "sharpening":
            frame = needs_more_sharpening(frame, sharpening)
        elif effect == "contrast":
            frame = needs_more_contrast(frame, brightness, contrast)
        elif effect == "motion":
            frame = needs_more_motion_blur(frame, motion)
        elif effect == "jpeg":
            for i in range(0, jpeg_iterations):
                frame = needs_more_jpeg(frame, jpeg_quality)

    return frame
Example #37
# -*- coding=utf-8 -*-
import utils, runner, log, time, json, simplejson
# from selenium import webdriver
# from appium import webdriver
obj_log = log.get_logger()

isbn_list = {
    'A': '9787308156417',
    'B': '9787516143261',
    'C': '9787509745816',
    'D': '9787040213607',
    'E': '9787509755280',
    'F': '9787516410790',
    'G': '9787561466584',
    'H': '9787561460207',
    'I': '9787108032911',
    'J': '9787561460232',
    'K': '9787509748985',
    'N': '9787030334282',
    'O': '9787513535663',
    'P': '9787500672012',
    'Q': '9787502554774',
    'R': '9787200008715',
    'S': '9787030323859',
    'T': '9787121212437',
    'U': '9787111465300',
    'V': '9787515901701',
    'X': '9787511107633',
    'Z': '9787500086062',
    'Test_null': '123456789'
}
Example #38
 def opened(self):
     log.get_logger().log("WebSocket opened.", log.INFO)
     return
Example #39
# GameParser.py
'''
This class will parse the full pokerstar string
'''
import log
from my_utils import parse_info, parse_betting_round, parse_summary, \
                     parse_showdown, fill_in_last_player_to_bet
from game import Game
logger = log.get_logger(__name__)


class Parser():
    '''
    Parser takes in a gamestr and outputs a data representation of a poker
    hand.
    '''
    def __init__(self, gamestr):
        self._separate = []
        lines = [line for line in gamestr.strip().split('\n') if line]
        temp = []
        for line in lines:
            if '***' in line:
                self._separate.append(temp)
                temp = [line.strip()]
            else:
                temp.append(line.strip())
        if temp:
            self._separate.append(temp)

        if self._separate:
            self.populate()
Example #40
        #Connection.clients[self.uid].stream.write(data)
        for k, client in Connection.clients.items():
            client.stream.write((res + "##").encode('utf8'))
        self.read_message()

    def on_close(self):
        #Connection.clients[str(self._address[0])+ str(self._address[1])].stream.close()
        del (Connection.clients[str(self._address[0]) + str(self._address[1])])
        print "A user has left.", self._address
        #Connection.clients.remove(self)


class GameServer(TCPServer):
    def handle_stream(self, stream, address):
        print "New connection :", address, stream
        Connection(stream, address)
        print "connection num is:", len(Connection.clients)


if __name__ == '__main__':
    print "Server start ......"
    server = GameServer()
    server.listen(9527)
    logger = log.get_logger(log.log_path, log.level)
    log.error = logger.error
    log.warn = logger.warning
    log.info = logger.info
    log.debug = logger.debug
    loop = IOLoop.instance()
    autoreload.start(loop)
    loop.start()
Example #41
import os
from datetime import datetime, timedelta
from pprint import pprint
import json
from log import get_logger
from storage import *
from dpi_api import *
from caching import Cache
import sqlite3
import requests
from requests.utils import urlparse
from urlparse import ParseResult

os.chdir("/usr/local/comcom/dpi_spy_api")
logger = get_logger(__name__, "uploader")

DATA = []
URL = "http://integration_srv_url.2com.net:8080/save_dpi_log"  # Oracle endpoint

statistic_day = datetime.strftime(datetime.now() - timedelta(days=1),
                                  "%Y-%m-%d")  # Yesterday
#week_ago = datetime.strftime(datetime.now() - timedelta(days = 7), "%Y-%m-%d")

conn = sqlite3.connect("STORAGE.db")  # local storage
logger.debug("Sqlite database conn establisged")
cache = Cache(100)  # Better use redis
logger.debug("Cache var initialized for {0} items".format(
    cache.cacheSize))  # Won't annoy procera with the same queries
storage = Storage(conn)
logger.debug("Abstarct storage initialized")
domains = storage.get_type(
Example #42
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains the logic for the form-only part of BERTRAM."""
import itertools
import random
from typing import List
from collections import Counter

import torch
import numpy as np
from torch import nn

import log
from utils import length_to_mask

logger = log.get_logger("root")

START_SYMBOL = '<S>'
END_SYMBOL = '</S>'
UNK_TOKEN = 'UNK'
PAD_TOKEN = 'PAD'

UNK_ID = 0
PAD_ID = 1


class NGramFeatures:
    def __init__(self, ngrams: List[str], ngram_ids: List[int]):
        self.ngrams = ngrams
        self.ngram_ids = ngram_ids
        self.ngram_lengths = len(ngram_ids)
Example #43
import log
import datetime
import traceback
import sys
import subprocess
from astropy.time import Time
from astropy.time import TimeDelta
import time
import json

# path to configuration
cfg_path = "/home/mcnowinski/seo/nebulizer/curvacious.json"

# set up logger
logger = log.get_logger('curvacious')

# load target and comparison observations
with open(cfg_path) as f:
    cfg = json.load(f)

# user, hardcode for now
user = cfg['user']

# min obs altitude
min_obs_alt = float(cfg['min_obs_alt'])

# seo
observatory = Observatory(cfg['observatory']['code'],
                          cfg['observatory']['latitude'],
                          cfg['observatory']['longitude'],
Example #44
 def closed(self, code, reason):
     log.get_logger().log("WebSocket closed: ({}, {})".format(code, reason),
                          log.INFO)
     self.set_s_done(True)
     return
Example #45
# -*- coding: utf-8 -*-
# @Author: JinHua
# @Date:   2019-11-08 10:30:37
# @Last Modified by:   JinHua
# @Last Modified time: 2019-11-08 10:48:30

import os
import log
import flask

logger = log.get_logger('http_main', filePath='log/http_main.log')

base_folder = os.path.dirname(os.path.abspath(__file__))
static_folder = os.path.join(base_folder, 'file')
logger.info('Static folder is {}'.format(static_folder))

app = flask.Flask('http_server',
                  static_url_path='/static',
                  static_folder=static_folder)


def send_file(filename):
    logger.info('Start to download file {}'.format(filename))
    with open(os.path.join(static_folder, filename), 'rb') as f:
        while 1:
            data = f.read(10 * 1024 * 1024)
            if not data:
                break
            yield data

Example #46
    TOKEN_LIST_CONTRACT_ADDRESS,
    COMPANY_LIST_URL
)
from app.model import (
    Notification,
    NotificationType,
    Listing
)
from app.contracts import Contract
from batch.lib.company_list import CompanyListFactory
from batch.lib.token_list import TokenList
from batch.lib.misc import wait_all_futures
import log

JST = timezone(timedelta(hours=+9), "JST")
LOG = log.get_logger(process_name="PROCESSOR-NOTIFICATIONS-SHARE-TOKEN")

WORKER_COUNT = int(WORKER_COUNT)
SLEEP_INTERVAL = int(SLEEP_INTERVAL)

web3 = Web3(Web3.HTTPProvider(WEB3_HTTP_PROVIDER))
web3.middleware_onion.inject(geth_poa_middleware, layer=0)

engine = create_engine(DATABASE_URL, echo=False)
db_session = scoped_session(sessionmaker())
db_session.configure(bind=engine)

company_list_factory = CompanyListFactory(COMPANY_LIST_URL)

# Get the blockNumber at startup
NOW_BLOCKNUMBER = web3.eth.blockNumber
Example #47
# -*- coding: utf-8 -*-
import requests
from bs4 import BeautifulSoup
import json
import codecs
import datetime
from utils import appropriate_title
import time
from concurrent import futures
import log as Logger

DEFAULT_PREFIX = 'http://tass.ru'
DEFAULT_PREFIX_URL = 'http://tass.ru/api/news/lenta?limit=50&before={date_end}'

log = Logger.get_logger(__name__)


def start_request(params, start_date, end_date):
    print('tass_parser start')
    start_time = time.time()
    start_date = int(
        time.mktime(
            datetime.datetime.strptime(start_date, '%d/%m/%Y').timetuple()))
    end_date = int(
        time.mktime(
            datetime.datetime.strptime(end_date, '%d/%m/%Y').replace(
                hour=23, minute=59).timetuple()))
    exit_node = []
    with codecs.open("articles/tass_news.txt", "w", "utf-8") as jsonfile:
        while (end_date - start_date) >= -1:
            url = DEFAULT_PREFIX_URL.format(date_end=end_date)
Example #48
import os
import struct

from log import get_logger
import traceback
from config import *
import sys
import copy
from zip_64 import *
from zipentry import *
logger = get_logger("zipalign")


def track_error():
    error_message = ""
    (type, value, trace) = sys.exc_info()
    logger.error(
        "**************************************************************")
    logger.error("Error_Type:\t%s\n" % type)
    logger.error("Error_Value:\t%s\n" % value)
    logger.error("%-40s %-20s %-20s %-20s\n" %
                 ("Filename", "Function", "Linenum", "Source"))
    for filename, linenum, funcname, source in traceback.extract_tb(trace):
        logger.info("%-40s %-20s %-20s%-20s" %
                    (os.path.basename(filename), funcname, linenum, source))
    logger.error(
        "**************************************************************")


class End_Central_Dir(object):
    def __init__(self):
Example #49
    def setUpClass(cls):

        super(SweetcombTestCase, cls).setUpClass()
        cls.logger = log.get_logger(cls.__name__)
Example #50
    val_rmse = rmse(labels_arr, pre_arr)
    return val_score, val_rmse


if __name__ == '__main__':
    #
    opt = Config()
    torch.cuda.empty_cache()
    device = torch.device('cuda')
    if opt.loss == 'L1':
        criterion = torch.nn.L1Loss().cuda()
    if opt.loss == 'MSE':
        criterion = torch.nn.MSELoss().cuda()
    model_save_dir = os.path.join(opt.checkpoints_dir, opt.model_name)
    if not os.path.exists(model_save_dir): os.makedirs(model_save_dir)
    logger = get_logger(os.path.join(model_save_dir, 'log.log'))
    logger.info('Using: {}'.format(opt.model_name))
    logger.info('use_frt: {} use_lstm_decoder:{}'.format(
        opt.use_frt, opt.use_lstm_decoder))
    logger.info('train_batch_size: {}'.format(opt.train_batch_size))
    logger.info('optimizer: {}'.format(opt.optimizer))
    logger.info('scheduler: {}'.format(opt.scheduler))
    logger.info('lr: {}'.format(opt.lr))
    logger.info('T_0:{} T_mult:{}'.format(opt.T_0, opt.T_mult))
    logger.info('p:{} extend:{}'.format(opt.p, opt.extend))
    model = SpatailTimeNN()
    model.to(device)
    #
    if opt.optimizer == 'adamw':
        optimizer = torch.optim.AdamW(model.parameters(),
                                      lr=opt.lr,
Example #51
# -*- coding:utf-8 -*-
import log
from Queue import Queue
from threading import Thread
import traceback
import os, signal, sys

LOG = log.get_logger('zxLogger')
#++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
# ThreadPool implementation
#++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++


class ThreadPool(object):
    def __init__(self, size):
        self.size = size
        self.tasks = Queue(size)
        for i in range(size):
            Worker(self.tasks)

    def add_task(self, func, *args, **kargs):
        self.tasks.put((func, args, kargs))

    def wait_completion(self):
        self.tasks.join()


class Worker(Thread):
    def __init__(self, taskQueue):
        Thread.__init__(self)
        self.tasks = taskQueue
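
A minimal usage sketch for the ThreadPool above (the Worker class is truncated in this excerpt); the task function and URLs are hypothetical.

# Hypothetical task function submitted to the pool.
def fetch(url):
    LOG.info('fetching %s', url)

pool = ThreadPool(4)
for url in ['http://example.com/a', 'http://example.com/b']:
    pool.add_task(fetch, url)
pool.wait_completion()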
Example #52
from eth_utils import to_checksum_address
from web3.middleware import geth_poa_middleware

path = os.path.join(os.path.dirname(__file__), "../")
sys.path.append(path)

from app.config import (WEB3_HTTP_PROVIDER, DATABASE_URL,
                        TOKEN_LIST_CONTRACT_ADDRESS, ZERO_ADDRESS)
from app.model import (Listing, IDXConsumeCoupon)
from app.contracts import Contract
import log

JST = timezone(timedelta(hours=+9), "JST")

process_name = "INDEXER-CONSUME-COUPON"
LOG = log.get_logger(process_name=process_name)

web3 = Web3(Web3.HTTPProvider(WEB3_HTTP_PROVIDER))
web3.middleware_onion.inject(geth_poa_middleware, layer=0)

engine = create_engine(DATABASE_URL, echo=False)
db_session = scoped_session(sessionmaker())
db_session.configure(bind=engine)


class Sinks:
    def __init__(self):
        self.sinks = []

    def register(self, sink):
        self.sinks.append(sink)
Example #53
# -*- coding:utf-8 -*-

import log
import mxnet as mx
import numpy as np
from collections import namedtuple
from cv_tools import image_tool

logger = log.get_logger(name='eval',
                        filename='eval.log',
                        filemode='a',
                        level=log.DEBUG,
                        file_and_line=True)


class Eval:
    def __init__(self,
                 modelprefix,
                 imagepath,
                 inputshape,
                 labelpath,
                 epoch=0,
                 format='NCHW'):
        self.modelprefix = modelprefix
        self.imagepath = imagepath
        self.labelpath = labelpath
        self.inputshape = inputshape
        self.epoch = epoch
        self.format = format

        with open(labelpath, 'r') as fo:
Example #54
from log import get_logger
from models.error_handler import CustomException

logger = get_logger(__name__)


class ValidationException(CustomException):
    """
    This class is used for Validation Exceptions
    """

    def __init__(self, message):
        """
        :param message: str
        """
        super().__init__(message, status_code=400)


def validate_fields(req_data, fields, content_type="json"):
    """
    validates if given request data has all fields or not
    :param req_data: dict
    :param fields: list
    :param content_type: str
    :return: None || Exception
    """
    logger.info("entering function validate_fields")
    if req_data is None:
        raise ValidationException(f"expecting {content_type}")
    for field in fields:
        if field not in req_data:
Example #55
from typing import Dict

import itertools
import torch
import io

import log

logger = log.get_logger('root')


def pairwise(iterable):
    a, b = itertools.tee(iterable)
    next(b, None)
    return zip(a, b)


def load_embeddings(embd_file: str) -> Dict[str, torch.Tensor]:
    logger.info('Loading embeddings from {}'.format(embd_file))
    embds = {}
    with io.open(embd_file, 'r', encoding='utf8') as f:
        for line in f:
            comps = line.split()
            word = comps[0]
            embd = [float(x) for x in comps[1:]]
            embds[word] = torch.tensor(embd)
    logger.info('Found {} embeddings'.format(len(embds)))
    return embds
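
A minimal usage sketch for load_embeddings() above, assuming a whitespace-separated text embedding file ("word v1 v2 ...") at a hypothetical path.

# Hypothetical embedding file path.
embds = load_embeddings('glove.6B.50d.txt')
vec = embds.get('hello')  # torch.Tensor, or None if the word is absent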
Example #56
# -*- coding:utf-8 -*-
import log
import zhuaxia.config as config
LOG = log.get_logger("zxLogger")


class Option(object):
    """
    a class containing user given options
    """
    def __init__(self):
        self.is_hq = False
        self.need_proxy_pool = False
        self.proxy_pool = None
        self.dl_lyric = False
        self.inFile = ''
        self.inUrl = ''
        self.incremental_dl = False
        self.proxy = config.CHINA_PROXY_HTTP

    def debug_me(self):
        LOG.debug("hq: " + str(self.is_hq))
        LOG.debug("inFile: " + self.inFile)
        LOG.debug("inUrl: " + self.inUrl)
        LOG.debug("proxy: " + str(self.proxy))
        LOG.debug("needProxyPool: " + str(self.need_proxy_pool))
        LOG.debug("dl_lyric: " + str(self.dl_lyric))
        LOG.debug("incremental_dl: " + str(self.incremental_dl))
Example #57
#!/usr/bin/env python
# coding: utf-8

# In[1]:

import pandas as pd
import random
from database import Connector
import warnings
warnings.filterwarnings("ignore")
from log import get_logger, STUNDENT_NAME
logger = get_logger("dw_order_by_day.log")

# In[2]:

import os
# path for saving results
output_path = 'F:/some_now/pro_output'

if not os.path.exists(output_path):
    os.makedirs(output_path)

# In[3]:

import pymysql
pymysql.install_as_MySQLdb()
import sqlalchemy

# In[4]:

if __name__ == "__main__":
Example #58
app = Flask(__name__)
app.secret_key = 'yolo'
upload_dir = "uploads"
fried_dir = "fried"

if os.path.isabs(upload_dir):
    app.config['UPLOAD_FOLDER'] = upload_dir
else:
    app.config['UPLOAD_FOLDER'] = os.path.join(os.getcwd(), upload_dir)

if os.path.isabs(fried_dir):
    app.config['FRIED_FOLDER'] = fried_dir
else:
    app.config['FRIED_FOLDER'] = os.path.join(os.getcwd(), fried_dir)

logger = get_logger()

if not os.path.exists(app.config["UPLOAD_FOLDER"]):
    logger.info("Creating upload directory")
    os.mkdir(app.config["UPLOAD_FOLDER"])

if not os.path.exists(app.config["FRIED_FOLDER"]):
    logger.info("Creating fried directory")
    os.mkdir(app.config["FRIED_FOLDER"])

def allowed_file(filename):
    return '.' in filename and \
        filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS

@app.route('/upload', methods=['POST'])
def upload_file():
Example #59
class HelpHandler(RequestHandler):

    log = get_logger('HelpHandler')

    # noinspection PyAttributeOutsideInit
    def initialize(self) -> None:
        self.doc = Doc()
        self.nesting_level = 0
        self.f_nesting = 2  # == <html><body>
        self.footer = Doc()
        self.footnote_counter = 1

        if self.application.settings.get('debug'):

            def nl(main=True):
                doc = self.doc if main else self.footer
                doc.nl()
                nest = self.nesting_level if main else self.f_nesting
                doc.text('  ' * nest)

            def go_in(main=True):
                doc = self.doc if main else self.footer
                doc.nl()
                if main:
                    self.nesting_level += 1
                    nest = self.nesting_level
                else:
                    self.f_nesting += 1
                    nest = self.f_nesting
                doc.text('  ' * nest)

            def go_out(main=True):
                doc = self.doc if main else self.footer
                if main:
                    self.nesting_level -= 1
                    nest = self.nesting_level
                else:
                    self.f_nesting -= 1
                    nest = self.f_nesting
                doc.nl()
                doc.text('  ' * nest)

            def line(tag_name, text_content, *args, **kwargs):
                self.doc.line(tag_name, text_content, *args, **kwargs)
                nl()

            def f_line(tag_name, text_content, *args, **kwargs):
                self.footer.line(tag_name, text_content, *args, **kwargs)
                nl(False)

            def stag(tag_name, *args, **kwargs):
                self.doc.stag(tag_name, *args, **kwargs)
                nl()
        else:

            def no_op(*_a, **_ka):
                pass

            nl = go_in = go_out = no_op
            line = self.doc.line
            f_line = self.footer.line
            stag = self.doc.stag
        self.nl = nl
        self.line = line
        self.f_line = f_line
        self.stag = stag
        self.go_in = go_in
        self.go_out = go_out
        # Prepare footer
        self.nl(False)
        self.footer.stag('hr')
        self.nl(False)

    def get_fc(self) -> str:
        temp = self.footnote_counter
        self.footnote_counter += 1
        return str(temp)

    def footnote(self, fn_text: str) -> None:
        c = self.get_fc()
        sup_id = 'ref' + c
        note_id = 'fn' + c
        tag = self.doc.tag
        text = self.doc.text
        line = self.line
        with tag('sup'):
            self.go_in()
            line('a', href='#' + note_id, id=sup_id, text_content=c)
            self.go_out()
        self.nl()
        del tag
        del text
        del line

        f_tag = self.footer.tag
        f_text = self.footer.asis
        f_line = self.f_line
        with f_tag('p', id=note_id):
            self.go_in(False)
            with f_tag('small'):
                self.go_in(False)
                f_text(c + '. ')
                f_line('a',
                       href='#' + sup_id,
                       title='Go back',
                       text_content='^')
                f_text(' ' + fn_text)
                self.go_out(False)
            self.nl(False)
            self.go_out(False)
        self.nl(False)

    def head(self):
        tag, line, stag = self.doc.tag, self.line, self.stag
        with tag('head'):
            self.go_in()
            stag(
                'link',
                rel="stylesheet",
                href=
                "https://cdnjs.cloudflare.com/ajax/libs/mini.css/3.0.1/mini-default.min.css"
            )
            line('title', 'General purpose VNF configuration server')
            request = self.request  # type: HTTPServerRequest
            uri = request.protocol + '://' + request.host + '/favicon.png'
            stag('link', rel="icon", href=uri, type="image.png")
            self.go_out()
        self.nl()

    def description(self):
        tag, text, line, fn = self.doc.tag, self.doc.text, self.doc.line, self.footnote
        line('h3', id='description', text_content='Description')
        with tag('p'):
            self.go_in()
            text("This is a general-purpose HTTP server serving VNF")
            fn("<a href=https://www.etsi.org/deliver/etsi_gs/nfv-ifa/"
               "001_099/011/03.01.01_60/gs_nfv-ifa011v030101p.pdf> "
               "https://www.etsi.org/deliver/etsi_gs/nfv-ifa/"
               "001_099/011/03.01.01_60/gs_nfv-ifa011v030101p.pdf</a>")
            text(" configuration information. "
                 "It offers an ETSI IFA 008 compliant")
            fn("<a href=https://www.etsi.org/deliver/etsi_gs/NFV-IFA/"
               "001_099/008/03.01.01_60/gs_nfv-ifa008v030101p.pdf> "
               "https://www.etsi.org/deliver/etsi_gs/NFV-IFA/"
               "001_099/008/03.01.01_60/gs_nfv-ifa008v030101p.pdf</a>"
               " - paragraph 6.2.3")
            text(" NBI.")
            self.go_out()
        self.nl()

    def nbi(self):
        tag, text, line = self.doc.tag, self.doc.text, self.line
        line('h3', id='nbi', text_content='North bound interface')
        with tag('p'):
            self.go_in()
            text('To configure this VNF, perform an HTTP PATCH request to ')

            request = self.request  # type: HTTPServerRequest
            uri = request.protocol + '://' + request.host + '/vnfconfig/v1/configuration'
            line('a', uri, href=uri)
            text('.')
            self.go_out()
        self.nl()
        line('p', 'The format is as follows:')
        self.format_example()

    def format_example(self):
        ex = """{
    "vnfConfigurationData": {
        "vnfSpecificData": [
            {
                "key": "vnf.hss.vdu.hss_vdu.hostname",
                "value": "some-value",
            },
            {
                "key": "uservnf.hss.vdu.hss.domainname",
                "value": "some-other-value"
            }
        ]
    }
}"""

        ex = ex.replace('\n', '<br>')

        with self.doc.tag('pre'):
            self.doc.asis(ex)
        self.nl()

    def config(self):
        tag, text, line, fn, stag = self.doc.tag, self.doc.text, self.doc.line, self.footnote, self.stag
        line('h3', id='config', text_content='Configuration file')
        self.nl()
        line(
            'p', "The configuration file (by default the 'settings.json' file "
            "in the same current working directory) is a json file "
            "following this format: ")
        self.nl()
        self.config_example()
        line(
            'p',
            "where 'port' is the TCP port on on which the server should bind itself, "
            "'script' is the command which should be run to enforce the actual "
            "configuration, taking as arguments the parameter values, and "
            "'params' are the name of the parameters which are required for configuration."
        )
        self.nl()
        line(
            'p',
            "For example, if there are two parameters configured with values '1' and 'stop' "
            "respectively, the command called will be ")
        self.nl()
        with tag('pre'):
            text('<script> 1 stop')
        self.go_out()
        self.nl()

    def config_example(self):
        ex = """{
    "port": 8080,
    "script": "/opt/VnfServer/hss.sh",
    "params": [
        "vnf.hss.vdu.hss_vdu.hostname",
        "uservnf.hss.vdu.hss.domainname"
    ]
}"""

        ex = ex.replace('\n', '<br>')

        with self.doc.tag('pre'):
            self.doc.asis(ex)
        self.nl()

    def body(self):
        doc, tag, text, line, fn = self.doc, self.doc.tag, self.doc.text, self.doc.line, self.footnote
        with tag('body'):
            self.go_in()
            line('h1', 'General purpose VNF indicator server')
            self.description()
            self.nbi()
            self.config()
            self.doc.asis(self.footer.getvalue())
            self.nl()
            self.go_out()
        self.nl()

    def get(self) -> None:
        self.log.debug('Received call')
        doc, tag, text, line, fn = self.doc, self.doc.tag, self.doc.text, self.doc.line, self.footnote
        with tag('html'):
            self.go_in()
            self.head()
            self.body()
            self.go_out()
        self.nl()
        self.write(doc.getvalue())
"""
Test object related class
"""
from base_class import Entrypoint, check_func_entrys
from env import Env

from log import get_logger

LOGGER = get_logger(__name__)


class TestObject(object):
    _test_entry = set()

    def __call__(self, *args, **kwargs):
        """
        Put the real steps in this function
        """
        raise NotImplementedError


class CheckPoint(Entrypoint):
    """
    CheckPoint
    """
    def __init__(self, test_level, version=None):
        self.test_level = test_level
        self.version = version
        self.checkpoints = None

    def bind(self, checkpoints):