コード例 #1
0
ファイル: __init__.py プロジェクト: teitei-tk/flask-skeleton
    def get_logger(self, key='console'):
        """Return the logger registered under *key*, falling back to 'file'.

        :param key: logger registry key (default 'console').
        :return: logger object from lib.logger.
        """
        from lib.logger import get_logger

        # Removed the dead `logger = None` pre-assignment; return directly.
        try:
            return get_logger(key)
        except KeyError:
            # Unknown key: fall back to the file logger.
            return get_logger('file')
コード例 #2
0
ファイル: worker.py プロジェクト: ariel17/baserguin
    def __init__(self, *args, **kwargs):
        """Set up the worker thread: create its logger and read run state.

        :param kwargs: expects optional 'name' (logger name) and required
            'is_running' (shared running flag).
        """
        Thread.__init__(self)

        thread_name = kwargs.get('name', 'snoopy_thread')
        self._logger = get_logger(thread_name)
        self._logger.info('Initializing...')

        self._is_running = kwargs['is_running']
コード例 #3
0
ファイル: worker.py プロジェクト: ariel17/baserguin
    def __init__(self, *args, **kwargs):
        """Set up the worker process and its logger.

        When an integer 'proc_num' is supplied, the logger name is suffixed
        with it (e.g. "name[03]") so sibling processes are distinguishable.
        """
        Process.__init__(self)

        proc_num = kwargs.get('proc_num')
        log_name = kwargs.get('name', 'snoopy_no_name')
        if isinstance(proc_num, int):
            log_name = '%s[%0.2d]' % (log_name, proc_num)

        self._logger = get_logger(log_name)
        self._logger.info('Initializing...')

        self._is_running = kwargs['is_running']
コード例 #4
0
ファイル: dcollector.py プロジェクト: ariel17/baserguin
 def __init__(self, cfg):
     """Store config, register the daemon's pid file, and set up logging.

     :param cfg: mapping with at least 'pid_path'.
     """
     self._cfg = cfg
     Daemon.__init__(self, self._cfg['pid_path'])
     self._logger = get_logger('snoopy-collector')
コード例 #5
0
# FIX: import_module was used below without being imported.
from importlib import import_module

from app.flask_app import celery
from env import QuerybookSettings
from lib.logger import get_logger

from .run_query import run_query_task
from .run_sample_query import run_sample_query
from .dummy_task import dummy_task
from .update_metastore import update_metastore
from .sync_elasticsearch import sync_elasticsearch
from .run_datadoc import run_datadoc
from .delete_mysql_cache import delete_mysql_cache
from .poll_engine_status import poll_engine_status
from .presto_hive_function_scrapper import presto_hive_function_scrapper
from .db_clean_up_jobs import run_all_db_clean_up_jobs

LOG = get_logger(__file__)

# Plugin tasks are optional; a missing module is expected in plain installs.
try:
    tasks_module = import_module("tasks_plugin")
except (ImportError, ModuleNotFoundError) as err:
    # FIX: the message named "task_plugin" but the module imported is
    # "tasks_plugin".
    LOG.info("Cannot import %s for tasks due to: %s", "tasks_plugin", err)

# Linter: reference imported task names so they are not flagged as unused.
celery
run_query_task
run_sample_query
dummy_task
update_metastore
sync_elasticsearch
run_datadoc
delete_mysql_cache
poll_engine_status
presto_hive_function_scrapper
run_all_db_clean_up_jobs
コード例 #6
0
import lib.logger as logging
from lib.functions import wait_until
from lib.game import ui
from lib.game.battle_bot import ManualBattleBot
from lib.game.missions.missions import Missions

logger = logging.get_logger(__name__)


class Story(Missions):
    """Class for working with Story missions."""
    class STORY_MISSION:
        """UI element identifiers for selectable story missions."""
        DIMENSIONAL_CLASH_NORMAL = "STORY_MISSION_DIMENSIONAL_CLASH_NORMAL"
        DIMENSIONAL_CLASH_ULTIMATE = "STORY_MISSION_DIMENSIONAL_CLASH_ULTIMATE"

    class STORY_STAGE:
        """UI element identifiers for individual story stages."""
        DIMENSIONAL_CLASH_1_1 = "STORY_MISSION_DIMENSIONAL_CLASH_1_1"

    def __init__(self, game):
        """Class initialization.

        :param lib.game.game.Game game: instance of the game.
        """
        # Register this mission set under the 'STORY' mode name.
        super().__init__(game, mode_name='STORY')

    @property
    def battle_over_conditions(self):
        def rewards():
            return self.emulator.is_ui_element_on_screen(
                ui.STORY_BATTLE_REWARDS)
コード例 #7
0
import os
import os.path
import string
import random
import mlflow
import pickle
from lib.logger import get_logger

from Player import config
from Model.model import AZero
from Azts.config import GAMEDIR, PLAYERDIR
from Azts import player
from Azts import stockfish_player
from Azts import mock_model

log = get_logger("Utility")

GAME = "game"
STATS = "stats"
MOVES = "moves"


# from https://pynative.com/python-generate-random-string/
def random_string(length=8):
    """Generate a random lowercase string to use as an id stamp for
    self plays.

    :param length: number of characters to generate (default 8).
    :return: string of random ascii lowercase letters.
    """
    chars = [random.choice(string.ascii_lowercase) for _ in range(length)]
    return ''.join(chars)
コード例 #8
0
from datetime import datetime, timedelta
from telegram import InlineKeyboardButton
from telegram.ext import Updater, Filters
from telegram.ext import MessageHandler, CallbackQueryHandler

from lib.logger import get_logger

logger = get_logger(__name__)


class View:
    def __init__(self, updater: Updater, handler):
        """Register *handler* for chat messages and callback queries, then
        start polling Telegram for updates.

        :param updater: telegram.ext.Updater whose dispatcher is used.
        :param handler: callable invoked for each routed update.
        """
        dispatcher = updater.dispatcher
        dispatcher.add_handler(MessageHandler(Filters.chat_type, handler))
        dispatcher.add_handler(CallbackQueryHandler(handler))
        updater.start_polling()
        logger.info('TelegramBot started')

    @staticmethod
    def welcome(first_name: str) -> str:
        """Return the greeting message shown to a user named *first_name*."""
        return 'Ciao {} 👋'.format(first_name)

    @staticmethod
    def menu(devices: list) -> tuple:
        message = 'Seleziona un dispositivo'
        inline_keyboard = [[
            InlineKeyboardButton(text='{ico} {name} {ico}'.format(
                ico=('❌' if r.boolDisable else ''), name=r.strName),
                                 callback_data='device|{}|{}'.format(
                                     r.intIdDevice, 0))
コード例 #9
0
import Queue as queue
import re
import StringIO as stringio
import sgmllib
import sys
import threading
import time
import urllib
import urllib2
import urlparse

from lib import htmlparse
from lib import config
from lib import logger

LOG = logger.get_logger(__name__)


class Spider(object):
    """
         Spider主程序

        Attributes:
            module_name: 业务模块名称
            cfg: 配置文件解析类对象
            url_queue: 待抓取的url队列,包括url和当前抓取深度
            handled_set: 已处理过的url的set集合,抓取操作完成后将url加入
            lock: 线程锁
            fetch: 具体抓取操作执行函数
            max_depth: 最大抓取深度(种子为0级)
            crawl_interval: 抓取间隔. 单位: 秒
コード例 #10
0
ファイル: parser.py プロジェクト: 0311snitch/TaskTracker
def parse_column(subcategory, args):
    """
    "Column" handler: dispatch add / delete / edit / show operations.

    :param subcategory: SubCategories member selecting the operation
    :param args: positional argument list for the selected operation
    :return: None
    """
    log_tag = "parse_column"
    log = logger.get_logger(log_tag)
    if subcategory == SubCategories.add:
        if len(args) != 5:
            log.error(
                "Incorrect number of arguments. Expected {} , but {} was recieved"
                .format(5, len(args)))
            column_view.create_format()
        else:
            log.info(
                "Trying to create a column with the name - {} and description - {}"
                .format(args[3], args[4]))
            ColumnController.create_columm(args[0], args[1], args[2], args[3],
                                           args[4])
            column_view.success_create()
            log.info("Column {} was successfully created".format(args[3]))
    if subcategory == SubCategories.delete:
        if len(args) != 4:
            # FIX: the logged expected count now matches the check (4, not 3).
            log.error(
                "Incorrect number of arguments. Expected {} , but {} was recieved"
                .format(4, len(args)))
            column_view.delete_format()
        else:
            # FIX: args has exactly 4 items here; the old message formatted
            # args[4] and raised IndexError on every delete.
            log.info(
                "Trying to delete a column with the name - {}".format(args[3]))
            ColumnController.delete_column(args[0], args[1], args[2], args[3])
            column_view.success_delete()
            log.info("Column {} was successfully deleted".format(args[3]))
    if subcategory == SubCategories.edit:
        if len(args) != 6:
            log.error(
                "Incorrect number of arguments. Expected {} , but {} was recieved"
                .format(6, len(args)))
        else:
            log.info(
                "Trying to edit a column with the name - {} and description - {}"
                .format(args[3], args[4]))
            if args[0] == 'name':
                ColumnController.edit_name(args[1], args[2], args[3], args[4],
                                           args[5])
                column_view.success_edit()
                log.info(
                    "Column {} is successfully edited. New name is {}".format(
                        args[4], args[5]))
            elif args[0] in ('description', 'desc'):
                # FIX: `args[0] == 'description' or 'desc'` was always true,
                # which made the "Incorrect format" branch unreachable.
                ColumnController.edit_desc(args[1], args[2], args[3], args[4],
                                           args[5])
                column_view.success_edit()
                log.info(
                    "Column {} is successfully edited. New description is {}".
                    format(args[4], args[5]))
            else:
                log.error("Incorrect format")
    if subcategory == SubCategories.show:
        if len(args) != 4:
            log.error(
                "Incorrect number of arguments. Expected {} , but {} was recieved"
                .format(4, len(args)))
            column_view.show_format()
        else:
            if args[3] == 'all':
                # FIX: args has exactly 4 items here; the old message
                # formatted args[4] and raised IndexError.
                log.info("Trying to show all columns")
                cols = ColumnController.show_all(args[0], args[1], args[2])
                column_view.show_all(cols)
                log.info("All columns was shown")
コード例 #11
0
    @timing
    def function_to_be_timed():
        ...

OR directly apply the wrapper:
    timing(function_to_be_timed)()

report for accumulated run-times:
    runtime_summary()
"""

import time
import functools

from lib.logger import get_logger
log = get_logger("Timing", level="DEBUG")

_RT_START = time.perf_counter()

_RT_BINS = dict()
_RT_FUNCS = dict()


def timing(_func=None, *, bin=None):
    """ decorator for timing functions
    Usage:
        @timing
        def function_to_be_timed():
            ...

    OR directly apply the wrapper:
コード例 #12
0
'''
this instantiates an analysis match which
tracks metrics on the mlflow server
'''
import time
import argparse
import mlflow
import pickle
import os
from lib.logger import get_logger

from Azts import utility
from Azts.config import TO_STRING, WHITE, BLACK
from Matches import match

log = get_logger("AnalysisMatch")


class AnalysisMatch(match.Match):

    match_moves = []
    gamestats = []

    def simulate(self):
        '''
        simulate a game. this starts a
        loop of taking turns and making
        moves between the players while
        storing each game position and
        corresponding move distributions
        in data collection. loop ends with
コード例 #13
0
ファイル: tasks.py プロジェクト: zippies/chaos
def worker_run_job(loadtool,
                   user,
                   script_name,
                   script,
                   mission_id,
                   agent_list=list()):
    """Execute a load-test mission with gatling or jmeter and publish results.

    Writes the uploaded script to disk, builds the tool's shell command,
    spawns it via pexpect, streams its output into the cache for live view,
    and on completion stores report artifacts and notifies the user.
    Always returns "ok"; failures are only logged.

    NOTE(review): Python 2 code (print statement below); `agent_list` is a
    mutable default argument -- harmless only because it is never mutated.
    """
    logger = get_logger("job", "INFO", "logs/job_%s.log" % mission_id)

    def create_script():
        # Write the script under the per-user folder; jmeter also needs its
        # report output folder created up front.
        folder = os.path.join(
            Config.SCRIPT_FOLDER
            if loadtool == "gatling" else Config.JMETER_SCRIPT_FOLDER, user)
        if loadtool == "jmeter":
            report_folder = os.path.join(
                Config.REPORT_FOLDER,
                "report-{mission_id}-jmeter".format(mission_id=mission_id))
            os.makedirs(report_folder)
        if not os.path.exists(folder):
            os.makedirs(folder)

        # gatling keeps "<name>.scala"; other tools get
        # "<name>-<mission_id>.jmx".
        filename = os.path.join(
            folder, "%s.%s" %
            ("%s-%s" % (script_name, mission_id) if loadtool != "gatling" else
             script_name, "scala" if loadtool == "gatling" else "jmx"))

        with open(filename, "wb") as f:
            f.write(script)

    def save_to_remote(js_dir, mission_id, save_list):
        # Cache the listed report js files for 60s; best-effort (errors are
        # printed, not raised).
        try:
            for file in save_list:
                with open(os.path.join(js_dir, file), "rb") as f:
                    cache.setex("%s-%s" % (file.replace(".", "_"), mission_id),
                                60, f.read())
        except Exception as e:
            print e

    try:
        create_script()
        exec_cmd = ""
        if loadtool == "gatling":
            gatling_bin = os.path.join(Config.GATLING_HOME, "bin")

            exec_cmd = "sh {gatling_bin}/gatling.sh -s {user}.{script_name} -rf {report_folder} -on {result_dir_name}".format(
                gatling_bin=gatling_bin,
                user=user,
                script_name=script_name,
                mission_id=mission_id,
                report_folder=Config.REPORT_FOLDER,
                result_dir_name="report-%s" % mission_id)
        elif loadtool == "jmeter":
            # Distributed run when agents are given (-R), otherwise local.
            if len(agent_list):
                exec_cmd = "sh {jmeter_home}/bin/jmeter.sh -n -t {jmeter_script_folder}/{user}/{script_name}-{mission_id}.jmx -R {agent_list} -l {jmeter_script_folder}/{user}/{script_name}.{mission_id}.jtl -e -o {report_folder}/report-{mission_id}-jmeter".format(
                    debug_detail_folder=Config.DEBUG_DETAIL_FOLDER,
                    jmeter_home=Config.JMETER_HOME,
                    jmeter_script_folder=Config.JMETER_SCRIPT_FOLDER,
                    user=user,
                    script_name=script_name,
                    report_folder=Config.REPORT_FOLDER,
                    mission_id=mission_id,
                    agent_list=",".join(agent_list))
            else:
                exec_cmd = "sh {jmeter_home}/bin/jmeter.sh -n -t {jmeter_script_folder}/{user}/{script_name}-{mission_id}.jmx -l {jmeter_script_folder}/{user}/{script_name}.{mission_id}.jtl -e -o {report_folder}/report-{mission_id}-jmeter".format(
                    debug_detail_folder=Config.DEBUG_DETAIL_FOLDER,
                    jmeter_home=Config.JMETER_HOME,
                    jmeter_script_folder=Config.JMETER_SCRIPT_FOLDER,
                    user=user,
                    script_name=script_name,
                    report_folder=Config.REPORT_FOLDER,
                    mission_id=mission_id)

        cache.append("mission_%s_log" % mission_id, exec_cmd)

        try:
            p = pexpect.spawn(exec_cmd, timeout=120)
        except Exception as e:
            cache.set("mission_%s_error" % mission_id, str(e))
            logger.error(str(e))
        # NOTE(review): if spawn raised above, `p` is unbound here and the
        # while raises NameError (swallowed by the outer except) -- confirm.
        while p.isalive():
            # Stream each output line into the mission log for live viewing.
            p.expect(['\n', pexpect.EOF, pexpect.TIMEOUT])
            info = p.before
            cache.append("mission_%s_log" % mission_id, "<br>" + info)
        else:
            # while/else: runs once the loop exits normally (process ended).
            if loadtool == "jmeter":
                save_to_remote(
                    os.path.join(Config.REPORT_FOLDER,
                                 "report-%s-jmeter" % mission_id, "content",
                                 "js"), mission_id,
                    ["dashboard.js", "graph.js"])
            elif loadtool == "gatling":
                # Cache stats.json, tar the report dir, and archive it.
                report_dir = get_gatling_report_dir(mission_id)
                source_dir = os.path.join(Config.REPORT_FOLDER, report_dir)
                report_file = os.path.join(Config.REPORT_FOLDER,
                                           "report-gatling",
                                           "report-%s.tar.gz" % mission_id)
                with open(
                        os.path.join(Config.REPORT_FOLDER, report_dir, "js",
                                     "stats.json")) as f:
                    cache.setex("mission_%s_stats" % mission_id, 60, f.read())
                with tarfile.open(report_file, "w:gz") as tar:
                    tar.add(source_dir, arcname=os.path.basename(source_dir))
                Domino.save(report_file, "report-%s.tar.gz" % mission_id)
            cache.setex("mission_%s_finish" % mission_id, 60, 1)
            cache.delete("mission_%s_log" % mission_id)
            send_wechat_notice(mission_id, user)
    except Exception as e:
        logger.error(traceback.format_exc())
    finally:
        # NOTE(review): return inside finally suppresses any in-flight
        # exception and always reports "ok" -- confirm this is intended.
        return "ok"
コード例 #14
0
"""

import os.path
import time
import pickle 
from lib.logger import get_logger

from Player import config 
from Azts import player
from Azts import mock_model
from Azts import utility
from Azts.config import GAMEDIR, \
    ROLLOUTS_PER_MOVE, SHOW_GAME
from Matches import match

log = get_logger("Contest")

class Contest():
    '''
    selfplay is initialized with the number of
    rollouts that the matching ai player are
    using per move.
    the number of game simulations is determined
    by the parameter in function start() which
    actually starts the series of matches.
    After each match, the match data is written
    to a separate file which facilitates
    parallelisation of creating data for many
    matches.
    '''
コード例 #15
0
ファイル: irc.py プロジェクト: xigua369/bed-pool-proxy
from twisted.words.protocols import irc
from twisted.internet import reactor, protocol
import random
import string

import custom_exceptions
import lib.logger as logger
log = logger.get_logger('irc')

# Reference to open IRC connection
_connection = None


def get_connection():
    """Return the module-wide open IRC connection.

    :raises custom_exceptions.IrcClientException: if no connection is open.
    """
    if not _connection:
        raise custom_exceptions.IrcClientException("IRC not connected")
    return _connection


class IrcLurker(irc.IRCClient):
    def connectionMade(self):
        """Handle a successful IRC connection: reset the peer registry and
        publish this client as the module-wide connection."""
        irc.IRCClient.connectionMade(self)
        self.peers = {}

        # Expose this instance so get_connection() can hand it out.
        global _connection
        _connection = self

    def get_peers(self):
        """Return the known peers (the values of the peer mapping)."""
        return self.peers.values()
コード例 #16
0
from twisted.web.server import Request, Session, NOT_DONE_YET
from twisted.internet import defer
from twisted.python.failure import Failure
import hashlib
import json
import string

import helpers
import semaphore
#from storage import Storage
from protocol import Protocol, RequestCounter
from event_handler import GenericEventHandler
from lib.settings import s as settings

import lib.logger as logger
log = logger.get_logger('http_transport')


class Transport(object):
    def __init__(self, session_id, lock):
        self.buffer = []
        self.session_id = session_id
        self.lock = lock
        self.push_url = None  # None or full URL for HTTP Push
        self.peer = None

        # For compatibility with generic transport, not used in HTTP transport
        self.disconnecting = False

    def getPeer(self):
        """Return the transport's peer (None until one has been set)."""
        return self.peer
コード例 #17
0
ファイル: protocol.py プロジェクト: xigua369/bed-pool-proxy
import time
import socket

from twisted.protocols.basic import LineOnlyReceiver
from twisted.internet import defer, reactor, error
from twisted.python.failure import Failure

#import services
import stats
import signature
import custom_exceptions
import connection_registry
from lib.settings import s as settings

import lib.logger as logger
log = logger.get_logger('protocol')

class RequestCounter(object):
    def __init__(self):
        # Deferred fired once the counter has drained to zero (see finish()).
        self.on_finish = defer.Deferred()
        self.counter = 0
        
    def set_count(self, cnt):
        """Set the number of pending requests to wait for."""
        self.counter = cnt
        
    def decrease(self):
        """Mark one request done; fires finish() when the count reaches 0."""
        self.counter -= 1
        if self.counter <= 0:
            self.finish()

    def finish(self):
コード例 #18
0
 def initialize(self):
     """Create the router's logger, HTTP bridge, and DB handle, and record
     the expected network magic from the Network configuration."""
     self.logger = get_logger('routers')
     self.http_bridge = HttpBridge()
     self.db = DB()
     self.expected_network_magic = Network().network_magic
コード例 #19
0
ファイル: parser.py プロジェクト: 0311snitch/TaskTracker
def parse_project(subcategory, args):
    """
    "Project" handler: dispatch add / delete / show / edit / members
    operations.

    :param subcategory: SubCategories member selecting the operation
    :param args: positional argument list for the selected operation
    :return: None
    """
    log_tag = "parse_project"
    log = logger.get_logger(log_tag)
    if subcategory == SubCategories.add:
        if len(args) != 4:
            log.error(
                "Incorrect number of arguments. Expected {} , but {} was recieved"
                .format(4, len(args)))
            project_view.add_format()
        else:
            log.info(
                "Trying to create a project with the name - {} and description - {}"
                .format(args[2], args[3]))
            project = ProjectController.create(args[0], args[1], args[2],
                                               args[3])
            project_view.success_create(project)
            log.info("Project {} was successfully created".format(args[2]))
    elif subcategory == SubCategories.delete:
        if len(args) != 3:
            log.error(
                "Incorrect number of arguments. Expected {}, but {} was recieved"
                .format(3, len(args)))
            # FIX: delete_format was called unqualified; every other view
            # helper in this handler lives on project_view.
            project_view.delete_format()
        else:
            log.info("Trying to delete a project {}".format(args[2]))
            ProjectController.delete(args[0], args[1], args[2])
            project_view.success_delete()
            log.info("Project {} is successfully deleted".format(args[2]))
    elif subcategory == SubCategories.show:
        if len(args) != 3:
            incorrent_args_len()
            # FIX: the logged expected count now matches the check (3, not 4).
            log.error(
                "Incorrect number of arguments. Expected {} , but {} was recieved"
                .format(3, len(args)))
        else:
            if args[0] == 'all':
                log.info("Trying to show all projects of this user")
                projects = ProjectController.show_all(args[1], args[2])
                project_view.show_info(projects)
                log.info("All project was shown")
    elif subcategory == SubCategories.edit:
        if len(args) != 5:
            project_view.edit_format()
            # FIX: the logged expected count now matches the check (5, not 4).
            log.error(
                "Incorrect number of arguments. Expected {} , but {} was recieved"
                .format(5, len(args)))
        else:
            if args[0] == 'name':
                log.info("Trying to change the name of '{}' project".format(
                    args[3]))
                ProjectController.edit_name(args[1], args[2], args[3], args[4])
                project_view.success_edit()
                log.info("Project is successfully edited")
            elif args[0] in ('description', 'desc'):
                # FIX: was `if args[0] == 'description' or 'desc'`, which is
                # always true -- the description edit also ran right after a
                # name edit, and the error branch was unreachable.
                log.info(
                    "Trying to change the description of  '{}' project".format(
                        args[3]))
                ProjectController.edit_description(args[1], args[2], args[3],
                                                   args[4])
                project_view.success_edit()
                log.info("Project is successfully edited")
            else:
                print(project_view.edit_format())
                log.error("Incorrect format")
    elif subcategory == SubCategories.members:
        if len(args) != 5:
            members_add_format()
            # FIX: the logged expected count now matches the check (5, not 4).
            log.error(
                "Incorrect number of arguments. Expected {} , but {} was recieved"
                .format(5, len(args)))
        else:
            if args[0] == 'add':
                log.info("Trying to add {} to '{}' project".format(
                    args[4], args[3]))
                ProjectController.add_person_to_project(
                    args[1], args[2], args[3], args[4])
                log.info("User is successfully added to the project")
                project_view.user_added()
            elif args[0] == 'delete':
                # FIX: log the attempt before performing the deletion.
                log.info("Trying to delete {} from '{}' project".format(
                    args[3], args[4]))
                ProjectController.delete_person_from_project(
                    args[1], args[2], args[3], args[4])
                log.info("User is successfully deleted from project")
                project_view.user_deleted()
            else:
                members_add_format()
コード例 #20
0
    delete_schema,
    create_table,
    delete_table,
    create_table_information,
    create_column,
    delete_column,
    iterate_data_schema,
    get_table_by_schema_id,
    get_column_by_table_id,
    get_schema_by_name,
    get_table_by_schema_id_and_name,
)

from .utils import MetastoreTableACLChecker

LOG = get_logger(__name__)


class DataSchema(NamedTuple):
    """Immutable record describing one metastore schema."""

    # Schema name as reported by the metastore.
    name: str


class DataTable(NamedTuple):
    name: str

    # The type of table, it can be an arbitrary string
    type: str = None
    owner: str = None

    # Expected in UTC seconds
    table_created_at: int = None
コード例 #21
0
ファイル: parser.py プロジェクト: 0311snitch/TaskTracker
def parse_task(subcategory, args):
    """
    "Task" handler: dispatch add / show / delete / edit / subtask operations.

    :param subcategory: SubCategories member selecting the operation
    :param args: positional argument list for the selected operation
    :return: None
    """
    log_tag = "parse_task"
    log = logger.get_logger(log_tag)
    if subcategory == SubCategories.add:
        if len(args) != 10:
            log.error(
                "Incorrect number of arguments. Expected {} , but {} was recieved"
                .format(10, len(args)))
            task_view.create_format()
        else:
            log.info("Trying to add task with the name - {}".format(args[4]))
            task = TaskController.add_task(args[0], args[1], args[2], args[3],
                                           args[4], args[5], args[6], args[7],
                                           args[8], args[9])
            task_view.success_create(task)
            log.info("Task is successfully added")
    if subcategory == SubCategories.show:
        if args[0] == 'all':
            log.info("Trying to show all tasks in this column of project")
            tasks = TaskController.show_tasks(args[1], args[2], args[3],
                                              args[4])
            task_view.show_tasks(tasks)
            log.info("All task was shown")
    if subcategory == SubCategories.delete:
        if len(args) != 5:
            log.error(
                "Incorrect number of arguments. Expected {} , but {} was recieved"
                .format(5, len(args)))
            incorrent_args_len()
        else:
            log.info("Trying to delete task")
            TaskController.delete_task(args[0], args[1], args[2], args[3],
                                       args[4])
            log.info("Task is successfully deleted")
    if subcategory == SubCategories.edit:
        if len(args) != 7:
            log.error(
                "Incorrect number of arguments. Expected {} , but {} was recieved"
                .format(7, len(args)))
            task_view.edit_format()
        else:
            log.info("Tring to edit a column")
            TaskController.edit(args[0], args[1], args[2], args[3], args[4],
                                args[5], args[6])
            task_view.success_edit()
            log.info("Task is successfully edited")
    if subcategory == SubCategories.subtask:
        if args[0] == 'add':
            if len(args) != 7:
                # FIX: the logged expected count now matches the check
                # (7, not 4).
                log.error(
                    "Incorrect number of arguments. Expected {} , but {} was recieved"
                    .format(7, len(args)))
                task_view.add_subtask_format()
            else:
                log.info("Trying to set subtask to task")
                TaskController.set_subtask(args[1], args[2], args[3], args[4],
                                           args[5], args[6])
                log.info("Task is successfully set as subtask")
コード例 #22
0
from twisted.internet import defer
from twisted.internet import reactor
from twisted.names import client
import random
import time

from services import GenericService, signature, synchronous
import pubsub

import lib.logger as logger
log = logger.get_logger('example')


class ExampleService(GenericService):
    service_type = 'example'
    service_vendor = 'Stratum'
    is_default = True

    def hello_world(self):
        return "Hello world!"

    hello_world.help_text = "Returns string 'Hello world!'"
    hello_world.params = []

    @signature
    def ping(self, payload):
        return payload

    ping.help_text = "Returns signed message with the payload given by the client."
    ping.params = [
        ('payload', 'mixed', 'This payload will be sent back to the client.'),
コード例 #23
0
""" handles configuration parsing from yaml
combines default settings, warning for unknown settings
and type checking
"""
import os
import sys
import yaml
import mlflow

ROOTDIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(ROOTDIR)

from lib.logger import get_logger
log = get_logger("Config")
CONFIGDIR = "Player"


class Options(object):
    """ Options Class
    able to parse yaml config settings
    while checking for correct data types and unknown settings
    (that aren't contained in the derived class' attributes)
    """

    def __init__(self, d, default):
        """Initialize the options object.

        :param d: raw options mapping (e.g. parsed from yaml).
        :param default: mapping of default option values to fall back on.
        """
        self.load_options_safe(d, default)

    def load_options_safe(self, d, default_options):
        """ safely set member attributes of Options class
        while using default attributes;
        type-assertion for overridden values,
コード例 #24
0
from random import randint
import uuid
import os
import re

from lib.utils import mask_line

from lib.logger import get_logger
logger = get_logger(LOG_NAME='noplp')


class NoPLPSong(object):
    def __init__(self, title, artist, decade, basepath, tab):
        """Build a song record and immediately load its data.

        :param title: song title.
        :param artist: performing artist.
        :param decade: decade bucket the song belongs to.
        :param basepath: base path -- presumably used by load_song() to
            locate the song file; confirm against load_song.
        :param tab: tab/category identifier.
        """
        super(NoPLPSong, self).__init__()

        # FIX: was `self.id = self.id = uuid.uuid1().hex`, a redundant
        # chained assignment; a single assignment is intended.
        self.id = uuid.uuid1().hex
        self.title = title
        self.artist = artist
        self.decade = decade
        self.basepath = basepath
        self.tab = tab

        self.current_load_level = None

        self.lines = []
        self.levels = {}  # {"50": [...], "40": [...] }

        self.load_song()

    def __str__(self):
        return "{0} - {1}".format(self.title, self.artist)
コード例 #25
0
 def test_get_logger(self):
     """Smoke test: logger.get_logger("testlog") returns a truthy logger."""
     self.assertTrue(logger.get_logger("testlog"))
コード例 #26
0
from twisted.internet.protocol import ServerFactory
from twisted.internet.protocol import ReconnectingClientFactory
from twisted.internet import reactor, defer, endpoints

import socksclient
import custom_exceptions
from protocol import Protocol, ClientProtocol
from event_handler import GenericEventHandler

import lib.logger as logger
log = logger.get_logger('socket_transport')


def sockswrapper(proxy, dest):
    """Wrap a TCP client endpoint to *dest* in a SOCKS connection via *proxy*.

    :param proxy: (host, port) pair of the SOCKS proxy.
    :param dest: (host, port) pair of the final destination.
    :return: socksclient.SOCKSWrapper around the destination endpoint.
    """
    dest_host, dest_port = dest
    proxy_host, proxy_port = proxy
    endpoint = endpoints.TCP4ClientEndpoint(reactor, dest_host, dest_port)
    return socksclient.SOCKSWrapper(reactor, proxy_host, proxy_port, endpoint)


class SocketTransportFactory(ServerFactory):
    def __init__(self,
                 debug=False,
                 signing_key=None,
                 signing_id=None,
                 event_handler=GenericEventHandler,
                 tcp_proxy_protocol_enable=False):
        self.debug = debug
        self.signing_key = signing_key
        self.signing_id = signing_id
        self.event_handler = event_handler
        self.protocol = Protocol
コード例 #27
0
#!/usr/bin/python

import argparse
import os
from lib.logger import get_logger
from lib.iocReader import IocReader
from lib.md5FileSystemScanner import Md5FileSystemScanner
from lib.fileSystemListGeneratorProvider import FileSystemListGeneratorProvider
from lib.md5Generator import Md5Generator
from lib.resultsWriter import ResultsWriter, Results
from lib.iocBundleDownloader import IocBundleDownloader

logger = get_logger()


class Main(object):
    """CLI entry object wiring the IOC reader, MD5 filesystem scanner and
    results writer together for a malware-indicator scan."""
    # Directory containing the OpenIOC definition files consumed by IocReader.
    ioc_file_path = 'openioc/'
    def __init__(self):
        """Build the argument parser and the scanner collaborators."""
        self.parser = argparse.ArgumentParser(description='Sentinel scans for and reports the presence of malware-related and malware-modified files.')

        md5Generator = Md5Generator()
        fileSystemListGeneratorProvider = FileSystemListGeneratorProvider()

        iocReader = IocReader(Main.ioc_file_path)
        self.fileSystemScanner = Md5FileSystemScanner(md5Generator, fileSystemListGeneratorProvider, iocReader, logger)
        self.resultsWriter = ResultsWriter(logger)

    def execute(self):
        """Run the scan workflow (body appears truncated in this excerpt)."""
        # NOTE(review): 'app' is not defined in this method or class as shown;
        # it likely refers to a module-level instance created elsewhere, or
        # should be 'self' — confirm against the full file.
        app.process_arguments()

        app.print_logo()
コード例 #28
0
    def __init__(self, adj_mx, **kwargs):
        """Build the full DCRNN train/test graph (TensorFlow 1.x style).

        Args:
            adj_mx: adjacency matrix of the sensor graph, forwarded to
                DCRNNModel. (Shape/dtype not visible here — confirm upstream.)
            **kwargs: nested config dict with 'data', 'model' and 'train'
                sections, plus optional top-level keys such as 'log_level'.
        """
        self._kwargs = kwargs
        self._data_kwargs = kwargs.get('data')
        self._model_kwargs = kwargs.get('model')
        self._train_kwargs = kwargs.get('train')

        # Logging: file logger under the run's log dir plus a TF summary writer.
        self._log_dir = self._get_log_dir(kwargs)
        log_level = self._kwargs.get('log_level', 'INFO')
        self._logger = get_logger(self._log_dir,
                                  __name__,
                                  'info.log',
                                  level=log_level)
        self._writer = tf.summary.FileWriter(self._log_dir)
        self._logger.info(kwargs)

        # Data preparation.
        self.ds = utils.load_dataset(**self._data_kwargs)
        self._data = self.ds.data
        # Log the shape of every array-like entry for debugging.
        for k, v in self._data.items():
            if hasattr(v, 'shape'):
                self._logger.info((k, v.shape))

        # Build two graphs sharing the same variables (variable_scope reuse):
        # one for training, one for testing; they differ only in batch size
        # and the is_training flag.
        scaler = self._data['scaler']
        with tf.name_scope('Train'):
            with tf.variable_scope('DCRNN', reuse=False):
                self._train_model = DCRNNModel(
                    is_training=True,
                    scaler=scaler,
                    batch_size=self._data_kwargs['batch_size'],
                    adj_mx=adj_mx,
                    **self._model_kwargs)

        with tf.name_scope('Test'):
            with tf.variable_scope('DCRNN', reuse=True):
                self._test_model = DCRNNModel(
                    is_training=False,
                    scaler=scaler,
                    batch_size=self._data_kwargs['test_batch_size'],
                    adj_mx=adj_mx,
                    **self._model_kwargs)

        # Learning rate lives in a non-trainable variable so it can be
        # changed at runtime through the _lr_update assign op below.
        self._lr = tf.get_variable('learning_rate',
                                   shape=(),
                                   initializer=tf.constant_initializer(0.01),
                                   trainable=False)
        self._new_lr = tf.placeholder(tf.float32,
                                      shape=(),
                                      name='new_learning_rate')
        self._lr_update = tf.assign(self._lr, self._new_lr, name='lr_update')

        # Configure optimizer: Adam by default; 'sgd' and 'amsgrad' are
        # selectable via the train.optimizer config key.
        optimizer_name = self._train_kwargs.get('optimizer', 'adam').lower()
        epsilon = float(self._train_kwargs.get('epsilon', 1e-3))
        optimizer = tf.train.AdamOptimizer(self._lr, epsilon=epsilon)
        if optimizer_name == 'sgd':
            optimizer = tf.train.GradientDescentOptimizer(self._lr, )
        elif optimizer_name == 'amsgrad':
            optimizer = AMSGrad(self._lr, epsilon=epsilon)

        # Loss: masked MAE over the first output_dim label channels, with
        # 0. used as the missing-value sentinel (null_val).
        output_dim = self._model_kwargs.get('output_dim')
        preds = self._train_model.outputs
        labels = self._train_model.labels[..., :output_dim]

        null_val = 0.
        self._loss_fn = masked_mae_loss(scaler, null_val)
        self._train_loss = self._loss_fn(preds=preds, labels=labels)

        # Clip gradients by global norm before applying the update op.
        tvars = tf.trainable_variables()
        grads = tf.gradients(self._train_loss, tvars)
        max_grad_norm = kwargs['train'].get('max_grad_norm', 1.)
        grads, _ = tf.clip_by_global_norm(grads, max_grad_norm)
        global_step = tf.train.get_or_create_global_step()
        self._train_op = optimizer.apply_gradients(zip(grads, tvars),
                                                   global_step=global_step,
                                                   name='train_op')

        # Checkpoint saver over all global variables.
        max_to_keep = self._train_kwargs.get('max_to_keep', 100)
        self._epoch = 0
        self._saver = tf.train.Saver(tf.global_variables(),
                                     max_to_keep=max_to_keep)

        # Log model statistics.
        total_trainable_parameter = utils.get_total_trainable_parameter_size()
        self._logger.info('Total number of trainable parameters: {:d}'.format(
            total_trainable_parameter))
        for var in tf.global_variables():
            self._logger.debug('{}, {}'.format(var.name, var.get_shape()))
コード例 #29
0
import asyncio
from db import DB
from lib.logger import get_logger
from models.http_bridge import HttpBridge
from models.genesis import Genesis
from models.scheduler import Scheduler
from routers import Routers
from tornado.web import Application
from tornado.ioloop import IOLoop
from tornado.options import define, options
from tornado.log import enable_pretty_logging
enable_pretty_logging()

logger = get_logger('server')


async def main():
    database = DB()
    http_bridge = HttpBridge()
    is_loaded = await database.is_genesis_loaded()
    if not is_loaded:
        logger.info('start to load genesis.')
        genesis = Genesis()
        genesis_file = await http_bridge.get_genesis(genesis.genesis_hash)
        if genesis_file.get('nonAvvmBalances'):
            utxos = genesis.non_avvm_balances_to_utxos(
                genesis_file['nonAvvmBalances'])
            await database.save_utxos(utxos)

        if genesis_file.get('avvmDistr'):
            utxos = genesis.avvm_distr_to_utxos(genesis_file['avvmDistr'],
コード例 #30
0
import random
import string
import multiprocessing
import os.path
import pickle
import argparse
from lib.logger import get_logger

from Player import config
from Azts import mock_model
from Azts import player
from Azts import utility
from Azts.config import GAMEDIR, DATASETDIR
from Matches import contest

log = get_logger("create_dataset")


# @timing
def create_dataset(yamlpaths,
                   rollouts_per_move,
                   num_of_parallel_processes,
                   num_of_games_per_process,
                   fork_method="spawn"):
    '''
    starts parallel training which creates
    many different game_[...].pkl files in
    GAMEDIR and then calls assemble_dataset
    which creates a dataset in DATASETDIR
    from all created games in GAMESDIR
    '''
コード例 #31
0
SCHEMA = 'schema'
TABLE_ID = 'tableid'
SIMPLIFICATION = 'simplification'
GEOM_FIELD = 'geomfield'
FACTOR = 'factor'
MAX_MEMORY = 'maxmemory'
SKIP_FAILURES = 'skipfailures'

SIMPLIFICATION_MAPSHAPER = 'mapshaper'
SIMPLIFICATION_POSTGIS = 'postgis'

DEFAULT_SIMPLIFICATION = SIMPLIFICATION_MAPSHAPER
DEFAULT_MAXMEMORY = '8192'
DEFAULT_SKIPFAILURES = 'no'

LOGGER = get_logger(__name__)


def get_simplification_params(table_id):
    """Return the simplification parameters configured for *table_id*.

    Reads simplifications.json next to this module; returns None when the
    table id has no entry.
    """
    config_path = os.path.join(os.path.dirname(__file__),
                               'simplifications.json')
    with open(config_path) as config_file:
        simplifications = json.load(config_file)
    return simplifications.get(table_id)


class Simplify(WrapperTask):
    schema = Parameter()
    table = Parameter()
    table_id = Parameter(default='')
    suffix = Parameter(default=SIMPLIFIED_SUFFIX)

    def __init__(self, *args, **kwargs):
コード例 #32
0
ファイル: pantri.py プロジェクト: facebook/IT-CPE
# Import in respect to the path of script
# http://stackoverflow.com/q/50499
current_frame = inspect.currentframe()
script_path = os.path.abspath(inspect.getfile(current_frame))

# Insert external_deps path to import the correct external modules
external_deps = os.path.join(os.path.dirname(script_path), "external_deps")
sys.path.insert(1, external_deps)

import lib.utils as utils
from lib import logger
from lib.pantri import Pantri

if __name__ == "__main__":
  logger = logger.get_logger()

  def retrieve(options):

    # check if git repo was update before retrieving files.
    pantri = Pantri(options)
    if pantri.nothing_to_retrieve():
      logger.info('it-bin repo already up-to-date. Use -f/--force to override')
      return

    # In order to selectively choose which shelves to retrieve and have
    # different options per shelf, need to call "pantri.retrieve()" for each
    # shelf.
    if 'shelf' in options:
      for shelf in options['shelf']:
        options['shelf'] = shelf
コード例 #33
0
from keras.optimizers import Adam
from keras.callbacks import ReduceLROnPlateau
from keras.utils.vis_utils import plot_model

sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from Model.utility import *
from lib.timing import timing

from Azts.config import DATASETDIR
from Player.config import Config
from Model.resnet import resnet_model, inference_model, transfer_update

from lib.logger import get_logger
log = get_logger("Model")

DEBUG = False


class AZero:
    """ The AlphaZero Class

    Attributes:
        model (Keras Model): The ResNet Model with two output heads
        initial_epoch

    Functions:
        train: starts the training process
        restore_latest_model: restores newest weights from model directory
        remember_model_architecture: makes sure the architecture along with config is saved once
コード例 #34
0
 def __init__(self):
     """Create the Genesis helper: a 'genesis'-scoped logger plus the
     genesis block hash of the configured network."""
     self.logger = get_logger('genesis')
     # Network() exposes the chain's genesis hash for the active network.
     self.genesis_hash = Network().genesis_hash
コード例 #35
0
ファイル: milano.py プロジェクト: julien-effinet/milano
#!/usr/bin/python

import argparse
import os
from lib.logger import get_logger
from lib.iocReader import IocReader
from lib.md5FileSystemScanner import Md5FileSystemScanner
from lib.fileSystemListGeneratorProvider import FileSystemListGeneratorProvider
from lib.md5Generator import Md5Generator
from lib.resultsWriter import ResultsWriter, Results
from lib.iocBundleDownloader import IocBundleDownloader

logger = get_logger()


class Main(object):
    ioc_hashes_file = 'ioc_files/hacking_team_dll_hashes.ioc'


    def __init__(self):
        """Wire the MD5 hasher, filesystem lister, IOC reader and results
        writer into a scanner ready for execute()."""
        self.parser = argparse.ArgumentParser(description='Sentinel scans for and reports the presence of malware-related and malware-modified files.')

        # Collaborators for hashing files and walking the filesystem.
        hasher = Md5Generator()
        fs_list_provider = FileSystemListGeneratorProvider()
        # IOC hashes to match against come from the class-level .ioc file.
        ioc_reader = IocReader(Main.ioc_hashes_file)
        self.fileSystemScanner = Md5FileSystemScanner(hasher, fs_list_provider, ioc_reader, logger)
        self.resultsWriter = ResultsWriter(logger)


    def execute(self):
コード例 #36
0
ファイル: train.py プロジェクト: tonycai/AI-Detect-Code
# -*- coding:utf-8 -*-

import argparse

from model import get_recognizer
from lib.logger import get_logger

logger = get_logger('train')

if __name__ == '__main__':
    # Single positional argument selects which recognizer type to train.
    arg_parser = argparse.ArgumentParser(description='模型训练脚本')
    arg_parser.add_argument('rec_type', help='识别类别')
    cli = arg_parser.parse_args()
    rec_type = cli.rec_type

    logger.info('---- %s train ----' % rec_type)

    # Build a fresh recognizer in training mode (no cached model) and train.
    trainer = get_recognizer(rec_type, model_config={'train': True}, use_cache=False)
    trainer.train()

    logger.info('---- %s end ----' % rec_type)