Example 1
 def run_trajectory(self, logger=None):
     if logger is None:
         from logging import info as logger
     model = self.tree.model
     R = 0
     logger('New trajectory')
     self.reset()
     s = model.sample_start()
     belief_quotient = model._int_to_state().belief_quotient
     belief_preferences = model._int_to_state().belief_preferences
     while not model.is_final(s):
         a = self.get_action()
         ns, o, r = model.sample_transition(model.actions.index(a), s)
         self.step(model.observations[o])
         logger('{} -- {} --> {}, {}, {}'.format(
             model._int_to_state(s),
             a,
             model._int_to_state(ns),
             model.observations[o],
             r))
         logger('belief: {} | {:.2f}'.format(
             format_belief_array(belief_quotient(self.belief.array)),
             belief_preferences(self.belief.array)[0]))
         s = ns
         R += r
     logger("Total reward: %f" % R)
Example 2
 def __populate(queue, stream):
     log = logger()
     for line in iter(stream.readline, b''):
         # process line
         line = line.decode().strip()
         # save line
         queue.put(line)
         # log line
         log.debug(line)
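A minimal way to exercise this kind of reader, assuming a standard queue.Queue and any binary stream (the helper name and the sample data below are illustrative, not from the original module):

import io
import queue

def populate(q, stream):
    # same loop as above: read raw lines until the b'' EOF sentinel,
    # decode and strip each one, then hand it to the queue
    for line in iter(stream.readline, b''):
        q.put(line.decode().strip())

q = queue.Queue()
populate(q, io.BytesIO(b"first line\nsecond line\n"))
print(q.get())   # 'first line'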
Example 3
def main():
    """
	MAIN function, passes command line arguments into the ServerSocket, calls FTPServer to run protocol

	:return: returns nothing:
	"""

    #handle commandline arguments
    if len(sys.argv) == 3:

        filename = sys.argv[1]
        port = sys.argv[2]

        #establish init variables {LOG, AuthorizedUsers}
        global log
        log = logger(filename, "[server]")

        #read config file and establish variables
        config_init()

        #Create a serversocket
        serversocket = ServerSocket(filename, port)
        ftpserver = None

    else:
        log.usage("server.py <filename> <port>")
        exit(0)

    # Run the protocol for each client
    while True:
        try:
            (clientsocket, address) = serversocket.accept()

            msg = "Client Connected at %s:%s" % (address[0], address[1])
            log.debug(msg)
            ftpserver = FTPServer(clientsocket, address)
            if FTPS_MODE == "yes":
                secure = Secure(clientsocket, address, "server.crt",
                                "server.key")
                secure.doProtocol()
            else:
                ftpserver.doProtocol()

        # Handle Ctrl-C; make sure threads are done running
        except KeyboardInterrupt as error:
            log.debug("Shutting down server")
            #if we have client running
            if ftpserver:
                try:
                    ftpserver.close()
                except socket.error as error:
                    pass

            serversocket.close()
            exit()
Example 4
def main():
    """
	MAIN function, passes command line arguments into the ServerSocket, calls FTPServer to run protocol

	:return: returns nothing:
	"""

    #handle commandline arguments
    if len(sys.argv) == 3:

        filename = sys.argv[1]
        port = sys.argv[2]

        #establish init variables {LOG, AuthorizedUsers}
        global log
        log = logger(filename, "[server]")

        try:
            config.read(authorized_users_file)
            for key in config["Authorized Users"]:
                AUTHORIZED_USERS[key] = config["Authorized Users"][key]
        #handle invalid config file
        except (DuplicateOptionError, Error) as error:
            log.error("Error " + str(error.message), "[main]")
            msg = "Error in %s file, Fix before proceeding" % (
                authorized_users_file)
            log.error(msg, "[main]")
            exit(1)

        #Create a serversocket
        serversocket = ServerSocket(filename, port)
        ftpserver = None

    else:
        log.usage("server.py <filename> <port>")
        exit(0)

    # Run the protocol for each client
    while True:
        try:
            (clientsocket, address) = serversocket.accept()
            msg = "Client Connected at %s:%s" % (address[0], address[1])
            log.debug(msg)
            ftpserver = FTPServer(clientsocket, address)
            ftpserver.doProtocol()
        # Handle Ctrl-C; make sure threads are done running
        except KeyboardInterrupt as error:
            log.debug("Shutting down server")
            #if we have client running
            if ftpserver:
                ftpserver.close()
            serversocket.close()
            exit()
Example 5
 def __init__(self, conf={}, ext=[]):
     # initialize logger
     self._log = logger()
     # load context
     self._context = self._load_context(conf.get('context', {}))
     # load services
     self._services = self._load_services(conf.get('services', {}))
     # load extensions
     self._extensions = self._load_extensions(conf.get('extensions', {}),
                                              ext)
     # load sequences
     self._sequences = self._load_sequences(conf.get('sequences', {}))
Example 6
 def __init__(self, dbase_name, debug_level=0):
     """!
     The constructor for the class
     @param self The pointer for the object
     @param dbase_name The name of the database file
     @param debug_level The debug level for operation of the database
     """
     ##@var debug_level
     #Controls debug functionality throughout the class
     self.debug_level = debug_level
     ##@var logger
     #The logger module for the class
     self.logger = logger(debug_level=self.debug_level)
     ##@var error_logger
     #The error logger module for the class.  Enforces debug_level=2
     #to ensure logging to file
     self.error_logger = logger(file_name='error.log',
                                debug_level=2)
     ##@var name
     #The name of the sql file
     self.name = dbase_name
     self.open_dbase()
Example 7
    def __init__(self,
                 dbPathFactorValues,
                 dbPathMarketData,
                 begDate,
                 endDate,
                 logger=None):
        """Constructor"""
        if logger == None:
            self.logger = logging.logger()
        else:
            self.logger = logger

        dbPathRawData = GetPath.GetLocalDatabasePath()["RawEquity"]
        dbPathProcessedData = GetPath.GetLocalDatabasePath()["ProcEquity"]

        # load price data into the in-memory database
        self.connDbMkt = lite.connect(":memory:")
        self.connDbMkt.text_factory = str
        cur = self.connDbMkt.cursor()
        self.logger.info(
            "<{}>-Load local database into in-memory database".format(
                __name__.split('.')[-1]))
        _dbPathMarketData = dbPathRawData + dbPathMarketData
        cur.execute("ATTACH '{}' AS MktData".format(_dbPathMarketData))
        cur.execute(
            "CREATE TABLE MktData AS SELECT StkCode,Date,LC,TC FROM MktData.AStockData WHERE Date>='{}'"
            .format(begDate))
        cur.execute("CREATE INDEX mId ON MktData (Date,StkCode)")
        self.logger.info("<{}>-Done".format(__name__.split('.')[-1]))

        # connect to the factor-value database
        dbFactorValue = dbPathProcessedData + dbPathFactorValues
        self.connDbFV = lite.connect(dbFactorValue)
        self.connDbFV.text_factory = str

        # get trading days
        self._trdDays = GetTrdDay.GetTradeDays()
        self.revalueDays = []
        self.trdDays = []
        cur = self.connDbFV.cursor()
        cur.execute("SELECT DISTINCT Date FROM FactorValues ORDER BY Date ASC")
        rows = cur.fetchall()
        for row in rows:
            self.revalueDays.append(row[0])
        for d in self._trdDays:
            if d >= self.revalueDays[0] and d <= endDate and d >= begDate:
                self.trdDays.append(d)

        # Initialize CalculatePortfolioReturn
        self.objCalcPortReturn = CalcPortRet.CalculatePortfolioReturn(
            dbPathMarketData, self.connDbMkt, self.logger)
Example 8
    def __init__(self,
                 host=socket.gethostname(),
                 port=21,
                 backlog=5,
                 filename="server.log"):
        self.host = host
        self.port = port
        self.backlog = backlog
        self.serversocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.serversocket.bind((host, port))
        self.serversocket.listen(backlog)
        self.log = logger(filename, "[server]")

        #log init
        msg = "Starting server on %s:%s" % (self.host, self.port)
        self.log.debug(msg)
Example 9
def git_init(api, config):
    log = logger()
    # setup repositories
    api._git_repo = {}
    for name, params in config.get('repositories', {}).items():
        try:
            repo = None
            path = Pattern(params.get('path',
                                      '.')).substitute(api.get_context())
            # check if the repository exists; if not, clone it
            try:
                repo = Repo(path)
            except:
                repo = Repo.clone_from(params['uri'], path)
            # add repository
            api._git_repo[name] = repo
        except Exception as ex:
            log.error(ex)
    # save reference name
    api._git_reference = config.get('reference', 'point')
Example 10
    def __init__(self,
                 host=socket.gethostname(),
                 filename="client.log",
                 port=21):

        self.host = host
        self.port = port
        self.clientsocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.log = logger(filename, "[client]")
        self.clientsocket.settimeout(2)
        self.datasocket = None
        """
		Establishes a connection otherwise exists the program
		"""
        try:
            self.clientsocket.connect((self.host, self.port))
            msg = "Connected to %s:%s" % (self.host, self.port)
            self.log.debug(msg)

        except socket.error as e:
            self.log.error(str(e))
            exit(0)
            pass
Example 11
def __init():
    import logging  # our own logging module
    import outputHandler
    import tokenHandler  # our own token handling module
    import settingsHandler
    import pinDef  # our own pin definition module
    import systemHandler
    import inputHandler  # our own input handling module
    try:
        import pigpio
    except ImportError:
        print(
            "*** PiGPIO not found - please run the following command to install it ***"
        )
        print(
            "sudo apt-get update && sudo apt-get install pigpio python-pigpio python3-pigpio\n"
        )
        exit()
    # exit flag
    global __flagExit
    __flagExit = False

    # get our run mode - find out if daemon
    global runMode
    # Assume runMode is normal
    runMode = "normal"

    # Confirm if it's actually running as a daemon
    for i in sys.argv:
        if i == "--daemon":
            runMode = "daemon"
            break
    if os.environ.get("LAUNCHED_BY_SYSTEMD") == "1":
        runMode = "daemon"

    # start logging
    global l
    l = logging.logger(runMode=runMode)
    del logging
    l.log("NOTE", "DIYAC starting")

    # systemHandler
    global sysH
    sysH = systemHandler.systemHandler(l)
    del systemHandler
    sysH.setup("sigInt", runQuit=True)
    sysH.setup("sigTerm", runQuit=True)
    sysH.setup("sigHup", sigHup_callback, runQuit=False)
    sysH.setup("quit", cleanup)

    # get all the settings
    s = settingsHandler.settingsHandler(sysH, l)
    del settingsHandler

    # update the logger with new settings
    l.loadSettings(s)

    # see if pigpiod is running
    # if not running
    #  try to start
    #  check again
    #  if not running
    #   exit
    # pigpiod.pi()
    # if not connected
    #  exit
    stat = subprocess.call("systemctl status pigpiod > /dev/null", shell=True)
    if stat != 0:
        l.log("WARN", "PIGPIOD is not running, will try to start")
        subprocess.call("sudo systemctl start pigpiod > /dev/null", shell=True)
        stat = subprocess.call("service pigpiod status > /dev/null",
                               shell=True)
        if stat != 0:
            l.log("ERRR", "Unable to start pigpiod daemon")
            sysH.quit(code=1,
                      status="Fail - PIGPIO not started and unable to start")
        else:
            l.log("INFO", "Starting pigpiod daemon successful")
    global pi
    pi = pigpio.pi()
    if not pi.connected:
        l.log("ERRR", "PiGPIO - Unable to connect")
        sysH.quit(code=1, status="Failed - unable to connect to PIGPIOD")

    # set tokens
    global tokens
    tokens = tokenHandler.tokenHandler(sysH, s, l)
    del tokenHandler

    # pin definitions
    global p
    p = pinDef.pinDef(sysH, s, l)
    del pinDef

    # output handler (settings, logger, gpio, pins)
    global outH
    outH = outputHandler.outputHandler(sysH, s, l, pi, p)
    del outputHandler

    # Input handler
    global inH
    inH = inputHandler.inputHandler(sysH, s, l, tokens, outH, pi, p)
    del inputHandler

    time.sleep(0.1)

    # register these GPIO input pins to run __callbackInput on rising or falling edges
    for pin in p.pins["input"]:
        pi.callback(p.pins[pin], pigpio.EITHER_EDGE, __callbackInput)

    # register these GPIO output pins to run __callbackOutput on rising or falling edges
    for pin in p.pins["output"]:
        pi.callback(p.pins[pin], pigpio.EITHER_EDGE, __callbackOutput)

    # state ready
    sysH.notifyUp("READY=1")
    sysH.notifyUp("STATUS=Running")
    l.log("NOTE", "DIYAC running", runMode)
    import getpass
    l.log("DBUG", "Running program as user", getpass.getuser())
Example 12
print '------------ importing modules ------------'

print math.pow(2, 0.5)
print math.pow(3, 2)
print math.pow(3, 3)
print math.pi

print 'from math import pow, sin, log  imports only pow, sin and log directly; the other functions in the math module are not imported'

print 'if different modules define functions with the same name, call them through the module name, or give one of them an alias'

print math.log(10)

print logging.log(10, 'something')

print logger(10, 'something')

#
print os.path.isdir(r'C:\windows')
print os.path.isdir(r'C:\Windows')
print os.path.isfile(r'C:\Windows\notepad.exe')

# dynamically import modules

print '------------ dynamically importing modules --------------'

print 'Sometimes two different modules provide the same functionality; for example, StringIO and cStringIO both provide StringIO. \n' \
      'Because Python is a dynamic, interpreted language, Python code runs relatively slowly. \n' \
      'The simplest way to speed it up is to rewrite key functions in C. \n' \
      'StringIO is written in pure Python, while parts of cStringIO are written in C, so cStringIO runs faster.'
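A short sketch of the fallback import the text above describes, for Python 2 (cStringIO is tried first; the pure-Python StringIO is used when the C version is unavailable):

try:
    from cStringIO import StringIO   # C implementation, faster
except ImportError:
    from StringIO import StringIO    # pure-Python fallback
buf = StringIO()
buf.write('hello')
print buf.getvalue()   # hello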
Example 13
# after importing, the variables and classes in the math module can be used
print math.pow(2, 0.5)  # 1.41421356237
print math.pi # 3.14159265359
# to import only the functions you need from the math module, rather than everything, use the following statement
from math import pow, sin, log
print pow(2, 10)    # 1024.0
print sin(3.14)     # 0.00159265291649

# with `import module`, functions must be referenced through the module name, so there are no name collisions
# with from...import the module name is dropped, which can lead to collisions
# an alias can be used to avoid the conflict
# for example:
from math import log
from logging import log as logger
print log(10) # 2.30258509299
logger(10, 'import from logging')
# this avoids the naming conflict

import os
print os.path.isdir(r'C:\Windows')  # True
print os.path.isfile(r'C:\Windows\notepad.exe') # True

# dynamically importing modules
# if an imported module does not exist, the Python interpreter raises an ImportError
# because Python is interpreted and relatively slow, some key functions are implemented in C
# for example the StringIO and cStringIO modules
# by catching ImportError we can import modules dynamically
# example:
try:
    from cStringIO import StringIO
except ImportError:
    from StringIO import StringIO
Example 14
from status_server import StatusServer
import plugins
from plugins import *
import time
from gflags import *
import daemon
from daemon import pidfile
import sys
import os.path
import logging

#TODO: add logger
#TODO: add json output
#TODO: support multi memcached, the feature of plugin

logger = logging.logger(__file__)

def run():
    """init status server, and run in a dead loop """

    global logger
    logger.info("inter run")

    statusserver = StatusServer(logger = logger, **FLAGS.FlagValuesDict())

    if(FLAGS.plugin_module \
        and FLAGS.plugin_class \
        and FLAGS.plugin_handlers):
        plugin_module = getattr(plugins, FLAGS.plugin_module) 
        plugin_class = getattr(plugin_module, FLAGS.plugin_class)
        plugin_inst = plugin_class(*FLAGS.plugin_params)
Example 15
def main():
    logger = logging.logger('project_two_logs.log')
    date_now = datetime.datetime.now().date()
    client = bq.Client()
    bucket_name = 'data-engineering-data-sources'
    dest_blob = 'raw/project_two/weather-{}.json'
    dest_blob_fact_transform = 'transformed/project_two/weather-{}.pq'
    dest_blob_dim_transform = 'transformed/project_two/{}.pq'
    source_weather = '/tmp/weather-{}.json'
    # source_dim='/tmp/{}.pq'
    source_weather_transform = '/tmp/weather-{}.pq'
    source_dim_transform = '/tmp/{}.pq'
    url_fact = 'gs://' + bucket_name + '/' + dest_blob_fact_transform.format(
        date_now)
    url_dim = 'gs://' + bucket_name + '/' + dest_blob_dim_transform
    print(url_fact)
    json_data = get_dataset(param=param)  # request api and save to local file
    with open(source_weather.format(date_now), 'w') as weather_json:
        json.dump(json_data, weather_json)

    load.load_blob(
        bucket_name, dest_blob.format(date_now),
        source_weather.format(date_now))  #load fact table into cloud storage

    dict_fact = transform.json_data(
        json_data=json_data)  # transform fact table
    name_of_fact_table = str(*dict_fact.keys())
    df_of_fact_table = dict_fact[name_of_fact_table]
    df_dim = transform.html_data()  # transform html reference data into df

    df_of_fact_table.to_parquet(
        source_weather_transform.format(date_now),
        engine='pyarrow',
        compression='gzip')  # prepare parquet and load into staging
    load.load_blob(bucket_name, dest_blob_fact_transform.format(date_now),
                   source_weather_transform.format(date_now))
    for name, df in df_dim.items():
        df.to_parquet(source_dim_transform.format(name),
                      engine='pyarrow',
                      compression='gzip')
        load.load_blob(bucket_name, dest_blob_dim_transform.format(name),
                       source_dim_transform.format(name))

    load.pq_gcs_to_bigquery(url_fact, 'project_two', name_of_fact_table,
                            'WRITE_APPEND')  #load into bigquery

    logger.info(
        "{} rows were successfully loaded into project_two.FACT_weather".
        format(len(df_of_fact_table)))
    # load DIM tables only on the first run; they are reference data for the fact table
    # and do not need to be refreshed on every run
    # get the list of existing tables in the BigQuery dataset
    check_table = client.list_tables('project_two')
    table_in_bq = []
    for table in check_table:
        if table.table_id != 'FACT_weather':
            table_in_bq.append(table.table_id)

    def dim_loop(df_dim, table_in_bq):
        '''
        Load tables into the BigQuery dataset: given the list of tables already in the
        dataset, upload every DIM table that does not yet exist (the FACT table is
        appended separately).

        Args:

            1. df_dim = dataframe of transformed html_data()

            2. table_in_bq = list of tables in the BigQuery dataset
        '''
        table_not_exist = set(table_in_bq).symmetric_difference(
            set(df_dim.keys()))
        table_to_load = {k: df_dim[k] for k in table_not_exist}
        list_of_DIM_table = []
        for tablename, df_dim in table_to_load.items():
            load.pq_gcs_to_bigquery(url_dim.format(tablename), 'project_two',
                                    tablename, 'WRITE_TRUNCATE')
            list_of_DIM_table.append(tablename)

        logger.info(
            'dimensional tables {} were loaded into project_two'.format(
                list_of_DIM_table))

    if len(table_in_bq) < 5:
        dim_loop(df_dim, table_in_bq)

    print('task complete')
Example 16
import time
import select
import sys
import logging

TIMEOUT = 1
CLOSED = 2
FD = 1
TIME = 2

events = []
logger = logging.logger("EVENT", logging.DEBUG)


def getCurrentMills():
    return int(round(time.time() * 500))


def eventTimeout(timeMs, callback, argument, strId):
    event = EventData(TIME, callback, argument)
    event.time = getCurrentMills() + timeMs
    event.id = strId
    logger.log(Level.DEBUG, "Registering timeout " + str(event))
    events.append(event)


def eventTimeoutDelete(callback, argument):
    for event in events:
        if event.type == TIME and event.callback == callback and event.argument == argument:
            logger.log(Level.DEBUG, "Deleting " + str(event))
            events.remove(event)
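The two functions above assume an EventData container that is not part of the snippet; a minimal sketch of what it could look like, inferred from the fields used (type, callback, argument, time, id) and purely illustrative:

class EventData:
    # hypothetical holder for the fields eventTimeout/eventTimeoutDelete rely on
    def __init__(self, type_, callback, argument):
        self.type = type_
        self.callback = callback
        self.argument = argument
        self.time = 0     # absolute deadline in ms, filled in by eventTimeout
        self.id = None    # string id, filled in by eventTimeout

    def __str__(self):
        return "EventData(id={}, type={}, time={})".format(self.id, self.type, self.time)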
Example 17
def ask():
    logging.logger()
Example 18
# -*- coding: utf-8 -*-

# modules
from __future__ import division
from math import log
from logging import log as logger   # "as" creates an alias

print log(10)
logger(10, 'import from logger')

# dynamically importing modules in Python
try:
    import json
except ImportError:
    import simplejson as json

print json.dumps({'python':2.7})

# using __future__ in Python
# New Python releases introduce new features, but those features often already exist in the previous release. To "try out" a new feature, import it from the __future__ module.

print 10 / 3
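A quick check of the behaviour described above (Python 2):

from __future__ import division
print 10 / 3    # 3.33333333333 (true division)
print 10 // 3   # 3 (floor division is still available)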
Example 19
 def __init__(self):
     self._stack = SimpleQueue()
     self._logger = logger()
Example 20
from selenium.common.exceptions import NoSuchWindowException
from selenium.common.exceptions import WebDriverException
from selenium.common.exceptions import StaleElementReferenceException
import _thread as thread
import PySimpleGUI as Psg
from logging import INFO as INFO
from logging import basicConfig as logger
from logging import info as log_info
from logging import warning as log_warning
from logging import error as log_error

# ######################################################################### ###
# ########################## Globals and Setup ############################ ###
# ######################################################################### ###

logger(filename='EHX.log', level=INFO)
log_info("Launched EHX v1.0")

LIGHT = ""
DD_BROWSERS = ["Internet Explorer", "Firefox", "Chrome"]
DD_ELEMENTS_DICT = {
    "CSS Selector": "find_element_by_css_selector",
    "XPATH": "find_elements_by_xpath",
    "ID": "find_elements_by_id",
    "Class Name": "find_elements_by_class_name",
    "Name": "find_elements_by_name"
}
DD_ELEMENTS = ["CSS Selector", "XPATH", "ID", "Class Name", "Name"]
DD_COLORS = [
    "Red", "Green", "Orchid", "Aqua", "Aquamarine ", "Orange", "Tomato",
    "Salmon", "Yellow", "Blue", "Plum", "PeachPuff"
Example 21
import math

from math import pow, sin, log

import math, logging
math.log(10)
logging.log(10, 'something')

from math import log
from logging import log as logger   # logging.log is now called logger: alias a function from a package
print log(10)   # calls math's log
logger(10, 'import from logging')   # calls logging's log

import simplejson as json #alias simplejson module of a package
Example 22
import logging
import logging.config
from core.relay.config import BasicConfiguration
import os, sys

DIR = os.path.abspath(os.path.dirname(__file__))

logging.config.fileConfig(os.path.join(DIR,'logging.conf'))
logger = logging.logger("smtprelay")

'''
  Add some logic to redirect emails going to
  a user
'''
def redirect_to(user):
  unprocessed_email_address, domain = str(user).split("@")
  username, realdomain = unprocessed_email_address.split('-')

  # Handle yahoo as the exception, they do not allow "yahoo" to appear in the username for an email address
  if realdomain.lower() == 'yaho.com':
    realdomain = 'yahoo.com'

  redirect_email = "%s@%s"%(username, realdomain)
  logger.info("Sending redirected message to %s"%redirect_email)

  return redirect_email

'''
  Start the configuration
'''
configuration = {
  "OUTBOUND_EMAIL_HOST":"...",
Example 23
import time
import select
import sys
import logging
TIMEOUT = 1
CLOSED = 2
FD = 1
TIME = 2

events = []
logger = logging.logger("EVENT", logging.DEBUG)


def getCurrentMills():
    return int(round(time.time() * 500))

def eventTimeout(timeMs, callback, argument, strId):    
    event = EventData(TIME, callback, argument)
    event.time = getCurrentMills() + timeMs
    event.id = strId
    logger.log(Level.DEBUG, "Registering timeout " + str(event)) 
    events.append(event)

def eventTimeoutDelete(callback, argument):
    for event in events:
        if event.type == TIME and event.callback == callback and event.argument == argument:
            logger.log(Level.DEBUG, "Deleting " + str(event))
            events.remove(event)
            return True
    return False
Example 24
print "\n只导入用到的math模块的某几个函数,而不是所有函数"
from math import pow, sin, log
print pow(2, 10)
print sin(3.14)

print "\n处理不同包内有相同的函数名冲突问题"
import math, logging
print math.log(10)
print logging.log(10, "something")

print "\n为导入的函数取别名,以访止冲突"
from math import log
from logging import log as logger
print log(10)
print logger(10, "import from logging")

print "\n练习\n"
import os

dirPath = "/Users/xianglingchuan/Documents/work/pythonWork/learnProject/static/"
#dirPath = "static";
print os.path.isdir(dirPath)
# create a directory
#dirPathNew = "/Users/xianglingchuan/Documents/work/pythonWork/learnProject/static/dirPath/";
#os.mkdir(dirPathNew);
#os.remove(dirPathNew);

fileName = "/Users/xianglingchuan/Documents/work/pythonWork/learnProject/static/file22.txt"
print os.path.isfile(fileName)
#os.mkfifo(fileName);
Example 25
import math
print math.pow(2, 0.5)
print math.pi
from math import pow, sin, log
print pow(2, 2)
print sin(30)
print log(10)
import math, logging
print math.log(10)
print logging.log(10, 'something')
from math import log
from logging import log as logger
print log(10)
print logger(10, 'import from logging')