Example #1
def ftp_server():
    authorizer = DummyAuthorizer()
    user_list = get_user('conf/user.conf')
    for user in user_list:
        name, password, permit, homedir = user
        try:
            authorizer.add_user(name, password, homedir, perm=permit)
        except Exception as e:
            print(e)

    if setting.enable_anonymous == 'on':
        authorizer.add_anonymous('/home/')

    dtp_handler = ThrottledDTPHandler
    dtp_handler.read_limit = setting.max_download
    dtp_handler.write_limit = setting.max_upload

    handler = FTPHandler
    handler.authorizer = authorizer
    # attach the throttled DTP handler configured above (otherwise the limits have no effect)
    handler.dtp_handler = dtp_handler

    if setting.enable_logging == 'on':
        logger.config_logger()

    handler.banner = setting.welcome_msg

    handler.passive_ports = range(setting.passive_ports[0],
                                  setting.passive_ports[1])

    server = FTPServer((SERVER_IP, SERVER_PORT), handler)
    server.max_cons = setting.max_cons
    server.max_cons_per_ip = setting.max_per_ip

    server.serve_forever()
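
A minimal way to run this example, assuming the pyftpdlib classes (DummyAuthorizer, ThrottledDTPHandler, FTPHandler, FTPServer) and the setting/logger/get_user helpers shown above are importable:

if __name__ == '__main__':
    # serve_forever() blocks until the process is interrupted
    ftp_server()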
Example #2
def main():

	args = arguments()
	#parse params
	if "graph" not in args or "result" not in args:
		print("Missing params specify arguments:\n"
		"-graph -result -process -weight\n"
		"graph => path of graph from src folder\n"
		"result => name of file with results\n"
		"process => optional, improve performance best value is number of core\n"
		"weight => optional, compute also weighted average shortest path only loaded graph")
	else:
		GRAPH_PATH = args["graph"]
		OUTPUT_NAME = args["result"]
		PROCESSES_NUMBER = args["process"] if "process" in args else 1
		WEIGHTED = args["weight"] if "weight" in args else False

		if not os.path.exists(GRAPH_PATH):
			print("File {0} does not exist, please check your path".format(GRAPH_PATH))
			sys.exit(1)

		logger.config_logger(OUTPUT_NAME)
		
		#load file
		logger.log("Start loading graph")
		loaded_graph = graph_creator.load_pajek(GRAPH_PATH)
		logger.log("Terminated graph loading")
		
		if len(loaded_graph.edges) != 0:

			loaded_metrics = analyze_loaded(loaded_graph, PROCESSES_NUMBER, WEIGHTED)
			random_metrics = analyze_random(nodes_number=loaded_graph.number_of_nodes(), 
				edges_number=loaded_graph.number_of_edges(), processNumber=PROCESSES_NUMBER)

			small_world = {}
			small_world["L"] = ("NaN" if random_metrics["main_component"]["average_path_length"] == 0 
				else loaded_metrics["main_component"]["average_path_length"] / 
					random_metrics["main_component"]["average_path_length"])
			small_world["C"] = ("NaN" if random_metrics["main_component"]["clustering_coefficient"] == 0 
				else loaded_metrics["main_component"]["clustering_coefficient"] / 
					random_metrics["main_component"]["clustering_coefficient"])

			results = {}
			results["loaded_graph"] = loaded_metrics
			results["random_graph"] = random_metrics
			results["small_world"] = small_world

			logger.log("Start saving metrics")
			output_manager.save_json_file(results, OUTPUT_NAME)
			logger.log("Metrics saved in file: {0}".format(OUTPUT_NAME))
		else:
			logger.log("Empty graph, no metrics calculated")
Example #3
def init():
    config_logger()

    updater = Updater(token=config.get(TELEGRAM_TOKEN))
    dispatcher = updater.dispatcher

    # add handlers
    dispatcher.add_handler(command_handler())
    dispatcher.add_handler(text_handler())
    dispatcher.add_handler(audio_handler())

    updater.start_polling()
    logging.info("Bot is listening for messages...")
Example #4
def search_data(cur, name='', shouzhi='', createtime1='', createtime2=''):
    lg = logger.config_logger('search_data')
    lg.info('准备查询数据')
    sql = "SELECT u.name ,p.payname,pay.product_name,pay.createtime,pay.money ,pay.shouzhi\
       FROM user_info u,payment_info p, pay_info pay \
       WHERE u.id=pay.user_id AND p.id=pay.payment_id "

    # NOTE: building SQL via string interpolation is vulnerable to SQL injection;
    # parameterized queries would be safer.
    if name != '' and name != '全部':  # '全部' means "all", i.e. no name filter
        sql = sql + " and u.name='%s'" % (name)

    if shouzhi != '':
        sql = sql + " and pay.shouzhi='%s'" % (shouzhi)

    if createtime1 != '' and createtime2 != '':
        sql = sql + " and pay.createtime BETWEEN '%s' AND '%s'" % (createtime1,
                                                                   createtime2)
    elif createtime1 != '':
        sql = sql + " and pay.createtime > '%s'" % (createtime1)
    elif createtime2 != '':
        sql = sql + " and pay.createtime < '%s'" % (createtime2)

    # exclude empty and '资金转移' ("fund transfer") records, newest first
    sql = sql + " and pay.zijinzhuangtai !='' AND pay.zijinzhuangtai !='资金转移' order by pay.createtime DESC"

    lg.info(sql)
    results = []
    try:
        cur.execute(sql)
        fet_results = cur.fetchall()

        for row in fet_results:
            result = {'name': row[0],
                      'payname': row[1],
                      'product_name': row[2],
                      'createtime': row[3],
                      'money': row[4],
                      'shouzhi': row[5],
                      }
            if result['shouzhi'] == 1:
                result['shouzhi'] = '支出'  # expense
            if result['shouzhi'] == 2:
                result['shouzhi'] = '收入'  # income
            results.append(result)

        lg.info('查询完数据')

    except Exception:
        import traceback
        traceback.print_exc()
        print("Error: unable to fetch data")
    return results
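
A hedged usage sketch, assuming a pymysql cursor such as the one returned by connDB() in Example #6 below; the date values are made up:

conn, cur = connDB()
rows = search_data(cur, name='全部', createtime1='2018-01-01', createtime2='2018-12-31')
connClose(conn, cur)  # see Example #8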
Example #5
    def CreateExcel(self):
        lg = logger.config_logger('CreateExcel')
        lg.info('开始导出数据到excel文件 ')
        xw = openpyxl.Workbook()
        # create the workbook
        sheet = xw.active
        sheet.title = 'result'
        # create the header row
        sheet['A1'] = '收入/支出'  # income/expense
        sheet['B1'] = '姓名'  # name
        sheet['C1'] = '支付方式'  # payment method
        sheet['D1'] = '金额'  # amount
        sheet['E1'] = '商品名'  # product name
        sheet['F1'] = '时间'  # time

        # write one row per result, starting at row 2 (row 1 is the header)
        for idx, item in enumerate(self.result, start=2):
            sheet.cell(row=idx, column=1).value = item['shouzhi']
            sheet.cell(row=idx, column=2).value = item['name']
            sheet.cell(row=idx, column=3).value = item['payname']
            sheet.cell(row=idx, column=4).value = item['money']
            sheet.cell(row=idx, column=5).value = item['product_name']
            sheet.cell(row=idx, column=6).value = item['createtime']

        dt = time.strftime('%Y%m%d-%H%M%S', time.localtime())
        result_path = readconfig.caseresult_path
        fileresult = os.path.join(result_path, dt + '.xlsx')
        xw.save(fileresult)
        lg.info('导出数据到以下文件:%s' % fileresult)
Example #6
def connDB():  # connect to the database
    lg = logger.config_logger('connDB')
    lg.info('开始连接数据库')
    rc = readconfig.ReadConfig()

    host = rc.get_mysql('host')
    port = int(rc.get_mysql('port'))
    user = rc.get_mysql('user')
    password = rc.get_mysql('password')
    database = rc.get_mysql('db')
    try:
        conn = pymysql.connect(host=host,
                               port=port,
                               user=user,
                               password=password,
                               db=database,
                               charset='utf8')
        lg.info('已成功连接数据库')
    except Exception as e:
        print(e)
        lg.error('数据库访问失败')  # database connection failed
        raise  # re-raise: without a connection, conn.cursor() below would fail with NameError

    cur = conn.cursor()
    return (conn, cur)
Example #7
# author : liuyanmei

# date : 2018-1-2
import csv
import tkinter
import tkinter.ttk

import logger
import sys
import mysqloption
import tkinter.messagebox
import optionExcel
import tkinter.filedialog

lg = logger.config_logger('MainWindow')


class MainWindow(object):
    def __init__(self, width=680, height=300):
        self.width = width
        self.heigth = height

        self.root = tkinter.Tk()
        # fix the window size so it cannot be resized
        self.root.resizable(width=False, height=False)
        self.root.title('家庭收支记账单')  # "Household income & expense ledger"
        self.center()
        self.menu_data()
        # connect to the database
        self.db, self.cur = mysqloption.connDB()
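
A hedged usage sketch, assuming the rest of the class (menu_data(), center(), etc.) is defined as in the source project and does not start the Tk loop itself:

win = MainWindow()
win.root.mainloop()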
Example #8
def connClose(conn, cur):  # 关闭所有连接
    cur.close()
    conn.close()
    lg = logger.config_logger('connClose')
    lg.info('断开与数据库的连接')
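
A hedged sketch of how connDB() (Example #6) and connClose() pair up; the query is purely illustrative:

conn, cur = connDB()
try:
    cur.execute("SELECT 1")
finally:
    connClose(conn, cur)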
Example #9
def main():
    _is_windows = platform.system() == 'Windows'

    parser = argparse.ArgumentParser(description='Universal deployer')

    parser.add_argument('--log-file',
                        help='File where save the application log',
                        type=str)
    parser.add_argument('--debug',
                        help='Debug log',
                        action='store_true',
                        default=False)
    parser.add_argument('--trace',
                        help='Trace log',
                        action='store_true',
                        default=False)
    parser.add_argument('--quiet',
                        help='Silent log',
                        action='store_true',
                        default=False)

    parser.add_argument('--list',
                        help='list apps by type or all',
                        nargs='*',
                        type=str)

    parser.add_argument('--list-installed-versions',
                        help='Get installed version of each app',
                        type=str)

    parser.add_argument('--start',
                        help='start all apps, all by type or by name',
                        nargs='*',
                        type=str)
    parser.add_argument('--stop',
                        help='stop all apps, all by type or by name',
                        nargs='*',
                        type=str)
    parser.add_argument('--restart',
                        help='restart all apps, all by type or by name',
                        nargs='*',
                        type=str)

    parser.add_argument('--deploy',
                        help='deploy all apps, all by type or by name',
                        nargs='*',
                        type=str)
    parser.add_argument('--deploy-pre',
                        help='pre deploy all apps, all by type or by name',
                        nargs='*',
                        type=str)
    parser.add_argument('--deploy-post',
                        help='post deploy all apps, all by type or by name',
                        nargs='*',
                        type=str)

    parser.add_argument('--download',
                        help='download all apps, all by type or by name',
                        nargs='*',
                        type=str)

    parser.add_argument('--configure',
                        help='configure all apps, all by type or by name',
                        nargs='*',
                        type=str)

    parser.add_argument('--backup',
                        help='Perform a backup of install path',
                        action='store_true',
                        default=False)
    parser.add_argument('--backup-path',
                        help='path where store backup of old deployment',
                        type=str,
                        default='{0}/opt/universal-deployer-backups'.format(
                            'C:' if _is_windows else ''))

    parser.add_argument('--config-file',
                        help=('Contains apps to install with its version '
                              'number and parameters'),
                        type=str)
    parser.add_argument('--config-path',
                        help=('Folder that contains the configuration of apps'
                              ' to install. Default /etc/universal-deployer'),
                        type=str,
                        default='{0}/etc/universal-deployer'.format(
                            'C:' if _is_windows else ''))
    parser.add_argument('--install-path',
                        help=('Folder where will be installed all apps. '
                              'Default /opt/universal-deployer'),
                        type=str,
                        default='{0}/opt/universal-deployer'.format(
                            'C:' if _is_windows else ''))

    args = parser.parse_args()

    if args.config_file is None:
        print('You need to specify a valid config file.')
        sys.exit(-1)

    config_logger(args.log_file, args.debug, args.trace, args.quiet)

    logger.info(
        'Starting {project_name}'.format(project_name='universal_deployer'))

    deployer = Deployer(
        args.config_file,
        args.config_path,
        args.install_path)

    if args.backup:
        deployer.backup(args.backup_path)

    if args.list:
        deployer.execute('str', args.list)
    if args.list_installed_versions:
        deployer.execute('get_installed_versions', args.list_installed_versions)
    if args.start:
        deployer.execute('start', args.start)
    if args.stop:
        deployer.execute('stop', args.stop)
    if args.restart:
        deployer.execute('restart', args.restart)
    if args.deploy:
        deployer.execute('deploy', args.deploy)
    if args.deploy_pre:
        deployer.execute('deploy_pre', args.deploy_pre)
    if args.deploy_post:
        deployer.execute('deploy_post', args.deploy_post)
    if args.download:
        deployer.execute('download', args.download)
    if args.configure:
        deployer.execute('configure', args.configure)

    logger.info('{project_name} finished successfully!'.format(
        project_name='universal_deployer'))
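
A hedged sketch of driving this entry point programmatically; the config file name is made up and the flags mirror the argparse definitions above:

import sys

sys.argv = ['universal-deployer', '--config-file', 'apps.yml', '--deploy']
main()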
Example #10
import logging

import conf_filesystem.write_conf as cf
import database.repository as r
import entity.conf as c
import logger
import requests as req

logger.config_logger()
LOG = logging.getLogger(__name__)

conf = c.EnvoyConf()
conf.load_from_file()

redis = r.RedisRepository()
redis.flushall()
redis.save_conf(conf)
redis.setup_lds_uuid_db(conf)
redis.setup_eds_uuid_db(conf)


def server():
    while True:
        try:
            request: req.REQUEST_TYPE = redis.get_queue()
        except IndexError:
            continue

        mode: str = request[req.MODE_KEY]

        new_conf: c.EnvoyConf = conf.copy_conf()
Example #11
                        type=str,
                        default='cuda:3')
    parser.add_argument("--optim",
                        help='optimization algorithm',
                        type=str,
                        default='sgd')
    parser.add_argument("--learning_rate",
                        help='learning rate',
                        type=float,
                        default=1e-4)
    parser.add_argument("--model_prefix",
                        help='trained model prefix name',
                        type=str,
                        default='wiki_org_gee_')
    args = parser.parse_args()
    config = Config()
    config.use_elmo = args.use_elmo
    config.use_glove = args.use_glove
    config.is_test = args.is_test
    config.batch_size = args.batch_size
    config.pretrained_parameters = args.pretrained_parameters
    config.device = args.device
    config.optim = args.optim
    config.learning_rate = args.learning_rate
    config.use_extra = args.use_extra
    config.model_prefix = args.model_prefix
    config.print()
    logger = config_logger("main")

    trainer = Trainer(logger, config)
    trainer.train()
Example #12
def create_app():
    # app = Flask(__name__)
    app = Flask('hitorimeshi')
    config_logger(app)
    return app
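
A minimal usage sketch; host and port are illustrative:

app = create_app()
app.run(host='127.0.0.1', port=5000)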
Example #13
base_path = os.path.dirname(__file__)
sys.path.append(os.path.abspath(os.path.join(base_path, '..')))

from sqlalchemy import func
from sqlalchemy.sql.expression import func as ffunc
from sqlalchemy.engine import create_engine
from sqlalchemy.sql import select

from database_definition import *
from logger import config_logger
from database_output_formatting import *
from nn.unknown_input import get_unknown_hanlder
import numpy as np

LOG = config_logger(__name__)

connection = None


def db_init(db_string):
    """
    Initialises the database connection from the given connection string.
    Should be an SQLite database, but other backends will work too.
    """
    engine = create_engine(db_string)

    global connection
    connection = engine.connect()

    return connection
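
A hedged usage sketch; the SQLite URL is illustrative:

from sqlalchemy import text

conn = db_init('sqlite:///example.db')
print(conn.execute(text('SELECT 1')).scalar())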