Example 1
import sys
import os
import shutil

from libs import qconfig
from libs.metautils import remove_from_quast_py_args, Assembly, correct_meta_references, correct_assemblies, \
    get_downloaded_refs_with_alignments, partition_contigs
from libs.options_parser import parse_options

qconfig.check_python_version()
from libs import contigs_analyzer, reads_analyzer, search_references_meta
from libs import qutils
from libs.qutils import cleanup

from libs.log import get_logger
logger = get_logger(qconfig.LOGGER_META_NAME)
logger.set_up_console_handler()

from site import addsitedir
addsitedir(os.path.join(qconfig.LIBS_LOCATION, 'site_packages'))


def _start_quast_main(args,
                      assemblies,
                      reference_fpath=None,
                      output_dirpath=None,
                      num_notifications_tuple=None,
                      is_first_run=None):
    args = args[:]

    args.extend([asm.fpath for asm in assemblies])
Example 2
# -*- coding: utf-8 -*-

""" Return the download address of the Nginx configuration file.

"""


import os

from libs import log, utils
from web.const import NGINX_CONF_DIR, NGINX_TEMPLATE_DIR, NGINX_SERVER_TEMPLATE_FILE
from nginx.libs import template, upstream


logger = log.get_logger("Nginx DOMAINS ")


def _shell(cmd, _logger=logger):
    """ Run a command and log it.

    """
    rc, so, se = utils.shell(cmd)
    if rc == 0:
        message = "cmd:%s" % cmd
        _logger.info(message)
    else:
        message = "cmd:%s, error:%s" % (cmd, se)
        raise Exception(message)


def add(product, _type, idc, name, server_names, log_name, log_format, upstream_node):
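
A minimal usage sketch for the _shell helper defined above; the command string is a made-up placeholder, and the error handling shown is just one way a caller might react to the exception _shell raises on a non-zero exit status.

# Hypothetical caller of _shell(); the command below is only an example.
try:
    _shell("/usr/sbin/nginx -t")
except Exception as e:
    logger.error("nginx config test failed: %s" % e)
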
Example 3
import hmac
import hashlib
import base64
import random
import logging
import traceback
import urllib

import json
import requests

from libs.log import get_logger
from libs.config import init_sng_voice_config

logger = get_logger('sng')


class SNGVoiceService(object):
    def __init__(self):
        self.__base = None
        self.__request_ids = {}

    def __del__(self):
        self.__request_ids = {}
        self.__request_ids = None

    def initialize(self):
        try:
            self.__conf = init_sng_voice_config()
        except:
Example 4
import os
import re
import functools

from libs import log, utils, loki, redisoj, storage
from web.const import (NGINX_CONF_DIR, NGINX_TMP_STORAGE_DIR,
                       NGINX_TEMPLATE_DIR, NGINX_UPSTREAM_TEMPLATE_FILE,
                       NGINX_SSL_ORIGIN_DIR, NGINX_SSL_DEST_DIR,
                       REDIS_DB_NGINX)
from nginx.libs import global_id, template, upstream

_redis_oj = redisoj.PooledConnection(REDIS_DB_NGINX)
client = _redis_oj.get()

logger = log.get_logger("Nginx CONF ")


def _shell(cmd, _logger=logger):
    """ Run a command and log it.

    """
    rc, so, se = utils.shell(cmd)
    if rc == 0:
        message = "cmd:%s" % cmd
        _logger.info(message)
    else:
        message = "cmd:%s, error:%s" % (cmd, se)
        raise Exception(message)

Example 5
#-*- coding: utf-8 -*-
""" Add and remove lbs.

"""

import copy

from libs import log, redisoj
from lvs.libs import funcs, lips
from web.const import REDIS_DB_LVS

logger = log.get_logger("LVS LB")
_redis_oj = redisoj.PooledConnection(REDIS_DB_LVS)
client = _redis_oj.get()


def add(name, new_lbinfos):
    """ Add lbs.

    """
    # Check whether the cluster exists.
    checkdict = {
        "name": name,
    }
    if not funcs.check(checkdict, check_exist=True):
        logger.error("No cluster:%s" % name)
        return False

    # Get the lbs to be added.
    new_lbs = [i["hostname"] for i in new_lbinfos]
    if len(new_lbs) != len(set(new_lbs)):
Example 6
# Copyright (c) 2015 Saint Petersburg State University
# Copyright (c) 2011-2015 Saint Petersburg Academic University
# All Rights Reserved
# See file LICENSE for details.
############################################################################

from __future__ import with_statement
import os
import shutil
import re
from libs import qconfig, qutils
from libs.html_saver import json_saver

from libs.log import get_logger

log = get_logger(qconfig.LOGGER_DEFAULT_NAME)


def get_real_path(relpath_in_html_saver):
    return os.path.join(qconfig.LIBS_LOCATION, "html_saver", relpath_in_html_saver)


html_colors = [
    "#FF0000",  # red
    "#0000FF",  # blue
    "#008000",  # green
    "#A22DCC",  # fuchsia
    "#FFA500",  # orange
    "#800000",  # maroon
    "#00CCCC",  # aqua
    "#B2DF8A",  # light green
Example 7
def create_meta_report(results_dirpath, json_texts):
    html_fpath = os.path.join(results_dirpath, report_fname)
    if not os.path.isfile(html_fpath):
        init(html_fpath, is_meta=True)

    from libs import search_references_meta
    taxons_for_krona = search_references_meta.taxons_for_krona
    meta_log = get_logger(qconfig.LOGGER_META_NAME)
    if taxons_for_krona:
        meta_log.info('  Drawing interactive Krona plots...')
        krona_dirpath = os.path.join(qconfig.LIBS_LOCATION, 'kronatools')
        krona_res_dirpath = os.path.join(results_dirpath,
                                         qconfig.krona_dirname)
        simplejson_error = False
        try:
            import json
        except ImportError:
            try:
                import simplejson as json
            except ImportError:
                log.warning(
                    'Can\'t draw Krona charts - please install python-simplejson'
                )
                simplejson_error = True
        if not simplejson_error:
            if not os.path.isdir(krona_res_dirpath):
                os.mkdir(krona_res_dirpath)
            json_data = json.loads(json_texts[0])
            assemblies = json_data['assembliesNames']
            krona_txt_ext = '_taxonomy.txt'
            krona_common_fpath = os.path.join(krona_res_dirpath,
                                              'overall' + krona_txt_ext)
            krona_common_file = open(krona_common_fpath, 'w')
            for index, name in enumerate(assemblies):
                krona_file = open(
                    os.path.join(krona_res_dirpath, name + krona_txt_ext), 'w')
                krona_file.close()
            for json_text in json_texts[1:]:
                json_data = json.loads(json_text)
                ref = json_data['referenceName']
                report = json_data['report'][0]
                for metric in report[1]:
                    if metric['metricName'] == 'Total length':
                        lengths = metric['values']
                        break
                cur_assemblies = json_data['assembliesNames']
                for index, name in enumerate(cur_assemblies):
                    krona_fpath = os.path.join(krona_res_dirpath,
                                               name + krona_txt_ext)
                    with open(krona_fpath, 'a') as f_krona:
                        if ref in taxons_for_krona:
                            f_krona.write(
                                str(lengths[index]) + '\t' +
                                taxons_for_krona[ref] + '\n')
                        else:
                            f_krona.write(str(lengths[index]) + '\n')
                if ref in taxons_for_krona:
                    krona_common_file.write(
                        str(sum(lengths)) + '\t' + taxons_for_krona[ref] +
                        '\n')
                else:
                    krona_common_file.write(str(sum(lengths)) + '\n')
            krona_common_file.close()
            krona_fpaths = []
            for index, name in enumerate(assemblies):
                krona_fpath = os.path.join(krona_res_dirpath,
                                           name + '_taxonomy_chart.html')
                krona_txt_fpath = os.path.join(krona_res_dirpath,
                                               name + krona_txt_ext)
                qutils.call_subprocess([
                    'perl', '-I', krona_dirpath + '/lib',
                    krona_dirpath + '/scripts/ImportText.pl', krona_txt_fpath,
                    '-o', krona_fpath, '-a'
                ],
                                       stdout=open(os.devnull, 'w'),
                                       stderr=open(os.devnull, 'w'))
                krona_fpaths.append(
                    os.path.join(qconfig.krona_dirname,
                                 name + '_taxonomy_chart.html'))
                meta_log.main_info('  Krona chart for ' + name +
                                   ' is saved to ' + krona_fpath)
                os.remove(krona_txt_fpath)
            if len(assemblies) > 1:
                name = 'summary'
                krona_fpath = os.path.join(krona_res_dirpath,
                                           name + '_taxonomy_chart.html')
                qutils.call_subprocess([
                    'perl', '-I', krona_dirpath + '/lib',
                    krona_dirpath + '/scripts/ImportText.pl',
                    krona_common_fpath, '-o', krona_fpath, '-a'
                ],
                                       stdout=open(os.devnull, 'w'),
                                       stderr=open(os.devnull, 'w'))
                meta_log.main_info('  Summary Krona chart is saved to ' +
                                   krona_fpath)
                krona_fpaths.append(
                    os.path.join(qconfig.krona_dirname, name +
                                 '_taxonomy_chart.html'))  # extra fpath!
            os.remove(krona_common_fpath)
            save_krona_paths(results_dirpath, krona_fpaths, assemblies)

    # reading html template file
    with open(html_fpath) as f_html:
        html_text = f_html.read()
    keyword = 'totalReport'
    html_text = re.sub('{{ ' + keyword + ' }}',
                       '[' + ','.join(json_texts) + ']', html_text)
    html_text = re.sub(r'{{(\s+\S+\s+)}}', '{}', html_text)
    with open(html_fpath, 'w') as f_html:
        f_html.write(html_text)
    meta_log.main_info(
        '  Extended version of HTML-report (for all references and assemblies) is saved to '
        + html_fpath)
Example 8
#-*- coding: utf-8 -*-
""" Get, add and remove clusters.

"""

import time

from libs import log, redisoj
from lvs.libs import funcs
from web.const import REDIS_DB_LVS

logger = log.get_logger("LVS CLUSTER")
_redis_oj = redisoj.PooledConnection(REDIS_DB_LVS)
client = _redis_oj.get()


def add(name, _type, lbinfos, vip2ws, vipnets, device):
    """ Add a cluster.

    """
    # Get the lb info.
    lbs = [i["hostname"] for i in lbinfos]
    if len(lbs) != len(set(lbs)):
        logger.error("Lb duplicates")
        return False

    # Get the vip info.
    vips = [i["vip"] for i in vip2ws]
    if len(vips) != len(set(vips)):
        logger.error("Vip duplicates")
        return False
Example 9
#-*- coding: utf-8 -*-
""" Add and remove vips.

"""

import copy

from libs import log, redisoj
from lvs.libs import funcs
from web.const import REDIS_DB_LVS

logger = log.get_logger("LVS VIP")
_redis_oj = redisoj.PooledConnection(REDIS_DB_LVS)
client = _redis_oj.get()


def add(name, new_vip2ws):
    """ Add VIPs.

    """
    # Check whether the cluster exists.
    checkdict = {
        "name": name,
    }
    if not funcs.check(checkdict, check_exist=True):
        logger.error("No cluster:%s" % name)
        return False

    # Get the vips to be added.
    new_vips = [i["vip"] for i in new_vip2ws]
    if len(new_vips) != len(set(new_vips)):
Example 10
#-*- coding: utf-8 -*-

""" Add and remove vips.

"""

import copy

from libs import log, redisoj
from lvs.libs import funcs 
from web.const import REDIS_DB_LVS


logger = log.get_logger("LVS VIP")
_redis_oj = redisoj.PooledConnection(REDIS_DB_LVS)
client = _redis_oj.get()


def add(name, new_vip2ws):
    """ Add VIPs.

    """
    # Check whether the cluster exists.
    checkdict = {
        "name": name,
    }
    if not funcs.check(checkdict, check_exist=True):
        logger.error("No cluster:%s" % name)
        return False

    # Get the vips to be added.
Example 11
# -*- coding: utf-8 -*-

""" Modify the wstype of a vip.

"""

import copy

from libs import log, redisoj
from lvs.libs import funcs
from web.const import REDIS_DB_LVS


logger = log.get_logger("LVS WSTYPE")
_redis_oj = redisoj.PooledConnection(REDIS_DB_LVS)
client = _redis_oj.get()


def modify(name, vip, wstype):
    """ Modify the wstype of one vip in a cluster.

    """
    # Check whether the cluster exists.
    check_dict = {"name": name}
    if not funcs.check(check_dict, check_exist=True):
        logger.error("No cluster:%s" % name)
        return False

    # Get the cluster info.
    key = "cluster:%s" % name
    old_vip2ws = eval(client.hget(key, "vip2ws"))
Example 12
""" Generate LVS configuration files and transfer them to the LVS machines.

"""

import os
import time

from jinja2 import Environment, FileSystemLoader

from libs import log, utils
from lvs.libs import lips
from web.const import (LVS_TEMPLATE_DIR, LVS_CFG_TMP_DIR, 
                       LB_CFG_BAK_DIR)


logger = log.get_logger("LVS CONF")

if not os.path.exists(LVS_CFG_TMP_DIR):
    os.mkdir(LVS_CFG_TMP_DIR)


def _backup(hosts, remote_dir, base_dir=LB_CFG_BAK_DIR):
    """ Back up a directory on the remote hosts.

    """
    # Build a timestamped name for the backup directory.
    now = time.strftime("%Y%m%d%H%M%S")
    bak_dir = "%s/%s_%s" % (base_dir, \
        remote_dir.replace("/","-"), now)

    for host in hosts:
Example 13
reference_ls = 'dashed' # ls = line style

# axis params:
logarithmic_x_scale = False  # for cumulative plots only

####################################################################################
########################  END OF CONFIGURABLE PARAMETERS  ##########################
####################################################################################

import os
import itertools
from libs import fastaparser, qutils
from libs import qconfig

from libs.log import get_logger
logger = get_logger(qconfig.LOGGER_DEFAULT_NAME)
meta_logger = get_logger(qconfig.LOGGER_META_NAME)

import reporting

# Supported plot formats: .emf, .eps, .pdf, .png, .ps, .raw, .rgba, .svg, .svgz
plots_file_ext = '.' + qconfig.plot_extension

# checking if matplotlib is installed
matplotlib_error = False
try:
    import matplotlib
    matplotlib.use('Agg')  # non-GUI backend
    if matplotlib.__version__.startswith('0') or matplotlib.__version__.startswith('1.0'):
        logger.warning('Can\'t draw plots: matplotlib version is old! Please use matplotlib version 1.1 or higher.')
        matplotlib_error = True
Example 14
#-*- coding: utf-8 -*-
""" Add and remove ports of a vip.

"""

import copy

from libs import log, redisoj
from lvs.libs import funcs
from web.const import REDIS_DB_LVS

logger = log.get_logger("LVS PORT")
_redis_oj = redisoj.PooledConnection(REDIS_DB_LVS)
client = _redis_oj.get()


def add(name, vip, ports):
    """ Add several ports to one vip of a cluster.

    ports is a list in the following format:
    [
        {
            dport: xxx,
            synproxy: 1,
            sport: yyy,
            persistence_timeout: 50
        },
        {
            dport: aaa,
            synproxy: 0,
            sport: bbb,
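
Based on the format sketched in the docstring above, a call to add() might look like the following; the cluster name, vip and port numbers are placeholders, and plain string keys are assumed for the port dicts.

# Hypothetical ports payload matching the docstring's format.
ports = [
    {"dport": 80, "synproxy": 1, "sport": 8080, "persistence_timeout": 50},
    {"dport": 443, "synproxy": 0, "sport": 8443, "persistence_timeout": 50},
]
add("cluster01", "10.0.0.100", ports)
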
Example 15
# See file LICENSE for details.
############################################################################

from __future__ import with_statement

RELEASE_MODE = True

import getopt
import os
import shutil
import sys
from libs import qconfig, qutils, fastaparser
from libs.qutils import assert_file_exists

from libs.log import get_logger
logger = get_logger('metaquast')
logger.set_up_console_handler(debug=not RELEASE_MODE)

import quast

from site import addsitedir
addsitedir(os.path.join(qconfig.LIBS_LOCATION, 'site_packages'))

COMBINED_REF_FNAME = 'combined_reference.fasta'


class Assembly:
    def __init__(self, fpath, label):
        self.fpath = fpath
        self.label = label
        self.name = os.path.splitext(os.path.basename(self.fpath))[0]
Example 16
#-*- coding: utf-8 -*-
""" Return the download address of the Nginx configuration file.

"""

import os

from libs import log, utils
from web.const import (NGINX_CONF_DIR, NGINX_TEMPLATE_DIR,
                       NGINX_SERVER_TEMPLATE_FILE)
from nginx.libs import template, upstream

logger = log.get_logger("Nginx DOMAINS ")


def _shell(cmd, _logger=logger):
    """ Run a command and log it.

    """
    rc, so, se = utils.shell(cmd)
    if rc == 0:
        message = "cmd:%s" % cmd
        _logger.info(message)
    else:
        message = "cmd:%s, error:%s" % (cmd, se)
        raise Exception(message)


def add(product, _type, idc, name, server_names, log_name, log_format,
        upstream_node):
    """ Add the configuration for a domain.
Example 17
############################################################################
# Copyright (c) 2011-2014 Saint-Petersburg Academic University
# All Rights Reserved
# See file LICENSE for details.
############################################################################

import datetime
import os
from libs import qutils, qconfig

from libs.log import get_logger
log = get_logger('quast')

simplejson_error = False
try:
    import json
except ImportError:
    try:
        import simplejson as json
    except ImportError:
        log.warning('Can\'t build html report - please install python-simplejson')
        simplejson_error = True

total_report_fname    = '/report.json'
contigs_lengths_fn    = '/contigs_lengths.json'
ref_length_fn         = '/ref_length.json'
aligned_contigs_fn    = '/aligned_contigs_lengths.json'
assemblies_lengths_fn = '/assemblies_lengths.json'
in_contigs_suffix_fn  = '_in_contigs.json'
gc_fn                 = '/gc.json'
Example 18
import logging
import traceback
import time
import urllib
import base64

# Third-party modules
import zerorpc

# Project-specific modules
from libs.log import get_logger
from service.sng_voice_service import SNGVoiceService
from tools.parser_parameter import ParserParameter

# Logger instance
logger = get_logger('enter')
##################################################################################

##################################################################################
# Voice service class
class VoiceService(object):
    __sng_voice_service = None
    def __init__(self):
        self.__sng_voice_service = None

    def __del__(self):
        self.__sng_voice_service = None

    def hello(self, name):
        return 'Hello, {0}'.format(name)
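
The class above is written for zerorpc; a minimal sketch of how such a service is typically exposed follows. The bind address is an assumption, not taken from this code.

import zerorpc

# Hypothetical server setup for the VoiceService class above.
server = zerorpc.Server(VoiceService())
server.bind("tcp://0.0.0.0:4242")
server.run()
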
Example 19
# -*- coding: utf-8 -*-


import time
import re

import pexpect

from web.const import PXELINUX_CFGS
from libs import log, utils


logger = log.get_logger("pm ilo oper")


class generate(object):

    def __init__(self, idc, ip, passwd):
        self.idc = idc
        self.ip = ip
        self.passwd = passwd

    def ssh_cmd(self, cmd):
        ssh = pexpect.spawn(
            '''ssh -t -p 2222 -o StrictHostKeyChecking=no '''\
            '''-t %s-relay.nosa.me " ssh -o StrictHostKeyChecking=no '''\
            '''-o ConnectTimeout=600 root@%s '%s' " ''' % (
                self.idc, self.ip, cmd), timeout=600)
        ssh.expect([pexpect.TIMEOUT, 'password: '])
        ssh.sendline(self.passwd)
        time.sleep(1)
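
A usage sketch for the generate class above; the IDC name, IP, password and command are placeholders for illustration only.

# Hypothetical values; ssh_cmd() runs the command on the target host via the relay.
oper = generate("idc1", "10.1.2.3", "secret")
oper.ssh_cmd("hostname")
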
Example 20
#-*- coding: utf-8 -*-
""" Modify the wstype of a vip.

"""

import copy

from libs import log, redisoj
from lvs.libs import funcs
from web.const import REDIS_DB_LVS

logger = log.get_logger("LVS WSTYPE")
_redis_oj = redisoj.PooledConnection(REDIS_DB_LVS)
client = _redis_oj.get()


def modify(name, vip, wstype):
    """ Modify the wstype of one vip in a cluster.

    """
    # Check whether the cluster exists.
    check_dict = {
        "name": name,
    }
    if not funcs.check(check_dict, check_exist=True):
        logger.error("No cluster:%s" % name)
        return False

    # Get the cluster info.
    key = "cluster:%s" % name
    old_vip2ws = eval(client.hget(key, "vip2ws"))
Example 21
import sys
import os
import shutil
import getopt

quast_dirpath = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
sys.path.append(os.path.join(quast_dirpath, 'libs'))
from libs import qconfig
qconfig.check_python_version()

from libs import qutils, fastaparser
from libs.qutils import assert_file_exists

from libs.log import get_logger
logger = get_logger('metaquast')
logger.set_up_console_handler()

from site import addsitedir
addsitedir(os.path.join(quast_dirpath, 'libs', 'site_packages'))

import quast

COMBINED_REF_FNAME = 'combined_reference.fasta'


class Assembly:
    def __init__(self, fpath, label):
        self.fpath = fpath
        self.label = label
        self.name = os.path.splitext(os.path.basename(self.fpath))[0]
Example 22
#-*- coding: utf-8 -*-

""" Get, add and remove clusters.

"""


import time

from libs import log, redisoj
from lvs.libs import funcs
from web.const import REDIS_DB_LVS


logger = log.get_logger("LVS CLUSTER")
_redis_oj = redisoj.PooledConnection(REDIS_DB_LVS)
client = _redis_oj.get()


def add(name, _type, lbinfos, vip2ws, vipnets, device):
    """ Add a cluster.

    """
    # Get the lb info.
    lbs = [i["hostname"] for i in lbinfos]
    if len(lbs) != len(set(lbs)):
        logger.error("Lb duplicates")
        return False

    # Get the vip info.
    vips = [i["vip"] for i in vip2ws]
Example 23
import datetime
import os
import random
import time

from bottle import Bottle, request, run, response, static_file, abort

from libs.config import conf_global
from libs.log import get_logger
from libs.utils import get_host_ip

__author__ = '*****@*****.**'

current_dir = conf_global.get('app', 'current_dir',
                              fallback='storage_0')  # alias of the directory currently used for storing files
logger = get_logger(__name__)
app = Bottle()


@app.route('/upload/<file_type>', method='POST')
def do_upload(file_type):
    """Receive and save an uploaded file."""
    if 'multipart/form-data' not in request.headers.get('Content-Type'):
        return {'code': 2, 'msg': 'Content-Type must be multipart/form-data!'}
    if not request.files:
        return {'code': 1, 'msg': 'Parameter file must not be empty!'}
    if file_type not in ('permanent', 'temporary'):
        return {'code': 1, 'msg': 'Parameter file_type must not be empty!'}

    # Build the file upload path
    now = datetime.datetime.now()
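
A client-side sketch for the upload endpoint above, assuming the app is served on a hypothetical host and port; requests sets the required multipart/form-data Content-Type automatically, and file_type must be 'permanent' or 'temporary'.

import requests

# Hypothetical client call against the /upload/<file_type> route above.
with open("example.txt", "rb") as f:
    resp = requests.post("http://127.0.0.1:8080/upload/temporary",
                         files={"file": f})
print(resp.json())
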
Example 24
# -*- coding: utf-8 -*-

""" Add and remove ws.

"""

import copy

from libs import log, redisoj
from lvs.libs import funcs
from web.const import REDIS_DB_LVS


logger = log.get_logger("LVS WS")
_redis_oj = redisoj.PooledConnection(REDIS_DB_LVS)
client = _redis_oj.get()


def add(name, new_vip2ws):
    """ Add ws.

    Since each ws belongs to a vip, the vips must be passed in and must already exist.

    new_vip2ws only needs to contain the two keys vip and wss.

    """
    # Get the vips.
    share_vips = [i["vip"] for i in new_vip2ws]
    if len(share_vips) != len(set(share_vips)):
        logger.error("Vip duplicates")
        return False
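
A sketch of a call to this add(), following the docstring's requirement that each entry carries only the vip and wss keys; the vip value and ws hostnames are placeholders, and wss being a plain list of ws hostnames is an assumption.

# Hypothetical payload: each entry holds an existing vip and its ws list.
new_vip2ws = [
    {"vip": "10.0.0.100", "wss": ["ws01.example.com", "ws02.example.com"]},
]
add("cluster01", new_vip2ws)
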
Example 25
#-*- coding: utf-8 -*-

""" Get, modify and remove Nginx upstream information.

"""

import numbers

from libs import log

from web import const
from nginx.libs import mysqloj


logger = log.get_logger("Nginx UPSTREAM ")


def get():
    """ Get upstream information.

    """
    _mysql_oj = mysqloj.PooledConnection()
    sql = "select name, loki_id, port, ip_hash, online "\
          "from %s;" % const.MYSQL_TABLE
    ret = _mysql_oj.select(sql) 

    data = list()
    for i in ret:
        _dict = {
            "name": i[0],
            "loki_id": i[1],
Example 26
1). Reboot and press F12 to enter PXE mode;
2). If a NIC selection screen appears, choose the second NIC (assuming the second NIC is on the internal network);
3). At the boot screen, just press Enter.

"""

import sys
import os
import traceback

from libs import html, log, mail, redisoj
from pm.libs import create_man
from web.const import REDIS_DB_PM


logger = log.get_logger("pm manual create")
client = redisoj.generate(REDIS_DB_PM)


def main():
    while 1:
        try:
            m = client.brpop("queue:create_man")
            m = eval(m[1])
            install_lists = m["install_lists"]
            task_id = m["task_id"]            
            email = m["email"]
    
            logger.info(
                "install_lists-%s,email-%s" % (install_lists, email))
    
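
The loop above pops string-serialized dicts from the Redis list queue:create_man; a producer-side sketch consistent with that format follows. The task id, email and install_lists content are hypothetical.

# Hypothetical producer: push a message in the shape the loop above eval()s.
message = {
    "install_lists": [{"hostname": "pm01.example.com"}],
    "task_id": "task-0001",
    "email": "ops@example.com",
}
client.lpush("queue:create_man", str(message))
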
Example 27
#-*- coding: utf-8 -*-

""" Add and remove lbs.

"""

import copy

from libs import log, redisoj
from lvs.libs import funcs, lips
from web.const import REDIS_DB_LVS


logger = log.get_logger("LVS LB")
_redis_oj = redisoj.PooledConnection(REDIS_DB_LVS)
client = _redis_oj.get()


def add(name, new_lbinfos):
    """ Add lbs.

    """
    # Check whether the cluster exists.
    checkdict = {
        "name": name,
    }
    if not funcs.check(checkdict, check_exist=True):
        logger.error("No cluster:%s" % name)
        return False

    # Get the lbs to be added.
Example 28
############################################################################
# Copyright (c) 2011-2014 Saint-Petersburg Academic University
# All Rights Reserved
# See file LICENSE for details.
############################################################################

from __future__ import with_statement
import os
import shutil
import re
from libs import qconfig
from libs.html_saver import json_saver

from libs.log import get_logger
log = get_logger('quast')


def get_real_path(relpath_in_html_saver):
    return os.path.join(qconfig.LIBS_LOCATION, 'html_saver',
                        relpath_in_html_saver)


scripts_inserted = False

report_fname = qconfig.report_prefix + ".html"

template_fpath = get_real_path('template.html')

static_dirname = 'static'
static_dirpath = get_real_path(static_dirname)
Example 29
from libs import log, utils, loki, redisoj, storage
from web.const import (NGINX_CONF_DIR, 
                       NGINX_TMP_STORAGE_DIR, 
                       NGINX_TEMPLATE_DIR, 
                       NGINX_UPSTREAM_TEMPLATE_FILE, 
                       NGINX_SSL_ORIGIN_DIR, 
                       NGINX_SSL_DEST_DIR, 
                       REDIS_DB_NGINX)
from nginx.libs import global_id, template, upstream


_redis_oj = redisoj.PooledConnection(REDIS_DB_NGINX)
client = _redis_oj.get() 

logger = log.get_logger("Nginx CONF ")


def _shell(cmd, _logger=logger):
    """ Run a command and log it.

    """
    rc, so, se = utils.shell(cmd)
    if rc == 0:
        message = "cmd:%s" % cmd
        _logger.info(message)
    else:
        message = "cmd:%s, error:%s" % (
            cmd, se)
        raise Exception(message)
Example 30
import random
import sys
import os
import traceback
from multiprocessing.dummy import Pool as ThreadPool

import requests
import ujson as json

from web.const import MAX_THREAD_NUM
from web.const import REDIS_DB_PM, REDIS_DB_COMMON
from libs import redisoj, log, utils, server, dnsapi
from pm.libs import ilo_info, ilo_oper


logger = log.get_logger("pm auto create")
client = redisoj.generate(REDIS_DB_PM)

client_user_data = redisoj.generate(REDIS_DB_COMMON)


def multi(install_lists, task_id):
    client.hset(task_id, "install_lists", install_lists)

    pool = ThreadPool(MAX_THREAD_NUM)
    install_results = pool.map(single, install_lists)
    pool.close()
    pool.join()

    client.hset(task_id, "install_results", install_results)
    return install_results
Example 31
#-*- coding: utf-8 -*-

""" Add and remove ports of a vip.

"""

import copy

from libs import log, redisoj
from lvs.libs import funcs
from web.const import REDIS_DB_LVS


logger = log.get_logger("LVS PORT")
_redis_oj = redisoj.PooledConnection(REDIS_DB_LVS)
client = _redis_oj.get()


def add(name, vip, ports):
    """ Add several ports to one vip of a cluster.

    ports is a list in the following format:
    [
        {
            dport: xxx,
            synproxy: 1,
            sport: yyy,
            persistence_timeout: 50
        },
        {
            dport: aaa,
Example 32
# -*- coding: utf-8 -*-

""" Check the queue.

"""

import sys
import os
import traceback

from libs import html, log, mail, redisoj
from pm.libs import check
from web.const import REDIS_DB_PM


logger = log.get_logger("pm check")
client = redisoj.generate(REDIS_DB_PM)


def main():
    while 1:
        try:
            m = client.brpop("queue:check")
            m = eval(m[1])
            check_lists = m["check_lists"]
            task_id = m["task_id"]
            email = m["email"]
    
            logger.info(
                "check_lists-%s,email-%s" % (check_lists, email))
    
Example 33
import base64
import requests
from multiprocessing.dummy import Pool as ThreadPool

import boto.ec2
from boto.ec2.blockdevicemapping import BlockDeviceMapping, \
    BlockDeviceType
from boto.ec2.networkinterface import NetworkInterfaceSpecification, \
    NetworkInterfaceCollection

from libs import html, log, mail, asset_hostname
from web.const import EXTRA_INFO, AMI_SNAP_INFO, DNS_INFO
from web.const import POST_INSTALL_SCRIPT


logger = log.get_logger("aws ")


def vm(install_list):
    region = install_list["region"]
    subnet_id = install_list["subnet_id"]
    ami_id = install_list["ami_id"]
    snap_id = install_list["snap_id"]
    volume_capacity = install_list["volume_capacity"]
    key_name = install_list["key_name"]
    instance_type = install_list["instance_type"]
    sg_id = install_list["sg_id"]
    user_data = install_list["user_data"]
    usage = install_list["usage"]

    block_device_map = BlockDeviceMapping()
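
A sketch of the install_list dict that vm() unpacks above; only the key names come from the code, every value is a placeholder.

# Hypothetical input for vm(); keys mirror those read at the top of the function.
install_list = {
    "region": "us-east-1",
    "subnet_id": "subnet-0123456789abcdef0",
    "ami_id": "ami-0123456789abcdef0",
    "snap_id": "snap-0123456789abcdef0",
    "volume_capacity": 100,
    "key_name": "ops-key",
    "instance_type": "t2.micro",
    "sg_id": "sg-0123456789abcdef0",
    "user_data": "#!/bin/bash\necho hello",
    "usage": "web",
}
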
Example 34
############################################################################
# Copyright (c) 2015 Saint Petersburg State University
# Copyright (c) 2011-2015 Saint Petersburg Academic University
# All Rights Reserved
# See file LICENSE for details.
############################################################################



import os
import shutil
import qconfig
from libs.log import get_logger
import reporting
logger = get_logger(qconfig.LOGGER_META_NAME)


def get_results_for_metric(ref_names, metric, contigs_num, labels, output_dirpath, report_fname):

    all_rows = []
    cur_ref_names = []
    row = {'metricName': 'References', 'values': cur_ref_names}
    all_rows.append(row)
    results = []
    for i in range(contigs_num):
        row = {'metricName': labels[i], 'values': []}
        all_rows.append(row)
    for i, ref_name in enumerate(ref_names):
        results_fpath = os.path.join(output_dirpath, ref_name, report_fname)
        if not os.path.exists(results_fpath):
            all_rows[0]['values'] = cur_ref_names
Example 35
############################################################################
# Copyright (c) 2015 Saint Petersburg State University
# Copyright (c) 2011-2015 Saint Petersburg Academic University
# All Rights Reserved
# See file LICENSE for details.
############################################################################

import logging
import os
import fastaparser
import genes_parser
from libs import reporting, qconfig, qutils
from libs.html_saver import json_saver

from libs.log import get_logger
logger = get_logger(qconfig.LOGGER_DEFAULT_NAME)
ref_lengths_by_contigs = {}


# reading genes and operons
class FeatureContainer:
    def __init__(self, fpaths, kind=''):
        self.kind = kind  # 'gene' or 'operon'
        self.fpaths = fpaths
        self.region_list = []
        self.chr_names_dict = {}


def get_ref_aligned_lengths():
    return ref_lengths_by_contigs
Example 36
""" A collection of helper functions.

"""


import os
import time
from multiprocessing.dummy import Pool as ThreadPool

from web.const import (LOCAL_SSH_KNOWN_HOSTS, LVS_FULLNAT_CMD, 
                       REDIS_DB_LVS)
from libs import dnsapi, log, mail, redisoj, utils
from lvs.libs import conf, info


logger = log.get_logger("LVS FUNCS")
_redis_oj = redisoj.PooledConnection(REDIS_DB_LVS)
client = _redis_oj.get()


def check(_dict, check_exist=True):
    """ Check whether name, vip or lb already exists.

    _dict has the following format:
        {
            "name": name,
            "vips": vips,
            "lbs": lbs
        }

    When check_exist is True, return False if even one of the items in _dict does not exist;
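
A usage sketch matching how check() is called from the LVS modules in the other examples; the cluster name is a placeholder.

# Hypothetical existence check, mirroring the callers of funcs.check().
checkdict = {"name": "cluster01"}
if not check(checkdict, check_exist=True):
    logger.error("No cluster:%s" % "cluster01")
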
Example 37
def create_meta_report(results_dirpath, json_texts):
    html_fpath = os.path.join(results_dirpath, report_fname)
    if not os.path.isfile(html_fpath):
        init(html_fpath, is_meta=True)

    from libs import search_references_meta

    taxons_for_krona = search_references_meta.taxons_for_krona
    meta_log = get_logger(qconfig.LOGGER_META_NAME)
    if taxons_for_krona:
        meta_log.info("  Drawing interactive Krona plots...")
        krona_dirpath = os.path.join(qconfig.LIBS_LOCATION, "kronatools")
        krona_res_dirpath = os.path.join(results_dirpath, qconfig.krona_dirname)
        simplejson_error = False
        try:
            import json
        except ImportError:
            try:
                import simplejson as json
            except ImportError:
                log.warning("Can't draw Krona charts - please install python-simplejson")
                simplejson_error = True
        if not simplejson_error:
            if not os.path.isdir(krona_res_dirpath):
                os.mkdir(krona_res_dirpath)
            json_data = json.loads(json_texts[0])
            assemblies = json_data["assembliesNames"]
            krona_txt_ext = "_taxonomy.txt"
            krona_common_fpath = os.path.join(krona_res_dirpath, "overall" + krona_txt_ext)
            krona_common_file = open(krona_common_fpath, "w")
            for index, name in enumerate(assemblies):
                krona_file = open(os.path.join(krona_res_dirpath, name + krona_txt_ext), "w")
                krona_file.close()
            for json_text in json_texts[1:]:
                json_data = json.loads(json_text)
                ref = json_data["referenceName"]
                report = json_data["report"][0]
                for metric in report[1]:
                    if metric["metricName"] == "Total length":
                        lengths = metric["values"]
                        break
                cur_assemblies = json_data["assembliesNames"]
                for index, name in enumerate(cur_assemblies):
                    krona_fpath = os.path.join(krona_res_dirpath, name + krona_txt_ext)
                    with open(krona_fpath, "a") as f_krona:
                        if ref in taxons_for_krona:
                            f_krona.write(str(lengths[index]) + "\t" + taxons_for_krona[ref] + "\n")
                        else:
                            f_krona.write(str(lengths[index]) + "\n")
                if ref in taxons_for_krona:
                    krona_common_file.write(str(sum(lengths)) + "\t" + taxons_for_krona[ref] + "\n")
                else:
                    krona_common_file.write(str(sum(lengths)) + "\n")
            krona_common_file.close()
            krona_fpaths = []
            for index, name in enumerate(assemblies):
                krona_fpath = os.path.join(krona_res_dirpath, name + "_taxonomy_chart.html")
                krona_txt_fpath = os.path.join(krona_res_dirpath, name + krona_txt_ext)
                qutils.call_subprocess(
                    [
                        "perl",
                        "-I",
                        krona_dirpath + "/lib",
                        krona_dirpath + "/scripts/ImportText.pl",
                        krona_txt_fpath,
                        "-o",
                        krona_fpath,
                        "-a",
                    ],
                    stdout=open(os.devnull, "w"),
                    stderr=open(os.devnull, "w"),
                )
                krona_fpaths.append(os.path.join(qconfig.krona_dirname, name + "_taxonomy_chart.html"))
                meta_log.main_info("  Krona chart for " + name + " is saved to " + krona_fpath)
                os.remove(krona_txt_fpath)
            if len(assemblies) > 1:
                name = "summary"
                krona_fpath = os.path.join(krona_res_dirpath, name + "_taxonomy_chart.html")
                qutils.call_subprocess(
                    [
                        "perl",
                        "-I",
                        krona_dirpath + "/lib",
                        krona_dirpath + "/scripts/ImportText.pl",
                        krona_common_fpath,
                        "-o",
                        krona_fpath,
                        "-a",
                    ],
                    stdout=open(os.devnull, "w"),
                    stderr=open(os.devnull, "w"),
                )
                meta_log.main_info("  Summary Krona chart is saved to " + krona_fpath)
                krona_fpaths.append(os.path.join(qconfig.krona_dirname, name + "_taxonomy_chart.html"))  # extra fpath!
            os.remove(krona_common_fpath)
            save_krona_paths(results_dirpath, krona_fpaths, assemblies)

    # reading html template file
    with open(html_fpath) as f_html:
        html_text = f_html.read()
    keyword = "totalReport"
    html_text = re.sub("{{ " + keyword + " }}", "[" + ",".join(json_texts) + "]", html_text)
    html_text = re.sub(r"{{(\s+\S+\s+)}}", "{}", html_text)
    with open(html_fpath, "w") as f_html:
        f_html.write(html_text)
    meta_log.main_info(
        "  Extended version of HTML-report (for all references and assemblies) is saved to " + html_fpath
    )
Example 38
#-*- coding: utf-8 -*-

""" Add and remove ws.

"""

import copy

from libs import log, redisoj
from lvs.libs import funcs 
from web.const import REDIS_DB_LVS


logger = log.get_logger("LVS WS")
_redis_oj = redisoj.PooledConnection(REDIS_DB_LVS)
client = _redis_oj.get()


def add(name, new_vip2ws):
    """ Add ws.

    Since each ws belongs to a vip, the vips must be passed in and must already exist.

    new_vip2ws only needs to contain the two keys vip and wss.

    """
    # Get the vips.
    share_vips = [i["vip"] for i in new_vip2ws]
    if len(share_vips) != len(set(share_vips)):
        logger.error("Vip duplicates")
        return False