Example #1
def sendmail(attach):
    '''Send the test report by email.'''
    subject = 'API test results: %s' % time.strftime('%Y/%m/%d %H:%M:%S')
    content = 'The API tests have finished; see the attached report for details.'

    mail_server = conf.get_conf('mail', 'server')
    mail_user = conf.get_conf('mail', 'user')
    mail_passwd = conf.get_conf('mail', 'passwd')
    # The to/cc lists are stored as Python list literals in the config;
    # ast.literal_eval would be a safer parser than eval() here.
    mail_to = eval(conf.get_conf('mail', 'to'))
    mail_cc = eval(conf.get_conf('mail', 'cc'))
    mailer = SendMail(mail_server, mail_user, mail_passwd)
    mailer.send(to=mail_to, cc=mail_cc, subject=subject,
                content=content, attach=[attach])
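
Most snippets on this page call a project-local get_conf helper rather than a shared library, so its exact behavior varies. As a rough sketch only: a minimal configparser-backed version that would satisfy the two-argument calls above (the file name and return type are assumptions):

import configparser

_parser = configparser.ConfigParser()
_parser.read('config.ini')  # assumed config file name


def get_conf(section, key):
    # Return the raw string stored under [section] in the INI file.
    return _parser.get(section, key)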
Example #2
    def process_request(self, request):
        lang = request.GET.get(conf.get_conf(conf.LANG_PARAM), '')

        if not lang:
            lang = request.META.get(
                "HTTP_%s" % conf.get_conf(conf.LANG_PARAM).upper(), '')

        if not lang:
            lang = request.COOKIES.get(conf.get_conf(conf.LANG_PARAM), '')

        if lang:
            translation.activate(lang)
            request.LANGUAGE_CODE = translation.get_language()
Example #3
def get_session(connection='preview'):
    conf = get_conf('database', 'MYSQL').get(connection)
    url = 'mysql+pymysql://{user}:{password}@{host}:{port}/{db}?charset=utf8'.format(
        **conf)
    engine = create_engine(url)
    session = sessionmaker(bind=engine)
    return session()
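
A hedged usage sketch for get_session; the connection name comes from the snippet, the query is invented:

from sqlalchemy import text

session = get_session('preview')
try:
    rows = session.execute(text('SELECT 1')).fetchall()  # invented query
finally:
    session.close()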
Example #4
    def __init__(self):
        try:
            self.conf = get_conf()
        except IOError as e:
            raise FileError(e.filename, hint='Are you in the right directory?')

        try:
            self.name = self.conf['name']
        except KeyError:
            raise ClickException('No name in skipper.yml')

        self.services = []
        for name, details in self.conf['services'].items():
            try:
                self.services.append(
                    self.make_service(name=name, **details))
            except (TypeError, ValueError) as e:
                raise ClickException("%s: %s" % (name, str(e)))

        self.host = get_host(self.conf.get('host', 'aws'))
        self.host.creds = creds
        self.host.project = self

        self.groups = []
        for name, details in self.conf['groups'].items():
            try:
                self.groups.append(
                    self.host.make_group(
                        name=name,
                        region=self.conf.get('region'),
                        **details))
            except (TypeError, ValueError) as e:
                raise ClickException("%s: %s" % (name, str(e)))
Example #5
def connection_str(connection='base'):
    registry.register('clickhouse', 'clickhouse_sqlalchemy.drivers.base',
                      'dialect')
    conf = get_conf('database', 'CLICKHOUSE').get(connection)
    url = 'clickhouse://{user}:{password}@{host}:{port}/{db}'.format(**conf)
    return url
Example #6
def main():
    """Create the sockets and start listening.
    """

    selector = selectors.DefaultSelector()
    log_level = get_conf("LOG_LEVEL", logging.WARNING, t=int)
    logging.basicConfig(level=log_level)

    tcp_socket = tcp.get_socket(HOST, PORT)
    tcp_handler = tcp.get_handler(
        query_dns_fn=lambda data: tls.query_dns(data, DNS_HOST, DNS_PORT),
        selector=selector,
        event=selectors.EVENT_READ)

    udp_socket = udp.get_socket(HOST, PORT)
    udp_handler = udp.get_handler(
        query_dns_fn=lambda data: tls.query_dns(data, DNS_HOST, DNS_PORT))

    selector.register(tcp_socket, selectors.EVENT_READ, tcp_handler)
    logging.info("Listening on %s:%s/tcp", HOST, PORT)
    selector.register(udp_socket, selectors.EVENT_READ, udp_handler)
    logging.info("Listening on %s:%s/udp", HOST, PORT)

    try:
        while True:
            events = selector.select()
            for key, _ in events:
                callback = key.data
                callback(key.fileobj)
    except KeyboardInterrupt:
        logging.debug('Closing sockets... (press Ctrl+C again to force)')
        tcp_socket.close()
        udp_socket.close()
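
The selector stores each handler as key.data and calls it with the ready socket, so a handler is just a one-argument callable. A minimal sketch of a compatible UDP handler, under that assumption (the real tcp.get_handler/udp.get_handler are not shown here):

def make_udp_echo_handler():
    # Hypothetical handler matching the callback(key.fileobj) convention above.
    def handle(sock):
        data, addr = sock.recvfrom(4096)  # one datagram per readiness event
        if data:
            sock.sendto(data, addr)       # echo the payload straight back
    return handle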
Example #7
def main():
    # get options from console.
    options = args()

    # get configuration from file.
    config = get_conf(options['config_file'])

    # create ES connection to hosts.
    connections.create_connection(hosts=config['elasticsearch']['hosts'],
                                  timeout=30)

    # create the searcher instance to find alarms, given the options from
    # console.
    searcher = Searcher(options['from'],
                        options['query'],
                        ttime=options['to'],
                        per_page=500,
                        min_priority=options['min_priority'])

    buckets = [
        PathClassBucket(
            utils.build_url(config['kibana']['host'],
                            config['kibana']['secure']))
    ]

    # manually fetch all alarms from the searcher and pass each one to every bucket.
    for alarm in searcher.pages():
        for bucket in buckets:
            bucket.cherry_pick(alarm)

    # dump every bucket, printing its contents to the screen.
    for bucket in buckets:
        bucket.dump()
Example #8
def main():
    '''
    Run!
    Prints results to the screen.
    '''
    # Parse args and define some basic params
    opts = conf.get_conf()
    args = parse_args()
    doc_dir = os.path.join(opts['SALT_REPO_PATH'], 'doc')
    man_build_dir = os.path.join(doc_dir, '_build', 'man')
    man_dir = os.path.join(doc_dir, 'man')
    old = args.old_version

    print('Building man pages in directory: {0}'.format(doc_dir))

    # Build the man pages
    _cmd_run(['make', 'man', '-C', doc_dir])

    print('Copying new man files from {0} to {1}'.format(
        man_build_dir, man_dir))
    for file_ in os.listdir(man_build_dir):
        file_path = os.path.join(man_build_dir, file_)
        print('Copying file: {0}'.format(file_path))
        shutil.copy(file_path, man_dir)

    for file_ in os.listdir(man_dir):
        _replace_txt(os.path.join(man_dir, file_),
                     old='"{0}*"'.format(old),
                     new='"{0}" '.format(args.version),
                     regex=True)

        print('Adding Salt Version {0} to file: {1}'.format(
            args.version, file_))
Example #9
def import_button_click(self):
    """Import button click handler."""
    cfg = get_conf(askopenfilename())
    for k in self.input_variables:
        if k in cfg:
            self.input_variables[k].set(cfg[k])
Example #10
def get_objects(project, fixver=None, obj_id=None, fields=None):
    cnf = get_conf('JIRA')
    server = cnf['server_url']
    user_name = cnf['username']
    pwd = cnf['password']
    objs = []
    obj = {}
    jira = None
    try:
        if obj_id:
            jql_str = 'issue = "{}" and issuetype in ("Story","Bug") '.format(obj_id)
        else:
            jql_str = 'project = "{}" and issuetype in ("Story","Bug") and fixVersion = "{}" '.format(project, fixver)
        jira = JIRA(server=server, basic_auth=(user_name, pwd))
        if not fields:
            fields = 'summary,reporter,issuetype,project,fixVersions,customfield_10401,customfield_10402'
        elif 'fixVersions' not in fields:
            fields += ',fixVersions'
        object_list = jira.search_issues(jql_str=jql_str, fields=fields, maxResults=200)
        print(len(object_list))
        for issue in object_list:
            fix_ver = issue.fields.fixVersions
            if (fix_ver and str(fix_ver[0]).strip() == str(fixver).strip()) or obj_id:
                obj['object_id'] = issue.key
                obj['object_desc'] = issue.fields.summary
                obj['object_rel'] = str(fix_ver[0]).strip()
                if issue.fields.project:
                    obj['object_track'] = issue.fields.project.name
                # customfield_10401/10402 hold the developer and QA user lists
                if issue.fields.customfield_10401:
                    obj['object_dev'] = ''.join(u.displayName + ' | '
                                                for u in issue.fields.customfield_10401)
                else:
                    obj['object_dev'] = None
                if issue.fields.customfield_10402:
                    obj['object_qa'] = ''.join(u.displayName + ' | '
                                               for u in issue.fields.customfield_10402)
                else:
                    obj['object_qa'] = None
                if issue.fields.issuetype:
                    obj['object_type'] = issue.fields.issuetype.name
                objs.append(obj.copy())
                obj.clear()
    except Exception as e:
        print(e)
    finally:
        # jira stays None if the JIRA() call itself failed
        if jira is not None:
            jira.close()
    return objs
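
A hedged invocation sketch; the project key and fix version are made-up values:

issues = get_objects('PROJ', fixver='1.2.0')  # hypothetical arguments
for item in issues:
    print(item['object_id'], item['object_type'], item['object_rel'])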
Example #11
    def __init__(self, http_method, url, **kwargs):
        self.__retry_times = 1
        self.__retry_max = get_conf('base', 'BASE').get('curl_retry_max', 3)
        self.res = None
        self.reset_retry()

        self._http_method = http_method
        self._url = url
        kwargs['verify'] = False
        self._kwargs = kwargs
        self._data = kwargs.get('json', kwargs.get('data'))
Example #12
def main():
    '''
    Run!
    Prints results to the screen.
    '''
    # Parse args and define some basic params
    opts = conf.get_conf()
    args = parse_args()
    new_stable = args.new_latest
    old_stable = args.old_latest
    new_prev = args.new_previous
    old_prev = args.old_previous

    git_dir = '--git-dir={0}/.git'.format(opts['SALT_REPO_PATH'])
    work_tree = '--work-tree={0}'.format(opts['SALT_REPO_PATH'])
    file_name = '{0}/doc/conf.py'.format(opts['SALT_REPO_PATH'])

    for branch in opts['SALT_BRANCHES']:
        print('Updating release version for {0}'.format(branch))

        # Check out base branch
        _cmd_run(['git', git_dir, work_tree, 'checkout', branch])

        # Create a new branch
        branch_name = 'update_version_doc_{0}'.format(branch)
        _cmd_run(['git', git_dir, work_tree, 'checkout', '-b', branch_name])
        print('New branch: {0}'.format(branch_name))

        # Update release version for "latest"
        if new_stable:
            print('Replacing {0} with {1} in branch {2}'.format(
                old_stable, new_stable, branch))
            _replace_txt(file_name, old_stable, new_stable)

        # Update release version for "previous"
        if new_prev:
            print('Replacing {0} with {1} in branch {2}'.format(
                old_prev, new_prev, branch))
            _replace_txt(file_name, old_prev, new_prev)

        # Set the commit title
        commit_msg = 'Update release versions for the {0} branch'.format(
            branch)

        # Add files to git
        _cmd_run(['git', git_dir, work_tree, 'add', 'doc/conf.py'])

        print('Committing change and pushing branch {0} to {1}\n'.format(
            branch_name, opts['USER_REMOTE']))
        _cmd_run(['git', git_dir, work_tree, 'commit', '-m', commit_msg])
        _cmd_run([
            'git', git_dir, work_tree, 'push', opts['USER_REMOTE'], branch_name
        ])
Example #13
def main():
    global exp_score
    hadoop_conf = conf.get_conf('../conf/hadoop.conf', '=')
    hadoop_bin = hadoop_conf['HADOOP_BIN']
    pig_bin = hadoop_conf['PIG_BIN']
    hadoop_jar = hadoop_conf['HADOOP_JAR']
    hadoop_kpi = hadoop_conf['HADOOP_KPI']
    hadoop_log = hadoop_conf['HADOOP_LOG']
    hadoop_log_file_name = hadoop_conf['HADOOP_LOG_FILE_NAME']

    t_day = (datetime.datetime.now() - datetime.timedelta(days=2)).strftime('%Y%m%d')
    #t_day = time.strftime('%Y%m%d',time.localtime(time.time()))
    
    input_path = hadoop_log + '/' + t_day
    output_path = hadoop_kpi + '/' + t_day + '/' + t_day

    
    domain_cmd = pig_bin + ' -p inputPU=' + output_path + '/BasicField' + ' -p inputPT=' + output_path + '/PageTimeLength' + \
                           ' -p time=' + t_day + ' kpi_domain_d.pig'

    page_cmd = pig_bin + ' -p inputPU=' + output_path + '/BasicField' + ' -p inputPT=' + output_path + '/PageTimeLength' + \
                         ' -p time=' + t_day + ' kpi_page_d.pig'

    source_analysis_cmd = pig_bin + ' -p inputPU=' + output_path + '/BasicField' + ' -p inputPT=' + output_path + '/PageTimeLength' + \
                                    ' -p time=' + t_day + ' kpi_source_analysis.pig'

    region_cmd = pig_bin + ' -p inputPU=' + output_path + '/BasicField' + ' -p inputPT=' + output_path + '/PageTimeLength' + \
                           ' -p time=' + t_day + ' kpi_region_distribute.pig'

    hot_cmd = pig_bin + ' -p inputHeat=' + output_path + '/HeatMap' + ' -p inputHotLink=' + output_path + '/HotLink' + \
                        ' -p time=' + t_day + ' kpi_heatmap_d.pig'

    ads_cmd = pig_bin + ' -p inputAds=' + output_path + '/Ads' + ' -p inputAdsPT=' + output_path + '/AdsTimeLength' + \
                        ' -p time=' + t_day + ' kpi_ads_d.pig'

    loops_cmd = [domain_cmd, page_cmd, source_analysis_cmd, region_cmd, hot_cmd, ads_cmd]

    threads = []
    nloops = range(len(loops_cmd))

    for i in nloops:
        t = threading.Thread(target=loop, args=(i+1, loops_cmd[i], time.time()))
        threads.append(t)

    for i in nloops:  # start the threads
        threads[i].daemon = True
        threads[i].start()

    for i in nloops:  # wait for all worker threads to finish (timeout=None waits forever)
        threads[i].join(timeout=None)
        print('loop', i + 1, 'is alive', threads[i].is_alive())
        print('loop', i + 1, 'is daemon', threads[i].daemon)
Example #14
def get_from_file(filename):
    """Calculate the md5 sum of a file."""
    # system_encoding = sys.getfilesystemencoding()
    # f = open(filename.decode(system_encoding), "rb")
    f = open(filename, mode='rb')
    m = hashlib.md5()
    block_size = get_conf()['checksum']['md5']['block_size']
    while True:
        # Don't read the entire file at once...
        data = f.read(block_size)
        if not data:
            break
        m.update(data)
    f.close()
    res = m.hexdigest()
    return res
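
Reading in block_size chunks keeps memory use flat no matter how large the file is. A hedged usage sketch (the file name is invented):

digest = get_from_file('backup.tar')  # hypothetical file name
print(digest)  # 32-character hex md5 digest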
Example #15
def main():
    '''
    Run!
    '''
    # Parse args and define some basic params
    opts = conf.get_conf()
    args = parse_args()
    if args.list_msg:
        _list_msgs(opts)

    send_email(args.msg,
               args.salt_ver,
               opts,
               args,
               sender=args.sender,
               receiver=args.receiver)
Example #16
def main():
    # pre process
    check_python_version()
    conf = get_conf(get_conf_path())

    # main process
    opts = get_args()
    if opts.plugin == "normal":
        handler = get_handler(opts)
    else:
        plugin_translator, handler = get_plugin(opts)
        if plugin_translator:
            TRANSLATE_API[opts.plugin] = plugin_translator
            opts.api = opts.api if opts.api else opts.plugin

    try:
        t = TRANSLATE_API[opts.api](opts.lang_from, opts.lang_to, handler)
        t.set_parameter_from_conf(conf)
        t.call_method_with_handler()
    except IkazuchiError as err:
        print(err)
Example #17
    def run(self, ctx):
        # Restore the state that was suspended last time
        data_source = self.__data_source
        data_source.get_handler()

        # Poll interval in seconds
        wait_seconds = get_conf('base', 'BASE').get('source_wait_second', 10)
        if not self.boot_conf.get('source_topic'):
            raise ValueError('ERROR SOURCE TOPIC')
        try:
            while self._running:
                position = data_source.mount(ctx)
                if position:
                    data_source.set_position(position)
                    # break
                else:
                    time.sleep(wait_seconds)
        except Exception as err:
            logger().error('Generator raise error: {}, job exited'.format(err))
        finally:
            logger().info('Job finished.\r\n')
Example #18
# -*- coding: utf-8 -*-
# First-time population of the Inciden table: fills in every record for the current year.
import psycopg2
from conf import get_conf

conn, cr, path = get_conf()

# Clear the existing records (everything from the start of the current year)
cr.execute(
    "delete from asistmil_inciden where fecha >= date_trunc('year', now())")

# Rows whose horario field starts with a digit are incomplete records, and they get type 0355
cr.execute("""insert into asistmil_inciden(empleado,fecha,tipo,secuencia)
              select empleado,fecha,'0355',0
              from asistmil_incidencias
              where substring(horario from 1 for 1) in ('1','2','3','4','5','6','7','8','9')
              and registros=1
              and extract(year from fecha) = extract(year from now())
              and fecha <= now()""")

# Rows from the Autorizaciones table come in with Tipo equal to the justification code
cr.execute(
    """insert into asistmil_inciden(empleado,fecha,tipo,tiempo,secuencia)
              select empleado,fecha,lpad(justificante::text, 4, '0'),0,0
              from asistmil_autorizaciones
              where extract(year from fecha) = extract(year from now())
              and fecha <= now()""")

# Late arrivals get type 0599, and the elapsed time is stored
cr.execute(
    """insert into asistmil_inciden(empleado,fecha,tipo,tiempo,secuencia)
Example #19
import datetime
import os

import conf

debug = False


def run_cmd(cmd):
    global debug
    print(cmd)
    if debug:
        pass
    else:
        os.system(cmd)


if __name__ == '__main__':
    hadoop_conf = conf.get_conf('../conf/hadoop.conf', '=')
    hadoop_bin = hadoop_conf['HADOOP_BIN']
    pig_bin = hadoop_conf['PIG_BIN']
    hadoop_jar = hadoop_conf['HADOOP_JAR']
    hadoop_kpi = hadoop_conf['HADOOP_KPI']
    hadoop_log = hadoop_conf['HADOOP_LOG']
    hadoop_log_file_name = hadoop_conf['HADOOP_LOG_FILE_NAME']

    #    t_day = time.strftime('%Y%m%d',time.localtime(time.time()))
    t_day = (datetime.datetime.now() -
             datetime.timedelta(days=0)).strftime('%Y%m%d')
    t_time = (datetime.datetime.now() -
              datetime.timedelta(days=0, hours=2)).strftime('%Y%m%d%H')

    input_path = hadoop_log + '/' + t_day + '/' + hadoop_log_file_name + t_time
    output_path = hadoop_kpi + '/' + t_day + '/' + t_time
Example #20
# coding: utf-8
from conf import get_conf
import monitor
import sched
import time


def run_monitor(conf):
    monitor.MachineMonitor(conf).scan()


if __name__ == '__main__':
    conf = get_conf('monitor.conf')
    scheduler = sched.scheduler(time.time, time.sleep)
    scan_interval = int(conf.get('monitor', 'scan_interval'))

    def monitor_action():
        # Schedule the next run, then scan once
        scheduler.enterabs(time.time() + scan_interval, 0, monitor_action, ())
        run_monitor(conf)

    monitor_action()
    scheduler.run()
Example #21
import random
import hashlib
import json
import time
import uuid

from urllib import request, parse

from conf import get_conf

__youdao_fanyi_api_base_url = 'https://openapi.youdao.com/api'
__youdao_conf = get_conf('youdao')


def truncate(q):
    if q is None:
        return None
    size = len(q)
    return q if size <= 20 else q[0:10] + str(size) + q[size - 10:size]


def youdao_fanyi_query(q, f, t):
    if __youdao_conf is None:
        return

    app_key = __youdao_conf['fanyi_app_key']
    secret_key = __youdao_conf['fanyi_secret_key']

    salt = str(uuid.uuid1())
    curtime = str(int(time.time()))
    sign = app_key + truncate(q) + salt + curtime + secret_key
Example #22
def __init__(self, conf='base'):
    """Constructor: load the RabbitMQ settings."""
    self.__rabbitmq_conf = get_conf('base', 'RABBITMQ').get(conf)
    self.connection = None
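
A hedged sketch of how the stored conf dict might later open the connection, assuming pika and host/port/user/password keys (neither appears in this excerpt):

import pika

def connect(self):
    # Key names below are assumptions about the RABBITMQ config block.
    c = self.__rabbitmq_conf
    credentials = pika.PlainCredentials(c['user'], c['password'])
    self.connection = pika.BlockingConnection(
        pika.ConnectionParameters(host=c['host'], port=c['port'],
                                  credentials=credentials))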
Example #23
def main(t_day):
    hadoop_conf = conf.get_conf('../conf/hadoop.conf', '=')
    hadoop_bin = hadoop_conf['HADOOP_BIN']
    pig_bin = hadoop_conf['PIG_BIN']
    hadoop_jar = hadoop_conf['HADOOP_JAR']
    hadoop_kpi = hadoop_conf['HADOOP_KPI']
    hadoop_log = hadoop_conf['HADOOP_LOG']
    hadoop_harlog = hadoop_conf['HADOOP_HARLOG']

    input_path = hadoop_log + '/' + t_day
    output_path = hadoop_kpi + '/' + t_day + '/' + t_day
    harlog_name = t_day + '.har'
    harlog_path = hadoop_harlog + '/' + t_day + '.har' + '/part*'

    ##     delete the output_path
    cmd = hadoop_bin + ' fs -rm -r ' + output_path
    run_cmd(cmd)

    ##    compress with hadoop archive
    cmd = hadoop_bin + ' archive -archiveName ' + harlog_name + ' -p ' + input_path + ' ' + hadoop_harlog
    try:
        if run_cmd(cmd) == 0:
            pass
            #run_cmd(hadoop_bin + ' fs -rm -r ' + input_path)
        else:
            raise Exception(cmd)
    except Exception as ex:
        logger.error(
            '\033[91m hadoop archive error/exception occurred.\033[0m')
        logger.error("\033[91m Exception:%s\033[0m" % str(ex))
        sys.exit(1)

    ##    execute the mapreduce jar
    cmd = '{0} jar {1} com.jobs.kpi.mapred.KPIMapRed {2} {3}'.format(
        hadoop_bin, hadoop_jar, harlog_path, output_path)
    try:
        ctime = time.time()
        a = run_cmd(cmd)
        if a == 0:
            logger.info('%s success!!' % cmd)
            logger.info('{0} done takes: {1:.2f} seconds.'.format(
                cmd,
                time.time() - ctime))
        else:
            raise Exception(cmd)
    except Exception as ex:
        logger.error('\nSome error/exception occurred.')
        logger.error("\033[91m Exception:%s\033[0m" % str(ex))
        sys.exit(1)
    ##    make sure the hdfs output files exist
    hdfs_outdirs = frozenset(
        ['BasicField', 'PageTimeLength', 'Ads', 'AdsTimeLength', 'HotLink'])
    # confirm the directories exist, whether or not they are empty
    assert_outdir_exist(hadoop_bin, output_path, hdfs_outdirs)

    PIG_CMD_TEMPLATE1 = '{0} -p inputPU={1} -p inputPT={2} -p time={3} {{0}}'.format(
        pig_bin, output_path + '/BasicField', output_path + '/PageTimeLength',
        t_day)
    PIG_CMD_TEMPLATE2 = '{0} -p inputHeat={1} -p inputHotLink={2} -p time={3} {{0}}'.format(
        pig_bin, output_path + '/HeatMap', output_path + '/HotLink', t_day)
    PIG_CMD_TEMPLATE3 = '{0} -p inputAds={1} -p inputAdsPT={2} -p time={3} {{0}}'.format(
        pig_bin, output_path + '/Ads', output_path + '/AdsTimeLength', t_day)

    domain_cmd = PIG_CMD_TEMPLATE1.format('kpi_domain_d.pig')
    page_cmd = PIG_CMD_TEMPLATE1.format('kpi_page_d.pig')
    source_analysis_cmd = PIG_CMD_TEMPLATE1.format('kpi_source_analysis.pig')
    region_cmd = PIG_CMD_TEMPLATE1.format('kpi_region_distribute.pig')
    hot_cmd = PIG_CMD_TEMPLATE2.format('kpi_heatmap_d.pig')
    ads_cmd = PIG_CMD_TEMPLATE3.format('kpi_ads_d.pig')

    loops_cmd = [
        domain_cmd, page_cmd, source_analysis_cmd, region_cmd, hot_cmd, ads_cmd
    ]

    nloops = range(len(loops_cmd))

    cmd_queue = multiprocessing.JoinableQueue()
    for i in nloops:
        worker = Worker(cmd_queue, i + 1, time.time())
        worker.daemon = True
        worker.start()

    for i in nloops:
        cmd_queue.put(loops_cmd[i])

    cmd_queue.join()
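
The Worker class is not shown in this excerpt; a hedged sketch of a JoinableQueue consumer with the same constructor signature (everything here is inferred from the call site):

import multiprocessing

class Worker(multiprocessing.Process):
    # Hypothetical reconstruction of Worker(cmd_queue, loop_no, start_time).
    def __init__(self, queue, loop_no, start_time):
        super().__init__()
        self.queue = queue
        self.loop_no = loop_no
        self.start_time = start_time

    def run(self):
        while True:
            cmd = self.queue.get()      # block until a command arrives
            try:
                run_cmd(cmd)            # assumed helper from the same module
            finally:
                self.queue.task_done()  # lets cmd_queue.join() return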
Example #24

if __name__ == '__main__':
    if len(sys.argv) > 1:
        if not os.path.exists(sys.argv[1]):
            exit('invalid project dir --%s--' % sys.argv[1])
        file = sys.argv[1]
    else:
        file = 'app/stream/main.py'

    Printer.empty(
        '#############################################################')
    Printer.info('RUNNING \'[FANG]PYFLINK-FRAMEWORK\' BY FANG')
    Printer.empty(
        '#############################################################')

    packages = load_packages()
    package_str = ' '.join(packages)
    boot_conf_list = get_conf('boot', 'BOOT')
    Printer.info(
        '+-------------------------------------------------------------------------------+'
    )
    for boot_conf in boot_conf_list:
        Printer.info(
            'BOOT \'{name}\' USE MODULE \'{module}\' ALREADY SUBMIT'.format(
                **boot_conf))
        Printer.info(
            '+-------------------------------------------------------------------------------+'
        )
        stream_boot_join(file, package_str, boot_conf)
Example #25
def main(t_day):
    hadoop_conf = conf.get_conf('../conf/hadoop.conf', '=')
    hadoop_bin = hadoop_conf['HADOOP_BIN']
    pig_bin = hadoop_conf['PIG_BIN']
    hadoop_jar = hadoop_conf['HADOOP_JAR']
    hadoop_kpi = hadoop_conf['HADOOP_KPI']
    hadoop_log = hadoop_conf['HADOOP_LOG']
    hadoop_harlog = hadoop_conf['HADOOP_HARLOG']

    
    input_path = hadoop_log + '/' + t_day
    output_path = hadoop_kpi + '/' + t_day + '/' + t_day
    harlog_name = t_day + '.har'
    harlog_path = hadoop_harlog + '/' + t_day + '.har' + '/part*'

    ##     delete the output_path
    cmd = hadoop_bin + ' fs -rm -r ' + output_path
    run_cmd(cmd)

    ##    compress with hadoop archive
    cmd = hadoop_bin + ' archive -archiveName ' + harlog_name + ' -p ' + input_path + ' ' + hadoop_harlog
    try:
        if run_cmd(cmd) == 0:
            pass
            # run_cmd(hadoop_bin + ' fs -rm -r ' + input_path)
        else:
            raise Exception(cmd)
    except Exception as ex:
        logger.error('\033[91m hadoop archive error/exception occurred.\033[0m')
        logger.error("\033[91m Exception:%s\033[0m" % str(ex))
        sys.exit(1)

    ##    execute the mapreduce jar
    cmd = '{0} jar {1} com.jobs.kpi.mapred.KPIMapRed {2} {3}'.format(
        hadoop_bin, hadoop_jar, harlog_path, output_path)
    try:
        ctime = time.time()
        a = run_cmd(cmd)
        if a == 0:
            logger.info('%s success!!' % cmd)
            logger.info('{0} done takes: {1:.2f} seconds.'.format(cmd, time.time() - ctime))
        else:
            raise Exception(cmd)
    except Exception as ex:
        logger.error('\nSome error/exception occurred.')
        logger.error("\033[91m Exception:%s\033[0m" % str(ex))
        sys.exit(1)
    ##    make sure the hdfs output files exist
    hdfs_outdirs = frozenset(['BasicField', 'PageTimeLength', 'Ads', 'AdsTimeLength', 'HotLink'])
    # confirm the directories exist, whether or not they are empty
    assert_outdir_exist(hadoop_bin, output_path, hdfs_outdirs)

    PIG_CMD_TEMPLATE1 = '{0} -p inputPU={1} -p inputPT={2} -p time={3} {{0}}'.format(
                    pig_bin, output_path+'/BasicField', output_path+'/PageTimeLength', t_day)
    PIG_CMD_TEMPLATE2 = '{0} -p inputHeat={1} -p inputHotLink={2} -p time={3} {{0}}'.format(
                    pig_bin, output_path+'/HeatMap', output_path+'/HotLink', t_day)
    PIG_CMD_TEMPLATE3 = '{0} -p inputAds={1} -p inputAdsPT={2} -p time={3} {{0}}'.format(
                    pig_bin, output_path+'/Ads', output_path+'/AdsTimeLength', t_day)

    domain_cmd = PIG_CMD_TEMPLATE1.format('kpi_domain_d.pig')
    page_cmd = PIG_CMD_TEMPLATE1.format('kpi_page_d.pig')
    source_analysis_cmd = PIG_CMD_TEMPLATE1.format('kpi_source_analysis.pig')
    region_cmd = PIG_CMD_TEMPLATE1.format('kpi_region_distribute.pig')
    hot_cmd = PIG_CMD_TEMPLATE2.format('kpi_heatmap_d.pig')
    ads_cmd = PIG_CMD_TEMPLATE3.format('kpi_ads_d.pig')
    
    loops_cmd = [domain_cmd, page_cmd, source_analysis_cmd, region_cmd, hot_cmd, ads_cmd]

    nloops = range(len(loops_cmd))
    
    cmd_queue = multiprocessing.JoinableQueue()
    for i in nloops:
        worker = Worker(cmd_queue, i+1, time.time())
        worker.daemon = True
        worker.start()

    for i in nloops:
        cmd_queue.put(loops_cmd[i])
    
    cmd_queue.join()
Example #26
def validate(self, token):
    return token == get_conf("WX_CONF")
Example #27
def __combine_url(uri):
    host = get_conf('base', 'URL').get('gateway_domain')
    url = '{}{}'.format(host, uri)
    return url
Example #28
#!/usr/bin/env python3
"""Main module. Gathers all the configuration from the environment, and sets up
both TCP and UDP sockets.
"""

import logging
import selectors

import udp
import tcp
import tls
from conf import get_conf

HOST = get_conf("HOST")
PORT = get_conf("PORT", t=int)
DNS_HOST = get_conf("DNS_HOST")
DNS_PORT = get_conf("DNS_PORT", t=int)


def main():
    """Create the sockets and start listening.
    """

    selector = selectors.DefaultSelector()
    log_level = get_conf("LOG_LEVEL", logging.WARNING, t=int)
    logging.basicConfig(level=log_level)

    tcp_socket = tcp.get_socket(HOST, PORT)
    tcp_handler = tcp.get_handler(
        query_dns_fn=lambda data: tls.query_dns(data, DNS_HOST, DNS_PORT),
        selector=selector,
Example #29
# File: power_test.py
# Author: Jayanth M ([email protected])
# Created: 3/6/2018 6:12 PM
# Project: gtpower
# Description:

import unittest
import requests
import conf

# ALWAYS USE DEV CONFIGURATION FOR TESTING
config = conf.get_conf("dev")


# RUN THE TESTS
class TestPlacesApiUsingRequests(unittest.TestCase):
    def test_CheckUser(self):
        response = requests.get(config['TEST_Url'] + '/checkuser')
        self.assertEqual(response.status_code, 200)
        response = requests.get(config['TEST_Url'] +
                                '/checkuser')  # check for unauthorized here
        self.assertEqual(response.status_code, 403)

    def test_Energy(self):
        response = requests.get(
            config['TEST_Url'] +
            '/facilities/energy/026?start=2016-09-01 00:00:00&stop=2016-09-03 23:59:59'
        )
        self.assertEqual(response.status_code, 200)
        response = requests.get(config['TEST_Url'] + '/facilities/energy/026')
        self.assertEqual(response.status_code, 400)
Example #30
#!/usr/bin/python
import sys
sys.path.append("../../python-testclient")
from conf import get_conf

print("#ifndef MQTT_CONSTANTS_H")
print("#define MQTT_CONSTANTS_H")
for (k, v) in get_conf().items():
    if not isinstance(v, int):
        v = '"%s"' % v
    print("#define MQTT_%s %s" % (k.upper(), v))
print("#endif")
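
For a config of, say, {'host': 'localhost', 'port': 1883}, the script would emit a header along these lines (values invented):

#define MQTT_HOST "localhost"
#define MQTT_PORT 1883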
Example #31
from pypxlib import Table
import psycopg2
import datetime
from conf import get_conf

conn, cr, tpath = get_conf()

table = Table(tpath + "Asignacion.DB")
tup = []
for i, row in enumerate(table):
    if not row.Inicio:
        continue
    anio, semana, dia = row.Inicio.isocalendar()
    cr.execute(
        "select * from asistmil_asignaciones where emp=%s and semana=%s and anio=%s",
        (row.Registro, semana, anio))
    if not cr.fetchall():
        tup.append([i + 1, row.Registro, semana, anio, row.Secuencia])
if tup:
    # mogrify returns bytes under Python 3, so decode before joining
    args_str = ','.join(cr.mogrify("(%s,%s,%s,%s,%s)", x).decode() for x in tup)
    cr.execute(
        "insert into asistmil_asignaciones(id, emp, semana, anio, secuencia) values "
        + args_str)
    conn.commit()
conn.close()
Example #32
import os
import time

import conf

debug = False

def run_cmd(cmd):
    global debug
    print(cmd)
    if debug:
        pass
    else:
        res = os.system(cmd)
        return res

if __name__ == '__main__':
    hadoop_conf = conf.get_conf('../conf/hadoop.conf', '=')
    hadoop_bin = hadoop_conf['HADOOP_BIN']
    pig_bin = hadoop_conf['PIG_BIN']
    hadoop_jar = hadoop_conf['HADOOP_JAR']
    hadoop_kpi = hadoop_conf['HADOOP_KPI']
    hadoop_log = hadoop_conf['HADOOP_LOG']
    hadoop_log_file_name = hadoop_conf['HADOOP_LOG_FILE_NAME']

    # t_day = (datetime.datetime.now() - datetime.timedelta(days=0)).strftime('%Y%m%d')
    t_day = time.strftime('%Y%m%d',time.localtime(time.time()))
    
    input_path = hadoop_log + '/' + t_day
    output_path = hadoop_kpi + '/' + t_day + '/' + t_day

    ##     delete the output_path
    cmd = hadoop_bin + ' fs -rm -r ' + output_path
Example #33
def main():
    '''
    Run!
    Prints results to the screen.
    '''
    # Parse args and define some basic params
    opts = conf.get_conf()
    args = parse_args()

    # replace versions
    repo_dir = opts['REPO_SALTSTACK_DIR']
    file_dir = os.path.join(repo_dir, 'content', 'repo')
    if args.replace:
        branch = '.'.join(args.version.split('.')[:-1])
        pre_ver = branch + '.' + str(int(args.version[-1]) - 1)
        fed_pre_ver = branch + '.' + str(int(args.version[-1]) - 2)
        if branch == '2017.7':
            file = '2017.7.md'
        elif branch == '2018.3':
            file = 'index.md'

        edit_file = os.path.join(file_dir, file)

        # now commit and push changes
        git_dir = '--git-dir={0}/.git'.format(opts['SSE_DIR'])
        work_tree = '--work-tree={0}'.format(opts['SSE_DIR'])

        git_cmd = ['git', git_dir, work_tree]

        # Check out master branch
        _cmd_run(git_cmd + ['checkout', 'master'])

        branch_name = 'repo_update_{0}-2'.format(args.version)
        _cmd_run(git_cmd + ['checkout', '-b', branch_name])
        print('New branch: {0}'.format(branch_name))

        # new date
        if args.date:
            _replace_txt(edit_file,
                         old=pre_ver,
                         new=args.date,
                         replace_line='set release_date')

        _replace_txt(edit_file, old=pre_ver, new=args.version)

        # replace fedora versions for latest
        if 'index' in file:
            _replace_txt(edit_file, old=fed_pre_ver, new=pre_ver)

        commit_msg = 'Update release version and date for repo landing to {0}'.format(
            args.version)
        _cmd_run(git_cmd + ['add', edit_file])
        print('Committing change and pushing branch {0} to {1}\n'.format(
            branch_name, opts['FORK_REMOTE']))
        _cmd_run(git_cmd + ['commit', '-m', commit_msg])
        _cmd_run(git_cmd + ['push', opts['FORK_REMOTE'], branch_name])

    if args.build:
        print('Building Docs')
        print(_cmd_run(['acrylamid', 'co'], cwd=repo_dir))

    if args.staging:
        print('Pushing to staging')
        upload = opts['REPO_UPLOAD_SCRIPT']

        # check we have the appropriate command
        ret = _cmd_run(['bash', upload])

        # deploy
        ssh_cmd = [
            'ssh',
            '-i',
            opts['REPO_DEPLOY_KEY'],
            '{0}@{1}'.format(opts['REPO_DEPLOY_SRV_USR'],
                             opts['REPO_DEPLOY_SRV']),
        ]
        ret = _cmd_run(ssh_cmd + ["/root/deploy_staging_only.sh"])
Example #34
def main():
    '''
    Run!
    Prints results to the screen.
    '''
    # Parse args and define some basic params
    opts = conf.get_conf()
    args = parse_args()
    new_stable = args.new_latest
    old_stable = args.old_latest
    update_stable = args.latest_branch

    new_prev = args.new_previous
    old_prev = args.old_previous
    update_previous = args.previous_branch

    git_dir = '--git-dir={0}/.git'.format(opts['DOC_BUILDS_PATH'])
    work_tree = '--work-tree={0}'.format(opts['DOC_BUILDS_PATH'])
    file_name = '{0}/builddocs/print.sls'.format(opts['DOC_BUILDS_PATH'])

    git_cmd = ['git', git_dir, work_tree]

    print('Updating the release version in builddocs/print.sls')

    # Check out master branch
    _cmd_run(git_cmd + ['checkout', 'master'])

    # Create a new branch
    branch_name = 'update_print_version'
    _cmd_run(git_cmd + ['checkout', '-b', branch_name])
    print('New branch: {0}'.format(branch_name))

    # Update release version for "latest"
    if new_stable:
        print('Replacing latest version {0} with {1}'.format(
            old_stable, new_stable))
        _replace_txt(file_name, old_stable, new_stable)

        # Update the base branch for "latest"
        if update_stable:
            new_base = new_stable.rsplit('.', 1)[0]
            old_base = old_stable.rsplit('.', 1)[0]
            print('Updating latest stable branch {0} with {1}'.format(
                old_base, new_base))
            _replace_txt(file_name, old_base, new_base)

    # Update release version for "previous"
    if new_prev:
        print('Replacing previous version {0} with {1}'.format(
            old_prev, new_prev))
        _replace_txt(file_name, old_prev, new_prev)

        # Update the base branch for "previous"
        if update_previous:
            new_base = new_prev.rsplit('.', 1)[0]
            old_base = old_prev.rsplit('.', 1)[0]
            print('Updating previous stable branch {0} with {1}'.format(
                old_base, new_base))
            _replace_txt(file_name, old_base, new_base)

    # Set the commit title
    commit_msg = 'Update release version in print.sls file'

    # Add files to git
    _cmd_run(git_cmd + ['add', 'builddocs/print.sls'])

    # Commit changes and push up the branch
    print('Committing change and pushing branch {0} to {1}\n'.format(
        branch_name, opts['FORK_REMOTE']))
    _cmd_run(git_cmd + ['commit', '-m', commit_msg])
    _cmd_run(git_cmd + ['push', opts['FORK_REMOTE'], branch_name])
Example #35
import sys

from sqlalchemy import Integer, Float, String, Text, DateTime, text
from sqlalchemy.sql import select, and_
from sqlalchemy.ext.declarative import declarative_base
from flasgger import Swagger
import flask
from flask_cas import CAS, login_required
import flask_restful
import conf  # all configurations are stored here, change individually for development and release configurations.

# Import the right configuration from conf.py, based on whether this is the development or release environment.
# Run 'python3 power_api.py release' to deploy to release; 'python3 power_api.py dev' or plain 'python3 power_api.py' deploys to development.

if __name__ == '__main__':
    env = sys.argv[1] if len(
        sys.argv) > 1 else 'dev'  # always fall back to dev environment
    config = conf.get_conf(env)

swagger_template = {
    "swagger": "2.0",
    "info": {
        "title": config['SWAGGER_Title'],
        "description": config['SWAGGER_Description'],
        "contact": {
            "responsibleOrganization": "GT-RNOC",
            "responsibleDeveloper": "RNOC Lab Staff",
            "email": "*****@*****.**",
            "url": "http://rnoc.gatech.edu/"
        },
        # "termsOfService": "http://me.com/terms",
        "version": "2.0"
    },
Example #36
import json

from tencentcloud.common import credential
from tencentcloud.common.profile.client_profile import ClientProfile
from tencentcloud.common.profile.http_profile import HttpProfile
from tencentcloud.common.exception.tencent_cloud_sdk_exception import TencentCloudSDKException
from tencentcloud.tmt.v20180321 import tmt_client, models

from conf import get_conf
__tencent_conf = get_conf('tencent')


def tencent_fanyi_query(q, f, t):
    if __tencent_conf is None:
        return
    app_key = __tencent_conf['fanyi_app_key']
    secret_key = __tencent_conf['fanyi_secret_key']
    try:
        cred = credential.Credential(app_key, secret_key)
        httpProfile = HttpProfile()
        httpProfile.endpoint = "tmt.tencentcloudapi.com"

        clientProfile = ClientProfile()
        clientProfile.httpProfile = httpProfile
        client = tmt_client.TmtClient(cred, "ap-shanghai", clientProfile)

        req = models.TextTranslateRequest()

        params = json.dumps({
            'SourceText': q,
            'Source': f,
Example #37
def validate(self, token):
    return token == get_conf("WX_CONF")