Example #1
def remove_logger(config, found_workers):
    import yaml
    NN = len(found_workers) - 1

    if NN == 0:
        elog.warn("no logger can be removed")
        return

    loggerPrefix = found_workers[0].split('-')[0]

    delLoggerName = "%s-%d" % (loggerPrefix, NN)

    while delLoggerName not in found_workers and NN < LOGGER_WORKERS_MAX:
        NN = NN + 1
        delLoggerName = "%s-%d" % (loggerPrefix, NN)

    if delLoggerName not in found_workers:
        elog.warn("no logger can be removed")
        return

    # remove file loggerNN:
    loggerNN = os.path.join(config['loggers_abspath'], "%s.py" % delLoggerName)
    loggerNNPosition = os.path.join(config['loggers_abspath'], ".%s.position" % delLoggerName)

    elog.info("%s: %s", delLoggerName, loggerNN)

    (fr, fd) = (None, None)
    try:
        loggingConfigYaml = config['logger']['logging_config']
        loggingConfigYamlDefault = "%s.0" % loggingConfigYaml

        shutil.copy(loggingConfigYaml, loggingConfigYamlDefault)

        fr = open(loggingConfigYaml)
        cfg = yaml.safe_load(fr)
        fr.close()
        fr = None

        del cfg['loggers'][delLoggerName]

        fd = util.open_file(loggingConfigYaml)
        yaml.dump(cfg, fd, default_flow_style=False)
        fd.close()
        fd = None

        os.remove(loggerNN)
        os.remove(loggerNNPosition)

        shutil.copy(loggingConfigYaml, loggingConfigYamlDefault)
        elog.info("success: %s", delLoggerName)
    except:
        shutil.copy(loggingConfigYamlDefault, loggingConfigYaml)
        elog.error("failed: %s", delLoggerName)
        pass
    finally:
        if fr:
            fr.close()
        if fd:
            fd.close()
    pass
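remove_logger edits the logging YAML under a backup-and-restore guard: the config is copied to a "<name>.0" file before editing, and the backup is copied back if anything fails. A minimal sketch of that pattern in isolation, assuming PyYAML; the helper name edit_yaml_with_backup is hypothetical:

import shutil
import yaml

def edit_yaml_with_backup(yaml_path, edit_func):
    # hypothetical helper: back up the config, apply edit_func to the
    # parsed dict, and restore the backup if anything goes wrong
    backup_path = "%s.0" % yaml_path
    shutil.copy(yaml_path, backup_path)
    try:
        with open(yaml_path) as fr:
            cfg = yaml.safe_load(fr)
        edit_func(cfg)
        with open(yaml_path, 'w') as fw:
            yaml.safe_dump(cfg, fw, default_flow_style=False)
    except Exception:
        # roll back to the untouched copy
        shutil.copy(backup_path, yaml_path)
        raise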
Example #2
def add_logger(config, found_workers):
    import yaml
    from copy import deepcopy

    NN = len(found_workers)

    if NN > LOGGER_WORKERS_MAX:
        elog.warn("too many loggers(>%d) to add", LOGGER_WORKERS_MAX)
        return

    loggerPrefix = found_workers[0].split('-')[0]

    newLoggerName = "%s-%d" % (loggerPrefix, NN)
    while newLoggerName in found_workers:
        NN = NN + 1
        newLoggerName = "%s-%d" % (loggerPrefix, NN)

    # add loggerNN:
    logger0 = os.path.join(config['loggers_abspath'], "%s.py" % loggerPrefix)
    loggerNN = os.path.join(config['loggers_abspath'], "%s.py" % newLoggerName)

    elog.info("%s: %s", newLoggerName, loggerNN)

    (fr, fd) = (None, None)
    try:
        loggingConfigYaml = config['logger']['logging_config']
        loggingConfigYamlDefault = "%s.0" % loggingConfigYaml

        shutil.copy(loggingConfigYaml, loggingConfigYamlDefault)

        fr = open(loggingConfigYaml)
        cfg = yaml.safe_load(fr)
        fr.close()
        fr = None

        fd = util.open_file(loggingConfigYaml)

        cfg['loggers'][newLoggerName] = deepcopy(cfg['loggers'][loggerPrefix])

        yaml.dump(cfg, fd, default_flow_style=False)

        fd.close()
        fd = None

        shutil.copy(logger0, loggerNN)

        shutil.copy(loggingConfigYaml, loggingConfigYamlDefault)
        elog.info("success: %s", newLoggerName)
    except:
        shutil.copy(loggingConfigYamlDefault, loggingConfigYaml)
        elog.error("failed: %s", newLoggerName)
        pass
    finally:
        if fr:
            fr.close()
        if fd:
            fd.close()
    pass
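A sketch of how add_logger might be invoked. The config keys shown are the ones the function reads; the concrete paths and worker names are hypothetical, and the call assumes worker.py and a 'worker' entry already exist:

config = {
    'loggers_abspath': '/opt/app/loggers',                        # hypothetical path
    'logger': {'logging_config': '/opt/app/conf/logging.yaml'},   # hypothetical path
}
found_workers = ['worker-0', 'worker-1']    # existing worker module names

# copies worker.py to worker-2.py and adds a 'worker-2' entry
# (cloned from 'worker') to logging.yaml
add_logger(config, found_workers)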
Example #3
def sweep_dir(path, srcs, results):
    dname = os.path.basename(path)

    if dname not in ignore_dirs:
        filelist = os.listdir(path)
        filelist.sort(key=lambda x: x[0:20])

        for f in filelist:
            pf = os.path.join(path, f)

            if util.dir_exists(pf):
                sweep_dir(pf, srcs, results)
            elif util.file_exists(pf):
                _, ext = os.path.splitext(f)

                passed_filters = False

                if f not in ignore_files and ext not in ignore_exts:
                    passed_filters = True

                if len(only_exts) > 0:
                    if ext not in only_exts:
                        passed_filters = False

                if passed_filters:
                    fd = None
                    try:
                        fd = open(pf, 'r')
                        lines = fd.readlines()
                        lineno = 0
                        for line in lines:
                            lineno += 1

                            for src in srcs:
                                if line.find(src) != -1:
                                    elog.info("found '%s': [%s:%d]", src,
                                              os.path.relpath(pf, APPPATH),
                                              lineno)
                                    elog.force_clean("%s", line)
                                    if pf not in results:
                                        results.append(pf)
                    except:
                        elog.error("%r: %s", sys.exc_info(), pf)
                    finally:
                        util.close_file_nothrow(fd)
                else:
                    #elog.warn("ignore file: %s", pf)
                    pass
    else:
        elog.warn("ignore path: %s", path)
        pass
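sweep_dir recurses through a directory, greps every kept file for the given source strings, and appends each matching file path to results. A minimal usage sketch, valid only inside the module that defines ignore_dirs, ignore_files, ignore_exts and only_exts; the path and search strings are hypothetical:

results = []
sweep_dir('/opt/app/src', ['TODO', 'FIXME'], results)

for hit in results:
    print(hit)    # files containing at least one of the search strings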
Example #4
def validate_options_config(options):
    elog.info("validate options...")

    if not options.name:
        options.name = options.artifact.replace("-", "")

    if not options.packagename:
        options.packagename = options.group + "." + options.name

    if options.packaging.capitalize() not in ("Jar", "War"):
        raise Exception("packaging not Jar or War")

    options.packaging = options.packaging.capitalize()

    if options.java != "8":
        raise Exception("java version not 8")

    if not options.context_path:
        options.context_path = "/"

    if options.context_path == "$name":
        options.context_path = options.name
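A sketch of the defaulting behaviour, using a plain namespace in place of the real options object; the field values are hypothetical:

from types import SimpleNamespace

options = SimpleNamespace(
    name=None, artifact='demo-service', group='com.example',
    packagename=None, packaging='jar', java='8',
    context_path='$name')

validate_options_config(options)

# name         -> 'demoservice'             (artifact with '-' removed)
# packagename  -> 'com.example.demoservice'
# packaging    -> 'Jar'                     (capitalized; only Jar/War accepted)
# context_path -> 'demoservice'             ('$name' resolved to name)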
Example #5
def create_sb2web_project(appConfig, options):
    artifactDir = os.path.join(appConfig.projectsDir, options.artifact)

    elog.info("starting create project: %s", artifactDir)

    if util.dir_exists(artifactDir):
        util.warn("artifact already exists: %s" % artifactDir)
        if not options.force:
            util.warn("using '--force' to ovewrite it")
            sys.exit(0)
        pass

    try:
        shutil.rmtree(artifactDir)
    except:
        pass

    pairConfig = (util.DotDict(), util.DotDict())

    # load the template project's configuration file
    templateDict = {}
    templateYaml = os.path.join(appConfig.sb2webRoot, "j2template.yaml")
    if util.file_exists(templateYaml):
        fd = open(templateYaml)
        templateDict = yaml.safe_load(fd.read())
        fd.close()

    renderConfig = util.DotDict(
        LICENSE_HEADER=read_file_content(
            os.path.join(APPHOME, 'license_header.txt')),
        j2envRoot=appConfig.sb2webRoot,
        j2env=Environment(loader=FileSystemLoader(appConfig.sb2webRoot)),
        templateDict=templateDict,
        springbootVer=options.springboot,
        groupId=options.group,
        artifactId=options.artifact,
        artifactName=options.name,
        artifactVer=options.ver,
        description=options.description,
        packageName=options.packagename,
        packaging=options.packaging,
        javaVersion=options.java,
        serverPort=options.port,
        contextPath=options.context_path)

    # copy the directory tree, rendering template files along the way
    util.copydirtree(appConfig.sb2webRoot, artifactDir, pairConfig, True,
                     copy_template, renderConfig)

    elog.info("success create project: %s", artifactDir)

    if options.run:
        elog.info("starting run: %s", artifactDir)
        start_run_project(artifactDir)
    pass
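The copy_template callback itself is not shown above; a typical Jinja2 render step under the same Environment/FileSystemLoader setup looks like the following sketch. The template root, template name, variable values and output path are illustrative:

from jinja2 import Environment, FileSystemLoader

j2env = Environment(loader=FileSystemLoader('/opt/app/sb2web'))   # hypothetical root
template = j2env.get_template('pom.xml')                          # hypothetical template

rendered = template.render(
    groupId='com.example',
    artifactId='demo-service',
    springbootVer='2.3.4.RELEASE')

with open('/tmp/demo-service/pom.xml', 'w') as fd:
    fd.write(rendered)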
Example #6
def main(config, parser):
    import utils.logger as logger

    (options, args) = parser.parse_args(args=None, values=None)

    logConfigDict = logger.set_logger(config['logger'], options.log_path, options.log_level)

    loggers = {}

    if config['loggers'] and len(config['loggers']):
        loggers = load_logger_workers('loggers', config['loggers'], {
                'logger_config' : logConfigDict,
                'logger_stash' : options.logger_stash,
                'batch_rows' : options.batch_rows,
                'end_time' : options.end_time,
                'end_rowid' : options.end_rowid
            })

    if len(loggers) > LOGGER_WORKERS_MAX:
        elog.error("too many logger workers. please increase LOGGER_WORKERS_MAX and try!")
        exit(-1)

    found_workers = list_logger_workers(logConfigDict, config['loggers_abspath'])

    if options.list_logger_workers:
        for logger_worker in found_workers:
            elog.info("found worker: %s (%s/%s.py)", logger_worker, config['loggers_abspath'], logger_worker)
        elog.force("total %d workers: %r", len(found_workers), found_workers)
        return

    if options.add_logger:
        add_logger(config, found_workers)
        return

    if options.remove_logger:
        remove_logger(config, found_workers)
        return

    if len(loggers) == 0 and options.force:
        loggers = load_logger_workers('loggers', found_workers, {
                'logger_config' : logConfigDict,
                'logger_stash' : options.logger_stash,
                'batch_rows' : options.batch_rows,
                'end_time' : options.end_time,
                'end_rowid' : options.end_rowid
            })

    if options.reset_logger_position:
        if len(loggers):
            reset_logger_position(loggers, config['loggers_abspath'], options.start_time, options.start_rowid)
        else:
            elog.error("--reset-position ignored: logger worker not found. use --force for all.")
        pass

    if options.startup:
        if len(loggers):
            startup(loggers, config)
        else:
            elog.error("--startup ignored: logger worker not found. use --force for all.")
        pass

    pass
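main() reads its switches from an optparse-style parser passed in by the caller. A sketch of how such a parser could be declared, covering only a subset of the destinations the code reads; the flag spellings are assumptions:

from optparse import OptionParser

parser = OptionParser()
parser.add_option('--log-path', dest='log_path')
parser.add_option('--log-level', dest='log_level', default='INFO')
parser.add_option('--list-workers', dest='list_logger_workers', action='store_true')
parser.add_option('--add-logger', dest='add_logger', action='store_true')
parser.add_option('--remove-logger', dest='remove_logger', action='store_true')
parser.add_option('--force', dest='force', action='store_true')
parser.add_option('--startup', dest='startup', action='store_true')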
Example #7
    def init_data(self, logfile):
        self.restore_position()

        if not util.dir_exists(self.log_prefix):
            elog.warn("create dir for stash log: %s", self.log_prefix)
            os.makedirs(self.log_prefix)

        elog.debug('log config: %r', self.dictcfg)
        elog.info('stash prefix: %s', self.log_prefix)
        elog.info('start tstamp: %d', self.start_tstamp)
        elog.info('start rowid: %d', self.start_rowid)
        elog.info('batch rows: %d', self.batch_rows)

        file_dests = os.path.join(self.plugins_dir, 'config', 'dests.csv')
        file_proxys = os.path.join(self.plugins_dir, 'config', 'proxys.csv')
        file_keywds = os.path.join(self.plugins_dir, 'config', 'keywds.csv')

        elog.info("dests file: %s", file_dests)
        elog.info("proxys file: %s", file_proxys)
        elog.info("keywds file: %s", file_keywds)

        with open(file_dests, 'r') as fd:
            dests = fd.readlines()

        with open(file_proxys, 'r') as fd:
            proxys = fd.readlines()

        with open(file_keywds, 'r') as fd:
            keywds = fd.readlines()

        self.dests = []
        for line in dests:
            # id, ip, port, host
            # 100005,67.64.46.91,80,www.zhibo8.cc
            self.dests.append(tuple(line.strip('\n').split(',')))
        del dests

        self.proxys = []
        for line in proxys:
            # ip, port, type
            # 121.232.144.158,9000,HTTP
            self.proxys.append(tuple(line.strip('\n').split(',')))
        del proxys

        self.keywds = []
        for line in keywds:
            # id, word
            self.keywds.append(tuple(line.strip('\n').split(',')))
        del keywds

        self.max_dests = len(self.dests) - 1
        self.max_proxys = len(self.proxys) - 1
        self.max_keywds = len(self.keywds) - 1

        # update dictcfg with logfile
        elog.update_log_config(self.dictcfg, self.logger_name, logfile, 'INFO')

        # reload config
        logging.config.dictConfig(self.dictcfg)

        # update logger
        self.logger = logging.getLogger(self.logger_name)
        self.logfile = logfile

        (self.a, self.b, self.c, self.d, self.p) = ((1, 220), (10, 230), (20, 240), (30, 250), (10000, 60000))

        self.fields = (
            'rowid',
            'timestr',
            'timeint',
            'destid',
            'sourip',
            'sourport',
            'destip',
            'destport',
            'desturl',
            'proxyip',
            'proxyport',
            'proxytype',
            'keywdid')
        pass
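The three CSV files are parsed by hand with strip/split above; the standard csv module yields the same tuples and also tolerates Windows line endings. A sketch of that alternative for the dests file, with a hypothetical path:

import csv

# each row of dests.csv holds: id, ip, port, host
with open('/opt/app/plugins/config/dests.csv') as fd:
    dests = [tuple(row) for row in csv.reader(fd)]

# e.g. ('100005', '67.64.46.91', '80', 'www.zhibo8.cc')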
Example #8
    def stop(self, pname):
        elog.info("stop %s on %s", pname, util.nowtime())
        self.statDict[pname + ':stop-time'] = time.time()
        pass
Example #9
    def start(self, pname):
        elog.info("start %s on %s", pname, util.nowtime())
        self.statDict[pname + ":start-time"] = time.time()
        pass