コード例 #1
0
    def modulePull(path, version):
        """Ensure the module at *path* is on the requested version and pull.

        Returns the git-pull result code (forced to 1 when a checkout
        happened and the pull reported 2), or 0 on missing path / failure.
        """
        # Version currently checked out locally.
        local_version = FileUtils.getModuleGitVersion(path)
        # Normalise the requested version to the "v"-prefixed tag form.
        wanted = version if version.startswith("v") else "v" + version
        # Debug output comparing local and requested versions.
        print("gitVersion |" + local_version + "|")
        print("version  |" + wanted + "|")
        print(local_version == wanted)

        if not FileUtils.judgeFileExist(path):
            return 0
        os.chdir(path)
        os.system(
            'git -c diff.mnemonicprefix=false -c core.quotepath=false fetch --prune --tags origin'
        )
        # Switch branches only when the local version differs from the target.
        switched = not local_version.endswith(wanted)
        checkout_ok = True
        if switched and not CommandUtils.gitChackOut(path, wanted):
            checkout_ok = False
        if checkout_ok:
            pull_result = CommandUtils.gitPull(path)
            if pull_result == 1 or (pull_result == 2 and switched):
                return 1
            return pull_result
        return 0
コード例 #2
0
    def __hack_maven_settings(fs: FileUtils = FileUtils()):
        """Locate the SDK maven install, compile the Decipher helper when
        missing, and run it to decode JBAC credentials.

        Returns a JbacAuthentication, or None when any command fails.
        """
        probe = OneLiner(None, 'atlas-version | grep "ATLAS Maven"')
        probe(LOG)
        if probe.returncode != Callable.success:
            return None
        # Everything after ": " on the matched line is the maven home path.
        maven_home = probe.line[probe.line.find(':') + 2:]

        target_dir = fs.existing_dir(os.sep.join(['.', 'jmake_src', 'target']))
        classpath = os.sep.join([maven_home, 'lib', '*'])
        if not fs.file_exists(os.sep.join([target_dir, 'Decipher.class'])):
            # Compile the helper once; later runs reuse the class file.
            compiler = OneLiner(
                None,
                '$JAVA_HOME/bin/javac -cp %s:. -d %s Decipher.java' %
                (classpath, target_dir),
                cwd=os.sep.join(['.', 'jmake_src', 'jbac', 'java']))
            compiler(LOG)

        decipher = OneLiner(
            None,
            '$JAVA_HOME/bin/java -cp %s:%s Decipher' % (classpath, target_dir))
        decipher(LOG)
        if decipher.returncode != Callable.success:
            return None
        # Helper prints "login:password".
        credentials = decipher.line.split(':')
        return JbacAuthentication(credentials[0], credentials[1])
コード例 #3
0
ファイル: ListenerLog.py プロジェクト: rsa-tools/rsat-code
    def __init__(self, file_path, verbosity, option):
        """Open the listener log (and trace) files under *file_path*.

        verbosity > 0 enables trace output; verbosity > 1 also enables info.
        *option* is the open mode passed through to FileUtils.openFile.
        Raises ConfigException when a file cannot be created.
        """
        self.verbosity = verbosity
        self.log = True
        # Trace is on for any positive verbosity; info needs a higher level.
        self.trace = self.verbosity > 0
        self.info = self.verbosity > 1

        try:
            if self.log:
                self.logFile = FileUtils.openFile(
                    os.path.join(file_path,
                                 "listener_" + Constants.LOG_FILE_NAME),
                    option)

            if self.trace:
                self.traceFile = FileUtils.openFile(
                    os.path.join(file_path,
                                 "listener_" + Constants.TRACE_FILE_NAME),
                    option)
        # Fixed: 'except Exception, exce' is Python-2-only syntax; the
        # 'as' form works on Python 2.6+ and Python 3.
        except Exception as exce:
            raise ConfigException(
                "Log.__init__ : Unable to create log file in directory '" +
                file_path + "'. From:\n\t---> " + str(exce))
コード例 #4
0
    def __normalise(self, sourceFilename):
        """Stamp DateTimeOriginal when missing, then move the file into the
        destination directory."""
        media = MediaBuilder.build(sourceFilename)
        origin = media.getCreationDate().getOrigin()
        # Only re-stamp when the creation date came from another source.
        if origin != "DateTimeOriginal":
            self.__setDateTimeOriginal(media)
        FileUtils.mv(sourceFilename, self.destinationPath + '/')
コード例 #5
0
 def saveToDB(self, fn, cols_name, data, update_date=''):
     """Persist *data* to a local CSV and/or MySQL per the instance flags."""
     if self.isSaveToLocal:
         csv_path = self.saveDataDir.joinpath(fn + '.csv').as_posix()
         FileUtils.save_pd_csv(csv_path, cols_name, data)
     if self.isSaveToMysql:
         self.push(fn, cols_name, data, update_date)
コード例 #6
0
 def run(self):
     """Classify every pending file, then remove the source directory.

     Per-file failures are delegated to __handleError so one bad file does
     not abort the batch.
     """
     for filename in self.__getFilenamesToClassify():
         try:
             self.__classify(filename)
         # Fixed: a bare 'except:' also swallowed KeyboardInterrupt and
         # SystemExit; catching Exception lets interrupts propagate.
         except Exception:
             self.__handleError(filename)
     FileUtils.removeDir(self.sourcePath)
コード例 #7
0
    def createLogos(self, input_commstruct):
        """Write a .tf definition file and create a logo for each motif that
        has statistics in *input_commstruct*; log motifs with no definition."""
        # Absolute paths of the motif database files.
        db_file_path = [os.path.join(self.dbPath, db_file)
                        for db_file in self.dbFiles]

        motif_name_list = input_commstruct.motifStatistics.keys()
        motif_definition = MotifUtils.getMotifsDefinitionFromTF(
            motif_name_list, db_file_path)
        logos_path = os.path.join(self.outPath,
                                  FinalOutputProcessor.LOGOS_DIR_NAME)
        FileUtils.createDirectory(logos_path)

        for motif_name in motif_name_list:
            if motif_name in motif_definition.keys():
                file_name = motif_name + ".tf"
                def_file_path = os.path.join(logos_path, file_name)
                # Fixed: the original called 'def_file.flush' without
                # parentheses (a no-op attribute access); 'with' guarantees
                # the file is flushed and closed before the logo tool runs.
                with open(def_file_path, "w") as def_file:
                    for line in motif_definition[motif_name]:
                        def_file.write(line)
                RSATUtils.createLogoFromTF(logos_path, file_name, motif_name)
            else:
                Log.log(
                    "FinalOutputProcessor.createLogos : No definition found to create logo for motif : "
                    + motif_name)
コード例 #8
0
 def run(self):
     """Shrink every pending file, then remove the source directory.

     Per-file failures are delegated to __handleError so one bad file does
     not abort the batch.
     """
     for filename in self.__getFilenamesToShrink():
         try:
             self.__shrink(filename)
         # Fixed: a bare 'except:' also swallowed KeyboardInterrupt and
         # SystemExit; catching Exception lets interrupts propagate.
         except Exception:
             self.__handleError(filename)
     FileUtils.removeDir(self.sourcePath)
コード例 #9
0
 def __init__(self):
     """Bind to the target app's first window and default checkbox, using
     the 'order' section of od_config.yaml."""
     super().__init__()
     logging.info('@ Start Auto Refresh Service ... ')
     self.config = FileUtils().load_yaml('od_config.yaml')['order']
     self.app = atomacos.getAppRefByBundleId(self.config['appId'])
     self.window = self.app.windows()[0]
     self.button = self.window.buttons(self.config['defaultCheckBoxTitle'])[0]
コード例 #10
0
ファイル: preprocessor.py プロジェクト: TSS-Rep/preprocessor
 def read(self, paths, concat=True, **read_options):
     """Read *paths* into self.data.

     When concat is True and FileUtils.read returned several frames, they
     are concatenated into one DataFrame; otherwise the result is stored
     as-is.
     """
     data = FileUtils.read(paths, **read_options)
     # isinstance is the idiomatic type test ('type(...) ==' breaks for
     # subclasses); reading once also removes the duplicated call.
     if concat and not isinstance(data, pd.DataFrame):
         data = pd.concat(data, sort=False, ignore_index=True)
     self.data = data
コード例 #11
0
 def run(self):
     """Crawl every page URL, parse it, and append the content to a file."""
     file_utils = FileUtils()
     # Build the URL list, then fetch, parse and persist each page.
     for url in self.getAllUrl():
         html = self.getHtmlInfo(url)
         parsed = self.parseHtmlContent(html)
         file_utils.saveFile("qiushibaike.txt", parsed, "a")
コード例 #12
0
    def __call__(self, logger, file_utils:FileUtils=FileUtils()):
        """Create the studio SVN symlink when it does not exist yet."""
        layout = self.args.layout
        svndir = layout.studio_svn_dir()
        svnlink = layout.studio_svn_link()
        if not file_utils.file_exists(svnlink):
            logger.info("Creating symlink: %s -> %s" % (svnlink, svndir))
            file_utils.symlink(svndir, svnlink)
        return Callable.success
コード例 #13
0
    def __call__(self, logger, file_utils: FileUtils = FileUtils()):
        """Ensure a symlink to the studio SVN directory exists."""
        target = self.args.layout.studio_svn_dir()
        link = self.args.layout.studio_svn_link()
        # Nothing to do when the link is already present.
        if file_utils.file_exists(link):
            return Callable.success
        logger.info("Creating symlink: %s -> %s" % (link, target))
        file_utils.symlink(target, link)
        return Callable.success
コード例 #14
0
    def copyWorkflowResultFileToFinalOutput(self, stats_param, stats_param_attribute, local_attribute, file_pathes):
        """Copy a workflow result file into the final output directory and
        record its output-relative path under *local_attribute*."""
        if stats_param_attribute not in stats_param.keys():
            return
        dir_path = os.path.join(self.outPath, stats_param_attribute)
        file_path = stats_param[stats_param_attribute]

        FileUtils.createDirectory(dir_path)
        FileUtils.copyFile(file_path, dir_path)

        # Path relative to the output root, keyed by the local attribute.
        file_pathes[local_attribute] = os.path.join(
            stats_param_attribute, os.path.basename(file_path))
コード例 #15
0
ファイル: Logs.py プロジェクト: MarioCodes/my-vault
    def convert_into_csv(self, input_file, output_file):
        """Group, validate and clean raw Java log lines, then write a CSV.

        The raw data comes from Java logs pre-filtered with AWK into a .txt
        file, so only the wanted data is present.

        :param input_file: .txt file with all raw Java logs.
        :param output_file: .csv file with only the data we need to recover.
        """
        grouped = self.__get_grouped_lines(input_file)
        # Only emit the CSV when the grouped lists have a consistent size.
        if self.svw_logs.validate_lists_size(grouped):
            cleaned = self.__clean_log_lines(grouped)
            csv_content = FileUtils.construct_csv(self.svw_logs.headers,
                                                  cleaned)
            FileUtils.write_file(output_file, csv_content)
コード例 #16
0
    def create_jmake_credentials_file(args, fs: FileUtils = FileUtils()):
        """Return a closure that appends the default jmake db password to the
        pgpass file (skipping when already present) and chmods it to 0600."""
        def jmake_user_credentials_closure(logger):
            # Skip when the credentials line is already there.
            if fs.file_exists(PGPASS_FILE):
                with open(PGPASS_FILE, 'r') as f:
                    for line in f:
                        if PGPASS_CREDENTIALS in line:
                            logger.info(
                                'Password for user "%s" already exist in %s' %
                                (DB_USER, PGPASS_FILE))
                            return Callable.success

            logger.info('Adding default password for db user "%s"... ' %
                        (DB_USER, ))
            try:
                with open(PGPASS_FILE, 'a') as f:
                    f.write(PGPASS_CREDENTIALS)
                    f.write('\n')

                #  postgres requires this file is with mode 0600 otherwise it won't trust it:
                rc = SystemCallable(args,
                                    'chmod 0600 %s' % PGPASS_FILE)(logger)
                return rc
            except Exception:
                # Fixed: the message used '% file.' -- an invalid conversion
                # that raised ValueError inside the error path -- and
                # misspelled "jmake" as "jmkae".
                logger.error(
                    'Could not add default jmake credentials to %s file.' %
                    PGPASS_FILE)
                return Callable.do_not_proceed

        return jmake_user_credentials_closure
コード例 #17
0
 def moduleClone(path, gitPath):
     """Clone *gitPath* into *path* unless the module already exists.

     Returns False when a required clone fails, True otherwise.
     """
     # Fixed log-message typo: "moduelClone" -> "moduleClone".
     print("moduleClone: " + path)
     if not FileUtils.judgeModuleExist(path):
         if not CommandUtils.gitClone(path, gitPath):
             return False
     return True
コード例 #18
0
    def __init__(self,
                 args=object,
                 path=None,
                 process_utils=ProcessUtils(),
                 file_utils=FileUtils()):
        """Initialise maven invocation state and translate recognised arg
        flags (offline/update/debug/errors/clean) into maven switches."""
        super().__init__(args, cwd=path)

        self.maven_version = None
        self.projects = []
        self.profiles = []
        self.phases = []
        self.properties = dict()
        self.options = []
        self.parallel = False
        self.process_utils = process_utils
        self.project_prefix = ""

        # argparse flag -> maven command-line switch.
        flag_to_option = {
            'mvn_offline': '-o',
            'mvn_update': '-U',
            'mvn_debug': '-X',
            'mvn_errors': '-e'
        }
        for flag, switch in flag_to_option.items():
            if getattr(args, flag, False):
                self.option(switch)
        if getattr(args, 'mvn_clean', False):
            self.phase('clean')
コード例 #19
0
ファイル: JmakeClean.py プロジェクト: linuxscn/mysource
 def __init__(self, fileutils=FileUtils()):
     """Configure the 'cleanall' command: the most thorough jmake clean."""
     super().__init__(fileutils)
     self.fileutils = fileutils
     self.command = 'cleanall'
     self.description = 'Most thorough clean provided by jmake. '
     self.prevent_post_commands = True
     self.check_branch = False
コード例 #20
0
def process_local_test_settings(args, fileutils=FileUtils()):
    """Return a closure that renders localtest.template into a
    localtest.properties for each test project, substituting the instance's
    port, context and test-xml location."""
    def process_local_test_settings_closure(logger):
        logger.info('Preparing local test settings for your new instance...')
        template = os.sep.join([
            'jira-func-tests', 'src', 'main', 'resources', 'localtest.template'
        ])

        # project directory -> directory providing the test xml location.
        template_renderings = {
            'jira-func-tests':
            'jira-func-tests',
            'jira-webdriver-tests':
            'jira-webdriver-tests',
            os.sep.join(['jira-distribution', 'jira-integration-tests']):
            'jira-func-tests'
        }

        for project, xml_location in template_renderings.items():
            resources_dir = fileutils.existing_dir(
                os.sep.join(['.', project, 'src', 'main', 'resources']))
            destination = os.sep.join([resources_dir, 'localtest.properties'])

            # just for unit tests this settings dict is not reused:
            substitutions = {
                '${jira.port}': str(args.port),
                '${jira.context}': args.jira_context,
                '${test.xml.location}': PathUtils.abspath(xml_location)
            }

            logger.debug('Processing ' + template + ' to ' + destination)
            fileutils.filter_file(template, destination, substitutions)
        return Callable.success

    return process_local_test_settings_closure
コード例 #21
0
 def __init__(self, fs: FileUtils = FileUtils(),
              xml: XmlUtils = XmlUtils()):
     """Set up empty module roots and the file/xml helper instances."""
     super().__init__()
     self.fs = fs
     self.xml = xml
     self.roots = []
     self.report_header_logged = False
コード例 #22
0
    def copyMotifResultFileToFinalOutput(self, motif_stats, motifStatistics_attribute, local_attribute, file_pathes):
        """Copy a per-motif result file to the final output directory and
        register its relative path in file_pathes[local_attribute], keyed by
        motif name."""
        if not motif_stats.hasAttribute(motifStatistics_attribute):
            return
        # Lazily create the per-attribute mapping.
        if local_attribute not in file_pathes.keys():
            file_pathes[local_attribute] = {}

        dir_path = os.path.join(self.outPath, motifStatistics_attribute)
        file_path = motif_stats.getAttribute(motifStatistics_attribute)

        FileUtils.createDirectory(dir_path)
        FileUtils.copyFile(file_path, dir_path)

        final_path = os.path.join(motifStatistics_attribute,
                                  os.path.basename(file_path))
        file_pathes[local_attribute][motif_stats.motifName] = final_path
コード例 #23
0
    def initServerQueue(self):
        """Reload the server command queue from its persisted file.

        Each non-comment, non-blank line holds up to five '|**|'-separated
        fields: command, options, and three extra parameters.  Raises
        ExecutionException when the queue file cannot be read.
        """
        queue_file_path = os.path.join(
            self.config[PFConstants.QUEUE_DIR_PARAM],
            PFConstants.SERVER_QUEUE_FILE_NAME)
        if os.path.exists(queue_file_path):
            try:
                commands_list = []
                file = FileUtils.openFile(queue_file_path)
                for line in file:
                    # Defaults for missing trailing fields.
                    command_params = [None, None, 0, "True", None]
                    if not line.isspace(
                    ) and line[0] != PFConstants.COMMENT_CHAR:
                        tokens = line.split("|**|")
                        if len(tokens) > 0 and len(tokens) <= 5:
                            for index in range(len(tokens)):
                                command_params[index] = tokens[index]
                        commands_list.append(command_params)
                file.close()
                # NOTE(review): options are parsed from the LAST line read
                # and reused for every queued command -- confirm this is
                # intended rather than per-command options.
                options = ast.literal_eval(command_params[1])
                for command_params in commands_list:
                    self.addToQueue(command_params[0], options,
                                    command_params[2], command_params[3],
                                    command_params[4])
            # Fixed: 'except IOError, io_exce' is Python-2-only syntax; the
            # 'as' form works on Python 2.6+ and Python 3.
            except IOError as io_exce:
                raise ExecutionException(
                    " PipelineManager.initServerQueue : Unable to read Server queue from file : "
                    + queue_file_path + ". From:\n\t---> " + str(io_exce))
コード例 #24
0
ファイル: JmakeCIReplay.py プロジェクト: linuxscn/mysource
    def process_webdriver(args, executor, fileutils=FileUtils()):
        """Queue a maven run that replays webdriver test failures from the
        given replay file via the local hallelujah client."""
        maven = MavenCallable()
        maven.project('jira-distribution/jira-webdriver-tests-runner')
        maven.option('-am')
        maven.phase('verify')
        maven.profile('hallelujahClient')
        maven.profile('distribution')
        maven.property('maven.test.unit.skip', 'true')

        # Accept an existing path as-is; otherwise look under target/replays.
        if fileutils.file_exists(args.replay_file):
            replay_file = os.path.abspath(args.replay_file)
        else:
            replay_file = os.path.abspath(
                os.sep.join(['target', 'replays', args.replay_file]))
        maven.property('jira.user.extra.jvmargs',
                       '"-Dhallelujah.local.test.list=%s"' % replay_file)
        maven.property('func.mode.plugins')
        maven.property('reference.plugins')

        bamboo_agent_home = os.path.abspath(
            os.sep.join([
                'jira-distribution', 'jira-webdriver-tests-runner', 'target'
            ]))
        maven.property('system.bamboo.agent.home', bamboo_agent_home)
        executor.append(maven.can_run_in_parallel())
コード例 #25
0
 def __call__(self, logger, fileutils=FileUtils()):
     """Persist the inspected branch name to target/.jmake-branch, unless
     the inspector flagged the state as inconsistent."""
     if getattr(self.inspector, 'inconsistent', False):
         return Callable.success
     target_dir = fileutils.existing_dir('target')
     branch_file = os.sep.join([target_dir, '.jmake-branch'])
     with open(branch_file, 'w', encoding='utf-8') as out:
         out.write(self.inspector.branch)
     return Callable.success
コード例 #26
0
    def read(self, paths, **read_options):
        """Read daily and weekly report files into self.daily_data and
        self.weekly_data.

        Paths containing "history" (case-insensitive) are treated as weekly
        workbooks (all sheets, two header rows skipped); everything else is
        read as a daily file (one header row skipped).
        """
        print("--- Reading ---")

        weekly_file_paths = []
        daily_file_paths = []

        # If input is a string put inside a list.
        if type(paths) == str:
            paths = [paths]

        for path in paths:
            if "history" in path.lower():
                weekly_file_paths.append(path)
            else:
                daily_file_paths.append(path)

        # Fixed: previously these names were only bound inside the 'if'
        # branches below, so a missing category raised NameError at the
        # final assignments.
        daily_data = pd.DataFrame()
        weekly_data = pd.DataFrame()

        if daily_file_paths:
            read_options['skiprows'] = 1
            daily_data = FileUtils.read(daily_file_paths, **read_options)
            if type(daily_data) == pd.DataFrame:
                daily_data = [daily_data]
            daily_data = pd.concat(daily_data, sort=False, ignore_index=True)

        if weekly_file_paths:
            for weekly_file_path in weekly_file_paths:
                # Read every sheet, skipping the two header rows.
                read_options['sheet_name'] = None
                read_options['skiprows'] = 2
                read_options['encoding'] = 'latin1'
                weekly_data_temp = FileUtils.read(
                    weekly_file_path,  **read_options)

                # All sheets use the column layout of "Ticket History IBM".
                columns = weekly_data_temp['Ticket History IBM'].columns
                for sheet_name in weekly_data_temp:
                    if sheet_name != "Ticket History IBM":
                        weekly_data_temp[sheet_name].columns = columns

                weekly_data_temp = pd.concat(
                    weekly_data_temp, sort=False, ignore_index=True)

                weekly_data = pd.concat([weekly_data, weekly_data_temp],
                                        sort=False, ignore_index=True)

        self.weekly_data = weekly_data
        self.daily_data = daily_data
コード例 #27
0
    def read(self, paths, concat=True, **read_options):
        """Read *paths* into self.data, always storing a list of DataFrames.

        NOTE(review): the 'concat' argument is accepted but never used here.
        """
        data = FileUtils.read(paths, **read_options)
        # A single DataFrame is wrapped so self.data is uniformly a list.
        self.data = [data] if type(data) == pd.DataFrame else data
コード例 #28
0
ファイル: ListenerLog.py プロジェクト: rsa-tools/rsat-code
    def isLogEmpty():
        """Return True unless an initialized listener log file has content."""
        if not ListenerLog.isInitialized():
            return True
        size = FileUtils.getSize(ListenerLog.logInstance.logFile.name)
        return size == 0
コード例 #29
0
    def execute( self, input_commstructs):
        """Produce the final output for the first input commstruct: resolve
        the motif database files, recreate the output directory, copy motif
        stats/graphs, create logos and write the classification pages.

        NOTE(review): this block is Python 2 only (octal literal 0777 below).

        Raises ExecutionException when inputs or a required parameter are
        missing.
        """
        if input_commstructs == None or len( input_commstructs) == 0:
            raise ExecutionException( "FinalOutputProcessor.execute : No inputs")

        input_commstruct = input_commstructs[0]

        # Retrieve the processor parameters
        self.dbPath = self.getParameter( FinalOutputProcessor.MOTIF_DATABASE_PATH_PARAM)

        # Retrieve the list of motif database files to use
        database_file_line = self.getParameter( FinalOutputProcessor.MOTIF_DATABASE_FILE_LIST_PARAM)
        if database_file_line != None and not database_file_line.isspace():
            file_list = database_file_line.split()
            self.dbFiles = []
            for file_path in file_list:
                self.dbFiles.append( os.path.join( self.dbPath, file_path))
        else:
            raise ExecutionException( "FinalOutputProcessor.getMethodParameters : No motif database file specified in parameter '" + FinalOutputProcessor.MOTIF_DATABASE_FILE_LIST_PARAM + "'")

        # Add the custom motif database files if any
        custom_database_file_line = self.getParameter( FinalOutputProcessor.CUSTOM_MOTIF_DATABASE_FILE_PARAM, False)
        if custom_database_file_line != None and not custom_database_file_line.isspace():
            self.dbFiles.append( custom_database_file_line)

        # Display limit defaults to 1.0 when the optional parameter is absent
        limit_value = self.getParameter( FinalOutputProcessor.DISPLAY_LIMIT_VALUE, False)
        if limit_value == None:
            limit_value = 1.0

        # Prepare the processor output dir (recreated from scratch each run)
        self.outPath = os.path.join( self.component.outputDir, self.component.getComponentPrefix())
        shutil.rmtree( self.outPath, True)
        FileUtils.createDirectory( self.outPath, 0777)

        # Copy motif graph and stats files
        analysis = self.AnalyseMotifStats( input_commstruct)

        # Create motif logos
        self.createLogos( input_commstruct)

        # Output Results
        self.outputClassification( input_commstruct, analysis, limit_value)

        # Copy other information
        FileUtils.copyFile( os.path.join( self.component.outputDir, Constants.PROGRESSION_XSL_FILE), self.outPath)
        FileUtils.copyFile( os.path.join( self.component.outputDir, Constants.PROGRESSION_XML_FILE), self.outPath)
コード例 #30
0
 def __buildDestinationFilename(self, filename, destinationPath):
     """Compute the converted file's destination path, swapping the source
     extension for the configured handbrake output extension."""
     media = MediaBuilder.build(filename)
     subdirectory = FileUtils.getDestinationSubdirectory(media)
     full_path = os.path.join(destinationPath, subdirectory,
                              media.getNextNewFileName())
     base = os.path.splitext(full_path)[0]
     return base + '.' + Config.get('handbrake.destination.extension')
コード例 #31
0
ファイル: JmakeCIReplay.py プロジェクト: linuxscn/mysource
 def __init__(self, fileutils=FileUtils()):
     """Register the 'replay' CI command and its help text."""
     JmakeModule.__init__(self)
     self.fileutils = fileutils
     self.command = 'replay'
     self.description = 'Replay JBAC failure locally. The most common use case is to replay a file prepared by ' \
                        '"ci investigate". The autocomplete should provide these filenames for your convenience. ' \
                        'You can use this to create a "local hallelujah client" to which you can supply a file ' \
                        'containing tests to run and it will run them as if they were read from the JMS queue.'
コード例 #32
0
class MockLayout:
    """Stand-in for the jmake layout object used in tests: every location
    lives under a throwaway temp directory."""

    def __init__(self, ondemand: bool):
        self.temp_dir = tempfile.mkdtemp()
        self.utils = FileUtils()
        self.ondemand = ondemand

    def _path(self, *parts):
        # All mock locations are rooted in the temp directory.
        return os.path.join(self.temp_dir, *parts)

    def tomcat_conf_src(self):
        return self.utils.existing_dir(self._path('src'))

    def tomcat_conf_dest(self):
        return self.utils.existing_dir(self._path('dest'))

    def jira_webapp_dir(self):
        return self._path('jira_webapp_dir')

    def tomcat_work_dir(self):
        return self._path('tomcat_work_dir')

    def tomcat_temp_dir(self):
        return self._path('tomcat_temp_dir')

    def tomcat_dir(self, create):
        return self._path('tomcat_dir')

    def jira_home(self):
        return self._path('jira_home')

    def studio_ititial_data(self):
        # (sic) misspelled name kept for existing callers; see the
        # correctly spelled studio_initial_data below.
        return self._path('jira_home', 'some-data.xml')

    def tomcat_executable(self):
        return os.path.join(self.tomcat_dir(False), 'some-runnable.sh')

    def tomcat_download_dir(self):
        return self._path('jira_home')

    def remove(self):
        # Dispose of the whole mock layout.
        shutil.rmtree(self.temp_dir)

    def studio_initial_data(self):
        return self._path('jira_home', 'some-initial-data.xml')

    def webdav_dir(self):
        return self._path('jira_home', 'webdav')
コード例 #33
0
ファイル: JBAC.py プロジェクト: emperor1983/mysource
    def __hack_maven_settings(fs: FileUtils=FileUtils()):
        """Find the SDK maven install, build the Decipher helper on demand,
        and run it to obtain JBAC credentials (None on any failure)."""
        locator = OneLiner(None, 'atlas-version | grep "ATLAS Maven"')
        locator(LOG)
        if locator.returncode != Callable.success:
            return None
        # The maven home follows ": " on the grepped line.
        maven_home = locator.line[locator.line.find(':') + 2:]

        out_dir = fs.existing_dir(os.sep.join(['.', 'jmake_src', 'target']))
        maven_libs = os.sep.join([maven_home, 'lib', '*'])
        if not fs.file_exists(os.sep.join([out_dir, 'Decipher.class'])):
            # Compile the helper once into the target directory.
            javac = OneLiner(
                None,
                '$JAVA_HOME/bin/javac -cp %s:. -d %s Decipher.java' %
                (maven_libs, out_dir),
                cwd=os.sep.join(['.', 'jmake_src', 'jbac', 'java']))
            javac(LOG)

        java = OneLiner(
            None,
            '$JAVA_HOME/bin/java -cp %s:%s Decipher' % (maven_libs, out_dir))
        java(LOG)
        if java.returncode != Callable.success:
            return None
        # Output is "login:password".
        parts = java.line.split(':')
        return JbacAuthentication(parts[0], parts[1])
コード例 #34
0
    def __generate_report_files(self, data, file_utils: FileUtils, log: Logger, json_writter: JsonWriter):
        """Regenerate the eh-metrics HTML report under target/ and log a
        browsable file:// URL pointing at it."""
        report_output_dir = os.sep.join(["target", "eh-metrics-report"])
        # Start from a clean report directory.
        if file_utils.file_exists(report_output_dir):
            file_utils.remove_dir(report_output_dir)

        skeleton_dir = os.sep.join(["jmake_src", "eh", "metrics", "skeleton", "report"])
        file_utils.copy_tree(skeleton_dir, report_output_dir)

        # Embed the metrics data as an immediately-invoked JS payload.
        payload = '(function() { var data = %s; executeReport(data); })();' % json_writter.as_str(data)
        data_js_file = os.sep.join([report_output_dir, "js", "data.js"])
        file_utils.write_lines(data_js_file, [payload])

        report_file_url = "file://%s/index.html" % pathname2url(file_utils.abs_path(report_output_dir))
        log.info("Report generated to %s, open in browser: %s" % (report_output_dir, report_file_url))
コード例 #35
0
    def test_server_xml_is_filtered(self):
        """server.xml placeholders are substituted while web.xml is copied
        verbatim into the tomcat conf destination."""
        file_utils = FileUtils()
        #        having
        args = Mock(port = 1234, sh_port = 8123, jira_context = 'strange', layout = self.layout, ssl = False)
        context_preparer = ContextPreparer(args)
        web_wml_content = 'just testing web wml'

        src_dir = self.layout.tomcat_conf_src()
        with open(os.path.join(src_dir, ContextPreparer.WEB_XML_NAME), 'w') as web_xml:
            web_xml.write(web_wml_content)
        with open(os.path.join(src_dir, ContextPreparer.SERVER_XML_NAME), 'w') as server_xml:
            for line in SERVER_XML_VARIABLES: server_xml.write(line + '\n')

        #       when
        context_preparer(LOG)

        #       then
        dest_dir = self.layout.tomcat_conf_dest()
        web_xml_dest = os.path.join(dest_dir, ContextPreparer.WEB_XML_NAME)
        self.assertTrue(os.path.lexists(web_xml_dest), 'Expected %s file to be created' % web_xml_dest)
        self.assertListEqual(file_utils.read_lines(web_xml_dest), [web_wml_content])

        server_xml_dest = os.path.join(dest_dir, ContextPreparer.SERVER_XML_NAME)
        self.assertTrue(os.path.lexists(server_xml_dest), 'Expected %s file to be created' % server_xml_dest)
        self.assertListEqual(file_utils.read_lines(server_xml_dest),
            [str(args.port), str(args.sh_port), args.jira_context, self.layout.jira_webapp_dir(), self.layout.tomcat_work_dir()])
コード例 #36
0
ファイル: GenerateDict.py プロジェクト: vulgatecn/webscan1
 def _apply_regex_engine(self, lines):
     # One pass of the DictParser regex engine over *lines*; lines that
     # yield no parser output are kept unchanged.
     fuzz_lst = []
     for line_ in lines:
         parser = DictParser(line_)
         wyparser_result = parser.parse()
         if wyparser_result:
             for parser_line in wyparser_result:
                 fuzz_lst.append(parser_line)
         else:
             fuzz_lst.append(line_)
     return fuzz_lst

 def parser(self):
     """Load the dictionary file, apply any fixed-string replacements from
     self.replace_dict, then expand every line with the regex engine.

     Returns the fully expanded fuzzing wordlist.  (The two previously
     duplicated expansion loops are unified in _apply_regex_engine.)
     """
     # Read the dictionary file into memory.
     tmp_dict = [line for line in FileUtils.getLines(self.dicfile)]
     # Apply each configured fixed replacement across the whole dictionary.
     if len(self.replace_dict) >= 1:
         for key in self.replace_dict.keys():
             replace_value = self.replace_dict[key]
             tmp_dict = [
                 tmp_line.replace(key, replace_value)
                 if key in tmp_line else tmp_line
                 for tmp_line in tmp_dict
             ]
     return self._apply_regex_engine(tmp_dict)
コード例 #37
0
 def __init__(self, ondemand: bool):
     """Create a fresh temp directory and file helper for this mock layout."""
     self.ondemand = ondemand
     self.temp_dir = tempfile.mkdtemp()
     self.utils = FileUtils()
コード例 #38
0
        def clustered_info(_, fs: FileUtils=FileUtils()):
            """Print the status of all running clustered instances: ports
            (colored by open/closed), home directories and /status state,
            grouped by (cluster-name, cluster-hash).

            Returns Callable.success in all cases.
            """
            files = fs.listdir(fs.existing_dir(ClusterContext.CLUSTERED_INFO_DIR))
            clusters = {}

            # Group each instance status JSON file by its cluster identity.
            for file in files:
                if file.startswith('instance-'):

                    with open(os.sep.join([ClusterContext.CLUSTERED_INFO_DIR, file]), 'r') as f:
                        status = JSONDecoder().decode(f.read())

                    key = (status['cluster-name'], status['cluster-hash'])

                    if not key in clusters:
                        clusters[key] = [status]
                    else:
                        clusters[key].append(status)

            if clusters:

                # Probe which local ports are currently open.
                checker = AllPortsChecker()
                checker(Logger().set_none())

                red_ports =  []

                # Render a port green when open; red (and remembered) when not.
                def portprint(port):
                    try:
                        p = int(port)
                        if p in checker.get_open_ports():
                            return Logger.color_text(port, Logger.L_ONLINE)
                        else:
                            red_ports.append(p)
                            return Logger.color_text(port, Logger.L_OFFLINE)
                    except Exception as _:
                        return port

                # Shorten absolute home paths relative to the working dir.
                def homeprint(home:str):
                    return home.replace(fs.abs_path('.'), '.')

                # Query the instance /status endpoint; 'NOT RUNNING' on error.
                def serverstatus(url:str):
                    try:
                        response = urlopen(Request(url + '/status', headers = {
                            'Accept': 'application/json'
                        }))
                        jsonResponse = json.loads(response.read().decode())
                        return jsonResponse['state']
                    except Exception as _:
                        return 'NOT RUNNING'


                # Print clusters sorted by name, instances by start time.
                for cluster_info in sorted(clusters.keys(), key=lambda tpl: tpl[0]):
                    cluster_name, cluster_hash = cluster_info
                    print('Cluster %s "%s" (%d instances):' % (cluster_hash, cluster_name, len(clusters[cluster_info])))

                    for instance in sorted(clusters[cluster_info], key=lambda instance: instance['start-time']):
                        print(' -> %s %s ' % (instance['instance-hash'], instance['url']))
                        print('     http: %s ctrl: %s debug: %s mcast: %s peer: %s' %
                              (portprint(instance['http-port']), portprint(instance['shutdown-port']), portprint(instance['debug-port'] if 'debug-port' in instance else 'n/a'),
                               instance['cluster-multicast-port'], portprint(instance['cluster-peer-cache-port'])))
                        print('     home: %s shared: %s' % (homeprint(instance['jira-home']), homeprint(instance['shared-home'])))
                        print('     shutdown: ./jmake run shutdown --instance-name %s' % instance['instance-hash'])
                        print('     status %s' % (serverstatus(instance['url'])))

                    print('')

                # Warn when some instance ports are still closed/red.
                if red_ports:
                    print('Wait until all ports are %s before running more instances to avoid port clashes.' % Logger.color_text('green', Logger.L_ONLINE))
                else:
                    print('You can now run more instances with %s' % Logger.color_text('./jmake clustered run', Logger.L_INFO))

            else:
                print('There are no clustered instances running. Run some with %s' % Logger.color_text('./jmake clustered run', Logger.L_INFO))

            return Callable.success