Example #1
    def test_verify(self):
        # test 1
        vuln_regex = """www\.tgz
web\.log
"""
        rule = FileRule(
            id=9527,
            name='敏感文件泄漏',  # "sensitive file leak"
            key='file-leak',
            risk_id=2,
            risk_name='medium',
            match_type='file',
            regex=vuln_regex,
        )

        vuln_example = ['www.tgz', 'web.log']

        for i in vuln_example:
            vuln_path = os.path.join(BASEDIR, 'tests', 'data', 'engines')
            make_dir(vuln_path)
            vuln_file = os.path.join(vuln_path, i)
            with open(vuln_file, 'wb') as fp:
                fp.write(b'test')
            status, _ = rule.verify(reference_value=vuln_file)
            self.assertTrue(status)
            self.assertEqual(i, _)
            clean_dir(vuln_path)
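Example #1 implies that a FileRule treats its regex as a newline-separated list of file-name patterns and that verify() returns the matched text alongside the status. A minimal illustrative sketch with those semantics (verify_file_rule and its signature are assumptions, not the project's actual FileRule.verify):

import os
import re

def verify_file_rule(regex, reference_value):
    # Treat the rule's regex as newline-separated file-name patterns and
    # return (True, matched_text) for the first pattern that hits.
    basename = os.path.basename(reference_value)
    for pattern in regex.splitlines():
        pattern = pattern.strip()
        if not pattern:
            continue
        m = re.search(pattern, basename)
        if m:
            return True, m.group(0)
    return False, None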
Example #2
    def test_upload_file_and_download_file(self):
        file_name = '{0}.txt'.format(hash_md5('{0}'.format(time.time())))
        file_path = os.path.join(BASEDIR, 'tests', 'data', 'tmp', file_name)
        file_content = 'ftp test.'
        make_dir(os.path.join(BASEDIR, 'tests', 'data', 'tmp'))
        with open(file_path, 'wb') as fp:
            fp.write(file_content.encode('utf-8'))
        self.assertTrue(os.path.isfile(file_path))

        ftp = FTPWork(
            host=FTPWorkTestCase.host,
            port=FTPWorkTestCase.port,
            username=FTPWorkTestCase.username,
            password=FTPWorkTestCase.password,
        )
        # test upload
        self.assertTrue(ftp.upload_file(file_path, file_name))
        # test download
        save_file_local_path = os.path.join(BASEDIR, 'tests', 'data', 'tmp', 'ftp_download.txt')
        ftp.download_file(save_file_local_path, file_name)
        self.assertTrue(os.path.isfile(save_file_local_path))
        # verify the downloaded copy matches the uploaded content
        with open(save_file_local_path, 'rb') as fp:
            c = fp.read()
            self.assertEqual(file_content, c.decode('utf-8'))
        # test delete
        self.assertTrue(ftp.delete_file(file_name))

        clean_dir(os.path.join(BASEDIR, 'tests', 'data', 'tmp'))
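FTPWork is exercised above only through upload_file, download_file and delete_file. A minimal sketch of such a wrapper on top of the standard-library ftplib (SimpleFTPWork is a hypothetical stand-in; the real FTPWork may add retries, passive mode, timeouts, etc.):

from ftplib import FTP

class SimpleFTPWork(object):
    # Illustrative stand-in for FTPWork, built on ftplib.

    def __init__(self, host, port=21, username='', password=''):
        self.ftp = FTP()
        self.ftp.connect(host, port)
        self.ftp.login(username, password)

    def upload_file(self, local_path, remote_name):
        # STOR the local file under the given remote name.
        with open(local_path, 'rb') as fp:
            self.ftp.storbinary('STOR {0}'.format(remote_name), fp)
        return True

    def download_file(self, local_path, remote_name):
        # RETR the remote file and write it to local_path.
        with open(local_path, 'wb') as fp:
            self.ftp.retrbinary('RETR {0}'.format(remote_name), fp.write)
        return True

    def delete_file(self, remote_name):
        self.ftp.delete(remote_name)
        return True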
Example #3
    def test_verify(self):
        vuln_regex = r"password\s*?=\s*?['\"](.+?)['\"]  ### password leak"
        rule = ContentRule(
            id=9527,
            name='密码硬编码',  # "hard-coded password"
            key='hard-coded-password',
            risk_id=2,
            risk_name='medium',
            match_type='content',
            regex=vuln_regex,
            file_ext='.txt',
        )

        vuln_example = [
            'password="******"',
            'password = "******"',
            'password = \'admin1234\'',
        ]
        vuln_path = os.path.join(BASEDIR, 'tests', 'data', 'engines')
        make_dir(vuln_path)
        vuln_file = os.path.join(vuln_path, 'vuln_example.txt')
        for i in vuln_example:
            with open(vuln_file, 'w') as fp:
                fp.write(i)
            status, _ = rule.verify(reference_value=vuln_file)
            self.assertTrue(status)
            self.assertEqual(_, i)
        clean_dir(vuln_path)
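The pattern above ends with "### password leak", yet the plain test strings still match; this suggests the engine strips everything after "###" (treating it as a description) before compiling the pattern. That is an assumption about the rule format; a sketch of such parsing:

import re

def compile_rule_regex(regex):
    # Assumed convention: the text after '###' is a human-readable note,
    # only the part before it is compiled as the pattern.
    pattern = regex.split('###', 1)[0].strip()
    return re.compile(pattern)

matcher = compile_rule_regex(r"password\s*?=\s*?['\"](.+?)['\"]  ### password leak")
assert matcher.search("password = 'admin1234'")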
Example #4
    def test_checkFile(self):
        f_p = os.path.join(BASEDIR, 'tests', 'data', 'tmp')
        f_f = os.path.join(f_p, 'test.txt')
        make_dir(f_p)
        # a missing path should raise
        with self.assertRaises(Exception) as _:
            checkFile('not_found')
        # an existing (even empty) file passes the check
        with open(f_f, 'wb') as fp:
            fp.write(b'')
        self.assertTrue(checkFile(f_f))
        clean_dir(f_p)
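Example #4 pins down the contract of checkFile: it raises for a path that does not exist and returns True for an existing file, even an empty one. A minimal sketch consistent with that contract (check_file is a hypothetical stand-in; the real helper may also check readability or size):

import os

def check_file(path):
    # Raise for missing paths, return True for an existing regular file.
    if not os.path.isfile(path):
        raise Exception('file not found: {0}'.format(path))
    return True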
Example #5
    def execute(self):
        make_dir(RESULT_TMP_DIR)
        cmd_list = [
            '/usr/bin/env',
            _get_python_path(),
            _get_scanner_script(),
        ]

        if self.customize_command:
            cmd_list.extend(self.customize_command)

        if hasattr(self, 'param_cmd'):
            cmd_list.extend(self.param_cmd)

        cmd_list = [str(i) for i in cmd_list]  # subprocess arguments must be str
        self.assertEqual(0, subprocess.call(cmd_list))
        self.verify_result()
        clean_dir(RESULT_TMP_DIR)
Example #6
    def __init_work_dir(self):
        """Prepare the per-task log, project-origin and scan work directories.

        :return:
        """
        scan_work = '{0}'.format(self._task_id or self._project_name)
        self._paths_log = os.path.join(self._work_dir, 'logs', scan_work)
        self._paths_project_origin = os.path.join(
            self._work_dir, 'projects', self._group_key or self._project_name)
        self._project_path = os.path.join(self._paths_project_origin,
                                          self._project_name)
        self._paths_project_scan = os.path.join(self._work_dir, 'tasks',
                                                scan_work)

        if os.path.isdir(self._paths_project_scan):
            clean_dir(self._paths_project_scan)
        make_dir([
            self._paths_log, self._paths_project_origin,
            self._paths_project_scan
        ])
Example #7
    def test_make_dir(self):
        # test 1: a single path and a list of paths
        dir_list = ['/tmp/t/1', ['/tmp/t/2', '/tmp/t/3']]
        for i in dir_list:
            if isinstance(i, list):
                make_dir(i)
                for _ in i:
                    self.assertTrue(os.path.isdir(_))
                    clean_dir(_)
                    self.assertFalse(os.path.isdir(_))
            else:
                make_dir(i)
                self.assertTrue(os.path.isdir(i))
                clean_dir(i)
                self.assertFalse(os.path.isdir(i))

        # test 2: a path that cannot be created (no permission under /) is skipped silently
        dir_list = ['/not_exists_dir_test_123']
        for i in dir_list:
            make_dir(i)
            self.assertFalse(os.path.isdir(i))
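test_make_dir implies that make_dir accepts either a single path or a list of paths, creates them recursively, and swallows failures (the directory under / is simply not created), while clean_dir removes a directory tree. A minimal sketch assuming that behaviour, not the project's exact implementation:

import os
import shutil

def make_dir(paths):
    # Accept a single path or a list/tuple of paths; ignore creation failures.
    if not isinstance(paths, (list, tuple)):
        paths = [paths]
    for path in paths:
        try:
            os.makedirs(path)
        except OSError:
            pass

def clean_dir(path):
    # Remove the directory tree if it exists.
    if os.path.isdir(path):
        shutil.rmtree(path, ignore_errors=True)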
Example #8
    def test_limit_size_verify(self):
        vuln_regex = r"password\s*?=\s*?['\"](.+?)['\"]  ### password leak"
        rule = ContentRule(
            id=9527,
            name='密码硬编码',  # "hard-coded password"
            key='hard-coded-password',
            risk_id=2,
            risk_name='medium',
            match_type='content',
            regex=vuln_regex,
            file_ext='.txt',
            size=1024,
        )

        vuln_path = os.path.join(BASEDIR, 'tests', 'data', 'engines')
        make_dir(vuln_path)
        vuln_file = os.path.join(vuln_path, 'vuln_example_file_size.txt')
        gen_size_file(vuln_file, 1024 * 20)
        with self.assertRaises(FileLimitSizeException) as _:
            rule.verify(reference_value=vuln_file)

        clean_dir(vuln_path)
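gen_size_file only needs to produce a file larger than the rule's 1024-byte limit so that verify() raises FileLimitSizeException. A plausible sketch, assuming the signature is (path, size_in_bytes):

def gen_size_file(path, size):
    # Write `size` bytes of filler so the file exceeds the rule's size limit.
    with open(path, 'wb') as fp:
        fp.write(b'A' * size)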
Example #9
    def __del__(self):
        # Best-effort cleanup; errors during interpreter shutdown are ignored.
        try:
            clean_dir(self.work_dir)
        except:
            pass
Example #10
    def tearDown(self):
        clean_dir(GitOperatorTestCase.work_dir)
Example #11
    def tearDown(self):
        clean_dir(self.profile_path)
Example #12
    def __del__(self):
        clean_dir(paths.UPGRADE_PATH)
Example #13
    def start_upgrade(self):
        """

        :return:
        """
        try:
            logger.info(
                'Start upgrading, check if the local version is consistent with the server version...'
            )
            if self.is_upgrade:
                self.download()
                if os.path.isfile(self.local_upgrade_file):
                    logger.info(
                        'Start decompressing the encryption upgrade package...'
                    )
                    # Decrypted encrypted upgrade package
                    tar = tarfile.open(self.local_upgrade_file, 'r:gz')
                    file_names = tar.getnames()
                    for file_name in file_names:
                        tar.extract(file_name, self._upgrade_path)
                    tar.close()
                    file_list = glob.glob(
                        os.path.join(self._upgrade_path, '*.bin'))
                    save_tgz = ''
                    # Only the first encrypted .bin package is decrypted.
                    for bin_file in file_list:
                        file_name = os.path.splitext(
                            os.path.basename(bin_file))[0]
                        save_tgz = os.path.join(self._upgrade_path,
                                                '{0}.tgz'.format(file_name))
                        self.rsa.decrypt_file(bin_file, save_tgz)
                        break
                    logger.info(
                        'Unzip the encryption upgrade package to complete.')

                    # Decompress and decrypt the compressed package
                    if os.path.isfile(save_tgz):
                        logger.info(
                            'Start decompressing the decryption upgrade package...'
                        )
                        decrypt_path = os.path.join(self._upgrade_path,
                                                    'decrypt')
                        tar = tarfile.open(save_tgz, 'r:gz')
                        file_names = tar.getnames()
                        for file_name in file_names:
                            tar.extract(file_name, decrypt_path)
                        tar.close()

                        logger.info(
                            'Decompression and decryption upgrade package completed'
                        )

                        logger.info('Start syncing scan templates...')
                        # profiles
                        profiles = glob.glob(
                            os.path.join(decrypt_path, '*.xml'))
                        for p in profiles:
                            file_name = os.path.basename(p)
                            dest = os.path.join(paths.ROOT_PROFILE_PATH,
                                                file_name)
                            if os.path.isfile(dest):
                                os.unlink(dest)
                            shutil.copy(p, dest)
                        logger.info('Synchronous scan template completion.')
                        # plugins
                        blacklist_path = os.path.join(decrypt_path, 'plugins',
                                                      'blacklist')
                        if os.path.isdir(blacklist_path):
                            logger.info('Start syncing blacklist plugin...')
                            clean_dir(paths.ROOT_PLUGINS_BLACKLIST_PATH)
                            shutil.copytree(blacklist_path,
                                            paths.ROOT_PLUGINS_BLACKLIST_PATH)
                            logger.info(
                                'Synchronous blacklist plugin completed.')
                        whitelist_path = os.path.join(decrypt_path, 'plugins',
                                                      'whitelist')
                        if os.path.isdir(whitelist_path):
                            logger.info('Start syncing whitelist plugin...')
                            clean_dir(paths.ROOT_PLUGINS_WHITELIST_PATH)
                            shutil.copytree(whitelist_path,
                                            paths.ROOT_PLUGINS_WHITELIST_PATH)
                            logger.info(
                                'Synchronous whitelist plugin completed.')

                        for d in [
                                paths.ROOT_PLUGINS_BLACKLIST_PATH,
                                paths.ROOT_PLUGINS_WHITELIST_PATH
                        ]:
                            module_file = os.path.join(d, '__init__.py')
                            if not os.path.isfile(module_file):
                                with open(module_file, 'wb') as fp:
                                    fp.write(b'')

                    # write version
                    logger.info(
                        'Start updating the current version to v{0}.'.format(
                            self.current_version))
                    with open(paths.PROFILE_VERSION_PATH, 'wb') as fp:
                        fp.write(self.current_version.encode("utf-8"))
                    self.upload_upgrade_status_success()
                    logger.info(
                        'Upgrade completed, current version: v{0}'.format(
                            self.current_version))
                else:
                    self.download()
                    logger.error(
                        'Download upgrade package failed, "{0}" not found.'.
                        format(self._upgrade_info['download_path']))
            else:
                logger.info(
                    '[*] No upgrade required, exit the upgrade program.')
        except Exception as ex:
            import traceback
            self.upload_upgrade_status_failure(
                reason=str(ex), stack_trace=traceback.format_exc())
            logger.critical(ex)
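start_upgrade unpacks two gzip-compressed tarballs with the same member-by-member loop. A small helper capturing that pattern (extract_tgz is a hypothetical name, not part of the project):

import tarfile

def extract_tgz(archive_path, dest_dir):
    # Unpack every member of a .tar.gz archive into dest_dir, mirroring the
    # two extraction loops in start_upgrade.
    with tarfile.open(archive_path, 'r:gz') as tar:
        for member in tar.getnames():
            tar.extract(member, dest_dir)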