Example #1
    def test_verify(self):
        # test 1: file-name patterns that the rule should flag
        vuln_regex = r"""www\.tgz
web\.log
"""
        rule = FileRule(
            id=9527,
            name='敏感文件泄漏',  # "sensitive file disclosure"
            key='file-leak',
            risk_id=2,
            risk_name='medium',
            match_type='file',
            regex=vuln_regex,
        )

        vuln_example = ['www.tgz', 'web.log']

        for i in vuln_example:
            vuln_path = os.path.join(BASEDIR, 'tests', 'data', 'engines')
            make_dir(vuln_path)
            vuln_file = os.path.join(vuln_path, i)
            with open(vuln_file, 'wb') as fp:
                fp.write(b'test')
            status, _ = rule.verify(reference_value=vuln_file)
            self.assertTrue(status)
            self.assertEqual(i, _)
            clean_dir(vuln_path)
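The FileRule implementation is not reproduced on this page. A minimal sketch of the file-name check that test_verify exercises, assuming the rule matches the file's base name against each non-empty line of the regex block (verify_filename is an illustrative name, not the project's API):

import os
import re

def verify_filename(regex_block, reference_value):
    """Return (True, file_name) when the base name matches any pattern line."""
    file_name = os.path.basename(reference_value)
    for pattern in regex_block.splitlines():
        pattern = pattern.strip()
        if pattern and re.search(pattern, file_name):
            return True, file_name
    return False, None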
Example #2
    def test_upload_file_and_download_file(self):
        file_name = '{0}.txt'.format(hash_md5('{0}'.format(time.time())))
        file_path = os.path.join(BASEDIR, 'tests', 'data', 'tmp', file_name)
        file_content = 'ftp test.'
        make_dir(os.path.join(BASEDIR, 'tests', 'data', 'tmp'))
        with open(file_path, 'wb') as fp:
            fp.write(file_content.encode('utf-8'))
        self.assertTrue(os.path.isfile(file_path))

        ftp = FTPWork(
            host=FTPWorkTestCase.host,
            port=FTPWorkTestCase.port,
            username=FTPWorkTestCase.username,
            password=FTPWorkTestCase.password,
        )
        # test upload
        self.assertTrue(ftp.upload_file(file_path, file_name))
        # test download
        save_file_local_path = os.path.join(BASEDIR, 'tests', 'data', 'tmp', 'ftp_download.txt')
        ftp.download_file(save_file_local_path, file_name)
        self.assertTrue(os.path.isfile(save_file_local_path))
        # verify the downloaded copy matches the uploaded content
        with open(save_file_local_path, 'rb') as fp:
            c = fp.read()
            self.assertEqual(file_content, c.decode('utf-8'))
        # test delete
        self.assertTrue(ftp.delete_file(file_name))

        clean_dir(os.path.join(BASEDIR, 'tests', 'data', 'tmp'))
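FTPWork itself is not listed here. A minimal sketch of an equivalent wrapper built on the standard library's ftplib, exposing the same upload_file / download_file / delete_file calls the test uses (the class name and argument defaults are assumptions, not the project's implementation):

import ftplib

class SimpleFTP(object):
    def __init__(self, host, port=21, username='', password='', timeout=30):
        self.conn = ftplib.FTP()
        self.conn.connect(host, port, timeout)
        self.conn.login(username, password)

    def upload_file(self, local_path, remote_name):
        # store the local file under the given remote name
        with open(local_path, 'rb') as fp:
            self.conn.storbinary('STOR {0}'.format(remote_name), fp)
        return True

    def download_file(self, local_path, remote_name):
        # fetch the remote file and write it to the local path
        with open(local_path, 'wb') as fp:
            self.conn.retrbinary('RETR {0}'.format(remote_name), fp.write)
        return True

    def delete_file(self, remote_name):
        self.conn.delete(remote_name)
        return True

    def close(self):
        self.conn.quit()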
    def test_verify(self):
        vuln_regex = r"password\s*?=\s*?['\"](.+?)['\"]  ### password leak"
        rule = ContentRule(
            id=9527,
            name='密码硬编码',  # "hard-coded password"
            key='hard-coded-password',
            risk_id=2,
            risk_name='medium',
            match_type='content',
            regex=vuln_regex,
            file_ext='.txt',
        )

        vuln_example = [
            'password="******"',
            'password = "******"',
            'password = \'admin1234\'',
        ]
        vuln_path = os.path.join(BASEDIR, 'tests', 'data', 'engines')
        make_dir(vuln_path)
        vuln_file = os.path.join(vuln_path, 'vuln_example.txt')
        for i in vuln_example:
            with open(vuln_file, 'w') as fp:
                fp.write(i)
            status, _ = rule.verify(reference_value=vuln_file)
            self.assertTrue(status)
            self.assertEqual(_, i)
        clean_dir(vuln_path)
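ContentRule is also not reproduced on this page. Note that the regex string carries a trailing "### password leak" section; for the test to pass, the rule presumably treats everything after "###" as a description rather than as part of the pattern. A minimal sketch under that assumption (verify_content is an illustrative name, not the project's API):

import re

def verify_content(rule_regex, reference_value, file_ext='.txt'):
    """Return (True, matching_line) for the first line that matches the pattern."""
    if not reference_value.endswith(file_ext):
        return False, None
    # assumed convention: the part after '###' is a human-readable description
    pattern = rule_regex.split('###')[0].strip()
    with open(reference_value, 'r') as fp:
        for line in fp:
            if re.search(pattern, line):
                return True, line.strip()
    return False, None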
Example #4
    def __init__(self):
        self.server = RequestConnect(server_token=conf.server.token)

        if not os.path.isfile(CORE_CONF_FILE):
            raise FileNotFoundError(
                'The "seecode_scanner.yml" profile was not found, please confirm that it is configured.'
            )

        self._is_upgrade = False
        self._local_version = None
        self._current_version = None
        self._transport_encryption = False
        self._upgrade_info = {}
        self.rsa = RSAEncrypt()
        if conf.server.public_key_path and conf.server.private_key_path:
            self.rsa.load(conf.server.public_key_path,
                          conf.server.private_key_path)
        self._upgrade_path = os.path.join(paths.UPGRADE_PATH,
                                          self.current_version)
        logger.info(
            'Initialize the upgrade environment and create an upgrade directory...'
        )
        make_dir([paths.UPGRADE_PATH, self._upgrade_path])
        self.local_upgrade_file = os.path.join(
            paths.UPGRADE_PATH, '{0}.tgz'.format(self.current_version))
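RSAEncrypt is not listed on this page (generate_pem in Example #9 appears to belong to the same class). A minimal sketch of the load call used above, assuming it reads PKCS#1 PEM files with the `rsa` package (class name and details are inferred, not the project's source):

import rsa

class RSAEncryptSketch(object):
    def __init__(self):
        self.public_key = None
        self.private_key = None

    def load(self, public_key_path, private_key_path):
        """Load a PKCS#1 PEM key pair from disk."""
        with open(public_key_path, 'rb') as fp:
            self.public_key = rsa.PublicKey.load_pkcs1(fp.read())
        with open(private_key_path, 'rb') as fp:
            self.private_key = rsa.PrivateKey.load_pkcs1(fp.read())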
    def sync_code(self):
        """

        :return:
        """
        self._logger.info(
            "[ScanProject] Start syncing project code into the scan directory..."
        )

        if self._project_type == 'online':  # online
            self._git.checkout_branch_sync_code(self._project_branch,
                                                self._force_sync_code)
            self._branch_commit = self._git.get_branch_last_commit_id()
            self._logger.info(
                '[ScanProject] current branch commit:{0}, branch name:{1}'.
                format(self._branch_commit, self._project_branch))
            cmd = " -R {0}/* {1}".format(self._project_path,
                                         self._paths_project_scan)
            self._logger.debug("[ScanProject] cp{0}".format(cmd))
            self.cp.exec_match(cmd=cmd)

        elif self._project_type == 'offline' and self._project_local_path:
            cmd = " -R {0}/* {1}".format(self._project_local_path,
                                         self._paths_project_scan)
            self._logger.debug("[ScanProject] cp{0}".format(cmd))
            self.cp.exec_match(cmd=cmd)

        else:  # distributed storage (local / ftp)
            self.__init_distributed()
            if self._project_storage_type in self.storage:
                if self._project_storage_type == 'local':  # TODO
                    pass
                elif self._project_storage_type == 'ftp':  # FTP
                    make_dir(self._project_path)
                    local_file = os.path.join(self._project_path,
                                              self._project_file_origin_name)
                    remote_file = urlparse(self._project_ssh).path
                    if not os.path.isfile(local_file):
                        self.storage['ftp'].download_file(
                            local_file, remote_file)
                    unzip_cmd = [
                        '/usr/bin/unzip', '-n', local_file, '-d',
                        self._paths_project_scan
                    ]
                    self._logger.debug(
                        "[ScanProject] Start unzipping the file:[{0}]".format(
                            ' '.join(unzip_cmd)))
                    output, err = exec_cmd(' '.join(unzip_cmd))
                    self.storage['ftp'].close()
                    if err:
                        raise Exception(err)
            else:
                msg = 'Does not support "{0}" storage mode.'.format(
                    self._project_storage_type)
                raise DistributedDoesNotSupportStorage(msg)

        self._logger.info(
            "[ScanProject] Project code synchronization completed.")
Example #6
    def test_checkFile(self):
        f_p = os.path.join(BASEDIR, 'tests', 'data', 'tmp')
        f_f = os.path.join(f_p, 'test.txt')
        make_dir(f_p)
        # a non-existent path should raise
        with self.assertRaises(Exception) as _:
            checkFile('not_found')
        with open(f_f, 'wb') as fp:
            fp.write(b'')
        self.assertTrue(checkFile(f_f))
        clean_dir(f_p)
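The checkFile helper is only exercised here, not shown. From the assertions, it raises for a missing path and returns True for an existing file; a minimal sketch of that contract (the exception type is an assumption):

import os

def checkFile(path):
    """Raise if the path is not an existing regular file, otherwise return True."""
    if not os.path.isfile(path):
        raise Exception('file not found: {0}'.format(path))
    return True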
    def execute(self):
        make_dir(RESULT_TMP_DIR)
        cmd_list = [
            '/usr/bin/env',
            _get_python_path(),
            _get_scanner_script(),
        ]

        if self.customize_command:
            cmd_list.extend(self.customize_command)

        if hasattr(self, 'param_cmd'):
            cmd_list.extend(self.param_cmd)

        cmd_list = [str(i) for i in cmd_list]  # arguments passed to subprocess must be str
        self.assertEqual(0, subprocess.call(cmd_list))
        self.verify_result()
        clean_dir(RESULT_TMP_DIR)
    def __init_work_dir(self):
        """

        :return:
        """
        scan_work = '{0}'.format(self._task_id or self._project_name)
        self._paths_log = os.path.join(self._work_dir, 'logs', scan_work)
        self._paths_project_origin = os.path.join(
            self._work_dir, 'projects', self._group_key or self._project_name)
        self._project_path = os.path.join(self._paths_project_origin,
                                          self._project_name)
        self._paths_project_scan = os.path.join(self._work_dir, 'tasks',
                                                scan_work)

        if os.path.isdir(self._paths_project_scan):
            clean_dir(self._paths_project_scan)
        make_dir([
            self._paths_log, self._paths_project_origin,
            self._paths_project_scan
        ])
Example #9
    def generate_pem(self, save_path, nbits=2048):
        """

        :param save_path:  保存路径
        :param nbits:
        :return:
        """
        make_dir(save_path)
        self.public_key, self.private_key = rsa.newkeys(nbits)
        public_pem = os.path.join(save_path, 'public.pem')
        private_pem = os.path.join(save_path, 'private.pem')
        try:
            with open(public_pem, 'w+') as fp:
                fp.write(self.public_key.save_pkcs1().decode())

            with open(private_pem, 'w+') as fp:
                fp.write(self.private_key.save_pkcs1().decode())
        except Exception as ex:
            logger.error(ex)

        return public_pem, private_pem
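A short usage sketch of the key pair produced above, using the same `rsa` package; the file paths and message are illustrative:

import rsa

# load the PEM files written by generate_pem and round-trip a message
with open('public.pem', 'rb') as fp:
    pub = rsa.PublicKey.load_pkcs1(fp.read())
with open('private.pem', 'rb') as fp:
    priv = rsa.PrivateKey.load_pkcs1(fp.read())

cipher = rsa.encrypt(b'secret', pub)
assert rsa.decrypt(cipher, priv) == b'secret'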
    def test_limit_size_verify(self):
        vuln_regex = r"password\s*?=\s*?['\"](.+?)['\"]  ### password leak"
        rule = ContentRule(
            id=9527,
            name='密码硬编码',  # "hard-coded password"
            key='hard-coded-password',
            risk_id=2,
            risk_name='medium',
            match_type='content',
            regex=vuln_regex,
            file_ext='.txt',
            size=1024,
        )

        vuln_path = os.path.join(BASEDIR, 'tests', 'data', 'engines')
        make_dir(vuln_path)
        vuln_file = os.path.join(vuln_path, 'vuln_example_file_size.txt')
        gen_size_file(vuln_file, 1024 * 20)
        with self.assertRaises(FileLimitSizeException) as _:
            rule.verify(reference_value=vuln_file)

        clean_dir(vuln_path)
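gen_size_file is a test helper that is not shown here. A minimal sketch that creates a file of the requested size in bytes, inferred from the call gen_size_file(vuln_file, 1024 * 20):

def gen_size_file(path, size):
    """Create a file filled with zero bytes of exactly `size` bytes."""
    with open(path, 'wb') as fp:
        fp.write(b'\0' * size)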
Example #11
    def test_make_dir(self):
        # test 1: make_dir accepts both a single path and a list of paths
        dir_list = ['/tmp/t/1', ['/tmp/t/2', '/tmp/t/3']]
        for i in dir_list:
            if isinstance(i, list):
                make_dir(i)
                for _ in i:
                    self.assertTrue(os.path.isdir(_))
                    clean_dir(_)
                    self.assertFalse(os.path.isdir(_))
            else:
                make_dir(i)
                self.assertTrue(os.path.isdir(i))
                clean_dir(i)
                self.assertFalse(os.path.isdir(i))

        # test 2: a path that cannot be created (presumably no permission under
        # the filesystem root) should fail silently and leave no directory behind
        dir_list = ['/not_exists_dir_test_123']
        for i in dir_list:
            make_dir(i)
            self.assertFalse(os.path.isdir(i))
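make_dir and clean_dir are used throughout these examples but never listed. A minimal sketch consistent with the behaviour the tests assert: make_dir accepts a single path or a list of paths and swallows creation errors, and clean_dir removes a directory tree (this is an inferred sketch, not the project's source):

import os
import shutil

def make_dir(path):
    """Create one directory or a list of directories, ignoring failures."""
    paths = path if isinstance(path, (list, tuple)) else [path]
    for p in paths:
        try:
            os.makedirs(p)
        except OSError:
            pass

def clean_dir(path):
    """Remove a directory tree if it exists."""
    if os.path.isdir(path):
        shutil.rmtree(path, ignore_errors=True)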
    def setUp(self):
        self.normal_name = 'normal'
        self.abnormal_name = 'abnormal'
        self.profile_path = os.path.join(BASEDIR, 'tests', 'data', 'profile')
        make_dir(self.profile_path)
    def setUp(self):
        self.work_dir = os.path.join(BASEDIR, 'tests', 'data', 'tmp')
        make_dir(self.work_dir)