# NOTE(review): this chunk appears to start mid-file; `os`, `sys`, `inspect`,
# and `logging` are used below but their imports are not visible here.

# Make the repository parent directory importable so the local `sdk`
# package resolves.
currentdir = os.path.dirname( os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(currentdir)
sys.path.insert(0, parentdir)

from re import compile, I, M
from shutil import copyfile, copy
from ujson import load
from argparse import ArgumentParser

from sdk.fs_processor import Scanner
from sdk.wrapper import ToolRunner
from sdk.plugins import Runner, Plugin, labels_parser, default_plugin_options

# Module-level logger and plugin descriptor for the Brakeman SAST wrapper.
log = logging.getLogger(__name__)
plugin = Plugin(name="sast_brakeman", version="1")

# Captures the report file name ("levelops-brakeman-<project>") and the
# project part from a results-file path ending in ".json".
# NOTE(review): the pattern is not a raw string and the dot before "json"
# is unescaped (matches any character) — presumably a literal ".json"
# suffix was intended; confirm before tightening.
result_project_extractor = compile(
    pattern='^.*(levelops-brakeman-(.*)).json\s*$',
    flags=(I | M))


def get_formats_and_outputs(options):
    """Collect the list of report formats requested via CLI options.

    "json" is added when --json or --submit is set; "csv", "html" and
    "table" when their respective flags are set.

    NOTE(review): definition continues beyond this chunk (no return
    statement is visible here).
    """
    formats = []
    if options.json or options.submit:
        formats.append("json")
    if options.csv:
        formats.append("csv")
    if options.html:
        formats.append("html")
    if options.table:
        formats.append("table")
import io
import time
import inspect

# NOTE(review): `os`, `sys`, `re`, and `logging` are used below but their
# imports are not visible in this chunk.

# Make the repository parent directory importable so the local `sdk`
# package resolves.
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(currentdir)
sys.path.insert(0,parentdir)

from ujson import dump, dumps
from argparse import ArgumentParser

from sdk.types import Endpoint, API, Report
from sdk.fs_processor import Scanner
from sdk.plugins import Runner, Plugin, labels_parser, default_plugin_options

# Module-level logger and plugin descriptor for the Express.js endpoint scanner.
log = logging.getLogger(__name__)
plugin = Plugin(name="sast_api_express", version="1")

# Regexes for locating Express usage in JavaScript sources.
# NOTE(review): patterns are not raw strings; behavior is unchanged but
# they rely on Python passing unknown escapes through.
# `var x = require('express')...`
express_pattern = re.compile(pattern='^\s*var\s*(\w*)\s*=\s*(require\(\s*\'express\'\s*\))\s*.*;?$', flags=(re.I | re.M))
# `var x = require('<path-or-module>');`
require_pattern = re.compile(pattern='^\s*var\s*(\w*)\s*=\s*(require\(\s*\'([\.\/\w]+)\'\s*\))\s*;?.*$', flags=(re.I | re.M))
# `var r = require('express').Router(...)` or `var r = <app>.Router(...)`
router_pattern = re.compile(pattern='^\s*var\s*(\w*)\s*=\s*(require\(\s*\'express\'\s*\)|\w*)\s*\.Router\(.*\)\s*;?$', flags=(re.I | re.M))
# `<x>.use('/prefix', require('<module>')|<router>)` — sub-router mounting.
use_pattern = re.compile(pattern='^.*\.\s*use\(\s*\'([\/\w]+)\'\s*,\s*(require\s*[=(]\'([\/\.\w]+)\'|\w+).*$', flags=(re.I | re.M))
# `<x>.get('/path', ...)` (also post/delete/put) — direct endpoint registration.
endpoint_pattern1 = re.compile(pattern='^\s*.*\w+\s*\.\s*(get|post|delete|put)\s*\(\s*\'(\/[\/\w\:\}\{]*)\'\s*.*$', flags=(re.I | re.M))
# `<x>.route('/path').get(...)` — chained route registration.
endpoint_pattern2 = re.compile(pattern='^\s*.*\w+\s*\.\s*route\s*\(\s*\'(\/[\/\w\:\}\{]*)\'\s*\)\s*\.\s*(get|post|delete|put)\s*\(.*$', flags=(re.I | re.M))

# Shared accumulator for scan results — populated elsewhere in this module
# (not visible in this chunk).
resources = {}


def process_file(f_path):
    """
    Supported annotations:
        require('express')
        require('express').Router();
import os
import inspect
import time

# NOTE(review): `sys` and `logging` are used below but imported outside
# this chunk.

# Make the repository parent directory importable so the local `sdk`
# package resolves.
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(currentdir)
sys.path.insert(0,parentdir)

from ujson import dump, dumps
from argparse import ArgumentParser

from sdk.types import Endpoint, API, Report
from sdk.fs_processor import Scanner
from sdk.plugins import Runner, Plugin, labels_parser, default_plugin_options

# Module-level logger and plugin descriptor for the Apigee proxy scanner.
log = logging.getLogger(__name__)
plugin = Plugin(name="sast_api_apigee", version="1")


def process_file(f_path):
    # parse file contents and collect APIs
    # detect single line annotations
    # detect multi line annotations
    """
    Supported extractions (whitespace variations inside the tags are
    tolerated):
        <BasePath>/default</ BasePath>
        <BasePath>/with/{var}</BasePath >
        < BasePath> /with/pattern/*</ BasePath >
        < BasePath >/spaces </ BasePath>
        <BasePath > /more/spaces/and/levels </BasePath>
        <BasePath > /with/new/lines
import time

from ujson import loads
from ujson import dump, dumps
from os import walk, path, listdir

# NOTE(review): `os` (as a module), `sys`, `inspect`, and `logging` are used
# below but imported outside this chunk.

# Make the repository parent directory importable so the local `sdk`
# package resolves.
currentdir = os.path.dirname( os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(currentdir)
sys.path.insert(0, parentdir)

from argparse import ArgumentParser

from sdk.types import Endpoint, API, Report
from sdk.fs_processor import Scanner
from sdk.plugins import Runner, Plugin, labels_parser, default_plugin_options

# Module-level logger and plugin descriptor for the Kubernetes manifest scanner.
log = logging.getLogger(__name__)
plugin = Plugin(name="sast_api_k8s", version="1")

# Overall flow:
#   scan directory and subdirectories
#   collect yaml, yml and json files and put them in the work queue
#   consume files from the work queue
#     parse file
#     collect APIs
#   return results

# Kubernetes resource kinds this scanner is interested in.
r_types = ["ingress"]


def process_file(f_path):
    # Filter by content: only parse if the file actually contains any
    # resource kind that we are interested in.
    # Parse the file contents.
    # Analyze the resource definition.
from ujson import loads, load
from ujson import dump, dumps
import yaml
import time

# NOTE(review): `os`, `sys`, `inspect`, and `logging` are used below but
# imported outside this chunk.

# Make the repository parent directory importable so the local `sdk`
# package resolves.
currentdir = os.path.dirname( os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(currentdir)
sys.path.insert(0, parentdir)

from argparse import ArgumentParser

from sdk.types import Endpoint, API, Report
from sdk.fs_processor import Scanner
from sdk.plugins import Runner, Plugin, labels_parser, default_plugin_options

# Module-level logger and plugin descriptor for the CloudFormation scanner.
log = logging.getLogger(__name__)
plugin = Plugin(name="sast_api_cloudformation", version="1")


class Node(object):
    """A tree node holding an id, a value, an optional parent, and children.

    NOTE(review): the class definition continues beyond this chunk.
    """

    def __init__(self, node_id, value, parent=None, children=None):
        # node_id: identifier for this node; value: payload; parent: the
        # owning Node (or None for a root); children: optional initial list.
        self._node_id = node_id
        self.value = value
        self._parent = parent
        # Start with a fresh list when no children are given, so instances
        # never share a mutable default.
        if not children:
            self._children = []
        else:
            self._children = children

    def get_id(self):
        """Return this node's identifier."""
        return self._node_id
import os
import inspect
import time

# NOTE(review): `sys` and `logging` are used below but imported outside
# this chunk.

# Make the repository parent directory importable so the local `sdk`
# package resolves.
currentdir = os.path.dirname( os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(currentdir)
sys.path.insert(0, parentdir)

from ujson import dump, dumps
from argparse import ArgumentParser

from sdk.types import Endpoint, API, Report
from sdk.fs_processor import Scanner
from sdk.plugins import Runner, Plugin, labels_parser, default_plugin_options

# Module-level logger and plugin descriptor for the Flask route scanner.
log = logging.getLogger(__name__)
plugin = Plugin(name="sast_api_flask", version="1")


def process_file(f_path):
    # parse file contents and collect APIs
    # detect single line annotations
    # detect multi line annotations
    """
    Supported annotations (whitespace variations around the decorator are
    tolerated):
        @app.route('/user/<username>')
        @app.route( '/user/<username>')
        @app.route ( '/user/<username>')
        @app. route ( '/user/<username>')
        @app .route ( '/user/<username>')
        @app . route ( '/user/<username>' )
        @app.route('/post/<int:post_id>')
        @app.route('/path/<path:subpath>')
# NOTE(review): this chunk starts mid-file; `currentdir`, `os`, `sys`, and
# `logging` are defined/imported above it.
parentdir = os.path.dirname(currentdir)
sys.path.insert(0, parentdir)

from re import compile, I, M
from shutil import copyfile, copy
from ujson import load, dump, dumps
from argparse import ArgumentParser
from uuid import uuid4

from sdk.fs_processor import Scanner
from sdk.wrapper import ToolRunner
from sdk.plugins import Runner, Plugin, labels_parser, default_plugin_options

log = logging.getLogger(__name__)

# Plugin identity for the Praetorian report parser.
levelops_module_name = 'praetorian'
plugin = Plugin(name="report_" + levelops_module_name, version="1")

# Number of columns expected in the per-category summary table.
SUMMARY_BY_CAT_COLUMN_COUNT = 7

# Matchers for pieces of the extracted report text.
# NOTE(review): patterns are not raw strings; behavior is unchanged.
# A page-header line: "<anything> | <number>".
page_header_matcher = compile(pattern='^.*\s\|\s\d*$', flags=(I | M))
# A module-score line: "(<digits>) <one or more words>".
score_module_matcher = compile(pattern='^\((\d*)\)((\s+\w+)+)$', flags=(I | M))
# A line consisting solely of digits.
number_matcher = compile(pattern='^\d+$', flags=(I | M))

# (file extension, output type) pairs used to select an output format.
OUTPUT_TYPES = ((".htm", "html"), (".html", "html"), (".xml", "xml"), (".tag", "tag"))


# NOTE(review): mutable default argument `files=[]` is shared across calls —
# worth replacing with None once the full definition is in view. The
# signature continues beyond this chunk.
def extract_text(files=[], outfile='-', no_laparams=False, all_texts=None, detect_vertical=None,
import os
import inspect
import time

# NOTE(review): `sys` and `logging` are used below but imported outside
# this chunk.

# Make the repository parent directory importable so the local `sdk`
# package resolves.
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(currentdir)
sys.path.insert(0,parentdir)

from ujson import dump, dumps
from argparse import ArgumentParser

from sdk.types import Endpoint, API, Report
from sdk.fs_processor import Scanner
from sdk.plugins import Runner, Plugin, labels_parser, default_plugin_options

# Module-level logger and plugin descriptor for the Spring MVC scanner.
log = logging.getLogger(__name__)
plugin = Plugin(name="sast_api_springmvc", version="1")


def process_file(f_path):
    # parse file contents and collect APIs
    # detect single line annotations
    # detect multi line annotations
    """
    Supported annotations:
        @RequestMapping("/rest/path")
        @RequestMapping(value="/rest/path")
        @RequestMapping(path="/rest/path")
        @RequestMapping(method= ,value="/rest/path", produces=)
        @PostMapping("/rest/path")
    """
from bs4 import BeautifulSoup
from bs4 import element
from re import compile, I, M
from shutil import copyfile, copy
from ujson import load, dump, dumps
from argparse import ArgumentParser
from uuid import uuid4

from sdk.fs_processor import Scanner
from sdk.wrapper import ToolRunner
from sdk.plugins import Runner, Plugin, labels_parser, default_plugin_options

# NOTE(review): `logging` is used below but imported outside this chunk.
log = logging.getLogger(__name__)

# Plugin identity — "ms_tmt" presumably refers to the Microsoft Threat
# Modeling Tool; confirm against the rest of the file.
levelops_module_name = 'ms_tmt'
levelops_plugin_name = 'report_' + levelops_module_name
plugin = Plugin(name=levelops_plugin_name, version="1")

# Number of columns expected in the per-category summary table.
SUMMARY_BY_CAT_COLUMN_COUNT = 7

# Matchers for pieces of the extracted report text.
# NOTE(review): patterns are not raw strings; behavior is unchanged.
# A page-header line: "<anything> | <number>".
page_header_matcher = compile(pattern='^.*\s\|\s\d*$', flags=(I | M))
# A module-score line: "(<digits>) <one or more words>".
score_module_matcher = compile(pattern='^\((\d*)\)((\s+\w+)+)$', flags=(I | M))
# A line consisting solely of digits.
number_matcher = compile(pattern='^\d+$', flags=(I | M))


def get_formats_and_outputs(options):
    """Collect requested report formats and output targets from CLI options.

    NOTE(review): definition continues beyond this chunk.
    """
    formats = []
    if options.json:
        formats.append("json")
    # if options.csv:
    #     formats.append("csv")
    outputs = []
    if options.output_file: