Example No. 1
def setup_logger():
    handler = logging.FileHandler('vcscalicense.log', encoding='utf8')
    handler.setFormatter(
        anticrlf.LogFormatter(
            '%(asctime)s - %(levelname)s - %(funcName)s - %(message)s'))
    logger = logging.getLogger(__name__)
    logger.addHandler(handler)
    logger.setLevel(logging.INFO)
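A brief usage sketch of the function above, assuming logging and anticrlf are imported at module level and that license_id stands in for a hypothetical untrusted value: anticrlf.LogFormatter escapes carriage returns and line feeds in the rendered record, so the injected newline cannot forge a second log line in vcscalicense.log.

import logging

import anticrlf

setup_logger()
logger = logging.getLogger(__name__)

# hypothetical untrusted input containing a CRLF injection attempt
license_id = "abc-123\r\nERROR - forged entry"
logger.info("looking up license %s", license_id)  # CR/LF appear escaped, not as a new log line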
Example No. 2
    def get(self, path):
        """ get method """

        handler = logging.StreamHandler()
        handler.setFormatter(
            anticrlf.LogFormatter('%(levelname)s:%(name)s:%(message)s'))
        logger = logging.getLogger(__name__)
        logger.addHandler(handler)
        logger.setLevel(logging.DEBUG)

        logger.debug("request to download: %s", path)

        # If the file is large, we want to abandon downloading
        # if user cancels the requests.
        # pylint: disable=attribute-defined-outside-init
        self.connection_closed = False

        self.set_header("Content-Disposition", "attachment")
        if not utils.check_path(path):
            self.write("Only relative paths are allowed")
            self.set_status(403)
            self.finish()
            return

        if path is None or not os.path.isfile(path):
            self.write("File %s  not found" % path)
            self.set_status(404)
            self.finish()
            return

        length = 4 * 1024 * 1024  # stream the file in 4 MiB chunks
        offset = 0
        while True:
            data = utils.read_chunk(path,
                                    offset=offset,
                                    length=length,
                                    escape_data=False)
            if self.connection_closed or 'data' not in data or len(
                    data['data']) < length:
                break
            offset += length
            self.write(data['data'])
            self.flush()

        if 'data' in data:
            self.write(data['data'])
        self.finish()
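The streaming loop above depends on a project helper, utils.read_chunk, which is expected to return a dict with a 'data' key. A hedged stand-in that matches how the loop uses it (not the project's actual implementation) could look like this:

def read_chunk(path, offset=0, length=4 * 1024 * 1024, escape_data=False):
    """Hypothetical equivalent of utils.read_chunk: return {'data': bytes_read}
    starting at offset, or {} once offset is at or past end of file.
    escape_data is accepted only to mirror the call signature above."""
    with open(path, 'rb') as handle:
        handle.seek(offset)
        chunk = handle.read(length)
    if not chunk:
        return {}
    return {'data': chunk}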
Example No. 3
def main():
    parser = argparse.ArgumentParser(
        description=
        'This script looks at the results set of the FROM APP. For any flaws that have an '
        'accepted mitigation, it checks the TO APP to see if that flaw exists. If it exists, '
        'it copies all mitigation information.')
    parser.add_argument('-f',
                        '--fromapp',
                        help='App GUID to copy from',
                        required=True)
    parser.add_argument('-t',
                        '--toapp',
                        help='App GUID to copy to',
                        required=True)
    args = parser.parse_args()

    handler = logging.FileHandler(filename='MitigationCopier.log')
    handler.setFormatter(
        anticrlf.LogFormatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s'))

    logging.basicConfig(handlers=[handler],
                        datefmt='%m/%d/%Y %I:%M:%S%p',
                        level=logging.INFO)

    # CHECK FOR CREDENTIALS EXPIRATION
    creds_expire_days_warning()

    # SET VARIABLES FOR FROM AND TO APPS
    results_from_app_id = args.fromapp
    results_from_app_name = get_application_name(results_from_app_id)
    formatted_from = format_application_name(results_from_app_id,
                                             results_from_app_name)
    print('Getting findings for {}'.format(formatted_from))
    findings_from = findings_api(args.fromapp)
    print('Found {} findings in "from" {}'.format(len(findings_from),
                                                  formatted_from))
    results_from_flawid = [None] * len(findings_from)
    results_from_unique = [None] * len(findings_from)

    results_to_app_id = args.toapp
    results_to_app_name = get_application_name(args.toapp)
    formatted_to = format_application_name(results_to_app_id,
                                           results_to_app_name)
    print('Getting findings for {}'.format(formatted_to))
    findings_to = findings_api(args.toapp)
    print('Found {} findings in "to" {}'.format(len(findings_to),
                                                formatted_to))
    results_to_flawid = [None] * len(findings_to)
    results_to_unique = [None] * len(findings_to)
    results_to_build_id = get_latest_build(args.toapp)

    # GET DATA FOR BUILD COPYING FROM
    iteration = -1
    for flaw in findings_from:
        if flaw['finding_status']['resolution_status'] != 'APPROVED':
            continue

        finding_lookup = format_finding_lookup(flaw)
        if finding_lookup != '':
            iteration += 1
            results_from_flawid[iteration] = flaw['issue_id']
            results_from_unique[iteration] = finding_lookup

    # CREATE LIST OF UNIQUE VALUES FOR BUILD COPYING TO
    iteration = -1

    for flaw in findings_to:
        iteration += 1
        results_to_flawid[iteration] = flaw['issue_id']
        results_to_unique[iteration] = format_finding_lookup(flaw)

    # CREATE COUNTER VARIABLE
    counter = 0

    # CYCLE THROUGH RESULTS_TO_UNIQUE
    for i in range(len(results_to_unique)):
        # CHECK IF IT'S IN RESULTS FROM
        if results_to_unique[i] in results_from_unique:
            # FIND THE FLAW IDS FOR FROM AND TO
            from_id = results_from_flawid[results_from_unique.index(
                results_to_unique[i])]
            to_id = results_to_flawid[results_to_unique.index(
                results_to_unique[i])]

            # CHECK IF IT'S ALREADY MITIGATED IN TO
            flaw_copy_to_list = next(flaw for flaw in findings_to
                                     if flaw['issue_id'] == to_id)
            # CHECK IF COPY TO IS ALREADY ACCEPTED
            if flaw_copy_to_list['finding_status'][
                    'resolution_status'] != 'APPROVED':

                source_flaw = next(flaw for flaw in findings_from
                                   if flaw['issue_id'] == from_id)
                mitigation_list = source_flaw['annotations']

                # the findings API puts the most recent action first
                for mitigation_action in reversed(mitigation_list):
                    proposal_action = mitigation_action['action']
                    proposal_comment = '[COPIED FROM APP {}] {}'.format(
                        args.fromapp, mitigation_action['comment'])
                    update_mitigation_info(results_to_build_id, to_id,
                                           proposal_action, proposal_comment,
                                           results_to_app_id)
                counter += 1
            else:
                logging.info(
                    'Flaw ID {} in {} already has an accepted mitigation; skipped.'.format(
                        to_id, results_to_app_id))

    print('[*] Updated {} flaws in application {} (guid {}). See log file for details.'.format(
        counter, results_to_app_name, results_to_app_id))
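The matching step in main() pairs findings by scanning two parallel lists with .index(); the same idea reads more directly with a dictionary keyed by the lookup string. A sketch using the script's own helpers (an illustration, not a drop-in replacement for the code above):

def match_approved_findings(findings_from, findings_to):
    # map each approved "from" finding's lookup key to its issue_id
    from_by_lookup = {}
    for flaw in findings_from:
        if flaw['finding_status']['resolution_status'] != 'APPROVED':
            continue
        key = format_finding_lookup(flaw)
        if key != '':
            from_by_lookup[key] = flaw['issue_id']

    # pair each "to" finding that produces the same key with its "from" counterpart
    pairs = []  # (from_issue_id, to_issue_id)
    for flaw in findings_to:
        key = format_finding_lookup(flaw)
        if key in from_by_lookup:
            pairs.append((from_by_lookup[key], flaw['issue_id']))
    return pairs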
Example No. 4
def setup_logger():
    handler = logging.FileHandler('vcworkspace.log', encoding='utf8')
    handler.setFormatter(
        anticrlf.LogFormatter(
            '%(asctime)s - %(levelname)s - %(funcName)s - %(message)s'))
    logging.basicConfig(level=logging.INFO, handlers=[handler])
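Unlike Example No. 1, this variant hands the handler to the root logger through basicConfig, so loggers created elsewhere in the process inherit it via propagation. A minimal sketch under that assumption (the logger name below is hypothetical):

import logging

import anticrlf

setup_logger()
# propagates to the root handler configured above and lands in vcworkspace.log
logging.getLogger('vcworkspace.api').warning('credentials expire soon')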
Example No. 5
import logging
import os
import sys
from timeit import default_timer as timer

import anticrlf
import util
from db.model import ModelHelper, SightingTable, TechniqueTable
from objects.sighting import SightingSchema
from sqlalchemy import create_engine, exc
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy_batch_inserts import enable_batch_inserting

CONTAINER_ENV_MARKER = "IN_CONTAINER"
LOG_FILE = "pipeline.log"

log_formatter = anticrlf.LogFormatter(
    "%(asctime)s - %(levelname)-8s - %(filename)s:%(funcName)s - %(message)s")

logging.basicConfig(datefmt="%d-%b-%y %H:%M:%S")

logger = logging.getLogger(__name__)

# if running in container, log to stdout and use docker logging facilities
if CONTAINER_ENV_MARKER in os.environ:
    consoleHandler = logging.StreamHandler(
        sys.stdout)  # set streamhandler to stdout
    consoleHandler.setFormatter(log_formatter)
    logger.addHandler(consoleHandler)
else:
    fileHandler = logging.FileHandler(LOG_FILE, encoding="utf8")
    fileHandler.setFormatter(log_formatter)
    logger.addHandler(fileHandler)
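A short usage sketch for the setup above: IN_CONTAINER is simply a marker the deployment sets (for example, docker run -e IN_CONTAINER=1 ...), so records go to stdout inside a container and to pipeline.log otherwise. The logger level still has to be raised explicitly, since only the handlers are configured here.

logger.setLevel(logging.INFO)

start = timer()
logger.info("pipeline run started")
# ... hypothetical placeholder: build the SQLAlchemy session and load sightings here ...
logger.info("pipeline run finished in %.2f s", timer() - start)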