Code example #1
    def test_from_config_yml(self, mock_db_from_connection):
        db.db_from_config("default", "inputs/test_all.yml", True)
        mock_db_from_connection.assert_called_with(
            "mongodb://*****:*****@localhost:27017/local", "localuser")

        db.db_from_config(config_filename="inputs/test_all.yml", yaml=True)
        mock_db_from_connection.assert_called_with(
            "mongodb://*****:*****@localhost:27017/local", "localuser")
Code example #2
    def test_from_config_etc(self, mock_db_from_connection):
        db.db_from_config()  # default section set to testing
        mock_db_from_connection.assert_called_with(
            "mongodb://*****:*****@localhost:27017/test-cyhy", "test-cyhy")

        db.db_from_config("testing")
        mock_db_from_connection.assert_called_with(
            "mongodb://*****:*****@localhost:27017/test-cyhy", "test-cyhy")
Code example #3
    def test_from_config_conf(self, mock_db_from_connection):
        db.db_from_config("testconf", "inputs/test-conf.conf")
        mock_db_from_connection.assert_called_with(
            "mongodb://*****:*****@localhost:27017/test-conf", "test-name")

        db.db_from_config(config_filename="inputs/test-conf.conf")
        mock_db_from_connection.assert_called_with(
            "mongodb://*****:*****@localhost:27017/test-conf", "test-name")
Code example #4
def create_app(
    debug=None,
    local=None,
    secret_key=None,
    async_mode="gevent",
    config_filename=None,
    section=None,
    new_hire_section=None,
):
    app = Flask(__name__, instance_path="/var/cyhy/web")
    gunicorn_logger = logging.getLogger("gunicorn.error")
    app.logger.handlers = gunicorn_logger.handlers
    app.logger.setLevel(gunicorn_logger.level)
    cache.init_app(app)

    # Manually setting cors_allowed_origins to allow all due to a change in July
    # (2019) per https://github.com/miguelgrinberg/python-engineio/commit/7548f704a0a3000b7ac8a6c88796c4ae58aa9c37
    # Previously the default resulted in similar behavior, but now the default
    # is to use the host address in the request. The configuration of our
    # application does not work with this change so I am forcing an equivalent to
    # the old behavior. We may want to look into providing CORS for websocket
    # connections in the future.
    socketio.init_app(app, async_mode=async_mode, cors_allowed_origins="*")

    install_secret_key(app, secret_key)
    register_blueprints(app)
    using_yaml = str(config_filename).lower().endswith((".yml", ".yaml"))
    app.db = database.db_from_config(section,
                                     config_filename=config_filename,
                                     yaml=using_yaml)
    # TODO add exception handler for no new_hire_section
    app.new_hire_db = database.db_from_config(new_hire_section,
                                              config_filename=config_filename,
                                              yaml=using_yaml)
    app.logger.debug(app.new_hire_db)
    app.logger.debug(app.db)
    start_scheduler(app)
    # TODO set origins via environment variables
    origins = [".*[\.]?data\.ncats\.dhs\.gov"]
    if local:
        origins.append("^.*\/\/localhost(:[0-9]+)?$")
    print origins
    CORS(app, resources={r"\/.*": {"origins": origins}})
    # import IPython; IPython.embed() #<<< BREAKPOINT >>>
    # app.run(host='::', debug, threaded=True)
    app.config["DEBUG"] = debug

    return app
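A hypothetical way to invoke create_app(); the path, section names, and port below are placeholders for illustration, not values taken from the project:

if __name__ == "__main__":
    app = create_app(
        debug=True,
        local=True,
        config_filename="/etc/cyhy/webui.yml",  # placeholder path
        section="production",                   # placeholder section name
        new_hire_section="new-hires",           # placeholder section name
    )
    # Flask-SocketIO apps are served through the socketio object rather
    # than app.run().
    socketio.run(app, host="0.0.0.0", port=5000)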
Code example #5
File: GNIS_data_import.py  Project: cisagov/cyhy-core
def main():
    global __doc__
    __doc__ = re.sub("COMMAND_NAME", __file__, __doc__)
    args = docopt(__doc__, version="v0.0.1")
    db = database.db_from_config(args["--section"])

    with open(args["PLACES_FILE"], "r") as place_file:
        header_line = (
            place_file.readline().strip().decode("utf-8-sig")
        )  # Files downloaded from geonames.usgs.gov are UTF8-BOM
        csv_reader = csv.DictReader(skip_comments(place_file),
                                    delimiter="|",
                                    fieldnames=header_line.split("|"))

        if header_line == GOVT_UNITS_HEADER:
            if args["--force"] is not True:
                if is_imported(db, place_file, 0):
                    exit_if_imported(args["PLACES_FILE"])
            import_govt_units(db, csv_reader)
        elif header_line == POP_PLACES_HEADER:
            if args["--force"] is not True:
                if is_imported(db, place_file, 1):
                    exit_if_imported(args["PLACES_FILE"])
            import_populated_places(
                db, csv_reader
            )  # IMPORTANT: This import must be done AFTER import_govt_units()
        else:
            print "ERROR: Unknown header line found in: {}".format(
                args["PLACES_FILE"])
            sys.exit(-1)
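The skip_comments() helper called above is not included in this excerpt. A minimal sketch of what such a generator might look like, assuming it simply filters comment lines out of the stream before the CSV reader sees them (an assumption, not the project's actual implementation):

def skip_comments(lines):
    """Yield lines from an iterable, skipping comment lines."""
    for line in lines:
        if not line.lstrip().startswith("#"):
            yield line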
Code example #6
def main():
    args = docopt(__doc__, version='v0.0.1')
    db = database.db_from_config(args['--section'])
    success = False
    
    if args['--previous']:
        scorecard_id = ObjectId(args['--previous'])
    else:
        scorecard_id = None
    
    if args['--title-date']:
        title_date = dateutil.parser.parse(args['--title-date'])
    else:
        title_date = None

    if args['list']:
        list_scorecards(db)
        sys.exit(0)
        
    if args['delete']:
        confirmed = warn_and_confirm('This will delete a scorecard document from the database.')    
        if confirmed:
            delete_scorecard(db, args['SCORECARD_ID'])
            sys.exit(0)
        else:
            print 'ABORTED!'
            sys.exit(-1)
            
    if args['create']:
        print 'Generating scorecard...',
        generator = ScorecardGenerator(db, debug=args['--debug'],
                                       scorecard_id=scorecard_id,
                                       title_date=title_date,
                                       final=args['--final'],
                                       anonymize=args['--anonymize'])
        results = generator.generate_scorecard()
        print 'Done'
        sys.exit(0)
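docopt builds its argument parser from the module docstring, which this excerpt omits. A hypothetical usage block consistent with the commands and options referenced above; the command name and layout are invented for illustration:

"""Usage:
  scorecard.py list [--section SECTION]
  scorecard.py create [--debug] [--final] [--anonymize]
                      [--previous SCORECARD_ID] [--title-date DATE]
                      [--section SECTION]
  scorecard.py delete SCORECARD_ID [--section SECTION]
"""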
Code example #7
File: fema_stats.py  Project: cisagov/ncats-webd
def main():
    global __doc__
    __doc__ = re.sub("COMMAND_NAME", __file__, __doc__)
    args = docopt(__doc__, version="v0.0.1")
    db = database.db_from_config(args["--section"])

    print fema_csv(db).getvalue()
Code example #8
def main():
    """Perform the query and print the results."""
    args = docopt.docopt(__doc__, version="0.0.1")

    # Set up logging
    log_level = args["--log-level"]
    try:
        logging.basicConfig(
            format="%(asctime)-15s %(levelname)s %(message)s", level=log_level.upper()
        )
    except ValueError:
        logging.critical(
            '"{}" is not a valid logging level.  Possible values are debug, info, warning, and error.'.format(
                log_level
            )
        )
        return 1

    # Handle command line arguments
    days = int(args["--days"])
    group = args["--group"]
    severity = int(args["--severity"])
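    # Note: datetime.datetime.fromisoformat() requires Python 3.7+; the
    # replace() call below marks the parsed timestamp as UTC.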
    start_date = datetime.datetime.fromisoformat(args["--start-date"]).replace(
        tzinfo=datetime.timezone.utc
    )

    # Connect to database
    db = database.db_from_config(args["--section"])

    # Perform the query
    data = csv_get_cybex_data(db, start_date, severity, group, days)

    # Stop logging and clean up
    logging.shutdown()
    print(data)
Code example #9
def main():
    """Generate a notification PDF."""
    args = docopt(__doc__, version=__version__)
    cyhy_db = database.db_from_config(args["--cyhy-section"])

    for owner in args["OWNER"]:
        if args["--encrypt"]:
            report_key = Config(args["--cyhy-section"]).report_key
        else:
            report_key = None

        if args["--anonymize"]:
            print("Generating anonymized notification based on {} ...".format(owner)),
        else:
            print("Generating notification for {} ...".format(owner)),
        generator = NotificationGenerator(
            cyhy_db,
            owner,
            debug=args["--debug"],
            final=args["--final"],
            anonymize=args["--anonymize"],
            encrypt_key=report_key,
        )
        was_encrypted, results = generator.generate_notification()

        if results:
            if len(results["notifications"]) > 0:
                if was_encrypted:
                    print("Done (encrypted)")
                else:
                    print("Done")
            else:
                print("No notifications found, no PDF created!")
Code example #10
def main():
    args = docopt(__doc__, version='v0.0.1')
    db = database.db_from_config(args['--section'])
    
    bod_effective_date = parser.parse(args['BOD_EFFECTIVE_DATE']).replace(tzinfo=tz.tzutc())
    
    print 'Generating Binding Operational Directive (BOD) Scorecard...',
    generator = ScorecardGenerator(db, bod_effective_date, args['PREVIOUS_SCORECARD_JSON_FILE'], args['EXCEPTIONS_GRANTED_JSON_FILE'], debug=args['--debug'], final=args['--final'])
    results = generator.generate_bod_scorecard()
    print 'Done'
    sys.exit(0)
Code example #11
File: data-pusher.py  Project: cisagov/ncats-webd
def main():
    global __doc__
    __doc__ = re.sub("COMMAND_NAME", __file__, __doc__)
    args = docopt(__doc__, version="v0.0.1")
    db = database.db_from_config(args["--section"])
    # import IPython; IPython.embed() #<<< BREAKPOINT >>>

    generate_push_data_files(db)  # Initial call
    logger.info("scheduled refresh interval: {!s} seconds".format(REFRESH_INTERVAL))
    schedule.every(REFRESH_INTERVAL).seconds.do(generate_push_data_files, db)

    logger.info("starting scheduler loop")
    while True:
        schedule.run_pending()
        time.sleep(1)
Code example #12
File: ticketReport.py  Project: cisagov/ncats-webd
def main():
    args = docopt(__doc__, version="v0.0.1")
    db = database.db_from_config(args["--section"])

    rd, stats = get_stats(db)

    print "Currently-Open Non-False-Positive Tickets:", stats[
        "open_non_FP_tix"]["total"]
    print "\tFederal:", stats["open_non_FP_tix"]["federal"]
    print "\tSLTT:", stats["open_non_FP_tix"]["SLTT"]
    print "\tPrivate:", stats["open_non_FP_tix"]["private"]
    print "Currently-Open False-Positive Tickets:", stats["open_FP_tix"][
        "total"]
    print "\tFederal:", stats["open_FP_tix"]["federal"]
    print "\tSLTT:", stats["open_FP_tix"]["SLTT"]
    print "\tPrivate:", stats["open_FP_tix"]["private"]
    print "\nData below does not include False-Positive tickets"

    FY_ticket_counts_by_year = stats["FY_ticket_counts_by_year"].items()
    FY_ticket_counts_by_year.sort()
    print "\nFiscal Year   Tickets Opened   Tickets Open   Tickets Closed"
    for fiscal_year, ticket_counts in FY_ticket_counts_by_year:
        print "{:>11}   {:>14,}   {:>12,}   {:>14,}".format(
            fiscal_year,
            ticket_counts["opened"],
            ticket_counts["open"],
            ticket_counts["closed"],
        )
    print "{:>11}   {:>14,}   {:>12}   {:>14,}".format(
        "TOTAL",
        stats["FY_ticket_count_totals"]["opened"],
        " ",
        stats["FY_ticket_count_totals"]["closed"],
    )

    print_activity(stats["currentFYactivity"], "Current FY Activity",
                   rd["fy_start"], rd["now"])
    print_activity(
        stats["previousFYactivity"],
        "Previous FY Activity",
        rd["prev_fy_start"],
        rd["prev_fy_end"],
    )
    print_activity(
        stats["currentMonthActivity"],
        "Current Month Activity",
        rd["month_start"],
        rd["now"],
    )
    print_activity(
        stats["previousMonthActivity"],
        "Previous Month Activity",
        rd["prev_month_start"],
        rd["prev_month_end"],
    )
    print_activity(
        stats["currentWeekActivity"],
        "Current Week Activity",
        rd["week_start"],
        rd["now"],
    )
    print_activity(
        stats["previousWeekActivity"],
        "Previous Week Activity",
        rd["prev_week_start"],
        rd["prev_week_end"],
    )
Code example #13
File: contacts.py  Project: cisagov/ncats-webd
def main():
    args = docopt(__doc__, version="v0.0.1")
    db = database.db_from_config(args["--section"])
    # import IPython; IPython.embed() #<<< BREAKPOINT >>>
    print write_contacts_csv(db).getvalue()
Code example #14
File: common_fixtures.py  Project: cisagov/cyhy-core
def database():
    # connection = MongoClient('mongodb://*****:*****@[::1]:27017/test_database2', 27017)
    # db = connection['test_database2']
    db = pcsdb.db_from_config("testing")
    return db
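The @pytest.fixture decorator that normally precedes a fixture like this appears to have been trimmed from the excerpt. A hypothetical test consuming it, relying on pytest's injection of fixtures by parameter name (the test body is illustrative only):

def test_database_connection(database):
    # pytest passes in the fixture's return value.
    assert database is not None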
Code example #15
def main():
    # import IPython; IPython.embed() #<<< BREAKPOINT >>>
    args = docopt(__doc__, version="v0.0.1")
    db = database.db_from_config(args["CYHY_DB_SECTION"])
    logging.basicConfig(
        filename=os.path.join(WEEKLY_REPORT_BASE_DIR, LOG_FILE),
        format="%(asctime)-15s %(levelname)s - %(message)s",
        level=LOGGING_LEVEL,
    )
    start_time = time.time()
    logging.info("BEGIN")

    cyhy_db_section = args["CYHY_DB_SECTION"]
    scan_db_section = args["SCAN_DB_SECTION"]
    use_docker = 1
    # To track third-party snapshot and report status
    successful_tp_snaps = list()
    failed_tp_snaps = list()
    successful_tp_reports = list()
    failed_tp_reports = list()
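    # Note: successful_snapshots, failed_snapshots, successful_reports, and
    # failed_reports are referenced in the finally block below but are not
    # defined in this excerpt; they are presumably module-level or populated
    # by the helper functions called here.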

    create_subdirectories()
    if args["--no-dock"]:
        # take action to run scorecard and reports without docker
        use_docker = 0

    nolog = False
    if args["--no-log"]:
        nolog = True

    if not args["--no-pause"]:
        control_id = pause_commander(db)
        logging.info("Pausing Commander...")
        logging.info("Control ID: %s", control_id)

    # Check for cyhy-reports container running
    if use_docker == 1:
        if (subprocess.call(
                "docker run --rm --volume /etc/cyhy:/etc/cyhy --volume {}:/home/cyhy {}/cyhy-reports:stable cyhy-report -h"
                .format(WEEKLY_REPORT_BASE_DIR, NCATS_DHUB_URL),
                shell=True,
        ) != 0):
            # Output of stderr & out if fail
            logging.critical("Docker: cyhy-reports container failed")
            sys.exit(-1)

    try:
        logging.info("Generating CybEx Scorecard...")

        # list all cybex json files and grab latest filename
        os.chdir(
            os.path.join(WEEKLY_REPORT_BASE_DIR, SCORECARD_JSON_OUTPUT_DIR))
        old_json_files = filter(os.path.isfile,
                                glob.glob("cybex_scorecard_*.json"))
        old_json_files.sort(key=lambda x: os.path.getmtime(x))
        if old_json_files:
            previous_scorecard_filename = old_json_files[-1]
            logging.info("  Using previous CybEx Scorecard JSON: {}".format(
                previous_scorecard_filename))
            scorecard_success = gen_weekly_scorecard(
                previous_scorecard_filename,
                cyhy_db_section,
                scan_db_section,
                use_docker,
                nolog,
            )
            if scorecard_success == 0:
                logging.info("Successfully generated CybEx Scorecard")
                # Create latest directory where we can stash a copy of the
                # latest CybEx scorecard.  This is for the automated sending of
                # reports.
                latest = os.path.join(WEEKLY_REPORT_BASE_DIR,
                                      SCORECARD_OUTPUT_DIR, "latest")
                if os.path.exists(latest):
                    shutil.rmtree(latest)
                os.mkdir(latest)
                # Find the CybEx scorecard that was just created in the
                # scorecard output directory and copy it to the latest
                # directory.
                cybex_scorecards = filter(
                    os.path.isfile,
                    glob.glob(
                        "../{}/Federal_Cyber_Exposure_Scorecard-*.pdf".format(
                            SCORECARD_OUTPUT_DIR)),
                )
                cybex_scorecards.sort(key=lambda x: os.path.getmtime(x))
                shutil.copy(cybex_scorecards[-1], latest)

                # Move newly-created cybex_scorecard.json to SCORECARD_JSON_OUTPUT_DIR
                new_json_files = filter(os.path.isfile,
                                        glob.glob("cybex_scorecard_*.json"))
                new_json_files.sort(key=lambda x: os.path.getmtime(x))
                shutil.move(
                    new_json_files[-1],
                    os.path.join(
                        WEEKLY_REPORT_BASE_DIR,
                        SCORECARD_JSON_OUTPUT_DIR,
                        new_json_files[-1],
                    ),
                )
            else:
                logging.warning("Failed to generate CybEx Scorecard")
        else:
            logging.critical(
                "No previous CybEx Scorecard JSON file found - continuing without creating CybEx Scorecard"
            )

        if args["--no-snapshots"]:
            # Skip creation of snapshots
            logging.info(
                "Skipping snapshot creation due to --no-snapshots parameter")
            reports_to_generate = create_list_of_reports_to_generate(db)
        else:
            reports_to_generate = generate_weekly_snapshots(
                db, cyhy_db_section)

        sample_report(cyhy_db_section, scan_db_section,
                      nolog)  # Create the sample (anonymized) report
        gen_weekly_reports(db, reports_to_generate, cyhy_db_section,
                           scan_db_section, use_docker, nolog)

        # Fetch list of third-party report IDs with children; if a third-party
        # report has no children, there is no point in generating a report
        # for it
        third_party_report_ids = [
            i["_id"] for i in db.RequestDoc.collection.find(
                {
                    "report_types": REPORT_TYPE.CYHY_THIRD_PARTY,
                    "children": {
                        "$exists": True,
                        "$ne": []
                    },
                },
                {"_id": 1},
            )
        ]

        if third_party_report_ids:
            if args["--no-snapshots"]:
                # Skip creation of third-party snapshots
                logging.info("Skipping third-party snapshot creation "
                             "due to --no-snapshots parameter")
                successful_tp_snaps = third_party_report_ids
            else:
                # Create snapshots needed for third-party reports
                successful_tp_snaps, failed_tp_snaps = create_third_party_snapshots(
                    db, cyhy_db_section, third_party_report_ids)

            # Generate third-party reports
            successful_tp_reports, failed_tp_reports = generate_third_party_reports(
                db, cyhy_db_section, scan_db_section, nolog,
                successful_tp_snaps)
        else:
            logging.info(
                "No third-party reports to generate; skipping this step")

        pull_cybex_ticket_csvs(db)
    finally:
        sync_all_tallies(db)
        if not args["--no-pause"]:
            resume_commander(db, control_id)

        if args["--no-snapshots"]:
            logging.info("Number of snapshots generated: 0")
            logging.info("Number of snapshots failed: 0")
        else:
            logging.info(
                "Number of snapshots generated: %d",
                len(successful_snapshots),
            )
            logging.info(
                "  Third-party snapshots generated: %d",
                len(successful_tp_snaps),
            )
            logging.info(
                "Number of snapshots failed: %d",
                len(failed_snapshots),
            )
            logging.info(
                "  Third-party snapshots failed: %d",
                len(failed_tp_snaps),
            )
            if failed_snapshots:
                logging.error("Failed snapshots:")
                for i in failed_snapshots:
                    if i in failed_tp_snaps:
                        logging.error("%s (third-party)", i)
                    else:
                        logging.error(i)

        logging.info(
            "Number of reports generated: %d",
            len(successful_reports + successful_tp_reports),
        )
        logging.info(
            "  Third-party reports generated: %d",
            len(successful_tp_reports),
        )
        logging.info("Number of reports failed: %d",
                     len(failed_reports + failed_tp_reports))
        logging.info(
            "  Third-party reports failed: %d",
            len(failed_tp_reports),
        )
        if failed_reports or failed_tp_reports:
            logging.info("Failed reports:")
            for i in failed_reports + failed_tp_reports:
                if i in failed_tp_reports:
                    logging.error("%s (third-party)", i)
                else:
                    logging.error(i)

        logging.info("Total time: %.2f minutes",
                     (time.time() - start_time) / 60)
        logging.info("END\n\n")
Code example #16
def main():
    """Set up logging and call the notification-related functions."""
    args = docopt.docopt(__doc__, version="1.0.0")
    # Set up logging
    log_level = args["--log-level"]
    try:
        logging.basicConfig(
            format="%(asctime)-15s %(levelname)s %(message)s", level=log_level.upper()
        )
    except ValueError:
        logging.critical(
            '"{}" is not a valid logging level.  Possible values '
            "are debug, info, warning, and error.".format(log_level)
        )
        return 1

    # Set up database connection
    db = database.db_from_config(args["CYHY_DB_SECTION"])

    # Create all necessary output subdirectories
    create_output_directories()

    # Change to the correct output directory
    os.chdir(os.path.join(NOTIFICATIONS_BASE_DIR, NOTIFICATION_ARCHIVE_DIR))

    # Build list of CyHy orgs
    cyhy_org_ids = build_cyhy_org_list(db)
    logging.debug("Found {} CYHY orgs: {}".format(len(cyhy_org_ids), cyhy_org_ids))

    # Create notification PDFs for CyHy orgs
    master_report_key = Config(args["CYHY_DB_SECTION"]).report_key
    num_pdfs_created = generate_notification_pdfs(db, cyhy_org_ids, master_report_key)
    logging.info("{} notification PDFs created".format(num_pdfs_created))

    # Create a symlink to the latest notifications.  This is for the
    # automated sending of notification emails.
    latest_notifications = os.path.join(
        NOTIFICATIONS_BASE_DIR, "notification_archive/latest"
    )
    if os.path.exists(latest_notifications):
        os.remove(latest_notifications)
    os.symlink(
        os.path.join(NOTIFICATIONS_BASE_DIR, NOTIFICATION_ARCHIVE_DIR),
        latest_notifications,
    )

    if num_pdfs_created:
        # Email all notification PDFs in
        # NOTIFICATIONS_BASE_DIR/notification_archive/latest
        os.chdir(CYHY_MAILER_DIR)
        p = subprocess.Popen(
            [
                "docker-compose",
                "-f",
                "docker-compose.yml",
                "-f",
                "docker-compose.cyhy-notification.yml",
                "up",
            ],
            stdout=subprocess.PIPE,
            stdin=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        data, err = p.communicate()
        return_code = p.returncode

        if return_code == 0:
            logging.info("Notification emails successfully sent")
        else:
            logging.error("Failed to email notifications")
            logging.error("Stderr report detail: %s%s", data, err)

        # Delete all NotificationDocs where generated_for is not []
        result = db.NotificationDoc.collection.delete_many(
            {"generated_for": {"$ne": []}}
        )
        logging.info(
            "Deleted {} notifications from DB (corresponding to "
            "those just emailed out)".format(result.deleted_count)
        )
    else:
        logging.info("Nothing to email - skipping this step")

    # Delete all NotificationDocs where ticket_owner is not a CyHy org, since
    # we are not currently sending out notifications for non-CyHy orgs
    result = db.NotificationDoc.collection.delete_many(
        {"ticket_owner": {"$nin": cyhy_org_ids}}
    )
    logging.info(
        "Deleted {} notifications from DB (owned by "
        "non-CyHy organizations, which do not currently receive "
        "notification emails)".format(result.deleted_count)
    )

    # Stop logging and clean up
    logging.shutdown()
    return 0
Code example #17
def main():
    global __doc__
    __doc__ = re.sub("COMMAND_NAME", __file__, __doc__)
    args = docopt(__doc__, version="v0.0.1")
    db = database.db_from_config(args["--section"])
    congressional_data(db, args["START_DATE"], args["END_DATE"])
Code example #18
File: risk_me.py  Project: cisagov/ncats-webd
def main():
    global __doc__
    __doc__ = re.sub("COMMAND_NAME", __file__, __doc__)
    args = docopt(__doc__, version="v0.0.1")
    db = database.db_from_config(args["--section"])
    get_ranking_lists(db)
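Several of the examples above rewrite their docstring with re.sub("COMMAND_NAME", __file__, __doc__) before handing it to docopt. That pattern presumes a module docstring containing a COMMAND_NAME placeholder, roughly like this sketch (the real usage text is not shown, so the arguments here are illustrative):

"""Usage:
  COMMAND_NAME [--section SECTION] START_DATE END_DATE

Options:
  --section SECTION   Configuration section to use.
"""

The substitution makes docopt's usage and help output display the actual script name instead of the placeholder.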