Code example #1
0
    def handle(self, *args, sudo_pwd="", **kwargs):
        """Monitor the project's tmux windows forever, periodically restarting airomon-ng.

        Checks window status every STATUS_MONITORING_INTERVAL_IN_SECONDS and
        restarts airomon-ng roughly every PROCESS_RESTART_INTERVAL_IN_SECONDS.

        :param sudo_pwd: sudo password forwarded to restart_airomon_ng
        """
        tmux_server = libtmux.Server()
        tmux_session = tmux_server.find_where(
            {"session_name": settings.TMUX_SESSION_NAME}
        )
        # True division: this can be a non-integer float when the restart
        # interval is not an exact multiple of the monitoring interval.
        restart_frequency = (
            settings.PROCESS_RESTART_INTERVAL_IN_SECONDS
            / settings.STATUS_MONITORING_INTERVAL_IN_SECONDS
        )
        # Lazy %-args so formatting only happens if the record is emitted.
        logger.info(
            "I will restart processes after %d status check(s)...", restart_frequency
        )
        monitoring_count = 0

        while True:
            start_time = time.time()

            monitor_tmux_windows(tmux_session)
            sleep_until_interval_is_complete(
                start_time, settings.STATUS_MONITORING_INTERVAL_IN_SECONDS
            )
            monitoring_count += 1

            # BUG FIX: was `==`, which never matches when restart_frequency is
            # a non-integer float — airomon-ng would then never be restarted.
            if monitoring_count >= restart_frequency:
                restart_airomon_ng(tmux_session, sudo_pwd)
                monitoring_count = 0

            print()
Code example #2
0
def aggregate_data_to_db():
    """Continuously aggregate raw event data into hourly and daily buckets.

    Each cycle catches up on at most one week of not-yet-aggregated data,
    commits all aggregations atomically, then sleeps out the remainder of
    UPLOAD_INTERVAL_IN_SECONDS.
    """
    logger.info(f"{settings.TERM_LBL} Starting to aggregate event data ...")

    while True:
        cycle_start = time.time()
        current_hour = get_most_recent_hour()
        # Look back no further than one week for unaggregated data.
        catch_up_horizon = current_hour - timedelta(hours=7 * 24)

        # Aggregate every unaggregated hour/day up to the preceding one,
        # plus the currently active one, inside a single transaction.
        with transaction.atomic():
            pending_hours = get_unaggregated_hours(
                catch_up_horizon, current_hour - timedelta(hours=1)
            ) + [get_most_recent_hour()]
            for hour in pending_hours:
                hourly_aggregate = aggregate_hour(hour)
                hourly_aggregate.save()
                logger.info(f"Saved {hourly_aggregate}")

            pending_days = get_unaggregated_days(
                catch_up_horizon, current_hour - timedelta(hours=24)
            ) + [get_most_recent_hour().replace(hour=0)]
            for day in pending_days:
                daily_aggregate = aggregate_day(day)
                daily_aggregate.save()
                logger.info(f"Saved {daily_aggregate}")

        sleep_until_interval_is_complete(
            cycle_start, settings.UPLOAD_INTERVAL_IN_SECONDS
        )

        print()
Code example #3
0
def sensor_data_to_db(tmp_path: str):
    """Continuously transfer the latest sensor readings into the database.

    :param tmp_path: directory handed to the sensor module for its reading
    """
    logger.info(
        f"{settings.TERM_LBL} Starting to transfer the sensor input to db ...")
    sensor = get_sensor()

    required_columns = ("observable_id", "time_seen", "value", "observations")

    while True:
        cycle_start = time.time()
        readings_df = sensor.get_latest_reading_as_df(tmp_path)

        # Complain loudly about any column the sensor module failed to supply.
        for column in required_columns:
            if column not in readings_df.columns:
                logger.error(
                    "The sensor module function 'get_latest_reading_as_df' did not return a dataframe"
                    " with the column %s."
                    " Instead, the dataframe only has these columns: %s",
                    column, readings_df.columns)

        # Hash observable IDs — presumably hash_observable_ids itself decides
        # whether hashing is enabled; confirm against its implementation.
        readings_df["observable_id"] = readings_df["observable_id"].map(
            lambda raw_id: hash_observable_ids(str(raw_id))
        )

        update_database_with_new_and_updated_observables(readings_df)
        sleep_until_interval_is_complete(
            cycle_start, settings.SENSOR_LOG_INTERVAL_IN_SECONDS)

        print()
Code example #4
0
File: record_events_to_db.py  Project: norenjr/aileen
def csv_file_to_db(tmp_path: str, csv_filename_prefix: str):
    """Continuously read the airodump CSV and sync devices into the database.

    :param tmp_path: directory the airodump CSV lives in
    :param csv_filename_prefix: filename prefix identifying the CSV to read
    """
    logger.info(f"{settings.TERM_LBL} Starting to watch the airodump file ...")

    while True:
        iteration_start = time.time()

        devices_df = read_airodump_csv_and_return_df(
            tmp_path, csv_filename_prefix
        )
        update_database_with_new_and_updated_devices(devices_df)
        sleep_until_interval_is_complete(
            iteration_start, settings.AIRODUMP_LOG_INTERVAL_IN_SECONDS
        )

        print()
Code example #5
0
File: upload_data.py  Project: norenjr/aileen
    def handle(self, *args, **kwargs):
        """Upload events, aggregations and tmux status to the configured server.

        Aborts immediately when no BoxSettings row exists; otherwise loops
        forever, uploading once per UPLOAD_INTERVAL_IN_SECONDS.
        """
        box_settings = BoxSettings.objects.first()
        if box_settings is None:
            logger.error(
                f"{settings.TERM_LBL} No box settings found. Upload attempt aborted ..."
            )
            return
        logger.info(
            f"{settings.TERM_LBL} Starting the uploader against {box_settings.server_url} ..."
        )

        while True:
            start_time = time.time()

            # BUG FIX: was `is True`, an identity check that silently skips
            # the upload for truthy non-bool values (e.g. "1" read from an
            # env var). Plain truthiness is the intended semantics.
            if settings.UPLOAD_EVENTS:
                upload_latest_events()
            upload_latest_aggregations()
            upload_tmux_status()
            sleep_until_interval_is_complete(
                start_time, settings.UPLOAD_INTERVAL_IN_SECONDS)

            print()