Example #1
    def execute(self):
        logging.info("executing process engine")
        data_folder = dataset_scheme["folder"]

        # load data from scheme above
        for log_obj in dataset_scheme["data"]:
            log_file_data: CoreDataFrame = self.process_data(data_folder, log_obj)
            logging.info("dataframe shape: " + str(log_file_data.data.shape))

        # using the CoreDataFrame from process_data, perform user/entity analysis and extraction
        # get entities and users
        all_entities: dict = GetAllEntities().get()
        all_users: dict = GetAllUsers().get()
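For reference, a minimal sketch of the shape that dataset_scheme appears to take in this example, inferred only from the keys accessed above ("folder" and "data"); the folder path and log names are illustrative placeholders, not values from the project:

# Hypothetical dataset_scheme, inferred from the keys read by execute();
# the folder path and log names are placeholder assumptions.
dataset_scheme = {
    "folder": "datasets/raw",            # base folder passed to process_data
    "data": [                            # each entry is one log_obj
        {"log_name": "example_a.log"},
        {"log_name": "example_b.log"},
    ],
}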
Example #2
    def execute(self) -> bool:
        logging.info("executing process engine")
        loaded_data_scheme: dict = ReadJSONFileFS(DATASET_SCEME_URL).data

        # for each data source_group
        for source_group in loaded_data_scheme["source_groups"]:

            data_folder = source_group["folder"]

            # load data from scheme above
            for log_obj in source_group["data"]:

                logging.info("Process: model Log_obj: " +
                             str(log_obj["log_name"]))
                # TODO: load dataset index file holding dataset statuses

                # TODO: load "unprocessed" datasets, following the scheme loaded above

                # get the new dataframe
                log_file_dataset_session: DatasetSession = self.process_data(
                    data_folder, log_obj)

                # TODO: condition on log_type and location_type
                # TODO: using the CoreDataFrame from process_data, perform user/entity analysis and extraction
                extracted_users: UserSet = ExtractAllUsersCSV.get(
                    log_file_dataset_session, log_obj)
                test_user: str = str(list(extracted_users.users.keys())[:2])
                logging.info(
                    "ProcessEngine, execute, extracted_users, test user: " +
                    test_user)

                # store the extracted users, or update the storage
                # extracted_users.set_of_users
                # TODO: mark log_obj as processed afterwards

        # get entities
        all_entities: dict = GetAllEntities().get()

        # get users
        all_users: dict = GetAllUsers().get()

        # after reading the data, perform entity analysis using Entity types

        # adjust risk per entity

        return True
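For reference, a rough sketch of the JSON document that ReadJSONFileFS(DATASET_SCEME_URL) would return for this version of the loop; the "source_groups", "folder", "data", and "log_name" keys come from the code above, while the log_type/location_type fields (mentioned only in the TODO) and all concrete values are assumptions:

{
    "source_groups": [
        {
            "folder": "datasets/raw/group_a",
            "data": [
                {"log_name": "example_a.log", "log_type": "auth", "location_type": "local"},
                {"log_name": "example_b.log", "log_type": "system", "location_type": "local"}
            ]
        }
    ]
}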
Example #3
    def __call__(self, *args) -> str:
        display_type: str = args[0]
        logging.info("PriorGetDisplay.__call__ display type: " + display_type)

        # fetch the display data
        display = Display()
        logging.debug("Display type: " + str(display_type))
        logging.debug("Display types 1: " + str(APIType.GET_ALL_ENTITIES.value))
        logging.debug("Display types 2: " + str(APIType.GET_ALL_USERS.value))
        logging.debug("Display types 3: " + str(APIType.GET_SYSTEM_LOG.value))

        if display_type == APIType.GET_ALL_ENTITIES.value:
            all_entities: dict = GetAllEntities().get()
            display.set(all_entities)
        elif display_type == APIType.GET_ALL_USERS.value:
            all_users: dict = GetAllUsers().get()
            display.set(all_users)
        elif display_type == APIType.GET_SYSTEM_LOG.value:
            system_display: dict = display.get_system_display()
            display.set(system_display)
        else:
            raise ValueError("Unsupported API display type: " + str(display_type))
        return str(display.data)
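A short sketch of the APIType enum the dispatcher above branches on, plus an assumed invocation; the member names are taken from the code, but their string values and the no-argument PriorGetDisplay() construction are assumptions:

from enum import Enum

# Hypothetical APIType enum; member names match the branches above,
# string values are placeholders.
class APIType(Enum):
    GET_ALL_ENTITIES = "get_all_entities"
    GET_ALL_USERS = "get_all_users"
    GET_SYSTEM_LOG = "get_system_log"

# Assumed usage: call the instance with the display type string and get back
# the stringified display data.
display_data: str = PriorGetDisplay()(APIType.GET_ALL_USERS.value)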
Example #4
    def execute(self) -> bool:
        logging.info("executing process engine")
        data_folder = dataset_scheme["folder"]

        # load data from scheme above
        for log_obj in dataset_scheme["data"]:

            # TODO: load dataset index file holding dataset statuses

            # TODO: load "unprocessed" datasets, following the dataset_scheme defined above

            # get the new dataframe
            log_file_dataset_session: DatasetSession = self.process_data(
                data_folder, log_obj)

            # TODO: using the CoreDataFrame from process_data, perform user/entity analysis and extraction
            extracted_users: UserSet = ExtractAllUsersCSV.get(
                log_file_dataset_session, log_obj)
            logging.info("ProcessEngine, execute, extracted_users: " +
                         str(list(extracted_users.users.keys())[:2]))

            # store the extracted users, or update the storage
            # extracted_users.set_of_users
            # TODO: mark log_obj as processed afterwards

        # get entities
        all_entities: dict = GetAllEntities().get()

        # get users
        all_users: dict = GetAllUsers().get()

        # after reading the data, perform entity analysis using Entity types

        # adjust risk per entity

        # return a report for the execution round
        return True
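A minimal driver sketch, assuming the execute() method above belongs to a ProcessEngine class (the name appearing in its own log messages) with a no-argument constructor; both assumptions are taken only from the log strings:

import logging

logging.basicConfig(level=logging.INFO)

# Hypothetical entry point: construct the engine and run one execution round.
engine = ProcessEngine()
if engine.execute():
    logging.info("process engine run completed")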