Example #1
def queue_email_message_text(msg_template, alert_level, msg_text):
    """
    # Queue message in the DB For sending, then try to send it after queuing it

    :param msg_template: Enum notifier.template
    :param alert_level: alert to be included in email subject
    :param msg_text: Message text
    :return:
    """
    try:
        logger.log_debug(
            "queue_message(): Constructing email message. "
            "Template: '{}'".format(msg_template),
            module_name)

        if msg_template == TEMPLATE.WATCHLIST_FILE_NOT_FOUND:
            msg_dict = construct_msg_watchlist_not_found(
                alert_level, functions.get_datetime())

            row_id = db.insert_email_msg(msg_dict)
            if row_id > 0:
                logger.log_debug(
                    "queue_message(): Email message has been queued for sending. "
                    "Template: '{}'".format(TEMPLATE.WATCHLIST_FILE_NOT_FOUND),
                    module_name)

        elif msg_template == TEMPLATE.WATCHLIST_FILE_EMPTY:
            msg_dict = construct_msg_watchlist_file_empty(
                alert_level, functions.get_datetime())

            row_id = db.insert_email_msg(msg_dict)
            if row_id > 0:
                logger.log_debug(
                    "queue_message(): Email message has been queued for sending. "
                    "Template: '{}'".format(TEMPLATE.WATCHLIST_FILE_NOT_FOUND),
                    module_name)

        elif msg_template == TEMPLATE.WATCHLIST_FILE_READ_ERROR:
            msg_dict = construct_msg_watchlist_read_error(
                alert_level, functions.get_datetime())

            row_id = db.insert_email_msg(msg_dict)
            if row_id > 0:
                logger.log_debug(
                    "queue_message(): Email message has been queued for sending. "
                    "Template: '{}'".format(
                        TEMPLATE.WATCHLIST_FILE_READ_ERROR), module_name)
    except Exception as e:
        logger.log_error(
            "queue_message(): Failed to queue email message in the database. More info: {}"
            .format(e), module_name)

    # Try to send the queued message(s); failures are intentionally swallowed here
    try:
        send_queued_messages()
    except Exception:
        pass
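A minimal usage sketch (not part of the original source); the template member and alert level below are illustrative assumptions chosen only to show the call signature documented above.

# Hypothetical call; the TEMPLATE member and alert level are assumptions.
queue_email_message_text(
    TEMPLATE.WATCHLIST_FILE_EMPTY,
    "WARNING",
    "The watch list file is empty; no files are being monitored.")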
Example #2
    def test_get_datetime(self):
        from functions import get_datetime

        with patch('datetime.datetime') as datetime:
            datetime.now.return_value.strftime.return_value = 'foo'

            self.assertDictEqual(get_datetime(), {'date': 'foo', 'time': 'foo'})
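The test above only pins down the return shape of functions.get_datetime. A minimal sketch of an implementation consistent with that test, assuming two module-level format strings (the format strings are assumptions, not taken from the original source):

import datetime

# Assumed format strings; the real project may use different ones.
DATE_FORMAT = "%Y-%m-%d"
TIME_FORMAT = "%H:%M:%S"


def get_datetime():
    # Return the current date and time as a dict, matching the
    # {'date': ..., 'time': ...} shape asserted in the test above.
    now = datetime.datetime.now()
    return {
        "date": now.strftime(DATE_FORMAT),
        "time": now.strftime(TIME_FORMAT),
    }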
Example #3
def log_file_creation(file_path, file_size, file_hash):
    try:
        # Log format: [%time%] "%file_path%" %file_size% %file_hash%

        log_row = "[{}] \"{}\" {} {}".format(functions.get_datetime(),
                                             file_path, file_size, file_hash)

        with open(FILE_CREATION_LOG_FILE_PATH, 'a+') as f:
            f.write(log_row)
            f.write("\n")
    except Exception as e:
        log_critical(e, "logger.py")
Example #4
    def __init__(self, dirpath: str):
        self._content_add = list()
        self.start_dt = get_datetime(datetime_format=LOG_DATETIME_FORMAT)

        self.path = os.path.join(dirpath, LOG_FILENAME)

        if not os.path.exists(self.path):
            # Create the log directory if it does not exist yet
            os.makedirs(dirpath, exist_ok=True)
            self._reset()
Example #5
def log_file_rename(old_file_path, new_file_path, file_size, file_hash):
    try:
        # Log format: [%time%] "%old_file_path%" "%new_file_path%" %file_size% %file_hash%

        log_row = "[{}] \"{}\" \"{}\" {} {}".format(functions.get_datetime(),
                                                    old_file_path,
                                                    new_file_path, file_size,
                                                    file_hash)

        with open(FILE_RENAME_LOG_FILE_PATH, 'a+') as f:
            f.write(log_row)
            f.write("\n")
    except Exception as e:
        log_critical(e, "logger.py")
Example #6
def upload_file(username):
    """
    This function takes an image as input and detects the faces in it. Also it updates the database accordingly.
    :param username: User Name of the user who is uploading the image
    :return: Displays necessary data as JSON object containing all the necessary details of the faces detected
    """
    start = time.time()
    upload_folder = os.path.basename('temp_img_dir')
    app.config['UPLOAD_FOLDER'] = upload_folder

    time_now = str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
    cameracode = request.form['camcode']
    camera_id = cameracode[3:]

    info = extract_info(functions.s_cam_table, functions.s_cam_id, camera_id)[1]

    # Check for an empty result before indexing into it
    if len(info) == 0:
        return redirect(url_for('error', error_str=errordict[113], error_code=113))

    if info[0] == 128:
        return redirect(url_for('error', error_str=info[1], error_code=info[0]))

    if str(info[0][functions.s_mrkdel]) == '1':
        return redirect(url_for('error', error_str=errordict[114], error_code=114))

    bucket_id = info[0][functions.s_buc_id]
    oid = info[0][functions.s_org_id]
    bucket_code = 'BUC' + str(bucket_id).zfill(9)
    o_code = 'ORG' + str(oid).zfill(5)

    file = request.files['image']
    imgtxn_id = str((functions.initial_transaction(bucket_id, oid, camera_id))[1]).zfill(10)
    file.filename = (imgtxn_id + '_' + time_now + '.jpg').replace(' ', '_')
    f = os.path.join(app.config['UPLOAD_FOLDER'], file.filename)
    file.save(f)

    time_capture = get_datetime(functions.dir_path + functions.temp_img_dir + file.filename)
    if len(time_capture) == 0:
        time_capture = time_now

    img_path = '/Organisations' + o_code + '/' + bucket_code + '/' + cameracode + '_dump/' + file.filename
    full_img_txn(imgtxn_id, img_path, time_capture, time_now)

    json1 = input_image(cameracode, time_now, imgtxn_id, bucket_id, oid, camera_id, time_capture)
    json_2 = json.loads(json1)
    end = time.time()
    time_taken = end - start
    res = json.dumps(json_2, indent=4, sort_keys=True) + str(time_taken)
    return render_template('result.html', value=res, username=username)
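A minimal client-side sketch (not from the original source): the route URL and camera code below are assumptions for illustration; only the 'camcode' form field and 'image' file field come from the view above.

import requests

# Hypothetical client call; URL and camera code are illustrative assumptions.
with open("sample.jpg", "rb") as img:
    resp = requests.post(
        "http://localhost:5000/upload/alice",   # assumed route for upload_file(username)
        data={"camcode": "CAM000000001"},       # 'camcode' form field used by the view
        files={"image": img},                   # 'image' file field used by the view
    )
print(resp.status_code)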
Example #7
    def save(self):
        """
        Write changes to log
        """
        self.stop_dt = get_datetime(datetime_format=LOG_DATETIME_FORMAT)

        with open(self.path, 'a', encoding='utf-8') as log:
            log.write('\n')

            log.writelines(self._make_header())

            for line in self._content_add:
                log.write(f'{line}\n')

        # Reset content list
        self._content_add = list()
Example #8
def create_file_record(file_path):
    """
        # Calculate sha256 for given file path then insert a record into the database
        # FILE_COUNTER is increased by 1 if file processed successfully
        :param file_path: file path
        :return: True if db insertion success, false if insertion failed
        """
    if not os.path.isfile(file_path):
        return False

    try:
        logger.log_debug(
            "create_file_record(): Creating a record for '{}'".format(
                file_path), module_name)

        sha256 = functions.sha256_file(file_path)
        check_date = functions.get_datetime()
        file_size = functions.get_file_size(file_path)

        file_record = {
            "path": file_path,
            "hash": sha256,
            "file_size": file_size,
            "exists_on_disk": "True",
            "datetime_last_check": check_date
        }

        if db.insert_file_record(file_record) > 0:
            print("[+] Created a record for '{}'".format(file_path))
            logger.log_debug(
                "create_file_record(): Created a DB file record for '{}'".
                format(file_path), module_name)
            return True
        else:
            print("[+] Failed to create a record for '{}'".format(file_path))
            logger.log_debug(
                "create_file_record(): Failed to create a DB file record for '{}'"
                .format(file_path), module_name)
            return False

    except sqlite3.IntegrityError:
        print("[*] Ignoring '{}' Already has a record.".format(file_path))
        logger.log_debug(
            "create_file_record(): The file '{}' is already exist in the database"
            .format(file_path), module_name)
        return False
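A minimal usage sketch (not from the original source); the path below is an illustrative assumption.

# Hypothetical call; the file path is an assumption for illustration.
if create_file_record("/var/www/html/index.php"):
    print("Record created")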
Example #9
def start_routine_scan():
    """
    # Checks if the registered file(s) hash changed since last hash check.
    # Detects new files added in directories being watched and has no record in the DB (is genuinely added ?).
    # Detects if a file(s) is deleted from disk.f
    # Detects if a file(s) is renamed.
    :return: tuple (list of files that is changed since last check, list of files that has no record in th db,
    list of files that is deleted from disk and has a record in the DB)
    """

    logger.log_debug("Started a routine scan", module_name)
    print("[+] Started a routine scan")

    files_changed_list = []  # files whose hash no longer matches the DB record
    new_files_path_list = []  # new files added since the last scan (no record in the DB)
    deleted_files_path_list = []  # files deleted from disk that still have a record in the DB
    renamed_files_path_list = []  # files that have been renamed

    reset_processed_files_counter()

    # Read the watch list file
    watch_list_file_lines = read_file_watch_list()

    # Build the file path list from the watch list lines
    file_path_list = process_watch_list(watch_list_file_lines)
    # Exclude DB file
    db_path = db.get_db_path()
    if db_path in file_path_list:
        file_path_list.remove(db_path)

    # Paths already handled as renames; skipped below to avoid conflicts
    processed_rename_file_path_list = []

    # Detect new files in the watched directories
    # Detect file changes
    # Detect file renames
    for file_path in file_path_list:
        logger.log_debug(
            "start_routine_scan(): Processing '{}' ".format(file_path),
            module_name)
        try:
            file_hash = functions.sha256_file(file_path)
            file_size = functions.get_file_size(file_path)

            # Get all database records that belongs to file
            file_records = db.get_file_records_by_hash(file_hash)

            if file_records is None or len(file_records) == 0:
                file_records = db.get_file_records(file_path)

            # Detect File rename
            if file_records is not None and not is_file_path_exist(
                    file_records, file_path):
                is_there_files_renamed = False

                for file_record in file_records:
                    try:
                        file_path_in_db = file_record[1]

                        if (not os.path.exists(file_path_in_db)
                                and file_path_in_db not in processed_rename_file_path_list
                                and file_path not in processed_rename_file_path_list):
                            processed_rename_file_path_list.append(file_path)
                            processed_rename_file_path_list.append(
                                file_path_in_db)
                            is_there_files_renamed = True
                        else:
                            continue

                        logger.log_file_rename(file_path_in_db, file_path,
                                               file_size, file_hash)
                        logger.log_debug(
                            "Detected a file RENAME. '{}' => '{}'".format(
                                file_path_in_db, file_path), module_name)

                        print(
                            "[*] Detected a file RENAME. '{}' => '{}'".format(
                                file_path_in_db, file_path))

                        incident = {
                            "old_path": file_path_in_db,
                            "new_path": file_path,
                            "hash": file_hash,
                            "detection_time": functions.get_datetime()
                        }

                        renamed_files_path_list.append(incident)
                        db.update_file_path(file_hash, file_path_in_db,
                                            file_path)
                        db.update_exists_on_disk_value(file_path, "True")

                    except Exception as e:
                        logger.log_error(
                            "start_routine_scan(): Unable to process file '{}'. An error has occurred: {}"
                            .format(file_path, e), module_name)

                if is_there_files_renamed:
                    continue

            # Check if it's a new file (no hash recorded in the DB yet)
            db_hash = db.get_file_hash(file_path)
            if db_hash is None:
                try:
                    logger.log_file_creation(file_path, file_size, file_hash)
                    logger.log_info(
                        "New file detected '{}' '{}' '{}'".format(
                            file_path, file_hash, file_size), module_name)
                    logger.log_debug(
                        "start_routine_scan(): Processed '{}' ".format(
                            file_path), module_name)

                    file_record_dict = {
                        "path": file_path,
                        "hash": file_hash,
                        "size": file_size,
                        "detection_time": functions.get_datetime()
                    }
                    new_files_path_list.append(file_record_dict)
                    create_file_record(file_path)

                    print("[*] New file detected '{}' '{}'".format(
                        file_path, file_hash))

                    continue
                except Exception as e:
                    logger.log_error(
                        "start_routine_scan(): Unable to process file '{}'. An error has occurred: {}"
                        .format(file_path, e), module_name)

            # Detect file change: check whether the file has changed since the last check
            if file_records is not None:
                for file_record in file_records:
                    try:
                        file_hash_in_db = file_record[2]
                        file_size_in_db = file_record[3]

                        if file_hash_in_db is not None and file_hash != file_hash_in_db:
                            logger.log_file_change(file_path, file_hash_in_db,
                                                   file_size_in_db, file_size,
                                                   file_hash)

                            logger.log_debug(
                                "Detected a file CHANGE in '{}' '{}' => '{}' '{}' => '{}'"
                                .format(file_path, file_hash_in_db, file_hash,
                                        file_size_in_db, file_size),
                                module_name)

                            # update the DB with the new file hash
                            db.update_file_hash(file_path, file_hash)

                            inc = {
                                "path": file_path,
                                "previous_hash": file_hash_in_db,
                                "new_hash": file_hash,
                                "previous_size": file_size_in_db,
                                "new_size": file_size,
                                "detection_time": functions.get_datetime()
                            }
                            files_changed_list.append(inc)

                            print(
                                "[*] Detected a file CHANGE in '{}' '{}' => '{}'"
                                .format(file_path, file_hash_in_db, file_hash))
                    except Exception as e:
                        logger.log_error(
                            "start_routine_scan(): Unable to process file '{}'. An error has occurred: {}"
                            .format(file_path, e), module_name)

            logger.log_debug(
                "start_routine_scan(): Processed '{}' ".format(file_path),
                module_name)
        except Exception as e:
            logger.log_error(
                "start_routine_scan(): Unable to process file '{}'. An error has occurred: {}"
                .format(file_path, e), module_name)
            continue

    try:
        deleted_list = get_file_path_list_in_db_not_exists_on_disk()
        for f_path in deleted_list:
            inc = {
                "path": f_path,
                "size": db.get_file_size(f_path),
                "hash": db.get_file_hash(f_path),
                "detection_time": functions.get_datetime()
            }

            logger.log_file_deletion(inc["path"], inc["size"], inc["hash"])

            logger.log_debug("Detected a file DELETION. '{}'".format(f_path),
                             module_name)
            print("[*] Detected a file DELETION. '{}'".format(f_path))
            deleted_files_path_list.append(inc)
    except Exception as e:
        logger.log_error(
            "start_routine_scan(): An error has occurred while detecting deleted files. {}"
            .format(e), module_name)

    return files_changed_list, new_files_path_list, deleted_files_path_list, renamed_files_path_list
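A minimal caller sketch (not from the original source) showing how the four returned lists might be consumed; the summary report is an illustrative assumption.

# Hypothetical caller; the summary reporting is illustrative only.
changed, new_files, deleted, renamed = start_routine_scan()

if changed or new_files or deleted or renamed:
    print("[*] Scan summary: {} changed, {} new, {} deleted, {} renamed".format(
        len(changed), len(new_files), len(deleted), len(renamed)))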