Example #1
def new_alerts_present(username):
    """Check for new alerts based on the last access time for the specified user."""
    ret = False
    try:
        db_path, err = config.get_db_path()
        if err:
            raise Exception(err)
        query = 'select * from admin_alerts where user = "%s"' % username
        entry, err = db.get_single_row(db_path, query)
        if err:
            raise Exception(err)
        if not entry:
            # No last-access record for this user yet, so treat all alerts as new
            return True, None
        # print entry
        query = 'select count(*) as count from alerts where last_update_time > "%d"' % entry[
            'last_refresh_time']
        # print query
        entry, err = db.get_single_row(db_path, query)
        # print entry, err
        if err:
            raise Exception(err)
        if entry and int(entry['count']) > 0:
            ret = True

    except Exception, e:
        return False, 'Error checking for new alerts : %s' % str(e)
    else:
        return ret, None
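All of these helpers share one convention: they do not raise to their callers, they return a (value, error) tuple. A minimal caller sketch (the badge function and its fallback value are hypothetical, not part of the library):

def alerts_badge(username):
    # Errors arrive as the second tuple element, never as an exception.
    present, err = new_alerts_present(username)
    if err:
        return '?'  # hypothetical unknown-state indicator
    return '*' if present else ''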
Example #2
def get_org_info():
    """Return organization's information

    args:       None
    returns:    dict with keys-
                    'org_name',
                    'unit_name',
                    'unit_id',
                    'subunit_name',
                    'subunit_id'
    """
    ret_dict = {}
    try:
        db_path, err = config.get_db_path()
        if err:
            raise Exception(err)

        cmd = "select * from org_info"
        ret, err = db.get_single_row(db_path, cmd)
        if err:
            raise Exception(err)
        if ret:
            # return the dict which has all the fields
            ret_dict = ret

    except Exception, e:
        return None, 'Could not fetch details of the organization: %s' % e
    else:
        return ret_dict, None
Example #3
def get_event_notification_trigger(ent_id):
    """Get the trigger entry corresponding to the passed trigger id

    """
    return_dict = None
    try:
        db_path, err = config.get_db_path()
        if err:
            raise Exception(err)
        query = 'select * from event_notification_triggers where ent_id=%d' % int(
            ent_id)
        return_dict, err = db.get_single_row(db_path, query)
        if err:
            raise Exception(err)
        if return_dict:
            cron_list, err = scheduler_utils.get_cron_tasks(
                return_dict['cron_task_id'])
            if err:
                raise Exception(err)
            if cron_list:
                return_dict['schedule_description'] = cron_list[0][
                    'schedule_description']
                return_dict['description'] = cron_list[0]['description']
    except Exception, e:
        return None, 'Error retrieving event notification trigger : %s' % str(e)
    else:
        return return_dict, None
Example #4
def get_log_level():
    d = None
    log_level = None
    try:
        db_path, err = config.get_db_path()
        if err:
            raise Exception(err)
        d, err = db.get_single_row(db_path,
                                   "select * from global_params where id=1")
        if err:
            raise Exception(err)
        if d and "logging_level" in d:
            log_level = d["logging_level"]
        else:
            # Not yet set so insert the default and return it
            cmd_list = []
            cmd = [
                "insert into global_params (logging_level, id) values(?,?)",
                (
                    logging.INFO,
                    1,
                )
            ]
            cmd_list.append(cmd)
            ret, err = db.execute_iud(db_path, cmd_list)
            if err:
                raise Exception(err)
            log_level = logging.INFO
    except Exception, e:
        return None, "Error getting log level : %s" % str(e)
    else:
        return log_level, None
Example #5
def generate_alert_email_body(alert_id):
    """Given an alert id, generate the appropriate email message body for that alert

    """
    msg = None
    try:
        db_path, err = config.get_db_path()
        if err:
            raise Exception(err)
        query = 'select * from alerts where alert_id = "%s"' % alert_id
        entry, err = db.get_single_row(db_path, query)
        if err:
            raise Exception(err)
        if not entry:
            raise Exception('Alert %s not found' % alert_id)
        fat, err = datetime_utils.convert_from_epoch(
            entry['first_alert_time'], return_format='str', str_format='%c', to='local')
        if err:
            raise Exception(err)
        lut, err = datetime_utils.convert_from_epoch(
            entry['last_update_time'], return_format='str', str_format='%c', to='local')
        if err:
            raise Exception(err)
        msg = 'Alert time: %s\nAlert message: %s.' % (lut, entry['alert_str'])

        if entry['repeat_count'] > 1:
            msg += ' This alert has been generated %d times since %s.' % (
                entry['repeat_count'], fat)
    except Exception, e:
        return None, 'Error generating alert email message body : %s' % str(e)
    else:
        return msg, None
Example #6
def get_scan_configurations(db_location=None, scan_configuration_id=None, standalone=False, include_deleted=False):
    configurations = None
    try:
        if not db_location:
            db_location, err = get_db_location(standalone)
            if err:
                raise Exception(err)
        if scan_configuration_id:
            query = 'select * from scan_configurations where id="%d"'%scan_configuration_id
        else:
            query = 'select * from scan_configurations'
        if not include_deleted:
            if scan_configuration_id:
                query += ' and status_id != -1'
            else:
                query += ' where status_id != -1'
        #print query
        configurations, err = db.get_multiple_rows(db_location, query)
        if err:
            raise Exception(err)
        for c in configurations:
            query = 'select count(*) as num_files, sum(size) as total_size, count(distinct(extension)) as num_extensions from file_info where scan_configuration_id="%d"'%c['id']
            row, err = db.get_single_row(db_location, query)
            if err:
                raise Exception(err)
            if row:
                if not row['total_size']:
                    row['total_size'] = 0
                row['total_size_human_readable'] = filesize.get_naturalsize(row['total_size'])
                c.update(row)
    except Exception, e:
        return None, 'Error loading Storage Insights configurations : %s' % str(e)
    else:
        return configurations, None
Example #7
def save_email_settings(d):
    """Save the email server settings

    """
    try:
        db_path, err = config.get_db_path()
        if err:
            raise Exception(err)
        d1, err = db.get_single_row(db_path, "select * from email_config")
        if err:
            raise Exception(err)
        if d1:
            # Config exists so update
            ret, err = db.execute_iud(db_path, [["update email_config set server=?, port=?, username=?, pswd=?, tls=? where id = ?", (
                d["server"], d["port"], d["username"], d["pswd"], d["tls"], 1,)]])
            if err:
                raise Exception(err)

        else:
            # No config exists so insert
            ret, err = db.execute_iud(db_path, [["insert into email_config (server, port, username, pswd, tls, id) values (?,?,?,?,?,?)", (
                d["server"], d["port"], d["username"], d["pswd"], d["tls"], 1, )]])
            if err:
                raise Exception(err)
    except Exception, e:
        return False, 'Error saving email settings : %s' % str(e)
    else:
        return True, None
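db.execute_iud accepts a list of [statement, params] pairs, so several writes can be submitted in one call; treating the batch as a single transaction is an assumption about the helper, not a documented guarantee. A sketch reusing tables from these examples (db_path obtained via config.get_db_path() as above):

import logging

cmd_list = [
    ["update email_config set port=? where id = ?", (587, 1,)],
    ["update global_params set logging_level=? where id = ?", (logging.INFO, 1,)],
]
ret, err = db.execute_iud(db_path, cmd_list)
if err:
    raise Exception(err)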
Example #8
def set_log_level(level):
    try:
        db_path, err = config.get_db_path()
        if err:
            raise Exception(err)
        if level not in [logging.DEBUG, logging.INFO, logging.WARNING, logging.ERROR, logging.CRITICAL]:
            logger.setLevel(logging.INFO)
        else:
            d1, err = db.get_single_row(
                db_path, "select * from global_params")
            if err:
                raise Exception(err)
            cmd_list = []
            if d1:
                cmd = [
                    "update global_params set logging_level=? where id = ?", (level, 1,)]
            else:
                cmd = [
                    "insert into global_params (logging_level, id) values(?,?)", (level, 1,)]
            cmd_list.append(cmd)
            ret, err = db.execute_iud(db_path, cmd_list)
            if err:
                raise Exception(err)
            logger.setLevel(level)
    except Exception, e:
        return False, 'Error setting log level : %s' % str(e)
    else:
        return True, None
Example #9
def get_file_info(param, param_type='id', db_location=None, standalone=False):
    result = None
    try:
        if not param:
            raise Exception('Unspecified param')
        if param_type not in ['id', 'path']:
            raise Exception('Invalid param type')
        if param_type == 'id':
            query = 'select * from file_info where id="%s"'%param
        elif param_type == 'path':
            query = 'select * from file_info where path="%s"'%param
        if not db_location:
            db_location, err = scan_utils.get_db_location(standalone)
            if err:
                raise Exception(err)
        result, err = db.get_single_row(db_location, query)
        if err:
            raise Exception(err)
        if not result:
            raise Exception('No file information found')
        if 'last_modify_time' in result:
            tm_str, err = datetime_utils.convert_from_epoch(result['last_modify_time'], return_format='str', str_format='%c', to='local')
            if err:
                raise Exception(err)
            result['last_modify_time_str'] = tm_str
        if 'size' in result:
            result['size_human_readable'] = filesize.get_naturalsize(result['size'])
    except Exception, e:
        return None, 'Error retrieving file info : %s' % str(e)
    else:
        return result, None
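A short usage sketch for get_file_info; the path below is hypothetical:

info, err = get_file_info('/exports/data/report.pdf', param_type='path')
if err:
    raise Exception(err)
print 'Size : %s, modified : %s' % (info['size_human_readable'],
                                    info['last_modify_time_str'])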
Example #10
def _get_repeat_entry(alert_dict, past_x_seconds=900):
    """Check the db to see if the passed alert is a repeat of an existing alert entry in the past x seconds

    """
    result = None
    try:
        db_path, err = config.get_db_path()
        if err:
            raise Exception(err)
        now, err = datetime_utils.get_epoch(when='now', num_previous_days=0)
        if err:
            raise Exception(err)
        if alert_dict['component']:
            query = "select * from alerts where severity_type_id = '%d' and subsystem_type_id = '%d' and component = '%s' and alert_str=\"%s\" and  first_alert_time >= %d;" % (
                alert_dict['severity_type_id'],
                alert_dict['subsystem_type_id'], alert_dict['component'],
                alert_dict['alert_str'], now - past_x_seconds)
        else:
            query = "select * from alerts where severity_type_id = '%d' and subsystem_type_id = '%d' and alert_str=\"%s\" and  first_alert_time >= %d);" % (
                alert_dict['severity_type_id'],
                alert_dict['subsystem_type_id'], alert_dict['alert_str'],
                now - past_x_seconds)
        # print query
        result, err = db.get_single_row(db_path, query)
        if err:
            raise Exception(err)
        # print result, err
    except Exception, e:
        return None, 'Error checking for repeats : %s' % str(e)
    else:
        return result, None
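These %-formatted queries are easy to get quoting wrong. If using the sqlite3 module directly is acceptable (an assumption, since it is unclear whether the db helper accepts bound parameters), the same lookup can be written with placeholders:

import sqlite3

def _get_repeat_entry_sketch(db_path, alert_dict, cutoff):
    # Same lookup as above, with bound parameters instead of % formatting.
    conn = sqlite3.connect(db_path)
    conn.row_factory = sqlite3.Row
    try:
        cur = conn.execute(
            "select * from alerts where severity_type_id=? and "
            "subsystem_type_id=? and alert_str=? and first_alert_time >= ?",
            (alert_dict['severity_type_id'], alert_dict['subsystem_type_id'],
             alert_dict['alert_str'], cutoff))
        return cur.fetchone(), None
    finally:
        conn.close()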
Example #11
def get_file_info_row(db_path, path):
    row = False
    try:
        query = 'select * from file_info where path="%s"'%path
        row, err = db.get_single_row(db_path, query)
        if err:
            raise Exception(err)
    except Exception, e:
        return None, 'Error retrieving file info row : %s' % str(e)
    else:
        return row, None
Example #12
def get_auth_settings():
    """Get the current authentication settings from the db."""
    d = None
    try:
        db_path, err = config.get_db_path()
        if err:
            raise Exception(err)
        d, err = db.get_single_row(
            db_path, "select * from samba_global_common where id=1")
        if err:
            raise Exception(err)
        if d and 'security' in d and d['security'] == "ads":
            d1, err = db.get_single_row(
                db_path, "select * from samba_global_ad where id=1")
            if err:
                raise Exception(err)
            if d1:
                d.update(d1)
    except Exception, e:
        return None, 'Error loading authentication settings : %s' % str(e)
    else:
        return d, None
Example #13
def get_rsync_share_details(name):
    share = None
    try:
        db_path, err = config.get_db_path()
        if err:
            raise Exception(err)
        share, err = db.get_single_row(
            db_path, "select * from rsync_shares where name='%s'" % name)
        if err:
            raise Exception(err)
        if not share:
            raise Exception("Specified share not found")
    except Exception, e:
        return None, 'Error retrieving the share details : %s' % str(e)
    else:
        return share, None
Example #14
def get_event_notification_configuration(enc_id):
    """Get a particular email configuration..

    """
    ret = None
    try:
        db_path, err = config.get_db_path()
        if err:
            raise Exception(err)
        ret, err = db.get_single_row(
            db_path, "select * from event_notification_configuration_email where enc_id = %d" % enc_id)
        if err:
            raise Exception(err)
    except Exception, e:
        return None, 'Error retrieving email notification configuration : %s' % str(e)
    else:
        return ret, None
Example #15
def load_email_settings():
    """Load the email server settings

    """
    d = None
    try:
        db_path, err = config.get_db_path()
        if err:
            raise Exception(err)
        d, err = db.get_single_row(
            db_path, "select * from email_config where id = 1")
        if err:
            raise Exception(err)
    except Exception, e:
        return None, 'Error loading email settings : %s' % str(e)
    else:
        return d, None
Example #16
def get_task(task_id):
    """Get a particular entry with the passed task_id from the tasks table."""
    task = None
    try:

        db_path, err = config.get_db_path()
        if err:
            raise Exception(err)

        cmd = "select * from tasks where task_id=='%d'" % int(task_id)
        task, err = db.get_single_row(db_path, cmd)
        if err:
            raise Exception(err)
        if not task:
            raise Exception('Selected task not found')
    except Exception, e:
        return None, 'Error retrieving task details : %s' % e
    else:
        return task, None
Example #17
def get_share_info(mode, index):
    """Get the info for a share either based on name or by db id"""
    d = None
    try:
        db_path, err = config.get_db_path()
        if err:
            raise Exception(err)
        query = None
        if mode == "by_id":
            query = "select * from samba_shares where share_id = %s" % index
        else:
            query = "select * from samba_shares where name = %s" % index
        d, err = db.get_single_row(db_path, query)
        if err:
            raise Exception(err)

    except Exception, e:
        return None, 'Error loading CIFS share information : %s' % str(e)
    else:
        return d, None
Example #18
def get_db_details(standalone=False):
    db_details = None
    try:
        db_details = {}
        db_location, err = get_db_location(standalone)
        if err:
            raise Exception(err)
        size = os.path.getsize(db_location)
        db_details['size'] = size
        db_details['size_str'] = filesize.get_naturalsize(size)
        query = 'select count(*) as count from file_info'
        row, err = db.get_single_row(db_location, query)
        if err:
            raise Exception(err)
        db_details['num_files'] = row['count']

    except Exception, e:
        return None, 'Error loading Storage Insights database details : %s' % str(e)
    else:
        return db_details, None
Example #19
def delete_rsync_share(name):
    try:
        db_path, err = config.get_db_path()
        if err:
            raise Exception(err)
        cmd_list = []
        check, err = db.get_single_row(
            db_path, "select * from rsync_shares where name='%s'" % name)
        if err:
            raise Exception(err)
        if not check:
            raise Exception("Specified share not found")
        cmd = ["delete from rsync_shares where name='%s'" % name]
        cmd_list.append(cmd)
        ret, err = db.execute_iud(db_path, cmd_list)
        if err:
            raise Exception(err)
        conf, err = _generate_rsync_config()
        if err:
            raise Exception(err)
    except Exception, e:
        return False, 'Error deleting the share: %s' % str(e)
    else:
        return True, None
Example #20
def get_count(action='READ', past_x_seconds=60):
    count = -1
    try:
        config_dir, err = config.get_config_dir()
        if err:
            raise Exception(err)

        if action == 'ALL':
            query = "select count(*) as count from logs where timestamp >= Datetime('now', '-%d seconds');" % past_x_seconds
        else:
            query = "select count(*) as count from logs where actions = '%s' and timestamp >= Datetime('now', '-%d seconds');" % (
                action.upper(), past_x_seconds)
        # print query
        db_path = '%s/db/inotify.db' % config_dir
        ret, err = db.get_single_row(db_path, query)
        if err:
            raise Exception(err)
        count = ret['count']
        # print ret
    except Exception, e:
        return -1, 'Error getting counts : %s' % str(e)
    else:
        return count, None
Example #21
def get_integralstor_uuid():
    """Return Integralstor's UUID fields

    args:       None
    returns:    dict with keys 'uuid_hex' & 'uuid_str'

    """
    id_dict = None
    try:
        db_path, err = config.get_db_path()
        if err:
            raise Exception(err)

        cmd = "select * from integralstor_info"
        ret, err = db.get_single_row(db_path, cmd)
        if err:
            raise Exception(err)
        if ret:
            # return the dict which has all the fields
            id_dict = ret
    except Exception, e:
        return None, 'Could not fetch UUID: %s' % e
    else:
        return id_dict, None
Example #22
def create_rsync_share(name, path, comment, list, readonly, uid, gid):
    try:
        db_path, err = config.get_db_path()
        if err:
            raise Exception(err)

        check, err = db.get_single_row(
            db_path, "select * from rsync_shares where name='%s'" % name)
        if err:
            raise Exception(err)
        if check:
            raise Exception("Share already exists. Use a different share name")

        cmd_list = []
        cmd = ["insert into rsync_shares (name,path,comment,list,readonly,uid,gid) values(?,?,?,?,?,?,?)", (
            name, path, comment, list, readonly, uid, gid)]
        cmd_list.append(cmd)
        ret, err = db.execute_iud(db_path, cmd_list)
        if err:
            raise Exception(err)
        conf, err = _generate_rsync_config()
        if err:
            raise Exception(err)
    except Exception, e:
        return False, 'Error saving rsync config settings : %s' % str(e)
    else:
        return True, None
Example #23
def update_auth_settings(d):
    """Update the authentication settings in the db to what has been passed in the dict"""
    try:
        db_path, err = config.get_db_path()
        if err:
            raise Exception(err)
        auth_settings, err = get_auth_settings()
        if err:
            raise Exception(err)
        if not auth_settings:
            cmd = ["insert into samba_global_common (id, workgroup, netbios_name, security, include_homes_section) values (?, ?, ?, ?, ?)",
                   (1, d["workgroup"], d["netbios_name"], d["security"], True,)]
        else:
            cmd = ["update samba_global_common set workgroup=?, netbios_name=?, security=?, include_homes_section=? where id = ?",
                   (d["workgroup"], d["netbios_name"], d["security"], True, 1,)]
        cmd_list = []
        cmd_list.append(cmd)
        if d["security"] == "ads":
            d1, err = db.get_single_row(
                db_path, "select * from samba_global_ad")
            if err:
                raise Exception(err)
            if d1:
                cmd = ["update samba_global_ad set realm=?, password_server=?, ad_schema_mode=?, id_map_min=?, id_map_max=?, password_server_ip=?  where id = ?",
                       (d["realm"], d["password_server"], 'rfc2307', 16777216, 33554431, d["password_server_ip"], 1, )]
                cmd_list.append(cmd)
            else:
                cmd = ["insert into samba_global_ad (realm, password_server, ad_schema_mode, id_map_min, id_map_max, password_server_ip, id) values(?,?,?,?,?,?,?)", (
                    d["realm"], d["password_server"], 'rfc2307', 16777216, 33554431, d["password_server_ip"], 1,)]
                cmd_list.append(cmd)
        # print cmd_list
        ret, err = db.execute_iud(db_path, cmd_list)
        if err:
            raise Exception(err)
    except Exception, e:
        return False, 'Error saving authentication settings : %s' % str(e)
    else:
        return True, None
Example #24
def export_old_alerts(older_than_days=1):
    """Move all alerts older than the older_than_days into a file in
    /var/log/integralstor/logs/exported dir

    """
    try:
        cutoff_seconds, err = datetime_utils.get_epoch(
            when='now', num_previous_days=older_than_days)
        if err:
            raise Exception(err)
        #query = "select * from alerts where last_update_time < Datetime('now', '-%d days') order by alert_id;"%older_than_days
        query = "select * from alerts where last_update_time < %d order by alert_id;" % cutoff_seconds
        full_alerts_list, err = _get_and_parse_alerts(query)
        if err:
            raise Exception(err)
        alerts_list = []
        # print 'full', full_alerts_list
        db_path, err = config.get_db_path()
        if err:
            raise Exception(err)
        for a in full_alerts_list:
            # If it is still in the holding table then skip it so it can get
            # processed..
            query = "select * from event_notifications_holding where event_id=%d;" % int(
                a['alert_id'])
            ret, err = db.get_single_row(db_path, query)
            # print ret, err
            if err:
                raise Exception(err)
            if not ret:
                alerts_list.append(a)
        # print 'to export', alerts_list
        if alerts_list:
            delete_commands = []
            export_dir_name, err = config.get_exported_logs_dir_path()
            if err:
                raise Exception(err)
            if not os.path.exists(export_dir_name):
                os.makedirs(export_dir_name)
            now, err = datetime_utils.get_epoch(when='now',
                                                num_previous_days=0)
            if err:
                raise Exception(err)
            now_str, err = datetime_utils.convert_from_epoch(
                now,
                return_format='str',
                str_format='%Y_%m_%d_%H_%M',
                to='local')
            export_filename = 'alerts_%s' % now_str
            # print export_filename
            with open('%s/%s' % (export_dir_name, export_filename), 'w') as f:
                f.write(
                    'First alert time(UTC)  |  Last update time(UTC) | Repeat count | Subsystem | Severity | Alert message\n'
                )
                f.write(
                    '-------------------------------------------------------------------------------------------\n'
                )
                for al in alerts_list:
                    f.write('%s | %s | %d | %s | %s | %s\n\n' %
                            (al['first_alert_time'], al['last_update_time'],
                             al['repeat_count'], al['subsystem'],
                             al['severity'], al['alert_str']))
                    delete_commands.append([
                        'delete from alerts where alert_id="%d"' %
                        int(al['alert_id'])
                    ])
            # print delete_commands
            db_path, err = config.get_db_path()
            if err:
                raise Exception(err)
            ret, err = db.execute_iud(db_path, delete_commands)
            if err:
                raise Exception(err)

    except Exception, e:
        return False, 'Error exporting old alerts : %s' % str(e)
    else:
        return True, None
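export_old_alerts is presumably driven from a scheduled task; callers only rely on the (status, error) pair:

ok, err = export_old_alerts(older_than_days=7)
if err:
    print 'Alert export failed : %s' % err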
Example #25
def export_old_audits(min_to_export=1000, export_count=500):
    """Export the oldest export_count audits if the total number of audits exceeds min_to_export

    """
    try:
        # print min_to_export, export_count
        db_path, err = config.get_db_path()
        if err:
            raise Exception(err)
        ret, err = db.get_single_row(
            db_path, 'select count(*) as count from audit')
        if err:
            raise Exception(err)
        if ret['count'] > int(min_to_export):
            query = "select * from audit order by audit_id limit %d;" % int(
                export_count)
            full_audit_list, err = db.get_multiple_rows(db_path, query)
            if err:
                raise Exception(err)
            # print full_audit_list
            audit_list = []
            for a in full_audit_list:
                # If it is still in the holding table then skip it so it can
                # get processed..
                query = "select * from event_notifications_holding where event_id=%d;" % int(
                    a['audit_id'])
                ret, err = db.get_single_row(db_path, query)
                # print ret, err
                if err:
                    raise Exception(err)
                if not ret:
                    audit_list.append(a)

            # print audit_list
            if audit_list:
                delete_commands = []
                export_dir_name, err = config.get_exported_logs_dir_path()
                if err:
                    raise Exception(err)
                if not os.path.exists(export_dir_name):
                    os.makedirs(export_dir_name)
                now, err = datetime_utils.get_epoch(
                    when='now', num_previous_days=0)
                if err:
                    raise Exception(err)
                now_str, err = datetime_utils.convert_from_epoch(
                    now, return_format='str', str_format='%Y_%m_%d_%H_%M', to='local')
                export_filename = 'audits_%s' % now_str
                with open('%s/%s' % (export_dir_name, export_filename), 'w') as f:
                    f.write(
                        'Audit time(UTC)  |  Audit type | Performed by | Performed from | Audit message\n')
                    f.write(
                        '-------------------------------------------------------------------------------------------\n')
                    for entry in audit_list:
                        # print entry
                        aud, err = _parse_audit_entry(entry)
                        if err:
                            raise Exception(err)
                        # print aud, err
                        f.write('%s | %s | %s | %s | %s\n\n' % (
                            aud['time'], aud['action'], aud['username'], aud['ip'], aud['action_str']))
                        delete_commands.append(
                            ['delete from audit where audit_id="%d"' % int(aud['audit_id'])])
                # print delete_commands
                ret, err = db.execute_iud(db_path, delete_commands)
                if err:
                    raise Exception(err)

    except Exception, e:
        return False, 'Error exporting old audits : %s' % str(e)
    else:
        return True, None
Example #26
def initiate_scan(scan_configuration_id, standalone=False):
    scan_id = 0
    db_location = None
    conn = None
    error_list = []
    successful_creation_modification_transactions_count = 0
    failed_creation_modification_transactions_count = 0
    successful_deletion_transactions_count = 0
    failed_deletion_transactions_count = 0
    scanned_files_count = 0
    scanned_dirs_count = 0
    new_files_count = 0
    modified_files_count = 0
    deleted_files_count = 0
    try:

        global scan_killed
        scan_killed = False
        print '-----------------------------SETTINGS---------------------------------'

        db_location, err = get_db_location(standalone)
        if err:
            raise Exception(err)
        configs, err = get_scan_configurations(
            db_location=db_location,
            scan_configuration_id=scan_configuration_id,
            standalone=standalone,
            include_deleted=False)
        if err:
            raise Exception(err)
        if not configs:
            raise Exception('Specified configuration does not exist')

        scan_dir = configs[0]['scan_dir']
        exclude_dirs = configs[0]['exclude_dirs']
        generate_checksum = configs[0]['generate_checksum']
        db_transaction_size = configs[0]['db_transaction_size']
        record_history = configs[0]['record_history']

        if generate_checksum:
            print 'Generating file checksums'
        else:
            print 'Not generating file checksums'
        if record_history:
            print 'Recording file event history'
        else:
            print 'Not recording file event history'
        dir_str = 'Collecting statistics for the folder : %s..\n' % scan_dir
        exclude_dirs_list = None
        if exclude_dirs:
            dir_str += '..but excluding the following directories : "%s"' % exclude_dirs
            exclude_dirs_list = []
            components = exclude_dirs.split(',')
            for component in components:
                exclude_dirs_list.append(
                    '%s/%s' % (scan_dir, component.strip().lstrip('/')))
        print dir_str
        print '----------------------------------------------------------------------'

        pid = os.getpid()
        scan_id, err = log_scan_start(db_location, configs[0], pid)
        if err:
            raise Exception(err)

        counter = 0
        cmd_list = []
        transaction_file_list = []
        if not os.path.exists(scan_dir):
            raise Exception('Specified scan directory %s does not exist.' %
                            scan_dir)
        print 'Collecting information about the directory structure for %s..' % scan_dir
        for root, dirs, files in os.walk(unicode(scan_dir)):
            if scan_killed:
                break
            scanned_dirs_count += 1
            #print 'Processing directory %s'%root
            if exclude_dirs_list and root in exclude_dirs_list:
                #Config says exclude this dir so skip it.
                print 'Skipping excluded directory : %s' % root
                continue
            for file in files:
                full_path = os.path.normpath('%s/%s' % (root, file))
                #print full_path
                try:
                    db_to_be_updated = False
                    scanned_files_count += 1
                    transaction_file_list.append(full_path)
                    if os.path.islink(full_path):
                        continue
                    extension = None
                    if full_path:
                        rt, extension = os.path.splitext(full_path)
                    mtime = os.path.getmtime(full_path)
                    #print 'mtime', mtime
                    size = os.path.getsize(full_path)
                    chksum = None
                    if generate_checksum:
                        chksum, err = checksum_utils.generate_checksum(
                            full_path, algorithm='sha256')
                        if err:
                            error_list.append((full_path, err))
                            print '!!!!', err
                            continue

                    #First check if we have some recorded info about this file.
                    query = 'select * from file_info where path = "%s"' % full_path
                    file_info_row, err = db.get_single_row(db_location, query)
                    if err:
                        raise Exception(err)

                    #Now decide whether to insert/update the DB..
                    insert_update_file_info = False
                    update_file_info = False
                    if not file_info_row:
                        #Info not in the DB so insert..
                        #Insert it if the path does not exist or else ignore..
                        cmd = [
                            'insert or ignore into file_info(path, extension, size, checksum, last_modify_time, last_access_time, last_scan_id, scan_configuration_id) values (?,?,?,?,?,?,?,?)',
                            (
                                full_path,
                                extension,
                                size,
                                chksum,
                                int(mtime),
                                int(mtime),
                                scan_id,
                                scan_configuration_id,
                            )
                        ]
                        cmd_list.append(cmd)
                        new_files_count += 1
                        db_to_be_updated = True
                    else:
                        #Exists in DB but has anything changed with the file?? If so, update
                        if file_info_row['last_modify_time'] != int(mtime):
                            #File mtime does not match whats in the DB so modify
                            update_file_info = True
                        if generate_checksum:
                            if file_info_row['checksum'] != chksum:
                                #File checksum does not match whats in the DB so modify
                                update_file_info = True
                        if update_file_info:
                            modified_files_count += 1
                            #This file has NOT been processed already in a previous run of the same scan_id
                            if generate_checksum:
                                update_cmd = [
                                    'update file_info set size=?, last_modify_time=?, last_access_time=?, extension=?, checksum=?, last_scan_id = ?, scan_configuration_id=? where path = ?',
                                    (
                                        size,
                                        int(mtime),
                                        int(mtime),
                                        extension,
                                        chksum,
                                        scan_id,
                                        scan_configuration_id,
                                        full_path,
                                    )
                                ]
                            else:
                                update_cmd = [
                                    'update file_info set size=?, last_modify_time=?, last_access_time=?, extension=?, last_scan_id=?, scan_configuration_id=? where path = ?',
                                    (
                                        size,
                                        int(mtime),
                                        int(mtime),
                                        extension,
                                        scan_id,
                                        scan_configuration_id,
                                        full_path,
                                    )
                                ]
                            cmd_list.append(update_cmd)
                            db_to_be_updated = True
                    if record_history:
                        cmd = [
                            'insert or ignore into file_events_history(file_info_id, path, events, event_time, last_scan_id, scan_configuration_id) values ((select id from file_info where path=?),?,?,?,?, ?)',
                            (
                                full_path,
                                full_path,
                                'MODIFY',
                                int(mtime),
                                scan_id,
                                scan_configuration_id,
                            )
                        ]
                        cmd_list.append(cmd)
                        db_to_be_updated = True
                    if db_to_be_updated:
                        counter += 1
                    if cmd_list and (counter != 0) and (
                            counter % db_transaction_size == 0):
                        print 'Scanned %d files' % scanned_files_count
                        #print cmd_list
                        ret, err = db.execute_iud(db_location,
                                                  cmd_list,
                                                  get_rowid=False)
                        #print ret, err
                        if err:
                            failed_creation_modification_transactions_count += counter
                            for transaction_file in transaction_file_list:
                                error_list.append((
                                    transaction_file,
                                    'Error inserting/updating into the database : %s'
                                    % err))
                        else:
                            successful_creation_modification_transactions_count += counter
                            pd = {
                                'scan_id':
                                scan_id,
                                'status_id':
                                1,
                                'status_str':
                                'Scanned %d directories and %d files. Processed %d creation/modification transactions successfully with %d errors. New files detected : %d, files modified since last scan : %d'
                                %
                                (scanned_dirs_count, scanned_files_count,
                                 successful_creation_modification_transactions_count,
                                 failed_creation_modification_transactions_count,
                                 new_files_count, modified_files_count),
                                'scanned_dirs_count':
                                scanned_dirs_count,
                                'scanned_files_count':
                                scanned_files_count,
                                'successful_creation_modification_transactions_count':
                                successful_creation_modification_transactions_count,
                                'failed_creation_modification_transactions_count':
                                failed_creation_modification_transactions_count,
                                'successful_deletion_transactions_count':
                                successful_deletion_transactions_count,
                                'failed_deletion_transactions_count':
                                failed_deletion_transactions_count,
                                'new_files_count':
                                new_files_count,
                                'modified_files_count':
                                modified_files_count,
                                'deleted_files_count':
                                deleted_files_count
                            }
                            ret, err = log_scan_progress(db_location, pd)
                        cmd_list = []
                        transaction_file_list = []
                        counter = 0
                except Exception, e:
                    # Record the failure against this file and keep scanning.
                    error_list.append((full_path, str(e)))
        if not scan_killed:
            if cmd_list:
                print 'Processing the last batch of %d files.' % counter
                #print cmd_list
                #Still have unprocessed files so insert them!
                ret, err = db.execute_iud(db_location,
                                          cmd_list,
                                          get_rowid=False)
                if err:
                    failed_creation_modification_transactions_count += counter
                    for transaction_file in transaction_file_list:
                        error_list.append(
                            (transaction_file,
                             'Error inserting/updating into the database : %s' % err))
                else:
                    successful_creation_modification_transactions_count += counter
            pd = {
                'scan_id': scan_id,
                'status_id': 1,
                'status_str': 'Processed all creations and modifications. Now processing deletions. Scanned %d directories and %d files. Processed %d creation/modification transactions successfully with %d errors' % (
                    scanned_dirs_count, scanned_files_count,
                    successful_creation_modification_transactions_count,
                    failed_creation_modification_transactions_count),
                'scanned_dirs_count': scanned_dirs_count,
                'scanned_files_count': scanned_files_count,
                'successful_creation_modification_transactions_count': successful_creation_modification_transactions_count,
                'failed_creation_modification_transactions_count': failed_creation_modification_transactions_count,
                'successful_deletion_transactions_count': successful_deletion_transactions_count,
                'failed_deletion_transactions_count': failed_deletion_transactions_count,
                'new_files_count': new_files_count,
                'modified_files_count': modified_files_count,
                'deleted_files_count': deleted_files_count
            }
            ret, err = log_scan_progress(db_location, pd)

            #Now scan for deleted files that are in our db but have actually been deleted..
            print 'Scanning for deleted files..'
            query = 'select * from file_info where scan_configuration_id="%d" and last_scan_id != "%d"' % (
                scan_configuration_id, scan_id)
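            # Any row whose last_scan_id was not stamped with the current
            # scan_id was not seen on disk during this walk, so it is a
            # candidate deletion.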
            (cur, conn), err = db.get_query_cursor_and_connection(
                db_location, query)
            if err:
                raise Exception(err)
            file_info_row, err = db.get_next_row(cur)
            if err:
                raise Exception(err)
            cmd_list = []
            transaction_file_list = []
            while file_info_row:
                if not os.path.isfile(file_info_row['path']):
                    #File no longer exists so delete the file_info entry and put in a delete into the file_events_history table
                    deleted_files_count += 1
                    transaction_file_list.append(file_info_row['path'])
                    if record_history:
                        # TODO: change this to use IntegralSTOR's time utility calls
                        now_time = int(time.time())
                        cmd_list.append([
                            'insert or ignore into file_events_history(file_info_id, path, events, event_time, last_scan_id, scan_configuration_id) values ((select id from file_info where path=?),?,?,?,?, ?)',
                            (
                                file_info_row['path'],
                                file_info_row['path'],
                                'DELETE',
                                int(now_time),
                                scan_id,
                                scan_configuration_id,
                            )
                        ])
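                    # Drop the stale file_info row itself; the history row
                    # above keeps a record of the deletion when enabled.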
                    cmd_list.append([
                        'delete from file_info where id="%d"' %
                        file_info_row['id']
                    ])
                #Now read the next row
                file_info_row, err = db.get_next_row(cur)
                if err:
                    raise Exception(err)
            if conn:
                db.close_connection(conn)
            #print cmd_list
            if cmd_list:
                ret, err = db.execute_iud(db_location, cmd_list)
                if err:
                    # Note: 'counter' belongs to the creation/modification
                    # phase, so count the queued deletions instead.
                    failed_deletion_transactions_count += len(transaction_file_list)
                    for transaction_file in transaction_file_list:
                        error_list.append(
                            (transaction_file,
                             'Error updating the database : %s' % err))
                    raise Exception(err)
                else:
                    successful_deletion_transactions_count += len(transaction_file_list)
            if failed_creation_modification_transactions_count > 0 or failed_deletion_transactions_count > 0:
                status_id = 3
            else:
                status_id = 2
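            # Status codes as used in this function appear to be:
            # 1 = in progress, 2 = completed, 3 = completed with errors,
            # 4 = paused (inferred from usage; not a confirmed definition).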
            pd = {
                'scan_id': scan_id,
                'status_id': status_id,
                'status_str': 'Processed all creation/modification/deletion changes. %d file creations, %d file modifications and %d file deletions detected. %d failed creation/modification transactions and %d failed deletion transactions.' % (
                    new_files_count, modified_files_count, deleted_files_count,
                    failed_creation_modification_transactions_count,
                    failed_deletion_transactions_count),
                'scanned_dirs_count': scanned_dirs_count,
                'scanned_files_count': scanned_files_count,
                'successful_creation_modification_transactions_count': successful_creation_modification_transactions_count,
                'failed_creation_modification_transactions_count': failed_creation_modification_transactions_count,
                'successful_deletion_transactions_count': successful_deletion_transactions_count,
                'failed_deletion_transactions_count': failed_deletion_transactions_count,
                'new_files_count': new_files_count,
                'modified_files_count': modified_files_count,
                'deleted_files_count': deleted_files_count
            }
            ret, err = log_scan_progress(db_location, pd)
        else:
            time_str = time.strftime('%a, %d %b %Y %H:%M:%S')
            pd = {
                'scan_id': scan_id,
                'status_id': 4,
                'status_str': 'Paused at %s' % time_str
            }
            ret, err = log_scan_progress(db_location, pd)
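The inserts and updates above imply a specific SQLite schema for the file_info and file_events_history tables. Below is a minimal sketch of that schema, reconstructed from the queries in this example; the column types and the unique constraints are assumptions (the 'insert or ignore' statements only make sense if file_info.path and the file_events_history event tuple are unique), not a confirmed IntegralSTOR definition.

import sqlite3


def create_scan_tables(db_location):
    """Create the tables the scan code reads and writes (sketch)."""
    conn = sqlite3.connect(db_location)
    try:
        conn.executescript('''
            create table if not exists file_info (
                id integer primary key autoincrement,
                path text unique,        -- 'insert or ignore' keys off this
                extension text,
                size integer,
                checksum text,
                last_modify_time integer,
                last_access_time integer,
                last_scan_id integer,
                scan_configuration_id integer
            );
            create table if not exists file_events_history (
                id integer primary key autoincrement,
                file_info_id integer,
                path text,
                events text,
                event_time integer,
                last_scan_id integer,
                scan_configuration_id integer,
                -- assumed constraint: makes the repeated 'insert or ignore'
                -- calls in the scan loop idempotent across re-scans
                unique (file_info_id, events, event_time)
            );
        ''')
        conn.commit()
    finally:
        conn.close()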