Esempio n. 1
0
    def delete(self, db_session):
        """
        Delete this host and everything attached to it.

        Removes the session log directories of all inventory and install
        job history records, detaches related Inventory rows (host_id set
        to None), then deletes the host row and commits.
        """
        for inventory_job in self.inventory_job_history:
            # Guard against a NULL session_log: os.path.join(..., None)
            # would raise TypeError, which the old bare except hid.
            if inventory_job.session_log is not None:
                try:
                    shutil.rmtree(
                        os.path.join(get_log_directory(),
                                     inventory_job.session_log))
                except OSError:
                    # Best effort: a missing or locked log directory must
                    # not block host deletion.
                    pass

        for install_job in self.install_job_history:
            if install_job.session_log is not None:
                try:
                    shutil.rmtree(
                        os.path.join(get_log_directory(), install_job.session_log))
                except OSError:
                    pass  # best effort, same as above

        for inventory in self.inventory:
            inventory.update(db_session, host_id=None)

        db_session.delete(self)
        db_session.commit()
Esempio n. 2
0
    def generate_post_migrate_file_diff(self, ctx):
        """Diff the last completed Pre-Migrate job's session logs against this job's logs."""
        pre_migrate_job = get_last_completed_install_job_for_install_action(
            ctx.db_session, ctx.host.id, InstallAction.PRE_MIGRATE)
        if pre_migrate_job is None:
            return

        source_directory = os.path.join(get_log_directory(), pre_migrate_job.session_log)
        self.generate_file_diff(source_file_directory=source_directory,
                                target_file_directory=ctx.log_directory)
Esempio n. 3
0
    def purge_install_job_history(self, db_session, entry_per_host):
        """
        Trim InstallJobHistory so only the newest `entry_per_host` records
        remain per host, deleting each purged record's session log directory.

        Rolls back and logs on any failure.
        """
        # Scanning the InstallJobHistory table for records that should be deleted.
        try:
            skip_count = 0
            host_id = -1

            # Ordered by host then newest-first, so the first
            # `entry_per_host` rows per host are the ones to keep.
            install_jobs = db_session.query(InstallJobHistory) \
                .order_by(InstallJobHistory.host_id, InstallJobHistory.created_time.desc())

            for install_job in install_jobs:
                if install_job.host_id != host_id:
                    host_id = install_job.host_id
                    skip_count = 0

                if skip_count >= entry_per_host:
                    # Delete the session log directory
                    try:
                        if install_job.session_log is not None:
                            # os.path.join for consistency with the other
                            # session-log path call sites in this file.
                            shutil.rmtree(os.path.join(get_log_directory(),
                                                       install_job.session_log))
                    except Exception:
                        logger.exception('purge_install_job_history() hit exception - install job = %s', install_job.id)

                    db_session.delete(install_job)

                skip_count += 1

            db_session.commit()
        except Exception:
            db_session.rollback()
            logger.exception('purge_install_job_history() hit exception')
Esempio n. 4
0
File: log.py Progetto: smjurcak/csm
def api_get_session_logs(table):
    """
    Return the list of session log files for a job record as JSON.

    `table` selects which history table to look the record up in;
    the record id comes from the 'record_id' query argument.
    Responds 404 when the record or its log directory does not exist.
    """
    id = request.args.get("record_id")

    db_session = DBSession()
    # BUG FIX: install_job was left unbound (NameError) when `table` did
    # not match any known value; initialize it so unknown tables fall
    # through to abort(404).
    install_job = None
    if table == 'install_job':
        install_job = db_session.query(InstallJob).filter(InstallJob.id == id).first()
    elif table == 'install_job_history':
        install_job = db_session.query(InstallJobHistory).filter(InstallJobHistory.id == id).first()
    elif table == 'inventory_job_history':
        install_job = db_session.query(InventoryJobHistory).filter(InventoryJobHistory.id == id).first()

    if install_job is None:
        abort(404)

    log_folder = install_job.session_log
    file_path = os.path.join(get_log_directory(), log_folder)

    if not os.path.isdir(file_path):
        abort(404)

    # One row per log file: absolute path plus bare filename.
    rows = []
    for filename in get_file_list(file_path):
        rows.append({'filepath': os.path.join(file_path, filename),
                     'filename': filename})

    return jsonify(**{'data': rows})
Esempio n. 5
0
def aggregate_and_upload_log(ctx):
    """
    Concatenate the text logs of an install-job dependency chain into a
    single file in the Doc Central directory.

    The output filename encodes platform, hostname, from/to release and a
    timestamp; the POST_UPGRADE job in the chain records that filename in
    its job data so the UI can link to it.
    """
    chain = get_dependency_chain(ctx.db_session, ctx.install_job)

    filename_template = "%s_%s_%s-to-%s-%s.txt"
    platform = ctx.host.software_platform
    hostname = ctx.host.hostname.replace(' ', '_')

    from_release = get_from_release(ctx.db_session, chain)
    to_release = ctx.host.software_version

    timestamp = datetime.datetime.strftime(datetime.datetime.now(), "%Y_%m_%d_%H_%M_%S")
    filename = filename_template % (platform, hostname, from_release, to_release, timestamp)

    # "<software platform>_<CSM hostname>_<from release>-to-<to release>-<time stamp>.txt"
    output_file = os.path.join(get_doc_central_directory(), filename)

    with open(output_file, 'w') as outfile:
        for job_id in chain:
            # NOTE(review): assumes every id in the chain has an
            # InstallJobHistory row; .first() may return None — confirm.
            install_job = ctx.db_session.query(InstallJobHistory).filter(InstallJobHistory.id == job_id).first()
            if install_job.install_action == InstallAction.POST_UPGRADE:
                install_job.save_data('doc_central_log_file_path', filename)
                ctx.db_session.commit()

            log_directory = os.path.join(get_log_directory(), install_job.session_log)
            job_logs = os.listdir(log_directory)
            for log in job_logs:
                # Substring (not suffix) match on .txt/.log, skipping the
                # plugin/condoor logs and any HTML files.
                if ('.txt' in log or '.log' in log) and log not in ['plugins.log', 'condoor.log'] and '.html' not in log:
                    with open(os.path.join(log_directory, log)) as f:
                        # Banner line, then "<action>: <filename>", then the log body.
                        outfile.write('#' * 50 + "\n")
                        outfile.write("%s: %s \n" % (install_job.install_action, log))
                        outfile.write('#' * 50 + "\n")
                        outfile.write(f.read())
                        outfile.write("\n\n")
Esempio n. 6
0
File: log.py Progetto: smjurcak/csm
def api_get_session_logs(table):
    """
    List the session log files of a job record as a JSON payload.

    The record id is read from the 'record_id' query argument; `table`
    chooses the table to query. Responds 404 if the record or its log
    directory is missing.
    """
    id = request.args.get("record_id")

    db_session = DBSession()
    # BUG FIX: for an unrecognized `table` value, install_job used to be
    # unbound and the `is None` check raised NameError. Default to None
    # so we abort(404) instead.
    install_job = None
    if table == 'install_job':
        install_job = db_session.query(InstallJob).filter(
            InstallJob.id == id).first()
    elif table == 'install_job_history':
        install_job = db_session.query(InstallJobHistory).filter(
            InstallJobHistory.id == id).first()
    elif table == 'inventory_job_history':
        install_job = db_session.query(InventoryJobHistory).filter(
            InventoryJobHistory.id == id).first()

    if install_job is None:
        abort(404)

    log_folder = install_job.session_log
    file_path = os.path.join(get_log_directory(), log_folder)

    if not os.path.isdir(file_path):
        abort(404)

    # One entry per log file: full path plus bare name.
    rows = [{'filepath': os.path.join(file_path, filename),
             'filename': filename}
            for filename in get_file_list(file_path)]

    return jsonify(**{'data': rows})
Esempio n. 7
0
    def purge_install_job_history(self, db_session, entry_per_host):
        """
        Keep only the newest `entry_per_host` InstallJobHistory records per
        host; delete the session log directory of every purged record.

        Any failure rolls the transaction back and is logged.
        """
        # Scanning the InstallJobHistory table for records that should be deleted.
        try:
            skip_count = 0
            host_id = -1

            # Grouped by host, newest first: the first `entry_per_host`
            # rows of each host are retained.
            install_jobs = db_session.query(InstallJobHistory) \
                .order_by(InstallJobHistory.host_id, InstallJobHistory.created_time.desc())

            for install_job in install_jobs:
                if install_job.host_id != host_id:
                    host_id = install_job.host_id
                    skip_count = 0

                if skip_count >= entry_per_host:
                    # Delete the session log directory
                    try:
                        if install_job.session_log is not None:
                            # os.path.join for consistency with the other
                            # session-log path builders in this file.
                            shutil.rmtree(
                                os.path.join(get_log_directory(),
                                             install_job.session_log))
                    except Exception:
                        logger.exception(
                            'purge_install_job_history() hit exception - install job = %s',
                            install_job.id)

                    db_session.delete(install_job)

                skip_count += 1

            db_session.commit()
        except Exception:
            db_session.rollback()
            logger.exception('purge_install_job_history() hit exception')
Esempio n. 8
0
    def generate_post_upgrade_file_diff(self, ctx):
        """Diff the last successful Pre-Upgrade job's session logs against this job's logs."""
        pre_upgrade_job = get_last_successful_pre_upgrade_job(ctx.db_session, ctx.host.id)
        if pre_upgrade_job is None:
            return

        source_directory = os.path.join(get_log_directory(), pre_upgrade_job.session_log)
        self.generate_file_diff(source_file_directory=source_directory,
                                target_file_directory=ctx.log_directory)
Esempio n. 9
0
def create_log_directory(host_or_ip, id):
    """
    Create a per-session log directory named <host>-<timestamp>-<id>
    under the log root and return the directory name (relative).
    """
    host = host_or_ip.strip().replace('.', '_').replace(' ', '_')
    date_string = datetime.datetime.utcnow().strftime("%Y_%m_%d_%H_%M_%S")
    # Build the name once so the created directory and the returned value
    # can never diverge (it used to be concatenated twice).
    directory_name = host + '-' + date_string + '-' + str(id)
    directory = get_log_directory() + directory_name

    if not path.exists(directory):
        makedirs(directory)

    return directory_name
Esempio n. 10
0
def create_log_directory(host_or_ip, id):
    """
    Create the session log directory <host>-<timestamp>-<id> under the
    log root; return its (relative) name.
    """
    host = host_or_ip.strip().replace('.', '_').replace(' ', '_')
    date_string = datetime.datetime.utcnow().strftime("%Y_%m_%d_%H_%M_%S")
    # Compute the directory name a single time (previously the same
    # concatenation was repeated in the return statement).
    directory_name = host + '-' + date_string + '-' + str(id)
    directory = get_log_directory() + directory_name

    if not path.exists(directory):
        makedirs(directory)

    return directory_name
Esempio n. 11
0
def create_log_directory(host_or_ip, id=None):
    """
    Create a session log directory named <host>-<timestamp>[-<id>] under
    the log root and return the directory name (relative). The id suffix
    is omitted when `id` is falsy.
    """
    job_id = (('-' + str(id)) if id else "")
    host_ip = host_or_ip.strip().replace('.', '_').replace(' ', '_')
    date_string = datetime.datetime.utcnow().strftime("%Y_%m_%d_%H_%M_%S")
    # Assemble the name once so the created path and returned name match.
    directory_name = host_ip + '-' + date_string + job_id
    directory = get_log_directory() + directory_name

    if not path.exists(directory):
        makedirs(directory)

    return directory_name
Esempio n. 12
0
def init():
    """Create the runtime data directories and warn when LDAP is unavailable."""
    # All supporting directories must exist before the application starts.
    for directory in (get_log_directory(),
                      get_repository_directory(),
                      get_temp_directory(),
                      get_migration_directory(),
                      get_doc_central_directory()):
        create_directory(directory)

    if not is_ldap_supported():
        print('LDAP authentication is not supported because it has not been installed.')
Esempio n. 13
0
def create_log_directory(host_or_ip, id=None):
    """
    Create and return (by relative name) the session log directory
    <host>-<timestamp>[-<id>] under the log root; the id suffix is
    dropped when `id` is falsy.
    """
    job_id = (('-' + str(id)) if id else "")
    host_ip = host_or_ip.strip().replace('.', '_').replace(' ', '_')
    date_string = datetime.datetime.utcnow().strftime("%Y_%m_%d_%H_%M_%S")
    # Build the name once; the duplicated concatenation in the return
    # statement could silently drift from the created path.
    directory_name = host_ip + '-' + date_string + job_id
    directory = get_log_directory() + directory_name

    if not path.exists(directory):
        makedirs(directory)

    return directory_name
Esempio n. 14
0
def init():
    """Ensure all supporting data directories exist; warn if LDAP auth is unavailable."""
    # The application relies on these directories at runtime.
    required_directories = [get_log_directory(),
                            get_repository_directory(),
                            get_temp_directory(),
                            get_migration_directory(),
                            get_doc_central_directory()]
    for required_directory in required_directories:
        create_directory(required_directory)

    if not is_ldap_supported():
        print(
            'LDAP authentication is not supported because it has not been installed.'
        )
Esempio n. 15
0
File: log.py Progetto: smjurcak/csm
def host_session_log(hostname, table, id):
    """
    Render the session log page for a job record.

    `table` selects which table the record lives in; `id` is the record's
    primary key. The 'file_path' query argument points (relative to the
    log root) either at a log directory (renders a file listing, pairing
    each log with its '.diff.html' companion if present) or at a single
    file (renders its contents).

    This route is also used by mailer.py for email notification.
    """
    db_session = DBSession()

    record = None
    doc_central_log_file_path = ''

    if table == 'install_job':
        record = db_session.query(InstallJob).filter(InstallJob.id == id).first()
    elif table == 'install_job_history':
        record = db_session.query(InstallJobHistory).filter(InstallJobHistory.id == id).first()

        # Only history records can have an aggregated Doc Central log.
        doc_central_log_file_path = get_doc_central_log_path(record)
    elif table == 'inventory_job_history':
        record = db_session.query(InventoryJobHistory).filter(InventoryJobHistory.id == id).first()

    if record is None:
        abort(404)

    # NOTE(review): file_path comes straight from the query string and is
    # concatenated into a filesystem path — looks vulnerable to path
    # traversal; confirm and sanitize upstream.
    file_path = request.args.get('file_path')
    log_file_path = get_log_directory() + file_path

    if not(os.path.isdir(log_file_path) or os.path.isfile(log_file_path)):
        abort(404)

    file_pairs = {}
    log_file_contents = ''

    file_suffix = '.diff.html'
    if os.path.isdir(log_file_path):
        # Returns all files under the requested directory
        log_file_list = get_file_list(log_file_path)
        diff_file_list = [filename for filename in log_file_list if file_suffix in filename]

        # Map each plain log file to its diff companion ('' when absent).
        for filename in log_file_list:
            diff_file_path = ''
            if file_suffix not in filename:
                if filename + file_suffix in diff_file_list:
                    diff_file_path = os.path.join(file_path, filename + file_suffix)
                file_pairs[os.path.join(file_path, filename)] = diff_file_path

        # Sorted for a stable listing order in the template.
        file_pairs = collections.OrderedDict(sorted(file_pairs.items()))
    else:
        # latin-1 never fails to decode, so arbitrary log bytes render.
        with io.open(log_file_path, "rt", encoding='latin-1') as fo:
            log_file_contents = fo.read()

    return render_template('host/session_log.html', hostname=hostname, table=table,
                           record_id=id, file_pairs=file_pairs, log_file_contents=log_file_contents,
                           is_file=os.path.isfile(log_file_path),
                           doc_central_log_file_path=doc_central_log_file_path)
Esempio n. 16
0
File: log.py Progetto: smjurcak/csm
def api_get_session_log_file_diff():
    """Return the contents of a session-log diff file as a JSON payload."""
    diff_file_path = request.args.get("diff_file_path")

    if is_empty(diff_file_path):
        return jsonify({'status': 'diff file is missing.'})

    full_path = os.path.join(get_log_directory(), diff_file_path)
    with io.open(full_path, "rt", encoding='latin-1') as diff_file:
        file_diff_contents = diff_file.read()

    return jsonify(**{'data': [{'file_diff_contents': file_diff_contents}]})
Esempio n. 17
0
File: log.py Progetto: smjurcak/csm
def api_get_session_log_file_diff():
    """Serve the requested session-log diff file contents as JSON."""
    diff_file_path = request.args.get("diff_file_path")

    if is_empty(diff_file_path):
        return jsonify({'status': 'diff file is missing.'})

    # latin-1 decodes any byte sequence, so arbitrary diff content loads.
    diff_location = os.path.join(get_log_directory(), diff_file_path)
    with io.open(diff_location, "rt", encoding='latin-1') as fo:
        contents = fo.read()

    data = [{'file_diff_contents': contents}]
    return jsonify(**{'data': data})
Esempio n. 18
0
    def delete(self, db_session):
        """
        Delete this host together with its job history session logs.

        Removes inventory/install job session log directories, detaches
        Inventory rows (host_id=None), then deletes the host and commits.
        """
        for inventory_job in self.inventory_job_history:
            # Skip NULL session logs: os.path.join(..., None) raises
            # TypeError, which the previous bare except silently hid.
            if inventory_job.session_log is not None:
                try:
                    shutil.rmtree(os.path.join(get_log_directory(), inventory_job.session_log))
                except OSError:
                    # Best effort; log cleanup must not block deletion.
                    pass

        for install_job in self.install_job_history:
            if install_job.session_log is not None:
                try:
                    shutil.rmtree(os.path.join(get_log_directory(), install_job.session_log))
                except OSError:
                    pass  # best effort, same as above

        for inventory in self.inventory:
            inventory.update(db_session, host_id=None)

        db_session.delete(self)
        db_session.commit()
Esempio n. 19
0
    def generate_post_upgrade_file_diff(self, ctx):
        """
        Search for the last successful Pre-Upgrade job and diff its session
        logs (PRE-UPGRADE) against this job's log directory (POST-UPGRADE).
        """
        if not os.path.isdir(ctx.log_directory):
            return

        pre_upgrade_job = get_last_successful_pre_upgrade_job(ctx.db_session, ctx.host.id)
        if pre_upgrade_job is None:
            return

        self.generate_file_diff(
            source_string='PRE-UPGRADE',
            target_string='POST-UPGRADE',
            source_file_directory=os.path.join(get_log_directory(), pre_upgrade_job.session_log),
            target_file_directory=ctx.log_directory)
Esempio n. 20
0
def discover_platform_info(ctx):
    """
    Discover platform information when a host is added to CSM.

    Connects with condoor (forcing discovery), records family, platform,
    software platform/version and OS type on the host, and commits.
    Connection errors are logged; the connection is always closed.
    """
    # FIX: the docstring used to sit mid-function as a no-op string
    # literal; it now documents the function properly.
    try:
        log_dir = os.path.join(get_log_directory(), create_log_directory(ctx.host.connection_param[0].host_or_ip))
    except Exception:
        # Discovery must proceed even if the log directory cannot be made.
        log_dir = None

    conn = condoor.Connection(name=ctx.hostname, urls=ctx.host_urls, log_level=logging.CRITICAL, log_dir=log_dir)
    try:
        conn.connect(force_discovery=True)
        ctx.host.family = conn.family
        ctx.host.platform = conn.platform
        ctx.host.software_platform = get_software_platform(family=conn.family, os_type=conn.os_type)
        ctx.host.software_version = get_software_version(conn.os_version)
        ctx.host.os_type = conn.os_type
        ctx.db_session.commit()
    except condoor.ConnectionError as e:
        logger.error(str(e))
    finally:
        conn.disconnect()
Esempio n. 21
0
def api_get_session_log(id):
    """
    Download all session log files for the given install job id.

    Looks in InstallJob first, then InstallJobHistory. Raises ValueError
    when the id is missing, unknown, or has no session log.
    """
    # BUG FIX: was `db_session = DBSession` (the factory itself, never
    # called); every other call site instantiates it.
    db_session = DBSession()

    if not id:
        raise ValueError("Install job id must be specified.")

    install_job = db_session.query(InstallJob).filter((InstallJob.id == id)).first()
    if install_job is None:
        # The job may have completed and been moved to the history table.
        install_job = db_session.query(InstallJobHistory).filter((InstallJobHistory.install_job_id == id)).first()

    if install_job is None:
        raise ValueError("Install job id '%d' does not exist in the database." % id)

    if install_job.session_log is not None:
        log_dir = os.path.join(get_log_directory(), install_job.session_log)
        file_list = [os.path.join(log_dir, f) for f in os.listdir(log_dir)]

        return download_session_logs(file_list)
    else:
        raise ValueError("Session log does not exist for install job id '%d'." % id)
Esempio n. 22
0
def aggregate_and_upload_log(ctx):
    """
    Concatenate all text logs of an install-job dependency chain into one
    file under the Doc Central directory.

    The filename encodes platform, hostname, from/to release and a
    timestamp; the POST_UPGRADE job in the chain stores the filename in
    its job data so it can be linked later.
    """
    chain = get_dependency_chain(ctx.db_session, ctx.install_job)

    filename_template = "%s_%s_%s-to-%s-%s.txt"
    platform = ctx.host.software_platform
    hostname = ctx.host.hostname.replace(' ', '_')

    from_release = get_from_release(ctx.db_session, chain)
    to_release = ctx.host.software_version

    timestamp = datetime.datetime.strftime(datetime.datetime.now(),
                                           "%Y_%m_%d_%H_%M_%S")
    filename = filename_template % (platform, hostname, from_release,
                                    to_release, timestamp)

    # "<software platform>_<CSM hostname>_<from release>-to-<to release>-<time stamp>.txt"
    output_file = os.path.join(get_doc_central_directory(), filename)

    with open(output_file, 'w') as outfile:
        for job_id in chain:
            # NOTE(review): assumes every chain id resolves to an
            # InstallJobHistory row; .first() could return None — confirm.
            install_job = ctx.db_session.query(InstallJobHistory).filter(
                InstallJobHistory.id == job_id).first()
            if install_job.install_action == InstallAction.POST_UPGRADE:
                install_job.save_data('doc_central_log_file_path', filename)
                ctx.db_session.commit()

            log_directory = os.path.join(get_log_directory(),
                                         install_job.session_log)
            job_logs = os.listdir(log_directory)
            for log in job_logs:
                # Substring match on .txt/.log, excluding the plugin and
                # condoor logs and any HTML files.
                if ('.txt' in log or '.log' in log) and log not in [
                        'plugins.log', 'condoor.log'
                ] and '.html' not in log:
                    with open(os.path.join(log_directory, log)) as f:
                        # Banner, "<action>: <filename>" header, then body.
                        outfile.write('#' * 50 + "\n")
                        outfile.write("%s: %s \n" %
                                      (install_job.install_action, log))
                        outfile.write('#' * 50 + "\n")
                        outfile.write(f.read())
                        outfile.write("\n\n")
Esempio n. 23
0
def api_get_session_log(id):
    """
    Download the session log files for an install job id.

    Checks InstallJob, then InstallJobHistory. Raises ValueError when the
    id is missing, not found, or has no associated session log.
    """
    # BUG FIX: `db_session = DBSession` never called the factory; all
    # other call sites in this project use DBSession().
    db_session = DBSession()

    if not id:
        raise ValueError("Install job id must be specified.")

    install_job = db_session.query(InstallJob).filter(
        (InstallJob.id == id)).first()
    if install_job is None:
        # Completed jobs live in the history table.
        install_job = db_session.query(InstallJobHistory).filter(
            (InstallJobHistory.install_job_id == id)).first()

    if install_job is None:
        raise ValueError(
            "Install job id '%d' does not exist in the database." % id)

    if install_job.session_log is not None:
        log_dir = os.path.join(get_log_directory(), install_job.session_log)
        file_list = [os.path.join(log_dir, f) for f in os.listdir(log_dir)]

        return download_session_logs(file_list)
    else:
        raise ValueError(
            "Session log does not exist for install job id '%d'." % id)
Esempio n. 24
0
    def perform_housekeeping_tasks(self, db_session, system_option):
        """
        Periodic cleanup of the system log and the history tables.

        - Trims the Log table back to total_system_logs once it exceeds
          that limit by more than 10%.
        - Keeps only the newest inventory_history_per_host /
          install_history_per_host records per host, deleting each purged
          record's session log directory.
        - Keeps only the newest download_history_per_user records per user.
        - Deletes completed or failed CreateTarJob records.
        """
        inventory_history_per_host = system_option.inventory_history_per_host
        install_history_per_host = system_option.install_history_per_host
        download_history_per_user = system_option.download_history_per_user
        total_system_logs = system_option.total_system_logs

        current_system_logs_count = db_session.query(Log).count()
        system_logs_threshold = int(total_system_logs * 1.1)
        # If the current system logs count > the threshold (10% more than total_system_logs),
        # trim the log table back to the total_system_logs
        if current_system_logs_count > system_logs_threshold:
            num_records_to_purge = current_system_logs_count - total_system_logs
            # Select the logs by created_time in ascending order (older logs)
            logs = db_session.query(Log).order_by(Log.created_time.asc()).limit(num_records_to_purge)
            for log in logs:
                db_session.delete(log)
            db_session.commit()

        # Scanning the InventoryJobHistory table for records that should be deleted.
        skip_count = 0
        current_host_id = -1

        inventory_jobs = db_session.query(InventoryJobHistory) \
            .order_by(InventoryJobHistory.host_id, InventoryJobHistory.created_time.desc())

        for inventory_job in inventory_jobs:
            if inventory_job.host_id != current_host_id:
                current_host_id = inventory_job.host_id
                skip_count = 0

            if skip_count >= inventory_history_per_host:
                # Delete the session log directory
                try:
                    if inventory_job.session_log is not None:
                        shutil.rmtree(get_log_directory() + inventory_job.session_log)
                except Exception:
                    logger.exception('InventoryManagerScheduler hit exception- inventory job = %s', inventory_job.id)

                db_session.delete(inventory_job)

            skip_count += 1

        db_session.commit()

        # Scanning the InstallJobHistory table for records that should be deleted.
        skip_count = 0
        current_host_id = -1

        install_jobs = db_session.query(InstallJobHistory) \
            .order_by(InstallJobHistory.host_id, InstallJobHistory.created_time.desc())

        for install_job in install_jobs:
            if install_job.host_id != current_host_id:
                current_host_id = install_job.host_id
                skip_count = 0

            if skip_count >= install_history_per_host:
                # Delete the session log directory
                try:
                    if install_job.session_log is not None:
                        shutil.rmtree(get_log_directory() + install_job.session_log)
                except Exception:
                    logger.exception('InventoryManagerScheduler hit exception - install job = %s', install_job.id)

                db_session.delete(install_job)

            skip_count += 1

        db_session.commit()

        # Scanning the DownloadJobHistory table for records that should be deleted.
        skip_count = 0
        current_user_id = -1

        download_jobs = db_session.query(DownloadJobHistory) \
            .order_by(DownloadJobHistory.user_id, DownloadJobHistory.created_time.desc())

        for download_job in download_jobs:
            if download_job.user_id != current_user_id:
                current_user_id = download_job.user_id
                skip_count = 0

            if skip_count >= download_history_per_user:
                db_session.delete(download_job)

            skip_count += 1

        db_session.commit()

        # Deleting old CreateTarJobs.
        # BUG FIX: `.all` (without parentheses) was the bound method object,
        # not the result list; iterating it raised TypeError at runtime.
        create_tar_jobs = db_session.query(CreateTarJob).all()

        for create_tar_job in create_tar_jobs:
            if create_tar_job.status == JobStatus.COMPLETED or create_tar_job.status == JobStatus.FAILED:
                db_session.delete(create_tar_job)

        db_session.commit()
Esempio n. 25
0
File: log.py Progetto: smjurcak/csm
def download_session_log():
    """Send the requested session log file to the client as an attachment."""
    requested_path = request.args.get('file_path')
    return send_file(get_log_directory() + requested_path, as_attachment=True)
Esempio n. 26
0
from utils import is_ldap_supported

from constants import get_csm_data_directory
from constants import get_log_directory
from constants import get_repository_directory
from constants import get_temp_directory
from constants import get_migration_directory
from constants import get_doc_central_directory

import os
import shutil

# Handle legacy layout: earlier releases stored logs under 'autlogs';
# if that directory still exists, move it to the current log location.
if os.path.isdir(os.path.join(get_csm_data_directory(), 'autlogs')):
    shutil.move(os.path.join(get_csm_data_directory(), 'autlogs'),
                get_log_directory())


def relocate_database_ini():
    """Move database.ini from the working directory into csm_data (one-time migration)."""
    target_ini = os.path.join(get_csm_data_directory(), 'database.ini')
    if not os.path.isfile(target_ini):
        # Only relocate once; afterwards the csm_data copy is authoritative.
        shutil.move(os.path.join(os.getcwd(), 'database.ini'), target_ini)


def init():
    """Create the supporting directories required at startup."""
    # Create the necessary supporting directories
    create_directory(get_log_directory())
    create_directory(get_repository_directory())
    create_directory(get_temp_directory())
Esempio n. 27
0
File: log.py Progetto: smjurcak/csm
def host_session_log(hostname, table, id):
    """
    Render the session log page for a job record.

    `table` selects the table to look the record up in; `id` is the
    record's primary key. The 'file_path' query argument names (relative
    to the log root) either a directory — rendered as a listing pairing
    each log with its '.diff.html' companion when present — or a single
    file, rendered inline.

    This route is also used by mailer.py for email notification.
    """
    db_session = DBSession()

    record = None
    doc_central_log_file_path = ''

    if table == 'install_job':
        record = db_session.query(InstallJob).filter(
            InstallJob.id == id).first()
    elif table == 'install_job_history':
        record = db_session.query(InstallJobHistory).filter(
            InstallJobHistory.id == id).first()

        # Only history records can carry an aggregated Doc Central log.
        doc_central_log_file_path = get_doc_central_log_path(record)
    elif table == 'inventory_job_history':
        record = db_session.query(InventoryJobHistory).filter(
            InventoryJobHistory.id == id).first()

    if record is None:
        abort(404)

    # NOTE(review): file_path is taken directly from the query string and
    # concatenated into a filesystem path — looks vulnerable to path
    # traversal; confirm and sanitize upstream.
    file_path = request.args.get('file_path')
    log_file_path = get_log_directory() + file_path

    if not (os.path.isdir(log_file_path) or os.path.isfile(log_file_path)):
        abort(404)

    file_pairs = {}
    log_file_contents = ''

    file_suffix = '.diff.html'
    if os.path.isdir(log_file_path):
        # Returns all files under the requested directory
        log_file_list = get_file_list(log_file_path)
        diff_file_list = [
            filename for filename in log_file_list if file_suffix in filename
        ]

        # Pair each plain log file with its diff companion ('' if absent).
        for filename in log_file_list:
            diff_file_path = ''
            if file_suffix not in filename:
                if filename + file_suffix in diff_file_list:
                    diff_file_path = os.path.join(file_path,
                                                  filename + file_suffix)
                file_pairs[os.path.join(file_path, filename)] = diff_file_path

        # Sorted for a stable listing order in the template.
        file_pairs = collections.OrderedDict(sorted(file_pairs.items()))
    else:
        # latin-1 decodes any byte sequence, so arbitrary log bytes render.
        with io.open(log_file_path, "rt", encoding='latin-1') as fo:
            log_file_contents = fo.read()

    return render_template('host/session_log.html',
                           hostname=hostname,
                           table=table,
                           record_id=id,
                           file_pairs=file_pairs,
                           log_file_contents=log_file_contents,
                           is_file=os.path.isfile(log_file_path),
                           doc_central_log_file_path=doc_central_log_file_path)
Esempio n. 28
0
 def log_directory(self):
     """Return the absolute session log directory of this context's install job."""
     return get_log_directory() + self.install_job.session_log
Esempio n. 29
0
 def log_directory(self):
     """Return the absolute session log directory of this context's inventory job."""
     return get_log_directory() + self.inventory_job.session_log
Esempio n. 30
0
File: log.py Progetto: smjurcak/csm
def download_session_log():
    """Stream the session log file named by the 'file_path' query argument as a download."""
    return send_file(get_log_directory() + request.args.get('file_path'),
                     as_attachment=True)
Esempio n. 31
0
    def perform_housekeeping_tasks(self, db_session, system_option):
        """
        Periodic cleanup of the system log and history tables.

        - Trims the Log table back to total_system_logs once it exceeds
          that limit by more than 10%.
        - Retains only the newest inventory_history_per_host /
          install_history_per_host records per host, removing each purged
          record's session log directory.
        - Retains only the newest download_history_per_user records per user.
        - Deletes completed or failed CreateTarJob records.
        """
        inventory_history_per_host = system_option.inventory_history_per_host
        install_history_per_host = system_option.install_history_per_host
        download_history_per_user = system_option.download_history_per_user
        total_system_logs = system_option.total_system_logs

        current_system_logs_count = db_session.query(Log).count()
        system_logs_threshold = int(total_system_logs * 1.1)
        # If the current system logs count > the threshold (10% more than total_system_logs),
        # trim the log table back to the total_system_logs
        if current_system_logs_count > system_logs_threshold:
            num_records_to_purge = current_system_logs_count - total_system_logs
            # Select the logs by created_time in ascending order (older logs)
            logs = db_session.query(Log).order_by(
                Log.created_time.asc()).limit(num_records_to_purge)
            for log in logs:
                db_session.delete(log)
            db_session.commit()

        # Scanning the InventoryJobHistory table for records that should be deleted.
        skip_count = 0
        current_host_id = -1

        inventory_jobs = db_session.query(InventoryJobHistory) \
            .order_by(InventoryJobHistory.host_id, InventoryJobHistory.created_time.desc())

        for inventory_job in inventory_jobs:
            if inventory_job.host_id != current_host_id:
                current_host_id = inventory_job.host_id
                skip_count = 0

            if skip_count >= inventory_history_per_host:
                # Delete the session log directory
                try:
                    if inventory_job.session_log is not None:
                        shutil.rmtree(get_log_directory() +
                                      inventory_job.session_log)
                except Exception:
                    logger.exception(
                        'InventoryManagerScheduler hit exception- inventory job = %s',
                        inventory_job.id)

                db_session.delete(inventory_job)

            skip_count += 1

        db_session.commit()

        # Scanning the InstallJobHistory table for records that should be deleted.
        skip_count = 0
        current_host_id = -1

        install_jobs = db_session.query(InstallJobHistory) \
            .order_by(InstallJobHistory.host_id, InstallJobHistory.created_time.desc())

        for install_job in install_jobs:
            if install_job.host_id != current_host_id:
                current_host_id = install_job.host_id
                skip_count = 0

            if skip_count >= install_history_per_host:
                # Delete the session log directory
                try:
                    if install_job.session_log is not None:
                        shutil.rmtree(get_log_directory() +
                                      install_job.session_log)
                except Exception:
                    logger.exception(
                        'InventoryManagerScheduler hit exception - install job = %s',
                        install_job.id)

                db_session.delete(install_job)

            skip_count += 1

        db_session.commit()

        # Scanning the DownloadJobHistory table for records that should be deleted.
        skip_count = 0
        current_user_id = -1

        download_jobs = db_session.query(DownloadJobHistory) \
            .order_by(DownloadJobHistory.user_id, DownloadJobHistory.created_time.desc())

        for download_job in download_jobs:
            if download_job.user_id != current_user_id:
                current_user_id = download_job.user_id
                skip_count = 0

            if skip_count >= download_history_per_user:
                db_session.delete(download_job)

            skip_count += 1

        db_session.commit()

        # Deleting old CreateTarJobs.
        # BUG FIX: `.all` without parentheses was the bound method object,
        # not the result list; iterating it raised TypeError at runtime.
        create_tar_jobs = db_session.query(CreateTarJob).all()

        for create_tar_job in create_tar_jobs:
            if create_tar_job.status == JobStatus.COMPLETED or create_tar_job.status == JobStatus.FAILED:
                db_session.delete(create_tar_job)

        db_session.commit()
Esempio n. 32
0
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE.
# =============================================================================
from models import initialize
from models import SystemVersion 
from sqlalchemy import inspect
from database import DBSession, CURRENT_SCHEMA_VERSION, engine

from utils import create_directory, is_ldap_supported
from constants import get_log_directory, get_repository_directory, get_temp_directory
from schema.loader import get_schema_migrate_class

import traceback

# Make sure every supporting directory (logs, repository, temp) exists.
for _directory in (get_log_directory(),
                   get_repository_directory(),
                   get_temp_directory()):
    create_directory(_directory)


def init():
    if not is_ldap_supported():
        print('LDAP authentication is not supported because it has not been installed.')

    db_session = DBSession()
    system_version = SystemVersion.get(db_session)

    # Handles database schema migration starting from the next schema version
    for version in range(system_version.schema_version + 1, CURRENT_SCHEMA_VERSION + 1):
        handler_class = get_schema_migrate_class(version)
        if handler_class is not None:
Esempio n. 33
0
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE.
# =============================================================================
from models import initialize
from models import SystemVersion
from sqlalchemy import inspect
from database import DBSession, CURRENT_SCHEMA_VERSION, engine

from utils import create_directory, is_ldap_supported
from constants import get_log_directory, get_repository_directory, get_temp_directory
from schema.loader import get_schema_migrate_class

import traceback

# Ensure each supporting directory is present before the app starts.
_supporting_dirs = [get_log_directory(), get_repository_directory(), get_temp_directory()]
for _path in _supporting_dirs:
    create_directory(_path)


def init():
    if not is_ldap_supported():
        print(
            'LDAP authentication is not supported because it has not been installed.'
        )

    db_session = DBSession()
    system_version = SystemVersion.get(db_session)

    # Handles database schema migration starting from the next schema version
    for version in range(system_version.schema_version + 1,
Esempio n. 34
0
from utils import create_directory
from utils import is_ldap_supported

from constants import get_csm_data_directory
from constants import get_log_directory
from constants import get_repository_directory
from constants import get_temp_directory
from constants import get_migration_directory
from constants import get_doc_central_directory

import os
import shutil

# Handle legacy installs: the session-log directory used to be called
# 'autlogs'; if it is still around, move it to the current log directory.
_legacy_autlogs_dir = os.path.join(get_csm_data_directory(), 'autlogs')
if os.path.isdir(_legacy_autlogs_dir):
    shutil.move(_legacy_autlogs_dir, get_log_directory())


def relocate_database_ini():
    """Move database.ini from the working directory into csm_data if it is not already there."""
    destination = os.path.join(get_csm_data_directory(), 'database.ini')
    if os.path.isfile(destination):
        # Already relocated on a previous run; nothing to do.
        return
    shutil.move(os.path.join(os.getcwd(), 'database.ini'), destination)


def init():
    # Create the necessary supporting directories
    create_directory(get_log_directory())
    create_directory(get_repository_directory())
    create_directory(get_temp_directory())
    create_directory(get_migration_directory())
    create_directory(get_doc_central_directory())