Example #1
def get_sp_files_from_csm_repository():
    rows = []
    file_list = get_file_list(get_repository_directory())

    for filename in file_list:
        if '.pie' in filename:
            statinfo = os.stat(get_repository_directory() + filename)
            row = {}
            row['image_name'] = filename
            row['image_size'] = '{} bytes'.format(statinfo.st_size)
            rows.append(row)

    return jsonify(**{'data': rows})
Example #2
def get_sp_files_from_csm_repository():
    rows = []
    file_list = get_file_list(get_repository_directory())

    for filename in file_list:
        if is_external_file_a_smu(filename):
            statinfo = os.stat(get_repository_directory() + filename)
            row = {}
            row['image_name'] = filename
            row['image_size'] = '{} bytes'.format(statinfo.st_size)
            rows.append(row)

    return jsonify(**{'data': rows})
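Example #2 delegates the extension check from Example #1 to is_external_file_a_smu(), whose body is not shown on this page. The sketch below is only an assumption of what such a name-based check could look like (the DDTS pattern and extensions are illustrative, not the actual CSM Server logic):

import re

# Illustrative sketch only: assume a SMU is recognized purely from its filename,
# i.e. it carries a Cisco DDTS identifier (CSC + two letters + five digits) and
# uses a known package extension. The real helper may apply different rules.
def is_external_file_a_smu(filename):
    has_ddts = re.search(r'CSC[a-z]{2}\d{5}', filename) is not None
    return has_ddts and filename.endswith(('.pie', '.tar'))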
Example #3
def get_full_software_tar_files_from_csm_repository():
    rows = []
    file_list = get_file_list(get_repository_directory())

    for filename in file_list:
        if '-iosxr-' in filename and filename.endswith('.tar'):
            statinfo = os.stat(get_repository_directory() + filename)
            row = {}
            row['image_name'] = filename
            row['image_size'] = '{} bytes'.format(statinfo.st_size)
            rows.append(row)

    return jsonify(**{'data': rows})
Example #4
def get_full_software_tar_files_from_csm_repository():
    rows = []
    file_list = get_file_list(get_repository_directory())

    for filename in file_list:
        if '-iosxr-' in filename and filename.endswith('.tar'):
            statinfo = os.stat(get_repository_directory() + filename)
            row = {}
            row['image_name'] = filename
            row['image_size'] = '{} bytes'.format(statinfo.st_size)
            rows.append(row)

    return jsonify(**{'data': rows})
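Examples #3 and #4 identify a full-software bundle purely by name: the file must contain '-iosxr-' and end in '.tar'. A small standalone illustration of that filter on made-up filenames:

# Made-up filenames, for illustration only.
filenames = ['asr9k-iosxr-px-5.3.3.tar',
             'asr9k-px-5.3.3.CSCux00001.pie',
             'release-notes.txt']

full_software_tars = [f for f in filenames
                      if '-iosxr-' in f and f.endswith('.tar')]
print(full_software_tars)  # ['asr9k-iosxr-px-5.3.3.tar']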
Example #5
def api_get_files_from_csm_repository():
    rows = []
    file_list = get_file_list(get_repository_directory())

    for filename in file_list:
        if filename.endswith('.tar'):
            statinfo = os.stat(get_repository_directory() + filename)
            row = dict()
            row['image_name'] = filename
            row['image_size'] = str(statinfo.st_size)
            row['downloaded_time'] = datetime_from_local_to_utc(
                datetime.datetime.fromtimestamp(statinfo.st_mtime))
            rows.append(row)

    return jsonify(**{'data': rows})
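Example #5 also reports the download time, converting the file's local modification time with datetime_from_local_to_utc(), which is not reproduced here. A minimal sketch of one possible implementation, assuming the argument is a naive local datetime (illustrative; the actual CSM helper may differ):

import datetime
import time

# Illustrative sketch: interpret a naive datetime as local time and return the
# equivalent naive UTC datetime (sub-second precision is dropped).
def datetime_from_local_to_utc(local_dt):
    timestamp = time.mktime(local_dt.timetuple())
    return datetime.datetime.utcfromtimestamp(timestamp)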
Example #6
def init():
    # Create the necessary supporting directories
    create_directory(get_log_directory())
    create_directory(get_repository_directory())
    create_directory(get_temp_directory())
    create_directory(get_migration_directory())
    create_directory(get_doc_central_directory())

    if not is_ldap_supported():
        print('LDAP authentication is not supported because it has not been installed.')
Example #7
def init():
    # Create the necessary supporting directories
    create_directory(get_log_directory())
    create_directory(get_repository_directory())
    create_directory(get_temp_directory())
    create_directory(get_migration_directory())
    create_directory(get_doc_central_directory())

    if not is_ldap_supported():
        print(
            'LDAP authentication is not supported because it has not been installed.'
        )
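Examples #6 and #7 are the same init() routine (formatted differently); both depend on create_directory(), which is not shown on this page. A plausible sketch, assuming the helper simply creates the directory tree when it does not already exist (illustrative only):

import os

# Illustrative sketch: create the directory tree if it is not already present.
def create_directory(directory):
    if not os.path.exists(directory):
        os.makedirs(directory)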
Example #8
def api_delete_image_from_repository(image_name):
    if current_user.privilege != UserPrivilege.ADMIN and current_user.privilege != UserPrivilege.NETWORK_ADMIN:
        abort(401)

    tar_image_path = get_repository_directory() + image_name
    try:
        # Remove the tar file contents
        file_list = get_tarfile_file_list(tar_image_path)
        for filename in file_list:
            try:
                file_path = get_repository_directory() + filename
                if os.path.exists(file_path):
                    os.remove(file_path)
            except:
                logger.exception(
                    'api_delete_image_from_repository() hit exception filename='
                    + file_path)
    except ReadError:
        # In case it is a partially downloaded TAR.
        pass

    try:
        # Remove the actual tar file
        file_path = tar_image_path
        if os.path.exists(file_path):
            os.remove(file_path)

        # Remove the auxiliary file for the tar file
        file_path = tar_image_path + '.size'
        if os.path.exists(file_path):
            os.remove(file_path)
    except:
        logger.exception(
            'api_delete_image_from_repository() hit exception filename=' +
            file_path)
        return jsonify({'status': 'Failed'})

    return jsonify({'status': 'OK'})
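Example #8 builds file paths by string concatenation (get_repository_directory() + image_name), which relies on the repository path ending with a separator. A hypothetical repo_file_path() helper, sketched here as a separator-agnostic alternative (the helper name is an assumption and not part of the original code):

import os

from constants import get_repository_directory

# Hypothetical helper: join the repository directory and a filename regardless
# of whether the repository path ends with a separator.
def repo_file_path(filename):
    return os.path.join(get_repository_directory(), filename)

# Usage sketch: tar_image_path = repo_file_path(image_name)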
Example #9
def get_smu_or_sp_list(hostname, hide_installed_packages, smu_info_list,
                       file_suffix):
    """
    Return the SMU/SP list.  If hostname is given, compare its active packages.
    """
    file_list = get_file_list(get_repository_directory(), '.' + file_suffix)

    host_packages = [] if hostname is None else get_host_active_packages(
        hostname)

    rows = []
    for smu_info in smu_info_list:

        # Verify if the package has already been installed.
        installed = False
        for host_package in host_packages:
            if smu_info.name in host_package:
                installed = True
                break

        include = False if (hide_installed_packages == 'true'
                            and installed) else True
        if include:
            row = dict()
            row['ST'] = 'True' if smu_info.name + '.' + file_suffix in file_list else 'False'
            row['package_name'] = smu_info.name + '.' + file_suffix
            row['posted_date'] = smu_info.posted_date.split()[0]
            row['ddts'] = smu_info.ddts
            row['ddts_url'] = BUG_SEARCH_URL + smu_info.ddts
            row['type'] = smu_info.type
            row['description'] = smu_info.description
            row['impact'] = smu_info.impact
            row['functional_areas'] = smu_info.functional_areas
            row['id'] = smu_info.id
            row['name'] = smu_info.name
            row['status'] = smu_info.status
            row['package_bundles'] = smu_info.package_bundles
            row['compressed_image_size'] = smu_info.compressed_image_size
            row['uncompressed_image_size'] = smu_info.uncompressed_image_size
            row['is_installed'] = installed

            if not is_empty(hostname) and SMU_INDICATOR in smu_info.name:
                row['is_applicable'] = is_smu_applicable(
                    host_packages, smu_info.package_bundles)
            else:
                row['is_applicable'] = True

            rows.append(row)

    return jsonify(**{'data': rows})
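Here get_file_list() is called with a second argument ('.' + file_suffix) that evidently acts as a filename filter. A minimal sketch of a directory listing helper with an optional suffix filter, assuming that is all it does (illustrative; the real utility may behave differently):

import os

# Illustrative sketch: list the filenames in a directory, optionally keeping
# only those ending with the given suffix (e.g. '.pie' or '.tar').
def get_file_list(directory, filter_suffix=None):
    try:
        filenames = os.listdir(directory)
    except OSError:
        return []
    if filter_suffix:
        filenames = [name for name in filenames if name.endswith(filter_suffix)]
    return sorted(filenames)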
Example #10
def get_tar_contents():
    files = request.args.getlist('files[]')
    files = files[0].strip().split(',')
    rows = []
    repo_path = get_repository_directory()

    for file in files:
        if file:
            for f in get_tarfile_file_list(repo_path + file):
                row = {}
                row['file'] = repo_path + file + '/' + f
                row['filename'] = f
                row['source_tar'] = file
                rows.append(row)
    return jsonify(**{'data': rows})
Example #11
def get_tar_contents():
    files = request.args.getlist('files[]')
    files = files[0].strip().split(',')
    rows = []
    repo_path = get_repository_directory()

    for file in files:
        if file:
            for f in get_tarfile_file_list(repo_path + file):
                row = {}
                row['file'] = repo_path + file + '/' + f
                row['filename'] = f
                row['source_tar'] = file
                rows.append(row)
    return jsonify(**{'data': rows})
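Examples #10 and #11 read the selected tar files from a single 'files[]' query parameter holding a comma-separated list. A hedged client-side sketch of how such an endpoint might be exercised; the URL and filenames below are assumptions for illustration, since the registered route is not shown here:

import requests  # assumes the third-party 'requests' package is installed

# Hypothetical URL and filenames; the handler splits the single parameter on commas.
response = requests.get('http://localhost:5000/api/get_tar_contents',
                        params={'files[]': 'bundle_one.tar,bundle_two.tar'})
print(response.json()['data'])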
Example #12
def get_smu_or_sp_list(hostname, hide_installed_packages, smu_info_list, file_suffix):
    """
    Return the SMU/SP list.  If hostname is given, compare its active packages.
    """
    file_list = get_file_list(get_repository_directory(), '.' + file_suffix)

    host_packages = [] if hostname is None else get_host_active_packages(hostname)

    rows = []
    for smu_info in smu_info_list:

        # Verify if the package has already been installed.
        installed = False
        for host_package in host_packages:
            if smu_info.name in host_package:
                installed = True
                break

        include = False if (hide_installed_packages == 'true' and installed) else True
        if include:
            row = dict()
            row['ST'] = 'True' if smu_info.name + '.' + file_suffix in file_list else 'False'
            row['package_name'] = smu_info.name + '.' + file_suffix
            row['posted_date'] = smu_info.posted_date.split()[0]
            row['ddts'] = smu_info.ddts
            row['ddts_url'] = BUG_SEARCH_URL + smu_info.ddts
            row['type'] = smu_info.type
            row['description'] = smu_info.description
            row['impact'] = smu_info.impact
            row['functional_areas'] = smu_info.functional_areas
            row['id'] = smu_info.id
            row['name'] = smu_info.name
            row['status'] = smu_info.status
            row['package_bundles'] = smu_info.package_bundles
            row['compressed_image_size'] = smu_info.compressed_image_size
            row['uncompressed_image_size'] = smu_info.uncompressed_image_size
            row['is_installed'] = installed

            if not is_empty(hostname) and SMU_INDICATOR in smu_info.name:
                row['is_applicable'] = is_smu_applicable(host_packages, smu_info.package_bundles)
            else:
                row['is_applicable'] = True

            rows.append(row)

    return jsonify(**{'data': rows})
Example #13
def api_get_tar_list(platform, release):
    smu_loader = SMUInfoLoader(platform, release, from_cco=False)

    if not smu_loader.is_valid:
        return jsonify(**{'data': []})
    else:
        file_list = get_file_list(get_repository_directory(), '.tar')
        tars_list = smu_loader.get_tar_list()
        rows = []
        for tar_info in tars_list:
            row = dict()
            row['ST'] = 'True' if tar_info.name in file_list else 'False'
            row['name'] = tar_info.name
            row['compressed_size'] = tar_info.compressed_image_size
            row['description'] = ""
            rows.append(row)

    return jsonify(**{'data': rows})
Example #14
def api_get_tar_list(platform, release):
    smu_loader = SMUInfoLoader(platform, release, from_cco=False)

    if not smu_loader.is_valid:
        return jsonify(**{'data': []})
    else:
        file_list = get_file_list(get_repository_directory(), '.tar')
        tars_list = smu_loader.get_tar_list()
        rows = []
        for tar_info in tars_list:
            row = dict()
            row['ST'] = 'True' if tar_info.name in file_list else 'False'
            row['name'] = tar_info.name
            row['compressed_size'] = tar_info.compressed_image_size
            row['description'] = ""
            rows.append(row)

    return jsonify(**{'data': rows})
Example #15
    def start(self, db_session, logger, process_name):
        self.db_session = db_session
        try:
            self.create_tar_job = self.db_session.query(CreateTarJob).filter(CreateTarJob.id == self.job_id).first()
            if self.create_tar_job is None:
                logger.error('Unable to retrieve create tar job: %s' % self.job_id)
                return

            self.create_tar_job.set_status(JobStatus.PROCESSING)

            server_id = self.create_tar_job.server_id
            server_directory = self.create_tar_job.server_directory
            source_tars = self.create_tar_job.source_tars
            contents = self.create_tar_job.contents
            additional_packages = self.create_tar_job.additional_packages
            new_tar_name = self.create_tar_job.new_tar_name
            created_by = self.create_tar_job.created_by

            date_string = datetime.datetime.utcnow().strftime("%Y_%m_%d_%H_%M_%S")

            repo_dir = get_repository_directory()
            temp_path = get_temp_directory() + str(date_string)
            new_tar_path = os.path.join(temp_path, str(date_string))

            try:
                if not os.path.exists(temp_path):
                    self.create_tar_job.set_status('Creating temporary directories.')
                    self.db_session.commit()
                    os.makedirs(temp_path)
                    os.makedirs(new_tar_path, 7777)

                # Untar source tars into the temp/timestamp directory
                if source_tars:
                    self.create_tar_job.set_status('Extracting from source tar files.')
                    self.db_session.commit()
                    for source in source_tars.split(','):
                        with tarfile.open(os.path.join(repo_dir, source)) as tar:
                            tar.extractall(temp_path)

                # Copy the selected contents from the temp/timestamp directory
                # to the new tar directory
                if contents:
                    self.create_tar_job.set_status('Copying selected tar contents.')
                    self.db_session.commit()
                    for f in contents.strip().split(','):
                        _, filename = os.path.split(f)
                        shutil.copy2(os.path.join(temp_path, filename), new_tar_path)

                # Copy the selected additional packages from the repository to the new tar directory
                if additional_packages:
                    self.create_tar_job.set_status('Copying selected additional files.')
                    for pkg in additional_packages.split(','):
                        self.db_session.commit()
                        shutil.copy2(os.path.join(repo_dir, pkg), new_tar_path)

                self.create_tar_job.set_status('Tarring new file.')
                self.db_session.commit()
                tarname = os.path.join(temp_path, new_tar_name)
                shutil.make_archive(tarname, format='tar', root_dir=new_tar_path)
                make_file_writable(os.path.join(new_tar_path, tarname) + '.tar')

                server = self.db_session.query(Server).filter(Server.id == server_id).first()
                if server is not None:
                    self.create_tar_job.set_status('Uploading to external repository.')
                    self.db_session.commit()

                    server_impl = get_server_impl(server)

                    # If the new file already exists on the remote host, delete it
                    if new_tar_name in server_impl.get_file_list():
                        server_impl.delete_file(new_tar_name)

                    statinfo = os.stat(tarname + '.tar')
                    self.new_tar_size = statinfo.st_size
                    self.chunk_list = self.get_chunks(self.new_tar_size, self.new_tar_size / 1048576)

                    if isinstance(server_impl, FTPServer):
                        server_impl.upload_file(tarname + '.tar', new_tar_name + ".tar", sub_directory=server_directory,
                                            callback=self.ftp_progress_listener)
                    elif isinstance(server_impl, SFTPServer):
                        server_impl.upload_file(tarname + '.tar', new_tar_name + ".tar", sub_directory=server_directory,
                                            callback=self.sftp_progress_listener)
                    else:
                        server_impl.upload_file(tarname + '.tar', new_tar_name + ".tar", sub_directory=server_directory)

                shutil.rmtree(temp_path, onerror=self.handleRemoveReadonly)
                self.create_tar_job.set_status(JobStatus.COMPLETED)
                self.db_session.commit()

            except Exception:
                self.create_tar_job.set_status(JobStatus.FAILED)
                self.db_session.commit()
                logger.exception('Exception while creating %s requested by %s - job id = %s',
                                  new_tar_name, created_by, self.job_id)
                shutil.rmtree(temp_path, onerror=self.handleRemoveReadonly)
                os.remove(temp_path + '.tar')

        finally:
            self.db_session.close()
Example #16
    def start(self, db_session, logger, process_name):
        self.db_session = db_session
        try:
            self.create_tar_job = self.db_session.query(CreateTarJob).filter(
                CreateTarJob.id == self.job_id).first()
            if self.create_tar_job is None:
                logger.error('Unable to retrieve create tar job: %s' %
                             self.job_id)
                return

            self.create_tar_job.set_status(JobStatus.PROCESSING)

            server_id = self.create_tar_job.server_id
            server_directory = self.create_tar_job.server_directory
            source_tars = self.create_tar_job.source_tars
            contents = self.create_tar_job.contents
            additional_packages = self.create_tar_job.additional_packages
            new_tar_name = self.create_tar_job.new_tar_name
            created_by = self.create_tar_job.created_by

            date_string = datetime.datetime.utcnow().strftime(
                "%Y_%m_%d_%H_%M_%S")

            repo_dir = get_repository_directory()
            temp_path = get_temp_directory() + str(date_string)
            new_tar_path = os.path.join(temp_path, str(date_string))

            try:
                if not os.path.exists(temp_path):
                    self.create_tar_job.set_status(
                        'Creating temporary directories.')
                    self.db_session.commit()
                    os.makedirs(temp_path)
                    os.makedirs(new_tar_path, 7777)

                # Untar source tars into the temp/timestamp directory
                if source_tars:
                    self.create_tar_job.set_status(
                        'Extracting from source tar files.')
                    self.db_session.commit()
                    for source in source_tars.split(','):
                        with tarfile.open(os.path.join(repo_dir,
                                                       source)) as tar:
                            tar.extractall(temp_path)

                # Copy the selected contents from the temp/timestamp directory
                # to the new tar directory
                if contents:
                    self.create_tar_job.set_status(
                        'Copying selected tar contents.')
                    self.db_session.commit()
                    for f in contents.strip().split(','):
                        _, filename = os.path.split(f)
                        shutil.copy2(os.path.join(temp_path, filename),
                                     new_tar_path)

                # Copy the selected additional packages from the repository to the new tar directory
                if additional_packages:
                    self.create_tar_job.set_status(
                        'Copying selected additional files.')
                    for pkg in additional_packages.split(','):
                        self.db_session.commit()
                        shutil.copy2(os.path.join(repo_dir, pkg), new_tar_path)

                self.create_tar_job.set_status('Tarring new file.')
                self.db_session.commit()
                tarname = os.path.join(temp_path, new_tar_name)
                shutil.make_archive(tarname,
                                    format='tar',
                                    root_dir=new_tar_path)
                make_file_writable(
                    os.path.join(new_tar_path, tarname) + '.tar')

                server = self.db_session.query(Server).filter(
                    Server.id == server_id).first()
                if server is not None:
                    self.create_tar_job.set_status(
                        'Uploading to external repository.')
                    self.db_session.commit()

                    server_impl = get_server_impl(server)

                    # If the new file already exists on the remote host, delete it
                    if new_tar_name in server_impl.get_file_list():
                        server_impl.delete_file(new_tar_name)

                    statinfo = os.stat(tarname + '.tar')
                    self.new_tar_size = statinfo.st_size
                    self.chunk_list = self.get_chunks(
                        self.new_tar_size, self.new_tar_size / 1048576)

                    if isinstance(server_impl, FTPServer):
                        server_impl.upload_file(
                            tarname + '.tar',
                            new_tar_name + ".tar",
                            sub_directory=server_directory,
                            callback=self.ftp_progress_listener)
                    elif isinstance(server_impl, SFTPServer):
                        server_impl.upload_file(
                            tarname + '.tar',
                            new_tar_name + ".tar",
                            sub_directory=server_directory,
                            callback=self.sftp_progress_listener)
                    else:
                        server_impl.upload_file(tarname + '.tar',
                                                new_tar_name + ".tar",
                                                sub_directory=server_directory)

                shutil.rmtree(temp_path, onerror=self.handleRemoveReadonly)
                self.create_tar_job.set_status(JobStatus.COMPLETED)
                self.db_session.commit()

            except Exception:
                self.create_tar_job.set_status(JobStatus.FAILED)
                self.db_session.commit()
                logger.exception(
                    'Exception while creating %s requested by %s - job id = %s',
                    new_tar_name, created_by, self.job_id)
                shutil.rmtree(temp_path, onerror=self.handleRemoveReadonly)
                os.remove(temp_path + '.tar')

        finally:
            self.db_session.close()
Example #17
    def start(self, db_session, logger, process_name):
        # Save the db_session reference for progress_listener
        self.db_session = db_session
        try:
            self.download_job = db_session.query(DownloadJob).filter(DownloadJob.id == self.job_id).first()
            if self.download_job is None:
                logger.error('Unable to retrieve download job: %s' % self.job_id)
                return

            output_file_path = get_repository_directory() + self.download_job.cco_filename

            # Only download if the image (tar file) is not already in the downloads directory
            # and the image is a good one.
            if not self.is_tar_file_valid(output_file_path):
                user_id = self.download_job.user_id
                user = db_session.query(User).filter(User.id == user_id).first()
                if user is None:
                    logger.error('Unable to retrieve user: %s' % user_id)

                preferences = db_session.query(Preferences).filter(Preferences.user_id == user_id).first()
                if preferences is None:
                    logger.error('Unable to retrieve user preferences: %s' % user_id)

                self.download_job.set_status(JobStatus.PROCESSING)
                db_session.commit()

                bsd = BSDServiceHandler(username=preferences.cco_username, password=preferences.cco_password,
                                        image_name=self.download_job.cco_filename, PID=self.download_job.pid,
                                        MDF_ID=self.download_job.mdf_id,
                                        software_type_ID=self.download_job.software_type_id)

                self.download_job.set_status('Preparing to download from cisco.com.')
                db_session.commit()

                bsd.download(output_file_path, callback=self.progress_listener)

                tarfile_file_list = untar(output_file_path, get_repository_directory())
            else:
                tarfile_file_list = get_tarfile_file_list(output_file_path)

            # Now transfer the file to the server repository
            self.download_job.set_status('Transferring file to server repository.')
            db_session.commit()

            server = db_session.query(Server).filter(Server.id == self.download_job.server_id).first()
            if server is not None:
                server_impl = get_server_impl(server)
                for filename in tarfile_file_list:
                    server_impl.upload_file(get_repository_directory() + filename, filename,
                                            sub_directory=self.download_job.server_directory)

            self.archive_download_job(db_session, self.download_job, JobStatus.COMPLETED)
            db_session.commit()

        except Exception:
            try:
                logger.exception('DownloadManager hit exception - download job = %s', self.job_id)
                self.archive_download_job(db_session, self.download_job, JobStatus.FAILED, traceback.format_exc())
                db_session.commit()
            except Exception:
                logger.exception('DownloadManager hit exception - download job = %s', self.job_id)
        finally:
            db_session.close()
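The download flow above skips re-downloading when is_tar_file_valid() accepts the file already in the repository; that helper is not shown on this page. A minimal sketch, assuming 'valid' simply means the archive can be opened and its member list read (illustrative only):

import tarfile

# Illustrative sketch: treat a tar file as valid if its member list is readable.
def is_tar_file_valid(tar_file_path):
    try:
        with tarfile.open(tar_file_path) as tar:
            tar.getmembers()
        return True
    except Exception:
        return False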
Example #18
# THE POSSIBILITY OF SUCH DAMAGE.
# =============================================================================
from models import initialize
from models import SystemVersion 
from sqlalchemy import inspect
from database import DBSession, CURRENT_SCHEMA_VERSION, engine

from utils import create_directory, is_ldap_supported
from constants import get_log_directory, get_repository_directory, get_temp_directory
from schema.loader import get_schema_migrate_class

import traceback

# Create the necessary supporting directories
create_directory(get_log_directory())
create_directory(get_repository_directory())
create_directory(get_temp_directory())


def init():
    if not is_ldap_supported():
        print('LDAP authentication is not supported because it has not been installed.')

    db_session = DBSession()
    system_version = SystemVersion.get(db_session)

    # Handles database schema migration starting from the next schema version
    for version in range(system_version.schema_version + 1, CURRENT_SCHEMA_VERSION + 1):
        handler_class = get_schema_migrate_class(version)
        if handler_class is not None:
            try:
Example #19
# By importing models here, it forces creation of tables in the database for a new installation.
# This will prevent gunicorn workers from trying to create the database tables all at the same time. 
# See csmserver launch script
import models

from utils import create_directory
from constants import get_autlogs_directory, get_repository_directory, get_temp_directory

# Create the necessary supporting directories
create_directory(get_autlogs_directory())
create_directory(get_repository_directory())
create_directory(get_temp_directory())
Example #20
    def start(self, db_session, logger, process_name):
        # Save the db_session reference for progress_listener
        self.db_session = db_session
        try:
            self.download_job = db_session.query(DownloadJob).filter(
                DownloadJob.id == self.job_id).first()
            if self.download_job is None:
                logger.error('Unable to retrieve download job: %s' %
                             self.job_id)
                return

            output_file_path = get_repository_directory(
            ) + self.download_job.cco_filename

            # Only download if the image (tar file) is not already in the downloads directory
            # and the image is a good one.
            if not self.is_tar_file_valid(output_file_path):
                user_id = self.download_job.user_id
                user = db_session.query(User).filter(
                    User.id == user_id).first()
                if user is None:
                    logger.error('Unable to retrieve user: %s' % user_id)

                preferences = db_session.query(Preferences).filter(
                    Preferences.user_id == user_id).first()
                if preferences is None:
                    logger.error('Unable to retrieve user preferences: %s' %
                                 user_id)

                self.download_job.set_status(JobStatus.PROCESSING)
                db_session.commit()

                bsd = BSDServiceHandler(
                    username=preferences.cco_username,
                    password=preferences.cco_password,
                    image_name=self.download_job.cco_filename,
                    PID=self.download_job.pid,
                    MDF_ID=self.download_job.mdf_id,
                    software_type_ID=self.download_job.software_type_id)

                self.download_job.set_status(
                    'Preparing to download from cisco.com.')
                db_session.commit()

                bsd.download(output_file_path, callback=self.progress_listener)

                tarfile_file_list = untar(output_file_path,
                                          get_repository_directory())
            else:
                tarfile_file_list = get_tarfile_file_list(output_file_path)

            # Now transfer the file to the server repository
            self.download_job.set_status(
                'Transferring file to server repository.')
            db_session.commit()

            server = db_session.query(Server).filter(
                Server.id == self.download_job.server_id).first()
            if server is not None:
                server_impl = get_server_impl(server)
                for filename in tarfile_file_list:
                    server_impl.upload_file(
                        get_repository_directory() + filename,
                        filename,
                        sub_directory=self.download_job.server_directory)

            self.archive_download_job(db_session, self.download_job,
                                      JobStatus.COMPLETED)
            db_session.commit()

        except Exception:
            try:
                logger.exception(
                    'DownloadManager hit exception - download job = %s',
                    self.job_id)
                self.archive_download_job(db_session, self.download_job,
                                          JobStatus.FAILED,
                                          traceback.format_exc())
                db_session.commit()
            except Exception:
                logger.exception(
                    'DownloadManager hit exception - download job = %s',
                    self.job_id)
        finally:
            db_session.close()
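Example #20 registers self.progress_listener as the download callback but does not show it. A hedged sketch of what such a callback might do, assuming it receives the bytes transferred so far and the total size and records a percentage on the job (the signature and fields here are assumptions, not the actual CSM code):

    # Illustrative sketch only; the real callback signature and status text may differ.
    def progress_listener(self, bytes_so_far, total_bytes):
        if total_bytes > 0:
            percent = int(bytes_so_far * 100 / total_bytes)
            self.download_job.set_status('Downloading from cisco.com (%d%% complete).' % percent)
            self.db_session.commit()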