Example #1
    def install(self, dest=None):
        """Install Firefox."""
        if dest is None:
            dest = os.getcwd()

        resp = self.get_from_nightly("<a[^>]*>(firefox-\d+\.\d(?:\w\d)?.en-US.%s\.tar\.bz2)" % self.platform_string())
        untar(resp.raw, dest=dest)
        return os.path.join(dest, "firefox")
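Note: every example on this page delegates the actual extraction to an untar helper defined elsewhere in its own project. As a rough, hypothetical sketch (not any of the original implementations), such a helper built on the standard tarfile module could accept either a path or a file-like object, for instance a streamed HTTP response body, and extract it into dest:

import os
import tarfile

def untar(source, dest="."):
    # Hypothetical helper: extract a (possibly compressed) tar archive into dest.
    # `source` may be a filesystem path or a non-seekable stream such as resp.raw.
    if not os.path.isdir(dest):
        os.makedirs(dest)
    if hasattr(source, "read"):
        archive = tarfile.open(fileobj=source, mode="r|*")  # streaming mode
    else:
        archive = tarfile.open(name=source, mode="r:*")
    with archive:
        archive.extractall(path=dest)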
def install_syncthing():
    syncthing_package = \
        utils.download_cloudify_resource(
            ctx_properties['syncthing_package_url'],
            SERVICE_NAME
        )
    utils.mkdir(HOME_DIR)
    utils.untar(syncthing_package, destination=HOME_DIR)
    utils.remove(syncthing_package)
def install_riemann():
    langohr_source_url = ctx_properties['langohr_jar_source_url']
    daemonize_source_url = ctx_properties['daemonize_rpm_source_url']
    riemann_source_url = ctx_properties['riemann_rpm_source_url']
    # Needed for Riemann's config
    cloudify_resources_url = ctx_properties['cloudify_resources_url']
    rabbitmq_username = ctx_properties['rabbitmq_username']
    rabbitmq_password = ctx_properties['rabbitmq_password']

    riemann_config_path = '/etc/riemann'
    riemann_log_path = '/var/log/cloudify/riemann'
    langohr_home = '/opt/lib'
    extra_classpath = '{0}/langohr.jar'.format(langohr_home)

    # Confirm username and password have been supplied for broker before
    # continuing.
    # Components other than logstash and riemann have this handled in code.
    # Note that these are not directly used in this script, but are used by the
    # deployed resources, hence the check here.
    if not rabbitmq_username or not rabbitmq_password:
        ctx.abort_operation(
            'Both rabbitmq_username and rabbitmq_password must be supplied '
            'and at least 1 character long in the manager blueprint inputs.')

    rabbit_props = utils.ctx_factory.get('rabbitmq')
    ctx.instance.runtime_properties['rabbitmq_endpoint_ip'] = \
        utils.get_rabbitmq_endpoint_ip(
                rabbit_props.get('rabbitmq_endpoint_ip'))
    ctx.instance.runtime_properties['rabbitmq_username'] = \
        rabbit_props.get('rabbitmq_username')
    ctx.instance.runtime_properties['rabbitmq_password'] = \
        rabbit_props.get('rabbitmq_password')

    ctx.logger.info('Installing Riemann...')
    utils.set_selinux_permissive()

    utils.copy_notice(RIEMANN_SERVICE_NAME)
    utils.mkdir(riemann_log_path)
    utils.mkdir(langohr_home)
    utils.mkdir(riemann_config_path)
    utils.mkdir('{0}/conf.d'.format(riemann_config_path))

    langohr = utils.download_cloudify_resource(langohr_source_url,
                                               RIEMANN_SERVICE_NAME)
    utils.sudo(['cp', langohr, extra_classpath])
    ctx.logger.info('Applying Langohr permissions...')
    utils.sudo(['chmod', '644', extra_classpath])
    utils.yum_install(daemonize_source_url, service_name=RIEMANN_SERVICE_NAME)
    utils.yum_install(riemann_source_url, service_name=RIEMANN_SERVICE_NAME)

    utils.logrotate(RIEMANN_SERVICE_NAME)

    ctx.logger.info('Downloading cloudify-manager Repository...')
    manager_repo = utils.download_cloudify_resource(cloudify_resources_url,
                                                    RIEMANN_SERVICE_NAME)
    ctx.logger.info('Extracting Manager Repository...')
    utils.untar(manager_repo, '/tmp')
Example #4
    def install(self, dest=None):
        """Install Firefox."""
        if dest is None:
            dest = os.getcwd()

        resp = self.get_from_nightly(
            "<a[^>]*>(firefox-\d+\.\d(?:\w\d)?.en-US.%s\.tar\.bz2)" %
            self.platform_string())
        untar(resp.raw, dest=dest)
        return find_executable("firefox", os.path.join(dest, "firefox"))
Example #5
    def install(self, platform, dest=None):
        """Install Firefox."""
        if dest is None:
            dest = os.getcwd()

        resp = self.get_from_nightly(
            "<a[^>]*>(firefox-\d+\.\d(?:\w\d)?.en-US.%s\.tar\.bz2)" %
            self.platform_string())
        untar(resp.raw, dest=dest)
        return os.path.join(dest, "firefox")
Example #6
    def install_webdriver(self, dest=None):
        """Install latest Geckodriver."""
        if dest is None:
            dest = os.getcwd()

        version = self._latest_geckodriver_version()
        logger.debug("Latest geckodriver release %s" % version)
        url = ("https://github.com/mozilla/geckodriver/releases/download/%s/geckodriver-%s-%s.tar.gz" %
               (version, version, self.platform_string_geckodriver()))
        untar(get(url).raw, dest=dest)
        return find_executable(os.path.join(dest, "geckodriver"))
Example #7
    def install(self, dest=None):
        """Install Firefox."""
        if dest is None:
            dest = os.getcwd()

        nightly_link = self.get_nightly_link(self.latest_nightly_listing(),
                                             self.platform_string())
        resp = get(nightly_link)
        resp.raise_for_status()
        untar(resp.raw, dest=dest)
        return find_executable("firefox", os.path.join(dest, "firefox"))
def _install_stage():
    nodejs_source_url = ctx_properties['nodejs_tar_source_url']
    stage_source_url = ctx_properties['stage_tar_source_url']

    if not utils.resource_factory.local_resource_exists(stage_source_url):
        ctx.logger.info('Stage package not found in manager resources '
                        'package. Stage will not be installed.')
        ctx.instance.runtime_properties['skip_installation'] = 'true'
        return

    # injected as an input to the script
    ctx.instance.runtime_properties['influxdb_endpoint_ip'] = \
        os.environ.get('INFLUXDB_ENDPOINT_IP')

    utils.set_selinux_permissive()
    utils.copy_notice(SERVICE_NAME)

    utils.mkdir(NODEJS_DIR)
    utils.mkdir(HOME_DIR)
    utils.mkdir(LOG_DIR)

    utils.create_service_user(STAGE_USER, STAGE_GROUP, HOME_DIR)

    ctx.logger.info('Installing NodeJS...')
    nodejs = utils.download_cloudify_resource(nodejs_source_url, SERVICE_NAME)
    utils.untar(nodejs, NODEJS_DIR)
    utils.remove(nodejs)

    ctx.logger.info('Installing Cloudify Stage (UI)...')
    stage_tar = utils.download_cloudify_resource(stage_source_url,
                                                 SERVICE_NAME)
    utils.untar(stage_tar, HOME_DIR)
    utils.remove(stage_tar)

    ctx.logger.info('Fixing permissions...')
    utils.chown(STAGE_USER, STAGE_GROUP, HOME_DIR)
    utils.chown(STAGE_USER, STAGE_GROUP, NODEJS_DIR)
    utils.chown(STAGE_USER, STAGE_GROUP, LOG_DIR)
    utils.deploy_sudo_command_script(
        'restore-snapshot.py',
        'Restore stage directories from a snapshot path',
        component=SERVICE_NAME,
        allow_as=STAGE_USER)
    utils.chmod('a+rx', '/opt/cloudify/stage/restore-snapshot.py')

    utils.logrotate(SERVICE_NAME)
    utils.systemd.configure(SERVICE_NAME)

    backend_dir = join(HOME_DIR, 'backend')
    npm_path = join(NODEJS_DIR, 'bin', 'npm')
    subprocess.check_call(
            'cd {0}; {1} run db-migrate'.format(backend_dir, npm_path),
            shell=True)
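The db-migrate step above builds a shell string and runs it with shell=True. A roughly equivalent alternative (assuming the same backend_dir and npm_path variables) would pass the command as a list and use cwd instead of cd, avoiding the shell entirely:

import subprocess

# Equivalent of "cd <backend_dir>; <npm_path> run db-migrate" without a shell.
subprocess.check_call([npm_path, 'run', 'db-migrate'], cwd=backend_dir)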
Example #9
def _install_stage():
    nodejs_source_url = ctx_properties['nodejs_tar_source_url']
    stage_source_url = ctx_properties['stage_tar_source_url']

    if not utils.resource_factory.local_resource_exists(stage_source_url):
        ctx.logger.info('Stage package not found in manager resources '
                        'package. Stage will not be installed.')
        ctx.instance.runtime_properties['skip_installation'] = 'true'
        return

    # injected as an input to the script
    ctx.instance.runtime_properties['influxdb_endpoint_ip'] = \
        os.environ.get('INFLUXDB_ENDPOINT_IP')

    utils.set_selinux_permissive()
    utils.copy_notice(SERVICE_NAME)

    utils.mkdir(NODEJS_DIR)
    utils.mkdir(HOME_DIR)
    utils.mkdir(LOG_DIR)

    utils.create_service_user(STAGE_USER, STAGE_GROUP, HOME_DIR)

    ctx.logger.info('Installing NodeJS...')
    nodejs = utils.download_cloudify_resource(nodejs_source_url, SERVICE_NAME)
    utils.untar(nodejs, NODEJS_DIR)
    utils.remove(nodejs)

    ctx.logger.info('Installing Cloudify Stage (UI)...')
    stage_tar = utils.download_cloudify_resource(stage_source_url,
                                                 SERVICE_NAME)
    utils.untar(stage_tar, HOME_DIR)
    utils.remove(stage_tar)

    ctx.logger.info('Fixing permissions...')
    utils.chown(STAGE_USER, STAGE_GROUP, HOME_DIR)
    utils.chown(STAGE_USER, STAGE_GROUP, NODEJS_DIR)
    utils.chown(STAGE_USER, STAGE_GROUP, LOG_DIR)
    utils.deploy_sudo_command_script(
        'restore-snapshot.py',
        'Restore stage directories from a snapshot path',
        component=SERVICE_NAME,
        allow_as=STAGE_USER)
    utils.chmod('a+rx', '/opt/cloudify/stage/restore-snapshot.py')

    utils.logrotate(SERVICE_NAME)
    utils.systemd.configure(SERVICE_NAME)

    backend_dir = join(HOME_DIR, 'backend')
    npm_path = join(NODEJS_DIR, 'bin', 'npm')
    subprocess.check_call('cd {0}; {1} run db-migrate'.format(
        backend_dir, npm_path),
                          shell=True)
Example #10
def check_for_ds():
    """
    Check whether the DeepSpeech model is present; if not, download the Italian DeepSpeech model.
    :return:
    """
    string = ""

    # build the status window shown while the model is prepared
    layout = [[sg.Text(size=(100, 5), key='text')]]
    window = sg.Window('Preparing the game...',
                       layout,
                       finalize=True,
                       size=(300, 200))
    window.disappear()

    # manage the window
    event, values = window.read(timeout=100)
    if event == sg.WINDOW_CLOSED or event == 'Quit':
        exit(0)

    if not os.path.exists('DS'):
        os.makedirs('DS')
        string += "DS"

    os.chdir("DS")

    flg_files = utils.check_for_files()
    flg_xz = utils.check_for_xz()

    if not flg_xz and not flg_files:
        window.reappear()
        window['text'].update("Downloading italian Deepspeech model...")
        window.refresh()
        utils.download_model()

        window['text'].update("Unzipping model...")
        window.refresh()
        utils.untar()

        os.remove("model_tensorflow_it.tar.xz")
    elif flg_xz:
        window.reappear()

        window['text'].update("Unzipping model...")
        window.refresh()

        utils.untar()
        os.remove("model_tensorflow_it.tar.xz")

    os.chdir("..")

    window.disappear()
    window.close()
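The call to utils.untar() above takes no arguments, so the helper presumably knows the model archive name and extracts it into the current directory (the script has already chdir'd into DS). A minimal sketch of such a helper, under that assumption, is:

import tarfile

def untar(archive="model_tensorflow_it.tar.xz", dest="."):
    # Hypothetical helper: extract the .tar.xz DeepSpeech model archive
    # into the current working directory (Python 3, lzma support required).
    with tarfile.open(archive, mode="r:xz") as tar:
        tar.extractall(path=dest)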
Example #11
 def _add_firmware(self, name, dst):
     """download, extract and copy Debian non free firmware"""
     distribution, release, _ = name.split('-')
     path = '-'.join(('firmware', distribution, release))
     archive_dst = os.path.join(self.cfg['paths']['archives'], path)
     temp_firmware = os.path.join(self.temp, 'firmware')
     archive = os.path.join(archive_dst, 'firmware.tar.gz')
     url = self.cfg[distribution]['url_firmware'].replace('${release}',
         release)
     self._download(url, archive_dst)
     utils.untar(archive, temp_firmware)
     self._extract_debs(temp_firmware)
     src = os.path.join(temp_firmware, 'temp', 'lib/firmware')
     utils.file_move(src, dst)
     utils.rmtree(temp_firmware)
Example #12
 def _add_firmware(self, name, dst):
     """download, extract and copy Debian non free firmware"""
     distribution, release, _ = name.split('-')
     path = '-'.join(('firmware', distribution, release))
     archive_dst = os.path.join(self.cfg['paths']['archives'], path)
     temp_firmware = os.path.join(self.temp, 'firmware')
     archive = os.path.join(archive_dst, 'firmware.tar.gz')
     url = self.cfg[distribution]['url_firmware'].replace(
         '${release}', release)
     self._download(url, archive_dst)
     utils.untar(archive, temp_firmware)
     self._extract_debs(temp_firmware)
     src = os.path.join(temp_firmware, 'temp', 'lib/firmware')
     utils.file_move(src, dst)
     utils.rmtree(temp_firmware)
Example #13
    def install_webdriver(self, dest=None):
        """Install latest Geckodriver."""
        if dest is None:
            dest = os.getcwd()

        version = self._latest_geckodriver_version()
        format = "zip" if uname[0] == "Windows" else "tar.gz"
        logger.debug("Latest geckodriver release %s" % version)
        url = ("https://github.com/mozilla/geckodriver/releases/download/%s/geckodriver-%s-%s.%s" %
               (version, version, self.platform_string_geckodriver(), format))
        if format == "zip":
            unzip(get(url).raw, dest=dest)
        else:
            untar(get(url).raw, dest=dest)
        return find_executable(os.path.join(dest, "geckodriver"))
Example #14
    def install_webdriver(self, dest=None):
        """Install latest Geckodriver."""
        if dest is None:
            dest = os.getcwd()

        version = self._latest_geckodriver_version()
        format = "zip" if uname[0] == "Windows" else "tar.gz"
        logger.debug("Latest geckodriver release %s" % version)
        url = ("https://github.com/mozilla/geckodriver/releases/download/%s/geckodriver-%s-%s.%s" %
               (version, version, self.platform_string_geckodriver(), format))
        if format == "zip":
            unzip(get(url).raw, dest=dest)
        else:
            untar(get(url).raw, dest=dest)
        return find_executable(os.path.join(dest, "geckodriver"))
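On Windows the geckodriver release is a zip rather than a tarball, so these examples also rely on an unzip counterpart. A sketch of such a helper (not the original) has to buffer the streamed response, because zipfile needs a seekable file:

import io
import zipfile

def unzip(fileobj, dest="."):
    # Hypothetical helper: buffer the HTTP stream, then extract the zip into dest.
    with zipfile.ZipFile(io.BytesIO(fileobj.read())) as archive:
        archive.extractall(path=dest)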
def _install_composer():
    composer_source_url = ctx_properties['composer_tar_source_url']

    if not utils.resource_factory.local_resource_exists(composer_source_url):
        ctx.logger.info('Composer package not found in manager resources '
                        'package. Composer will not be installed.')
        ctx.instance.runtime_properties['skip_installation'] = 'true'
        return

    utils.set_selinux_permissive()
    utils.copy_notice(SERVICE_NAME)

    utils.mkdir(NODEJS_DIR)
    utils.mkdir(HOME_DIR)
    utils.mkdir(LOG_DIR)

    utils.create_service_user(COMPOSER_USER, COMPOSER_GROUP, HOME_DIR)
    # adding cfyuser to the composer group so that its files are r/w for
    # replication and snapshots (restart of mgmtworker necessary for change
    # to take effect)
    utils.sudo(['usermod', '-aG', COMPOSER_GROUP, utils.CLOUDIFY_USER])
    # This makes sure that the composer folders will be writable after
    # snapshot restore
    utils.sudo(['usermod', '-aG', utils.CLOUDIFY_GROUP, COMPOSER_USER])
    utils.systemd.restart('mgmtworker')

    ctx.logger.info('Installing Cloudify Composer...')
    composer_tar = utils.download_cloudify_resource(composer_source_url,
                                                    SERVICE_NAME)
    utils.untar(composer_tar, HOME_DIR)
    utils.remove(composer_tar)

    ctx.logger.info('Fixing permissions...')
    utils.chown(COMPOSER_USER, COMPOSER_GROUP, HOME_DIR)
    utils.chown(COMPOSER_USER, COMPOSER_GROUP, LOG_DIR)
    utils.chmod('g+w', CONF_DIR)
    utils.chmod('g+w', dirname(CONF_DIR))

    utils.logrotate(SERVICE_NAME)
    utils.systemd.configure(SERVICE_NAME)

    npm_path = join(NODEJS_DIR, 'bin', 'npm')
    subprocess.check_call('cd {}; {} run db-migrate'.format(
        HOME_DIR, npm_path),
                          shell=True)
def install_stage():

    nodejs_source_url = ctx_properties['nodejs_tar_source_url']
    stage_source_url = ctx_properties['stage_tar_source_url']

    # injected as an input to the script
    ctx.instance.runtime_properties['influxdb_endpoint_ip'] = \
        os.environ.get('INFLUXDB_ENDPOINT_IP')

    nodejs_home = '/opt/nodejs'
    stage_home = '/opt/cloudify-stage'
    stage_log_path = '/var/log/cloudify/stage'

    stage_user = 'stage'
    stage_group = 'stage'

    utils.set_selinux_permissive()

    utils.copy_notice(STAGE_SERVICE_NAME)

    utils.mkdir(nodejs_home)
    utils.mkdir(stage_home)
    utils.mkdir(stage_log_path)

    utils.create_service_user(stage_user, stage_home)

    ctx.logger.info('Installing NodeJS...')
    nodejs = utils.download_cloudify_resource(nodejs_source_url,
                                              STAGE_SERVICE_NAME)
    utils.untar(nodejs, nodejs_home)

    ctx.logger.info('Installing Cloudify Stage (UI)...')
    stage = utils.download_cloudify_resource(stage_source_url,
                                             STAGE_SERVICE_NAME)
    utils.untar(stage, stage_home)

    ctx.logger.info('Fixing permissions...')
    utils.chown(stage_user, stage_group, stage_home)
    utils.chown(stage_user, stage_group, nodejs_home)
    utils.chown(stage_user, stage_group, stage_log_path)

    utils.logrotate(STAGE_SERVICE_NAME)
    utils.systemd.configure(STAGE_SERVICE_NAME)
def install_optional(rest_venv):
    props = ctx_properties

    dsl_parser_source_url = props['dsl_parser_module_source_url']
    rest_client_source_url = props['rest_client_module_source_url']
    plugins_common_source_url = props['plugins_common_module_source_url']
    script_plugin_source_url = props['script_plugin_module_source_url']
    agent_source_url = props['agent_module_source_url']
    pip_constraints = props['pip_constraints']

    rest_service_source_url = props['rest_service_module_source_url']

    constraints_file = utils.write_to_tempfile(pip_constraints) \
        if pip_constraints else None

    # this allows upgrading modules if necessary.
    ctx.logger.info('Installing Optional Packages if supplied...')

    if dsl_parser_source_url:
        utils.install_python_package(dsl_parser_source_url, rest_venv,
                                     constraints_file)
    if rest_client_source_url:
        utils.install_python_package(rest_client_source_url, rest_venv,
                                     constraints_file)
    if plugins_common_source_url:
        utils.install_python_package(plugins_common_source_url, rest_venv,
                                     constraints_file)
    if script_plugin_source_url:
        utils.install_python_package(script_plugin_source_url, rest_venv,
                                     constraints_file)
    if agent_source_url:
        utils.install_python_package(agent_source_url, rest_venv,
                                     constraints_file)

    if rest_service_source_url:
        ctx.logger.info('Downloading cloudify-manager Repository...')
        manager_repo = \
            utils.download_cloudify_resource(rest_service_source_url,
                                             SERVICE_NAME)
        ctx.logger.info('Extracting Manager Repository...')
        tmp_dir = utils.untar(manager_repo, unique_tmp_dir=True)
        rest_service_dir = join(tmp_dir, 'rest-service')
        resources_dir = join(tmp_dir, 'resources/rest-service/cloudify/')

        ctx.logger.info('Installing REST Service...')
        utils.install_python_package(rest_service_dir, rest_venv,
                                     constraints_file)

        ctx.logger.info('Deploying Required Manager Resources...')
        utils.move(resources_dir, utils.MANAGER_RESOURCES_HOME)

        utils.remove(tmp_dir)

    if constraints_file:
        os.remove(constraints_file)
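pip_constraints is written out to a temporary file, passed to each install call, and deleted afterwards. A hypothetical write_to_tempfile matching that usage could be as small as:

import tempfile

def write_to_tempfile(contents):
    # Hypothetical helper: persist the constraints text to a named temp file
    # and return its path so it can be handed to pip and removed afterwards.
    with tempfile.NamedTemporaryFile(mode='w', suffix='.txt', delete=False) as f:
        f.write(contents)
    return f.name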
Example #18
def _install_stage():
    nodejs_source_url = ctx_properties['nodejs_tar_source_url']
    stage_source_url = ctx_properties['stage_tar_source_url']

    if not utils.resource_factory.local_resource_exists(stage_source_url):
        ctx.logger.info('Stage package not found in manager resources '
                        'package. Stage will not be installed.')
        ctx.instance.runtime_properties['skip_installation'] = 'true'
        return

    # injected as an input to the script
    ctx.instance.runtime_properties['influxdb_endpoint_ip'] = \
        os.environ.get('INFLUXDB_ENDPOINT_IP')

    utils.set_selinux_permissive()
    utils.copy_notice(SERVICE_NAME)

    utils.mkdir(NODEJS_DIR)
    utils.mkdir(HOME_DIR)
    utils.mkdir(LOG_DIR)

    utils.create_service_user(STAGE_USER, STAGE_GROUP, HOME_DIR)

    ctx.logger.info('Installing NodeJS...')
    nodejs = utils.download_cloudify_resource(nodejs_source_url, SERVICE_NAME)
    utils.untar(nodejs, NODEJS_DIR)
    utils.remove(nodejs)

    ctx.logger.info('Installing Cloudify Stage (UI)...')
    stage_tar = utils.download_cloudify_resource(stage_source_url,
                                                 SERVICE_NAME)
    utils.untar(stage_tar, HOME_DIR)
    utils.remove(stage_tar)

    ctx.logger.info('Fixing permissions...')
    utils.chown(STAGE_USER, STAGE_GROUP, HOME_DIR)
    utils.chown(STAGE_USER, STAGE_GROUP, NODEJS_DIR)
    utils.chown(STAGE_USER, STAGE_GROUP, LOG_DIR)

    utils.logrotate(SERVICE_NAME)
    utils.systemd.configure(SERVICE_NAME)
Example #19
 def _debian_firmware(self, target):
     """download and integrate the debian non free firmware"""
     distribution, release, _ = target.split('-')
     path = 'firmware-' + distribution + '-' + release
     dst = os.path.join(self.cfg['paths']['archives'], path)
     temp_initrd = os.path.join(self.temp, 'initrd')
     temp_firmware = os.path.join(self.temp, 'firmware')
     firmware = os.path.join(dst, 'firmware.tar.gz')
     initrd = os.path.join(self.cfg['paths']['tftpboot'], 'seedbank', target,
         'initrd.gz')
     url = self.cfg['urls']['debian_firmware'].replace('${release}', release)
     self._download(url, dst)
     utils.untar(firmware, temp_firmware)
     self._extract_debs(temp_firmware)
     utils.make_dirs(temp_initrd)
     utils.initrd_extract(temp_initrd, initrd)
     src = os.path.join(temp_firmware, 'temp', 'lib/firmware')
     dst = os.path.join(self.temp, 'initrd/lib/firmware')
     utils.file_move(src, dst)
     self._disable_usb(temp_initrd)
     utils.initrd_create(temp_initrd, initrd)
Example #20
def _install_optional(mgmtworker_venv):

    rest_props = utils.ctx_factory.get('restservice')
    rest_client_source_url = \
        rest_props['rest_client_module_source_url']
    plugins_common_source_url = \
        rest_props['plugins_common_module_source_url']
    script_plugin_source_url = \
        rest_props['script_plugin_module_source_url']
    rest_service_source_url = \
        rest_props['rest_service_module_source_url']
    agent_source_url = \
        rest_props['agent_module_source_url']

    # this allows upgrading modules if necessary.
    ctx.logger.info('Installing Optional Packages if supplied...')
    if rest_client_source_url:
        utils.install_python_package(rest_client_source_url, mgmtworker_venv)
    if plugins_common_source_url:
        utils.install_python_package(plugins_common_source_url,
                                     mgmtworker_venv)
    if script_plugin_source_url:
        utils.install_python_package(script_plugin_source_url, mgmtworker_venv)
    if agent_source_url:
        utils.install_python_package(agent_source_url, mgmtworker_venv)

    if rest_service_source_url:
        ctx.logger.info('Downloading cloudify-manager Repository...')
        manager_repo = \
            utils.download_cloudify_resource(rest_service_source_url,
                                             MGMT_WORKER_SERVICE_NAME)
        ctx.logger.info('Extracting Manager Repository...')
        utils.untar(manager_repo)

        ctx.logger.info('Installing Management Worker Plugins...')
        # shouldn't we extract the riemann-controller and workflows modules to
        # their own repos?
        utils.install_python_package('/tmp/plugins/riemann-controller',
                                     mgmtworker_venv)
        utils.install_python_package('/tmp/workflows', mgmtworker_venv)
def install_optional(rest_venv):
    props = ctx_properties

    dsl_parser_source_url = props['dsl_parser_module_source_url']
    rest_client_source_url = props['rest_client_module_source_url']
    securest_source_url = props['securest_module_source_url']
    plugins_common_source_url = props['plugins_common_module_source_url']
    script_plugin_source_url = props['script_plugin_module_source_url']
    agent_source_url = props['agent_module_source_url']

    rest_service_source_url = props['rest_service_module_source_url']

    # this allows upgrading modules if necessary.
    ctx.logger.info('Installing Optional Packages if supplied...')
    if dsl_parser_source_url:
        utils.install_python_package(dsl_parser_source_url, rest_venv)
    if rest_client_source_url:
        utils.install_python_package(rest_client_source_url, rest_venv)
    if securest_source_url:
        utils.install_python_package(securest_source_url, rest_venv)
    if plugins_common_source_url:
        utils.install_python_package(plugins_common_source_url, rest_venv)
    if script_plugin_source_url:
        utils.install_python_package(script_plugin_source_url, rest_venv)
    if agent_source_url:
        utils.install_python_package(agent_source_url, rest_venv)

    if rest_service_source_url:
        ctx.logger.info('Downloading cloudify-manager Repository...')
        manager_repo = \
            utils.download_cloudify_resource(rest_service_source_url,
                                             REST_SERVICE_NAME)
        ctx.logger.info('Extracting Manager Repository...')
        utils.untar(manager_repo)

        ctx.logger.info('Installing REST Service...')
        utils.install_python_package('/tmp/rest-service', rest_venv)
        ctx.logger.info('Deploying Required Manager Resources...')
        utils.move(
            '/tmp/resources/rest-service/cloudify/', MANAGER_RESOURCES_HOME)
def install_optional(rest_venv):
    props = ctx_properties

    dsl_parser_source_url = props['dsl_parser_module_source_url']
    rest_client_source_url = props['rest_client_module_source_url']
    securest_source_url = props['securest_module_source_url']
    plugins_common_source_url = props['plugins_common_module_source_url']
    script_plugin_source_url = props['script_plugin_module_source_url']
    agent_source_url = props['agent_module_source_url']

    rest_service_source_url = props['rest_service_module_source_url']

    # this allows upgrading modules if necessary.
    ctx.logger.info('Installing Optional Packages if supplied...')
    if dsl_parser_source_url:
        utils.install_python_package(dsl_parser_source_url, rest_venv)
    if rest_client_source_url:
        utils.install_python_package(rest_client_source_url, rest_venv)
    if securest_source_url:
        utils.install_python_package(securest_source_url, rest_venv)
    if plugins_common_source_url:
        utils.install_python_package(plugins_common_source_url, rest_venv)
    if script_plugin_source_url:
        utils.install_python_package(script_plugin_source_url, rest_venv)
    if agent_source_url:
        utils.install_python_package(agent_source_url, rest_venv)

    if rest_service_source_url:
        ctx.logger.info('Downloading cloudify-manager Repository...')
        manager_repo = \
            utils.download_cloudify_resource(rest_service_source_url,
                                             REST_SERVICE_NAME)
        ctx.logger.info('Extracting Manager Repository...')
        utils.untar(manager_repo)

        ctx.logger.info('Installing REST Service...')
        utils.install_python_package('/tmp/rest-service', rest_venv)
        ctx.logger.info('Deploying Required Manager Resources...')
        utils.move(
            '/tmp/resources/rest-service/cloudify/', MANAGER_RESOURCES_HOME)
def _install_optional(mgmtworker_venv):

    rest_props = utils.ctx_factory.get('restservice')
    rest_client_source_url = \
        rest_props['rest_client_module_source_url']
    plugins_common_source_url = \
        rest_props['plugins_common_module_source_url']
    script_plugin_source_url = \
        rest_props['script_plugin_module_source_url']
    rest_service_source_url = \
        rest_props['rest_service_module_source_url']
    agent_source_url = \
        rest_props['agent_module_source_url']

    # this allows upgrading modules if necessary.
    ctx.logger.info('Installing Optional Packages if supplied...')
    if rest_client_source_url:
        utils.install_python_package(rest_client_source_url, mgmtworker_venv)
    if plugins_common_source_url:
        utils.install_python_package(
            plugins_common_source_url, mgmtworker_venv)
    if script_plugin_source_url:
        utils.install_python_package(script_plugin_source_url, mgmtworker_venv)
    if agent_source_url:
        utils.install_python_package(agent_source_url, mgmtworker_venv)

    if rest_service_source_url:
        ctx.logger.info('Downloading cloudify-manager Repository...')
        manager_repo = \
            utils.download_cloudify_resource(rest_service_source_url,
                                             MGMT_WORKER_SERVICE_NAME)
        ctx.logger.info('Extracting Manager Repository...')
        utils.untar(manager_repo)

        ctx.logger.info('Installing Management Worker Plugins...')
        # shouldn't we extract the riemann-controller and workflows modules to
        # their own repos?
        utils.install_python_package(
            '/tmp/plugins/riemann-controller', mgmtworker_venv)
        utils.install_python_package('/tmp/workflows', mgmtworker_venv)
def install_webui():

    nodejs_source_url = ctx.node.properties['nodejs_tar_source_url']
    webui_source_url = ctx.node.properties['webui_tar_source_url']
    grafana_source_url = ctx.node.properties['grafana_tar_source_url']

    # injected as an input to the script
    ctx.instance.runtime_properties['influxdb_endpoint_ip'] = \
        os.environ.get('INFLUXDB_ENDPOINT_IP')

    nodejs_home = '/opt/nodejs'
    webui_home = '/opt/cloudify-ui'
    webui_log_path = '/var/log/cloudify/webui'
    grafana_home = '{0}/grafana'.format(webui_home)

    webui_user = 'webui'
    webui_group = 'webui'

    ctx.logger.info('Installing Cloudify\'s WebUI...')
    utils.set_selinux_permissive()

    utils.copy_notice('webui')

    utils.mkdir(nodejs_home)
    utils.mkdir(webui_home)
    utils.mkdir('{0}/backend'.format(webui_home))
    utils.mkdir(webui_log_path)
    utils.mkdir(grafana_home)

    utils.create_service_user(webui_user, webui_home)

    ctx.logger.info('Installing NodeJS...')
    nodejs = utils.download_file(nodejs_source_url)
    utils.untar(nodejs, nodejs_home)

    ctx.logger.info('Installing Cloudify\'s WebUI...')
    webui = utils.download_file(webui_source_url)
    utils.untar(webui, webui_home)

    ctx.logger.info('Installing Grafana...')
    grafana = utils.download_file(grafana_source_url)
    utils.untar(grafana, grafana_home)

    ctx.logger.info('Deploying WebUI Configuration...')
    utils.deploy_blueprint_resource(
        '{0}/gsPresets.json'.format(CONFIG_PATH),
        '{0}/backend/gsPresets.json'.format(webui_home))
    ctx.logger.info('Deploying Grafana Configuration...')
    utils.deploy_blueprint_resource(
        '{0}/grafana_config.js'.format(CONFIG_PATH),
        '{0}/config.js'.format(grafana_home))

    ctx.logger.info('Fixing permissions...')
    utils.chown(webui_user, webui_group, webui_home)
    utils.chown(webui_user, webui_group, nodejs_home)
    utils.chown(webui_user, webui_group, webui_log_path)

    utils.logrotate('webui')
    utils.systemd.configure('webui')
Example #25
def install_webui():

    nodejs_source_url = ctx.node.properties['nodejs_tar_source_url']
    webui_source_url = ctx.node.properties['webui_tar_source_url']
    grafana_source_url = ctx.node.properties['grafana_tar_source_url']

    # injected as an input to the script
    ctx.instance.runtime_properties['influxdb_endpoint_ip'] = \
        os.environ.get('INFLUXDB_ENDPOINT_IP')

    nodejs_home = '/opt/nodejs'
    webui_home = '/opt/cloudify-ui'
    webui_log_path = '/var/log/cloudify/webui'
    grafana_home = '{0}/grafana'.format(webui_home)

    webui_user = 'webui'
    webui_group = 'webui'

    ctx.logger.info('Installing Cloudify\'s WebUI...')
    utils.set_selinux_permissive()

    utils.copy_notice('webui')

    utils.mkdir(nodejs_home)
    utils.mkdir(webui_home)
    utils.mkdir('{0}/backend'.format(webui_home))
    utils.mkdir(webui_log_path)
    utils.mkdir(grafana_home)

    utils.create_service_user(webui_user, webui_home)

    ctx.logger.info('Installing NodeJS...')
    nodejs = utils.download_file(nodejs_source_url)
    utils.untar(nodejs, nodejs_home)

    ctx.logger.info('Installing Cloudify\'s WebUI...')
    webui = utils.download_file(webui_source_url)
    utils.untar(webui, webui_home)

    ctx.logger.info('Installing Grafana...')
    grafana = utils.download_file(grafana_source_url)
    utils.untar(grafana, grafana_home)

    ctx.logger.info('Deploying WebUI Configuration...')
    utils.deploy_blueprint_resource(
        '{0}/gsPresets.json'.format(CONFIG_PATH),
        '{0}/backend/gsPresets.json'.format(webui_home))
    ctx.logger.info('Deploying Grafana Configuration...')
    utils.deploy_blueprint_resource(
        '{0}/grafana_config.js'.format(CONFIG_PATH),
        '{0}/config.js'.format(grafana_home))

    ctx.logger.info('Fixing permissions...')
    utils.chown(webui_user, webui_group, webui_home)
    utils.chown(webui_user, webui_group, nodejs_home)
    utils.chown(webui_user, webui_group, webui_log_path)

    utils.logrotate('webui')
    utils.systemd.configure('webui')
Example #26
    def install_webdriver(self, dest=None, channel=None, browser_binary=None):
        """Install latest Geckodriver."""
        if dest is None:
            dest = os.getcwd()

        if channel == "nightly":
            path = self.install_geckodriver_nightly(dest)
            if path is not None:
                return path
            else:
                self.logger.warning("Nightly webdriver not found; falling back to release")

        version = self._latest_geckodriver_version()
        format = "zip" if uname[0] == "Windows" else "tar.gz"
        self.logger.debug("Latest geckodriver release %s" % version)
        url = ("https://github.com/mozilla/geckodriver/releases/download/%s/geckodriver-%s-%s.%s" %
               (version, version, self.platform_string_geckodriver(), format))
        if format == "zip":
            unzip(get(url).raw, dest=dest)
        else:
            untar(get(url).raw, dest=dest)
        return find_executable(os.path.join(dest, "geckodriver"))
Example #27
    def install_webdriver(self, dest=None, channel=None):
        """Install latest Geckodriver."""
        if dest is None:
            dest = os.getcwd()

        if channel == "nightly":
            path = self.install_geckodriver_nightly(dest)
            if path is not None:
                return path
            else:
                self.logger.warning("Nightly webdriver not found; falling back to release")

        version = self._latest_geckodriver_version()
        format = "zip" if uname[0] == "Windows" else "tar.gz"
        self.logger.debug("Latest geckodriver release %s" % version)
        url = ("https://github.com/mozilla/geckodriver/releases/download/%s/geckodriver-%s-%s.%s" %
               (version, version, self.platform_string_geckodriver(), format))
        if format == "zip":
            unzip(get(url).raw, dest=dest)
        else:
            untar(get(url).raw, dest=dest)
        return find_executable(os.path.join(dest, "geckodriver"))
Example #28
def _install_optional(mgmtworker_venv):
    rest_client_source_url = ctx_properties['rest_client_module_source_url']
    plugins_common_source_url = \
        ctx_properties['plugins_common_module_source_url']
    script_plugin_source_url = \
        ctx_properties['script_plugin_module_source_url']
    rest_service_source_url = ctx_properties['rest_service_module_source_url']
    agent_source_url = ctx_properties['agent_module_source_url']
    pip_constraints = ctx_properties['pip_constraints']

    constraints_file = utils.write_to_tempfile(pip_constraints) if \
        pip_constraints else None

    # this allows upgrading modules if necessary.
    ctx.logger.info('Installing Optional Packages if supplied...')
    if rest_client_source_url:
        utils.install_python_package(rest_client_source_url, mgmtworker_venv,
                                     constraints_file)
    if plugins_common_source_url:
        utils.install_python_package(
            plugins_common_source_url, mgmtworker_venv,
            constraints_file)
    if script_plugin_source_url:
        utils.install_python_package(script_plugin_source_url, mgmtworker_venv,
                                     constraints_file)
    if agent_source_url:
        utils.install_python_package(agent_source_url, mgmtworker_venv,
                                     constraints_file)

    if rest_service_source_url:
        ctx.logger.info('Downloading cloudify-manager Repository...')
        manager_repo = \
            utils.download_cloudify_resource(rest_service_source_url,
                                             SERVICE_NAME)

        ctx.logger.info('Extracting Manager Repository...')
        tmp_dir = utils.untar(manager_repo, unique_tmp_dir=True)
        workflows_dir = join(tmp_dir, 'workflows')
        riemann_dir = join(tmp_dir, 'plugins/riemann-controller')

        ctx.logger.info('Installing Management Worker Plugins...')
        utils.install_python_package(riemann_dir, mgmtworker_venv,
                                     constraints_file)
        utils.install_python_package(workflows_dir, mgmtworker_venv,
                                     constraints_file)

        utils.remove(tmp_dir)

    if constraints_file:
        os.remove(constraints_file)
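Each optional package is installed into the management worker's virtualenv, optionally under the pip constraints file created above. A hedged sketch of what install_python_package might boil down to (assuming a plain pip invocation inside the venv) is:

import subprocess
from os.path import join

def install_python_package(source, venv, constraints_file=None):
    # Hypothetical helper: pip-install a source URL or local directory into
    # the given virtualenv, honouring an optional constraints file (-c).
    cmd = [join(venv, 'bin', 'pip'), 'install', source]
    if constraints_file:
        cmd += ['-c', constraints_file]
    subprocess.check_call(cmd)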
Example #29
def result(request):
    url = request.POST.get('url')
    print "Getting file"
    cached = cache.get(url)
    if cached:
        response = cached
    else:
        try:
            response = requests.get(url=url)
            cache.set(url, response)
        except:
            message = {"errorMessage": "Invalid URL"}
            return render(request, 'error.html', message)
    try:
        analyze(untar(response))
        return render(request, 'result.html')
    except RuntimeError as output:
        return render(request, 'error.html', {"errorMessage": output})
Example #30
def get_manager_config():
    """
    Extracting specific files from cloudify-manager repo, with clean-ups after
    """
    cloudify_resources_url = ctx_properties['cloudify_resources_url']

    ctx.logger.info('Downloading cloudify-manager Repository...')
    manager_repo = utils.download_cloudify_resource(cloudify_resources_url,
                                                    SERVICE_NAME)
    ctx.logger.info('Extracting Manager Repository...')
    manager_dir = utils.untar(manager_repo, unique_tmp_dir=True)

    ctx.logger.info('Deploying Riemann manager.config...')
    config_src_path = join(manager_dir, 'plugins', 'riemann-controller',
                           'riemann_controller', 'resources', 'manager.config')
    utils.move(config_src_path,
               '{0}/conf.d/manager.config'.format(RIEMANN_CONFIG_PATH))
    utils.remove(manager_dir)
    utils.remove(manager_repo)
def get_manager_config():
    """
    Extracting specific files from cloudify-manager repo, with clean-ups after
    """
    cloudify_resources_url = ctx_properties['cloudify_resources_url']

    ctx.logger.info('Downloading cloudify-manager Repository...')
    manager_repo = utils.download_cloudify_resource(
        cloudify_resources_url, SERVICE_NAME)
    ctx.logger.info('Extracting Manager Repository...')
    manager_dir = utils.untar(manager_repo, unique_tmp_dir=True)

    ctx.logger.info('Deploying Riemann manager.config...')
    config_src_path = join(
        manager_dir, 'plugins', 'riemann-controller',
        'riemann_controller', 'resources', 'manager.config'
    )
    utils.move(
        config_src_path,
        '{0}/conf.d/manager.config'.format(RIEMANN_CONFIG_PATH)
    )
    utils.remove(manager_dir)
    utils.remove(manager_repo)
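Several examples call utils.untar(..., unique_tmp_dir=True) and use the returned path, so the helper evidently extracts into a fresh temporary directory and returns it. A sketch consistent with that behaviour (again hypothetical, not the real utils module) is:

import tarfile
import tempfile

def untar(archive, destination=None, unique_tmp_dir=False):
    # Hypothetical helper: extract into a fresh temp dir when unique_tmp_dir
    # is set, otherwise into the given (or default /tmp) destination.
    if destination is None:
        destination = tempfile.mkdtemp() if unique_tmp_dir else '/tmp'
    with tarfile.open(archive, mode='r:*') as tar:
        tar.extractall(path=destination)
    return destination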
Example #32
def _install_optional(mgmtworker_venv):
    rest_props = utils.ctx_factory.get('restservice')
    rest_client_source_url = rest_props['rest_client_module_source_url']
    plugins_common_source_url = rest_props['plugins_common_module_source_url']
    script_plugin_source_url = rest_props['script_plugin_module_source_url']
    rest_service_source_url = rest_props['rest_service_module_source_url']
    agent_source_url = rest_props['agent_module_source_url']

    # this allows upgrading modules if necessary.
    ctx.logger.info('Installing Optional Packages if supplied...')
    if rest_client_source_url:
        utils.install_python_package(rest_client_source_url, mgmtworker_venv)
    if plugins_common_source_url:
        utils.install_python_package(plugins_common_source_url,
                                     mgmtworker_venv)
    if script_plugin_source_url:
        utils.install_python_package(script_plugin_source_url, mgmtworker_venv)
    if agent_source_url:
        utils.install_python_package(agent_source_url, mgmtworker_venv)

    if rest_service_source_url:
        ctx.logger.info('Downloading cloudify-manager Repository...')
        manager_repo = \
            utils.download_cloudify_resource(rest_service_source_url,
                                             SERVICE_NAME)

        ctx.logger.info('Extracting Manager Repository...')
        tmp_dir = utils.untar(manager_repo, unique_tmp_dir=True)
        workflows_dir = join(tmp_dir, 'workflows')
        riemann_dir = join(tmp_dir, 'plugins/riemann-controller')

        ctx.logger.info('Installing Management Worker Plugins...')
        utils.install_python_package(riemann_dir, mgmtworker_venv)
        utils.install_python_package(workflows_dir, mgmtworker_venv)

        utils.remove(tmp_dir)
def _install_optional(mgmtworker_venv):
    rest_props = utils.ctx_factory.get('restservice')
    rest_client_source_url = rest_props['rest_client_module_source_url']
    plugins_common_source_url = rest_props['plugins_common_module_source_url']
    script_plugin_source_url = rest_props['script_plugin_module_source_url']
    rest_service_source_url = rest_props['rest_service_module_source_url']
    agent_source_url = rest_props['agent_module_source_url']

    # this allows upgrading modules if necessary.
    ctx.logger.info('Installing Optional Packages if supplied...')
    if rest_client_source_url:
        utils.install_python_package(rest_client_source_url, mgmtworker_venv)
    if plugins_common_source_url:
        utils.install_python_package(
            plugins_common_source_url, mgmtworker_venv)
    if script_plugin_source_url:
        utils.install_python_package(script_plugin_source_url, mgmtworker_venv)
    if agent_source_url:
        utils.install_python_package(agent_source_url, mgmtworker_venv)

    if rest_service_source_url:
        ctx.logger.info('Downloading cloudify-manager Repository...')
        manager_repo = \
            utils.download_cloudify_resource(rest_service_source_url,
                                             SERVICE_NAME)

        ctx.logger.info('Extracting Manager Repository...')
        tmp_dir = utils.untar(manager_repo, unique_tmp_dir=True)
        workflows_dir = join(tmp_dir, 'workflows')
        riemann_dir = join(tmp_dir, 'plugins/riemann-controller')

        ctx.logger.info('Installing Management Worker Plugins...')
        utils.install_python_package(riemann_dir, mgmtworker_venv)
        utils.install_python_package(workflows_dir, mgmtworker_venv)

        utils.remove(tmp_dir)
Example #34
def download_tarballs(downloads, working_dir):
    for download_url in downloads:
        download_file_path = download(download_url,
                                      get_download_dir(working_dir))
        untar(download_file_path, get_source_dir(working_dir))
def install_syncthing():
    syncthing_package = \
        utils.download_cloudify_resource(
            ctx_properties['syncthing_package_url'], SYNCTHING_SERVICE_NAME)
    utils.untar(syncthing_package, destination='/opt/cloudify/syncthing')
def install_riemann():
    langohr_source_url = ctx_properties['langohr_jar_source_url']
    daemonize_source_url = ctx_properties['daemonize_rpm_source_url']
    riemann_source_url = ctx_properties['riemann_rpm_source_url']
    # Needed for Riemann's config
    cloudify_resources_url = ctx_properties['cloudify_resources_url']
    rabbitmq_username = ctx_properties['rabbitmq_username']
    rabbitmq_password = ctx_properties['rabbitmq_password']

    riemann_config_path = '/etc/riemann'
    riemann_log_path = '/var/log/cloudify/riemann'
    langohr_home = '/opt/lib'
    extra_classpath = '{0}/langohr.jar'.format(langohr_home)

    # Confirm username and password have been supplied for broker before
    # continuing.
    # Components other than logstash and riemann have this handled in code.
    # Note that these are not directly used in this script, but are used by the
    # deployed resources, hence the check here.
    if not rabbitmq_username or not rabbitmq_password:
        ctx.abort_operation(
            'Both rabbitmq_username and rabbitmq_password must be supplied '
            'and at least 1 character long in the manager blueprint inputs.')

    rabbit_props = utils.ctx_factory.get('rabbitmq')
    ctx.instance.runtime_properties['rabbitmq_endpoint_ip'] = \
        utils.get_rabbitmq_endpoint_ip(
                rabbit_props.get('rabbitmq_endpoint_ip'))
    ctx.instance.runtime_properties['rabbitmq_username'] = \
        rabbit_props.get('rabbitmq_username')
    ctx.instance.runtime_properties['rabbitmq_password'] = \
        rabbit_props.get('rabbitmq_password')

    ctx.logger.info('Installing Riemann...')
    utils.set_selinux_permissive()

    utils.copy_notice(RIEMANN_SERVICE_NAME)
    utils.mkdir(riemann_log_path)
    utils.mkdir(langohr_home)
    utils.mkdir(riemann_config_path)
    utils.mkdir('{0}/conf.d'.format(riemann_config_path))

    langohr = utils.download_cloudify_resource(langohr_source_url,
                                               RIEMANN_SERVICE_NAME)
    utils.sudo(['cp', langohr, extra_classpath])
    ctx.logger.info('Applying Langohr permissions...')
    utils.sudo(['chmod', '644', extra_classpath])
    utils.yum_install(daemonize_source_url, service_name=RIEMANN_SERVICE_NAME)
    utils.yum_install(riemann_source_url, service_name=RIEMANN_SERVICE_NAME)

    utils.logrotate(RIEMANN_SERVICE_NAME)

    ctx.logger.info('Downloading cloudify-manager Repository...')
    manager_repo = utils.download_cloudify_resource(cloudify_resources_url,
                                                    RIEMANN_SERVICE_NAME)
    ctx.logger.info('Extracting Manager Repository...')
    utils.untar(manager_repo, '/tmp')
    ctx.logger.info('Deploying Riemann manager.config...')
    utils.move(
        '/tmp/plugins/riemann-controller/riemann_controller/resources/manager.config',  # NOQA
        '{0}/conf.d/manager.config'.format(riemann_config_path))

    ctx.logger.info('Deploying Riemann conf...')
    utils.deploy_blueprint_resource(
        '{0}/main.clj'.format(CONFIG_PATH),
        '{0}/main.clj'.format(riemann_config_path),
        RIEMANN_SERVICE_NAME)

    # our riemann configuration will (by default) try to read these environment
    # variables. If they don't exist, it will assume
    # that they're found at "localhost"
    # export MANAGEMENT_IP=""
    # export RABBITMQ_HOST=""

    # we inject the management_ip for both of these to Riemann's systemd
    # config.
    # These should be potentially different
    # if the manager and rabbitmq are running on different hosts.
    utils.systemd.configure(RIEMANN_SERVICE_NAME)
    utils.clean_var_log_dir(RIEMANN_SERVICE_NAME)
def deploy_manager_sources():
    """Deploys all manager sources from a single archive.
    """
    archive_path = ctx_properties['manager_resources_package']
    archive_checksum_path = \
        ctx_properties['manager_resources_package_checksum_file']
    skip_checksum_validation = ctx_properties['skip_checksum_validation']
    agent_archives_path = utils.AGENT_ARCHIVES_PATH
    utils.mkdir(agent_archives_path)
    if archive_path:
        sources_agents_path = os.path.join(
            utils.CLOUDIFY_SOURCES_PATH, 'agents')
        # this will leave this several hundreds of MBs archive on the
        # manager. should find a way to clean it after all operations
        # were completed and bootstrap succeeded as it is no longer
        # necessary
        utils.mkdir(utils.CLOUDIFY_SOURCES_PATH)
        resource_name = os.path.basename(archive_path)
        destination = os.path.join(utils.CLOUDIFY_SOURCES_PATH, resource_name)

        ctx.logger.info('Downloading manager resources package...')
        resources_archive_path = \
            utils.download_cloudify_resource(
                archive_path, NODE_NAME, destination=destination)
        # This would ideally go under utils.download_cloudify_resource but as
        # of now, we'll only be validating the manager resources package.

        if not skip_checksum_validation:
            ctx.logger.info('Validating checksum...')
            skip_if_failed = False
            if not archive_checksum_path:
                skip_if_failed = True
                archive_checksum_path = archive_path + '.md5'
            md5_name = os.path.basename(archive_checksum_path)
            destination = os.path.join(utils.CLOUDIFY_SOURCES_PATH, md5_name)
            resources_archive_md5_path = utils.download_cloudify_resource(
                archive_checksum_path, NODE_NAME, destination=destination)
            if not utils.validate_md5_checksum(resources_archive_path,
                                               resources_archive_md5_path):
                if skip_if_failed:
                    ctx.logger.warn('Checksum validation failed. '
                                    'Continuing as no checksum file was '
                                    'explicitly provided.')
                else:
                    ctx.abort_operation(
                        'Failed to validate checksum for {0}'.format(
                            resources_archive_path))
            else:
                ctx.logger.info('Resources Package downloaded successfully...')
        else:
            ctx.logger.info(
                'Skipping resources package checksum validation...')

        utils.untar(
            resources_archive_path,
            utils.CLOUDIFY_SOURCES_PATH,
            skip_old_files=True)

        def splitext(filename):
            # not using os.path.splitext as it would return .gz instead of
            # .tar.gz
            if filename.endswith('.tar.gz'):
                return '.tar.gz'
            elif filename.endswith('.exe'):
                return '.exe'
            else:
                ctx.abort_operation(
                    'Unknown agent format for {0}. '
                    'Must be either tar.gz or exe'.format(filename))

        def normalize_agent_name(filename):
            # this returns the normalized name of an agent upon which our agent
            # installer retrieves agent packages for installation.
            # e.g. Ubuntu-trusty-agent_3.4.0-m3-b392.tar.gz returns
            # ubuntu-trusty-agent
            return filename.split('_', 1)[0].lower()

        def backup_agent_resources(agents_dir):
            ctx.logger.info('Backing up agents in {0}...'.format(agents_dir))
            if not os.path.isdir(utils.AGENTS_ROLLBACK_PATH):
                utils.mkdir(utils.AGENTS_ROLLBACK_PATH)
                utils.copy(agents_dir, utils.AGENTS_ROLLBACK_PATH)

        def restore_agent_resources(agents_dir):
            ctx.logger.info('Restoring agents in {0}'.format(
                utils.AGENTS_ROLLBACK_PATH))
            if os.path.isdir(agents_dir):
                utils.remove(agents_dir)
            utils.mkdir(agents_dir)
            utils.copy(os.path.join(utils.AGENTS_ROLLBACK_PATH, 'agents', '.'),
                       agents_dir)

        manager_scripts_path = os.path.join(
            utils.MANAGER_RESOURCES_HOME, 'packages', 'scripts')
        manager_templates_path = os.path.join(
            utils.MANAGER_RESOURCES_HOME, 'packages', 'templates')
        if utils.is_upgrade:
            backup_agent_resources(agent_archives_path)
            utils.remove(agent_archives_path)
            utils.mkdir(agent_archives_path)
            utils.remove(manager_scripts_path)
            utils.remove(manager_templates_path)
            ctx.logger.info('Upgrading agents...')
        elif utils.is_rollback:
            ctx.logger.info('Restoring agents...')
            restore_agent_resources(agent_archives_path)

        for agent_file in os.listdir(sources_agents_path):

            agent_id = normalize_agent_name(agent_file)
            agent_extension = splitext(agent_file)
            utils.move(
                os.path.join(sources_agents_path, agent_file),
                os.path.join(agent_archives_path, agent_id + agent_extension))
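deploy_manager_sources validates the resources archive against its .md5 file before extracting it. A hypothetical validate_md5_checksum matching that call, built on hashlib, might look like:

import hashlib

def validate_md5_checksum(resource_path, md5_checksum_file_path):
    # Hypothetical helper: compare the archive's MD5 digest with the first
    # field of the downloaded checksum file (md5sum-style output).
    with open(md5_checksum_file_path) as f:
        expected = f.read().split()[0].strip()
    digest = hashlib.md5()
    with open(resource_path, 'rb') as f:
        for chunk in iter(lambda: f.read(1024 * 1024), b''):
            digest.update(chunk)
    return digest.hexdigest() == expected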
def install_syncthing():
    syncthing_package = \
        utils.download_cloudify_resource(
            ctx_properties['syncthing_package_url'], SYNCTHING_SERVICE_NAME)
    utils.untar(syncthing_package, destination='/opt/cloudify/syncthing')
Example #39
def download_tarballs(downloads, working_dir):
    for download_url in downloads:
        download_file_path = download(download_url, get_download_dir(working_dir))
        untar(download_file_path, get_source_dir(working_dir))
def deploy_manager_sources():
    """Deploys all manager sources from a single archive.
    """
    archive_path = ctx_properties['manager_resources_package']
    archive_checksum_path = \
        ctx_properties['manager_resources_package_checksum_file']
    skip_checksum_validation = ctx_properties['skip_checksum_validation']
    agent_archives_path = utils.AGENT_ARCHIVES_PATH
    utils.mkdir(agent_archives_path)
    if archive_path:
        sources_agents_path = os.path.join(utils.CLOUDIFY_SOURCES_PATH,
                                           'agents')
        # this will leave this several hundreds of MBs archive on the
        # manager. should find a way to clean it after all operations
        # were completed and bootstrap succeeded as it is no longer
        # necessary
        utils.mkdir(utils.CLOUDIFY_SOURCES_PATH)
        resource_name = os.path.basename(archive_path)
        destination = os.path.join(utils.CLOUDIFY_SOURCES_PATH, resource_name)

        ctx.logger.info('Downloading manager resources package...')
        resources_archive_path = \
            utils.download_cloudify_resource(
                archive_path, NODE_NAME, destination=destination)
        # This would ideally go under utils.download_cloudify_resource but as
        # of now, we'll only be validating the manager resources package.

        if not skip_checksum_validation:
            ctx.logger.info('Validating checksum...')
            skip_if_failed = False
            if not archive_checksum_path:
                skip_if_failed = True
                archive_checksum_path = archive_path + '.md5'
            md5_name = os.path.basename(archive_checksum_path)
            destination = os.path.join(utils.CLOUDIFY_SOURCES_PATH, md5_name)
            resources_archive_md5_path = utils.download_cloudify_resource(
                archive_checksum_path, NODE_NAME, destination=destination)
            if not utils.validate_md5_checksum(resources_archive_path,
                                               resources_archive_md5_path):
                if skip_if_failed:
                    ctx.logger.warn('Checksum validation failed. '
                                    'Continuing as no checksum file was '
                                    'explicitly provided.')
                else:
                    ctx.abort_operation(
                        'Failed to validate checksum for {0}'.format(
                            resources_archive_path))
            else:
                ctx.logger.info('Resources Package downloaded successfully...')
        else:
            ctx.logger.info(
                'Skipping resources package checksum validation...')

        utils.untar(resources_archive_path,
                    utils.CLOUDIFY_SOURCES_PATH,
                    skip_old_files=True)

        def splitext(filename):
            # not using os.path.splitext as it would return .gz instead of
            # .tar.gz
            if filename.endswith('.tar.gz'):
                return '.tar.gz'
            elif filename.endswith('.exe'):
                return '.exe'
            else:
                ctx.abort_operation(
                    'Unknown agent format for {0}. '
                    'Must be either tar.gz or exe'.format(filename))

        def normalize_agent_name(filename):
            # this returns the normalized name of an agent upon which our agent
            # installer retrieves agent packages for installation.
            # e.g. Ubuntu-trusty-agent_3.4.0-m3-b392.tar.gz returns
            # ubuntu-trusty-agent
            return filename.split('_', 1)[0].lower()

        def backup_agent_resources(agents_dir):
            ctx.logger.info('Backing up agents in {0}...'.format(agents_dir))
            if not os.path.isdir(utils.AGENTS_ROLLBACK_PATH):
                utils.mkdir(utils.AGENTS_ROLLBACK_PATH)
                utils.copy(agents_dir, utils.AGENTS_ROLLBACK_PATH)

        def restore_agent_resources(agents_dir):
            ctx.logger.info('Restoring agents in {0}'.format(
                utils.AGENTS_ROLLBACK_PATH))
            if os.path.isdir(agents_dir):
                utils.remove(agents_dir)
            utils.mkdir(agents_dir)
            utils.copy(os.path.join(utils.AGENTS_ROLLBACK_PATH, 'agents', '.'),
                       agents_dir)

        manager_scripts_path = os.path.join(utils.MANAGER_RESOURCES_HOME,
                                            'packages', 'scripts')
        manager_templates_path = os.path.join(utils.MANAGER_RESOURCES_HOME,
                                              'packages', 'templates')
        if utils.is_upgrade:
            backup_agent_resources(agent_archives_path)
            utils.remove(agent_archives_path)
            utils.mkdir(agent_archives_path)
            utils.remove(manager_scripts_path)
            utils.remove(manager_templates_path)
            ctx.logger.info('Upgrading agents...')
        elif utils.is_rollback:
            ctx.logger.info('Restoring agents...')
            restore_agent_resources(agent_archives_path)

        for agent_file in os.listdir(sources_agents_path):

            agent_id = normalize_agent_name(agent_file)
            agent_extension = splitext(agent_file)
            utils.move(
                os.path.join(sources_agents_path, agent_file),
                os.path.join(agent_archives_path, agent_id + agent_extension))
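
The checksum step above falls back to `<archive>.md5` when no checksum file is explicitly provided, and in that case only warns instead of aborting. The internals of utils.validate_md5_checksum are not part of this snippet; a minimal sketch of such a check, assuming the conventional md5sum-style checksum file format, might look like this:

import hashlib


def validate_md5_checksum(archive_path, md5_file_path):
    # Assumption: the checksum file follows the md5sum convention of
    # "<hexdigest>  <filename>"; the real utils.validate_md5_checksum
    # is not shown in this example.
    with open(md5_file_path) as checksum_file:
        expected_digest = checksum_file.read().split()[0].strip()

    digest = hashlib.md5()
    with open(archive_path, 'rb') as archive:
        for chunk in iter(lambda: archive.read(1024 * 1024), b''):
            digest.update(chunk)
    return digest.hexdigest() == expected_digest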
Example #41
0
def install_riemann():
    langohr_source_url = ctx_properties['langohr_jar_source_url']
    daemonize_source_url = ctx_properties['daemonize_rpm_source_url']
    riemann_source_url = ctx_properties['riemann_rpm_source_url']
    # Needed for Riemann's config
    cloudify_resources_url = ctx_properties['cloudify_resources_url']
    rabbitmq_username = ctx_properties['rabbitmq_username']
    rabbitmq_password = ctx_properties['rabbitmq_password']

    riemann_config_path = '/etc/riemann'
    riemann_log_path = '/var/log/cloudify/riemann'
    langohr_home = '/opt/lib'
    extra_classpath = '{0}/langohr.jar'.format(langohr_home)

    # Confirm username and password have been supplied for broker before
    # continuing.
    # Components other than logstash and riemann have this handled in code.
    # Note that these are not directly used in this script, but are used by the
    # deployed resources, hence the check here.
    if not rabbitmq_username or not rabbitmq_password:
        ctx.abort_operation(
            'Both rabbitmq_username and rabbitmq_password must be supplied '
            'and at least 1 character long in the manager blueprint inputs.')

    rabbit_props = utils.ctx_factory.get('rabbitmq')
    ctx.instance.runtime_properties['rabbitmq_endpoint_ip'] = \
        utils.get_rabbitmq_endpoint_ip(
                rabbit_props.get('rabbitmq_endpoint_ip'))
    ctx.instance.runtime_properties['rabbitmq_username'] = \
        rabbit_props.get('rabbitmq_username')
    ctx.instance.runtime_properties['rabbitmq_password'] = \
        rabbit_props.get('rabbitmq_password')

    ctx.logger.info('Installing Riemann...')
    utils.set_selinux_permissive()

    utils.copy_notice(RIEMANN_SERVICE_NAME)
    utils.mkdir(riemann_log_path)
    utils.mkdir(langohr_home)
    utils.mkdir(riemann_config_path)
    utils.mkdir('{0}/conf.d'.format(riemann_config_path))

    langohr = utils.download_cloudify_resource(langohr_source_url,
                                               RIEMANN_SERVICE_NAME)
    utils.sudo(['cp', langohr, extra_classpath])
    ctx.logger.info('Applying Langohr permissions...')
    utils.sudo(['chmod', '644', extra_classpath])
    utils.yum_install(daemonize_source_url, service_name=RIEMANN_SERVICE_NAME)
    utils.yum_install(riemann_source_url, service_name=RIEMANN_SERVICE_NAME)

    utils.logrotate(RIEMANN_SERVICE_NAME)

    ctx.logger.info('Downloading cloudify-manager Repository...')
    manager_repo = utils.download_cloudify_resource(cloudify_resources_url,
                                                    RIEMANN_SERVICE_NAME)
    ctx.logger.info('Extracting Manager Repository...')
    utils.untar(manager_repo, '/tmp')
    ctx.logger.info('Deploying Riemann manager.config...')
    utils.move(
        '/tmp/plugins/riemann-controller/riemann_controller/resources/manager.config',  # NOQA
        '{0}/conf.d/manager.config'.format(riemann_config_path))

    ctx.logger.info('Deploying Riemann conf...')
    utils.deploy_blueprint_resource('{0}/main.clj'.format(CONFIG_PATH),
                                    '{0}/main.clj'.format(riemann_config_path),
                                    RIEMANN_SERVICE_NAME)

    # Our Riemann configuration will (by default) try to read these
    # environment variables; if they are not set, it falls back to
    # "localhost":
    # export MANAGEMENT_IP=""
    # export RABBITMQ_HOST=""

    # We inject the management_ip for both of these into Riemann's systemd
    # config. The two values can differ if the manager and RabbitMQ are
    # running on different hosts.
    utils.systemd.configure(RIEMANN_SERVICE_NAME)
    utils.clean_var_log_dir(RIEMANN_SERVICE_NAME)
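
utils.untar is used throughout these examples (including the skip_old_files=True variant seen earlier), but its body is not shown. A rough, hypothetical sketch built on the standard tarfile module could look like the following; the real helper likely adds sudo and ownership handling that is omitted here:

import os
import tarfile


def untar(archive, destination='.', skip_old_files=False):
    # Hypothetical sketch of an untar helper; not the project's actual code.
    with tarfile.open(archive, 'r:*') as tar:
        members = tar.getmembers()
        if skip_old_files:
            # Skip entries that already exist at the destination.
            members = [m for m in members
                       if not os.path.exists(os.path.join(destination,
                                                          m.name))]
        tar.extractall(path=destination, members=members)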
def deploy_manager_sources():
    """Deploys all manager sources from a single archive.
    """
    archive_path = ctx.node.properties['manager_resources_package']
    archive_checksum_path = \
        ctx.node.properties['manager_resources_package_checksum_file']
    skip_checksum_validation = ctx.node.properties['skip_checksum_validation']
    if archive_path:
        sources_agents_path = os.path.join(
            utils.CLOUDIFY_SOURCES_PATH, 'agents')
        agent_archives_path = utils.AGENT_ARCHIVES_PATH
        utils.mkdir(agent_archives_path)
        # This will leave a several-hundred-MB archive on the manager.
        # We should find a way to clean it up after all operations have
        # completed and bootstrap has succeeded, as it is no longer
        # necessary.
        resources_archive_path = \
            utils.download_cloudify_resource(archive_path)
        # This would ideally go under utils.download_cloudify_resource but as
        # of now, we'll only be validating the manager resources package.

        if not skip_checksum_validation:
            skip_if_failed = False
            if not archive_checksum_path:
                skip_if_failed = True
                archive_checksum_path = archive_path + '.md5'
            resources_archive_md5_path = \
                utils.download_cloudify_resource(archive_checksum_path)
            if not utils.validate_md5_checksum(resources_archive_path,
                                               resources_archive_md5_path):
                if skip_if_failed:
                    ctx.logger.warn('Checksum validation failed. '
                                    'Continuing as no checksum file was '
                                    'explicitly provided.')
                else:
                    utils.error_exit(
                        'Failed to validate checksum for {0}'.format(
                            resources_archive_path))
            else:
                ctx.logger.info('Resources Package downloaded successfully...')
        else:
            ctx.logger.info(
                'Skipping resources package checksum validation...')

        utils.untar(
            resources_archive_path,
            utils.CLOUDIFY_SOURCES_PATH,
            skip_old_files=True)

        def splitext(filename):
            # not using os.path.splitext as it would return .gz instead of
            # .tar.gz
            if filename.endswith('.tar.gz'):
                return '.tar.gz'
            elif filename.endswith('.exe'):
                return '.exe'
            else:
                utils.error_exit(
                    'Unknown agent format for {0}. '
                    'Must be either tar.gz or exe'.format(filename))

        def normalize_agent_name(filename):
            # Return the normalized agent name that the agent installer
            # uses to retrieve agent packages for installation.
            # e.g. Ubuntu-trusty-agent_3.4.0-m3-b392.tar.gz returns
            # ubuntu-trusty-agent
            return filename.split('_', 1)[0].lower()

        for agent_file in os.listdir(sources_agents_path):

            agent_id = normalize_agent_name(agent_file)
            agent_extension = splitext(agent_file)
            utils.move(
                os.path.join(sources_agents_path, agent_file),
                os.path.join(agent_archives_path, agent_id + agent_extension))
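
To illustrate the renaming performed by the loop above (using a hypothetical agent filename that mirrors the one given in the normalize_agent_name comment):

# Hypothetical filename, following the example in the comments above.
agent_file = 'Ubuntu-trusty-agent_3.4.0-m3-b392.tar.gz'

agent_id = agent_file.split('_', 1)[0].lower()      # 'ubuntu-trusty-agent'
agent_extension = '.tar.gz' if agent_file.endswith('.tar.gz') else '.exe'

assert agent_id + agent_extension == 'ubuntu-trusty-agent.tar.gz'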
def _install_stage():
    nodejs_source_url = ctx_properties['nodejs_tar_source_url']
    stage_source_url = ctx_properties['stage_tar_source_url']

    if not utils.resource_factory.local_resource_exists(stage_source_url):
        ctx.logger.info('Stage package not found in manager resources '
                        'package. Stage will not be installed.')
        ctx.instance.runtime_properties['skip_installation'] = 'true'
        return

    # injected as an input to the script
    ctx.instance.runtime_properties['influxdb_endpoint_ip'] = \
        os.environ.get('INFLUXDB_ENDPOINT_IP')

    utils.set_selinux_permissive()
    utils.copy_notice(SERVICE_NAME)

    utils.mkdir(NODEJS_DIR)
    utils.mkdir(HOME_DIR)
    utils.mkdir(LOG_DIR)
    utils.mkdir(RESOURCES_DIR)

    utils.create_service_user(STAGE_USER, STAGE_GROUP, HOME_DIR)

    ctx.logger.info('Installing NodeJS...')
    nodejs = utils.download_cloudify_resource(nodejs_source_url, SERVICE_NAME)
    utils.untar(nodejs, NODEJS_DIR)
    utils.remove(nodejs)

    ctx.logger.info('Installing Cloudify Stage (UI)...')
    stage_tar = utils.download_cloudify_resource(stage_source_url,
                                                 SERVICE_NAME)
    if 'community' in stage_tar:
        ctx.logger.info('Community edition')
        ctx.instance.runtime_properties['community_mode'] = '-mode community'
    else:
        ctx.instance.runtime_properties['community_mode'] = ''

    utils.untar(stage_tar, HOME_DIR)
    utils.remove(stage_tar)

    ctx.logger.info('Fixing permissions...')
    utils.chown(STAGE_USER, STAGE_GROUP, HOME_DIR)
    utils.chown(STAGE_USER, STAGE_GROUP, NODEJS_DIR)
    utils.chown(STAGE_USER, STAGE_GROUP, LOG_DIR)
    configure_script(
        'restore-snapshot.py',
        'Restore stage directories from a snapshot path',
    )
    configure_script(
        'make-auth-token.py',
        'Update auth token for stage user',
    )
    # Allow snapshot restores to restore token
    utils.allow_user_to_sudo_command(
        '/opt/manager/env/bin/python',
        'Snapshot update auth token for stage user',
        allow_as=STAGE_USER,
    )
    subprocess.check_call([
        'sudo',
        '-u',
        'stage_user',
        '/opt/manager/env/bin/python',
        '/opt/cloudify/stage/make-auth-token.py',
    ])

    utils.logrotate(SERVICE_NAME)
    utils.systemd.configure(SERVICE_NAME)

    backend_dir = join(HOME_DIR, 'backend')
    npm_path = join(NODEJS_DIR, 'bin', 'npm')
    subprocess.check_call(
        'cd {0}; {1} run db-migrate'.format(backend_dir, npm_path),
        shell=True)
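
The final migration step shells out with `cd ...; npm run db-migrate`. An equivalent (hypothetical) way to express it without shell=True is to pass cwd= instead; the directory constants below are assumptions, since their values are not defined in this snippet:

import subprocess
from os.path import join

HOME_DIR = '/opt/cloudify-stage'   # assumption: not defined in this snippet
NODEJS_DIR = '/opt/nodejs'         # assumption: not defined in this snippet

subprocess.check_call(
    [join(NODEJS_DIR, 'bin', 'npm'), 'run', 'db-migrate'],
    cwd=join(HOME_DIR, 'backend'))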
Example #44
0
    def start(self, db_session, logger, process_name):
        # Save the db_session reference for progress_listener
        self.db_session = db_session
        try:
            self.download_job = db_session.query(DownloadJob).filter(
                DownloadJob.id == self.job_id).first()
            if self.download_job is None:
                logger.error('Unable to retrieve download job: %s' %
                             self.job_id)
                return

            output_file_path = (get_repository_directory() +
                                self.download_job.cco_filename)

            # Only download if the image (tar file) is not already in the
            # downloads directory, or the existing one is not a valid tar file.
            if not self.is_tar_file_valid(output_file_path):
                user_id = self.download_job.user_id
                user = db_session.query(User).filter(
                    User.id == user_id).first()
                if user is None:
                    logger.error('Unable to retrieve user: %s' % user_id)

                preferences = db_session.query(Preferences).filter(
                    Preferences.user_id == user_id).first()
                if preferences is None:
                    logger.error('Unable to retrieve user preferences: %s' %
                                 user_id)

                self.download_job.set_status(JobStatus.PROCESSING)
                db_session.commit()

                bsd = BSDServiceHandler(
                    username=preferences.cco_username,
                    password=preferences.cco_password,
                    image_name=self.download_job.cco_filename,
                    PID=self.download_job.pid,
                    MDF_ID=self.download_job.mdf_id,
                    software_type_ID=self.download_job.software_type_id)

                self.download_job.set_status(
                    'Preparing to download from cisco.com.')
                db_session.commit()

                bsd.download(output_file_path, callback=self.progress_listener)

                tarfile_file_list = untar(output_file_path,
                                          get_repository_directory())
            else:
                tarfile_file_list = get_tarfile_file_list(output_file_path)

            # Now transfers to the server repository
            self.download_job.set_status(
                'Transferring file to server repository.')
            db_session.commit()

            server = db_session.query(Server).filter(
                Server.id == self.download_job.server_id).first()
            if server is not None:
                server_impl = get_server_impl(server)
                for filename in tarfile_file_list:
                    server_impl.upload_file(
                        get_repository_directory() + filename,
                        filename,
                        sub_directory=self.download_job.server_directory)

            self.archive_download_job(db_session, self.download_job,
                                      JobStatus.COMPLETED)
            db_session.commit()

        except Exception:
            try:
                logger.exception(
                    'DownloadManager hit exception - download job = %s',
                    self.job_id)
                self.archive_download_job(db_session, self.download_job,
                                          JobStatus.FAILED,
                                          traceback.format_exc())
                db_session.commit()
            except Exception:
                logger.exception(
                    'DownloadManager hit exception - download job = %s',
                    self.job_id)
        finally:
            db_session.close()
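
self.is_tar_file_valid is not shown in this example. A minimal sketch of such a check, assuming it only verifies that the file exists and is a readable tar archive, could be:

import os
import tarfile


def is_tar_file_valid(path):
    # Assumption about behaviour; the project's actual implementation
    # is not included in this snippet.
    if not os.path.isfile(path):
        return False
    try:
        with tarfile.open(path) as tar:
            tar.getmembers()   # reads the whole index; raises on corruption
        return True
    except tarfile.TarError:
        return False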