def create(**params):
    """
    Creates and stores the daemon parameters.

    """
    daemon_params = dict(**params)
    # Fold any raw CLI custom options into the parameter dict.
    raw_custom = daemon_params.pop('custom_options', ())
    daemon_params.update(_parse_custom_options(raw_custom))
    click.echo('Creating...')
    # Imported lazily to avoid a circular import at module load time.
    from cloudify_agent.shell.main import get_logger
    if daemon_params['broker_get_settings_from_manager']:
        # Broker settings come from the manager instead of the CLI input.
        broker_settings = api_utils.internal.get_broker_configuration(
            daemon_params)
        daemon_params.update(broker_settings)
    daemon = DaemonFactory().new(logger=get_logger(), **daemon_params)
    daemon.create()
    _save_daemon(daemon)
    click.echo('Successfully created daemon: {0}'.format(daemon.name))
def create(**params):
    """
    Creates and stores the daemon parameters.

    """
    settings = dict(**params)
    settings.update(_parse_security_settings(settings))
    # Fold any raw CLI custom options into the parameter dict.
    raw_custom = settings.pop('custom_options', ())
    settings.update(_parse_custom_options(raw_custom))
    click.echo('Creating...')
    # The REST SSL cert must be written before get_broker_configuration,
    # because that REST call might require it to succeed.
    _create_rest_ssl_cert(settings)
    if settings['broker_get_settings_from_manager']:
        broker_settings = api_utils.internal.get_broker_configuration(
            settings)
        settings.update(broker_settings)
    # The broker SSL cert is written only after get_broker_configuration,
    # because the cert might be retrieved by that call.
    _create_broker_ssl_cert(settings)
    # Imported lazily to avoid a circular import at module load time.
    from cloudify_agent.shell.main import get_logger
    daemon = DaemonFactory().new(logger=get_logger(), **settings)
    daemon.create()
    _save_daemon(daemon)
    click.echo('Successfully created daemon: {0}'.format(daemon.name))
def restart(new_name=None, delay_period=5, **_):
    """Replace the current agent daemon with a clone under a new name.

    Clones the running daemon's attributes, starts the clone, and then
    kills the current master from a background thread after
    ``delay_period`` seconds.

    :param new_name: name for the replacement daemon; generated from the
        current agent name when ``None``.
    :param delay_period: seconds the shutdown thread waits before killing
        the current master.
    """
    cloudify_agent = ctx.instance.runtime_properties['cloudify_agent']
    if new_name is None:
        new_name = utils.internal.generate_new_agent_name(
            cloudify_agent.get('name', 'agent'))

    # update agent name in runtime properties so that the workflow will
    # know the name of the worker handling tasks to this instance.
    # the update cannot be done by setting a nested property directly
    # because they are not recognized as 'dirty'
    cloudify_agent['name'] = new_name
    ctx.instance.runtime_properties['cloudify_agent'] = cloudify_agent

    # must update instance here because the process may shutdown before
    # the decorator has a chance to do it.
    ctx.instance.update()

    daemon = _load_daemon(logger=ctx.logger)

    # make the current master stop listening to the current queue
    # to avoid a situation where we have two masters listening on the
    # same queue.
    app.control.cancel_consumer(
        queue=daemon.queue,
        destination=['celery@{0}'.format(daemon.name)]
    )

    # clone the current daemon to preserve all the attributes
    attributes = utils.internal.daemon_to_dict(daemon)

    # give the new daemon the new name
    attributes['name'] = new_name

    # remove the log file and pid file so that new ones will be created
    # for the new agent
    del attributes['log_file']
    del attributes['pid_file']

    # Get the broker credentials for the daemon
    attributes.update(ctx.bootstrap_context.broker_config())

    new_daemon = DaemonFactory().new(logger=ctx.logger, **attributes)

    # create the new daemon
    new_daemon.create()
    _save_daemon(new_daemon)

    # configure the new daemon
    new_daemon.configure()
    new_daemon.start()

    # start a thread that will kill the current master.
    # this is done in a thread so that the current task will not result in
    # a failure
    thread = threading.Thread(target=shutdown_current_master,
                              args=[delay_period, ctx.logger])
    thread.daemon = True
    thread.start()
def restart(new_name=None, delay_period=5, **_):
    """Replace the current agent daemon with a clone under a new name.

    Clones the running daemon's attributes, starts the clone, and then
    kills the current master from a background thread after
    ``delay_period`` seconds.

    :param new_name: name for the replacement daemon; generated from the
        current agent name when ``None``.
    :param delay_period: seconds the shutdown thread waits before killing
        the current master.
    """
    cloudify_agent = ctx.instance.runtime_properties['cloudify_agent']
    if new_name is None:
        new_name = utils.internal.generate_new_agent_name(
            cloudify_agent.get('name', 'agent'))

    # update agent name in runtime properties so that the workflow will
    # know the name of the worker handling tasks to this instance.
    # the update cannot be done by setting a nested property directly
    # because they are not recognized as 'dirty'
    cloudify_agent['name'] = new_name
    ctx.instance.runtime_properties['cloudify_agent'] = cloudify_agent

    # must update instance here because the process may shutdown before
    # the decorator has a chance to do it.
    ctx.instance.update()

    daemon = _load_daemon(logger=ctx.logger)

    # make the current master stop listening to the current queue
    # to avoid a situation where we have two masters listening on the
    # same queue.
    app = get_celery_app(tenant=cloudify_agent['rest_tenant'])
    app.control.cancel_consumer(queue=daemon.queue,
                                destination=['celery@{0}'.format(daemon.name)])

    # clone the current daemon to preserve all the attributes
    attributes = utils.internal.daemon_to_dict(daemon)

    # give the new daemon the new name
    attributes['name'] = new_name

    # remove the log file and pid file so that new ones will be created
    # for the new agent
    del attributes['log_file']
    del attributes['pid_file']

    # Get the broker credentials for the daemon
    attributes.update(ctx.bootstrap_context.broker_config())

    new_daemon = DaemonFactory().new(logger=ctx.logger, **attributes)

    # create the new daemon
    new_daemon.create()
    _save_daemon(new_daemon)

    # configure the new daemon
    new_daemon.configure()
    new_daemon.start()

    # start a thread that will kill the current master.
    # this is done in a thread so that the current task will not result in
    # a failure
    thread = threading.Thread(target=shutdown_current_master,
                              args=[delay_period, ctx.logger])
    thread.daemon = True
    thread.start()
def restart(new_name=None, delay_period=5, **_):
    """Replace the current agent daemon with a clone under a new name.

    Clones the running daemon's attributes, starts the clone, then stops
    the old daemon and raises :class:`StopAgent` so the current worker
    terminates.

    :param new_name: name for the replacement daemon; generated from the
        current agent name when ``None``.
    :param delay_period: unused here; kept for backward compatibility
        with callers of the older thread-based restart interface.
    :raises StopAgent: always, to signal the current agent to stop.
    """
    cloudify_agent = ctx.instance.runtime_properties['cloudify_agent']
    if new_name is None:
        new_name = utils.internal.generate_new_agent_name(
            cloudify_agent.get('name', 'agent'))

    # update agent name in runtime properties so that the workflow will
    # know the name of the worker handling tasks to this instance.
    # the update cannot be done by setting a nested property directly
    # because they are not recognized as 'dirty'
    cloudify_agent['name'] = new_name
    update_agent_runtime_properties(cloudify_agent)

    daemon = _load_daemon(logger=ctx.logger)

    # make the current master stop listening to the current queue
    # to avoid a situation where we have two masters listening on the
    # same queue.
    app = get_celery_app(tenant=ctx.tenant)
    app.control.cancel_consumer(queue=daemon.queue,
                                destination=['celery@{0}'.format(daemon.name)])

    # clone the current daemon to preserve all the attributes
    attributes = utils.internal.daemon_to_dict(daemon)

    # give the new daemon the new name
    attributes['name'] = new_name

    # remove the log file and pid file so that new ones will be created
    # for the new agent
    del attributes['log_file']
    del attributes['pid_file']

    # Get the broker credentials for the daemon
    attributes.update(ctx.bootstrap_context.broker_config())

    new_daemon = DaemonFactory().new(logger=ctx.logger, **attributes)

    # create the new daemon
    new_daemon.create()
    _save_daemon(new_daemon)

    # configure the new daemon
    new_daemon.configure()
    new_daemon.start()

    # ..and stop the old agent
    daemon.before_self_stop()
    raise StopAgent()
def create(**params):
    """
    Creates and stores the daemon parameters.

    """
    daemon_params = dict(**params)
    # Fold any raw CLI custom options into the parameter dict.
    raw_custom = daemon_params.pop('custom_options', ())
    daemon_params.update(_parse_custom_options(raw_custom))
    click.echo('Creating...')
    # Imported lazily to avoid a circular import at module load time.
    from cloudify_agent.shell.main import get_logger
    daemon = DaemonFactory().new(
        logger=get_logger(),
        **daemon_params
    )
    daemon.create()
    _save_daemon(daemon)
    click.echo('Successfully created daemon: {0}'.format(daemon.name))
def create(**params):
    """
    Creates and stores the daemon parameters.

    """
    options = dict(**params)
    # Fold any raw CLI custom options into the parameter dict.
    options.update(_parse_custom_options(options.pop('custom_options', ())))
    click.echo('Creating...')
    # Imported lazily to avoid a circular import at module load time.
    from cloudify_agent.shell.main import get_logger
    daemon = DaemonFactory().new(logger=get_logger(), **options)
    daemon.create()
    _save_daemon(daemon)
    click.echo('Successfully created daemon: {0}'.format(daemon.name))
def create(**params):
    """
    Creates and stores the daemon parameters.

    """
    daemon_params = dict(**params)
    # Fold any raw CLI custom options into the parameter dict.
    raw_custom = daemon_params.pop('custom_options', ())
    daemon_params.update(_parse_custom_options(raw_custom))
    click.echo('Creating...')
    if daemon_params['broker_get_settings_from_manager']:
        # Broker settings come from the manager instead of the CLI input.
        broker_settings = api_utils.internal.get_broker_configuration(
            daemon_params)
        daemon_params.update(broker_settings)
    # Imported lazily to avoid a circular import at module load time.
    from cloudify_agent.shell.main import get_logger
    daemon = DaemonFactory().new(logger=get_logger(), **daemon_params)
    daemon.create()
    _save_daemon(daemon)
    click.echo('Successfully created daemon: {0}'.format(daemon.name))