def install(self, env):
    """Install the Metron indexing RPM packages on this host.

    Registers the package repository first so install_packages can
    resolve the Metron RPMs.
    """
    from params import params
    env.set_params(params)
    indexing = IndexingCommands(params)
    indexing.setup_repo()
    Logger.info('Install RPM packages')
    self.install_packages(env)
def zeppelin_notebook_import(self, env):
    """Import every Zeppelin notebook (*.json) found under the Metron
    Zeppelin config directory into the Zeppelin server.

    Fix: each notebook file was opened with open(...) but never closed,
    leaking a file handle per notebook; a 'with' block now guarantees
    the handle is released after each POST.
    """
    from params import params
    env.set_params(params)
    commands = IndexingCommands(params)
    Logger.info(
        ambari_format(
            'Searching for Zeppelin Notebooks in {metron_config_zeppelin_path}'
        ))
    # Check if authentication is configured on Zeppelin server, and fetch details if enabled.
    ses = requests.session()
    ses = commands.get_zeppelin_auth_details(ses, params.zeppelin_server_url, env)
    for dirName, subdirList, files in os.walk(params.metron_config_zeppelin_path):
        for fileName in files:
            if fileName.endswith(".json"):
                Logger.info("Importing notebook: " + fileName)
                zeppelin_import_url = ambari_format(
                    'http://{zeppelin_server_url}/api/notebook/import')
                # Close the notebook file as soon as the upload completes.
                with open(os.path.join(dirName, fileName), 'rb') as notebook_file:
                    res = ses.post(zeppelin_import_url,
                                   files={'file': notebook_file})
                Logger.info("Result: " + res.text)
def restart(self, env):
    """Re-apply configuration, then restart the indexing topology."""
    from params import params
    env.set_params(params)
    # Re-run configure so any changed settings take effect on restart.
    self.configure(env)
    IndexingCommands(params).restart_indexing_topology(env)
def service_check(self, env):
    """Exit 0 when the parser topologies and the indexing topology are
    all running, exit 1 otherwise."""
    from params import params
    parser_cmds = ParserCommands(params)
    indexing_cmds = IndexingCommands(params)
    # 'and' short-circuits: indexing is only probed if parsers are up.
    healthy = parser_cmds.topologies_running(env)
    healthy = healthy and indexing_cmds.is_topology_active(env)
    # Ambari interprets a zero exit status as a passing service check.
    exit(0 if healthy else 1)
def service_check(self, env):
    """Service check: verify parser topologies and the indexing topology
    are active; exits 0 on success, 1 on failure."""
    from params import params
    parser_commands = ParserCommands(params)
    index_commands = IndexingCommands(params)
    if parser_commands.topologies_running(env) and index_commands.is_topology_active(env):
        exit(0)
    exit(1)
def elasticsearch_template_delete(self, env):
    """Delete every Metron index template from Elasticsearch via its REST API."""
    from params import params
    env.set_params(params)
    Logger.info("Deleting Elasticsearch index templates")
    commands = IndexingCommands(params)
    # Command string is loop-invariant; build it once.
    delete_cmd = "curl -s -XDELETE \"http://{0}/_template/{1}\""
    for template in commands.get_templates():
        # Issue one DELETE per template; curl output is logged for debugging.
        Execute(delete_cmd.format(params.es_http_url, template), logoutput=True)
def elasticsearch_template_delete(self, env):
    """Remove the Metron index templates from the Elasticsearch cluster."""
    from params import params
    env.set_params(params)
    Logger.info("Deleting Elasticsearch index templates")
    indexing = IndexingCommands(params)
    for tmpl in indexing.get_templates():
        # curl -XDELETE drops the named template; response is logged.
        url_cmd = 'curl -s -XDELETE "http://{0}/_template/{1}"'.format(
            params.es_http_url, tmpl)
        Execute(url_cmd, logoutput=True)
def elasticsearch_template_install(self, env): from params import params env.set_params(params) Logger.info("Installing Elasticsearch index templates") commands = IndexingCommands(params) for template_name, template_path in commands.get_templates().iteritems(): # install the index template File(template_path, mode=0755, content=StaticFile("{0}.template".format(template_name))) cmd = "curl -s -XPOST http://{0}/_template/{1} -d @{2}" Execute( cmd.format(params.es_http_url, template_name, template_path), logoutput=True)
def configure(self, env, upgrade_type=None, config_dir=None):
    """One-time indexing setup: load global config, then create Kafka
    topics and HDFS directories if not already done."""
    from params import params
    env.set_params(params)
    commands = IndexingCommands(params)
    metron_service.load_global_config(params)
    # A persisted marker tracks whether initialization already ran.
    if commands.is_configured():
        return
    commands.init_kafka_topics()
    commands.init_hdfs_dir()
    commands.set_configured()
def zeppelin_notebook_import(self, env):
    """Import all Zeppelin notebooks (*.json) found under the Metron
    Zeppelin config path into the Zeppelin server.

    Fix: each notebook file was opened and never closed, leaking a file
    handle per notebook; the handle is now managed with a 'with' block.
    """
    from params import params
    env.set_params(params)
    commands = IndexingCommands(params)
    Logger.info(ambari_format('Searching for Zeppelin Notebooks in {metron_config_zeppelin_path}'))
    # Check if authentication is configured on Zeppelin server, and fetch details if enabled.
    ses = requests.session()
    ses = commands.get_zeppelin_auth_details(ses, params.zeppelin_server_url, env)
    for dirName, subdirList, files in os.walk(params.metron_config_zeppelin_path):
        for fileName in files:
            if fileName.endswith(".json"):
                Logger.info("Importing notebook: " + fileName)
                zeppelin_import_url = ambari_format('http://{zeppelin_server_url}/api/notebook/import')
                # Ensure the notebook handle is closed after the upload.
                with open(os.path.join(dirName, fileName), 'rb') as notebook:
                    res = ses.post(zeppelin_import_url, files={'file': notebook})
                Logger.info("Result: " + res.text)
def zeppelin_notebook_import(self, env):
    """Import Zeppelin notebooks via curl, using a session cookie obtained
    up front from the Zeppelin auth endpoint."""
    from params import params
    env.set_params(params)
    metron_service.check_indexer_parameters()
    commands = IndexingCommands(params)
    Logger.info(ambari_format('Searching for Zeppelin Notebooks in {metron_config_zeppelin_path}'))
    # Check if authentication is configured on Zeppelin server, and fetch details if enabled.
    session_id = commands.get_zeppelin_auth_details(params.zeppelin_server_url, env)
    import_cmd_template = "curl -i -b \"{0}\" http://{1}/api/notebook/import -d @'{2}'"
    for dir_name, _, file_names in os.walk(params.metron_config_zeppelin_path):
        for file_name in file_names:
            if not file_name.endswith(".json"):
                continue
            Logger.info("Importing notebook: " + file_name)
            notebook_path = os.path.join(dir_name, file_name)
            import_cmd = import_cmd_template.format(
                session_id, params.zeppelin_server_url, notebook_path)
            return_code, import_result, stderr = get_user_call_output(
                import_cmd, user=params.metron_user)
            Logger.info("Status of importing notebook: " + import_result)
            if return_code != 0:
                Logger.error("Error importing notebook: " + file_name + " Error Message: " + stderr)
def service_check(self, env):
    """Run the service check of every Metron component in sequence.

    Each individual check is expected to fail loudly on error; reaching
    the end means the whole stack is healthy, so we exit 0.
    """
    from params import params
    # (log label, command class) pairs, checked in dependency order.
    checks = [
        ("Parser", ParserCommands),
        ("Enrichment", EnrichmentCommands),
        ("Indexing", IndexingCommands),
        ("Profiler", ProfilerCommands),
        ("REST application", RestCommands),
        ("Management UI", ManagementUICommands),
        ("Alerts UI", AlertsUICommands),
    ]
    for label, command_class in checks:
        Logger.info("Performing " + label + " service check")
        command_class(params).service_check(env)
    Logger.info("Metron service check completed successfully")
    exit(0)
def elasticsearch_template_install(self, env): from params import params env.set_params(params) Logger.info("Installing Elasticsearch index templates") try: metron_service.check_indexer_parameters() commands = IndexingCommands(params) for template_name, template_path in commands.get_templates().iteritems(): # install the index template File(template_path, mode=0755, content=StaticFile("{0}.template".format(template_name))) cmd = "curl -s -XPOST http://{0}/_template/{1} -d @{2}" Execute( cmd.format(params.es_http_url, template_name, template_path), logoutput=True) return True except Exception as e: msg = "WARNING: Elasticsearch index templates could not be installed. " \ "Is Elasticsearch running? Will reattempt install on next start. error={0}" Logger.warning(msg.format(e)) return False
def start(self, env, upgrade_type=None):
    """Configure, best-effort install the ES index templates, then start
    the indexing topology."""
    from params import params
    env.set_params(params)
    self.configure(env)
    indexing = IndexingCommands(params)
    # Install elasticsearch templates; a down ES cluster must not block
    # topology startup, so failures only produce a warning.
    try:
        if not indexing.is_elasticsearch_template_installed():
            self.elasticsearch_template_install(env)
            indexing.set_elasticsearch_template_installed()
    except Exception as e:
        msg = "WARNING: Elasticsearch index templates could not be installed. " \
              "Is Elasticsearch running? Will reattempt install on next start. error={0}"
        Logger.warning(msg.format(e))
    indexing.start_indexing_topology(env)
def status(self, env):
    """Raise ComponentIsNotRunning unless the indexing topology is active."""
    from params import status_params
    env.set_params(status_params)
    indexing = IndexingCommands(status_params)
    if not indexing.is_topology_active(env):
        raise ComponentIsNotRunning()
def stop(self, env, upgrade_type=None):
    """Stop the indexing topology."""
    from params import params
    env.set_params(params)
    IndexingCommands(params).stop_indexing_topology()
def start(self, env, upgrade_type=None):
    """Apply configuration, then start the indexing topology."""
    from params import params
    env.set_params(params)
    self.configure(env)
    IndexingCommands(params).start_indexing_topology()
def start(self, env, upgrade_type=None):
    """Configure, install the writer-specific index schemas/templates,
    then start the indexing topology."""
    from params import params
    env.set_params(params)
    self.configure(env)
    indexing = IndexingCommands(params)
    writer = params.ra_indexing_writer
    if writer == 'Solr':
        # Install Solr schemas
        if not indexing.is_solr_schema_installed():
            if indexing.solr_schema_install(env):
                indexing.set_solr_schema_installed()
    elif writer == 'Elasticsearch':
        # Install elasticsearch templates
        if not indexing.is_elasticsearch_template_installed():
            if self.elasticsearch_template_install(env):
                indexing.set_elasticsearch_template_installed()
    else:
        # Unknown writer: warn and continue; install retries on next start.
        msg = "WARNING: index schemas/templates could not be installed. " \
              "Is Indexing server configured properly ? Will reattempt install on next start. index server configured={0}"
        Logger.warning(msg.format(writer))
    indexing.start_indexing_topology(env)
def install(self, env):
    """Register the package repository and install the indexing packages."""
    from params import params
    env.set_params(params)
    repo_setup = IndexingCommands(params)
    repo_setup.setup_repo()
    self.install_packages(env)
def configure(self, env, upgrade_type=None, config_dir=None):
    """Initialize indexing resources (Kafka topics, HDFS dirs) and, when
    Kerberized, HDFS permissions and Kafka ACLs; then run Storm security
    setup."""
    from params import params
    env.set_params(params)
    commands = IndexingCommands(params)
    metron_service.load_global_config(params)
    if not commands.is_configured():
        commands.init_kafka_topics()
        commands.init_hdfs_dir()
        commands.set_configured()
    if params.security_enabled:
        if not commands.is_hdfs_perm_configured():
            # If we Kerberize the cluster, we need to call this again, to
            # remove write perms from hadoop group. If we start off
            # Kerberized, it just does the same thing twice.
            commands.init_hdfs_dir()
            commands.set_hdfs_perm_configured()
        if not commands.is_acl_configured():
            commands.init_kafka_acls()
            commands.set_acl_configured()
    Logger.info("Calling security setup")
    storm_security_setup(params)
def configure(self, env, upgrade_type=None, config_dir=None):
    """Full indexing configure: render writer property files, push ZK
    config, create Kafka/HDFS/HBase resources, apply Kerberos ACLs when
    enabled, then run Storm security setup."""
    from params import params
    env.set_params(params)
    Logger.info("Running indexing configure")
    # Property files for the Elasticsearch and HDFS writers.
    File(format("{metron_config_path}/elasticsearch.properties"),
         owner=params.metron_user,
         group=params.metron_group,
         content=Template("elasticsearch.properties.j2"))
    File(format("{metron_config_path}/hdfs.properties"),
         owner=params.metron_user,
         group=params.metron_group,
         content=Template("hdfs.properties.j2"))
    if not metron_service.is_zk_configured(params):
        metron_service.init_zk_config(params)
        metron_service.set_zk_configured(params)
    metron_service.refresh_configs(params)
    commands = IndexingCommands(params)
    if not commands.is_configured():
        commands.init_kafka_topics()
        commands.init_hdfs_dir()
        commands.set_configured()
    if params.security_enabled and not commands.is_hdfs_perm_configured():
        # If we Kerberize the cluster, we need to call this again, to remove
        # write perms from hadoop group. If we start off Kerberized, it just
        # does the same thing twice.
        commands.init_hdfs_dir()
        commands.set_hdfs_perm_configured()
    if params.security_enabled and not commands.is_acl_configured():
        commands.init_kafka_acls()
        commands.set_acl_configured()
    if not commands.is_hbase_configured():
        commands.create_hbase_tables()
    if params.security_enabled and not commands.is_hbase_acl_configured():
        commands.set_hbase_acls()
    Logger.info("Calling security setup")
    storm_security_setup(params)
def stop(self, env, upgrade_type=None):
    """Stop the indexing topology."""
    from params import params
    env.set_params(params)
    indexing = IndexingCommands(params)
    indexing.stop_indexing_topology(env)
def configure(self, env, upgrade_type=None, config_dir=None):
    """Configure indexing: property files, ZooKeeper config, Kafka/HDFS/
    HBase resources, Kerberos ACLs when security is enabled, and finally
    Storm security setup."""
    from params import params
    env.set_params(params)
    Logger.info("Running indexing configure")
    # Writer property files rendered from their Jinja templates.
    File(format("{metron_config_path}/elasticsearch.properties"),
         content=Template("elasticsearch.properties.j2"),
         group=params.metron_group,
         owner=params.metron_user)
    File(format("{metron_config_path}/hdfs.properties"),
         content=Template("hdfs.properties.j2"),
         group=params.metron_group,
         owner=params.metron_user)
    if not metron_service.is_zk_configured(params):
        metron_service.init_zk_config(params)
        metron_service.set_zk_configured(params)
    metron_service.refresh_configs(params)
    indexing = IndexingCommands(params)
    if not indexing.is_configured():
        indexing.init_kafka_topics()
        indexing.init_hdfs_dir()
        indexing.set_configured()
    if params.security_enabled:
        if not indexing.is_hdfs_perm_configured():
            # Re-running init_hdfs_dir after Kerberization removes write
            # perms from the hadoop group; if we started off Kerberized it
            # just does the same thing twice.
            indexing.init_hdfs_dir()
            indexing.set_hdfs_perm_configured()
        if not indexing.is_acl_configured():
            indexing.init_kafka_acls()
            indexing.set_acl_configured()
    if not indexing.is_hbase_configured():
        indexing.create_hbase_tables()
    if params.security_enabled and not indexing.is_hbase_acl_configured():
        indexing.set_hbase_acls()
    Logger.info("Calling security setup")
    storm_security_setup(params)