def get_fields(self, collection_or_core_name):
  """Return (uniquekey, fields) for a Solr collection or core.

  Fields come from the /luke handler (via SolrApi.fields) and are normalized
  with self._format_flags; the unique key comes from a second API call.

  Raises PopupException (after logging) when either call fails.
  """
  try:
    field_data = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get()).fields(collection_or_core_name)
    fields = self._format_flags(field_data['schema']['fields'])
  except Exception:
    # Fix: narrowed from a bare "except:" so KeyboardInterrupt/SystemExit propagate.
    LOG.exception(_('Could not fetch fields for collection %s.') % collection_or_core_name)
    raise PopupException(_('Could not fetch fields for collection %s. See logs for more info.') % collection_or_core_name)

  try:
    uniquekey = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get()).uniquekey(collection_or_core_name)
  except Exception:
    # Fix: narrowed from a bare "except:".
    LOG.exception(_('Could not fetch unique key for collection %s.') % collection_or_core_name)
    raise PopupException(_('Could not fetch unique key for collection %s. See logs for more info.') % collection_or_core_name)

  return uniquekey, fields
def create_collection(self, name, fields, unique_key_field='id', df='text'):
  """
  Create solr collection or core and instance dir.
  Create schema.xml file so that we can set UniqueKey field.

  In SolrCloud mode, the instance dir is created through the external
  `solrctl` tool; otherwise the config is moved into a local core dir.
  Raises PopupException on any failure.
  """
  if self.is_solr_cloud_mode():
    # solrcloud mode

    # Need to remove path afterwards
    tmp_path, solr_config_path = utils.copy_configs(fields, unique_key_field, df, True)

    # Create instance directory via the solrctl CLI; SOLR_ZK_ENSEMBLE must be
    # exported for solrctl to find ZooKeeper.
    solrctl_path = get_solrctl_path()

    process = subprocess.Popen([solrctl_path, "instancedir", "--create", name, solr_config_path],
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE,
                               env={
                                 'SOLR_ZK_ENSEMBLE': conf.SOLR_ZK_ENSEMBLE.get()
                               })
    status = process.wait()

    # Don't want directories laying around
    shutil.rmtree(tmp_path)

    if status != 0:
      LOG.error("Could not create instance directory.\nOutput: %s\nError: %s" % process.communicate())
      raise PopupException(_('Could not create instance directory. '
                             'Check if solr_zk_ensemble and solrctl_path are correct in Hue config [indexer].'))

    api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())
    if not api.create_collection(name):
      # Delete instance directory if we couldn't create a collection.
      process = subprocess.Popen([solrctl_path, "instancedir", "--delete", name],
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE,
                                 env={
                                   'SOLR_ZK_ENSEMBLE': conf.SOLR_ZK_ENSEMBLE.get()
                                 })
      if process.wait() != 0:
        # NOTE(review): "Cloud not" is a typo in this log message ("Could not").
        LOG.error("Cloud not delete collection.\nOutput: %s\nError: %s" % process.communicate())
      raise PopupException(_('Could not create collection. Check error logs for more info.'))
  else:
    # Non-solrcloud mode
    # Create instance directory locally.
    instancedir = os.path.join(conf.CORE_INSTANCE_DIR.get(), name)

    if os.path.exists(instancedir):
      raise PopupException(_("Instance directory %s already exists! Please remove it from the file system.") % instancedir)

    tmp_path, solr_config_path = utils.copy_configs(fields, unique_key_field, df, False)
    shutil.move(solr_config_path, instancedir)
    shutil.rmtree(tmp_path)

    api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())
    if not api.create_core(name, instancedir):
      # Delete instance directory if we couldn't create a collection.
      shutil.rmtree(instancedir)
      raise PopupException(_('Could not create collection. Check error logs for more info.'))
def get_fields(self, collection_or_core_name):
  """Return (uniquekey, fields) for a Solr collection or core.

  Raises PopupException (after logging) if either the fields or the unique
  key cannot be fetched from Solr.
  """
  try:
    field_data = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get()).fields(collection_or_core_name)
    fields = self._format_flags(field_data['schema']['fields'])
  except Exception:
    # Fix: narrowed from a bare "except:" so KeyboardInterrupt/SystemExit propagate.
    LOG.exception(_('Could not fetch fields for collection %s.') % collection_or_core_name)
    raise PopupException(_('Could not fetch fields for collection %s. See logs for more info.') % collection_or_core_name)

  try:
    uniquekey = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get()).uniquekey(collection_or_core_name)
  except Exception:
    # Fix: narrowed from a bare "except:".
    LOG.exception(_('Could not fetch unique key for collection %s.') % collection_or_core_name)
    raise PopupException(_('Could not fetch unique key for collection %s. See logs for more info.') % collection_or_core_name)

  return uniquekey, fields
def _create_solr_cloud_collection(self, name, fields, unique_key_field, df):
  """Create a SolrCloud collection.

  Uploads the generated config to ZooKeeper under ZK_SOLR_CONFIG_NAMESPACE,
  then asks Solr to create the collection; on failure the ZK config is
  rolled back. Raises PopupException on any failure.
  """
  with ZookeeperClient(hosts=get_solr_ensemble(), read_only=False) as zc:
    root_node = '%s/%s' % (ZK_SOLR_CONFIG_NAMESPACE, name)

    tmp_path, solr_config_path = copy_configs(fields, unique_key_field, df, True)
    try:
      config_root_path = '%s/%s' % (solr_config_path, 'conf')
      try:
        zc.copy_path(root_node, config_root_path)
      except Exception as e:
        # Fix: modernized from Py2-only "except Exception, e" (file already
        # uses the "as e" form elsewhere).
        zc.delete_path(root_node)
        raise PopupException(_('Error in copying Solr configurations.'), detail=e)
    finally:
      # Don't want directories laying around
      shutil.rmtree(tmp_path)

    api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())
    if not api.create_collection(name):
      # Delete the uploaded config if we couldn't create the collection.
      try:
        zc.delete_path(root_node)
      except Exception as e:
        raise PopupException(_('Error in deleting Solr configurations.'), detail=e)
      raise PopupException(_('Could not create collection. Check error logs for more info.'))
def update_data_from_hdfs(self, fs, collection_or_core_name, fields, path, data_type='separated', indexing_strategy='upload', **kwargs):
  """
  Add hdfs path contents to index.

  Reads the file at `path`, converts it to JSON ('log' or 'separated'
  data_type) and posts it to Solr. Raises PopupException when the file is
  too large, the type is unknown, or the Solr update fails.
  """
  api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())

  if indexing_strategy == 'upload':
    stats = fs.stats(path)
    if stats.size > MAX_UPLOAD_SIZE:
      raise PopupException(_('File size is too large to handle!'))
    else:
      # Get fields for filtering
      unique_key, fields = self.get_fields(collection_or_core_name)
      fields = [{'name': field, 'type': fields[field]['type']} for field in fields]

      fh = fs.open(path)
      try:
        if data_type == 'log':
          # Transform to JSON then update
          data = json.dumps([value for value in field_values_from_log(fh, fields)])
          content_type = 'json'
        elif data_type == 'separated':
          data = json.dumps([value for value in field_values_from_separated_file(fh, kwargs.get('separator', ','), kwargs.get('quote_character', '"'), fields)], indent=2)
          content_type = 'json'
        else:
          raise PopupException(_('Could not update index. Unknown type %s') % data_type)
      finally:
        # Fix: close the HDFS handle even when parsing raises or the data
        # type is unknown (previously leaked on those paths).
        fh.close()

      if not api.update(collection_or_core_name, data, content_type=content_type):
        raise PopupException(_('Could not update index. Check error logs for more info.'))
  else:
    raise PopupException(_('Could not update index. Indexing strategy %s not supported.') % indexing_strategy)
def update_data_from_hive(self, db, collection_or_core_name, database, table, columns, indexing_strategy='upload'):
  """
  Add hdfs path contents to index.

  Runs a custom Hive query (first 100 rows of the selected columns) and
  posts the result to the collection as CSV. Raises PopupException on any
  failure.
  """
  # Run a custom hive query and post data to collection
  from beeswax.server import dbms
  import tablib

  api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())

  if indexing_strategy == 'upload':
    table = db.get_table(database, table)
    hql = "SELECT %s FROM `%s.%s` %s" % (','.join(columns), database, table.name, db._get_browse_limit_clause(table))
    query = dbms.hql_query(hql)

    try:
      handle = db.execute_and_wait(query)

      if handle:
        result = db.fetch(handle, rows=100)
        db.close(handle)

        dataset = tablib.Dataset()
        dataset.append(columns)
        for row in result.rows():
          dataset.append(row)

        if not api.update(collection_or_core_name, dataset.csv, content_type='csv'):
          raise PopupException(_('Could not update index. Check error logs for more info.'))
      else:
        raise PopupException(_('Could not update index. Could not fetch any data from Hive.'))
    except Exception as e:
      # Fix: modernized from Py2-only "except Exception, e".
      raise PopupException(_('Could not update index.'), detail=e)
def get_collections(self):
  """Gather SolrCloud collections, aliases and plain cores from the Solr API.

  Each entry is tagged with 'isCoreOnly' (and aliases with 'isAlias' plus
  their member collections). Failures are logged and leave the dicts empty.

  NOTE(review): no return statement is visible in this chunk -- presumably
  the function continues (merging/returning these dicts) beyond this view;
  confirm against the full file.
  """
  solr_collections = {}
  solr_aliases = {}
  solr_cores = {}

  try:
    api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())

    if self.is_solr_cloud_mode():
      solr_collections = api.collections()
      for name in solr_collections:
        solr_collections[name]['isCoreOnly'] = False

      solr_aliases = api.aliases()
      for name in solr_aliases:
        solr_aliases[name] = {
          'isCoreOnly': False,
          'isAlias': True,
          'collections': solr_aliases[name]
        }

    solr_cores = api.cores()
    for name in solr_cores:
      solr_cores[name]['isCoreOnly'] = True
  except Exception, e:
    LOG.warn('No Zookeeper servlet running on Solr server: %s' % e)
def _create_solr_cloud_collection(self, name, fields, unique_key_field, df):
  """Create a SolrCloud collection: push the generated config to ZooKeeper,
  then create the collection, rolling the config back on failure.

  Raises PopupException on any failure.
  """
  with ZookeeperClient(hosts=get_solr_ensemble(), read_only=False) as zc:
    root_node = '%s/%s' % (ZK_SOLR_CONFIG_NAMESPACE, name)

    tmp_path, solr_config_path = copy_configs(fields, unique_key_field, df, True)
    try:
      config_root_path = '%s/%s' % (solr_config_path, 'conf')
      try:
        zc.copy_path(root_node, config_root_path)
      except Exception as e:
        # Fix: modernized from Py2-only "except Exception, e".
        zc.delete_path(root_node)
        raise PopupException(_('Error in copying Solr configurations.'), detail=e)
    finally:
      # Don't want directories laying around
      shutil.rmtree(tmp_path)

    api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())
    if not api.create_collection(name):
      # Remove the uploaded config if we couldn't create the collection.
      try:
        zc.delete_path(root_node)
      except Exception as e:
        raise PopupException(_('Error in deleting Solr configurations.'), detail=e)
      raise PopupException(_('Could not create collection. Check error logs for more info.'))
def __init__(
    self,
    solr_url,
    user,
    security_enabled=SECURITY_ENABLED.get() if search_enabled() else SECURITY_ENABLED.default,
    ssl_cert_ca_verify=SSL_CERT_CA_VERIFY.get(),
):
  # Build the HTTP client for the Solr REST API, optionally with Kerberos
  # auth and TLS certificate verification.
  #
  # NOTE(review): both keyword defaults are evaluated once, when this module
  # is imported -- config changes made after import are not picked up by
  # callers relying on the defaults; confirm this is intended.
  self._url = solr_url
  self._user = user
  self._client = HttpClient(self._url, logger=LOG)
  self.security_enabled = security_enabled

  if self.security_enabled:
    self._client.set_kerberos_auth()

  self._client.set_verify(ssl_cert_ca_verify)

  self._root = resource.Resource(self._client)

  # The Kerberos handshake requires two requests in order to authenticate,
  # but if our first request is a PUT/POST, it might flat-out reject the
  # first request if the body is too large. So, connect here in order to get
  # a cookie so future PUT/POSTs will be pre-authenticated.
  if self.security_enabled:
    self._root.invoke("HEAD", "/")
def delete_collection(self, name, core):
  """
  Delete solr collection/core and instance dir.

  Only collections can be removed (cores raise). After Solr drops the
  collection, the matching config node is deleted from ZooKeeper; if that
  cleanup fails the collection is re-created to avoid an orphan config.
  Raises PopupException on any failure.
  """
  api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())
  client = SolrClient(self.user)

  if core:
    raise PopupException(_('Cannot remove Solr cores.'))

  if api.remove_collection(name):
    # Delete instance directory.
    try:
      root_node = '%s/%s' % (ZK_SOLR_CONFIG_NAMESPACE, name)
      with ZookeeperClient(hosts=client.get_zookeeper_host(), read_only=False) as zc:
        zc.delete_path(root_node)
    except Exception as e:
      # Re-create collection so that we don't have an orphan config
      api.add_collection(name)
      raise PopupException(_('Error in deleting Solr configurations.'), detail=e)
  else:
    raise PopupException(_('Could not remove collection. Check error logs for more info.'))
def delete_collection(self, name, core):
  """
  Delete solr collection/core and instance dir.

  Only collections can be removed (cores raise). The on-disk instance dir
  is removed through the solrctl CLI. Raises PopupException on any failure.
  """
  api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())

  if core:
    raise PopupException(_('Cannot remove Solr cores.'))

  if api.remove_collection(name):
    # Delete instance directory.
    solrctl_path = get_solrctl_path()

    process = subprocess.Popen([solrctl_path, "instancedir", "--delete", name],
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE,
                               env={'SOLR_ZK_ENSEMBLE': conf.SOLR_ZK_ENSEMBLE.get()})
    if process.wait() != 0:
      # Message fixes: "Cloud not" typo, and this is the DELETE path -- the
      # popup previously said "create instance directory".
      LOG.error("Could not delete instance directory.\nOutput stream: %s\nError stream: %s" % process.communicate())
      raise PopupException(_('Could not delete instance directory. Check error logs for more info.'))
  else:
    raise PopupException(_('Could not remove collection. Check error logs for more info.'))
def get_fields(self, collection_or_core_name):
  """Return (uniquekey, fields) for a collection or core.

  Prefers the /luke handler; on failure falls back to the schema API and
  converts the result to luke format. Raises PopupException (after logging)
  when both sources fail or the unique key cannot be fetched.
  """
  api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())
  try:
    field_data = api.fields(collection_or_core_name)
    fields = self._format_flags(field_data['schema']['fields'])
  except Exception as e:
    LOG.warn('/luke call did not succeed: %s' % e)
    try:
      fields = api.schema_fields(collection_or_core_name)
      fields = Collection2._make_luke_from_schema_fields(fields)
    except Exception:
      # Fix: narrowed from a bare "except:" so KeyboardInterrupt/SystemExit propagate.
      LOG.exception(_('Could not fetch fields for collection %s.') % collection_or_core_name)
      raise PopupException(_('Could not fetch fields for collection %s. See logs for more info.') % collection_or_core_name)

  try:
    uniquekey = api.uniquekey(collection_or_core_name)
  except Exception:
    # Fix: narrowed from a bare "except:".
    LOG.exception(_('Could not fetch unique key for collection %s.') % collection_or_core_name)
    raise PopupException(_('Could not fetch unique key for collection %s. See logs for more info.') % collection_or_core_name)

  return uniquekey, fields
def __init__(self, solr_url=None, user=None, security_enabled=False, ssl_cert_ca_verify=SSL_CERT_CA_VERIFY.get()):
  """HTTP wrapper around the Solr REST API.

  Falls back to the configured SOLR_URL when no url is given; Kerberos auth
  is enabled when requested explicitly or via global configuration.
  """
  self._url = SOLR_URL.get() if solr_url is None else solr_url
  self._user = user
  self.security_enabled = security_enabled or SECURITY_ENABLED.get()

  self._client = HttpClient(self._url, logger=LOG)
  self._client.set_verify(ssl_cert_ca_verify)
  self._root = resource.Resource(self._client)

  if self.security_enabled:
    self._client.set_kerberos_auth()
    # The Kerberos handshake requires two requests in order to authenticate,
    # but if our first request is a PUT/POST, it might flat-out reject the
    # first request if the body is too large. So, connect here in order to get
    # a cookie so future PUT/POSTs will be pre-authenticated.
    self._root.invoke('HEAD', '/')
def update_data_from_hive(self, db, collection_or_core_name, database, table, columns, indexing_strategy='upload'):
  """
  Add hdfs path contents to index.

  Runs a custom Hive query (first 100 rows of the selected columns) and
  posts the result to the collection as CSV. Raises PopupException on any
  failure.
  """
  # Run a custom hive query and post data to collection
  from beeswax.server import dbms
  import tablib

  api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())

  if indexing_strategy == 'upload':
    table = db.get_table(database, table)
    hql = "SELECT %s FROM `%s.%s` %s" % (','.join(columns), database, table.name, db._get_browse_limit_clause(table))
    query = dbms.hql_query(hql)

    try:
      handle = db.execute_and_wait(query)

      if handle:
        result = db.fetch(handle, rows=100)
        db.close(handle)

        dataset = tablib.Dataset()
        dataset.append(columns)
        for row in result.rows():
          dataset.append(row)

        if not api.update(collection_or_core_name, dataset.csv, content_type='csv'):
          raise PopupException(_('Could not update index. Check error logs for more info.'))
      else:
        raise PopupException(_('Could not update index. Could not fetch any data from Hive.'))
    except PopupException:
      raise
    except Exception as e:
      # Consistency/robustness fix: previously raw Hive/DB errors escaped
      # unwrapped here, unlike the sibling implementation which surfaces
      # them as a PopupException with detail.
      raise PopupException(_('Could not update index.'), detail=e)
  else:
    raise PopupException(_('Could not update index. Indexing strategy %s not supported.') % indexing_strategy)
def update_data_from_hive(self, collection_or_core_name, columns, fetch_handle):
  """Stream rows from a Hive fetch handle into the index, batch by batch.

  Stops after MAX_ROWS rows or when the handle reports no more data.
  Each row is prefixed with a running row id; falsy cells become 0 for
  numbers and '' otherwise so the CSV stays rectangular.
  Raises PopupException on any failure.
  """
  MAX_ROWS = 10000
  ROW_COUNT = 0
  FETCH_BATCH = 1000

  has_more = True
  api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())

  try:
    while ROW_COUNT < MAX_ROWS and has_more:
      result = fetch_handle(FETCH_BATCH, ROW_COUNT == 0)
      has_more = result['has_more']

      if result['data']:
        dataset = tablib.Dataset()
        dataset.append(columns)

        for i, row in enumerate(result['data']):
          dataset.append([ROW_COUNT + i] + [cell if cell else (0 if isinstance(cell, numbers.Number) else '') for cell in row])

        if not api.update(collection_or_core_name, dataset.csv, content_type='csv'):
          raise PopupException(_('Could not update index. Check error logs for more info.'))

        # NOTE(review): len(dataset) also counts the `columns` row appended
        # above, so ROW_COUNT advances by batch size + 1 per batch -- confirm
        # this off-by-one is intended.
        ROW_COUNT += len(dataset)
  except Exception as e:
    # Fix: modernized from Py2-only "except Exception, e".
    raise PopupException(_('Could not update index.'), detail=e)
def create_collection(self, name, fields, unique_key_field='id', df='text'):
  """
  Create solr collection or core and instance dir.
  Create schema.xml file so that we can set UniqueKey field.

  NOTE(review): in this chunk, a successful rollback after a failed
  api.create_collection() raises nothing, and there is no non-cloud branch
  or final "Could not create collection" raise -- presumably the function
  continues beyond this view; confirm. Also note `zc` is never closed here
  (no context manager), unlike the sibling implementation.
  """
  if self.is_solr_cloud_mode():
    # solrcloud mode

    # Need to remove path afterwards
    tmp_path, solr_config_path = copy_configs(fields, unique_key_field, df, True)

    zc = ZookeeperClient(hosts=get_solr_ensemble(), read_only=False)
    root_node = '%s/%s' % (ZK_SOLR_CONFIG_NAMESPACE, name)
    config_root_path = '%s/%s' % (solr_config_path, 'conf')
    try:
      zc.copy_path(root_node, config_root_path)
    except Exception, e:
      zc.delete_path(root_node)
      raise PopupException(_('Error in copying Solr configurations.'), detail=e)

    # Don't want directories laying around
    shutil.rmtree(tmp_path)

    api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())
    if not api.create_collection(name):
      # Delete instance directory if we couldn't create a collection.
      try:
        zc.delete_path(root_node)
      except Exception, e:
        raise PopupException(_('Error in deleting Solr configurations.'), detail=e)
def get_collections(self):
  """Gather Solr collections, cores and aliases, tagging each entry.

  NOTE(review): no return statement is visible in this chunk; presumably the
  function continues (merging/returning these dicts) beyond this view --
  confirm against the full file.
  """
  try:
    api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())

    if self.is_solr_cloud_mode():
      solr_collections = api.collections()
      for name in solr_collections:
        solr_collections[name]['isCoreOnly'] = False
    else:
      solr_collections = {}

    solr_cores = api.cores()
    for name in solr_cores:
      solr_cores[name]['isCoreOnly'] = True

    solr_aliases = api.aliases()
    for name in solr_aliases:
      solr_aliases[name] = {
        'isCoreOnly': False,
        'isAlias': True,
        'collections': solr_aliases[name]
      }
  except Exception, e:
    # Any failure (e.g. non-cloud Solr without the Zookeeper servlet) resets
    # everything to empty.
    LOG.warn('No Zookeeper servlet running on Solr server: %s' % e)
    solr_collections = {}
    solr_cores = {}
    solr_aliases = {}
def __init__(self, solr_url, user):
  """Build an HTTP resource for the Solr server at `solr_url`, enabling
  Kerberos auth when security is configured."""
  self._url = solr_url
  self._user = user
  self.security_enabled = SECURITY_ENABLED.get()

  client = HttpClient(self._url, logger=LOG)
  if self.security_enabled:
    client.set_kerberos_auth()

  self._client = client
  self._root = Resource(client)
def __init__(self, solr_url, user, security_enabled=SECURITY_ENABLED.get()):
  # Build the HTTP client for the Solr REST API.
  #
  # NOTE(review): the default for `security_enabled` is evaluated once, at
  # import time of this module -- config changes after import are not picked
  # up by callers relying on the default; confirm this is intended.
  self._url = solr_url
  self._user = user
  self._client = HttpClient(self._url, logger=LOG)
  self.security_enabled = security_enabled

  if self.security_enabled:
    self._client.set_kerberos_auth()

  self._root = resource.Resource(self._client)
def is_solr_cloud_mode(self):
  """Return (and cache on self) whether the Solr server runs in SolrCloud mode.

  The probe calls api.collections(), which only succeeds against SolrCloud;
  any failure is treated as non-cloud mode.
  """
  api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())

  if not hasattr(self, '_solr_cloud_mode'):
    try:
      api.collections()
      setattr(self, '_solr_cloud_mode', True)
    except Exception:
      # Fix: narrowed from a bare "except:" so KeyboardInterrupt/SystemExit propagate.
      setattr(self, '_solr_cloud_mode', False)

  return getattr(self, '_solr_cloud_mode')
def get_autocomplete(self):
  """Collect collection and config names for autocompletion.

  NOTE(review): `autocomplete` is built but no return is visible in this
  chunk; presumably `return autocomplete` follows beyond this view --
  confirm against the full file.
  """
  autocomplete = {}
  try:
    api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())
    autocomplete['collections'] = api.collections2()
    autocomplete['configs'] = api.configs()
  except Exception, e:
    # Best-effort: a non-cloud Solr leaves the dict partially/fully empty.
    LOG.warn('No Zookeeper servlet running on Solr server: %s' % e)
def is_solr_cloud_mode(self):
  # Probe once and cache the result on the instance; api.collections() only
  # succeeds against a SolrCloud server.
  #
  # NOTE(review): unlike the sibling variant, no `return` of
  # `_solr_cloud_mode` is visible in this chunk -- presumably it follows
  # beyond this view; confirm against the full file.
  api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())

  if not hasattr(self, '_solr_cloud_mode'):
    try:
      api.collections()
      setattr(self, '_solr_cloud_mode', True)
    except Exception, e:
      LOG.info('Non SolrCloud server: %s' % e)
      setattr(self, '_solr_cloud_mode', False)
def create_collection(self, name, fields, unique_key_field='id', df='text'):
  """
  Create solr collection and instance dir.
  Create schema.xml file so that we can set UniqueKey field.

  The instance dir is created through the external solrctl tool; on a failed
  collection creation the instance dir is deleted again.
  Raises PopupException on any failure.
  """
  # Need to remove path afterwards
  tmp_path, solr_config_path = utils.copy_configs(fields, unique_key_field, df)

  # Create instance directory.
  process = subprocess.Popen([conf.SOLRCTL_PATH.get(), "instancedir", "--create", name, solr_config_path],
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             env={
                               'SOLR_HOME': conf.SOLR_HOME.get(),
                               'SOLR_ZK_ENSEMBLE': conf.SOLR_ZK_ENSEMBLE.get()
                             })
  status = process.wait()
  shutil.rmtree(tmp_path)

  if status != 0:
    # Typo fix: "Cloud not" -> "Could not".
    LOG.error("Could not create instance directory.\nOutput stream: %s\nError stream: %s" % process.communicate())
    raise PopupException(_('Could not create instance directory. Check error logs for more info.'))

  api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())
  if not api.create_collection(name):
    # Delete instance directory.
    process = subprocess.Popen([conf.SOLRCTL_PATH.get(), "instancedir", "--delete", name],
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE,
                               env={
                                 'SOLR_HOME': conf.SOLR_HOME.get(),
                                 'SOLR_ZK_ENSEMBLE': conf.SOLR_ZK_ENSEMBLE.get()
                               })
    if process.wait() != 0:
      # Typo fix: "Cloud not" -> "Could not".
      LOG.error("Could not delete instance directory.\nOutput stream: %s\nError stream: %s" % process.communicate())
    raise PopupException(_('Could not create collection. Check error logs for more info.'))
def get_fields(self, collection_or_core_name):
  """Fetch field metadata for a collection, preferring /luke and falling
  back to the schema API.

  NOTE(review): unlike the sibling variant, no uniquekey fetch or return is
  visible in this chunk; on success the function falls through -- presumably
  it continues beyond this view; confirm against the full file.
  """
  api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())
  try:
    field_data = api.fields(collection_or_core_name)
    fields = self._format_flags(field_data['schema']['fields'])
  except Exception as e:
    # Fix: modernized from Py2-only "except Exception, e".
    LOG.warn('/luke call did not succeed: %s' % e)
    try:
      fields = api.schema_fields(collection_or_core_name)
      fields = Collection2._make_luke_from_schema_fields(fields)
    except Exception:
      # Fix: narrowed from a bare "except:" so KeyboardInterrupt/SystemExit propagate.
      LOG.exception(_('Could not fetch fields for collection %s.') % collection_or_core_name)
      raise PopupException(_('Could not fetch fields for collection %s. See logs for more info.') % collection_or_core_name)
def create_or_edit_alias(request):
  """Create or modify a Solr alias from POSTed `alias` and `collections`.

  NOTE(review): no return of `response` is visible in this chunk; presumably
  a JSON response of `response` follows beyond this view -- confirm.
  """
  if request.method != 'POST':
    raise PopupException(_('POST request required.'))

  response = {'status': -1}

  alias = request.POST.get('alias', '')
  collections = json.loads(request.POST.get('collections', '[]'))

  api = SolrApi(SOLR_URL.get(), request.user, SECURITY_ENABLED.get())

  try:
    api.create_or_modify_alias(alias, collections)
    response['status'] = 0
    response['message'] = _('Alias created or modified!')
  except Exception as e:
    # Fix: modernized from Py2-only "except Exception, e".
    response['message'] = _('Alias could not be created or modified: %s') % e
def update_collection(self, name, fields):
  """
  Only create new fields.

  Fields that already exist in the schema are skipped, since Solr offers no
  way to overwrite them currently; new fields are stripped down to the
  ALLOWED_FIELD_ATTRIBUTES before being added.
  """
  api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())

  old_field_names = api.fields(name)['schema']['fields'].keys()
  # Idiom fix: comprehensions instead of filter()+lambda.
  new_fields = [field for field in fields if field['name'] not in old_field_names]

  new_fields_filtered = []
  for field in new_fields:
    new_field = {}
    for attribute in ALLOWED_FIELD_ATTRIBUTES:
      if attribute in field:
        new_field[attribute] = field[attribute]
    new_fields_filtered.append(new_field)

  api.add_fields(name, new_fields_filtered)
def __init__(self, solr_url, user, security_enabled=SECURITY_ENABLED.get()):
  """Wrap an HTTP resource for the Solr REST API, optionally Kerberized."""
  self._url = solr_url
  self._user = user
  self.security_enabled = security_enabled

  self._client = HttpClient(self._url, logger=LOG)
  self._root = resource.Resource(self._client)

  if self.security_enabled:
    self._client.set_kerberos_auth()
    # The Kerberos handshake requires two requests in order to authenticate,
    # but if our first request is a PUT/POST, it might flat-out reject the
    # first request if the body is too large. So, connect here in order to get
    # a cookie so future PUT/POSTs will be pre-authenticated.
    self._root.invoke('HEAD', '/')
def _create_non_solr_cloud_collection(self, name, fields, unique_key_field, df):
  """Create a local Solr core (non-SolrCloud mode).

  Stages the generated configs, moves them into the core instance directory,
  then registers the core with Solr; the instance dir is rolled back when
  registration fails.
  """
  instance_dir = os.path.join(CORE_INSTANCE_DIR.get(), name)

  if os.path.exists(instance_dir):
    raise PopupException(_("Instance directory %s already exists! Please remove it from the file system.") % instance_dir)

  staging_path, config_path = copy_configs(fields, unique_key_field, df, False)
  try:
    shutil.move(config_path, instance_dir)
  finally:
    # The staging directory is always discarded, even when the move fails.
    shutil.rmtree(staging_path)

  api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())
  if api.create_core(name, instance_dir):
    return

  # Roll back the instance directory when Solr refuses the core.
  shutil.rmtree(instance_dir)
  raise PopupException(_('Could not create collection. Check error logs for more info.'))
def delete_collection(self, name, core):
  """
  Delete solr collection/core and instance dir.

  Only collections can be removed (cores raise). After Solr drops the
  collection, the matching ZooKeeper config node is deleted; if that cleanup
  fails the collection is re-created to avoid an orphan config.
  """
  api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())

  if core:
    raise PopupException(_('Cannot remove Solr cores.'))

  if api.remove_collection(name):
    # Delete instance directory.
    try:
      root_node = '%s/%s' % (ZK_SOLR_CONFIG_NAMESPACE, name)
      with ZookeeperClient(hosts=get_solr_ensemble(), read_only=False) as zc:
        zc.delete_path(root_node)
    except Exception as e:
      # Fix: modernized from Py2-only "except Exception, e".
      # Re-create collection so that we don't have an orphan config
      api.add_collection(name)
      raise PopupException(_('Error in deleting Solr configurations.'), detail=e)
def delete_collection(self, name, core):
  """
  Delete solr collection/core and instance dir.

  Only collections can be removed (cores raise). After Solr drops the
  collection, the matching ZooKeeper config node is deleted; if that cleanup
  fails the collection is re-created to avoid an orphan config.
  """
  api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())

  if core:
    raise PopupException(_('Cannot remove Solr cores.'))

  if api.remove_collection(name):
    # Delete instance directory.
    try:
      root_node = '%s/%s' % (ZK_SOLR_CONFIG_NAMESPACE, name)
      # Fix: manage the ZooKeeper client as a context manager so the
      # connection is released (matches the sibling implementation), and
      # modernize Py2-only "except Exception, e".
      with ZookeeperClient(hosts=get_solr_ensemble(), read_only=False) as zc:
        zc.delete_path(root_node)
    except Exception as e:
      # Re-create collection so that we don't have an orphan config
      api.add_collection(name)
      raise PopupException(_('Error in deleting Solr configurations.'), detail=e)
def delete_collection(self, name):
  """
  Delete solr collection and instance dir.

  The on-disk instance dir is removed through the solrctl CLI after Solr
  drops the collection. Raises PopupException on any failure.
  """
  api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())

  if api.remove_collection(name):
    # Delete instance directory.
    process = subprocess.Popen([conf.SOLRCTL_PATH.get(), "instancedir", "--delete", name],
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE,
                               env={
                                 'SOLR_HOME': conf.SOLR_HOME.get(),
                                 'SOLR_ZK_ENSEMBLE': conf.SOLR_ZK_ENSEMBLE.get()
                               })
    if process.wait() != 0:
      # Message fixes: "Cloud not" typo, and this is the DELETE path -- the
      # popup previously said "create instance directory".
      LOG.error("Could not delete instance directory.\nOutput stream: %s\nError stream: %s" % process.communicate())
      raise PopupException(_('Could not delete instance directory. Check error logs for more info.'))
  else:
    # Message fix: this branch means removal (not creation) failed.
    raise PopupException(_('Could not remove collection. Check error logs for more info.'))
def delete_collection(self, name, core):
  """
  Delete solr collection/core and instance dir.

  Only collections can be removed (cores raise). The instance dir is removed
  through solrctl with an explicit --zk ensemble. Raises PopupException on
  any failure.
  """
  api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())

  if core:
    raise PopupException(_('Cannot remove Solr cores.'))

  if api.remove_collection(name):
    # Delete instance directory.
    solrctl_path = get_solrctl_path()

    process = subprocess.Popen([solrctl_path, "--zk", get_solr_ensemble(), "instancedir", "--delete", name],
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    if process.wait() != 0:
      # Message fixes: "Cloud not" typo, and this is the DELETE path -- the
      # popup previously said "create instance directory".
      LOG.error("Could not delete instance directory.\nOutput stream: %s\nError stream: %s" % process.communicate())
      raise PopupException(_('Could not delete instance directory. Check error logs for more info.'))
  else:
    raise PopupException(_('Could not remove collection. Check error logs for more info.'))
def create_collection(self, name, fields, unique_key_field='id', df='text'):
  """
  Create solr collection and instance dir.
  Create schema.xml file so that we can set UniqueKey field.

  The instance dir is created through the external solrctl tool; on a failed
  collection creation the instance dir is deleted again.
  Raises PopupException on any failure.
  """
  # Need to remove path afterwards
  tmp_path, solr_config_path = utils.copy_configs(fields, unique_key_field, df)

  # Create instance directory.
  process = subprocess.Popen([conf.SOLRCTL_PATH.get(), "instancedir", "--create", name, solr_config_path],
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             env={
                               'SOLR_HOME': conf.SOLR_HOME.get(),
                               'SOLR_ZK_ENSEMBLE': conf.SOLR_ZK_ENSEMBLE.get()
                             })
  status = process.wait()
  shutil.rmtree(tmp_path)

  if status != 0:
    # Typo fix: "Cloud not" -> "Could not".
    LOG.error("Could not create instance directory.\nOutput stream: %s\nError stream: %s" % process.communicate())
    raise PopupException(_('Could not create instance directory. Check error logs for more info.'))

  api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())
  if not api.create_collection(name):
    # Delete instance directory.
    process = subprocess.Popen([conf.SOLRCTL_PATH.get(), "instancedir", "--delete", name],
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE,
                               env={
                                 'SOLR_HOME': conf.SOLR_HOME.get(),
                                 'SOLR_ZK_ENSEMBLE': conf.SOLR_ZK_ENSEMBLE.get()
                               })
    if process.wait() != 0:
      # Typo fix: "Cloud not" -> "Could not".
      LOG.error("Could not delete instance directory.\nOutput stream: %s\nError stream: %s" % process.communicate())
    raise PopupException(_('Could not create collection. Check error logs for more info.'))
def get_service_info(service):
  """Return {'url': ..., 'security_enabled': ...} for a known Hue service.

  `service` is matched case-insensitively against solr/oozie/httpfs/rm/jhs/
  sparkhs. When the service is unknown or has no configured url, an info
  message is logged and the (possibly empty) dict is returned as-is.
  """
  service_info = {}
  if service.lower() == 'solr':
    service_info['url'] = SOLR_URL.get()
    service_info['security_enabled'] = SOLR_SECURITY_ENABLED.get()
  if service.lower() == 'oozie':
    service_info['url'] = OOZIE_URL.get()
    service_info['security_enabled'] = OOZIE_SECURITY_ENABLED.get()
  if service.lower() == 'httpfs':
    hdfs_config = hdfs_conf.HDFS_CLUSTERS['default']
    service_info['url'] = hdfs_config.WEBHDFS_URL.get()
    service_info['security_enabled'] = hdfs_config.SECURITY_ENABLED.get()
  if service.lower() == 'rm':
    yarn_cluster = cluster.get_cluster_conf_for_job_submission()
    service_info['url'] = yarn_cluster.RESOURCE_MANAGER_API_URL.get()
    service_info['security_enabled'] = yarn_cluster.SECURITY_ENABLED.get()
  if service.lower() == 'jhs':
    yarn_cluster = cluster.get_cluster_conf_for_job_submission()
    service_info['url'] = yarn_cluster.HISTORY_SERVER_API_URL.get()
    service_info['security_enabled'] = yarn_cluster.SECURITY_ENABLED.get()
  if service.lower() == 'sparkhs':
    yarn_cluster = cluster.get_cluster_conf_for_job_submission()
    service_info['url'] = yarn_cluster.SPARK_HISTORY_SERVER_URL.get()
    service_info['security_enabled'] = yarn_cluster.SPARK_HISTORY_SERVER_SECURITY_ENABLED.get()

  if 'url' not in service_info or service_info['url'] is None:
    # Fix: previously the None-url case fell through and crashed below with
    # AttributeError (and the missing-key case with KeyError) on
    # service_info['url'].endswith('/'). Return early instead.
    logging.info("Hue does not have %s configured, cannot test %s" % (service, service))
    return service_info

  if service_info['url'].endswith('/'):
    service_info['url'] = service_info['url'][:-1]

  return service_info
def _create_non_solr_cloud_collection(self, name, fields, unique_key_field, df):
  """Create a local Solr core (non-SolrCloud mode) backed by a freshly
  generated instance directory; roll it back when Solr refuses the core."""
  instancedir = os.path.join(CORE_INSTANCE_DIR.get(), name)
  if os.path.exists(instancedir):
    raise PopupException(_("Instance directory %s already exists! Please remove it from the file system.") % instancedir)

  staging_dir, generated_config_dir = copy_configs(fields, unique_key_field, df, False)
  try:
    shutil.move(generated_config_dir, instancedir)
  finally:
    # Always discard the staging directory.
    shutil.rmtree(staging_dir)

  api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())
  if not api.create_core(name, instancedir):
    # Core creation failed: remove the instance directory we just made.
    shutil.rmtree(instancedir)
    raise PopupException(_('Could not create collection. Check error logs for more info.'))
def __init__(self, user):
  """Bind this controller to `user` and a Solr API client acting as that user."""
  self.user = user
  self.api = SolrApi(SOLR_URL.get(), user, SECURITY_ENABLED.get())
def __init__(self, solr_url):
  """HTTP resource wrapper for the Solr server at `solr_url`; Kerberos auth
  is enabled when globally configured."""
  self._url = solr_url

  client = HttpClient(solr_url, logger=LOG)
  if SECURITY_ENABLED.get():
    client.set_kerberos_auth()

  self._client = client
  self._root = Resource(client)
def is_enabled():
  """Return the Search app's SECURITY_ENABLED flag, or None (falsy) when the
  search app is not installed."""
  try:
    from search.conf import SECURITY_ENABLED
    return SECURITY_ENABLED.get()
  except ImportError:
    # Fix: modernized from Py2-only "except ImportError, e" and dropped the
    # unused exception binding.
    LOG.warn("Search app is not enabled")
# Delete instance directory if we couldn't create a collection. try: zc.delete_path(root_node) except Exception, e: raise PopupException(_('Error in deleting Solr configurations.'), detail=e) else: # Non-solrcloud mode # Create instance directory locally. instancedir = os.path.join(CORE_INSTANCE_DIR.get(), name) if os.path.exists(instancedir): raise PopupException(_("Instance directory %s already exists! Please remove it from the file system.") % instancedir) tmp_path, solr_config_path = copy_configs(fields, unique_key_field, df, False) shutil.move(solr_config_path, instancedir) shutil.rmtree(tmp_path) api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get()) if not api.create_core(name, instancedir): # Delete instance directory if we couldn't create a collection. shutil.rmtree(instancedir) raise PopupException(_('Could not create collection. Check error logs for more info.')) def delete_collection(self, name, core): """ Delete solr collection/core and instance dir """ api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get()) if core: raise PopupException(_('Cannot remove Solr cores.')) if api.remove_collection(name): # Delete instance directory.
def __init__(self, user, api=None):
  """Keep `user` and a Solr API client; a default client acting as `user`
  is built when none is injected."""
  self.user = user
  if api is None:
    api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())
  self.api = api