Exemplo n.º 1
0
def admin_collection_template(request, collection_id):
  """
  View/update the result template of a Hue search collection.

  POST: saves the submitted template ("result") properties and returns an
  empty JSON object.
  GET path: fetches up to 5 sample documents from Solr so the template
  editor can show a live preview. (The final render presumably follows
  this excerpt in the original file.)
  """
  hue_collection = Collection.objects.get(id=collection_id)
  solr_collection = SolrApi(SOLR_URL.get(), request.user).collection_or_core(hue_collection)
  sample_data = {}

  if request.method == 'POST':
    hue_collection.result.update_from_post(request.POST)
    hue_collection.result.save()
    return HttpResponse(json.dumps({}), mimetype="application/json")

  # Minimal preview query: first 5 documents, no filtering, no facets.
  solr_query = {}
  solr_query['collection'] = hue_collection.name
  solr_query['q'] = ''
  solr_query['fq'] = ''
  solr_query['rows'] = 5
  solr_query['start'] = 0
  solr_query['facets'] = 0

  try:
    response = SolrApi(SOLR_URL.get(), request.user).query(solr_query, hue_collection)
    sample_data = json.dumps(response["response"]["docs"])
  except PopupException, e:
    message = e
    try:
      message = json.loads(e.message.message)['error']['msg'] # Try to get the core error
    except:
      pass
    request.error(_('No preview available, some facets are invalid: %s') % message)
    LOG.exception(e)
Exemplo n.º 2
0
  def create_collection(self, name, fields, unique_key_field='id', df='text'):
    """
    Create a Solr collection (SolrCloud mode) or core (standalone mode).

    Generates a temporary config directory (including a schema.xml with the
    given unique key field), registers it with Solr, then creates the
    collection/core. Temporary files are cleaned up and a partial instance
    directory is rolled back on failure.

    Raises PopupException when any step fails.
    """
    if self.is_solr_cloud_mode():
      # SolrCloud mode: push an instance directory via solrctl, then create
      # the collection through the Solr REST API.

      # copy_configs returns a temp dir that must be removed afterwards.
      tmp_path, solr_config_path = utils.copy_configs(fields, unique_key_field, df, True)

      solrctl_path = get_solrctl_path()
      # Same environment for both solrctl invocations below.
      solrctl_env = {'SOLR_ZK_ENSEMBLE': conf.SOLR_ZK_ENSEMBLE.get()}

      process = subprocess.Popen([solrctl_path, "instancedir", "--create", name, solr_config_path],
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE,
                                 env=solrctl_env)
      status = process.wait()

      # Don't want directories laying around
      shutil.rmtree(tmp_path)

      if status != 0:
        LOG.error("Could not create instance directory.\nOutput: %s\nError: %s" % process.communicate())
        raise PopupException(_('Could not create instance directory. '
                               'Check if solr_zk_ensemble and solrctl_path are correct in Hue config [indexer].'))

      api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())
      if not api.create_collection(name):
        # Roll back: delete the instance directory if we couldn't create a collection.
        process = subprocess.Popen([solrctl_path, "instancedir", "--delete", name],
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE,
                                   env=solrctl_env)
        if process.wait() != 0:
          # Fixed typo in the log message ("Cloud not" -> "Could not").
          LOG.error("Could not delete collection.\nOutput: %s\nError: %s" % process.communicate())
        raise PopupException(_('Could not create collection. Check error logs for more info.'))
    else:
      # Standalone (non-SolrCloud) mode: create the instance directory locally.
      instancedir = os.path.join(conf.CORE_INSTANCE_DIR.get(), name)
      if os.path.exists(instancedir):
        raise PopupException(_("Instance directory %s already exists! Please remove it from the file system.") % instancedir)
      tmp_path, solr_config_path = utils.copy_configs(fields, unique_key_field, df, False)
      shutil.move(solr_config_path, instancedir)
      shutil.rmtree(tmp_path)

      api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())
      if not api.create_core(name, instancedir):
        # Roll back: delete the instance directory if we couldn't create a core.
        shutil.rmtree(instancedir)
        raise PopupException(_('Could not create collection. Check error logs for more info.'))
Exemplo n.º 3
0
  def fields_data(self, user):
    """
    Return the collection's schema fields (static + dynamic), sorted by name.

    Each entry is a dict with 'name' and 'type' keys.
    """
    schema_fields = SolrApi(SOLR_URL.get(), user).fields(self.name)
    schema_fields = schema_fields['schema']['fields']

    dynamic_fields = SolrApi(SOLR_URL.get(), user).fields(self.name, dynamic=True)
    dynamic_fields = dynamic_fields['fields']

    # Dynamic fields override/extend the static schema fields.
    schema_fields.update(dynamic_fields)

    # Sort explicitly by field name: sorting the dicts themselves relied on
    # undefined dict-comparison semantics.
    return sorted([{'name': str(field), 'type': str(attributes.get('type', ''))}
                  for field, attributes in schema_fields.iteritems()],
                  key=lambda f: f['name'])
Exemplo n.º 4
0
  def get_fields(self, collection_or_core_name):
    """
    Fetch the schema fields and unique key of a collection/core.

    Returns a (uniquekey, fields) tuple.
    Raises PopupException if either Solr call fails.
    """
    # One client for both calls instead of constructing it twice.
    api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())

    try:
      field_data = api.fields(collection_or_core_name)
      fields = self._format_flags(field_data['schema']['fields'])
    except Exception:
      # 'except Exception' instead of a bare except so SystemExit /
      # KeyboardInterrupt are not swallowed.
      LOG.exception(_('Could not fetch fields for collection %s.') % collection_or_core_name)
      raise PopupException(_('Could not fetch fields for collection %s. See logs for more info.') % collection_or_core_name)

    try:
      uniquekey = api.uniquekey(collection_or_core_name)
    except Exception:
      LOG.exception(_('Could not fetch unique key for collection %s.') % collection_or_core_name)
      raise PopupException(_('Could not fetch unique key for collection %s. See logs for more info.') % collection_or_core_name)

    return uniquekey, fields
Exemplo n.º 5
0
  def get_all_indexes(self, show_all=False):
    """
    Return the names of Solr collections and aliases, best effort.

    When show_all is set, or when nothing else could be fetched, core
    names are appended as well.
    """
    indexes = []
    try:
      indexes = self.get_solr_collections().keys()
    except Exception:
      # Narrowed from a bare except: keep best-effort behavior but do not
      # swallow SystemExit / KeyboardInterrupt.
      LOG.exception('failed to get indexes')

    try:
      indexes += SolrApi(SOLR_URL.get(), self.user).aliases().keys()
    except Exception:
      LOG.exception('failed to get index aliases')

    if show_all or not indexes:
      return indexes + SolrApi(SOLR_URL.get(), self.user).cores().keys()
    else:
      return indexes
Exemplo n.º 6
0
  def get_collections(self):
    """
    List Solr collections, aliases and cores, tagging each entry with
    'isCoreOnly' (and 'isAlias' + member list for aliases).

    NOTE(review): the populated dicts are not returned in the visible
    code; the original presumably continues past this excerpt.
    """
    solr_collections = {}
    solr_aliases = {}
    solr_cores = {}

    try:
      api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())

      if self.is_solr_cloud_mode():
        # Collections and aliases only exist in SolrCloud mode.
        solr_collections = api.collections()
        for name in solr_collections:
          solr_collections[name]['isCoreOnly'] = False

        solr_aliases = api.aliases()
        for name in solr_aliases:
          solr_aliases[name] = {
              'isCoreOnly': False,
              'isAlias': True,
              'collections': solr_aliases[name]
          }

      solr_cores = api.cores()
      for name in solr_cores:
        solr_cores[name]['isCoreOnly'] = True
    except Exception, e:
      LOG.warn('No Zookeeper servlet running on Solr server: %s' % e)
Exemplo n.º 7
0
  def update_data_from_hdfs(self, fs, collection_or_core_name, fields, path, data_type='separated', indexing_strategy='upload', **kwargs):
    """
    Index the contents of an HDFS file into a Solr collection/core.

    Supports 'log' and 'separated' (CSV-like) data; the file is converted
    to JSON and posted to Solr. Only the 'upload' indexing strategy is
    supported, and the file must not exceed MAX_UPLOAD_SIZE.

    Raises PopupException on oversized files, unknown data types,
    unsupported strategies, or a failed Solr update.
    """
    api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())

    if indexing_strategy == 'upload':
      stats = fs.stats(path)
      if stats.size > MAX_UPLOAD_SIZE:
        raise PopupException(_('File size is too large to handle!'))
      else:
        # Get fields for filtering
        unique_key, fields = self.get_fields(collection_or_core_name)
        fields = [{'name': field, 'type': fields[field]['type']} for field in fields]

        fh = fs.open(path)
        try:
          if data_type == 'log':
            # Transform to JSON then update
            data = json.dumps([value for value in field_values_from_log(fh, fields)])
            content_type = 'json'
          elif data_type == 'separated':
            data = json.dumps([value for value in field_values_from_separated_file(fh, kwargs.get('separator', ','), kwargs.get('quote_character', '"'), fields)], indent=2)
            content_type = 'json'
          else:
            raise PopupException(_('Could not update index. Unknown type %s') % data_type)
        finally:
          # try/finally so the handle is also closed on the unknown-type
          # raise path, which previously leaked it.
          fh.close()
      if not api.update(collection_or_core_name, data, content_type=content_type):
        raise PopupException(_('Could not update index. Check error logs for more info.'))
    else:
      raise PopupException(_('Could not update index. Indexing strategy %s not supported.') % indexing_strategy)
Exemplo n.º 8
0
def zkensemble():
  """
  Try to guess the value if no values are specified.

  Order of preference: the legacy zookeeper app's configured cluster (when
  not the default), then the Solr host from the search app, and finally
  'localhost:2181'.
  """

  from django.conf import settings

  if 'zookeeper' in settings.INSTALLED_APPS:
    try:
      # Backward compatibility until Hue 4
      from zookeeper.conf import CLUSTERS
      clusters = CLUSTERS.get()
      if clusters['default'].HOST_PORTS.get() != 'localhost:2181':
        return '%s' % clusters['default'].HOST_PORTS.get()
    except Exception:
      # Narrowed from a bare except; this lookup is best effort only.
      LOG.warn('Could not get zookeeper ensemble from the zookeeper app')

  if 'search' in settings.INSTALLED_APPS:
    try:
      from search.conf import SOLR_URL
      parsed = urlparse(SOLR_URL.get())
      return "%s:2181" % (parsed.hostname or 'localhost')
    except Exception:
      LOG.warn('Could not get zookeeper ensemble from the search app')

  return "localhost:2181"
Exemplo n.º 9
0
  def update_data_from_hive(self, db, collection_or_core_name, database, table, columns, indexing_strategy='upload'):
    """
    Index the result of a Hive SELECT over the given columns into a Solr
    collection/core.

    Fetches up to 100 rows (bounded further by the browse limit clause)
    and posts them to Solr as CSV. Raises PopupException on failure.
    """
    # Run a custom hive query and post data to collection
    from beeswax.server import dbms
    import tablib

    api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())
    if indexing_strategy == 'upload':
      table = db.get_table(database, table)
      hql = "SELECT %s FROM `%s.%s` %s" % (','.join(columns), database, table.name, db._get_browse_limit_clause(table))
      query = dbms.hql_query(hql)

      try:
        handle = db.execute_and_wait(query)

        if handle:
          result = db.fetch(handle, rows=100)
          db.close(handle)

          # First row is the header, then the data rows.
          dataset = tablib.Dataset()
          dataset.append(columns)
          for row in result.rows():
            dataset.append(row)

          if not api.update(collection_or_core_name, dataset.csv, content_type='csv'):
            raise PopupException(_('Could not update index. Check error logs for more info.'))
        else:
          raise PopupException(_('Could not update index. Could not fetch any data from Hive.'))
      except Exception as e:
        # NOTE: this also re-wraps the PopupExceptions raised above,
        # preserving the original behavior.
        raise PopupException(_('Could not update index.'), detail=e)
Exemplo n.º 10
0
def _fetch_collections(request):
  """
  Navigation helper: describe the Solr "databases" (collections, configs,
  admin) or the entries under one of them, based on the 'path' GET
  parameter ('', 'item' or 'item/name').

  Raises PopupException for unknown paths.
  """
  from libsolr.api import SolrApi
  from search.conf import SOLR_URL

  path = request.GET['path']
  item = None
  name = None

  if path:
    item = path
  if '/' in path:
    # Split only on the first '/' so names containing slashes do not make
    # the 2-tuple unpacking raise ValueError.
    item, name = path.split('/', 1)

  api = SolrApi(SOLR_URL.get(), request.user)

  if not item:
    return {"databases": ["collections", "configs", "admin"]}
  elif item and name:
    return {"authorizable_link": "/indexer/#edit/%s" % name, "extended_columns": [], "columns": [], "partition_keys": []}
  elif item == 'collections':
    return {"tables_meta": [{"comment": None, "type": "Table", "name": col} for col in api.collections2()]}
  elif item == 'configs':
    return {"tables_meta": [{"comment": None, "type": "Table", "name": conf} for conf in api.configs()]}
  elif item == 'admin':
    return {"tables_meta": [{"comment": None, "type": "Table", "name": 'collections'}, {"comment": None, "type": "Table", "name": "cores"}]}
  else:
    raise PopupException(_('Authorizable %s could not be retrieved') % path)
Exemplo n.º 11
0
  def test_query(self):
    """Smoke test: run an empty query against the demo collection."""
    raw_json = Collection2(user=self.user, name='log_analytics_demo').get_json(self.user)
    parsed = json.loads(raw_json)

    empty_query = {'qs': [{'q': ''}], 'fqs': [], 'start': 0}

    SolrApi(SOLR_URL.get(), self.user).query(parsed['collection'], empty_query)
Exemplo n.º 12
0
  def _create_solr_cloud_collection(self, name, fields, unique_key_field, df):
    """
    Create a SolrCloud collection: upload the generated configs to
    ZooKeeper, then create the collection via the Solr API.

    The uploaded ZooKeeper configs are removed again on failure, and the
    temporary config directory is always cleaned up.
    """
    with ZookeeperClient(hosts=get_solr_ensemble(), read_only=False) as zc:
      root_node = '%s/%s' % (ZK_SOLR_CONFIG_NAMESPACE, name)

      tmp_path, solr_config_path = copy_configs(fields, unique_key_field, df, True)
      try:
        config_root_path = '%s/%s' % (solr_config_path, 'conf')
        try:
          zc.copy_path(root_node, config_root_path)

        except Exception as e:
          # 'as' syntax instead of the legacy comma form (Python 3 compatible).
          zc.delete_path(root_node)
          raise PopupException(_('Error in copying Solr configurations.'), detail=e)
      finally:
        # Don't want directories laying around
        shutil.rmtree(tmp_path)

      api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())
      if not api.create_collection(name):
        # Delete the uploaded configs since the collection could not be created.
        try:
          zc.delete_path(root_node)
        except Exception as e:
          raise PopupException(_('Error in deleting Solr configurations.'), detail=e)
        raise PopupException(_('Could not create collection. Check error logs for more info.'))
Exemplo n.º 13
0
Arquivo: views.py Projeto: QLGu/hue
def get_terms(request):
  """
  Ajax endpoint: fetch terms for one field of a collection.

  Reads 'collection' and 'analysis' JSON blobs from the POST data and
  queries the Solr terms component with a limit of 25 and the requested
  prefix; results are paired via pairwise2(). (The final response
  presumably follows this excerpt in the original file.)
  """
  result = {'status': -1, 'message': 'Error'}

  try:
    collection = json.loads(request.POST.get('collection', '{}'))
    analysis = json.loads(request.POST.get('analysis', '{}'))

    field = analysis['name']
    properties = {
      'terms.limit': 25,
      'terms.prefix': analysis['terms']['prefix']
      # lower
      # limit
      # mincount
      # maxcount
    }

    result['terms'] = SolrApi(SOLR_URL.get(), request.user).terms(collection['name'], field, properties)
    result['terms'] = pairwise2(field, [], result['terms']['terms'][field])
    result['status'] = 0
    result['message'] = ''

  except Exception, e:
    result['message'] = force_unicode(e)
    if 'not currently supported' in result['message']:
      result['status'] = 1
      result['message'] = _('This field does not support stats')
Exemplo n.º 14
0
def _create_facet(collection, user, facet_id, facet_label, facet_field, widget_type):
  """
  Build the definition dict for a new dashboard facet.

  The facet type is 'range' when range properties can be derived from the
  field, 'query' for hit widgets, and 'field' otherwise. Map widgets get
  world scope and wider limits.
  """
  properties = {
    'sort': 'desc',
    'canRange': False,
    'stacked': False,
    'limit': 10,
    'mincount': 0,
    'isDate': False,
    'andUp': False,  # Not used yet
  }

  range_properties = _new_range_facet(SolrApi(SOLR_URL.get(), user), collection, facet_field, widget_type)

  if range_properties:
    facet_type = 'range'
    properties.update(range_properties)
  else:
    facet_type = 'query' if widget_type == 'hit-widget' else 'field'

  if widget_type == 'map-widget':
    properties.update({'scope': 'world', 'mincount': 1, 'limit': 100})

  return {
    'id': facet_id,
    'label': facet_label,
    'field': facet_field,
    'type': facet_type,
    'widgetType': widget_type,
    'properties': properties,
  }
Exemplo n.º 15
0
def get_terms(request):
    """
    Ajax endpoint: fetch terms for one field of a collection.

    Reads 'collection' and 'analysis' JSON blobs from the POST data and
    queries the Solr terms component with a limit of 25 and the requested
    prefix; results are paired via pairwise2(). (The final response
    presumably follows this excerpt in the original file.)
    """
    result = {"status": -1, "message": "Error"}

    try:
        collection = json.loads(request.POST.get("collection", "{}"))
        analysis = json.loads(request.POST.get("analysis", "{}"))

        field = analysis["name"]
        properties = {
            "terms.limit": 25,
            "terms.prefix": analysis["terms"]["prefix"]
            # lower
            # limit
            # mincount
            # maxcount
        }

        result["terms"] = SolrApi(SOLR_URL.get(), request.user).terms(collection["name"], field, properties)
        result["terms"] = pairwise2(field, [], result["terms"]["terms"][field])
        result["status"] = 0
        result["message"] = ""

    except Exception, e:
        result["message"] = force_unicode(e)
        if "not currently supported" in result["message"]:
            result["status"] = 1
            result["message"] = _("This field does not support stats")
Exemplo n.º 16
0
Arquivo: conf.py Projeto: Ile2/hue
def zkensemble():
  """
  ZooKeeper Ensemble
  """
  from search.conf import SOLR_URL
  solr_host = urlparse(SOLR_URL.get()).hostname
  return "%s:2181/solr" % (solr_host or 'localhost')
Exemplo n.º 17
0
def index(request):
  """
  Search landing page: run the query built from the GET parameters against
  the selected collection.

  Superusers with no collections are redirected to the admin page, other
  users to the 'no collections' page. (The final render presumably follows
  this excerpt in the original file.)
  """
  hue_collections = Collection.objects.filter(enabled=True)

  if not hue_collections:
    if request.user.is_superuser:
      return admin_collections(request, True)
    else:
      return no_collections(request)

  init_collection = initial_collection(request, hue_collections)

  search_form = QueryForm(request.GET, initial_collection=init_collection)
  response = {}
  error = {}
  solr_query = {}

  if search_form.is_valid():
    try:
      collection_id = search_form.cleaned_data['collection']
      hue_collection = Collection.objects.get(id=collection_id)

      solr_query = search_form.solr_query_dict
      response = SolrApi(SOLR_URL.get(), request.user).query(solr_query, hue_collection)

      # Derived pagination / timing info for the template.
      solr_query['total_pages'] = int(math.ceil((float(response['response']['numFound']) / float(solr_query['rows']))))
      solr_query['search_time'] = response['responseHeader']['QTime']
    except Exception, e:
      error['title'] = force_unicode(e.title) if hasattr(e, 'title') else ''
      error['message'] = force_unicode(str(e))
Exemplo n.º 18
0
def index(request):
  """
  Search landing page (cookie variant): build the Solr query from the
  submitted form, defaulting the collection to the one stored in the
  'hueSearchLastCollection' cookie. (The final render presumably follows
  this excerpt in the original file.)
  """
  hue_collections = Collection.objects.filter(enabled=True)

  if not hue_collections:
    if request.user.is_superuser:
      return admin_collections(request, True)
    else:
      return no_collections(request)

  initial_collection = request.COOKIES.get('hueSearchLastCollection', 0)
  search_form = QueryForm(request.GET, initial_collection=initial_collection)
  response = {}
  error = {}
  solr_query = {}
  hue_collection = None

  if search_form.is_valid():
    collection_id = search_form.cleaned_data['collection']
    solr_query['q'] = search_form.cleaned_data['query'].encode('utf8')
    solr_query['fq'] = search_form.cleaned_data['fq']
    if search_form.cleaned_data['sort']:
      solr_query['sort'] = search_form.cleaned_data['sort']
    # Defaults: 15 rows per page, start at 0, facets enabled.
    solr_query['rows'] = search_form.cleaned_data['rows'] or 15
    solr_query['start'] = search_form.cleaned_data['start'] or 0
    solr_query['facets'] = search_form.cleaned_data['facets'] or 1

    try:
      hue_collection = Collection.objects.get(id=collection_id)
      solr_query['collection'] = hue_collection.name
      response = SolrApi(SOLR_URL.get(), request.user).query(solr_query, hue_collection)
    except Exception, e:
      error['message'] = unicode(str(e), "utf8")
Exemplo n.º 19
0
Arquivo: views.py Projeto: qccash/hue
def update_document(request):
  """
  Ajax endpoint: push edited document fields to Solr as an atomic update.

  Only fields flagged 'hasChanged' are sent, as {"set": value} edits keyed
  on the document id; the '_version_' field, when present, is passed as an
  optimistic-concurrency version. (The final response presumably follows
  this excerpt in the original file.)
  """
  result = {'status': -1, 'message': 'Error'}

  if not can_edit_index(request.user):
    result['message'] = _('Permission to edit the document denied')
    return JsonResponse(result)

  try:
    collection = json.loads(request.POST.get('collection', '{}'))
    document = json.loads(request.POST.get('document', '{}'))
    doc_id = request.POST.get('id')

    if document['hasChanged']:
      edits = {
          "id": doc_id,
      }
      version = None # If there is a version, use it to avoid potential concurrent update conflicts

      for field in document['details']:
        if field['hasChanged']:
          edits[field['key']] = {"set": field['value']}
        if field['key'] == '_version_':
          version = field['value']

      if SolrApi(SOLR_URL.get(), request.user).update(collection['name'], json.dumps([edits]), content_type='json', version=version):
        result['status'] = 0
        result['message'] = _('Document successfully updated.')
    else:
      result['status'] = 0
      result['message'] = _('Document has no modifications to change.')

  except Exception, e:
    result['message'] = force_unicode(e)
Exemplo n.º 20
0
  def get_all_indexes(self, show_all=False):
    """
    Best-effort list of Solr collection and alias names.

    Core names are appended when show_all is set or when nothing else
    could be fetched.
    """
    indexes = []
    try:
      indexes = self.get_solr_collection().keys()
    except Exception:
      # Deliberate best effort (collections are unavailable outside
      # SolrCloud); narrowed from a bare except so SystemExit /
      # KeyboardInterrupt are not swallowed.
      pass

    try:
      indexes += SolrApi(SOLR_URL.get(), self.user).aliases().keys()
    except Exception:
      pass

    if show_all or not indexes:
      return indexes + SolrApi(SOLR_URL.get(), self.user).cores().keys()
    else:
      return indexes
Exemplo n.º 21
0
 def get_all_indexes(self):
   """Return collection names (best effort) plus all core names."""
   indexes = []
   try:
     indexes = self.get_solr_collection().keys()
   except Exception:
     # Narrowed from a bare except; collection listing is best effort.
     pass
   return indexes + SolrApi(SOLR_URL.get(), self.user).cores().keys()
Exemplo n.º 22
0
  def create_collection(self, name, fields, unique_key_field='id', df='text'):
    """
    Create solr collection or core and instance dir.
    Create schema.xml file so that we can set UniqueKey field.
    """
    if self.is_solr_cloud_mode():
      # solrcloud mode

      # Need to remove path afterwards
      tmp_path, solr_config_path = copy_configs(fields, unique_key_field, df, True)

      # Upload the generated config dir to ZooKeeper under the Solr
      # config namespace; roll back the node on failure.
      zc = ZookeeperClient(hosts=get_solr_ensemble(), read_only=False)
      root_node = '%s/%s' % (ZK_SOLR_CONFIG_NAMESPACE, name)
      config_root_path = '%s/%s' % (solr_config_path, 'conf')
      try:
        zc.copy_path(root_node, config_root_path)
      except Exception, e:
        zc.delete_path(root_node)
        raise PopupException(_('Error in copying Solr configurations.'), detail=e)

      # Don't want directories laying around
      shutil.rmtree(tmp_path)

      api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())
      if not api.create_collection(name):
        # Delete instance directory if we couldn't create a collection.
        try:
          zc.delete_path(root_node)
        except Exception, e:
          raise PopupException(_('Error in deleting Solr configurations.'), detail=e)
        # NOTE(review): the excerpt ends here; the original presumably
        # raises a 'could not create collection' error after this cleanup.
Exemplo n.º 23
0
 def get_new_cores(self):
   """
   List Solr cores that do not yet have a matching Hue Collection.

   NOTE(review): 'solr_cores' is not returned in the visible code; the
   original presumably continues past this excerpt.
   """
   try:
     solr_cores = SolrApi(SOLR_URL.get()).cores()
     # Drop cores that already exist as Hue Collection objects.
     for name in Collection.objects.values_list('name', flat=True):
       solr_cores.pop(name, None)
   except Exception, e:
     solr_cores = []
     LOG.warn('No Single core setup on Solr server: %s' % e)
Exemplo n.º 24
0
def admin_collection_schema(request, collection_id):
  """Return the Solr schema of a collection as a JSON payload."""
  hue_collection = Collection.objects.get(id=collection_id)
  api = SolrApi(SOLR_URL.get(), request.user)
  solr_schema = api.schema(hue_collection.name)

  payload = json.dumps({'solr_schema': solr_schema.decode('utf-8')})
  return HttpResponse(payload, mimetype="application/json")
Exemplo n.º 25
0
 def get_new_collections(self):
   """
   List Solr collections that do not yet have a matching Hue Collection.

   NOTE(review): 'solr_collections' is not returned in the visible code;
   the original presumably continues past this excerpt.
   """
   try:
     solr_collections = SolrApi(SOLR_URL.get()).collections()
     # Drop collections that already exist as Hue Collection objects.
     for name in Collection.objects.values_list('name', flat=True):
       solr_collections.pop(name, None)
   except Exception, e:
     LOG.warn('No Zookeeper servlet running on Solr server: %s' % e)
     solr_collections = []
Exemplo n.º 26
0
 def fields_data(self, user, name):
   """
   Fetch the schema fields of a collection via the /luke handler, falling
   back to the schema API when /luke fails.

   NOTE(review): 'schema_fields' is not returned in the visible code; the
   original presumably continues past this excerpt.
   """
   api = SolrApi(SOLR_URL.get(), user)
   try:
     schema_fields = api.fields(name)
     schema_fields = schema_fields['schema']['fields']
   except Exception, e:
     # Fall back: rebuild a /luke-shaped structure from the schema API.
     LOG.warn('/luke call did not succeed: %s' % e)
     fields = api.schema_fields(name)
     schema_fields = Collection2._make_luke_from_schema_fields(fields)
Exemplo n.º 27
0
  def get_autocomplete(self):
    """
    Collect collection and config names for UI autocompletion, best effort.

    NOTE(review): the 'autocomplete' dict is not returned in the visible
    code; the original presumably continues past this excerpt.
    """
    autocomplete = {}
    try:
      api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())
      autocomplete['collections'] = api.collections2()
      autocomplete['configs'] = api.configs()

    except Exception, e:
      LOG.warn('No Zookeeper servlet running on Solr server: %s' % e)
Exemplo n.º 28
0
 def is_solr_cloud_mode(self):
   """
   Return whether the Solr server runs in SolrCloud mode.

   The result of probing the collections API is cached on the instance, so
   the probe (and the API client construction) happens at most once.
   """
   if not hasattr(self, '_solr_cloud_mode'):
     # Only build the API client when the answer is not cached yet; it was
     # previously constructed on every call.
     api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())
     try:
       api.collections()
       setattr(self, '_solr_cloud_mode', True)
     except Exception:
       # Narrowed from a bare except: any API failure means standalone mode.
       setattr(self, '_solr_cloud_mode', False)
   return getattr(self, '_solr_cloud_mode')
Exemplo n.º 29
0
 def is_solr_cloud_mode(self):
   """
   Detect whether the Solr server runs in SolrCloud mode by probing the
   collections API, caching the answer on the instance.

   NOTE(review): the cached value is not returned in the visible code; the
   original presumably continues past this excerpt.
   """
   api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())
   if not hasattr(self, '_solr_cloud_mode'):
     try:
       api.collections()
       setattr(self, '_solr_cloud_mode', True)
     except Exception, e:
       LOG.info('Non SolrCloud server: %s' % e)
       setattr(self, '_solr_cloud_mode', False)
Exemplo n.º 30
0
def admin_collection_solr_properties(request, collection_id):
  """Render the Solr properties panel of a collection and return it as JSON."""
  hue_collection = Collection.objects.get(id=collection_id)
  api = SolrApi(SOLR_URL.get(), request.user)
  solr_collection = api.collection_or_core(hue_collection)

  template_args = {
    'solr_collection': solr_collection,
    'hue_collection': hue_collection,
  }
  content = render('admin_collection_properties_solr_properties.mako', request, template_args, force_template=True).content

  return HttpResponse(json.dumps({'content': content}), mimetype="application/json")
Exemplo n.º 31
0
def search(request):
  """
  Run a dashboard search against Solr and augment the response for the UI.

  Expects 'collection' and 'query' JSON blobs in the POST data. Solr REST
  errors are reported under response['error']; any other failure is
  surfaced as a PopupException.
  """
  response = {}

  collection = json.loads(request.POST.get('collection', '{}'))
  query = json.loads(request.POST.get('query', '{}'))
  query['download'] = 'download' in request.POST
  # todo: remove the selected histo facet if multiq

  if collection['id']:
    hue_collection = Collection.objects.get(id=collection['id']) # TODO perms

  if collection:
    try:
      response = SolrApi(SOLR_URL.get(), request.user).query(collection, query)
      response = augment_solr_response(response, collection, query)
    except RestException as e:
      try:
        response['error'] = json.loads(e.message)['error']['msg']
      except Exception:
        response['error'] = force_unicode(str(e))
    except Exception as e:
      # The assignment that used to follow this raise was unreachable dead
      # code and has been removed.
      raise PopupException(e, title=_('Error while accessing Solr'))
Exemplo n.º 32
0
    def _create_non_solr_cloud_collection(self, name, fields, unique_key_field,
                                          df):
        """
        Create a local Solr core (standalone, non-SolrCloud mode).

        Moves the generated configs into a fresh instance directory and
        creates the core; the directory is rolled back when core creation
        fails. Raises PopupException on any failure.
        """
        # The instance directory must not already exist.
        instancedir = os.path.join(CORE_INSTANCE_DIR.get(), name)
        if os.path.exists(instancedir):
            message = _("Instance directory %s already exists! Please remove it from the file system.")
            raise PopupException(message % instancedir)

        tmp_path, solr_config_path = copy_configs(fields, unique_key_field, df,
                                                  False)
        try:
            shutil.move(solr_config_path, instancedir)
        finally:
            shutil.rmtree(tmp_path)

        api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())
        if api.create_core(name, instancedir):
            return
        # Roll back the instance directory when core creation fails.
        shutil.rmtree(instancedir)
        raise PopupException(
            _('Could not create collection. Check error logs for more info.'
              ))
Exemplo n.º 33
0
def admin_collection_properties(request, collection_id):
  """
  Show and update the properties of a Hue collection.

  POST saves the submitted form (marking core-only collections and the
  autocomplete setting) and redirects back; otherwise the form is rendered.
  """
  hue_collection = Collection.objects.get(id=collection_id)
  solr_collection = SolrApi(SOLR_URL.get(), request.user).collection_or_core(hue_collection)

  if request.method != 'POST':
    collection_form = CollectionForm(instance=hue_collection)
  else:
    collection_form = CollectionForm(request.POST, instance=hue_collection, user=request.user)
    if collection_form.is_valid(): # Check for autocomplete in data?
      searcher = SearchController(request.user)
      hue_collection = collection_form.save(commit=False)
      hue_collection.is_core_only = not searcher.is_collection(hue_collection.name)
      hue_collection.autocomplete = json.loads(request.POST.get('autocomplete'))
      hue_collection.save()
      return redirect(reverse('search:admin_collection_properties', kwargs={'collection_id': hue_collection.id}))
    request.error(_('Errors on the form: %s.') % collection_form.errors)

  return render('admin_collection_properties.mako', request, {
    'solr_collection': solr_collection,
    'hue_collection': hue_collection,
    'collection_form': collection_form,
    'collection_properties': json.dumps(hue_collection.properties_dict)
  })
Exemplo n.º 34
0
def index_fields_dynamic(request):
    """
    Ajax endpoint: list the dynamic fields of a collection via /luke.

    Only fields with a 'dynamicBase' property are returned, both as field
    objects and as grid-layout header fields. (The final response
    presumably follows this excerpt in the original file.)
    """
    result = {'status': -1, 'message': 'Error'}

    try:
        name = request.POST['name']
        hue_collection = Collection(name=name, label=name)

        dynamic_fields = SolrApi(SOLR_URL.get(),
                                 request.user).luke(hue_collection.name)

        result['message'] = ''
        result['fields'] = [
            Collection._make_field(name, properties)
            for name, properties in dynamic_fields['fields'].iteritems()
            if 'dynamicBase' in properties
        ]
        result['gridlayout_header_fields'] = [
            Collection._make_gridlayout_header_field({'name': name}, True)
            for name, properties in dynamic_fields['fields'].iteritems()
            if 'dynamicBase' in properties
        ]
        result['status'] = 0
    except Exception, e:
        result['message'] = unicode(str(e), "utf8")
Exemplo n.º 35
0
def update_document(request):
  """
  Ajax endpoint: push edited document fields to Solr as an atomic update.

  Only fields flagged 'hasChanged' (excluding '_version_') are sent as
  {"set": value} edits keyed on the document id; a '_version_' field, when
  present, is used for optimistic concurrency. RestException messages are
  unwrapped to the Solr error text when possible. (The final response
  presumably follows this excerpt in the original file.)
  """
  result = {'status': -1, 'message': 'Error'}

  if not can_edit_index(request.user):
    result['message'] = _('Permission to edit the document denied')
    return JsonResponse(result)

  try:
    collection = json.loads(request.POST.get('collection', '{}'))
    document = json.loads(request.POST.get('document', '{}'))
    doc_id = request.POST.get('id')

    if document['hasChanged']:
      edits = {
          "id": doc_id,
      }
      version = None # If there is a version, use it to avoid potential concurrent update conflicts

      for field in document['details']:
        if field['hasChanged'] and field['key'] != '_version_':
          edits[field['key']] = {"set": field['value']}
        if field['key'] == '_version_':
          version = field['value']

      result['update'] = SolrApi(SOLR_URL.get(), request.user).update(collection['name'], json.dumps([edits]), content_type='json', version=version)
      result['message'] = _('Document successfully updated.')
      result['status'] = 0
    else:
      result['status'] = 0
      result['message'] = _('Document has no modifications to change.')
  except RestException, e:
    try:
      result['message'] = json.loads(e.message)['error']['msg']
    except:
      LOG.exception('Failed to parse json response')
      result['message'] = force_unicode(e)
Exemplo n.º 36
0
def _create_facet(collection, user, facet_id, facet_label, facet_field,
                  widget_type):
    """
    Build the definition dict for a new dashboard facet.

    Widget kinds map to facet types: tree/heatmap/map widgets become
    'pivot', hit widgets 'function', bucket widgets 'nested', and the
    rest 'range' (when range properties can be derived from the field)
    or 'field'.
    """
    properties = {
        'sort': 'desc',
        'canRange': False,
        'stacked': False,
        'limit': 10,
        'mincount': 0,
        'isDate': False,
        'aggregate': 'unique'
    }

    if widget_type in ('tree-widget', 'heatmap-widget', 'map-widget'):
        facet_type = 'pivot'
    elif widget_type == 'hit-widget':
        facet_type = 'function'
    else:
        # Try to derive range properties (start/end/gap) from the field.
        solr_api = SolrApi(SOLR_URL.get(), user)
        range_properties = _new_range_facet(solr_api, collection, facet_field,
                                            widget_type)

        if range_properties:
            facet_type = 'range'
            properties.update(range_properties)
            # Remember the initial window so the UI can reset zooming.
            properties['initial_gap'] = properties['gap']
            properties['initial_start'] = properties['start']
            properties['initial_end'] = properties['end']
        else:
            facet_type = 'field'

        if widget_type == 'bucket-widget':
            facet_type = 'nested'
            properties['facets_form'] = {
                'field': '',
                'mincount': 1,
                'limit': 10,
                'aggregate': 'count'
            }
            properties['facets'] = []
            properties['scope'] = 'stack'

    # Pivot widgets get their own sub-facet form and scope settings.
    if widget_type in ('tree-widget', 'heatmap-widget', 'map-widget'):
        properties['mincount'] = 1
        properties['facets'] = []
        properties['stacked'] = True
        properties['facets_form'] = {'field': '', 'mincount': 1, 'limit': 5}

        if widget_type == 'map-widget':
            properties['scope'] = 'world'
            properties['limit'] = 100
        else:
            properties[
                'scope'] = 'stack' if widget_type == 'heatmap-widget' else 'tree'

    return {
        'id': facet_id,
        'label': facet_label,
        'field': facet_field,
        'type': facet_type,
        'widgetType': widget_type,
        'properties': properties
    }
Exemplo n.º 37
0
  def fields_data(self, user):
    """Return the collection's static schema fields as sorted field objects."""
    fields_json = SolrApi(SOLR_URL.get(), user).fields(self.name)
    field_attrs = fields_json['schema']['fields']

    return sorted(self._make_field(name, attrs) for name, attrs in field_attrs.iteritems())
Exemplo n.º 38
0
def _envelope_job(request,
                  file_format,
                  destination,
                  start_time=None,
                  lib_path=None):
    """Translate the import wizard's source/destination specs into an Envelope
    config and submit the ingest job.

    :param request: Django request; provides ``user`` and ``fs``.
    :param file_format: source spec; ``inputFormat`` is one of 'table', 'file',
        'stream', 'sfdc' plus format-specific keys.
    :param destination: sink spec; ``outputFormat`` is one of 'table', 'file',
        'index', 'stream'.
    :param start_time: optional job start time forwarded to the indexer.
    :param lib_path: currently ignored -- reset to None below (see TODO).
    :return: whatever ``EnvelopeIndexer.run`` returns for the submitted job.
    """
    collection_name = destination['name']
    indexer = EnvelopeIndexer(request.user, request.fs)

    lib_path = None  # Todo optional input field
    # NOTE(review): 'properties' is only assigned on some branches below (the
    # 'table' input path leaves it unbound until the final section) -- confirm
    # every reachable path defines it before it is read.
    input_path = None

    if file_format['inputFormat'] == 'table':
        # Input is a Hive table: read from its HDFS location.
        db = dbms.get(request.user)
        table_metadata = db.get_table(database=file_format['databaseName'],
                                      table_name=file_format['tableName'])
        input_path = table_metadata.path_location
    elif file_format['inputFormat'] == 'file':
        input_path = file_format["path"]
        properties = {'input_path': input_path, 'format': 'csv'}
    elif file_format['inputFormat'] == 'stream' and file_format[
            'streamSelection'] == 'flume':
        # Flume streams need no extra properties here.
        pass
    elif file_format['inputFormat'] in ('stream', 'sfdc'):
        if file_format['inputFormat'] == 'sfdc':
            # Salesforce connector credentials and endpoint.
            properties = {
                'streamSelection': file_format['streamSelection'],
                'streamUsername': file_format['streamUsername'],
                'streamPassword': file_format['streamPassword'],
                'streamToken': file_format['streamToken'],
                'streamEndpointUrl': file_format['streamEndpointUrl'],
                'streamObject': file_format['streamObject'],
            }
        elif file_format['streamSelection'] == 'kafka':
            manager = ManagerApi()
            properties = {
                "brokers": manager.get_kafka_brokers(),
                "topics": file_format['kafkaSelectedTopics'],
                "kafkaFieldType": file_format['kafkaFieldType'],
                "kafkaFieldDelimiter": file_format['kafkaFieldDelimiter'],
                "kafkaFieldNames": file_format['kafkaFieldNames'],
                "kafkaFieldTypes": file_format['kafkaFieldTypes']
            }

            # Windowing is hard-disabled for now; the dead branch documents
            # the "KafkaSQL" variant's window config.
            if True:
                properties['window'] = ''
            else:  # For "KafkaSQL"
                properties['window'] = '''
            window {
                enabled = true
                milliseconds = 60000
            }'''

        if destination['outputFormat'] == 'table':
            if destination['isTargetExisting']:
                # Todo: check if format matches
                pass
            else:
                sql = SQLIndexer(user=request.user,
                                 fs=request.fs).create_table_from_a_file(
                                     file_format, destination).get_str()
                print sql
            if destination['tableFormat'] == 'kudu':
                # Kudu tables are addressed through Impala.
                manager = ManagerApi()
                properties["output_table"] = "impala::%s" % collection_name
                properties["kudu_master"] = manager.get_kudu_master()
            else:
                properties['output_table'] = collection_name
        elif destination['outputFormat'] == 'file':
            properties['path'] = file_format["path"]
            if file_format['inputFormat'] == 'stream':
                properties['format'] = 'csv'
            else:
                properties['format'] = file_format['tableFormat']  # or csv
        elif destination['outputFormat'] == 'index':
            properties['collectionName'] = collection_name
            properties['connection'] = SOLR_URL.get()


# Not needed anymore
#       if destination['isTargetExisting']:
#         # Todo: check if format matches
#         pass
#       else:
#         client = SolrClient(request.user)
#         kwargs = {}
#         _create_solr_collection(request.user, request.fs, client, destination, collection_name, kwargs)

    if destination['outputFormat'] == 'stream':
        manager = ManagerApi()
        properties['brokers'] = manager.get_kafka_brokers()
        properties['topics'] = file_format['kafkaSelectedTopics']
        properties['kafkaFieldDelimiter'] = file_format['kafkaFieldDelimiter']

    properties["app_name"] = 'Data Ingest'
    properties["inputFormat"] = file_format['inputFormat']
    # NOTE(review): reads destination['ouputFormat'] (sic) while the branches
    # above test destination['outputFormat'] -- confirm the wizard model
    # really supplies both spellings.
    properties["ouputFormat"] = destination['ouputFormat']
    properties["streamSelection"] = file_format["streamSelection"]

    envelope = indexer.generate_config(properties)

    return indexer.run(request,
                       collection_name,
                       envelope,
                       input_path,
                       start_time=start_time,
                       lib_path=lib_path)
Exemplo n.º 39
0
 def test_is_solr_cloud_mode(self):
     """Smoke test: listing collections against the configured Solr must not raise."""
     api = SolrApi(SOLR_URL.get(), self.user)
     api.collections()
Exemplo n.º 40
0
        # Delete instance directory if we couldn't create a collection.
        try:
          zc.delete_path(root_node)
        except Exception, e:
          raise PopupException(_('Error in deleting Solr configurations.'), detail=e)
    else:
      # Non-solrcloud mode
      # Create instance directory locally.
      instancedir = os.path.join(CORE_INSTANCE_DIR.get(), name)
      if os.path.exists(instancedir):
        raise PopupException(_("Instance directory %s already exists! Please remove it from the file system.") % instancedir)
      tmp_path, solr_config_path = copy_configs(fields, unique_key_field, df, False)
      shutil.move(solr_config_path, instancedir)
      shutil.rmtree(tmp_path)

      api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())
      if not api.create_core(name, instancedir):
        # Delete instance directory if we couldn't create a collection.
        shutil.rmtree(instancedir)
        raise PopupException(_('Could not create collection. Check error logs for more info.'))

  def delete_collection(self, name, core):
    """
    Delete solr collection/core and instance dir
    """
    api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())
    if core:
      raise PopupException(_('Cannot remove Solr cores.'))

    if api.remove_collection(name):
      # Delete instance directory.
Exemplo n.º 41
0
    hue_collection = None

    if search_form.is_valid():
        collection_id = search_form.cleaned_data['collection']
        solr_query['q'] = search_form.cleaned_data['query'].encode('utf8')
        solr_query['fq'] = search_form.cleaned_data['fq']
        if search_form.cleaned_data['sort']:
            solr_query['sort'] = search_form.cleaned_data['sort']
        solr_query['rows'] = search_form.cleaned_data['rows'] or 15
        solr_query['start'] = search_form.cleaned_data['start'] or 0
        solr_query['facets'] = search_form.cleaned_data['facets'] or 1

        try:
            hue_collection = Collection.objects.get(id=collection_id)
            solr_query['collection'] = hue_collection.name
            response = SolrApi(SOLR_URL.get(),
                               request.user).query(solr_query, hue_collection)
        except Exception, e:
            error['message'] = unicode(str(e), "utf8")
    else:
        error['message'] = _('There is no collection to search.')

    if hue_collection is not None:
        response = augment_solr_response(response,
                                         hue_collection.facets.get_data())

    if request.GET.get('format') == 'json':
        return HttpResponse(json.dumps(response), mimetype="application/json")

    return render(
        'search.mako', request, {
Exemplo n.º 42
0
 def __init__(self, user, cluster):
     """Dashboard engine backed by Solr; delegates queries to a SolrApi client."""
     DashboardApi.__init__(self, user, cluster)
     # Build the Solr client after the base class has set up self.user.
     self.api = SolrApi(SOLR_URL.get(), self.user)
Exemplo n.º 43
0
def _envelope_job(request,
                  file_format,
                  destination,
                  start_time=None,
                  lib_path=None):
    """Build an Envelope config from the import wizard's source/destination
    specs, then either return the generated config (when 'show_command' is
    posted) or submit the ingest job.

    :param request: Django request; provides ``user``, ``fs`` and ``POST``.
    :param file_format: source spec; ``inputFormat`` is one of 'table',
        'file', 'stream', 'connector' plus format-specific keys.
    :param destination: sink spec; ``outputFormat`` is one of 'table',
        'stream', 'file', 'index'.
    :param start_time: optional job start time forwarded to the indexer.
    :param lib_path: currently ignored -- reset to None below (see TODO).
    :return: ``{'status': 0, 'commands': ...}`` for show_command, otherwise
        the result of ``EnvelopeIndexer.run``.
    """
    collection_name = destination['name']
    indexer = EnvelopeIndexer(request.user, request.fs)

    lib_path = None  # Todo optional input field
    # NOTE(review): 'properties' is only bound on some branches below (the
    # 'table' input path leaves it unbound) -- confirm all reachable paths
    # define it before it is read.
    input_path = None

    if file_format['inputFormat'] == 'table':
        # Input is a Hive table: read from its HDFS location.
        db = dbms.get(request.user)
        table_metadata = db.get_table(database=file_format['databaseName'],
                                      table_name=file_format['tableName'])
        input_path = table_metadata.path_location
    elif file_format['inputFormat'] == 'file':
        input_path = file_format["path"]
        properties = {'input_path': input_path, 'format': 'csv'}
    elif file_format['inputFormat'] == 'stream' and file_format[
            'streamSelection'] == 'flume':
        # Flume streams need no extra properties here.
        pass
    elif file_format['inputFormat'] == 'stream':
        if file_format['streamSelection'] == 'kafka':
            manager = ManagerApi()
            properties = {
                "brokers": manager.get_kafka_brokers(),
                "topics": file_format['kafkaSelectedTopics'],
                "kafkaFieldType": file_format['kafkaFieldType'],
                "kafkaFieldDelimiter": file_format['kafkaFieldDelimiter'],
            }

            # Navigator audit topics have a known schema: derive the field
            # names/types from the sampled columns instead of user input.
            if file_format.get(
                    'kafkaSelectedTopics') == 'NavigatorAuditEvents':
                schema_fields = MorphlineIndexer.get_kept_field_list(
                    file_format['sampleCols'])
                properties.update({
                    "kafkaFieldNames":
                    ', '.join([_field['name'] for _field in schema_fields]),
                    "kafkaFieldTypes":
                    ', '.join([_field['type'] for _field in schema_fields])
                })
            else:
                properties.update({
                    "kafkaFieldNames":
                    file_format['kafkaFieldNames'],
                    "kafkaFieldTypes":
                    file_format['kafkaFieldTypes']
                })

            # Windowing is hard-disabled for now; the dead branch documents
            # the "KafkaSQL" variant's window config.
            if True:
                properties['window'] = ''
            else:  # For "KafkaSQL"
                properties['window'] = '''
            window {
                enabled = true
                milliseconds = 60000
            }'''
    elif file_format['inputFormat'] == 'connector':
        if file_format['streamSelection'] == 'flume':
            properties = {
                'streamSelection':
                file_format['streamSelection'],
                'channelSourceHosts':
                file_format['channelSourceHosts'],
                'channelSourceSelectedHosts':
                file_format['channelSourceSelectedHosts'],
                'channelSourcePath':
                file_format['channelSourcePath'],
            }
        else:
            # sfdc
            properties = {
                'streamSelection': file_format['streamSelection'],
                'streamUsername': file_format['streamUsername'],
                'streamPassword': file_format['streamPassword'],
                'streamToken': file_format['streamToken'],
                'streamEndpointUrl': file_format['streamEndpointUrl'],
                'streamObject': file_format['streamObject'],
            }

    if destination['outputFormat'] == 'table':
        if destination['isTargetExisting']:  # Todo: check if format matches
            pass
        else:
            destination['importData'] = False  # Avoid LOAD DATA
            if destination['tableFormat'] == 'kudu':
                properties['kafkaFieldNames'] = properties[
                    'kafkaFieldNames'].lower(
                    )  # Kudu names should be all lowercase
            # Create table
            if not request.POST.get('show_command'):
                SQLIndexer(user=request.user,
                           fs=request.fs).create_table_from_a_file(
                               file_format, destination).execute(request)

        if destination['tableFormat'] == 'kudu':
            # Kudu tables are addressed through Impala.
            manager = ManagerApi()
            properties["output_table"] = "impala::%s" % collection_name
            properties["kudu_master"] = manager.get_kudu_master()
        else:
            properties['output_table'] = collection_name
    elif destination['outputFormat'] == 'stream':
        manager = ManagerApi()
        properties['brokers'] = manager.get_kafka_brokers()
        properties['topics'] = file_format['kafkaSelectedTopics']
        properties['kafkaFieldDelimiter'] = file_format['kafkaFieldDelimiter']
    elif destination['outputFormat'] == 'file':
        properties['path'] = file_format["path"]
        if file_format['inputFormat'] == 'stream':
            properties['format'] = 'csv'
        else:
            properties['format'] = file_format['tableFormat']  # or csv
    elif destination['outputFormat'] == 'index':
        properties['collectionName'] = collection_name
        properties['connection'] = SOLR_URL.get()

    properties["app_name"] = 'Data Ingest'
    properties["inputFormat"] = file_format['inputFormat']
    # NOTE(review): reads destination['ouputFormat'] (sic) while the branches
    # above test destination['outputFormat'] -- confirm the wizard model
    # really supplies both spellings.
    properties["ouputFormat"] = destination['ouputFormat']
    properties["streamSelection"] = file_format["streamSelection"]

    configs = indexer.generate_config(properties)

    if request.POST.get('show_command'):
        return {'status': 0, 'commands': configs['envelope.conf']}
    else:
        return indexer.run(request,
                           collection_name,
                           configs,
                           input_path,
                           start_time=start_time,
                           lib_path=lib_path)
Exemplo n.º 44
0
class SolrApi(Api):
    """Notebook/editor connector that executes Solr SQL statements through the
    native Solr API.

    Python 2 module: note ``async`` (a Py3 keyword) used as a parameter name
    in ``get_sample_data``.
    """

    def __init__(self, user, interpreter=None):
        Api.__init__(self, user, interpreter=interpreter)
        # Interpreter options from the config (e.g. the default 'collection').
        self.options = interpreter['options']

    @query_error_handler
    def execute(self, notebook, snippet):
        """Run the snippet's SQL against a Solr collection and return a
        synchronous notebook result payload."""
        from search.conf import SOLR_URL

        api = NativeSolrApi(SOLR_URL.get(), self.user.username)

        # Collection precedence: interpreter option, then snippet database,
        # falling back to the first collection Solr reports.
        collection = self.options.get('collection') or snippet.get('database')
        if not collection or collection == 'default':
            collection = api.collections2()[0]

        response = api.sql(collection, snippet['statement'])

        # The last doc is metadata, not data.
        info = response['result-set']['docs'].pop(
            -1)  # EOF, RESPONSE_TIME, EXCEPTION
        if info.get('EXCEPTION'):
            raise QueryError(info['EXCEPTION'])

        # Union of all column names across docs, in first-seen order.
        headers = []
        for row in response['result-set']['docs']:
            for col in row.keys():
                if col not in headers:
                    headers.append(col)

        data = [[doc.get(col) for col in headers]
                for doc in response['result-set']['docs']]
        has_result_set = bool(data)

        return {
            'sync': True,
            'has_result_set': has_result_set,
            'modified_row_count': 0,
            'result': {
                'has_more':
                False,
                'data':
                data if has_result_set else [],
                'meta': [{
                    'name': col,
                    'type': '',
                    'comment': ''
                } for col in headers] if has_result_set else [],
                'type':
                'table'
            },
            'statement_id': 0,
            'has_more_statements': False,
            'statements_count': 1
        }

    @query_error_handler
    def check_status(self, notebook, snippet):
        """Queries are synchronous, so results are always available."""
        return {'status': 'available'}

    @query_error_handler
    def fetch_result(self, notebook, snippet, rows, start_over):
        """All rows are returned by execute(); nothing more to fetch."""
        return {'has_more': False, 'data': [], 'meta': [], 'type': 'table'}

    @query_error_handler
    def fetch_result_metadata(self):
        """No extra result metadata for Solr results."""
        pass

    @query_error_handler
    def cancel(self, notebook, snippet):
        """No-op: synchronous queries cannot be cancelled."""
        return {'status': 0}

    @query_error_handler
    def get_log(self, notebook, snippet, startFrom=None, size=None):
        """Solr provides no execution log for SQL statements."""
        return 'No logs'

    def download(self, notebook, snippet, format):
        """Result download is not implemented for this connector."""
        raise PopupException('Downloading is not supported yet')

    @query_error_handler
    def close_statement(self, snippet):
        """Nothing to close; signal with a -1 status."""
        return {'status': -1}

    @query_error_handler
    def autocomplete(self,
                     snippet,
                     database=None,
                     table=None,
                     column=None,
                     nested=None):
        """Return databases (collections), tables, or columns for the
        editor's autocomplete, depending on which arguments are set."""
        from search.conf import SOLR_URL
        api = NativeSolrApi(SOLR_URL.get(), self.user.username)
        assist = Assist(self, self.user, api)
        response = {'status': -1}

        if database is None:
            response['databases'] = [
                self.options.get('collection') or snippet.get('database')
                or 'default'
            ]
        elif table is None:
            response['tables_meta'] = assist.get_tables(database)
        else:
            columns = assist.get_columns(database, table)
            response['columns'] = [col['name'] for col in columns]
            response['extended_columns'] = columns

        response['status'] = 0
        return response

    @query_error_handler
    def get_sample_data(self,
                        snippet,
                        database=None,
                        table=None,
                        column=None,
                        async=False):
        """Fetch sample rows for the assist panel, via SQL or the plain
        Solr select handler depending on the snippet source."""
        from search.conf import SOLR_URL
        db = NativeSolrApi(SOLR_URL.get(), self.user)

        assist = Assist(self, self.user, db)
        response = {'status': -1}

        if snippet.get('source') == 'sql':
            sample_data = assist.get_sample_data_sql(database, table, column)
        else:
            sample_data = assist.get_sample_data(database, table, column)

        if sample_data:
            response['status'] = 0
            response['headers'] = sample_data['headers']
            response['full_headers'] = sample_data.get('full_headers')
            response['rows'] = sample_data['rows']
        else:
            response['message'] = _('Failed to get sample data.')

        return response
Exemplo n.º 45
0
 def is_core(self, core_name):
     """Return True when ``core_name`` is an existing Solr core."""
     return core_name in SolrApi(SOLR_URL.get()).cores()
Exemplo n.º 46
0
def _create_facet(collection, user, facet_id, facet_label, facet_field, widget_type):
  """Build the facet-definition dict for a new dashboard widget.

  The facet 'type' and its 'properties' depend on ``widget_type``; for
  non-pivot widgets, Solr is probed (via ``_new_range_facet``) to decide
  whether the field supports a range facet.

  :param collection: dashboard collection used for range-facet probing.
  :param user: user the SolrApi call is made as.
  :param facet_id: identifier for the new facet.
  :param facet_label: display label.
  :param facet_field: Solr field the facet is built on.
  :param widget_type: dashboard widget kind (e.g. 'pie2-widget').
  :return: dict with id, label, field, type, widgetType and properties.
  """
  # Defaults shared by all widget types; branches below override/extend them.
  properties = {
    'sort': 'desc',
    'canRange': False,
    'stacked': False,
    'limit': 10,
    'mincount': 0,
    'isDate': False,
    'aggregate': {'function': 'unique', 'ops': [], 'percentiles': [{'value': 50}]}
  }

  if widget_type in ('tree-widget', 'heatmap-widget', 'map-widget'):
    facet_type = 'pivot'
  elif widget_type == 'gradient-map-widget':
    facet_type = 'nested'
    properties['facets'] = []
    properties['facets_form'] = {'field': '', 'mincount': 1, 'limit': 10, 'aggregate': 'count'}
    properties['scope'] = 'world'
    properties['limit'] = 100
  else:
    # Probe Solr to see if the field qualifies for a range facet.
    solr_api = SolrApi(SOLR_URL.get(), user)
    range_properties = _new_range_facet(solr_api, collection, facet_field, widget_type)

    if range_properties:
      facet_type = 'range'
      properties.update(range_properties)
      # Remember the initial window so the UI can reset zoom/pan.
      properties['initial_gap'] = properties['gap']
      properties['initial_start'] = properties['start']
      properties['initial_end'] = properties['end']
    else:
      facet_type = 'field'

    if widget_type in ('bucket-widget', 'pie2-widget', 'timeline-widget', 'tree2-widget', 'text-facet-widget', 'hit-widget'):
      if widget_type == 'text-facet-widget':
        properties['type'] = facet_type
      if widget_type == 'hit-widget':
        facet_type = 'function'
      else:
        facet_type = 'nested'
      properties['facets_form'] = {'field': '', 'mincount': 1, 'limit': 10, 'aggregate': {'function': 'unique', 'ops': [], 'percentiles': [{'value': 50}]}}
      properties['facets'] = []
      properties['domain'] = {'blockParent': [], 'blockChildren': []}
      if widget_type == 'pie2-widget':
        properties['scope'] = 'stack'
        properties['timelineChartType'] = 'bar'
      elif widget_type == 'tree2-widget':
        properties['scope'] = 'tree'
        properties['facets_form']['limit'] = 5
        properties['isOldPivot'] = True
      else:
        properties['scope'] = 'stack'
        properties['timelineChartType'] = 'bar'

  # Pivot widgets get their own, simpler property set.
  if widget_type in ('tree-widget', 'heatmap-widget', 'map-widget'):
    properties['mincount'] = 1
    properties['facets'] = []
    properties['stacked'] = True
    properties['facets_form'] = {'field': '', 'mincount': 1, 'limit': 5}

    if widget_type == 'map-widget':
      properties['scope'] = 'world'
      properties['limit'] = 100
    else:
      properties['scope'] = 'stack' if widget_type == 'heatmap-widget' else 'tree'

  return {
    'id': facet_id,
    'label': facet_label,
    'field': facet_field,
    'type': facet_type,
    'widgetType': widget_type,
    'properties': properties
  }
Exemplo n.º 47
0
    def create_collection(self,
                          name,
                          fields,
                          unique_key_field='id',
                          df='text'):
        """Create a Solr collection (SolrCloud) or core (non-cloud) and its
        instance directory.

        A schema.xml is generated from ``fields`` so the uniqueKey and the
        default search field can be set.

        :param name: collection/core name.
        :param fields: field definitions used to build schema.xml.
        :param unique_key_field: schema uniqueKey field name.
        :param df: default search field.
        :raises PopupException: when the instance directory or the
            collection/core cannot be created.
        """
        if self.is_solr_cloud_mode():
            # solrcloud mode

            # Need to remove path afterwards
            tmp_path, solr_config_path = utils.copy_configs(
                fields, unique_key_field, df, True)

            # Create instance directory via solrctl.
            solrctl_path = get_solrctl_path()

            process = subprocess.Popen([
                solrctl_path, "instancedir", "--create", name, solr_config_path
            ],
                                       stdout=subprocess.PIPE,
                                       stderr=subprocess.PIPE,
                                       env={
                                           'SOLR_ZK_ENSEMBLE':
                                           conf.SOLR_ZK_ENSEMBLE.get()
                                       })
            status = process.wait()

            # Don't want directories laying around
            shutil.rmtree(tmp_path)

            if status != 0:
                LOG.error(
                    "Could not create instance directory.\nOutput: %s\nError: %s"
                    % process.communicate())
                raise PopupException(
                    _('Could not create instance directory. '
                      'Check if solr_zk_ensemble and solrctl_path are correct in Hue config [indexer].'
                      ))

            api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())
            if not api.create_collection(name):
                # Delete instance directory if we couldn't create a collection.
                process = subprocess.Popen(
                    [solrctl_path, "instancedir", "--delete", name],
                    stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE,
                    env={'SOLR_ZK_ENSEMBLE': conf.SOLR_ZK_ENSEMBLE.get()})
                if process.wait() != 0:
                    # Fixed log-message typo: "Cloud not" -> "Could not".
                    LOG.error(
                        "Could not delete collection.\nOutput: %s\nError: %s" %
                        process.communicate())
                raise PopupException(
                    _('Could not create collection. Check error logs for more info.'
                      ))
        else:
            # Non-solrcloud mode
            # Create instance directory locally.
            instancedir = os.path.join(conf.CORE_INSTANCE_DIR.get(), name)
            if os.path.exists(instancedir):
                raise PopupException(
                    _("Instance directory %s already exists! Please remove it from the file system."
                      ) % instancedir)
            tmp_path, solr_config_path = utils.copy_configs(
                fields, unique_key_field, df, False)
            shutil.move(solr_config_path, instancedir)
            shutil.rmtree(tmp_path)

            api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())
            if not api.create_core(name, instancedir):
                # Delete instance directory if we couldn't create a collection.
                shutil.rmtree(instancedir)
                raise PopupException(
                    _('Could not create collection. Check error logs for more info.'
                      ))
Exemplo n.º 48
0
 def __init__(self, user):
     """Bind a security-aware SolrApi client to the given user."""
     self.user = user
     self.api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())
Exemplo n.º 49
0
 def test_is_solr_cloud_mode(self):
     # Disabled: the collections() call no longer works against current Solr.
     raise SkipTest  # collections() no longer work
     SolrApi(SOLR_URL.get(), self.user).collections()  # unreachable, kept for reference
Exemplo n.º 50
0
def _envelope_job(request, file_format, destination, start_time=None, lib_path=None):
  """Build and submit an Envelope ingest job from the wizard's specs.

  :param request: Django request; provides ``user`` and ``fs``.
  :param file_format: source spec; ``inputFormat`` in ('table', 'file', 'stream').
  :param destination: sink spec; ``outputFormat`` in ('table', 'file', 'index').
  :param start_time: optional job start time forwarded to the indexer.
  :param lib_path: ignored -- overridden with a hard-coded jar path below.
  :return: result of ``EnvelopeIndexer.run`` for the generated config.
  """
  collection_name = destination['name']
  indexer = EnvelopeIndexer(request.user, request.fs)

  # NOTE(review): hard-coded jar overrides the caller-provided lib_path.
  lib_path = '/tmp/envelope-0.5.0.jar'
  # NOTE(review): 'properties' is only assigned on some branches below (the
  # 'table' input path leaves it unbound) -- confirm before it is read.
  input_path = None

  if file_format['inputFormat'] == 'table':
    # Input is a Hive table: read from its HDFS location.
    db = dbms.get(request.user)
    table_metadata = db.get_table(database=file_format['databaseName'], table_name=file_format['tableName'])
    input_path = table_metadata.path_location
  elif file_format['inputFormat'] == 'file':
    input_path = '${nameNode}%s' % file_format["path"]
    properties = {
      'format': 'json'
    }
  elif file_format['inputFormat'] == 'stream':
    if file_format['streamSelection'] == 'sfdc':
      # Salesforce connector credentials and endpoint.
      properties = {
        'streamSelection': file_format['streamSelection'],
        'streamUsername': file_format['streamUsername'],
        'streamPassword': file_format['streamPassword'],
        'streamToken': file_format['streamToken'],
        'streamEndpointUrl': file_format['streamEndpointUrl'],
        'streamObject': file_format['streamObject'],
      }
    elif file_format['streamSelection'] == 'kafka':
      manager = ManagerApi()
      properties = {
        "brokers": manager.get_kafka_brokers(),
        "output_table": "impala::%s" % collection_name,
        "topics": file_format['kafkaSelectedTopics'],
        "kafkaFieldType": file_format['kafkaFieldType'],
        "kafkaFieldDelimiter": file_format['kafkaFieldDelimiter'],
        "kafkaFieldNames": file_format['kafkaFieldNames'],
        "kafkaFieldTypes": file_format['kafkaFieldTypes']
      }

    if destination['outputFormat'] == 'table':
      if destination['isTargetExisting']:
        # Todo: check if format matches
        pass
      else:
        sql = SQLIndexer(user=request.user, fs=request.fs).create_table_from_a_file(file_format, destination).get_str()
        print sql
      if destination['tableFormat'] == 'kudu':
        # Kudu tables are addressed through Impala.
        manager = ManagerApi()
        properties["output_table"] = "impala::%s" % collection_name
        properties["kudu_master"] = manager.get_kudu_master()
      else:
        properties['output_table'] = collection_name
    elif destination['outputFormat'] == 'file':
      properties['path'] = file_format["path"]
      properties['format'] = file_format['tableFormat'] # or csv
    elif destination['outputFormat'] == 'index':
      properties['collectionName'] = collection_name
      properties['connection'] = SOLR_URL.get()
      if destination['isTargetExisting']:
        # Todo: check if format matches
        pass
      else:
        # Create the target Solr collection before indexing into it.
        client = SolrClient(request.user)
        kwargs = {}
        _create_solr_collection(request.user, request.fs, client, destination, collection_name, kwargs)

  properties["app_name"] = 'Data Ingest'
  properties["inputFormat"] = file_format['inputFormat']
  # NOTE(review): reads destination['ouputFormat'] (sic) while the branches
  # above test destination['outputFormat'] -- confirm the model supplies both.
  properties["ouputFormat"] = destination['ouputFormat']
  properties["streamSelection"] = file_format["streamSelection"]

  envelope = indexer.generate_config(properties)

  return indexer.run(request, collection_name, envelope, input_path, start_time=start_time, lib_path=lib_path)
Exemplo n.º 51
0
 def is_collection(self, collection_name):
     """Return True when ``collection_name`` is an existing Solr collection."""
     return collection_name in SolrApi(SOLR_URL.get()).collections()
Exemplo n.º 52
0
 def get_solr_collection(self):
   """List the Solr collections visible through the configured Solr URL."""
   api = SolrApi(SOLR_URL.get(), self.user)
   return api.collections()
Exemplo n.º 53
0
def admin_collection_schema(request, collection_id):
    """Return the Solr schema of a dashboard collection as a JSON response."""
    collection = Collection.objects.get(id=collection_id)
    raw_schema = SolrApi(SOLR_URL.get()).schema(collection.name)

    payload = json.dumps({'solr_schema': raw_schema.decode('utf-8')})
    return HttpResponse(payload, mimetype="application/json")
Exemplo n.º 54
0
 def __init__(self, user, api=None):
     """Wrap a SolrApi client; build one from the global config when none is injected."""
     self.user = user
     if api is None:
         api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())
     self.api = api
Exemplo n.º 55
0
Arquivo: solr.py Projeto: ziq211/hue
            response['columns'] = [col['name'] for col in columns]
            response['extended_columns'] = columns

        response['status'] = 0
        return response

    @query_error_handler
    def get_sample_data(self,
                        snippet,
                        database=None,
                        table=None,
                        column=None,
                        async=False,
                        operation=None):
        from search.conf import SOLR_URL
        db = NativeSolrApi(SOLR_URL.get(), self.user)

        assist = Assist(self, self.user, db)
        response = {'status': -1}

        if snippet.get('source') == 'sql':
            sample_data = assist.get_sample_data_sql(database, table, column)
        else:
            sample_data = assist.get_sample_data(database, table, column)

        if sample_data:
            response['status'] = 0
            response['headers'] = sample_data['headers']
            response['full_headers'] = sample_data.get('full_headers')
            response['rows'] = sample_data['rows']
        else:
Exemplo n.º 56
0
def _create_facet(collection, user, facet_id, facet_label, facet_field,
                  widget_type):
    properties = {
        'sort': 'desc',
        'canRange': False,
        'stacked': False,
        'limit': 10,
        'mincount': 0,
        'isDate': False,
        'aggregate': {
            'function': 'unique',
            'ops': [],
            'percentiles': [{
                'value': 50
            }]
        }
    }

    if widget_type in ('tree-widget', 'heatmap-widget', 'map-widget'):
        facet_type = 'pivot'
    elif widget_type == 'gradient-map-widget':
        facet_type = 'nested'
        properties['facets'] = []
        properties['domain'] = {'blockParent': [], 'blockChildren': []}
        properties['facets_form'] = {
            'field': '',
            'mincount': 1,
            'limit': 10,
            'aggregate': {
                'function': 'unique',
                'ops': [],
                'percentiles': [{
                    'value': 50
                }]
            }
        }
        properties['scope'] = 'world'
        properties['limit'] = 100
    else:
        solr_api = SolrApi(SOLR_URL.get(), user)
        range_properties = _new_range_facet(solr_api, collection, facet_field,
                                            widget_type)

        if range_properties:
            facet_type = 'range'
            properties.update(range_properties)
            properties['initial_gap'] = properties['gap']
            properties['initial_start'] = properties['start']
            properties['initial_end'] = properties['end']
        else:
            facet_type = 'field'

        if widget_type in ('bucket-widget', 'pie2-widget', 'timeline-widget',
                           'tree2-widget', 'text-facet-widget', 'hit-widget'):
            if widget_type == 'text-facet-widget':
                properties['type'] = facet_type
            if widget_type == 'hit-widget':
                facet_type = 'function'
            else:
                facet_type = 'nested'
            properties['facets_form'] = {
                'field': '',
                'mincount': 1,
                'limit': 10,
                'aggregate': {
                    'function': 'unique',
                    'ops': [],
                    'percentiles': [{
                        'value': 50
                    }]
                }
            }
            properties['facets'] = []
            properties['domain'] = {'blockParent': [], 'blockChildren': []}

            if widget_type == 'pie2-widget':
                properties['scope'] = 'stack'
                properties['timelineChartType'] = 'bar'
            elif widget_type == 'tree2-widget':
                properties['scope'] = 'tree'
                properties['facets_form']['limit'] = 5
                properties['isOldPivot'] = True
            else:
                properties['scope'] = 'stack'
                properties['timelineChartType'] = 'bar'

    if widget_type in ('tree-widget', 'heatmap-widget', 'map-widget'):
        properties['mincount'] = 1
        properties['facets'] = []
        properties['stacked'] = True
        properties['facets_form'] = {'field': '', 'mincount': 1, 'limit': 5}

        if widget_type == 'map-widget':
            properties['scope'] = 'world'
            properties['limit'] = 100
        else:
            properties[
                'scope'] = 'stack' if widget_type == 'heatmap-widget' else 'tree'

    return {
        'id': facet_id,
        'label': facet_label,
        'field': facet_field,
        'type': facet_type,
        'widgetType': widget_type,
        'properties': properties,
        # Hue 4+
        'template': {
            "showFieldList": True,
            "showGrid": False,
            "showChart": True,
            "chartSettings": {
                'chartType':
                'pie' if widget_type == 'pie2-widget' else
                ('timeline' if widget_type == 'timeline-widget' else
                 ('gradientmap'
                  if widget_type == 'gradient-map-widget' else 'bars')),
                'chartSorting':
                'none',
                'chartScatterGroup':
                None,
                'chartScatterSize':
                None,
                'chartScope':
                'world',
                'chartX':
                None,
                'chartYSingle':
                None,
                'chartYMulti': [],
                'chartData': [],
                'chartMapLabel':
                None,
            },
            "fieldsAttributes": [],
            "fieldsAttributesFilter": "",
            "filteredAttributeFieldsAll": True,
            "fields": [],
            "fieldsSelected": [],
            "leafletmap": {
                'latitudeField': None,
                'longitudeField': None,
                'labelField': None
            },  # Use own?
            'leafletmapOn': False,
            'isGridLayout': False,
            "hasDataForChart": True,
            "rows": 25,
        }
    }