Example No. 1
def create_index(plugin, mapping, docs):
    '''
    Creates a static back compat index (.zip) with mappings using fields defined in plugins.
    '''

    logging.basicConfig(format='[%(levelname)s] [%(asctime)s] %(message)s',
                        level=logging.INFO,
                        datefmt='%Y-%m-%d %I:%M:%S %p')
    logging.getLogger('elasticsearch').setLevel(logging.ERROR)
    logging.getLogger('urllib3').setLevel(logging.WARN)

    tmp_dir = tempfile.mkdtemp()
    plugin_installed = False
    node = None
    try:
        data_dir = os.path.join(tmp_dir, 'data')
        repo_dir = os.path.join(tmp_dir, 'repo')
        logging.info('Temp data dir: %s' % data_dir)
        logging.info('Temp repo dir: %s' % repo_dir)

        version = '2.0.0'
        classifier = '%s-%s' % (plugin, version)
        index_name = 'index-%s' % classifier

        # Download old ES releases if necessary:
        release_dir = os.path.join('backwards', 'elasticsearch-%s' % version)
        if not os.path.exists(release_dir):
            fetch_version(version)

        create_bwc_index.install_plugin(version, release_dir, plugin)
        plugin_installed = True
        node = create_bwc_index.start_node(version,
                                           release_dir,
                                           data_dir,
                                           repo_dir,
                                           cluster_name=index_name)
        client = create_bwc_index.create_client()
        put_plugin_mappings(client, index_name, mapping, docs)
        create_bwc_index.shutdown_node(node)

        print('%s server output:\n%s' %
              (version, node.stdout.read().decode('utf-8')))
        node = None
        create_bwc_index.compress_index(
            classifier, tmp_dir,
            'plugins/%s/src/test/resources/indices/bwc' % plugin)
    finally:
        if node is not None:
            create_bwc_index.shutdown_node(node)
        if plugin_installed:
            create_bwc_index.remove_plugin(version, release_dir, plugin)
        shutil.rmtree(tmp_dir)
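A minimal sketch of how the enclosing script might call create_index for a mapper plugin. The imports, plugin name, field type, and sample documents below are illustrative assumptions; the page only shows the function itself, and helpers such as fetch_version and put_plugin_mappings are defined elsewhere in the original script.

# Assumed context (not shown on this page): the enclosing script imports these
# modules and defines fetch_version() and put_plugin_mappings().
import logging
import os
import shutil
import tempfile

import create_bwc_index

if __name__ == '__main__':
    # Hypothetical invocation; the plugin name, field type, and documents are
    # placeholders, not values taken from the original script.
    mapping = {'properties': {'hash': {'type': 'murmur3'}}}
    docs = [{'hash': 'value-%d' % i} for i in range(10)]
    create_index('mapper-murmur3', mapping, docs)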
Example No. 2
def main():
    '''
    Creates a static back compat index (.zip) with conflicting mappings.
    '''

    logging.basicConfig(format='[%(levelname)s] [%(asctime)s] %(message)s',
                        level=logging.INFO,
                        datefmt='%Y-%m-%d %I:%M:%S %p')
    logging.getLogger('elasticsearch').setLevel(logging.ERROR)
    logging.getLogger('urllib3').setLevel(logging.WARN)

    tmp_dir = tempfile.mkdtemp()
    node = None  # initialized so the finally block can safely test it
    try:
        data_dir = os.path.join(tmp_dir, 'data')
        repo_dir = os.path.join(tmp_dir, 'repo')
        logging.info('Temp data dir: %s' % data_dir)
        logging.info('Temp repo dir: %s' % repo_dir)

        version = '1.7.0'
        classifier = 'conflicting-mappings-%s' % version
        index_name = 'index-%s' % classifier

        # Download old ES releases if necessary:
        release_dir = os.path.join('backwards', 'elasticsearch-%s' % version)
        if not os.path.exists(release_dir):
            fetch_version(version)

        node = create_bwc_index.start_node(version,
                                           release_dir,
                                           data_dir,
                                           repo_dir,
                                           cluster_name=index_name)
        client = create_bwc_index.create_client()

        put_conflicting_mappings(client, index_name)
        create_bwc_index.shutdown_node(node)
        print('%s server output:\n%s' %
              (version, node.stdout.read().decode('utf-8')))
        node = None
        create_bwc_index.compress_index(
            classifier, tmp_dir,
            'core/src/test/resources/org/elasticsearch/action/admin/indices/upgrade'
        )
    finally:
        if node is not None:
            create_bwc_index.shutdown_node(node)
        shutil.rmtree(tmp_dir)
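put_conflicting_mappings is defined elsewhere in the original script. A rough sketch of what such a helper could look like against a 1.x-era Python client is shown below; the helper name, type names, field, and document bodies are assumptions for illustration only.

def put_conflicting_mappings_sketch(client, index_name):
    # Illustration only: create the index, register two mapping types whose
    # shared field disagrees on analysis settings, then index a doc into each.
    client.indices.create(index=index_name)
    client.cluster.health(index=index_name, wait_for_status='yellow')
    client.indices.put_mapping(index=index_name, doc_type='type1', body={
        'type1': {'properties': {'foo': {'type': 'string', 'index': 'analyzed'}}}})
    client.indices.put_mapping(index=index_name, doc_type='type2', body={
        'type2': {'properties': {'foo': {'type': 'string', 'index': 'not_analyzed'}}}})
    client.index(index=index_name, doc_type='type1', body={'foo': 'bar'})
    client.index(index=index_name, doc_type='type2', body={'foo': 'bar'})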
def main():
  '''
  Creates a back compat index (.zip) using v0.20 and then creates a snapshot of it using v1.1
  '''
  
  logging.basicConfig(format='[%(levelname)s] [%(asctime)s] %(message)s', level=logging.INFO,
                      datefmt='%Y-%m-%d %I:%M:%S %p')
  logging.getLogger('elasticsearch').setLevel(logging.ERROR)
  logging.getLogger('urllib3').setLevel(logging.WARN)

  tmp_dir = tempfile.mkdtemp()
  node = None  # initialized so the finally block can safely test it
  try:
    data_dir = os.path.join(tmp_dir, 'data')
    logging.info('Temp data dir: %s' % data_dir)

    first_version = '0.20.6'
    second_version = '1.1.2'
    index_name = 'index-%s-and-%s' % (first_version, second_version)

    # Download old ES releases if necessary:
    release_dir = os.path.join('backwards', 'elasticsearch-%s' % first_version)
    if not os.path.exists(release_dir):
      fetch_version(first_version)

    node = create_bwc_index.start_node(first_version, release_dir, data_dir, cluster_name=index_name)
    client = create_bwc_index.create_client()

    # Creates the index & indexes docs w/ first_version:
    create_bwc_index.generate_index(client, first_version, index_name)

    # Make sure we write segments:
    flush_result = client.indices.flush(index=index_name)
    if not flush_result['ok']:
      raise RuntimeError('flush failed: %s' % str(flush_result))

    create_bwc_index.shutdown_node(node)
    print('%s server output:\n%s' % (first_version, node.stdout.read().decode('utf-8')))
    node = None

    release_dir = os.path.join('backwards', 'elasticsearch-%s' % second_version)
    if not os.path.exists(release_dir):
      fetch_version(second_version)

    # Now use second_version to snapshot the index:
    node = create_bwc_index.start_node(second_version, release_dir, data_dir, cluster_name=index_name)
    client = create_bwc_index.create_client()

    repo_dir = os.path.join(tmp_dir, 'repo')
    create_bwc_index.snapshot_index(client, second_version, repo_dir)
    create_bwc_index.shutdown_node(node)
    print('%s server output:\n%s' % (second_version, node.stdout.read().decode('utf-8')))

    create_bwc_index.compress(tmp_dir, "src/test/resources/org/elasticsearch/bwcompat", 'unsupportedrepo-%s.zip' % first_version, 'repo')

    node = None
  finally:
    if node is not None:
      create_bwc_index.shutdown_node(node)
    shutil.rmtree(tmp_dir)
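fetch_version is another helper from the original tooling: it downloads and unpacks an old Elasticsearch release into the backwards/ directory when it is missing. A hedged approximation, assuming Python 3, a .tar.gz artifact, and the historical download URL layout (the real helper may verify checksums or use a different mirror):

import os
import tarfile
import urllib.request

def fetch_version_sketch(version):
    # Approximation only; see the assumptions stated above.
    url = ('https://download.elastic.co/elasticsearch/elasticsearch/'
           'elasticsearch-%s.tar.gz' % version)
    os.makedirs('backwards', exist_ok=True)
    archive = os.path.join('backwards', 'elasticsearch-%s.tar.gz' % version)
    urllib.request.urlretrieve(url, archive)
    with tarfile.open(archive) as tar:
        tar.extractall(path='backwards')  # yields backwards/elasticsearch-<version>/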
Example No. 6
def main():
    '''
    Creates a static back compat index (.zip) with mixed 0.20 (Lucene 3.x) and 0.90 (Lucene 4.x) segments.
    '''

    logging.basicConfig(format='[%(levelname)s] [%(asctime)s] %(message)s',
                        level=logging.INFO,
                        datefmt='%Y-%m-%d %I:%M:%S %p')
    logging.getLogger('elasticsearch').setLevel(logging.ERROR)
    logging.getLogger('urllib3').setLevel(logging.WARN)

    tmp_dir = tempfile.mkdtemp()
    node = None  # initialized so the finally block can safely test it
    try:
        data_dir = os.path.join(tmp_dir, 'data')
        repo_dir = os.path.join(tmp_dir, 'repo')
        logging.info('Temp data dir: %s' % data_dir)
        logging.info('Temp repo dir: %s' % repo_dir)

        first_version = '0.20.6'
        second_version = '0.90.6'
        index_name = 'index-%s-and-%s' % (first_version, second_version)

        # Download old ES releases if necessary:
        release_dir = os.path.join('backwards',
                                   'elasticsearch-%s' % first_version)
        if not os.path.exists(release_dir):
            fetch_version(first_version)

        node = create_bwc_index.start_node(first_version,
                                           release_dir,
                                           data_dir,
                                           repo_dir,
                                           cluster_name=index_name)
        client = create_bwc_index.create_client()

        # Creates the index & indexes docs w/ first_version:
        create_bwc_index.generate_index(client, first_version, index_name)

        # Make sure we write segments:
        flush_result = client.indices.flush(index=index_name)
        if not flush_result['ok']:
            raise RuntimeError('flush failed: %s' % str(flush_result))

        segs = client.indices.segments(index=index_name)
        shards = segs['indices'][index_name]['shards']
        if len(shards) != 1:
            raise RuntimeError('index should have 1 shard but got %s' %
                               len(shards))

        first_version_segs = shards['0'][0]['segments'].keys()

        create_bwc_index.shutdown_node(node)
        print('%s server output:\n%s' %
              (first_version, node.stdout.read().decode('utf-8')))
        node = None

        release_dir = os.path.join('backwards',
                                   'elasticsearch-%s' % second_version)
        if not os.path.exists(release_dir):
            fetch_version(second_version)

        # Now also index docs with second_version:
        node = create_bwc_index.start_node(second_version,
                                           release_dir,
                                           data_dir,
                                           repo_dir,
                                           cluster_name=index_name)
        client = create_bwc_index.create_client()

        # If we index too many docs, the random refresh/flush causes the ancient segments to be merged away:
        num_docs = 10
        create_bwc_index.index_documents(client, index_name, 'doc', num_docs)

        # Make sure we get a segment:
        flush_result = client.indices.flush(index=index_name)
        if not flush_result['ok']:
            raise RuntimeError('flush failed: %s' % str(flush_result))

        # Make sure we see mixed segments (it's possible Lucene could have "accidentally" merged away the first_version segments):
        segs = client.indices.segments(index=index_name)
        shards = segs['indices'][index_name]['shards']
        if len(shards) != 1:
            raise RuntimeError('index should have 1 shard but got %s' %
                               len(shards))

        second_version_segs = shards['0'][0]['segments'].keys()
        #print("first: %s" % first_version_segs)
        #print("second: %s" % second_version_segs)

        for segment_name in first_version_segs:
            if segment_name in second_version_segs:
                # Good: an ancient version seg "survived":
                break
        else:
            raise RuntimeError('index has no first_version segs left')

        for segment_name in second_version_segs:
            if segment_name not in first_version_segs:
                # Good: a second_version segment was written
                break
        else:
            raise RuntimeError('index has no second_version segs left')

        create_bwc_index.shutdown_node(node)
        print('%s server output:\n%s' %
              (second_version, node.stdout.read().decode('utf-8')))
        node = None
        create_bwc_index.compress_index(
            '%s-and-%s' % (first_version, second_version), tmp_dir,
            'core/src/test/resources/org/elasticsearch/action/admin/indices/upgrade'
        )
    finally:
        if node is not None:
            create_bwc_index.shutdown_node(node)
        shutil.rmtree(tmp_dir)
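The two loops above that verify mixed segments rely on Python's for/else construct: the else clause runs only when the loop finishes without hitting break, so reaching it means no matching segment name was found. A standalone illustration of the same pattern:

def assert_any_shared(first_segs, second_segs):
    # for/else: the 'else' branch executes only if the loop never breaks
    for name in first_segs:
        if name in second_segs:
            break  # a segment survived into the second set; skip the else
    else:
        raise RuntimeError('no shared segment found')

assert_any_shared(['_0', '_1'], ['_1', '_2'])  # passes: '_1' is shared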
def main():
    '''
    Creates a back compat index (.zip) using v0.20 and then creates a snapshot of it using v1.1
    '''

    logging.basicConfig(format='[%(levelname)s] [%(asctime)s] %(message)s',
                        level=logging.INFO,
                        datefmt='%Y-%m-%d %I:%M:%S %p')
    logging.getLogger('elasticsearch').setLevel(logging.ERROR)
    logging.getLogger('urllib3').setLevel(logging.WARN)

    tmp_dir = tempfile.mkdtemp()
    node = None  # initialized so the finally block can safely test it
    try:
        data_dir = os.path.join(tmp_dir, 'data')
        logging.info('Temp data dir: %s' % data_dir)

        first_version = '0.20.6'
        second_version = '1.1.2'
        index_name = 'index-%s-and-%s' % (first_version, second_version)

        # Download old ES releases if necessary:
        release_dir = os.path.join('backwards',
                                   'elasticsearch-%s' % first_version)
        if not os.path.exists(release_dir):
            fetch_version(first_version)

        node = create_bwc_index.start_node(first_version,
                                           release_dir,
                                           data_dir,
                                           cluster_name=index_name)
        client = create_bwc_index.create_client()

        # Creates the index & indexes docs w/ first_version:
        create_bwc_index.generate_index(client, first_version, index_name)

        # Make sure we write segments:
        flush_result = client.indices.flush(index=index_name)
        if not flush_result['ok']:
            raise RuntimeError('flush failed: %s' % str(flush_result))

        create_bwc_index.shutdown_node(node)
        print('%s server output:\n%s' %
              (first_version, node.stdout.read().decode('utf-8')))
        node = None

        release_dir = os.path.join('backwards',
                                   'elasticsearch-%s' % second_version)
        if not os.path.exists(release_dir):
            fetch_version(second_version)

        # Now use second_version to snapshot the index:
        node = create_bwc_index.start_node(second_version,
                                           release_dir,
                                           data_dir,
                                           cluster_name=index_name)
        client = create_bwc_index.create_client()

        repo_dir = os.path.join(tmp_dir, 'repo')
        create_bwc_index.snapshot_index(client, second_version, repo_dir)
        create_bwc_index.shutdown_node(node)
        print('%s server output:\n%s' %
              (second_version, node.stdout.read().decode('utf-8')))

        create_bwc_index.compress(tmp_dir, "src/test/resources/indices/bwc",
                                  'unsupportedrepo-%s.zip' % first_version,
                                  'repo')

        node = None
    finally:
        if node is not None:
            create_bwc_index.shutdown_node(node)
        shutil.rmtree(tmp_dir)
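create_bwc_index.snapshot_index(client, second_version, repo_dir) is not shown on this page. With the 1.x snapshot/restore API it roughly amounts to registering a filesystem repository at repo_dir and taking a snapshot of the index; a hedged sketch with placeholder repository/snapshot names and a simplified signature:

def snapshot_index_sketch(client, index_name, repo_dir):
    # Illustration only: register an fs repository, then snapshot the index.
    client.snapshot.create_repository(repository='test_repo', body={
        'type': 'fs',
        'settings': {'location': repo_dir},
    })
    client.snapshot.create(repository='test_repo', snapshot='test_snapshot',
                           body={'indices': index_name},
                           wait_for_completion=True)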
def main():
  '''
  Creates a static back compat index (.zip) with mixed 0.20 (Lucene 3.x) and 0.90 (Lucene 4.x) segments. 
  '''
  
  logging.basicConfig(format='[%(levelname)s] [%(asctime)s] %(message)s', level=logging.INFO,
                      datefmt='%Y-%m-%d %I:%M:%S %p')
  logging.getLogger('elasticsearch').setLevel(logging.ERROR)
  logging.getLogger('urllib3').setLevel(logging.WARN)

  tmp_dir = tempfile.mkdtemp()
  node = None  # initialized so the finally block can safely test it
  try:
    data_dir = os.path.join(tmp_dir, 'data')
    logging.info('Temp data dir: %s' % data_dir)

    first_version = '0.20.6'
    second_version = '0.90.6'
    index_name = 'index-%s-and-%s' % (first_version, second_version)

    # Download old ES releases if necessary:
    release_dir = os.path.join('backwards', 'elasticsearch-%s' % first_version)
    if not os.path.exists(release_dir):
      fetch_version(first_version)

    node = create_bwc_index.start_node(first_version, release_dir, data_dir, cluster_name=index_name)
    client = create_bwc_index.create_client()

    # Creates the index & indexes docs w/ first_version:
    create_bwc_index.generate_index(client, first_version, index_name)

    # Make sure we write segments:
    flush_result = client.indices.flush(index=index_name)
    if not flush_result['ok']:
      raise RuntimeError('flush failed: %s' % str(flush_result))

    segs = client.indices.segments(index=index_name)
    shards = segs['indices'][index_name]['shards']
    if len(shards) != 1:
      raise RuntimeError('index should have 1 shard but got %s' % len(shards))

    first_version_segs = shards['0'][0]['segments'].keys()

    create_bwc_index.shutdown_node(node)
    print('%s server output:\n%s' % (first_version, node.stdout.read().decode('utf-8')))
    node = None

    release_dir = os.path.join('backwards', 'elasticsearch-%s' % second_version)
    if not os.path.exists(release_dir):
      fetch_version(second_version)

    # Now also index docs with second_version:
    node = create_bwc_index.start_node(second_version, release_dir, data_dir, cluster_name=index_name)
    client = create_bwc_index.create_client()

    # If we index too many docs, the random refresh/flush causes the ancient segments to be merged away:
    num_docs = 10
    create_bwc_index.index_documents(client, index_name, 'doc', num_docs)

    # Make sure we get a segment:
    flush_result = client.indices.flush(index=index_name)
    if not flush_result['ok']:
      raise RuntimeError('flush failed: %s' % str(flush_result))

    # Make sure we see mixed segments (it's possible Lucene could have "accidentally" merged away the first_version segments):
    segs = client.indices.segments(index=index_name)
    shards = segs['indices'][index_name]['shards']
    if len(shards) != 1:
      raise RuntimeError('index should have 1 shard but got %s' % len(shards))

    second_version_segs = shards['0'][0]['segments'].keys()
    #print("first: %s" % first_version_segs)
    #print("second: %s" % second_version_segs)

    for segment_name in first_version_segs:
      if segment_name in second_version_segs:
        # Good: an ancient version seg "survived":
        break
    else:
      raise RuntimeError('index has no first_version segs left')

    for segment_name in second_version_segs:
      if segment_name not in first_version_segs:
        # Good: a second_version segment was written
        break
    else:
      raise RuntimeError('index has no second_version segs left')

    create_bwc_index.shutdown_node(node)
    print('%s server output:\n%s' % (second_version, node.stdout.read().decode('utf-8')))
    node = None
    create_bwc_index.compress_index('%s-and-%s' % (first_version, second_version), tmp_dir, 'src/test/resources/org/elasticsearch/rest/action/admin/indices/upgrade')
  finally:
    if node is not None:
      create_bwc_index.shutdown_node(node)
    shutil.rmtree(tmp_dir)
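Like the other listings on this page, these functions are meant to be run as standalone scripts; the original files presumably end with a standard entry-point guard along these lines:

if __name__ == '__main__':
    main()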