Example #1
import datetime
import logging
import logging.config

import click
import requests
from elasticsearch import Elasticsearch
from elasticsearch.helpers import scan

from sdcm.keystore import KeyStore  # project-local helper; import path is an assumption

LOGGER = logging.getLogger(__name__)


def create_es_connection(self):
    # Method fragment: `self` implies this belongs to a class elsewhere.
    ks = KeyStore()
    es_conf = ks.get_elasticsearch_credentials()
    self.es = Elasticsearch(hosts=[es_conf["es_url"]],
                            verify_certs=False,
                            http_auth=(es_conf["es_user"],
                                       es_conf["es_password"]))


def fix_es_mapping(index_name):
    ks = KeyStore()
    es_conf = ks.get_elasticsearch_credentials()

    mapping_url = "{es_url}/{index_name}/_mapping".format(
        index_name=index_name, **es_conf)
    res = requests.get(mapping_url,
                       auth=(es_conf["es_user"], es_conf["es_password"]))
    output = res.json()[index_name]

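    # Disable dynamic mapping on the noisiest sub-objects so new documents
    # stop adding fields to the index mapping.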
    output['mappings']['test_stats']['dynamic'] = False

    output['mappings']['test_stats']['properties']['coredumps'] = dict(
        type='object')
    output['mappings']['test_stats']['properties']['setup_details'][
        'properties']['db_cluster_details'] = dict(type='object')
    output['mappings']['test_stats']['properties']['system_details'] = {
        "dynamic": False,
        "properties": {}
    }

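    # Push the patched mapping back and fail loudly if Elasticsearch rejects it.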
    res = requests.put(mapping_url + "/test_stats",
                       json=output['mappings'],
                       auth=(es_conf["es_user"], es_conf["es_password"]))
    print(res.text)
    res.raise_for_status()

    click.secho("fixed {index_name}".format(index_name=index_name), fg='green')


def migrate(old_index_name, dry_run, new_index, days):  # pylint: disable=too-many-locals
    # Route this script's log records to stdout (the handler emits INFO and
    # up); leave all other loggers at WARNING.
    logging.basicConfig(level=logging.DEBUG)
    logging.config.dictConfig({
        'version': 1,
        'disable_existing_loggers': True,
        'formatters': {
            'standard': {
                'format': '%(asctime)s [%(levelname)s] %(name)s: %(message)s'
            },
        },
        'handlers': {
            'default': {
                'level': 'INFO',
                'formatter': 'standard',
                'class': 'logging.StreamHandler',
                'stream': 'ext://sys.stdout',  # Default is stderr
            },
        },
        'loggers': {
            '': {  # root logger
                'handlers': ['default'],
                'level': 'WARNING',
                'propagate': False
            },
            '__main__': {  # if __name__ == '__main__'
                'handlers': ['default'],
                'level': 'DEBUG',
                'propagate': False
            },
        }
    })
    ks = KeyStore()
    es_conf = ks.get_elasticsearch_credentials()
    elastic_search = Elasticsearch(hosts=[es_conf["es_url"]],
                                   verify_certs=True,
                                   http_auth=(es_conf["es_user"],
                                              es_conf["es_password"]))

    if not elastic_search.indices.exists(index=new_index):
        elastic_search.indices.create(index=new_index)

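    # Helper: index one migrated document into the new index; no-op on dry runs.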
    def post_to_new(doc):
        if dry_run:
            return
        elastic_search.index(index=new_index, doc_type='nemesis', body=doc)

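    # elasticsearch.helpers.scan streams every matching hit, fetching batches
    # of 300 and keeping the scroll context alive for up to 3 hours.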
    res = scan(elastic_search,
               index=old_index_name,
               query={
                   "query": {
                       "range": {
                           "test_details.start_time": {
                               "gte":
                               (datetime.datetime.utcnow() -
                                datetime.timedelta(days=days)).timestamp(),
                               "lte":
                               datetime.datetime.utcnow().timestamp(),
                               "boost":
                               2.0
                           }
                       }
                   }
               },
               size=300,
               scroll='3h')

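    # Flatten the nested per-test "nemesis" dict into one document per
    # individual failure or run.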
    for num, hit in enumerate(res):
        nemesis_list = hit["_source"]["nemesis"]
        test_data = hit["_source"]
        LOGGER.info("%s: %s", num, test_data['test_details']['test_id'])
        if 'scylla-server' not in test_data['versions']:
            LOGGER.debug("%s: No version for %s - %s", num,
                         test_data['test_details']['test_id'],
                         test_data['test_details']['job_name'])
            if not test_data['test_details']['job_name']:
                LOGGER.debug(test_data)
            continue

        for nemesis_class, data in nemesis_list.items():
            for failure in data['failures']:
                new_nemesis_data = dict(
                    test_id=test_data['test_details']['test_id'],
                    job_name=test_data['test_details']['job_name'],
                    test_name=test_data['test_details']['test_name'],
                    scylla_version=test_data['versions']['scylla-server']
                    ['version'],
                    scylla_git_sha=test_data['versions']['scylla-server']
                    ['commit_id'],
                )

                new_nemesis_data.update(
                    dict(nemesis_name=nemesis_class,
                         nemesis_duration=failure['duration'],
                         start_time=datetime.datetime.utcfromtimestamp(
                             failure['start']),
                         end_time=datetime.datetime.utcfromtimestamp(
                             failure['end']),
                         target_node=failure['node'],
                         outcome="failure",
                         failure_message=failure['error']))
                post_to_new(new_nemesis_data)

            for run in data['runs']:
                new_nemesis_data = dict(
                    test_id=test_data['test_details']['test_id'],
                    job_name=test_data['test_details']['job_name'],
                    test_name=test_data['test_details']['test_name'],
                    scylla_version=test_data['versions']['scylla-server']
                    ['version'],
                    scylla_git_sha=test_data['versions']['scylla-server']
                    ['commit_id'],
                )
                new_nemesis_data.update(
                    dict(nemesis_name=nemesis_class,
                         nemesis_duration=run['duration'],
                         start_time=datetime.datetime.utcfromtimestamp(
                             run['start']),
                         end_time=datetime.datetime.utcfromtimestamp(
                             run['end']),
                         target_node=run['node'],
                         outcome="passed"))
                if run.get('type', '') == 'skipped':
                    new_nemesis_data['outcome'] = 'skipped'
                    new_nemesis_data['skip_reason'] = run['skip_reason']
                post_to_new(new_nemesis_data)
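
The bare function signatures, the click.secho call, and the pylint hint above
suggest that fix_es_mapping and migrate are wrapped as click commands elsewhere
in the source project. A minimal sketch of how that wiring could look (the
command names, decorator arguments, and defaults below are assumptions, not
taken from the original):

@click.group()
def cli():
    pass


@cli.command()
@click.argument('index_name')
def fix_mapping(index_name):
    fix_es_mapping(index_name)


@cli.command()
@click.argument('old_index_name')
@click.option('--dry-run', is_flag=True)
@click.option('--new-index', default='nemesis_data')  # assumed default
@click.option('--days', default=30, type=int)         # assumed default
def run_migrate(old_index_name, dry_run, new_index, days):
    migrate(old_index_name, dry_run, new_index, days)


if __name__ == '__main__':
    cli()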
Example #4
import logging

import elasticsearch

from sdcm.keystore import KeyStore  # project-local helper; import path is an assumption

LOGGER = logging.getLogger(__name__)


# Note: this wrapper targets the pre-8.x elasticsearch-py client, which still
# accepts doc_type and the ignore= transport option used below.
class ES(elasticsearch.Elasticsearch):
    """
    Provides an interface to the Elasticsearch DB
    """
    def __init__(self):
        self._conf = self.get_conf()
        super().__init__(hosts=[self._conf["es_url"]],
                         verify_certs=False,
                         http_auth=(self._conf["es_user"],
                                    self._conf["es_password"]))

    def get_conf(self):
        self.key_store = KeyStore()
        return self.key_store.get_elasticsearch_credentials()

    def _create_index(self, index):
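        # ignore=400 suppresses "resource_already_exists_exception", making
        # repeated create calls idempotent.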
        self.indices.create(index=index, ignore=400)  # pylint: disable=unexpected-keyword-arg

    def create_doc(self, index, doc_type, doc_id, body):
        """
        Add document in json format
        """
        LOGGER.info('Create doc')
        LOGGER.info('INDEX: %s', index)
        LOGGER.info('DOC_TYPE: %s', doc_type)
        LOGGER.info('DOC_ID: %s', doc_id)
        LOGGER.info('BODY: %s', body)
        self._create_index(index)
        if self.exists(index=index, doc_type=doc_type, id=doc_id):
            self.update(index=index,
                        doc_type=doc_type,
                        id=doc_id,
                        body={'doc': body})
        else:
            self.create(index=index, doc_type=doc_type, id=doc_id, body=body)

    def update_doc(self, index, doc_type, doc_id, body):
        """
        Update document with partial data
        """
        LOGGER.info('Update doc %s with info %s', doc_id, body)
        self.update(index=index,
                    doc_type=doc_type,
                    id=doc_id,
                    body=dict(doc=body))

    def get_all(self, index, limit=1000):
        """
        Search for documents for the certain index
        """
        return self.search(index=index, size=limit)  # pylint: disable=unexpected-keyword-arg

    def get_doc(self, index, doc_id, doc_type='_all'):
        """
        Get document by id
        """
        doc = self.get(
            index=index,
            doc_type=doc_type,
            id=doc_id,
            ignore=[  # pylint: disable=unexpected-keyword-arg
                400, 404
            ])
        if not doc.get('found'):  # a suppressed 400 returns an error body with no 'found' key
            LOGGER.warning('Document not found: %s %s', doc_id, doc_type)
            return None
        return doc

    def delete_doc(self, index, doc_type, doc_id):
        """
        Delete document
        """
        if self.get_doc(index, doc_id, doc_type):
            self.delete(index=index, doc_type=doc_type, id=doc_id)
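
For illustration, a short usage sketch of the ES wrapper above (the index,
doc_type, id, and body values are invented for the example):

es = ES()
es.create_doc(index='test_results', doc_type='test_stats',
              doc_id='run-001', body={'status': 'passed'})
doc = es.get_doc(index='test_results', doc_id='run-001')
if doc:
    es.update_doc(index='test_results', doc_type='test_stats',
                  doc_id='run-001', body={'status': 'failed'})
es.delete_doc(index='test_results', doc_type='test_stats', doc_id='run-001')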