def bulk_with_instruction(self, instruction, terms):
    """Does a POST request against ES's bulk API

    The POST request will be done against
    `/<index_name>/<mapping_name>/_bulk`

    The instruction will be appended before each term. For example,
    bulk_with_instruction('instr', ['one', 'two']) will produce::

       instr
       one
       instr
       two

    :param instruction: instruction to execute for each term
    :type instruction: dict
    :param terms: list of terms for which instruction should be executed
    :type terms: collections.abc.Iterable
    :rtype: requests.models.Response
    """
    instruction = json.dumps(instruction)
    # Interleave the (constant) serialized instruction with each serialized
    # term, one JSON document per line. chain.from_iterable over a generator
    # avoids materializing an intermediate list just to star-unpack it.
    # The bulk API requires a trailing newline after the last document.
    data = '\n'.join(itertools.chain.from_iterable(
        (instruction, json.dumps(term)) for term in terms)) + '\n'
    url = '/'.join(
        (self._url, self._index_name, self._mapping_name, '_bulk'))
    return self._req(
        self._sess.post, url, data, None, deserialize=False)
def json(self, legacy=False):
    """Returns a json representation of the dict returned by `as_dict`.

    :param legacy: Defaults to False. If True, returned dict is in legacy
                   format.
    :type legacy: bool
    :rtype: str
    """
    serializable = self.as_dict(legacy=legacy, mutable=True)
    return json.dumps(serializable)
def _write(self):
    # Serialize the current usage period plus its data and append it to
    # the report, followed by a separator for the next entry.
    period = {
        'begin': self.usage_start_dt.isoformat(),
        'end': self.usage_end_dt.isoformat(),
    }
    data = {'period': period, 'usage': self._usage_data}
    self._report.write(json.dumps(data))
    self._report.write(', ')
    self._report.flush()
def scroll(self, body):
    """Does a GET request against ES's scroll API.

    The GET request will be done against `/_search/scroll`

    :param body: body of the request
    :type body: dict
    :rtype: dict
    """
    payload = json.dumps(body)
    scroll_url = '/'.join((self._url, '_search/scroll'))
    return self._req(self._sess.get, scroll_url, payload, None)
def search(self, body, scroll=True):
    """Does a GET request against ES's search API.

    The GET request will be done against `/<index_name>/_search`

    :param body: body of the request
    :type body: dict
    :rtype: dict
    """
    target = '/'.join((self._url, self._index_name, '_search'))
    if scroll:
        params = self._scroll_params
    else:
        params = None
    return self._req(self._sess.get, target, json.dumps(body), params)
def delete_by_query(self, begin=None, end=None, filters=None):
    """Does a POST request against ES's Delete By Query API.

    The POST request will be done against
    `/<index_name>/_delete_by_query`

    :param begin: Optional lower bound of the period for which documents
                  should be deleted
    :param end: Optional upper bound of the period for which documents
                should be deleted
    :param filters: Optional filters for documents to delete
    :type filters: list of dicts
    :rtype: requests.models.Response
    """
    url = '/'.join((self._url, self._index_name, '_delete_by_query'))
    must = self._build_must(begin, end, None, filters)
    # When no criteria are given at all, the request is sent without a
    # body (which deletes matching all documents).
    data = (json.dumps({"query": {"bool": {"must": must}}})
            if must else None)
    return self._req(self._sess.post, url, data, None)
def close_scroll(self, body):
    """Does a DELETE request against ES's scroll API.

    The DELETE request will be done against `/_search/scroll`

    :param body: body of the request
    :type body: dict
    :rtype: dict
    """
    url = '/'.join((self._url, '_search/scroll'))
    raw_resp = self._req(
        self._sess.delete, url, json.dumps(body), None, deserialize=False)
    resp_body = raw_resp.json()
    LOG.debug('Freed {} scrolls contexts'.format(resp_body['num_freed']))
    return resp_body
def _append_time_frame(self, res_type, frame, tenant_id):
    # Flatten one usage frame into the keyword arguments expected by
    # add_time_frame. A missing or falsy price defaults to Decimal(0).
    volume = frame['vol']
    rate = frame.get('rating', {}).get('price') or decimal.Decimal(0)
    self.add_time_frame(
        begin=self.usage_start_dt.get(tenant_id),
        end=self.usage_end_dt.get(tenant_id),
        tenant_id=tenant_id,
        unit=volume['unit'],
        qty=volume['qty'],
        res_type=res_type,
        rate=rate,
        desc=json.dumps(frame['desc']))
def put_mapping(self, mapping):
    """Does a PUT request against ES's mapping API.

    The PUT request will be done against
    `/<index_name>/_mapping/<mapping_name>`

    :mapping: body of the request
    :type mapping: dict
    :rtype: requests.models.Response
    """
    url = '/'.join(
        (self._url, self._index_name, '_mapping', self._mapping_name))
    # NOTE(peschk_l): This is done for compatibility with
    # Elasticsearch 6 and 7.
    params = {"include_type_name": "true"}
    payload = json.dumps(mapping)
    return self._req(
        self._sess.put, url, payload, params, deserialize=False)
def test_json(self):
    """`DataFrame.json()` round-trips to the expected dict structure."""
    start = datetime.datetime(2019, 3, 4, 1, tzinfo=tz.UTC)
    end = datetime.datetime(2019, 3, 4, 2, tzinfo=tz.UTC)
    df = dataframe.DataFrame(start=start, end=end)

    points_per_service = {'service_a': 2, 'service_b': 4}
    for service, nb_points in points_per_service.items():
        points = [dataframe.DataPoint(**TestDataPoint.default_params)
                  for _ in range(nb_points)]
        df.add_points(points, service)

    expected = {
        'period': {
            'begin': start.isoformat(),
            'end': end.isoformat(),
        },
        'usage': {
            service: [
                dataframe.DataPoint(
                    **TestDataPoint.default_params).as_dict()
                for _ in range(nb_points)
            ]
            for service, nb_points in points_per_service.items()
        },
    }
    self.maxDiff = None
    self.assertEqual(
        json.loads(df.json()),
        json.loads(json.dumps(expected)))
def test_encode_datetime(self):
    """Encoding an aware datetime yields its ISO 8601 representation."""
    dt = datetime.datetime(2019, 1, 1, tzinfo=tz.UTC)
    expected = '{"date": "2019-01-01T00:00:00+00:00"}'
    self.assertEqual(json.dumps({'date': dt}), expected)
def test_encode_decimal(self):
    """Encoding a Decimal yields a float literal."""
    expected = '{"nb": 42.0}'
    self.assertEqual(json.dumps({'nb': decimal.Decimal(42)}), expected)
def json(self, legacy=False):
    """Returns a json representation of the dict returned by `as_dict`.

    :param legacy: Defaults to False. If True, returned dict is in legacy
                   format.
    :type legacy: bool
    :rtype: str
    """
    return json.dumps(self.as_dict(legacy=legacy, mutable=True))
def set_metadata(self, metadata):
    """Set metadata attached to the state."""
    serialized = json.dumps(metadata)
    self._db.set_metadata(self._state_name, serialized)
def _update(self):
    """Persist the current timestamp and metadata to the state file.

    The file handle is closed even if serialization or the write raises,
    so it is never leaked.
    """
    state_data = {'timestamp': self._ts, 'metadata': self._metadata}
    state_file = self._open('wb')
    try:
        # NOTE(review): the file is opened in binary mode ('wb') while
        # json.dumps returns text -- presumably `json` is a project
        # wrapper or `_open` handles encoding; confirm before changing.
        state_file.write(json.dumps(state_data))
    finally:
        state_file.close()
def _write_total(self):
    # Append the aggregated total and close the report's JSON array.
    self._report.write(json.dumps({'total': self.total}))
    self._report.write(']')
    self._report.flush()
def output_json(data, code, headers=None):
    """Helper function for api endpoint json serialization"""
    response = flask.make_response(json.dumps(data), code)
    if headers:
        response.headers.extend(headers)
    return response