def _debug_query_eklhad(self, *args, **kwargs):
    """Run a debug query via the pooled db caller, logging the elapsed time.

    Arguments are forwarded verbatim to the connection's debug_query();
    the timer is logged after the call completes.
    """
    with timer('DEBUG QUERY') as elapsed:
        reply = self.__db_caller(lambda conn: conn.debug_query(*args, **kwargs))
    logger.info(elapsed)
    return reply
def __init__(self, options, config):
    """Drive a one-shot API query from parsed CLI options.

    If `options` carries an ApiClass, build the query params from the
    non-None option attributes, run the query, and print the JSON result.
    Optional flags on `options`:
      - pre_process_params: hook to mutate the params dict before querying
      - print_params: echo the params before the result
      - print_time: pre-warm a db connection and print the query timer
    NOTE(review): Python 2 code (print statements); ApiException is assumed
    to be imported at file level — not visible in this chunk.
    """
    from wraptor.context import timer
    import json
    if hasattr(options, 'ApiClass'):
        # Only options the user actually set (non-None) become query params.
        params = {k: v for k, v in vars(options).items() if v is not None}
        if hasattr(options, 'pre_process_params'):
            options.pre_process_params(params)
        if options.print_params:
            from pprint import pprint
            print "Params:"
            pprint(params)
            print "\nResult:"
        if options.print_time:
            # pre-cache a connection we can check actual api query/processing time
            from eklhad.util import pool
            with pool.get_eklhad_connection():
                pass
        api = options.ApiClass(config)
        try:
            with timer('Query') as t:
                result = api.query(params, ignore_extra_params=True)
        except ApiException as e:
            # Surface API-level failures to the user and stop; no result to print.
            print e
            return
        print json.dumps(result, indent=4)
        if options.print_time:
            print t
def estimate_latency(self, iterations=100):
    """Estimate db round-trip latency by timing repeated pings.

    Collects one timer interval per ping and reduces the samples with
    self._compute_results().
    """
    samples = []
    with self._db_conn() as conn:
        remaining = iterations
        while remaining > 0:
            with timer() as tick:
                conn.ping()
            samples.append(tick.interval)
            remaining -= 1
    return self._compute_results(samples)
def estimate_download(self, iterations=100):
    """Estimate download throughput by repeatedly fetching the payload row.

    Each sample is bytes-received divided by elapsed seconds (the timer
    interval is in milliseconds); samples are reduced via
    self._compute_results().
    """
    select_sql = 'SELECT data AS a FROM %s WHERE name="payload"' % (self.data_table)
    rates = []
    with self._db_conn() as conn:
        for _ in range(iterations):
            with timer() as tick:
                blob = conn.get(select_sql).a
            elapsed_seconds = tick.interval / 1000.
            rates.append(len(blob) / elapsed_seconds)
    return self._compute_results(rates)
def estimate_download(self, iterations=100):
    """Estimate download throughput (bytes/sec) from the payload row.

    Fetches the payload `iterations` times, timing each fetch; the timer
    interval is milliseconds, hence the /1000. conversion to seconds.
    """
    fetch_query = 'SELECT data AS a FROM %s WHERE name="payload"' % (
        self.data_table)
    throughput = []
    with self._db_conn() as conn:
        for _ in range(iterations):
            with timer() as stopwatch:
                data = conn.get(fetch_query).a
            throughput.append(len(data) / (stopwatch.interval / 1000.))
    summary = self._compute_results(throughput)
    return summary
def estimate_roundtrip(self, iterations=100):
    """Estimate round-trip throughput by echoing half the payload each way.

    The half-sized payload is appended to the query after a '#' comment so
    the server receives it (upload) and the SELECT returns the data column
    (download); the sample counts both directions, hence len * 2.
    """
    rates = []
    query = 'SELECT data AS a FROM %s # %%s' % (self.data_table)
    with self._db_conn() as conn:
        half_payload = conn.get('SELECT data FROM %s WHERE name="half_payload"' % self.data_table).data
        # ensure compile
        conn.execute(query, half_payload)
        for _ in range(iterations):
            with timer() as stopwatch:
                conn.query(query, half_payload)
            bytes_moved = len(half_payload) * 2
            rates.append(bytes_moved / (stopwatch.interval / 1000.))
    return self._compute_results(rates)
def estimate_roundtrip(self, iterations=100):
    """Estimate two-way throughput (bytes/sec) using the half-size payload.

    Smuggles the half payload into the statement after a '#' comment so it
    is uploaded, while the SELECT sends it back down; each sample therefore
    counts twice the payload length over the elapsed seconds.
    """
    samples = []
    query = 'SELECT data AS a FROM %s # %%s' % (self.data_table)
    with self._db_conn() as conn:
        half_payload = conn.get(
            'SELECT data FROM %s WHERE name="half_payload"' % self.data_table).data
        conn.execute(query, half_payload)  # ensure compile
        for _ in range(iterations):
            with timer() as tick:
                conn.query(query, half_payload)
            seconds = tick.interval / 1000.
            samples.append((len(half_payload) * 2) / seconds)
    summary = self._compute_results(samples)
    return summary
def estimate_upload(self, iterations=100): results = [] query = '!!%s' with self._db_conn() as conn: payload = conn.get('SELECT data FROM %s WHERE name="payload"' % self.data_table).data for _ in range(iterations): try: with timer() as t: conn.query(query, payload) except database.MySQLError as (errno, msg): if errno == errorcodes.ER_PARSE_ERROR: pass else: raise results.append(len(payload) / (t.interval / 1000.))
def test_basic():
    """The timer should report at least the slept duration, in milliseconds."""
    with timer() as stopwatch:
        time.sleep(0.1000000)  # sleep 100 ms
    assert stopwatch.interval >= 100
def test_params():
    """A named timer exposes its name and prefixes its str() with it."""
    with timer('test') as named:
        pass
    assert named.name == 'test'
    assert str(named).startswith('test')