def run(self):
    """Replay coverage increments, collect contributing tests into the dump
    XML, and stop early once full coverage is reached.

    Returns the populated xmlDump object with the regenerated hierarchy
    appended so coverage can be reported on a per-point basis.
    """
    xml = self.xmlDump()
    for count, incr in enumerate(self):
        if count % 20 == 0:
            # re-emit the column header every 20 rows
            message.information(' log_id : rows : hits : coverage')
        message.information(' %(log_id)6d : %(rows)6d : %(hits)6d : %(cvg)s',
                            log_id=incr.log.log_id,
                            rows=incr.updates,
                            hits=incr.hits,
                            cvg=incr.status.coverage.description())
        if incr.hits:
            # this test contributed to overall coverage
            xml.add(incr)
        if incr.status.metric().is_hit():
            message.note('all coverage hit')
            break
    message.information('coverage : ' + self.status().coverage.description())
    if self.robust:
        message.information('robust : ' + self.status().robust.description())
    test_count = int(xml.xml.xpath('count(/optimize/test/log_id)'))
    message.information('tests : %(count)d', count=test_count)
    # now regenerate hierarchy and report coverage on point basis
    xml.append(self.hierarchy().xml())
    return xml
def __init__(self, description='none given', test=None, root=None, parent=None, level=message.ERROR, queue='threaded', **kwargs):
    """Register this activity block with mdb and, on first construction,
    create the shared queue instance (pseudo-singleton).

    NOTE(review): nesting below is reconstructed from collapsed source;
    confirm which statements belong inside the `if mdb.instance is None`
    guard against the original file.
    """
    self.commit_level = level
    self.abv = activityBlockVersion(**kwargs)
    # init default filter
    self.filter_fn = self.filter
    # fall back to mdb defaults when no explicit root/parent supplied
    self.root = root or mdbDefault().root
    self.parent = parent or mdbDefault().parent
    # add to list of instances
    mdb.instances.append(self)
    # pseudo singleton: only the first instance creates the shared queue
    if mdb.instance is None:
        try:
            # resolve the queue implementation by name, e.g. 'threaded'
            _queue = getattr(self.queue, queue)
        except AttributeError:
            # NOTE(review): assumes message.fatal aborts and does not return
            message.fatal('No mdb queue type %(queue)s', queue=queue)
        message.information('Using queue %(queue)s', queue=queue)
        mdb.instance = _queue(self.abv, self.root, self.parent, description, test)
        # install callback
        message.emit_cbs.add('mdb emit', 1, self.add, None)
    message.debug('hello ...')
def close(self):
    """Flush buffered (log_id, bucket_id, value) tuples to the database.

    Writes to the 'goal' table when this uploader is a reference, otherwise
    to 'hits'. Does nothing (with a note) when there is no buffered data.
    """
    table = 'goal' if self.reference else 'hits'
    if len(self.data) == 0:
        message.note('No data to upload into table "%(table)s", skipping', table=table)
        return
    message.information('starting data upload to table "%(table)s" via insert', table=table)
    with mdb.mdb.cursor() as cursor:
        rows = cursor.executemany(
            'INSERT INTO ' + table + ' (log_id, bucket_id, ' + table + ') VALUES (%s, %s, %s);',
            self.data)
        warnings = cursor.warning_count()
        if warnings:
            # fix: placeholder was '%(warnings)' with no conversion type,
            # which is an invalid %-format and never rendered the count
            message.warning('upload to db via insert with %(warnings)d', warnings=warnings)
        if rows is None:
            message.warning('upload to db via insert "%(table)s" returned None', table=table)
        else:
            message.information(
                'upload to db via insert added %(rows)d rows of %(data)d to "%(table)s"',
                rows=int(rows), data=len(self.data), table=table)
def plusarg_opt_int(cls, name, default, fmt='08x'):
    """Fetch an integer option from the command-line plusargs.

    Falls back to `default` when the plusarg is absent or not parseable as
    an integer (base auto-detected via int(..., 0)).
    """
    try:
        result = int(plusargs().get(name, str(default)), 0)
    except (TypeError, ValueError):
        # fix: was a bare `except:` which also swallowed SystemExit and
        # KeyboardInterrupt; only conversion failures are expected here
        message.warning(str(sys.exc_info()))
        result = default
    message.information('Using %(result)' + fmt + ' for option "%(name)s"', result=result, name=name)
    return result
def plusarg_opt_int(cls, name, default, fmt='08x'):
    """Fetch an integer option from the command-line plusargs.

    Falls back to `default` when the plusarg is absent or not parseable as
    an integer (base auto-detected via int(..., 0)).
    """
    try:
        result = int(plusargs().get(name, str(default)), 0)
    except (TypeError, ValueError):
        # fix: was a bare `except:` which also swallowed SystemExit and
        # KeyboardInterrupt; only conversion failures are expected here
        message.warning(str(sys.exc_info()))
        result = default
    message.information('Using %(result)' + fmt + ' for option "%(name)s"', result=result, name=name)
    return result
def __init__(self, log_ids=[], test_ids=[], xml=None, threshold=0, robust=False, previous=None):
    'log_ids is a list of regression roots'
    # NOTE(review): mutable default arguments ([]) are shared across calls;
    # harmless only while callers never mutate them - confirm.
    self.log_ids = log_ids
    s_log_ids = ','.join(map(str, log_ids))
    self.tests = mdb.connection().row_cursor()
    if log_ids:
        # create table of individual runs, but not root node as this may have already summarised coverage
        self.tests.execute('CREATE TEMPORARY TABLE ' + self.invs + ' AS SELECT l1.*, goal_id AS master FROM log AS l0 JOIN log AS l1 ON (l0.log_id = l1.root) LEFT OUTER JOIN master ON (l1.log_id = master.log_id) WHERE l1.root IN (' + s_log_ids + ');')
        self.tests.execute('SELECT count(*) AS children FROM ' + self.invs)
        children = self.tests.fetchone().children
        if children:
            message.information('%(log_ids)s %(has)s %(children)d children', log_ids=s_log_ids, children=children, has='have' if len(log_ids) > 1 else 'has')
    # append individual runs as given by test_ids
    if xml:
        xml_ids = xml.xml.xpath('/optimize/test/log_id/text()')
    else:
        xml_ids = []
    if test_ids or xml_ids:
        s_test_ids = ','.join(map(str, test_ids + xml_ids))
        # extend the invocation table if the regression pass created it, else create it now
        create = ('INSERT INTO ' + self.invs) if log_ids else ('CREATE TEMPORARY TABLE ' + self.invs + ' AS')
        self.tests.execute(create + ' SELECT log.*, IFNULL(goal_id, goal.log_id) AS master FROM log LEFT OUTER JOIN master ON (log.log_id = master.log_id) LEFT OUTER JOIN goal ON (log.log_id = goal.log_id) WHERE log.log_id IN (' + s_test_ids + ') GROUP BY log_id;')
    self.tests.execute('SELECT count(*) AS tests FROM ' + self.invs)
    tests = self.tests.fetchone().tests
    if tests < 1:
        message.fatal('no tests')
    message.information('starting with %(count)d tests in table %(table)s', count=tests, table=self.invs)
    # check congruency
    self.cvg = mdb.connection().row_cursor()
    rows = self.cvg.execute("SELECT md5_self AS md5, 'md5_self' AS type, invs.master, invs.root FROM point JOIN " + self.invs + " AS invs ON (invs.master = point.log_id AND point.parent IS NULL) GROUP BY md5;")
    md5 = self.cvg.fetchall()
    if not md5:
        message.fatal('no master')
    elif len(md5) > 1:
        # multiple distinct md5_self values means incongruent coverage masters
        message.fatal('md5 of multiple masters do not match')
    else:
        message.debug('md5 query returns %(rows)d', rows=rows)
    self.master = mdb.accessor(md5=md5[0])
    # same congruency check on the axis definitions
    self.cvg.execute("SELECT DISTINCT(md5_axes) AS md5, 'md5_axes' AS type, invs.master, invs.root FROM point JOIN " + self.invs + " AS invs ON (invs.master = point.log_id AND point.parent IS NULL) GROUP BY md5;")
    md5 = self.cvg.fetchall()
    if len(md5) > 1:
        message.fatal('md5 of multiple axis masters do not match')
    self.master.axes = md5[0]
    # create status table, collating goal & hits
    self.cvg.execute('CREATE TEMPORARY TABLE ' + self.covg + ' (bucket_id INTEGER NOT NULL PRIMARY KEY, goal INTEGER, hits INTEGER, total_hits INTEGER, rhits INTEGER, max_hits INTEGER, tests INTEGER);')
    try:
        self.threshold = float(threshold)
    except:
        # fall back to 0.0 when the given threshold is not a number
        self.threshold = 0.0
        message.warning('cannot convert threshold value given "%(arg)s" to float because %(exception)s, using %(threshold)2.1f', arg=threshold, exception=sys.exc_info()[0], threshold=self.threshold)
    self.robust = robust
    self.previous = previous
def close(self):
    """Flush buffered (log_id, bucket_id, value) tuples to the database.

    Writes to the 'goal' table when this uploader is a reference, otherwise
    to 'hits'. Does nothing (with a note) when there is no buffered data.
    """
    table = 'goal' if self.reference else 'hits'
    if len(self.data) == 0:
        message.note('No data to upload into table "%(table)s", skipping', table=table)
        return
    message.information('starting data upload to table "%(table)s" via insert', table=table)
    with mdb.mdb.cursor() as cursor:
        rows = cursor.executemany(
            'INSERT INTO ' + table + ' (log_id, bucket_id, ' + table + ') VALUES (%s, %s, %s);',
            self.data)
        warnings = cursor.warning_count()
        if warnings:
            # fix: placeholder was '%(warnings)' with no conversion type,
            # which is an invalid %-format and never rendered the count
            message.warning('upload to db via insert with %(warnings)d', warnings=warnings)
        if rows is None:
            message.warning('upload to db via insert "%(table)s" returned None', table=table)
        else:
            message.information(
                'upload to db via insert added %(rows)d rows of %(data)d to "%(table)s"',
                rows=int(rows), data=len(self.data), table=table)
def do_import(modulename):
    """Import and return the module named `modulename`.

    Returns the cached module from sys.modules when already imported,
    None when `modulename` is not a string or the import fails.
    """
    if not isinstance(modulename, str):
        # fix: was `type(modulename) != types.StringType` (Python-2-only)
        return None
    try:
        return sys.modules[modulename]
    except KeyError:
        pass
    message.information("Importing module %s" % modulename, 2)
    try:
        __import__(modulename)
        # __import__ returns the top-level package; re-lookup gives the
        # actual submodule from sys.modules
        return do_import(modulename)
    except Exception:
        # fix: bare `except:` also caught KeyboardInterrupt/SystemExit, and
        # sys.exc_type/sys.exc_value were removed in Python 3
        exc_type, exc_value = sys.exc_info()[:2]
        message.error("Couldn't import %s (%s: %s)" % (modulename, exc_type, exc_value))
        return None
def run(self):
    """Replay coverage increments, collect contributing tests into the dump
    XML, and stop early once full coverage is reached.

    Returns the populated xmlDump object with the regenerated hierarchy
    appended so coverage can be reported on a per-point basis.
    """
    xml = self.xmlDump()
    for seen, incr in enumerate(self):
        if seen % 20 == 0:
            # re-emit the column header every 20 rows
            message.information(' log_id : rows : hits : coverage')
        message.information(' %(log_id)6d : %(rows)6d : %(hits)6d : %(cvg)s',
                            log_id=incr.log.log_id,
                            rows=incr.updates,
                            hits=incr.hits,
                            cvg=incr.status.coverage.description())
        if incr.hits:
            # this test contributed to overall coverage
            xml.add(incr)
        if incr.status.metric().is_hit():
            message.note('all coverage hit')
            break
    message.information('coverage : ' + self.status().coverage.description())
    if self.robust:
        message.information('robust : ' + self.status().robust.description())
    total = int(xml.xml.xpath('count(/optimize/test/log_id)'))
    message.information('tests : %(count)d', count=total)
    # now regenerate hierarchy and report coverage on point basis
    xml.append(self.hierarchy().xml())
    return xml
# location of static data
static = os.path.join(options.root, 'static')
message.debug('Using %(path)s for static data', path=static)


@bottle.get('/static/<filename:path>')
def serve_asset(filename):
    # any file beneath the static directory is served verbatim
    return bottle.static_file(filename, root=static)


@bottle.route('/')
@bottle.route('/index.html')
def serve_index():
    # both the bare root and /index.html resolve to the landing page
    return bottle.static_file('/index.html', root=static)


if options.gevent:
    import gevent
    from gevent import monkey
    monkey.patch_all()

    class _WsgiLog:
        # file-like adapter routing the WSGI server's log output to message
        @classmethod
        def write(cls, msg):
            message.note(msg)

    bottle_opts.update(server='gevent', log=_WsgiLog)

message.information('Starting bottle server')
bottle.run(**bottle_opts)

# KeyboardInterrupt gets us here ...
mdb.finalize_all()
def prologue(self):
    """Create a batch of signal handles and log each one's current value."""
    message.note('Creating 1000 signal instances')
    signals = [duv.single_bit for _ in range(1000)]
    for index, signal in enumerate(signals):
        message.information('%(idx)d is %(val)d', idx=index, val=int(signal))
def prologue(self):
    """Raise message verbosity and log the simulator product string."""
    message.message.verbosity(message.INT_DEBUG)
    product = verilog.vpiInfo().product
    message.information('simulator is %(product)s', product=product)
    message.note('a note')
def trace_msg(self, msg, lvl=1):
    """Emit a markup trace line at verbosity level `lvl`."""
    text = '*** Markup: %s' % msg
    message.information(text, lvl)
def trace_msg(self, msg, lvl=1):
    """Emit a trace line prefixed with this node's tag and path."""
    text = '*** %s %s: %s' % (self.tag(), self.path(), msg)
    message.information(text, lvl)
################################################################################ if not options.order : options.order = ['cvg', ] if options.regression is None : # presume leftover args are ids options.regression = values regressions = to_list(options.regression) tests = to_list(options.test) if not regressions and not tests : message.fatal('No invocations provided') message.information('optimizing begins') ################################################################################ coverage.messages.hush_creation() optimize_opts = {'threshold' : options.threshold, 'robust' : options.robust} def iteration(ordering, iter_cnt=1, xml=None) : # use current optimization group if this is not first iteration order = ordering[0] message.note('Iteration %(iter_cnt)d uses "%(order)s"', **locals()) if xml : opt = database.optimize.options[order](xml=xml, **optimize_opts) else : opt = database.optimize.options[order](regressions, tests, **optimize_opts) run = opt.run()
def __init__(self, log_ids=[], test_ids=[], xml=None, threshold=0, robust=False, previous=None):
    'log_ids is a list of regression roots'
    # NOTE(review): mutable default arguments ([]) are shared across calls;
    # harmless only while callers never mutate them - confirm.
    self.log_ids = log_ids
    s_log_ids = ','.join(map(str, log_ids))
    self.tests = mdb.connection().row_cursor()
    if log_ids:
        # create table of individual runs, but not root node as this may have already summarised coverage
        self.tests.execute(
            'CREATE TEMPORARY TABLE ' + self.invs + ' AS SELECT l1.*, goal_id AS master FROM log AS l0 JOIN log AS l1 ON (l0.log_id = l1.root) LEFT OUTER JOIN master ON (l1.log_id = master.log_id) WHERE l1.root IN (' + s_log_ids + ');')
        self.tests.execute('SELECT count(*) AS children FROM ' + self.invs)
        children = self.tests.fetchone().children
        if children:
            message.information(
                '%(log_ids)s %(has)s %(children)d children',
                log_ids=s_log_ids, children=children,
                has='have' if len(log_ids) > 1 else 'has')
    # append individual runs as given by test_ids
    if xml:
        xml_ids = xml.xml.xpath('/optimize/test/log_id/text()')
    else:
        xml_ids = []
    if test_ids or xml_ids:
        s_test_ids = ','.join(map(str, test_ids + xml_ids))
        # extend the invocation table if the regression pass created it, else create it now
        create = ('INSERT INTO ' + self.invs) if log_ids else ('CREATE TEMPORARY TABLE ' + self.invs + ' AS')
        self.tests.execute(
            create + ' SELECT log.*, IFNULL(goal_id, goal.log_id) AS master FROM log LEFT OUTER JOIN master ON (log.log_id = master.log_id) LEFT OUTER JOIN goal ON (log.log_id = goal.log_id) WHERE log.log_id IN (' + s_test_ids + ') GROUP BY log_id;')
    self.tests.execute('SELECT count(*) AS tests FROM ' + self.invs)
    tests = self.tests.fetchone().tests
    if tests < 1:
        message.fatal('no tests')
    message.information('starting with %(count)d tests in table %(table)s', count=tests, table=self.invs)
    # check congruency
    self.cvg = mdb.connection().row_cursor()
    rows = self.cvg.execute(
        "SELECT md5_self AS md5, 'md5_self' AS type, invs.master, invs.root FROM point JOIN " + self.invs + " AS invs ON (invs.master = point.log_id AND point.parent IS NULL) GROUP BY md5;"
    )
    md5 = self.cvg.fetchall()
    if not md5:
        message.fatal('no master')
    elif len(md5) > 1:
        # multiple distinct md5_self values means incongruent coverage masters
        message.fatal('md5 of multiple masters do not match')
    else:
        message.debug('md5 query returns %(rows)d', rows=rows)
    self.master = mdb.accessor(md5=md5[0])
    # same congruency check on the axis definitions
    self.cvg.execute(
        "SELECT DISTINCT(md5_axes) AS md5, 'md5_axes' AS type, invs.master, invs.root FROM point JOIN " + self.invs + " AS invs ON (invs.master = point.log_id AND point.parent IS NULL) GROUP BY md5;"
    )
    md5 = self.cvg.fetchall()
    if len(md5) > 1:
        message.fatal('md5 of multiple axis masters do not match')
    self.master.axes = md5[0]
    # create status table, collating goal & hits
    self.cvg.execute(
        'CREATE TEMPORARY TABLE ' + self.covg + ' (bucket_id INTEGER NOT NULL PRIMARY KEY, goal INTEGER, hits INTEGER, total_hits INTEGER, rhits INTEGER, max_hits INTEGER, tests INTEGER);'
    )
    try:
        self.threshold = float(threshold)
    except:
        # fall back to 0.0 when the given threshold is not a number
        self.threshold = 0.0
        message.warning(
            'cannot convert threshold value given "%(arg)s" to float because %(exception)s, using %(threshold)2.1f',
            arg=threshold, exception=sys.exc_info()[0], threshold=self.threshold)
    self.robust = robust
    self.previous = previous
def prologue(self):
    """Create a batch of signal handles and log each one's current value."""
    message.note('Creating 1000 signal instances')
    signals = [duv.single_bit for _ in range(1000)]
    for index, signal in enumerate(signals):
        message.information('%(idx)d is %(val)d', idx=index, val=int(signal))
if not options.order: options.order = [ 'cvg', ] if options.regression is None: # presume leftover args are ids options.regression = values regressions = to_list(options.regression) tests = to_list(options.test) if not regressions and not tests: message.fatal('No invocations provided') message.information('optimizing begins') ################################################################################ coverage.messages.hush_creation() optimize_opts = {'threshold': options.threshold, 'robust': options.robust} def iteration(ordering, iter_cnt=1, xml=None): # use current optimization group if this is not first iteration order = ordering[0] message.note('Iteration %(iter_cnt)d uses "%(order)s"', **locals()) if xml: opt = database.optimize.options[order](xml=xml, **optimize_opts) else: opt = database.optimize.options[order](regressions, tests,
# location of static data
static = os.path.join(options.root, 'static')
message.debug('Using %(path)s for static data', path=static)


@bottle.get('/static/<filename:path>')
def serve_asset(filename):
    # any file beneath the static directory is served verbatim
    return bottle.static_file(filename, root=static)


@bottle.route('/')
@bottle.route('/index.html')
def serve_index():
    # both the bare root and /index.html resolve to the landing page
    return bottle.static_file('/index.html', root=static)


if options.gevent:
    import gevent
    from gevent import monkey
    monkey.patch_all()

    class _WsgiLog:
        # file-like adapter routing the WSGI server's log output to message
        @classmethod
        def write(cls, msg):
            message.note(msg)

    bottle_opts.update(server='gevent', log=_WsgiLog)

message.information('Starting bottle server')
bottle.run(**bottle_opts)

# KeyboardInterrupt gets us here ...
mdb.finalize_all()