class TestViews(unittest.TestCase):
    """View tests for the logcompare design document."""

    def setUp(self):
        # Fresh database per test so the counts are deterministic.
        couchquery.createdb(testdb_url)
        self.testdb = Database(testdb_url)
        self.testdb.sync_design_doc("logcompare", logcompare.design_doc)

    def tearDown(self):
        couchquery.deletedb(testdb_url)

    def test_runCounts(self):
        """runCounts (reduce+group) counts docs per [product, os, testtype] key."""
        product = 'Fennec'
        os = 'Maemo'
        testtype = 'crashtests'
        count = 3
        # create documents where for a specific number of them the product,
        # os, and testtype are specified
        for i in range(0, count):
            mockdata.create_doc(self.testdb, product=product, os=os, testtype=testtype)
        # plus noise documents that must not land in the same key
        for i in range(0, 10):
            mockdata.create_doc(self.testdb)
        counts = self.testdb.views.logcompare.runCounts(reduce=True, group=True).items()
        for key, value in counts:
            if key == [product, os, testtype]:
                # assertEqual replaces the deprecated failUnlessEqual alias
                self.assertEqual(value, count,
                                 "Value should be equal to the count for a combination of product, os, testtype")
class TestViews(unittest.TestCase):
    """View tests for the logcompare design document."""

    def setUp(self):
        # Each test starts from a freshly created database.
        couchquery.createdb(testdb_url)
        self.testdb = Database(testdb_url)
        self.testdb.sync_design_doc("logcompare", logcompare.design_doc)

    def tearDown(self):
        couchquery.deletedb(testdb_url)

    def test_runCounts(self):
        """runCounts (reduce+group) counts docs per [product, os, testtype] key."""
        product = 'Fennec'
        os = 'Maemo'
        testtype = 'crashtests'
        count = 3
        # create documents where for a specific number of them the product,
        # os, and testtype are specified
        for i in range(0, count):
            mockdata.create_doc(self.testdb, product=product, os=os, testtype=testtype)
        # extra unrelated documents that must not be counted under the key
        for i in range(0, 10):
            mockdata.create_doc(self.testdb)
        counts = self.testdb.views.logcompare.runCounts(reduce=True, group=True).items()
        for key, value in counts:
            if key == [product, os, testtype]:
                # assertEqual replaces the deprecated failUnlessEqual alias
                self.assertEqual(
                    value, count,
                    "Value should be equal to the count for a combination of product, os, testtype"
                )
def setupdb():
    """(Re)create the test database and install both design docs.

    Returns:
        The couchquery Database handle for the freshly created database.
    """
    db = Database("http://localhost:5984/test_pythonviews")
    try:
        deletedb(db)
    except Exception:
        # best-effort cleanup: the database may simply not exist yet
        pass
    createdb(db)
    db.sync_design_doc("pythonView", os.path.join(this_directory, "design"),
                       language="python")
    db.sync_design_doc("javascriptView", os.path.join(this_directory, "design"),
                       language="javascript")
    return db
def handle_notifications(doc, req):
    """Store the request body as a notification document and spawn its jobs.

    Returns the notification document together with its JSON serialization.
    """
    database = Database(req['db'])
    notification = json.loads(req['body'])
    notification['_id'] = str(uuid.uuid1())
    notification['type'] = 'notification'

    spawned = func(notification, req, database)
    for job in spawned:
        # link each job back to the notification that produced it
        job['parent-notification'] = notification['_id']
        created = database.create(job)
        job['_id'] = created['id']
    notification['jobs'] = spawned
    return notification, json.dumps(notification)
def main(): db = Database(pull_url) data = getTinderboxData() testName = re.compile('((reftest)|(crashtests)|(xpcshell))') build_table = data['build_table'] build = build_table[0] log_files = [] for build in build_table: builds = [( b['buildtime'], b, ) for b in build if type(b) is not int and 'buildname' in b and 'test' in b['buildname']] log_files += builds builds = [value for key, value in sorted(log_files)] for b in builds: tbox_id = b['logfile'] if len(getByTinderboxID(db, tbox_id)) is 0: parseFile(tbox_id) else: print 'skipping ' + tbox_id
class CouchDB:
    """Thread-safe CouchDB facade with a small insertion-ordered read cache.

    Reads go through read_db and are cached; writes go through write_db
    and drop any stale cached copy.
    """
    # single lock shared by all instances, guarding `cache`
    lock = Lock()

    def __init__(self):
        self.write_db = Database(Constants.WRITE_URL)
        self.read_db = Database(Constants.READ_URL)
        # insertion-ordered cache: the oldest entry is evicted first
        self.cache = OrderedDict()

    def getDoc(self, id):
        """Return the document with the given id, preferring the cache."""
        with self.lock:
            if id not in self.cache:
                # document not in cache: fetch it and remember it
                document = self.read_db.get(id)
                if len(self.cache) == Constants.CACHE_SIZE:
                    self.cache.popitem(False)  # remove the oldest entry from cache
                self.cache[id] = document
            else:
                # get from cache
                document = self.cache[id]
            return document

    def createDoc(self, json):
        return self.write_db.create(json)

    def deleteDoc(self, id):
        """Delete a document and drop any cached copy of it.

        The document is fetched *before* taking the lock: getDoc acquires
        the same non-reentrant Lock, so calling it while already holding
        the lock (as the original code did) would deadlock.
        """
        doc = self.getDoc(id)
        with self.lock:
            self.cache.pop(id, None)  # clear document from cache
            self.write_db.delete(doc)

    def saveDoc(self, doc):
        # was self.db.save(doc): self.db is never defined; writes belong to write_db
        self.write_db.save(doc)

    def updateDoc(self, doc):
        with self.lock:
            # drop the stale cached copy before writing the update
            self.cache.pop(doc[Constants.DOCUMENT_ID], None)
            self.write_db.update(doc)
def main(): db = Database(pull_url) data = getTinderboxData() testName = re.compile('((reftest)|(crashtests)|(xpcshell))') build_table = data['build_table'] build = build_table[0] for build in build_table: for b in [b for b in build if type(b) is not int]: for k in b: if (k == "buildname" and testName.search(b[k])): print "checking out buildname: " + b[k] tbox_id = b['logfile'] if len(getByTinderboxID(db, tbox_id)) is 0: parseFile(tbox_id) else: print 'skipping ' + tbox_id
def setUp(self):
    """Create a fresh test database and install the logcompare design doc."""
    couchquery.createdb(testdb_url)
    self.testdb = Database(testdb_url)
    self.testdb.sync_design_doc("logcompare", logcompare.design_doc)
import os
# cache eggs under the user's home dir (needed for mod_wsgi-style deployments)
os.environ['PYTHON_EGG_CACHE'] = os.path.expanduser('~/.eggs')

import couchquery
couchquery.debugging = False
from couchquery import Database

import brasstacks
from brasstacks import crashtest
from brasstacks import fennec

# separate databases for crashtest pages and for the run results
crashdb = Database('http://localhost:5984/crashtest')
resultdb = Database('http://localhost:5984/crashtest_results')

# WSGI entry point: stub application with the crashtest resource mounted
application = brasstacks.Stub()
application.add_resource('crashtest', crashtest.CrashTestApplication(crashdb, resultdb))
def execute(self, quals, columns):
    """Yield result rows for a PostgreSQL foreign-data-wrapper scan backed by CouchDB.

    quals   -- pushed-down restrictions; 'key_N' quals select view keys and
               'p_*' quals become view call parameters
    columns -- output column names requested by the query

    Any exception is reported through the '_runtime_error' output column
    instead of being raised to the caller.
    """
    # the row to return
    line = {}
    # noinspection PyBroadException
    try:
        # instantiate the database object
        db = Database(self.connection_string)
        debug = ''
        view_container = db.views
        # if the call is for a view
        if self.has_sub_package:
            # build the params object that will be passed to couchdb call, filled with default parameters
            params = {'startkey': None, 'endkey': {}, 'group': False, 'reduce': False}
            # check provided keys against expected keys
            provided_keys = [int(qual.field_name.split('_')[1]) for qual in quals if qual.field_name.startswith('key_')]
            expected_keys = [number for number in range(len(provided_keys))]
            error = False
            # if the provided keys not fitting with expected keys, return a blank line with the error description
            if not all(k in expected_keys for k in provided_keys):
                error = True
                line['_runtime_error'] = "Keys doesn't follow a correct sequence key_0 ... key_1 ... " + str(provided_keys) + ' ' + str(expected_keys)
                insert_quals_in_output(line, quals)
                yield line
            # and then get out
            if error is True:
                return
            last_used_index = -1
            # clean and replace if needed the default parameters
            startkey = {}
            endkey = {}
            # portion of code to build the keys object to pass as parameters to the view call
            for qual in quals:
                # deal with named key parameters
                if qual.field_name.startswith('key_'):
                    last_used_index = qual.field_name.split('_')[1]
                    startkey[last_used_index] = qual.value
                    endkey[last_used_index] = qual.value
                # deal with other parameters
                if qual.field_name.startswith('p_'):
                    # NOTE(review): eval on a query-supplied string -- fine only if
                    # quals always come from a trusted SQL layer; confirm upstream.
                    params[qual.field_name[2:]] = eval(qual.value) if qual.value != '' else qual.value
            # if provided a single key
            if last_used_index != -1:
                params['startkey'] = []
                params['endkey'] = []
                # NOTE(review): range stops before int(last_used_index), so the
                # highest-numbered key_N appears to be excluded from startkey --
                # looks like an off-by-one; confirm against the view semantics.
                for i in range(0, int(last_used_index)):
                    params['startkey'].append(startkey[str(i)])
                    params['endkey'].append(endkey[str(i)])
                params['endkey'].append({})
            view_container = getattr(view_container, self.sub_package)
            # do the view call with the params object as parameters
            result = getattr(view_container, self.target_view)(**params)
        else:
            # do the 'all' call
            result = getattr(view_container, self.target_view)()
        # this case covers views call
        if self.has_sub_package:
            # read the return from the call and fill the line object with the values
            for key, value in result.items():
                # deal with the 'value'
                line['value'] = value if not isinstance(value, dict) else json.dumps(value)
                # deal with the 'key'
                line['key'] = key
                # split the 'key' into named output values
                for column_name in columns:
                    if column_name.startswith('key_'):
                        column_index = int(column_name.split('_')[1])
                        parsed_key = eval(str(key))
                        # and then fill the correct output column
                        line[column_name] = parsed_key[column_index]
                # insert the quals into output
                insert_quals_in_output(line, quals)
                # yield the line to the postgresql call
                yield line
        # this case covers the 'all' call
        else:
            # set the column values
            for record in result:
                for column_name in columns:
                    # if a '_doc' columm is provided, put the jsonified record to output column
                    if column_name == '_doc':
                        line[column_name] = json.dumps(record)
                    else:
                        if column_name == '_runtime_error':
                            line[column_name] = debug
                        else:
                            line[column_name] = record.get(column_name, None) if record is not None else None
                # insert the quals into output
                insert_quals_in_output(line, quals)
                # yield the line to the postgresql call
                yield line
    except:
        # deliberate catch-all: any exception is surfaced via _runtime_error
        exc_type, exc_value, exc_traceback = sys.exc_info()
        # insert the quals into output
        insert_quals_in_output(line, quals)
        # format the message
        error_message = repr(traceback.format_exception(exc_type, exc_value, exc_traceback))
        # if column _runtime_error is defined, set the formatted exception in the column
        if '_runtime_error' in self.columns:
            line['_runtime_error'] = error_message
        yield line
def main():
    """Install/refresh the 'pyCrash' Python design doc in the crashtest database."""
    design_path = os.path.join(os.path.dirname(__file__), 'design')
    crash_db = Database('http://localhost:5984/crashtest')
    crash_db.sync_design_doc('pyCrash', design_path, language="python")
def __init__(self):
    """Open separate write/read database handles and an empty ordered cache."""
    self.cache = OrderedDict()
    self.write_db = Database(Constants.WRITE_URL)
    self.read_db = Database(Constants.READ_URL)
# http://docs.python.org/library/unittest.html import unittest import couchquery from couchquery import Database from couchquery import Httplib2Client import logcompare from logcompare import LogCompareApplication from logcompare import Then from logcompare import mockdata db = Database("http://pythonesque.org:5984/logcompare") logcompare_application = LogCompareApplication(db) # setting up database # testdb_url = "http://*****:*****@happyhans.couch.io/test-logcompare" # couchquery.createdb(testdb_url) # testdb = Database(testdb_url) # testdb.sync_design_doc("logcompare", logcompare.design_doc) # print testdb.views.logcompare.runCounts(reduce = True, group = True).items() # deleting database # couchquery.deletedb(testdb_url) testdb_url = "http://*****:*****@happyhans.couch.io/test-logcompare" # testdb = Database(testdb_url) # print testdb # couchquery.createdb(testdb_url) class TestViews(unittest.TestCase):
import os
# cache eggs under the user's home dir (needed for mod_wsgi-style deployments)
os.environ['PYTHON_EGG_CACHE'] = os.path.expanduser('~/.eggs')

import couchquery
couchquery.debugging = False
from couchquery import Database

import brasstacks
from brasstacks.sitecompare import SiteCompareApplication
from brasstacks.users import UsersApplication
from brasstacks.fennec import FennecApplication
from brasstacks.tcm import TestCaseManagerApplication
from brasstacks.logcompare import LogCompareApplication
from brasstacks.firefox import FirefoxApplication

# shared brasstacks database plus one dedicated database per sub-application
db = Database("http://localhost:5984/brasstacks")
users_application = UsersApplication(db)
sitecompare_application = SiteCompareApplication(
    Database("http://localhost:5984/sitecompare"))
fennec_application = FennecApplication(
    Database("http://localhost:5984/fennec_results"))
tcm_application = TestCaseManagerApplication(
    Database("http://localhost:5984/tcm"))
logcompare_application = LogCompareApplication(
    Database("http://pythonesque.org:5984/logcompare"))
firefox_application = FirefoxApplication(
    Database("http://localhost:5984/firefox"))

# WSGI entry point with each sub-application mounted as a resource
application = brasstacks.Stub()
application.add_resource('sitecompare', sitecompare_application)
application.add_resource('users', users_application)
import os
# cache eggs under the user's home dir (needed for mod_wsgi-style deployments)
os.environ['PYTHON_EGG_CACHE'] = os.path.expanduser('~/.eggs')

import couchquery
couchquery.debugging = False
from couchquery import Database

import brasstacks
from brasstacks.sitecompare import SiteCompareApplication
from brasstacks.users import UsersApplication
from brasstacks.fennec import FennecApplication
from brasstacks.tcm import TestCaseManagerApplication
from brasstacks.logcompare import LogCompareApplication
from brasstacks.mozmill import MozmillApplication

# NOTE(review): the middle of this module was lost to credential redaction --
# the remaining text fused `db = Database("http://` with the logcompare URL,
# leaving an unbalanced parenthesis (a syntax error). Reconstructed below
# following the sibling deployment script's pattern; confirm the URLs and the
# full list of sub-applications against the original source.
db = Database("http://localhost:5984/brasstacks")
users_application = UsersApplication(db)
sitecompare_application = SiteCompareApplication(
    Database("http://localhost:5984/sitecompare"))
fennec_application = FennecApplication(
    Database("http://localhost:5984/fennec_results"))
tcm_application = TestCaseManagerApplication(
    Database("http://localhost:5984/tcm"))
logcompare_application = LogCompareApplication(
    Database("http://*****:*****@happyhans.couch.io/logcompare"))
mozmill_application = MozmillApplication(
    Database("http://localhost:5984/mozmill"))

# WSGI entry point with each sub-application mounted as a resource
application = brasstacks.Stub()
application.add_resource('sitecompare', sitecompare_application)