def test_versioning(dummy_site):
    """Queries must respect the capabilities of the target Gerrit version."""
    site = dummy_site(lambda x: x, '2.0.0')
    assert site.version == SV.Version('2.0.0')

    # An unknown option aborts argument parsing entirely.
    with pytest.raises(SystemExit):
        gssh.Query('--badoption', '')

    # An empty command is accepted at construction but cannot run.
    query = gssh.Query('')
    with pytest.raises(NotImplementedError):
        query.execute_on(site)

    # '--all-reviewers' arrived after 2.4, so a 2.4.0 site rejects it.
    site = dummy_site(lambda x: x, '2.4.0')
    query = gssh.Query('--all-reviewers')
    with pytest.raises(NotImplementedError):
        query.execute_on(site)
def test_ol_query(live_instance):
    """Smoke-test a capped open-review query against a live Gerrit site."""
    logger = logging.getLogger('test_ol_query')
    logger.setLevel(logging.DEBUG)
    logger.debug('Launching Query')

    query = gssh.Query(query='status:open', max_results=3)
    results = query.execute_on(live_instance)
    logger.debug('Query Done {0}'.format(results))

    # At least one open review should exist, and the cap must hold.
    assert results != []
    assert len(results) <= 3
def test_init():
    """A Query built with no arguments is truthy and starts with no results."""
    query = gssh.Query()
    assert query
    assert query.results == []
def test_execute(open_review_text, open_review):
    """Exercise Query.execute_on against a duck-typed Site.

    Checks that a query parses one review, that open_reviews() does the
    same, that max_results truncates a larger response set, and that every
    parsed Review carries the expected SHA1.
    """
    responses = [open_review_text, '']

    # Duck-typed Site which replays canned query output.  Each instance
    # iterates over `responses` (late-bound from the enclosing scope, so
    # reassigning it below affects subsequently created instances).
    class DummySite(gssh.Site):
        def __init__(self, site):
            super(DummySite, self).__init__(site)
            self.gen = iter(responses)

        def execute(self, cmd):
            # Commands handed to a Site must always be plain strings.
            assert isinstance(cmd, str)
            return next(self.gen)

        @property
        def version(self):
            return SV.Version('2.9.0')

        @property
        def connected(self):
            return True

    # Execute a dummied query and examine the response.
    s = DummySite('...')
    q = gssh.Query('', 'status:open', 100)
    r = q.execute_on(s)
    assert isinstance(r, list)
    assert len(r) == 1
    assert isinstance(r[0], gssh.Review)

    # open_reviews() (suitably dummied) must likewise return a list
    # containing a single Review object.
    opn = gssh.open_reviews()
    r = opn.execute_on(DummySite('...'))
    assert isinstance(r, list)
    assert len(r) == 1
    assert isinstance(r[0], gssh.Review)

    # Ensure responses are limited: offer 20 reviews, cap the query at 10.
    responses = [open_review_text] * 20 + ['']
    opn = gssh.open_reviews(max_results=10)
    r = opn.execute_on(DummySite('...'))
    assert len(r) == 10

    # Iterating the results yields Review objects with the correct SHA1.
    for rv in r:
        assert isinstance(rv, gssh.Review)
        assert rv.SHA1 == open_review.SHA1
def query_cmd(site, args):
    '''
    Execute the query command and print out the results
    grouped by project and ordered by change number.
    '''
    # Assemble the individual search terms; absent options become ''.
    branch_term = 'branch:{0}'.format(args.branch) if args.branch else ''
    project_term = 'project:{0}'.format(args.project) if args.project else ''
    status_term = 'status:{0}'.format(args.qstatus) if args.qstatus else ''
    free_term = ' '.join(args.querystring) if args.querystring else ''

    qry = gssh.Query('',
                     ' '.join([branch_term, project_term,
                               status_term, free_term]),
                     max_results=args.maxresults or 0)
    log.debug('Executing query command {0} on {1}'.format(qry._Query__query,
                                                          site.site))
    qry.execute_on(site)

    # Emit a project heading whenever the project changes, then one
    # indented line per review under it.
    last_project = ''
    ordered = sorted(qry, key=lambda review: review.project + ' ' + review.ref)
    for review in ordered:
        if review.project != last_project:
            print('\n{0}:'.format(review.project))
            last_project = review.project
        print('\t({0})\t{1}'.format(review.ref, review.summary))
    return 0
def main():
    """Query gerrit for a user's merged/pending changes and write one
    patch document (or folder of documents) per change under OUTPUT.

    Side effects: parses CLI options, connects to gerrit over ssh,
    optionally wipes OUTPUT and LOC_FILE, opens the global LF log file,
    chdirs around while generating files, and logs progress via LOG.
    """
    # LF is a module-level file handle; presumably create_file/create_folder
    # append to it — TODO confirm against their definitions.
    global LF
    parser = optparse.OptionParser(usage="usage: %prog [options]",
                                   version="%prog 1.0")
    parser.add_option("-o", "--owner", dest="owner", action='store',
                      help="gerrit pwner [default: %default]",
                      metavar="OWNER", default=OWNER)
    parser.add_option("-s", "--server", dest="server", action='store',
                      help="gerrit server [default: %default]",
                      metavar="SERVER", default=GERRIT_HOST)
    parser.add_option("-p", "--port", dest="port", action='store', type="int",
                      help="gerrit port [default: %default]",
                      metavar="PORT", default=GERRIT_PORT)
    parser.add_option("--start-time", dest="start_time", action='store',
                      type="string",
                      help="start time for querrying in "
                           "gerrit, in format: YYYY-MM-DD",
                      metavar="STARTTIME", default=START_TIME)
    parser.add_option("-k", "--keyfile", dest="keyfile", action='store',
                      help="gerrit ssh keyfile [default: use local keyfile]",
                      metavar="FILE", default=None)
    parser.add_option("-P", "--passphrase", dest="passphrase", action='store',
                      help="passphrase in case of enrypting keyfile",
                      metavar="PASS", default=None)
    parser.add_option("-u", "--user", dest="user", action='store',
                      help="gerrit user to querry [default: %default]",
                      metavar="USER", default=OWNER)
    parser.add_option("-d", "--del", dest="bdel", action='store', type="int",
                      help="whether to delete delivery folder "
                           "and loc file [default: %default]",
                      metavar="OPTION", default=0)
    (options, args) = parser.parse_args()
    # Validates the YYYY-MM-DD format before anything expensive happens.
    check_date(options.start_time)
    owner = options.owner
    start_time = options.start_time
    port = options.port
    server = options.server
    keyfile = options.keyfile
    passp = options.passphrase
    user = options.user
    bdel = options.bdel
    # Connect to gerrit and pull every merged or pending change the user
    # owns since start_time, including commit messages.
    rsite = gssh.Site(server, owner, port, keyfile, passp).connect()
    plist = gssh.Query(
        '--commit-message',
        'owner:' + user + ' AND (status:merged OR status:pending)' +
        ' since:' + start_time).execute_on(rsite)
    LOG.info("| Total gerrit results: %d", len(plist))
    # -d 1: start from a clean slate (best-effort delete, errors ignored).
    if bdel == 1:
        shutil.rmtree(OUTPUT, True)
        if os.path.exists(LOC_FILE):
            os.remove(LOC_FILE)
    LF = open(LOC_FILE, 'a')
    # Create delivery folder
    if not os.path.exists(OUTPUT):
        os.makedirs(OUTPUT)
    if OUTPUT.startswith('/'):
        root_dir = OUTPUT
    else:
        root_dir = "/".join([os.getcwd(), OUTPUT])
    for p in plist:
        LOG.info("|_ Generating doc from gerrit patch: %s ", p.number)
        # Each change starts from root_dir; create_folder calls below
        # chdir deeper for multi-patchset changes.
        os.chdir(root_dir)
        project_name = '[' + p.repo_name.split('/')[-1] + ']'
        topic = get_topic_name(p)
        pss = p.patchsets
        patch_urls = {}
        # Prefer the bug id as the document name, fall back to change id.
        if topic.bug:
            name = topic.bug
        else:
            name = topic.change
        # Build a gitweb patch URL per patchset, keyed by patchset number.
        # (iteritems: this file is Python 2.)
        for num, ps in pss.iteritems():
            patch_urls[num] = PROTO + GERRIT_HOST + \
                '/gitweb?p=' + p.repo_name + \
                '.git;a=patch;h=' + \
                ps.raw['revision']
        LOG.info('|____ Project: %s', project_name)
        LOG.info('|____ Topic: %s', name)
        LOG.info('|____ PS count: %s', len(patch_urls))
        patch_name = project_name
        # Blueprint changes get their own subfolder; the project prefix
        # then moves to the folder name instead of the file name.
        if topic.bp:
            directory = create_folder(patch_name, topic.bp)
            os.chdir("/".join([os.getcwd(), directory]))
            patch_name = None
        if len(patch_urls) == 1:
            # Single patchset: one file, annotated with size stats and the
            # commit message (truncated at 'Change-Id', newlines flattened).
            create_file(patch_name, name, patch_urls[1],
                        (p.patchsets[1].raw['sizeInsertions'],
                         p.patchsets[1].raw['sizeDeletions'],
                         p.raw['commitMessage'].split('Change-Id')[0].replace(
                             '\n', ' ')))
        else:
            # Multiple patchsets: a folder per change, one file per
            # patchset.  NOTE(review): these files get only (insertions,
            # deletions) — no commit message — unlike the single-PS path.
            directory = create_folder(
                patch_name, name,
                p.raw['commitMessage'].split('Change-Id')[0].replace(
                    '\n', ' '))
            os.chdir("/".join([os.getcwd(), directory]))
            tmp_name = name
            if topic.bug and topic.change:
                tmp_name = topic.bug + '_' + topic.change
            for patch_num, patch_url in patch_urls.iteritems():
                create_file(None, tmp_name, patch_url,
                            (p.patchsets[patch_num].raw['sizeInsertions'],
                             p.patchsets[patch_num].raw['sizeDeletions']),
                            patch_num=patch_num)
    LF.close()
    LOG.info("|_ FIN!")