def _do_get_peer_agreements(options):
    """Fetch and output peer agreement data for one AUID on one host.

    Single request to a single host: unthreaded. Prints a message and
    returns early when the AUID is unknown (service returned None).
    """
    result = get_peer_agreements(options.hosts[0], options._u, options._p,
                                 options.auids[0])
    if result is None:
        print('No such AUID')
        return
    for peer_entry in result:
        peer_id = peer_entry['peerId']
        # 'agreements' wraps a singleton dict whose 'entry' holds the list.
        for entry in peer_entry['agreements']['entry']:
            value = entry['value']
            _output_record(options, [
                peer_id,
                entry['key'],
                value['percentAgreement'],
                datetimems(value['percentAgreementTimestamp']),
                value['highestPercentAgreement'],
                datetimems(value['highestPercentAgreementTimestamp'])
            ])
def _do_get_peer_agreements(options): pa = get_peer_agreements(options.host, options.auth, options.auid) if pa is None: print 'No such AUID' return for pae in pa: for ae in pae.Agreements.Entry: _output_record(options, [ pae.PeerId, ae.Key, ae.Value.PercentAgreement, datetimems(ae.Value.PercentAgreementTimestamp), ae.Value.HighestPercentAgreement, datetimems(ae.Value.HighestPercentAgreementTimestamp) ])
def _do_get_peer_agreements(options): pa = get_peer_agreements(options.host, options.auth, options.auid) if pa is None: print 'No such AUID' return for pae in pa: for ae in pae.Agreements.Entry: _output_record(options, [pae.PeerId, ae.Key, ae.Value.PercentAgreement, datetimems(ae.Value.PercentAgreementTimestamp), ae.Value.HighestPercentAgreement, datetimems(ae.Value.HighestPercentAgreementTimestamp)])
def _do_get_peer_agreements(options): # Single request to a single host: unthreaded pa = get_peer_agreements(options.hosts[0], options.auth, options.auids[0]) if pa is None: print 'No such AUID' return for pae in pa: for ae in pae.Agreements.Entry: _output_record(options, [pae.PeerId, ae.Key, ae.Value.PercentAgreement, datetimems(ae.Value.PercentAgreementTimestamp), ae.Value.HighestPercentAgreement, datetimems(ae.Value.HighestPercentAgreementTimestamp)])
# Emit header rows for a pivoted table: the cartesian product of the
# per-dimension column keys yields every column tuple; header row j shows
# component j of each tuple, with the row-header labels only on the last
# header row.
# NOTE(review): lstcolkeys, rowheaders and data are defined earlier in the
# enclosing scope (not visible in this chunk) — confirm against the caller.
colkeys = [x for x in itertools.product(*lstcolkeys)]
for j in xrange(len(lstcolkeys)):
    if j < len(lstcolkeys) - 1:
        # Blank cells so header rows stay aligned with the row-header columns.
        rowpart = [''] * len(rowheaders)
    else:
        rowpart = rowheaders
    _output_record(options, rowpart + [x[j] for x in colkeys])
# One data row per distinct row key; missing (rowkey, colkey) cells come
# out as None via dict.get.
for rowkey in sorted(set([k[0] for k in data])):
    _output_record(options, list(rowkey) + [data.get((rowkey, colkey)) for colkey in colkeys])

# Mapping from AU status field name to a (display label, extractor) pair;
# each extractor pulls the corresponding attribute off a status record,
# converting timestamps via datetimems. (Dict continues past this chunk.)
_AU_STATUS = {
    'accessType': ('Access type', lambda r: r.AccessType),
    'availableFromPublisher': ('Available from publisher', lambda r: r.AvailableFromPublisher),
    'contentSize': ('Content size', lambda r: r.ContentSize),
    'crawlPool': ('Crawl pool', lambda r: r.CrawlPool),
    'crawlProxy': ('Crawl proxy', lambda r: r.CrawlProxy),
    'crawlWindow': ('Crawl window', lambda r: r.CrawlWindow),
    'creationTime': ('Creation time', lambda r: datetimems(r.CreationTime)),
    'currentlyCrawling': ('Currently crawling', lambda r: r.CurrentlyCrawling),
    'currentlyPolling': ('Currently polling', lambda r: r.CurrentlyPolling),
    'diskUsage': ('Disk usage', lambda r: r.DiskUsage),
    'journalTitle': ('Journal title', lambda r: r.JournalTitle),
    'lastCompletedCrawl': ('Last completed crawl', lambda r: datetimems(r.LastCompletedCrawl)),
    'lastCompletedPoll': ('Last completed poll', lambda r: datetimems(r.LastCompletedPoll)),
    'lastCrawl': ('Last crawl', lambda r: datetimems(r.LastCrawl)),
    'lastCrawlResult': ('Last crawl result', lambda r: r.LastCrawlResult),
    'lastPoll': ('Last poll', lambda r: datetimems(r.LastPoll)),
    'lastPollResult': ('Last poll result', lambda r: r.LastPollResult),
    'pluginName': ('Plugin name', lambda r: r.PluginName),
    'publisher': ('Publisher', lambda r: r.Publisher),
    'publishingPlatform': ('Publishing platform', lambda r: r.PublishingPlatform),
    'recentPollAgreement': ('Recent poll agreement', lambda r: r.RecentPollAgreement),
    'repository': ('Repository', lambda r: r.Repository),
# Emit one header row: rowpart (blanks or row-header labels) followed by
# component j of each column-key tuple.
# NOTE(review): rowpart, j, colkeys and data come from earlier in the
# enclosing scope (not visible in this chunk) — confirm against the caller.
_output_record(options, rowpart + [x[j] for x in colkeys])
# One data row per distinct row key; missing cells become None via dict.get.
for rowkey in sorted(set([k[0] for k in data])):
    _output_record(
        options,
        list(rowkey) + [data.get((rowkey, colkey)) for colkey in colkeys])

# Mapping from AU status field name to a (display label, extractor) pair;
# each extractor pulls the corresponding attribute off a status record.
# (Dict is truncated at this chunk boundary, mid-entry.)
_AU_STATUS = {
    'accessType': ('Access type', lambda r: r.AccessType),
    'availableFromPublisher': ('Available from publisher', lambda r: r.AvailableFromPublisher),
    'contentSize': ('Content size', lambda r: r.ContentSize),
    'crawlPool': ('Crawl pool', lambda r: r.CrawlPool),
    'crawlProxy': ('Crawl proxy', lambda r: r.CrawlProxy),
    'crawlWindow': ('Crawl window', lambda r: r.CrawlWindow),
    'creationTime': ('Creation time', lambda r: datetimems(r.CreationTime)),
    'currentlyCrawling': ('Currently crawling', lambda r: r.CurrentlyCrawling),
    'currentlyPolling': ('Currently polling', lambda r: r.CurrentlyPolling),
    'diskUsage': ('Disk usage', lambda r: r.DiskUsage),
    'journalTitle': ('Journal title', lambda r: r.JournalTitle),
    'lastCompletedCrawl': ('Last completed crawl', lambda r: datetimems(r.LastCompletedCrawl)),
    'lastCompletedPoll': ('Last completed poll', lambda r: datetimems(r.LastCompletedPoll)),
    'lastCrawl': ('Last crawl', lambda r: datetimems(r.LastCrawl)),
    'lastCrawlResult': ('Last crawl result', lambda r: r.LastCrawlResult),
    'lastPoll': ('Last poll', lambda r: datetimems(r.LastPoll)),
    'lastPollResult': ('Last poll result', lambda r: r.LastPollResult),
    'pluginName': ('Plugin name', lambda r: r.PluginName),
    'publisher': ('Publisher', lambda r: r.Publisher),
    'publishingPlatform':
# One data row per distinct row key; missing cells become None via dict.get.
# NOTE(review): data, colkeys and rowheaders come from earlier in the
# enclosing scope (not visible in this chunk) — confirm against the caller.
for rowkey in sorted(set([k[0] for k in data])):
    _output_record(
        options,
        list(rowkey) + [data.get((rowkey, colkey)) for colkey in colkeys])

# Mapping from AU status field name to a (display label, extractor) pair;
# in this variant the status record is a dict, so extractors use .get()
# (absent keys yield None). (Dict is truncated at this chunk boundary,
# mid-entry.)
_AU_STATUS = {
    'accessType': ('Access type', lambda r: r.get('accessType')),
    'availableFromPublisher': ('Available from publisher', lambda r: r.get('availableFromPublisher')),
    'contentSize': ('Content size', lambda r: r.get('contentSize')),
    'crawlPool': ('Crawl pool', lambda r: r.get('crawlPool')),
    'crawlProxy': ('Crawl proxy', lambda r: r.get('crawlProxy')),
    'crawlWindow': ('Crawl window', lambda r: r.get('crawlWindow')),
    'creationTime': ('Creation time', lambda r: datetimems(r.get('creationTime'))),
    'currentlyCrawling': ('Currently crawling', lambda r: r.get('currentlyCrawling')),
    'currentlyPolling': ('Currently polling', lambda r: r.get('currentlyPolling')),
    'diskUsage': ('Disk usage', lambda r: r.get('diskUsage')),
    'journalTitle': ('Journal title', lambda r: r.get('journalTitle')),
    'lastCompletedCrawl': ('Last completed crawl', lambda r: datetimems(r.get('lastCompletedCrawl'))),
    'lastCompletedPoll': ('Last completed poll', lambda r: datetimems(r.get('lastCompletedPoll'))),
    'lastCrawl': ('Last crawl', lambda r: datetimems(r.get('lastCrawl'))),
    'lastCrawlResult': ('Last crawl result', lambda r: r.get('lastCrawlResult')),
    'lastCompletedDeepCrawl': ('Last completed deep crawl',
_output_record(options, list(rowkey) + [data.get((rowkey, colkey)) for colkey in colkeys]) # Last modified 2015-08-31 def _file_lines(fstr): with open(os.path.expanduser(fstr)) as f: ret = filter(lambda y: len(y) > 0, [x.partition('#')[0].strip() for x in f]) if len(ret) == 0: sys.exit('Error: %s contains no meaningful lines' % (fstr,)) return ret _AU_STATUS = { 'accessType': ('Access type', lambda r: r.AccessType), 'availableFromPublisher': ('Available from publisher', lambda r: r.AvailableFromPublisher), 'contentSize': ('Content size', lambda r: r.ContentSize), 'crawlPool': ('Crawl pool', lambda r: r.CrawlPool), 'crawlProxy': ('Crawl proxy', lambda r: r.CrawlProxy), 'crawlWindow': ('Crawl window', lambda r: r.CrawlWindow), 'creationTime': ('Creation time', lambda r: datetimems(r.CreationTime)), 'currentlyCrawling': ('Currently crawling', lambda r: r.CurrentlyCrawling), 'currentlyPolling': ('Currently polling', lambda r: r.CurrentlyPolling), 'diskUsage': ('Disk usage', lambda r: r.DiskUsage), 'journalTitle': ('Journal title', lambda r: r.JournalTitle), 'lastCompletedCrawl': ('Last completed crawl', lambda r: datetimems(r.LastCompletedCrawl)), 'lastCompletedPoll': ('Last completed poll', lambda r: datetimems(r.LastCompletedPoll)), 'lastCrawl': ('Last crawl', lambda r: datetimems(r.LastCrawl)), 'lastCrawlResult': ('Last crawl result', lambda r: r.LastCrawlResult), 'lastCompletedDeepCrawl': ('Last completed deep crawl', lambda r: datetimems(r.LastCompletedDeepCrawl)), 'lastDeepCrawl': ('Last deep crawl', lambda r: datetimems(r.LastDeepCrawl)), 'lastDeepCrawlResult': ('Last deep crawl result', lambda r: r.LastDeepCrawlResult), 'lastCompletedDeepCrawlDepth': ('Last completed deep crawl depth', lambda r: r.LastCompletedDeepCrawlDepth), 'lastPoll': ('Last poll', lambda r: datetimems(r.LastPoll)), 'lastPollResult': ('Last poll result', lambda r: r.LastPollResult), 'lastMetadataIndex': ('Last metadata index', lambda r: datetimems(r.LastMetadataIndex)),