def main(argv):
    """Validate the installed datadog-agent deb package.

    Checks that the installed agent's major version matches the expected one
    (from the command line) and, when an expected version is given, that the
    install_info file agrees as well. Exits non-zero on any failed check.
    """
    expected_major_version = get_options(argv[1:])
    print("Expected major version: {}".format(expected_major_version))

    installed_version = get_apt_package_version("datadog-agent")
    print("Installed Agent version: {}".format(installed_version))

    # Guard clause: a major-version mismatch is fatal.
    if not check_major_version(installed_version, expected_major_version):
        print("Agent version check failed.")
        sys.exit(1)
    print("Agent version check successful!")

    # expected_major_version
    if not expected_major_version:
        print("Skipping install_info check.")
    elif check_install_info(expected_major_version):
        print("install_info check successful!")
    else:
        print("install_info check failed.")
        sys.exit(1)
    sys.exit()
def process_request(self, req):
    """Render the TeamCity build-history feed as a Trac page.

    Builds a feed URL from the configured build types (or from a single
    build type given in the request path), fetches the Atom feed and
    extracts title/link/date/summary for each entry.

    Raises HTTPNotFound when the feed cannot be loaded.
    """
    options = get_options(self.config)
    # /builds/history/btNN restricts the feed to one build type;
    # /builds/history shows every configured build type.
    build_type_match = re.match(r'/builds/history/(bt\d+)?$', req.path_info)
    if build_type_match is None:
        query_string = "&".join(['buildTypeId=%s' % bt_id for bt_id in options['builds']])
    else:
        build_type_id = build_type_match.groups()[0]
        query_string = 'buildTypeId=%s' % build_type_id
    add_stylesheet(req, 'teamcity/css/teamcity.css')
    query_string += '&sinceDate=-%d' % options['limit']
    feed_url = "%s/feed.html?%s" % (options['base_url'], query_string)
    # BUG FIX: the query object was created from self.options but xml_query
    # was then invoked on the class itself (TeamCityQuery.xml_query(feed_url)),
    # bypassing the configured instance. Build it from the freshly parsed
    # options and call it on the instance, as every other caller does.
    tc = TeamCityQuery(options)
    feed = tc.xml_query(feed_url)
    if feed is None:
        raise HTTPNotFound("Can't load feed")
    ATOM = '{http://www.w3.org/2005/Atom}'  # Atom XML namespace prefix
    data = {'entries': []}
    for entry in feed.iterfind(ATOM + 'entry'):
        title = entry.find(ATOM + 'title').text
        link = entry.find(ATOM + 'link').attrib['href']
        date = entry.find(ATOM + 'published').text
        date = datetime.strptime(date, '%Y-%m-%dT%H:%M:%SZ')
        summary = entry.find(ATOM + 'summary').text
        # Drop the trailing TeamCity-generated link from the summary HTML.
        summary = summary.split('<a href')[0]
        build_id_match = re.search(r'buildId=(\d+)', link)
        if build_id_match:
            summary += "<br/><a href='%s'>Build Log</a>" % \
                req.href.builds('download', build_id_match.groups()[0])
        data['entries'].append({
            'title': title,
            'link': link,
            'date': date,
            'summary': summary,
        })
    return 'teamcity_status.html', data, None
def main():
    """Compute the concave hull of the input points and draw it to /tmp."""
    pts = get_points()
    opts = get_options()
    opts['outdir'] = '/tmp/'
    opts['prefix'] = 'concave'
    hull_concave, hull_convex = get_concave(pts)
    draw(opts, hull_concave, pts, hull_convex)
def __init__(self):
    """Load plugin options and ensure the message cache directory exists.

    Falls back to /tmp/teamcity_cache when no cache_dir is configured.
    Failure to create the directory is logged but not fatal.
    """
    self.options = get_options(self.config)
    self.msg_cache_dir = self.options['cache_dir'] or '/tmp/teamcity_cache'
    if not os.path.exists(self.msg_cache_dir):
        try:
            os.mkdir(self.msg_cache_dir)
        # BUG FIX: 'except OSError,e' is a syntax error on Python 3;
        # 'as e' is valid on Python 2.6+ and 3.
        except OSError as e:
            self.log.error("Can't create cache_dir: %s" % e)
def main():
    """Parse the command line and produce the restart plots."""
    parser = argparse.ArgumentParser(
        description='Plot data stored in an sqlite database')
    parser.add_argument('--outdir', default='./',
                        help='output directory for the plots')
    parser.add_argument('--fontsize', default=matplotlib.rcParams['font.size'],
                        type=int, help='base font size of plots')
    parser.add_argument('--nodes', '-N', default=50, type=int,
                        help='nodes thay apply restart')
    parser.add_argument('--qd', default=0.7, type=float,
                        help='loss independent of tau')
    parser.add_argument('--ES', default=0.05, type=float,
                        help='expexted network sojourn time')
    parser.add_argument('--deadline', '-T', default=15, type=float,
                        help='deadline')
    parser.add_argument('--threshold', '-m', default=5, type=int,
                        help='threshold for jobs in the queue')
    parser.add_argument('--max_tau', default=10, type=float,
                        help='maximum tau (bounded by T)')
    args = parser.parse_args()

    options = get_options()
    options['prefix'] = 'restart'
    # Copy the straightforward command line values into the options dict.
    for key in ('outdir', 'fontsize', 'nodes', 'qd', 'ES',
                'deadline', 'threshold'):
        options[key] = getattr(args, key)
    # tau may never exceed the deadline T.
    options['max_tau'] = min(args.max_tau, args.deadline)
    plot(options)
def main(argv):
    """Validate the installed datadog-agent rpm package.

    Compares the installed agent's major version against the expected one
    from the command line; exits 0 on success, 1 on mismatch.
    """
    expected_major_version = get_options(argv[1:])
    print("Expected major version: {}".format(expected_major_version))

    installed_version = get_yum_package_version("datadog-agent")
    print("Installed Agent version: {}".format(installed_version))

    if check_major_version(installed_version, expected_major_version):
        print("Agent version check successful!")
        sys.exit()
    print("Agent version check failed.")
    sys.exit(1)
def render_admin_panel(self, req, category, page, path_info):
    """Render (and on POST, save) the TeamCity plugin admin page.

    Requires TEAMCITY_ADMIN permission. After a successful save the user
    is redirected back to the panel; otherwise the current options plus
    any validation errors are rendered.
    """
    if not req.perm.has_permission('TEAMCITY_ADMIN'):
        raise HTTPForbidden('You are not allowed to configure TC plugin')
    if req.method == 'POST':
        options, errors = self._save_options(req.args)
        if not errors:
            # redirect here
            req.redirect(req.href(req.path_info))
    else:
        options, errors = get_options(self.config), []
    tc = TeamCityQuery(options)
    url = "%s/httpAuth/app/rest/buildTypes" % options['base_url']
    # load builds from TC using REST API; the result is fetched only to
    # verify that the base connection options are valid
    try:
        builds_xml = tc.xml_query(url)
    # BUG FIX: 'except TeamCityError, e' is a syntax error on Python 3;
    # 'as e' is valid on Python 2.6+ and 3.
    except TeamCityError as e:
        errors.append("Fix base config options: %s" % e)
    t_data = {'options': options, 'projects': {}, 'errors': errors}
    return 'teamcity_admin.html', t_data
def render_admin_panel(self, req, category, page, path_info):
    """Render (and on POST, save) the TeamCity plugin admin page.

    Requires TEAMCITY_ADMIN permission. After a successful save the user
    is redirected back to the panel; otherwise the current options plus
    any validation errors are rendered.
    """
    if not req.perm.has_permission('TEAMCITY_ADMIN'):
        raise HTTPForbidden('You are not allowed to configure TC plugin')
    if req.method == 'POST':
        options, errors = self._save_options(req.args)
        if not errors:
            # redirect here
            req.redirect(req.href(req.path_info))
    else:
        options, errors = get_options(self.config), []
    tc = TeamCityQuery(options)
    url = "%s/httpAuth/app/rest/buildTypes" % options['base_url']
    # load builds from TC using REST API; the result is fetched only to
    # verify that the base connection options are valid
    try:
        builds_xml = tc.xml_query(url)
    # BUG FIX: 'except TeamCityError,e' is a syntax error on Python 3;
    # 'as e' is valid on Python 2.6+ and 3.
    except TeamCityError as e:
        errors.append("Fix base config options: %s" % e)
    t_data = {'options': options, 'projects': {}, 'errors': errors}
    return 'teamcity_admin.html', t_data
#!/usr/bin/python
from ruffus import *
import yaml
import os
import tasks
import helpers

# Config ---------------------------------------------------------------------
# Set up command line option handling, logger creation, and load config file
options = helpers.get_options()
logger_proxy, logging_mutex = helpers.make_logger(options, __file__)
# BUG FIX: the config file handle was opened and never closed, and yaml.load
# without an explicit Loader can execute arbitrary python tags from the file.
# safe_load restricts parsing to plain YAML; the context manager closes it.
with open(options.config) as config_fh:
    config = yaml.safe_load(config_fh)


def report(result):
    """Wrapper around Result.report"""
    result.report(logger_proxy, logging_mutex)


# Pipeline -------------------------------------------------------------------
@files(list(tasks.fastq_to_other_files(config, extension='.clipped')))
def clip(infile, outfile):
    """Clip raw fastq reads; delegates the work to tasks.clip."""
    result = tasks.clip(infile, outfile, config)


@transform(clip, suffix('.clipped'), '.clipped.bowtie.sam')
def map(infile, outfile):
    # NOTE: shadows the builtin `map`, but the function name is the ruffus
    # task identifier, so it is kept for pipeline compatibility.
    """Align clipped reads with bowtie and report the result."""
    result = tasks.bowtie(infile, outfile, config)
    report(result)
# NOTE(review): this fragment appears mid-file; `sys`, `os` and `helpers`
# must already be imported/defined above the visible region.
from os.path import (
    join, expanduser, expandvars, splitext, split, basename, dirname, exists
)
helpers.setup_shell_environment()
import tasks
import glob
#import pyprind
from termcolor import colored
import datetime

# Refuse to run on 32-bit systems: downstream tools (e.g. diamond) need 64 bit.
is_64bits = sys.maxsize > 2**32
#import graphviz
if not is_64bits:
    print "Please upgrade your operating system to 64 bit, application such as diamond don't run on 32 bit"
    sys.exit(0)

# Parse command line options and derive the paths the pipeline works with.
options = helpers.get_options()
#logger_proxy, logging_mutex = helpers.make_logger(options, __file__)
basedir = os.path.relpath('./')
projectDir = options.outdir # set output dir
proDir = os.path.basename(projectDir)
print proDir
inputDir = options.fastq_file
inputDir = os.path.abspath(inputDir)# get fastq.gz file
print inputDir
dir_name = basename(inputDir)
# Probe file split into directory part and bare file name.
probe = os.path.abspath(options.probe_file)
probPath, probFile = os.path.split(probe)
print probe
#plateWall_bc = os.path.abspath(options.sample_sheet)
def __init__(self):
    # Parse the plugin options out of the trac config once at creation time.
    self.options = get_options(self.config)
def process_request(self, req):
    """Render the TeamCity builds overview page.

    For every configured build type, queries the TeamCity REST API for the
    latest build and groups the results by project. Requires the
    TEAMCITY_BUILD permission.
    """
    if not req.perm.has_permission('TEAMCITY_BUILD'):
        raise HTTPForbidden('You are not allowed to view/run TC builds')
    options = get_options(self.config)
    tc = TeamCityQuery(options)
    # projects variable will collect builds result in following format:
    # {'projectId': {
    #     'name': 'Proj1',
    #     'btypes': [{  # it's a list of build types assigned to this project
    #         'btype_id': 'bt1',
    #         'btype_name': 'PS3 builds'
    #         'build': {
    #             'number': 5555,  # teamcity number
    #             'status': 'Success',
    #             'start_date': datetime_object,
    #             'end_date': datetime_object,
    #         },]
    #     }
    # }
    projects = {}
    for build_type in options['builds']:
        # load builds xml from teamcity
        url = "%s/httpAuth/app/rest/buildTypes/id:%s/builds" % \
            (options['base_url'], build_type)
        btype_xml = tc.xml_query(url)
        if btype_xml is None:
            self.log.error("Can't load builds xml at %s" % url)
            continue
        if len(btype_xml) < 1:
            # there is not any builds yet; fetch the buildType document to
            # still show the configuration with placeholder build data
            url = '%s/httpAuth/app/rest/buildTypes/id:%s' % (options['base_url'], build_type)
            build_xml = tc.xml_query(url)
            if build_xml is None:
                continue
            proj_id = build_xml.xpath('/buildType/project/@id')[0]
            # BUG FIX: proj_name was referenced below but never assigned on
            # this path (NameError); take it from the buildType document.
            proj_name = build_xml.xpath('/buildType/project/@name')[0]
            # collect here as many build info as possible and continue
            build_info = {
                'btype_id': build_type,
                'btype_name': build_xml.attrib['name'],
                'build': {
                    'id': None,
                    'number': None,
                    'status': 'unknown',
                    'end_date': 'Never',
                    'duration': 'Unknown'
                }
            }
            if proj_id in projects:
                projects[proj_id]['btypes'].append(build_info)
            else:
                # or create new item in projects
                projects[proj_id] = {
                    'name': proj_name,
                    'btypes': [build_info, ]
                }
            continue
        # There is at least one finished build
        last_build = btype_xml[0].attrib
        # load this build xml
        url = "%s%s" % (options['base_url'], last_build['href'])
        build_xml = tc.xml_query(url)
        if build_xml is None:
            self.log.error("Can't load build xml at %s" % url)
            # BUG FIX: execution previously fell through and dereferenced
            # build_xml (None) right below; skip this build type instead.
            continue
        proj_id = build_xml.xpath('/build/buildType/@projectId')[0]
        proj_name = build_xml.xpath('/build/buildType/@projectName')[0]
        # python2.5 has no support for timezones in datetime.strptime, so
        # the '+HHMM' offset is stripped before parsing.
        try:
            # datetime lacks timezone info
            start_date = build_xml.xpath('/build/startDate/text()')[0].split('+')[0]
            start_date = datetime.strptime(start_date, '%Y%m%dT%H%M%S')
            # datetime lacks timezone info
            end_date = build_xml.xpath('/build/finishDate/text()')[0].split('+')[0]
            end_date = datetime.strptime(end_date, '%Y%m%dT%H%M%S')
        except IndexError:
            # no start_date or end_date, duration is unknown
            end_date = 'Never'
            duration = 'Unknown'
        else:
            duration = end_date - start_date
        # parse build status
        try:
            status = build_xml.xpath('/build/statusText/text()')[0].lower()
        except IndexError:
            # no last status yet
            status = 'unknown'
        # result build dictionary
        build_info = {
            'btype_id': build_type,
            'btype_name': build_xml.xpath('/build/buildType/@name')[0],
            'build': {
                'id': build_xml.attrib['id'],
                'number': build_xml.attrib['number'],
                'status': status,
                'end_date': end_date,
                'duration': duration
            }
        }
        # add new build to project
        if proj_id in projects:
            projects[proj_id]['btypes'].append(build_info)
        else:
            # or create new item in projects
            projects[proj_id] = {
                'name': proj_name,
                'btypes': [build_info, ]
            }
    add_stylesheet(req, 'teamcity/css/teamcity.css')
    add_javascript(req, 'teamcity/js/jquery.timers-1.2.js')
    add_javascript(req, 'teamcity/js/event_tracker.js')
    return 'teamcity_builds.html', {
        'projects': projects,
        'dpath': req.href('builds/download')
    }, None
def main():
    """Entry point of the plotting module.

    Parses the command line, fills the options dictionary, connects to the
    sqlite database, optionally updates data from the testbed database, and
    finally runs the requested plot_* functions.
    """
    parser = argparse.ArgumentParser(description='Plot data stored in an sqlite database')
    parser.add_argument('--db', default='./sqlite.db', help='name of the database where the parsed and evaluated data is stored')
    parser.add_argument('--src', nargs='+', default=['all'], help='source node sending packets')
    parser.add_argument('--bins', default=10, type=int, help='granularity of the 3d box plot; bin size')
    parser.add_argument('--fix', nargs='+', default=[0], type=int, help='fix the number of parallel sources when routers failed [steps]')
    parser.add_argument('--outdir', default='', help='output directory for the plots')
    parser.add_argument('--fontsize', default=matplotlib.rcParams['font.size'], type=int, help='base font size of plots')
    parser.add_argument('--username', help='username for the database access to get the nodes\' positions')
    parser.add_argument('--password', help='password for the database access to get the nodes\' positions')
    parser.add_argument('--dbhost', default='uhu.imp.fu-berlin.de', help='hostname for the database access to get the nodes\' positions')
    parser.add_argument('--execute', '-e', nargs='+', default=['all'], help='Execute the specified functions')
    parser.add_argument('--list', nargs='?', const=True, default=False, help='List all callable plotting functions')
    parser.add_argument('--update', nargs='?', const=True, default=False, help='Download or update data from the testbed database')
    parser.add_argument('--statistics', nargs='?', const=True, default=False, help='Calculate and plot distance statistics')
    parser.add_argument('--deadline', default=40, type=float, help='Maximum time for the retransmit plot')
    parser.add_argument('--restarts', default=numpy.infty, type=float, help='Maximum number of retransmsmissions to plot')
    parser.add_argument('--repetitions', default=13, type=int, help='for MPR phi plotting')
    parser.add_argument('--intervals', nargs='+', default=[], type=float, help='Selected intervals for the evaluation')
    parser.add_argument('--special', nargs='+', default=[''], help='Special options for the plots')
    parser.add_argument('--mark', nargs='+', default=[], type=int, help='Mark p_s in plots for sources')
    parser.add_argument('--grayscale', nargs='?', const=True, default=False, help='Create plots with grayscale colormap')
    args = parser.parse_args()

    if args.list:
        print('Callable plot functions:')
        for func in sorted([key for key in globals().keys() if key.startswith('plot_')]):
            print('\t' + func)
        sys.exit(0)

    # BUG FIX: logging.basicConfig was called twice with identical arguments
    # (the second call is a silent no-op); configure logging exactly once.
    logging.basicConfig(level=logging.INFO, format='%(levelname)s [%(funcName)s] %(message)s')

    options = get_options()
    # copy the straightforward command line values into the options dict
    for name in ('db', 'src', 'bins', 'outdir', 'fontsize', 'username',
                 'fix', 'dbhost', 'statistics', 'restarts', 'deadline',
                 'intervals', 'special', 'mark', 'repetitions', 'grayscale'):
        options[name] = getattr(args, name)
    options['dbpassword'] = args.password
    options['show3d'] = False
    # assume the new RSSI sign convention (the old auto-detection query was
    # dead, commented-out code and has been removed)
    options['neg_rssi'] = False

    logging.info('connecting to database')
    conn = sqlite3.connect(options['db'])
    options['db_conn'] = conn
    cursor = conn.cursor()

    if args.update:
        logging.info('updating positions from %s', options['dbhost'])
        update_data(options)
        sys.exit(0)

    if 'all' in args.src:
        logging.info('all sources selected for plotting')
        try:
            cursor.execute('''
                CREATE TABLE eval_sources (
                    host TEXT NOT NULL,
                    FOREIGN KEY(host) REFERENCES addr(host)
                )
            ''')
            logging.info('extracting all source node names')
            cursor.execute('''
                INSERT INTO eval_sources
                SELECT DISTINCT(host)
                FROM tx
                ORDER BY host
            ''')
            conn.commit()
        except sqlite3.OperationalError:
            # table already exists from an earlier run; reuse its contents
            pass
        sources = cursor.execute('''
            SELECT DISTINCT(host)
            FROM eval_sources
            ORDER BY host
        ''').fetchall()
        options['src'] = list(pylab.flatten(sources))

    prepare_outdir(options)
    open_latex_file(options)
    eval_scenarios(options)
    write_scenarios(options)
    # NOTE(review): raises ValueError when no router positions are in the DB;
    # a previously commented-out try/except suggests this may need guarding.
    prepare_coordinates(options)

    if 'all' in args.execute:
        for func in [globals()[key] for key in globals().keys() if key.startswith('plot_')]:
            func(options)
    else:
        for func in args.execute:
            try:
                globals()[func](options)
            except KeyError:
                logging.critical('function not found: %s' % func)
                raise
    cursor.close()