def check_version(dataset, error_type, train_file):
    """Check whether train and test are of the same version"""
    # Version of the training split is the reference.
    train_version = utils.get_version(
        utils.get_dir(dataset, error_type, train_file))
    # Every associated test split must carry the identical version.
    for test_file in utils.get_test_files(error_type, train_file):
        test_version = utils.get_version(
            utils.get_dir(dataset, error_type, test_file))
        assert train_version == test_version
def format_issue(issue_data):
    """ Build our formatted GitHub issue string """
    # os.uname() is not available on Windows, so we make this optional.
    try:
        uname_info = os.uname()
        os_string = ' (%s %s %s)' % (uname_info[0], uname_info[2], uname_info[4])
    except AttributeError:
        os_string = ''
    sections = [
        "*Automatic bug report from end-user.*\n## Environment\n"
        "**Add-on Name:** %s" % config.NAME,
        "**Add-on ID:** %s" % config.ADDON_ID,
        "**Add-on Version:** %s" % utils.get_version(),
        "**Kodi Version:** %s" % get_xbmc_version(),
        "**Python Version:** %s" % sys.version.replace('\n', ''),
        "**Operating System:** %s %s" % (sys.platform, os_string),
        "**IP Address:** %s" % get_public_ip(),
        "**ISP:** %s" % get_isp(),
        "**Kodi URL:** %s" % sys.argv[2],
        "**Python Path:**\n```\n%s\n```" % '\n'.join(sys.path),
        "\n## Traceback\n```\n%s\n```" % issue_data,
    ]
    # Attach a link to the uploaded log when the upload succeeded.
    log_url = upload_log()
    if log_url:
        sections.append("\n[Full log](%s)" % log_url)
    return "\n".join(sections)
def about_clicked(self, arg):
    """Show the About dialog with version, licence and credits."""
    about_dialog = Gtk.AboutDialog()
    about_dialog.set_name("Labyrinth")
    about_dialog.set_version(utils.get_version())
    if os.name != 'nt':
        try:
            about_dialog.set_logo_icon_name("labyrinth")
        except Exception:
            # Was a bare except; the logo is cosmetic, so any failure
            # setting it should not prevent the dialog from showing.
            pass
    else:
        about_dialog.set_logo(
            GdkPixbuf.Pixbuf.new_from_file("images\\labyrinth-24.png"))
    about_dialog.set_license(
        "Labyrinth is free software; you can redistribute it and/or modify "
        "it under the terms of the GNU General Public Licence as published by "
        "the Free Software Foundation; either version 2 of the Licence, or "
        "(at your option) any later version."
        "\n\n"
        "Labyrinth is distributed in the hope that it will be useful, "
        "but WITHOUT ANY WARRANTY; without even the implied warranty of "
        "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the "
        "GNU General Public Licence for more details."
        "\n\n"
        "You should have received a copy of the GNU General Public Licence "
        "along with Labyrinth; if not, write to the Free Software Foundation, Inc., "
        "59 Temple Place, Suite 330, Boston, MA 02111-1307 USA")
    about_dialog.set_wrap_license(True)
    about_dialog.set_copyright("2006-2008 Don Scorgie et. al")
    about_dialog.set_authors(AUTHORS)
    about_dialog.set_website("http://code.google.com/p/labyrinth")
    about_dialog.set_translator_credits(_("Translation by Don Scorgie"))
    # Modal run, then hide and drop the reference.
    about_dialog.run()
    about_dialog.hide()
    del about_dialog
    return
def __repr__(self):
    # Human-readable dump of the training configuration: source file,
    # timestamp, platform, package version, then one line per setting.
    # Note: several values are wrapped in 1-tuples because they may
    # themselves be tuples and %-formatting would otherwise unpack them.
    return str(__file__) \
        + "\n" + str(datetime.datetime.now()) \
        + "\n" + str(platform.platform()) \
        + "\nDeepNeuroAN - {}".format(get_version()) \
        + "\n" + "class Training()" \
        + "\n\t input data dir : %s" % self._data_dir \
        + "\n\t checkpoint dir : %s" % self._ckpt_dir \
        + "\n\t model name : %s" % self._model_name \
        + "\n\t weights dir : %s" % self._weights_dir \
        + "\n\t seed : %s" % self._seed \
        + "\n\t number of epochs : %s" % (self._epochs,) \
        + "\n\t batch size : %s" % self._batch_size \
        + "\n\t kernel size : %s" % (self._kernel_size,) \
        + "\n\t pool size : %s" % (self._pool_size,) \
        + "\n\t dilation rate : %s" % (self._dilation,) \
        + "\n\t strides : %s" % (self._strides,) \
        + "\n\t padding : %s" % self._padding \
        + "\n\t activation : %s" % self._activation \
        + "\n\t batch norm : %s" % self._batch_norm \
        + "\n\t preprocessing (convolution) layers : %s" % self._preproc_layers \
        + "\n\t preprocessing (gaussian) layers : %s" % self._gaussian_layers \
        + "\n\t motion correction : %s" % (not self._use_template) \
        + "\n\t unsupervised learning : %s" % (self._unsupervised) \
        + "\n\t dropout : %f" % self._dropout \
        + "\n\t encode rate : %f" % self._encode_rate \
        + "\n\t regression rate : %f" % self._regression_rate \
        + "\n\t filters : %d" % self._filters \
        + "\n\t units : %d" % self._units \
        + "\n\t number of encoding layer : %d" % self._encode_layers \
        + "\n\t number of regression layer : %d" % self._regression_layers \
        + "\n\t learning rate : %f" % self._lr \
        + "\n\t number of cpus : %d" % self._ncpu \
        + "\n\t gpu : %d" % self._gpu
def about_clicked(self, arg):
    """Show the About dialog with version, licence and credits."""
    about_dialog = Gtk.AboutDialog()
    about_dialog.set_name("Labyrinth")
    about_dialog.set_version(utils.get_version())
    if os.name != 'nt':
        try:
            about_dialog.set_logo_icon_name("labyrinth")
        except Exception:
            # Was a bare except; the logo is cosmetic, so any failure
            # setting it should not prevent the dialog from showing.
            pass
    else:
        about_dialog.set_logo(
            GdkPixbuf.Pixbuf.new_from_file("images\\labyrinth-24.png"))
    about_dialog.set_license(
        "Labyrinth is free software; you can redistribute it and/or modify "
        "it under the terms of the GNU General Public Licence as published by "
        "the Free Software Foundation; either version 2 of the Licence, or "
        "(at your option) any later version."
        "\n\n"
        "Labyrinth is distributed in the hope that it will be useful, "
        "but WITHOUT ANY WARRANTY; without even the implied warranty of "
        "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the "
        "GNU General Public Licence for more details."
        "\n\n"
        "You should have received a copy of the GNU General Public Licence "
        "along with Labyrinth; if not, write to the Free Software Foundation, Inc., "
        "59 Temple Place, Suite 330, Boston, MA 02111-1307 USA")
    about_dialog.set_wrap_license(True)
    about_dialog.set_copyright("2006-2008 Don Scorgie et. al")
    about_dialog.set_authors(AUTHORS)
    about_dialog.set_website("http://code.google.com/p/labyrinth")
    about_dialog.set_translator_credits(_("Translation by Don Scorgie"))
    about_dialog.run()
    about_dialog.hide()
    del about_dialog
    return
def parseArgs(self, argv):
    """Parse programs args."""
    parser = OptionParser(self.USAGE, formatter=TitledHelpFormatter(),
                          version="FunkLoad %s" % get_version())
    parser.add_option("-v", "--verbose", action="store_true",
                      help="Verbose output")
    parser.add_option("-p", "--port", type="string", dest="port",
                      default=self.port, help="The proxy port.")
    parser.add_option("-i", "--tcp-watch-input", type="string",
                      dest="tcpwatch_path", default=None,
                      help="Path to an existing tcpwatch capture.")
    parser.add_option("-l", "--loop", type="int", dest="loop",
                      default=1, help="Loop mode.")
    opts, positional = parser.parse_args(argv)
    # Exactly one positional argument is the test name, otherwise none.
    name = positional[0] if len(positional) == 1 else None
    self.verbose = opts.verbose
    self.tcpwatch_path = opts.tcpwatch_path
    self.port = opts.port
    # Loop mode only applies when recording live (no name, no capture file).
    if not name and not self.tcpwatch_path:
        self.loop = opts.loop
    if name:
        name = name.replace('-', '_')
        cls = ''.join(part.capitalize() for part in re.split('_|-', name))
        self.test_name = name
        self.class_name = cls
        self.script_path = './test_%s.py' % cls
        self.configuration_path = './%s.conf' % cls
def perform_operation():
    """Interactive menu: ask which OCP version is being deployed and which
    storage combination to generate an inventory snippet for."""
    print (60 * '-')
    print ("\033[91m CIC - Inventory File Creator for CNS 3.9, OCS 3.10 & 3.11 \033[0m")
    print (60 * '-')
    ocpver = utils.get_version('What version of OpenShift Container Platform are you deploying (3.9, 3.10 & 3.11)?: ')
    # NOTE(review): ocpver is collected above but 'ver' is hard-coded to
    # 3.11 — confirm whether the prompt answer should be used instead.
    ver = '3.11'
    print (60 * '-')
    # Python 2 print statements (this module targets Python 2 only).
    print "\033[91m \r\nThe output is NOT A COMPLETE Inventory File."
    print "Created Inventory file options should be copied and pasted into"
    print "the larger openshift-ansible inventory file for your deployment.\r\n \033[0m"
    print (60 * '-')
    print ("1. Storage for Applications + Registry ")
    print ("2. Storage for Applications + Logging")
    print ("3. Storage for Applications + Metrics ")
    print ("4. Storage for Applications + Registry + Logging + Metrics")
    print ("5. Storage for Applications Only")
    print (60 * '-')
    is_valid = 0
    # Loop until the user enters a parseable integer choice.
    while not is_valid:
        try:
            choice = int(raw_input('Enter your choice [1-5] : '))
            is_valid = 1
        except ValueError, e:
            # Py2 except syntax; show the offending text from the message.
            print ("'%s' is not a valid integer." % e.args[0].split(": ")[1])
def renderDefinitions(self):
    """Render field definition.

    Appends an RST bullet list explaining every abbreviation used in the
    benchmark report, followed by a generator credit line.
    """
    self.append(rst_title("Definitions", 2))
    self.append(LI + ' CUs: Concurrent users or number of concurrent threads'
                ' executing tests.')
    self.append(LI + ' Request: a single GET/POST/redirect/XML-RPC request.')
    self.append(LI + ' Page: a request with redirects and resource'
                ' links (image, css, js) for an HTML page.')
    self.append(LI + ' STPS: Successful tests per second.')
    self.append(LI + ' SPPS: Successful pages per second.')
    self.append(LI + ' RPS: Requests per second, successful or not.')
    self.append(LI + ' maxSPPS: Maximum SPPS during the cycle.')
    self.append(LI + ' maxRPS: Maximum RPS during the cycle.')
    self.append(LI + ' MIN: Minimum response time for a page or request.')
    self.append(LI + ' AVG: Average response time for a page or request.')
    # typo fix: was "Maximmum"
    self.append(LI + ' MAX: Maximum response time for a page or request.')
    self.append(LI + ' P10: 10th percentile, response time where 10 percent'
                ' of pages or requests are delivered.')
    self.append(LI + ' MED: Median or 50th percentile, response time where half'
                ' of pages or requests are delivered.')
    self.append(LI + ' P90: 90th percentile, response time where 90 percent'
                ' of pages or requests are delivered.')
    self.append(LI + ' P95: 95th percentile, response time where 95 percent'
                ' of pages or requests are delivered.')
    self.append(LI + Apdex.description_para)
    self.append(LI + Apdex.rating_para)
    self.append('')
    self.append('Report generated with FunkLoad_ ' + get_version() +
                ', more information available on the '
                '`FunkLoad site <http://funkload.nuxeo.org/#benching>`_.')
def get_all_cpes(self):
    """Collect every CPE seen on this host.

    Returns a dict mapping CPE string -> list of TCP port numbers where
    it was observed, or -1 when the CPE describes the operating system.
    """
    container = dict()
    # get cpe from tcp ports
    for port, item in self.tcp_ports.items():
        cpe = item["cpe"]
        # add version if missing in CPE (idiom fix: 'is None', truthiness)
        if utils.get_version(cpe) is None and item.get("version"):
            ver_list = item["version"].split(" ")
            # only take the first token; strip a trailing ".X" wildcard
            if ver_list[0].find(".X") != -1:
                version = ver_list[0][:ver_list[0].find(".X")]
            else:
                version = ver_list[0]
            cpe += ":" + version
        # group ports under the (possibly version-augmented) CPE
        container.setdefault(cpe, []).append(int(port))
    # get os cpe
    if self.os is not None:
        for os_class in self.os["osclass"]:
            if "cpe" in os_class and len(os_class["cpe"]) > 0:
                for cpe in os_class["cpe"]:
                    container[cpe] = -1  # -1 port indicates an OS cpe
    return container
def test_server_additional_config(host):
    """postgresql.conf must preload pg_stat_statements and use dated logs."""
    ver = get_version(host.backend.get_hostname())
    conf = host.file('/var/lib/pgsql/%s/data/postgresql.conf' % ver).content_string
    settings = conf.split('\n')
    assert "shared_preload_libraries = 'pg_stat_statements'" in settings
    assert "log_filename = 'postgresql-%F.log'" in settings
def process_page(s, rcnturl, li):
    """Scrape one addon listing entry: follow its detail page, then its
    CurseForge page, and return a dict describing the addon (or None on
    connection reset / missing document)."""
    name = li.a.text
    ao_url = urljoin(rcnturl, li.a['href'])
    LOG.info(ao_url)
    try:
        ao_resp = s.get(ao_url)
    except requests.exceptions.ReadTimeout:
        LOG.error('Read timeout: Internet connected?')
        sys.exit(1)
    except ConnectionResetError:
        LOG.error('Connection reset, skipping')
        return None
    ao_soup = BeautifulSoup(ao_resp.text, "html.parser")
    det = ao_soup.find('div', class_='main-details')
    cfurl = det.find('li', class_='curseforge').a.get('href')
    if cfurl[0:2] == '//':
        # They started returning url's without http(s)
        cfurl = "http:" + cfurl
    cfresp = s.get(cfurl)
    cfsoup = BeautifulSoup(cfresp.text, 'html.parser')
    # the curseforge page has an info pane with a lot of stuff we want
    cfip = cfsoup.find('div', class_='lastUnit')
    cfdl = urljoin(cfresp.url,
                   cfip.find('li', class_='user-action-download').a.get('href'))
    # cffacts = cfip.find('div', class_='content-box-inner')
    cffacts = cfip.find('h3', text='Facts').parent
    # this gets us a unix timestamp for created date
    cfcreated = cffacts.find('dt', text="Date created").find_next('dd').span.get('data-epoch')
    cfupdated = cffacts.find('dt', text="Last update").find_next('dd').span.get('data-epoch')
    cflicurl = cffacts.find('a', class_='license').get('href')
    cflicname = cffacts.find('a', class_='license').text
    # find the most recent download
    # TODO older releases, notes, etc
    cfdlresp = s.get(cfdl)
    cfdlsoup = BeautifulSoup(cfdlresp.text, 'html.parser')
    cfdlfile = cfdlsoup.find('li', class_='user-action-download').a.get('href')
    # warm the download cache for the latest file
    CACHE.get(cfdlfile)
    # TODO need more ways of getting tags
    addon = {
        'name': name,
        'tags': [det.find('a', class_='main-category').get('title')],
        'authors': [x.li.a.text for x in det.find_all('ul', class_='authors')],
        'wowver': det.find('li', class_='version').text.split(' ')[-1],
        'forge': det.find('li', class_='curseforge').a.get('href'),
        'relqual': det.find('li', class_='release').text.split(' ')[-1],
        'latest': get_version(det.find('li', class_='newest-file').text),
        'created': cfcreated,
        'updated': cfupdated,
        'license': (cflicname, cflicurl),
        'download': cfdlfile,
        'datetime': datetime.datetime.now(),
    }
    return addon
def _funkload_init(self): """Initialize a funkload test case using a configuration file.""" # look into configuration file config_path = getattr(self._options, 'config', None) if not config_path: config_directory = os.getenv('FL_CONF_PATH', '.') config_path = os.path.join(config_directory, self.__class__.__name__ + '.conf') config_path = os.path.abspath(os.path.expanduser(config_path)) if not os.path.exists(config_path): config_path = "Missing: " + config_path config = ConfigParser() config.read(config_path) self._config = config self._config_path = config_path self.conf = ConfSectionFinder(self) self.default_user_agent = self.conf_get('main', 'user_agent', 'FunkLoad/%s' % get_version(), quiet=True) if self.in_bench_mode: section = 'bench' else: section = 'ftest' self.setOkCodes( self.conf_getList(section, 'ok_codes', [200, 301, 302, 303, 307], quiet=True)) self.sleep_time_min = self.conf_getFloat(section, 'sleep_time_min', 0) self.sleep_time_max = self.conf_getFloat(section, 'sleep_time_max', 0) self._simple_fetch = self.conf_getInt(section, 'simple_fetch', 0, quiet=True) self.log_to = self.conf_get(section, 'log_to', 'console file') self.log_path = self.conf_get(section, 'log_path', 'funkload.log') self.result_path = os.path.abspath( self.conf_get(section, 'result_path', 'funkload.xml')) # init loggers if self.in_bench_mode: level = logging.INFO else: level = logging.DEBUG self.logger = get_default_logger(self.log_to, self.log_path, level=level) self.logger_result = get_default_logger(log_to="xml", log_path=self.result_path, name="FunkLoadResult") #self.logd('_funkload_init config [%s], log_to [%s],' # ' log_path [%s], result [%s].' % ( # self._config_path, self.log_to, self.log_path, self.result_path)) # init webunit browser (passing a fake methodName) self._browser = WebTestCase(methodName='log') self.clearContext()
def setUpClass(cls):
    """Initialize (or re-initialize) libblockdev with the requested plugins."""
    distro, _version = get_version()
    if distro == "debian":
        # presence of the variable (even empty) enables the skip
        os.environ["LIBBLOCKDEV_SKIP_DEP_CHECKS"] = ""
    if BlockDev.is_initialized():
        BlockDev.reinit(cls.requested_plugins, True, None)
    else:
        BlockDev.init(cls.requested_plugins, None)
def n(neighbours=False):
    'shows your neighbours'
    # Generator yielding one formatted status line per peer (plus child
    # links when neighbours=True).
    conf = utils.load_conf(CJDROUTE_CONF)
    c = cjdns.connect(password=conf['admin']['password'])
    # ip, path, version, bytesIn, bytesOut, state, dup/lost/out-of-range
    STAT_FORMAT = '%s %19s v%-2d %9d %9d %12s %d/%d/%d '
    nodestore = list(c.dumpTable())
    # Map public key -> friendly name from the on-disk peer definitions.
    connections = {}
    try:
        for peer in os.listdir(YRD_PEERS):
            with open(os.path.join(YRD_PEERS, peer)) as f:
                info = json.load(f)
            try:
                connections[info['pk']] = str(info['name'])
            except KeyError:
                # unnamed peer definition: skip silently
                pass
    except OSError:
        # peers directory missing/unreadable: proceed without names
        pass
    for peer in c.peerStats():
        result = c.nodeForAddr(peer.ip)['result']
        route = utils.grep_ns(nodestore, peer.ip)
        path = utils.get_path(route)
        setattr(peer, 'path', path)
        line = STAT_FORMAT % (peer.ip, peer.path, peer.version,
                              peer.bytesIn, peer.bytesOut, peer.state,
                              peer.duplicates, peer.lostPackets,
                              peer.receivedOutOfRange)
        if hasattr(peer, 'user'):
            line += repr(peer.user)
        elif peer.publicKey in connections:
            line += repr(connections[peer.publicKey])
        yield line
        if neighbours:
            # Walk this peer's links and report each child node.
            for i in range(result['linkCount']):
                link = c.getLink(peer.ip, i)
                if link and 'child' in link['result']:
                    child = link['result']['child']
                    route = utils.grep_ns(nodestore, child)
                    version = utils.get_version(route)
                    path = utils.get_path(route)
                    yield ' %s %s v%s' % (child, path, version)
                else:
                    yield ' -'
    c.disconnect()
def test_server_log_file_name(host):
    """A dated postgresql log file exists for today or yesterday."""
    # Check previous day too in case this is run at midnight
    ver = get_version(host.backend.get_hostname())
    logdir = '/var/lib/pgsql/%s/data/pg_log' % ver
    today = datetime.today()
    candidates = [today, today - timedelta(days=1)]
    names = ['%s/postgresql-%s.log' % (logdir, d.strftime('%F'))
             for d in candidates]
    assert any(host.file(name).is_file for name in names)
def parseArgs(self, argv):
    """Parse programs args.

    Returns (args[1], args[2], options); exits via parser.error when the
    three expected positional arguments are missing.
    """
    parser = OptionParser(self.usage, formatter=TitledHelpFormatter(),
                          version="FunkLoad %s" % get_version())
    # fix: help text previously said "Verbose output" for the quiet flag
    parser.add_option("-q", "--quiet", action="store_true",
                      help="Quiet output")
    options, args = parser.parse_args(argv)
    if len(args) != 3:
        parser.error("Missing argument")
    return args[1], args[2], options
def test_server_additional_config(host):
    """postgresql.conf enables pg_stat_statements and dated log filenames."""
    hostname = host.backend.get_hostname()
    version = get_version(hostname)
    # Config path differs between RHEL-family and Debian-family layouts.
    if get_distribution(hostname) == 'centos':
        template = '/var/lib/pgsql/{version}/data/postgresql.conf'
    else:
        template = '/etc/postgresql/{version}/main/postgresql.conf'
    content = host.file(template.format(version=version)).content_string
    lines = content.split('\n')
    assert "shared_preload_libraries = 'pg_stat_statements'" in lines
    assert "log_filename = 'postgresql-%F.log'" in lines
def _sendmessage(self, message, sock=None, trans_id=None, lock=None):
    """Stamp, bencode and (optionally) send a DHT message to this node."""
    message["v"] = get_version()
    if trans_id:
        message["t"] = trans_id
    encoded = bencode(message)
    # Without a socket there is nothing to send (message was still stamped).
    if sock is None:
        return
    if lock:
        # Serialize concurrent senders sharing this socket.
        with lock:
            sock.sendto(encoded, (self.host, self.port))
    else:
        sock.sendto(encoded, (self.host, self.port))
def crawl_blog_post(blog_id, log_no, tags, written_time=None, verbose=True):
    """Fetch one blog post's mobile page and build its structured record,
    or return None when the post body cannot be retrieved."""
    def get_title(root):
        # Title markup differs between editor versions; try both selectors.
        result = ''
        try:
            result = root.xpath('//h3[@class="tit_h3"]/text()')[0].strip()
        except Exception:
            pass
        if result != '':
            return result
        try:
            result = root.xpath('//h3[@class="se_textarea"]/text()')[0].strip()
        except Exception:
            pass
        #return root.xpath('//h3[@class="tit_h3"]/text()')[0].strip()
        return result

    def get_page_html(url):
        # Returns (soup of the post body, title) or (None, None) on IOError.
        try:
            page = requests.get(url, headers=headers)
            root = html.fromstring(page.content)
            elem = root.xpath('//div[@class="_postView"]')[0]
            html_ = etree.tostring(elem)
            return (BeautifulSoup(html_, 'lxml'), get_title(root))
        except IOError:
            print ''
            return (None, None)

    #if blog_id.startswith('http'):
    #    url = blog_id
    #else:
    url = mobileurl % (blog_id, log_no)
    (doc, title) = get_page_html(url)
    if doc:
        crawled_time = utils.get_today_str()
        crawler_version = utils.get_version()
        #url = posturl % (blog_id, log_no)
        post_tags = tags[(blog_id, log_no)]
        directory_seq = None  # NOTE: No directory sequence given for query crawler
        post = btc.make_structure(blog_id, log_no, None, doc, crawled_time,
                                  crawler_version, title, written_time, url,
                                  post_tags, directory_seq)
        if not verbose:
            # trim fields not wanted in compact output
            del post['directorySeq']
            del post['sympathyCount']
        return post
    else:
        print 'No doc in %s' % posturl
        return None
def _funkload_init(self): """Initialize a funkload test case using a configuration file.""" # look into configuration file config_path = getattr(self._options, 'config', None) if not config_path: config_directory = os.getenv('FL_CONF_PATH', '.') config_path = os.path.join(config_directory, self.__class__.__name__ + '.conf') config_path = os.path.abspath(os.path.expanduser(config_path)) if not os.path.exists(config_path): config_path = "Missing: "+ config_path config = ConfigParser() config.read(config_path) self._config = config self._config_path = config_path self.conf = ConfSectionFinder(self) self.default_user_agent = self.conf_get('main', 'user_agent', 'FunkLoad/%s' % get_version(), quiet=True) if self.in_bench_mode: section = 'bench' else: section = 'ftest' self.setOkCodes( self.conf_getList(section, 'ok_codes', [200, 301, 302, 303, 307], quiet=True) ) self.sleep_time_min = self.conf_getFloat(section, 'sleep_time_min', 0) self.sleep_time_max = self.conf_getFloat(section, 'sleep_time_max', 0) self._simple_fetch = self.conf_getInt(section, 'simple_fetch', 0, quiet=True) self.log_to = self.conf_get(section, 'log_to', 'console file') self.log_path = self.conf_get(section, 'log_path', 'funkload.log') self.result_path = os.path.abspath( self.conf_get(section, 'result_path', 'funkload.xml')) # init loggers if self.in_bench_mode: level = logging.INFO else: level = logging.DEBUG self.logger = get_default_logger(self.log_to, self.log_path, level=level) self.logger_result = get_default_logger(log_to="xml", log_path=self.result_path, name="FunkLoadResult") #self.logd('_funkload_init config [%s], log_to [%s],' # ' log_path [%s], result [%s].' % ( # self._config_path, self.log_to, self.log_path, self.result_path)) # init webunit browser (passing a fake methodName) self._browser = WebTestCase(methodName='log') self.clearContext()
def on_about(self, sender):
    """Show the application's About dialog."""
    authors = [
        "Daniel Borgmann <*****@*****.**>",
        "Nils R Grotnes <*****@*****.**>",
    ]
    ad = gtk.AboutDialog()
    ad.set_name("Chessmemory")
    ad.set_comments("Open Source Chess Viewer and Database Tool")
    ad.set_version(utils.get_version())
    ad.set_copyright("Copyright © 2006 Daniel Borgmann")
    ad.set_authors(authors)
    ad.set_license(utils.get_license())
    ad.set_logo_icon_name("chessmemory")
    ad.run()
def on_about(self, sender):
    """Show the application's About dialog."""
    authors = [
        "Daniel Borgmann <*****@*****.**>",
        "Nils R Grotnes <*****@*****.**>",
    ]
    ad = gtk.AboutDialog()
    ad.set_name("Chessmonk")
    ad.set_comments("Open Source Chess Viewer and Database Tool")
    ad.set_version(utils.get_version())
    ad.set_copyright("Copyright © 2006 Daniel Borgmann")
    ad.set_authors(authors)
    ad.set_license(utils.get_license())
    ad.set_logo_icon_name("chessmonk")
    ad.run()
def make_request(url):
    """ Make our JSON request to GitHub """
    headers = {
        "Authorization": "Basic %s" % config.ISSUE_API_AUTH,
        "Content-Type": "application/json",
        "User-Agent": '%s/%s' % (config.ADDON_ID, utils.get_version()),
    }
    return urllib2.Request(url, headers=headers)
def parseArgs(self, argv):
    """Parse programs args.

    Returns (configuration file path, options); exits via parser.error
    when the configuration file argument is missing.
    """
    parser = OptionParser(self.usage, formatter=TitledHelpFormatter(),
                          version="FunkLoad %s" % get_version())
    parser.add_option("-v", "--verbose", action="store_true",
                      help="Verbose output")
    # typo fix in help text: was "forground"
    parser.add_option("-d", "--debug", action="store_true",
                      help="debug mode, server is run in foreground")
    options, args = parser.parse_args(argv)
    if len(args) != 2:
        parser.error("Missing configuration file argument")
    return args[1], options
def __repr__(self):
    """Return a multi-line summary of the generation configuration.

    BUG FIX: the original ended the concatenation chain with a bare
    conditional expression; since ``a + b if c else d`` parses as
    ``(a + b) if c else d``, repr() collapsed to just "no seed" whenever
    _seed was None. The seed line is now computed separately.
    """
    if self._seed is not None:
        seed_line = "\n\t seed : %d \n" % self._seed
    else:
        seed_line = "\n\t no seed \n"
    return str(__file__) \
        + "\n" + str(datetime.datetime.now()) \
        + "\n" + str(platform.platform()) \
        + "\nDeepNeuroAN - {}".format(get_version()) \
        + "\n" + "class TrainingGeneration()" \
        + "\n\t input data dir : %s" % self._data_dir \
        + "\n\t dest dir : %s" % self._out_dir \
        + "\n\t n transformations : %d" % self._nb_transfs \
        + "\n\t maximum rotation : %.2f deg" % (self._range_rad * 180 / math.pi) \
        + "\n\t maximum translation : %.2f mm" % self._range_mm \
        + "\n\t p outliers : %.2f" % (self._p_outliers) \
        + seed_line
def test_server_log_file_name(host):
    """A dated postgresql log exists for today or yesterday."""
    # Check previous day too in case this is run at midnight
    hostname = host.backend.get_hostname()
    version = get_version(hostname)
    # Log directory differs between RHEL-family and Debian-family layouts.
    if get_distribution(hostname) == 'centos':
        logdir_tpl = '/var/lib/pgsql/{version}/data/pg_log'
    else:
        logdir_tpl = '/var/lib/postgresql/{version}/main/pg_log'
    logdir = logdir_tpl.format(version=version)
    today = datetime.today()
    yesterday = today - timedelta(days=1)
    paths = ['%s/postgresql-%s.log' % (logdir, d.strftime('%F'))
             for d in (today, yesterday)]
    assert any(host.file(p).is_file for p in paths)
def fill_status_bar(self):
    """Populate the status bar: HOME shortcut, version and build labels."""
    home_widget = QWidget()
    layout = QHBoxLayout(home_widget)
    layout.addWidget(QLabel("HOME: ", home_widget))
    home_button = QPushButton(utils.PAPARAZZI_HOME, home_widget)
    layout.addWidget(home_button)
    # Clicking the button opens a terminal at the Paparazzi home directory.
    home_button.clicked.connect(
        lambda: utils.open_terminal(utils.PAPARAZZI_HOME))
    bar = self.statusBar()
    bar.addPermanentWidget(home_widget)
    bar.addPermanentWidget(utils.make_line(None, True))
    bar.addPermanentWidget(QLabel("Version={}".format(utils.get_version())))
    bar.addPermanentWidget(utils.make_line(None, True))
    bar.addPermanentWidget(QLabel("Build={}".format(utils.get_build_version())))
def test_server_listen(host):
    """Exactly one listen_addresses setting, restricted to localhost."""
    hostname = host.backend.get_hostname()
    ver = get_version(hostname)
    with host.sudo():
        value = '/var/lib/pgsql/%s/data/postgresql.conf' % ver
        f = host.file(value).content_string
        count_listen_addresses = 0
        # initialize so a zero-match run fails on the count assert,
        # not on an unbound local
        listen_addresses = None
        for line in f.split('\n'):
            # raw string: '\s' is a regex class, not a Python escape
            # (matches the sibling test that already uses r'...')
            if match(r'\s*listen_addresses', line):
                count_listen_addresses += 1
                listen_addresses = line
        assert count_listen_addresses == 1
        assert listen_addresses == "listen_addresses = localhost"
def main():
    """Build/register the veusz_plugins distribution."""
    metadata = dict(
        name='veusz_plugins',
        version=get_version('veusz_plugins/__init__.py'),
        description='A collection of miscellaneous plugins for the veusz graphing application',
        long_description=description('README.rst'),
        author='Dave Hughes',
        author_email='*****@*****.**',
        url='https://github.com/waveform80/veusz_plugins',
        packages=find_packages(exclude=['distribute_setup', 'utils']),
        install_requires=['xlrd'],
        platforms='ALL',
        zip_safe=False,
        classifiers=classifiers,
    )
    setup(**metadata)
def parse_argument(argv):
    """Parse command line options into util.OPT flags.

    Returns True when the caller should continue, False when a terminal
    option (-h/--version) was handled or parsing failed.
    """
    long_opts = ['help', 'version', 'verbose']
    short_opts = 'hvmia'
    try:
        opts_list, args_pos = getopt.getopt(argv[1:], short_opts, long_opts)
    except getopt.GetoptError:
        util.print_empty_line()
        print 'Error : args parser '
        usage()
        return False
    # positional arguments are not accepted
    if args_pos:
        usage()
        return False
    for opt, val in opts_list:
        if opt in ['-h', '--help']:
            usage()
            return False
        elif opt == '--version':
            print util.get_version()
            return False
        elif opt in ['-v', '--verbose']:
            util.OPT.verbose = True
        elif opt == '-m':
            util.OPT.meminfo = True
        elif opt == '-i':
            util.OPT.ion = True
        elif opt == '-a':
            util.OPT.ionAndMeminfo = True
        else:
            print 'Error: wrong option : ' + opt
            return False
    return True
def reqs_yml(self):
    """ Export a requirements file in yml format. """
    # One-entry template; %placeholders are substituted below, and the
    # name/scm lines are removed entirely when not applicable.
    default_yml_item = """
- src: '%src'
 name: '%name'
 scm: '%scm'
 version: '%version'
"""
    role_lines = "---\n"
    for role in sorted(self.report["roles"]):
        name = utils.normalize_role(role, self.config)
        galaxy_name = "{0}.{1}".format(self.config["scm_user"], name)
        yml_item = default_yml_item
        if self.config["scm_host"]:
            # SCM-hosted role: src is a URL, name stays in galaxy form
            yml_item = yml_item.replace("%name", "{0}".format(galaxy_name))
            if self.config["scm_repo_prefix"]:
                role = self.config["scm_repo_prefix"] + name
            src = os.path.join(self.config["scm_host"],
                               self.config["scm_user"], role)
        else:
            # galaxy-hosted role: src is the galaxy name; drop extra lines
            src = galaxy_name
            yml_item = yml_item.replace(" name: '%name'\n", "")
            yml_item = yml_item.replace(" scm: '%scm'\n", "")
        yml_item = yml_item.replace("%src", src)
        if self.config["scm_type"]:
            yml_item = yml_item.replace("%scm", self.config["scm_type"])
        else:
            yml_item = yml_item.replace(" scm: '%scm'\n", "")
        # role version comes from the VERSION file inside the role dir
        version_path = os.path.join(self.roles_path, role, "VERSION")
        version = utils.get_version(version_path)
        yml_item = yml_item.replace("%version", version)
        role_lines += "{0}".format(yml_item)
    if self.out_file:
        utils.string_to_file(self.out_file, role_lines)
    else:
        print role_lines
def stats_mode(args):
    """Aggregate dissemination statistics across all log files and print
    per-run averages (completion times and packet counts)."""
    first_completed = []
    time_to_completion = []
    num_packets_sent = []
    num_adv_sent = []
    num_req_sent = []
    num_data_sent = []
    for f in args.files:
        lines = utils.get_log_lines([f])
        lines = utils.sync_timings(lines)
        nodes = utils.get_nodes(lines)
        version = utils.get_version(lines)
        t_min = utils.get_t_min(lines)
        total_pages = utils.get_total_pages(lines, version)
        start_times = utils.get_start_times(lines, nodes, t_min)
        completion_times = utils.get_completion_times(lines, nodes,
                                                      total_pages, version)
        final_times = utils.get_final_times(lines, nodes, total_pages, version)
        time_taken = utils.get_time_taken(nodes, start_times, final_times)
        packets_sent = utils.get_packets_sent(lines, nodes, start_times,
                                              final_times)
        # packets_sent maps node -> (adv, req, data) counters
        num_adv_sent.append(sum(v[0] for v in packets_sent.values()))
        num_req_sent.append(sum(v[1] for v in packets_sent.values()))
        num_data_sent.append(sum(v[2] for v in packets_sent.values()))
        num_packets_sent.append(sum(sum(v) for v in packets_sent.values()))
        # slowest node defines run completion; fastest non-zero node is first
        time_to_completion.append(max(time_taken.values()).total_seconds())
        first_completed.append(min(v.total_seconds()
                                   for v in time_taken.values()
                                   if v.total_seconds()))
    avg_time_to_completion = sum(time_to_completion) / len(time_to_completion)
    print "Average Time to Completion:", avg_time_to_completion
    avg_first_completed = sum(first_completed) / len(first_completed)
    print "Average Time for first node:", avg_first_completed
    print "Average Delta:", avg_time_to_completion - avg_first_completed
    avg_packets_sent = float(sum(num_packets_sent)) / len(num_packets_sent)
    avg_adv_sent = sum(num_adv_sent) / len(num_adv_sent)
    avg_req_sent = sum(num_req_sent) / len(num_req_sent)
    avg_data_sent = sum(num_data_sent) / len(num_data_sent)
    print "Average Packets Sent:", avg_packets_sent
    print "Total ADV Sent:", avg_adv_sent
    print "Total REQ Sent:", avg_req_sent
    print "Total DATA Sent:", avg_data_sent
    print "Average ADV Sent %:", 100 * avg_adv_sent / avg_packets_sent
    print "Average REQ Sent %:", 100 * avg_req_sent / avg_packets_sent
    print "Average DATA Sent %:", 100 * avg_data_sent / avg_packets_sent
def get_latest_install_folder(self):
    """Return '<install_dir>\<folder>' for the highest numeric version
    found in the install directory, or None when nothing qualifies."""
    if not os.path.isdir(self.get_install_dir()):
        return None
    best_version, best_folder = 0, None
    for entry in os.listdir(self.get_install_dir()):
        ver = utils.get_version(entry)
        # only purely numeric versions participate in the comparison
        if ver.isdigit() and int(ver) > best_version:
            best_version, best_folder = int(ver), entry
    if best_folder is None:
        return None
    return '%s\%s' % (self.get_install_dir(), best_folder)
def get_latest_install_package(self):
    """Return '<download_dir>\<package>' for the highest numeric version
    found in the download directory, or None when nothing qualifies."""
    if not os.path.isdir(self.get_download_dir()):
        return None
    best_version, best_package = 0, None
    for entry in os.listdir(self.get_download_dir()):
        ver = utils.get_version(entry)
        # only purely numeric versions participate in the comparison
        if ver.isdigit() and int(ver) > best_version:
            best_version, best_package = int(ver), entry
    if best_package is None:
        return None
    return '%s\%s' % (self.get_download_dir(), best_package)
def initialize() -> None:
    """ Must be called first to initialize pygame and this module. """
    global window, clock, FPS_FONT
    if pygame.get_init():
        return  # already initialized; nothing to do
    pygame.init()
    pygame.display.set_icon(resources.load_image('icon.png'))
    ver = utils.get_version()
    window = pygame.display.set_mode(resolution, mode)
    pygame.display.set_caption("Ice Emblem " + ver)
    # hold-to-repeat: 200 ms delay, then every 50 ms
    pygame.key.set_repeat(200, 50)
    clock = pygame.time.Clock()
    FPS_FONT = pygame.font.SysFont("Liberation Sans", 12)
def reqs_txt(self):
    """ Export a requirements file in txt format. """
    # One "<galaxy_name>,<version>" line per role, sorted by role name.
    role_lines = ""
    for role in sorted(self.report["roles"]):
        name = utils.normalize_role(role, self.config)
        galaxy_name = "{0}.{1}".format(self.config["scm_user"], name)
        # role version comes from the VERSION file inside the role dir
        version_path = os.path.join(self.roles_path, role, "VERSION")
        version = utils.get_version(version_path)
        role_lines += "{0},{1}\n".format(galaxy_name, version)
    if self.out_file:
        utils.string_to_file(self.out_file, role_lines)
    else:
        print role_lines
def test_server_listen(host):
    """postgresql.conf contains exactly one listen_addresses = localhost."""
    hostname = host.backend.get_hostname()
    version = get_version(hostname)
    # Config path differs between RHEL-family and Debian-family layouts.
    if get_distribution(hostname) == 'centos':
        configfile = '/var/lib/pgsql/{version}/data/postgresql.conf'
    else:
        configfile = '/etc/postgresql/{version}/main/postgresql.conf'
    with host.sudo():
        content = host.file(configfile.format(version=version)).content_string
        matches = [ln for ln in content.split('\n')
                   if match(r'\s*listen_addresses', ln)]
        assert len(matches) == 1
        assert matches[0] == "listen_addresses = localhost"
def main():
    """Build/register the dbsuite distribution."""
    metadata = dict(
        name='dbsuite',
        version=get_version(os.path.join(HERE, 'dbsuite/__init__.py')),
        description='A suite of tools for maintenance of information warehouses',
        long_description=description(os.path.join(HERE, 'README.rst')),
        classifiers=CLASSIFIERS,
        author='Dave Hughes',
        author_email='*****@*****.**',
        url='http://www.waveform.org.uk/trac/dbsuite/',
        keywords='database documentation',
        packages=find_packages(exclude=['distribute_setup', 'utils']),
        include_package_data=True,
        platforms='ALL',
        install_requires=REQUIRES,
        extras_require=EXTRA_REQUIRES,
        zip_safe=False,
        test_suite='dbsuite',
        entry_points=ENTRY_POINTS,
    )
    setup(**metadata)
def main():
    """Packaging entry point: run setuptools for the samplesdb distribution."""
    # Assemble keyword arguments up front, then hand them to setup().
    metadata = dict(
        name='samplesdb',
        version=get_version(os.path.join(HERE, 'samplesdb/__init__.py')),
        description='samplesdb',
        long_description=description(os.path.join(HERE, 'README.rst')),
        classifiers=CLASSIFIERS,
        author='Dave Hughes',
        author_email='*****@*****.**',
        url='https://github.com/waveform80/samplesdb',
        keywords='science samples database',
        packages=find_packages(exclude=['distribute_setup', 'utils']),
        include_package_data=True,
        platforms='ALL',
        install_requires=REQUIRES,
        extras_require={},
        zip_safe=False,
        test_suite='nose.collector',
        entry_points=ENTRY_POINTS,
    )
    setup(**metadata)
def main():
    """Packaging entry point: run setuptools with module-level metadata."""
    # All values come from module-level constants; gather then call.
    metadata = dict(
        name=NAME,
        version=get_version(os.path.join(HERE, NAME, '__init__.py')),
        description=DESCRIPTION,
        long_description=description(os.path.join(HERE, 'README.rst')),
        classifiers=CLASSIFIERS,
        author=AUTHOR,
        author_email=AUTHOR_EMAIL,
        url=URL,
        keywords=' '.join(KEYWORDS),
        packages=PACKAGES,
        package_data=PACKAGE_DATA,
        platforms='ALL',
        install_requires=REQUIRES,
        extras_require=EXTRA_REQUIRES,
        zip_safe=True,
        test_suite=NAME,
        entry_points=ENTRY_POINTS,
    )
    setup(**metadata)
def check_mode(args): for f in args.files: lines = utils.get_log_lines([f]) lines = utils.sync_timings(lines) nodes = utils.get_nodes(lines) version = utils.get_version(lines) t_min = utils.get_t_min(lines) total_pages = utils.get_total_pages(lines, version) start_times = utils.get_start_times(lines, nodes, t_min) completion_times = utils.get_completion_times(lines, nodes, total_pages, version) final_times = utils.get_final_times(lines, nodes, total_pages, version) time_taken = utils.get_time_taken(nodes, start_times, final_times) packets_sent = utils.get_packets_sent(lines, nodes, start_times, final_times) # utils.get_stats(lines) all_nodes_completed = time_taken.values() and min(time_taken.values()).total_seconds() == 0 all_nodes_exists = nodes == set([2,3,4,5,6,7,8,9,10,11,20]) if not all_nodes_completed: print "Not all nodes completed:", f.name elif not all_nodes_exists: print "Not all nodes exist:", f.name, nodes
def crawl_blog_post(blog_id, log_no, tags, written_time=None, verbose=True):
    # Crawl one blog post and return it as a structured dict, or None when
    # the page cannot be fetched.
    #
    # blog_id      -- blog identifier, or a full URL (anything starting
    #                 with 'http' is used as the URL directly)
    # log_no       -- post number, combined with blog_id via mobileurl/posturl
    # tags         -- mapping of (blog_id, log_no) -> tag list
    # written_time -- optional original publication time, passed through
    # verbose      -- when False, directorySeq/sympathyCount are stripped

    def get_title(root):
        # Title is the text of the first h3.tit_h3 element.
        return root.xpath('//h3[@class="tit_h3"]/text()')[0].strip()

    def get_page_html(url):
        # Fetch the page; return (soup of the post body, title) or
        # (None, None) on network failure.  Only IOError is caught --
        # a missing ._postView element (IndexError) would propagate.
        try:
            root = html.parse(url)
            elem = root.xpath('//div[@class="_postView"]')[0]
            html_ = etree.tostring(elem)
            return (BeautifulSoup(html_), get_title(root))
        except IOError:
            print ''
            return (None, None)

    if blog_id.startswith('http'):
        url = blog_id
    else:
        url = mobileurl % (blog_id, log_no)
    (doc, title) = get_page_html(url)
    if doc:
        crawled_time = utils.get_today_str()
        crawler_version = utils.get_version()
        # Rebind url to the canonical (non-mobile) post URL for storage.
        url = posturl % (blog_id, log_no)
        post_tags = tags[(blog_id, log_no)]
        directory_seq = None
        # NOTE: No directory sequence given for query crawler
        post = btc.make_structure(blog_id, log_no, None, doc, crawled_time,
                                  crawler_version, title, written_time, url,
                                  post_tags, directory_seq)
        if not verbose:
            del post['directorySeq']
            del post['sympathyCount']
        return post
    else:
        # NOTE(review): this prints the posturl template, not the formatted
        # url -- possibly `url` was intended here.
        print 'No doc in %s' % posturl
        return None
def write_statistics(self, successful_results):
    """ Write the distributed stats to a file in the output dir """
    path = os.path.join(self.distribution_output, "stats.xml")
    # Preserve a previous run's stats as a timestamped backup.
    if os.access(path, os.F_OK):
        backup = path + '.bak-' + str(int(time.time()))
        os.rename(path, backup)
    # Snapshot of the run configuration, serialized as <config/> elements.
    config = {'id': self.test_id,
              'description': self.test_description,
              'class_title': self.class_title,
              'class_description': self.class_description,
              'module': self.module_name,
              'class': self.class_name,
              'method': self.method_name,
              'cycles': self.cycles,
              'duration': self.duration,
              'sleep_time': self.sleep_time,
              'startup_delay': self.startup_delay,
              'sleep_time_min': self.sleep_time_min,
              'sleep_time_max': self.sleep_time_max,
              'cycle_time': self.cycle_time,
              'configuration_file': self.config_path,
              'server_url': self.test_url,
              'log_xml': self.result_path,
              'python_version': platform.python_version()}
    # Each monitor host is recorded as an extra entry keyed by its name.
    for (name, host, port, desc) in self.monitor_hosts:
        config[name] = desc
    with open(path, "w+") as out:
        header = '<funkload version="{version}" time="{time}">\n'
        out.write(header.format(version=get_version(), time=time.time()))
        entry = '<config key="{key}" value="{value}"/>\n'
        for key, value in config.items():
            out.write(entry.format(key=key, value=value))
        for xml in successful_results:
            out.write(xml + "\n")
        out.write("</funkload>\n")
def write_statistics(self, successful_results):
    """ Write the distributed stats to a file in the output dir """
    path = os.path.join(self.distribution_output, "stats.xml")
    # Run configuration, emitted below as one <config/> element per key.
    config = {
        "id": self.test_id,
        "description": self.test_description,
        "class_title": self.class_title,
        "class_description": self.class_description,
        "module": self.module_name,
        "class": self.class_name,
        "method": self.method_name,
        "cycles": self.cycles,
        "duration": self.duration,
        "sleep_time": self.sleep_time,
        "startup_delay": self.startup_delay,
        "sleep_time_min": self.sleep_time_min,
        "sleep_time_max": self.sleep_time_max,
        "cycle_time": self.cycle_time,
        "configuration_file": self.config_path,
        "server_url": self.test_url,
        "log_xml": self.result_path,
        "python_version": platform.python_version(),
    }
    # Monitor hosts contribute extra entries keyed by host name.
    for (name, host, port, desc) in self.monitor_hosts:
        config[name] = desc
    with open(path, "w+") as stats_file:
        write = stats_file.write
        write('<funkload version="{version}" time="{time}">\n'.format(version=get_version(), time=time.time()))
        for key, value in config.items():
            write('<config key="{key}" value="{value}"/>\n'.format(key=key, value=value))
        # Then the raw per-worker result XML, one blob per line.
        for xml in successful_results:
            write(xml)
            write("\n")
        write("</funkload>\n")
def scatter_mode(args): for f in args.files: lines = utils.get_log_lines([f]) lines = utils.sync_timings(lines) nodes = utils.get_nodes(lines) version = utils.get_version(lines) t_min = utils.get_t_min(lines) total_pages = utils.get_total_pages(lines, version) start_times = utils.get_start_times(lines, nodes, t_min) completion_times = utils.get_completion_times(lines, nodes, total_pages, version) final_times = utils.get_final_times(lines, nodes, total_pages, version) time_taken = utils.get_time_taken(nodes, start_times, final_times) packets_sent = utils.get_packets_sent(lines, nodes, start_times, final_times) all_nodes_completed = time_taken.values() and min(time_taken.values()).total_seconds() == 0 all_nodes_exists = nodes == set([2,3,4,5,6,7,8,9,10,11,20]) if not all_nodes_completed: continue # elif not all_nodes_exists: # continue elif len(nodes) < 7: continue if args.l: for n in nodes: if not time_taken.get(n) or time_taken[n].total_seconds() == 0 or packets_sent[n][2] < 100: continue print time_taken[n].total_seconds(), 100 * float(packets_sent[n][0] + packets_sent[n][1]) / sum(packets_sent[n]), 1 else: adv_sent = sum(v[0] for v in packets_sent.values()) req_sent = sum(v[1] for v in packets_sent.values()) data_sent = sum(v[2] for v in packets_sent.values()) total_sent = sum(sum(v) for v in packets_sent.values()) completion_time = max(time_taken.values()).total_seconds() print completion_time, 100 * float(adv_sent + req_sent) / total_sent, 1
def main(args=sys.argv[1:]):
    """Default main."""
    # NOTE(review): the default binds sys.argv[1:] once at import time;
    # callers passing args explicitly are unaffected.
    # enable to load module in the current path
    cur_path = os.path.abspath(os.path.curdir)
    sys.path.insert(0, cur_path)
    parser = OptionParser(USAGE, formatter=TitledHelpFormatter(),
                          version="FunkLoad %s" % get_version())
    parser.add_option("", "--config", type="string", dest="config",
                      metavar='CONFIG',
                      help="Path to alternative config file")
    parser.add_option("-u", "--url", type="string", dest="main_url",
                      help="Base URL to bench.")
    parser.add_option("-c", "--cycles", type="string", dest="bench_cycles",
                      help="Cycles to bench, colon-separated list of "
                      "virtual concurrent users. To run a bench with 3 "
                      "cycles of 5, 10 and 20 users, use: -c 5:10:20")
    parser.add_option("-D", "--duration", type="string",
                      dest="bench_duration",
                      help="Duration of a cycle in seconds.")
    parser.add_option("-m", "--sleep-time-min", type="string",
                      dest="bench_sleep_time_min",
                      help="Minimum sleep time between requests.")
    parser.add_option("-M", "--sleep-time-max", type="string",
                      dest="bench_sleep_time_max",
                      help="Maximum sleep time between requests.")
    parser.add_option("-t", "--test-sleep-time", type="string",
                      dest="bench_sleep_time",
                      help="Sleep time between tests.")
    parser.add_option("-s", "--startup-delay", type="string",
                      dest="bench_startup_delay",
                      help="Startup delay between thread.")
    parser.add_option("-f", "--as-fast-as-possible", action="store_true",
                      help="Remove sleep times between requests and between "
                      "tests, shortcut for -m0 -M0 -t0")
    parser.add_option("", "--no-color", action="store_true",
                      help="Monochrome output.")
    parser.add_option("", "--accept-invalid-links", action="store_true",
                      help="Do not fail if css/image links are not reachable.")
    parser.add_option("", "--simple-fetch", action="store_true",
                      dest="bench_simple_fetch",
                      help="Don't load additional links like css or images "
                      "when fetching an html page.")
    parser.add_option("-l", "--label", type="string",
                      help="Add a label to this bench run for easier "
                      "identification (it will be appended to the "
                      "directory name for reports generated from it).")
    parser.add_option("--enable-debug-server", action="store_true",
                      dest="debugserver",
                      help="Instantiates a debug HTTP server which exposes an "
                      "interface using which parameters can be modified "
                      "at run-time. Currently supported parameters: "
                      "/cvu?inc=<integer> to increase the number of "
                      "CVUs, /cvu?dec=<integer> to decrease the number "
                      "of CVUs, /getcvu returns number of CVUs ")
    parser.add_option("--debug-server-port", type="string", dest="debugport",
                      help="Port at which debug server should run during the "
                      "test")
    parser.add_option("--distribute", action="store_true", dest="distribute",
                      help="Distributes the CVUs over a group of worker "
                      "machines that are defined in the workers section")
    parser.add_option("--distribute-workers", type="string", dest="workerlist",
                      help="This parameter will over-ride the list of "
                      "workers defined in the config file. expected "
                      "notation is uname@host,uname:pwd@host or just "
                      "host...")
    parser.add_option("--distribute-python", type="string", dest="python_bin",
                      help="When running in distributed mode, this Python "
                      "binary will be used across all hosts.")
    parser.add_option("--is-distributed", action="store_true",
                      dest="is_distributed",
                      help="This parameter is for internal use only. it "
                      "signals to a worker node that it is in "
                      "distributed mode and shouldn't perform certain "
                      "actions.")
    parser.add_option("--distributed-packages", type="string",
                      dest="distributed_packages",
                      help="Additional packages to be passed to easy_install "
                      "on remote machines when being run in distributed "
                      "mode.")
    parser.add_option("--distributed-log-path", type="string",
                      dest="distributed_log_path",
                      help="Path where all the logs will be stored when "
                      "running a distributed test")

    # XXX What exactly is this checking for here??
    # Strips every --distribute* flag from the command line that will be
    # forwarded to worker nodes.
    cmd_args = " ".join([k for k in args if k.find('--distribute') < 0])

    options, args = parser.parse_args(args)
    if len(args) != 2:
        parser.error("incorrect number of arguments")
    if not args[1].count('.'):
        parser.error("invalid argument; should be [class].[method]")
    # -f is shorthand for zeroing all three sleep options.
    if options.as_fast_as_possible:
        options.bench_sleep_time_min = '0'
        options.bench_sleep_time_max = '0'
        options.bench_sleep_time = '0'
    if os.path.exists(args[0]):
        # We were passed a file for the first argument
        module_name = os.path.basename(os.path.splitext(args[0])[0])
    else:
        # We were passed a module name
        module_name = args[0]
    # registering signals
    signal.signal(signal.SIGTERM, shutdown)
    signal.signal(signal.SIGINT, shutdown)
    signal.signal(signal.SIGQUIT, shutdown)
    klass, method = args[1].split('.')
    if options.distribute:
        from Distributed import DistributionMgr
        ret = None
        global _manager
        try:
            distmgr = DistributionMgr(
                module_name, klass, method, options, cmd_args)
            _manager = distmgr
        except UserWarning, error:
            trace(red_str("Distribution failed with:%s \n" % (error)))
        try:
            try:
                distmgr.prepare_workers(allow_errors=True)
                ret = distmgr.run()
                distmgr.final_collect()
            except KeyboardInterrupt:
                trace("* ^C received *")
        finally:
            # in any case we want to stop the workers at the end
            distmgr.abort()
            _manager = None
        return ret
def main():
    """Default main."""
    # Build the option parser, parse the command line, then either run the
    # bench locally or distribute it over worker machines.
    # enable to load module in the current path
    cur_path = os.path.abspath(os.path.curdir)
    sys.path.insert(0, cur_path)
    parser = OptionParser(USAGE, formatter=TitledHelpFormatter(),
                          version="FunkLoad %s" % get_version())
    parser.add_option("-u", "--url", type="string", dest="main_url",
                      help="Base URL to bench.")
    parser.add_option("-c", "--cycles", type="string", dest="bench_cycles",
                      help="Cycles to bench, this is a list of number of "
                      "virtual concurrent users, "
                      "to run a bench with 3 cycles with 5, 10 and 20 "
                      "users use: -c 2:10:20")
    parser.add_option("-D", "--duration", type="string",
                      dest="bench_duration",
                      help="Duration of a cycle in seconds.")
    parser.add_option("-m", "--sleep-time-min", type="string",
                      dest="bench_sleep_time_min",
                      help="Minimum sleep time between requests.")
    parser.add_option("-M", "--sleep-time-max", type="string",
                      dest="bench_sleep_time_max",
                      help="Maximum sleep time between requests.")
    parser.add_option("-t", "--test-sleep-time", type="string",
                      dest="bench_sleep_time",
                      help="Sleep time between tests.")
    parser.add_option("-s", "--startup-delay", type="string",
                      dest="bench_startup_delay",
                      help="Startup delay between thread.")
    parser.add_option("-f", "--as-fast-as-possible", action="store_true",
                      help="Remove sleep times between requests and"
                      " between tests, shortcut for -m0 -M0 -t0")
    parser.add_option("", "--no-color", action="store_true",
                      help="Monochrome output.")
    parser.add_option("", "--accept-invalid-links", action="store_true",
                      help="Do not fail if css/image links are "
                      "not reachable.")
    parser.add_option("", "--simple-fetch", action="store_true",
                      dest="bench_simple_fetch",
                      help="Don't load additional links like css "
                      "or images when fetching an html page.")
    parser.add_option("-l", "--label", type="string",
                      help="Add a label to this bench run "
                      "for easier identification (it will be appended to the directory name "
                      "for reports generated from it).")
    parser.add_option("", "--enable-debug-server",
                      action="store_true", dest="debugserver",
                      help="Instantiates a debug HTTP server which exposes an "
                      "interface using which parameters can be modified at "
                      "run-time. Currently supported parameters: "
                      "/cvu?inc=<integer> to increase the number of CVUs, "
                      "/cvu?dec=<integer> to decrease the number of CVUs, "
                      "/getcvu returns number of CVUs ")
    parser.add_option("", "--debug-server-port", type="string",
                      dest="debugport",
                      help="Port at which debug server should run during the test")
    parser.add_option("", "--distribute", action="store_true",
                      dest="distribute",
                      help="Distributes the CVUs over a group of worker machines "
                      "that are defined in the section [workers]")
    parser.add_option("", "--distribute-workers", type="string",
                      dest="workerlist",
                      help="This parameter will over-ride the list of workers defined "
                      "in the config file. expected notation is uname@host,uname:pwd@host or just host...")
    parser.add_option("", "--is-distributed", action="store_true",
                      dest="is_distributed",
                      help="This parameter is for internal use only. it signals to a "
                      "worker node that it is in distributed mode and shouldn't "
                      "perform certain actions.")
    options, args = parser.parse_args()
    # Command line forwarded to workers, minus every --distribute* flag.
    cmd_args = " ".join([k for k in sys.argv[1:] if k.find('--distribute')<0])
    if len(args) != 2:
        parser.error("incorrect number of arguments")
    if not args[1].count('.'):
        parser.error("invalid argument should be class.method")
    # -f zeroes all three sleep options.
    if options.as_fast_as_possible:
        options.bench_sleep_time_min = '0'
        options.bench_sleep_time_max = '0'
        options.bench_sleep_time = '0'
    klass, method = args[1].split('.')
    if options.distribute:
        from Distributed import DistributionMgr
        ret = None
        try:
            distmgr = DistributionMgr(
                args[0], klass, method,
                options, cmd_args)
            try:
                distmgr.prepare_workers(allow_errors = True)
                ret = distmgr.run()
                distmgr.final_collect()
            except KeyboardInterrupt:
                trace("* ^C received *")
                distmgr.abort()
        except UserWarning,error:
            trace(red_str("Distribution failed with:%s \n" % (error)))
        sys.exit(ret)
def parseArgs(self, argv):
    """Parse programs args."""
    # Builds the option parser, then copies the parsed options onto self
    # and onto the test loader.  NOTE(review): `argv` is accepted but
    # parse_args() reads sys.argv -- confirm whether argv should be passed.
    global g_doctest_verbose
    parser = OptionParser(self.USAGE, formatter=TitledHelpFormatter(),
                          version="FunkLoad %s" % get_version())
    parser.add_option("", "--config", type="string", dest="config",
                      metavar='CONFIG',
                      help="Path to alternative config file.")
    parser.add_option("-q", "--quiet", action="store_true",
                      help="Minimal output.")
    parser.add_option("-v", "--verbose", action="store_true",
                      help="Verbose output.")
    parser.add_option("-d", "--debug", action="store_true",
                      help="FunkLoad and doctest debug output.")
    parser.add_option("--debug-level", type="int",
                      help="Debug level 3 is more verbose.")
    parser.add_option("-u", "--url", type="string", dest="main_url",
                      help="Base URL to bench without ending '/'.")
    parser.add_option("-m", "--sleep-time-min", type="string",
                      dest="ftest_sleep_time_min",
                      help="Minumum sleep time between request.")
    parser.add_option("-M", "--sleep-time-max", type="string",
                      dest="ftest_sleep_time_max",
                      help="Maximum sleep time between request.")
    parser.add_option("--dump-directory", type="string", dest="dump_dir",
                      help="Directory to dump html pages.")
    parser.add_option("-V", "--firefox-view", action="store_true",
                      help="Real time view using firefox, "
                      "you must have a running instance of firefox "
                      "in the same host.")
    parser.add_option("--no-color", action="store_true",
                      help="Monochrome output.")
    parser.add_option("-l", "--loop-on-pages", type="string",
                      dest="loop_steps",
                      help="Loop as fast as possible without concurrency "
                      "on pages, expect a page number or a slice like 3:5."
                      " Output some statistics.")
    parser.add_option("-n", "--loop-number", type="int", dest="loop_number",
                      default=10,
                      help="Number of loop.")
    parser.add_option("--accept-invalid-links", action="store_true",
                      help="Do not fail if css/image links are "
                      "not reachable.")
    parser.add_option("--simple-fetch", action="store_true",
                      dest="ftest_simple_fetch",
                      help="Don't load additional links like css "
                      "or images when fetching an html page.")
    parser.add_option("--stop-on-fail", action="store_true",
                      help="Stop tests on first failure or error.")
    parser.add_option("-e", "--regex", type="string", default=None,
                      help="The test names must match the regex.")
    parser.add_option("--list", action="store_true",
                      help="Just list the test names.")
    parser.add_option("--doctest", action="store_true", default=False,
                      help="Check for a doc test.")
    parser.add_option("--pause", action="store_true",
                      help="Pause between request, "
                      "press ENTER to continue.")
    parser.add_option("--profile", action="store_true",
                      help="Run test under the Python profiler.")
    options, args = parser.parse_args()
    if self.module is None:
        if len(args) == 0:
            parser.error("incorrect number of arguments")
        # remove the .py
        module = args[0]
        if module.endswith('.py'):
            module = os.path.basename(os.path.splitext(args[0])[0])
        self.module = module
    else:
        # Module preset: positional args are all test names.
        args.insert(0, self.module)
    if not options.doctest:
        global g_has_doctest
        g_has_doctest = False
    if options.verbose:
        self.verbosity = 2
    if options.quiet:
        self.verbosity = 0
        g_doctest_verbose = False
    if options.debug or options.debug_level:
        options.ftest_debug_level = 1
        options.ftest_log_to = 'console file'
        g_doctest_verbose = True
    if options.debug_level:
        options.ftest_debug_level = int(options.debug_level)
    # Windows consoles do not handle the ANSI color escapes.
    if sys.platform.lower().startswith('win'):
        self.color = False
    else:
        self.color = not options.no_color
    self.test_name_pattern = options.regex
    self.list_tests = options.list
    self.profile = options.profile
    # set testloader options
    self.testLoader.options = options
    if self.defaultTest is not None:
        self.testNames = [self.defaultTest]
    elif len(args) > 1:
        self.testNames = args[1:]
def _open_result_log(self, **kw):
    """Open the result log."""
    # Emit the opening <funkload> element, then the run metadata.
    header = '<funkload version="%s" time="%s">'
    stamp = datetime.now().isoformat()
    self._logr(header % (get_version(), stamp), force=True)
    self.addMetadata(ns=None, **kw)
# Don't wrap JS files into anonymous functions. Our code isn't ready for # this, yet. "DISABLE_WRAPPER": True, # All static files that are run through pipeline "STYLESHEETS": pipelinefiles.STYLESHEETS, "JAVASCRIPT": pipelinefiles.JAVASCRIPT, } # Make a list of files that should be included directly (bypassing pipeline) # and a list of pipeline identifiers for all others. NON_COMPRESSED_FILES = pipelinefiles.non_pipeline_js.values() NON_COMPRESSED_FILE_IDS = pipelinefiles.non_pipeline_js.keys() COMPRESSED_FILE_IDS = filter(lambda f: f not in NON_COMPRESSED_FILE_IDS, pipelinefiles.JAVASCRIPT.keys()) # Make Git based version of CATMAID available as a settings field VERSION = utils.get_version() # FlyTEM rendering service. To activate add the following lines to your # settings.py file: # MIDDLEWARE_CLASSES += ('catmaid.middleware.FlyTEMMiddleware',) # FLYTEM_SERVICE_URL = 'http://renderer-2.int.janelia.org:8080/render-ws/v1/owner/flyTEM' # FLYTEM_STACK_RESOLUTION = (4,4,40) # FLYTEM_STACK_TILE_WIDTH = 512 # FLYTEM_STACK_TILE_HEIGHT = 512 # DVID auto-discovery. To activate add the following lines to your settings.py # file: # MIDDLEWARE_CLASSES += ('catmaid.middleware.DVIDMiddleware',) # DVID_URL = 'http://emdata2.int.janelia.org:7000' # DVID_FORMAT = 'jpg:80' # DVID_SHOW_NONDISPLAYABLE_REPOS = True
k, v = line.split(':', 1) ADDONS[slug][k.strip('# ').lower()] = v.strip('# \n').strip().strip(r'\r') except FileNotFoundError as err: # no toc file, that probably means this is a Bliz AddOn or just some random dir # print(err) pass # So now we have a list of the the installed addons, check the online database to see if there's a # new version latest = json.load(CACHE.getfd(LATESTURL, refresh_age=60)) for slug, info in ADDONS.items(): if slug in latest and 'version' in ADDONS[slug]: ver, url = latest[slug] instver = get_version(ADDONS[slug]['version']) latestver = get_version(ver) if slug in VERSIONMAP and instver in VERSIONMAP[slug]: instver = VERSIONMAP[slug][instver] print('Match found in database: {}'.format(slug)) print('Installed version: {}'.format(instver)) print('Latest version: {}'.format(latestver)) if instver != latestver and pargs.report is False: if pargs.yes is False: yn = input('Would you like to upgrade {} from {}? [Y/n] '.format(slug, url)) if pargs.yes or yn is "" or yn.startswith('y') or yn.startswith('Y'): # do the upgrade
default=1, type=int) main_parser.add_argument("--total_coverage", metavar="FLOAT", help="Total coverage to simulate", default=1.0, type=float) main_parser.add_argument("--mean_fragment_size", metavar="INT", help="Mean fragment size to simulate", default=350, type=int) main_parser.add_argument("--sd_fragment_size", metavar="INT", help="Standard deviation of fragment size to simulate", default=50, type=int) main_parser.add_argument("--vcfs", metavar="VCF", help="Addtional list of VCFs to insert into genome, priority is lowest ... highest", nargs="+", default=[]) main_parser.add_argument("--force_five_base_encoding", action="store_true", help="Force output bases to be only ACTGN") main_parser.add_argument("--filter", action="store_true", help="Only use PASS variants for simulation") main_parser.add_argument("--keep_temp", action="store_true", help="Keep temporary files after simulation") main_parser.add_argument("--lift_ref", action="store_true", help="Liftover chromosome names from restricted reference") main_parser.add_argument("--java_max_mem", metavar="XMX", help="max java memory", default="10g", type = str) main_parser.add_argument('--version', action='version', version=get_version()) main_parser.add_argument('--log_to_stderr', action='store_true', help='Output log to stderr instead of log_dir/varsim.log') main_parser.add_argument("--loglevel", help="Set logging level", choices=["debug", "warn", "info"], default="info") pipeline_control_group = main_parser.add_argument_group("Pipeline control options. 
Disable parts of the pipeline.") pipeline_control_group.add_argument("--disable_rand_vcf", action="store_true", help="Disable sampling from the provided small variant VCF") pipeline_control_group.add_argument("--disable_rand_dgv", action="store_true", help="Disable sampline from the provided DGV file") pipeline_control_group.add_argument("--disable_vcf2diploid", action="store_true", help="Disable diploid genome simulation") pipeline_control_group.add_argument("--disable_sim", action="store_true", help="Disable read simulation") # RandVCF2VCF seed num_SNP num_INS num_DEL num_MNP num_COMPLEX percent_novel min_length_lim max_length_lim reference_file file.vcf rand_vcf_group = main_parser.add_argument_group("Small variant simulation options") rand_vcf_group.add_argument("--vc_num_snp", metavar="INTEGER", help="Number of SNPs to sample from small variant VCF",