def test_main_python(self):
    activate = self.rel('ve', 'bin', 'activate')
    bin = self.rel('ve', 'bin')
    env = self.rel('ve')
    minimock = self.rel(
        've', 'lib', 'python%d.%d' % (VERSION.major, VERSION.minor),
        'site-packages', 'MiniMock.py'
    )
    unittest2 = self.rel(
        've', 'lib', 'python%d.%d' % (VERSION.major, VERSION.minor),
        'site-packages', 'unittest2'
    )
    self.assertFalse(os.path.isdir(env), env)
    self.create_file(
        'requirements.py_%d%d' % (VERSION.major, VERSION.minor),
        'MiniMock==1.2.7\n'
    )
    main(['-b', '-p', 'pre-requirements.txt'])
    self.assertTrue(os.path.isdir(env), env)
    self.assertTrue(os.path.isdir(bin), bin)
    self.assertTrue(os.path.isfile(activate), activate)
    self.assertTrue(os.path.isfile(minimock), minimock)
    self.assertTrue(os.path.isdir(unittest2), unittest2)
def main():
    qid = request.args.get('id', '')
    if not qid:
        return bootstrap.main(tool='copypaste.py', stuff=form(), title='CommonsInterwiki')
    site = pywikibot.Site('wikidata', 'wikidata').data_repository()
    item = pywikibot.ItemPage(site, qid)
    item.get('sitelinks')  # Does this even work? Oh well.
    dbnames = sorted(list(item.sitelinks))
    links = list()
    for site in dbnames:
        lang = site.replace('wiki', '').replace('_', '-')
        link = item.sitelinks[site]
        links.append('[[{0}:{1}]]'.format(lang, link))
    text = '\n'.join(links)
    text = '<textarea rows="30" cols="100">' + text + '</textarea>'
    return bootstrap.main(tool='copypaste.py', stuff=text, title='CommonsInterwiki')
def test_main(self):
    activate = self.rel('ve', 'bin', 'activate')
    bin = self.rel('ve', 'bin')
    env = self.rel('ve')
    unittest2 = self.rel(
        've', 'lib', 'python%d.%d' % (VERSION.major, VERSION.minor),
        'site-packages', 'unittest2'
    )
    self.assertFalse(os.path.isdir(self.rel('ve')))
    main(['-b', '-p', 'pre-requirements.txt'])
    self.assertTrue(os.path.isdir(env), env)
    self.assertTrue(os.path.isdir(bin), bin)
    self.assertTrue(os.path.isfile(activate), activate)
    self.assertTrue(os.path.isdir(unittest2), unittest2)
def test_bootstrap_help(capsys):
    """Tests to see if bootstrap.py help text is correct and that it loads
    sample/tripleo plugins"""
    help_text = (
        "usage: bootstrap.py [-h] [-d] {sample,tripleo} ...\n\n"
        "Browbeat bootstrap Ansible. Generates files for Ansible interactions to the\n"
        "OpenStack Cloud.\n\n"
        "positional arguments:\n"
        " {sample,tripleo}\n\n"
        "optional arguments:\n"
        " -h, --help show this help message and exit\n"
        " -d, --debug Enable Debug messages\n")
    with pytest.raises(SystemExit) as pytest_wrapped_e:
        bootstrap.main([
            "-h",
        ])
    assert pytest_wrapped_e.type == SystemExit
    assert pytest_wrapped_e.value.code == 0
    out, err = capsys.readouterr()
    assert out == help_text
def test_bootstrap_tripleo_help(capsys):
    """Tests to see if bootstrap.py tripleo plugin help text is correct."""
    help_text = (
        "usage: bootstrap.py tripleo [-h] [-i TRIPLEO_IP] [-u USER]\n\n"
        "Bootstrap implementation for tripleo clouds\n\n"
        "optional arguments:\n"
        " -h, --help show this help message and exit\n"
        " -i TRIPLEO_IP, --tripleo-ip TRIPLEO_IP\n"
        " IP address of tripleo undercloud. Defaults to\n"
        " 'localhost'. Currently only localhost is supported.\n"
        " -u USER, --user USER User used for tripleo install. Defaults to 'stack'.\n"
    )
    with pytest.raises(SystemExit) as pytest_wrapped_e:
        bootstrap.main(["tripleo", "-h"])
    assert pytest_wrapped_e.type == SystemExit
    assert pytest_wrapped_e.value.code == 0
    out, err = capsys.readouterr()
    assert out == help_text
def main(argv):
    usage = "usage: %prog [options] deployment_home"
    parser = OptionParser(usage=usage)
    (options, args) = parser.parse_args(args=argv)
    if len(args) != 1:
        parser.error("Expecting exactly one argument, the directory to install datablox (a.k.a. the deployment home)")
    dh = os.path.abspath(os.path.expanduser(args[0]))
    if os.path.exists(dh):
        parser.error("Installation directory %s already exists - delete it if you want to install to that directory." % dh)
    print "Deploying datablox to %s" % dh
    print "  Running Engage bootstrap"
    bt_log = os.path.join(dh, "log/bootstrap.log")
    try:
        rc = bootstrap.main(["-c", dh])
    except:
        traceback.print_exc()
        print "Error running Engage bootstrap, check logfile %s for details" % bt_log
        return 1
    if rc != 0:
        print "Engage bootstrap failed, return code was %d. Check logfile %s for details." % (rc, bt_log)
        return 1
    print "  Running install of datablox"
    install_exe = os.path.join(dh, "engage/bin/install")
    if not os.path.exists(install_exe):
        raise Exception("Could not find Engage installer script at %s" % install_exe)
    di_log = os.path.join(dh, "log/install.log")
    with tempfile.NamedTemporaryFile(delete=False) as f:
        # Generate a random 10-character master password for the install.
        f.write(''.join([random.choice(string.letters + string.digits) for i in range(10)]))
        fname = f.name
    try:
        rc = subprocess.call([install_exe, "-p", fname, "-g", "datablox"])
    except:
        traceback.print_exc()
        print "Error running datablox install, check logfile %s for details" % di_log
        return 1
    finally:
        os.remove(fname)
    if rc != 0:
        print "Engage install of Datablox caretaker failed, return code was %d. Check logfile %s for details." % (rc, di_log)
        return 1
    svcctl_exe = os.path.join(dh, "engage/bin/svcctl")
    assert os.path.exists(svcctl_exe), "Missing svcctl script at %s" % svcctl_exe
    svcctl_link = os.path.join(dh, "python/bin/svcctl")
    rc = subprocess.call(["/bin/ln", "-s", svcctl_exe, svcctl_link])
    if rc != 0:
        print "Unable to setup svcctl link at %s" % svcctl_link
        return 1
    print "Datablox deployment successful"
    return 0
def main():
    stuff = cgi.FieldStorage()
    try:
        qid = stuff['id'].value.lower()
    except KeyError:
        print bootstrap.main(tool='copypaste.py', stuff=form, title='copypaste.py')
        return
    site = pywikibot.Site('en', 'wikipedia')
    repo = site.data_repository()
    #qid = 'Q1'
    sitelinks = repo.get_sitelinks(qid)
    links = list()
    keys = sorted(sitelinks.keys())
    for site in keys:
        lang = site.replace('wiki', '').replace('_', '-')
        link = sitelinks[site]['title']
        links.append('[[{0}:{1}]]'.format(lang, link))
    text = '\n'.join(links)
    text = '<textarea rows="30" cols="100">' + text + '</textarea>'
    print bootstrap.main(tool='copypaste.py', stuff=text, title='copypaste.py')
def params_errors(best_fit_algor, args):
    '''
    Obtain uncertainties for the fitted parameters.
    '''
    if best_fit_algor == 'brute':
        fundam_params = args
        isoch_fit_errors = []
        # Assign errors as the largest step in each parameter.
        for pv in fundam_params:
            # If any parameter has a single valued range, assign 'nan'.
            if len(pv) > 1:
                # Find largest delta in this parameter used values.
                largest_delta = np.diff(pv).max()
                # Store the maximum value.
                isoch_fit_errors.append(largest_delta)
            else:
                isoch_fit_errors.append(np.nan)

    elif best_fit_algor == 'genet':
        isoch_fit_errors = bootstrap.main(*args)

    elif best_fit_algor == 'emcee':
        varIdxs, emcee_trace = args
        isoch_fit_errors = []
        # TODO hard-coded for 6 parameters
        j = 0
        print("Median (16, 84) perc")
        for i in range(6):
            if i in varIdxs:
                pm = np.percentile(emcee_trace[i - j], 50)  # Median
                # 16th and 84th percentiles (1 sigma)
                ph = np.percentile(emcee_trace[i - j], 84)
                pl = np.percentile(emcee_trace[i - j], 16)
                # print(" {:.4f} ({:.4f}, {:.4f})".format(pm, pl, ph))
                # TODO fix this
                err = .5 * (ph - pl)
                isoch_fit_errors.append(err)
            else:
                isoch_fit_errors.append(np.nan)
                j += 1

    return isoch_fit_errors
def params_errors(ip_list, err_lst, memb_prob_avrg_sort, completeness,
                  st_dist_mass, isoch_fit_params):
    '''
    Obtain errors for the fitted parameters.
    '''
    best_fit_algor, N_b = g.bf_params[1], g.bf_params[-1]

    if best_fit_algor == 'brute':
        isoch_fit_errors = []
        # Assign errors as the largest step in each parameter.
        par_vals = ip_list[1]
        for pv in par_vals:
            # If any parameter has a single valued range, assign an error
            # of -1.
            if len(pv) > 1:
                # Find largest delta in this parameter used values.
                largest_delta = np.diff(pv).max()
                # Store the maximum value.
                isoch_fit_errors.append(largest_delta)
            else:
                isoch_fit_errors.append(-1.)

    elif best_fit_algor == 'genet':
        if N_b >= 2:
            # Call bootstrap function with resampling to get the uncertainty
            # in each parameter.
            isoch_fit_errors = bootstrap.main(err_lst, memb_prob_avrg_sort,
                                              completeness, ip_list,
                                              st_dist_mass)
        else:
            print('Skipping bootstrap process.')
            # No error assignment.
            isoch_fit_errors = [-1.] * len(isoch_fit_params[0])

    return isoch_fit_errors
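# Aside: both params_errors variants above delegate the resampling itself to the
# project's bootstrap.main. A minimal sketch of the underlying idea follows; the
# model_fit callable, the draw count and the names here are illustrative
# assumptions, not part of the actual project code.
import numpy as np

def bootstrap_errors(data, model_fit, n_boot=100, seed=None):
    """Estimate parameter uncertainties by refitting resampled data.

    model_fit is assumed to map an array of observations to an array of
    best-fit parameters.
    """
    rng = np.random.default_rng(seed)
    fits = []
    for _ in range(n_boot):
        # Resample the observations with replacement.
        sample = data[rng.integers(0, len(data), size=len(data))]
        fits.append(model_fit(sample))
    # One standard deviation per parameter across the bootstrap fits.
    return np.std(np.asarray(fits), axis=0)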
def test_bootstrap_main(self, dirname_mock, realpath_mock, run_mock, exit_mock,
                        stderr_mock, subprocess_Popen_mock):
    bootstrap.main(["bootstrap.py", "hostname,hostname2", "/tmp/bootstrap", "root",
                    "sshkey_file", "setupAgent.py", "ambariServer",
                    "centos6", "1.1.1", "8440", "root", "passwordfile"])
    self.assertTrue(run_mock.called)
    run_mock.reset_mock()

    bootstrap.main(["bootstrap.py", "hostname,hostname2", "/tmp/bootstrap", "root",
                    "sshkey_file", "setupAgent.py", "ambariServer",
                    "centos6", "1.1.1", "8440", "root", None])
    self.assertTrue(run_mock.called)
    run_mock.reset_mock()

    def side_effect(retcode):
        raise Exception(retcode, "sys.exit")

    exit_mock.side_effect = side_effect
    try:
        bootstrap.main(["bootstrap.py", "hostname,hostname2", "/tmp/bootstrap"])
        self.fail("sys.exit(2)")
    except Exception:
        # Expected
        pass
    self.assertTrue(exit_mock.called)
def test_bootstrap_main(self, dirname_mock, realpath_mock, run_mock, exit_mock,
                        stderr_mock, subprocess32_Popen_mock):
    bootstrap.main(["bootstrap.py", "hostname,hostname2", "/tmp/bootstrap", "root",
                    "123", "sshkey_file", "setupAgent.py", "ambariServer",
                    "centos6", "1.1.1", "8440", "root", "passwordfile"])
    self.assertTrue(run_mock.called)
    run_mock.reset_mock()

    bootstrap.main(["bootstrap.py", "hostname,hostname2", "/tmp/bootstrap", "root",
                    "123", "sshkey_file", "setupAgent.py", "ambariServer",
                    "centos6", "1.1.1", "8440", "root", None])
    self.assertTrue(run_mock.called)
    run_mock.reset_mock()

    def side_effect(retcode):
        raise Exception(retcode, "sys.exit")

    exit_mock.side_effect = side_effect
    try:
        bootstrap.main(["bootstrap.py", "hostname,hostname2", "/tmp/bootstrap"])
        self.fail("sys.exit(2)")
    except Exception:
        # Expected
        pass
    self.assertTrue(exit_mock.called)
def main(job_id):
    db = oursql.connect(
        db='u_legoktm_wikidata_properties_p',
        host="sql-s1-user.toolserver.org",
        read_default_file=os.path.expanduser("~/.my.cnf"),
    )
    cursor = db.cursor()
    query = """
SELECT lang, source, pid, target_qid, user, timestamp, done, pid2, qid2, pid3, qid3, pid4, qid4, pid5, qid5
FROM jobs
WHERE id=?
"""
    cursor.execute(query, (job_id, ))
    data = cursor.fetchone()
    info = """<p>Data was imported from <a href="//www.wikidata.org/wiki/{lang}:{source}">{lang}:{source}</a>.
{pid} was linked with {targetqid}.
It was requested by <a href="//www.wikidata.org/wiki/User:{user}">{user}</a>.
"""
    if data[6]:
        info += 'The task finished at {ts}.'
    else:
        info += 'The task started at {ts}, and is currently running.'
    if data[7]:
        info += '\n<br />The following properties were also added:\n<ul>'
        info += '<li>{pid} --> {qid}</li>\n'.format(pid=plink(data[7]), qid=qlink(data[8]))
        if data[9]:
            info += '<li>{pid} --> {qid}</li>\n'.format(pid=plink(data[9]), qid=qlink(data[10]))
        if data[11]:
            info += '<li>{pid} --> {qid}</li>\n'.format(pid=plink(data[11]), qid=qlink(data[12]))
        if data[13]:
            info += '<li>{pid} --> {qid}</li>\n'.format(pid=plink(data[13]), qid=qlink(data[14]))
        info += '</ul>\n'
    info += '</p>'
    info = info.format(
        lang=data[0],
        source=unicode(data[1]),
        pid=plink(data[2]),
        targetqid=plink(data[3]),
        user=unicode(data[4]),
        ts=data[5].strftime('%Y-%m-%dT%H:%M:%SZ'),
    )
    query = """
SELECT lang, page, success, comment, item, timestamp
FROM edits
WHERE job_id=?
"""
    cursor = db.cursor()
    cursor.execute(query, (job_id, ))
    header = """
<table class="table table-bordered">
  <thead>
    <tr>
      <th>Page</th>
      <th>Status</th>
      <th>Item</th>
      <th>Timestamp</th>
    </tr>
  </thead>
  <tbody>
"""
    footer = """
  </tbody>
</table>
"""
    text = ''
    for lang, page, success, comment, item, timestamp in cursor:
        if not comment:
            comment = ''
        else:
            comment = ': ' + comment
        link = '<a href="//www.wikidata.org/wiki/{lang}:{page}">{lang}:{page}</a>'.format(
            lang=lang, page=unicode(page))
        if item:
            item_link = '<td><a href="//www.wikidata.org/wiki/Q{item}">Q{item}</a></td>'.format(
                item=item)
        else:
            item_link = '<td class=muted>--</td>'
        # print unicode(ts)
        # print type(ts)
        row = '<tr class="{css}"><td>{link}</td><td>{status}{comment}</td>{itemlink}<td>{ts}</td></tr>\n'.format(
            link=unicode(link),
            status=convert_status(success),
            comment=comment,
            itemlink=item_link.decode('utf-8'),
            ts=timestamp.strftime('%Y-%m-%dT%H:%M:%SZ'),
            css=convert_status(success, css=True))
        text += row
    all = info + header + text + footer
    return bootstrap.main(tool='properties.js log', title='Job archive', stuff=unicode(all))
#!/usr/bin/env python
# This file is only a "symlink" to bootstrap.py, all logic should go there.

import os
import sys

# If this is python2, check if python3 is available and re-execute with that
# interpreter.
if sys.version_info.major < 3:
    try:
        # On Windows, `py -3` sometimes works.
        # Try this first, because 'python3' sometimes tries to launch the app
        # store on Windows
        os.execvp("py", ["py", "-3"] + sys.argv)
    except OSError:
        try:
            os.execvp("python3", ["python3"] + sys.argv)
        except OSError:
            # Python 3 isn't available, fall back to python 2
            pass

rust_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.join(rust_dir, "src", "bootstrap"))

import bootstrap
bootstrap.main()
def run(req, logger):
    # Do a sanity check of the sudo password before we do anything
    # destructive.
    if req.master_pw:
        try:
            logger.debug("Test the sudo password by running an ls")
            run_sudo_program(["/bin/ls", "/"], req.master_pw, logger)
        except:
            logger.exception("Unable to run sudo commands with provided password")
            raise Exception("Unable to run sudo commands with provided password")

    # create directory for previous version
    prev_version_dir = os.path.join(req.deployment_home, "prev_version")
    if os.path.exists(prev_version_dir):
        logger.info("Removing old uninstall at %s" % prev_version_dir)
        shutil.rmtree(prev_version_dir)
    os.makedirs(prev_version_dir)

    # run the backup and uninstall
    # TODO: We should make the uninstall command atomic: if it fails, we should
    # restore the original state.
    logger.info("Backing up old application version to %s" % prev_version_dir)
    _run_engage_command(req, "backup", ["backup", prev_version_dir], logger)
    logger.info("Uninstalling old application version")
    _run_engage_command(req, "backup", ["uninstall"], logger)

    # If present, move the password database to the backup dir until we finish
    # the bootstrap
    pw_repository_path = os.path.join(req.config_dir, "pw_repository")
    pw_salt_path = os.path.join(req.config_dir, "pw_salt")
    pw_repository_save_path = os.path.join(prev_version_dir, "pw_repository")
    pw_salt_save_path = os.path.join(prev_version_dir, "pw_salt")
    if os.path.exists(pw_repository_path):
        os.rename(pw_repository_path, pw_repository_save_path)
        os.rename(pw_salt_path, pw_salt_save_path)

    # remove engage files from the old version
    logger.info("Removing old version files")
    _remove_engage_files(req)

    # bootstrap the new engage
    logger.info("Bootstrapping new engage environment")
    boot_cmd = ["-d", req.log_directory, req.deployment_home]
    if req.options.python_exe:
        boot_cmd = ["-p", req.options.python_exe] + boot_cmd
    rc = bootstrap.main(boot_cmd)
    if rc != 0:
        raise Exception("Bootstrap of new engage into %s failed" % req.deployment_home)

    # move the password database back, if present
    if os.path.exists(pw_repository_save_path):
        os.rename(pw_repository_save_path, pw_repository_path)
        os.rename(pw_salt_save_path, pw_salt_path)

    # run the upgrade
    install_script = os.path.join(req.engage_bin_dir, "install")
    upgrade_args = ["-u", prev_version_dir, "-f", "upgrade_subprocess.log",
                    "--config-choices-file=%s" %
                    os.path.join(prev_version_dir, "config_choices.json")]
    if req.application_archive:
        upgrade_args.append("--application-archive=%s" % req.application_archive)
        logger.info("Running upgrade to new application at %s" % req.application_archive)
    else:
        logger.info("Running upgrade to new application")
    rc = _run_engage_command(req, "install", upgrade_args, logger, valid_rcs=[0, 3])
    if rc == 3:
        logger.info("Upgrade failed, starting rollback to previous version")
        rollback_upgrade(req, prev_version_dir, logger)
        logger.info("Upgrade failed, rollback to previous version successful.")
        return 3
    logger.info("Upgrade successful.")
    return 0
def main(Full_query, showplot=0, medmean=1, save_file='y'):
    SN_Array2 = []
    #Accept SQL query as input and then grab what we need
    print "SQL Query:", Full_query
    sql_input = Full_query
    SN_Array2 = grab(sql_input, Full_query)

    ### I inserted the function bootstrap in this composite code. (by Ricky, Apr 16, 2014)
    ### I took it back out because it ruined the array and plotted some nonsense.
    ### The function you added above looks nothing like bootstrap.py.
    ### The plan was to call your piece of code from within this one, not add something that doesn't work.
    #SN_Array = bootstrap(SN_Array_2) <---- This is bad.
    ### The call should look like this, and bootstrap.py should have a main() function to call.
    opt = str(raw_input("Do you want to do bootstrapping? (y/n) "))

    if (opt == 'n'):
        SN_Array = SN_Array2
        #finds the longest SN we have for our initial template
        lengths = []
        for SN in SN_Array:
            lengths.append(len(SN.flux[np.where(SN.flux != 0)]))
        temp = [SN for SN in SN_Array
                if len(SN.flux[np.where(SN.flux != 0)]) == max(lengths)]
        try:
            composite = temp[0]
        except IndexError:
            print "No spectra found"
            exit()

        #scales data, makes a composite, and splices in non-overlapping data
        #Here is where we set our wavelength range for the final plot
        wmin = 4000
        wmax = 7500
        wavemin = composite.minwave
        wavemax = composite.maxwave

        #finds range of useable data
        good = np.where(len(np.where(((wavemin <= wmin) & (wavemax >= wmax)) > 100)))
        template = supernova()
        template = SN_Array[good[0]]
        template = composite

        #Starts our main loop
        i = 0
        n_start = 0
        n_end = 1
        scales = []
        while (n_start != n_end):
            n_start = len([x for x in scales if x > 0])
            scales = []
            scales = find_scales(SN_Array, template.flux, template.ivar)
            n_scale = len([x for x in scales if x > 0])
            SN_Array = scale_data(SN_Array, scales)
            template = average(SN_Array, template, medmean)
            n_end = n_scale
        n_start = n_end

        print "Done."
        print "Average redshift =", template.redshift
        print "Average phase =", template.phase
        print "Average velocity =", template.velocity

        #This next line creates a unique filename for each run based on the sample set used
        f_name = "../plots/" + file_name.make_name(SN_Array)
        template.savedname = f_name + '.dat'
        lowindex = np.where(template.wavelength == find_nearest(template.wavelength, wmin))
        highindex = np.where(template.wavelength == find_nearest(template.wavelength, wmax))

        #This plots the individual composite just so you can see how it
        if int(showplot) == 1:
            plt.plot(template.wavelength[lowindex[0]:highindex[0]],
                     template.flux[lowindex[0]:highindex[0]])
            plt.plot(template.wavelength[lowindex[0]:highindex[0]],
                     template.ivar[lowindex[0]:highindex[0]])
            #This saves it, if you want to.
            plt.savefig('../plots/' + f_name + '.png')
            plt.show()

        #Either writes data to file, or returns it to user
        #This part is still in progress
        table = Table([template.wavelength, template.flux, template.ivar],
                      names=('Wavelength', 'Flux', 'Variance'))
        if save_file == 'y':
            table.write(template.savedname, format='ascii')
            return template
        else:
            return template

    if (opt == 'y'):
        tries = int(raw_input("Enter number of bootstraps: "))  # Number of bootstraps.
        boot_flux = []
        boot_flux_unscaled = [0] * tries
        for j in range(tries):
            SN_Array = bootstrap.main(SN_Array2)
            #SN_Array = SN_Array2
            #finds the longest SN we have for our initial template
            lengths = []
            for SN in SN_Array:
                lengths.append(len(SN.flux[np.where(SN.flux != 0)]))
            temp = [SN for SN in SN_Array
                    if len(SN.flux[np.where(SN.flux != 0)]) == max(lengths)]
            try:
                composite = temp[0]
            except IndexError:
                print "No spectra found"
                exit()

            #scales data, makes a composite, and splices in non-overlapping data
            #Here is where we set our wavelength range for the final plot
            wmin = 4000
            wmax = 7500
            wavemin = composite.minwave
            wavemax = composite.maxwave

            #finds range of useable data
            good = np.where(len(np.where(((wavemin <= wmin) & (wavemax >= wmax)) > 100)))
            template = supernova()
            template = SN_Array[good[0]]
            template = composite

            #Starts our main loop
            i = 0
            n_start = 0
            n_end = 1
            scales = []
            while (n_start != n_end):
                n_start = len([x for x in scales if x > 0])
                scales = []
                scales = find_scales(SN_Array, template.flux, template.ivar)
                n_scale = len([x for x in scales if x > 0])
                SN_Array = scale_data(SN_Array, scales)
                template = average(SN_Array, template, medmean)
                n_end = n_scale
            n_start = n_end

            boot_flux_unscaled[j] = template.flux[
                np.where(template.wavelength == wmin)[0]:
                np.where(template.wavelength == wmax)[0]]
            boot_flux.append(np.divide(boot_flux_unscaled[j],
                                       np.median(boot_flux_unscaled[j])))

        ### 16th and 84th percentile of the spectrum (for scatter plot)
        percentile = erf(1 / np.sqrt(2.))
        low_pc = 0.5 - percentile / 2.
        up_pc = 0.5 + percentile / 2.

        ### The 16th and 84th percentile index
        low_ind = np.round(tries * low_pc).astype(int)
        up_ind = np.round(tries * up_pc).astype(int)

        ### Sort the fluxes in each wavelength, and put the 16th and 84th
        ### percentile fluxes into two arrays
        median = np.median(boot_flux, axis=0)  ### Median of the spectrum (for scaling)
        low_arr = np.divide(np.sort(boot_flux, axis=0)[low_ind - 1], median)
        up_arr = np.divide(np.sort(boot_flux, axis=0)[up_ind - 1], median)

        #for j in range(np.sort(boot_flux, axis = 0)[low_ind - 1].size):
        #    print j, median[j], aaaa[j], low_arr[j], up_arr[j]

        lowindex = np.where(template.wavelength == find_nearest(template.wavelength, wmin))
        highindex = np.where(template.wavelength == find_nearest(template.wavelength, wmax))

        #print low_arr[lowindex[0]:highindex[0]]
        plt.plot(template.wavelength[lowindex[0]:highindex[0]], low_arr)
        plt.plot(template.wavelength[lowindex[0]:highindex[0]], up_arr)
        print low_arr
        print up_arr
        minflux = np.min(low_arr) * 0.9
        maxflux = np.max(up_arr) * 1.1
        plt.ylim((minflux, maxflux))
        plt.show()
        plt.close()
        #print median[lowindex[0]:highindex[0]]

        f_name = "../plots/" + file_name.make_name(SN_Array)
        template.savedname = f_name + '.dat'

        #This plots the individual composite just so you can see how it
        if int(showplot) == 1:
            plt.plot(template.wavelength[lowindex[0]:highindex[0]],
                     template.flux[lowindex[0]:highindex[0]])
            plt.plot(template.wavelength[lowindex[0]:highindex[0]],
                     template.ivar[lowindex[0]:highindex[0]])
            #This saves it, if you want to.
            plt.savefig('../plots/' + f_name + '.png')
            plt.show()

        #Either writes data to file, or returns it to user
        #This part is still in progress
        table = Table([template.wavelength, template.flux, template.ivar,
                       template.phase, template.dm15],
                      names=('Wavelength', 'Flux', 'Variance', 'Age', 'Dm_15s'))
        if save_file == 'y':
            table.write(template.savedname, format='ascii')
            return template
        else:
            return template

        """
        print "Done."
        print "Average redshift =", template.redshift
        print "Average phase =", template.phase
        print "Average velocity =", template.velocity
        #This next line creates a unique filename for each run based on the sample set used
        f_name = "../plots/" + file_name.make_name(SN_Array)
        template.savedname = f_name + '.dat'
        lowindex = np.where(template.wavelength == find_nearest(template.wavelength, wmin))
        highindex = np.where(template.wavelength == find_nearest(template.wavelength, wmax))
        #This plots the individual composite just so you can see how it
        if int(showplot) == 1:
            plt.plot(template.wavelength[lowindex[0]:highindex[0]], template.flux[lowindex[0]:highindex[0]])
            plt.plot(template.wavelength[lowindex[0]:highindex[0]], template.ivar[lowindex[0]:highindex[0]])
            #This saves it, if you want to.
            plt.savefig('../plots/' + f_name + '.png')
            plt.show()
        #Either writes data to file, or returns it to user
        #This part is still in progress
        table = Table([template.wavelength, template.flux, template.ivar],
                      names = ('Wavelength', 'Flux', 'Variance'))
        if save_file=='y':
            table.write(template.savedname,format='ascii')
            return template
        else:
            return template
        """


if __name__ == "__main__":
    main()
def main():
    stuff = cgi.FieldStorage()
    try:
        qid = stuff['id'].value.lower()
    except KeyError:
        print bootstrap.main(tool='checker.py', stuff=form, title='checker.py')
        return
    try:
        site_lang = stuff['site'].value.lower()
    except KeyError:
        site_lang = 'en'
    site_lang = site_lang.replace('_', '-')
    site = pywikibot.Site(site_lang, 'wikipedia')
    repo = site.data_repository()
    #qid = 'Q1'
    item = pywikibot.ItemPage(repo, qid)
    sitelinks = item.get().get('sitelinks')
    # sitelinks = repo.get_sitelinks(qid)
    # Iterate over a copy of the keys, since the dict is modified in the loop.
    for lang in sitelinks.keys():
        if '_' in lang:
            newlang = lang.replace('_', '-')
            sitelinks[newlang] = {'title': sitelinks[lang]['title']}
            del sitelinks[lang]
    #print sitelinks.keys()
    #pull the enwiki link
    enwiki = sitelinks['{0}wiki'.format(site_lang)]['title']
    pg = pywikibot.Page(site, enwiki)
    enwiki_text = pg.get()
    local = textlib.getLanguageLinks(enwiki_text, insite=site)
    if not local and pg.namespace() == 10:
        try:
            enwiki_text = pywikibot.Page(site, pg.title() + '/doc').get()
            local = textlib.getLanguageLinks(enwiki_text, insite=site,
                                             template_subpage=True)
        except pywikibot.NoPage:
            pass
    all_langs = union(sitelinks.keys(), local.keys())
    header = """
<table class="table table-bordered">
  <thead>
    <tr>
      <th>Language</th>
      <th>Local</th>
      <th>Wikidata</th>
    </tr>
  </thead>
  <tbody>
"""
    footer = """
  </tbody>
</table>
"""
    text = ''
    allgood = True
    for lang in all_langs:
        row = ''
        prefix = lang.replace('wiki', '').replace('_', '-')
        row += '<td><a href="//{1}.wikipedia.org/wiki/{0}:">{0}wiki</a></td>'.format(
            prefix, site_lang)
        l = None
        d = None
        if lang in local:
            row += '<td><a href="//{2}.wikipedia.org/wiki/{0}:{1}">{0}:{1}</a></td>'.format(
                prefix, local[lang], site_lang)
            l = local[lang]
        else:
            row += '<td class=muted>----</td>'
        if lang in sitelinks:
            row += '<td><a href="//{2}.wikipedia.org/wiki/{0}:{1}">{0}:{1}</a></td>'.format(
                prefix, sitelinks[lang]['title'], site_lang)
            d = sitelinks[lang]['title']
        else:
            row += '<td class=muted>----</td>'
        if (l and d) and (l == d):
            row = '<tr class="done">' + row
        elif d and not l:
            row = '<tr class="done">' + row
        else:
            #lets see if its a redirect.
            checked = False
            if l and d:
                s = pywikibot.Site(prefix, 'wikipedia')
                l_p = pywikibot.Page(s, l)
                d_p = pywikibot.Page(s, d)
                if l_p.isRedirectPage():
                    if d_p == l_p.getRedirectTarget():
                        row = '<tr class="already">' + row
                        checked = True
                elif d_p.isRedirectPage():
                    if l_p == d_p.getRedirectTarget():
                        row = '<tr class="already">' + row
                        checked = True
            if not checked:
                row = '<tr class="not">' + row
                allgood = False
        row += '</td>\n'
        text += row
    msg = ''
    if allgood and local:
        msg = '<p><center><a href="//{1}.wikipedia.org/wiki/{0}">{0}</a> can be removed of interwiki links.</center></p>'.format(
            enwiki, site_lang)
    elif allgood and not local:
        msg = '<p><center><a href="//{2}.wikipedia.org/wiki/{0}">{0}</a> (<a href="//www.wikidata.org/wiki/{1}">{1}</a>) has successfully been migrated to Wikidata.</center></p>'.format(
            enwiki, qid.upper(), site_lang)
    else:
        msg = '<p><center>Status of <a href="//{2}.wikipedia.org/wiki/{0}">{0}</a> (<a href="//www.wikidata.org/wiki/{1}">{1}</a>):</center></p>'.format(
            enwiki, qid.upper(), site_lang)
    text = msg + header + text + footer
    print bootstrap.main(tool='checker.py', stuff=text, title='checker.py')
""" This code gets executed first when the bundle is executed. """ from bootstrap import main if __name__ == "__main__": main()
import sys

import env
import bootstrap

if __name__ == '__main__':
    sys.exit(bootstrap.main(sys.argv[1:]))
def main():
    stuff = cgi.FieldStorage()
    try:
        qid = stuff['id'].value.lower()
    except KeyError:
        print bootstrap.main(tool='checker.py', stuff=form, title='checker.py')
        return
    try:
        site_lang = stuff['site'].value.lower()
    except KeyError:
        site_lang = 'en'
    site_lang = site_lang.replace('_', '-')
    site = pywikibot.Site(site_lang, 'wikipedia')
    repo = site.data_repository()
    #qid = 'Q1'
    sitelinks = repo.get_sitelinks(qid)
    # Iterate over a copy of the keys, since the dict is modified in the loop.
    for lang in sitelinks.keys():
        if '_' in lang:
            newlang = lang.replace('_', '-')
            sitelinks[newlang] = {'title': sitelinks[lang]['title']}
            del sitelinks[lang]
    #print sitelinks.keys()
    #pull the enwiki link
    enwiki = sitelinks['{0}wiki'.format(site_lang)]['title']
    pg = pywikibot.Page(site, enwiki)
    enwiki_text = pg.get()
    local = textlib.getLanguageLinks(enwiki_text, insite=site)
    if not local and pg.namespace() == 10:
        try:
            enwiki_text = pywikibot.Page(site, pg.title() + '/doc').get()
            local = textlib.getLanguageLinks(enwiki_text, insite=site,
                                             template_subpage=True)
        except pywikibot.NoPage:
            pass
    all_langs = union(sitelinks.keys(), local.keys())
    header = """
<table class="table table-bordered">
  <thead>
    <tr>
      <th>Language</th>
      <th>Local</th>
      <th>Wikidata</th>
    </tr>
  </thead>
  <tbody>
"""
    footer = """
  </tbody>
</table>
"""
    text = ''
    allgood = True
    for lang in all_langs:
        row = ''
        prefix = lang.replace('wiki', '').replace('_', '-')
        row += '<td><a href="//{1}.wikipedia.org/wiki/{0}:">{0}wiki</a></td>'.format(
            prefix, site_lang)
        l = None
        d = None
        if lang in local:
            row += '<td><a href="//{2}.wikipedia.org/wiki/{0}:{1}">{0}:{1}</a></td>'.format(
                prefix, local[lang], site_lang)
            l = local[lang]
        else:
            row += '<td class=muted>----</td>'
        if lang in sitelinks:
            row += '<td><a href="//{2}.wikipedia.org/wiki/{0}:{1}">{0}:{1}</a></td>'.format(
                prefix, sitelinks[lang]['title'], site_lang)
            d = sitelinks[lang]['title']
        else:
            row += '<td class=muted>----</td>'
        if (l and d) and (l == d):
            row = '<tr class="done">' + row
        elif d and not l:
            row = '<tr class="done">' + row
        else:
            #lets see if its a redirect.
            checked = False
            if l and d:
                s = pywikibot.Site(prefix, 'wikipedia')
                l_p = pywikibot.Page(s, l)
                d_p = pywikibot.Page(s, d)
                if l_p.isRedirectPage():
                    if d_p == l_p.getRedirectTarget():
                        row = '<tr class="already">' + row
                        checked = True
                elif d_p.isRedirectPage():
                    if l_p == d_p.getRedirectTarget():
                        row = '<tr class="already">' + row
                        checked = True
            if not checked:
                row = '<tr class="not">' + row
                allgood = False
        row += '</td>\n'
        text += row
    msg = ''
    if allgood and local:
        msg = '<p><center><a href="//{1}.wikipedia.org/wiki/{0}">{0}</a> can be removed of interwiki links.</center></p>'.format(
            enwiki, site_lang)
    elif allgood and not local:
        msg = '<p><center><a href="//{2}.wikipedia.org/wiki/{0}">{0}</a> (<a href="//www.wikidata.org/wiki/{1}">{1}</a>) has successfully been migrated to Wikidata.</center></p>'.format(
            enwiki, qid.upper(), site_lang)
    else:
        msg = '<p><center>Status of <a href="//{2}.wikipedia.org/wiki/{0}">{0}</a> (<a href="//www.wikidata.org/wiki/{1}">{1}</a>):</center></p>'.format(
            enwiki, qid.upper(), site_lang)
    text = msg + header + text + footer
    print bootstrap.main(tool='checker.py', stuff=text, title='checker.py')
#!/usr/bin/env python
# Copyright 2016 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.

import sys
import os

dir = os.path.dirname(__file__)
sys.path.append(os.path.abspath(os.path.join(dir, "src", "bootstrap")))

import bootstrap
bootstrap.main()
from flask import Flask, abort, request, jsonify
import os
import sys
import imp

import bootstrap

# Load app
app = Flask(__name__)

# Load dependencies
container = bootstrap.main()


@app.route("/")
def hello():
    module_name = request.args.get('module', default='', type=str)
    argument = request.args.get('argument', default='', type=str)

    if module_name == '':
        return abort(400)

    module_dir = get_path(module_name)

    if os.path.isdir(module_dir):
        sys.path.append(module_dir)
        #main = __import__('main')
        #output = main.execute(container, argument)
        module = imp.load_source('module.name', module_dir + "/main.py")
        output = module.execute(container, argument)

        if type(output) is dict or type(output) is list:
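# Aside: imp.load_source, used in the truncated Flask snippet above, is deprecated
# and was removed in Python 3.12. A sketch of the importlib.util equivalent; the
# module_dir path and module name below are placeholders, not values from the
# snippet itself.
import importlib.util

module_dir = "/path/to/module"  # placeholder
spec = importlib.util.spec_from_file_location("module.name", module_dir + "/main.py")
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)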