def upgrade():
    """Alembic upgrade hook: create all SQLAlchemy tables, then reload data.

    Imports are local so the migration module can be loaded without pulling
    in the Flask app at import time.
    """
    ### commands auto generated by Alembic - please adjust! ###
    from application.app import db, app
    from restore import restore
    # create_all() and restore() both need an active application context.
    with app.app_context():
        db.create_all()
        restore()
def main():
    """Command-line dispatcher for the backup tool.

    Subcommands: init, restore [dest], get <pattern>, test, list [pattern],
    store <dir>.  'init' runs unconditionally; everything else requires
    backup.readyToBackup() to be true.
    """
    cmds = sys.argv
    if len(cmds) > 1:
        ui = cmds[1].lower()
        if ui == 'init':
            init()
        # All remaining commands need an initialised backup store.
        if backup.readyToBackup():
            if ui == 'restore' and len(backup.getIndex()) > 0:
                if len(cmds) > 2:
                    # Restore into the user-supplied subdirectory.
                    restore(os.path.join(backup.restDir, cmds[2]))
                else:
                    print("Restoring to default location %s " % backup.restDir)
                    restore(backup.restDir)
                print("All files restored.")
            elif ui == 'get':
                try:
                    get(cmds[2])
                except IndexError:
                    # No pattern argument was supplied.
                    print("Please give a pattern eg. 'file' ")
            elif ui == 'test':
                # Validate the archive index and the stored files.
                fileEntries = validateArchiveIndex()
                invalidFiles = validateFiles()
                #print(fileEntries)
                #print("Correct Entries: %s " % fileEntries['Correct Entries'])
                #print("Incorrect Entries: %s " % archiveList['Incorrect Entries'])
                if fileEntries['Erroneous Paths'] != []:
                    print("Erroneous Paths: %d" % len(fileEntries['Erroneous Paths']))
                    print('\n'.join(' {}: {}'.format(*x) \
                          for x in enumerate(fileEntries['Erroneous Paths'], 1)))
            elif ui == 'list':
                if len(cmds) > 2:
                    # List only entries matching the given pattern.
                    print('\n'.join('{}: {}'.format(*x) \
                          for x in enumerate(listFiles(cmds[2]), 1)))
                else:
                    # NOTE(review): listFiles() is called twice here — once
                    # discarded, once enumerated; confirm the first call has
                    # a needed side effect.
                    listFiles()
                    print('\n'.join('{}: {}'.format(*x) \
                          for x in enumerate(listFiles(), 1)))
            elif ui == 'store' and len(cmds) > 2:
                if os.path.isdir(cmds[2]):
                    store(cmds[2])
                else:
                    print("invalid directory")
        else:
            print("not ready... run 'cat' init")
    else:
        print("run 'init' to initialise the program")
def main():
    """Experms entry point (Python 2).

    Requires root; handles --version/--restore/--total short-circuits, then
    starts the pyinotify watcher with the parsed configuration.
    """
    # Refuse to run without root — inotify watches and chmod/chown need it.
    if not os.geteuid() == 0:
        print >> sys.stderr, ("You need to run experms with root privileges."
                              "\nAborting.")
        sys.exit(1)
    set_procname("experms")
    args = parse_arguments()
    # Honour systemd's debug log level if set in the environment.
    try:
        if os.environ["SYSTEMD_LOG_LEVEL"] == "debug":
            args.debug = True
    except KeyError:
        pass
    if args.version:
        print "Experms v" + version
        sys.exit(0)
    if args.restore:
        # One-shot restore of configured permissions, then exit.
        if args.debug:
            debug_message()
        config = configfile.main.Check(args.config, args.debug)
        restore(config, args.debug)
        sys.exit(0)
    if args.total:
        # Report configured watch count vs. the kernel inotify limit.
        if args.debug:
            debug_message()
        config = configfile.main.Check(args.config, args.debug)
        print ("Directories configured for watching:\n%s"
               % len(collect(config)[0]))
        with open("/proc/sys/fs/inotify/max_user_watches", "r") as inotifyconf:
            inotifyconfig = inotifyconf.read().strip()
        print ("Directories allowed to watch with inotify:\n%s" % inotifyconfig)
        sys.exit(0)
    if args.debug:
        debug_message()
    config = configfile.main.Check(args.config, args.debug)
    # Optional restore pass before entering the watch loop.
    if config.restore:
        if args.debug:
            print >> sys.stderr, ("[debug] Starting restore")
        restore(config, args.debug)
    start_pyinotify(config, args.debug)
    sys.exit(0)
def read_co_spec(fname):
    """Load a CO spectrum from *fname* and return (velocity, temperature)."""
    reader = restore(fname, 0, ['v', 'T'], ['f', 'f'])
    spectrum = reader.read()
    return spectrum['v'], spectrum['T']
def select_and_restore(image_names, destination, configuration):
    """Restore the best image out of *image_names* and save it to *destination*.

    If a window frame of at least 5 images is found, run the iterative
    restoration on it; otherwise fall back to the single most similar image.
    """
    image_names.sort()
    frames = [cv2.imread(name) for name in image_names]
    # Find the best windows frame
    wf = winframe(frames)
    if len(wf) >= 5:
        # Restore images from the windows frame
        result = restore(wf, configuration.restore.iterations)
    else:
        best = find_the_most_similar(frames)
        result = frames[best]
    # Save result
    cv2.imwrite(destination, result)
def get_26src_info_tex(fname=''):
    """Read the 26-source table (coords, 1665 MHz Tbg and Tex), skipping 2 header rows."""
    columns = ['src', 'l', 'b', 'il', 'ib', 'tbg1665', 'tex1665']
    formats = ['s'] + ['f'] * 6
    return restore(fname, 2, columns, formats).read()
def read_vrange(fname=''):
    """Read the velocity-range table (idx, src, vmin, vmax), skipping 2 header rows."""
    reader = restore(fname, 2, ['idx', 'src', 'vmin', 'vmax'], ['i', 's', 'f', 'f'])
    return reader.read()
def read_src_lb(fname='result/tbg_408_to_compare.txt'):
    """Read source coordinates and 408 MHz background comparison columns."""
    columns = ['src', 'l', 'b', 'il', 'ib', 'tbg',
               'l-idx', 'b-idx', 'tbg1', 'tbg_hpy']
    formats = ['s'] + ['f'] * 9
    return restore(fname, 2, columns, formats).read()
def read_bins_to_cal_bg(fname='../sub_data/bins_to_cal_bg.txt'):
    """Read per-source velocity windows used for background estimation."""
    columns = ['idx', 'src',
               'avmin1', 'avmax1', 'avmin2', 'avmax2',
               'evmin1', 'evmax1', 'evmin2', 'evmax2']
    formats = ['i', 's'] + ['f'] * 8
    return restore(fname, 2, columns, formats).read()
def peak_info(source, fname='../data/gauss_1665_peaks.txt'):
    """Return (guess parameters, baseline range) for *source*.

    The guess list concatenates [tau, v0, wid, 0.] per Gaussian component
    (the trailing 0. is a placeholder for Tex); the baseline range is the
    last [bmin, bmax] pair listed for the source.
    """
    columns = ['idx', 'src', 'tau', 'v0', 'wid', 'bmin', 'bmax']
    formats = ['i', 's', 'f', 'f', 'f', 'f', 'f']
    table = restore(fname, 2, columns, formats).read()
    grouped = {}
    for i, name in enumerate(table['src']):
        entry = grouped.setdefault(name, {'guessp': []})
        entry['base_range'] = [table['bmin'][i], table['bmax'][i]]
        entry['guessp'] += [table['tau'][i], table['v0'][i], table['wid'][i], 0.]
    return grouped[source]['guessp'], grouped[source]['base_range']
def get_mm_survey_src(fname=''):
    """Return the list of source names from a single-column file."""
    return restore(fname, 0, ['src'], ['s']).read()['src']
def main() -> None:
    """Entry point: dispatch the docopt-parsed subcommand against the repo.

    Subcommands: create, verify (--hashes/--backups/--orphans), unlock,
    backup (--prefix/--verify), restore.  All activity is appended to the
    log file configured by LOG_FILE/LOG_LEVEL.
    """
    args = docopt.docopt(__doc__, version=VERSION)
    logging.basicConfig(filename=LOG_FILE, filemode='a',
                        format='%(levelname)s - %(message)s', level=LOG_LEVEL)
    now = time.asctime()
    logging.info(f'log start: {now}')
    logging.info(f'args: {args}')
    repo_file = args['REPO']
    if args['create']:
        Repo.create(repo_file)
    elif args['verify']:
        repos = Repo(repo_file)
        logging.info(f'Verifying repo {repo_file}')
        # Count how many verification passes the user selected.
        n = 0
        if args['--hashes']:
            repos.verify_all_hashes(delete=args['-d'])
            n += 1
        if args['--backups']:
            repos.verify_backups()
            n += 1
        if args['--orphans']:
            repos.find_orphans(delete=args['-d'])
            n += 1
        if n == 0:
            util.msg('You must use --hashes, --backups, or --orphans')
    elif args['unlock']:
        # XXX Could this be: Repo(repo_file).unlock() ?
        repos = Repo(repo_file)
        repos.unlock()
    elif args['backup']:
        arg_prefix = args['--prefix']
        prefix = arg_prefix if arg_prefix else 'backup'
        # Prefix ends up in file names — restrict it to alphanumerics.
        if not alphanum.match(prefix):
            util.fatal('prefix must be alphanumeric')
        want_verify = args['--verify']
        backup.run_backup(args['--repo'], prefix, args['FILE'],
                          verify=want_verify, verbose=args['-v'])
    elif args['restore']:
        restore.restore(repo_file)
    now = time.asctime()
    logging.info(f'log end: {now}')
def vel_range(n, fname='../data/vel_range_ms101src.txt'):
    """Return the (vmin, vmax) velocity range for source index *n*."""
    table = restore(fname, 2, ['idx', 'src', 'vmin', 'vmax'],
                    ['i', 's', 'f', 'f']).read()
    return table['vmin'][n], table['vmax'][n]
def main():
    """Batch-restore every JPEG/TIFF in a directory into a 'restored' subdir.

    Command-line parameters (substring-matched in sys.argv):
      dir=PATH          directory to process (default: cwd)
      Light-Dark=FLOAT  gamma target
      saturate=EXPR     saturation choice
    """
    # Set default parameters.
    debug = False
    directory = os.getcwd()
    gammatarg = None      # default when Light-Dark= is not given
    sat_choice = None     # default when saturate= is not given
    # Parse the command line parameters.
    for param in sys.argv:
        Dir = param.find("dir=")
        if Dir >= 0:
            directory = param[Dir + 4:]
        gam = param.find("Light-Dark=")
        if gam >= 0:
            gammatarg = float(param[gam + 11:])
        sat = param.find("saturate=")
        if sat >= 0:
            sat_choice = eval(param[sat + 9:])  # NOTE: eval on CLI input — trusted use only
    # if debug: print directory, gammatarg, sat_choice
    # Save the current directory.
    savedir = directory
    # Make list of files to process.
    os.chdir(directory)
    filelist = os.listdir(directory)
    jpegs = []
    JPEGS = []
    tiffs = []
    TIFFS = []
    for File in filelist:
        if File.find(".jpg") > 0:
            jpegs.append(File)
        if File.find(".JPG") > 0:
            # BUG FIX: was `JPGES.append(File)` — a NameError typo.
            JPEGS.append(File)
        if File.find(".tiff") > 0:
            tiffs.append(File)
        if File.find(".TIFF") > 0:
            TIFFS.append(File)
    # In windows the file searching is NOT case sensitive, so merge.
    if JPEGS != jpegs:
        jpegs += JPEGS
    if TIFFS != tiffs:
        tiffs += TIFFS
    # Make sure the output subdirectory exists before saving into it.
    outdir = os.path.join(directory, "restored")
    if not os.path.isdir(outdir):
        os.makedirs(outdir)
    # Loop over the photos to be processed.
    for photo in jpegs + tiffs:
        # Strip off directory name and .jpg to get file name.
        photoname = os.path.split(photo)[1]
        print(photoname)
        # Open photo.
        im = Image.open(photoname)
        # Restore the image.
        restored_image = restore(im)
        # Save file in subdirectory "restored", preserving the colour profile.
        newfilename = os.path.join(outdir, photoname)
        restored_image.save(newfilename, icc_profile=im.info.get('icc_profile'))
    # Return to saved directory at end.
    os.chdir(savedir)
def Start(backupID, outputLocation, callback=None):
    """Begin restoring *backupID* into *outputLocation*.

    Returns the restore worker object, or None if a restore for this
    backupID is already in progress.  Completion/failure is delivered via
    the worker's deferred; *callback* is forwarded to both handlers.
    """
    dhnio.Dprint(8, "restore_monitor.Start %s to %s" % (backupID, outputLocation))
    global _WorkingBackupIDs
    global _WorkingRestoreProgress
    # Ignore duplicate requests for a backup that is already being restored.
    if backupID in _WorkingBackupIDs.keys():
        return None
    # Stage the archive in a temp .tar.gz; '/' in the ID is not path-safe.
    outfd, outfilename = tmpfile.make("restore", ".tar.gz", backupID.replace("/", "_") + "_")
    r = restore.restore(backupID, outfd)
    r.MyDeferred.addCallback(restore_done, outfilename, outputLocation, callback)
    r.MyDeferred.addErrback(restore_failed, outfilename, callback)
    r.SetBlockRestoredCallback(block_restored_callback)
    r.SetPacketInCallback(packet_in_callback)
    # Track the in-flight restore and its per-block progress.
    _WorkingBackupIDs[backupID] = r
    _WorkingRestoreProgress[backupID] = {}
    return r
def Start(backupID, outputLocation, callback=None):
    """Begin restoring *backupID* into *outputLocation* (automat variant).

    Returns the restore state machine, or None if this backupID is already
    being restored.  The machine is started with the 'init' event.
    """
    lg.out(8, 'restore_monitor.Start %s to %s' % (backupID, outputLocation))
    global _WorkingBackupIDs
    global _WorkingRestoreProgress
    # Ignore duplicate requests for a backup that is already being restored.
    if backupID in _WorkingBackupIDs.keys():
        return None
    # Stage the archive in a temp .tar.gz; '/' in the ID is not path-safe.
    outfd, outfilename = tmpfile.make('restore', '.tar.gz', backupID.replace('/', '_') + '_')
    r = restore.restore(backupID, outfd)
    r.MyDeferred.addCallback(restore_done, outfilename, outputLocation, callback)
    r.MyDeferred.addErrback(restore_failed, outfilename, callback)
    r.set_block_restored_callback(block_restored_callback)
    r.set_packet_in_callback(packet_in_callback)
    # Track the in-flight restore and its per-block progress.
    _WorkingBackupIDs[backupID] = r
    _WorkingRestoreProgress[backupID] = {}
    # Kick the state machine into its initial state.
    r.automat('init')
    return r
def read_vrange_to_cal_bg(n, fname='../sub_data/bins_to_cal_bg.txt'):
    """Return the 8 background-window velocities for source index *n*.

    Returns (avmin1, avmax1, avmin2, avmax2, evmin1, evmax1, evmin2, evmax2)
    — the absorption and emission baseline windows.
    """
    cols = ['idx', 'src', 'avmin1', 'avmax1', 'avmin2', 'avmax2',
            'evmin1', 'evmax1', 'evmin2', 'evmax2']
    fmt = ['i', 's', 'f', 'f', 'f', 'f', 'f', 'f', 'f', 'f']
    dat = restore(fname, 2, cols, fmt)
    # BUG FIX: the original stored the table in `vel` but then indexed an
    # undefined name `vel_info`, raising NameError on every call.
    vel = dat.read(asarray=False)
    return (vel['avmin1'][n], vel['avmax1'][n],
            vel['avmin2'][n], vel['avmax2'][n],
            vel['evmin1'][n], vel['evmax1'][n],
            vel['evmin2'][n], vel['evmax2'][n])
def peak_guessp(sc, fname='../data/gauss_peaks.txt'):
    """Return the fit guesses stored for source *sc*.

    Per source, tau/v0/wid accumulate one entry per Gaussian component,
    while the scalar fields (baseline range, continuum, background, …)
    keep the value from the last row seen.
    """
    columns = ['idx', 'src', 'tau', 'v0', 'wid', 'bmin', 'bmax',
               'v1', 'v2', 'tc', 'bgoff', 'tbg', 'bsl']
    formats = ['i', 's'] + ['f'] * 11
    table = restore(fname, 2, columns, formats).read()
    grouped = {}
    for i, name in enumerate(table['src']):
        entry = grouped.setdefault(name, {'tau': [], 'v0': [], 'wid': []})
        # Scalar fields: last row for the source wins.
        entry['base_range'] = [table['bmin'][i], table['bmax'][i]]
        entry['baseline'] = table['bsl'][i]
        entry['vid1'] = table['v1'][i]
        entry['vid2'] = table['v2'][i]
        entry['tc'] = table['tc'][i]
        entry['bge'] = table['bgoff'][i]
        entry['tbg'] = table['tbg'][i]
        # Per-component fields: accumulate.
        entry['tau'].append(table['tau'][i])
        entry['v0'].append(table['v0'][i])
        entry['wid'].append(table['wid'][i])
    sel = grouped[sc]
    return (sel['tau'], sel['v0'], sel['wid'], sel['base_range'],
            sel['vid1'], sel['vid2'], sel['tc'], sel['bge'],
            sel['tbg'], sel['baseline'])
def read_bins_to_cal_bg(n, fname='../sub_data/bins_to_cal_bg.txt'):
    """Return the 8 background-window velocities for source index *n*."""
    window_keys = ['avmin1', 'avmax1', 'avmin2', 'avmax2',
                   'evmin1', 'evmax1', 'evmin2', 'evmax2']
    columns = ['idx', 'src'] + window_keys
    formats = ['i', 's'] + ['f'] * 8
    table = restore(fname, 2, columns, formats).read()
    return tuple(table[key][n] for key in window_keys)
def peak_info(fname=''):
    """Group Gaussian peak parameters [amp, tau0, wid] by source name.

    Peaks for each source are keyed '0', '1', ... in file order; the
    counter resets whenever a new source name first appears (matching the
    original row-grouped file layout).
    """
    table = restore(fname, 2, ['idx', 'src', 'amp', 'tau0', 'wid'],
                    ['i', 's', 'f', 'f', 'f']).read()
    grouped = {}
    k = 0
    for i, name in enumerate(table['src']):
        if name in grouped:
            k += 1
        else:
            grouped[name] = {}
            k = 0
        grouped[name][str(k)] = [table['amp'][i], table['tau0'][i], table['wid'][i]]
    return grouped
def read_nhi_thin(fname='../result/nhi_thin_with_er_78src.txt'):
    """Map source name -> {'thin': N(HI)_thin, 'er': its uncertainty}."""
    table = restore(fname, 2, ['idx', 'src', 'l', 'b', 'thin', 'thin_er'],
                    ['i', 's', 'f', 'f', 'f', 'f']).read()
    result = {}
    for name, thin, err in zip(table['src'], table['thin'], table['thin_er']):
        result[name] = {'thin': thin, 'er': err}
    return result
def read_tbg_cont(fname='../../hi/stockert_fits/result/tbg_cont_from_1420.txt'):
    """Return two dicts mapping source name -> tbc1 and source name -> tbc2."""
    columns = ['idx', 'src', 'l', 'b', 'tb21', 'tbc1', 'tbc2', 'tb408']
    formats = ['i', 's'] + ['f'] * 6
    table = restore(fname, 5, columns, formats).read()
    by_tbc1 = {}
    by_tbc2 = {}
    for name, t1, t2 in zip(table['src'], table['tbc1'], table['tbc2']):
        by_tbc1[name] = t1
        by_tbc2[name] = t2
    return by_tbc1, by_tbc2
def get_noh_for_each_src(fname):
    """Sum N(OH) and its squared uncertainty per source.

    Returns {src: {'noh': sum of column values, 'er2': sum of squared errors}}.
    """
    columns = ['idx', 'src', 'tau', 'v0', 'wid', 'tex', 'tex_er', 'noh', 'noh_er']
    formats = ['i', 's'] + ['f'] * 7
    table = restore(fname, 2, columns, formats).read()
    totals = {}
    for name, noh, err in zip(table['src'], table['noh'], table['noh_er']):
        if name in totals:
            totals[name]['noh'] += noh
            totals[name]['er2'] += err ** 2
        else:
            totals[name] = {'noh': noh, 'er2': err ** 2}
    return totals
def read_tex_carl(fname='../sub_data/tex_oh1165.txt'):
    """Group Carl's tau and Ts values per source into lists."""
    columns = ['idx', 'src', 'amp', 'v0', 'wid', 'ts1', 'er1',
               'ts2', 'er2', 'tbg', 'ts_carl', 'tau']
    formats = ['i', 's'] + ['f'] * 10
    table = restore(fname, 3, columns, formats).read()
    grouped = {}
    for name, tau, ts in zip(table['src'], table['tau'], table['ts_carl']):
        entry = grouped.setdefault(name, {'tau': [], 'ts_carl': []})
        entry['tau'].append(tau)
        entry['ts_carl'].append(ts)
    return grouped
def read_carl_tex(sc, fname='../sub_data/carl_tex.txt'):
    """Return (tau list, Tex list) for source *sc* from Carl's fit table."""
    columns = ['ng1', 'ng2', 'ng', 'src', 'l', 'b', 'zro1', 'tau1',
               'cen1', 'wid1', 'tex1', 'tex1er', 'tex2', 'tex2er']
    formats = ['i', 'i', 'i', 's'] + ['f'] * 10
    table = restore(fname, 2, columns, formats).read()
    grouped = {}
    for name, tau, tex in zip(table['src'], table['tau1'], table['tex1']):
        entry = grouped.setdefault(name, {'tau': [], 'tex65': []})
        entry['tau'].append(tau)
        entry['tex65'].append(tex)
    return grouped[sc]['tau'], grouped[sc]['tex65']
def model_performance(S, model_params, dl_val, N_val, N_classes):
    """
    Determines performance of provided model on the validation set.
    Returns the labels and scores

    S: settings object (modelname, device, dtype) — assumed; confirm against caller.
    dl_val: validation DataLoader yielding (inputs, labels) batches.
    N_val / N_classes: sizes used to pre-allocate the output arrays; N_val
    must equal the total number of validation samples or the slices overrun.
    """
    print(
        'WARNING: MODEL_PERFORMANCE IS USED, THIS FUNCTION SHOULD HAVE BECOME OBSOLETE'
    )
    # # free gpu memory
    # if torch.cuda.is_available():
    #     torch.cuda.empty_cache()
    # create model
    model = EfficientNet.from_name(S.modelname, model_params)
    # restore latest model from drive
    model = restore(model, S.modelname)
    # push model to gpu
    model = model.to(device=S.device)
    # set it to eval (disables dropouts, and prevents calculating the grad)
    model.eval()
    # pre allocate variables
    y_true = np.empty(N_val)
    scores = np.empty((N_val, N_classes))
    # i:j is the row window of the current batch in the output arrays.
    i, j = 0, 0
    # evaluate model on validation set
    print('\nEvaluating model on validation set...')
    for x, y in dl_val:
        x = x.to(device=S.device, dtype=S.dtype)
        j += x.shape[0]
        y_true[i:j] = y.numpy()
        scores[i:j, :] = model(x).cpu().detach().numpy()
        i = j
    # return
    return y_true, scores
def read_tbg408_healpy(fname='../../oh/result/bg408_to_compare.txt'):
    """Map source name -> healpy-derived 408 MHz background temperature."""
    columns = ['src', 'l', 'b', 'il', 'ib', 'tbg',
               'l-idx', 'b-idx', 'tbg1', 'tbg_hpy']
    formats = ['s'] + ['f'] * 9
    table = restore(fname, 2, columns, formats).read()
    return dict(zip(table['src'], table['tbg_hpy']))
def start_decryption():
    """Decrypt the backup, wipe the key folder, restore, and render success.

    Flask view: runs the decrypter, empties the 'key' directory, triggers
    the restore step, then returns the success page.
    """
    dec.decrypter()
    # Remove key material once decryption is done.
    tools.empty_folder('key')
    rst.restore()
    return render_template('restore_success.html')
def read_ring(fname='ring.txt'):
    """Read ring (x, y) coordinates from *fname* (no header rows)."""
    return restore(fname, 0, ['x', 'y'], ['f', 'f']).read()
def read_23oh_src(fname='../../oh/result/23src_with_oh.txt'):
    """Read the 23 OH-detected sources: name and galactic (l, b)."""
    return restore(fname, 2, ['src', 'l', 'b'], ['s', 'f', 'f']).read()
if dat['src'][i] not in ret.keys(): ret[src[i]] = {} ret[src[i]]['noh'] = noh[i] ret[src[i]]['er2'] = er2[i]**2 else: ret[src[i]]['noh'] = ret[src[i]]['noh'] + noh[i] ret[src[i]]['er2'] = ret[src[i]]['er2'] + er2[i]**2 return ret ##================= MAIN ========================## ## N(HI) and N(H) from Dust ## cols = ['idx','src', 'l', 'b', 'nhi','nhi_er', 'nh','nh_er'] fmt = ['i','s','f','f','f','f','f','f'] data = restore('../../dust/ebv2nh/result/nh_nhi_uncert_78src_from_ebv.txt', 3, cols, fmt) dat = data.read() nhi = dat['nhi'] nhi_er = dat['nhi_er'] nh = dat['nh'] nh_er = dat['nh_er'] src = dat['src'] idx = dat['idx'] ## N(OH) ## # noh1 = get_noh_for_each_src(fname = 'result/noh1_src96_er.txt') noh2 = get_noh_for_each_src(fname = '../result/noh2_src96_er.txt') nhi2 = [] nhi_er2 = [] # WARNING: Here Sigma^2
def main(): """ Main routine """ parser = argparse.ArgumentParser(description='Backup/Restore') subparsers = parser.add_subparsers(dest='cmd') backup_parser = subparsers.add_parser('backup', help='do backup') backup_parser.add_argument('-c', nargs=1, required=True, metavar='config_file') restore_parser = subparsers.add_parser('restore', help='do restore') restore_parser.add_argument('-d', nargs=1, required=True, metavar='dropbox directory') restore_parser.add_argument('-r', nargs=1, required=True, metavar='directory for database restoral') restore_parser.add_argument('-o', nargs=1, metavar='override the root of ' + ' directories') consitency_parser = subparsers.add_parser('consistency', help='do consistency') consitency_parser.add_argument('-c', nargs=1, required=True, metavar='config_file') dump_parser = subparsers.add_parser('dump', help='do dump') dump_parser.add_argument('-c', nargs=1, required=True, metavar='config_file') dump_parser.add_argument('-f', nargs=1, required=True, metavar='output_file') args = parser.parse_args() setup_logging() try: if args.cmd == 'backup': res = setup_config(args.c[0]) backup(res.config, res.database) backup_meta(res.config_path) elif args.cmd == 'restore': override = '' if args.o: override = args.o[0] restore(args.d[0], args.r[0], override) elif args.cmd == 'consistency': logger = logging.getLogger('mylog') res = setup_config(args.c[0]) res = consistency_check(res.config, res.database, True) if res is True: logger.info('Consistency check completed successfully') else: logger.info('Consistency check has found some problems.' + ' Rerun to check if they were fixed.') elif args.cmd == 'dump': res = setup_config(args.c[0]) dump_database(res.database, args.f[0]) except KeyError: print 'Key not found!' except SystemExit: print 'Exiting...'
def read_info_no_co(fname='../../co12/result/26src_no_co_with_sponge.dat'):
    """Read the 26 CO-free source catalogue (coords, OH flag, N(HI) columns)."""
    columns = ['idx', 'src', 'l', 'b', 'ra_icrs', 'de_icrs', 'ra_j', 'de_j',
               'oh', 'nhi', 'nhi_er', 'thin', 'thin_er',
               'cnm', 'cnm_er', 'wnm', 'wnm_er']
    formats = ['i', 's', 'f', 'f', 'f', 'f', 's', 's', 'i'] + ['f'] * 8
    return restore(fname, 2, columns, formats).read()
import os
import sys

import staging
import commit
import log
import reset
import restore


def init():
    """Create the .vcs repository layout and initialise the staging area."""
    os.mkdir(".vcs")
    os.mkdir(".vcs/staging")
    # BUG FIX: the original wrote `staging = staging.staging()`, which makes
    # `staging` a local name and raises UnboundLocalError before the call
    # can even run.  Call the constructor without shadowing the module.
    staging.staging()


if __name__ == "__main__":
    # Guard against a bare invocation instead of crashing with IndexError.
    if len(sys.argv) < 2:
        print("usage: vcs <init|add|restore|commit|log|reset> [args]")
        sys.exit(1)
    if sys.argv[1] == "init":
        init()
    elif sys.argv[1] == "add":
        staging.add(sys.argv[2])
    elif sys.argv[1] == "restore":
        restore.restore(sys.argv[2])
    elif sys.argv[1] == "commit":
        commit.commit()
    elif sys.argv[1] == "log":
        for i in log.get_log_line():
            print(i)
    elif sys.argv[1] == "reset":
        reset.reset()
def read_nhi_fukui_nh_planck(fname='result/26src_no_co_nhi_and_uncertainties_full.txt'):
    """Read the Fukui/Planck/Heiles N(HI) comparison table (4 header rows)."""
    columns = ['idx', 'src', 'nhi_fk', 'err_fk', 'nh_pl', 'err_pl',
               'nhi_hl', 'er_hl', 'err_hl', 'wnm', 'cnm',
               'fk_hl', 'pl_hl', 'oh', 'nhi_thin']
    formats = ['i', 's'] + ['f'] * 11 + ['i', 'f']
    return restore(fname, 4, columns, formats).read()
def read_cnm_err(fname='result/26src_no_co_cnm_uncertainties_arcb.txt'):
    """Read N(HI)/CNM/WNM values with uncertainties for the 26 sources."""
    columns = ['idx', 'src', 'nhi', 'nhi_er', 'cnm', 'cnm_er', 'wnm', 'wnm_er']
    formats = ['i', 's'] + ['f'] * 6
    return restore(fname, 2, columns, formats).read()
def peak_dect(data, inf408):
    """Detect emission peaks for source 3C131 (Python 2 analysis script).

    Bins up the emission spectrum, removes a linear baseline fitted in the
    configured velocity windows, fits 3 Gaussians with mpfit, and plots the
    result with the 3-sigma detection threshold.
    """
    fit = gfit()
    src_list = list(data.la.srcname)
    src = data.la.srcname
    ra50 = data.la.ra1950
    dec50 = data.la.dec1950
    ell = data.la.ell
    bee = data.la.bee
    oh_f1 = data.la.cfr_bd1
    vlsr1 = data.la.vlsr_bd1
    oh_f2 = data.la.cfr_bd2
    vlsr2 = data.la.vlsr_bd2
    # Oh1665 #
    em_avg1 = correct_ctrl_chnl(data.la.i_em_avg_bd1)
    em_med1 = correct_ctrl_chnl(data.la.i_em_med_bd1)
    ab_avg1 = correct_ctrl_chnl(data.la.i_abs_avg_bd1)
    ab_med1 = correct_ctrl_chnl(data.la.i_abs_med_bd1)
    # Oh1667 #
    em_avg2 = correct_ctrl_chnl(data.la.i_em_avg_bd2)
    em_med2 = correct_ctrl_chnl(data.la.i_em_med_bd2)
    ab_avg2 = correct_ctrl_chnl(data.la.i_abs_avg_bd2)
    ab_med2 = correct_ctrl_chnl(data.la.i_abs_med_bd2)
    # Source #  (hard-coded to 3C131)
    src = '3C131'
    n = src_list.index(src)
    vlsr = vlsr1[n]
    oh_fq = oh_f1[n]
    em_avg = em_avg1[n]
    # NOTE(review): this assigns the 1667 *emission* average to ab_avg —
    # looks like it should be ab_avg1[n] or ab_avg2[n]; confirm intent.
    ab_avg = em_avg2[n]
    # Vrange infor #
    fname = '../data/em_vel_range.txt'
    cols = ['idx','src','vmin','vmax']
    fmt = ['i','s','f','f']
    vel = restore(fname, 2, cols, fmt)
    vel_info = vel.read()
    vmin = vel_info['vmin']
    vmax = vel_info['vmax']
    xmin = vmin[n]
    xmax = vmax[n]
    ## Background ##
    tbg1665 = 2.8+get_tb_408(ell[n], bee[n], inf408.tb_408)*(408./1666.)**2.8 # Tbg from 408MHz
    tbg = tbg1665
    continuum_em = tbg
    # VLSR #
    x = vlsr
    # Get Index of the Velocity-range #
    xmax_id = get_vel_index(x, xmin)   # xmax_index
    xmin_id = get_vel_index(x, xmax)   # xmin_index
    num_chnl = xmax_id-xmin_id         # Total number of bins
    vrange = [xmin_id, xmax_id]
    # vrange = [879,1460]
    dv = (xmax-xmin)/num_chnl
    # Linear fit the baselines of spectra #
    # Vrange infor to calculate baseline - Linear fit #
    # NOTE(review): read_bins_to_cal_bg is called without arguments here;
    # elsewhere in this file it takes (n, fname) — verify the variant used.
    bins = read_bins_to_cal_bg()
    avmin1 = bins['avmin1']
    avmax1 = bins['avmax1']
    avmin2 = bins['avmin2']
    avmax2 = bins['avmax2']
    evmin1 = bins['evmin1']
    evmax1 = bins['evmax1']
    evmin2 = bins['evmin2']
    evmax2 = bins['evmax2']
    ## Bin up ##
    nbin = 4
    x, em_avg = bin_up(x,em_avg,nbin=nbin)
    xd,tde,slope,intercept,tbe,v,u,sigma = baseline_from_linear_fit(x, em_avg, evmin1[n], evmax1[n], evmin2[n], evmax2[n])
    plt.plot(x,em_avg)
    plt.show()
    # Initial guesses: [bg, slope, (amp, v0, wid) x 3 components].
    lguess = [0.,0.0025,\
              0.063,4.47,0.3,\
              0.037,6.6,0.1,\
              0.08,7.2,0.4]
    # guessp = np.array(lguess, dtype='float64')
    pfix = [False]*11
    plimd = [[False,False]]*11
    plims = [[0.,0.]]*11
    pname = ['bg','slope','amp1','v01','wid1','amp2','v02','wid2','amp3','v03','wid3']
    parbase = {'value': 0., 'fixed': 0, 'parname': '', 'limited': [0, 0], 'limits': [0., 0.]}
    parinfo = []
    for i in range(len(lguess)):
        parinfo.append(copy.deepcopy(parbase))
    for i in range(len(lguess)):
        parinfo[i]['value'] = lguess[i]
        parinfo[i]['fixed'] = pfix[i]
        parinfo[i]['parname'] = pname[i]
        parinfo[i]['limited'] = plimd[i]
    # print parinfo[13]
    # sys.exit()
    ## Gaussian fit ##
    # Subtract the fitted linear baseline before fitting the Gaussians.
    em_avg = em_avg-(slope*x+intercept)
    # Integer division is intended here (Python 2) — indices after binning.
    xx = x[xmin_id/nbin:xmax_id/nbin]
    yy = em_avg[xmin_id/nbin:xmax_id/nbin]
    fa = {'x':xx, 'y':yy, 'err':yy*0.001}
    mp = mpfit(myfunc, lguess, parinfo=parinfo, functkw=fa, quiet=False)
    print mp.params
    print mp.perror
    ## Plot ##
    p = mp.params
    fit = p[0]+p[1]*xx
    amp1 = p[2]
    amp2 = p[5]
    amp3 = p[8]
    # Sum the Gaussian components on top of the linear term.
    for i in range(2, len(p), 3):
        fit = fit + p[i]*np.exp(- ( (xx-p[i+1])/(0.6005612*p[i+2]))**2)
    # Report which peaks clear the 3-sigma detection threshold.
    print '**********'
    print 'Peak 1: ',amp1,sigma,amp1>3.*sigma
    print 'Peak 2: ',amp2,sigma,amp2>3.*sigma
    print 'Peak 3: ',amp3,sigma,amp3>3.*sigma
    plt.plot(x,em_avg)
    plt.plot(xx,fit,'r-')
    plt.title(src)
    plt.xlim(xmin-2.,xmax+2.)
    plt.ylim(-0.03,0.08)
    # plt.axhline(y=sigma,xmin=-30.,xmax=30., linewidth=1, color='r')
    plt.axhline(y=3.*sigma,xmin=-30.,xmax=30.,linewidth=1, color='g', linestyle='dashed')
    plt.axvline(x=xmin,ymin=0.,ymax=1000., linewidth=1, color='k')
    plt.axvline(x=xmax,ymin=0.,ymax=1000., linewidth=1, color='k')
    plt.grid()
    plt.show()
u'developerModel', u'action_items', u'entityModel', u'col_log', u'col_developer', u'intentModel', u'multiMediaModel', u'activationCodeModel'] def slack(msg): data={ "text": msg } requests.post("https://hooks.slack.com/services/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", data=json.dumps(data)) unpack() restore() client = MongoClient() db = client.today collection_list = db.collection_names() flag_collection = (sorted(collection_list) == sorted(STANDARD_COLLECTION_LIST)) #print collection_list #print STANDARD_COLLECTION_LIST if not flag_collection: slack ("Warning!collections did not match : supposed to be\n {} \n actually is \n{}".format(STANDARD_COLLECTION_LIST, collection_list)) print ("Warning!collections did not match : supposed to be\n {} \n actually is \n{}".format(STANDARD_COLLECTION_LIST, collection_list)) print "did not match!" def check_time(): for i in db['systemPropertyModel'].find({"_id" : "accessToken"}): time_now = datetime.datetime.now()
def setUpClass(cls): """Pull the backup file, and restore the database.""" backup_folder = pull_latest_backup(BACKUP_BUCKET) restore(backup_folder)
def main(data, inf408):
    """Fit OH absorption and emission spectra for source 4C13.67 (Python 2).

    First fits e^(-tau) Gaussians to the absorption spectrum, then refits
    with Tex free against the baseline-shifted emission spectrum and prints
    the resulting spin temperatures.  Several continuum/baseline values are
    hard-coded overrides for this particular source.
    """
    cfit = gfit()
    src_list = list(data.la.srcname)
    src = data.la.srcname
    ra50 = data.la.ra1950
    dec50 = data.la.dec1950
    ell = data.la.ell
    bee = data.la.bee
    # Band 2 frequencies/velocities (1667 MHz line).
    oh_f1 = data.la.cfr_bd2
    vlsr1 = data.la.vlsr_bd2
    em_avg1 = correct_ctrl_chnl(data.la.i_em_avg_bd2)
    em_med1 = correct_ctrl_chnl(data.la.i_em_med_bd2)
    ab_avg1 = correct_ctrl_chnl(data.la.i_abs_avg_bd2)
    ab_med1 = correct_ctrl_chnl(data.la.i_abs_med_bd2)
    # Gaussian peaks' info - estimate values of Amp, tau0 and Width #
    peak = peak_info('../data/gauss_1665_peaks.txt')
    # Vrange infor #
    vel_info = read_vrange('../data/vel_range.txt')
    vmin = vel_info['vmin']
    vmax = vel_info['vmax']
    # 26 src with no CO #
    fname = '../data/26src_no_co.txt'
    cols = ['idx','src']
    fmt = ['i','s']
    src_no_co = restore(fname, 2, cols, fmt)
    s26info = src_no_co.read()
    s26src = s26info['src']
    for src in src_list:
        # Only process the single hard-coded source.
        if(src != '4C13.67'):
            continue
        n = src_list.index(src)
        xmin = vmin[n]
        xmax = vmax[n]
        # (0, 0) marks "no velocity range configured" — skip.
        if (xmin == 0. and xmax == 0.):
            continue
        # VLSR #
        xd = vlsr1[n]
        td_ab = ab_avg1[n]   # Absorption data
        td_em = em_avg1[n]   # Emission data
        # Get Index of the Velocity-range #
        xmax_id = get_vel_index(xd, xmin)   # xmax_index
        xmin_id = get_vel_index(xd, xmax)   # xmin_index
        num_chnl = xmax_id-xmin_id          # Total number of bins
        vrange = [xmin_id, xmax_id]
        # vrange = [733,1023]
        ## Background ##
        tbg1665 = 2.8+get_tb_408(ell[n], bee[n], inf408.tb_408)*(408./1666.)**2.8 # Tbg from 408MHz
        t_on1665 = td_ab
        t_off1665 = td_em
        tc1665 = cal_bg(xd,t_on1665)
        bg_off1665 = cal_bg(xd,t_off1665)
        trx1665 = bg_off1665 - tbg1665
        #Fitting, guess parameters and Fit multiple Gaussians for e(-tau)#
        guess = []
        for c in peak[src]:
            guess += peak[src][c]
        print n, ' <<<>>>> ', src, ell[n], bee[n]
        cont = tc1665          # Tc
        contoff = tbg1665      # important: changing this changes Tex a lot
        tbaseline = bg_off1665 # baseline of the emission line
        #This case tbaseline is sensitive to Ts, 74.75 -> change Ts #
        # cont = 18.
        # Hard-coded baseline override for this source.
        tbaseline = 73.67
        npeak = 1
        print 'Tc:', cont
        print 'Radio bg: ', contoff
        print 'Baseline (Offsource): ', tbaseline
        # Initial guesses for the single fitted component.
        tau = [0.3/cont]
        v0 = [4.7]
        wid = [0.4]
        tex = [1.e-6]*npeak
        ## Fit or not — per-parameter fit flags (1 = free, 0 = fixed) #
        bg = 0.
        bgyn = 0
        tauyn = [1]*npeak
        v0yn = [1]*npeak
        widyn = [1]*npeak
        tsyn = [0]*npeak
        contyn = 1
        # First pass: fit tau/v0/wid (and continuum) on the absorption data.
        tfit, error, \
        bg_fit, taufit, v0fit, widfit, tsfit, \
        bg_er, tau_er, v0_er, wid_er, ts_er, \
        contfit, cont_er, cov, nloop, \
        nloopmax = cfit.fit(xd,td_ab,vrange,bg,tau,v0,wid,tex,cont, bgyn,tauyn,v0yn,widyn,tsyn,contyn)
        ## fit with Tex
        tsyn = [1]*npeak
        ts = [4.] # for 2nd Fit
        cont_em = contoff
        # Freeze the line-shape parameters at the first-pass results.
        tau = taufit
        v0 = v0fit
        wid = widfit
        # Hard-coded emission continuum override for this source.
        cont_em = 4.98662039171
        contyn = 0
        tauyn = [0]*npeak
        v0yn = [0]*npeak
        widyn = [0]*npeak
        # Shift the emission spectrum onto the radio-background level.
        tdat = td_em - tbaseline + contoff
        ta_fit, error, \
        bg_fit, taufit, v0fit, widfit, tsfit, \
        bg_er, tau_er, v0_er, wid_er, ts_er, \
        contfit, cont_er, cov, nloop, \
        nloopmax = cfit.fit(xd,tdat,vrange,bg,tau,v0,wid,ts,cont_em,\
                            bgyn,tauyn,v0yn,widyn,tsyn,contyn) # NOTE: argument order may be wrong
        print '1665 spin temps:'
        print tsfit
        print ts_er
        print tbg1665
def main(data, inf408):
    """Fit OH spectra for source 3C18 and derive excitation temperatures (Python 2).

    Fits the absorption spectrum, fits the emission spectrum, converts the
    pair into a spin-temperature profile Ts(v), locates peaks in 1/Ts, and
    averages Ts inside each peak window.  Continuum/baseline values are
    hard-coded overrides for this particular source.
    """
    cfit = gfit()
    src_list = list(data.la.srcname)
    src = data.la.srcname
    ra50 = data.la.ra1950
    dec50 = data.la.dec1950
    ell = data.la.ell
    bee = data.la.bee
    # Band 1 frequencies/velocities (1665 MHz line).
    oh_f1 = data.la.cfr_bd1
    vlsr1 = data.la.vlsr_bd1
    em_avg1 = correct_ctrl_chnl(data.la.i_em_avg_bd1)
    em_med1 = correct_ctrl_chnl(data.la.i_em_med_bd1)
    ab_avg1 = correct_ctrl_chnl(data.la.i_abs_avg_bd1)
    ab_med1 = correct_ctrl_chnl(data.la.i_abs_med_bd1)
    # Gaussian peaks' info - estimate values of Amp, tau0 and Width #
    peak = peak_info('../data/gauss_1665_peaks.txt')
    # Vrange infor #
    vel_info = read_vrange('../data/vel_range.txt')
    vmin = vel_info['vmin']
    vmax = vel_info['vmax']
    # 26 src with no CO #
    fname = '../data/26src_no_co.txt'
    cols = ['idx','src']
    fmt = ['i','s']
    src_no_co = restore(fname, 2, cols, fmt)
    s26info = src_no_co.read()
    s26src = s26info['src']
    for src in src_list:
        # Only process the single hard-coded source.
        if(src != '3C18'):
            continue
        n = src_list.index(src)
        xmin = vmin[n]
        xmax = vmax[n]
        # (0, 0) marks "no velocity range configured" — skip.
        if (xmin == 0. and xmax == 0.):
            continue
        # VLSR #
        xd = vlsr1[n]
        td_ab = ab_avg1[n]   # Absorption data of 1665
        td_em = em_avg1[n]   # Emission data of 1665
        plt.plot(xd,td_em)
        plt.xlim(-30.,30.)
        plt.show()
        # Get Index of the Velocity-range #
        xmax_id = get_vel_index(xd, xmin)   # xmax_index
        xmin_id = get_vel_index(xd, xmax)   # xmin_index
        num_chnl = xmax_id-xmin_id          # Total number of bins
        vrangid = [xmin_id, xmax_id]
        ## Background ##
        tbg1665 = 2.8+get_tb_408(ell[n], bee[n], inf408.tb_408)*(408./1665.)**2.8 # Tbg from 408MHz
        t_on1665 = td_ab
        t_off1665 = td_em
        tc1665 = cal_bg(xd,t_on1665)
        bg_off1665 = cal_bg(xd,t_off1665)
        trx1665 = bg_off1665 - tbg1665
        #Fitting, guess parameters and Fit multiple Gaussians for e(-tau)#
        guess = []
        for c in peak[src]:
            guess += peak[src][c]
        print n, ' ', src, ell[n], bee[n]
        # Hard-coded continuum/baseline overrides for 3C18.
        cont = 59.15          # Tc
        contoff = tbg1665     # important: changing this changes Tex a lot
        tbaseline = 70.90     # baseline of 1665 Em line
        #This case tbaseline is sensitive to Ts, 74.75 -> change Ts #
        # cont = tc1665 # Tc
        # contoff = tbg1665
        # tbaseline = bg_off1665 # baseline of 1665 Emission line
        bg = 0. # just 0. not to fit now
        # Two-component initial guesses for the absorption fit.
        tau = [0.1/cont, 0.6/cont]
        v0 = [-9.9, -7.6]
        wid = [1.3, 0.9]
        ta_fit, error, \
        xdat, ta_dat,\
        tbgfit, taufit, v0fit, widfit, \
        tauerr, v0err, widerr, \
        tcont,conterr,\
        cov, nloop, \
        nloopmax = cfit.abfit(xd,td_ab,vrangid,cont,bg,tau,v0,wid)
        print 'v0:', v0fit
        print 'width:', widfit
        plt.plot(xdat, ta_fit)
        plt.plot(xdat, ta_dat)
        plt.show()
        ngauss = len(taufit)
        cont_em = contoff ## tbg=50./2
        # Single-component initial guesses for the emission fit.
        amp = [0.1]
        v0 = [-9.9]
        wid = [2.5]
        npeaks = len(v0fit)
        tdat = td_em
        te_fit, error,\
        xdat, te_dat,\
        fit_base, amp, v0, wid,\
        base_er, amp_er, v0_er, wid_er,\
        cov, nloop, nloopmax = cfit.emfit(xd,tdat,vrangid,tbaseline,amp,v0,wid) # NOTE: argument order may be wrong
        print 'amp:', amp
        print 'v0:', v0
        print 'width:', wid
        plt.plot(xdat, te_fit)
        plt.plot(xdat, te_dat)
        plt.show()
        # Convert absorption fit/data to e^(-tau) profiles.
        etau_fit = ta_fit/tcont
        etau_dat = ta_dat/tcont
        print fit_base
        # Re-reference the emission onto the radio-background level.
        te_fit = te_fit - fit_base + contoff
        te_dat = te_dat - fit_base + contoff
        # Spin temperature Ts = (Te - Tbg*e^-tau) / (1 - e^-tau);
        # guard the fit profile against division by zero at e^-tau == 1.
        ts_fit = []
        ts_dat = []
        for i in range(len(etau_fit)):
            if (etau_fit[i] == 1.):
                ts_fit.append(0.)
                ts_dat.append((te_dat[i]-contoff*etau_dat[i])/(1.-etau_dat[i]))
            else:
                ts_fit.append((te_fit[i]-contoff*etau_fit[i])/(1.-etau_fit[i]))
                ts_dat.append((te_dat[i]-contoff*etau_dat[i])/(1.-etau_dat[i]))
        plt.plot(xdat, ts_fit)
        plt.plot(xdat, ts_dat)
        plt.ylim(-1.,30.)
        plt.show()
        # Build 1/Ts over the physically-plausible range for peak finding.
        tsi = []
        x = []
        for i in range(len(ts_fit)):
            if ((ts_fit[i] > 0.) and (ts_fit[i] < 50.)):
                tsi.append(1./ts_fit[i])
                x.append(xdat[i])
        plt.plot(x, tsi)
        plt.show()
        peakindx = signal.find_peaks_cwt(tsi, np.arange(1,10))
        print peakindx
        for i in range(len(peakindx)):
            print 'x0 peak:', x[peakindx[i]]
        # Hand-tuned peak centre/width for this source.
        width = [2.57656]
        px0 = [-7.82828]
        # vpeaks holds [vmin, vmax] pairs, flattened.
        vpeaks = []
        for i in range(len(width)):
            vpeaks.append(px0[i]-width[i]/2.)
            vpeaks.append(px0[i]+width[i]/2.)
        plt.plot(xdat, ts_fit)
        plt.plot(xdat, ts_dat)
        plt.ylim(-1.,30.)
        for i in range(len(width)):
            plt.axvline(px0[i]-width[i]/2.,0.,1)
            plt.axvline(px0[i]+width[i]/2.,0.,1)
            plt.axvline(px0[i],0.,1)
        plt.show()
        # Accumulators per peak window (Python 2 integer division intended).
        tex_peak = []
        tex_count = []
        for k in range(0,len(peak)/2):
            tex_peak.append(0.)
            tex_count.append(0)
        # Compute the Excitation Temperature #
        s = 0.
        count = 0
        for i in range(0, len(ts_fit)):
            for k in range(0,len(vpeaks),2):
                vmin = vpeaks[0+k]
                vmax = vpeaks[1+k]
                if ((xdat[i]>vmin) and (xdat[i]<vmax)) :
                    tex_peak[k/2] = tex_peak[k/2] + ts_fit[i]
                    tex_count[k/2] = tex_count[k/2] + 1
                    s = s + ts_fit[i]
                    count = count + 1
        print ''
        print 'Tex for Peaks: k, tex_peak, bins'
        ss = 0.
        for k in range(0,len(vpeaks)/2):
            tex_peak[k] = tex_peak[k]/tex_count[k]
            ss = ss + tex_peak[k]
            print src, k,tex_peak[k],tex_count[k]
        print 'Mean TEX: mean, sum_Tex, total-count'
        print s/count, ss, count