Example #1
	def finish(self, runUntil, runId):
		if runUntil != len(self.modules) and self.commandline.noCopy != True:
			print '... copying data bank snapshot, disable with --noCopy'
			os.chdir(self.cwd)
			if runId is not None:
				dest = self.config.get("options", "data_dir") + 'data_bank' + str(runUntil-1) + '_run' + str(runId) + '.h5'
			else:
				dest = self.config.get("options", "data_dir") + 'data_bank' + str(runUntil-1) + '.h5'
			if self.config.has_option("options", "ptrepack"):
				os.system(self.config.get("options", "ptrepack") + ' -o ' + self.config.get("options", "data_dir") + 'data_bank.h5:/ ' + dest + ':/')
			else:
				os.system('ptrepack -o ' + self.config.get("options", "data_dir") + 'data_bank.h5:/ ' + dest + ':/')				
			
		#saving run data for plotting
		if runUntil == len(self.modules):
			print '... saving data for runId ' + str(runId)
			os.chdir(self.cwd)
			with archive.archive('../../data/data_bank.h5') as src:
				with archive.archive('../../data/run' + str(runId) + '.h5', 'w') as dest:
					for f in src['/']:
						#Only Copy the config (first character uppercase)
						if (f[0].isupper()):
							print '/' , f
							copyHDF5Path(src, '/' + f, dest)
					#TODO: save more interesting data (nselectedgal, etc)
					dest['/Glue/run/runId'] = runId
					dest['/Glue/run/ngal'] = len(src['/gal/galaxy_index'])
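
Note: the `archive.archive` context manager used throughout these examples is not part of the listing. A minimal sketch of what such a wrapper might look like, assuming it exposes dictionary-style access to HDF5 paths via h5py (an assumption; the real module may differ):

import h5py

class archive(object):
    """Sketch only: dict-style path access to an HDF5 file (assumed behaviour)."""
    def __init__(self, filename, mode='r'):
        self.f = h5py.File(filename, mode)
    def __enter__(self):
        return self
    def __exit__(self, *exc):
        self.f.close()
    def __getitem__(self, path):
        node = self.f[path]
        # groups iterate over child names (cf. `for f in src['/']` above)
        return node[()] if isinstance(node, h5py.Dataset) else list(node.keys())
    def __setitem__(self, path, value):
        if path in self.f:
            del self.f[path]   # overwrite an existing dataset
        self.f[path] = value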
Example #2
    def revert(s, *_):
        """ Revert back to old version! """
        # check if file is in its old location
        source = s.index["source"]
        if not os.path.isfile(source):
            if confirm("Cannot find original file: {}\nPlease provide a folder to recover the file in.".format(source)):
                path = cmds.fileDialog2(fileMode=3)
                if not path:
                    return
                source = os.path.join(path[0], os.path.basename(source))
            else:
                return

        if confirm("You will lose all unsaved changes if you continue.\nAre you sure?"):
            note = "AUTOSAVE: Recovering version '{}'".format(s.index["note"])
            sup = popup.Startup(note)
            with sup:
                # we lied... we will save unsaved changes anyway!
                cmds.file(rename=source)
                cmds.file(save=True, force=True)
                archive.archive(note, source)

                # Recover file
                with zipfile.ZipFile(s.archive, "r") as z:
                    with open(source, "w") as f:
                        f.write(z.read(s.index["scene"]))

                # Load the file
                cmds.file(source, open=True, force=True)
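
`confirm` and `popup.Startup` are project helpers that are not shown here. A plausible sketch of `confirm` on top of Maya's cmds.confirmDialog (hypothetical, for illustration only):

from maya import cmds

def confirm(message):
    """Sketch: Yes/No dialog returning True when the user accepts."""
    return "Yes" == cmds.confirmDialog(
        title="Autosave",
        message=message,
        button=["Yes", "No"],
        defaultButton="Yes",
        cancelButton="No",
        dismissString="No")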
Example #3
def DetectPhotons():

    # -----------------------------------------------------
    # Import data and params
    # -----------------------------------------------------
    print
    print "........................."
    print " Entering Generate Image  "
    print "........................."

    databank_file = '../../../data/data_bank.h5'
    with archive.archive(databank_file, 'r') as ar:
        PhotonEnergies = ar['/Photon/Energy']
        PhotonArrivalTimes = ar['/Photon/ArrivalTime']
        PhotonPositionsX = ar['/Photon/PositionsX']
        PhotonPositionsY = ar['/Photon/PositionsY']

        position_resolution = ar['/Instrument/Detector/position_resolution']
        time_resolution = ar['/Instrument/Detector/time_resolution']
        energy_resolution = ar['/Instrument/Detector/energy_resolution']

        seed_position_x = 9999
        seed_position_y = 9999
        seed_time = 9999
        seed_energy = 9999

    # Detect Photon Positions
    PhotonPositionsX_Detect = DetectQuantity(PhotonPositionsX,
                                             position_resolution,
                                             seed_quantity=seed_position_x)
    PhotonPositionsY_Detect = DetectQuantity(PhotonPositionsY,
                                             position_resolution,
                                             seed_quantity=seed_position_y)

    # Detect Photon Times
    PhotonArrivalTimes_Detect = DetectQuantity(PhotonArrivalTimes,
                                               time_resolution,
                                               seed_quantity=seed_time)

    # Detect Photon Energies
    PhotonEnergies_Detect = DetectQuantity(PhotonEnergies,
                                           energy_resolution,
                                           seed_quantity=seed_energy)

    # -----------------------------------------------------
    # Export data and params
    # -----------------------------------------------------
    with archive.archive(databank_file, 'a') as ar:
        ar['/Photon/Energy_Detected'] = PhotonEnergies_Detect
        ar['/Photon/ArrivalTime_Detected'] = PhotonArrivalTimes_Detect
        ar['/Photon/PositionsX_Detected'] = PhotonPositionsX_Detect
        ar['/Photon/PositionsY_Detected'] = PhotonPositionsY_Detect
        ar['/Photon/Number_of_Photons'] = len(PhotonEnergies_Detect)

    print
    print "........................."
    print " Exiting Generate Image  "
    print "........................."
Example #4
    def HourlyBackup(self, *args):
        print 'Performing hourly save'
        if self.running:
            self.StopAutoSave()
        archive(join(self.runningfolder, self.foldername), self.archivefolder,
                folder='hourly', update_currentref=False, overwrite=True)
        if self.running:
            self.ResumeAutoSave()
        print 'Hourly save complete'
Example #5
    def mergeDatafile(self, datafile):
        #merge datafiles into the databank
        with archive.archive(
                self.config.get("options", "data_dir") +
                self.config.get("options", "data_bank"), 'a') as dest:
            with archive.archive(datafile, 'r') as src:
                print "... copying " + self.config.get(
                    "data_bank", datafile) + " into " + self.config.get(
                        "options", "data_bank")
                copyHDF5Path(src, "/", dest)
Example #6
    def DailyBackup(self,*args):
        print 'Performing daily backup'
        if self.running:
            self.StopAutoSave()
        archive(join(self.runningfolder,self.foldername),self.archivefolder)
        if os.path.exists(join(self.archivefolder,'hourly')):
            shutil.rmtree(join(self.archivefolder,'hourly'))
        if self.running:
            self.ResumeAutoSave()

        print 'Daily backup complete'
Example #7
def DetectPhotons():

    # -----------------------------------------------------
    # Import data and params
    # -----------------------------------------------------
    print
    print "........................."
    print " Entering Generate Image  "
    print "........................."

    databank_file = '../../../data/data_bank.h5'
    with archive.archive(databank_file, 'r') as ar:
        PhotonEnergies      = ar['/Photon/Energy']
        PhotonArrivalTimes  = ar['/Photon/ArrivalTime']
        PhotonPositionsX    = ar['/Photon/PositionsX']
        PhotonPositionsY    = ar['/Photon/PositionsY']

        position_resolution = ar['/Instrument/Detector/position_resolution']
        time_resolution     = ar['/Instrument/Detector/time_resolution']
        energy_resolution   = ar['/Instrument/Detector/energy_resolution']

        seed_position_x     = 9999
        seed_position_y     = 9999
        seed_time           = 9999
        seed_energy         = 9999


    # Detect Photon Positions
    PhotonPositionsX_Detect = DetectQuantity(PhotonPositionsX, position_resolution, seed_quantity=seed_position_x)
    PhotonPositionsY_Detect = DetectQuantity(PhotonPositionsY, position_resolution, seed_quantity=seed_position_y)


    # Detect Photon Times
    PhotonArrivalTimes_Detect = DetectQuantity(PhotonArrivalTimes, time_resolution, seed_quantity=seed_time)


    # Detect Photon Energies
    PhotonEnergies_Detect = DetectQuantity(PhotonEnergies, energy_resolution, seed_quantity=seed_energy)


    # -----------------------------------------------------
    # Export data and params
    # -----------------------------------------------------
    with archive.archive(databank_file, 'a') as ar:
        ar['/Photon/Energy_Detected']       = PhotonEnergies_Detect
        ar['/Photon/ArrivalTime_Detected']  = PhotonArrivalTimes_Detect
        ar['/Photon/PositionsX_Detected']   = PhotonPositionsX_Detect
        ar['/Photon/PositionsY_Detected']   = PhotonPositionsY_Detect
        ar['/Photon/Number_of_Photons']     = len(PhotonEnergies_Detect)

    print
    print "........................."
    print " Exiting Generate Image  "
    print "........................."
Example #8
def GeneratePhotons():

    # -----------------------------------------------------
    # Import data and params
    # -----------------------------------------------------
    print
    print "........................."
    print " Entering Generate Image  "
    print "........................."

    databank_file = '../../../data/data_bank.h5'
    with archive.archive(databank_file, 'r') as ar:
        exptime = ar['/SurveyDesign/exposure_time']
        magnitude_galaxy = ar['/Galaxy/magnitude/']
        flux_spectrum_galaxy = ar['/Galaxy/spectrum']
        image = ar['/Galaxy/image']

    # Calculate number of photons
    nphoton, = magphoton(magnitude_galaxy, exptime=exptime)

    # Sample Photon Positions
    PhotonPositionsX, PhotonPositionsY = SamplePositions(image, nphoton)

    # Sample Photon Times
    PhotonArrivalTimes = SampleTimeArrival(nphoton,
                                           exptime,
                                           start_time=0.0,
                                           seed_time=9999)

    # Sample Photon Energies
    PhotonEnergies = SampleEnergySpectrum(nphoton)

    # Generate Photon Identification Numbers
    photon_id = numpy.arange(nphoton)

    # -----------------------------------------------------
    # Export data and params
    # -----------------------------------------------------
    with archive.archive(databank_file, 'a') as ar:
        ar['/Photon/Energy'] = PhotonEnergies
        ar['/Photon/ArrivalTime'] = PhotonArrivalTimes
        ar['/Photon/PositionsX'] = PhotonPositionsX
        ar['/Photon/PositionsY'] = PhotonPositionsY
        ar['/Photon/Number_of_Photons'] = nphoton

    print
    print "........................."
    print " Exiting Generate Image  "
    print "........................."
Example #9
def main():
    args = parse_args()
    try:
        if args.action == "extract":
            if args.verbose:
                print "Extracting archive"
            extract.extract(infile=args.input, outfile=args.output, verbose=args.verbose)
        elif args.action == "archive":
            if args.verbose:
                print "Creating archive"
            archive.archive(infile=args.input, compression=args.compression, outfile=args.output, verbose=args.verbose)
    except (extract.ExtractException, archive.ArchiveException) as ex:
        print >> sys.stderr, ex.msg
        return ex.code
    return 0
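
`parse_args` is not shown. An argparse sketch consistent with the attributes used above (`action`, `input`, `output`, `compression`, `verbose`); the flags and defaults are hypothetical:

import argparse

def parse_args():
    parser = argparse.ArgumentParser(description="Create or extract archives")
    parser.add_argument("action", choices=["extract", "archive"])
    parser.add_argument("-i", "--input", required=True)
    parser.add_argument("-o", "--output", required=True)
    parser.add_argument("-c", "--compression", default="gz")
    parser.add_argument("-v", "--verbose", action="store_true")
    return parser.parse_args()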
Example #10
	def timings(self, runId):
		sumTime = 0
		for i in range(len(self.modules)):
			try:
				sumTime +=  self.modules[i]['endtime'] - self.modules[i]['starttime']
			except KeyError:
				pass
		timelist=[]
		modulelist=[]

		os.chdir(self.cwd)

		timingdata = []

		for i in range(len(self.modules)):
			try:
				print "[" + str(i) + "] %s\t%.2fs\t%.1f%%\t%.0fMB" % (self.modules[i]['name'].ljust(25, " "), self.modules[i]['endtime'] - self.modules[i]['starttime'], (self.modules[i]['endtime'] - self.modules[i]['starttime']) / sumTime *100, self.modules[i]['memory_rss'] / (1024*1024))
				modulelist.append(self.modules[i]['name'])
				timelist.append(self.modules[i]['endtime'] - self.modules[i]['starttime'])
				timingdata.append([self.modules[i]['name'].encode("ascii","ignore"), str(self.modules[i]['endtime'] - self.modules[i]['starttime']), str(self.modules[i]['starttime']), str(self.modules[i]['endtime']), str(self.modules[i]['memory_rss'])])
			except KeyError:
				print "[" + str(i) + "] " + (self.modules[i]['name'].ljust(25, " "))
				timingdata.append(['', '', '', '', ''])

		print "%s\t%.2fs\t%.1f%%" % ("Total".ljust(25, " "), sumTime, 100)

		os.chdir(self.cwd)
		with archive.archive('../../data/data_bank.h5', 'a') as ar:
			ar['/Glue/run/timings'] = timingdata
			ar['/Timing/timelist'] = timelist
			ar['/Timing/modulelist'] = modulelist
			if runId is None:
				ar['/Glue/run/runId'] = ''
			else:
				ar['/Glue/run/runId'] = runId
Example #11
    def __init__(self):
        QtGui.QMainWindow.__init__(self)
        
        # member variables definition
        self.M1 = 0.0
        self.M2 = 0.0
        self.L1 = 0.0
        self.L2 = 0.0

        self.th1 = 0.0
        self.th2 = 0.0
        self.w1 = 0.0
        self.w2 = 0.0

        self.ts = 0.0
        self.te = 0.0
        self.dt = 0.0

        # setup timer
        self.timer = QtCore.QTimer(self)
        self.timer.timeout.connect(self.timerCallback)
        self.timer_count = 0
        self.current_time = 0.0

        # dimension of the vtk view 
        self.len_convert_factor = 100.0   # length convert factor 1m = 100 pixels
        self.X_lim = 500.0
        self.Y_lim = 300.0

        # archive (history) of the results
        self.archive = archive()

        # Setup UI widgets
        self.setupUi(self)
Example #12
	def mergeParamfile(self, file):
		param = ConfigParser.RawConfigParser()
		param.read(file)
		with archive.archive('../../data/data_bank.h5', 'a') as ar:
			print "Importing sections:", param.sections(), "from parameter file:", file
			for section in param.sections():
				for option in param.options(section):
					ar["/" + section + "/" + option] = eval(param.get(section, option))
Example #13
    def loop_over_gal(self):

        #=======================================================================================
        #  LOOP over galaxies	 # default add both ... 0 for both # 1: don't do it
        #=======================================================================================
        check_fiber_sel = numpy.where(self.fiber_selection_flag == True)
        self.coeffs = self.coeffs[check_fiber_sel, :]
        self.coeffs = numpy.reshape(self.coeffs, self.coeffs.shape[1:])
        self.z_true = self.z_true[check_fiber_sel]

        flux_new = []
        lam_new = []
        galax_index_new = []

        Ngal_new = len(check_fiber_sel[0])
        print "... Loop over galaxies to create spectral templatesi [this number has been cut]", Ngal_new
        for i in range(Ngal_new):

            # calculate spectrum from coefficients
            flux_temporary, lam_temp = self.clean_spectrum(i)

            # Add temporary to output/new
            flux_new.append(numpy.array(flux_temporary))
            lam_new.append(numpy.array(lam_temp))

        #---------------------------------------------------------------------------------------
        # END LOOP over galaxies
        #---------------------------------------------------------------------------------------

        # make output into numpy arrays
        flux_new = numpy.array(flux_new)
        lam_new = numpy.array(lam_new)

        # DIAGNOSTIC
        print "... plotting diagnostics"
        Plotters.plot_spectrum(lam_new[0, :],
                               flux_new[0, :],
                               xmin=5000,
                               xmax=10000,
                               fig_number=0,
                               plot_number=0,
                               title='Galaxy Spectrum',
                               base_directory='.',
                               filename_addendum="",
                               show_plot=show_plot)

        print "... Loop finished, ngal with spectra = ", len(flux_new)
        print "... shape of output arrays:"
        print "... ... wavelength", lam_new.shape
        print "... ... flux		 ", flux_new.shape

        # Output
        print "... output to databank"
        with archive.archive(databank_file, 'a') as ar:
            ar['/gal/wavelength'] = lam_new
            ar['/gal/flux'] = flux_new
Example #14
def cosmos_insert_ra_dec(inputfile,outputfile):
	dataArray = importArrayFromCSVFile(inputfile)

	right_ascension = dataArray[:,1]
	declination = dataArray[:,2]

	# append to HDF file
	with archive.archive(outputfile,'a') as ar:
		ar['/gal/ra_true']  = right_ascension
		ar['/gal/dec_true'] = declination
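
`importArrayFromCSVFile`, used here and in several converters below, is not part of the listing. A one-line sketch with numpy, assuming a plain comma-separated numeric table:

import numpy

def importArrayFromCSVFile(inputfile):
    """Sketch: load a numeric CSV file into a 2-D array (assumed format)."""
    return numpy.loadtxt(inputfile, delimiter=',')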
Example #15
    def mergeParamfile(self, file):
        param = ConfigParser.RawConfigParser()
        param.read(file)
        with archive.archive('../../data/data_bank.h5', 'a') as ar:
            print "Importing sections:", param.sections(
            ), "from parameter file:", file
            for section in param.sections():
                for option in param.options(section):
                    ar["/" + section + "/" + option] = eval(
                        param.get(section, option))
Example #16
def cosmos_hdf_converter(inputfile,outputfile):

	dataArray = importArrayFromCSVFile(inputfile)

	# careful: index here is shifted by 1 wrt the index in the COSMOS header
	galaxy_index = dataArray[:,0]

	photometric_redshift = dataArray[:,1]
	spectroscopic_redshift = dataArray[:,31]

	magnitude_g = dataArray[:,14]
	magnitude_r = dataArray[:,15]
	magnitude_i = dataArray[:,16]
	magnitude_z = dataArray[:,17]
	magnitude_Y = dataArray[:,18]
	magnitude_J = dataArray[:,19]
	magnitude_H = dataArray[:,20]
	magnitude_K = dataArray[:,21]
	magnitude_g_error = dataArray[:,22]
	magnitude_r_error = dataArray[:,23]
	magnitude_i_error = dataArray[:,24]
	magnitude_z_error = dataArray[:,25]
	magnitude_Y_error = dataArray[:,26]
	magnitude_J_error = dataArray[:,27]
	magnitude_H_error = dataArray[:,28]
	magnitude_K_error = dataArray[:,29]

	#???ra = dataArray[:,XXX]  # Stephanie gave us a new file we just need to import
	#???dec = dataArray[:,XXX]

	# write to HDF5 file
	with archive.archive(outputfile,'a') as ar:
		ar['/gal/galaxy_index'] = galaxy_index

		ar['/gal/redshift_photometric'] = photometric_redshift
		ar['/gal/redshift_spectroscopic'] = spectroscopic_redshift

		ar['/gal/magnitude_g'] = magnitude_g
		ar['/gal/magnitude_r'] = magnitude_r
		ar['/gal/magnitude_i'] = magnitude_i
		ar['/gal/magnitude_z'] = magnitude_z
		ar['/gal/magnitude_y'] = magnitude_Y
		ar['/gal/magnitude_j'] = magnitude_J
		ar['/gal/magnitude_h'] = magnitude_H
		ar['/gal/magnitude_k'] = magnitude_K

		ar['/gal/magnitude_error_g'] = magnitude_g_error
		ar['/gal/magnitude_error_r'] = magnitude_r_error
		ar['/gal/magnitude_error_i'] = magnitude_i_error
		ar['/gal/magnitude_error_z'] = magnitude_z_error
		ar['/gal/magnitude_error_y'] = magnitude_Y_error
		ar['/gal/magnitude_error_j'] = magnitude_J_error
		ar['/gal/magnitude_error_h'] = magnitude_H_error
		ar['/gal/magnitude_error_k'] = magnitude_K_error
Example #17
def convert_sky_bkg(inputfile,outputfile): # from skybg_50_10

	table = asciitable.read(inputfile)
	wave = []
	power = []
	for i in range(len(table)):
		wave.append(table[i][0])
		power.append(table[i][1])

	with archive.archive(outputfile,'a') as ar:
		ar['/sky/background/wave'] = numpy.array(wave)
		ar['/sky/background/power'] = numpy.array(power)
Example #18
def convert_extinction(inputfile,outputfile): # from palomar

	table = asciitable.read(inputfile)
	abswave = []
	abstemp = []
	for i in range(len(table)):
		abswave.append(table[i][0])
		abstemp.append(table[i][1])

	with archive.archive(outputfile,'a') as ar:
		ar['/sky/extinction/wave'] = numpy.array(abswave)
		ar['/sky/extinction/power'] = numpy.array(abstemp)
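
Both converters above rely on the legacy asciitable package, which has since been merged into astropy as astropy.io.ascii; an equivalent read today would be:

from astropy.io import ascii

table = ascii.read(inputfile)  # drop-in replacement for asciitable.read(inputfile)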
Example #19
def update(args):
    archive_cfg = read_subcmd_config('archive')
    rename_cfg = read_subcmd_config('rename')

    args.archive_dir = args.archive_dir or archive_cfg['archive_dir']

    if not os.path.isdir(args.archive_dir):
        raise RuntimeError("archive-dir {} is not a directory".format(
            args.archive_dir))

    files = filter(tarfile.is_tarfile, collect_files(args.archive_dir))
    latest = natsorted(files, reverse=True)[0]

    args.archive = [latest]
    args.extract_dir = None
    archive.extract(args)

    rename.rename(args)

    args.path = [rename_cfg['out_dir']]
    archive.archive(args)
Example #20
def importContinuumSensitivityFromCSVFile(inputfile,outputfile):

	dataArray = importArrayFromCSVFile(inputfile)

	wavelength = dataArray[:,0] * 1e-10 # conversion Angstroem to meter
	signal_to_noise_continuum = dataArray[:,1]
	signal_to_noise_emission_line = dataArray[:,2]

	# write to HDF5 file
	with archive.archive(outputfile,'a') as ar:
		ar['/instrument/sensitivity/wavelength_grid']               = wavelength
		ar['/instrument/sensitivity/signal_to_noise_continuum']     = signal_to_noise_continuum
		ar['/instrument/sensitivity/signal_to_noise_emission_line'] = signal_to_noise_emission_line
Example #21
def GenerateImage():

    # -----------------------------------------------------
    # Import data and params
    # -----------------------------------------------------
    print
    print "........................."
    print " Entering Generate Image  "
    print "........................."

    databank_file = '../../../data/data_bank.h5'
    with archive.archive(databank_file, 'r') as ar:
        npix = ar['/Instrument/Detector/Number_of_Pixels']
        profile_type = ar['/Instrument/Detector/Profile_Type']


    # -----------------------------------------------------
    # Perform Calculations
    # -----------------------------------------------------

    # Create 2-D Array of points
    image = numpy.zeros( (npix,npix), dtype=float)


    # create profile and add to image
    image = GenerateProfile(image, npix, profile_type, sigma_x=1., sigma_y=1., correlation=0.)


    # -----------------------------------------------------
    # Export data and params
    # -----------------------------------------------------
    with archive.archive(databank_file, 'a') as ar:
        ar['/Galaxy/image'] = image

    print
    print "........................."
    print " Exiting Generate Image  "
    print "........................."
Example #22
    def timings(self, runId):
        sumTime = 0
        for i in range(len(self.modules)):
            try:
                sumTime += self.modules[i]['endtime'] - self.modules[i][
                    'starttime']
            except KeyError:
                pass
        timelist = []
        modulelist = []

        os.chdir(self.cwd)

        timingdata = []

        for i in range(len(self.modules)):
            try:
                print "[" + str(i) + "] %s\t%.2fs\t%.1f%%\t%.0fMB" % (
                    self.modules[i]['name'].ljust(25, " "),
                    self.modules[i]['endtime'] - self.modules[i]['starttime'],
                    (self.modules[i]['endtime'] - self.modules[i]['starttime'])
                    / sumTime * 100, self.modules[i]['memory_rss'] /
                    (1024 * 1024))
                modulelist.append(self.modules[i]['name'])
                timelist.append(self.modules[i]['endtime'] -
                                self.modules[i]['starttime'])
                timingdata.append([
                    self.modules[i]['name'].encode("ascii", "ignore"),
                    str(self.modules[i]['endtime'] -
                        self.modules[i]['starttime']),
                    str(self.modules[i]['starttime']),
                    str(self.modules[i]['endtime']),
                    str(self.modules[i]['memory_rss'])
                ])
            except KeyError:
                print "[" + str(i) + "] " + (self.modules[i]['name'].ljust(
                    25, " "))
                timingdata.append(['', '', '', '', ''])

        print "%s\t%.2fs\t%.1f%%" % ("Total".ljust(25, " "), sumTime, 100)

        os.chdir(self.cwd)
        with archive.archive('../../data/data_bank.h5', 'a') as ar:
            ar['/Glue/run/timings'] = timingdata
            ar['/Timing/timelist'] = timelist
            ar['/Timing/modulelist'] = modulelist
            if runId is None:
                ar['/Glue/run/runId'] = ''
            else:
                ar['/Glue/run/runId'] = runId
Example #23
    def finish(self, runUntil, runId):
        if runUntil != len(self.modules) and self.commandline.noCopy != True:
            print '... copying data bank snapshot, disable with --noCopy'
            os.chdir(self.cwd)
            if runId is not None:
                dest = self.config.get(
                    "options", "data_dir") + 'data_bank' + str(
                        runUntil - 1) + '_run' + str(runId) + '.h5'
            else:
                dest = self.config.get(
                    "options",
                    "data_dir") + 'data_bank' + str(runUntil - 1) + '.h5'
            if self.config.has_option("options", "ptrepack"):
                os.system(
                    self.config.get("options", "ptrepack") + ' -o ' +
                    self.config.get("options", "data_dir") +
                    'data_bank.h5:/ ' + dest + ':/')
            else:
                os.system('ptrepack -o ' +
                          self.config.get("options", "data_dir") +
                          'data_bank.h5:/ ' + dest + ':/')

        #saving run data for plotting
        if runUntil == len(self.modules):
            print '... saving data for runId ' + str(runId)
            os.chdir(self.cwd)
            with archive.archive('../../data/data_bank.h5') as src:
                with archive.archive('../../data/run' + str(runId) + '.h5',
                                     'w') as dest:
                    for f in src['/']:
                        #Only Copy the config (first character uppercase)
                        if (f[0].isupper()):
                            print '/', f
                            copyHDF5Path(src, '/' + f, dest)
                    #TODO: save more interesting data (nselectedgal, etc)
                    dest['/Glue/run/runId'] = runId
                    dest['/Glue/run/ngal'] = len(src['/gal/galaxy_index'])
Example #24
def import_convert_spectra_templates(inputfile,outputfile):

	#input files
	#inputfile	=  'k_nmf_derived.default.fits' 	# make this file name a parameter in one of the ini files
	#inputfile	=  'k_nmf_derived.newdefault.fits'

	# open file
	hdunum = 0
	hdu = pyfits.open(inputfile)[hdunum]

	# header
	## read in header
	header = hdu.header

	## read in header element
	nt	= header['nt']
	#print '... number of templates', nt

	# read in table elements (open the file once and index its HDUs)
	hdus = pyfits.open(inputfile)
	loglam			= numpy.array(numpy.log10(hdus[11].data))
	tspec_v0		= numpy.array(hdus[1].data)
	tspec_v0_nl		= numpy.array(hdus[2].data)
	tspec_v0_nd		= numpy.array(hdus[3].data)
	tspec_v0_nd_nl	= numpy.array(hdus[4].data)
	tspec_v300		= numpy.array(hdus[5].data)
	tspec_v300_nl	= numpy.array(hdus[6].data)
	tspec_v300_nd	= numpy.array(hdus[7].data)
	tspec_v300_nd_nl= numpy.array(hdus[8].data)
	lspec_v300		= numpy.array(hdus[9].data)
	#	tmass			= pyfits.open(inputfile)[17].data
	#	tmetallicity		= pyfits.open(inputfile)[18].data
	#	tmass300		= pyfits.open(inputfile)[19].data
	#	tmass1000		= pyfits.open(inputfile)[20].data
	#	tmremain		= pyfits.open(inputfile)[24].data


	# write to databank
	with archive.archive(outputfile,'a') as ar:
		ar['/spectral_templates/num_spectral_templates'] 				= nt		# number of spectral templates; nt
		ar['/spectral_templates/log10_wavelength'] 						= loglam
		ar['/spectral_templates/template_spectrum_v0'] 					= tspec_v0			  # not smoothed
		ar['/spectral_templates/template_spectrum_v0_nolines'] 			= tspec_v0_nl
		ar['/spectral_templates/template_spectrum_v0_nodust'] 			= tspec_v0_nd
		ar['/spectral_templates/template_spectrum_v0_nodust_nolines'] 	= tspec_v0_nd_nl
		ar['/spectral_templates/template_spectrum_v300']	 			= tspec_v300			# smoothed to 300kms(res)
		ar['/spectral_templates/template_spectrum_v300_nolines'] 		= tspec_v300_nl
		ar['/spectral_templates/template_spectrum_v300_nodust'] 		= tspec_v300_nd
		ar['/spectral_templates/template_spectrum_v300_nodust_nolines'] = tspec_v300_nd_nl
		ar['/spectral_templates/lspec_v300'] 							= lspec_v300
Example #25
def importEmissionLineSensitivityFromCSVFile(inputfile,outputfile):

	dataArray = importArrayFromCSVFile(inputfile)

	wavelength = dataArray[:,0] * 1e-10 # conversion Angstroem to meter
	signal_to_noise_at_3sigma = dataArray[:,1]
	signal_to_noise_at_5sigma = dataArray[:,2]

	# write to HDF5 file
	with archive.archive(outputfile,'a') as ar:
		ar['/instrument/sensitivity/emission_line_wavelengths'] = wavelength
		#		ar['/gal/wavelength/unit'] = 'meter'
		ar['/instrument/sensitivity/signal_to_noise_sigma3'] = signal_to_noise_at_3sigma
		ar['/instrument/sensitivity/signal_to_noise_sigma5'] = signal_to_noise_at_5sigma
		ar['/instrument/sensitivity/exposure_time'] = 3000
Example #26
    def test(self):
        print("Running Full Test Sequence")
        #the ingest function sorts and moves files by date into the working/media directory
        ingest.ingest(ingestdir, workingdir)

        #the crawl function performs a hash index of all files in the target directories
        workingdirsum = crawl.crawl(True, workingdir, jsondatadir)
        archivedirsum = crawl.crawl(False, archivedir, jsondatadir)

        #the dedupe function combines all hash indexes and analyzes the dataset for duplicates
        data_files = glob.glob(jsondatadir + '/*.json')
        #run the dedupe function
        dedupe.dedupe(data_files, duplicatedir)

        #after the dedupe function has moved duplicates out, reindex
        workingdirsum = crawl.crawl(True, workingdir, jsondatadir)

        #the archive function pulls from the working/media directory and pools into sized volumes
        archive.archive(archivedir, jsondatadir, workingdir, mediasize)

        #validate that all files in duplicates exist elsewhere before moving to validated
        validate.validate(duplicatedir, workingdir, archivedir, validateddir)

        print("Daily Job Completed Successfully")
Example #27
	def __init__(self,parent,name,*args,**kwargs):
		self.name=name
		self.manga=Manga(self.name)
		self.archive=archive(self.name)

		if not "relief" in kwargs: kwargs["relief"]=RAISED
		if not "borderwidth" in kwargs: kwargs["borderwidth"]=3
		Frame.__init__(self,parent,*args,**kwargs)

		self.__PICTURE=Label(self,image=self.getImage())
		self.__PICTURE.grid(row=0,column=0,sticky=N+S+E+W)
		self.grid_rowconfigure(0,weight=1)
		self.grid_columnconfigure(0,weight=1)
		self.__NAME=Label(self,text=self.name)
		self.__NAME.grid(row=1,column=0,sticky=S)
		self.__CHAPTERS=Label(self,text="{} Chapters".format(len(self.manga.chapterList_have)))
		self.__CHAPTERS.grid(row=2,column=0,sticky=S)

		self.inProgress=False #to tell if the button should say update or cancel in the lower menu
		self.downloadInProgress=False
		self.archivingInProgress=False
Example #28
    def run_async(self):
        startProcessTime = time.time()
        if not self.params['skipArchive']:
            # retrieve images from lt-archive
            self.logger.info("(process.run_async) retrieving images from archive")
            ltarchive = archive(self.params['path_pw_list'], self.params['archive_credentials_id'], self.params['skycam_lup_db_credentials_id'], self.err, self.logger)
            
            ## search for images matching criteria
            MySQLLogFile = self.params['resPath'] + "skycamfiles"
            ltarchive.getMySQLLog(self.params['resPath'], "skycam", "skycam", self.params['dateFrom'], self.params['dateTo'], self.params['instrument'], MySQLLogFile) 
            
            ## get the data
            ltarchive.getData(MySQLLogFile, self.params['resPath'])
        else:
            for f in os.listdir(self.params['tmpMockPath']):
                shutil.copyfile(self.params['tmpMockPath'] + str(f), self.params['resPath'] + str(f))
    
        # decompress and sort images by ascending datetime
        decompress_files(self.params['resPath'], self.err, self.logger)
        images = sort_image_directory_UTC(self.params['resPath'], self.err, self.logger)
        if not images:
            self.err.setError(-12)
            self.err.handleError()

        # START THE PIPELINE
        pipe = pipeline(self.params, self.err, self.logger)  # spawn pipeline instance
        pipe.run(images)                                     # and run for these images

        # log error code
        with open(self.params['resPath'] + 'res.exitcode', 'w') as f:
            f.write(str(self.err.getError()))
            
        # zip files in directory and purge res dir
        archive_name = self.params['dateFrom'].replace(" ", "T").replace("-", "").replace(":", "") + ".tar"
        zip_output_files_in_directory(self.params['resPath'], archive_name, self.err, self.logger)      
        self._purge_output_dir(skipTarFiles=True)

        elapsed = (time.time() - startProcessTime)
        self.logger.info("(process.run_async) child process finished in " + str(round(elapsed)) + "s")
Example #29
def importTrainingCatalog(inputfile,outputfile):

	dataArray = importArrayFromCSVFile(inputfile)

	redshift_photometric = dataArray[:,1]
	galaxy_type = dataArray[:,2]

	magnitude_g = dataArray[:,5]
	magnitude_r = dataArray[:,8]
	magnitude_i = dataArray[:,11]
	magnitude_z = dataArray[:,14]
	magnitude_y = dataArray[:,17]
	magnitude_j = dataArray[:,20]
	magnitude_h = dataArray[:,23]
	magnitude_k = dataArray[:,26]

	#	wavelength_Ly = dataArray[:,29] * 1e-10 # conversion Angstroem to meter
	#	wavelength_OII = dataArray[:,32] * 1e-10
	#	wavelength_Hb = dataArray[:,35] * 1e-10
	#	wavelength_OIIIa = dataArray[:,38] * 1e-10
	#	wavelength_OIIIb = dataArray[:,41] * 1e-10
	#	wavelength_Ha = dataArray[:,44] * 1e-10



	# write to HDF5 file
	with archive.archive(outputfile,'a') as ar:
		ar['/target_selection_training/gal/redshift_photometric'] = redshift_photometric
		ar['/target_selection_training/gal/galaxy_type'] = galaxy_type
		ar['/target_selection_training/gal/magnitude_g'] = magnitude_g
		ar['/target_selection_training/gal/magnitude_r'] = magnitude_r
		ar['/target_selection_training/gal/magnitude_i'] = magnitude_i
		ar['/target_selection_training/gal/magnitude_z'] = magnitude_z
		ar['/target_selection_training/gal/magnitude_y'] = magnitude_y
		ar['/target_selection_training/gal/magnitude_j'] = magnitude_j
		ar['/target_selection_training/gal/magnitude_h'] = magnitude_h
		ar['/target_selection_training/gal/magnitude_k'] = magnitude_k
Example #30
    def loop_over_gal(self):

        with archive.archive(data_bank, 'r') as ar:
            # galaxy information
            self.flux_all = ar['/gal/flux']
            self.lam_all = ar[
                '/gal/wavelength']  # one array only for every galaxy
            id_fiber = ar['/gal/fiber_id']
            fiber_selection_flag = ar['/gal/fiber_selection_flag']

        #=======================================================================================
        # cut data vectors
        #=======================================================================================
        where_id_fiber = numpy.where(fiber_selection_flag == True)[0]
        id_fiber_temp = id_fiber[where_id_fiber]
        self.air_mass = self.air_mass[where_id_fiber]
        self.z = self.z[where_id_fiber]
        nwhere_id_fiber = len(where_id_fiber)

        #=======================================================================================
        # LOOP over galaxies
        #=======================================================================================
        Ngal = len(id_fiber_temp)
        Vfluxobs_all = []
        Vfluxerr_all = []
        Vfluxobs_noisefree_all = []
        print "... Looping over", Ngal, "galaxies..."
        for i in range(Ngal):

            #print "... processing galaxy", i+1, "out of", Ngal
            flux = numpy.array(self.flux_all[i], dtype='float32')
            lam = numpy.array(self.lam_all[i], dtype='float32')

            Vfluxobs, Vfluxobs_noise_free = self.observed_spectrum(
                lam, flux, i)

            # add to list
            Vfluxobs_all.append(Vfluxobs)
            Vfluxobs_noisefree_all.append(Vfluxobs_noise_free)

        print "... Looping over galaxies complete"

        #=======================================================================================
        # Output
        #=======================================================================================
        with archive.archive(data_bank, 'a') as ar:
            ar['/gal/flux_spectrum_observed'] = Vfluxobs_all
            ar['/gal/flux_spectrum_observed_noise_free'] = Vfluxobs_noisefree_all
            ar['/gal/wavelength_survey'] = self.lambda_survey  #!!! is this the right wavelength to save

        #=======================================================================================
        # DIAGNOSTIC
        # plot the last galaxy spectrum as an example
        #=======================================================================================

        print "... plotting diagnostics"

        try:
            Plotters.plot_spectrum(self.lambda_survey,
                                   Vfluxobs_noise_free,
                                   xmin=5000,
                                   xmax=10000,
                                   fig_number=0,
                                   plot_number=0,
                                   title='Galaxy Spectrum Noise Free',
                                   base_directory='.',
                                   filename_addendum="",
                                   show_plot=show_plot)
        except (RuntimeError, TypeError, NameError):
            print 'bad plot'
            pass

        try:
            Plotters.plot_spectrum(self.lambda_survey,
                                   Vfluxobs,
                                   xmin=5000,
                                   xmax=10000,
                                   fig_number=1,
                                   plot_number=0,
                                   title='Galaxy Spectrum With Noise',
                                   base_directory='.',
                                   filename_addendum="",
                                   show_plot=show_plot)
        except (RuntimeError, TypeError, NameError):
            print 'bad plot'
            pass
Example #31
import subprocess
import numpy
import scipy.interpolate as interpolate
import scipy.integrate as integrate
import random
from math import exp, sqrt
#from kcorrect import k_binspec
import pylab
import time
import archive

# set databank file
data_bank = '../../../data/data_bank.h5'
show_plot = False

# tell MSR to use method Cunha
with archive.archive(data_bank, 'a') as ar:
    ar['/MeasureSpectroscopicRedshift/method'] = 'Cunha'


class SimulateObservedGalaxySpectra:
    def __init__(self):

        #=======================================================================================
        # Input
        #=======================================================================================
        print
        print
        print '========================='
        print 'Entering Noise Generator'
        print '========================='
Example #32
File: go.py  Project: Milker90/itc
#!/usr/bin/python
# coding:utf-8

import metaxml
import conf
import os
import sys
import upload
import lookup
from archive import Item
import archive
import getpass
import assembly

if __name__ == '__main__':	
    item = Item(conf.iTMSTransporter, conf.distribute_account, conf.distribute_pwd, conf.bundle_short_version_string, conf.bundle_version, conf.project_path, conf.scheme, conf.configuration, conf.provisioning_profile_name, conf.vendor_id)

    # start packaging
    archive.archive(item)

    # fetch the itmsp
    lookup.lookup(item)

    # prepare the upload
    assembly.assembly(item)

    # start the upload
    upload.upload(item)

Example #33
                                        int(columns)))

        elif args.cmd == 'interactive':
            interactive.run_interactive()

        # Start running archival
        elif args.cmd == 'archive':
            print('...starting archive loop')
            firstit = True
            while True:
                if not firstit:
                    print('Sleeping 60s until next iteration...')
                    time.sleep(60)
                    jobs = Job.get_running_jobs(dir_cfg['log'])
                firstit = False
                archive.archive(dir_cfg, jobs)

        # Debugging: show the destination drive usage schedule
        elif args.cmd == 'dsched':
            dstdirs = dir_cfg['dst']
            for (d, ph) in manager.dstdirs_to_furthest_phase(jobs).items():
                print('  %s : %s' % (d, str(ph)))

        #
        # Job control commands
        #
        elif args.cmd in ['details', 'files', 'kill', 'suspend', 'resume']:
            print(args)

            selected = []
Example #34
def DetectPhotons():

    # -----------------------------------------------------
    # Import data and params
    # -----------------------------------------------------
    print
    print "........................."
    print " Entering Apply Filters "
    print "........................."

    databank_file = '../../../data/data_bank.h5'
    with archive.archive(databank_file, 'r') as ar:
        # filter information
        wavelength_fiducial = ar['/Instrument/Filter/wavelength_fiducial']
        resolution_fiducial = ar['/Instrument/Filter/resolution_fiducial']
        wavelength_min      = ar['/Instrument/Filter/wavelength_minimum']
        wavelength_max      = ar['/Instrument/Filter/wavelength_maximum']

        # photons
        # spectrum = ar[...]  # input path left unspecified in the original

    # -----------------------------------------------------
    # Generate Filters
    # -----------------------------------------------------
    Filters = GenerateFilters(wavelengths, wavelength_fiducial, resolution_fiducial)  # NB: wavelengths is never defined in this fragment

    # -----------------------------------------------------
    # Apply Filters
    # ... later: apply filter to each spectrum
    # -----------------------------------------------------
    # loop over galaxies
    # for gal in ...:  # loop left unfinished in the original
    #     ... apply filter to photon list; how are photons used in the EAZY photo-z calculator?


    # -----------------------------------------------------
    # Export data and params
    # -----------------------------------------------------
    with archive.archive(databank_file, 'a') as ar:
        pass  # export left blank in the original

    print
    print "........................."
    print " Exiting Generate Image  "
    print "........................."


    # plot diagnostics
    # do the following for all new photon information
    #   overlay photon energies on initial galaxy spectrum
    #   plot photon times
    #   positions vs. time
    #   positions vs. energy
    #   energy vs. time
    # compare all old vs. new photon information


    # Quality Assurance




# ================================================
# Main
# ================================================
def main():

    ApplyDetectorMeasurement()




#-------------------------------
if __name__ == '__main__':
    main()
Example #35
import metaxml
import conf
import os
import sys
import upload
import lookup
from archive import Item
import archive
import getpass
import assembly

if __name__ == '__main__':
    item = Item(conf.iTMSTransporter, conf.distribute_account,
                conf.distribute_pwd, conf.bundle_short_version_string,
                conf.bundle_version, conf.project_path, conf.scheme,
                conf.configuration, conf.provisioning_profile_name,
                conf.vendor_id)

    # start packaging
    archive.archive(item)

    # fetch the itmsp
    lookup.lookup(item)

    # prepare the upload
    assembly.assembly(item)

    # start the upload
    upload.upload(item)
Example #36
	def mergeDatafile(self, datafile):
		#merge datafiles into the databank
		with archive.archive(self.config.get("options", "data_dir") + self.config.get("options", "data_bank"), 'a') as dest:
			with archive.archive(datafile, 'r') as src:
				print "... copying " + self.config.get("data_bank", datafile) + " into " + self.config.get("options", "data_bank")
				copyHDF5Path(src, "/", dest)
Example #37
"""List archive index
"""

from __future__ import print_function

import sys
import time

import archive

this = archive.archive()

for i in this.idx:
	if i[:5] != "INDEX":
		continue
	i,m,o = this.get_entry(i)
	assert m == "mtree"
	o = o.splitlines()
	print(i, time.ctime(int(o[1])), o[0])
Example #38
def throughput():

	# =====================================================
	# INPUT
	# =====================================================
	databank_file = '../../../data/data_bank.h5'

	print
	print
	print "========================="
	print "entering Throughput"
	print "========================="


	with archive.archive(databank_file, 'r') as ar:
		# --------------------------------------------------------
		# read hardcoded throughputs for large elements
		wavelength_temp			= ar['/Throughput/parameters/wavelength']
		aperture_losses_gal 	= ar['/Throughput/parameters/aperture_losses_gal']
		aperture_losses_star 	= ar['/Throughput/parameters/aperture_losses_star']
		mohawk_frd  			= ar['/Throughput/parameters/mohawk_frd']
		collimator 				= ar['/Throughput/parameters/collimator']
		vph_gsolver 			= ar['/Throughput/parameters/vph_gsolver']
		camera 					= ar['/Throughput/parameters/camera']
		ccd						= ar['/Throughput/parameters/ccd']

		# --------------------------------------------------------
		# read hardcoded throughputs from glass/coatings
		Al 			= ar['/Throughput/parameters/Al']
		B270_air_glass 	= ar['/Throughput/parameters/B270_air_glass']
		air_Bk7 	= ar['/Throughput/parameters/air_Bk7']
		B270_6mm 	= ar['/Throughput/parameters/B270_6mm']
		ctio 		= ar['/Throughput/parameters/ctio']
		air_silica 	= ar['/Throughput/parameters/air_silica']
		blue_MgF 	= ar['/Throughput/parameters/blue_MgF']
		fiber_material 	= ar['/Throughput/parameters/fiber_material']
		red_MgF 	= ar['/Throughput/parameters/red_MgF']
		LLF1 		= ar['/Throughput/parameters/LLF1']
		seso_a 		= ar['/Throughput/parameters/seso_a']
		seso_b 		= ar['/Throughput/parameters/seso_b']
		sf5 		= ar['/Throughput/parameters/sf5']
		bk7_25mm 	= ar['/Throughput/parameters/bk7_25mm']
		lf5 		= ar['/Throughput/parameters/lf5']
		fk5 		= ar['/Throughput/parameters/fk5']
		lak33 		= ar['/Throughput/parameters/lak33']
		laf21 		= ar['/Throughput/parameters/laf21']
		prot_ag 	= ar['/Throughput/parameters/prot_ag']
		ag_a		= ar['/Throughput/parameters/ag_a']
		ag_b 		= ar['/Throughput/parameters/ag_b']
		blue_broad 	= ar['/Throughput/parameters/blue_broad']
		red_broad 	= ar['/Throughput/parameters/red_broad']
		edmond 		= ar['/Throughput/parameters/edmond']
		solgel_b 	= ar['/Throughput/parameters/solgel_b']
		solgel_r 	= ar['/Throughput/parameters/solgel_r']
		solgel_plus	= ar['/Throughput/parameters/solgel_plus']

	print "... imported data"

	# --------------------------------------------------------
	# defining each optical element in terms of material
	# throughput (label with column in original worksheet) ... can replace some of the above
	#atmos 		= 10**(-0.4*ctio*1.15)
	primary 	= 0.98 * Al
	top_end 	= 1.-(2.91/(numpy.pi/4.* 4.**2))   	#
	wfc			= air_silica**2* seso_b**6* red_broad**4* 10**(-fiber_material* 0.000376 /10.)
	#adc 		= red_broad**4 *LLF1**(28/25)*bk7_25mm**(40.5/25)	#
	fiber 		= 10**(-fiber_material*0.05/10)				#
	print "... Combined minor optical elements"


	# --------------------------------------------------------
	# combining optical elements
	telescope 	 = primary * top_end * wfc
	mohawk_full	 = fiber * mohawk_frd			#
	vph_mounted	 = vph_gsolver * red_broad**2 * bk7_25mm**(20./25.)
	spectrograph = collimator * vph_mounted * camera * ccd
	print "... Combined major optical elements with galaxy, including aperture losses"

	# --------------------------------------------------------
	# combine elements with aperture losses for gal and star
	# this will have to be in a loop when applying the aperture losses for each gal
	#		  for a star, total_star 	= telescope * aperture_losses_star * mohawk_full * spectrograph
	#	will have to apply galaxy aperture in SIMGALSPECOBSERVED
	total_gal 	= telescope * aperture_losses_gal  * mohawk_full * spectrograph
	throughput_final = total_gal
	print "... computed final throughput"


	# =====================================================
	# Output
	# =====================================================
	with archive.archive(databank_file, 'a') as ar:
		ar['/Throughput/throughput'] = throughput_final

	print "... exported data to databank"

	# =====================================================
	# Diagnostic
	# =====================================================
	print "... basic diagnostic plots"
	Plotters.plot_throughput_spectrum(wavelength_temp, throughput_final, power1=telescope, power2=spectrograph, xmin=500, xmax=1000,
							fig_number=0, plot_number=0,
							label0 = '', label1 = '', label2='',
							title='ThroughputSpectrum',
							base_directory='.', filename_addendum="", show_plot=False)

	return
Example #39
    def __init__(self):

        #=======================================================================================
        # Input
        #=======================================================================================
        print
        print
        print '========================='
        print 'Entering Noise Generator'
        print '========================='

        with archive.archive(data_bank, 'r') as ar:

            # instrument information
            self.exposure = 20. * 60.  #exposure				= ar['/instrument/sensitivity/exposure_time']  # exposure = exposure time in seconds
            self.readnoise = 5.  #readnoise 				= ar['/instrument/sensitivity/read_noise']			# add to param files
            #			resnum = .25							  #resnum 				= ar['/instrument/sensitivity/resolution']

            self.Nwave = ar['/SimulateObservedGalaxySpectra/nb_pixels']
            wavelength_range = ar[
                '/SimulateObservedGalaxySpectra/wavelength_range']
            wavelength_min = wavelength_range[0]
            wavelength_max = wavelength_range[1]

            fiber_aperture_radius = numpy.sqrt(
                2.5 / numpy.pi
            )  #fiber_aperture_radius 	= ar['/Fiber_Allocation/fiber_radius']	  # arcsec
            self.altitude = 2635.  #altitude 				= ar['/instrument/sensitivity/altitude']			 # meters  ...add to param files
            self.worsening = 1  #worsening 				= ar['/instrument/sensitivity/worsening']			# add to param files

            self.z = ar['/gal/z_true']

            tile_id_gal = ar['/gal/tile_ID']
            airmass_tile = ar['/tiling/airmass']
            tile_id_tile = ar['/tiling/tile_ID']
            self.air_mass = ar['/gal/airmass']

            # general constants
            self.hp = 6.62607e-27  #ergs.secs
            self.c = 2.9979e18  #Ang/secs
            self.seed_constant = 99999

            # Throughput
            self.wavetrans = numpy.array(
                ar['/Throughput/parameters/wavelength'])
            self.trans = numpy.array(ar['/Throughput/throughput']
                                     )  #!!! what should be the units here?

            # sky data
            self.wave = ar['/sky/background/wave']
            self.atmtemp = ar['/sky/background/power']
            self.abswave = ar['/sky/extinction/wave']
            self.abstemp = ar['/sky/extinction/power']

            # basic modifications to input data
            self.atmtemp = numpy.array(self.atmtemp, dtype='float64')
            self.wave = numpy.array(self.wave, dtype='float64')
            self.abstemp = numpy.array(self.abstemp)
            self.abswave = numpy.array(self.abswave)

            #=======================================================================================
            # obtain airmasses for galaxies
            #=======================================================================================
            #for i in range(Number_Tiles):
            #    match_tile = numpy.where(tile_id_gal == tile_id_tile[i])[0]
            #    self.airmass[match_tile] = airmass_tile[i]

            #=======================================================================================
            # Set constants
            #=======================================================================================
            # !!! to modify to access tile information to link to where galaxy is observed
            self.num_readout = 1
            #Nwave		= 557*resnum		# 557 = number of pixels
            #			self.Nwave		= 1000*resnum		# 557 = number of pixels
            resnum = float(self.Nwave) / 1000
            dispersion = 2.5  # def: dispersion = (pixsize or width ) / resolution; 2.5 is when most of the information comes # in a limit
            #			dellam		= 7.14/resnum		#R is not being used. Only dellam matters now. = resolution/dispersion
            dellam = float(wavelength_max - wavelength_min) / self.Nwave

            print "Resolution:", int(self.Nwave), "pixels"

            #=======================================================================================
            # Aperture
            #=======================================================================================
            self.aperture_area = numpy.pi * fiber_aperture_radius**2
            self.Area = numpy.pi * 4**2
            self.Areacm = self.Area * 10000.  #converting from m^2 to cm^2

            #=======================================================================================
            # SKY BACKGROUND
            # 	- Convert sky photons to photons/Ang.
            #	- in skybg_50_10.dat the units are photons/s/nm/arcsec^2/m^2
            #=======================================================================================
            self.atm = self.atmtemp * self.worsening * self.exposure * self.aperture_area * self.Area / 10.  #because 10ang=1 nm
            self.wave *= 10.  # convert nm to Angstroms

            #====================================
            # smooth spectra to SURVEY resolution
            # I think I shouldn't smooth over atmospheric absorption since
            # smoothing only happens after the photons have been removed from atmosphere.
            #====================================
            self.atm = Utilities.k_smooth_py(numpy.log10(self.wave), self.atm,
                                             50)
            self.atm = numpy.array(self.atm, dtype='float64')

            #=======================================================================================
            # TRANSMISSION information
            #=======================================================================================
            self.wavetrans *= 10.  # convert nm to Angstroms
            #trans	   *= 0.01

            #=======================================================================================
            # Interpolate wavelengths onto grid with resolution
            #	1) make grid
            #	2) interpolate counts/fluxes onto grid
            #	3) Note that delta_lam will affect number of photons counted.
            #		***remember to multiply by delta_lam.***
            #=======================================================================================
            self.lambda_survey = numpy.zeros(self.Nwave, dtype='float64')
            binwidth = numpy.zeros(self.Nwave)
            self.lambda_survey[0] = wavelength_min

            for i in numpy.arange(1, self.Nwave):
                self.lambda_survey[i] = self.lambda_survey[i - 1] + dellam

            for i in numpy.arange(1, self.Nwave - 1):
                self.lambda_survey[i] = self.lambda_survey[i] + (
                    self.lambda_survey[i + 1] - self.lambda_survey[i]) / 2.
                binwidth[i] = self.lambda_survey[i + 1] - self.lambda_survey[i]

            atmtemp_rebinned = numpy.zeros(self.lambda_survey.shape)
            k_binspec.k_binspec(self.wave, self.atmtemp, self.lambda_survey,
                                atmtemp_rebinned)
            self.trans_interp = numpy.interp(self.lambda_survey,
                                             self.wavetrans, self.trans)

            # A: energy per detected photon at each pixel, corrected for throughput
            self.A = self.hp * self.c / (self.lambda_survey *
                                         self.trans_interp)
            self.A[~numpy.isfinite(self.A)] = 0.  # zero pixels with no throughput (inf as well as nan)

            # B: sky background photon counts through the instrument
            self.B = self.worsening * self.exposure * self.aperture_area * self.Area / 10 * atmtemp_rebinned * self.trans_interp

            # C: flux-to-photon conversion factor per pixel
            self.C = self.exposure * self.Areacm / (
                self.hp * self.c) * self.lambda_survey * self.trans_interp

            # D: atmospheric extinction scaled to the observatory altitude
            abstemp_interp = numpy.interp(self.lambda_survey, self.abswave,
                                          self.abstemp)
            self.D = abstemp_interp * numpy.exp(
                (1700. - self.altitude) / 7000.)

            # E: total read-noise variance
            self.E = self.num_readout * self.readnoise**2
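
The two grid-construction loops above have a compact vectorized equivalent; a minimal sketch, assuming only wavelength_min, dellam and Nwave as inputs (names taken from the examples). It reproduces the loops exactly, which also makes visible that the in-place shift leaves binwidth at dellam/2 rather than dellam:

import numpy

def make_survey_grid(wavelength_min, dellam, nwave):
    """Vectorized equivalent of the two grid loops above: a uniform grid
    whose interior points are shifted to bin centers."""
    grid = wavelength_min + dellam * numpy.arange(nwave, dtype='float64')
    centers = grid.copy()
    centers[1:-1] += dellam / 2.                 # same shift the second loop applies
    binwidth = numpy.zeros(nwave)
    binwidth[1:-1] = grid[2:] - centers[1:-1]    # = dellam/2, matching the loop exactly
    return centers, binwidth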
Exemplo n.º 40
0

	#=======================================================================================
	##Calculate S/N
	#=======================================================================================
	Vatmerr	 = np.sqrt(Vatm + Vsed + readnoise**2)	# noise: sqrt of sky counts + source counts + read-noise variance
	signoise = Vsed/Vatmerr
	Vfluxerr = Vflux/signoise


	#=======================================================================================
	##Generate observed spectra
	#=======================================================================================
	Vfluxobs = np.zeros((Nwav, Ngal))

	# loop over galaxies 
	for i in range(Ngal):
		random.seed()
		gasdev		= np.array([random.gauss(0., 1.) for x in range(Nwav)])	# Gaussian deviates, one per pixel
		Vfluxobs[:,i]	= Vflux[:,i] + gasdev*Vfluxerr[:,i]

	# Make noise free
	Vfluxobs_noise_free=Vflux

	# Write out
	with archive.archive(data_bank,'a') as ar:
		ar['/gal/flux_spectrum_observed'] 	     = Vfluxobs
		ar['/gal/flux_spectrum_observed_noise_free'] = Vfluxobs_noise_free
		ar['/gal/wavelength_survey'] 		     = lambda_survey 		#!!! is this the right wavelength to save
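
The per-galaxy noise loop above can be collapsed into one vectorized step; a minimal sketch, assuming Vflux and Vfluxerr are (Nwav, Ngal) arrays as in this example:

import numpy as np

def add_observational_noise(Vflux, Vfluxerr, seed=None):
    """Perturb noise-free fluxes by one Gaussian deviate per pixel per galaxy."""
    rng = np.random.default_rng(seed)            # NumPy >= 1.17
    return Vflux + rng.standard_normal(Vflux.shape) * Vfluxerr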
		
Exemplo n.º 41
0
def archiveManga(ignore=''):
	manga = int(mangaList.curselection()[0])	# index of the manga selected in the list
	tool = archive(mangas[manga].name)		# set up the archive tool for this manga
	tool.update_zipFiles()				# build the archives; also merges ranges, e.g. removes 20-22 and creates 20-25
	print "Done Archiving"
Exemplo n.º 42
0
def observe_galaxy():

	data_bank = 'data_bank.h5'
	with archive.archive(data_bank,'r') as ar:
		# galaxy information
		gal_air_mass    	= ar['/gal/airmass']

		# instrument information
		exposure 		= ar['/instrument/sensitivity/exposure_time']  # exposure = exposure time in seconds
		readnoise 		= ar['/instrument/sensitivity/read_noise']			# add to param files
		resnum 			= ar['/instrument/sensitivity/resolution']
		resolution_kms 		= ar['/instrument/sensitivity/resolution_kms']			# res_instrument = 300 # res of instrument  in km/s
		wavelength_min 		= ar['/instrument/wavelength_minimum']  	# Angstrom
		fiber_aperture_radius 	= ar['fiber_allocation/fiber_radius']
		worsening 		= ar['/instrument/sensitivity/worsening']			# add to param files
		altitude 		= ar['/instrument/sensitivity/altitude']			 #meters ??? add to param files

		# general constants
		plancks_constant 	= ar['/general/physical_constants/plancks_constant']	
		speed_of_light   	= ar['/general/physical_constants/speed_of_light']	

		# sky data
		wave   		= ar['sky/background/wave']
		temp    	= ar['sky/background/power']
		abswave 	= ar['sky/extinction/wave']
		abstemp 	= ar['sky/extinction/power']

		# Throughtput
		wavetrans 	= ar['/gal/throughput/wavelength']
		trans	  	= ar['/gal/throughput/throughput']  #!!! what should be the units here?


	
# !!! to modify to access tile information to link to where galaxy is observed
	#airmass=1.3

	Ngal = len(gal_air_mass)	# number of galaxies (one airmass entry per galaxy)

	Nwav = 557*resnum		# 557 = number of pixels
	dispersion = 2.5		# dispersion = (pixel size or width) / resolution; 2.5 is where most of the
					# information comes in, in the limit
	dellam = 7.14/resnum		# R is not used directly; only dellam matters now (= resolution/dispersion)


	#=======================================================================================
	# Aperture
	#=======================================================================================
	aperture_area	= np.pi*fiber_aperture_radius**2
	Area		= np.pi*4**2		# telescope collecting area (hard-coded 4 m radius); ** not ^, which is XOR in Python
	Areacm		= Area*10000.		#converting from m^2 to cm^2


	#=======================================================================================
	# SKY BACKGROUND
	# 	- Convert sky photons to photons/Ang.
	#	- in skybg_50_10.dat the units are photons/s/nm/arcsec^2/m^2
	#=======================================================================================
	atm	= worsening* exposure* temp* aperture_area* Area/10. #because 10ang=1 nm
	wave	= 10.* wave					# convert nm to Angstroms


	#=======================================================================================
	# TRANSMISSION information
	#=======================================================================================
	wavetrans = 10.* wavetrans	# convert nm to Angstroms, matching the sky wavelength grid
	trans	  = 0.01* trans		# throughput given in percent; convert to a fraction

    
	#============================================
	# Modify ATMOSPHERIC ABSORPTION  
	#============================================
	absatm = abstemp * gal_air_mass * np.exp((1700. - altitude)/7000.)	# scale extinction by airmass and observatory altitude
	absatm[absatm > 1.] = 1.						# guard against bad values in the input file
		

	#====================================
	# smooth spectra to SURVEY resolution
	# I think I shouldn't smooth over atmospheric absorption since 
	# smoothing only happens after the photons have been removed from atmosphere.
	#====================================
	logwave = np.log10(wave)
	atm	= my_smooth(logwave, atm, resolution_kms)	# smooth to the instrument resolution in km/s (was k_smooth(logwave, atm, 300))


	#=======================================================================================
	# Convert sed flux to photon counts.
	# 	- sed flux is in ergs/cm^2/s/Ang.
	# 	- photon counts is in photons/Ang.
	#=======================================================================================
	# flux, lam, zspectemp: per-galaxy SED flux, wavelength and redshift from earlier pipeline steps
	flux	= flux/ (1.+zspectemp)							#add redshift dimming
	sedphot = exposure* flux* lam* Areacm/ (plancks_constant*speed_of_light)	# photons/Ang; approximate -- exact would integrate over each pixel
	sedflux = exposure* flux* Areacm


	#=======================================================================================
	# Redshift galaxy sed's. 
	#	- rest-frame flux assumes galaxy is 10pc away.
	#	- doesn't matter when using coeffs calculated from
	#	- kcorrect, since the code already applies the 1/r^2 dimming.
	#=======================================================================================
	lamz = lam* (1. + zspectemp)

	#=======================================================================================
	# Interpolate wavelengths onto grid with resolution
	#	1) make grid 
	#	2) interpolate counts/fluxes onto grid
	#	3) Note that delta_lam will affect number of photons counted.
	#		***remember to multiply by delta_lam.***
	#=======================================================================================
	lambda_survey	 = np.zeros(Nwav)
	binwidth	 = np.zeros(Nwav)
	lambda_survey[0] = wavelength_min

	for i in np.arange(1,Nwav):
		lambda_survey[i] = lambda_survey[i-1]+dellam 

	for i in np.arange(1,Nwav-1):
		lambda_survey[i] = lambda_survey[i] + (lambda_survey[i+1]-lambda_survey[i])/2.
		binwidth[i] 	 = lambda_survey[i+1] - lambda_survey[i]

	#=======================================================================================
	#	Bin spectra to survey's resolutions by integrating over pixels
	#		to convert flux density to flux. 
	#=======================================================================================
	Vsed	= np.zeros((Nwav, Ngal))
	Vflux	= np.zeros((Nwav, Ngal))

	Vatm = k_binspec(wave, atm, lambda_survey)
	for i in range(Ngal):		# loop over galaxies (the original IDL loop ran over an undefined Nshort)
		Vsed[:,i]  = k_binspec(lamz[:,i], sedphot[:,i], lambda_survey)
		Vflux[:,i] = k_binspec(lamz[:,i], sedflux[:,i], lambda_survey)
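
k_binspec is an external kcorrect routine that integrates a flux density over each survey pixel. As a rough stand-in (a simplified approximation, not the actual kcorrect algorithm), the same flux-density-to-flux step can be sketched with numpy alone:

import numpy as np

def binspec_approx(wave, flux_density, pixel_centers, nsub=16):
    """Integrate a flux density (per Angstrom) over each survey pixel,
    approximating pixel edges as midpoints between neighboring centers."""
    edges = np.empty(len(pixel_centers) + 1)
    edges[1:-1] = 0.5 * (pixel_centers[1:] + pixel_centers[:-1])
    edges[0] = 2. * pixel_centers[0] - edges[1]      # mirror the first edge
    edges[-1] = 2. * pixel_centers[-1] - edges[-2]   # mirror the last edge
    binned = np.zeros(len(pixel_centers))
    for i in range(len(pixel_centers)):
        sub = np.linspace(edges[i], edges[i + 1], nsub)   # dense sub-grid across the pixel
        binned[i] = np.trapz(np.interp(sub, wave, flux_density), sub)
    return binned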
Exemplo n.º 43
0
Arquivo: ATM.py Projeto: ua-snap/atm
    def run_atm(self):
        
        """ Program sequence """
        #====================================================
        # Initialization Process
        #====================================================
        print '==================='
        print ' Initializing ATM'
        print '==================='
        read_control.read_control(self)
        initialize.initialize(self)
        read_layers.read_layers(self)
        model_domain.model_domain(self)
        create_attm_cohort_arrays.create_attm_cohort_arrays(self)


        #=========================================
        # Initializing Site Specific Information
        #=========================================
        if self.Simulation_area.lower() == 'barrow':
            run_barrow.initialize_barrow(self)
        elif self.Simulation_area.lower() == 'tanana':
            run_tanana.initialize_tanana(self)
         
        #=======================================
        # READ MET Data, Calculate Degree Days,
        # and Calculate Climatic Data needed
        # for ecotype changes.
        #=======================================
        initialize.Met(self)

        #++++++++++++++++++++++++++++++++++++++++++++++
        #  ========================================
        #    INITIALIZE COHORT PROPERTIES
        #  ========================================
        #++++++++++++++++++++++++++++++++++++++++++++++
        print '======================================'
        print ' Initializing Terrestrial Properties '
        print '======================================'
        if self.Simulation_area.lower() == 'barrow':
            run_barrow.initialize_barrow_cohorts(self)
        elif self.Simulation_area.lower() == 'tanana':
            run_tanana.Terrestrial_Tanana(self)

        print '=================================================='
        print '            Starting the MAIN LOOP '
        print '=================================================='

        initialize.run(self)
        if self.Simulation_area.lower() == 'barrow':
            run_barrow.run_barrow(self, time)
        elif self.Simulation_area.lower() == 'tanana':
            run_tanana.run_tanana(self, time)

        print '=================================================='
        print '            Finished the MAIN LOOP '
        print '=================================================='


        # -------------------
        # Simulation End Time
        # -------------------
        clock.finish(self)
        
        #===========================
        # Output Simulation Results
        #===========================
        if self.results_onscreen.lower() == 'yes':
            results.on_screen(self)
        if self.archive_simulation.lower() == 'yes':
            results.on_file(self)

        
        # ================
        # Archive Results
        # ================
        if self.archive_simulation.lower() == 'yes':
            #----------------------------------------------------------------------------------------------------------
            # Create the tarfile
            #----------------------------------------------------------------------------------------------------------
            self.archive_file = tarfile.open(self.control['Run_dir']+self.Output_directory+str('/Archive/')+ \
                                             self.archive_time+str('_')+self.simulation_name+".tar.gz", mode='w:gz')
            #----------------------------------------------------------------------------------------------------------
            archive.read_archive(self)
            archive.archive(self)
            
        print '----------------------------------------'
        print '        Simulation Complete             '
        print '----------------------------------------'        
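
The tarfile path above is assembled by raw string concatenation; a minimal sketch of the same step with os.path.join, assuming Output_directory is a path fragment under Run_dir as the concatenation implies:

import os
import tarfile

def open_archive_tarfile(run_dir, output_directory, archive_time, simulation_name):
    """Open the run's compressed archive for writing, mirroring the
    naming scheme used in run_atm above."""
    name = "{0}_{1}.tar.gz".format(archive_time, simulation_name)
    path = os.path.join(run_dir, output_directory, "Archive", name)
    return tarfile.open(path, mode="w:gz")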
Exemplo n.º 44
0
    usage()

options = dict()
options['vault'] = "NotSpecified"
options['region'] = "us-east-1"
options['access_key'] = None
options['secret_key'] = None
for option, value in opts:
    if option == '--access_key':
        options['access_key'] = value
    elif option == '--secret_key':
        options['secret_key'] = value
    elif option == '--region':
        options['region'] = value
    elif option == '--vault':
        options['vault'] = value
    elif option == '--help':
        usage()

if args[0]   == "create":
    a = archive.archive(args[1], options['vault'])
    a.create()
elif args[0] == "validate":
    a = archive.archive(args[1], options['vault'])
    a.validate()
elif args[0] == "upload":
    a = archive.archive(args[1], options['vault'])
    a.upload(options)
else:
    sys.exit("Did not understand {0}".format(args[0]))
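
The hand-rolled getopt loop above (note that the original wrote `('--access_key')`, a plain string, so `in` did a substring test) maps directly onto argparse; a sketch of an equivalent front end, where the archive.archive call signature is taken from the example and everything else is illustrative:

import argparse

import archive

def parse_and_run(argv=None):
    parser = argparse.ArgumentParser()
    parser.add_argument("command", choices=["create", "validate", "upload"])
    parser.add_argument("file")
    parser.add_argument("--vault", default="NotSpecified")
    parser.add_argument("--region", default="us-east-1")
    parser.add_argument("--access_key")
    parser.add_argument("--secret_key")
    opts = parser.parse_args(argv)

    a = archive.archive(opts.file, opts.vault)
    if opts.command == "create":
        a.create()
    elif opts.command == "validate":
        a.validate()
    else:
        a.upload(vars(opts))   # vars(opts) approximates the options dict above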
Exemplo n.º 45
0
import archive
import csvimport
from matplotlib import pyplot
import os

print
print
print "============================="
print "entered Target Selection"
print "============================="


inputfile = '../../../data/data_bank.h5'
outputfile = inputfile

# import parameters
with archive.archive(inputfile,'r') as ar:
	training_catalog_location = ar['/Target_Selection/training_catalog_location']
	mag_g_min = ar['/Target_Selection/mag_g_min']
	mag_g_max = ar['/Target_Selection/mag_g_max']
	mag_r_min = ar['/Target_Selection/mag_r_min']
	mag_r_max = ar['/Target_Selection/mag_r_max']
	mag_i_min = ar['/Target_Selection/mag_i_min']
	mag_i_max = ar['/Target_Selection/mag_i_max']
	mag_z_min = ar['/Target_Selection/mag_z_min']
	mag_z_max = ar['/Target_Selection/mag_z_max']
	mag_y_min = ar['/Target_Selection/mag_y_min']
	mag_y_max = ar['/Target_Selection/mag_y_max']
	photo_z_min = ar['/Target_Selection/photo_z_min']
	photo_z_max = ar['/Target_Selection/photo_z_max']
	type_cutoff = ar['/Target_Selection/type_cutoff']
	max_training_set_size = ar['/Target_Selection/max_training_set_size']
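
The parameters read above define per-band magnitude and photo-z windows; a minimal sketch of how such cuts combine into a boolean target mask (the catalog arrays themselves are hypothetical here, since this excerpt only shows the parameter reads):

import numpy as np

def select_targets(mag_g, mag_r, photo_z,
                   mag_g_min, mag_g_max, mag_r_min, mag_r_max,
                   photo_z_min, photo_z_max):
    """AND together one boolean window per quantity; the i/z/y bands and
    the type cut extend the mask the same way."""
    mask = (mag_g > mag_g_min) & (mag_g < mag_g_max)
    mask &= (mag_r > mag_r_min) & (mag_r < mag_r_max)
    mask &= (photo_z > photo_z_min) & (photo_z < photo_z_max)
    return mask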
Exemplo n.º 46
0
	return a

tx("HELLO STOW 1.0 SERVER")

cl = rx()
assert cl == "HELLO STOW 1.0 CLIENT"

tx("WELCOME PYSTOW")

nm = rx()
assert nm[:5] == "NAME "
nm = nm.split()[1]

tx("OK")

this = archive.archive()

mt = ""
while True:
	i = rx()
	j = i.split()
	assert j[0] == "MTREE"
	l = int(j[1])
	if l == 0:
		break
	x = 0
	while x < l:
		y = sys.stdin.read(l - x)
		mt += y
		x += len(y)
		a = rx()
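
The MTREE receive loop re-reads until the announced byte count is satisfied, because a single read may return short; the same pattern as a small helper (a sketch, not part of the stow protocol code itself):

def read_exact(stream, n):
    """Read exactly n bytes/characters from a stream, looping over short reads."""
    chunks = []
    got = 0
    while got < n:
        piece = stream.read(n - got)
        if not piece:
            raise EOFError("stream closed after %d of %d bytes" % (got, n))
        chunks.append(piece)
        got += len(piece)
    return "".join(chunks)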
Exemplo n.º 47
0
    def run_atm(self):
        """ Program sequence """
        #====================================================
        # Initialization Process
        #====================================================
        print '==================='
        print ' Initializing ATM'
        print '==================='
        read_control.read_control(self)
        initialize.initialize(self)
        read_layers.read_layers(self)
        model_domain.model_domain(self)
        create_attm_cohort_arrays.create_attm_cohort_arrays(self)

        #=========================================
        # Initializing Site Specific Information
        #=========================================
        if self.Simulation_area.lower() == 'barrow':
            run_barrow.initialize_barrow(self)
        elif self.Simulation_area.lower() == 'tanana':
            run_tanana.initialize_tanana(self)
        elif self.Simulation_area.lower() == 'yukon':
            run_yukon.initialize_yukon(self)

        #=======================================
        # READ MET Data, Calculate Degree Days,
        # and Calculate Climatic Data needed
        # for ecotype changes.
        #=======================================
        initialize.Met(self)

        #++++++++++++++++++++++++++++++++++++++++++++++
        #  ========================================
        #    INITIALIZE COHORT PROPERTIES
        #  ========================================
        #++++++++++++++++++++++++++++++++++++++++++++++
        print '======================================'
        print ' Initializing Terrestrial Properties '
        print '======================================'
        if self.Simulation_area.lower() == 'barrow':
            run_barrow.initialize_barrow_cohorts(self)
        elif self.Simulation_area.lower() == 'tanana':
            run_tanana.Terrestrial_Tanana(self)

        print '=================================================='
        print '            Starting the MAIN LOOP '
        print '=================================================='

        initialize.run(self)
        if self.Simulation_area.lower() == 'barrow':
            run_barrow.run_barrow(self, time)
        elif self.Simulation_area.lower() == 'tanana':
            run_tanana.run_tanana(self, time)

        print '=================================================='
        print '            Finished the MAIN LOOP '
        print '=================================================='

        # -------------------
        # Simulation End Time
        # -------------------
        clock.finish(self)

        #===========================
        # Output Simulation Results
        #===========================
        if self.results_onscreen.lower() == 'yes':
            results.on_screen(self)
        if self.archive_simulation.lower() == 'yes':
            results.on_file(self)

        # ================
        # Archive Results
        # ================
        if self.archive_simulation.lower() == 'yes':
            archive.read_archive(self)
            archive.archive(self)
            #----------------------------------------------------------------------------------------------------------
            # Create the tarfile
            #----------------------------------------------------------------------------------------------------------
            self.archive_file =tarfile.open(self.control['Run_dir']+self.Output_directory+str('/Archive/')+ \
                                            self.archive_time+str('_')+self.simulation_name+".tar.gz", mode='w:gz')
            #----------------------------------------------------------------------------------------------------------
            if self.Simulation_area.lower() == 'barrow':
                os.chdir(self.control['Run_dir'] + self.Input_directory +
                         '/Barrow/')

        print '----------------------------------------'
        print '        Simulation Complete             '
        print '----------------------------------------'
Exemplo n.º 48
0
Arquivo: ATTM.py Projeto: ua-snap/atm
    def run_attm(self):
        
        """ Program sequence """
        #====================================================
        # Initialization Process
        #====================================================
        print '==================='
        print ' Initializing ATTM'
        print '==================='
        read_control.read_control(self)
        initialize.initialize(self)
        read_layers.read_layers(self)
        model_domain.model_domain(self)
        create_attm_cohort_arrays.create_attm_cohort_arrays(self)
        if self.Simulation_area.lower() == 'barrow':
            initial_cohort_population.barrow_initial_cohort_population(self)
            initial_cohort_check.barrow_initial_cohort_check(self)
            cohort_present.barrow_cohort_present(self)
        elif self.Simulation_area.lower() == 'tanana':
            initial_cohort_population.tanana_initial_cohort_population(self)
            initial_cohort_check.tanana_initial_cohort_check(self)
            cohort_present.tanana_cohort_present(self)

         
        #=======================================
        # READ MET Data & Calculate Degree Days
        #=======================================
        initialize.Met(self)


        #++++++++++++++++++++++++++++++++++++++++++++++
        #  ========================================
        #    INITIALIZE BARROW COHORT PROPERTIES
        #  ========================================
        #++++++++++++++++++++++++++++++++++++++++++++++
        if self.Simulation_area.lower() == 'barrow':
            print '=================================== '
            print ' Initializing Lake & Pond Properties'
            print '===================================='
            initialize.LakePond(self)
            set_lake_pond_depth.set_lake_pond_depth(self)
            set_lake_ice_depth_constant.set_lake_ice_depth_constant(self)
            set_ice_thickness_array.set_ice_thickness_array(self)
            climate_expansion_arrays.set_climate_expansion_arrays(self)
            set_pond_growth_array.set_pond_growth_array(self)

            print '====================================='
            print ' Initializing Terrestrial Properties'
            print '====================================='
            initialize.Terrestrial_Barrow(self)
            read_ice_content.read_ice_content(self)
            read_drainage_efficiency.read_drainage_efficiency(self)
            read_initial_ALD.read_initial_ALD(self)
            set_ALD_constant.set_ALD_constant(self)
            set_ALD_array.set_ALD_array(self)
            set_protective_layer.set_protective_layer(self)
            set_initial_cumulative_probability.set_initial_cumulative_probability(self)
            # Initializing Terrestrial Cohort Properties 
            initialize.Wet_NPG(self)
            initialize.Wet_LCP(self)
            initialize.Wet_CLC(self)
            initialize.Wet_FCP(self)
            initialize.Wet_HCP(self)
            # Other needed information [in the future]
            initial_cohort_age.initial_cohort_age(self)

        elif self.Simulation_area.lower() == 'tanana':
            print '======================================'
            print ' Initializing Terrestrial Properties '
            print '======================================'
            initialize.Terrestrial_Tanana(self)


        print '=================================================='
        print '            Starting the MAIN LOOP '
        print '=================================================='

        initialize.run(self)
        for time in range(0, self.stop):
            if time == 0:
                if self.Simulation_area.lower() == 'barrow':
                    cohorts.initial_barrow(self)
                elif self.Simulation_area.lower() == 'tanana':
                    cohorts.initial_tanana(self)
            print '    at time step: ', time
            
            # ++++++++++++++++++++++++++++++++++++++
            # Check for significant climatic event
            # ++++++++++++++++++++++++++++++++++++++
            check_climate_event.check_climate_event(self)            
           
            # ----------------------------------------------------------
            # Looping over elements
            # ----------------------------------------------------------
            for element in range(0, self.ATTM_nrows * self.ATTM_ncols):
                
                # ----------------------------------------------------
                # Define the total fractional area of cohorts for
                # each element
                # ----------------------------------------------------
                cohort_start = cohort_check.cohort_start(self, element, time)
                
                # ----------------------------------------------------
                # Expand/Infill lake & ponds by prescribed rates
                # ----------------------------------------------------
                lake_pond_expansion.lake_pond_expansion(self, element)
                lake_pond_expansion.pond_infill(self, element, time)
                
                # ----------------------------------------------------------
                # Set active layer depth
                # ---------------------------------------------------------
                active_layer_depth.active_layer_depth(self, time, element)
                
                # ----------------------------------
                # Cycle through terrestrial cohorts
                # ----------------------------------
                check_Wet_NPG.check_Wet_NPG(self, element, time)
                check_Wet_LCP.check_Wet_LCP(self, element, time)
                check_Wet_CLC.check_Wet_CLC(self, element, time)
                check_Wet_FCP.check_Wet_FCP(self, element, time)
                check_Wet_HCP.check_Wet_HCP(self, element, time)

                # ----------------------------------
                # Set pond/lake ice thickness depth
                # ----------------------------------
                ice_thickness.ice_thickness(self, time, element)
                # ------------------------------
                # Cycle through ponds and lakes
                # ------------------------------
                check_Ponds.check_Ponds(self, element, time)
                check_Lakes.check_Lakes(self, element, time)
                 
                # -------------------------------------------------
                # Cohort Fraction Check (mass balance of cohorts)
                # -------------------------------------------------
                cohort_check.cohort_check(self, element, time, cohort_start)

                if time == self.stop-1:
                    if self.Simulation_area.lower() == 'barrow':
                        cohorts.final_barrow(self)
                    elif self.Simulation_area.lower() == 'tanana':
                        cohorts.final_tanana(self)
                    
            # ========================================================================
            # END MAIN LOOP 
            # ========================================================================
            
            # ========================================================================
            # OUTPUT RESULTS (if requested)
            # ========================================================================
            #  - - - - - - - - -
            # Fractional Areas
            #  - - - - - - - - -
            Output_cohorts_by_year.Output_cohorts_by_year(self, time)
            #  - - - - - - - - - - - - -
            # Dominant Fractional Area
            #  - - - - - - - - - - - - - 
            Output_cohorts_by_year.dominant_cohort(self)                 # Terrestrial_Control
            Output_cohorts_by_year.dominant_fractional_plot(self, time)  # Terrestrial_Control

        # =================================
        # OUTPUT ANIMATIONS (if requested)
        # =================================
        # - - - - - - - - - - - - - - -
        # Fractional Area of Cohorts
        # - - - - - - - - - - - - - - - -
        Output_cohorts_by_year.write_Fractions_avi(self)
        Output_cohorts_by_year.write_Dominant_Cohort_avi(self) # Terrestrial_Control

        # -------------------
        # Simulation End Time
        # -------------------
        clock.finish(self)
        
        #===========================
        # Output Simulation Results
        #===========================
        if self.results_onscreen.lower() == 'yes':
            results.on_screen(self)
        if self.archive_simulation.lower() == 'yes':
            results.on_file(self)

        
        # ================
        # Archive Results
        # ================
        if self.archive_simulation.lower() == 'yes':
            #----------------------------------------------------------------------------------------------------------
            # Create the tarfile
            #----------------------------------------------------------------------------------------------------------
            self.archive_file = tarfile.open(self.control['Run_dir']+self.Output_directory+str('/Archive/')+ \
                                             self.archive_time+str('_')+self.simulation_name+".tar.gz", mode='w:gz')
            #----------------------------------------------------------------------------------------------------------
            archive.read_archive(self)
            archive.archive(self)
            
        print '----------------------------------------'
        print '        Simulation Complete             '
        print '----------------------------------------'        
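
run_atm and run_attm both branch on Simulation_area at every stage; a sketch of the same dispatch as a lookup table, which keeps the per-site entry points in one place (module and function names taken from the examples above; adding a site then means adding one dict entry):

import run_barrow
import run_tanana

SITE_INITIALIZERS = {
    'barrow': run_barrow.initialize_barrow,
    'tanana': run_tanana.initialize_tanana,
}

def initialize_site(model):
    """Dispatch site-specific initialization from a lookup table."""
    site = model.Simulation_area.lower()
    if site not in SITE_INITIALIZERS:
        raise ValueError("unknown Simulation_area: %r" % site)
    SITE_INITIALIZERS[site](model)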
Exemplo n.º 49
0
    def __init__(self):
        print
        print
        print "========================================"
        print "   Reconstruct Pure Galaxy Spectrum  "
        print "========================================"

        #=======================================================================================
        # Input
        #=======================================================================================
        print "... Import data and params"
        with archive.archive(databank_file, 'r') as ar:

            # observation choices
            self.nolines = ar['/GenerateSpectrum/nolines']  # user inputs
            self.noextinct = ar['/GenerateSpectrum/noextinct']
            self.vdisp = ar['/GenerateSpectrum/vdisp']

            # templates
            self.loglam = numpy.array(
                ar['/spectral_templates/log10_wavelength'])
            self.tspec_v0 = numpy.array(
                ar['/spectral_templates/template_spectrum_v0'])  # not smoothed
            self.tspec_v0_nl = numpy.array(
                ar['/spectral_templates/template_spectrum_v0_nolines'])
            self.tspec_v0_nd = numpy.array(
                ar['/spectral_templates/template_spectrum_v0_nodust'])
            self.tspec_v0_nd_nl = numpy.array(
                ar['/spectral_templates/template_spectrum_v0_nodust_nolines'])
            self.tspec_v300 = numpy.array(
                ar['/spectral_templates/template_spectrum_v300']
            )  # smoothed to 300kms(res)
            self.tspec_v300_nl = numpy.array(
                ar['/spectral_templates/template_spectrum_v300_nolines'])
            self.tspec_v300_nd = numpy.array(
                ar['/spectral_templates/template_spectrum_v300_nodust'])
            self.tspec_v300_nd_nl = numpy.array(
                ar['/spectral_templates/template_spectrum_v300_nodust_nolines']
            )
            self.lspec_v300 = numpy.array(ar['/spectral_templates/lspec_v300'])

            # galaxy information
            self.coeffs = ar['/gal/coeffs']
            self.z_true = ar['/gal/z_true']
            self.fiber_selection_flag = ar['/gal/fiber_selection_flag']

        print "... finished importing data"

        # =====================================================
        # QA Test -- Incoming
        Ngal = len(self.fiber_selection_flag)  # count galaxies
        Nwave = len(self.tspec_v0[0, :])  # count wavelengths

        fiber_selection_flag_shape_expected = (Ngal, )
        check = (self.fiber_selection_flag.shape ==
                 fiber_selection_flag_shape_expected)
        if check:
            print "*** Incoming QA checksum PASSED ***"
        else:
            print "XX CHECKSUM FAILED XX"
            print "fiber selection flag shape not compliant withe expectation"
            sys.exit(1)
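
The incoming QA block compares an array shape against an expectation and exits on mismatch; the same check as a small reusable helper (a sketch, keeping the example's convention of exiting with status 1):

import sys

def qa_check_shape(name, array, expected_shape):
    """Abort with a diagnostic if an array's shape differs from expectation."""
    if array.shape != expected_shape:
        print("XX CHECKSUM FAILED XX")
        print("%s shape %s not compliant with the expectation %s"
              % (name, array.shape, expected_shape))
        sys.exit(1)
    print("*** Incoming QA check PASSED for %s ***" % name)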
Exemplo n.º 50
0
def curses_main(stdscr):
    # TODO: figure out how to pass the configs in from plotman.py instead of
    # duplicating the code here.
    with open('config.yaml', 'r') as ymlfile:
        cfg = yaml.load(ymlfile, Loader=yaml.FullLoader)
    dir_cfg = cfg['directories']
    sched_cfg = cfg['scheduling']
    plotting_cfg = cfg['plotting']

    log = Log()

    plotting_active = True
    archiving_configured = 'archive' in dir_cfg
    archiving_active = archiving_configured

    (n_rows, n_cols) = map(int, stdscr.getmaxyx())

    # Page layout.  Currently requires at least ~40 rows.
    # TODO: make everything dynamically figure to best use available space
    header_height = 3
    jobs_height = 10
    dirs_height = 14
    logscreen_height = n_rows - (header_height + jobs_height + dirs_height)

    header_pos = 0
    jobs_pos = header_pos + header_height
    dirs_pos = jobs_pos + jobs_height
    logscreen_pos = dirs_pos + dirs_height

    plotting_status = '<startup>'  # todo rename these msg?
    archiving_status = '<startup>'

    refresh_period = int(sched_cfg['polling_time_s'])

    stdscr.nodelay(True)  # make getch() non-blocking
    stdscr.timeout(2000)

    header_win = curses.newwin(header_height, n_cols, header_pos, 0)
    log_win = curses.newwin(logscreen_height, n_cols, logscreen_pos, 0)
    jobs_win = curses.newwin(jobs_height, n_cols, jobs_pos, 0)
    dirs_win = curses.newwin(dirs_height, n_cols, dirs_pos, 0)

    jobs = Job.get_running_jobs(dir_cfg['log'])
    last_refresh = datetime.datetime.now()

    pressed_key = ''  # For debugging

    while True:

        # TODO: handle resizing.  Need to (1) figure out how to reliably get
        # the terminal size -- the recommended method doesn't seem to work:
        #    (n_rows, n_cols) = [int(v) for v in stdscr.getmaxyx()]
        # Consider instead:
        #    ...[int(v) for v in os.popen('stty size', 'r').read().split()]
        # and then (2) implement the logic to resize all the subwindows as above

        # stdscr.clear()
        linecap = n_cols - 1
        logscreen_height = n_rows - (header_height + jobs_height + dirs_height)

        elapsed = (datetime.datetime.now() - last_refresh).total_seconds()

        # A full refresh scans for and reads info for running jobs from
        # scratch (i.e., reread their logfiles).  Otherwise we'll only
        # initialize new jobs, and mostly rely on cached info.
        do_full_refresh = elapsed >= refresh_period

        if not do_full_refresh:
            jobs = Job.get_running_jobs(dir_cfg['log'], cached_jobs=jobs)

        else:
            last_refresh = datetime.datetime.now()
            jobs = Job.get_running_jobs(dir_cfg['log'])

            if plotting_active:
                (started,
                 msg) = manager.maybe_start_new_plot(dir_cfg, sched_cfg,
                                                     plotting_cfg)
                if (started):
                    log.log(msg)
                    plotting_status = '<just started job>'
                    jobs = Job.get_running_jobs(dir_cfg['log'],
                                                cached_jobs=jobs)
                else:
                    plotting_status = msg

            if archiving_configured and archiving_active:
                # Look for running archive jobs.  Be robust to finding more than one
                # even though the scheduler should only run one at a time.
                arch_jobs = archive.get_running_archive_jobs(
                    dir_cfg['archive'])
                if arch_jobs:
                    archiving_status = 'pid: ' + ', '.join(map(str, arch_jobs))
                else:
                    (should_start,
                     status_or_cmd) = archive.archive(dir_cfg, jobs)
                    if not should_start:
                        archiving_status = status_or_cmd
                    else:
                        cmd = status_or_cmd
                        log.log('Starting archive: ' + cmd)

                        # TODO: do something useful with output instead of DEVNULL
                        p = subprocess.Popen(cmd,
                                             shell=True,
                                             stdout=subprocess.DEVNULL,
                                             stderr=subprocess.STDOUT,
                                             start_new_session=True)

        # Directory prefixes, for abbreviation
        tmp_prefix = ''  #os.path.commonpath(dir_cfg['tmp'])
        dst_prefix = ''  #os.path.commonpath(dir_cfg['dst'])
        if archiving_configured:
            arch_prefix = dir_cfg['archive']['rsyncd_path']

        # Header
        header_win.addnstr(0, 0, 'Plotman', linecap, curses.A_BOLD)
        timestamp = datetime.datetime.now().strftime("%H:%M:%S")
        refresh_msg = "now" if do_full_refresh else f"{int(elapsed)}s/{refresh_period}"
        header_win.addnstr(f" {timestamp} (refresh {refresh_msg})", linecap)
        header_win.addnstr('  |  <P>lotting: ', linecap, curses.A_BOLD)
        header_win.addnstr(
            plotting_status_msg(plotting_active, plotting_status), linecap)
        header_win.addnstr(' <A>rchival: ', linecap, curses.A_BOLD)
        header_win.addnstr(
            archiving_status_msg(archiving_configured, archiving_active,
                                 archiving_status), linecap)

        # Oneliner progress display
        header_win.addnstr(1, 0, 'Jobs (%d): ' % len(jobs), linecap)
        header_win.addnstr('[' + reporting.job_viz(jobs) + ']', linecap)

        # These are useful for debugging.
        # header_win.addnstr('  term size: (%d, %d)' % (n_rows, n_cols), linecap)  # Debugging
        # if pressed_key:
        #     header_win.addnstr(' (keypress %s)' % str(pressed_key), linecap)
        header_win.addnstr(2, 0, 'Prefixes:', linecap, curses.A_BOLD)
        header_win.addnstr('  tmp=', linecap, curses.A_BOLD)
        header_win.addnstr(tmp_prefix, linecap)
        header_win.addnstr('  dst=', linecap, curses.A_BOLD)
        header_win.addnstr(dst_prefix, linecap)
        if archiving_configured:
            header_win.addnstr('  archive=', linecap, curses.A_BOLD)
            header_win.addnstr(arch_prefix, linecap)
        header_win.addnstr(' (remote)', linecap)

        # Jobs
        jobs_win.addstr(
            0, 0,
            reporting.status_report(jobs, n_cols, jobs_height, tmp_prefix,
                                    dst_prefix))
        jobs_win.chgat(0, 0, curses.A_REVERSE)

        # Dirs.  Collect reports as strings, then lay out.
        n_tmpdirs = len(dir_cfg['tmp'])
        n_tmpdirs_half = int(n_tmpdirs / 2)
        tmp_report_1 = reporting.tmp_dir_report(jobs, dir_cfg['tmp'],
                                                sched_cfg, n_cols, 0,
                                                n_tmpdirs_half, tmp_prefix)
        tmp_report_2 = reporting.tmp_dir_report(jobs, dir_cfg['tmp'],
                                                sched_cfg, n_cols,
                                                n_tmpdirs_half, n_tmpdirs,
                                                tmp_prefix)

        dst_report = reporting.dst_dir_report(jobs, dir_cfg['dst'], n_cols,
                                              dst_prefix)

        if archiving_configured:
            arch_report = reporting.arch_dir_report(
                archive.get_archdir_freebytes(dir_cfg['archive']), n_cols,
                arch_prefix)
            if not arch_report:
                arch_report = '<no archive dir info>'
        else:
            arch_report = '<archiving not configured>'

        tmp_h = max(len(tmp_report_1.splitlines()),
                    len(tmp_report_2.splitlines()))
        tmp_w = len(
            max(tmp_report_1.splitlines() + tmp_report_2.splitlines(),
                key=len)) + 1
        dst_h = len(dst_report.splitlines())
        dst_w = len(max(dst_report.splitlines(), key=len)) + 1
        arch_h = len(arch_report.splitlines()) + 1
        arch_w = n_cols

        tmpwin_12_gutter = 3
        tmpwin_dstwin_gutter = 6

        maxtd_h = max([tmp_h, dst_h])

        tmpwin_1 = curses.newwin(tmp_h, tmp_w, dirs_pos + int(
            (maxtd_h - tmp_h) / 2), 0)
        tmpwin_1.addstr(tmp_report_1)

        tmpwin_2 = curses.newwin(tmp_h, tmp_w, dirs_pos + int(
            (maxtd_h - tmp_h) / 2), tmp_w + tmpwin_12_gutter)
        tmpwin_2.addstr(tmp_report_2)

        tmpwin_1.chgat(0, 0, curses.A_REVERSE)
        tmpwin_2.chgat(0, 0, curses.A_REVERSE)

        dstwin = curses.newwin(
            dst_h, dst_w, dirs_pos + int((maxtd_h - dst_h) / 2),
            2 * tmp_w + tmpwin_12_gutter + tmpwin_dstwin_gutter)
        dstwin.addstr(dst_report)
        dstwin.chgat(0, 0, curses.A_REVERSE)

        #archwin = curses.newwin(arch_h, arch_w, dirs_pos + maxtd_h, 0)
        #archwin.addstr(0, 0, 'Archive dirs free space', curses.A_REVERSE)
        #archwin.addstr(1, 0, arch_report)

        # Log.  Could use a pad here instead of managing scrolling ourselves, but
        # this seems easier.
        log_win.addnstr(
            0, 0,
            ('Log: %d (<up>/<down>/<end> to scroll)\n' % log.get_cur_pos()),
            linecap, curses.A_REVERSE)
        for i, logline in enumerate(log.cur_slice(logscreen_height - 1)):
            log_win.addnstr(i + 1, 0, logline, linecap)

        stdscr.noutrefresh()
        header_win.noutrefresh()
        jobs_win.noutrefresh()
        tmpwin_1.noutrefresh()
        tmpwin_2.noutrefresh()
        dstwin.noutrefresh()
        #archwin.noutrefresh()
        log_win.noutrefresh()
        curses.doupdate()

        key = stdscr.getch()
        if key == curses.KEY_UP:
            log.shift_slice(-1)
            pressed_key = 'up'
        elif key == curses.KEY_DOWN:
            log.shift_slice(1)
            pressed_key = 'dwn'
        elif key == curses.KEY_END:
            log.shift_slice_to_end()
            pressed_key = 'end'
        elif key == ord('p'):
            plotting_active = not plotting_active
            pressed_key = 'p'
        elif key == ord('a'):
            archiving_active = not archiving_active
            pressed_key = 'a'
        elif key == ord('q'):
            break
        else:
            pressed_key = key
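
The resize TODO near the top of the loop can be handled without the stty subprocess suggested in the comment; a sketch using shutil.get_terminal_size from the Python 3 standard library (note that the COLUMNS/LINES environment variables take precedence if set):

import shutil

def current_size(fallback=(80, 40)):
    """Return (n_rows, n_cols) for the controlling terminal;
    fallback is (columns, lines), matching shutil's convention."""
    size = shutil.get_terminal_size(fallback=fallback)
    return (size.lines, size.columns)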
Exemplo n.º 51
0
def main():

    print "Started fiber allocation. Importing from data bank..."
    # load all the inputs
    data_bank = "../../../data/data_bank.h5"
    with archive.archive(data_bank, "r") as ar:
        # numbers coming from param.ini
        fiber_pitch = ar["/Fiber_Allocation/fiber_pitch"]
        fiber_size = ar["/Fiber_Allocation/fiber_size"]
        Ndiameter = ar["/Fiber_Allocation/num_fibers_on_diameter"]
        n_pass_per_tile = ar["/Fiber_Allocation/n_pass_per_tile"]
        patrol_radius = ar["/Fiber_Allocation/patrol_radius"]
        allocation_method = ar["/Fiber_Allocation/allocation_method"]

        # numbers coming from the previous step in the pipeline
        gals_x_in = ar["/gal/ra_true"]
        gals_y_in = ar["/gal/dec_true"]
        gals_id_in = ar["/gal/galaxy_index"]
        number_of_tiles = ar["/tiling/number_of_tiles"]
        tiles_centers_ra = ar["/tiling/tile_centers_ra"]
        tiles_centers_dec = ar["/tiling/tile_centers_dec"]
        galaxy_tile_id_list = ar["/gal/tile_ID"]
        tile_id_list = ar["/tiling/tile_ID"]
        survey_selection_flag = ar["/gal/survey_selection_flag"]

        survey_selection_flag = np.array(survey_selection_flag)
        gals_x_in = gals_x_in[survey_selection_flag]
        gals_y_in = gals_y_in[survey_selection_flag]
        gals_id_in = gals_id_in[survey_selection_flag]
        galaxy_tile_id_list = galaxy_tile_id_list[survey_selection_flag]

    print "...done."

    selected = np.where(galaxy_tile_id_list > 0)
    n_in_tiles = np.size(selected)
    print "There are %d galaxies in tiles" % (n_in_tiles)
    print "Min-Max RA Tiles: %f %f" % (np.amin(tiles_centers_ra), np.amax(tiles_centers_ra))
    print "Min-Max DEC Tiles: %f %f" % (np.amin(tiles_centers_dec), np.amax(tiles_centers_dec))
    print "Min-Max RA Galaxies: %f %f" % (np.amin(gals_x_in), np.amax(gals_x_in))
    print "Min-Max DEC Galaxies: %f %f" % (np.amin(gals_y_in), np.amax(gals_y_in))

    # 	plot_name = "all_galaxies"
    # 	diagplot.plot_positions(gals_x_in[selected], gals_y_in[selected], plot_name)

    # redefine the positions of the galaxies and tile centers to a 'planar' frame
    gals_x_in = gals_x_in * np.cos(gals_y_in * math.pi / 180.0)
    tiles_centers_ra = tiles_centers_ra * np.cos(tiles_centers_dec * math.pi / 180.0)

    # initialize full galaxy structure
    all_gals = AM.MockGalaxyCatalog(x_in=gals_x_in, y_in=gals_y_in, id_in=gals_id_in)

    fraction_allocated = np.empty((0))

    # loop over the tiles
    for ra_center, dec_center, tile_id in zip(tiles_centers_ra, tiles_centers_dec, tile_id_list):
        # create the unperturbed set of fibers for this pass in this tile
        fibers = AM.FiberSet(
            Ndiameter=Ndiameter, fiber_pitch=fiber_pitch, fiber_size=fiber_size, center_x=ra_center, center_y=dec_center
        )

        # flag (tile_ID = -1) the galaxies that fall inside this tile and are not yet assigned
        index_inside = np.where((galaxy_tile_id_list == tile_id) & (all_gals.tile_ID < 0))
        all_gals.tile_ID[index_inside] = -1

        if np.size(index_inside) > 0:
            print "Galaxies to allocate", np.size(index_inside)

            # Finally. Make the allocation!
            AM.make_fiber_allocation(
                fibers,
                all_gals,
                tile_ID=tile_id,
                visit_ID=1,
                rank_criterion=allocation_method,
                patrol_radius=fibers.fiber_pitch,
                exclusion_radius=fibers.fiber_size * 0.5,
            )

            n_alloc = np.where(all_gals.fiber_ID > 0)

            # update the number of allocated galaxies for a control plot
        gal_alloc = np.where(all_gals.fiber_ID > 0)
        n_total = np.size(all_gals.fiber_ID)
        n_alloc = np.size(gal_alloc)
        fraction_allocated = np.append(fraction_allocated, (1.0 * n_alloc / (1.0 * n_total)))

    # final stats
    gal_alloc = np.where(all_gals.fiber_ID > 0)
    n_total = np.size(all_gals.fiber_ID)
    n_alloc = np.size(gal_alloc)

    print "Allocation efficiency", (1.0 * n_alloc) / (1.0 * n_total)
    print "Initial number of galaxies (after target selection)", n_total
    print "Initial number of galaxies (in tiles)", n_in_tiles
    print "Galaxies that were allocated:", n_alloc

    inner_fiber_selection_flag = np.array((all_gals.fiber_ID > 0))
    print "survey selection:", survey_selection_flag.shape, np.sum(survey_selection_flag)
    print "inner fiber selection:", inner_fiber_selection_flag.shape, np.sum(inner_fiber_selection_flag)

    fiber_selection_flag = survey_selection_flag
    fiber_selection_flag[survey_selection_flag] = inner_fiber_selection_flag
    print "fiber selection:", fiber_selection_flag.shape, np.sum(fiber_selection_flag)

    # Write result to data bank
    with archive.archive(data_bank, "a") as ar:
        ar["/gal/tile_id"] = all_gals.tile_ID
        ar["/gal/visit_id"] = all_gals.visit_ID
        ar["/gal/fiber_id"] = all_gals.fiber_ID
        ar["/gal/fiber_selection_flag"] = fiber_selection_flag

    # =====================================================
    # Diagnostics
    # =====================================================
    print "... diagnostic plots"

    # fiberid distribution
    try:
        check_fiber_sel = np.where(fiber_selection_flag)[0]  # indices of fiber-selected galaxies
        fiber_id_sub = all_gals.fiber_ID[check_fiber_sel]
        width, center, bin_edges, hist = Utilities.make_histogram(fiber_id_sub, int(float(n_total) / 10.0))  # one bin per ~10 galaxies; n_total defined above
        Plotters.plot_fa_hist_fiberid(
            center, width, hist, fig_number=0, plot_number=0, title="n(fiberid)", base_directory=".", show_plot=True
        )
    except (RuntimeError, TypeError, NameError):
        print "bad plot"
        pass

    # galaxy positions for those that are allocated (ra/dec position plot)
    try:
        Plotters.plot_fa_gal_position(
            all_gals.x[gal_alloc],
            all_gals.y[gal_alloc],
            fig_number=1,
            plot_number=0,
            title="galaxy position",
            base_directory=".",
            show_plot=True,
        )
    except (RuntimeError, TypeError, NameError):
        print "bad plot"
        pass

    # fraction allocated vs. tile number
    try:
        tile_list = np.arange(np.size(fraction_allocated))
        Plotters.plot_fa_completeness(
            tile_list,
            fraction_allocated,
            fig_number=2,
            plot_number=0,
            title="fiber completeness",
            base_directory=".",
            show_plot=False,
        )
    except (RuntimeError, TypeError, NameError):
        print "bad plot"
        pass
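
The flag composition near the end of this example (fiber_selection_flag defined on the survey-selected subset, expanded back to the full catalog) is a general pattern worth isolating; a sketch with a defensive copy, since the example above mutates survey_selection_flag in place through the alias:

import numpy as np

def compose_selection(outer_flag, inner_flag):
    """Expand a flag defined on the outer-selected subset back to the full
    catalog: True only where both selections hold."""
    combined = np.array(outer_flag, copy=True)   # avoid mutating the input
    combined[outer_flag] = inner_flag
    return combined

compose_selection(survey_selection_flag, inner_fiber_selection_flag) reproduces fiber_selection_flag without the side effect on survey_selection_flag.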