def __init__(self, total, msg, color='default', file=None):
    if total is None:
        super(_MultiProgressBarOrSpinner, self).__init__(
            total, msg, color=color, file=file)
    else:
        self._is_spinner = False
        color_print(msg, color, file=file)
        self._obj = _MultiProgressBar(total, file=file)

def test_color_print3():
    # Test that this thinks the FakeTTY is a tty and applies colors.
    stream = FakeTTY()
    console.color_print("foo", "green", file=stream)
    assert stream.getvalue() == '\x1b[0;32mfoo\x1b[0m\n'

    stream = FakeTTY()
    console.color_print("foo", "green", "bar", "red", "baz", file=stream)
    assert stream.getvalue() == '\x1b[0;32mfoo\x1b[0m\x1b[0;31mbar\x1b[0mbaz\n'

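# The test above relies on a FakeTTY helper that is not shown in this
# snippet. A minimal sketch of what such a helper could look like, assuming
# a StringIO subclass that merely claims to be a terminal:
import io

class FakeTTY(io.StringIO):
    """A StringIO buffer that reports itself as a TTY, so that
    color_print() emits ANSI escape codes into it."""

    def isatty(self):
        return True
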
def test_color_print2():
    # Test that this automatically detects that io.StringIO is
    # not a tty
    stream = io.StringIO()
    console.color_print("foo", "green", file=stream)
    assert stream.getvalue() == 'foo\n'

    stream = io.StringIO()
    console.color_print("foo", "green", "bar", "red", "baz", file=stream)
    assert stream.getvalue() == 'foobarbaz\n'

def list_remote(self, prefix=None):
    if not prefix:
        prefix = self.prefix

    blobs = self.storage.list_blobs(prefix=prefix)
    for blob in blobs:
        self.logger.debug(blob)
        console.color_print(blob.name)

    return blobs

def print_params(parnames, params):
    color_print("\nAverage parameters", "yellow")
    color_print("------------------", "yellow")
    color_print("Weighted by number of repetitions in the stack")

    average_params = np.median(params, axis=0)
    for name, par in zip(parnames, average_params):
        color_print(f"{name}: {par:.04f}", "white")

    return average_params

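# A minimal usage sketch for print_params(), assuming a small hypothetical
# parameter stack (rows are repetitions, columns follow parnames):
import numpy as np

parnames = ["amplitude", "slope"]
params = np.array([[1.02, 0.48],
                   [0.98, 0.52],
                   [1.00, 0.50]])

avg = print_params(parnames, params)  # prints the column-wise medians
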
def _update_console(self, value=None):
    """
    Update the progress bar to the given value (out of the total
    given to the constructor).
    """
    if self._total == 0:
        frac = 1.0
    else:
        frac = float(value) / float(self._total)

    file = self._file
    write = file.write

    if frac > 1:
        bar_fill = int(self._bar_length)
    else:
        bar_fill = int(float(self._bar_length) * frac)
    write('\r|')
    color_print('=' * bar_fill, 'blue', file=file, end='')
    if bar_fill < self._bar_length:
        color_print('>', 'green', file=file, end='')
        write('-' * (self._bar_length - bar_fill - 1))
    write('|')

    if value >= self._total:
        t = time.time() - self._start_time
        prefix = ' '
    elif value <= 0:
        t = None
        prefix = ''
    else:
        t = ((time.time() - self._start_time) * (1.0 - frac)) / frac
        prefix = ' ETA '
    write(' {0:>4s}/{1:>4s}'.format(
        human_file_size(value),
        self._human_total))
    write(' ({0:>6s}%)'.format('{0:.2f}'.format(frac * 100.0)))
    write(prefix)
    if t is not None:
        write(human_time(t))
    self._file.flush()

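# For context, the private _update_console() above is driven by astropy's
# public progress-bar API; a short sketch exercising it directly:
import time
from astropy.utils.console import ProgressBar

with ProgressBar(100) as bar:      # total of 100 work items
    for _ in range(100):
        time.sleep(0.01)           # stand-in for real work
        bar.update()               # redraws the console line shown above
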
def print_stats(cr, cr_err, snr, snr_err, texp, flux, flux_err,
                ebands=["6", "7", "8"]):
    color_print("\nStatistics", "yellow")
    color_print("----------", "yellow")

    for i, eband in enumerate(ebands):
        idx_max = np.argmax(cr[:, i])
        cr_peak = cr[idx_max, i]
        cr_peak_mad = cr_err[idx_max, i]
        texp_peak = texp[idx_max, i]

        idx_max = np.argmax(snr[:, i])
        snr_peak = snr[idx_max, i]
        snr_peak_mad = snr_err[idx_max, i]

        color_print(f"Energy band {eband}:", "white")
        print(f"Median net CR at peak: {cr_peak:.01e} ± {cr_peak_mad:.01e} counts/s")
        print(f"Median exposure time at peak: {texp_peak:.01e} s")
        if flux is not None:
            f, ferr = flux[i], flux_err[i]
            print(f"Median flux: {f:.01e} ± {ferr:.01e} erg/s/cm-2")
        print(f"Median SNR at peak: {snr_peak:.01f} ± {snr_peak_mad:.01f}\n")

def _save_coverage(cov, result, rootdir, testing_path):
    """
    This method is called after the tests have been run in coverage mode
    to cleanup and then save the coverage data and report.
    """
    from astropy.utils.console import color_print

    if result != 0:
        return

    # The coverage report includes the full path to the temporary
    # directory, so we replace all the paths with the true source
    # path. Note that this will not work properly for packages that still
    # rely on 2to3.
    try:
        # Coverage 4.0: _harvest_data has been renamed to get_data, the
        # lines dict is private
        cov.get_data()
    except AttributeError:
        # Coverage < 4.0
        cov._harvest_data()
        lines = cov.data.lines
    else:
        lines = cov.data._lines

    for key in list(lines.keys()):
        new_path = os.path.relpath(
            os.path.realpath(key),
            os.path.realpath(testing_path))
        new_path = os.path.abspath(
            os.path.join(rootdir, new_path))
        lines[new_path] = lines.pop(key)

    color_print('Saving coverage data in .coverage...', 'green')
    cov.save()

    color_print('Saving HTML coverage report in htmlcov...', 'green')
    cov.html_report(directory=os.path.join(rootdir, 'htmlcov'))

def do_status(self, *arg):
    """ Get the entire system status and print it pretty like! """
    if self._keep_looping:
        console.color_print("{:>12s}: ".format('Loop Timer'),
                            "default", "active", "lightgreen")
    else:
        console.color_print("{:>12s}: ".format('Loop Timer'),
                            "default", "inactive", "yellow")

    for sensor_name in ['control_board', 'camera_board', 'weather']:
        if sensor_name in self.active_sensors:
            console.color_print("{:>12s}: ".format(sensor_name.title()),
                                "default", "active", "lightgreen")
        else:
            console.color_print("{:>12s}: ".format(sensor_name.title()),
                                "default", "inactive", "yellow")

def main(unit_id=None, upload=True, bucket='unit_sensors', **kwargs):
    # A plain assertion message; the original wrapped it in warnings.warn(),
    # which fired unconditionally and left the assert message as None.
    assert unit_id is not None, "Must supply PANOPTES unit id, e.g. PAN001"

    console.color_print('Connecting to mongo')
    db = PanMongo()

    console.color_print('Exporting data')
    archived_files = db.export(**kwargs)

    if upload:
        storage = PanStorage(unit_id=unit_id, bucket=bucket)
        console.color_print("Uploading files:")
        for f in archived_files:
            r_fn = storage.upload(f)
            console.color_print("\t{:40s}".format(f), 'green',
                                "\t->\t", 'red', r_fn, 'blue')

def emit(self, record):
    '''
    The formatter for stderr
    '''
    # Import utils.console only if necessary and at the latest because
    # the import takes a significant time [#4649]
    from astropy.utils.console import color_print

    if record.levelno <= logging.INFO:
        stream = sys.stdout
    else:
        stream = sys.stderr

    if record.levelno < logging.INFO:
        color_print(record.levelname, 'magenta', end='', file=stream)
    elif record.levelno < logging.WARN:
        color_print(record.levelname, 'green', end='', file=stream)
    elif record.levelno < logging.ERROR:
        color_print(record.levelname, 'brown', end='', file=stream)
    else:
        color_print(record.levelname, 'red', end='', file=stream)

    record.message = "{0}".format(record.msg)
    print(": " + record.message, file=stream)

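# A sketch of how an emit() like the one above could be wired into the
# standard logging machinery; the ColorStreamHandler host class is an
# assumption (the enclosing class is not shown in the snippet):
import logging
import sys
from astropy.utils.console import color_print

class ColorStreamHandler(logging.StreamHandler):
    """Hypothetical handler mirroring the emit() above in compact form."""

    def emit(self, record):
        stream = sys.stdout if record.levelno <= logging.INFO else sys.stderr
        color = {logging.DEBUG: 'magenta', logging.INFO: 'green',
                 logging.WARNING: 'brown'}.get(record.levelno, 'red')
        color_print(record.levelname, color, end='', file=stream)
        print(": " + record.getMessage(), file=stream)

logger = logging.getLogger("demo")
logger.addHandler(ColorStreamHandler())
logger.warning("disk nearly full")  # emits a colored "WARNING: ..." line
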
    sys.exit(1)

path = sys.argv[1]
lista = sys.argv[2]
nprocs = get_XYtoRADEC()

if path[-1] != '/':
    path = path + '/'

# Transform XY to RA/DEC
def XYtoRADEC(ep):
    ffn, cfn = ep
    id, x, y, mag, err = np.loadtxt(path + cfn, usecols=range(5),
                                    skiprows=3, unpack=True)
    id = id.astype(int)

    hdr = pf.open(path + ffn)[0].header
    w = wcs.WCS(hdr)
    ra, dec = np.transpose(w.wcs_pix2world(np.transpose([x, y]), 1))

    head = 'ID RA DEC X Y MAG ERR'
    fmt = '%d %.7f %.7f %.3f %.3f %.3f %.3f'
    np.savetxt(ffn.replace('fits', 'dat'),
               np.transpose([id, ra, dec, x, y, mag, err]),
               header=head, fmt=fmt)

catalog = np.genfromtxt(lista, unpack=True, dtype='string')
fits = np.array([f.split('.')[0] + '.fits' for f in catalog])

color_print('-Obteniendo RADEC...', 'cyan')
barra(XYtoRADEC, np.transpose([fits, catalog]), 1)

)
parser.add_argument('--onlyjpg', action='store_true')

args = parser.parse_args()

rank = MPI.COMM_WORLD.rank
size = MPI.COMM_WORLD.size

catalog = pd.read_csv(args.Targets, names=['ID', 'ra', 'dec', 'Tmag'],
                      skiprows=1)
tics = np.array(catalog['ID'])
ra = np.array(catalog['ra'])
dec = np.array(catalog['dec'])

color_print('Trying %d targets for Sector %d' % (len(tics), args.Sector),
            'lightcyan')

def FFICut(ffis, x, y, size):
    ncads = len(ffis['FFIs'])
    x = int(x)
    y = int(y)

    xshape = ffis['FFIs'].shape[1]
    yshape = ffis['FFIs'].shape[2]

    x1 = np.max([0, x - size // 2])
    x2 = np.min([xshape, x + size // 2 + 1])
    y1 = np.max([0, y - size // 2])
    y2 = np.min([yshape, y + size // 2 + 1])

def call_vo_service(service_type, catalog_db=None, pedantic=None,
                    verbose=True, cache=True, kwargs={}):
    """
    Makes a generic VO service call.

    Parameters
    ----------
    service_type : str
        Name of the type of service, e.g., 'conesearch_good'.
        Used in error messages and to select a catalog database
        if ``catalog_db`` is not provided.

    catalog_db
        May be one of the following, in order from easiest to
        use to most control:

        - `None`: A database of ``service_type`` catalogs is downloaded
          from ``astroquery.vo_conesearch.conf.vos_baseurl``. The first
          catalog in the database to successfully return a result is used.

        - *catalog name*: A name in the database of ``service_type``
          catalogs at ``astroquery.vo_conesearch.conf.vos_baseurl`` is
          used. For a list of acceptable names, use :func:`list_catalogs`.

        - *url*: The prefix of a URL to an IVOA Service for
          ``service_type``. Must end in either '?' or '&'.

        - :class:`VOSCatalog` object: A specific catalog manually
          downloaded and selected from the database
          (see :ref:`vo-sec-client-vos`).

        - Any of the above 3 options combined in a list, in which case
          they are tried in order.

    pedantic : bool or `None`
        When `True`, raise an error when the file violates the spec,
        otherwise issue a warning. Warnings may be controlled using
        :py:mod:`warnings` module. When not provided, uses the
        configuration setting ``astroquery.vo_conesearch.conf.pedantic``,
        which defaults to `False`.

    verbose : bool
        Verbose output.

    cache : bool
        Use caching for VO Service database. Access to actual VO
        websites referenced by the database still needs internet
        connection.

    kwargs : dictionary
        Keyword arguments to pass to the catalog service.
        No checking is done that the arguments are accepted by
        the service, etc.

    Returns
    -------
    obj : `astropy.io.votable.tree.Table`
        First table from first successful VO service request.

    Raises
    ------
    VOSError
        If VO service request fails.
    """
    n_timed_out = 0
    catalogs = _get_catalogs(service_type, catalog_db, cache=cache,
                             verbose=verbose)

    if pedantic is None:  # pragma: no cover
        pedantic = conf.pedantic

    for name, catalog in catalogs:
        if isinstance(catalog, six.string_types):
            if catalog.startswith('http'):
                url = catalog
            else:
                remote_db = get_remote_catalog_db(service_type, cache=cache,
                                                  verbose=verbose)
                catalog = remote_db.get_catalog(catalog)
                url = catalog['url']
        else:
            url = catalog['url']

        if verbose:  # pragma: no cover
            color_print('Trying {0}'.format(url), 'green')

        try:
            return _vo_service_request(url, pedantic, kwargs, cache=cache,
                                       verbose=verbose)
        except Exception as e:
            vo_warn(W25, (url, str(e)))
            if hasattr(e, 'reason') and isinstance(e.reason, socket.timeout):
                n_timed_out += 1

    err_msg = 'None of the available catalogs returned valid results.'
    if n_timed_out > 0:
        err_msg += ' ({0} URL(s) timed out.)'.format(n_timed_out)
    raise VOSError(err_msg)

def horoscope(birthday, corrected=True, chinese=False):
    """
    Enter your birthday as an `astropy.time.Time` object and
    receive a mystical horoscope about things to come.

    Parameters
    ----------
    birthday : `astropy.time.Time` or str
        Your birthday as a `datetime.datetime` or `astropy.time.Time` object
        or "YYYY-MM-DD" string.
    corrected : bool
        Whether to account for the precession of the Earth instead of using
        the ancient Greek dates for the signs. After all, you do want your
        *real* horoscope, not a cheap inaccurate approximation, right?
    chinese : bool
        Chinese annual zodiac wisdom instead of Western one.

    Returns
    -------
    Infinite wisdom, condensed into astrologically precise prose.

    Notes
    -----
    This function was implemented on April 1. Take note of that date.
    """
    from bs4 import BeautifulSoup

    today = datetime.now()
    err_msg = "Invalid response from celestial gods (failed to load horoscope)."
    headers = {'User-Agent': 'foo/bar'}

    special_words = {
        '([sS]tar[s^ ]*)': 'yellow',
        '([yY]ou[^ ]*)': 'magenta',
        '([pP]lay[^ ]*)': 'blue',
        '([hH]eart)': 'red',
        '([fF]ate)': 'lightgreen',
    }

    if isinstance(birthday, str):
        birthday = datetime.strptime(birthday, '%Y-%m-%d')

    if chinese:
        # TODO: Make this more accurate by using the actual date, not just year
        # Might need third-party tool like https://pypi.python.org/pypi/lunardate
        zodiac_sign = _get_zodiac(birthday.year)
        url = ('https://www.horoscope.com/us/horoscopes/yearly/'
               '{}-chinese-horoscope-{}.aspx'.format(today.year, zodiac_sign))
        summ_title_sfx = f'in {today.year}'

        try:
            res = Request(url, headers=headers)
            with urlopen(res) as f:
                try:
                    doc = BeautifulSoup(f, 'html.parser')
                    # TODO: Also include Love, Family & Friends, Work, Money, More?
                    item = doc.find(id='overview')
                    desc = item.getText()
                except Exception:
                    raise CelestialError(err_msg)
        except Exception:
            raise CelestialError(err_msg)
    else:
        birthday = atime.Time(birthday)

        if corrected:
            with warnings.catch_warnings():
                warnings.simplefilter('ignore')  # Ignore ErfaWarning
                zodiac_sign = get_sun(birthday).get_constellation().lower()
            zodiac_sign = _CONST_TO_SIGNS.get(zodiac_sign, zodiac_sign)
            if zodiac_sign not in _VALID_SIGNS:
                raise HumanError(
                    'On your birthday the sun was in {}, which is not '
                    'a sign of the zodiac. You must not exist. Or '
                    'maybe you can settle for '
                    'corrected=False.'.format(zodiac_sign.title()))
        else:
            zodiac_sign = get_sign(birthday.to_datetime())
        url = f"http://www.astrology.com/us/horoscope/daily-overview.aspx?sign={zodiac_sign}"
        summ_title_sfx = 'on {}'.format(today.strftime("%Y-%m-%d"))

        res = Request(url, headers=headers)
        with urlopen(res) as f:
            try:
                doc = BeautifulSoup(f, 'html.parser')
                item = doc.find('span', {'class': 'date'})
                desc = item.parent.getText()
            except Exception:
                raise CelestialError(err_msg)

    print("*" * 79)
    color_print("Horoscope for {} {}:".format(
        zodiac_sign.capitalize(), summ_title_sfx), 'green')
    print("*" * 79)
    for block in textwrap.wrap(desc, 79):
        split_block = block.split()
        for i, word in enumerate(split_block):
            for re_word in special_words.keys():
                match = re.search(re_word, word)
                if match is None:
                    continue
                split_block[i] = _color_text(match.groups()[0],
                                             special_words[re_word])
        print(" ".join(split_block))

def test_color_print_unicode():
    console.color_print("überbær", "red")

def make_validation_report(
        urls=None, destdir='astropy.io.votable.validator.results',
        multiprocess=True, stilts=None):
    """
    Validates a large collection of web-accessible VOTable files.

    Generates a report as a directory tree of HTML files.

    Parameters
    ----------
    urls : list of strings, optional
        If provided, is a list of HTTP urls to download VOTable files
        from. If not provided, a built-in set of ~22,000 urls
        compiled by HEASARC will be used.

    destdir : path, optional
        The directory to write the report to. By default, this is a
        directory called ``'results'`` in the current directory. If the
        directory does not exist, it will be created.

    multiprocess : bool, optional
        If `True` (default), perform validations in parallel using all
        of the cores on this machine.

    stilts : path, optional
        To perform validation with ``votlint`` from the Java-based
        `STILTS <http://www.star.bris.ac.uk/~mbt/stilts/>`_ VOTable
        parser, in addition to `astropy.io.votable`, set this to the
        path of the ``'stilts.jar'`` file. ``java`` on the system shell
        path will be used to run it.

    Notes
    -----
    Downloads of each given URL will be performed only once and cached
    locally in *destdir*. To refresh the cache, remove *destdir*
    first.
    """
    from astropy.utils.console import (color_print, ProgressBar, Spinner)

    if stilts is not None:
        if not os.path.exists(stilts):
            raise ValueError(
                '{0} does not exist.'.format(stilts))

    destdir = os.path.abspath(destdir)

    if urls is None:
        with Spinner('Loading URLs', 'green') as s:
            urls = get_urls(destdir, s)
    else:
        color_print('Marking URLs', 'green')
        for url in ProgressBar.iterate(urls):
            with result.Result(url, root=destdir) as r:
                r['expected'] = type

    args = [(url, destdir) for url in urls]

    color_print('Downloading VO files', 'green')
    ProgressBar.map(
        download, args, multiprocess=multiprocess)

    color_print('Validating VO files', 'green')
    ProgressBar.map(
        validate_vo, args, multiprocess=multiprocess)

    if stilts is not None:
        color_print('Validating with votlint', 'green')
        votlint_args = [(stilts, x, destdir) for x in urls]
        ProgressBar.map(
            votlint_validate, votlint_args, multiprocess=multiprocess)

    color_print('Generating HTML files', 'green')
    ProgressBar.map(
        write_html_result, args, multiprocess=multiprocess)

    with Spinner('Grouping results', 'green') as s:
        subsets = result.get_result_subsets(urls, destdir, s)

    color_print('Generating index', 'green')
    html.write_index(subsets, urls, destdir)

    color_print('Generating subindices', 'green')
    subindex_args = [(subset, destdir, len(urls)) for subset in subsets]
    ProgressBar.map(
        write_subindex, subindex_args, multiprocess=multiprocess)

import os

import astropy.units as u
import matplotlib.pyplot as plt

from ..custom import *
from .test_cbuiltin import PotentialTestBase

top_path = "plots/"
plot_path = os.path.join(top_path, "tests/potential")
if not os.path.exists(plot_path):
    os.makedirs(plot_path)

from astropy.utils.console import color_print
print()
color_print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~",
            "yellow")
color_print("To view plots:", "green")
print(" open {}".format(plot_path))
color_print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~",
            "yellow")

class TestPW14Potential(PotentialTestBase):
    units = (u.kpc, u.M_sun, u.Myr, u.radian)

    def setup(self):
        print("\n\n")
        print("=" * 50)
        print(self.__class__.__name__)

def main(): """Command-line interface for starlist parsing and verification.""" import argparse import sys from astropy.utils.console import color_print parser = argparse.ArgumentParser(description="A Keck starlist parsing and verification tool", epilog="Parsing will be done in the 'lenient mode', with problems emitted to stderr. A correctly formatted starlist for each line, when available, will be printed to stdout, so that output can be piped into a clean starlist file.") parser.add_argument("starlist", metavar="starlist.txt", help="starlist filename", type=argparse.FileType('r'), default='starlist.txt') parser.add_argument("-o", dest='output', help="output filename", type=argparse.FileType("w"), default="-") parser.add_argument("--no-messages", dest='messages', action='store_false', help="suppress lint messages") opt = parser.parse_args() n_messages = 0 all_messages = [] for n, line in enumerate(opt.starlist): if line.startswith("#") or len(line.strip()) == 0: opt.output.write(line) opt.output.flush() else: identifier = "{!r} line {:d}".format(opt.starlist.name, n+1) try: messages = verify_starlist_line(line, identifier=identifier) except ValueError as e: messages = ["[ERROR] {1} [{0}]".format(identifier, e)] if len(messages): all_messages.append((n, line, messages)) n_messages += len(messages) try: formatted_line = format_starlist_line(*parse_starlist_line(line)) + "\n" except ValueError: formatted_line = line opt.output.write("# WARNING {0:s} couldn't parse next line.\n".format(os.path.basename(sys.argv[0]))) opt.output.write(formatted_line) opt.output.flush() if opt.messages: color_print("Starlist Lint {0:s}".format(__version__), 'green', file=sys.stderr, end="") sys.stderr.write(" for '{1:s}'\n".format(__version__, opt.starlist.name)) sys.stderr.flush() if not len(all_messages): color_print("No problems found.", 'green', file=sys.stderr) else: color_print("{0:d} problems found.".format(len(all_messages)), 'yellow', file=sys.stderr) for n, line, messages in all_messages: color_print("[line {0:d}] ".format(n), 'cyan', file=sys.stderr, end="") color_print("=>", 'blue', file=sys.stderr, end="") sys.stderr.write(" '{}'\n".format(line.strip("\n"))) for message in messages: if message.startswith("[ERROR]"): color_print(message[:len("[ERROR]")], 'red', file=sys.stderr, end="") sys.stderr.write(message[len("[ERROR]"):]) sys.stderr.write("\n") elif message.startswith("[WARNING]"): color_print(message[:len("[WARNING]")], 'yellow', file=sys.stderr, end="") sys.stderr.write(message[len("[WARNING]"):]) sys.stderr.write("\n") else: sys.stderr.write(message) sys.stderr.write("\n") sys.stderr.flush() return n_messages
def test_color_print_invalid_color():
    console.color_print("foo", "unknown")

    return a + b * x + c * y

def quad(coords, a, b, c, d, e, f):
    x, y = coords
    return a + b * x + c * y + d * np.square(x) + e * np.multiply(
        x, y) + f * np.square(y)

# PIPELINE
folder = sys.argv[1]

makedir(match_folder)
makedir(match_master)
makedir(pm_folder)

color_print('Leyendo informacion de epocas', 'cyan')
se, el, yr = np.genfromtxt(folder + 'zinfo_img', unpack=True,
                           usecols=(4, 5, 6))
name = np.genfromtxt(folder + 'zinfo_img', unpack=True, usecols=(0,),
                     dtype='string')

k_mask = np.array(['k' in f for f in name])
sek, elk, yrk = np.transpose([se, el, yr])[k_mask].T
yrk = (yrk - yrk[0]) / 365.242199

color_print('Recopilando archivos de epocas...', 'cyan')
epochs = glob.glob('./%s/*.*' % match_folder)

color_print('Realizando match de la MF con las epocas', 'cyan')

def linearPM(dates, PM, zero_point):
    return dates * PM + zero_point

# Folders
results = folder.replace('/data', '').split('_')[0] + '/'
sm_path = 'stars_motion/'
match_path = 'matched_epochs/'

makedir(results + sm_path)

##
## PIPELINE
##

# Get the epochs
color_print('Buscando archivos de epocas...', 'cyan')
k_catalog = np.sort([f for f in os.listdir(folder)
                     if 'k' in f and f.endswith('.dao')])
k_fits = np.sort([f for f in os.listdir(folder)
                  if 'k' in f and f.endswith('.fits')])

# Get the corresponding years
color_print('Leyendo BJD de cada epoca...', 'cyan')
dates = np.zeros(k_fits.size)
with ProgressBar(k_fits.size) as bar:
    for i in range(k_fits.size):
        hdu = fits.open(folder + k_fits[i])
        date = hdu[0].header['MJD-OBS']
        dates[i] += date
        bar.update()
dates = (dates - dates[0]) / 365.25

def validate(source, output=sys.stdout, xmllint=False, filename=None):
    """
    Prints a validation report for the given file.

    Parameters
    ----------
    source : str or readable file-like object
        Path to a VOTABLE_ xml file or pathlib.path
        object having Path to a VOTABLE_ xml file.

    output : writable file-like object, optional
        Where to output the report. Defaults to ``sys.stdout``.
        If `None`, the output will be returned as a string.

    xmllint : bool, optional
        When `True`, also send the file to ``xmllint`` for schema and
        DTD validation. Requires that ``xmllint`` is installed. The
        default is `False`. ``source`` must be a file on the local
        filesystem in order for ``xmllint`` to work.

    filename : str, optional
        A filename to use in the error messages. If not provided, one
        will be automatically determined from ``source``.

    Returns
    -------
    is_valid : bool or str
        Returns `True` if no warnings were found. If ``output`` is
        `None`, the return value will be a string.
    """
    from astropy.utils.console import print_code_line, color_print

    # The original defaulted output to sys.stdout only after a None check,
    # which left return_as_str permanently False; the signature default and
    # the flag below restore the string-return path promised by the docstring.
    return_as_str = False
    if output is None:
        output = io.StringIO()
        return_as_str = True

    lines = []
    votable = None

    reset_vo_warnings()

    with data.get_readable_fileobj(source, encoding='binary') as fd:
        content = fd.read()
    content_buffer = io.BytesIO(content)
    content_buffer.seek(0)

    if filename is None:
        if isinstance(source, str):
            filename = source
        elif hasattr(source, 'name'):
            filename = source.name
        elif hasattr(source, 'url'):
            filename = source.url
        else:
            filename = "<unknown>"

    with warnings.catch_warnings(record=True) as warning_lines:
        warnings.resetwarnings()
        warnings.simplefilter("always", exceptions.VOWarning, append=True)
        try:
            votable = parse(content_buffer, verify='warn', filename=filename)
        except ValueError as e:
            lines.append(str(e))

    lines = [str(x.message) for x in warning_lines
             if issubclass(x.category, exceptions.VOWarning)] + lines

    content_buffer.seek(0)
    output.write("Validation report for {0}\n\n".format(filename))

    if len(lines):
        xml_lines = iterparser.xml_readlines(content_buffer)

        for warning in lines:
            w = exceptions.parse_vowarning(warning)

            if not w['is_something']:
                output.write(w['message'])
                output.write('\n\n')
            else:
                line = xml_lines[w['nline'] - 1]
                warning = w['warning']
                if w['is_warning']:
                    color = 'yellow'
                else:
                    color = 'red'
                color_print(
                    '{0:d}: '.format(w['nline']), '',
                    warning or 'EXC', color,
                    ': ', '',
                    textwrap.fill(
                        w['message'],
                        initial_indent=' ',
                        subsequent_indent=' ').lstrip(),
                    file=output)
                print_code_line(line, w['nchar'], file=output)
            output.write('\n')
    else:
        output.write('astropy.io.votable found no violations.\n\n')

    success = 0
    if xmllint and os.path.exists(filename):
        from . import xmlutil

        if votable is None:
            version = "1.1"
        else:
            version = votable.version
        success, stdout, stderr = xmlutil.validate_schema(filename, version)

        if success != 0:
            output.write('xmllint schema violations:\n\n')
            output.write(stderr.decode('utf-8'))
        else:
            output.write('xmllint passed\n')

    if return_as_str:
        return output.getvalue()
    return len(lines) == 0 and success == 0

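# A quick usage sketch for validate(); 'example.vot' is a hypothetical
# local VOTable file:
from astropy.io.votable import validate

is_valid = validate('example.vot')             # report goes to stdout
report = validate('example.vot', output=None)  # report returned as a string
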
    head = 'ID RA DEC X Y MAG'
    fmt = '%d %f %f %.3f %.3f %.3f'

    of = ep.split('/')[-1].replace('.dao', '.dat')
    np.savetxt('./%s/%s' % (radec_folder, of),
               np.transpose([rid, ra, dec, rx, ry, rmag]),
               header=head, fmt=fmt)

# PIPELINE
data_folder = sys.argv[1]
if not data_folder.endswith('/'):
    data_folder += '/'

makedir(radec_folder)
makedir(match_folder)

# Look for .dao files in the folder
color_print('Buscando archivos .dao...', 'cyan')
k_files = np.sort(glob.glob('%s*k*.dao' % data_folder))
j_files = np.sort(glob.glob('%s*j*.dao' % data_folder))
print('\tEncontradas %d epocas en Ks' % len(k_files))
print('\tEncontradas %d epocas en J' % len(j_files))

# Load the seeing, years, etc. info
color_print('Obteniendo informacion de seeing y BJD...', 'cyan')
eps = np.genfromtxt(data_folder + 'zinfo_img', unpack=True, usecols=(0,),
                    dtype='string')
se, el, yr = np.genfromtxt(data_folder + 'zinfo_img', unpack=True,
                           usecols=(4, 5, 6), dtype=np.float64)

info_k = np.char.find(eps, 'k') > -1
info_j = np.char.find(eps, 'j') > -1
keps = eps[info_k]
jeps = eps[info_j]

def out(msg):
    if args.verbose:
        console.color_print(msg)

def print_info(msg):
    console.color_print(msg, 'lightgreen')

def validate(source, output=sys.stdout, xmllint=False, filename=None):
    """
    Prints a validation report for the given file.

    Parameters
    ----------
    source : str or readable file-like object
        Path to a VOTABLE_ xml file or pathlib.path
        object having Path to a VOTABLE_ xml file.

    output : writable file-like object, optional
        Where to output the report. Defaults to ``sys.stdout``.
        If `None`, the output will be returned as a string.

    xmllint : bool, optional
        When `True`, also send the file to ``xmllint`` for schema and
        DTD validation. Requires that ``xmllint`` is installed. The
        default is `False`. ``source`` must be a file on the local
        filesystem in order for ``xmllint`` to work.

    filename : str, optional
        A filename to use in the error messages. If not provided, one
        will be automatically determined from ``source``.

    Returns
    -------
    is_valid : bool or str
        Returns `True` if no warnings were found. If ``output`` is
        `None`, the return value will be a string.
    """
    from astropy.utils.console import print_code_line, color_print

    # As in the variant above, the string-return path only works if the
    # None case creates a StringIO and flips return_as_str.
    return_as_str = False
    if output is None:
        output = io.StringIO()
        return_as_str = True

    lines = []
    votable = None

    reset_vo_warnings()

    with data.get_readable_fileobj(source, encoding='binary') as fd:
        content = fd.read()
    content_buffer = io.BytesIO(content)
    content_buffer.seek(0)

    if filename is None:
        if isinstance(source, str):
            filename = source
        elif hasattr(source, 'name'):
            filename = source.name
        elif hasattr(source, 'url'):
            filename = source.url
        else:
            filename = "<unknown>"

    with warnings.catch_warnings(record=True) as warning_lines:
        warnings.resetwarnings()
        warnings.simplefilter("always", exceptions.VOWarning, append=True)
        try:
            votable = parse(content_buffer, pedantic=False, filename=filename)
        except ValueError as e:
            lines.append(str(e))

    lines = [str(x.message) for x in warning_lines
             if issubclass(x.category, exceptions.VOWarning)] + lines

    content_buffer.seek(0)
    output.write("Validation report for {0}\n\n".format(filename))

    if len(lines):
        xml_lines = iterparser.xml_readlines(content_buffer)

        for warning in lines:
            w = exceptions.parse_vowarning(warning)

            if not w['is_something']:
                output.write(w['message'])
                output.write('\n\n')
            else:
                line = xml_lines[w['nline'] - 1]
                warning = w['warning']
                if w['is_warning']:
                    color = 'yellow'
                else:
                    color = 'red'
                color_print(
                    '{0:d}: '.format(w['nline']), '',
                    warning or 'EXC', color,
                    ': ', '',
                    textwrap.fill(
                        w['message'],
                        initial_indent=' ',
                        subsequent_indent=' ').lstrip(),
                    file=output)
                print_code_line(line, w['nchar'], file=output)
            output.write('\n')
    else:
        output.write('astropy.io.votable found no violations.\n\n')

    success = 0
    if xmllint and os.path.exists(filename):
        from . import xmlutil

        if votable is None:
            version = "1.1"
        else:
            version = votable.version
        success, stdout, stderr = xmlutil.validate_schema(
            filename, version)

        if success != 0:
            output.write(
                'xmllint schema violations:\n\n')
            output.write(stderr.decode('utf-8'))
        else:
            output.write('xmllint passed\n')

    if return_as_str:
        return output.getvalue()
    return len(lines) == 0 and success == 0

def print_warning(msg):
    console.color_print(msg, 'yellow')

def print_error(msg):
    console.color_print(msg, 'red')

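# A minimal sketch of the print_info/print_warning/print_error helpers
# above in use, assuming `console` is astropy.utils.console:
from astropy.utils import console

print_info("Calibration frames loaded")    # light green
print_warning("Seeing above 1.5 arcsec")   # yellow
print_error("Could not open FITS file")    # red
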
def export(self,
           yesterday=True,
           start_date=None,
           end_date=None,
           database=None,
           collections=list(),
           **kwargs):
    if yesterday:
        start_dt = (current_time() - 1. * u.day).datetime
        start = datetime(start_dt.year, start_dt.month, start_dt.day,
                         0, 0, 0, 0)
        end = datetime(start_dt.year, start_dt.month, start_dt.day,
                       23, 59, 59, 0)
    else:
        assert start_date, "start-date required if not using yesterday"

        y, m, d = [int(x) for x in start_date.split('-')]
        start_dt = date(y, m, d)

        if end_date is None:
            end_dt = start_dt
        else:
            y, m, d = [int(x) for x in end_date.split('-')]
            end_dt = date(y, m, d)

        start = datetime.fromordinal(start_dt.toordinal())
        end = datetime(end_dt.year, end_dt.month, end_dt.day, 23, 59, 59, 0)

    if 'all' in collections:
        collections = self.collections

    date_str = start.strftime('%Y-%m-%d')
    end_str = end.strftime('%Y-%m-%d')

    if end_str != date_str:
        date_str = '{}_to_{}'.format(date_str, end_str)

    out_files = list()

    console.color_print("Exporting collections: ", 'default',
                        "\t{}".format(date_str.replace('_', ' ')), 'yellow')
    for collection in collections:
        if collection not in self.collections:
            continue

        console.color_print("\t{}".format(collection))
        out_file = '{}{}_{}.json'.format(self._backup_dir,
                                         date_str.replace('-', ''),
                                         collection)

        col = getattr(self, collection)
        entries = [x for x in col.find(
            {'date': {'$gt': start, '$lt': end}}
        ).sort([('date', pymongo.ASCENDING)])]

        if len(entries):
            console.color_print("\t\t{} records exported".format(len(entries)),
                                'yellow')
            content = json.dumps(entries, default=json_util.default)
            write_type = 'w'

            # Assume compression but allow for not
            if kwargs.get('compress', True):
                console.color_print("\t\tCompressing...", 'lightblue')
                content = gzip.compress(bytes(content, 'utf8'))
                out_file = out_file + '.gz'
                write_type = 'wb'

            with open(out_file, write_type) as f:
                console.color_print("\t\tWriting file: ", 'lightblue',
                                    out_file, 'yellow')
                f.write(content)

            out_files.append(out_file)

    return out_files

def linear(coords, a, b, c):
    x, y = coords
    return a + b * x + c * y

def quad(coords, a, b, c, d, e, f):
    x, y = coords
    return a + b * x + c * y + d * np.square(x) + e * np.multiply(x, y) + f * np.square(y)

# PIPELINE
folder = sys.argv[1]

makedir(match_folder)
makedir(match_master)
makedir(pm_folder)

color_print('Leyendo informacion de epocas', 'cyan')
se, el, yr = np.genfromtxt(folder + 'zinfo_img', unpack=True, usecols=(4, 5, 6))
name = np.genfromtxt(folder + 'zinfo_img', unpack=True, usecols=(0,),
                     dtype='string')

k_mask = np.array(['k' in f for f in name])
sek, elk, yrk = np.transpose([se, el, yr])[k_mask].T
yrk = (yrk - yrk[0]) / 365.242199

color_print('Recopilando archivos de epocas...', 'cyan')
epochs = glob.glob('./%s/*.*' % match_folder)

color_print('Realizando match de la MF con las epocas', 'cyan')
ejecuta = ('java -jar %s/stilts.jar tmatch2 in1=./%s values1="ID" ifmt1=ascii '
           % (stilts_folder, master))

def mf_match(ep):
    ej2 = ('in2=%s values2="ID_1" ifmt2=ascii icmd2=\'keepcols "ID_1 X Y"\' '
           'matcher=exact find=best join=1and2 out=./%s/%s ofmt=ascii '
           'progress=none ocmd="delcols ID_1"'
           % (ep, match_master, ep.split('/')[-1].replace('.match', '.mfma')))
    os.system(ejecuta + ej2)

from ..io import load
from ...units import galactic, solarsystem

# HACK: bad solution is to do this:
# python setup.py build_ext --inplace

top_path = "plots/"
plot_path = os.path.join(top_path, "tests/potential/cpotential")
if not os.path.exists(plot_path):
    os.makedirs(plot_path)

units = [u.kpc, u.Myr, u.Msun, u.radian]
G = G.decompose(units)

print()
color_print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~",
            "yellow")
color_print("To view plots:", "green")
print(" open {}".format(plot_path))
color_print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~",
            "yellow")

niter = 1000
nparticles = 1000

class PotentialTestBase(object):
    name = None
    units = galactic

    def setup(self):
        print("\n\n")
        print("=" * 50)
        if self.name is None:

iteraciones = 4     # Number of iterations
threshold = 0.5     # Fraction (0-1) of epochs a star must appear in to be kept
match_tol = 0.3     # Tolerance in arcsec for the matches (0.3 arcsec ~ 1 pix in VVV)

# FOLDERS
radec_folder = "RADEC"      # Folder where the RA DEC files are saved
match_folder = "MATCH_MF"   # Folder where the matches are saved
stilts_folder = os.path.dirname(os.path.realpath(__file__))  # STILTS must live next to this .py file

# Extra information
if len(sys.argv) == 1:
    print()
    ee = color_print("Como ejecutar", "yellow")
    print("python", sys.argv[0], "path/to/catalogs/")
    print()
    print('Archivo "\033[1;33mzinfo_img\033[0m" debe estar en la carpeta de los datos')
    print('Se requiere un archivo "\033[1;33mref_master\033[0m" que contenga la(s) imagen(es) de referencia')
    print("Outputs:")
    sys.exit(1)

# Path to the .dao, .fits and zinfo files
data_folder = sys.argv[1]
if not data_folder.endswith("/"):
    data_folder += "/"

# Remove the files it uses, to avoid confusion with previous results
os.system("rm -r RADEC")

makedir(results)
makedir(results + match_path)

# Parameters
master_iterations = 4
star_treshold = 50
match_tolerance = 0.3

# Control flags
get_RADEC, match_CMD, match_epo = [False for i in range(3)]
if 'r' in sys.argv[2] or 'a' in sys.argv[2]:
    get_RADEC = True
if 'c' in sys.argv[2] or 'a' in sys.argv[2]:
    match_CMD = True
if 'e' in sys.argv[2] or 'a' in sys.argv[2]:
    match_epo = True

color_print('Iniciando "Master Frame" con argumentos %s' % sys.argv[2],
            'lightcyan')

###
### PIPELINE
###

# Read all the files
color_print('-Leyendo archivos...', 'cyan')
archivos = np.sort([f for f in os.listdir(folder)])

fits_all = np.sort([f for f in archivos if f.endswith('.fits')])
fits_k = np.sort([f for f in fits_all if "k" in f])
fits_j = np.sort([f for f in fits_all if "j" in f])

catalog = np.sort([f for f in archivos
                   if f.endswith('.dao') or f.endswith('.cxc')])
catalog_k = np.sort([f for f in catalog if "k" in f])
catalog_j = np.sort([f for f in catalog if "j" in f])

    pm0 = np.in1d(ids, idsr)
else:
    pm0 = True

mask = (pmr <= radio) * (nf >= nframes) * (pme <= max_err) * (id_mask) * (pm0)

data = np.genfromtxt("iter_%d/PM_final.dat" % (last_idx))
data = data[mask]

fmt = "%d %.6f %.6f %.6f %.6f %.3f %d %d %.6f %.6f %.0f %.2f"
hdr = "ID RA DEC PM_X PM_Y MAG_K NFRAMES CFRAMES PMXE PMYE NEI NEI_STD"
np.savetxt(refstars, data, fmt=fmt, header=hdr)

for i in range(itera):
    color_print("\nComenzando iteracion: %d" % (last_idx + i + 1), "lightcyan")

    # Create a folder to store the outputs
    makedir("iter_%d" % (last_idx + i + 1))

    color_print("\tEjecutando tlineal_1a1.py", "cyan")
    subprocess.call("python -u %s/tlineal_1a1.py %s" % (stilts_folder, inputs),
                    shell=True)

    color_print("\tEjecutando pm_1a1.py", "cyan")
    subprocess.call("python %s/pm_1a1.py %s" % (stilts_folder, ref_cat),
                    shell=True)

    color_print("\tEjecutando VPDHmag.py", "cyan")
    subprocess.call("python %s/VPDHmag.py" % stilts_folder, shell=True)

    color_print("\tMoviendo archivos", "cyan")
    subprocess.call("mv %s*.png iter_%d" % (output, (last_idx + i + 1)),
                    shell=True)

STARTDIR = os.path.abspath('.')

# The branches we are interested in
BRANCHES = get_branches(REPOSITORY_NAME)

# Read in a list of all the PRs
with open('merged_pull_requests_{}.json'.format(NAME)) as merged:
    merged_prs = json.load(merged)

# Set up a dictionary where each key will be a PR and each value will be a list
# of branches in which the PR is present
pr_branches = defaultdict(list)

try:
    # Set up repository
    color_print('Cloning {0}'.format(REPOSITORY), 'green')
    os.chdir(DIRTOCLONEIN)
    if os.path.isdir(NAME):
        # already exists... assume it's the right thing
        color_print('"{}" directory already exists - assuming it is an already '
                    'existing clone'.format(NAME), 'yellow')
        os.chdir(NAME)
        if ORIGIN:
            subprocess.call('git fetch {}'.format(ORIGIN), shell=True)
    else:
        subprocess.call('git clone {0}'.format(REPOSITORY), shell=True)
        os.chdir(NAME)

    # Loop over branches and find all PRs in the branch
    for branch in BRANCHES:

from configparser import ConfigParser
from astropy.utils.console import color_print
import os
import numpy as np

params = ConfigParser()
if os.path.isfile('zparams_pm.py'):
    params.read('zparams_pm.py')
else:
    folder = os.path.dirname(os.path.realpath(__file__))
    params.read('%s/zparams_pm.py' % folder)

    print('')
    color_print('###############################################', 'yellow')
    color_print('#', 'yellow', ' ', 'lightcyan', '#', 'yellow')
    color_print('#', 'yellow', ' zparams_pm.py no encontrado en la carpeta ', 'white', '#', 'yellow')
    color_print('#', 'yellow', '\033[1m' + ' Utilizando parametros por defecto ', 'lightred', '#', 'yellow')
    color_print('#', 'yellow', ' ', 'lightcyan', '#', 'yellow')
    color_print('###############################################', 'yellow')
    print('')

# Linear regression
def linear_regression(x, y, w):
    A = np.vander(x, 2)
    W = np.diag(w)
    ATWA = np.dot(A.T, np.dot(W, A))
    # ATA = np.dot(A.T, A / yerr[:, np.newaxis]**2)
    sig_w = np.linalg.inv(ATWA)
    mu_w = np.linalg.solve(ATWA, np.dot(A.T, np.dot(W, y)))
    return mu_w, sig_w

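# For reference, linear_regression() above solves the weighted least-squares
# normal equations: with design matrix A = vander(x, 2) and W = diag(w),
#
#     mu_w  = (A^T W A)^{-1} A^T W y      (best-fit [slope, intercept])
#     sig_w = (A^T W A)^{-1}              (parameter covariance)
#
# A small sketch with synthetic data (assumed values, unit weights):
x = np.linspace(0.0, 10.0, 20)
y = 2.0 * x + 1.0
w = np.ones_like(x)
mu_w, sig_w = linear_regression(x, y, w)   # mu_w ~ [2.0, 1.0]
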
def test_color_print():
    # This stuff is hard to test, at least smoke test it
    console.color_print("foo", "green")
    console.color_print("foo", "green", "bar", "red")

def conesearch(center, radius, verb=1, catalog_db=None, pedantic=None,
               verbose=True, cache=True, timeout=None, query_all=False):
    """
    Perform Cone Search and return the result of the
    first successful query.

    Parameters
    ----------
    center : str, `astropy.coordinates` object, list, or tuple
        Position of the center of the cone to search.
        It may be specified as an object from the
        :ref:`astropy:astropy-coordinates` package,
        string as accepted by
        :func:`~astroquery.utils.parse_coordinates`, or tuple/list.
        If given as tuple or list, it is assumed to be ``(RA, DEC)``
        in the ICRS coordinate frame, given in decimal degrees.

    radius : float or `~astropy.units.quantity.Quantity`
        Radius of the cone to search:

        - If float is given, it is assumed to be in decimal degrees.
        - If astropy quantity is given, it is internally converted
          to degrees.

    verb : {1, 2, 3}
        Verbosity indicating how many columns are to be returned
        in the resulting table. Support for this parameter by
        a Cone Search service implementation is optional. If the
        service supports the parameter:

        1. Return the bare minimum number of columns that
           the provider considers useful in describing the
           returned objects.
        2. Return a medium number of columns between the minimum and
           maximum (inclusive) that are considered by the provider
           to be most typically useful to the user.
        3. Return all of the columns that are available for
           describing the objects.

        If not supported, the service should ignore the parameter
        and always return the same columns for every request.

    catalog_db
        May be one of the following, in order from easiest to
        use to most control:

        - `None`: A database of
          ``astroquery.vo_conesearch.conf.conesearch_dbname`` catalogs is
          downloaded from ``astroquery.vo_conesearch.conf.vos_baseurl``.
          The first catalog in the database to successfully return a
          result is used.

        - *catalog name*: A name in the database of
          ``astroquery.vo_conesearch.conf.conesearch_dbname`` catalogs at
          ``astroquery.vo_conesearch.conf.vos_baseurl`` is used.
          For a list of acceptable names, use
          :func:`astroquery.vo_conesearch.vos_catalog.list_catalogs`.

        - *url*: The prefix of a URL to an IVOA Service for
          ``astroquery.vo_conesearch.conf.conesearch_dbname``.
          Must end in either '?' or '&'.

        - `~astroquery.vo_conesearch.vos_catalog.VOSCatalog` object: A
          specific catalog manually downloaded and selected from the
          database (see :ref:`vo-sec-client-vos`).

        - Any of the above 3 options combined in a list, in which case
          they are tried in order.

    pedantic : bool or `None`
        When `True`, raise an error when the result violates the spec,
        otherwise issue a warning. Warnings may be controlled using
        :py:mod:`warnings` module. When not provided, uses the
        configuration setting
        ``astroquery.vo_conesearch.conf.pedantic``, which defaults
        to `False`.

    verbose : bool
        Verbose output.

    cache : bool
        Use caching for VO Service database. Access to actual VO
        websites referenced by the database still needs internet
        connection.

    timeout : number or `None`
        Timeout limit in seconds for each service being queried.
        If `None`, use default.

    query_all : bool
        This is used by :func:`search_all`.

    Returns
    -------
    obj : `astropy.io.votable.tree.Table`
        First table from first successful VO service request.

    Raises
    ------
    ConeSearchError
        When invalid inputs are passed into Cone Search.

    VOSError
        If VO service request fails.
    """
    # Not using default ConeSearch instance because the attributes are
    # tweaked to match user inputs to this function.
    cs = ConeSearchClass()
    n_timed_out = 0
    service_type = conf.conesearch_dbname
    catalogs = vos_catalog._get_catalogs(
        service_type, catalog_db, cache=cache, verbose=verbose)
    if query_all:
        result = {}
    else:
        result = None

    if pedantic is not None:
        cs.PEDANTIC = pedantic
    if timeout is not None:
        cs.TIMEOUT = timeout

    for name, catalog in catalogs:
        if isinstance(catalog, six.string_types):
            if catalog.startswith('http'):
                url = catalog
            else:
                remote_db = vos_catalog.get_remote_catalog_db(
                    service_type, cache=cache, verbose=verbose)
                catalog = remote_db.get_catalog(catalog)
                url = catalog['url']
        else:
            url = catalog['url']

        cs.URL = url

        if verbose:  # pragma: no cover
            color_print('Trying {0}'.format(url), 'green')

        try:
            r = cs.query_region(center, radius, verb=verb, cache=cache,
                                verbose=verbose)
        except Exception as e:
            err_msg = str(e)
            vo_warn(W25, (url, err_msg))
            if not query_all and 'ConnectTimeoutError' in err_msg:
                n_timed_out += 1
        else:
            if query_all:
                result[r.url] = r
            else:
                result = r
                break

    if result is None:
        err_msg = 'None of the available catalogs returned valid results.'
        if n_timed_out > 0:
            err_msg += ' ({0} URL(s) timed out.)'.format(n_timed_out)
        raise VOSError(err_msg)

    return result

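# A short usage sketch for the conesearch() above, assuming astroquery is
# installed and internet access is available; the target and radius are
# illustrative values only:
from astropy import units as u
from astropy.coordinates import SkyCoord
from astroquery.vo_conesearch import conesearch

c = SkyCoord.from_name('M31')                      # resolve a name to coordinates
result = conesearch.conesearch(c, 0.1 * u.degree)  # first catalog that responds
print(result)
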
import os
import subprocess
import multiprocessing

import numpy as np
from matplotlib import gridspec
from scipy.optimize import curve_fit
from astropy.io import ascii
from astropy.utils.console import ProgressBar, color_print
from astropy.table import Table, join, hstack
from astropy.stats import mad_std
from pm_funcs import barra
from linfit import linfit

import pm_funcs

# PARAMETERS
nframes, nbins, limplotpm, nprocs, sig_iter, nsigma, weight = pm_funcs.get_pm1a1()
limplot = limplotpm

color_print('[PM_1a1.py]', 'yellow')
color_print('Parametros:', 'lightgray')
print('\tnframes: %d' % nframes)
print('\tnbins: %d' % nbins)
print('\tnprocs: %d' % nprocs)
if weight:
    print('\tSe utilizaran los errores como pesos')

stilts_folder = os.path.dirname(os.path.realpath(__file__))
cpun = multiprocessing.cpu_count()

def recta(x, a, b):
    return a * x + b

def load_file(archivo):
    data = np.genfromtxt(archivo, unpack=True)

    id_mask = ~np.in1d(ids, rej_ids)
else:
    id_mask = np.ones(len(ids)).astype(bool)

mask = (pmr <= radio) * (nf >= nframes) * (pme <= max_err) * (id_mask)

data = np.genfromtxt('iter_%d/PM_final.dat' % (last_idx))
data = data[mask]

fmt = '%d %.6f %.6f %.6f %.6f %.3f %d %d %.6f %.6f %.0f %.2f'
hdr = 'ID RA DEC PM_X PM_Y MAG_K NFRAMES CFRAMES PMXE PMYE NEI NEI_STD'
np.savetxt(refstars, data, fmt=fmt, header=hdr)

for i in range(itera):
    color_print('\nComenzando iteracion: %d' % (i + 1), 'lightcyan')

    # Create a folder to store the outputs
    makedir('iter_%d' % (last_idx + i + 1))

    color_print('\tEjecutando tlineal_1a1.py', 'cyan')
    subprocess.call('python -u %s/tlineal_1a1.py %s' % (stilts_folder, inputs),
                    shell=True)

    color_print('\tEjecutando pm_1a1.py', 'cyan')
    subprocess.call('python %s/pm_1a1.py %s' % (stilts_folder, ref_cat),
                    shell=True)

    color_print('\tEjecutando VPDHmag.py', 'cyan')
    subprocess.call('python %s/VPDHmag.py' % stilts_folder, shell=True)

    color_print('\tMoviendo archivos', 'cyan')
    subprocess.call('mv %s*.png iter_%d' % (output, (last_idx + i + 1)),
                    shell=True)

def _more_tabcol(self, tabcol, max_lines=None, max_width=None,
                 show_name=True, show_unit=None, show_dtype=False):
    """Interactive "more" of a table or column.

    Parameters
    ----------
    max_lines : int or None
        Maximum number of rows to output

    max_width : int or None
        Maximum character width of output

    show_name : bool
        Include a header row for column names. Default is True.

    show_unit : bool
        Include a header row for unit. Default is to show a row
        for units only if one or more columns has a defined value
        for the unit.

    show_dtype : bool
        Include a header row for column dtypes. Default is False.
    """
    allowed_keys = 'f br<>qhpn'

    # Count the header lines
    n_header = 0
    if show_name:
        n_header += 1
    if show_unit:
        n_header += 1
    if show_dtype:
        n_header += 1
    if show_name or show_unit or show_dtype:
        n_header += 1

    # Set up kwargs for pformat call. Only Table gets max_width.
    kwargs = dict(max_lines=-1, show_name=show_name, show_unit=show_unit,
                  show_dtype=show_dtype)
    if hasattr(tabcol, 'columns'):  # tabcol is a table
        kwargs['max_width'] = max_width

    # If max_lines is None (=> query screen size) then increase by 2.
    # This is because get_pprint_size leaves 6 extra lines so that in
    # ipython you normally see the last input line.
    max_lines1, max_width = self._get_pprint_size(max_lines, max_width)
    if max_lines is None:
        max_lines1 += 2
    delta_lines = max_lines1 - n_header

    # Set up a function to get a single character on any platform
    inkey = Getch()

    i0 = 0  # First table/column row to show
    showlines = True

    while True:
        i1 = i0 + delta_lines  # Last table/col row to show
        if showlines:  # Don't always show the table (e.g. after help)
            try:
                os.system('cls' if os.name == 'nt' else 'clear')
            except Exception:
                pass  # No worries if clear screen call fails
            lines = tabcol[i0:i1].pformat(**kwargs)
            colors = ('red' if i < n_header else 'default'
                      for i in range(len(lines)))
            for color, line in zip(colors, lines):
                color_print(line, color)
        showlines = True
        print()
        print("-- f, <space>, b, r, p, n, <, >, q h (help) --", end=' ')

        # Get a valid key
        while True:
            try:
                key = inkey().lower()
            except Exception:
                print("\n")
                log.error('Console does not support getting a character'
                          ' as required by more(). Use pprint() instead.')
                return
            if key in allowed_keys:
                break
        print(key)

        if key.lower() == 'q':
            break
        elif key == ' ' or key == 'f':
            i0 += delta_lines
        elif key == 'b':
            i0 = i0 - delta_lines
        elif key == 'r':
            pass
        elif key == '<':
            i0 = 0
        elif key == '>':
            i0 = len(tabcol)
        elif key == 'p':
            i0 -= 1
        elif key == 'n':
            i0 += 1
        elif key == 'h':
            showlines = False
            print("""
Browsing keys:
   f, <space> : forward one page
   b : back one page
   r : refresh same page
   n : next row
   p : previous row
   < : go to beginning
   > : go to end
   q : quit browsing
   h : print this help""", end=' ')

        if i0 < 0:
            i0 = 0
        if i0 >= len(tabcol) - delta_lines:
            i0 = len(tabcol) - delta_lines
        print("\n")

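# The interactive pager above backs astropy's Table.more() method; a short
# usage sketch (run in a real terminal, since it reads single keypresses):
from astropy.table import Table

t = Table({'a': list(range(100)), 'b': list(range(100))})
t.more()  # page through the table with f/b/n/p/q keys
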
from __future__ import division, print_function

import sys

import matplotlib.pyplot as plt
import numpy as np

from matplotlib import patches
from matplotlib.path import Path
from astropy.utils.console import color_print

color_print('REFSTARS.py', 'yellow')

if len(sys.argv) == 1:
    print('Para usar:')
    color_print('\tpython refstars.py ', 'cyan',
                '<archivo con CMD> <archivo con puntos xy del poligono>',
                'lightcyan')
    sys.exit(1)

# Arguments
cmd_file = sys.argv[1]  # CMD (.mat or .matgc)
ver_file = sys.argv[2]  # Polygon vertices

print('\tLeyendo input...')
data = np.genfromtxt(cmd_file)
K = data[:, 5]
J_K = data[:, 12] - K
verts = np.genfromtxt(ver_file)

print('\tBuscando puntos dentro del poligono...')
path = Path(verts)
idx = path.contains_points(np.transpose([J_K, K]))
print('\t\t %d de %d estrellas seleccionadas' % (idx.sum(), len(data)))

star_motion_calc = True

# Folders
results = folder.replace('/data', '').split('_')[0] + '/'
sm_path = 'stars_motion/'
match_path = 'matched_epochs/'
zero = results + sys.argv[2]

makedir(results + sm_path)

##
## PIPELINE
##

# Get the epochs
color_print('Buscando archivos de epocas...', 'cyan')
k_catalog = np.sort([f for f in os.listdir(folder)
                     if 'k' in f and (f.endswith('.dao') or f.endswith('.cxc'))])
k_fits = np.sort([f for f in os.listdir(folder)
                  if 'k' in f and f.endswith('.fits')])

# Get the corresponding years
color_print('Leyendo BJD de cada epoca...', 'cyan')
dates = np.zeros(k_fits.size)
with ProgressBar(k_fits.size) as bar:
    for i in range(k_fits.size):
        hdu = fits.open(folder + k_fits[i])
        date = hdu[0].header['MJD-OBS']
        dates[i] += date
        bar.update()
dates = (dates - dates[0]) / 365.25

def call_vo_service(service_type, catalog_db=None, pedantic=None,
                    verbose=True, cache=True, kwargs={}):
    """
    Makes a generic VO service call.

    Parameters
    ----------
    service_type : str
        Name of the type of service, e.g., 'conesearch'.
        Used in error messages and to select a catalog database
        if `catalog_db` is not provided.

    catalog_db
        May be one of the following, in order from easiest to
        use to most control:

        - `None`
            A database of `service_type` catalogs is downloaded from
            `astropy.vo.client.vos_baseurl`. The first catalog
            in the database to successfully return a result is used.

        - *catalog name*
            A name in the database of `service_type` catalogs at
            `astropy.vo.client.vos_baseurl` is used. For a list of
            acceptable names, see :func:`list_catalogs`.

        - *url*
            The prefix of a *url* to an IVOA Service for `service_type`.
            Must end in either '?' or '&'.

        - `VOSCatalog` object
            A specific catalog manually downloaded and selected from
            the database using the APIs in
            `~astropy.vo.client.vos_catalog`.

        - Any of the above 3 options combined in a list, in which case
          they are tried in order.

    pedantic : bool or `None`
        See `astropy.io.votable.table.parse`.

    verbose : bool
        Verbose output.

    cache : bool
        See `get_remote_catalog_db`.

    kwargs : dictionary
        Keyword arguments to pass to the catalog service.
        No checking is done that the arguments are accepted by
        the service, etc.

    Returns
    -------
    obj : `astropy.io.votable.tree.Table` object
        First table from first successful VO service request.

    Raises
    ------
    VOSError
        If VO service request fails.
    """
    if catalog_db is None:
        catalog_db = get_remote_catalog_db(service_type, cache=cache)
        catalogs = catalog_db.get_catalogs()
    elif isinstance(catalog_db, VOSDatabase):
        catalogs = catalog_db.get_catalogs()
    elif isinstance(catalog_db, (VOSCatalog, basestring)):
        catalogs = [(None, catalog_db)]
    elif isinstance(catalog_db, list):
        for x in catalog_db:
            assert (isinstance(x, (VOSCatalog, basestring)) and
                    not isinstance(x, VOSDatabase))
        catalogs = [(None, x) for x in catalog_db]
    else:  # pragma: no cover
        raise VOSError('catalog_db must be a catalog database, '
                       'a list of catalogs, or a catalog')

    if pedantic is None:  # pragma: no cover
        pedantic = VO_PEDANTIC

    for name, catalog in catalogs:
        if isinstance(catalog, basestring):
            if catalog.startswith("http"):
                url = catalog
            else:
                remote_db = get_remote_catalog_db(service_type, cache=cache)
                catalog = remote_db.get_catalog(catalog)
                url = catalog['url']
        else:
            url = catalog['url']

        if verbose:  # pragma: no cover
            color_print('Trying {0}'.format(url), 'green')

        try:
            return _vo_service_request(url, pedantic, kwargs)
        except Exception as e:
            vo_warn(W25, (url, str(e)))

    raise VOSError('None of the available catalogs returned valid results.')
