Example #1
def _parse_multispec_dict(multispec_dict):
    """
    Parse dictionary that contains the multispec information in wcs attributes (WAT keywords; often WAT2_???).
    each specN keywords has information as a string in the following format:
    ap beam dtype w1 dw nw z aplow aphigh wt_i w0_i ftype_i [parameters] [coefficients]

    Parameters
    ----------

        multispec_dict: dict-like object
            e.g. multi_spec_dict = {'wtype':'multispec', spec1:'...', 'spec2':'...', ..., 'specN':'...'}

    """

    parsed_multispec_dict = OrderedDict()

    for spec_key in multispec_dict:
        if not spec_key.lower().startswith('spec'):
            continue

        single_spec_dict = OrderedDict()
        spec_string = multispec_dict[spec_key].strip().split()
        for key_name, key_dtype in wcs_attributes_general_keywords.items():
            single_spec_dict[key_name] = key_dtype(spec_string.pop(0))
        single_spec_dict['functions'] = []
        while len(spec_string) > 0:
            # There seems to be a function defined for this spectrum -
            # checking that the dispersion type indicates that:
            assert single_spec_dict['dispersion_type'] == 2

            function_dict = {}
            for key_name, key_dtype in wcs_attributes_function_keywords.items():
                function_dict[key_name] = key_dtype(spec_string.pop(0))

                # 'type' is the last keyword common to all function types
                if key_name == 'type':
                    function_dict['type'] = fits_wcs_spec_func_type[function_dict['type']]
                    break

            # different function types defined in
            # http://iraf.net/irafdocs/specwcs.php -- see fits_wcs_spec_func_type

            function_type = function_dict['type']
            if function_type in wcs_attributes_function_parameters:
                for key_name in wcs_attributes_function_parameters[function_type]:
                    key_dtype = wcs_attributes_function_keywords[key_name]
                    function_dict[key_name] = key_dtype(spec_string.pop(0))
                num_coefficients = _get_num_coefficients(function_dict)
                coefficients = spec_string[:num_coefficients]
                function_dict['coefficients'] = list(map(float, coefficients))
                spec_string = spec_string[num_coefficients:]
            else:
                raise NotImplementedError

            single_spec_dict["functions"].append(function_dict)

        parsed_multispec_dict[spec_key] = single_spec_dict

    return parsed_multispec_dict
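
For illustration, here is a minimal, self-contained sketch of the token-popping pattern used above: typed fields are consumed left to right from the whitespace-split spec string, in the order given by a keyword table. The table below mirrors the first few entries of wcs_attributes_general_keywords (shown later in this collection); the sample spec string is made up.

from collections import OrderedDict

# Simplified keyword table (name -> type); the spec values below are illustrative only.
general_keywords = OrderedDict([
    ('aperture', int), ('beam', int), ('dispersion_type', int),
    ('dispersion0', float), ('average_dispersion_delta', float),
])

spec_string = "1 1 0 4000.0 1.25".split()

single_spec_dict = OrderedDict()
for key_name, key_dtype in general_keywords.items():
    # pop(0) consumes one token per keyword, so field order matters
    single_spec_dict[key_name] = key_dtype(spec_string.pop(0))

print(single_spec_dict)
# OrderedDict([('aperture', 1), ('beam', 1), ('dispersion_type', 0),
#              ('dispersion0', 4000.0), ('average_dispersion_delta', 1.25)])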
Example #2
    def __init__(self, inp):
        if not isinstance(inp, OrderedDict):
            try:
                inp = OrderedDict(inp)
            except (TypeError,ValueError):
                raise ValueError("Input to TableList must be an OrderedDict or list of (k,v) pairs")

        self._dict = inp
        super(TableList,self).__init__(inp.values())
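
As a stand-alone sketch of the normalization this constructor performs (plain stdlib, no astroquery needed): any iterable of (key, value) pairs is coerced into an OrderedDict, and anything else is rejected with the same error message.

from collections import OrderedDict

def to_ordered_dict(inp):
    # Mirrors the input handling of TableList.__init__ above.
    if isinstance(inp, OrderedDict):
        return inp
    try:
        return OrderedDict(inp)
    except (TypeError, ValueError):
        raise ValueError("Input to TableList must be an OrderedDict or list of (k,v) pairs")

print(to_ordered_dict([('first', 1), ('second', 2)]))
# OrderedDict([('first', 1), ('second', 2)])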
Example #3
 def __init__(self, keywords):
     file_name = aud.get_pkg_data_filename(
         os.path.join("data", "inverse_dict.json"))
     with open(file_name, 'r') as f:
         kwd = json.load(f)
         self.keyword_types = sorted(kwd.values())
         self.keyword_dict = OrderedDict([(k, kwd[k]) for k in sorted(kwd)])
     self._keywords = None
     self.keywords = keywords
Example #4
    def __init__(self, inp):
        if not isinstance(inp, OrderedDict):
            try:
                inp = OrderedDict(inp)
            except (TypeError, ValueError):
                raise ValueError("Input to TableList must be an OrderedDict "
                                 "or list of (k,v) pairs")

        self._dict = inp
        super(TableList, self).__init__(inp.values())
Example #5
    def __init__(self, inp):
        if not isinstance(inp, OrderedDict):
            # py3 doesn't let you catch 2 types of errors.
            errmsg = "Input to TableList must be an OrderedDict or list of (k,v) pairs"
            try:
                inp = OrderedDict(inp)
            except (TypeError, ValueError):
                raise ValueError("Input to TableList must be an OrderedDict or list of (k,v) pairs")

        self._dict = inp
        super(TableList, self).__init__(inp.values())
Example #6
    def _parse_result(self, response, get_catalog_names=False, verbose=False):
        """
        Parses the HTTP response to create an `astropy.table.Table`.
        Returns the raw result as a string in case of parse errors.

        Parameters
        ----------
        response : `requests.Response`
            The response of the HTTP POST request
        get_catalog_names : bool
            If specified, return only the table names (useful for table
            discovery)

        Returns
        -------
        `astroquery.utils.commons.TableList`
            An OrderedDict of `astropy.table.Table` objects.
            If there are errors in the parsing, then returns the raw results as a string.
        """
        if not verbose:
            commons.suppress_vo_warnings()
        try:
            tf = tempfile.NamedTemporaryFile()
            if PY3:
                tf.write(response.content)
            else:
                tf.write(response.content.encode('utf-8'))
            tf.file.flush()
            vo_tree = votable.parse(tf.name, pedantic=False)
            if get_catalog_names:
                return dict([(R.name,R) for R in vo_tree.resources])
            else:
                table_dict = OrderedDict()
                for t in vo_tree.iter_tables():
                    if len(t.array) > 0:
                        if t.ref is not None:
                            name = vo_tree.get_table_by_id(t.ref).name
                        else:
                            name = t.name
                        if name not in table_dict.keys():
                            table_dict[name] = []
                        table_dict[name] += [t.to_table()]
                for name in table_dict.keys():
                    if len(table_dict[name]) > 1:
                        table_dict[name] = tbl.vstack(table_dict[name])
                    else:
                        table_dict[name] = table_dict[name][0]
                return commons.TableList(table_dict)

        except Exception:
            traceback.print_exc()  # temporary for debugging
            warnings.warn(
                "Error in parsing result, returning raw result instead")
            return response.content
Example #7
    def __init__(self, inp):
        if not isinstance(inp, OrderedDict):
            # py3 doesn't let you catch 2 types of errors.
            errmsg = "Input to TableList must be an OrderedDict or list of (k,v) pairs"
            try:
                inp = OrderedDict(inp)
            except (TypeError, ValueError):
                raise ValueError("Input to TableList must be an OrderedDict or list of (k,v) pairs")

        self._dict = inp
        super(TableList, self).__init__(inp.values())
Example #8
    def _construct_ordered_GET(self):
        """
        Construct a Global Edge Table (GET)

        The GET is an OrderedDict. Keys are scan line numbers,
        ordered from bbox.ymin to bbox.ymax, where bbox is the
        bounding box of the polygon.
        Values are lists of edges for which edge.ymin==scan_line_number.

        Returns
        -------
        GET: OrderedDict
            {scan_line: [edge1, edge2]}
        """
        # edges is a list of Edge objects which define a polygon
        # with these vertices
        edges = self.get_edges()
        GET = OrderedDict.fromkeys(self._scan_line_range)
        ymin = np.asarray([e._ymin for e in edges])
        for i in self._scan_line_range:
            ymin_ind = (ymin == i).nonzero()[0]
            if ymin_ind.size > 0:  # size check; .any() would miss a lone match at index 0
                GET[i] = [edges[ymin_ind[0]]]
                for j in ymin_ind[1:]:
                    GET[i].append(edges[j])
        return GET
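
The same grouping idea in a minimal, self-contained form (the Edge class here is a hypothetical stand-in exposing only the _ymin attribute the loop needs):

import numpy as np
from collections import OrderedDict

class Edge:
    """Hypothetical stand-in for the real Edge objects."""
    def __init__(self, ymin):
        self._ymin = ymin

edges = [Edge(2), Edge(0), Edge(2), Edge(1)]
scan_line_range = range(0, 4)

GET = OrderedDict.fromkeys(scan_line_range)   # keys in scan-line order, values None
ymin = np.asarray([e._ymin for e in edges])
for i in scan_line_range:
    ymin_ind = (ymin == i).nonzero()[0]
    if ymin_ind.size > 0:
        GET[i] = [edges[j] for j in ymin_ind]

print({k: len(v) if v else 0 for k, v in GET.items()})
# {0: 1, 1: 1, 2: 2, 3: 0}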
Example #9
class VizierKeyword(list):

    """Helper class for setting keywords for Vizier queries"""

    def __init__(self, keywords):
        file_name = aud.get_pkg_data_filename(
            os.path.join("data", "inverse_dict.json"))
        with open(file_name, 'r') as f:
            kwd = json.load(f)
            self.keyword_types = sorted(kwd.values())
            self.keyword_dict = OrderedDict([(k, kwd[k]) for k in sorted(kwd)])
        self._keywords = None
        self.keywords = keywords

    @property
    def keywords(self):
        """List or string for keyword(s) that must be set for the Vizier object."""
        return self._keywords

    @keywords.setter
    def keywords(self, values):
        if isinstance(values, six.string_types):
            values = [values]  # a single keyword string becomes a one-element list
        keys = [key.lower() for key in self.keyword_dict]
        values = [val.lower() for val in values]
        # warn about unknown keywords
        for val in set(values) - set(keys):
            warnings.warn("{val} : No such keyword".format(val=val))
        valid_keys = [
            key for key in self.keyword_dict.keys()
            if key.lower() in list(map(str.lower, values))]
        # create a dict for each type of keyword
        set_keywords = OrderedDict()
        for key in self.keyword_dict:
            if key in valid_keys:
                if self.keyword_dict[key] in set_keywords:
                    set_keywords[self.keyword_dict[key]].append(key)
                else:
                    set_keywords[self.keyword_dict[key]] = [key]
        self._keywords = OrderedDict(
                [(k, sorted(set_keywords[k]))
                 for k in set_keywords]
                )

    @keywords.deleter
    def keywords(self):
        del self._keywords

    def __repr__(self):
        return "\n".join([x for key in self.keywords for x in self.get_keyword_str(key)])

    def get_keyword_str(self, key):
        """
        Helper function that returns the keywords, grouped into appropriate
        categories and suitable for the Vizier votable CGI.

        Comma-separated is not valid!!!
        """
        keyword_name = "-kw." + key
        return [keyword_name + "=" + s for s in self.keywords[key]]
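
The setter above boils down to grouping validated keywords by their category while preserving category order. A small stand-alone sketch of that grouping step, with made-up keyword and category names:

from collections import OrderedDict

# Hypothetical keyword -> category mapping standing in for keyword_dict.
keyword_dict = OrderedDict([
    ('AGN', 'Astronomy'), ('Radio', 'Wavelength'), ('Optical', 'Wavelength'),
])
valid_keys = ['Radio', 'AGN', 'Optical']

set_keywords = OrderedDict()
for key in keyword_dict:                  # iterate in keyword_dict order
    if key in valid_keys:
        set_keywords.setdefault(keyword_dict[key], []).append(key)

grouped = OrderedDict((k, sorted(v)) for k, v in set_keywords.items())
print(grouped)
# OrderedDict([('Astronomy', ['AGN']), ('Wavelength', ['Optical', 'Radio'])])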
Example #10
    def __init__(self, wave, flux, error=None,
                 unit=(u.erg / u.s / u.cm**2 / u.AA), wave_unit=u.AA,
                 z=None, dist=None, meta=None):
        self._wave = np.asarray(wave)
        self._flux = np.asarray(flux)
        self._wunit = wave_unit
        self._unit = unit
        self._z = z
        self._dist = dist

        if error is not None:
            self._error = np.asarray(error)
            if self._wave.shape != self._error.shape:
                raise ValueError('shape of wavelength and variance must match')
        else:
            self._error = None

        if meta is None:
            self.meta = OrderedDict()
        else:
            self.meta = deepcopy(meta)

        if self._wave.shape != self._flux.shape:
            raise ValueError('shape of wavelength and flux must match')
        if self._wave.ndim != 1:
            raise ValueError('only 1-d arrays supported')
Example #11
    def read_wcs_attributes(self, axis):
        """
        Reading WCS attribute information in WAT0_001-like keywords

        Parameters
        ----------

        axis: int
            specifying which axis to read (e.g. axis=2 will read WAT2_???).
        """

        wcs_attributes = self.fits_header['wat{0:d}_???'.format(axis)]
        if len(wcs_attributes) == 0:
            raise FITSWCSError

        raw_wcs_attributes = ''.join([
            wcs_attributes[key].ljust(68)
            for key in sorted(wcs_attributes.keys())
        ])

        wat_dictionary = OrderedDict()
        for wat_keyword_match in wat_keyword_pattern.finditer(
                raw_wcs_attributes):
            wat_dictionary[wat_keyword_match.groups()[0]] = \
                wat_keyword_match.groups()[1].strip('\"\'')

        if 'units' in wat_dictionary:
            wat_dictionary['units'] = _parse_fits_units(
                wat_dictionary['units'])

        return wat_dictionary
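
The WAT cards are fixed-width, so they are padded to 68 characters and concatenated before the key = 'value' pairs are pulled out by regular expression. The actual wat_keyword_pattern is defined elsewhere in the module; the pattern below is an assumed equivalent for illustration:

import re
from collections import OrderedDict

# Assumed pattern: key = 'quoted value' or key = bareword.
wat_keyword_pattern = re.compile(r"(\w+)\s*=\s*('[^']*'|\"[^\"]*\"|\S+)")

raw_wcs_attributes = "wtype=multispec label='Wavelength' units='angstroms'"

wat_dictionary = OrderedDict()
for match in wat_keyword_pattern.finditer(raw_wcs_attributes):
    key, value = match.groups()
    wat_dictionary[key] = value.strip('"\'')

print(wat_dictionary)
# OrderedDict([('wtype', 'multispec'), ('label', 'Wavelength'), ('units', 'angstroms')])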
Example #12
    def _construct_ordered_GET(self):
        """
        Construct a Global Edge Table (GET)

        The GET is an OrderedDict. Keys are scan line numbers,
        ordered from bbox.ymin to bbox.ymax, where bbox is the
        bounding box of the polygon.
        Values are lists of edges for which edge.ymin==scan_line_number.

        Returns
        -------
        GET: OrderedDict
            {scan_line: [edge1, edge2]}
        """
        # edges is a list of Edge objects which define a polygon
        # with these vertices
        edges = self.get_edges()
        GET = OrderedDict.fromkeys(self._scan_line_range)
        ymin = np.asarray([e._ymin for e in edges])
        for i in self._scan_line_range:
            ymin_ind = (ymin == i).nonzero()[0]
            if ymin_ind.size > 0:  # size check; .any() would miss a lone match at index 0
                GET[i] = [edges[ymin_ind[0]]]
                for j in ymin_ind[1:]:
                    GET[i].append(edges[j])
        return GET
Example #13
def read_data_fits(input, hdu=None, **kwargs):
    """
    Read an array and header from a FITS file.

    Parameters
    ----------
    input : str or compatible `astropy.io.fits` HDU object
        If a string, the filename to read the table from. The
        following `astropy.io.fits` HDU objects can be used as input:
        - :class:`~astropy.io.fits.hdu.table.PrimaryHDU`
        - :class:`~astropy.io.fits.hdu.table.ImageHDU`
        - :class:`~astropy.io.fits.hdu.hdulist.HDUList`
    hdu : int or str, optional
        The HDU to read the table from.
    """

    if isinstance(input, fits.HDUList):

        # Parse all array objects
        arrays = OrderedDict()
        for ihdu, hdu_item in enumerate(input):
            if isinstance(hdu_item, (fits.PrimaryHDU, fits.ImageHDU)):
                arrays[ihdu] = hdu_item

        if len(arrays) > 1:
            if hdu is None:
                hdu = first(arrays)
                warnings.warn("hdu= was not specified but multiple arrays"
                              " are present, reading in first available"
                              " array (hdu={0})".format(hdu))

            # hdu might not be an integer, so we first need to convert it
            # to the correct HDU index
            hdu = input.index_of(hdu)

            if hdu in arrays:
                array_hdu = arrays[hdu]
            else:
                raise ValueError("No array found in hdu={0}".format(hdu))

        elif len(arrays) == 1:
            array_hdu = arrays[first(arrays)]
        else:
            raise ValueError("No table found")

    elif isinstance(input, (fits.PrimaryHDU, fits.ImageHDU)):

        array_hdu = input

    else:

        hdulist = fits_open(input, **kwargs)

        try:
            return read_data_fits(hdulist, hdu=hdu)
        finally:
            hdulist.close()

    return array_hdu.data, array_hdu.header
Example #14
 def __init__(self, keywords):
     file_name = aud.get_pkg_data_filename(os.path.join("data", "inverse_dict.json"))
     with open(file_name, "r") as f:
         kwd = json.load(f)
         self.keyword_types = sorted(kwd.values())
         self.keyword_dict = OrderedDict([(k, kwd[k]) for k in sorted(kwd)])
     self._keywords = None
     self.keywords = keywords
Example #15
def parse_vizier_votable(data,
                         verbose=False,
                         invalid='warn',
                         get_catalog_names=False):
    """
    Given a VOTable as a string, parse it into tables
    """
    if not verbose:
        commons.suppress_vo_warnings()

    tf = BytesIO(data)

    if invalid == 'mask':
        vo_tree = votable.parse(tf, pedantic=False, invalid='mask')
    elif invalid == 'warn':
        try:
            vo_tree = votable.parse(tf, pedantic=False, invalid='exception')
        except Exception as ex:
            warnings.warn("VOTABLE parsing raised exception: {0}".format(ex))
            vo_tree = votable.parse(tf, pedantic=False, invalid='mask')
    elif invalid == 'exception':
        vo_tree = votable.parse(tf, pedantic=False, invalid='exception')
    else:
        raise ValueError("Invalid keyword for 'invalid'. "
                         "Must be exception, mask, or warn")

    if get_catalog_names:
        return dict([(R.name, R) for R in vo_tree.resources])
    else:
        table_dict = OrderedDict()
        for t in vo_tree.iter_tables():
            if len(t.array) > 0:
                if t.ref is not None:
                    name = vo_tree.get_table_by_id(t.ref).name
                else:
                    name = t.name
                if name not in table_dict.keys():
                    table_dict[name] = []
                table_dict[name] += [t.to_table()]
        for name in table_dict.keys():
            if len(table_dict[name]) > 1:
                table_dict[name] = tbl.vstack(table_dict[name])
            else:
                table_dict[name] = table_dict[name][0]
        return commons.TableList(table_dict)
Example #16
    def valid_keywords(self):
        if not hasattr(self, '_valid_keyword_dict'):
            file_name = aud.get_pkg_data_filename(
                os.path.join("data", "inverse_dict.json"))
            with open(file_name, 'r') as f:
                kwd = json.load(f)
                self._valid_keyword_types = sorted(kwd.values())
                self._valid_keyword_dict = OrderedDict([(k, kwd[k]) for k in sorted(kwd)])

        return self._valid_keyword_dict
Example #17
def parse_vizier_votable(data, verbose=False, invalid='warn',
                         get_catalog_names=False):
    """
    Given a VOTable as a string, parse it into tables
    """
    if not verbose:
        commons.suppress_vo_warnings()

    tf = BytesIO(data)

    if invalid == 'mask':
        vo_tree = votable.parse(tf, pedantic=False, invalid='mask')
    elif invalid == 'warn':
        try:
            vo_tree = votable.parse(tf, pedantic=False, invalid='exception')
        except Exception as ex:
            warnings.warn("VOTABLE parsing raised exception: {0}".format(ex))
            vo_tree = votable.parse(tf, pedantic=False, invalid='mask')
    elif invalid == 'exception':
        vo_tree = votable.parse(tf, pedantic=False, invalid='exception')
    else:
        raise ValueError("Invalid keyword for 'invalid'. "
                         "Must be exception, mask, or warn")

    if get_catalog_names:
        return dict([(R.name, R) for R in vo_tree.resources])
    else:
        table_dict = OrderedDict()
        for t in vo_tree.iter_tables():
            if len(t.array) > 0:
                if t.ref is not None:
                    name = vo_tree.get_table_by_id(t.ref).name
                else:
                    name = t.name
                if name not in table_dict.keys():
                    table_dict[name] = []
                table_dict[name] += [t.to_table()]
        for name in table_dict.keys():
            if len(table_dict[name]) > 1:
                table_dict[name] = tbl.vstack(table_dict[name])
            else:
                table_dict[name] = table_dict[name][0]
        return commons.TableList(table_dict)
Example #18
 def keywords(self, values):
     if isinstance(values, six.string_types):
         values = [values]  # a single keyword string becomes a one-element list
     keys = [key.lower() for key in self.keyword_dict]
     values = [val.lower() for val in values]
     # warn about unknown keywords
     for val in set(values) - set(keys):
         warnings.warn("{val} : No such keyword".format(val=val))
     valid_keys = [
         key for key in self.keyword_dict.keys()
         if key.lower() in list(map(str.lower, values))
     ]
     # create a dict for each type of keyword
     set_keywords = OrderedDict()
     for key in self.keyword_dict:
         if key in valid_keys:
             if self.keyword_dict[key] in set_keywords:
                 set_keywords[self.keyword_dict[key]].append(key)
             else:
                 set_keywords[self.keyword_dict[key]] = [key]
     self._keywords = OrderedDict([(k, sorted(set_keywords[k]))
                                   for k in set_keywords])
Example #19
    def sample(self, n_samples):

        self.update()

        spines = OrderedDict()

        for axis in self:
            data = self[axis].data
            p = np.linspace(0., 1., data.shape[0])
            p_new = np.linspace(0., 1., n_samples)
            spines[axis] = Spine(self.parent_axes, self.transform)
            spines[axis].data = np.array([np.interp(p_new, p, data[:,0]),
                                          np.interp(p_new, p, data[:,1])]).transpose()

        return spines
Example #20
def _to_pandas(astropyTable):
    """
	Return a :class:`pandas.DataFrame` instance

	Returns
	-------
	dataframe : :class:`pandas.DataFrame`
		A pandas :class:`pandas.DataFrame` instance

	Raises
	------
	ImportError
		If pandas is not installed
	ValueError
		If the Table contains mixin or multi-dimensional columns
	"""
    from pandas import DataFrame

    if astropyTable.has_mixin_columns:
        raise ValueError(
            "Cannot convert a table with mixin columns to a pandas DataFrame")

    if any(
            getattr(col, 'ndim', 1) > 1
            for col in astropyTable.columns.values()):
        raise ValueError(
            "Cannot convert a table with multi-dimensional columns to a pandas DataFrame"
        )

    out = OrderedDict()

    for name, column in astropyTable.columns.items():
        if isinstance(column, MaskedColumn):
            if column.dtype.kind in ['i', 'u']:
                out[name] = column.astype(float).filled(np.nan)
            elif column.dtype.kind in ['f', 'c']:
                out[name] = column.filled(np.nan)
            else:
                out[name] = column.astype(object).filled(np.nan)
        else:
            out[name] = column

        if out[name].dtype.byteorder not in ('=', '|'):
            out[name] = out[name].byteswap().newbyteorder()

    return DataFrame(out)
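
The masked-column handling is the interesting part: integer columns cannot hold NaN, so they are upcast to float before the masked entries are filled. The same trick with a plain numpy masked array (astropy's MaskedColumn is a subclass of it):

import numpy as np

col = np.ma.MaskedArray([1, 2, 3], mask=[False, True, False])   # integer dtype

# Upcast to float first, then replace masked entries with NaN.
filled = col.astype(float).filled(np.nan)
print(filled)    # [ 1. nan  3.]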
Example #21
def data_frame_to_astropy_table(dataframe):
    """This is a backport of the Astropy method
   :meth:`astropy.table.table.Table.from_pandas`. It converts a Pandas
   :class:`pandas.DataFrame` object to an Astropy
   :class:`astropy.table.Table`.

    """
    from astropy.utils import OrderedDict
    from astropy.table import Table, Column, MaskedColumn
    from astropy.extern import six

    out = OrderedDict()

    for name in dataframe.columns:
        column = dataframe[name]
        mask = np.array(column.isnull())
        data = np.array(column)

        if data.dtype.kind == 'O':
            # If all elements of an object array are string-like or np.nan
            # then coerce back to a native numpy str/unicode array.
            string_types = six.string_types
            if six.PY3:
                string_types += (bytes, )
            nan = np.nan
            if all(isinstance(x, string_types) or x is nan for x in data):
                # Force any missing (null) values to b''.  Numpy will
                # upcast to str/unicode as needed.
                data[mask] = b''

                # When the numpy object array is represented as a list then
                # numpy initializes to the correct string or unicode type.
                data = np.array([x for x in data])

        if np.any(mask):
            out[name] = MaskedColumn(data=data, name=name, mask=mask)
        else:
            out[name] = Column(data=data, name=name)

    return Table(out)
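
The mask handling at the end is what preserves missing data: pandas nulls become a numpy boolean mask, which decides between Column and MaskedColumn. A short usage-style sketch of just that step (assumes pandas and astropy are installed):

import numpy as np
import pandas as pd
from astropy.table import Column, MaskedColumn

df = pd.DataFrame({'flux': [1.0, np.nan, 3.0]})

column = df['flux']
mask = np.array(column.isnull())          # [False, True, False]
data = np.array(column)

if np.any(mask):
    out = MaskedColumn(data=data, name='flux', mask=mask)
else:
    out = Column(data=data, name='flux')

print(out)   # the NaN entry is masked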
Example #22
    def get_multispec_wcs(self, dispersion_unit=None):
        """Extracting multispec information out of WAT header keywords and building WCS with it

        Parameters
        ----------

        dispersion_unit : astropy.units.Unit, optional
            unit for the dispersion; used if the header defines none, or to
            override the header unit (default: None)
        """

        assert self.naxis == 2
        assert self.global_wcs_attributes['system'] == 'multispec'
        assert self.wcs_attributes[1]['wtype'] == 'multispec'

        if dispersion_unit is None:
            dispersion_unit = self.wcs_attributes[0]['units']

        multispec_dict = _parse_multispec_dict(self.wcs_attributes[1])
        multispec_wcs_dict = OrderedDict()
        for spec_key in multispec_dict:
            single_spec_dict = multispec_dict[spec_key]

            if single_spec_dict['function']['type'] == 'legendre':
                function_dict = single_spec_dict['function']

                ##### @embray can you figure out if that's the only way to instantiate a polynomial (with c0=xx, c1=xx, ...)?

                coefficients = dict([('c{:d}'.format(i),
                                      function_dict['coefficients'][i])
                                     for i in range(function_dict['order'])])
                multispec_wcs_dict[spec_key] = specwcs.Spectrum1DLegendreWCS(
                    function_dict['order'] - 1,
                    domain=[function_dict['pmin'], function_dict['pmax']],
                    unit=dispersion_unit,
                    **coefficients)

            else:
                raise NotImplementedError
        return multispec_wcs_dict
Example #23
class Cartesian1DRepresentation(BaseRepresentation):
    """
    Representation of a one dimensional cartesian coordinate.

    Parameters
    ----------
    x : `~astropy.units.Quantity`
        The coordinate along the axis.

    copy : bool, optional
        If True arrays will be copied rather than referenced.
    """

    attr_classes = OrderedDict([('x', u.Quantity)])

    def __init__(self, x, copy=True):

        if not isinstance(x, self.attr_classes['x']):
            raise TypeError('x should be a {0}'.format(
                self.attr_classes['x'].__name__))

        x = self.attr_classes['x'](x, copy=copy)

        self._x = x

    @property
    def x(self):
        """
        The x component of the point(s).
        """
        return self._x

    @classmethod
    def from_cartesian(cls, other):
        return other

    def to_cartesian(self):
        return self
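
A usage sketch, assuming the class above is importable together with its BaseRepresentation base class (astropy.units supplies the Quantity):

from astropy import units as u

# A Quantity is required; a bare float would raise the TypeError above.
rep = Cartesian1DRepresentation(3.0 * u.km)
print(rep.x)                        # 3.0 km
print(rep.to_cartesian() is rep)    # True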
Example #24
 def _args_to_payload(self, *args, **kwargs):
     """
     accepts the arguments for different query functions and
     builds a script suitable for the Vizier votable CGI.
     """
     body = OrderedDict()
     center = kwargs.get('center')
     # process: catalog
     catalog = kwargs.get('catalog')
     if catalog is None:
         catalog = self.catalog
     if catalog is not None:
         if isinstance(catalog, stringtypes):
             body['-source'] = catalog
         elif isinstance(catalog, list):
             body['-source'] = ",".join(catalog)
         else:
             raise TypeError("Catalog must be specified as list or string")
     # process: columns
     columns = kwargs.get('columns')
     if columns is None:
         columns = self.columns
     else:
         columns = self.columns + columns
     # process: columns - always request computed positions in degrees
     if "_RAJ2000" not in columns:
         columns += ["_RAJ2000"]
     if "_DEJ2000" not in columns:
         columns += ["_DEJ2000"]
     # process: columns - identify sorting requests
     columns_out = []
     sorts_out = []
     for column in columns:
         if column[0] == '+':
             columns_out += [column[1:]]
             sorts_out += [column[1:]]
         elif column[0] == '-':
             columns_out += [column[1:]]
             sorts_out += [column]
         else:
             columns_out += [column]
     body['-out'] = ','.join(columns_out)
     if len(sorts_out) > 0:
         body['-sort'] = ','.join(sorts_out)
     # process: maximum rows returned
     if self.ROW_LIMIT < 0:
         body["-out.max"] = 'unlimited'
     else:
         body["-out.max"] = self.ROW_LIMIT
     # process: column filters
     column_filters = self.column_filters.copy()
     column_filters.update(kwargs.get('column_filters', {}))
     for (key, value) in column_filters.items():
         body[key] = value
     # process: center
     if center is not None:
         for (key, value) in center.items():
             body[key] = value
     # add column metadata: name, unit, UCD1+, and description
     body["-out.meta"] = "huUD"
     # computed position should always be in decimal degrees
     body["-oc.form"] = "d"
     # create final script
     script = "\n".join([
         "{key}={val}".format(key=key, val=val)
         for key, val in body.items()
     ])
     # add keywords
     if not isinstance(self.keywords,
                       property) and self.keywords is not None:
         script += "\n" + str(self.keywords)
     return script
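
Whatever options end up in the OrderedDict, the payload is serialized as a newline-separated key=value script. A stand-alone sketch of that final step, with illustrative values:

from collections import OrderedDict

body = OrderedDict([
    ('-source', 'II/246'),             # catalog name, illustrative
    ('-out', '_RAJ2000,_DEJ2000'),
    ('-out.max', 50),
    ('-oc.form', 'd'),
])

script = "\n".join("{key}={val}".format(key=key, val=val)
                   for key, val in body.items())
print(script)
# -source=II/246
# -out=_RAJ2000,_DEJ2000
# -out.max=50
# -oc.form=d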
Example #25
 def _args_to_payload(self, *args, **kwargs):
     """
     accepts the arguments for different query functions and
     builds a script suitable for the Vizier votable CGI.
     """
     body = OrderedDict()
     caller = kwargs['caller']
     del kwargs['caller']
     catalog = kwargs.get('catalog')
     if catalog is not None:
         if isinstance(catalog, basestring):
             body['-source'] = catalog
         elif isinstance(catalog, list):
             body['-source'] = ",".join(catalog)
         else:
             raise TypeError("Catalog must be specified as list or string")
     if caller == 'query_object_async':
         body["-c"] = args[0]
     elif caller == 'query_region_async':
         c = commons.parse_coordinates(args[0])
         ra = str(c.icrs.ra.degree)
         dec = str(c.icrs.dec.degree)
         if dec[0] not in ['+', '-']:
             dec = '+' + dec
         body["-c"] = "".join([ra, dec])
         # decide whether box or radius
         if kwargs.get('radius') is not None:
             radius = kwargs['radius']
             unit, value = _parse_dimension(radius)
             switch = "-c.r" + unit
             body[switch] = value
         elif kwargs.get('width') is not None:
             width = kwargs['width']
             w_unit, w_value = _parse_dimension(width)
             switch = "-c.b" + w_unit
             height = kwargs.get('height')
             # is box a rectangle or square?
             if height is not None:
                 h_unit, h_value = _parse_dimension(height)
                 if h_unit != w_unit:
                     warnings.warn(
                         "Converting height to same unit as width")
                     h_value = u.Quantity(h_value, u.Unit(_str_to_unit(h_unit))).to(
                         u.Unit(_str_to_unit(w_unit)))
                 body[switch] = "x".join([str(w_value), str(h_value)])
             else:
                 body[switch] = "x".join([str(w_value)] * 2)
         elif kwargs.get('height'):
             warnings.warn(
                 "No width given - shape interpreted as square (height x height)")
             height = kwargs['height']
             h_unit, h_value = _parse_dimension(height)
             switch = "-c.b" + h_unit
             body[switch] = h_value
         else:
             raise Exception(
                 "At least one of radius, width/height must be specified")
     # set output parameters
     if not isinstance(self.columns, property) and self.columns is not None:
         if "all" in self.columns:
             body["-out"] = "**"
         else:
             out_cols = ",".join([col for col in self.columns])
             # if default then return default cols and listed cols
             if "default" in self.columns:
                 body["-out.add"] = out_cols
             # else return only the listed cols
             else:
                 body["-out"] = out_cols
     # otherwise ask to return default columns
     else:
         body["-out"] = "*"
     # set the maximum rows returned
     body["-out.max"] = Vizier.ROW_LIMIT
     script = "\n".join(["{key}={val}".format(key=key, val=val)
                        for key, val in body.items()])
     # add keywords
     if not isinstance(self.keywords, property) and self.keywords is not None:
         script += "\n" + str(self.keywords)
     # add column filters
     if not isinstance(self.column_filters, property) and self.column_filters is not None:
         filter_str = "\n".join(["{key}={constraint}".format(key=key, constraint=constraint) for key, constraint in
                                 self.column_filters.items()])
         script += "\n" + filter_str
     return script
Example #26
    def _parse_result(self, response, get_catalog_names=False, verbose=False, invalid='warn'):
        """
        Parses the HTTP response to create a `~astropy.table.Table`.

        Returns the raw result as a string in case of parse errors.

        Parameters
        ----------
        response : `requests.Response`
            The response of the HTTP POST request
        get_catalog_names : bool
            If specified, return only the table names (useful for table
            discovery)
        invalid : 'warn', 'mask' or 'raise'
            The behavior if a VOTABLE cannot be parsed. The default is 'warn',
            which tries to parse the table; if an exception is raised, it is
            printed as a warning and the masked table is returned.

        Returns
        -------
        table_list : `astroquery.utils.TableList` or str
            If there are errors in the parsing, then returns the raw results as a string.
        """
        if not verbose:
            commons.suppress_vo_warnings()
        try:
            tf = six.BytesIO(response.content)

            if invalid == 'mask':
                vo_tree = votable.parse(tf, pedantic=False, invalid='mask')
            elif invalid == 'warn':
                try:
                    vo_tree = votable.parse(tf, pedantic=False, invalid='raise')
                except Exception as ex:
                    warnings.warn("VOTABLE parsing raised exception: {0}".format(ex))
                    vo_tree = votable.parse(tf, pedantic=False, invalid='mask')
            elif invalid == 'raise':
                vo_tree = votable.parse(tf, pedantic=False, invalid='raise')
            else:
                raise ValueError("Invalid keyword 'invalid'.  Must be raise, mask, or warn")

            if get_catalog_names:
                return dict([(R.name, R) for R in vo_tree.resources])
            else:
                table_dict = OrderedDict()
                for t in vo_tree.iter_tables():
                    if len(t.array) > 0:
                        if t.ref is not None:
                            name = vo_tree.get_table_by_id(t.ref).name
                        else:
                            name = t.name
                        if name not in table_dict.keys():
                            table_dict[name] = []
                        table_dict[name] += [t.to_table()]
                for name in table_dict.keys():
                    if len(table_dict[name]) > 1:
                        table_dict[name] = tbl.vstack(table_dict[name])
                    else:
                        table_dict[name] = table_dict[name][0]
                return commons.TableList(table_dict)

        except Exception as ex:
            self.response = response
            self.table_parse_error = ex
            raise TableParseError("Failed to parse VIZIER result! The raw response can be found "
                                  "in self.response, and the error in self.table_parse_error."
                                  "  The attempted parsed result is in self.parsed_result.\n"
                                  "Exception: " + str(self.table_parse_error))
Example #27
    def _parse_result(self, response, get_catalog_names=False, verbose=False):
        """
        Parses the HTTP response to create a `~astropy.table.Table`.

        Returns the raw result as a string in case of parse errors.

        Parameters
        ----------
        response : `requests.Response`
            The response of the HTTP POST request
        get_catalog_names : bool
            If specified, return only the table names (useful for table
            discovery)

        Returns
        -------
        table_list : `astroquery.utils.TableList` or str
            If there are errors in the parsing, then returns the raw results as a string.
        """
        if not verbose:
            commons.suppress_vo_warnings()
        try:
            tf = tempfile.NamedTemporaryFile()
            if six.PY3:
                # This is an exceedingly confusing section
                # It is likely to be doubly wrong, but has caused issue #185
                try:
                    # Case 1: data is read in as unicode
                    tf.write(response.content.encode())
                except AttributeError:
                    # Case 2: data is read in as a byte string
                    tf.write(response.content.decode().encode('utf-8'))
            else:
                tf.write(response.content.encode('utf-8'))
            tf.file.flush()
            vo_tree = votable.parse(tf, pedantic=False)
            if get_catalog_names:
                return dict([(R.name, R) for R in vo_tree.resources])
            else:
                table_dict = OrderedDict()
                for t in vo_tree.iter_tables():
                    if len(t.array) > 0:
                        if t.ref is not None:
                            name = vo_tree.get_table_by_id(t.ref).name
                        else:
                            name = t.name
                        if name not in table_dict.keys():
                            table_dict[name] = []
                        table_dict[name] += [t.to_table()]
                for name in table_dict.keys():
                    if len(table_dict[name]) > 1:
                        table_dict[name] = tbl.vstack(table_dict[name])
                    else:
                        table_dict[name] = table_dict[name][0]
                return commons.TableList(table_dict)

        except Exception as ex:
            self.response = response
            self.table_parse_error = ex
            raise TableParseError(
                "Failed to parse VIZIER result! The raw response can be found "
                "in self.response, and the error in self.table_parse_error."
                "  The attempted parsed result is in self.parsed_result.\n"
                "Exception: " + str(self.table_parse_error))
Example #28
    def _parse_result(self, response, get_catalog_names=False, verbose=False):
        """
        Parses the HTTP response to create a `~astropy.table.Table`.

        Returns the raw result as a string in case of parse errors.

        Parameters
        ----------
        response : `requests.Response`
            The response of the HTTP POST request
        get_catalog_names : bool
            If specified, return only the table names (useful for table
            discovery)

        Returns
        -------
        table_list : `astroquery.utils.TableList` or str
            If there are errors in the parsing, then returns the raw results as a string.
        """
        if not verbose:
            commons.suppress_vo_warnings()
        try:
            tf = tempfile.NamedTemporaryFile()
            if six.PY3:
                # This is an exceedingly confusing section
                # It is likely to be doubly wrong, but has caused issue #185
                try:
                    # Case 1: data is read in as unicode
                    tf.write(response.content.encode())
                except AttributeError:
                    # Case 2: data is read in as a byte string
                    tf.write(response.content.decode().encode('utf-8'))
            else:
                tf.write(response.content.encode('utf-8'))
            tf.file.flush()
            vo_tree = votable.parse(tf, pedantic=False)
            if get_catalog_names:
                return dict([(R.name,R) for R in vo_tree.resources])
            else:
                table_dict = OrderedDict()
                for t in vo_tree.iter_tables():
                    if len(t.array) > 0:
                        if t.ref is not None:
                            name = vo_tree.get_table_by_id(t.ref).name
                        else:
                            name = t.name
                        if name not in table_dict.keys():
                            table_dict[name] = []
                        table_dict[name] += [t.to_table()]
                for name in table_dict.keys():
                    if len(table_dict[name]) > 1:
                        table_dict[name] = tbl.vstack(table_dict[name])
                    else:
                        table_dict[name] = table_dict[name][0]
                return commons.TableList(table_dict)

        except Exception as ex:
            self.response = response
            self.table_parse_error = ex
            raise TableParseError("Failed to parse VIZIER result! The raw response can be found "
                                  "in self.response, and the error in self.table_parse_error."
                                  "  The attempted parsed result is in self.parsed_result.\n"
                                  "Exception: " + str(self.table_parse_error))
Example #29
    def _args_to_payload(self, *args, **kwargs):
        """
        accepts the arguments for different query functions and
        builds a script suitable for the Vizier votable CGI.
        """
        body = OrderedDict()
        center = kwargs.get('center')
        # process: catalog
        catalog = kwargs.get('catalog')
        if catalog is None:
            catalog = self.catalog
        if catalog is not None:
            if isinstance(catalog, six.string_types):
                body['-source'] = catalog
            elif isinstance(catalog, list):
                body['-source'] = ",".join(catalog)
            else:
                raise TypeError("Catalog must be specified as list or string")
        # process: columns
        columns = kwargs.get('columns')
        if columns is None:
            columns = copy.copy(self.columns)
        else:
            columns = self.columns + columns

        # keyword names that can mean 'all' need to be treated separately
        alls = ['all','*']
        if any(x in columns for x in alls):
            for x in alls:
                if x in columns:
                    columns.remove(x)
            body['-out.all'] = 2

        # process: columns - always request computed positions in degrees
        if "_RAJ2000" not in columns:
            columns += ["_RAJ2000"]
        if "_DEJ2000" not in columns:
            columns += ["_DEJ2000"]
        # process: columns - identify sorting requests
        columns_out = []
        sorts_out = []
        for column in columns:
            if column[0] == '+':
                columns_out += [column[1:]]
                sorts_out += [column[1:]]
            elif column[0] == '-':
                columns_out += [column[1:]]
                sorts_out += [column]
            else:
                columns_out += [column]
        body['-out.add'] = ','.join(columns_out)
        body['-out'] = columns_out
        if len(sorts_out) > 0:
            body['-sort'] = ','.join(sorts_out)
        # process: maximum rows returned
        row_limit = kwargs.get('row_limit') or self.ROW_LIMIT
        if row_limit < 0:
            body["-out.max"] = 'unlimited'
        else:
            body["-out.max"] = row_limit
        # process: column filters
        column_filters = self.column_filters.copy()
        column_filters.update(kwargs.get('column_filters', {}))
        for (key, value) in column_filters.items():
            body[key] = value
        # process: center
        if center is not None:
            for (key, value) in center.items():
                body[key] = value
        # add column metadata: name, unit, UCD1+, and description
        body["-out.meta"] = "huUD"
        # merge tables when a list is queried against a single catalog
        body["-out.form"] = "mini"
        # computed position should always be in decimal degrees
        body["-oc.form"] = "d"

        ucd = kwargs.get('ucd', "") + self.ucd
        if ucd:
            body['-ucd'] = ucd

        # create final script
        script = "\n".join(["{key}={val}".format(key=key, val=val)
                            for key, val in body.items()])
        # add keywords
        if not isinstance(self.keywords, property) and self.keywords is not None:
            script += "\n" + str(self.keywords)
        return script
Example #30
    3: 'cubicspline',
    4: 'linearspline',
    5: 'pixelcoordinatearray',
    6: 'sampledcoordinatearray'
}

wcs_attributes_function_parameters = {
    'chebyshev': ['order', 'pmin', 'pmax'],
    'legendre': ['order', 'pmin', 'pmax'],
    'linearspline': ['npieces', 'pmin', 'pmax'],
    'cubicspline': ['npieces', 'pmin', 'pmax']
}

wcs_attributes_general_keywords = OrderedDict([
    ('aperture', int), ('beam', int), ('dispersion_type', int),
    ('dispersion0', float), ('average_dispersion_delta', float),
    ('no_valid_pixels', int), ('doppler_factor', float),
    ('aperture_low', float), ('aperture_high', float)
])

wcs_attributes_function_keywords = OrderedDict([('weight', float),
                                                ('zero_point_offset', float),
                                                ('type', int), ('order', int),
                                                ('pmin', float),
                                                ('pmax', float),
                                                ('npieces', int)])


def _get_num_coefficients(function_dict):
    """
    Returns the number of coefficients according to the IRAF FITS format defined
    here: http://iraf.net/irafdocs/specwcs.php
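
The snippet above is cut off before the function body. Judging from how the parsed coefficients are consumed later in this collection (order coefficients for Legendre/Chebyshev, npieces plus the spline degree for the splines), a plausible sketch of the body would be the following; treat it as an assumption, not the original source:

def _get_num_coefficients(function_dict):
    # Inferred from the coefficient counts used by multispec_wcs_reader below.
    function_type = function_dict['type']
    if function_type in ('legendre', 'chebyshev'):
        return function_dict['order']
    elif function_type == 'linearspline':
        return function_dict['npieces'] + 1      # degree 1
    elif function_type == 'cubicspline':
        return function_dict['npieces'] + 3      # degree 3
    else:
        raise NotImplementedError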
Example #31
def read_table_fits(input, hdu=None):
    """
    Read a Table object from a FITS file

    Parameters
    ----------
    input : str or file-like object or compatible `astropy.io.fits` HDU object
        If a string, the filename to read the table from. If a file object, or
        a compatible HDU object, the object to extract the table from. The
        following `astropy.io.fits` HDU objects can be used as input:
        - :class:`~astropy.io.fits.hdu.table.TableHDU`
        - :class:`~astropy.io.fits.hdu.table.BinTableHDU`
        - :class:`~astropy.io.fits.hdu.table.GroupsHDU`
        - :class:`~astropy.io.fits.hdu.hdulist.HDUList`
    hdu : int or str, optional
        The HDU to read the table from.
    """

    if isinstance(input, six.string_types):
        input = fits_open(input)
        to_close = input
    else:
        to_close = None

    if hasattr(input, 'read'):
        input = fits_open(input)

    try:

        # Parse all table objects
        tables = OrderedDict()
        if isinstance(input, HDUList):
            for ihdu, hdu_item in enumerate(input):
                if isinstance(hdu_item, (TableHDU, BinTableHDU, GroupsHDU)):
                    tables[ihdu] = hdu_item

            if len(tables) > 1:

                if hdu is None:
                    warnings.warn("hdu= was not specified but multiple tables"
                                  " are present, reading in first available"
                                  " table (hdu={0})".format(
                                      list(tables.keys())[0]))
                    hdu = list(tables.keys())[0]

                # hdu might not be an integer, so we first need to convert it
                # to the correct HDU index
                hdu = input.index_of(hdu)

                if hdu in tables:
                    table = tables[hdu]
                else:
                    raise ValueError("No table found in hdu={0}".format(hdu))

            elif len(tables) == 1:
                table = tables[list(tables.keys())[0]]
            else:
                raise ValueError("No table found")

        elif isinstance(input, (TableHDU, BinTableHDU, GroupsHDU)):

            table = input

        else:

            raise ValueError("Input should be a string, a file-like object, "
                             "an  HDUList, TableHDU, BinTableHDU, or "
                             "GroupsHDU instance")

        # Check if table is masked
        masked = False
        for col in table.columns:
            if col.null is not None:
                masked = True
                break

        # Convert to an astropy.table.Table object
        t = Table(table.data, masked=masked)

        # Copy over null values if needed
        if masked:
            for col in table.columns:
                t[col.name].set_fill_value(col.null)
                t[col.name].mask[t[col.name] == col.null] = True

        # Copy over units
        for col in table.columns:
            if col.unit is not None:
                try:
                    t[col.name].units = u.Unit(col.unit, format='fits')
                except ValueError:
                    t[col.name].units = u.UnrecognizedUnit(col.unit)

        # TODO: deal properly with unsigned integers

        for key, value, comment in table.header.cards:

            if key in ['COMMENT', 'HISTORY']:
                if key in t.meta:
                    t.meta[key].append(value)
                else:
                    t.meta[key] = [value]

            elif key in t.meta:  # key is duplicate

                if isinstance(t.meta[key], list):
                    t.meta[key].append(value)
                else:
                    t.meta[key] = [t.meta[key], value]

            elif (is_column_keyword(key.upper())
                  or key.upper() in REMOVE_KEYWORDS):

                pass

            else:

                t.meta[key] = value

        # TODO: implement masking

    finally:

        if to_close is not None:
            to_close.close()

    return t
Example #32
def multispec_wcs_reader(wcs_info, dispersion_unit=None):
    """Extracting multispec information out of WAT header keywords and
    building WCS with it

    Parameters
    ----------

    dispersion_unit : astropy.units.Unit, optional
        unit for the dispersion; used if the header defines none, or to
        override the header unit (default: None)
    """

    assert wcs_info.naxis == 2
    assert wcs_info.global_wcs_attributes['system'] == 'multispec'
    assert wcs_info.wcs_attributes[1]['wtype'] == 'multispec'

    if dispersion_unit is None:
        dispersion_unit = wcs_info.wcs_attributes[0]['units']

    multispec_dict = _parse_multispec_dict(wcs_info.wcs_attributes[1])
    wcs_dict = OrderedDict()
    for spec_key in multispec_dict:
        single_spec_dict = multispec_dict[spec_key]
        if single_spec_dict['dispersion_type'] == 1:
            #log-linear dispersion
            log = True
        else:
            log = False

        if single_spec_dict['dispersion_type'] in [0, 1]:
            #linear or log-linear dispersion
            dispersion_wcs = specwcs.Spectrum1DPolynomialWCS(
                degree=1,
                c0=single_spec_dict["dispersion0"],
                c1=single_spec_dict["average_dispersion_delta"])

        else:
            # single_spec_dict['dispersion_type'] == 2
            dispersion_wcs = specwcs.WeightedCombinationWCS()
            for function_dict in single_spec_dict["functions"]:
                if function_dict['type'] == 'legendre':
                    ##### @embray can you figure out if that's the only way to
                    ##  instantiate a polynomial (with c0=xx, c1=xx, ...)?

                    coefficients = dict([
                        ('c{:d}'.format(i), function_dict['coefficients'][i])
                        for i in range(function_dict['order'])
                    ])

                    wcs = specwcs.Spectrum1DIRAFLegendreWCS(
                        function_dict['order'], function_dict['pmin'],
                        function_dict['pmax'], **coefficients)

                elif function_dict['type'] == 'chebyshev':
                    coefficients = dict([
                        ('c{:d}'.format(i), function_dict['coefficients'][i])
                        for i in range(function_dict['order'])
                    ])

                    wcs = specwcs.Spectrum1DIRAFChebyshevWCS(
                        function_dict['order'], function_dict['pmin'],
                        function_dict['pmax'], **coefficients)

                elif function_dict['type'] in ['linearspline', 'cubicspline']:
                    if function_dict['type'] == 'linearspline':
                        degree = 1
                    else:
                        degree = 3
                    n_pieces = function_dict['npieces']
                    pmin = function_dict['pmin']
                    pmax = function_dict['pmax']
                    y = [
                        function_dict['coefficients'][i]
                        for i in range(n_pieces + degree)
                    ]
                    wcs = specwcs.Spectrum1DIRAFBSplineWCS(
                        degree, n_pieces, y, pmin, pmax)
                else:
                    raise NotImplementedError
                dispersion_wcs.add_WCS(
                    wcs,
                    weight=function_dict["weight"],
                    zero_point_offset=function_dict["zero_point_offset"])

        composite_wcs = specwcs.MultispecIRAFCompositeWCS(
            dispersion_wcs,
            single_spec_dict["no_valid_pixels"],
            z=single_spec_dict["doppler_factor"],
            log=log,
            aperture=single_spec_dict["aperture"],
            beam=single_spec_dict["beam"],
            aperture_low=single_spec_dict["aperture_low"],
            aperture_high=single_spec_dict["aperture_high"],
            unit=dispersion_unit)
        wcs_dict[spec_key] = composite_wcs
    return wcs_dict
Example #33
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""The sncosmo registry is used to load and return instances in memory
based on string identifiers."""

from astropy.utils import OrderedDict
from astropy.extern import six

__all__ = ['register_loader', 'register', 'retrieve', 'get_loaders_metadata']

_loaders = OrderedDict()
_instances = OrderedDict()


def register_loader(data_class,
                    name,
                    func,
                    args=None,
                    version=None,
                    meta=None,
                    force=False):
    """Register a data reading function.

    Parameters
    ----------
    data_class : classobj
        The class of the object that the loader returns.
    name : str
        The data identifier.
    func : callable
        The function to read in the data. Must accept a name and version
        keyword argument.
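
The registry itself is just an OrderedDict keyed by identifier, which is why registration order is preserved. A generic sketch of the registry pattern the module describes (not sncosmo's actual implementation; the key layout and stored tuple are assumptions):

from collections import OrderedDict

_loaders = OrderedDict()

def register_loader(data_class, name, func, args=None, version=None,
                    meta=None, force=False):
    # Key on the class plus the name (made case-insensitive here; an assumption).
    key = (data_class, name.lower())
    if key in _loaders and not force:
        raise Exception("Loader already registered for {0!r}".format(name))
    _loaders[key] = (func, args or [], version, meta or {})

def retrieve(data_class, name, version=None):
    # The loader must accept name and version keyword arguments (see docstring above).
    func, args, _, _ = _loaders[(data_class, name.lower())]
    return func(*args, name=name, version=version)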
Example #34
def read_table_fits(input, hdu=None):
    """
    Read a Table object from a FITS file

    Parameters
    ----------
    input : str or file-like object or compatible `astropy.io.fits` HDU object
        If a string, the filename to read the table from. If a file object, or
        a compatible HDU object, the object to extract the table from. The
        following `astropy.io.fits` HDU objects can be used as input:
        - :class:`~astropy.io.fits.hdu.table.TableHDU`
        - :class:`~astropy.io.fits.hdu.table.BinTableHDU`
        - :class:`~astropy.io.fits.hdu.table.GroupsHDU`
        - :class:`~astropy.io.fits.hdu.hdulist.HDUList`
    hdu : int or str, optional
        The HDU to read the table from.
    """

    if isinstance(input, basestring):
        input = fits_open(input)
        to_close = input
    else:
        to_close = None

    if hasattr(input, 'read'):
        input = fits_open(input)

    try:

        # Parse all table objects
        tables = OrderedDict()
        if isinstance(input, HDUList):
            for ihdu, hdu_item in enumerate(input):
                if isinstance(hdu_item, (TableHDU, BinTableHDU, GroupsHDU)):
                    tables[ihdu] = hdu_item

            if len(tables) > 1:

                if hdu is None:
                    warnings.warn("hdu= was not specified but multiple tables"
                                  " are present, reading in first available"
                                  " table (hdu={0})".format(tables.keys()[0]))
                    hdu = tables.keys()[0]

                # hdu might not be an integer, so we first need to convert it
                # to the correct HDU index
                hdu = input.index_of(hdu)

                if hdu in tables:
                    table = tables[hdu]
                else:
                    raise ValueError("No table found in hdu={0}".format(hdu))

            elif len(tables) == 1:
                table = tables[tables.keys()[0]]
            else:
                raise ValueError("No table found")

        elif isinstance(input, (TableHDU, BinTableHDU, GroupsHDU)):

            table = input

        else:

            raise ValueError("Input should be a string, a file-like object, "
                             "an  HDUList, TableHDU, BinTableHDU, or "
                             "GroupsHDU instance")

        # Check if table is masked
        masked = False
        for col in table.columns:
            if col.null is not None:
                masked = True
                break

        # Convert to an astropy.table.Table object
        t = Table(table.data, masked=masked)

        # Copy over null values if needed
        if masked:
            for col in table.columns:
                t[col.name].set_fill_value(col.null)
                t[col.name].mask[t[col.name] == col.null] = True

        # Copy over units
        for col in table.columns:
            if col.unit is not None:
                try:
                    t[col.name].units = u.Unit(col.unit, format='fits')
                except ValueError:
                    t[col.name].units = u.UnrecognizedUnit(col.unit)

        # TODO: deal properly with unsigned integers

        for key, value, comment in table.header.cards:

            if key in ['COMMENT', 'HISTORY']:
                if key in t.meta:
                    t.meta[key].append(value)
                else:
                    t.meta[key] = [value]

            elif key in t.meta:  # key is duplicate

                if isinstance(t.meta[key], list):
                    t.meta[key].append(value)
                else:
                    t.meta[key] = [t.meta[key], value]

            elif (is_column_keyword(key.upper()) or
                  key.upper() in REMOVE_KEYWORDS):

                pass

            else:

                t.meta[key] = value

        # TODO: implement masking

    finally:

        if to_close is not None:
            to_close.close()

    return t
Example #35
 def _args_to_payload(self, *args, **kwargs):
     """
     accepts the arguments for different query functions and
     builds a script suitable for the Vizier votable CGI.
     """
     body = OrderedDict()
     center = kwargs.get('center')
     # process: catalog
     catalog = kwargs.get('catalog')
     if catalog is None:
         catalog = self.catalog
     if catalog is not None:
         if isinstance(catalog, basestring):
             body['-source'] = catalog
         elif isinstance(catalog, list):
             body['-source'] = ",".join(catalog)
         else:
             raise TypeError("Catalog must be specified as list or string")
     # process: columns
     columns = kwargs.get('columns')
     if columns is None:
         columns = self.columns
     else:
         columns = self.columns + columns
     # process: columns - always request computed positions in degrees
     if "_RAJ2000" not in columns:
         columns += ["_RAJ2000"]
     if "_DEJ2000" not in columns:
         columns += ["_DEJ2000"]
     # process: columns - identify sorting requests
     columns_out = []
     sorts_out = []
     for column in columns:
         if column[0] == '+':
             columns_out += [column[1:]]
             sorts_out += [column[1:]]
         elif column[0] == '-':
             columns_out += [column[1:]]
             sorts_out += [column]
         else:
             columns_out += [column]
     body['-out'] = ','.join(columns_out)
     if len(sorts_out)>0:
         body['-sort'] = ','.join(sorts_out)
     # process: maximum rows returned
     if self.ROW_LIMIT < 0:
         body["-out.max"] = 'unlimited'
     else:
         body["-out.max"] = self.ROW_LIMIT
     # process: column filters
     column_filters = self.column_filters.copy()
     column_filters.update(kwargs.get('column_filters', {}))
     for (key, value) in column_filters.items():
         body[key] = value
     # process: center
     if center is not None:
         for (key, value) in center.items():
             body[key] = value
     # add column metadata: name, unit, UCD1+, and description
     body["-out.meta"] = "huUD"
     # computed position should always be in decimal degrees
     body["-oc.form"] = "d"
     # create final script
     script = "\n".join(["{key}={val}".format(key=key, val=val)
                for key, val in body.items()])
     # add keywords
     if not isinstance(self.keywords, property) and self.keywords is not None:
         script += "\n" + str(self.keywords)
     return script
Example #36
def create_in_odict(t_list):
    return OrderedDict([(t.meta['name'], t) for t in t_list])
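
A short usage sketch, assuming create_in_odict is in scope together with its OrderedDict import; the table names are made up:

from astropy.table import Table

t1 = Table({'flux': [1.0, 2.0]}, meta={'name': 'catalog/one'})
t2 = Table({'flux': [3.0]}, meta={'name': 'catalog/two'})

odict = create_in_odict([t1, t2])
print(list(odict.keys()))    # ['catalog/one', 'catalog/two']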