def __getitem__(self, slice_):
        """Return this parameter's data for the requested slice.

        The first member of *slice_* selects the time vertices; any
        remaining members are applied to the trailing (spatial)
        dimensions of the unpacked data array.

        @param slice_  A single index/slice, or a tuple of them; the
                       first entry indexes time, the rest index space.
        @return  A numpy array of shape (n_time_verts,) + self.shape[1:],
                 further sliced by the trailing members of *slice_*.
        @raise Exception  If this parameter wrapper has been invalidated.
        """
        if not self._valid:
            raise Exception('Invalid slicing of parameter "%s"' % self._pparam.name)

        # Normalize a bare index/slice to a 1-tuple so the logic below can
        # uniformly treat slice_ as one selector per dimension.
        if not isinstance(slice_, tuple):
            slice_ = (slice_,)

        # Get the time vertices based on the first member of the slice.
        tverts = self._tverts[slice_[0]]

        # A scalar time index yields a single (unsized) vertex handle, for
        # which len() raises TypeError; treat that as one time vertex.
        try:
            ntverts = len(tverts)
        except TypeError:
            ntverts = 1

        # Unpack the tagged data for the selected time vertices, reshape to
        # (ntverts, *spatial_shape), then apply the remaining slice members
        # to the trailing dimensions only.
        nshp = (ntverts,) + self.shape[1:]
        return utils.get_packed_data(self._pparam._tag_hndl, tverts, self._pparam.data_type).reshape(nshp)[(slice(None),) + slice_[1:]]
        # NOTE(review): this entire block is unreachable -- it follows the
        # `return` above -- and references names (ntimes, mesh, tlines,
        # iBase, iMesh, t_topo_tag, data_map) that are not defined in this
        # scope. It appears to be a pasted fragment of another routine;
        # left byte-for-byte as found, pending cleanup.
        ## Method one extracts data one ts at a time and builds the complete timeseries for each variable piecemeal
        # This method is slower but robust if data variables are added/removed during the timeseries
        # NOTE: does not yet deal with gaps in the timeseries
        for i in range(ntimes):
            # The first vertex adjacent to time-line i; if tlines[i] has no
            # adjacency (IndexError on [0]), fall back to the previous line's.
            try:
                tsvert = mesh.getEntAdj(tlines[i], iBase.Type.vertex)[0]
            except IndexError:
                tsvert = mesh.getEntAdj(tlines[i - 1], iBase.Type.vertex)[0]

            # Topology set for this time step; `verts` is retrieved but not
            # used below -- presumably needed by code outside this view.
            topo_set = iMesh.EntitySet(t_topo_tag[tsvert], mesh)
            verts = topo_set.getEntities(type=iBase.Type.vertex)

            # Accumulate each "DATA*" tag's packed payload for this time
            # step into data_map, stacking rows across time steps.
            dtags = mesh.getAllTags(tsvert)
            for dt in (dt for dt in dtags if dt.name.startswith("DATA")):
                dtc, _ = utils.unpack_data_tag_name(dt.name)
                data = utils.get_packed_data(dt, tsvert, dtc)

                if not dt.name in data_map:
                    data_map[dt.name] = data
                else:
                    data_map[dt.name] = numpy.vstack([data_map[dt.name], data])
    # NOTE(review): orphan `else:` -- there is no matching `if` at this
    # indentation level in the visible chunk, so this fragment is
    # syntactically broken as it stands. Looks like the faster bulk-read
    # counterpart to "method one" above; left byte-for-byte pending repair.
    else:
        ## Method two extracts the entire timeseries for each variable
        # This method is faster but only works if data exists for all vertices

        # Extract the temporal vertex array from the time_topology (tlines)
        tsverts = [x[0] for x in mesh.getEntAdj(tlines, iBase.Type.vertex)]
        # Append the far-end vertex of the last time line to close the series.
        tsverts.append(mesh.getEntAdj(tlines[len(tlines) - 1], iBase.Type.vertex)[1])
        dtags = mesh.getAllTags(tsverts[0])  # This assumes that all data_tags are present on the first vertex
        # NOTE(review): loop body appears truncated -- `dtc` is computed but
        # never used within the visible lines.
        for dt in (dt for dt in dtags if dt.name.startswith("DATA")):
            dtc, _ = utils.unpack_data_tag_name(dt.name)
    # NOTE(review): fragment of an out-of-view routine -- `t_set`, `ntimes`
    # and `data_map` are not defined in this chunk. Gathers per-time-step
    # station entity sets and accumulates their "DATA" tag payloads.
    ts_sets=t_set.getChildren()

    for i in range(ntimes):
        # NOTE(review): bare `except:` masks any failure here, and if
        # ts_sets[i] raised IndexError the error message below would raise
        # IndexError again instead of the intended Exception -- TODO confirm
        # intent and narrow the handler.
        try:
            stn_set = ts_sets[i]
        except:
            stn_set = None

        if stn_set is None:
            raise Exception("The EntitySet %s does not contain a point, cannot process" % ts_sets[i])

        # Stack each tag's packed data row-wise across time steps; unlike
        # the loop above, this one does not filter on the "DATA" prefix.
        dtags=mesh.getAllTags(stn_set)
        for dt in dtags:
            dtc,_=utils.unpack_data_tag_name(dt.name)
            data=utils.get_packed_data(dt, stn_set, dtc)

            if not dt.name in data_map:
                data_map[dt.name] = data
            else:
                data_map[dt.name] = numpy.vstack([data_map[dt.name],data])

    # NOTE(review): plotting fragment, truncated at the end of the visible
    # chunk. The body is indentation-broken: the lines after the
    # `dtc, varname=...` assignment jump one level deeper with no opening
    # statement, so this cannot parse as-is. Left byte-for-byte.
    # Grid layout: 2 rows, enough columns for all variables.
    nvars=len(data_map)
    nrow=2
    ncol=int(nvars/nrow)+1

    # Apply units, _FillValue, scale_factor, add_offset and plot
    fig = figure()
    i=1
    for var in data_map:
        dtc, varname=utils.unpack_data_tag_name(var)
            # Get the units, _FillValue, scale_factor, and add_offset
            fill_val=numpy.nan
            scale_factor=None
            add_offset=None
            units='Unknown'
            if varname in var_atts:
                if '_FillValue' in var_atts[varname]:
                    fill_val=var_atts[varname]['_FillValue']
                if 'scale_factor' in var_atts[varname]:
                    scale_factor=var_atts[varname]['scale_factor']
                if 'add_offset' in var_atts[varname]:
                    add_offset=var_atts[varname]['add_offset']
                if 'units' in var_atts[varname]:
                    units=var_atts[varname]['units']

            # NOTE(review): `data_t` is undefined in this chunk, and `set`
            # shadows the builtin -- presumably both come from the enclosing
            # scope; verify against the full file.
            data=utils.get_packed_data(data_t, set, dtc)
#            data=data_t[set]

            # NOTE(review): when fill_val stays numpy.nan (the default),
            # masked_equal masks nothing, since NaN != NaN -- TODO confirm
            # whether NaN fills should be masked via masked_invalid instead.
            data=numpy.ma.masked_equal(data,fill_val,copy=False)
            # apply the scale_factor
            if scale_factor is not None:
                data=data.astype(scale_factor.dtype.char)*scale_factor
                # apply the add_offset
            if add_offset is not None:
                data+=add_offset

            # Build polygon outlines (x,y vertex coords) for each quad.
            qvert_list=mesh.getEntAdj(quads,iBase.Type.vertex)
            poly_list=[]
            for qv in qvert_list:
                cds=mesh.getVtxCoords(qv)
                poly_list.append(cds[:,[0,1]].tolist())