def test_dictionary_class(self):
    """DictionaryClass exposes dict keys as attributes, recursively,
    and get_dict() round-trips back to the original mapping."""
    wrapped = DictionaryClass({'foo': 'bar', 'nested': {'myvar': 5}})
    # Top-level and nested attribute access.
    self.assertEqual('bar', wrapped.foo)
    self.assertEqual(5, wrapped.nested.myvar)
    # Round-trip back to a plain dict.
    self.assertEqual({'foo': 'bar', 'nested': {'myvar': 5}},
                     wrapped.get_dict())
def spectrum_redshift_calc(private_key, sender_id, msg_id, mtype, params, extra): """ spectrum_redshift_calc """ try: info("spectrum_redshift_calc()") try: payload = DictionaryClass(params) x = decode_string(payload.x) y = decode_string(payload.y) yerr = decode_string(payload.yerr) from_redshift = float(payload.from_redshift) to_redshift = float(payload.to_redshift) sed = Sed(x, y, yerr, from_redshift) sed.redshift(to_redshift) payload.x = encode_string(sed.wavelength) payload.y = encode_string(sed.flux) payload.yerr = encode_string(sed.err) reply_success(msg_id, mtype, payload) except Exception, e: reply_error(msg_id, sedexceptions.SEDException, e, mtype) return except Exception: error(str(capture_exception()))
def stack_normalize(private_key, sender_id, msg_id, mtype, params, extra): try: info("stack_normalize()") try: payload = DictionaryClass(params) seds = [] for segment in payload.segments: x = decode_string(segment.x) y = decode_string(segment.y) yerr = decode_string(segment.yerr) id_ = str(segment.id) seds.append(IrisSed(x=x, y=y, yerr=yerr, id=id_)) stack = IrisStack(seds) result = normalize(stack, payload) for i, segment in enumerate(payload.segments): segment.x = encode_string(result[i].x) segment.y = encode_string(result[i].y) segment.yerr = encode_string(result[i].yerr) segment.norm_constant = str(result[i].norm_constant) payload.excludeds = result.excluded reply_success(msg_id, mtype, payload.get_dict()) except Exception, e: reply_error(msg_id, sedexceptions.SEDException, e, mtype) return except Exception: error(str(capture_exception()))
def stack_redshift(private_key, sender_id, msg_id, mtype, params, extra): try: info("stack_redshift()") try: payload = DictionaryClass(params) seds = [] for segment in payload.segments: x = decode_string(segment.x) y = decode_string(segment.y) yerr = decode_string(segment.yerr) z = float(segment.z) id_ = str(segment.id) seds.append(IrisSed(x=x, y=y, yerr=yerr, z=z, id=id_)) z0 = float(payload.z0) correct_flux = payload.correct_flux == "true" result = redshift(IrisStack(seds), z0, correct_flux) for i, segment in enumerate(payload.segments): segment.x = encode_string(result[i].x) segment.y = encode_string(result[i].y) segment.yerr = encode_string(result[i].yerr) payload.excludeds = result.excluded reply_success(msg_id, mtype, payload.get_dict()) except Exception, e: reply_error(msg_id, sedexceptions.SEDException, e, mtype) return except Exception: error(str(capture_exception()))
def test_convert_underscores_to_hyphens(self):
    """get_dict() must emit hyphenated keys: no '_' may survive in any
    top-level or per-segment key of the serialized payload.

    Fixes: replaces the deprecated ``assertEquals`` alias with
    ``assertEqual`` and the obscure ``set(key) & set("_")`` membership
    idiom with a plain ``'_' in key`` test (same truth value).
    """
    def _segment():
        # Fresh dict (and fresh lists) per segment, as in the original.
        return {'x': [], 'y': [], 'yerr': [], 'norm_constant': 1.0}

    params = {}
    params['segments'] = [_segment(), _segment(), _segment()]
    params['norm_operator'] = '0'
    params['y0'] = '1.0'
    params['xmin'] = 'min'
    params['xmax'] = 'max'
    params['stats'] = 'avg'
    params['integrate'] = 'true'
    payload = DictionaryClass(params)
    # No underscore may remain in any top-level key...
    for keys in payload.get_dict().keys():
        if '_' in keys:
            self.fail("Found a '_' in '%s' key." % keys)
    # ...nor in any per-segment key.
    segments = payload.get_dict()['segments']
    for seg in segments:
        for key in seg.keys():
            if '_' in key:
                self.fail("Found a '_' in '%s' key." % key)
    # One attribute per top-level param key.
    self.assertEqual(len(payload.__dict__.keys()), 7)
    self.assertEqual(len(segments), 3)
def spectrum_interpolate(private_key, sender_id, msg_id, mtype, params, extra): """ spectrum_interpolate """ try: info("spectrum_interpolate()") try: methods = {'Neville' : neville, 'Linear' : linear_interp, 'Nearest Neighbor' : nearest_interp, 'Linear Spline' : interp1d, } payload = DictionaryClass(params) x = decode_string(payload.x) y = decode_string(payload.y) x_min = max(float(payload.x_min), min(x)) x_max = min(float(payload.x_max), max(x)) method = methods[payload.method] info("method " + method.__name__) n_bins = int(payload.n_bins) log = payload.log=='true'; sed = Sed(x, y) newSed = sed.interpolate(method, (x_min, x_max), n_bins, log); filtered = False if payload.smooth == "true": info('smoothing') newSed = filter(newSed) newSed.smooth(int(payload.box_size)) if payload.normalize=="true": info('normalizing') if not filtered: newSed = filter(newSed) newSed.normalise() payload.x = encode_string(newSed.wavelength) payload.y = encode_string(newSed.flux) reply_success(msg_id, mtype, payload) info("success") except Exception, e: reply_error(msg_id, sedexceptions.SEDException, e, mtype) error("error: " + repr(e)) return except Exception: error(str(capture_exception()))
def spectrum_interpolate(private_key, sender_id, msg_id, mtype, params, extra): """ spectrum_interpolate """ try: info("spectrum_interpolate()") try: methods = { 'Neville': neville, 'Linear': linear_interp, 'Nearest Neighbor': nearest_interp, 'Linear Spline': interp1d, } payload = DictionaryClass(params) x = decode_string(payload.x) y = decode_string(payload.y) x_min = max(float(payload.x_min), min(x)) x_max = min(float(payload.x_max), max(x)) method = methods[payload.method] info("method " + method.__name__) n_bins = int(payload.n_bins) log = payload.log == 'true' sed = Sed(x, y) newSed = sed.interpolate(method, (x_min, x_max), n_bins, log) filtered = False if payload.smooth == "true": info('smoothing') newSed = filter(newSed) newSed.smooth(int(payload.box_size)) if payload.normalize == "true": info('normalizing') if not filtered: newSed = filter(newSed) newSed.normalise() payload.x = encode_string(newSed.wavelength) payload.y = encode_string(newSed.flux) reply_success(msg_id, mtype, payload) info("success") except Exception, e: reply_error(msg_id, sedexceptions.SEDException, e, mtype) error("error: " + repr(e)) return except Exception: error(str(capture_exception()))
def test_dictionary_class(self):
    """A nested dict becomes nested attributes; get_dict() restores it."""
    obj = DictionaryClass({'foo': 'bar', 'nested': {'myvar': 5}})
    # Attribute views of the top-level and nested values.
    self.assertEqual('bar', obj.foo)
    self.assertEqual(5, obj.nested.myvar)
    expected = {'foo': 'bar', 'nested': {'myvar': 5}}
    self.assertEqual(expected, obj.get_dict())
def stack_stack(private_key, sender_id, msg_id, mtype, params, extra): try: info("stack_stack()") try: payload = DictionaryClass(params) seds = [] for segment in payload.segments: x = decode_string(segment.x) y = decode_string(segment.y) yerr = decode_string(segment.yerr) seds.append(IrisSed(x=x, y=y, yerr=yerr)) i_stack = IrisStack(seds) binsize = float(payload.binsize) statistic = str(payload.statistic) smooth = payload.smooth == "true" smooth_binsize = float(payload.smooth_binsize) logbin = payload.log_bin == "true" result = sedstacker.sed.stack(i_stack, binsize, statistic, fill='remove', smooth=smooth, smooth_binsize=smooth_binsize, logbin=logbin) payload.segments[0].x = encode_string(result.x) payload.segments[0].y = encode_string(result.y) payload.segments[0].yerr = encode_string(result.yerr) payload.segments[0].counts = encode_string(result.counts) payload.segments = [payload.segments[0]] get_dict = payload.get_dict() reply_success(msg_id, mtype, payload.get_dict()) except Exception, e: reply_error(msg_id, sedexceptions.SEDException, e, mtype) return except Exception: error(str(capture_exception()))
def spectrum_integrate(private_key, sender_id, msg_id, mtype, params, extra): """ spectrum_integrate """ try: info("spectrum_integrate()") try: payload = DictionaryClass(params) x = decode_string(payload.x) y = decode_string(payload.y) sed = Sed(x, y) response = dict() response['points'] = list() for curve in payload.curves: pb = Passband(curve.file_name) flux = sed.calcFlux(pb) point = dict() point['id'] = curve.id point['wavelength'] = curve.eff_wave point['flux'] = str(flux) response['points'].append(point) for window in payload.windows: xmin = float(window.min) xmax = float(window.max) flux = sed.integrate(xmin, xmax) point = dict() point['id'] = window.id point['wavelength'] = str((xmax + xmin) / 2) point['flux'] = str(flux) response['points'].append(point) reply_success(msg_id, mtype, response) except Exception, e: reply_error(msg_id, sedexceptions.SEDException, e, mtype) return except Exception: error(str(capture_exception()))
def test_normalize_by_int_median_mult(self):
    """End-to-end SAMP test: send MTYPE_STACK_NORMALIZE with three segments
    and median/multiply normalization, then check the returned arrays
    against precomputed normalization constants."""
    # Fixture SEDs, serialized the same way the Java client would send them.
    x1 = encode_string(numpy.array([1, 5, 10, 15, 50, 100]))
    y1 = encode_string(numpy.array([1, 5, 10, 15, 50, 100]) * 0.1)
    yerr1 = encode_string(numpy.array([1, 5, 10, 15, 50, 100]) * 0.01)
    x2 = encode_string(numpy.array([2, 4, 5, 8, 10]))
    y2 = encode_string(numpy.arange(5) + 1.0)
    # NOTE(review): `1.0 * 0.1` binds before `+`, so this is arange(5)+0.1,
    # not (arange(5)+1.0)*0.1 — possibly intended the latter; confirm.
    yerr2 = encode_string(numpy.arange(5) + 1.0 * 0.1)
    y3 = numpy.array([5.0, 15.0, 7.0, 4.5, 13.5, 10.5])
    yerr3 = encode_string(y3 * 0.1)
    y3 = encode_string(y3)
    x3 = encode_string(numpy.array([0.5, 1.5, 3.0, 5.0, 10.5, 21.0]))
    # Message parameters; keys use hyphens on the wire.
    params = {}
    segment1 = {'x': x1, 'y': y1, 'yerr': yerr1, 'id': 'sed1'}
    segment2 = {'x': x2, 'y': y2, 'yerr': yerr2, 'id': 'sed2'}
    segment3 = {'x': x3, 'y': y3, 'yerr': yerr3, 'id': 'sed3'}
    params['segments'] = [segment1, segment2, segment3]
    params['norm-operator'] = '0'
    params['y0'] = '1.0'
    params['xmin'] = 'min'
    params['xmax'] = 'max'
    params['stats'] = 'median'
    params['integrate'] = 'true'
    # Synchronous SAMP call with a 10-second timeout.
    response = self.cli.callAndWait(
        mtypes.cli.getPublicId(),
        {'samp.mtype': MTYPE_STACK_NORMALIZE,
         'samp.params': params},
        "10")
    assert response['samp.status'] == 'samp.ok'
    results = response['samp.result']
    norm_stack = DictionaryClass(results)
    # Expected multiplicative normalization constants for segments 1 and 2;
    # segment 3 is the reference and keeps a constant of 1.0.
    numpy.testing.assert_array_almost_equal(
        decode_string(norm_stack.segments[0].y),
        0.4270427 * decode_string(y1))
    numpy.testing.assert_array_almost_equal(
        decode_string(norm_stack.segments[1].y),
        8.54 * decode_string(y2))
    self.assertAlmostEqual(float(norm_stack.segments[2].norm_constant), 1.0)
def test_convert(self):
    """Hyphenated keys are exposed as underscore attributes."""
    converted = DictionaryClass({"from-redshift": 5})
    self.assertEqual(5, converted.from_redshift)