# Generate 1000 synthetic training light curves and insert each into the
# database as one record of (time, flux, error) measurements.
survey = 'normal'
for i in range(1000):
    aSurvey.generateCurve()
    # Stack time / flux / error columns into an (n_points, 3) array.
    # (column_stack treats 1-D inputs as columns, so the explicit
    # [:, np.newaxis] reshapes in the original were redundant.)
    tfe = np.column_stack((aSurvey.times, aSurvey.fluxes, aSurvey.errors))
    points_per_curve = len(aSurvey.times)
    source_class = aSurvey.class_name
    period = aSurvey.period_this
    curve_info = [points_per_curve, source_class, 0, 0, 0, 0, None, survey,
                  0, period]
    curve_info_names = ["number_points", "classification", "c1", "e1", "c2",
                        "e2", "raw_xml", "survey", "xml_filename",
                        "true_period"]
    # print(x) with a single argument behaves identically under Python 2
    # (parenthesized expression) and Python 3 (function call), unlike the
    # original Python-2-only "print x" statements.
    print(source_class)
    print(curve_info)
    create_database.enter_record(curve_info, curve_info_names, tfe, cursor)


## create 'outlier' sources, labelling their class as "outlier"
survey = 'outlier'

## create supernova remnants
## NOTE(review): reload() is the Python 2 builtin (importlib.reload in Py3),
## consistent with the Python-2 print statements elsewhere in this file.
reload(synthetic_data)
aCadence.generate_cadence()
aSupernovaRemnant = synthetic_data.SupernovaRemnant()
for i in range(4):
	aSupernovaRemnant.generateCurve()
	# sample the synthetic remnant curve at the cadence's times/errors
	tfe = aSupernovaRemnant.curve_this(aCadence.cadence_this,aCadence.error_this)
	# tfe appears to be an (n, 3) array, so n = size / 3
	# (integer division under Python 2)
	points_per_curve = tfe.size/3
	source_class = "SN_remnant"
	period = 1
	# NOTE(review): the loop body appears truncated here — the computed
	# values are never stored (no enter_record call is visible before the
	# next section). Confirm against the original script.
## Example 2
## Record one light curve sampled at the full cadence for the
## fundamental-mode RR Lyrae source, using a uniform 0.005 flux error.
survey = "full"
points_per_curve = len(aCadence.error_this)
period = aRRLyraeFund.period_this
curve_info = [points_per_curve, source_class, 0, 0, 0, 0, None, survey,
              0, period]
curve_info_names = ["number_points", "classification", "c1", "e1", "c2",
                    "e2", "raw_xml", "survey", "xml_filename",
                    "true_period"]

## temporarily swap in a constant error array, generate, then restore
saved_errors = aCadence.error_this
aCadence.error_this = np.full(len(saved_errors), .005)
tfe = ComputeTfe(aRRLyraeFund, aCadence)
create_database.enter_record(curve_info, curve_info_names, tfe, cursor)
aCadence.error_this = saved_errors

## construct 50 tfe (randomness is phase and error)
## for lightcurve truncated at 10, 20, ..., 80 measurements
# descending truncation lengths: 80, 70, ..., 10
trunc_points = 90 - np.arange(10, 90, 10)
for trunc in trunc_points:
    # repeated in-place truncation is valid because trunc strictly decreases,
    # so each slice is taken from an array at least trunc elements long
    aCadence.cadence_this = aCadence.cadence_this[0:trunc]
    aCadence.error_this = aCadence.error_this[0:trunc]
    survey = repr(trunc)
    points_per_curve = len(aCadence.error_this)
    curve_info = [
        points_per_curve, source_class, 0, 0, 0, 0, None, survey, 0, period
    ]
    for i in range(50):
        # NOTE(review): the inner loop body is missing here (source appears
        # truncated at a chunk boundary); presumably it generated a curve
        # and called create_database.enter_record — confirm against the
        # original script.
## For each source in db_info, fit a supersmoother at its catalogued period
## and store two derived records: the smoothed curve and the residual curve.
for row in db_info:
    tfe = create_database.get_measurements(row[0], cursor)
    period = 1 / row[2]
    smo = smoothers.supersmooth(tfe, period, normalize_times=False)
    # reusable (n, 1) column views of the time and error columns
    times = tfe[:, 0].reshape((-1, 1))
    errs = tfe[:, 2].reshape((-1, 1))
    tfe_smoothed = np.concatenate((times, smo.reshape((-1, 1)), errs),
                                  axis=1)
    ## residuals are re-centered on the mean observed flux
    residuals = np.mean(tfe[:, 1]) + tfe[:, 1] - smo
    tfe_residual = np.concatenate((times, residuals.reshape((-1, 1)), errs),
                                  axis=1)
    # NOTE(review): curve_info carries 5 fields while curve_info_names was
    # last assigned a 10-name list earlier in this file — verify the schema
    # expected by enter_record for these derived records.
    curve_info = ['smoothed', row[1], row[0], tfe.shape[0], row[4]]
    create_database.enter_record(curve_info,
                                 curve_info_names,
                                 tfe_smoothed,
                                 cursor=cursor)
    curve_info[0] = 'residual'
    create_database.enter_record(curve_info,
                                 curve_info_names,
                                 tfe_residual,
                                 cursor=cursor)

connection.commit()




## SANITY CHECK
## examine what we have collected so far
sql_cmd = ("SELECT source_id,original_source_id,survey,"
           "number_points,classification FROM sources")