import json
import utility.utility_mdf as ut_mdf
from astropy.time import Time
import utility.utility_db as ut_db

dataset_path = 'D:\\mdwarf_data\\'
lc_path = "lc_flux_catalog_aperture_r7_txt\\"
lc_timestamp_path = "lc_timestamp_txt\\"
path_to_lc_file = "{}{}".format(dataset_path, lc_path)

if __name__ == '__main__':
    fileList = ut_mdf.getListMDF()
    rows = []
    for index, fileName in enumerate(fileList):
        data = ut_mdf.getDataFromFile(fileName=fileName)
        fileDate = fileName.split('_date')[1]
        # Convert the first and last MJD timestamps of each light curve to UTC (ISO) strings.
        startTimeMJD = data["timestamp"][0]
        startTimeUTC = Time(startTimeMJD, format='mjd').iso
        endTimeMJD = data["timestamp"][-1]
        endTimeUTC = Time(endTimeMJD, format='mjd').iso
        rows.append((fileName, fileDate, startTimeMJD, endTimeMJD, startTimeUTC, endTimeUTC))
    ut_db.insertMJDRow(rows=rows)
    print("a")
import numpy
import utility.utility_light_curve as ut_lc
import utility.utility_mdf as ut_mdf
import utility.utility_db as ut_db
from tslearn.matrix_profile import MatrixProfile

# L = 1
# I = 500
# files_list = ut_lc.getListLight(height=L, duration=I)
# data = ut_lc.getDataFromFile(fileName=files_list[100], height=L, duration=I)

listFiles = ut_db.getAnswerMDF_toDB()
# mdf_file = 'light_curve_Gaia-DR2_49407521363733632_date20191129'
window_size = 100

for mdf_file in listFiles:
    data = ut_mdf.getDataFromFile(fileName=mdf_file['file_name'])
    s_x = numpy.array(data["instances"]).reshape((-1, 1))

    # Matrix profile of the light curve (no z-normalisation of subsequences).
    mp = MatrixProfile(subsequence_length=window_size, scale=False)
    mp_series = mp.fit_transform([s_x])[0]
    # t_star = numpy.argmax(mp_series.ravel())

    # top_k: pick the k largest matrix-profile values, skipping indices that fall
    # within half a window of an already selected index (exclusion zone).
    k = 4
    index_array = list(numpy.argsort(-mp_series.ravel()))
    result_list = []
    while len(result_list) < k:
        value = index_array[0]
        result_list.append(value)
        remove_list = [*range(value - int(window_size / 2), value + int(window_size / 2))]
        for remove_instance in remove_list:
            if remove_instance in index_array:
                index_array.remove(remove_instance)
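
# A self-contained sketch of the same exclusion-zone top-k selection, runnable on a
# synthetic matrix profile with numpy only. The function name top_k_discords and the
# random profile are illustrative assumptions, not part of the project code.
import numpy as np


def top_k_discords(profile, window_size, k=4):
    """Indices of the k largest profile values, kept at least window_size/2 apart."""
    order = list(np.argsort(-profile))
    picked = []
    half = window_size // 2
    while order and len(picked) < k:
        idx = order.pop(0)
        if all(abs(idx - p) >= half for p in picked):  # outside every exclusion zone
            picked.append(idx)
    return picked


if __name__ == '__main__':
    fake_profile = np.random.default_rng(0).random(1000)
    print(top_k_discords(fake_profile, window_size=100, k=4))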
import numpy as np
import mass_ts as mts
from matplotlib import pyplot as plt
import utility.utility_mdf as ut_mdf
from webService.backend.coreSketchDyBinService import sketchDyBinService

window_size = 12
ini_bin = 5
target_file = ut_mdf.getDataFromFile(fileName='light_curve_Gaia-DR2_49406353132632832_date20191129')


def getSublenght(period, mdfData):
    # Sketch the light curve into dynamic bins, then map bin index `period` and its
    # successor back to start/end positions in the raw instance array.
    corePlot = sketchDyBinService(windowSize=window_size, initialBin=ini_bin, isOnline=False)
    corePlot.sketchMode(instances=mdfData['instances'])
    window = corePlot.getWindow()
    prior_index = period
    cur_index = period + 1
    start_point = 0
    for i in range(0, prior_index):
        start_point = start_point + window[i].get_number_instance()
    print(start_point)
    end_point = start_point + window[prior_index].get_number_instance() + window[cur_index].get_number_instance()
    print(end_point)
    return start_point, end_point


if __name__ == '__main__':
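    # The body of this __main__ block is not shown above; the lines below are only a
    # hedged sketch of how getSublenght might be exercised. The bin index 3 is an
    # illustrative assumption.
    start_point, end_point = getSublenght(period=3, mdfData=target_file)
    sub_instances = target_file['instances'][start_point:end_point]
    print("bins 3-4 cover instances [{}:{}] ({} samples)".format(
        start_point, end_point, len(sub_instances)))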
    plt.xlim((12000, 24500))
    plt.ylim((0, 2000))
    # plt.xlabel("prediction errors: %d" % (n_errors))
    plt.xlabel("mean")
    plt.ylabel("Standard deviation")
    plt.show()


if __name__ == '__main__':
    listWindow = [400, 200, 100, 20]
    # listWindow = [2580, 1290, 645, 322, 161, 80, 40, 20, 10]
    # listWindow = [50]
    lightData1 = ut_mdf.getDataFromFile(fileName=fileName1)
    # lightData2 = ut_mdf.getDataFromFile(fileName=fileName2)
    for windowSize in listWindow:
        dyResult1 = ut_data.genListDyBin(instances=lightData1["instances"],
                                         timestamp=lightData1["timestamp"],
                                         windowSize=windowSize)
        # dyResult2 = ut_data.genListDyBin(instances=lightData2["instances"],
        #                                  timestamp=lightData2["timestamp"],
        #                                  windowSize=windowSize)
        computeLocalOutlierFactor(dyResult1, windowSize=windowSize)
        # computeDistance(dyResult=dyResult1)
        print("{},{},{} ".format(fileName1, windowSize, computeDistance(dyResult1)))
        # computeLocalOutlierFactor(dyResult2, windowSize=windowSize)
        # print("variance w={} : {}".format(windowSize, dyResult2["variance"]))
        # computeTwoFile(dyResult1=dyResult1, dyResult2=dyResult2, windowSize=windowSize)
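
# A hedged, self-contained sketch of what a computeLocalOutlierFactor-style routine could
# reduce to: scikit-learn's LocalOutlierFactor over per-bin (mean, standard deviation)
# pairs, plotted on the axes labelled above. The feature choice, the helper name
# lof_over_bins and n_neighbors are assumptions, not the project's implementation.
import numpy as np
from matplotlib import pyplot as plt
from sklearn.neighbors import LocalOutlierFactor


def lof_over_bins(bin_means, bin_stds, n_neighbors=20):
    X = np.column_stack([bin_means, bin_stds])
    labels = LocalOutlierFactor(n_neighbors=n_neighbors).fit_predict(X)  # -1 marks outlying bins
    plt.scatter(X[:, 0], X[:, 1], c=labels)
    plt.xlabel("mean")
    plt.ylabel("Standard deviation")
    plt.show()
    return labels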
    'light_curve_Gaia-DR2_602712283908074752_date20200130',
    'light_curve_Gaia-DR2_603188200643885696_date20200124',
    'light_curve_Gaia-DR2_603299423116967424_date20200130',
    'light_curve_Gaia-DR2_604942879467202816_date20200201'
]

if __name__ == '__main__':
    # listFiles = ut_db.getNearStars(fileName=file_name,
    #                                maxDistance=5000)
    # listFiles = ut_db.getAnswerMDF_toDB()
    # print(listFiles)
    for mdf_file in ans_list:
        list_data = []
        # Load the light curve at three aperture radii and collect them for plotting.
        for r in [5, 7, 10]:
            data = ut_mdf.getDataFromFile(fileName=mdf_file, r=r)
            # data = ut_mdf.getDataFromFile(fileName=mdf_file['file_name'])
            # data['instances'] = normalizaed(list_data=data['instances'])
            data['fileName'] = 'flux_catalog_aperture_r{}'.format(r)
            # plot = ut_bok.export_Simultaneous(listData=list_data, isJSDIV=False)
            data['timestamps'] = data['timestamp']
            list_data.append(data)

            #### dy
            corePlot = sketchDyBinService(windowSize=15, initialBin=5, isOnline=False)
            sketchInstances = corePlot.sketchMode(instances=data['instances'])
height = 300
s = Service(ChromeDriverManager().install())
driver = webdriver.Chrome(service=s)
driver.set_window_size(width, height)
sizing_mode = "fixed"

if __name__ == '__main__':
    windowSize = 40
    listFile = ut.txt_to_list(csv_name="f_test.result.csv")
    for row in listFile:
        row_data = row.split(",")
        pattern = row_data[0].split("_")[4]
        pathPngOutput = "{}{}{}\\".format(dataset_path, png_path, pattern)
        ut.checkFolderandCreate(pathPngOutput)
        lightData1 = ut_mdf.getDataFromFile(fileName=row_data[0])
        lightData2 = ut_mdf.getDataFromFile(fileName=row_data[1])
        plots = ut_bokeh.exportPlot(x_axis=lightData1["timestamp"],
                                    y_axis=lightData1["instances"],
                                    fileName=lightData1["fileName"],
                                    addCircle=True,
                                    sizing_mode=sizing_mode)
        plots = ut_bokeh.exportSubplotPng(x_axis1=lightData1["timestamp"],
                                          y_axis1=lightData1["instances"],
                                          fileName1=lightData1["fileName"],
                                          x_axis2=lightData2["timestamp"],
                                          y_axis2=lightData2["instances"],
                                          fileName2=lightData2["fileName"],
                                          addCircle=True,
import utility.utility_mdf as ut_mdf
import utility.utility_db as ut_db
from bokeh.io import export_png
from bokeh.plotting import figure, output_file, show
from selenium import webdriver
from selenium.webdriver.chrome.service import Service
from webdriver_manager.chrome import ChromeDriverManager

png_path = 'top100\\'
width = 1000
height = 300
s = Service(ChromeDriverManager().install())
driver = webdriver.Chrome(service=s)
driver.set_window_size(width, height)
sizing_mode = "fixed"
max_distance = 2000

if __name__ == '__main__':
    fileList = ut_db.getFileListFromSQL(sqlFileName="select_top100.sql")
    # print(fileList)
    for index, fileName in enumerate(fileList):
        # For each of the top-100 stars, fetch its neighbours within max_distance
        # and render their light curves in a single combined plot.
        rows_result = ut_db.getNearStars(fileName=fileName, maxDistance=max_distance)
        listData = []
        for row in rows_result:
            listData.append(ut_mdf.getDataFromFile(row['file_target']))
        plot = ut_exBokeh.export_Simultaneous(listData=listData, isJSDIV=False)
        ut_bokeh.exportPlotPng(path='top100',
                               fileName="top{}_{}".format(str(index).zfill(3), fileName),
                               plot=plot,
                               driver=driver)
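
# A hedged sketch of what an exportPlotPng-style helper might boil down to: build a bokeh
# figure and render it to PNG through a selenium driver like the one configured above,
# using bokeh.io.export_png. The helper name save_light_curve_png and its arguments are
# illustrative assumptions, not the project's helper.
from bokeh.io import export_png
from bokeh.plotting import figure


def save_light_curve_png(timestamps, instances, out_file, driver, width=1000, height=300):
    p = figure(width=width, height=height, title=out_file)
    p.line(timestamps, instances)
    p.circle(timestamps, instances, size=2)
    export_png(p, filename=out_file, webdriver=driver)  # headless render via selenium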
window_size = 12
ini_bin = 5
sql_file = 'get_no_short.sql'

if __name__ == '__main__':
    # list_files = ut_db.getFileListFromSQL(sqlFileName=sql_file)
    list_files = ut_db.getNearStars(fileName='light_curve_Gaia-DR2_51856511715955968_date20191130',
                                    maxDistance=3000)
    for index, row in enumerate(list_files):
        # print(main_file['file_target'])
        main_file = row['file_target']
        try:
            rows = []
            data = ut_mdf.getDataFromFile(fileName=main_file)
            corePlot = sketchDyBinService(windowSize=window_size, initialBin=ini_bin, isOnline=False)
            corePlot.sketchMode(instances=data['instances'])
            window = corePlot.getWindow()
            # a = window[:-1]
            # Compare every pair of adjacent bins: their means and standard errors feed
            # the SK change-detection statistic.
            for index_bin in range(window_size - 1):
                prior_bin = window[index_bin]
                cur_bin = window[index_bin + 1]
                prior_SE = prior_bin.get_SDError()
                cur_SE = cur_bin.get_SDError()
                prior_mean = prior_bin.get_representation()
                cur_mean = cur_bin.get_representation()
                K_star = ut_det.detection_SKmethod(curMean=cur_mean,
                                                   priorMean=prior_mean,
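
# ut_det.detection_SKmethod is project code that is not shown here. As a point of
# reference only, a generic change score between two adjacent bins can be written as the
# standardized difference of their means using each bin's standard error; this is an
# assumption-labelled stand-in, not the SK method used above.
import math


def mean_shift_score(prior_mean, cur_mean, prior_SE, cur_SE):
    # Welch-style standardized difference between adjacent bin means.
    return abs(cur_mean - prior_mean) / math.sqrt(prior_SE ** 2 + cur_SE ** 2)


# Example: treat a score above ~3 as a candidate change point.
# if mean_shift_score(prior_mean, cur_mean, prior_SE, cur_SE) > 3: ...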
import numpy as np
import mass_ts as mts
from matplotlib import pyplot as plt
import utility.utility_mdf as ut_mdf

main_file = ut_mdf.getDataFromFile(
    fileName='light_curve_Gaia-DR2_609925217624936320_date202002010')
target_file = ut_mdf.getDataFromFile(
    fileName='light_curve_Gaia-DR2_611485012307860352_date20200201')
# target_file = ut_mdf.getDataFromFile(fileName='light_curve_Gaia-DR2_657906668110583552_date20200130')
# target_file = ut_mdf.getDataFromFile(fileName='light_curve_Gaia-DR2_657906663819888640_date20200130')

# Query subsequence taken from the main light curve.
subInstance = main_file['instances'][1428:2759]
subTimestamp = main_file["timestamp"][1428:2759]

# main_file = ut_mdf.getDataFromFile(fileName='light_curve_Gaia-DR2_519401154006522368_date20191006')
# target_file = ut_mdf.getDataFromFile(fileName='light_curve_Gaia-DR2_519358376131632256_date20191006')
# target_file = ut_mdf.getDataFromFile(fileName='light_curve_Gaia-DR2_657906668110583552_date20200130')
# target_file = ut_mdf.getDataFromFile(fileName='light_curve_Gaia-DR2_657906663819888640_date20200130')
# subInstance = main_file['instances'][2282:3282]
# subTimestamp = main_file["timestamp"][2282:3282]

plt.figure(figsize=(15, 8))
plt.plot(target_file["timestamp"], target_file['instances'])
# plt.ylabel('Accelerometer Reading')
plt.title(target_file['fileName'])
plt.show()
plt.clf()

plt.figure(figsize=(15, 8))
plt.plot(subTimestamp, subInstance)
plt.ylabel('Accelerometer Reading')
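
# Hedged sketch (not part of the original, truncated script above): how the imported
# mass_ts package could locate the subInstance query inside the target light curve.
# The use of mass2 here is an assumption about intent, grounded only in the imports above.
distances = mts.mass2(np.asarray(target_file['instances'], dtype=float),
                      np.asarray(subInstance, dtype=float))
best_start = int(np.argmin(np.abs(distances)))
print("best match in target starts at index", best_start,
      "(timestamp {})".format(target_file['timestamp'][best_start]))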