def execute_sat_solver(self):
    """
    Time execution time of running instances through Sat Solver.
    :return: None (timing is handled inside Sat.run_solver)
    """
    solver = Sat()
    solver.run_solver()
def plot_sat_solver_results(self):
    """
    Plot execution time of running instances through Sat Solver.
    :return: None (delegates plotting to PlotHandler)
    """
    solver_results = Sat().load_results()
    plotter = PlotHandler()
    plotter.plot_k_consistency_check(solver_results)
def generate_systems(self, **kwargs):
    """
    Generate instances by forwarding all keyword arguments to Sat.
    :param kwargs: passed through unchanged to Sat.generate_systems
    :return: (results, systems) tuple as produced by the solver
    """
    outcome, produced = Sat().generate_systems(**kwargs)
    return outcome, produced
def test_14(self):
    """
    Recursive search for systems.
    Warning - may crash (recursion over the full clause set).
    :return: systems found by the recursive search (find_one=True stops early)
    """
    solver = Sat()
    n, m = 10, 10
    candidate_clauses = solver.find_clauses(n)
    return solver.find_systems(candidate_clauses, [], n, m, 0, find_one=True)
def test_8(self):
    """
    Use recursion to find instances for a given n and m rather than
    randomly searching.
    :return:
    """
    sat = Sat()
    # All equations for n = 5 variables, m = 6 clauses.
    x = sat.find_equations(5, 6)
    for i in x:
        # Print multiplicity of each equation and whether it is uniquely satisfiable.
        print x.count(i), sat.is_system_uniquely_satisfiable(i, 5)
        # NOTE(review): this inner loop has no effect as written — the body is
        # only `continue`. It looks truncated; confirm against the original file.
        for j in x:
            if i == j:
                continue
def formatDump(self, Sat):
    """Serialize one satellite's current state into a dict for storage.

    :param Sat: satellite object exposing satnumber, name, getLat,
                getLng(date=...) and getAlt. (NOTE: the parameter name
                shadows the Sat class; kept for interface compatibility.)
    :return: dict with id/name, an on-board-computer timestamp taken from
             self.date, and the current lat/lon/alt orbit snapshot.
    """
    # %F is the glibc shorthand for %Y-%m-%d.
    obc_section = {"datetime": self.date.strftime("%F %H:%M:%S.%f")}
    orbit_section = {
        "lat": Sat.getLat(),
        "lon": Sat.getLng(date=self.date),
        "alt": Sat.getAlt(),
    }
    return {
        "id": Sat.satnumber,
        "name": Sat.name,
        "obc": obc_section,
        "orbit": orbit_section,
    }
def updateCanvas(self):
    """Refresh the map canvas: SAA region, satellite markers, labels and coverage."""
    self.fillSAA()
    lngs = []
    lats = []
    for idx, sat in enumerate(self.Sats):
        lng = sat.getLng(date=self.date)
        lat = sat.getLat()
        lngs.append(lng)
        lats.append(lat)
        # Nudge the name label away from the map edges so it stays visible.
        label_lng = lng - 5 * (lng > 174) + 5 * (lng < -174)
        label_lat = lat - (1 - 2 * (lat < -85)) * 4
        self.sat_txt[idx].set_position(array((label_lng, label_lat)))
        self.sat_txt[idx].set_text(sat.name)
        self.plotCoverage(sat.getCoverage(), lat, lng, idx)
    self.ax_sat.set_data(lngs, lats)
    self.canvas.draw_idle()
def deploy(self, category, deployer, dplyr_mass, dplyd_mass, name, vel, date=None): self.calcPosAndVel(deployer, vel) #tle = tlefile.read(deployer.name, "TLE/cubesat.txt") deployer_name, line1, line2 = deployer.createTLE() tle = tlefile.read(deployer_name, line1=line1, line2=line2) newSat = Sat(name=name, tle=tle, cat=category) #newSat = Sat(name=name, cat=category) self.updateSat(newSat, date) B = (2 * (0.034 * 0.084 + 0.034 * 0.028 + 0.084 * 0.028)) / 6 / dplyd_mass newSat.setBallisticCoeff(B) newSat.createTLE() dplyr_mass = dplyr_mass - dplyd_mass dplyr_vel = [ -vel[0] * dplyd_mass / dplyr_mass, -vel[1] * dplyd_mass / dplyr_mass, -vel[2] * dplyd_mass / dplyr_mass ] self.calcPosAndVel(deployer, dplyr_vel) self.updateSat(deployer, date) B = (0.1 * 0.1 * 2 + 4 * 0.3 * 0.1) / 6 / dplyr_mass deployer.setBallisticCoeff(B) deployer.createTLE() return newSat
def importSatData(ec):
    """Load satellite definitions from the satdata.csv file next to this script.

    Each CSV row defines one satellite: name, optional parent (matched by name
    against satellites created so far, so parents must appear before children),
    mass, and initial position/velocity components (multiplied by 1000 here —
    presumably km and km/s converted to m and m/s; confirm with the data file).

    :param ec: event channel; receives a start Event before parsing.
    :return: set of Sat objects.
    """
    ec.post(Event(type='importSatData', status='start'))
    # File location technique from the pygame 'Chimp' example.
    main_dir = os.path.split(os.path.abspath(__file__))[0]
    sats = set()
    # Use a context manager so the CSV file is closed (it was leaked before).
    with open(os.path.join(main_dir, 'satdata.csv')) as csvfile:
        reader = csv.DictReader(csvfile)
        for row in reader:
            # BUGFIX: `parent` was only assigned inside an `if len(sats) > 0`
            # guard, so the very first row raised NameError. Default to None
            # and search the satellites built so far.
            parent = None
            for sat in sats:
                if sat.name == row['parent']:
                    parent = sat
                    break
            R0 = (float(row['rx0']) * 1000, float(row['ry0']) * 1000,
                  float(row['rz0']) * 1000)
            V0 = (float(row['vx0']) * 1000, float(row['vy0']) * 1000,
                  float(row['vz0']) * 1000)
            sats.add(Sat(ec, row['name'], parent, float(row['mass']), R0, V0))
    return sats
def convert_systems_to_constructions(self, **kwargs): """ Convert found systems into graphs and run them through Traces :return: """ # Init gi = Gi() sat = Sat() ph = ProcessHandler() fh = FileHandler() paths = ph.run_command("ls -v ./../assets/systems_to_convert/") validate = kwargs.get("validate", False) delete = kwargs.get("delete", False) # Iterate systems for path in paths: print "Checking " + path # Paths graph_path = "./../assets/construction/" + path + "_A.dre" system_path = "./../assets/systems_to_convert/" + path # Extract n and m values n, m = path.split("_") n = int(n) m = int(m) # Load system system = fh.read_from_file(system_path) if validate: # Check for k-local consistency if not sat.is_k_consistent(n, m, system): print "\t Not K consistent system. Removing and skipping." if delete: fh.delete_file(system_path) continue else: print "\t K consistent system. Constructing A." # Convert system into graphs and check for automorphisms G = sat.convert_system_to_graph(n, m, system) gi.convert_graph_to_traces( n, m, G, "A", "./../assets/construction/") # First construction if not gi.graph_has_automorphisms(graph_path): print "\t No Automorphisms. Constructing B." G = sat.convert_system_to_construction(n, m, system) gi.convert_graph_to_traces( n, m, G, "B", "./../assets/construction/") # Second construction if delete: fh.delete_file(graph_path) else: print "\t Automorphisms. Removing and skipping." if delete: fh.delete_file(graph_path) # Remove unwanted graph fh.delete_file(system_path) # Remove unwanted system else: G = sat.convert_system_to_construction(n, m, system) gi.convert_graph_to_traces(n, m, G, "B", "./../assets/construction/")
#!/usr/bin/env python3 # -*- coding: utf-8 -*- import argparse from sat import Sat if __name__ == "__main__": sat = Sat() parser = argparse.ArgumentParser( prog='Sat predict', description='Hamradio satellite orbit prediction and tracking software', epilog='By Paolo Mattiolo IN3AQK Bolzano Dolomiti (c)2018') parser.add_argument("-u", "--updtle", help="Update the tle file", action="store_true") parser.add_argument("-p", "--predict", help="Predict a sat", action="store_true") parser.add_argument("-pa", "--predictall", help="Predict all sat", action="store_true") parser.add_argument("-s", "--satname", help="Satellite name") parser.add_argument("-wt", "--writetxt", help="Write predict as txt", action="store_true")
def createSatFromFile(self, sat_name, file_name, category):
    """Create a Sat from a TLE file entry, sync it to the current date, and register it.

    :param sat_name: satellite name to look up in the TLE file
    :param file_name: path of the TLE file to read
    :param category: category string stored on the new Sat
    """
    tle = tlefile.read(sat_name, file_name)
    created = Sat(sat_name, tle=tle, cat=category)
    created.updateOrbitalParameters3(self.date)
    self.Sats.append(created)
def addSat(self):
    """Add the satellite selected in the available-satellites listbox.

    Looks the selected name up in each known TLE catalog (in a fixed order)
    and creates the satellite from the first catalog that contains it;
    otherwise falls back to a plain tlefile lookup by name.
    """
    add_sat = self.avail_sats_lst.get(self.avail_sats_lst.curselection())
    # Reserve a coverage polygon and a name label for the new satellite.
    self.ax_cov.append(
        self.ax.fill([0, 0], [0, 0],
                     transform=Geodetic(),
                     color='white',
                     alpha=self.cov_alpha)[0])
    self.sat_txt.append(
        self.ax.text([], [], "",
                     color='yellow',
                     size=8,
                     transform=Geodetic(),
                     ha="center"))
    # (catalog, TLE file, category) — checked in this exact order.
    catalogs = (
        (self.argos, "TLE/argos.txt", "Argos Data Collection System"),
        (self.cubesat, "TLE/cubesat.txt", "CubeSat"),
        (self.dmc, "TLE/dmc.txt", "Disaster Monitoring"),
        (self.goes, "TLE/goes.txt", "GOES"),
        (self.intelsat, "TLE/intelsat.txt", "Intelsat"),
        (self.iridium, "TLE/iridium.txt", "Iridium"),
        (self.iridium_next, "TLE/iridium-NEXT.txt", "Iridium Next"),
        (self.molniya, "TLE/molniya.txt", "Molniya"),
        (self.noaa, "TLE/noaa.txt", "NOAA"),
        (self.planet, "TLE/planet.txt", "Planet"),
        (self.resource, "TLE/resource.txt", "Earth Resources"),
        (self.sarsat, "TLE/sarsat.txt", "Search & Rescue"),
        (self.spire, "TLE/spire.txt", "Spire"),
        (self.tdrss, "TLE/tdrss.txt", "Tracking and Data Relay"),
        (self.tle_new, "TLE/tle-new.txt", "Last 30 Days' Launches"),
        (self.weather, "TLE/weather.txt", "Weather"),
    )
    for catalog, tle_file, category in catalogs:
        if add_sat in catalog:
            self.createSatFromFile(add_sat, tle_file, category)
            break
    else:
        # Unknown catalog: let tlefile resolve the name on its own.
        self.Sats.append(Sat(add_sat, tle=tlefile.read(add_sat)))
    self.curr_sats_lst.insert(END, add_sat)
    self.sortSats()
# NOTE(review): this is a body fragment — the enclosing method's `def` is not
# visible in this chunk.
self.srch_box.focus()
# Rewrite the current-satellites listbox entries in place so they match
# self.Sats (delete then re-insert at the same index keeps the order).
for i, sat in enumerate(self.Sats):
    self.curr_sats_lst.delete(i)
    self.curr_sats_lst.insert(i, sat.name)
def updateTableContent(self):
    """Refresh the on-screen satellite table and propagate every satellite.

    Updates all orbital parameters for each tracked satellite, optionally
    dumps them to the database, and rewrites the visible table rows
    (self.top_index .. self.bottom_index).
    """
    rad2deg = 180 / pi
    self.updtCnt += 1
    self.updateCanvas()
    # Periodically regenerate the background image (every ~20 updates).
    if (self.updtCnt > 20):
        self.refreshBackgroundImg()
        self.updtCnt = 0
    try:
        # Only reformat the datetime box when the user is not editing it.
        if (self.root.focus_get() is not self.dt_box):
            self.format_dt()
    except KeyError:
        # focus_get() can raise KeyError (Tk focus quirk); reformat anyway.
        self.format_dt()
    # Propagate every satellite and optionally persist a snapshot.
    # NOTE(review): the loop variable shadows the Sat class name.
    for Sat in self.Sats:
        Sat.updateOrbitalParameters3(self.date)
        if (self.en_db):
            col = self.db[Sat.name]
            col.insert_one(self.formatDump(Sat))
    # Rewrite only the visible slice of table rows.
    for i, Sat in enumerate(self.Sats[self.top_index:self.bottom_index]):
        self.name_bt[i]['text'] = Sat.name
        # Default-argument trick binds the current Sat (avoids the
        # late-binding-closure pitfall in the loop).
        self.name_bt[i]['command'] = lambda Sat=Sat: self.changeMainSat(Sat)
        self.cat_lbl[i]['text'] = Sat.getCategory()
        self.lat_lbl[i]['text'] = "{:0.4f}{}".format(Sat.getLat(), "°")
        self.lng_lbl[i]['text'] = "{:0.4f}{}".format(
            Sat.getLng(date=self.date), "°")
        # Altitude and semi-major axis scaled to km; momentum to 1e6 units.
        self.alt_lbl[i]['text'] = "{:0.1f}".format((Sat.getAlt() * 0.001))
        self.spd_lbl[i]['text'] = "{}".format(int(Sat.getSpeed()))
        self.a_lbl[i]['text'] = "{:0.1f}".format(
            (Sat.getSemiMajorAxis() * 0.001))
        self.h_lbl[i]['text'] = "{:0.1f}".format(Sat.getSpecAngMomentum() *
                                                 0.000001)
        self.e_lbl[i]['text'] = "{:0.4f}".format(Sat.getEccentricity())
        # Angular elements are stored in radians; display in degrees.
        self.raan_lbl[i]['text'] = "{:0.2f}{}".format(
            (Sat.getRAAN() * rad2deg), "°")
        self.i_lbl[i]['text'] = "{:0.2f}{}".format(
            (Sat.getInclination() * rad2deg), "°")
        self.w_lbl[i]['text'] = "{:0.2f}{}".format(
            (Sat.getArgPerigee() * rad2deg), "°")
        self.theta_lbl[i]['text'] = "{:0.2f}{}".format(
            (Sat.getAnomaly() * rad2deg), "°")
# Register the periodic acquisition/publication jobs.
# NOTE(review): jobs are registered on `sat.scheduler` but `cur_sat.scheduler`
# is started below — confirm these refer to the same scheduler object.
sat.scheduler.add_job(func=update_stat, trigger='interval', seconds=600)
sat.scheduler.add_job(func=update_measure, trigger='interval', seconds=600)
sat.scheduler.add_job(func=update_realparams, trigger='interval', seconds=3600)
sat.scheduler.add_job(func=update_outrawparams, trigger='interval', seconds=RAWPERIOD, id='outrawparams')
sat.scheduler.add_job(func=update_outprodparams, trigger='interval', seconds=PRODPERIOD, id='outproducts')
sat.scheduler.add_job(func=update_availability, trigger='cron', hour='*')
sat.scheduler.add_job(func=update_lasttime, trigger='interval', seconds=600)
#----------------------------------------#
# MQTT connection for this observatory/site.
Connected = False
client = mqtt.Client(OBS + '/' + SITE)
connection_to_broker()
on_sub()
#-----------------------------------------#
# BUGFIX: was `cur_sat = new Sat(...)` — `new` is not Python syntax
# (a JavaScript/Java-ism) and raised a SyntaxError.
cur_sat = Sat("obs1", "site1", "/home/limbo4/.limbo_data/")
watch_file()
start_settings()
data_proc(cur_sat)
try:
    cur_sat.scheduler.start()
    # BUGFIX: `cur_stat` was an undefined-name typo for `cur_sat`.
    print('SCHEDULER', cur_sat.scheduler.state())
except (KeyboardInterrupt, SystemExit):
    # Announce exit, stop the scheduler we started, and tear down MQTT.
    client.publish("exit/" + OBS + '/' + SITE, "1", retain=False)
    # BUGFIX: bare `scheduler` was not defined in this scope.
    cur_sat.scheduler.shutdown()
    client.disconnect()
    client.loop_stop()
def _make_results_dir(parent, name):
    """Create (if needed) and return the results sub-directory for one dataset."""
    directory = os.path.join(parent, name)
    if not os.path.exists(directory):
        os.makedirs(directory)
    return directory


def _svm_grid_search(dataset, results_directory, folds, fine_c, fine_gamma):
    """Run the coarse and fine SVM grid searches for one dataset.

    Writes best parameters of both searches and the held-out test accuracy
    to the file "svm" inside results_directory.

    :param dataset: object exposing training_x/training_y/testing_x/testing_y
    :param results_directory: directory receiving the "svm" results file
    :param folds: cross-validation fold count
    :param fine_c: iterable of C values for the fine (rbf) search
    :param fine_gamma: iterable of gamma values for the fine (rbf) search
    """
    with open(os.path.join(results_directory, "svm"), 'w') as svmfile:
        # Coarse parameter grid search over all kernels.
        parameters = {
            'kernel': ('linear', 'poly', 'rbf', 'sigmoid'),
            'C': [i for i in range(1, 100)],
            'gamma': [
                i for i in [
                    100.0, 30.0, 10.0, 3.0, 1.0, 0.3, 0.1, 0.03, 0.01, 0.003,
                    0.001, 0.0003, 0.0001, 0.00003, 0.00001, 0.000003, 0.000001
                ]
            ]
        }
        estimator = svm.SVC()
        clf = GridSearchCV(estimator, parameters, cv=folds, scoring='accuracy')
        clf.fit(dataset.training_x, dataset.training_y.ravel())
        svmfile.write("Coarse Search Best Parameters\n")
        svmfile.write("{}\n".format(clf.best_params_))
        svmfile.write("\n")

        # Fine parameter grid search around the coarse optimum (rbf kernel).
        parameters = {
            'C': [i for i in fine_c],
            'gamma': [i for i in fine_gamma]
        }
        estimator = svm.SVC(kernel='rbf')
        clf = GridSearchCV(estimator, parameters, cv=folds, scoring='accuracy')
        clf.fit(dataset.training_x, dataset.training_y.ravel())
        svmfile.write("Fine Search Best Parameters\n")
        svmfile.write("{}\n".format(clf.best_params_))
        svmfile.write("\n")

        # Test accuracy of the fine-search best estimator.
        predicted_y = clf.predict(dataset.testing_x)
        accuracy = accuracy_score(dataset.testing_y.ravel(), predicted_y)
        svmfile.write("Testing Accuracy\n")
        svmfile.write("{}\n".format(accuracy))
        svmfile.write("\n")


def main():
    """Main Function: run the SVM experiments on all three datasets.

    The three dataset sections were near-identical copies; they are now
    driven through _svm_grid_search with only the split arguments and the
    fine-search grids differing.
    """
    random_seed = 30
    folds = 5

    # Setup results directory
    results_directory = "results"
    if not os.path.exists(results_directory):
        os.makedirs(results_directory)

    # Ionosphere data
    ionosphere_results_directory = _make_results_dir(results_directory,
                                                     "ionosphere")
    ionosphere = Ionosphere()
    explore_data(ionosphere, ionosphere_results_directory)
    ionosphere.train_test_split(0.2, shuffle=True, random_state=random_seed)
    ionosphere.standardize()
    _svm_grid_search(ionosphere, ionosphere_results_directory, folds,
                     np.arange(7.0, 9.0, 0.1), np.arange(0.1, 1.0, 0.01))

    # Sat data — splits are generated by the data files, so no shuffling.
    sat_results_directory = _make_results_dir(results_directory, "sat")
    sat = Sat()
    explore_data(sat, sat_results_directory)
    sat.train_test_split(2000, shuffle=False)
    sat.standardize()
    _svm_grid_search(sat, sat_results_directory, folds,
                     np.arange(11.0, 13.0, 0.1), np.arange(0.003, 0.03, 0.0001))

    # Vowel-context data
    vowel_context_results_directory = _make_results_dir(results_directory,
                                                        "vowel-context")
    vowel_context = VowelContext()
    explore_data(vowel_context, vowel_context_results_directory)
    vowel_context.train_test_split(0.2, shuffle=True, random_state=random_seed)
    vowel_context.standardize()
    _svm_grid_search(vowel_context, vowel_context_results_directory, folds,
                     np.arange(21.0, 23.0, 0.1), np.arange(0.03, 0.3, 0.001))