def create_trained_model(songs_per_genre=100):
    """Fetch a training set, fit an SVC learner and persist the model.

    :param songs_per_genre: number of songs fetched per genre
        (default 100, matching the previous hard-coded value).
    """
    # Genres the model is trained on; opera/reggae are excluded for now.
    genres = [
        'classical',
        'jazz',
        # 'opera',
        # 'reggae',
        'rock',
        'house',
        'hip hop',
    ]
    # Audio features the learner consumes.
    required_features = [
        'Centroid_AVG', 'Centroid_SD',
        'RollOff_AVG', 'RollOff_SD',
        'Flux_AVG', 'Flux_SD',
        'BPM',
        'Noise', 'NoiseTwo',
    ]
    db_service = DatabaseService()
    training_set = db_service.get_training_set(
        genres, required_features, songs_per_genre, verbose=True)
    learner = LearnerSVC()
    learner.fit(training_set)
    # Persist the fitted model together with the metadata needed to reload it.
    save_model(learner, features=required_features, genres=genres)
def main(argv):
    """Cross-validate an SVC genre classifier; print per-fold and average hit rates.

    :param argv: command-line arguments (currently unused).
    """
    ####################################################
    # Constants
    ####################################################
    songs_per_genre = 200
    # songs_per_genre -100
    # c, r, h, hh = %83.59
    # c, j, r, h, hh = %77.83
    genres = [
        'classical',
        'jazz',
        # 'opera',
        # 'reggae',
        'rock',
        'house',
        'hip hop',
    ]
    required_features = [
        'Centroid_AVG', 'Centroid_SD',
        'RollOff_AVG', 'RollOff_SD',
        'Flux_AVG', 'Flux_SD',
        'BPM',
        'Noise',
        # 'NoiseTwo'
    ]
    ####################################################
    # Training Set Fetch
    ####################################################
    db_service = DatabaseService()
    training_set = db_service.get_training_set(
        genres, required_features, songs_per_genre, verbose=True)

    cross_val = CrossValidation(training_set, k_folds=6)
    average_hit_rate = 0
    count = 0
    validator = Validator()
    # Train on each fold's training split, validate on the held-out split.
    # FIX: print statements converted to print() calls — the originals were
    # Python 2 syntax and are a SyntaxError under Python 3 (which the rest
    # of the project already targets, given its f-strings).
    for ts, vs in cross_val:
        learner = LearnerSVC()
        learner.fit(ts)
        results = validator.validate_next(learner, vs)
        average_hit_rate += results.hit_rate()
        count += 1.0
        print(results)

    print("Total samples: " + str(training_set.total_samples()))
    print("Training/Validation: " + str(cross_val.training_size())
          + " / " + str(cross_val.validation_size()))
    print("Average hit rate: %" + "%.2f" % (100 * average_hit_rate / count))
# ---------------------------------------------------------------------------
# Script entry: parse CLI arguments, open the database and walk the raw_data
# directory, then geoparse its contents.
# ---------------------------------------------------------------------------
parser = argparse.ArgumentParser(description='Read raw data files and parse its contents')
parser.add_argument("path", help='Input path of the raw_data files.')
parser.add_argument('db', help='Complete path of .db file (sqlite database). Example: ../data/database.db')
args = parser.parse_args()

# NOTE(review): parse_args() never returns None — argparse exits the process
# on error — so this fallback is effectively dead. Kept for backward
# compatibility; the comparison is fixed to the idiomatic `is None`.
if args is None:
    input_dir = '/home/aksmiyazaki/git/python_oo_improvement/etl_python/raw_data'
    database_path = '../data/database.db'
else:
    input_dir = str(args.path)
    database_path = str(args.db)

print(f"Running with the following arguments: [{input_dir}] [{database_path}]")

db = DatabaseService.get_instance()
db.initialize_database_conn(database_path)

data_list = listdir(input_dir)
geo_point = None
geo_parser = geoparse.GeoParser()
locator = PointLocator.get_instance()


def next_state(cur_state):
    """Return the parser state following *cur_state* in the LAT -> LON -> DIST cycle.

    :param cur_state: one of geoparse.RowDataType's SEEK_* members.
    :raises Exception: when *cur_state* is not one of the three known states.
    """
    if cur_state == geoparse.RowDataType.SEEK_LAT:
        return geoparse.RowDataType.SEEK_LON
    elif cur_state == geoparse.RowDataType.SEEK_LON:
        return geoparse.RowDataType.SEEK_DIST
    elif cur_state == geoparse.RowDataType.SEEK_DIST:
        return geoparse.RowDataType.SEEK_LAT
    else:
        raise Exception(f"[ERROR] There is no next state for current {cur_state}")
def persist_address(self):
    """Persist this Address in a star schema.

    If the (latitude, longitude) pair already exists, only its Occurences
    counter is incremented; otherwise the dimension tables (Country, Region,
    City, District, Street) are populated as available and the Address row
    is inserted with Occurences = 1.
    """
    self.__db = DatabaseService.get_instance()
    # If there's more than one occurence of the same lat/lon, just add one to Occurence.
    # NOTE(review): execute_select's support for bound parameters is not
    # visible from here, so this SELECT keeps string interpolation;
    # lat/lon are presumably numeric — confirm upstream sanitization.
    sql = f"""Select Latitude, Longitude from Address where Latitude = {self.latitude} and Longitude = {self.longitude}"""
    res = self.__db.execute_select(sql)
    if len(res) > 0:
        print(
            f"Latitude = {self.latitude} and Longitude = {self.longitude} returned more than once."
        )
        # Parameterized and simplified: a relative increment replaces the
        # nested SELECT and the string-built SQL (execute() demonstrably
        # accepts a params tuple — see the insert below).
        sql = """Update Address set Occurences = Occurences + 1
                 where Latitude = ? and Longitude = ?"""
        res = self.__db.execute(sql, (self.latitude, self.longitude))
        # BUG FIX: commit_changes was referenced without calling it, so the
        # occurrence increment was never committed.
        self.__db.commit_changes()
    else:
        # Adds content to tables
        # Since the data depends on a third party API, we must expect that stuff may be null.
        if self.country is not None and self.country != "":
            sql_ins = "INSERT INTO Country (CountryName) Values (?);"
            params = (self.country, )
            self.__insert_data(sql_ins, params)

        if self.region is not None and self.region != "":
            sql_ins = "INSERT INTO Region (RegionName) Values (?);"
            params = (self.region, )
            self.__insert_data(sql_ins, params)

        if self.city is not None and self.city != "":
            sql_ins = "INSERT INTO City (CityName) Values (?);"
            params = (self.city, )
            self.__insert_data(sql_ins, params)

        if self.district is not None and self.district != "":
            sql_ins = "INSERT INTO District (DistrictName) Values (?)"
            params = (self.district, )
            self.__insert_data(sql_ins, params)

        if self.postal_code is not None and self.postal_code != "":
            sql_ins = "INSERT INTO Street (PostalCode, StreetName) Values (?,?);"
            params = (
                self.postal_code,
                self.street,
            )
            self.__insert_data(sql_ins, params)

        sql_ins = """INSERT INTO Address (Latitude, Longitude, Number, PostalCode,
                         CountryName, DistrictName, RegionName, CityName, Occurences)
                     Values (?, ?, ?, ?, ?, ?, ?, ?, ?)
        """
        params = (self.latitude, self.longitude, self.number, self.postal_code,
                  self.country, self.district, self.region, self.city, 1)
        self.__db.execute(sql_ins, params)
        self.__db.commit_changes()
end = self.get_argument("end", 0) events = self.database.get_events(start, end) #TODO filter by location stops self.write(json_encode(events)) if __name__ == '__main__': '''let setup tcp connection to the upstream service to get sensor data and handle this data with a async socket read for distribution :) ''' #demoaccount api = Api( token='AyHBnaaukc32qIxv21KW7o1ogQHU3xOrsAFU3fzO', key='g22EPptMppsLfHUoqifXwWDDIVv7qV/L8dccEWcmcq0JKpQ5QVuBPToUNor4ZfqT', base_url='https://developers.polairus.com') database = DatabaseService() observer = LineObserver(database) sock = socket.socket() s = ssl.wrap_socket(sock) def connect_to_service(): s.connect(('developers.polairus.com', 443)) s.sendall(api.create_eventstream_request()) pass def data_handler(sock, fd, events): try: data = sock.recv(4096) logger.info(('received data:%s' % data)) observer.notify(data)