def run(self):
    try:
        fact = DataStore()
        fact.set_ds_type(self.f_type)
        ds = fact.create_data_store()
        if self.f_type in ("json", "csv"):
            ds.set_file(self.params[self.f_type])
        if self.f_type in ("postgre", "mongo"):
            ds.set_user(self.params["user"])
            ds.set_password(self.params["password"])
            ds.set_host(self.params["host"])
            # ds.set_port(self.params["port"])  # use the default system port
            ds.set_db(self.params["db_name"])
            ds.set_table(self.params["table"])
        ds.connect()
        for row in self.data:
            ds.insert_unique(*row)
        self.sleep(2)
    except Exception as err:
        print(err)
    finally:
        self.t_finish.emit()

def __init__(self, yt_resource: Resource) -> None:
    """
    :param yt_resource: googleapiclient.discovery.Resource object
    """
    self._yt_resource = yt_resource
    self._channel_store = DataStore()
    self._subscribers = []

def execute_pipeline():
    """Execute all functions needed to run the program."""
    api_key = get_api_key()
    # create datastore
    ds = DataStore()
    # read csv files
    ds.read_practices_from_csv_file("data/practices.csv")
    ds.read_students_from_csv_file("data/students.csv")
    # create address combination file
    ds.create_address_csv_file()
    # fetch distance and duration from Google API
    GoogleAPI(api_key=api_key).fetch_distances_from_api('data/addresses.csv')
    # create all weight combination file
    weight_df = weight_combination.create_all_weight_combinations(ds)
    # extract best possible weight combinations
    weight_combination.extract_best_weights_students(ds, weight_df)

def run(self):
    try:
        fact = DataStore()
        fact.set_ds_type(self.ds_type)
        ds = fact.create_data_store()
        ds.set_user(self.params["user"])
        ds.set_password(self.params["password"])
        ds.set_host(self.params["host"])
        # ds.set_port(self.params["port"])  # use the default system port
        ds.set_db(self.params["db_name"])
        ds.set_table(self.params["table"])
        ds.connect()
        data = ds.get_all_data()
        self.fill_table.emit(data)
        self.sleep(2)
    except Exception as err:
        print(err)
    finally:
        self.t_finish.emit()

def __init__(self, yt_resource: Resource, playlist_id) -> None:
    """
    :param yt_resource: googleapiclient.discovery.Resource object
    :param playlist_id: youtube playlist ID
    """
    self._yt_resource = yt_resource
    self._playlist_id = playlist_id
    self._videos_store = DataStore()

def __init__(self, yt_resource: Resource, channel_id) -> None:
    """
    :param yt_resource: googleapiclient.discovery.Resource object
    :param channel_id: youtube channel ID
    """
    self._yt_resource = yt_resource
    self._channel_id = channel_id
    self._playlist_store = DataStore()

def __init__(self, config) -> None:
    self.__local_playlist_cache_path = os.path.join(
        config['local_cache'], 'playlist.json')
    self.__local_channel_cache_path = os.path.join(
        config['local_cache'], 'channel.json')
    self.__shared_channel_cache_path = os.path.join(
        config['shared_cache'], 'channel.json')
    self.__indent = config['cache_indent']
    self._local_playlist_cache = DataStore()
    self._local_channel_cache = DataStore()
    self._shared_channel_cache = DataStore()
    self.create_folder(config['local_cache'])
    self.create_folder(config['shared_cache'])
    self.load()

def __init__(self, *args, **kwargs):
    super(StudentPracticePairTests, self).__init__(*args, **kwargs)
    self.ds = DataStore()
    self.ds.read_students_from_csv_file("data/test_data/students.csv")
    self.ds.read_practices_from_csv_file("data/test_data/practices.csv")
    self.pair = StudentPracticePair(
        self.ds.df_students.loc['S001'],
        self.ds.df_practices.loc['P001'],
        address_path="data/test_data/addresses.csv")

def settings():
    _settings = request.get_json()
    db = DataStore()
    logging.getLogger('pyro').debug('Saving new settings: %s' % _settings)
    db.save_profile(_settings)
    db.set_active_profile(_settings['id'])
    db.apply_active_profile()
    db.save_settings(_settings)
    return 'ok'

def __init__(self):
    config = Configuration()
    self.data_store = DataStore()
    self.db_host = config.get_db_host()
    self.db_port = config.get_db_port()
    self.db_username = config.get_db_username()
    self.db_password = config.get_db_password()
    self.db_name = config.get_db_name()
    self.db_article_table = config.get_article_table()

def test_get_path(self):
    """Test retrieving a file path."""
    s = DataStore()
    result = s.get_path(self.fname)
    assert isinstance(result, str)
    assert os.fspath(self.data_path / self.fname) == result
    with pytest.raises(ValueError, match=r"No file found named"):
        s.get_path("693_UNCI")

def index(name=None):
    db = DataStore()
    settings = db.get_settings()
    db.shutdown()
    min_temp = math.floor(settings['target_temp'] - temp_range / 2)
    max_temp = math.ceil(settings['target_temp'] + temp_range / 2)
    return render_template('index.html', min_temp=min_temp,
                           max_temp=max_temp, **settings)

def __init__(self):
    QCoreApplication.setOrganizationName('github.com/misdoro')
    QCoreApplication.setApplicationName('Battery tester')
    self.threadpool = QThreadPool()
    self.instr_thread()
    self.datastore = DataStore()
    signal.signal(signal.SIGTERM, self.terminate_process)
    signal.signal(signal.SIGINT, self.terminate_process)
    self.data_receivers = set()
    GUI(self)

def do_train(args):
    # Load configuration
    config = ConfigParser()
    config.read_file(args.config)
    data = DataStore(config)
    # Create the CRF model.
    model = CRF(config)
    retrain_epochs = config["training"].getint("retrain_every")
    accuracy = []

    with EditShell(config) as shell:
        while data.has_next():
            conll = data.next()
            i = data.i()
            # If the data doesn't have tags, try to smart-tag it.
            if len(conll[0]) == DataStore.TAG_LABEL + 1:
                tags = [tok[DataStore.TAG_LABEL] for tok in conll]
            else:
                tags = model.infer(conll)
            try:
                # conll_display = ["{}/{}".format(token[0], token[2]) for token in conll]
                conll_display = ["{}".format(token[0]) for token in conll]
                # Pass a copy of the tag list to the shell.
                action = shell.run(conll_display, list(tags),
                                   metadata=render_progress(data, accuracy))
                if action.type == ":prev":
                    try:
                        data.rewind(2)  # move 2 indices back
                    except AttributeError:
                        data.rewind(1)
                elif action.type == ":goto":
                    doc_idx, = action.args
                    assert doc_idx >= 0
                    data.goto(doc_idx)
                elif action.type == "save":
                    _, tags_ = action.args
                    accuracy.append(score(tags, tags_))
                    data.update(conll, tags_)
                    if i % retrain_epochs == 0:
                        model.retrain()
            except QuitException:
                break

def history(name=None):
    db = DataStore()
    settings = db.get_settings()
    db.shutdown()
    tr = request.args.get('tr')
    if tr is None:
        tr = 8
    min_temp = math.floor(settings['target_temp'] - temp_range / 2)
    max_temp = math.ceil(settings['target_temp'] + temp_range / 2)
    return render_template('history.html', tr=tr, min_temp=min_temp,
                           max_temp=max_temp, **settings)

def main():
    print(" \n NHL Player Predictions\n", "=" * 23)
    print("Legend:", "LW = Left Wing,", "RW = Right Wing,",
          "C = Center,", "D = Defenseman")
    db_data = DataStore()
    df = db_data.fetch_all()
    df.drop(columns=['id'], inplace=True)
    df.drop(columns=['team'], inplace=True)
    play_sample = df.sample(7)
    df.drop(columns=['name'], inplace=True)
    # Keep the held-out sample out of training/testing.
    df.drop(play_sample.index, inplace=True)
    X = np.array(df.drop(columns=['position']))
    y = np.array(df['position'])
    X_train, X_test, y_train, y_test = model_selection.train_test_split(
        X, y, test_size=0.3)
    clf = neighbors.KNeighborsClassifier()
    clf.fit(X_train, y_train)
    accuracy = clf.score(X_test, y_test)
    print("Number of Data Points:", len(df.index), "\n")
    print("\nPredicted with Accuracy: ", accuracy * 100, "%\n")

    # Prediction Test
    names = np.array(play_sample['name'])
    positions = np.array(play_sample['position'])
    players = dict(zip(names, positions))
    play_sample.drop(columns=['name'], inplace=True)
    play_sample.drop(columns=['position'], inplace=True)
    X_play = np.array(play_sample)
    predictions = clf.predict(X_play)
    outcome = []
    for i in range(len(predictions)):
        if predictions[i] == positions[i]:
            outcome.append("PASS")
        else:
            outcome.append("FAIL")
    output = pd.DataFrame(
        data=np.column_stack((predictions, positions, outcome)),
        index=names,
        columns=["Predicted", "Actual", "Outcome"])
    print(output)

def test_create_new_spine_database(self):
    """Test that a new Spine database is created when clicking
    on the Spine-icon tool button.
    """
    with mock.patch("data_store.QFileDialog") as mock_file_dialog:
        data_store = DataStore(self.toolbox, "DS", "", dict(), 0, 0)
        file_path = os.path.join(data_store.data_dir, "mock_db.sqlite")
        mock_file_dialog.getSaveFileName.return_value = [file_path]
        data_store.activate()
        self.toolbox.ui.toolButton_new_spine.click()
        self.assertTrue(os.path.isfile(file_path),
                        "mock_db.sqlite file not found.")
        sqlite_file = self.toolbox.ui.lineEdit_SQLite_file.text()
        self.assertEqual(sqlite_file, file_path)
        database = self.toolbox.ui.lineEdit_database.text()
        basename = os.path.basename(file_path)
        self.assertEqual(database, basename)

def test_copy_db_url_to_clipboard(self):
    """Test that the database url from current selections is copied
    to the clipboard.
    """
    # First create a DS with an sqlite db reference
    file_path = os.path.join(self.toolbox.project().project_dir, "mock_db.sqlite")
    if not os.path.exists(file_path):
        with open(file_path, 'w'):
            pass
    url = "sqlite:///" + file_path
    create_new_spine_database(url)
    reference = dict(database="foo", username="******", url=url)
    data_store = DataStore(self.toolbox, "DS", "", reference, 0, 0)
    data_store.activate()
    self.toolbox.ui.toolButton_copy_db_url.click()
    clipboard_text = QApplication.clipboard().text()
    self.assertEqual(clipboard_text, url)

def start():
    setup_logging()
    logging.getLogger('pyro').info("started")
    db = DataStore(setup=True)
    db.apply_active_profile()
    settings = db.get_settings()
    settings['enabled'] = 0
    db.save_settings(settings)
    settings = db.get_settings()
    logging.getLogger('pyro').debug('starting with settings: %s' % settings)
    control = Control()
    control.start()
    print('starting web')
    web.start()

def __init__(self, key, years):
    """Initialise Cassandra.

    Args:
        key: String of TBA key.
        years: List of the years in which to cache results.
    """
    self.years = years
    self.key = key

    # cache previous results
    events = {}
    matches = {}
    base_url = "https://www.thebluealliance.com/api/v3"
    header = {"X-TBA-Auth-Key": self.key}

    # fetch events by year and order chronologically
    for year in years:
        r = requests.get(base_url + "/events/" + str(year) + "/simple",
                         headers=header).json()
        # sort by date and don't include offseason events
        a = sorted(r, key=lambda b: b["start_date"])
        a = [i["key"] for i in a if i["event_type"] < 99]
        events[str(year)] = a

    # fetch matches by year and event
    for year in years:
        for event in events[str(year)]:
            r = requests.get(base_url + "/event/" + event + "/matches/simple",
                             headers=header).json()
            matches[event] = r

    # save to cache
    store = DataStore(new_data_store=True, year_events=events)
    for year in years:
        for event in events[str(year)]:
            event_matches = matches[event]
            store.add_event_matches(str(year), event, event_matches)

def test_load_reference(self):
    """Test that the reference is loaded into selections on Data Store
    creation, and then shown in the ui when the Data Store is activated.
    """
    # FIXME: For now it only tests sqlite references
    file_path = os.path.join(self.toolbox.project().project_dir, "mock_db.sqlite")
    if not os.path.exists(file_path):
        with open(file_path, 'w'):
            pass
    url = "sqlite:///" + file_path
    create_new_spine_database(url)
    reference = dict(database="foo", username="******", url=url)
    data_store = DataStore(self.toolbox, "DS", "", reference, 0, 0)
    data_store.activate()
    dialect = self.toolbox.ui.comboBox_dialect.currentText()
    database = self.toolbox.ui.lineEdit_database.text()
    username = self.toolbox.ui.lineEdit_username.text()
    self.assertEqual(dialect, 'sqlite')
    self.assertEqual(database, 'foo')
    self.assertEqual(username, 'bar')

def bootstrap_db(from_scratch):
    config = Configuration()
    conn = DataStore().get_connection()
    if from_scratch and config.get_db_name() in list(r.db_list().run(conn)):
        print('Forcing - dropping existing db')
        r.db_drop(config.get_db_name()).run(conn)
    if config.get_db_name() not in list(r.db_list().run(conn)):
        r.db_create(config.get_db_name()).run(conn)
    else:
        print(f"{config.get_db_name()} db already exists")
    if config.get_article_table() not in list(
            r.db(config.get_db_name()).table_list().run(conn)):
        r.db(config.get_db_name()).table_create(config.get_article_table()).run(conn)
        r.db(config.get_db_name()).table(config.get_article_table()).index_create('title').run(conn)
    else:
        print(f"{config.get_article_table()} table already exists")
    DataStore.disconnect(conn)

def __init__(self, key, years):
    """Initialise Cassandra.

    Args:
        key: String of TBA key.
        years: List of the years in which to cache results.
    """
    self.years = years
    self.key = key

    # cache previous results
    events = {}
    matches = {}
    tba = tbapy.TBA(self.key)

    # fetch events by year and order chronologically
    for year in years:
        r = tba.events(year, simple=True)
        # sort by date and don't include offseason events
        a = sorted(r, key=lambda b: b["start_date"])
        a = [i["key"] for i in a if i["event_type"] < 99]
        events[str(year)] = a

    # fetch matches by year and event
    for year in years:
        for event in events[str(year)]:
            r = tba.event_matches(event)
            matches[event] = r

    # save to cache
    store = DataStore(new_data_store=False, year_events=events)
    for year in years:
        for event in events[str(year)]:
            event_matches = matches[event]
            store.add_event_matches(str(year), event, event_matches)
    self.matches = matches

def main(args_):
    num_episodes = 600
    env = gym.make('MountainCarContinuous-v0')
    env.seed(4)
    max_ep_reward, max_pos_val = -300, -10
    data_store = DataStore(max_memory=10000)
    nn_model = NNModel(in_size=env.observation_space.shape[0],
                       out_size=21, batch_size=128)
    game_loop = GameLoop(data_store=data_store, nn_model=nn_model, env=env)
    reward_array = np.zeros(num_episodes, dtype=np.float32)
    if args_.test_run:
        nn_model.model.load_weights("./saved_models/unmod_reward_p_2_1.h5")
        episode_cost = game_loop.test_episode(render=args_.display)
        # VisualizePolicy(nn_model=nn_model)
        if args_.debug:
            print("\t Episode reward: %6.3f" % episode_cost)
    else:
        for i in tqdm(range(num_episodes)):
            episode_reward, max_pos, mean_cost = game_loop.train_episode(
                args_.display)
            reward_array[i] = episode_reward
            nn_model.write_logs(mean_cost, max_pos, episode_reward, i)
            if args_.debug:
                max_ep_reward = max(max_ep_reward, episode_reward)
                max_pos_val = max(max_pos_val, max_pos)
                if i % 10 == 0:
                    print("\t Maximum episode reward: %6.3f, "
                          "Max Position Value: %5.2f, Last Mean Cost: %8.6f"
                          % (max_ep_reward, max_pos_val, mean_cost))
        # noinspection PyTypeChecker
        np.savetxt('reward.txt', reward_array, delimiter=',')

def profiles():
    db = DataStore()
    if request.method == 'POST':
        _profiles = request.get_json()
        for _profile in _profiles:
            if 'id' in _profile and _profile['id'] != '':
                logging.getLogger('pyro').debug('updating: %s' % _profile)
                db.save_profile(_profile)
            else:
                logging.getLogger('pyro').debug('adding: %s' % _profile)
                db.add_profile(_profile)
        db.shutdown()
        return 'ok'
    elif request.method == 'DELETE':
        _profile = request.get_json()
        logging.getLogger('pyro').debug('deleting: %s' % _profile)
        db.delete_profile(_profile['id'])
        db.shutdown()
        return 'ok'
    else:
        _profiles = db.get_profiles()
        db.shutdown()
        return json.dumps(_profiles)

def test_save_and_restore_selections(self):
    """Test that selections are saved and restored when deactivating
    a Data Store and activating it again.
    """
    # FIXME: For now it only tests the mysql dialect
    data_store = DataStore(self.toolbox, "DS", "", dict(), 0, 0)
    data_store.activate()
    self.toolbox.ui.comboBox_dialect.setCurrentText('mysql')
    self.toolbox.ui.lineEdit_host.setText('localhost')
    self.toolbox.ui.lineEdit_port.setText('8080')
    self.toolbox.ui.lineEdit_database.setText('foo')
    self.toolbox.ui.lineEdit_username.setText('bar')
    data_store.deactivate()
    data_store.activate()
    dialect = self.toolbox.ui.comboBox_dialect.currentText()
    host = self.toolbox.ui.lineEdit_host.text()
    port = self.toolbox.ui.lineEdit_port.text()
    database = self.toolbox.ui.lineEdit_database.text()
    username = self.toolbox.ui.lineEdit_username.text()
    self.assertEqual(dialect, 'mysql')
    self.assertEqual(host, 'localhost')
    self.assertEqual(port, '8080')
    self.assertEqual(database, 'foo')
    self.assertEqual(username, 'bar')

def __init__(self, validatormsg):
    self.msg = validatormsg
    self.datastore = DataStore("test.db")

# Open video stream
cam = cv2.VideoCapture(args.url)
video_fps = cam.get(cv2.CAP_PROP_FPS)
image_x = int(cam.get(cv2.CAP_PROP_FRAME_WIDTH))
image_y = int(cam.get(cv2.CAP_PROP_FRAME_HEIGHT))
print("[log] Video info: {:.2f}fps {}x{}".format(video_fps, image_x, image_y))

# Initialize
face_recognizer = FaceRecognizer(args.face, args.tol)
notifier = SlackNotifier(args.slack)
# notifier = LineNotifier(args.lt, args.lu)
data_store = DataStore()
init_time = datetime(2000, 1, 1)
detected_dict = dict(zip(cilab_people.keys(),
                         [(init_time, 0) for i in range(len(cilab_people))]))
recognized_dict = dict(zip(cilab_people.keys(),
                           [init_time for i in range(len(cilab_people))]))
read_error_count = (init_time, 0)

# Monitoring
print("[log] Start monitoring...")
while True:
    frame_time = datetime.now()

def __init__(self, *args, **kwargs):
    super(DataStoreTests, self).__init__(*args, **kwargs)
    self.ds = DataStore()

from flask import Flask, request
from data_store import DataStore
import json

ds = DataStore()
app = Flask(__name__)


@app.route('/fn/set_fn', methods=['POST'])
def set_fn():
    data = json.loads(request.data.decode('utf-8'))
    key, value = data['key'], data['value']
    ds.set(key, value)
    return 'OK'


@app.route('/fn/del_fn', methods=['POST'])
def del_fn():
    data = json.loads(request.data.decode('utf-8'))
    key = data['key']
    ds.delete(key)
    return 'OK'


@app.route('/fn/get_fn', methods=['POST'])
def get_fn():
    data = json.loads(request.data.decode('utf-8'))
    key = data['key']
    # Assumed completion (the original snippet is truncated here):
    # look up the key and return the value as a JSON string.
    return json.dumps(ds.get(key))
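
# A minimal client sketch for exercising the key-value endpoints above.
# Assumptions not in the original snippet: the app runs locally on Flask's
# default port (5000), and the `requests` library is available.
import requests

BASE = 'http://localhost:5000'

# Store a value, read it back, then delete it.
requests.post(BASE + '/fn/set_fn', json={'key': 'greeting', 'value': 'hello'})
print(requests.post(BASE + '/fn/get_fn', json={'key': 'greeting'}).text)
requests.post(BASE + '/fn/del_fn', json={'key': 'greeting'})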