def test_count_messages_on_db_for_stream(self, mongo_mock):
    """count_messages(stream_id) must count documents in streamdata_<id>."""
    stream_id = 4321
    collection = self.__build_mock_with_structure(
        'testdb', 'streamdata_%d' % stream_id, mongo_mock)
    datastore.DataStore().count_messages(stream_id)
    # The count must be an unfiltered count over the whole collection.
    collection.count_documents.assert_called_once_with({})
def run():
    """Command-line entry point for the key/value paired datastore."""
    try:
        parser = argparse.ArgumentParser(description='key value paired datastore')
        parser.add_argument('-k', '--key', type=str, metavar='', required=True, help='Key')
        parser.add_argument('-v', '--value', type=str, metavar='', help='Value')
        parser.add_argument('-c', '--client', type=str, metavar='', required=True, help='Client')
        parser.add_argument('-o', '--operation', type=str, metavar='', required=True, help='Operation')
        parser.add_argument('-p', '--path', type=str, metavar='', help='Path(optional)')
        parser.add_argument('-t', '--time', type=int, metavar='', help='Time to live(optional)')
        args = parser.parse_args()

        ds = datastore.DataStore(args.path, args.client)
        if args.operation == 'Create':
            # A missing/zero TTL falls back to 0, exactly as the two
            # original Create branches did.
            ds.Create(args.key, args.value, args.time or 0)
        elif args.operation == 'Read':
            ds.Read(args.key)
        elif args.operation == 'Delete':
            ds.Delete(args.key)
        else:
            print("Choose from operations between 'Create', 'Read' , and 'Delete'")
    except KeyboardInterrupt:
        # Exit quietly on Ctrl-C.
        sys.exit(0)
def test_saves_to_the_right_db_and_collection_with_prefix(self, mongo_mock):
    """save() must write into <dbname>.<collection_prefix><stream_id>."""
    collection = self.__build_mock_with_structure(
        'some_test_db', 'some_nice_prefix_12345', mongo_mock)
    store = datastore.DataStore(dbname='some_test_db',
                                collection_prefix='some_nice_prefix_')
    store.save(['foo'], 12345)
    collection.insert_one.assert_called_once()
def test_DataStore(self):
    """End-to-end exercise of DataStore: put/get/get_all/remove/contains/del."""
    note = six.unichr(9835)  # non-ASCII key component (musical note)
    key_cpp1 = ('courses', note, 'assignments', 'homework 1')
    key_cpp2 = ('courses', note, 'assignments', 'homework 2' + note)
    key_py = ('courses', 'python', 'assignments', 'homework 1')
    temp_dir = tempfile.mkdtemp()
    try:
        store = datastore.DataStore(temp_dir)
        store.put(key_cpp1, b'c++ 1')
        store.put(key_cpp2, u'c++ 2')
        store[key_py] = 'python 1'
        self.assertEqual(set(store.get_all(('courses', ))),
                         set([note, 'python']))
        self.assertEqual(store.get(key_cpp1), 'c++ 1')
        self.assertEqual(store[key_py], 'python 1')
        # Removing a prefix drops every key underneath it.
        store.remove(('courses', note))
        self.assertEqual(set(store.get_all(('courses', ))), set(['python']))
        for prefix in (('courses', ),
                       ('courses', 'python'),
                       ('courses', 'python', 'assignments', 'homework 1')):
            self.assertIn(prefix, store)
        del store[key_py]
        self.assertNotIn(key_py, store)
        # Unknown prefixes yield nothing; unknown keys fall back to default.
        self.assertEqual(set(store.get_all(('courses', 'nothing'))), set())
        self.assertEqual(
            store.get(('does', 'not', 'exist' + note), 'DEFAULT VAL'),
            'DEFAULT VAL')
    finally:
        shutil.rmtree(temp_dir)
def test_saves_the_right_data_to_the_db(self, mongo_mock):
    """save() must insert each message into the stream's collection."""
    collection = self.__build_mock_with_structure(
        'testdb', 'streamdata_12345', mongo_mock)
    store = datastore.DataStore()
    messages = ['foo', 'bar', 'baz']
    store.save(messages, 12345)
    expected = [call(message) for message in messages]
    collection.insert_one.assert_has_calls(expected)
def main(r):
    """ Initialize stores and buzzword/phrase sets, then recurse through
    subreddit posts, scoring comments until done (or forever when hosted).

    :param r: an authenticated reddit client instance.
    """
    dstore = ds.DataStore(cfg.STORE_TYPE)
    if len(sys.argv) > 1:
        cmdline.processOpts(dstore, sys.argv)

    # Check bot inbox for messages.
    msgs = list(r.inbox.unread(limit=None))
    if len(msgs) > 0 and not cfg.HOSTED:
        print(
            str(len(msgs)) + " message(s) in /u/" + cfg.USERNAME + "\'s inbox.")
        print("Please read before running bot.")
    if len(msgs) > cfg.MAX_MSGS:
        exit()

    # Load buzzwords (plus naive plurals) and buzzphrases from storage;
    # either failing is fatal.
    try:
        for word in dstore.readData(cfg.WORD_STORE):
            cfg.buzzwords.add(word)
            cfg.buzzwords.add(word + "s")
    except Exception as err:
        print(err)
        exit()
    try:
        for phrase in dstore.readData(cfg.PHRASE_STORE):
            cfg.buzzphrases.add(phrase)
    except Exception as err:
        print(err)
        exit()

    cfg.MATCHES = dstore.readScore()
    cfg.already_scored = dstore.readScored()
    cfg.highscores = dstore.readHighscores()
    if cfg.DEBUG:
        for score, name, url in cfg.highscores:
            print("Name: " + name + " got " + str(score) + " (" + url + ")")

    try:
        while True:
            sub = r.subreddit(cfg.SUBREDDIT).new(limit=50)
            for submission in sub:
                post = r.submission(submission)
                if cfg.DEBUG:
                    print("submission: " + format(post.id))
                for comment in post.comments:
                    c = cmt.Comment(dstore, r, comment)
                    c.checkComment()
            if not cfg.HOSTED:
                print("\nBleep! All done.")
                break
        # BUG FIX: was `cfg.STORE_TYPE is not "file"`, which compares
        # string *identity* (implementation-dependent, SyntaxWarning on
        # CPython >= 3.8); value inequality is what was meant.
        if cfg.STORE_TYPE != "file":
            dstore.closeDB()
    except Exception:
        # BUG FIX: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit; narrowed to Exception so the bot
        # can still be interrupted.
        if cfg.DEBUG:
            traceback.print_exc()
        time.sleep(30)
        if cfg.DEBUG:
            print("ERROR: Reddit timeout, resuming.")
def run():
    """Copy watched files and record what was copied."""
    # Discover the files listed in config.dat (the unbound-call style,
    # passing the class itself as `self`, is kept from the original code).
    watched = config.ManageConfig.watch_files(config.ManageConfig, "config.dat")

    copied = do_initial_copy(watched)
    initial_store = db.DataStore(copied)
    initial_store.record_files_copied()

    # Re-read the record of copied files (again via an unbound call).
    recorded = db.DataStore.read_files_copied(db.DataStore)
    do_changed_files_copy(recorded)

    changed_store = db.DataStore(recorded)
    changed_store.record_files_copied()

    print(recorded)
def CheckForUrl(apikey, urls):
    """Check *urls* with a Client backed by a throwaway datastore.

    Blocks until the checker reports completion, then shuts the client down.
    """
    # Fresh datastore location under a new temp directory (never reused).
    store_path = os.path.join(tempfile.mkdtemp(), 'datastore_checker')
    store = datastore.DataStore(store_path)
    checker = UrlChecker(urls)
    client = Client(store, apikey, post_update_hook=checker.Updated)
    checker.WaitForFinish()
    client.ExitUpdater()
def __init__(self, *args, **kwargs):
    """Build the datastore backend and the API output configuration."""
    super(APIGenerator, self).__init__(*args, **kwargs)
    self.backend = datastore.DataStore(self.settings)
    self.datasets = self.backend.build()
    self.output_path = self.settings['OUTPUT_PATH']
    # Hoist the repeated settings lookup; pure dict reads, so behavior
    # is unchanged.
    api_settings = self.settings['DATASTORE']['api']
    self.api_base = api_settings['base']
    self.api_path = os.path.join(self.output_path, self.api_base)
    self.api_filters = api_settings['filters']
    self.api_exclude = api_settings['exclude']
    self.api_formats = api_settings['formats']
    # Create the API output directory on first run.
    if not os.path.exists(self.api_path):
        os.makedirs(self.api_path)
def __init__(self, *args, **kwargs):
    """Generate per-place and per-dataset content pages from the datastore.

    NOTE(review): this mutates sys.path *before* importing `datastore` and
    `config_default`, so statement order here is load-bearing.
    """
    base_path = os.path.join(os.getcwd())
    PROJECT_ROOT = base_path
    PLUGINS = os.path.join(PROJECT_ROOT, 'plugins')
    # NOTE(review): trailing comma makes this just PROJECT_ROOT itself.
    CONF = os.path.join(PROJECT_ROOT, )
    sys.path.append(PLUGINS)
    sys.path.append(CONF)
    import datastore
    import config_default
    self.conf = {
        'DATASTORE': config_default.DATASTORE,
        'ODI': config_default.ODI
    }
    ds = datastore.DataStore(self.conf)
    # Output layout: content/pages/{dataset,place}/<id>/index.md
    self.dest_path = os.path.join(PROJECT_ROOT, 'content', 'pages')
    self.datasets_dir = os.path.join(self.dest_path, 'dataset')
    self.places_dir = os.path.join(self.dest_path, 'place')
    self.file = 'index.md'
    self.empty_display_type = u'empty'
    self.na_display_type = u'na'
    self.datastore = ds.build()
    self.places = self.datastore['places'].dict
    self.datasets = self.datastore['datasets'].dict
    self.entries = self.datastore['entries'].dict
    self.years = self.conf['ODI']['years']
    self.current_year = self.conf['ODI']['current_year']
    # Optional narrowing to a subset of places/datasets by id.
    if kwargs.get('limited_places'):
        self.places = [
            p for p in self.places if p['id'] in kwargs['limited_places']
        ]
    if kwargs.get('limited_datasets'):
        self.datasets = [
            d for d in self.datasets if d['id'] in kwargs['limited_datasets']
        ]
    # Wipe and regenerate both output trees.
    self.ensure_dir(self.datasets_dir, clean_slate=True)
    self.ensure_dir(self.places_dir, clean_slate=True)
    self.write_places()
    self.write_datasets()
def __init__(self):
    """Create the book data store plus per-run statistics accumulators."""
    # Book data store.
    self.ds = datastore.DataStore()
    # Custom stores / counters.
    self.keyword = "truth"
    self.keywordCutoff = 2
    self.keywordMulti = []
    self.relYears = collections.Counter()        # release-year frequencies
    self.meanRelYear = {"sum": 0, "num": 0}      # running mean inputs
    self.engVsNon = {"eng": 0, "non": 0}         # English vs non-English tally
    self.maxDialog = {"bookID": "", "num": 0}    # book with most dialog
    self.maxSize = {"bookID": "", "num": 0}      # largest book
    # Precompiled regex patterns. FIX: raw-string literals so `\s` and
    # `\*` are regex escapes rather than invalid string escapes (a
    # DeprecationWarning on modern CPython); the patterns are unchanged.
    self.startPattern = re.compile(r'\s*\*\*\* ?START OF TH[EI]S? PROJECT GUTENBERG EBOOK .*')
    self.endPattern = re.compile(r'\s*\*\*\* ?END OF TH[EI]S? PROJECT GUTENBERG EBOOK .*')
    self.frontPattern = re.compile('.*: .*')
    self.datePattern = re.compile(r'(Released )?(on )?[A-Za-z]*\s?[0-9]?[0-9]?,? ?([0-9]{4}).*')
    self.startCount = 0
def __init__(self, filename=""):
    """Initialize the quotes class, loading quotes from *filename*."""
    if filename:
        self.filename = filename
    # NOTE(review): when *filename* is empty, self.filename must already
    # exist (e.g. a class attribute) or the next line raises
    # AttributeError — confirm against the enclosing class.
    self.quotes_list = datastore.DataStore(self.filename)
    random.seed()
def test_connect_to_the_right_database_and_port(self, mongo_mock):
    """DataStore must prepend the mongodb:// scheme and forward the port."""
    host, port = 'somewhere.com', 12345
    datastore.DataStore(host=host, port=port)
    mongo_mock.assert_called_once_with(host='mongodb://' + host, port=port)
        # Persist the computed profits, then cache them on the instance.
        with open(filename, 'wb') as handle:
            pickle.dump(profit_dict, handle, protocol=pickle.HIGHEST_PROTOCOL)
        self.plan_profits = profit_dict


if __name__ == "__main__":
    # import other scripts for testing
    import datastore as ds
    import regstore as rs
    # initialize start time
    start = time.time()
    # import DS object from datastore.py and load/prepare exogenous data
    DS = ds.DataStore()
    DS.load_exog('month_exog_final.zip')
    DS.get_base_plan()
    DS.separate_exog(subcategory=True)
    # create Segment column in DS.style_exog_avg
    DS.style_exog_avg['Segment'] = DS.style_exog_avg['Style']
    # rename DS.exog to be concatenated DS.base_exog and DS.style_exog_avg
    DS.exog = pd.concat([DS.base_exog, DS.style_exog_avg], sort=False)

    # import RS object from regstore.py and build its model list
    RS = rs.RegStore()
    RS.build_model_list(DS)
    # RS.get_valuation(DS)
def save(self):
    """Persist this instance to storage as a document."""
    backend = datastore.DataStore()
    backend.connect()
    backend.setup()  # make sure the storage schema exists before writing
    backend.put(self.as_doc())
            # Tail of a people-filtering list comprehension whose start is
            # outside this view: keep only people whose name contains *text*.
            if text in p.name.lower()
        ]
        print("found %d results for '%s'" % (len(people), text))
        # Rebuild the results table: one row per match, name + badge,
        # selectable but not editable.
        self.results.clearContents()
        self.results.setSortingEnabled(False)
        self.results.setRowCount(len(people))
        for row, person in enumerate(people):
            item = QTableWidgetItem(person.name)
            item.setFlags(Qt.ItemIsSelectable | Qt.ItemIsEnabled)
            self.results.setItem(row, 0, item)
            item = QTableWidgetItem(str(person.badge))
            item.setFlags(Qt.ItemIsSelectable | Qt.ItemIsEnabled)
            self.results.setItem(row, 1, item)
        self.results.sortItems(0)

    def resultDoubleClicked(self, row, column):
        # Double-clicking a row emits the badge number (column 1) so the
        # owner can toggle that person in/out.
        item = self.results.item(row, 1)
        self.personInOut.emit(int(item.text()))


if __name__ == "__main__":
    import sys
    import datastore
    # Standalone demo: load the store and show the find dialog.
    store = datastore.DataStore()
    store.load()
    app = QApplication(sys.argv)
    form = FindDlg(store)
    form.show()
    app.exec_()
def _getscope(self, serverid, channelid):
    """Return the tag store for (server, channel), creating it on first use."""
    key = "tags-%s-%s" % (serverid, channelid)
    if key not in self._storage:
        # Lazily create a per-scope store with tag (de)serialization hooks.
        self._storage[key] = datastore.DataStore(key, TagEncoder, tag_decoder)
    return self._storage[key]
import datastore as datastore_lib
import escape_lib
import executor
import executor_queue_lib
import grade_oven_lib
import random_display_name_lib

SECONDS_PER_DAY = 24 * 60 * 60

# globals
app = flask.Flask(__name__)
# NOTE(review): secret key and data live one directory above the app.
app.config['SECRET_KEY'] = open('../data/secret_key.txt').read()
app.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024  # 16 MiB upload cap
login_manager = login.LoginManager()
login_manager.init_app(app)
data_store = datastore_lib.DataStore(os.path.abspath('../data/db'))
grade_oven = grade_oven_lib.GradeOven(data_store)
executor_queue = executor_queue_lib.ExecutorQueue()
monitor_variables = collections.defaultdict(int)


class ResourcePool(object):
    # Thread-safe pool handing out resources from a fixed initial set.
    def __init__(self, resources):
        self._free_resources = collections.deque(resources)
        self._used_resources = set()
        self._resources_lock = threading.Lock()

    def get(self):
        # Pop a free resource under the lock; continuation of this method
        # is outside the visible chunk.
        with self._resources_lock:
            try:
                resource = self._free_resources.popleft()
            except IndexError:
def __init__(self, parent=None):
    """Model wrapping a freshly-created DataStore (Qt-style parent arg)."""
    super(DataStoreModel, self).__init__(parent)
    self.datastore = datastore.DataStore()
def main():
    """Command-line entry point: inspect, add a page to, or render a document.

    Usage: prog [--password PW] DOCUMENT [COMMAND ...]
      (no command)            dump store info, update the cache, print links
      add TEXTFILE NAME [FMT] add a page ('plaintext' or 'markdown')
      render OUTDIR           render the whole document
    """
    cmd = ''
    args = sys.argv
    password = None

    # Parse out --password <password> and strip both tokens from argv.
    for i in range(0, len(args)):
        if args[i] == '--password':
            if i + 1 == len(args):
                print('--password requires an argument')
                return
            password = args[i + 1]
            del args[i]
            del args[i]  # second delete removes the value, shifted into slot i
            break

    if len(args) <= 1:
        usage()
        return

    document_path = args[1]
    if len(args) >= 3:
        cmd = args[2]

    if cmd == '':
        # Update the cache and dump information about the document.
        ds = datastore.DataStore(document_path, password)
        print(ds.path)
        print(ds.storeinfo)
        print(ds.properties)
        print(ds.validate())
        cache = VPCache(document_path)
        cache.update_cache(ds)
        uuids = ds.item_uuids()
        for uuid in uuids:
            print(ds.item_plist(uuid)['displayName'], 'links to:')
            # FIX: loop variable renamed from `id` (shadowed the builtin).
            for linked in cache.get_forwardlinks(uuid):
                print(linked, ds.item_plist(linked)['displayName'])
        for uuid in uuids:
            print(ds.item_plist(uuid)['displayName'], ' backlinks to:')
            for linked in cache.get_backlinks(uuid):
                print(linked, ds.item_plist(linked)['displayName'])
    elif cmd == 'add':
        # Add a page to the document.
        if len(args) != 5 and len(args) != 6:
            usage()
            return
        ds = datastore.DataStore(document_path, password)
        text_file = args[3]
        name = args[4]
        # FIX: renamed from `format` (shadowed the builtin); behavior same.
        page_format = 'plaintext'
        if len(args) > 5:
            page_format = args[5]
        if page_format == 'plaintext':
            page_format = PageFormat.Plaintext
        elif page_format == 'markdown':
            page_format = PageFormat.MarkDown
        else:
            print('Invalid format ', page_format)
            return
        # Refuse duplicate display names (case-insensitive).
        for item in ds.item_plists.values():
            if item['displayName'].lower() == name.lower():
                print('A page with that name already exists')
                return
        with open(text_file, 'rb') as f:
            text = f.read().decode('utf-8')
        add_item(ds, name, text, page_format)
    elif cmd == 'render':
        if len(args) != 4:
            usage()
            return
        ds = datastore.DataStore(document_path, password)
        output_dir = args[3]
        cache = VPCache(args[1])
        cache.update_cache(ds)
        render_document(ds, cache, output_dir)
    else:
        print('Unknown command', cmd)
        print('')
        usage()