def getRSSFeed(self, url, post_data=None):
    # create provider storage cache
    storage = Shove('sqlite:///' + ek.ek(os.path.join, sickbeard.CACHE_DIR, self.provider.name) + '.db')
    fc = cache.Cache(storage)

    parsed = list(urlparse.urlparse(url))
    parsed[2] = re.sub("/{2,}", "/", parsed[2])  # replace two or more / with one

    if post_data:
        url = url + 'api?' + urllib.urlencode(post_data)

    try:
        f = fc.fetch(url)

        if not f:
            logger.log(u"Error loading " + self.providerID + " URL: " + url, logger.ERROR)
            return None
        elif 'error' in f.feed:
            logger.log(u"Newznab ERROR:[%s] CODE:[%s]" % (f.feed['error']['description'], f.feed['error']['code']), logger.DEBUG)
            return None
        elif not f.entries:
            logger.log(u"No items found on " + self.providerID + " using URL: " + url, logger.WARNING)
            return None

        return f
    finally:
        # close the store on every path, not only after a successful fetch
        storage.close()
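# A minimal standalone sketch of the Shove + feedcache pairing used above,
# assuming the third-party `shove` and `feedcache` packages; the sqlite path
# and feed URL are placeholders, not values from the original project.
from feedcache import cache
from shove import Shove

storage = Shove('sqlite:///feeds_demo.db')
try:
    fc = cache.Cache(storage)                   # feedcache wraps any dict-like store
    feed = fc.fetch('http://example.com/rss')   # returns a parsed feedparser result
    for entry in feed.entries:
        print(entry.get('title'))
finally:
    storage.close()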
def __init__(self, port, link_db_uri, user_db_uri, use_auth=True):
    self.port = port
    self.link_db = Shove(link_db_uri)
    self.user_db = Shove(user_db_uri)
    self.use_auth = use_auth
    if not self.use_auth and 'null' not in self.user_db:
        self.user_db['null'] = {'token': '', 'username': '******', 'links': []}
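# A hedged sketch of the dict-like Shove API these constructors rely on,
# assuming only the `shove` package; the URI and keys are illustrative.
from shove import Shove

user_db = Shove('sqlite:///users_demo.db')
user_db['null'] = {'token': '', 'username': 'demo', 'links': []}
user_db.sync()                  # flush pending writes to the backing store
assert 'null' in user_db        # membership tests hit the store like a dict
user_db.close()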
class TestCassandraStore(Store, Spawn, unittest.TestCase):

    cmd = ['cassandra', '-f']

    @classmethod
    def setUpClass(cls):
        super(TestCassandraStore, cls).setUpClass()
        import time
        time.sleep(5.0)

    def setUp(self):
        from shove import Shove
        from pycassa.system_manager import SystemManager  # @UnresolvedImport @IgnorePep8
        system_manager = SystemManager('localhost:9160')
        try:
            system_manager.create_column_family('Murk', 'shove')
        except Exception:
            # the column family may already exist from a previous run
            pass
        self.store = Shove('cassandra://localhost:9160/Murk/shove')

    def tearDown(self):
        if self.store._store is not None:
            self.store.clear()
            self.store.close()
        from pycassa.system_manager import SystemManager  # @UnresolvedImport @IgnorePep8
        system_manager = SystemManager('localhost:9160')
        system_manager.drop_column_family('Murk', 'shove')

    @classmethod
    def tearDownClass(cls):
        from fabric.api import local
        local('killall java')
def test__cmp__(self):
    tstore = Shove()
    self.store['max'] = 3
    tstore['max'] = 3
    self.store.sync()
    tstore.sync()
    self.assertEqual(self.store, tstore)
def test_execute_invalid_command(self):
    """If the given command could not be found for the given project, return an error tuple."""
    shove = Shove({'myproject': path('test_project')})
    order = Order(project='myproject', command='foo', log_key=5, log_queue='asdf')
    procfile_path = path('test_project', 'bin', 'commands.procfile')
    eq_(shove.execute(order), (1, 'No command `foo` found in {0}'.format(procfile_path)))
def test_execute_invalid_command(self):
    """If the given command could not be found for the given project, return an error tuple."""
    shove = Shove({"myproject": path("test_project")}, Mock())
    order = Order(project="myproject", command="foo", log_key=5, log_queue="asdf")
    procfile_path = path("test_project", "bin", "commands.procfile")
    eq_(shove.execute(order), (1, "No command `foo` found in {0}".format(procfile_path)))
def test_process_order_invalid(self):
    """If parse_order returns None, do not execute the order."""
    shove = Shove({})
    shove.parse_order = Mock(return_value=None)
    shove.execute = Mock()

    eq_(shove.process_order('{"project": "asdf"}'), None)
    ok_(not shove.execute.called)
def test_parse_order_valid(self):
    """If the given order is valid, return an Order namedtuple with the correct values."""
    shove = Shove({}, Mock())
    order = shove.parse_order('{"project": "asdf", "command": "qwer", "log_key": 77, '
                              '"log_queue": "zxcv"}')
    eq_(order.project, "asdf")
    eq_(order.command, "qwer")
    eq_(order.log_key, 77)
    eq_(order.log_queue, "zxcv")
def test_parse_procfile(self):
    shove = Shove({})
    commands = shove.parse_procfile(path('test_procfile.procfile'))
    eq_(commands, {
        'cmd': ['test'],
        'valid_underscore': ['homer'],
        'valid03': ['foo', 'bar', '--baz']
    })
def test__cmp__(self):
    from shove import Shove
    tstore = Shove()
    self.store['max'] = 3
    tstore['max'] = 3
    self.store.sync()
    tstore.sync()
    self.assertEqual(self.store, tstore)
def setUp(self):
    from shove import Shove
    from pycassa.system_manager import SystemManager  # @UnresolvedImport @IgnorePep8
    system_manager = SystemManager('localhost:9160')
    try:
        system_manager.create_column_family('Murk', 'shove')
    except Exception:
        # the column family may already exist
        pass
    self.store = Shove('cassandra://localhost:9160/Murk/shove')
def setUp(self):
    from shove import Shove
    from pycassa.system_manager import SystemManager
    system_manager = SystemManager('localhost:9160')
    try:
        system_manager.create_column_family('Foo', 'shove')
    except Exception:
        # the column family may already exist
        pass
    self.store = Shove('cassandra://localhost:9160/Foo/shove')
def test_execute_no_procfile(self):
    """If no procfile is found for the given project, return an error tuple."""
    shove = Shove({'myproject': path('nonexistant')})
    order = Order(project='myproject', command='foo', log_key=5, log_queue='asdf')
    eq_(shove.execute(order),
        (1, CONTAINS('Error loading procfile for project `myproject`')))
def test_parse_procfile(self):
    shove = Shove({})
    commands = shove.parse_procfile(path('test_procfile.procfile'))
    eq_(
        commands,
        {
            'cmd': ['test'],
            'valid_underscore': ['homer'],
            'valid03': ['foo', 'bar', '--baz']
        })
class _ShoveWrapper(object):

    def __init__(self, loc):
        self._loc = loc
        self._shove = Shove(self._loc)

    def __enter__(self):
        return self._shove

    def __exit__(self, exc_type, exc_value, traceback):
        self._shove.close()
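# Hypothetical usage of the _ShoveWrapper context manager defined above;
# the file:// location is a placeholder.
with _ShoveWrapper('file:///tmp/demo_store') as store:
    store['key'] = 'value'      # plain dict-style access inside the block
    print(store['key'])
# the wrapped Shove is closed automatically on exit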
def test_parse_order_valid(self):
    """If the given order is valid, return an Order namedtuple with the correct values."""
    shove = Shove({})
    order = shove.parse_order(
        '{"project": "asdf", "command": "qwer", "log_key": 77, '
        '"log_queue": "zxcv"}')
    eq_(order.project, 'asdf')
    eq_(order.command, 'qwer')
    eq_(order.log_key, 77)
    eq_(order.log_queue, 'zxcv')
class TestFTPStore(Store, unittest.TestCase):

    initstring = 'ftp://127.0.0.1/'

    def setUp(self):
        from shove import Shove
        self.store = Shove(self.initstring, compress=True)

    def tearDown(self):
        self.store.clear()
        self.store.close()
def get_data_set_attribute(self, data_set, key):
    attrib_name = self.get_attribute_name(data_set, key)
    if self.has_attribute(data_set, key):
        return self.store[attrib_name]
    else:
        return None
def primary_file_name(self):
    """The file name for the 'primary' file in the bucket.

    It is this file from which data is loaded. Other files within the
    bucket should be auxiliary to this file. (E.g. they should contain
    projection information.)
    """
    shove = Shove(self._shove_url)
    try:
        return shove['primary_file_name']
    except KeyError:
        return None
    finally:
        shove.close()
def load_facts(config):
    import requests
    from bs4 import BeautifulSoup
    db = Shove(config['dburi'])
    db['facts'] = []
    url1 = 'http://www.cats.alpha.pl/facts.htm'
    raw = requests.get(url1).text
    soup = BeautifulSoup(raw).findAll('ul')[1]
    for string in soup.stripped_strings:
        if string:
            db['facts'].append(string)
    db.sync()
def test_process_order_valid(self):
    """If parse_order returns a valid order, execute it and send logs back to Captain."""
    shove = Shove({}, Mock())
    order = Order(project="asdf", command="qwer", log_key=23, log_queue="zxcv")
    shove.parse_order = Mock(return_value=order)
    shove.execute = Mock(return_value=(0, "output"))

    shove.process_order('{"project": "asdf"}')
    shove.execute.assert_called_with(order)
    shove.adapter.send_log.assert_called_with(
        "zxcv",
        JSON({"version": "1.0", "log_key": 23, "return_code": 0, "output": "output"})
    )
def load_facts(config):
    import requests
    import re
    db = Shove(config['dburi'])
    db['facts'] = []
    url1 = 'http://www.cats.alpha.pl/facts.htm'
    raw = requests.get(url1).text
    filtered = filter(
        lambda l: l.startswith('<li>'),
        map(lambda l: l.strip(), raw.split('\n')))
    stripped = map(lambda l: re.sub('<[^<]+?>', '', l), filtered)
    db['facts'].extend(stripped)
    db.sync()
def __init__(self, modeldir=None, adminsfile=None, oauthconfig=None):
    self.modeldir = modeldir
    self.admins = yaml.load(file(adminsfile, 'r'))
    self.oauth_clients = Shove('sqlite:///oauth_clients.dat')
    self.users = Shove('sqlite:///oauth_users.dat')
    self.bearers = Shove('sqlite:///oauth_bearers.dat')
    self.oauthconf = yaml.load(file(oauthconfig, 'r'))

    def stopper():
        print 'saving persistent data'
        self.oauth_clients.close()
        self.users.close()
        self.bearers.close()

    cherrypy.engine.subscribe('stop', stopper)
def invert_index(source_dir, index_url=INDEX_URL, init=False):
    """
    Build the inverted index from the given source_dir and output a Shove
    object backed by the store at index_url.

    Input:
        source_dir: a directory on the filesystem
        index_url: the store path for the Shove object
        init: clear the old index and rebuild from scratch
    Output:
        index: a Shove object
    """
    raw_index = defaultdict(list)
    for base, dir_list, fn_list in os.walk(source_dir):
        for fn in fn_list:
            fp = os.path.join(base, fn)
            code = fn
            with open(fp, encoding="utf-8") as f:
                try:
                    tokens = f.read().strip().split('\n')
                except Exception:
                    # skip files that cannot be read or decoded
                    print(fp)
                    continue
            for token in tokens:
                raw_index[token].append(code)

    index = Shove(store=index_url)
    if init:
        index.clear()
    if '' in raw_index:
        del raw_index['']
    index.update(raw_index)
    index.sync()
    return index
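# A sketch of building and querying the inverted index above, assuming a
# directory of newline-delimited token files; all paths are placeholders.
index = invert_index('/path/to/token_files', index_url='file:///tmp/demo_index', init=True)
print(index.get('some_token', []))  # file names ("codes") containing the token
index.close()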
def test_process_order_valid(self):
    """If parse_order returns a valid order, execute it and send logs back to Captain."""
    shove = Shove({})
    order = Order(project='asdf', command='qwer', log_key=23, log_queue='zxcv')
    shove.parse_order = Mock(return_value=order)
    shove.execute = Mock(return_value=(0, 'output'))

    eq_(shove.process_order('{"project": "asdf"}'), ('zxcv', JSON({
        'version': '1.0',
        'log_key': 23,
        'return_code': 0,
        'output': 'output'
    })))
    shove.execute.assert_called_with(order)
class TestHDF5Store(Store, unittest.TestCase):

    initstring = 'hdf5://test.hdf5/test'

    def setUp(self):
        from shove import Shove
        self.store = Shove(self.initstring)

    def tearDown(self):
        import os
        self.store.close()
        try:
            os.remove('test.hdf5')
        except OSError:
            pass
def lookup_page_name(page_name, cache_file='file://test', sparql_url='', polite_factor=1):
    """Look up info from the cache, falling back to a SPARQL query on a miss."""
    page_name = page_name_normalize(page_name)
    logging.info(cache_file)
    logging.info(page_name)
    cache = Shove(cache_file)
    if page_name in cache:
        logging.debug("cache hit")
        return cache[page_name]
    else:
        logging.debug("cache miss")
        res = perform_sparql_query(page_name, sparql_url, polite_factor)
        cache[page_name] = res
        cache.sync()
        return res
def test_execute_valid_order(self):
    shove = Shove({'myproject': path('test_project')})
    order = Order(project='myproject', command='pwd', log_key=5, log_queue='asdf')

    with patch('shove.base.Popen') as Popen:
        p = Popen.return_value
        p.communicate.return_value = 'command output', None
        p.returncode = 0

        return_code, output = shove.execute(order)
        Popen.assert_called_with(['pwd'], cwd=path('test_project'), stdout=PIPE,
                                 stderr=STDOUT)
        p.communicate.assert_called_with()
        eq_(return_code, 0)
        eq_(output, 'command output')
def save_data(self, data):
    db = Shove(self.conf['data_file'])
    modified = False
    for item in data:
        try:
            db[item['guid']]
        except KeyError:
            db[item['guid']] = item
            modified = True
        else:
            if db[item['guid']] != item:
                db[item['guid']] = item
                modified = True
    db.close()
    return modified
def test_execute_valid_order(self):
    shove = Shove({"myproject": path("test_project")}, Mock())
    order = Order(project="myproject", command="pwd", log_key=5, log_queue="asdf")

    with patch("shove.base.Popen") as Popen:
        p = Popen.return_value
        p.communicate.return_value = "command output", None
        p.returncode = 0

        return_code, output = shove.execute(order)
        Popen.assert_called_with(["pwd"], cwd=path("test_project"), stdout=PIPE,
                                 stderr=STDOUT)
        p.communicate.assert_called_with()
        eq_(return_code, 0)
        eq_(output, "command output")
def __init__(self, application):
    log.info('Creating Moksha Middleware')
    self.application = application
    self.mokshaapp = MokshaAppDispatcher(application)

    moksha.utils._apps = {}     # {'app name': tg.TGController/tg.WSGIAppController}
    moksha.utils._widgets = {}  # {'widget name': tw.api.Widget}
    moksha.utils.menus = {}     # {'menu name': moksha.api.menus.MokshaMenu}
    self.engines = {}           # {'app name': sqlalchemy.engine.base.Engine}

    self.load_paths()
    self.load_renderers()
    self.load_configs()
    self.load_widgets()
    self.load_applications()
    self.load_wsgi_applications()
    self.load_models()
    self.load_menus()
    self.load_root()

    try:
        moksha.utils.feed_storage = Shove(
            config.get('feed_store', 'simple://'),
            config.get('feed_cache', 'simple://'),
            compress=True)
        moksha.utils.feed_cache = Cache(moksha.utils.feed_storage)
    except Exception, e:
        log.error(str(e))
        log.error("Unable to initialize the Feed Storage")
def iterentries(cls, limit=None):
    if not hasattr(cls, 'id'):
        cls.id = str(uuid.uuid4())
    id = cls.id
    url = cls.url
    if not cls.url:
        raise ValueError("Feed must be supplied with a url.")
    global feed_cache, feed_storage
    if not feed_cache:
        feed_storage = Shove('sqlite:///feeds.db', compress=True)
        feed_cache = Cache(feed_storage)
    feed = feed_cache.fetch(url)
    if not (200 <= feed.get('status', 200) < 400):
        log.warning('Got %s status from %s: %s' % (
            feed['status'], url, feed.headers.get('status')))
        cls.title = feed.headers.get('status')
        cls.link = feed.feed.get('link')
        return
    cls.link = feed.feed.get('link')
    try:
        cls.title = feed.feed.title
    except AttributeError:
        cls.title = 'Unable to parse feed'
        return
    for i, entry in enumerate(feed.get('entries', [])):
        entry['uid'] = '%s_%d' % (id, i)
        entry['link'] = entry.get('link')
        if i == limit:
            break
        yield entry
def __init__(
    self,
    component_appid,
    component_appsecret,
    component_token,
    encoding_aes_key,
    session=None,
    auto_retry=True,
):
    """
    :param component_appid: third-party platform appid
    :param component_appsecret: third-party platform appsecret
    :param component_token: official-account message verification Token
    :param encoding_aes_key: official-account message encryption/decryption Key
    """
    self._http = requests.Session()
    self.component_appid = component_appid
    self.component_appsecret = component_appsecret
    self.expires_at = None
    self.crypto = WeChatCrypto(component_token, encoding_aes_key, component_appid)
    self.session = session or MemoryStorage()
    self.auto_retry = auto_retry

    if isinstance(session, str):
        from shove import Shove
        from wechatpy.session.shovestorage import ShoveStorage

        querystring = get_querystring(session)
        prefix = querystring.get("prefix", ["wechatpy"])[0]

        shove = Shove(session)
        storage = ShoveStorage(shove, prefix)
        self.session = storage
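# Hedged sketch: passing a shove URI string as `session` so tokens persist
# across restarts. `WeChatComponent` stands in for whatever class owns the
# __init__ above; the URI and prefix are illustrative.
client = WeChatComponent(
    'component_appid',
    'component_appsecret',
    'component_token',
    'encoding_aes_key',
    session='sqlite:///wechat_session.db?prefix=myapp',
)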
def clearCache(self, age=None):
    try:
        with closing(Shove('sqlite:///' + self.db_name, compress=True)) as fs:
            fc = cache.Cache(fs)
            fc.purge(age)
    except Exception as e:
        logger.log(u"RSS error clearing cache: " + ex(e), logger.DEBUG)
def iterentries(self, d=None, limit=None):
    url = self.url or d.get('url')
    id = d and d.get('id', self.id) or self.id
    if moksha.utils.feed_cache:
        feed = moksha.utils.feed_cache.fetch(url)
    else:
        # MokshaMiddleware not running, so set up our own feed cache.
        # This allows us to use this object outside of WSGI requests.
        global feed_cache, feed_storage
        if not feed_cache:
            feed_storage = Shove('sqlite:///feeds.db', compress=True)
            feed_cache = Cache(feed_storage)
        feed = feed_cache.fetch(url)
    if not (200 <= feed.get('status', 200) < 400):
        log.warning('Got %s status from %s: %s' % (
            feed['status'], url, feed.headers.get('status')))
        if d:
            d['title'] = feed.headers.get('status')
            d['link'] = feed.feed.get('link')
        return
    if d:
        d['link'] = feed.feed.get('link')
        try:
            d['title'] = feed.feed.title
        except AttributeError:
            d['title'] = 'Unable to parse feed'
            return
    for i, entry in enumerate(feed.get('entries', [])):
        entry['uid'] = '%s_%d' % (id, i)
        entry['link'] = entry.get('link')
        if i == limit:
            break
        yield entry
def __init__(self, config):
    self.config = config
    self.apikeys = [s.strip() for s in self.config['apikeys'].split(',')]
    dburi = self.config['dburi']
    self.db = Shove(dburi)
    self.app = Flask(__name__)
    self.twilio = TwilioRestClient(
        self.config['SID'], self.config['token'])
    if 'numbers' not in self.db:
        self.db['numbers'] = []
    if 'facts' not in self.db:
        print "No catfacts found, run catfacts load"
        exit()
    self.db.sync()
    self.routes = {
        "/api/numbers": (self.add_number, {"methods": ['POST']}),
        "/api/numbers/<num>": (self.remove_number, {"methods": ['DELETE']}),
        "/api/callback": (self.twilio_callback, {"methods": ['GET']}),
        "/api/facts": (self.add_facts, {"methods": ['POST']})}
    # register each route with Flask (map is eager under Python 2)
    map(
        lambda route: self.app.route(
            route, **self.routes[route][1])(self.routes[route][0]),
        self.routes)
def set_data_set_attribute(self, data_set, key, value):
    attrib_name = self.get_attribute_name(data_set, key)
    self.store.update({attrib_name: value})
    self.store.sync()
    self.store.close()
    gc.collect()
    self.store = Shove('file://' + self.filename, 'memory://', optimize=False)
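# A compact sketch of the write-then-reopen pattern above: a file:// store
# fronted by a memory:// cache, closed and reopened after sync() to drop
# the cached view. The path is a placeholder.
from shove import Shove

store = Shove('file:///tmp/demo_attrs', 'memory://', optimize=False)
store.update({'dataset1.some_key': 42})
store.sync()
store.close()
store = Shove('file:///tmp/demo_attrs', 'memory://', optimize=False)  # fresh cache
print(store['dataset1.some_key'])
store.close()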
def __init__(self, appid, access_token=None, session=None, timeout=None, auto_retry=True):
    self.appid = appid
    self.expires_at = None
    self.session = session or MemoryStorage()
    self.timeout = timeout
    self.auto_retry = auto_retry

    if isinstance(session, six.string_types):
        from shove import Shove
        from wechatpy.session.shovestorage import ShoveStorage

        querystring = get_querystring(session)
        prefix = querystring.get('prefix', ['wechatpy'])[0]

        shove = Shove(session)
        storage = ShoveStorage(shove, prefix)
        self.session = storage

    if access_token:
        self.session.set(self.access_token_key, access_token)
def __init__(self, component_appid, component_appsecret,
             component_token, encoding_aes_key, session=None):
    """
    :param component_appid: third-party platform appid
    :param component_appsecret: third-party platform appsecret
    :param component_token: official-account message verification Token
    :param encoding_aes_key: official-account message encryption/decryption Key
    """
    self.component_appid = component_appid
    self.component_appsecret = component_appsecret
    self.expires_at = None
    self.crypto = WeChatCrypto(component_token, encoding_aes_key, component_appid)
    self.session = session or MemoryStorage()

    if isinstance(session, six.string_types):
        from shove import Shove
        from wechatpy.session.shovestorage import ShoveStorage

        querystring = get_querystring(session)
        prefix = querystring.get('prefix', ['wechatpy'])[0]

        shove = Shove(session)
        storage = ShoveStorage(shove, prefix)
        self.session = storage
def poll(self):
    self.log.info("Cached cla_done graph")
    stats_cache = Shove(config.get('stats_cache'))
    fas_connector = get_connector('fas')
    data = fas_connector.group_membership_over_time()
    stats_cache['group_membership_cla_done'] = data
    return True
def main():
    print("\n\n\t\"Hey Vsauce, Michael here.\" - Michael Stevens\n\n")

    setup_files_and_folders()
    eventlet.spawn(Log.write_file_loop)

    global shove
    shove = Shove(sio)
    eventlet.spawn(send_packets_loop, shove, sio)
    eventlet.spawn(handle_packets_loop, shove)
    if PING_USERS_ENABLED:
        eventlet.spawn(ping_users_loop, shove)

    use_ssl = "-no-ssl" not in sys.argv
    if not use_ssl:
        Log.warning("SSL DISABLED! Remove '-no-ssl' from sys.argv to enable")

    Log.info(
        f"Starting SocketIO WSGI on port {PORT}! use_ssl={use_ssl}, private keys: {PRIVATE_KEYS_IMPORTED}"
    )
    wsgi_app = socketio.WSGIApp(sio)
    http_socket = eventlet.listen((HOST, PORT))

    if use_ssl:
        # wrap_ssl: https://stackoverflow.com/a/39420484/13216113
        ssl_socket = eventlet.wrap_ssl(http_socket, certfile="cert.pem",
                                       keyfile="key.pem", server_side=True)
        eventlet.wsgi.server(ssl_socket, wsgi_app, log_output=LOG_WSGI)
    else:
        eventlet.wsgi.server(http_socket, wsgi_app, log_output=LOG_WSGI)

    print("\n\n\t\"And as always, thanks for watching.\" - Michael Stevens\n\n")
def test__cmp__(self):
    from shove import Shove
    tstore = Shove()
    self.store['max'] = 3
    self.store.sync()
    tstore['max'] = 3
    self.assertEqual(self.store, tstore)
def getFeed(self, url, post_data=None, request_headers=None):
    parsed = list(urlparse.urlparse(url))
    parsed[2] = re.sub("/{2,}", "/", parsed[2])  # replace two or more / with one

    if post_data:
        url += urllib.urlencode(post_data)

    try:
        with closing(Shove('sqlite:///' + self.db_name, compress=True)) as fs:
            fc = cache.Cache(fs)
            feed = fc.fetch(url, False, False, request_headers)

            if feed:
                if 'entries' in feed:
                    return feed
                elif 'error' in feed.feed:
                    err_code = feed.feed['error']['code']
                    err_desc = feed.feed['error']['description']
                    logger.log(
                        u"RSS ERROR:[%s] CODE:[%s]" % (err_desc, err_code),
                        logger.DEBUG)
            else:
                logger.log(u"RSS error loading url: " + url, logger.DEBUG)
    except Exception as e:
        logger.log(u"RSS error: " + ex(e), logger.DEBUG)
def parseWithShove(fname, callableParsingFunction, pickleDir=""):
    '''Parse fname with callableParsingFunction, caching the result in a shove db under pickleDir.'''
    shoveFilename = os.path.join(pickleDir, os.path.basename(fname) + ".shv")
    if os.path.exists(shoveFilename):
        print("Loading shove structure: " + str(shoveFilename))
        g = Shove("file://" + shoveFilename, "simple://")
    else:
        print("Parsing...")
        tmpStruct = callableParsingFunction(file(fname, 'r'))
        print("Writing shove db: " + str(shoveFilename))
        ks = tmpStruct.keys()
        g = Shove("file://" + shoveFilename)
        for k in ks:
            del tmpStruct[k].references  # may be causing an error later down the road
            g[k] = tmpStruct[k]
    return g
def get_data_set_attribute(self, data_set, key):
    attrib_name = self.get_attribute_name(data_set, key)
    if self.has_attribute(data_set, key):
        return self.store[attrib_name]
    else:
        return None
def setup_models(networks, add_morphogene=True):
    '''
    Convert dict of networks in networkx format to models in
    ModelContainer format (batch mode).

    This runs out of memory on the laptop.
    '''
    models_dict_name = "models_dictionary.db"
    #models_dict = shelve.open(models_dict_name)
    models_dict = Shove("file://" + models_dict_name, compress=True)

    for localparset, net in enumerate(networks.values()):
        print localparset, ":",
        #if localparset>=50: break  # enable for quick run
        mc = dict_to_model(net, add_morphogene)
        print len(mc._psc), "parameter sets, shoving."
        if not localparset % 10:
            tend = datetime.now()
            print "total execution time:", tend - tstart
        models_dict[str(localparset)] = mc
        models_dict.sync()

    #models_dict.close()
    print "shoved", localparset + 1, "model containers to", models_dict_name, "."
def create_sequence_dbs_for_GAF(gaf, transcripts_file, output_dir):
    from Bio import SeqIO
    from Bio import Seq
    import os

    print "Indexing GAF db by transcript id...\n"
    gaf_transcript_idx = dict()
    for i, g in enumerate(gaf):
        for k in gaf[g].keys():
            for ctr, t in enumerate(gaf[g][k]):
                gaf_transcript_idx[t['transcript_id']] = (ctr, g, k)

    fh_transcripts = SeqIO.parse(transcripts_file, 'fasta')
    # transcripts_shlv = shelve.open(os.path.join(output_dir, 'GAF_transcript_seqs.fa.shlv'), 'c')
    # proteins_shlv = shelve.open(os.path.join(output_dir, 'GAF_protein_seqs.fa.shlv'), 'c')
    transcripts_shlv = Shove(
        "file://" + os.path.join(output_dir, 'GAF_transcript_seqs.fa.shove'))
    protein_seqs_url = "file://" + os.path.join(output_dir, 'GAF_protein_seqs.fa.shove')
    proteins_shlv = Shove(protein_seqs_url)

    print "Writing transcript and protein shove dbs..."
    j = 0
    transcripts_to_remove = list()
    for transcript in fh_transcripts:
        if j % 1000 == 0:
            print j
        j += 1

        if transcript.name not in gaf_transcript_idx:
            continue
        gaf_record = gaf[gaf_transcript_idx[transcript.name][1]][
            gaf_transcript_idx[transcript.name][2]][
            gaf_transcript_idx[transcript.name][0]]

        raw_seq = str(transcript.seq)
        transcripts_shlv[transcript.name] = raw_seq

        if 'cds_start' not in gaf_record or not gaf_record['cds_start']:
            continue
        prot_seq = Seq.translate(
            raw_seq[gaf_record['cds_start'] - 1:gaf_record['cds_stop']])
        if prot_seq[-1] == '*':
            prot_seq = prot_seq[:-1]
        elif prot_seq.find('*') != -1:
            # skip small number (n=12) of transcripts with incorrect CDS coordinates
            transcripts_to_remove.append(transcript.name)
            continue

        proteins_shlv[transcript.name] = prot_seq

    for t in transcripts_to_remove:
        del transcripts_shlv[t]

    transcripts_shlv.close()
    proteins_shlv.close()
    return transcripts_to_remove, protein_seqs_url
class ShoveCache(Cache):
    """Expects a url in the form that shove requires.

    Maintains a cache of keys to speed performance.
    """

    def __init__(self, url_db, url_cache):
        self.db = Shove(url_db, url_cache, optimize=False, max_entries=2000)

    def retrieve_from_cache(self, key):
        try:
            val = self.db[key]
            return val
        except KeyError:
            return None

    def store_into_cache(self, key, value):
        self.db[key] = value

    def close_cache(self):
        self.db.close()
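# Illustrative use of the ShoveCache wrapper above; the store and cache URIs
# are placeholders.
kv = ShoveCache('sqlite:///demo_cache.db', 'memory://')
if kv.retrieve_from_cache('answer') is None:  # returns None on a miss
    kv.store_into_cache('answer', 42)
print(kv.retrieve_from_cache('answer'))
kv.close_cache()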
def connect(self):
    """
    Connects to the database.

    Raises RuntimeError if the connection is not closed yet. Use
    :meth:`StorageAdapter.reconnect` to explicitly close the connection
    and open it again.
    """
    if self.connection is not None:
        raise RuntimeError('already connected')
    self.connection = Shove(**self._connection_options)
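# Sketch of the guarded connect() contract above; `adapter` is a hypothetical
# StorageAdapter whose _connection_options hold Shove keyword arguments.
adapter.connect()           # opens the underlying Shove
try:
    adapter.connect()       # second call raises RuntimeError('already connected')
except RuntimeError:
    adapter.reconnect()     # close and reopen instead, per the docstring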
def test_update(self):
    tstore = Shove()
    tstore['max'] = 3
    tstore['min'] = 6
    tstore['pow'] = 7
    self.store['max'] = 2
    self.store['min'] = 3
    self.store['pow'] = 7
    self.store.update(tstore)
    self.assertEqual(self.store['min'], 6)
def getCacheInstance(server='dirCache'):
    try:
        from shove import Shove
        print 'sqlite:///%s.sqlite' % server
        dirCache = Shove('sqlite:///%s.sqlite' % server)
        #ftpCache = Shove()
        print 'use shove'
    except Exception:
        # fall back to a plain dict if shove is unavailable
        dirCache = {}
        print 'use dict'
    return dirCache
def __init__(self, tokens_filename, audio, log):
    self._log = log
    self._audio = audio
    self._tokens_filename = tokens_filename
    self._eventQueue = queue.Queue()

    persist_path = "/tmp"
    for directory in ("alerts", "alerts/all", "alerts/active"):
        d = os.path.join(persist_path, directory)
        if not os.path.exists(d):
            os.mkdir(d)

    # would prefer to use sqlite, but that complains about
    # our threads accessing the same connection - and dbm seems to not
    # store any changes.
    self.allAlerts = Shove("file:///tmp/alerts/all")
    self.activeAlerts = Shove("file:///tmp/alerts/active")
    #print(list(self.allAlerts.values()))

    self._last_user_activity = datetime.datetime.now()

    t = threading.Thread(target=self.eventQueueThread, daemon=True)
    t.start()
    GObject.timeout_add(500, self.alertCheck)
def test_update(self):
    from shove import Shove
    tstore = Shove()
    tstore['max'] = 3
    tstore['min'] = 6
    tstore['pow'] = 7
    self.store['max'] = 2
    self.store['min'] = 3
    self.store['pow'] = 7
    self.store.update(tstore)
    self.store.sync()
    self.assertEqual(self.store['min'], 6)