def setUp(self):      # noqa
    """Load the block store metadata and bind an in-memory container.

    Parses blockstore.xml from DATA_DIR as an EDMX document and wraps
    the BlockContainer definition in an InMemoryEntityContainer.
    """
    doc_path = os.path.join(DATA_DIR, 'blockstore.xml')
    self.doc = edmx.Document()
    with open(doc_path, 'rb') as src:
        self.doc.read(src)
    self.cdef = self.doc.root.DataServices['BlockSchema.BlockContainer']
    self.container = InMemoryEntityContainer(self.cdef)
def test_mem_file(self):
    """Run the random read/write cycle over a disk-backed block store."""
    self.container = InMemoryEntityContainer(self.cdef)
    blocks = FileBlockStore(dpath=self.d, max_block_size=self.block_size)
    locks = LockStore(entity_set=self.cdef['BlockLocks'])
    self.bs = blocks
    self.ls = locks
    self.ss = StreamStore(bs=blocks, ls=locks,
                          entity_set=self.cdef['Streams'])
    self.random_rw()
def TestModel():
    """Read and write some key value pairs.

    Loads the MemCache model, binds an in-memory container, writes test
    data via TestData and then prints every key/value pair with its
    expiry time.
    """
    doc = LoadMetadata()
    container = InMemoryEntityContainer(
        doc.root.DataServices['MemCacheSchema.MemCache'])
    memCache = doc.root.DataServices[
        'MemCacheSchema.MemCache.KeyValuePairs']
    TestData(memCache)
    with memCache.OpenCollection() as collection:
        for e in collection.itervalues():
            # print as a function call: a single parenthesized argument
            # behaves identically under Python 2's print statement and
            # also parses under Python 3
            print("%s: %s (expires %s)" % (
                e['Key'].value, e['Value'].value,
                str(e['Expires'].value)))
def test_mem_mem(self):
    """Run the random read/write cycle with every store held in memory."""
    self.container = InMemoryEntityContainer(self.cdef)
    block_store = blockstore.EDMBlockStore(
        entity_set=self.cdef['Blocks'],
        max_block_size=self.block_size)
    lock_store = blockstore.LockStore(entity_set=self.cdef['BlockLocks'])
    self.bs = block_store
    self.ls = lock_store
    self.ss = blockstore.StreamStore(
        bs=block_store, ls=lock_store,
        entity_set=self.cdef['Streams'])
    self.random_rw()
def test_model():
    """Read and write some key value pairs.

    Binds the in-memory container, fills the cache with test data and
    dumps every pair (with its expiry) via output().
    """
    doc = load_metadata()
    # binding the container is the useful side effect; no reference kept
    InMemoryEntityContainer(
        doc.root.DataServices['MemCacheSchema.MemCache'])
    mem_cache = doc.root.DataServices[
        'MemCacheSchema.MemCache.KeyValuePairs']
    test_data(mem_cache)
    with mem_cache.open() as collection:
        for pair in collection.itervalues():
            output("%s: %s (expires %s)\n" % (
                pair['Key'].value, pair['Value'].value,
                str(pair['Expires'].value)))
def DryRun():
    """Load the weather model and sample data without starting a server.

    Returns the CambridgeWeather container definition after loading the
    sample data points and the notes file.
    """
    doc = LoadMetadata()
    # Binding the in-memory container is the side effect we need; the
    # unused 'container' local from the original has been dropped
    InMemoryEntityContainer(
        doc.root.DataServices['WeatherSchema.CambridgeWeather'])
    weatherData = doc.root.DataServices[
        'WeatherSchema.CambridgeWeather.DataPoints']
    weatherNotes = doc.root.DataServices[
        'WeatherSchema.CambridgeWeather.Notes']
    LoadData(weatherData, SAMPLE_DIR)
    LoadNotes(weatherNotes, 'weathernotes.txt', weatherData)
    return doc.root.DataServices['WeatherSchema.CambridgeWeather']
def dry_run():
    """Load the weather model and sample data without starting a server.

    Returns the CambridgeWeather container definition.
    """
    doc = load_metadata()
    services = doc.root.DataServices
    InMemoryEntityContainer(services['WeatherSchema.CambridgeWeather'])
    data_points = services['WeatherSchema.CambridgeWeather.DataPoints']
    notes = services['WeatherSchema.CambridgeWeather.Notes']
    load_data(data_points, SAMPLE_DIR)
    load_notes(notes, 'weathernotes.txt', data_points)
    return services['WeatherSchema.CambridgeWeather']
def setUp(self):      # noqa
    """Parse the block store model and prepare shared test fixtures.

    Besides the in-memory container this creates the EDM block store,
    the lock store and a lock-guarded counter used by threaded tests.
    """
    self.doc = edmx.Document()
    with open(os.path.join(DATA_DIR, 'blockstore.xml'), 'rb') as src:
        self.doc.read(src)
    self.cdef = self.doc.root.DataServices['BlockSchema.BlockContainer']
    self.container = InMemoryEntityContainer(self.cdef)
    # shared state for the multi-threaded tests
    self.mt_lock = threading.Lock()
    self.mt_count = 0
    self.bs = blockstore.EDMBlockStore(
        entity_set=self.cdef['Blocks'], max_block_size=64)
    self.ls = blockstore.LockStore(entity_set=self.cdef['BlockLocks'])
def main():
    """Executed when we are launched.

    Binds the MemCache model, publishes it through an OData Server on a
    daemon thread and then blocks in CleanupForever to expire entries.
    """
    doc = LoadMetadata()
    container = InMemoryEntityContainer(
        doc.root.DataServices['MemCacheSchema.MemCache'])
    server = Server(serviceRoot=SERVICE_ROOT)
    server.SetModel(doc)
    # The server is now ready to serve forever
    global cacheApp
    cacheApp = server
    t = threading.Thread(target=runCacheServer)
    # daemon attribute replaces the deprecated setDaemon() call
    t.daemon = True
    t.start()
    # lazy %-args so the message is only formatted if INFO is enabled
    logging.info("MemCache starting HTTP server on %s", SERVICE_ROOT)
    CleanupForever(
        doc.root.DataServices['MemCacheSchema.MemCache.KeyValuePairs'])
def setUp(self):      # noqa
    """Start a combined client/server regression fixture.

    Serves the in-memory regression container over HTTP on localhost,
    then points an OData client at it so the inherited regression tests
    run end-to-end over the wire.
    """
    global regressionServerApp
    DataServiceRegressionTests.setUp(self)
    self.container = InMemoryEntityContainer(
        self.ds['RegressionModel.RegressionContainer'])
    regressionServerApp = Server("http://localhost:%i/" % HTTP_PORT)
    regressionServerApp.SetModel(self.ds.get_document())
    t = threading.Thread(target=run_regression_server)
    # daemon attribute replaces the deprecated setDaemon() call
    t.daemon = True
    t.start()
    # lazy %-args so formatting only happens when INFO is enabled
    logging.info("OData Client/Server combined tests starting HTTP "
                 "server on localhost, port %i", HTTP_PORT)
    # yield time to allow the server to start up
    time.sleep(2)
    self.svcDS = self.ds
    self.client = client.Client("http://localhost:%i/" % HTTP_PORT)
    self.ds = self.client.model.DataServices
# Command-line options for training / running / fine-tuning the model.
parser = argparse.ArgumentParser(
    description="Forecast btc price with deep learning.")
parser.add_argument('-train', type=str, help="-train dataset.csv path")
parser.add_argument('-run', type=str, help="-run dataset.csv path")
parser.add_argument('-model', type=str, help='-model model\'s path')
parser.add_argument('-iterations', type=int,
                    help='-iteration number of epoches')
parser.add_argument('-finetune', type=str, help='-finetune base-model path')
parser.add_argument('-net', type=str, help='-RNN or CNN')
args = parser.parse_args()

# Bring up the in-memory MemCache OData service on a background thread.
doc = load_metadata()
container = InMemoryEntityContainer(
    doc.root.DataServices['MemCacheSchema.MemCache'])
mem_cache = doc.root.DataServices['MemCacheSchema.MemCache.KeyValuePairs']
server = Server(serviceRoot=SERVICE_ROOT)
server.set_model(doc)
# The server is now ready to serve forever
global cache_app  # NOTE(review): 'global' at module level is a no-op
cache_app = server
t = threading.Thread(target=run_cache_server)
# daemon attribute replaces the deprecated setDaemon() call
t.daemon = True
t.start()
# lazy %-args so the message is only formatted if INFO is enabled
logging.info("MemCache starting HTTP server on %s", SERVICE_ROOT)
print(args)
m1 = 0
m2 = 0