def send_data(self):
    # Create the collector instance
    redis_server_info = RedisServer(self.addr, self.port, self.password)
    # Processed info dict
    redis_server_info_dict = redis_server_info.collect_info_data()
    # Redis server info dict
    redis_info_dict = redis_server_info_dict
    # Redis slowlog length
    redis_slowlog = RedisSlowLog(self.addr, self.port, self.password,
                                 self.cluster_name, "", "", "", "", "")
    redis_slowlog_len = redis_slowlog.get_slowlog_and_length()
    redis_info_dict["slowlog_len"] = redis_slowlog_len
    # Collect Redis cluster info
    if "cluster_enabled" in redis_server_info_dict:
        if redis_server_info_dict["cluster_enabled"] == 1:
            redis_cluster_info = RedisClusterInfo(self.addr, self.port, self.password)
            redis_cluser_info_dict = redis_cluster_info.collect_cluster_info()
            redis_info_dict.update(redis_cluser_info_dict)
    redis_update_list = []  # The upload info list
    # Redis is alive
    redis_info_dict["redis_alive"] = 1
    for info_key in redis_info_dict.keys():
        # Derived keys are reported as COUNTER; note the commands starting with "cmdstat"
        calculate_keys = calculate_metric_dict.keys()
        if info_key in calculate_keys or re.match("^cmdstat", info_key):
            key_item_dict = {
                "endpoint": self.addr,
                "metric": info_key,
                "tags": self.tags,
                "timestamp": upload_ts,
                "value": redis_info_dict[info_key],
                "step": 60,
                "counterType": "COUNTER"
            }
        else:
            key_item_dict = {
                "endpoint": self.addr,
                "metric": info_key,
                "tags": self.tags,
                "timestamp": upload_ts,
                "value": redis_info_dict[info_key],
                "step": 60,
                "counterType": "GAUGE"
            }
        redis_update_list.append(key_item_dict)
    r = requests.post(falcon_client, data=json.dumps(redis_update_list))

def test_hash(self):
    key = 'dataset'
    with RedisServer(loglevel='warning') as server:
        redis_db = Redis(port=server.port)
        redis_db.hset(key, self.comm1.uuid.uuidString,
                      write_communication_to_buffer(self.comm1))
        redis_db.hset(key, self.comm2.uuid.uuidString,
                      write_communication_to_buffer(self.comm2))
        redis_db.hset(key, self.comm3.uuid.uuidString,
                      write_communication_to_buffer(self.comm3))
        reader = RedisCommunicationReader(redis_db, key, key_type='hash')
        comms = [c for c in reader]
        ids = [c.id for c in comms]
        # assert no duplicates
        self.assertEquals(3, len(ids))
        self.assertEquals(set(['comm-1', 'comm-2', 'comm-3']), set(ids))
        self.assertEquals(3, len(reader))
        self.assertEquals('comm-2', reader[self.comm2.uuid.uuidString].id)
        self.assertTrue(hasattr(comms[0], 'sentenceForUUID'))
        self.assertTrue(hasattr(comms[1], 'sentenceForUUID'))
        self.assertTrue(hasattr(comms[2], 'sentenceForUUID'))
        # assert data still there
        ids = [c.id for c in reader]
        self.assertEquals(set(['comm-1', 'comm-2', 'comm-3']), set(ids))
        self.assertEquals(3, redis_db.hlen(key))

def test_set(self):
    key = 'dataset'
    with RedisServer(loglevel='warning') as server:
        redis_db = Redis(port=server.port)
        redis_db.sadd(key, write_communication_to_buffer(self.comm1))
        redis_db.sadd(key, write_communication_to_buffer(self.comm2))
        redis_db.sadd(key, write_communication_to_buffer(self.comm3))
        reader = RedisCommunicationReader(redis_db, key, key_type='set')
        comms = [c for c in reader]
        ids = [c.id for c in comms]
        # assert no duplicates
        self.assertEquals(3, len(ids))
        self.assertEquals(set(['comm-1', 'comm-2', 'comm-3']), set(ids))
        self.assertEquals(3, len(reader))
        self.assertTrue(hasattr(comms[0], 'sentenceForUUID'))
        self.assertTrue(hasattr(comms[1], 'sentenceForUUID'))
        self.assertTrue(hasattr(comms[2], 'sentenceForUUID'))
        batch_ids = [c.id for c in reader.batch(2)]
        # do this weird thing because set(['foo']) != set([u'foo'])
        self.assertTrue(
            ('comm-1' in batch_ids and 'comm-2' in batch_ids) or
            ('comm-1' in batch_ids and 'comm-3' in batch_ids) or
            ('comm-2' in batch_ids and 'comm-3' in batch_ids)
        )
        # assert data still there
        ids = [c.id for c in reader]
        self.assertEquals(set(['comm-1', 'comm-2', 'comm-3']), set(ids))
        self.assertEquals(3, redis_db.scard(key))

class GeneratorController(Thread):
    def __init__(self, username, users_list, users_count, loop_count):
        Thread.__init__(self)
        self.__loop_count = loop_count
        self.__server = RedisServer(Neo4jServer())
        self.__users_list = users_list
        self.__users_count = users_count
        self.__server.registration(username)
        self.__user_id = self.__server.sign_in(username)

    def run(self):
        while self.__loop_count > 0:
            message_text = fake.sentence(nb_words=10,
                                         variable_nb_words=True,
                                         ext_word_list=None)
            receiver = self.__users_list[randint(0, self.__users_count - 1)]
            self.__server.create_message(message_text,
                                         self.__get_random_tags(),
                                         receiver, self.__user_id)
            self.__loop_count -= 1
        self.stop()

    def __get_random_tags(self) -> list:
        tags = []
        num = randint(0, len(Tags))
        for i in range(num):
            tag = choice(list(Tags)).name
            if tag not in tags:
                tags.append(tag)
        return tags

    def stop(self):
        self.__server.sign_out(self.__user_id)
        self.__loop_count = 0

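# Illustrative driver for GeneratorController, not part of the original module:
# the usernames and loop count are assumptions chosen for the sketch, and the
# Redis/Neo4j backends must be reachable for registration and sign_in to succeed.
if __name__ == '__main__':
    usernames = ['alice', 'bob', 'carol']
    generators = [
        GeneratorController(name, usernames, len(usernames), loop_count=5)
        for name in usernames
    ]
    for generator in generators:
        generator.start()  # each thread generates messages via run()
    for generator in generators:
        generator.join()   # wait until every generator has signed out
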
def test_set_implicit(self):
    key = 'dataset'
    with RedisServer(loglevel='warning') as server:
        redis_db = Redis(port=server.port)
        redis_db.sadd(key, self.buf1)
        w = RedisCommunicationWriter(redis_db, key)
        w.write(self.comm2)
        w.write(self.comm3)
        self.assertEquals(3, redis_db.scard(key))

def test_list_implicit(self):
    key = 'dataset'
    with RedisServer(loglevel='warning') as server:
        redis_db = Redis(port=server.port)
        redis_db.lpush(key, write_communication_to_buffer(self.comm1))
        redis_db.lpush(key, write_communication_to_buffer(self.comm2))
        redis_db.lpush(key, write_communication_to_buffer(self.comm3))
        reader = RedisCommunicationReader(redis_db, key)
        ids = [c.id for c in reader]
        self.assertEquals(['comm-1', 'comm-2', 'comm-3'], ids)

def test_write_against_file_contents(self):
    filename = u'tests/testdata/simple_1.concrete'
    key = 'comm'
    with open(filename, 'rb') as f:
        f_buf = f.read()
        comm = read_communication_from_buffer(f_buf)
    with RedisServer(loglevel='warning') as server:
        redis_db = Redis(port=server.port)
        write_communication_to_redis_key(redis_db, key, comm)
        self.assertEquals(f_buf, redis_db.get(key))

def test_set(self):
    key = 'dataset'
    with RedisServer(loglevel='warning') as server:
        redis_db = Redis(port=server.port)
        w = RedisCommunicationWriter(redis_db, key, key_type='set')
        w.write(self.comm1)
        self.assertEquals(1, redis_db.scard(key))
        self.assertEquals(self.buf1, redis_db.srandmember(key))
        w.write(self.comm2)
        w.write(self.comm3)
        self.assertEquals(3, redis_db.scard(key))

def test_read_write_fixed_point(self):
    key = 'comm'
    comm = create_comm('comm-1')
    with RedisServer(loglevel='warning') as server:
        redis_db = Redis(port=server.port)
        buf_1 = write_communication_to_redis_key(redis_db, key, comm)
        buf_2 = write_communication_to_redis_key(
            redis_db, key,
            read_communication_from_redis_key(redis_db, key)
        )
        self.assertEquals(buf_1, buf_2)

def test_list_implicit(self):
    key = 'dataset'
    with RedisServer(loglevel='warning') as server:
        redis_db = Redis(port=server.port)
        redis_db.lpush(key, self.buf1)
        w = RedisCommunicationWriter(redis_db, key)
        w.write(self.comm2)
        w.write(self.comm3)
        self.assertEquals(3, redis_db.llen(key))
        self.assertEquals(self.buf1, redis_db.lindex(key, -1))
        self.assertEquals(self.buf2, redis_db.lindex(key, -2))
        self.assertEquals(self.buf3, redis_db.lindex(key, -3))

def test_set_implicit(self):
    key = 'dataset'
    with RedisServer(loglevel='warning') as server:
        redis_db = Redis(port=server.port)
        redis_db.sadd(key, write_communication_to_buffer(self.comm1))
        redis_db.sadd(key, write_communication_to_buffer(self.comm2))
        redis_db.sadd(key, write_communication_to_buffer(self.comm3))
        reader = RedisCommunicationReader(redis_db, key)
        ids = [c.id for c in reader]
        # assert no duplicates
        self.assertEquals(3, len(ids))
        self.assertEquals(set(['comm-1', 'comm-2', 'comm-3']), set(ids))

def test_read_against_file_contents(self):
    filename = u'tests/testdata/simple_1.concrete'
    key = 'comm'
    with open(filename, 'rb') as f:
        buf = f.read()
    with RedisServer(loglevel='warning') as server:
        redis_db = Redis(port=server.port)
        redis_db.set(key, buf)
        comm = read_communication_from_redis_key(redis_db, key)
        self.assertTrue(hasattr(comm, 'sentenceForUUID'))
        self.assertEquals('one', comm.id)

def test_hash_implicit(self):
    key = 'dataset'
    with RedisServer(loglevel='warning') as server:
        redis_db = Redis(port=server.port)
        redis_db.hset(key, self.comm1.id, self.buf1)
        w = RedisCommunicationWriter(redis_db, key)
        w.write(self.comm2)
        w.write(self.comm3)
        self.assertEquals(3, redis_db.hlen(key))
        self.assertEquals(self.buf1, redis_db.hget(key, self.comm1.id))
        self.assertEquals(self.buf2, redis_db.hget(key, self.comm2.id))
        self.assertEquals(self.buf3, redis_db.hget(key, self.comm3.id))

def test_list_left_to_right(self):
    key = 'dataset'
    with RedisServer(loglevel='warning') as server:
        redis_db = Redis(port=server.port)
        w = RedisCommunicationWriter(redis_db, key, key_type='list',
                                     right_to_left=False)
        w.write(self.comm1)
        self.assertEquals(1, redis_db.llen(key))
        self.assertEquals(self.buf1, redis_db.lindex(key, 0))
        w.write(self.comm2)
        w.write(self.comm3)
        self.assertEquals(3, redis_db.llen(key))
        self.assertEquals(self.buf3, redis_db.lindex(key, -1))
        self.assertEquals(self.buf2, redis_db.lindex(key, -2))
        self.assertEquals(self.buf1, redis_db.lindex(key, -3))

def test_list_no_add_references(self):
    key = 'dataset'
    with RedisServer(loglevel='warning') as server:
        redis_db = Redis(port=server.port)
        redis_db.lpush(key, write_communication_to_buffer(self.comm1))
        redis_db.lpush(key, write_communication_to_buffer(self.comm2))
        redis_db.lpush(key, write_communication_to_buffer(self.comm3))
        reader = RedisCommunicationReader(redis_db, key, key_type='list',
                                          add_references=False)
        comms = [c for c in reader]
        ids = [c.id for c in comms]
        self.assertEquals(['comm-1', 'comm-2', 'comm-3'], ids)
        self.assertFalse(hasattr(comms[0], 'sentenceForUUID'))
        self.assertFalse(hasattr(comms[1], 'sentenceForUUID'))
        self.assertFalse(hasattr(comms[2], 'sentenceForUUID'))

def test_hash_empty(self):
    key = 'dataset'
    with RedisServer(loglevel='warning') as server:
        redis_db = Redis(port=server.port)
        reader = RedisCommunicationReader(redis_db, key, key_type='hash')
        self.assertEquals(0, len(reader))
        redis_db.hset(key, self.comm1.uuid.uuidString,
                      write_communication_to_buffer(self.comm1))
        redis_db.hset(key, self.comm2.uuid.uuidString,
                      write_communication_to_buffer(self.comm2))
        redis_db.hset(key, self.comm3.uuid.uuidString,
                      write_communication_to_buffer(self.comm3))
        ids = [c.id for c in reader]
        # assert no duplicates
        self.assertEquals(3, len(ids))
        self.assertEquals(set(['comm-1', 'comm-2', 'comm-3']), set(ids))
        self.assertEquals(3, len(reader))

def test_hash_uuid_key(self):
    key = 'dataset'
    with RedisServer(loglevel='warning') as server:
        redis_db = Redis(port=server.port)
        w = RedisCommunicationWriter(redis_db, key, key_type='hash',
                                     uuid_hash_key=True)
        w.write(self.comm1)
        self.assertEquals(1, redis_db.hlen(key))
        self.assertEquals(self.buf1,
                          redis_db.hget(key, self.comm1.uuid.uuidString))
        w.write(self.comm2)
        w.write(self.comm3)
        self.assertEquals(3, redis_db.hlen(key))
        self.assertEquals(self.buf1,
                          redis_db.hget(key, self.comm1.uuid.uuidString))
        self.assertEquals(self.buf2,
                          redis_db.hget(key, self.comm2.uuid.uuidString))
        self.assertEquals(self.buf3,
                          redis_db.hget(key, self.comm3.uuid.uuidString))

def test_list(self):
    key = 'dataset'
    with RedisServer(loglevel='warning') as server:
        redis_db = Redis(port=server.port)
        redis_db.lpush(key, write_communication_to_buffer(self.comm1))
        redis_db.lpush(key, write_communication_to_buffer(self.comm2))
        redis_db.lpush(key, write_communication_to_buffer(self.comm3))
        reader = RedisCommunicationReader(redis_db, key, key_type='list')
        comms = [c for c in reader]
        ids = [c.id for c in comms]
        self.assertEquals(['comm-1', 'comm-2', 'comm-3'], ids)
        self.assertEquals(3, len(reader))
        self.assertEquals('comm-2', reader[1].id)
        self.assertTrue(hasattr(comms[0], 'sentenceForUUID'))
        self.assertTrue(hasattr(comms[1], 'sentenceForUUID'))
        self.assertTrue(hasattr(comms[2], 'sentenceForUUID'))
        # assert data still there
        ids = [c.id for c in reader]
        self.assertEquals(['comm-1', 'comm-2', 'comm-3'], ids)
        self.assertEquals(3, redis_db.llen(key))

def test_list_block_pop_timeout(self):
    key = 'dataset'
    with RedisServer(loglevel='warning') as server:
        redis_db = Redis(port=server.port)
        redis_db.lpush(key, write_communication_to_buffer(self.comm1))
        redis_db.lpush(key, write_communication_to_buffer(self.comm2))
        redis_db.lpush(key, write_communication_to_buffer(self.comm3))
        reader = RedisCommunicationReader(redis_db, key, key_type='list',
                                          pop=True, block=True,
                                          block_timeout=1)
        it = iter(reader)
        ids = []
        ids.append(it.next().id)
        ids.append(it.next().id)
        self.assertEquals(1, redis_db.llen(key))
        ids.append(it.next().id)
        self.assertEquals(['comm-1', 'comm-2', 'comm-3'], ids)
        with self.assertRaises(StopIteration):
            print 'Waiting for timeout (1 sec)...'
            it.next()

def test_list_pop_left_to_right(self):
    key = 'dataset'
    with RedisServer(loglevel='warning') as server:
        redis_db = Redis(port=server.port)
        redis_db.lpush(key, write_communication_to_buffer(self.comm1))
        redis_db.lpush(key, write_communication_to_buffer(self.comm2))
        redis_db.lpush(key, write_communication_to_buffer(self.comm3))
        reader = RedisCommunicationReader(redis_db, key, key_type='list',
                                          pop=True, right_to_left=False)
        it = iter(reader)
        ids = []
        ids.append(it.next().id)
        ids.append(it.next().id)
        self.assertEquals(1, redis_db.llen(key))
        ids.append(it.next().id)
        self.assertEquals(['comm-3', 'comm-2', 'comm-1'], ids)
        # assert data is gone
        self.assertEquals([], [c.id for c in reader])
        self.assertFalse(redis_db.exists(key))
        with self.assertRaises(StopIteration):
            it.next()

def test_list_block_pop(self):
    key = 'dataset'
    with RedisServer(loglevel='warning') as server:
        redis_db = Redis(port=server.port)
        redis_db.lpush(key, write_communication_to_buffer(self.comm1))
        redis_db.lpush(key, write_communication_to_buffer(self.comm2))
        redis_db.lpush(key, write_communication_to_buffer(self.comm3))
        reader = RedisCommunicationReader(redis_db, key, key_type='list',
                                          pop=True, block=True)
        it = iter(reader)
        ids = []
        ids.append(it.next().id)
        ids.append(it.next().id)
        self.assertEquals(1, redis_db.llen(key))
        ids.append(it.next().id)
        self.assertEquals(['comm-1', 'comm-2', 'comm-3'], ids)
        proc = Process(target=_add_comm_to_list,
                       args=(3, server.port, 'comm-4', key))
        proc.start()
        print 'Waiting for new comm to be added (3 sec)...'
        self.assertEquals('comm-4', iter(reader).next().id)
        proc.join()

def test_set_pop(self):
    key = 'dataset'
    with RedisServer(loglevel='warning') as server:
        redis_db = Redis(port=server.port)
        redis_db.sadd(key, write_communication_to_buffer(self.comm1))
        redis_db.sadd(key, write_communication_to_buffer(self.comm2))
        redis_db.sadd(key, write_communication_to_buffer(self.comm3))
        reader = RedisCommunicationReader(redis_db, key, key_type='set',
                                          pop=True)
        it = iter(reader)
        ids = []
        ids.append(it.next().id)
        ids.append(it.next().id)
        self.assertEquals(1, redis_db.scard(key))
        ids.append(it.next().id)
        # assert no duplicates
        self.assertEquals(3, len(ids))
        self.assertEquals(set(['comm-1', 'comm-2', 'comm-3']), set(ids))
        # assert data is gone
        self.assertEquals([], [c.id for c in reader])
        self.assertFalse(redis_db.exists(key))
        with self.assertRaises(StopIteration):
            it.next()

def redis_server_fixture(loop: AbstractEventLoop, redis_backend: Redis,
                         cache: Cache, unused_port: int):
    redis_server = RedisServer(redis_backend, cache, unused_port)
    loop.run_until_complete(redis_server.start())
    yield redis_server
    loop.run_until_complete(redis_server.stop())

class Controller(object):
    def __init__(self):
        self._menu_config = {'': ''}
        self._rserver = RedisServer()
        self._subsystems_controller = SubSystemsController(self._rserver)
        self._view = View(self._subsystems_controller)
        self._menu = 'Main menu'
        self._path = ['Main menu']
        self._loop = True
        self.start()

    def start(self):
        try:
            from menu_list import menu_list
            while self._loop:
                choice = self.make_choice(self._menu,
                                          menu_list[self._menu].keys())
                self.consider_choice(self, choice,
                                     list(menu_list[self._menu].values()))
        except Exception as e:
            self._view.show_error(str(e))

    def make_choice(self, menu_headline: str, menu_list: list):
        self._view.draw_menu(menu_headline, menu_list)
        return Controller._get_uint_value("Make your choice: ", len(menu_list))

    def consider_choice(self, controller, choice: int, list_of_func: list):
        if choice > len(list_of_func) - 1:
            raise Exception("func is not exist")
        desired_func = list_of_func[choice]
        desired_func(controller)

    def statistics_total(self):
        try:
            country = self._get_str_from_list_value(
                "Enter country",
                self._subsystems_controller.get_countries_list())
            key = self._get_str_from_list_value(
                "Enter mode", ['Confirmed', 'Deaths', 'Recovered'])
            data = self._rserver.get_total_by_name(key, country)
            if len(data) == 0:
                raise Exception('no data')
            self._view.show_graph(country, data, key, 'plot')
        except Exception as e:
            self._view.show_error(str(e))

    def statistics(self):
        try:
            country = self._get_str_from_list_value(
                "Enter country",
                self._subsystems_controller.get_countries_list())
            key = self._get_str_from_list_value(
                "Enter mode", ['Confirmed', 'Deaths', 'Recovered'])
            data = self._rserver.get_daily_by_name(key, country)
            if len(data) == 0:
                raise Exception('no data')
            self._view.show_graph(country, data, key, 'bar')
        except Exception as e:
            self._view.show_error(str(e))

    def day_statistics(self):
        country = self._get_str_from_list_value(
            "Enter country",
            self._subsystems_controller.get_countries_list())
        date_range = self._rserver.get_range_of_date_for_country(country)
        given_date = self._get_date_value("Enter date", date_range)
        data = self._rserver.get_all_day_by_country(country, str(given_date))
        self._view.show_pie("Statistics for the day", data)

    def regression(self):
        countries = self._rserver.get_countries_with_data()
        given_key = self._get_str_from_list_value(
            "Enter mode", ['Confirmed', 'Deaths', 'Recovered'])
        keys_for_delete = []
        for key, statistics_data in countries.items():
            if 'Latitude' not in statistics_data or 'Longitude' not in statistics_data:
                keys_for_delete.append(key)
        for key in keys_for_delete:
            del countries[key]
        self._view.regression(countries, given_key)

    def countries_statistics(self):
        countries = self._rserver.get_countries_with_data()
        given_key = self._get_str_from_list_value(
            "Enter mode", ['Confirmed', 'Deaths', 'Recovered'])
        mode = self._get_str_from_list_value("Enter mode",
                                             ['Mean', 'Median', 'Max'])
        data = {}
        for country in countries:
            values = [
                int(x) for x in self._rserver.get_daily_by_name(
                    given_key, country).values()
            ]
            if len(values) != 0:
                if mode == 'Mean':
                    data[country] = np.mean(values)
                elif mode == 'Median':
                    data[country] = np.median(values)
                elif mode == 'Max':
                    data[country] = np.max(values)
                else:
                    raise Exception('Invalid mode in countries statistics func')
        self._view.additional_task("", mode, data)

    def back(self):
        self._path.pop()
        self._menu = self._path[-1]

    def generate_data(self):
        self._subsystems_controller.generate_data()

    def backup_data(self):
        self._subsystems_controller.backup_data()

    def recovery_data(self):
        file = self._get_str_from_list_value(
            'Enter file', [file[8:] for file in glob.glob("./dumps/*.json")])
        self._subsystems_controller.recovery_data(file)

    @staticmethod
    def _get_uint_value(msg: str, top_line: int = None):
        while True:
            number = input(msg)
            if number.isdigit():
                number = int(number)
                if top_line is None or 0 <= number < top_line:
                    return number

    def _get_str_from_list_value(self, msg: str, ls: list):
        while True:
            country = input(
                f"{msg}{'(' + ', '.join(x for x in ls) + ')' if len(ls) <= 5 else ''}: "
            )
            if country in ls:
                return country
            self._view.show_error('There is not item like that, try again!')

    def _get_date_value(self, msg: str, date_range=None):
        while True:
            try:
                usr_input = input(
                    f"{msg}, date should be in range {date_range.get('start')} - {date_range.get('end')} "
                    f"(format of date: YYYY-MM-DD): ") \
                    if date_range is not None else input(f"{msg} (format of date: YYYY-MM-DD): ")
                usr_date = date(*map(int, usr_input.split('-')))
                if date_range is not None:
                    if date(*map(int, date_range.get('start').split('-'))) <= usr_date <= date(
                            *map(int, date_range.get('end').split('-'))):
                        return usr_date
                else:
                    return usr_date
                self._view.show_error('Date out of range, try again!')
            except Exception as e:
                self._view.show_error(str(e))

    def change_data_backup_status(self):
        self._subsystems_controller.data_backup_system_activity = \
            not self._subsystems_controller.data_backup_system_activity

    def stop_loop(self):
        self._loop = False

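# Minimal entry-point sketch (an assumption, not shown in the original source):
# Controller.__init__ already calls self.start(), so constructing the object is
# enough to enter the menu loop.
if __name__ == '__main__':
    Controller()
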
import sklearn
import xgboost
import numpy as np

from redis_server import RedisServer
from utils import serialize_obj, deserialize_json, load_model

# Docker Compose's internal DNS / network bridge resolves the hostname
# 'redis_server' to the correct container.
REDIS_HOST = 'redis_server'
REDIS_QUEUE_NAME = 'job_queue'
# REDIS_HOST = '0.0.0.0'
# REDIS_QUEUE_NAME = '0.0.0.0'

redis = RedisServer(host=REDIS_HOST, port=6379, db=0)


def test_run(xgb_model):
    '''
    Runs a single test case on script execution.
    Script will run only if the test passes.

    Input: sklearn model
    '''
    test_example = np.array([[5.9, 3.2, 4.8, 1.8]])
    pred = xgb_model.predict(test_example)[0]
    print("Pred: {}. Pred label: {}".format(pred, label_names[pred]))
    assert pred == 1, "Loaded model does not predict correctly for test example"


def run():
    '''
    Model inference loop. Dequeues from the Redis job queue and predicts.

import os
import json

from flask import Flask

from utils.system_time import set_ntp

static_folder = os.path.abspath(
    os.environ.get('ASTROPHOTO_PLUS_STATIC_FOLDER', '../frontend'))
has_static_folder = os.path.isfile(os.path.join(static_folder, 'index.html'))

app = Flask('AstroPhoto Plus', static_folder=None)
app.config['has_static_folder'] = has_static_folder
app.config['static_folder'] = static_folder

with open('version.json', 'r') as version_json:
    app.config['version'] = json.load(version_json)

logger = app.logger

from redis_server import RedisServer
redis_server = RedisServer()
redis_server.start()

REDIS_HOST = os.environ.get('REDIS_SERVER', '127.0.0.1')
REDIS_PORT = redis_server.port
app.config['REDIS_URL'] = 'redis://{}:{}/'.format(REDIS_HOST, REDIS_PORT)

set_ntp(True)

from broadcast_service import broadcast_service
broadcast_service()

from network import network_service
network_service.start()

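# Illustrative development entry point (an assumption; the original module only
# builds the Flask app and starts its background services, and is likely served
# by an external WSGI runner in production). Host and port are placeholder values.
if __name__ == '__main__':
    app.run(host='0.0.0.0', port=5000)
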
def test_implicit_empty(self):
    key = 'dataset'
    with RedisServer(loglevel='warning') as server:
        redis_db = Redis(port=server.port)
        with self.assertRaises(Exception):
            RedisCommunicationWriter(redis_db, key)