class IgniteStorage(LightStorage):
    """
    Apache Ignite storage for Light Requests and Responses.

    :param host: Ignite service hostname or IP address.
    :param port: Ignite service port.
    :param request_cache_name: The cache where LightRequests are stored.
    :param response_cache_name: The cache where LightResponses are stored.
    :param timeout: Timeout (in seconds) for socket operations.
    """

    def __init__(self, host: str, port: int, request_cache_name: str,
                 response_cache_name: str, timeout: int = 30):
        self.host = host
        self.port = port
        self.request_cache_name = request_cache_name
        self.response_cache_name = response_cache_name
        self.timeout = timeout
        # The thin client is created lazily on the first cache access.
        self._client: Optional[Client] = None

    def get_cache(self, cache_name: str) -> Cache:
        """Return the named Ignite cache, connecting on first use."""
        if self._client is None:
            client = Client(timeout=self.timeout)
            client.connect(self.host, self.port)
            self._client = client
        return self._client.get_cache(cache_name)

    def pop_light_request(self, uid: str) -> Optional[LightRequest]:
        """Look up a LightRequest by a unique id and then remove it."""
        data = self.get_cache(self.request_cache_name).get_and_remove(uid)
        LOGGER.debug('Got Light Request from cache: id=%r, data=%s', uid, data)
        if data is None:
            return None
        return LightRequest().load_xml(parse_xml(data))

    def pop_light_response(self, uid: str) -> Optional[LightResponse]:
        """Look up a LightResponse by a unique id and then remove it."""
        data = self.get_cache(self.response_cache_name).get_and_remove(uid)
        LOGGER.debug('Got Light Response from cache: id=%r, data=%s', uid, data)
        if data is None:
            return None
        return LightResponse().load_xml(parse_xml(data))

    def put_light_request(self, uid: str, request: LightRequest) -> None:
        """Store a LightRequest under a unique id."""
        payload = dump_xml(request.export_xml()).decode('utf-8')
        LOGGER.debug('Store Light Request to cache: id=%r, data=%s', uid, payload)
        cache = self.get_cache(self.request_cache_name)
        cache.put(uid, payload)

    def put_light_response(self, uid: str, response: LightResponse) -> None:
        """Store a LightResponse under a unique id."""
        payload = dump_xml(response.export_xml()).decode('utf-8')
        LOGGER.debug('Store Light Response to cache: id=%r, data=%s', uid, payload)
        cache = self.get_cache(self.response_cache_name)
        cache.put(uid, payload)
class AuxiliaryIgniteStorage(AuxiliaryStorage):
    """
    Apache Ignite storage for auxiliary data.

    :param host: Ignite service hostname or IP address.
    :param port: Ignite service port.
    :param cache_name: The cache where data are stored.
    :param timeout: Timeout (in seconds) for socket operations.
    :param prefix: Optional prefix prepended to every key, e.g. to namespace
        entries when several components share one cache.
    """

    def __init__(self, host: str, port: int, cache_name: str, timeout: int = 30,
                 prefix: Optional[str] = None):
        # FIX: the annotation was `prefix: str = None`, which relies on the
        # implicit-Optional behavior rejected by PEP 484 type checkers;
        # the default and runtime behavior are unchanged.
        self.host = host
        self.port = port
        self.cache_name = cache_name
        self.timeout = timeout
        self.prefix = prefix
        # The thin client is created lazily on the first cache access.
        self._client = None  # type: Optional[Client]

    def get_cache(self, cache_name: str) -> Cache:
        """Get an Ignite Cache, connecting to the service on first use."""
        if self._client is None:
            self._client = Client(timeout=self.timeout)
            self._client.connect(self.host, self.port)
        return self._client.get_cache(cache_name)

    def pop(self, uid: str) -> Optional[Dict[str, Any]]:
        """Look up data by a unique id and then remove it."""
        if self.prefix:
            uid = self.prefix + uid
        data = self.get_cache(self.cache_name).get_and_remove(uid)
        LOGGER.debug('Got data from cache: id=%r, data=%s', uid, data)
        return json.loads(data) if data is not None else None

    def put(self, uid: str, data: Dict[str, Any]) -> None:
        """
        Store data under a unique id.

        Data must be JSON-serializable.
        """
        if self.prefix:
            uid = self.prefix + uid
        LOGGER.debug('Store data to cache: id=%r, data=%s', uid, data)
        # sort_keys keeps the serialized form deterministic across runs.
        self.get_cache(self.cache_name).put(uid, json.dumps(data, sort_keys=True))
from pyignite import Client
from cache.instance_data import PerperInstanceData
from cache.stream_data import StreamData
from model.state import State
from model.context import Context
from services.serializer import Serializer

# Wire up a Perper execution context backed by a local Ignite node.
data_serializer = Serializer()
ignite_client = Client()
ignite_client.connect('localhost', 10800)
ignite_client.register_binary_type(StreamData)

instance_data = PerperInstanceData(ignite_client, data_serializer)
instance_state = State(instance_data, ignite_client, data_serializer)
execution_context = Context(instance_data, None, instance_state, ignite_client)
execution_context.stream_action()

# Dump every entry of the "streams" cache for inspection.
print("Reading stream")
print(ignite_client.get_cache_names())
stream_cache = ignite_client.get_cache("streams")
for entry in stream_cache.scan():
    print(entry)
#!/usr/bin/python
from pyignite import Client

# Connect to the remote Ignite node and print the 'flag' entry
# stored in the 'key' cache.
ignite = Client()
ignite.connect('192.36.243.3', 10800)
cache = ignite.get_cache('key')
print(cache.get('flag'))
from pyignite import Client
from threading import Thread
import random

# establish connection
# NOTE(review): Thread and random are imported but unused in the visible
# portion of this file — presumably used by code outside this chunk.
client = Client()
client.connect('34.72.162.100', 10800)
client_1 = client.get_cache("client_1")


def read_file(file_name):
    # Index every whitespace-separated word of the file into the Ignite
    # cache with a fixed value of 5, printing each word as it is added.
    # NOTE(review): the bare `except:` swallows every error, and cache.put
    # normally overwrites existing keys without raising, so the
    # "ja existe" branch may never fire as intended — confirm against
    # pyignite put() semantics.
    with open(file_name, 'r', encoding="utf8") as f:
        for line in f:
            for word in line.split():
                try:
                    client_1.put(word, 5)
                    print("Adicionando " + word)
                except:
                    print(word + " ja existe")


def search_word(file_name):
    # Look up every whitespace-separated word of the file in the cache.
    # NOTE(review): this function is TRUNCATED in this chunk — the `try`
    # below has no matching `except`/`finally` here; the remainder lies
    # past the end of the visible source, so nothing was altered.
    with open(file_name, 'r', encoding="utf8") as f:
        for line in f:
            for word in line.split():
                try:
                    client_1.get(word)
                    print("******************")
                    print("Achou " + word)
# NOTE(review): this chunk begins mid-expression — the argument list below
# closes a call (presumably `simpleDataStream.put(...)` or similar) that
# starts outside the visible source, so the fragment is reproduced as-is.
SimpleData(name='Goshko', priority=1231, json='test'))
simpleDataStream.put(simpleDataStream.get_size() + 1, "TESTING DYNAMICS")

# Dump every stream entry and its runtime class.
for el in simpleDataStream.scan():
    print(el[1])
    print(el[1].__class__)

last_size = simpleDataStream.get_size()

# Busy-poll the Ignite node: report newly created caches and newly
# appended stream items. NOTE(review): the original source is collapsed
# onto one line, so the nesting below is a best-effort reconstruction.
while True:
    caches_num = len(client.get_cache_names())
    if caches_num > initial_caches_num:
        # Assumes get_cache_names() lists the newest cache last — TODO confirm.
        print(f"New cache: {list(client.get_cache_names())[-1]}")
        new_cache_name = list(client.get_cache_names())[-1]
        new_cache = client.get_cache(new_cache_name)
        print(new_cache.scan())
    current_size = simpleDataStream.get_size()
    if (current_size > last_size):
        last_size = current_size
        print("New Item...")
        ## This is not possible because it is not sorted by timestamp / key
        for item in simpleDataStream.scan():
            print(item)
        *_, new = simpleDataStream.scan()
        print(new)