Example #1
    def get_data(self) -> list:
        client = Client()
        client.connect('localhost', 10800)

        streams_cache = client.get_or_create_cache(
            json.loads(self.__stream_name)['Stream'])
        stream = streams_cache.scan()
        return list(stream)
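The method above is cut from a larger class, so its imports and `self.__stream_name` are not shown. A minimal self-contained sketch of the same scan pattern (the stream name below is hypothetical) could look like this:

import json

from pyignite import Client

client = Client()
client.connect('localhost', 10800)

# stand-in for self.__stream_name; the original holds JSON such as '{"Stream": "<cache name>"}'
stream_meta = json.loads('{"Stream": "my-stream"}')
streams_cache = client.get_or_create_cache(stream_meta['Stream'])

# scan() yields (key, value) pairs lazily; materialize them into a list
data = list(streams_cache.scan())
print(data)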
Example #2
from pyignite import Client


def main():
    client = Client()
    client.connect('127.0.0.1', 10800)

    my_cache = client.get_or_create_cache('my cache')
    my_cache.put('my key', 42)

    result = my_cache.get('my key')
    print('my key: {}'.format(result))


if __name__ == '__main__':
    main()
Example #3
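# Test from a pyignite test suite: `request` and `with_partition_awareness` are pytest
# fixtures, while `start_ignite` and `kill_process_tree` are suite helpers not shown here.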
def test_client_with_failed_server(request, with_partition_awareness):
    srv = start_ignite(idx=4)
    try:
        client = Client(partition_aware=with_partition_awareness)
        with client.connect([("127.0.0.1", 10804)]):
            cache = client.get_or_create_cache(request.node.name)
            cache.put(1, 1)
            kill_process_tree(srv.pid)

            if with_partition_awareness:
                ex_class = (ReconnectError, ConnectionResetError)
            else:
                ex_class = ConnectionResetError

            with pytest.raises(ex_class):
                cache.get(1)
    finally:
        kill_process_tree(srv.pid)
Example #4
def test_cluster_set_active(with_persistence):
    key = 42
    val = 42
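    # A cluster with Ignite persistence starts INACTIVE and must be activated explicitly;
    # an in-memory cluster starts ACTIVE.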
    start_state = ClusterState.INACTIVE if with_persistence else ClusterState.ACTIVE

    client = Client()
    with client.connect([("127.0.0.1", 10801), ("127.0.0.1", 10802)]):
        cluster = client.get_cluster()
        assert cluster.get_state() == start_state

        cluster.set_state(ClusterState.ACTIVE)
        assert cluster.get_state() == ClusterState.ACTIVE

        cache = client.get_or_create_cache("test_cache")
        cache.put(key, val)
        assert cache.get(key) == val

        cluster.set_state(ClusterState.ACTIVE_READ_ONLY)
        assert cluster.get_state() == ClusterState.ACTIVE_READ_ONLY

        assert cache.get(key) == val
        with pytest.raises(CacheError):
            cache.put(key, val + 1)

        cluster.set_state(ClusterState.INACTIVE)
        assert cluster.get_state() == ClusterState.INACTIVE

        with pytest.raises(CacheError):
            cache.get(key)

        with pytest.raises(CacheError):
            cache.put(key, val + 1)

        cluster.set_state(ClusterState.ACTIVE)
        assert cluster.get_state() == ClusterState.ACTIVE

        cache.put(key, val + 2)
        assert cache.get(key) == val + 2
Example #5
def test_client_with_recovered_server(request, with_partition_awareness):
    srv = start_ignite(idx=4)
    try:
        client = Client(partition_aware=with_partition_awareness,
                        timeout=CLIENT_SOCKET_TIMEOUT)
        with client.connect([("127.0.0.1", 10804)]):
            cache = client.get_or_create_cache(request.node.name)
            cache.put(1, 1)

            # Kill and restart server
            kill_process_tree(srv.pid)
            srv = start_ignite(idx=4)

            # First request may fail.
            try:
                cache.put(1, 2)
            except connection_errors:
                pass

            # Retry succeeds
            cache.put(1, 2)
            assert cache.get(1) == 2
    finally:
        kill_process_tree(srv.pid)
Example #6
from pyignite import Client
from pyignite.datatypes.cache_config import CacheMode
from pyignite.datatypes.prop_codes import *
from pyignite.exceptions import SocketError


nodes = [
    ('127.0.0.1', 10800),
    ('127.0.0.1', 10801),
    ('127.0.0.1', 10802),
]

client = Client(timeout=4.0)
client.connect(nodes)
print('Connected to {}'.format(client))

my_cache = client.get_or_create_cache({
    PROP_NAME: 'my_cache',
    PROP_CACHE_MODE: CacheMode.REPLICATED,
})
my_cache.put('test_key', 0)

# abstract main loop
while True:
    try:
        # do the work
        test_value = my_cache.get('test_key')
        my_cache.put('test_key', test_value + 1)
    except (OSError, SocketError) as e:
        # recover from error (repeat last command, check data
        # consistency or just continue − depends on the task)
        print('Error: {}'.format(e))
        print('Last value: {}'.format(my_cache.get('test_key')))
        print('Reconnected to {}'.format(client))
Example #7

from pyignite import Client
from pyignite.datatypes import CharObject, ShortObject

client = Client()
client.connect('127.0.0.1', 10800)

my_cache = client.get_or_create_cache('my cache')

my_cache.put('my key', 42)
# value ‘42’ takes 9 bytes of memory as a LongObject

my_cache.put('my key', 42, value_hint=ShortObject)
# value ‘42’ takes only 3 bytes as a ShortObject

my_cache.put('a', 1)
# ‘a’ is a key of type String

my_cache.put('a', 2, key_hint=CharObject)
# another key ‘a’ of type CharObject was created

value = my_cache.get('a')
print(value)
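The final `get('a')` above uses the default String key, so it returns 1; to read the entry stored under the CharObject key, the same hint has to be passed on read. A short follow-up to the example above:

print(my_cache.get('a', key_hint=CharObject))
# 2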
Example #8
from collections import OrderedDict

from pyignite import Client, GenericObjectMeta
from pyignite.datatypes import *


class Person(metaclass=GenericObjectMeta,
             schema=OrderedDict([
                 ('first_name', String),
                 ('last_name', String),
                 ('age', IntObject),
             ])):
    pass


client = Client()
client.connect('localhost', 10800)

person_cache = client.get_or_create_cache('person')

person_cache.put(1, Person(first_name='Ivan', last_name='Ivanov', age=33))

person = person_cache.get(1)
print(person.__class__.__name__)
# Person

print(person.__class__ is Person)
# True if `Person` was registered automatically (on writing)
# or manually (using `client.register_binary_type()` method).
# False otherwise

print(person)
# Person(first_name='Ivan', last_name='Ivanov', age=33, version=1)
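The comment above mentions manual registration via `client.register_binary_type()`; continuing the same example, a minimal sketch of that call:

client.register_binary_type(Person)
# After registration, objects of this binary type read from the cluster
# deserialize into the local Person class defined above.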
Example #9
    client.sql(CITY_INSERT_QUERY, query_args=row)

for row in LANGUAGE_DATA:
    client.sql(LANGUAGE_INSERT_QUERY, query_args=row)

# examine the storage
result = client.get_cache_names()
print(result)
# [
#     'SQL_PUBLIC_CITY',
#     'SQL_PUBLIC_COUNTRY',
#     'PUBLIC',
#     'SQL_PUBLIC_COUNTRYLANGUAGE'
# ]

city_cache = client.get_or_create_cache('SQL_PUBLIC_CITY')
print(city_cache.settings[PROP_NAME])
# 'SQL_PUBLIC_CITY'

print(city_cache.settings[PROP_QUERY_ENTITIES])
# {
#     'key_type_name': (
#         'SQL_PUBLIC_CITY_9ac8e17a_2f99_45b7_958e_06da32882e9d_KEY'
#     ),
#     'value_type_name': (
#         'SQL_PUBLIC_CITY_9ac8e17a_2f99_45b7_958e_06da32882e9d'
#     ),
#     'table_name': 'CITY',
#     'query_fields': [
#         ...
#     ],
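The snippet is cut off above. As a hedged follow-up, the same data can be read back over SQL with `client.sql()` (the `City` table name comes from the `table_name` field shown in the settings output):

result = client.sql('SELECT * FROM City LIMIT 3')
for row in result:
    print(row)
# each row is returned as a list of column values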
Example #10
#     'recipient': String,
#     'cashier_id': LongObject,
# }


class ExpenseVoucher(
        metaclass=GenericObjectMeta,
        schema=old_schema,
):
    pass


client = Client()

with client.connect('127.0.0.1', 10800):
    accounting = client.get_or_create_cache('accounting')

    for key, value in old_data:
        accounting.put(key, ExpenseVoucher(**value))

    data_classes = client.query_binary_type('ExpenseVoucher')
    print(data_classes)
    # {
    #     -231598180: <class '__main__.ExpenseVoucher'>
    # }

s_id, data_class = data_classes.popitem()
schema = data_class.schema

schema['expense_date'] = schema['date']
del schema['date']
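The snippet stops after the field rename. A hedged sketch of how such a migration might continue, assuming the common pyignite pattern of binding a new class to the same binary type name and registering it (the `ExpenseVoucherV2` name is illustrative):

class ExpenseVoucherV2(
        metaclass=GenericObjectMeta,
        type_name='ExpenseVoucher',  # keep the original binary type name
        schema=schema,               # updated schema with 'expense_date'
):
    pass


client = Client()
with client.connect('127.0.0.1', 10800):
    client.register_binary_type(ExpenseVoucherV2)
    # existing entries can then be read, converted and re-written with the new class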
Example #11
from pyignite import Client
from pyignite.datatypes.cache_config import CacheMode
from pyignite.datatypes.prop_codes import PROP_NAME, PROP_CACHE_MODE, PROP_BACKUPS_NUMBER
from pyignite.exceptions import SocketError

nodes = [
    ('127.0.0.1', 10800),
    ('127.0.0.1', 10801),
    ('127.0.0.1', 10802),
]

client = Client(timeout=4.0)
with client.connect(nodes):
    print('Connected')

    my_cache = client.get_or_create_cache({
        PROP_NAME: 'my_cache',
        PROP_CACHE_MODE: CacheMode.PARTITIONED,
        PROP_BACKUPS_NUMBER: 2,
    })
    my_cache.put('test_key', 0)
    test_value = 0

    # abstract main loop
    while True:
        try:
            # do the work
            test_value = my_cache.get('test_key') or 0
            my_cache.put('test_key', test_value + 1)
        except (OSError, SocketError) as e:
            # recover from error (repeat last command, check data
            # consistency or just continue − depends on the task)
            print(f'Error: {e}')
Example #12
    def __init__(self):
        # `ip`, `port` and `cache` are defined elsewhere in the original module
        client = Client()
        client.connect(ip, port)

        # Create or open the cache
        self.my_cache = client.get_or_create_cache(cache)
Example #13
from collections import OrderedDict

from pyignite import Client, GenericObjectMeta
from pyignite.datatypes import *


class Person(metaclass=GenericObjectMeta, schema=OrderedDict([
    ('first_name', String),
    ('last_name', String),
    ('age', IntObject),
])):
    pass


client = Client()
client.connect('localhost', 10800)

person_cache = client.get_or_create_cache('person')

person_cache.put(
    1, Person(first_name='Ivan', last_name='Ivanov', age=33)
)

person = person_cache.get(1)
print(person.__class__.__name__)
# Person

print(person.__class__ is Person)
# True if `Person` was registered automatically (on writing)
# or manually (using `client.register_binary_type()` method).
# False otherwise

print(person)
Example #14
# `SimpleData` is a GenericObjectMeta-based binary class defined elsewhere in the original script
from pyignite import Client

client = Client()
client.connect('localhost', 10800)
client.register_binary_type(SimpleData)

print(client.get_cache_names())
initial_caches_num = len(client.get_cache_names())

stream_name = None
for cache_name in list(client.get_cache_names()):
    if "DynamicDataStream" in cache_name:
        stream_name = cache_name

if stream_name is None:
    print('Perper stream not started yet')
else:
    simpleDataStream = client.get_or_create_cache(stream_name)

# the code below assumes the stream cache was found above
simpleDataStream.put(simpleDataStream.get_size() + 1,
                     SimpleData(name='Goshko', priority=1231, json='test'))

simpleDataStream.put(simpleDataStream.get_size() + 1, "TESTING DYNAMICS")

for el in simpleDataStream.scan():
    print(el[1])
    print(el[1].__class__)

last_size = simpleDataStream.get_size()

while True:
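    # poll until a new cache appears (i.e. another stream has been created)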
    caches_num = len(client.get_cache_names())
    if caches_num > initial_caches_num:
Example #15
from pyignite import Client
from pyignite.datatypes.cache_config import CacheMode
from pyignite.datatypes.prop_codes import *
from pyignite.exceptions import SocketError

nodes = [
    ('127.0.0.1', 10800),
    ('217.29.2.1', 10800),
    ('200.10.33.1', 10800),
]

client = Client(timeout=40.0)
client.connect(nodes)
print('Connected to {}'.format(client))

my_cache = client.get_or_create_cache({
    PROP_NAME: 'my_cache',
    PROP_CACHE_MODE: CacheMode.REPLICATED,
})
my_cache.put('test_key', 0)

# Abstract main loop
while True:
    try:
        # Do the work
        test_value = my_cache.get('test_key')
        my_cache.put('test_key', test_value + 1)
    except (OSError, SocketError) as e:
        # Recover from error (repeat last command, check data
        # consistency or just continue − depends on the task)
        print('Error: {}'.format(e))
        print('Last value: {}'.format(my_cache.get('test_key')))
        print('Reconnected to {}'.format(client))
Example #17

from pyignite import Client
from pyignite.datatypes import CharObject, ShortObject

client = Client()
client.connect('127.0.0.1', 10800)

my_cache = client.get_or_create_cache('my cache')

my_cache.put('my key', 42)
# value ‘42’ takes 9 bytes of memory as a LongObject

my_cache.put('my key', 42, value_hint=ShortObject)
# value ‘42’ takes only 3 bytes as a ShortObject

my_cache.put('a', 1)
# ‘a’ is a key of type String

my_cache.put('a', 2, key_hint=CharObject)
# another key ‘a’ of type CharObject was created

value = my_cache.get('a')
print(value)
Example #18
#     'recipient': String,
#     'cashier_id': LongObject,
# }


class ExpenseVoucher(
    metaclass=GenericObjectMeta,
    schema=old_schema,
):
    pass


client = Client()
client.connect('127.0.0.1', 10800)

accounting = client.get_or_create_cache('accounting')

for key, value in old_data:
    accounting.put(key, ExpenseVoucher(**value))

data_classes = client.query_binary_type('ExpenseVoucher')
print(data_classes)
# {
#     -231598180: <class '__main__.ExpenseVoucher'>
# }

s_id, data_class = data_classes.popitem()
schema = data_class.schema

schema['expense_date'] = schema['date']
del schema['date']