import time

from pyignite import Client
from pyignite.datatypes import ExpiryPolicy
from pyignite.datatypes.prop_codes import PROP_NAME, PROP_EXPIRY_POLICY
from pyignite.exceptions import NotSupportedByClusterError


def main():
    print("Running sync ExpiryPolicy example.")

    client = Client()
    with client.connect('127.0.0.1', 10800):
        print("Create cache with expiry policy.")
        try:
            ttl_cache = client.create_cache({
                PROP_NAME: 'test',
                PROP_EXPIRY_POLICY: ExpiryPolicy(create=1.0)
            })
        except NotSupportedByClusterError:
            print("'ExpiryPolicy' API is not supported by cluster. Finishing...")
            return

        try:
            # Entries expire 1 second after creation.
            ttl_cache.put(1, 1)
            time.sleep(0.5)
            print(f"key = {1}, value = {ttl_cache.get(1)}")
            # key = 1, value = 1
            time.sleep(1.2)
            print(f"key = {1}, value = {ttl_cache.get(1)}")
            # key = 1, value = None
        finally:
            ttl_cache.destroy()

        print("Create simple Cache and set TTL through `with_expire_policy`")
        simple_cache = client.create_cache('test')
        try:
            # Entries expire 1 second after the last access.
            ttl_cache = simple_cache.with_expire_policy(access=1.0)
            ttl_cache.put(1, 1)
            time.sleep(0.5)
            print(f"key = {1}, value = {ttl_cache.get(1)}")
            # key = 1, value = 1
            time.sleep(1.7)
            print(f"key = {1}, value = {ttl_cache.get(1)}")
            # key = 1, value = None
        finally:
            simple_cache.destroy()


if __name__ == '__main__':
    main()
from pyignite import Client
from pyignite.datatypes.prop_codes import PROP_NAME, PROP_SQL_SCHEMA, PROP_QUERY_ENTITIES

client = Client()
client.connect('127.0.0.1', 10800)

# Create an SQL-enabled cache that backs the PUBLIC.STUDENT table.
student_cache = client.create_cache({
    PROP_NAME: 'SQL_PUBLIC_STUDENT',
    PROP_SQL_SCHEMA: 'PUBLIC',
    PROP_QUERY_ENTITIES: [
        {
            'table_name': 'Student'.upper(),
            'key_field_name': 'SID',
            'key_type_name': 'java.lang.Integer',
            'field_name_aliases': [],
            'query_fields': [
                {
                    'name': 'SID',
                    'type_name': 'java.lang.Integer',
                    'is_key_field': True,
                    'is_notnull_constraint_field': True,
                },
                {
                    'name': 'NAME',
                    'type_name': 'java.lang.String',
                },
                {
                    'name': 'LOGIN',
                    'type_name': 'java.lang.String',
                },
                {
                    'name': 'AGE',
                    'type_name': 'java.lang.Integer',
                },
                {
                    'name': 'GPA',
                    'type_name': 'java.lang.Double',
                },
            ],
            'query_indexes': [],
            'value_type_name': 'SQL_PUBLIC_STUDENT_TYPE',
            'value_field_name': None,
        },
    ],
})
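# Once the cache exists, the table can be populated and read through the SQL
# API. A minimal sketch, assuming `Client.sql()` with the default 'PUBLIC'
# schema; the row values below are made-up illustration data.
client.sql(
    "INSERT INTO STUDENT (SID, NAME, LOGIN, AGE, GPA) VALUES (?, ?, ?, ?, ?)",
    query_args=[1, 'Jane Doe', 'jdoe', 18, 3.9],
)

# With include_field_names=True the first row of the cursor is the header.
cursor = client.sql(
    "SELECT SID, NAME, GPA FROM STUDENT",
    include_field_names=True,
)
for row in cursor:
    print(row)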
from pyignite import Client

client = Client()
client.connect('127.0.0.1', 10800)

# Create cache
my_cache = client.create_cache('my cache')

# Put value in cache
my_cache.put('my key', 42)

# Get value from cache
result = my_cache.get('my key')
print(result)  # 42

result = my_cache.get('non-existent key')
print(result)  # None

# Get multiple values from cache
result = my_cache.get_all([
    'my key',
    'non-existent key',
    'other-key',
])
print(result)  # {'my key': 42}
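# A follow-up sketch using the same `my_cache` object from above: deleting a
# key and checking the cache size with the same key-value API.
my_cache.remove_key('my key')
print(my_cache.get('my key'))  # None
print(my_cache.get_size())     # 0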
from pyignite import Client

client = Client()
client.connect('127.0.0.1', 10800)

my_cache = client.create_cache('my cache')

my_cache.put_all({'key_{}'.format(v): v for v in range(20)})
# {
#     'key_0': 0,
#     'key_1': 1,
#     'key_2': 2,
#     ... 20 elements in total ...
#     'key_18': 18,
#     'key_19': 19
# }

# Scan returns a cursor over (key, value) pairs; iteration order is not
# guaranteed.
result = my_cache.scan()
for k, v in result:
    print(k, v)
# 'key_17' 17
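# The scan cursor can also be materialized in one step; a small sketch,
# assuming the whole cache fits in memory.
result = dict(my_cache.scan())
print(result['key_17'])  # 17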
from collections import OrderedDict

from pyignite import Client, GenericObjectMeta
from pyignite.datatypes import IntObject
from pyignite.datatypes.prop_codes import (
    PROP_NAME, PROP_BACKUPS_NUMBER, PROP_CACHE_KEY_CONFIGURATION,
)

# Open a connection
client = Client()
client.connect('127.0.0.1', 10800)

# Create a cache whose entries are co-located by the 'companyId' field.
cache_config = {
    PROP_NAME: 'my_cache',
    PROP_BACKUPS_NUMBER: 2,
    PROP_CACHE_KEY_CONFIGURATION: [
        {
            'type_name': 'PersonKey',
            'affinity_key_field_name': 'companyId'
        }
    ]
}
my_cache = client.create_cache(cache_config)


class PersonKey(metaclass=GenericObjectMeta, type_name='PersonKey', schema=OrderedDict([
    ('personId', IntObject),
    ('companyId', IntObject),
])):
    pass


personKey = PersonKey(personId=1, companyId=1)
my_cache.put(personKey, 'test')

print(my_cache.get(personKey))
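# Sketch: keys that share the same 'companyId' land in the same partition
# (and therefore on the same primary node), since 'companyId' is the
# affinity key. The values below are illustrative only.
for person_id in range(2, 5):
    my_cache.put(PersonKey(personId=person_id, companyId=1), 'person_%d' % person_id)

print(my_cache.get(PersonKey(personId=2, companyId=1)))  # person_2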
import os

from pyignite import Client


def get_filenames(is_training, data_dir):
    # CIFAR-10 binary format ships five training batch files.
    return [
        os.path.join(data_dir, 'data_batch_%d.bin' % i)
        for i in range(1, 6)
    ]


# Each CIFAR-10 record is 1 label byte followed by a 32x32x3 image.
batch_size = 32 * 32 * 3 + 1
idx = 0

client = Client()
# client.connect('192.168.1.2', 10800)
client.connect('ignite-1', 10800)
cache = client.create_cache('TEST_DATA')

for filename in get_filenames(True, '/tmp/cifar10_data/cifar-10-batches-bin'):
    with open(filename, 'rb') as batch:
        for _ in range(500):  # while (True):
            sample = batch.read(batch_size)
            if len(sample) == 0:
                break
            elif len(sample) != batch_size:
                raise Exception("Wrong sample size (%d)" % len(sample))
            cache.put(idx, sample)
            idx += 1
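# Verification sketch (uses the cache populated above): read one record back
# and check the expected CIFAR-10 layout of 1 label byte + 3072 image bytes.
sample = cache.get(0)
label, image = sample[0], sample[1:]
print(label, len(image))   # e.g. 6 3072
print(cache.get_size())    # number of samples uploaded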