Example #1
def testDependenciesBasicExportImport():
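    # Install the 'redisgraph' requirement, export it with RG.PYEXPORTREQ, restart the
    # server, import it back with RG.PYIMPORTREQ, and verify every shard reports it as
    # downloaded and installed.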
    env = Env(moduleArgs='CreateVenv 1')
    conn = getConnectionByEnv(env)

    # disable RDB save
    res, err = env.cmd(
        'RG.PYEXECUTE',
        "GB('ShardsIDReader').foreach(lambda x: execute('config', 'set', 'save', '')).run()"
    )

    env.expect('RG.PYEXECUTE', "import redisgraph", 'REQUIREMENTS',
               'redisgraph').ok()
    md, data = env.cmd('RG.PYEXPORTREQ', 'redisgraph')
    env.assertEqual(md[5], 'yes')
    env.assertEqual(md[7], 'yes')
    env.stop()
    env.start()
    conn = getConnectionByEnv(env)
    env.expect('RG.PYDUMPREQS').equal([])
    env.expect('RG.PYIMPORTREQ', *data).equal('OK')
    res, err = env.cmd(
        'RG.PYEXECUTE',
        "GB('ShardsIDReader').flatmap(lambda x: execute('RG.PYDUMPREQS')).run()"
    )
    env.assertEqual(len(err), 0)
    env.assertEqual(len(res), env.shardsCount)
    for r in res:
        env.assertContains("'IsDownloaded', 'yes', 'IsInstalled', 'yes'", r)
Example #2
def testCompressionConfig(env):
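    # With numeric compression disabled, an exact numeric range match finds each value;
    # with compression enabled the stored value may shift slightly, so exact matches fail
    # (keys whose values compression leaves unchanged are deleted first).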
    env.skipOnCluster()
    conn = getConnectionByEnv(env)
    env.cmd('ft.create', 'idx', 'SCHEMA', 'n', 'numeric')

    # w/o compression. exact number match.
    env.expect('ft.config', 'set', '_NUMERIC_COMPRESS', 'false').equal('OK')
    for i in range(100):
        env.execute_command('hset', i, 'n', str(1 + i / 100.0))
    for i in range(100):
        num = str(1 + i / 100.0)
        env.expect('ft.search', 'idx',
                   '@n:[%s %s]' % (num, num)).equal([1L,
                                                     str(i), ['n', num]])

    # with compression. no exact number match.
    env.expect('ft.config', 'set', '_NUMERIC_COMPRESS', 'true').equal('OK')
    for i in range(100):
        env.execute_command('hset', i, 'n', str(1 + i / 100.0))

    # delete keys where compression does not change value
    env.execute_command('del', '0')
    env.execute_command('del', '25')
    env.execute_command('del', '50')
    env.execute_command('del', '75')

    for i in range(100):
        num = str(1 + i / 100.0)
        env.expect('ft.search', 'idx', '@n:[%s %s]' % (num, num)).equal([0L])
Example #3
def testAof(env):
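    # Install a requirement with AOF enabled, then reload the AOF and verify the
    # requirement is still downloaded and installed on every shard.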
    env = Env(moduleArgs='CreateVenv 1', useAof=True)
    conn = getConnectionByEnv(env)
    env.expect('RG.PYEXECUTE', "import redisgraph", 'REQUIREMENTS',
               'redisgraph').ok()

    res, err = env.cmd(
        'RG.PYEXECUTE',
        "GB('ShardsIDReader').flatmap(lambda x: execute('RG.PYDUMPREQS')).run()"
    )
    env.assertEqual(len(err), 0)
    env.assertEqual(len(res), env.shardsCount)
    for r in res:
        env.assertContains("'IsDownloaded', 'yes', 'IsInstalled', 'yes'", r)

    env.broadcast('debug', 'loadaof')

    res, err = env.cmd(
        'RG.PYEXECUTE',
        "GB('ShardsIDReader').flatmap(lambda x: execute('RG.PYDUMPREQS')).run()"
    )
    env.assertEqual(len(err), 0)
    env.assertEqual(len(res), env.shardsCount)
    for r in res:
        env.assertContains("'IsDownloaded', 'yes', 'IsInstalled', 'yes'", r)
Example #4
def testBasicStreamProcessing(env):
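    # Register a StreamReader execution that copies every field of an entry added to
    # 'stream1' into plain string keys, then XADD an entry and verify the copies.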
    conn = getConnectionByEnv(env)
    res = env.cmd(
        'rg.pyexecute', "GearsBuilder('StreamReader')."
        "flatmap(lambda x: [(a[0], a[1]) for a in x['value'].items()])."
        "repartition(lambda x: x[0])."
        "foreach(lambda x: redisgears.executeCommand('set', x[0], x[1]))."
        "map(lambda x: str(x))."
        "register('stream1')", 'UNBLOCKING')
    env.assertEqual(res, 'OK')
    if (res != 'OK'):
        return
    time.sleep(0.5)  # make sure the registration reached all shards
    env.cmd('XADD', 'stream1', '*', 'f1', 'v1', 'f2', 'v2')
    res = []
    while len(res) < 1:
        res = env.cmd('rg.dumpexecutions')
    for e in res:
        env.broadcast('rg.getresultsblocking', e[1])
        env.cmd('rg.dropexecution', e[1])
    env.assertEqual(conn.get('f1'), 'v1')
    env.assertEqual(conn.get('f2'), 'v2')

    # delete all registrations and executions so valgrind check will pass
    executions = env.cmd('RG.DUMPEXECUTIONS')
    for r in executions:
        env.expect('RG.DROPEXECUTION', r[1]).equal('OK')

    registrations = env.cmd('RG.DUMPREGISTRATIONS')
    for r in registrations:
        env.expect('RG.UNREGISTER', r[1]).equal('OK')
Example #5
def test_1667(env):
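  # Single-character tags that are stopwords in TEXT fields must still be searchable
  # in TAG fields, alone, inside OR lists and as prefixes.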
  conn = getConnectionByEnv(env)
  conn.execute_command('FT.CREATE', 'idx', 'SCHEMA', 'tag', 'TAG', 'text', 'TEXT')
  env.expect('ft.search idx @tag:{a}').equal([0L])
  env.expect('ft.search idx @tag:{b}').equal([0L])

  conn.execute_command('HSET', 'doc', 'tag', 'a,b')
  conn.execute_command('HSET', 'doc1', 'tag', 'abc')

  # test single stopword
  env.expect('ft.search idx @tag:{a}').equal([1L, 'doc', ['tag', 'a,b']])
  env.expect('ft.search idx @tag:{b}').equal([1L, 'doc', ['tag', 'a,b']])
  env.expect('ft.search idx @tag:{c}').equal([0L])

  # test stopword in list
  env.expect('ft.search idx @tag:{a|c}').equal([1L, 'doc', ['tag', 'a,b']])
  env.expect('ft.search idx @tag:{c|a}').equal([1L, 'doc', ['tag', 'a,b']])
  env.expect('ft.search idx @tag:{c|a|c}').equal([1L, 'doc', ['tag', 'a,b']])

  # test stopword with prefix
  env.expect('ft.search idx @tag:{ab*}').equal([1L, 'doc1', ['tag', 'abc']])
  env.expect('ft.search idx @tag:{abc*}').equal([1L, 'doc1', ['tag', 'abc']])
  
  # ensure stopwords still apply to the regular TEXT field
  conn.execute_command('HSET', 'doc_a', 'text', 'a')
  conn.execute_command('HSET', 'doc_b', 'text', 'b')
  env.expect('ft.search idx a').equal([0L])
  env.expect('ft.search idx b').equal([1L, 'doc_b', ['text', 'b']])
Example #6
def testReplace(env):
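    # Index two documents, overwrite doc1 with new content, and verify (also after an
    # RDB reload) that searches only reflect the new content.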
    conn = getConnectionByEnv(env)
    r = env

    r.expect('ft.create idx schema f text').ok()

    res = conn.execute_command('HSET', 'doc1', 'f', 'hello world')
    env.assertEqual(res, 1)
    res = conn.execute_command('HSET', 'doc2', 'f', 'hello world')
    env.assertEqual(res, 1)
    res = r.execute_command('ft.search', 'idx', 'hello world')
    r.assertEqual(2, res[0])

    # now replace doc1 with a different content
    res = conn.execute_command('HSET', 'doc1', 'f', 'goodbye universe')
    env.assertEqual(res, 0)

    for _ in r.retry_with_rdb_reload():
        waitForIndex(env, 'idx')
        # make sure the query for hello world does not return the replaced document
        r.expect('ft.search', 'idx', 'hello world',
                 'nocontent').equal([1, 'doc2'])

        # search for the doc's new content
        r.expect('ft.search', 'idx', 'goodbye universe',
                 'nocontent').equal([1, 'doc1'])
Example #7
def test_issue1834(env):
  # Highlighting should mark every matched term of the union query.
  conn = getConnectionByEnv(env)
  conn.execute_command('FT.CREATE', 'idx', 'SCHEMA', 't', 'TEXT')
  conn.execute_command('HSET', 'doc', 't', 'hell hello')

  env.expect('FT.SEARCH', 'idx', 'hell|hello', 'HIGHLIGHT').equal([1L, 'doc', ['t', '<b>hell</b> <b>hello</b>']])
Example #8
def aofTestCommon(env, reloadfn):
    # TODO: Change this attribute in rmtest
    conn = getConnectionByEnv(env)
    env.cmd('ft.create', 'idx', 'ON', 'HASH', 'schema', 'field1', 'text',
            'field2', 'numeric')
    for x in range(1, 10):
        conn.execute_command('hset', 'doc{}'.format(x), 'field1',
                             'myText{}'.format(x), 'field2', 20 * x)

    reloadfn()
    waitForIndex(env, 'idx')
    exp = [
        9L, 'doc1', ['field1', 'myText1', 'field2', '20'], 'doc2',
        ['field1', 'myText2', 'field2', '40'], 'doc3',
        ['field1', 'myText3', 'field2', '60'], 'doc4',
        ['field1', 'myText4', 'field2', '80'], 'doc5',
        ['field1', 'myText5', 'field2', '100'], 'doc6',
        ['field1', 'myText6', 'field2', '120'], 'doc7',
        ['field1', 'myText7', 'field2', '140'], 'doc8',
        ['field1', 'myText8', 'field2', '160'], 'doc9',
        ['field1', 'myText9', 'field2', '180']
    ]

    reloadfn()
    waitForIndex(env, 'idx')
    ret = env.cmd('ft.search', 'idx', 'myt*')
    env.assertEqual(toSortedFlatList(ret), toSortedFlatList(exp))
Example #9
def testAofRewriteTags():
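    # An index with SORTABLE TEXT and TAG fields must keep identical field definitions
    # across an AOF restart/reload, and must be droppable and recreatable afterwards.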
    env = Env(useAof=True)
    conn = getConnectionByEnv(env)
    env.cmd('FT.CREATE', 'idx', 'ON', 'HASH', 'SCHEMA', 'foo', 'TEXT',
            'SORTABLE', 'bar', 'TAG')
    env.cmd('FT.ADD', 'idx', '1', '1', 'FIELDS', 'foo', 'A', 'bar', '1')
    env.cmd('FT.ADD', 'idx', '2', '1', 'fields', 'foo', 'B', 'bar', '1')

    info_a = to_dict(env.cmd('FT.INFO', 'idx'))
    env.restart_and_reload()
    info_b = to_dict(env.cmd('FT.INFO', 'idx'))
    env.assertEqual(info_a['fields'], info_b['fields'])

    # Try to drop the schema
    env.cmd('FT.DROP', 'idx')

    conn.execute_command('del', '1')
    conn.execute_command('del', '2')

    # Try to create it again - should work!
    env.cmd('FT.CREATE', 'idx', 'ON', 'HASH', 'SCHEMA', 'foo', 'TEXT',
            'SORTABLE', 'bar', 'TAG')
    env.cmd('FT.ADD', 'idx', '1', '1', 'FIELDS', 'foo', 'A', 'bar', '1')
    env.cmd('FT.ADD', 'idx', '2', '1', 'fields', 'foo', 'B', 'bar', '1')
    res = env.cmd('FT.SEARCH', 'idx', '@bar:{1}', 'SORTBY', 'foo', 'ASC',
                  'RETURN', '1', 'foo', 'WITHSORTKEYS')
    env.assertEqual([2L, '1', '$a', ['foo', 'A'], '2', '$b', ['foo', 'B']],
                    res)
Example #10
def testProfileOutput(env):
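    # Skipped by default: bulk-loads documents and prints FT.PROFILE output for a
    # union-of-prefixes query, for manual profiling runs.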
    env.skip()
    docs = 10000
    copies = 10
    queries = 0

    conn = getConnectionByEnv(env)
    pl = conn.pipeline()
    env.cmd('FT.CONFIG', 'SET', 'MAXPREFIXEXPANSIONS', 1000000)
    env.cmd('FT.CONFIG', 'SET', '_PRINT_PROFILE_CLOCK', 'false')
    env.cmd('FT.CONFIG', 'SET', 'UNION_ITERATOR_HEAP', 1)

    env.cmd('ft.create', 'idx', 'SCHEMA', 't', 'text')
    for i in range(docs):
        pl.execute_command('hset', i, 't', str(i / copies), 'hello', string1,
                           'world', string2)
        if i % 999 == 0:
            pl.execute()
    pl.execute()

    print "finished loading"
    search_string = '12*|87*|42*'
    #search_string = '(4|5) (5|6)'
    #search_string = '1(1(1(1(1(1(1))))))'
    #search_string = '1(1(1(1(1))))'
    #print env.cmd('FT.search', 'idx', '12*|69*', 'limit', 0, 0)
    for i in range(queries):
        pl.execute_command('FT.PROFILE', 'search', 'idx', search_string,
                           'limit', 0, 1000)
        if i % 999 == 0:
            pl.execute()
    pl.execute()
    res = env.cmd('FT.PROFILE', 'search', 'idx', search_string, 'limit', 0, 0,
                  'nocontent')
    print res
Example #11
def testProfileNumeric(env):
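    # Profile a numeric range query and verify the union of numeric range iterators
    # and their counters.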
    env.skipOnCluster()
    conn = getConnectionByEnv(env)
    env.cmd('FT.CONFIG', 'SET', '_PRINT_PROFILE_CLOCK', 'false')

    env.cmd('ft.create', 'idx', 'SCHEMA', 'n', 'numeric')
    conn.execute_command('hset', '1', 'n', '1.2')
    conn.execute_command('hset', '2', 'n', '1.5')
    conn.execute_command('hset', '3', 'n', '8.2')
    conn.execute_command('hset', '4', 'n', '6.7')
    conn.execute_command('hset', '5', 'n', '-14')

    actual_res = conn.execute_command('ft.profile', 'idx', 'search', 'query',
                                      '@n:[0,100]', 'nocontent')
    expected_res = [
        'Iterators profile',
        [
            'Type', 'UNION', 'Query type', 'NUMERIC', 'Counter', 4L,
            'Children iterators',
            [
                'Type', 'NUMERIC', 'Term', '-14 - 1.35', 'Counter', 1L, 'Size',
                2L
            ],
            [
                'Type', 'NUMERIC', 'Term', '1.35 - 8.2', 'Counter', 3L, 'Size',
                3L
            ]
        ]
    ]
    env.assertEqual(actual_res[1][3], expected_res)
Example #12
def testProfileNumeric(env):
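    # Same numeric data set as above, this time checking the complete FT.PROFILE reply:
    # results, iterators profile and result-processors profile.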
    env.skipOnCluster()
    conn = getConnectionByEnv(env)
    env.cmd('FT.CONFIG', 'SET', 'MAXPREFIXEXPANSIONS', 1000000)
    env.cmd('FT.CONFIG', 'SET', '_PRINT_PROFILE_CLOCK', 'false')

    env.cmd('ft.create', 'idx', 'SCHEMA', 'n', 'numeric')
    conn.execute_command('hset', '1', 'n', '1.2')
    conn.execute_command('hset', '2', 'n', '1.5')
    conn.execute_command('hset', '3', 'n', '8.2')
    conn.execute_command('hset', '4', 'n', '6.7')
    conn.execute_command('hset', '5', 'n', '-14')

    actual_res = conn.execute_command('ft.profile', 'search', 'idx',
                                      '@n:[0,100]', 'nocontent')
    expected_res = [[4L, '1', '2', '3', '4'],
                    [['Total profile time'],
                     ['Parsing and iterator creation time'],
                     [
                         'Iterators profile',
                         [
                             'Union iterator - NUMERIC', 5L,
                             ['Numeric reader', '-14 - 1.35', 2L],
                             ['Numeric reader', '1.35 - 8.2', 4L]
                         ]
                     ],
                     [
                         'Result processors profile', ['Index', 5L],
                         ['Scorer', 5L], ['Sorter', 5L]
                     ]]]
    env.assertEqual(actual_res, expected_res)
Example #13
def test_issue1880(env):
  # the order of iterators in an intersection is optimized: the rarer term is evaluated first
  env.skipOnCluster()
  conn = getConnectionByEnv(env)
  env.cmd('FT.CONFIG', 'SET', '_PRINT_PROFILE_CLOCK', 'false')
  conn.execute_command('FT.CREATE', 'idx', 'SCHEMA', 't', 'TEXT')
  conn.execute_command('HSET', 'doc1', 't', 'hello world')
  conn.execute_command('HSET', 'doc2', 't', 'hello')

  expected_res = ['Type', 'INTERSECT', 'Counter', 1L, 'Children iterators',
                    ['Type', 'TEXT', 'Term', 'world', 'Counter', 1L, 'Size', 1L],
                    ['Type', 'TEXT', 'Term', 'hello', 'Counter', 1L, 'Size', 2L]]
  res1 = env.cmd('FT.PROFILE', 'idx', 'SEARCH', 'QUERY', 'hello world')
  res2 = env.cmd('FT.PROFILE', 'idx', 'SEARCH', 'QUERY', 'world hello')
  # both queries return `world` iterator before `hello`
  env.assertEqual(res1[1][3][1], expected_res)
  env.assertEqual(res2[1][3][1], expected_res)

  # test with a term which does not exist
  expected_res = ['Type', 'INTERSECT', 'Counter', 0L, 'Children iterators',
                    None,
                    ['Type', 'TEXT', 'Term', 'world', 'Counter', 0L, 'Size', 1L],
                    ['Type', 'TEXT', 'Term', 'hello', 'Counter', 0L, 'Size', 2L]]
  res3 = env.cmd('FT.PROFILE', 'idx', 'SEARCH', 'QUERY', 'hello new world')

  env.assertEqual(res3[1][3][1], expected_res)
Example #14
def testCommandOverrideHset(env):
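    # Hook HSET with a sync CommandReader registration that appends a '__time' field
    # before calling the original command, then verify the field was written.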
    conn = getConnectionByEnv(env)
    script = '''
import time
def doHset(x):
    x += ['__time', time.time()]
    return call_next(*x[1:])
GB("CommandReader").map(doHset).register(hook="hset", mode="sync")
    '''
    env.expect('rg.pyexecute', script).ok()

    verifyRegistrationIntegrity(env)

    conn.execute_command('hset', 'h1', 'foo', 'bar')
    conn.execute_command('hset', 'h2', 'foo', 'bar')
    conn.execute_command('hset', 'h3', 'foo', 'bar')

    res = conn.execute_command('hget', 'h1', '__time')
    env.assertNotEqual(res, None)

    res = conn.execute_command('hget', 'h2', '__time')
    env.assertNotEqual(res, None)

    res = conn.execute_command('hget', 'h3', '__time')
    env.assertNotEqual(res, None)
Example #15
def testRegistersOnPrefix(env):
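    # Register on keys matching 'pref1:*' and copy each value to the matching 'pref2:*'
    # key, wait for the executions to finish, then verify the copies.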
    conn = getConnectionByEnv(env)
    env.cmd(
        'rg.pyexecute', "GB()."
        "filter(lambda x: x['type'] != 'empty')."
        "map(lambda x: ('pref2:' + x['key'].split(':')[1], x['value']))."
        "repartition(lambda x: x[0])."
        "foreach(lambda x: execute('set', x[0], x[1]))."
        "register(regex='pref1:*')")

    time.sleep(0.1)  # wait for the registration to reach all shards

    conn.set('pref1:x', '1')
    conn.set('pref1:y', '2')
    conn.set('pref1:z', '3')

    res = []
    while len(res) < 3:
        res = env.cmd('rg.dumpexecutions')
        res = [r for r in res if r[3] == 'done']

    env.assertEqual(conn.get('pref2:x'), '1')
    env.assertEqual(conn.get('pref2:y'), '2')
    env.assertEqual(conn.get('pref2:z'), '3')

    executions = env.cmd('RG.DUMPEXECUTIONS')
    for r in executions:
        env.expect('RG.DROPEXECUTION', r[1]).equal('OK')

    registrations = env.cmd('RG.DUMPREGISTRATIONS')
    for r in registrations:
        env.expect('RG.UNREGISTER', r[1]).equal('OK')
Example #16
def testNotExist(env):
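    # A hash that matches the index prefix and filter but contains none of the schema
    # fields must not be returned by a search.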
    conn = getConnectionByEnv(env)
    env.cmd('ft.create', 'things', 'ON', 'HASH', 'PREFIX', '1', 'thing:',
            'FILTER', 'startswith(@__key, "thing:")', 'SCHEMA', 'txt', 'text')

    conn.execute_command('hset', 'thing:bar', 'not_text', 'foo')
    env.expect('ft.search', 'things', 'foo').equal([0L])
Example #17
def testBasicStream(env):
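    # Register on all keys and push every written value onto the 'values' list, then
    # verify the list contents after three SETs.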
    conn = getConnectionByEnv(env)
    res = env.cmd(
        'rg.pyexecute', "GearsBuilder()."
        "filter(lambda x:x['key'] != 'values' and x['type'] != 'empty')."
        "repartition(lambda x: 'values')."
        "foreach(lambda x: redisgears.executeCommand('lpush', 'values', x['value']))."
        "register('*')", 'UNBLOCKING')
    env.assertEqual(res, 'OK')
    if (res != 'OK'):
        return
    time.sleep(0.5)  # make sure the registration reached all shards
    conn.execute_command('set', 'x', '1')
    conn.execute_command('set', 'y', '2')
    conn.execute_command('set', 'z', '3')
    res = []
    while len(res) < 6:
        res = env.cmd('rg.dumpexecutions')
    for e in res:
        env.broadcast('rg.getresultsblocking', e[1])
        env.cmd('rg.dropexecution', e[1])
    env.assertEqual(set(conn.lrange('values', '0', '-1')), set(['1', '2',
                                                                '3']))

    # delete all registrations so valgrind check will pass
    registrations = env.cmd('RG.DUMPREGISTRATIONS')
    for r in registrations:
        env.expect('RG.UNREGISTER', r[1]).equal('OK')
Example #18
def testRegistersSurviveRestart(env):
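    # Register an async_local execution that increments a per-shard counter for key
    # writes and decrements it for deletes; the counter must net to zero and the
    # registration must survive RDB reloads.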
    conn = getConnectionByEnv(env)
    env.cmd('rg.pyexecute', "GB().filter(lambda x: 'NumOfKeys' not in x['key'])."
                            "foreach(lambda x: execute('incrby', 'NumOfKeys{%s}' % (hashtag()), ('1' if 'value' in x.keys() else '-1')))."
                            "register(mode='async_local')")

    time.sleep(0.1) # wait for execution to reach all the shards

    for _ in env.reloading_iterator():
        for i in range(100):
            conn.set(str(i), str(i))

        for i in range(100):
            conn.delete(str(i))

        # wait for all executions to finish
        res = 0
        while res < 200:
            res = env.cmd('rg.pyexecute', "GB('ShardsIDReader').map(lambda x: len([r for r in execute('rg.dumpexecutions') if r[3] == 'done'])).aggregate(0, lambda a, x: x, lambda a, x: a + x).run()")

        numOfKeys = env.cmd('rg.pyexecute', "GB().map(lambda x: int(x['value'])).aggregate(0, lambda a, x: x, lambda a, x: a + x).run('NumOfKeys*')")[0][0]
        env.assertEqual(numOfKeys, '0')


    # delete all executions from all the shards; the execution lists are not identical, so we use Gears to clear them
    res = env.cmd('rg.pyexecute', "GB('ShardsIDReader').flatmap(lambda x: [r[1] for r in execute('rg.dumpexecutions')]).foreach(lambda x: execute('RG.DROPEXECUTION', x)).run()")

    registrations = env.cmd('RG.DUMPREGISTRATIONS')
    for r in registrations:
        env.expect('RG.UNREGISTER', r[1]).equal('OK')
Example #19
def testIssue1571WithRename(env):
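    # Two indexes share a FILTER on the 'index' field; renaming a hash between the
    # idx1/idx2 prefixes and toggling that field must move the document between the
    # indexes accordingly.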
    conn = getConnectionByEnv(env)
    env.cmd('ft.create', 'idx1', 'PREFIX', '1', 'idx1', 'FILTER',
            '@index=="yes"', 'SCHEMA', 't', 'TEXT')
    env.cmd('ft.create', 'idx2', 'PREFIX', '1', 'idx2', 'FILTER',
            '@index=="yes"', 'SCHEMA', 't', 'TEXT')

    conn.execute_command('hset', 'idx1:{doc}1', 't', 'foo1', 'index', 'yes')

    env.expect('ft.search', 'idx1', 'foo*').equal(
        [1L, 'idx1:{doc}1', ['t', 'foo1', 'index', 'yes']])
    env.expect('ft.search', 'idx2', 'foo*').equal([0L])

    conn.execute_command('rename', 'idx1:{doc}1', 'idx2:{doc}1')

    env.expect('ft.search', 'idx2', 'foo*').equal(
        [1L, 'idx2:{doc}1', ['t', 'foo1', 'index', 'yes']])
    env.expect('ft.search', 'idx1', 'foo*').equal([0L])

    conn.execute_command('hset', 'idx2:{doc}1', 'index', 'no')

    env.expect('ft.search', 'idx1', 'foo*').equal([0L])
    env.expect('ft.search', 'idx2', 'foo*').equal([0L])

    conn.execute_command('rename', 'idx2:{doc}1', 'idx1:{doc}1')

    env.expect('ft.search', 'idx1', 'foo*').equal([0L])
    env.expect('ft.search', 'idx2', 'foo*').equal([0L])

    conn.execute_command('hset', 'idx1:{doc}1', 'index', 'yes')

    env.expect('ft.search', 'idx1', 'foo*').equal(
        [1L, 'idx1:{doc}1', ['t', 'foo1', 'index', 'yes']])
    env.expect('ft.search', 'idx2', 'foo*').equal([0L])
Example #20
def testNotIterator(env):
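    # Profiling a query with a negated term that matches nothing must show a NOT node
    # wrapping an EMPTY child iterator.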
    env.skipOnCluster()
    conn = getConnectionByEnv(env)
    env.cmd('FT.CONFIG', 'SET', 'MAXPREFIXEXPANSIONS', 2)
    env.cmd('FT.CONFIG', 'SET', '_PRINT_PROFILE_CLOCK', 'false')
    conn.execute_command('FT.CREATE', 'idx', 'SCHEMA', 't', 'text')
    conn.execute_command('HSET', '1', 't', 'foo')
    conn.execute_command('HSET', '2', 't', 'bar')

    # before the fix, we would not get an empty iterator
    res = [[1, '1', ['t', 'foo']],
           [['Total profile time'], ['Parsing time'],
            ['Pipeline creation time'],
            [
                'Iterators profile',
                [
                    'Type', 'INTERSECT', 'Counter', 1, 'Child iterators',
                    ['Type', 'TEXT', 'Term', 'foo', 'Counter', 1, 'Size', 1],
                    [
                        'Type', 'NOT', 'Counter', 1, 'Child iterator',
                        ['Type', 'EMPTY', 'Counter', 0]
                    ]
                ]
            ],
            [
                'Result processors profile', ['Type', 'Index', 'Counter', 1],
                ['Type', 'Scorer', 'Counter', 1],
                ['Type', 'Sorter', 'Counter', 1],
                ['Type', 'Loader', 'Counter', 1]
            ]]]

    env.expect('ft.profile', 'idx', 'search', 'query',
               'foo -@t:baz').equal(res)
Example #21
def testProfileAggregate(env):
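    # Verify the result-processors profile of FT.PROFILE AGGREGATE for a GROUPBY/REDUCE
    # pipeline and for an APPLY projector.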
    env.skipOnCluster()
    conn = getConnectionByEnv(env)
    env.cmd('FT.CONFIG', 'SET', '_PRINT_PROFILE_CLOCK', 'false')

    env.cmd('ft.create', 'idx', 'SCHEMA', 't', 'text')
    conn.execute_command('hset', '1', 't', 'hello')
    conn.execute_command('hset', '2', 't', 'world')

    expected_res = [
        'Result processors profile', ['Type', 'Index', 'Counter', 1],
        ['Type', 'Loader', 'Counter', 1], ['Type', 'Grouper', 'Counter', 1]
    ]
    actual_res = conn.execute_command('ft.profile', 'idx', 'aggregate',
                                      'query', 'hello', 'groupby', 1, '@t',
                                      'REDUCE', 'count', '0', 'as', 'sum')
    env.assertEqual(actual_res[1][4], expected_res)

    expected_res = [
        'Result processors profile', ['Type', 'Index', 'Counter', 2],
        ['Type', 'Loader', 'Counter', 2],
        ['Type', 'Projector - Function startswith', 'Counter', 2]
    ]
    actual_res = env.cmd('ft.profile', 'idx', 'aggregate', 'query', '*',
                         'load', 1, 't', 'apply', 'startswith(@t, "hel")',
                         'as', 'prefix')
    env.assertEqual(actual_res[1][4], expected_res)
Example #22
def testProfileSearchLimited(env):
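    # In LIMITED profile mode, union iterators only report how many child iterators
    # they contain instead of listing each one.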
    env.skipOnCluster()
    conn = getConnectionByEnv(env)
    env.cmd('FT.CONFIG', 'SET', '_PRINT_PROFILE_CLOCK', 'false')

    env.cmd('ft.create', 'idx', 'SCHEMA', 't', 'text')
    conn.execute_command('hset', '1', 't', 'hello')
    conn.execute_command('hset', '2', 't', 'hell')
    conn.execute_command('hset', '3', 't', 'help')
    conn.execute_command('hset', '4', 't', 'helowa')

    actual_res = conn.execute_command('ft.profile', 'idx', 'search', 'limited',
                                      'query', '%hell% hel*')
    expected_res = [
        'Iterators profile',
        [
            'Type', 'INTERSECT', 'Counter', 3, 'Child iterators',
            [
                'Type', 'UNION', 'Query type', 'FUZZY - hell', 'Counter', 3,
                'Child iterators', 'The number of iterators in the union is 3'
            ],
            [
                'Type', 'UNION', 'Query type', 'PREFIX - hel', 'Counter', 3,
                'Child iterators', 'The number of iterators in the union is 4'
            ]
        ]
    ]
    env.assertEqual(actual_res[1][3], expected_res)
Example #23
def testStreamReaderAsync(env):
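    # The StreamReader registration blocks each stream entry on a gearsFuture; the
    # 'unblock' CommandReader trigger releases the oldest pending entry, which then
    # writes its key/value pair.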
    env.skipOnCluster()
    conn = getConnectionByEnv(env)
    script = '''
fdata = []

class BlockHolder:
    def __init__(self, bc):
        self.bc = bc

    def __getstate__(self):
        state = dict(self.__dict__)
        state['bc'] = None
        return state

    def continueRun(self, r):
        if self.bc:
            self.bc.continueRun(r)

def bc(r):
    global fdata
    f = BlockHolder(gearsFuture())
    fd = (f, r)
    fdata.insert(0, fd)
    return f.bc

GB('CommandReader').map(lambda a: fdata.pop()).foreach(lambda x: x[0].continueRun(x[1])).register(trigger='unblock')
GB('StreamReader').map(bc).foreach(lambda x: execute('set', x['value']['key'], x['value']['val'])).register(mode='sync', prefix='s')

    '''

    env.expect('RG.PYEXECUTE', script).ok()

    # this will make sure registrations reached all the shards
    verifyRegistrationIntegrity(env)

    env.cmd('xadd', 's', '*', 'key', 'x', 'val', '1')
    env.cmd('xadd', 's', '*', 'key', 'y', 'val', '2')
    env.cmd('xadd', 's', '*', 'key', 'z', 'val', '3')

    try:
        with TimeLimit(50):
            env.cmd('RG.TRIGGER', 'unblock')
            x = None
            while x != '1':
                x = env.cmd('get', 'x')
                time.sleep(0.1)
            env.cmd('RG.TRIGGER', 'unblock')
            y = None
            while y != '2':
                y = env.cmd('get', 'y')
                time.sleep(0.1)
            env.cmd('RG.TRIGGER', 'unblock')
            z = None
            while z != '3':
                z = env.cmd('get', 'z')
                time.sleep(0.1)
    except Exception as e:
        env.assertTrue(False, message='Failed waiting to reach unblock')
Example #24
def testSimpleAsyncOnFilter(env):
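    # The 'block' trigger runs a filter step that blocks on a gearsFuture per record;
    # each 'unblock' trigger resolves one pending future, so repeatedly unblocking lets
    # the blocked execution finish and return its count.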
    conn = getConnectionByEnv(env)
    script = '''
fdata = []

class BlockHolder:
    def __init__(self, bc):
        self.bc = bc

    def __getstate__(self):
        state = dict(self.__dict__)
        state['bc'] = None
        return state

    def continueRun(self, r):
        if self.bc:
            self.bc.continueRun(r)

def bc(r):
    global fdata
    f = BlockHolder(gearsFuture())
    fdata.insert(0, f)
    return f.bc

def unbc(r):
    global fdata
    try:
        f = fdata.pop()
    except Exception as e:
        return 0
    if f:
        f.continueRun(True if r[1] == 'true' else False)
        return 1
    return 0

GB('CommandReader').flatmap(lambda x: execute('keys', '*')).collect().filter(bc).count().register(trigger='block')
GB('CommandReader').map(unbc).register(trigger='unblock')
    '''

    env.expect('RG.PYEXECUTE', script).ok()

    # this will make sure registrations reached all the shards
    verifyRegistrationIntegrity(env)

    conn.execute_command('set', 'x', '1')
    conn.execute_command('set', 'y', '2')
    conn.execute_command('set', 'z', '3')

    def Block():
        env.expect('RG.TRIGGER', 'block').equal(['3'])

    try:
        with Background(Block) as bk:
            with TimeLimit(50):
                while bk.isAlive:
                    conn.execute_command('RG.TRIGGER', 'unblock', 'true')
                    time.sleep(0.1)
    except Exception as e:
        env.assertTrue(False, message='Failed waiting to reach unblock')
Example #25
def testDependenciesImportSerializationError():
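    # Importing a truncated requirement payload must fail: every proper prefix of the
    # exported 'rejson' data should be rejected with an error.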
    env = Env(moduleArgs='CreateVenv 1')
    conn = getConnectionByEnv(env)
    env.expect('RG.PYEXECUTE', "import rejson", 'REQUIREMENTS', 'rejson').ok()
    md, data = env.cmd('RG.PYEXPORTREQ', 'rejson')
    data = b''.join(data)
    for i in range(len(data) - 1):
        env.expect('RG.PYIMPORTREQ', data[:i]).error()
Example #26
def testPrefix0a(env):
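    # An empty PREFIX matches every key, so a hash outside any explicit prefix is
    # still indexed and searchable.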
    conn = getConnectionByEnv(env)
    env.cmd('ft.create', 'things', 'ON', 'HASH',
            'PREFIX', '1', '',
            'SCHEMA', 'name', 'text')

    conn.execute_command('hset', 'thing:bar', 'name', 'foo')
    env.expect('ft.search', 'things', 'foo').equal([1L, 'thing:bar', ['name', 'foo']])
Example #27
def test_issue1826(env):
  # Stopword query is case sensitive.
  conn = getConnectionByEnv(env)
  conn.execute_command('FT.CREATE', 'idx', 'SCHEMA', 't', 'TEXT')
  conn.execute_command('HSET', 'doc', 't', 'boy with glasses')

  env.expect('FT.SEARCH', 'idx', 'boy with glasses').equal([1L, 'doc', ['t', 'boy with glasses']])
  env.expect('FT.SEARCH', 'idx', 'boy With glasses').equal([1L, 'doc', ['t', 'boy with glasses']])
Example #28
def test_1601(env):
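  # All three movies must match @title:(episode); 'Episodes' matches via stemming.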
  conn = getConnectionByEnv(env)
  conn.execute_command('FT.CREATE', 'idx:movie', 'SCHEMA', 'title', 'TEXT')
  conn.execute_command('HSET', 'movie:1', 'title', 'Star Wars: Episode I - The Phantom Menace')
  conn.execute_command('HSET', 'movie:2', 'title', 'Star Wars: Episodes II - Attack of the Clones')
  conn.execute_command('HSET', 'movie:3', 'title', 'Star Wars: Episode III - Revenge of the Sith')
  res = env.cmd('ft.search idx:movie @title:(episode) withscores nocontent')
  env.assertEqual(res[0], 3L)
Example #29
def testMaxAggResults(env):
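    # With MAXAGGREGATERESULTS capped at 100, an FT.AGGREGATE LIMIT above the cap must
    # be rejected with an error.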
    if env.env == 'existing-env':
        env.skip()
    env = Env(moduleArgs="MAXAGGREGATERESULTS 100")
    conn = getConnectionByEnv(env)
    env.execute_command('ft.create', 'idx', 'SCHEMA', 't', 'TEXT')
    env.expect('ft.aggregate', 'idx', '*', 'LIMIT', '0', '10000').error()   \
       .contains('LIMIT exceeds maximum of 100')
Example #30
def testDependenciesInstallFailure():
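    # Requesting a random, non-existent requirement must fail with an error that
    # mentions 'satisfy requirements'.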
    env = Env(moduleArgs='CreateVenv 1')
    conn = getConnectionByEnv(env)
    env.expect(
        'RG.PYEXECUTE', "GB('ShardsIDReader')."
        "map(lambda x: __import__('redisgraph'))."
        "collect().distinct().run()", 'REQUIREMENTS',
        str(uuid.uuid4())).error().contains('satisfy requirements')