Example 1
    def test_12_one_input_cell(self):
        """Autochunked redimension of 1-cell array should not fail"""
        self._iquery.update({'no_fetch': False})
        _, err = self._iquery('create temp array ONE<val:double>[k=0:39:4:20]')
        assert not err, err
        self._array_cleanups.append("ONE")
        # Insert one cell at k == 25.
        iquery = IQuery(afl=True, format='tsv+:l', no_fetch=True)
        _, err = iquery("""
            insert(
              redimension(
                apply(build(<val:double>[i=0:0,1,0], 3.14), k, 25),
                ONE),
            ONE)""")
        assert not err, err
        iquery.update({'format': 'tsv+', 'afl': True, 'no_fetch': False})
        out, err = iquery('redimension(ONE, <val:double>[k=0:39:3])')
        assert not err, err
        try:
            numbers = map(float, out.split())
        except ValueError:
            assert False, "Unexpected non-number in '%s'" % out
        assert len(numbers) == 2
        assert numbers[0] == 25
        assert numbers[1] == 3.14
Example 2
    def setUpClass(self):
        """Create some test data files used by all test methods."""
        print "Setup ...",
        sys.stdout.flush()
        self._iquery = IQuery(afl=True, no_fetch=True)  # -naq
        self._array_cleanups = []
        self._files = {}  # map array name to input file
        # Put all our data files in one temp directory so we can
        # easily remove them all during tearDown.
        if os.system("rm -rf {0} ; mkdir -p {0}".format(_tmpdir)):
            raise AppError("Trouble (re)creating %s" % _tmpdir)

        # Create slightly sparse 3-D input data with no collisions.
        self._files['nocoll_3d'] = os.path.join(_tmpdir, "nocoll_3d.bin")
        if boxofpoints([
                'boxofpoints',
                '--lower-corner',
                '0,0,0',
                '--upper-corner',
                '9,69,69',
                '--cells',
                '40000',  # sparse: 40000 < 10x70x70 (49000)
                '--format',
                'binary',
                '--output',
                self._files['nocoll_3d'],
                '--seed',
                '42'
        ]):
            raise AppError("box_of_points could not create %s" %
                           self._files['nocoll_3d'])

        # Create dense 2-D input data with 10% collisions.
        self._files['coll_2d'] = os.path.join(_tmpdir, "coll_2d.bin")
        if boxofpoints([
                'boxofpoints',
                '--lower-corner',
                '0,0',
                '--upper-corner',
                '49,999',
                '--cells',
                '50000',  # dense: 50,000 == 50x1000
                '--collisions',
                '0.1',  # 10% collision rate
                '--format',
                'binary',
                '--output',
                self._files['coll_2d'],
                '--seed',
                '42'
        ]):
            raise AppError("box_of_points could not create %s" %
                           self._files['coll_2d'])
        print "done"
Example 3
def main(argv=None):
    """Argument parsing and last-ditch exception handling.

    See http://www.artima.com/weblogs/viewpost.jsp?thread=4829
    """
    if argv is None:
        argv = sys.argv

    global _pgm
    _pgm = "%s:" % os.path.basename(argv[0])  # colon for easy use by print

    parser = argparse.ArgumentParser(description="The redim_autochunk_2 test.")
    parser.add_argument('-c',
                        '--host',
                        default=None,
                        help='Target host for iquery commands.')
    parser.add_argument('-p',
                        '--port',
                        default=None,
                        help='SciDB port on target host for iquery commands.')
    parser.add_argument('-r',
                        '--run-id',
                        type=int,
                        default=0,
                        help='Unique run identifier.')
    parser.add_argument('-k',
                        '--keep-arrays',
                        action='store_true',
                        help='Do not remove test arrays during cleanup.')
    parser.add_argument('-v',
                        '--verbosity',
                        default=0,
                        action='count',
                        help='Increase debug logs. 1=info, 2=debug, 3=debug+')

    global _args
    _args = parser.parse_args(argv[1:])
    IQuery.setenv(_args)

    global _tmpdir
    _tmpdir = "/tmp/redim_autochunk_2.{0}".format(_args.run_id)

    try:
        tt = TheTest()
        return tt.run()
    except AppError as e:
        print >> sys.stderr, _pgm, e
        return 1
    except Exception as e:
        print >> sys.stderr, _pgm, "Unhandled exception:", e
        traceback.print_exc()  # always want this for unexpected exceptions
        return 2
Example 4
def sync_scidb(auth_file=None, max_attempts=12, noexcept=False):
    """Run AFL sync() operator until it succeeds.

    @param auth_file authentication filename or (user, password) tuple
    @param max_attempts try once a second for this many seconds
    @param noexcept don't raise on failure, instead return False
    @returns True on success, False on failure (if noexcept)

    @note Since sync() is only available in the P4 'system' library,
          this will fail if the library is not loaded.
    """
    iquery = IQuery(auth_file=auth_file, afl=True)
    err = ''
    for _ in xrange(max_attempts):
        _, err = iquery("sync()")
        if not err:
            return True
        if "SCIDB_LE_LOGICAL_OP_DOESNT_EXIST" in err:
            if noexcept:
                return False
            raise RuntimeError("P4 'system' library is not loaded")
        else:
            time.sleep(1)
    if noexcept:
        return False
    raise RuntimeError("sync_scidb:\n%s" % err)
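
A minimal usage sketch (not part of the module above), showing how a test might call sync_scidb() while tolerating a missing P4 library; the _args.auth_file wiring and the check_disk_state() helper are illustrative assumptions.

# Hypothetical caller: wait for buffered data to reach disk before
# inspecting datastore files; skip gracefully if sync() is unavailable.
if sync_scidb(auth_file=_args.auth_file, max_attempts=30, noexcept=True):
    check_disk_state()  # hypothetical verification step, not defined here
else:
    print >> sys.stderr, "sync() unavailable or timed out; skipping disk checks"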
Example 5
def list_runner():
    """Run list('queries') repeatedly until user 'nobody' (user_id==0) is
       seen, or until our patience runs out.
    """
    iquery = IQuery(afl=True, format='tsv', host=_args.host, port=_args.port)
    try:
        for i in xrange(_args.max_trials):
            out, err = iquery(
                "project(list('queries'), user_id, user, query_string)")
            if err or iquery.returncode:
                # Cannot raise from subprocess, sadly.
                print >> sys.stderr, "List runner query failed:", err
                sys.exit(1)
            if any(x.startswith('0\t') for x in out.splitlines()):
                print "Success, user_id==0 seen after", i + 1, "trials!"
                print "Out:\n", out
                return
            time.sleep(0.5)

        # This isn't necessarily a bad thing.  The mere fact that no
        # instances crashed is a win.  Perhaps recent code changes
        # guarantee that Query objects in the Query::_queries maps now
        # *always* have a Session?  Maybe we should not exit here?
        print >> sys.stderr, """
            Exceeded --max-trials={0} .  Does the SDB-6125 race still exist?
            """.strip().format(_args.max_trials)
        sys.exit(1)

    finally:
        try:
            os.unlink(RUN_FILE)
        except:
            pass
Example 6
def process():
    """Create test array, start list and query runners, wait for done."""
    # Create test array.
    iquery = IQuery(afl=True, no_fetch=True, host=_args.host, port=_args.port)
    # Remove any leftover target array from a previous run; errors are ignored.
    _ = iquery("remove(IMAGE_CUBE_%s)" % _args.run_id)
    _, err = iquery("""
        store(build(<intensity:float>[study_id=0:30:0:1;
                                      x=0:128:0:64;
                                      y=0:128:0:64;
                                      z=0:50:0:10],
                    x*y),
              IMAGE_CUBE_{0})""".format(_args.run_id))
    assert not err and not iquery.returncode, (
        "Cannot create IMAGE_CUBE_{0}: {1}".format(_args.run_id, err))
    print "Created IMAGE_CUBE_%s" % _args.run_id

    # Spawn subprocesses.
    qproc = mp.Process(target=query_runner, args=())
    lproc = mp.Process(target=list_runner, args=())
    qproc.start()
    time.sleep(0.5)  # Give query_runner a head start.
    lproc.start()
    lproc.join()
    qproc.join()

    # Cleanup and return list runner's exit status.
    iquery("remove(IMAGE_CUBE_%s)" % _args.run_id)
    return lproc.exitcode
Example 7
def main(argv=None):
    """Argument parsing and last-ditch exception handling.

    See http://www.artima.com/weblogs/viewpost.jsp?thread=4829
    """
    if argv is None:
        argv = sys.argv

    global _pgm
    _pgm = "%s:" % os.path.basename(argv[0])  # colon for easy use by prt()

    parser = argparse.ArgumentParser(
        description="Prove that save() failures don't leak file descriptors.")
    parser.add_argument('-c',
                        '--host',
                        default=None,
                        help='Target host for iquery commands.')
    parser.add_argument('-p',
                        '--port',
                        default=None,
                        help='SciDB port on target host for iquery commands.')
    parser.add_argument('-r',
                        '--run-id',
                        type=int,
                        default=0,
                        help='Unique run identifier.')

    global _args
    _args = parser.parse_args(argv[1:])

    global _iquery
    _iquery = IQuery(afl=True, format='tsv')
    _iquery.setenv(_args)

    try:
        process()
        return 0
    except AssertionError as e:
        print >> sys.stderr, _pgm, e
        return 1
    except Exception as e:
        print >> sys.stderr, _pgm, "Unhandled exception:", e
        traceback.print_exc()  # always for unexpected exceptions
        return 2
Example 8
def main(argv=None):
    if argv is None:
        argv = sys.argv

    global _pgm
    _pgm = "%s:" % os.path.basename(argv[0])  # colon for easy use by print

    parser = argparse.ArgumentParser(
        description='The other.big_chunk test script.')
    parser.add_argument('-c',
                        '--host',
                        default='localhost',
                        help="The SciDB host address.")
    parser.add_argument('-p',
                        '--port',
                        type=int,
                        default=1239,
                        help="The TCP port for connecting to SciDB.")
    parser.add_argument('-r',
                        '--run-id',
                        type=str,
                        default="",
                        help="""
        Uniquifier (such as $HPID) to use in naming files etc.""")
    parser.add_argument('-v',
                        '--verbose',
                        default=False,
                        action='store_true',
                        help="""Print timings and full error descriptions.""")
    global _args
    _args = parser.parse_args(argv[1:])

    global _iquery
    _iquery = IQuery(afl=True, host=_args.host, port=_args.port, stderr=STDOUT)

    ElapsedTimer.enabled = _args.verbose

    global _fifo
    _fifo = '/tmp/big_chunk_fifo_%s' % _args.run_id

    try:
        os.mkfifo(_fifo)
    except OSError as e:
        log("Cannot create fifo", _fifo, "-", e)
        return 1
    try:
        return run_tests()
    finally:
        os.unlink(_fifo)
        log_mem_info()
Example 9
    def __init__(self, queries, stopOnError=False):
        super(myTestCase, self).__init__()  # Important: call the superclass constructor.
        self.registerCleaner(ArrayCleaner()) # Register the cleaner class to remove arrays
                                             # created by this test.
        self.queries = queries
        self.stopOnError = stopOnError

        try:
            dataPath = os.path.join(
                os.environ['SCIDB_DATA_PATH'],
                '000',
                '0'
                )
            # Register the cleaner that will remove any data files created by this test.
            self.registerCleaner(DataDirCleaner(dataPath))
        except:
            pass # Somehow, we could not get to the scidb data folder:
                 # we will leave some junk files in there.

        self.__iquery = IQuery(afl=True) # Iquery wrapper class.
        self.exitCode = 0 # Exit code for test harness
Example 10
def query_runner():
    """Run a query repeatedly until the RUN_FILE is removed."""
    iquery = IQuery(afl=True, format='tsv', host=_args.host, port=_args.port)
    afl = """save(filter(IMAGE_CUBE_{0},
                         study_id>=0 and study_id<=0 and
                         x>=0 and x<=127 and
                         y>=0 and y<=127 and
                         z>=2 and z<=2),
                  '/dev/null')""".format(_args.run_id)
    afl = ' '.join(afl.split())  # Prettify for nicer list('queries') output.
    loops = 0
    while True:
        loops += 1
        iquery(afl)
        try:
            os.stat(RUN_FILE)
        except OSError as e:
            if e.errno == errno.ENOENT:
                print "File", RUN_FILE, "gone after", loops, "save() queries"
                return
            else:
                print >> sys.stderr, "Cannot stat {0}: {1}".format(RUN_FILE, e)
                sys.exit(1)
Example 11
    def __init__(self, iqueryPath='iquery', debug=False, stream=None):
        self._pushedArrayNames = set([])
        self._iquery = IQuery(prog=iqueryPath, afl=True, format='tsv')
        self._pushed = False
        self._dbgWriter = DebugStreamWriter(debug, stream)
Example 12
def main(argv=None):
    """Argument parsing and last-ditch exception handling.

    See http://www.artima.com/weblogs/viewpost.jsp?thread=4829
    """
    if argv is None:
        argv = sys.argv

    global _pgm
    _pgm = "%s:" % os.path.basename(argv[0])

    parser = argparse.ArgumentParser(
        description="Unit tests for scidblib.psql_client module.")
    parser.add_argument('-c',
                        '--host',
                        default=None,
                        help='Target host for iquery commands.')
    parser.add_argument('-H',
                        '--db-host',
                        required=True,
                        help='Target host for psql commands.')
    parser.add_argument('-p',
                        '--port',
                        default=None,
                        help='SciDB port on target host for iquery commands.')
    parser.add_argument('-P', '--db-port', default=None, help="Postgres port.")
    parser.add_argument('-r',
                        '--run-id',
                        type=int,
                        default=0,
                        help='Unique run identifier.')
    parser.add_argument('-u',
                        '--db-user',
                        required=True,
                        help='Postgres role name.')
    parser.add_argument('-d',
                        '--db-name',
                        required=True,
                        help='Postgres database name.')

    global _args
    _args = parser.parse_args(argv[1:])

    global _iquery
    _iquery = IQuery(afl=True, format='tsv')
    if _args.host:
        _iquery.host = _args.host
    if _args.port:
        _iquery.port = _args.port

    global _psql
    _psql = Psql()
    _psql.user = _args.db_user
    _psql.host = _args.db_host
    if _args.db_port:
        _psql.port = _args.db_port

    try:
        tt = TheTest()
        return tt.run()
    except Exception as e:
        print >> sys.stderr, _pgm, "Unhandled exception:", e
        traceback.print_exc()  # always want this for unexpected exceptions
        return 2
Example 13
def make_table(entry_name, query, namespace=None, host=None, port=None):
    """Build a list of named tuples based on result of the given AFL query.

    @param entry_name name of type to be created by collections.namedtuple
    @param query      AFL query from whose output we will make a table
    @param namespace  use as current namespace when executing query
    @param host       host for iquery connections
    @param port       port for iquery connections

    Because the entire query result is read into memory, best to use
    this only with queries returning smallish results.

    Fields that can be converted to ints, floats, or bools are so converted.

    An example:
    >>> t = make_table('ArrayTable', "list('arrays',true)")
    >>> all_versioned_array_ids = [x.aid for x in t if x.aid != x.uaid]
    """
    # Format tsv+:l gives dimension/attribute names used for tuple attributes.
    iquery = IQuery(afl=True,
                    format='tsv+:l',
                    namespace=namespace,
                    host=host,
                    port=port)
    out, err = iquery(query)
    if err:
        raise RuntimeError(err)
    table_data = out.splitlines()
    # Sometimes SciDB gives the same label to >1 attribute; make them unique.
    attrs = []
    seen = dict()
    for label in table_data[0].split():
        if label in seen:
            seen[label] += 1
            label = '_'.join((label, str(seen[label])))
        else:
            seen[label] = 1
        attrs.append(label)

    # Attempt to convert types to Python equivalents.
    def _convert(x):
        try:
            return int(x)
        except ValueError:
            pass
        try:
            return float(x)
        except ValueError:
            pass
        xx = x.lower().strip()
        if xx == 'true':
            return True
        if xx == 'false':
            return False
        return x

    # Create our data type and fill in the table.
    tuple_type = namedtuple(entry_name, attrs)
    table = []
    for line in table_data[1:]:
        table.append(tuple_type._make(_convert(x) for x in line.split('\t')))
    return table
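
As a usage sketch (not from the original module), make_table() pairs naturally with the list('queries') projection used in the list_runner example above; the QueryRow name and the printed summary are illustrative.

# Hypothetical caller: tabulate running queries with typed fields.
rows = make_table('QueryRow',
                  "project(list('queries'), user_id, user, query_string)")
nobody = [r.query_string for r in rows if r.user_id == 0]  # int after _convert()
print len(nobody), "queries running as user_id 0"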
Example 14
def main(argv=None):
    """Argument parsing and last-ditch exception handling.

    See http://www.artima.com/weblogs/viewpost.jsp?thread=4829
    """
    if argv is None:
        argv = sys.argv

    global _pgm
    _pgm = "%s:" % os.path.basename(argv[0])  # colon for easy use by print

    parser = argparse.ArgumentParser(
        description="SciDB password and account management (SPAAM)")
    parser.add_argument('-A',
                        '--auth-file',
                        default=None,
                        help='iquery authentication file in INI format')
    parser.add_argument('-c',
                        '--host',
                        default=None,
                        help='target host for iquery commands')
    parser.add_argument('-p',
                        '--port',
                        default=None,
                        help='SciDB port on target host for iquery commands')
    parser.add_argument('--stdin',
                        default=False,
                        action='store_true',
                        help="read password from stdin without prompting")
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument('-a',
                       '--add',
                       metavar='USERNAME',
                       help='add account for username')
    group.add_argument('-m',
                       '--modify',
                       metavar='USERNAME',
                       help='modify password for username')

    global _args
    _args = parser.parse_args(argv[1:])
    assert bool(_args.add) ^ bool(_args.modify), (
        "Required mutually exclusive group doesn't work as advertised!")

    global _iquery
    _iquery = IQuery(afl=True,
                     format='tsv',
                     host=_args.host,
                     port=_args.port,
                     auth_file=_args.auth_file)

    try:
        return add_user(_args.add) if _args.add else modify_user(_args.modify)
    except AppError as e:
        print >> sys.stderr, _pgm, e
        return 1
    except KeyboardInterrupt:
        print >> sys.stderr, "Interrupt"
        return 1
    except Exception as e:
        print >> sys.stderr, _pgm, "Unhandled exception:", e
        traceback.print_exc()  # always want this for unexpected exceptions
        return 2
Example 15
class TheTest(SimpleTestRunner):
    def setUpClass(self):
        """Create some test data files used by all test methods."""
        print "Setup ...",
        sys.stdout.flush()
        self._iquery = IQuery(afl=True, no_fetch=True)  # -naq
        self._array_cleanups = []
        self._files = {}  # map array name to input file
        # Put all our data files in one temp directory so we can
        # easily remove them all during tearDown.
        if os.system("rm -rf {0} ; mkdir -p {0}".format(_tmpdir)):
            raise AppError("Trouble (re)creating %s" % _tmpdir)

        # Create slightly sparse 3-D input data with no collisions.
        self._files['nocoll_3d'] = os.path.join(_tmpdir, "nocoll_3d.bin")
        if boxofpoints([
                'boxofpoints',
                '--lower-corner',
                '0,0,0',
                '--upper-corner',
                '9,69,69',
                '--cells',
                '40000',  # sparse: 40000 < 10x70x70 (49000)
                '--format',
                'binary',
                '--output',
                self._files['nocoll_3d'],
                '--seed',
                '42'
        ]):
            raise AppError("box_of_points could not create %s" %
                           self._files['nocoll_3d'])

        # Create dense 2-D input data with 10% collisions.
        self._files['coll_2d'] = os.path.join(_tmpdir, "coll_2d.bin")
        if boxofpoints([
                'boxofpoints',
                '--lower-corner',
                '0,0',
                '--upper-corner',
                '49,999',
                '--cells',
                '50000',  # dense: 50,000 == 50x1000
                '--collisions',
                '0.1',  # 10% collision rate
                '--format',
                'binary',
                '--output',
                self._files['coll_2d'],
                '--seed',
                '42'
        ]):
            raise AppError("box_of_points could not create %s" %
                           self._files['coll_2d'])
        print "done"

    def tearDownClass(self):
        print "Teardown ...",
        sys.stdout.flush()
        if not _args.keep_arrays:
            if os.system("rm -rf {0}".format(_tmpdir)):
                raise AppError("Trouble cleaning up %s" % _tmpdir)
            for a in self._array_cleanups:
                self._iquery("remove(%s)" % a)
        print "done"

    def test_00_load_3d_ac(self):
        """Load 3-D no-collision data using autochunking"""
        dims = "x,y,z"  # Autochunked!
        query = """
            store(
              redimension(
                input(<v:int64,x:int64,y:int64,z:int64>[dummy], '{0}',
                      -2, '(int64,int64,int64,int64)'),
                <v:int64>[{1}]),
              {2}) """.format(self._files['nocoll_3d'], dims, "nocoll_3d_ac")
        _, err = self._iquery(query)
        assert not err, err
        self._array_cleanups.append("nocoll_3d_ac")

    def test_01_load_3d_concrete(self):
        """Load 3-D no-collisions data with specified chunks"""
        # older dim syntax: backward compat
        dims = "x=0:*,10,0,y=0:*,100,0,z=0:*,100,0"
        query = """
            store(
              redimension(
                input(<v:int64,x:int64,y:int64,z:int64>[dummy], '{0}',
                      -2, '(int64,int64,int64,int64)'),
                <v:int64>[{1}]),
              {2}) """.format(self._files['nocoll_3d'], dims, "nocoll_3d")
        _, err = self._iquery(query)
        assert not err, err
        self._array_cleanups.append("nocoll_3d")

    def test_02_nocoll_3d_counts_and_sums(self):
        """Compare 3-D array counts and sums"""
        self._iquery.update({'format': 'tsv', 'no_fetch': False})
        out1, err1 = self._iquery('aggregate(nocoll_3d_ac,count(*),sum(v))')
        assert not err1, err1
        out2, err2 = self._iquery('aggregate(nocoll_3d,count(*),sum(v))')
        assert not err2, err2
        c1, s1 = map(int, out1.split())
        c2, s2 = map(int, out2.split())
        assert c1 == c2, "counts differ"
        assert s1 == s2, "sums differ"

    def test_03_nocoll_3d_check_values(self):
        """Cell-by-cell value comparison for 3-D arrays"""
        self._iquery.update({'format': 'tsv+', 'no_fetch': False})
        out, err = self._iquery("""filter(join(nocoll_3d,nocoll_3d_ac),
                                          nocoll_3d.v <> nocoll_3d_ac.v)""")
        assert not err, err
        assert out == '', "Cell values differ:\n\t{0}".format(out)

    def test_04_load_2d_ac_w_collisions(self):
        """Load 2-D data containing collisions using autochunking"""
        dims = "x=0:*; y=0:*; synth=0:*"
        query = """
            store(
              redimension(
                input(<v:int64,x:int64,y:int64>[dummy], '{0}',
                      -2, '(int64,int64,int64)'),
                <v:int64>[{1}]),
              {2}) """.format(self._files['coll_2d'], dims, "coll_2d_ac")
        self._iquery.no_fetch = True
        _, err = self._iquery(query)
        assert not err, err
        self._array_cleanups.append("coll_2d_ac")

    def test_05_load_2d_concrete_w_collisions(self):
        """Load 2-D data containing collisions with specified chunks"""
        dims = "x=0:*:0:100; y=0:*:0:100; synth=0:9:0:10"
        query = """
            store(
              redimension(
                input(<v:int64,x:int64,y:int64>[dummy], '{0}',
                      -2, '(int64,int64,int64)'),
                <v:int64>[{1}]),
              {2}) """.format(self._files['coll_2d'], dims, "coll_2d")
        self._iquery.no_fetch = True
        _, err = self._iquery(query)
        assert not err, err
        self._array_cleanups.append("coll_2d")

    def test_06_coll_2d_counts_and_sums(self):
        """Compare 2-D array counts and sums"""
        self._iquery.update((('format', 'tsv'), ('no_fetch', False)))
        out1, err1 = self._iquery('aggregate(coll_2d_ac,count(*),sum(v))')
        assert not err1, err1
        out2, err2 = self._iquery('aggregate(coll_2d,count(*),sum(v))')
        assert not err2, err2
        c1, s1 = map(int, out1.split())
        c2, s2 = map(int, out2.split())
        assert c1 == c2, "counts differ"
        assert s1 == s2, "sums differ"

    def test_07_coll_2d_check_values(self):
        """Cell-by-cell value comparison for 2-D arrays

        This test is complicated by the fact that with different chunk
        intervals, redimension() does not produce synthetic dimension
        siblings in any particular order.  So we must process the
        filtered list of differing cells, and only complain if the
        *set* of values along the synthetic dimension at [x,y,*]
        differs for the two arrays.  For example, if the two arrays
        held

            {x,y,synth} v           {x,y,synth} v
            {2,7,0} 20              {2,7,0} 20
            {2,7,1} 73              {2,7,1} 99
            {2,7,2} 99              {2,7,2} 73

        that is perfectly fine.
        """
        tbl = make_table(
            'CellDiffs', """
            filter(join(coll_2d,coll_2d_ac), coll_2d.v <> coll_2d_ac.v)
            """)
        v_xy_sets = defaultdict(set)
        v2_xy_sets = defaultdict(set)
        for celldiff in tbl:
            key = (int(celldiff.x), int(celldiff.y))
            v_xy_sets[key].add(int(celldiff.v))
            v2_xy_sets[key].add(int(celldiff.v_2))
        assert len(v_xy_sets) == len(v2_xy_sets)
        for xy in v_xy_sets:
            assert v_xy_sets[xy] == v2_xy_sets[xy], \
                "Synthetic dimension trouble at {0}".format(xy)

    def test_08_load_3d_ac_w_overlap(self):
        """Load 3-D no-collision data using autochunking and overlaps."""
        dims = "x=0:*:2; y=0:*:3; z=0:*"  # Autochunked!
        query = """
            store(
              redimension(
                input(<v:int64,x:int64,y:int64,z:int64>[dummy], '{0}',
                      -2, '(int64,int64,int64,int64)'),
                <v:int64>[{1}]),
              {2}) """.format(self._files['nocoll_3d'], dims,
                              "nocoll_3d_ac_ol")
        _, err = self._iquery('-naq', query)
        assert not err, err
        self._array_cleanups.append("nocoll_3d_ac_ol")

    def test_09_nocoll_3d_overlap_counts_and_sums(self):
        """Compare 3-D array counts and sums (overlap)"""
        out1, err1 = self._iquery(
            '-otsv', '-aq', 'aggregate(nocoll_3d_ac_ol,count(*),sum(v))')
        assert not err1, err1
        out2, err2 = self._iquery('-otsv', '-aq',
                                  'aggregate(nocoll_3d,count(*),sum(v))')
        assert not err2, err2
        c1, s1 = map(int, out1.split())
        c2, s2 = map(int, out2.split())
        assert c1 == c2, "counts differ"
        assert s1 == s2, "sums differ"

    def test_10_nocoll_3d_overlap_check_values(self):
        """Cell-by-cell value comparison for 3-D arrays (overlap)"""
        self._iquery.update({'format': 'tsv+', 'quiet': False})
        out, err = self._iquery("""
            filter(join(nocoll_3d, nocoll_3d_ac_ol),
                        nocoll_3d.v <> nocoll_3d_ac_ol.v)
            """)
        assert not err, err
        assert out == '', "Cell values differ:\n\t{0}".format(out)

    def test_11_empty_input(self):
        """Autochunked redimension of empty array should not fail (SDB-5109)"""
        out, err = self._iquery(
            'create temp array empty<val:double>[k=0:39:4:20]', quiet=False)
        assert not err, err
        self._array_cleanups.append("empty")
        out, err = self._iquery('redimension(empty, <val:double>[k=0:39:3])',
                                format='tsv+',
                                no_fetch=False)
        assert not err, err
        assert not out, "Redim of empty array is not empty: '%s'" % out

    def test_12_one_input_cell(self):
        """Autochunked redimension of 1-cell array should not fail"""
        self._iquery.update({'no_fetch': False})
        _, err = self._iquery('create temp array ONE<val:double>[k=0:39:4:20]')
        assert not err, err
        self._array_cleanups.append("ONE")
        # Insert one cell at k == 25.
        iquery = IQuery(afl=True, format='tsv+:l', no_fetch=True)
        _, err = iquery("""
            insert(
              redimension(
                apply(build(<val:double>[i=0:0,1,0], 3.14), k, 25),
                ONE),
            ONE)""")
        assert not err, err
        iquery.update({'format': 'tsv+', 'afl': True, 'no_fetch': False})
        out, err = iquery('redimension(ONE, <val:double>[k=0:39:3])')
        assert not err, err
        try:
            numbers = map(float, out.split())
        except ValueError:
            assert False, "Unexpected non-number in '%s'" % out
        assert len(numbers) == 2
        assert numbers[0] == 25
        assert numbers[1] == 3.14
Example 16
The other.insert_08 test also tests this, but I wanted to do some
additional tests with more complex queries (placing arbitrary "grids"
into the sparse unbounded input), and writing them on one line would
be yucky.
"""

import argparse
import sys
from subprocess import STDOUT

from t_other_utils import ok, fail, make_grid
from scidblib.iquery_client import IQuery

_args = None
_iquery = IQuery(afl=True, stderr=STDOUT)


def main(argv=None):
    if argv is None:
        argv = sys.argv

    global _args
    parser = argparse.ArgumentParser(description='The insert_09 test script.')
    parser.add_argument('-c',
                        '--host',
                        default='localhost',
                        help="The SciDB host address.")
    parser.add_argument('-p',
                        '--port',
                        type=int,
Example 17
def main(argv=None):
    if argv is None:
        argv = sys.argv

    global _args
    parser = argparse.ArgumentParser(
        description='The store_01 test script.')
    parser.add_argument('-c', '--host', default='localhost',
                        help="The SciDB host address.")
    parser.add_argument('-p', '--port', type=int, default=1239,
                        help="The TCP port for connecting to SciDB.")
    parser.add_argument('-r', '--run-id', default="", help="""
        Uniquifier (such as $HPID) to use in naming files etc.""")
    parser.add_argument('-t', '--use-temp-array', default=False,
                        action='store_true', help="""
        Use a temporary array as the bounded target.""")
    parser.add_argument('-v', '--verbose', default=False, action='store_true',
                        help="""Print timings and full error descriptions.""")
    _args = parser.parse_args(argv[1:])

    iquery = IQuery(host=_args.host, port=_args.port,
                    afl=True, stderr=STDOUT, no_fetch=True)

    BOUNDED_SCHEMA = "<value:int64>[row=0:499,100,0,col=0:99,100,0]"
    BOUNDED_ARRAY = "bounded_%s" % _args.run_id
    STORE_QUERY = 'store(%s, {0})'.format(BOUNDED_ARRAY)
    # Temp arrays take a different code path where, for historical
    # reasons I guess, the short error code is different.  Storing SG
    # takes yet another path. So, we will just ignore the short error.
    # The semantic meaning is in the long error anyway.  In verbose
    # mode the entire error string can be examined.
    LONG_ERROR = "SCIDB_LE_CHUNK_OUT_OF_BOUNDARIES"

    print 'Create%s bounded array.' % (
        ' temporary' if _args.use_temp_array else '')
    print iquery('create%s array %s %s' % (
        ' temp' if _args.use_temp_array else '',
        BOUNDED_ARRAY,
        BOUNDED_SCHEMA))[0]

    fails = 0
    quiet = not _args.verbose
    if quiet:
        ElapsedTimer.enabled = False

    print '\nEasy store...'
    with ElapsedTimer():
        fails += ok(iquery(STORE_QUERY % make_grid(10, 10, 60, 30))[0])

    print '\nRight up against the row limit...'
    with ElapsedTimer():
        fails += ok(iquery(STORE_QUERY % make_grid(450, 10, 499, 20))[0])

    print '\nOne step over the line...'
    with ElapsedTimer():
        fails += fail(iquery(STORE_QUERY % make_grid(450, 10, 500, 20))[0],
                      LONG_ERROR,
                      quiet)

    print '\nWay over the line...'
    with ElapsedTimer():
        fails += fail(iquery(STORE_QUERY % make_grid(480, 10, 520, 20))[0],
                      LONG_ERROR,
                      quiet)

    print '\nRight up against the column limit...'
    with ElapsedTimer():
        fails += ok(iquery(STORE_QUERY % make_grid(10, 80, 50, 99))[0])

    print '\nOne step over the column limit...'
    with ElapsedTimer():
        fails += fail(iquery(STORE_QUERY % make_grid(10, 80, 50, 100))[0],
                      LONG_ERROR,
                      quiet)

    print '\nPartially over both limits...'
    with ElapsedTimer():
        fails += fail(iquery(STORE_QUERY % make_grid(480, 95, 500, 100))[0],
                      LONG_ERROR,
                      quiet)

    print '\nWay over both limits...'
    with ElapsedTimer():
        fails += fail(iquery(STORE_QUERY % make_grid(510, 120, 530, 140))[0],
                      LONG_ERROR,
                      quiet)

    print "\nCleanup."
    with ElapsedTimer():
        iquery('remove(%s)' % BOUNDED_ARRAY)

    if fails:
        print fails, "test case failures"
    else:
        print "All test cases passed."

    # By returning 0 even for failure, we prefer a FILES_DIFFER error
    # to an EXECUTOR_FAILED error.  Seems slightly more accurate.
    return 0