Example #1
0
 def test_rules_query__267(self):
     """Regression test for PRC issue #267: delayed rule executions are
     queryable as RuleExec objects.

     Schedules a delayed rule that would log a unique marker string, then
     queries the rule-execution catalog for that marker and removes the
     queued rule by id.
     """
     # Unique marker embedded in the rule body so the catalog query below
     # finds exactly this rule execution.
     unique = "Testing prc #267: queryable rule objects"
     with NamedTemporaryFile(mode='w') as rfile:
         # '{{'/'}}' are literal braces surviving str.format(); only
         # {unique} is substituted.  The <EF>1m</EF> tag appears to set a
         # 1-minute execution frequency -- NOTE(review): confirm against
         # iRODS delay-rule syntax.
         rfile.write("""f() {{ delay('<EF>1m</EF>') {{ writeLine('serverLog','{unique}') }} }}\n"""
                     """OUTPUT null\n""".format(**locals()))
         rfile.flush()
         # create a delayed rule we can query against
         myrule = Rule(self.sess, rule_file = rfile.name)
         myrule.execute()
     # Look up the scheduled execution by frequency and the unique marker.
     qu = self.sess.query(RuleExec.id).filter( Like(RuleExec.frequency,'%1m%'),
                                               Like(RuleExec.name, '%{unique}%'.format(**locals())) )
     results = [row for row in qu]
     self.assertEqual(1, len(results))
     # Clean up: remove the queued rule so it does not linger and fire.
     if results:
         Rule(self.sess).remove_by_id( results[0][RuleExec.id] )
    def test_query_with_like_condition(self):
        """Query resources by an SQL-style LIKE pattern.

        Equivalent to:
        iquest "select RESC_NAME where RESC_NAME like 'dem%'"
        """
        like_query = self.sess.query(Resource).filter(Like(Resource.name, 'dem%'))
        matching_names = []
        for row in like_query:
            matching_names.append(row[Resource.name])
        self.assertIn('demoResc', matching_names)
Example #3
0
    def remote_trees_stats(self, dirs):
        """Collect data-object statistics for each remote directory tree.

        For every path in ``dirs``, counts the data objects directly in
        the collection and in all collections below it, summing their
        sizes.

        Returns a tuple ``(total_files, total_size, per_dir_size)`` where
        ``per_dir_size`` maps each input path to the byte total of its tree.
        """
        total_files = 0
        total_size = 0
        per_dir_size = {}

        for directory in dirs:
            # need to keep column 'collection_id' to avoid 'distinct' clause on
            # recursive queries
            base_query = self.session.query(DataObject.collection_id,
                                            DataObject.name, DataObject.size)

            tree_size = 0
            # First pass matches the collection itself; second pass uses a
            # LIKE pattern to match everything underneath it.
            sub_queries = (
                base_query.filter(Collection.name == directory),
                base_query.filter(Like(Collection.name,
                                       self.join(directory, '%'))),
            )
            for sub_query in sub_queries:
                for row in sub_query.get_results():
                    total_files += 1
                    tree_size += int(row[DataObject.size])

            per_dir_size[directory] = tree_size
            total_size += tree_size

        return total_files, total_size, per_dir_size
Example #4
0
 def test_query_order_by_col_not_in_result__183(self):
     """Regression test for issue #183: ordering a query by a column
     (Collection.id) that is not among the selected result columns.

     Two equal-sized collections are created; their data objects are
     queried ordered by collection id, and the result must come back
     grouped per collection, collections in id order.
     """
     test_collection_size = 8
     test_collection_path = '/{0}/home/{1}/testcoln_for_col_not_in_result'.format(
         self.sess.zone, self.sess.username)
     c1 = c2 = None
     try:
         c1 = helpers.make_test_collection(self.sess,
                                           test_collection_path + "1",
                                           obj_count=test_collection_size)
         c2 = helpers.make_test_collection(self.sess,
                                           test_collection_path + "2",
                                           obj_count=test_collection_size)
         # Expected ids: one sorted id-list per collection, the two lists
         # ordered by the collections' own ids.
         d12 = [
             sorted([d.id for d in c.data_objects])
             for c in sorted((c1, c2), key=lambda c: c.id)
         ]
         # The trailing '_' wildcard matches both '...1' and '...2';
         # order_by uses Collection.id, which is NOT a selected column.
         query = self.sess.query(DataObject).filter(
             Like(Collection.name,
                  test_collection_path + "_")).order_by(Collection.id)
         q12 = list(map(lambda res: res[DataObject.id], query))
         # First half of the results must be the first collection's
         # objects, second half the other's (each half compared sorted).
         self.assertTrue(d12[0] +
                         d12[1] == sorted(q12[:test_collection_size]) +
                         sorted(q12[test_collection_size:]))
     finally:
         # Best-effort cleanup of whichever collections were created.
         if c1: c1.remove(recurse=True, force=True)
         if c2: c2.remove(recurse=True, force=True)
Example #5
0
    def test_set_inherit_acl_depth_test(self):
        """Verify that the 'inherit' ACL propagates to sub-collections only
        when set with recursive=True.

        Builds a DEPTH-level collection tree for each recursion setting,
        applies 'inherit' at the top, and checks inheritance flags on the
        top collection and every sub-collection.
        """
        DEPTH = 3  # But test is valid for any DEPTH > 1
        for recursionTruth in (True, False):
            deepcoll = None
            try:
                test_coll_path = self.coll_path + "/test"
                deepcoll = helpers.make_deep_collection(self.sess,
                                                        test_coll_path,
                                                        depth=DEPTH,
                                                        objects_per_level=2)
                acl1 = iRODSAccess('inherit', deepcoll.path)
                self.sess.permissions.set(acl1, recursive=recursionTruth)
                # All strict sub-collections of the tree; the '/%' pattern
                # excludes the top-level collection itself.
                test_subcolls = set(
                    iRODSCollection(self.sess.collections, _)
                    for _ in self.sess.query(Collection).filter(
                        Like(Collection.name, deepcoll.path + "/%")))

                # assert top level collection affected
                test_coll = self.sess.collections.get(test_coll_path)
                self.assertTrue(test_coll.inheritance)
                #
                # assert lower level collections affected only for case when recursive = True
                subcoll_truths = [(_.inheritance == recursionTruth)
                                  for _ in test_subcolls]
                self.assertEqual(len(subcoll_truths), DEPTH - 1)
                self.assertTrue(all(subcoll_truths))
            finally:
                # Remove the tree even if an assertion failed.
                if deepcoll: deepcoll.remove(force=True, recurse=True)
Example #6
0
 def query_collection_stats(self, full_path):
     """Return (total_bytes, object_count) for all data objects stored at
     or below ``full_path``."""
     stats_query = (self.session.query(DataObject.size)
                    .filter(Like(Collection.name, f'{full_path}%'))
                    .count(DataObject.id)
                    .sum(DataObject.size))
     # An aggregate query yields exactly one row.
     row = next(iter(stats_query))
     total_bytes = row[DataObject.size]
     if total_bytes is None:
         # SUM over zero rows comes back as None.
         total_bytes = 0
     return total_bytes, row[DataObject.id]
Example #7
0
    def test_set_inherit_and_test_sub_objects (self):
        """Verify 'inherit' ACL semantics for objects created AFTER the ACL
        is applied.

        A secondary user ('bob') is granted non-recursive read on a deep
        collection plus 'inherit'; bob must then see (and read) only the
        data object and collection created after the ACLs were set, not
        the pre-existing tree contents.
        """
        DEPTH = 3
        OBJ_PER_LVL = 1
        deepcoll = user = None
        test_coll_path = self.coll_path + "/test"
        try:
            deepcoll = helpers.make_deep_collection(self.sess, test_coll_path, object_content = 'arbitrary',
                                                    depth=DEPTH, objects_per_level=OBJ_PER_LVL)
            user = self.sess.users.create('bob','rodsuser')
            user.modify ('password','bpass')

            acl_inherit = iRODSAccess('inherit', deepcoll.path)
            acl_read = iRODSAccess('read', deepcoll.path, 'bob')

            # Note: neither ACL is applied recursively.
            self.sess.permissions.set(acl_read)
            self.sess.permissions.set(acl_inherit)

            # create one new object and one new collection *after* ACL's are applied
            new_object_path = test_coll_path + "/my_data_obj"
            with self.sess.data_objects.open( new_object_path ,'w') as f: f.write(b'some_content')

            new_collection_path = test_coll_path + "/my_colln_obj"
            new_collection = self.sess.collections.create( new_collection_path )

            # Ids of the whole tree (top collection and everything below).
            coll_IDs = [c[Collection.id] for c in
                            self.sess.query(Collection.id).filter(Like(Collection.name , deepcoll.path + "%"))]

            # As the admin (rods) user: all data objects in the tree.
            D_rods = list(self.sess.query(Collection.name,DataObject.name).filter(
                                                                          In(DataObject.collection_id, coll_IDs )))

            self.assertEqual (len(D_rods), OBJ_PER_LVL*DEPTH+1) # counts the 'older' objects plus one new object

            # NOTE(review): user/password look redacted by the scraper;
            # presumably the original passed 'bob'/'bpass' created above.
            with iRODSSession (port=self.sess.port, zone=self.sess.zone, host=self.sess.host,
                               user='******', password='******') as bob:

                D = list(bob.query(Collection.name,DataObject.name).filter(
                                                                    In(DataObject.collection_id, coll_IDs )))

                # - bob should only see the new data object, but none existing before ACLs were applied

                self.assertEqual( len(D), 1 )
                D_names = [_[Collection.name] + "/" + _[DataObject.name] for _ in D]
                self.assertEqual( D[0][DataObject.name], 'my_data_obj' )

                # - bob should be able to read the new data object

                with bob.data_objects.get(D_names[0]).open('r') as f:
                    self.assertGreater( len(f.read()), 0)

                C = list(bob.query(Collection).filter( In(Collection.id, coll_IDs )))
                self.assertEqual( len(C), 2 ) # query should return only the top-level and newly created collections
                self.assertEqual( sorted([c[Collection.name] for c in C]),
                                  sorted([new_collection.path, deepcoll.path]) )
        finally:
            # Remove the test user and the whole collection tree.
            if user: user.remove()
            if deepcoll: deepcoll.remove(force = True, recurse = True)
Example #8
0
def search_collection(q, config=None):
    """Search iRODS collections whose name contains ``q['value']``.

    :param q: dict with a 'value' key holding the substring to match.
    :param config: optional session configuration passed to new_session();
        defaults to an empty configuration.
    :return: list of dicts with 'type', 'id' and 'name' for each match.
    """
    # Use None as the default: a mutable {} default is created once and
    # shared (and mutable) across all calls.
    if config is None:
        config = {}
    print("Search::Collection", q)
    with new_session(config) as session:
        query = session.query(Collection, CollectionMeta) \
            .filter(Like(Collection.name, '%{}%'.format(q['value']))) \
            .add_keyword('zone', 'seq')

        return [{
            "type": iRODSCollection,
            "id": result[Collection.id],
            "name": result[Collection.name]
        } for result in query]
Example #9
0
def rm_usage_and_count_remaining(dataObjectToken, rescname):
    """Remove one usage AVU from a resource and count remaining usages.

    Deletes the (IRODS_RESC_ACQUIRE_KEY, dataObjectToken) metadata entry
    from ``rescname``, then counts how many acquire-tokens for the same
    data-object path are still registered on that resource.

    :param dataObjectToken: string of the form '<path>--<token>'.
    :param rescname: name of the iRODS resource.
    :return: number of remaining usage entries for the same path.
    """
    sess = session_object()
    resc = sess.resources.get(rescname)
    resc.metadata.remove(iRODSMeta(IRODS_RESC_ACQUIRE_KEY, dataObjectToken))

    (dataobj_path, token) = dataObjectToken.rsplit('--', 1)
    # Escape LIKE wildcards occurring literally in the path so the pattern
    # matches the exact path.  (Previously only '_' was escaped; a literal
    # '%' in the path could cause false matches.)
    dataobj_path = dataobj_path.replace('_', r'\_').replace('%', r'\%')
    # Match any token of the same length via the single-char wildcard '_'.
    token = '_' * len(token)

    usage_query = sess.query(ResourceMeta).filter(
        Resource.name == rescname).filter(
            ResourceMeta.name == IRODS_RESC_ACQUIRE_KEY).filter(
                Like(ResourceMeta.value, dataobj_path + '--' + token))

    return len(usage_query.all())
Example #10
0
 def collections_in_root(self):
     """Yield (Collection.id, Collection.name) rows for every collection on
     the root resource; if a root collection is configured, restrict the
     results to that collection and everything beneath it."""
     def resource_query():
         # Base query: collections stored on the configured root resource.
         return (self.session.query(Collection.id, Collection.name)
                 .filter(Resource.id == self.root[Resource.id]))

     if self.root_collection is None:
         return resource_query().get_results()

     # Exact match for the configured root collection itself.
     exact = (resource_query()
              .filter(Collection.name == self.root_collection)
              .get_results())
     # All of its descendants.  NOTE: "/%%" is a plain string literal (no
     # %-formatting here), i.e. two consecutive LIKE wildcards, which match
     # the same strings as a single '%'.
     descendants = (resource_query()
                    .filter(Like(Collection.name, self.root_collection + "/%%"))
                    .get_results())
     return chain(exact, descendants)
Example #11
0
def search_data_object_metadata(q, config=None):
    """Find data objects whose metadata matches every (name, value) pair.

    :param q: dict mapping metadata attribute names to required values;
        all pairs must match (conditions are ANDed).
    :param config: optional session configuration passed to new_session();
        defaults to an empty configuration.
    :return: list of dicts describing each matching data object.
    """
    # Use None as the default: a mutable {} default is created once and
    # shared (and mutable) across all calls.
    if config is None:
        config = {}
    print("Search::DataObjectMeta", q)
    with new_session(config) as session:
        query = session.query(DataObject, Collection.name) \
            .filter(Like(DataObject.path, "/irods-seq-sr%")) \
            .add_keyword('zone', 'seq')

        print("Searching for", q)

        # Each AVU pair narrows the query further.
        for k in q:
            query = query.filter(Criterion('=', DataObjectMeta.name, k)) \
                .filter(Criterion('=', DataObjectMeta.value, q[k]))

        return [{
            "type": iRODSDataObject,
            "id": result[DataObject.id],
            "name": result[DataObject.name],
            "size": sizeof_fmt(result[DataObject.size]),
            "modified": result[DataObject.modify_time].isoformat(),
            "path": "{}/{}".format(result[Collection.name], result[DataObject.name])
        } for result in query]
Example #12
0
 def test_multiple_criteria_on_one_column_name(self):
     """Verify that multiple filters on the same column (DataObject.name)
     are all applied (ANDed) rather than overwriting one another."""
     collection = self.coll_path
     filename = 'test_multiple_AVU_joins'
     # Restored the '{filename}' placeholder: the literal '(unknown)' (a
     # scrubbed artifact) made the 'filename + -dummy8' exclusion filter
     # below never match any created object.
     file_path = '{collection}/{filename}'.format(**locals())
     objects = []
     nobj = 0
     for x in range(3, 9):
         nobj += 2
         # One 'real' object and one decoy object per iteration.
         obj1 = helpers.make_object(self.sess, file_path + '-{}'.format(x))
         obj2 = helpers.make_object(self.sess, file_path + '-dummy{}'.format(x))
         objects.extend([obj1, obj2])
     self.assertTrue(nobj > 0 and len(objects) == nobj)
     q = self.sess.query(Collection, DataObject)
     # Expected matches, computed client-side: names ending in
     # '-dummy<digit>' for any digit except 8.
     dummy_test = [d for d in q if d[DataObject.name][-1:] != '8'
                               and d[DataObject.name][-7:-1] == '-dummy' ]
     self.assertTrue(len(dummy_test) > 0)
     # Three filters on overlapping columns; all must take effect.
     q = q.filter(Like(DataObject.name, '%-dummy_')).\
            filter(Collection.name == collection).\
            filter(DataObject.name != (filename + '-dummy8'))
     results = [r[DataObject.name] for r in q]
     self.assertTrue(len(results) == len(dummy_test))
Example #13
0
def search_data_object(q, config=None):
    """Search data objects whose name contains ``q['value']``.

    :param q: dict with a 'value' key holding the substring to match.
    :param config: optional session configuration passed to new_session();
        defaults to an empty configuration.
    :return: dict with 'id', 'name', 'count' and the matches in 'children'.
    """
    # Use None as the default: a mutable {} default is created once and
    # shared (and mutable) across all calls.
    if config is None:
        config = {}
    print("Search::DataObject", q)
    with new_session(config) as session:
        # BUG FIX: the name condition used '=' with a '%...%' pattern;
        # '=' compares exactly (the wildcards never match), so the search
        # always came back empty.  'like' performs the intended pattern
        # match, consistent with the Like() filter on DataObject.path.
        query = session.query(DataObject, Collection.name) \
            .filter(Like(DataObject.path, "/irods-seq-sr%")) \
            .filter(Criterion('like', DataObject.name, '%{}%'.format(q['value']))) \
            .add_keyword('zone', 'seq')

        results = [{
            "type": iRODSDataObject,
            "id": result[DataObject.id],
            "name": result[DataObject.name],
            "size": sizeof_fmt(result[DataObject.size]),
            "modified": result[DataObject.modify_time].isoformat(),
            "path": "{}/{}".format(result[Collection.name], result[DataObject.name])
        } for result in query]

        return {
            "id": "search-result",
            "name": q['value'],
            "count": len(results),
            "children": results
        }
Example #14
0
    def test_simultaneous_multiple_AVU_joins(self):
        """Exercise repeated filters on DataObjectMeta columns, verifying
        that multiple AVU conditions join correctly rather than one filter
        stomping the previous one."""
        objects = []
        decoys = []
        try:
            collection = self.coll_path
            filename = 'test_multiple_AVU_joins'
            # Restored the '{filename}' placeholder: the literal
            # '(unknown)' was a scrubbed artifact and did not correspond
            # to the `filename` variable defined above.
            file_path = '{collection}/{filename}'.format(**locals())
            for x in range(3, 9):
                obj = helpers.make_object(self.sess, file_path +
                                          '-{}'.format(x))  # with metadata
                objects.append(obj)
                obj.metadata.add('A_meta', '1{}'.format(x))
                obj.metadata.add('B_meta', '2{}'.format(x))
                decoys.append(
                    helpers.make_object(
                        self.sess,
                        file_path + '-dummy{}'.format(x)))  # without metadata
            self.assertTrue(len(objects) > 0)

            # -- test simple repeat of same column --
            q = self.sess.query(DataObject,DataObjectMeta).\
                                            filter(DataObjectMeta.name == 'A_meta', DataObjectMeta.value <  '20').\
                                            filter(DataObjectMeta.name == 'B_meta', DataObjectMeta.value >= '20')
            self.assertTrue(rows_returned(q) == len(objects))

            # -- test no-stomp of previous filter --
            self.assertTrue(('B_meta',
                             '28') in [(x.name, x.value)
                                       for x in objects[-1].metadata.items()])
            q = self.sess.query(DataObject,DataObjectMeta).\
                                            filter(DataObjectMeta.name == 'B_meta').filter(DataObjectMeta.value < '28').\
                                            filter(DataObjectMeta.name == 'B_meta').filter(Like(DataObjectMeta.value, '2_'))
            self.assertTrue(rows_returned(q) == len(objects) - 1)

            # -- test multiple AVU's by same attribute name --
            objects[-1].metadata.add('B_meta', '29')
            q = self.sess.query(DataObject,DataObjectMeta).\
                                            filter(DataObjectMeta.name == 'B_meta').filter(DataObjectMeta.value == '28').\
                                            filter(DataObjectMeta.name == 'B_meta').filter(DataObjectMeta.value == '29')
            self.assertTrue(rows_returned(q) == 1)
        finally:
            # Remove every created object (and any now-orphaned metadata).
            for x in (objects + decoys):
                x.unlink(force=True)
            helpers.remove_unused_metadata(self.sess)
Example #15
0
def import_files_from_irods(task_id, password):
    """Async-task body: scan an iRODS tree and import its file records.

    Loads the AsyncTask and its import attempt, walks the configured
    iRODS root collection breadth-first, records every data object as a
    File, then builds the local file database and a DB backup.  Any
    failure marks the task as failed instead of raising.

    :param task_id: primary key of the AsyncTask driving this import.
    :param password: iRODS password for the attempt's configured user.
    """
    task = AsyncTask.objects.get(id=task_id)
    attempt = task.import_attempt
    file_objects = []

    try:
        directory = ImportedDirectory(name=attempt.irods_name,
                                      directory_type='iRODS',
                                      date_scanned=attempt.date_imported,
                                      root_path=attempt.irods_root,
                                      has_checksums=True)

        def save_file(collection, name, size, date_created, checksum):
            # Accumulate a File record; the batch is persisted by
            # build_file_database() at the end, not saved one-by-one.
            path = '{}/{}'.format(collection, name)
            file_obj = File(name=name,
                            path=path,
                            size=size,
                            date_created=date_created,
                            directory=directory,
                            directory_name=directory.name,
                            checksum=checksum)
            file_objects.append(file_obj)

        task.status_message = 'Downloading file data...'
        task.save()
        print('Contacting iRODS...')
        with iRODSSession(user=attempt.irods_user,
                          password=password,
                          host=attempt.irods_host,
                          port=attempt.irods_port,
                          zone=attempt.irods_zone) as session:
            session.connection_timeout = 120

            # One row per replica-0 data object (avoids duplicate rows for
            # multi-replica objects); fetched in batches of 1000.
            base_query = session.query(
                Collection.name, DataObject.name, DataObject.checksum,
                DataObject.size, DataObject.create_time).filter(
                    DataObject.replica_number == 0).limit(1000)

            # Breadth-first walk starting at the import root.
            folder_queue = deque([attempt.irods_root])

            while len(folder_queue):
                next_folder = folder_queue.popleft()
                col = session.collections.get(next_folder)
                # Objects directly inside this folder.
                for obj in col.data_objects:
                    save_file(next_folder, obj.name, obj.size, obj.create_time,
                              obj.checksum)

                # A single recursive query covers everything below this
                # folder, so sub-collections normally need no queueing.
                query = base_query.filter(
                    Like(Collection.name, next_folder + '/%'))
                try:
                    for batch in query.get_batches():
                        for row in batch:
                            save_file(row[Collection.name],
                                      row[DataObject.name],
                                      row[DataObject.size],
                                      row[DataObject.create_time],
                                      row[DataObject.checksum])
                except NetworkException:
                    # Recursive query timed out: fall back to walking this
                    # folder's sub-collections individually.
                    task.status_subtitle = 'This folder is very large. The import process may take much longer than usual.'
                    task.save()
                    print('Timeout on {}'.format(next_folder))
                    for subcol in col.subcollections:
                        folder_queue.append(subcol.path)

        build_file_database(task, directory, file_objects)
    except Exception as e:
        # Top-level task boundary: record the failure on the task record
        # rather than letting the exception propagate.
        print('Task failed with error: {}'.format(e))
        task.in_progress = False
        task.failed = True
        task.warning = True
        task.status_message = 'Import failed.'
        task.status_subtitle = 'Error: {}'.format(e)
        task.save()
        return

    print('Updating database fixture...')
    create_db_backup(task)
    task.in_progress = False
    task.save()
              -e defaults to ".jpg"
              -n defaults to "stickers" """ % (sys.argv[0], ))
    sys.exit(1)

opts = {}
opts.update(opt)

# Locate the iRODS client environment file, falling back to the default
# per-user location when the override variable is not set.
try:
    env_file = os.environ['IRODS_ENVIRONMENT_FILE']
except KeyError:
    env_file = os.path.expanduser('~/.irods/irods_environment.json')

session = iRODSSession(irods_env_file=env_file)

object_name_stub = opts.get('-n', "stickers")
object_name_ext = opts.get('-e', '.jpg')
resc_name = opts.get('-R', "lts_resc")

# An explicitly empty -R argument means "match any resource".
if not resc_name: resc_name = '%'

q = session.query(Collection.name, DataObject.name, Resource.name)
# BUG FIX: Query.filter() returns a NEW query object; the original code
# discarded its return value, so the query ran completely unfiltered.
q = q.filter(Like(DataObject.name, object_name_stub + '%x%' + object_name_ext),
             Like(Resource.name, resc_name))

resultsIter = q.get_results()

print("=== QUERY RESULTS: ===")
for result in resultsIter:
    print( result[Resource.name] + " :\t\t" +\
           result[Collection.name] + "/" + result[DataObject.name] )