def collection_get(self):
    """Browse the storage tree rooted at ``self._root``.

    With ``?dirs=1`` returns a nested directory tree with per-directory
    file counts; otherwise lists the files of the single directory given
    by the ``path`` match, annotated via ``self._process_file``.

    :raises HTTPServerError: if no storage root is configured.
    """
    if not self._root:
        raise HTTPServerError()
    # Collect storage-relative paths that are already referenced by a
    # 'data' Link record ("storage:" prefix stripped) — these count as "used".
    prefix = 'storage:'
    used_paths = [x[0][len(prefix):] for x in DBSession.query(Link.uri).filter(Link.type == 'data').all() if x[0].startswith(prefix)]
    if self._request.GET.get('dirs', '0') == '1':
        # Build a nested dict: {'dirs': {name: {...}}, 'file_count': n, 'new_files': n}.
        directories = {}
        for root, dirs, files in os.walk(self._root):
            # Descend to the node for this directory; os.walk is top-down,
            # so parents are guaranteed to exist before their children.
            d = directories
            for dd in self._strip_path(root).split('/')[1:]:
                d = d['dirs'][dd]
            d['dirs'] = {dd: {} for dd in dirs}
            d['file_count'] = len(files)
            # Files not yet referenced by any 'data' link.
            d['new_files'] = len([x for x in files if not self._strip_path(os.path.join(root, x)) in used_paths])
        return APIResponse(directories)
    print(self._request.matchdict['path'])  # debug output
    req_path = os.path.join(self._root, *self._request.matchdict['path'])
    print(req_path)  # debug output
    # First os.walk() entry = the requested directory itself.
    # NOTE(review): raises StopIteration if req_path does not exist — confirm
    # upstream validation guarantees the path.
    r, d, f = next(os.walk(req_path))
    # '.mods.xml' sidecar files are metadata companions, not listed directly.
    res = {'files': [self._process_file(file, os.path.join(r, file), used_paths) for file in f if not file.endswith('.mods.xml')], 'path': self._strip_path(r), 'dirs': d}
    print(res)  # debug output
    return APIResponse(res)
def collection_get(self):
    """Return a paged list of users, optionally filtered by search keys.

    Every search key must match at least one of: id, username, fullname.
    A dry run returns only the total count.
    """
    list_params = get_list_params(self._request)
    query = DBSession.query(User)
    if list_params.query_keys:
        conditions = []
        for key in list_params.query_keys:
            conditions.append(sqlalchemy.or_(
                User.id_user.value.contains(key),
                User.username.contains(key),
                User.fullname.contains(key)))
        query = query.filter(sqlalchemy.and_(*conditions))
    if list_params.dry:
        # Caller only wants to know how many records match.
        return APIResponse(query.count())
    page = query.offset(list_params.offset).limit(list_params.limit)
    return APIResponse([user.get_dict() for user in page.all()])
def collection_post(self):
    """Create a new digital item with metadata, links and group memberships.

    :raises HTTPBadRequest: if mandatory metadata keys are missing.
    """
    payload = self._request.validated
    payload['id_item'] = str(uuid4())
    payload['created'] = datetime.datetime.now()
    print(payload)
    supplied_keys = [entry['key'] for entry in payload['metadata']]
    print(supplied_keys)
    missing = check_missing_metakeys(supplied_keys, 'i')
    if missing:
        raise HTTPBadRequest('missing metadata keys: %s' % ', '.join(missing))
    item = DigitalItem(payload['id_item'], payload['mime'], payload['created'],
                       '', payload['id_user'], payload['rights'])
    update_metadata(payload['metadata'], item.id_item, 'item')
    DBSession.add(item)
    DBSession.flush()
    for link_data in payload['links']:
        new_link = Link(str(uuid4()), item.id_item, link_data['id_user'],
                        link_data['type'], link_data['description'], link_data['uri'])
        DBSession.add(new_link)
    for group_id in payload['group_ids']:
        print('Adding to group %s' % group_id)
        DBSession.add(GroupItem(group_id, item.id_item, 0, 0))
    print(item)
    return APIResponse(self._add_metadata(item))
def collection_get(self):
    """Return every metadata-key definition as a list of dicts."""
    # Removed a large block of commented-out dead code (an obsolete copy of
    # the digital-item collection views) that obscured this one-liner.
    return APIResponse([x.get_dict() for x in DBSession.query(MetaKey).all()])
def upload_file(request):
    """Store the request body as a file under the configured storage root.

    The target directory comes from the ``dir`` query parameter (default
    ``upload``); leading slashes are stripped so the path stays inside the
    root. Returns file info including guessed MIME type and metadata.

    :raises HTTPServerError: if no storage root is configured.
    """
    cfg = get_cfg(request.registry.settings, 'naki.storage.')
    root = cfg.get('root', None)
    if not root:
        raise HTTPServerError()
    filename = request.matchdict['filename']
    print('File: ' + filename)
    # Strip leading slashes so os.path.join cannot escape the storage root.
    target_dir = request.GET.get('dir', 'upload').lstrip('/')
    print(target_dir)
    path = os.path.join(root, target_dir)
    print(root, path)
    filepath = os.path.join(path, filename)
    os.makedirs(path, exist_ok=True)
    # 'with' guarantees the handle is closed even if the write raises
    # (original opened/closed manually and leaked on error).
    with open(filepath, 'wb') as f:
        f.write(request.body)
    mime = guess_mime(magic.Magic(mime=True), filepath)
    return APIResponse({
        'name': filename,
        'mime': mime,
        'used': False,
        'path': strip_path(root, filepath),
        'metadata': load_metadata(filepath, mime)
    })
def get(self):
    """Return a single metadata-key definition by its key.

    :raises HTTPNotFound: if the key does not exist.
    """
    key = self._request.matchdict['key']
    try:
        metakey = DBSession.query(MetaKey).filter(MetaKey.key == key).one()
    except Exception:
        # Narrowed from a bare 'except:' which also swallowed
        # SystemExit/KeyboardInterrupt; try body kept minimal so only
        # the lookup failure maps to 404.
        raise HTTPNotFound()
    return APIResponse(metakey.get_dict())
def put(self):
    """Update a digital set: fields, group membership diff, and metadata.

    Group links are reconciled: groups present in the payload but not in
    the DB are added; groups in the DB but missing from the payload are
    removed.

    :raises HTTPNotFound: on any failure (error is printed).
    """
    set_id = self._request.matchdict['id']
    payload = self._request.validated
    try:
        dset = DBSession.query(DigitalSet).filter(
            DigitalSet.id_set == set_id).one()
        dset.set_from_dict(payload)
        current = DBSession.query(GroupSet).filter(
            GroupSet.id_set == set_id).all()
        current_ids = [link.id_group for link in current]
        wanted_ids = [entry['id_group'] for entry in payload['groups']]
        # Add links for newly requested groups.
        for gid in wanted_ids:
            if gid not in current_ids:
                DBSession.add(GroupSet(set_id, gid))
        # Drop links no longer requested.
        for link in current:
            if link.id_group not in wanted_ids:
                DBSession.delete(link)
        update_metadata(payload['metadata'], dset.id_set, 'set')
        DBSession.flush()
        return APIResponse(
            add_metadata_record(dset.get_dict(), dset.id_set, 'set'))
    except Exception as e:
        print(e)
        raise HTTPNotFound()
def collection_get(self): params = get_list_params(self._request) # subq = DBSession.query(DigitalItem.id_item)\ # .outerjoin(Metadata, sqlalchemy.and_(Metadata.id == DigitalItem.id_item, Metadata.target == 'item')) # if len(params.query_keys) > 0: # # TODO: Search with multiple search keys is BROKEN (all the keys have to be in the same metadata-value...) # subq = subq.filter(sqlalchemy.and_(*[sqlalchemy.or_(Metadata.value.contains(key), DigitalItem.mime.contains(key)) for key in params.query_keys])) # subq = subq.group_by(DigitalItem.id_item) if len(params.query_keys) == 0: subq = self._get_subq_base().group_by(DigitalItem.id_item) else: subq = self._get_subq_with_key(params.query_keys[0], None) for idx, key in enumerate(params.query_keys[1:]): subq = self._get_subq_with_key(key, subq.subquery('subq%d' % idx)) if params.dry: return APIResponse(subq.count()) subq = subq.offset(params.offset).limit(params.limit).subquery('subq') items_raw = DBSession.query(DigitalItem, Link, Metadata) \ .outerjoin(Link, Link.id_item == DigitalItem.id_item) \ .outerjoin(Metadata, Metadata.id == DigitalItem.id_item) \ .join(subq, subq.c.sID_Item == DigitalItem.id_item) \ .all() items = {} for bundle in items_raw: item_id = bundle[0].id_item if not item_id in items: items[item_id] = bundle[0].get_dict() items[item_id]['links'] = [] items[item_id]['metadata'] = [] item = items[item_id] if bundle[1]: if not next((x for x in item['links'] if x['id_link'] == bundle[1].id_link), None): item['links'].append(bundle[1].get_dict()) if bundle[2]: if not next( (x for x in item['metadata'] if x['key'] == bundle[2].key), None): item['metadata'].append(bundle[2].get_dict()) return APIResponse([items[x] for x in items])
def delete(self):
    """Log out: delete the current request's token record.

    :raises HTTPUnauthorized: if no token accompanies the request
        (should never happen behind the auth layer).
    """
    if not self._request.token:
        # this should never happen
        # Fixed: was 'raise HTTPUnauthorized' without parentheses —
        # works (Python instantiates the class) but is inconsistent
        # with every other raise in this module.
        raise HTTPUnauthorized()
    tok = DBSession.query(Token).filter(
        Token.id_token == self._request.token).one()
    DBSession.delete(tok)
    return APIResponse(None)
def delete(self):
    """Delete one annotation link; non-editors may only delete their own."""
    annotation_id = self._request.matchdict['annot_id']
    query = DBSession.query(Link).filter(Link.id_link == annotation_id)
    requester = self._request.user
    if requester.auth_level < RIGHTLevels.Editor:
        # Restrict the lookup to annotations owned by the requester.
        query = query.filter(Link.id_user == requester.id_user)
    record = query.one()
    DBSession.delete(record)
    DBSession.flush()
    return APIResponse(True)
def delete(self):
    """Delete a container of this view together with its container items.

    :raises: whatever ``Query.one()`` raises if the container is missing.
    """
    container_id = self._request.matchdict['container_id']
    cnt = DBSession.query(Container).filter(Container.id_view == self._view_id).filter(Container.id_container == container_id).one()
    cntitems = DBSession.query(ContainerItem).filter(ContainerItem.id_container == container_id).all()
    for itm in cntitems:
        # BUGFIX: the original deleted 'cnt' here instead of 'itm',
        # repeatedly deleting the container and orphaning its items.
        DBSession.delete(itm)
    DBSession.delete(cnt)
    DBSession.flush()
    return APIResponse(True)
def get(self):
    """Return a single digital item with its metadata.

    :raises HTTPNotFound: if the item does not exist (error is printed).
    """
    # Removed stray debug statement 'print(DigitalItemSchema)' left over
    # from development.
    di_id = self._request.matchdict['id']
    try:
        item = DBSession.query(DigitalItem).filter(
            DigitalItem.id_item == di_id).one()
        return APIResponse(self._add_metadata(item))
    except Exception as e:
        print(e)
        raise HTTPNotFound()
def post(self):
    """Authenticate a user by username/password and issue a fresh token.

    :raises HTTPUnauthorized: if the credentials do not match any user.
    """
    credentials = self._request.validated
    user = DBSession.query(User)\
        .filter(User.username == credentials['username'])\
        .filter(User.passwd == hash_pw(credentials['password']))\
        .one_or_none()
    if user is None:
        raise HTTPUnauthorized()
    token = Token(str(uuid.uuid4()), user.id_user)
    DBSession.add(token)
    result = user.get_dict()
    result['token'] = token.id_token
    return APIResponse(result)
def collection_get(self):
    """List groups with paging and metadata search.

    Query-string parameters:
        limit/offset -- paging window
        q            -- space-separated search keys (all must match)
        dry          -- '1' returns only the total match count
    """
    # BUGFIX: GET values arrive as strings when supplied by the client,
    # but the defaults are ints; cast so .offset()/.limit() always
    # receive integers.
    limit = int(self._request.GET.get('limit', 10))
    offset = int(self._request.GET.get('offset', 0))
    q = self._request.GET.get('q', '')
    dry = self._request.GET.get('dry', '0') == '1'
    query_keys = [x for x in q.split(' ') if len(x) > 0]
    subq = DBSession.query(DIGroup.id_group) \
        .outerjoin(Metadata, sqlalchemy.and_(Metadata.id == DIGroup.id_group, Metadata.target == 'group'))
    if len(query_keys) > 0:
        # Every key must appear in some metadata value of the group.
        subq = subq.filter(
            sqlalchemy.and_(
                *[Metadata.value.contains(key) for key in query_keys]))
    subq = subq.group_by(DIGroup.id_group)
    if dry:
        return APIResponse(subq.count())
    subq = subq.offset(offset).limit(limit).subquery('subq')
    # One row per (group, metadata) combination — de-duplicated below.
    groups_raw = DBSession.query(DIGroup, Metadata) \
        .outerjoin(Metadata, Metadata.id == DIGroup.id_group) \
        .join(subq, subq.c.sID_Group == DIGroup.id_group) \
        .all()
    groups = {}
    for bundle in groups_raw:
        gid = bundle[0].id_group
        if gid not in groups:
            groups[gid] = bundle[0].get_dict()
            groups[gid]['metadata'] = []
        group = groups[gid]
        if bundle[1]:
            # Append each metadata key only once despite join repetition.
            if not next(
                    (x for x in group['metadata'] if x['key'] == bundle[1].key),
                    None):
                group['metadata'].append(bundle[1].get_dict())
    return APIResponse([groups[x] for x in groups])
def collection_get(self):
    """List views with paging, metadata search, and per-user visibility.

    Like the item listing, multiple search keys are handled by chaining
    one subquery per key; ``_add_user_checking`` presumably restricts
    results to views the requester may see — confirm in the helper.
    """
    params = get_list_params(self._request)
    if len(params.query_keys) == 0:
        subq = self._add_user_checking(self._get_subq_base().group_by(
            View.id_view))
    else:
        # Chain one subquery per additional key; each narrows the previous.
        subq = self._get_subq_with_key(params.query_keys[0], None)
        for idx, key in enumerate(params.query_keys[1:]):
            subq = self._get_subq_with_key(key, subq.subquery('subq%d' % idx))
    if params.dry:
        # Dry run: only the total match count.
        return APIResponse(subq.count())
    subq = subq.offset(params.offset).limit(params.limit).subquery('subq')
    # One row per (view, metadata, author) combination.
    views_raw = DBSession.query(View, Metadata, User) \
        .outerjoin(Metadata, Metadata.id == View.id_view) \
        .join(User, User.id_user == View.id_user)\
        .join(subq, subq.c.sID_View == View.id_view) \
        .all()
    views = {}
    for bundle in views_raw:
        gid = bundle[0].id_view
        if not gid in views:
            views[gid] = bundle[0].get_dict()
            views[gid]['metadata'] = []
            views[gid]['author'] = bundle[2].get_dict()
        view = views[gid]
        if bundle[1]:
            # Append each metadata key only once despite join repetition.
            if not next(
                    (x for x in view['metadata'] if x['key'] == bundle[1].key),
                    None):
                view['metadata'].append(bundle[1].get_dict())
    return APIResponse([views[x] for x in views])
def get(self):
    """Return one container of this view together with its item ids.

    :raises HTTPNotFound: if the container does not belong to this view.
    """
    container_id = self._request.matchdict['container_id']
    rows = DBSession.query(Container, ContainerItem)\
        .outerjoin(ContainerItem, ContainerItem.id_container == Container.id_container)\
        .filter(Container.id_view == self._view_id) \
        .filter(Container.id_container == container_id)\
        .all()
    if not rows:
        raise HTTPNotFound()
    result = rows[0][0].get_dict()
    # The outer join yields a single row with ContainerItem=None when the
    # container is empty.
    if rows[0][1]:
        result['item_ids'] = [row[1].id_item for row in rows]
    else:
        result['item_ids'] = []
    return APIResponse(result)
def collection_post(self):
    """Create an annotation link for an item.

    Time and optional duration are encoded into the link type string as
    'annotation;<time>' or 'annotation;<time>;<duration>'.
    """
    v = self._request.validated
    v['id_user'] = self._request.user.id_user
    # Renamed local from 'type' — it shadowed the builtin.
    link_type = 'annotation'
    if v['time'] > 0 or v['duration'] > 0:
        if v['duration'] > 0:
            link_type = '%s;%d;%d' % (link_type, v['time'], v['duration'])
        else:
            link_type = '%s;%d' % (link_type, v['time'])
    link = Link(str(uuid4()), v['id_item'], v['id_user'], link_type, '',
                v['uri'])
    DBSession.add(link)
    DBSession.flush()
    return APIResponse(link.get_dict())
def put(self):
    """Update a view; non-editors may only modify views they own.

    :raises HTTPNotFound: on any failure (error is printed).
    """
    view_id = self._request.matchdict['view_id']
    try:
        query = DBSession.query(View).filter(View.id_view == view_id)
        requester = self._request.user
        if requester.auth_level < RIGHTLevels.Editor:
            # Restrict to views owned by the requester.
            query = query.filter(View.id_user == requester.id_user)
        view = query.one()
        payload = self._request.validated
        view.set_from_dict(payload)
        update_metadata(payload['metadata'], view.id_view, 'view')
        DBSession.flush()
        return APIResponse(view.get_dict())
    except Exception as e:
        print(e)
        raise HTTPNotFound()
def put(self):
    """Update a digital item, its metadata records and its links.

    :raises HTTPNotFound: on any failure (error and traceback printed).
    """
    item_id = self._request.matchdict['id']
    try:
        item = DBSession.query(DigitalItem).filter(
            DigitalItem.id_item == item_id).one()
        payload = self._request.validated
        item.set_from_dict(payload)
        update_metadata(payload['metadata'], item.id_item, 'item')
        update_links(payload['links'], item.id_item)
        DBSession.flush()
        return APIResponse(self._add_metadata(item))
    except Exception as e:
        print(e)
        traceback.print_exc()
        raise HTTPNotFound()
def put(self):
    """Update a user record; only admins (auth_level >= 3) may change auth_level."""
    user = self._verify_access()
    payload = self._request.validated
    if self._request.user.auth_level < 3:
        # Non-admins cannot escalate privileges: keep the stored level.
        payload['auth_level'] = user.auth_level
    print(payload)
    user.set_from_dict(payload)
    # set_from_dict() doesn't touch the password, so hash and set it
    # manually when a new one was supplied.
    new_password = payload.get('passwd', '')
    if new_password:
        user.passwd = hash_pw(new_password)
    DBSession.flush()
    return APIResponse(user.get_dict())
def put(self):
    """Update a group's type and metadata.

    :raises HTTPNotFound: on any failure (error is printed).
    """
    group_id = self._request.matchdict['id']
    try:
        group = DBSession.query(DIGroup).filter(
            DIGroup.id_group == group_id).one()
        payload = self._request.validated
        group.type = payload['type']
        update_metadata(payload['metadata'], group.id_group, 'group')
        DBSession.flush()
        return APIResponse(
            add_metadata_record(group.get_dict(), group.id_group, 'group'))
    except Exception as e:
        print(e)
        raise HTTPNotFound()
def collection_get(self):
    """List digital sets with paging, metadata search, and author info.

    Multiple search keys chain one subquery per key (each may match a
    different metadata value); results are joined with metadata and the
    owning user and de-duplicated in Python.
    """
    params = get_list_params(self._request)
    if len(params.query_keys) == 0:
        subq = self._get_subq_base().group_by(DigitalSet.id_set)
    else:
        # Chain one subquery per additional key; each narrows the previous.
        subq = self._get_subq_with_key(params.query_keys[0], None)
        for idx, key in enumerate(params.query_keys[1:]):
            subq = self._get_subq_with_key(key, subq.subquery('subq%d' % idx))
    if params.dry:
        # Dry run: only the total match count.
        return APIResponse(subq.count())
    subq = subq.offset(params.offset).limit(params.limit).subquery('subq')
    # One row per (set, metadata, author) combination.
    sets_raw = DBSession.query(DigitalSet, Metadata, User)\
        .outerjoin(Metadata, Metadata.id == DigitalSet.id_set) \
        .outerjoin(User, User.id_user == DigitalSet.id_user) \
        .join(subq, subq.c.sID_Set == DigitalSet.id_set)\
        .all()
    sets = {}
    for bundle in sets_raw:
        set_id = bundle[0].id_set
        if not set_id in sets:
            sets[set_id] = bundle[0].get_dict()
            sets[set_id]['author'] = bundle[2].get_dict()
            sets[set_id]['metadata'] = []
        setx = sets[set_id]
        if bundle[1]:
            # Append each metadata key only once despite join repetition.
            if not next(
                    (x for x in setx['metadata'] if x['key'] == bundle[1].key),
                    None):
                setx['metadata'].append(bundle[1].get_dict())
    return APIResponse([sets[x] for x in sets])
def get(self):
    """Return one view with its metadata, items and containers.

    :raises HTTPNotFound: on any failure (error is printed).
    """
    view_id = self._request.matchdict['view_id']
    try:
        view = DBSession.query(View).filter(
            View.id_view == view_id).one()
        result = add_metadata_record(view.get_dict(), view.id_view, 'view')
        result['items'] = self._get_items(view)
        containers = DBSession.query(Container).filter(
            Container.id_view == view_id).all()
        result['containers'] = [container.get_dict() for container in containers]
        return APIResponse(result)
    except Exception as e:
        print(e)
        raise HTTPNotFound()
def get(self):
    """Return one digital set with its groups and metadata.

    :raises HTTPNotFound: on any failure (error is printed).
    """
    set_id = self._request.matchdict['id']
    try:
        dset = DBSession.query(DigitalSet).filter(
            DigitalSet.id_set == set_id).one()
        result = dset.get_dict()
        rows = DBSession.query(GroupSet, DIGroup)\
            .join(DIGroup, GroupSet.id_group == DIGroup.id_group)\
            .filter(GroupSet.id_set == set_id)\
            .all()
        result['groups'] = [group.get_dict() for _, group in rows]
        return APIResponse(
            add_metadata_record(result, dset.id_set, 'set'))
    except Exception as e:
        print(e)
        raise HTTPNotFound()
def collection_post(self):
    """Create a new group after validating mandatory metadata keys.

    :raises HTTPBadRequest: if mandatory metadata keys are missing.
    """
    payload = self._request.validated
    payload['id_group'] = str(uuid4())
    payload['created'] = datetime.datetime.now()
    print(payload)
    supplied_keys = [entry['key'] for entry in payload['metadata']]
    print(supplied_keys)
    missing = check_missing_metakeys(supplied_keys, 'g')
    if missing:
        raise HTTPBadRequest('missing metadata keys: %s' % ', '.join(missing))
    group = DIGroup(payload['id_group'], payload['created'], '',
                    payload['id_user'], payload['type'])
    update_metadata(payload['metadata'], group.id_group, 'group')
    DBSession.add(group)
    DBSession.flush()
    print(group)
    return APIResponse(
        add_metadata_record(group.get_dict(), group.id_group, 'group'))
def collection_post(self):
    """Create a new digital set after validating mandatory metadata keys.

    :raises HTTPBadRequest: if mandatory metadata keys are missing.
    """
    payload = self._request.validated
    payload['id_set'] = str(uuid4())
    payload['created'] = datetime.datetime.now()
    print(payload)
    supplied_keys = [entry['key'] for entry in payload['metadata']]
    print(supplied_keys)
    missing = check_missing_metakeys(supplied_keys, 's')
    if missing:
        raise HTTPBadRequest('missing metadata keys: %s' % ', '.join(missing))
    dset = DigitalSet(payload['id_set'], payload['created'], '',
                      payload['id_user'])
    update_metadata(payload['metadata'], dset.id_set, 'set')
    DBSession.add(dset)
    DBSession.flush()
    return APIResponse(
        add_metadata_record(dset.get_dict(), dset.id_set, 'set'))
def get(self):
    """Return one group together with its items and their metadata.

    Guests and low-privilege users only see items whose 'public'
    metadata value is '1'.

    :raises HTTPNotFound: if the group does not exist.
    """
    dg_id = self._request.matchdict['id']
    # One row per (group, item); item is None for an empty group.
    dgs = [x for x in DBSession.query(DIGroup, DigitalItem) \
        .outerjoin(GroupItem, GroupItem.id_group == DIGroup.id_group) \
        .outerjoin(DigitalItem, DigitalItem.id_item == GroupItem.id_item) \
        .filter(DIGroup.id_group == dg_id) \
        .all()]
    if len(dgs) == 0:
        raise HTTPNotFound()
    dg = add_metadata_record(dgs[0][0].get_dict(), dgs[0][0].id_group,
                             'group')
    # Fetch the metadata of all member items in one query.
    meta = DBSession.query(Metadata, MetaKey) \
        .join(MetaKey, MetaKey.key == Metadata.key) \
        .filter(Metadata.target == 'item') \
        .filter(Metadata.id.in_([x[1].id_item for x in dgs if x[1]])) \
        .all()
    dg['items'] = []
    for bundle in dgs:
        if not bundle[1]:
            # Row from the outer join with no item — skip.
            continue
        item = bundle[1].get_dict()
        item['metadata'] = [
            meta_record(x[1], x[0]) for x in meta
            if x[0].id == bundle[1].id_item
        ]
        dg['items'].append(item)
    if not self._request.user or self._request.user.auth_level < RIGHTLevels.Researcher:
        # Guests should see only public items!
        old_items = dg['items']
        dg['items'] = [
            x for x in old_items
            if self._get_key_value(x['metadata'], 'public') == '1'
        ]
    # dg['items'] = [x[1].get_dict() for x in dgs if x[1]]
    return APIResponse(dg)
def put(self):
    """Update an existing metadata-key definition and return it."""
    key = self._request.matchdict['key']
    record = DBSession.query(MetaKey).filter(MetaKey.key == key).one()
    record.set_from_dict(self._request.validated)
    DBSession.flush()
    return APIResponse(record.get_dict())
def collection_post(self):
    """Create a new view; optionally clone items and containers from an
    existing view given by the ``view_id`` query parameter.

    :raises HTTPBadRequest: if mandatory metadata keys are missing.
    """
    self._request.validated['id_view'] = str(uuid4())
    self._request.validated['created'] = datetime.datetime.now()
    copy_view_id = self._request.GET.get('view_id', '')
    v = self._request.validated
    print(v)
    metakeys = [m['key'] for m in v['metadata']]
    print(metakeys)
    # NOTE(review): target 'i' matches the item-creation check; views may
    # need their own target code — confirm against check_missing_metakeys.
    missing_metakeys = check_missing_metakeys(metakeys, 'i')
    if len(missing_metakeys) > 0:
        raise HTTPBadRequest('missing metadata keys: %s' %
                             ', '.join(missing_metakeys))
    view = View(v['id_view'], v['created'], v['description'], v['id_user'],
                v['public'])
    update_metadata(v['metadata'], view.id_view, 'view')
    DBSession.add(view)
    DBSession.flush()
    #
    # for meta in v['metadata']:
    #     m = Metadata(view.id_view, 'view', meta['key'], meta['value'])
    #     DBSession.add(m)
    # DBSession.flush()
    # If user specified a view_id parameter, let's make a copy
    if copy_view_id:
        old_view = DBSession.query(View).filter(
            View.id_view == copy_view_id).one()
        # Copy the source view's item assignments.
        old_items = DBSession.query(ViewItem).filter(
            ViewItem.id_view == copy_view_id).all()
        for item in old_items:
            print('Adding item' + item.id_item)
            vi = ViewItem(view.id_view, item.id_item, item.path)
            DBSession.add(vi)
        # Copy containers and their items; one row per (container, item).
        old_containers = DBSession\
            .query(Container, ContainerItem)\
            .join(ContainerItem, ContainerItem.id_container == Container.id_container)\
            .filter(Container.id_view == copy_view_id)\
            .all()
        # Maps old container id -> freshly generated id so items of an
        # already-copied container attach to the new copy.
        process_conts = {}
        for ci in old_containers:
            cid = ci[0].id_container
            if not cid in process_conts:
                c = ci[0]
                new_cid = str(uuid4())
                print('Adding container %s as %s' % (cid, new_cid))
                process_conts[cid] = new_cid
                cont = Container(new_cid, view.id_view, c.type,
                                 c.description, c.x, c.y, c.width, c.height,
                                 c.z, c.data)
                DBSession.add(cont)
            else:
                new_cid = process_conts[cid]
                print('Adding item %s to container %s' %
                      (ci[1].id_item, new_cid))
                citem = ContainerItem(new_cid, ci[1].id_item, ci[1].data)
                DBSession.add(citem)
    print(view)
    return APIResponse(
        add_metadata_record(view.get_dict(), view.id_view, 'view'))
def get(self):
    """Return the authenticated user's record including the current token."""
    user_dict = self._request.user.get_dict()
    user_dict['token'] = self._request.token
    return APIResponse(user_dict)