async def cancel_deletion(request, confirmation_code, connection=None):
    user = request.user
    deletion_request = await request.app.models.user_profile_delete_request.get_one(
        confirmation_code, connection=connection)
    if deletion_request.user_id != user.pk:
        raise HTTPConflict(reason='Confirmation code does not match user')
    if deletion_request.is_confirmed():
        raise HTTPConflict(
            reason="Account removal has already been confirmed")
    if deletion_request.is_cancelled():
        raise HTTPConflict(
            reason="Account removal has already been cancelled")
    # Change request status
    await deletion_request.cancel(connection=connection)
def post(self, request):
    yield from request.post()
    url = request.POST['url']
    request.session.begin_nested()
    try:
        citation = Citation(
            url=url,
            added=pytz.utc.localize(datetime.datetime.utcnow()))
        request.session.add(citation)
        request.session.commit()
    except IntegrityError:
        request.session.rollback()
        citation = request.session.query(Citation).filter_by(url=url).one()
    if 'accident_id' in request.POST:
        accident = request.session.query(Accident).get(
            request.POST['accident_id'])
        if not accident:
            raise HTTPConflict()
        request.session.begin_nested()
        try:
            assoc = CitationAccident(accident=accident, citation=citation)
            citation.accidents.append(assoc)
            request.session.commit()
        except IntegrityError:
            request.session.rollback()
    return HTTPNoContent()
async def http_put(app, url, data=None, params=None):
    log.info("http_put('{}', data: {})".format(url, data))
    rsp_json = None
    client = get_http_client(app)
    timeout = config.get("timeout")
    try:
        async with client.put(url, json=data, params=params, timeout=timeout) as rsp:
            log.info("http_put status: {}".format(rsp.status))
            if rsp.status == 201:
                pass  # expected
            elif rsp.status == 404:
                # can come up for replace ops
                log.info(f"HTTPNotFound for: {url}")
            elif rsp.status == 409:
                log.info(f"HTTPConflict for: {url}")
                raise HTTPConflict()
            elif rsp.status == 503:
                log.warn(f"503 error for http_put url: {url}")
                raise HTTPServiceUnavailable()
            else:
                log.error(
                    f"PUT request error for url: {url} - status: {rsp.status}")
                raise HTTPInternalServerError()
            rsp_json = await rsp.json()
            log.debug("http_put({}) response: {}".format(url, rsp_json))
    except ClientError as ce:
        log.error(f"ClientError for http_put({url}): {ce}")
        raise HTTPInternalServerError()
    except CancelledError as cle:
        log.error(f"CancelledError for http_put({url}): {cle}")
        raise HTTPInternalServerError()
    return rsp_json
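A minimal usage sketch for the helper above: callers can treat the re-raised HTTPConflict as "resource already exists" rather than a hard failure. The wrapper name, URL, and payload here are assumptions for illustration, not part of the original service.

# Hypothetical caller of http_put: swallow a 409 and report it,
# letting other HTTP errors propagate. `app` and `url` are assumed
# to come from the same aiohttp application context as above.
from aiohttp.web import HTTPConflict

async def put_or_skip(app, url, payload):
    try:
        return await http_put(app, url, data=payload)
    except HTTPConflict:
        # resource already present - caller decides whether that's OK
        log.info(f"{url} already exists, skipping")
        return None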
async def put(self):
    group_id = int(self.request.match_info['group_id'])
    job_id = int(self.request.match_info['id'])
    json_data = await self.request.json()
    self._logging_debug(json_data)
    obj = JenkinsJob()
    obj.id = job_id
    obj.jenkins_group_id = group_id
    obj.name = json_data['name']
    try:
        obj.jenkins_job_perent_id = int(json_data['jenkins_job_perent_id'])
    except (KeyError, TypeError, ValueError):
        obj.jenkins_job_perent_id = None
    obj.gitlab_project_id = int(json_data['gitlab_project_id'])
    first_job = await self.jenkins_job_manager.find_first_by_group_id(group_id)
    if first_job.id != job_id:
        raise HTTPConflict(reason="only one job can be first")
    self._logging_debug(obj.values)
    job = await self.jenkins_job_manager.update(obj)
    return web.json_response(job, dumps=partial(json.dumps, cls=CustomJSONEncoder))
def new(self, **kwargs):
    data = yield from self.request.json()
    self.validate_allowed_fields(data)
    self.validate_required_fields(data)
    data = self.prepare_insert(data)
    if not isinstance(data, dict):  # pragma: no cover
        data = yield from data
    fields = data.keys()
    pool = yield from self.get_pool()
    with (yield from pool.cursor()) as cur:
        query = ('INSERT INTO {table} ({fields}) '
                 'VALUES ({values}) '
                 'RETURNING {id_column}').format(
                     table=self.get_table_name(),
                     fields=','.join(fields),
                     values=','.join(['%s' for x in fields]),
                     id_column=self.id_column)
        values = (data[x] for x in fields)
        yield from self.before_insert(cur)
        try:
            yield from cur.execute(query, tuple(values))
        except DatabaseError as e:
            raise HTTPConflict(body=self.error_response(e))
        record_id = (yield from cur.fetchone())[0]
        yield from self.after_insert(cur)
    return Response(
        status=201,
        headers=(('Location', self.get_object_url(record_id)),))
def update(self, id, **kwargs):
    data = yield from self.request.json()
    self.validate_allowed_fields(data)
    data = self.prepare_update(data)
    if not isinstance(data, dict):  # pragma: no cover
        data = yield from data
    updated_fields = data.keys()
    pool = yield from self.get_pool()
    with (yield from pool.cursor()) as cur:
        yield from self.before_update(cur)
        query = 'UPDATE {table} SET {fields} WHERE {id_column}=%s'.format(
            table=self.get_table_name(),
            fields=','.join('{}=%s'.format(x) for x in updated_fields),
            id_column=self.id_column)
        try:
            yield from cur.execute(
                query, tuple([data[f] for f in updated_fields] + [id]))
        except DatabaseError as e:
            raise HTTPConflict(body=self.error_response(e))
        yield from self.after_update(cur)
    return Response(status=204)
async def create(self):
    kwargs = await self.request.json()
    kwargs = unpack_dict(kwargs, ('parent_id', 'name'))
    ok = all(k in kwargs for k in ('parent_id', 'name'))
    if not ok:
        raise HTTPBadRequest()
    drive: Drive = self.request.app['drive']
    parent_id = kwargs['parent_id']
    name = kwargs['name']
    parent = await drive.get_node_by_id(parent_id)
    try:
        node = await drive.create_folder(
            parent_node=parent,
            folder_name=name,
            exist_ok=False,
        )
        return node.to_dict()
    except Exception as e:
        EXCEPTION('engine', e) << name << parent_id
        raise HTTPConflict() from e
async def registration(request: Request):
    if 'X-UserId' in request.headers:
        return HTTPNotAcceptable()
    session_maker = request.app['db_session_manager']
    session: Session = session_maker()
    try:
        data = await request.json()
        if not data:
            return HTTPBadRequest()
        user_db = session.query(Users).filter_by(login=data['login']).first()
        if user_db:
            return HTTPConflict()
        data_without_password = dict(data)
        data_without_password.pop('password')
        data_for_auth = {'login': data['login'], 'password': data['password']}
        if 'account_manager' in request.app['config']['app']:
            url = request.app['config']['app']['account_manager']['url']
            async with aiohttp.ClientSession(raise_for_status=True) as http_client_session:
                async with http_client_session.post(url, json=data_without_password) as resp:
                    account_manager_resp = await resp.json()
                    data_for_auth['id'] = account_manager_resp['id']
        user_serializer = UsersSchema().load(data_for_auth, session=session)
        session.add(user_serializer)
        session.commit()
        return HTTPCreated(headers={'Location': f"/users/{user_serializer.login}"})
    except InvalidURL as ex:
        raise Exception(f"Invalid url account_manager: {ex}")
    except ClientResponseError:
        return HTTPConflict()
    except ClientError as ex:
        raise Exception(f"Can't connect to account_manager: {ex}")
    except Exception:
        session.rollback()
        raise
    finally:
        session.close()
async def PUT_Domain(request):
    """HTTP PUT method to create a domain"""
    log.request(request)
    app = request.app
    if not request.has_body:
        msg = "Expected body in put domain"
        log.error(msg)
        raise HTTPInternalServerError()
    body = await request.json()
    log.debug(f"got body: {body}")
    domain = get_domain(request, body=body)
    log.debug(f"PUT domain: {domain}")
    bucket = getBucketForDomain(domain)
    if not bucket:
        log.error(f"expected bucket to be used in domain: {domain}")
        raise HTTPInternalServerError()
    if "owner" not in body:
        msg = "Expected owner key in body"
        log.warn(msg)
        raise HTTPInternalServerError()
    if "acls" not in body:
        msg = "Expected acls key in body"
        log.warn(msg)
        raise HTTPInternalServerError()
    # check whether the domain already exists
    domain_exists = await check_metadata_obj(app, domain)
    if domain_exists:
        msg = "Conflict: resource exists: " + domain
        log.info(msg)
        raise HTTPConflict()
    domain_json = {}
    if "root" in body:
        domain_json["root"] = body["root"]
    else:
        log.info("no root id, creating folder")
    domain_json["owner"] = body["owner"]
    domain_json["acls"] = body["acls"]
    now = time.time()
    domain_json["created"] = now
    domain_json["lastModified"] = now
    # write the domain json to S3 immediately so it will show up in a get_domains S3 scan
    await save_metadata_obj(app, domain, domain_json, notify=True, flush=True)
    resp = json_response(domain_json, status=201)
    log.response(request, resp=resp)
    return resp
async def request_deletion(request, lang_code, connection=None):
    user = request.user
    deletion_request = await request.app.models.user_profile_delete_request.get_by_email(
        user.email, connection=connection)
    if deletion_request:
        raise HTTPConflict(
            reason="Account removal has already been requested")
    deletion_request = await request.app.models.user_profile_delete_request.send(
        user, lang_code=lang_code, connection=connection)
    await deletion_request.mark_as_sent(connection=connection)
async def delete_profile_picture(request, connection=None):
    user = request.user
    Image = request.app.m.image
    old_picture = user.picture
    if not old_picture:
        raise HTTPConflict(reason="No user picture has been set")
    user['picture'] = None
    await user.save(fields=['picture'], connection=connection)
    # Add log entry
    await request.app.m.user_action_log_entry.create_user_profile_update(
        request, connection=connection)
    await Image.delete_name(old_picture, connection=connection)
async def approve_email_change_request(request, confirmation_code, connection=None):
    orig_m = request.app.m.user_change_email_original_address_request
    new_m = request.app.m.user_change_email_new_address_request
    # Look up the confirmation code against both the original- and new-address requests
    orig_address_request = await orig_m.get_one(confirmation_code,
                                                connection=connection,
                                                silent=True)
    new_address_request = await new_m.get_one(confirmation_code,
                                              connection=connection,
                                              silent=True)
    confirmation_request = orig_address_request or new_address_request
    # Confirmation code not found for either address
    if confirmation_request is None:
        raise HTTPNotFound(reason="No such confirmation code")
    # It is confirmed already
    if confirmation_request.is_confirmed():
        raise HTTPConflict(reason="Confirmation already obtained")
    # Change confirmation request status
    await confirmation_request.confirm(connection=connection)
    orig_address_request = await orig_m.get_by_new_email(
        confirmation_request.new_email,
        user_id=confirmation_request.user_id,
        connection=connection)
    new_address_request = await new_m.get_by_new_email(
        confirmation_request.new_email,
        user_id=confirmation_request.user_id,
        connection=connection)
    # Apply the email change only once both requests have been confirmed
    if orig_address_request and new_address_request \
            and orig_address_request.is_confirmed() and new_address_request.is_confirmed():
        await request.app.m.user.change_email(
            confirmation_request.user_id,
            confirmation_request.new_email,
            connection=connection)
        # Add log entry
        await request.app.m.user_action_log_entry.create_user_change_email_address(
            request,
            user_id=confirmation_request.user_id,
            old_email=confirmation_request.orig_email,
            new_email=confirmation_request.new_email,
            confirmation_code=confirmation_code,
            connection=connection)
    return {'status': 200}
async def create_user(request, user, connection=None):
    user_exists = await request.app.models.user.get_user_by_email(
        user['email'], connection=connection)
    if user_exists:
        raise HTTPConflict(reason='User with this email already exists.')
    lang_code = user.get('lang_code', 'en')
    user = await request.app.models.user.create(
        email=user['email'],
        password=make_password(user['password']),
        connection=connection)
    activation_request = await request.app.models.user_activation_request.send(
        user, lang_code=lang_code, connection=connection)
    await activation_request.mark_as_sent(connection=connection)
async def send_email_change_request(request, new_email_address, lang_code, connection=None):
    user = request.user
    orig_m = request.app.m.user_change_email_original_address_request
    new_m = request.app.m.user_change_email_new_address_request
    # Same email specified
    if user.email == new_email_address:
        raise ValidationError(
            new_email_address=["User's existing email specified"])
    orig_address_request = await orig_m.get_by_new_email(
        new_email_address=new_email_address, user_id=user.pk,
        connection=connection)
    new_address_request = await new_m.get_by_new_email(
        new_email_address=new_email_address, user_id=user.pk,
        connection=connection)
    # A change to this address has already been requested
    if orig_address_request is not None and orig_address_request.user_id == user.pk:
        raise HTTPConflict(
            reason="Email change to this address already requested")
    if new_address_request is not None and new_address_request.user_id == user.pk:
        raise HTTPConflict(
            reason="Email change to this address already requested")
    # Create and send confirmation requests for both the original and the new email address
    await orig_m.send(user, new_email_address, lang_code, connection=connection)
    await new_m.send(user, new_email_address, lang_code, connection=connection)
    return {'status': 200}
async def login(request, email, password, connection=None):
    user = await request.app.models.user.get_user_by_email(
        email, connection=connection)
    if user:
        if not user.is_active:
            raise HTTPConflict(reason="User disabled")
        elif check_password(password, user.password):
            await gen_api_key(user.id, request=request, auth='email')
            request.user = user
            await user.on_login(connection=connection)
            await request.app.m.user_action_log_entry.create_login(
                request, connection=connection)
            response = dict(status=200, data=dict(uid=user.id))
            return json_response(response,
                                 headers={'Authorization': request.api_key})
    raise HTTPUnauthorized(reason="Login incorrect")
async def post(self):
    payload = await self._validate_payload()
    payload['id'] = generate_resource_id(payload['sku'],
                                         payload['seller'],
                                         payload['campaign_code'])
    result = await ResourceModel.get(id=payload['id'])
    if result:
        raise HTTPConflict(
            reason='It was not possible to create resource {}'
                   ' because it already exists'.format(payload['id']))
    resource = ResourceModel(**payload)
    await resource.save()
    return self.response(201, resource)
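Because the resource id above is derived deterministically from the payload, posting the same payload twice should yield 201 then 409. A test sketch using aiohttp's test utilities; the /resources route and the make_app factory are hypothetical stand-ins for this service's actual setup.

# Sketch of a duplicate-POST test; `make_app` and the /resources route
# are assumptions, not the service's real wiring.
from aiohttp.test_utils import TestClient, TestServer

async def test_duplicate_post_returns_409():
    app = make_app()  # assumed application factory
    async with TestClient(TestServer(app)) as client:
        payload = {"sku": "sku-1", "seller": "s-1", "campaign_code": "c-1"}
        first = await client.post("/resources", json=payload)
        assert first.status == 201
        second = await client.post("/resources", json=payload)
        assert second.status == 409  # same deterministic id -> HTTPConflict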
async def activate_user(request, activation_code, connection=None):
    activation_request = await request.app.models.user_activation_request.get_one(
        activation_code, connection=connection)
    if activation_request.is_confirmed():
        raise HTTPConflict(reason="Account has already been activated")
    # Change activation request status
    await activation_request.confirm(connection=connection)
    # Change user status
    user = await request.app.models.user.get_one(activation_request.user_id,
                                                 connection=connection)
    await user.activate(connection=connection)
    # Generate and return API key
    await gen_api_key(user.id, request=request, auth='email')
    request.user = user
    # Add log entry
    await request.app.m.user_action_log_entry.create_user_registration(
        request, connection=connection)
    response = dict(status=200, data=dict(uid=user.id))
    return json_response(response, headers={'Authorization': request.api_key})
async def post(self):
    group_id = self.request.match_info['group_id']
    json_data = await self.request.json()
    self._logging_debug(json_data)
    obj = JenkinsJob()
    obj.jenkins_group_id = group_id
    obj.name = json_data['name']
    try:
        obj.jenkins_job_perent_id = int(json_data['jenkins_job_perent_id'])
    except (KeyError, TypeError, ValueError):
        obj.jenkins_job_perent_id = None
    obj.gitlab_project_id = int(json_data['gitlab_project_id'])
    try:
        first_job = await self.jenkins_job_manager.find_first_by_group_id(group_id)
        if first_job and obj.jenkins_job_perent_id is None:
            raise HTTPConflict(reason="only one job can be first")
    except RecordNotFound:
        pass
    job = await self.jenkins_job_manager.create(obj)
    return web.json_response(job, dumps=partial(json.dumps, cls=CustomJSONEncoder))
async def _handle(self, request, retries=0):
    task_vars.request.set(request)
    # ... and make sure to reset the other task vars
    for var in ("txn", "tm", "futures", "authenticated_user",
                "security_policies", "container", "registry", "db"):
        getattr(task_vars, var).set(None)
    try:
        return await super()._handle(request)
    except (ConflictError, TIDConflictError) as e:
        if app_settings.get("conflict_retry_attempts", 3) > retries:
            label = "DB Conflict detected"
            if isinstance(e, TIDConflictError):
                label = "TID Conflict Error detected"
            tid = getattr(getattr(request, "_txn", None), "_tid", "not issued")
            logger.debug(
                f"{label}, retrying request, tid: {tid}, retries: {retries + 1}",
                exc_info=True)
            request._retry_attempt = retries + 1
            request.clear_futures()
            return await self._handle(request, retries + 1)
        logger.error(
            "Exhausted retry attempts for conflict error on tid: {}".format(
                getattr(getattr(request, "_txn", None), "_tid", "not issued")))
        return HTTPConflict(body=json.dumps({"summary": str(e)}),
                            content_type="application/json")
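The handler above replays the whole request when the transaction layer raises a conflict, and only surfaces a 409 once the retry budget is spent. The same pattern in isolation, reduced to a generic helper; the ConflictError class and the operation here are illustrative placeholders, not the framework's actual types.

# Minimal retry-on-conflict sketch under the stated assumptions.
import asyncio

class ConflictError(Exception):
    """Placeholder for a transactional conflict."""

async def with_conflict_retries(op, max_retries=3):
    for attempt in range(max_retries + 1):
        try:
            return await op()
        except ConflictError:
            if attempt == max_retries:
                raise  # exhausted retries - surface as 409 upstream
            await asyncio.sleep(0)  # yield to the loop before retrying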
async def PUT_DatasetShape(request):
    """HTTP method to update a dataset's shape"""
    log.request(request)
    app = request.app
    params = request.rel_url.query
    dset_id = request.match_info.get('id')
    if not isValidUuid(dset_id, obj_class="dataset"):
        log.error("Unexpected type_id: {}".format(dset_id))
        raise HTTPInternalServerError()
    body = await request.json()
    log.info(f"PUT datasetshape: {dset_id}, body: {body}")
    if "shape" not in body and "extend" not in body:
        log.error("Expected shape or extend keys")
        raise HTTPInternalServerError()
    if "bucket" in params:
        bucket = params["bucket"]
    elif "bucket" in body:
        bucket = body["bucket"]
    else:
        bucket = None
    dset_json = await get_metadata_obj(app, dset_id, bucket=bucket)
    shape_orig = dset_json["shape"]
    log.debug(f"shape_orig: {shape_orig}")
    if "maxdims" not in shape_orig:
        log.error("expected maxdims in dataset json")
        raise HTTPInternalServerError()
    dims = shape_orig["dims"]
    maxdims = shape_orig["maxdims"]
    resp_json = {}
    if "extend" in body:
        # extend the shape by the given value and return the
        # newly extended area
        extension = body["extend"]
        extend_dim = 0
        if "extend_dim" in body:
            extend_dim = body["extend_dim"]
        log.info(f"datashape extend: {extension} dim: {extend_dim}")
        selection = "["
        for i in range(len(dims)):
            if i == extend_dim:
                lb = dims[i]
                ub = lb + extension
                if maxdims[extend_dim] != 0 and ub > maxdims[extend_dim]:
                    msg = "maximum extent exceeded"
                    log.warn(msg)
                    raise HTTPConflict()
                selection += f"{lb}:{ub}"
                dims[i] = ub
            else:
                if dims[i] == 0:
                    dims[i] = 1  # each dimension must be non-zero
                selection += ":"
            if i < len(dims) - 1:
                selection += ","
        selection += "]"
        resp_json["selection"] = selection
    else:
        # verify that the shape update is still valid -
        # e.g. another client may have extended the shape since the SN
        # verified it
        shape_update = body["shape"]
        log.debug("shape_update: {}".format(shape_update))
        for i in range(len(dims)):
            if shape_update[i] < dims[i]:
                msg = "Dataspace can not be made smaller"
                log.warn(msg)
                raise HTTPBadRequest(reason=msg)
        # Update the shape!
        for i in range(len(dims)):
            dims[i] = shape_update[i]
    # write back to S3, save to metadata cache
    log.info(f"Updated dimensions: {dims}")
    await save_metadata_obj(app, dset_id, dset_json, bucket=bucket)
    resp = json_response(resp_json, status=201)
    log.response(request, resp=resp)
    return resp
async def PUT_Link(request):
    """Handler for creating a new link"""
    log.request(request)
    app = request.app
    params = request.rel_url.query
    group_id = get_obj_id(request)
    log.info(f"PUT link: {group_id}")
    if not isValidUuid(group_id, obj_class="group"):
        log.error(f"Unexpected group_id: {group_id}")
        raise HTTPInternalServerError()
    link_title = request.match_info.get('title')
    validateLinkName(link_title)
    log.info(f"link_title: {link_title}")
    if not request.has_body:
        msg = "PUT Link with no body"
        log.warn(msg)
        raise HTTPBadRequest(reason=msg)
    body = await request.json()
    if "class" not in body:
        msg = "PUT Link with no class key in body"
        log.warn(msg)
        raise HTTPBadRequest(reason=msg)
    link_class = body["class"]
    link_json = {}
    link_json["class"] = link_class
    if "id" in body:
        link_json["id"] = body["id"]
    if "h5path" in body:
        link_json["h5path"] = body["h5path"]
    if "h5domain" in body:
        link_json["h5domain"] = body["h5domain"]
    if "bucket" in params:
        bucket = params["bucket"]
    elif "bucket" in body:
        bucket = body["bucket"]
    else:
        bucket = None
    group_json = await get_metadata_obj(app, group_id, bucket=bucket)
    if "links" not in group_json:
        log.error(f"unexpected group data for id: {group_id}")
        raise HTTPInternalServerError()
    links = group_json["links"]
    if link_title in links:
        msg = f"Link name {link_title} already found in group: {group_id}"
        log.warn(msg)
        raise HTTPConflict()
    now = time.time()
    link_json["created"] = now
    # add the link
    links[link_title] = link_json
    # update the group lastModified
    group_json["lastModified"] = now
    # write back to S3, save to metadata cache
    await save_metadata_obj(app, group_id, group_json, bucket=bucket)
    resp_json = {}
    resp = json_response(resp_json, status=201)
    log.response(request, resp=resp)
    return resp
async def PUT_Attribute(request):
    """Handler for PUT /(obj)/<id>/attributes/<name>"""
    log.request(request)
    app = request.app
    params = request.rel_url.query
    obj_id = get_obj_id(request)
    attr_name = request.match_info.get('name')
    log.info("PUT attribute {} in {}".format(attr_name, obj_id))
    validateAttributeName(attr_name)
    if not request.has_body:
        log.error("PUT_Attribute with no body")
        raise HTTPBadRequest(reason="body expected")
    body = await request.json()
    if "bucket" in params:
        bucket = params["bucket"]
    elif "bucket" in body:
        bucket = body["bucket"]
    else:
        bucket = None
    replace = False
    if "replace" in params and params["replace"]:
        replace = True
        log.info("replace attribute")
    if "type" not in body:
        log.error("PUT attribute with no type in body")
        raise HTTPInternalServerError()
    datatype = body["type"]
    if "shape" not in body:
        log.error("PUT attribute with no shape in body")
        raise HTTPInternalServerError()
    shape = body["shape"]
    value = body.get("value")
    obj_json = await get_metadata_obj(app, obj_id, bucket=bucket)
    log.debug(f"PUT attribute obj_id: {obj_id} bucket: {bucket} got json")
    if "attributes" not in obj_json:
        log.error(f"unexpected obj data for id: {obj_id}")
        raise HTTPInternalServerError()
    attributes = obj_json["attributes"]
    if attr_name in attributes and not replace:
        # Attribute already exists, return a 409
        log.warn(f"Attempt to overwrite attribute: {attr_name} in obj_id: {obj_id}")
        raise HTTPConflict()
    if replace and attr_name not in attributes:
        # Replace requires that the attribute exists
        log.warn(f"Attempt to update missing attribute: {attr_name} in obj_id: {obj_id}")
        raise HTTPNotFound()
    if replace:
        orig_attr = attributes[attr_name]
        create_time = orig_attr["created"]
    else:
        create_time = time.time()
    # ok - all set, create attribute obj
    attr_json = {"type": datatype, "shape": shape, "value": value,
                 "created": create_time}
    attributes[attr_name] = attr_json
    # write back to S3, save to metadata cache
    await save_metadata_obj(app, obj_id, obj_json, bucket=bucket)
    resp_json = {}
    resp = json_response(resp_json, status=201)
    log.response(request, resp=resp)
    return resp
async def PUT_DatasetShape(request):
    """HTTP method to update a dataset's shape"""
    log.request(request)
    app = request.app
    shape_update = None
    extend = 0
    extend_dim = 0
    dset_id = request.match_info.get('id')
    if not dset_id:
        msg = "Missing dataset id"
        log.warn(msg)
        raise HTTPBadRequest(reason=msg)
    if not isValidUuid(dset_id, "Dataset"):
        msg = f"Invalid dataset id: {dset_id}"
        log.warn(msg)
        raise HTTPBadRequest(reason=msg)
    username, pswd = getUserPasswordFromRequest(request)
    await validateUserPassword(app, username, pswd)
    # validate request
    if not request.has_body:
        msg = "PUT shape with no body"
        log.warn(msg)
        raise HTTPBadRequest(reason=msg)
    data = await request.json()
    if "shape" not in data and "extend" not in data:
        msg = "PUT shape has no shape or extend key in body"
        log.warn(msg)
        raise HTTPBadRequest(reason=msg)
    if "shape" in data:
        shape_update = data["shape"]
        if isinstance(shape_update, int):
            # convert to a list
            shape_update = [shape_update, ]
        log.debug(f"shape_update: {shape_update}")
    if "extend" in data:
        try:
            extend = int(data["extend"])
        except ValueError:
            msg = "extend value must be integer"
            log.warn(msg)
            raise HTTPBadRequest(reason=msg)
        if extend <= 0:
            msg = "extend value must be positive"
            log.warn(msg)
            raise HTTPBadRequest(reason=msg)
        if "extend_dim" in data:
            try:
                extend_dim = int(data["extend_dim"])
            except ValueError:
                msg = "extend_dim value must be integer"
                log.warn(msg)
                raise HTTPBadRequest(reason=msg)
            if extend_dim < 0:
                msg = "extend_dim value must be non-negative"
                log.warn(msg)
                raise HTTPBadRequest(reason=msg)
    domain = getDomainFromRequest(request)
    if not isValidDomain(domain):
        msg = f"Invalid domain: {domain}"
        log.warn(msg)
        raise HTTPBadRequest(reason=msg)
    bucket = getBucketForDomain(domain)
    # verify the user has permission to update shape
    await validateAction(app, domain, dset_id, username, "update")
    # get authoritative state for dataset from DN (even if it's in the meta_cache)
    dset_json = await getObjectJson(app, dset_id, refresh=True, bucket=bucket)
    shape_orig = dset_json["shape"]
    log.debug(f"shape_orig: {shape_orig}")
    # verify that the extend request is valid
    if shape_orig["class"] != "H5S_SIMPLE":
        msg = "Unable to extend shape of datasets that are not H5S_SIMPLE"
        log.warn(msg)
        raise HTTPBadRequest(reason=msg)
    if "maxdims" not in shape_orig:
        msg = "Dataset is not extensible"
        log.warn(msg)
        raise HTTPBadRequest(reason=msg)
    dims = shape_orig["dims"]
    rank = len(dims)
    maxdims = shape_orig["maxdims"]
    if shape_update and len(shape_update) != rank:
        msg = "Extent of update shape request does not match dataset shape"
        log.warn(msg)
        raise HTTPBadRequest(reason=msg)
    for i in range(rank):
        if shape_update and shape_update[i] < dims[i]:
            msg = "Dataspace can not be made smaller"
            log.warn(msg)
            raise HTTPBadRequest(reason=msg)
        if shape_update and maxdims[i] != 0 and shape_update[i] > maxdims[i]:
            msg = "Dataspace can not be extended past max extent"
            log.warn(msg)
            raise HTTPConflict()
    if extend_dim < 0 or extend_dim >= rank:
        msg = "Extension dimension must be less than rank and non-negative"
        log.warn(msg)
        raise HTTPBadRequest(reason=msg)
    # send request on to the DN
    req = getDataNodeUrl(app, dset_id) + "/datasets/" + dset_id + "/shape"
    json_resp = {"hrefs": []}
    params = {}
    if bucket:
        params["bucket"] = bucket
    if extend:
        data = {"extend": extend, "extend_dim": extend_dim}
    else:
        data = {"shape": shape_update}
    try:
        put_rsp = await http_put(app, req, data=data, params=params)
        log.info(f"got shape put rsp: {put_rsp}")
        if "selection" in put_rsp:
            json_resp["selection"] = put_rsp["selection"]
    except HTTPConflict:
        log.warn("got 409 extending dataspace")
        raise
    resp = await jsonResponse(request, json_resp, status=201)
    log.response(request, resp=resp)
    return resp
async def DELETE_Domain(request):
    """HTTP method to delete a domain resource"""
    log.request(request)
    app = request.app
    params = request.rel_url.query
    domain = None
    meta_only = False  # if True, just delete the meta cache value
    keep_root = False
    if request.has_body:
        body = await request.json()
        if "domain" in body:
            domain = body["domain"]
        else:
            msg = "No domain in request body"
            log.warn(msg)
            raise HTTPBadRequest(reason=msg)
        if "meta_only" in body:
            meta_only = body["meta_only"]
        if "keep_root" in body:
            keep_root = body["keep_root"]
    else:
        # get domain from request uri
        try:
            domain = getDomainFromRequest(request)
        except ValueError:
            msg = "Invalid domain"
            log.warn(msg)
            raise HTTPBadRequest(reason=msg)
        if "keep_root" in params:
            keep_root = params["keep_root"]
    log.info("meta_only domain delete: {}".format(meta_only))
    if meta_only:
        # remove from domain cache if present
        domain_cache = app["domain_cache"]
        if domain in domain_cache:
            log.info("deleting {} from domain_cache".format(domain))
            del domain_cache[domain]
        resp = await jsonResponse(request, {})
        return resp
    username, pswd = getUserPasswordFromRequest(request)
    await validateUserPassword(app, username, pswd)
    parent_domain = getParentDomain(domain)
    if (not parent_domain or parent_domain == '/') and username != "admin":
        msg = "Deletion of top-level domains is only supported by admin users"
        log.warn(msg)
        raise HTTPForbidden()
    try:
        domain_json = await getDomainJson(app, domain, reload=True)
    except ClientResponseError as ce:
        if ce.code == 404:
            log.warn("domain not found")
            raise HTTPNotFound()
        elif ce.code == 410:
            log.warn("domain has been removed")
            raise HTTPGone()
        else:
            log.error(f"unexpected error: {ce.code}")
            raise HTTPInternalServerError()
    aclCheck(domain_json, "delete", username)  # throws exception if not allowed
    # check for sub-objects if this is a folder
    if "root" not in domain_json:
        s3prefix = domain[1:] + '/'
        log.info(f"checking keys with prefix: {s3prefix}")
        s3keys = await getS3Keys(app, include_stats=False, prefix=s3prefix,
                                 deliminator='/')
        for s3key in s3keys:
            if s3key.endswith("/"):
                log.warn(f"attempt to delete folder {domain} with sub-items")
                log.debug(f"got prefix: {s3keys[0]}")
                raise HTTPConflict(reason="folder has sub-items")
    req = getDataNodeUrl(app, domain)
    req += "/domains"
    body = {"domain": domain}
    rsp_json = await http_delete(app, req, data=body)
    if "root" in domain_json and not keep_root:
        # delete the root group
        root_id = domain_json["root"]
        req = getDataNodeUrl(app, root_id)
        req += "/groups/" + root_id
        await http_delete(app, req)
    # remove from domain cache if present
    domain_cache = app["domain_cache"]
    if domain in domain_cache:
        del domain_cache[domain]
    # delete domain cache from other sn_urls
    sn_urls = app["sn_urls"]
    body["meta_only"] = True
    for node_no in sn_urls:
        if node_no == app["node_number"]:
            continue  # don't send to ourselves
        sn_url = sn_urls[node_no]
        req = sn_url + "/"
        log.info("sending sn request: {}".format(req))
        try:
            sn_rsp = await http_delete(app, req, data=body)
            log.info("{} response: {}".format(req, sn_rsp))
        except ClientResponseError as ce:
            log.warn("got error for sn_delete: {}".format(ce))
    resp = await jsonResponse(request, rsp_json)
    log.response(request, resp=resp)
    return resp
async def DELETE_Domain(request):
    """HTTP method to delete a domain resource"""
    log.request(request)
    app = request.app
    params = request.rel_url.query
    meta_only = False  # if True, just delete the meta cache value
    keep_root = False
    if request.has_body:
        body = await request.json()
        if "meta_only" in body:
            meta_only = body["meta_only"]
        if "keep_root" in body:
            keep_root = body["keep_root"]
    else:
        if "meta_only" in params:
            meta_only = params["meta_only"]
        if "keep_root" in params:
            keep_root = params["keep_root"]
    try:
        domain = getDomainFromRequest(request)
    except ValueError:
        log.warn("Invalid domain")
        raise HTTPBadRequest(reason="Invalid domain name")
    bucket = getBucketForDomain(domain)
    log.debug(f"DELETE_Domain domain: {domain}")
    if not domain:
        msg = "No domain given"
        log.warn(msg)
        raise HTTPBadRequest(reason=msg)
    log.info(f"meta_only domain delete: {meta_only}")
    if meta_only:
        # remove from domain cache if present
        domain_cache = app["domain_cache"]
        if domain in domain_cache:
            log.info(f"deleting {domain} from domain_cache")
            del domain_cache[domain]
        resp = await jsonResponse(request, {})
        return resp
    username, pswd = getUserPasswordFromRequest(request)
    await validateUserPassword(app, username, pswd)
    parent_domain = getParentDomain(domain)
    if not parent_domain or getPathForDomain(parent_domain) == '/':
        is_toplevel = True
    else:
        is_toplevel = False
    if is_toplevel and username != "admin":
        msg = "Deletion of top-level domains is only supported by admin users"
        log.warn(msg)
        raise HTTPForbidden()
    try:
        domain_json = await getDomainJson(app, domain, reload=True)
    except ClientResponseError as ce:
        if ce.code == 404:
            log.warn("domain not found")
            raise HTTPNotFound()
        elif ce.code == 410:
            log.warn("domain has been removed")
            raise HTTPGone()
        else:
            log.error(f"unexpected error: {ce.code}")
            raise HTTPInternalServerError()
    aclCheck(domain_json, "delete", username)  # throws exception if not allowed
    # check for sub-objects if this is a folder
    if "root" not in domain_json:
        index = domain.find('/')
        s3prefix = domain[(index + 1):] + '/'
        log.info(f"checking s3key with prefix: {s3prefix} in bucket: {bucket}")
        s3keys = await getS3Keys(app, include_stats=False, prefix=s3prefix,
                                 deliminator='/', bucket=bucket)
        for s3key in s3keys:
            if s3key.endswith("/"):
                log.warn(f"attempt to delete folder {domain} with sub-items")
                log.debug(f"got prefix: {s3keys[0]}")
                raise HTTPConflict(reason="folder has sub-items")
    req = getDataNodeUrl(app, domain)
    req += "/domains"
    params = {}  # for http_delete requests to DN nodes
    params["domain"] = domain
    rsp_json = await http_delete(app, req, params=params)
    if "root" in domain_json and not keep_root:
        # delete the root group
        root_id = domain_json["root"]
        req = getDataNodeUrl(app, root_id)
        req += "/groups/" + root_id
        await http_delete(app, req, params=params)
    # remove from domain cache if present
    domain_cache = app["domain_cache"]
    if domain in domain_cache:
        del domain_cache[domain]
    # delete domain cache from other sn_urls
    sn_urls = app["sn_urls"]
    params = {}
    params["domain"] = getPathForDomain(domain)
    params["bucket"] = getBucketForDomain(domain)
    params["meta_only"] = 1  # can't pass booleans as params, so use 1 instead of True
    for node_no in sn_urls:
        if node_no == app["node_number"]:
            continue  # don't send to ourselves
        sn_url = sn_urls[node_no]
        req = sn_url + "/"
        log.info(f"sending sn request: {req}")
        try:
            sn_rsp = await http_delete(app, req, params=params)
            log.info(f"{req} response: {sn_rsp}")
        except ClientResponseError as ce:
            log.warn(f"got error for sn_delete: {ce}")
    resp = await jsonResponse(request, rsp_json)
    log.response(request, resp=resp)
    return resp