def generic_request(method, url, data=None, auth_token=None, nova_cacert=False):
    """Send an HTTP request and translate error status codes into exceptions.

    Args:
        method: requests-style callable (e.g. requests.get / requests.post)
            invoked as method(url, headers=..., data=..., verify=...).
        url: target URL.
        data: optional request body.
        auth_token: optional value for the ``X-Auth-Token`` header.
        nova_cacert: passed through as ``verify`` (CA bundle path or bool).

    Returns:
        The response object when the status code is 200/201/203/204.

    Raises:
        exceptions.Unauthorized: on HTTP 401 (missing or invalid credentials).
        exceptions.Forbidden: on HTTP 403 (authenticated but not allowed).
        exceptions.RequestFailed: on any other non-success status.
    """
    logging.debug("url:%s, X-Auth-Token:%s, data:%s" % (url, auth_token, data))
    headers = {"Content-type": "application/json"}
    if auth_token:
        headers["X-Auth-Token"] = auth_token
    resp = method(url, headers=headers, data=data, verify=nova_cacert)
    logging.debug("Api request return code:%d" % resp.status_code)
    if resp.status_code == 401:
        # HTTP 401 means missing/invalid credentials -> Unauthorized.
        # (The previous code had Forbidden and Unauthorized swapped.)
        resp_body = resp.json()
        raise exceptions.Unauthorized(resp_body['error']['message'])
    elif resp.status_code == 403:
        # HTTP 403 means the authenticated caller lacks permission -> Forbidden.
        resp_body = resp.json()
        raise exceptions.Forbidden(resp_body['error']['message'])
    elif resp.status_code not in [200, 201, 203, 204]:
        raise exceptions.RequestFailed(resp.text)
    return resp
async def delete(self, request):
    """Remove the requesting user's reference to a stored file.

    The physical file is deleted only when no other user still references
    it; otherwise just this user's metadata entry is dropped from the
    dotfile.
    """
    if not request.username:
        raise exceptions.Unauthorized("A valid token is needed")
    database_name = hashlib.sha256(
        request.username.encode("utf-8")).hexdigest()

    form = await request.post()
    file_hash = form["hash"]
    file_path = storage.get_file(file_hash)
    dotfile_path = storage.get_file("." + file_hash)
    if not (os.path.exists(file_path) and os.path.exists(dotfile_path)):
        raise exceptions.NotFound("file <{}> does not exist".format(file_hash))

    with open(dotfile_path) as dotfile:
        all_entries = json.load(dotfile)
    user_entry = all_entries[database_name]
    storage.atto.Database(database_name).remove_data(
        (file_hash, user_entry["spaces"]))

    if len(all_entries) == 1:
        # Last remaining reference: remove the payload and its metadata.
        os.remove(file_path)
        os.remove(dotfile_path)
    else:
        # Other users still reference the file: keep it, drop our entry only.
        del all_entries[database_name]
        with open(dotfile_path, "w") as dotfile:
            json.dump(all_entries, dotfile)
    return web.json_response({"deleted": True})
async def download(self, request):
    """Stream a stored file back to its owner as a chunked attachment."""
    if not request.username:
        raise exceptions.Unauthorized("A valid token is needed")
    database_name = hashlib.sha256(
        request.username.encode("utf-8")).hexdigest()

    form = await request.post()
    file_hash = form["hash"]
    file_path = storage.get_file(file_hash)
    dotfile_path = storage.get_file("." + file_hash)
    if not (os.path.exists(file_path) and os.path.exists(dotfile_path)):
        raise exceptions.NotFound("file <{}> does not exist".format(file_hash))

    # The user-visible filename lives in this user's metadata entry.
    with open(dotfile_path) as dotfile:
        name = json.load(dotfile)[database_name]["name"]

    response = web.StreamResponse()
    response.headers["Content-Type"] = "application/octet-stream"
    # RFC 5987 percent-encoding so non-ASCII filenames survive the header.
    response.headers[
        "Content-Disposition"] = "attachment; filename*=UTF-8''{}".format(
            urllib.parse.quote(name, safe="")
        )
    response.enable_chunked_encoding()
    await response.prepare(request)

    with open(file_path, "rb") as fd:
        while True:
            chunk = fd.read(1024)
            if not chunk:
                break
            await response.write(chunk)
    await response.write_eof()
    return response
def register(self, username, password):
    """Create a user row storing a password hash produced by the hasher.

    Raises:
        exceptions.Unauthorized: if the username is already taken.
    """
    digest = self.password_hasher.hash(password)
    cursor = self.connection.cursor()
    try:
        cursor.execute("INSERT INTO users VALUES (?, ?)", (username, digest))
    except sqlite3.IntegrityError:
        raise exceptions.Unauthorized("Username taken")
    self.connection.commit()
def login(self, username, password):
    """Verify credentials, transparently upgrading outdated hashes.

    If the stored hash was produced with weaker parameters than the
    current hasher uses, a fresh hash is written back while the
    plaintext password is available.

    Raises:
        exceptions.Unauthorized: unknown username or wrong password.
    """
    cursor = self.connection.cursor()
    cursor.execute("SELECT hash FROM users WHERE username=?", (username, ))
    row = cursor.fetchone()
    if row is None:
        raise exceptions.Unauthorized("not registered")
    stored_hash = row[0]
    try:
        self.password_hasher.verify(stored_hash, password)
        if self.password_hasher.check_needs_rehash(stored_hash):
            fresh_hash = self.password_hasher.hash(password)
            cursor.execute(
                "UPDATE users SET hash = ? WHERE username = ?",
                (fresh_hash, username))
            self.connection.commit()
    except argon2.exceptions.VerifyMismatchError:
        raise exceptions.Unauthorized("Wrong password")
async def search(self, request):
    """Return the caller's files whose space sets match every requested space."""
    if not request.username:
        raise exceptions.Unauthorized("A valid token is needed")
    database_name = hashlib.sha256(
        request.username.encode("utf-8")).hexdigest()

    form = await request.post()
    if "spaces" not in form:
        raise exceptions.UserError("you must specify spaces")

    matches = storage.atto.Database(database_name).inter(
        form["spaces"].split())
    output = {}
    for file_hash, spaces in matches:
        # Pull this user's metadata entry and attach the matching spaces.
        with open(storage.get_file("." + file_hash)) as json_file:
            entry = json.load(json_file)[database_name]
        entry["spaces"] = list(spaces)
        output[file_hash] = entry
    return web.json_response({"results": output})
async def logout(self, request):
    """Blacklist the caller's token so it can no longer be used."""
    if not request.username:
        raise exceptions.Unauthorized("No token to blacklist")
    token = request.headers.get("authorization", None)
    self.EXPIRED_TOKENS.add(token)
    return web.json_response({"disconnected": True})
async def upload(self, request):
    """Store an uploaded file and this user's metadata for it.

    Expects a multipart body with fields in this exact order:
    name, type, desc, hash, chunk, spaces, file.

    EXAMPLE (curl)
    curl -H "Authorization:auth" -F "name=Awesome Background"
    -F "type=image" -F "desc=A cool image" -F "hash=617Y7DY73y2"
    -F "chunk=0" -F "spaces=['A','B']" -F "file=@./background.jpg"
    -X POST localhost:8080/upload

    Raises:
        exceptions.Unauthorized: missing/invalid token.
        exceptions.UserError: multipart fields missing or out of order.
    """
    if not request.username:
        raise exceptions.Unauthorized("A valid token is needed")
    database_name = hashlib.sha256(
        request.username.encode("utf-8")).hexdigest()

    reader = await request.multipart()

    async def _read_text(expected_name):
        # Fields must arrive in a fixed order; reject anything else with a
        # client error. The previous `assert` validation disappears under
        # `python -O` and crashed with AttributeError on a missing field.
        field = await reader.next()
        if field is None or field.name != expected_name:
            raise exceptions.UserError(
                "expected multipart field <{}>".format(expected_name))
        return (await field.read()).decode("utf-8")

    name = await _read_text("name")
    content_type = await _read_text("type")
    description = await _read_text("desc")
    # sha256 of the file, also used as its on-disk name
    hash = await _read_text("hash")
    # Chunk index: drained so the reader can advance, but currently unused.
    await _read_text("chunk")
    spaces = (await _read_text("spaces")).split()

    # The file payload itself.
    field = await reader.next()
    if field is None:
        raise exceptions.UserError("expected multipart field <file>")

    # Create the files folder if not created.
    Path(storage.get_folder()).mkdir(parents=True, exist_ok=True)

    # Cannot rely on Content-Length because of chunked transfer.
    size = 0
    with open(storage.get_file(hash), "wb") as f:
        while True:
            chunk = await field.read_chunk()  # 8192 bytes by default
            if not chunk:
                break
            size += len(chunk)
            f.write(chunk)

    # Merge this user's metadata into the file's dotfile (several users may
    # share one deduplicated file, keyed by their database name).
    dotfile_path = storage.get_file("." + hash)
    if os.path.exists(dotfile_path):
        with open(dotfile_path, "r") as dotfile:
            data = json.load(dotfile)
    else:
        data = {}
    data[database_name] = {
        "name": name,
        "type": content_type,
        "desc": description,
    }
    with open(dotfile_path, "w") as dotfile:
        json.dump(data, dotfile)

    storage.atto.Database(database_name).add_data((hash, spaces))
    return web.json_response({"stored": True, "size": size})