def upsert_relationship():
    """Create or update a weighted 'knows' edge between two people.

    Reads 'from', 'to' and optional 'weight' (default '0.5') from the JSON
    request body. Raises NotFoundError if either vertex id is unknown and
    BadRequestError on malformed input. Returns None on success.
    """
    logging.info('Request Received: Upsert Relationship')
    g = setup_graph()
    try:
        properties = app.current_request.json_body
        # TODO - Validate the JSON
        logging.info('Upserting Relationship to Graph')
        # Pull out the details
        from_person_id = properties.get('from')
        to_person_id = properties.get('to')
        weight = float(properties.get('weight', '0.5'))
        # This shouldn't be necessary, but is because of the open question about ids
        from_person = get_person(from_person_id, g)
        if not from_person:
            raise NotFoundError('id "%s" not found' % from_person_id)
        to_person = get_person(to_person_id, g)
        if not to_person:
            raise NotFoundError('id "%s" not found' % to_person_id)
        # There might be a better way of checking whether to addE or not
        # I saw reference to tryNext().orElseGet({addE}) but I need to get it to work in Python
        # NOTE: the same traversal is rebuilt below on purpose — a Gremlin
        # traversal is consumed once .toList() has been called.
        if g.V(from_person).outE('knows').filter(
                __.inV().is_(to_person)).toList():
            logging.info('Updating relationship')
            g.V(from_person).outE('knows').filter(
                __.inV().is_(to_person)).property('weight', weight).next()
        else:
            logging.info('Creating relationship')
            g.V(from_person).addE('knows').to(to_person).property(
                'weight', weight).next()
    except (ValueError, AttributeError, TypeError) as e:
        logging.error(e, exc_info=True)
        raise BadRequestError('Could not upsert relationship. Error: ' + str(e))
    logging.info("Successfully upserted relationship")
def balanceTotalSupply():
    """Sync a token's total supply and apply the delta to the owner's balance.

    Expects a JSON body with 'token', 'owner', 'newTotalSupply' and
    'oldTotalSupply'. Raises NotFoundError when the token or the owner
    user is unknown. Returns the updated TokenBalance as a plain object.
    """
    request = app.current_request
    data = request.json_body
    token = Database.find_one("Token", {"address": data["token"]})
    if not token:
        raise NotFoundError('token not found with address {}'.format(
            data["token"]))
    print(data)
    # BUGFIX: validate the owner BEFORE mutating the token row, so a
    # missing user can no longer leave totalSupply updated with no
    # matching balance adjustment.
    user = Database.find_one("User", {'address': data["owner"]})
    if not user:
        raise NotFoundError('user not found with address {}'.format(
            data["owner"]))
    Database.update("Token", {"id": token["id"]},
                    {"totalSupply": data["newTotalSupply"]})
    userBalance = Database.find_one("TokenBalance", {
        'userId': user['id'],
        "tokenId": token["id"]
    }, insert=True)
    # A freshly inserted row may lack a balance; treat missing/empty as zero.
    if not userBalance.get('balance'):
        userBalance['balance'] = '0'
    # Apply the supply delta (new - old) to the owner's balance.
    newBalance = int(float(userBalance['balance'])) + int(
        float(data["newTotalSupply"])) - int(float(data["oldTotalSupply"]))
    userBalance = Database.update("TokenBalance", {"id": userBalance["id"]},
                                  {"balance": newBalance},
                                  return_updated=True)[0]
    return toObject(userBalance)
def get_ssh_key(uuid):
    """Return a 301 redirect to a presigned S3 URL for a task's SSH key.

    The authorized username must look like '<user>@<secret>'; the AWS
    access key id is the suffix of <user> after its last '-'. Raises
    BadRequestError, ForbiddenError or NotFoundError accordingly.
    """
    basic = get_authorized_username(app.current_request)
    parts = basic.split('@', 1)
    if len(parts) != 2:
        raise BadRequestError("Invalid authorization")
    user, secret = parts[0], parts[1]
    item = get_task_db().get_item(uuid, user=user)
    if not item:
        raise NotFoundError("Task doesn't exist")
    if len(secret) < 8:
        raise BadRequestError("Invalid secret_key|access_token")
    # BUGFIX: guard against a user with no '-' — the original
    # user.rsplit('-', 1)[1] raised IndexError (HTTP 500) in that case.
    user_parts = user.rsplit('-', 1)
    if len(user_parts) != 2:
        raise BadRequestError("Invalid authorization")
    access = user_parts[1]
    session = boto3.Session(aws_access_key_id=access,
                            aws_secret_access_key=secret)
    client = session.client('sts')
    try:
        client.get_caller_identity()
    except Exception:
        # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        raise ForbiddenError("Invalid secret_key")
    extend = item.get('extend', {})
    key = extend.get('key')
    if not key:
        raise NotFoundError("SSH key doesn't exist")
    s3_client = get_s3_client()
    params = {'Bucket': os.environ['OCTOUP_BUCKET'], 'Key': key}
    url = s3.generate_presigned_url(s3_client, 'get_object', params, 3600)
    return Response(body='', headers={'Location': url}, status_code=301)
def presigned_url(project, step):
    """Register a pending upload and return a presigned S3 POST for it.

    The target key is '<project>/<step>/<blake2b(mail)>.webm'. Returns a
    403 Response if the object already exists; otherwise records the
    upload in DynamoDB and returns the presigned-post dict.
    """
    if app.current_request.query_params is None:
        raise NotFoundError("No parameter has been sent")
    mail = app.current_request.query_params.get('mail')
    # .get() returns None for a missing parameter; the original
    # len(mail) raised TypeError in that case.
    if not mail:
        raise NotFoundError("mail is empty")
    print("query_param mail: " + mail)
    if project is None or len(project) == 0:
        raise NotFoundError("project is empty")
    print("query_param project: " + project)
    step_number = 0
    # BUGFIX: was 'step is not None or len(step) > 0', which evaluated
    # len(None) whenever step was None and raised TypeError.
    if step is not None and len(step) > 0:
        try:
            step_number = int(step)
        except ValueError:
            print("query_param v is not a number: " + step)
            step_number = 0
        print("query_param step: " + step)
    # Hash the mail so the object key does not expose the address.
    h = blake2b(digest_size=10)
    byte_mail = bytes(mail, 'utf-8')
    h.update(byte_mail)
    hexmail = h.hexdigest()
    print("hex mail: " + hexmail)
    new_user_video = project + "/" + str(step_number) + "/" + hexmail + '.webm'
    if check_if_file_exists(new_user_video):
        return Response(body="The resource you requested does already exist",
                        status_code=403,
                        headers={'Content-Type': 'text/plain'})
    try:
        get_dynamodb_table().put_item(
            Item={
                "ProjectStep": project + "-" + str(step_number),
                "Mail": mail,
                "video": new_user_video
            })
    except Exception as e:
        print(e)
        raise NotFoundError("Error adding an element on dynamodb")
    try:
        response = get_s3_client().generate_presigned_post(
            Bucket=MEDIA_BUCKET_NAME,
            Key=new_user_video,
            Fields={"acl": "public-read"},
            Conditions=[{
                'acl': 'public-read'
            }],
            ExpiresIn=3600)
    except ClientError as e:
        logging.error(e)
        raise BadRequestError("Internal Error generating presigned post ")
    return response
def manage_record(hostzone, name):
    """Route53 record dispatcher.

    GET returns detail for an existing record set, PUT upserts a record
    with the caller's source IP, POST upserts records from the JSON body,
    and DELETE removes the single existing record set.
    """
    req = app.current_request
    method = req.method
    if method == 'GET':
        found = _get_dns_record(hostzone=hostzone, name=name)
        if not found:
            raise NotFoundError("{} is not registered in {}".format(
                name, hostzone))
        detail = []
        for rec in found:
            detail.append({
                'value': rec['ResourceRecords'][0]['Value'],
                'type': rec['Type'],
                'ttl': rec['TTL'],
            })
        return _json_dumps({'status': 'exist', 'detail': detail})
    if method == 'PUT':
        source_ip = req.context['identity']['sourceIp']
        return _json_dumps(
            _change_dns_record(hostzone=hostzone,
                               name=name,
                               values=[source_ip],
                               action='UPSERT'))
    if method == 'POST':
        body = req.json_body
        return _json_dumps(
            _change_dns_record(hostzone=hostzone,
                               name=name,
                               values=body['records'],
                               action='UPSERT',
                               record_type=body['type'],
                               ttl=body['ttl']))
    if method == 'DELETE':
        found = _get_dns_record(hostzone=hostzone, name=name)
        if len(found) > 1:
            raise BadRequestError("too many records exist in {}".format(name))
        if not found:
            raise NotFoundError("{} is not registered in {}".format(
                name, hostzone))
        record = found[0]
        record_values = [r["Value"] for r in record['ResourceRecords']]
        return _json_dumps(
            _change_dns_record(hostzone=hostzone,
                               name=name,
                               values=record_values,
                               action='DELETE',
                               record_type=record['Type'],
                               ttl=record['TTL']))
def videos():
    """Return the recorded video list for the 'mail' query parameter.

    Raises NotFoundError when the parameter is missing/empty or the mail
    has no recorded videos.
    """
    global users_video_dictionary
    app.log.debug("GET Call app.route/register")
    # BUGFIX: query_params is None when no query string is supplied, and
    # .get() returns None for a missing key; the original len(mail)
    # raised TypeError (HTTP 500) in both cases.
    if app.current_request.query_params is None:
        raise NotFoundError("mail is empty")
    mail = app.current_request.query_params.get('mail')
    if not mail:
        raise NotFoundError("mail is empty")
    if mail in users_video_dictionary:
        return {mail: json.dumps(users_video_dictionary[mail])}
    raise NotFoundError("mail: " + mail + " not found")
def balanceTransfer():
    """Move token balance from one user to another.

    Expects a JSON body with 'token', 'from', 'to' and 'value'. Raises
    NotFoundError when the token, either user, or a sufficient source
    balance is missing.
    """
    request = app.current_request
    data = request.json_body
    print("\n\nbalanceTransfer\n\n")
    print(data)
    print("\n\nbalanceTransfer\n\n")
    token = Database.find_one("Token", {"address": data["token"]})
    # BUGFIX: the token was never validated; a missing token crashed
    # later with TypeError on token["id"] instead of a clean 404.
    if not token:
        raise NotFoundError('token not found with address {}'.format(
            data["token"]))
    fromUser = Database.find_one("User", {'address': data["from"]})
    if not fromUser:
        raise NotFoundError('user not found with address {}'.format(
            data["from"]))
    toUser = Database.find_one("User", {'address': data["to"]})
    if not toUser:
        raise NotFoundError('user not found with address {}'.format(
            data["to"]))
    value = data['value']
    fromBalance = Database.find_one("TokenBalance", {
        'userId': fromUser['id'],
        "tokenId": token["id"]
    }, for_update=True)
    # If there is no from balance this transfer cannot be valid
    if not fromBalance:
        raise NotFoundError('token balance not found for user {}'.format(
            fromUser['id']))
    # Check does the user have enough balance
    print("fromBalanceYo: ", fromBalance)
    # BUGFIX: use int(float(...)) as everywhere else in this module so a
    # fractional string balance (e.g. "10.5") no longer raises ValueError
    # inside the comparison.
    if (not fromBalance.get('balance')
            or int(float(fromBalance['balance'])) < int(float(value))):
        raise NotFoundError('token balance not enough for user {}'.format(
            fromUser['id']))
    newFromBalance = int(float(fromBalance['balance'])) - int(float(value))
    fromBalance = Database.update("TokenBalance", {"id": fromBalance["id"]},
                                  {"balance": newFromBalance},
                                  return_updated=True)[0]
    toBalance = Database.find_one("TokenBalance", {
        'userId': toUser['id'],
        "tokenId": token["id"]
    }, insert=True, for_update=True)
    # A freshly inserted row may lack a balance; treat missing/empty as zero.
    if not toBalance.get('balance'):
        toBalance['balance'] = '0'
    newToBalance = int(float(toBalance['balance'])) + int(float(value))
    toBalance = Database.update("TokenBalance", {"id": toBalance["id"]},
                                {"balance": newToBalance},
                                return_updated=True)[0]
    return {"message": "Funds transferred"}
def parse_email(event):
    """Process an inbound S3 email object and append its subject to Workflowy.

    The recipient address encodes '<email_code>-<cognito sub>@...'; the
    code and session id are validated against the Cognito user's custom
    attributes before the Workflowy node is created. Invalid messages are
    deleted from the bucket and a 4xx error is raised.
    """
    obj = s3_client.get_object(Bucket='email.personalstats.nl', Key=event.key)
    email_file = obj['Body'].read().decode('utf-8')
    email_content = email.message_from_string(email_file)
    email_code = email_content['to'].split('-', 1)[0]
    sub = email_content['to'][len(email_code) + 1:].split('@')[0]
    cognito_user = cognito_client.list_users(
        UserPoolId=config('COGNITO_USER_POOL_ID'),
        Filter="sub = \"{}\"".format(sub)).get('Users')
    if len(cognito_user) != 1:
        s3_client.delete_object(Bucket='email.personalstats.nl', Key=event.key)
        # BUGFIX: str.format() was called with a positional argument
        # against a named placeholder ('{sub}'.format(sub)), which raised
        # KeyError instead of producing the message.
        raise NotFoundError(
            'User {sub}, not found, deleting message'.format(sub=sub))
    attributes = cognito_user[0].get("Attributes", [])
    user_email_code = list(
        filter(lambda attr: attr.get("Name") == "custom:email_code",
               attributes))
    # BUGFIX: guard against a user without the custom attribute —
    # indexing an empty list raised IndexError instead of the intended 404.
    if not user_email_code or not email_code == user_email_code[0].get("Value"):
        s3_client.delete_object(Bucket='email.personalstats.nl', Key=event.key)
        raise NotFoundError('User not found, deleting message')
    # Get session id
    session_id = list(
        filter(lambda attr: attr.get("Name") == "custom:session_id",
               attributes))
    if len(session_id) != 1:
        s3_client.delete_object(Bucket='email.personalstats.nl', Key=event.key)
        raise NotFoundError('User not found, deleting message')
    # Setup workflowy
    wf = Workflowy(sessionid=session_id[0].get("Value"))
    node = wf.root.create()
    try:
        node.edit(email_content['subject'])
        return {"status": 'ok'}
    except WFLoginError:
        # TODO: email the user once the let them know we have trouble with the
        # connection to workflowy. (and then suffix the expended session ID)
        s3_client.delete_object(Bucket='email.personalstats.nl', Key=event.key)
        raise BadRequestError(
            "Cannot authorize with Workflowy, deleting message")
    # NOTE(review): the original ended with an unreachable delete_object
    # call after the try/except (both paths return or raise); it was
    # removed as dead code. Successfully processed messages are therefore
    # never deleted here — confirm whether that is intentional.
def trades_show(tradeId):
    """Update the requesting broker's price for a trade and return the trade.

    Raises NotFoundError when the trade or the caller's TradeBroker row
    does not exist.
    """
    request = app.current_request
    data = request.json_body
    trade = Database.find_one("Trade", {"id": int(tradeId)})
    if not trade:
        raise NotFoundError("trade not found with id {}".format(tradeId))
    tradeBroker = Database.find_one("TradeBroker", {
        "tradeId": trade["id"],
        "brokerId": request.user["id"]
    })
    if not tradeBroker:
        raise NotFoundError(
            "tradeBroker not found with trade id {}".format(tradeId))
    Database.update("TradeBroker", {"id": tradeBroker["id"]},
                    {"price": data["price"]})
    # Socket, should be pushing to a message queue of some kind.
    # Best-effort notification; errors are deliberately swallowed, and the
    # unused result binding ('r') was removed.
    passWithoutError(requests.post)(socket_uri + "trade-update",
                                    data=toObject(trade))
    return toObject(trade)
def find(self, postcode, limit=5):
    """
    Returns a list of n nearest locations from the locations table.

    Keyword arguments:
    postcode - A US or Canadian postal code.
    limit - The maximum amount of locations to be returned (default 5)

    Raises NotFoundError when the postcode is unknown or no locations match.
    """
    coordinates = Postcode.find(postcode)
    # BUGFIX: guard a falsy lookup result — subscripting None raised
    # TypeError (HTTP 500) instead of a clean 404 for unknown postcodes.
    if not coordinates:
        raise NotFoundError("No locations found")
    # NOTE(review): %f/%i coerce the values to numbers so plain string
    # injection is not possible here, but a parameterized query would
    # still be preferable if db.execute supports placeholders.
    # int(limit) also ensures a string limit from a request can't make
    # the %i formatting raise TypeError.
    db.execute("""
        SELECT l.*, ROUND(SQRT(POWER(69.1 * (%f - l.latitude), 2) +
        POWER(69.1 * (l.longitude - %f) * COS(41.929599 / 57.3), 2)), 2)
        AS distance
        FROM postcodes p
        INNER JOIN locations l ON(l.postcode = p.postcode)
        ORDER BY distance LIMIT %i;
    """ % (coordinates['latitude'], coordinates['longitude'], int(limit)))
    locations = db.fetchall()
    if not locations:
        raise NotFoundError("No locations found")
    return locations
def user_put(userId):
    """Update a user record.

    When an 'address' is supplied it is normalized to an EIP-55 checksum
    address and authorized as an investor on the permissions contract.
    Raises NotFoundError when the user id is unknown.
    """
    request = app.current_request
    data = request.json_body
    if 'address' in data:
        # Normalize to an EIP-55 checksum address before storing.
        data['address'] = Web3Helper.toChecksumAddress(data['address'])
    user = Database.find_one("User", {'id': int(userId)})
    if not user:
        raise NotFoundError('user not found with id {}'.format(userId))
    user = Database.update('User', {'id': user['id']}, data,
                           return_updated=True)[0]
    if 'address' in data:
        # Set user authorized as investor on-chain (the transaction
        # result was never used, so the unused 'tx' binding was removed).
        Web3Helper.transact(permissions_contract, 'setAuthorized',
                            data['address'], 1)
    return toObject(
        user, ['id', 'name', 'address', 'role', 'ik', 'spk', 'signature'])
def feeTaken():
    """Credit a fee amount to the token owner's balance.

    Expects a JSON body with 'token', 'owner' and 'value'. Raises
    NotFoundError when the token or owner user is unknown.
    """
    request = app.current_request
    data = request.json_body
    print(data)
    token = Database.find_one("Token", {"address": data["token"]})
    # BUGFIX: a missing token previously crashed with TypeError on
    # token["id"]; raise a clean 404 instead (as balanceTotalSupply does).
    if not token:
        raise NotFoundError('token not found with address {}'.format(
            data["token"]))
    ownerUser = Database.find_one("User", {'address': data["owner"]})
    if not ownerUser:
        raise NotFoundError('user not found with address {}'.format(
            data["owner"]))
    value = data['value']
    ownerBalance = Database.find_one("TokenBalance", {
        'userId': ownerUser['id'],
        "tokenId": token["id"]
    }, insert=True)
    # A freshly inserted row may lack a balance; treat missing/empty as zero.
    if not ownerBalance.get('balance'):
        ownerBalance['balance'] = '0'
    newOwnerBalance = int(float(ownerBalance['balance'])) + int(float(value))
    ownerBalance = Database.update("TokenBalance",
                                   {"id": ownerBalance["id"]},
                                   {"balance": newOwnerBalance},
                                   return_updated=True)[0]
    return {"message": "Fee taken"}
def update_data(self, event):
    """Update the 'geo' or 'ka' attribute of an existing DynamoDB item.

    The item is keyed by (timeStamp, DevEUI) taken from *event*. Returns
    the update_item response, or None when *event* is None. Raises
    NotFoundError on any DynamoDB failure.
    """
    try:
        if event is None:
            return
        update_expression = ''
        expression_attribute_values = {}
        if "GEO" in event:
            update_expression = 'SET geo = :val'
            expression_attribute_values = {':val': event["GEO"]}
        if "KA" in event:
            # NOTE(review): when both GEO and KA are present, this branch
            # silently overwrites the GEO expression so only 'ka' is
            # written — confirm whether both should be combined into one
            # SET expression.
            update_expression = 'SET ka = :val'
            expression_attribute_values = {':val': event["KA"]}
        # NOTE(review): if neither key is present the expression is empty
        # and update_item raises, surfacing below as NotFoundError.
        response = self.table.update_item(
            Key={
                "timeStamp": event["timeStamp"],
                "DevEUI": event["DevEUI"]
            },
            UpdateExpression=update_expression,
            ExpressionAttributeValues=expression_attribute_values,
            ReturnValues="UPDATED_NEW")
        self.log.debug("print: Data persisted")
        return response
    except Exception as e:
        print(e)
        raise NotFoundError("Error updating an element on dynamodb")
def auth_login():
    """Authenticate by email/password and return the user plus a signed JWT.

    Raises NotFoundError for an unknown email and ForbiddenError for a
    wrong password.
    """
    body = app.current_request.json_body
    email = body['email']
    password = body['password']
    user = Database.find_one("User", {'email': email})
    if not user:
        raise NotFoundError('user not found with email {}'.format(email))
    if not pbkdf2_sha256.verify(password, user['password']):
        raise ForbiddenError('Wrong password')
    # The same public field set is used for the JWT payload and the body.
    fields = [
        'id', 'name', 'email', 'address', 'role', 'ik', 'spk', 'signature',
        'truelayerAccountId'
    ]
    token = jwt.encode(toObject(user, fields), secret, algorithm='HS256')
    return {
        'user': toObject(user, fields),
        'token': token.decode("utf-8"),
    }
def decrypt(id):
    """Fetch, decrypt and delete the one-time secret stored under *id*.

    Raises NotFoundError when no item exists for the id.
    """
    dynamo_client = get_dynamo_client()
    kms_client = get_kms_client()
    table_name = environ.get("DYNAMO_TABLE")
    item_key = {"id": {"S": id}}
    response = dynamo_client.get_item(Key=item_key, TableName=table_name)
    if "Item" not in response:
        raise NotFoundError("Item is ID %s not found" % (id))
    item = response["Item"]
    # Unwrap the KMS-encrypted data key, then decrypt the body with it.
    data_key = kms_client.decrypt(CiphertextBlob=item["key"]["B"])
    fernet = Fernet(b64encode(data_key["Plaintext"]))
    plaintext = fernet.decrypt(item["body"]["B"])
    # One-time read: remove the item once it has been decrypted.
    dynamo_client.delete_item(Key=item_key, TableName=table_name)
    return {"body": plaintext.decode()}
def persist_data(self, event):
    """Store *event* as a new item in the DynamoDB table.

    Raises NotFoundError when the put fails.
    """
    try:
        self.table.put_item(Item=event)
    except Exception as error:
        print(error)
        raise NotFoundError("Error adding an element on dynamodb")
    else:
        self.log.debug("print: Data persisted")
def get_coffee(c_id):
    """Fetch a coffee by id; a PUT may update its state or soft-delete it.

    Raises NotFoundError when the coffee cannot be loaded and
    ChaliceViewError when a delete fails.
    """
    check_headers()
    req = app.current_request
    uid = req.headers['X-Api-Key']
    body = req.json_body or {}
    try:
        c = pc.load_coffee(coffee_id=c_id, uid=uid)
    except Exception:  # TODO fix library to raise better exceptions
        # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
        # still propagate.
        raise NotFoundError("No coffee found for that ID")
    # Chalice doesn't support DELETE yet. Or PATCH. Odd.
    if req.method == 'PUT':
        if 'field' in body and body['field'] == "state":
            # temporary workaround: a state change to "deleted" deletes
            if 'to' in body and body['to'] == "deleted":
                if c.delete_coffee():
                    return {'data': 'ok'}
                else:
                    raise ChaliceViewError("Unable to Delete")
            if 'from' in body:
                c.update_state(oldstate=body['from'], newstate=body['to'])
            else:
                c.update_state(newstate=body['to'])
    return format_state(c)
def config_thread(c_key, c_value, json_files, file_uuid, es_json):
    """Collect items for config key *c_key* from matching files into *es_json*.

    For every json file that references *c_key*, the referenced file is
    fetched and each config item in *c_value* is looked up in it; matches
    are appended (flattened) to the shared *es_json* list.
    """
    app.log.info("config_thread %s", str(c_key))
    # Idiom fix: iterate the files directly instead of range(len(...)).
    for json_file in json_files:
        # Skip files that don't reference this config (file name).
        if c_key not in json_file:
            continue
        try:
            file = json.loads(get_file(json_file[c_key]))
        except Exception as e:
            app.log.info(e)
            raise NotFoundError("File '%s' does not exist" % file_uuid)
        # for every item under this file name in config
        for c_item in c_value:
            # look for config item in file
            to_append = look_file(c_item, file, c_key)
            # skip config items that are not present in the file
            if to_append is None:
                continue
            if isinstance(to_append, list):
                # makes lists of lists into a single list
                for item in flatten(to_append):
                    es_json.append(item)
            else:
                # add file item to list of items to append to ES
                es_json.append(to_append)
    app.log.info("config_thread es_json %s", str(es_json))
def get_organization(reference):
    """Look up an organization by reference and return its JSON form.

    Raises NotFoundError when no organization matches.
    """
    organization = organizations_service.find_organization(reference)
    if organization:
        return organization.as_json()
    raise NotFoundError()
def delete(user):
    """Delete *user*; 204 on success, NotFoundError when they don't exist."""
    try:
        delete_user(user)
    except ValueError:
        raise NotFoundError('User not found')
    return Response(body='', status_code=204)
def presigned_url():
    """Return a presigned S3 POST for the caller's next video upload.

    The object key is blake2b(mail) plus a per-mail upload counter.
    """
    # BUGFIX: query_params may be None and .get() may return None; the
    # original concatenated/len()'d the value before checking it and
    # raised TypeError in those cases.
    params = app.current_request.query_params
    mail = params.get('mail') if params else None
    if not mail:
        raise NotFoundError("mail is empty")
    print("query_param mail: " + mail)
    # Hash the mail so the object key does not expose the address.
    h = blake2b(digest_size=10)
    h.update(bytes(mail, 'utf-8'))
    hexmail = h.hexdigest()
    print("hex mail: " + hexmail)
    str_count = ""
    if mail in users_video_dictionary:
        str_count = str(len(users_video_dictionary[mail]))
    new_user_video = hexmail + str_count + '.mp4'
    # NOTE(review): this assumes users_video_dictionary is a defaultdict
    # (or that mail is always present); a plain dict would raise KeyError
    # here for a first-time mail — confirm the container type.
    users_video_dictionary[mail].append(new_user_video)
    s3_client = boto3.client('s3')
    try:
        response = s3_client.generate_presigned_post(
            Bucket="videos.oico.com",
            Key=new_user_video,
            Fields={"acl": "public-read"},
            Conditions=[{
                'acl': 'public-read'
            }],
            ExpiresIn=3600)
    except ClientError as e:
        logging.error(e)
        raise BadRequestError("Internal Error generating presigned post ")
    return response
def delete(user_id):
    """Delete the user with *user_id*; 204 on success, 404 when missing."""
    try:
        delete_user(user_id)
    except ValueError:
        raise NotFoundError("User not found")
    return Response(body="", status_code=204)
def handle_transcription_is_created(event):
    """S3 event handler: index a finished transcription with its sentiment.

    When the uploaded key is a transcription JSON, the transcript text is
    extracted, run through Comprehend sentiment detection, and stored in
    the metadata DynamoDB table.
    """
    print("handle_audio_created: " + event.key)
    # Ignore anything that isn't a transcription JSON.
    if not _is_text(event.key):
        return
    print("Correct JSON generated: " + event.key)
    body = get_s3_client().get_object(Bucket=event.bucket,
                                      Key=event.key)['Body']
    raw = body.read().decode('utf-8')
    print("printing s3_clientdata")
    print(raw)
    document = json.loads(raw)
    print("json loaded data")
    print("status: " + document['status'])
    transcript = document['results']['transcripts'][0]['transcript']
    print("transcript: " + transcript)
    sentiment = get_comprehend_client().detect_sentiment(Text=transcript,
                                                         LanguageCode='en')
    print(json.dumps(sentiment))
    try:
        get_dynamodb_metadata_table().put_item(
            Item={
                "JsonFile": event.key,
                "transcript": transcript,
                "Sentiment": sentiment["Sentiment"]
            })
    except Exception as e:
        print(e)
        raise NotFoundError("Error adding an element on dynamodb")
def __init__(self, id):
    """
    Load a workflow execution by id from DynamoDB and populate this object.

    :param id: The id of the workflow execution
    :raises NotFoundError: when no execution with that id exists
    """
    print("Workflow execution init workflow_execution = {}".format(id))
    table = DYNAMO_RESOURCE.Table(WORKFLOW_EXECUTION_TABLE_NAME)
    response = table.get_item(Key={'Id': id}, ConsistentRead=True)
    if "Item" not in response:
        raise NotFoundError(
            "Exception: workflow execution '%s' not found" % id)
    record = response["Item"]
    self.id = record["Id"]
    self.asset_id = record["AssetId"]
    self.configuration = record["Configuration"]
    self.current_stage = record["CurrentStage"]
    self.status = record["Status"]
    self.trigger = record["Trigger"]
    self.workflow = record["workflow"]
def process_person(person_id):
    """GET returns a person vertex as JSON; PUT replaces all its properties.

    Raises NotFoundError when the vertex id is unknown and BadRequestError
    on malformed input.
    """
    log_string = 'Update' if app.current_request.method == 'PUT' else 'Get'
    logging.info('Request Received: %s Person' % log_string)
    g = setup_graph()
    try:
        person = get_person(person_id=person_id, g=g)
        if not person:
            raise NotFoundError('id "%s" not found' % person_id)
        if app.current_request.method == 'GET':
            return vertex_to_json(vertex=person, g=g)
        else:
            properties = app.current_request.json_body
            # TODO - Validate the JSON
            logging.info('Updating Person on Graph')
            # Remove the existing properties
            g.V(person).properties().drop().iterate()
            # Ideally I would roll this into a single call
            logging.info("Received Properties: " + str(properties))
            for prop_name, prop_value in properties.items():
                g.V(person).property(prop_name, prop_value).next()
    except (ValueError, AttributeError, TypeError) as e:
        logging.error(e, exc_info=True)
        raise BadRequestError('Could not %s person. Error: ' % log_string + str(e))
    logging.info("Successfully inserted person")
    return {"id": person_id}
def list_data_bundles(**kwargs):
    """
    Page through the data bundles index and return data bundles,
    respecting an alias or checksum request if it is made.

    :rtype: ListDataBundlesResponse
    """
    if not es.indices.exists(index=INDEXES['data_bdl']):
        raise NotFoundError("Data bundle index does not exist")
    req_body = app.current_request.query_params or {}
    # page_token defaults to the int 0 but arrives as a string from the
    # query params; both forms are handled below via int().
    page_token = req_body.get('page_token', 0)
    per_page = int(req_body.get('page_size', 10))
    if req_body.get('alias', None):
        # Fetch one extra result to detect whether another page exists.
        results = azul_match_alias(index=INDEXES['data_bdl'],
                                   alias=req_body['alias'],
                                   size=per_page + 1,
                                   from_=page_token if page_token != 0 else None)
    else:
        # NOTE(review): this branch ignores page_token entirely and always
        # returns the first page — confirm whether a 'from_' offset should
        # be applied here as in the alias branch.
        results = es.search(body={'query': {}},
                            index=INDEXES['data_bdl'],
                            size=per_page + 1)['hits']['hits']
    response = model('ListDataBundlesResponse')
    response.data_bundles = [azul_to_bdl(x) for x in results[:per_page]]
    # The extra fetched result signals that a next page exists.
    if len(results) > per_page:
        response.next_page_token = str(int(page_token) + 1)
    return response.marshal()
def modify_topic(c_id, t_id):
    """Modify a topic on a coffee: vote on it or update one of its fields.

    Raises BadRequestError on missing/invalid body fields, NotFoundError
    when the coffee can't be loaded, ChaliceViewError on update failure.
    """
    check_headers()
    req = app.current_request
    uid = req.headers['X-Api-Key']
    body = req.json_body or {}
    if 'field' not in body:
        raise BadRequestError("'field' Required")
    try:
        c = pc.load_coffee(coffee_id=c_id, uid=uid)
    except Exception:  # TODO fix library to raise better exceptions
        # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
        # still propagate.
        raise NotFoundError("No coffee found for that ID")
    if body['field'] == 'votes':
        if 'op' not in body or body['op'] not in ('add', 'remove'):
            raise BadRequestError("'op' Required and must be add|remove")
        c.vote(t_id, body['op'])
        return format_state(c)
    if 'to' not in body:
        raise BadRequestError("At least a 'to' value is required")
    # TODO you know errors CAN happen, right?
    if 'from' in body:
        rv = c.update_topic(t_id, body['field'], body['to'], body['from'])
    else:
        rv = c.update_topic(t_id, body['field'], body['to'])
    if not rv:
        raise ChaliceViewError("Unknown Error updating the topic")
    return format_state(c)
def update_data_object(data_object_id):
    """
    Updates a data object. The data object must exist.

    :param data_object_id: the id of the data object to update
    """
    # Authentication comes first.
    if not check_auth():
        raise UnauthorizedError("You're not authorized to use this service. "
                                "Did you set access_token in the request headers?")
    # The target object must already exist in the index.
    try:
        source = azul_match_field(index=INDEXES['data_obj'],
                                  key='file_id',
                                  val=data_object_id)
    except LookupError:
        raise NotFoundError("Data object not found.")
    # A replacement data object must be supplied in the request body.
    body = app.current_request.json_body
    if not body or not body.get('data_object', None):
        raise BadRequestError("Please add a data_object to the body of your request.")
    # Perform the partial-document update against the matched hit.
    update_doc = {'doc': obj_to_azul(body['data_object'])}
    es.update(index=INDEXES['data_obj'],
              doc_type=DOCTYPES['data_obj'],
              id=source['_id'],
              body=update_doc)
    return model('UpdateDataObjectResponse',
                 data_object_id=data_object_id).marshal()
def get_day(day: Day) -> Dict[str, List[str]]:
    """
    Get all the supported challenges for a particular day.
    """
    # NOTE(review): this function only raises for an unimplemented day and
    # then falls through, implicitly returning None despite the declared
    # Dict[str, List[str]] return type. The success path (building the
    # result from solver_class) appears to be missing — confirm and
    # complete.
    if (solver_class := get_solver(day)) == NotImplemented:
        raise NotFoundError(f"Day {day} is not implemented yet.")
def close_missing_case2(): json_body = app.current_request.json_body # Load json data into object schema = MissingClosingSchema() missing, errors = schema.load(json_body) # Invalid JSON body if errors: raise ChaliceViewError(errors) with contextlib.closing(session_factory()) as session: try: # Check resident id is valid resident = session.query(Resident).get(missing.resident_id) if not resident: raise NotFoundError('Resident not exists') resident.status = 0 session.merge(resident) # Close existing active missing cases updated = session.query(Missing).filter( Missing.resident_id == missing.resident_id, Missing.status == 1).all() count = session.query(Missing).filter(Missing.resident_id == missing.resident_id, Missing.status == 1) \ .update({'status': 0, 'closed_by': missing.closed_by, 'closure': missing.closure, 'closed_at': datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S')}) notify_close_missing(db_session=session, missing=missing) # Call flush() to update id value in missing session.flush() session.commit() schema = MissingClosingSchema(many=True) return schema.dump(updated).data except exc.SQLAlchemyError as e: session.rollback() raise ChaliceViewError(str(e))