def register_account():
    """Create a new account and its linked user document.

    Expects a JSON body with ``email`` and ``password``. Returns a
    ``(json_string, status_code)`` tuple: 406 when the email is already
    registered, 200 with the new account id otherwise.
    """
    email = request.json.get('email')
    password = request.json.get('password')
    accounts = current_app.data.driver.db['accounts']
    # Refuse duplicate registrations for the same email address.
    # (idiom fix: was `!= None`; typo fix: was "alredy")
    if accounts.find_one({'email': email}) is not None:
        return json.dumps({'result': 'User already exists'}), 406
    new_account = {
        'email': email,
        # Only the password hash is persisted, never the clear text.
        'passwordhash': get_hash(password),
    }
    insert_result = accounts.insert_one(new_account)
    # Create the application-level user linked to this account.
    new_user = {
        'email': email,
        'account_id': ObjectId(insert_result.inserted_id),
    }
    post_internal('users', new_user)
    result = {'result': 'ok', 'account-id': str(insert_result.inserted_id)}
    return json.dumps(result), 200
def on_inserted_host(items):
    """What to do when a new host is inserted in the live state ...

    For every inserted host flagged ``register``, create a matching
    ``livestate`` document via post_internal.

    :param items: hosts just inserted by Eve
    :type items: list
    :return: None
    """
    for index, item in enumerate(items):
        if item['register']:
            # Pick the friendliest available name, in order of
            # preference: display_name, alias, host_name.
            name = ''
            if 'display_name' in item and item['display_name'] != '':
                name = item['display_name']
            elif 'alias' in item and item['alias'] != '':
                name = item['alias']
            else:
                name = item['host_name']
            # Default livestate: host considered UP until first check.
            data = {'host_name': item['_id'],
                    'service_description': None,
                    'state': 'UP',
                    'state_type': 'HARD',
                    'acknowledged': False,
                    'last_check': 0,
                    'last_state': 'UP',
                    'last_state_type': 'HARD',
                    'output': '',
                    'long_output': '',
                    'perf_data': '',
                    'type': 'host',
                    'business_impact': item['business_impact'],
                    'display_name_host': name}
            # Honour the configured initial state ('d' = down,
            # 'u' = unreachable); anything else keeps the UP default.
            if item['initial_state'] == 'd':
                data['state'] = 'DOWN'
                data['last_state'] = 'DOWN'
            elif item['initial_state'] == 'u':
                data['state'] = 'UNREACHABLE'
                data['last_state'] = 'UNREACHABLE'
            post_internal("livestate", data)
def on_updated_userservice_session_user(updates, original):
    """A session user relation has been updated in the database:
    - decrease the session's user counter, closing it when it reaches 0
    - add a 'session.left' event

    :param updates: modified fields
    :type updates: dict
    :param original: document before the update
    :type original: dict
    :return: None
    """
    if 'status' in updates:
        if updates['status'] == 'close':
            # Decrease session users
            _session = app.data.driver.db['userservice_session']
            session = _session.find_one({'_id': original['userservice_session']})
            if session and session['current_nb_users'] > 0:
                data = {
                    "current_nb_users": session['current_nb_users'] - 1
                }
                # Close session if no more users in the session ...
                if data['current_nb_users'] == 0:
                    data['status'] = 'close'
                lookup = {"_id": original['userservice_session']}
                patch_internal('userservice_session', data, False, False, **lookup)

            # Add an event
            _users = app.data.driver.db['user']
            user = _users.find_one({'_id': original['user']})
            if user:
                data = {
                    'userservice_session': original['userservice_session'],
                    'user': user['_id'],
                    'date': datetime.utcnow(),
                    'type': 'session.left',
                    'message': 'User %s left the session' % user['name']
                }
                post_internal("event", data)
def update():
    """Rebuild the 'mac' collection from the IEEE OUI registry.

    Drops the existing collection, downloads the OUI list and posts one
    'mac' document per matched vendor line. Returns an empty string.
    """
    print("Deleting (reset) Mongo Collection named 'mac'")
    delete("mac")
    with Timeout(5, False):
        oui = urllib2.urlopen(OUI_URL, timeout=240)
    # code, oui = _sendGetRequest(OUI_URL, {}, {})
    # print "IEEE Response Code was a " + str(code)
    # BUG FIX: counter used to start at 1, over-reporting matches by one.
    count = 0
    for totalcount, line in enumerate(oui, start=1):
        macHexVendor = re.match(
            "^.*(" + OUI_HEX_REGEX + ").*" + "hex" + ".*?([A-Z].*)$", line)
        if macHexVendor:
            count += 1
            macEntry = {
                "base16": macHexVendor.group(1).replace(OUI_MATCH, ""),
                "hex": macHexVendor.group(1).replace(OUI_MATCH, OUI_REPLACE),
                "organization": macHexVendor.group(2)
            }
            post_internal("mac", macEntry)
            if not VCAP_CONFIG:
                print(macHexVendor.group(1).replace(OUI_MATCH, OUI_REPLACE) +
                      ", " + macHexVendor.group(2))
    print("Number of MAC Entries matched: " + str(count))
    return ""
def add_activity(msg, item=None, notify=None, **data):
    """Record an entry in the current user's activity log.

    Users listed in ``notify`` get the entry pushed to their
    notifications box as well.
    """
    activity = {'message': msg, 'data': data}

    current_user = getattr(flask.g, 'user', None)
    if current_user:
        activity['user'] = current_user.get('_id')

    # 'read' maps each notified user id to 0 (meaning: unread).
    activity['read'] = {str(uid): 0 for uid in notify} if notify else {}

    if item:
        activity['item'] = str(item)

    post_internal(ActivityResource.endpoint_name, activity)
    push_notification(ActivityResource.endpoint_name, _dest=activity['read'])
def post_by_name(name):
    """Create an empty faceInput item for *name* and attach the uploaded image.

    Returns "OK" on success, an error message for unknown names, or the
    failing HTTP status code as a string.
    """
    # check if person exists
    if name not in image_count:
        return ("The name " + name + " doesn't exist.")

    # make empty element (renamed from `id` to avoid shadowing the builtin)
    item_id = str(uuid.uuid4())
    with app.app_context():
        with app.test_request_context():
            post_internal('faceInput', {"_id": item_id})

    # insert into arrays; `name` is guaranteed present by the guard above,
    # so the old "create entry if missing" branch was dead code.
    image_to_name_mapping[item_id] = name
    image_count[name] += 1

    print("")
    print("genericFaceInput: created new item for person " + name)
    print("Current status:")
    print(image_to_name_mapping)
    print(image_count)
    print("")

    # send image
    files = {'image': request.files['image']}
    res = requests.patch("http://localhost:5000/faceInput/" + item_id,
                         files=files)

    # check response
    if res.status_code >= 400:
        return str(res.status_code)
    return ("OK")
def on_inserted_userservice_session(items):
    """A new session has been created in the database:
    - Add a sessionOpened event
    - Create a user service CDR

    :param items: sessions just inserted by Eve
    :type items: list
    :return: None
    """
    for index, item in enumerate(items):
        print "Created a new session for service: %s" % item['service_name']
        # Add an event
        data = {
            'userservice_session': item['_id'],
            'user': g.get('users_id', None),
            'date': datetime.utcnow(),
            'type': 'session.opened',
            'message': 'Session opening'
        }
        post_internal("event", data)
        # Create a service CDR (call detail record) for billing/tracking.
        data = {
            'userservice_session': item['_id'],
            'userservice': item['userservice'],
            'user_creator': g.get('users_id', None),
            'opening_date': datetime.utcnow(),
            'status': 'open'
        }
        post_internal("userservice_cdr", data)
def on_inserted_service(items):
    """
    Called by EVE HOOK (app.on_inserted_service)

    After service inserted, if it is a template and the host linked is a
    template with services, we add the service to all hosts that have this
    host in their templates

    :param items: List of services
    :type items: list
    :return: None
    """
    host_db = current_app.data.driver.db['host']
    services = []
    for _, item in enumerate(items):
        if item['_templates_from_host_template'] and item['_is_template']:
            # case where this service is template host+service, so add this
            # service on all hosts that use the host template and have
            # _templates_with_services=True
            hostid = item['host']
            hosts = host_db.find(
                {'_templates': hostid, '_templates_with_services': True})
            for hs in hosts:
                services.append(Template.prepare_service_to_post(item, hs['_id']))
    # Post all collected services in one internal request.
    if services != []:
        post_internal('service', services)
def add_file(items):
    """Eve hook: for each inserted file document, upload the file to
    Trimble, extract IFC geometry/features and post derived 'entity' and
    'feature' documents.

    NOTE(review): assumes all items belong to the same project — the
    Trimble folder id is looked up once from items[0]. Confirm.
    """
    project_info = getitem_internal('project', **{'_id': items[0]['ProjectID']})[0]
    trimble_folder_id = project_info['TrimbleFolderID']
    for item in items:
        # download file
        file = IO()
        file_path = file.save_file(item['Url'])
        # upload to trimble, using the latest stored bearer token
        token = get_internal('lastToken')[0]['_items']['token']
        headers = {"Authorization": "Bearer " + token}
        # NOTE(review): this file handle is never closed — possible leak.
        files = {'file': open(file_path, 'rb')}
        r = requests.post(trimble_url + 'files?parentId=' + trimble_folder_id,
                          files=files, headers=headers)
        trimble_data = r.json()[0]
        TrimbleVersionID = trimble_data['versionId']
        item['TrimbleVersionID'] = TrimbleVersionID
        # extract features from ifc file
        ifc = IFC(file_path)
        entityList, entities, data = ifc.parse_geometry()
        file.remove_file(file_path)
        bim = Model(data=data, model_id=TrimbleVersionID)
        features = bim.get_features()
        item['ThumbnailUrl'] = process_thumbnail(TrimbleVersionID, headers)
        item['Entities'] = entityList
        # Tag every extracted entity with the Trimble version it came from.
        for entity in entities:
            entity['TrimbleVersionID'] = TrimbleVersionID
        post_internal('entity', entities)
        post_internal('feature', features)
def send_to_timeseries_db(data):
    """Send perfdata to timeseries databases; if a backend is not
    available, store the points temporarily in mongo (retention).

    data must have this structure:
    [
        {"name": "", "realm": "", "host": "", "service": "",
         "value": 000, "timestamp": 000}
    ]

    :param data: Information of data to send to graphite / influxdb
    :type data: list
    :return: None
    """
    # Try both backends; remember which ones failed so the points can be
    # replayed later from the retention store.
    to_graphite_cache = not Timeseries.send_to_timeseries_graphite(data)
    to_influx_cache = not Timeseries.send_to_timeseries_influxdb(data)

    if to_graphite_cache or to_influx_cache:
        for point in data:
            point['for_graphite'] = to_graphite_cache
            point['for_influxdb'] = to_influx_cache
        # idiom fix: was `if len(data) > 0`
        if data:
            post_internal('timeseriesretention', data)
def add_activity(msg, item=None, notify=None, **data):
    """Record an entry in the current user's activity log.

    Users listed in ``notify`` get the entry pushed to their
    notifications box as well.
    """
    entry = {'message': msg, 'data': data}

    current_user = getattr(flask.g, 'user', None)
    if current_user:
        entry['user'] = current_user.get('_id')

    # 'read' maps each notified user id to 0 (meaning: unread).
    entry['read'] = {str(uid): 0 for uid in notify} if notify else {}

    if item:
        entry['item'] = str(item)

    post_internal(ActivityModel.endpoint_name, entry)
    push_notification(ActivityModel.endpoint_name, _dest=entry['read'])
def on_inserted_host(items):
    """
    Called by EVE HOOK (app.on_inserted_host)

    When an inserted host references templates and is flagged
    `_templates_with_services`, copy every template service onto it.

    :param items: list of hosts
    :type items: list
    :return: None
    """
    service_db = current_app.data.driver.db['service']
    for host in items:
        if not (host['_templates'] != [] and host['_templates_with_services']):
            continue
        # Collect template services, keyed by name so a service defined in
        # several templates is only added once.
        services_by_name = {}
        for template_id in host['_templates']:
            template_services = service_db.find({'_is_template': True,
                                                 'host': template_id})
            for template_service in template_services:
                services_by_name[template_service['name']] = \
                    Template.prepare_service_to_post(template_service,
                                                     host['_id'])
        # Attach all collected services to this host.
        post_internal('service', list(services_by_name.values()))
def addplugin():
    """Validate a submitted plugin package and store it.

    When ``needAuth`` is false the JWT check is skipped entirely.
    Otherwise the request must carry a token whose 'user' claim matches
    the declared submitter. Returns a short status string.

    Fixes: removed leftover debug prints that called jwt.decode
    unguarded (an invalid token would raise before any handling);
    narrowed bare `except:` clauses to `except Exception`.
    """
    info = json.loads(request.data)

    if not needAuth:
        del info['token']
        post_internal('pluginpackages', info)
        return 'posted!'

    # The submitter must match the user encoded in the token.
    try:
        if not (info['submitter'] ==
                jwt.decode(info['token'], secret_key)['user']):
            return 'submitter validation fail'
    except Exception:  # bad/missing token or malformed payload
        return 'submitter validation fail'

    try:
        token = info['token']
        if jwt.decode(token, secret_key):
            # The token must not be persisted with the package.
            del info['token']
            # post to db
            try:
                post_internal('pluginpackages', info)
            except Exception:
                return 'error with adding a plugin to database'
            return 'success'
        else:
            return 'token fail'
    except Exception:
        return 'error'
def on_inserted_service(items):
    """
    Called by EVE HOOK (app.on_inserted_service)

    When a template service is inserted whose linked host is a host
    template, propagate the service to every host built from that
    template (hosts with `_templates_with_services` enabled).

    :param items: List of services
    :type items: list
    :return: None
    """
    host_db = current_app.data.driver.db['host']
    to_post = []
    for service in items:
        if not (service['_templates_from_host_template']
                and service['_is_template']):
            continue
        # Every host that uses this service's host as a template (with
        # services enabled) receives a copy of the service.
        template_host_id = service['host']
        linked_hosts = host_db.find({'_templates': template_host_id,
                                     '_templates_with_services': True})
        for linked_host in linked_hosts:
            to_post.append(
                Template.prepare_service_to_post(service, linked_host['_id']))
    if to_post:
        post_internal('service', to_post)
def update():
    """Rebuild the 'mac' collection from the IEEE OUI registry.

    Drops the existing collection, downloads the OUI list and posts one
    'mac' document per matched vendor line. Returns an empty string.
    """
    print("Deleting (reset) Mongo Collection named 'mac'")
    delete("mac")
    with Timeout(5, False):
        oui = urllib2.urlopen(OUI_URL, timeout=240)
    # code, oui = _sendGetRequest(OUI_URL, {}, {})
    # print "IEEE Response Code was a " + str(code)
    # BUG FIX: counter used to start at 1, over-reporting matches by one.
    count = 0
    for totalcount, line in enumerate(oui, start=1):
        macHexVendor = re.match(
            "^.*(" + OUI_HEX_REGEX + ").*" + "hex" + ".*?([A-Z].*)$", line)
        if macHexVendor:
            count += 1
            macEntry = {
                "base16": macHexVendor.group(1).replace(OUI_MATCH, ""),
                "hex": macHexVendor.group(1).replace(OUI_MATCH, OUI_REPLACE),
                "organization": macHexVendor.group(2)
            }
            post_internal("mac", macEntry)
            if not VCAP_CONFIG:
                print(macHexVendor.group(1).replace(OUI_MATCH, OUI_REPLACE) +
                      ", " + macHexVendor.group(2))
    print("Number of MAC Entries matched: " + str(count))
    return ""
def on_inserted_userservice_session_user(items):
    """A user joined a session:
    - Add a sessionJoined event
    - Update last user activity in the session
    - Increase session users' number

    :param items: inserted session/user relation documents
    :type items: list
    :return: None
    """
    for index, item in enumerate(items):
        print "User: %s joined the session: %s" % (item['user'], item['userservice_session'])
        # New session event: sessionJoined
        _users = app.data.driver.db['user']
        user = _users.find_one({'_id': item['user']})
        if user:
            data = {
                'userservice_session': item['userservice_session'],
                'user': g.get('users_id', None),
                'date': datetime.utcnow(),
                'type': 'session.joined',
                'message': 'User %s joined the session' % user['name']
            }
            post_internal("event", data)
        # Increase session users
        _session = app.data.driver.db['userservice_session']
        session = _session.find_one({'_id': item['userservice_session']})
        if session:
            data = {
                "current_nb_users": session['current_nb_users'] + 1
            }
            lookup = {"_id": item['userservice_session']}
            patch_internal('userservice_session', data, False, False, **lookup)
def on_inserted_host(items):
    """
    Called by EVE HOOK (app.on_inserted_host)

    After host inserted, if it uses a template (or templates) and the
    host uses templates with services, we add the templates' services to
    this host

    :param items: list of hosts
    :type items: list
    :return: None
    """
    service_db = current_app.data.driver.db['service']
    for _, item in enumerate(items):
        if item['_templates'] != [] and item['_templates_with_services']:
            # add services
            services = {}
            # loop on host templates and add into services the services that are templates
            for hostid in item['_templates']:
                services_template = service_db.find({'_is_template': True,
                                                     'host': hostid})
                for srv in services_template:
                    # keyed by name so a service defined in several
                    # templates is only added once
                    services[srv['name']] = Template.prepare_service_to_post(srv, item['_id'])
            # when ok, add all services to this host
            post_internal('service', [services[k] for k in services])
def on_updated_host(updates, original):
    """
    Called by EVE HOOK (app.on_updated_host)

    After host updated:
    - if host is a template, report values of updated fields to hosts using this template
    - if host is not a template, add or remove service templates if _templates changed

    :param updates: modified fields
    :type updates: dict
    :param original: original fields
    :type original: dict
    :return: None
    """
    # pylint: disable=too-many-locals
    # Re-entrancy guard: a patch triggered by this hook sets the flag so
    # the nested update is ignored exactly once.
    if g.get('ignore_hook_patch', False):
        g.ignore_hook_patch = False
        return
    if original['_is_template']:
        # We must update all hosts using this template
        host_db = current_app.data.driver.db['host']
        hosts = host_db.find({'_templates': original['_id']})
        for host in hosts:
            Template.update_host_use_template(host, updates)
    else:
        if '_templates' in updates and updates['_templates'] != original['_templates']:
            if original['_templates_with_services']:
                service_db = current_app.data.driver.db['service']
                # Get all services of this host
                myservices = service_db.find({'_is_template': False,
                                              'host': original['_id']})
                # Map template-service id -> existing service document.
                myservices_template_id = []
                myservices_bis = {}
                for myservice in myservices:
                    myservices_template_id.append(myservice['_templates'][0])
                    myservices_bis[myservice['_templates'][0]] = myservice
                services = {}
                service_template_id = []
                # loop on host templates and add into services the services that are templates
                for hostid in updates['_templates']:
                    services_template = service_db.find({'_is_template': True,
                                                         'host': hostid})
                    for srv in services_template:
                        services[srv['name']] = Template.prepare_service_to_post(srv, original['_id'])
                        service_template_id.append(services[srv['name']]['_templates'][0])
                # Diff old vs new template-service sets: post what is new,
                # delete what is no longer referenced.
                services_to_add = list(set(service_template_id) - set(myservices_template_id))
                services_to_del = list(set(myservices_template_id) - set(service_template_id))
                for (_, service) in iteritems(services):
                    if service['_templates'][0] in services_to_add:
                        post_internal('service', [service])
                for template_id in services_to_del:
                    if template_id in myservices_bis:
                        lookup = {"_id": myservices_bis[template_id]['_id']}
                        deleteitem_internal('service', False, False, **lookup)
def push_systemvthings_locally(request, payload):
    """Mirror the entities from the outgoing response payload into the
    local batch-update endpoint, then re-run the materialization pipeline.

    NOTE(review): the `request` parameter shadows the usual flask import;
    presumably required by the hook signature — confirm.
    """
    itms = json.loads(payload.get_data()).get('_items')
    for itm in itms:
        # Local ids use ':' where the payload side uses '/'.
        itm["id"] = itm["id"].replace("/", ":")
    with app.test_request_context():
        post_internal('batchentitiesUpdatePOSTendpoint', itms)
        # i have to materialize redundantly here because the hook is not fired
        # when using the post_internal. And the call to db requires the context
        app.data.driver.db.entities.aggregate(latestentities_pipeline)
def save_notification(app, push_interval):
    """Flush any queued notification changes to the 'notification'
    resource, then reschedule itself to run again after *push_interval*
    seconds (daemon timer)."""
    pending = app.extensions.pop('superdesk_notifications', None)
    if pending:
        with app.test_request_context():
            log.info('Saving changes %s', pending)
            post_internal('notification', {'changes': pending})
    timer = Timer(push_interval, save_notification, args=(app, push_interval))
    timer.daemon = True
    timer.start()
def send_to_timeseries_db(data, item_realm):
    """Send perfdata to timeseries databases.

    If a TSDB is not available, store the perf_data in the internal
    retention store ('timeseriesretention').

    `data` must have this structure:
    [
        {
            "name": "", "realm": "", "host": "", "service": "",
            "value": 000, "timestamp": 000, "uom": ""
        }
    ]

    :param data: Information of data to send to carbon / influxdb
    :type data: list
    :param item_realm: id of the realm
    :type item_realm: str
    :return: None
    """
    graphite_db = current_app.data.driver.db['graphite']
    influxdb_db = current_app.data.driver.db['influxdb']
    realm_db = current_app.data.driver.db['realm']

    # Target TSDB servers attached to this realm, plus servers attached to
    # any parent realm that covers its sub-realms.
    searches = [{'_realm': item_realm}]
    realm_info = realm_db.find_one({'_id': item_realm})
    for realm in realm_info['_tree_parents']:
        searches.append({'_realm': realm, '_sub_realm': True})

    # get graphite servers to send to
    for search in searches:
        graphites = graphite_db.find(search)
        for graphite in graphites:
            if not Timeseries.send_to_timeseries_graphite(data, graphite):
                # Sending failed: persist the points tagged with the target
                # server id so they can be replayed later, then untag.
                for perf in data:
                    perf['graphite'] = graphite['_id']
                post_internal('timeseriesretention', data)
                for perf in data:
                    del perf['graphite']

    # get influxdb servers to send to
    for search in searches:
        influxdbs = influxdb_db.find(search)
        for influxdb in influxdbs:
            if not Timeseries.send_to_timeseries_influxdb(data, influxdb):
                for perf in data:
                    perf['influxdb'] = influxdb['_id']
                post_internal('timeseriesretention', data)
                for perf in data:
                    del perf['influxdb']
def add_activity(msg, **data):
    """Log an activity for the current flask user and push a
    notification to that user. No-op when there is no user."""
    current_user = getattr(flask.g, 'user', None)
    if not current_user:
        return

    entry = {
        'user': current_user.get('_id'),
        'message': msg,
        'data': data,
    }
    post_internal(ActivityModel.endpoint_name, entry)
    push_notification(ActivityModel.endpoint_name,
                      created=1,
                      keys=(current_user.get('_id'),))
def prepare_user(app):
    """Ensure the configured submitter account exists as a superuser,
    log in through the test client and return the auth token.
    """
    with app.test_request_context():
        account = app.config['SUBMITTER_ACCOUNT']
        account.update({'role': Role.SUPERUSER})
        account['@type'] = 'Account'
        account['databases'] = app.config['DATABASES']
        # Only create the account when it does not exist yet; a concurrent
        # insert is tolerated via DuplicateKeyError suppression.
        if app.data.find_one_raw('accounts', {'email': account['email']}) is None:
            with suppress(DuplicateKeyError):
                post_internal('accounts', dict(account), True)
                # If we validate, RolesAuth._set_database changes our db
        response = app.test_client().post('login',
                                          data=json.dumps(account),
                                          content_type='application/json')
        js = json.loads(response.data.decode())
        return js['token']
def after_insert(resource_name, request, payload):
    """Eve post-insert hook: when a 'drivings' document carrying a JSON
    'destinations' string is inserted, create one 'destinations' document
    per destination, linked to the new driving's _id.
    """
    if resource_name == 'drivings':
        # request.get_data() is the raw POSTed body (a single document).
        for doc in [request.get_data()]:
            doc = json.loads(doc)
            if doc['destinations']:
                # 'destinations' is itself a JSON-encoded list.
                destinations = json.loads(doc['destinations'])
                for dest in destinations:
                    # Link each destination to the driving Eve just created
                    # (its _id is in the response payload).
                    dest['driving'] = json.loads(payload.get_data())['_id']
                    print post_internal('destinations', payl=dest)
                del doc['destinations']
def setup_db(admin_email): """Setup the database - Create admin, subscriber and demo Group collection - Create admin user (must use valid blender-id credentials) - Create one project """ # Create default groups groups_list = [] for group in ['admin', 'subscriber', 'demo']: g = {'name': group} g = post_internal('groups', g) groups_list.append(g[0]['_id']) print("Creating group {0}".format(group)) # Create admin user user = { 'username': admin_email, 'groups': groups_list, 'roles': ['admin', 'subscriber', 'demo'], 'settings': { 'email_communications': 1 }, 'auth': [], 'full_name': admin_email, 'email': admin_email } result, _, _, status = post_internal('users', user) if status != 201: raise SystemExit('Error creating user {}: {}'.format( admin_email, result)) user.update(result) print("Created user {0}".format(user['_id'])) # Create a default project by faking a POST request. with app.test_request_context(data={'project_name': u'Default Project'}): from flask import g from application.modules import projects g.current_user = { 'user_id': user['_id'], 'groups': user['groups'], 'roles': set(user['roles']) } projects.create_project(overrides={ 'url': 'default-project', 'is_private': False })
def on_generic_deleted(self, resource, doc):
    """Write an audit entry for a delete on any non-excluded resource,
    attributed to the current flask user (no-op without a user)."""
    if resource in self.exclude:
        return
    current_user = getattr(flask.g, 'user', None)
    if not current_user:
        return
    entry = {
        'user': current_user.get('_id'),
        'resource': resource,
        'action': 'deleted',
        'extra': doc,
    }
    post_internal(self.endpoint_name, entry)
def task():
    """Handle task requests.

    PATCH updates an existing task via patch_internal; any other method
    (POST) creates one. Responds through Eve's send_response.

    Fix: removed leftover debug prints ("request", "heelloooooo").
    """
    if request.method == "PATCH":
        return send_response("tasks",
                             patch_internal("tasks", payload=request.json))
    return send_response("tasks", post_internal("tasks", request.json))
def _import_modules():
    """Bulk-import modules from the request payload.

    The 'image' field's schema is temporarily downgraded from a media
    field to a plain string so previously exported file names can be
    posted back as-is; the original DOMAIN config is restored afterwards.
    Returns ``{'status': bool}`` — True when the POST returned 201.
    """
    data = request.json
    items = data[config.ITEMS]

    def modules_convert(item):
        # Keep only the file-name part of an exported image URL.
        if item.get('image'):
            item['image'] = item['image'].split('/')[-1]
        # Restrict to the allowed keys, then drop None values.
        allowed_key = ('name', 'image')
        item = filter(lambda v: v[0] in allowed_key, item.items())
        item = dict(item)
        item = filter(lambda v: v[1] is not None, item.items())
        item = dict(item)
        return item

    items = map(modules_convert, items)
    items = list(items)

    status_all = True
    resource = 'modules'
    # Patch the schema so 'image' accepts a plain string; restore below.
    domain_ori = deepcopy(app.config['DOMAIN'])
    app.config['DOMAIN'][resource]['schema']['image']['type'] = 'string'
    app.config['DOMAIN'][resource]['_media'] = []
    # Start from an empty collection; a missing one is fine.
    try:
        _ = delete(resource)
    except NotFound:
        pass
    response = post_internal(resource, items)
    pprint(response)
    status_all = status_all and response[3] == 201
    app.config['DOMAIN'] = domain_ori
    return jsonify({'status': status_all})
def populate_db(pdf_file_name):
    """Convert a demographics PDF to text, parse it, attach each subject's
    VCF content and post one 'ngs_data' document per subject.

    Returns a JSON response listing the per-file POST status, or a 404
    response when the PDF is missing.
    """
    pdf_file_path = os.path.join(os.getcwd(), 'app', pdf_file_name)
    if os.path.exists(pdf_file_path):
        output_file_path = os.path.join(
            os.getcwd(), 'app', pdf_file_name.replace('.pdf', '.txt'))
        subprocess.call(
            ['/usr/bin/pdftotext', pdf_file_path, output_file_path])
        demographics = PDF_Demographics_Parser(output_file_path).demographics
        status_dict = {}
        for demo in demographics:
            vcf_file_name = demo['vcf_file_name']
            vcf_file_path = path.join(VCF_LOCATION, vcf_file_name)
            # Embed the parsed VCF content in the posted document.
            vcf_file_content = NGS_Parser(vcf_file_path)
            demo['vcf_file_content'] = vcf_file_content.to_dict()
            with app.test_request_context():
                _, _, _, status, _ = post_internal('ngs_data', demo)
                status_dict[vcf_file_name] = status
        status = 201
        message = "Status for request:\n"
        for vcf_name in status_dict:
            # BUG FIX: status codes are ints; concatenating them to a str
            # raised TypeError before.
            message += vcf_name + ":" + str(status_dict[vcf_name]) + "\n"
    else:
        message = "PDF File does not exists in " + \
            os.path.join(os.getcwd(), 'app')
        status = 404
    message = json.dumps({"message": message})
    return app.response_class(message, content_type="application/json",
                              status=status)
def before_POST_posts(request):
    '''
    Callback to be executed before documents have been validated.
    Primarily used to handle multipart form data.

    Non-multipart requests fall through to Eve's normal POST handling;
    multipart requests are posted internally and the default flow is
    aborted with a hand-built response.
    '''
    if request.mimetype == 'multipart/form-data':
        # This is designed to parse only a single `request.form` item.
        # Additional items will be ignored.
        payload = {}
        for key in request.form:
            payload = json.loads(request.form[key])
            break

        # The actual names of the keys used to send `request.files` data are
        # ignored.
        attachments = []
        for key in request.files:
            file = request.files[key]
            attachments.append(save_attachment(file))
        if attachments:
            payload['attachments'] = attachments

        response = post_internal('posts', payload)

        # Instead of continuing with the default response (where the request
        # will be sent to Eve's `post` function), we abort normal operation and
        # create our own response.
        abort(send_response('posts', response))
def xml_collections_endpoint(**lookup):
    """Serve a resource endpoint in XML when the client speaks XML,
    otherwise fall through to Eve's standard collections endpoint."""
    resource = _resource()
    if not request.content_type.endswith("/xml"):
        return collections_endpoint(**lookup)

    method = request_method()
    if method == "POST":
        # response = post(resource, payl=xml2json(request.data))
        response = post_internal(resource,
                                 payl=xml2json(request.data),
                                 skip_validation=True)
    elif method == "GET":
        response = collections_endpoint(**lookup)
        # Re-encode the JSON item list as a <gdxp><supplier> XML document.
        items = json.loads(response.data.decode('utf-8'))['_items']
        response.data = xmltodict.unparse(
            {'gdxp': {"supplier": list(map(popper, items))}})
    else:
        raise NotImplementedError('Not implemented')
    return send_response(resource, response)
def send_inter_component_message(recipient: str = '', msg_type: str = '', json_payload: object = None):
    """
    Sends a message from the middleware to another component using the
    interComponentMessage service. This ensures the message is retransmitted
    on connection failures.

    @param recipient: The recipient string must be one of the component short
        names configured in the INTERCOMPONENT_SETTINGS environment variable.
    @param msg_type: The msg_type string is important if there are different
        messages to the same recipient url. Even if not applicable, the string
        must be set to something at least 3 characters long.
    @param json_payload: The payload needs to be of type json

    It depends on the recipient's settings in INTERCOMPONENT_SETTINGS whether a
    full interComponentMessage or just the payload is sent to the recipient.
    """
    if recipient not in INTERCOMPONENT_SETTINGS:
        error = "send_inter_component_message(): ERROR recipient '%s' not configured in INTERCOMPONENT_SETTINGS" % recipient
        print(error)
        # Should we throw an exception? This would fail the incoming request
        # that triggered the message sending
        # raise Exception(error)
    else:
        msg = {
            'notification_id': str(uuid.uuid4()),
            'sender_id': 'MIDDLEWARE',
            'recipient_id': recipient,
            'message_type': msg_type,
            # NOTE(review): utcnow() is naive (no tzinfo) — looks like the
            # consumer expects UTC; confirm.
            'creation_time': datetime.datetime.utcnow().replace(microsecond=0),
            'payload': json_payload
        }
        internal_response = post_internal('interComponentMessage', msg)
        if internal_response[0]['_status'] != 'OK':
            print('send_inter_component_message(): ERROR ', internal_response)
            print('                                on internal_post ', msg)
def _import_branches():
    """Bulk-import branches from the request payload: the 'branches'
    collection is wiped and re-filled with the cleaned items. Returns
    ``{'status': bool}`` — True when the POST returned 201."""
    payload = request.json
    items = payload[config.ITEMS]

    def branch_convert(item):
        # Keep only whitelisted keys whose values are not None.
        allowed_key = ('name', 'address')
        return {k: v for k, v in item.items()
                if k in allowed_key and v is not None}

    items = [branch_convert(item) for item in items]

    status_all = True
    resource = 'branches'
    # Start from an empty collection; a missing one is fine.
    try:
        _ = delete(resource)
    except NotFound:
        pass
    response = post_internal(resource, items)
    pprint(response)
    status_all = status_all and response[3] == 201
    return jsonify({'status': status_all})
def prepare_user(app):
    """Ensure the configured submitter account exists as a superuser,
    log in through the test client and return the auth token."""
    with app.test_request_context():
        account = app.config['SUBMITTER_ACCOUNT']
        account.update({'role': Role.SUPERUSER})
        account['@type'] = 'Account'
        account['databases'] = app.config['DATABASES']
        existing = app.data.find_one_raw('accounts',
                                         {'email': account['email']})
        if existing is None:
            # A concurrent insert is tolerated.
            with suppress(DuplicateKeyError):
                # If we validate, RolesAuth._set_database changes our db
                post_internal('accounts', dict(account), True)
        response = app.test_client().post(
            'login',
            data=json.dumps(account),
            content_type='application/json')
        return json.loads(response.data.decode())['token']
def test_post_internal(self):
    """post_internal is available and returns 201 for a valid payload."""
    doc = {'ref': "1234567890123456789054321"}
    with self.app.test_request_context(self.known_resource_url):
        r, _, _, status = post_internal(self.known_resource, payl=doc)
        self.assert201(status)
def test_api_prefix_post_internal(self):
    """post_internal works under an API prefix both with and without an
    explicit request path (regression test for pyeve/eve#810)."""
    # https://github.com/pyeve/eve/issues/810
    from eve.methods.post import post_internal

    settings_file = os.path.join(self.this_directory, "test_prefix.py")
    self.app = Eve(settings=settings_file)
    self.test_prefix = self.app.test_client()

    # This works fine
    with self.app.test_request_context(method="POST", path="/prefix/contacts"):
        _, _, _, status_code, _ = post_internal("contacts", {})
    self.assert201(status_code)

    # This fails unless #810 is fixed
    with self.app.test_request_context():
        _, _, _, status_code, _ = post_internal("contacts", {})
    self.assert201(status_code)
def whitelist_admin(_id=None):
    """Admin endpoint for the whitelist collection.

    POST creates a document via Eve's post_internal; PUT sets fields of
    the document identified by *_id*. Both return the collection as JSON.
    """
    if request.method == 'POST':
        return dumps(post_internal('whitelist', request.json)[0])
    elif request.method == 'PUT':
        # update_one replaces Collection.update, which is deprecated in
        # pymongo 3 and removed in pymongo 4; identical for an _id filter.
        app.data.driver.db['whitelist'].update_one({"_id": ObjectId(_id)},
                                                   {"$set": request.json})
    return dumps(get_internal('whitelist')[0])
def student_classes_admin(_id=None):
    """Admin endpoint for the student_classes collection.

    POST creates a document via Eve's post_internal; PUT sets fields of
    the document identified by *_id*. Both return the collection as JSON.
    """
    if request.method == 'POST':
        return dumps(post_internal('student_classes', request.json)[0])
    elif request.method == 'PUT':
        # update_one replaces Collection.update, which is deprecated in
        # pymongo 3 and removed in pymongo 4; identical for an _id filter.
        app.data.driver.db['student_classes'].update_one(
            {"_id": ObjectId(_id)}, {"$set": request.json})
    return dumps(get_internal('student_classes')[0])
def execute_post_internal(resource: str, payload: dict, skip_validation=False) -> dict:
    """Execute a POST internally, reusing the same Request (and therefore
    the same database, auth, etc.).

    :raises InnerRequestError: when the inner status is not 2xx.
    :return: the response document (actual data).
    """
    result = post_internal(resource, payload, skip_validation)
    body, status = result[0], result[3]
    if status < 200 or status >= 300:
        raise InnerRequestError(status, body)
    return body
def test_post_internal(node_id):
    """Re-post an existing node through post_internal, stripping the
    Eve-managed meta fields first."""
    import pprint
    nodes_collection = app.data.driver.db['nodes']
    node = nodes_collection.find_one(ObjectId(node_id))
    # Eve regenerates these on insert; posting them back would clash.
    for meta_field in ('_id', '_etag', '_updated', '_created'):
        node.pop(meta_field, None)
    pprint.pprint(node)
    print(post_internal('nodes', node))
def after_insert_actiondowntime(items):
    """
    Hook after action downtime inserted.

    Create one 'history' event per inserted downtime action.

    :param items: inserted actiondowntime documents
    :type items: list
    :return: None
    """
    for dummy, item in enumerate(items):
        # Create an history event for the new downtime
        data = {
            'host': item['host'],
            'service': item['service'],
            'user': item['user'],
            'type': 'downtime.' + item['action'],
            'message': item['comment']
        }
        post_internal("history", data, True)
def create_internal(self, project_id, name, filters, y_axes):
    """Create a 'views' document for the given project via post_internal
    and return its result tuple."""
    view_doc = {
        'project': project_id,
        'name': name,
        'filters': filters,
        'yAxes': y_axes,
        # A new view starts with no watchers.
        'watches': [],
    }
    return post_internal('views', view_doc)
def post_internal(self, resource: str, payl=None, skip_validation=False):
    """Workaround for Eve issue https://github.com/nicolaiarocci/eve/issues/810

    Fakes a request routed at the resource's URL so Eve resolves the
    endpoint correctly, then truncates the result to 4 elements for a
    stable return shape across Eve versions.
    """
    from eve.methods.post import post_internal
    url = self.config['URLS'][resource]
    path = '%s/%s' % (self.api_prefix, url)
    with self.__fake_request_url_rule('POST', path):
        return post_internal(resource, payl=payl, skip_validation=skip_validation)[:4]
def activity_subscribe(user_id, context_object_type, context_object_id):
    """Subscribe a user to changes for a specific context.

    A subscription is created only when none exists yet (idempotent).

    :param user_id: id of the user we are going to subscribe
    :param context_object_type: hardcoded index, check the notifications/model.py
    :param context_object_id: object id, to be traced with context_object_type_id
    """
    collection = current_app.data.driver.db['activities-subscriptions']
    lookup = {
        'user': user_id,
        'context_object_type': context_object_type,
        'context_object': context_object_id,
    }
    if collection.find_one(lookup) is None:
        post_internal('activities-subscriptions', lookup)
def test_post_internal_skip_validation(self):
    """With skip_validation=True the document is accepted as-is
    (also guards the regression from #726)."""
    doc = {'ref': "1234567890123456789054321"}
    with self.app.test_request_context(self.known_resource_url):
        r, _, _, status = post_internal(self.known_resource,
                                        payl=doc,
                                        skip_validation=True)
        self.assert201(status)