def create_view(self,):
    """Handle the "create event" admin page.

    GET renders the creation form; POST creates the event from the submitted
    form (plus any sponsor logo uploads) and redirects to the next step.
    """
    if request.method == 'POST':
        # Collect sponsor logo uploads, if any were submitted.
        img_files = []
        imd = ImmutableMultiDict(request.files)
        # Only gather files when the field is present AND non-empty
        # (browsers submit an empty-filename entry for untouched file inputs).
        if 'sponsors[logo]' in imd and request.files['sponsors[logo]'].filename != "":
            for img_file in imd.getlist('sponsors[logo]'):
                img_files.append(img_file)
        event = DataManager.create_event(request.form, img_files)
        # Publishing requires a location; bounce back to the form anchored at
        # the location step when it is missing.
        if request.form.get('state', u'Draft') == u'Published' and string_empty(event.location_name):
            flash(
                "Your event was saved. To publish your event please review the highlighted fields below.",
                "warning")
            return redirect(url_for(
                '.edit_view', event_id=event.id) + "#step=location_name")
        if event:
            return redirect(url_for('.details_view', event_id=event.id))
        # NOTE(review): reached only when create_event returns a falsy value —
        # presumably a creation failure; confirm against DataManager.
        return redirect(url_for('.index_view'))
    # GET: render the creation form with the lookup data it needs.
    return self.render(
        '/gentelella/admin/event/new/new.html',
        start_date=datetime.datetime.now() + datetime.timedelta(days=10),
        event_types=DataGetter.get_event_types(),
        event_licences=DataGetter.get_event_licences(),
        event_topics=DataGetter.get_event_topics(),
        event_sub_topics=DataGetter.get_event_subtopics(),
        timezones=DataGetter.get_all_timezones())
def route():
    """Validate the routing request and run the graph search.

    The request body is a query string (``request.data``), parsed into a
    multi-dict so repeated ``target_node_ids`` values are preserved.
    Returns 400 with the validation errors when the input is invalid.
    """
    # parse_qs yields {key: [values]}; ImmutableMultiDict keeps every value.
    data = ImmutableMultiDict(urlparse.parse_qs(request.data))
    errors = validate(data)
    if errors['errors']:
        return jsonify(errors), 400
    target_node_ids = data.getlist('target_node_ids')
    speed = data.get('speed')
    bonuses = get_bonuses(data)
    return jsonify(graph.run(target_node_ids, bonuses, speed))
def login():
    """Facebook-style login endpoint (Python 2 era).

    POST with a ``userID``: creates the user on first sight, otherwise
    acknowledges the existing user. Without a ``userID`` the client is
    redirected to the index page.
    """
    error = None  # NOTE(review): never used — candidate for removal
    if request.method == 'POST':
        userID = request.form['userID']
        print "postttt"  # NOTE(review): debug prints left in throughout
        # if there's a user id & no db user => create
        if userID:
            print "i can hz id"
            print userID
            user = models.User.query.get(userID)
            print 'i got past the querying'
            print user
            if not user:
                print "I DON'T EXIST ;_____;"
                print request.form['userID']
                print 'req form'
                # print request.form['user']
                baseObject = ImmutableMultiDict(request.form)
                # userObj = baseObject.getlist('user')
                print 'name?'
                print baseObject.get('name')
                # print userObj
                # raise IOError
                # New user: role 0, carrying the Facebook access token.
                user = models.User(id = userID, name = baseObject.get('name'), role = 0, fb_access_token = request.form['accessToken'])
                # Find any existing users in our db who are in this person's friend list
                db_session.add(user)
                db_session.commit()
                return jsonify({'success': True}), 200
            else:
                print "i exist?"
                return jsonify({'success': True}), 200
        # there's no userId in the request
        else:
            return redirect(url_for('index'))
    # NOTE(review): GET falls through to this placeholder response.
    return "something"
def get_page(recid):
    """Render the metadata-edit page for record *recid*.

    Aborts with 401 when the current user is not allowed to edit the
    record. Domain-specific metadata is flattened into the record before
    the form is built.
    """
    if not is_record_editable(recid):
        abort(401)
    record = get_record_details(int(recid), current_user['email'])
    # Flatten domain metadata into the top-level record dict.
    record.update(record.get('domain_metadata', {}))
    # Hide the open-access flag from the form for closed/restricted records.
    # (Membership test replaces the chained `== False or == 'restricted'`,
    # PEP 8 E712; equality semantics are identical.)
    if record.get('open_access') in (False, 'restricted'):
        del record['open_access']
    form = ImmutableMultiDict(record)
    metaclass, meta, meta_form = _get_meta_form_data(form.get('domain'), form)
    return render_template('b2share-edit.html',
                           recid=recid,
                           metadata=meta,
                           form=meta_form,
                           files=_bibdoc_file_list(recid),
                           domain=metaclass,
                           getattr=getattr)
def filter_by_args(cls, q, args: ImmutableMultiDict):
    """Narrow query *q* according to the request arguments.

    Applies a language filter (only for known languages, and never for the
    'All' sentinel), an optional status filter, and a maturity flag.
    Returns the (possibly unchanged) query.
    """
    language = args.get('lang')
    status = args.get('status')

    # Filter by language only when it names a language actually present.
    if language != 'All' and (language,) in cls.language_distinct():
        q = q.filter(cls.language == language)

    # Only the two recognised status values are honoured.
    if status in ('promising', 'hopeless'):
        q = q.filter(cls.status == status)

    # Any non-empty 'mature' argument switches the flag filter on.
    if args.get('mature'):
        q = q.filter(cls.mature.is_(True))

    return q
def filter_by_args(cls, q, args: ImmutableMultiDict):
    """Narrow query *q* according to the request arguments.

    Applies language, status and maturity filters, then an optional
    substring search on the full name. An invalid search term is ignored.
    Returns the (possibly unchanged) query.
    """
    language = args.get('lang')
    status = args.get('status')

    # Filter by language only when it names a language actually present.
    if language != 'All' and (language,) in cls.language_distinct():
        q = q.filter(cls.language == language)

    # Only the two recognised status values are honoured.
    if status in ('promising', 'hopeless'):
        q = q.filter(cls.status == status)

    # Any non-empty 'mature' argument switches the flag filter on.
    if args.get('mature'):
        q = q.filter(cls.mature.is_(True))

    # SearchTerm raises ValueError for unusable terms; skip the filter then.
    try:
        pattern = str(SearchTerm(args.get('term', '')))
    except ValueError:
        pass
    else:
        q = q.filter(cls.full_name.like(pattern))

    return q
def get_data(self, form_data):
    """Extract this section's data from a submitted form.

    :param form_data: the submitted form data
    :type form_data: :class:`werkzeug.ImmutableMultiDict`
    :return: parsed and filtered data

    Values are whitespace-stripped, then parsed question by question:
    form fields not described by this section are dropped, section fields
    absent from the form are ignored, and each present field is parsed
    according to its question's type. Follow-up answers whose lead
    question no longer applies are removed before returning.
    """
    # Normalise every submitted value (multi-values included).
    stripped = ImmutableMultiDict(
        (key, value.strip()) for key, value in form_data.items(multi=True))

    section_data = {}
    for question in self.questions:
        section_data.update(question.get_data(stripped))

    return drop_followups(self, section_data)
def create_view(self, ):
    """Handle the "create event" admin page (permission-checked variant).

    GET renders the creation form; POST creates the event from the submitted
    form (plus any sponsor logo uploads) and redirects to the next step.
    """
    if request.method == 'POST':
        # Only users with the create-event permission may POST here.
        if not current_user.can_create_event():
            flash("You don't have permission to create event.")
            return redirect(url_for('.index_view'))
        # Collect sponsor logo uploads, if any were submitted.
        img_files = []
        imd = ImmutableMultiDict(request.files)
        # Only gather files when the field is present AND non-empty
        # (browsers submit an empty-filename entry for untouched file inputs).
        if 'sponsors[logo]' in imd and request.files['sponsors[logo]'].filename != "":
            for img_file in imd.getlist('sponsors[logo]'):
                img_files.append(img_file)
        event = DataManager.create_event(request.form, img_files)
        # Publishing requires a location; bounce back to the form anchored at
        # the location step when it is missing.
        if request.form.get('state', u'Draft') == u'Published' and string_empty(event.location_name):
            flash(
                "Your event was saved. To publish your event please review the highlighted fields below.",
                "warning")
            return redirect(url_for(
                '.edit_view', event_id=event.id) + "#step=location_name")
        if event:
            return redirect(url_for('.details_view', event_id=event.id))
        return redirect(url_for('.index_view'))
    # Pre-generate a call-for-speakers hash for the form.
    # NOTE(review): `hash` shadows the builtin, and the collision check only
    # retries once — a second collision would slip through; confirm intent.
    hash = get_random_hash()
    if CallForPaper.query.filter_by(hash=hash).all():
        hash = get_random_hash()
    # GET: render the creation form with the lookup data it needs.
    return self.render(
        '/gentelella/admin/event/new/new.html',
        start_date=datetime.datetime.now() + datetime.timedelta(days=10),
        event_types=DataGetter.get_event_types(),
        event_licences=DataGetter.get_event_licences(),
        event_topics=DataGetter.get_event_topics(),
        event_sub_topics=DataGetter.get_event_subtopics(),
        timezones=DataGetter.get_all_timezones(),
        cfs_hash=hash,
        payment_countries=DataGetter.get_payment_countries(),
        payment_currencies=DataGetter.get_payment_currencies(),
        included_settings=self.get_module_settings())
def test_immutable_structures():
    """Immutable datastructures must reject every mutating operation."""
    # ImmutableList: all in-place list mutators raise TypeError.
    l = ImmutableList([1, 2, 3])
    assert_raises(TypeError, l.__delitem__, 0)
    assert_raises(TypeError, l.__delslice__, 0, 1)
    assert_raises(TypeError, l.__iadd__, [1, 2])
    assert_raises(TypeError, l.__setitem__, 0, 1)
    assert_raises(TypeError, l.__setslice__, 0, 1, [2, 3])
    assert_raises(TypeError, l.append, 42)
    assert_raises(TypeError, l.insert, 0, 32)
    assert_raises(TypeError, l.pop)
    assert_raises(TypeError, l.extend, [2, 3])
    assert_raises(TypeError, l.reverse)
    assert_raises(TypeError, l.sort)
    # ...while reads and equality still behave like a list.
    assert l == [1, 2, 3]

    # ImmutableDict: all dict mutators raise TypeError.
    d = ImmutableDict(foo=23, bar=42)
    assert_raises(TypeError, d.setdefault, 'baz')
    assert_raises(TypeError, d.update, {2: 3})
    assert_raises(TypeError, d.popitem)
    assert_raises(TypeError, d.__delitem__, 'foo')
    assert_raises(TypeError, d.clear)
    assert d == dict(foo=23, bar=42)

    d = ImmutableDict.fromkeys([1, 2])
    # PEP 8 (E711): compare to None with `is`, not `==`.
    assert d[1] is None and d[2] is None

    # ImmutableMultiDict: the multi-value mutators raise TypeError too.
    d = ImmutableMultiDict(d)
    assert_raises(TypeError, d.add, 'fuss', 44)
    assert_raises(TypeError, d.popitemlist)
    assert_raises(TypeError, d.poplist, 'foo')
    assert_raises(TypeError, d.setlist, 'tadaa', [1, 2])
    assert_raises(TypeError, d.setlistdefault, 'tadaa')

    d = ImmutableMultiDict.fromkeys([1, 2])
    assert d[1] is None and d[2] is None

    # EnvironHeaders: read-only view over the WSGI environ.
    d = EnvironHeaders({'HTTP_X_FOO': 'test'})
    assert_raises(TypeError, d.__delitem__, 0)
    assert_raises(TypeError, d.add, 42)
    assert_raises(TypeError, d.pop, 'x-foo')
    assert_raises(TypeError, d.popitem)
    assert_raises(TypeError, d.setdefault, 'foo', 42)
    assert dict(d.items()) == {'X-Foo': 'test'}
    assert_raises(TypeError, d.copy)
def parse_grafana(alert: JSON, match: Dict[str, Any], args: ImmutableMultiDict) -> Alert:
    """Build an :class:`Alert` from a Grafana webhook payload.

    :param alert: the Grafana alert body (``state``, ``ruleId``, ``ruleName``...)
    :param match: one entry of the alert's ``evalMatches`` (metric + value + tags)
    :param args: request query parameters overriding environment/severity/etc.
    """
    # Severity follows the Grafana state; anything unknown is 'indeterminate'.
    alerting_severity = args.get('severity', 'major')
    if alert['state'] == 'alerting':
        severity = alerting_severity
    elif alert['state'] == 'ok':
        severity = 'normal'
    else:
        severity = 'indeterminate'
    environment = args.get('environment', 'Production')  # TODO: verify at create?
    event_type = args.get('event_type', 'performanceAlert')
    group = args.get('group', 'Performance')
    origin = args.get('origin', 'Grafana')
    service = args.get('service', 'Grafana')
    timeout = args.get('timeout', type=int)
    # Metric tags become attributes; dots are illegal in attribute keys.
    attributes = match.get('tags', None) or dict()
    attributes = {k.replace('.', '_'): v for (k, v) in attributes.items()}
    attributes['ruleId'] = str(alert['ruleId'])
    # Rule/image links are rendered as HTML anchors in the console.
    if 'ruleUrl' in alert:
        attributes['ruleUrl'] = '<a href="%s" target="_blank">Rule</a>' % alert['ruleUrl']
    if 'imageUrl' in alert:
        attributes['imageUrl'] = '<a href="%s" target="_blank">Image</a>' % alert['imageUrl']
    return Alert(
        resource=match['metric'],
        event=alert['ruleName'],
        environment=environment,
        severity=severity,
        service=[service],
        group=group,
        value='%s' % match['value'],
        text=alert.get('message', None) or alert.get('title', alert['state']),
        tags=list(),
        attributes=attributes,
        origin=origin,
        event_type=event_type,
        timeout=timeout,
        raw_data=json.dumps(alert)
    )
def correct_request_types(self, req):
    """Wrap *req* in a read-only multi-dict.

    :param req: a mapping or iterable of key/value pairs
    :return: an :class:`ImmutableMultiDict` view of the same data
    """
    return ImmutableMultiDict(req)
def _validate_message(self, msg: FixMessage) -> bool:
    """Validate an inbound FIX message against session state.

    Rejects (and returns False) on bad encoding, a wrong sequence number,
    missing required tags, a wrong FIX version, or a sender/target mismatch.
    Returns True and refreshes the last-receive timestamp when valid.
    """
    try:
        # Hack to make msg.get return decoded strings.
        # (UnicodeDecodeError is a subclass of ValueError, so a bad byte
        # sequence lands in the except branch below.)
        decoded = ImmutableMultiDict([(k, v.decode()) for k, v in msg.pairs])
        msg.get = lambda key: decoded.get(fix_val(key))
    except ValueError:
        self.reject_message(
            msg,
            reason='Invalid encoding',
            error_code=simplefix.
            SESSIONREJECTREASON_INCOORECT_DATA_FORMAT_FOR_VALUE,
        )
        return False
    # Adopt the counterparty's ID from the first message that carries one.
    if self._target_id is None and msg.get(simplefix.TAG_SENDER_COMPID):
        self._target_id = msg.get(simplefix.TAG_SENDER_COMPID)
    # Sequence-number bookkeeping: advance only on an exact match.
    if msg.get(simplefix.TAG_MSGSEQNUM):
        if msg.get(simplefix.TAG_MSGSEQNUM) == str(
                self._next_recv_seq_num):
            self._next_recv_seq_num += 1
        else:
            self.reject_message(
                msg,
                reason='Incorrect sequence number',
                tag_id=simplefix.TAG_MSGSEQNUM,
                error_code=simplefix.
                SESSIONREJECTREASON_VALUE_INCORRECT_FOR_THIS_TAG)
            return False
    # Every required header tag must be present.
    for tag, description in [
        (simplefix.TAG_MSGTYPE, 'message type'),
        (simplefix.TAG_BEGINSTRING, 'begin string'),
        (simplefix.TAG_SENDER_COMPID, 'sender ID'),
        (simplefix.TAG_TARGET_COMPID, 'target ID'),
        (simplefix.TAG_SENDING_TIME, 'sending time'),
        (simplefix.TAG_MSGSEQNUM, 'sequence number'),
    ]:
        if not msg.get(tag):
            self.reject_message(msg,
                                reason=f'Missing {description}',
                                tag_id=tag,
                                error_code=simplefix.
                                SESSIONREJECTREASON_REQUIRED_TAG_MISSING)
            return False
    # Only FIX 4.2 sessions are supported.
    if msg.get(simplefix.TAG_BEGINSTRING) != 'FIX.4.2':
        self.reject_message(
            msg,
            reason='Invalid FIX version',
            tag_id=simplefix.TAG_BEGINSTRING,
            error_code=simplefix.
            SESSIONREJECTREASON_VALUE_INCORRECT_FOR_THIS_TAG)
        return False
    # Sender/target must match the session's counterparty and own ID.
    elif msg.get(simplefix.TAG_SENDER_COMPID) != self._target_id:
        self.reject_message(
            msg,
            reason='Incorrect sender',
            tag_id=simplefix.TAG_SENDER_COMPID,
            error_code=simplefix.
            SESSIONREJECTREASON_VALUE_INCORRECT_FOR_THIS_TAG)
        return False
    elif msg.get(simplefix.TAG_TARGET_COMPID) != self._sender_id:
        self.reject_message(
            msg,
            reason='Incorrect target',
            tag_id=simplefix.TAG_TARGET_COMPID,
            error_code=simplefix.
            SESSIONREJECTREASON_VALUE_INCORRECT_FOR_THIS_TAG)
        return False
    self._last_recv_time = time.time()
    return True
def login_with_iot_callback():
    """Verify a decentralised IoT credential and log the holder in.

    Flow: fetch the issuer's verification key, verify the credential with
    Zenroom (unless bypassed in config), validate the session token, then
    issue an OAuth password-grant token and update the user's profile from
    the optional attributes. Returns 401 for bad credentials/tokens and 412
    for issuer or unexpected errors.
    """
    # NOTE(review): hard-coded demo credentials were listed in comments here;
    # removed from source — rotate them if they were ever real.
    data = request.json
    session_token = data['sessionId']
    # Barcelona district codes as sent in the 'schema:district' attribute.
    districts = {
        "1": "Ciutat Vella",
        "2": "Eixample",
        "3": "Sants-Montjuïc",
        "4": "Les Corts",
        "5": "Sarrià-Sant Gervasi",
        "6": "Gràcia",
        "7": "Horta-Guinardó",
        "8": "Nou Barris",
        "9": "Sant Andreu",
        "10": "Sant Martí",
    }
    try:
        current_app.logger.info("starting callback")
        authorizable_attribute_id = data['credential'][
            'authorizable_attribute_id']
        current_app.logger.info("authorizable_attribute_id: " +
                                authorizable_attribute_id)
        credential_issuer_endpoint_address = data['credential'][
            'credential_issuer_endpoint_address']
        current_app.logger.info("credential_issuer_endpoint_address: " +
                                credential_issuer_endpoint_address)
        # Community the credential grants membership of.
        bcn_community_obj = Community.get_from_authorizable_attribute_id(
            authorizable_attribute_id)
        current_app.logger.info(
            "URL: " + credential_issuer_endpoint_address +
            "/authorizable_attribute/{}".format(authorizable_attribute_id))
        # Fetch the issuer's public verification key.
        res = requests.get(
            credential_issuer_endpoint_address +
            "/authorizable_attribute/{}".format(authorizable_attribute_id))
        if res.ok:
            credential_key = json.dumps(
                res.json()["verification_key"]).encode()
            value = json.dumps(data['credential']['value']).encode()
            # Check with zenroom if login is valid (unless bypassed).
            verify_response_msg = "OK"
            current_app.logger.info("\tvalue: {}".format(value))
            current_app.logger.info(
                "\tAll good, got this result: {}".format(res.json()))
            if cfg['iotconfig']['bypass'] == 'no':
                with open('verifyer.zencode') as file:
                    verify_credential_script = file.read()
                try:
                    verify_response, errs = zenroom.execute(
                        verify_credential_script.encode(),
                        data=credential_key,
                        keys=value)
                    verify_response_msg = verify_response.decode()
                except Exception:
                    # Was a bare `except:`; narrowed so Ctrl-C etc. escape.
                    verify_response_msg = "not OK"
            if verify_response_msg == "OK":
                tkn_manager = TokenManager()
                tkn_status = tkn_manager.validate_token(session_token)
                if tkn_status == '1':
                    # login: forge a client-credentials Basic auth header and
                    # a password-grant form for the OAuth server.
                    request.headers.environ['HTTP_AUTHORIZATION'] = \
                        'Basic ' + b64encode(bytes(
                            cfg['oauth']['client_username'] + ':' +
                            cfg['oauth']['client_password'],
                            'utf-8')).decode('utf-8')
                    data2 = ImmutableMultiDict([('grant_type', 'password'),
                                                ('username', session_token),
                                                ('scope', 'profile'),
                                                ('password', 'dummy')])
                    request.form = data2
                    # Get personal data from the optional attributes.
                    name = ""
                    city = "Barcelona"
                    age = ""
                    area = ""
                    profile_data_array = data['optionalAttributes']
                    for profile_data in profile_data_array:
                        if profile_data[
                                'attribute_id'] == "schema:dateOfBirth":
                            # process age dd/mm/yyyy
                            day, month, year = profile_data['value'].split(
                                '/')
                            today = date.today()
                            age = today.year - int(year) - (
                                (today.month, today.day) <
                                (int(month), int(day)))
                        if profile_data['attribute_id'] == "schema:name":
                            name = profile_data['value']
                        if profile_data['attribute_id'] == "schema:city":
                            city = profile_data['value']
                        if profile_data['attribute_id'] == "schema:district":
                            if profile_data['value'] in districts:
                                area = districts[profile_data['value']]
                    User.update_user(session_token, name, city, age, area)
                    User.user_add_community(session_token,
                                            bcn_community_obj.id)
                    token = authorization.create_token_response(request)
                    return token
                else:
                    response = jsonify(message="Invalid Tokken")
                    response.status_code = 401
                    return response
            else:
                response = jsonify(message="Invalid Credentials")
                response.status_code = 401
                return response
        else:
            current_app.logger.info(
                "\tCalls not getting back, got this error: {}".format(
                    res.json()))
            response = jsonify(
                message=
                "Could not get public key data from credential_issuer_endpoint_address"
            )
            response.status_code = 412
            return response
    except Exception as e:
        # BUG FIX: str() both operands — concatenating an exception class or
        # instance to a str raised TypeError, masking the original error.
        current_app.logger.error("Unexpected error:" + str(sys.exc_info()[0]))
        current_app.logger.error("Error description: " + str(e))
        response = jsonify(message="Unexpected Error in Validation")
        response.status_code = 412
        return response
def index_view():
    """Super-admin settings page: apply POSTed settings, then render the page.

    POST handles three independent form sections: the super-admin email,
    the image-size configuration (update-or-recreate), and the generic
    key/value application settings.
    """
    if request.method == 'POST':
        # Section 1: super-admin email.
        if 'super_admin_email' in request.form:
            super_admin = DataGetter.get_super_admin_user()
            super_admin.email = request.form['super_admin_email']
            save_to_db(super_admin)
        # Section 2: image sizes — update existing rows, or wipe and recreate.
        if 'event-thumbnail_width' in request.form:
            im_size_profile = DataGetter.get_image_sizes_by_type(
                type='profile')
            im_size_event = DataGetter.get_image_sizes_by_type(type='event')
            if im_size_profile and im_size_event:
                im_size_profile.full_width = request.form[
                    'profile-large_width']
                # NOTE(review): heights reuse the *width* fields throughout the
                # profile section — presumably profile images are square; confirm.
                im_size_profile.full_height = request.form[
                    'profile-large_width']
                im_size_profile.full_aspect = request.form.get(
                    'profile-large_aspect', 'off')
                im_size_profile.full_quality = request.form[
                    'profile-large_quality']
                im_size_profile.icon_width = request.form['profile-icon_width']
                im_size_profile.icon_height = request.form[
                    'profile-icon_width']
                im_size_profile.icon_aspect = request.form.get(
                    'profile-icon_aspect', 'off')
                im_size_profile.icon_quality = request.form[
                    'profile-icon_quality']
                im_size_profile.thumbnail_width = request.form[
                    'profile-thumbnail_width']
                im_size_profile.thumbnail_height = request.form[
                    'profile-thumbnail_width']
                im_size_profile.thumbnail_aspect = request.form.get(
                    'profile-thumbnail_aspect', 'off')
                im_size_profile.thumbnail_quality = request.form[
                    'profile-thumbnail_quality']
                im_size_profile.logo_width = None
                im_size_profile.logo_height = None
                save_to_db(im_size_profile, "Image Sizes saved")
                im_size_event.full_width = request.form['event-large_width']
                im_size_event.full_height = request.form['event-large_height']
                im_size_event.full_aspect = request.form.get(
                    'event-large_aspect', 'off')
                im_size_event.full_quality = request.form[
                    'event-large_quality']
                im_size_event.icon_width = request.form['event-icon_width']
                im_size_event.icon_height = request.form['event-icon_height']
                im_size_event.icon_aspect = request.form.get(
                    'event-icon_aspect', 'off')
                im_size_event.icon_quality = request.form['event-icon_quality']
                im_size_event.thumbnail_width = request.form[
                    'event-thumbnail_width']
                im_size_event.thumbnail_height = request.form[
                    'event-thumbnail_height']
                im_size_event.thumbnail_aspect = request.form.get(
                    'event-thumbnail_aspect', 'off')
                im_size_event.thumbnail_quality = request.form[
                    'event-thumbnail_quality']
                im_size_event.logo_width = request.form['logo_width']
                im_size_event.logo_height = request.form['logo_height']
                save_to_db(im_size_event, "Image Sizes saved")
            else:
                # No existing rows: delete leftovers and create fresh entries.
                all_im_sizes = DataGetter.get_image_sizes()
                for sizes in all_im_sizes:
                    delete_from_db(sizes, 'Delete Image Sizes')
                im_size = ImageSizes(
                    type='profile',
                    full_width=request.form['profile-large_width'],
                    full_height=request.form['profile-large_width'],
                    full_aspect=request.form.get('profile-large_aspect',
                                                 'off'),
                    full_quality=request.form['profile-large_quality'],
                    icon_width=request.form['profile-icon_width'],
                    icon_height=request.form['profile-icon_width'],
                    icon_aspect=request.form.get('profile-icon_aspect',
                                                 'off'),
                    icon_quality=request.form['profile-icon_quality'],
                    thumbnail_width=request.form['profile-thumbnail_width'],
                    thumbnail_height=request.form['profile-thumbnail_width'],
                    thumbnail_aspect=request.form.get(
                        'profile-thumbnail_aspect', 'off'),
                    thumbnail_quality=request.
                    form['profile-thumbnail_quality'],
                    logo_width=None,
                    logo_height=None)
                save_to_db(im_size, "Image Sizes saved")
                # NOTE(review): the event entry reads the *profile* quality
                # fields below ('profile-large_quality' etc.) — looks like a
                # copy/paste slip; confirm before changing.
                im_size = ImageSizes(
                    type='event',
                    full_width=request.form['event-large_width'],
                    full_height=request.form['event-large_height'],
                    full_aspect=request.form.get('event-large_aspect', 'off'),
                    full_quality=request.form['profile-large_quality'],
                    icon_width=request.form['event-icon_width'],
                    icon_height=request.form['event-icon_height'],
                    icon_aspect=request.form.get('event-icon_aspect', 'off'),
                    icon_quality=request.form['profile-icon_quality'],
                    thumbnail_width=request.form['event-thumbnail_width'],
                    thumbnail_height=request.form['event-thumbnail_height'],
                    thumbnail_aspect=request.form.get('event-thumbnail_aspect',
                                                      'off'),
                    thumbnail_quality=request.
                    form['profile-thumbnail_quality'],
                    logo_width=request.form['logo_width'],
                    logo_height=request.form['logo_height'])
                save_to_db(im_size, "Image Sizes saved")
        # Section 3: generic settings. NOTE(review): the 'service_fee' branch
        # builds an immutable dict that is never written back via
        # set_settings — presumably fees are persisted elsewhere; confirm.
        if 'service_fee' in request.form:
            dic = ImmutableMultiDict(request.form)
        else:
            dic = dict(request.form.copy())
            # dict(MultiDict) values are lists; keep the first value,
            # mapping empty strings to None.
            for i in dic:
                v = dic[i][0]
                if not v:
                    dic[i] = None
                else:
                    dic[i] = v
            set_settings(**dic)
    settings = get_settings()
    fees = DataGetter.get_fee_settings()
    image_config = DataGetter.get_image_configs()
    event_image_sizes = DataGetter.get_image_sizes_by_type(type='event')
    profile_image_sizes = DataGetter.get_image_sizes_by_type(type='profile')
    # Surface the running environment to the template.
    if current_app.config['PRODUCTION']:
        settings['app_environment'] = Environment.PRODUCTION
    elif current_app.config['STAGING']:
        settings['app_environment'] = Environment.STAGING
    elif current_app.config['DEVELOPMENT']:
        settings['app_environment'] = Environment.DEVELOPMENT
    return render_template(
        'gentelella/admin/super_admin/settings/settings.html',
        settings=settings,
        fees=fees,
        s3_regions=boto.s3.regions(),
        payment_currencies=DataGetter.get_payment_currencies(),
        included_settings=get_module_settings(),
        image_config=image_config,
        super_admin_email=DataGetter.get_super_admin_user().email,
        event_image_sizes=event_image_sizes,
        profile_image_sizes=profile_image_sizes,
        navigation_bar=list_navbar())
def wrapper(*args, **kwargs):
    """Decorator shim: when no form data was posted, expose the JSON body
    through ``request.form`` so form-based handlers work unchanged."""
    if not request.form:
        request.form = ImmutableMultiDict(request.json)
    return func(*args, **kwargs)
def onboard_ns(request):
    """
    Registers a Network Service into the Store and onboards it with the
    Orchestrator. The SHIELD manifest is checked for integrity and compliance.
    Metadata is stored for the catalogue and the actual manifest file is
    stored as binary so it can be provided for attestation purposes (thus
    ensuring tamper-proofing).

    :param request: the HTTP request data, holding a single Network Service
        package. If more than one package file is provided it gets ignored.
    """
    # Store validation data about the vnsf
    validation_data = dict()
    # Error payload built by any failure path; inspected in `finally`.
    ex_response = None
    form_data = request.form.copy()
    try:
        # It's assumed that only one NS package file is received.
        if 'package' not in request.files:
            ex_response = NsHooks.issue.build_ex(
                IssueElement.ERROR,
                NsHooks.errors['ONBOARD_NS']['PACKAGE_MISSING'],
                message=
                "Missing or wrong field in POST. 'package' should be used as the field name"
            )
            # Bare return: the `finally` block aborts using `ex_response`.
            return
        vnsfo = VnsfoFactory.get_orchestrator('OSM', cfg.VNSFO_PROTOCOL,
                                              cfg.VNSFO_HOST, cfg.VNSFO_PORT,
                                              cfg.VNSFO_API)
        ns = NsHelper(vnsfo)
        manifest_fs, package_data = ns.onboard_ns(cfg.VNSFO_TENANT_ID,
                                                  request.files['package'],
                                                  validation_data)
        # Ensure the SHIELD manifest is stored as a binary file.
        # NOTE: the file is closed by Eve once stored.
        files = request.files.copy()
        files['manifest_file'] = manifest_fs
        # The package field is only required for onboarding schema validation
        # but shouldn't be stored as document data.
        files.pop('package')
        request.files = ImmutableMultiDict(files)
        # Convert the Network Service package into the document data.
        # NOTE: there's no need to deep copy as the data won't be modified
        # until it gets stored in the database.
        # NOTE(review): hard-coded owner id looks like a placeholder — confirm.
        form_data['owner_id'] = '12ab34567c89d0123e4f5678'
        form_data['state'] = 'sandboxed'
        form_data['manifest'] = package_data['manifest']
        form_data['descriptor'] = package_data['descriptor']
        form_data['ns_id'] = package_data['ns_id']
        form_data['ns_name'] = package_data['ns_name']
        form_data['constituent_vnsfs'] = package_data['constituent_vnsfs']
    # NOTE(review): handlers use `e.message` (Python 2-era attribute) — verify
    # these exception classes still define it on the running interpreter.
    except (NsMissingPackage, NsWrongPackageFormat,
            VnsfoNsWrongPackageFormat) as e:
        ex_response = NsHooks.issue.build_ex(
            IssueElement.ERROR, NsHooks.errors['ONBOARD_NS']['PACKAGE_ISSUE'],
            [[e.message]], e.message)
    except (NsPackageCompliance, VnsfoMissingNsDescriptor,
            NsWrongManifestFormat) as e:
        ex_response = NsHooks.issue.build_ex(
            IssueElement.ERROR,
            NsHooks.errors['ONBOARD_NS']['PACKAGE_COMPLIANCE'], [[e.message]],
            e.message)
    except (VnsfOrchestratorOnboardingIssue,
            VnsfOrchestratorUnreacheable) as e:
        ex_response = NsHooks.issue.build_ex(
            IssueElement.ERROR, NsHooks.errors['ONBOARD_NS']['VNSFO_ISSUE'],
            [[e.message]], e.message)
    except (NsInvalidFormat, NsMissingDependency, NsValidationIssue) as e:
        ex_response = NsHooks.issue.build_ex(
            IssueElement.ERROR,
            NsHooks.errors['ONBOARD_NS']['NS_VALIDATION_FAILURE'],
            [[e.message]], e.message)
    finally:
        # Always persist the validation data, if existent
        validation_ref = None
        if validation_data:
            app = flask.current_app
            with app.test_request_context():
                r, _, _, status, _ = post_internal('validation',
                                                   validation_data)
                assert status == http_utils.HTTP_201_CREATED
                validation_ref = r['_id']
        # Check if exceptions were raised during the onboard process
        if ex_response:
            # Include validation data in the error response, if existent
            if validation_ref:
                ex_response['validation'] = str(validation_ref)
            # Abort the request and reply with a meaningful error
            abort(
                make_response(jsonify(**ex_response),
                              ex_response['_error']['code']))
    # Onboard succeeded. Include the validation reference in the request form
    if validation_ref:
        form_data['validation'] = validation_ref
    # Modify the request form to persist
    request.form = ImmutableMultiDict(form_data)
def search_youtubers(dictionary: ImmutableMultiDict, booked_package) -> list:
    """Search influencers listed on YouTube, filtered by the request args.

    :param dictionary: request arguments; range filters treat the sentinel
        string "UNDEFINED" (or a missing key) as "no bound".
    :param booked_package: package identifier, used only for search tracking.
    :return: a page (at most 20 rows) of influencer dicts, or None when an
        invalid ``offset`` is supplied.
    """
    track_search('youtube', booked_package)
    sql_stmt = """ SELECT DISTINCT influencer.influencer_identifier, last_name, first_name, email, phone_number, price, gender, homebase, birthyear, pwd_hash, joined_at, confirmed, youtube_username, youtube_follower_amount, youtube_post_amount, youtube_rhythm, youtube_gender_distribution_male, youtube_gender_distribution_female, youtube_age_distribution_min, youtube_age_distribution_max, youtube_page_views, youtube_impressions_amount, youtube_click_rate FROM influencer JOIN is_listed_on_youtube on influencer.influencer_identifier = is_listed_on_youtube.influencer_identifier LEFT OUTER JOIN influencer_covers_topic on influencer.influencer_identifier = influencer_covers_topic.influencer_identifier LEFT OUTER JOIN influencer_deal on influencer.influencer_identifier = influencer_deal.influencer_identifier LEFT OUTER JOIN countries_of_channel on influencer.influencer_identifier = countries_of_channel.influencer_identifier and channel_identifier = 3 WHERE is_listed_on_youtube.listing_on = 1 """
    parameters = []

    # (request arg, DB column, comparison) — replaces 18 copy/pasted blocks.
    range_filters = (
        ('birthyear_from', 'birthyear', '>='),
        ('birthyear_to', 'birthyear', '<='),
        ('youtubeFollowerFrom', 'youtube_follower_amount', '>='),
        ('youtubeFollowerTo', 'youtube_follower_amount', '<='),
        ('youtube_age_distribution_from', 'youtube_age_distribution_min', '>='),
        ('youtube_age_distribution_to', 'youtube_age_distribution_max', '<='),
        ('youtube_gender_distribution_female_from',
         'youtube_gender_distribution_female', '>='),
        ('youtube_gender_distribution_female_to',
         'youtube_gender_distribution_female', '<='),
        ('youtube_gender_distribution_male_from',
         'youtube_gender_distribution_male', '>='),
        ('youtube_gender_distribution_male_to',
         'youtube_gender_distribution_male', '<='),
        ('youtube_click_rate_From', 'youtube_click_rate', '>='),
        ('youtube_click_rate_To', 'youtube_click_rate', '<='),
        ('youtube_impressions_from', 'youtube_impressions_amount', '>='),
        ('youtube_impressions_to', 'youtube_impressions_amount', '<='),
        ('youtube_post_amount_from', 'youtube_post_amount', '>='),
        ('youtube_post_amount_to', 'youtube_post_amount', '<='),
        ('youtube_views_From', 'youtube_page_views', '>='),
        ('youtube_views_To', 'youtube_page_views', '<='),
    )
    for param, column, op in range_filters:
        value = dictionary.get(param)
        if value != "UNDEFINED" and value is not None:
            sql_stmt += " AND {} {} %s ".format(column, op)
            parameters.append(value)

    def _append_in_clause(param, column):
        # NOTE(review): tuple_of_list interpolates values straight into the
        # SQL text (original TODO: WARNING: RISKY!) — SQL-injection prone;
        # should be rewritten with placeholders.
        nonlocal sql_stmt
        values = dictionary.getlist(param)
        if values:
            sql_stmt += " and {} in ".format(column) + tuple_of_list(values)

    _append_in_clause("youtube_rhythm_types", "youtube_rhythm")
    _append_in_clause("youtube_countries", "country_identifier")

    # BUG FIX: was `dictionary.get("homebase") is not ""` — identity
    # comparison with a literal (SyntaxWarning, effectively always True),
    # which filtered on NULL when the key was absent. Apply the filter only
    # for a non-empty homebase.
    homebase = dictionary.get("homebase")
    if homebase:
        sql_stmt += """ and homebase = %s"""
        parameters.append(homebase)

    _append_in_clause("topics", "topic_identifier")
    _append_in_clause("deals", "deal_identifier")
    _append_in_clause("genders", "gender")

    sql_stmt += """ LIMIT 20 """
    offset = dictionary.get("offset")
    if offset:
        # Only page-aligned, non-negative offsets are accepted.
        if int(offset) >= 0 and int(offset) % 20 == 0:
            sql_stmt += """ OFFSET """ + offset
        else:
            return None
    sql_stmt += ";"

    dbconnection = get_database_connection()
    cursor = dbconnection.cursor()
    prm_tuple = tuple(parameters)
    try:
        cursor.execute(sql_stmt, prm_tuple)
    except Exception:
        # Was a bare `except:`; keep the diagnostics but let system-exiting
        # exceptions propagate. NOTE(review): a failed execute still falls
        # through to fetchall below — preserved from the original flow.
        print(sql_stmt)
        print(prm_tuple)
        print(cursor.statement)
    result = parse_list_tuples_to_list_dict(cursor.fetchall(),
                                            cursor.column_names)
    cursor.close()
    dbconnection.close()
    return result
@check_blueprints('', '/objects', '/views')
def test_link_deletion(user_client):
    """Deleting every named link leaves the nodes untouched and only the
    expected links remaining."""
    create_from_file(user_client, 'europe.xls')
    for link_name in links:
        link = get_obj(Link, name=link_name)
        user_client.post('/objects/delete/link/{}'.format(link.id))
    # Node count is unaffected; link count drops to the expected remainder.
    assert len(Node.query.all()) == 33
    assert len(Link.query.all()) == 38


## Pool management

# Form payloads for pool creation: regex matching enabled where the
# corresponding *_regex flag is set to 'y'.
pool1 = ImmutableMultiDict([
    ('name', 'pool1'),
    ('node_location', 'france|spain'),
    ('node_location_regex', 'y'),
    ('link_name', 'link[1|2].'),
    ('link_name_regex', 'y'),
])

pool2 = ImmutableMultiDict([
    ('name', 'pool2'),
    ('node_location', 'france'),
    ('link_name', 'l.*k\\S3'),
    ('link_name_regex', 'y'),
])

# @check_blueprints('', '/objects', '/views')
# def test_pool_management(user_client):
#     create_from_file(user_client, 'europe.xls')
#     user_client.post('/objects/process_pool', data=pool1)
def define_link(subtype, source, destination, name):
    """Build the form payload used to create a link through the web UI.

    Only the link's type, endpoints and name vary between calls; the
    remaining fields are fixed test fixtures.
    """
    fields = [
        ('name', name),
        ('description', 'description'),
        ('location', 'Los Angeles'),
        ('vendor', 'Juniper'),
        ('type', subtype),
        ('source', source),
        ('destination', destination),
        ('add_link', ''),
    ]
    return ImmutableMultiDict(fields)
Script
)
from test_base import check_blueprints
from werkzeug.datastructures import ImmutableMultiDict

# test the creation of configuration script (netmiko / napalm)
# test the creation of file transfer script (netmiko via SCP)
# test the creation of ansible script

## Netmiko configuration

# Form payload for creating a netmiko 'show commands' script via the UI.
netmiko_ping = ImmutableMultiDict([
    ('name', 'netmiko_ping'),
    ('content_type', 'simple'),
    ('create_script', 'netmiko_config'),
    ('content', 'ping 1.1.1.1'),
    ('netmiko_type', 'show_commands'),
    ('driver', 'cisco_xr_ssh'),
    ('global_delay_factor', '1.0'),
])

# Jinja2 template used by the napalm configuration script fixtures below.
template = '''
{% for interface, properties in subinterfaces.items() %}
interface FastEthernet0/0.{{ interface }}
description {{ properties.aire }}
encapsulation dot1Q {{ properties.dot1Q }}
ip address {{ properties.address }} 255.255.255.248
no ip redirects
ip ospf cost {{ properties.cost }}
{% endfor %}
'''
def parse_grafana(args: ImmutableMultiDict, alert: JSON, match: Dict[str, Any]) -> Alert:
    """Build an Alert from one evalMatches entry of a Grafana webhook.

    Values are resolved in increasing priority: request query params, then
    the matched metric's tags, then the alert rule's tags. 'service' tags
    are appended rather than replaced.

    :param args: request query parameters (defaults).
    :param alert: decoded Grafana webhook JSON body.
    :param match: one entry of the payload's evalMatches list.
    """
    # get values from request params
    environment = args.get('environment', 'Production')
    alerting_severity = args.get('severity', 'major')
    service = args.getlist('service') or ['Grafana']
    group = args.get('group', 'Performance')
    customer = args.get('customer', None)
    origin = args.get('origin', 'Grafana')
    timeout = args.get('timeout', type=int)

    # get metric labels (evalMatches tags); pop() removes consumed keys so
    # the leftovers can become attributes below
    match_tags = match.get('tags') or {}
    environment = match_tags.pop('environment', environment)
    alerting_severity = match_tags.pop('severity', alerting_severity)
    if 'service' in match_tags:
        service.append(match_tags.pop('service'))
    group = match_tags.pop('group', group)
    customer = match_tags.pop('customer', customer)
    origin = match_tags.pop('origin', origin)

    # assign leftover match tags as attributes ('.' replaced: not valid in keys)
    attributes = {k.replace('.', '_'): v for (k, v) in match_tags.items()}

    # get alert rule tags (highest priority, same consumption pattern)
    rules_tags = alert.get('tags') or {}
    environment = rules_tags.pop('environment', environment)
    alerting_severity = rules_tags.pop('severity', alerting_severity)
    if 'service' in rules_tags:
        service.append(rules_tags.pop('service'))
    group = rules_tags.pop('group', group)
    customer = rules_tags.pop('customer', customer)
    origin = rules_tags.pop('origin', origin)

    # set severity from the Grafana alert state
    if alert['state'] == 'alerting':
        severity = alerting_severity
    elif alert['state'] == 'ok':
        severity = alarm_model.DEFAULT_NORMAL_SEVERITY
    else:
        severity = 'indeterminate'

    # assign leftover rule tags as attributes (rule tags override match tags)
    attributes.update(
        {k.replace('.', '_'): v for (k, v) in rules_tags.items()})
    attributes['ruleId'] = str(alert['ruleId'])
    # rule/image URLs are rendered as HTML links in the alert attributes
    if 'ruleUrl' in alert:
        attributes[
            'ruleUrl'] = '<a href="%s" target="_blank">Rule</a>' % alert[
            'ruleUrl']
    if 'imageUrl' in alert:
        attributes[
            'imageUrl'] = '<a href="%s" target="_blank">Image</a>' % alert[
            'imageUrl']

    return Alert(resource=match['metric'],
                 event=alert['ruleName'],
                 environment=environment,
                 severity=severity,
                 service=service,
                 group=group,
                 value='%s' % match['value'],
                 text=alert.get('message', None) or alert.get('title', alert['state']),
                 tags=list(),
                 attributes=attributes,
                 customer=customer,
                 origin=origin,
                 event_type='grafanaAlert',
                 timeout=timeout,
                 raw_data=json.dumps(alert))
def test_get_with_request(self):
    """_get_request_or_lookup must accept 'source'/'params' either as JSON
    strings or as plain dicts, work with both dict and ImmutableMultiDict
    request args, and default 'return_type' to 'aggregations'."""
    params = {
        'source': {
            'query': {
                'filtered': {
                    'filter': {
                        'bool': {
                            'must': [],
                            'must_not': []
                        }
                    }
                }
            }
        },
        'params': {
            'dates': {
                'filter': 'yesterday'
            }
        },
        'repo': 'published',
        'return_type': 'text/csv'
    }
    expected_args = {
        'source': params['source'],
        'repo': params['repo'],
        'return_type': params['return_type']
    }
    request = ParsedRequest()
    # Request object with source as a json in string format
    request.args = {
        'source': json.dumps(params['source']),
        'repo': params['repo'],
        'return_type': params['return_type'],
    }
    args = self.service._get_request_or_lookup(req=request, lookup=None)
    self.assertEqual(args, expected_args)
    # Request object with args an ImmutableMultiDict
    request.args = ImmutableMultiDict({
        'source': json.dumps(params['source']),
        'repo': params['repo'],
        'return_type': params['return_type'],
    })
    args = self.service._get_request_or_lookup(req=request, lookup=None)
    self.assertEqual(args, expected_args)
    # Request object with source as a normal dict
    request.args = {
        'source': params['source'],
        'repo': params['repo'],
        'return_type': params['return_type'],
    }
    args = self.service._get_request_or_lookup(req=request, lookup=None)
    self.assertEqual(args, expected_args)
    # return_type default
    request.args = {
        'source': params['source'],
        'repo': params['repo'],
    }
    args = self.service._get_request_or_lookup(req=request, lookup=None)
    self.assertEqual(args.get('return_type'), 'aggregations')
    # Request object with params as json in string format
    expected_args = {
        'params': params['params'],
        'repo': params['repo'],
        'return_type': params['return_type']
    }
    request.args = {
        'params': json.dumps(params['params']),
        'repo': params['repo'],
        'return_type': params['return_type'],
    }
    args = self.service._get_request_or_lookup(req=request, lookup=None)
    self.assertEqual(args, expected_args)
    # Request object with params as a normal dict
    request.args = {
        'params': params['params'],
        'repo': params['repo'],
        'return_type': params['return_type'],
    }
    args = self.service._get_request_or_lookup(req=request, lookup=None)
    self.assertEqual(args, expected_args)
def index_view(self):
    # Super-admin settings page: on POST, persist the image-size profiles and
    # the general settings submitted in the form, then re-render the page.
    if request.method == 'POST':
        # Image-size section was submitted.
        if 'event-thumbnail_width' in request.form:
            im_size_profile = DataGetter.get_image_sizes_by_type(
                type='profile')
            im_size_event = DataGetter.get_image_sizes_by_type(
                type='event')
            if im_size_profile and im_size_event:
                # Both rows exist: update them in place.
                im_size_profile.full_width = request.form[
                    'profile-large_width']
                # NOTE(review): the *_width form key is reused for heights on
                # the profile sizes — presumably square images; confirm.
                im_size_profile.full_height = request.form[
                    'profile-large_width']
                im_size_profile.full_aspect = request.form.get(
                    'profile-large_aspect', 'off')
                im_size_profile.full_quality = request.form[
                    'profile-large_quality']
                im_size_profile.icon_width = request.form[
                    'profile-icon_width']
                im_size_profile.icon_height = request.form[
                    'profile-icon_width']
                im_size_profile.icon_aspect = request.form.get(
                    'profile-icon_aspect', 'off')
                im_size_profile.icon_quality = request.form[
                    'profile-icon_quality']
                im_size_profile.thumbnail_width = request.form[
                    'profile-thumbnail_width']
                im_size_profile.thumbnail_height = request.form[
                    'profile-thumbnail_width']
                im_size_profile.thumbnail_aspect = request.form.get(
                    'profile-thumbnail_aspect', 'off')
                im_size_profile.thumbnail_quality = request.form[
                    'profile-thumbnail_quality']
                im_size_profile.logo_width = None
                im_size_profile.logo_height = None
                save_to_db(im_size_profile, "Image Sizes saved")
                im_size_event.full_width = request.form[
                    'event-large_width']
                im_size_event.full_height = request.form[
                    'event-large_height']
                im_size_event.full_aspect = request.form.get(
                    'event-large_aspect', 'off')
                im_size_event.full_quality = request.form[
                    'event-large_quality']
                im_size_event.icon_width = request.form['event-icon_width']
                im_size_event.icon_height = request.form[
                    'event-icon_height']
                im_size_event.icon_aspect = request.form.get(
                    'event-icon_aspect', 'off')
                im_size_event.icon_quality = request.form[
                    'event-icon_quality']
                im_size_event.thumbnail_width = request.form[
                    'event-thumbnail_width']
                im_size_event.thumbnail_height = request.form[
                    'event-thumbnail_height']
                im_size_event.thumbnail_aspect = request.form.get(
                    'event-thumbnail_aspect', 'off')
                im_size_event.thumbnail_quality = request.form[
                    'event-thumbnail_quality']
                im_size_event.logo_width = request.form['logo_width']
                im_size_event.logo_height = request.form['logo_height']
                save_to_db(im_size_event, "Image Sizes saved")
            else:
                # No existing rows: wipe all stored sizes and recreate both.
                all_im_sizes = DataGetter.get_image_sizes()
                for sizes in all_im_sizes:
                    delete_from_db(sizes, 'Delete Image Sizes')
                im_size = ImageSizes(
                    type='profile',
                    full_width=request.form['profile-large_width'],
                    full_height=request.form['profile-large_width'],
                    full_aspect=request.form.get('profile-large_aspect',
                                                 'off'),
                    full_quality=request.form['profile-large_quality'],
                    icon_width=request.form['profile-icon_width'],
                    icon_height=request.form['profile-icon_width'],
                    icon_aspect=request.form.get('profile-icon_aspect',
                                                 'off'),
                    icon_quality=request.form['profile-icon_quality'],
                    thumbnail_width=request.
                    form['profile-thumbnail_width'],
                    thumbnail_height=request.
                    form['profile-thumbnail_width'],
                    thumbnail_aspect=request.form.get(
                        'profile-thumbnail_aspect', 'off'),
                    thumbnail_quality=request.
                    form['profile-thumbnail_quality'],
                    logo_width=None,
                    logo_height=None)
                save_to_db(im_size, "Image Sizes saved")
                # NOTE(review): the three *_quality values below read the
                # 'profile-...' form keys although this row is type='event';
                # the update branch above reads 'event-...' keys — this looks
                # like a copy-paste slip. Confirm before changing.
                im_size = ImageSizes(
                    type='event',
                    full_width=request.form['event-large_width'],
                    full_height=request.form['event-large_height'],
                    full_aspect=request.form.get('event-large_aspect',
                                                 'off'),
                    full_quality=request.form['profile-large_quality'],
                    icon_width=request.form['event-icon_width'],
                    icon_height=request.form['event-icon_height'],
                    icon_aspect=request.form.get('event-icon_aspect',
                                                 'off'),
                    icon_quality=request.form['profile-icon_quality'],
                    thumbnail_width=request.form['event-thumbnail_width'],
                    thumbnail_height=request.
                    form['event-thumbnail_height'],
                    thumbnail_aspect=request.form.get(
                        'event-thumbnail_aspect', 'off'),
                    thumbnail_quality=request.
                    form['profile-thumbnail_quality'],
                    logo_width=request.form['logo_width'],
                    logo_height=request.form['logo_height'])
                save_to_db(im_size, "Image Sizes saved")
        # General settings section.
        if 'service_fee' in request.form:
            dic = ImmutableMultiDict(request.form)
        else:
            # Copy into a plain dict (values are lists here) and normalize:
            # keep the first value, mapping empty strings to None.
            dic = dict(request.form.copy())
            for i in dic:
                v = dic[i][0]
                if not v:
                    dic[i] = None
                else:
                    dic[i] = v
        set_settings(**dic)
    settings = get_settings()
    fees = DataGetter.get_fee_settings()
    event_view = EventsView()
    image_config = DataGetter.get_image_configs()
    event_image_sizes = DataGetter.get_image_sizes_by_type(type='event')
    profile_image_sizes = DataGetter.get_image_sizes_by_type(
        type='profile')
    return self.render(
        '/gentelella/admin/super_admin/settings/settings.html',
        settings=settings,
        fees=fees,
        payment_currencies=DataGetter.get_payment_currencies(),
        included_settings=event_view.get_module_settings(),
        image_config=image_config,
        event_image_sizes=event_image_sizes,
        profile_image_sizes=profile_image_sizes)
# Build [acode, parent_path, region] triples for every region; regions above
# admin level 3 hang directly under the country root.
region_list = []
for region in regions:
    # the first element is the administrative division code
    if region["adminlevel"] < 3:
        parent = u"中国"
    else:
        # Look up the parent region and build a "country/parent-subordinate"
        # style path for it.
        parent = admin_data.database.collection.find_one({"_id": region["parent"]})
        parent = "/".join(["中国", "".join([parent["region"], "属下"])])
    region_list.append([region["acode"], parent, region["region"]])
print(region_list)
# json.dump(region_list,fp=open('e:/gitwork/application/testweb/region_ceic.txt', 'w'))

# 4. query the distinct variable names
variables = con.find().distinct("variable")
print(variables)

# Demo: an ImmutableMultiDict keeps repeated keys; getlist returns them all.
mdata = ImmutableMultiDict(
    [
        ("period", "1990"),
        ("period", "1991"),
        ("period", "1992"),
        ("period", "1993"),
        ("period", "1994"),
        ("period", "1995"),
        ("period", "1996"),
        ("period", "1997"),
        ("period", "1998"),
    ]
)
print(mdata.getlist("period"))
def newobject():
    # Create one or more indicators from the submitted form: either import
    # IP/domain/hash indicators from a Cuckoo analysis task, or add manual
    # (possibly bulk, comma-separated) input, then render the matching list.
    try:
        something = request.form
        imd = ImmutableMultiDict(something)
        records = helpers.convert(imd)
        # Import indicators from Cuckoo for the selected analysis task
        if 'type' in records and 'cuckoo' in records['type']:
            host_data, dns_data, sha1, firstseen = cuckoo.report_data(
                records['cuckoo_task_id'])
            if host_data and dns_data and sha1 and firstseen:
                # Import IP Indicators from Cuckoo Task (skip existing ones)
                for ip in host_data:
                    ind = Indicator.query.filter_by(object=ip).first()
                    if ind is None:
                        indicator = Indicator(ip.strip(), 'IPv4', firstseen,
                                              '', 'Infrastructure',
                                              records['campaign'], 'Low', '',
                                              records['tags'], '')
                        db_session.add(indicator)
                        db_session.commit()
                # Import Domain Indicators from Cuckoo Task
                for dns in dns_data:
                    ind = Indicator.query.filter_by(
                        object=dns['request']).first()
                    if ind is None:
                        indicator = Indicator(dns['request'], 'Domain',
                                              firstseen, '', 'Infrastructure',
                                              records['campaign'], 'Low', '',
                                              records['tags'], '')
                        db_session.add(indicator)
                        db_session.commit()
                # Import File/Hash Indicators from Cuckoo Task
                ind = Indicator.query.filter_by(object=sha1).first()
                if ind is None:
                    indicator = Indicator(sha1, 'Hash', firstseen, '',
                                          'Capability', records['campaign'],
                                          'Low', '', records['tags'], '')
                    db_session.add(indicator)
                    db_session.commit()
                # Redirect to Dashboard after successful import
                return redirect(url_for('home'))
            else:
                # NOTE(review): errormessage is assigned but never shown —
                # the redirect drops it.
                errormessage = 'Task is not a file analysis'
                return redirect(url_for('import_indicators'))
        if 'inputtype' in records:
            # Makes sure if you submit an IPv4 indicator, it's an actual IP
            # address.
            # NOTE(review): matched against the full comma-separated string
            # before the split below, so only the first entry is validated.
            ipregex = re.match(r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}',
                               records['inputobject'])
            # Convert the inputobject of IP or Domain to a list for Bulk Add
            # functionality.
            records['inputobject'] = records['inputobject'].split(',')
            for newobject in records['inputobject']:
                if records['inputtype'] == "IPv4":
                    if ipregex:
                        # NOTE(review): 'object' shadows the builtin.
                        object = Indicator.query.filter_by(
                            object=newobject).first()
                        if object is None:
                            ipv4_indicator = Indicator(
                                newobject.strip(), records['inputtype'],
                                records['inputfirstseen'],
                                records['inputlastseen'],
                                records['diamondmodel'],
                                records['inputcampaign'],
                                records['confidence'], records['comments'],
                                records['tags'], None)
                            db_session.add(ipv4_indicator)
                            db_session.commit()
                            network = Indicator.query.filter(
                                Indicator.type.in_(('IPv4', 'IPv6', 'Domain',
                                                    'Network'))).all()
                        else:
                            errormessage = "Entry already exists in database."
                            return render_template(
                                'newobject.html',
                                errormessage=errormessage,
                                inputtype=records['inputtype'],
                                inputobject=newobject,
                                inputfirstseen=records['inputfirstseen'],
                                inputlastseen=records['inputlastseen'],
                                inputcampaign=records['inputcampaign'],
                                comments=records['comments'],
                                diamondmodel=records['diamondmodel'],
                                tags=records['tags'])
                    else:
                        errormessage = "Not a valid IP Address."
                        return render_template(
                            'newobject.html',
                            errormessage=errormessage,
                            inputtype=records['inputtype'],
                            inputobject=newobject,
                            inputfirstseen=records['inputfirstseen'],
                            inputlastseen=records['inputlastseen'],
                            confidence=records['confidence'],
                            inputcampaign=records['inputcampaign'],
                            comments=records['comments'],
                            diamondmodel=records['diamondmodel'],
                            tags=records['tags'])
                else:
                    # Non-IPv4 input (Domain, Hash, etc.).
                    object = Indicator.query.filter_by(
                        object=newobject).first()
                    if object is None:
                        indicator = Indicator(
                            newobject.strip(), records['inputtype'],
                            records['inputfirstseen'],
                            records['inputlastseen'],
                            records['diamondmodel'],
                            records['inputcampaign'],
                            records['confidence'], records['comments'],
                            records['tags'], None)
                        db_session.add(indicator)
                        db_session.commit()
                    else:
                        errormessage = "Entry already exists in database."
                        return render_template(
                            'newobject.html',
                            errormessage=errormessage,
                            inputtype=records['inputtype'],
                            inputobject=newobject,
                            inputfirstseen=records['inputfirstseen'],
                            inputlastseen=records['inputlastseen'],
                            inputcampaign=records['inputcampaign'],
                            comments=records['comments'],
                            diamondmodel=records['diamondmodel'],
                            tags=records['tags'])
            # TODO: Change 'network' to 'object' in HTML templates to standardize on verbiage
            if records['inputtype'] == "IPv4" or records['inputtype'] == "Domain" or records['inputtype'] == "Network"\
                    or records['inputtype'] == "IPv6":
                network = Indicator.query.filter(
                    Indicator.type.in_(
                        ('IPv4', 'IPv6', 'Domain', 'Network'))).all()
                return render_template('networks.html', network=network)
            elif records['diamondmodel'] == "Victim":
                victims = Indicator.query.filter(
                    Indicator.diamondmodel == ('Victim')).all()
                return render_template('victims.html', network=victims)
            elif records['inputtype'] == "Hash":
                files = Indicator.query.filter(
                    Indicator.type == ('Hash')).all()
                return render_template('files.html', network=files)
            else:
                threatactors = Indicator.query.filter(
                    Indicator.type == ('Threat Actors')).all()
                return render_template('threatactors.html',
                                       network=threatactors)
    except Exception as e:
        # Best-effort error page; any failure above lands here.
        return render_template('error.html', error=e)
assert len(fetch_all('Device')) == 1 # PUT: update object properties put('http://127.0.0.1:5000/rest/object/device', data=dumps(updated_device)) response = get('http://127.0.0.1:5000/rest/object/device/router10', headers={'Accept': 'application/json'}) assert loads(response.content) == updated_response assert len(fetch_all('Device')) == 1 # DELETE: delete an object delete('http://127.0.0.1:5000/rest/object/device/router10', headers={'Accept': 'application/json'}) assert len(fetch_all('Device')) == 0 post_service = ImmutableMultiDict([ ('name', 'create_router10'), ('description', 'POST creation'), ('call_type', 'POST'), ('url', 'http://127.0.0.1:5000/rest/object/device'), ('payload', dumps(device)), ('username', ''), ('password', ''), ('content', '.*15.5(\\d)M.*'), ('content_regex', 'y') ]) post_service_task = ImmutableMultiDict([('name', 'task_create_router'), ('waiting_time', '0'), ('job', '4'), ('start_date', ''), ('end_date', ''), ('frequency', ''), ('run_immediately', 'y')]) delete_service = ImmutableMultiDict([ ('name', 'delete_router10'), ('description', 'DELETE'), ('call_type', 'DELETE'), ('url', 'http://127.0.0.1:5000/rest/object/device/router10'), ('payload', ''), ('username', ''), ('password', ''), ('content', '.*15.5(\\d)M.*'), ('content_regex', 'y')
def updatesettings():
    """Persist the settings form: toggle each integration on/off and copy
    the credential/config fields onto the singleton Setting row.

    Fixes the original's ``is not ''`` identity comparisons (implementation-
    defined for strings; a SyntaxWarning since Python 3.8) by comparing with
    ``!=``, and collapses ten copy-pasted toggle blocks into a small helper.

    :return: the rendered settings page, or the error page on any failure.
    """
    try:
        newdict = helpers.convert(ImmutableMultiDict(request.form))
        # Query the first set of settings; per-user settings could be queried
        # here instead. Make sure we're updating the settings rather than
        # overwriting them.
        settings = Setting.query.filter_by(_id=1).first()

        def enabled(toggle, *required_fields):
            # A feature is on when its checkbox was submitted and every
            # credential field it depends on is non-empty.
            return (toggle in newdict and
                    all(newdict[field] != '' for field in required_fields))

        # Presence-only toggles.
        settings.threatcrowd = 'on' if enabled('threatcrowd') else 'off'
        settings.cuckoo = 'on' if enabled('cuckoo') else 'off'
        settings.whoisinfo = 'on' if enabled('whoisinfo') else 'off'

        # PassiveTotal toggles require both the username and the API key.
        for pt_type in ['pt_pdns', 'pt_whois', 'pt_pssl', 'pt_host_attr']:
            state = 'on' if enabled(pt_type, 'pt_username', 'pt_api_key') else 'off'
            setattr(settings, pt_type, state)

        # Toggles gated on a single non-empty credential field.
        settings.vtinfo = 'on' if enabled('vtinfo', 'apikey') else 'off'
        settings.vtfile = 'on' if enabled('vtfile', 'apikey') else 'off'
        settings.circlinfo = 'on' if enabled('circlinfo', 'circlusername') else 'off'
        settings.circlssl = 'on' if enabled('circlssl', 'circlusername') else 'off'
        settings.farsightinfo = 'on' if enabled('farsightinfo', 'farsightkey') else 'off'
        settings.shodaninfo = 'on' if enabled('shodaninfo', 'shodankey') else 'off'
        settings.odnsinfo = 'on' if enabled('odnsinfo', 'odnskey') else 'off'

        # Credential/config fields copied through verbatim.
        for field in ('farsightkey', 'apikey', 'odnskey', 'httpproxy',
                      'httpsproxy', 'cache_ttl', 'cuckoohost',
                      'cuckooapiport', 'circlusername', 'circlpassword',
                      'pt_username', 'pt_api_key', 'shodankey'):
            setattr(settings, field, newdict[field])

        db_session.commit()
        settings = Setting.query.first()
        return render_template('settings.html', records=settings)
    except Exception as e:
        return render_template('error.html', error=e)
def edit_view_stepped(self, event_id, step):
    # Render one step of the event edit wizard (GET) or persist the
    # submitted changes (POST) and redirect appropriately.
    event = DataGetter.get_event(event_id)
    session_types = DataGetter.get_session_types_by_event_id(event_id).all(
    )
    tracks = DataGetter.get_tracks(event_id).all()
    social_links = DataGetter.get_social_links_by_event_id(event_id)
    microlocations = DataGetter.get_microlocations(event_id).all()
    call_for_speakers = DataGetter.get_call_for_papers(event_id).first()
    sponsors = DataGetter.get_sponsors(event_id)
    custom_forms = DataGetter.get_custom_form_elements(event_id)
    speaker_form = json.loads(custom_forms.speaker_form)
    session_form = json.loads(custom_forms.session_form)
    tax = DataGetter.get_tax_options(event_id)
    ticket_types = DataGetter.get_ticket_types(event_id)
    # Collect which custom-form fields are shown (preselect) and which are
    # mandatory (required), across both the session and speaker forms.
    preselect = []
    required = []
    for session_field in session_form:
        if session_form[session_field]['include'] == 1:
            preselect.append(session_field)
        if session_form[session_field]['require'] == 1:
            required.append(session_field)
    for speaker_field in speaker_form:
        if speaker_form[speaker_field]['include'] == 1:
            preselect.append(speaker_field)
        if speaker_form[speaker_field]['require'] == 1:
            required.append(speaker_field)
    # Python 2 debug print; consider removing.
    print preselect
    if request.method == 'GET':
        # Random hash for the call-for-speakers URL.
        # NOTE(review): shadows the builtin `hash` and retries only once on
        # collision — a second collision would go unnoticed.
        hash = get_random_hash()
        if CallForPaper.query.filter_by(hash=hash).all():
            hash = get_random_hash()
        return self.render('/gentelella/admin/event/edit/edit.html',
                           event=event,
                           session_types=session_types,
                           tracks=tracks,
                           social_links=social_links,
                           microlocations=microlocations,
                           call_for_speakers=call_for_speakers,
                           sponsors=sponsors,
                           event_types=DataGetter.get_event_types(),
                           event_licences=DataGetter.get_event_licences(),
                           event_topics=DataGetter.get_event_topics(),
                           event_sub_topics=DataGetter.get_event_subtopics(),
                           preselect=preselect,
                           timezones=DataGetter.get_all_timezones(),
                           cfs_hash=hash,
                           step=step,
                           required=required,
                           included_settings=self.get_module_settings(),
                           tax=tax,
                           payment_countries=DataGetter.get_payment_countries(),
                           start_date=datetime.datetime.now() + datetime.timedelta(days=10),
                           payment_currencies=DataGetter.get_payment_currencies(),
                           ticket_types=ticket_types)
    if request.method == "POST":
        # Collect uploaded sponsor logos (if any) before saving.
        img_files = []
        imd = ImmutableMultiDict(request.files)
        if 'sponsors[logo]' in imd and request.files['sponsors[logo]'].filename != "":
            for img_file in imd.getlist('sponsors[logo]'):
                img_files.append(img_file)
        # Remember the pre-edit sponsor logos/names so edit_event can diff.
        old_sponsor_logos = []
        old_sponsor_names = []
        for sponsor in sponsors:
            old_sponsor_logos.append(sponsor.logo)
            old_sponsor_names.append(sponsor.name)
        event = DataManager.edit_event(
            request, event_id, event, session_types, tracks, social_links,
            microlocations, call_for_speakers, sponsors, custom_forms,
            img_files, old_sponsor_logos, old_sponsor_names, tax)
        # Publishing without a location: bounce back to the edit page with
        # the offending field highlighted.
        if (request.form.get('state', u'Draft') == u'Published') and string_empty(
                event.location_name):
            flash(
                "Your event was saved. To publish your event please review the highlighted fields below.",
                "warning")
            return redirect(url_for('.edit_view', event_id=event.id) +
                            "#highlight=location_name")
        return redirect(url_for('.details_view', event_id=event_id))
def search_facebook_user(dictionary: ImmutableMultiDict, booked_package) -> list:
    """Search influencers listed on Facebook, filtered by the form values.

    Collapses eighteen copy-pasted numeric range filters and six IN(...)
    filters into data-driven loops, and fixes the homebase filter: the
    original used ``dictionary.get("homebase") is not ""`` — an identity
    comparison that is always true when the key is missing (None), so a
    bogus ``homebase = %s`` clause with a None parameter was appended.

    :param dictionary: request form values; range filters hold a number as a
        string, the sentinel "UNDEFINED", or are missing entirely.
    :param booked_package: forwarded to track_search for usage accounting.
    :return: list of result dicts (one per influencer), or False when an
        invalid paging offset was supplied (kept for caller compatibility).
    """
    track_search('facebook', booked_package)

    sql_stmt = """ SELECT DISTINCT influencer.influencer_identifier, last_name,
        first_name, email, price, gender, homebase, birthyear,
        facebook_username, facebook_follower_amount, facebook_page_views,
        facebook_post_amount, facebook_rhythm,
        facebook_gender_distribution_male, facebook_gender_distribution_female,
        facebook_page_activity_amount, facebook_likes_amount,
        facebook_reach_value, facebook_post_interaction
        FROM influencer
        JOIN is_listed_on_facebook
            on is_listed_on_facebook.influencer_identifier = influencer.influencer_identifier
        LEFT OUTER JOIN content_of_channel
            on content_of_channel.influencer_identifier = influencer.influencer_identifier
            and channel_identifier = 2
        LEFT OUTER JOIN influencer_covers_topic
            on influencer.influencer_identifier = influencer_covers_topic.influencer_identifier
        LEFT OUTER JOIN influencer_deal
            on influencer.influencer_identifier = influencer_deal.influencer_identifier
        LEFT OUTER JOIN countries_of_channel
            on influencer.influencer_identifier = countries_of_channel.influencer_identifier
            and countries_of_channel.channel_identifier = 2
        WHERE influencer.listing_on = 1 """
    parameters = []

    # (form field, SQL column, comparison) for every numeric range filter.
    # The facebook_reach_from/_to filters stay disabled, as in the original.
    range_filters = [
        ("facebookFollowerFrom", "facebook_follower_amount", ">="),
        ("facebookFollowerTo", "facebook_follower_amount", "<="),
        ("birthyear_from", "birthyear", ">="),
        ("birthyear_to", "birthyear", "<="),
        ("facebook_post_interaction_From", "facebook_post_interaction", ">="),
        ("facebook_post_interaction_To", "facebook_post_interaction", "<="),
        ("facebook_post_amount_from", "facebook_post_amount", ">="),
        ("facebook_post_amount_to", "facebook_post_amount", "<="),
        ("facebook_page_calls_From", "facebook_page_views", ">="),
        ("facebook_page_calls_To", "facebook_page_views", "<="),
        ("facebook_page_activity_From", "facebook_page_activity_amount", ">="),
        ("facebook_page_activity_To", "facebook_page_activity_amount", "<="),
        ("facebook_likes_From", "facebook_likes_amount", ">="),
        ("facebook_likes_To", "facebook_likes_amount", "<="),
        ("facebook_gender_distribution_female_from",
         "facebook_gender_distribution_female", ">="),
        ("facebook_gender_distribution_female_to",
         "facebook_gender_distribution_female", "<="),
        ("facebook_gender_distribution_male_from",
         "facebook_gender_distribution_male", ">="),
        ("facebook_gender_distribution_male_to",
         "facebook_gender_distribution_male", "<="),
    ]
    for key, column, op in range_filters:
        value = dictionary.get(key)
        # "UNDEFINED" or a missing key means the user left the filter empty.
        if value != "UNDEFINED" and value is not None:
            sql_stmt += " and {} {} %s ".format(column, op)
            parameters.append(value)

    # (form field, SQL column) for every multi-select IN(...) filter.
    in_filters = [
        ("facebook_rhythm_types", "facebook_rhythm"),
        ("facebook_content", "content_type_identifier"),
        ("genders", "gender"),
        ("topics", "topic_identifier"),
        ("deals", "deal_identifier"),
        ("facebook_countries", "country_identifier"),
    ]
    for key, column in in_filters:
        values = dictionary.getlist(key)
        if values:
            # TODO: WARNING: RISKY! tuple_of_list interpolates the values
            # directly into the SQL text; these should be parameterized too.
            sql_stmt += " and {} in ".format(column) + tuple_of_list(values)

    homebase = dictionary.get("homebase")
    if homebase:
        sql_stmt += """ and homebase = %s"""
        parameters.append(homebase)

    sql_stmt += """ LIMIT 20 """
    if dictionary.get("offset"):
        # Paging accepts only non-negative multiples of the page size (20).
        offset = int(dictionary.get("offset"))
        if offset >= 0 and offset % 20 == 0:
            sql_stmt += """ OFFSET """ + dictionary.get("offset")
        else:
            return False
    sql_stmt += ";"

    dbconnection = get_database_connection()
    cursor = dbconnection.cursor()
    prm_tuple = tuple(parameters)
    try:
        cursor.execute(sql_stmt, prm_tuple)
        print(cursor.statement)
    except Exception:
        # Log the failing statement and parameters, then re-raise instead of
        # continuing to fetchall() on a dead cursor as the original did.
        print(sql_stmt)
        print(prm_tuple)
        raise
    result = parse_list_tuples_to_list_dict(cursor.fetchall(),
                                            cursor.column_names)
    cursor.close()
    dbconnection.close()
    return result
from conftest import path_scripts
from os.path import join
from tasks.models import Task
from test_base import check_blueprints
from test_objects import create_from_file
from test_scripts import netmiko_ping, napalm_jinja2_script
from werkzeug.datastructures import ImmutableMultiDict

# Form payload for a task that runs immediately. The duplicated 'scripts'
# key is why a list of tuples (not a dict) feeds the ImmutableMultiDict.
instant_task = ImmutableMultiDict([('name', 'instant_task'),
                                   ('scripts', 'napalm_subif'),
                                   ('scripts', 'netmiko_ping'),
                                   ('start_date', ''),
                                   ('end_date', ''),
                                   ('frequency', ''),
                                   ('script', '')])

# Form payload for a task scheduled over a date range with a frequency (s).
scheduled_task = ImmutableMultiDict([('name', 'scheduled_task'),
                                     ('scripts', 'napalm_subif'),
                                     ('scripts', 'netmiko_ping'),
                                     ('start_date', '30/03/2018 19:10:13'),
                                     ('end_date', '06/04/2018 19:10:13'),
                                     ('frequency', '3600'),
                                     ('script', '')])


@check_blueprints('/views', '/tasks')
def test_netmiko_napalm_config(user_client):
    """Create a netmiko and a napalm configuration script through the UI."""
    create_from_file(user_client, 'europe.xls')
    user_client.post('/scripts/netmiko_configuration', data=netmiko_ping)
    path_yaml = join(path_scripts, 'cisco', 'interfaces', 'parameters.yaml')
    with open(path_yaml, 'rb') as f:
        # NOTE(review): this mutates the imported payload — it only works if
        # napalm_jinja2_script is a plain dict, not an ImmutableMultiDict.
        napalm_jinja2_script['file'] = f
        user_client.post('/scripts/napalm_configuration',
                         data=napalm_jinja2_script)
def search_for_pinterest_users(dictionary: ImmutableMultiDict,
                               booked_package) -> list:
    """Search influencers listed on Pinterest, filtered by the search form.

    Args:
        dictionary: the submitted search form; scalar filters are read with
            ``.get`` and multi-select filters with ``.getlist``.
        booked_package: forwarded to ``track_search`` for usage analytics.

    Returns:
        A list of result dicts (paged, at most 20 rows), or ``None`` when the
        supplied ``offset`` is not a non-negative multiple of the page size.
    """
    track_search('pinterest', booked_package)
    sql_stmt = """ SELECT DISTINCT influencer.influencer_identifier, last_name,
        first_name, email, phone_number, price, gender, homebase, birthyear,
        pwd_hash, pinterest_username, pinterest_follower_amount,
        pinterest_post_amount, pinterest_rhythm, pinterest_viewer_amount
        FROM influencer
        JOIN is_listed_on_pinterest
            ON influencer.influencer_identifier = is_listed_on_pinterest.influencer_identifier
        LEFT OUTER JOIN influencer_covers_topic
            ON influencer.influencer_identifier = influencer_covers_topic.influencer_identifier
        LEFT OUTER JOIN influencer_deal
            ON influencer.influencer_identifier = influencer_deal.influencer_identifier
        LEFT OUTER JOIN content_of_channel
            ON channel_identifier = 4
            AND influencer.influencer_identifier = content_of_channel.influencer_identifier
        WHERE is_listed_on_pinterest.listing_on = 1"""
    parameters = []

    # Numeric range filters: (form key, parameterized SQL clause).
    # "UNDEFINED" is the frontend's sentinel for "no bound chosen".
    range_filters = [
        ("birthyear_from", " AND birthyear >= %s "),
        ("birthyear_to", " AND birthyear <= %s "),
        ("pinterestFollowerFrom", " AND pinterest_follower_amount >= %s "),
        ("pinterestFollowerTo", " AND pinterest_follower_amount <= %s "),
        ("pinterest_pins_amount_from", " AND pinterest_post_amount >= %s "),
        ("pinterest_pins_amount_to", " AND pinterest_post_amount <= %s "),
        ("pinterest_page_calls_From", " AND pinterest_viewer_amount >= %s "),
        ("pinterest_page_calls_To", " AND pinterest_viewer_amount <= %s "),
    ]
    for key, clause in range_filters:
        value = dictionary.get(key)
        if value is not None and value != "UNDEFINED":
            sql_stmt += clause
            parameters.append(value)

    # Multi-select filters. tuple_of_list interpolates the values directly
    # into the statement — TODO: WARNING: RISKY! (not parameterized; must be
    # sanitized upstream). Kept as-is to avoid changing the helper contract.
    # Note: getlist never returns None — it returns [] for absent keys.
    list_filters = [
        ("pinterest_rhythm_types", " and pinterest_rhythm in "),
        ("topics", " and topic_identifier in "),
        ("deals", " and deal_identifier in "),
        ("genders", " and gender in "),
        ("pinterest_content", " and content_type_identifier in "),
    ]
    for key, clause in list_filters:
        values = dictionary.getlist(key)
        if values:
            sql_stmt += clause + tuple_of_list(values)

    # BUG FIX: the original compared with `is not ""` (identity, not
    # equality), which is implementation-defined and also let a missing key
    # (None) through, binding NULL into the query.
    homebase = dictionary.get("homebase")
    if homebase:
        sql_stmt += " and homebase = %s"
        parameters.append(homebase)

    sql_stmt += """ LIMIT 20 """
    offset = dictionary.get("offset")
    if offset:
        # Only page-aligned, non-negative offsets are accepted.
        if int(offset) >= 0 and int(offset) % 20 == 0:
            sql_stmt += """ OFFSET """ + offset
        else:
            return None
    sql_stmt += ";"

    dbconnection = get_database_connection()
    cursor = dbconnection.cursor()
    prm_tuple = tuple(parameters)
    try:
        cursor.execute(sql_stmt, prm_tuple)
        result = parse_list_tuples_to_list_dict(cursor.fetchall(),
                                                cursor.column_names)
    except Exception:
        # The original bare `except:` swallowed the error and then crashed on
        # fetchall() anyway; log the failing statement and re-raise instead.
        print(sql_stmt)
        print(prm_tuple)
        print(cursor.statement)
        raise
    finally:
        # BUG FIX: cursor/connection were leaked when execute() failed.
        cursor.close()
        dbconnection.close()
    return result
def delete():
    """Remove the items named in the submitted form, then return home."""
    submitted = dict(ImmutableMultiDict(request.form))
    items.delete_items(submitted)
    return redirect('/')
# Task-scheduling fixtures and an end-to-end test for the /update/task
# endpoint of the eNMS scheduling blueprint.
from flask.testing import FlaskClient
from werkzeug.datastructures import ImmutableMultiDict

from eNMS.base.functions import fetch_all
from tests.test_base import check_blueprints
from tests.test_objects import create_from_file

# Form payload for a task started immediately ("run-now").
instant_task = ImmutableMultiDict([("name", "instant_task"),
                                   ("start-task", "run-now"),
                                   ("job", "2")])

# Form payload for a recurring task over a one-week window; 'frequency' is
# presumably in seconds (3600 => hourly) — TODO confirm against the scheduler.
scheduled_task = ImmutableMultiDict([
    ("name", "scheduled_task"),
    ("start_date", "30/03/2018 19:10:13"),
    ("end_date", "06/04/2018 19:10:13"),
    ("frequency", "3600"),
    ("job", "2"),
])


@check_blueprints("/scheduling")
def test_netmiko_napalm_config(user_client: FlaskClient) -> None:
    """Create both tasks through the form endpoint and check the counts.

    The expected totals (3, then 4) presumably include tasks created by the
    fixtures before this test runs — verify against conftest.
    """
    create_from_file(user_client, "europe.xls")
    user_client.post("/update/task", data=instant_task)
    assert len(fetch_all("Task")) == 3
    user_client.post("/update/task", data=scheduled_task)
    assert len(fetch_all("Task")) == 4


# NOTE(review): this definition continues past the end of this chunk; the
# call is intentionally left unclosed here.
google_earth_dict = ImmutableMultiDict([
    ("google earth", ""),
def test_report_converter_parse_form_errorband_costs():
    """Three error bands (constant / timeofday / datetime) survive the trip
    through ReportConverter.parse_form_errorband_cost."""
    fields = [
        # band 0: constant cost over [1, 5]
        ('cost-band-error-start-0', 1),
        ('cost-band-error-end-0', 5),
        ('cost-band-cost-function-0', 'constant'),
        ('cost-value-0', '1.0'),
        ('cost-aggregation-0', 'sum'),
        ('cost-net-0', True),
        # band 1: time-of-day cost over [5, 10]
        ('cost-band-error-start-1', 5),
        ('cost-band-error-end-1', 10),
        ('cost-band-cost-function-1', 'timeofday'),
        ('cost-times-1', '00:00,12:00'),
        ('cost-costs-1', '1.5,2.5'),
        ('cost-aggregation-1', 'mean'),
        ('cost-fill-1', 'forward'),
        ('cost-net-1', False),
        ('cost-timezone-1', 'America/Denver'),
        # band 2: fixed-datetime cost over [-5, 1]
        ('cost-band-error-start-2', -5),
        ('cost-band-error-end-2', 1),
        ('cost-band-cost-function-2', 'datetime'),
        ('cost-datetimes-2', '2020-01-01T00:00Z,2020-01-01T12:00Z'),
        ('cost-costs-2', '1.0,3.0'),
        ('cost-aggregation-2', 'sum'),
        ('cost-net-2', True),
        ('cost-fill-2', 'forward'),
        ('cost-timezone-2', 'GMT'),
    ]
    params = converters.ReportConverter.parse_form_errorband_cost(
        ImmutableMultiDict(fields))
    expected_bands = [
        {
            'error_range': [1, 5],
            'cost_function': 'constant',
            'cost_function_parameters': {
                'cost': 1.0,
                'aggregation': 'sum',
                'net': True,
            },
        },
        {
            'error_range': [5, 10],
            'cost_function': 'timeofday',
            'cost_function_parameters': {
                'times': ['00:00', '12:00'],
                'cost': [1.5, 2.5],
                'aggregation': 'mean',
                'fill': 'forward',
                'net': False,
                'timezone': 'America/Denver',
            },
        },
        {
            'error_range': [-5, 1],
            'cost_function': 'datetime',
            'cost_function_parameters': {
                'datetimes': ['2020-01-01T00:00Z', '2020-01-01T12:00Z'],
                'cost': [1.0, 3.0],
                'aggregation': 'sum',
                'fill': 'forward',
                'net': True,
                'timezone': 'GMT',
            },
        },
    ]
    for got, want in zip(params['bands'], expected_bands):
        assert got == want
def args(self):
    """The keyword arguments from ``handle.remote(**kwargs)``.

    Returned as an ImmutableMultiDict so callers cannot mutate them.
    """
    frozen_kwargs = ImmutableMultiDict(self._kwargs)
    return frozen_kwargs
def test_report_converter_parse_form_fill_method(form_vals, expected):
    """parse_fill_method extracts the fill method from the raw form pairs."""
    result = converters.ReportConverter.parse_fill_method(
        ImmutableMultiDict(form_vals))
    assert result == expected
class QueryModel(BaseModel):
    # Query-string parameters: q1 is required; q2 falls back to "default".
    q1: int
    q2: str = "default"


class RequestBodyModel(BaseModel):
    # JSON-body parameters: b1 is required; b2 is optional.
    b1: float
    b2: Optional[str] = None


# Parametrized (request, expected response) cases for the validation
# decorator; each case bundles its models and payloads in a ValidateParams.
validate_test_cases = [
    pytest.param(
        ValidateParams(
            request_body={"b1": 1.4},
            request_query=ImmutableMultiDict({"q1": 1}),
            # Response merges query and body fields, with defaults applied.
            expected_response_body={
                "q1": 1,
                "q2": "default",
                "b1": 1.4,
                "b2": None
            },
            response_model=ResponseModel,
            query_model=QueryModel,
            body_model=RequestBodyModel,
        ),
        id="simple valid example with default values",
    ),
    # NOTE(review): the next case continues past the end of this chunk; the
    # calls are intentionally left unclosed here.
    pytest.param(
        ValidateParams(
            request_body={"b1": 1.4},
# Task fixtures for the eNMS scheduling tests.
from werkzeug.datastructures import ImmutableMultiDict

from eNMS import app
from eNMS.database import db
from tests.conftest import check_pages
from tests.test_inventory import create_from_file

# Form payload for a task that starts at a fixed date with no end date
# (standard scheduling mode).
instant_task = ImmutableMultiDict(
    [
        ("form_type", "task"),
        ("start_date", "30/03/2018 19:10:13"),
        ("name", "instant_task"),
        ("frequency_unit", "seconds"),
        ("scheduling_mode", "standard"),
        ("service", "2"),
    ]
)

# Form payload for a recurring task over a one-week window (frequency 3600
# with unit "seconds" => hourly).
# NOTE(review): this definition continues past the end of this chunk; the
# call is intentionally left unclosed here.
scheduled_task = ImmutableMultiDict(
    [
        ("form_type", "task"),
        ("name", "scheduled_task"),
        ("frequency_unit", "seconds"),
        ("scheduling_mode", "standard"),
        ("start_date", "30/03/2018 19:10:13"),
        ("end_date", "06/04/2018 19:10:13"),
        ("frequency", "3600"),
        ("service", "2"),
    ]
def classify():
    """Run the classifier on the posted items and render the suggestions."""
    submitted = dict(ImmutableMultiDict(request.form))['items[]']
    predictions = machine_learning.predict(submitted)
    return render_template('custom/suggestions.html',
                           labels_json=predictions)
def update_request_form(**kwargs):
    """Replace ``request.form`` with a copy merged with *kwargs*.

    Why: because request forms are immutable.
    """
    merged = {**request.form.to_dict(), **kwargs}
    request.form = ImmutableMultiDict(merged)
def tag(): print dict(request.form) items.process_tags(dict(ImmutableMultiDict(request.form))) return redirect('/')
def newobject():
    """Create (or, on the update route, modify) one or more indicators.

    Handles three flows from the same form:
      1. Import of IP / domain / hash indicators from a Cuckoo analysis task.
      2. Bulk add of comma-separated indicators with campaign bookkeeping.
      3. In-place update of an existing indicator when the matched URL rule
         contains 'update'.
    Renders the matching indicator list on success, newobject.html with an
    error message on validation failure, or error.html on any exception.
    """
    try:
        imd = ImmutableMultiDict(request.form)
        records = helpers.convert(imd)
        # Import indicators from Cuckoo for the selected analysis task.
        if 'type' in records and 'cuckoo' in records['type']:
            host_data, dns_data, sha1, firstseen = cuckoo.report_data(
                records['cuckoo_task_id'])
            if host_data and dns_data and sha1 and firstseen:
                # Import IP indicators from the Cuckoo task (skip duplicates).
                for ip in host_data:
                    ip = ip['ip']
                    ind = Indicator.query.filter_by(indicator=ip).first()
                    if ind is None:
                        indicator = Indicator(ip.strip(), 'IPv4', firstseen,
                                              '', 'Infrastructure',
                                              records['campaign'], 'Low', '',
                                              records['tags'], '')
                        db.session.add(indicator)
                        db.session.commit()
                # Import domain indicators from the Cuckoo task.
                for dns in dns_data:
                    ind = Indicator.query.filter_by(
                        indicator=dns['request']).first()
                    if ind is None:
                        indicator = Indicator(dns['request'], 'Domain',
                                              firstseen, '', 'Infrastructure',
                                              records['campaign'], 'Low', '',
                                              records['tags'], '')
                        db.session.add(indicator)
                        db.session.commit()
                # Import the analyzed file's hash as a Capability indicator.
                ind = Indicator.query.filter_by(indicator=sha1).first()
                if ind is None:
                    indicator = Indicator(sha1, 'Hash', firstseen, '',
                                          'Capability', records['campaign'],
                                          'Low', '', records['tags'], '')
                    db.session.add(indicator)
                    db.session.commit()
                # Redirect to the dashboard after a successful import.
                return redirect(url_for('home'))
            else:
                # NOTE(review): errormessage is assigned but never shown —
                # the redirect below discards it.
                errormessage = 'Task is not a file analysis'
                return redirect(url_for('import_indicators'))
        # Add the campaign to the database.
        # NOTE(review): .all() returns a list and is never None, so `exists`
        # is always True here and the add/commit below never runs — looks
        # like a latent bug; confirm intended duplicate handling.
        exists = Campaign.query.filter_by(
            name=records['inputcampaign']).all() is not None
        camp = Campaign(name=records['inputcampaign'], notes='',
                        tags=records['tags'])
        if not exists:
            db.session.add(camp)
            db.session.commit()
        if 'inputtype' in records:
            # Hack for dealing with disabled fields not being sent in
            # request.form: a hidden field is used to send the indicator.
            if 'inputobject' not in records:
                records['inputobject'] = records['indicator']
            # Makes sure that if you submit an IPv4 indicator, it's an actual
            # IP address (dotted-quad shape only; octet ranges not checked).
            ipregex = re.match(r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}',
                               records['inputobject'])
            # Convert the inputobject of IP or Domain to a list for the
            # Bulk Add functionality.
            records['inputobject'] = records['inputobject'].split(',')
            errormessage = None
            for newobject in records['inputobject']:
                indicator = Indicator.query.filter_by(
                    indicator=newobject).first()
                if indicator is None:
                    newindicator = Indicator(
                        indicator=newobject.strip(),
                        campaign=camp,
                        indicator_type=records['inputtype'],
                        firstseen=records['inputfirstseen'],
                        lastseen=records['inputlastseen'],
                        diamondmodel=records['diamondmodel'],
                        confidence=records['confidence'],
                        notes=records['comments'],
                        tags=records['tags'],
                        relationships=None)
                    if newindicator:
                        # Validate that the indicator is an IPv4 address
                        # before persisting it.
                        if not ipregex and records['inputtype'] == "IPv4":
                            errormessage = "Not a valid IP Address."
                        else:
                            db.session.add(newindicator)
                            db.session.commit()
                else:
                    # Check to see if the app route was Update; if so,
                    # perform an update instead of adding a new indicator.
                    rule = request.url_rule
                    if 'update' in rule.rule:
                        indicator.campaign.name = records['inputcampaign']
                        indicator.indicator_type = records['inputtype']
                        indicator.firstseen = records['inputfirstseen']
                        indicator.lastseen = records['inputlastseen']
                        indicator.diamondmodel = records['diamondmodel']
                        indicator.confidence = records['confidence']
                        indicator.notes = records['comments']
                        indicator.tags = records['tags']
                        db.session.commit()
                    else:
                        errormessage = "Entry already exists in database."
            # Re-render the form with the last processed indicator on error.
            if errormessage:
                return render_template(
                    'newobject.html',
                    errormessage=errormessage,
                    inputtype=records['inputtype'],
                    inputobject=newobject,
                    inputfirstseen=records['inputfirstseen'],
                    inputlastseen=records['inputlastseen'],
                    inputcampaign=records['inputcampaign'],
                    comments=records['comments'],
                    diamondmodel=records['diamondmodel'],
                    tags=records['tags'])
            # Success: show the list page matching the indicator category.
            if records['inputtype'] == "IPv4" or records['inputtype'] == "Domain" or records['inputtype'] == "Network" \
                    or records['inputtype'] == "IPv6":
                network = Indicator.query.filter(
                    Indicator.indicator_type.in_(
                        ('IPv4', 'IPv6', 'Domain', 'Network'))).all()
                return render_template('indicatorlist.html',
                                       network=network,
                                       title='Network Indicators',
                                       links='network')
            elif records['diamondmodel'] == "Victim":
                victims = Indicator.query.filter(
                    Indicator.diamondmodel == 'Victim').all()
                return render_template('indicatorlist.html',
                                       network=victims,
                                       title='Victims',
                                       links='victims')
            elif records['inputtype'] == "Hash":
                files = Indicator.query.filter(
                    Indicator.indicator_type == 'Hash').all()
                return render_template('indicatorlist.html',
                                       network=files,
                                       title='Files & Hashes',
                                       links='files')
            else:
                threatactors = Indicator.query.filter(
                    Indicator.indicator_type == 'Threat Actor').all()
                return render_template('indicatorlist.html',
                                       network=threatactors,
                                       title='Threat Actors',
                                       links='threatactors')
    except Exception as e:
        # Any failure above falls through to the generic error page.
        return render_template('error.html', error=e)