def write_gc_many(collection, blobs):
    """Post a batch of event blobs to the GetConnect analytics API.

    Best-effort delivery: timeouts are silently dropped, other failures
    and unexpected status codes are reported to Rollbar instead of raised.
    """
    if settings.DISABLE_GETCONNECT:
        return
    payload = json.dumps({collection: blobs})
    auth_headers = {
        'X-Project-Id': settings.GETCONNECT_IO_PID,
        'X-Api-Key': settings.GETCONNECT_IO_PUSH_KEY,
    }
    try:
        posted = requests.post(
            'https://api.getconnect.io/events',
            timeout=15,
            data=payload,
            headers=auth_headers)
    except requests.exceptions.Timeout:
        # The analytics service being slow is not worth failing over.
        return
    except Exception as e:
        rollbar.report_message('Analytics POST error: %s' % str(e), 'error')
        return
    # 409 is a duplicate ID error, which is expected
    if posted.status_code not in (200, 409):
        rollbar.report_message(
            'Got non-200 status code submitting logs: %s %s' % (
                posted.status_code, posted.text),
            'error')
def _rollbar_track_err(conf, err, exit_code):
    """
    Report to rollbar. Synchronous.

    :param exit_code: exit code of tracked process
    :type exit_code: int
    :param err: stderr of tracked process
    :type err: str
    :param conf: dcos config file
    :type conf: Toml
    :rtype: None
    """
    props = _base_properties(conf)
    props['exit_code'] = exit_code
    props['stderr'] = err

    # Use the second-to-last stderr line as the report title when there
    # are at least two lines (the final split element is usually empty
    # because stderr tends to end with a newline); otherwise use it all.
    err_lines = err.split('\n')
    title = err_lines[-2] if len(err_lines) >= 2 else err

    try:
        rollbar.report_message(title, 'error', extra_data=props)
    except Exception as e:
        logger.exception(e)
def process_request(self, request):
    """Replace the authenticated `request.user` if properly emulating.

    Reads the `emulate_user_id` / `emulate_user_username` cookies and,
    when the requesting user is allowed to emulate the target, swaps
    `request.user` for the targeted user, preserving the original in
    `request.original_user`.
    """
    if is_allowed_to_emulate_users(request.user):
        from htk.apps.accounts.utils import get_user_by_id
        from htk.apps.accounts.utils import get_user_by_username
        user_id = request.COOKIES.get('emulate_user_id')
        username = request.COOKIES.get('emulate_user_username')
        is_attempting_to_emulate = user_id or username
        if is_attempting_to_emulate:
            # Initialize up-front: the original code left `targeted_user`
            # unbound in the defensive else-branch below, which would have
            # raised NameError at the `is None` check.
            targeted_user = None
            if user_id:
                targeted_user = get_user_by_id(user_id)
            elif username:
                targeted_user = get_user_by_username(username)
            else:
                rollbar.report_message('Impossible case: attempting to emulate another user but not specified')
            if targeted_user is None:
                messages.error(request, 'Cannot Emulate: User does not exist', fail_silently=True)
            else:
                if is_allowed_to_emulate(request.user, targeted_user):
                    request.original_user = request.user
                    request.user = targeted_user
                else:
                    messages.error(request, 'Cannot Emulate: Not allowed to emulate that user', fail_silently=True)
        else:
            # not attempting to emulate
            pass
    else:
        # is not allowed or is not attempting to emulate users
        pass
def _rollbar_track_err(conf, cluster_id, err, exit_code):
    """
    Report to rollbar. Synchronous.

    :param conf: dcos config file
    :type conf: Toml
    :param cluster_id: dcos cluster id to send to segment
    :type cluster_id: str
    :param err: stderr of tracked process
    :type err: str
    :param exit_code: exit code of tracked process
    :type exit_code: int
    :rtype: None
    """
    rollbar.init(ROLLBAR_SERVER_POST_KEY, 'prod')

    props = _base_properties(conf, cluster_id)
    props['exit_code'] = exit_code
    props['stderr'] = err

    # Title is the second-to-last stderr line when available (the last
    # split element is typically empty due to a trailing newline).
    err_lines = err.split('\n')
    title = err_lines[-2] if len(err_lines) >= 2 else err

    try:
        rollbar.report_message(title, 'error', extra_data=props)
    except Exception as e:
        logger.exception(e)
def convert_weather_icon_to_emoji(weather_icon):
    """Map a Dark Sky weather icon name to a Slack emoji code.

    https://darksky.net/dev/docs/response
    http://www.webpagefx.com/tools/emoji-cheat-sheet/

    Unknown icon names fall back to the 'default' emoji and are reported
    to Rollbar so new icon types can be added to the map.
    """
    icons = {
        'default' : ':sunny:',
        'clear-day' : ':sun_small_cloud:',
        'clear-night' : ':night_with_stars:',
        'rain' : ':rain_cloud:',
        'snow' : ':snow_cloud:',
        'sleet' : ':snow_cloud:', # TODO better icon
        'wind' : ':wind_chime:',
        'fog' : ':fog:',
        'cloudy' : ':cloud:',
        'partly-cloudy-day' : ':partly_sunny:',
        'partly-cloudy-night' : ':night_with_stars:', # TODO better icon
        'hail' : ':snow_cloud:', # TODO better icon
        'thunderstorm' : ':thunder_cloud_and_rain:',
        'tornado' : ':tornado:',
    }
    if weather_icon not in icons:
        # Report the *original* unmatched value before falling back; the
        # previous code reassigned first, so it always reported 'default'.
        rollbar.report_message('Unmatched weather icon: %s' % weather_icon)
        weather_icon = 'default'
    icon = icons.get(weather_icon)
    return icon
def get(self, resource_type, params, headers=None, auth_type='bearer', refresh_token=True):
    """Performs a Fitbit API GET request

    `auth_type` the string 'basic' or 'bearer'
    `refresh_token` if True, will refresh the OAuth token when needed

    On a 401 with `refresh_token` True, attempts one token refresh and
    retries once (the retry passes refresh_token=False, so no loop).
    Non-200/401 responses are reported to Rollbar. Returns the final
    `requests` response object.
    """
    url = self.get_resource_url(resource_type)
    headers = self.make_headers(auth_type, headers=headers)
    response = requests.get(url, headers=headers, params=params)
    if response.status_code == 401:
        if refresh_token:
            was_refreshed = self.refresh_oauth2_token()
            if was_refreshed:
                # if token was successfully refreshed, repeat request
                response = self.get(resource_type, params, auth_type=auth_type, refresh_token=False)
            else:
                # Refresh failed; fall through and return the 401 response.
                pass
        else:
            # Already retried (or retries disabled): report for visibility.
            extra_data = {
                'user_id' : self.social_auth_user.user.id,
                'username' : self.social_auth_user.user.username,
                'response' : response.json(),
            }
            rollbar.report_message('Fitbit OAuth token expired, needs refreshing', extra_data=extra_data)
    elif response.status_code == 200:
        pass
    else:
        extra_data = {
            'response' : response.json(),
        }
        rollbar.report_message('Unexpected response from Fitbit API GET request', extra_data=extra_data)
    return response
def debugger(request):
    """Report the `m` query parameter to Rollbar at debug level and
    return a standard OK JSON response."""
    import rollbar
    message = request.GET.get('m')
    rollbar.report_message(message, 'debug')
    from htk.api.utils import json_response_okay
    return json_response_okay()
def _send_mail(to, subject, body, email_format='Text'):
    """Send a single email through AWS SES.

    `email_format` is the SES body key ('Text' or 'Html'). A response
    without a MessageId is reported to Rollbar as an error.
    """
    if settings.DEBUG:
        print((to, subject, body))
    session = Session(
        aws_access_key_id=settings.SES_ACCESS_ID,
        aws_secret_access_key=settings.SES_SECRET_KEY,
        region_name='us-east-1')
    ses = session.client('ses')
    message = {
        'Subject': {'Data': subject},
        'Body': {email_format: {'Data': body}},
    }
    resp = ses.send_email(
        Source=settings.SENDER_EMAIL,
        Destination={'ToAddresses': [to]},
        Message=message,
        ReplyToAddresses=[settings.SUPPORT_EMAIL],
        ReturnPath=settings.ADMINS[0][1])
    if not resp.get('MessageId'):
        rollbar.report_message('Got bad response from SES: %s' % repr(resp), 'error')
def __s3_upload__(self): """ upload the file to s3 see http://boto.cloudhackers.com/en/latest/s3_tut.html :return: """ # s3 = boto3.resource('s3') s3,_ = self.__s3_connect__() aws_tar = self.__get_aws_tar_name__() b = s3.get_bucket('zooniverse-static') key_str = "panoptes-uploads.zooniverse.org/production/project_aggregations_export/"+aws_tar s3_key = Key(b) s3_key.key = key_str if not os.path.exists("/tmp/"+aws_tar): print("warning the tar file does not exist - creating an temporary one.") panoptes_file = open("/app/config/aggregation.yml","rb") api_details = yaml.load(panoptes_file) rollbar_token = api_details[self.environment]["rollbar"] rollbar.init(rollbar_token,self.environment) rollbar.report_message('the tar file does not exist', 'warning') with open("/tmp/"+aws_tar,"w") as f: f.write("") s3_key.set_contents_from_filename("/tmp/"+aws_tar)
def clean(self):
    """Validate the sign-in form.

    Looks up the user by case-insensitive email; a missing or duplicated
    email is reported to Rollbar and surfaces as a generic login failure
    so the form doesn't leak which emails exist.
    """
    cleaned_data = super(SignInForm, self).clean()
    try:
        user = User.objects.get(email__iexact=cleaned_data.get('email'))
    except (User.DoesNotExist, User.MultipleObjectsReturned):
        # Bug fix: the old `except A, B:` form is a Python 3 syntax error,
        # and even on Python 2 it only caught DoesNotExist while binding
        # MultipleObjectsReturned as the exception variable. A tuple
        # catches both.
        rollbar.report_message('Wrong email entered: %s' % (cleaned_data.get('email'),), 'warning')
        raise forms.ValidationError('Login failed')
def create_sso_endpoint(self, sso_key):
    """Exchange a 321Forms SSO key for an SSO endpoint URL.

    Returns the endpoint string, or None on failure. Failures are
    reported to Rollbar exactly once: either as an exception report
    (bad/unparseable response) or as a message (missing endpoint).
    """
    resource_path = DSS_321FORMS_API_RESOURCE_SSO_ENDPOINT % {
        'sso_key' : sso_key,
    }
    request_url = self.get_request_url(resource_path=resource_path)
    response = self.request_post(request_url)
    exception_reported = False
    try:
        data = response.json()
        endpoint = data.get('endpoint')
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate.
        endpoint = None
        request = get_current_request()
        extra_data = self._get_rollbar_extra_data()
        extra_data.update({
            'response_text' : response.text,
        })
        rollbar.report_exc_info(extra_data=extra_data)
        exception_reported = True
    if endpoint is None and not exception_reported:
        request = get_current_request()
        extra_data = self._get_rollbar_extra_data()
        extra_data.update({
            'response_text' : response.text,
        })
        rollbar.report_message('Error retrieving SSO endpoint', request=request, extra_data=extra_data)
    return endpoint
def webhook_call(
    webhook_url=None,
    channel=None,
    username=None,
    text='',
    icon_emoji=None
):
    """Performs a webhook call to Slack

    https://api.slack.com/incoming-webhooks

    `channel` override must be a public channel

    Returns the `requests` response; non-200 responses are reported to
    Rollbar.
    """
    if webhook_url is None:
        webhook_url = htk_setting('HTK_SLACK_WEBHOOK_URL')
    payload = {
        'text' : text,
    }
    if channel:
        payload['channel'] = channel
    if username:
        payload['username'] = username
    if icon_emoji:
        payload['icon_emoji'] = icon_emoji
    # Removed dead code: an unused form-encoded `data` dict and a
    # commented-out serialization line; the request sends `payload` as JSON.
    response = requests.post(webhook_url, json=payload)
    if response.status_code != 200:
        rollbar.report_message('Slack webhook call error: [%s] %s' % (response.status_code, response.content,))
    return response
def generate_sso_key(self):
    """Request a new 321Forms SSO key.

    Returns the SSO key string, or None on failure. Failures are reported
    to Rollbar exactly once: as an exception report (unparseable
    response) or as a message (missing key).
    """
    request_url = self.get_request_url(resource_path=DSS_321FORMS_API_RESOURCE_SSO_GENERATE)
    response = self.request_get(request_url)
    exception_reported = False
    try:
        data = response.json()
        sso_key = data.get('SSOKey')
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate.
        sso_key = None
        extra_data = self._get_rollbar_extra_data()
        extra_data.update({
            'response_text' : response.text,
        })
        rollbar.report_exc_info(extra_data=extra_data)
        exception_reported = True
    if sso_key is None and not exception_reported:
        request = get_current_request()
        extra_data = self._get_rollbar_extra_data()
        extra_data.update({
            'response_text' : response.text,
        })
        rollbar.report_message('Error generating 321Forms SSO key', request=request, extra_data=extra_data)
    return sso_key
def get_users_by_company(self, company_id, user_type):
    """Returns a list of users in a company based on `user_type` provided.

    Only shows users that the requesting account is allowed to manage.
    A non-list response is reported to Rollbar (using the API's own
    'message' when present) and yields an empty list.
    """
    resource_path = DSS_321FORMS_API_RESOURCE_COMPANY_USERS % {
        'company_id' : company_id,
        'user_type' : user_type,
    }
    request_url = self.get_request_url(resource_path=resource_path)
    response = self.request_get(request_url)
    result = response.json()
    # isinstance instead of `type(x) == list` -- idiomatic and tolerant
    # of list/dict subclasses.
    if isinstance(result, list):
        users = result
    else:
        users = []
        if isinstance(result, dict) and 'message' in result:
            message = result['message']
        else:
            message = 'Error retrieving users by company'
        extra_data = {
            'username' : self.username,
            'company_id' : company_id,
            'user_type' : user_type,
        }
        rollbar.report_message(message, extra_data=extra_data)
    return users
def greeting():
    """Greet by the `name` query parameter, defaulting to the world.

    Missing names are reported to Rollbar as a warning.
    """
    name = request.args.get('name')
    if name:
        return "Hello {}!".format(name)
    rollbar.report_message('No name passed for greeting', 'warning')
    return "Hello World!"
def emit(self, record):
    """Forward a logging record to Rollbar.

    Records carrying exc_info become exception reports; everything else
    is reported as a plain message at the record's level.
    """
    if record.exc_info:
        rollbar.report_exc_info(record.exc_info)
        return
    rollbar.report_message(record.msg, record.levelname, request=None)
def __task_aggregation__(self,classifications,task_id,aggregations):
    """Decide which subjects to retire based on 'everything transcribed' votes.

    A subject is retired when at least 3 users said everything is
    transcribed AND those votes make up at least 60% of all votes for the
    subject. The retirement POST is best-effort: failures are reported to
    Rollbar and the aggregations are returned regardless.
    """
    to_retire = []
    for subject_id in classifications:
        users,everything_transcribed = zip(*classifications[subject_id])
        # Count the "yes" votes once and derive the percentage from it;
        # the original recomputed the identical sum twice.
        count = sum(1. for e in everything_transcribed if e == True)
        percent = count / float(len(everything_transcribed))
        if (count >= 3) and (percent >= 0.6):
            to_retire.append(subject_id)

    # having some trouble with the format for submitting retirement requests
    # so going to try and if we have a problem - just keep on going
    # also, report back the results so we know what's going
    try:
        headers = {"Accept":"application/vnd.api+json; version=1","Content-Type": "application/json", "Authorization":"Bearer "+self.token}
        params = {"retired_subjects":to_retire}
        # r = requests.post("https://panoptes.zooniverse.org/api/workflows/"+str(self.workflow_id)+"/links/retired_subjects",headers=headers,json=params)
        r = requests.post("https://panoptes.zooniverse.org/api/workflows/"+str(self.workflow_id)+"/links/retired_subjects",headers=headers,data=json.dumps(params))
        rollbar.report_message("results from trying to retire subjects","info",extra_data=r.text)
    except TypeError:
        rollbar.report_exc_info()

    return aggregations
def _handle_player(player):
    """Refresh a player's stored username from the UUID lookup service.

    Returns True when the stored username changed, False otherwise
    (recently-seen players and lookup failures are skipped, with
    failures reported to Rollbar).
    """
    stats = PlayerStats.query.filter_by(
        player=player,
        server_id=app.config.get('MAIN_SERVER_ID')
    ).first()
    if stats and stats.last_seen > datetime.utcnow() - timedelta(days=1):
        # ignore players that have joined since the job started
        return False
    try:
        actual_username = minecraft_uuid.lookup_latest_username_by_uuid(player.uuid)
    except requests.RequestException as e:
        # NOTE: unicode() means this module targets Python 2.
        rollbar.report_message('Exception looking up uuid, skipping group', level='warning', extra_data={
            'exception': unicode(e)
        })
        return False
    if not actual_username:
        rollbar.report_message('Error getting actual username, skipping', level='warning', extra_data={
            'uuid': player.uuid
        })
        return False
    if actual_username != player.username:
        # presumably resolves a name collision with any other player
        # currently holding the new name -- verify in h.avoid_duplicate_username
        h.avoid_duplicate_username(actual_username, player.uuid)
        player.set_username(actual_username)
        player.save(commit=True)
        return True
    return False
def send_email_task(from_email, to_email, subject, text_body, html_body):
    """Send an email through the Mailgun HTTP API.

    The outcome is reported to Rollbar: success at info level, a non-200
    response at error level, and a transport failure as an exception
    report. Returns the `requests` response, or None when the POST raised.
    """
    from standardweb.lib.email import EMAIL_URL

    auth = ('api', app.config['MAILGUN_API_KEY'])
    data = {
        'from': from_email,
        'to': to_email,
        'subject': subject,
        'text': text_body,
        'html': html_body
    }

    result = None
    try:
        result = requests.post(EMAIL_URL, auth=auth, data=data)
    except Exception:
        rollbar.report_exc_info()
        return result

    if result.status_code == 200:
        rollbar.report_message('Email sent', level='info', extra_data={
            'data': data,
            'result': result.json()
        })
    else:
        rollbar.report_message('Problem sending email', level='error', extra_data={
            'data': data,
            'result': result
        })
    return result
def api_call(server, type, data=None):
    """Call the server's API, returning the parsed result on success.

    Returns None (after reporting to Rollbar) when the call raises, when
    no result comes back, or when the result signals an exception.
    """
    api = get_api(server.address)

    try:
        result = api.call(type, data) if data else api.call(type)
    except Exception:
        rollbar.report_exc_info(
            extra_data={
                'server_id': server.id,
                'type': type,
                'data': data
            }
        )
        return None

    if result and result.get('result') != API_CALL_RESULTS['exception']:
        return result

    extra_data = {
        'server_id': server.id,
        'data': data,
        'message': result.get('message') if result else 'No result!',
    }
    rollbar.report_message('Exception while calling server API',
                           level='error', extra_data=extra_data)
    return None
def check_uuids():
    """Check one batch of player uuids, wrapping around at the table end.

    The batch offset persists in the cache between job runs, so the whole
    player table is walked PLAYERS_PER_JOB rows at a time.
    """
    num_changed = 0
    offset = cache.get(COUNTER_CACHE_NAME) or 0
    players = _get_players(offset)
    if not players:
        # Ran off the end of the table: report and restart from the top.
        rollbar.report_message('All players checked, check_uuids wrapping around', level='info', extra_data={
            'offset': offset
        })
        offset = 0
        players = _get_players(offset)
    for player in players:
        try:
            changed = _handle_player(player)
        except (IntegrityError, OperationalError):
            # Roll back so one bad row doesn't poison the session for the
            # remaining players in the batch.
            db.session.rollback()
            rollbar.report_exc_info(level='warning', extra_data={
                'uuid': player.uuid
            })
        else:
            if changed:
                num_changed += 1
    # Advance the persistent offset (24h TTL) and report the batch summary.
    cache.set(COUNTER_CACHE_NAME, offset + PLAYERS_PER_JOB, 86400)
    rollbar.report_message('Finished checking uuid group', level='info', extra_data={
        'offset': offset,
        'num_changed': num_changed
    })
def valid_csrf_or_abort():
    """Abort the request with 400 unless the session CSRF token is valid.

    Invalid tokens are logged and reported to Rollbar before aborting.
    """
    if is_csrf_token_valid():
        return
    current_app.logger.info(
        u'csrf.invalid_token: Aborting request, user_id: {user_id}',
        # Fixed truncated sentinel: was '<unknown' (missing closing '>').
        extra={'user_id': session.get('user_id', '<unknown>')})
    rollbar.report_message('csrf.invalid_token: Aborting request valid_csrf_or_abort()', 'error', request)
    abort(400, 'Invalid CSRF token. Please try again.')
def _get_tip_subscription(event_body):
    """Look up the RecurringTip for a Stripe subscription event.

    Returns None (after a Rollbar warning) when no matching tip exists.
    """
    sub_id = event_body['data']['object']['subscription']
    try:
        tip = RecurringTip.objects.get(stripe_subscription_id=sub_id)
    except RecurringTip.DoesNotExist:
        rollbar.report_message(
            'Event on unknown subscription: %s' % sub_id, 'warn')
        return None
    return tip
def rollbar_log_event(event, request=None, log_level="info", message=None):
    """Log the Stripe event `event` to Rollbar, optionally prefixed with
    a custom `message`."""
    event_type = get_event_type(event)
    if message:
        full_message = "%s - Stripe Event: %s" % (message, event_type)
    else:
        full_message = "Stripe Event: %s" % event_type
    rollbar.report_message(full_message, log_level, request)
def task_exception(err, instance):
    """Report a model-property evaluation failure for `instance` to
    Rollbar (`cls`, `mp_name` and `fmt` come from the enclosing scope)."""
    message = "Exception evaluating mp %s::%s" % (cls.__name__, mp_name)
    rollbar.report_message(
        message,
        extra_data={'instance': fmt(instance)},
        level='error',
    )
def process_event(header, event):
    """
    Notify Rollbar about this query if the event passes the heuristics.
    """
    # NOTE: dict.iteritems() means this module targets Python 2.
    for name, heuristic in heuristics.iteritems():
        # Each heuristic returns a severity level name (falsy to skip).
        level = heuristic(header, event)
        # Only report at or above the configured notification level.
        if level and NOTIFICATION_LEVELS[level] >= notification_level:
            extra = {'header': header, 'data': event}
            rollbar.report_message(name, level=level, extra_data=extra,
                                   payload_data={'language': 'sql'})
def test_trigger_failsafe(self, post, _send_failsafe):
    """The failsafe should fire once per failed report.

    `post` and `_send_failsafe` are mocks injected by decorators on this
    test (defined outside this block).
    """
    rollbar.report_message('derp')
    self.assertEqual(_send_failsafe.call_count, 1)
    try:
        raise Exception('trigger_failsafe')
    except:
        rollbar.report_exc_info()
    self.assertEqual(_send_failsafe.call_count, 2)
def register():
    """Start account registration for a player.

    Validates the posted uuid/email/username, purges stale EmailToken
    rows for the uuid, then emails either a verification link (existing
    user without an email) or an account-creation link. Suspiciously
    fast registrations are reported to Rollbar and audit-logged.
    """
    uuid = request.form.get('uuid')
    email = request.form.get('email')
    username = request.form.get('username')
    player = Player.query.filter_by(uuid=uuid).first()
    if not player:
        return jsonify({
            'err': 1,
            'message': 'Please try again later'
        })
    user = User.query.filter_by(player=player).first()
    if user and user.email:
        return jsonify({
            'err': 1,
            'message': 'You are already registered!'
        })
    if not h.is_valid_email(email):
        return jsonify({
            'err': 1,
            'message': 'Not a valid email address'
        })
    other_user = User.query.filter_by(email=email).first()
    if other_user:
        return jsonify({
            'err': 1,
            'message': 'Email already in use'
        })
    # Any previously issued tokens for this uuid are now stale.
    email_tokens = EmailToken.query.filter_by(uuid=uuid)
    for email_token in email_tokens:
        db.session.delete(email_token)
    # old-style user without an email, just let them verify an email
    if user:
        send_verify_email(email, user)
    else:
        send_creation_email(email, uuid, username)
    # Flag accounts registered faster than a human plausibly would.
    total_time = libplayer.get_total_player_time(player.id)
    if total_time < app.config['MINIMUM_REGISTER_PLAYER_TIME']:
        rollbar.report_message('Player creating user account super quickly', level='error', request=request)
        AuditLog.create(
            AuditLog.QUICK_USER_CREATE,
            player_id=player.id,
            username=player.username,
            commit=True
        )
    return jsonify({
        'err': 0,
        'message': 'Email sent! Check your inbox for further instructions'
    })
def send_message(username):
    """Send a private message from the logged-in user to `username`.

    `username` may match either a site User or a Player without an
    account yet. Applies forum-ban and anti-spam checks before creating
    the Message and firing notifications.
    """
    user = g.user
    if user.forum_ban:
        return jsonify({
            'err': 1,
            'message': 'Oops, you are blocked from sending any messages. Awkward...'
        })
    body = request.form.get('body')
    # Match on either the in-game player name or the site username.
    to_user = User.query.outerjoin(Player).options(
        joinedload(User.player)
    ).filter(
        or_(Player.username == username, User.username == username)
    ).first()
    if to_user:
        to_player = to_user.player
    else:
        # for cases of messages sent to players with no users created yet
        to_player = Player.query.filter_by(username=username).first()
        if not to_player:
            rollbar.report_message('to_player None', request=request)
            return jsonify({
                'err': 1
            })
    if libmessages.is_sender_spamming(user, to_user, to_player):
        rollbar.report_message('User blocked from spamming messages', request=request, extra_data={
            'to_user_id': to_user.id if to_user else None,
            'to_player_id': to_player.id if to_player else None
        })
        return jsonify({
            'err': 1,
            'message': 'Whoa there, you sent too many messages recently! Try sending a bit later.'
        })
    message = Message(
        from_user=user,
        to_user=to_user,
        to_player=to_player,
        body=body,
        user_ip=request.remote_addr
    )
    message.save()
    notify_new_message(message)
    stats.incr('messages.created')
    return jsonify({
        'err': 0,
        'message': message.to_dict()
    })
def test_disabled(self, send_payload):
    """Nothing should be sent when rollbar is globally disabled.

    `send_payload` is a mock injected by a decorator on this test.
    """
    rollbar.SETTINGS['enabled'] = False
    rollbar.report_message('foo')
    try:
        raise Exception('foo')
    except:
        rollbar.report_exc_info()
    self.assertEqual(send_payload.called, False)
def check_csrf_token():
    """Abort state-changing requests lacking a valid CSRF token or API key."""
    if request.method in ('POST', 'PATCH', 'PUT', 'DELETE'):
        '''
        Only check CSRF tokens if there is no valid API key in the request.
        The API key comes via a header which will not be forwarded by browsers
        automatically in authenticated requests, so the presence of a valid
        API key in the request proves authenticity like a CSRF token.
        '''
        api_key = get_api_key_from_request(request)
        if not api_key or not api_key_service.get_key(api_key):
            new_csrf_valid = check_valid_csrf()
            if not (new_csrf_valid):
                rollbar.report_message(
                    'csrf.invalid_token: Aborting request check_csrf_token()',
                    'error', request)
                # Bug fix: abort() needs the HTTP status code first;
                # passing only a message string raised LookupError
                # instead of returning a 400 response.
                abort(400, 'Invalid CSRF token. Please try again.')
def resolve_edit_user(_, info, **query_args):
    """Resolve edit_user mutation.

    Admins may assign any role; customer admins of the project may assign
    only customer roles. Successful edits invalidate the project and user
    caches; all outcomes are logged to CloudWatch.
    """
    project_name = query_args.get('project_name')
    success = False
    user_data = util.get_jwt_content(info.context)
    role = get_user_role(user_data)
    modified_user_data = {
        'email': query_args.get('email'),
        'organization': query_args.get('organization'),
        'responsibility': query_args.get('responsibility'),
        'role': query_args.get('role'),
        'phone_number': query_args.get('phone_number')
    }
    if (role == 'admin'
            and modified_user_data['role'] in ['admin', 'analyst', 'customer', 'customeradmin']) \
        or (is_customeradmin(project_name, user_data['user_email'])
            and modified_user_data['role'] in ['customer', 'customeradmin']):
        if user_domain.assign_role(
                modified_user_data['email'], modified_user_data['role']):
            modify_user_information(info.context, modified_user_data,
                                    project_name)
            success = True
        else:
            rollbar.report_message('Error: Couldn\'t update user role',
                                   'error', info.context)
    else:
        rollbar.report_message('Error: Invalid role provided: ' +
                               modified_user_data['role'], 'error',
                               info.context)
    if success:
        util.invalidate_cache(project_name)
        util.invalidate_cache(query_args.get('email'))
        util.cloudwatch_log(
            info.context, f'Security: Modified user data:{query_args.get("email")} \
in {project_name} project succesfully')
    else:
        # Bug fix: this literal was missing its f-prefix, so the audit log
        # recorded the placeholder text instead of the actual values.
        util.cloudwatch_log(
            info.context, f'Security: Attempted to modify user \
data:{query_args.get("email")} in {project_name} project')
    return dict(
        success=success,
        modified_user=dict(project_name=project_name,
                           email=modified_user_data['email']))
def create_user_action(user, action_type, request=None, metadata=None):
    """Create a UserAction for the specified action type.

    Args:
        user (User): The User object.
        action_type (str): The type of action to record.
        request (Request): The request object. Defaults to: None.
        metadata (dict): Any accompanying metadata to be added. Defaults to: {}.

    Returns:
        bool: Whether or not the UserAction was created successfully.
    """
    from app.utils import handle_location_request

    if action_type not in dict(UserAction.ACTION_TYPES).keys():
        print('UserAction.create_action received an invalid action_type')
        return False

    fields = {
        'metadata': metadata if metadata is not None else {},
        'action': action_type,
        'user': user,
    }

    if request:
        geolocation_data, ip_address = handle_location_request(request)
        if geolocation_data:
            fields['location_data'] = geolocation_data
        if ip_address:
            fields['ip_address'] = ip_address

    if user and hasattr(user, 'profile'):
        fields['profile'] = user.profile if user.profile else None

    try:
        UserAction.objects.create(**fields)
        return True
    except Exception as e:
        logger.error(f'Failure in UserAction.create_action - ({e})')
        rollbar.report_message(
            f'Failure in UserAction.create_action - ({e})',
            'error', extra_data=fields)
        return False
def verify_and_call(*args, **kwargs):
    """Decorator body: validate the event id and the caller's access
    before invoking the wrapped resolver `func` (from enclosing scope).

    Raises:
        GraphQLError: when the event id is malformed or access is denied.
    """
    context = args[1].context
    # The id may arrive as `event_id` or as the generic `identifier`.
    event_id = kwargs.get('event_id') \
        if kwargs.get('identifier') is None else kwargs.get('identifier')
    user_data = util.get_jwt_content(context)
    # NOTE(review): '*' also accepts an empty string, and a missing id
    # (None) would raise TypeError here rather than the GraphQLError --
    # confirm both are intended.
    if not re.match('^[0-9]*$', event_id):
        rollbar.report_message('Error: Invalid event id format', 'error', context)
        raise GraphQLError('Invalid event id format')
    if not has_access_to_event(user_data['user_email'], event_id,
                               user_data['user_role']):
        util.cloudwatch_log(
            context, 'Security: \
Attempted to retrieve event-related info without permission')
        raise GraphQLError('Access denied')
    return func(*args, **kwargs)
def sync_profile(handle, user=None, hide_profile=True):
    """Fetch a github user's data and repos and upsert the local Profile.

    Returns the Profile, or None when the github fetch or the database
    write fails. Also links the profile's email to the subscriber list.
    """
    data = get_user(handle)
    email = ''
    # The github payload includes a 'name' key on success; its absence
    # signals a failed user fetch.
    is_error = 'name' not in data.keys()
    if is_error:
        print("- error main")
        rollbar.report_message('Failed to fetch github username', 'warning', extra_data=data)
        return None

    # Repos sorted by stars, each annotated with contributor info.
    repos_data = get_user(handle, '/repos')
    repos_data = sorted(repos_data, key=lambda repo: repo['stargazers_count'], reverse=True)
    repos_data = [add_contributors(repo_data) for repo_data in repos_data]

    defaults = {
        'last_sync_date': timezone.now(),
        'data': data,
        'repos_data': repos_data,
        'hide_profile': hide_profile,
    }
    if user and isinstance(user, User):
        defaults['user'] = user
        try:
            defaults['github_access_token'] = user.social_auth.filter(provider='github').latest('pk').access_token
            if user and user.email:
                defaults['email'] = user.email
        except UserSocialAuth.DoesNotExist:
            pass

    # store the org info in postgres
    try:
        profile, created = Profile.objects.update_or_create(handle=handle, defaults=defaults)
        print("Profile:", profile, "- created" if created else "- updated")
    except Exception as e:
        logger.error(e)
        return None

    # Prefer the authenticated user's email; fall back to the profile's.
    if user and user.email:
        email = user.email
    elif profile and profile.email:
        email = profile.email
    if email and profile:
        get_or_save_email_subscriber(email, 'sync_profile', profile=profile)
    return profile
def register():
    """Start account registration for a player.

    Validates the posted uuid/email/username, purges stale EmailToken
    rows for the uuid, emails a verification or creation link, and flags
    suspiciously fast registrations via Rollbar and the audit log.
    """
    uuid = request.form.get('uuid')
    email = request.form.get('email')
    username = request.form.get('username')
    player = Player.query.filter_by(uuid=uuid).first()
    if not player:
        return jsonify({'err': 1, 'message': 'Please try again later'})
    user = User.query.filter_by(player=player).first()
    if user and user.email:
        return jsonify({'err': 1, 'message': 'You are already registered!'})
    if not h.is_valid_email(email):
        return jsonify({'err': 1, 'message': 'Not a valid email address'})
    other_user = User.query.filter_by(email=email).first()
    if other_user:
        return jsonify({'err': 1, 'message': 'Email already in use'})
    # Any previously issued tokens for this uuid are now stale.
    email_tokens = EmailToken.query.filter_by(uuid=uuid)
    for email_token in email_tokens:
        db.session.delete(email_token)
    # old-style user without an email, just let them verify an email
    if user:
        send_verify_email(email, user)
    else:
        send_creation_email(email, uuid, username)
    # Flag accounts registered faster than a human plausibly would.
    total_time = libplayer.get_total_player_time(player.id)
    if total_time < app.config['MINIMUM_REGISTER_PLAYER_TIME']:
        rollbar.report_message('Player creating user account super quickly',
                               level='error', request=request)
        AuditLog.create(AuditLog.QUICK_USER_CREATE, player_id=player.id,
                        username=player.username, commit=True)
    return jsonify({
        'err': 0,
        'message': 'Email sent! Check your inbox for further instructions'
    })
def format_vulnerabilities(
        vulnerabilities: List[Dict[str, FindingType]]) -> \
        Dict[str, List[FindingType]]:
    """Format vulnerabilitites.

    Groups vulnerabilities into 'ports'/'lines'/'inputs' buckets, mapping
    each to its where/specific field names and latest historic state.
    Unknown vuln types are reported to Rollbar and skipped.
    """
    finding: Dict[str, List[FindingType]] = {
        'ports': [], 'lines': [], 'inputs': []
    }
    vulns_types = ['ports', 'lines', 'inputs']
    vuln_values = {
        'ports': {
            'where': 'host',
            'specific': 'port',
        },
        'lines': {
            'where': 'path',
            'specific': 'line'
        },
        'inputs': {
            'where': 'url',
            'specific': 'field'
        }
    }
    for vuln in vulnerabilities:
        all_states = cast(List[Dict[str, FindingType]],
                          vuln.get('historic_state'))
        # The last historic entry holds the current state.
        current_state = all_states[len(all_states) - 1].get('state')
        vuln_type = str(vuln.get('vuln_type', ''))
        if vuln_type in vulns_types:
            finding[vuln_type].append({
                # html.unescape replaces HTMLParser().unescape, which was
                # deprecated and removed in Python 3.9.
                vuln_values[vuln_type]['where']: html.unescape(
                    vuln.get('where')),  # type: ignore
                vuln_values[vuln_type]['specific']: html.unescape(
                    vuln.get('specific')),  # type: ignore
                'state': str(current_state)
            })
        else:
            error_msg = 'Error: Vulnerability {vuln_id} of finding \
{finding_id} does not have the right type'\
                .format(vuln_id=vuln.get('UUID'),
                        finding_id=vuln.get('finding_id'))
            rollbar.report_message(error_msg, 'error')
    return finding
def get_jwt_content(context):
    """Decode and return the caller's JWT payload.

    Prefers the Authorization header's bearer token; falls back to the
    JWT cookie. Raises InvalidAuthorization on any failure mode.
    """
    try:
        cookie_token = context.COOKIES.get(settings.JWT_COOKIE_NAME)
        header_token = context.META.get('HTTP_AUTHORIZATION')
        token = header_token.split()[1] if header_token else cookie_token
        content = jwt.decode(token=token, key=settings.JWT_SECRET)
        return content
    except AttributeError:
        # presumably raised when no token is present at all (token=None)
        # -- verify against jwt.decode's behavior
        raise InvalidAuthorization()
    except IndexError:
        # Header present but not of the "Bearer <token>" form.
        rollbar.report_message(
            'Error: Malformed auth header', 'error', context)
        raise InvalidAuthorization()
    except JWTError:
        cloudwatch_log_plain('Security: Invalid token signature')
        raise InvalidAuthorization()
def patch_issue_comment(comment_id, owner, repo, comment):
    """Update a comment on an issue via patch.

    Returns the parsed JSON on success, or {} (after a Rollbar warning)
    for any non-200 response.
    """
    url = f'https://api.github.com/repos/{owner}/{repo}/issues/comments/{comment_id}'
    payload = json.dumps({'body': comment})
    response = requests.patch(url, data=payload, auth=_AUTH)
    if response.status_code != 200:
        rollbar.report_message(
            'Github issue comment patch returned non-200 status code',
            'warning',
            request=response.request,
            extra_data={
                'status_code': response.status_code,
                'reason': response.reason
            })
        return {}
    return response.json()
def verify_vulnerabilities(finding_id: str, user_email: str,
                           user_fullname: str, info,
                           parameters: Dict[str, FindingType]) -> bool:
    """Mark the requested open/closed vulns of a finding as VERIFIED.

    Appends a 'VERIFIED' entry to the finding's historic_verification,
    records a verification comment, then verifies each vulnerability.
    Returns True only when every per-vuln update and the finding update
    succeeded.

    Raises:
        VulnNotFound: when no vulnerabilities survive the verify/closed
            validations.
    """
    finding = finding_dal.get_finding(finding_id)
    vuln_ids = \
        cast(List[str], parameters.get('open_vulns', [])) + \
        cast(List[str], parameters.get('closed_vulns', []))
    vulnerabilities = get_by_ids(finding_id, vuln_ids)
    vulnerabilities = [validate_verify(vuln) for vuln in vulnerabilities]
    vulnerabilities = [validate_closed(vuln) for vuln in vulnerabilities]
    if not vulnerabilities:
        raise VulnNotFound()
    tzn = pytz.timezone(settings.TIME_ZONE)  # type: ignore
    today = datetime.now(tz=tzn).today().strftime('%Y-%m-%d %H:%M:%S')
    # Millisecond timestamp doubles as the comment's unique id.
    comment_id = int(round(time() * 1000))
    historic_verification = cast(List[Dict[str, Union[str, int, List[str]]]],
                                 finding.get('historic_verification', []))
    historic_verification.append({
        'date': today,
        'user': user_email,
        'status': 'VERIFIED',
        'comment': comment_id,
        'vulns': vuln_ids
    })
    update_finding = finding_dal.update(
        finding_id, {'historic_verification': historic_verification})
    comment_data: comment_dal.CommentType = {
        'comment_type': 'verification',
        'content': parameters.get('justification', ''),
        'created': today,
        'email': user_email,
        'finding_id': int(finding_id),
        'fullname': user_fullname,
        'modified': today,
        'parent': 0,
    }
    comment_dal.create(comment_id, comment_data)
    success = [vuln_dal.verify_vulnerability(vuln) for vuln in vulnerabilities]
    if all(success) and update_finding:
        success = verify(info, finding_id,
                         cast(List[Dict[str, str]], vulnerabilities),
                         cast(List[str], parameters.get('closed_vulns', [])),
                         today)
    else:
        rollbar.report_message('Error: An error occurred verifying', 'error')
    return all(success)
def csrf_protect():
    """Validate the CSRF token on POST requests, aborting 403 on mismatch.

    Exempt view functions and debug-toolbar endpoints are skipped.
    Mismatches are reported to Rollbar and the token is regenerated.
    """
    if request.method != "POST":
        return
    func = app.view_functions.get(request.endpoint)
    if not func or func in csrf.exempt_funcs or 'debugtoolbar' in request.endpoint:
        return
    session_token = session.get('csrf_token')
    request_token = (request.form.get('csrf_token')
                     or request.headers.get('X-CSRFToken'))
    if not session_token or session_token != request_token:
        rollbar.report_message(
            'CSRF mismatch', request=request,
            extra_data={'session_token': session_token})
        csrf.regenerate_token()
        abort(403)
def get_users_by_company(self, company_id, user_type):
    """Returns a list of users in a company based on `user_type` provided.

    Only shows users that the requesting account is allowed to manage.
    Pages through the API in batches of `limit`; a short page ends the
    loop, and a non-list response is reported to Rollbar (using the
    API's own 'message' when present).
    """
    resource_path = DSS_321FORMS_API_RESOURCE_COMPANY_USERS % {
        'company_id': company_id,
        'user_type': user_type,
    }
    offset = 0
    limit = 50
    users = []
    while offset is not None:
        params = {
            'offset': offset,
            'limit': limit,
        }
        request_url = self.get_request_url(resource_path=resource_path)
        response_json = self.request_get(request_url, params=params)
        # isinstance instead of `type(x) == list` -- idiomatic and
        # tolerant of list/dict subclasses.
        if isinstance(response_json, list):
            # A full page may be followed by more; a short page is last.
            if len(response_json) == limit:
                offset += limit
            else:
                offset = None
            users.extend(response_json)
        else:
            offset = None
            if isinstance(response_json, dict) and 'message' in response_json:
                message = response_json['message']
            else:
                message = 'Error retrieving users by company'
            extra_data = {
                'username': self.username,
                'company_id': company_id,
                'user_type': user_type,
            }
            rollbar.report_message(message, extra_data=extra_data)
    return users
def migrate_event_files(event):
    """Migrate event files to s3.

    For each evidence attachment, skips files already present in s3;
    otherwise uploads from the local /tmp cache, falling back to
    downloading from Google Drive first. Drive failures are reported
    to Rollbar.
    """
    project_name = event.get('projectName').lower()
    event_id = event.get('id')
    evidence_id = event.get('evidence')
    evidence_file = event.get('evidenceFile')
    # Candidate files with the MIME -> extension maps used downstream.
    files = [
        {'id': evidence_id,
         'file_type': {'image/jpeg': '.jpeg',
                       'image/gif': '.gif',
                       'image/png': '.png'}},
        {'id': evidence_file,
         'file_type': {'application/zip': '.zip',
                       'text/plain': '.csv',
                       'text/csv': '.csv',
                       'application/pdf': '.pdf',
                       'application/vnd.ms-office': '.xls'}}
    ]
    for curr_file in files:
        if curr_file.get('id'):
            file_name = '{project_name}/{event_id}/{file_id}'.format(
                project_name=project_name,
                event_id=event_id,
                file_id=curr_file['id'])
            folder = util.list_s3_objects(CLIENT_S3, BUCKET_S3, file_name)
            if folder:
                # File exist in s3
                pass
            else:
                # Prefer a locally cached copy; otherwise pull from Drive.
                fileroute = '/tmp/:id.tmp'.replace(':id', curr_file['id'])
                if os.path.exists(fileroute):
                    send_file_to_s3(file_name, curr_file, event_id)
                else:
                    drive_api = DriveAPI()
                    file_download_route = drive_api.download(
                        curr_file['id'])
                    if file_download_route:
                        send_file_to_s3(file_name, curr_file, event_id)
                    else:
                        rollbar.report_message(
                            'Error: An error occurred downloading \
file from Drive', 'error')
        else:
            # Event does not have evidences
            pass
def get_records_from_file(
        project_name: str, finding_id: str,
        file_name: str) -> List[Dict[object, object]]:
    """Download an evidence CSV and return up to 1000 rows as dicts.

    The first CSV row is used as the keys for every subsequent row.
    On read/encoding errors an empty list is returned and the error is
    reported to Rollbar (with the function's locals as payload, so do
    not rename variables here without checking the Rollbar payload).
    """
    file_path = _download_evidence_file(project_name, finding_id, file_name)
    file_content = []
    # Detect the file's character encoding before opening it as text.
    encoding = Magic(mime_encoding=True).from_file(file_path)
    try:
        with io.open(file_path, mode='r', encoding=encoding) as records_file:
            csv_reader = csv.reader(records_file)
            # Cap at 1000 data rows to bound memory/response size.
            max_rows = 1000
            headers = next(csv_reader)
            file_content = [util.list_to_dict(headers, row)
                            for row in itertools.islice(csv_reader, max_rows)]
    except (csv.Error, LookupError, UnicodeDecodeError) as ex:
        # LookupError covers an unknown codec name from Magic.
        rollbar.report_message('Error: Couldnt read records file', 'error',
                               extra_data=ex, payload_data=locals())
    return file_content
def verificate_hash_token(access_token: Dict[str, str], jti_token: str) -> bool:
    """Return True when `jti_token` matches the scrypt hash stored in
    `access_token` (hex-encoded 'salt' and 'jti' fields)."""
    kdf = Scrypt(salt=binascii.unhexlify(access_token['salt']),
                 length=NUMBER_OF_BYTES,
                 n=SCRYPT_N,
                 r=SCRYPT_R,
                 p=SCRYPT_P,
                 backend=default_backend())
    try:
        kdf.verify(binascii.unhexlify(jti_token),
                   binascii.unhexlify(access_token['jti']))
        return True
    except InvalidKey:
        rollbar.report_message('Error: Access token does not match', 'error')
        return False
def reject_user_if_non_existent(strategy, details, user=None, is_new=False,
                                *args, **kwargs):
    """Social-auth pipeline step: let known users through, redirect
    unknown accounts to the invite-only page and report the attempt."""
    if user is not None:
        # Known user: continue the pipeline unchanged.
        return None
    # We want to know if someone can't get in.
    rollbar.report_message(
        '[Invalid User Login Attempt] Someone has attempted to access the system with an invalid google account.',
        'warning',
        extra_data={
            'fullname': details['fullname'],
            'email': details['email']
        })
    print('REJECTING USER: {}'.format(details))
    return strategy.redirect(
        '/administration/invite-only/?email={}'.format(details['email']))
def debug_term_frequency_to_rollbar(self):
    """
    Sends to Rollbar the term frequency document so we can easily debug
    what terms and weights we work with.

    This should hopefully allow us to see what terms are being used and
    what sort of weights they get. To spot anomalies, stopwords, etc.
    """
    # List of (term, weight) tuples sorted descending by weight.
    # Example: [("lol", 9.89), ("kek", 3.37)]
    sorted_by_weight = sorted(self.termfreq_doc.items(),
                              key=operator.itemgetter(1), reverse=True)
    # Build the report with join() instead of repeated `+=`, which is
    # quadratic in the number of terms.
    lines = [u"Term weights: \n weight: term \n"]
    lines.extend(u"{0:.3f}: {1}\n".format(weight, term)
                 for term, weight in sorted_by_weight)
    rollbar.report_message(u"".join(lines), "debug")
def async_store_mp(resolved_instance_class, resolved_instance_pk, mp_name):
    """Resolve a model instance by class name and pk, then store the
    named materialized property on it.

    A missing instance is reported to Rollbar as a warning (raised
    instead when running under test settings).
    """
    cls = get_model_by_name(resolved_instance_class)
    if cls is None:
        raise Exception("Can't locate {}".format(resolved_instance_class))
    mp = registered_mps[cls][mp_name]
    try:
        instance = cls.objects.get(pk=resolved_instance_pk)
    except cls.DoesNotExist:
        if settings.TEST:
            raise
        rollbar.report_message(
            'Can not find instance {}::{} to update mp {}'.format(
                resolved_instance_class, resolved_instance_pk, mp_name),
            level='warning')
        return
    mp.store_mp(instance)
def test_mp(it, model, mp, fix=False, log_dbg=default_dbg,
            log_stat=default_stat, log_err=default_exception,
            throttler=NOP_THROTTLER):
    """Validate (and optionally fix) a materialized property on `model`.

    `mp` may be the mp object itself or its name (resolved through
    registered_mps).  With fix=True each suspect row is re-invalidated,
    throttled by `throttler`; per-row failures are reported to Rollbar
    instead of aborting the whole run.
    """
    # NOTE(review): `basestring` is the Python 2 spelling — this module
    # predates py3; kept to avoid a behavior change.
    if isinstance(mp, basestring):
        mp_name = mp
        mp = registered_mps[model].get(mp_name, None)
        if not mp:
            raise Exception('Unknown mp {} on {}'.format(
                mp_name, model.__name__))
    total = 0
    failed = 0
    not_set = 0
    attr_name = mp.field_name
    suspects = []
    pk_name = model._meta.pk.name
    if fix:
        for suspect_pk in suspects:
            try:
                instance = model.objects.get(**{pk_name: suspect_pk})
            except model.DoesNotExist:
                rollbar.report_message(
                    '{}(pk={}) did not exist in test_mp'.format(
                        model.__name__, suspect_pk),
                    level='warning')
                continue
            throttler.throttle()
            try:
                invalidate_for_instance(instance, mp.name, save=True)
            except Exception:
                # Was a bare `except:`, which also swallowed
                # KeyboardInterrupt/SystemExit during long fix runs.
                rollbar.report_exc_info(
                    extra={
                        'message': 'Invalidation failed in test_mp',
                        'model': model.__name__,
                        'suspect_pk': suspect_pk
                    })
                continue
    log_stat(failed, total, not_set)
def photo(bot, update):
    """Handle an incoming Telegram photo: download it, run the breed
    classifier, and reply with the prediction plus inline buttons to
    confirm or correct it.  Any failure is reported to Rollbar and the
    user gets a friendly apology."""
    try:
        print(" Received a photo.")
        pic = update.message.photo[-1]
        file_id = pic['file_id']
        print(" File id: " + file_id)
        print(update.message.chat)
        print(update.message.chat.id)
        bot.sendChatAction(update.message.chat.id, ChatAction.TYPING)
        print(" Sent typing notification")
        print(" Getting the image URL: ")
        # Request a link to a file that'll be valid for an hour.
        pic_url = bot.getFile(file_id)['file_path']
        print(" Done, img url: " + pic_url)
        print(" Downloading the pic to tmp...")
        pic_file_name = pic_url.split("/")[-1]
        urlretrieve(pic_url, data_path/pic_file_name)
        print(" Evaluating the image...")
        img = open_image(data_path/pic_file_name)
        pred_class, confidence, preds = learn.predict(img)
        print(f" Breed class: {pred_class}")
        # Indices of the next-best 3 predictions (the top one is shown
        # as the main answer).
        best_idx = np.argpartition(preds, -4)[-4:-1]
        keyboard = [[InlineKeyboardButton("Yep!", callback_data='correct')]] + \
            [[InlineKeyboardButton(class_to_human(classes[i]),
                                   callback_data=f"{pic_file_name},{classes[i]}")]
             for i in best_idx]
        reply_markup = InlineKeyboardMarkup(keyboard)
        update.message.reply_text(f"It looks like a {class_to_human(pred_class)}!",
                                  reply_markup=reply_markup)
        # Report to analytics and rollbar
        rollbar.report_message('Processed a picture', level='info')
    except Exception:
        # Was a bare `except:`, which also caught SystemExit and
        # KeyboardInterrupt; narrowed to Exception.
        update.message.reply_text("That was a bit too hard for me ;-(")
        rollbar.report_exc_info()
def mutate(self, info, repository_data, project_name):
    """Remove an environment URL from a project's resources.

    Finds the environment by `urlEnv` in the project's DynamoDB
    `environments` list, removes it by index, e-mails stakeholders,
    logs the outcome to CloudWatch and invalidates the project cache.
    """
    success = False
    environment_url = repository_data.get('urlEnv')
    env_list = \
        integrates_dao.get_project_dynamo(project_name)[0]['environments']
    # Linear search; `index` stays -1 when the URL is not found.
    index = -1
    cont = 0
    while index < 0 and len(env_list) > cont:
        if env_list[cont]['urlEnv'] == environment_url:
            # Keep the matched entry for the notification mail below.
            json_data = [env_list[cont]]
            index = cont
        else:
            index = -1
        cont += 1
    if index >= 0:
        remove_env = integrates_dao.remove_list_resource_dynamo(
            'FI_projects',
            'project_name',
            project_name,
            'environments',
            index)
        if remove_env:
            user_email = info.context.session['username']
            resources.send_mail(project_name, user_email, json_data,
                                'removed', 'environment')
            success = True
        else:
            # NOTE(review): the literal below uses a backslash line
            # continuation, so indentation whitespace is part of the
            # reported message.
            rollbar.report_message(
                'Error: \
                An error occurred removing an environment',
                'error', info.context)
    else:
        util.cloudwatch_log(
            info.context,
            'Security: \
            Attempted to remove an environment that does not exist')
    if success:
        util.cloudwatch_log(
            info.context, 'Security: Removed environments from \
            {project} project succesfully'.format(project=project_name))
    else:
        util.cloudwatch_log(
            info.context, 'Security: Attempted to remove environments \
            from {project} project'.format(project=project_name))
    ret = RemoveEnvironments(success=success,
                             resources=Resource(project_name))
    util.invalidate_cache(project_name)
    return ret
def project_to_pdf(request, lang, project, doctype):
    """Export a project's findings to a PDF report.

    Access is checked against the session's username/role first.  Only
    the 'tech' doctype is currently enabled; anything else returns a
    'Disabled report generation' page.
    """
    # NOTE(review): assert is stripped under `python -O`; an explicit
    # check would be safer for input validation.
    assert project.strip()
    if not has_access_to_project(request.session['username'], project,
                                 request.session['role']):
        util.cloudwatch_log(request, 'Security: Attempted to export project'
                            ' pdf without permission')
        return util.response([], 'Access denied', True)
    else:
        user = request.session['username'].split('@')[0]
        # Validate language/doctype; returns an error response on failure.
        validator = validation_project_to_pdf(request, lang, doctype)
        if validator is not None:
            return validator
        findings = finding_domain.get_findings(
            project_domain.list_findings(project.lower()))
        # Enrich each finding with its open vulnerabilities by type.
        findings = [cast_new_vulnerabilities(
            get_open_vuln_by_type(finding['findingId'], request), finding)
            for finding in findings]
        description = project_domain.get_description(project.lower())
        pdf_maker = CreatorPDF(lang, doctype)
        secure_pdf = SecurePDF()
        # Order findings by criticality before rendering evidences.
        findings_ord = util.ord_asc_by_criticidad(findings)
        findings = pdf_evidences(findings_ord)
        report_filename = ''
        if doctype == 'tech':
            pdf_maker.tech(findings, project, description)
            report_filename = secure_pdf.create_full(user,
                                                     pdf_maker.out_name,
                                                     project)
        else:
            return HttpResponse(
                'Disabled report generation', content_type='text/html')
        if not os.path.isfile(report_filename):
            rollbar.report_message(
                'Couldn\'t generate pdf report', 'error', request)
            return HttpResponse(
                'Couldn\'t generate pdf report', content_type='text/html')
        with open(report_filename, 'rb') as document:
            response = HttpResponse(document.read(),
                                    content_type='application/pdf')
        response['Content-Disposition'] = \
            'inline;filename={}_IT.pdf'.format(project)
        return response
def get(self, resource_type, params, headers=None, auth_type='bearer',
        refresh_token=True):
    """Performs a Fitbit API GET request

    `auth_type` the string 'basic' or 'bearer'
    `refresh_token` if True, will refresh the OAuth token when needed
    """
    url = self.get_resource_url(resource_type)
    # Use a separate name so the caller's `headers` survives for the
    # retry below — the old code shadowed it with make_headers' result,
    # silently dropping custom headers on the 401 retry.
    request_headers = self.make_headers(auth_type, headers=headers)
    response = requests.get(url, headers=request_headers, params=params)
    if response.status_code == 401:
        if refresh_token:
            was_refreshed = self.refresh_oauth2_token()
            if was_refreshed:
                # if token was successfully refreshed, repeat request
                # exactly once (refresh_token=False prevents recursion)
                response = self.get(resource_type, params, headers=headers,
                                    auth_type=auth_type, refresh_token=False)
            else:
                pass
        else:
            extra_data = {
                'user_id': self.social_auth_user.user.id,
                'username': self.social_auth_user.user.username,
                'response': response.json(),
            }
            rollbar.report_message(
                'Fitbit OAuth token expired, needs refreshing',
                extra_data=extra_data)
    elif response.status_code == 200:
        pass
    else:
        extra_data = {
            'response': response.json(),
        }
        rollbar.report_message(
            'Unexpected response from Fitbit API GET request',
            extra_data=extra_data)
    return response
def db_backup():
    """Dump the MySQL database, gzip it, and upload it to S3.

    Previous local backups under /tmp/db_backups/ are deleted first; a
    completion event with the uploaded filename is sent to Rollbar.
    """
    backup_dir = '/tmp/db_backups/'
    if not os.path.exists(backup_dir):
        os.makedirs(backup_dir)
    # delete old backups
    for f in os.listdir(backup_dir):
        path = os.path.join(backup_dir, f)
        if os.path.isfile(path):
            os.unlink(path)
    filename = datetime.utcnow().strftime('backup-%Y-%m-%d-%H-%M-%S.sql')
    backup_path = os.path.join(backup_dir, filename)
    # NOTE(review): '******' looks like a redacted mysqldump password
    # flag (e.g. ' -p') — confirm against the deployed source.
    password = '******' + app.config['DB_BACKUP_PASSWORD'] if app.config[
        'DB_BACKUP_PASSWORD'] else ''
    # do the backup
    # NOTE(review): shell command built by string concatenation; config
    # values are operator-controlled here, but subprocess.run([...])
    # would avoid any shell-injection risk.
    os.system('mysqldump -u ' + app.config['DB_BACKUP_USER'] + password +
              ' standardsurvival > ' + backup_path)
    gzip_filename = filename + '.gz'
    gzip_backup_path = os.path.join(backup_dir, gzip_filename)
    # gzip the backup
    with open(backup_path, 'rb') as f_in:
        with gzip.open(gzip_backup_path, 'wb') as f_out:
            f_out.writelines(f_in)
    # upload to S3
    conn = S3Connection(app.config['AWS_ACCESS_KEY_ID'],
                        app.config['AWS_SECRET_ACCESS_KEY'])
    bucket = conn.get_bucket(app.config['BACKUP_BUCKET_NAME'],
                             validate=False)
    key = Key(bucket)
    key.key = 'mysql/' + gzip_filename
    # reduced_redundancy trades durability for lower storage cost.
    key.set_contents_from_filename(gzip_backup_path,
                                   reduced_redundancy=True)
    rollbar.report_message('Database backup complete', level='info',
                           extra_data={
                               'filename': gzip_filename,
                           })
def mutate(self, info, **query_args):
    """Grant a user access to a project.

    Admins may assign any role; a project's customeradmin may only
    grant 'customer' or 'customeradmin'.  Outcome is logged to
    CloudWatch, failures to Rollbar; caches are invalidated on success.
    """
    project_name = query_args.get('project_name')
    success = False
    user_data = util.get_jwt_content(info.context)
    role = get_user_role(user_data)
    # Optional fields fall back to placeholder defaults.
    new_user_data = {
        'email': query_args.get('email'),
        'organization': query_args.get('organization'),
        'responsibility': query_args.get('responsibility', '-'),
        'role': query_args.get('role'),
        'phone_number': query_args.get('phone_number', '')
    }
    # Role-based authorization for who may grant which role.
    if (role == 'admin'
        and new_user_data['role'] in ['admin', 'analyst',
                                      'customer', 'customeradmin']) \
        or (is_customeradmin(project_name, user_data['user_email'])
            and new_user_data['role'] in ['customer', 'customeradmin']):
        if create_new_user(info.context, new_user_data, project_name):
            success = True
        else:
            rollbar.report_message(
                'Error: Couldn\'t grant access to project',
                'error', info.context)
    else:
        rollbar.report_message(
            'Error: Invalid role provided: ' + new_user_data['role'],
            'error', info.context)
    if success:
        # Invalidate cached project/user views so the grant shows up.
        util.invalidate_cache(project_name)
        util.invalidate_cache(query_args.get('email'))
        util.cloudwatch_log(
            info.context, 'Security: Given grant access to {user} \
            in {project} project'.format(user=query_args.get('email'),
                                         project=project_name))
    else:
        util.cloudwatch_log(
            info.context, 'Security: Attempted to give grant \
            access to {user} in {project} project'.format(
                user=query_args.get('email'), project=project_name))
    ret = \
        GrantUserAccess(success=success,
                        granted_user=User(project_name,
                                          new_user_data['email']))
    return ret
def mutate(self, info, **query_args):
    """Edit an existing project user's data and (optionally) role.

    Session 'admin' users may set any role; a project's customeradmin
    may only set customer-level roles.  Project and user caches are
    invalidated after the attempt regardless of outcome.
    """
    project_name = query_args.get('project_name')
    success = False
    modified_user_data = {
        'email': query_args.get('email'),
        'organization': query_args.get('organization'),
        'responsibility': query_args.get('responsibility'),
        'role': query_args.get('role'),
        'phone_number': query_args.get('phone_number')
    }
    # Role-based authorization for who may assign which role.
    if (info.context.session['role'] == 'admin'
        and modified_user_data['role'] in ['admin', 'analyst',
                                           'customer', 'customeradmin']) \
        or (is_customeradmin(project_name, info.context.session['username'])
            and modified_user_data['role'] in ['customer', 'customeradmin']):
        # NOTE(review): assign_role returning None is treated as
        # success here — confirm that contract in integrates_dao.
        if integrates_dao.assign_role(modified_user_data['email'],
                                      modified_user_data['role']) is None:
            modify_user_information(info.context, modified_user_data,
                                    project_name)
            success = True
        else:
            rollbar.report_message('Error: Couldn\'t update user role',
                                   'error', info.context)
    else:
        rollbar.report_message('Error: Invalid role provided: ' +
                               modified_user_data['role'],
                               'error', info.context)
    if success:
        util.cloudwatch_log(info.context,
                            'Security: Modified user data:{user} \
        in {project} project succesfully'.format(
                                user=query_args.get('email'),
                                project=project_name))
    else:
        util.cloudwatch_log(info.context,
                            'Security: Attempted to modify user \
        data:{user} in {project} project'.format(
                                user=query_args.get('email'),
                                project=project_name))
    ret = \
        EditUser(success=success,
                 modified_user=User(project_name,
                                    modified_user_data['email']))
    util.invalidate_cache(project_name)
    util.invalidate_cache(query_args.get('email'))
    return ret
def verify_and_call(*args, **kwargs):
    """Validate the `findingid` parameter format and the session's
    access rights before delegating to the wrapped view."""
    request = args[0]
    source = request.POST if request.method == "POST" else request.GET
    findingid = source.get('findingid', '')
    if not re.match("^[0-9]*$", findingid):
        rollbar.report_message('Error: Invalid finding id format',
                               'error', request)
        return util.response([], 'Invalid finding id format', True)
    allowed = has_access_to_finding(request.session['username'],
                                    findingid,
                                    request.session['role'])
    if not allowed:
        util.cloudwatch_log(
            request, 'Security: \
Attempted to retrieve finding-related info without permission')
        return util.response([], 'Access denied', True)
    return func(*args, **kwargs)
def handler(event, context):
    """Report a failed ECS task (delivered via a Step Functions catch
    event) to Rollbar with a concise reason and a console URL.

    If the event's "Cause" is not JSON, fall back to reporting the
    state machine's "Error" field.
    """
    try:
        cause = json.loads(event["Cause"])
        task_name = cause["TaskDefinitionArn"].split("/")[1].split(":")[0]
        reason = cause["StoppedReason"]
        command = " ".join(
            cause["Overrides"]["ContainerOverrides"][0]["Command"])
        # Strip a trailing period for cleaner message composition.
        if reason.endswith("."):
            reason = reason[:-1]
        # Container exit reason takes precedence
        for container in cause["Containers"]:
            if "Reason" in container:
                reason = container["Reason"]
        # Raw strings below: `\(` in a normal literal is an invalid
        # escape sequence (DeprecationWarning, future SyntaxError).
        # Extract "OutOfMemoryError" from "OutOfMemoryError: ..."
        if re.match(r"^(.+): (.+)$", reason):
            reason = reason.partition(":")[0]
        # Extract "Error" from "Error (...)":
        elif re.match(r"^(.+) \((.+)\)$", reason):
            reason = reason.partition(" (")[0]
        ecs_url = ECS_TASK_URL.format(
            region=cause["AvailabilityZone"][:-1],
            cluster=cause["ClusterArn"].split("/")[1],
            task=cause["TaskArn"].split("/")[2],
        )
        msg = f"""
{task_name} `{command}` failed: {reason}
{ecs_url}
"""
        rollbar.report_message(msg, level="error", extra_data=event)
    except json.decoder.JSONDecodeError:
        error = event["Error"]
        rollbar.report_message(f"A State Machine has failed: {error}",
                               level="error", extra_data=event)
def mutate(self, info, repository_data, project_name):
    """Remove a repository (matched by URL and branch) from a project.

    Finds the repository in the project's DynamoDB `repositories` list,
    removes it by index, e-mails stakeholders, logs the outcome to
    CloudWatch and invalidates the project cache.
    """
    success = False
    repository = repository_data.get('urlRepo')
    branch = repository_data.get('branch')
    repo_list = \
        integrates_dao.get_project_dynamo(project_name)[0]['repositories']
    # Linear search; `index` stays -1 when the repo/branch is not found.
    index = -1
    cont = 0
    json_data = []
    while index < 0 and len(repo_list) > cont:
        if repo_list[cont]['urlRepo'] == repository and \
                repo_list[cont]['branch'] == branch:
            # Keep the matched entry for the notification mail below.
            json_data = [repo_list[cont]]
            index = cont
        else:
            index = -1
        cont += 1
    if index >= 0:
        remove_repo = integrates_dao.remove_list_resource_dynamo(
            'FI_projects',
            'project_name',
            project_name,
            'repositories',
            index)
        if remove_repo:
            user_email = info.context.session['username']
            resources.send_mail(project_name, user_email, json_data,
                                'removed', 'repository')
            success = True
        else:
            # NOTE(review): the literal below uses a backslash line
            # continuation, so indentation whitespace is part of the
            # reported message.
            rollbar.report_message(
                'Error: \
                An error occurred removing repository',
                'error', info.context)
    if success:
        util.cloudwatch_log(
            info.context, 'Security: Removed repositories from \
            {project} project succesfully'.format(project=project_name))
    else:
        util.cloudwatch_log(
            info.context, 'Security: Attempted to remove repositories \
            from {project} project'.format(project=project_name))
    ret = RemoveRepositories(success=success,
                             resources=Resource(project_name))
    util.invalidate_cache(project_name)
    return ret
async def stability_loop(self):
    # TODO: Break this method up
    """Background task: every 15 seconds, schedule cleanup of rogue
    docker containers and, when the stability check fails, reboot the
    host.  Errors are reported to Rollbar and logged, then the loop
    continues."""
    while True:
        logging.debug("Checking system stability...")
        try:
            asyncio.get_event_loop().create_task(
                self._cleanup_rogue_docker_containers())
            if STABILITY_CHECK and not self._stability.check_stable():
                rollbar.report_message("System is unstable, rebooting")
                logging.error("System is unstable, rebooting")
                # Note that the current user has to have NOPASSWD set up in
                # /etc/sudoers for /sbin/reboot on Ubuntu systems for this to
                # work from Python
                os.system("sudo reboot")
        except Exception:
            # rollbar.report_exc_info expects a sys.exc_info()-style
            # (type, value, traceback) tuple, defaulting to the current
            # exception — the old code wrongly passed the exception
            # instance itself.
            rollbar.report_exc_info()
            logger.exception("Error while checking stability")
        await asyncio.sleep(15)