def display(args, logger):
    """Sets up Xvfb if necessary."""
    if not args.no_xvfb:
        logger.info("Checking if Xvfb is installed")
        utils.installed('xvfb', logger)
        logger.debug("Xvfb found, starting it")
        crawlglobs.display = Display(backend='xvfb', visible=False)
        crawlglobs.display.start()
    else:
        logger.info("Not using Xvfb")
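# Hypothetical sketch of the utils.installed() helper assumed above; the
# real implementation is not part of this excerpt. It presumably exits
# when the binary is missing, since callers proceed unconditionally
# afterwards.
import sys
from distutils.spawn import find_executable

def installed(binary, logger):
    """Exit with an error if `binary` is not found on the PATH."""
    if find_executable(binary) is None:
        logger.critical("%s is not installed. Exiting." % binary)
        sys.exit(1)
    logger.debug("%s found" % binary)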
def form_valid(self, form):
    self.source = BlacklistSource.objects.get(id=1)
    self.expiry = form.cleaned_data.get('expiry_date', None)
    if not self.expiry:
        self.expiry = now() + timedelta(days=50 * 365)  # 50 year default
    self.level = form.cleaned_data.get('level', 0)
    self.reason = form.cleaned_data.get('reason', 'No reason provided')

    # Blacklist email address
    self.blacklist_item(BLACKLIST_TYPE_EMAIL, self.blacklist_user.email)

    # Blacklist API keys
    for account in self.blacklist_user.eveaccount_set.all():
        self.blacklist_item(BLACKLIST_TYPE_APIUSERID, account.api_user_id)

    # Blacklist characters
    for character in EVEPlayerCharacter.objects.filter(eveaccount__user=self.blacklist_user).distinct():
        self.blacklist_item(BLACKLIST_TYPE_CHARACTER, character.name)

    # Blacklist Reddit accounts
    if installed('reddit'):
        for account in self.blacklist_user.redditaccount_set.all():
            self.blacklist_item(BLACKLIST_TYPE_REDDIT, account.username)

    messages.add_message(self.request, messages.INFO,
                         "User %s has been blacklisted" % self.blacklist_user.username)

    # Disable the account if requested
    if (form.cleaned_data.get('disable', None)
            and self.request.user.has_perm('auth.change_user')
            and self.request.user.has_perm('sso.delete_serviceaccount')):
        self.blacklist_user.is_active = False
        self.blacklist_user.save()
        messages.add_message(self.request, messages.INFO,
                             "User %s disabled" % self.blacklist_user.username)
        update_user_access.delay(user=self.blacklist_user.id)

    return HttpResponseRedirect(reverse('sso-viewuser', args=[self.blacklist_user.username]))
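# Hypothetical sketch of the blacklist_item() helper called repeatedly
# above; the real method is not shown in this excerpt, and the Blacklist
# model and its field names are assumptions.
def blacklist_item(self, type, value):
    """Record one blacklist entry using the values gathered in form_valid()."""
    Blacklist.objects.create(
        type=type,
        value=value,
        source=self.source,
        expiry_date=self.expiry,
        level=self.level,
        reason=self.reason,
        created_by=self.request.user,
    )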
def run():
    args = setup_args()
    logger = setup_logging(args)
    display(args, logger)

    # Check if pngnq is installed.
    utils.installed('pngnq', logger)

    if args.browser and args.browser == 'Firefox':
        # Check firefox installation and version.
        prep_firefox(args, logger)

    # Set up global variables.
    if args.screenshot_dir:
        crawlglobs.img_dir = args.screenshot_dir
    if args.dom_dir:
        crawlglobs.dom_dir = args.dom_dir
    if args.visit_chain_dir:
        crawlglobs.visit_chain_dir = args.visit_chain_dir
    if args.proxy_file:
        crawlglobs.proxy_file = args.proxy_file
    if args.proxy_scheme:
        crawlglobs.proxy_scheme = args.proxy_scheme

    # Get all files given as input. If the input is a directory,
    # crawl every .json file inside it.
    input_file = args.input_file
    if os.path.isdir(args.input_file[0]):
        logger.info("All .json files in %s will be crawled" % args.input_file[0])
        input_file = [os.path.join(args.input_file[0], f)
                      for f in os.listdir(args.input_file[0])
                      if f.endswith('.json')]

    if args.tags_file:
        logger.debug("Tag file supplied. Loading tags.")
        # Build a tags list of the form
        # [(tag_name, {"threshold": value, "regexes": [compiled regexes]})]
        try:
            crawlglobs.tags_l = []
            for (tag_name, attrs) in json.load(args.tags_file).iteritems():
                temp_obj = {"threshold": attrs["threshold"]}
                temp_obj["regexes"] = [re.compile(regex)
                                       for regex in attrs["regexes"]]
                crawlglobs.tags_l.append((tag_name, temp_obj))
        except Exception, e:
            logger.critical("Error loading tags file: %s" % e)
            exit(1)
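# Illustrative tags file for run() above; the tag name, threshold, and
# regexes are made-up examples, but the structure matches what the
# loading loop expects.
example_tags = {
    "login_form": {
        "threshold": 0.8,
        "regexes": [r"(?i)password", r"(?i)sign\s*in"]
    }
}
# json.dump(example_tags, open("tags.json", "w")) yields a file that the
# loop above parses into crawlglobs.tags_l as
# [("login_form", {"threshold": 0.8, "regexes": [<compiled>, <compiled>]})].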
def prep_firefox(args, logger):
    """Checks if a supported version of firefox is installed.
    Exits with an error message if it isn't. Finally, creates a
    directory to store firefox profiles.
    """
    utils.installed('firefox', logger)
    logger.info("Checking Firefox version compatibility")

    fh = open("firefox_template/extensions/[email protected]/"
              "install.rdf", "r")
    data = fh.read()
    fh.close()

    match = re.search(r'maxVersion="((\d+\.\d+)\.?.*)"', data)
    if match:
        crawler_ff_version = float(match.group(2))
        logger.debug("Expected maximum firefox version: %s" %
                     crawler_ff_version)
        ff_version_output = subprocess.Popen(
            ["firefox", "--version"],
            stdout=subprocess.PIPE).communicate()[0]
        match = re.search(r'Mozilla Firefox (\d+\.\d+).*', ff_version_output)
        if match:
            system_ff_version = float(match.group(1))
            logger.debug("System's firefox version: %s" % system_ff_version)
            if system_ff_version > crawler_ff_version:
                logger.critical(
                    ("Crawler only supports Firefox up to %.1f.\n"
                     "The crawler extension needs to be updated.\n"
                     "Updating the maxVersion in the install.rdf file in\n"
                     "[email protected] to the system firefox version\n"
                     "might work.\nExiting.") % crawler_ff_version)
                exit(1)

    # Create tmp directory for storing firefox profiles.
    profile_dir = os.path.join(utils.default_tmp_location(),
                               config.PROFILE_DIR)
    if not os.path.exists(profile_dir):
        logger.info("Creating directory for firefox profiles")
        os.makedirs(profile_dir)
    else:
        logger.error("Firefox profile directory already exists. Something's "
                     "wrong. Please file a bug.")
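# Illustrative run of the maxVersion regex used in prep_firefox() against
# a sample install.rdf fragment; the fragment and version number are made
# up, and real install.rdf files may use element rather than attribute
# syntax.
import re

sample = 'em:maxVersion="24.0.*"'
match = re.search(r'maxVersion="((\d+\.\d+)\.?.*)"', sample)
print(match.group(2))  # "24.0", compared as float(24.0) above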
def __init__(self, *args, **kwargs):
    request = kwargs.pop("request", None)
    super(UserLookupForm, self).__init__(*args, **kwargs)

    choices = [
        (1, "Auth Username"),
        (2, "Character"),
        (4, "Email Address"),
        (5, "EVE API Key ID"),
        (6, "Service UID"),
    ]
    # Type 3 is only offered when the reddit app is installed and active.
    if installed("reddit") and gargoyle.is_active("reddit", request):
        choices.append((3, "Reddit ID"))

    self.fields["type"] = forms.ChoiceField(label=u"Search type", choices=choices)
    self.fields["username"] = forms.CharField(label=u"Value", max_length=64)
def user_lookup(request):
    """ Look up a user's account by providing a matching criterion """
    form = UserLookupForm(request=request)

    if not request.user.has_perm('sso.can_search_users'):
        return redirect('sso-profile')

    if request.method == 'POST':
        form = UserLookupForm(request.POST, request=request)
        if form.is_valid():
            users = None
            uids = []
            username = form.cleaned_data['username'].strip()
            if form.cleaned_data['type'] == '1':
                users = User.objects.filter(username__icontains=username).only('username')
            elif form.cleaned_data['type'] == '2':
                uid = EVEAccount.objects.filter(characters__name__icontains=username).values('user')
                for u in uid:
                    uids.append(u['user'])
                users = User.objects.filter(id__in=uids).only('username')
            elif installed('reddit') and gargoyle.is_active('reddit', request) and form.cleaned_data['type'] == '3':
                from reddit.models import RedditAccount
                uid = RedditAccount.objects.filter(username__icontains=username).values('user')
                for u in uid:
                    uids.append(u['user'])
                users = User.objects.filter(id__in=uids).only('username')
            elif form.cleaned_data['type'] == '4':
                users = User.objects.filter(email__icontains=username).only('username')
            elif form.cleaned_data['type'] == '5':
                uids = EVEAccount.objects.filter(api_user_id__icontains=username).values_list('user', flat=True)
                users = User.objects.filter(id__in=uids).only('username')
            elif form.cleaned_data['type'] == '6':
                uids = ServiceAccount.objects.filter(service_uid__icontains=username).values_list('user', flat=True)
                users = User.objects.filter(id__in=uids).only('username')
            else:
                messages.add_message(request, messages.ERROR,
                                     "Error parsing form, Type: %s, Value: %s" %
                                     (form.cleaned_data['type'], username))
                return redirect('sso.views.user_lookup')

            if users and len(users) == 1:
                return redirect('sso-viewuser', username=users[0].username)
            elif users and len(users) > 1:
                return render_to_response('sso/lookup/lookuplist.html', locals(),
                                          context_instance=RequestContext(request))
            else:
                messages.add_message(request, messages.INFO, "No results found")
                return redirect('sso.views.user_lookup')

    return render_to_response('sso/lookup/userlookup.html', locals(),
                              context_instance=RequestContext(request))
def get_context_data(self, **kwargs):
    ctx = super(UserDetailView, self).get_context_data(**kwargs)
    ctx.update({
        'profile': self.object.get_profile(),
        'services': ServiceAccount.objects.select_related('service')
                                  .filter(user=self.object)
                                  .only('service__name', 'service_uid', 'active'),
        'characters': EVEPlayerCharacter.objects.select_related('corporation', 'corporation__alliance')
                                        .filter(eveaccount__user=self.object)
                                        .only('id', 'name', 'corporation__name'),
    })

    # If the HR app is installed, check the blacklist.
    if installed('hr') and self.request.user.has_perm('hr.add_blacklist'):
        from hr.utils import blacklist_values
        output = blacklist_values(self.object)
        ctx.update({
            'blacklisted': bool(output),
            'blacklist_items': output,
        })
    return ctx
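# Hypothetical sketch of hr.utils.blacklist_values() imported above; the
# real helper is not shown here. Assumes it returns the Blacklist entries
# whose values match any of the user's identifying details, mirroring the
# types written by the blacklist form.
def blacklist_values(user):
    values = [user.email]
    values += [str(acc.api_user_id) for acc in user.eveaccount_set.all()]
    values += [char.name for char in
               EVEPlayerCharacter.objects.filter(eveaccount__user=user).distinct()]
    return list(Blacklist.objects.filter(value__in=values))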
    url(r'^market/', include('market.urls')),
    url(r'^updates/', include('notifications.urls')),
    url(r'^logout/$', auth_views.LogoutView.as_view(), name='logout'),
    #url(r'^o2/authorize/$', SpecialAuthorizationView.as_view(), name="authorize"),
    #url(r'^o2/token/$', oauth_views.TokenView.as_view(), name="token"),
    #url(r'^o2/revoke_token/$', oauth_views.RevokeTokenView.as_view(), name="revoke-token"),
]

if settings.DEBUG:
    import debug_toolbar
    urlpatterns = [
        url(r'^__debug__/', include(debug_toolbar.urls)),
    ] + urlpatterns

if installed('sentry'):
    urlpatterns += [
        url(r'^sentry/', include('sentry.web.urls')),
    ]

if installed('nexus'):
    import nexus
    nexus.autodiscover()
    urlpatterns += [
        url(r'^admin/', include('nexus.site.urls')),
    ]
else:
    urlpatterns += [
        url(r'^admin/', admin.site.urls),
    ]
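# Hypothetical sketch of the installed() helper used to gate optional
# apps in these URL confs and views; the real implementation is not part
# of this excerpt.
from django.conf import settings

def installed(app_name):
    """Return True if app_name appears in settings.INSTALLED_APPS."""
    return app_name in [app.split('.')[-1] for app in settings.INSTALLED_APPS]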
from registration.views import register
from sso.forms import RegistrationFormUniqueEmailBlocked

admin.autodiscover()

urlpatterns = patterns('',
    ('', include('registration.backends.default.urls')),
    (r'^register/$', register, {'backend': 'registration.backends.default.DefaultBackend',
                                'form_class': RegistrationFormUniqueEmailBlocked}),
    ('', include('sso.urls')),
    (r'^eve/', include('eve_api.urls')),
    (r'^eveapi/', include('eve_proxy.urls')),
    (r'^api/', include('api.urls')),
)

if installed('reddit'):
    urlpatterns += patterns('',
        ('', include('reddit.urls')),
    )

if installed('hr'):
    urlpatterns += patterns('',
        (r'^hr/', include('hr.urls')),
    )

if installed('groups'):
    urlpatterns += patterns('',
        (r'^groups/', include('groups.urls')),
    )

if installed('sentry'):
def update_user_access(user, **kwargs):
    """ Process all corporate and alliance entries and correct access groups. """
    logger = update_user_access.get_logger()
    user = User.objects.get(id=user)

    # Create a list of all Corp and Alliance groups
    corpgroups = []
    for corp in EVEPlayerCorporation.objects.filter(group__isnull=False):
        if corp.group:
            corpgroups.append(corp.group)
    for alliance in EVEPlayerAlliance.objects.filter(group__isnull=False):
        if alliance.group:
            corpgroups.append(alliance.group)

    # Create a list of Char groups
    chargroups = []
    for eacc in EVEAccount.objects.filter(user=user,
                                          api_status__in=[API_STATUS_OK, API_STATUS_OTHER_ERROR],
                                          api_keytype__in=getattr(settings, 'SSO_ACCEPTED_KEYTYPES',
                                                                  [API_KEYTYPE_LIMITED, API_KEYTYPE_FULL, API_KEYTYPE_ACCOUNT])):
        for char in eacc.characters.all():
            if char.corporation.group:
                chargroups.append(char.corporation.group)
            elif char.corporation.alliance and char.corporation.alliance.group:
                chargroups.append(char.corporation.alliance.group)

    # Generate the list of groups to add/remove
    delgroups = (set(user.groups.all()) & set(corpgroups)) - set(chargroups)
    addgroups = set(chargroups) - (set(user.groups.all()) & set(corpgroups))

    # Check that the user's groups fulfil their parent-group requirements
    if installed('groups'):
        ugroups = set(user.groups.all())
        for g in user.groups.all():
            if g in delgroups or not g.groupinformation.parent_groups.count():
                continue
            if not bool(set(g.groupinformation.parent_groups.all()) & ugroups):
                delgroups.add(g)

    for g in delgroups:
        user.groups.remove(g)
    for g in addgroups:
        user.groups.add(g)

    if not user.is_active:
        # For users set to not active, disable all service accounts.
        for servacc in ServiceAccount.objects.filter(user=user):
            servacc.active = 0
            servacc.save()
    else:
        # For each of the user's services, check they're in a valid group
        # for it and enable/disable as needed.
        for servacc in ServiceAccount.objects.filter(user=user):
            if not (set(user.groups.all()) & set(servacc.service.groups.all())):
                if servacc.active:
                    servacc.active = 0
                    servacc.save()
            else:
                if not servacc.active:
                    servacc.active = 1
                    servacc.save()

    notifyurls = AuthAPIKey.objects.filter(active=True).exclude(callback='')
    if notifyurls.count():
        data = {'username': user.username,
                'groups': list(user.groups.all().values('id', 'name'))}
        # Update remote services by poking the notification URLs
        for endpoint in notifyurls:
            url, key = endpoint.callback, endpoint.key
            jsonstr = json.dumps(data)
            auth_hash = sha512('%s-%s' % (key, jsonstr)).hexdigest()
            req = urllib2.Request(url, urllib.urlencode({'data': jsonstr, 'auth': auth_hash}))
            try:
                if sys.version_info < (2, 6):
                    conn = urllib2.urlopen(req)
                else:
                    conn = urllib2.urlopen(req, timeout=5)
            except (urllib2.HTTPError, urllib2.URLError) as e:
                logger.error('Error notifying SSO service: %s' % e,
                             exc_info=sys.exc_info(),
                             extra={'data': {'url': url, 'data': jsonstr, 'auth': auth_hash}})

    update_service_groups.delay(user_id=user.id)
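# Hypothetical sketch of how a receiving service could verify the
# notification sent by update_user_access(): recompute the SHA-512 digest
# of "<shared key>-<json body>" and compare it with the 'auth' field.
# Only the digest scheme comes from the code above; the function name and
# key handling are assumptions.
import json
from hashlib import sha512

def verify_notification(post_data, shared_key):
    """Return the decoded payload if the auth digest matches, else None."""
    jsonstr = post_data['data']
    expected = sha512('%s-%s' % (shared_key, jsonstr)).hexdigest()
    if post_data['auth'] == expected:
        return json.loads(jsonstr)
    return None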