def restore_from_dump(ctx, target_database=_DEFAULT_DATABASE, dumpfile=None, force=False, migrate=True,
                      clean_migration=False):
    """Destroy the target database and reload it from a SQL dump file.

    :param ctx: invoke context used to run shell commands
    :param target_database: settings key of the database to drop and reload
    :param dumpfile: path to a .sql or .sql.gz dump; when None, the newest
        autopostgresqlbackup dump in /shared/backups/latest is used
    :param force: skip the interactive confirmation prompt when True
    :param migrate: run Django migrations after the restore when True
    :param clean_migration: passed through to run_migrations(clean=...)
    """
    db_config = get_database_settings(target_database)
    if dumpfile is None:
        # XXX: core assumption about how autopostgresqlbackup names new dumps
        candidates = glob.glob('/shared/backups/latest/comsesnet_*.sql.gz')
        if not candidates:
            # previously an unguarded [0] would raise IndexError here
            logger.warning("Unable to restore from dumpfile %s", dumpfile)
            return
        # glob order is arbitrary; the lexicographically greatest timestamped
        # filename is the newest dump
        dumpfile = max(candidates)
        logger.debug("Using latest autopostgresqlbackup dump %s", dumpfile)
    dumpfile_path = pathlib.Path(dumpfile)
    if dumpfile_path.is_file():
        if not force:
            confirm(
                "This will destroy the database and reload it from {0}. Continue? (y/n) "
                .format(dumpfile))
        # gzipped dumps must be streamed through zcat so psql sees plain SQL
        cat_cmd = 'zcat' if dumpfile.endswith('.sql.gz') else 'cat'
        drop(ctx, database=target_database, create=True)
        ctx.run(
            '{cat_cmd} {dumpfile} | psql -w -q -o restore-from-dump-log.txt -h db {db_name} {db_user}'
            .format(cat_cmd=cat_cmd, dumpfile=dumpfile, **db_config),
            echo=True)
        if migrate:
            run_migrations(ctx, clean=clean_migration, initial=True)
    else:
        logger.warning("Unable to restore from dumpfile %s", dumpfile)
def setup_site(ctx, site_name='CoRe @ CoMSES Net', site_domain='www.comses.net'):
    """Rebuild the Wagtail site via the setup_site management command.

    Destructive: all existing root pages are removed first. On any
    non-production deployment, robots are denied afterwards so crawlers
    stay away from test content.
    """
    confirm(
        "This is a destructive process and will remove all existing root pages. Are you sure you want to run this? (y/n) "
    )
    command = f'setup_site --site-name="{site_name}" --site-domain="{site_domain}"'
    dj(ctx, command)
    running_in_production = settings.DEPLOY_ENVIRONMENT.is_production()
    if not running_in_production:
        deny_robots(ctx)
def restore_files(ctx, repo=settings.BORG_ROOT, archive=None):
    """Restore all file content from a borg archive.

    :param repo: path to the borg repository
    :param archive: archive name to extract; defaults to the latest archive
        in the repository
    """
    confirm("Are you sure you want to restore all file content (y/n)? ")
    if archive is None:
        archive = get_latest_borg_backup_archive_name(ctx, repo=repo)
    # extract into a scratch directory under SHARE_DIR, cleaned up on exit
    with tempfile.TemporaryDirectory(dir=settings.SHARE_DIR) as scratch_dir, ctx.cd(scratch_dir):
        delete_latest_uncompressed_backup()
        _extract(ctx, repo, archive)
        _restore_files(scratch_dir)
def restore_database(ctx, repo=settings.BORG_ROOT, archive=None, target_database=db._DEFAULT_DATABASE):
    """Restore the database from the 'backups' path of a borg archive.

    :param repo: path to the borg repository
    :param archive: archive name to extract; defaults to the latest archive
    :param target_database: database settings key to restore into (allows
        tests to restore into a scratch database)
    """
    confirm("Are you sure you want to restore the database (y/n)? ")
    if archive is None:
        archive = get_latest_borg_backup_archive_name(ctx, repo=repo)
    # only the 'backups' subtree of the archive is needed for a db restore
    with tempfile.TemporaryDirectory(dir=settings.SHARE_DIR) as scratch_dir, ctx.cd(scratch_dir):
        _extract(ctx, repo, archive, ['backups'])
        _restore_database(ctx, working_directory=scratch_dir, target_database=target_database)
def form_valid(self, form):
    """Resolve the user, record the requesting address and send a
    confirmation email; re-render the form with errors on failure.

    Known failure modes are mapped to form errors: GpgError attaches to its
    originating field, User.DoesNotExist / UserNotFound attach a non-field
    error. Email delivery goes through celery when a broker is configured.
    """
    try:
        user = self.get_user(form.cleaned_data)
        payload = self.handle_valid(form, user)
    except GpgError as e:
        form.add_error(e.field, e.message)
        return self.form_invalid(form)
    except User.DoesNotExist:
        form.add_error(None, self.user_not_found_error)
        return self.form_invalid(form)
    except UserNotFound as e:
        if e.args and e.args[0]:
            # pass the message through as text: .encode('utf-8') handed bytes
            # to add_error, which Django renders as b'...' under Python 3
            form.add_error(None, e.args[0])
        else:
            form.add_error(None, self.user_not_found_error)
        return self.form_invalid(form)
    # log user address:
    address = Address.objects.get_or_create(address=self.request.META['REMOTE_ADDR'])[0]
    UserAddresses.objects.create(address=address, user=user, purpose=self.purpose)
    # Send confirmation email to the user
    key, kwargs = confirm(self.request, user, purpose=self.purpose, payload=payload)
    if settings.BROKER_URL is None:
        # no broker configured: send synchronously
        key.send(**kwargs)
    else:
        send_email.delay(key_id=key.pk, **kwargs)
    return super(ConfirmationView, self).form_valid(form)
def form_valid(self, form):
    """Resolve the user, record the requesting address and send a
    confirmation email; re-render the form with errors on failure.

    Known failure modes are mapped to form errors: GpgError attaches to its
    originating field, User.DoesNotExist / UserNotFound attach a non-field
    error. Email delivery goes through celery when a broker is configured.
    """
    try:
        user = self.get_user(form.cleaned_data)
        payload = self.handle_valid(form, user)
    except GpgError as e:
        form.add_error(e.field, e.message)
        return self.form_invalid(form)
    except User.DoesNotExist:
        form.add_error(None, self.user_not_found_error)
        return self.form_invalid(form)
    except UserNotFound as e:
        if e.args and e.args[0]:
            # pass the message through as text: .encode('utf-8') handed bytes
            # to add_error, which Django renders as b'...' under Python 3
            form.add_error(None, e.args[0])
        else:
            form.add_error(None, self.user_not_found_error)
        return self.form_invalid(form)
    # log user address:
    address = Address.objects.get_or_create(
        address=self.request.META['REMOTE_ADDR'])[0]
    UserAddresses.objects.create(address=address, user=user, purpose=self.purpose)
    # Send confirmation email to the user
    key, kwargs = confirm(self.request, user, purpose=self.purpose, payload=payload)
    if settings.BROKER_URL is None:
        # no broker configured: send synchronously
        key.send(**kwargs)
    else:
        send_email.delay(key_id=key.pk, **kwargs)
    return super(ConfirmationView, self).form_valid(form)
def handle(self, *args, **options):
    """Reset the Editors group to its canonical permission set.

    After interactive confirmation: wipes the group's model and page
    permissions, grants wagtail admin access, re-adds every non-delete
    permission for the configured content types, and re-creates add/edit
    page permissions on the configured page slugs.
    """
    group = ComsesGroups.EDITOR.get_group()
    if not confirm("Reset Editors group permissions (y/n)? "):
        return
    group.permissions.clear()
    group.page_permissions.all().delete()
    # special case for wagtail admin access
    wagtail_admin_ct = ContentType.objects.get(app_label='wagtailadmin', model='admin')
    group.permissions.add(Permission.objects.get(content_type=wagtail_admin_ct))
    for app_label, model_name in self.CONTENT_TYPE_TUPLES:
        content_type = ContentType.objects.get(app_label=app_label, model=model_name)
        # everything except delete permissions
        grantable = Permission.objects.filter(content_type=content_type).exclude(
            codename__startswith='delete')
        for permission in grantable:
            group.permissions.add(permission)
    for slug in self.EDITOR_PAGE_SLUGS:
        page = Page.objects.get(slug=slug)
        for permission_type in ('add', 'edit'):
            GroupPagePermission.objects.create(
                group=group, page=page, permission_type=permission_type)
def restore(ctx, repo=settings.BORG_ROOT, archive=None, target_database=db._DEFAULT_DATABASE):
    """Restore the library files, media files and database to the state given
    in the borg repo at path REPO using archive ARCHIVE. The target_database
    argument is for testing so a different database can be used to make sure
    the database is getting restored properly"""
    confirm(
        "Are you sure you want to restore the database and all file content (y/n)? "
    )
    # stage the extraction in a throwaway directory under SHARE_DIR
    with tempfile.TemporaryDirectory(dir=settings.SHARE_DIR) as scratch_dir:
        _restore(ctx, repo,
                 archive=archive,
                 working_directory=scratch_dir,
                 target_database=target_database)
def handle(self, *args, **options):
    """Adjust a restored production snapshot for use as a test/staging site.

    Restores the staging OAuth credentials on the SocialApp records, renames
    the Django Site object to the test hostname, and sets robots.txt to
    deny all crawlers. Asks for confirmation first when running against a
    production deployment.
    """
    # restore staging site SocialApp client_id + secrets
    for provider, client_id, secret in (
            ('orcid', settings.ORCID_CLIENT_ID, settings.ORCID_CLIENT_SECRET),
            ('github', settings.GITHUB_CLIENT_ID, settings.GITHUB_CLIENT_SECRET),
    ):
        app = SocialApp.objects.get(provider=provider)
        app.client_id = client_id
        app.secret = secret
        app.save()
    if settings.DEPLOY_ENVIRONMENT.is_production():
        confirm("Update staging Site objects and robots.txt? (y/n) ")
    # set Django Site object metadata appropriately
    site = Site.objects.first()
    site.site_name = 'CoMSES Net Test Site'
    if settings.DEPLOY_ENVIRONMENT.is_development():
        site.hostname = 'localhost:8000'
    else:
        site.hostname = 'test.comses.net'
    site.save()
    # set up robots.txt to deny all
    call_command('setup_robots_txt', '--no-allow')
    logger.debug("Completed test site setup for environment %s", settings.DEPLOY_ENVIRONMENT)
def initialize(request, wordlist):
    """Discover hidden HTTP parameters for a single target request.

    Pipeline: validate the URL, probe target stability, capture two baseline
    responses with random fuzz parameters, heuristically scan the body for
    candidate names, then logic-force the wordlist in shrinking chunks and
    verify each surviving candidate individually.

    :param request: dict describing the target; must contain 'url' and 'headers'
    :param wordlist: iterable of candidate parameter names
    :returns: list of confirmed parameter names, or 'skipped' when the target
        is invalid, unstable, or the scan was killed
    """
    url = request['url']
    if not url.startswith('http'):
        print('%s %s is not a valid URL' % (bad, url))
        return 'skipped'
    print('%s Probing the target for stability' % run)
    stable = stable_request(url, request['headers'])
    if not stable:
        return 'skipped'
    # two baselines with throwaway random params to learn the target's
    # normal response shape
    fuzz = randomString(6)
    response_1 = requester(request, {fuzz: fuzz[::-1]})
    print('%s Analysing HTTP response for anamolies' % run)
    fuzz = randomString(6)
    response_2 = requester(request, {fuzz: fuzz[::-1]})
    # requester signals transport failure by returning a str instead of a
    # response object; isinstance replaces the non-idiomatic type() == str
    if isinstance(response_1, str) or isinstance(response_2, str):
        return 'skipped'
    factors = define(response_1, response_2, fuzz, fuzz[::-1], wordlist)
    print('%s Analysing HTTP response for potential parameter names' % run)
    found = heuristic(response_1.text, wordlist)
    if found:
        num = len(found)
        s = 's' if num > 1 else ''
        print('%s Heuristic scanner found %i parameter%s: %s' % (good, num, s, ', '.join(found)))
    print('%s Logicforcing the URL endpoint' % run)
    populated = populate(wordlist)
    # floor division replaces int(a / b); identical result for positive sizes
    param_groups = slicer(populated, len(wordlist) // args.chunks)
    last_params = []
    while True:
        param_groups = narrower(request, factors, param_groups)
        if mem.var['kill']:
            return 'skipped'
        param_groups = confirm(param_groups, last_params)
        if not param_groups:
            break
    confirmed_params = []
    for param in last_params:
        reason = bruter(request, factors, param, mode='verify')
        if reason:
            name = list(param.keys())[0]
            confirmed_params.append(name)
            print('%s name: %s, factor: %s' % (res, name, reason))
    return confirmed_params
def _confirm(self, request, user, purpose, payload=None):
    """Create a confirmation key for *user* and deliver its email —
    asynchronously via celery when a broker is configured, otherwise
    synchronously in-process."""
    key, kwargs = confirm(request, user, purpose=purpose, payload=payload)
    if settings.BROKER_URL is not None:
        send_email.delay(key_id=key.pk, **kwargs)
    else:
        key.send(**kwargs)