def setUp(self):
    self.remote_workspace = self.mk_workspace(
        name='%s_remote' % (self.id().lower(),))
    # there needs to be an initial commit for diffs to work
    index = self.remote_workspace.repo.index
    index.commit('Initial Commit')
    # cloning ensures initial commit is on both and sets up remote
    EG.clone_repo(
        repo_url=self.remote_workspace.repo.working_dir,
        workdir='.test_repos/%s' % self.id().lower())
    self.local_workspace = self.mk_workspace(name=self.id().lower())
    self.config = testing.setUp()
    self.app = self.mk_app(self.local_workspace)
def clone_repo(self, repo_name, repo_url, repo_dir='repos', clobber=False,
               verbose=False):
    self.verbose = verbose
    workdir = os.path.join(repo_dir, repo_name)
    self.emit('Cloning %s to %s.' % (repo_url, workdir))
    if os.path.isdir(workdir) and not clobber:
        self.emit('Destination already exists, skipping.')
        return workdir, EG.read_repo(workdir)
    elif os.path.isdir(workdir):
        self.emit('Clobbering existing repository.')
        shutil.rmtree(workdir)
    repo = EG.clone_repo(repo_url, workdir)
    return workdir, repo
def run(self, workdir, models=None, introspect_models=None):
    namespace = {}
    if models is not None:
        namespace.update(load_models(models))

    if introspect_models:
        possible_models = [
            m for m in os.listdir(workdir)
            if (os.path.isdir(os.path.join(workdir, m))
                and not m.startswith('.'))
        ]
        for models in possible_models:
            try:
                found_models = load_models(models)
                namespace.update(found_models)
            except ValueError:
                print '%s does not look like a models module.' % (models,)

    namespace.update({
        'workspace': EG.workspace(workdir),
        'Q': Q,
        'EG': EG,
        'F': F,
    })
    launcher = self.launcher or default_launcher
    return launcher(namespace)
def mk_workspace(self, working_dir=None, name=None,
                 url='http://localhost', index_prefix=None,
                 auto_destroy=None, author_name='Test Kees',
                 author_email='*****@*****.**'):  # pragma: no cover
    name = name or self.id()
    working_dir = working_dir or self.WORKING_DIR
    index_prefix = index_prefix or get_index_prefix(name)
    auto_destroy = auto_destroy or self.destroy
    workspace = EG.workspace(os.path.join(working_dir, name), es={
        'urls': [url],
    }, index_prefix=index_prefix)
    if auto_destroy:
        self.addCleanup(workspace.destroy)

    workspace.setup(author_name, author_email)
    while not workspace.index_ready():
        pass
    return workspace
def resync(self, working_dir, index_prefix, model_class, mapping=None,
           recreate_index=False, es={}):
    workspace = EG.workspace(working_dir, index_prefix=index_prefix, es=es)
    branch = workspace.sm.repo.active_branch
    if recreate_index and workspace.im.index_exists(branch.name):
        self.stdout.writelines(
            'Destroying index for %s.\n' % (branch.name,))
        workspace.im.destroy_index(branch.name)

    if not workspace.im.index_exists(branch.name):
        self.stdout.writelines(
            'Creating index for %s.\n' % (branch.name,))
        # create the index and wait for it to become ready
        workspace.im.create_index(branch.name)
        while not workspace.index_ready():
            pass

    if mapping is not None:
        self.stdout.writelines(
            'Creating mapping for %s.\n' % (fqcn(model_class),))
        workspace.setup_custom_mapping(model_class, mapping)

    updated, removed = workspace.sync(model_class)
    self.stdout.writelines('%s: %d updated, %d removed.\n' % (
        fqcn(model_class), len(updated), len(removed)))
def auto_save_category_to_git(sender, instance, created, **kwargs):
    data = {
        "title": instance.title,
        "subtitle": instance.subtitle,
        "slug": instance.slug,
        "position": instance.position,
        "language": (
            instance.localisation.get_code()
            if instance.localisation else None),
        "featured_in_navbar": instance.featured_in_navbar,
        "source": (
            instance.source.uuid
            if instance.source else None),
    }

    # TODO: Not yet implemented
    # author = utils.get_author_from_user(instance.last_author)

    workspace = EG.workspace(
        settings.GIT_REPO_PATH,
        index_prefix=settings.ELASTIC_GIT_INDEX_PREFIX)
    try:
        # FIXME: This can fail if we ever store a value with None
        #        as the uuid.
        [category] = workspace.S(eg_models.Category).filter(
            uuid=instance.uuid)
        original = category.get_object()
        updated = original.update(data)
        workspace.save(updated, 'Category updated: %s' % instance.title)
        workspace.refresh_index()
    except (GitCommandError, ValueError):
        category = eg_models.Category(data)
        workspace.save(category, 'Category created: %s' % instance.title)
        workspace.refresh_index()
        Category.objects.filter(pk=instance.pk).update(uuid=category.uuid)
def sync_data(self, workdir, model_class, verbose=False, clobber=False):
    self.verbose = verbose
    workspace = EG.workspace(
        workdir, index_prefix=os.path.basename(workdir))
    self.emit('Syncing data for %s.' % (fqcn(model_class),))
    workspace.sync(model_class)
    self.emit('Data synced.')
def mk_workspace(self, working_dir=None, name=None,
                 url='http://localhost', index_prefix=None,
                 auto_destroy=None, author_name='Test Kees',
                 author_email='*****@*****.**'):  # pragma: no cover
    name = name or self.id().lower()
    working_dir = working_dir or self.working_dir
    index_prefix = index_prefix or name
    auto_destroy = auto_destroy or self.destroy
    workspace = EG.workspace(os.path.join(working_dir, name), es={
        'urls': [url],
    }, index_prefix=index_prefix)
    if auto_destroy:
        self.addCleanup(workspace.destroy)

    workspace.setup(author_name, author_email)
    while not workspace.index_ready():
        pass

    with open(self.bootstrap_file, 'r') as fp:
        bootstrap_data = yaml.safe_load(fp)

    for model, mapping in bootstrap_data['models'].items():
        workspace.setup_custom_mapping(load_class(model), mapping)
    return workspace
def pull(repo_url, index_prefix, es=None):
    if is_remote_repo_url(repo_url):
        sm = RemoteStorageManager(repo_url)
        sm.pull()
    else:
        workspace = EG.workspace(repo_url, index_prefix=index_prefix, es=es)
        workspace.pull()
def create_mapping(self, workdir, model_class, mapping, verbose=False):
    self.verbose = verbose
    workspace = EG.workspace(
        workdir, index_prefix=os.path.basename(workdir))
    self.emit('Creating mapping for %s.' % (fqcn(model_class),))
    workspace.setup_custom_mapping(model_class, mapping)
    self.emit('Mapping created.')
def test_clone_from(self):
    workspace = self.workspace
    person = TestPerson({
        'age': 1,
        'name': 'Test Kees 1',
    })
    workspace.save(person, 'Saving a person')

    clone_source = workspace.working_dir
    clone_dest = '%s_clone' % (workspace.working_dir,)
    cloned_repo = EG.clone_repo(clone_source, clone_dest)

    workspace = EG.workspace(cloned_repo.working_dir)
    self.addCleanup(workspace.destroy)

    sm = StorageManager(cloned_repo)
    [cloned_person] = sm.iterate(TestPerson)
    self.assertEqual(person, cloned_person)
def test_clone_from_bare_repository(self):
    bare_repo_name = '%s_bare' % (self.id(),)
    bare_repo_path = os.path.join(self.WORKING_DIR, bare_repo_name)
    bare_repo = EG.init_repo(bare_repo_path, bare=True)
    self.assertEqual(bare_repo.bare, True)
    if self.destroy:
        self.addCleanup(lambda: shutil.rmtree(bare_repo_path))

    cloned_repo_path = '%s_clone' % (bare_repo_path,)
    EG.clone_repo(bare_repo_path, cloned_repo_path)
    new_workspace = EG.workspace(cloned_repo_path)
    if self.destroy:
        self.addCleanup(new_workspace.destroy)

    # create an initial commit
    initial_commit = new_workspace.sm.store_data(
        'README.md', '# Hello World', 'Initial commit')

    repo = new_workspace.repo
    # NOTE: this is a bare remote repo and so it doesn't have a working
    #       copy checked out, there's nothing on the remote.
    [origin] = repo.remotes
    origin.push('refs/heads/master:refs/heads/master')

    # Now pull the changes into a second clone to ensure we're able to
    # successfully push & pull things around.
    second_cloned_repo_path = '%s_second_clone' % (bare_repo_path,)
    EG.clone_repo(bare_repo_path, second_cloned_repo_path)
    second_workspace = EG.workspace(second_cloned_repo_path)
    second_workspace.fast_forward()
    self.addCleanup(second_workspace.destroy)

    [found_commit] = second_workspace.repo.iter_commits()
    self.assertEqual(found_commit, initial_commit)
def auto_delete_post_to_git(sender, instance, **kwargs):
    # TODO: Allow author information to be set in EG.
    # author = utils.get_author_from_user(instance.last_author)
    workspace = EG.workspace(
        settings.GIT_REPO_PATH,
        index_prefix=settings.ELASTIC_GIT_INDEX_PREFIX)
    [page] = workspace.S(eg_models.Page).filter(uuid=instance.uuid)
    workspace.delete(
        page.get_object(), 'Page deleted: %s' % (instance.title,))
    workspace.refresh_index()
def push_to_git(repo_path, index_prefix):
    workspace = EG.workspace(repo_path, index_prefix=index_prefix)
    if workspace.repo.remotes:
        repo = workspace.repo
        remote = repo.remote()
        remote.fetch()
        remote_master = remote.refs.master
        remote.push(remote_master.remote_head)
def auto_delete_category_to_git(sender, instance, **kwargs):
    # TODO: Not yet implemented
    # author = utils.get_author_from_user(instance.last_author)
    workspace = EG.workspace(
        settings.GIT_REPO_PATH,
        index_prefix=settings.ELASTIC_GIT_INDEX_PREFIX)
    [category] = workspace.S(eg_models.Category).filter(uuid=instance.uuid)
    original = category.get_object()
    workspace.delete(original, 'Category deleted: %s' % instance.title)
    workspace.refresh_index()
def test_collection_post_success(self):
    # NOTE: cloning to a different directory called `remote` because
    #       the API is trying to clone into the same folder as the
    #       tests: self.WORKING_DIR.
    #
    # FIXME: This is too error prone & tricky to reason about
    api_repo_name = '%s_remote' % (self.id(),)
    self.remote_workspace = self.mk_workspace(
        working_dir=os.path.join(self.WORKING_DIR, 'remote'),
        name=api_repo_name)
    request = testing.DummyRequest({})
    request.validated = {
        'repo_url': self.remote_workspace.working_dir,
        'repo_name': None,
    }

    # Cleanup the repo created by the API on tear down
    self.addCleanup(lambda: EG.workspace(
        os.path.join(self.WORKING_DIR, api_repo_name)).destroy())
    self.addCleanup(lambda: EG.workspace(
        os.path.join(self.WORKING_DIR, 'foo-bar')).destroy())

    request.route_url = lambda route, name: (
        '/repos/%s.json' % (api_repo_name,))
    request.errors = Errors()
    resource = RepositoryResource(request)

    with patch.object(request.registry, 'notify') as mocked_notify:
        resource.collection_post()
        self.assertEqual(request.response.headers['Location'],
                         '/repos/%s.json' % (api_repo_name,))
        self.assertEqual(request.response.status_code, 301)

        mocked_notify.assert_called()
        (event,) = mocked_notify.call_args[0]
        self.assertIsInstance(event, RepositoryCloned)
        self.assertIs(event.config, self.config.registry.settings)
        self.assertEqual(
            event.repo.working_dir,
            os.path.abspath(os.path.join(self.WORKING_DIR, api_repo_name)))

        # check that the repo can be cloned with a different name
        request.validated['repo_name'] = 'foo-bar'
        resource.collection_post()
        self.assertEqual(request.response.status_code, 301)
        self.assertTrue(
            os.path.exists(os.path.join(self.WORKING_DIR, 'foo-bar')))
def workspace_changes(request):
    workspace = EG.workspace(
        settings.GIT_REPO_PATH,
        index_prefix=settings.ELASTIC_GIT_INDEX_PREFIX)
    repo = workspace.repo
    index = repo.index
    origin = repo.remote()
    [fetch_info] = origin.fetch()
    remote_master = origin.refs.master
    return {
        'repo_changes': len(index.diff(remote_master.commit)),
    }
def auto_save_post_to_git(sender, instance, created, **kwargs):
    data = {
        "title": instance.title,
        "subtitle": instance.subtitle,
        "slug": instance.slug,
        "description": instance.description,
        "content": instance.content,
        "created_at": instance.created_at.isoformat(),
        "modified_at": instance.modified_at.isoformat(),
        # TODO: We should migrate this to localisation everywhere
        "language": (
            instance.localisation.get_code()
            if instance.localisation else None),
        "featured_in_category": instance.featured_in_category,
        "featured": instance.featured,
        "position": instance.position,
        "linked_pages": [
            related_post.uuid
            for related_post in instance.related_posts.all()],
        "primary_category": (
            instance.primary_category.uuid
            if instance.primary_category else None),
        "source": (
            instance.source.uuid
            if instance.source else None),
    }

    # NOTE: If newly created always give it the highest ordering position
    if created:
        Post.objects.exclude(pk=instance.pk).update(
            position=F('position') + 1)

    # TODO: Setting the author on a commit is currently not supported
    #       by elastic-git
    #       See: https://github.com/universalcore/elastic-git/issues/23
    # author = utils.get_author_from_user(instance.last_author)

    try:
        workspace = EG.workspace(
            settings.GIT_REPO_PATH,
            index_prefix=settings.ELASTIC_GIT_INDEX_PREFIX)
        [page] = workspace.S(eg_models.Page).filter(uuid=instance.uuid)
        original = page.get_object()
        updated = original.update(data)
        workspace.save(updated, 'Page updated: %s' % instance.title)
        workspace.refresh_index()
    except ValueError:
        page = eg_models.Page(data)
        workspace.save(page, 'Page created: %s' % instance.title)
        workspace.refresh_index()
        # Always update the UUID as we've just generated a new one.
        Post.objects.filter(pk=instance.pk).update(uuid=page.uuid)
def pull_from_git(repo_path, index_prefix, es_host):
    workspace = EG.workspace(repo_path, index_prefix=index_prefix,
                             es={'urls': [es_host]})
    # TODO: replace the fetch logic below with workspace.pull()
    if workspace.repo.remotes:
        repo = workspace.repo
        remote = repo.remote()
        remote.fetch()

    storyList = workspace.S(TestStory)
    # print(list(storyList))
    # print([dict(a.get_object()) for a in storyList])
    return json.dumps([dict(a.to_object()) for a in storyList])
def auto_create_repo(instance, **kwargs):
    try:
        userUUID = uuid.uuid4().hex

        # creating repo on GitHub
        gh = GitHub('*****@*****.**', password='******')
        githubRepo = gh.create_repository(
            userUUID, description=u'', homepage=u'', private=False,
            has_issues=True, has_wiki=True, auto_init=True,
            gitignore_template=u'')
        githubRepo.create_blob('hello', 'utf-8')
        githubRepo.create_commit('first commit', '', '')

        # creating local repo
        repoPath = 'repos/' + userUUID
        UserProfile(user=instance, uuid=userUUID)
        EG.init_repo(repoPath, bare=False)

        # creating workspace in local repo
        workspace = EG.workspace(repoPath, index_prefix='',
                                 es={'urls': ['http://localhost:9200']})

        # pushing local repo to GitHub repo
        workspace.repo.create_remote('origin', githubRepo.html_url)
        repo = workspace.repo
        remote = repo.remote()
        remote.fetch()
        remote_master = remote.refs.master
        remote.push(remote_master.remote_head)
    except ValueError:
        raise

    workspace.refresh_index()
def my_view(request, *args, **kwargs):
    workspace = EG.workspace(
        settings.GIT_REPO_PATH,
        index_prefix=settings.ELASTIC_GIT_INDEX_PREFIX)
    commits = workspace.repo.iter_commits(max_count=10)
    context = {
        'github_url': settings.GIT_REPO_URL,
        'repo': workspace.repo,
        'commits': [{
            'message': c.message,
            'author': c.author.name,
            'commit_time': datetime.fromtimestamp(c.committed_date),
        } for c in commits],
    }
    return render(request, 'cms/admin/github.html', context)
def create_index(self, workdir, verbose=False, clobber=False):
    self.verbose = verbose
    workspace = EG.workspace(
        workdir, index_prefix=os.path.basename(workdir))
    branch = workspace.repo.active_branch
    self.emit('Creating index for %s.' % (branch.name,))
    if workspace.im.index_exists(branch.name) and not clobber:
        self.emit('Index already exists, skipping.')
        return False
    elif workspace.im.index_exists(branch.name) and clobber:
        self.emit('Clobbering existing index.')
        workspace.im.destroy_index(branch.name)

    workspace.im.create_index(branch.name)
    while not workspace.index_ready():
        pass
    self.emit('Index created.')
    return True
def mk_workspace(self, working_dir='.test_repos/', name=None,
                 url='http://localhost', index_prefix=None,
                 auto_destroy=None, author_name='Test Kees',
                 author_email='*****@*****.**'):
    name = name or self.id()
    index_prefix = index_prefix or name.lower().replace('.', '-')
    auto_destroy = auto_destroy or self.destroy
    workspace = EG.workspace(os.path.join(working_dir, name), es={
        'urls': [url],
    }, index_prefix=index_prefix)
    if auto_destroy:
        self.addCleanup(workspace.destroy)

    workspace.setup(author_name, author_email)
    while not workspace.index_ready():
        pass
    return workspace
def auto_save_to_git(instance, **kwargs):
    data = TestStory({
        "title": instance.title,
        "author": instance.author,
        "category": instance.category,
        "body": instance.body,
        "uuid": uuid.uuid4().hex,
    })
    try:
        ws = EG.workspace(settings.GIT_REPO_PATH,
                          index_prefix=settings.ELASTIC_GIT_INDEX_PREFIX,
                          es={'urls': [settings.ELASTICSEARCH_HOST]})
        ws.setup('Codie Roelf', '*****@*****.**')
        ws.save(data, 'saving')
        ws.refresh_index()
        push_to_git(settings.GIT_REPO_PATH,
                    index_prefix=settings.ELASTIC_GIT_INDEX_PREFIX,
                    es_host=settings.ELASTICSEARCH_HOST)
    except ValueError:
        raise

    ws.refresh_index()
def collection_post(self):
    storage_path = self.config.get('repo.storage_path')
    repo_url = self.request.validated['repo_url']
    repo_name = self.request.validated['repo_name']
    if not repo_name:
        repo_url_info = urlparse(repo_url)
        repo_name_dot_git = os.path.basename(repo_url_info.path)
        repo_name = repo_name_dot_git.partition('.git')[0]
    try:
        repo = EG.clone_repo(repo_url,
                             os.path.join(storage_path, repo_name))
        self.request.registry.notify(
            RepositoryCloned(config=self.config, repo=repo))
        self.request.response.headers['Location'] = self.request.route_url(
            'repositoryresource', name=repo_name)
        self.request.response.status = 301
        return ''
    except (GitCommandError,), e:
        self.request.errors.status = 400
        self.request.errors.add('body', 'repo_url', e.stderr)
def auto_save_content_repository_to_git(sender, instance, created, **kwargs):
    workspace = EG.workspace(
        settings.GIT_REPO_PATH,
        index_prefix=settings.ELASTIC_GIT_INDEX_PREFIX)
    workspace.sm.store_data(
        'LICENSE', instance.get_license_text(), 'Specify license.')
from elasticgit import EG
from elasticgit.tests.base import TestPerson

if __name__ == '__main__':
    workspace = EG.workspace('.test_repo')
    # putting this here because doctests don't have support for tearDown()
    workspace.destroy()
    workspace.setup('Simon de Haan', '*****@*****.**')

    person1 = TestPerson({'age': 10, 'name': 'Foo'})
    workspace.save(person1, 'Saving Person 1')

    person2 = TestPerson({'age': 20, 'name': 'Bar'})
    workspace.save(person2, 'Saving Person 2')

    person3 = TestPerson({'age': 30, 'name': 'Baz'})
    workspace.save(person3, 'Saving Person 3')

    # Elasticsearch does this automatically every few seconds
    # but not fast enough for unit tests.
    workspace.refresh_index()

    for person in workspace.S(TestPerson).filter(age__gte=20):
        print person.uuid, person.name, person.age

    workspace.destroy()
def fastforward(repo_path, index_prefix, es={}):
    workspace = EG.workspace(repo_path, index_prefix=index_prefix, es=es)
    workspace.fast_forward()
    workspace.reindex(Page)
    workspace.reindex(Category)
    workspace.reindex(Localisation)
def test_index_prefix(self):
    repo_path = os.path.join(self.WORKING_DIR, 'bar')
    workspace = EG.workspace(repo_path)
    self.addCleanup(workspace.destroy)
    self.assertEqual(workspace.index_prefix, 'bar')
def handle(self, *args, **options):
    self.disconnect_signals()
    self.quiet = options.get('quiet')
    workspace = EG.workspace(
        settings.GIT_REPO_PATH,
        index_prefix=settings.ELASTIC_GIT_INDEX_PREFIX)

    if not self.quiet:
        must_delete = self.get_input_data(
            'Do you want to delete existing data? Y/n: ', 'y')
    else:
        must_delete = 'y'

    if must_delete.lower() == 'y':
        self.emit('deleting existing content..')
        Post.objects.all().delete()
        Category.objects.all().delete()

    self.emit('creating categories..')
    categories = workspace.S(eg_models.Category).everything()
    for instance in categories:
        Category.objects.create(
            slug=instance.slug,
            title=instance.title,
            subtitle=instance.subtitle,
            localisation=Localisation._for(instance.language),
            featured_in_navbar=instance.featured_in_navbar or False,
            uuid=instance.uuid,
            position=instance.position,
        )

    # second pass to add related fields
    for instance in categories:
        if instance.source:
            c = Category.objects.get(uuid=instance.uuid)
            c.source = Category.objects.get(uuid=instance.source)
            c.save()

    # Manually refresh stuff because the command disables signals
    workspace.refresh_index()

    pages = workspace.S(eg_models.Page).everything()
    for instance in pages:
        primary_category = None
        if instance.primary_category:
            primary_category = Category.objects.get(
                uuid=instance.primary_category)
        try:
            Post.objects.create(
                title=instance.title,
                subtitle=instance.subtitle,
                slug=instance.slug,
                description=instance.description,
                content=html2text(instance.content),
                created_at=instance.created_at,
                modified_at=instance.modified_at,
                featured_in_category=(
                    instance.featured_in_category or False),
                featured=(instance.featured or False),
                localisation=Localisation._for(instance.language),
                primary_category=primary_category,
                uuid=instance.uuid,
            )
        except ValidationError, e:  # pragma: no cover
            self.stderr.write('An error occurred with: %s(%s)' % (
                instance.title, instance.uuid))
            self.stderr.write(e)