def signatures(collection_path, git_name, git_mail):
    """Identifies signature files for collection and entities.

    @param collection_path: Absolute path to collection repo.
    @param git_name: Username of git committer.
    @param git_mail: Email of git committer.
    @return collection_path: Absolute path to collection.
    """
    # NOTE(review): the lock is acquired here but never released in this
    # function; presumably the task's after_return hook unlocks -- confirm.
    gitstatus.lock(settings.MEDIA_BASE, 'collection_signatures')
    collection = Collection.from_identifier(Identifier(path=collection_path))
    # NOTE(review): this function's own name shadows the `signatures` module
    # it appears to call below -- confirm how that module is imported/aliased
    # at the top of this file, otherwise these calls hit the function itself.
    updates = signatures.find_updates(collection)
    files_written = signatures.write_updates(updates)
    # TODO move this code to webui.models.Collection
    status,msg = signatures.commit_updates(
        collection, files_written, git_name, git_mail, agent='ddr-local'
    )
    logger.debug('DONE')
    logger.debug('Updating Elasticsearch')
    if settings.DOCSTORE_ENABLED:
        # re-read the collection from disk so the index gets post-commit state
        collection = Collection.from_identifier(Identifier(path=collection_path))
        try:
            collection.post_json()
        except ConnectionError:
            # best-effort: indexing failure does not fail the task
            logger.error('Could not update search index')
    return collection_path
def delete_file( git_name, git_mail, collection_path, entity_id, file_basename, agent='' ):
    """Delete a file from the repository and from the search index.

    @param git_name: Username of git committer.
    @param git_mail: Email of git committer.
    @param collection_path: string
    @param entity_id: string
    @param file_basename: string
    @param agent: (optional) Name of software making the change.
    @return: status,message,collection_path,file_basename
    """
    logger.debug('delete_file(%s,%s,%s,%s,%s,%s)' % (
        git_name, git_mail, collection_path, entity_id, file_basename, agent))
    gitstatus.lock(settings.MEDIA_BASE, 'delete_file')
    # file_basename is "FILEID.ext"; strip the extension to get the file ID
    file_id = os.path.splitext(file_basename)[0]
    file_ = DDRFile.from_identifier(Identifier(file_id))
    entity = Entity.from_identifier(Identifier(entity_id))
    collection = Collection.from_identifier(Identifier(path=collection_path))
    logger.debug('delete from repository')
    rm_files,updated_files = entity.prep_rm_file(file_)
    status,message = commands.file_destroy(
        git_name, git_mail,
        collection, entity,
        rm_files, updated_files,
        agent
    )
    logger.debug('delete from search index')
    # FIX: skip Elasticsearch entirely when the docstore is disabled,
    # consistent with the other tasks in this module (see signatures/sync).
    if settings.DOCSTORE_ENABLED:
        try:
            docstore.delete(settings.DOCSTORE_HOSTS, settings.DOCSTORE_INDEX, file_.id)
        except ConnectionError:
            # best-effort: index cleanup failure does not fail the delete
            logger.error('Could not delete document from Elasticsearch.')
    return status,message,collection_path,file_basename
def collections( request ):
    """Displays collection status vis-a-vis the project Gitolite server.

    It takes too long to run git-status on every repo so, if repo statuses
    are not cached they will be updated by jQuery after page load has finished.
    """
    collections = []
    collection_status_urls = []
    for object_id in gitolite.get_repos_orgs():
        identifier = Identifier(object_id)
        # TODO Identifier: Organization object instead of repo and org
        repo,org = identifier.parts.values()
        collection_paths = Collection.collection_paths(settings.MEDIA_BASE, repo, org)
        colls = []
        for collection_path in collection_paths:
            if collection_path:
                identifier = Identifier(path=collection_path)
                collection = Collection.from_identifier(identifier)
                colls.append(collection)
                # FIX: renamed local from "gitstatus" so it does not shadow
                # the gitstatus module used elsewhere in this file.
                gstatus = collection.gitstatus()
                if gstatus and gstatus.get('sync_status'):
                    collection.sync_status = gstatus['sync_status']
                else:
                    # no cached status: page will fetch it via AJAX after load
                    collection_status_urls.append( "'%s'" % collection.sync_status_url())
        collections.append( (object_id,repo,org,colls) )
    # load statuses in random order
    random.shuffle(collection_status_urls)
    return render_to_response(
        'webui/collections/index.html',
        {'collections': collections,
         'collection_status_urls': ', '.join(collection_status_urls),},
        context_instance=RequestContext(request, processors=[])
    )
def csv_export( request, cid, model=None ):
    """Queue a background CSV export and register the Celery task in session.

    @param request
    @param cid: Collection ID
    @param model: 'entity' or 'file'
    """
    if model not in ['entity', 'file']:
        raise Http404
    collection = Collection.from_identifier(Identifier(cid))
    things = {'entity': 'objects', 'file': 'files'}
    csv_path = settings.CSV_EXPORT_PATH[model] % collection.id
    csv_filename = os.path.basename(csv_path)
    # per-model download URL
    url_names = {
        'entity': 'webui-collection-csv-entities',
        'file': 'webui-collection-csv-files',
    }
    file_url = reverse(url_names[model], args=[collection.id])
    # kick off the export
    result = collection_tasks.csv_export_model.apply_async(
        (collection.path, model),
        countdown=2
    )
    # add celery task_id to session
    celery_tasks = request.session.get(settings.CELERY_TASKS_SESSION_KEY, {})
    # IMPORTANT: 'action' *must* match a message in webui.tasks.TASK_STATUS_MESSAGES.
    celery_tasks[result.task_id] = {
        'task_id': result.task_id,
        'action': 'csv-export-model',
        'collection_id': collection.id,
        'collection_url': collection.absolute_url(),
        'things': things[model],
        'file_name': csv_filename,
        'file_url': file_url,
        'start': converters.datetime_to_text(datetime.now(settings.TZ)),
    }
    request.session[settings.CELERY_TASKS_SESSION_KEY] = celery_tasks
    return HttpResponseRedirect(collection.absolute_url())
def csv_download( request, cid, model=None ):
    """Offers CSV file in settings.CSV_TMPDIR for download.

    File must actually exist in settings.CSV_EXPORT_PATH and be readable.
    File must be readable by Python csv module.
    If all that is true then it must be a legal CSV file.
    """
    collection = Collection.from_identifier(Identifier(cid))
    path = settings.CSV_EXPORT_PATH[model] % collection.id
    filename = os.path.basename(path)
    if not os.path.exists(path):
        raise Http404
    import csv
    response = HttpResponse(content_type='text/csv')
    response['Content-Disposition'] = 'attachment; filename="%s"' % filename
    writer = csv.writer(
        response,
        delimiter=fileio.CSV_DELIMITER,
        quotechar=fileio.CSV_QUOTECHAR,
        quoting=fileio.CSV_QUOTING
    )
    # FIX: was open(path, 'rb') -- Python 3's csv module requires text mode;
    # newline='' lets the csv module handle embedded newlines correctly.
    with open(path, 'r', newline='') as f:
        reader = csv.reader(
            f,
            delimiter=fileio.CSV_DELIMITER,
            quotechar=fileio.CSV_QUOTECHAR,
            quoting=fileio.CSV_QUOTING
        )
        # re-serialize row by row: proves the file is legal CSV before serving
        for row in reader:
            writer.writerow(row)
    return response
def after_return(self, status, retval, task_id, args, kwargs, einfo):
    """Celery after_return hook: release locks when entity-new-expert ends.

    Unlocks the collection that was locked for the task, then refreshes
    and unlocks the cached git status.
    """
    logger.debug('EntityNewExpertTask.after_return(%s, %s, %s, %s, %s)' % (
        status, retval, task_id, args, kwargs))
    # collection_path is the task's first positional arg
    path = args[0]
    coll = Collection.from_identifier(Identifier(path=path))
    unlock_status = coll.unlock(task_id)
    gitstatus.update(settings.MEDIA_BASE, coll.path)
    gitstatus.unlock(settings.MEDIA_BASE, 'entity_newexpert')
def save(collection_path, cleaned_data, git_name, git_mail):
    """The time-consuming parts of collection-edit.

    @param collection_path: str Absolute path to collection
    @param cleaned_data: dict form.cleaned_data
    @param git_name: Username of git committer.
    @param git_mail: Email of git committer.
    @return: status,collection_path
    """
    logger.debug('tasks.collection.save(%s,%s,%s)' % (
        git_name, git_mail, collection_path))
    collection = Collection.from_identifier(Identifier(path=collection_path))
    gitstatus.lock(settings.MEDIA_BASE, 'collection_edit')
    # FIX: renamed from "exit" to avoid shadowing the builtin
    exit_code,status,updated_files = collection.save(
        git_name, git_mail, cleaned_data
    )
    # refresh cached git status shortly after the commit lands
    dvcs_tasks.gitstatus_update.apply_async(
        (collection_path,),
        countdown=2
    )
    return status,collection_path
def sync( git_name, git_mail, collection_path ):
    """Synchronizes collection repo with workbench server.

    @param git_name: Username of git committer.
    @param git_mail: Email of git committer.
    @param collection_path: Absolute path to collection repo.
    @return collection_path: Absolute path to collection.
    """
    gitstatus.lock(settings.MEDIA_BASE, 'collection_sync')
    ci = Identifier(path=collection_path)
    collection = Collection.from_identifier(ci)
    # TODO move this code to webui.models.Collection.sync
    # FIX: renamed from "exit" to avoid shadowing the builtin
    exit_code,status = commands.sync(
        git_name, git_mail, collection
    )
    logger.debug('Updating Elasticsearch')
    if settings.DOCSTORE_ENABLED:
        try:
            collection.reindex()
        except ConnectionError:
            # best-effort: indexing failure does not fail the sync
            logger.error('Could not update search index')
    return collection_path
def entity_edit(collection_path, entity_id, form_data, git_name, git_mail, agent=''):
    """The time-consuming parts of entity-edit.

    @param collection_path: str Absolute path to collection
    @param entity_id: str
    @param form_data: dict
    @param git_name: Username of git committer.
    @param git_mail: Email of git committer.
    @param agent: (optional) Name of software making the change.
    @return: status,collection_path,entity_id
    """
    logger.debug('tasks.entity.entity_edit(%s,%s,%s,%s,%s)' % (
        git_name, git_mail, collection_path, entity_id, agent))
    collection = Collection.from_identifier(Identifier(path=collection_path))
    entity = Entity.from_identifier(Identifier(id=entity_id))
    gitstatus.lock(settings.MEDIA_BASE, 'entity_edit')
    # FIX: renamed from "exit" to avoid shadowing the builtin
    exit_code,status,updated_files = entity.save(
        git_name, git_mail,
        collection, form_data
    )
    # refresh cached git status shortly after the commit lands
    dvcs_tasks.gitstatus_update.apply_async(
        (collection.path,),
        countdown=2
    )
    return status,collection_path,entity_id
def after_return(self, status, retval, task_id, args, kwargs, einfo):
    """Celery after_return hook: release locks when delete-file ends.

    Unlocks the collection that was locked for the task, then refreshes
    and unlocks the cached git status.
    """
    logger.debug('DeleteFileTask.after_return(%s, %s, %s, %s, %s)' % (
        status, retval, task_id, args, kwargs))
    # collection_path is the task's third positional arg
    path = args[2]
    coll = Collection.from_identifier(Identifier(path=path))
    unlock_status = coll.unlock(task_id)
    gitstatus.update(settings.MEDIA_BASE, path)
    gitstatus.unlock(settings.MEDIA_BASE, 'delete_file')
def sync_status_ajax( request, cid ):
    """Return the collection's cached sync status as JSON.

    Raises Http404 when no cached status is available (instead of the
    KeyError/500 the original raised when 'sync_status' was missing).
    """
    collection = Collection.from_identifier(Identifier(cid))
    # FIX: renamed local from "gitstatus" (shadowed the gitstatus module)
    gstatus = collection.gitstatus()
    if gstatus:
        # FIX: .get() instead of [] so a missing key 404s rather than 500s
        sync_status = gstatus.get('sync_status')
        if sync_status is not None:
            if sync_status.get('timestamp',None):
                sync_status['timestamp'] = converters.datetime_to_text(sync_status['timestamp'])
            return HttpResponse(json.dumps(sync_status), content_type="application/json")
    raise Http404
def after_return(self, status, retval, task_id, args, kwargs, cinfo):
    """Celery after_return hook: release locks when collection-sync ends.

    Unlocks the collection, clears its cache, then refreshes and unlocks
    the cached git status.
    """
    # collection_path is the task's third positional arg
    path = args[2]
    coll = Collection.from_identifier(Identifier(path=path))
    # NOTE: collection is locked immediately after collection_sync task
    # starts in webui.views.collections.sync
    coll.unlock(task_id)
    coll.cache_delete()
    gitstatus.update(settings.MEDIA_BASE, path)
    gitstatus.unlock(settings.MEDIA_BASE, 'collection_sync')
def after_return(self, status, retval, task_id, args, kwargs, einfo):
    """Celery after_return hook: release locks when entity-reload ends.

    Unlocks the entity that was locked for the task, then refreshes and
    unlocks the cached git status.
    """
    logger.debug('EntityReloadTask.after_return(%s, %s, %s, %s, %s)' % (
        status, retval, task_id, args, kwargs))
    # task args are (collection_path, entity_id, ...)
    path, eid = args[0], args[1]
    coll = Collection.from_identifier(Identifier(path=path))
    ent = Entity.from_identifier(Identifier(id=eid))
    unlock_status = ent.unlock(task_id)
    gitstatus.update(settings.MEDIA_BASE, path)
    gitstatus.unlock(settings.MEDIA_BASE, 'reload_files')
def new_idservice( request, oid ):
    """Gets new EID from idservice, creates new entity record.

    If it messes up, goes back to collection.
    """
    git_name,git_mail = enforce_git_credentials(request)
    # note: oid could be either a Collection or an Entity
    collection = Collection.from_identifier( Identifier(oid).collection() )
    check_parent(collection)
    ic = idservice.IDServiceClient()
    # resume session
    auth_status,auth_reason = ic.resume(request.session['idservice_token'])
    if auth_status != 200:
        request.session['idservice_username'] = None
        request.session['idservice_token'] = None
        messages.warning(
            request,
            'Session resume failed: %s %s (%s)' % (
                auth_status,auth_reason,settings.IDSERVICE_API_BASE
            )
        )
        return HttpResponseRedirect(collection.absolute_url())
    # get new entity ID
    new_object_parent = Identifier(oid)
    model = request.GET.get('model', 'entity')
    ENTITY_MODELS = ['entity', 'segment']
    if model not in ENTITY_MODELS:
        # FIX: original format string was 'Model "%s% ...' -- the stray "%"
        # made this line raise ValueError instead of the intended Exception.
        raise Exception('Model "%s" not an entity model.' % model)
    http_status,http_reason,new_entity_id = ic.next_object_id(
        new_object_parent,
        model,
        register=True,
    )
    # abort!
    if http_status not in [200,201]:
        err = '%s %s' % (http_status, http_reason)
        msg = WEBUI_MESSAGES['VIEWS_ENT_ERR_NO_IDS'] % (settings.IDSERVICE_API_BASE, err)
        logger.error(msg)
        messages.error(request, msg)
        return HttpResponseRedirect(collection.absolute_url())
    # Create entity and redirect to edit page
    eidentifier = Identifier(id=new_entity_id)
    entity = _create_entity(request, eidentifier, collection, git_name, git_mail)
    if entity:
        return HttpResponseRedirect(reverse('webui-entity-edit', args=[entity.id]))
    # something happened...
    logger.error('Could not create new entity!')
    messages.error(request, WEBUI_MESSAGES['VIEWS_ENT_ERR_CREATE'])
    return HttpResponseRedirect(collection.absolute_url())
def detail( request, cid ):
    """Collection detail page."""
    collection = Collection.from_identifier(Identifier(cid))
    collection.model_def_commits()
    collection.model_def_fields()
    alert_if_conflicted(request, collection)
    template_context = {
        'collection': collection,
        'collection_unlock_url': collection.unlock_url(collection.locked()),
        # cache this for later
        'annex_info': annex_info(repository(collection.path_abs)),
    }
    return render(request, 'webui/collections/detail.html', template_context)
def unlock( request, cid, task_id ):
    """Provides a way to remove collection lockfile through the web UI.
    """
    git_name = request.session.get('git_name')
    git_mail = request.session.get('git_mail')
    # FIX: was "if not git_name and git_mail:", which (by precedence) only
    # warned when git_name was missing AND git_mail was present.  Require
    # BOTH credentials, matching the correct form used elsewhere in this file.
    if not (git_name and git_mail):
        messages.error(request, WEBUI_MESSAGES['LOGIN_REQUIRED'])
    collection = Collection.from_identifier(Identifier(cid))
    # only the task that locked the collection may unlock it
    if task_id and collection.locked() and (task_id == collection.locked()):
        collection.unlock(task_id)
        messages.success(request, 'Collection <b>%s</b> unlocked.' % collection.id)
    return HttpResponseRedirect(collection.absolute_url())
def collection_sync( git_name, git_mail, collection_path ):
    """Synchronizes collection repo with workbench server.

    @param git_name: Username of git committer.
    @param git_mail: Email of git committer.
    @param collection_path: Absolute path to collection repo.
    @return collection_path: Absolute path to collection.
    """
    gitstatus.lock(settings.MEDIA_BASE, 'collection_sync')
    collection = Collection.from_identifier(Identifier(path=collection_path))
    # FIX: renamed from "exit" to avoid shadowing the builtin
    exit_code,status = commands.sync(
        git_name, git_mail, collection
    )
    # update search index
    # re-read the collection so the index reflects post-sync state on disk
    collection = Collection.from_identifier(Identifier(path=collection_path))
    try:
        collection.post_json(settings.DOCSTORE_HOSTS, settings.DOCSTORE_INDEX)
    except ConnectionError:
        # best-effort: indexing failure does not fail the sync
        logger.error('Could not update search index')
    return collection_path
def git_status( request, cid ):
    """Display cached git/annex status and remotes for the collection."""
    collection = Collection.from_identifier(Identifier(cid))
    alert_if_conflicted(request, collection)
    # FIX: renamed local from "gitstatus" so it does not shadow the
    # gitstatus module used elsewhere in this file.
    gstatus = collection.gitstatus()
    remotes = dvcs.remotes(dvcs.repository(collection.path))
    return render(request, 'webui/collections/git-status.html', {
        'collection': collection,
        'status': gstatus.get('status', 'git-status unavailable'),
        'astatus': gstatus.get('annex_status', 'annex-status unavailable'),
        'timestamp': gstatus.get('timestamp'),
        'remotes': remotes,
    })
def new_manual( request, oid ):
    """Ask for Entity ID, then create new Entity.
    """
    git_name,git_mail = enforce_git_credentials(request)
    # note: oid could be either a Collection or an Entity
    parent = Identifier(oid).object()
    collection = Collection.from_identifier( Identifier(oid).collection() )
    check_parent(collection)
    oidentifier = Identifier(oid)
    model = request.GET.get('model', 'entity')
    if request.method == 'POST':
        form = ObjectIDForm(request.POST)
        if form.is_valid():
            eid = form.cleaned_data['object_id']
            eidentifier = Identifier(id=eid)
            # Create entity and redirect to edit page
            entity = _create_entity(
                request,
                eidentifier, collection,
                git_name, git_mail
            )
            if entity:
                messages.warning(request, 'IMPORTANT: Register this ID with the ID service as soon as possible!')
                return HttpResponseRedirect(
                    reverse('webui-entity-edit', args=[entity.id])
                )
    else:
        form = ObjectIDForm(initial={
            'model': model,
            'parent_id': oidentifier.id,
        })
    # FIX: initialize existing_ids so the reverse()/render below cannot hit a
    # NameError when parent is neither a Collection nor an Entity.
    existing_ids = []
    if isinstance(parent, Collection):
        existing_ids = sorted([entity.id for entity in parent.children(quick=True)])
    elif isinstance(parent, Entity):
        existing_ids = sorted([e['id'] for e in parent.children_meta])
    existing_ids.reverse()
    return render(request, 'webui/entities/new-manual.html', {
        'collection': collection,
        'parent': parent,
        'model': model,
        'form': form,
        'existing_ids': existing_ids,
    })
def children( request, cid ):
    """Paginated listing of the collection's child objects."""
    collection = Collection.from_identifier(Identifier(cid))
    alert_if_conflicted(request, collection)
    objects = collection.children()
    # paginate
    current_page = request.GET.get('page', 1)
    paginator = Paginator(objects, settings.RESULTS_PER_PAGE)
    page = paginator.page(current_page)
    return render(request, 'webui/collections/entities.html', {
        'collection': collection,
        'paginator': paginator,
        'page': page,
        'thispage': current_page,
    })
def csv_export_model( collection_path, model ):
    """Export collection {model} metadata to CSV file.

    @param collection_path: Absolute path to collection.
    @param model: 'entity' or 'file'.
    @return: Absolute path of the CSV file written.
    """
    collection = Collection.from_identifier(Identifier(path=collection_path))
    csv_path = settings.CSV_EXPORT_PATH[model] % collection.id
    logger.info('All paths in %s' % collection_path)
    # gather every metadata file of the requested model, forcing fresh reads
    paths = util.find_meta_files(
        basedir=collection_path,
        model=model,
        recursive=1,
        force_read=1
    )
    logger.info('Exporting %s paths' % len(paths))
    batch.Exporter.export(paths, model, csv_path, required_only=False)
    return csv_path
def entity_newexpert(collection_path, entity_id, git_name, git_mail):
    """Create new entity using known entity ID.

    @param collection_path: str Absolute path to collection
    @param entity_id: str
    @param git_name: Username of git committer.
    @param git_mail: Email of git committer.
    @return: 'status',collection_path,new entity's ID
    """
    logger.debug(
        'collection_entity_newexpert(%s,%s,%s,%s)' % (
            collection_path, entity_id, git_name, git_mail
        )
    )
    collection = Collection.from_identifier(Identifier(path=collection_path))
    gitstatus.lock(settings.MEDIA_BASE, 'entity_newexpert')
    new_entity = Entity.create(collection, entity_id, git_name, git_mail)
    gitstatus_update.apply_async((collection.path,), countdown=2)
    # NOTE(review): the literal string 'status' looks like a placeholder;
    # confirm callers do not expect a real status value in this position.
    return 'status',collection_path,new_entity.id
def signatures( request, cid ):
    """Confirm, then queue regeneration of collection signature files."""
    try:
        collection = Collection.from_identifier(Identifier(cid))
    except:
        # any failure to resolve the ID is treated as not-found
        raise Http404
    git_name = request.session.get('git_name')
    git_mail = request.session.get('git_mail')
    # FIX: was "if not git_name and git_mail:" -- precedence bug; require BOTH
    if not (git_name and git_mail):
        messages.error(request, WEBUI_MESSAGES['LOGIN_REQUIRED'])
    alert_if_conflicted(request, collection)
    if collection.locked():
        messages.error(request, WEBUI_MESSAGES['VIEWS_COLL_LOCKED'].format(collection.id))
        return HttpResponseRedirect(collection.absolute_url())
    if request.method == 'POST':
        form = SignaturesConfirmForm(request.POST)
        # FIX: call is_valid() once (the original validated twice)
        if form.is_valid() and form.cleaned_data['confirmed']:
            result = collection_tasks.signatures.apply_async(
                (collection.path,git_name,git_mail),
                countdown=2
            )
            lockstatus = collection.lock(result.task_id)
            # add celery task_id to session
            celery_tasks = request.session.get(settings.CELERY_TASKS_SESSION_KEY, {})
            # IMPORTANT: 'action' *must* match a message in webui.tasks.TASK_STATUS_MESSAGES.
            task = {
                'task_id': result.task_id,
                'action': 'collection-signatures',
                'collection_id': collection.id,
                'collection_url': collection.absolute_url(),
                'start': converters.datetime_to_text(datetime.now(settings.TZ)),
            }
            celery_tasks[result.task_id] = task
            request.session[settings.CELERY_TASKS_SESSION_KEY] = celery_tasks
            return HttpResponseRedirect(collection.absolute_url())
    else:
        form = SignaturesConfirmForm()
    return render(request, 'webui/collections/signatures-confirm.html', {
        'collection': collection,
        'form': form,
    })
def _create_collection(request, cidentifier, git_name, git_mail):
    """used by both new_idservice and new_manual

    @param request
    @param cidentifier: Identifier for the new collection
    @param git_name: Username of git committer.
    @param git_mail: Email of git committer.
    @return: Collection
    """
    # FIX: renamed from "exit" to avoid shadowing the builtin
    exit_code,status = Collection.new(cidentifier, git_name, git_mail, settings.AGENT)
    collection = Collection.from_identifier(cidentifier)
    if exit_code:
        logger.error(exit_code)
        logger.error(status)
        messages.error(request, WEBUI_MESSAGES['ERROR'].format(status))
    else:
        # update search index
        try:
            collection.post_json()
        except ConnectionError:
            logger.error('Could not post to Elasticsearch.')
        dvcs_tasks.gitstatus_update.apply_async(
            (cidentifier.path_abs(),),
            countdown=2
        )
    return collection
def reload_files(collection_path, entity_id, git_name, git_mail, agent=''):
    """Regenerate entity.json's list of child files.

    @param collection_path: string
    @param entity_id: string
    @param git_name: Username of git committer.
    @param git_mail: Email of git committer.
    @param agent: (optional) Name of software making the change.
    @return: status,collection_path,entity_id
    """
    logger.debug('tasks.entity.reload_files(%s,%s,%s,%s,%s)' % (
        collection_path, entity_id, git_name, git_mail, agent))
    gitstatus.lock(settings.MEDIA_BASE, 'reload_files')
    entity = Entity.from_identifier(Identifier(entity_id))
    collection = Collection.from_identifier(Identifier(path=collection_path))
    # saving with empty form data rewrites entity.json (and its file list)
    # FIX: renamed from "exit" to avoid shadowing the builtin
    exit_code,status,updated_files = entity.save(
        git_name, git_mail,
        collection, {}
    )
    return status,collection_path,entity_id
def edit( request, cid ):
    """Edit collection metadata: GET shows the form, POST saves it."""
    git_name = request.session.get('git_name')
    git_mail = request.session.get('git_mail')
    # FIX: was "if not git_name and git_mail:" -- precedence bug; require BOTH
    if not (git_name and git_mail):
        messages.error(request, WEBUI_MESSAGES['LOGIN_REQUIRED'])
    collection = Collection.from_identifier(Identifier(cid))
    module = collection.identifier.fields_module()
    collection.model_def_commits()
    collection.model_def_fields()
    if collection.locked():
        messages.error(request, WEBUI_MESSAGES['VIEWS_COLL_LOCKED'].format(collection.id))
        return HttpResponseRedirect(collection.absolute_url())
    collection.repo_fetch()
    if collection.repo_behind():
        messages.error(request, WEBUI_MESSAGES['VIEWS_COLL_BEHIND'].format(collection.id))
        return HttpResponseRedirect(collection.absolute_url())
    if request.method == 'POST':
        form = DDRForm(request.POST, fields=module.FIELDS)
        if form.is_valid():
            collection.form_post(form.cleaned_data)
            # write these so we see a change on refresh
            # will be rewritten in collection.save()
            collection.write_json()
            # commit files, delete cache, update search index, update git status
            collection_tasks.edit(
                request,
                collection, form.cleaned_data,
                git_name, git_mail
            )
            return HttpResponseRedirect(collection.absolute_url())
    else:
        form = DDRForm(collection.form_prep(), fields=module.FIELDS)
    return render(request, 'webui/collections/edit-json.html', {
        'collection': collection,
        'form': form,
    })
def delete_entity( git_name, git_mail, collection_path, entity_id, agent='' ):
    """Delete an entity from the repository and from the search index.

    @param git_name: Username of git committer.
    @param git_mail: Email of git committer.
    @param collection_path: string
    @param entity_id: string
    @param agent: (optional) Name of software making the change.
    @return: status,message,collection_path,entity_id
    """
    gitstatus.lock(settings.MEDIA_BASE, 'delete_entity')
    logger.debug('collection_delete_entity(%s,%s,%s,%s,%s)' % (
        git_name, git_mail, collection_path, entity_id, agent))
    # remove the entity
    # FIX: pass path= explicitly, consistent with every other use of
    # Identifier in this module (was a bare positional arg).
    collection = Collection.from_identifier(Identifier(path=collection_path))
    entity = Entity.from_identifier(Identifier(entity_id))
    status,message = commands.entity_destroy(
        git_name, git_mail,
        collection, entity,
        agent
    )
    # update search index
    try:
        docstore.delete(settings.DOCSTORE_HOSTS, settings.DOCSTORE_INDEX, entity_id)
    except ConnectionError:
        # best-effort: index cleanup failure does not fail the delete
        logger.error('Could not delete document from Elasticsearch.')
    return status,message,collection_path,entity_id
def csv_import( request, cid, model=None ):
    """Accepts a CSV file for batch.Import

    @param request
    @param cid: Collection ID
    @param model: 'entity' or 'file' (anything else is a 404)
    """
    if (not model) or (not (model in ['entity','file'])):
        raise Http404
    collection = Collection.from_identifier(Identifier(cid))
    repo = dvcs.repository(collection.identifier.path_abs())
    if request.method == 'POST':
        form = UploadFileForm(request.POST, request.FILES)
        if form.is_valid():
            git_name = request.session['git_name']
            git_mail = request.session['git_mail']
            # path where the uploaded CSV is expected to land
            # (presumably written by handle_uploaded_file -- TODO confirm)
            csv_path = CSV_IMPORT_FILE.format(
                cid=collection.identifier.id,
                model=collection.identifier.model
            )
            handle_uploaded_file(
                collection.identifier.id,
                collection.identifier.model,
                request.FILES['file']
            )
            if os.path.exists(csv_path):
                messages.success(request, 'CSV file upload success!.')
            else:
                # upload failed: bail out before attempting the import
                messages.error(request, 'CSV file upload failed!')
                return HttpResponseRedirect(collection.absolute_url())
            if model == 'entity':
                imported = batch.Importer.import_entities(
                    csv_path=csv_path,
                    cidentifier=collection.identifier,
                    vocabs_url=settings.VOCABS_URL,
                    git_name=git_name,
                    git_mail=git_mail,
                    agent='ddrlocal-csv-import-entity',
                    dryrun=False,
                )
                # import_entities returns objects; collect their relative paths
                imported_rel = [
                    o.identifier.path_rel()
                    for o in imported
                ]
                # also commit the changelog that sits next to each entity.json
                changelogs = list(set([
                    os.path.join(
                        os.path.dirname(path_rel), 'changelog'
                    )
                    for path_rel in imported_rel
                    if 'entity.json' in path_rel
                ]))
                imported_all = imported_rel + changelogs
                result = commands.commit_files(
                    repo=repo,
                    message='Imported by ddr-local from file "%s"' % csv_path,
                    git_files=imported_all,
                    annex_files=[]
                )
                msg = 'Successfully imported {} objects from {}.'.format(
                    str(len(imported)),
                    request.FILES['file'].name,
                )
                messages.success(request, msg)
            elif model == 'file':
                imported = batch.Importer.import_files(
                    csv_path=csv_path,
                    cidentifier=collection.identifier,
                    vocabs_url=settings.VOCABS_URL,
                    git_name=git_name,
                    git_mail=git_mail,
                    agent='ddrlocal-csv-import-file',
                    row_start=0,
                    row_end=9999999,
                    dryrun=False
                )
                # flatten: import_files returns a list of file,entity lists
                imported_flat = [i for imprtd in imported for i in imprtd]
                # import_files returns absolute paths but we need relative
                imported_rel = [
                    os.path.relpath(
                        file_path_abs, collection.identifier.path_abs()
                    )
                    for file_path_abs in imported_flat
                ]
                # Add changelog for each entity
                changelogs = list(set([
                    os.path.join(
                        os.path.dirname(path_rel), 'changelog'
                    )
                    for path_rel in imported_rel
                    if 'entity.json' in path_rel
                ]))
                imported_all = imported_rel + changelogs
                result = commands.commit_files(
                    repo=repo,
                    message='Imported by ddr-local from file "%s"' % csv_path,
                    git_files=imported_all,
                    annex_files=[],
                )
                msg = 'Successfully imported {} files from {}.'.format(
                    str(len(imported)),
                    request.FILES['file'].name,
                )
                messages.success(request, msg)
            return HttpResponseRedirect(collection.absolute_url())
    else:
        form = UploadFileForm()
    return render(request, 'webui/collections/csv-import.html', {
        'collection': collection,
        'form': form,
    })
def changelog( request, cid ):
    """Render the collection's changelog page."""
    collection = Collection.from_identifier(Identifier(cid))
    alert_if_conflicted(request, collection)
    context = {'collection': collection}
    return render(request, 'webui/collections/changelog.html', context)
def new( request, repo, org ):
    """Gets new CID from workbench, creates new collection record.

    If it messes up, goes back to collection list.
    """
    oidentifier = Identifier(request)
    git_name = request.session.get('git_name')
    git_mail = request.session.get('git_mail')
    if not (git_name and git_mail):
        messages.error(request, WEBUI_MESSAGES['LOGIN_REQUIRED'])
    ic = idservice.IDServiceClient()
    # resume session
    auth_status,auth_reason = ic.resume(request.session['idservice_token'])
    if auth_status != 200:
        request.session['idservice_username'] = None
        request.session['idservice_token'] = None
        messages.warning(
            request,
            'Session resume failed: %s %s (%s)' % (
                auth_status,auth_reason,settings.IDSERVICE_API_BASE
            )
        )
        return HttpResponseRedirect(reverse('webui-collections'))
    # get new collection ID
    http_status,http_reason,collection_id = ic.next_object_id(
        oidentifier,
        'collection'
    )
    if http_status not in [200,201]:
        err = '%s %s' % (http_status, http_reason)
        msg = WEBUI_MESSAGES['VIEWS_COLL_ERR_NO_IDS'] % (settings.IDSERVICE_API_BASE, err)
        logger.error(msg)
        messages.error(request, msg)
        return HttpResponseRedirect(reverse('webui-collections'))
    identifier = Identifier(id=collection_id, base_path=settings.MEDIA_BASE)
    # create the new collection repo
    collection_path = identifier.path_abs()
    # collection.json template
    fileio.write_text(
        Collection(collection_path).dump_json(template=True),
        settings.TEMPLATE_CJSON
    )
    # FIX: renamed from "exit" to avoid shadowing the builtin
    exit_code,status = commands.create(
        git_name, git_mail,
        identifier,
        [settings.TEMPLATE_CJSON, settings.TEMPLATE_EAD],
        agent=settings.AGENT
    )
    if exit_code:
        logger.error(exit_code)
        logger.error(status)
        messages.error(request, WEBUI_MESSAGES['ERROR'].format(status))
    else:
        # update search index
        collection = Collection.from_identifier(identifier)
        try:
            collection.post_json(settings.DOCSTORE_HOSTS, settings.DOCSTORE_INDEX)
        except ConnectionError:
            logger.error('Could not post to Elasticsearch.')
        gitstatus_update.apply_async((collection_path,), countdown=2)
        # positive feedback
        return HttpResponseRedirect( reverse('webui-collection-edit', args=collection.idparts) )
    # something happened...
    # FIX: typo in log message (was "collecion")
    logger.error('Could not create new collection!')
    messages.error(request, WEBUI_MESSAGES['VIEWS_COLL_ERR_CREATE'])
    return HttpResponseRedirect(reverse('webui-collections'))