def next_repo( queue, local=False ):
    """Gets next collection_path or time til next ready to be updated

    @param queue: dict Must contain 'collections', a list of (timestamp,cid) tuples.
    @param local: Boolean Use local per-collection locks or global lock.
    @returns: collection_path or (msg,timedelta)
    """
    collection_path = None
    message = None
    next_available = None
    # sorts collections in ascending order by timestamp
    collections = sorted(queue['collections'])
    # now choose
    if local:
        # choose first collection that is not locked
        for timestamp,cid in collections:
            if datetime.now() > timestamp:
                ci = Identifier(id=cid)
                if not Collection.from_identifier(ci).locked():
                    return ci.path_abs()
            if (not next_available) or (timestamp < next_available):
                next_available = timestamp
    else:
        # global lock - just take the first collection
        for timestamp,cid in collections:
            if datetime.now() > timestamp:
                ci = Identifier(id=cid)
                return ci.path_abs()
            if (not next_available) or (timestamp < next_available):
                next_available = timestamp
    return ('notready',next_available)
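# Hedged usage sketch (not from the source): assumes `from datetime import datetime`
# and that queue['collections'] holds (timestamp, cid) tuples, as implied by the
# sorting and tuple-unpacking above; the collection IDs are hypothetical.
queue = {
    'collections': [
        (datetime(2016, 1, 1, 12, 0, 0), 'ddr-testing-123'),
        (datetime(2016, 1, 1, 12, 30, 0), 'ddr-testing-124'),
    ],
}
result = next_repo(queue, local=True)
if isinstance(result, tuple):
    msg, next_available = result   # ('notready', timestamp of next eligible repo)
else:
    collection_path = result       # absolute path of the next collection to update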
def check(request, cid):
    ci = Identifier(cid)
    result = collection_tasks.check.apply_async(
        [ci.path_abs()],
        countdown=2
    )
    # add celery task_id to session
    celery_tasks = request.session.get(settings.CELERY_TASKS_SESSION_KEY, {})
    # IMPORTANT: 'action' *must* match a message in webui.tasks.TASK_STATUS_MESSAGES.
    task = {
        'task_id': result.task_id,
        'action': 'collection-check',
        'collection_id': ci.id,
        'collection_url': ci.urlpath('editor'),
        'start': converters.datetime_to_text(datetime.now(settings.TZ)),
    }
    celery_tasks[result.task_id] = task
    request.session[settings.CELERY_TASKS_SESSION_KEY] = celery_tasks
    return HttpResponseRedirect(ci.urlpath('editor'))
def create(collection, entity_id, git_name, git_mail, agent=settings.AGENT):
    """create new entity given an entity ID

    TODO remove write and commit, just create object
    """
    eidentifier = Identifier(id=entity_id)
    entity_path = eidentifier.path_abs()
    # write entity.json template to entity location and commit
    fileio.write_text(
        Entity(entity_path).dump_json(template=True),
        settings.TEMPLATE_EJSON
    )
    exit,status = commands.entity_create(
        git_name, git_mail,
        collection, eidentifier,
        [collection.json_path_rel, collection.ead_path_rel],
        [settings.TEMPLATE_EJSON, settings.TEMPLATE_METS],
        agent=agent
    )
    # load new entity, inherit values from parent, write and commit
    entity = Entity.from_json(entity_path)
    entity.inherit(collection)
    entity.write_json()
    updated_files = [entity.json_path]
    exit,status = commands.entity_update(
        git_name, git_mail,
        collection, entity,
        updated_files,
        agent=agent
    )
    # delete cache, update search index
    collection.cache_delete()
    with open(entity.json_path, 'r') as f:
        document = json.loads(f.read())
    try:
        docstore.post(settings.DOCSTORE_HOSTS, settings.DOCSTORE_INDEX, document)
    except ConnectionError:
        logger.error('Could not post to Elasticsearch.')
    return entity
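# Hedged usage sketch (not from the source): the collection and entity IDs and the
# git identity values are hypothetical; assumes the parent collection already exists
# locally and can be loaded via Collection.from_identifier().
collection = Collection.from_identifier(Identifier(id='ddr-testing-123'))
entity = create(
    collection,
    'ddr-testing-123-1',
    git_name='Example User',
    git_mail='user@example.com',
)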
def new( request, repo, org ):
    """Gets new CID from workbench, creates new collection record.

    If it messes up, goes back to collection list.
    """
    oidentifier = Identifier(request)
    git_name = request.session.get('git_name')
    git_mail = request.session.get('git_mail')
    if not (git_name and git_mail):
        messages.error(request, WEBUI_MESSAGES['LOGIN_REQUIRED'])
    ic = idservice.IDServiceClient()
    # resume session
    auth_status,auth_reason = ic.resume(request.session['idservice_token'])
    if auth_status != 200:
        request.session['idservice_username'] = None
        request.session['idservice_token'] = None
        messages.warning(
            request,
            'Session resume failed: %s %s (%s)' % (
                auth_status, auth_reason, settings.IDSERVICE_API_BASE
            )
        )
        return HttpResponseRedirect(reverse('webui-collections'))
    # get new collection ID
    http_status,http_reason,collection_id = ic.next_object_id(
        oidentifier,
        'collection'
    )
    if http_status not in [200,201]:
        err = '%s %s' % (http_status, http_reason)
        msg = WEBUI_MESSAGES['VIEWS_COLL_ERR_NO_IDS'] % (settings.IDSERVICE_API_BASE, err)
        logger.error(msg)
        messages.error(request, msg)
        return HttpResponseRedirect(reverse('webui-collections'))
    identifier = Identifier(id=collection_id, base_path=settings.MEDIA_BASE)
    # create the new collection repo
    collection_path = identifier.path_abs()
    # collection.json template
    fileio.write_text(
        Collection(collection_path).dump_json(template=True),
        settings.TEMPLATE_CJSON
    )
    exit,status = commands.create(
        git_name, git_mail,
        identifier,
        [settings.TEMPLATE_CJSON, settings.TEMPLATE_EAD],
        agent=settings.AGENT
    )
    if exit:
        logger.error(exit)
        logger.error(status)
        messages.error(request, WEBUI_MESSAGES['ERROR'].format(status))
    else:
        # update search index
        collection = Collection.from_identifier(identifier)
        try:
            collection.post_json(settings.DOCSTORE_HOSTS, settings.DOCSTORE_INDEX)
        except ConnectionError:
            logger.error('Could not post to Elasticsearch.')
        gitstatus_update.apply_async((collection_path,), countdown=2)
        # positive feedback
        return HttpResponseRedirect(
            reverse('webui-collection-edit', args=collection.idparts)
        )
    # something happened...
    logger.error('Could not create new collection!')
    messages.error(request, WEBUI_MESSAGES['VIEWS_COLL_ERR_CREATE'])
    return HttpResponseRedirect(reverse('webui-collections'))
def new( request, repo, org, cid ):
    """Gets new EID from workbench, creates new entity record.

    If it messes up, goes back to collection.
    """
    git_name = request.session.get('git_name')
    git_mail = request.session.get('git_mail')
    if not (git_name and git_mail):
        messages.error(request, WEBUI_MESSAGES['LOGIN_REQUIRED'])
    collection = Collection.from_request(request)
    if collection.locked():
        messages.error(request, WEBUI_MESSAGES['VIEWS_COLL_LOCKED'].format(collection.id))
        return HttpResponseRedirect(collection.absolute_url())
    collection.repo_fetch()
    if collection.repo_behind():
        messages.error(request, WEBUI_MESSAGES['VIEWS_COLL_BEHIND'].format(collection.id))
        return HttpResponseRedirect(collection.absolute_url())
    ic = idservice.IDServiceClient()
    # resume session
    auth_status,auth_reason = ic.resume(request.session['idservice_token'])
    if auth_status != 200:
        request.session['idservice_username'] = None
        request.session['idservice_token'] = None
        messages.warning(
            request,
            'Session resume failed: %s %s (%s)' % (
                auth_status, auth_reason, settings.IDSERVICE_API_BASE
            )
        )
        return HttpResponseRedirect(collection.absolute_url())
    # get new entity ID
    http_status,http_reason,new_entity_id = ic.next_object_id(
        collection.identifier,
        'entity'
    )
    if http_status not in [200,201]:
        err = '%s %s' % (http_status, http_reason)
        msg = WEBUI_MESSAGES['VIEWS_ENT_ERR_NO_IDS'] % (settings.IDSERVICE_API_BASE, err)
        logger.error(msg)
        messages.error(request, msg)
        return HttpResponseRedirect(collection.absolute_url())
    eidentifier = Identifier(id=new_entity_id)
    # create new entity
    entity_path = eidentifier.path_abs()
    # write entity.json template to entity location
    fileio.write_text(
        Entity(entity_path).dump_json(template=True),
        settings.TEMPLATE_EJSON
    )
    # commit files
    exit,status = commands.entity_create(
        git_name, git_mail,
        collection, eidentifier,
        [collection.json_path_rel, collection.ead_path_rel],
        [settings.TEMPLATE_EJSON, settings.TEMPLATE_METS],
        agent=settings.AGENT
    )
    # load Entity object, inherit values from parent, write back to file
    entity = Entity.from_identifier(eidentifier)
    entity.inherit(collection)
    entity.write_json()
    updated_files = [entity.json_path]
    exit,status = commands.entity_update(
        git_name, git_mail,
        collection, entity,
        updated_files,
        agent=settings.AGENT
    )
    collection.cache_delete()
    if exit:
        logger.error(exit)
        logger.error(status)
        messages.error(request, WEBUI_MESSAGES['ERROR'].format(status))
    else:
        # update search index
        try:
            entity.post_json(settings.DOCSTORE_HOSTS, settings.DOCSTORE_INDEX)
        except ConnectionError:
            logger.error('Could not post to Elasticsearch.')
        gitstatus_update.apply_async((collection.path,), countdown=2)
        # positive feedback
        return HttpResponseRedirect(reverse('webui-entity-edit', args=entity.idparts))
    # something happened...
    logger.error('Could not create new entity!')
    messages.error(request, WEBUI_MESSAGES['VIEWS_ENT_ERR_CREATE'])
    return HttpResponseRedirect(collection.absolute_url())