def review(self, pid, *args, **kwargs):
    """Review actions for a deposit.

    Adds review and comments for a deposit.
    """
    with ReviewDepositPermission(self).require(403):
        # Only reviewable schemas may receive reviews.
        if not self.schema_is_reviewable():
            raise ReviewError(None)
        data = request.get_json()
        if data is None:
            raise InvalidDataRESTError()
        # Payloads carrying an "id" refer to an existing review.
        if "id" in data:
            self.update_review(data)
        else:
            self.create_review(data)
        post_action_notifications("review", self, host_url=request.host_url)
        self.commit()
        return self
def put(self, **kwargs):
    """Upload a file to the server (step 2 of the upload flow).

    Saves the request body stream into the per-pid temporary schema
    folder and returns a JSON description of the stored file.

    :returns: JSON response with key, mimetype, links and size.
    """
    fn = request.view_args['key']
    pid = request.view_args['pid_value']
    # Use os.path.join for consistency with the sibling POST handler.
    furl = os.path.join(self.xsd_location_folder, 'tmp', pid, fn)
    try:
        fs = PyFSFileStorage(furl)
        fileurl, bytes_written, checksum = fs.save(request.stream)
    except Exception:
        raise InvalidDataRESTError()
    jd = {
        'key': fn,
        'mimetype': request.mimetype,
        'links': {},
        'size': bytes_written,
    }
    response = current_app.response_class(
        json.dumps(jd), mimetype='application/json')
    response.status_code = 200
    return response
def put(self, pid, record):
    """Update by query endpoint.

    Loads an update script from the request body, runs it against the
    Elasticsearch document matching the record's control number and,
    when exactly one document was updated, mirrors the new source back
    into the database record.

    :param pid: Persistent identifier for record.
    :param record: Record object.
    :returns: The updated record response, or a 503 response when the
        script updated an unexpected number of documents.
    """
    if request.mimetype not in self.loaders:
        raise UnsupportedMediaRESTError(request.mimetype)
    data = self.loaders[request.mimetype]()
    if data is None:
        raise InvalidDataRESTError()
    # Reject stale clients before touching the search index.
    self.check_etag(str(record.revision_id))
    # Make query with record 'control_number' so only this record's
    # document is targeted.
    control_num_query = 'control_number:"{recid}"'.format(
        recid=record['control_number'])
    script = data["ubq"]
    index, doc = default_record_to_index(data)
    # Perform ES API update_by_query
    es_response = current_search_client.update_by_query(
        index=index, q=control_num_query, doc_type=doc, body=script)
    # Check that the query has only updated one record
    if es_response['updated'] == 1 and \
            es_response['updated'] == es_response['total']:
        # Get record from ES
        search_obj = self.search_class()
        search = search_obj.get_record(str(record.id))
        # Execute search
        search_result = search.execute().to_dict()
        if search_result["hits"]["total"] == 1:
            # Update record in DB
            record.clear()
            record.update(search_result["hits"]["hits"][0]["_source"])
            record.commit()
            db.session.commit()
            # Return success
            return self.make_response(
                pid, record, links_factory=self.links_factory)
    # If more than one record was updated return error and the ES
    # response so the user can handle the issue.
    # NOTE: fixed the implicit string-literal concatenation that was
    # producing "inconsistency.More than one" with no separating space.
    return make_response((
        json.dumps({
            'message':
                'Something went wrong, the provided script might have '
                'caused inconsistency. More than one value was updated '
                'or the amount of updated values do not match the total '
                'modified',
            'elasticsearch_response': es_response
        }), 503))
def post(self, **kwargs):
    """Validate and load the posted payload.

    Rejects unsupported mimetypes and empty payloads.

    :param kwargs: Additional view arguments (unused).
    :returns: None.
    """
    if request.mimetype not in self.loaders:
        raise UnsupportedMediaRESTError(request.mimetype)
    loader = self.loaders[request.mimetype]
    if loader() is None:
        raise InvalidDataRESTError()
def patch(self, pid, record, **kwargs):
    """Modify a record.

    The data should be a JSON-patch, which will be applied to the record.

    Procedure description:

    #. The record is deserialized using the proper loader.
    #. The ETag is checked.
    #. The record is patched.
    #. The DOI metadata is pushed to DataCite (skipped in tests).
    #. The HTTP response is built with the help of the link factory.

    :param pid: Persistent identifier for record.
    :param record: Record object.
    :returns: The modified record.
    """
    data = self.loaders[request.mimetype]()
    if data is None:
        raise InvalidDataRESTError()
    self.check_etag(str(record.revision_id))
    try:
        record = record.patch(data)
    except (JsonPatchException, JsonPointerException):
        raise PatchJSONFailureRESTError()
    record.commit()
    db.session.commit()
    if not (current_app.config.get('TESTING', False) or
            current_app.config.get('FAKE_DOI', False)):
        doi = None
        # Last DOI entry wins, matching the original scan order.
        # Fix: use .get() so records without '_pid' don't raise KeyError.
        for rec_pid in record.get('_pid', []):
            if rec_pid['type'] == 'DOI':
                doi = rec_pid
        # Fix: guard against records with no DOI before dereferencing,
        # which previously raised TypeError on doi['value'].
        if doi is not None:
            doi_pid = PersistentIdentifier.get('doi', doi['value'])
            if doi_pid:
                from .serializers import datacite_v44
                from .minters import make_record_url
                try:
                    datacite_provider = DataCiteProvider(doi_pid)
                    doc = datacite_v44.serialize(doi_pid, record)
                    url = make_record_url(pid.pid_value)
                    datacite_provider.update(url, doc)
                except Exception:
                    # Best-effort: a DataCite failure must not fail the
                    # PATCH itself. Narrowed from a bare except.
                    current_app.logger.error(
                        "Error in DataCite metadata update", exc_info=True)
    return self.make_response(pid, record, links_factory=self.links_factory)
def post(self, **kwargs):
    """Create a uuid and return a links dict, or register a schema.

    The upload flow has two POSTs: the first (no pid in the URL) mints
    a persistent identifier and returns the links to upload against;
    the second (with pid) registers the previously uploaded XSD as a
    schema and exposes it as an OAI metadata format.

    :returns: JSON response with a links dict (first post) or an
        upload confirmation message (second post).
    """
    if request.mimetype not in self.loaders:
        raise UnsupportedMediaRESTError(request.mimetype)
    data = self.loaders[request.mimetype]()
    if data is None:
        raise InvalidDataRESTError()
    pid = request.view_args.get('pid_value')
    # the second post
    if pid:
        furl = os.path.join(self.xsd_location_folder, 'tmp', pid)
        dst = os.path.join(self.xsd_location_folder, pid)
        # Fix: '$schema' may be absent; pop defensively instead of
        # raising KeyError.
        data.pop('$schema', None)
        sn = data.get('name')
        root_name = data.get('root_name')
        if not root_name:
            abort(400, 'Root Name is empty.')
        if not sn:
            sn = root_name
        if not sn.endswith('_mapping'):
            sn = sn + '_mapping'
        if not os.path.exists(furl):
            return jsonify({'status': 'Please upload file first.'})
        fn = data.get('file_name')
        zip_file = data.get('zip_name')
        fn = os.path.join(furl, (fn if '.' in fn else fn + '.xsd'))
        # if zip file unzip first
        if zip_file is not None and zip_file.endswith('.zip'):
            with zipfile.ZipFile(os.path.join(furl, zip_file)) as fp:
                fp.extractall(furl)
        xsd = SchemaConverter(fn, root_name)
        try:
            self.record_class.create(pid, sn.lower(), data,
                                     xsd.to_dict(),
                                     data.get('xsd_file'),
                                     xsd.namespaces)
            db.session.commit()
        except Exception:
            # Narrowed from BaseException so KeyboardInterrupt /
            # SystemExit still propagate; duplicate schema names are
            # the expected failure here.
            abort(400, 'Schema of the same name already exists.')
        # update oai metadata formats
        oad = current_app.config.get('OAISERVER_METADATA_FORMATS', {})
        sel = list(oad.values())[0].get('serializer')
        scm = dict()
        if isinstance(xsd.namespaces, dict):
            ns = xsd.namespaces.get('') or xsd.namespaces.get(sn)
            scm.update({'namespace': ns})
        scm.update({'schema': data.get('xsd_file')})
        scm.update({'serializer': (sel[0], {'schema_type': sn})})
        oad.update({sn: scm})
        # move out those files from tmp folder
        shutil.move(furl, dst)
        return jsonify({'message': 'uploaded successfully.'})
    else:
        # the first post
        # Create uuid for record
        record_uuid = uuid.uuid4()
        # Create persistent identifier
        pid = self.pid_minter(record_uuid, data=data)
        db.session.commit()
        url = request.base_url + str(pid.object_uuid)
        links = dict(self=url)
        links['bucket'] = request.base_url + 'put/' + str(pid.object_uuid)
        response = current_app.response_class(
            json.dumps({'links': links}), mimetype=request.mimetype)
        response.status_code = 201
        return response