def post(project_id):
    """Trigger an upload of the project's files to Datasafe.

    Reads the request body (JSON, falling back to form data), resolves the
    caller's datasafe credentials, fetches the project's
    ``ro-crate-metadata.json`` from the owncloud port, parses it and hands
    everything to the Datasafe client to start the upload.

    Returns:
        tuple: ``({"success": bool}, 200)`` on success, status 500 otherwise.
    """
    # trigger upload on datasafe
    # silent=True -> returns None instead of raising on a non-JSON body
    req = request.get_json(force=True, silent=True, cache=True)
    if req is None:
        req = request.form.to_dict()
    logger.debug("got request body: {}", req)
    try:
        # assumes userId is a "service:user:token" triple — TODO confirm
        service, userId, password = Util.parseUserId(req["userId"])
        if service != "port-datasafe":
            logger.debug("got wrong service token")
            raise ValueError
    except ValueError:
        # Fallback: plain username (or wrong service) — load the stored
        # datasafe token instead.
        token = Util.loadToken(req["userId"], "port-datasafe")
        userId = token.user.username
        password = token.access_token
    # The owncloud credentials come from a separate field ("username").
    owncloud_token = Util.loadToken(req["username"], "port-owncloud")
    data = Util.parseToken(owncloud_token)
    data.update({
        "filepath": "{}/ro-crate-metadata.json".format(req["folder"])
    })
    logger.debug("send data: {}".format(data))
    # Download the RO-Crate metadata file from the owncloud storage port.
    metadata = json.loads(
        BytesIO(
            requests.get(
                "http://circle1-{}/storage/file".format("port-owncloud"),
                json=data,
                verify=(os.environ.get("VERIFY_SSL", "True") == "True"),
            ).content
        )
        .read()
        .decode("UTF-8")
    )
    logger.debug("got metadata: {}".format(metadata))
    doc = ROParser(metadata)
    logger.debug("parsed metadata: {}".format(doc))
    datasafe = Datasafe(
        userId,
        owncloud_token.access_token,
        doc.getElement(doc.rootIdentifier, expand=True, clean=True),
        req["folder"],
        os.getenv("DATASAFE_PUBLICKEY"),
        os.getenv("DATASAFE_PRIVATEKEY")
    )
    logger.debug("Trigger file upload")
    success = datasafe.triggerUploadForProject()
    logger.debug(f"Finished trigger, result was: {success}")
    return jsonify({"success": success}), 200 if success else 500
def removeFile(self, file_id):
    """Delete the file identified by *file_id* from this service.

    Depending on the service's capabilities the file-storage endpoint
    and/or the metadata endpoint is asked to delete the file.  When at
    least one endpoint confirms the deletion, the local cache entry is
    dropped as well.

    Returns:
        bool: True if the file was removed, otherwise False.
    """
    filename = self.files[file_id]
    payload = Util.parseToken(Util.loadToken(self.userId, self.port))
    verify_ssl = os.environ.get("VERIFY_SSL", "True") == "True"
    removed = False

    if self.fileStorage:
        payload["filepath"] = "{}/{}".format(self.getFilepath(), filename)
        response = requests.delete(
            f"{self.portaddress}/storage/file",
            json=payload,
            verify=verify_ssl,
        )
        removed = removed or response.status_code < 300

    if self.metadata:
        response = requests.delete(
            f"{self.portaddress}/metadata/project/{self.getProjectId()}/files/{file_id}",
            json=payload,
            verify=verify_ssl,
        )
        removed = removed or response.status_code < 300

    if not removed:
        return False
    del self.files[file_id]
    return True
def addFile(self, filename, fileContent):
    """Upload a single file to this service.

    Args:
        filename (str): Name under which the file is stored.
        fileContent (io.BytesIO): Buffer with the file's bytes.

    Returns:
        bool: True if the file was uploaded successfully, otherwise False.
    """
    payload = Util.parseToken(Util.loadToken(self.userId, self.port))
    payload["filename"] = filename
    upload = {"file": (filename, fileContent.getvalue())}
    logger.debug("add file {} with data {} in service {}".format(
        upload, payload, self.getJSON()))

    if self.metadata:
        resp = requests.post(
            f"{self.portaddress}/metadata/project/{self.getProjectId()}/files",
            files=upload,
            data=payload,
            verify=(os.environ.get("VERIFY_SSL", "True") == "True"),
        )
        if resp.status_code >= 300:
            logger.error(resp.json())
            return False

    if self.fileStorage:
        # TODO: fileStorage can also add files
        return False

    return True
def triggerPassiveMode(self, folder, servicename):
    """Trigger a passive upload for the given folder.

    Args:
        folder (str): Folder whose files should be transferred.
        servicename (str): Port the files should be taken from.

    Returns:
        bool: True if the trigger succeeded, otherwise False.
    """
    data = {
        "folder": folder,
        "service": servicename,
        "username": self.userId
    }
    data.update(Util.parseToken(Util.loadToken(self.userId, self.port)))
    logger.debug("start passive mode with data {} in service {}".format(
        data, self.getJSON()))
    if self.metadata:
        response_to = requests.post(
            f"{self.portaddress}/metadata/project/{self.getProjectId()}/files",
            data=data,
            verify=(os.environ.get("VERIFY_SSL", "True") == "True"),
        )
        if response_to.status_code >= 300:
            logger.error(response_to.json())
            return False
    # (a stray dead `pass` statement after this branch was removed)
    return True
def reload(self):
    """Refresh the cached file list from the remote port.

    First tries the storage endpoint without credentials; on a non-2xx
    answer it retries with the parsed token merged into the payload.

    Returns:
        False when even the authenticated retry fails.
        NOTE(review): the success path returns None (not True) — callers
        must not rely on a truthy result; confirm before changing.
    """
    if self.fileStorage:
        data = {"filepath": self.getFilepath(), "userId": self.userId}
        req = requests.get(f"{self.portaddress}/storage/folder",
                           json=data,
                           verify=(os.environ.get("VERIFY_SSL", "True") == "True"))
        if req.status_code >= 300:
            # for convenience: retry with credentials attached
            data.update(
                Util.parseToken(Util.loadToken(self.userId, self.port)))
            req = requests.get(f"{self.portaddress}/storage/folder",
                               json=data,
                               verify=(os.environ.get(
                                   "VERIFY_SSL", "True") == "True"))
            if req.status_code >= 300:
                return False
        # renamed from `json` — the old name shadowed the stdlib json module
        body = req.json()
        logger.debug("reload fileStorage in Service got: {}".format(body))
        self.files = body.get("files")
    if self.metadata:
        # TODO: metadata ports can also response with files
        self.reloadInformations()
def check_api_key(*args, **kwargs):
    """Resolve the caller's OSF API key and attach an OSF client to ``g``.

    Falls back to the stored "port-openscienceframework" token when the
    userId cannot be parsed as a service token; aborts with 401 when no
    key can be found.
    """
    # NOTE(review): resetting g.zenodo here looks copy-pasted from the
    # Zenodo connecter — confirm whether g.osf should be reset instead.
    g.zenodo = None
    try:
        req = request.get_json(force=True, cache=True)
    except Exception:  # was a bare `except:` (also swallowed SystemExit)
        req = request.form.to_dict()
    try:
        service, userId, apiKey = Util.parseUserId(req.get("userId"))
    except Exception:  # plain username: fall back to the stored token
        apiKey = Util.loadToken(
            req.get("userId"), "port-openscienceframework").access_token
    logger.debug("req data: {}".format(req))
    if apiKey is None:
        logger.error("apiKey or userId not found.")
        abort(401)
    logger.debug("found apiKey")
    g.osf = OSF(
        token=apiKey,
        address=os.getenv(
            "OPENSCIENCEFRAMEWORK_API_ADDRESS", "https://api.test.osf.io/v2"
        ),
    )
    return api_method(*args, **kwargs)
def check_api_key(*args, **kwargs):
    """Resolve the caller's Zenodo API key and attach a client to ``g``.

    Falls back to the stored "port-zenodo" token when the userId cannot
    be parsed as a service token; aborts with 401 when no key is found.
    """
    g.zenodo = None
    try:
        body = request.get_json(force=True)
    except Exception as err:
        logger.error(err, exc_info=True)
        body = request.form.to_dict()
    logger.debug("got request data: {}".format(body))

    try:
        service, userId, apiKey = Util.parseUserId(body.get("userId"))
    except Exception as err:
        logger.error(err, exc_info=True)
        apiKey = Util.loadToken(
            body.get("userId"), "port-zenodo").access_token

    if apiKey is None:
        logger.error("apiKey or userId not found.")
        abort(401)
    logger.debug("found apiKey")

    g.zenodo = Zenodo(apiKey, address=current_app.zenodo_address)
    return api_method(*args, **kwargs)
def removeAllFiles(self):
    """Remove every file of this project from the service.

    Returns:
        bool: True if any endpoint confirmed the removal (the local
        file cache is then reloaded), otherwise False.
    """
    payload = Util.parseToken(Util.loadToken(self.userId, self.port))
    logger.debug("remove files in service {}".format(self.servicename))
    removed = False

    if self.fileStorage:
        # todo: implements me
        removed = True

    if self.metadata:
        resp = requests.delete(
            f"{self.portaddress}/metadata/project/{self.getProjectId()}/files",
            json=payload,
            verify=(os.environ.get("VERIFY_SSL", "True") == "True"),
        )
        if resp.status_code < 300:
            removed = True

    if removed:
        self.reload()
    return removed
def updateMetadataForResearch(self, researchId: int, updateMetadata: dict):
    """Push the given metadata values to every port of *researchId*.

    Args:
        researchId (int): Id of the research whose ports are updated.
        updateMetadata (dict): Metadata values to apply.

    Returns:
        list: One ``{"port": ..., "metadata": ...}`` entry per port.
    """
    collected = []
    logger.debug("start update for research method")
    research = Research(testing=self.testing, researchId=researchId)
    ports = research.getPortsWithProjectId()
    logger.debug("research ports: {}".format(ports))
    # FIXME: parallize me
    for port, projectId in ports:
        if projectId is None:
            continue
        portname = port["port"]
        if not portname.startswith("port-"):
            portname = "port-{}".format(portname)
        logger.debug("work on port {}".format(port))
        payload = Util.parseToken(Util.loadToken(research.userId, portname))
        payload["metadata"] = updateMetadata
        metadata = self.updateMetadataForResearchFromPort(
            portname, projectId, payload)
        collected.append({"port": portname, "metadata": metadata})
    return collected
def getMetadataForResearch(
    self,
    userId: str = None,
    researchIndex: int = None,
    researchId: int = None,
    metadataFields=None,
):
    """Collect metadata from all ports registered for the research.

    Args:
        userId (str, optional): Owner of the research.
        researchIndex (int, optional): Index of the research.
        researchId (int, optional): Id of the research.
        metadataFields (optional): Restrict which fields are requested.

    Returns:
        list: One ``{"port": ..., "metadata": ...}`` entry per port.
    """
    allMetadata = []
    logger.debug("start get metadata method for research")
    research = Research(
        testing=self.testing,
        userId=userId,
        researchIndex=researchIndex,
        researchId=researchId,
    )
    ports = research.getPortsWithProjectId()
    logger.debug(f"got ports {ports}")
    # FIXME: parallize me
    for port, projectId in ports:
        # beware, that projectId could also be a string or sth else
        if projectId is None:
            continue
        portname = port["port"]
        if not portname.startswith("port-"):
            portname = "port-{}".format(portname)
        token = Util.loadToken(research.userId, portname)
        data = Util.parseToken(token)
        data["metadata"] = metadataFields
        # security fix: the access token is no longer written to the logs
        logger.debug(f"work on port {port}")
        # NOTE(review): the unprefixed port name is used for the request
        # and the result entry, while the prefixed `portname` is used for
        # loadToken — confirm this asymmetry is intended.
        rawPort = port["port"]
        metadata = self.getMetadataForProjectFromPort(
            rawPort,
            projectId,
            apiKeyMetadata=data,
        )
        allMetadata.append({"port": rawPort, "metadata": metadata})
    return allMetadata
def index():
    """List the contents of an owncloud folder for the requesting user.

    Returns:
        Response: JSON of the form ``{"files": [...]}``.
    """
    # renamed from `json` — the old name shadowed the stdlib json module
    body = request.json
    try:
        service, userId, apiKey = Util.parseUserId(body.get("userId"))
    except Exception:
        # Fallback for a plain username.  Bug fix: the original left
        # `userId` unbound on this path, causing a NameError below; it
        # is now taken from the request body.  Also narrowed the former
        # bare `except:`.
        userId = body.get("userId")
        apiKey = Util.loadToken(userId, "port-owncloud").access_token
    filepath = body.get("filepath")
    logger.debug(f"userid {userId}")
    files = OwncloudUser(userId, apiKey).getFolder(filepath)
    return jsonify({"files": files})
def publish(self, researchId: int = None):
    """Publish the research in all configured export services.

    This function implements the parameters like self.getProjects: with
    only a user given, all researches are published at once; otherwise
    only the research selected by index or id.

    Args:
        researchId (int, optional): Defaults to None.

    Returns:
        bool: Always True.
    """
    # TODO: needs tests

    def publishInPort(port, projectId, token):
        # PUT the parsed token to the port's metadata endpoint.
        headers = {"content-type": "application/json"}
        payload = Util.parseToken(token)
        resp = requests.put(
            "http://{}/metadata/project/{}".format(
                self.getPortString(port), projectId),
            data=json.dumps(payload),
            headers=headers,
            verify=(os.environ.get("VERIFY_SSL", "True") == "True"),
        )
        if resp.status_code >= 300:
            logger.exception(Exception(f'Publishing fails'))
        return resp.status_code == 200

    research = Research(testing=self.testing, researchId=researchId)
    ports = research.getPortsWithProjectId()
    logger.debug("research ports: {}".format(ports))
    # FIXME: parallize me
    for port, projectId in ports:
        if projectId is None:
            continue
        portname = port["port"]
        if not portname.startswith("port-"):
            portname = "port-{}".format(portname)
        token = Util.loadToken(research.userId, portname)
        logger.debug("work on port {}".format(port))
        publishInPort(portname, projectId, token)
    return True
def __init__(self, userId, apiKey=None):
    """Create an owncloud WebDAV client for *userId*.

    Args:
        userId (str): User whose owncloud storage is accessed.
        apiKey (str, optional): WebDAV access token.  When omitted, the
            stored "port-owncloud" token is loaded instead.
            NOTE(review): callers elsewhere in this file use
            ``Util.loadToken(...).access_token`` — here the loadToken
            result is used directly as ``webdav_token``, which may be a
            token *object* rather than the token string; confirm.
    """
    self._user_id = userId
    self._access_token = (apiKey if apiKey is not None else Util.loadToken(
        userId, "port-owncloud"))
    options = {
        "webdav_hostname": "{}/remote.php/webdav".format(
            os.getenv("OWNCLOUD_INSTALLATION_URL", "http://localhost:3000")),
        "webdav_token": self._access_token,
    }
    self.client = Client(options)
    # Honour the VERIFY_SSL environment toggle (defaults to verifying).
    self.client.verify = os.environ.get("VERIFY_SSL", "True") == "True"
def getFile(self, file_id):
    """Download the file identified by *file_id* from this service.

    First tries the storage endpoint without credentials; on a non-2xx
    answer it retries once with the parsed token merged in.

    Returns:
        io.BytesIO: The file's content, or an empty buffer when the
        service exposes no file storage.
    """
    from io import BytesIO
    file = self.files[file_id]
    if self.fileStorage:
        # this condition is for ports, which does not comply to the doc for urls
        path = "{}/{}".format(self.getFilepath(), file)
        if str(file).startswith(self.getFilepath()):
            path = file
        data = {
            "userId": self.userId,
            "filepath": path,
        }
        logger.debug("request data {}".format(data))
        response_to = requests.get(
            f"{self.portaddress}/storage/file",
            json=data,
            verify=(os.environ.get("VERIFY_SSL", "True") == "True"),
        )
        if response_to.status_code >= 300:
            # Retry with credentials attached.
            data.update(
                Util.parseToken(Util.loadToken(self.userId, self.port)))
            logger.debug("request data {}".format(data))
            response_to = requests.get(
                f"{self.portaddress}/storage/file",
                json=data,
                verify=(os.environ.get("VERIFY_SSL", "True") == "True"),
            )
        # NOTE(review): a failing retry is not checked — the (error)
        # response body is returned as file content; confirm intended.
        cnt = response_to.content
        logger.debug("got content size: {}".format(len(cnt)))
        return BytesIO(cnt)
    if self.metadata:
        # TODO: metadata can respond with files too.
        pass
    return BytesIO(b"")
def index():
    """Send a single owncloud file back to the caller.

    Returns:
        Response: The file as a Flask ``send_file`` response.
    """
    import os
    # renamed from `json` — the old name shadowed the stdlib json module
    body = request.json
    try:
        service, userId, apiKey = Util.parseUserId(body.get("userId"))
    except Exception:
        # Fallback for a plain username (narrowed the former bare `except:`).
        userId = body.get("userId")
        service = "port-owncloud"
        apiKey = Util.loadToken(userId, service).access_token
    filepath = body.get("filepath")
    logger.debug(f"userid {userId} for service {service}")
    file = OwncloudUser(userId, apiKey).getFile(filepath)
    # NOTE(review): `attachment_filename` was renamed to `download_name`
    # in Flask 2.0 — kept as-is to match the installed Flask version.
    rv = send_file(
        file,
        attachment_filename=os.path.basename(filepath),
        as_attachment=False,
        mimetype="multipart/form-data",
    )
    rv.direct_passthrough = False
    logger.debug("send response")
    return rv
def check_api_key(*args, **kwargs):
    """Resolve the caller's API key and stash it on ``g.apiKey``.

    Aborts with 401 when neither the parsed service token nor the
    stored owncloud token yields an apiKey.
    """
    g.zenodo = None
    try:
        req = request.get_json(force=True, cache=True)
    except Exception:  # was a bare `except:` (also swallowed SystemExit)
        req = request.form.to_dict()
    # NOTE(review): unlike the sibling check_api_key wrappers, this call
    # is not guarded — a malformed userId raises out of the handler;
    # confirm whether a fallback is wanted before adding one.
    service, userId, apiKey = Util.parseUserId(req.get("userId"))
    logger.debug("req data: {}".format(req))
    if apiKey is None and userId is not None:
        apiKey = Util.loadToken(userId, "port-owncloud").access_token
    if apiKey is None:
        logger.error("apiKey or userId not found.")
        abort(401)
    logger.debug("found apiKey")
    g.apiKey = apiKey
    return api_method(*args, **kwargs)
def patch(user_id, research_index):
    """Update metadata for a research, or sync RO-Crates across its ports.

    With an empty request body: downloads ``ro-crate-metadata.json`` from
    every input port and PATCHes its content into every output port's
    metadata (returns 202).  With a non-empty body: applies the body as a
    metadata update to all ports via the Metadata service.

    Returns:
        tuple | Response: ``("", 202)`` for the sync path, otherwise JSON
        with the update results.
    """
    try:
        req = request.json
    except:
        req = None
    if req is None or not req:
        # get ro crate file from portIn
        crates = []
        researchObj = Research(userId=user_id, researchIndex=research_index)
        for port in researchObj.portIn:
            filepath = ""
            for prop in port["properties"]:
                if prop["portType"] == "customProperties":
                    for cProp in prop["value"]:
                        if cProp["key"] == "filepath":
                            # Always point at the ro-crate-metadata.json
                            # inside the configured folder.
                            if str(cProp["value"]).endswith("/"):
                                filepath = "{}{}".format(
                                    cProp["value"], "ro-crate-metadata.json")
                            else:
                                filepath = "{}{}".format(
                                    cProp["value"], "/ro-crate-metadata.json")
            data = Util.parseToken(Util.loadToken(user_id, port["port"]))
            data["filepath"] = filepath
            # Download and parse the crate from the input port.
            crates.append(
                json.loads(
                    BytesIO(
                        requests.get(
                            "http://layer1-{}/storage/file".format(
                                port["port"]),
                            json=data,
                            verify=(os.environ.get("VERIFY_SSL",
                                                   "True") == "True"),
                        ).content).read().decode("UTF-8")))
        # push ro crate content to all portOut metadata
        for crate in crates:
            for port in researchObj.portOut:
                projectId = ""
                for prop in port["properties"]:
                    if prop["portType"] == "customProperties":
                        for cProp in prop["value"]:
                            if cProp["key"] == "projectId":
                                projectId = cProp["value"]
                data = Util.parseToken(Util.loadToken(user_id, port["port"]))
                data["metadata"] = crate
                # NOTE(review): the PATCH result is not checked — failures
                # are silently ignored; confirm this is best-effort.
                requests.patch(
                    "http://layer1-{}/metadata/project/{}".format(
                        port["port"], projectId),
                    json=data,
                    verify=(os.environ.get("VERIFY_SSL", "True") == "True"),
                )
        return "", 202
    mdService = Metadata(testing=current_app.config.get("TESTING"))
    research_id = mdService.getResearchId(user_id, research_index)
    result = mdService.updateMetadataForResearch(research_id, req)
    return jsonify({"length": len(result), "list": result})