def get(self, cid):
    """Retrieve (and build) notebook for a single contribution [internal].
    ---
    operationId: get_entry
    parameters:
        - name: cid
          in: path
          type: string
          pattern: '^[a-f0-9]{24}$'
          required: true
          description: contribution ID (ObjectId)
    responses:
        200:
            description: single notebook
            schema:
                $ref: '#/definitions/NotebooksSchema'
    """
    try:
        # Fast path: a notebook for this contribution was already built and stored.
        nb = Notebooks.objects.get(id=cid)
        nb.restore()  # NOTE(review): project-defined method — presumably rehydrates stripped fields; confirm in Notebooks model
    except DoesNotExist:
        # Slow path: assemble the notebook cells from scratch.
        # First two cells: client setup and retrieval of this contribution.
        cells = [
            nbf.new_code_cell(
                "# provide apikey to `load_client` in order to connect to api.mpcontribs.org\n"
                "# or use bravado (see https://mpcontribs.org/api)\n"
                "from mpcontribs.client import load_client\n"
                "client = load_client()"
            ),
            nbf.new_code_cell(
                "from mpcontribs.io.archieml.mpfile import MPFile\n"
                f"result = client.contributions.get_entry(cid='{cid}').response().result\n"
                "mpfile = MPFile.from_contribution(result)"
            )
        ]
        # One display cell per MPFile data section (hdata/tdata/gdata/sdata).
        for typ in ['h', 't', 'g', 's']:
            cells.append(nbf.new_code_cell(f"mpfile.{typ}data"))
        nb = nbf.new_notebook()
        nb['cells'] = cells
        # Execute all cells in place; empty dict is the (unused) resources arg.
        exprep.preprocess(nb, {})
        # Persist the executed notebook as a Notebooks document.
        nb = Notebooks(**nb)
        nb.id = cid  # to link to the according contribution
        nb.save()  # calls Notebooks.clean()
        # Drop the id again so the serialized response matches the cached-path shape.
        del nb.id
    return nb
def get(self, cid):
    """Retrieve (and build) notebook for a single contribution [internal].
    ---
    operationId: get_entry
    parameters:
        - name: cid
          in: path
          type: string
          pattern: '^[a-f0-9]{24}$'
          required: true
          description: contribution ID (ObjectId)
    responses:
        200:
            description: single notebook
            schema:
                $ref: '#/definitions/NotebooksSchema'
    """
    # Return the cached notebook if one already exists for this contribution.
    try:
        notebook = Notebooks.objects.get(id=cid)
        notebook.restore()
        return notebook
    except DoesNotExist:
        pass  # not built yet — fall through and construct it

    # Cell 1: client setup; cell 2: fetch this contribution as an MPFile.
    setup_cell = nbf.new_code_cell(
        "# provide apikey to `load_client` in order to connect to api.mpcontribs.org\n"
        "# or use bravado (see https://mpcontribs.org/api)\n"
        "from mpcontribs.client import load_client\n"
        "client = load_client()"
    )
    fetch_cell = nbf.new_code_cell(
        "from mpcontribs.io.archieml.mpfile import MPFile\n"
        f"result = client.contributions.get_entry(cid='{cid}').response().result\n"
        "mpfile = MPFile.from_contribution(result)"
    )
    cells = [setup_cell, fetch_cell]
    # One display cell per MPFile data section.
    cells.extend(
        nbf.new_code_cell(f"mpfile.{typ}data") for typ in ['h', 't', 'g', 's']
    )

    raw_notebook = nbf.new_notebook()
    raw_notebook['cells'] = cells
    exprep.preprocess(raw_notebook, {})  # execute all cells in place

    notebook = Notebooks(**raw_notebook)
    notebook.id = cid  # to link to the according contribution
    notebook.save()  # calls Notebooks.clean()
    del notebook.id
    return notebook
def get(self, **kwargs):
    """Retrieve the notebook for a contribution, executing it on first access.

    Expects ``kwargs["pk"]`` to be the contribution ObjectId. If the stored
    notebook's last cell has no outputs yet, the code cells are executed on a
    kernel and progress is streamed via server-sent events on channel ``pk``.
    If no notebook exists, an unexecuted one is created and saved so later
    requests execute it. Raises DoesNotExist when the notebook exists but the
    requesting user lacks read permission on the contribution.
    """
    cid = kwargs["pk"]
    try:
        super().get(**kwargs)  # trigger DoesNotExist if necessary
        nb = Notebooks.objects.get(pk=cid)
        try:
            # Execute only once: an executed notebook has outputs on its last cell.
            if not nb.cells[-1]["outputs"]:
                kernel = client.start_kernel("python3")
                # FIX: shutdown the kernel even if execution raises — previously a
                # failing cell leaked the kernel (shutdown was only on the success path).
                try:
                    for idx, cell in enumerate(nb.cells):
                        if cell["cell_type"] == "code":
                            output = kernel.execute(cell["source"])
                            if output:
                                outtype = ("text/html" if output.startswith("<div")
                                           else "text/plain")
                                cell["outputs"].append({
                                    "data": {outtype: output},
                                    "metadata": {},
                                    "transient": {},
                                    "output_type": "display_data",
                                })
                        # per-cell progress for the SSE consumer (1-based index)
                        sse.publish({"message": idx + 1}, type="notebook", channel=cid)
                    # Replace the admin-headers cell with a placeholder before persisting.
                    nb.cells[1] = nbf.new_code_cell(
                        "client = load_client('<your-api-key-here>')")
                    nb.save()  # calls Notebooks.clean()
                    sse.publish({"message": 0}, type="notebook", channel=cid)
                finally:
                    client.shutdown_kernel(kernel)
        except Exception as ex:
            # Best-effort execution: signal failure over SSE but still return the
            # (possibly unexecuted) notebook below. NOTE(review): consider logging
            # instead of print.
            print(ex)
            sse.publish({"message": -1}, type="notebook", channel=cid)
        return super().get(**kwargs)
    except DoesNotExist:
        nb = None
        try:
            nb = Notebooks.objects.only("pk").get(pk=cid)
        except DoesNotExist:
            # create and save unexecuted notebook, also start entry to avoid
            # rebuild on subsequent requests
            contrib = Contributions.objects.get(id=cid)
            cells = [
                nbf.new_code_cell(
                    "headers = {'X-Consumer-Groups': 'admin', 'X-Consumer-Username': '******'}\n"
                    "client = load_client(headers=headers)"),
                nbf.new_code_cell(
                    f"contrib = client.contributions.get_entry(pk='{cid}', _fields=['_all']).result()"
                ),
                nbf.new_markdown_cell("## Info"),
                nbf.new_code_cell(
                    "fields = ['title', 'owner', 'authors', 'description', 'urls']\n"
                    "prov = client.projects.get_entry(pk=contrib['project'], _fields=fields).result()\n"
                    "HierarchicalData(prov)"),
                nbf.new_markdown_cell("## HData"),
                nbf.new_code_cell("HierarchicalData(contrib['data'])"),
            ]
            # One markdown header + fetch/plot cells per attached table.
            tables = Tables.objects.only("id", "name").filter(contribution=cid)
            if tables:
                cells.append(nbf.new_markdown_cell("## Tables"))
                for table in tables:
                    cells.append(nbf.new_markdown_cell(table.name))
                    cells.append(
                        nbf.new_code_cell(
                            f"table = client.tables.get_entry(pk='{table.id}', _fields=['_all']).result()\n"
                            "Table.from_dict(table)"))
                    cells.append(
                        nbf.new_code_cell("Plot.from_dict(table)"))
            # One markdown header + fetch cell per attached structure.
            structures = Structures.objects.only(
                "id", "name").filter(contribution=cid)
            if structures:
                cells.append(nbf.new_markdown_cell("## Structures"))
                for structure in structures:
                    cells.append(nbf.new_markdown_cell(structure.name))
                    cells.append(
                        nbf.new_code_cell(
                            "structure = client.structures.get_entry(\n"
                            f"\tpk='{structure.id}', _fields=['lattice', 'sites', 'charge']\n"
                            ").result()\n"
                            "Structure.from_dict(structure)"))
            nb = Notebooks(pk=cid, is_public=contrib.is_public)
            doc = deepcopy(seed_nb)
            doc["cells"] += cells
            self.Schema().update(nb, doc)
            nb.save()  # calls Notebooks.clean()
            return super().get(**kwargs)
        if nb is not None:
            # Notebook document exists, so the outer DoesNotExist came from a
            # permission check, not absence.
            raise DoesNotExist(
                f"Notebook {nb.id} exists but user not in project group")
def get(self, **kwargs):
    """Retrieve the notebook for a contribution, executing it on first access.

    Expects ``kwargs["pk"]`` to be the contribution ObjectId. Read permission
    is enforced through ``qfilter``. If the stored notebook's last cell has no
    outputs, code cells are executed over a kernel websocket with progress
    published via SSE on channel ``pk``. Missing notebooks are created
    unexecuted and saved so later requests execute them. Raises DoesNotExist
    when the notebook exists but the user is not in the project group.
    """
    cid = kwargs["pk"]
    qfilter = lambda qs: self.has_read_permission(request, qs.clone())
    try:
        # trigger DoesNotExist if necessary (due to permissions or non-existence)
        nb = self._resource.get_object(cid, qfilter=qfilter)
        try:
            # Execute only once: an executed notebook has outputs on its last cell.
            if not nb.cells[-1]["outputs"]:
                ws = connect_kernel()
                # FIX: close the websocket even if execute() raises — previously a
                # failing cell leaked the connection (close was success-path only).
                try:
                    for idx, cell in enumerate(nb.cells):
                        if cell["cell_type"] == "code":
                            cell["outputs"] = execute(ws, cid, cell["source"])
                        # per-cell progress for the SSE consumer (1-based index)
                        sse.publish({"message": idx + 1}, type="notebook", channel=cid)
                finally:
                    ws.close()
                # Replace the admin-headers cell with a placeholder before persisting.
                nb.cells[1] = nbf.new_code_cell(
                    "client = Client('<your-api-key-here>')")
                nb.save()  # calls Notebooks.clean()
                sse.publish({"message": 0}, type="notebook", channel=cid)
        except Exception as ex:
            # Best-effort execution: signal failure over SSE but still return the
            # (possibly unexecuted) notebook below. NOTE(review): consider logging
            # instead of print.
            print(ex)
            sse.publish({"message": -1}, type="notebook", channel=cid)
        return self._resource.serialize(nb, params=request.args)
    except DoesNotExist:
        nb = None
        try:
            nb = Notebooks.objects.only("pk").get(pk=cid)
        except DoesNotExist:
            # create and save unexecuted notebook, also start entry to avoid
            # rebuild on subsequent requests
            from mpcontribs.api.contributions.views import ContributionsResource
            res = ContributionsResource()
            res._params = {"_fields": "_all"}
            contrib = res.get_object(cid, qfilter=qfilter)
            cells = [
                nbf.new_code_cell(
                    'client = Client(headers={"X-Consumer-Groups": "admin"})'
                ),
                nbf.new_markdown_cell("## Project"),
                nbf.new_code_cell(
                    f'client.get_project("{contrib.project.pk}").pretty()'
                ),
                nbf.new_markdown_cell("## Contribution"),
                nbf.new_code_cell(
                    f'client.get_contribution("{cid}").pretty()'),
            ]
            if contrib.tables:
                cells.append(nbf.new_markdown_cell("## Tables"))
                # keys are unused — iterate values directly
                for tables in contrib.tables.values():
                    for table in tables:
                        tid = table["id"]
                        cells.append(
                            nbf.new_code_cell(
                                f'client.get_table("{tid}").plot()'))
            if contrib.structures:
                cells.append(nbf.new_markdown_cell("## Structures"))
                for structures in contrib.structures.values():
                    for structure in structures:
                        sid = structure["id"]
                        cells.append(
                            nbf.new_code_cell(
                                f'client.get_structure("{sid}")'))
            nb = Notebooks(pk=cid, is_public=contrib.is_public)
            doc = deepcopy(seed_nb)
            doc["cells"] += cells
            self.Schema().update(nb, doc)
            nb.save()  # calls Notebooks.clean()
            return self._resource.serialize(nb, params=request.args)
        if nb is not None:
            # Notebook document exists, so the outer DoesNotExist came from a
            # permission check, not absence.
            raise DoesNotExist(
                f"Notebook {nb.id} exists but user not in project group")
def get(self, cid):
    """Retrieve (and build) notebook for a single contribution [internal].
    ---
    operationId: get_entry
    parameters:
        - name: cid
          in: path
          type: string
          pattern: '^[a-f0-9]{24}$'
          required: true
          description: contribution ID (ObjectId)
    responses:
        200:
            description: single notebook
            schema:
                $ref: '#/definitions/NotebooksSchema'
    """
    try:
        # Fast path: notebook already built and stored for this contribution.
        nb = Notebooks.objects.get(id=cid)
        nb.restore()
    except DoesNotExist:
        # Build, execute and persist a fresh notebook for this contribution.
        contrib = Contributions.objects.no_dereference().get(id=cid)
        cells = [
            nbf.new_code_cell(
                "client = load_client() # provide apikey as argument to use api.mpcontribs.org\n"
                f"contrib = client.contributions.get_entry(cid='{cid}').response().result"
            ),
            nbf.new_markdown_cell("## Provenance Info"),
            nbf.new_code_cell(
                "mask = ['title', 'authors', 'description', 'urls', 'other', 'project']\n"
                "prov = client.projects.get_entry(project=contrib['project'], mask=mask).response().result\n"
                "RecursiveDict(prov)"),
            nbf.new_markdown_cell(
                f"## Hierarchical Data for {contrib['identifier']}"),
            nbf.new_code_cell("HierarchicalData(contrib['content'])")
        ]
        # One fetch/plot pair per referenced table.
        tables = contrib.content['tables']
        if tables:
            cells.append(
                nbf.new_markdown_cell(
                    f"## Tabular Data for {contrib['identifier']}"))
            for ref in tables:
                cells.append(
                    nbf.new_code_cell(
                        f"table = client.tables.get_entry(tid='{ref.id}').response().result # Pandas DataFrame format\n"
                        "Table.from_dict(table)"))
                cells.append(nbf.new_code_cell("Plot.from_dict(table)"))
        # One fetch cell per referenced structure.
        structures = contrib.content['structures']
        if structures:
            cells.append(
                nbf.new_markdown_cell(
                    f"## Pymatgen Structures for {contrib['identifier']}"))
            for ref in structures:
                cells.append(
                    nbf.new_code_cell(
                        f"Structure.from_dict(client.structures.get_entry(sid='{ref.id}').response().result)"
                    ))
        # Execute code cells on a kernel.
        # FIX: shutdown the kernel even if execution raises — previously any
        # failing cell leaked the kernel (no error handling around execute).
        kernel = client.start_kernel()
        try:
            for cell in cells:
                if cell.cell_type == 'code':
                    cell.outputs = kernel.execute(cell.source)
        finally:
            client.shutdown_kernel(kernel)
        # Persist: seed notebook (metadata/boilerplate) plus the built cells.
        nb = deepcopy(seed_nb)
        nb.cells += cells
        nb = Notebooks(**nb)
        nb.id = cid  # to link to the according contribution
        nb.save()  # calls Notebooks.clean()
        # Drop the id so the serialized response matches the cached-path shape.
        del nb.id
    return nb