def test_entry_set_month(
    month: Tuple[int, str], expected: str, caplog: pytest.LogCaptureFixture
) -> None:
    """Test that setting the month field normalizes it to a three-letter code.

    Both the integer index and the full month name must be converted, and each
    conversion must emit a lint log message.

    Args:
        month: a pair containing the month index and full name.
        expected: the expected three-letter code of the month.
        caplog: the built-in pytest fixture.
    """
    entry = Entry("Cao_2019", EXAMPLE_ENTRY_DICT)
    assert entry.data["month"] == "aug"
    # exercise both representations: the integer index and the full name
    for raw_month in month:
        entry.month = raw_month  # type: ignore
        assert entry.data["month"] == expected
        expected_record = (
            "cobib.database.entry",
            20,
            f"Converting field 'month' of entry 'Cao_2019' from '{raw_month}' to '{expected}'.",
        )
        assert expected_record in caplog.record_tuples
def assert_default_test_entry(entry: Entry) -> None:
    """Asserts that the passed entry is the default testing entry.

    Args:
        entry: the entry to assert.
    """
    entry.escape_special_chars()
    assert entry.label == "Grimsley_2019"
    expected_fields = {
        "doi": "10.1038/s41467-019-10988-2",
        "url": ["https://doi.org/10.1038%2Fs41467-019-10988-2"],
        "year": 2019,
        "month": "jul",
        "publisher": "Springer Science and Business Media {LLC}",
        "volume": 10,
        "number": 1,
        "author": (
            "Harper R. Grimsley and Sophia E. Economou and Edwin Barnes and "
            "Nicholas J. Mayhall"
        ),
        "title": (
            "An adaptive variational algorithm for exact molecular simulations on a "
            "quantum computer"
        ),
        "journal": "Nature Communications",
    }
    for field, value in expected_fields.items():
        assert entry.data[field] == value
def test_save() -> None:
    """Test the `cobib.database.Entry.save` method."""
    entry = Entry("Cao_2019", EXAMPLE_ENTRY_DICT)
    dumped_lines = entry.save().split("\n")
    with open(EXAMPLE_YAML_FILE, "r", encoding="utf-8") as expected:
        # compare line by line against the reference YAML file
        for written, reference in zip(dumped_lines, expected):
            assert written == reference.strip("\n")
def test_search(query: str, context: int, ignore_case: bool, expected: List[List[str]]) -> None:
    """Test the `cobib.database.Entry.search` method.

    Args:
        query: the string to search for.
        context: the number of lines to provide as context for the search results.
        ignore_case: whether to perform a case-insensitive search.
        expected: the expected lines.
    """
    # build an abstract with repeated matches in differing capitalization
    abstract_lines = [
        "search_query",
        "something else",
        "Search_Query",
        "something else",
    ] * 2
    dummy_data = {
        "ENTRYTYPE": "article",
        "abstract": "\n".join(abstract_lines),
    }
    entry = Entry("search_dummy", dummy_data)
    assert entry.search(query, context=context, ignore_case=ignore_case) == expected
def test_equality() -> None:
    """Test entry equality."""
    entry_a = Entry("Cao_2019", EXAMPLE_ENTRY_DICT)
    entry_b = Entry("Cao_2019", EXAMPLE_ENTRY_DICT)
    # separately constructed entries are distinct objects (mutability)...
    assert entry_a is not entry_b
    # ...yet compare equal field by field
    assert entry_a == entry_b
def test_match_with_wrong_key() -> None:
    """Asserts issue #1 is fixed.

    When `cobib.database.Entry.matches` is called with a key in the filter which does not exist in
    the entry, the key should be ignored and the function should return normally.
    """
    dummy = Entry("Cao_2019", EXAMPLE_ENTRY_DICT)
    # "tags" is not present in the example entry data
    missing_key_filter = {("tags", False): ["test"]}
    assert dummy.matches(missing_key_filter, or_=False)
def test_entry_matches(filter_: Dict[Tuple[str, bool], Any], or_: bool) -> None:
    """Test the `cobib.database.Entry.matches` filter method.

    Args:
        filter_: a filter as explained be `cobib.database.Entry.matches`.
        or_: whether to use logical `OR` rather than `AND` for filter combination.
    """
    # the parametrized filter is expected to match the example entry's author
    assert Entry("Cao_2019", EXAMPLE_ENTRY_DICT).matches(filter_, or_=or_)
def test_search_with_file() -> None:
    """Test the `cobib.database.Entry.search` method with an associated file.

    The search must find matches both in the entry fields themselves and in the
    contents of the associated file.
    """
    entry = Entry("Cao_2019", EXAMPLE_ENTRY_DICT)
    entry.file = EXAMPLE_YAML_FILE  # type: ignore
    results = entry.search("Chemical", context=0)
    expected = [
        [" journal = {Chemical Reviews},"],
        [" publisher = {American Chemical Society ({ACS})},"],
        ["journal: Chemical Reviews"],
        ["publisher: American Chemical Society ({ACS})"],
    ]
    # Compare the full lists rather than zipping them: `zip` silently truncates to the
    # shorter sequence, so missing or surplus search results would previously go unnoticed.
    assert results == expected
def test_entry_set_url(caplog: pytest.LogCaptureFixture) -> None:
    """Test that a comma-separated url string is converted to a list.

    Args:
        caplog: the built-in pytest fixture.
    """
    entry = Entry("Cao_2019", EXAMPLE_ENTRY_DICT)
    entry.url = "https://dummy.org/, https://dummy.com/"  # type: ignore
    assert entry.url == ["https://dummy.org/", "https://dummy.com/"]
    # the conversion must be reported as a lint log message
    expected_record = (
        "cobib.database.entry",
        20,
        "Converted the field 'url' of entry 'Cao_2019' to a list. You can consider storing it as "
        "such directly.",
    )
    assert expected_record in caplog.record_tuples
def dump(self, entry: Entry) -> str:
    # pdoc will inherit the docstring from the base class
    # noqa: D102
    Event.PreBibtexDump.fire(entry)
    bib_database = bibtexparser.bibdatabase.BibDatabase()
    fields = entry.stringify()
    # bibtexparser expects the entry label under the "ID" key
    fields["ID"] = fields.pop("label")
    if "month" in fields:
        # wrap the month in a BibDataStringExpression so it gets dumped as a macro
        month_string = bibtexparser.bibdatabase.BibDataString(bib_database, fields["month"])
        fields["month"] = bibtexparser.bibtexexpression.BibDataStringExpression([month_string])
    bib_database.entries = [fields]
    LOGGER.debug("Converting entry %s to BibTex format.", entry.label)
    writer = bibtexparser.bwriter.BibTexWriter()
    writer.common_strings = True
    dumped: str = writer.write(bib_database)
    # allow subscribed hooks to post-process (or replace) the dumped string
    dumped = Event.PostBibtexDump.fire(dumped) or dumped
    return dumped
def parse(self, string: str) -> Dict[str, Entry]:
    # pdoc will inherit the docstring from the base class
    # noqa: D102
    string = Event.PreBibtexParse.fire(string) or string
    bparser = bibtexparser.bparser.BibTexParser()
    bparser.ignore_nonstandard_types = config.parsers.bibtex.ignore_non_standard_types
    bparser.common_strings = True
    # keep month macros un-interpolated so they can be unwrapped below
    bparser.interpolate_strings = False
    try:
        # first, assume the input is a path and try loading from disk ...
        LOGGER.debug("Loading BibTex data from file: %s.", string)
        with open(string, "r", encoding="utf-8") as file:
            database = bibtexparser.load(file, parser=bparser)
    except (OSError, FileNotFoundError):
        # ... otherwise fall back to treating the input as raw BibTex data
        LOGGER.debug("Loading BibTex string: %s.", string)
        database = bibtexparser.loads(string, parser=bparser)
    bib = OrderedDict()
    for entry in database.entries:
        # unwrap month expressions back into their plain macro name (e.g. "aug")
        if "month" in entry.keys() and isinstance(
            entry["month"], bibtexparser.bibtexexpression.BibDataStringExpression
        ):
            entry["month"] = entry["month"].expr[0].name
        # the "ID" field becomes the entry label rather than a data field
        label = entry.pop("ID")
        bib[label] = Entry(label, entry)
    Event.PostBibtexParse.fire(bib)
    return bib
def test_stringify() -> None:
    """Test the `cobib.database.Entry.stringify` method."""
    raw_data = {
        "file": ["/tmp/a.txt", "/tmp/b.txt"],
        "month": 8,
        "tags": ["tag1", "tag2"],
    }
    stringified = Entry("dummy", raw_data).stringify()
    # lists are joined with commas, the month index becomes its macro name
    assert stringified == {
        "label": "dummy",
        "file": "/tmp/a.txt, /tmp/b.txt",
        "month": "aug",
        "tags": "tag1, tag2",
    }
def test_event_post_yaml_dump(self) -> None:
    """Tests the PostYAMLDump event."""

    @Event.PostYAMLDump.subscribe
    def hook(string: str) -> Optional[str]:
        # replace the dumped string entirely
        return "Hello world!"

    assert Event.PostYAMLDump.validate()

    dumped = YAMLParser().dump(Entry("Cao_2019", self.EXAMPLE_ENTRY_DICT.copy()))
    assert dumped == "Hello world!"
def test_event_pre_yaml_dump(self) -> None:
    """Tests the PreYAMLDump event."""

    @Event.PreYAMLDump.subscribe
    def hook(entry: Entry) -> None:
        entry.label = "Cao2019"

    assert Event.PreYAMLDump.validate()

    dumped = YAMLParser().dump(Entry("Cao_2019", self.EXAMPLE_ENTRY_DICT.copy()))
    # the second line of the YAML output carries the (hook-modified) label
    assert cast(str, dumped).split("\n")[1] == "Cao2019:"
def test_event_pre_bibtex_dump(self) -> None:
    """Tests the PreBibtexDump event."""

    @Event.PreBibtexDump.subscribe
    def hook(entry: Entry) -> None:
        entry.label = "Cao2019"

    assert Event.PreBibtexDump.validate()

    dumped = BibtexParser().dump(Entry("Cao_2019", self.EXAMPLE_ENTRY_DICT.copy()))
    # the first line of the BibTex output carries the (hook-modified) label
    assert dumped.split("\n")[0] == "@article{Cao2019,"
def test_entry_set_file(files: List[str], caplog: pytest.LogCaptureFixture) -> None:
    """Test that file setting normalizes single files, lists, and strings.

    Args:
        files: a list of paths to files.
        caplog: the built-in pytest fixture.
    """
    entry = Entry("Cao_2019", EXAMPLE_ENTRY_DICT)
    # a single file is passed as a bare string, multiple files as a list
    entry.file = files[0] if len(files) == 1 else files  # type: ignore
    expected = [str(RelPath(path)) for path in files]
    assert entry.file == expected
    if len(files) > 1:
        # a comma-separated string is converted to a list and reported as a lint message
        entry.file = ", ".join(files)  # type: ignore
        assert entry.file == expected
        expected_record = (
            "cobib.database.entry",
            20,
            "Converted the field 'file' of entry 'Cao_2019' to a list. You can consider storing it "
            "as such directly.",
        )
        assert expected_record in caplog.record_tuples
def assert_default_test_entry(entry: Entry) -> None:
    """Asserts that the passed entry is the default testing entry.

    Args:
        entry: the entry to assert.
    """
    entry.escape_special_chars()
    assert entry.label == "Cao2018"
    data = entry.data
    assert data["archivePrefix"] == "arXiv"
    assert data["arxivid"].startswith("1812.09976")
    expected_author = (
        "Yudong Cao and Jonathan Romero and Jonathan P. Olson and Matthias Degroote and "
        "Peter D. Johnson and M{\\'a}ria Kieferov{\\'a} and Ian D. Kivlichan and Tim Menke "
        "and Borja Peropadre and Nicolas P. D. Sawaya and Sukin Sim and Libor Veis and "
        "Al{\\'a}n Aspuru-Guzik"
    )
    assert data["author"] == expected_author
    assert data["doi"].startswith("10.1021/acs.chemrev.8b00803")
    assert data["title"] == "Quantum Chemistry in the Age of Quantum Computing"
    assert data["year"] == 2018
    assert data["_download"] == "http://arxiv.org/pdf/1812.09976v2"
def test_dump(self, caplog: pytest.LogCaptureFixture) -> None:
    """Test that dumping an entry as an ISBN logs an error.

    Args:
        caplog: the built-in pytest fixture.
    """
    ISBNParser().dump(Entry("dummy", {"ENTRYTYPE": "unpublished"}))
    expected_record = (
        "cobib.parsers.isbn",
        logging.ERROR,
        "Cannot dump an entry as an ISBN.",
    )
    assert expected_record in caplog.record_tuples
def test_entry_set_tags(caplog: pytest.LogCaptureFixture) -> None:
    """Test tags setting.

    Args:
        caplog: the built-in pytest fixture.
    """
    entry = Entry("Cao_2019", EXAMPLE_ENTRY_DICT)
    assert entry.tags == []
    # NB: tags must be a list; lists of any length are stored as-is
    for tag_list in (["foo"], ["foo", "bar"]):
        entry.tags = tag_list
        assert entry.tags == tag_list
    # a comma-separated string is converted to a list and reported as a lint message
    entry.tags = "foo, bar"  # type: ignore
    assert entry.tags == ["foo", "bar"]
    expected_record = (
        "cobib.database.entry",
        20,
        "Converted the field 'tags' of entry 'Cao_2019' to a list. You can consider storing it as "
        "such directly.",
    )
    assert expected_record in caplog.record_tuples
def test_init_logging(caplog: pytest.LogCaptureFixture) -> None:
    """Test init logging for linting purposes.

    Args:
        caplog: the built-in pytest fixture.
    """
    entry = Entry("dummy", {"ID": "dummy", "number": "1"})
    # numeric string fields get converted to integers upon initialization ...
    assert entry.data["number"] == 1
    assert (
        "cobib.database.entry",
        20,
        "Converting field 'number' of entry 'dummy' to integer: 1.",
    ) in caplog.record_tuples
    # ... and the deprecated "ID" field is dropped in favor of the label
    assert entry.label == "dummy"
    assert "ID" not in entry.data
    assert (
        "cobib.database.entry",
        20,
        "The field 'ID' of entry 'dummy' is no longer required. It will be inferred from the entry "
        "label.",
    ) in caplog.record_tuples
def parse(self, string: str) -> Dict[str, Entry]:
    # pdoc will inherit the docstring from the base class
    # noqa: D102
    string = Event.PreArxivParse.fire(string) or string
    try:
        # validate the input against the arXiv ID pattern before querying the API
        match = re.search(ARXIV_REGEX, string)
        if match is None:
            raise AssertionError
    except AssertionError:
        msg = f"'{string}' is not a valid arXiv ID."
        LOGGER.warning(msg)
        return OrderedDict()
    arxiv_id = match.group(1)
    LOGGER.info("Gathering BibTex data for arXiv ID: %s.", arxiv_id)
    try:
        page = requests.get(ARXIV_URL + arxiv_id, timeout=10)
    except requests.exceptions.RequestException as err:
        # best-effort: network failures yield an empty result rather than raising
        LOGGER.error("An Exception occurred while trying to query the arXiv ID: %s.", arxiv_id)
        LOGGER.error(err)
        return OrderedDict()
    xml = BeautifulSoup(page.text, features="html.parser")
    # the arXiv API signals failures via an entry titled "Error"
    if xml.feed.entry.title.contents[0] == "Error":
        msg = (
            "The arXiv API returned the following error: "
            + xml.feed.entry.summary.contents[0]
        )
        LOGGER.warning(msg)
        return OrderedDict()
    label = ""
    entry: Dict[str, Any] = {}
    entry["archivePrefix"] = "arXiv"
    # walk the Atom feed children and map each tag onto a BibTex field
    for key in xml.feed.entry.findChildren(recursive=False):
        if key.name == "arxiv:doi":
            entry["doi"] = str(key.contents[0])
        elif key.name == "id":
            entry["arxivid"] = str(key.contents[0]).replace("http://arxiv.org/abs/", "")
            entry["eprint"] = str(key.contents[0])
        elif key.name == "arxiv:primary_category":
            entry["primaryClass"] = str(key.attrs["term"])
        elif key.name == "published":
            # The year must also be stored as a string for compatibility reasons with
            # bibtexparser. However, we perform a conversion to an integer first, to ensure that
            # the year can actually be represented as such.
            entry["year"] = int(key.contents[0].split("-")[0])
            label += str(entry["year"])
        elif key.name == "title":
            entry["title"] = re.sub(r"\s+", " ", key.contents[0].strip().replace("\n", " "))
        elif key.name == "author":
            if "author" not in entry:
                # `first` flags the first author so their surname can prefix the label
                first = True
                entry["author"] = ""
            name = [n.contents[0] for n in key.findChildren()][0]
            if first:
                label = name.split()[-1] + label
                first = False
            entry["author"] += f"{name} and "
        elif key.name == "summary":
            entry["abstract"] = re.sub(r"\s+", " ", key.contents[0].strip().replace("\n", " "))
        elif key.name == "link":
            # only the DOI and PDF links are of interest; other links are ignored silently
            if key.attrs.get("title", None) == "doi":
                entry["url"] = key.attrs["href"]
            elif key.attrs.get("title", None) == "pdf":
                entry["_download"] = key.attrs.get("href", "")
        else:
            LOGGER.warning("The key '%s' of this arXiv entry is not being processed!", key.name)
    if "doi" in entry:
        entry["ENTRYTYPE"] = "article"
    else:
        entry["ENTRYTYPE"] = "unpublished"
    # strip last 'and' from author field
    # NOTE(review): assumes the feed always contains at least one author — otherwise this
    # raises a KeyError; confirm the arXiv API guarantees this.
    entry["author"] = entry["author"][:-5]
    bib = OrderedDict()
    bib[label] = Entry(label, entry)
    Event.PostArxivParse.fire(bib)
    return bib
def hook(new_entry: Entry) -> None:
    # tag the freshly added entry so the event's effect is observable
    new_entry.data.update({"tags": "test"})
from typing import Any, Callable, Generator, Tuple import pytest from cobib.config import LabelSuffix, config from cobib.database import Database, Entry from .. import get_resource TMPDIR = Path(tempfile.gettempdir()) EXAMPLE_LITERATURE = get_resource("example_literature.yaml") DUMMY_ENTRY = Entry( "dummy", { "ENTRYTYPE": "misc", "author": "D. Dummy", "title": "Something dumb", }, ) DUMMY_ENTRY_YAML = """--- dummy: ENTRYTYPE: misc author: D. Dummy title: Something dumb ... """ @pytest.fixture(autouse=True) def setup() -> Generator[Any, None, None]:
def execute(self, args: List[str], out: IO[Any] = sys.stdout) -> None:
    """Adds a new entry.

    Depending on the `args`, if a keyword for one of the available `cobib.parsers` was used
    together with a matching input, that parser will be used to create the new entry. Otherwise,
    the command is only valid if the `--label` option was used to specify a new entry label, in
    which case this command will trigger the `cobib.commands.edit.EditCommand` for a manual entry
    addition.

    Args:
        args: a sequence of additional arguments used for the execution. The following values are
            allowed for this command:
                * `-l`, `--label`: the label to give to the new entry.
                * `-u`, `--update`: updates an existing database entry if it already exists.
                * `-f`, `--file`: one or multiple files to associate with this entry. This data
                  will be stored in the `cobib.database.Entry.file` property.
                * `-p`, `--path`: the path to store the downloaded associated file in. This can
                  be used to overwrite the `config.utils.file_downloader.default_location`.
                * `--skip-download`: skips the automatic download of an associated file.
                * `--skip-existing`: skips entry if label exists instead of running label
                  disambiguation.
                * in addition to the options above, a *mutually exclusive group* of keyword
                  arguments for all available `cobib.parsers` are registered at runtime. Please
                  check the output of `cobib add --help` for the exact list.
                * any *positional* arguments (i.e. those, not preceded by a keyword) are
                  interpreted as tags and will be stored in the `cobib.database.Entry.tags`
                  property.
        out: the output IO stream. This defaults to `sys.stdout`.
    """
    LOGGER.debug("Starting Add command.")
    parser = ArgumentParser(prog="add", description="Add subcommand parser.")
    parser.add_argument("-l", "--label", type=str, help="the label for the new database entry")
    parser.add_argument(
        "-u",
        "--update",
        action="store_true",
        help="update an entry if the label exists already",
    )
    # the "extend" action only exists on Python 3.8+; older versions fall back to "append"
    file_action = "extend" if sys.version_info[1] >= 8 else "append"
    parser.add_argument(
        "-f",
        "--file",
        type=str,
        nargs="+",
        action=file_action,
        help="files associated with this entry",
    )
    parser.add_argument("-p", "--path", type=str, help="the path for the associated file")
    parser.add_argument(
        "--skip-download",
        action="store_true",
        help="skip the automatic download of an associated file",
    )
    parser.add_argument(
        "--skip-existing",
        action="store_true",
        help="skips entry addition if existent instead of using label disambiguation",
    )
    # register one mutually exclusive argument per parser class found at runtime
    group_add = parser.add_mutually_exclusive_group()
    avail_parsers = {
        cls.name: cls for _, cls in inspect.getmembers(parsers) if inspect.isclass(cls)
    }
    for name in avail_parsers.keys():
        try:
            group_add.add_argument(
                f"-{name[0]}", f"--{name}", type=str, help=f"{name} object identfier"
            )
        except argparse.ArgumentError:
            # the short flag clashed with an existing one; retry with only the long flag
            try:
                group_add.add_argument(f"--{name}", type=str, help=f"{name} object identfier")
            except argparse.ArgumentError:
                continue
    parser.add_argument(
        "tags",
        nargs=argparse.REMAINDER,
        help="A list of space-separated tags to associate with this entry."
        "\nYou can use quotes to specify tags with spaces in them.",
    )
    if not args:
        parser.print_usage(sys.stderr)
        sys.exit(1)
    try:
        largs = parser.parse_args(args)
    except argparse.ArgumentError as exc:
        LOGGER.error(exc.message)
        return
    Event.PreAddCommand.fire(largs)
    new_entries: Dict[str, Entry] = OrderedDict()
    edit_entries = False
    # dispatch to the first parser whose keyword argument was provided
    for name, cls in avail_parsers.items():
        string = getattr(largs, name, None)
        if string is None:
            continue
        LOGGER.debug("Adding entries from %s: '%s'.", name, string)
        new_entries = cls().parse(string)
        break
    else:
        # no parser input given: fall back to manual entry creation via --label
        if largs.label is not None:
            LOGGER.warning("No input to parse. Creating new entry '%s' manually.", largs.label)
            new_entries = {
                largs.label: Entry(
                    largs.label,
                    {"ENTRYTYPE": config.commands.edit.default_entry_type},
                )
            }
            edit_entries = True
        else:
            msg = "Neither an input to parse nor a label for manual creation specified!"
            LOGGER.error(msg)
            return
    if largs.label is not None:
        # an explicit label overrides whatever the parser produced
        assert len(new_entries.values()) == 1
        for value in new_entries.values():
            # logging done by cobib/database/entry.py
            value.label = largs.label
        new_entries = OrderedDict((largs.label, value) for value in new_entries.values())
    else:
        # otherwise, format each label according to the configured default label format
        formatted_entries = OrderedDict()
        for label, value in new_entries.items():
            formatted_label = evaluate_as_f_string(
                config.database.format.label_default, {"label": label, **value.data.copy()}
            )
            value.label = formatted_label
            formatted_entries[formatted_label] = value
        new_entries = formatted_entries
    if largs.file is not None:
        if file_action == "append":
            # We need to flatten the potentially nested list.
            # pylint: disable=import-outside-toplevel
            from itertools import chain

            largs.file = list(chain.from_iterable(largs.file))
        assert len(new_entries.values()) == 1
        for value in new_entries.values():
            # logging done by cobib/database/entry.py
            value.file = largs.file
    if largs.tags != []:
        assert len(new_entries.values()) == 1
        for value in new_entries.values():
            # logging done by cobib/database/entry.py
            value.tags = largs.tags
    bib = Database()
    existing_labels = set(bib.keys())
    for lbl, entry in new_entries.copy().items():
        # check if label already exists
        if lbl in existing_labels:
            if not largs.update:
                msg = f"You tried to add a new entry '{lbl}' which already exists!"
                LOGGER.warning(msg)
                if edit_entries or largs.skip_existing:
                    msg = f"Please use `cobib edit {lbl}` instead!"
                    LOGGER.warning(msg)
                    continue
                msg = (
                    "The label will be disambiguated based on the configuration option: "
                    "config.database.format.label_suffix"
                )
                LOGGER.warning(msg)
                new_label = bib.disambiguate_label(lbl, entry)
                entry.label = new_label
                new_entries[new_label] = entry
                new_entries.pop(lbl)
            else:
                # label exists but the user asked to update an existing entry
                existing_data = bib[lbl].data.copy()
                existing_data.update(entry.data)
                entry.data = existing_data.copy()
        # download associated file (if requested)
        if "_download" in entry.data.keys():
            if largs.skip_download:
                entry.data.pop("_download")
            else:
                path = FileDownloader().download(
                    entry.data.pop("_download"), lbl, folder=largs.path, overwrite=largs.update
                )
                if path is not None:
                    entry.data["file"] = str(path)
        # check journal abbreviation
        if "journal" in entry.data.keys():
            entry.data["journal"] = JournalAbbreviations.elongate(entry.data["journal"])
    Event.PostAddCommand.fire(new_entries)
    bib.update(new_entries)
    if edit_entries:
        # manual creation: hand off to the Edit command for the user to fill in the data
        EditCommand().execute([largs.label])
    bib.save()
    self.git(args=vars(largs))
    for label in new_entries:
        msg = f"'{label}' was added to the database."
        LOGGER.info(msg)
def test_entry_set_label() -> None:
    """Test label changing."""
    # this test may fail if the input dict is not copied
    relabeled = Entry("Cao_2019", EXAMPLE_ENTRY_DICT)
    relabeled.label = "Cao2019"
    assert relabeled.label == "Cao2019"
def execute(self, args: List[str], out: IO[Any] = sys.stdout) -> None:
    """Opens an entry for manual editing.

    This command opens an `cobib.database.Entry` in YAML format for manual editing. The editor
    program can be configured via `config.commands.edit.editor`. By default, this setting will
    respect your `$EDITOR` environment variable, but fall back to using `vim` if that variable
    is not set.

    Args:
        args: a sequence of additional arguments used for the execution. The following values
            are allowed for this command:
                * `label`: the label of the entry to edit.
                * `-a`, `--add`: if specified, allows adding new entries for non-existent
                  labels. The default entry type of this new entry can be configured via
                  `config.commands.edit.default_entry_type`.
        out: the output IO stream. This defaults to `sys.stdout`.
    """
    LOGGER.debug("Starting Edit command.")
    parser = ArgumentParser(prog="edit", description="Edit subcommand parser.")
    parser.add_argument("label", type=str, help="label of the entry")
    parser.add_argument(
        "-a",
        "--add",
        action="store_true",
        help="if specified, will add a new entry for unknown labels",
    )
    parser.add_argument(
        "--preserve-files", action="store_true", help="do not rename associated files"
    )
    if not args:
        parser.print_usage(sys.stderr)
        sys.exit(1)
    try:
        largs = parser.parse_args(args)
    except argparse.ArgumentError as exc:
        LOGGER.error(exc.message)
        return
    Event.PreEditCommand.fire(largs)
    yml = YAMLParser()
    bib = Database()
    try:
        # dump the existing entry as the starting point for the edit
        entry = bib[largs.label]
        prv = yml.dump(entry)
        if largs.add:
            LOGGER.warning(
                "Entry '%s' already exists! Ignoring the `--add` argument.", largs.label
            )
            largs.add = False
    except KeyError:
        # No entry for given label found
        if largs.add:
            # add a new entry for the unknown label
            entry = Entry(
                largs.label,
                {"ENTRYTYPE": config.commands.edit.default_entry_type},
            )
            prv = yml.dump(entry)
        else:
            msg = (
                f"No entry with the label '{largs.label}' could be found."
                "\nUse `--add` to add a new entry with this label."
            )
            LOGGER.error(msg)
            return
    if prv is None:
        # No entry found to be edited. This should never occur unless the YAMLParser
        # experiences an unexpected error.
        return
    LOGGER.debug("Creating temporary file.")
    with tempfile.NamedTemporaryFile(mode="w+", prefix="cobib-", suffix=".yaml") as tmp_file:
        tmp_file_name = tmp_file.name
        tmp_file.write(prv)
        tmp_file.flush()
        LOGGER.debug('Starting editor "%s".', config.commands.edit.editor)
        # block until the user's editor exits; a non-zero status aborts the edit
        status = os.system(config.commands.edit.editor + " " + tmp_file.name)
        assert status == 0
        LOGGER.debug("Editor finished successfully.")
        new_entries = YAMLParser().parse(tmp_file.name)
        new_entry = list(new_entries.values())[0]
    # the temporary file is deleted when the context manager exits
    assert not Path(tmp_file_name).exists()
    if entry == new_entry and not largs.add:
        LOGGER.info("No changes detected.")
        return
    bib.update({new_entry.label: new_entry})
    if new_entry.label != largs.label:
        bib.rename(largs.label, new_entry.label)
        if not largs.preserve_files:
            # rename associated files whose stem matches the old label
            new_files = []
            for file in new_entry.file:
                path = RelPath(file)
                if path.path.stem == largs.label:
                    LOGGER.info("Also renaming associated file '%s'.", str(path))
                    target = RelPath(path.path.parent / f"{new_entry.label}.pdf")
                    if target.path.exists():
                        LOGGER.warning(
                            "Found conflicting file, not renaming '%s'.", str(path)
                        )
                    else:
                        path.path.rename(target.path)
                        new_files.append(str(target))
                        continue
                new_files.append(file)
            new_entry.file = new_files
    Event.PostEditCommand.fire(new_entry)
    bib.save()
    self.git(args=vars(largs))
    msg = f"'{largs.label}' was successfully edited."
    LOGGER.info(msg)
def parse(self, string: str) -> Dict[str, Entry]:
    # pdoc will inherit the docstring from the base class
    # noqa: D102
    string = Event.PreISBNParse.fire(string) or string
    try:
        # validate the input against the ISBN pattern before querying the API
        match = re.search(ISBN_REGEX, string)
        if match is None:
            raise AssertionError
    except AssertionError:
        msg = f"'{string}' is not a valid ISBN."
        LOGGER.warning(msg)
        return OrderedDict()
    isbn = match.group(1)
    LOGGER.info("Gathering BibTex data for ISBN: %s.", isbn)
    # the openlibrary API expects the ISBN without separators
    isbn_plain = "".join([i for i in isbn if i.isdigit()])
    try:
        page = requests.get(ISBN_URL + isbn_plain + "&jscmd=data&format=json", timeout=10)
    except requests.exceptions.RequestException as err:
        # best-effort: network failures yield an empty result rather than raising
        LOGGER.error("An Exception occurred while trying to query the ISBN: %s.", isbn)
        LOGGER.error(err)
        return OrderedDict()
    try:
        contents = dict(json.loads(page.content))
    except json.JSONDecodeError as err:
        LOGGER.error("An Exception occurred while parsing the query results: %s.", page.content)
        LOGGER.error(err)
        return OrderedDict()
    if not contents:
        msg = (
            f'No data was found for ISBN "{isbn}". If you think this is an error and '
            + "the openlibrary API should provide an entry, please file a bug report. "
            + "Otherwise please try adding this entry manually until more APIs are "
            + "available in coBib."
        )
        LOGGER.warning(msg)
        return OrderedDict()
    label = ""
    entry = {}
    for key, value in contents[list(contents.keys())[0]].items():
        if key in ["title", "url"]:
            entry[key] = value
        elif key == "number_of_pages":
            # we explicitly convert to a string to prevent type errors in the bibtexparser
            str_val = str(value)
            entry["pages"] = int(str_val) if str_val.isnumeric() else str_val
        elif key == "publish_date":
            entry["date"] = value
            try:
                match = re.search(r"\d{4}", value)
                if match is None:
                    raise AttributeError  # pragma: no cover
                entry["year"] = int(match.group())
                label += str(entry["year"])
            except AttributeError:  # pragma: no cover
                pass  # pragma: no cover
        elif key == "authors":
            label = value[0]["name"].split()[-1] + label
            # FIX: BibTex requires author names to be separated by " and " (with spaces on
            # both sides); the previous '" and".join' produced e.g. "A. Smith andB. Jones".
            entry["author"] = " and ".join([a["name"] for a in value])
        elif key == "publishers":
            # FIX: same separator bug as for the author field above
            entry["publisher"] = " and ".join([a["name"] for a in value])
    # set entry-type to 'book'
    entry["ENTRYTYPE"] = "book"
    bib = OrderedDict()
    bib[label] = Entry(label, entry)
    Event.PostISBNParse.fire(bib)
    return bib
def test_to_yaml(self) -> None:
    """Test to yaml conversion."""
    dumped = YAMLParser().dump(Entry("Cao_2019", self.EXAMPLE_ENTRY_DICT))
    # the dumped string must match the reference YAML file verbatim
    with open(self.EXAMPLE_YAML_FILE, "r", encoding="utf-8") as file:
        assert dumped == file.read()
def hook(entry: Entry) -> None:
    # relabel the entry so the event's effect can be asserted downstream
    setattr(entry, "label", "Cao2019")