def perform_romeo_query(search_terms):
    """
    Query the SHERPA/RoMEO API and return the parsed XML response.

    :param search_terms: dict of query parameters. It is copied, so the
        caller's dict is never mutated; the API key is added when configured.
    :returns: the parsed :class:`ElementTree` for the XML response.
    :raises MetadataSourceException: if the HTTP request fails or the
        response is not valid XML (the raw response is then dumped to
        /tmp/romeo_response.xml for inspection).
    """
    search_terms = search_terms.copy()
    if ROMEO_API_KEY:
        search_terms['ak'] = ROMEO_API_KEY
    base_url = 'http://'+ROMEO_API_DOMAIN+'/romeo/api29.php'

    # Perform the query
    try:
        response = urlopen_retry(base_url, data=search_terms).encode('utf-8')
    except requests.exceptions.RequestException as e:
        raise MetadataSourceException('Error while querying RoMEO.\n'+
                'URL was: '+base_url+'\n'+
                'Parameters were: '+str(search_terms)+'\n'+
                'Error is: '+str(e)) from e

    # Parse it
    try:
        parser = ET.XMLParser(encoding='utf-8')
        root = ET.parse(BytesIO(response), parser)
    except ET.ParseError as e:
        # BUG FIX: `response` is bytes (encoded above), so the dump file must
        # be opened in binary mode; text mode ('w') raised TypeError here,
        # masking the original parse error.
        with open('/tmp/romeo_response.xml', 'wb') as f:
            f.write(response)
            f.write(b'\n')
        raise MetadataSourceException('RoMEO returned an invalid XML response, dumped at /tmp/romeo_response.xml\n'+
                'URL was: '+base_url+'\n'+
                'Parameters were: '+str(search_terms)+'\n'+
                'Error is: '+str(e)) from e

    return root
def perform_romeo_query(search_terms):
    """
    Query the SHERPA/RoMEO API and return the parsed XML response.

    :param search_terms: dict of query parameters. It is copied, so the
        caller's dict is never mutated; the API key is added when configured.
    :returns: the parsed :class:`ElementTree` for the XML response.
    :raises MetadataSourceException: if the HTTP request fails or the
        response is not valid XML (the raw response is then dumped to
        /tmp/romeo_response.xml for inspection).
    """
    search_terms = search_terms.copy()
    if ROMEO_API_KEY:
        search_terms['ak'] = ROMEO_API_KEY
    base_url = 'http://' + ROMEO_API_DOMAIN + '/romeo/api29.php'

    # Perform the query
    try:
        response = urlopen_retry(base_url, data=search_terms).encode('utf-8')
    except requests.exceptions.RequestException as e:
        raise MetadataSourceException('Error while querying RoMEO.\n' +
                'URL was: ' + base_url + '\n' +
                'Parameters were: ' + str(search_terms) + '\n' +
                'Error is: ' + str(e)) from e

    # Parse it
    try:
        parser = ET.XMLParser(encoding='utf-8')
        root = ET.parse(BytesIO(response), parser)
    except ET.ParseError as e:
        # BUG FIX: `response` is bytes (encoded above), so the dump file must
        # be opened in binary mode; text mode ('w') raised TypeError here,
        # masking the original parse error.
        with open('/tmp/romeo_response.xml', 'wb') as f:
            f.write(response)
            f.write(b'\n')
        raise MetadataSourceException(
                'RoMEO returned an invalid XML response, dumped at /tmp/romeo_response.xml\n' +
                'URL was: ' + base_url + '\n' +
                'Parameters were: ' + str(search_terms) + '\n' +
                'Error is: ' + str(e)) from e

    return root
def fetch_metadata_by_DOI(doi):
    """
    Fetch the metadata for a single DOI.
    This is supported by the standard proxy, doi.org,
    as well as more advanced proxies such as doi_cache.

    :param doi: the DOI to resolve; ``None`` short-circuits and returns ``None``.
    :returns: the parsed citeproc+json metadata as a dict, or ``None``.
    :raises MetadataSourceException: if the HTTP request fails or the
        proxy returns invalid JSON.
    """
    if doi is None:
        return
    addheaders = {'Accept': 'application/citeproc+json'}
    try:
        request = 'http://'+DOI_PROXY_DOMAIN+'/'+doi
        response = urlopen_retry(request,
                timeout=crossref_timeout,
                headers=addheaders,
                retries=0)
        parsed = json.loads(response)
        return parsed
    except ValueError as e:
        raise MetadataSourceException('Error while fetching DOI metadata:\nInvalid JSON response.\n' +
                'Error: '+str(e)) from e
    except requests.exceptions.RequestException as e:
        # Consistency fix: wrap network errors in MetadataSourceException,
        # as perform_romeo_query does, instead of leaking raw requests
        # exceptions to callers.
        raise MetadataSourceException('Error while fetching DOI metadata:\n' +
                'Error: '+str(e)) from e
def fetch_metadata_by_DOI(doi):
    """
    Fetch the metadata for a single DOI.
    This is supported by the standard proxy, doi.org,
    as well as more advanced proxies such as doi_cache.

    :param doi: the DOI to resolve; ``None`` short-circuits and returns ``None``.
    :returns: the parsed citeproc+json metadata as a dict, or ``None``.
    :raises MetadataSourceException: if the HTTP request fails or the
        proxy returns invalid JSON.
    """
    if doi is None:
        return
    addheaders = {'Accept': 'application/citeproc+json'}
    try:
        request = 'http://'+settings.DOI_PROXY_DOMAIN+'/'+doi
        response = urlopen_retry(request,
                timeout=crossref_timeout,
                headers=addheaders,
                retries=0)
        parsed = json.loads(response)
        return parsed
    except ValueError as e:
        raise MetadataSourceException('Error while fetching DOI metadata:\nInvalid JSON response.\n' +
                'Error: '+str(e)) from e
    except requests.exceptions.RequestException as e:
        # Consistency fix: wrap network errors in MetadataSourceException,
        # as perform_romeo_query does, instead of leaking raw requests
        # exceptions to callers.
        raise MetadataSourceException('Error while fetching DOI metadata:\n' +
                'Error: '+str(e)) from e