def pubmed_citation(args=sys.argv[1:], out=sys.stdout):
    """Get a citation via the command line using a PubMed ID or PubMed URL"""
    parser = argparse.ArgumentParser(
        description='Get a citation using a PubMed ID or PubMed URL')
    parser.add_argument('query', help='PubMed ID or PubMed URL')
    parser.add_argument(
        '-m', '--mini', action='store_true', help='get mini citation')
    parser.add_argument(
        '-e', '--email', action='store', help='set user email', default='')
    args = parser.parse_args(args=args)

    lookup = PubMedLookup(args.query, args.email)
    publication = Publication(lookup, resolve_doi=False)
    if args.mini:
        out.write(publication.cite_mini() + '\n')
    else:
        out.write(publication.cite() + '\n')
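# Usage sketch for the CLI entry point above (not part of the original
# snippet): because `args` and `out` are injectable parameters, the function
# can be called directly and its output captured without touching sys.argv or
# stdout. The PubMed ID is the example ID used elsewhere in these snippets,
# and the call does query NCBI over the network.
import io

buffer = io.StringIO()
pubmed_citation(args=['--mini', '22331878'], out=buffer)
print(buffer.getvalue())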
def search(PubMedID):
    """
    Retrieve a PubMed record using its PubMed ID or PubMed URL.
    (e.g., '22331878' or 'http://www.ncbi.nlm.nih.gov/pubmed/22331878')

    NCBI will contact the user by email if excessive queries are detected.
    """
    email = ''
    lookup = PubMedLookup(PubMedID, email)

    # Create a Publication object
    # (use 'resolve_doi=False' to keep the DOI URL instead of resolving it):
    publication = Publication(lookup)

    # Collect the Publication object's attributes as a single-row list of tuples:
    tupleoutput = [(
        PubMedID,
        publication.title,
        publication.authors,
        publication.journal,
        publication.year,
        publication.month,
        publication.day,
        # publication.url,  # doesn't seem to work w/ dataframe
        publication.pubmed_url,
        publication.cite(),
        publication.cite_mini(),
        repr(publication.abstract),
        0,
    )]
    return tupleoutput
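# A sketch of how the single-row list of tuples returned by search() could be
# collected into a pandas DataFrame, which the inline comment above suggests
# is the intended use. The column names here are assumptions chosen to mirror
# the tuple layout; they do not come from the original code.
import pandas as pd

columns = ['pmid', 'title', 'authors', 'journal', 'year', 'month', 'day',
           'pubmed_url', 'citation', 'mini_citation', 'abstract', 'flag']

rows = []
for pmid in ['22331878']:
    rows.extend(search(pmid))

df = pd.DataFrame(rows, columns=columns)
print(df.head())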
def search(PubMedID):
    """
    Retrieve a PubMed record using its PubMed ID or PubMed URL.
    (e.g., '22331878' or 'http://www.ncbi.nlm.nih.gov/pubmed/22331878')

    NCBI will contact the user by email if excessive queries are detected.
    """
    email = ''
    lookup = PubMedLookup(PubMedID, email)

    # Create a Publication object
    # (use 'resolve_doi=False' to keep the DOI URL instead of resolving it):
    publication = Publication(lookup)

    # Format the Publication object's attributes as a single string:
    tupleoutput = ("""
    TITLE:\n{title}\n
    AUTHORS:\n{authors}\n
    JOURNAL:\n{journal}\n
    YEAR:\n{year}\n
    MONTH:\n{month}\n
    DAY:\n{day}\n
    URL:\n{url}\n
    PUBMED:\n{pubmed}\n
    CITATION:\n{citation}\n
    MINICITATION:\n{mini_citation}\n
    ABSTRACT:\n{abstract}\n
    """.format(**{
        'title': publication.title,
        'authors': publication.authors,
        'journal': publication.journal,
        'year': publication.year,
        'month': publication.month,
        'day': publication.day,
        'url': publication.url,
        'pubmed': publication.pubmed_url,
        'citation': publication.cite(),
        'mini_citation': publication.cite_mini(),
        'abstract': repr(publication.abstract),
    }))
    return tupleoutput
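# Usage sketch: print the formatted record for the example PubMed ID given in
# the docstring above (a network call to NCBI is made).
print(search('22331878'))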
def pubmed_search(request):
    publications = LabPublication.objects.all().order_by('-year')
    message = ''

    if 'pubmed_id' in request.GET:
        paper_id = request.GET['pubmed_id']
        if not paper_id:
            message = '<strong>No PubMed ID or PubMed URL provided!</strong>'
        else:
            email = request.user.email
            url = paper_id
            try:
                lookup = PubMedLookup(url, email)
            except Exception:
                message = '<strong>Error using your PubMed ID/URL</strong>'
                return render(request, 'public_pages/publications.html',
                              {'publications': publications, 'message': message})

            publication = Publication(lookup)
            if publication == '':
                message = '<strong>No match found for your pubmed ID</strong>'
                return render(request, 'public_pages/publications.html',
                              {'publications': publications, 'message': message})

            user = request.user
            title = publication.title
            authors = publication.authors
            journal = publication.journal
            year = publication.year
            journal_url = publication.url
            pubmed = publication.pubmed_url
            citation = publication.cite()
            mini_citation = publication.cite_mini()
            abstract = repr(publication.abstract)

            try:
                new_publication, created = LabPublication.objects.get_or_create(
                    user=user,
                    title=title,
                    authors=authors,
                    journal=journal,
                    year=year,
                    journal_url=journal_url,
                    pubmed=pubmed,
                    citation=citation,
                    mini_citation=mini_citation,
                    abstract=abstract)
            except Exception:
                message = ('<strong>Could not add your publication. '
                           'Are you trying to add a duplicate entry?</strong>')
                return render(request, 'public_pages/publications.html',
                              {'publications': publications, 'message': message})

            if not created:
                message = ('<strong>Could not add your publication. '
                           'Are you trying to add a duplicate entry?</strong>')
                return render(request, 'public_pages/publications.html',
                              {'publications': publications, 'message': message})
            else:
                return HttpResponseRedirect('publications')

    return render(request, 'public_pages/publications.html',
                  {'publications': publications, 'message': message})
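# A hypothetical sketch of the LabPublication model that the view above
# appears to rely on. The field names are inferred from the get_or_create()
# call; the actual field types and constraints are not shown in the original.
from django.conf import settings
from django.db import models


class LabPublication(models.Model):
    user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
    title = models.TextField()
    authors = models.TextField()
    journal = models.CharField(max_length=255)
    year = models.CharField(max_length=8)
    journal_url = models.URLField(blank=True)
    pubmed = models.URLField(blank=True)
    citation = models.TextField()
    mini_citation = models.TextField()
    abstract = models.TextField()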
def handle(self, *args, **options):
    print('importing {0}'.format(options['path']))
    file = os.path.normpath(options['path'])
    df = pd.read_csv(file, header=None)

    with open(options['search_function'], "r") as sf_file:
        search_function = sf_file.readlines()

    import_record = PubmedImport()
    import_record.import_date = timezone.now()
    import_record.search_function = search_function
    import_record.save()
    import_id = import_record.id

    failed_ids = list()
    for index, record in tqdm(df.iterrows(), total=df.shape[0]):
        pmid = record[0]
        find_pmid = PubmedImportedArticle.objects.filter(pmid=pmid)
        if len(find_pmid) > 0:
            continue
        try:
            email = ''
            url = 'http://www.ncbi.nlm.nih.gov/pubmed/{0}'.format(pmid)
            lookup = PubMedLookup(url, email)
            publication = Publication(lookup, resolve_doi=False)

            pia = PubmedImportedArticle()
            pia.pmid = pmid
            year = self.get_int(publication.year)
            month = self.get_int(publication.month)
            day = self.get_int(publication.day)
            pia.pub_date = datetime(year, month, day, 0, 0, 0, 0, tzinfo=pytz.UTC)
            pia.title = publication.title
            pia.authors = publication.authors
            pia.journal = publication.journal
            pia.citation = publication.cite()
            # pia.mini_citation = publication.cite_mini()
            pia.url = publication.url
            pia.pubmed_url = publication.pubmed_url
            pia.abstract = repr(publication.abstract)
            pia.screened = False
            pia.tagged = False
            pia.landmark = False
            pia.pmimport = import_record
            pia.save()
        except Exception:
            print('\nPMID {0} failed'.format(pmid))
            failed_ids.append(pmid)

    np.savetxt('failed_ids.txt', np.asarray(failed_ids), fmt='%d')
    print('finished')
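# The management command above calls self.get_int(), which is not shown in the
# original. A plausible (hypothetical) implementation: coerce pubmed_lookup's
# year/month/day strings to integers, falling back to 1 for empty or
# non-numeric values (e.g. a month name) so that datetime() still receives a
# valid argument.
def get_int(self, value, default=1):
    try:
        return int(value)
    except (TypeError, ValueError):
        return default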
publication = Publication(lookup)  # Use 'resolve_doi=False' to keep DOI URL

# Access the Publication object's attributes:
print(
    """
    TITLE:\n{title}\n
    AUTHORS:\n{authors}\n
    JOURNAL:\n{journal}\n
    YEAR:\n{year}\n
    MONTH:\n{month}\n
    DAY:\n{day}\n
    URL:\n{url}\n
    PUBMED:\n{pubmed}\n
    CITATION:\n{citation}\n
    MINICITATION:\n{mini_citation}\n
    ABSTRACT:\n{abstract}\n
    """.format(**{
        'title': publication.title,
        'authors': publication.authors,
        'journal': publication.journal,
        'year': publication.year,
        'month': publication.month,
        'day': publication.day,
        'url': publication.url,
        'pubmed': publication.pubmed_url,
        'citation': publication.cite(),
        'mini_citation': publication.cite_mini(),
        'abstract': repr(publication.abstract),
    }))
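# The snippet above assumes a `lookup` object already exists. A minimal setup
# sketch, using the example PubMed URL that appears in the other snippets; the
# empty email string is only appropriate for light, occasional querying, since
# NCBI may contact heavy users by email.
from pubmed_lookup import PubMedLookup, Publication

email = ''
url = 'http://www.ncbi.nlm.nih.gov/pubmed/22331878'
lookup = PubMedLookup(url, email)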