def mutate_and_get_payload(cls, root, info, **input):
    """Create a ``Publication`` from graphene mutation input.

    Checks a shared-secret token first, then builds and saves the
    publication, attaching M2M authors/books after the initial save.

    Returns an ``AddPublication`` payload, or the string ``"Invalid token"``
    on authentication failure (kept for backward compatibility).
    """
    # Validate the token before touching the database.
    token = input.get('token', None)
    if token != '******************':
        return "Invalid token"
    publication = Publication(
        title=input.get('title'),
        preview_text=input.get('preview_text', ''),
        detail_text=input.get('detail_text', ''),
        image=input.get('image', ''),
        rating=input.get('rating', 0),
        # NOTE(review): 5227 looks like a hard-coded fallback user id — confirm.
        user_id=input.get('user_id', 5227),
        date_create=input.get('date_create'),
        show_counter=input.get('show_counter'),
    )
    if input.get('pk'):
        publication.pk = input.get('pk')
    if input.get('age'):
        publication.age = input.get('age', 0)
    publication.save()
    # M2M relations can only be set once the row has a primary key.
    if input.get('author'):
        publication.author.set(input.get('author'))
        publication.save()
    if input.get('book'):
        try:
            publication.book.set(input.get('book'))
            publication.save()
        except Exception:
            # Best-effort: bad book ids must not abort the whole mutation.
            # (was `except Exception as e: pass` with `e` never used)
            pass
    return AddPublication(publication=publication)
def post(self, request):
    """Process the publication form submission.

    :param request: HttpRequest object
    :return: HttpResponse rendering the form (cleared on success,
        with validation errors otherwise)
    """
    publication = Publication()
    publication.publisher = request.user
    form = PublicationForm(request.POST, request.FILES, instance=publication)
    if form.is_valid():
        # Save the publication; the returned instance is not needed here
        # (was assigned to an unused local `new_publication`).
        form.save()
        # Present a fresh, empty form after a successful save.
        form = PublicationForm()
        # User-facing success message (Spanish, kept verbatim).
        messages.success(request, 'Publicación creada correctamente')
    context = {'form': form, 'navbar': "publication-form"}
    return render(request, 'publications/publication_form.html', context)
def vue_publication_save(request):
    """Create or update a ``Publication`` from a Vue front-end AJAX call.

    Expects a POST field ``data`` containing JSON and an optional uploaded
    ``image`` file. Returns a JsonResponse with the saved publication's
    ``id`` (plus ``image`` URL when an image was uploaded); returns an
    empty object for anonymous users.
    """
    to_template = {}
    if request.user.is_authenticated:
        image = request.FILES.get('image')
        data = request.POST.get('data')
        data = json.loads(data)
        id = data.get('id', None)  # NOTE: shadows the builtin `id`
        title = data.get('title')
        text = data.get('text')
        books = data.get('books', None)
        authors = data.get('authors', None)
        series = data.get('series', None)
        if id:
            # Update an existing publication owned by the current user
            # (Publication.DoesNotExist propagates for foreign pks).
            publication = Publication.objects.get(pk=id, user=request.user)
            publication.title = title
            # Preview is the first 33 words of the tag-stripped body.
            publication.preview_text = Truncator(strip_tags(text)).words(33)
            publication.detail_text = text
            publication.user = request.user
            publication.series_id = series
            publication.save()
            publication.book.set(books)
            publication.author.set(authors)
            # NOTE(review): `series` is used both as an FK id (series_id
            # above) and as an M2M value here — one of the two is likely
            # wrong; verify against the Publication model.
            publication.series.set(series)
        else:
            # Create a new publication.
            publication = Publication(title=title, user=request.user,
                                      preview_text=Truncator(
                                          strip_tags(text)).words(33),
                                      detail_text=text)
            publication.save()
            if books:
                publication.book.set(books)
            if authors:
                publication.author.set(authors)
            if series:
                publication.series.set(series)
        if image:
            publication.image.save(image.name, image)
            to_template['image'] = publication.image.url
        to_template['id'] = publication.id
    return JsonResponse(to_template)
def add_association(acc, article, is_associated=False):
    """Link a ``Publication`` to an ``Experiment``, creating either as needed.

    The resulting ``Association`` row is what curators use to approve or
    reject the linkage between an article and a study.

    :param acc: ArrayExpress accession. e.g. ``E-MTAB-xxxx``
    :type acc: str
    :param article: Json object as collected from Europe BMC.
    :type article: dict
    :param is_associated: Flag indicating whether the publication is
        already associated with the study in the AE database or not
    :type is_associated: bool
    """
    study = retrieve_study_by_acc(acc)[0]

    # Look the experiment up by accession, title or description;
    # create it from the retrieved study when no match exists.
    exp_query = (Q(accession=acc)
                 | Q(title=study.title)
                 | Q(description=study.description))
    experiment_obj = Experiment.objects.filter(exp_query).first()
    if experiment_obj is None:
        experiment_obj = Experiment(accession=acc,
                                    title=study.title,
                                    description=study.description)
        experiment_obj.save()

    # Match the publication on any known identifier, falling back to title.
    pub_query = (Q(pubmed=article.get('pmid', -1))
                 | Q(pmc_id=article.get('pmcid'))
                 | Q(doi=article.get('doi', 'ANY THING ELSE'))
                 | Q(title=article['title']))
    publication_obj = Publication.objects.filter(pub_query).first()
    if publication_obj is None:
        publication_obj = Publication(pubmed=article.get('pmid', None),
                                      pmc_id=article.get('pmcid'),
                                      doi=article.get('doi', None),
                                      title=article['title'],
                                      whole_article=json.dumps(article))
    else:
        # Refresh the stored raw article payload on every call.
        publication_obj.whole_article = json.dumps(article)
    publication_obj.save()

    link, created = Association.objects.get_or_create(
        experiment=experiment_obj, publication=publication_obj)
    if created:
        link.is_associated = is_associated
        link.save()
def add_publications(self, number):
    """Seed the database with ``number`` random publications.

    Picks a random existing user as the author of each publication and
    fills title/content with random lowercase text. Writes a status
    message to ``self.stdout`` (management-command style) in every case.

    NOTE: uses ``string.lowercase`` — Python 2 only (py3: ``ascii_lowercase``).
    """
    if number > 0:
        # `random` here is a local Random instance, shadowing the module name.
        random = Random()
        users = User.objects.all()
        # Alphabet for content generation: lowercase letters plus spaces.
        charts = string.lowercase + " "
        if len(users) > 0:
            for i in range(number):
                # 40-100 random characters of body text.
                content = "".join([random.choice(charts)
                                   for a in range(random.randint(40, 100))])
                # 15-30 random letters, title-cased.
                publication_title = "".join(
                    [random.choice(string.lowercase)
                     for a in range(random.randint(15, 30))]).title()
                author = random.choice(users)
                # `publication_types` presumably a module-level list — verify.
                publication_type = random.choice(publication_types)
                publication = Publication(author=author,
                                          title=publication_title,
                                          content=content,
                                          publication_type=publication_type)
                publication.save()
            self.stdout.write("%i publications was added to database" % number)
        else:
            self.stdout.write("No user to add publication")
    else:
        self.stdout.write("Invalid number of publications to add was set. Nothing done")
def import_bibtex(request):
    """Admin view: parse BibTex text from POST and create ``Publication`` rows.

    GET renders the empty import form. POST parses the ``bibliography``
    field, validates each entry (title/author/year required, known type),
    then saves all parsed publications and redirects to the change list.

    NOTE: Python 2 code — uses ``has_key`` and the function forms of
    ``split``/``join`` (presumably from the ``string`` module — verify
    the file's imports).
    """
    if request.method == 'POST':
        # try to parse BibTex
        bib = parse(request.POST['bibliography'])
        # container for error messages
        errors = {}
        # publication types
        types = Type.objects.all()
        # check for errors
        if not bib:
            if not request.POST['bibliography']:
                errors['bibliography'] = 'This field is required.'
        if not errors:
            publications = []
            # try adding publications
            for entry in bib:
                if entry.has_key('title') and \
                   entry.has_key('author') and \
                   entry.has_key('year'):
                    # parse authors: "Last, First and ..." -> "First Last, ..."
                    authors = split(entry['author'], ' and ')
                    for i in range(len(authors)):
                        author = split(authors[i], ',')
                        # Move the surname to the end of the name.
                        author = [author[-1]] + author[:-1]
                        authors[i] = join(author, ' ')
                    authors = join(authors, ', ')
                    # add missing keys so the Publication kwargs below are safe
                    keys = [
                        'journal', 'booktitle', 'publisher', 'url', 'doi',
                        'keywords', 'note', 'abstract', 'month'
                    ]
                    for key in keys:
                        if not entry.has_key(key):
                            entry[key] = ''
                    # map integer fields to integers (unknown month -> 0)
                    entry['month'] = MONTHS.get(entry['month'].lower(), 0)
                    entry['volume'] = entry.get('volume', None)
                    entry['number'] = entry.get('number', None)
                    # determine type from the configured bibtex type lists
                    type_id = None
                    for t in types:
                        if entry['type'] in t.bibtex_type_list:
                            type_id = t.id
                            break
                    if type_id is None:
                        errors['bibliography'] = 'Type "' + entry[
                            'type'] + '" unknown.'
                        break
                    # add publication (not saved yet — saved in bulk below)
                    publications.append(
                        Publication(type_id=type_id,
                                    citekey=entry['key'],
                                    title=entry['title'],
                                    authors=authors,
                                    year=entry['year'],
                                    month=entry['month'],
                                    journal=entry['journal'],
                                    book_title=entry['booktitle'],
                                    publisher=entry['publisher'],
                                    volume=entry['volume'],
                                    number=entry['number'],
                                    note=entry['note'],
                                    url=entry['url'],
                                    doi=entry['doi'],
                                    abstract=entry['abstract'],
                                    keywords=entry['keywords']))
                else:
                    errors[
                        'bibliography'] = 'Make sure that the keys title, author and year are present.'
                    break
            if not errors and not publications:
                errors['bibliography'] = 'No valid BibTex entries found.'
        if errors:
            # some error occurred — re-render the form with messages
            return render_to_response(
                'admin/publications/import_bibtex.html', {
                    'errors': errors,
                    'title': 'Import BibTex',
                    'types': Type.objects.all(),
                    'request': request
                }, RequestContext(request))
        else:
            try:
                # save publications
                for publication in publications:
                    publication.save()
            except:
                # NOTE(review): bare except silently swallows save errors
                # and only reports via the message below.
                msg = 'Some error occured during saving of publications.'
            else:
                if len(publications) > 1:
                    msg = 'Successfully added ' + str(
                        len(publications)) + ' publications.'
                else:
                    msg = 'Successfully added ' + str(
                        len(publications)) + ' publication.'
            # show message
            messages.info(request, msg)
            # redirect to publication listing
            return HttpResponseRedirect('../')
    else:
        # GET: render the empty import form.
        return render_to_response(
            'admin/publications/import_bibtex.html', {
                'title': 'Import BibTex',
                'types': Type.objects.all(),
                'request': request
            }, RequestContext(request))
def import_bibtex(request):
    """Admin view: parse BibTex text from POST and create ``Publication`` rows.

    GET renders the empty import form. POST parses the ``bibliography``
    field, validates each entry (title/author/year required, known type),
    saves all parsed publications, and redirects to the change list — or
    straight to the change page when exactly one publication was added.
    """
    if request.method == 'POST':
        # try to parse BibTex
        bib = parse(request.POST['bibliography'])
        # container for error messages
        errors = {}
        # publication types
        types = Type.objects.all()
        # check for errors
        if not bib:
            if not request.POST['bibliography']:
                errors['bibliography'] = 'This field is required.'
        if not errors:
            publications = []
            # try adding publications
            for entry in bib:
                if 'title' in entry and \
                   'author' in entry and \
                   'year' in entry:
                    # parse authors: "Last, First and ..." -> "First Last, ..."
                    authors = entry['author'].split(' and ')
                    for i in range(len(authors)):
                        author = authors[i].split(',')
                        author = [author[-1]] + author[:-1]
                        authors[i] = ' '.join(author)
                    authors = ', '.join(authors)
                    # add missing keys so the Publication kwargs below are safe
                    keys = [
                        'journal', 'booktitle', 'publisher', 'institution',
                        'url', 'doi', 'isbn', 'keywords', 'pages', 'note',
                        'abstract', 'month'
                    ]
                    for key in keys:
                        if key not in entry:
                            entry[key] = ''
                    # map integer fields to integers (unknown month -> 0)
                    entry['month'] = MONTHS.get(entry['month'].lower(), 0)
                    entry['volume'] = entry.get('volume', None)
                    entry['number'] = entry.get('number', None)
                    # FIX: keep digits only, but guard against values with no
                    # digits at all (e.g. roman numerals) — int('') raises
                    # ValueError in the original code.
                    if isinstance(entry['volume'], six.text_type):
                        digits = re.sub('[^0-9]', '', entry['volume'])
                        entry['volume'] = int(digits) if digits else None
                    if isinstance(entry['number'], six.text_type):
                        digits = re.sub('[^0-9]', '', entry['number'])
                        entry['number'] = int(digits) if digits else None
                    # remove whitespace characters (likely due to line breaks)
                    entry['url'] = re.sub(r'\s', '', entry['url'])
                    # determine type from the configured bibtex type lists
                    type_id = None
                    for t in types:
                        if entry['type'] in t.bibtex_type_list:
                            type_id = t.id
                            break
                    if type_id is None:
                        errors['bibliography'] = 'Type "' + entry[
                            'type'] + '" unknown.'
                        break
                    # add publication (not saved yet — saved in bulk below)
                    publications.append(
                        Publication(type_id=type_id,
                                    citekey=entry['key'],
                                    title=entry['title'],
                                    authors=authors,
                                    year=entry['year'],
                                    month=entry['month'],
                                    journal=entry['journal'],
                                    book_title=entry['booktitle'],
                                    publisher=entry['publisher'],
                                    institution=entry['institution'],
                                    volume=entry['volume'],
                                    number=entry['number'],
                                    pages=entry['pages'],
                                    note=entry['note'],
                                    url=entry['url'],
                                    doi=entry['doi'],
                                    isbn=entry['isbn'],
                                    external=False,
                                    abstract=entry['abstract'],
                                    keywords=entry['keywords']))
                else:
                    errors[
                        'bibliography'] = 'Make sure that the keys title, author and year are present.'
                    break
            if not errors and not publications:
                errors['bibliography'] = 'No valid BibTex entries found.'
        if errors:
            # some error occurred — re-render the form with messages
            return render(
                request, 'admin/publications/import_bibtex.html', {
                    'errors': errors,
                    'title': 'Import BibTex',
                    'types': Type.objects.all(),
                    'request': request
                })
        else:
            try:
                # save publications
                for publication in publications:
                    publication.save()
            # FIX: was a bare `except:`; narrow so SystemExit /
            # KeyboardInterrupt are not swallowed. Still best-effort:
            # failures are only reported through the message below.
            except Exception:
                msg = 'Some error occured during saving of publications.'
            else:
                if len(publications) > 1:
                    msg = 'Successfully added ' + str(
                        len(publications)) + ' publications.'
                else:
                    msg = 'Successfully added ' + str(
                        len(publications)) + ' publication.'
            # show message
            messages.info(request, msg)
            # redirect to publication listing (or the single new record)
            if len(publications) == 1:
                return HttpResponseRedirect('../%s/change/' % publications[0].id)
            else:
                return HttpResponseRedirect('../')
    else:
        # GET: render the empty import form.
        return render(
            request, 'admin/publications/import_bibtex.html', {
                'title': 'Import BibTex',
                'types': Type.objects.all(),
                'request': request
            })
def import_bibtex(request):
    """Admin view: import BibTex from an uploaded file or a POSTed field.

    POST parses a ``bibtex_file`` upload (preferred) or the
    ``bibliography`` field, creates ``Publication`` rows and best-effort
    links them to the selected ``Creator``/``Production``/``WorkRecord``
    objects. GET renders the form with the selection choice lists.

    NOTE: Python 2 code — uses ``has_key`` and the function forms of
    ``split``/``join`` (presumably from the ``string`` module — verify
    the file's imports).
    """
    if request.method == 'POST':
        # try to parse BibTex
        the_bibtex_file_content = ''
        # Ids of related objects selected in the form (multi-selects).
        creators = request.POST.getlist('creators')
        productions = request.POST.getlist('productions')
        work_records = request.POST.getlist('work_records')
        the_bibtex_file = request.FILES.get('bibtex_file', '')
        if the_bibtex_file:
            # Large uploads arrive in chunks; small ones can be read at once.
            if the_bibtex_file.multiple_chunks():
                the_bibtex_file_content = ''.join(
                    chunk for chunk in the_bibtex_file.chunks())
            else:
                the_bibtex_file_content = the_bibtex_file.read()
        bib = parse(the_bibtex_file_content)
        if not bib:
            # Fall back to the pasted bibliography text.
            bib = parse(request.POST['bibliography'])
        # container for error messages
        errors = {}
        # publication types
        types = Type.objects.all()
        # check for errors
        if not bib:
            if not request.POST['bibliography']:
                errors[
                    'bibliography'] = 'Please populate Bibliography or click browse to upload a Bibtex format file.'
        if not errors:
            publications = []
            # try adding publications
            for entry in bib:
                if (entry.has_key('title') and entry.has_key('author')
                        and entry.has_key('year')):
                    # parse authors: "Last, First and ..." -> "First Last, ..."
                    authors = split(entry['author'], ' and ')
                    for i in range(len(authors)):
                        author = split(authors[i], ',')
                        author = [author[-1]] + author[:-1]
                        authors[i] = join(author, ' ')
                    authors = join(authors, ', ')
                    # add missing keys so the Publication kwargs below are safe
                    keys = [
                        'annote', 'booktitle', 'chapter', 'edition', 'section',
                        'editor', 'howpublished', 'institution', 'journal',
                        'key', 'month', 'note', 'number', 'organization',
                        'pages', 'publisher', 'address', 'school', 'series',
                        'volume', 'issue', 'url', 'isbn', 'issn', 'lccn',
                        'abstract', 'keywords', 'price', 'copyright',
                        'language', 'contents', 'doi'
                    ]
                    for key in keys:
                        if not entry.has_key(key):
                            # price is numeric; every other default is text
                            if key == 'price':
                                entry[key] = 0
                            else:
                                entry[key] = ''
                    # map integer fields to integers (unknown month -> 0)
                    entry['month'] = MONTHS.get(entry['month'].lower(), 0)
                    entry['volume'] = entry.get('volume', None)
                    entry['number'] = entry.get('number', None)
                    # determine type from the configured bibtex type lists
                    type_id = None
                    for t in types:
                        if entry['type'] in t.bibtex_type_list:
                            type_id = t.id
                            break
                    if type_id is None:
                        errors['bibliography'] = 'Type "' + entry[
                            'type'] + '" unknown.'
                        break
                    # add publication (not saved yet — saved in bulk below)
                    publications.append(
                        Publication(type_id=type_id,
                                    annote=entry['annote'],
                                    authors=authors,
                                    book_title=entry['booktitle'],
                                    chapter=entry['chapter'],
                                    edition=entry['edition'],
                                    section=entry['section'],
                                    editor=entry['editor'],
                                    how_published=entry['howpublished'],
                                    institution=entry['institution'],
                                    journal=entry['journal'],
                                    citekey=entry['key'],
                                    year=entry['year'],
                                    month=entry['month'],
                                    note=entry['note'],
                                    organization=entry['organization'],
                                    pages=entry['pages'],
                                    publisher=entry['publisher'],
                                    address=entry['address'],
                                    university=entry['school'],
                                    series=entry['series'],
                                    title=entry['title'],
                                    volume=entry['volume'],
                                    # NOTE(review): `number` is filled from
                                    # the bibtex `issue` field — confirm.
                                    number=entry['issue'],
                                    url=entry['url'],
                                    isbn=entry['isbn'],
                                    issn=entry['issn'],
                                    archive_location=entry['lccn'],
                                    abstract=entry['abstract'],
                                    keywords=entry['keywords'],
                                    price=entry['price'],
                                    rights=entry['copyright'],
                                    language=entry['language'],
                                    table_of_content=entry['contents'],
                                    doi=entry['doi']))
                else:
                    errors[
                        'bibliography'] = 'Make sure that the keys title, author and year are present.'
                    break
            if not errors and not publications:
                errors['bibliography'] = 'No valid BibTex entries found.'
        if errors:
            # some error occurred — re-render the form with messages
            return render_to_response(
                'admin/publications/import_bibtex.html', {
                    'errors': errors,
                    'title': 'Import BibTex',
                    'types': Type.objects.all(),
                    'request': request
                }, RequestContext(request))
        else:
            try:
                # save publications, then best-effort link each one to the
                # selected creators/productions/work records
                for publication in publications:
                    publication.save()
                    try:
                        for creator_id in creators:
                            creator = Creator.objects.get(pk=creator_id)
                            creator.primary_publications.add(publication)
                    except:
                        # NOTE(review): bare except silently drops bad ids
                        pass
                    try:
                        for production_id in productions:
                            production = Production.objects.get(
                                pk=production_id)
                            production.primary_publications.add(publication)
                    except:
                        pass
                    try:
                        for work_record_id in work_records:
                            work_record = WorkRecord.objects.get(
                                pk=work_record_id)
                            work_record.primary_publications.add(publication)
                    except:
                        pass
            except:
                msg = 'Some error occured during saving of publications.'
            else:
                if len(publications) > 1:
                    msg = 'Successfully added ' + str(
                        len(publications)) + ' publications.'
                else:
                    msg = 'Successfully added ' + str(
                        len(publications)) + ' publication.'
            # show message
            messages.info(request, msg)
            # redirect to publication listing
            return HttpResponseRedirect('../')
    else:
        # GET: build the (id, label) choice lists for the form selects.
        Creators_qs = Creator.objects.all()
        Productions_qs = Production.objects.all().order_by('title')
        WorkRecord_qs = WorkRecord.objects.all().order_by('title')
        CREATOR_CHOICES = []  #[("", "-- Select a Creator --")]
        CREATOR_CHOICES += [(e.id, e.creator_name) for e in Creators_qs]
        PRODUCTION_CHOICES = []  #[("", "-- Select a Production --")]
        PRODUCTION_CHOICES += [(e.id, e.title) for e in Productions_qs]
        WORKRECORD_CHOICES = []  #[("", "-- Select a Written Work --")]
        WORKRECORD_CHOICES += [(e.id, e.title) for e in WorkRecord_qs]
        return render_to_response(
            'admin/publications/import_bibtex.html', {
                'title': 'Import BibTex',
                'types': Type.objects.all(),
                'creators': CREATOR_CHOICES,
                'productions': PRODUCTION_CHOICES,
                'work_records': WORKRECORD_CHOICES,
                'request': request
            }, RequestContext(request))
def load_pmids(pmids, force_update=False):
    """
    Loads publications into the database from a list of PubMed IDs
    passed as integers into the database when they do not already exist.

    :param pmids: iterable of PubMed IDs (anything ``int()`` accepts).
    :param force_update: when True, existing records are refreshed from
        PubMed instead of being skipped.
    :return: None
    """
    # De-duplicate and normalise to ints.
    pmids = list(set([int(x) for x in pmids]))
    logger.debug('Starting to load PMID(S) %s', pmids)
    if not force_update:
        # Drop PMIDs that already exist so only new ones are fetched.
        logger.info('Checking %s PMIDS', len(pmids))
        existing_pubs = set(Publication.objects.filter(
            pmid__in=pmids).values_list('pmid', flat=True))
        pmids = set(pmids)
        pmids.difference_update(existing_pubs)
    logger.info('About to fetch %s new PMIDs.', len(pmids),
                extra={'data': {'pmids': pmids}})
    if not pmids:
        logger.debug('pmids are none')
        return None
    pmids_mia = [str(x) for x in pmids]
    for i in xrange(len(pmids_mia) / 5000 + 1):
        # Efetch Maximum, Batch 5000 per request
        query_list = pmids_mia[i * 5000:(i + 1) * 5000]
        # FIX: when the count is an exact multiple of 5000 the final batch
        # is empty — skip it instead of posting an id-less request.
        if not query_list:
            continue
        query_str = ','.join(query_list)
        # FIX: copy the configured params — the original aliased and then
        # mutated the shared settings.ETOOLS_CONFIG['query_params'] dict.
        qdict = dict(settings.ETOOLS_CONFIG['query_params'])
        qdict['id'] = query_str
        # Have to use post if data being sent is > 200
        r = requests.post(settings.ETOOLS_CONFIG['base_url'], data=qdict)
        error_cnt = 0
        while r.status_code != 200 and error_cnt < NUM_PUBMED_RETRIES:
            error_cnt += 1
            time.sleep(0.5)
            r = requests.post(settings.ETOOLS_CONFIG['base_url'], data=qdict)
        if r.status_code != 200:
            logger.warning('Requests to the PubMed server with data %s failed '
                           'after %s attempts.', qdict, NUM_PUBMED_RETRIES + 1)
            # FIX: the original fell through and tried to parse the error
            # page as an efetch result; skip this batch instead.
            continue
        pub_page = r.text
        if pub_page:
            logger.debug('Request to pubmed server returned pub_page')
            xmltree = ET.fromstring(pub_page.encode('utf-8'))
            pubs = xmltree.findall('.//DocumentSummary')
            # pub_dicts will be a list of publications, where each
            # of them is a dictionary
            pub_dicts = map(parse_pub, pubs)
            for index, pub in enumerate(pub_dicts):
                logger.debug('Making new pub %s', pub)
                if pub is not None:
                    new_pub = None
                    if force_update:
                        # Refresh the existing row, or create one if missing.
                        try:
                            new_pub = Publication.objects.get(pmid=pub['pmid'])
                        except Publication.DoesNotExist:
                            new_pub = Publication()
                        new_pub.pmid = pub['pmid']
                        new_pub.title = pub['title']
                        new_pub.authors = pub['authors']
                        new_pub.date = pub['date']
                        new_pub.journal = pub['journal']
                        new_pub.volume = pub['volume']
                        new_pub.pages = pub['pages']
                        new_pub.issue = pub['issue']
                    else:
                        new_pub = Publication(**pub)
                    # Log (but tolerate) records missing citation details.
                    if not new_pub.issue:
                        logger.info('no issue for %s', new_pub.pmid)
                    if not new_pub.volume:
                        logger.info('no volume for %s', new_pub.pmid)
                    if not new_pub.pages:
                        logger.info('no pages for %s', new_pub.pmid)
                    new_pub.save()
                    logger.debug('Finished saving pub %s', new_pub)
                else:
                    bad_pmid = pubs[index].get('uid')
                    logger.warning('PMID %s has no publication in pub_page %s',
                                   bad_pmid, pub_page)
        else:
            logger.warning('There was no page returned from pubmed server!!')