def setAuthor(self, request, pk):
    # Scrape author metadata for papers 63..91 and attach Author records.
    # Note: the pk argument is shadowed by the loop variable below.
    for pk in range(63, 92):
        try:
            paper = Paper.objects.get(id=pk)
        except Paper.DoesNotExist:
            continue
        print(paper.title)
        sort = paper.title
        url = paper.url
        try:
            data = requestUrl(url, sort)
            soup = BeautifulSoup(data, 'html.parser')
            # Authors are exposed via <meta property="citation_author" content="...">
            # tags_div = soup.find_all('li', class_="publication-author-list__item")
            tags_div = soup.find_all('meta', property="citation_author")
            for tag_div in tags_div:
                name = tag_div.get('content')
                # Reuse an existing Author if one matches, otherwise create it.
                try:
                    a1 = Author.objects.get(name=name)
                except Author.DoesNotExist:
                    a1 = Author(name=name)
                    a1.save()
                paper.authors.add(a1)
            paper.save()
        except Exception as e:
            print(e)
    return Response('ok')
def post(self):
    args = self.parser.parse_args()
    portrait_name, portrait = args['portrait_filename'], args['portrait']
    author = Author(args['name'])
    # Only decode and attach the portrait if both a filename and image data were sent.
    if portrait_name and portrait:
        author.portrait = decode_image(name=secure_filename(portrait_name),
                                       data64=portrait,
                                       max_length=255)
    author.save()
    return marshal(author, self.fields, envelope='author_list'), 200
def isCit(self, request, pk):
    # Scrape the citation list of each paper and link citing papers and their authors.
    for pk in range(1, 2):
        print(pk)
        try:
            paper = Paper.objects.get(id=pk)
        except Paper.DoesNotExist:
            continue
        url = paper.url
        # The title is encoded in the URL after the numeric id, e.g. "123_Some_Title".
        sort = re.search(r'\d+_(.+)', url).group(1).strip().replace('_', ' ')
        try:
            data = requestUrl(url, sort)
            soup = BeautifulSoup(data, 'html.parser')
            is_cit = soup.find('span', class_='title-tab-interaction').text
            # Skip papers that report zero citations.
            if '0' in is_cit and 'Citations' in is_cit:
                continue
            tags_ul = soup.find_all(
                'ul',
                class_="nova-e-list nova-e-list--size-m nova-e-list--type-inline nova-e-list--spacing-none nova-v-publication-item__person-list")
            tags_div = soup.find_all('a', itemprop="mainEntityOfPage")
            for index, tag_div in enumerate(tags_div):
                href = tag_div.get('href').strip()
                tags_a = tags_ul[index].find_all('a')
                title = re.search(r'\d+_(.+)', href).group(1).strip().replace('_', ' ')
                # Reuse the citing paper if it is already in the database.
                try:
                    p1 = Paper.objects.get(title=title)
                except Paper.DoesNotExist:
                    p1 = Paper(title=title, url=base_url + href)
                    p1.save()
                paper.cit_paper.add(p1)
                # Attach each author of the citing paper, creating missing Author rows.
                for tag_a in tags_a:
                    name = tag_a.text.strip()
                    try:
                        a1 = Author.objects.get(name=name)
                    except Author.DoesNotExist:
                        a1 = Author(name=name)
                        a1.save()
                    p1.authors.add(a1)
        except Exception as e:
            print(e)
    return Response('ok')
def __handle_create_author_post(request):
    try:
        first_name = request.POST['first_name']
        last_name = request.POST['last_name']
        age = request.POST['age']
        author = Author(first_name=first_name, last_name=last_name, age=age)
        author.save()
        return generate_response("author created", True, author)
    except KeyError as e:
        return generate_response("missing %s" % e.args[0].strip("'"), False)
    except Exception as e:
        return generate_response(str(e), False)
def author_add(request):
    # token = request.POST.get('token')
    name = request.POST.get('name')
    try:
        # If an author with this name already exists, report a failure.
        Author.objects.get(name=name)
        response = {'result': 0, 'message': 'Author already exists'}
    except Author.DoesNotExist:
        author = Author(name=name)
        author.save()
        response = {'result': 1, 'message': 'success'}
    return JsonResponse(response)
async def get_top_users() -> Dict[str, List[Tuple[int, int]]]:
    """
    :return: dict(query -> list((author_id, count_of_its_tweets)))
    """
    queries = await Query.query.gino.all()
    query_map: Dict[str, List[Tuple[int, int]]] = {}
    for query in queries:
        # For each saved query, count tweets per author and keep the top 3 authors.
        query_users = db.select([
            Author.id, db.func.count(Author.id)
        ]).select_from(
            Author.join(TweetXQuery)
        ).where(
            TweetXQuery.q == query.q
        ).group_by(
            Author.id
        ).order_by(
            db.func.count(Author.id).desc()
        ).limit(3)
        query_users = await query_users.gino.all()
        query_map[query.q] = [tuple(row) for row in query_users]
    return query_map
def setAuthor(self, request, pk):
    # Create an Author from the "name" query parameter and echo the name back.
    name = request.GET['name']
    a = Author(name=name)
    a.save()
    return Response(name)
def post(self):
    name = request.json.get("name")
    author = Author(name=name)
    db.session.add(author)
    db.session.commit()
    return {"code": 1000, "msg": "ok", "data": author_ser(author)}