def import_to_lianpeng(file_name): f = open(file_name) xml = f.read() f.close() soup = BeautifulSoup(xml) posts = soup.findAll('post') #import pdb;pdb.set_trace() h = HTMLParser.HTMLParser() for post in posts: stamp = float(post.createtime.string) / 1000 created_time = datetime.datetime.utcfromtimestamp(stamp).replace( tzinfo=utc) tags = " ".join([tag.string for tag in post.findAll('tag')]) title = post.find('title').string if not title: title = "No Title" print created_time print title note = post.find('text') if note and note.string: note = h.unescape(note.string) else: note = "" bookmark = Bookmark(list_id=1397, user_id=3, title=title, note=note, created_time=created_time, domain='note.lianpeng.me', tags=tags) bookmark.save() bookmark = Bookmark.objects.get(id=bookmark.id) bookmark.url = 'http://lianpeng.me/note/{}/'.format(bookmark.id) bookmark.save()
def import_to_lianpeng(file_name): f = open(file_name) xml = f.read() f.close() soup = BeautifulSoup(xml) posts = soup.findAll('post') #import pdb;pdb.set_trace() h = HTMLParser.HTMLParser() for post in posts: stamp = float(post.createtime.string) / 1000 created_time = datetime.datetime.utcfromtimestamp(stamp).replace(tzinfo=utc) tags = " ".join([tag.string for tag in post.findAll('tag')]) title = post.find('title').string if not title: title = "No Title" print created_time print title note = post.find('text') if note and note.string: note = h.unescape(note.string) else: note = "" bookmark = Bookmark(list_id=1397, user_id=3, title=title, note=note, created_time=created_time, domain='note.lianpeng.me', tags=tags) bookmark.save() bookmark = Bookmark.objects.get(id=bookmark.id) bookmark.url = 'http://lianpeng.me/note/{}/'.format(bookmark.id) bookmark.save()
def index(request):
    """Create a placeholder note bookmark and send the user to its editor.

    Ensures the user has a NOTE-kind list (creating one named "Note" on
    first use), saves a bookmark under a temporary URL, rewrites the URL to
    the note's detail page once the id exists, and redirects to the edit
    view.
    """
    note_list, created = List.objects.get_or_create(
        kind=LIST_KIND_CHOICES.NOTE,
        user=request.user,
        defaults={"name": _("Note")})
    site = Site.objects.get_current()
    bookmark = Bookmark(
        url="http://{}/note/empty/".format(site.domain),
        domain="note.lianpeng.me",
        title="输入标题",
        note="",
        user=request.user,
        charset="UTF-8",
        list=note_list)
    bookmark.save()
    # The real URL needs the primary key, which only exists after saving.
    detail_path = reverse('note_detail', args=(bookmark.id, ))
    bookmark.url = 'http://{}{}'.format(site.domain, detail_path)
    bookmark.save()
    return redirect('note_edit', id=bookmark.id)
def post(self, request): form = BookmarkForm(request.POST) if form.is_valid(): bookmark = Bookmark() bookmark.title = form.cleaned_data.get("title") bookmark.url = form.cleaned_data.get("url") bookmark.save() new_tags = form.cleaned_data.get("tags") tags = [x.strip() for x in new_tags.split(',')] for t in tags: tag, created = Tag.objects.get_or_create(name=t) bookmark_tag, created = \ BookmarkTag.objects.get_or_create(bookmark=bookmark, tag=tag) return HttpResponseRedirect(reverse('bookmark:home'))
def index(request):
    """Create a placeholder note bookmark and redirect to its editor.

    Ensures the user has a NOTE-kind list (created with the name "Note" on
    first use), saves the bookmark once to obtain an id, then rewrites its
    URL to the note detail page and saves again.
    """
    l, created = List.objects.get_or_create(
        kind=LIST_KIND_CHOICES.NOTE, user=request.user,
        defaults={"name": _("Note")})
    site = Site.objects.get_current()
    bookmark = Bookmark(url="http://{}/note/empty/".format(site.domain),
                        domain="note.lianpeng.me", title="输入标题", note="",
                        user=request.user, charset="UTF-8", list=l)
    bookmark.save()
    # Fix: dropped the redundant Bookmark.objects.get(id=bookmark.id)
    # round-trip — save() already populates .id on this instance.
    bookmark.url = 'http://{}{}'.format(
        site.domain, reverse('note_detail', args=(bookmark.id, )))
    bookmark.save()
    return redirect('note_edit', id=bookmark.id)
def handle_imported_file(data, user, site, list_name):
    """Import bookmarks for *user* from an exported HTML bookmarks file.

    data: raw HTML of a Netscape-style bookmarks export (<dt>/<a> entries).
    site: export origin; 'kippt' and 'google' get special tag handling.
    list_name: name of the fallback List for entries naming no list.

    Entries that duplicate an existing (url, list) pair are skipped, and
    save errors are swallowed so one bad entry cannot abort the import.
    """
    soup = BeautifulSoup(data)
    entries = soup.findAll('dt')
    # Fallback list for entries without an explicit 'list' attribute.
    default_list, created = List.objects.get_or_create(name=list_name,
                                                       user=user)
    for entry in entries:
        link = entry.find('a')
        bookmark = Bookmark()
        l = None
        date = None
        if not link:
            continue
        for attr in link.attrs:
            # attrs yields (name, value) pairs.
            if attr[0] == 'href':
                bookmark.url = attr[1]
            if attr[0] == 'add_date':
                stamp = int(attr[1])
                # Some exporters emit ms/us timestamps; scale back to
                # seconds by the number of digits beyond ten.
                scale = len(attr[1]) - 10
                stamp = stamp / pow(10, scale)
                date = datetime.datetime.utcfromtimestamp(stamp).replace(
                    tzinfo=utc)
                bookmark.created_time = date
            if attr[0] == 'tags':
                tags = attr[1].replace(',', ' ')
                bookmark.tags = tags
            if attr[0] == 'list':
                name = attr[1]
                l, created = List.objects.get_or_create(name=name, user=user)
        if site == 'kippt':
            # Kippt puts '#tag' tokens in the following <dd> element.
            dd = entry.findNext('dd')
            if dd:
                tags = dd.string
                if tags:
                    tags = re.findall('#([^ ]+)', tags)
                    tags = ' '.join(tags)
                    bookmark.tags = tags
        if site == 'google':
            # Google exports interleave <h3> headers that name the tag for
            # the entries that follow; remember it and move to the next <dt>.
            tag_ele = entry.find('h3')
            if tag_ele:
                tag = tag_ele.string
                continue
            else:
                try:
                    existed_bookmark = Bookmark.objects.get(url=bookmark.url,
                                                            user=user)
                    # URL already saved for this user: just append the tag.
                    existed_bookmark.tags = existed_bookmark.tags + " " + tag
                    existed_bookmark.save()
                    continue
                except Bookmark.DoesNotExist:
                    try:
                        bookmark.tags = bookmark.tags + " " + tag
                    except UnboundLocalError:
                        # No <h3> seen yet in this file, so 'tag' was never
                        # bound — leave tags as-is.
                        pass
                except:
                    # Any other lookup failure (presumably e.g.
                    # MultipleObjectsReturned): skip this entry.
                    continue
        bookmark.domain = urlparse(bookmark.url).netloc
        if l:
            bookmark.list = l
        else:
            bookmark.list = default_list
        bookmark.title = link.string
        bookmark.user = user
        # We can not have two bookmarks with the same url in the same list.
        try:
            Bookmark.objects.get(url=bookmark.url, list=bookmark.list)
            continue
        except Bookmark.DoesNotExist:
            pass
        try:
            bookmark.save()
        except:
            # Best-effort import: ignore entries that fail to save.
            continue
def handle_imported_file(data, user, site, list_name):
    """Import bookmarks for *user* from an exported HTML bookmarks file.

    data: raw HTML of a Netscape-style bookmarks export (<dt>/<a> entries).
    site: export origin; 'kippt' and 'google' get special tag handling.
    list_name: name of the fallback List for entries naming no list.

    Entries that duplicate an existing (url, list) pair are skipped, and
    save errors are swallowed so one bad entry cannot abort the import.

    Fix: the two bare ``except:`` clauses were tightened to
    ``except Exception:`` so KeyboardInterrupt/SystemExit are no longer
    swallowed mid-import.
    """
    soup = BeautifulSoup(data)
    entries = soup.findAll('dt')
    # Fallback list for entries without an explicit 'list' attribute.
    default_list, created = List.objects.get_or_create(name=list_name,
                                                       user=user)
    for entry in entries:
        link = entry.find('a')
        bookmark = Bookmark()
        l = None
        date = None
        if not link:
            continue
        for attr in link.attrs:
            # attrs yields (name, value) pairs.
            if attr[0] == 'href':
                bookmark.url = attr[1]
            if attr[0] == 'add_date':
                stamp = int(attr[1])
                # Some exporters emit ms/us timestamps; scale back to
                # seconds by the number of digits beyond ten.
                scale = len(attr[1]) - 10
                stamp = stamp / pow(10, scale)
                date = datetime.datetime.utcfromtimestamp(stamp).replace(
                    tzinfo=utc)
                bookmark.created_time = date
            if attr[0] == 'tags':
                tags = attr[1].replace(',', ' ')
                bookmark.tags = tags
            if attr[0] == 'list':
                name = attr[1]
                l, created = List.objects.get_or_create(name=name, user=user)
        if site == 'kippt':
            # Kippt puts '#tag' tokens in the following <dd> element.
            dd = entry.findNext('dd')
            if dd:
                tags = dd.string
                if tags:
                    tags = re.findall('#([^ ]+)', tags)
                    tags = ' '.join(tags)
                    bookmark.tags = tags
        if site == 'google':
            # Google exports interleave <h3> headers that name the tag for
            # the entries that follow; remember it and move to the next <dt>.
            tag_ele = entry.find('h3')
            if tag_ele:
                tag = tag_ele.string
                continue
            else:
                try:
                    existed_bookmark = Bookmark.objects.get(url=bookmark.url,
                                                            user=user)
                    # URL already saved for this user: just append the tag.
                    existed_bookmark.tags = existed_bookmark.tags + " " + tag
                    existed_bookmark.save()
                    continue
                except Bookmark.DoesNotExist:
                    try:
                        bookmark.tags = bookmark.tags + " " + tag
                    except UnboundLocalError:
                        # No <h3> seen yet in this file, so 'tag' was never
                        # bound — leave tags as-is.
                        pass
                except Exception:
                    # Any other lookup failure (presumably e.g.
                    # MultipleObjectsReturned): skip this entry.
                    continue
        bookmark.domain = urlparse(bookmark.url).netloc
        if l:
            bookmark.list = l
        else:
            bookmark.list = default_list
        bookmark.title = link.string
        bookmark.user = user
        # We can not have two bookmarks with the same url in the same list.
        try:
            Bookmark.objects.get(url=bookmark.url, list=bookmark.list)
            continue
        except Bookmark.DoesNotExist:
            pass
        try:
            bookmark.save()
        except Exception:
            # Best-effort import: ignore entries that fail to save.
            continue