def insert_end(node, decision):
    """Insert DecisionEnd between node and node parents"""
    parent_links = node.get_parent_links().exclude(name='default')
    decision_end = decision.get_child_end()

    # Find parent decision node for every end's parent.
    # If the decision node is the one passed,
    # change the parent to link to the Decision node's DecisionEnd node.
    # Skip embedded decisions and forks along the way.
    decision_end_used = False
    for parent_link in parent_links:
        parent = parent_link.parent.get_full_node()
        node_temp = parent
        while node_temp and not isinstance(node_temp, Decision):
            if isinstance(node_temp, Join):
                node_temp = node_temp.get_parent_fork().get_parent()
            elif isinstance(node_temp, DecisionEnd):
                node_temp = node_temp.get_parent_decision().get_parent()
            else:
                node_temp = node_temp.get_parent()

        if node_temp.id == decision.id and parent.node_type != Decision.node_type:
            links = Link.objects.filter(parent=parent).exclude(name__in=['related', 'kill', 'error'])
            if len(links) != 1:
                raise RuntimeError(_('Cannot import workflows that have decision DAG leaf nodes with multiple children or no children.'))
            link = links[0]
            link.child = decision_end
            link.save()
            decision_end_used = True

    # Create link between DecisionEnd and terminal node.
    if decision_end_used and not Link.objects.filter(name='to', parent=decision_end, child=node).exists():
        link = Link(name='to', parent=decision_end, child=node)
        link.save()
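# The rewiring in insert_end is easiest to see on a toy graph. A minimal,
# framework-free sketch (plain dicts stand in for the Django Link rows;
# `redirect_leaf_to_end` is an illustrative name, not from the source):
def redirect_leaf_to_end(links, leaf, decision_end):
    """Point the single non-meta outgoing link of `leaf` at `decision_end`."""
    outgoing = [l for l in links
                if l['parent'] == leaf and l['name'] not in ('related', 'kill', 'error')]
    if len(outgoing) != 1:
        raise RuntimeError('decision DAG leaf must have exactly one child')
    outgoing[0]['child'] = decision_end
    return links

# Example: 'task_a' originally points at 'join'; after the call it points at
# 'decision_end', mirroring what insert_end does with each qualifying parent.
toy_links = [{'name': 'to', 'parent': 'task_a', 'child': 'join'}]
redirect_leaf_to_end(toy_links, 'task_a', 'decision_end')
assert toy_links[0]['child'] == 'decision_end'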
def post_link():
    # Bind the incoming URL to its own name so the new Link entity below
    # does not shadow it (the original rebound `link`, which was confusing).
    url = request.values.get('q')
    author = request.values.get('author')
    key = hashlib.md5(url).hexdigest()
    exists = Link.get_by_key_name(key)
    try:
        if not exists:
            link = Link(key_name=key, link_url=url, authors=[author])
            link.put()
            taskqueue.add(url='/_worker/fetch_title',
                          params={'url': link.link_url, 'key': key})
        else:
            if author not in exists.authors:
                exists.authors.append(author)
            exists.updated_at = datetime.now()
            exists.put()
            taskqueue.add(url='/_worker/fetch_title',
                          params={'url': url, 'key': key})
        return jsonify({'result': True})
    except Exception:
        logging.exception('api call failed')
        return jsonify({'result': False})
def save_link():
    collection_id = request.args.get("collection_id")
    payload = request.get_json()
    link_name = extract_name_from_url(payload["url"])
    id = uuid.uuid4().hex
    collection = Collection.query.filter(
        Collection.id == collection_id).one_or_none()
    if not collection:
        abort(404)
    new_link = Link(id=id,
                    url=payload["url"],
                    description=payload["description"],
                    name=link_name,
                    collection=collection_id)
    new_link.insert()
    return jsonify({
        "code": 200,
        "data": {
            "name": link_name,
            "id": id,
            "url": payload["url"],
            "description": payload["description"],
            "collection": collection_id
        }
    })
def fake_links():
    twitter = Link(name='Twitter', url='#')
    facebook = Link(name='Facebook', url='#')
    linkedin = Link(name='LinkedIn', url='#')
    google = Link(name='Google+', url='#')
    db.session.add_all([twitter, facebook, linkedin, google])
    db.session.commit()
def copyLink(params):
    logging.info('copyLink(): Start')
    # Get link from GUI
    link = params['link_key'].get()
    # Create new link
    new_link = Link(parent=genLinkParentKey())
    # Copy properties of this link to the new link
    new_link.name = link.name
    new_link.description = link.description
    new_link.url = link.url
    new_link.website = link.website
    #new_link.link_category = link.link_category
    new_link.parent_folder = params['target_folder_key']  # parent should be the one provided by the user
    new_link.date_c = datetime.datetime.now()
    new_link.date_m = datetime.datetime.now()
    # Save new link
    new_link.put()

    # Increment n_items of the new parent folder. Fetch the entity once:
    # the original called .get() twice, incrementing one copy but saving a
    # fresh, unmodified copy, which silently lost the update.
    target_folder = params['target_folder_key'].get()
    target_folder.n_items += 1
    target_folder.put()

    status = 'Success: from copyLink'
    return status
def post(self):
    url = self.request.get("url")
    if url:
        URL_RE = re.compile(r"(https|http)")
        if not URL_RE.match(url):
            url = "http://%s" % url
        u = memcache.get(url)
        # if url is already cached
        if u:
            params = {"hash": str(u.hash), "link": str(u.url)}
            self.render("url_form.html", **params)
        else:
            try:
                l = Link()
                l.seed(url)
                memcache.set(url, l)
                memcache.set(str(l.hash), l)
                params = {"hash": str(l.hash), "link": str(l.url)}
                self.render("url_form.html", **params)
            except ValueError:
                params = {"error": True}
                self.render("url_form.html", **params)
    else:
        params = {"error": True}
        self.render("url_form.html", **params)
def submit(request):
    """
    View for submitting a URL
    """
    if settings.SHORTENER_REQUIRES_LOGIN and not request.user.is_authenticated():
        # TODO redirect to an error page
        raise Http404
    url = None
    link_form = None
    if request.GET:
        link_form = LinkSubmitForm(request.GET)
    elif request.POST:
        link_form = LinkSubmitForm(request.POST)
    if link_form and link_form.is_valid():
        url = link_form.cleaned_data['u']
        link = None
        try:
            link = Link.objects.get(url=url)
        except Link.DoesNotExist:
            pass
        if link is None:
            new_link = Link(url=url)
            new_link.save()
            link = new_link
        values = default_values(request)
        values['link'] = link
        return render_to_response('url_shortener_submit_success.html',
                                  values,
                                  context_instance=RequestContext(request))
    values = default_values(request, link_form=link_form)
    return render_to_response('url_shortener_submit_failed.html',
                              values,
                              context_instance=RequestContext(request))
def fake_link():
    # Names aligned with the variables; the original reused the Twitter/Facebook
    # strings from another fixture.
    baidu = Link(name='Baidu', url='#')
    taobao = Link(name='Taobao', url='#')
    qq = Link(name='QQ', url='#')
    lol = Link(name='LOL', url='#')
    db.session.add_all([baidu, taobao, qq, lol])
    db.session.commit()
def add(request):
    # add a new link
    link = Link(
        title=request.POST["title"],
        desc=request.POST["desc"],
        url=request.POST["url"]
    )
    link.save()
    return HttpResponseRedirect("/links")
def shorten(url, title, size):
    q = Link.all()
    q.filter("url =", url)
    urls = q.fetch(1)
    if len(urls) > 0:
        return urls[0].short_url()
    newLink = Link(url=url, title=title, size=size)
    newLink.save()
    return newLink.short_url()
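# Hedged usage sketch for shorten() above: because of the datastore lookup,
# repeated calls with the same URL reuse the existing entity instead of
# creating duplicates (values are illustrative; requires a GAE datastore):
first = shorten('http://example.com/a', title='Example', size=1024)
again = shorten('http://example.com/a', title='Example', size=1024)
assert first == again  # same Link row, same short URL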
def wwwinit_add_urls_to_text_set(text_set_id, urls):
    text_set = get(text_set_id)
    for url in urls:
        link = Link()
        link.url = url
        text_set_link = TextSetLink()
        text_set_link.link = link
        text_set.links.append(text_set_link)
    db.session.commit()
def review(request):
    if request.method == 'POST':
        link = Link.get_by_id(int(request.POST['id']))
        link.status = request.POST['action']
        link.put()
    approved_links = Link.all().filter('status =', 'approved')
    submitted_links = Link.all().filter('status =', 'submitted')
    return render_to_response('links/review.html', locals(),
                              context_instance=RequestContext(request))
def decision_helper(decision):
    """
    Iterates through children, waits for ends.
    When an end is found, finish the decision.
    If the end has more parents than the decision has branches, bubble the end upwards.
    """
    # Create decision end if it does not exist.
    if not Link.objects.filter(parent=decision, name='related').exists():
        end = DecisionEnd(workflow=workflow, node_type=DecisionEnd.node_type)
        end.save()
        link = Link(name='related', parent=decision, child=end)
        link.save()

    children = [link.child.get_full_node()
                for link in decision.get_children_links().exclude(name__in=['error', 'default'])]

    ends = set()
    for child in children:
        end = helper(child)
        if end:
            ends.add(end)

    # A single end means that we've found a unique end for this decision.
    # Multiple ends mean that we've found a bad decision.
    if len(ends) > 1:
        raise RuntimeError(_('Cannot import workflows that have decision paths with multiple terminal nodes that converge on a single terminal node.'))
    elif len(ends) == 1:
        end = ends.pop()

        # Branch count will vary with each call if we have multiple decision nodes embedded within decision paths.
        # This is because parents are replaced with DecisionEnd nodes.
        fan_in_count = len(end.get_parent_links().exclude(name__in=['error', 'default']))

        # IF it covers all branches, then it is an end that perfectly matches this decision.
        # ELSE it is an end for a decision path that the current decision node is a part of as well.
        # The unhandled case is multiple ends for a single decision that converge on a single end.
        # This is not handled in Hue.
        fan_out_count = len(decision.get_children_links().exclude(name__in=['error', 'default']))

        if fan_in_count > fan_out_count:
            insert_end(end, decision)
            return end
        elif fan_in_count == fan_out_count:
            insert_end(end, decision)

            # End node is a decision node.
            # This means that there are multiple decision nodes in sequence.
            # If both decision nodes are within a single decision path,
            # then the end may need to be returned, if found.
            if isinstance(end, Decision):
                end = decision_helper(end)
                if end:
                    return end

            # Can do this because we've replaced all its parents with a single DecisionEnd node.
            return helper(end)
        else:
            raise RuntimeError(_('Cannot import workflows that have decision paths with multiple terminal nodes that converge on a single terminal node.'))
    else:
        raise RuntimeError(_('Cannot import workflows that have decision paths that never end.'))

    return None
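# The fan-in/fan-out comparison is the crux of decision_helper: an end "belongs"
# to this decision only when the non-meta links flowing into the end match the
# branches leaving the decision. A framework-free sketch of that three-way test
# (`classify_end` is an illustrative name, not Hue API):
def classify_end(fan_in_count, fan_out_count):
    if fan_in_count > fan_out_count:
        return 'bubble up'   # end also terminates an enclosing decision path
    if fan_in_count == fan_out_count:
        return 'owned'       # end matches this decision exactly
    return 'invalid'         # converging ends that Hue cannot import

assert classify_end(3, 2) == 'bubble up'
assert classify_end(2, 2) == 'owned'
assert classify_end(1, 2) == 'invalid'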
def post(self):
    url = self.request.get("url")
    custom_path = self.request.get("custom_path")
    #if custom_path:
    #    exists = Link.filter("path =", custom_path).get()
    #    if exists:
    #        return "path already exists, choose another"
    link = Link(url=url, custom_path=custom_path)
    link.put()
def add(request):
    if ('url' in request.GET and 'user' in request.GET
            and 'channel' in request.GET and 'title' in request.GET):
        link = Link(url=request.GET['url'],
                    user=request.GET['user'],
                    channel=request.GET['channel'],
                    title=request.GET['title'])
        link.save()
        #if request.GET['title'] == "Binary Data or File":
        #    p = subprocess.Popen(['wget', '-P', '/raid/archives/Incoming/sumdumbot/', request.GET['url']])
        return HttpResponse('OK')
    else:
        return HttpResponse('Error')
def key_url(key):
    l = Link(key=key)
    url = l.url
    if url:
        l.incr_click()
        return redirect(l.url)
    else:
        abort(404)
def test_link_model(self):
    link = Link(id='http://www.github.com/', title='My Github')
    Link(id='http://www.bitbucket.com/', title='My bitbucket')
    link_key = link.put()
    self.assertEquals(link_key.id(), 'http://www.github.com/')

    # Link queries
    link = Link.get_by_id('http://www.github.com/')
    self.assertEquals(link.title, 'My Github')
def create():
    # gen id
    url_id = util.generate_id(8)
    # save to DB
    Link.insert_url(url_id, request.form['url'])
    # return link to page
    return redirect(url_for('share', url_id=url_id))
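# util.generate_id is not shown in this section; a minimal sketch of what an
# 8-character URL-safe ID generator could look like (an assumption about its
# behavior, not the project's actual code):
import secrets
import string

def generate_id(length=8):
    alphabet = string.ascii_letters + string.digits
    return ''.join(secrets.choice(alphabet) for _ in range(length))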
def linkmgnt():
    perpage = g.config["ADMIN_ITEM_COUNT"]
    page = int(request.args.get("page", 1))
    linklist = Link.get_page(offset=perpage * (page - 1), limit=perpage)
    pager = gen_pager(page, Link.count(), perpage, request.url)
    return render_template('admin/linkmgnt.html',
                           linklist=linklist,
                           admin_url="linkmgnt",
                           pager=pager)
def redir(request, encoded):
    # Potential shortened link. Decode and check to see if it's in the database.
    # If so, redirect to the full url page. If not, error.
    toRedirect = Link()
    findId = toRedirect.decode(encoded)
    try:
        check = Link.objects.get(id=findId)
    except Link.DoesNotExist:
        return HttpResponse("ERROR, YO")
    return HttpResponseRedirect(check.longUrl())
def submit(request):
    # Allow link input to create a shortened link.
    link_form = LinkSubmitForm(request.POST)
    print(request.POST)
    if link_form.is_valid():
        url = link_form.cleaned_data['submitForm']

        # Arg parse: split the query string into its individual arguments.
        urlPart = url.partition('?')
        args = urlPart[2].split('&')
        combinations = []
        for i in range(0, len(args) + 1):
            for perm in itertools.combinations(args, i):
                combinations.append(perm)
        print(combinations)

        count = 0
        # Image gen: screenshot every combination of query arguments.
        for c in combinations:
            urlCombination = urlPart[0] + "?"
            for arg in c:
                urlCombination = urlCombination + arg + "&"
            urlCombination = urlCombination[0:len(urlCombination) - 1]
            webkit2png.generate_image(
                urlCombination,
                os.path.join(settings.PROJECT_ROOT, "media/ss_%d.png" % count))
            count = count + 1

        # Gets the shortened link if this url has been shortened already.
        # If not, it makes a new one.
        link = None
        try:
            link = Link.objects.get(url=url)
        except Link.DoesNotExist:
            pass
        if link is None:
            new_link = Link(url=url)
            new_link.save()
            link = new_link
        return render_to_response(
            'results.html',
            {"encodedLink": link.shortUrl(), "perm": simplejson.dumps(combinations)},
            context_instance=RequestContext(request))
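# The nested loops in submit() enumerate the power set of query arguments; the
# same result in one expression, shown here only to make the intent explicit:
import itertools

args = ['a=1', 'b=2']
combos = [c for i in range(len(args) + 1) for c in itertools.combinations(args, i)]
assert combos == [(), ('a=1',), ('b=2',), ('a=1', 'b=2')]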
def setUp(self):
    self.state_code = 'NY'
    create_state(code=self.state_code)
    self.category = create_category()
    self.subcategory = create_subcategory(self.category.id)
    self.link = Link(
        subcategory_id=self.subcategory.id,
        state=self.state_code,
        text='Section 20 of Statute 39-B',
        url='ny.gov/link/to/statute',
    ).save()
def create(request):
    """
    Create a link.
    """
    content_id = int(request.POST["contentid"])
    supporter_id = int(request.POST["supporterid"])
    link = Link(content_id=content_id,
                supporter_id=supporter_id,
                compromised=False)
    link_id = link.put().id()
    return HttpResponse("Created a link: %s %s %s" % (content_id, supporter_id, link_id))
def t005_Link(self):
    """List of links"""
    L.i("LinkTestData load start")
    c = 0
    for el in TestData.listLinkProperty:
        L.i("   Dataloaded #{0}".format(c))
        c += 1
        link = Link(title=el['title'], url=el['url'], clicks=el['clicks'])
        link.save()
        self.links.append(link)
    L.i("LinkTestData load ended")
    return True
def get_default_discovery_link(fab_id):
    logger.debug("creating default link for discovery")
    link_default = Link()
    link_default.topology_id = fab_id
    link_default.dummy = True
    link_default.src_ports = SPINE
    link_default.dst_ports = LEAF
    link_default.link_type = PHYSICAL
    link_default.num_links = 1
    link_default.save()
    logger.debug("Default link created")
    return link_default
def test():
    import hashlib
    from google.appengine.api import taskqueue
    url = 'http://cusee.net/2462861'
    key = hashlib.md5(url).hexdigest()
    link = Link(key_name=key, authors=['kkung'], link_url=url)
    link.put()
    taskqueue.add(url='/_worker/fetch_title',
                  params={'url': url, 'key': key})
    return u'OK', 200
def post(self):
    json = {}
    if not self.current_user:
        json = {'error': 1, 'msg': self._('Access denied')}
        self.write(json)
        return

    title = self.get_argument('title', None)
    url = self.get_argument('url', None)
    description = self.get_argument('description', '')

    # validate arguments
    if not title:
        json = {'error': 1, 'msg': self._('Title field can not be empty')}
        self.write(json)
        return
    if not url:
        json = {'error': 1, 'msg': self._('URL field can not be empty')}
        self.write(json)
        return

    # create link
    link = Link()
    link.title = title
    link.url = url
    link.description = description
    self.db.add(link)
    self.db.commit()

    # delete cache
    keys = ['LinkList', 'SystemStatus']
    self.cache.delete_multi(keys)

    json = {
        'error': 0,
        'msg': self._('Successfully created'),
        'link': {
            'id': link.id,
            'title': link.title,
            'url': link.url,
            'description': link.description
        }
    }
    self.write(json)
def build_average_rssi_for_distance_plot(log: str):
    measured_log = create_linkpairs(log, as_pair=False)
    bar_y, x, dist_x = [], [], []
    count = 0
    prev = 0
    max_dist = max([val.distance for val in measured_log]) + 20
    for dist in range(0, int(max_dist), 20):
        bucket = [link.rssi for link in measured_log
                  if prev < link.distance <= dist]
        lower = prev  # keep the bucket's lower bound before advancing prev;
        prev = dist   # the original used prev after this assignment, so labels read '{dist + 1}-{dist}'
        if len(bucket) < 1:
            continue
        count += len(bucket)
        avg_rssi = sum(bucket) / len(bucket)
        bar_y.append(avg_rssi)
        x.append(f'{lower + 1}-{dist}')
        dist_x.append(dist)

    # print(list(zip(range(len(bar_y)), bar_y)))
    # print(list(zip(range(len(bar_y)), [26 - Link.l_d(dist) for dist in range(1, int(max_dist), 20)])))
    print(x)
    print(dist_x)
    print(list(zip(dist_x, bar_y)))
    print(list(zip(dist_x, [26 - Link.l_d_org(dist) for dist in dist_x])))
    print()

    data = [
        go.Scatter(x=x, y=bar_y, name='Average RSSI'),
        go.Scatter(x=x, y=[26 - Link.l_d_org(dist) for dist in dist_x],
                   name='Distance function')
    ]
    log_name = parse_logfile_name(log)
    layout = go.Layout(
        title=f'{log_name} data - The average RSSI in each distance bucket raw - Sample size: {count}',
        xaxis=dict(title='Distance bucket'),
        yaxis=dict(title='Average RSSI'))
    fig = go.Figure(data=data, layout=layout)
    plotly.offline.plot(
        fig,
        filename=f'plots/The_average_RSSI_in_each_distance_bucket_{log_name}.html')
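# The bucketing logic above is independent of plotly; a self-contained sketch of
# the same idea on plain (distance, rssi) tuples (names are illustrative):
def average_rssi_by_bucket(samples, width=20):
    """samples: iterable of (distance, rssi); returns {(lo, hi): avg_rssi}."""
    out = {}
    if not samples:
        return out
    max_dist = max(d for d, _ in samples) + width
    prev = 0
    for hi in range(0, int(max_dist), width):
        bucket = [r for d, r in samples if prev < d <= hi]
        if bucket:
            out[(prev + 1, hi)] = sum(bucket) / len(bucket)
        prev = hi
    return out

assert average_rssi_by_bucket([(5, -40), (15, -50), (25, -70)]) == {
    (1, 20): -45.0, (21, 40): -70.0}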
def main():
    unvisited_links = session.query(Link).\
        filter(Link.visited_at == None).all()

    if len(unvisited_links) == 0:
        print("Nothing to visit right now.")

    for link in unvisited_links:
        try:
            # Fetch the page the Link row points at (the original passed the
            # model object itself to requests.get).
            r = requests.get(link.url)
            soup = BeautifulSoup(r.text, 'html.parser')
            for site_url in set([o.get('href') for o in soup.find_all('a')]):
                if site_url is None:
                    continue
                url = site_url
                if not is_url(site_url):
                    url = urljoin(link.get_domain(), site_url)
                print('Found: {}'.format(url))
                l = session.query(Link).\
                    filter(Link.url == url).first()
                if l is not None:
                    continue
                l = Link(url=url)
                domain = l.get_domain()
                domain_in_db = session.query(Domain).\
                    filter(Domain.url == domain).\
                    first()
                if domain_in_db is None:  # the original's `in None` was a typo
                    print("Found new domain: {}".format(domain))
                    domain_in_db = Domain(url=domain)
                    save(domain_in_db)
                l.domain = domain_in_db
                save(l)
        except Exception:
            print('Something went wrong.')
        finally:
            link.visited_at = datetime.now()
            save(link)
def post(self):
    if users.get_current_user():
        link = Link()
        link.author = users.get_current_user()
        link.content = urllib2.quote(self.request.get("link"))
        link.put()
        self.response.out.write("Sent %s to the cloud." % self.request.get("link"))
    elif oauth.get_current_user():
        link = Link()
        link.author = oauth.get_current_user()
        link.content = urllib2.quote(self.request.get("link"))
        link.put()
        self.response.out.write("Sent %s to the cloud." % self.request.get("link"))
    else:
        self.redirect(users.create_login_url("/links/add"))
def linklistcreateaddlinkpost(request):
    '''
    create a new link, add it to an existing link list, then save to storage
    '''
    (name, link, tags, linklist_id) = map(
        lambda var_name: request.POST[var_name],
        ['name', 'url', 'tags', 'linklist_id'])

    # create a new link
    new_link = Link(name=name, link=link, tags=tags)
    new_link.save()

    # look up the target list by id
    linklist = LinkList.objects.get(id=linklist_id)

    # add link to db
    linklist.links.add(new_link)
    linklist.save()

    # return the success page
    template = loader.get_template('bookmarks/linklistcreateaddlinkpost.html')
    context = RequestContext(request, {'linklist': linklist})
    return HttpResponse(template.render(context))
def post(self):
    url = self.request.get("url")
    custom_path = self.request.get("custom_path")
    if custom_path:
        exists = Link.filter("path =", custom_path).get()
        if exists:
            return "path already exists, choose another"
    # Store the link once the custom path is known to be free (the original
    # handler stopped after the existence check and never saved anything).
    link = Link(url=url, custom_path=custom_path)
    link.put()
def xtest_ordered_list(self):
    u, p = self.make_profile('rowena')
    home = Link(url='http://mysite.com', text='My Site', owner=p)
    flickr = Service(name='Flickr')
    flickr_pics = Link(service=flickr, text='More Photos',
                       url='http://www.flickr.com/ropix', owner=p)
    picassa_pics = Link(text="Photos", url="http://www.picassa.com/ropix", owner=p)
    ll = ListOfLinks([home, flickr_pics, picassa_pics], p)
    self.assertEquals(len(ll), 3)
    self.assertEquals(ll[0], home)
def get(request, link_id, sms_code):
    link = Link.query(Link.uuid == link_id).get()
    if link is None:
        return HttpResponseServerError("bad link")

    authcode = AuthCode.query(AuthCode.uuid == link.key).get()
    if authcode is None:
        return HttpResponseServerError("bad link")

    content = link.content.get()
    # Check content exists before touching its attributes; the original
    # dereferenced content.status first and only tested for None afterwards.
    if content is None:
        return HttpResponseServerError("bad link")
    if content.status != "published":
        return HttpResponseServerError("deactivated link")

    # Verify code
    if sms_code != authcode.code:
        return HttpResponse("bad code", None, 403)  # 403 Forbidden

    # check timestamp
    now = datetime.datetime.now()
    if now > authcode.timeout:
        return HttpResponse("timeout", None, 403)  # 403 Forbidden

    return HttpResponse(json_fixed.dumps(content))
def deleteUrls():
    if request.method == 'POST':
        app.logger.info('request form: {}'.format(request.form))
        topic = request.form.get('topic')
    elif request.method == 'GET':
        app.logger.info('request args: {}'.format(request.args))
        topic = request.args.get('topic')
    if not topic:
        abort(400)
    app.logger.info('Topic param received: {}'.format(topic))

    cnt = Link.removeOld(topic,
                         datetime.datetime.utcnow() - datetime.timedelta(days=30))

    # continue with scoring urls
    taskqueue.add(url='/cron/score/urls', params={'topic': topic})

    # mail.send_mail(
    #     sender='*****@*****.**',
    #     to='*****@*****.**',
    #     subject='Delete Urls {}'.format(topic),
    #     body='Removed {} links for topic {}'.format(cnt, topic),
    # )
    return Response('OK')
def get(self, name=''):
    objs = Tag.get_tag_page_posts(name, 1)
    catobj = Tag.get_tag_by_name(name)
    if not catobj:
        self.redirect(BASE_URL)
        return
    allpost = catobj.id_num
    allpage = allpost / EACH_PAGE_POST_NUM
    if allpost % EACH_PAGE_POST_NUM:
        allpage += 1

    output = self.render('index.html', {
        'title': "%s - %s" % (catobj.name, SITE_TITLE),
        'keywords': catobj.name,
        'description': SITE_DECR,
        'objs': objs,
        'cats': Category.get_all_cat_name(),
        'tags': Tag.get_hot_tag_name(),
        'page': 1,
        'allpage': allpage,
        'listtype': 'tag',
        'name': name,
        'namemd5': md5(name.encode('utf-8')).hexdigest(),
        'comments': Comment.get_recent_comments(),
        'links': Link.get_all_links(),
    }, layout='_layout.html')
    self.write(output)
    return output
def submit_item(request):
    if request.method == 'POST':
        form = SubmissionForm(request.POST)
        if form.is_valid():
            cd = form.cleaned_data
            cd['up_votes'] = 0
            cd['down_votes'] = 0
            cd['date_time'] = datetime.datetime.now()
            # Insert the link object into the database.
            link = Link(**cd)
            link.save()
            url = reverse('vote_submit', args=['True'])
            return HttpResponseRedirect(url)
    else:
        form = SubmissionForm()  # unbound form; the original was missing the call parentheses
    return render_to_response('submit_item.html', {'form': form},
                              context_instance=RequestContext(request))
def test_put_link(self, mock_auth):
    link = Link(state=self.state1_code, subcategory_id=self.subcategory.id).save()
    data = {
        'text': 'Section 20 of Statute 39-B',
        'url': 'ny.gov/link/to/statute',
    }
    response = self.client.put('/links/%i' % link.id,
                               json=data,
                               headers=auth_headers())
    self.assertEqual(response.status_code, 200)
    mock_auth.assert_called_once()

    # Refresh link object
    link = Link.query.first()
    subcategory = Subcategory.query.first()
    self.assertEqual(link.text, 'Section 20 of Statute 39-B')
    self.assertEqual(link.url, 'ny.gov/link/to/statute')

    json_response = json.loads(response.data.decode('utf-8'))
    self.assertEqual(json_response, {
        'id': link.id,
        'subcategory_id': subcategory.id,
        'state': self.state1_code,
        'text': 'Section 20 of Statute 39-B',
        'url': 'ny.gov/link/to/statute',
        'active': True,
        'deactivated_at': None,
    })
def retrieve_link(self, short_code):
    link = None
    try:
        link = Link.get(Link.short_code == short_code)
    except DoesNotExist:
        pass
    return link
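# Hedged usage sketch: retrieve_link returns None rather than raising when the
# short code is unknown, so a caller can branch cleanly (the surrounding
# handler shape is illustrative, not from the source):
def resolve(self, short_code):
    link = self.retrieve_link(short_code)
    if link is None:
        return None  # caller maps this to a 404
    return link.url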
def test_new(self):
    rev = self.scott_rev
    link = Link.new(rev, 0, 'sally', 'Dear Diary', "Sally's Diary")
    self.assertEqual(0, link.link_num)
    self.assertEqual('sally', link.tgt_page_uid)
    self.assertEqual('Dear Diary', link.tgt_page_title)
    self.assertEqual("Sally's Diary", link.tgt_page_alias)
def fetch_title():
    url = request.form.get("url")
    key = request.form.get("key")
    link = Link.get_by_key_name(key)
    if not link:
        return ("Model %s does not exist" % key), 404

    result = urlfetch.fetch(url=url,
                            follow_redirects=True,
                            headers={"Accept": "text/html"},
                            method=urlfetch.HEAD)
    if result.status_code == 200 and result.headers.get("Content-Type").startswith("text/html"):
        result = urlfetch.fetch(url=url,
                                follow_redirects=True,
                                headers={"Accept": "text/html"},
                                method=urlfetch.GET)
        if result.status_code == 200:
            _search = _TITLE_RE_.search(result.content)
            if _search:
                r_title = _search.groups()[0]
                guess_encoding = chardet.detect(r_title)
                logging.error("title: %s guess: %s" % (r_title, repr(guess_encoding)))
                link.title = unicode(r_title, guess_encoding["encoding"])
                link.put()
                return u"Link %s title updated with %s." % (url, link.title), 200
            else:
                return u"Link %s does not have html title" % url, 200
        else:
            return u"Link %s return status %d" % (url, result.status_code), 500
    else:
        return u"Link %s return status %d" % (url, result.status_code), 500
def get(self, direction='next', page='2', base_id='1'):
    if page == '1':
        self.redirect(BASE_URL)
        return
    objs = Article.get_page_posts(direction, page, base_id)
    if objs:
        if direction == 'prev':
            objs.reverse()
        fromid = objs[0].id
        endid = objs[-1].id
    else:
        fromid = endid = ''
    allpost = Article.count_all_post()
    allpage = allpost / EACH_PAGE_POST_NUM
    if allpost % EACH_PAGE_POST_NUM:
        allpage += 1

    output = self.render('index.html', {
        'title': "%s - %s | Part %s" % (SITE_TITLE, SITE_SUB_TITLE, page),
        'keywords': KEYWORDS,
        'description': SITE_DECR,
        'objs': objs,
        'cats': Category.get_all_cat_name(),
        'tags': Tag.get_hot_tag_name(),
        'page': int(page),
        'allpage': allpage,
        'listtype': 'index',
        'fromid': fromid,
        'endid': endid,
        'comments': Comment.get_recent_comments(),
        'links': Link.get_all_links(),
    }, layout='_layout.html')
    self.write(output)
    return output
def extract_section_to_urls(topics: List[Topic]) -> Dict[str, List[Link]]:
    """Create Dict of topic section to mapping of URL to titles, like:
    {
        "Streamlit Updates": [
            Link(url="https://www.streamlit.io/sharing",
                 title="Streamlit sharing was announced today"),
            Link(url="https://share.streamlit.io/streamlit/demo-uber-nyc-pickups/",
                 title="Check out the new Streamlit Sharing")
        ],
        "Articles": [ ... ],
        ...
    }
    """
    parsed_html_per_topic = [
        BeautifulSoup(topic.post_stream.posts[0].cooked, "html.parser")
        for topic in topics
    ]
    section_to_links_per_topic = [{
        h2_section.text.strip(): [
            Link(url=li.find("a")["href"], title=li.text.strip())
            for li in h2_section.find_next_sibling("ul").find_all("li")
            if li.find("a") is not None and _validate_link(li.find("a")["href"])
        ]
        for h2_section in topic_html.find_all("h2")
    } for topic_html in parsed_html_per_topic]
    section_to_links = reduce(_merge_dict_of_lists, section_to_links_per_topic)
    return section_to_links
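# _merge_dict_of_lists is not shown here; a minimal implementation consistent
# with its use in reduce() above (an assumption, not necessarily the project's):
from typing import Dict, List, TypeVar

T = TypeVar("T")

def _merge_dict_of_lists(a: Dict[str, List[T]], b: Dict[str, List[T]]) -> Dict[str, List[T]]:
    merged = {k: list(v) for k, v in a.items()}
    for key, values in b.items():
        merged.setdefault(key, []).extend(values)
    return merged

assert _merge_dict_of_lists({'x': [1]}, {'x': [2], 'y': [3]}) == {'x': [1, 2], 'y': [3]}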
def send_url(message):
    if db.session.query(User).get(message.from_user.id):
        url = message.text
        if db.session.query(Link).filter_by(user_id=message.from_user.id, url=url).first():
            bot.send_message(message.from_user.id,
                             "You already have this URL in your ten last links. "
                             "Please type /last to see it.")
            return
        data = {"url": url}
        response = requests.post(url='https://rel.ink/api/links/', json=data)
        if response.status_code == 400:
            bot.send_message(message.from_user.id, "Enter a valid URL.")
            return
        short_link = 'https://rel.ink/' + response.json()["hashid"]
        data = {"url": url, "short_link": short_link, "user_id": message.from_user.id}
        links = db.session.query(Link).filter_by(user_id=message.from_user.id).all()
        if len(links) >= 10:
            # drop the oldest link so only the ten most recent are kept
            delete_link = db.session.query(Link).filter_by(user_id=message.from_user.id).first()
            db.session.delete(delete_link)
        link = Link(**data)
        db.session.add(link)
        db.session.commit()
        bot.send_message(message.from_user.id, f"Short link: {short_link} .")
    else:
        bot.send_message(message.from_user.id, 'Type /start to use this bot')
def index():
    link = request.json
    newlink = Link()
    newlink.name = link["name"]
    newlink.href = link["href"]
    newlink.display = link["display"]
    newlink.description = link["description"]
    newlink.save()
    return jsonify(success=True, message="success")
def add_link():
    original_url = request.form['original_url']
    link = Link(original_url=original_url)
    db.session.add(link)
    db.session.commit()
    return render_template('link_added.html',
                           new_link=link.short_url,
                           original_url=link.original_url)
def _validate_link(link: str) -> bool:
    """Validate URL using Pydantic's AnyHttpUrl validator"""
    try:
        Link(url=link)
        return True
    except ValidationError:
        print(f"error parsing URL: {link}")
        return False
def delete():
    removelist = request.json
    for link_id in removelist:
        link = Link.get_one(Link.id == link_id)
        if link:
            link.delete_instance()
    signal_update_sidebar.send()
    return jsonify(success=True, message="success")
def update_or_create_link(data, link=None):
    '''
    Takes a dict of data where the keys are fields of the link model.
    Valid keys are subcategory_id, state, text, url, and active.
    The 'active' key only uses a False value to deactivate the link.
    Once created, a link's subcategory or state cannot be changed.
    '''
    subcategory_id = data.get('subcategory_id')
    state = data.get('state')

    if link is None:
        link = Link(subcategory_id=subcategory_id, state=state)
    elif subcategory_id is not None and subcategory_id != link.subcategory_id:
        raise ValueError(strings.cannot_change_subcategory)
    elif state is not None and state != link.state:
        raise ValueError(strings.cannot_change_state)

    if 'text' in data.keys():
        link.text = data['text']
    if 'url' in data.keys():
        link.url = data['url']

    # You cannot reactivate a link after deactivating it
    if 'active' in data.keys() and not data['active']:
        link.deactivate()

    return link.save()
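# Hedged usage sketch for update_or_create_link (field values illustrative):
link = update_or_create_link({'subcategory_id': 1, 'state': 'NY',
                              'text': 'Statute 39-B', 'url': 'ny.gov/statute'})
# Later calls may change text/url, but not subcategory or state:
update_or_create_link({'text': 'Statute 39-C'}, link=link)   # ok
# update_or_create_link({'state': 'CA'}, link=link)          # raises ValueError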
def test_init_invalid_category(self):
    with self.assertRaises(ValueError) as e:
        Link(
            subcategory_id=0,
            state=self.state_code,
            text='Section 20 of Statute 39-B',
            url='ny.gov/link/to/statute',
        )
    self.assertEqual(str(e.exception), subcategory_not_found)
def test_init_invalid_state_code(self):
    with self.assertRaises(ValueError) as e:
        Link(
            subcategory_id=self.subcategory.id,
            state='fake-state-code',
            text='Section 20 of Statute 39-B',
            url='ny.gov/link/to/statute',
        )
    self.assertEqual(str(e.exception), invalid_state)
def links():
    perpage = g.config["ADMIN_ITEM_COUNT"]
    page = int(request.args.get("page", 1))
    count, links = Link.get_page(page, limit=perpage)
    pager = gen_pager(page, count, perpage, request.url)
    return render_template('admin/links.html',
                           links=links,
                           admin_url="links",
                           pager=pager)