def __FullQualifyLink(self, param_name):
    """Read a URL from the request and return it as a db.Link.

    Returns None when the parameter is absent or empty; otherwise
    prefixes 'http://' when the value carries no scheme.
    """
    raw = self.request.get(param_name) or None
    if not raw:
        return None
    if raw.startswith('http'):
        return db.Link(raw)
    return db.Link('http://' + raw)
def save_container_friends(self, container_user, friends):
    """Persist a batch of friends (gathered by the browser, sent via
    ajax) as Container_user entities.

    Returns a (saved, failed) tuple of counts on the early-exit path.
    NOTE(review): no final return is visible in this chunk — it may be
    truncated past `saved += 1`.
    """
    saved = 0
    failed = 0
    # Nothing to do for container users without a character.
    if not container_user.character:
        return saved, failed
    user_friend_ids = set()  # set(container_user.friend_ids) don't save existing friends
    character_keys = set()
    # has_app = db.BooleanProperty()
    # friend_ids = db.StringListProperty()
    # character = db.ReferenceProperty(reference_class=Character)
    for friend in friends.itervalues():
        domain = container_user.domain
        user_id = friend['id']
        try:
            profile_image_url = db.Link(friend['thumbnailUrl'])
        except Exception, e:
            logging.debug('bad thumbnail url (or no image) for friend: %s' % friend)
            logging.debug(e)
            # use facebook no-pic image
            profile_image_url = 'http://static.ak.fbcdn.net/rsrc.php/zBPOE/hash/k9bm7yii.gif'
            failed += 1
            continue
        user_key = domain + ":" + user_id
        user = Container_user.get_by_key_name(user_key)
        # if not user:
        #     user = Container_user(key_name = user_key,
        #                           container_user_id = user_id,
        #                           domain = domain,
        #                           character = None,
        #                           display_name = friend['displayName'],
        #                           profile_image_url = profile_image_url)
        # else:  # update existing fields:
        if user:
            user.display_name = friend['displayName']
            user.profile_image_url = profile_image_url
            if user.character:
                # This friend has a character. Add it to the character's
                # friend list.
                character_keys.add(user.character.key())
            if 'profileUrl' in friend:
                user.profile_url = db.Link(friend['profileUrl'])
            user.ip_address = "1.2.3.4"
            if friend.has_key('hasAppInstalled'):
                user.has_app = friend['hasAppInstalled']
            else:
                user.has_app = False
            user.friend_ids = []
            user.put()
            user_friend_ids.add(user.container_user_id)
            saved += 1
def _generate_short_url(long_url_value):
    """Resolve (or mint) the short URL for ``long_url_value``.

    Lookup order: memcache first, then the UrlStore model; if neither
    has an entry, generate a fresh random id (up to _MAX_TENTATIVE
    attempts) and persist it.
    """
    cached_id = memcache.get(long_url_value)
    if cached_id is not None:
        return '%s/%s' % (_ROOT_URL, cached_id)

    existing = db.GqlQuery("SELECT * FROM UrlStore WHERE long_url = :1",
                           long_url_value)
    if existing.count() > 0:
        for url in existing:
            # Warm both directions of the mapping for two hours.
            memcache.add(long_url_value, url.short_url_key, 7200)
            memcache.add(url.short_url_key, long_url_value, 7200)
            return '%s/%s' % (_ROOT_URL, url.short_url_key)

    attempts = 0
    while attempts < _MAX_TENTATIVE:
        candidate = _random_id()
        collisions = db.GqlQuery(
            "SELECT * FROM UrlStore WHERE short_url_key = :1",
            candidate).count()
        if collisions == 0:
            url_obj = UrlStore(short_url_key=candidate,
                               long_url=db.Link(long_url_value),
                               creation_date=datetime.now())
            url_obj.put()
            memcache.add(long_url_value, candidate, 7200)
            memcache.add(candidate, long_url_value, 7200)
            return '%s/%s' % (_ROOT_URL, candidate)
        attempts += 1
def create(self, url, max_time, max_visits): """Creates a new VapUrl and returns it if successful""" random.seed(str(random.random()) + url) name = ''.join([random.choice('abcdefghijklmnopqrstuvwxyz-0123456789') for i in range(10)]) vapUrl = None try: vapUrl = VapUrl() vapUrl.name = name vapUrl.link = db.Link(url) vapUrl.vaporized = False vapUrl.exp_datetime = datetime.datetime.now() + datetime.timedelta(minutes=max_time) vapUrl.visits_max = max_visits vapUrl.visits_remaining = max_visits vapUrl.put() except: vapUrl = None if vapUrl != None: counters = Counter.all() counters.filter('type = ', 'creates/alltime') if counters.count() > 0: counter = counters[0] #TODO: make transactional counter.count += 1 counter.put() else: counter = Counter() counter.type = 'creates/alltime' counter.count = 1 counter.put() return vapUrl;
def post(self):
    """Handle the sponsor-creation form; requires 'sponsor_add' access."""
    self.access_init()
    if not self.access.check('sponsor_add'):
        self.render_error(403)
        return
    try:
        title = unicode(self.request.get('title'))
        url = db.Link(self.request.get('url'))
        probability = int(self.request.get('probability'))
        logo = None
        if self.request.get('sponsor-logo'):
            logo = db.Key(encoded=self.request.get('sponsor-logo'))
        sponsor = Sponsor(
            title=title,
            url=url,
            probability=probability,
            logo=logo,
            published=bool(self.request.get('published')),
            record_clicks=bool(self.request.get('record-clicks')),
            record_impressions=bool(self.request.get('record-impressions')),
            owner=self.access.current_user)
        sponsor.put()
        SponsorCache.cache_sponsors(self)
        self.redirect_to(settings.sponsor_add_redirect_url)
    except:
        # Any parse/validation failure is reported as 404 (original behavior).
        self.render_error(404)
def find_image_links(self, html_message):
    """Return a db.Link for the src of every <img> tag in the HTML message."""
    soup = BeautifulSoup(html_message)
    return [db.Link(tag['src']) for tag in soup('img')]
def fetch_base(self):
    """Fetch base file for the patch.

    Returns:
      A models.Content instance.

    Raises:
      FetchError: For any kind of problem fetching the content.
    """
    rev = patching.ParseRevision(self.lines)
    if rev is not None:
        if rev == 0:
            # rev=0 means it's a new file.
            return Content(text=db.Text(u''), parent=self)
    # AppEngine can only fetch URLs that db.Link() thinks are OK,
    # so try converting to a db.Link() here.
    try:
        base = db.Link(self.patchset.issue.base)
    except db.BadValueError:
        msg = 'Invalid base URL for fetching: %s' % self.patchset.issue.base
        logging.warn(msg)
        raise FetchError(msg)
    url = utils.make_url(base, self.filename, rev)
    logging.info('Fetching %s', url)
    try:
        result = urlfetch.fetch(url)
    except urlfetch.Error, err:
        msg = 'Error fetching %s: %s: %s' % (url, err.__class__.__name__, err)
        logging.warn('FetchBase: %s', msg)
        raise FetchError(msg)
    # NOTE(review): handling of `result` appears to continue past this chunk.
def FetchBase(base, patch):
    """Fetch the content of the file to which the file is relative.

    Args:
      base: the base property of the Issue to which the Patch belongs.
      patch: a models.Patch instance.

    Returns:
      A models.Content instance.

    Raises:
      FetchError: For any kind of problem fetching the content.
    """
    filename, lines = patch.filename, patch.lines
    rev = patching.ParseRevision(lines)
    if rev is not None:
        if rev == 0:
            # rev=0 means it's a new file.
            return models.Content(text=db.Text(u''), parent=patch)
    # Validate the base URL via db.Link before building the fetch URL.
    try:
        base = db.Link(base)
    except db.BadValueError:
        msg = 'Invalid base URL: %s' % base
        logging.warn(msg)
        raise FetchError(msg)
    url = _MakeUrl(base, filename, rev)
    logging.info('Fetching %s', url)
    try:
        result = urlfetch.fetch(url)
    except Exception, err:
        msg = 'Error fetching %s: %s: %s' % (url, err.__class__.__name__, err)
        logging.warn('FetchBase: %s', msg)
        raise FetchError(msg)
    # NOTE(review): handling of `result` appears to continue past this chunk.
def test_entity_to_dict(self):
    """Converts a datastore.Entity instance to a JSON encodable dict."""
    from datetime import datetime
    from gaesynkit import handlers
    from google.appengine.api import datastore
    from google.appengine.api import datastore_types
    from google.appengine.api import users
    from google.appengine.ext import db
    # Build an entity carrying one property of every supported type so
    # the conversion table is fully exercised.
    entity = datastore.Entity("Test")
    entity.update({
        "string": "A string.",
        "byte_string": datastore_types.ByteString("Byte String"),
        "boolean": True,
        "int": 42,
        "float": 1.82,
        "date": datetime(2011, 01, 06),
        "list": [1, 2, 3, 4],
        "key": db.Key.from_path("Kind", "name"),
        "user": users.User("*****@*****.**"),
        "email": db.Email("*****@*****.**"),
        "location": db.GeoPt(52.500556, 13.398889),
        "category": db.Category("coding"),
        "link": db.Link("http://www.google.com"),
        "im": db.IM("sip", "foobar"),
        "phone": db.PhoneNumber("1 (206) 555-1212"),
        "address": db.PostalAddress("Address"),
        "rating": db.Rating(99)
    })
    # NOTE(review): the assertion against the handlers' conversion output
    # appears to lie outside this chunk.
def addAudioCVToDB(self, response):
    """Store a recorded audio CV as a Candidate and email HR an
    accept/reject link pair.

    Only acts when the session state is 'RecordCV'; otherwise does nothing.
    """
    session = get_current_session()
    state = session.get('state', "not present")
    if (state == "RecordCV"):
        logging.info("url %s", self.request.get('data'))
        resumeURL = urllib2.unquote(self.request.get('data'))
        if resumeURL == "":
            # Fall back to a placeholder recording when no data was posted.
            resumeURL = "http://tfhackday.appspot.com/static/audionotfound.wav"
        # NOTE(review): random ids in [1, 10000] can collide; the
        # key_name overwrite would silently replace an earlier candidate.
        candidateId = random.randint(1, 10000)
        candidate = Candidate(key_name=str(candidateId),
                              candidateId=str(candidateId),
                              date=datetime.datetime.now().date(),
                              resumelink=db.Link(resumeURL),
                              status="InScreen")
        candidate.put()
        # Build the accept/reject email body with candidate-specific links.
        htmlBody = "<html> <body> Dear Hiring Manager: When: Friday, January 06, 2012 11:00 AM-11:30 AM (UTC+05:30) Chennai, Kolkata, Mumbai, New Delhi. Where: Banner (on phone) Please let us know if you have any questions. The HR Team "
        htmlBody += "<a href='http://tfhackday.appspot.com/AcceptCandidate?id="
        htmlBody += str(candidateId) + "'>ACCEPT</a><br><br> "
        htmlBody += "<a href='http://tfhackday.appspot.com/RejectCandidate?id=" + str(candidateId) + "'> REJECT</a> </body></html>"
        mail.send_mail(
            sender="TribalHelpDesk.com Support <*****@*****.**>",
            to="HR Desk <*****@*****.**>, Vipul Jhawar <*****@*****.**>",
            subject="Candidate ID:" + str(candidateId) + "Interview Schedule",
            body="My Body",
            html=htmlBody)
def harvest_facebook_links(self, social_account):
    """Pull 'link'-type Facebook posts for social_account and store any
    not already present as SocialPost entities.

    Incremental: starts from the most recent stored link-post date and
    de-duplicates posts sharing that exact date via their item ids.
    """
    recent_post_date = None
    start_post_date = None
    # Connect to facebook
    fb_connector = connectors.facebook.FacebookConnector(
        app_id = config.FACEBOOK_API_APP_ID,
        app_secret = config.FACEBOOK_API_APP_SECRET,
        app_url = config.APP_URL,
        graph_url = config.FACEBOOK_API_GRAPH_URL,
        oauth_url = config.FACEBOOK_API_OAUTH_URL)
    # Get most recent post date to see what date we should start pulling from
    query = datamodel.SocialPost.gql('WHERE social_account = :1 AND post_type = :2 ORDER BY original_post_date DESC LIMIT 1', social_account, 'link')
    last_social_post = query.get()
    # If a post exists, then lets load from that last date... if not,
    # then lets try to load everything
    if last_social_post:
        recent_post_date = last_social_post.original_post_date
    else:
        recent_post_date = None
        #start_post_date = datetime.datetime(2000,1,1)
    # Build list of all post ids already stored on the last post date,
    # so re-fetching that boundary date does not duplicate them.
    post_id_list = []
    q = db.GqlQuery("SELECT __key__ FROM SocialPost WHERE social_account = :1 AND post_type = :2 AND original_post_date = :3", social_account, 'link', recent_post_date)
    for post_key in q:
        post_id_list.append(datamodel.SocialPost.get(post_key).social_account_item_id)
    json_posts = fb_connector.get_statuses(social_account.user_id, social_account.access_token, recent_post_date, start_post_date)
    # Parse JSON returns
    for post in json_posts['data']:
        # If item exists, dont add it.
        if (post['id'] not in post_id_list):
            post_date = datetime.datetime.strptime(post['created_time'], "%Y-%m-%dT%H:%M:%S+0000")
            social_post = datamodel.SocialPost(user=social_account.user,
                                               social_account=social_account,
                                               social_account_item_id=post['id'],
                                               post_type='link',
                                               raw_text=db.Text(post['message']),
                                               url_list = [db.Link(post['link'])],
                                               url_description = post['description'],
                                               original_post_date=post_date)
            social_post.put()
    pass
def create(cls, nickname, address):
    """Create a User with a gravatar-derived avatar.

    Args:
        nickname: display name for the new user.
        address: email address; whitespace is stripped before use.

    Returns:
        The stored user, or None when the address is already taken.
    """
    address = address.strip()
    # Query for the address directly instead of materializing every User
    # to test membership — the original `[u.address for u in User.all()]`
    # read the entire table on every call.
    if User.all().filter('address =', address).get() is not None:
        return None
    user = cls(nickname=nickname, address=address)
    user.avatar = db.Link(gravatar(address, size=48))
    user.put()
    return user
def create(cls, program_name, title, file_url):
    """Ensure the Program exists, then store and return a new Episode
    keyed by its title."""
    parent_program = Program.get_or_insert(program_name, title=program_name)
    new_episode = Episode(key_name=title,
                          program=parent_program,
                          title=title,
                          file_url=db.Link(file_url))
    new_episode.put()
    return new_episode
def harvest_facebook_posts(self, social_account):
    """Pull Facebook posts for social_account and store new 'status' and
    'video' posts as SocialPost entities; other post types are skipped.

    Incremental: starts from the most recent stored post date.
    """
    recent_post_date = None
    start_post_date = None
    # Connect to facebook
    fb_connector = connectors.facebook.FacebookConnector(
        app_id = config.FACEBOOK_API_APP_ID,
        app_secret = config.FACEBOOK_API_APP_SECRET,
        app_url = config.APP_URL,
        graph_url = config.FACEBOOK_API_GRAPH_URL,
        oauth_url = config.FACEBOOK_API_OAUTH_URL)
    # Get most recent post date to see what date we should start pulling from
    query = datamodel.SocialPost.gql('WHERE social_account = :1 ORDER BY original_post_date DESC', social_account)
    last_social_post = query.get()
    # If a post exists, then lets load from that last date... if not,
    # then lets try to load everything
    if last_social_post:
        recent_post_date = last_social_post.original_post_date
    else:
        recent_post_date = None
        #start_post_date = datetime.datetime(2000,1,1)
    json_posts = fb_connector.get_posts(social_account.user_id, social_account.access_token, recent_post_date, start_post_date)
    # Parse JSON returns
    for post in json_posts['data']:
        # If item exists, dont restore it.
        # TODO: Make this query more efficient!!! Do similar to the harvest_friends...
        query_check = datamodel.SocialPost.gql('WHERE social_account_item_id = :1', post['id'])
        if (query_check.count() < 1):
            post_date = datetime.datetime.strptime(post['created_time'], "%Y-%m-%dT%H:%M:%S+0000")
            if (post['type'] == 'status'):
                social_post = datamodel.SocialPost(user=social_account.user,
                                                   social_account=social_account,
                                                   social_account_item_id=post['id'],
                                                   post_type='status',
                                                   raw_text=db.Text(post['message']),
                                                   original_post_date=post_date)
                social_post.put()
            elif (post['type'] == 'video'):
                social_post = datamodel.SocialPost(user=social_account.user,
                                                   social_account=social_account,
                                                   social_account_item_id=post['id'],
                                                   post_type='video',
                                                   raw_text=db.Text(post['message']),
                                                   url_list=[db.Link(post['link'])],
                                                   original_post_date=post_date)
                social_post.put()
            else:
                pass
def post(self):
    """Record output links and completion time for a finished Lua task,
    then echo the stored values back in the response."""
    key = int(self.request.get('key'))
    script_link = self.request.get('lua_script_link')
    output_link = self.request.get('lua_output_link')
    finished_at = datetime.datetime.now()

    lua_task = LuaTasks.get_lua_task(key)[0]
    lua_task.lua_script_link = db.Link(script_link)
    aggregator_link = self.request.get('lua_aggregator_link')
    if aggregator_link:
        lua_task.lua_aggregator_link = db.Link(aggregator_link)
    lua_task.lua_output_link = db.Link(output_link)
    lua_task.completed_time = finished_at
    lua_task.put()

    out = self.response.out
    out.write('<br/><br/>Updated the datastore-<br/><br/>')
    out.write('key: %s<br/>' % key)
    out.write('lua_script_link: %s<br/>' % script_link)
    out.write('lua_output_link: %s<br/>' % output_link)
    out.write('completed_time: %s<br/>' % finished_at)
def add_task(email, url, xpath, html):
    """Register a scrape task for (email, url, xpath).

    Returns:
        True on success, or an explanatory string when the task is a
        duplicate or the datastore write fails (mixed return types are
        preserved for existing callers).
    """
    e = db.Email(email)
    u = db.Link(url)
    # Guard clause replaces the `== False` comparison anti-idiom.
    if isduplicated(e, u, xpath):
        return "You have inspected this portion."
    task = Task(email=e, url=u, xpath=xpath, html=_set_task_html(html))
    try:
        task.put()
        return True
    except db.Error:
        return 'Datastore save error'
def post(self):
    """Attach an optional diff and/or link to the migration, then emit
    the JSON result; a missing migration short-circuits to the result."""
    migration = self._RequireMigration()
    if not migration:
        self._WriteJsonResult()
        return
    diff_text = self.request.get('diff', '')
    link_text = self.request.get('link', '')
    if diff_text:
        migration.diff = db.Text(diff_text)
    if link_text:
        migration.link = db.Link(link_text)
    migration.put()
    self._WriteJsonResult()
def post(self):
    """Create a mosaic Job: validate the lead image and the Picasa feed,
    store the job, then redirect to the build page.  Any validation
    failure redirects to the homepage with an error flag."""
    req = self.request
    errorUrl = '/?error=true'
    # is targetUrl a valid image?
    leadImgFetch = urlfetch.fetch(req.get("targetUrl"))
    if leadImgFetch.status_code != 200:
        self.redirect(errorUrl)
        return
    try:
        # verify it's an image, and convert to PNG
        lead_img = images.crop(leadImgFetch.content, 0.0, 0.0, 1.0, 1.0, output_encoding=images.PNG)
    except:
        # not an image
        self.redirect(errorUrl)
        return
    # is feedUrl a valid feed?
    feedUrlData = urlfetch.fetch(req.get("feedUrl"))
    if feedUrlData.status_code != 200:
        self.redirect(errorUrl)
        return
    # save the job to the database
    try:
        feedfile = StringIO(feedUrlData.content)
        feedimages = picasafeed.getthumbnailsfromfile(feedfile)
        assert len(feedimages) > 0
    except:
        self.redirect(errorUrl)
        return
    content_type, width, height = getImageInfo(lead_img)
    job = Job(user=users.get_current_user(),
              lead_img_url=db.Link(req.get("targetUrl")),
              lead_img=lead_img,
              lead_img_size=[width, height],
              feed_urls=feedimages,
              uncreated_thumb_indexes=range(len(feedimages)))
    job.put()
    self.redirect('/mosaic/%s/build' % job.key().id())
def post(self, range_id, model_id):
    """Save edits to a CarModel from the form, or bounce straight back
    to the range edit page on cancel."""
    model_range = ModelRange.get(range_id)
    edit_url = '/ranges/' + str(model_range.key()) + '/edit'
    if self.request.get('cancel') == '1':
        return self.redirect(edit_url)
    model = CarModel.get(model_id)
    model.name = self.request.get('name')
    model.year_from = int(self.request.get('yearfrom'))
    model.year_to = int(self.request.get('yearto'))
    model.typeno = self.request.get('tipo')
    model.engine_code = self.request.get('engine')
    image_url = self.request.get('image_url')
    if image_url != '':
        model.image_url = db.Link(image_url)
    model.notes = self.request.get('notes')
    model.put()
    return self.redirect(edit_url)
def get(self): import feedparser #_id = self.request.get_range('id') _link = db.Link(self.request.get('link')) #_name = self.request.get('name') _description = self.request.get('description') _content = self.request.get('content') _filter = self.request.get('filter') _encoding = None m = re.match(r"^#.*?encoding.*?([a-zA-z0-9\-_]+)", _description) if m: _encoding = m.group(1) # get rss try: response = urlfetch.fetch(_link) except Exception, e: self.response.out.write(e) return
def post(self):
    """Accept a new Post from the homepage form, guarded by a trivial
    anti-bot question ('orange')."""
    answer = self.request.get('bot_test').strip().lower()
    if answer != 'orange':
        self.redirect('/')
        return
    post = Post()
    current_user = users.get_current_user()
    if current_user:
        post.author = current_user.nickname()
    url = self.request.get('url').strip()
    if url:
        # Default to http:// when no scheme was supplied.
        if not url.startswith("http"):
            url = "http://" + url
        post.images = [db.Link(url)]
    post.caption = self.request.get('caption')
    post.source = "homepage"
    post.put()
    self.redirect('/')
def parse(self, html):
    """
    Parse the information table on USFIRSTs site to extract relevant team
    information. Return a dictionary.
    """
    team = dict()
    soup = BeautifulSoup(html, convertEntities=BeautifulSoup.HTML_ENTITIES)
    if soup.find(text='No team found.') is not None:
        logging.error('FIRST lacks team.')
        return None
    # The page is a label/value table: field name in the first cell,
    # value in the second.
    for tr in soup.findAll('tr'):
        tds = tr.findAll('td')
        if len(tds) > 1:
            field = str(tds[0].string)
            if field == "Team Number":
                team["team_number"] = int(tds[1].b.string)
            if field == "Team Name":
                team["name"] = unicode(tds[1].string)
            if field == "Team Location":
                #TODO: Filter out 's and stuff -greg 5/21/2010
                team["address"] = unicode(tds[1].string)
            if field == "Rookie Season":
                team["rookie_year"] = int(tds[1].string)
            if field == "Team Nickname":
                team["nickname"] = unicode(tds[1].string)
            if field == "Team Website":
                try:
                    # strip starting and trailing slashes
                    website_str = re.sub(r'^/|/$', '', unicode(tds[1].a["href"]))
                    if not website_str.startswith('http://') and not website_str.startswith('https://'):
                        website_str = 'http://%s' % website_str
                    team['website'] = db.Link(website_str)
                except Exception, details:
                    # Invalid websites are logged and skipped, not fatal.
                    logging.info("Team website is invalid for team %s." % team['team_number'])
                    logging.info(details)
    # NOTE(review): the `return team` appears to lie outside this chunk.
def post(self):
    """Save a new Bookmark for the signed-in user, bump the user's
    bookmark counter, and push the bookmark onto the 'stat_latest' list.

    Any failure writes the exception text straight to the response.
    """
    try:
        guser = users.get_current_user()
        user = model.User.gql('WHERE google_user = :1', guser).get()
        url = db.Link(self.request.get('url'))
        title = self.request.get('title')
        desc = db.Text(self.request.get('desc'))
        tags = self.request.get('tags').split(',')
        for i in range(len(tags)):
            tags[i] = tags[i].strip()
        private = self.request.get('private') == 'on'
        bookmark = model.Bookmark(parent=user, user=user, url=url,
                                  title=title, desc=desc, tags=tags,
                                  private=private)
        bookmark.put()
        # Per-user bookmark counter, created lazily on first bookmark.
        counter = model.Counter.get_by_key_name('%s_bookmark_counter' % guser, parent=user)
        if counter is None:
            counter = model.Counter(key_name='%s_bookmark_counter' % guser, parent=user)
        counter.count += 1
        counter.put()
        # Global "latest bookmarks" stat, newest first.
        latest = model.Stat.get_by_key_name('stat_latest')
        if latest is None:
            latest = model.Stat(key_name='stat_latest')
        latest.data = [bookmark.key()] + latest.data
        latest.put()
        self.redirect('/home')
    except Exception, ex:
        self.response.out.write(ex)
def import_car_models(self, csv): ranges = ModelRange.all().fetch(100) imported = 0 lines = csv.split('\n') for line in lines: if len(line.strip()) == 0: print 'Skipping line', line continue model = CarModel() fields = line.strip().split(';') model.model_range = self.get_range(ranges, fields[0].strip()) model.name = fields[1].strip() model.engine_code = fields[2].strip() model.typeno = fields[3].strip() if fields[4].strip() != '': model.image_url = db.Link(fields[4].strip()) model.year_from = int(fields[5]) model.year_to = int(fields[6]) model.notes = fields[7].strip() model.put() imported = imported + 1 print imported, 'models imported'
def task_model_restore(items):
    """Restore ModelRange entities (and their nested CarModels) from a
    previously exported list of dicts.

    Args:
        items: list of dicts with 'name', 'yearStart', 'yearEnd',
            'notes' and a nested 'carModels' list.
    """
    model_count = 0
    for item in items:
        mrange = ModelRange()
        mrange.name = item['name']
        mrange.year_start = item['yearStart']
        mrange.year_end = item['yearEnd']
        mrange.notes = item['notes']
        mrange.put()
        for model in item['carModels']:
            carmodel = CarModel(key_name=model['modelId'])
            carmodel.name = model['name']
            carmodel.engine_code = model['engineCode']
            carmodel.typeno = model['typeNo']
            if model['imageUrl'] is not None:
                # BUG FIX: was `carmodel.imageUrl`; the CarModel property
                # is spelled `image_url` everywhere else in this file, so
                # the restore silently dropped the image link.
                carmodel.image_url = db.Link(model['imageUrl'])
            carmodel.year_from = model['yearFrom']
            carmodel.year_to = model['yearTo']
            carmodel.notes = model['notes']
            carmodel.model_range = mrange
            carmodel.put()
            model_count = model_count + 1
def get(self, query_type):
    """Search projects either by owner (query_type == 'user') or by
    free text over name/link/description, and render the results page."""
    user = users.get_current_user()
    query = self.request.get('q')
    if query_type == 'user':
        query = users.User(query)
        gql = db.GqlQuery("SELECT * FROM Project WHERE %s = :1" % query_type,
                          query)
        projects = list(gql)
    else:
        projects = Project.all().search(
            query, properties=['name', 'link', 'description']).fetch(20)
    template_values = {
        'user': user,
        'login': users.create_login_url(self.request.uri),
        'logout': users.create_logout_url(self.request.uri),
        'projects': projects,
    }
    if query_type == 'all':
        # Expose the query back to the template as a link when it
        # happens to be a valid URL.
        try:
            template_values['link'] = db.Link(query)
        except Exception:
            pass
    self.response.out.write(
        template.render('template/search.html', template_values))
def post(self, id):
    """Handle the sponsor-edit form; requires 'sponsor_edit' access."""
    self.access_init()
    if not self.access.check('sponsor_edit'):
        self.render_error(403)
        return
    try:
        sponsor = Sponsor.get_by_id(int(id))
        sponsor.title = unicode(self.request.get('title'))
        sponsor.url = db.Link(self.request.get('url'))
        sponsor.probability = int(self.request.get('probability'))
        encoded_logo = self.request.get('sponsor-logo')
        if encoded_logo:
            sponsor.logo = db.Key(encoded=encoded_logo)
        else:
            sponsor.logo = None
        sponsor.published = bool(self.request.get('published'))
        sponsor.record_clicks = bool(self.request.get('record-clicks'))
        sponsor.record_impressions = bool(self.request.get('record-impressions'))
        sponsor.put()
        SponsorCache.cache_sponsors(self)
        self.redirect_to(settings.sponsor_edit_redirect_url)
    except:
        # Any failure (missing sponsor, bad field) renders as 404
        # (original behavior).
        self.render_error(404)
# Round-trip one entity carrying every supported db property type, then
# verify each restored value with failIfNot below.
e1 = Everything(
    str=u"hello",
    bool=True,
    int=10,
    float=5.05,
    datetime=d,
    date=d.date(),
    time=d.time(),
    list=[1, 2, 3],
    strlist=["hello", u'world'],
    user=users.User("*****@*****.**"),
    blob=db.Blob("somerandomdata"),
    text=db.Text("some random text"),
    category=db.Category("awesome"),
    link=db.Link("http://www.10gen.com"),
    email=db.Email("*****@*****.**"),
    geopt=db.GeoPt(40.74067, -73.99367),
    im=db.IM("http://aim.com/", "example"),
    phonenumber=db.PhoneNumber("1 (999) 123-4567"),
    postaladdress=db.PostalAddress("40 W 20th St., New York, NY"),
    rating=db.Rating(99),
)
out = db.get(e1.put())


def failIfNot(reference, value, type):
    # Assert both value equality and exact runtime type of a restored
    # property.
    assert value == reference
    assert isinstance(value, type)
def post(self):
    """Create, update, or delete a feed Project.

    id == 0 with no delete flag creates a new project; otherwise the
    project is loaded by id and, with permission, updated or deleted.
    Cached descriptions are purged on delete or when a selector or the
    encoding changes.
    """
    user = users.get_current_user()
    if not user:
        self.response.set_status(403)
        self.response.out.write('这个操作需要登录')
        return
    _id = self.request.get_range('id')
    _link = db.Link(self.request.get('link'))
    _name = self.request.get('name')
    _description = self.request.get('description')
    _content = self.request.get('content')
    _filter = self.request.get('filter')
    _delete = self.request.get('delete')
    _encoding = None
    # A "#...encoding xxx" line in the description selects a feed encoding.
    m = re.search(r"^#.*?encoding.*?([a-zA-z0-9\-_]+)", _description, re.M)
    if m:
        _encoding = m.group(1)
    if not _delete and _id == 0:
        project = Project(link=_link, name=_name, description=_description,
                          user=user, contentSelector=_content,
                          filterSelector=_filter, encoding=_encoding)
        project.save()
        self.redirect('/e/?id=' + str(project.key().id()) + '&saved=1')
    elif _id != 0:
        project = Project.get_by_id(_id)
        if project and project.link == _link and (
                project.user == user or users.is_current_user_admin()):
            should_remove_cache = False
            # BUG FIX: the encoding test used `==`, which purged the
            # cache when the encoding was UNchanged and kept it when it
            # changed; `!=` matches the other "field changed" checks.
            if (_delete or project.contentSelector != _content
                    or project.filterSelector != _filter
                    or project.encoding != _encoding):
                should_remove_cache = True
            if _delete:
                project.delete()
                self.redirect('/s/user/?q=' + user.email())
            else:
                project.name = _name
                project.description = _description
                project.contentSelector = _content
                project.filterSelector = _filter
                project.encoding = _encoding
                project.lastModifiedDate = datetime.datetime.now()
                project.save()
                self.redirect('/e/?id=' + str(project.key().id()) + '&saved=1')
            if should_remove_cache:
                q = db.GqlQuery(
                    "SELECT __key__ FROM DescriptionCache WHERE project_id = :1",
                    project.key().id())
                r = q.fetch(q.count())
                db.delete(r)
        else:
            self.response.set_status(400)
            self.response.out.write("无法找到porject。或您没有修改权限")
    else:
        self.response.set_status(400)
        self.response.out.write("无法找到porject。")
def createLink(self, url, default=DEFAULT_LINK):
    """Wrap ``url`` in a db.Link, falling back to ``default`` when the
    url is None or an empty string."""
    # `is None` replaces the `== None` comparison anti-idiom.
    if url is None or url == '':
        return default
    return db.Link(url)