def post(self):
    """Populate the title of the Link named by the 'short_url' request
    parameter by fetching it from the link's target page.

    Does nothing when no such Link exists or its title is already set.
    After a successful update the memcache entry is evicted so the next
    lookup reads the fresh datastore row.
    """
    slug = self.request.get('short_url')
    link = Link.find_by_short_url(slug)
    if not link:
        return
    if link.title:
        # Nothing to be done if the title is already populated.
        return
    fetched_title = self.get_title(link.url)
    if fetched_title:
        link.title = fetched_title
        link.put()
        # Evict the cached entry so the next find on Link fetches the
        # updated entity from the datastore.
        memcache.delete('Link_' + slug)
def resolve(self):
    """Resolve ``self.path`` to a destination URL.

    A direct Link (short URL) match wins; otherwise an AggregateLink is
    tried.  Sets ``self.resolvable``, ``self.resolution_result`` and —
    for aggregate matches — ``self.aggregate`` as side effects.

    Returns ``self.resolution_result`` (or ``None`` for an empty path).
    """
    if is_empty(self.path):
        return None
    # First attempt: a Link whose short URL matches the path.
    direct = Link.find_by_short_url(self.path)
    if direct and direct.url:
        self.resolvable = True
        self.resolution_result = direct.url
        return self.resolution_result
    # Fallback: an AggregateLink keyed on the same path.
    aggregate = AggregateLink.find_by_aggregate_url(self.path)
    if aggregate and aggregate.url:
        self.aggregate = True
        self.resolvable = True
        self.resolution_result = aggregate.url
    return self.resolution_result
def post(self):
    """Process a queued RequestData entity.

    Parses the entity's raw headers and records stats against its short
    URL and — when the short URL resolves to a Link that belongs to an
    AggregateLink — against the aggregate URL as well.  The entity is
    then flagged as processed and committed.
    """
    key = self.request.get('request_data_key')
    request_data = RequestData.get(key)
    if request_data:
        headers = HeaderParser(request_data.raw_headers).headers()
        slug = request_data.short_url
        self.update_stats(slug, request_data, headers)
        # TODO: Too many DataStore calls? Denormalize?
        # Also update stats for the Aggregate Link, if one exists for
        # the resolved URL.
        link = Link.find_by_short_url(slug)
        if link:
            aggregate_link = AggregateLink.find_by_url(link.url)
            if aggregate_link:
                self.update_stats(aggregate_link.aggregate_url,
                                  request_data, headers)
        # Mark the request data as processed, so that we can maybe
        # delete it later.
        request_data.processed = True
        self.to_commit.append(request_data)
        self.commit_entities()