def discover():
  """Handles a manual discover request for a silo post or user web page URL.

  Reads the URL from the `url` form param, enqueues discover task(s) for it,
  and redirects back to the source's page with a flashed status message.
  """
  source = util.load_source()
  silo = source.gr_source

  # validate URL, find silo post
  url = request.form['url']
  domain = util.domain_from_link(url)
  msg = 'Discovering now. Refresh in a minute to see the results!'

  if domain == silo.DOMAIN:
    # looks like a silo URL; try to extract a post id from it
    silo_post_id = silo.post_id(url)
    if not silo_post_id:
      msg = f"Sorry, that doesn't look like a {silo.NAME} post URL."
    else:
      path = urllib.parse.urlparse(url).path
      post_type = 'event' if path.startswith('/events/') else None
      util.add_discover_task(source, silo_post_id, type=post_type)
  elif util.domain_or_parent_in(domain, source.domains):
    # one of the user's own pages: run original post discovery on it to find
    # syndication links, then enqueue a discover task per link found
    synd_links = original_post_discovery.process_entry(source, url, {}, False, [])
    if not synd_links:
      msg = f'Failed to fetch {util.pretty_link(url)} or find a {silo.NAME} syndication link.'
    else:
      for link in synd_links:
        util.add_discover_task(source, silo.post_id(link))
      source.updates = {'last_syndication_url': util.now_fn()}
      models.Source.put_updates(source)
  else:
    msg = f'Please enter a URL on either your web site or {silo.NAME}.'

  flash(msg)
  return redirect(source.bridgy_url())
def auth(self):
  """Loads the source and token and checks that they're valid.

  Expects token in the `token` query param, source in `key` or `username`.

  Raises: :class:`HTTPException` with HTTP 404 if the source is missing,
    403 if the token isn't authorized for any of the source's domains

  Returns: BrowserSource or None
  """
  # Load source
  source = util.load_source(self, param='key')
  if not source:
    # BUG FIX: the original interpolated `key` and `username` here without
    # ever defining them, so a missing source raised NameError instead of
    # returning the 404. Read the request params explicitly instead.
    # NOTE(review): assumes webapp2-style `self.request.get()` as used by the
    # sibling handlers in this file — confirm against the enclosing class.
    key = self.request.get('key')
    username = self.request.get('username')
    self.abort(
        404,
        f'No account found for {self.gr_source().NAME} user {key or username}')

  # Load and check token: it's valid if any Domain entity holding it matches
  # one of the source's registered domains.
  token = util.get_required_param(self, 'token')
  for domain in Domain.query(Domain.tokens == token):
    if domain.key.id() in source.domains:
      return source

  self.abort(403, f'Token {token} is not authorized for any of: {source.domains}')
def retry():
  """Re-enqueues a Webmentions entity for sending.

  For Response entities, first re-runs original post discovery on each stored
  activity to pick up new targets, then restarts the entity.
  """
  entity = util.load_source()
  if not isinstance(entity, Webmentions):
    error(f'Unexpected key kind {entity.key.kind()}')

  source = entity.source.get()

  # run OPD to pick up any new SyndicatedPosts. note that we don't refetch
  # their h-feed, so if they've added a syndication URL since we last crawled,
  # retry won't make us pick it up. background in #524.
  if entity.key.kind() == 'Response':
    # (FIX: removed a redundant second `entity.source.get()` datastore fetch
    # here; `source` was already loaded above.)
    for activity in [json_loads(a) for a in entity.activities_json]:
      originals, mentions = original_post_discovery.discover(
        source, activity, fetch_hfeed=False, include_redirect_sources=False)
      entity.unsent += original_post_discovery.targets_for_response(
        json_loads(entity.response_json), originals=originals, mentions=mentions)

  entity.restart()
  flash('Retrying. Refresh in a minute to see the results!')
  return redirect(request.values.get('redirect_to') or source.bridgy_url())
def authorize(self):
  """Checks that the page being published belongs to this handler's source.

  Returns: bool, True if the source loaded from the request matches
    self.source; False otherwise, after reporting an HTML error message.
  """
  from_source = util.load_source()
  if from_source.key == self.source.key:
    return True

  msg = (f'Try publishing that page from '
         f'<a href="{self.source.bridgy_path()}">{self.source.label()}</a> instead.')
  self.error(msg, html=msg)
  return False
def post(self):
  """Task handler: fetches a single silo post and backfeeds its responses.

  Expects `post_id` (required) and `type` (optional, only 'event') in the
  request params. Drops the task silently if the source is missing, disabled,
  or not listening.
  """
  logging.debug('Params: %s', self.request.params)

  type = self.request.get('type')
  if type:
    assert type in ('event',)

  source = util.load_source(self)
  if not source or source.status == 'disabled' or 'listen' not in source.features:
    logging.error('Source not found or disabled. Dropping task.')
    return
  logging.info('Source: %s %s, %s', source.label(), source.key.string_id(),
               source.bridgy_url(self))

  post_id = util.get_required_param(self, 'post_id')
  source.updates = {}

  try:
    if type == 'event':
      activities = [source.gr_source.get_event(post_id)]
    else:
      activities = source.get_activities(fetch_replies=True, fetch_likes=True,
                                         fetch_shares=True, activity_id=post_id,
                                         user_id=source.key.id())

    if not activities or not activities[0]:
      logging.info('Post %s not found.', post_id)
      return
    assert len(activities) == 1, activities
    self.backfeed(source, activities={activities[0]['id']: activities[0]})

    # if this post is a reply, also enqueue a discover task for its parent
    obj = activities[0].get('object') or activities[0]
    in_reply_to = util.get_first(obj, 'inReplyTo')
    if in_reply_to:
      parsed = util.parse_tag_uri(in_reply_to.get('id', ''))  # TODO: fall back to url
      if parsed:
        util.add_discover_task(source, parsed[1])
  # BUG FIX: `except Exception, e` is Python 2 syntax — a SyntaxError under
  # Python 3, which this file targets (f-strings are used elsewhere).
  except Exception as e:
    code, body = util.interpret_http_exception(e)
    # BUG FIX: floor division. With true division, only exactly 500 satisfied
    # `== 5` (e.g. 502 gave 5.02), so most 5xx errors were re-raised instead
    # of aborting with a retryable error code.
    if (code and (code in source.RATE_LIMIT_HTTP_CODES
                  or code in ('400', '404')
                  or int(code) // 100 == 5)
        or util.is_connection_failure(e)):
      logging.error('API call failed; giving up. %s: %s\n%s', code, body, e)
      self.abort(util.ERROR_HTTP_RETURN_CODE)
    else:
      raise
def dispatch_request(self):
  """Task handler: fetches a single post and backfeeds its responses.

  Expects `post_id` (required) and `type` (optional, only 'event') in the
  request params. Returns '' (dropping the task) if the source is missing,
  disabled, or not listening; 'OK' on success.
  """
  logger.debug(f'Params: {list(request.values.items())}')
  g.TRANSIENT_ERROR_HTTP_CODES = ('400', '404')

  type = request.values.get('type')
  if type:
    assert type in ('event', )

  source = g.source = util.load_source()
  if not source or source.status == 'disabled' or 'listen' not in source.features:
    logger.error('Source not found or disabled. Dropping task.')
    return ''
  logger.info(
    f'Source: {source.label()} {source.key_id()}, {source.bridgy_url()}'
  )

  post_id = request.values['post_id']
  source.updates = {}

  if type == 'event':
    activities = [source.gr_source.get_event(post_id)]
  else:
    activities = source.get_activities(fetch_replies=True,
                                       fetch_likes=True,
                                       fetch_shares=True,
                                       activity_id=post_id,
                                       user_id=source.key_id())

  if not activities or not activities[0]:
    logger.info(f'Post {post_id} not found.')
    return ''
  assert len(activities) == 1, activities
  activity = activities[0]
  activities = {activity['id']: activity}

  # STATE: propagate tasks created by backfeed() here get started before their
  # Response entities get created/updated, so they fail with
  # https://github.com/snarfed/bridgy/issues/237 , but that's a red herring;
  # it's really that activities_json and urls_to_activity are empty.
  # Is poll transactional somehow, and this isn't?
  # No more transactional tasks: https://github.com/googleapis/python-tasks/issues/26
  # They're still supported in the new "bundled services" thing, but that seems
  # like a dead end:
  # https://groups.google.com/g/google-appengine/c/22BKInlWty0/m/05ObNEdsAgAJ
  self.backfeed(source, responses=activities, activities=activities)

  # if this post is a reply, also enqueue a discover task for its parent
  obj = activity.get('object') or activity
  in_reply_to = util.get_first(obj, 'inReplyTo')
  if in_reply_to:
    parsed = util.parse_tag_uri(in_reply_to.get(
      'id', ''))  # TODO: fall back to url
    if parsed:
      util.add_discover_task(source, parsed[1])

  return 'OK'
def edit_websites_post():
  """Adds or removes one of the logged-in source's web sites.

  Expects exactly one of the `add` or `delete` params; updates the source's
  domain_urls/domains accordingly and redirects back to the edit page.
  """
  source = util.load_source()
  query = urllib.parse.urlencode({"source_key": source.key.urlsafe().decode()})
  redirect_url = f'{request.path}?{query}'

  add = request.values.get('add')
  delete = request.values.get('delete')
  # exactly one of add/delete must be provided (xor on truthiness)
  if bool(add) == bool(delete):
    error('Either add or delete param (but not both) required')

  pretty = util.pretty_link(add or delete)

  if add:
    resolved = Source.resolve_profile_url(add)
    if not resolved:
      flash(f"{pretty} doesn't look like your web site. Try again?")
    elif resolved in source.domain_urls:
      flash(f'{pretty} already exists.')
    else:
      source.domain_urls.append(resolved)
      source.domains.append(util.domain_from_link(resolved))
      source.put()
      flash(f'Added {pretty}.')
  else:
    assert delete
    try:
      source.domain_urls.remove(delete)
    except ValueError:
      error(f"{delete} not found in {source.label()}'s current web sites")

    # only drop the domain if no remaining URL still maps to it
    domain = util.domain_from_link(delete)
    remaining = {util.domain_from_link(url) for url in source.domain_urls}
    if domain not in remaining:
      source.domains.remove(domain)
    source.put()
    flash(f'Removed {pretty}.')

  return redirect(redirect_url)
def post(self):
  """Task handler: fetches a single silo post and backfeeds its responses.

  Expects `post_id` (required) and `type` (optional, only 'event') in the
  request params. Drops the task silently if the source is missing, disabled,
  or not listening.
  """
  logging.debug('Params: %s', list(self.request.params.items()))

  type = self.request.get('type')
  if type:
    assert type in ('event', )

  source = self.source = util.load_source(self)
  if not source or source.status == 'disabled' or 'listen' not in source.features:
    logging.error('Source not found or disabled. Dropping task.')
    return
  logging.info('Source: %s %s, %s', source.label(), source.key_id(),
               source.bridgy_url(self))

  post_id = util.get_required_param(self, 'post_id')
  source.updates = {}

  if type == 'event':
    activities = [source.gr_source.get_event(post_id)]
  else:
    activities = source.get_activities(fetch_replies=True,
                                       fetch_likes=True,
                                       fetch_shares=True,
                                       activity_id=post_id,
                                       user_id=source.key_id())

  if not activities or not activities[0]:
    logging.info('Post %s not found.', post_id)
    return
  assert len(activities) == 1, activities
  activity = activities[0]
  activities = {activity['id']: activity}
  self.backfeed(source, responses=activities, activities=activities)

  # if this post is a reply, also enqueue a discover task for its parent
  obj = activity.get('object') or activity
  in_reply_to = util.get_first(obj, 'inReplyTo')
  if in_reply_to:
    parsed = util.parse_tag_uri(in_reply_to.get(
      'id', ''))  # TODO: fall back to url
    if parsed:
      util.add_discover_task(source, parsed[1])
def auth(self):
  """Loads the source and token and checks that they're valid.

  Expects token in the `token` query param, source in `key` or `username`.

  Raises: :class:`HTTPException` with HTTP 400 if the token or source are
    missing or invalid, 403 if the token isn't authorized

  Returns: BrowserSource or None
  """
  # Load source
  source = util.load_source(error_fn=self.error)

  # Load and check token: it's valid if any Domain entity holding it matches
  # one of the source's registered domains.
  token = request.values['token']
  for domain in Domain.query(Domain.tokens == token):
    if domain.key.id() in source.domains:
      return source

  self.error(f'Token {token} is not authorized for any of: {source.domains}',
             403)
def delete_start():
  """Kicks off the OAuth flow to confirm deleting a feature from a source.

  Encodes the operation in OAuth state, then redirects to the silo's OAuth
  start URL (or a silo-specific path for Blogger/Reddit/WordPress). Flashes
  an error and returns to the source's page on API failure.
  """
  source = util.load_source()
  kind = source.key.kind()
  feature = request.form['feature']

  state = util.encode_oauth_state({
    'operation': 'delete',
    'feature': feature,
    'source': source.key.urlsafe().decode(),
    'callback': request.values.get('callback'),
  })

  # Blogger doesn't support redirect_url() yet
  if kind == 'Blogger':
    return redirect(f'/blogger/delete/start?state={state}')

  path = ('/reddit/callback' if kind == 'Reddit'
          else '/wordpress/add' if kind == 'WordPress'
          else f'/{source.SHORT_NAME}/delete/finish')

  # NOTE(review): the original also built a kwargs dict with a Twitter
  # access_type ('read' for listen, 'write' otherwise) but never passed it
  # anywhere; removed as dead code. Confirm OAUTH_START doesn't need it.
  try:
    return redirect(source.OAUTH_START(path).redirect_url(state=state))
  except werkzeug.exceptions.HTTPException:
    # raised by us, probably via self.error()
    raise
  except Exception as e:
    code, body = util.interpret_http_exception(e)
    if not code and util.is_connection_failure(e):
      code = '-'
      body = str(e)
    if code:
      flash(f'{source.GR_CLASS.NAME} API error {code}: {body}')
      return redirect(source.bridgy_url())
    else:
      raise
def edit_websites_get():
  """Renders the edit-websites page for the logged-in source."""
  source = util.preprocess_source(util.load_source())
  return render_template('edit_websites.html', source=source)
def setup_refetch_hfeed(): nonlocal source source = util.load_source() source.last_hfeed_refetch = models.REFETCH_HFEED_TRIGGER source.last_feed_syndication_url = None source.put()
def poll_now():
  """Enqueues an immediate poll task for the logged-in source."""
  source = util.load_source()
  util.add_poll_task(source, now=True)
  flash("Polling now. Refresh in a minute to see what's new!")
  return redirect(source.bridgy_url())
def disable():
  """Marks the logged-in source as disabled and redirects to its page."""
  source = util.load_source()
  logger.info(f'Disabling {source.label()}')
  source.status = 'disabled'
  source.put()
  return util.redirect(source.bridgy_path())